1 /* Expands front end tree to back end RTL for GNU C-Compiler
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
3 1998, 1999, 2000 Free Software Foundation, Inc.
4
5 This file is part of GNU CC.
6
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
11
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
21
22 /* This file handles the generation of rtl code from tree structure
23 above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
24 It also creates the rtl expressions for parameters and auto variables
25 and has full responsibility for allocating stack slots.
26
27 The functions whose names start with `expand_' are called by the
28 parser to generate RTL instructions for various kinds of constructs.
29
30 Some control and binding constructs require calling several such
31 functions at different times. For example, a simple if-then
32 is expanded by calling `expand_start_cond' (with the condition-expression
33 as argument) before parsing the then-clause and calling `expand_end_cond'
34 after parsing the then-clause. */
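/* For illustration, a front end expanding `if (c) foo (); else bar ();'
   makes roughly this sequence of calls (arguments abbreviated; the
   expand_* functions in this file define the exact interfaces):

	expand_start_cond (c, 0);	(emit the test of C)
	  ... expand `foo ();' ...
	expand_start_else ();		(jump past the else-clause)
	  ... expand `bar ();' ...
	expand_end_cond ();		(emit the endif label)  */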
35
36 #include "config.h"
37 #include "system.h"
38
39 #include "rtl.h"
40 #include "tree.h"
41 #include "tm_p.h"
42 #include "flags.h"
43 #include "except.h"
44 #include "function.h"
45 #include "insn-flags.h"
46 #include "insn-config.h"
47 #include "insn-codes.h"
48 #include "expr.h"
49 #include "hard-reg-set.h"
50 #include "obstack.h"
51 #include "loop.h"
52 #include "recog.h"
53 #include "machmode.h"
54 #include "toplev.h"
55 #include "output.h"
56 #include "ggc.h"
57
58 #define obstack_chunk_alloc xmalloc
59 #define obstack_chunk_free free
60 struct obstack stmt_obstack;
61
62 /* Assume that case vectors are not pc-relative. */
63 #ifndef CASE_VECTOR_PC_RELATIVE
64 #define CASE_VECTOR_PC_RELATIVE 0
65 #endif
66 \f
67 /* Functions and data structures for expanding case statements. */
68
69 /* Case label structure, used to hold info on labels within case
70 statements. We handle "range" labels; for a single-value label
71 as in C, the high and low limits are the same.
72
73 An AVL tree of case nodes is initially created, and later transformed
74 to a list linked via the RIGHT fields in the nodes. Nodes with
75 higher case values are later in the list.
76
77 Switch statements can be output in one of two forms. A branch table
78 is used if there are more than a few labels and the labels are dense
79 within the range between the smallest and largest case value. If a
80 branch table is used, no further manipulations are done with the case
81 node chain.
82
83 The alternative to the use of a branch table is to generate a series
84 of compare and jump insns. When that is done, we use the LEFT, RIGHT,
85 and PARENT fields to hold a binary tree. Initially the tree is
86 totally unbalanced, with everything on the right. We balance the tree
87 with nodes on the left having lower case values than the parent
88 and nodes on the right having higher values. We then output the tree
89 in order. */
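/* For example, `switch (i) { case 1: ... case 2: ... case 3: ... }'
   has labels dense in [1,3], so a branch table indexed by I would
   normally be chosen and the node chain left alone; a sparse set such
   as `case 1:' and `case 1000:' instead becomes a balanced binary
   tree emitted as compare-and-jump insns.  */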
90
91 struct case_node
92 {
93 struct case_node *left; /* Left son in binary tree */
94 struct case_node *right; /* Right son in binary tree; also node chain */
95 struct case_node *parent; /* Parent of node in binary tree */
96 tree low; /* Lowest index value for this label */
97 tree high; /* Highest index value for this label */
98 tree code_label; /* Label to jump to when node matches */
99 int balance;
100 };
101
102 typedef struct case_node case_node;
103 typedef struct case_node *case_node_ptr;
104
105 /* These are used by estimate_case_costs and balance_case_nodes. */
106
107 /* This must be a signed type, and non-ANSI compilers lack signed char. */
108 static short cost_table_[129];
109 static short *cost_table;
110 static int use_cost_table;
111 \f
112 /* Stack of control and binding constructs we are currently inside.
113
114 These constructs begin when you call `expand_start_WHATEVER'
115 and end when you call `expand_end_WHATEVER'. This stack records
116 info about how the construct began that tells the end-function
117 what to do. It also may provide information about the construct
118 to alter the behavior of other constructs within the body.
119 For example, they may affect the behavior of C `break' and `continue'.
120
121 Each construct gets one `struct nesting' object.
122 All of these objects are chained through the `all' field.
123 `nesting_stack' points to the first object (innermost construct).
124 The position of an entry on `nesting_stack' is in its `depth' field.
125
126 Each type of construct has its own individual stack.
127 For example, loops have `loop_stack'. Each object points to the
128 next object of the same type through the `next' field.
129
130 Some constructs are visible to `break' exit-statements and others
131 are not. Which constructs are visible depends on the language.
132 Therefore, the data structure allows each construct to be visible
133 or not, according to the args given when the construct is started.
134 The construct is visible if the `exit_label' field is non-null.
135 In that case, the value should be a CODE_LABEL rtx. */
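/* For instance, while expanding the innermost body of

	while (a) { if (b) { int v[n]; ... } }

   nesting_stack runs (innermost first) block -> cond -> loop, chained
   through `all', while block_stack, cond_stack and loop_stack each
   hold just the entry of their own kind, chained through `next'.  */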
136
137 struct nesting
138 {
139 struct nesting *all;
140 struct nesting *next;
141 int depth;
142 rtx exit_label;
143 union
144 {
145 /* For conds (if-then and if-then-else statements). */
146 struct
147 {
148 /* Label for the end of the if construct.
149 There is none if EXITFLAG was not set
150 and no `else' has been seen yet. */
151 rtx endif_label;
152 /* Label for the end of this alternative.
153 This may be the end of the if or the next else/elseif. */
154 rtx next_label;
155 } cond;
156 /* For loops. */
157 struct
158 {
159 /* Label at the top of the loop; place to loop back to. */
160 rtx start_label;
161 /* Label at the end of the whole construct. */
162 rtx end_label;
163 /* Label before a jump that branches to the end of the whole
164 construct. This is where destructors go if any. */
165 rtx alt_end_label;
166 /* Label for `continue' statement to jump to;
167 this is in front of the stepper of the loop. */
168 rtx continue_label;
169 } loop;
170 /* For variable binding contours. */
171 struct
172 {
173 /* Sequence number of this binding contour within the function,
174 in order of entry. */
175 int block_start_count;
176 /* Nonzero => value to restore stack to on exit. */
177 rtx stack_level;
178 /* The NOTE that starts this contour.
179 Used by expand_goto to check whether the destination
180 is within each contour or not. */
181 rtx first_insn;
182 /* Innermost containing binding contour that has a stack level. */
183 struct nesting *innermost_stack_block;
184 /* List of cleanups to be run on exit from this contour.
185 This is a list of expressions to be evaluated.
186 The TREE_PURPOSE of each link is the ..._DECL node
187 which the cleanup pertains to. */
188 tree cleanups;
189 /* List of cleanup-lists of blocks containing this block,
190 as they were at the locus where this block appears.
191 There is an element for each containing block,
192 ordered innermost containing block first.
193 The tail of this list can be 0,
194 if all remaining elements would be empty lists.
195 The element's TREE_VALUE is the cleanup-list of that block,
196 which may be null. */
197 tree outer_cleanups;
198 /* Chain of labels defined inside this binding contour.
199 For contours that have stack levels or cleanups. */
200 struct label_chain *label_chain;
201 /* Number of function calls seen, as of start of this block. */
202 int n_function_calls;
203 /* Nonzero if this is associated with an EH region. */
204 int exception_region;
205 /* The saved target_temp_slot_level from our outer block.
206 We may reset target_temp_slot_level to be the level of
207 this block, if that is done, target_temp_slot_level
208 reverts to the saved target_temp_slot_level at the very
209 end of the block. */
210 int block_target_temp_slot_level;
211 /* True if we are currently emitting insns in an area of
212 output code that is controlled by a conditional
213 expression. This is used by the cleanup handling code to
214 generate conditional cleanup actions. */
215 int conditional_code;
216 /* A place to move the start of the exception region for any
217 of the conditional cleanups, must be at the end or after
218 the start of the last unconditional cleanup, and before any
219 conditional branch points. */
220 rtx last_unconditional_cleanup;
221 /* When in a conditional context, this is the specific
222 cleanup list associated with last_unconditional_cleanup,
223 where we place the conditionalized cleanups. */
224 tree *cleanup_ptr;
225 } block;
226 /* For switch (C) or case (Pascal) statements,
227 and also for dummies (see `expand_start_case_dummy'). */
228 struct
229 {
230 /* The insn after which the case dispatch should finally
231 be emitted. Zero for a dummy. */
232 rtx start;
233 /* A list of case labels; it is first built as an AVL tree.
234 During expand_end_case, this is converted to a list, and may be
235 rearranged into a nearly balanced binary tree. */
236 struct case_node *case_list;
237 /* Label to jump to if no case matches. */
238 tree default_label;
239 /* The expression to be dispatched on. */
240 tree index_expr;
241 /* Type that INDEX_EXPR should be converted to. */
242 tree nominal_type;
243 /* Number of range exprs in case statement. */
244 int num_ranges;
245 /* Name of this kind of statement, for warnings. */
246 const char *printname;
247 /* Used to save no_line_numbers till we see the first case label.
248 We set this to -1 when we see the first case label in this
249 case statement. */
250 int line_number_status;
251 } case_stmt;
252 } data;
253 };
254
255 /* Allocate and return a new `struct nesting'. */
256
257 #define ALLOC_NESTING() \
258 (struct nesting *) obstack_alloc (&stmt_obstack, sizeof (struct nesting))
259
260 /* Pop the nesting stack element by element until we pop off
261 the element which is at the top of STACK.
262 Update all the other stacks, popping off elements from them
263 as we pop them from nesting_stack. */
264
265 #define POPSTACK(STACK) \
266 do { struct nesting *target = STACK; \
267 struct nesting *this; \
268 do { this = nesting_stack; \
269 if (loop_stack == this) \
270 loop_stack = loop_stack->next; \
271 if (cond_stack == this) \
272 cond_stack = cond_stack->next; \
273 if (block_stack == this) \
274 block_stack = block_stack->next; \
275 if (stack_block_stack == this) \
276 stack_block_stack = stack_block_stack->next; \
277 if (case_stack == this) \
278 case_stack = case_stack->next; \
279 nesting_depth = nesting_stack->depth - 1; \
280 nesting_stack = this->all; \
281 obstack_free (&stmt_obstack, this); } \
282 while (this != target); } while (0)
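/* Thus `POPSTACK (loop_stack);' pops every construct entered since the
   innermost loop began, the loop itself included, keeping each
   per-type stack and nesting_depth consistent along the way.  */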
283 \f
284 /* In some cases it is impossible to generate code for a forward goto
285 until the label definition is seen. This happens when it may be necessary
286 for the goto to reset the stack pointer: we don't yet know how to do that.
287 So expand_goto puts an entry on this fixup list.
288 Each time a binding contour that resets the stack is exited,
289 we check each fixup.
290 If the target label has now been defined, we can insert the proper code. */
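/* For example, in

	{ int v[n]; ... goto done; ... }
	done:;

   the goto must restore the stack space allocated for the
   variable-sized V, but DONE is not yet defined when the goto is
   expanded, so a fixup is queued here and finalized by fixup_gotos
   once the label has appeared.  */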
291
292 struct goto_fixup
293 {
294 /* Points to following fixup. */
295 struct goto_fixup *next;
296 /* Points to the insn before the jump insn.
297 If more code must be inserted, it goes after this insn. */
298 rtx before_jump;
299 /* The LABEL_DECL that this jump is jumping to, or 0
300 for break, continue or return. */
301 tree target;
302 /* The BLOCK for the place where this goto was found. */
303 tree context;
304 /* The CODE_LABEL rtx that this is jumping to. */
305 rtx target_rtl;
306 /* Number of binding contours started in current function
307 before the label reference. */
308 int block_start_count;
309 /* The outermost stack level that should be restored for this jump.
310 Each time a binding contour that resets the stack is exited,
311 if the target label is *not* yet defined, this slot is updated. */
312 rtx stack_level;
313 /* List of lists of cleanup expressions to be run by this goto.
314 There is one element for each block that this goto is within.
315 The tail of this list can be 0,
316 if all remaining elements would be empty.
317 The TREE_VALUE contains the cleanup list of that block as of the
318 time this goto was seen.
319 The TREE_ADDRESSABLE flag is 1 for a block that has been exited. */
320 tree cleanup_list_list;
321 };
322
323 /* Within any binding contour that must restore a stack level,
324 all labels are recorded with a chain of these structures. */
325
326 struct label_chain
327 {
328 /* Points to following label in the chain. */
329 struct label_chain *next;
330 tree label;
331 };
332
333 struct stmt_status
334 {
335 /* Chain of all pending binding contours. */
336 struct nesting *x_block_stack;
337
338 /* If any new stacks are added here, add them to POPSTACK too. */
339
340 /* Chain of all pending binding contours that restore stack levels
341 or have cleanups. */
342 struct nesting *x_stack_block_stack;
343
344 /* Chain of all pending conditional statements. */
345 struct nesting *x_cond_stack;
346
347 /* Chain of all pending loops. */
348 struct nesting *x_loop_stack;
349
350 /* Chain of all pending case or switch statements. */
351 struct nesting *x_case_stack;
352
353 /* Separate chain including all of the above,
354 chained through the `all' field. */
355 struct nesting *x_nesting_stack;
356
357 /* Number of entries on nesting_stack now. */
358 int x_nesting_depth;
359
360 /* Number of binding contours started so far in this function. */
361 int x_block_start_count;
362
363 /* Each time we expand an expression-statement,
364 record the expr's type and its RTL value here. */
365 tree x_last_expr_type;
366 rtx x_last_expr_value;
367
368 /* Nonzero if within a ({...}) grouping, in which case we must
369 always compute a value for each expr-stmt in case it is the last one. */
370 int x_expr_stmts_for_value;
371
372 /* Filename and line number of last line-number note,
373 whether we actually emitted it or not. */
374 const char *x_emit_filename;
375 int x_emit_lineno;
376
377 struct goto_fixup *x_goto_fixup_chain;
378 };
379
380 #define block_stack (cfun->stmt->x_block_stack)
381 #define stack_block_stack (cfun->stmt->x_stack_block_stack)
382 #define cond_stack (cfun->stmt->x_cond_stack)
383 #define loop_stack (cfun->stmt->x_loop_stack)
384 #define case_stack (cfun->stmt->x_case_stack)
385 #define nesting_stack (cfun->stmt->x_nesting_stack)
386 #define nesting_depth (cfun->stmt->x_nesting_depth)
387 #define current_block_start_count (cfun->stmt->x_block_start_count)
388 #define last_expr_type (cfun->stmt->x_last_expr_type)
389 #define last_expr_value (cfun->stmt->x_last_expr_value)
390 #define expr_stmts_for_value (cfun->stmt->x_expr_stmts_for_value)
391 #define emit_filename (cfun->stmt->x_emit_filename)
392 #define emit_lineno (cfun->stmt->x_emit_lineno)
393 #define goto_fixup_chain (cfun->stmt->x_goto_fixup_chain)
394
395 /* Non-zero if we are using EH to handle cleanups. */
396 static int using_eh_for_cleanups_p = 0;
397
398 /* Character strings, each containing a single decimal digit. */
399 static char *digit_strings[10];
400
401 static int n_occurrences PARAMS ((int, const char *));
402 static void expand_goto_internal PARAMS ((tree, rtx, rtx));
403 static int expand_fixup PARAMS ((tree, rtx, rtx));
404 static rtx expand_nl_handler_label PARAMS ((rtx, rtx));
405 static void expand_nl_goto_receiver PARAMS ((void));
406 static void expand_nl_goto_receivers PARAMS ((struct nesting *));
407 static void fixup_gotos PARAMS ((struct nesting *, rtx, tree,
408 rtx, int));
409 static void expand_null_return_1 PARAMS ((rtx, int));
410 static void expand_value_return PARAMS ((rtx));
411 static int tail_recursion_args PARAMS ((tree, tree));
412 static void expand_cleanups PARAMS ((tree, tree, int, int));
413 static void check_seenlabel PARAMS ((void));
414 static void do_jump_if_equal PARAMS ((rtx, rtx, rtx, int));
415 static int estimate_case_costs PARAMS ((case_node_ptr));
416 static void group_case_nodes PARAMS ((case_node_ptr));
417 static void balance_case_nodes PARAMS ((case_node_ptr *,
418 case_node_ptr));
419 static int node_has_low_bound PARAMS ((case_node_ptr, tree));
420 static int node_has_high_bound PARAMS ((case_node_ptr, tree));
421 static int node_is_bounded PARAMS ((case_node_ptr, tree));
422 static void emit_jump_if_reachable PARAMS ((rtx));
423 static void emit_case_nodes PARAMS ((rtx, case_node_ptr, rtx, tree));
424 static int add_case_node PARAMS ((tree, tree, tree, tree *));
425 static struct case_node *case_tree2list PARAMS ((case_node *, case_node *));
426 static void mark_cond_nesting PARAMS ((struct nesting *));
427 static void mark_loop_nesting PARAMS ((struct nesting *));
428 static void mark_block_nesting PARAMS ((struct nesting *));
429 static void mark_case_nesting PARAMS ((struct nesting *));
430 static void mark_case_node PARAMS ((struct case_node *));
431 static void mark_goto_fixup PARAMS ((struct goto_fixup *));
432
433 \f
434 void
435 using_eh_for_cleanups ()
436 {
437 using_eh_for_cleanups_p = 1;
438 }
439
440 /* Mark N (known to be a cond-nesting) for GC. */
441
442 static void
443 mark_cond_nesting (n)
444 struct nesting *n;
445 {
446 while (n)
447 {
448 ggc_mark_rtx (n->exit_label);
449 ggc_mark_rtx (n->data.cond.endif_label);
450 ggc_mark_rtx (n->data.cond.next_label);
451
452 n = n->next;
453 }
454 }
455
456 /* Mark N (known to be a loop-nesting) for GC. */
457
458 static void
459 mark_loop_nesting (n)
460 struct nesting *n;
461 {
462
463 while (n)
464 {
465 ggc_mark_rtx (n->exit_label);
466 ggc_mark_rtx (n->data.loop.start_label);
467 ggc_mark_rtx (n->data.loop.end_label);
468 ggc_mark_rtx (n->data.loop.alt_end_label);
469 ggc_mark_rtx (n->data.loop.continue_label);
470
471 n = n->next;
472 }
473 }
474
475 /* Mark N (known to be a block-nesting) for GC. */
476
477 static void
478 mark_block_nesting (n)
479 struct nesting *n;
480 {
481 while (n)
482 {
483 struct label_chain *l;
484
485 ggc_mark_rtx (n->exit_label);
486 ggc_mark_rtx (n->data.block.stack_level);
487 ggc_mark_rtx (n->data.block.first_insn);
488 ggc_mark_tree (n->data.block.cleanups);
489 ggc_mark_tree (n->data.block.outer_cleanups);
490
491 for (l = n->data.block.label_chain; l != NULL; l = l->next)
492 ggc_mark_tree (l->label);
493
494 ggc_mark_rtx (n->data.block.last_unconditional_cleanup);
495
496 /* ??? cleanup_ptr never points outside the stack, does it? */
497
498 n = n->next;
499 }
500 }
501
502 /* Mark N (known to be a case-nesting) for GC. */
503
504 static void
505 mark_case_nesting (n)
506 struct nesting *n;
507 {
508 while (n)
509 {
510 ggc_mark_rtx (n->exit_label);
511 ggc_mark_rtx (n->data.case_stmt.start);
512
513 ggc_mark_tree (n->data.case_stmt.default_label);
514 ggc_mark_tree (n->data.case_stmt.index_expr);
515 ggc_mark_tree (n->data.case_stmt.nominal_type);
516
517 mark_case_node (n->data.case_stmt.case_list);
518 n = n->next;
519 }
520 }
521
522 /* Mark C for GC. */
523
524 static void
525 mark_case_node (c)
526 struct case_node *c;
527 {
528 if (c != 0)
529 {
530 ggc_mark_tree (c->low);
531 ggc_mark_tree (c->high);
532 ggc_mark_tree (c->code_label);
533
534 mark_case_node (c->right);
535 mark_case_node (c->left);
536 }
537 }
538
539 /* Mark G for GC. */
540
541 static void
542 mark_goto_fixup (g)
543 struct goto_fixup *g;
544 {
545 while (g)
546 {
547 ggc_mark (g);
548 ggc_mark_rtx (g->before_jump);
549 ggc_mark_tree (g->target);
550 ggc_mark_tree (g->context);
551 ggc_mark_rtx (g->target_rtl);
552 ggc_mark_rtx (g->stack_level);
553 ggc_mark_tree (g->cleanup_list_list);
554
555 g = g->next;
556 }
557 }
558
559 /* Clear out all parts of the state in F that can safely be discarded
560 after the function has been compiled, to let garbage collection
561 reclaim the memory. */
562
563 void
564 free_stmt_status (f)
565 struct function *f;
566 {
567 /* We're about to free the function obstack. If we hold pointers to
568 things allocated there, then we'll try to mark them when we do
569 GC. So, we clear them out here explicitly. */
570 if (f->stmt)
571 free (f->stmt);
572 f->stmt = NULL;
573 }
574
575 /* Mark P for GC. */
576
577 void
578 mark_stmt_status (p)
579 struct stmt_status *p;
580 {
581 if (p == 0)
582 return;
583
584 mark_block_nesting (p->x_block_stack);
585 mark_cond_nesting (p->x_cond_stack);
586 mark_loop_nesting (p->x_loop_stack);
587 mark_case_nesting (p->x_case_stack);
588
589 ggc_mark_tree (p->x_last_expr_type);
590 /* last_expr_value is only valid if last_expr_type is nonzero. */
591 if (p->x_last_expr_type)
592 ggc_mark_rtx (p->x_last_expr_value);
593
594 mark_goto_fixup (p->x_goto_fixup_chain);
595 }
596
597 void
598 init_stmt ()
599 {
600 int i;
601
602 gcc_obstack_init (&stmt_obstack);
603
604 for (i = 0; i < 10; i++)
605 {
606 digit_strings[i] = ggc_alloc_string (NULL, 1);
607 digit_strings[i][0] = '0' + i;
608 }
609 ggc_add_string_root (digit_strings, 10);
610 }
611
612 void
613 init_stmt_for_function ()
614 {
615 cfun->stmt = (struct stmt_status *) xmalloc (sizeof (struct stmt_status));
616
617 /* We are not currently within any block, conditional, loop or case. */
618 block_stack = 0;
619 stack_block_stack = 0;
620 loop_stack = 0;
621 case_stack = 0;
622 cond_stack = 0;
623 nesting_stack = 0;
624 nesting_depth = 0;
625
626 current_block_start_count = 0;
627
628 /* No gotos have been expanded yet. */
629 goto_fixup_chain = 0;
630
631 /* We are not processing a ({...}) grouping. */
632 expr_stmts_for_value = 0;
633 last_expr_type = 0;
634 last_expr_value = NULL_RTX;
635 }
636 \f
637 /* Return nonzero if anything is pushed on the loop, condition, or case
638 stack. */
639 int
640 in_control_zone_p ()
641 {
642 return cond_stack || loop_stack || case_stack;
643 }
644
645 /* Record the current file and line. Called from emit_line_note. */
646 void
647 set_file_and_line_for_stmt (file, line)
648 const char *file;
649 int line;
650 {
651 /* If we're outputting an inline function, and we add a line note,
652 there may be no CFUN->STMT information. So, there's no need to
653 update it. */
654 if (cfun->stmt)
655 {
656 emit_filename = file;
657 emit_lineno = line;
658 }
659 }
660
661 /* Emit a no-op instruction. */
662
663 void
664 emit_nop ()
665 {
666 rtx last_insn;
667
668 last_insn = get_last_insn ();
669 if (!optimize
670 && (GET_CODE (last_insn) == CODE_LABEL
671 || (GET_CODE (last_insn) == NOTE
672 && prev_real_insn (last_insn) == 0)))
673 emit_insn (gen_nop ());
674 }
675 \f
676 /* Return the rtx-label that corresponds to a LABEL_DECL,
677 creating it if necessary. */
678
679 rtx
680 label_rtx (label)
681 tree label;
682 {
683 if (TREE_CODE (label) != LABEL_DECL)
684 abort ();
685
686 if (DECL_RTL (label))
687 return DECL_RTL (label);
688
689 return DECL_RTL (label) = gen_label_rtx ();
690 }
691
692 /* Add an unconditional jump to LABEL as the next sequential instruction. */
693
694 void
695 emit_jump (label)
696 rtx label;
697 {
698 do_pending_stack_adjust ();
699 emit_jump_insn (gen_jump (label));
700 emit_barrier ();
701 }
702
703 /* Emit code to jump to the address
704 specified by the pointer expression EXP. */
705
706 void
707 expand_computed_goto (exp)
708 tree exp;
709 {
710 rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);
711
712 #ifdef POINTERS_EXTEND_UNSIGNED
713 x = convert_memory_address (Pmode, x);
714 #endif
715
716 emit_queue ();
717 /* Be sure the address being jumped to is executable code. */
718 if (current_function_check_memory_usage)
719 emit_library_call (chkr_check_exec_libfunc, 1,
720 VOIDmode, 1, x, ptr_mode);
721
722 do_pending_stack_adjust ();
723 emit_indirect_jump (x);
724
725 current_function_has_computed_jump = 1;
726 }
727 \f
728 /* Handle goto statements and the labels that they can go to. */
729
730 /* Specify the location in the RTL code of a label LABEL,
731 which is a LABEL_DECL tree node.
732
733 This is used for the kind of label that the user can jump to with a
734 goto statement, and for alternatives of a switch or case statement.
735 RTL labels generated for loops and conditionals don't go through here;
736 they are generated directly at the RTL level, by other functions below.
737
738 Note that this has nothing to do with defining label *names*.
739 Languages vary in how they do that and what that even means. */
740
741 void
742 expand_label (label)
743 tree label;
744 {
745 struct label_chain *p;
746
747 do_pending_stack_adjust ();
748 emit_label (label_rtx (label));
749 if (DECL_NAME (label))
750 LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));
751
752 if (stack_block_stack != 0)
753 {
754 p = (struct label_chain *) oballoc (sizeof (struct label_chain));
755 p->next = stack_block_stack->data.block.label_chain;
756 stack_block_stack->data.block.label_chain = p;
757 p->label = label;
758 }
759 }
760
761 /* Declare that LABEL (a LABEL_DECL) may be used for nonlocal gotos
762 from nested functions. */
763
764 void
765 declare_nonlocal_label (label)
766 tree label;
767 {
768 rtx slot = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
769
770 nonlocal_labels = tree_cons (NULL_TREE, label, nonlocal_labels);
771 LABEL_PRESERVE_P (label_rtx (label)) = 1;
772 if (nonlocal_goto_handler_slots == 0)
773 {
774 emit_stack_save (SAVE_NONLOCAL,
775 &nonlocal_goto_stack_level,
776 PREV_INSN (tail_recursion_reentry));
777 }
778 nonlocal_goto_handler_slots
779 = gen_rtx_EXPR_LIST (VOIDmode, slot, nonlocal_goto_handler_slots);
780 }
781
782 /* Generate RTL code for a `goto' statement with target label LABEL.
783 LABEL should be a LABEL_DECL tree node that was or will later be
784 defined with `expand_label'. */
785
786 void
787 expand_goto (label)
788 tree label;
789 {
790 tree context;
791
792 /* Check for a nonlocal goto to a containing function. */
793 context = decl_function_context (label);
794 if (context != 0 && context != current_function_decl)
795 {
796 struct function *p = find_function_data (context);
797 rtx label_ref = gen_rtx_LABEL_REF (Pmode, label_rtx (label));
798 rtx handler_slot, static_chain, save_area;
799 tree link;
800
801 /* Find the corresponding handler slot for this label. */
802 handler_slot = p->x_nonlocal_goto_handler_slots;
803 for (link = p->x_nonlocal_labels; TREE_VALUE (link) != label;
804 link = TREE_CHAIN (link))
805 handler_slot = XEXP (handler_slot, 1);
806 handler_slot = XEXP (handler_slot, 0);
807
808 p->has_nonlocal_label = 1;
809 current_function_has_nonlocal_goto = 1;
810 LABEL_REF_NONLOCAL_P (label_ref) = 1;
811
812 /* Copy the rtl for the slots so that they won't be shared in
813 case the virtual stack vars register gets instantiated differently
814 in the parent than in the child. */
815
816 static_chain = copy_to_reg (lookup_static_chain (label));
817
818 /* Get addr of containing function's current nonlocal goto handler,
819 which will do any cleanups and then jump to the label. */
820 handler_slot = copy_to_reg (replace_rtx (copy_rtx (handler_slot),
821 virtual_stack_vars_rtx,
822 static_chain));
823
824 /* Get addr of containing function's nonlocal save area. */
825 save_area = p->x_nonlocal_goto_stack_level;
826 if (save_area)
827 save_area = replace_rtx (copy_rtx (save_area),
828 virtual_stack_vars_rtx, static_chain);
829
830 #if HAVE_nonlocal_goto
831 if (HAVE_nonlocal_goto)
832 emit_insn (gen_nonlocal_goto (static_chain, handler_slot,
833 save_area, label_ref));
834 else
835 #endif
836 {
837 /* Restore frame pointer for containing function.
838 This sets the actual hard register used for the frame pointer
839 to the location of the function's incoming static chain info.
840 The non-local goto handler will then adjust it to contain the
841 proper value and reload the argument pointer, if needed. */
842 emit_move_insn (hard_frame_pointer_rtx, static_chain);
843 emit_stack_restore (SAVE_NONLOCAL, save_area, NULL_RTX);
844
845 /* USE of hard_frame_pointer_rtx added for consistency;
846 not clear if really needed. */
847 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
848 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
849 emit_indirect_jump (handler_slot);
850 }
851 }
852 else
853 expand_goto_internal (label, label_rtx (label), NULL_RTX);
854 }
855
856 /* Generate RTL code for a `goto' statement whose target label in tree
857 form (if any) is BODY; LABEL is the CODE_LABEL rtx to jump to.
858 LAST_INSN, if non-0, is the rtx we should consider as the last
859 insn emitted (for the purposes of cleaning up a return). */
860
861 static void
862 expand_goto_internal (body, label, last_insn)
863 tree body;
864 rtx label;
865 rtx last_insn;
866 {
867 struct nesting *block;
868 rtx stack_level = 0;
869
870 if (GET_CODE (label) != CODE_LABEL)
871 abort ();
872
873 /* If label has already been defined, we can tell now
874 whether and how we must alter the stack level. */
875
876 if (PREV_INSN (label) != 0)
877 {
878 /* Find the innermost pending block that contains the label.
879 (Check containment by comparing insn-uids.)
880 Then restore the outermost stack level within that block,
881 and do cleanups of all blocks contained in it. */
882 for (block = block_stack; block; block = block->next)
883 {
884 if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
885 break;
886 if (block->data.block.stack_level != 0)
887 stack_level = block->data.block.stack_level;
888 /* Execute the cleanups for blocks we are exiting. */
889 if (block->data.block.cleanups != 0)
890 {
891 expand_cleanups (block->data.block.cleanups, NULL_TREE, 1, 1);
892 do_pending_stack_adjust ();
893 }
894 }
895
896 if (stack_level)
897 {
898 /* Ensure stack adjust isn't done by emit_jump, as this
899 would clobber the stack pointer. This one should be
900 deleted as dead by flow. */
901 clear_pending_stack_adjust ();
902 do_pending_stack_adjust ();
903
904 /* Don't do this adjust if it's to the end label and this function
905 is to return with a depressed stack pointer. */
906 if (label == return_label
907 && (((TREE_CODE (TREE_TYPE (current_function_decl))
908 == FUNCTION_TYPE)
909 && (TYPE_RETURNS_STACK_DEPRESSED
910 (TREE_TYPE (current_function_decl))))))
911 ;
912 else
913 emit_stack_restore (SAVE_BLOCK, stack_level, NULL_RTX);
914 }
915
916 if (body != 0 && DECL_TOO_LATE (body))
917 error ("jump to `%s' invalidly jumps into binding contour",
918 IDENTIFIER_POINTER (DECL_NAME (body)));
919 }
920 /* Label not yet defined: may need to put this goto
921 on the fixup list. */
922 else if (! expand_fixup (body, label, last_insn))
923 {
924 /* No fixup needed. Record that the label is the target
925 of at least one goto that has no fixup. */
926 if (body != 0)
927 TREE_ADDRESSABLE (body) = 1;
928 }
929
930 emit_jump (label);
931 }
932 \f
933 /* Generate if necessary a fixup for a goto
934 whose target label in tree structure (if any) is TREE_LABEL
935 and whose target in rtl is RTL_LABEL.
936
937 If LAST_INSN is nonzero, we pretend that the jump appears
938 after insn LAST_INSN instead of at the current point in the insn stream.
939
940 The fixup will be used later to insert insns just before the goto.
941 Those insns will restore the stack level as appropriate for the
942 target label, and will (in the case of C++) also invoke any object
943 destructors which have to be invoked when we exit the scopes which
944 are exited by the goto.
945
946 Value is nonzero if a fixup is made. */
947
948 static int
949 expand_fixup (tree_label, rtl_label, last_insn)
950 tree tree_label;
951 rtx rtl_label;
952 rtx last_insn;
953 {
954 struct nesting *block, *end_block;
955
956 /* See if we can recognize which block the label will be output in.
957 This is possible in some very common cases.
958 If we succeed, set END_BLOCK to that block.
959 Otherwise, set it to 0. */
960
961 if (cond_stack
962 && (rtl_label == cond_stack->data.cond.endif_label
963 || rtl_label == cond_stack->data.cond.next_label))
964 end_block = cond_stack;
965 /* If we are in a loop, recognize certain labels which
966 are likely targets. This reduces the number of fixups
967 we need to create. */
968 else if (loop_stack
969 && (rtl_label == loop_stack->data.loop.start_label
970 || rtl_label == loop_stack->data.loop.end_label
971 || rtl_label == loop_stack->data.loop.continue_label))
972 end_block = loop_stack;
973 else
974 end_block = 0;
975
976 /* Now set END_BLOCK to the binding level to which we will return. */
977
978 if (end_block)
979 {
980 struct nesting *next_block = end_block->all;
981 block = block_stack;
982
983 /* First see if the END_BLOCK is inside the innermost binding level.
984 If so, then no cleanups or stack levels are relevant. */
985 while (next_block && next_block != block)
986 next_block = next_block->all;
987
988 if (next_block)
989 return 0;
990
991 /* Otherwise, set END_BLOCK to the innermost binding level
992 which is outside the relevant control-structure nesting. */
993 next_block = block_stack->next;
994 for (block = block_stack; block != end_block; block = block->all)
995 if (block == next_block)
996 next_block = next_block->next;
997 end_block = next_block;
998 }
999
1000 /* Does any containing block have a stack level or cleanups?
1001 If not, no fixup is needed, and that is the normal case
1002 (the only case, for standard C). */
1003 for (block = block_stack; block != end_block; block = block->next)
1004 if (block->data.block.stack_level != 0
1005 || block->data.block.cleanups != 0)
1006 break;
1007
1008 if (block != end_block)
1009 {
1010 /* Ok, a fixup is needed. Add a fixup to the list of such. */
1011 struct goto_fixup *fixup
1012 = (struct goto_fixup *) ggc_alloc (sizeof (struct goto_fixup));
1013 /* In case an old stack level is restored, make sure that comes
1014 after any pending stack adjust. */
1015 /* ??? If the fixup isn't to come at the present position,
1016 doing the stack adjust here isn't useful. Doing it with our
1017 settings at that location isn't useful either. Let's hope
1018 someone does it! */
1019 if (last_insn == 0)
1020 do_pending_stack_adjust ();
1021 fixup->target = tree_label;
1022 fixup->target_rtl = rtl_label;
1023
1024 /* Create a BLOCK node and a corresponding matched set of
1025 NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes at
1026 this point. The notes will encapsulate any and all fixup
1027 code which we might later insert at this point in the insn
1028 stream. Also, the BLOCK node will be the parent (i.e. the
1029 `SUPERBLOCK') of any other BLOCK nodes which we might create
1030 later on when we are expanding the fixup code.
1031
1032 Note that optimization passes (including expand_end_loop)
1033 might move the *_BLOCK notes away, so we use a NOTE_INSN_DELETED
1034 as a placeholder. */
1035
1036 {
1037 register rtx original_before_jump
1038 = last_insn ? last_insn : get_last_insn ();
1039 rtx start;
1040 rtx end;
1041 tree block;
1042
1043 block = make_node (BLOCK);
1044 TREE_USED (block) = 1;
1045
1046 if (!cfun->x_whole_function_mode_p)
1047 insert_block (block);
1048 else
1049 {
1050 BLOCK_CHAIN (block)
1051 = BLOCK_CHAIN (DECL_INITIAL (current_function_decl));
1052 BLOCK_CHAIN (DECL_INITIAL (current_function_decl))
1053 = block;
1054 }
1055
1056 start_sequence ();
1057 start = emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
1058 if (cfun->x_whole_function_mode_p)
1059 NOTE_BLOCK (start) = block;
1060 fixup->before_jump = emit_note (NULL_PTR, NOTE_INSN_DELETED);
1061 end = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
1062 if (cfun->x_whole_function_mode_p)
1063 NOTE_BLOCK (end) = block;
1064 fixup->context = block;
1065 end_sequence ();
1066 emit_insns_after (start, original_before_jump);
1067 }
1068
1069 fixup->block_start_count = current_block_start_count;
1070 fixup->stack_level = 0;
1071 fixup->cleanup_list_list
1072 = ((block->data.block.outer_cleanups
1073 || block->data.block.cleanups)
1074 ? tree_cons (NULL_TREE, block->data.block.cleanups,
1075 block->data.block.outer_cleanups)
1076 : 0);
1077 fixup->next = goto_fixup_chain;
1078 goto_fixup_chain = fixup;
1079 }
1080
1081 return block != 0;
1082 }
1083 \f
1084 /* Expand any needed fixups in the outermost binding level of the
1085 function. FIRST_INSN is the first insn in the function. */
1086
1087 void
1088 expand_fixups (first_insn)
1089 rtx first_insn;
1090 {
1091 fixup_gotos (NULL_PTR, NULL_RTX, NULL_TREE, first_insn, 0);
1092 }
1093
1094 /* When exiting a binding contour, process all pending gotos requiring fixups.
1095 THISBLOCK is the structure that describes the block being exited.
1096 STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
1097 CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
1098 FIRST_INSN is the insn that began this contour.
1099
1100 Gotos that jump out of this contour must restore the
1101 stack level and do the cleanups before actually jumping.
1102
1103 DONT_JUMP_IN nonzero means report an error if there is a jump into this
1104 contour from before the beginning of the contour.
1105 This is also done if STACK_LEVEL is nonzero. */
1106
1107 static void
1108 fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
1109 struct nesting *thisblock;
1110 rtx stack_level;
1111 tree cleanup_list;
1112 rtx first_insn;
1113 int dont_jump_in;
1114 {
1115 register struct goto_fixup *f, *prev;
1116
1117 /* F is the fixup we are considering; PREV is the previous one. */
1118 /* We run this loop in two passes so that cleanups of exited blocks
1119 are run first, and blocks that are exited are marked so
1120 afterwards. */
1121
1122 for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
1123 {
1124 /* Test for a fixup that is inactive because it is already handled. */
1125 if (f->before_jump == 0)
1126 {
1127 /* Delete inactive fixup from the chain, if that is easy to do. */
1128 if (prev != 0)
1129 prev->next = f->next;
1130 }
1131 /* Has this fixup's target label been defined?
1132 If so, we can finalize it. */
1133 else if (PREV_INSN (f->target_rtl) != 0)
1134 {
1135 register rtx cleanup_insns;
1136
1137 /* If this fixup jumped into this contour from before the beginning
1138 of this contour, report an error. This code used to use
1139 the first non-label insn after f->target_rtl, but that's
1140 wrong, since such insns can be added by things like put_var_into_stack
1141 and can have INSN_UIDs outside the range of the block. */
1142 /* ??? Bug: this does not detect jumping in through intermediate
1143 blocks that have stack levels or cleanups.
1144 It detects only a problem with the innermost block
1145 around the label. */
1146 if (f->target != 0
1147 && (dont_jump_in || stack_level || cleanup_list)
1148 && INSN_UID (first_insn) < INSN_UID (f->target_rtl)
1149 && INSN_UID (first_insn) > INSN_UID (f->before_jump)
1150 && ! DECL_ERROR_ISSUED (f->target))
1151 {
1152 error_with_decl (f->target,
1153 "label `%s' used before containing binding contour");
1154 /* Prevent multiple errors for one label. */
1155 DECL_ERROR_ISSUED (f->target) = 1;
1156 }
1157
1158 /* We will expand the cleanups into a sequence of their own and
1159 then later on we will attach this new sequence to the insn
1160 stream just ahead of the actual jump insn. */
1161
1162 start_sequence ();
1163
1164 /* Temporarily restore the lexical context where we will
1165 logically be inserting the fixup code. We do this for the
1166 sake of getting the debugging information right. */
1167
1168 pushlevel (0);
1169 set_block (f->context);
1170
1171 /* Expand the cleanups for blocks this jump exits. */
1172 if (f->cleanup_list_list)
1173 {
1174 tree lists;
1175 for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
1176 /* Marked elements correspond to blocks that have been closed.
1177 Do their cleanups. */
1178 if (TREE_ADDRESSABLE (lists)
1179 && TREE_VALUE (lists) != 0)
1180 {
1181 expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
1182 /* Pop any pushes done in the cleanups,
1183 in case function is about to return. */
1184 do_pending_stack_adjust ();
1185 }
1186 }
1187
1188 /* Restore stack level for the biggest contour that this
1189 jump jumps out of. */
1190 if (f->stack_level
1191 && ! (f->target_rtl == return_label
1192 && ((TREE_CODE (TREE_TYPE (current_function_decl))
1193 == FUNCTION_TYPE)
1194 && (TYPE_RETURNS_STACK_DEPRESSED
1195 (TREE_TYPE (current_function_decl))))))
1196 emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);
1197
1198 /* Finish up the sequence containing the insns which implement the
1199 necessary cleanups, and then attach that whole sequence to the
1200 insn stream just ahead of the actual jump insn. Attaching it
1201 at that point ensures that any cleanups which are in fact
1202 implicit C++ object destructions (which must be executed upon
1203 leaving the block) appear (to the debugger) to be taking place
1204 in an area of the generated code where the object(s) being
1205 destructed are still "in scope". */
1206
1207 cleanup_insns = get_insns ();
1208 poplevel (1, 0, 0);
1209
1210 end_sequence ();
1211 emit_insns_after (cleanup_insns, f->before_jump);
1212
1213 f->before_jump = 0;
1214 }
1215 }
1216
1217 /* For any still-undefined labels, do the cleanups for this block now.
1218 We must do this now since items in the cleanup list may go out
1219 of scope when the block ends. */
1220 for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
1221 if (f->before_jump != 0
1222 && PREV_INSN (f->target_rtl) == 0
1223 /* Label has still not appeared. If we are exiting a block with
1224 a stack level to restore, that started before the fixup,
1225 mark this stack level as needing restoration
1226 when the fixup is later finalized. */
1227 && thisblock != 0
1228 /* Note: if THISBLOCK == 0 and we have a label that hasn't appeared, it
1229 means the label is undefined. That's erroneous, but possible. */
1230 && (thisblock->data.block.block_start_count
1231 <= f->block_start_count))
1232 {
1233 tree lists = f->cleanup_list_list;
1234 rtx cleanup_insns;
1235
1236 for (; lists; lists = TREE_CHAIN (lists))
1237 /* If the following element corresponds to our containing block,
1238 then this element must be for this block. */
1239 if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
1240 {
1241 start_sequence ();
1242 pushlevel (0);
1243 set_block (f->context);
1244 expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
1245 do_pending_stack_adjust ();
1246 cleanup_insns = get_insns ();
1247 poplevel (1, 0, 0);
1248 end_sequence ();
1249 if (cleanup_insns != 0)
1250 f->before_jump
1251 = emit_insns_after (cleanup_insns, f->before_jump);
1252
1253 f->cleanup_list_list = TREE_CHAIN (lists);
1254 }
1255
1256 if (stack_level)
1257 f->stack_level = stack_level;
1258 }
1259 }
1260 \f
1261 /* Return the number of times character C occurs in string S. */
1262 static int
1263 n_occurrences (c, s)
1264 int c;
1265 const char *s;
1266 {
1267 int n = 0;
1268 while (*s)
1269 n += (*s++ == c);
1270 return n;
1271 }
1272 \f
1273 /* Generate RTL for an asm statement (explicit assembler code).
1274 BODY is a STRING_CST node containing the assembler code text,
1275 or an ADDR_EXPR containing a STRING_CST. */
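/* E.g. a plain `asm ("nop");' arrives here with BODY the STRING_CST
   "nop" (possibly wrapped in an ADDR_EXPR) and is emitted as a single
   ASM_INPUT rtx.  */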
1276
1277 void
1278 expand_asm (body)
1279 tree body;
1280 {
1281 if (current_function_check_memory_usage)
1282 {
1283 error ("`asm' cannot be used in function where memory usage is checked");
1284 return;
1285 }
1286
1287 if (TREE_CODE (body) == ADDR_EXPR)
1288 body = TREE_OPERAND (body, 0);
1289
1290 emit_insn (gen_rtx_ASM_INPUT (VOIDmode,
1291 TREE_STRING_POINTER (body)));
1292 last_expr_type = 0;
1293 }
1294
1295 /* Generate RTL for an asm statement with arguments.
1296 STRING is the instruction template.
1297 OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
1298 Each output or input has an expression in the TREE_VALUE and
1299 a constraint-string in the TREE_PURPOSE.
1300 CLOBBERS is a list of STRING_CST nodes each naming a hard register
1301 that is clobbered by this insn.
1302
1303 Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
1304 Some elements of OUTPUTS may be replaced with trees representing temporary
1305 values. The caller should copy those temporary values to the originally
1306 specified lvalues.
1307
1308 VOL nonzero means the insn is volatile; don't optimize it. */
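/* As a rough illustration, a statement like

	asm volatile ("foo %0,%1" : "=r" (x) : "g" (y) : "r9");

   arrives with STRING == "foo %0,%1", OUTPUTS a one-element list whose
   TREE_PURPOSE is "=r" and whose TREE_VALUE is the lvalue X, INPUTS
   likewise holding "g" and Y, CLOBBERS holding the STRING_CST "r9",
   and VOL nonzero.  */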
1309
1310 void
1311 expand_asm_operands (string, outputs, inputs, clobbers, vol, filename, line)
1312 tree string, outputs, inputs, clobbers;
1313 int vol;
1314 const char *filename;
1315 int line;
1316 {
1317 rtvec argvec, constraints;
1318 rtx body;
1319 int ninputs = list_length (inputs);
1320 int noutputs = list_length (outputs);
1321 int ninout = 0;
1322 int nclobbers;
1323 tree tail;
1324 register int i;
1325 /* Vector of RTX's of evaluated output operands. */
1326 rtx *output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
1327 int *inout_opnum = (int *) alloca (noutputs * sizeof (int));
1328 rtx *real_output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
1329 enum machine_mode *inout_mode
1330 = (enum machine_mode *) alloca (noutputs * sizeof (enum machine_mode));
1331 /* The insn we have emitted. */
1332 rtx insn;
1333 int old_generating_concat_p = generating_concat_p;
1334
1335 /* An ASM with no outputs needs to be treated as volatile, for now. */
1336 if (noutputs == 0)
1337 vol = 1;
1338
1339 if (current_function_check_memory_usage)
1340 {
1341 error ("`asm' cannot be used with `-fcheck-memory-usage'");
1342 return;
1343 }
1344
1345 #ifdef MD_ASM_CLOBBERS
1346 /* Sometimes we wish to automatically clobber registers across an asm.
1347 Case in point is when the i386 backend moved from cc0 to a hard reg --
1348 maintaining source-level compatibility means automatically clobbering
1349 the flags register. */
1350 MD_ASM_CLOBBERS (clobbers);
1351 #endif
1352
1359 /* Count the number of meaningful clobbered registers, ignoring what
1360 we would ignore later. */
1361 nclobbers = 0;
1362 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1363 {
1364 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1365
1366 i = decode_reg_name (regname);
1367 if (i >= 0 || i == -4)
1368 ++nclobbers;
1369 else if (i == -2)
1370 error ("unknown register name `%s' in `asm'", regname);
1371 }
1372
1373 last_expr_type = 0;
1374
1375 /* Check that the number of alternatives is constant across all
1376 operands. */
1377 if (outputs || inputs)
1378 {
1379 tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
1380 int nalternatives = n_occurrences (',', TREE_STRING_POINTER (tmp));
1381 tree next = inputs;
1382
1383 if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
1384 {
1385 error ("too many alternatives in `asm'");
1386 return;
1387 }
1388
1389 tmp = outputs;
1390 while (tmp)
1391 {
1392 const char *constraint = TREE_STRING_POINTER (TREE_PURPOSE (tmp));
1393
1394 if (n_occurrences (',', constraint) != nalternatives)
1395 {
1396 error ("operand constraints for `asm' differ in number of alternatives");
1397 return;
1398 }
1399
1400 if (TREE_CHAIN (tmp))
1401 tmp = TREE_CHAIN (tmp);
1402 else
1403 tmp = next, next = 0;
1404 }
1405 }
1406
1407 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1408 {
1409 tree val = TREE_VALUE (tail);
1410 tree type = TREE_TYPE (val);
1411 char *constraint;
1412 char *p;
1413 int c_len;
1414 int j;
1415 int is_inout = 0;
1416 int allows_reg = 0;
1417 int allows_mem = 0;
1418
1419 /* If there's an erroneous arg, emit no insn. */
1420 if (TREE_TYPE (val) == error_mark_node)
1421 return;
1422
1423 /* Make sure constraint has `=' and does not have `+'. Also, see
1424 if it allows any register. Be liberal on the latter test, since
1425 the worst that happens if we get it wrong is we issue an error
1426 message. */
1427
1428 c_len = strlen (TREE_STRING_POINTER (TREE_PURPOSE (tail)));
1429 constraint = TREE_STRING_POINTER (TREE_PURPOSE (tail));
1430
1431 /* Allow the `=' or `+' to not be at the beginning of the string,
1432 since it wasn't explicitly documented that way, and there is a
1433 large body of code that puts it last. Swap the character to
1434 the front, so as not to uglify any place else. */
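/* E.g. a constraint written as "r=" is rewritten in place to "=r",
   after a warning.  */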
1435 switch (c_len)
1436 {
1437 default:
1438 if ((p = strchr (constraint, '=')) != NULL)
1439 break;
1440 if ((p = strchr (constraint, '+')) != NULL)
1441 break;
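/* Fall through: neither `=' nor `+' was present.  */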
1442 case 0:
1443 error ("output operand constraint lacks `='");
1444 return;
1445 }
1446
1447 if (p != constraint)
1448 {
1449 j = *p;
1450 bcopy (constraint, constraint+1, p-constraint);
1451 *constraint = j;
1452
1453 warning ("output constraint `%c' for operand %d is not at the beginning", j, i);
1454 }
1455
1456 is_inout = constraint[0] == '+';
1457 /* Replace '+' with '='. */
1458 constraint[0] = '=';
1459 /* Make sure we can specify the matching operand. */
1460 if (is_inout && i > 9)
1461 {
1462 error ("output operand constraint %d contains `+'", i);
1463 return;
1464 }
1465
1466 for (j = 1; j < c_len; j++)
1467 switch (constraint[j])
1468 {
1469 case '+':
1470 case '=':
1471 error ("operand constraint contains '+' or '=' at illegal position.");
1472 return;
1473
1474 case '%':
1475 if (i + 1 == ninputs + noutputs)
1476 {
1477 error ("`%%' constraint used with last operand");
1478 return;
1479 }
1480 break;
1481
1482 case '?': case '!': case '*': case '&':
1483 case 'E': case 'F': case 'G': case 'H':
1484 case 's': case 'i': case 'n':
1485 case 'I': case 'J': case 'K': case 'L': case 'M':
1486 case 'N': case 'O': case 'P': case ',':
1487 break;
1488
1489 case '0': case '1': case '2': case '3': case '4':
1490 case '5': case '6': case '7': case '8': case '9':
1491 error ("matching constraint not valid in output operand");
1492 break;
1493
1494 case 'V': case 'm': case 'o':
1495 allows_mem = 1;
1496 break;
1497
1498 case '<': case '>':
1499 /* ??? Before flow, auto inc/dec insns are not supposed to exist,
1500 excepting those that expand_call created. So match memory
1501 and hope. */
1502 allows_mem = 1;
1503 break;
1504
1505 case 'g': case 'X':
1506 allows_reg = 1;
1507 allows_mem = 1;
1508 break;
1509
1510 case 'p': case 'r':
1511 allows_reg = 1;
1512 break;
1513
1514 default:
1515 if (! ISALPHA (constraint[j]))
1516 {
1517 error ("invalid punctuation `%c' in constraint",
1518 constraint[j]);
1519 return;
1520 }
1521 if (REG_CLASS_FROM_LETTER (constraint[j]) != NO_REGS)
1522 allows_reg = 1;
1523 #ifdef EXTRA_CONSTRAINT
1524 else
1525 {
1526 /* Otherwise we can't assume anything about the nature of
1527 the constraint except that it isn't purely registers.
1528 Treat it like "g" and hope for the best. */
1529 allows_reg = 1;
1530 allows_mem = 1;
1531 }
1532 #endif
1533 break;
1534 }
1535
1536 /* If an output operand is not a decl or indirect ref and our constraint
1537 allows a register, make a temporary to act as an intermediate.
1538 Make the asm insn write into that, then our caller will copy it to
1539 the real output operand. Likewise for promoted variables. */
1540
1541 generating_concat_p = 0;
1542
1543 real_output_rtx[i] = NULL_RTX;
1544 if ((TREE_CODE (val) == INDIRECT_REF
1545 && allows_mem)
1546 || (DECL_P (val)
1547 && (allows_mem || GET_CODE (DECL_RTL (val)) == REG)
1548 && ! (GET_CODE (DECL_RTL (val)) == REG
1549 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
1550 || ! allows_reg
1551 || is_inout)
1552 {
1553 if (! allows_reg)
1554 mark_addressable (TREE_VALUE (tail));
1555
1556 output_rtx[i]
1557 = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode,
1558 EXPAND_MEMORY_USE_WO);
1559
1560 if (! allows_reg && GET_CODE (output_rtx[i]) != MEM)
1561 error ("output number %d not directly addressable", i);
1562 if ((! allows_mem && GET_CODE (output_rtx[i]) == MEM)
1563 || GET_CODE (output_rtx[i]) == CONCAT)
1564 {
1565 real_output_rtx[i] = protect_from_queue (output_rtx[i], 1);
1566 output_rtx[i] = gen_reg_rtx (GET_MODE (output_rtx[i]));
1567 if (is_inout)
1568 emit_move_insn (output_rtx[i], real_output_rtx[i]);
1569 }
1570 }
1571 else
1572 {
1573 output_rtx[i] = assign_temp (type, 0, 0, 1);
1574 TREE_VALUE (tail) = make_tree (type, output_rtx[i]);
1575 }
1576
1577 generating_concat_p = old_generating_concat_p;
1578
1579 if (is_inout)
1580 {
1581 inout_mode[ninout] = TYPE_MODE (TREE_TYPE (TREE_VALUE (tail)));
1582 inout_opnum[ninout++] = i;
1583 }
1584 }
1585
1586 ninputs += ninout;
1587 if (ninputs + noutputs > MAX_RECOG_OPERANDS)
1588 {
1589 error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
1590 return;
1591 }
1592
1593 /* Make vectors for the expression-rtx and constraint strings. */
1594
1595 argvec = rtvec_alloc (ninputs);
1596 constraints = rtvec_alloc (ninputs);
1597
1598 body = gen_rtx_ASM_OPERANDS (VOIDmode, TREE_STRING_POINTER (string),
1599 empty_string, 0, argvec, constraints,
1600 filename, line);
1601
1602 MEM_VOLATILE_P (body) = vol;
1603
1604 /* Eval the inputs and put them into ARGVEC.
1605 Put their constraints into ASM_INPUTs and store in CONSTRAINTS. */
1606
1607 i = 0;
1608 for (tail = inputs; tail; tail = TREE_CHAIN (tail))
1609 {
1610 int j;
1611 int allows_reg = 0, allows_mem = 0;
1612 char *constraint, *orig_constraint;
1613 int c_len;
1614 rtx op;
1615
1616 /* If there's an erroneous arg, emit no insn,
1617 because the ASM_INPUT would get VOIDmode
1618 and that could cause a crash in reload. */
1619 if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
1620 return;
1621
1622 /* ??? Can this happen, and does the error message make any sense? */
1623 if (TREE_PURPOSE (tail) == NULL_TREE)
1624 {
1625 error ("hard register `%s' listed as input operand to `asm'",
1626 TREE_STRING_POINTER (TREE_VALUE (tail)) );
1627 return;
1628 }
1629
1630 c_len = strlen (TREE_STRING_POINTER (TREE_PURPOSE (tail)));
1631 constraint = TREE_STRING_POINTER (TREE_PURPOSE (tail));
1632 orig_constraint = constraint;
1633
1634 /* Make sure constraint has neither `=', `+', nor '&'. */
1635
1636 for (j = 0; j < c_len; j++)
1637 switch (constraint[j])
1638 {
1639 case '+': case '=': case '&':
1640 if (constraint == orig_constraint)
1641 {
1642 error ("input operand constraint contains `%c'",
1643 constraint[j]);
1644 return;
1645 }
1646 break;
1647
1648 case '%':
1649 if (constraint == orig_constraint
1650 && i + 1 == ninputs - ninout)
1651 {
1652 error ("`%%' constraint used with last operand");
1653 return;
1654 }
1655 break;
1656
1657 case 'V': case 'm': case 'o':
1658 allows_mem = 1;
1659 break;
1660
1661 case '<': case '>':
1662 case '?': case '!': case '*':
1663 case 'E': case 'F': case 'G': case 'H':
1664 case 's': case 'i': case 'n':
1665 case 'I': case 'J': case 'K': case 'L': case 'M':
1666 case 'N': case 'O': case 'P': case ',':
1667 break;
1668
1669 /* Whether or not a numeric constraint allows a register is
1670 decided by the matching constraint, and so there is no need
1671 to do anything special with them. We must handle them in
1672 the default case, so that we don't unnecessarily force
1673 operands to memory. */
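/* E.g. an input constraint "0" whose matching output was constrained
   "=r" is re-scanned below as that output's string, and so ends up
   allowing a register.  */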
1674 case '0': case '1': case '2': case '3': case '4':
1675 case '5': case '6': case '7': case '8': case '9':
1676 if (constraint[j] >= '0' + noutputs)
1677 {
1678 error
1679 ("matching constraint references invalid operand number");
1680 return;
1681 }
1682
1683 /* Try and find the real constraint for this dup. */
1684 if ((j == 0 && c_len == 1)
1685 || (j == 1 && c_len == 2 && constraint[0] == '%'))
1686 {
1687 tree o = outputs;
1688
1689 for (j = constraint[j] - '0'; j > 0; --j)
1690 o = TREE_CHAIN (o);
1691
1692 c_len = strlen (TREE_STRING_POINTER (TREE_PURPOSE (o)));
1693 constraint = TREE_STRING_POINTER (TREE_PURPOSE (o));
1694 j = 0;
1695 break;
1696 }
1697
1698 /* Fall through. */
1699
1700 case 'p': case 'r':
1701 allows_reg = 1;
1702 break;
1703
1704 case 'g': case 'X':
1705 allows_reg = 1;
1706 allows_mem = 1;
1707 break;
1708
1709 default:
1710 if (! ISALPHA (constraint[j]))
1711 {
1712 error ("invalid punctuation `%c' in constraint",
1713 constraint[j]);
1714 return;
1715 }
1716 if (REG_CLASS_FROM_LETTER (constraint[j]) != NO_REGS)
1717 allows_reg = 1;
1718 #ifdef EXTRA_CONSTRAINT
1719 else
1720 {
1721 /* Otherwise we can't assume anything about the nature of
1722 the constraint except that it isn't purely registers.
1723 Treat it like "g" and hope for the best. */
1724 allows_reg = 1;
1725 allows_mem = 1;
1726 }
1727 #endif
1728 break;
1729 }
1730
1731 if (! allows_reg && allows_mem)
1732 mark_addressable (TREE_VALUE (tail));
1733
1734 op = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode, 0);
1735
1736 /* Never pass a CONCAT to an ASM. */
1737 generating_concat_p = 0;
1738 if (GET_CODE (op) == CONCAT)
1739 op = force_reg (GET_MODE (op), op);
1740
1741 if (asm_operand_ok (op, constraint) <= 0)
1742 {
1743 if (allows_reg)
1744 op = force_reg (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))), op);
1745 else if (!allows_mem)
1746 warning ("asm operand %d probably doesn't match constraints", i);
1747 else if (CONSTANT_P (op))
1748 op = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
1749 op);
1750 else if (GET_CODE (op) == REG
1751 || GET_CODE (op) == SUBREG
1752 || GET_CODE (op) == CONCAT)
1753 {
1754 tree type = TREE_TYPE (TREE_VALUE (tail));
1755 rtx memloc = assign_temp (type, 1, 1, 1);
1756
1757 emit_move_insn (memloc, op);
1758 op = memloc;
1759 }
1760
1761 else if (GET_CODE (op) == MEM && MEM_VOLATILE_P (op))
1762 /* We won't recognize volatile memory as a
1763 memory_operand at this point. Ignore it. */
1764 ;
1765 else if (queued_subexp_p (op))
1766 ;
1767 else
1768 /* ??? Leave this only until we have experience with what
1769 happens in combine and elsewhere when constraints are
1770 not satisfied. */
1771 warning ("asm operand %d probably doesn't match constraints", i);
1772 }
1773 generating_concat_p = old_generating_concat_p;
1774 XVECEXP (body, 3, i) = op;
1775
1776 XVECEXP (body, 4, i) /* constraints */
1777 = gen_rtx_ASM_INPUT (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
1778 orig_constraint);
1779 i++;
1780 }
1781
1782 /* Protect all the operands from the queue now that they have all been
1783 evaluated. */
1784
1785 generating_concat_p = 0;
1786
1787 for (i = 0; i < ninputs - ninout; i++)
1788 XVECEXP (body, 3, i) = protect_from_queue (XVECEXP (body, 3, i), 0);
1789
1790 for (i = 0; i < noutputs; i++)
1791 output_rtx[i] = protect_from_queue (output_rtx[i], 1);
1792
1793 /* For in-out operands, copy output rtx to input rtx. */
1794 for (i = 0; i < ninout; i++)
1795 {
1796 int j = inout_opnum[i];
1797
1798 XVECEXP (body, 3, ninputs - ninout + i) /* argvec */
1799 = output_rtx[j];
1800 XVECEXP (body, 4, ninputs - ninout + i) /* constraints */
1801 = gen_rtx_ASM_INPUT (inout_mode[i], digit_strings[j]);
1802 }
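/* A hedged illustration (not compiler code): a front end seeing an
   in-out operand such as

       asm ("incl %0" : "+r" (x));

   represents it as an output with constraint "=r" plus a matching
   input whose constraint is the digit string "0"; that matching
   input is exactly what the loop above fills in from output_rtx
   and digit_strings.  */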
1803
1804 generating_concat_p = old_generating_concat_p;
1805
1806 /* Now, for each output, construct an rtx
1807 (set OUTPUT (asm_operands INSN OUTPUTNUMBER OUTPUTCONSTRAINT
1808 ARGVEC CONSTRAINTS))
1809 If there is more than one, put them inside a PARALLEL. */
1810
1811 if (noutputs == 1 && nclobbers == 0)
1812 {
1813 XSTR (body, 1) = TREE_STRING_POINTER (TREE_PURPOSE (outputs));
1814 insn = emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
1815 }
1816
1817 else if (noutputs == 0 && nclobbers == 0)
1818 {
1819 /* No output operands: put in a raw ASM_OPERANDS rtx. */
1820 insn = emit_insn (body);
1821 }
1822
1823 else
1824 {
1825 rtx obody = body;
1826 int num = noutputs;
1827
1828 if (num == 0)
1829 num = 1;
1830
1831 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
1832
1833 /* For each output operand, store a SET. */
1834 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1835 {
1836 XVECEXP (body, 0, i)
1837 = gen_rtx_SET (VOIDmode,
1838 output_rtx[i],
1839 gen_rtx_ASM_OPERANDS
1840 (VOIDmode,
1841 TREE_STRING_POINTER (string),
1842 TREE_STRING_POINTER (TREE_PURPOSE (tail)),
1843 i, argvec, constraints,
1844 filename, line));
1845
1846 MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
1847 }
1848
1849 /* If there are no outputs (but there are some clobbers)
1850 store the bare ASM_OPERANDS into the PARALLEL. */
1851
1852 if (i == 0)
1853 XVECEXP (body, 0, i++) = obody;
1854
1855 /* Store (clobber REG) for each clobbered register specified. */
1856
1857 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1858 {
1859 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1860 int j = decode_reg_name (regname);
1861
1862 if (j < 0)
1863 {
1864 if (j == -3) /* `cc', which is not a register */
1865 continue;
1866
1867 if (j == -4) /* `memory', don't cache memory across asm */
1868 {
1869 XVECEXP (body, 0, i++)
1870 = gen_rtx_CLOBBER (VOIDmode,
1871 gen_rtx_MEM
1872 (BLKmode,
1873 gen_rtx_SCRATCH (VOIDmode)));
1874 continue;
1875 }
1876
1877 /* Ignore unknown register, error already signaled. */
1878 continue;
1879 }
1880
1881 /* Use QImode since that's guaranteed to clobber just one reg. */
1882 XVECEXP (body, 0, i++)
1883 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (QImode, j));
1884 }
1885
1886 insn = emit_insn (body);
1887 }
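/* As a hedged sketch of the shape built above, an asm such as

       asm volatile ("..." : "=r" (a) : "r" (b) : "cc", "memory");

   would typically yield

       (parallel [(set (reg a) (asm_operands ...))
                  (clobber (mem:BLK (scratch)))])

   where the "cc" clobber has been dropped (decode_reg_name returns
   -3 for it) and the "memory" clobber became the BLKmode MEM.  */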
1888
1889 /* For any outputs that needed reloading into registers, spill them
1890 back to where they belong. */
1891 for (i = 0; i < noutputs; ++i)
1892 if (real_output_rtx[i])
1893 emit_move_insn (real_output_rtx[i], output_rtx[i]);
1894
1895 free_temp_slots ();
1896 }
1897 \f
1898 /* Generate RTL to evaluate the expression EXP
1899 and remember it in case this is the VALUE in a ({... VALUE; }) construct. */
1900
1901 void
1902 expand_expr_stmt (exp)
1903 tree exp;
1904 {
1905 /* If -W, warn about statements with no side effects,
1906 except for an explicit cast to void (e.g. for assert()), and
1907 except inside a ({...}) where they may be useful. */
1908 if (expr_stmts_for_value == 0 && exp != error_mark_node)
1909 {
1910 if (! TREE_SIDE_EFFECTS (exp)
1911 && (extra_warnings || warn_unused_value)
1912 && !(TREE_CODE (exp) == CONVERT_EXPR
1913 && VOID_TYPE_P (TREE_TYPE (exp))))
1914 warning_with_file_and_line (emit_filename, emit_lineno,
1915 "statement with no effect");
1916 else if (warn_unused_value)
1917 warn_if_unused_value (exp);
1918 }
1919
1920 /* If EXP is of function type and we are expanding statements for
1921 value, convert it to pointer-to-function. */
1922 if (expr_stmts_for_value && TREE_CODE (TREE_TYPE (exp)) == FUNCTION_TYPE)
1923 exp = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (exp)), exp);
1924
1925 last_expr_type = TREE_TYPE (exp);
1926 last_expr_value = expand_expr (exp,
1927 (expr_stmts_for_value
1928 ? NULL_RTX : const0_rtx),
1929 VOIDmode, 0);
1930
1931 /* If all we do is reference a volatile value in memory,
1932 copy it to a register to be sure it is actually touched. */
1933 if (last_expr_value != 0 && GET_CODE (last_expr_value) == MEM
1934 && TREE_THIS_VOLATILE (exp))
1935 {
1936 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode)
1937 ;
1938 else if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
1939 copy_to_reg (last_expr_value);
1940 else
1941 {
1942 rtx lab = gen_label_rtx ();
1943
1944 /* Compare the value with itself to reference it. */
1945 emit_cmp_and_jump_insns (last_expr_value, last_expr_value, EQ,
1946 expand_expr (TYPE_SIZE (last_expr_type),
1947 NULL_RTX, VOIDmode, 0),
1948 BLKmode, 0,
1949 TYPE_ALIGN (last_expr_type) / BITS_PER_UNIT,
1950 lab);
1951 emit_label (lab);
1952 }
1953 }
1954
1955 /* If this expression is part of a ({...}) and is in memory, we may have
1956 to preserve temporaries. */
1957 preserve_temp_slots (last_expr_value);
1958
1959 /* Free any temporaries used to evaluate this expression. Any temporary
1960 used as a result of this expression will already have been preserved
1961 above. */
1962 free_temp_slots ();
1963
1964 emit_queue ();
1965 }
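/* Illustrative cases for the tests above: with -W, `x + 1;' has no
   side effects and draws "statement with no effect", while
   `(void) (x + 1);' and `x++;' are exempt -- the former by the
   explicit cast to void, the latter because it has side effects.  */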
1966
1967 /* Warn if EXP contains any computations whose results are not used.
1968 Return 1 if a warning is printed; 0 otherwise. */
1969
1970 int
1971 warn_if_unused_value (exp)
1972 tree exp;
1973 {
1974 if (TREE_USED (exp))
1975 return 0;
1976
1977 switch (TREE_CODE (exp))
1978 {
1979 case PREINCREMENT_EXPR:
1980 case POSTINCREMENT_EXPR:
1981 case PREDECREMENT_EXPR:
1982 case POSTDECREMENT_EXPR:
1983 case MODIFY_EXPR:
1984 case INIT_EXPR:
1985 case TARGET_EXPR:
1986 case CALL_EXPR:
1987 case METHOD_CALL_EXPR:
1988 case RTL_EXPR:
1989 case TRY_CATCH_EXPR:
1990 case WITH_CLEANUP_EXPR:
1991 case EXIT_EXPR:
1992 /* We don't warn about COND_EXPR because it may be a useful
1993 construct if either arm contains a side effect. */
1994 case COND_EXPR:
1995 return 0;
1996
1997 case BIND_EXPR:
1998 /* For a binding, warn if no side effect within it. */
1999 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2000
2001 case SAVE_EXPR:
2002 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2003
2004 case TRUTH_ORIF_EXPR:
2005 case TRUTH_ANDIF_EXPR:
2006 /* In && or ||, warn if 2nd operand has no side effect. */
2007 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2008
2009 case COMPOUND_EXPR:
2010 if (TREE_NO_UNUSED_WARNING (exp))
2011 return 0;
2012 if (warn_if_unused_value (TREE_OPERAND (exp, 0)))
2013 return 1;
2014 /* Let people do `(foo (), 0)' without a warning. */
2015 if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
2016 return 0;
2017 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2018
2019 case NOP_EXPR:
2020 case CONVERT_EXPR:
2021 case NON_LVALUE_EXPR:
2022 /* Don't warn about values cast to void. */
2023 if (VOID_TYPE_P (TREE_TYPE (exp)))
2024 return 0;
2025 /* Don't warn about conversions not explicit in the user's program. */
2026 if (TREE_NO_UNUSED_WARNING (exp))
2027 return 0;
2028 /* Assignment to a cast usually results in a cast of a modify.
2029 Don't complain about that. There can be an arbitrary number of
2030 casts before the modify, so we must loop until we find the first
2031 non-cast expression and then test to see if that is a modify. */
2032 {
2033 tree tem = TREE_OPERAND (exp, 0);
2034
2035 while (TREE_CODE (tem) == CONVERT_EXPR || TREE_CODE (tem) == NOP_EXPR)
2036 tem = TREE_OPERAND (tem, 0);
2037
2038 if (TREE_CODE (tem) == MODIFY_EXPR || TREE_CODE (tem) == INIT_EXPR
2039 || TREE_CODE (tem) == CALL_EXPR)
2040 return 0;
2041 }
2042 goto warn;
2043
2044 case INDIRECT_REF:
2045 /* Don't warn about automatic dereferencing of references, since
2046 the user cannot control it. */
2047 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
2048 return warn_if_unused_value (TREE_OPERAND (exp, 0));
2049 /* Fall through. */
2050
2051 default:
2052 /* Referencing a volatile value is a side effect, so don't warn. */
2053 if ((DECL_P (exp)
2054 || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
2055 && TREE_THIS_VOLATILE (exp))
2056 return 0;
2057
2058 /* If this is an expression which has no operands, there is no value
2059 to be unused. There are no such language-independent codes,
2060 but front ends may define such. */
2061 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'e'
2062 && TREE_CODE_LENGTH (TREE_CODE (exp)) == 0)
2063 return 0;
2064
2065 warn:
2066 warning_with_file_and_line (emit_filename, emit_lineno,
2067 "value computed is not used");
2068 return 1;
2069 }
2070 }
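/* Illustrative cases for the logic above: `f (), 0;' does not warn,
   since a constant second operand of a COMPOUND_EXPR is deliberately
   exempted; `a == b;' warns via the default case; `(char) (x = y);'
   does not warn, because stripping the casts reaches a MODIFY_EXPR.  */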
2071
2072 /* Clear out the memory of the last expression evaluated. */
2073
2074 void
2075 clear_last_expr ()
2076 {
2077 last_expr_type = 0;
2078 }
2079
2080 /* Begin a statement which will return a value.
2081 Return the RTL_EXPR for this statement expr.
2082 The caller must save that value and pass it to expand_end_stmt_expr. */
2083
2084 tree
2085 expand_start_stmt_expr ()
2086 {
2087 int momentary;
2088 tree t;
2089
2090 /* Make the RTL_EXPR node temporary, not momentary,
2091 so that rtl_expr_chain doesn't become garbage. */
2092 momentary = suspend_momentary ();
2093 t = make_node (RTL_EXPR);
2094 resume_momentary (momentary);
2095 do_pending_stack_adjust ();
2096 start_sequence_for_rtl_expr (t);
2097 NO_DEFER_POP;
2098 expr_stmts_for_value++;
2099 return t;
2100 }
2101
2102 /* Restore the previous state at the end of a statement that returns a value.
2103 Returns a tree node representing the statement's value and the
2104 insns to compute the value.
2105
2106 The nodes of that expression have been freed by now, so we cannot use them.
2107 But we don't want to do that anyway; the expression has already been
2108 evaluated, and now we just want to use the value. So generate an RTL_EXPR
2109 with the proper type and RTL value.
2110
2111 If the last substatement was not an expression,
2112 return something with type `void'. */
2113
2114 tree
2115 expand_end_stmt_expr (t)
2116 tree t;
2117 {
2118 OK_DEFER_POP;
2119
2120 if (last_expr_type == 0)
2121 {
2122 last_expr_type = void_type_node;
2123 last_expr_value = const0_rtx;
2124 }
2125 else if (last_expr_value == 0)
2126 /* There are some cases where this can happen, such as when the
2127 statement is of void type. */
2128 last_expr_value = const0_rtx;
2129 else if (GET_CODE (last_expr_value) != REG && ! CONSTANT_P (last_expr_value))
2130 /* Remove any possible QUEUED. */
2131 last_expr_value = protect_from_queue (last_expr_value, 0);
2132
2133 emit_queue ();
2134
2135 TREE_TYPE (t) = last_expr_type;
2136 RTL_EXPR_RTL (t) = last_expr_value;
2137 RTL_EXPR_SEQUENCE (t) = get_insns ();
2138
2139 rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);
2140
2141 end_sequence ();
2142
2143 /* Don't consider deleting this expr or containing exprs at tree level. */
2144 TREE_SIDE_EFFECTS (t) = 1;
2145 /* Propagate volatility of the actual RTL expr. */
2146 TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value);
2147
2148 last_expr_type = 0;
2149 expr_stmts_for_value--;
2150
2151 return t;
2152 }
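/* A hedged sketch of the expected calling sequence for a statement
   expression `({ stmt; val; })' (front ends may differ in detail):

       tree t = expand_start_stmt_expr ();
       expand_expr_stmt (stmt);
       expand_expr_stmt (val);
       t = expand_end_stmt_expr (t);

   after which T is an RTL_EXPR whose value is that of VAL.  */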
2153 \f
2154 /* Generate RTL for the start of an if-then. COND is the expression
2155 whose truth should be tested.
2156
2157 If EXITFLAG is nonzero, this conditional is visible to
2158 `exit_something'. */
2159
2160 void
2161 expand_start_cond (cond, exitflag)
2162 tree cond;
2163 int exitflag;
2164 {
2165 struct nesting *thiscond = ALLOC_NESTING ();
2166
2167 /* Make an entry on cond_stack for the cond we are entering. */
2168
2169 thiscond->next = cond_stack;
2170 thiscond->all = nesting_stack;
2171 thiscond->depth = ++nesting_depth;
2172 thiscond->data.cond.next_label = gen_label_rtx ();
2173 /* Before we encounter an `else', we don't need a separate exit label
2174 unless there are supposed to be exit statements
2175 to exit this conditional. */
2176 thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
2177 thiscond->data.cond.endif_label = thiscond->exit_label;
2178 cond_stack = thiscond;
2179 nesting_stack = thiscond;
2180
2181 do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
2182 }
2183
2184 /* Generate RTL between the then-clause and the elseif-clause
2185 of an if-then-elseif-.... */
2186
2187 void
2188 expand_start_elseif (cond)
2189 tree cond;
2190 {
2191 if (cond_stack->data.cond.endif_label == 0)
2192 cond_stack->data.cond.endif_label = gen_label_rtx ();
2193 emit_jump (cond_stack->data.cond.endif_label);
2194 emit_label (cond_stack->data.cond.next_label);
2195 cond_stack->data.cond.next_label = gen_label_rtx ();
2196 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2197 }
2198
2199 /* Generate RTL between the then-clause and the else-clause
2200 of an if-then-else. */
2201
2202 void
2203 expand_start_else ()
2204 {
2205 if (cond_stack->data.cond.endif_label == 0)
2206 cond_stack->data.cond.endif_label = gen_label_rtx ();
2207
2208 emit_jump (cond_stack->data.cond.endif_label);
2209 emit_label (cond_stack->data.cond.next_label);
2210 cond_stack->data.cond.next_label = 0; /* No more _else or _elseif calls. */
2211 }
2212
2213 /* After calling expand_start_else, turn this "else" into an "else if"
2214 by providing another condition. */
2215
2216 void
2217 expand_elseif (cond)
2218 tree cond;
2219 {
2220 cond_stack->data.cond.next_label = gen_label_rtx ();
2221 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2222 }
2223
2224 /* Generate RTL for the end of an if-then.
2225 Pop the record for it off of cond_stack. */
2226
2227 void
2228 expand_end_cond ()
2229 {
2230 struct nesting *thiscond = cond_stack;
2231
2232 do_pending_stack_adjust ();
2233 if (thiscond->data.cond.next_label)
2234 emit_label (thiscond->data.cond.next_label);
2235 if (thiscond->data.cond.endif_label)
2236 emit_label (thiscond->data.cond.endif_label);
2237
2238 POPSTACK (cond_stack);
2239 last_expr_type = 0;
2240 }
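/* A hedged sketch of the calling sequence for a chain such as
   `if (a) A; else if (b) B; else C;' (front ends may differ):

       expand_start_cond (a, 0);
       ... expand A ...
       expand_start_elseif (b);
       ... expand B ...
       expand_start_else ();
       ... expand C ...
       expand_end_cond ();  */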
2241 \f
2242 /* Generate RTL for the start of a loop. EXIT_FLAG is nonzero if this
2243 loop should be exited by `exit_something'. This is a loop for which
2244 `expand_continue' will jump to the top of the loop.
2245
2246 Make an entry on loop_stack to record the labels associated with
2247 this loop. */
2248
2249 struct nesting *
2250 expand_start_loop (exit_flag)
2251 int exit_flag;
2252 {
2253 register struct nesting *thisloop = ALLOC_NESTING ();
2254
2255 /* Make an entry on loop_stack for the loop we are entering. */
2256
2257 thisloop->next = loop_stack;
2258 thisloop->all = nesting_stack;
2259 thisloop->depth = ++nesting_depth;
2260 thisloop->data.loop.start_label = gen_label_rtx ();
2261 thisloop->data.loop.end_label = gen_label_rtx ();
2262 thisloop->data.loop.alt_end_label = 0;
2263 thisloop->data.loop.continue_label = thisloop->data.loop.start_label;
2264 thisloop->exit_label = exit_flag ? thisloop->data.loop.end_label : 0;
2265 loop_stack = thisloop;
2266 nesting_stack = thisloop;
2267
2268 do_pending_stack_adjust ();
2269 emit_queue ();
2270 emit_note (NULL_PTR, NOTE_INSN_LOOP_BEG);
2271 emit_label (thisloop->data.loop.start_label);
2272
2273 return thisloop;
2274 }
2275
2276 /* Like expand_start_loop but for a loop where the continuation point
2277 (for expand_continue_loop) will be specified explicitly. */
2278
2279 struct nesting *
2280 expand_start_loop_continue_elsewhere (exit_flag)
2281 int exit_flag;
2282 {
2283 struct nesting *thisloop = expand_start_loop (exit_flag);
2284 loop_stack->data.loop.continue_label = gen_label_rtx ();
2285 return thisloop;
2286 }
2287
2288 /* Specify the continuation point for a loop started with
2289 expand_start_loop_continue_elsewhere.
2290 Use this at the point in the code to which a continue statement
2291 should jump. */
2292
2293 void
2294 expand_loop_continue_here ()
2295 {
2296 do_pending_stack_adjust ();
2297 emit_note (NULL_PTR, NOTE_INSN_LOOP_CONT);
2298 emit_label (loop_stack->data.loop.continue_label);
2299 }
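/* A hedged sketch of the calling sequence for `while (cond) body;'
   (front ends may differ):

       expand_start_loop (1);
       expand_exit_loop_if_false (0, cond);
       ... expand body; `continue' maps to expand_continue_loop (0) ...
       expand_end_loop ();

   A `for' statement whose continue point follows the body would use
   expand_start_loop_continue_elsewhere instead and call
   expand_loop_continue_here just before the increment.  */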
2300
2301 /* Finish a loop. Generate a jump back to the top and the loop-exit label.
2302 Pop the block off of loop_stack. */
2303
2304 void
2305 expand_end_loop ()
2306 {
2307 rtx start_label = loop_stack->data.loop.start_label;
2308 rtx insn = get_last_insn ();
2309 int needs_end_jump = 1;
2310
2311 /* Mark the continue-point at the top of the loop if none elsewhere. */
2312 if (start_label == loop_stack->data.loop.continue_label)
2313 emit_note_before (NOTE_INSN_LOOP_CONT, start_label);
2314
2315 do_pending_stack_adjust ();
2316
2317 /* If optimizing, perhaps reorder the loop.
2318 First, try to use a condjump near the end.
2319 expand_exit_loop_if_false ends loops with unconditional jumps,
2320 like this:
2321
2322 if (test) goto label;
2323 optional: cleanup
2324 goto loop_stack->data.loop.end_label
2325 barrier
2326 label:
2327
2328 If we find such a pattern, we can end the loop earlier. */
2329
2330 if (optimize
2331 && GET_CODE (insn) == CODE_LABEL
2332 && LABEL_NAME (insn) == NULL
2333 && GET_CODE (PREV_INSN (insn)) == BARRIER)
2334 {
2335 rtx label = insn;
2336 rtx jump = PREV_INSN (PREV_INSN (label));
2337
2338 if (GET_CODE (jump) == JUMP_INSN
2339 && GET_CODE (PATTERN (jump)) == SET
2340 && SET_DEST (PATTERN (jump)) == pc_rtx
2341 && GET_CODE (SET_SRC (PATTERN (jump))) == LABEL_REF
2342 && (XEXP (SET_SRC (PATTERN (jump)), 0)
2343 == loop_stack->data.loop.end_label))
2344 {
2345 rtx prev;
2346
2347 /* The test might be complex and reference LABEL multiple times,
2348 like the loop in loop_iterations to set vtop. To handle this,
2349 we move LABEL. */
2350 insn = PREV_INSN (label);
2351 reorder_insns (label, label, start_label);
2352
2353 for (prev = PREV_INSN (jump);; prev = PREV_INSN (prev))
2354 {
2355 /* We ignore line number notes, but if we see any other note,
2356 in particular NOTE_INSN_BLOCK_*, NOTE_INSN_EH_REGION_*,
2357 NOTE_INSN_LOOP_*, we disable this optimization. */
2358 if (GET_CODE (prev) == NOTE)
2359 {
2360 if (NOTE_LINE_NUMBER (prev) < 0)
2361 break;
2362 continue;
2363 }
2364 if (GET_CODE (prev) == CODE_LABEL)
2365 break;
2366 if (GET_CODE (prev) == JUMP_INSN)
2367 {
2368 if (GET_CODE (PATTERN (prev)) == SET
2369 && SET_DEST (PATTERN (prev)) == pc_rtx
2370 && GET_CODE (SET_SRC (PATTERN (prev))) == IF_THEN_ELSE
2371 && (GET_CODE (XEXP (SET_SRC (PATTERN (prev)), 1))
2372 == LABEL_REF)
2373 && XEXP (XEXP (SET_SRC (PATTERN (prev)), 1), 0) == label)
2374 {
2375 XEXP (XEXP (SET_SRC (PATTERN (prev)), 1), 0)
2376 = start_label;
2377 emit_note_after (NOTE_INSN_LOOP_END, prev);
2378 needs_end_jump = 0;
2379 }
2380 break;
2381 }
2382 }
2383 }
2384 }
2385
2386 /* If the loop starts with a loop exit, roll that to the end where
2387 it will optimize together with the jump back.
2388
2389 We look for the conditional branch to the exit, except that once
2390 we find such a branch, we don't look past 30 instructions.
2391
2392 In more detail, if the loop presently looks like this (in pseudo-C):
2393
2394 start_label:
2395 if (test) goto end_label;
2396 body;
2397 goto start_label;
2398 end_label:
2399
2400 transform it to look like:
2401
2402 goto start_label;
2403 newstart_label:
2404 body;
2405 start_label:
2406 if (test) goto end_label;
2407 goto newstart_label;
2408 end_label:
2409
2410 Here, the `test' may actually consist of some reasonably complex
2411 code, terminating in a test. */
2412
2413 if (optimize
2414 && needs_end_jump
2415 &&
2416 ! (GET_CODE (insn) == JUMP_INSN
2417 && GET_CODE (PATTERN (insn)) == SET
2418 && SET_DEST (PATTERN (insn)) == pc_rtx
2419 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE))
2420 {
2421 int eh_regions = 0;
2422 int num_insns = 0;
2423 rtx last_test_insn = NULL_RTX;
2424
2425 /* Scan insns from the top of the loop looking for a qualified
2426 conditional exit. */
2427 for (insn = NEXT_INSN (loop_stack->data.loop.start_label); insn;
2428 insn = NEXT_INSN (insn))
2429 {
2430 if (GET_CODE (insn) == NOTE)
2431 {
2432 if (optimize < 2
2433 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2434 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2435 /* The code that actually moves the exit test will
2436 carefully leave BLOCK notes in their original
2437 location. That means, however, that we can't debug
2438 the exit test itself. So, we refuse to move code
2439 containing BLOCK notes at low optimization levels. */
2440 break;
2441
2442 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
2443 ++eh_regions;
2444 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
2445 {
2446 --eh_regions;
2447 if (eh_regions < 0)
2448 /* We've come to the end of an EH region, but
2449 never saw the beginning of that region. That
2450 means that an EH region begins before the top
2451 of the loop, and ends in the middle of it. The
2452 existence of such a situation violates a basic
2453 assumption in this code, since that would imply
2454 that even when EH_REGIONS is zero, we might
2455 move code out of an exception region. */
2456 abort ();
2457 }
2458
2459 /* We must not walk into a nested loop. */
2460 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
2461 break;
2462
2463 /* We already know this INSN is a NOTE, so there's no
2464 point in looking at it to see if it's a JUMP. */
2465 continue;
2466 }
2467
2468 if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == INSN)
2469 num_insns++;
2470
2471 if (last_test_insn && num_insns > 30)
2472 break;
2473
2474 if (eh_regions > 0)
2475 /* We don't want to move a partial EH region. Consider:
2476
2477 while ( ( { try {
2478 if (cond ()) 0;
2479 else {
2480 bar();
2481 1;
2482 }
2483 } catch (...) {
2484 1;
2485 } )) {
2486 body;
2487 }
2488
2489 This isn't legal C++, but here's what it's supposed to
2490 mean: if cond() is true, stop looping. Otherwise,
2491 call bar, and keep looping. In addition, if cond
2492 throws an exception, catch it and keep looping. Such
2493 constructs are certainly legal in LISP.
2494
2495 We should not move the `if (cond()) 0' test since then
2496 the EH-region for the try-block would be broken up.
2497 (In this case we would move the EH_BEG note for the `try'
2498 and `if cond()' but not the call to bar() or the
2499 EH_END note.)
2500
2501 So we don't look for tests within an EH region. */
2502 continue;
2503
2504 if (GET_CODE (insn) == JUMP_INSN
2505 && GET_CODE (PATTERN (insn)) == SET
2506 && SET_DEST (PATTERN (insn)) == pc_rtx)
2507 {
2508 /* This is indeed a jump. */
2509 rtx dest1 = NULL_RTX;
2510 rtx dest2 = NULL_RTX;
2511 rtx potential_last_test;
2512 if (GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE)
2513 {
2514 /* A conditional jump. */
2515 dest1 = XEXP (SET_SRC (PATTERN (insn)), 1);
2516 dest2 = XEXP (SET_SRC (PATTERN (insn)), 2);
2517 potential_last_test = insn;
2518 }
2519 else
2520 {
2521 /* An unconditional jump. */
2522 dest1 = SET_SRC (PATTERN (insn));
2523 /* Include the BARRIER after the JUMP. */
2524 potential_last_test = NEXT_INSN (insn);
2525 }
2526
2527 do {
2528 if (dest1 && GET_CODE (dest1) == LABEL_REF
2529 && ((XEXP (dest1, 0)
2530 == loop_stack->data.loop.alt_end_label)
2531 || (XEXP (dest1, 0)
2532 == loop_stack->data.loop.end_label)))
2533 {
2534 last_test_insn = potential_last_test;
2535 break;
2536 }
2537
2538 /* If this was a conditional jump, there may be
2539 another label at which we should look. */
2540 dest1 = dest2;
2541 dest2 = NULL_RTX;
2542 } while (dest1);
2543 }
2544 }
2545
2546 if (last_test_insn != 0 && last_test_insn != get_last_insn ())
2547 {
2548 /* We found one. Move everything from there up
2549 to the end of the loop, and add a jump into the loop
2550 to jump to there. */
2551 register rtx newstart_label = gen_label_rtx ();
2552 register rtx start_move = start_label;
2553 rtx next_insn;
2554
2555 /* If the start label is preceded by a NOTE_INSN_LOOP_CONT note,
2556 then we want to move this note also. */
2557 if (GET_CODE (PREV_INSN (start_move)) == NOTE
2558 && (NOTE_LINE_NUMBER (PREV_INSN (start_move))
2559 == NOTE_INSN_LOOP_CONT))
2560 start_move = PREV_INSN (start_move);
2561
2562 emit_label_after (newstart_label, PREV_INSN (start_move));
2563
2564 /* Actually move the insns. Start at the beginning, and
2565 keep moving insns until we've moved the
2566 last_test_insn. */
2567 for (insn = start_move; insn; insn = next_insn)
2568 {
2569 /* Figure out which insn comes after this one. We have
2570 to do this before we move INSN. */
2571 if (insn == last_test_insn)
2572 /* We've moved all the insns. */
2573 next_insn = NULL_RTX;
2574 else
2575 next_insn = NEXT_INSN (insn);
2576
2577 if (GET_CODE (insn) == NOTE
2578 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2579 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2580 /* We don't want to move NOTE_INSN_BLOCK_BEGs or
2581 NOTE_INSN_BLOCK_ENDs because the correct generation
2582 of debugging information depends on these appearing
2583 in the same order in the RTL and in the tree
2584 structure, where they are represented as BLOCKs.
2585 So, we don't move block notes. Of course, moving
2586 the code inside the block is likely to make it
2587 impossible to debug the instructions in the exit
2588 test, but such is the price of optimization. */
2589 continue;
2590
2591 /* Move the INSN. */
2592 reorder_insns (insn, insn, get_last_insn ());
2593 }
2594
2595 emit_jump_insn_after (gen_jump (start_label),
2596 PREV_INSN (newstart_label));
2597 emit_barrier_after (PREV_INSN (newstart_label));
2598 start_label = newstart_label;
2599 }
2600 }
2601
2602 if (needs_end_jump)
2603 {
2604 emit_jump (start_label);
2605 emit_note (NULL_PTR, NOTE_INSN_LOOP_END);
2606 }
2607 emit_label (loop_stack->data.loop.end_label);
2608
2609 POPSTACK (loop_stack);
2610
2611 last_expr_type = 0;
2612 }
2613
2614 /* Generate a jump to the current loop's continue-point.
2615 This is usually the top of the loop, but may be specified
2616 explicitly elsewhere. If not currently inside a loop,
2617 return 0 and do nothing; caller will print an error message. */
2618
2619 int
2620 expand_continue_loop (whichloop)
2621 struct nesting *whichloop;
2622 {
2623 last_expr_type = 0;
2624 if (whichloop == 0)
2625 whichloop = loop_stack;
2626 if (whichloop == 0)
2627 return 0;
2628 expand_goto_internal (NULL_TREE, whichloop->data.loop.continue_label,
2629 NULL_RTX);
2630 return 1;
2631 }
2632
2633 /* Generate a jump to exit the current loop. If not currently inside a loop,
2634 return 0 and do nothing; caller will print an error message. */
2635
2636 int
2637 expand_exit_loop (whichloop)
2638 struct nesting *whichloop;
2639 {
2640 last_expr_type = 0;
2641 if (whichloop == 0)
2642 whichloop = loop_stack;
2643 if (whichloop == 0)
2644 return 0;
2645 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label, NULL_RTX);
2646 return 1;
2647 }
2648
2649 /* Generate a conditional jump to exit the current loop if COND
2650 evaluates to zero. If not currently inside a loop,
2651 return 0 and do nothing; caller will print an error message. */
2652
2653 int
2654 expand_exit_loop_if_false (whichloop, cond)
2655 struct nesting *whichloop;
2656 tree cond;
2657 {
2658 rtx label = gen_label_rtx ();
2659 rtx last_insn;
2660 last_expr_type = 0;
2661
2662 if (whichloop == 0)
2663 whichloop = loop_stack;
2664 if (whichloop == 0)
2665 return 0;
2666 /* In order to handle fixups, we actually create a conditional jump
2667 around an unconditional branch to exit the loop. If fixups are
2668 necessary, they go before the unconditional branch. */
2669
2670 do_jump (cond, NULL_RTX, label);
2671 last_insn = get_last_insn ();
2672 if (GET_CODE (last_insn) == CODE_LABEL)
2673 whichloop->data.loop.alt_end_label = last_insn;
2674 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label,
2675 NULL_RTX);
2676 emit_label (label);
2677
2678 return 1;
2679 }
2680
2681 /* Return nonzero if the loop nest is empty. Else return zero. */
2682
2683 int
2684 stmt_loop_nest_empty ()
2685 {
2686 /* cfun->stmt can be NULL if we are building a call to get the
2687 EH context for a setjmp/longjmp EH target and the current
2688 function was a deferred inline function. */
2689 return (cfun->stmt == NULL || loop_stack == NULL);
2690 }
2691
2692 /* Return non-zero if we should preserve sub-expressions as separate
2693 pseudos. We never do so if we aren't optimizing. We always do so
2694 if -fexpensive-optimizations.
2695
2696 Otherwise, we only do so if we are in the "early" part of a loop. I.e.,
2697 the loop may still be a small one. */
2698
2699 int
2700 preserve_subexpressions_p ()
2701 {
2702 rtx insn;
2703
2704 if (flag_expensive_optimizations)
2705 return 1;
2706
2707 if (optimize == 0 || cfun == 0 || cfun->stmt == 0 || loop_stack == 0)
2708 return 0;
2709
2710 insn = get_last_insn_anywhere ();
2711
2712 return (insn
2713 && (INSN_UID (insn) - INSN_UID (loop_stack->data.loop.start_label)
2714 < n_non_fixed_regs * 3));
2715
2716 }
2717
2718 /* Generate a jump to exit the current loop, conditional, binding contour
2719 or case statement. Not all such constructs are visible to this function,
2720 only those started with EXIT_FLAG nonzero. Individual languages use
2721 the EXIT_FLAG parameter to control which kinds of constructs you can
2722 exit this way.
2723
2724 If not currently inside anything that can be exited,
2725 return 0 and do nothing; caller will print an error message. */
2726
2727 int
2728 expand_exit_something ()
2729 {
2730 struct nesting *n;
2731 last_expr_type = 0;
2732 for (n = nesting_stack; n; n = n->all)
2733 if (n->exit_label != 0)
2734 {
2735 expand_goto_internal (NULL_TREE, n->exit_label, NULL_RTX);
2736 return 1;
2737 }
2738
2739 return 0;
2740 }
2741 \f
2742 /* Generate RTL to return from the current function, with no value.
2743 (That is, we do not do anything about returning any value.) */
2744
2745 void
2746 expand_null_return ()
2747 {
2748 struct nesting *block = block_stack;
2749 rtx last_insn = get_last_insn ();
2750
2751 /* If this function was declared to return a value, but we
2752 didn't, clobber the return registers so that they are not
2753 propagated live to the rest of the function. */
2754 clobber_return_register ();
2755
2756 /* Does any pending block have cleanups? */
2757 while (block && block->data.block.cleanups == 0)
2758 block = block->next;
2759
2760 /* If yes, use a goto to return, since that runs cleanups. */
2761
2762 expand_null_return_1 (last_insn, block != 0);
2763 }
2764
2765 /* Generate RTL to return from the current function, with value VAL. */
2766
2767 static void
2768 expand_value_return (val)
2769 rtx val;
2770 {
2771 struct nesting *block = block_stack;
2772 rtx last_insn = get_last_insn ();
2773 rtx return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
2774
2775 /* Copy the value to the return location
2776 unless it's already there. */
2777
2778 if (return_reg != val)
2779 {
2780 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
2781 #ifdef PROMOTE_FUNCTION_RETURN
2782 int unsignedp = TREE_UNSIGNED (type);
2783 enum machine_mode old_mode
2784 = DECL_MODE (DECL_RESULT (current_function_decl));
2785 enum machine_mode mode
2786 = promote_mode (type, old_mode, &unsignedp, 1);
2787
2788 if (mode != old_mode)
2789 val = convert_modes (mode, old_mode, val, unsignedp);
2790 #endif
2791 if (GET_CODE (return_reg) == PARALLEL)
2792 emit_group_load (return_reg, val, int_size_in_bytes (type),
2793 TYPE_ALIGN (type));
2794 else
2795 emit_move_insn (return_reg, val);
2796 }
2797
2798 /* Does any pending block have cleanups? */
2799
2800 while (block && block->data.block.cleanups == 0)
2801 block = block->next;
2802
2803 /* If yes, use a goto to return, since that runs cleanups.
2804 Use LAST_INSN to put cleanups *before* the move insn emitted above. */
2805
2806 expand_null_return_1 (last_insn, block != 0);
2807 }
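/* A hedged example of the promotion above: for a function returning
   `short' on a target whose PROMOTE_FUNCTION_RETURN widens return
   values to a full word, OLD_MODE would be HImode, promote_mode
   would yield SImode (extended according to UNSIGNEDP), and
   convert_modes widens VAL to match before the move.  */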
2808
2809 /* Output a return with no value. If LAST_INSN is nonzero,
2810 pretend that the return takes place after LAST_INSN.
2811 If USE_GOTO is nonzero then don't use a return instruction;
2812 go to the return label instead. This causes any cleanups
2813 of pending blocks to be executed normally. */
2814
2815 static void
2816 expand_null_return_1 (last_insn, use_goto)
2817 rtx last_insn;
2818 int use_goto;
2819 {
2820 rtx end_label = cleanup_label ? cleanup_label : return_label;
2821
2822 clear_pending_stack_adjust ();
2823 do_pending_stack_adjust ();
2824 last_expr_type = 0;
2825
2826 /* PCC-struct return always uses an epilogue. */
2827 if (current_function_returns_pcc_struct || use_goto)
2828 {
2829 if (end_label == 0)
2830 end_label = return_label = gen_label_rtx ();
2831 expand_goto_internal (NULL_TREE, end_label, last_insn);
2832 return;
2833 }
2834
2835 /* Otherwise output a simple return-insn if one is available,
2836 unless it won't do the job. */
2837 #ifdef HAVE_return
2838 if (HAVE_return && use_goto == 0 && cleanup_label == 0)
2839 {
2840 emit_jump_insn (gen_return ());
2841 emit_barrier ();
2842 return;
2843 }
2844 #endif
2845
2846 /* Otherwise jump to the epilogue. */
2847 expand_goto_internal (NULL_TREE, end_label, last_insn);
2848 }
2849 \f
2850 /* Generate RTL to evaluate the expression RETVAL and return it
2851 from the current function. */
2852
2853 void
2854 expand_return (retval)
2855 tree retval;
2856 {
2857 /* If there are any cleanups to be performed, then they will
2858 be inserted following LAST_INSN. It is desirable
2859 that the last_insn, for such purposes, should be the
2860 last insn before computing the return value. Otherwise, cleanups
2861 which call functions can clobber the return value. */
2862 /* ??? rms: I think that is erroneous, because in C++ it would
2863 run destructors on variables that might be used in the subsequent
2864 computation of the return value. */
2865 rtx last_insn = 0;
2866 rtx result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
2867 register rtx val = 0;
2868 tree retval_rhs;
2869 int cleanups;
2870
2871 /* If function wants no value, give it none. */
2872 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
2873 {
2874 expand_expr (retval, NULL_RTX, VOIDmode, 0);
2875 emit_queue ();
2876 expand_null_return ();
2877 return;
2878 }
2879
2880 /* Are any cleanups needed? E.g. C++ destructors to be run? */
2881 /* This is not sufficient. We also need to watch for cleanups of the
2882 expression we are about to expand. Unfortunately, we cannot know
2883 if it has cleanups until we expand it, and we want to change how we
2884 expand it depending upon if we need cleanups. We can't win. */
2885 #if 0
2886 cleanups = any_pending_cleanups (1);
2887 #else
2888 cleanups = 1;
2889 #endif
2890
2891 if (retval == error_mark_node)
2892 retval_rhs = NULL_TREE;
2893 else if (TREE_CODE (retval) == RESULT_DECL)
2894 retval_rhs = retval;
2895 else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
2896 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
2897 retval_rhs = TREE_OPERAND (retval, 1);
2898 else if (VOID_TYPE_P (TREE_TYPE (retval)))
2899 /* Recognize tail-recursive call to void function. */
2900 retval_rhs = retval;
2901 else
2902 retval_rhs = NULL_TREE;
2903
2904 /* Only use `last_insn' if there are cleanups which must be run. */
2905 if (cleanups || cleanup_label != 0)
2906 last_insn = get_last_insn ();
2907
2908 /* Distribute return down conditional expr if either of the sides
2909 may involve tail recursion (see test below). This increases the number
2910 of tail recursions we see. Don't do this always, since it can produce
2911 sub-optimal code in some cases, and we distribute assignments into
2912 conditional expressions when it would help. */
2913
2914 if (optimize && retval_rhs != 0
2915 && frame_offset == 0
2916 && TREE_CODE (retval_rhs) == COND_EXPR
2917 && (TREE_CODE (TREE_OPERAND (retval_rhs, 1)) == CALL_EXPR
2918 || TREE_CODE (TREE_OPERAND (retval_rhs, 2)) == CALL_EXPR))
2919 {
2920 rtx label = gen_label_rtx ();
2921 tree expr;
2922
2923 do_jump (TREE_OPERAND (retval_rhs, 0), label, NULL_RTX);
2924 start_cleanup_deferral ();
2925 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
2926 DECL_RESULT (current_function_decl),
2927 TREE_OPERAND (retval_rhs, 1));
2928 TREE_SIDE_EFFECTS (expr) = 1;
2929 expand_return (expr);
2930 emit_label (label);
2931
2932 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
2933 DECL_RESULT (current_function_decl),
2934 TREE_OPERAND (retval_rhs, 2));
2935 TREE_SIDE_EFFECTS (expr) = 1;
2936 expand_return (expr);
2937 end_cleanup_deferral ();
2938 return;
2939 }
2940
2941 /* If the result is an aggregate that is being returned in one (or more)
2942 registers, load the registers here. The compiler currently can't handle
2943 copying a BLKmode value into registers. We could put this code in a
2944 more general area (for use by everyone instead of just function
2945 call/return), but until this feature is generally usable it is kept here
2946 (and in expand_call). The value must go into a pseudo in case there
2947 are cleanups that will clobber the real return register. */
2948
2949 if (retval_rhs != 0
2950 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
2951 && GET_CODE (result_rtl) == REG)
2952 {
2953 int i;
2954 unsigned HOST_WIDE_INT bitpos, xbitpos;
2955 unsigned HOST_WIDE_INT big_endian_correction = 0;
2956 unsigned HOST_WIDE_INT bytes
2957 = int_size_in_bytes (TREE_TYPE (retval_rhs));
2958 int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2959 unsigned int bitsize
2960 = MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)), BITS_PER_WORD);
2961 rtx *result_pseudos = (rtx *) alloca (sizeof (rtx) * n_regs);
2962 rtx result_reg, src = NULL_RTX, dst = NULL_RTX;
2963 rtx result_val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
2964 enum machine_mode tmpmode, result_reg_mode;
2965
2966 /* Structures whose size is not a multiple of a word are aligned
2967 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2968 machine, this means we must skip the empty high order bytes when
2969 calculating the bit offset. */
2970 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2971 big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2972 * BITS_PER_UNIT));
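/* Worked example (illustrative): returning a 6-byte structure on a
   32-bit BYTES_BIG_ENDIAN target gives bytes % UNITS_PER_WORD == 2,
   so big_endian_correction = 32 - 2 * 8 = 16, and the destination
   stores start at bit offset 16, right-justifying the value as
   described above.  */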
2973
2974 /* Copy the structure BITSIZE bits at a time. */
2975 for (bitpos = 0, xbitpos = big_endian_correction;
2976 bitpos < bytes * BITS_PER_UNIT;
2977 bitpos += bitsize, xbitpos += bitsize)
2978 {
2979 /* We need a new destination pseudo each time xbitpos is
2980 on a word boundary and when xbitpos == big_endian_correction
2981 (the first time through). */
2982 if (xbitpos % BITS_PER_WORD == 0
2983 || xbitpos == big_endian_correction)
2984 {
2985 /* Generate an appropriate register. */
2986 dst = gen_reg_rtx (word_mode);
2987 result_pseudos[xbitpos / BITS_PER_WORD] = dst;
2988
2989 /* Clobber the destination before we move anything into it. */
2990 emit_insn (gen_rtx_CLOBBER (VOIDmode, dst));
2991 }
2992
2993 /* We need a new source operand each time bitpos is on a word
2994 boundary. */
2995 if (bitpos % BITS_PER_WORD == 0)
2996 src = operand_subword_force (result_val,
2997 bitpos / BITS_PER_WORD,
2998 BLKmode);
2999
3000 /* Use bitpos for the source extraction (left justified) and
3001 xbitpos for the destination store (right justified). */
3002 store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
3003 extract_bit_field (src, bitsize,
3004 bitpos % BITS_PER_WORD, 1,
3005 NULL_RTX, word_mode, word_mode,
3006 bitsize, BITS_PER_WORD),
3007 bitsize, BITS_PER_WORD);
3008 }
3009
3010 /* Find the smallest integer mode large enough to hold the
3011 entire structure and use that mode instead of BLKmode
3012 on the USE insn for the return register. */
3013 bytes = int_size_in_bytes (TREE_TYPE (retval_rhs));
3014 for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3015 tmpmode != VOIDmode;
3016 tmpmode = GET_MODE_WIDER_MODE (tmpmode))
3017 {
3018 /* Have we found a large enough mode? */
3019 if (GET_MODE_SIZE (tmpmode) >= bytes)
3020 break;
3021 }
3022
3023 /* No suitable mode found. */
3024 if (tmpmode == VOIDmode)
3025 abort ();
3026
3027 PUT_MODE (result_rtl, tmpmode);
3028
3029 if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
3030 result_reg_mode = word_mode;
3031 else
3032 result_reg_mode = tmpmode;
3033 result_reg = gen_reg_rtx (result_reg_mode);
3034
3035 emit_queue ();
3036 for (i = 0; i < n_regs; i++)
3037 emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
3038 result_pseudos[i]);
3039
3040 if (tmpmode != result_reg_mode)
3041 result_reg = gen_lowpart (tmpmode, result_reg);
3042
3043 expand_value_return (result_reg);
3044 }
3045 else if (cleanups
3046 && retval_rhs != 0
3047 && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
3048 && (GET_CODE (result_rtl) == REG
3049 || (GET_CODE (result_rtl) == PARALLEL)))
3050 {
3051 /* Calculate the return value into a temporary (usually a pseudo
3052 reg). */
3053 val = assign_temp (TREE_TYPE (DECL_RESULT (current_function_decl)),
3054 0, 0, 1);
3055 val = expand_expr (retval_rhs, val, GET_MODE (val), 0);
3056 val = force_not_mem (val);
3057 emit_queue ();
3058 /* Return the calculated value, doing cleanups first. */
3059 expand_value_return (val);
3060 }
3061 else
3062 {
3063 /* No cleanups or no hard reg used;
3064 calculate value into hard return reg. */
3065 expand_expr (retval, const0_rtx, VOIDmode, 0);
3066 emit_queue ();
3067 expand_value_return (result_rtl);
3068 }
3069 }
3070
3071 /* Return 1 if the end of the generated RTX is not a barrier.
3072 This means code already compiled can drop through. */
3073
3074 int
3075 drop_through_at_end_p ()
3076 {
3077 rtx insn = get_last_insn ();
3078 while (insn && GET_CODE (insn) == NOTE)
3079 insn = PREV_INSN (insn);
3080 return insn && GET_CODE (insn) != BARRIER;
3081 }
3082 \f
3083 /* Attempt to optimize a potential tail recursion call into a goto.
3084 ARGUMENTS are the arguments to a CALL_EXPR; LAST_INSN indicates
3085 where to place the jump to the tail recursion label.
3086
3087 Return TRUE if the call was optimized into a goto. */
3088
3089 int
3090 optimize_tail_recursion (arguments, last_insn)
3091 tree arguments;
3092 rtx last_insn;
3093 {
3094 /* Finish checking validity, and if valid emit code to set the
3095 argument variables for the new call. */
3096 if (tail_recursion_args (arguments, DECL_ARGUMENTS (current_function_decl)))
3097 {
3098 if (tail_recursion_label == 0)
3099 {
3100 tail_recursion_label = gen_label_rtx ();
3101 emit_label_after (tail_recursion_label,
3102 tail_recursion_reentry);
3103 }
3104 emit_queue ();
3105 expand_goto_internal (NULL_TREE, tail_recursion_label, last_insn);
3106 emit_barrier ();
3107 return 1;
3108 }
3109 return 0;
3110 }
3111
3112 /* Emit code to alter this function's formal parms for a tail-recursive call.
3113 ACTUALS is a list of actual parameter expressions (chain of TREE_LISTs).
3114 FORMALS is the chain of decls of formals.
3115 Return 1 if this can be done;
3116 otherwise return 0 and do not emit any code. */
3117
3118 static int
3119 tail_recursion_args (actuals, formals)
3120 tree actuals, formals;
3121 {
3122 register tree a = actuals, f = formals;
3123 register int i;
3124 register rtx *argvec;
3125
3126 /* Check that number and types of actuals are compatible
3127 with the formals. This is not always true in valid C code.
3128 Also check that no formal needs to be addressable
3129 and that all formals are scalars. */
3130
3131 /* Also count the args. */
3132
3133 for (a = actuals, f = formals, i = 0; a && f; a = TREE_CHAIN (a), f = TREE_CHAIN (f), i++)
3134 {
3135 if (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_VALUE (a)))
3136 != TYPE_MAIN_VARIANT (TREE_TYPE (f)))
3137 return 0;
3138 if (GET_CODE (DECL_RTL (f)) != REG || DECL_MODE (f) == BLKmode)
3139 return 0;
3140 }
3141 if (a != 0 || f != 0)
3142 return 0;
3143
3144 /* Compute all the actuals. */
3145
3146 argvec = (rtx *) alloca (i * sizeof (rtx));
3147
3148 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
3149 argvec[i] = expand_expr (TREE_VALUE (a), NULL_RTX, VOIDmode, 0);
3150
3151 /* Find which actual values refer to current values of previous formals.
3152 Copy each of them now, before any formal is changed. */
3153
3154 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
3155 {
3156 int copy = 0;
3157 register int j;
3158 for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++)
3159 if (reg_mentioned_p (DECL_RTL (f), argvec[i]))
3160 {
3161 copy = 1;
3162 break;
3163 }
3164 if (copy)
3165 argvec[i] = copy_to_reg (argvec[i]);
3166 }
3167
3168 /* Store the values of the actuals into the formals. */
3169
3170 for (f = formals, a = actuals, i = 0; f;
3171 f = TREE_CHAIN (f), a = TREE_CHAIN (a), i++)
3172 {
3173 if (GET_MODE (DECL_RTL (f)) == GET_MODE (argvec[i]))
3174 emit_move_insn (DECL_RTL (f), argvec[i]);
3175 else
3176 convert_move (DECL_RTL (f), argvec[i],
3177 TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a))));
3178 }
3179
3180 free_temp_slots ();
3181 return 1;
3182 }
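/* A hedged illustration of the transformation above: for the
   tail-recursive call `return f (b, a);' within `int f (int a, int b)',
   ARGVEC first receives the rtx for `b' and `a'.  The reg_mentioned_p
   pass notices that the second actual refers to formal A, which the
   first store would clobber, so `a' is copied to a fresh register
   before any formal is overwritten; the stores then rewrite A and B,
   and optimize_tail_recursion jumps back to tail_recursion_label.  */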
3183 \f
3184 /* Generate the RTL code for entering a binding contour.
3185 The variables are declared one by one, by calls to `expand_decl'.
3186
3187 FLAGS is a bitwise or of the following flags:
3188
3189 1 - Nonzero if this construct should be visible to
3190 `exit_something'.
3191
3192 2 - Nonzero if this contour does not require a
3193 NOTE_INSN_BLOCK_BEG note. Virtually all calls from
3194 language-independent code should set this flag because they
3195 will not create corresponding BLOCK nodes. (There should be
3196 a one-to-one correspondence between NOTE_INSN_BLOCK_BEG notes
3197 and BLOCKs.) If this flag is set, MARK_ENDS should be zero
3198 when expand_end_bindings is called.
3199
3200 If we are creating a NOTE_INSN_BLOCK_BEG note, a BLOCK may
3201 optionally be supplied. If so, it becomes the NOTE_BLOCK for the
3202 note. */
3203
3204 void
3205 expand_start_bindings_and_block (flags, block)
3206 int flags;
3207 tree block;
3208 {
3209 struct nesting *thisblock = ALLOC_NESTING ();
3210 rtx note;
3211 int exit_flag = ((flags & 1) != 0);
3212 int block_flag = ((flags & 2) == 0);
3213
3214 /* If a BLOCK is supplied, then the caller should be requesting a
3215 NOTE_INSN_BLOCK_BEG note. */
3216 if (!block_flag && block)
3217 abort ();
3218
3219 /* Create a note to mark the beginning of the block. */
3220 if (block_flag)
3221 {
3222 note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
3223 NOTE_BLOCK (note) = block;
3224 }
3225 else
3226 note = emit_note (NULL_PTR, NOTE_INSN_DELETED);
3227
3228 /* Make an entry on block_stack for the block we are entering. */
3229
3230 thisblock->next = block_stack;
3231 thisblock->all = nesting_stack;
3232 thisblock->depth = ++nesting_depth;
3233 thisblock->data.block.stack_level = 0;
3234 thisblock->data.block.cleanups = 0;
3235 thisblock->data.block.n_function_calls = 0;
3236 thisblock->data.block.exception_region = 0;
3237 thisblock->data.block.block_target_temp_slot_level = target_temp_slot_level;
3238
3239 thisblock->data.block.conditional_code = 0;
3240 thisblock->data.block.last_unconditional_cleanup = note;
3241 /* When we insert instructions after the last unconditional cleanup,
3242 we don't adjust last_insn. That means that a later add_insn will
3243 clobber the instructions we've just added. The easiest way to
3244 fix this is to just insert another instruction here, so that the
3245 instructions inserted after the last unconditional cleanup are
3246 never the last instruction. */
3247 emit_note (NULL_PTR, NOTE_INSN_DELETED);
3248 thisblock->data.block.cleanup_ptr = &thisblock->data.block.cleanups;
3249
3250 if (block_stack
3251 && !(block_stack->data.block.cleanups == NULL_TREE
3252 && block_stack->data.block.outer_cleanups == NULL_TREE))
3253 thisblock->data.block.outer_cleanups
3254 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
3255 block_stack->data.block.outer_cleanups);
3256 else
3257 thisblock->data.block.outer_cleanups = 0;
3258 thisblock->data.block.label_chain = 0;
3259 thisblock->data.block.innermost_stack_block = stack_block_stack;
3260 thisblock->data.block.first_insn = note;
3261 thisblock->data.block.block_start_count = ++current_block_start_count;
3262 thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
3263 block_stack = thisblock;
3264 nesting_stack = thisblock;
3265
3266 /* Make a new level for allocating stack slots. */
3267 push_temp_slots ();
3268 }
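/* A hedged sketch of how a front end brackets a `{ ... }' scope with
   these calls (the expand_end_bindings arguments shown follow its use
   elsewhere in this file and are an assumption here):

       expand_start_bindings_and_block (0, block);
       ... expand_decl for each local, then expand the body ...
       expand_end_bindings (vars, 1, 0);

   With FLAGS of 0 a NOTE_INSN_BLOCK_BEG is emitted above, so the
   matching end call is asked to mark the block end as well.  */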
3269
3270 /* Specify the scope of temporaries created by TARGET_EXPRs. Similar
3271 to CLEANUP_POINT_EXPR, but handles cases when a series of calls to
3272 expand_expr are made. After we end the region, we know that all
3273 space for all temporaries that were created by TARGET_EXPRs will be
3274 destroyed and their space freed for reuse. */
3275
3276 void
3277 expand_start_target_temps ()
3278 {
3279 /* This is so that even if the result is preserved, the space
3280 allocated will be freed, as we know that it is no longer in use. */
3281 push_temp_slots ();
3282
3283 /* Start a new binding layer that will keep track of all cleanup
3284 actions to be performed. */
3285 expand_start_bindings (2);
3286
3287 target_temp_slot_level = temp_slot_level;
3288 }
3289
3290 void
3291 expand_end_target_temps ()
3292 {
3293 expand_end_bindings (NULL_TREE, 0, 0);
3294
3295 /* This is so that even if the result is preserved, the space
3296 allocated will be freed, as we know that it is no longer in use. */
3297 pop_temp_slots ();
3298 }
3299
3300 /* Given a pointer to a BLOCK node return non-zero if (and only if) the node
3301 in question represents the outermost pair of curly braces (i.e. the "body
3302 block") of a function or method.
3303
3304 For any BLOCK node representing a "body block" of a function or method, the
3305 BLOCK_SUPERCONTEXT of the node will point to another BLOCK node which
3306 represents the outermost (function) scope for the function or method (i.e.
3307 the one which includes the formal parameters). The BLOCK_SUPERCONTEXT of
3308 *that* node in turn will point to the relevant FUNCTION_DECL node. */
3309
3310 int
3311 is_body_block (stmt)
3312 register tree stmt;
3313 {
3314 if (TREE_CODE (stmt) == BLOCK)
3315 {
3316 tree parent = BLOCK_SUPERCONTEXT (stmt);
3317
3318 if (parent && TREE_CODE (parent) == BLOCK)
3319 {
3320 tree grandparent = BLOCK_SUPERCONTEXT (parent);
3321
3322 if (grandparent && TREE_CODE (grandparent) == FUNCTION_DECL)
3323 return 1;
3324 }
3325 }
3326
3327 return 0;
3328 }
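/* Illustrative tree shape for the test above:

       FUNCTION_DECL
         `-- BLOCK          (outermost scope; formal parameters)
               `-- BLOCK    (the "body block"; is_body_block returns 1)

   Any more deeply nested BLOCK has another BLOCK, not a
   FUNCTION_DECL, as its grandparent, so the function returns 0.  */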
3329
3330 /* Mark top block of block_stack as an implicit binding for an
3331 exception region. This is used to prevent infinite recursion when
3332 ending a binding with expand_end_bindings. It is only ever called
3333 by expand_eh_region_start, as that is the only way to create a
3334 block stack for an exception region. */
3335
3336 void
3337 mark_block_as_eh_region ()
3338 {
3339 block_stack->data.block.exception_region = 1;
3340 if (block_stack->next
3341 && block_stack->next->data.block.conditional_code)
3342 {
3343 block_stack->data.block.conditional_code
3344 = block_stack->next->data.block.conditional_code;
3345 block_stack->data.block.last_unconditional_cleanup
3346 = block_stack->next->data.block.last_unconditional_cleanup;
3347 block_stack->data.block.cleanup_ptr
3348 = block_stack->next->data.block.cleanup_ptr;
3349 }
3350 }
3351
3352 /* True if we are currently emitting insns in an area of output code
3353 that is controlled by a conditional expression. This is used by
3354 the cleanup handling code to generate conditional cleanup actions. */
3355
3356 int
3357 conditional_context ()
3358 {
3359 return block_stack && block_stack->data.block.conditional_code;
3360 }
3361
3362 /* Mark top block of block_stack as not for an implicit binding for an
3363 exception region. This is only ever done by expand_eh_region_end
3364 to let expand_end_bindings know that it is being called explicitly
3365 to end the binding layer for just the binding layer associated with
3366 the exception region; otherwise expand_end_bindings would try to
3367 end all implicit binding layers for exception regions, and then
3368 one normal binding layer. */
3369
3370 void
3371 mark_block_as_not_eh_region ()
3372 {
3373 block_stack->data.block.exception_region = 0;
3374 }
3375
3376 /* True if the top block of block_stack was marked as for an exception
3377 region by mark_block_as_eh_region. */
3378
3379 int
3380 is_eh_region ()
3381 {
3382 return cfun && block_stack && block_stack->data.block.exception_region;
3383 }
3384
3385 /* Emit a handler label for a nonlocal goto handler.
3386 Also emit code to store the handler label in SLOT before BEFORE_INSN. */
3387
3388 static rtx
3389 expand_nl_handler_label (slot, before_insn)
3390 rtx slot, before_insn;
3391 {
3392 rtx insns;
3393 rtx handler_label = gen_label_rtx ();
3394
3395 /* Don't let jump_optimize delete the handler. */
3396 LABEL_PRESERVE_P (handler_label) = 1;
3397
3398 start_sequence ();
3399 emit_move_insn (slot, gen_rtx_LABEL_REF (Pmode, handler_label));
3400 insns = get_insns ();
3401 end_sequence ();
3402 emit_insns_before (insns, before_insn);
3403
3404 emit_label (handler_label);
3405
3406 return handler_label;
3407 }
3408
3409 /* Emit code to restore vital registers at the beginning of a nonlocal goto
3410 handler. */
3411 static void
3412 expand_nl_goto_receiver ()
3413 {
3414 #ifdef HAVE_nonlocal_goto
3415 if (! HAVE_nonlocal_goto)
3416 #endif
3417 /* First adjust our frame pointer to its actual value. It was
3418 previously set to the start of the virtual area corresponding to
3419 the stacked variables when we branched here and now needs to be
3420 adjusted to the actual hardware fp value.
3421
3422 Assignments to virtual registers are converted by
3423 instantiate_virtual_regs into the corresponding assignment
3424 to the underlying register (fp in this case) that makes
3425 the original assignment true.
3426 So the following insn will actually be
3427 decrementing fp by STARTING_FRAME_OFFSET. */
3428 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
3429
3430 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3431 if (fixed_regs[ARG_POINTER_REGNUM])
3432 {
3433 #ifdef ELIMINABLE_REGS
3434 /* If the argument pointer can be eliminated in favor of the
3435 frame pointer, we don't need to restore it. We assume here
3436 that if such an elimination is present, it can always be used.
3437 This is the case on all known machines; if we don't make this
3438 assumption, we do unnecessary saving on many machines. */
3439 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
3440 size_t i;
3441
3442 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
3443 if (elim_regs[i].from == ARG_POINTER_REGNUM
3444 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
3445 break;
3446
3447 if (i == ARRAY_SIZE (elim_regs))
3448 #endif
3449 {
3450 /* Now restore our arg pointer from the address at which it
3451 was saved in our stack frame.
3452 If there hasn't been space allocated for it yet, make
3453 some now. */
3454 if (arg_pointer_save_area == 0)
3455 arg_pointer_save_area
3456 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
3457 emit_move_insn (virtual_incoming_args_rtx,
3458 /* We need a pseudo here, or else
3459 instantiate_virtual_regs_1 complains. */
3460 copy_to_reg (arg_pointer_save_area));
3461 }
3462 }
3463 #endif
3464
3465 #ifdef HAVE_nonlocal_goto_receiver
3466 if (HAVE_nonlocal_goto_receiver)
3467 emit_insn (gen_nonlocal_goto_receiver ());
3468 #endif
3469 }
3470
3471 /* Make handlers for nonlocal gotos taking place in the function calls in
3472 block THISBLOCK. */
3473
3474 static void
3475 expand_nl_goto_receivers (thisblock)
3476 struct nesting *thisblock;
3477 {
3478 tree link;
3479 rtx afterward = gen_label_rtx ();
3480 rtx insns, slot;
3481 rtx label_list;
3482 int any_invalid;
3483
3484 /* Record the handler address in the stack slot for that purpose,
3485 during this block, saving and restoring the outer value. */
3486 if (thisblock->next != 0)
3487 for (slot = nonlocal_goto_handler_slots; slot; slot = XEXP (slot, 1))
3488 {
3489 rtx save_receiver = gen_reg_rtx (Pmode);
3490 emit_move_insn (XEXP (slot, 0), save_receiver);
3491
3492 start_sequence ();
3493 emit_move_insn (save_receiver, XEXP (slot, 0));
3494 insns = get_insns ();
3495 end_sequence ();
3496 emit_insns_before (insns, thisblock->data.block.first_insn);
3497 }
3498
3499 /* Jump around the handlers; they run only when specially invoked. */
3500 emit_jump (afterward);
3501
3502 /* Make a separate handler for each label. */
3503 link = nonlocal_labels;
3504 slot = nonlocal_goto_handler_slots;
3505 label_list = NULL_RTX;
3506 for (; link; link = TREE_CHAIN (link), slot = XEXP (slot, 1))
3507 /* Skip any labels we shouldn't be able to jump to from here;
3508 we generate one special handler for all of them below, which just
3509 calls abort. */
3510 if (! DECL_TOO_LATE (TREE_VALUE (link)))
3511 {
3512 rtx lab;
3513 lab = expand_nl_handler_label (XEXP (slot, 0),
3514 thisblock->data.block.first_insn);
3515 label_list = gen_rtx_EXPR_LIST (VOIDmode, lab, label_list);
3516
3517 expand_nl_goto_receiver ();
3518
3519 /* Jump to the "real" nonlocal label. */
3520 expand_goto (TREE_VALUE (link));
3521 }
3522
3523 /* A second pass over all nonlocal labels; this time we handle those
3524 we should not be able to jump to at this point. */
3525 link = nonlocal_labels;
3526 slot = nonlocal_goto_handler_slots;
3527 any_invalid = 0;
3528 for (; link; link = TREE_CHAIN (link), slot = XEXP (slot, 1))
3529 if (DECL_TOO_LATE (TREE_VALUE (link)))
3530 {
3531 rtx lab;
3532 lab = expand_nl_handler_label (XEXP (slot, 0),
3533 thisblock->data.block.first_insn);
3534 label_list = gen_rtx_EXPR_LIST (VOIDmode, lab, label_list);
3535 any_invalid = 1;
3536 }
3537
3538 if (any_invalid)
3539 {
3540 expand_nl_goto_receiver ();
3541 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "abort"), 0,
3542 VOIDmode, 0);
3543 emit_barrier ();
3544 }
3545
3546 nonlocal_goto_handler_labels = label_list;
3547 emit_label (afterward);
3548 }
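
/* For illustration only, not compiled: a hypothetical GNU C fragment of
   the sort that exercises the receivers built above. The nested
   function jumps to a label declared in its containing function, so the
   containing function must emit a handler that restores fp and the arg
   pointer before control reaches the label. (The called functions are
   made up.) */
#if 0
void
outer (int *array, int size)
{
  __label__ failure;
  int access (int index)
    {
      if (index >= size)
	goto failure;		/* nonlocal goto out of the nested fn */
      return array[index];
    }
  consume (access (0));
  return;
 failure:			/* control arrives here via the handler */
  report_failure ();
}
#endif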
3549
3550 /* Warn about any unused VARS (which may contain nodes other than
3551 VAR_DECLs, but such nodes are ignored). The nodes are connected
3552 via the TREE_CHAIN field. */
3553
3554 void
3555 warn_about_unused_variables (vars)
3556 tree vars;
3557 {
3558 tree decl;
3559
3560 if (warn_unused_variable)
3561 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3562 if (TREE_CODE (decl) == VAR_DECL
3563 && ! TREE_USED (decl)
3564 && ! DECL_IN_SYSTEM_HEADER (decl)
3565 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
3566 warning_with_decl (decl, "unused variable `%s'");
3567 }
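
/* For illustration only, not compiled: with the unused-variable warning
   enabled, the hypothetical X below is diagnosed by the routine above;
   Y is read, so it is not. */
#if 0
void
quiet (void)
{
  int x;			/* warning: unused variable `x' */
  int y = 0;
  consume (y);			/* consume is made up */
}
#endif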
3568
3569 /* Generate RTL code to terminate a binding contour.
3570
3571 VARS is the chain of VAR_DECL nodes for the variables bound in this
3572 contour. There may actually be other nodes in this chain, but any
3573 nodes other than VAR_DECLs are ignored.
3574
3575 MARK_ENDS is nonzero if we should put a note at the beginning
3576 and end of this binding contour.
3577
3578 DONT_JUMP_IN is nonzero if it is not valid to jump into this contour.
3579 (That is true automatically if the contour has a saved stack level.) */
3580
3581 void
3582 expand_end_bindings (vars, mark_ends, dont_jump_in)
3583 tree vars;
3584 int mark_ends;
3585 int dont_jump_in;
3586 {
3587 register struct nesting *thisblock;
3588
3589 while (block_stack->data.block.exception_region)
3590 {
3591 /* Because we don't need or want a new temporary level and
3592 because we didn't create one in expand_eh_region_start,
3593 create a fake one now to avoid removing one in
3594 expand_end_bindings. */
3595 push_temp_slots ();
3596
3597 block_stack->data.block.exception_region = 0;
3598
3599 expand_end_bindings (NULL_TREE, 0, 0);
3600 }
3601
3602 /* Since expand_eh_region_start does an expand_start_bindings, we
3603 have to first end all the bindings that were created by
3604 expand_eh_region_start. */
3605
3606 thisblock = block_stack;
3607
3608 /* If any of the variables in this scope were not used, warn the
3609 user. */
3610 warn_about_unused_variables (vars);
3611
3612 if (thisblock->exit_label)
3613 {
3614 do_pending_stack_adjust ();
3615 emit_label (thisblock->exit_label);
3616 }
3617
3618 /* If necessary, make handlers for nonlocal gotos taking
3619 place in the function calls in this block. */
3620 if (function_call_count != thisblock->data.block.n_function_calls
3621 && nonlocal_labels
3622 /* Make handler for outermost block
3623 if there were any nonlocal gotos to this function. */
3624 && (thisblock->next == 0 ? current_function_has_nonlocal_label
3625 /* Make handler for inner block if it has something
3626 special to do when you jump out of it. */
3627 : (thisblock->data.block.cleanups != 0
3628 || thisblock->data.block.stack_level != 0)))
3629 expand_nl_goto_receivers (thisblock);
3630
3631 /* Don't allow jumping into a block that has a stack level.
3632 Cleanups are allowed, though. */
3633 if (dont_jump_in
3634 || thisblock->data.block.stack_level != 0)
3635 {
3636 struct label_chain *chain;
3637
3638 /* Any labels in this block are no longer valid to go to.
3639 Mark them to cause an error message. */
3640 for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
3641 {
3642 DECL_TOO_LATE (chain->label) = 1;
3643 /* If any goto without a fixup came to this label,
3644 that must be an error, because gotos without fixups
3645 come from outside all saved stack-levels. */
3646 if (TREE_ADDRESSABLE (chain->label))
3647 error_with_decl (chain->label,
3648 "label `%s' used before containing binding contour");
3649 }
3650 }
3651
3652 /* Restore stack level in effect before the block
3653 (only if variable-size objects allocated). */
3654 /* Perform any cleanups associated with the block. */
3655
3656 if (thisblock->data.block.stack_level != 0
3657 || thisblock->data.block.cleanups != 0)
3658 {
3659 int reachable;
3660 rtx insn;
3661
3662 /* Don't let cleanups affect ({...}) constructs. */
3663 int old_expr_stmts_for_value = expr_stmts_for_value;
3664 rtx old_last_expr_value = last_expr_value;
3665 tree old_last_expr_type = last_expr_type;
3666 expr_stmts_for_value = 0;
3667
3668 /* Only clean up here if this point can actually be reached. */
3669 insn = get_last_insn ();
3670 if (GET_CODE (insn) == NOTE)
3671 insn = prev_nonnote_insn (insn);
3672 reachable = (! insn || GET_CODE (insn) != BARRIER);
3673
3674 /* Do the cleanups. */
3675 expand_cleanups (thisblock->data.block.cleanups, NULL_TREE, 0, reachable);
3676 if (reachable)
3677 do_pending_stack_adjust ();
3678
3679 expr_stmts_for_value = old_expr_stmts_for_value;
3680 last_expr_value = old_last_expr_value;
3681 last_expr_type = old_last_expr_type;
3682
3683 /* Restore the stack level. */
3684
3685 if (reachable && thisblock->data.block.stack_level != 0)
3686 {
3687 emit_stack_restore (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3688 thisblock->data.block.stack_level, NULL_RTX);
3689 if (nonlocal_goto_handler_slots != 0)
3690 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level,
3691 NULL_RTX);
3692 }
3693
3694 /* Any gotos out of this block must also do these things.
3695 Also report any gotos with fixups that came to labels in this
3696 level. */
3697 fixup_gotos (thisblock,
3698 thisblock->data.block.stack_level,
3699 thisblock->data.block.cleanups,
3700 thisblock->data.block.first_insn,
3701 dont_jump_in);
3702 }
3703
3704 /* Mark the beginning and end of the scope if requested.
3705 We do this now, after running cleanups on the variables
3706 just going out of scope, so they are in scope for their cleanups. */
3707
3708 if (mark_ends)
3709 {
3710 rtx note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
3711 NOTE_BLOCK (note) = NOTE_BLOCK (thisblock->data.block.first_insn);
3712 }
3713 else
3714 /* Get rid of the beginning-mark if we don't make an end-mark. */
3715 NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;
3716
3717 /* Restore the temporary level of TARGET_EXPRs. */
3718 target_temp_slot_level = thisblock->data.block.block_target_temp_slot_level;
3719
3720 /* Restore block_stack level for containing block. */
3721
3722 stack_block_stack = thisblock->data.block.innermost_stack_block;
3723 POPSTACK (block_stack);
3724
3725 /* Pop the stack slot nesting and free any slots at this level. */
3726 pop_temp_slots ();
3727 }
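
/* For illustration only, not compiled: a sketch of the jump rules
   enforced above. Jumping out of a block with a saved stack level goes
   through a fixup that restores the stack; jumping *into* such a block
   hits the DECL_TOO_LATE error path. */
#if 0
void
bad_jump (int n)
{
  goto in;			/* error: label `in' used before
				   containing binding contour */
  {
    char v[n];			/* variable size: block saves the stack */
  in:
    consume (v);		/* consume is made up */
  }
}
#endif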
3728 \f
3729 /* Generate code to save the stack pointer at the start of the current block
3730 and set up to restore it on exit. */
3731
3732 void
3733 save_stack_pointer ()
3734 {
3735 struct nesting *thisblock = block_stack;
3736
3737 if (thisblock->data.block.stack_level == 0)
3738 {
3739 emit_stack_save (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3740 &thisblock->data.block.stack_level,
3741 thisblock->data.block.first_insn);
3742 stack_block_stack = thisblock;
3743 }
3744 }
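
/* For illustration only, not compiled: a block with a variable-sized
   object. Entering the block records the stack pointer through
   save_stack_pointer, and expand_end_bindings emits the matching
   restore, so the space for V is reclaimed at the close brace. */
#if 0
void
scratch (int n)
{
  {
    char v[n];			/* dynamic size forces a stack save */
    consume (v);		/* consume is made up */
  }				/* stack level restored here */
}
#endif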
3745 \f
3746 /* Generate RTL for the automatic variable declaration DECL.
3747 (Other kinds of declarations are simply ignored if seen here.) */
3748
3749 void
3750 expand_decl (decl)
3751 register tree decl;
3752 {
3753 struct nesting *thisblock;
3754 tree type;
3755
3756 type = TREE_TYPE (decl);
3757
3758 /* Only automatic variables need any expansion done.
3759 Static and external variables, and external functions,
3760 will be handled by `assemble_variable' (called from finish_decl).
3761 TYPE_DECL and CONST_DECL require nothing.
3762 PARM_DECLs are handled in `assign_parms'. */
3763
3764 if (TREE_CODE (decl) != VAR_DECL)
3765 return;
3766 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
3767 return;
3768
3769 thisblock = block_stack;
3770
3771 /* Create the RTL representation for the variable. */
3772
3773 if (type == error_mark_node)
3774 DECL_RTL (decl) = gen_rtx_MEM (BLKmode, const0_rtx);
3775 else if (DECL_SIZE (decl) == 0)
3776 /* Variable with incomplete type. */
3777 {
3778 if (DECL_INITIAL (decl) == 0)
3779 /* Error message was already done; now avoid a crash. */
3780 DECL_RTL (decl) = assign_stack_temp (DECL_MODE (decl), 0, 1);
3781 else
3782 /* An initializer is going to decide the size of this array.
3783 Until we know the size, represent its address with a reg. */
3784 DECL_RTL (decl) = gen_rtx_MEM (BLKmode, gen_reg_rtx (Pmode));
3785
3786 set_mem_attributes (DECL_RTL (decl), decl, 1);
3787 }
3788 else if (DECL_MODE (decl) != BLKmode
3789 /* If -ffloat-store, don't put explicit float vars
3790 into regs. */
3791 && !(flag_float_store
3792 && TREE_CODE (type) == REAL_TYPE)
3793 && ! TREE_THIS_VOLATILE (decl)
3794 && ! TREE_ADDRESSABLE (decl)
3795 && (DECL_REGISTER (decl) || optimize)
3796 /* if -fcheck-memory-usage, check all variables. */
3797 && ! current_function_check_memory_usage)
3798 {
3799 /* Automatic variable that can go in a register. */
3800 int unsignedp = TREE_UNSIGNED (type);
3801 enum machine_mode reg_mode
3802 = promote_mode (type, DECL_MODE (decl), &unsignedp, 0);
3803
3804 DECL_RTL (decl) = gen_reg_rtx (reg_mode);
3805 mark_user_reg (DECL_RTL (decl));
3806
3807 if (POINTER_TYPE_P (type))
3808 mark_reg_pointer (DECL_RTL (decl),
3809 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl))));
3810
3811 maybe_set_unchanging (DECL_RTL (decl), decl);
3812 }
3813
3814 else if (TREE_CODE (DECL_SIZE_UNIT (decl)) == INTEGER_CST
3815 && ! (flag_stack_check && ! STACK_CHECK_BUILTIN
3816 && 0 < compare_tree_int (DECL_SIZE_UNIT (decl),
3817 STACK_CHECK_MAX_VAR_SIZE)))
3818 {
3819 /* Variable of fixed size that goes on the stack. */
3820 rtx oldaddr = 0;
3821 rtx addr;
3822
3823 /* If we previously made RTL for this decl, it must be an array
3824 whose size was determined by the initializer.
3825 The old address was a register; set that register now
3826 to the proper address. */
3827 if (DECL_RTL (decl) != 0)
3828 {
3829 if (GET_CODE (DECL_RTL (decl)) != MEM
3830 || GET_CODE (XEXP (DECL_RTL (decl), 0)) != REG)
3831 abort ();
3832 oldaddr = XEXP (DECL_RTL (decl), 0);
3833 }
3834
3835 DECL_RTL (decl) = assign_temp (TREE_TYPE (decl), 1, 1, 1);
3836
3837 /* Set alignment we actually gave this decl. */
3838 DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
3839 : GET_MODE_BITSIZE (DECL_MODE (decl)));
3840 DECL_USER_ALIGN (decl) = 0;
3841
3842 if (oldaddr)
3843 {
3844 addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
3845 if (addr != oldaddr)
3846 emit_move_insn (oldaddr, addr);
3847 }
3848 }
3849 else
3850 /* Dynamic-size object: must push space on the stack. */
3851 {
3852 rtx address, size;
3853
3854 /* Record the stack pointer on entry to the block, if we have
3855 not already done so. */
3856 do_pending_stack_adjust ();
3857 save_stack_pointer ();
3858
3859 /* In function-at-a-time mode, variable_size doesn't expand this,
3860 so do it now. */
3861 if (TREE_CODE (type) == ARRAY_TYPE && TYPE_DOMAIN (type))
3862 expand_expr (TYPE_MAX_VALUE (TYPE_DOMAIN (type)),
3863 const0_rtx, VOIDmode, 0);
3864
3865 /* Compute the variable's size, in bytes. */
3866 size = expand_expr (DECL_SIZE_UNIT (decl), NULL_RTX, VOIDmode, 0);
3867 free_temp_slots ();
3868
3869 /* Allocate space on the stack for the variable. Note that
3870 DECL_ALIGN says how the variable is to be aligned and we
3871 cannot use it to conclude anything about the alignment of
3872 the size. */
3873 address = allocate_dynamic_stack_space (size, NULL_RTX,
3874 TYPE_ALIGN (TREE_TYPE (decl)));
3875
3876 /* Reference the variable indirectly through that rtx. */
3877 DECL_RTL (decl) = gen_rtx_MEM (DECL_MODE (decl), address);
3878
3879 set_mem_attributes (DECL_RTL (decl), decl, 1);
3880
3881 /* Indicate the alignment we actually gave this variable. */
3882 #ifdef STACK_BOUNDARY
3883 DECL_ALIGN (decl) = STACK_BOUNDARY;
3884 #else
3885 DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
3886 #endif
3887 DECL_USER_ALIGN (decl) = 0;
3888 }
3889 }
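
/* For illustration only, not compiled: hypothetical declarations that
   reach the three main cases of expand_decl above. */
#if 0
void
three_ways (int n)
{
  register int i;		/* scalar: may live in a pseudo register */
  char buf[32];			/* fixed size: stack slot via assign_temp */
  char dyn[n];			/* dynamic size: goes through
				   allocate_dynamic_stack_space */
}
#endif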
3890 \f
3891 /* Emit code to perform the initialization of a declaration DECL. */
3892
3893 void
3894 expand_decl_init (decl)
3895 tree decl;
3896 {
3897 int was_used = TREE_USED (decl);
3898
3899 /* If this is a CONST_DECL, we don't have to generate any code, but
3900 if DECL_INITIAL is a constant, call expand_expr to force TREE_CST_RTL
3901 to be set while in the obstack containing the constant. If we don't
3902 do this, we can lose if we have functions nested three deep and the middle
3903 function makes a CONST_DECL whose DECL_INITIAL is a STRING_CST while
3904 the innermost function is the first to expand that STRING_CST. */
3905 if (TREE_CODE (decl) == CONST_DECL)
3906 {
3907 if (DECL_INITIAL (decl) && TREE_CONSTANT (DECL_INITIAL (decl)))
3908 expand_expr (DECL_INITIAL (decl), NULL_RTX, VOIDmode,
3909 EXPAND_INITIALIZER);
3910 return;
3911 }
3912
3913 if (TREE_STATIC (decl))
3914 return;
3915
3916 /* Compute and store the initial value now. */
3917
3918 if (DECL_INITIAL (decl) == error_mark_node)
3919 {
3920 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
3921
3922 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
3923 || code == POINTER_TYPE || code == REFERENCE_TYPE)
3924 expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
3925 0, 0);
3926 emit_queue ();
3927 }
3928 else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
3929 {
3930 emit_line_note (DECL_SOURCE_FILE (decl), DECL_SOURCE_LINE (decl));
3931 expand_assignment (decl, DECL_INITIAL (decl), 0, 0);
3932 emit_queue ();
3933 }
3934
3935 /* Don't let the initialization count as "using" the variable. */
3936 TREE_USED (decl) = was_used;
3937
3938 /* Free any temporaries we made while initializing the decl. */
3939 preserve_temp_slots (NULL_RTX);
3940 free_temp_slots ();
3941 }
3942
3943 /* CLEANUP is an expression to be executed at exit from this binding contour;
3944 for example, in C++, it might call the destructor for this variable.
3945
3946 We wrap CLEANUP in an UNSAVE_EXPR node, so that we can expand the
3947 CLEANUP multiple times, and have the correct semantics. This
3948 happens in exception handling, and for gotos, returns, and breaks
3949 that leave the current scope.
3950
3951 If CLEANUP is nonzero and DECL is zero, we record a cleanup
3952 that is not associated with any particular variable. */
3953
3954 int
3955 expand_decl_cleanup (decl, cleanup)
3956 tree decl, cleanup;
3957 {
3958 struct nesting *thisblock;
3959
3960 /* Error if we are not in any block. */
3961 if (cfun == 0 || block_stack == 0)
3962 return 0;
3963
3964 thisblock = block_stack;
3965
3966 /* Record the cleanup if there is one. */
3967
3968 if (cleanup != 0)
3969 {
3970 tree t;
3971 rtx seq;
3972 tree *cleanups = &thisblock->data.block.cleanups;
3973 int cond_context = conditional_context ();
3974
3975 if (cond_context)
3976 {
3977 rtx flag = gen_reg_rtx (word_mode);
3978 rtx set_flag_0;
3979 tree cond;
3980
3981 start_sequence ();
3982 emit_move_insn (flag, const0_rtx);
3983 set_flag_0 = get_insns ();
3984 end_sequence ();
3985
3986 thisblock->data.block.last_unconditional_cleanup
3987 = emit_insns_after (set_flag_0,
3988 thisblock->data.block.last_unconditional_cleanup);
3989
3990 emit_move_insn (flag, const1_rtx);
3991
3992 /* All cleanups must be on the function_obstack. */
3993 push_obstacks_nochange ();
3994 resume_temporary_allocation ();
3995
3996 cond = build_decl (VAR_DECL, NULL_TREE, type_for_mode (word_mode, 1));
3997 DECL_RTL (cond) = flag;
3998
3999 /* Conditionalize the cleanup. */
4000 cleanup = build (COND_EXPR, void_type_node,
4001 truthvalue_conversion (cond),
4002 cleanup, integer_zero_node);
4003 cleanup = fold (cleanup);
4004
4005 pop_obstacks ();
4006
4007 cleanups = thisblock->data.block.cleanup_ptr;
4008 }
4009
4010 /* All cleanups must be on the function_obstack. */
4011 push_obstacks_nochange ();
4012 resume_temporary_allocation ();
4013 cleanup = unsave_expr (cleanup);
4014 pop_obstacks ();
4015
4016 t = *cleanups = temp_tree_cons (decl, cleanup, *cleanups);
4017
4018 if (! cond_context)
4019 /* If this block has a cleanup, it belongs in stack_block_stack. */
4020 stack_block_stack = thisblock;
4021
4022 if (cond_context)
4023 {
4024 start_sequence ();
4025 }
4026
4027 /* If this was optimized so that there is no exception region for the
4028 cleanup, then mark the TREE_LIST node, so that we can later tell
4029 if we need to call expand_eh_region_end. */
4030 if (! using_eh_for_cleanups_p
4031 || expand_eh_region_start_tree (decl, cleanup))
4032 TREE_ADDRESSABLE (t) = 1;
4033 /* If that started a new EH region, we're in a new block. */
4034 thisblock = block_stack;
4035
4036 if (cond_context)
4037 {
4038 seq = get_insns ();
4039 end_sequence ();
4040 if (seq)
4041 thisblock->data.block.last_unconditional_cleanup
4042 = emit_insns_after (seq,
4043 thisblock->data.block.last_unconditional_cleanup);
4044 }
4045 else
4046 {
4047 thisblock->data.block.last_unconditional_cleanup
4048 = get_last_insn ();
4049 /* When we insert instructions after the last unconditional cleanup,
4050 we don't adjust last_insn. That means that a later add_insn will
4051 clobber the instructions we've just added. The easiest way to
4052 fix this is to just insert another instruction here, so that the
4053 instructions inserted after the last unconditional cleanup are
4054 never the last instruction. */
4055 emit_note (NULL_PTR, NOTE_INSN_DELETED);
4056 thisblock->data.block.cleanup_ptr = &thisblock->data.block.cleanups;
4057 }
4058 }
4059 return 1;
4060 }
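
/* For illustration: inside a conditional context the code above
   rewrites a registered cleanup CLN, roughly, as

	flag = 0;	at the last unconditional point
	...
	flag = 1;	where the cleanup is registered
	...
	if (flag) CLN;	the COND_EXPR actually recorded

   so that running the recorded cleanups unconditionally still has the
   right effect. A sketch only; the trees are built above. */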
4061
4062 /* Like expand_decl_cleanup, but suppress generating an exception handler
4063 to perform the cleanup. */
4064
4065 #if 0
4066 int
4067 expand_decl_cleanup_no_eh (decl, cleanup)
4068 tree decl, cleanup;
4069 {
4070 int save_eh = using_eh_for_cleanups_p;
4071 int result;
4072
4073 using_eh_for_cleanups_p = 0;
4074 result = expand_decl_cleanup (decl, cleanup);
4075 using_eh_for_cleanups_p = save_eh;
4076
4077 return result;
4078 }
4079 #endif
4080
4081 /* Arrange for the top element of the dynamic cleanup chain to be
4082 popped if we exit the current binding contour. DECL is the
4083 associated declaration, if any, otherwise NULL_TREE. If the
4084 current contour is left via an exception, then __sjthrow will pop
4085 the top element off the dynamic cleanup chain. The code that
4086 avoids doing the action we push into the cleanup chain in the
4087 exceptional case is contained in expand_cleanups.
4088
4089 This routine is only used by expand_eh_region_start, and that is
4090 the only way in which an exception region should be started. This
4091 routine is only used when using the setjmp/longjmp codegen method
4092 for exception handling. */
4093
4094 int
4095 expand_dcc_cleanup (decl)
4096 tree decl;
4097 {
4098 struct nesting *thisblock;
4099 tree cleanup;
4100
4101 /* Error if we are not in any block. */
4102 if (cfun == 0 || block_stack == 0)
4103 return 0;
4104 thisblock = block_stack;
4105
4106 /* Record the cleanup for the dynamic handler chain. */
4107
4108 /* All cleanups must be on the function_obstack. */
4109 push_obstacks_nochange ();
4110 resume_temporary_allocation ();
4111 cleanup = make_node (POPDCC_EXPR);
4112 pop_obstacks ();
4113
4114 /* Add the cleanup in a manner similar to expand_decl_cleanup. */
4115 thisblock->data.block.cleanups
4116 = temp_tree_cons (decl, cleanup, thisblock->data.block.cleanups);
4117
4118 /* If this block has a cleanup, it belongs in stack_block_stack. */
4119 stack_block_stack = thisblock;
4120 return 1;
4121 }
4122
4123 /* Arrange for the top element of the dynamic handler chain to be
4124 popped if we exit the current binding contour. DECL is the
4125 associated declaration, if any, otherwise NULL_TREE. If the current
4126 contour is left via an exception, then __sjthrow will pop the top
4127 element off the dynamic handler chain. The code that avoids doing
4128 the action we push into the handler chain in the exceptional case
4129 is contained in expand_cleanups.
4130
4131 This routine is only used by expand_eh_region_start, and that is
4132 the only way in which an exception region should be started. This
4133 routine is only used when using the setjmp/longjmp codegen method
4134 for exception handling. */
4135
4136 int
4137 expand_dhc_cleanup (decl)
4138 tree decl;
4139 {
4140 struct nesting *thisblock;
4141 tree cleanup;
4142
4143 /* Error if we are not in any block. */
4144 if (cfun == 0 || block_stack == 0)
4145 return 0;
4146 thisblock = block_stack;
4147
4148 /* Record the cleanup for the dynamic handler chain. */
4149
4150 /* All cleanups must be on the function_obstack. */
4151 push_obstacks_nochange ();
4152 resume_temporary_allocation ();
4153 cleanup = make_node (POPDHC_EXPR);
4154 pop_obstacks ();
4155
4156 /* Add the cleanup in a manner similar to expand_decl_cleanup. */
4157 thisblock->data.block.cleanups
4158 = temp_tree_cons (decl, cleanup, thisblock->data.block.cleanups);
4159
4160 /* If this block has a cleanup, it belongs in stack_block_stack. */
4161 stack_block_stack = thisblock;
4162 return 1;
4163 }
4164 \f
4165 /* DECL is an anonymous union. CLEANUP is a cleanup for DECL.
4166 DECL_ELTS is the list of elements that belong to DECL's type.
4167 In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup. */
4168
4169 void
4170 expand_anon_union_decl (decl, cleanup, decl_elts)
4171 tree decl, cleanup, decl_elts;
4172 {
4173 struct nesting *thisblock = cfun == 0 ? 0 : block_stack;
4174 rtx x;
4175 tree t;
4176
4177 /* If any of the elements are addressable, so is the entire union. */
4178 for (t = decl_elts; t; t = TREE_CHAIN (t))
4179 if (TREE_ADDRESSABLE (TREE_VALUE (t)))
4180 {
4181 TREE_ADDRESSABLE (decl) = 1;
4182 break;
4183 }
4184
4185 expand_decl (decl);
4186 expand_decl_cleanup (decl, cleanup);
4187 x = DECL_RTL (decl);
4188
4189 /* Go through the elements, assigning RTL to each. */
4190 for (t = decl_elts; t; t = TREE_CHAIN (t))
4191 {
4192 tree decl_elt = TREE_VALUE (t);
4193 tree cleanup_elt = TREE_PURPOSE (t);
4194 enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
4195
4196 /* Propagate the union's alignment to the elements. */
4197 DECL_ALIGN (decl_elt) = DECL_ALIGN (decl);
4198 DECL_USER_ALIGN (decl_elt) = DECL_USER_ALIGN (decl);
4199
4200 /* If the element has BLKmode and the union doesn't, the union is
4201 aligned such that the element doesn't need to have BLKmode, so
4202 change the element's mode to the appropriate one for its size. */
4203 if (mode == BLKmode && DECL_MODE (decl) != BLKmode)
4204 DECL_MODE (decl_elt) = mode
4205 = mode_for_size_tree (DECL_SIZE (decl_elt), MODE_INT, 1);
4206
4207 /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
4208 instead create a new MEM rtx with the proper mode. */
4209 if (GET_CODE (x) == MEM)
4210 {
4211 if (mode == GET_MODE (x))
4212 DECL_RTL (decl_elt) = x;
4213 else
4214 {
4215 DECL_RTL (decl_elt) = gen_rtx_MEM (mode, copy_rtx (XEXP (x, 0)));
4216 MEM_COPY_ATTRIBUTES (DECL_RTL (decl_elt), x);
4217 }
4218 }
4219 else if (GET_CODE (x) == REG)
4220 {
4221 if (mode == GET_MODE (x))
4222 DECL_RTL (decl_elt) = x;
4223 else
4224 DECL_RTL (decl_elt) = gen_rtx_SUBREG (mode, x, 0);
4225 }
4226 else
4227 abort ();
4228
4229 /* Record the cleanup if there is one. */
4230
4231 if (cleanup != 0)
4232 thisblock->data.block.cleanups
4233 = temp_tree_cons (decl_elt, cleanup_elt,
4234 thisblock->data.block.cleanups);
4235 }
4236 }
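
/* For illustration only, not compiled: a hypothetical anonymous union
   (a GNU C extension; standard in C++) of the sort handled above. Both
   members share the union's slot, each with RTL in its own mode. */
#if 0
void
overlay (void)
{
  union { int i; float f; };	/* anonymous union */
  i = 1;			/* I and F name the same storage */
  f = 2.0f;
}
#endif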
4237 \f
4238 /* Expand a list of cleanups LIST.
4239 Elements may be expressions or may be nested lists.
4240
4241 If DONT_DO is nonnull, then any list-element
4242 whose TREE_PURPOSE matches DONT_DO is omitted.
4243 This is sometimes used to avoid a cleanup associated with
4244 a value that is being returned out of the scope.
4245
4246 If IN_FIXUP is non-zero, we are generating this cleanup for a fixup
4247 goto and handle protection regions specially in that case.
4248
4249 If REACHABLE, we emit code, otherwise just inform the exception handling
4250 code about this finalization. */
4251
4252 static void
4253 expand_cleanups (list, dont_do, in_fixup, reachable)
4254 tree list;
4255 tree dont_do;
4256 int in_fixup;
4257 int reachable;
4258 {
4259 tree tail;
4260 for (tail = list; tail; tail = TREE_CHAIN (tail))
4261 if (dont_do == 0 || TREE_PURPOSE (tail) != dont_do)
4262 {
4263 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4264 expand_cleanups (TREE_VALUE (tail), dont_do, in_fixup, reachable);
4265 else
4266 {
4267 if (! in_fixup)
4268 {
4269 tree cleanup = TREE_VALUE (tail);
4270
4271 /* See expand_d{h,c}c_cleanup for why we avoid this. */
4272 if (TREE_CODE (cleanup) != POPDHC_EXPR
4273 && TREE_CODE (cleanup) != POPDCC_EXPR
4274 /* See expand_eh_region_start_tree for this case. */
4275 && ! TREE_ADDRESSABLE (tail))
4276 {
4277 cleanup = protect_with_terminate (cleanup);
4278 expand_eh_region_end (cleanup);
4279 }
4280 }
4281
4282 if (reachable)
4283 {
4284 /* Cleanups may be run multiple times. For example,
4285 when exiting a binding contour, we expand the
4286 cleanups associated with that contour. When a goto
4287 within that binding contour has a target outside that
4288 contour, it will expand all cleanups from its scope to
4289 the target. Though the cleanups are expanded multiple
4290 times, the control paths are non-overlapping so the
4291 cleanups will not be executed twice. */
4292
4293 /* We may need to protect fixups with rethrow regions. */
4294 int protect = (in_fixup && ! TREE_ADDRESSABLE (tail));
4295
4296 if (protect)
4297 expand_fixup_region_start ();
4298
4299 /* The cleanup might contain try-blocks, so we have to
4300 preserve our current queue. */
4301 push_ehqueue ();
4302 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
4303 pop_ehqueue ();
4304 if (protect)
4305 expand_fixup_region_end (TREE_VALUE (tail));
4306 free_temp_slots ();
4307 }
4308 }
4309 }
4310 }
4311
4312 /* Mark the context we are emitting RTL for as a conditional
4313 context, so that any cleanup actions we register with
4314 expand_decl_init will be properly conditionalized when those
4315 cleanup actions are later performed. Must be called before any
4316 expression (tree) is expanded that is within a conditional context. */
4317
4318 void
4319 start_cleanup_deferral ()
4320 {
4321 /* block_stack can be NULL if we are inside the parameter list. It is
4322 OK to do nothing, because cleanups aren't possible here. */
4323 if (block_stack)
4324 ++block_stack->data.block.conditional_code;
4325 }
4326
4327 /* Mark the end of a conditional region of code. Because cleanup
4328 deferrals may be nested, we may still be in a conditional region
4329 after we end the currently deferred cleanups; only after we end all
4330 deferred cleanups are we back in unconditional code. */
4331
4332 void
4333 end_cleanup_deferral ()
4334 {
4335 /* block_stack can be NULL if we are inside the parameter list. It is
4336 OK to do nothing, because cleanups aren't possible here. */
4337 if (block_stack)
4338 --block_stack->data.block.conditional_code;
4339 }
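
/* For illustration: expanding a conditionally executed subexpression is
   bracketed by start_cleanup_deferral / end_cleanup_deferral, so for a
   hypothetical C++ expression such as

	ok ? f (T ()) : g ();

   the cleanup for the T temporary is registered while conditional_code
   is nonzero and is therefore conditionalized as described above. */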
4340
4341 /* Move all cleanups from the current block_stack
4342 to the containing block_stack, where they are assumed to
4343 have been created. If anything can cause a temporary to
4344 be created, but not expanded for more than one level of
4345 block_stacks, then this code will have to change. */
4346
4347 void
4348 move_cleanups_up ()
4349 {
4350 struct nesting *block = block_stack;
4351 struct nesting *outer = block->next;
4352
4353 outer->data.block.cleanups
4354 = chainon (block->data.block.cleanups,
4355 outer->data.block.cleanups);
4356 block->data.block.cleanups = 0;
4357 }
4358
4359 tree
4360 last_cleanup_this_contour ()
4361 {
4362 if (block_stack == 0)
4363 return 0;
4364
4365 return block_stack->data.block.cleanups;
4366 }
4367
4368 /* Return 1 if there are any pending cleanups at this point.
4369 If THIS_CONTOUR is nonzero, check the current contour as well.
4370 Otherwise, look only at the contours that enclose this one. */
4371
4372 int
4373 any_pending_cleanups (this_contour)
4374 int this_contour;
4375 {
4376 struct nesting *block;
4377
4378 if (cfun == NULL || cfun->stmt == NULL || block_stack == 0)
4379 return 0;
4380
4381 if (this_contour && block_stack->data.block.cleanups != NULL)
4382 return 1;
4383 if (block_stack->data.block.cleanups == 0
4384 && block_stack->data.block.outer_cleanups == 0)
4385 return 0;
4386
4387 for (block = block_stack->next; block; block = block->next)
4388 if (block->data.block.cleanups != 0)
4389 return 1;
4390
4391 return 0;
4392 }
4393 \f
4394 /* Enter a case (Pascal) or switch (C) statement.
4395 Push a block onto case_stack and nesting_stack
4396 to accumulate the case-labels that are seen
4397 and to record the labels generated for the statement.
4398
4399 EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
4400 Otherwise, this construct is transparent for `exit_something'.
4401
4402 EXPR is the index-expression to be dispatched on.
4403 TYPE is its nominal type. We could simply convert EXPR to this type,
4404 but instead we take short cuts. */
4405
4406 void
4407 expand_start_case (exit_flag, expr, type, printname)
4408 int exit_flag;
4409 tree expr;
4410 tree type;
4411 const char *printname;
4412 {
4413 register struct nesting *thiscase = ALLOC_NESTING ();
4414
4415 /* Make an entry on case_stack for the case we are entering. */
4416
4417 thiscase->next = case_stack;
4418 thiscase->all = nesting_stack;
4419 thiscase->depth = ++nesting_depth;
4420 thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
4421 thiscase->data.case_stmt.case_list = 0;
4422 thiscase->data.case_stmt.index_expr = expr;
4423 thiscase->data.case_stmt.nominal_type = type;
4424 thiscase->data.case_stmt.default_label = 0;
4425 thiscase->data.case_stmt.num_ranges = 0;
4426 thiscase->data.case_stmt.printname = printname;
4427 thiscase->data.case_stmt.line_number_status = force_line_numbers ();
4428 case_stack = thiscase;
4429 nesting_stack = thiscase;
4430
4431 do_pending_stack_adjust ();
4432
4433 /* Make sure case_stmt.start points to something that won't
4434 need any transformation before expand_end_case. */
4435 if (GET_CODE (get_last_insn ()) != NOTE)
4436 emit_note (NULL_PTR, NOTE_INSN_DELETED);
4437
4438 thiscase->data.case_stmt.start = get_last_insn ();
4439
4440 start_cleanup_deferral ();
4441 }
4442
4443 /* Start a "dummy case statement" within which case labels are invalid
4444 and are not connected to any larger real case statement.
4445 This can be used if you don't want to let a case statement jump
4446 into the middle of certain kinds of constructs. */
4447
4448 void
4449 expand_start_case_dummy ()
4450 {
4451 register struct nesting *thiscase = ALLOC_NESTING ();
4452
4453 /* Make an entry on case_stack for the dummy. */
4454
4455 thiscase->next = case_stack;
4456 thiscase->all = nesting_stack;
4457 thiscase->depth = ++nesting_depth;
4458 thiscase->exit_label = 0;
4459 thiscase->data.case_stmt.case_list = 0;
4460 thiscase->data.case_stmt.start = 0;
4461 thiscase->data.case_stmt.nominal_type = 0;
4462 thiscase->data.case_stmt.default_label = 0;
4463 thiscase->data.case_stmt.num_ranges = 0;
4464 case_stack = thiscase;
4465 nesting_stack = thiscase;
4466 start_cleanup_deferral ();
4467 }
4468
4469 /* End a dummy case statement. */
4470
4471 void
4472 expand_end_case_dummy ()
4473 {
4474 end_cleanup_deferral ();
4475 POPSTACK (case_stack);
4476 }
4477
4478 /* Return the data type of the index-expression
4479 of the innermost case statement, or null if none. */
4480
4481 tree
4482 case_index_expr_type ()
4483 {
4484 if (case_stack)
4485 return TREE_TYPE (case_stack->data.case_stmt.index_expr);
4486 return 0;
4487 }
4488 \f
4489 static void
4490 check_seenlabel ()
4491 {
4492 /* If this is the first label, warn if any insns have been emitted. */
4493 if (case_stack->data.case_stmt.line_number_status >= 0)
4494 {
4495 rtx insn;
4496
4497 restore_line_number_status
4498 (case_stack->data.case_stmt.line_number_status);
4499 case_stack->data.case_stmt.line_number_status = -1;
4500
4501 for (insn = case_stack->data.case_stmt.start;
4502 insn;
4503 insn = NEXT_INSN (insn))
4504 {
4505 if (GET_CODE (insn) == CODE_LABEL)
4506 break;
4507 if (GET_CODE (insn) != NOTE
4508 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
4509 {
4510 do
4511 insn = PREV_INSN (insn);
4512 while (insn && (GET_CODE (insn) != NOTE || NOTE_LINE_NUMBER (insn) < 0));
4513
4514 /* If insn is zero, then there must have been a syntax error. */
4515 if (insn)
4516 warning_with_file_and_line (NOTE_SOURCE_FILE (insn),
4517 NOTE_LINE_NUMBER (insn),
4518 "unreachable code at beginning of %s",
4519 case_stack->data.case_stmt.printname);
4520 break;
4521 }
4522 }
4523 }
4524 }
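
/* For illustration only, not compiled: statements between `switch' and
   the first label can never execute; check_seenlabel warns about the
   hypothetical call below ("unreachable code at beginning of switch
   statement"). */
#if 0
switch (x)
  {
    setup ();			/* unreachable: no label precedes it */
  case 0:
    break;
  }
#endif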
4525
4526 /* Accumulate one case or default label inside a case or switch statement.
4527 VALUE is the value of the case (a null pointer, for a default label).
4528 The function CONVERTER, when applied to arguments T and V,
4529 converts the value V to the type T.
4530
4531 If not currently inside a case or switch statement, return 1 and do
4532 nothing. The caller will print a language-specific error message.
4533 If VALUE is a duplicate or overlaps, return 2 and do nothing
4534 except store the (first) duplicate node in *DUPLICATE.
4535 If VALUE is out of range, return 3 and do nothing.
4536 If we are jumping into the scope of a cleanup or var-sized array, return 5.
4537 Return 0 on success.
4538
4539 Extended to handle range statements. */
4540
4541 int
4542 pushcase (value, converter, label, duplicate)
4543 register tree value;
4544 tree (*converter) PARAMS ((tree, tree));
4545 register tree label;
4546 tree *duplicate;
4547 {
4548 tree index_type;
4549 tree nominal_type;
4550
4551 /* Fail if not inside a real case statement. */
4552 if (! (case_stack && case_stack->data.case_stmt.start))
4553 return 1;
4554
4555 if (stack_block_stack
4556 && stack_block_stack->depth > case_stack->depth)
4557 return 5;
4558
4559 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4560 nominal_type = case_stack->data.case_stmt.nominal_type;
4561
4562 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4563 if (index_type == error_mark_node)
4564 return 0;
4565
4566 /* Convert VALUE to the type in which the comparisons are nominally done. */
4567 if (value != 0)
4568 value = (*converter) (nominal_type, value);
4569
4570 check_seenlabel ();
4571
4572 /* Fail if this value is out of range for the actual type of the index
4573 (which may be narrower than NOMINAL_TYPE). */
4574 if (value != 0
4575 && (TREE_CONSTANT_OVERFLOW (value)
4576 || ! int_fits_type_p (value, index_type)))
4577 return 3;
4578
4579 /* Fail if this is a duplicate or overlaps another entry. */
4580 if (value == 0)
4581 {
4582 if (case_stack->data.case_stmt.default_label != 0)
4583 {
4584 *duplicate = case_stack->data.case_stmt.default_label;
4585 return 2;
4586 }
4587 case_stack->data.case_stmt.default_label = label;
4588 }
4589 else
4590 return add_case_node (value, value, label, duplicate);
4591
4592 expand_label (label);
4593 return 0;
4594 }
4595
4596 /* Like pushcase but this case applies to all values between VALUE1 and
4597 VALUE2 (inclusive). If VALUE1 is NULL, the range starts at the lowest
4598 value of the index type and ends at VALUE2. If VALUE2 is NULL, the range
4599 starts at VALUE1 and ends at the highest value of the index type.
4600 If both are NULL, this case applies to all values.
4601
4602 The return value is the same as that of pushcase but there is one
4603 additional error code: 4 means the specified range was empty. */
4604
4605 int
4606 pushcase_range (value1, value2, converter, label, duplicate)
4607 register tree value1, value2;
4608 tree (*converter) PARAMS ((tree, tree));
4609 register tree label;
4610 tree *duplicate;
4611 {
4612 tree index_type;
4613 tree nominal_type;
4614
4615 /* Fail if not inside a real case statement. */
4616 if (! (case_stack && case_stack->data.case_stmt.start))
4617 return 1;
4618
4619 if (stack_block_stack
4620 && stack_block_stack->depth > case_stack->depth)
4621 return 5;
4622
4623 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4624 nominal_type = case_stack->data.case_stmt.nominal_type;
4625
4626 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4627 if (index_type == error_mark_node)
4628 return 0;
4629
4630 check_seenlabel ();
4631
4632 /* Convert VALUEs to the type in which the comparisons are nominally done
4633 and replace any unspecified value with the corresponding bound. */
4634 if (value1 == 0)
4635 value1 = TYPE_MIN_VALUE (index_type);
4636 if (value2 == 0)
4637 value2 = TYPE_MAX_VALUE (index_type);
4638
4639 /* Fail if the range is empty. Do this before any conversion since
4640 we want to allow out-of-range empty ranges. */
4641 if (value2 != 0 && tree_int_cst_lt (value2, value1))
4642 return 4;
4643
4644 /* If the max was unbounded, use the max of the nominal_type we are
4645 converting to. Do this after the < check above to suppress false
4646 positives. */
4647 if (value2 == 0)
4648 value2 = TYPE_MAX_VALUE (nominal_type);
4649
4650 value1 = (*converter) (nominal_type, value1);
4651 value2 = (*converter) (nominal_type, value2);
4652
4653 /* Fail if these values are out of range. */
4654 if (TREE_CONSTANT_OVERFLOW (value1)
4655 || ! int_fits_type_p (value1, index_type))
4656 return 3;
4657
4658 if (TREE_CONSTANT_OVERFLOW (value2)
4659 || ! int_fits_type_p (value2, index_type))
4660 return 3;
4661
4662 return add_case_node (value1, value2, label, duplicate);
4663 }
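
/* For illustration only, not compiled: the GNU C case-range extension
   is what arrives at pushcase_range. */
#if 0
int
classify (int c)
{
  switch (c)
    {
    case '0' ... '9':		/* range: a case_node with low != high */
      return 1;
    case '_':			/* single value: low == high */
      return 2;
    default:
      return 0;
    }
}
#endif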
4664
4665 /* Do the actual insertion of a case label for pushcase and pushcase_range
4666 into case_stack->data.case_stmt.case_list. Use an AVL tree to avoid
4667 slowdown for large switch statements. */
4668
4669 static int
4670 add_case_node (low, high, label, duplicate)
4671 tree low, high;
4672 tree label;
4673 tree *duplicate;
4674 {
4675 struct case_node *p, **q, *r;
4676
4677 q = &case_stack->data.case_stmt.case_list;
4678 p = *q;
4679
4680 while ((r = *q))
4681 {
4682 p = r;
4683
4684 /* Keep going past elements distinctly greater than HIGH. */
4685 if (tree_int_cst_lt (high, p->low))
4686 q = &p->left;
4687
4688 /* or distinctly less than LOW. */
4689 else if (tree_int_cst_lt (p->high, low))
4690 q = &p->right;
4691
4692 else
4693 {
4694 /* We have an overlap; this is an error. */
4695 *duplicate = p->code_label;
4696 return 2;
4697 }
4698 }
4699
4700 /* Add this label to the chain, and succeed.
4701 Copy LOW, HIGH so they are on temporary rather than momentary
4702 obstack and will thus survive till the end of the case statement. */
4703
4704 r = (struct case_node *) oballoc (sizeof (struct case_node));
4705 r->low = copy_node (low);
4706
4707 /* If the bounds are equal, turn this into the one-value case. */
4708
4709 if (tree_int_cst_equal (low, high))
4710 r->high = r->low;
4711 else
4712 {
4713 r->high = copy_node (high);
4714 case_stack->data.case_stmt.num_ranges++;
4715 }
4716
4717 r->code_label = label;
4718 expand_label (label);
4719
4720 *q = r;
4721 r->parent = p;
4722 r->left = 0;
4723 r->right = 0;
4724 r->balance = 0;
4725
4726 while (p)
4727 {
4728 struct case_node *s;
4729
4730 if (r == p->left)
4731 {
4732 int b;
4733
4734 if (! (b = p->balance))
4735 /* Growth propagation from left side. */
4736 p->balance = -1;
4737 else if (b < 0)
4738 {
4739 if (r->balance < 0)
4740 {
4741 /* R-Rotation */
4742 if ((p->left = s = r->right))
4743 s->parent = p;
4744
4745 r->right = p;
4746 p->balance = 0;
4747 r->balance = 0;
4748 s = p->parent;
4749 p->parent = r;
4750
4751 if ((r->parent = s))
4752 {
4753 if (s->left == p)
4754 s->left = r;
4755 else
4756 s->right = r;
4757 }
4758 else
4759 case_stack->data.case_stmt.case_list = r;
4760 }
4761 else
4762 /* r->balance == +1 */
4763 {
4764 /* LR-Rotation */
4765
4766 int b2;
4767 struct case_node *t = r->right;
4768
4769 if ((p->left = s = t->right))
4770 s->parent = p;
4771
4772 t->right = p;
4773 if ((r->right = s = t->left))
4774 s->parent = r;
4775
4776 t->left = r;
4777 b = t->balance;
4778 b2 = b < 0;
4779 p->balance = b2;
4780 b2 = -b2 - b;
4781 r->balance = b2;
4782 t->balance = 0;
4783 s = p->parent;
4784 p->parent = t;
4785 r->parent = t;
4786
4787 if ((t->parent = s))
4788 {
4789 if (s->left == p)
4790 s->left = t;
4791 else
4792 s->right = t;
4793 }
4794 else
4795 case_stack->data.case_stmt.case_list = t;
4796 }
4797 break;
4798 }
4799
4800 else
4801 {
4802 /* p->balance == +1; growth of left side balances the node. */
4803 p->balance = 0;
4804 break;
4805 }
4806 }
4807 else
4808 /* r == p->right */
4809 {
4810 int b;
4811
4812 if (! (b = p->balance))
4813 /* Growth propagation from right side. */
4814 p->balance++;
4815 else if (b > 0)
4816 {
4817 if (r->balance > 0)
4818 {
4819 /* L-Rotation */
4820
4821 if ((p->right = s = r->left))
4822 s->parent = p;
4823
4824 r->left = p;
4825 p->balance = 0;
4826 r->balance = 0;
4827 s = p->parent;
4828 p->parent = r;
4829 if ((r->parent = s))
4830 {
4831 if (s->left == p)
4832 s->left = r;
4833 else
4834 s->right = r;
4835 }
4836
4837 else
4838 case_stack->data.case_stmt.case_list = r;
4839 }
4840
4841 else
4842 /* r->balance == -1 */
4843 {
4844 /* RL-Rotation */
4845 int b2;
4846 struct case_node *t = r->left;
4847
4848 if ((p->right = s = t->left))
4849 s->parent = p;
4850
4851 t->left = p;
4852
4853 if ((r->left = s = t->right))
4854 s->parent = r;
4855
4856 t->right = r;
4857 b = t->balance;
4858 b2 = b < 0;
4859 r->balance = b2;
4860 b2 = -b2 - b;
4861 p->balance = b2;
4862 t->balance = 0;
4863 s = p->parent;
4864 p->parent = t;
4865 r->parent = t;
4866
4867 if ((t->parent = s))
4868 {
4869 if (s->left == p)
4870 s->left = t;
4871 else
4872 s->right = t;
4873 }
4874
4875 else
4876 case_stack->data.case_stmt.case_list = t;
4877 }
4878 break;
4879 }
4880 else
4881 {
4882 /* p->balance == -1; growth of right side balances the node. */
4883 p->balance = 0;
4884 break;
4885 }
4886 }
4887
4888 r = p;
4889 p = p->parent;
4890 }
4891
4892 return 0;
4893 }
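
/* For illustration: adding the case values 1, 2, 3 in that order first
   builds a right-leaning chain; the insertion of 3 unbalances node 1
   and the L-rotation above repairs it:

	1			2
	 \		       / \
	  2		=>    1   3
	   \
	    3

   keeping later searches in add_case_node O(log N). Sketch only. */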
4894 \f
4895 /* Returns the number of possible values of TYPE.
4896 Returns -1 if the number is unknown, variable, or if the number does not
4897 fit in a HOST_WIDE_INT.
4898 Sets *SPARENESS to 2 if TYPE is an ENUMERAL_TYPE whose values
4899 do not increase monotonically (there may be duplicates);
4900 to 1 if the values increase monotonically, but not always by 1;
4901 otherwise sets it to 0. */
4902
4903 HOST_WIDE_INT
4904 all_cases_count (type, spareness)
4905 tree type;
4906 int *spareness;
4907 {
4908 tree t;
4909 HOST_WIDE_INT count, minval, lastval;
4910
4911 *spareness = 0;
4912
4913 switch (TREE_CODE (type))
4914 {
4915 case BOOLEAN_TYPE:
4916 count = 2;
4917 break;
4918
4919 case CHAR_TYPE:
4920 count = 1 << BITS_PER_UNIT;
4921 break;
4922
4923 default:
4924 case INTEGER_TYPE:
4925 if (TYPE_MAX_VALUE (type) != 0
4926 && 0 != (t = fold (build (MINUS_EXPR, type, TYPE_MAX_VALUE (type),
4927 TYPE_MIN_VALUE (type))))
4928 && 0 != (t = fold (build (PLUS_EXPR, type, t,
4929 convert (type, integer_one_node))))
4930 && host_integerp (t, 1))
4931 count = tree_low_cst (t, 1);
4932 else
4933 return -1;
4934 break;
4935
4936 case ENUMERAL_TYPE:
4937 /* Don't waste time with enumeral types with huge values. */
4938 if (! host_integerp (TYPE_MIN_VALUE (type), 0)
4939 || TYPE_MAX_VALUE (type) == 0
4940 || ! host_integerp (TYPE_MAX_VALUE (type), 0))
4941 return -1;
4942
4943 lastval = minval = tree_low_cst (TYPE_MIN_VALUE (type), 0);
4944 count = 0;
4945
4946 for (t = TYPE_VALUES (type); t != NULL_TREE; t = TREE_CHAIN (t))
4947 {
4948 HOST_WIDE_INT thisval = tree_low_cst (TREE_VALUE (t), 0);
4949
4950 if (*spareness == 2 || thisval < lastval)
4951 *spareness = 2;
4952 else if (thisval != minval + count)
4953 *spareness = 1;
4954
4955 count++;
4955 lastval = thisval;	/* track previous value for the monotonicity check */
4956 }
4957 }
4958
4959 return count;
4960 }
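
/* For illustration: for a hypothetical enum e { A = 0, B = 1, C = 5 },
   this returns 3 and sets *SPARENESS to 1 (monotonic, but with gaps);
   had C been declared before B, *SPARENESS would be 2. A sketch of the
   intended behavior, not a tested example. */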
4961
4962 #define BITARRAY_TEST(ARRAY, INDEX) \
4963 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
4964 & (1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR)))
4965 #define BITARRAY_SET(ARRAY, INDEX) \
4966 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
4967 |= 1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR))
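
/* For illustration: with 8-bit host chars, BITARRAY_SET (ARRAY, 11)
   sets bit 3 of ARRAY[1] (11 / 8 == 1, 11 % 8 == 3), and
   BITARRAY_TEST reads that same bit back. */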
4968
4969 /* Set the elements of the bitstring CASES_SEEN (which has length COUNT),
4970 with the case values we have seen, assuming the case expression
4971 has the given TYPE.
4972 SPARSENESS is as determined by all_cases_count.
4973
4974 The time needed is proportional to COUNT, unless
4975 SPARSENESS is 2, in which case quadratic time is needed. */
4976
4977 void
4978 mark_seen_cases (type, cases_seen, count, sparseness)
4979 tree type;
4980 unsigned char *cases_seen;
4981 HOST_WIDE_INT count;
4982 int sparseness;
4983 {
4984 tree next_node_to_try = NULL_TREE;
4985 HOST_WIDE_INT next_node_offset = 0;
4986
4987 register struct case_node *n, *root = case_stack->data.case_stmt.case_list;
4988 tree val = make_node (INTEGER_CST);
4989
4990 TREE_TYPE (val) = type;
4991 if (! root)
4992 /* Do nothing. */
4993 ;
4994 else if (sparseness == 2)
4995 {
4996 tree t;
4997 unsigned HOST_WIDE_INT xlo;
4998
4999 /* This less efficient loop is only needed to handle
5000 duplicate case values (multiple enum constants
5001 with the same value). */
5002 TREE_TYPE (val) = TREE_TYPE (root->low);
5003 for (t = TYPE_VALUES (type), xlo = 0; t != NULL_TREE;
5004 t = TREE_CHAIN (t), xlo++)
5005 {
5006 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (TREE_VALUE (t));
5007 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (TREE_VALUE (t));
5008 n = root;
5009 do
5010 {
5011 /* Keep going past elements distinctly greater than VAL. */
5012 if (tree_int_cst_lt (val, n->low))
5013 n = n->left;
5014
5015 /* or distinctly less than VAL. */
5016 else if (tree_int_cst_lt (n->high, val))
5017 n = n->right;
5018
5019 else
5020 {
5021 /* We have found a matching range. */
5022 BITARRAY_SET (cases_seen, xlo);
5023 break;
5024 }
5025 }
5026 while (n);
5027 }
5028 }
5029 else
5030 {
5031 if (root->left)
5032 case_stack->data.case_stmt.case_list = root = case_tree2list (root, 0);
5033
5034 for (n = root; n; n = n->right)
5035 {
5036 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (n->low);
5037 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (n->low);
5038 while (! tree_int_cst_lt (n->high, val))
5039 {
5040 /* Calculate (into xlo) the "offset" of the integer (val).
5041 The element with lowest value has offset 0, the next smallest
5042 element has offset 1, etc. */
5043
5044 unsigned HOST_WIDE_INT xlo;
5045 HOST_WIDE_INT xhi;
5046 tree t;
5047
5048 if (sparseness && TYPE_VALUES (type) != NULL_TREE)
5049 {
5050 /* The TYPE_VALUES will be in increasing order, so
5051 start searching where we last ended. */
5052 t = next_node_to_try;
5053 xlo = next_node_offset;
5054 xhi = 0;
5055 for (;;)
5056 {
5057 if (t == NULL_TREE)
5058 {
5059 t = TYPE_VALUES (type);
5060 xlo = 0;
5061 }
5062 if (tree_int_cst_equal (val, TREE_VALUE (t)))
5063 {
5064 next_node_to_try = TREE_CHAIN (t);
5065 next_node_offset = xlo + 1;
5066 break;
5067 }
5068 xlo++;
5069 t = TREE_CHAIN (t);
5070 if (t == next_node_to_try)
5071 {
5072 xlo = -1;
5073 break;
5074 }
5075 }
5076 }
5077 else
5078 {
5079 t = TYPE_MIN_VALUE (type);
5080 if (t)
5081 neg_double (TREE_INT_CST_LOW (t), TREE_INT_CST_HIGH (t),
5082 &xlo, &xhi);
5083 else
5084 xlo = xhi = 0;
5085 add_double (xlo, xhi,
5086 TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
5087 &xlo, &xhi);
5088 }
5089
5090 if (xhi == 0 && xlo < (unsigned HOST_WIDE_INT) count)
5091 BITARRAY_SET (cases_seen, xlo);
5092
5093 add_double (TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
5094 1, 0,
5095 &TREE_INT_CST_LOW (val), &TREE_INT_CST_HIGH (val));
5096 }
5097 }
5098 }
5099 }
5100
5101 /* Called when the index of a switch statement is an enumerated type
5102 and there is no default label.
5103
5104 Checks that all enumeration literals are covered by the case
5105 expressions of a switch. Also, warn if there are any extra
5106 switch cases that are *not* elements of the enumerated type.
5107
5108 If all enumeration literals were covered by the case expressions,
5109 turn one of the expressions into the default expression since it should
5110 not be possible to fall through such a switch. */
5111
5112 void
5113 check_for_full_enumeration_handling (type)
5114 tree type;
5115 {
5116 register struct case_node *n;
5117 register tree chain;
5118 #if 0 /* variable used by 'if 0'ed code below. */
5119 register struct case_node **l;
5120 int all_values = 1;
5121 #endif
5122
5123 /* True iff the selector type is a numbered set mode. */
5124 int sparseness = 0;
5125
5126 /* The number of possible selector values. */
5127 HOST_WIDE_INT size;
5128
5129 /* For each possible selector value, a one iff it has been matched
5130 by a case value alternative. */
5131 unsigned char *cases_seen;
5132
5133 /* The allocated size of cases_seen, in chars. */
5134 HOST_WIDE_INT bytes_needed;
5135
5136 if (! warn_switch)
5137 return;
5138
5139 size = all_cases_count (type, &sparseness);
5140 bytes_needed = (size + HOST_BITS_PER_CHAR) / HOST_BITS_PER_CHAR;
5141
5142 if (size > 0 && size < 600000
5143 /* We deliberately use calloc here, not xcalloc, so that we can suppress
5144 this optimization if we don't have enough memory rather than
5145 aborting, as xmalloc would do. */
5146 && (cases_seen = (unsigned char *) calloc (bytes_needed, 1)) != NULL)
5147 {
5148 HOST_WIDE_INT i;
5149 tree v = TYPE_VALUES (type);
5150
5151 /* The time complexity of this code is normally O(N), where
5152 N is the number of members in the enumerated type.
5153 However, if type is an ENUMERAL_TYPE whose values do not
5154 increase monotonically, O(N*log(N)) time may be needed. */
5155
5156 mark_seen_cases (type, cases_seen, size, sparseness);
5157
5158 for (i = 0; v != NULL_TREE && i < size; i++, v = TREE_CHAIN (v))
5159 if (BITARRAY_TEST (cases_seen, i) == 0)
5160 warning ("enumeration value `%s' not handled in switch",
5161 IDENTIFIER_POINTER (TREE_PURPOSE (v)));
5162
5163 free (cases_seen);
5164 }
5165
5166 /* Now we go the other way around; we warn if there are case
5167 expressions that don't correspond to enumerators. This can
5168 occur since C and C++ don't enforce type-checking of
5169 assignments to enumeration variables. */
5170
5171 if (case_stack->data.case_stmt.case_list
5172 && case_stack->data.case_stmt.case_list->left)
5173 case_stack->data.case_stmt.case_list
5174 = case_tree2list (case_stack->data.case_stmt.case_list, 0);
5175 if (warn_switch)
5176 for (n = case_stack->data.case_stmt.case_list; n; n = n->right)
5177 {
5178 for (chain = TYPE_VALUES (type);
5179 chain && !tree_int_cst_equal (n->low, TREE_VALUE (chain));
5180 chain = TREE_CHAIN (chain))
5181 ;
5182
5183 if (!chain)
5184 {
5185 if (TYPE_NAME (type) == 0)
5186 warning ("case value `%ld' not in enumerated type",
5187 (long) TREE_INT_CST_LOW (n->low));
5188 else
5189 warning ("case value `%ld' not in enumerated type `%s'",
5190 (long) TREE_INT_CST_LOW (n->low),
5191 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
5192 == IDENTIFIER_NODE)
5193 ? TYPE_NAME (type)
5194 : DECL_NAME (TYPE_NAME (type))));
5195 }
5196 if (!tree_int_cst_equal (n->low, n->high))
5197 {
5198 for (chain = TYPE_VALUES (type);
5199 chain && !tree_int_cst_equal (n->high, TREE_VALUE (chain));
5200 chain = TREE_CHAIN (chain))
5201 ;
5202
5203 if (!chain)
5204 {
5205 if (TYPE_NAME (type) == 0)
5206 warning ("case value `%ld' not in enumerated type",
5207 (long) TREE_INT_CST_LOW (n->high));
5208 else
5209 warning ("case value `%ld' not in enumerated type `%s'",
5210 (long) TREE_INT_CST_LOW (n->high),
5211 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
5212 == IDENTIFIER_NODE)
5213 ? TYPE_NAME (type)
5214 : DECL_NAME (TYPE_NAME (type))));
5215 }
5216 }
5217 }
5218
5219 #if 0
5220 /* ??? This optimization is disabled because it causes valid programs to
5221 fail. ANSI C does not guarantee that an expression with enum type
5222 will have a value that is the same as one of the enumeration literals. */
5223
5224 /* If all values were found as case labels, make one of them the default
5225 label. Thus, this switch will never fall through. We arbitrarily pick
5226 the last one to make the default since this is likely the most
5227 efficient choice. */
5228
5229 if (all_values)
5230 {
5231 for (l = &case_stack->data.case_stmt.case_list;
5232 (*l)->right != 0;
5233 l = &(*l)->right)
5234 ;
5235
5236 case_stack->data.case_stmt.default_label = (*l)->code_label;
5237 *l = 0;
5238 }
5239 #endif /* 0 */
5240 }
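
/* For illustration only, not compiled: with -Wswitch, the hypothetical
   switch below draws both warnings issued above: B is not handled, and
   4 is not an enumerator of `enum e'. */
#if 0
enum e { A, B, C };
int
pick (enum e x)
{
  switch (x)			/* no default label */
    {
    case A: return 0;
    case C: return 1;
    case 4: return 2;		/* not in the enumeration */
    }
  return -1;
}
#endif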
5241
5242 \f
5243 /* Terminate a case (Pascal) or switch (C) statement
5244 in which ORIG_INDEX is the expression to be tested.
5245 Generate the code to test it and jump to the right place. */
5246
5247 void
5248 expand_end_case (orig_index)
5249 tree orig_index;
5250 {
5251 tree minval = NULL_TREE, maxval = NULL_TREE, range = NULL_TREE, orig_minval;
5252 rtx default_label = 0;
5253 register struct case_node *n;
5254 unsigned int count;
5255 rtx index;
5256 rtx table_label;
5257 int ncases;
5258 rtx *labelvec;
5259 register int i;
5260 rtx before_case;
5261 register struct nesting *thiscase = case_stack;
5262 tree index_expr, index_type;
5263 int unsignedp;
5264
5265 /* Don't crash due to previous errors. */
5266 if (thiscase == NULL)
5267 return;
5268
5269 table_label = gen_label_rtx ();
5270 index_expr = thiscase->data.case_stmt.index_expr;
5271 index_type = TREE_TYPE (index_expr);
5272 unsignedp = TREE_UNSIGNED (index_type);
5273
5274 do_pending_stack_adjust ();
5275
5276 /* This might get a spurious warning in the presence of a syntax error;
5277 it could be fixed by moving the call to check_seenlabel after the
5278 check for error_mark_node, and copying the code of check_seenlabel that
5279 deals with case_stack->data.case_stmt.line_number_status /
5280 restore_line_number_status in front of the call to end_cleanup_deferral;
5281 however, this might miss some useful warnings in the presence of
5282 non-syntax errors. */
5283 check_seenlabel ();
5284
5285 /* An ERROR_MARK occurs for various reasons including invalid data type. */
5286 if (index_type != error_mark_node)
5287 {
5288 /* If the switch expression was an enumerated type, check that all
5289 enumeration literals are covered by the cases.
5290 No sense trying this if there's a default case, however. */
5291
5292 if (!thiscase->data.case_stmt.default_label
5293 && TREE_CODE (TREE_TYPE (orig_index)) == ENUMERAL_TYPE
5294 && TREE_CODE (index_expr) != INTEGER_CST)
5295 check_for_full_enumeration_handling (TREE_TYPE (orig_index));
5296
5297 /* If we don't have a default-label, create one here,
5298 after the body of the switch. */
5299 if (thiscase->data.case_stmt.default_label == 0)
5300 {
5301 thiscase->data.case_stmt.default_label
5302 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
5303 expand_label (thiscase->data.case_stmt.default_label);
5304 }
5305 default_label = label_rtx (thiscase->data.case_stmt.default_label);
5306
5307 before_case = get_last_insn ();
5308
5309 if (thiscase->data.case_stmt.case_list
5310 && thiscase->data.case_stmt.case_list->left)
5311 thiscase->data.case_stmt.case_list
5312 = case_tree2list (thiscase->data.case_stmt.case_list, 0);
5313
5314 /* Simplify the case-list before we count it. */
5315 group_case_nodes (thiscase->data.case_stmt.case_list);
5316
5317 /* Get upper and lower bounds of case values.
5318 Also convert all the case values to the index expr's data type. */
5319
5320 count = 0;
5321 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5322 {
5323 /* Check low and high label values are integers. */
5324 if (TREE_CODE (n->low) != INTEGER_CST)
5325 abort ();
5326 if (TREE_CODE (n->high) != INTEGER_CST)
5327 abort ();
5328
5329 n->low = convert (index_type, n->low);
5330 n->high = convert (index_type, n->high);
5331
5332 /* Count the elements and track the largest and smallest
5333 of them (treating them as signed even if they are not). */
5334 if (count++ == 0)
5335 {
5336 minval = n->low;
5337 maxval = n->high;
5338 }
5339 else
5340 {
5341 if (INT_CST_LT (n->low, minval))
5342 minval = n->low;
5343 if (INT_CST_LT (maxval, n->high))
5344 maxval = n->high;
5345 }
5346 /* A range counts double, since it requires two compares. */
5347 if (! tree_int_cst_equal (n->low, n->high))
5348 count++;
5349 }
5350
5351 orig_minval = minval;
5352
5353 /* Compute span of values. */
5354 if (count != 0)
5355 range = fold (build (MINUS_EXPR, index_type, maxval, minval));
5356
5357 end_cleanup_deferral ();
5358
5359 if (count == 0)
5360 {
5361 expand_expr (index_expr, const0_rtx, VOIDmode, 0);
5362 emit_queue ();
5363 emit_jump (default_label);
5364 }
5365
5366 /* If the range of values is much bigger than the number of values,
5367 make a sequence of conditional branches instead of a dispatch table.
5368 If the switch-index is a constant, do it this way too,
5369 because we can optimize it. */
5370
5371 #ifndef CASE_VALUES_THRESHOLD
5372 #ifdef HAVE_casesi
5373 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
5374 #else
5375 /* If machine does not have a case insn that compares the
5376 bounds, this means extra overhead for dispatch tables
5377 which raises the threshold for using them. */
5378 #define CASE_VALUES_THRESHOLD 5
5379 #endif /* HAVE_casesi */
5380 #endif /* CASE_VALUES_THRESHOLD */
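/* Worked example of the heuristic below (illustrative figures): with
   HAVE_casesi, six case labels spanning the values 0 through 200 give
   COUNT == 6 and RANGE == 200; since 200 > 10 * 6, a chain of
   conditional branches is emitted.  Six labels packed into 0 through
   10 give RANGE == 10 <= 60, so they fall through to the dispatch
   table code further down.  */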
5381
5382 else if (count < CASE_VALUES_THRESHOLD
5383 || compare_tree_int (range, 10 * count) > 0
5384 /* RANGE may be signed, and really large ranges will show up
5385 as negative numbers. */
5386 || compare_tree_int (range, 0) < 0
5387 #ifndef ASM_OUTPUT_ADDR_DIFF_ELT
5388 || flag_pic
5389 #endif
5390 || TREE_CODE (index_expr) == INTEGER_CST
5391 /* These will reduce to a constant. */
5392 || (TREE_CODE (index_expr) == CALL_EXPR
5393 && TREE_CODE (TREE_OPERAND (index_expr, 0)) == ADDR_EXPR
5394 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == FUNCTION_DECL
5395 && DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == BUILT_IN_NORMAL
5396 && DECL_FUNCTION_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == BUILT_IN_CLASSIFY_TYPE)
5397 || (TREE_CODE (index_expr) == COMPOUND_EXPR
5398 && TREE_CODE (TREE_OPERAND (index_expr, 1)) == INTEGER_CST))
5399 {
5400 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5401
5402 /* If the index is a short or char for which we do not have
5403 an insn to handle comparisons directly, convert it to
5404 a full integer now, rather than letting each comparison
5405 generate the conversion. */
5406
5407 if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
5408 && (cmp_optab->handlers[(int) GET_MODE (index)].insn_code
5409 == CODE_FOR_nothing))
5410 {
5411 enum machine_mode wider_mode;
5412 for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
5413 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
5414 if (cmp_optab->handlers[(int) wider_mode].insn_code
5415 != CODE_FOR_nothing)
5416 {
5417 index = convert_to_mode (wider_mode, index, unsignedp);
5418 break;
5419 }
5420 }
5421
5422 emit_queue ();
5423 do_pending_stack_adjust ();
5424
5425 index = protect_from_queue (index, 0);
5426 if (GET_CODE (index) == MEM)
5427 index = copy_to_reg (index);
5428 if (GET_CODE (index) == CONST_INT
5429 || TREE_CODE (index_expr) == INTEGER_CST)
5430 {
5431 /* Make a tree node with the proper constant value
5432 if we don't already have one. */
5433 if (TREE_CODE (index_expr) != INTEGER_CST)
5434 {
5435 index_expr
5436 = build_int_2 (INTVAL (index),
5437 unsignedp || INTVAL (index) >= 0 ? 0 : -1);
5438 index_expr = convert (index_type, index_expr);
5439 }
5440
5441 /* For constant index expressions we need only
5442 issue an unconditional branch to the appropriate
5443 target code. The job of removing any unreachable
5444 code is left to the optimization phase if the
5445 "-O" option is specified. */
5446 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5447 if (! tree_int_cst_lt (index_expr, n->low)
5448 && ! tree_int_cst_lt (n->high, index_expr))
5449 break;
5450
5451 if (n)
5452 emit_jump (label_rtx (n->code_label));
5453 else
5454 emit_jump (default_label);
5455 }
5456 else
5457 {
5458 /* If the index expression is not constant we generate
5459 a binary decision tree to select the appropriate
5460 target code. This is done as follows:
5461
5462 The list of cases is rearranged into a binary tree,
5463 nearly optimal assuming equal probability for each case.
5464
5465 The tree is transformed into RTL, eliminating
5466 redundant test conditions at the same time.
5467
5468 If program flow could reach the end of the
5469 decision tree an unconditional jump to the
5470 default code is emitted. */
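/* Illustrative sketch: the five case values 1, 4, 9, 16 and 25 would
   be balanced with 9 at the root, {1, 4} as its left subtree and
   {16, 25} as its right, so deciding any index visits at most three
   nodes instead of scanning all five in order.  */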
5471
5472 use_cost_table
5473 = (TREE_CODE (TREE_TYPE (orig_index)) != ENUMERAL_TYPE
5474 && estimate_case_costs (thiscase->data.case_stmt.case_list));
5475 balance_case_nodes (&thiscase->data.case_stmt.case_list,
5476 NULL_PTR);
5477 emit_case_nodes (index, thiscase->data.case_stmt.case_list,
5478 default_label, index_type);
5479 emit_jump_if_reachable (default_label);
5480 }
5481 }
5482 else
5483 {
5484 int win = 0;
5485 #ifdef HAVE_casesi
5486 if (HAVE_casesi)
5487 {
5488 enum machine_mode index_mode = SImode;
5489 int index_bits = GET_MODE_BITSIZE (index_mode);
5490 rtx op1, op2;
5491 enum machine_mode op_mode;
5492
5493 /* Convert the index to SImode. */
5494 if (GET_MODE_BITSIZE (TYPE_MODE (index_type))
5495 > GET_MODE_BITSIZE (index_mode))
5496 {
5497 enum machine_mode omode = TYPE_MODE (index_type);
5498 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
5499
5500 /* We must handle the endpoints in the original mode. */
5501 index_expr = build (MINUS_EXPR, index_type,
5502 index_expr, minval);
5503 minval = integer_zero_node;
5504 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5505 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
5506 omode, 1, 0, default_label);
5507 /* Now we can safely truncate. */
5508 index = convert_to_mode (index_mode, index, 0);
5509 }
5510 else
5511 {
5512 if (TYPE_MODE (index_type) != index_mode)
5513 {
5514 index_expr = convert (type_for_size (index_bits, 0),
5515 index_expr);
5516 index_type = TREE_TYPE (index_expr);
5517 }
5518
5519 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5520 }
5521 emit_queue ();
5522 index = protect_from_queue (index, 0);
5523 do_pending_stack_adjust ();
5524
5525 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
5526 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
5527 (index, op_mode))
5528 index = copy_to_mode_reg (op_mode, index);
5529
5530 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
5531
5532 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
5533 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
5534 (op1, op_mode))
5535 op1 = copy_to_mode_reg (op_mode, op1);
5536
5537 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
5538
5539 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
5540 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
5541 (op2, op_mode))
5542 op2 = copy_to_mode_reg (op_mode, op2);
5543
5544 emit_jump_insn (gen_casesi (index, op1, op2,
5545 table_label, default_label));
5546 win = 1;
5547 }
5548 #endif
5549 #ifdef HAVE_tablejump
5550 if (! win && HAVE_tablejump)
5551 {
5552 index_type = thiscase->data.case_stmt.nominal_type;
5553 index_expr = fold (build (MINUS_EXPR, index_type,
5554 convert (index_type, index_expr),
5555 convert (index_type, minval)));
5556 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5557 emit_queue ();
5558 index = protect_from_queue (index, 0);
5559 do_pending_stack_adjust ();
5560
5561 do_tablejump (index, TYPE_MODE (index_type),
5562 expand_expr (range, NULL_RTX, VOIDmode, 0),
5563 table_label, default_label);
5564 win = 1;
5565 }
5566 #endif
5567 if (! win)
5568 abort ();
5569
5570 /* Get table of labels to jump to, in order of case index. */
5571
5572 ncases = TREE_INT_CST_LOW (range) + 1;
5573 labelvec = (rtx *) alloca (ncases * sizeof (rtx));
5574 bzero ((char *) labelvec, ncases * sizeof (rtx));
5575
5576 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5577 {
5578 register HOST_WIDE_INT i
5579 = TREE_INT_CST_LOW (n->low) - TREE_INT_CST_LOW (orig_minval);
5580
5581 while (1)
5582 {
5583 labelvec[i]
5584 = gen_rtx_LABEL_REF (Pmode, label_rtx (n->code_label));
5585 if (i + TREE_INT_CST_LOW (orig_minval)
5586 == TREE_INT_CST_LOW (n->high))
5587 break;
5588 i++;
5589 }
5590 }
5591
5592 /* Fill in the gaps with the default. */
5593 for (i = 0; i < ncases; i++)
5594 if (labelvec[i] == 0)
5595 labelvec[i] = gen_rtx_LABEL_REF (Pmode, default_label);
5596
5597 /* Output the table. */
5598 emit_label (table_label);
5599
5600 if (CASE_VECTOR_PC_RELATIVE || flag_pic)
5601 emit_jump_insn (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE,
5602 gen_rtx_LABEL_REF (Pmode, table_label),
5603 gen_rtvec_v (ncases, labelvec),
5604 const0_rtx, const0_rtx));
5605 else
5606 emit_jump_insn (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE,
5607 gen_rtvec_v (ncases, labelvec)));
5608
5609 /* If the case insn drops through the table,
5610 after the table we must jump to the default-label.
5611 Otherwise record no drop-through after the table. */
5612 #ifdef CASE_DROPS_THROUGH
5613 emit_jump (default_label);
5614 #else
5615 emit_barrier ();
5616 #endif
5617 }
5618
5619 before_case = squeeze_notes (NEXT_INSN (before_case), get_last_insn ());
5620 reorder_insns (before_case, get_last_insn (),
5621 thiscase->data.case_stmt.start);
5622 }
5623 else
5624 end_cleanup_deferral ();
5625
5626 if (thiscase->exit_label)
5627 emit_label (thiscase->exit_label);
5628
5629 POPSTACK (case_stack);
5630
5631 free_temp_slots ();
5632 }
5633
5634 /* Convert the tree NODE into a list linked by the right field, with the left
5635 field zeroed. RIGHT is used for recursion; it is a list to be placed
5636 rightmost in the resulting list. */
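/* Illustrative sketch: a tree with root 5, left child 3 and right
   child 8 becomes the list 3 -> 5 -> 8, linked through the RIGHT
   fields with every LEFT field zeroed.  */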
5637
5638 static struct case_node *
5639 case_tree2list (node, right)
5640 struct case_node *node, *right;
5641 {
5642 struct case_node *left;
5643
5644 if (node->right)
5645 right = case_tree2list (node->right, right);
5646
5647 node->right = right;
5648 if ((left = node->left))
5649 {
5650 node->left = 0;
5651 return case_tree2list (left, node);
5652 }
5653
5654 return node;
5655 }
5656
5657 /* Generate code to jump to LABEL if OP1 and OP2 are equal. */
5658
5659 static void
5660 do_jump_if_equal (op1, op2, label, unsignedp)
5661 rtx op1, op2, label;
5662 int unsignedp;
5663 {
5664 if (GET_CODE (op1) == CONST_INT
5665 && GET_CODE (op2) == CONST_INT)
5666 {
5667 if (INTVAL (op1) == INTVAL (op2))
5668 emit_jump (label);
5669 }
5670 else
5671 {
5672 enum machine_mode mode = GET_MODE (op1);
5673 if (mode == VOIDmode)
5674 mode = GET_MODE (op2);
5675 emit_cmp_and_jump_insns (op1, op2, EQ, NULL_RTX, mode, unsignedp,
5676 0, label);
5677 }
5678 }
5679 \f
5680 /* Not all case values are encountered equally. This function
5681 uses a heuristic to weight case labels, in cases where that
5682 looks like a reasonable thing to do.
5683
5684 Right now, all we try to guess is text, and we establish the
5685 following weights:
5686
5687 chars above space: 16
5688 digits: 16
5689 default: 12
5690 space, punct: 8
5691 tab: 4
5692 newline: 2
5693 other "\" chars: 1
5694 remaining chars: 0
5695
5696 If we find any cases in the switch that are not either -1 or in the range
5697 of valid ASCII characters, or are control characters other than those
5698 commonly used with "\", don't treat this switch as scanning text.
5699
5700 Return 1 if these nodes are suitable for cost estimation, otherwise
5701 return 0. */
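/* Illustrative sketch: a switch over 'a', 'z', '0' and '\n' looks
   like text (weights 16, 16, 16 and 2), so 1 is returned; a switch
   that also mentions 300 or the control character '\001' does not,
   and 0 is returned for it.  */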
5702
5703 static int
5704 estimate_case_costs (node)
5705 case_node_ptr node;
5706 {
5707 tree min_ascii = build_int_2 (-1, -1);
5708 tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
5709 case_node_ptr n;
5710 int i;
5711
5712 /* If we haven't already made the cost table, make it now. Note that the
5713 lower bound of the table is -1, not zero. */
5714
5715 if (cost_table == NULL)
5716 {
5717 cost_table = cost_table_ + 1;
5718
5719 for (i = 0; i < 128; i++)
5720 {
5721 if (ISALNUM (i))
5722 cost_table[i] = 16;
5723 else if (ISPUNCT (i))
5724 cost_table[i] = 8;
5725 else if (ISCNTRL (i))
5726 cost_table[i] = -1;
5727 }
5728
5729 cost_table[' '] = 8;
5730 cost_table['\t'] = 4;
5731 cost_table['\0'] = 4;
5732 cost_table['\n'] = 2;
5733 cost_table['\f'] = 1;
5734 cost_table['\v'] = 1;
5735 cost_table['\b'] = 1;
5736 }
5737
5738 /* See if all the case expressions look like text. It is text if the
5739 constant is >= -1 and the highest constant is <= 127. Do all comparisons
5740 as signed arithmetic since we don't want to ever access cost_table with a
5741 value less than -1. Also check that none of the constants in a range
5742 are strange control characters. */
5743
5744 for (n = node; n; n = n->right)
5745 {
5746 if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
5747 return 0;
5748
5749 for (i = (HOST_WIDE_INT) TREE_INT_CST_LOW (n->low);
5750 i <= (HOST_WIDE_INT) TREE_INT_CST_LOW (n->high); i++)
5751 if (cost_table[i] < 0)
5752 return 0;
5753 }
5754
5755 /* All interesting values are within the range of interesting
5756 ASCII characters. */
5757 return 1;
5758 }
5759
5760 /* Scan an ordered list of case nodes
5761 combining those with consecutive values or ranges.
5762
5763 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
5764
5765 static void
5766 group_case_nodes (head)
5767 case_node_ptr head;
5768 {
5769 case_node_ptr node = head;
5770
5771 while (node)
5772 {
5773 rtx lb = next_real_insn (label_rtx (node->code_label));
5774 rtx lb2;
5775 case_node_ptr np = node;
5776
5777 /* Try to group the successors of NODE with NODE. */
5778 while (((np = np->right) != 0)
5779 /* Do they jump to the same place? */
5780 && ((lb2 = next_real_insn (label_rtx (np->code_label))) == lb
5781 || (lb != 0 && lb2 != 0
5782 && simplejump_p (lb)
5783 && simplejump_p (lb2)
5784 && rtx_equal_p (SET_SRC (PATTERN (lb)),
5785 SET_SRC (PATTERN (lb2)))))
5786 /* Are their ranges consecutive? */
5787 && tree_int_cst_equal (np->low,
5788 fold (build (PLUS_EXPR,
5789 TREE_TYPE (node->high),
5790 node->high,
5791 integer_one_node)))
5792 /* An overflow is not consecutive. */
5793 && tree_int_cst_lt (node->high,
5794 fold (build (PLUS_EXPR,
5795 TREE_TYPE (node->high),
5796 node->high,
5797 integer_one_node))))
5798 {
5799 node->high = np->high;
5800 }
5801 /* NP is the first node after NODE which can't be grouped with it.
5802 Delete the nodes in between, and move on to that node. */
5803 node->right = np;
5804 node = np;
5805 }
5806 }
5807
5808 /* Take an ordered list of case nodes
5809 and transform them into a near optimal binary tree,
5810 on the assumption that any target code selection value is as
5811 likely as any other.
5812
5813 The transformation is performed by splitting the ordered
5814 list into two equal sections plus a pivot. The parts are
5815 then attached to the pivot as left and right branches. Each
5816 branch is then transformed recursively. */
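/* Illustrative sketch: the seven-entry list 1 2 3 4 5 6 7 is split
   at the pivot 4, leaving 1 2 3 and 5 6 7 as its left and right
   branches; splitting those the same way yields a tree of depth
   three.  */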
5817
5818 static void
5819 balance_case_nodes (head, parent)
5820 case_node_ptr *head;
5821 case_node_ptr parent;
5822 {
5823 register case_node_ptr np;
5824
5825 np = *head;
5826 if (np)
5827 {
5828 int cost = 0;
5829 int i = 0;
5830 int ranges = 0;
5831 register case_node_ptr *npp;
5832 case_node_ptr left;
5833
5834 /* Count the number of entries on this branch. Also count the ranges. */
5835
5836 while (np)
5837 {
5838 if (!tree_int_cst_equal (np->low, np->high))
5839 {
5840 ranges++;
5841 if (use_cost_table)
5842 cost += cost_table[TREE_INT_CST_LOW (np->high)];
5843 }
5844
5845 if (use_cost_table)
5846 cost += cost_table[TREE_INT_CST_LOW (np->low)];
5847
5848 i++;
5849 np = np->right;
5850 }
5851
5852 if (i > 2)
5853 {
5854 /* Split this list if it is long enough for that to help. */
5855 npp = head;
5856 left = *npp;
5857 if (use_cost_table)
5858 {
5859 /* Find the place in the list that bisects the list's total cost.
5860 Here I gets half the total cost. */
5861 int n_moved = 0;
5862 i = (cost + 1) / 2;
5863 while (1)
5864 {
5865 /* Skip nodes while their cost does not reach that amount. */
5866 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
5867 i -= cost_table[TREE_INT_CST_LOW ((*npp)->high)];
5868 i -= cost_table[TREE_INT_CST_LOW ((*npp)->low)];
5869 if (i <= 0)
5870 break;
5871 npp = &(*npp)->right;
5872 n_moved += 1;
5873 }
5874 if (n_moved == 0)
5875 {
5876 /* Leave this branch lopsided, but optimize left-hand
5877 side and fill in `parent' fields for right-hand side. */
5878 np = *head;
5879 np->parent = parent;
5880 balance_case_nodes (&np->left, np);
5881 for (; np->right; np = np->right)
5882 np->right->parent = np;
5883 return;
5884 }
5885 }
5886 /* If there are just three nodes, split at the middle one. */
5887 else if (i == 3)
5888 npp = &(*npp)->right;
5889 else
5890 {
5891 /* Find the place in the list that bisects the list's total cost,
5892 where ranges count as 2.
5893 Here I gets half the total cost. */
5894 i = (i + ranges + 1) / 2;
5895 while (1)
5896 {
5897 /* Skip nodes while their cost does not reach that amount. */
5898 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
5899 i--;
5900 i--;
5901 if (i <= 0)
5902 break;
5903 npp = &(*npp)->right;
5904 }
5905 }
5906 *head = np = *npp;
5907 *npp = 0;
5908 np->parent = parent;
5909 np->left = left;
5910
5911 /* Optimize each of the two split parts. */
5912 balance_case_nodes (&np->left, np);
5913 balance_case_nodes (&np->right, np);
5914 }
5915 else
5916 {
5917 /* Else leave this branch as one level,
5918 but fill in `parent' fields. */
5919 np = *head;
5920 np->parent = parent;
5921 for (; np->right; np = np->right)
5922 np->right->parent = np;
5923 }
5924 }
5925 }
5926 \f
5927 /* Search the parent sections of the case node tree
5928 to see if a test for the lower bound of NODE would be redundant.
5929 INDEX_TYPE is the type of the index expression.
5930
5931 The instructions to generate the case decision tree are
5932 output in the same order as nodes are processed, so it is
5933 known that if a parent node tests the value just below the
5934 current node's low bound, the current node is bounded at its
5935 lower span. Thus the test would be redundant. */
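/* Illustrative sketch: if this node's low bound is 51 and some parent
   node's range ends at 50, the parent's test already separates the
   values below 51, so no `index < 51' comparison need be emitted
   here.  */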
5936
5937 static int
5938 node_has_low_bound (node, index_type)
5939 case_node_ptr node;
5940 tree index_type;
5941 {
5942 tree low_minus_one;
5943 case_node_ptr pnode;
5944
5945 /* If the lower bound of this node is the lowest value in the index type,
5946 we need not test it. */
5947
5948 if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
5949 return 1;
5950
5951 /* If this node has a left branch, the value at the left must be less
5952 than that at this node, so it cannot be bounded at the bottom and
5953 we need not bother testing any further. */
5954
5955 if (node->left)
5956 return 0;
5957
5958 low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
5959 node->low, integer_one_node));
5960
5961 /* If the subtraction above overflowed, we can't verify anything.
5962 Otherwise, look for a parent that tests our value - 1. */
5963
5964 if (! tree_int_cst_lt (low_minus_one, node->low))
5965 return 0;
5966
5967 for (pnode = node->parent; pnode; pnode = pnode->parent)
5968 if (tree_int_cst_equal (low_minus_one, pnode->high))
5969 return 1;
5970
5971 return 0;
5972 }
5973
5974 /* Search the parent sections of the case node tree
5975 to see if a test for the upper bound of NODE would be redundant.
5976 INDEX_TYPE is the type of the index expression.
5977
5978 The instructions to generate the case decision tree are
5979 output in the same order as nodes are processed, so it is
5980 known that if a parent node tests the value just above the
5981 current node's high bound, the current node is bounded at its
5982 upper span. Thus the test would be redundant. */
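/* Illustrative sketch: symmetrically, if this node's high bound is 51
   and some parent node's range starts at 52, the parent's test has
   already peeled off the values above 51, so no `index > 51'
   comparison is needed.  */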
5983
5984 static int
5985 node_has_high_bound (node, index_type)
5986 case_node_ptr node;
5987 tree index_type;
5988 {
5989 tree high_plus_one;
5990 case_node_ptr pnode;
5991
5992 /* If there is no upper bound, obviously no test is needed. */
5993
5994 if (TYPE_MAX_VALUE (index_type) == NULL)
5995 return 1;
5996
5997 /* If the upper bound of this node is the highest value in the type
5998 of the index expression, we need not test against it. */
5999
6000 if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
6001 return 1;
6002
6003 /* If this node has a right branch, the value at the right must be greater
6004 than that at this node, so it cannot be bounded at the top and
6005 we need not bother testing any further. */
6006
6007 if (node->right)
6008 return 0;
6009
6010 high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
6011 node->high, integer_one_node));
6012
6013 /* If the addition above overflowed, we can't verify anything.
6014 Otherwise, look for a parent that tests our value + 1. */
6015
6016 if (! tree_int_cst_lt (node->high, high_plus_one))
6017 return 0;
6018
6019 for (pnode = node->parent; pnode; pnode = pnode->parent)
6020 if (tree_int_cst_equal (high_plus_one, pnode->low))
6021 return 1;
6022
6023 return 0;
6024 }
6025
6026 /* Search the parent sections of the
6027 case node tree to see if both tests for the upper and lower
6028 bounds of NODE would be redundant. */
6029
6030 static int
6031 node_is_bounded (node, index_type)
6032 case_node_ptr node;
6033 tree index_type;
6034 {
6035 return (node_has_low_bound (node, index_type)
6036 && node_has_high_bound (node, index_type));
6037 }
6038
6039 /* Emit an unconditional jump to LABEL unless it would be dead code. */
6040
6041 static void
6042 emit_jump_if_reachable (label)
6043 rtx label;
6044 {
6045 if (GET_CODE (get_last_insn ()) != BARRIER)
6046 emit_jump (label);
6047 }
6048 \f
6049 /* Emit step-by-step code to select a case for the value of INDEX.
6050 The thus generated decision tree follows the form of the
6051 case-node binary tree NODE, whose nodes represent test conditions.
6052 INDEX_TYPE is the type of the index of the switch.
6053
6054 Care is taken to prune redundant tests from the decision tree
6055 by detecting any boundary conditions already checked by
6056 emitted rtx. (See node_has_high_bound, node_has_low_bound
6057 and node_is_bounded, above.)
6058
6059 Where the test conditions can be shown to be redundant we emit
6060 an unconditional jump to the target code. As a further
6061 optimization, the subordinates of a tree node are examined to
6062 check for bounded nodes. In this case conditional and/or
6063 unconditional jumps resulting from the boundary check for the
6064 current node are arranged to target the subordinates' associated
6065 code for out-of-bound conditions on the current node.
6066
6067 We can assume that when control reaches the code generated here,
6068 the index value has already been compared with the parents
6069 of this node, and determined to be on the same side of each parent
6070 as this node is. Thus, if this node tests for the value 51,
6071 and a parent tested for 52, we don't need to consider
6072 the possibility of a value greater than 51. If another parent
6073 tests for the value 50, then this node need not test anything. */
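/* Illustrative sketch of the emitted tests: a childless node covering
   the range 10 ... 20, with neither bound proven by a parent, produces
   roughly

	if (index > 20) goto default;
	if (index < 10) goto default;
	goto L_10_20;

   while a single-valued leaf needs only the initial equality test
   (the label name L_10_20 is invented for the example).  */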
6074
6075 static void
6076 emit_case_nodes (index, node, default_label, index_type)
6077 rtx index;
6078 case_node_ptr node;
6079 rtx default_label;
6080 tree index_type;
6081 {
6082 /* If INDEX has an unsigned type, we must make unsigned branches. */
6083 int unsignedp = TREE_UNSIGNED (index_type);
6084 enum machine_mode mode = GET_MODE (index);
6085
6086 /* See if our parents have already tested everything for us.
6087 If they have, emit an unconditional jump for this node. */
6088 if (node_is_bounded (node, index_type))
6089 emit_jump (label_rtx (node->code_label));
6090
6091 else if (tree_int_cst_equal (node->low, node->high))
6092 {
6093 /* Node is single valued. First see if the index expression matches
6094 this node and then check our children, if any. */
6095
6096 do_jump_if_equal (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
6097 label_rtx (node->code_label), unsignedp);
6098
6099 if (node->right != 0 && node->left != 0)
6100 {
6101 /* This node has children on both sides.
6102 Dispatch to one side or the other
6103 by comparing the index value with this node's value.
6104 If one subtree is bounded, check that one first,
6105 so we can avoid real branches in the tree. */
6106
6107 if (node_is_bounded (node->right, index_type))
6108 {
6109 emit_cmp_and_jump_insns (index,
6110 expand_expr (node->high, NULL_RTX,
6111 VOIDmode, 0),
6112 GT, NULL_RTX, mode, unsignedp, 0,
6113 label_rtx (node->right->code_label));
6114 emit_case_nodes (index, node->left, default_label, index_type);
6115 }
6116
6117 else if (node_is_bounded (node->left, index_type))
6118 {
6119 emit_cmp_and_jump_insns (index,
6120 expand_expr (node->high, NULL_RTX,
6121 VOIDmode, 0),
6122 LT, NULL_RTX, mode, unsignedp, 0,
6123 label_rtx (node->left->code_label));
6124 emit_case_nodes (index, node->right, default_label, index_type);
6125 }
6126
6127 else
6128 {
6129 /* Neither node is bounded. First distinguish the two sides;
6130 then emit the code for one side at a time. */
6131
6132 tree test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
6133
6134 /* See if the value is on the right. */
6135 emit_cmp_and_jump_insns (index,
6136 expand_expr (node->high, NULL_RTX,
6137 VOIDmode, 0),
6138 GT, NULL_RTX, mode, unsignedp, 0,
6139 label_rtx (test_label));
6140
6141 /* Value must be on the left.
6142 Handle the left-hand subtree. */
6143 emit_case_nodes (index, node->left, default_label, index_type);
6144 /* If left-hand subtree does nothing,
6145 go to default. */
6146 emit_jump_if_reachable (default_label);
6147
6148 /* Code branches here for the right-hand subtree. */
6149 expand_label (test_label);
6150 emit_case_nodes (index, node->right, default_label, index_type);
6151 }
6152 }
6153
6154 else if (node->right != 0 && node->left == 0)
6155 {
6156 /* Here we have a right child but no left, so we issue a conditional
6157 branch to default and process the right child.
6158
6159 Omit the conditional branch to default if all it would skip is a
6160 single-valued right child; it costs too much space to save so little time. */
6161
6162 if (node->right->right || node->right->left
6163 || !tree_int_cst_equal (node->right->low, node->right->high))
6164 {
6165 if (!node_has_low_bound (node, index_type))
6166 {
6167 emit_cmp_and_jump_insns (index,
6168 expand_expr (node->high, NULL_RTX,
6169 VOIDmode, 0),
6170 LT, NULL_RTX, mode, unsignedp, 0,
6171 default_label);
6172 }
6173
6174 emit_case_nodes (index, node->right, default_label, index_type);
6175 }
6176 else
6177 /* We cannot process node->right normally
6178 since we haven't ruled out the numbers less than
6179 this node's value. So handle node->right explicitly. */
6180 do_jump_if_equal (index,
6181 expand_expr (node->right->low, NULL_RTX,
6182 VOIDmode, 0),
6183 label_rtx (node->right->code_label), unsignedp);
6184 }
6185
6186 else if (node->right == 0 && node->left != 0)
6187 {
6188 /* Just one subtree, on the left. */
6189
6190 #if 0 /* The following code and comment were formerly part
6191 of the condition here, but they didn't work
6192 and I don't understand what the idea was. -- rms. */
6193 /* If our "most probable entry" is less probable
6194 than the default label, emit a jump to
6195 the default label using condition codes
6196 already lying around. With no right branch,
6197 a branch-greater-than will get us to the default
6198 label correctly. */
6199 if (use_cost_table
6200 && cost_table[TREE_INT_CST_LOW (node->high)] < 12)
6201 ;
6202 #endif /* 0 */
6203 if (node->left->left || node->left->right
6204 || !tree_int_cst_equal (node->left->low, node->left->high))
6205 {
6206 if (!node_has_high_bound (node, index_type))
6207 {
6208 emit_cmp_and_jump_insns (index, expand_expr (node->high,
6209 NULL_RTX,
6210 VOIDmode, 0),
6211 GT, NULL_RTX, mode, unsignedp, 0,
6212 default_label);
6213 }
6214
6215 emit_case_nodes (index, node->left, default_label, index_type);
6216 }
6217 else
6218 /* We cannot process node->left normally
6219 since we haven't ruled out the numbers greater than
6220 this node's value. So handle node->left explicitly. */
6221 do_jump_if_equal (index,
6222 expand_expr (node->left->low, NULL_RTX,
6223 VOIDmode, 0),
6224 label_rtx (node->left->code_label), unsignedp);
6225 }
6226 }
6227 else
6228 {
6229 /* Node is a range. These cases are very similar to those for a single
6230 value, except that we do not start by testing whether this node
6231 is the one to branch to. */
6232
6233 if (node->right != 0 && node->left != 0)
6234 {
6235 /* Node has subtrees on both sides.
6236 If the right-hand subtree is bounded,
6237 test for it first, since we can go straight there.
6238 Otherwise, we need to make a branch in the control structure,
6239 then handle the two subtrees. */
6240 tree test_label = 0;
6241
6242 if (node_is_bounded (node->right, index_type))
6243 /* Right hand node is fully bounded so we can eliminate any
6244 testing and branch directly to the target code. */
6245 emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
6246 VOIDmode, 0),
6247 GT, NULL_RTX, mode, unsignedp, 0,
6248 label_rtx (node->right->code_label));
6249 else
6250 {
6251 /* Right hand node requires testing.
6252 Branch to a label where we will handle it later. */
6253
6254 test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
6255 emit_cmp_and_jump_insns (index,
6256 expand_expr (node->high, NULL_RTX,
6257 VOIDmode, 0),
6258 GT, NULL_RTX, mode, unsignedp, 0,
6259 label_rtx (test_label));
6260 }
6261
6262 /* Value belongs to this node or to the left-hand subtree. */
6263
6264 emit_cmp_and_jump_insns (index, expand_expr (node->low, NULL_RTX,
6265 VOIDmode, 0),
6266 GE, NULL_RTX, mode, unsignedp, 0,
6267 label_rtx (node->code_label));
6268
6269 /* Handle the left-hand subtree. */
6270 emit_case_nodes (index, node->left, default_label, index_type);
6271
6272 /* If right node had to be handled later, do that now. */
6273
6274 if (test_label)
6275 {
6276 /* If the left-hand subtree fell through,
6277 don't let it fall into the right-hand subtree. */
6278 emit_jump_if_reachable (default_label);
6279
6280 expand_label (test_label);
6281 emit_case_nodes (index, node->right, default_label, index_type);
6282 }
6283 }
6284
6285 else if (node->right != 0 && node->left == 0)
6286 {
6287 /* Deal with values to the left of this node,
6288 if they are possible. */
6289 if (!node_has_low_bound (node, index_type))
6290 {
6291 emit_cmp_and_jump_insns (index,
6292 expand_expr (node->low, NULL_RTX,
6293 VOIDmode, 0),
6294 LT, NULL_RTX, mode, unsignedp, 0,
6295 default_label);
6296 }
6297
6298 /* Value belongs to this node or to the right-hand subtree. */
6299
6300 emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
6301 VOIDmode, 0),
6302 LE, NULL_RTX, mode, unsignedp, 0,
6303 label_rtx (node->code_label));
6304
6305 emit_case_nodes (index, node->right, default_label, index_type);
6306 }
6307
6308 else if (node->right == 0 && node->left != 0)
6309 {
6310 /* Deal with values to the right of this node,
6311 if they are possible. */
6312 if (!node_has_high_bound (node, index_type))
6313 {
6314 emit_cmp_and_jump_insns (index,
6315 expand_expr (node->high, NULL_RTX,
6316 VOIDmode, 0),
6317 GT, NULL_RTX, mode, unsignedp, 0,
6318 default_label);
6319 }
6320
6321 /* Value belongs to this node or to the left-hand subtree. */
6322
6323 emit_cmp_and_jump_insns (index,
6324 expand_expr (node->low, NULL_RTX,
6325 VOIDmode, 0),
6326 GE, NULL_RTX, mode, unsignedp, 0,
6327 label_rtx (node->code_label));
6328
6329 emit_case_nodes (index, node->left, default_label, index_type);
6330 }
6331
6332 else
6333 {
6334 /* Node has no children so we check low and high bounds to remove
6335 redundant tests. At most one of the bounds can be known to hold,
6336 since otherwise this node would be fully bounded--a case tested already. */
6337
6338 if (!node_has_high_bound (node, index_type))
6339 {
6340 emit_cmp_and_jump_insns (index,
6341 expand_expr (node->high, NULL_RTX,
6342 VOIDmode, 0),
6343 GT, NULL_RTX, mode, unsignedp, 0,
6344 default_label);
6345 }
6346
6347 if (!node_has_low_bound (node, index_type))
6348 {
6349 emit_cmp_and_jump_insns (index,
6350 expand_expr (node->low, NULL_RTX,
6351 VOIDmode, 0),
6352 LT, NULL_RTX, mode, unsignedp, 0,
6353 default_label);
6354 }
6355
6356 emit_jump (label_rtx (node->code_label));
6357 }
6358 }
6359 }