gcc/stmt.c
1 /* Expands front end tree to back end RTL for GNU C-Compiler
2 Copyright (C) 1987, 88, 89, 92-98, 1999 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
20
21
22 /* This file handles the generation of rtl code from tree structure
23 above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
24 It also creates the rtl expressions for parameters and auto variables
25 and has full responsibility for allocating stack slots.
26
27 The functions whose names start with `expand_' are called by the
28 parser to generate RTL instructions for various kinds of constructs.
29
30 Some control and binding constructs require calling several such
31 functions at different times. For example, a simple if-then
32 is expanded by calling `expand_start_cond' (with the condition-expression
33 as argument) before parsing the then-clause and calling `expand_end_cond'
34 after parsing the then-clause. */
35
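/* Illustrative sketch (the call sequence below is an assumption drawn
   from the description above, not a definitive front-end recipe):
   to expand a C statement such as

       if (cond)
         then_stmt;

   a front end would interleave its parsing with roughly these calls:

       expand_start_cond (cond_tree, 0);    -- emit the test of COND
       expand_expr_stmt (then_stmt_tree);   -- emit the then-clause
       expand_end_cond ();                  -- emit the join point

   where `cond_tree' and `then_stmt_tree' are placeholder names for
   the trees the parser has built.  */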
36 #include "config.h"
37 #include "system.h"
38
39 #include "rtl.h"
40 #include "tree.h"
41 #include "flags.h"
42 #include "except.h"
43 #include "function.h"
44 #include "insn-flags.h"
45 #include "insn-config.h"
46 #include "insn-codes.h"
47 #include "expr.h"
48 #include "hard-reg-set.h"
49 #include "obstack.h"
50 #include "loop.h"
51 #include "recog.h"
52 #include "machmode.h"
53 #include "toplev.h"
54 #include "output.h"
55
56 #define obstack_chunk_alloc xmalloc
57 #define obstack_chunk_free free
58 struct obstack stmt_obstack;
59
60 /* Assume that case vectors are not pc-relative. */
61 #ifndef CASE_VECTOR_PC_RELATIVE
62 #define CASE_VECTOR_PC_RELATIVE 0
63 #endif
64
65 /* Filename and line number of last line-number note,
66 whether we actually emitted it or not. */
67 char *emit_filename;
68 int emit_lineno;
69
70 /* Nonzero if within a ({...}) grouping, in which case we must
71 always compute a value for each expr-stmt in case it is the last one. */
72
73 int expr_stmts_for_value;
74
75 /* Each time we expand an expression-statement,
76 record the expr's type and its RTL value here. */
77
78 static tree last_expr_type;
79 static rtx last_expr_value;
80
81 /* Each time we expand the end of a binding contour (in `expand_end_bindings')
82 and we emit a new NOTE_INSN_BLOCK_END note, we save a pointer to it here.
83 This is used by the `remember_end_note' function to record the endpoint
84 of each generated block in its associated BLOCK node. */
85
86 static rtx last_block_end_note;
87
88 /* Number of binding contours started so far in this function. */
89
90 int block_start_count;
91
92 /* Nonzero if function being compiled needs to
93 return the address of where it has put a structure value. */
94
95 extern int current_function_returns_pcc_struct;
96
97 /* Label that will go on parm cleanup code, if any.
98 Jumping to this label runs cleanup code for parameters, if
99 such code must be run. Following this code is the logical return label. */
100
101 extern rtx cleanup_label;
102
103 /* Label that will go on function epilogue.
104 Jumping to this label serves as a "return" instruction
105 on machines which require execution of the epilogue on all returns. */
106
107 extern rtx return_label;
108
109 /* Offset to end of allocated area of stack frame.
110 If stack grows down, this is the address of the last stack slot allocated.
111 If stack grows up, this is the address for the next slot. */
112 extern int frame_offset;
113
114 /* Label to jump back to for tail recursion, or 0 if we have
115 not yet needed one for this function. */
116 extern rtx tail_recursion_label;
117
118 /* Place after which to insert the tail_recursion_label if we need one. */
119 extern rtx tail_recursion_reentry;
120
121 /* Location at which to save the argument pointer if it will need to be
122 referenced. There are two cases where this is done: if nonlocal gotos
123 exist, or if vars whose address is an offset from the argument pointer will be
124 needed by inner routines. */
125
126 extern rtx arg_pointer_save_area;
127
128 /* Chain of all RTL_EXPRs that have insns in them. */
129 extern tree rtl_expr_chain;
130 \f
131 /* Functions and data structures for expanding case statements. */
132
133 /* Case label structure, used to hold info on labels within case
134 statements. We handle "range" labels; for a single-value label
135 as in C, the high and low limits are the same.
136
137 An AVL tree of case nodes is initially created, and later transformed
138 to a list linked via the RIGHT fields in the nodes. Nodes with
139 higher case values are later in the list.
140
141 Switch statements can be output in one of two forms. A branch table
142 is used if there are more than a few labels and the labels are dense
143 within the range between the smallest and largest case value. If a
144 branch table is used, no further manipulations are done with the case
145 node chain.
146
147 The alternative to the use of a branch table is to generate a series
148 of compare and jump insns. When that is done, we use the LEFT, RIGHT,
149 and PARENT fields to hold a binary tree. Initially the tree is
150 totally unbalanced, with everything on the right. We balance the tree
151 with nodes on the left having lower case values than the parent
152 and nodes on the right having higher values. We then output the tree
153 in order. */
154
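/* Illustrative example of the representation (names made up): for

       switch (i) { case 1: ...; case 4 ... 7: ...; default: ...; }

   two case_node entries are built, one with low == high == 1 and one
   with low == 4 and high == 7, each pointing at the label of its arm
   through the `code_label' field.  The default label is not a
   case_node; it is kept separately in the case_stmt nesting data.  */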
155 struct case_node
156 {
157 struct case_node *left; /* Left son in binary tree */
158 struct case_node *right; /* Right son in binary tree; also node chain */
159 struct case_node *parent; /* Parent of node in binary tree */
160 tree low; /* Lowest index value for this label */
161 tree high; /* Highest index value for this label */
162 tree code_label; /* Label to jump to when node matches */
163 int balance;
164 };
165
166 typedef struct case_node case_node;
167 typedef struct case_node *case_node_ptr;
168
169 /* These are used by estimate_case_costs and balance_case_nodes. */
170
171 /* This must be a signed type, and non-ANSI compilers lack signed char. */
172 static short *cost_table;
173 static int use_cost_table;
174 \f
175 /* Stack of control and binding constructs we are currently inside.
176
177 These constructs begin when you call `expand_start_WHATEVER'
178 and end when you call `expand_end_WHATEVER'. This stack records
179 info about how the construct began that tells the end-function
180 what to do. It also may provide information about the construct
181 to alter the behavior of other constructs within the body.
182 For example, they may affect the behavior of C `break' and `continue'.
183
184 Each construct gets one `struct nesting' object.
185 All of these objects are chained through the `all' field.
186 `nesting_stack' points to the first object (innermost construct).
187 The position of an entry on `nesting_stack' is in its `depth' field.
188
189 Each type of construct has its own individual stack.
190 For example, loops have `loop_stack'. Each object points to the
191 next object of the same type through the `next' field.
192
193 Some constructs are visible to `break' exit-statements and others
194 are not. Which constructs are visible depends on the language.
195 Therefore, the data structure allows each construct to be visible
196 or not, according to the args given when the construct is started.
197 The construct is visible if the `exit_label' field is non-null.
198 In that case, the value should be a CODE_LABEL rtx. */
199
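/* Illustrative example: while expanding

       while (a) { if (b) break; }

   the loop contributes one `struct nesting' on loop_stack and the
   `if' contributes one on cond_stack; both are also chained onto
   nesting_stack through the `all' field, with the `if' entry
   innermost.  The `break' is expanded using the innermost construct
   whose `exit_label' is non-null, here the loop.  */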
200 struct nesting
201 {
202 struct nesting *all;
203 struct nesting *next;
204 int depth;
205 rtx exit_label;
206 union
207 {
208 /* For conds (if-then and if-then-else statements). */
209 struct
210 {
211 /* Label for the end of the if construct.
212 There is none if EXITFLAG was not set
213 and no `else' has been seen yet. */
214 rtx endif_label;
215 /* Label for the end of this alternative.
216 This may be the end of the if or the next else/elseif. */
217 rtx next_label;
218 } cond;
219 /* For loops. */
220 struct
221 {
222 /* Label at the top of the loop; place to loop back to. */
223 rtx start_label;
224 /* Label at the end of the whole construct. */
225 rtx end_label;
226 /* Label before a jump that branches to the end of the whole
227 construct. This is where destructors go if any. */
228 rtx alt_end_label;
229 /* Label for `continue' statement to jump to;
230 this is in front of the stepper of the loop. */
231 rtx continue_label;
232 } loop;
233 /* For variable binding contours. */
234 struct
235 {
236 /* Sequence number of this binding contour within the function,
237 in order of entry. */
238 int block_start_count;
239 /* Nonzero => value to restore stack to on exit. */
240 rtx stack_level;
241 /* The NOTE that starts this contour.
242 Used by expand_goto to check whether the destination
243 is within each contour or not. */
244 rtx first_insn;
245 /* Innermost containing binding contour that has a stack level. */
246 struct nesting *innermost_stack_block;
247 /* List of cleanups to be run on exit from this contour.
248 This is a list of expressions to be evaluated.
249 The TREE_PURPOSE of each link is the ..._DECL node
250 which the cleanup pertains to. */
251 tree cleanups;
252 /* List of cleanup-lists of blocks containing this block,
253 as they were at the locus where this block appears.
254 There is an element for each containing block,
255 ordered innermost containing block first.
256 The tail of this list can be 0,
257 if all remaining elements would be empty lists.
258 The element's TREE_VALUE is the cleanup-list of that block,
259 which may be null. */
260 tree outer_cleanups;
261 /* Chain of labels defined inside this binding contour.
262 For contours that have stack levels or cleanups. */
263 struct label_chain *label_chain;
264 /* Number of function calls seen, as of start of this block. */
265 int function_call_count;
266 /* Nonzero if this is associated with an EH region. */
267 int exception_region;
268 /* The saved target_temp_slot_level from our outer block.
269 We may reset target_temp_slot_level to be the level of
270 this block, if that is done, target_temp_slot_level
271 reverts to the saved target_temp_slot_level at the very
272 end of the block. */
273 int target_temp_slot_level;
274 /* True if we are currently emitting insns in an area of
275 output code that is controlled by a conditional
276 expression. This is used by the cleanup handling code to
277 generate conditional cleanup actions. */
278 int conditional_code;
279 /* A place to move the start of the exception region for any
280 of the conditional cleanups, must be at the end or after
281 the start of the last unconditional cleanup, and before any
282 conditional branch points. */
283 rtx last_unconditional_cleanup;
284 /* When in a conditional context, this is the specific
285 cleanup list associated with last_unconditional_cleanup,
286 where we place the conditionalized cleanups. */
287 tree *cleanup_ptr;
288 } block;
289 /* For switch (C) or case (Pascal) statements,
290 and also for dummies (see `expand_start_case_dummy'). */
291 struct
292 {
293 /* The insn after which the case dispatch should finally
294 be emitted. Zero for a dummy. */
295 rtx start;
296 /* A list of case labels; it is first built as an AVL tree.
297 During expand_end_case, this is converted to a list, and may be
298 rearranged into a nearly balanced binary tree. */
299 struct case_node *case_list;
300 /* Label to jump to if no case matches. */
301 tree default_label;
302 /* The expression to be dispatched on. */
303 tree index_expr;
304 /* Type that INDEX_EXPR should be converted to. */
305 tree nominal_type;
306 /* Number of range exprs in case statement. */
307 int num_ranges;
308 /* Name of this kind of statement, for warnings. */
309 char *printname;
310 /* Used to save no_line_numbers till we see the first case label.
311 We set this to -1 when we see the first case label in this
312 case statement. */
313 int line_number_status;
314 } case_stmt;
315 } data;
316 };
317
318 /* Chain of all pending binding contours. */
319 struct nesting *block_stack;
320
321 /* If any new stacks are added here, add them to POPSTACKS too. */
322
323 /* Chain of all pending binding contours that restore stack levels
324 or have cleanups. */
325 struct nesting *stack_block_stack;
326
327 /* Chain of all pending conditional statements. */
328 struct nesting *cond_stack;
329
330 /* Chain of all pending loops. */
331 struct nesting *loop_stack;
332
333 /* Chain of all pending case or switch statements. */
334 struct nesting *case_stack;
335
336 /* Separate chain including all of the above,
337 chained through the `all' field. */
338 struct nesting *nesting_stack;
339
340 /* Number of entries on nesting_stack now. */
341 int nesting_depth;
342
343 /* Allocate and return a new `struct nesting'. */
344
345 #define ALLOC_NESTING() \
346 (struct nesting *) obstack_alloc (&stmt_obstack, sizeof (struct nesting))
347
348 /* Pop the nesting stack element by element until we pop off
349 the element which is at the top of STACK.
350 Update all the other stacks, popping off elements from them
351 as we pop them from nesting_stack. */
352
353 #define POPSTACK(STACK) \
354 do { struct nesting *target = STACK; \
355 struct nesting *this; \
356 do { this = nesting_stack; \
357 if (loop_stack == this) \
358 loop_stack = loop_stack->next; \
359 if (cond_stack == this) \
360 cond_stack = cond_stack->next; \
361 if (block_stack == this) \
362 block_stack = block_stack->next; \
363 if (stack_block_stack == this) \
364 stack_block_stack = stack_block_stack->next; \
365 if (case_stack == this) \
366 case_stack = case_stack->next; \
367 nesting_depth = nesting_stack->depth - 1; \
368 nesting_stack = this->all; \
369 obstack_free (&stmt_obstack, this); } \
370 while (this != target); } while (0)
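/* Usage sketch (hedged; the callers live later in this file): the
   `expand_end_WHATEVER' function for a construct typically finishes
   with POPSTACK on its own stack, e.g. POPSTACK (loop_stack) when a
   loop ends, which also unwinds any inner constructs still sitting
   above that element on nesting_stack.  */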
371 \f
372 /* In some cases it is impossible to generate code for a forward goto
373 until the label definition is seen. This happens when it may be necessary
374 for the goto to reset the stack pointer: we don't yet know how to do that.
375 So expand_goto puts an entry on this fixup list.
376 Each time a binding contour that resets the stack is exited,
377 we check each fixup.
378 If the target label has now been defined, we can insert the proper code. */
379
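/* Hedged example of when a fixup is needed (identifiers invented):

       {
         int v[n];          -- this contour must restore a stack level
         ...
         goto out;          -- `out' has not been defined yet
         ...
       }
       out: ;

   When the goto is expanded, the stack level needed at `out' is not
   yet known, so expand_fixup records a `struct goto_fixup' (below);
   when the contour is exited, fixup_gotos inserts the proper stack
   restore once the label's position is known.  */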
380 struct goto_fixup
381 {
382 /* Points to following fixup. */
383 struct goto_fixup *next;
384 /* Points to the insn before the jump insn.
385 If more code must be inserted, it goes after this insn. */
386 rtx before_jump;
387 /* The LABEL_DECL that this jump is jumping to, or 0
388 for break, continue or return. */
389 tree target;
390 /* The BLOCK for the place where this goto was found. */
391 tree context;
392 /* The CODE_LABEL rtx that this is jumping to. */
393 rtx target_rtl;
394 /* Number of binding contours started in current function
395 before the label reference. */
396 int block_start_count;
397 /* The outermost stack level that should be restored for this jump.
398 Each time a binding contour that resets the stack is exited,
399 if the target label is *not* yet defined, this slot is updated. */
400 rtx stack_level;
401 /* List of lists of cleanup expressions to be run by this goto.
402 There is one element for each block that this goto is within.
403 The tail of this list can be 0,
404 if all remaining elements would be empty.
405 The TREE_VALUE contains the cleanup list of that block as of the
406 time this goto was seen.
407 The TREE_ADDRESSABLE flag is 1 for a block that has been exited. */
408 tree cleanup_list_list;
409 };
410
411 static struct goto_fixup *goto_fixup_chain;
412
413 /* Within any binding contour that must restore a stack level,
414 all labels are recorded with a chain of these structures. */
415
416 struct label_chain
417 {
418 /* Points to the following label_chain entry. */
419 struct label_chain *next;
420 tree label;
421 };
422
423
424 /* Non-zero if we are using EH to handle cleanups. */
425 static int using_eh_for_cleanups_p = 0;
426
427
428 static int n_occurrences PROTO((int, char *));
429 static void expand_goto_internal PROTO((tree, rtx, rtx));
430 static int expand_fixup PROTO((tree, rtx, rtx));
431 static void expand_nl_handler_label PROTO((rtx, rtx));
432 static void expand_nl_goto_receiver PROTO((void));
433 static void expand_nl_goto_receivers PROTO((struct nesting *));
434 static void fixup_gotos PROTO((struct nesting *, rtx, tree,
435 rtx, int));
436 static void expand_null_return_1 PROTO((rtx, int));
437 static void expand_value_return PROTO((rtx));
438 static int tail_recursion_args PROTO((tree, tree));
439 static void expand_cleanups PROTO((tree, tree, int, int));
440 static void check_seenlabel PROTO((void));
441 static void do_jump_if_equal PROTO((rtx, rtx, rtx, int));
442 static int estimate_case_costs PROTO((case_node_ptr));
443 static void group_case_nodes PROTO((case_node_ptr));
444 static void balance_case_nodes PROTO((case_node_ptr *,
445 case_node_ptr));
446 static int node_has_low_bound PROTO((case_node_ptr, tree));
447 static int node_has_high_bound PROTO((case_node_ptr, tree));
448 static int node_is_bounded PROTO((case_node_ptr, tree));
449 static void emit_jump_if_reachable PROTO((rtx));
450 static void emit_case_nodes PROTO((rtx, case_node_ptr, rtx, tree));
451 static int add_case_node PROTO((tree, tree, tree, tree *));
452 static struct case_node *case_tree2list PROTO((case_node *, case_node *));
453 \f
454 void
455 using_eh_for_cleanups ()
456 {
457 using_eh_for_cleanups_p = 1;
458 }
459
460 void
461 init_stmt ()
462 {
463 gcc_obstack_init (&stmt_obstack);
464 init_eh ();
465 }
466
467 void
468 init_stmt_for_function ()
469 {
470 /* We are not currently within any block, conditional, loop or case. */
471 block_stack = 0;
472 stack_block_stack = 0;
473 loop_stack = 0;
474 case_stack = 0;
475 cond_stack = 0;
476 nesting_stack = 0;
477 nesting_depth = 0;
478
479 block_start_count = 0;
480
481 /* No gotos have been expanded yet. */
482 goto_fixup_chain = 0;
483
484 /* We are not processing a ({...}) grouping. */
485 expr_stmts_for_value = 0;
486 last_expr_type = 0;
487
488 init_eh_for_function ();
489 }
490
491 void
492 save_stmt_status (p)
493 struct function *p;
494 {
495 p->block_stack = block_stack;
496 p->stack_block_stack = stack_block_stack;
497 p->cond_stack = cond_stack;
498 p->loop_stack = loop_stack;
499 p->case_stack = case_stack;
500 p->nesting_stack = nesting_stack;
501 p->nesting_depth = nesting_depth;
502 p->block_start_count = block_start_count;
503 p->last_expr_type = last_expr_type;
504 p->last_expr_value = last_expr_value;
505 p->expr_stmts_for_value = expr_stmts_for_value;
506 p->emit_filename = emit_filename;
507 p->emit_lineno = emit_lineno;
508 p->goto_fixup_chain = goto_fixup_chain;
509 save_eh_status (p);
510 }
511
512 void
513 restore_stmt_status (p)
514 struct function *p;
515 {
516 block_stack = p->block_stack;
517 stack_block_stack = p->stack_block_stack;
518 cond_stack = p->cond_stack;
519 loop_stack = p->loop_stack;
520 case_stack = p->case_stack;
521 nesting_stack = p->nesting_stack;
522 nesting_depth = p->nesting_depth;
523 block_start_count = p->block_start_count;
524 last_expr_type = p->last_expr_type;
525 last_expr_value = p->last_expr_value;
526 expr_stmts_for_value = p->expr_stmts_for_value;
527 emit_filename = p->emit_filename;
528 emit_lineno = p->emit_lineno;
529 goto_fixup_chain = p->goto_fixup_chain;
530 restore_eh_status (p);
531 }
532 \f
533 /* Emit a no-op instruction. */
534
535 void
536 emit_nop ()
537 {
538 rtx last_insn;
539
540 last_insn = get_last_insn ();
541 if (!optimize
542 && (GET_CODE (last_insn) == CODE_LABEL
543 || (GET_CODE (last_insn) == NOTE
544 && prev_real_insn (last_insn) == 0)))
545 emit_insn (gen_nop ());
546 }
547 \f
548 /* Return the rtx-label that corresponds to a LABEL_DECL,
549 creating it if necessary. */
550
551 rtx
552 label_rtx (label)
553 tree label;
554 {
555 if (TREE_CODE (label) != LABEL_DECL)
556 abort ();
557
558 if (DECL_RTL (label))
559 return DECL_RTL (label);
560
561 return DECL_RTL (label) = gen_label_rtx ();
562 }
563
564 /* Add an unconditional jump to LABEL as the next sequential instruction. */
565
566 void
567 emit_jump (label)
568 rtx label;
569 {
570 do_pending_stack_adjust ();
571 emit_jump_insn (gen_jump (label));
572 emit_barrier ();
573 }
574
575 /* Emit code to jump to the address
576 specified by the pointer expression EXP. */
577
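/* For example, the GNU C labels-as-values extension

       void *p = &&lab;  ...  goto *p;

   reaches this point: the front end hands the pointer expression to
   expand_computed_goto, which emits an indirect jump (the variable
   names here are made up).  */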
578 void
579 expand_computed_goto (exp)
580 tree exp;
581 {
582 rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);
583
584 #ifdef POINTERS_EXTEND_UNSIGNED
585 x = convert_memory_address (Pmode, x);
586 #endif
587
588 emit_queue ();
589 /* Be sure the function is executable. */
590 if (current_function_check_memory_usage)
591 emit_library_call (chkr_check_exec_libfunc, 1,
592 VOIDmode, 1, x, ptr_mode);
593
594 do_pending_stack_adjust ();
595 emit_indirect_jump (x);
596 }
597 \f
598 /* Handle goto statements and the labels that they can go to. */
599
600 /* Specify the location in the RTL code of a label LABEL,
601 which is a LABEL_DECL tree node.
602
603 This is used for the kind of label that the user can jump to with a
604 goto statement, and for alternatives of a switch or case statement.
605 RTL labels generated for loops and conditionals don't go through here;
606 they are generated directly at the RTL level, by other functions below.
607
608 Note that this has nothing to do with defining label *names*.
609 Languages vary in how they do that and what that even means. */
610
611 void
612 expand_label (label)
613 tree label;
614 {
615 struct label_chain *p;
616
617 do_pending_stack_adjust ();
618 emit_label (label_rtx (label));
619 if (DECL_NAME (label))
620 LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));
621
622 if (stack_block_stack != 0)
623 {
624 p = (struct label_chain *) oballoc (sizeof (struct label_chain));
625 p->next = stack_block_stack->data.block.label_chain;
626 stack_block_stack->data.block.label_chain = p;
627 p->label = label;
628 }
629 }
630
631 /* Declare that LABEL (a LABEL_DECL) may be used for nonlocal gotos
632 from nested functions. */
633
634 void
635 declare_nonlocal_label (label)
636 tree label;
637 {
638 rtx slot = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
639
640 nonlocal_labels = tree_cons (NULL_TREE, label, nonlocal_labels);
641 LABEL_PRESERVE_P (label_rtx (label)) = 1;
642 if (nonlocal_goto_handler_slots == 0)
643 {
644 emit_stack_save (SAVE_NONLOCAL,
645 &nonlocal_goto_stack_level,
646 PREV_INSN (tail_recursion_reentry));
647 }
648 nonlocal_goto_handler_slots
649 = gen_rtx_EXPR_LIST (VOIDmode, slot, nonlocal_goto_handler_slots);
650 }
651
652 /* Generate RTL code for a `goto' statement with target label LABEL.
653 LABEL should be a LABEL_DECL tree node that was or will later be
654 defined with `expand_label'. */
655
656 void
657 expand_goto (label)
658 tree label;
659 {
660 tree context;
661
662 /* Check for a nonlocal goto to a containing function. */
663 context = decl_function_context (label);
664 if (context != 0 && context != current_function_decl)
665 {
666 struct function *p = find_function_data (context);
667 rtx label_ref = gen_rtx_LABEL_REF (Pmode, label_rtx (label));
668 rtx temp, handler_slot;
669 tree link;
670
671 /* Find the corresponding handler slot for this label. */
672 handler_slot = p->nonlocal_goto_handler_slots;
673 for (link = p->nonlocal_labels; TREE_VALUE (link) != label;
674 link = TREE_CHAIN (link))
675 handler_slot = XEXP (handler_slot, 1);
676 handler_slot = XEXP (handler_slot, 0);
677
678 p->has_nonlocal_label = 1;
679 current_function_has_nonlocal_goto = 1;
680 LABEL_REF_NONLOCAL_P (label_ref) = 1;
681
682 /* Copy the rtl for the slots so that they won't be shared in
683 case the virtual stack vars register gets instantiated differently
684 in the parent than in the child. */
685
686 #if HAVE_nonlocal_goto
687 if (HAVE_nonlocal_goto)
688 emit_insn (gen_nonlocal_goto (lookup_static_chain (label),
689 copy_rtx (handler_slot),
690 copy_rtx (p->nonlocal_goto_stack_level),
691 label_ref));
692 else
693 #endif
694 {
695 rtx addr;
696
697 /* Restore frame pointer for containing function.
698 This sets the actual hard register used for the frame pointer
699 to the location of the function's incoming static chain info.
700 The non-local goto handler will then adjust it to contain the
701 proper value and reload the argument pointer, if needed. */
702 emit_move_insn (hard_frame_pointer_rtx, lookup_static_chain (label));
703
704 /* We have now loaded the frame pointer hardware register with
705 the address that corresponds to the start of the virtual
706 stack vars. So replace virtual_stack_vars_rtx in all
707 addresses we use with hard_frame_pointer_rtx. */
708
709 /* Get addr of containing function's current nonlocal goto handler,
710 which will do any cleanups and then jump to the label. */
711 addr = copy_rtx (handler_slot);
712 temp = copy_to_reg (replace_rtx (addr, virtual_stack_vars_rtx,
713 hard_frame_pointer_rtx));
714
715 /* Restore the stack pointer. Note this uses fp just restored. */
716 addr = p->nonlocal_goto_stack_level;
717 if (addr)
718 addr = replace_rtx (copy_rtx (addr),
719 virtual_stack_vars_rtx,
720 hard_frame_pointer_rtx);
721
722 emit_stack_restore (SAVE_NONLOCAL, addr, NULL_RTX);
723
724 /* USE of hard_frame_pointer_rtx added for consistency; not clear if
725 really needed. */
726 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
727 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
728 emit_indirect_jump (temp);
729 }
730 }
731 else
732 expand_goto_internal (label, label_rtx (label), NULL_RTX);
733 }
734
735 /* Generate RTL code for a `goto' statement with target label BODY.
736 LABEL should be a LABEL_REF.
737 LAST_INSN, if non-0, is the rtx we should consider as the last
738 insn emitted (for the purposes of cleaning up a return). */
739
740 static void
741 expand_goto_internal (body, label, last_insn)
742 tree body;
743 rtx label;
744 rtx last_insn;
745 {
746 struct nesting *block;
747 rtx stack_level = 0;
748
749 if (GET_CODE (label) != CODE_LABEL)
750 abort ();
751
752 /* If label has already been defined, we can tell now
753 whether and how we must alter the stack level. */
754
755 if (PREV_INSN (label) != 0)
756 {
757 /* Find the innermost pending block that contains the label.
758 (Check containment by comparing insn-uids.)
759 Then restore the outermost stack level within that block,
760 and do cleanups of all blocks contained in it. */
761 for (block = block_stack; block; block = block->next)
762 {
763 if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
764 break;
765 if (block->data.block.stack_level != 0)
766 stack_level = block->data.block.stack_level;
767 /* Execute the cleanups for blocks we are exiting. */
768 if (block->data.block.cleanups != 0)
769 {
770 expand_cleanups (block->data.block.cleanups, NULL_TREE, 1, 1);
771 do_pending_stack_adjust ();
772 }
773 }
774
775 if (stack_level)
776 {
777 /* Ensure stack adjust isn't done by emit_jump, as this
778 would clobber the stack pointer. This one should be
779 deleted as dead by flow. */
780 clear_pending_stack_adjust ();
781 do_pending_stack_adjust ();
782 emit_stack_restore (SAVE_BLOCK, stack_level, NULL_RTX);
783 }
784
785 if (body != 0 && DECL_TOO_LATE (body))
786 error ("jump to `%s' invalidly jumps into binding contour",
787 IDENTIFIER_POINTER (DECL_NAME (body)));
788 }
789 /* Label not yet defined: may need to put this goto
790 on the fixup list. */
791 else if (! expand_fixup (body, label, last_insn))
792 {
793 /* No fixup needed. Record that the label is the target
794 of at least one goto that has no fixup. */
795 if (body != 0)
796 TREE_ADDRESSABLE (body) = 1;
797 }
798
799 emit_jump (label);
800 }
801 \f
802 /* Generate if necessary a fixup for a goto
803 whose target label in tree structure (if any) is TREE_LABEL
804 and whose target in rtl is RTL_LABEL.
805
806 If LAST_INSN is nonzero, we pretend that the jump appears
807 after insn LAST_INSN instead of at the current point in the insn stream.
808
809 The fixup will be used later to insert insns just before the goto.
810 Those insns will restore the stack level as appropriate for the
811 target label, and will (in the case of C++) also invoke any object
812 destructors which have to be invoked when we exit the scopes which
813 are exited by the goto.
814
815 Value is nonzero if a fixup is made. */
816
817 static int
818 expand_fixup (tree_label, rtl_label, last_insn)
819 tree tree_label;
820 rtx rtl_label;
821 rtx last_insn;
822 {
823 struct nesting *block, *end_block;
824
825 /* See if we can recognize which block the label will be output in.
826 This is possible in some very common cases.
827 If we succeed, set END_BLOCK to that block.
828 Otherwise, set it to 0. */
829
830 if (cond_stack
831 && (rtl_label == cond_stack->data.cond.endif_label
832 || rtl_label == cond_stack->data.cond.next_label))
833 end_block = cond_stack;
834 /* If we are in a loop, recognize certain labels which
835 are likely targets. This reduces the number of fixups
836 we need to create. */
837 else if (loop_stack
838 && (rtl_label == loop_stack->data.loop.start_label
839 || rtl_label == loop_stack->data.loop.end_label
840 || rtl_label == loop_stack->data.loop.continue_label))
841 end_block = loop_stack;
842 else
843 end_block = 0;
844
845 /* Now set END_BLOCK to the binding level to which we will return. */
846
847 if (end_block)
848 {
849 struct nesting *next_block = end_block->all;
850 block = block_stack;
851
852 /* First see if the END_BLOCK is inside the innermost binding level.
853 If so, then no cleanups or stack levels are relevant. */
854 while (next_block && next_block != block)
855 next_block = next_block->all;
856
857 if (next_block)
858 return 0;
859
860 /* Otherwise, set END_BLOCK to the innermost binding level
861 which is outside the relevant control-structure nesting. */
862 next_block = block_stack->next;
863 for (block = block_stack; block != end_block; block = block->all)
864 if (block == next_block)
865 next_block = next_block->next;
866 end_block = next_block;
867 }
868
869 /* Does any containing block have a stack level or cleanups?
870 If not, no fixup is needed, and that is the normal case
871 (the only case, for standard C). */
872 for (block = block_stack; block != end_block; block = block->next)
873 if (block->data.block.stack_level != 0
874 || block->data.block.cleanups != 0)
875 break;
876
877 if (block != end_block)
878 {
879 /* Ok, a fixup is needed. Add a fixup to the list of such. */
880 struct goto_fixup *fixup
881 = (struct goto_fixup *) oballoc (sizeof (struct goto_fixup));
882 /* In case an old stack level is restored, make sure that comes
883 after any pending stack adjust. */
884 /* ?? If the fixup isn't to come at the present position,
885 doing the stack adjust here isn't useful. Doing it with our
886 settings at that location isn't useful either. Let's hope
887 someone does it! */
888 if (last_insn == 0)
889 do_pending_stack_adjust ();
890 fixup->target = tree_label;
891 fixup->target_rtl = rtl_label;
892
893 /* Create a BLOCK node and a corresponding matched set of
894 NOTE_INSN_BEGIN_BLOCK and NOTE_INSN_END_BLOCK notes at
895 this point. The notes will encapsulate any and all fixup
896 code which we might later insert at this point in the insn
897 stream. Also, the BLOCK node will be the parent (i.e. the
898 `SUPERBLOCK') of any other BLOCK nodes which we might create
899 later on when we are expanding the fixup code.
900
901 Note that optimization passes (including expand_end_loop)
902 might move the *_BLOCK notes away, so we use a NOTE_INSN_DELETED
903 as a placeholder. */
904
905 {
906 register rtx original_before_jump
907 = last_insn ? last_insn : get_last_insn ();
908 rtx start;
909
910 start_sequence ();
911 pushlevel (0);
912 start = emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
913 fixup->before_jump = emit_note (NULL_PTR, NOTE_INSN_DELETED);
914 last_block_end_note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
915 fixup->context = poplevel (1, 0, 0); /* Create the BLOCK node now! */
916 end_sequence ();
917 emit_insns_after (start, original_before_jump);
918 }
919
920 fixup->block_start_count = block_start_count;
921 fixup->stack_level = 0;
922 fixup->cleanup_list_list
923 = ((block->data.block.outer_cleanups
924 || block->data.block.cleanups)
925 ? tree_cons (NULL_TREE, block->data.block.cleanups,
926 block->data.block.outer_cleanups)
927 : 0);
928 fixup->next = goto_fixup_chain;
929 goto_fixup_chain = fixup;
930 }
931
932 return block != 0;
933 }
934
935
936 \f
937 /* Expand any needed fixups in the outermost binding level of the
938 function. FIRST_INSN is the first insn in the function. */
939
940 void
941 expand_fixups (first_insn)
942 rtx first_insn;
943 {
944 fixup_gotos (NULL_PTR, NULL_RTX, NULL_TREE, first_insn, 0);
945 }
946
947 /* When exiting a binding contour, process all pending gotos requiring fixups.
948 THISBLOCK is the structure that describes the block being exited.
949 STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
950 CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
951 FIRST_INSN is the insn that began this contour.
952
953 Gotos that jump out of this contour must restore the
954 stack level and do the cleanups before actually jumping.
955
956 DONT_JUMP_IN nonzero means report an error if there is a jump into this
957 contour from before the beginning of the contour.
958 This is also done if STACK_LEVEL is nonzero. */
959
960 static void
961 fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
962 struct nesting *thisblock;
963 rtx stack_level;
964 tree cleanup_list;
965 rtx first_insn;
966 int dont_jump_in;
967 {
968 register struct goto_fixup *f, *prev;
969
970 /* F is the fixup we are considering; PREV is the previous one. */
971 /* We run this loop in two passes so that cleanups of exited blocks
972 are run first, and blocks that are exited are marked so
973 afterwards. */
974
975 for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
976 {
977 /* Test for a fixup that is inactive because it is already handled. */
978 if (f->before_jump == 0)
979 {
980 /* Delete inactive fixup from the chain, if that is easy to do. */
981 if (prev != 0)
982 prev->next = f->next;
983 }
984 /* Has this fixup's target label been defined?
985 If so, we can finalize it. */
986 else if (PREV_INSN (f->target_rtl) != 0)
987 {
988 register rtx cleanup_insns;
989
990 /* Get the first non-label after the label
991 this goto jumps to. If that's before this scope begins,
992 we don't have a jump into the scope. */
993 rtx after_label = f->target_rtl;
994 while (after_label != 0 && GET_CODE (after_label) == CODE_LABEL)
995 after_label = NEXT_INSN (after_label);
996
997 /* If this fixup jumped into this contour from before the beginning
998 of this contour, report an error. */
999 /* ??? Bug: this does not detect jumping in through intermediate
1000 blocks that have stack levels or cleanups.
1001 It detects only a problem with the innermost block
1002 around the label. */
1003 if (f->target != 0
1004 && (dont_jump_in || stack_level || cleanup_list)
1005 /* If AFTER_LABEL is 0, it means the jump goes to the end
1006 of the rtl, which means it jumps into this scope. */
1007 && (after_label == 0
1008 || INSN_UID (first_insn) < INSN_UID (after_label))
1009 && INSN_UID (first_insn) > INSN_UID (f->before_jump)
1010 && ! DECL_ERROR_ISSUED (f->target))
1011 {
1012 error_with_decl (f->target,
1013 "label `%s' used before containing binding contour");
1014 /* Prevent multiple errors for one label. */
1015 DECL_ERROR_ISSUED (f->target) = 1;
1016 }
1017
1018 /* We will expand the cleanups into a sequence of their own and
1019 then later on we will attach this new sequence to the insn
1020 stream just ahead of the actual jump insn. */
1021
1022 start_sequence ();
1023
1024 /* Temporarily restore the lexical context where we will
1025 logically be inserting the fixup code. We do this for the
1026 sake of getting the debugging information right. */
1027
1028 pushlevel (0);
1029 set_block (f->context);
1030
1031 /* Expand the cleanups for blocks this jump exits. */
1032 if (f->cleanup_list_list)
1033 {
1034 tree lists;
1035 for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
1036 /* Marked elements correspond to blocks that have been closed.
1037 Do their cleanups. */
1038 if (TREE_ADDRESSABLE (lists)
1039 && TREE_VALUE (lists) != 0)
1040 {
1041 expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
1042 /* Pop any pushes done in the cleanups,
1043 in case function is about to return. */
1044 do_pending_stack_adjust ();
1045 }
1046 }
1047
1048 /* Restore stack level for the biggest contour that this
1049 jump jumps out of. */
1050 if (f->stack_level)
1051 emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);
1052
1053 /* Finish up the sequence containing the insns which implement the
1054 necessary cleanups, and then attach that whole sequence to the
1055 insn stream just ahead of the actual jump insn. Attaching it
1056 at that point ensures that any cleanups which are in fact
1057 implicit C++ object destructions (which must be executed upon
1058 leaving the block) appear (to the debugger) to be taking place
1059 in an area of the generated code where the object(s) being
1060 destructed are still "in scope". */
1061
1062 cleanup_insns = get_insns ();
1063 poplevel (1, 0, 0);
1064
1065 end_sequence ();
1066 emit_insns_after (cleanup_insns, f->before_jump);
1067
1068
1069 f->before_jump = 0;
1070 }
1071 }
1072
1073 /* For any still-undefined labels, do the cleanups for this block now.
1074 We must do this now since items in the cleanup list may go out
1075 of scope when the block ends. */
1076 for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
1077 if (f->before_jump != 0
1078 && PREV_INSN (f->target_rtl) == 0
1079 /* Label has still not appeared. If we are exiting a block with
1080 a stack level to restore, that started before the fixup,
1081 mark this stack level as needing restoration
1082 when the fixup is later finalized. */
1083 && thisblock != 0
1084 /* Note: if THISBLOCK == 0 and we have a label that hasn't appeared, it
1085 means the label is undefined. That's erroneous, but possible. */
1086 && (thisblock->data.block.block_start_count
1087 <= f->block_start_count))
1088 {
1089 tree lists = f->cleanup_list_list;
1090 rtx cleanup_insns;
1091
1092 for (; lists; lists = TREE_CHAIN (lists))
1093 /* If the following elt. corresponds to our containing block
1094 then the elt. must be for this block. */
1095 if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
1096 {
1097 start_sequence ();
1098 pushlevel (0);
1099 set_block (f->context);
1100 expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
1101 do_pending_stack_adjust ();
1102 cleanup_insns = get_insns ();
1103 poplevel (1, 0, 0);
1104 end_sequence ();
1105 if (cleanup_insns != 0)
1106 f->before_jump
1107 = emit_insns_after (cleanup_insns, f->before_jump);
1108
1109 f->cleanup_list_list = TREE_CHAIN (lists);
1110 }
1111
1112 if (stack_level)
1113 f->stack_level = stack_level;
1114 }
1115 }
1116 \f
1117 /* Return the number of times character C occurs in string S. */
1118 static int
1119 n_occurrences (c, s)
1120 int c;
1121 char *s;
1122 {
1123 int n = 0;
1124 while (*s)
1125 n += (*s++ == c);
1126 return n;
1127 }
1128 \f
1129 /* Generate RTL for an asm statement (explicit assembler code).
1130 BODY is a STRING_CST node containing the assembler code text,
1131 or an ADDR_EXPR containing a STRING_CST. */
1132
1133 void
1134 expand_asm (body)
1135 tree body;
1136 {
1137 if (current_function_check_memory_usage)
1138 {
1139 error ("`asm' cannot be used with `-fcheck-memory-usage'");
1140 return;
1141 }
1142
1143 if (TREE_CODE (body) == ADDR_EXPR)
1144 body = TREE_OPERAND (body, 0);
1145
1146 emit_insn (gen_rtx_ASM_INPUT (VOIDmode,
1147 TREE_STRING_POINTER (body)));
1148 last_expr_type = 0;
1149 }
1150
1151 /* Generate RTL for an asm statement with arguments.
1152 STRING is the instruction template.
1153 OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
1154 Each output or input has an expression in the TREE_VALUE and
1155 a constraint-string in the TREE_PURPOSE.
1156 CLOBBERS is a list of STRING_CST nodes each naming a hard register
1157 that is clobbered by this insn.
1158
1159 Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
1160 Some elements of OUTPUTS may be replaced with trees representing temporary
1161 values. The caller should copy those temporary values to the originally
1162 specified lvalues.
1163
1164 VOL nonzero means the insn is volatile; don't optimize it. */
1165
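/* Illustrative mapping, assuming a GNU C front end (not something
   this file itself defines): for the statement

       asm volatile ("add %1,%0" : "=r" (x) : "g" (y) : "cc");

   STRING is the template "add %1,%0"; OUTPUTS is a one-element list
   whose TREE_PURPOSE is the constraint string "=r" and whose
   TREE_VALUE is the expression for `x'; INPUTS likewise carries
   "g" and `y'; CLOBBERS holds the string "cc"; and VOL is nonzero
   because of `volatile'.  */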
1166 void
1167 expand_asm_operands (string, outputs, inputs, clobbers, vol, filename, line)
1168 tree string, outputs, inputs, clobbers;
1169 int vol;
1170 char *filename;
1171 int line;
1172 {
1173 rtvec argvec, constraints;
1174 rtx body;
1175 int ninputs = list_length (inputs);
1176 int noutputs = list_length (outputs);
1177 int ninout = 0;
1178 int nclobbers;
1179 tree tail;
1180 register int i;
1181 /* Vector of RTX's of evaluated output operands. */
1182 rtx *output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
1183 int *inout_opnum = (int *) alloca (noutputs * sizeof (int));
1184 enum machine_mode *inout_mode
1185 = (enum machine_mode *) alloca (noutputs * sizeof (enum machine_mode));
1186 /* The insn we have emitted. */
1187 rtx insn;
1188
1189 /* An ASM with no outputs needs to be treated as volatile, for now. */
1190 if (noutputs == 0)
1191 vol = 1;
1192
1193 if (current_function_check_memory_usage)
1194 {
1195 error ("`asm' cannot be used with `-fcheck-memory-usage'");
1196 return;
1197 }
1198
1199 /* Count the number of meaningful clobbered registers, ignoring what
1200 we would ignore later. */
1201 nclobbers = 0;
1202 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1203 {
1204 char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1205 i = decode_reg_name (regname);
1206 if (i >= 0 || i == -4)
1207 ++nclobbers;
1208 else if (i == -2)
1209 error ("unknown register name `%s' in `asm'", regname);
1210 }
1211
1212 last_expr_type = 0;
1213
1214 /* Check that the number of alternatives is constant across all
1215 operands. */
1216 if (outputs || inputs)
1217 {
1218 tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
1219 int nalternatives = n_occurrences (',', TREE_STRING_POINTER (tmp));
1220 tree next = inputs;
1221
1222 if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
1223 {
1224 error ("too many alternatives in `asm'");
1225 return;
1226 }
1227
1228 tmp = outputs;
1229 while (tmp)
1230 {
1231 char *constraint = TREE_STRING_POINTER (TREE_PURPOSE (tmp));
1232 if (n_occurrences (',', constraint) != nalternatives)
1233 {
1234 error ("operand constraints for `asm' differ in number of alternatives");
1235 return;
1236 }
1237 if (TREE_CHAIN (tmp))
1238 tmp = TREE_CHAIN (tmp);
1239 else
1240 tmp = next, next = 0;
1241 }
1242 }
1243
1244 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1245 {
1246 tree val = TREE_VALUE (tail);
1247 tree type = TREE_TYPE (val);
1248 char *constraint;
1249 char *p;
1250 int c_len;
1251 int j;
1252 int is_inout = 0;
1253 int allows_reg = 0;
1254
1255 /* If there's an erroneous arg, emit no insn. */
1256 if (TREE_TYPE (val) == error_mark_node)
1257 return;
1258
1259 /* Make sure constraint has `=' and does not have `+'. Also, see
1260 if it allows any register. Be liberal on the latter test, since
1261 the worst that happens if we get it wrong is we issue an error
1262 message. */
1263
1264 c_len = TREE_STRING_LENGTH (TREE_PURPOSE (tail)) - 1;
1265 constraint = TREE_STRING_POINTER (TREE_PURPOSE (tail));
1266
1267 /* Allow the `=' or `+' to not be at the beginning of the string,
1268 since it wasn't explicitly documented that way, and there is a
1269 large body of code that puts it last. Swap the character to
1270 the front, so as not to uglify any place else. */
1271 switch (c_len)
1272 {
1273 default:
1274 if ((p = strchr (constraint, '=')) != NULL)
1275 break;
1276 if ((p = strchr (constraint, '+')) != NULL)
1277 break;
1278 case 0:
1279 error ("output operand constraint lacks `='");
1280 return;
1281 }
1282
1283 if (p != constraint)
1284 {
1285 j = *p;
1286 bcopy (constraint, constraint+1, p-constraint);
1287 *constraint = j;
1288
1289 warning ("output constraint `%c' for operand %d is not at the beginning", j, i);
1290 }
1291
1292 is_inout = constraint[0] == '+';
1293 /* Replace '+' with '='. */
1294 constraint[0] = '=';
1295 /* Make sure we can specify the matching operand. */
1296 if (is_inout && i > 9)
1297 {
1298 error ("output operand constraint %d contains `+'", i);
1299 return;
1300 }
1301
1302 for (j = 1; j < c_len; j++)
1303 switch (constraint[j])
1304 {
1305 case '+':
1306 case '=':
1307 error ("operand constraint contains '+' or '=' at illegal position.");
1308 return;
1309
1310 case '%':
1311 if (i + 1 == ninputs + noutputs)
1312 {
1313 error ("`%%' constraint used with last operand");
1314 return;
1315 }
1316 break;
1317
1318 case '?': case '!': case '*': case '&':
1319 case 'V': case 'm': case 'o': case '<': case '>':
1320 case 'E': case 'F': case 'G': case 'H': case 'X':
1321 case 's': case 'i': case 'n':
1322 case 'I': case 'J': case 'K': case 'L': case 'M':
1323 case 'N': case 'O': case 'P': case ',':
1324 #ifdef EXTRA_CONSTRAINT
1325 case 'Q': case 'R': case 'S': case 'T': case 'U':
1326 #endif
1327 break;
1328
1329 case '0': case '1': case '2': case '3': case '4':
1330 case '5': case '6': case '7': case '8': case '9':
1331 error ("matching constraint not valid in output operand");
1332 break;
1333
1334 case 'p': case 'g': case 'r':
1335 default:
1336 allows_reg = 1;
1337 break;
1338 }
1339
1340 /* If an output operand is not a decl or indirect ref and our constraint
1341 allows a register, make a temporary to act as an intermediate.
1342 Make the asm insn write into that, then our caller will copy it to
1343 the real output operand. Likewise for promoted variables. */
1344
1345 if (TREE_CODE (val) == INDIRECT_REF
1346 || (TREE_CODE_CLASS (TREE_CODE (val)) == 'd'
1347 && ! (GET_CODE (DECL_RTL (val)) == REG
1348 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
1349 || ! allows_reg
1350 || is_inout)
1351 {
1352 if (! allows_reg)
1353 mark_addressable (TREE_VALUE (tail));
1354
1355 output_rtx[i]
1356 = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode,
1357 EXPAND_MEMORY_USE_WO);
1358
1359 if (! allows_reg && GET_CODE (output_rtx[i]) != MEM)
1360 error ("output number %d not directly addressable", i);
1361 }
1362 else
1363 {
1364 output_rtx[i] = assign_temp (type, 0, 0, 0);
1365 TREE_VALUE (tail) = make_tree (type, output_rtx[i]);
1366 }
1367
1368 if (is_inout)
1369 {
1370 inout_mode[ninout] = TYPE_MODE (TREE_TYPE (TREE_VALUE (tail)));
1371 inout_opnum[ninout++] = i;
1372 }
1373 }
1374
1375 ninputs += ninout;
1376 if (ninputs + noutputs > MAX_RECOG_OPERANDS)
1377 {
1378 error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
1379 return;
1380 }
1381
1382 /* Make vectors for the expression-rtx and constraint strings. */
1383
1384 argvec = rtvec_alloc (ninputs);
1385 constraints = rtvec_alloc (ninputs);
1386
1387 body = gen_rtx_ASM_OPERANDS (VOIDmode,
1388 TREE_STRING_POINTER (string), "", 0, argvec,
1389 constraints, filename, line);
1390
1391 MEM_VOLATILE_P (body) = vol;
1392
1393 /* Eval the inputs and put them into ARGVEC.
1394 Put their constraints into ASM_INPUTs and store in CONSTRAINTS. */
1395
1396 i = 0;
1397 for (tail = inputs; tail; tail = TREE_CHAIN (tail))
1398 {
1399 int j;
1400 int allows_reg = 0, allows_mem = 0;
1401 char *constraint, *orig_constraint;
1402 int c_len;
1403 rtx op;
1404
1405 /* If there's an erroneous arg, emit no insn,
1406 because the ASM_INPUT would get VOIDmode
1407 and that could cause a crash in reload. */
1408 if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
1409 return;
1410
1411 /* ??? Can this happen, and does the error message make any sense? */
1412 if (TREE_PURPOSE (tail) == NULL_TREE)
1413 {
1414 error ("hard register `%s' listed as input operand to `asm'",
1415 TREE_STRING_POINTER (TREE_VALUE (tail)) );
1416 return;
1417 }
1418
1419 c_len = TREE_STRING_LENGTH (TREE_PURPOSE (tail)) - 1;
1420 constraint = TREE_STRING_POINTER (TREE_PURPOSE (tail));
1421 orig_constraint = constraint;
1422
1423 /* Make sure constraint has neither `=', `+', nor '&'. */
1424
1425 for (j = 0; j < c_len; j++)
1426 switch (constraint[j])
1427 {
1428 case '+': case '=': case '&':
1429 if (constraint == orig_constraint)
1430 {
1431 error ("input operand constraint contains `%c'", constraint[j]);
1432 return;
1433 }
1434 break;
1435
1436 case '%':
1437 if (constraint == orig_constraint
1438 && i + 1 == ninputs - ninout)
1439 {
1440 error ("`%%' constraint used with last operand");
1441 return;
1442 }
1443 break;
1444
1445 case 'V': case 'm': case 'o':
1446 allows_mem = 1;
1447 break;
1448
1449 case '<': case '>':
1450 case '?': case '!': case '*':
1451 case 'E': case 'F': case 'G': case 'H': case 'X':
1452 case 's': case 'i': case 'n':
1453 case 'I': case 'J': case 'K': case 'L': case 'M':
1454 case 'N': case 'O': case 'P': case ',':
1455 #ifdef EXTRA_CONSTRAINT
1456 case 'Q': case 'R': case 'S': case 'T': case 'U':
1457 #endif
1458 break;
1459
1460 /* Whether or not a numeric constraint allows a register is
1461 decided by the matching constraint, and so there is no need
1462 to do anything special with them. We must handle them in
1463 the default case, so that we don't unnecessarily force
1464 operands to memory. */
1465 case '0': case '1': case '2': case '3': case '4':
1466 case '5': case '6': case '7': case '8': case '9':
1467 if (constraint[j] >= '0' + noutputs)
1468 {
1469 error
1470 ("matching constraint references invalid operand number");
1471 return;
1472 }
1473
1474 /* Try and find the real constraint for this dup. */
1475 if (j == 0 && c_len == 1)
1476 {
1477 tree o = outputs;
1478 for (j = constraint[j] - '0'; j > 0; --j)
1479 o = TREE_CHAIN (o);
1480
1481 c_len = TREE_STRING_LENGTH (TREE_PURPOSE (o)) - 1;
1482 constraint = TREE_STRING_POINTER (TREE_PURPOSE (o));
1483 j = 0;
1484 break;
1485 }
1486
1487 /* ... fall through ... */
1488
1489 case 'p': case 'r':
1490 default:
1491 allows_reg = 1;
1492 break;
1493
1494 case 'g':
1495 allows_reg = 1;
1496 allows_mem = 1;
1497 break;
1498 }
1499
1500 if (! allows_reg && allows_mem)
1501 mark_addressable (TREE_VALUE (tail));
1502
1503 op = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode, 0);
1504
1505 if (! asm_operand_ok (op, constraint))
1506 {
1507 if (allows_reg)
1508 op = force_reg (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))), op);
1509 else if (!allows_mem)
1510 warning ("asm operand %d probably doesn't match constraints", i);
1511 else if (CONSTANT_P (op))
1512 op = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
1513 op);
1514 else if (GET_CODE (op) == REG
1515 || GET_CODE (op) == SUBREG
1516 || GET_CODE (op) == CONCAT)
1517 {
1518 tree type = TREE_TYPE (TREE_VALUE (tail));
1519 rtx memloc = assign_temp (type, 1, 1, 1);
1520
1521 emit_move_insn (memloc, op);
1522 op = memloc;
1523 }
1524 else if (GET_CODE (op) == MEM && MEM_VOLATILE_P (op))
1525 /* We won't recognize volatile memory as an available
1526 memory_operand at this point. Ignore it. */
1527 ;
1528 else if (queued_subexp_p (op))
1529 ;
1530 else
1531 /* ??? Leave this only until we have experience with what
1532 happens in combine and elsewhere when constraints are
1533 not satisfied. */
1534 warning ("asm operand %d probably doesn't match constraints", i);
1535 }
1536 XVECEXP (body, 3, i) = op;
1537
1538 XVECEXP (body, 4, i) /* constraints */
1539 = gen_rtx_ASM_INPUT (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
1540 orig_constraint);
1541 i++;
1542 }
1543
1544 /* Protect all the operands from the queue,
1545 now that they have all been evaluated. */
1546
1547 for (i = 0; i < ninputs - ninout; i++)
1548 XVECEXP (body, 3, i) = protect_from_queue (XVECEXP (body, 3, i), 0);
1549
1550 for (i = 0; i < noutputs; i++)
1551 output_rtx[i] = protect_from_queue (output_rtx[i], 1);
1552
1553 /* For in-out operands, copy output rtx to input rtx. */
1554 for (i = 0; i < ninout; i++)
1555 {
1556 static char match[9+1][2]
1557 = {"0", "1", "2", "3", "4", "5", "6", "7", "8", "9"};
1558 int j = inout_opnum[i];
1559
1560 XVECEXP (body, 3, ninputs - ninout + i) /* argvec */
1561 = output_rtx[j];
1562 XVECEXP (body, 4, ninputs - ninout + i) /* constraints */
1563 = gen_rtx_ASM_INPUT (inout_mode[j], match[j]);
1564 }
1565
1566 /* Now, for each output, construct an rtx
1567 (set OUTPUT (asm_operands INSN OUTPUTNUMBER OUTPUTCONSTRAINT
1568 ARGVEC CONSTRAINTS))
1569 If there is more than one, put them inside a PARALLEL. */
1570
1571 if (noutputs == 1 && nclobbers == 0)
1572 {
1573 XSTR (body, 1) = TREE_STRING_POINTER (TREE_PURPOSE (outputs));
1574 insn = emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
1575 }
1576 else if (noutputs == 0 && nclobbers == 0)
1577 {
1578 /* No output operands: put in a raw ASM_OPERANDS rtx. */
1579 insn = emit_insn (body);
1580 }
1581 else
1582 {
1583 rtx obody = body;
1584 int num = noutputs;
1585 if (num == 0) num = 1;
1586 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
1587
1588 /* For each output operand, store a SET. */
1589
1590 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1591 {
1592 XVECEXP (body, 0, i)
1593 = gen_rtx_SET (VOIDmode,
1594 output_rtx[i],
1595 gen_rtx_ASM_OPERANDS (VOIDmode,
1596 TREE_STRING_POINTER (string),
1597 TREE_STRING_POINTER (TREE_PURPOSE (tail)),
1598 i, argvec, constraints,
1599 filename, line));
1600 MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
1601 }
1602
1603 /* If there are no outputs (but there are some clobbers)
1604 store the bare ASM_OPERANDS into the PARALLEL. */
1605
1606 if (i == 0)
1607 XVECEXP (body, 0, i++) = obody;
1608
1609 /* Store (clobber REG) for each clobbered register specified. */
1610
1611 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1612 {
1613 char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1614 int j = decode_reg_name (regname);
1615
1616 if (j < 0)
1617 {
1618 if (j == -3) /* `cc', which is not a register */
1619 continue;
1620
1621 if (j == -4) /* `memory', don't cache memory across asm */
1622 {
1623 XVECEXP (body, 0, i++)
1624 = gen_rtx_CLOBBER (VOIDmode,
1625 gen_rtx_MEM (BLKmode,
1626 gen_rtx_SCRATCH (VOIDmode)));
1627 continue;
1628 }
1629
1630 /* Ignore unknown register, error already signaled. */
1631 continue;
1632 }
1633
1634 /* Use QImode since that's guaranteed to clobber just one reg. */
1635 XVECEXP (body, 0, i++)
1636 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (QImode, j));
1637 }
1638
1639 insn = emit_insn (body);
1640 }
1641
1642 free_temp_slots ();
1643 }
1644 \f
1645 /* Generate RTL to evaluate the expression EXP
1646 and remember it in case this is the VALUE in a ({... VALUE; }) construct. */
1647
1648 void
1649 expand_expr_stmt (exp)
1650 tree exp;
1651 {
1652 /* If -W, warn about statements with no side effects,
1653 except for an explicit cast to void (e.g. for assert()), and
1654 except inside a ({...}) where they may be useful. */
1655 if (expr_stmts_for_value == 0 && exp != error_mark_node)
1656 {
1657 if (! TREE_SIDE_EFFECTS (exp) && (extra_warnings || warn_unused)
1658 && !(TREE_CODE (exp) == CONVERT_EXPR
1659 && TREE_TYPE (exp) == void_type_node))
1660 warning_with_file_and_line (emit_filename, emit_lineno,
1661 "statement with no effect");
1662 else if (warn_unused)
1663 warn_if_unused_value (exp);
1664 }
1665
1666 /* If EXP is of function type and we are expanding statements for
1667 value, convert it to pointer-to-function. */
1668 if (expr_stmts_for_value && TREE_CODE (TREE_TYPE (exp)) == FUNCTION_TYPE)
1669 exp = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (exp)), exp);
1670
1671 last_expr_type = TREE_TYPE (exp);
1672 if (flag_syntax_only && ! expr_stmts_for_value)
1673 last_expr_value = 0;
1674 else
1675 last_expr_value = expand_expr (exp,
1676 (expr_stmts_for_value
1677 ? NULL_RTX : const0_rtx),
1678 VOIDmode, 0);
1679
1680 /* If all we do is reference a volatile value in memory,
1681 copy it to a register to be sure it is actually touched. */
1682 if (last_expr_value != 0 && GET_CODE (last_expr_value) == MEM
1683 && TREE_THIS_VOLATILE (exp))
1684 {
1685 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode)
1686 ;
1687 else if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
1688 copy_to_reg (last_expr_value);
1689 else
1690 {
1691 rtx lab = gen_label_rtx ();
1692
1693 /* Compare the value with itself to reference it. */
1694 emit_cmp_insn (last_expr_value, last_expr_value, EQ,
1695 expand_expr (TYPE_SIZE (last_expr_type),
1696 NULL_RTX, VOIDmode, 0),
1697 BLKmode, 0,
1698 TYPE_ALIGN (last_expr_type) / BITS_PER_UNIT);
1699 emit_jump_insn ((*bcc_gen_fctn[(int) EQ]) (lab));
1700 emit_label (lab);
1701 }
1702 }
1703
1704 /* If this expression is part of a ({...}) and is in memory, we may have
1705 to preserve temporaries. */
1706 preserve_temp_slots (last_expr_value);
1707
1708 /* Free any temporaries used to evaluate this expression. Any temporary
1709 used as a result of this expression will already have been preserved
1710 above. */
1711 free_temp_slots ();
1712
1713 emit_queue ();
1714 }
1715
1716 /* Warn if EXP contains any computations whose results are not used.
1717 Return 1 if a warning is printed; 0 otherwise. */
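/* For example, handed the tree for `x + 1' (X neither volatile nor
   otherwise marked as used), the default case below issues the warning;
   a cast to void such as `(void) foo ()', or a comma expression ending
   in a constant such as `(foo (), 0)', returns 0 without warning.  */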
1718
1719 int
1720 warn_if_unused_value (exp)
1721 tree exp;
1722 {
1723 if (TREE_USED (exp))
1724 return 0;
1725
1726 switch (TREE_CODE (exp))
1727 {
1728 case PREINCREMENT_EXPR:
1729 case POSTINCREMENT_EXPR:
1730 case PREDECREMENT_EXPR:
1731 case POSTDECREMENT_EXPR:
1732 case MODIFY_EXPR:
1733 case INIT_EXPR:
1734 case TARGET_EXPR:
1735 case CALL_EXPR:
1736 case METHOD_CALL_EXPR:
1737 case RTL_EXPR:
1738 case TRY_CATCH_EXPR:
1739 case WITH_CLEANUP_EXPR:
1740 case EXIT_EXPR:
1741 /* We don't warn about COND_EXPR because it may be a useful
1742 construct if either arm contains a side effect. */
1743 case COND_EXPR:
1744 return 0;
1745
1746 case BIND_EXPR:
1747 /* For a binding, warn if no side effect within it. */
1748 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1749
1750 case SAVE_EXPR:
1751 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1752
1753 case TRUTH_ORIF_EXPR:
1754 case TRUTH_ANDIF_EXPR:
1755 /* In && or ||, warn if 2nd operand has no side effect. */
1756 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1757
1758 case COMPOUND_EXPR:
1759 if (TREE_NO_UNUSED_WARNING (exp))
1760 return 0;
1761 if (warn_if_unused_value (TREE_OPERAND (exp, 0)))
1762 return 1;
1763 /* Let people do `(foo (), 0)' without a warning. */
1764 if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
1765 return 0;
1766 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1767
1768 case NOP_EXPR:
1769 case CONVERT_EXPR:
1770 case NON_LVALUE_EXPR:
1771 /* Don't warn about values cast to void. */
1772 if (TREE_TYPE (exp) == void_type_node)
1773 return 0;
1774 /* Don't warn about conversions not explicit in the user's program. */
1775 if (TREE_NO_UNUSED_WARNING (exp))
1776 return 0;
1777 /* Assignment to a cast usually results in a cast of a modify.
1778 Don't complain about that. There can be an arbitrary number of
1779 casts before the modify, so we must loop until we find the first
1780 non-cast expression and then test to see if that is a modify. */
1781 {
1782 tree tem = TREE_OPERAND (exp, 0);
1783
1784 while (TREE_CODE (tem) == CONVERT_EXPR || TREE_CODE (tem) == NOP_EXPR)
1785 tem = TREE_OPERAND (tem, 0);
1786
1787 if (TREE_CODE (tem) == MODIFY_EXPR || TREE_CODE (tem) == INIT_EXPR
1788 || TREE_CODE (tem) == CALL_EXPR)
1789 return 0;
1790 }
1791 goto warn;
1792
1793 case INDIRECT_REF:
1794 /* Don't warn about automatic dereferencing of references, since
1795 the user cannot control it. */
1796 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
1797 return warn_if_unused_value (TREE_OPERAND (exp, 0));
1798 /* ... fall through ... */
1799
1800 default:
1801 /* Referencing a volatile value is a side effect, so don't warn. */
1802 if ((TREE_CODE_CLASS (TREE_CODE (exp)) == 'd'
1803 || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
1804 && TREE_THIS_VOLATILE (exp))
1805 return 0;
1806 warn:
1807 warning_with_file_and_line (emit_filename, emit_lineno,
1808 "value computed is not used");
1809 return 1;
1810 }
1811 }
1812
1813 /* Clear out the memory of the last expression evaluated. */
1814
1815 void
1816 clear_last_expr ()
1817 {
1818 last_expr_type = 0;
1819 }
1820
1821 /* Begin a statement which will return a value.
1822 Return the RTL_EXPR for this statement expr.
1823 The caller must save that value and pass it to expand_end_stmt_expr. */
1824
1825 tree
1826 expand_start_stmt_expr ()
1827 {
1828 int momentary;
1829 tree t;
1830
1831 /* Make the RTL_EXPR node temporary, not momentary,
1832 so that rtl_expr_chain doesn't become garbage. */
1833 momentary = suspend_momentary ();
1834 t = make_node (RTL_EXPR);
1835 resume_momentary (momentary);
1836 do_pending_stack_adjust ();
1837 start_sequence_for_rtl_expr (t);
1838 NO_DEFER_POP;
1839 expr_stmts_for_value++;
1840 return t;
1841 }
1842
1843 /* Restore the previous state at the end of a statement that returns a value.
1844 Returns a tree node representing the statement's value and the
1845 insns to compute the value.
1846
1847 The nodes of that expression have been freed by now, so we cannot use them.
1848 But we don't want to do that anyway; the expression has already been
1849 evaluated and now we just want to use the value. So generate an RTL_EXPR
1850 with the proper type and RTL value.
1851
1852 If the last substatement was not an expression,
1853 return something with type `void'. */
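/* Taken together, a front end expanding `({ stmt1; stmt2; val; })'
   would do, roughly,

       tree rtl_exp = expand_start_stmt_expr ();
       ... expand stmt1, stmt2 and `val', each with expand_expr_stmt ...
       rtl_exp = expand_end_stmt_expr (rtl_exp);

   and later use RTL_EXP wherever the value of the ({...}) is needed.  */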
1854
1855 tree
1856 expand_end_stmt_expr (t)
1857 tree t;
1858 {
1859 OK_DEFER_POP;
1860
1861 if (last_expr_type == 0)
1862 {
1863 last_expr_type = void_type_node;
1864 last_expr_value = const0_rtx;
1865 }
1866 else if (last_expr_value == 0)
1867 /* There are some cases where this can happen, such as when the
1868 statement is of void type. */
1869 last_expr_value = const0_rtx;
1870 else if (GET_CODE (last_expr_value) != REG && ! CONSTANT_P (last_expr_value))
1871 /* Remove any possible QUEUED. */
1872 last_expr_value = protect_from_queue (last_expr_value, 0);
1873
1874 emit_queue ();
1875
1876 TREE_TYPE (t) = last_expr_type;
1877 RTL_EXPR_RTL (t) = last_expr_value;
1878 RTL_EXPR_SEQUENCE (t) = get_insns ();
1879
1880 rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);
1881
1882 end_sequence ();
1883
1884 /* Don't consider deleting this expr or containing exprs at tree level. */
1885 TREE_SIDE_EFFECTS (t) = 1;
1886 /* Propagate volatility of the actual RTL expr. */
1887 TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value);
1888
1889 last_expr_type = 0;
1890 expr_stmts_for_value--;
1891
1892 return t;
1893 }
1894 \f
1895 /* Generate RTL for the start of an if-then. COND is the expression
1896 whose truth should be tested.
1897
1898 If EXITFLAG is nonzero, this conditional is visible to
1899 `exit_something'. */
1900
1901 void
1902 expand_start_cond (cond, exitflag)
1903 tree cond;
1904 int exitflag;
1905 {
1906 struct nesting *thiscond = ALLOC_NESTING ();
1907
1908 /* Make an entry on cond_stack for the cond we are entering. */
1909
1910 thiscond->next = cond_stack;
1911 thiscond->all = nesting_stack;
1912 thiscond->depth = ++nesting_depth;
1913 thiscond->data.cond.next_label = gen_label_rtx ();
1914 /* Before we encounter an `else', we don't need a separate exit label
1915 unless there are supposed to be exit statements
1916 to exit this conditional. */
1917 thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
1918 thiscond->data.cond.endif_label = thiscond->exit_label;
1919 cond_stack = thiscond;
1920 nesting_stack = thiscond;
1921
1922 do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
1923 }
1924
1925 /* Generate RTL between then-clause and the elseif-clause
1926 of an if-then-elseif-.... */
1927
1928 void
1929 expand_start_elseif (cond)
1930 tree cond;
1931 {
1932 if (cond_stack->data.cond.endif_label == 0)
1933 cond_stack->data.cond.endif_label = gen_label_rtx ();
1934 emit_jump (cond_stack->data.cond.endif_label);
1935 emit_label (cond_stack->data.cond.next_label);
1936 cond_stack->data.cond.next_label = gen_label_rtx ();
1937 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
1938 }
1939
1940 /* Generate RTL between the then-clause and the else-clause
1941 of an if-then-else. */
1942
1943 void
1944 expand_start_else ()
1945 {
1946 if (cond_stack->data.cond.endif_label == 0)
1947 cond_stack->data.cond.endif_label = gen_label_rtx ();
1948
1949 emit_jump (cond_stack->data.cond.endif_label);
1950 emit_label (cond_stack->data.cond.next_label);
1951 cond_stack->data.cond.next_label = 0; /* No more _else or _elseif calls. */
1952 }
1953
1954 /* After calling expand_start_else, turn this "else" into an "else if"
1955 by providing another condition. */
1956
1957 void
1958 expand_elseif (cond)
1959 tree cond;
1960 {
1961 cond_stack->data.cond.next_label = gen_label_rtx ();
1962 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
1963 }
1964
1965 /* Generate RTL for the end of an if-then.
1966 Pop the record for it off of cond_stack. */
1967
1968 void
1969 expand_end_cond ()
1970 {
1971 struct nesting *thiscond = cond_stack;
1972
1973 do_pending_stack_adjust ();
1974 if (thiscond->data.cond.next_label)
1975 emit_label (thiscond->data.cond.next_label);
1976 if (thiscond->data.cond.endif_label)
1977 emit_label (thiscond->data.cond.endif_label);
1978
1979 POPSTACK (cond_stack);
1980 last_expr_type = 0;
1981 }
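/* To illustrate how the entry points above fit together, a front end
   expanding `if (a) ...; else if (b) ...; else ...;' might call, in order,

       expand_start_cond (a, 0);
       ... expand the first arm ...
       expand_start_elseif (b);
       ... expand the second arm ...
       expand_start_else ();
       ... expand the final arm ...
       expand_end_cond ();

   with the labels between the arms generated and emitted as described
   above.  */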
1982
1983
1984 \f
1985 /* Generate RTL for the start of a loop. EXIT_FLAG is nonzero if this
1986 loop should be exited by `exit_something'. This is a loop for which
1987 `expand_continue' will jump to the top of the loop.
1988
1989 Make an entry on loop_stack to record the labels associated with
1990 this loop. */
1991
1992 struct nesting *
1993 expand_start_loop (exit_flag)
1994 int exit_flag;
1995 {
1996 register struct nesting *thisloop = ALLOC_NESTING ();
1997
1998 /* Make an entry on loop_stack for the loop we are entering. */
1999
2000 thisloop->next = loop_stack;
2001 thisloop->all = nesting_stack;
2002 thisloop->depth = ++nesting_depth;
2003 thisloop->data.loop.start_label = gen_label_rtx ();
2004 thisloop->data.loop.end_label = gen_label_rtx ();
2005 thisloop->data.loop.alt_end_label = 0;
2006 thisloop->data.loop.continue_label = thisloop->data.loop.start_label;
2007 thisloop->exit_label = exit_flag ? thisloop->data.loop.end_label : 0;
2008 loop_stack = thisloop;
2009 nesting_stack = thisloop;
2010
2011 do_pending_stack_adjust ();
2012 emit_queue ();
2013 emit_note (NULL_PTR, NOTE_INSN_LOOP_BEG);
2014 emit_label (thisloop->data.loop.start_label);
2015
2016 return thisloop;
2017 }
2018
2019 /* Like expand_start_loop but for a loop where the continuation point
2020 (for expand_continue_loop) will be specified explicitly. */
2021
2022 struct nesting *
2023 expand_start_loop_continue_elsewhere (exit_flag)
2024 int exit_flag;
2025 {
2026 struct nesting *thisloop = expand_start_loop (exit_flag);
2027 loop_stack->data.loop.continue_label = gen_label_rtx ();
2028 return thisloop;
2029 }
2030
2031 /* Specify the continuation point for a loop started with
2032 expand_start_loop_continue_elsewhere.
2033 Use this at the point in the code to which a continue statement
2034 should jump. */
2035
2036 void
2037 expand_loop_continue_here ()
2038 {
2039 do_pending_stack_adjust ();
2040 emit_note (NULL_PTR, NOTE_INSN_LOOP_CONT);
2041 emit_label (loop_stack->data.loop.continue_label);
2042 }
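/* As an example of how these entry points combine, a front end might
   expand `for (init; cond; incr) body' roughly as

       ... expand INIT ...
       whichloop = expand_start_loop_continue_elsewhere (1);
       expand_exit_loop_if_false (whichloop, cond);
       ... expand BODY ...
       expand_loop_continue_here ();
       ... expand INCR ...
       expand_end_loop ();

   so that `continue' (expand_continue_loop) reaches the INCR part and
   `break' (expand_exit_loop) jumps to the end_label emitted by
   expand_end_loop.  */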
2043
2044 /* Finish a loop. Generate a jump back to the top and the loop-exit label.
2045 Pop the block off of loop_stack. */
2046
2047 void
2048 expand_end_loop ()
2049 {
2050 rtx start_label = loop_stack->data.loop.start_label;
2051 rtx insn = get_last_insn ();
2052
2053 /* Mark the continue-point at the top of the loop if none elsewhere. */
2054 if (start_label == loop_stack->data.loop.continue_label)
2055 emit_note_before (NOTE_INSN_LOOP_CONT, start_label);
2056
2057 do_pending_stack_adjust ();
2058
2059 /* If optimizing, perhaps reorder the loop. If the loop starts with
2060 a loop exit, roll that to the end where it will optimize together
2061 with the jump back.
2062
2063 We look for the conditional branch to the exit, except that once
2064 we find such a branch, we don't look past 30 instructions.
2065
2066 In more detail, if the loop presently looks like this (in pseudo-C):
2067
2068 start_label:
2069 if (test) goto end_label;
2070 body;
2071 goto start_label;
2072 end_label:
2073
2074 transform it to look like:
2075
2076 goto start_label;
2077 newstart_label:
2078 body;
2079 start_label:
2080 if (test) goto end_label;
2081 goto newstart_label;
2082 end_label:
2083
2084 Here, the `test' may actually consist of some reasonably complex
2085 code, terminating in a test. */
2086
2087 if (optimize
2088 &&
2089 ! (GET_CODE (insn) == JUMP_INSN
2090 && GET_CODE (PATTERN (insn)) == SET
2091 && SET_DEST (PATTERN (insn)) == pc_rtx
2092 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE))
2093 {
2094 int eh_regions = 0;
2095 int num_insns = 0;
2096 rtx last_test_insn = NULL_RTX;
2097
2098 /* Scan insns from the top of the loop looking for a qualified
2099 conditional exit. */
2100 for (insn = NEXT_INSN (loop_stack->data.loop.start_label); insn;
2101 insn = NEXT_INSN (insn))
2102 {
2103 if (GET_CODE (insn) == NOTE)
2104 {
2105 if (optimize < 2
2106 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2107 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2108 /* The code that actually moves the exit test will
2109 carefully leave BLOCK notes in their original
2110 location. That means, however, that we can't debug
2111 the exit test itself. So, we refuse to move code
2112 containing BLOCK notes at low optimization levels. */
2113 break;
2114
2115 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
2116 ++eh_regions;
2117 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
2118 {
2119 --eh_regions;
2120 if (eh_regions < 0)
2121 /* We've come to the end of an EH region, but
2122 never saw the beginning of that region. That
2123 means that an EH region begins before the top
2124 of the loop, and ends in the middle of it. The
2125 existence of such a situation violates a basic
2126 assumption in this code, since that would imply
2127 that even when EH_REGIONS is zero, we might
2128 move code out of an exception region. */
2129 abort ();
2130 }
2131
2132 /* We already know this INSN is a NOTE, so there's no
2133 point in looking at it to see if it's a JUMP. */
2134 continue;
2135 }
2136
2137 if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == INSN)
2138 num_insns++;
2139
2140 if (last_test_insn && num_insns > 30)
2141 break;
2142
2143 if (eh_regions > 0)
2144 /* We don't want to move a partial EH region. Consider:
2145
2146 while ( ( { try {
2147 if (cond ()) 0;
2148 else {
2149 bar();
2150 1;
2151 }
2152 } catch (...) {
2153 1;
2154 } )) {
2155 body;
2156 }
2157
2158 This isn't legal C++, but here's what it's supposed to
2159 mean: if cond() is true, stop looping. Otherwise,
2160 call bar, and keep looping. In addition, if cond
2161 throws an exception, catch it and keep looping. Such
2162 constructs are certainly legal in LISP.
2163
2164 We should not move the `if (cond()) 0' test since then
2165 the EH-region for the try-block would be broken up.
2166 (In this case we would move the EH_BEG note for the `try'
2167 and `if cond()' but not the call to bar() or the
2168 EH_END note.)
2169
2170 So we don't look for tests within an EH region. */
2171 continue;
2172
2173 if (GET_CODE (insn) == JUMP_INSN
2174 && GET_CODE (PATTERN (insn)) == SET
2175 && SET_DEST (PATTERN (insn)) == pc_rtx)
2176 {
2177 /* This is indeed a jump. */
2178 rtx dest1 = NULL_RTX;
2179 rtx dest2 = NULL_RTX;
2180 rtx potential_last_test;
2181 if (GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE)
2182 {
2183 /* A conditional jump. */
2184 dest1 = XEXP (SET_SRC (PATTERN (insn)), 1);
2185 dest2 = XEXP (SET_SRC (PATTERN (insn)), 2);
2186 potential_last_test = insn;
2187 }
2188 else
2189 {
2190 /* An unconditional jump. */
2191 dest1 = SET_SRC (PATTERN (insn));
2192 /* Include the BARRIER after the JUMP. */
2193 potential_last_test = NEXT_INSN (insn);
2194 }
2195
2196 do {
2197 if (dest1 && GET_CODE (dest1) == LABEL_REF
2198 && ((XEXP (dest1, 0)
2199 == loop_stack->data.loop.alt_end_label)
2200 || (XEXP (dest1, 0)
2201 == loop_stack->data.loop.end_label)))
2202 {
2203 last_test_insn = potential_last_test;
2204 break;
2205 }
2206
2207 /* If this was a conditional jump, there may be
2208 another label at which we should look. */
2209 dest1 = dest2;
2210 dest2 = NULL_RTX;
2211 } while (dest1);
2212 }
2213 }
2214
2215 if (last_test_insn != 0 && last_test_insn != get_last_insn ())
2216 {
2217 /* We found one. Move everything from there up
2218 to the end of the loop, and add a jump into the loop
2219 to jump to there. */
2220 register rtx newstart_label = gen_label_rtx ();
2221 register rtx start_move = start_label;
2222 rtx next_insn;
2223
2224 /* If the start label is preceded by a NOTE_INSN_LOOP_CONT note,
2225 then we want to move this note also. */
2226 if (GET_CODE (PREV_INSN (start_move)) == NOTE
2227 && (NOTE_LINE_NUMBER (PREV_INSN (start_move))
2228 == NOTE_INSN_LOOP_CONT))
2229 start_move = PREV_INSN (start_move);
2230
2231 emit_label_after (newstart_label, PREV_INSN (start_move));
2232
2233 /* Actually move the insns. Start at the beginning, and
2234 keep copying insns until we've copied the
2235 last_test_insn. */
2236 for (insn = start_move; insn; insn = next_insn)
2237 {
2238 /* Figure out which insn comes after this one. We have
2239 to do this before we move INSN. */
2240 if (insn == last_test_insn)
2241 /* We've moved all the insns. */
2242 next_insn = NULL_RTX;
2243 else
2244 next_insn = NEXT_INSN (insn);
2245
2246 if (GET_CODE (insn) == NOTE
2247 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2248 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2249 /* We don't want to move NOTE_INSN_BLOCK_BEGs or
2250 NOTE_INSN_BLOCK_ENDs because the correct generation
2251 of debugging information depends on these appearing
2252 in the same order in the RTL and in the tree
2253 structure, where they are represented as BLOCKs.
2254 So, we don't move block notes. Of course, moving
2255 the code inside the block is likely to make it
2256 impossible to debug the instructions in the exit
2257 test, but such is the price of optimization. */
2258 continue;
2259
2260 /* Move the INSN. */
2261 reorder_insns (insn, insn, get_last_insn ());
2262 }
2263
2264 emit_jump_insn_after (gen_jump (start_label),
2265 PREV_INSN (newstart_label));
2266 emit_barrier_after (PREV_INSN (newstart_label));
2267 start_label = newstart_label;
2268 }
2269 }
2270
2271 emit_jump (start_label);
2272 emit_note (NULL_PTR, NOTE_INSN_LOOP_END);
2273 emit_label (loop_stack->data.loop.end_label);
2274
2275 POPSTACK (loop_stack);
2276
2277 last_expr_type = 0;
2278 }
2279
2280 /* Generate a jump to the current loop's continue-point.
2281 This is usually the top of the loop, but may be specified
2282 explicitly elsewhere. If not currently inside a loop,
2283 return 0 and do nothing; caller will print an error message. */
2284
2285 int
2286 expand_continue_loop (whichloop)
2287 struct nesting *whichloop;
2288 {
2289 last_expr_type = 0;
2290 if (whichloop == 0)
2291 whichloop = loop_stack;
2292 if (whichloop == 0)
2293 return 0;
2294 expand_goto_internal (NULL_TREE, whichloop->data.loop.continue_label,
2295 NULL_RTX);
2296 return 1;
2297 }
2298
2299 /* Generate a jump to exit the current loop. If not currently inside a loop,
2300 return 0 and do nothing; caller will print an error message. */
2301
2302 int
2303 expand_exit_loop (whichloop)
2304 struct nesting *whichloop;
2305 {
2306 last_expr_type = 0;
2307 if (whichloop == 0)
2308 whichloop = loop_stack;
2309 if (whichloop == 0)
2310 return 0;
2311 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label, NULL_RTX);
2312 return 1;
2313 }
2314
2315 /* Generate a conditional jump to exit the current loop if COND
2316 evaluates to zero. If not currently inside a loop,
2317 return 0 and do nothing; caller will print an error message. */
2318
2319 int
2320 expand_exit_loop_if_false (whichloop, cond)
2321 struct nesting *whichloop;
2322 tree cond;
2323 {
2324 rtx label = gen_label_rtx ();
2325 rtx last_insn;
2326 last_expr_type = 0;
2327
2328 if (whichloop == 0)
2329 whichloop = loop_stack;
2330 if (whichloop == 0)
2331 return 0;
2332 /* In order to handle fixups, we actually create a conditional jump
2333 around an unconditional branch to exit the loop. If fixups are
2334 necessary, they go before the unconditional branch. */
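/* That is, the sequence emitted below has roughly the shape

       if (COND) goto LABEL;
       ... fixup code, if any ...
       goto END_LABEL;
     LABEL:

   where END_LABEL is the loop's exit label.  */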
2335
2336
2337 do_jump (cond, NULL_RTX, label);
2338 last_insn = get_last_insn ();
2339 if (GET_CODE (last_insn) == CODE_LABEL)
2340 whichloop->data.loop.alt_end_label = last_insn;
2341 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label,
2342 NULL_RTX);
2343 emit_label (label);
2344
2345 return 1;
2346 }
2347
2348 /* Return non-zero if we should preserve sub-expressions as separate
2349 pseudos. We never do so if we aren't optimizing. We always do so
2350 if -fexpensive-optimizations.
2351
2352 Otherwise, we only do so if we are in the "early" part of a loop. I.e.,
2353 the loop may still be a small one. */
2354
2355 int
2356 preserve_subexpressions_p ()
2357 {
2358 rtx insn;
2359
2360 if (flag_expensive_optimizations)
2361 return 1;
2362
2363 if (optimize == 0 || loop_stack == 0)
2364 return 0;
2365
2366 insn = get_last_insn_anywhere ();
2367
2368 return (insn
2369 && (INSN_UID (insn) - INSN_UID (loop_stack->data.loop.start_label)
2370 < n_non_fixed_regs * 3));
2371
2372 }
2373
2374 /* Generate a jump to exit the current loop, conditional, binding contour
2375 or case statement. Not all such constructs are visible to this function,
2376 only those started with EXIT_FLAG nonzero. Individual languages use
2377 the EXIT_FLAG parameter to control which kinds of constructs you can
2378 exit this way.
2379
2380 If not currently inside anything that can be exited,
2381 return 0 and do nothing; caller will print an error message. */
2382
2383 int
2384 expand_exit_something ()
2385 {
2386 struct nesting *n;
2387 last_expr_type = 0;
2388 for (n = nesting_stack; n; n = n->all)
2389 if (n->exit_label != 0)
2390 {
2391 expand_goto_internal (NULL_TREE, n->exit_label, NULL_RTX);
2392 return 1;
2393 }
2394
2395 return 0;
2396 }
2397 \f
2398 /* Generate RTL to return from the current function, with no value.
2399 (That is, we do not do anything about returning any value.) */
2400
2401 void
2402 expand_null_return ()
2403 {
2404 struct nesting *block = block_stack;
2405 rtx last_insn = 0;
2406
2407 /* Does any pending block have cleanups? */
2408
2409 while (block && block->data.block.cleanups == 0)
2410 block = block->next;
2411
2412 /* If yes, use a goto to return, since that runs cleanups. */
2413
2414 expand_null_return_1 (last_insn, block != 0);
2415 }
2416
2417 /* Generate RTL to return from the current function, with value VAL. */
2418
2419 static void
2420 expand_value_return (val)
2421 rtx val;
2422 {
2423 struct nesting *block = block_stack;
2424 rtx last_insn = get_last_insn ();
2425 rtx return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
2426
2427 /* Copy the value to the return location
2428 unless it's already there. */
2429
2430 if (return_reg != val)
2431 {
2432 #ifdef PROMOTE_FUNCTION_RETURN
2433 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
2434 int unsignedp = TREE_UNSIGNED (type);
2435 enum machine_mode mode
2436 = promote_mode (type, DECL_MODE (DECL_RESULT (current_function_decl)),
2437 &unsignedp, 1);
2438
2439 if (GET_MODE (val) != VOIDmode && GET_MODE (val) != mode)
2440 convert_move (return_reg, val, unsignedp);
2441 else
2442 #endif
2443 emit_move_insn (return_reg, val);
2444 }
2445 if (GET_CODE (return_reg) == REG
2446 && REGNO (return_reg) < FIRST_PSEUDO_REGISTER)
2447 emit_insn (gen_rtx_USE (VOIDmode, return_reg));
2448 /* Handle calls that return values in multiple non-contiguous locations.
2449 The Irix 6 ABI has examples of this. */
2450 else if (GET_CODE (return_reg) == PARALLEL)
2451 {
2452 int i;
2453
2454 for (i = 0; i < XVECLEN (return_reg, 0); i++)
2455 {
2456 rtx x = XEXP (XVECEXP (return_reg, 0, i), 0);
2457
2458 if (GET_CODE (x) == REG
2459 && REGNO (x) < FIRST_PSEUDO_REGISTER)
2460 emit_insn (gen_rtx_USE (VOIDmode, x));
2461 }
2462 }
2463
2464 /* Does any pending block have cleanups? */
2465
2466 while (block && block->data.block.cleanups == 0)
2467 block = block->next;
2468
2469 /* If yes, use a goto to return, since that runs cleanups.
2470 Use LAST_INSN to put cleanups *before* the move insn emitted above. */
2471
2472 expand_null_return_1 (last_insn, block != 0);
2473 }
2474
2475 /* Output a return with no value. If LAST_INSN is nonzero,
2476 pretend that the return takes place after LAST_INSN.
2477 If USE_GOTO is nonzero then don't use a return instruction;
2478 go to the return label instead. This causes any cleanups
2479 of pending blocks to be executed normally. */
2480
2481 static void
2482 expand_null_return_1 (last_insn, use_goto)
2483 rtx last_insn;
2484 int use_goto;
2485 {
2486 rtx end_label = cleanup_label ? cleanup_label : return_label;
2487
2488 clear_pending_stack_adjust ();
2489 do_pending_stack_adjust ();
2490 last_expr_type = 0;
2491
2492 /* PCC-struct return always uses an epilogue. */
2493 if (current_function_returns_pcc_struct || use_goto)
2494 {
2495 if (end_label == 0)
2496 end_label = return_label = gen_label_rtx ();
2497 expand_goto_internal (NULL_TREE, end_label, last_insn);
2498 return;
2499 }
2500
2501 /* Otherwise output a simple return-insn if one is available,
2502 unless it won't do the job. */
2503 #ifdef HAVE_return
2504 if (HAVE_return && use_goto == 0 && cleanup_label == 0)
2505 {
2506 emit_jump_insn (gen_return ());
2507 emit_barrier ();
2508 return;
2509 }
2510 #endif
2511
2512 /* Otherwise jump to the epilogue. */
2513 expand_goto_internal (NULL_TREE, end_label, last_insn);
2514 }
2515 \f
2516 /* Generate RTL to evaluate the expression RETVAL and return it
2517 from the current function. */
2518
2519 void
2520 expand_return (retval)
2521 tree retval;
2522 {
2523 /* If there are any cleanups to be performed, then they will
2524 be inserted following LAST_INSN. It is desirable
2525 that the last_insn, for such purposes, should be the
2526 last insn before computing the return value. Otherwise, cleanups
2527 which call functions can clobber the return value. */
2528 /* ??? rms: I think that is erroneous, because in C++ it would
2529 run destructors on variables that might be used in the subsequent
2530 computation of the return value. */
2531 rtx last_insn = 0;
2532 register rtx val = 0;
2533 register rtx op0;
2534 tree retval_rhs;
2535 int cleanups;
2536
2537 /* If function wants no value, give it none. */
2538 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
2539 {
2540 expand_expr (retval, NULL_RTX, VOIDmode, 0);
2541 emit_queue ();
2542 expand_null_return ();
2543 return;
2544 }
2545
2546 /* Are any cleanups needed? E.g. C++ destructors to be run? */
2547 /* This is not sufficient. We also need to watch for cleanups of the
2548 expression we are about to expand. Unfortunately, we cannot know
2549 if it has cleanups until we expand it, and we want to change how we
2550 expand it depending upon if we need cleanups. We can't win. */
2551 #if 0
2552 cleanups = any_pending_cleanups (1);
2553 #else
2554 cleanups = 1;
2555 #endif
2556
2557 if (TREE_CODE (retval) == RESULT_DECL)
2558 retval_rhs = retval;
2559 else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
2560 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
2561 retval_rhs = TREE_OPERAND (retval, 1);
2562 else if (TREE_TYPE (retval) == void_type_node)
2563 /* Recognize tail-recursive call to void function. */
2564 retval_rhs = retval;
2565 else
2566 retval_rhs = NULL_TREE;
2567
2568 /* Only use `last_insn' if there are cleanups which must be run. */
2569 if (cleanups || cleanup_label != 0)
2570 last_insn = get_last_insn ();
2571
2572 /* Distribute return down conditional expr if either of the sides
2573 may involve tail recursion (see test below). This enhances the number
2574 of tail recursions we see. Don't do this always since it can produce
2575 sub-optimal code in some cases and we distribute assignments into
2576 conditional expressions when it would help. */
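/* For example, `return p ? f (x) : y;' is rewritten here roughly as

       if (! p) goto L;
       return f (x);
     L:
       return y;

   so that an arm which is a recursive call can be recognized by
   optimize_tail_recursion in the nested expand_return.  */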
2577
2578 if (optimize && retval_rhs != 0
2579 && frame_offset == 0
2580 && TREE_CODE (retval_rhs) == COND_EXPR
2581 && (TREE_CODE (TREE_OPERAND (retval_rhs, 1)) == CALL_EXPR
2582 || TREE_CODE (TREE_OPERAND (retval_rhs, 2)) == CALL_EXPR))
2583 {
2584 rtx label = gen_label_rtx ();
2585 tree expr;
2586
2587 do_jump (TREE_OPERAND (retval_rhs, 0), label, NULL_RTX);
2588 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
2589 DECL_RESULT (current_function_decl),
2590 TREE_OPERAND (retval_rhs, 1));
2591 TREE_SIDE_EFFECTS (expr) = 1;
2592 expand_return (expr);
2593 emit_label (label);
2594
2595 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
2596 DECL_RESULT (current_function_decl),
2597 TREE_OPERAND (retval_rhs, 2));
2598 TREE_SIDE_EFFECTS (expr) = 1;
2599 expand_return (expr);
2600 return;
2601 }
2602
2603 /* Attempt to optimize the call if it is tail recursive. */
2604 optimize_tail_recursion (retval_rhs, last_insn);
2605
2606 #ifdef HAVE_return
2607 /* This optimization is safe if there are local cleanups
2608 because expand_null_return takes care of them.
2609 ??? I think it should also be safe when there is a cleanup label,
2610 because expand_null_return takes care of them, too.
2611 Any reason why not? */
2612 if (HAVE_return && cleanup_label == 0
2613 && ! current_function_returns_pcc_struct
2614 && BRANCH_COST <= 1)
2615 {
2616 /* If this is return x == y; then generate
2617 if (x == y) return 1; else return 0;
2618 if we can do it with explicit return insns and branches are cheap,
2619 but not if we have the corresponding scc insn. */
2620 int has_scc = 0;
2621 if (retval_rhs)
2622 switch (TREE_CODE (retval_rhs))
2623 {
2624 case EQ_EXPR:
2625 #ifdef HAVE_seq
2626 has_scc = HAVE_seq;
2627 #endif
2628 case NE_EXPR:
2629 #ifdef HAVE_sne
2630 has_scc = HAVE_sne;
2631 #endif
2632 case GT_EXPR:
2633 #ifdef HAVE_sgt
2634 has_scc = HAVE_sgt;
2635 #endif
2636 case GE_EXPR:
2637 #ifdef HAVE_sge
2638 has_scc = HAVE_sge;
2639 #endif
2640 case LT_EXPR:
2641 #ifdef HAVE_slt
2642 has_scc = HAVE_slt;
2643 #endif
2644 case LE_EXPR:
2645 #ifdef HAVE_sle
2646 has_scc = HAVE_sle;
2647 #endif
2648 case TRUTH_ANDIF_EXPR:
2649 case TRUTH_ORIF_EXPR:
2650 case TRUTH_AND_EXPR:
2651 case TRUTH_OR_EXPR:
2652 case TRUTH_NOT_EXPR:
2653 case TRUTH_XOR_EXPR:
2654 if (! has_scc)
2655 {
2656 op0 = gen_label_rtx ();
2657 jumpifnot (retval_rhs, op0);
2658 expand_value_return (const1_rtx);
2659 emit_label (op0);
2660 expand_value_return (const0_rtx);
2661 return;
2662 }
2663 break;
2664
2665 default:
2666 break;
2667 }
2668 }
2669 #endif /* HAVE_return */
2670
2671 /* If the result is an aggregate that is being returned in one (or more)
2672 registers, load the registers here. The compiler currently can't handle
2673 copying a BLKmode value into registers. We could put this code in a
2674 more general area (for use by everyone instead of just function
2675 call/return), but until this feature is generally usable it is kept here
2676 (and in expand_call). The value must go into a pseudo in case there
2677 are cleanups that will clobber the real return register. */
2678
2679 if (retval_rhs != 0
2680 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
2681 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG)
2682 {
2683 int i, bitpos, xbitpos;
2684 int big_endian_correction = 0;
2685 int bytes = int_size_in_bytes (TREE_TYPE (retval_rhs));
2686 int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2687 int bitsize = MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)),
2688 (unsigned int)BITS_PER_WORD);
2689 rtx *result_pseudos = (rtx *) alloca (sizeof (rtx) * n_regs);
2690 rtx result_reg, src = NULL_RTX, dst = NULL_RTX;
2691 rtx result_val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
2692 enum machine_mode tmpmode, result_reg_mode;
2693
2694 /* Structures whose size is not a multiple of a word are aligned
2695 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2696 machine, this means we must skip the empty high order bytes when
2697 calculating the bit offset. */
2698 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2699 big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2700 * BITS_PER_UNIT));
2701
2702 /* Copy the structure BITSIZE bits at a time. */
2703 for (bitpos = 0, xbitpos = big_endian_correction;
2704 bitpos < bytes * BITS_PER_UNIT;
2705 bitpos += bitsize, xbitpos += bitsize)
2706 {
2707 /* We need a new destination pseudo each time xbitpos is
2708 on a word boundary and when xbitpos == big_endian_correction
2709 (the first time through). */
2710 if (xbitpos % BITS_PER_WORD == 0
2711 || xbitpos == big_endian_correction)
2712 {
2713 /* Generate an appropriate register. */
2714 dst = gen_reg_rtx (word_mode);
2715 result_pseudos[xbitpos / BITS_PER_WORD] = dst;
2716
2717 /* Clobber the destination before we move anything into it. */
2718 emit_insn (gen_rtx_CLOBBER (VOIDmode, dst));
2719 }
2720
2721 /* We need a new source operand each time bitpos is on a word
2722 boundary. */
2723 if (bitpos % BITS_PER_WORD == 0)
2724 src = operand_subword_force (result_val,
2725 bitpos / BITS_PER_WORD,
2726 BLKmode);
2727
2728 /* Use bitpos for the source extraction (left justified) and
2729 xbitpos for the destination store (right justified). */
2730 store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
2731 extract_bit_field (src, bitsize,
2732 bitpos % BITS_PER_WORD, 1,
2733 NULL_RTX, word_mode,
2734 word_mode,
2735 bitsize / BITS_PER_UNIT,
2736 BITS_PER_WORD),
2737 bitsize / BITS_PER_UNIT, BITS_PER_WORD);
2738 }
2739
2740 /* Find the smallest integer mode large enough to hold the
2741 entire structure and use that mode instead of BLKmode
2742 on the USE insn for the return register. */
2743 bytes = int_size_in_bytes (TREE_TYPE (retval_rhs));
2744 for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2745 tmpmode != MAX_MACHINE_MODE;
2746 tmpmode = GET_MODE_WIDER_MODE (tmpmode))
2747 {
2748 /* Have we found a large enough mode? */
2749 if (GET_MODE_SIZE (tmpmode) >= bytes)
2750 break;
2751 }
2752
2753 /* No suitable mode found. */
2754 if (tmpmode == MAX_MACHINE_MODE)
2755 abort ();
2756
2757 PUT_MODE (DECL_RTL (DECL_RESULT (current_function_decl)), tmpmode);
2758
2759 if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
2760 result_reg_mode = word_mode;
2761 else
2762 result_reg_mode = tmpmode;
2763 result_reg = gen_reg_rtx (result_reg_mode);
2764
2765 emit_queue ();
2766 for (i = 0; i < n_regs; i++)
2767 emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
2768 result_pseudos[i]);
2769
2770 if (tmpmode != result_reg_mode)
2771 result_reg = gen_lowpart (tmpmode, result_reg);
2772
2773 expand_value_return (result_reg);
2774 }
2775 else if (cleanups
2776 && retval_rhs != 0
2777 && TREE_TYPE (retval_rhs) != void_type_node
2778 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG)
2779 {
2780 /* Calculate the return value into a pseudo reg. */
2781 val = gen_reg_rtx (DECL_MODE (DECL_RESULT (current_function_decl)));
2782 val = expand_expr (retval_rhs, val, GET_MODE (val), 0);
2783 val = force_not_mem (val);
2784 emit_queue ();
2785 /* Return the calculated value, doing cleanups first. */
2786 expand_value_return (val);
2787 }
2788 else
2789 {
2790 /* No cleanups or no hard reg used;
2791 calculate value into hard return reg. */
2792 expand_expr (retval, const0_rtx, VOIDmode, 0);
2793 emit_queue ();
2794 expand_value_return (DECL_RTL (DECL_RESULT (current_function_decl)));
2795 }
2796 }
2797
2798 /* Return 1 if the end of the generated RTX is not a barrier.
2799 This means code already compiled can drop through. */
2800
2801 int
2802 drop_through_at_end_p ()
2803 {
2804 rtx insn = get_last_insn ();
2805 while (insn && GET_CODE (insn) == NOTE)
2806 insn = PREV_INSN (insn);
2807 return insn && GET_CODE (insn) != BARRIER;
2808 }
2809 \f
2810 /* Test CALL_EXPR to determine if it is a potential tail recursion call
2811 and emit code to optimize the tail recursion. LAST_INSN indicates where
2812 to place the jump to the tail recursion label.
2813
2814 This is only used by expand_return, but expand_call is expected to
2815 use it soon. */
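/* For example, when a function such as

       int f (int n, int acc) { ... return f (n - 1, n * acc); ... }

   is compiled with -O and has an empty stack frame, the recursive call
   is replaced, roughly, by assignments of the new argument values to
   the formal parameters (see tail_recursion_args) followed by a jump
   back to tail_recursion_label, which is emitted just after
   tail_recursion_reentry.  */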
2816
2817 void
2818 optimize_tail_recursion (call_expr, last_insn)
2819 tree call_expr;
2820 rtx last_insn;
2821 {
2822 /* For tail-recursive call to current function,
2823 just jump back to the beginning.
2824 It's unsafe if any auto variable in this function
2825 has its address taken; for simplicity,
2826 require stack frame to be empty. */
2827 if (optimize && call_expr != 0
2828 && frame_offset == 0
2829 && TREE_CODE (call_expr) == CALL_EXPR
2830 && TREE_CODE (TREE_OPERAND (call_expr, 0)) == ADDR_EXPR
2831 && TREE_OPERAND (TREE_OPERAND (call_expr, 0), 0) == current_function_decl
2832 /* Finish checking validity, and if valid emit code
2833 to set the argument variables for the new call. */
2834 && tail_recursion_args (TREE_OPERAND (call_expr, 1),
2835 DECL_ARGUMENTS (current_function_decl)))
2836 {
2837 if (tail_recursion_label == 0)
2838 {
2839 tail_recursion_label = gen_label_rtx ();
2840 emit_label_after (tail_recursion_label,
2841 tail_recursion_reentry);
2842 }
2843 emit_queue ();
2844 expand_goto_internal (NULL_TREE, tail_recursion_label, last_insn);
2845 emit_barrier ();
2846 }
2847 }
2848
2849 /* Emit code to alter this function's formal parms for a tail-recursive call.
2850 ACTUALS is a list of actual parameter expressions (chain of TREE_LISTs).
2851 FORMALS is the chain of decls of formals.
2852 Return 1 if this can be done;
2853 otherwise return 0 and do not emit any code. */
2854
2855 static int
2856 tail_recursion_args (actuals, formals)
2857 tree actuals, formals;
2858 {
2859 register tree a = actuals, f = formals;
2860 register int i;
2861 register rtx *argvec;
2862
2863 /* Check that number and types of actuals are compatible
2864 with the formals. This is not always true in valid C code.
2865 Also check that no formal needs to be addressable
2866 and that all formals are scalars. */
2867
2868 /* Also count the args. */
2869
2870 for (a = actuals, f = formals, i = 0; a && f; a = TREE_CHAIN (a), f = TREE_CHAIN (f), i++)
2871 {
2872 if (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_VALUE (a)))
2873 != TYPE_MAIN_VARIANT (TREE_TYPE (f)))
2874 return 0;
2875 if (GET_CODE (DECL_RTL (f)) != REG || DECL_MODE (f) == BLKmode)
2876 return 0;
2877 }
2878 if (a != 0 || f != 0)
2879 return 0;
2880
2881 /* Compute all the actuals. */
2882
2883 argvec = (rtx *) alloca (i * sizeof (rtx));
2884
2885 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
2886 argvec[i] = expand_expr (TREE_VALUE (a), NULL_RTX, VOIDmode, 0);
2887
2888 /* Find which actual values refer to current values of previous formals.
2889 Copy each of them now, before any formal is changed. */
2890
2891 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
2892 {
2893 int copy = 0;
2894 register int j;
2895 for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++)
2896 if (reg_mentioned_p (DECL_RTL (f), argvec[i]))
2897 { copy = 1; break; }
2898 if (copy)
2899 argvec[i] = copy_to_reg (argvec[i]);
2900 }
2901
2902 /* Store the values of the actuals into the formals. */
2903
2904 for (f = formals, a = actuals, i = 0; f;
2905 f = TREE_CHAIN (f), a = TREE_CHAIN (a), i++)
2906 {
2907 if (GET_MODE (DECL_RTL (f)) == GET_MODE (argvec[i]))
2908 emit_move_insn (DECL_RTL (f), argvec[i]);
2909 else
2910 convert_move (DECL_RTL (f), argvec[i],
2911 TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a))));
2912 }
2913
2914 free_temp_slots ();
2915 return 1;
2916 }
2917 \f
2918 /* Generate the RTL code for entering a binding contour.
2919 The variables are declared one by one, by calls to `expand_decl'.
2920
2921 EXIT_FLAG is nonzero if this construct should be visible to
2922 `exit_something'. */
2923
2924 void
2925 expand_start_bindings (exit_flag)
2926 int exit_flag;
2927 {
2928 struct nesting *thisblock = ALLOC_NESTING ();
2929 rtx note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
2930
2931 /* Make an entry on block_stack for the block we are entering. */
2932
2933 thisblock->next = block_stack;
2934 thisblock->all = nesting_stack;
2935 thisblock->depth = ++nesting_depth;
2936 thisblock->data.block.stack_level = 0;
2937 thisblock->data.block.cleanups = 0;
2938 thisblock->data.block.function_call_count = 0;
2939 thisblock->data.block.exception_region = 0;
2940 thisblock->data.block.target_temp_slot_level = target_temp_slot_level;
2941
2942 thisblock->data.block.conditional_code = 0;
2943 thisblock->data.block.last_unconditional_cleanup = note;
2944 thisblock->data.block.cleanup_ptr = &thisblock->data.block.cleanups;
2945
2946 if (block_stack
2947 && !(block_stack->data.block.cleanups == NULL_TREE
2948 && block_stack->data.block.outer_cleanups == NULL_TREE))
2949 thisblock->data.block.outer_cleanups
2950 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
2951 block_stack->data.block.outer_cleanups);
2952 else
2953 thisblock->data.block.outer_cleanups = 0;
2954 thisblock->data.block.label_chain = 0;
2955 thisblock->data.block.innermost_stack_block = stack_block_stack;
2956 thisblock->data.block.first_insn = note;
2957 thisblock->data.block.block_start_count = ++block_start_count;
2958 thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
2959 block_stack = thisblock;
2960 nesting_stack = thisblock;
2961
2962 /* Make a new level for allocating stack slots. */
2963 push_temp_slots ();
2964 }
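/* A front end entering and leaving a brace-enclosed block such as
   `{ int v; ... }' would bracket it roughly as

       expand_start_bindings (0);
       expand_decl (v);
       ... expand the statements of the block ...
       expand_end_bindings (<chain of decls>, 1, 0);

   where the decl chain passed to expand_end_bindings lists the
   VAR_DECLs bound in the block.  */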
2965
2966 /* Specify the scope of temporaries created by TARGET_EXPRs. Similar
2967 to CLEANUP_POINT_EXPR, but handles cases when a series of calls to
2968 expand_expr are made. After we end the region, we know that all
2969 space for all temporaries that were created by TARGET_EXPRs will be
2970 destroyed and their space freed for reuse. */
2971
2972 void
2973 expand_start_target_temps ()
2974 {
2975 /* This is so that even if the result is preserved, the space
2976 allocated will be freed, as we know that it is no longer in use. */
2977 push_temp_slots ();
2978
2979 /* Start a new binding layer that will keep track of all cleanup
2980 actions to be performed. */
2981 expand_start_bindings (0);
2982
2983 target_temp_slot_level = temp_slot_level;
2984 }
2985
2986 void
2987 expand_end_target_temps ()
2988 {
2989 expand_end_bindings (NULL_TREE, 0, 0);
2990
2991 /* This is so that even if the result is preserved, the space
2992 allocated will be freed, as we know that it is no longer in use. */
2993 pop_temp_slots ();
2994 }
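/* One plausible use of this pair is to wrap the expansion of a single
   statement, e.g.

       expand_start_target_temps ();
       expand_expr_stmt (exp);
       expand_end_target_temps ();

   so that any temporaries created by TARGET_EXPRs within EXP are
   released once the statement has been expanded.  */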
2995
2996 /* Mark top block of block_stack as an implicit binding for an
2997 exception region. This is used to prevent infinite recursion when
2998 ending a binding with expand_end_bindings. It is only ever called
2999 by expand_eh_region_start, as that is the only way to create a
3000 block stack for an exception region. */
3001
3002 void
3003 mark_block_as_eh_region ()
3004 {
3005 block_stack->data.block.exception_region = 1;
3006 if (block_stack->next
3007 && block_stack->next->data.block.conditional_code)
3008 {
3009 block_stack->data.block.conditional_code
3010 = block_stack->next->data.block.conditional_code;
3011 block_stack->data.block.last_unconditional_cleanup
3012 = block_stack->next->data.block.last_unconditional_cleanup;
3013 block_stack->data.block.cleanup_ptr
3014 = block_stack->next->data.block.cleanup_ptr;
3015 }
3016 }
3017
3018 /* True if we are currently emitting insns in an area of output code
3019 that is controlled by a conditional expression. This is used by
3020 the cleanup handling code to generate conditional cleanup actions. */
3021
3022 int
3023 conditional_context ()
3024 {
3025 return block_stack && block_stack->data.block.conditional_code;
3026 }
3027
3028 /* Mark top block of block_stack as not for an implicit binding for an
3029 exception region. This is only ever done by expand_eh_region_end
3030 to let expand_end_bindings know that it is being called explicitly
3031 to end just the binding layer associated with
3032 the exception region; otherwise expand_end_bindings would try to
3033 end all implicit binding layers for exception regions, and then
3034 one normal binding layer. */
3035
3036 void
3037 mark_block_as_not_eh_region ()
3038 {
3039 block_stack->data.block.exception_region = 0;
3040 }
3041
3042 /* True if the top block of block_stack was marked as for an exception
3043 region by mark_block_as_eh_region. */
3044
3045 int
3046 is_eh_region ()
3047 {
3048 return block_stack && block_stack->data.block.exception_region;
3049 }
3050
3051 /* Given a pointer to a BLOCK node, save a pointer to the most recently
3052 generated NOTE_INSN_BLOCK_END in the BLOCK_END_NOTE field of the given
3053 BLOCK node. */
3054
3055 void
3056 remember_end_note (block)
3057 register tree block;
3058 {
3059 BLOCK_END_NOTE (block) = last_block_end_note;
3060 last_block_end_note = NULL_RTX;
3061 }
3062
3063 /* Emit a handler label for a nonlocal goto handler.
3064 Also emit code to store the handler label in SLOT before BEFORE_INSN. */
3065
3066 static void
3067 expand_nl_handler_label (slot, before_insn)
3068 rtx slot, before_insn;
3069 {
3070 rtx insns;
3071 rtx handler_label = gen_label_rtx ();
3072
3073 /* Don't let jump_optimize delete the handler. */
3074 LABEL_PRESERVE_P (handler_label) = 1;
3075
3076 start_sequence ();
3077 emit_move_insn (slot, gen_rtx_LABEL_REF (Pmode, handler_label));
3078 insns = get_insns ();
3079 end_sequence ();
3080 emit_insns_before (insns, before_insn);
3081
3082 emit_label (handler_label);
3083 }
3084
3085 /* Emit code to restore vital registers at the beginning of a nonlocal goto
3086 handler. */
3087 static void
3088 expand_nl_goto_receiver ()
3089 {
3090 #ifdef HAVE_nonlocal_goto
3091 if (! HAVE_nonlocal_goto)
3092 #endif
3093 /* First adjust our frame pointer to its actual value. It was
3094 previously set to the start of the virtual area corresponding to
3095 the stacked variables when we branched here and now needs to be
3096 adjusted to the actual hardware fp value.
3097
3098 Assignments to virtual registers are converted by
3099 instantiate_virtual_regs into the corresponding assignment
3100 to the underlying register (fp in this case) that makes
3101 the original assignment true.
3102 So the following insn will actually be
3103 decrementing fp by STARTING_FRAME_OFFSET. */
3104 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
3105
3106 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3107 if (fixed_regs[ARG_POINTER_REGNUM])
3108 {
3109 #ifdef ELIMINABLE_REGS
3110 /* If the argument pointer can be eliminated in favor of the
3111 frame pointer, we don't need to restore it. We assume here
3112 that if such an elimination is present, it can always be used.
3113 This is the case on all known machines; if we don't make this
3114 assumption, we do unnecessary saving on many machines. */
3115 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
3116 size_t i;
3117
3118 for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
3119 if (elim_regs[i].from == ARG_POINTER_REGNUM
3120 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
3121 break;
3122
3123 if (i == sizeof elim_regs / sizeof elim_regs [0])
3124 #endif
3125 {
3126 /* Now restore our arg pointer from the address at which it
3127 was saved in our stack frame.
3128 If there hasn't been space allocated for it yet, make
3129 some now. */
3130 if (arg_pointer_save_area == 0)
3131 arg_pointer_save_area
3132 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
3133 emit_move_insn (virtual_incoming_args_rtx,
3134 /* We need a pseudo here, or else
3135 instantiate_virtual_regs_1 complains. */
3136 copy_to_reg (arg_pointer_save_area));
3137 }
3138 }
3139 #endif
3140
3141 #ifdef HAVE_nonlocal_goto_receiver
3142 if (HAVE_nonlocal_goto_receiver)
3143 emit_insn (gen_nonlocal_goto_receiver ());
3144 #endif
3145 }
3146
3147 /* Make handlers for nonlocal gotos taking place in the function calls in
3148 block THISBLOCK. */
3149
3150 static void
3151 expand_nl_goto_receivers (thisblock)
3152 struct nesting *thisblock;
3153 {
3154 tree link;
3155 rtx afterward = gen_label_rtx ();
3156 rtx insns, slot;
3157 int any_invalid;
3158
3159 /* Record the handler address in the stack slot for that purpose,
3160 during this block, saving and restoring the outer value. */
3161 if (thisblock->next != 0)
3162 for (slot = nonlocal_goto_handler_slots; slot; slot = XEXP (slot, 1))
3163 {
3164 rtx save_receiver = gen_reg_rtx (Pmode);
3165 emit_move_insn (XEXP (slot, 0), save_receiver);
3166
3167 start_sequence ();
3168 emit_move_insn (save_receiver, XEXP (slot, 0));
3169 insns = get_insns ();
3170 end_sequence ();
3171 emit_insns_before (insns, thisblock->data.block.first_insn);
3172 }
3173
3174 /* Jump around the handlers; they run only when specially invoked. */
3175 emit_jump (afterward);
3176
3177 /* Make a separate handler for each label. */
3178 link = nonlocal_labels;
3179 slot = nonlocal_goto_handler_slots;
3180 for (; link; link = TREE_CHAIN (link), slot = XEXP (slot, 1))
3181 /* Skip any labels we shouldn't be able to jump to from here;
3182 we generate one special handler for all of them below, which just calls
3183 abort. */
3184 if (! DECL_TOO_LATE (TREE_VALUE (link)))
3185 {
3186 expand_nl_handler_label (XEXP (slot, 0),
3187 thisblock->data.block.first_insn);
3188 expand_nl_goto_receiver ();
3189
3190 /* Jump to the "real" nonlocal label. */
3191 expand_goto (TREE_VALUE (link));
3192 }
3193
3194 /* A second pass over all nonlocal labels; this time we handle those
3195 we should not be able to jump to at this point. */
3196 link = nonlocal_labels;
3197 slot = nonlocal_goto_handler_slots;
3198 any_invalid = 0;
3199 for (; link; link = TREE_CHAIN (link), slot = XEXP (slot, 1))
3200 if (DECL_TOO_LATE (TREE_VALUE (link)))
3201 {
3202 expand_nl_handler_label (XEXP (slot, 0),
3203 thisblock->data.block.first_insn);
3204 any_invalid = 1;
3205 }
3206
3207 if (any_invalid)
3208 {
3209 expand_nl_goto_receiver ();
3210 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "abort"), 0,
3211 VOIDmode, 0);
3212 emit_barrier ();
3213 }
3214
3215 emit_label (afterward);
3216 }
3217
3218 /* Generate RTL code to terminate a binding contour.
3219 VARS is the chain of VAR_DECL nodes
3220 for the variables bound in this contour.
3221 MARK_ENDS is nonzero if we should put a note at the beginning
3222 and end of this binding contour.
3223
3224 DONT_JUMP_IN is nonzero if it is not valid to jump into this contour.
3225 (That is true automatically if the contour has a saved stack level.) */
3226
3227 void
3228 expand_end_bindings (vars, mark_ends, dont_jump_in)
3229 tree vars;
3230 int mark_ends;
3231 int dont_jump_in;
3232 {
3233 register struct nesting *thisblock;
3234 register tree decl;
3235
3236 while (block_stack->data.block.exception_region)
3237 {
3238 /* Because we don't need or want a new temporary level and
3239 because we didn't create one in expand_eh_region_start,
3240 create a fake one now to avoid removing one in
3241 expand_end_bindings. */
3242 push_temp_slots ();
3243
3244 block_stack->data.block.exception_region = 0;
3245
3246 expand_end_bindings (NULL_TREE, 0, 0);
3247 }
3248
3249 /* Since expand_eh_region_start does an expand_start_bindings, we
3250 have to first end all the bindings that were created by
3251 expand_eh_region_start. */
3252
3253 thisblock = block_stack;
3254
3255 if (warn_unused)
3256 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3257 if (! TREE_USED (decl) && TREE_CODE (decl) == VAR_DECL
3258 && ! DECL_IN_SYSTEM_HEADER (decl)
3259 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
3260 warning_with_decl (decl, "unused variable `%s'");
3261
3262 if (thisblock->exit_label)
3263 {
3264 do_pending_stack_adjust ();
3265 emit_label (thisblock->exit_label);
3266 }
3267
3268 /* If necessary, make handlers for nonlocal gotos taking
3269 place in the function calls in this block. */
3270 if (function_call_count != thisblock->data.block.function_call_count
3271 && nonlocal_labels
3272 /* Make handler for outermost block
3273 if there were any nonlocal gotos to this function. */
3274 && (thisblock->next == 0 ? current_function_has_nonlocal_label
3275 /* Make handler for inner block if it has something
3276 special to do when you jump out of it. */
3277 : (thisblock->data.block.cleanups != 0
3278 || thisblock->data.block.stack_level != 0)))
3279 expand_nl_goto_receivers (thisblock);
3280
3281 /* Don't allow jumping into a block that has a stack level.
3282 Cleanups are allowed, though. */
3283 if (dont_jump_in
3284 || thisblock->data.block.stack_level != 0)
3285 {
3286 struct label_chain *chain;
3287
3288 /* Any labels in this block are no longer valid to go to.
3289 Mark them to cause an error message. */
3290 for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
3291 {
3292 DECL_TOO_LATE (chain->label) = 1;
3293 /* If any goto without a fixup came to this label,
3294 that must be an error, because gotos without fixups
3295 come from outside all saved stack-levels. */
3296 if (TREE_ADDRESSABLE (chain->label))
3297 error_with_decl (chain->label,
3298 "label `%s' used before containing binding contour");
3299 }
3300 }
3301
3302 /* Restore stack level in effect before the block
3303 (only if variable-size objects allocated). */
3304 /* Perform any cleanups associated with the block. */
3305
3306 if (thisblock->data.block.stack_level != 0
3307 || thisblock->data.block.cleanups != 0)
3308 {
3309 /* Only clean up here if this point can actually be reached. */
3310 int reachable = GET_CODE (get_last_insn ()) != BARRIER;
3311
3312 /* Don't let cleanups affect ({...}) constructs. */
3313 int old_expr_stmts_for_value = expr_stmts_for_value;
3314 rtx old_last_expr_value = last_expr_value;
3315 tree old_last_expr_type = last_expr_type;
3316 expr_stmts_for_value = 0;
3317
3318 /* Do the cleanups. */
3319 expand_cleanups (thisblock->data.block.cleanups, NULL_TREE, 0, reachable);
3320 if (reachable)
3321 do_pending_stack_adjust ();
3322
3323 expr_stmts_for_value = old_expr_stmts_for_value;
3324 last_expr_value = old_last_expr_value;
3325 last_expr_type = old_last_expr_type;
3326
3327 /* Restore the stack level. */
3328
3329 if (reachable && thisblock->data.block.stack_level != 0)
3330 {
3331 emit_stack_restore (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3332 thisblock->data.block.stack_level, NULL_RTX);
3333 if (nonlocal_goto_handler_slots != 0)
3334 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level,
3335 NULL_RTX);
3336 }
3337
3338 /* Any gotos out of this block must also do these things.
3339 Also report any gotos with fixups that came to labels in this
3340 level. */
3341 fixup_gotos (thisblock,
3342 thisblock->data.block.stack_level,
3343 thisblock->data.block.cleanups,
3344 thisblock->data.block.first_insn,
3345 dont_jump_in);
3346 }
3347
3348 /* Mark the beginning and end of the scope if requested.
3349 We do this now, after running cleanups on the variables
3350 just going out of scope, so they are in scope for their cleanups. */
3351
3352 if (mark_ends)
3353 last_block_end_note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
3354 else
3355 /* Get rid of the beginning-mark if we don't make an end-mark. */
3356 NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;
3357
3358 /* If doing stupid register allocation, make sure lives of all
3359 register variables declared here extend thru end of scope. */
3360
3361 if (obey_regdecls)
3362 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3363 {
3364 rtx rtl = DECL_RTL (decl);
3365 if (TREE_CODE (decl) == VAR_DECL && rtl != 0)
3366 use_variable (rtl);
3367 }
3368
3369 /* Restore the temporary level of TARGET_EXPRs. */
3370 target_temp_slot_level = thisblock->data.block.target_temp_slot_level;
3371
3372 /* Restore block_stack level for containing block. */
3373
3374 stack_block_stack = thisblock->data.block.innermost_stack_block;
3375 POPSTACK (block_stack);
3376
3377 /* Pop the stack slot nesting and free any slots at this level. */
3378 pop_temp_slots ();
3379 }
3380 \f
3381 /* Generate RTL for the automatic variable declaration DECL.
3382 (Other kinds of declarations are simply ignored if seen here.) */
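/* For instance, given `int i;' (neither volatile nor addressable) in a
   function compiled with optimization, the code below typically gives I
   a pseudo register via gen_reg_rtx of the promoted mode, whereas
   `char buf[10];', or any variable whose address is taken, gets a stack
   slot from assign_stack_temp.  */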
3383
3384 void
3385 expand_decl (decl)
3386 register tree decl;
3387 {
3388 struct nesting *thisblock = block_stack;
3389 tree type;
3390
3391 type = TREE_TYPE (decl);
3392
3393 /* Only automatic variables need any expansion done.
3394 Static and external variables, and external functions,
3395 will be handled by `assemble_variable' (called from finish_decl).
3396 TYPE_DECL and CONST_DECL require nothing.
3397 PARM_DECLs are handled in `assign_parms'. */
3398
3399 if (TREE_CODE (decl) != VAR_DECL)
3400 return;
3401 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
3402 return;
3403
3404 /* Create the RTL representation for the variable. */
3405
3406 if (type == error_mark_node)
3407 DECL_RTL (decl) = gen_rtx_MEM (BLKmode, const0_rtx);
3408 else if (DECL_SIZE (decl) == 0)
3409 /* Variable with incomplete type. */
3410 {
3411 if (DECL_INITIAL (decl) == 0)
3412 /* Error message was already done; now avoid a crash. */
3413 DECL_RTL (decl) = assign_stack_temp (DECL_MODE (decl), 0, 1);
3414 else
3415 /* An initializer is going to decide the size of this array.
3416 Until we know the size, represent its address with a reg. */
3417 DECL_RTL (decl) = gen_rtx_MEM (BLKmode, gen_reg_rtx (Pmode));
3418 MEM_IN_STRUCT_P (DECL_RTL (decl)) = AGGREGATE_TYPE_P (type);
3419 }
3420 else if (DECL_MODE (decl) != BLKmode
3421 /* If -ffloat-store, don't put explicit float vars
3422 into regs. */
3423 && !(flag_float_store
3424 && TREE_CODE (type) == REAL_TYPE)
3425 && ! TREE_THIS_VOLATILE (decl)
3426 && ! TREE_ADDRESSABLE (decl)
3427 && (DECL_REGISTER (decl) || ! obey_regdecls)
3428 /* if -fcheck-memory-usage, check all variables. */
3429 && ! current_function_check_memory_usage)
3430 {
3431 /* Automatic variable that can go in a register. */
3432 int unsignedp = TREE_UNSIGNED (type);
3433 enum machine_mode reg_mode
3434 = promote_mode (type, DECL_MODE (decl), &unsignedp, 0);
3435
3436 DECL_RTL (decl) = gen_reg_rtx (reg_mode);
3437 mark_user_reg (DECL_RTL (decl));
3438
3439 if (POINTER_TYPE_P (type))
3440 mark_reg_pointer (DECL_RTL (decl),
3441 (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl)))
3442 / BITS_PER_UNIT));
3443 }
3444
3445 else if (TREE_CODE (DECL_SIZE (decl)) == INTEGER_CST
3446 && ! (flag_stack_check && ! STACK_CHECK_BUILTIN
3447 && (TREE_INT_CST_HIGH (DECL_SIZE (decl)) != 0
3448 || (TREE_INT_CST_LOW (DECL_SIZE (decl))
3449 > STACK_CHECK_MAX_VAR_SIZE * BITS_PER_UNIT))))
3450 {
3451 /* Variable of fixed size that goes on the stack. */
3452 rtx oldaddr = 0;
3453 rtx addr;
3454
3455 /* If we previously made RTL for this decl, it must be an array
3456 whose size was determined by the initializer.
3457 The old address was a register; set that register now
3458 to the proper address. */
3459 if (DECL_RTL (decl) != 0)
3460 {
3461 if (GET_CODE (DECL_RTL (decl)) != MEM
3462 || GET_CODE (XEXP (DECL_RTL (decl), 0)) != REG)
3463 abort ();
3464 oldaddr = XEXP (DECL_RTL (decl), 0);
3465 }
3466
3467 DECL_RTL (decl)
3468 = assign_stack_temp (DECL_MODE (decl),
3469 ((TREE_INT_CST_LOW (DECL_SIZE (decl))
3470 + BITS_PER_UNIT - 1)
3471 / BITS_PER_UNIT),
3472 1);
3473 MEM_IN_STRUCT_P (DECL_RTL (decl)) = AGGREGATE_TYPE_P (TREE_TYPE (decl));
3474
3475 /* Set alignment we actually gave this decl. */
3476 DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
3477 : GET_MODE_BITSIZE (DECL_MODE (decl)));
3478
3479 if (oldaddr)
3480 {
3481 addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
3482 if (addr != oldaddr)
3483 emit_move_insn (oldaddr, addr);
3484 }
3485
3486 /* If this is a memory ref that contains aggregate components,
3487 mark it as such for cse and loop optimize. */
3488 MEM_IN_STRUCT_P (DECL_RTL (decl)) = AGGREGATE_TYPE_P (TREE_TYPE (decl));
3489 #if 0
3490 /* If this is in memory because of -ffloat-store,
3491 set the volatile bit, to prevent optimizations from
3492 undoing the effects. */
3493 if (flag_float_store && TREE_CODE (type) == REAL_TYPE)
3494 MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
3495 #endif
3496
3497 MEM_ALIAS_SET (DECL_RTL (decl)) = get_alias_set (decl);
3498 }
3499 else
3500 /* Dynamic-size object: must push space on the stack. */
3501 {
3502 rtx address, size;
3503
3504 /* Record the stack pointer on entry to block, if we have
3505 not already done so. */
3506 if (thisblock->data.block.stack_level == 0)
3507 {
3508 do_pending_stack_adjust ();
3509 emit_stack_save (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3510 &thisblock->data.block.stack_level,
3511 thisblock->data.block.first_insn);
3512 stack_block_stack = thisblock;
3513 }
3514
3515 /* Compute the variable's size, in bytes. */
3516 size = expand_expr (size_binop (CEIL_DIV_EXPR,
3517 DECL_SIZE (decl),
3518 size_int (BITS_PER_UNIT)),
3519 NULL_RTX, VOIDmode, 0);
3520 free_temp_slots ();
3521
3522 /* Allocate space on the stack for the variable. Note that
3523 DECL_ALIGN says how the variable is to be aligned and we
3524 cannot use it to conclude anything about the alignment of
3525 the size. */
3526 address = allocate_dynamic_stack_space (size, NULL_RTX,
3527 TYPE_ALIGN (TREE_TYPE (decl)));
3528
3529 /* Reference the variable indirectly through that rtx. */
3530 DECL_RTL (decl) = gen_rtx_MEM (DECL_MODE (decl), address);
3531
3532 /* If this is a memory ref that contains aggregate components,
3533 mark it as such for cse and loop optimize. */
3534 MEM_IN_STRUCT_P (DECL_RTL (decl)) = AGGREGATE_TYPE_P (TREE_TYPE (decl));
3535
3536 /* Indicate the alignment we actually gave this variable. */
3537 #ifdef STACK_BOUNDARY
3538 DECL_ALIGN (decl) = STACK_BOUNDARY;
3539 #else
3540 DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
3541 #endif
3542 }
3543
3544 if (TREE_THIS_VOLATILE (decl))
3545 MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
3546 #if 0 /* A variable is not necessarily unchanging
3547 just because it is const. RTX_UNCHANGING_P
3548 means no change in the function,
3549 not merely no change in the variable's scope.
3550 It is correct to set RTX_UNCHANGING_P if the variable's scope
3551 is the whole function. There's no convenient way to test that. */
3552 if (TREE_READONLY (decl))
3553 RTX_UNCHANGING_P (DECL_RTL (decl)) = 1;
3554 #endif
3555
3556 /* If doing stupid register allocation, make sure life of any
3557 register variable starts here, at the start of its scope. */
3558
3559 if (obey_regdecls)
3560 use_variable (DECL_RTL (decl));
3561 }
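/* Illustrative sketch, added for exposition (the source below is a
   hypothetical input, not part of this file): for

	void f (int n)
	{
	  int x;
	  char a[n];
	}

   expand_decl gives `x' either a pseudo register in its promoted mode or
   a fixed stack slot from assign_stack_temp, while `a' has non-constant
   DECL_SIZE and so takes the dynamic path: the stack level of the
   enclosing block is saved and allocate_dynamic_stack_space pushes the
   space, DECL_RTL becoming a MEM of the returned address. */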
3562
3563
3564 \f
3565 /* Emit code to perform the initialization of a declaration DECL. */
3566
3567 void
3568 expand_decl_init (decl)
3569 tree decl;
3570 {
3571 int was_used = TREE_USED (decl);
3572
3573 /* If this is a CONST_DECL, we don't have to generate any code, but
3574 if DECL_INITIAL is a constant, call expand_expr to force TREE_CST_RTL
3575 to be set while in the obstack containing the constant. If we don't
3576 do this, we can lose if we have functions nested three deep and the middle
3577 function makes a CONST_DECL whose DECL_INITIAL is a STRING_CST while
3578 the innermost function is the first to expand that STRING_CST. */
3579 if (TREE_CODE (decl) == CONST_DECL)
3580 {
3581 if (DECL_INITIAL (decl) && TREE_CONSTANT (DECL_INITIAL (decl)))
3582 expand_expr (DECL_INITIAL (decl), NULL_RTX, VOIDmode,
3583 EXPAND_INITIALIZER);
3584 return;
3585 }
3586
3587 if (TREE_STATIC (decl))
3588 return;
3589
3590 /* Compute and store the initial value now. */
3591
3592 if (DECL_INITIAL (decl) == error_mark_node)
3593 {
3594 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
3595
3596 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
3597 || code == POINTER_TYPE || code == REFERENCE_TYPE)
3598 expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
3599 0, 0);
3600 emit_queue ();
3601 }
3602 else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
3603 {
3604 emit_line_note (DECL_SOURCE_FILE (decl), DECL_SOURCE_LINE (decl));
3605 expand_assignment (decl, DECL_INITIAL (decl), 0, 0);
3606 emit_queue ();
3607 }
3608
3609 /* Don't let the initialization count as "using" the variable. */
3610 TREE_USED (decl) = was_used;
3611
3612 /* Free any temporaries we made while initializing the decl. */
3613 preserve_temp_slots (NULL_RTX);
3614 free_temp_slots ();
3615 }
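/* Illustrative sketch (hypothetical source, for exposition only): for

	int i = n + 1;

   the front end calls expand_decl (decl) followed by
   expand_decl_init (decl); the latter emits a line note for the
   declaration, expands the assignment `i = n + 1', and frees the
   temporaries it made.  If the initializer was an error_mark_node and
   the type is scalar, zero is stored instead so later uses of `i' do
   not cascade further errors. */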
3616
3617 /* CLEANUP is an expression to be executed at exit from this binding contour;
3618 for example, in C++, it might call the destructor for this variable.
3619
3620 We wrap CLEANUP in an UNSAVE_EXPR node, so that we can expand the
3621 CLEANUP multiple times, and have the correct semantics. This
3622 happens in exception handling, for gotos, returns, breaks that
3623 leave the current scope.
3624
3625 If CLEANUP is nonzero and DECL is zero, we record a cleanup
3626 that is not associated with any particular variable. */
3627
3628 int
3629 expand_decl_cleanup (decl, cleanup)
3630 tree decl, cleanup;
3631 {
3632 struct nesting *thisblock = block_stack;
3633
3634 /* Error if we are not in any block. */
3635 if (thisblock == 0)
3636 return 0;
3637
3638 /* Record the cleanup if there is one. */
3639
3640 if (cleanup != 0)
3641 {
3642 tree t;
3643 rtx seq;
3644 tree *cleanups = &thisblock->data.block.cleanups;
3645 int cond_context = conditional_context ();
3646
3647 if (cond_context)
3648 {
3649 rtx flag = gen_reg_rtx (word_mode);
3650 rtx set_flag_0;
3651 tree cond;
3652
3653 start_sequence ();
3654 emit_move_insn (flag, const0_rtx);
3655 set_flag_0 = get_insns ();
3656 end_sequence ();
3657
3658 thisblock->data.block.last_unconditional_cleanup
3659 = emit_insns_after (set_flag_0,
3660 thisblock->data.block.last_unconditional_cleanup);
3661
3662 emit_move_insn (flag, const1_rtx);
3663
3664 /* All cleanups must be on the function_obstack. */
3665 push_obstacks_nochange ();
3666 resume_temporary_allocation ();
3667
3668 cond = build_decl (VAR_DECL, NULL_TREE, type_for_mode (word_mode, 1));
3669 DECL_RTL (cond) = flag;
3670
3671 /* Conditionalize the cleanup. */
3672 cleanup = build (COND_EXPR, void_type_node,
3673 truthvalue_conversion (cond),
3674 cleanup, integer_zero_node);
3675 cleanup = fold (cleanup);
3676
3677 pop_obstacks ();
3678
3679 cleanups = thisblock->data.block.cleanup_ptr;
3680 }
3681
3682 /* All cleanups must be on the function_obstack. */
3683 push_obstacks_nochange ();
3684 resume_temporary_allocation ();
3685 cleanup = unsave_expr (cleanup);
3686 pop_obstacks ();
3687
3688 t = *cleanups = temp_tree_cons (decl, cleanup, *cleanups);
3689
3690 if (! cond_context)
3691 /* If this block has a cleanup, it belongs in stack_block_stack. */
3692 stack_block_stack = thisblock;
3693
3694 if (cond_context)
3695 {
3696 start_sequence ();
3697 }
3698
3699 /* If this was optimized so that there is no exception region for the
3700 cleanup, then mark the TREE_LIST node, so that we can later tell
3701 if we need to call expand_eh_region_end. */
3702 if (! using_eh_for_cleanups_p
3703 || expand_eh_region_start_tree (decl, cleanup))
3704 TREE_ADDRESSABLE (t) = 1;
3705 /* If that started a new EH region, we're in a new block. */
3706 thisblock = block_stack;
3707
3708 if (cond_context)
3709 {
3710 seq = get_insns ();
3711 end_sequence ();
3712 if (seq)
3713 thisblock->data.block.last_unconditional_cleanup
3714 = emit_insns_after (seq,
3715 thisblock->data.block.last_unconditional_cleanup);
3716 }
3717 else
3718 {
3719 thisblock->data.block.last_unconditional_cleanup
3720 = get_last_insn ();
3721 thisblock->data.block.cleanup_ptr = &thisblock->data.block.cleanups;
3722 }
3723 }
3724 return 1;
3725 }
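/* Illustrative sketch of a front-end caller (schematic; the helper
   build_dtor_call below is hypothetical and not part of GCC): for a
   C++ object with a destructor,

	{
	  String s;
	  ...
	}

   the front end might expand the declaration and register the
   destructor call as a cleanup:

	expand_decl (s_decl);
	expand_decl_init (s_decl);
	expand_decl_cleanup (s_decl, build_dtor_call (s_decl));

   Falling off the end of the block, a goto or return out of it, and an
   exception all then run the cleanup; if the registration happened in
   conditional code, the cleanup is guarded by the flag set up above. */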
3726
3727 /* Like expand_decl_cleanup, but suppress generating an exception handler
3728 to perform the cleanup. */
3729
3730 int
3731 expand_decl_cleanup_no_eh (decl, cleanup)
3732 tree decl, cleanup;
3733 {
3734 int save_eh = using_eh_for_cleanups_p;
3735 int result;
3736
3737 using_eh_for_cleanups_p = 0;
3738 result = expand_decl_cleanup (decl, cleanup);
3739 using_eh_for_cleanups_p = save_eh;
3740
3741 return result;
3742 }
3743
3744 /* Arrange for the top element of the dynamic cleanup chain to be
3745 popped if we exit the current binding contour. DECL is the
3746 associated declaration, if any, otherwise NULL_TREE. If the
3747 current contour is left via an exception, then __sjthrow will pop
3748 the top element off the dynamic cleanup chain. The code that
3749 avoids doing the action we push into the cleanup chain in the
3750 exceptional case is contained in expand_cleanups.
3751
3752 This routine is only used by expand_eh_region_start, and that is
3753 the only way in which an exception region should be started. This
3754 routine is only used when using the setjmp/longjmp codegen method
3755 for exception handling. */
3756
3757 int
3758 expand_dcc_cleanup (decl)
3759 tree decl;
3760 {
3761 struct nesting *thisblock = block_stack;
3762 tree cleanup;
3763
3764 /* Error if we are not in any block. */
3765 if (thisblock == 0)
3766 return 0;
3767
3768 /* Record the cleanup for the dynamic handler chain. */
3769
3770 /* All cleanups must be on the function_obstack. */
3771 push_obstacks_nochange ();
3772 resume_temporary_allocation ();
3773 cleanup = make_node (POPDCC_EXPR);
3774 pop_obstacks ();
3775
3776 /* Add the cleanup in a manner similar to expand_decl_cleanup. */
3777 thisblock->data.block.cleanups
3778 = temp_tree_cons (decl, cleanup, thisblock->data.block.cleanups);
3779
3780 /* If this block has a cleanup, it belongs in stack_block_stack. */
3781 stack_block_stack = thisblock;
3782 return 1;
3783 }
3784
3785 /* Arrange for the top element of the dynamic handler chain to be
3786 popped if we exit the current binding contour. DECL is the
3787 associated declaration, if any, otherwise NULL_TREE. If the current
3788 contour is left via an exception, then __sjthrow will pop the top
3789 element off the dynamic handler chain. The code that avoids doing
3790 the action we push into the handler chain in the exceptional case
3791 is contained in expand_cleanups.
3792
3793 This routine is only used by expand_eh_region_start, and that is
3794 the only way in which an exception region should be started. This
3795 routine is only used when using the setjmp/longjmp codegen method
3796 for exception handling. */
3797
3798 int
3799 expand_dhc_cleanup (decl)
3800 tree decl;
3801 {
3802 struct nesting *thisblock = block_stack;
3803 tree cleanup;
3804
3805 /* Error if we are not in any block. */
3806 if (thisblock == 0)
3807 return 0;
3808
3809 /* Record the cleanup for the dynamic handler chain. */
3810
3811 /* All cleanups must be on the function_obstack. */
3812 push_obstacks_nochange ();
3813 resume_temporary_allocation ();
3814 cleanup = make_node (POPDHC_EXPR);
3815 pop_obstacks ();
3816
3817 /* Add the cleanup in a manner similar to expand_decl_cleanup. */
3818 thisblock->data.block.cleanups
3819 = temp_tree_cons (decl, cleanup, thisblock->data.block.cleanups);
3820
3821 /* If this block has a cleanup, it belongs in stack_block_stack. */
3822 stack_block_stack = thisblock;
3823 return 1;
3824 }
3825 \f
3826 /* DECL is an anonymous union. CLEANUP is a cleanup for DECL.
3827 DECL_ELTS is the list of elements that belong to DECL's type.
3828 In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup. */
3829
3830 void
3831 expand_anon_union_decl (decl, cleanup, decl_elts)
3832 tree decl, cleanup, decl_elts;
3833 {
3834 struct nesting *thisblock = block_stack;
3835 rtx x;
3836
3837 expand_decl (decl);
3838 expand_decl_cleanup (decl, cleanup);
3839 x = DECL_RTL (decl);
3840
3841 while (decl_elts)
3842 {
3843 tree decl_elt = TREE_VALUE (decl_elts);
3844 tree cleanup_elt = TREE_PURPOSE (decl_elts);
3845 enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
3846
3847 /* Propagate the union's alignment to the elements. */
3848 DECL_ALIGN (decl_elt) = DECL_ALIGN (decl);
3849
3850 /* If the element has BLKmode and the union doesn't, the union is
3851 aligned such that the element doesn't need to have BLKmode, so
3852 change the element's mode to the appropriate one for its size. */
3853 if (mode == BLKmode && DECL_MODE (decl) != BLKmode)
3854 DECL_MODE (decl_elt) = mode
3855 = mode_for_size (TREE_INT_CST_LOW (DECL_SIZE (decl_elt)),
3856 MODE_INT, 1);
3857
3858 /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
3859 instead create a new MEM rtx with the proper mode. */
3860 if (GET_CODE (x) == MEM)
3861 {
3862 if (mode == GET_MODE (x))
3863 DECL_RTL (decl_elt) = x;
3864 else
3865 {
3866 DECL_RTL (decl_elt) = gen_rtx_MEM (mode, copy_rtx (XEXP (x, 0)));
3867 MEM_IN_STRUCT_P (DECL_RTL (decl_elt)) = MEM_IN_STRUCT_P (x);
3868 RTX_UNCHANGING_P (DECL_RTL (decl_elt)) = RTX_UNCHANGING_P (x);
3869 }
3870 }
3871 else if (GET_CODE (x) == REG)
3872 {
3873 if (mode == GET_MODE (x))
3874 DECL_RTL (decl_elt) = x;
3875 else
3876 DECL_RTL (decl_elt) = gen_rtx_SUBREG (mode, x, 0);
3877 }
3878 else
3879 abort ();
3880
3881 /* Record the cleanup if there is one. */
3882
3883 if (cleanup != 0)
3884 thisblock->data.block.cleanups
3885 = temp_tree_cons (decl_elt, cleanup_elt,
3886 thisblock->data.block.cleanups);
3887
3888 decl_elts = TREE_CHAIN (decl_elts);
3889 }
3890 }
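/* Illustrative example (hypothetical C++ source, for exposition): for
   an anonymous union such as

	union { int i; float f; };

   the front end expands a single DECL for the union and passes the
   members as DECL_ELTS; each member then aliases the union's storage,
   either as a MEM in the member's own mode or as a SUBREG when the
   union lives in a register. */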
3891 \f
3892 /* Expand a list of cleanups LIST.
3893 Elements may be expressions or may be nested lists.
3894
3895 If DONT_DO is nonnull, then any list-element
3896 whose TREE_PURPOSE matches DONT_DO is omitted.
3897 This is sometimes used to avoid a cleanup associated with
3898 a value that is being returned out of the scope.
3899
3900 If IN_FIXUP is non-zero, we are generating this cleanup for a fixup
3901 goto and handle protection regions specially in that case.
3902
3903 If REACHABLE, we emit code, otherwise just inform the exception handling
3904 code about this finalization. */
3905
3906 static void
3907 expand_cleanups (list, dont_do, in_fixup, reachable)
3908 tree list;
3909 tree dont_do;
3910 int in_fixup;
3911 int reachable;
3912 {
3913 tree tail;
3914 for (tail = list; tail; tail = TREE_CHAIN (tail))
3915 if (dont_do == 0 || TREE_PURPOSE (tail) != dont_do)
3916 {
3917 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3918 expand_cleanups (TREE_VALUE (tail), dont_do, in_fixup, reachable);
3919 else
3920 {
3921 if (! in_fixup)
3922 {
3923 tree cleanup = TREE_VALUE (tail);
3924
3925 /* See expand_d{h,c}c_cleanup for why we avoid this. */
3926 if (TREE_CODE (cleanup) != POPDHC_EXPR
3927 && TREE_CODE (cleanup) != POPDCC_EXPR
3928 /* See expand_eh_region_start_tree for this case. */
3929 && ! TREE_ADDRESSABLE (tail))
3930 {
3931 cleanup = protect_with_terminate (cleanup);
3932 expand_eh_region_end (cleanup);
3933 }
3934 }
3935
3936 if (reachable)
3937 {
3938 /* Cleanups may be run multiple times. For example,
3939 when exiting a binding contour, we expand the
3940 cleanups associated with that contour. When a goto
3941 within that binding contour has a target outside that
3942 contour, it will expand all cleanups from its scope to
3943 the target. Though the cleanups are expanded multiple
3944 times, the control paths are non-overlapping so the
3945 cleanups will not be executed twice. */
3946
3947 /* We may need to protect fixups with rethrow regions. */
3948 int protect = (in_fixup && ! TREE_ADDRESSABLE (tail));
3949
3950 if (protect)
3951 expand_fixup_region_start ();
3952
3953 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
3954 if (protect)
3955 expand_fixup_region_end (TREE_VALUE (tail));
3956 free_temp_slots ();
3957 }
3958 }
3959 }
3960 }
3961
3962 /* Mark the context we are emitting RTL for as a conditional
3963 context, so that any cleanup actions we register with
3964 expand_decl_cleanup will be properly conditionalized when those
3965 cleanup actions are later performed. Must be called before any
3966 expression (tree) is expanded that is within a conditional context. */
3967
3968 void
3969 start_cleanup_deferral ()
3970 {
3971 /* block_stack can be NULL if we are inside the parameter list. It is
3972 OK to do nothing, because cleanups aren't possible here. */
3973 if (block_stack)
3974 ++block_stack->data.block.conditional_code;
3975 }
3976
3977 /* Mark the end of a conditional region of code. Because cleanup
3978 deferrals may be nested, we may still be in a conditional region
3979 after we end the currently deferred cleanups; only after we end all
3980 deferred cleanups are we back in unconditional code. */
3981
3982 void
3983 end_cleanup_deferral ()
3984 {
3985 /* block_stack can be NULL if we are inside the parameter list. It is
3986 OK to do nothing, because cleanups aren't possible here. */
3987 if (block_stack)
3988 --block_stack->data.block.conditional_code;
3989 }
3990
3991 /* Move all cleanups from the current block_stack
3992 to the containing block_stack, where they are assumed to
3993 have been created. If anything can cause a temporary to
3994 be created, but not expanded for more than one level of
3995 block_stacks, then this code will have to change. */
3996
3997 void
3998 move_cleanups_up ()
3999 {
4000 struct nesting *block = block_stack;
4001 struct nesting *outer = block->next;
4002
4003 outer->data.block.cleanups
4004 = chainon (block->data.block.cleanups,
4005 outer->data.block.cleanups);
4006 block->data.block.cleanups = 0;
4007 }
4008
4009 tree
4010 last_cleanup_this_contour ()
4011 {
4012 if (block_stack == 0)
4013 return 0;
4014
4015 return block_stack->data.block.cleanups;
4016 }
4017
4018 /* Return 1 if there are any pending cleanups at this point.
4019 If THIS_CONTOUR is nonzero, check the current contour as well.
4020 Otherwise, look only at the contours that enclose this one. */
4021
4022 int
4023 any_pending_cleanups (this_contour)
4024 int this_contour;
4025 {
4026 struct nesting *block;
4027
4028 if (block_stack == 0)
4029 return 0;
4030
4031 if (this_contour && block_stack->data.block.cleanups != NULL)
4032 return 1;
4033 if (block_stack->data.block.cleanups == 0
4034 && block_stack->data.block.outer_cleanups == 0)
4035 return 0;
4036
4037 for (block = block_stack->next; block; block = block->next)
4038 if (block->data.block.cleanups != 0)
4039 return 1;
4040
4041 return 0;
4042 }
4043 \f
4044 /* Enter a case (Pascal) or switch (C) statement.
4045 Push a block onto case_stack and nesting_stack
4046 to accumulate the case-labels that are seen
4047 and to record the labels generated for the statement.
4048
4049 EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
4050 Otherwise, this construct is transparent for `exit_something'.
4051
4052 EXPR is the index-expression to be dispatched on.
4053 TYPE is its nominal type. We could simply convert EXPR to this type,
4054 but instead we take short cuts. */
4055
4056 void
4057 expand_start_case (exit_flag, expr, type, printname)
4058 int exit_flag;
4059 tree expr;
4060 tree type;
4061 char *printname;
4062 {
4063 register struct nesting *thiscase = ALLOC_NESTING ();
4064
4065 /* Make an entry on case_stack for the case we are entering. */
4066
4067 thiscase->next = case_stack;
4068 thiscase->all = nesting_stack;
4069 thiscase->depth = ++nesting_depth;
4070 thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
4071 thiscase->data.case_stmt.case_list = 0;
4072 thiscase->data.case_stmt.index_expr = expr;
4073 thiscase->data.case_stmt.nominal_type = type;
4074 thiscase->data.case_stmt.default_label = 0;
4075 thiscase->data.case_stmt.num_ranges = 0;
4076 thiscase->data.case_stmt.printname = printname;
4077 thiscase->data.case_stmt.line_number_status = force_line_numbers ();
4078 case_stack = thiscase;
4079 nesting_stack = thiscase;
4080
4081 do_pending_stack_adjust ();
4082
4083 /* Make sure case_stmt.start points to something that won't
4084 need any transformation before expand_end_case. */
4085 if (GET_CODE (get_last_insn ()) != NOTE)
4086 emit_note (NULL_PTR, NOTE_INSN_DELETED);
4087
4088 thiscase->data.case_stmt.start = get_last_insn ();
4089
4090 start_cleanup_deferral ();
4091 }
4092
4093
4094 /* Start a "dummy case statement" within which case labels are invalid
4095 and are not connected to any larger real case statement.
4096 This can be used if you don't want to let a case statement jump
4097 into the middle of certain kinds of constructs. */
4098
4099 void
4100 expand_start_case_dummy ()
4101 {
4102 register struct nesting *thiscase = ALLOC_NESTING ();
4103
4104 /* Make an entry on case_stack for the dummy. */
4105
4106 thiscase->next = case_stack;
4107 thiscase->all = nesting_stack;
4108 thiscase->depth = ++nesting_depth;
4109 thiscase->exit_label = 0;
4110 thiscase->data.case_stmt.case_list = 0;
4111 thiscase->data.case_stmt.start = 0;
4112 thiscase->data.case_stmt.nominal_type = 0;
4113 thiscase->data.case_stmt.default_label = 0;
4114 thiscase->data.case_stmt.num_ranges = 0;
4115 case_stack = thiscase;
4116 nesting_stack = thiscase;
4117 start_cleanup_deferral ();
4118 }
4119
4120 /* End a dummy case statement. */
4121
4122 void
4123 expand_end_case_dummy ()
4124 {
4125 end_cleanup_deferral ();
4126 POPSTACK (case_stack);
4127 }
4128
4129 /* Return the data type of the index-expression
4130 of the innermost case statement, or null if none. */
4131
4132 tree
4133 case_index_expr_type ()
4134 {
4135 if (case_stack)
4136 return TREE_TYPE (case_stack->data.case_stmt.index_expr);
4137 return 0;
4138 }
4139 \f
4140 static void
4141 check_seenlabel ()
4142 {
4143 /* If this is the first label, warn if any insns have been emitted. */
4144 if (case_stack->data.case_stmt.line_number_status >= 0)
4145 {
4146 rtx insn;
4147
4148 restore_line_number_status
4149 (case_stack->data.case_stmt.line_number_status);
4150 case_stack->data.case_stmt.line_number_status = -1;
4151
4152 for (insn = case_stack->data.case_stmt.start;
4153 insn;
4154 insn = NEXT_INSN (insn))
4155 {
4156 if (GET_CODE (insn) == CODE_LABEL)
4157 break;
4158 if (GET_CODE (insn) != NOTE
4159 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
4160 {
4161 do
4162 insn = PREV_INSN (insn);
4163 while (insn && (GET_CODE (insn) != NOTE || NOTE_LINE_NUMBER (insn) < 0));
4164
4165 /* If insn is zero, then there must have been a syntax error. */
4166 if (insn)
4167 warning_with_file_and_line (NOTE_SOURCE_FILE(insn),
4168 NOTE_LINE_NUMBER(insn),
4169 "unreachable code at beginning of %s",
4170 case_stack->data.case_stmt.printname);
4171 break;
4172 }
4173 }
4174 }
4175 }
4176
4177 /* Accumulate one case or default label inside a case or switch statement.
4178 VALUE is the value of the case (a null pointer, for a default label).
4179 The function CONVERTER, when applied to arguments T and V,
4180 converts the value V to the type T.
4181
4182 If not currently inside a case or switch statement, return 1 and do
4183 nothing. The caller will print a language-specific error message.
4184 If VALUE is a duplicate or overlaps, return 2 and do nothing
4185 except store the (first) duplicate node in *DUPLICATE.
4186 If VALUE is out of range, return 3 and do nothing.
4187 If we are jumping into the scope of a cleanup or var-sized array, return 5.
4188 Return 0 on success.
4189
4190 Extended to handle range statements. */
4191
4192 int
4193 pushcase (value, converter, label, duplicate)
4194 register tree value;
4195 tree (*converter) PROTO((tree, tree));
4196 register tree label;
4197 tree *duplicate;
4198 {
4199 tree index_type;
4200 tree nominal_type;
4201
4202 /* Fail if not inside a real case statement. */
4203 if (! (case_stack && case_stack->data.case_stmt.start))
4204 return 1;
4205
4206 if (stack_block_stack
4207 && stack_block_stack->depth > case_stack->depth)
4208 return 5;
4209
4210 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4211 nominal_type = case_stack->data.case_stmt.nominal_type;
4212
4213 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4214 if (index_type == error_mark_node)
4215 return 0;
4216
4217 /* Convert VALUE to the type in which the comparisons are nominally done. */
4218 if (value != 0)
4219 value = (*converter) (nominal_type, value);
4220
4221 check_seenlabel ();
4222
4223 /* Fail if this value is out of range for the actual type of the index
4224 (which may be narrower than NOMINAL_TYPE). */
4225 if (value != 0 && ! int_fits_type_p (value, index_type))
4226 return 3;
4227
4228 /* Fail if this is a duplicate or overlaps another entry. */
4229 if (value == 0)
4230 {
4231 if (case_stack->data.case_stmt.default_label != 0)
4232 {
4233 *duplicate = case_stack->data.case_stmt.default_label;
4234 return 2;
4235 }
4236 case_stack->data.case_stmt.default_label = label;
4237 }
4238 else
4239 return add_case_node (value, value, label, duplicate);
4240
4241 expand_label (label);
4242 return 0;
4243 }
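/* Illustrative sketch of a caller checking the return codes described
   above (schematic front-end code; the diagnostics are invented for
   exposition):

	tree duplicate;
	switch (pushcase (value, converter, label, &duplicate))
	  {
	  case 1: error ("case label not within a switch statement"); break;
	  case 2: error ("duplicate case value"); break;
	  case 3: error ("case value out of range"); break;
	  case 5: error ("case label within scope of cleanup or variable array"); break;
	  }
*/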
4244
4245 /* Like pushcase but this case applies to all values between VALUE1 and
4246 VALUE2 (inclusive). If VALUE1 is NULL, the range starts at the lowest
4247 value of the index type and ends at VALUE2. If VALUE2 is NULL, the range
4248 starts at VALUE1 and ends at the highest value of the index type.
4249 If both are NULL, this case applies to all values.
4250
4251 The return value is the same as that of pushcase but there is one
4252 additional error code: 4 means the specified range was empty. */
4253
4254 int
4255 pushcase_range (value1, value2, converter, label, duplicate)
4256 register tree value1, value2;
4257 tree (*converter) PROTO((tree, tree));
4258 register tree label;
4259 tree *duplicate;
4260 {
4261 tree index_type;
4262 tree nominal_type;
4263
4264 /* Fail if not inside a real case statement. */
4265 if (! (case_stack && case_stack->data.case_stmt.start))
4266 return 1;
4267
4268 if (stack_block_stack
4269 && stack_block_stack->depth > case_stack->depth)
4270 return 5;
4271
4272 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4273 nominal_type = case_stack->data.case_stmt.nominal_type;
4274
4275 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4276 if (index_type == error_mark_node)
4277 return 0;
4278
4279 check_seenlabel ();
4280
4281 /* Convert VALUEs to type in which the comparisons are nominally done
4282 and replace any unspecified value with the corresponding bound. */
4283 if (value1 == 0)
4284 value1 = TYPE_MIN_VALUE (index_type);
4285 if (value2 == 0)
4286 value2 = TYPE_MAX_VALUE (index_type);
4287
4288 /* Fail if the range is empty. Do this before any conversion since
4289 we want to allow out-of-range empty ranges. */
4290 if (value2 && tree_int_cst_lt (value2, value1))
4291 return 4;
4292
4293 value1 = (*converter) (nominal_type, value1);
4294
4295 /* If the max was unbounded, use the max of the nominal_type we are
4296 converting to. Do this after the < check above to suppress false
4297 positives. */
4298 if (!value2)
4299 value2 = TYPE_MAX_VALUE (nominal_type);
4300 value2 = (*converter) (nominal_type, value2);
4301
4302 /* Fail if these values are out of range. */
4303 if (TREE_CONSTANT_OVERFLOW (value1)
4304 || ! int_fits_type_p (value1, index_type))
4305 return 3;
4306
4307 if (TREE_CONSTANT_OVERFLOW (value2)
4308 || ! int_fits_type_p (value2, index_type))
4309 return 3;
4310
4311 return add_case_node (value1, value2, label, duplicate);
4312 }
4313
4314 /* Do the actual insertion of a case label for pushcase and pushcase_range
4315 into case_stack->data.case_stmt.case_list. Use an AVL tree to avoid
4316 slowdown for large switch statements. */
4317
4318 static int
4319 add_case_node (low, high, label, duplicate)
4320 tree low, high;
4321 tree label;
4322 tree *duplicate;
4323 {
4324 struct case_node *p, **q, *r;
4325
4326 q = &case_stack->data.case_stmt.case_list;
4327 p = *q;
4328
4329 while ((r = *q))
4330 {
4331 p = r;
4332
4333 /* Keep going past elements distinctly greater than HIGH. */
4334 if (tree_int_cst_lt (high, p->low))
4335 q = &p->left;
4336
4337 /* or distinctly less than LOW. */
4338 else if (tree_int_cst_lt (p->high, low))
4339 q = &p->right;
4340
4341 else
4342 {
4343 /* We have an overlap; this is an error. */
4344 *duplicate = p->code_label;
4345 return 2;
4346 }
4347 }
4348
4349 /* Add this label to the chain, and succeed.
4350 Copy LOW, HIGH so they are on temporary rather than momentary
4351 obstack and will thus survive till the end of the case statement. */
4352
4353 r = (struct case_node *) oballoc (sizeof (struct case_node));
4354 r->low = copy_node (low);
4355
4356 /* If the bounds are equal, turn this into the one-value case. */
4357
4358 if (tree_int_cst_equal (low, high))
4359 r->high = r->low;
4360 else
4361 {
4362 r->high = copy_node (high);
4363 case_stack->data.case_stmt.num_ranges++;
4364 }
4365
4366 r->code_label = label;
4367 expand_label (label);
4368
4369 *q = r;
4370 r->parent = p;
4371 r->left = 0;
4372 r->right = 0;
4373 r->balance = 0;
4374
4375 while (p)
4376 {
4377 struct case_node *s;
4378
4379 if (r == p->left)
4380 {
4381 int b;
4382
4383 if (! (b = p->balance))
4384 /* Growth propagation from left side. */
4385 p->balance = -1;
4386 else if (b < 0)
4387 {
4388 if (r->balance < 0)
4389 {
4390 /* R-Rotation */
4391 if ((p->left = s = r->right))
4392 s->parent = p;
4393
4394 r->right = p;
4395 p->balance = 0;
4396 r->balance = 0;
4397 s = p->parent;
4398 p->parent = r;
4399
4400 if ((r->parent = s))
4401 {
4402 if (s->left == p)
4403 s->left = r;
4404 else
4405 s->right = r;
4406 }
4407 else
4408 case_stack->data.case_stmt.case_list = r;
4409 }
4410 else
4411 /* r->balance == +1 */
4412 {
4413 /* LR-Rotation */
4414
4415 int b2;
4416 struct case_node *t = r->right;
4417
4418 if ((p->left = s = t->right))
4419 s->parent = p;
4420
4421 t->right = p;
4422 if ((r->right = s = t->left))
4423 s->parent = r;
4424
4425 t->left = r;
4426 b = t->balance;
4427 b2 = b < 0;
4428 p->balance = b2;
4429 b2 = -b2 - b;
4430 r->balance = b2;
4431 t->balance = 0;
4432 s = p->parent;
4433 p->parent = t;
4434 r->parent = t;
4435
4436 if ((t->parent = s))
4437 {
4438 if (s->left == p)
4439 s->left = t;
4440 else
4441 s->right = t;
4442 }
4443 else
4444 case_stack->data.case_stmt.case_list = t;
4445 }
4446 break;
4447 }
4448
4449 else
4450 {
4451 /* p->balance == +1; growth of left side balances the node. */
4452 p->balance = 0;
4453 break;
4454 }
4455 }
4456 else
4457 /* r == p->right */
4458 {
4459 int b;
4460
4461 if (! (b = p->balance))
4462 /* Growth propagation from right side. */
4463 p->balance++;
4464 else if (b > 0)
4465 {
4466 if (r->balance > 0)
4467 {
4468 /* L-Rotation */
4469
4470 if ((p->right = s = r->left))
4471 s->parent = p;
4472
4473 r->left = p;
4474 p->balance = 0;
4475 r->balance = 0;
4476 s = p->parent;
4477 p->parent = r;
4478 if ((r->parent = s))
4479 {
4480 if (s->left == p)
4481 s->left = r;
4482 else
4483 s->right = r;
4484 }
4485
4486 else
4487 case_stack->data.case_stmt.case_list = r;
4488 }
4489
4490 else
4491 /* r->balance == -1 */
4492 {
4493 /* RL-Rotation */
4494 int b2;
4495 struct case_node *t = r->left;
4496
4497 if ((p->right = s = t->left))
4498 s->parent = p;
4499
4500 t->left = p;
4501
4502 if ((r->left = s = t->right))
4503 s->parent = r;
4504
4505 t->right = r;
4506 b = t->balance;
4507 b2 = b < 0;
4508 r->balance = b2;
4509 b2 = -b2 - b;
4510 p->balance = b2;
4511 t->balance = 0;
4512 s = p->parent;
4513 p->parent = t;
4514 r->parent = t;
4515
4516 if ((t->parent = s))
4517 {
4518 if (s->left == p)
4519 s->left = t;
4520 else
4521 s->right = t;
4522 }
4523
4524 else
4525 case_stack->data.case_stmt.case_list = t;
4526 }
4527 break;
4528 }
4529 else
4530 {
4531 /* p->balance == -1; growth of right side balances the node. */
4532 p->balance = 0;
4533 break;
4534 }
4535 }
4536
4537 r = p;
4538 p = p->parent;
4539 }
4540
4541 return 0;
4542 }
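/* Illustrative note, added for exposition: the loop above restores the
   AVL invariant (subtree heights differing by at most one).  For
   example, adding the case values 30, 20 and 10 in that order would
   leave the tree doubly left-heavy at 30, which the single R-rotation
   repairs:

	      30                  20
	     /                   /  \
	   20        ==>       10    30
	   /
	 10
*/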
4543
4544 \f
4545 /* Returns the number of possible values of TYPE.
4546 Returns -1 if the number is unknown or variable.
4547 Returns -2 if the number does not fit in a HOST_WIDE_INT.
4548 Sets *SPARENESS to 2 if TYPE is an ENUMERAL_TYPE whose values
4549 do not increase monotonically (there may be duplicates);
4550 to 1 if the values increase monotonically, but not always by 1;
4551 otherwise sets it to 0. */
4552
4553 HOST_WIDE_INT
4554 all_cases_count (type, spareness)
4555 tree type;
4556 int *spareness;
4557 {
4558 HOST_WIDE_INT count;
4559 *spareness = 0;
4560
4561 switch (TREE_CODE (type))
4562 {
4563 tree t;
4564 case BOOLEAN_TYPE:
4565 count = 2;
4566 break;
4567 case CHAR_TYPE:
4568 count = 1 << BITS_PER_UNIT;
4569 break;
4570 default:
4571 case INTEGER_TYPE:
4572 if (TREE_CODE (TYPE_MIN_VALUE (type)) != INTEGER_CST
4573 || TYPE_MAX_VALUE (type) == NULL
4574 || TREE_CODE (TYPE_MAX_VALUE (type)) != INTEGER_CST)
4575 return -1;
4576 else
4577 {
4578 /* count
4579 = TREE_INT_CST_LOW (TYPE_MAX_VALUE (type))
4580 - TREE_INT_CST_LOW (TYPE_MIN_VALUE (type)) + 1
4581 but with overflow checking. */
4582 tree mint = TYPE_MIN_VALUE (type);
4583 tree maxt = TYPE_MAX_VALUE (type);
4584 HOST_WIDE_INT lo, hi;
4585 neg_double(TREE_INT_CST_LOW (mint), TREE_INT_CST_HIGH (mint),
4586 &lo, &hi);
4587 add_double(TREE_INT_CST_LOW (maxt), TREE_INT_CST_HIGH (maxt),
4588 lo, hi, &lo, &hi);
4589 add_double (lo, hi, 1, 0, &lo, &hi);
4590 if (hi != 0 || lo < 0)
4591 return -2;
4592 count = lo;
4593 }
4594 break;
4595 case ENUMERAL_TYPE:
4596 count = 0;
4597 for (t = TYPE_VALUES (type); t != NULL_TREE; t = TREE_CHAIN (t))
4598 {
4599 if (TREE_CODE (TYPE_MIN_VALUE (type)) != INTEGER_CST
4600 || TREE_CODE (TREE_VALUE (t)) != INTEGER_CST
4601 || TREE_INT_CST_LOW (TYPE_MIN_VALUE (type)) + count
4602 != TREE_INT_CST_LOW (TREE_VALUE (t)))
4603 *spareness = 1;
4604 count++;
4605 }
4606 if (*spareness == 1)
4607 {
4608 tree prev = TREE_VALUE (TYPE_VALUES (type));
4609 for (t = TYPE_VALUES (type); t = TREE_CHAIN (t), t != NULL_TREE; )
4610 {
4611 if (! tree_int_cst_lt (prev, TREE_VALUE (t)))
4612 {
4613 *spareness = 2;
4614 break;
4615 }
4616 prev = TREE_VALUE (t);
4617 }
4618
4619 }
4620 }
4621 return count;
4622 }
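/* Illustrative examples (hypothetical enums, for exposition; the exact
   results also depend on the front end's TYPE_MIN_VALUE):

	enum dense     { A, B, C };           returns 3, *spareness 0
	enum gappy     { P = 0, Q = 4 };      returns 2, *spareness 1
	enum unordered { X = 1, Y = 0 };      returns 2, *spareness 2

   A CHAR_TYPE index yields 1 << BITS_PER_UNIT (256 on common hosts);
   bounds that are not INTEGER_CSTs yield -1; a count that overflows a
   HOST_WIDE_INT yields -2. */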
4623
4624
4625 #define BITARRAY_TEST(ARRAY, INDEX) \
4626 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
4627 & (1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR)))
4628 #define BITARRAY_SET(ARRAY, INDEX) \
4629 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
4630 |= 1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR))
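/* Illustrative usage of the two macros above (exposition only):

	unsigned char seen[4];
	bzero ((char *) seen, sizeof seen);
	BITARRAY_SET (seen, 17);

   BITARRAY_TEST (seen, 17) is now nonzero; with 8-bit chars this is
   bit 1 of byte 2. */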
4631
4632 /* Set the elements of the bitstring CASES_SEEN (which has length COUNT),
4633 with the case values we have seen, assuming the case expression
4634 has the given TYPE.
4635 SPARSENESS is as determined by all_cases_count.
4636
4637 The time needed is proportional to COUNT, unless
4638 SPARSENESS is 2, in which case quadratic time is needed. */
4639
4640 void
4641 mark_seen_cases (type, cases_seen, count, sparseness)
4642 tree type;
4643 unsigned char *cases_seen;
4644 long count;
4645 int sparseness;
4646 {
4647 tree next_node_to_try = NULL_TREE;
4648 long next_node_offset = 0;
4649
4650 register struct case_node *n, *root = case_stack->data.case_stmt.case_list;
4651 tree val = make_node (INTEGER_CST);
4652 TREE_TYPE (val) = type;
4653 if (! root)
4654 ; /* Do nothing */
4655 else if (sparseness == 2)
4656 {
4657 tree t;
4658 HOST_WIDE_INT xlo;
4659
4660 /* This less efficient loop is only needed to handle
4661 duplicate case values (multiple enum constants
4662 with the same value). */
4663 TREE_TYPE (val) = TREE_TYPE (root->low);
4664 for (t = TYPE_VALUES (type), xlo = 0; t != NULL_TREE;
4665 t = TREE_CHAIN (t), xlo++)
4666 {
4667 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (TREE_VALUE (t));
4668 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (TREE_VALUE (t));
4669 n = root;
4670 do
4671 {
4672 /* Keep going past elements distinctly greater than VAL. */
4673 if (tree_int_cst_lt (val, n->low))
4674 n = n->left;
4675
4676 /* or distinctly less than VAL. */
4677 else if (tree_int_cst_lt (n->high, val))
4678 n = n->right;
4679
4680 else
4681 {
4682 /* We have found a matching range. */
4683 BITARRAY_SET (cases_seen, xlo);
4684 break;
4685 }
4686 }
4687 while (n);
4688 }
4689 }
4690 else
4691 {
4692 if (root->left)
4693 case_stack->data.case_stmt.case_list = root = case_tree2list (root, 0);
4694 for (n = root; n; n = n->right)
4695 {
4696 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (n->low);
4697 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (n->low);
4698 while ( ! tree_int_cst_lt (n->high, val))
4699 {
4700 /* Calculate (into xlo) the "offset" of the integer (val).
4701 The element with lowest value has offset 0, the next smallest
4702 element has offset 1, etc. */
4703
4704 HOST_WIDE_INT xlo, xhi;
4705 tree t;
4706 if (sparseness && TYPE_VALUES (type) != NULL_TREE)
4707 {
4708 /* The TYPE_VALUES will be in increasing order, so
4709 start searching where we last ended. */
4710 t = next_node_to_try;
4711 xlo = next_node_offset;
4712 xhi = 0;
4713 for (;;)
4714 {
4715 if (t == NULL_TREE)
4716 {
4717 t = TYPE_VALUES (type);
4718 xlo = 0;
4719 }
4720 if (tree_int_cst_equal (val, TREE_VALUE (t)))
4721 {
4722 next_node_to_try = TREE_CHAIN (t);
4723 next_node_offset = xlo + 1;
4724 break;
4725 }
4726 xlo++;
4727 t = TREE_CHAIN (t);
4728 if (t == next_node_to_try)
4729 {
4730 xlo = -1;
4731 break;
4732 }
4733 }
4734 }
4735 else
4736 {
4737 t = TYPE_MIN_VALUE (type);
4738 if (t)
4739 neg_double (TREE_INT_CST_LOW (t), TREE_INT_CST_HIGH (t),
4740 &xlo, &xhi);
4741 else
4742 xlo = xhi = 0;
4743 add_double (xlo, xhi,
4744 TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
4745 &xlo, &xhi);
4746 }
4747
4748 if (xhi == 0 && xlo >= 0 && xlo < count)
4749 BITARRAY_SET (cases_seen, xlo);
4750 add_double (TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
4751 1, 0,
4752 &TREE_INT_CST_LOW (val), &TREE_INT_CST_HIGH (val));
4753 }
4754 }
4755 }
4756 }
4757
4758 /* Called when the index of a switch statement is an enumerated type
4759 and there is no default label.
4760
4761 Checks that all enumeration literals are covered by the case
4762 expressions of a switch. Also, warn if there are any extra
4763 switch cases that are *not* elements of the enumerated type.
4764
4765 If all enumeration literals were covered by the case expressions,
4766 turn one of the expressions into the default expression since it should
4767 not be possible to fall through such a switch. */
4768
4769 void
4770 check_for_full_enumeration_handling (type)
4771 tree type;
4772 {
4773 register struct case_node *n;
4774 register tree chain;
4775 #if 0 /* variable used by 'if 0'ed code below. */
4776 register struct case_node **l;
4777 int all_values = 1;
4778 #endif
4779
4780 /* True iff the selector type is a numbered set mode. */
4781 int sparseness = 0;
4782
4783 /* The number of possible selector values. */
4784 HOST_WIDE_INT size;
4785
4786 /* For each possible selector value, a one iff it has been matched
4787 by a case value alternative. */
4788 unsigned char *cases_seen;
4789
4790 /* The allocated size of cases_seen, in chars. */
4791 long bytes_needed;
4792
4793 if (! warn_switch)
4794 return;
4795
4796 size = all_cases_count (type, &sparseness);
4797 bytes_needed = (size + HOST_BITS_PER_CHAR) / HOST_BITS_PER_CHAR;
4798
4799 if (size > 0 && size < 600000
4800 /* We deliberately use malloc here - not xmalloc. */
4801 && (cases_seen = (unsigned char *) malloc (bytes_needed)) != NULL)
4802 {
4803 long i;
4804 tree v = TYPE_VALUES (type);
4805 bzero (cases_seen, bytes_needed);
4806
4807 /* The time complexity of this code is normally O(N), where
4808 N is the number of members in the enumerated type.
4809 However, if type is an ENUMERAL_TYPE whose values do not
4810 increase monotonically, O(N*log(N)) time may be needed. */
4811
4812 mark_seen_cases (type, cases_seen, size, sparseness);
4813
4814 for (i = 0; v != NULL_TREE && i < size; i++, v = TREE_CHAIN (v))
4815 {
4816 if (BITARRAY_TEST(cases_seen, i) == 0)
4817 warning ("enumeration value `%s' not handled in switch",
4818 IDENTIFIER_POINTER (TREE_PURPOSE (v)));
4819 }
4820
4821 free (cases_seen);
4822 }
4823
4824 /* Now we go the other way around; we warn if there are case
4825 expressions that don't correspond to enumerators. This can
4826 occur since C and C++ don't enforce type-checking of
4827 assignments to enumeration variables. */
4828
4829 if (case_stack->data.case_stmt.case_list
4830 && case_stack->data.case_stmt.case_list->left)
4831 case_stack->data.case_stmt.case_list
4832 = case_tree2list (case_stack->data.case_stmt.case_list, 0);
4833 if (warn_switch)
4834 for (n = case_stack->data.case_stmt.case_list; n; n = n->right)
4835 {
4836 for (chain = TYPE_VALUES (type);
4837 chain && !tree_int_cst_equal (n->low, TREE_VALUE (chain));
4838 chain = TREE_CHAIN (chain))
4839 ;
4840
4841 if (!chain)
4842 {
4843 if (TYPE_NAME (type) == 0)
4844 warning ("case value `%ld' not in enumerated type",
4845 (long) TREE_INT_CST_LOW (n->low));
4846 else
4847 warning ("case value `%ld' not in enumerated type `%s'",
4848 (long) TREE_INT_CST_LOW (n->low),
4849 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
4850 == IDENTIFIER_NODE)
4851 ? TYPE_NAME (type)
4852 : DECL_NAME (TYPE_NAME (type))));
4853 }
4854 if (!tree_int_cst_equal (n->low, n->high))
4855 {
4856 for (chain = TYPE_VALUES (type);
4857 chain && !tree_int_cst_equal (n->high, TREE_VALUE (chain));
4858 chain = TREE_CHAIN (chain))
4859 ;
4860
4861 if (!chain)
4862 {
4863 if (TYPE_NAME (type) == 0)
4864 warning ("case value `%ld' not in enumerated type",
4865 (long) TREE_INT_CST_LOW (n->high));
4866 else
4867 warning ("case value `%ld' not in enumerated type `%s'",
4868 (long) TREE_INT_CST_LOW (n->high),
4869 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
4870 == IDENTIFIER_NODE)
4871 ? TYPE_NAME (type)
4872 : DECL_NAME (TYPE_NAME (type))));
4873 }
4874 }
4875 }
4876
4877 #if 0
4878 /* ??? This optimization is disabled because it causes valid programs to
4879 fail. ANSI C does not guarantee that an expression with enum type
4880 will have a value that is the same as one of the enumeration literals. */
4881
4882 /* If all values were found as case labels, make one of them the default
4883 label. Thus, this switch will never fall through. We arbitrarily pick
4884 the last one to make the default since this is likely the most
4885 efficient choice. */
4886
4887 if (all_values)
4888 {
4889 for (l = &case_stack->data.case_stmt.case_list;
4890 (*l)->right != 0;
4891 l = &(*l)->right)
4892 ;
4893
4894 case_stack->data.case_stmt.default_label = (*l)->code_label;
4895 *l = 0;
4896 }
4897 #endif /* 0 */
4898 }
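/* Illustrative example (hypothetical source, for exposition): with
   -Wswitch, a switch on a value of type

	enum color { RED, GREEN, BLUE };

   whose body handles RED, GREEN and the literal 7, but has no default
   label, draws both warnings above:

	enumeration value `BLUE' not handled in switch
	case value `7' not in enumerated type `color'
*/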
4899
4900 \f
4901 /* Terminate a case (Pascal) or switch (C) statement
4902 in which ORIG_INDEX is the expression to be tested.
4903 Generate the code to test it and jump to the right place. */
4904
4905 void
4906 expand_end_case (orig_index)
4907 tree orig_index;
4908 {
4909 tree minval, maxval, range, orig_minval;
4910 rtx default_label = 0;
4911 register struct case_node *n;
4912 unsigned int count;
4913 rtx index;
4914 rtx table_label;
4915 int ncases;
4916 rtx *labelvec;
4917 register int i;
4918 rtx before_case;
4919 register struct nesting *thiscase = case_stack;
4920 tree index_expr, index_type;
4921 int unsignedp;
4922
4923 table_label = gen_label_rtx ();
4924 index_expr = thiscase->data.case_stmt.index_expr;
4925 index_type = TREE_TYPE (index_expr);
4926 unsignedp = TREE_UNSIGNED (index_type);
4927
4928 do_pending_stack_adjust ();
4929
4930 /* This might get a spurious warning in the presence of a syntax error;
4931 it could be fixed by moving the call to check_seenlabel after the
4932 check for error_mark_node, and copying the code of check_seenlabel that
4933 deals with case_stack->data.case_stmt.line_number_status /
4934 restore_line_number_status in front of the call to end_cleanup_deferral.
4935 However, this might miss some useful warnings in the presence of
4936 non-syntax errors. */
4937 check_seenlabel ();
4938
4939 /* An ERROR_MARK occurs for various reasons including invalid data type. */
4940 if (index_type != error_mark_node)
4941 {
4942 /* If switch expression was an enumerated type, check that all
4943 enumeration literals are covered by the cases.
4944 No sense trying this if there's a default case, however. */
4945
4946 if (!thiscase->data.case_stmt.default_label
4947 && TREE_CODE (TREE_TYPE (orig_index)) == ENUMERAL_TYPE
4948 && TREE_CODE (index_expr) != INTEGER_CST)
4949 check_for_full_enumeration_handling (TREE_TYPE (orig_index));
4950
4951 /* If we don't have a default-label, create one here,
4952 after the body of the switch. */
4953 if (thiscase->data.case_stmt.default_label == 0)
4954 {
4955 thiscase->data.case_stmt.default_label
4956 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
4957 expand_label (thiscase->data.case_stmt.default_label);
4958 }
4959 default_label = label_rtx (thiscase->data.case_stmt.default_label);
4960
4961 before_case = get_last_insn ();
4962
4963 if (thiscase->data.case_stmt.case_list
4964 && thiscase->data.case_stmt.case_list->left)
4965 thiscase->data.case_stmt.case_list
4966 = case_tree2list(thiscase->data.case_stmt.case_list, 0);
4967
4968 /* Simplify the case-list before we count it. */
4969 group_case_nodes (thiscase->data.case_stmt.case_list);
4970
4971 /* Get upper and lower bounds of case values.
4972 Also convert all the case values to the index expr's data type. */
4973
4974 count = 0;
4975 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
4976 {
4977 /* Check low and high label values are integers. */
4978 if (TREE_CODE (n->low) != INTEGER_CST)
4979 abort ();
4980 if (TREE_CODE (n->high) != INTEGER_CST)
4981 abort ();
4982
4983 n->low = convert (index_type, n->low);
4984 n->high = convert (index_type, n->high);
4985
4986 /* Count the elements and track the largest and smallest
4987 of them (treating them as signed even if they are not). */
4988 if (count++ == 0)
4989 {
4990 minval = n->low;
4991 maxval = n->high;
4992 }
4993 else
4994 {
4995 if (INT_CST_LT (n->low, minval))
4996 minval = n->low;
4997 if (INT_CST_LT (maxval, n->high))
4998 maxval = n->high;
4999 }
5000 /* A range counts double, since it requires two compares. */
5001 if (! tree_int_cst_equal (n->low, n->high))
5002 count++;
5003 }
5004
5005 orig_minval = minval;
5006
5007 /* Compute span of values. */
5008 if (count != 0)
5009 range = fold (build (MINUS_EXPR, index_type, maxval, minval));
5010
5011 end_cleanup_deferral ();
5012
5013 if (count == 0)
5014 {
5015 expand_expr (index_expr, const0_rtx, VOIDmode, 0);
5016 emit_queue ();
5017 emit_jump (default_label);
5018 }
5019
5020 /* If range of values is much bigger than number of values,
5021 make a sequence of conditional branches instead of a dispatch.
5022 If the switch-index is a constant, do it this way
5023 because we can optimize it. */
5024
5025 #ifndef CASE_VALUES_THRESHOLD
5026 #ifdef HAVE_casesi
5027 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
5028 #else
5029 /* If machine does not have a case insn that compares the
5030 bounds, this means extra overhead for dispatch tables
5031 which raises the threshold for using them. */
5032 #define CASE_VALUES_THRESHOLD 5
5033 #endif /* HAVE_casesi */
5034 #endif /* CASE_VALUES_THRESHOLD */
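/* Illustrative example (exposition only; figures assume HAVE_casesi,
   so the threshold above is 4): four case values 1, 2, 3 and 500 span
   a range of 499, which exceeds 10 * 4, so a non-constant index is
   dispatched through the binary tree of compares built below, whereas
   a dense switch over 0 through 9 falls through to the casesi or
   tablejump code in the following arm. */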
5035
5036 else if (TREE_INT_CST_HIGH (range) != 0
5037 || count < (unsigned int) CASE_VALUES_THRESHOLD
5038 || ((unsigned HOST_WIDE_INT) (TREE_INT_CST_LOW (range))
5039 > 10 * count)
5040 #ifndef ASM_OUTPUT_ADDR_DIFF_ELT
5041 || flag_pic
5042 #endif
5043 || TREE_CODE (index_expr) == INTEGER_CST
5044 /* These will reduce to a constant. */
5045 || (TREE_CODE (index_expr) == CALL_EXPR
5046 && TREE_CODE (TREE_OPERAND (index_expr, 0)) == ADDR_EXPR
5047 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == FUNCTION_DECL
5048 && DECL_FUNCTION_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == BUILT_IN_CLASSIFY_TYPE)
5049 || (TREE_CODE (index_expr) == COMPOUND_EXPR
5050 && TREE_CODE (TREE_OPERAND (index_expr, 1)) == INTEGER_CST))
5051 {
5052 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5053
5054 /* If the index is a short or char for which we do not have
5055 an insn to handle comparisons directly, convert it to
5056 a full integer now, rather than letting each comparison
5057 generate the conversion. */
5058
5059 if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
5060 && (cmp_optab->handlers[(int) GET_MODE(index)].insn_code
5061 == CODE_FOR_nothing))
5062 {
5063 enum machine_mode wider_mode;
5064 for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
5065 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
5066 if (cmp_optab->handlers[(int) wider_mode].insn_code
5067 != CODE_FOR_nothing)
5068 {
5069 index = convert_to_mode (wider_mode, index, unsignedp);
5070 break;
5071 }
5072 }
5073
5074 emit_queue ();
5075 do_pending_stack_adjust ();
5076
5077 index = protect_from_queue (index, 0);
5078 if (GET_CODE (index) == MEM)
5079 index = copy_to_reg (index);
5080 if (GET_CODE (index) == CONST_INT
5081 || TREE_CODE (index_expr) == INTEGER_CST)
5082 {
5083 /* Make a tree node with the proper constant value
5084 if we don't already have one. */
5085 if (TREE_CODE (index_expr) != INTEGER_CST)
5086 {
5087 index_expr
5088 = build_int_2 (INTVAL (index),
5089 unsignedp || INTVAL (index) >= 0 ? 0 : -1);
5090 index_expr = convert (index_type, index_expr);
5091 }
5092
5093 /* For constant index expressions we need only
5094 issue an unconditional branch to the appropriate
5095 target code. The job of removing any unreachable
5096 code is left to the optimisation phase if the
5097 "-O" option is specified. */
5098 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5099 if (! tree_int_cst_lt (index_expr, n->low)
5100 && ! tree_int_cst_lt (n->high, index_expr))
5101 break;
5102
5103 if (n)
5104 emit_jump (label_rtx (n->code_label));
5105 else
5106 emit_jump (default_label);
5107 }
5108 else
5109 {
5110 /* If the index expression is not constant we generate
5111 a binary decision tree to select the appropriate
5112 target code. This is done as follows:
5113
5114 The list of cases is rearranged into a binary tree,
5115 nearly optimal assuming equal probability for each case.
5116
5117 The tree is transformed into RTL, eliminating
5118 redundant test conditions at the same time.
5119
5120 If program flow could reach the end of the
5121 decision tree an unconditional jump to the
5122 default code is emitted. */
5123
5124 use_cost_table
5125 = (TREE_CODE (TREE_TYPE (orig_index)) != ENUMERAL_TYPE
5126 && estimate_case_costs (thiscase->data.case_stmt.case_list));
5127 balance_case_nodes (&thiscase->data.case_stmt.case_list,
5128 NULL_PTR);
5129 emit_case_nodes (index, thiscase->data.case_stmt.case_list,
5130 default_label, index_type);
5131 emit_jump_if_reachable (default_label);
5132 }
5133 }
5134 else
5135 {
5136 int win = 0;
5137 #ifdef HAVE_casesi
5138 if (HAVE_casesi)
5139 {
5140 enum machine_mode index_mode = SImode;
5141 int index_bits = GET_MODE_BITSIZE (index_mode);
5142 rtx op1, op2;
5143 enum machine_mode op_mode;
5144
5145 /* Convert the index to SImode. */
5146 if (GET_MODE_BITSIZE (TYPE_MODE (index_type))
5147 > GET_MODE_BITSIZE (index_mode))
5148 {
5149 enum machine_mode omode = TYPE_MODE (index_type);
5150 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
5151
5152 /* We must handle the endpoints in the original mode. */
5153 index_expr = build (MINUS_EXPR, index_type,
5154 index_expr, minval);
5155 minval = integer_zero_node;
5156 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5157 emit_cmp_insn (rangertx, index, LTU, NULL_RTX, omode, 1, 0);
5158 emit_jump_insn (gen_bltu (default_label));
5159 /* Now we can safely truncate. */
5160 index = convert_to_mode (index_mode, index, 0);
5161 }
5162 else
5163 {
5164 if (TYPE_MODE (index_type) != index_mode)
5165 {
5166 index_expr = convert (type_for_size (index_bits, 0),
5167 index_expr);
5168 index_type = TREE_TYPE (index_expr);
5169 }
5170
5171 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5172 }
5173 emit_queue ();
5174 index = protect_from_queue (index, 0);
5175 do_pending_stack_adjust ();
5176
5177 op_mode = insn_operand_mode[(int)CODE_FOR_casesi][0];
5178 if (! (*insn_operand_predicate[(int)CODE_FOR_casesi][0])
5179 (index, op_mode))
5180 index = copy_to_mode_reg (op_mode, index);
5181
5182 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
5183
5184 op_mode = insn_operand_mode[(int)CODE_FOR_casesi][1];
5185 if (! (*insn_operand_predicate[(int)CODE_FOR_casesi][1])
5186 (op1, op_mode))
5187 op1 = copy_to_mode_reg (op_mode, op1);
5188
5189 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
5190
5191 op_mode = insn_operand_mode[(int)CODE_FOR_casesi][2];
5192 if (! (*insn_operand_predicate[(int)CODE_FOR_casesi][2])
5193 (op2, op_mode))
5194 op2 = copy_to_mode_reg (op_mode, op2);
5195
5196 emit_jump_insn (gen_casesi (index, op1, op2,
5197 table_label, default_label));
5198 win = 1;
5199 }
5200 #endif
5201 #ifdef HAVE_tablejump
5202 if (! win && HAVE_tablejump)
5203 {
5204 index_expr = convert (thiscase->data.case_stmt.nominal_type,
5205 fold (build (MINUS_EXPR, index_type,
5206 index_expr, minval)));
5207 index_type = TREE_TYPE (index_expr);
5208 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5209 emit_queue ();
5210 index = protect_from_queue (index, 0);
5211 do_pending_stack_adjust ();
5212
5213 do_tablejump (index, TYPE_MODE (index_type),
5214 expand_expr (range, NULL_RTX, VOIDmode, 0),
5215 table_label, default_label);
5216 win = 1;
5217 }
5218 #endif
5219 if (! win)
5220 abort ();
5221
5222 /* Get table of labels to jump to, in order of case index. */
5223
5224 ncases = TREE_INT_CST_LOW (range) + 1;
5225 labelvec = (rtx *) alloca (ncases * sizeof (rtx));
5226 bzero ((char *) labelvec, ncases * sizeof (rtx));
5227
5228 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5229 {
5230 register HOST_WIDE_INT i
5231 = TREE_INT_CST_LOW (n->low) - TREE_INT_CST_LOW (orig_minval);
5232
5233 while (1)
5234 {
5235 labelvec[i]
5236 = gen_rtx_LABEL_REF (Pmode, label_rtx (n->code_label));
5237 if (i + TREE_INT_CST_LOW (orig_minval)
5238 == TREE_INT_CST_LOW (n->high))
5239 break;
5240 i++;
5241 }
5242 }
5243
5244 /* Fill in the gaps with the default. */
5245 for (i = 0; i < ncases; i++)
5246 if (labelvec[i] == 0)
5247 labelvec[i] = gen_rtx_LABEL_REF (Pmode, default_label);
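/* Illustrative example (not part of the original sources): for a switch
   whose explicit cases are 1 and 3, with orig_minval 0 and range 3,
   ncases is 4 and the two loops above would leave roughly

       labelvec[0] = LABEL_REF (default_label)
       labelvec[1] = LABEL_REF (L1)
       labelvec[2] = LABEL_REF (default_label)
       labelvec[3] = LABEL_REF (L3)

   so every index with no explicit case label dispatches to the default.  */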
5248
5249 /* Output the table. */
5250 emit_label (table_label);
5251
5252 if (CASE_VECTOR_PC_RELATIVE || flag_pic)
5253 emit_jump_insn (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE,
5254 gen_rtx_LABEL_REF (Pmode, table_label),
5255 gen_rtvec_v (ncases, labelvec),
5256 const0_rtx, const0_rtx, 0));
5257 else
5258 emit_jump_insn (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE,
5259 gen_rtvec_v (ncases, labelvec)));
5260
5261 /* If the case insn drops through the table,
5262 after the table we must jump to the default-label.
5263 Otherwise record no drop-through after the table. */
5264 #ifdef CASE_DROPS_THROUGH
5265 emit_jump (default_label);
5266 #else
5267 emit_barrier ();
5268 #endif
5269 }
5270
5271 before_case = squeeze_notes (NEXT_INSN (before_case), get_last_insn ());
5272 reorder_insns (before_case, get_last_insn (),
5273 thiscase->data.case_stmt.start);
5274 }
5275 else
5276 end_cleanup_deferral ();
5277
5278 if (thiscase->exit_label)
5279 emit_label (thiscase->exit_label);
5280
5281 POPSTACK (case_stack);
5282
5283 free_temp_slots ();
5284 }
5285
5286 /* Convert the tree NODE into a list linked by the right field, with the left
5287 field zeroed. RIGHT is used for recursion; it is a list to be placed
5288 rightmost in the resulting list. */
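/* Small worked example (added for illustration): for a tree whose root
   holds 4, with 2 as its left child and 6 as its right child,
   case_tree2list (root, 0) should return the chain 2 -> 4 -> 6 linked
   through the `right' fields, with every `left' field cleared.  */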
5289
5290 static struct case_node *
5291 case_tree2list (node, right)
5292 struct case_node *node, *right;
5293 {
5294 struct case_node *left;
5295
5296 if (node->right)
5297 right = case_tree2list (node->right, right);
5298
5299 node->right = right;
5300 if ((left = node->left))
5301 {
5302 node->left = 0;
5303 return case_tree2list (left, node);
5304 }
5305
5306 return node;
5307 }
5308
5309 /* Generate code to jump to LABEL if OP1 and OP2 are equal. */
5310
5311 static void
5312 do_jump_if_equal (op1, op2, label, unsignedp)
5313 rtx op1, op2, label;
5314 int unsignedp;
5315 {
5316 if (GET_CODE (op1) == CONST_INT
5317 && GET_CODE (op2) == CONST_INT)
5318 {
5319 if (INTVAL (op1) == INTVAL (op2))
5320 emit_jump (label);
5321 }
5322 else
5323 {
5324 enum machine_mode mode = GET_MODE (op1);
5325 if (mode == VOIDmode)
5326 mode = GET_MODE (op2);
5327 emit_cmp_insn (op1, op2, EQ, NULL_RTX, mode, unsignedp, 0);
5328 emit_jump_insn (gen_beq (label));
5329 }
5330 }
5331 \f
5332 /* Not all case values are encountered equally. This function
5333 uses a heuristic to weight case labels, in cases where that
5334 looks like a reasonable thing to do.
5335
5336 Right now, all we try to guess is text, and we establish the
5337 following weights:
5338
5339 chars above space: 16
5340 digits: 16
5341 default: 12
5342 space, punct: 8
5343 tab: 4
5344 newline: 2
5345 other "\" chars: 1
5346 remaining chars: 0
5347
5348 If we find any cases in the switch that are not either -1 or in the range
5349 of valid ASCII characters, or are control characters other than those
5350 commonly used with "\", don't treat this switch as scanning text.
5351
5352 Return 1 if these nodes are suitable for cost estimation, otherwise
5353 return 0. */
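/* Rough example of the intent (an assumption, not taken from the original
   comments): a switch scanning text such as

       switch (c)
         {
         case 'a': ...
         case ' ': ...
         case '\n': ...
         }

   has every case value in [-1, 127] and none that the table marks as -1,
   so this function returns 1 and the letters are weighted more heavily
   than the newline when the tree is balanced.  A case value such as 200
   or '\001' makes it return 0, and the cost table is not used.  */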
5354
5355 static int
5356 estimate_case_costs (node)
5357 case_node_ptr node;
5358 {
5359 tree min_ascii = build_int_2 (-1, -1);
5360 tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
5361 case_node_ptr n;
5362 int i;
5363
5364 /* If we haven't already made the cost table, make it now. Note that the
5365 lower bound of the table is -1, not zero. */
5366
5367 if (cost_table == NULL)
5368 {
5369 cost_table = ((short *) xmalloc (129 * sizeof (short))) + 1;
5370 bzero ((char *) (cost_table - 1), 129 * sizeof (short));
5371
5372 for (i = 0; i < 128; i++)
5373 {
5374 if (ISALNUM (i))
5375 cost_table[i] = 16;
5376 else if (ISPUNCT (i))
5377 cost_table[i] = 8;
5378 else if (ISCNTRL (i))
5379 cost_table[i] = -1;
5380 }
5381
5382 cost_table[' '] = 8;
5383 cost_table['\t'] = 4;
5384 cost_table['\0'] = 4;
5385 cost_table['\n'] = 2;
5386 cost_table['\f'] = 1;
5387 cost_table['\v'] = 1;
5388 cost_table['\b'] = 1;
5389 }
5390
5391 /* See if all the case expressions look like text. It is text if the
5392 constant is >= -1 and the highest constant is <= 127. Do all comparisons
5393 as signed arithmetic since we don't want to ever access cost_table with a
5394 value less than -1. Also check that none of the constants in a range
5395 are strange control characters. */
5396
5397 for (n = node; n; n = n->right)
5398 {
5399 if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
5400 return 0;
5401
5402 for (i = TREE_INT_CST_LOW (n->low); i <= TREE_INT_CST_LOW (n->high); i++)
5403 if (cost_table[i] < 0)
5404 return 0;
5405 }
5406
5407 /* All interesting values are within the range of interesting
5408 ASCII characters. */
5409 return 1;
5410 }
5411
5412 /* Scan an ordered list of case nodes
5413 combining those with consecutive values or ranges.
5414
5415 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
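/* In source terms (illustrative only), something like

       switch (i) { case 1: case 2: case 3: foo (); break; }

   produces three case nodes that all reach the same real insn, so the
   loop below can merge them into one node covering 1..3.  */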
5416
5417 static void
5418 group_case_nodes (head)
5419 case_node_ptr head;
5420 {
5421 case_node_ptr node = head;
5422
5423 while (node)
5424 {
5425 rtx lb = next_real_insn (label_rtx (node->code_label));
5426 rtx lb2;
5427 case_node_ptr np = node;
5428
5429 /* Try to group the successors of NODE with NODE. */
5430 while (((np = np->right) != 0)
5431 /* Do they jump to the same place? */
5432 && ((lb2 = next_real_insn (label_rtx (np->code_label))) == lb
5433 || (lb != 0 && lb2 != 0
5434 && simplejump_p (lb)
5435 && simplejump_p (lb2)
5436 && rtx_equal_p (SET_SRC (PATTERN (lb)),
5437 SET_SRC (PATTERN (lb2)))))
5438 /* Are their ranges consecutive? */
5439 && tree_int_cst_equal (np->low,
5440 fold (build (PLUS_EXPR,
5441 TREE_TYPE (node->high),
5442 node->high,
5443 integer_one_node)))
5444 /* An overflow is not consecutive. */
5445 && tree_int_cst_lt (node->high,
5446 fold (build (PLUS_EXPR,
5447 TREE_TYPE (node->high),
5448 node->high,
5449 integer_one_node))))
5450 {
5451 node->high = np->high;
5452 }
5453 /* NP is the first node after NODE which can't be grouped with it.
5454 Delete the nodes in between, and move on to that node. */
5455 node->right = np;
5456 node = np;
5457 }
5458 }
5459
5460 /* Take an ordered list of case nodes
5461 and transform them into a near optimal binary tree,
5462 on the assumption that any target code selection value is as
5463 likely as any other.
5464
5465 The transformation is performed by splitting the ordered
5466 list into two equal sections plus a pivot. The parts are
5467 then attached to the pivot as left and right branches. Each
5468 branch is then transformed recursively. */
5469
5470 static void
5471 balance_case_nodes (head, parent)
5472 case_node_ptr *head;
5473 case_node_ptr parent;
5474 {
5475 register case_node_ptr np;
5476
5477 np = *head;
5478 if (np)
5479 {
5480 int cost = 0;
5481 int i = 0;
5482 int ranges = 0;
5483 register case_node_ptr *npp;
5484 case_node_ptr left;
5485
5486 /* Count the number of entries on the branch. Also count the ranges. */
5487
5488 while (np)
5489 {
5490 if (!tree_int_cst_equal (np->low, np->high))
5491 {
5492 ranges++;
5493 if (use_cost_table)
5494 cost += cost_table[TREE_INT_CST_LOW (np->high)];
5495 }
5496
5497 if (use_cost_table)
5498 cost += cost_table[TREE_INT_CST_LOW (np->low)];
5499
5500 i++;
5501 np = np->right;
5502 }
5503
5504 if (i > 2)
5505 {
5506 /* Split this list if it is long enough for that to help. */
5507 npp = head;
5508 left = *npp;
5509 if (use_cost_table)
5510 {
5511 /* Find the place in the list that bisects the list's total cost.
5512 Here I gets half the total cost. */
5513 int n_moved = 0;
5514 i = (cost + 1) / 2;
5515 while (1)
5516 {
5517 /* Skip nodes while their cost does not reach that amount. */
5518 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
5519 i -= cost_table[TREE_INT_CST_LOW ((*npp)->high)];
5520 i -= cost_table[TREE_INT_CST_LOW ((*npp)->low)];
5521 if (i <= 0)
5522 break;
5523 npp = &(*npp)->right;
5524 n_moved += 1;
5525 }
5526 if (n_moved == 0)
5527 {
5528 /* Leave this branch lopsided, but optimize left-hand
5529 side and fill in `parent' fields for right-hand side. */
5530 np = *head;
5531 np->parent = parent;
5532 balance_case_nodes (&np->left, np);
5533 for (; np->right; np = np->right)
5534 np->right->parent = np;
5535 return;
5536 }
5537 }
5538 /* If there are just three nodes, split at the middle one. */
5539 else if (i == 3)
5540 npp = &(*npp)->right;
5541 else
5542 {
5543 /* Find the place in the list that bisects the list's total cost,
5544 where ranges count as 2.
5545 Here I gets half the total cost. */
5546 i = (i + ranges + 1) / 2;
5547 while (1)
5548 {
5549 /* Skip nodes while their cost does not reach that amount. */
5550 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
5551 i--;
5552 i--;
5553 if (i <= 0)
5554 break;
5555 npp = &(*npp)->right;
5556 }
5557 }
5558 *head = np = *npp;
5559 *npp = 0;
5560 np->parent = parent;
5561 np->left = left;
5562
5563 /* Optimize each of the two split parts. */
5564 balance_case_nodes (&np->left, np);
5565 balance_case_nodes (&np->right, np);
5566 }
5567 else
5568 {
5569 /* Else leave this branch as one level,
5570 but fill in `parent' fields. */
5571 np = *head;
5572 np->parent = parent;
5573 for (; np->right; np = np->right)
5574 np->right->parent = np;
5575 }
5576 }
5577 }
5578 \f
5579 /* Search the parent sections of the case node tree
5580 to see if a test for the lower bound of NODE would be redundant.
5581 INDEX_TYPE is the type of the index expression.
5582
5583 The instructions to generate the case decision tree are
5584 output in the same order as nodes are processed so it is
5585 known that if a parent node checks the range of the current
5586 node minus one, then the current node is bounded at its lower
5587 span. Thus the test would be redundant. */
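/* Worked example (illustrative assumption): if this node covers 50..60
   and some parent node has already tested against 49 as its high bound,
   control can only reach this node with index >= 50, so the lower-bound
   test here would be redundant and this function returns 1.  */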
5588
5589 static int
5590 node_has_low_bound (node, index_type)
5591 case_node_ptr node;
5592 tree index_type;
5593 {
5594 tree low_minus_one;
5595 case_node_ptr pnode;
5596
5597 /* If the lower bound of this node is the lowest value in the index type,
5598 we need not test it. */
5599
5600 if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
5601 return 1;
5602
5603 /* If this node has a left branch, the value at the left must be less
5604 than that at this node, so it cannot be bounded at the bottom and
5605 we need not bother testing any further. */
5606
5607 if (node->left)
5608 return 0;
5609
5610 low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
5611 node->low, integer_one_node));
5612
5613 /* If the subtraction above overflowed, we can't verify anything.
5614 Otherwise, look for a parent that tests our value - 1. */
5615
5616 if (! tree_int_cst_lt (low_minus_one, node->low))
5617 return 0;
5618
5619 for (pnode = node->parent; pnode; pnode = pnode->parent)
5620 if (tree_int_cst_equal (low_minus_one, pnode->high))
5621 return 1;
5622
5623 return 0;
5624 }
5625
5626 /* Search the parent sections of the case node tree
5627 to see if a test for the upper bound of NODE would be redundant.
5628 INDEX_TYPE is the type of the index expression.
5629
5630 The instructions to generate the case decision tree are
5631 output in the same order as nodes are processed so it is
5632 known that if a parent node checks the range of the current
5633 node plus one, then the current node is bounded at its upper
5634 span. Thus the test would be redundant. */
5635
5636 static int
5637 node_has_high_bound (node, index_type)
5638 case_node_ptr node;
5639 tree index_type;
5640 {
5641 tree high_plus_one;
5642 case_node_ptr pnode;
5643
5644 /* If there is no upper bound, obviously no test is needed. */
5645
5646 if (TYPE_MAX_VALUE (index_type) == NULL)
5647 return 1;
5648
5649 /* If the upper bound of this node is the highest value in the type
5650 of the index expression, we need not test against it. */
5651
5652 if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
5653 return 1;
5654
5655 /* If this node has a right branch, the value at the right must be greater
5656 than that at this node, so it cannot be bounded at the top and
5657 we need not bother testing any further. */
5658
5659 if (node->right)
5660 return 0;
5661
5662 high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
5663 node->high, integer_one_node));
5664
5665 /* If the addition above overflowed, we can't verify anything.
5666 Otherwise, look for a parent that tests our value + 1. */
5667
5668 if (! tree_int_cst_lt (node->high, high_plus_one))
5669 return 0;
5670
5671 for (pnode = node->parent; pnode; pnode = pnode->parent)
5672 if (tree_int_cst_equal (high_plus_one, pnode->low))
5673 return 1;
5674
5675 return 0;
5676 }
5677
5678 /* Search the parent sections of the
5679 case node tree to see if both tests for the upper and lower
5680 bounds of NODE would be redundant. */
5681
5682 static int
5683 node_is_bounded (node, index_type)
5684 case_node_ptr node;
5685 tree index_type;
5686 {
5687 return (node_has_low_bound (node, index_type)
5688 && node_has_high_bound (node, index_type));
5689 }
5690
5691 /* Emit an unconditional jump to LABEL unless it would be dead code. */
5692
5693 static void
5694 emit_jump_if_reachable (label)
5695 rtx label;
5696 {
5697 if (GET_CODE (get_last_insn ()) != BARRIER)
5698 emit_jump (label);
5699 }
5700 \f
5701 /* Emit step-by-step code to select a case for the value of INDEX.
5702 The thus generated decision tree follows the form of the
5703 case-node binary tree NODE, whose nodes represent test conditions.
5704 INDEX_TYPE is the type of the index of the switch.
5705
5706 Care is taken to prune redundant tests from the decision tree
5707 by detecting any boundary conditions already checked by
5708 emitted rtx. (See node_has_high_bound, node_has_low_bound
5709 and node_is_bounded, above.)
5710
5711 Where the test conditions can be shown to be redundant we emit
5712 an unconditional jump to the target code. As a further
5713 optimization, the subordinates of a tree node are examined to
5714 check for bounded nodes. In this case conditional and/or
5715 unconditional jumps as a result of the boundary check for the
5716 current node are arranged to target the subordinates associated
5717 code for out of bound conditions on the current node.
5718
5719 We can assume that when control reaches the code generated here,
5720 the index value has already been compared with the parents
5721 of this node, and determined to be on the same side of each parent
5722 as this node is. Thus, if this node tests for the value 51,
5723 and a parent tested for 52, we don't need to consider
5724 the possibility of a value greater than 51. If another parent
5725 tests for the value 50, then this node need not test anything. */
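/* Very rough sketch of the emitted control flow (assumed shape, not a
   verbatim transcript): for a balanced tree whose root is the single
   value 5 with children 3 and 8, the code generated here behaves like

       if (index == 5) goto L5;
       if (index > 5)  goto right_side;
       if (index == 3) goto L3;
       goto default;
     right_side:
       if (index == 8) goto L8;
       goto default;

   with comparisons that the parents have already established pruned by
   node_has_low_bound / node_has_high_bound.  */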
5726
5727 static void
5728 emit_case_nodes (index, node, default_label, index_type)
5729 rtx index;
5730 case_node_ptr node;
5731 rtx default_label;
5732 tree index_type;
5733 {
5734 /* If INDEX has an unsigned type, we must make unsigned branches. */
5735 int unsignedp = TREE_UNSIGNED (index_type);
5736 typedef rtx rtx_fn ();
5737 rtx_fn *gen_bgt_pat = unsignedp ? gen_bgtu : gen_bgt;
5738 rtx_fn *gen_bge_pat = unsignedp ? gen_bgeu : gen_bge;
5739 rtx_fn *gen_blt_pat = unsignedp ? gen_bltu : gen_blt;
5740 rtx_fn *gen_ble_pat = unsignedp ? gen_bleu : gen_ble;
5741 enum machine_mode mode = GET_MODE (index);
5742
5743 /* See if our parents have already tested everything for us.
5744 If they have, emit an unconditional jump for this node. */
5745 if (node_is_bounded (node, index_type))
5746 emit_jump (label_rtx (node->code_label));
5747
5748 else if (tree_int_cst_equal (node->low, node->high))
5749 {
5750 /* Node is single valued. First see if the index expression matches
5751 this node and then check our children, if any. */
5752
5753 do_jump_if_equal (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
5754 label_rtx (node->code_label), unsignedp);
5755
5756 if (node->right != 0 && node->left != 0)
5757 {
5758 /* This node has children on both sides.
5759 Dispatch to one side or the other
5760 by comparing the index value with this node's value.
5761 If one subtree is bounded, check that one first,
5762 so we can avoid real branches in the tree. */
5763
5764 if (node_is_bounded (node->right, index_type))
5765 {
5766 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5767 VOIDmode, 0),
5768 GT, NULL_RTX, mode, unsignedp, 0);
5769
5770 emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label)));
5771 emit_case_nodes (index, node->left, default_label, index_type);
5772 }
5773
5774 else if (node_is_bounded (node->left, index_type))
5775 {
5776 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5777 VOIDmode, 0),
5778 LT, NULL_RTX, mode, unsignedp, 0);
5779 emit_jump_insn ((*gen_blt_pat) (label_rtx (node->left->code_label)));
5780 emit_case_nodes (index, node->right, default_label, index_type);
5781 }
5782
5783 else
5784 {
5785 /* Neither node is bounded. First distinguish the two sides;
5786 then emit the code for one side at a time. */
5787
5788 tree test_label
5789 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
5790
5791 /* See if the value is on the right. */
5792 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5793 VOIDmode, 0),
5794 GT, NULL_RTX, mode, unsignedp, 0);
5795 emit_jump_insn ((*gen_bgt_pat) (label_rtx (test_label)));
5796
5797 /* Value must be on the left.
5798 Handle the left-hand subtree. */
5799 emit_case_nodes (index, node->left, default_label, index_type);
5800 /* If left-hand subtree does nothing,
5801 go to default. */
5802 emit_jump_if_reachable (default_label);
5803
5804 /* Code branches here for the right-hand subtree. */
5805 expand_label (test_label);
5806 emit_case_nodes (index, node->right, default_label, index_type);
5807 }
5808 }
5809
5810 else if (node->right != 0 && node->left == 0)
5811 {
5812 /* Here we have a right child but no left so we issue a conditional
5813 branch to default and process the right child.
5814
5815 Omit the conditional branch to default if it would avoid only one
5816 right child; it costs too much space to save so little time. */
5817
5818 if (node->right->right || node->right->left
5819 || !tree_int_cst_equal (node->right->low, node->right->high))
5820 {
5821 if (!node_has_low_bound (node, index_type))
5822 {
5823 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5824 VOIDmode, 0),
5825 LT, NULL_RTX, mode, unsignedp, 0);
5826 emit_jump_insn ((*gen_blt_pat) (default_label));
5827 }
5828
5829 emit_case_nodes (index, node->right, default_label, index_type);
5830 }
5831 else
5832 /* We cannot process node->right normally
5833 since we haven't ruled out the numbers less than
5834 this node's value. So handle node->right explicitly. */
5835 do_jump_if_equal (index,
5836 expand_expr (node->right->low, NULL_RTX,
5837 VOIDmode, 0),
5838 label_rtx (node->right->code_label), unsignedp);
5839 }
5840
5841 else if (node->right == 0 && node->left != 0)
5842 {
5843 /* Just one subtree, on the left. */
5844
5845 #if 0 /* The following code and comment were formerly part
5846 of the condition here, but they didn't work
5847 and I don't understand what the idea was. -- rms. */
5848 /* If our "most probable entry" is less probable
5849 than the default label, emit a jump to
5850 the default label using condition codes
5851 already lying around. With no right branch,
5852 a branch-greater-than will get us to the default
5853 label correctly. */
5854 if (use_cost_table
5855 && cost_table[TREE_INT_CST_LOW (node->high)] < 12)
5856 ;
5857 #endif /* 0 */
5858 if (node->left->left || node->left->right
5859 || !tree_int_cst_equal (node->left->low, node->left->high))
5860 {
5861 if (!node_has_high_bound (node, index_type))
5862 {
5863 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5864 VOIDmode, 0),
5865 GT, NULL_RTX, mode, unsignedp, 0);
5866 emit_jump_insn ((*gen_bgt_pat) (default_label));
5867 }
5868
5869 emit_case_nodes (index, node->left, default_label, index_type);
5870 }
5871 else
5872 /* We cannot process node->left normally
5873 since we haven't ruled out the numbers greater than
5874 this node's value. So handle node->left explicitly. */
5875 do_jump_if_equal (index,
5876 expand_expr (node->left->low, NULL_RTX,
5877 VOIDmode, 0),
5878 label_rtx (node->left->code_label), unsignedp);
5879 }
5880 }
5881 else
5882 {
5883 /* Node is a range. These cases are very similar to those for a single
5884 value, except that we do not start by testing whether this node
5885 is the one to branch to. */
5886
5887 if (node->right != 0 && node->left != 0)
5888 {
5889 /* Node has subtrees on both sides.
5890 If the right-hand subtree is bounded,
5891 test for it first, since we can go straight there.
5892 Otherwise, we need to make a branch in the control structure,
5893 then handle the two subtrees. */
5894 tree test_label = 0;
5895
5896 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5897 VOIDmode, 0),
5898 GT, NULL_RTX, mode, unsignedp, 0);
5899
5900 if (node_is_bounded (node->right, index_type))
5901 /* Right hand node is fully bounded so we can eliminate any
5902 testing and branch directly to the target code. */
5903 emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label)));
5904 else
5905 {
5906 /* Right hand node requires testing.
5907 Branch to a label where we will handle it later. */
5908
5909 test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
5910 emit_jump_insn ((*gen_bgt_pat) (label_rtx (test_label)));
5911 }
5912
5913 /* Value belongs to this node or to the left-hand subtree. */
5914
5915 emit_cmp_insn (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
5916 GE, NULL_RTX, mode, unsignedp, 0);
5917 emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label)));
5918
5919 /* Handle the left-hand subtree. */
5920 emit_case_nodes (index, node->left, default_label, index_type);
5921
5922 /* If right node had to be handled later, do that now. */
5923
5924 if (test_label)
5925 {
5926 /* If the left-hand subtree fell through,
5927 don't let it fall into the right-hand subtree. */
5928 emit_jump_if_reachable (default_label);
5929
5930 expand_label (test_label);
5931 emit_case_nodes (index, node->right, default_label, index_type);
5932 }
5933 }
5934
5935 else if (node->right != 0 && node->left == 0)
5936 {
5937 /* Deal with values to the left of this node,
5938 if they are possible. */
5939 if (!node_has_low_bound (node, index_type))
5940 {
5941 emit_cmp_insn (index, expand_expr (node->low, NULL_RTX,
5942 VOIDmode, 0),
5943 LT, NULL_RTX, mode, unsignedp, 0);
5944 emit_jump_insn ((*gen_blt_pat) (default_label));
5945 }
5946
5947 /* Value belongs to this node or to the right-hand subtree. */
5948
5949 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5950 VOIDmode, 0),
5951 LE, NULL_RTX, mode, unsignedp, 0);
5952 emit_jump_insn ((*gen_ble_pat) (label_rtx (node->code_label)));
5953
5954 emit_case_nodes (index, node->right, default_label, index_type);
5955 }
5956
5957 else if (node->right == 0 && node->left != 0)
5958 {
5959 /* Deal with values to the right of this node,
5960 if they are possible. */
5961 if (!node_has_high_bound (node, index_type))
5962 {
5963 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5964 VOIDmode, 0),
5965 GT, NULL_RTX, mode, unsignedp, 0);
5966 emit_jump_insn ((*gen_bgt_pat) (default_label));
5967 }
5968
5969 /* Value belongs to this node or to the left-hand subtree. */
5970
5971 emit_cmp_insn (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
5972 GE, NULL_RTX, mode, unsignedp, 0);
5973 emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label)));
5974
5975 emit_case_nodes (index, node->left, default_label, index_type);
5976 }
5977
5978 else
5979 {
5980 /* Node has no children so we check low and high bounds to remove
5981 redundant tests. Only one of the bounds can exist,
5982 since otherwise this node is bounded--a case tested already. */
5983
5984 if (!node_has_high_bound (node, index_type))
5985 {
5986 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5987 VOIDmode, 0),
5988 GT, NULL_RTX, mode, unsignedp, 0);
5989 emit_jump_insn ((*gen_bgt_pat) (default_label));
5990 }
5991
5992 if (!node_has_low_bound (node, index_type))
5993 {
5994 emit_cmp_insn (index, expand_expr (node->low, NULL_RTX,
5995 VOIDmode, 0),
5996 LT, NULL_RTX, mode, unsignedp, 0);
5997 emit_jump_insn ((*gen_blt_pat) (default_label));
5998 }
5999
6000 emit_jump (label_rtx (node->code_label));
6001 }
6002 }
6003 }
6004 \f
6005 /* These routines are used by the loop unrolling code. They copy BLOCK trees
6006 so that the debugging info will be correct for the unrolled loop. */
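/* Presumed usage (inferred from these comments rather than from the
   callers): the loop unroller calls find_loop_tree_blocks () before it
   starts copying insns and unroll_block_trees () when it is done, e.g.

       find_loop_tree_blocks ();
       ... duplicate the loop body, including its block notes ...
       unroll_block_trees ();

   so that block_vector is available while the copies are made and is
   freed again afterwards.  */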
6007
6008 /* Indexed by block number, contains a pointer to the N'th block node.
6009
6010 Allocated by the call to identify_blocks, then released after the call
6011 to reorder_blocks in the function unroll_block_trees. */
6012
6013 static tree *block_vector;
6014
6015 void
6016 find_loop_tree_blocks ()
6017 {
6018 tree block = DECL_INITIAL (current_function_decl);
6019
6020 block_vector = identify_blocks (block, get_insns ());
6021 }
6022
6023 void
6024 unroll_block_trees ()
6025 {
6026 tree block = DECL_INITIAL (current_function_decl);
6027
6028 reorder_blocks (block_vector, block, get_insns ());
6029
6030 /* Release any memory allocated by identify_blocks. */
6031 if (block_vector)
6032 free (block_vector);
6033 }