1 /* Expands front end tree to back end RTL for GNU C-Compiler
2 Copyright (C) 1987, 88, 89, 92-98, 1999 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
20
21
22 /* This file handles the generation of rtl code from tree structure
23 above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
24 It also creates the rtl expressions for parameters and auto variables
25 and has full responsibility for allocating stack slots.
26
27 The functions whose names start with `expand_' are called by the
28 parser to generate RTL instructions for various kinds of constructs.
29
30 Some control and binding constructs require calling several such
31 functions at different times. For example, a simple if-then
32 is expanded by calling `expand_start_cond' (with the condition-expression
33 as argument) before parsing the then-clause and calling `expand_end_cond'
34 after parsing the then-clause. */
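
/* As a rough sketch (not a verbatim front-end excerpt), a C front end
   expanding `if (cond) stmt;' makes calls along the lines of

       expand_start_cond (cond, 0);    -- emit the test (0 = no EXITFLAG)
       ...expand `stmt'...
       expand_end_cond ();             -- define the end-of-if label

   with an expand_start_else call between the clauses when an `else'
   part is present; the exact sequence varies by front end.  */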
35
36 #include "config.h"
37 #include "system.h"
38
39 #include "rtl.h"
40 #include "tree.h"
41 #include "flags.h"
42 #include "except.h"
43 #include "function.h"
44 #include "insn-flags.h"
45 #include "insn-config.h"
46 #include "insn-codes.h"
47 #include "expr.h"
48 #include "hard-reg-set.h"
49 #include "obstack.h"
50 #include "loop.h"
51 #include "recog.h"
52 #include "machmode.h"
53 #include "toplev.h"
54 #include "output.h"
55
56 #define obstack_chunk_alloc xmalloc
57 #define obstack_chunk_free free
58 struct obstack stmt_obstack;
59
60 /* Assume that case vectors are not pc-relative. */
61 #ifndef CASE_VECTOR_PC_RELATIVE
62 #define CASE_VECTOR_PC_RELATIVE 0
63 #endif
64
65 /* Filename and line number of last line-number note,
66 whether we actually emitted it or not. */
67 char *emit_filename;
68 int emit_lineno;
69
70 /* Nonzero if within a ({...}) grouping, in which case we must
71 always compute a value for each expr-stmt in case it is the last one. */
72
73 int expr_stmts_for_value;
74
75 /* Each time we expand an expression-statement,
76 record the expr's type and its RTL value here. */
77
78 static tree last_expr_type;
79 static rtx last_expr_value;
80
81 /* Each time we expand the end of a binding contour (in `expand_end_bindings')
82 and we emit a new NOTE_INSN_BLOCK_END note, we save a pointer to it here.
83 This is used by the `remember_end_note' function to record the endpoint
84 of each generated block in its associated BLOCK node. */
85
86 static rtx last_block_end_note;
87
88 /* Number of binding contours started so far in this function. */
89
90 int block_start_count;
91
92 /* Nonzero if function being compiled needs to
93 return the address of where it has put a structure value. */
94
95 extern int current_function_returns_pcc_struct;
96
97 /* Label that will go on parm cleanup code, if any.
98 Jumping to this label runs cleanup code for parameters, if
99 such code must be run. Following this code is the logical return label. */
100
101 extern rtx cleanup_label;
102
103 /* Label that will go on function epilogue.
104 Jumping to this label serves as a "return" instruction
105 on machines which require execution of the epilogue on all returns. */
106
107 extern rtx return_label;
108
109 /* Offset to end of allocated area of stack frame.
110 If stack grows down, this is the address of the last stack slot allocated.
111 If stack grows up, this is the address for the next slot. */
112 extern int frame_offset;
113
114 /* Label to jump back to for tail recursion, or 0 if we have
115 not yet needed one for this function. */
116 extern rtx tail_recursion_label;
117
118 /* Place after which to insert the tail_recursion_label if we need one. */
119 extern rtx tail_recursion_reentry;
120
121 /* Location at which to save the argument pointer if it will need to be
122 referenced. There are two cases where this is done: if nonlocal gotos
123 exist, or if vars stored at an offset from the argument pointer will be
124 needed by inner routines. */
125
126 extern rtx arg_pointer_save_area;
127
128 /* Chain of all RTL_EXPRs that have insns in them. */
129 extern tree rtl_expr_chain;
130 \f
131 /* Functions and data structures for expanding case statements. */
132
133 /* Case label structure, used to hold info on labels within case
134 statements. We handle "range" labels; for a single-value label
135 as in C, the high and low limits are the same.
136
137 An AVL tree of case nodes is initially created, and later transformed
138 to a list linked via the RIGHT fields in the nodes. Nodes with
139 higher case values are later in the list.
140
141 Switch statements can be output in one of two forms. A branch table
142 is used if there are more than a few labels and the labels are dense
143 within the range between the smallest and largest case value. If a
144 branch table is used, no further manipulations are done with the case
145 node chain.
146
147 The alternative to the use of a branch table is to generate a series
148 of compare and jump insns. When that is done, we use the LEFT, RIGHT,
149 and PARENT fields to hold a binary tree. Initially the tree is
150 totally unbalanced, with everything on the right. We balance the tree
151 with nodes on the left having lower case values than the parent
152 and nodes on the right having higher values. We then output the tree
153 in order. */
154
155 struct case_node
156 {
157 struct case_node *left; /* Left son in binary tree */
158 struct case_node *right; /* Right son in binary tree; also node chain */
159 struct case_node *parent; /* Parent of node in binary tree */
160 tree low; /* Lowest index value for this label */
161 tree high; /* Highest index value for this label */
162 tree code_label; /* Label to jump to when node matches */
163 int balance;
164 };
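
/* Illustrative sketch: for

       switch (x) { case 1: ...  case 3: ...  case 4 ... 7: ...  default: ... }

   (the range form being a GNU extension), three case_node entries are
   created, with (low, high) = (1,1), (3,3) and (4,7).  They are kept
   first in an AVL tree ordered by value and later flattened into a
   chain through the RIGHT fields; the default label is not a node but
   is recorded in the enclosing case_stmt nesting entry.  */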
165
166 typedef struct case_node case_node;
167 typedef struct case_node *case_node_ptr;
168
169 /* These are used by estimate_case_costs and balance_case_nodes. */
170
171 /* This must be a signed type, and non-ANSI compilers lack signed char. */
172 static short *cost_table;
173 static int use_cost_table;
174 \f
175 /* Stack of control and binding constructs we are currently inside.
176
177 These constructs begin when you call `expand_start_WHATEVER'
178 and end when you call `expand_end_WHATEVER'. This stack records
179 info about how the construct began that tells the end-function
180 what to do. It also may provide information about the construct
181 to alter the behavior of other constructs within the body.
182 For example, they may affect the behavior of C `break' and `continue'.
183
184 Each construct gets one `struct nesting' object.
185 All of these objects are chained through the `all' field.
186 `nesting_stack' points to the first object (innermost construct).
187 The position of an entry on `nesting_stack' is in its `depth' field.
188
189 Each type of construct has its own individual stack.
190 For example, loops have `loop_stack'. Each object points to the
191 next object of the same type through the `next' field.
192
193 Some constructs are visible to `break' exit-statements and others
194 are not. Which constructs are visible depends on the language.
195 Therefore, the data structure allows each construct to be visible
196 or not, according to the args given when the construct is started.
197 The construct is visible if the `exit_label' field is non-null.
198 In that case, the value should be a CODE_LABEL rtx. */
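
/* For instance (sketch), while expanding the body of

       while (c) { if (d) { int v[n]; ... } }

   nesting_stack contains an entry for the `while', one for the `if',
   and a binding contour for the block declaring the variable-sized
   array, innermost entries first; block_stack, cond_stack and
   loop_stack each point at their own kind of entry, and everything is
   chained through the `all' field.  */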
199
200 struct nesting
201 {
202 struct nesting *all;
203 struct nesting *next;
204 int depth;
205 rtx exit_label;
206 union
207 {
208 /* For conds (if-then and if-then-else statements). */
209 struct
210 {
211 /* Label for the end of the if construct.
212 There is none if EXITFLAG was not set
213 and no `else' has been seen yet. */
214 rtx endif_label;
215 /* Label for the end of this alternative.
216 This may be the end of the if or the next else/elseif. */
217 rtx next_label;
218 } cond;
219 /* For loops. */
220 struct
221 {
222 /* Label at the top of the loop; place to loop back to. */
223 rtx start_label;
224 /* Label at the end of the whole construct. */
225 rtx end_label;
226 /* Label before a jump that branches to the end of the whole
227 construct. This is where destructors go if any. */
228 rtx alt_end_label;
229 /* Label for `continue' statement to jump to;
230 this is in front of the stepper of the loop. */
231 rtx continue_label;
232 } loop;
233 /* For variable binding contours. */
234 struct
235 {
236 /* Sequence number of this binding contour within the function,
237 in order of entry. */
238 int block_start_count;
239 /* Nonzero => value to restore stack to on exit. */
240 rtx stack_level;
241 /* The NOTE that starts this contour.
242 Used by expand_goto to check whether the destination
243 is within each contour or not. */
244 rtx first_insn;
245 /* Innermost containing binding contour that has a stack level. */
246 struct nesting *innermost_stack_block;
247 /* List of cleanups to be run on exit from this contour.
248 This is a list of expressions to be evaluated.
249 The TREE_PURPOSE of each link is the ..._DECL node
250 which the cleanup pertains to. */
251 tree cleanups;
252 /* List of cleanup-lists of blocks containing this block,
253 as they were at the locus where this block appears.
254 There is an element for each containing block,
255 ordered innermost containing block first.
256 The tail of this list can be 0,
257 if all remaining elements would be empty lists.
258 The element's TREE_VALUE is the cleanup-list of that block,
259 which may be null. */
260 tree outer_cleanups;
261 /* Chain of labels defined inside this binding contour.
262 For contours that have stack levels or cleanups. */
263 struct label_chain *label_chain;
264 /* Number of function calls seen, as of start of this block. */
265 int function_call_count;
266 /* Nonzero if this is associated with an EH region. */
267 int exception_region;
268 /* The saved target_temp_slot_level from our outer block.
269 We may reset target_temp_slot_level to be the level of
270 this block, if that is done, target_temp_slot_level
271 reverts to the saved target_temp_slot_level at the very
272 end of the block. */
273 int target_temp_slot_level;
274 /* True if we are currently emitting insns in an area of
275 output code that is controlled by a conditional
276 expression. This is used by the cleanup handling code to
277 generate conditional cleanup actions. */
278 int conditional_code;
279 /* A place to move the start of the exception region for any
280 of the conditional cleanups; it must be at the end or after
281 the start of the last unconditional cleanup, and before any
282 conditional branch points. */
283 rtx last_unconditional_cleanup;
284 /* When in a conditional context, this is the specific
285 cleanup list associated with last_unconditional_cleanup,
286 where we place the conditionalized cleanups. */
287 tree *cleanup_ptr;
288 } block;
289 /* For switch (C) or case (Pascal) statements,
290 and also for dummies (see `expand_start_case_dummy'). */
291 struct
292 {
293 /* The insn after which the case dispatch should finally
294 be emitted. Zero for a dummy. */
295 rtx start;
296 /* A list of case labels; it is first built as an AVL tree.
297 During expand_end_case, this is converted to a list, and may be
298 rearranged into a nearly balanced binary tree. */
299 struct case_node *case_list;
300 /* Label to jump to if no case matches. */
301 tree default_label;
302 /* The expression to be dispatched on. */
303 tree index_expr;
304 /* Type that INDEX_EXPR should be converted to. */
305 tree nominal_type;
306 /* Number of range exprs in case statement. */
307 int num_ranges;
308 /* Name of this kind of statement, for warnings. */
309 char *printname;
310 /* Used to save no_line_numbers till we see the first case label.
311 We set this to -1 when we see the first case label in this
312 case statement. */
313 int line_number_status;
314 } case_stmt;
315 } data;
316 };
317
318 /* Chain of all pending binding contours. */
319 struct nesting *block_stack;
320
321 /* If any new stacks are added here, add them to POPSTACKS too. */
322
323 /* Chain of all pending binding contours that restore stack levels
324 or have cleanups. */
325 struct nesting *stack_block_stack;
326
327 /* Chain of all pending conditional statements. */
328 struct nesting *cond_stack;
329
330 /* Chain of all pending loops. */
331 struct nesting *loop_stack;
332
333 /* Chain of all pending case or switch statements. */
334 struct nesting *case_stack;
335
336 /* Separate chain including all of the above,
337 chained through the `all' field. */
338 struct nesting *nesting_stack;
339
340 /* Number of entries on nesting_stack now. */
341 int nesting_depth;
342
343 /* Allocate and return a new `struct nesting'. */
344
345 #define ALLOC_NESTING() \
346 (struct nesting *) obstack_alloc (&stmt_obstack, sizeof (struct nesting))
347
348 /* Pop the nesting stack element by element until we pop off
349 the element which is at the top of STACK.
350 Update all the other stacks, popping off elements from them
351 as we pop them from nesting_stack. */
352
353 #define POPSTACK(STACK) \
354 do { struct nesting *target = STACK; \
355 struct nesting *this; \
356 do { this = nesting_stack; \
357 if (loop_stack == this) \
358 loop_stack = loop_stack->next; \
359 if (cond_stack == this) \
360 cond_stack = cond_stack->next; \
361 if (block_stack == this) \
362 block_stack = block_stack->next; \
363 if (stack_block_stack == this) \
364 stack_block_stack = stack_block_stack->next; \
365 if (case_stack == this) \
366 case_stack = case_stack->next; \
367 nesting_depth = nesting_stack->depth - 1; \
368 nesting_stack = this->all; \
369 obstack_free (&stmt_obstack, this); } \
370 while (this != target); } while (0)
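
/* Typical use (sketch): each expand_end_WHATEVER pops its own entry,
   so an end-of-loop routine finishes with roughly

       POPSTACK (loop_stack);

   which also unwinds any entries nested inside it that are still on
   nesting_stack.  */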
371 \f
372 /* In some cases it is impossible to generate code for a forward goto
373 until the label definition is seen. This happens when it may be necessary
374 for the goto to reset the stack pointer: we don't yet know how to do that.
375 So expand_goto puts an entry on this fixup list.
376 Each time a binding contour that resets the stack is exited,
377 we check each fixup.
378 If the target label has now been defined, we can insert the proper code. */
379
380 struct goto_fixup
381 {
382 /* Points to following fixup. */
383 struct goto_fixup *next;
384 /* Points to the insn before the jump insn.
385 If more code must be inserted, it goes after this insn. */
386 rtx before_jump;
387 /* The LABEL_DECL that this jump is jumping to, or 0
388 for break, continue or return. */
389 tree target;
390 /* The BLOCK for the place where this goto was found. */
391 tree context;
392 /* The CODE_LABEL rtx that this is jumping to. */
393 rtx target_rtl;
394 /* Number of binding contours started in current function
395 before the label reference. */
396 int block_start_count;
397 /* The outermost stack level that should be restored for this jump.
398 Each time a binding contour that resets the stack is exited,
399 if the target label is *not* yet defined, this slot is updated. */
400 rtx stack_level;
401 /* List of lists of cleanup expressions to be run by this goto.
402 There is one element for each block that this goto is within.
403 The tail of this list can be 0,
404 if all remaining elements would be empty.
405 The TREE_VALUE contains the cleanup list of that block as of the
406 time this goto was seen.
407 The TREE_ADDRESSABLE flag is 1 for a block that has been exited. */
408 tree cleanup_list_list;
409 };
410
411 static struct goto_fixup *goto_fixup_chain;
412
413 /* Within any binding contour that must restore a stack level,
414 all labels are recorded with a chain of these structures. */
415
416 struct label_chain
417 {
418 /* Points to following fixup. */
419 struct label_chain *next;
420 tree label;
421 };
422
423
424 /* Non-zero if we are using EH to handle cleanups. */
425 static int using_eh_for_cleanups_p = 0;
426
427
428 static int n_occurrences PROTO((int, char *));
429 static void expand_goto_internal PROTO((tree, rtx, rtx));
430 static int expand_fixup PROTO((tree, rtx, rtx));
431 static void expand_nl_handler_label PROTO((rtx, rtx));
432 static void expand_nl_goto_receiver PROTO((void));
433 static void expand_nl_goto_receivers PROTO((struct nesting *));
434 static void fixup_gotos PROTO((struct nesting *, rtx, tree,
435 rtx, int));
436 static void expand_null_return_1 PROTO((rtx, int));
437 static void expand_value_return PROTO((rtx));
438 static int tail_recursion_args PROTO((tree, tree));
439 static void expand_cleanups PROTO((tree, tree, int, int));
440 static void check_seenlabel PROTO((void));
441 static void do_jump_if_equal PROTO((rtx, rtx, rtx, int));
442 static int estimate_case_costs PROTO((case_node_ptr));
443 static void group_case_nodes PROTO((case_node_ptr));
444 static void balance_case_nodes PROTO((case_node_ptr *,
445 case_node_ptr));
446 static int node_has_low_bound PROTO((case_node_ptr, tree));
447 static int node_has_high_bound PROTO((case_node_ptr, tree));
448 static int node_is_bounded PROTO((case_node_ptr, tree));
449 static void emit_jump_if_reachable PROTO((rtx));
450 static void emit_case_nodes PROTO((rtx, case_node_ptr, rtx, tree));
451 static int add_case_node PROTO((tree, tree, tree, tree *));
452 static struct case_node *case_tree2list PROTO((case_node *, case_node *));
453 \f
454 void
455 using_eh_for_cleanups ()
456 {
457 using_eh_for_cleanups_p = 1;
458 }
459
460 void
461 init_stmt ()
462 {
463 gcc_obstack_init (&stmt_obstack);
464 init_eh ();
465 }
466
467 void
468 init_stmt_for_function ()
469 {
470 /* We are not currently within any block, conditional, loop or case. */
471 block_stack = 0;
472 stack_block_stack = 0;
473 loop_stack = 0;
474 case_stack = 0;
475 cond_stack = 0;
476 nesting_stack = 0;
477 nesting_depth = 0;
478
479 block_start_count = 0;
480
481 /* No gotos have been expanded yet. */
482 goto_fixup_chain = 0;
483
484 /* We are not processing a ({...}) grouping. */
485 expr_stmts_for_value = 0;
486 last_expr_type = 0;
487
488 init_eh_for_function ();
489 }
490
491 void
492 save_stmt_status (p)
493 struct function *p;
494 {
495 p->block_stack = block_stack;
496 p->stack_block_stack = stack_block_stack;
497 p->cond_stack = cond_stack;
498 p->loop_stack = loop_stack;
499 p->case_stack = case_stack;
500 p->nesting_stack = nesting_stack;
501 p->nesting_depth = nesting_depth;
502 p->block_start_count = block_start_count;
503 p->last_expr_type = last_expr_type;
504 p->last_expr_value = last_expr_value;
505 p->expr_stmts_for_value = expr_stmts_for_value;
506 p->emit_filename = emit_filename;
507 p->emit_lineno = emit_lineno;
508 p->goto_fixup_chain = goto_fixup_chain;
509 save_eh_status (p);
510 }
511
512 void
513 restore_stmt_status (p)
514 struct function *p;
515 {
516 block_stack = p->block_stack;
517 stack_block_stack = p->stack_block_stack;
518 cond_stack = p->cond_stack;
519 loop_stack = p->loop_stack;
520 case_stack = p->case_stack;
521 nesting_stack = p->nesting_stack;
522 nesting_depth = p->nesting_depth;
523 block_start_count = p->block_start_count;
524 last_expr_type = p->last_expr_type;
525 last_expr_value = p->last_expr_value;
526 expr_stmts_for_value = p->expr_stmts_for_value;
527 emit_filename = p->emit_filename;
528 emit_lineno = p->emit_lineno;
529 goto_fixup_chain = p->goto_fixup_chain;
530 restore_eh_status (p);
531 }
532 \f
533 /* Emit a no-op instruction. */
534
535 void
536 emit_nop ()
537 {
538 rtx last_insn;
539
540 last_insn = get_last_insn ();
541 if (!optimize
542 && (GET_CODE (last_insn) == CODE_LABEL
543 || (GET_CODE (last_insn) == NOTE
544 && prev_real_insn (last_insn) == 0)))
545 emit_insn (gen_nop ());
546 }
547 \f
548 /* Return the rtx-label that corresponds to a LABEL_DECL,
549 creating it if necessary. */
550
551 rtx
552 label_rtx (label)
553 tree label;
554 {
555 if (TREE_CODE (label) != LABEL_DECL)
556 abort ();
557
558 if (DECL_RTL (label))
559 return DECL_RTL (label);
560
561 return DECL_RTL (label) = gen_label_rtx ();
562 }
563
564 /* Add an unconditional jump to LABEL as the next sequential instruction. */
565
566 void
567 emit_jump (label)
568 rtx label;
569 {
570 do_pending_stack_adjust ();
571 emit_jump_insn (gen_jump (label));
572 emit_barrier ();
573 }
574
575 /* Emit code to jump to the address
576 specified by the pointer expression EXP. */
577
578 void
579 expand_computed_goto (exp)
580 tree exp;
581 {
582 rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);
583
584 #ifdef POINTERS_EXTEND_UNSIGNED
585 x = convert_memory_address (Pmode, x);
586 #endif
587
588 emit_queue ();
589 /* Be sure the target address is executable. */
590 if (current_function_check_memory_usage)
591 emit_library_call (chkr_check_exec_libfunc, 1,
592 VOIDmode, 1, x, ptr_mode);
593
594 do_pending_stack_adjust ();
595 emit_indirect_jump (x);
596 }
597 \f
598 /* Handle goto statements and the labels that they can go to. */
599
600 /* Specify the location in the RTL code of a label LABEL,
601 which is a LABEL_DECL tree node.
602
603 This is used for the kind of label that the user can jump to with a
604 goto statement, and for alternatives of a switch or case statement.
605 RTL labels generated for loops and conditionals don't go through here;
606 they are generated directly at the RTL level, by other functions below.
607
608 Note that this has nothing to do with defining label *names*.
609 Languages vary in how they do that and what that even means. */
610
611 void
612 expand_label (label)
613 tree label;
614 {
615 struct label_chain *p;
616
617 do_pending_stack_adjust ();
618 emit_label (label_rtx (label));
619 if (DECL_NAME (label))
620 LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));
621
622 if (stack_block_stack != 0)
623 {
624 p = (struct label_chain *) oballoc (sizeof (struct label_chain));
625 p->next = stack_block_stack->data.block.label_chain;
626 stack_block_stack->data.block.label_chain = p;
627 p->label = label;
628 }
629 }
630
631 /* Declare that LABEL (a LABEL_DECL) may be used for nonlocal gotos
632 from nested functions. */
633
634 void
635 declare_nonlocal_label (label)
636 tree label;
637 {
638 rtx slot = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
639
640 nonlocal_labels = tree_cons (NULL_TREE, label, nonlocal_labels);
641 LABEL_PRESERVE_P (label_rtx (label)) = 1;
642 if (nonlocal_goto_handler_slots == 0)
643 {
644 emit_stack_save (SAVE_NONLOCAL,
645 &nonlocal_goto_stack_level,
646 PREV_INSN (tail_recursion_reentry));
647 }
648 nonlocal_goto_handler_slots
649 = gen_rtx_EXPR_LIST (VOIDmode, slot, nonlocal_goto_handler_slots);
650 }
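
/* For reference, a sketch of the source construct this supports, using
   GNU C nested functions:

       void f (void)
       {
         __label__ out;
         void g (void) { ... goto out; ... }
         ... g (); ...
        out: ;
       }

   The front end declares `out' nonlocal via this function, and the
   `goto out' inside g takes the nonlocal branch of expand_goto below.  */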
651
652 /* Generate RTL code for a `goto' statement with target label LABEL.
653 LABEL should be a LABEL_DECL tree node that was or will later be
654 defined with `expand_label'. */
655
656 void
657 expand_goto (label)
658 tree label;
659 {
660 tree context;
661
662 /* Check for a nonlocal goto to a containing function. */
663 context = decl_function_context (label);
664 if (context != 0 && context != current_function_decl)
665 {
666 struct function *p = find_function_data (context);
667 rtx label_ref = gen_rtx_LABEL_REF (Pmode, label_rtx (label));
668 rtx temp, handler_slot;
669 tree link;
670
671 /* Find the corresponding handler slot for this label. */
672 handler_slot = p->nonlocal_goto_handler_slots;
673 for (link = p->nonlocal_labels; TREE_VALUE (link) != label;
674 link = TREE_CHAIN (link))
675 handler_slot = XEXP (handler_slot, 1);
676 handler_slot = XEXP (handler_slot, 0);
677
678 p->has_nonlocal_label = 1;
679 current_function_has_nonlocal_goto = 1;
680 LABEL_REF_NONLOCAL_P (label_ref) = 1;
681
682 /* Copy the rtl for the slots so that they won't be shared in
683 case the virtual stack vars register gets instantiated differently
684 in the parent than in the child. */
685
686 #if HAVE_nonlocal_goto
687 if (HAVE_nonlocal_goto)
688 emit_insn (gen_nonlocal_goto (lookup_static_chain (label),
689 copy_rtx (handler_slot),
690 copy_rtx (p->nonlocal_goto_stack_level),
691 label_ref));
692 else
693 #endif
694 {
695 rtx addr;
696
697 /* Restore frame pointer for containing function.
698 This sets the actual hard register used for the frame pointer
699 to the location of the function's incoming static chain info.
700 The non-local goto handler will then adjust it to contain the
701 proper value and reload the argument pointer, if needed. */
702 emit_move_insn (hard_frame_pointer_rtx, lookup_static_chain (label));
703
704 /* We have now loaded the frame pointer hardware register with
705 the address that corresponds to the start of the virtual
706 stack vars. So replace virtual_stack_vars_rtx in all
707 addresses we use with hard_frame_pointer_rtx. */
708
709 /* Get addr of containing function's current nonlocal goto handler,
710 which will do any cleanups and then jump to the label. */
711 addr = copy_rtx (handler_slot);
712 temp = copy_to_reg (replace_rtx (addr, virtual_stack_vars_rtx,
713 hard_frame_pointer_rtx));
714
715 /* Restore the stack pointer. Note this uses fp just restored. */
716 addr = p->nonlocal_goto_stack_level;
717 if (addr)
718 addr = replace_rtx (copy_rtx (addr),
719 virtual_stack_vars_rtx,
720 hard_frame_pointer_rtx);
721
722 emit_stack_restore (SAVE_NONLOCAL, addr, NULL_RTX);
723
724 /* USE of hard_frame_pointer_rtx added for consistency; not clear if
725 really needed. */
726 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
727 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
728 emit_indirect_jump (temp);
729 }
730 }
731 else
732 expand_goto_internal (label, label_rtx (label), NULL_RTX);
733 }
734
735 /* Generate RTL code for a `goto' statement with target label BODY.
736 LABEL should be a CODE_LABEL rtx.
737 LAST_INSN, if non-0, is the rtx we should consider as the last
738 insn emitted (for the purposes of cleaning up a return). */
739
740 static void
741 expand_goto_internal (body, label, last_insn)
742 tree body;
743 rtx label;
744 rtx last_insn;
745 {
746 struct nesting *block;
747 rtx stack_level = 0;
748
749 if (GET_CODE (label) != CODE_LABEL)
750 abort ();
751
752 /* If label has already been defined, we can tell now
753 whether and how we must alter the stack level. */
754
755 if (PREV_INSN (label) != 0)
756 {
757 /* Find the innermost pending block that contains the label.
758 (Check containment by comparing insn-uids.)
759 Then restore the outermost stack level within that block,
760 and do cleanups of all blocks contained in it. */
761 for (block = block_stack; block; block = block->next)
762 {
763 if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
764 break;
765 if (block->data.block.stack_level != 0)
766 stack_level = block->data.block.stack_level;
767 /* Execute the cleanups for blocks we are exiting. */
768 if (block->data.block.cleanups != 0)
769 {
770 expand_cleanups (block->data.block.cleanups, NULL_TREE, 1, 1);
771 do_pending_stack_adjust ();
772 }
773 }
774
775 if (stack_level)
776 {
777 /* Ensure stack adjust isn't done by emit_jump, as this
778 would clobber the stack pointer. This one should be
779 deleted as dead by flow. */
780 clear_pending_stack_adjust ();
781 do_pending_stack_adjust ();
782 emit_stack_restore (SAVE_BLOCK, stack_level, NULL_RTX);
783 }
784
785 if (body != 0 && DECL_TOO_LATE (body))
786 error ("jump to `%s' invalidly jumps into binding contour",
787 IDENTIFIER_POINTER (DECL_NAME (body)));
788 }
789 /* Label not yet defined: may need to put this goto
790 on the fixup list. */
791 else if (! expand_fixup (body, label, last_insn))
792 {
793 /* No fixup needed. Record that the label is the target
794 of at least one goto that has no fixup. */
795 if (body != 0)
796 TREE_ADDRESSABLE (body) = 1;
797 }
798
799 emit_jump (label);
800 }
801 \f
802 /* Generate if necessary a fixup for a goto
803 whose target label in tree structure (if any) is TREE_LABEL
804 and whose target in rtl is RTL_LABEL.
805
806 If LAST_INSN is nonzero, we pretend that the jump appears
807 after insn LAST_INSN instead of at the current point in the insn stream.
808
809 The fixup will be used later to insert insns just before the goto.
810 Those insns will restore the stack level as appropriate for the
811 target label, and will (in the case of C++) also invoke any object
812 destructors which have to be invoked when we exit the scopes which
813 are exited by the goto.
814
815 Value is nonzero if a fixup is made. */
816
817 static int
818 expand_fixup (tree_label, rtl_label, last_insn)
819 tree tree_label;
820 rtx rtl_label;
821 rtx last_insn;
822 {
823 struct nesting *block, *end_block;
824
825 /* See if we can recognize which block the label will be output in.
826 This is possible in some very common cases.
827 If we succeed, set END_BLOCK to that block.
828 Otherwise, set it to 0. */
829
830 if (cond_stack
831 && (rtl_label == cond_stack->data.cond.endif_label
832 || rtl_label == cond_stack->data.cond.next_label))
833 end_block = cond_stack;
834 /* If we are in a loop, recognize certain labels which
835 are likely targets. This reduces the number of fixups
836 we need to create. */
837 else if (loop_stack
838 && (rtl_label == loop_stack->data.loop.start_label
839 || rtl_label == loop_stack->data.loop.end_label
840 || rtl_label == loop_stack->data.loop.continue_label))
841 end_block = loop_stack;
842 else
843 end_block = 0;
844
845 /* Now set END_BLOCK to the binding level to which we will return. */
846
847 if (end_block)
848 {
849 struct nesting *next_block = end_block->all;
850 block = block_stack;
851
852 /* First see if the END_BLOCK is inside the innermost binding level.
853 If so, then no cleanups or stack levels are relevant. */
854 while (next_block && next_block != block)
855 next_block = next_block->all;
856
857 if (next_block)
858 return 0;
859
860 /* Otherwise, set END_BLOCK to the innermost binding level
861 which is outside the relevant control-structure nesting. */
862 next_block = block_stack->next;
863 for (block = block_stack; block != end_block; block = block->all)
864 if (block == next_block)
865 next_block = next_block->next;
866 end_block = next_block;
867 }
868
869 /* Does any containing block have a stack level or cleanups?
870 If not, no fixup is needed, and that is the normal case
871 (the only case, for standard C). */
872 for (block = block_stack; block != end_block; block = block->next)
873 if (block->data.block.stack_level != 0
874 || block->data.block.cleanups != 0)
875 break;
876
877 if (block != end_block)
878 {
879 /* Ok, a fixup is needed. Add a fixup to the list of such. */
880 struct goto_fixup *fixup
881 = (struct goto_fixup *) oballoc (sizeof (struct goto_fixup));
882 /* In case an old stack level is restored, make sure that comes
883 after any pending stack adjust. */
884 /* ?? If the fixup isn't to come at the present position,
885 doing the stack adjust here isn't useful. Doing it with our
886 settings at that location isn't useful either. Let's hope
887 someone does it! */
888 if (last_insn == 0)
889 do_pending_stack_adjust ();
890 fixup->target = tree_label;
891 fixup->target_rtl = rtl_label;
892
893 /* Create a BLOCK node and a corresponding matched set of
894 NOTE_INSN_BEGIN_BLOCK and NOTE_INSN_END_BLOCK notes at
895 this point. The notes will encapsulate any and all fixup
896 code which we might later insert at this point in the insn
897 stream. Also, the BLOCK node will be the parent (i.e. the
898 `SUPERBLOCK') of any other BLOCK nodes which we might create
899 later on when we are expanding the fixup code.
900
901 Note that optimization passes (including expand_end_loop)
902 might move the *_BLOCK notes away, so we use a NOTE_INSN_DELETED
903 as a placeholder. */
904
905 {
906 register rtx original_before_jump
907 = last_insn ? last_insn : get_last_insn ();
908 rtx start;
909
910 start_sequence ();
911 pushlevel (0);
912 start = emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
913 fixup->before_jump = emit_note (NULL_PTR, NOTE_INSN_DELETED);
914 last_block_end_note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
915 fixup->context = poplevel (1, 0, 0); /* Create the BLOCK node now! */
916 end_sequence ();
917 emit_insns_after (start, original_before_jump);
918 }
919
920 fixup->block_start_count = block_start_count;
921 fixup->stack_level = 0;
922 fixup->cleanup_list_list
923 = ((block->data.block.outer_cleanups
924 || block->data.block.cleanups)
925 ? tree_cons (NULL_TREE, block->data.block.cleanups,
926 block->data.block.outer_cleanups)
927 : 0);
928 fixup->next = goto_fixup_chain;
929 goto_fixup_chain = fixup;
930 }
931
932 return block != 0;
933 }
934
935
936 \f
937 /* Expand any needed fixups in the outermost binding level of the
938 function. FIRST_INSN is the first insn in the function. */
939
940 void
941 expand_fixups (first_insn)
942 rtx first_insn;
943 {
944 fixup_gotos (NULL_PTR, NULL_RTX, NULL_TREE, first_insn, 0);
945 }
946
947 /* When exiting a binding contour, process all pending gotos requiring fixups.
948 THISBLOCK is the structure that describes the block being exited.
949 STACK_LEVEL is the rtx for the stack level to restore on exiting this contour.
950 CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
951 FIRST_INSN is the insn that began this contour.
952
953 Gotos that jump out of this contour must restore the
954 stack level and do the cleanups before actually jumping.
955
956 DONT_JUMP_IN nonzero means report an error if there is a jump into this
957 contour from before the beginning of the contour.
958 This is also done if STACK_LEVEL is nonzero. */
959
960 static void
961 fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
962 struct nesting *thisblock;
963 rtx stack_level;
964 tree cleanup_list;
965 rtx first_insn;
966 int dont_jump_in;
967 {
968 register struct goto_fixup *f, *prev;
969
970 /* F is the fixup we are considering; PREV is the previous one. */
971 /* We run this loop in two passes so that cleanups of exited blocks
972 are run first, and blocks that are exited are marked so
973 afterwards. */
974
975 for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
976 {
977 /* Test for a fixup that is inactive because it is already handled. */
978 if (f->before_jump == 0)
979 {
980 /* Delete inactive fixup from the chain, if that is easy to do. */
981 if (prev != 0)
982 prev->next = f->next;
983 }
984 /* Has this fixup's target label been defined?
985 If so, we can finalize it. */
986 else if (PREV_INSN (f->target_rtl) != 0)
987 {
988 register rtx cleanup_insns;
989
990 /* Get the first non-label after the label
991 this goto jumps to. If that's before this scope begins,
992 we don't have a jump into the scope. */
993 rtx after_label = f->target_rtl;
994 while (after_label != 0 && GET_CODE (after_label) == CODE_LABEL)
995 after_label = NEXT_INSN (after_label);
996
997 /* If this fixup jumped into this contour from before the beginning
998 of this contour, report an error. */
999 /* ??? Bug: this does not detect jumping in through intermediate
1000 blocks that have stack levels or cleanups.
1001 It detects only a problem with the innermost block
1002 around the label. */
1003 if (f->target != 0
1004 && (dont_jump_in || stack_level || cleanup_list)
1005 /* If AFTER_LABEL is 0, it means the jump goes to the end
1006 of the rtl, which means it jumps into this scope. */
1007 && (after_label == 0
1008 || INSN_UID (first_insn) < INSN_UID (after_label))
1009 && INSN_UID (first_insn) > INSN_UID (f->before_jump)
1010 && ! DECL_ERROR_ISSUED (f->target))
1011 {
1012 error_with_decl (f->target,
1013 "label `%s' used before containing binding contour");
1014 /* Prevent multiple errors for one label. */
1015 DECL_ERROR_ISSUED (f->target) = 1;
1016 }
1017
1018 /* We will expand the cleanups into a sequence of their own and
1019 then later on we will attach this new sequence to the insn
1020 stream just ahead of the actual jump insn. */
1021
1022 start_sequence ();
1023
1024 /* Temporarily restore the lexical context where we will
1025 logically be inserting the fixup code. We do this for the
1026 sake of getting the debugging information right. */
1027
1028 pushlevel (0);
1029 set_block (f->context);
1030
1031 /* Expand the cleanups for blocks this jump exits. */
1032 if (f->cleanup_list_list)
1033 {
1034 tree lists;
1035 for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
1036 /* Marked elements correspond to blocks that have been closed.
1037 Do their cleanups. */
1038 if (TREE_ADDRESSABLE (lists)
1039 && TREE_VALUE (lists) != 0)
1040 {
1041 expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
1042 /* Pop any pushes done in the cleanups,
1043 in case function is about to return. */
1044 do_pending_stack_adjust ();
1045 }
1046 }
1047
1048 /* Restore stack level for the biggest contour that this
1049 jump jumps out of. */
1050 if (f->stack_level)
1051 emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);
1052
1053 /* Finish up the sequence containing the insns which implement the
1054 necessary cleanups, and then attach that whole sequence to the
1055 insn stream just ahead of the actual jump insn. Attaching it
1056 at that point ensures that any cleanups which are in fact
1057 implicit C++ object destructions (which must be executed upon
1058 leaving the block) appear (to the debugger) to be taking place
1059 in an area of the generated code where the object(s) being
1060 destructed are still "in scope". */
1061
1062 cleanup_insns = get_insns ();
1063 poplevel (1, 0, 0);
1064
1065 end_sequence ();
1066 emit_insns_after (cleanup_insns, f->before_jump);
1067
1068
1069 f->before_jump = 0;
1070 }
1071 }
1072
1073 /* For any still-undefined labels, do the cleanups for this block now.
1074 We must do this now since items in the cleanup list may go out
1075 of scope when the block ends. */
1076 for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
1077 if (f->before_jump != 0
1078 && PREV_INSN (f->target_rtl) == 0
1079 /* Label has still not appeared. If we are exiting a block with
1080 a stack level to restore, that started before the fixup,
1081 mark this stack level as needing restoration
1082 when the fixup is later finalized. */
1083 && thisblock != 0
1084 /* Note: if THISBLOCK == 0 and we have a label that hasn't appeared, it
1085 means the label is undefined. That's erroneous, but possible. */
1086 && (thisblock->data.block.block_start_count
1087 <= f->block_start_count))
1088 {
1089 tree lists = f->cleanup_list_list;
1090 rtx cleanup_insns;
1091
1092 for (; lists; lists = TREE_CHAIN (lists))
1093 /* If the following elt. corresponds to our containing block
1094 then the elt. must be for this block. */
1095 if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
1096 {
1097 start_sequence ();
1098 pushlevel (0);
1099 set_block (f->context);
1100 expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
1101 do_pending_stack_adjust ();
1102 cleanup_insns = get_insns ();
1103 poplevel (1, 0, 0);
1104 end_sequence ();
1105 if (cleanup_insns != 0)
1106 f->before_jump
1107 = emit_insns_after (cleanup_insns, f->before_jump);
1108
1109 f->cleanup_list_list = TREE_CHAIN (lists);
1110 }
1111
1112 if (stack_level)
1113 f->stack_level = stack_level;
1114 }
1115 }
1116 \f
1117 /* Return the number of times character C occurs in string S. */
1118 static int
1119 n_occurrences (c, s)
1120 int c;
1121 char *s;
1122 {
1123 int n = 0;
1124 while (*s)
1125 n += (*s++ == c);
1126 return n;
1127 }
1128 \f
1129 /* Generate RTL for an asm statement (explicit assembler code).
1130 BODY is a STRING_CST node containing the assembler code text,
1131 or an ADDR_EXPR containing a STRING_CST. */
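
/* E.g. (sketch): the GNU C statement `asm ("nop");' reaches this point
   with BODY holding the string "nop", and is emitted below as a single
   ASM_INPUT rtx.  */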
1132
1133 void
1134 expand_asm (body)
1135 tree body;
1136 {
1137 if (current_function_check_memory_usage)
1138 {
1139 error ("`asm' cannot be used with `-fcheck-memory-usage'");
1140 return;
1141 }
1142
1143 if (TREE_CODE (body) == ADDR_EXPR)
1144 body = TREE_OPERAND (body, 0);
1145
1146 emit_insn (gen_rtx_ASM_INPUT (VOIDmode,
1147 TREE_STRING_POINTER (body)));
1148 last_expr_type = 0;
1149 }
1150
1151 /* Generate RTL for an asm statement with arguments.
1152 STRING is the instruction template.
1153 OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
1154 Each output or input has an expression in the TREE_VALUE and
1155 a constraint-string in the TREE_PURPOSE.
1156 CLOBBERS is a list of STRING_CST nodes each naming a hard register
1157 that is clobbered by this insn.
1158
1159 Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
1160 Some elements of OUTPUTS may be replaced with trees representing temporary
1161 values. The caller should copy those temporary values to the originally
1162 specified lvalues.
1163
1164 VOL nonzero means the insn is volatile; don't optimize it. */
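
/* For orientation, a sketch of how the arguments arrive: the GNU C
   extended asm

       asm volatile ("add %1,%0" : "=r" (x) : "r" (y) : "cc");

   reaches this function with STRING holding "add %1,%0", OUTPUTS a
   one-element list whose TREE_PURPOSE is the constraint "=r" and whose
   TREE_VALUE is the lvalue x, INPUTS likewise for "r" and y, CLOBBERS
   a list containing "cc", and VOL nonzero because of `volatile'.  */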
1165
1166 void
1167 expand_asm_operands (string, outputs, inputs, clobbers, vol, filename, line)
1168 tree string, outputs, inputs, clobbers;
1169 int vol;
1170 char *filename;
1171 int line;
1172 {
1173 rtvec argvec, constraints;
1174 rtx body;
1175 int ninputs = list_length (inputs);
1176 int noutputs = list_length (outputs);
1177 int ninout = 0;
1178 int nclobbers;
1179 tree tail;
1180 register int i;
1181 /* Vector of RTX's of evaluated output operands. */
1182 rtx *output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
1183 int *inout_opnum = (int *) alloca (noutputs * sizeof (int));
1184 enum machine_mode *inout_mode
1185 = (enum machine_mode *) alloca (noutputs * sizeof (enum machine_mode));
1186 /* The insn we have emitted. */
1187 rtx insn;
1188
1189 /* An ASM with no outputs needs to be treated as volatile, for now. */
1190 if (noutputs == 0)
1191 vol = 1;
1192
1193 if (current_function_check_memory_usage)
1194 {
1195 error ("`asm' cannot be used with `-fcheck-memory-usage'");
1196 return;
1197 }
1198
1199 /* Count the number of meaningful clobbered registers, ignoring what
1200 we would ignore later. */
1201 nclobbers = 0;
1202 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1203 {
1204 char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1205 i = decode_reg_name (regname);
1206 if (i >= 0 || i == -4)
1207 ++nclobbers;
1208 else if (i == -2)
1209 error ("unknown register name `%s' in `asm'", regname);
1210 }
1211
1212 last_expr_type = 0;
1213
1214 /* Check that the number of alternatives is constant across all
1215 operands. */
1216 if (outputs || inputs)
1217 {
1218 tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
1219 int nalternatives = n_occurrences (',', TREE_STRING_POINTER (tmp));
1220 tree next = inputs;
1221
1222 if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
1223 {
1224 error ("too many alternatives in `asm'");
1225 return;
1226 }
1227
1228 tmp = outputs;
1229 while (tmp)
1230 {
1231 char *constraint = TREE_STRING_POINTER (TREE_PURPOSE (tmp));
1232 if (n_occurrences (',', constraint) != nalternatives)
1233 {
1234 error ("operand constraints for `asm' differ in number of alternatives");
1235 return;
1236 }
1237 if (TREE_CHAIN (tmp))
1238 tmp = TREE_CHAIN (tmp);
1239 else
1240 tmp = next, next = 0;
1241 }
1242 }
1243
1244 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1245 {
1246 tree val = TREE_VALUE (tail);
1247 tree type = TREE_TYPE (val);
1248 char *constraint;
1249 char *p;
1250 int c_len;
1251 int j;
1252 int is_inout = 0;
1253 int allows_reg = 0;
1254
1255 /* If there's an erroneous arg, emit no insn. */
1256 if (TREE_TYPE (val) == error_mark_node)
1257 return;
1258
1259 /* Make sure constraint has `=' and does not have `+'. Also, see
1260 if it allows any register. Be liberal on the latter test, since
1261 the worst that happens if we get it wrong is we issue an error
1262 message. */
1263
1264 c_len = TREE_STRING_LENGTH (TREE_PURPOSE (tail)) - 1;
1265 constraint = TREE_STRING_POINTER (TREE_PURPOSE (tail));
1266
1267 /* Allow the `=' or `+' to not be at the beginning of the string,
1268 since it wasn't explicitly documented that way, and there is a
1269 large body of code that puts it last. Swap the character to
1270 the front, so as not to uglify any place else. */
1271 switch (c_len)
1272 {
1273 default:
1274 if ((p = strchr (constraint, '=')) != NULL)
1275 break;
1276 if ((p = strchr (constraint, '+')) != NULL)
1277 break;
1278 case 0:
1279 error ("output operand constraint lacks `='");
1280 return;
1281 }
1282
1283 if (p != constraint)
1284 {
1285 j = *p;
1286 bcopy (constraint, constraint+1, p-constraint);
1287 *constraint = j;
1288
1289 warning ("output constraint `%c' for operand %d is not at the beginning", j, i);
1290 }
1291
1292 is_inout = constraint[0] == '+';
1293 /* Replace '+' with '='. */
1294 constraint[0] = '=';
1295 /* Make sure we can specify the matching operand. */
1296 if (is_inout && i > 9)
1297 {
1298 error ("output operand constraint %d contains `+'", i);
1299 return;
1300 }
1301
1302 for (j = 1; j < c_len; j++)
1303 switch (constraint[j])
1304 {
1305 case '+':
1306 case '=':
1307 error ("operand constraint contains '+' or '=' at illegal position.");
1308 return;
1309
1310 case '%':
1311 if (i + 1 == ninputs + noutputs)
1312 {
1313 error ("`%%' constraint used with last operand");
1314 return;
1315 }
1316 break;
1317
1318 case '?': case '!': case '*': case '&':
1319 case 'V': case 'm': case 'o': case '<': case '>':
1320 case 'E': case 'F': case 'G': case 'H': case 'X':
1321 case 's': case 'i': case 'n':
1322 case 'I': case 'J': case 'K': case 'L': case 'M':
1323 case 'N': case 'O': case 'P': case ',':
1324 #ifdef EXTRA_CONSTRAINT
1325 case 'Q': case 'R': case 'S': case 'T': case 'U':
1326 #endif
1327 break;
1328
1329 case '0': case '1': case '2': case '3': case '4':
1330 case '5': case '6': case '7': case '8': case '9':
1331 error ("matching constraint not valid in output operand");
1332 break;
1333
1334 case 'p': case 'g': case 'r':
1335 default:
1336 allows_reg = 1;
1337 break;
1338 }
1339
1340 /* If an output operand is not a decl or indirect ref and our constraint
1341 allows a register, make a temporary to act as an intermediate.
1342 Make the asm insn write into that, then our caller will copy it to
1343 the real output operand. Likewise for promoted variables. */
1344
1345 if (TREE_CODE (val) == INDIRECT_REF
1346 || (TREE_CODE_CLASS (TREE_CODE (val)) == 'd'
1347 && ! (GET_CODE (DECL_RTL (val)) == REG
1348 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
1349 || ! allows_reg
1350 || is_inout)
1351 {
1352 if (! allows_reg)
1353 mark_addressable (TREE_VALUE (tail));
1354
1355 output_rtx[i]
1356 = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode,
1357 EXPAND_MEMORY_USE_WO);
1358
1359 if (! allows_reg && GET_CODE (output_rtx[i]) != MEM)
1360 error ("output number %d not directly addressable", i);
1361 }
1362 else
1363 {
1364 output_rtx[i] = assign_temp (type, 0, 0, 0);
1365 TREE_VALUE (tail) = make_tree (type, output_rtx[i]);
1366 }
1367
1368 if (is_inout)
1369 {
1370 inout_mode[ninout] = TYPE_MODE (TREE_TYPE (TREE_VALUE (tail)));
1371 inout_opnum[ninout++] = i;
1372 }
1373 }
1374
1375 ninputs += ninout;
1376 if (ninputs + noutputs > MAX_RECOG_OPERANDS)
1377 {
1378 error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
1379 return;
1380 }
1381
1382 /* Make vectors for the expression-rtx and constraint strings. */
1383
1384 argvec = rtvec_alloc (ninputs);
1385 constraints = rtvec_alloc (ninputs);
1386
1387 body = gen_rtx_ASM_OPERANDS (VOIDmode,
1388 TREE_STRING_POINTER (string), "", 0, argvec,
1389 constraints, filename, line);
1390
1391 MEM_VOLATILE_P (body) = vol;
1392
1393 /* Eval the inputs and put them into ARGVEC.
1394 Put their constraints into ASM_INPUTs and store in CONSTRAINTS. */
1395
1396 i = 0;
1397 for (tail = inputs; tail; tail = TREE_CHAIN (tail))
1398 {
1399 int j;
1400 int allows_reg = 0, allows_mem = 0;
1401 char *constraint, *orig_constraint;
1402 int c_len;
1403 rtx op;
1404
1405 /* If there's an erroneous arg, emit no insn,
1406 because the ASM_INPUT would get VOIDmode
1407 and that could cause a crash in reload. */
1408 if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
1409 return;
1410
1411 /* ??? Can this happen, and does the error message make any sense? */
1412 if (TREE_PURPOSE (tail) == NULL_TREE)
1413 {
1414 error ("hard register `%s' listed as input operand to `asm'",
1415 TREE_STRING_POINTER (TREE_VALUE (tail)) );
1416 return;
1417 }
1418
1419 c_len = TREE_STRING_LENGTH (TREE_PURPOSE (tail)) - 1;
1420 constraint = TREE_STRING_POINTER (TREE_PURPOSE (tail));
1421 orig_constraint = constraint;
1422
1423 /* Make sure constraint has neither `=', `+', nor '&'. */
1424
1425 for (j = 0; j < c_len; j++)
1426 switch (constraint[j])
1427 {
1428 case '+': case '=': case '&':
1429 if (constraint == orig_constraint)
1430 {
1431 error ("input operand constraint contains `%c'", constraint[j]);
1432 return;
1433 }
1434 break;
1435
1436 case '%':
1437 if (constraint == orig_constraint
1438 && i + 1 == ninputs - ninout)
1439 {
1440 error ("`%%' constraint used with last operand");
1441 return;
1442 }
1443 break;
1444
1445 case 'V': case 'm': case 'o':
1446 allows_mem = 1;
1447 break;
1448
1449 case '<': case '>':
1450 case '?': case '!': case '*':
1451 case 'E': case 'F': case 'G': case 'H': case 'X':
1452 case 's': case 'i': case 'n':
1453 case 'I': case 'J': case 'K': case 'L': case 'M':
1454 case 'N': case 'O': case 'P': case ',':
1455 #ifdef EXTRA_CONSTRAINT
1456 case 'Q': case 'R': case 'S': case 'T': case 'U':
1457 #endif
1458 break;
1459
1460 /* Whether or not a numeric constraint allows a register is
1461 decided by the matching constraint, and so there is no need
1462 to do anything special with them. We must handle them in
1463 the default case, so that we don't unnecessarily force
1464 operands to memory. */
1465 case '0': case '1': case '2': case '3': case '4':
1466 case '5': case '6': case '7': case '8': case '9':
1467 if (constraint[j] >= '0' + noutputs)
1468 {
1469 error
1470 ("matching constraint references invalid operand number");
1471 return;
1472 }
1473
1474 /* Try and find the real constraint for this dup. */
1475 if (j == 0 && c_len == 1)
1476 {
1477 tree o = outputs;
1478 for (j = constraint[j] - '0'; j > 0; --j)
1479 o = TREE_CHAIN (o);
1480
1481 c_len = TREE_STRING_LENGTH (TREE_PURPOSE (o)) - 1;
1482 constraint = TREE_STRING_POINTER (TREE_PURPOSE (o));
1483 j = 0;
1484 break;
1485 }
1486
1487 /* ... fall through ... */
1488
1489 case 'p': case 'r':
1490 default:
1491 allows_reg = 1;
1492 break;
1493
1494 case 'g':
1495 allows_reg = 1;
1496 allows_mem = 1;
1497 break;
1498 }
1499
1500 if (! allows_reg && allows_mem)
1501 mark_addressable (TREE_VALUE (tail));
1502
1503 op = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode, 0);
1504
1505 if (! asm_operand_ok (op, constraint))
1506 {
1507 if (allows_reg)
1508 op = force_reg (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))), op);
1509 else if (!allows_mem)
1510 warning ("asm operand %d probably doesn't match constraints", i);
1511 else if (CONSTANT_P (op))
1512 op = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
1513 op);
1514 else if (GET_CODE (op) == REG
1515 || GET_CODE (op) == SUBREG
1516 || GET_CODE (op) == CONCAT)
1517 {
1518 tree type = TREE_TYPE (TREE_VALUE (tail));
1519 rtx memloc = assign_temp (type, 1, 1, 1);
1520
1521 emit_move_insn (memloc, op);
1522 op = memloc;
1523 }
1524 else if (GET_CODE (op) == MEM && MEM_VOLATILE_P (op))
1525 /* We won't recognize volatile memory as an available
1526 memory_operand at this point. Ignore it. */
1527 ;
1528 else if (queued_subexp_p (op))
1529 ;
1530 else
1531 /* ??? Leave this only until we have experience with what
1532 happens in combine and elsewhere when constraints are
1533 not satisfied. */
1534 warning ("asm operand %d probably doesn't match constraints", i);
1535 }
1536 XVECEXP (body, 3, i) = op;
1537
1538 XVECEXP (body, 4, i) /* constraints */
1539 = gen_rtx_ASM_INPUT (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
1540 orig_constraint);
1541 i++;
1542 }
1543
1544 /* Protect all the operands from the queue,
1545 now that they have all been evaluated. */
1546
1547 for (i = 0; i < ninputs - ninout; i++)
1548 XVECEXP (body, 3, i) = protect_from_queue (XVECEXP (body, 3, i), 0);
1549
1550 for (i = 0; i < noutputs; i++)
1551 output_rtx[i] = protect_from_queue (output_rtx[i], 1);
1552
1553 /* For in-out operands, copy output rtx to input rtx. */
1554 for (i = 0; i < ninout; i++)
1555 {
1556 static char match[9+1][2]
1557 = {"0", "1", "2", "3", "4", "5", "6", "7", "8", "9"};
1558 int j = inout_opnum[i];
1559
1560 XVECEXP (body, 3, ninputs - ninout + i) /* argvec */
1561 = output_rtx[j];
1562 XVECEXP (body, 4, ninputs - ninout + i) /* constraints */
1563 = gen_rtx_ASM_INPUT (inout_mode[j], match[j]);
1564 }
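
/* Illustrative note (sketch): an operand written as "+r" (x) in the
   source has by now been turned into the output "=r" (x), and the loop
   above adds a matching input whose constraint is that output's
   operand number ("0" for the first output); this is how an in-out
   operand is represented from here on.  */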
1565
1566 /* Now, for each output, construct an rtx
1567 (set OUTPUT (asm_operands INSN OUTPUTNUMBER OUTPUTCONSTRAINT
1568 ARGVEC CONSTRAINTS))
1569 If there is more than one, put them inside a PARALLEL. */
1570
1571 if (noutputs == 1 && nclobbers == 0)
1572 {
1573 XSTR (body, 1) = TREE_STRING_POINTER (TREE_PURPOSE (outputs));
1574 insn = emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
1575 }
1576 else if (noutputs == 0 && nclobbers == 0)
1577 {
1578 /* No output operands: put in a raw ASM_OPERANDS rtx. */
1579 insn = emit_insn (body);
1580 }
1581 else
1582 {
1583 rtx obody = body;
1584 int num = noutputs;
1585 if (num == 0) num = 1;
1586 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
1587
1588 /* For each output operand, store a SET. */
1589
1590 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1591 {
1592 XVECEXP (body, 0, i)
1593 = gen_rtx_SET (VOIDmode,
1594 output_rtx[i],
1595 gen_rtx_ASM_OPERANDS (VOIDmode,
1596 TREE_STRING_POINTER (string),
1597 TREE_STRING_POINTER (TREE_PURPOSE (tail)),
1598 i, argvec, constraints,
1599 filename, line));
1600 MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
1601 }
1602
1603 /* If there are no outputs (but there are some clobbers)
1604 store the bare ASM_OPERANDS into the PARALLEL. */
1605
1606 if (i == 0)
1607 XVECEXP (body, 0, i++) = obody;
1608
1609 /* Store (clobber REG) for each clobbered register specified. */
1610
1611 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1612 {
1613 char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1614 int j = decode_reg_name (regname);
1615
1616 if (j < 0)
1617 {
1618 if (j == -3) /* `cc', which is not a register */
1619 continue;
1620
1621 if (j == -4) /* `memory', don't cache memory across asm */
1622 {
1623 XVECEXP (body, 0, i++)
1624 = gen_rtx_CLOBBER (VOIDmode,
1625 gen_rtx_MEM (BLKmode,
1626 gen_rtx_SCRATCH (VOIDmode)));
1627 continue;
1628 }
1629
1630 /* Ignore unknown register, error already signaled. */
1631 continue;
1632 }
1633
1634 /* Use QImode since that's guaranteed to clobber just one reg. */
1635 XVECEXP (body, 0, i++)
1636 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (QImode, j));
1637 }
1638
1639 insn = emit_insn (body);
1640 }
1641
1642 free_temp_slots ();
1643 }
1644 \f
1645 /* Generate RTL to evaluate the expression EXP
1646 and remember it in case this is the VALUE in a ({... VALUE; }) construct. */
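
/* E.g. (sketch), in the GNU C statement expression

       y = ({ int t = f (); t + 1; });

   the final expression `t + 1' is expanded with expr_stmts_for_value
   nonzero, and its rtx and type are kept in last_expr_value and
   last_expr_type so that the end of the ({...}) grouping can yield
   them as its value.  */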
1647
1648 void
1649 expand_expr_stmt (exp)
1650 tree exp;
1651 {
1652 /* If -W, warn about statements with no side effects,
1653 except for an explicit cast to void (e.g. for assert()), and
1654 except inside a ({...}) where they may be useful. */
1655 if (expr_stmts_for_value == 0 && exp != error_mark_node)
1656 {
1657 if (! TREE_SIDE_EFFECTS (exp) && (extra_warnings || warn_unused)
1658 && !(TREE_CODE (exp) == CONVERT_EXPR
1659 && TREE_TYPE (exp) == void_type_node))
1660 warning_with_file_and_line (emit_filename, emit_lineno,
1661 "statement with no effect");
1662 else if (warn_unused)
1663 warn_if_unused_value (exp);
1664 }
1665
1666 /* If EXP is of function type and we are expanding statements for
1667 value, convert it to pointer-to-function. */
1668 if (expr_stmts_for_value && TREE_CODE (TREE_TYPE (exp)) == FUNCTION_TYPE)
1669 exp = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (exp)), exp);
1670
1671 last_expr_type = TREE_TYPE (exp);
1672 if (flag_syntax_only && ! expr_stmts_for_value)
1673 last_expr_value = 0;
1674 else
1675 last_expr_value = expand_expr (exp,
1676 (expr_stmts_for_value
1677 ? NULL_RTX : const0_rtx),
1678 VOIDmode, 0);
1679
1680 /* If all we do is reference a volatile value in memory,
1681 copy it to a register to be sure it is actually touched. */
1682 if (last_expr_value != 0 && GET_CODE (last_expr_value) == MEM
1683 && TREE_THIS_VOLATILE (exp))
1684 {
1685 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode)
1686 ;
1687 else if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
1688 copy_to_reg (last_expr_value);
1689 else
1690 {
1691 rtx lab = gen_label_rtx ();
1692
1693 /* Compare the value with itself to reference it. */
1694 emit_cmp_insn (last_expr_value, last_expr_value, EQ,
1695 expand_expr (TYPE_SIZE (last_expr_type),
1696 NULL_RTX, VOIDmode, 0),
1697 BLKmode, 0,
1698 TYPE_ALIGN (last_expr_type) / BITS_PER_UNIT);
1699 emit_jump_insn ((*bcc_gen_fctn[(int) EQ]) (lab));
1700 emit_label (lab);
1701 }
1702 }
1703
1704 /* If this expression is part of a ({...}) and is in memory, we may have
1705 to preserve temporaries. */
1706 preserve_temp_slots (last_expr_value);
1707
1708 /* Free any temporaries used to evaluate this expression. Any temporary
1709 used as a result of this expression will already have been preserved
1710 above. */
1711 free_temp_slots ();
1712
1713 emit_queue ();
1714 }
1715
1716 /* Warn if EXP contains any computations whose results are not used.
1717 Return 1 if a warning is printed; 0 otherwise. */
1718
1719 int
1720 warn_if_unused_value (exp)
1721 tree exp;
1722 {
1723 if (TREE_USED (exp))
1724 return 0;
1725
1726 switch (TREE_CODE (exp))
1727 {
1728 case PREINCREMENT_EXPR:
1729 case POSTINCREMENT_EXPR:
1730 case PREDECREMENT_EXPR:
1731 case POSTDECREMENT_EXPR:
1732 case MODIFY_EXPR:
1733 case INIT_EXPR:
1734 case TARGET_EXPR:
1735 case CALL_EXPR:
1736 case METHOD_CALL_EXPR:
1737 case RTL_EXPR:
1738 case TRY_CATCH_EXPR:
1739 case WITH_CLEANUP_EXPR:
1740 case EXIT_EXPR:
1741 /* We don't warn about COND_EXPR because it may be a useful
1742 construct if either arm contains a side effect. */
1743 case COND_EXPR:
1744 return 0;
1745
1746 case BIND_EXPR:
1747 /* For a binding, warn if no side effect within it. */
1748 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1749
1750 case SAVE_EXPR:
1751 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1752
1753 case TRUTH_ORIF_EXPR:
1754 case TRUTH_ANDIF_EXPR:
1755 /* In && or ||, warn if 2nd operand has no side effect. */
1756 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1757
1758 case COMPOUND_EXPR:
1759 if (TREE_NO_UNUSED_WARNING (exp))
1760 return 0;
1761 if (warn_if_unused_value (TREE_OPERAND (exp, 0)))
1762 return 1;
1763 /* Let people do `(foo (), 0)' without a warning. */
1764 if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
1765 return 0;
1766 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1767
1768 case NOP_EXPR:
1769 case CONVERT_EXPR:
1770 case NON_LVALUE_EXPR:
1771 /* Don't warn about values cast to void. */
1772 if (TREE_TYPE (exp) == void_type_node)
1773 return 0;
1774 /* Don't warn about conversions not explicit in the user's program. */
1775 if (TREE_NO_UNUSED_WARNING (exp))
1776 return 0;
1777 /* Assignment to a cast usually results in a cast of a modify.
1778 Don't complain about that. There can be an arbitrary number of
1779 casts before the modify, so we must loop until we find the first
1780 non-cast expression and then test to see if that is a modify. */
1781 {
1782 tree tem = TREE_OPERAND (exp, 0);
1783
1784 while (TREE_CODE (tem) == CONVERT_EXPR || TREE_CODE (tem) == NOP_EXPR)
1785 tem = TREE_OPERAND (tem, 0);
1786
1787 if (TREE_CODE (tem) == MODIFY_EXPR || TREE_CODE (tem) == INIT_EXPR
1788 || TREE_CODE (tem) == CALL_EXPR)
1789 return 0;
1790 }
1791 goto warn;
1792
1793 case INDIRECT_REF:
1794 /* Don't warn about automatic dereferencing of references, since
1795 the user cannot control it. */
1796 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
1797 return warn_if_unused_value (TREE_OPERAND (exp, 0));
1798 /* ... fall through ... */
1799
1800 default:
1801 /* Referencing a volatile value is a side effect, so don't warn. */
1802 if ((TREE_CODE_CLASS (TREE_CODE (exp)) == 'd'
1803 || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
1804 && TREE_THIS_VOLATILE (exp))
1805 return 0;
1806 warn:
1807 warning_with_file_and_line (emit_filename, emit_lineno,
1808 "value computed is not used");
1809 return 1;
1810 }
1811 }
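
/* For illustration: given `x + 1;' or `*p;' (P not pointing to volatile
   storage), the function above falls through to the default case and
   warns "value computed is not used", whereas `i++;', `(void) f ();'
   and `(foo (), 0)' are handled by the earlier cases and produce no
   warning.  */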
1812
1813 /* Clear out the memory of the last expression evaluated. */
1814
1815 void
1816 clear_last_expr ()
1817 {
1818 last_expr_type = 0;
1819 }
1820
1821 /* Begin a statement which will return a value.
1822 Return the RTL_EXPR for this statement expr.
1823 The caller must save that value and pass it to expand_end_stmt_expr. */
1824
1825 tree
1826 expand_start_stmt_expr ()
1827 {
1828 int momentary;
1829 tree t;
1830
1831 /* Make the RTL_EXPR node temporary, not momentary,
1832 so that rtl_expr_chain doesn't become garbage. */
1833 momentary = suspend_momentary ();
1834 t = make_node (RTL_EXPR);
1835 resume_momentary (momentary);
1836 do_pending_stack_adjust ();
1837 start_sequence_for_rtl_expr (t);
1838 NO_DEFER_POP;
1839 expr_stmts_for_value++;
1840 return t;
1841 }
1842
1843 /* Restore the previous state at the end of a statement that returns a value.
1844 Returns a tree node representing the statement's value and the
1845 insns to compute the value.
1846
1847 The nodes of that expression have been freed by now, so we cannot use them.
1848 But we don't want to do that anyway; the expression has already been
1849 evaluated and now we just want to use the value. So generate a RTL_EXPR
1850 with the proper type and RTL value.
1851
1852 If the last substatement was not an expression,
1853 return something with type `void'. */
1854
1855 tree
1856 expand_end_stmt_expr (t)
1857 tree t;
1858 {
1859 OK_DEFER_POP;
1860
1861 if (last_expr_type == 0)
1862 {
1863 last_expr_type = void_type_node;
1864 last_expr_value = const0_rtx;
1865 }
1866 else if (last_expr_value == 0)
1867 /* There are some cases where this can happen, such as when the
1868 statement is of void type. */
1869 last_expr_value = const0_rtx;
1870 else if (GET_CODE (last_expr_value) != REG && ! CONSTANT_P (last_expr_value))
1871 /* Remove any possible QUEUED. */
1872 last_expr_value = protect_from_queue (last_expr_value, 0);
1873
1874 emit_queue ();
1875
1876 TREE_TYPE (t) = last_expr_type;
1877 RTL_EXPR_RTL (t) = last_expr_value;
1878 RTL_EXPR_SEQUENCE (t) = get_insns ();
1879
1880 rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);
1881
1882 end_sequence ();
1883
1884 /* Don't consider deleting this expr or containing exprs at tree level. */
1885 TREE_SIDE_EFFECTS (t) = 1;
1886 /* Propagate volatility of the actual RTL expr. */
1887 TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value);
1888
1889 last_expr_type = 0;
1890 expr_stmts_for_value--;
1891
1892 return t;
1893 }
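
/* An illustrative sketch (the helper below is hypothetical and kept
   under #if 0; a real front end also handles cleanups and scopes): to
   expand a GNU statement expression such as `({ foo (); bar (); })',
   a caller would use these routines roughly as follows.  */
#if 0
static tree
example_expand_stmt_expr (stmt1, stmt2)
     tree stmt1, stmt2;
{
  tree rtl_expr = expand_start_stmt_expr ();

  expand_expr_stmt (stmt1);   /* foo (); value discarded.  */
  expand_expr_stmt (stmt2);   /* bar (); value kept as last_expr_value.  */

  /* Package the emitted insns and the final value into an RTL_EXPR.  */
  return expand_end_stmt_expr (rtl_expr);
}
#endif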
1894 \f
1895 /* Generate RTL for the start of an if-then. COND is the expression
1896 whose truth should be tested.
1897
1898 If EXITFLAG is nonzero, this conditional is visible to
1899 `exit_something'. */
1900
1901 void
1902 expand_start_cond (cond, exitflag)
1903 tree cond;
1904 int exitflag;
1905 {
1906 struct nesting *thiscond = ALLOC_NESTING ();
1907
1908 /* Make an entry on cond_stack for the cond we are entering. */
1909
1910 thiscond->next = cond_stack;
1911 thiscond->all = nesting_stack;
1912 thiscond->depth = ++nesting_depth;
1913 thiscond->data.cond.next_label = gen_label_rtx ();
1914 /* Before we encounter an `else', we don't need a separate exit label
1915 unless there are supposed to be exit statements
1916 to exit this conditional. */
1917 thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
1918 thiscond->data.cond.endif_label = thiscond->exit_label;
1919 cond_stack = thiscond;
1920 nesting_stack = thiscond;
1921
1922 do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
1923 }
1924
1925 /* Generate RTL between the then-clause and the elseif-clause
1926 of an if-then-elseif-.... */
1927
1928 void
1929 expand_start_elseif (cond)
1930 tree cond;
1931 {
1932 if (cond_stack->data.cond.endif_label == 0)
1933 cond_stack->data.cond.endif_label = gen_label_rtx ();
1934 emit_jump (cond_stack->data.cond.endif_label);
1935 emit_label (cond_stack->data.cond.next_label);
1936 cond_stack->data.cond.next_label = gen_label_rtx ();
1937 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
1938 }
1939
1940 /* Generate RTL between the then-clause and the else-clause
1941 of an if-then-else. */
1942
1943 void
1944 expand_start_else ()
1945 {
1946 if (cond_stack->data.cond.endif_label == 0)
1947 cond_stack->data.cond.endif_label = gen_label_rtx ();
1948
1949 emit_jump (cond_stack->data.cond.endif_label);
1950 emit_label (cond_stack->data.cond.next_label);
1951 cond_stack->data.cond.next_label = 0; /* No more _else or _elseif calls. */
1952 }
1953
1954 /* After calling expand_start_else, turn this "else" into an "else if"
1955 by providing another condition. */
1956
1957 void
1958 expand_elseif (cond)
1959 tree cond;
1960 {
1961 cond_stack->data.cond.next_label = gen_label_rtx ();
1962 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
1963 }
1964
1965 /* Generate RTL for the end of an if-then.
1966 Pop the record for it off of cond_stack. */
1967
1968 void
1969 expand_end_cond ()
1970 {
1971 struct nesting *thiscond = cond_stack;
1972
1973 do_pending_stack_adjust ();
1974 if (thiscond->data.cond.next_label)
1975 emit_label (thiscond->data.cond.next_label);
1976 if (thiscond->data.cond.endif_label)
1977 emit_label (thiscond->data.cond.endif_label);
1978
1979 POPSTACK (cond_stack);
1980 last_expr_type = 0;
1981 }
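
/* An illustrative sketch of how a front end might drive the routines
   above for `if (a) b (); else if (c) d (); else e ();'.  The helper
   below is hypothetical; real front ends layer line numbers, cleanups
   and bindings on top of this.  */
#if 0
static void
example_expand_if (a, b_stmt, c, d_stmt, e_stmt)
     tree a, b_stmt, c, d_stmt, e_stmt;
{
  expand_start_cond (a, 0);   /* Test A; jump past then-clause if false.  */
  expand_expr_stmt (b_stmt);
  expand_start_elseif (c);    /* Jump to endif, emit next_label, test C.  */
  expand_expr_stmt (d_stmt);
  expand_start_else ();       /* Jump to endif, emit next_label.  */
  expand_expr_stmt (e_stmt);
  expand_end_cond ();         /* Emit remaining labels; pop cond_stack.  */
}
#endif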
1982
1983
1984 \f
1985 /* Generate RTL for the start of a loop. EXIT_FLAG is nonzero if this
1986 loop should be exited by `exit_something'. This is a loop for which
1987 `expand_continue' will jump to the top of the loop.
1988
1989 Make an entry on loop_stack to record the labels associated with
1990 this loop. */
1991
1992 struct nesting *
1993 expand_start_loop (exit_flag)
1994 int exit_flag;
1995 {
1996 register struct nesting *thisloop = ALLOC_NESTING ();
1997
1998 /* Make an entry on loop_stack for the loop we are entering. */
1999
2000 thisloop->next = loop_stack;
2001 thisloop->all = nesting_stack;
2002 thisloop->depth = ++nesting_depth;
2003 thisloop->data.loop.start_label = gen_label_rtx ();
2004 thisloop->data.loop.end_label = gen_label_rtx ();
2005 thisloop->data.loop.alt_end_label = 0;
2006 thisloop->data.loop.continue_label = thisloop->data.loop.start_label;
2007 thisloop->exit_label = exit_flag ? thisloop->data.loop.end_label : 0;
2008 loop_stack = thisloop;
2009 nesting_stack = thisloop;
2010
2011 do_pending_stack_adjust ();
2012 emit_queue ();
2013 emit_note (NULL_PTR, NOTE_INSN_LOOP_BEG);
2014 emit_label (thisloop->data.loop.start_label);
2015
2016 return thisloop;
2017 }
2018
2019 /* Like expand_start_loop but for a loop where the continuation point
2020 (for expand_continue_loop) will be specified explicitly. */
2021
2022 struct nesting *
2023 expand_start_loop_continue_elsewhere (exit_flag)
2024 int exit_flag;
2025 {
2026 struct nesting *thisloop = expand_start_loop (exit_flag);
2027 loop_stack->data.loop.continue_label = gen_label_rtx ();
2028 return thisloop;
2029 }
2030
2031 /* Specify the continuation point for a loop started with
2032 expand_start_loop_continue_elsewhere.
2033 Use this at the point in the code to which a continue statement
2034 should jump. */
2035
2036 void
2037 expand_loop_continue_here ()
2038 {
2039 do_pending_stack_adjust ();
2040 emit_note (NULL_PTR, NOTE_INSN_LOOP_CONT);
2041 emit_label (loop_stack->data.loop.continue_label);
2042 }
2043
2044 /* Finish a loop. Generate a jump back to the top and the loop-exit label.
2045 Pop the block off of loop_stack. */
2046
2047 void
2048 expand_end_loop ()
2049 {
2050 rtx start_label = loop_stack->data.loop.start_label;
2051 rtx insn = get_last_insn ();
2052
2053 /* Mark the continue-point at the top of the loop if none elsewhere. */
2054 if (start_label == loop_stack->data.loop.continue_label)
2055 emit_note_before (NOTE_INSN_LOOP_CONT, start_label);
2056
2057 do_pending_stack_adjust ();
2058
2059 /* If optimizing, perhaps reorder the loop. If the loop starts with
2060 a loop exit, roll that to the end where it will optimize together
2061 with the jump back.
2062
2063 We look for the conditional branch to the exit, except that once
2064 we find such a branch, we don't look past 30 instructions.
2065
2066 In more detail, if the loop presently looks like this (in pseudo-C):
2067
2068 start_label:
2069 if (test) goto end_label;
2070 body;
2071 goto start_label;
2072 end_label:
2073
2074 transform it to look like:
2075
2076 goto start_label;
2077 newstart_label:
2078 body;
2079 start_label:
2080 if (test) goto end_label;
2081 goto newstart_label;
2082 end_label:
2083
2084 Here, the `test' may actually consist of some reasonably complex
2085 code, terminating in a test. */
2086
2087 if (optimize
2088 &&
2089 ! (GET_CODE (insn) == JUMP_INSN
2090 && GET_CODE (PATTERN (insn)) == SET
2091 && SET_DEST (PATTERN (insn)) == pc_rtx
2092 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE))
2093 {
2094 int eh_regions = 0;
2095 int num_insns = 0;
2096 rtx last_test_insn = NULL_RTX;
2097
2098 /* Scan insns from the top of the loop looking for a qualified
2099 conditional exit. */
2100 for (insn = NEXT_INSN (loop_stack->data.loop.start_label); insn;
2101 insn = NEXT_INSN (insn))
2102 {
2103 if (GET_CODE (insn) == NOTE)
2104 {
2105 if (optimize < 2
2106 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2107 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2108 /* The code that actually moves the exit test will
2109 carefully leave BLOCK notes in their original
2110 location. That means, however, that we can't debug
2111 the exit test itself. So, we refuse to move code
2112 containing BLOCK notes at low optimization levels. */
2113 break;
2114
2115 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
2116 ++eh_regions;
2117 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
2118 {
2119 --eh_regions;
2120 if (eh_regions < 0)
2121 /* We've come to the end of an EH region, but
2122 never saw the beginning of that region. That
2123 means that an EH region begins before the top
2124 of the loop, and ends in the middle of it. The
2125 existence of such a situation violates a basic
2126 assumption in this code, since that would imply
2127 that even when EH_REGIONS is zero, we might
2128 move code out of an exception region. */
2129 abort ();
2130 }
2131
2132 /* We already know this INSN is a NOTE, so there's no
2133 point in looking at it to see if it's a JUMP. */
2134 continue;
2135 }
2136
2137 if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == INSN)
2138 num_insns++;
2139
2140 if (last_test_insn && num_insns > 30)
2141 break;
2142
2143 if (eh_regions > 0)
2144 /* We don't want to move a partial EH region. Consider:
2145
2146 while ( ( { try {
2147 if (cond ()) 0;
2148 else {
2149 bar();
2150 1;
2151 }
2152 } catch (...) {
2153 1;
2154 } )) {
2155 body;
2156 }
2157
2158 This isn't legal C++, but here's what it's supposed to
2159 mean: if cond() is true, stop looping. Otherwise,
2160 call bar, and keep looping. In addition, if cond
2161 throws an exception, catch it and keep looping. Such
2162 constructs are certainly legal in LISP.
2163
2164 We should not move the `if (cond()) 0' test since then
2165 the EH-region for the try-block would be broken up.
2166 (In this case we would move the EH_BEG note for the `try'
2167 and `if cond()' but not the call to bar() or the
2168 EH_END note.)
2169
2170 So we don't look for tests within an EH region. */
2171 continue;
2172
2173 if (GET_CODE (insn) == JUMP_INSN
2174 && GET_CODE (PATTERN (insn)) == SET
2175 && SET_DEST (PATTERN (insn)) == pc_rtx)
2176 {
2177 /* This is indeed a jump. */
2178 rtx dest1 = NULL_RTX;
2179 rtx dest2 = NULL_RTX;
2180 rtx potential_last_test;
2181 if (GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE)
2182 {
2183 /* A conditional jump. */
2184 dest1 = XEXP (SET_SRC (PATTERN (insn)), 1);
2185 dest2 = XEXP (SET_SRC (PATTERN (insn)), 2);
2186 potential_last_test = insn;
2187 }
2188 else
2189 {
2190 /* An unconditional jump. */
2191 dest1 = SET_SRC (PATTERN (insn));
2192 /* Include the BARRIER after the JUMP. */
2193 potential_last_test = NEXT_INSN (insn);
2194 }
2195
2196 do {
2197 if (dest1 && GET_CODE (dest1) == LABEL_REF
2198 && ((XEXP (dest1, 0)
2199 == loop_stack->data.loop.alt_end_label)
2200 || (XEXP (dest1, 0)
2201 == loop_stack->data.loop.end_label)))
2202 {
2203 last_test_insn = potential_last_test;
2204 break;
2205 }
2206
2207 /* If this was a conditional jump, there may be
2208 another label at which we should look. */
2209 dest1 = dest2;
2210 dest2 = NULL_RTX;
2211 } while (dest1);
2212 }
2213 }
2214
2215 if (last_test_insn != 0 && last_test_insn != get_last_insn ())
2216 {
2217 /* We found one. Move everything from there up
2218 to the end of the loop, and add a jump into the loop
2219 to jump to there. */
2220 register rtx newstart_label = gen_label_rtx ();
2221 register rtx start_move = start_label;
2222 rtx next_insn;
2223
2224 /* If the start label is preceded by a NOTE_INSN_LOOP_CONT note,
2225 then we want to move this note also. */
2226 if (GET_CODE (PREV_INSN (start_move)) == NOTE
2227 && (NOTE_LINE_NUMBER (PREV_INSN (start_move))
2228 == NOTE_INSN_LOOP_CONT))
2229 start_move = PREV_INSN (start_move);
2230
2231 emit_label_after (newstart_label, PREV_INSN (start_move));
2232
2233 /* Actually move the insns. Start at the beginning, and
2234 keep copying insns until we've copied the
2235 last_test_insn. */
2236 for (insn = start_move; insn; insn = next_insn)
2237 {
2238 /* Figure out which insn comes after this one. We have
2239 to do this before we move INSN. */
2240 if (insn == last_test_insn)
2241 /* We've moved all the insns. */
2242 next_insn = NULL_RTX;
2243 else
2244 next_insn = NEXT_INSN (insn);
2245
2246 if (GET_CODE (insn) == NOTE
2247 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2248 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2249 /* We don't want to move NOTE_INSN_BLOCK_BEGs or
2250 NOTE_INSN_BLOCK_ENDs because the correct generation
2251 of debugging information depends on these appearing
2252 in the same order in the RTL and in the tree
2253 structure, where they are represented as BLOCKs.
2254 So, we don't move block notes. Of course, moving
2255 the code inside the block is likely to make it
2256 impossible to debug the instructions in the exit
2257 test, but such is the price of optimization. */
2258 continue;
2259
2260 /* Move the INSN. */
2261 reorder_insns (insn, insn, get_last_insn ());
2262 }
2263
2264 emit_jump_insn_after (gen_jump (start_label),
2265 PREV_INSN (newstart_label));
2266 emit_barrier_after (PREV_INSN (newstart_label));
2267 start_label = newstart_label;
2268 }
2269 }
2270
2271 emit_jump (start_label);
2272 emit_note (NULL_PTR, NOTE_INSN_LOOP_END);
2273 emit_label (loop_stack->data.loop.end_label);
2274
2275 POPSTACK (loop_stack);
2276
2277 last_expr_type = 0;
2278 }
2279
2280 /* Generate a jump to the current loop's continue-point.
2281 This is usually the top of the loop, but may be specified
2282 explicitly elsewhere. If not currently inside a loop,
2283 return 0 and do nothing; caller will print an error message. */
2284
2285 int
2286 expand_continue_loop (whichloop)
2287 struct nesting *whichloop;
2288 {
2289 last_expr_type = 0;
2290 if (whichloop == 0)
2291 whichloop = loop_stack;
2292 if (whichloop == 0)
2293 return 0;
2294 expand_goto_internal (NULL_TREE, whichloop->data.loop.continue_label,
2295 NULL_RTX);
2296 return 1;
2297 }
2298
2299 /* Generate a jump to exit the current loop. If not currently inside a loop,
2300 return 0 and do nothing; caller will print an error message. */
2301
2302 int
2303 expand_exit_loop (whichloop)
2304 struct nesting *whichloop;
2305 {
2306 last_expr_type = 0;
2307 if (whichloop == 0)
2308 whichloop = loop_stack;
2309 if (whichloop == 0)
2310 return 0;
2311 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label, NULL_RTX);
2312 return 1;
2313 }
2314
2315 /* Generate a conditional jump to exit the current loop if COND
2316 evaluates to zero. If not currently inside a loop,
2317 return 0 and do nothing; caller will print an error message. */
2318
2319 int
2320 expand_exit_loop_if_false (whichloop, cond)
2321 struct nesting *whichloop;
2322 tree cond;
2323 {
2324 rtx label = gen_label_rtx ();
2325 rtx last_insn;
2326 last_expr_type = 0;
2327
2328 if (whichloop == 0)
2329 whichloop = loop_stack;
2330 if (whichloop == 0)
2331 return 0;
2332 /* In order to handle fixups, we actually create a conditional jump
2333 around an unconditional branch to exit the loop. If fixups are
2334 necessary, they go before the unconditional branch. */
2335
2336
2337 do_jump (cond, NULL_RTX, label);
2338 last_insn = get_last_insn ();
2339 if (GET_CODE (last_insn) == CODE_LABEL)
2340 whichloop->data.loop.alt_end_label = last_insn;
2341 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label,
2342 NULL_RTX);
2343 emit_label (label);
2344
2345 return 1;
2346 }
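
/* An illustrative sketch of how a front end might use the loop routines
   above for `while (cond) body;'.  The helper below is hypothetical; a
   `for' statement would instead use expand_start_loop_continue_elsewhere
   and expand_loop_continue_here to place the continue point before its
   increment expression.  */
#if 0
static void
example_expand_while (cond, body_stmt)
     tree cond, body_stmt;
{
  struct nesting *loop = expand_start_loop (1); /* 1: visible to exit_something.  */

  expand_exit_loop_if_false (loop, cond);  /* Leave the loop when COND is false.  */
  expand_expr_stmt (body_stmt);            /* The loop body.  */
  expand_end_loop ();                      /* Jump back to the top; pop loop_stack.  */
}
#endif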
2347
2348 /* Return non-zero if we should preserve sub-expressions as separate
2349 pseudos. We never do so if we aren't optimizing. We always do so
2350 if -fexpensive-optimizations.
2351
2352 Otherwise, we only do so if we are in the "early" part of a loop. I.e.,
2353 the loop may still be a small one. */
2354
2355 int
2356 preserve_subexpressions_p ()
2357 {
2358 rtx insn;
2359
2360 if (flag_expensive_optimizations)
2361 return 1;
2362
2363 if (optimize == 0 || loop_stack == 0)
2364 return 0;
2365
2366 insn = get_last_insn_anywhere ();
2367
2368 return (insn
2369 && (INSN_UID (insn) - INSN_UID (loop_stack->data.loop.start_label)
2370 < n_non_fixed_regs * 3));
2371
2372 }
2373
2374 /* Generate a jump to exit the current loop, conditional, binding contour
2375 or case statement. Not all such constructs are visible to this function,
2376 only those started with EXIT_FLAG nonzero. Individual languages use
2377 the EXIT_FLAG parameter to control which kinds of constructs you can
2378 exit this way.
2379
2380 If not currently inside anything that can be exited,
2381 return 0 and do nothing; caller will print an error message. */
2382
2383 int
2384 expand_exit_something ()
2385 {
2386 struct nesting *n;
2387 last_expr_type = 0;
2388 for (n = nesting_stack; n; n = n->all)
2389 if (n->exit_label != 0)
2390 {
2391 expand_goto_internal (NULL_TREE, n->exit_label, NULL_RTX);
2392 return 1;
2393 }
2394
2395 return 0;
2396 }
2397 \f
2398 /* Generate RTL to return from the current function, with no value.
2399 (That is, we do not do anything about returning any value.) */
2400
2401 void
2402 expand_null_return ()
2403 {
2404 struct nesting *block = block_stack;
2405 rtx last_insn = 0;
2406
2407 /* Does any pending block have cleanups? */
2408
2409 while (block && block->data.block.cleanups == 0)
2410 block = block->next;
2411
2412 /* If yes, use a goto to return, since that runs cleanups. */
2413
2414 expand_null_return_1 (last_insn, block != 0);
2415 }
2416
2417 /* Generate RTL to return from the current function, with value VAL. */
2418
2419 static void
2420 expand_value_return (val)
2421 rtx val;
2422 {
2423 struct nesting *block = block_stack;
2424 rtx last_insn = get_last_insn ();
2425 rtx return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
2426
2427 /* Copy the value to the return location
2428 unless it's already there. */
2429
2430 if (return_reg != val)
2431 {
2432 #ifdef PROMOTE_FUNCTION_RETURN
2433 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
2434 int unsignedp = TREE_UNSIGNED (type);
2435 enum machine_mode mode
2436 = promote_mode (type, DECL_MODE (DECL_RESULT (current_function_decl)),
2437 &unsignedp, 1);
2438
2439 if (GET_MODE (val) != VOIDmode && GET_MODE (val) != mode)
2440 convert_move (return_reg, val, unsignedp);
2441 else
2442 #endif
2443 emit_move_insn (return_reg, val);
2444 }
2445 if (GET_CODE (return_reg) == REG
2446 && REGNO (return_reg) < FIRST_PSEUDO_REGISTER)
2447 emit_insn (gen_rtx_USE (VOIDmode, return_reg));
2448 /* Handle calls that return values in multiple non-contiguous locations.
2449 The Irix 6 ABI has examples of this. */
2450 else if (GET_CODE (return_reg) == PARALLEL)
2451 {
2452 int i;
2453
2454 for (i = 0; i < XVECLEN (return_reg, 0); i++)
2455 {
2456 rtx x = XEXP (XVECEXP (return_reg, 0, i), 0);
2457
2458 if (GET_CODE (x) == REG
2459 && REGNO (x) < FIRST_PSEUDO_REGISTER)
2460 emit_insn (gen_rtx_USE (VOIDmode, x));
2461 }
2462 }
2463
2464 /* Does any pending block have cleanups? */
2465
2466 while (block && block->data.block.cleanups == 0)
2467 block = block->next;
2468
2469 /* If yes, use a goto to return, since that runs cleanups.
2470 Use LAST_INSN to put cleanups *before* the move insn emitted above. */
2471
2472 expand_null_return_1 (last_insn, block != 0);
2473 }
2474
2475 /* Output a return with no value. If LAST_INSN is nonzero,
2476 pretend that the return takes place after LAST_INSN.
2477 If USE_GOTO is nonzero then don't use a return instruction;
2478 go to the return label instead. This causes any cleanups
2479 of pending blocks to be executed normally. */
2480
2481 static void
2482 expand_null_return_1 (last_insn, use_goto)
2483 rtx last_insn;
2484 int use_goto;
2485 {
2486 rtx end_label = cleanup_label ? cleanup_label : return_label;
2487
2488 clear_pending_stack_adjust ();
2489 do_pending_stack_adjust ();
2490 last_expr_type = 0;
2491
2492 /* PCC-struct return always uses an epilogue. */
2493 if (current_function_returns_pcc_struct || use_goto)
2494 {
2495 if (end_label == 0)
2496 end_label = return_label = gen_label_rtx ();
2497 expand_goto_internal (NULL_TREE, end_label, last_insn);
2498 return;
2499 }
2500
2501 /* Otherwise output a simple return-insn if one is available,
2502 unless it won't do the job. */
2503 #ifdef HAVE_return
2504 if (HAVE_return && use_goto == 0 && cleanup_label == 0)
2505 {
2506 emit_jump_insn (gen_return ());
2507 emit_barrier ();
2508 return;
2509 }
2510 #endif
2511
2512 /* Otherwise jump to the epilogue. */
2513 expand_goto_internal (NULL_TREE, end_label, last_insn);
2514 }
2515 \f
2516 /* Generate RTL to evaluate the expression RETVAL and return it
2517 from the current function. */
2518
2519 void
2520 expand_return (retval)
2521 tree retval;
2522 {
2523 /* If there are any cleanups to be performed, then they will
2524 be inserted following LAST_INSN. It is desirable
2525 that the last_insn, for such purposes, should be the
2526 last insn before computing the return value. Otherwise, cleanups
2527 which call functions can clobber the return value. */
2528 /* ??? rms: I think that is erroneous, because in C++ it would
2529 run destructors on variables that might be used in the subsequent
2530 computation of the return value. */
2531 rtx last_insn = 0;
2532 register rtx val = 0;
2533 register rtx op0;
2534 tree retval_rhs;
2535 int cleanups;
2536
2537 /* If function wants no value, give it none. */
2538 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
2539 {
2540 expand_expr (retval, NULL_RTX, VOIDmode, 0);
2541 emit_queue ();
2542 expand_null_return ();
2543 return;
2544 }
2545
2546 /* Are any cleanups needed? E.g. C++ destructors to be run? */
2547 /* This is not sufficient. We also need to watch for cleanups of the
2548 expression we are about to expand. Unfortunately, we cannot know
2549 if it has cleanups until we expand it, and we want to change how we
2550 expand it depending on whether we need cleanups. We can't win. */
2551 #if 0
2552 cleanups = any_pending_cleanups (1);
2553 #else
2554 cleanups = 1;
2555 #endif
2556
2557 if (TREE_CODE (retval) == RESULT_DECL)
2558 retval_rhs = retval;
2559 else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
2560 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
2561 retval_rhs = TREE_OPERAND (retval, 1);
2562 else if (TREE_TYPE (retval) == void_type_node)
2563 /* Recognize tail-recursive call to void function. */
2564 retval_rhs = retval;
2565 else
2566 retval_rhs = NULL_TREE;
2567
2568 /* Only use `last_insn' if there are cleanups which must be run. */
2569 if (cleanups || cleanup_label != 0)
2570 last_insn = get_last_insn ();
2571
2572 /* Distribute return down conditional expr if either of the sides
2573 may involve tail recursion (see test below). This enhances the number
2574 of tail recursions we see. Don't do this always since it can produce
2575 sub-optimal code in some cases and we distribute assignments into
2576 conditional expressions when it would help. */
2577
2578 if (optimize && retval_rhs != 0
2579 && frame_offset == 0
2580 && TREE_CODE (retval_rhs) == COND_EXPR
2581 && (TREE_CODE (TREE_OPERAND (retval_rhs, 1)) == CALL_EXPR
2582 || TREE_CODE (TREE_OPERAND (retval_rhs, 2)) == CALL_EXPR))
2583 {
2584 rtx label = gen_label_rtx ();
2585 tree expr;
2586
2587 do_jump (TREE_OPERAND (retval_rhs, 0), label, NULL_RTX);
2588 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
2589 DECL_RESULT (current_function_decl),
2590 TREE_OPERAND (retval_rhs, 1));
2591 TREE_SIDE_EFFECTS (expr) = 1;
2592 expand_return (expr);
2593 emit_label (label);
2594
2595 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
2596 DECL_RESULT (current_function_decl),
2597 TREE_OPERAND (retval_rhs, 2));
2598 TREE_SIDE_EFFECTS (expr) = 1;
2599 expand_return (expr);
2600 return;
2601 }
2602
2603 /* Attempt to optimize the call if it is tail recursive. */
2604 optimize_tail_recursion (retval_rhs, last_insn);
2605
2606 #ifdef HAVE_return
2607 /* This optimization is safe if there are local cleanups
2608 because expand_null_return takes care of them.
2609 ??? I think it should also be safe when there is a cleanup label,
2610 because expand_null_return takes care of them, too.
2611 Any reason why not? */
2612 if (HAVE_return && cleanup_label == 0
2613 && ! current_function_returns_pcc_struct
2614 && BRANCH_COST <= 1)
2615 {
2616 /* If this is return x == y; then generate
2617 if (x == y) return 1; else return 0;
2618 if we can do it with explicit return insns and branches are cheap,
2619 but not if we have the corresponding scc insn. */
2620 int has_scc = 0;
2621 if (retval_rhs)
2622 switch (TREE_CODE (retval_rhs))
2623 {
2624 case EQ_EXPR:
2625 #ifdef HAVE_seq
2626 has_scc = HAVE_seq;
2627 #endif
2628 case NE_EXPR:
2629 #ifdef HAVE_sne
2630 has_scc = HAVE_sne;
2631 #endif
2632 case GT_EXPR:
2633 #ifdef HAVE_sgt
2634 has_scc = HAVE_sgt;
2635 #endif
2636 case GE_EXPR:
2637 #ifdef HAVE_sge
2638 has_scc = HAVE_sge;
2639 #endif
2640 case LT_EXPR:
2641 #ifdef HAVE_slt
2642 has_scc = HAVE_slt;
2643 #endif
2644 case LE_EXPR:
2645 #ifdef HAVE_sle
2646 has_scc = HAVE_sle;
2647 #endif
2648 case TRUTH_ANDIF_EXPR:
2649 case TRUTH_ORIF_EXPR:
2650 case TRUTH_AND_EXPR:
2651 case TRUTH_OR_EXPR:
2652 case TRUTH_NOT_EXPR:
2653 case TRUTH_XOR_EXPR:
2654 if (! has_scc)
2655 {
2656 op0 = gen_label_rtx ();
2657 jumpifnot (retval_rhs, op0);
2658 expand_value_return (const1_rtx);
2659 emit_label (op0);
2660 expand_value_return (const0_rtx);
2661 return;
2662 }
2663 break;
2664
2665 default:
2666 break;
2667 }
2668 }
2669 #endif /* HAVE_return */
2670
2671 /* If the result is an aggregate that is being returned in one (or more)
2672 registers, load the registers here. The compiler currently can't handle
2673 copying a BLKmode value into registers. We could put this code in a
2674 more general area (for use by everyone instead of just function
2675 call/return), but until this feature is generally usable it is kept here
2676 (and in expand_call). The value must go into a pseudo in case there
2677 are cleanups that will clobber the real return register. */
2678
2679 if (retval_rhs != 0
2680 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
2681 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG)
2682 {
2683 int i, bitpos, xbitpos;
2684 int big_endian_correction = 0;
2685 int bytes = int_size_in_bytes (TREE_TYPE (retval_rhs));
2686 int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2687 int bitsize = MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)),
2688 (unsigned int)BITS_PER_WORD);
2689 rtx *result_pseudos = (rtx *) alloca (sizeof (rtx) * n_regs);
2690 rtx result_reg, src = NULL_RTX, dst = NULL_RTX;
2691 rtx result_val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
2692 enum machine_mode tmpmode, result_reg_mode;
2693
2694 /* Structures whose size is not a multiple of a word are aligned
2695 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2696 machine, this means we must skip the empty high order bytes when
2697 calculating the bit offset. */
2698 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2699 big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2700 * BITS_PER_UNIT));
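
/* Worked example (illustrative): with 32-bit words on a big-endian
   target, a 6-byte structure gives bytes % UNITS_PER_WORD == 2, so
   big_endian_correction == 32 - 2*8 == 16; the copy loop below then
   starts filling the first destination word at bit 16, leaving the
   value right-justified within the register.  */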
2701
2702 /* Copy the structure BITSIZE bits at a time. */
2703 for (bitpos = 0, xbitpos = big_endian_correction;
2704 bitpos < bytes * BITS_PER_UNIT;
2705 bitpos += bitsize, xbitpos += bitsize)
2706 {
2707 /* We need a new destination pseudo each time xbitpos is
2708 on a word boundary and when xbitpos == big_endian_correction
2709 (the first time through). */
2710 if (xbitpos % BITS_PER_WORD == 0
2711 || xbitpos == big_endian_correction)
2712 {
2713 /* Generate an appropriate register. */
2714 dst = gen_reg_rtx (word_mode);
2715 result_pseudos[xbitpos / BITS_PER_WORD] = dst;
2716
2717 /* Clobber the destination before we move anything into it. */
2718 emit_insn (gen_rtx_CLOBBER (VOIDmode, dst));
2719 }
2720
2721 /* We need a new source operand each time bitpos is on a word
2722 boundary. */
2723 if (bitpos % BITS_PER_WORD == 0)
2724 src = operand_subword_force (result_val,
2725 bitpos / BITS_PER_WORD,
2726 BLKmode);
2727
2728 /* Use bitpos for the source extraction (left justified) and
2729 xbitpos for the destination store (right justified). */
2730 store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
2731 extract_bit_field (src, bitsize,
2732 bitpos % BITS_PER_WORD, 1,
2733 NULL_RTX, word_mode,
2734 word_mode,
2735 bitsize / BITS_PER_UNIT,
2736 BITS_PER_WORD),
2737 bitsize / BITS_PER_UNIT, BITS_PER_WORD);
2738 }
2739
2740 /* Find the smallest integer mode large enough to hold the
2741 entire structure and use that mode instead of BLKmode
2742 on the USE insn for the return register. */
2743 bytes = int_size_in_bytes (TREE_TYPE (retval_rhs));
2744 for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2745 tmpmode != MAX_MACHINE_MODE;
2746 tmpmode = GET_MODE_WIDER_MODE (tmpmode))
2747 {
2748 /* Have we found a large enough mode? */
2749 if (GET_MODE_SIZE (tmpmode) >= bytes)
2750 break;
2751 }
2752
2753 /* No suitable mode found. */
2754 if (tmpmode == MAX_MACHINE_MODE)
2755 abort ();
2756
2757 PUT_MODE (DECL_RTL (DECL_RESULT (current_function_decl)), tmpmode);
2758
2759 if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
2760 result_reg_mode = word_mode;
2761 else
2762 result_reg_mode = tmpmode;
2763 result_reg = gen_reg_rtx (result_reg_mode);
2764
2765 emit_queue ();
2766 for (i = 0; i < n_regs; i++)
2767 emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
2768 result_pseudos[i]);
2769
2770 if (tmpmode != result_reg_mode)
2771 result_reg = gen_lowpart (tmpmode, result_reg);
2772
2773 expand_value_return (result_reg);
2774 }
2775 else if (cleanups
2776 && retval_rhs != 0
2777 && TREE_TYPE (retval_rhs) != void_type_node
2778 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG)
2779 {
2780 /* Calculate the return value into a pseudo reg. */
2781 val = gen_reg_rtx (DECL_MODE (DECL_RESULT (current_function_decl)));
2782 val = expand_expr (retval_rhs, val, GET_MODE (val), 0);
2783 val = force_not_mem (val);
2784 emit_queue ();
2785 /* Return the calculated value, doing cleanups first. */
2786 expand_value_return (val);
2787 }
2788 else
2789 {
2790 /* No cleanups or no hard reg used;
2791 calculate value into hard return reg. */
2792 expand_expr (retval, const0_rtx, VOIDmode, 0);
2793 emit_queue ();
2794 expand_value_return (DECL_RTL (DECL_RESULT (current_function_decl)));
2795 }
2796 }
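
/* By way of illustration: for `return x + y;' a C-like front end
   typically builds an assignment to the function's RESULT_DECL and
   hands it to expand_return, so that the MODIFY_EXPR/RESULT_DECL test
   above can extract the right-hand side.  A hypothetical sketch:  */
#if 0
static void
example_expand_return_stmt (fndecl, retval_expr)
     tree fndecl, retval_expr;
{
  tree result = DECL_RESULT (fndecl);
  tree set = build (MODIFY_EXPR, TREE_TYPE (result), result, retval_expr);

  TREE_SIDE_EFFECTS (set) = 1;
  expand_return (set);   /* Emits the value computation and the return.  */
}
#endif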
2797
2798 /* Return 1 if the end of the generated RTX is not a barrier.
2799 This means code already compiled can drop through. */
2800
2801 int
2802 drop_through_at_end_p ()
2803 {
2804 rtx insn = get_last_insn ();
2805 while (insn && GET_CODE (insn) == NOTE)
2806 insn = PREV_INSN (insn);
2807 return insn && GET_CODE (insn) != BARRIER;
2808 }
2809 \f
2810 /* Test CALL_EXPR to determine if it is a potential tail recursion call
2811 and emit code to optimize the tail recursion. LAST_INSN indicates where
2812 to place the jump to the tail recursion label.
2813
2814 This is only used by expand_return, but expand_call is expected to
2815 use it soon. */
2816
2817 void
2818 optimize_tail_recursion (call_expr, last_insn)
2819 tree call_expr;
2820 rtx last_insn;
2821 {
2822 /* For tail-recursive call to current function,
2823 just jump back to the beginning.
2824 It's unsafe if any auto variable in this function
2825 has its address taken; for simplicity,
2826 require stack frame to be empty. */
2827 if (optimize && call_expr != 0
2828 && frame_offset == 0
2829 && TREE_CODE (call_expr) == CALL_EXPR
2830 && TREE_CODE (TREE_OPERAND (call_expr, 0)) == ADDR_EXPR
2831 && TREE_OPERAND (TREE_OPERAND (call_expr, 0), 0) == current_function_decl
2832 /* Finish checking validity, and if valid emit code
2833 to set the argument variables for the new call. */
2834 && tail_recursion_args (TREE_OPERAND (call_expr, 1),
2835 DECL_ARGUMENTS (current_function_decl)))
2836 {
2837 if (tail_recursion_label == 0)
2838 {
2839 tail_recursion_label = gen_label_rtx ();
2840 emit_label_after (tail_recursion_label,
2841 tail_recursion_reentry);
2842 }
2843 emit_queue ();
2844 expand_goto_internal (NULL_TREE, tail_recursion_label, last_insn);
2845 emit_barrier ();
2846 }
2847 }
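
/* The effect, illustrated at the source level: in

     int fact (int n, int acc) { return n ? fact (n - 1, n * acc) : acc; }

   the recursive call (when the checks above and in tail_recursion_args
   succeed) is replaced by moves assigning the new argument values to N
   and ACC followed by a jump back to tail_recursion_label, so no new
   stack frame is built for the call.  */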
2848
2849 /* Emit code to alter this function's formal parms for a tail-recursive call.
2850 ACTUALS is a list of actual parameter expressions (chain of TREE_LISTs).
2851 FORMALS is the chain of decls of formals.
2852 Return 1 if this can be done;
2853 otherwise return 0 and do not emit any code. */
2854
2855 static int
2856 tail_recursion_args (actuals, formals)
2857 tree actuals, formals;
2858 {
2859 register tree a = actuals, f = formals;
2860 register int i;
2861 register rtx *argvec;
2862
2863 /* Check that number and types of actuals are compatible
2864 with the formals. This is not always true in valid C code.
2865 Also check that no formal needs to be addressable
2866 and that all formals are scalars. */
2867
2868 /* Also count the args. */
2869
2870 for (a = actuals, f = formals, i = 0; a && f; a = TREE_CHAIN (a), f = TREE_CHAIN (f), i++)
2871 {
2872 if (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_VALUE (a)))
2873 != TYPE_MAIN_VARIANT (TREE_TYPE (f)))
2874 return 0;
2875 if (GET_CODE (DECL_RTL (f)) != REG || DECL_MODE (f) == BLKmode)
2876 return 0;
2877 }
2878 if (a != 0 || f != 0)
2879 return 0;
2880
2881 /* Compute all the actuals. */
2882
2883 argvec = (rtx *) alloca (i * sizeof (rtx));
2884
2885 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
2886 argvec[i] = expand_expr (TREE_VALUE (a), NULL_RTX, VOIDmode, 0);
2887
2888 /* Find which actual values refer to current values of previous formals.
2889 Copy each of them now, before any formal is changed. */
2890
2891 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
2892 {
2893 int copy = 0;
2894 register int j;
2895 for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++)
2896 if (reg_mentioned_p (DECL_RTL (f), argvec[i]))
2897 { copy = 1; break; }
2898 if (copy)
2899 argvec[i] = copy_to_reg (argvec[i]);
2900 }
2901
2902 /* Store the values of the actuals into the formals. */
2903
2904 for (f = formals, a = actuals, i = 0; f;
2905 f = TREE_CHAIN (f), a = TREE_CHAIN (a), i++)
2906 {
2907 if (GET_MODE (DECL_RTL (f)) == GET_MODE (argvec[i]))
2908 emit_move_insn (DECL_RTL (f), argvec[i]);
2909 else
2910 convert_move (DECL_RTL (f), argvec[i],
2911 TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a))));
2912 }
2913
2914 free_temp_slots ();
2915 return 1;
2916 }
2917 \f
2918 /* Generate the RTL code for entering a binding contour.
2919 The variables are declared one by one, by calls to `expand_decl'.
2920
2921 EXIT_FLAG is nonzero if this construct should be visible to
2922 `exit_something'. */
2923
2924 void
2925 expand_start_bindings (exit_flag)
2926 int exit_flag;
2927 {
2928 struct nesting *thisblock = ALLOC_NESTING ();
2929 rtx note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
2930
2931 /* Make an entry on block_stack for the block we are entering. */
2932
2933 thisblock->next = block_stack;
2934 thisblock->all = nesting_stack;
2935 thisblock->depth = ++nesting_depth;
2936 thisblock->data.block.stack_level = 0;
2937 thisblock->data.block.cleanups = 0;
2938 thisblock->data.block.function_call_count = 0;
2939 thisblock->data.block.exception_region = 0;
2940 thisblock->data.block.target_temp_slot_level = target_temp_slot_level;
2941
2942 thisblock->data.block.conditional_code = 0;
2943 thisblock->data.block.last_unconditional_cleanup = note;
2944 thisblock->data.block.cleanup_ptr = &thisblock->data.block.cleanups;
2945
2946 if (block_stack
2947 && !(block_stack->data.block.cleanups == NULL_TREE
2948 && block_stack->data.block.outer_cleanups == NULL_TREE))
2949 thisblock->data.block.outer_cleanups
2950 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
2951 block_stack->data.block.outer_cleanups);
2952 else
2953 thisblock->data.block.outer_cleanups = 0;
2954 thisblock->data.block.label_chain = 0;
2955 thisblock->data.block.innermost_stack_block = stack_block_stack;
2956 thisblock->data.block.first_insn = note;
2957 thisblock->data.block.block_start_count = ++block_start_count;
2958 thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
2959 block_stack = thisblock;
2960 nesting_stack = thisblock;
2961
2962 /* Make a new level for allocating stack slots. */
2963 push_temp_slots ();
2964 }
2965
2966 /* Specify the scope of temporaries created by TARGET_EXPRs. Similar
2967 to CLEANUP_POINT_EXPR, but handles cases when a series of calls to
2968 expand_expr are made. After we end the region, we know that all
2969 space for all temporaries that were created by TARGET_EXPRs will be
2970 destroyed and their space freed for reuse. */
2971
2972 void
2973 expand_start_target_temps ()
2974 {
2975 /* This is so that even if the result is preserved, the space
2976 allocated will be freed, as we know that it is no longer in use. */
2977 push_temp_slots ();
2978
2979 /* Start a new binding layer that will keep track of all cleanup
2980 actions to be performed. */
2981 expand_start_bindings (0);
2982
2983 target_temp_slot_level = temp_slot_level;
2984 }
2985
2986 void
2987 expand_end_target_temps ()
2988 {
2989 expand_end_bindings (NULL_TREE, 0, 0);
2990
2991 /* This is so that even if the result is preserved, the space
2992 allocated will be freed, as we know that it is no longer in use. */
2993 pop_temp_slots ();
2994 }
2995
2996 /* Mark top block of block_stack as an implicit binding for an
2997 exception region. This is used to prevent infinite recursion when
2998 ending a binding with expand_end_bindings. It is only ever called
2999 by expand_eh_region_start, as that is the only way to create a
3000 block stack for an exception region. */
3001
3002 void
3003 mark_block_as_eh_region ()
3004 {
3005 block_stack->data.block.exception_region = 1;
3006 if (block_stack->next
3007 && block_stack->next->data.block.conditional_code)
3008 {
3009 block_stack->data.block.conditional_code
3010 = block_stack->next->data.block.conditional_code;
3011 block_stack->data.block.last_unconditional_cleanup
3012 = block_stack->next->data.block.last_unconditional_cleanup;
3013 block_stack->data.block.cleanup_ptr
3014 = block_stack->next->data.block.cleanup_ptr;
3015 }
3016 }
3017
3018 /* True if we are currently emitting insns in an area of output code
3019 that is controlled by a conditional expression. This is used by
3020 the cleanup handling code to generate conditional cleanup actions. */
3021
3022 int
3023 conditional_context ()
3024 {
3025 return block_stack && block_stack->data.block.conditional_code;
3026 }
3027
3028 /* Mark top block of block_stack as not for an implicit binding for an
3029 exception region. This is only ever done by expand_eh_region_end
3030 to let expand_end_bindings know that it is being called explicitly
3031 to end just the binding layer associated with the exception
3032 region; otherwise expand_end_bindings would try to end all
3033 implicit binding layers for exception regions, and then
3034 one normal binding layer. */
3035
3036 void
3037 mark_block_as_not_eh_region ()
3038 {
3039 block_stack->data.block.exception_region = 0;
3040 }
3041
3042 /* True if the top block of block_stack was marked as for an exception
3043 region by mark_block_as_eh_region. */
3044
3045 int
3046 is_eh_region ()
3047 {
3048 return block_stack && block_stack->data.block.exception_region;
3049 }
3050
3051 /* Given a pointer to a BLOCK node, save a pointer to the most recently
3052 generated NOTE_INSN_BLOCK_END in the BLOCK_END_NOTE field of the given
3053 BLOCK node. */
3054
3055 void
3056 remember_end_note (block)
3057 register tree block;
3058 {
3059 BLOCK_END_NOTE (block) = last_block_end_note;
3060 last_block_end_note = NULL_RTX;
3061 }
3062
3063 /* Emit a handler label for a nonlocal goto handler.
3064 Also emit code to store the handler label in SLOT before BEFORE_INSN. */
3065
3066 static void
3067 expand_nl_handler_label (slot, before_insn)
3068 rtx slot, before_insn;
3069 {
3070 rtx insns;
3071 rtx handler_label = gen_label_rtx ();
3072
3073 /* Don't let jump_optimize delete the handler. */
3074 LABEL_PRESERVE_P (handler_label) = 1;
3075
3076 start_sequence ();
3077 emit_move_insn (slot, gen_rtx_LABEL_REF (Pmode, handler_label));
3078 insns = get_insns ();
3079 end_sequence ();
3080 emit_insns_before (insns, before_insn);
3081
3082 emit_label (handler_label);
3083 }
3084
3085 /* Emit code to restore vital registers at the beginning of a nonlocal goto
3086 handler. */
3087 static void
3088 expand_nl_goto_receiver ()
3089 {
3090 #ifdef HAVE_nonlocal_goto
3091 if (! HAVE_nonlocal_goto)
3092 #endif
3093 /* First adjust our frame pointer to its actual value. It was
3094 previously set to the start of the virtual area corresponding to
3095 the stacked variables when we branched here and now needs to be
3096 adjusted to the actual hardware fp value.
3097
3098 Assignments to virtual registers are converted by
3099 instantiate_virtual_regs into the corresponding assignment
3100 to the underlying register (fp in this case) that makes
3101 the original assignment true.
3102 So the following insn will actually be
3103 decrementing fp by STARTING_FRAME_OFFSET. */
3104 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
3105
3106 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3107 if (fixed_regs[ARG_POINTER_REGNUM])
3108 {
3109 #ifdef ELIMINABLE_REGS
3110 /* If the argument pointer can be eliminated in favor of the
3111 frame pointer, we don't need to restore it. We assume here
3112 that if such an elimination is present, it can always be used.
3113 This is the case on all known machines; if we don't make this
3114 assumption, we do unnecessary saving on many machines. */
3115 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
3116 size_t i;
3117
3118 for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
3119 if (elim_regs[i].from == ARG_POINTER_REGNUM
3120 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
3121 break;
3122
3123 if (i == sizeof elim_regs / sizeof elim_regs [0])
3124 #endif
3125 {
3126 /* Now restore our arg pointer from the address at which it
3127 was saved in our stack frame.
3128 If there hasn't been space allocated for it yet, make
3129 some now. */
3130 if (arg_pointer_save_area == 0)
3131 arg_pointer_save_area
3132 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
3133 emit_move_insn (virtual_incoming_args_rtx,
3134 /* We need a pseudo here, or else
3135 instantiate_virtual_regs_1 complains. */
3136 copy_to_reg (arg_pointer_save_area));
3137 }
3138 }
3139 #endif
3140
3141 #ifdef HAVE_nonlocal_goto_receiver
3142 if (HAVE_nonlocal_goto_receiver)
3143 emit_insn (gen_nonlocal_goto_receiver ());
3144 #endif
3145 }
3146
3147 /* Make handlers for nonlocal gotos taking place in the function calls in
3148 block THISBLOCK. */
3149
3150 static void
3151 expand_nl_goto_receivers (thisblock)
3152 struct nesting *thisblock;
3153 {
3154 tree link;
3155 rtx afterward = gen_label_rtx ();
3156 rtx insns, slot;
3157 int any_invalid;
3158
3159 /* Record the handler address in the stack slot for that purpose,
3160 during this block, saving and restoring the outer value. */
3161 if (thisblock->next != 0)
3162 for (slot = nonlocal_goto_handler_slots; slot; slot = XEXP (slot, 1))
3163 {
3164 rtx save_receiver = gen_reg_rtx (Pmode);
3165 emit_move_insn (XEXP (slot, 0), save_receiver);
3166
3167 start_sequence ();
3168 emit_move_insn (save_receiver, XEXP (slot, 0));
3169 insns = get_insns ();
3170 end_sequence ();
3171 emit_insns_before (insns, thisblock->data.block.first_insn);
3172 }
3173
3174 /* Jump around the handlers; they run only when specially invoked. */
3175 emit_jump (afterward);
3176
3177 /* Make a separate handler for each label. */
3178 link = nonlocal_labels;
3179 slot = nonlocal_goto_handler_slots;
3180 for (; link; link = TREE_CHAIN (link), slot = XEXP (slot, 1))
3181 /* Skip any labels we shouldn't be able to jump to from here;
3182 we generate one special handler for all of them below, which just calls
3183 abort. */
3184 if (! DECL_TOO_LATE (TREE_VALUE (link)))
3185 {
3186 expand_nl_handler_label (XEXP (slot, 0),
3187 thisblock->data.block.first_insn);
3188 expand_nl_goto_receiver ();
3189
3190 /* Jump to the "real" nonlocal label. */
3191 expand_goto (TREE_VALUE (link));
3192 }
3193
3194 /* A second pass over all nonlocal labels; this time we handle those
3195 we should not be able to jump to at this point. */
3196 link = nonlocal_labels;
3197 slot = nonlocal_goto_handler_slots;
3198 any_invalid = 0;
3199 for (; link; link = TREE_CHAIN (link), slot = XEXP (slot, 1))
3200 if (DECL_TOO_LATE (TREE_VALUE (link)))
3201 {
3202 expand_nl_handler_label (XEXP (slot, 0),
3203 thisblock->data.block.first_insn);
3204 any_invalid = 1;
3205 }
3206
3207 if (any_invalid)
3208 {
3209 expand_nl_goto_receiver ();
3210 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "abort"), 0,
3211 VOIDmode, 0);
3212 emit_barrier ();
3213 }
3214
3215 emit_label (afterward);
3216 }
3217
3218 /* Generate RTL code to terminate a binding contour.
3219 VARS is the chain of VAR_DECL nodes
3220 for the variables bound in this contour.
3221 MARK_ENDS is nonzero if we should put a note at the beginning
3222 and end of this binding contour.
3223
3224 DONT_JUMP_IN is nonzero if it is not valid to jump into this contour.
3225 (That is true automatically if the contour has a saved stack level.) */
3226
3227 void
3228 expand_end_bindings (vars, mark_ends, dont_jump_in)
3229 tree vars;
3230 int mark_ends;
3231 int dont_jump_in;
3232 {
3233 register struct nesting *thisblock;
3234 register tree decl;
3235
3236 while (block_stack->data.block.exception_region)
3237 {
3238 /* Because we don't need or want a new temporary level and
3239 because we didn't create one in expand_eh_region_start,
3240 create a fake one now to avoid removing one in
3241 expand_end_bindings. */
3242 push_temp_slots ();
3243
3244 block_stack->data.block.exception_region = 0;
3245
3246 expand_end_bindings (NULL_TREE, 0, 0);
3247 }
3248
3249 /* Since expand_eh_region_start does an expand_start_bindings, we
3250 have to first end all the bindings that were created by
3251 expand_eh_region_start. */
3252
3253 thisblock = block_stack;
3254
3255 if (warn_unused)
3256 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3257 if (! TREE_USED (decl) && TREE_CODE (decl) == VAR_DECL
3258 && ! DECL_IN_SYSTEM_HEADER (decl)
3259 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
3260 warning_with_decl (decl, "unused variable `%s'");
3261
3262 if (thisblock->exit_label)
3263 {
3264 do_pending_stack_adjust ();
3265 emit_label (thisblock->exit_label);
3266 }
3267
3268 /* If necessary, make handlers for nonlocal gotos taking
3269 place in the function calls in this block. */
3270 if (function_call_count != thisblock->data.block.function_call_count
3271 && nonlocal_labels
3272 /* Make handler for outermost block
3273 if there were any nonlocal gotos to this function. */
3274 && (thisblock->next == 0 ? current_function_has_nonlocal_label
3275 /* Make handler for inner block if it has something
3276 special to do when you jump out of it. */
3277 : (thisblock->data.block.cleanups != 0
3278 || thisblock->data.block.stack_level != 0)))
3279 expand_nl_goto_receivers (thisblock);
3280
3281 /* Don't allow jumping into a block that has a stack level.
3282 Cleanups are allowed, though. */
3283 if (dont_jump_in
3284 || thisblock->data.block.stack_level != 0)
3285 {
3286 struct label_chain *chain;
3287
3288 /* Any labels in this block are no longer valid to go to.
3289 Mark them to cause an error message. */
3290 for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
3291 {
3292 DECL_TOO_LATE (chain->label) = 1;
3293 /* If any goto without a fixup came to this label,
3294 that must be an error, because gotos without fixups
3295 come from outside all saved stack-levels. */
3296 if (TREE_ADDRESSABLE (chain->label))
3297 error_with_decl (chain->label,
3298 "label `%s' used before containing binding contour");
3299 }
3300 }
3301
3302 /* Restore stack level in effect before the block
3303 (only if variable-size objects allocated). */
3304 /* Perform any cleanups associated with the block. */
3305
3306 if (thisblock->data.block.stack_level != 0
3307 || thisblock->data.block.cleanups != 0)
3308 {
3309 /* Only clean up here if this point can actually be reached. */
3310 int reachable = GET_CODE (get_last_insn ()) != BARRIER;
3311
3312 /* Don't let cleanups affect ({...}) constructs. */
3313 int old_expr_stmts_for_value = expr_stmts_for_value;
3314 rtx old_last_expr_value = last_expr_value;
3315 tree old_last_expr_type = last_expr_type;
3316 expr_stmts_for_value = 0;
3317
3318 /* Do the cleanups. */
3319 expand_cleanups (thisblock->data.block.cleanups, NULL_TREE, 0, reachable);
3320 if (reachable)
3321 do_pending_stack_adjust ();
3322
3323 expr_stmts_for_value = old_expr_stmts_for_value;
3324 last_expr_value = old_last_expr_value;
3325 last_expr_type = old_last_expr_type;
3326
3327 /* Restore the stack level. */
3328
3329 if (reachable && thisblock->data.block.stack_level != 0)
3330 {
3331 emit_stack_restore (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3332 thisblock->data.block.stack_level, NULL_RTX);
3333 if (nonlocal_goto_handler_slots != 0)
3334 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level,
3335 NULL_RTX);
3336 }
3337
3338 /* Any gotos out of this block must also do these things.
3339 Also report any gotos with fixups that came to labels in this
3340 level. */
3341 fixup_gotos (thisblock,
3342 thisblock->data.block.stack_level,
3343 thisblock->data.block.cleanups,
3344 thisblock->data.block.first_insn,
3345 dont_jump_in);
3346 }
3347
3348 /* Mark the beginning and end of the scope if requested.
3349 We do this now, after running cleanups on the variables
3350 just going out of scope, so they are in scope for their cleanups. */
3351
3352 if (mark_ends)
3353 last_block_end_note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
3354 else
3355 /* Get rid of the beginning-mark if we don't make an end-mark. */
3356 NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;
3357
3358 /* If doing stupid register allocation, make sure lives of all
3359 register variables declared here extend thru end of scope. */
3360
3361 if (obey_regdecls)
3362 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3363 {
3364 rtx rtl = DECL_RTL (decl);
3365 if (TREE_CODE (decl) == VAR_DECL && rtl != 0)
3366 use_variable (rtl);
3367 }
3368
3369 /* Restore the temporary level of TARGET_EXPRs. */
3370 target_temp_slot_level = thisblock->data.block.target_temp_slot_level;
3371
3372 /* Restore block_stack level for containing block. */
3373
3374 stack_block_stack = thisblock->data.block.innermost_stack_block;
3375 POPSTACK (block_stack);
3376
3377 /* Pop the stack slot nesting and free any slots at this level. */
3378 pop_temp_slots ();
3379 }
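
/* An illustrative sketch of how a front end might expand a braced block
   such as `{ int x; use (x); }' with the routines above.  The helper
   below is hypothetical; declaration initializers and cleanups would
   need further calls not shown here.  */
#if 0
static void
example_expand_block (x_decl, body_stmt)
     tree x_decl, body_stmt;
{
  expand_start_bindings (0);     /* NOTE_INSN_BLOCK_BEG; push block_stack.  */
  expand_decl (x_decl);          /* Give X a register or stack slot.  */
  expand_expr_stmt (body_stmt);  /* The statements of the block.  */
  expand_end_bindings (x_decl, 1, 0); /* Cleanups, NOTE_INSN_BLOCK_END, pop.  */
}
#endif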
3380 \f
3381 /* Generate RTL for the automatic variable declaration DECL.
3382 (Other kinds of declarations are simply ignored if seen here.) */
3383
3384 void
3385 expand_decl (decl)
3386 register tree decl;
3387 {
3388 struct nesting *thisblock = block_stack;
3389 tree type;
3390
3391 type = TREE_TYPE (decl);
3392
3393 /* Only automatic variables need any expansion done.
3394 Static and external variables, and external functions,
3395 will be handled by `assemble_variable' (called from finish_decl).
3396 TYPE_DECL and CONST_DECL require nothing.
3397 PARM_DECLs are handled in `assign_parms'. */
3398
3399 if (TREE_CODE (decl) != VAR_DECL)
3400 return;
3401 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
3402 return;
3403
3404 /* Create the RTL representation for the variable. */
3405
3406 if (type == error_mark_node)
3407 DECL_RTL (decl) = gen_rtx_MEM (BLKmode, const0_rtx);
3408 else if (DECL_SIZE (decl) == 0)
3409 /* Variable with incomplete type. */
3410 {
3411 if (DECL_INITIAL (decl) == 0)
3412 /* Error message was already done; now avoid a crash. */
3413 DECL_RTL (decl) = assign_stack_temp (DECL_MODE (decl), 0, 1);
3414 else
3415 /* An initializer is going to decide the size of this array.
3416 Until we know the size, represent its address with a reg. */
3417 DECL_RTL (decl) = gen_rtx_MEM (BLKmode, gen_reg_rtx (Pmode));
3418 MEM_SET_IN_STRUCT_P (DECL_RTL (decl), AGGREGATE_TYPE_P (type));
3419 }
3420 else if (DECL_MODE (decl) != BLKmode
3421 /* If -ffloat-store, don't put explicit float vars
3422 into regs. */
3423 && !(flag_float_store
3424 && TREE_CODE (type) == REAL_TYPE)
3425 && ! TREE_THIS_VOLATILE (decl)
3426 && ! TREE_ADDRESSABLE (decl)
3427 && (DECL_REGISTER (decl) || ! obey_regdecls)
3428 /* if -fcheck-memory-usage, check all variables. */
3429 && ! current_function_check_memory_usage)
3430 {
3431 /* Automatic variable that can go in a register. */
3432 int unsignedp = TREE_UNSIGNED (type);
3433 enum machine_mode reg_mode
3434 = promote_mode (type, DECL_MODE (decl), &unsignedp, 0);
3435
3436 DECL_RTL (decl) = gen_reg_rtx (reg_mode);
3437 mark_user_reg (DECL_RTL (decl));
3438
3439 if (POINTER_TYPE_P (type))
3440 mark_reg_pointer (DECL_RTL (decl),
3441 (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl)))
3442 / BITS_PER_UNIT));
3443 }
3444
3445 else if (TREE_CODE (DECL_SIZE (decl)) == INTEGER_CST
3446 && ! (flag_stack_check && ! STACK_CHECK_BUILTIN
3447 && (TREE_INT_CST_HIGH (DECL_SIZE (decl)) != 0
3448 || (TREE_INT_CST_LOW (DECL_SIZE (decl))
3449 > STACK_CHECK_MAX_VAR_SIZE * BITS_PER_UNIT))))
3450 {
3451 /* Variable of fixed size that goes on the stack. */
3452 rtx oldaddr = 0;
3453 rtx addr;
3454
3455 /* If we previously made RTL for this decl, it must be an array
3456 whose size was determined by the initializer.
3457 The old address was a register; set that register now
3458 to the proper address. */
3459 if (DECL_RTL (decl) != 0)
3460 {
3461 if (GET_CODE (DECL_RTL (decl)) != MEM
3462 || GET_CODE (XEXP (DECL_RTL (decl), 0)) != REG)
3463 abort ();
3464 oldaddr = XEXP (DECL_RTL (decl), 0);
3465 }
3466
3467 DECL_RTL (decl)
3468 = assign_stack_temp (DECL_MODE (decl),
3469 ((TREE_INT_CST_LOW (DECL_SIZE (decl))
3470 + BITS_PER_UNIT - 1)
3471 / BITS_PER_UNIT),
3472 1);
3473 MEM_SET_IN_STRUCT_P (DECL_RTL (decl),
3474 AGGREGATE_TYPE_P (TREE_TYPE (decl)));
3475
3476       /* Set the alignment we actually gave this decl. */
3477 DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
3478 : GET_MODE_BITSIZE (DECL_MODE (decl)));
3479
3480 if (oldaddr)
3481 {
3482 addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
3483 if (addr != oldaddr)
3484 emit_move_insn (oldaddr, addr);
3485 }
3486
3487 /* If this is a memory ref that contains aggregate components,
3488          mark it as such for cse and loop optimization. */
3489 MEM_SET_IN_STRUCT_P (DECL_RTL (decl),
3490 AGGREGATE_TYPE_P (TREE_TYPE (decl)));
3491 #if 0
3492 /* If this is in memory because of -ffloat-store,
3493 set the volatile bit, to prevent optimizations from
3494 undoing the effects. */
3495 if (flag_float_store && TREE_CODE (type) == REAL_TYPE)
3496 MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
3497 #endif
3498
3499 MEM_ALIAS_SET (DECL_RTL (decl)) = get_alias_set (decl);
3500 }
3501 else
3502 /* Dynamic-size object: must push space on the stack. */
3503 {
3504 rtx address, size;
3505
3506       /* Record the stack pointer on entry to the block, if we have
3507 not already done so. */
3508 if (thisblock->data.block.stack_level == 0)
3509 {
3510 do_pending_stack_adjust ();
3511 emit_stack_save (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3512 &thisblock->data.block.stack_level,
3513 thisblock->data.block.first_insn);
3514 stack_block_stack = thisblock;
3515 }
3516
3517 /* Compute the variable's size, in bytes. */
3518 size = expand_expr (size_binop (CEIL_DIV_EXPR,
3519 DECL_SIZE (decl),
3520 size_int (BITS_PER_UNIT)),
3521 NULL_RTX, VOIDmode, 0);
3522 free_temp_slots ();
3523
3524 /* Allocate space on the stack for the variable. Note that
3525 DECL_ALIGN says how the variable is to be aligned and we
3526 cannot use it to conclude anything about the alignment of
3527 the size. */
3528 address = allocate_dynamic_stack_space (size, NULL_RTX,
3529 TYPE_ALIGN (TREE_TYPE (decl)));
3530
3531       /* Reference the variable indirectly through that rtx. */
3532 DECL_RTL (decl) = gen_rtx_MEM (DECL_MODE (decl), address);
3533
3534 /* If this is a memory ref that contains aggregate components,
3535          mark it as such for cse and loop optimization. */
3536 MEM_SET_IN_STRUCT_P (DECL_RTL (decl),
3537 AGGREGATE_TYPE_P (TREE_TYPE (decl)));
3538
3539 /* Indicate the alignment we actually gave this variable. */
3540 #ifdef STACK_BOUNDARY
3541 DECL_ALIGN (decl) = STACK_BOUNDARY;
3542 #else
3543 DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
3544 #endif
3545 }
3546
3547 if (TREE_THIS_VOLATILE (decl))
3548 MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
3549 #if 0 /* A variable is not necessarily unchanging
3550 just because it is const. RTX_UNCHANGING_P
3551 means no change in the function,
3552 not merely no change in the variable's scope.
3553 It is correct to set RTX_UNCHANGING_P if the variable's scope
3554 is the whole function. There's no convenient way to test that. */
3555 if (TREE_READONLY (decl))
3556 RTX_UNCHANGING_P (DECL_RTL (decl)) = 1;
3557 #endif
3558
3559 /* If doing stupid register allocation, make sure life of any
3560 register variable starts here, at the start of its scope. */
3561
3562 if (obey_regdecls)
3563 use_variable (DECL_RTL (decl));
3564 }
3565
3566
3567 \f
3568 /* Emit code to perform the initialization of a declaration DECL. */
3569
3570 void
3571 expand_decl_init (decl)
3572 tree decl;
3573 {
3574 int was_used = TREE_USED (decl);
3575
3576 /* If this is a CONST_DECL, we don't have to generate any code, but
3577 if DECL_INITIAL is a constant, call expand_expr to force TREE_CST_RTL
3578 to be set while in the obstack containing the constant. If we don't
3579 do this, we can lose if we have functions nested three deep and the middle
3580 function makes a CONST_DECL whose DECL_INITIAL is a STRING_CST while
3581 the innermost function is the first to expand that STRING_CST. */
3582 if (TREE_CODE (decl) == CONST_DECL)
3583 {
3584 if (DECL_INITIAL (decl) && TREE_CONSTANT (DECL_INITIAL (decl)))
3585 expand_expr (DECL_INITIAL (decl), NULL_RTX, VOIDmode,
3586 EXPAND_INITIALIZER);
3587 return;
3588 }
3589
3590 if (TREE_STATIC (decl))
3591 return;
3592
3593 /* Compute and store the initial value now. */
3594
3595 if (DECL_INITIAL (decl) == error_mark_node)
3596 {
3597 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
3598
3599 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
3600 || code == POINTER_TYPE || code == REFERENCE_TYPE)
3601 expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
3602 0, 0);
3603 emit_queue ();
3604 }
3605 else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
3606 {
3607 emit_line_note (DECL_SOURCE_FILE (decl), DECL_SOURCE_LINE (decl));
3608 expand_assignment (decl, DECL_INITIAL (decl), 0, 0);
3609 emit_queue ();
3610 }
3611
3612 /* Don't let the initialization count as "using" the variable. */
3613 TREE_USED (decl) = was_used;
3614
3615 /* Free any temporaries we made while initializing the decl. */
3616 preserve_temp_slots (NULL_RTX);
3617 free_temp_slots ();
3618 }
3619
3620 /* CLEANUP is an expression to be executed at exit from this binding contour;
3621 for example, in C++, it might call the destructor for this variable.
3622
3623 We wrap CLEANUP in an UNSAVE_EXPR node, so that we can expand the
3624 CLEANUP multiple times, and have the correct semantics. This
3625    happens in exception handling, and for gotos, returns, and breaks that
3626 leave the current scope.
3627
3628 If CLEANUP is nonzero and DECL is zero, we record a cleanup
3629 that is not associated with any particular variable. */
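/* For instance, a C++ front end might use this to run a destructor when a
   local object goes out of scope (an illustrative sketch; the actual trees
   are built by the front end):

       {
         String s;       // expand_decl (s); expand_decl_cleanup (s, dtor call)
         if (cond)
           return;       // the cleanup is expanded for this early exit ...
       }                 // ... and again for the normal end of the block,
                         // which is why CLEANUP is wrapped in UNSAVE_EXPR.
*/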
3630
3631 int
3632 expand_decl_cleanup (decl, cleanup)
3633 tree decl, cleanup;
3634 {
3635 struct nesting *thisblock = block_stack;
3636
3637 /* Error if we are not in any block. */
3638 if (thisblock == 0)
3639 return 0;
3640
3641 /* Record the cleanup if there is one. */
3642
3643 if (cleanup != 0)
3644 {
3645 tree t;
3646 rtx seq;
3647 tree *cleanups = &thisblock->data.block.cleanups;
3648 int cond_context = conditional_context ();
3649
3650 if (cond_context)
3651 {
3652 rtx flag = gen_reg_rtx (word_mode);
3653 rtx set_flag_0;
3654 tree cond;
3655
3656 start_sequence ();
3657 emit_move_insn (flag, const0_rtx);
3658 set_flag_0 = get_insns ();
3659 end_sequence ();
3660
3661 thisblock->data.block.last_unconditional_cleanup
3662 = emit_insns_after (set_flag_0,
3663 thisblock->data.block.last_unconditional_cleanup);
3664
3665 emit_move_insn (flag, const1_rtx);
3666
3667 /* All cleanups must be on the function_obstack. */
3668 push_obstacks_nochange ();
3669 resume_temporary_allocation ();
3670
3671 cond = build_decl (VAR_DECL, NULL_TREE, type_for_mode (word_mode, 1));
3672 DECL_RTL (cond) = flag;
3673
3674 /* Conditionalize the cleanup. */
3675 cleanup = build (COND_EXPR, void_type_node,
3676 truthvalue_conversion (cond),
3677 cleanup, integer_zero_node);
3678 cleanup = fold (cleanup);
3679
3680 pop_obstacks ();
3681
3682 cleanups = thisblock->data.block.cleanup_ptr;
3683 }
3684
3685 /* All cleanups must be on the function_obstack. */
3686 push_obstacks_nochange ();
3687 resume_temporary_allocation ();
3688 cleanup = unsave_expr (cleanup);
3689 pop_obstacks ();
3690
3691 t = *cleanups = temp_tree_cons (decl, cleanup, *cleanups);
3692
3693 if (! cond_context)
3694 /* If this block has a cleanup, it belongs in stack_block_stack. */
3695 stack_block_stack = thisblock;
3696
3697 if (cond_context)
3698 {
3699 start_sequence ();
3700 }
3701
3702 /* If this was optimized so that there is no exception region for the
3703 cleanup, then mark the TREE_LIST node, so that we can later tell
3704 if we need to call expand_eh_region_end. */
3705 if (! using_eh_for_cleanups_p
3706 || expand_eh_region_start_tree (decl, cleanup))
3707 TREE_ADDRESSABLE (t) = 1;
3708 /* If that started a new EH region, we're in a new block. */
3709 thisblock = block_stack;
3710
3711 if (cond_context)
3712 {
3713 seq = get_insns ();
3714 end_sequence ();
3715 if (seq)
3716 thisblock->data.block.last_unconditional_cleanup
3717 = emit_insns_after (seq,
3718 thisblock->data.block.last_unconditional_cleanup);
3719 }
3720 else
3721 {
3722 thisblock->data.block.last_unconditional_cleanup
3723 = get_last_insn ();
3724 thisblock->data.block.cleanup_ptr = &thisblock->data.block.cleanups;
3725 }
3726 }
3727 return 1;
3728 }
3729
3730 /* Like expand_decl_cleanup, but suppress generating an exception handler
3731 to perform the cleanup. */
3732
3733 int
3734 expand_decl_cleanup_no_eh (decl, cleanup)
3735 tree decl, cleanup;
3736 {
3737 int save_eh = using_eh_for_cleanups_p;
3738 int result;
3739
3740 using_eh_for_cleanups_p = 0;
3741 result = expand_decl_cleanup (decl, cleanup);
3742 using_eh_for_cleanups_p = save_eh;
3743
3744 return result;
3745 }
3746
3747 /* Arrange for the top element of the dynamic cleanup chain to be
3748 popped if we exit the current binding contour. DECL is the
3749 associated declaration, if any, otherwise NULL_TREE. If the
3750 current contour is left via an exception, then __sjthrow will pop
3751 the top element off the dynamic cleanup chain. The code that
3752 avoids doing the action we push into the cleanup chain in the
3753 exceptional case is contained in expand_cleanups.
3754
3755 This routine is only used by expand_eh_region_start, and that is
3756 the only way in which an exception region should be started. This
3757 routine is only used when using the setjmp/longjmp codegen method
3758 for exception handling. */
3759
3760 int
3761 expand_dcc_cleanup (decl)
3762 tree decl;
3763 {
3764 struct nesting *thisblock = block_stack;
3765 tree cleanup;
3766
3767 /* Error if we are not in any block. */
3768 if (thisblock == 0)
3769 return 0;
3770
3771 /* Record the cleanup for the dynamic handler chain. */
3772
3773 /* All cleanups must be on the function_obstack. */
3774 push_obstacks_nochange ();
3775 resume_temporary_allocation ();
3776 cleanup = make_node (POPDCC_EXPR);
3777 pop_obstacks ();
3778
3779 /* Add the cleanup in a manner similar to expand_decl_cleanup. */
3780 thisblock->data.block.cleanups
3781 = temp_tree_cons (decl, cleanup, thisblock->data.block.cleanups);
3782
3783 /* If this block has a cleanup, it belongs in stack_block_stack. */
3784 stack_block_stack = thisblock;
3785 return 1;
3786 }
3787
3788 /* Arrange for the top element of the dynamic handler chain to be
3789 popped if we exit the current binding contour. DECL is the
3790 associated declaration, if any, otherwise NULL_TREE. If the current
3791 contour is left via an exception, then __sjthrow will pop the top
3792 element off the dynamic handler chain. The code that avoids doing
3793 the action we push into the handler chain in the exceptional case
3794 is contained in expand_cleanups.
3795
3796 This routine is only used by expand_eh_region_start, and that is
3797 the only way in which an exception region should be started. This
3798 routine is only used when using the setjmp/longjmp codegen method
3799 for exception handling. */
3800
3801 int
3802 expand_dhc_cleanup (decl)
3803 tree decl;
3804 {
3805 struct nesting *thisblock = block_stack;
3806 tree cleanup;
3807
3808 /* Error if we are not in any block. */
3809 if (thisblock == 0)
3810 return 0;
3811
3812 /* Record the cleanup for the dynamic handler chain. */
3813
3814 /* All cleanups must be on the function_obstack. */
3815 push_obstacks_nochange ();
3816 resume_temporary_allocation ();
3817 cleanup = make_node (POPDHC_EXPR);
3818 pop_obstacks ();
3819
3820 /* Add the cleanup in a manner similar to expand_decl_cleanup. */
3821 thisblock->data.block.cleanups
3822 = temp_tree_cons (decl, cleanup, thisblock->data.block.cleanups);
3823
3824 /* If this block has a cleanup, it belongs in stack_block_stack. */
3825 stack_block_stack = thisblock;
3826 return 1;
3827 }
3828 \f
3829 /* DECL is an anonymous union. CLEANUP is a cleanup for DECL.
3830 DECL_ELTS is the list of elements that belong to DECL's type.
3831 In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup. */
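/* For example, given the C++ member declaration

       union { int i; double d; };

   DECL is the compiler-generated VAR_DECL for the unnamed union itself and
   DECL_ELTS carries the VAR_DECLs for `i' and `d'; the loop below makes
   each element's DECL_RTL reuse DECL's storage (illustrative; the lists
   are built by the front end).  */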
3832
3833 void
3834 expand_anon_union_decl (decl, cleanup, decl_elts)
3835 tree decl, cleanup, decl_elts;
3836 {
3837 struct nesting *thisblock = block_stack;
3838 rtx x;
3839
3840 expand_decl (decl);
3841 expand_decl_cleanup (decl, cleanup);
3842 x = DECL_RTL (decl);
3843
3844 while (decl_elts)
3845 {
3846 tree decl_elt = TREE_VALUE (decl_elts);
3847 tree cleanup_elt = TREE_PURPOSE (decl_elts);
3848 enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
3849
3850 /* Propagate the union's alignment to the elements. */
3851 DECL_ALIGN (decl_elt) = DECL_ALIGN (decl);
3852
3853 /* If the element has BLKmode and the union doesn't, the union is
3854 aligned such that the element doesn't need to have BLKmode, so
3855 change the element's mode to the appropriate one for its size. */
3856 if (mode == BLKmode && DECL_MODE (decl) != BLKmode)
3857 DECL_MODE (decl_elt) = mode
3858 = mode_for_size (TREE_INT_CST_LOW (DECL_SIZE (decl_elt)),
3859 MODE_INT, 1);
3860
3861 /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
3862 instead create a new MEM rtx with the proper mode. */
3863 if (GET_CODE (x) == MEM)
3864 {
3865 if (mode == GET_MODE (x))
3866 DECL_RTL (decl_elt) = x;
3867 else
3868 {
3869 DECL_RTL (decl_elt) = gen_rtx_MEM (mode, copy_rtx (XEXP (x, 0)));
3870 MEM_COPY_ATTRIBUTES (DECL_RTL (decl_elt), x);
3871 RTX_UNCHANGING_P (DECL_RTL (decl_elt)) = RTX_UNCHANGING_P (x);
3872 }
3873 }
3874 else if (GET_CODE (x) == REG)
3875 {
3876 if (mode == GET_MODE (x))
3877 DECL_RTL (decl_elt) = x;
3878 else
3879 DECL_RTL (decl_elt) = gen_rtx_SUBREG (mode, x, 0);
3880 }
3881 else
3882 abort ();
3883
3884 /* Record the cleanup if there is one. */
3885
3886 if (cleanup != 0)
3887 thisblock->data.block.cleanups
3888 = temp_tree_cons (decl_elt, cleanup_elt,
3889 thisblock->data.block.cleanups);
3890
3891 decl_elts = TREE_CHAIN (decl_elts);
3892 }
3893 }
3894 \f
3895 /* Expand a list of cleanups LIST.
3896 Elements may be expressions or may be nested lists.
3897
3898 If DONT_DO is nonnull, then any list-element
3899 whose TREE_PURPOSE matches DONT_DO is omitted.
3900 This is sometimes used to avoid a cleanup associated with
3901 a value that is being returned out of the scope.
3902
3903 If IN_FIXUP is non-zero, we are generating this cleanup for a fixup
3904 goto and handle protection regions specially in that case.
3905
3906    If REACHABLE, we emit code; otherwise we just inform the exception handling
3907 code about this finalization. */
3908
3909 static void
3910 expand_cleanups (list, dont_do, in_fixup, reachable)
3911 tree list;
3912 tree dont_do;
3913 int in_fixup;
3914 int reachable;
3915 {
3916 tree tail;
3917 for (tail = list; tail; tail = TREE_CHAIN (tail))
3918 if (dont_do == 0 || TREE_PURPOSE (tail) != dont_do)
3919 {
3920 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3921 expand_cleanups (TREE_VALUE (tail), dont_do, in_fixup, reachable);
3922 else
3923 {
3924 if (! in_fixup)
3925 {
3926 tree cleanup = TREE_VALUE (tail);
3927
3928 /* See expand_d{h,c}c_cleanup for why we avoid this. */
3929 if (TREE_CODE (cleanup) != POPDHC_EXPR
3930 && TREE_CODE (cleanup) != POPDCC_EXPR
3931 /* See expand_eh_region_start_tree for this case. */
3932 && ! TREE_ADDRESSABLE (tail))
3933 {
3934 cleanup = protect_with_terminate (cleanup);
3935 expand_eh_region_end (cleanup);
3936 }
3937 }
3938
3939 if (reachable)
3940 {
3941 /* Cleanups may be run multiple times. For example,
3942 when exiting a binding contour, we expand the
3943 cleanups associated with that contour. When a goto
3944 within that binding contour has a target outside that
3945 contour, it will expand all cleanups from its scope to
3946 the target. Though the cleanups are expanded multiple
3947 times, the control paths are non-overlapping so the
3948 cleanups will not be executed twice. */
3949
3950 /* We may need to protect fixups with rethrow regions. */
3951 int protect = (in_fixup && ! TREE_ADDRESSABLE (tail));
3952
3953 if (protect)
3954 expand_fixup_region_start ();
3955
3956 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
3957 if (protect)
3958 expand_fixup_region_end (TREE_VALUE (tail));
3959 free_temp_slots ();
3960 }
3961 }
3962 }
3963 }
3964
3965 /* Mark the context we are emitting RTL for as a conditional
3966    context, so that any cleanup actions we register with
3967    expand_decl_cleanup will be properly conditionalized when those
3968 cleanup actions are later performed. Must be called before any
3969 expression (tree) is expanded that is within a conditional context. */
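/* For example (an illustrative sketch), while expanding

       x = cond ? make_temp_with_cleanup (1) : make_temp_with_cleanup (2);

   each arm is expanded between start_cleanup_deferral and
   end_cleanup_deferral, so any cleanup registered there goes through the
   cond_context path of expand_decl_cleanup above and is guarded by a
   run-time flag: it only runs if its arm was actually evaluated.
   (make_temp_with_cleanup is a hypothetical front-end construct.)  */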
3970
3971 void
3972 start_cleanup_deferral ()
3973 {
3974 /* block_stack can be NULL if we are inside the parameter list. It is
3975 OK to do nothing, because cleanups aren't possible here. */
3976 if (block_stack)
3977 ++block_stack->data.block.conditional_code;
3978 }
3979
3980 /* Mark the end of a conditional region of code. Because cleanup
3981 deferrals may be nested, we may still be in a conditional region
3982    after we end the currently deferred cleanups; only after we end all
3983    deferred cleanups are we back in unconditional code. */
3984
3985 void
3986 end_cleanup_deferral ()
3987 {
3988 /* block_stack can be NULL if we are inside the parameter list. It is
3989 OK to do nothing, because cleanups aren't possible here. */
3990 if (block_stack)
3991 --block_stack->data.block.conditional_code;
3992 }
3993
3994 /* Move all cleanups from the current block_stack
3995 to the containing block_stack, where they are assumed to
3996 have been created. If anything can cause a temporary to
3997 be created, but not expanded for more than one level of
3998 block_stacks, then this code will have to change. */
3999
4000 void
4001 move_cleanups_up ()
4002 {
4003 struct nesting *block = block_stack;
4004 struct nesting *outer = block->next;
4005
4006 outer->data.block.cleanups
4007 = chainon (block->data.block.cleanups,
4008 outer->data.block.cleanups);
4009 block->data.block.cleanups = 0;
4010 }
4011
4012 tree
4013 last_cleanup_this_contour ()
4014 {
4015 if (block_stack == 0)
4016 return 0;
4017
4018 return block_stack->data.block.cleanups;
4019 }
4020
4021 /* Return 1 if there are any pending cleanups at this point.
4022 If THIS_CONTOUR is nonzero, check the current contour as well.
4023 Otherwise, look only at the contours that enclose this one. */
4024
4025 int
4026 any_pending_cleanups (this_contour)
4027 int this_contour;
4028 {
4029 struct nesting *block;
4030
4031 if (block_stack == 0)
4032 return 0;
4033
4034 if (this_contour && block_stack->data.block.cleanups != NULL)
4035 return 1;
4036 if (block_stack->data.block.cleanups == 0
4037 && block_stack->data.block.outer_cleanups == 0)
4038 return 0;
4039
4040 for (block = block_stack->next; block; block = block->next)
4041 if (block->data.block.cleanups != 0)
4042 return 1;
4043
4044 return 0;
4045 }
4046 \f
4047 /* Enter a case (Pascal) or switch (C) statement.
4048 Push a block onto case_stack and nesting_stack
4049 to accumulate the case-labels that are seen
4050 and to record the labels generated for the statement.
4051
4052 EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
4053 Otherwise, this construct is transparent for `exit_something'.
4054
4055 EXPR is the index-expression to be dispatched on.
4056 TYPE is its nominal type. We could simply convert EXPR to this type,
4057 but instead we take short cuts. */
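/* A sketch of the call sequence a front end might use for

       switch (x) { case 1: ...; default: ...; }

   (illustrative only; the real callers live in the front ends and the
   argument details vary):

       expand_start_case (1, x_tree, TREE_TYPE (x_tree), "switch statement");
       ... expand the body, calling pushcase/pushcase_range for each case
           label (pushcase with a null VALUE registers the default) ...
       expand_end_case (x_tree);
*/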
4058
4059 void
4060 expand_start_case (exit_flag, expr, type, printname)
4061 int exit_flag;
4062 tree expr;
4063 tree type;
4064 char *printname;
4065 {
4066 register struct nesting *thiscase = ALLOC_NESTING ();
4067
4068 /* Make an entry on case_stack for the case we are entering. */
4069
4070 thiscase->next = case_stack;
4071 thiscase->all = nesting_stack;
4072 thiscase->depth = ++nesting_depth;
4073 thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
4074 thiscase->data.case_stmt.case_list = 0;
4075 thiscase->data.case_stmt.index_expr = expr;
4076 thiscase->data.case_stmt.nominal_type = type;
4077 thiscase->data.case_stmt.default_label = 0;
4078 thiscase->data.case_stmt.num_ranges = 0;
4079 thiscase->data.case_stmt.printname = printname;
4080 thiscase->data.case_stmt.line_number_status = force_line_numbers ();
4081 case_stack = thiscase;
4082 nesting_stack = thiscase;
4083
4084 do_pending_stack_adjust ();
4085
4086 /* Make sure case_stmt.start points to something that won't
4087 need any transformation before expand_end_case. */
4088 if (GET_CODE (get_last_insn ()) != NOTE)
4089 emit_note (NULL_PTR, NOTE_INSN_DELETED);
4090
4091 thiscase->data.case_stmt.start = get_last_insn ();
4092
4093 start_cleanup_deferral ();
4094 }
4095
4096
4097 /* Start a "dummy case statement" within which case labels are invalid
4098 and are not connected to any larger real case statement.
4099 This can be used if you don't want to let a case statement jump
4100 into the middle of certain kinds of constructs. */
4101
4102 void
4103 expand_start_case_dummy ()
4104 {
4105 register struct nesting *thiscase = ALLOC_NESTING ();
4106
4107 /* Make an entry on case_stack for the dummy. */
4108
4109 thiscase->next = case_stack;
4110 thiscase->all = nesting_stack;
4111 thiscase->depth = ++nesting_depth;
4112 thiscase->exit_label = 0;
4113 thiscase->data.case_stmt.case_list = 0;
4114 thiscase->data.case_stmt.start = 0;
4115 thiscase->data.case_stmt.nominal_type = 0;
4116 thiscase->data.case_stmt.default_label = 0;
4117 thiscase->data.case_stmt.num_ranges = 0;
4118 case_stack = thiscase;
4119 nesting_stack = thiscase;
4120 start_cleanup_deferral ();
4121 }
4122
4123 /* End a dummy case statement. */
4124
4125 void
4126 expand_end_case_dummy ()
4127 {
4128 end_cleanup_deferral ();
4129 POPSTACK (case_stack);
4130 }
4131
4132 /* Return the data type of the index-expression
4133 of the innermost case statement, or null if none. */
4134
4135 tree
4136 case_index_expr_type ()
4137 {
4138 if (case_stack)
4139 return TREE_TYPE (case_stack->data.case_stmt.index_expr);
4140 return 0;
4141 }
4142 \f
4143 static void
4144 check_seenlabel ()
4145 {
4146 /* If this is the first label, warn if any insns have been emitted. */
4147 if (case_stack->data.case_stmt.line_number_status >= 0)
4148 {
4149 rtx insn;
4150
4151 restore_line_number_status
4152 (case_stack->data.case_stmt.line_number_status);
4153 case_stack->data.case_stmt.line_number_status = -1;
4154
4155 for (insn = case_stack->data.case_stmt.start;
4156 insn;
4157 insn = NEXT_INSN (insn))
4158 {
4159 if (GET_CODE (insn) == CODE_LABEL)
4160 break;
4161 if (GET_CODE (insn) != NOTE
4162 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
4163 {
4164 do
4165 insn = PREV_INSN (insn);
4166 while (insn && (GET_CODE (insn) != NOTE || NOTE_LINE_NUMBER (insn) < 0));
4167
4168 /* If insn is zero, then there must have been a syntax error. */
4169 if (insn)
4170             warning_with_file_and_line (NOTE_SOURCE_FILE (insn),
4171                                         NOTE_LINE_NUMBER (insn),
4172 "unreachable code at beginning of %s",
4173 case_stack->data.case_stmt.printname);
4174 break;
4175 }
4176 }
4177 }
4178 }
4179
4180 /* Accumulate one case or default label inside a case or switch statement.
4181 VALUE is the value of the case (a null pointer, for a default label).
4182 The function CONVERTER, when applied to arguments T and V,
4183 converts the value V to the type T.
4184
4185 If not currently inside a case or switch statement, return 1 and do
4186 nothing. The caller will print a language-specific error message.
4187 If VALUE is a duplicate or overlaps, return 2 and do nothing
4188 except store the (first) duplicate node in *DUPLICATE.
4189 If VALUE is out of range, return 3 and do nothing.
4190 If we are jumping into the scope of a cleanup or var-sized array, return 5.
4191 Return 0 on success.
4192
4193 Extended to handle range statements. */
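/* A caller typically maps the return value onto diagnostics, roughly like
   this (an illustrative sketch; the real messages belong to the front ends):

       switch (pushcase (val, convert, lab, &dup))
         {
         case 1: error ("case label not within a switch statement"); break;
         case 2: error_with_decl (dup, "duplicate case value"); break;
         case 3: error ("case value out of range"); break;
         case 5: error ("case label within scope of cleanup or variable array");
         }
*/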
4194
4195 int
4196 pushcase (value, converter, label, duplicate)
4197 register tree value;
4198 tree (*converter) PROTO((tree, tree));
4199 register tree label;
4200 tree *duplicate;
4201 {
4202 tree index_type;
4203 tree nominal_type;
4204
4205 /* Fail if not inside a real case statement. */
4206 if (! (case_stack && case_stack->data.case_stmt.start))
4207 return 1;
4208
4209 if (stack_block_stack
4210 && stack_block_stack->depth > case_stack->depth)
4211 return 5;
4212
4213 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4214 nominal_type = case_stack->data.case_stmt.nominal_type;
4215
4216 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4217 if (index_type == error_mark_node)
4218 return 0;
4219
4220 /* Convert VALUE to the type in which the comparisons are nominally done. */
4221 if (value != 0)
4222 value = (*converter) (nominal_type, value);
4223
4224 check_seenlabel ();
4225
4226 /* Fail if this value is out of range for the actual type of the index
4227 (which may be narrower than NOMINAL_TYPE). */
4228 if (value != 0 && ! int_fits_type_p (value, index_type))
4229 return 3;
4230
4231 /* Fail if this is a duplicate or overlaps another entry. */
4232 if (value == 0)
4233 {
4234 if (case_stack->data.case_stmt.default_label != 0)
4235 {
4236 *duplicate = case_stack->data.case_stmt.default_label;
4237 return 2;
4238 }
4239 case_stack->data.case_stmt.default_label = label;
4240 }
4241 else
4242 return add_case_node (value, value, label, duplicate);
4243
4244 expand_label (label);
4245 return 0;
4246 }
4247
4248 /* Like pushcase but this case applies to all values between VALUE1 and
4249 VALUE2 (inclusive). If VALUE1 is NULL, the range starts at the lowest
4250 value of the index type and ends at VALUE2. If VALUE2 is NULL, the range
4251 starts at VALUE1 and ends at the highest value of the index type.
4252 If both are NULL, this case applies to all values.
4253
4254 The return value is the same as that of pushcase but there is one
4255 additional error code: 4 means the specified range was empty. */
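/* For example, the GNU C range extension

       case 1 ... 3:

   arrives here with VALUE1 = 1 and VALUE2 = 3 (illustrative; the call is
   made by the front end).  A null VALUE1 or VALUE2 is replaced below by
   the minimum or maximum value of the index type.  */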
4256
4257 int
4258 pushcase_range (value1, value2, converter, label, duplicate)
4259 register tree value1, value2;
4260 tree (*converter) PROTO((tree, tree));
4261 register tree label;
4262 tree *duplicate;
4263 {
4264 tree index_type;
4265 tree nominal_type;
4266
4267 /* Fail if not inside a real case statement. */
4268 if (! (case_stack && case_stack->data.case_stmt.start))
4269 return 1;
4270
4271 if (stack_block_stack
4272 && stack_block_stack->depth > case_stack->depth)
4273 return 5;
4274
4275 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4276 nominal_type = case_stack->data.case_stmt.nominal_type;
4277
4278 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4279 if (index_type == error_mark_node)
4280 return 0;
4281
4282 check_seenlabel ();
4283
4284 /* Convert VALUEs to type in which the comparisons are nominally done
4285 and replace any unspecified value with the corresponding bound. */
4286 if (value1 == 0)
4287 value1 = TYPE_MIN_VALUE (index_type);
4288 if (value2 == 0)
4289 value2 = TYPE_MAX_VALUE (index_type);
4290
4291 /* Fail if the range is empty. Do this before any conversion since
4292 we want to allow out-of-range empty ranges. */
4293 if (value2 && tree_int_cst_lt (value2, value1))
4294 return 4;
4295
4296 value1 = (*converter) (nominal_type, value1);
4297
4298 /* If the max was unbounded, use the max of the nominal_type we are
4299 converting to. Do this after the < check above to suppress false
4300 positives. */
4301 if (!value2)
4302 value2 = TYPE_MAX_VALUE (nominal_type);
4303 value2 = (*converter) (nominal_type, value2);
4304
4305 /* Fail if these values are out of range. */
4306 if (TREE_CONSTANT_OVERFLOW (value1)
4307 || ! int_fits_type_p (value1, index_type))
4308 return 3;
4309
4310 if (TREE_CONSTANT_OVERFLOW (value2)
4311 || ! int_fits_type_p (value2, index_type))
4312 return 3;
4313
4314 return add_case_node (value1, value2, label, duplicate);
4315 }
4316
4317 /* Do the actual insertion of a case label for pushcase and pushcase_range
4318 into case_stack->data.case_stmt.case_list. Use an AVL tree to avoid
4319 slowdown for large switch statements. */
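/* Each node covers a [low, high] range and the ranges never overlap, so the
   tree is ordered by them; e.g. `case 1:', `case 5 ... 7:' and `case 9:'
   might be stored as

            [5,7]
           /     \
        [1,1]   [9,9]

   (illustrative only; the actual shape depends on insertion order and on
   the rebalancing done below).  */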
4320
4321 static int
4322 add_case_node (low, high, label, duplicate)
4323 tree low, high;
4324 tree label;
4325 tree *duplicate;
4326 {
4327 struct case_node *p, **q, *r;
4328
4329 q = &case_stack->data.case_stmt.case_list;
4330 p = *q;
4331
4332 while ((r = *q))
4333 {
4334 p = r;
4335
4336 /* Keep going past elements distinctly greater than HIGH. */
4337 if (tree_int_cst_lt (high, p->low))
4338 q = &p->left;
4339
4340 /* or distinctly less than LOW. */
4341 else if (tree_int_cst_lt (p->high, low))
4342 q = &p->right;
4343
4344 else
4345 {
4346 /* We have an overlap; this is an error. */
4347 *duplicate = p->code_label;
4348 return 2;
4349 }
4350 }
4351
4352 /* Add this label to the chain, and succeed.
4353    Copy LOW, HIGH so they are on the temporary rather than the momentary
4354 obstack and will thus survive till the end of the case statement. */
4355
4356 r = (struct case_node *) oballoc (sizeof (struct case_node));
4357 r->low = copy_node (low);
4358
4359 /* If the bounds are equal, turn this into the one-value case. */
4360
4361 if (tree_int_cst_equal (low, high))
4362 r->high = r->low;
4363 else
4364 {
4365 r->high = copy_node (high);
4366 case_stack->data.case_stmt.num_ranges++;
4367 }
4368
4369 r->code_label = label;
4370 expand_label (label);
4371
4372 *q = r;
4373 r->parent = p;
4374 r->left = 0;
4375 r->right = 0;
4376 r->balance = 0;
4377
4378 while (p)
4379 {
4380 struct case_node *s;
4381
4382 if (r == p->left)
4383 {
4384 int b;
4385
4386 if (! (b = p->balance))
4387 /* Growth propagation from left side. */
4388 p->balance = -1;
4389 else if (b < 0)
4390 {
4391 if (r->balance < 0)
4392 {
4393 /* R-Rotation */
4394 if ((p->left = s = r->right))
4395 s->parent = p;
4396
4397 r->right = p;
4398 p->balance = 0;
4399 r->balance = 0;
4400 s = p->parent;
4401 p->parent = r;
4402
4403 if ((r->parent = s))
4404 {
4405 if (s->left == p)
4406 s->left = r;
4407 else
4408 s->right = r;
4409 }
4410 else
4411 case_stack->data.case_stmt.case_list = r;
4412 }
4413 else
4414 /* r->balance == +1 */
4415 {
4416 /* LR-Rotation */
4417
4418 int b2;
4419 struct case_node *t = r->right;
4420
4421 if ((p->left = s = t->right))
4422 s->parent = p;
4423
4424 t->right = p;
4425 if ((r->right = s = t->left))
4426 s->parent = r;
4427
4428 t->left = r;
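                  /* Recompute the balance factors after the double rotation:
                     if T was left-heavy (-1), P ends up +1 and R ends up 0;
                     if T was right-heavy (+1), P ends up 0 and R ends up -1;
                     if T was balanced, both end up 0.  The two assignments
                     below encode exactly that table.  */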
4429 b = t->balance;
4430 b2 = b < 0;
4431 p->balance = b2;
4432 b2 = -b2 - b;
4433 r->balance = b2;
4434 t->balance = 0;
4435 s = p->parent;
4436 p->parent = t;
4437 r->parent = t;
4438
4439 if ((t->parent = s))
4440 {
4441 if (s->left == p)
4442 s->left = t;
4443 else
4444 s->right = t;
4445 }
4446 else
4447 case_stack->data.case_stmt.case_list = t;
4448 }
4449 break;
4450 }
4451
4452 else
4453 {
4454 /* p->balance == +1; growth of left side balances the node. */
4455 p->balance = 0;
4456 break;
4457 }
4458 }
4459 else
4460 /* r == p->right */
4461 {
4462 int b;
4463
4464 if (! (b = p->balance))
4465 /* Growth propagation from right side. */
4466 p->balance++;
4467 else if (b > 0)
4468 {
4469 if (r->balance > 0)
4470 {
4471 /* L-Rotation */
4472
4473 if ((p->right = s = r->left))
4474 s->parent = p;
4475
4476 r->left = p;
4477 p->balance = 0;
4478 r->balance = 0;
4479 s = p->parent;
4480 p->parent = r;
4481 if ((r->parent = s))
4482 {
4483 if (s->left == p)
4484 s->left = r;
4485 else
4486 s->right = r;
4487 }
4488
4489 else
4490 case_stack->data.case_stmt.case_list = r;
4491 }
4492
4493 else
4494 /* r->balance == -1 */
4495 {
4496 /* RL-Rotation */
4497 int b2;
4498 struct case_node *t = r->left;
4499
4500 if ((p->right = s = t->left))
4501 s->parent = p;
4502
4503 t->left = p;
4504
4505 if ((r->left = s = t->right))
4506 s->parent = r;
4507
4508 t->right = r;
4509 b = t->balance;
4510 b2 = b < 0;
4511 r->balance = b2;
4512 b2 = -b2 - b;
4513 p->balance = b2;
4514 t->balance = 0;
4515 s = p->parent;
4516 p->parent = t;
4517 r->parent = t;
4518
4519 if ((t->parent = s))
4520 {
4521 if (s->left == p)
4522 s->left = t;
4523 else
4524 s->right = t;
4525 }
4526
4527 else
4528 case_stack->data.case_stmt.case_list = t;
4529 }
4530 break;
4531 }
4532 else
4533 {
4534 /* p->balance == -1; growth of right side balances the node. */
4535 p->balance = 0;
4536 break;
4537 }
4538 }
4539
4540 r = p;
4541 p = p->parent;
4542 }
4543
4544 return 0;
4545 }
4546
4547 \f
4548 /* Returns the number of possible values of TYPE.
4549 Returns -1 if the number is unknown or variable.
4550 Returns -2 if the number does not fit in a HOST_WIDE_INT.
4551    Sets *SPARSENESS to 2 if TYPE is an ENUMERAL_TYPE whose values
4552 do not increase monotonically (there may be duplicates);
4553 to 1 if the values increase monotonically, but not always by 1;
4554 otherwise sets it to 0. */
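/* For example (assuming 8-bit chars, a 32-bit `int', and a 32-bit
   HOST_WIDE_INT; illustrative):

       enum e { A, B, C };        returns 3 with *SPARSENESS == 0
       enum f { A = 1, B = 5 };   returns 2 with *SPARSENESS == 1
       char                       returns 256
       int                        returns -2 (the count does not fit)
*/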
4555
4556 HOST_WIDE_INT
4557 all_cases_count (type, sparseness)
4558      tree type;
4559      int *sparseness;
4560 {
4561 HOST_WIDE_INT count;
4562   *sparseness = 0;
4563
4564 switch (TREE_CODE (type))
4565 {
4566 tree t;
4567 case BOOLEAN_TYPE:
4568 count = 2;
4569 break;
4570 case CHAR_TYPE:
4571 count = 1 << BITS_PER_UNIT;
4572 break;
4573 default:
4574 case INTEGER_TYPE:
4575 if (TREE_CODE (TYPE_MIN_VALUE (type)) != INTEGER_CST
4576 || TYPE_MAX_VALUE (type) == NULL
4577 || TREE_CODE (TYPE_MAX_VALUE (type)) != INTEGER_CST)
4578 return -1;
4579 else
4580 {
4581 /* count
4582 = TREE_INT_CST_LOW (TYPE_MAX_VALUE (type))
4583 - TREE_INT_CST_LOW (TYPE_MIN_VALUE (type)) + 1
4584 but with overflow checking. */
4585 tree mint = TYPE_MIN_VALUE (type);
4586 tree maxt = TYPE_MAX_VALUE (type);
4587 HOST_WIDE_INT lo, hi;
4588           neg_double (TREE_INT_CST_LOW (mint), TREE_INT_CST_HIGH (mint),
4589                       &lo, &hi);
4590           add_double (TREE_INT_CST_LOW (maxt), TREE_INT_CST_HIGH (maxt),
4591 lo, hi, &lo, &hi);
4592 add_double (lo, hi, 1, 0, &lo, &hi);
4593 if (hi != 0 || lo < 0)
4594 return -2;
4595 count = lo;
4596 }
4597 break;
4598 case ENUMERAL_TYPE:
4599 count = 0;
4600 for (t = TYPE_VALUES (type); t != NULL_TREE; t = TREE_CHAIN (t))
4601 {
4602 if (TREE_CODE (TYPE_MIN_VALUE (type)) != INTEGER_CST
4603 || TREE_CODE (TREE_VALUE (t)) != INTEGER_CST
4604 || TREE_INT_CST_LOW (TYPE_MIN_VALUE (type)) + count
4605 != TREE_INT_CST_LOW (TREE_VALUE (t)))
4606           *sparseness = 1;
4607 count++;
4608 }
4609       if (*sparseness == 1)
4610 {
4611 tree prev = TREE_VALUE (TYPE_VALUES (type));
4612 for (t = TYPE_VALUES (type); t = TREE_CHAIN (t), t != NULL_TREE; )
4613 {
4614 if (! tree_int_cst_lt (prev, TREE_VALUE (t)))
4615 {
4616               *sparseness = 2;
4617 break;
4618 }
4619 prev = TREE_VALUE (t);
4620 }
4621
4622 }
4623 }
4624 return count;
4625 }
4626
4627
4628 #define BITARRAY_TEST(ARRAY, INDEX) \
4629 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
4630 & (1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR)))
4631 #define BITARRAY_SET(ARRAY, INDEX) \
4632 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
4633 |= 1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR))
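/* For example, with 8-bit chars BITARRAY_SET (a, 11) sets bit 3 of a[1]
   and BITARRAY_TEST (a, 11) then reads it back nonzero.  */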
4634
4635 /* Set the elements of the bitstring CASES_SEEN (which has length COUNT),
4636 with the case values we have seen, assuming the case expression
4637 has the given TYPE.
4638 SPARSENESS is as determined by all_cases_count.
4639
4640 The time needed is proportional to COUNT, unless
4641 SPARSENESS is 2, in which case quadratic time is needed. */
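/* For instance, if TYPE is `enum { A, B, C, D }' and the switch has labels
   only for A and C, bits 0 and 2 of CASES_SEEN end up set, and
   check_for_full_enumeration_handling below will warn about B and D
   (a sketch of the ordinary SPARSENESS == 0 path).  */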
4642
4643 void
4644 mark_seen_cases (type, cases_seen, count, sparseness)
4645 tree type;
4646 unsigned char *cases_seen;
4647 long count;
4648 int sparseness;
4649 {
4650 tree next_node_to_try = NULL_TREE;
4651 long next_node_offset = 0;
4652
4653 register struct case_node *n, *root = case_stack->data.case_stmt.case_list;
4654 tree val = make_node (INTEGER_CST);
4655 TREE_TYPE (val) = type;
4656 if (! root)
4657 ; /* Do nothing */
4658 else if (sparseness == 2)
4659 {
4660 tree t;
4661 HOST_WIDE_INT xlo;
4662
4663 /* This less efficient loop is only needed to handle
4664 duplicate case values (multiple enum constants
4665 with the same value). */
4666 TREE_TYPE (val) = TREE_TYPE (root->low);
4667 for (t = TYPE_VALUES (type), xlo = 0; t != NULL_TREE;
4668 t = TREE_CHAIN (t), xlo++)
4669 {
4670 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (TREE_VALUE (t));
4671 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (TREE_VALUE (t));
4672 n = root;
4673 do
4674 {
4675 /* Keep going past elements distinctly greater than VAL. */
4676 if (tree_int_cst_lt (val, n->low))
4677 n = n->left;
4678
4679 /* or distinctly less than VAL. */
4680 else if (tree_int_cst_lt (n->high, val))
4681 n = n->right;
4682
4683 else
4684 {
4685 /* We have found a matching range. */
4686 BITARRAY_SET (cases_seen, xlo);
4687 break;
4688 }
4689 }
4690 while (n);
4691 }
4692 }
4693 else
4694 {
4695 if (root->left)
4696 case_stack->data.case_stmt.case_list = root = case_tree2list (root, 0);
4697 for (n = root; n; n = n->right)
4698 {
4699 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (n->low);
4700 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (n->low);
4701 while ( ! tree_int_cst_lt (n->high, val))
4702 {
4703 /* Calculate (into xlo) the "offset" of the integer (val).
4704 The element with lowest value has offset 0, the next smallest
4705 element has offset 1, etc. */
4706
4707 HOST_WIDE_INT xlo, xhi;
4708 tree t;
4709 if (sparseness && TYPE_VALUES (type) != NULL_TREE)
4710 {
4711 /* The TYPE_VALUES will be in increasing order, so
4712                      start searching where we last ended. */
4713 t = next_node_to_try;
4714 xlo = next_node_offset;
4715 xhi = 0;
4716 for (;;)
4717 {
4718 if (t == NULL_TREE)
4719 {
4720 t = TYPE_VALUES (type);
4721 xlo = 0;
4722 }
4723 if (tree_int_cst_equal (val, TREE_VALUE (t)))
4724 {
4725 next_node_to_try = TREE_CHAIN (t);
4726 next_node_offset = xlo + 1;
4727 break;
4728 }
4729 xlo++;
4730 t = TREE_CHAIN (t);
4731 if (t == next_node_to_try)
4732 {
4733 xlo = -1;
4734 break;
4735 }
4736 }
4737 }
4738 else
4739 {
4740 t = TYPE_MIN_VALUE (type);
4741 if (t)
4742 neg_double (TREE_INT_CST_LOW (t), TREE_INT_CST_HIGH (t),
4743 &xlo, &xhi);
4744 else
4745 xlo = xhi = 0;
4746 add_double (xlo, xhi,
4747 TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
4748 &xlo, &xhi);
4749 }
4750
4751 if (xhi == 0 && xlo >= 0 && xlo < count)
4752 BITARRAY_SET (cases_seen, xlo);
4753 add_double (TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
4754 1, 0,
4755 &TREE_INT_CST_LOW (val), &TREE_INT_CST_HIGH (val));
4756 }
4757 }
4758 }
4759 }
4760
4761 /* Called when the index of a switch statement is an enumerated type
4762 and there is no default label.
4763
4764 Checks that all enumeration literals are covered by the case
4765 expressions of a switch. Also, warn if there are any extra
4766 switch cases that are *not* elements of the enumerated type.
4767
4768 If all enumeration literals were covered by the case expressions,
4769 turn one of the expressions into the default expression since it should
4770 not be possible to fall through such a switch. */
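/* For example, with -Wswitch and no default label:

       enum e { A, B, C } x;
       switch (x) { case A: case B: break; }
           warns: enumeration value `C' not handled in switch
       switch (x) { case 42: break; }
           also warns: case value `42' not in enumerated type `e'

   (illustrative; the exact texts are the warnings emitted below).  */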
4771
4772 void
4773 check_for_full_enumeration_handling (type)
4774 tree type;
4775 {
4776 register struct case_node *n;
4777 register tree chain;
4778 #if 0 /* variable used by 'if 0'ed code below. */
4779 register struct case_node **l;
4780 int all_values = 1;
4781 #endif
4782
4783 /* True iff the selector type is a numbered set mode. */
4784 int sparseness = 0;
4785
4786 /* The number of possible selector values. */
4787 HOST_WIDE_INT size;
4788
4789   /* For each possible selector value, a one iff it has been matched
4790 by a case value alternative. */
4791 unsigned char *cases_seen;
4792
4793 /* The allocated size of cases_seen, in chars. */
4794 long bytes_needed;
4795
4796 if (! warn_switch)
4797 return;
4798
4799 size = all_cases_count (type, &sparseness);
4800 bytes_needed = (size + HOST_BITS_PER_CHAR) / HOST_BITS_PER_CHAR;
4801
4802 if (size > 0 && size < 600000
4803 /* We deliberately use malloc here - not xmalloc. */
4804 && (cases_seen = (unsigned char *) malloc (bytes_needed)) != NULL)
4805 {
4806 long i;
4807 tree v = TYPE_VALUES (type);
4808 bzero (cases_seen, bytes_needed);
4809
4810 /* The time complexity of this code is normally O(N), where
4811          N is the number of members in the enumerated type.
4812          However, if type is an ENUMERAL_TYPE whose values do not
4813 increase monotonically, O(N*log(N)) time may be needed. */
4814
4815 mark_seen_cases (type, cases_seen, size, sparseness);
4816
4817 for (i = 0; v != NULL_TREE && i < size; i++, v = TREE_CHAIN (v))
4818 {
4819           if (BITARRAY_TEST (cases_seen, i) == 0)
4820 warning ("enumeration value `%s' not handled in switch",
4821 IDENTIFIER_POINTER (TREE_PURPOSE (v)));
4822 }
4823
4824 free (cases_seen);
4825 }
4826
4827 /* Now we go the other way around; we warn if there are case
4828 expressions that don't correspond to enumerators. This can
4829 occur since C and C++ don't enforce type-checking of
4830 assignments to enumeration variables. */
4831
4832 if (case_stack->data.case_stmt.case_list
4833 && case_stack->data.case_stmt.case_list->left)
4834 case_stack->data.case_stmt.case_list
4835 = case_tree2list (case_stack->data.case_stmt.case_list, 0);
4836 if (warn_switch)
4837 for (n = case_stack->data.case_stmt.case_list; n; n = n->right)
4838 {
4839 for (chain = TYPE_VALUES (type);
4840 chain && !tree_int_cst_equal (n->low, TREE_VALUE (chain));
4841 chain = TREE_CHAIN (chain))
4842 ;
4843
4844 if (!chain)
4845 {
4846 if (TYPE_NAME (type) == 0)
4847 warning ("case value `%ld' not in enumerated type",
4848 (long) TREE_INT_CST_LOW (n->low));
4849 else
4850 warning ("case value `%ld' not in enumerated type `%s'",
4851 (long) TREE_INT_CST_LOW (n->low),
4852 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
4853 == IDENTIFIER_NODE)
4854 ? TYPE_NAME (type)
4855 : DECL_NAME (TYPE_NAME (type))));
4856 }
4857 if (!tree_int_cst_equal (n->low, n->high))
4858 {
4859 for (chain = TYPE_VALUES (type);
4860 chain && !tree_int_cst_equal (n->high, TREE_VALUE (chain));
4861 chain = TREE_CHAIN (chain))
4862 ;
4863
4864 if (!chain)
4865 {
4866 if (TYPE_NAME (type) == 0)
4867 warning ("case value `%ld' not in enumerated type",
4868 (long) TREE_INT_CST_LOW (n->high));
4869 else
4870 warning ("case value `%ld' not in enumerated type `%s'",
4871 (long) TREE_INT_CST_LOW (n->high),
4872 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
4873 == IDENTIFIER_NODE)
4874 ? TYPE_NAME (type)
4875 : DECL_NAME (TYPE_NAME (type))));
4876 }
4877 }
4878 }
4879
4880 #if 0
4881 /* ??? This optimization is disabled because it causes valid programs to
4882 fail. ANSI C does not guarantee that an expression with enum type
4883 will have a value that is the same as one of the enumeration literals. */
4884
4885 /* If all values were found as case labels, make one of them the default
4886 label. Thus, this switch will never fall through. We arbitrarily pick
4887 the last one to make the default since this is likely the most
4888 efficient choice. */
4889
4890 if (all_values)
4891 {
4892 for (l = &case_stack->data.case_stmt.case_list;
4893 (*l)->right != 0;
4894 l = &(*l)->right)
4895 ;
4896
4897 case_stack->data.case_stmt.default_label = (*l)->code_label;
4898 *l = 0;
4899 }
4900 #endif /* 0 */
4901 }
4902
4903 \f
4904 /* Terminate a case (Pascal) or switch (C) statement
4905 in which ORIG_INDEX is the expression to be tested.
4906 Generate the code to test it and jump to the right place. */
4907
4908 void
4909 expand_end_case (orig_index)
4910 tree orig_index;
4911 {
4912 tree minval, maxval, range, orig_minval;
4913 rtx default_label = 0;
4914 register struct case_node *n;
4915 unsigned int count;
4916 rtx index;
4917 rtx table_label;
4918 int ncases;
4919 rtx *labelvec;
4920 register int i;
4921 rtx before_case;
4922 register struct nesting *thiscase = case_stack;
4923 tree index_expr, index_type;
4924 int unsignedp;
4925
4926 table_label = gen_label_rtx ();
4927 index_expr = thiscase->data.case_stmt.index_expr;
4928 index_type = TREE_TYPE (index_expr);
4929 unsignedp = TREE_UNSIGNED (index_type);
4930
4931 do_pending_stack_adjust ();
4932
4933   /* This might get a spurious warning in the presence of a syntax error;
4934 it could be fixed by moving the call to check_seenlabel after the
4935 check for error_mark_node, and copying the code of check_seenlabel that
4936 deals with case_stack->data.case_stmt.line_number_status /
4937      restore_line_number_status in front of the call to end_cleanup_deferral.
4938 However, this might miss some useful warnings in the presence of
4939 non-syntax errors. */
4940 check_seenlabel ();
4941
4942 /* An ERROR_MARK occurs for various reasons including invalid data type. */
4943 if (index_type != error_mark_node)
4944 {
4945 /* If switch expression was an enumerated type, check that all
4946 enumeration literals are covered by the cases.
4947 No sense trying this if there's a default case, however. */
4948
4949 if (!thiscase->data.case_stmt.default_label
4950 && TREE_CODE (TREE_TYPE (orig_index)) == ENUMERAL_TYPE
4951 && TREE_CODE (index_expr) != INTEGER_CST)
4952 check_for_full_enumeration_handling (TREE_TYPE (orig_index));
4953
4954 /* If we don't have a default-label, create one here,
4955 after the body of the switch. */
4956 if (thiscase->data.case_stmt.default_label == 0)
4957 {
4958 thiscase->data.case_stmt.default_label
4959 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
4960 expand_label (thiscase->data.case_stmt.default_label);
4961 }
4962 default_label = label_rtx (thiscase->data.case_stmt.default_label);
4963
4964 before_case = get_last_insn ();
4965
4966 if (thiscase->data.case_stmt.case_list
4967 && thiscase->data.case_stmt.case_list->left)
4968 thiscase->data.case_stmt.case_list
4969           = case_tree2list (thiscase->data.case_stmt.case_list, 0);
4970
4971 /* Simplify the case-list before we count it. */
4972 group_case_nodes (thiscase->data.case_stmt.case_list);
4973
4974 /* Get upper and lower bounds of case values.
4975 Also convert all the case values to the index expr's data type. */
4976
4977 count = 0;
4978 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
4979 {
4980 /* Check low and high label values are integers. */
4981 if (TREE_CODE (n->low) != INTEGER_CST)
4982 abort ();
4983 if (TREE_CODE (n->high) != INTEGER_CST)
4984 abort ();
4985
4986 n->low = convert (index_type, n->low);
4987 n->high = convert (index_type, n->high);
4988
4989 /* Count the elements and track the largest and smallest
4990 of them (treating them as signed even if they are not). */
4991 if (count++ == 0)
4992 {
4993 minval = n->low;
4994 maxval = n->high;
4995 }
4996 else
4997 {
4998 if (INT_CST_LT (n->low, minval))
4999 minval = n->low;
5000 if (INT_CST_LT (maxval, n->high))
5001 maxval = n->high;
5002 }
5003 /* A range counts double, since it requires two compares. */
5004 if (! tree_int_cst_equal (n->low, n->high))
5005 count++;
5006 }
5007
5008 orig_minval = minval;
5009
5010 /* Compute span of values. */
5011 if (count != 0)
5012 range = fold (build (MINUS_EXPR, index_type, maxval, minval));
5013
5014 end_cleanup_deferral ();
5015
5016 if (count == 0)
5017 {
5018 expand_expr (index_expr, const0_rtx, VOIDmode, 0);
5019 emit_queue ();
5020 emit_jump (default_label);
5021 }
5022
5023 /* If range of values is much bigger than number of values,
5024 make a sequence of conditional branches instead of a dispatch.
5025 If the switch-index is a constant, do it this way
5026 because we can optimize it. */
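      /* For instance (illustrative), a switch with six consecutive case
         values is usually dense enough for a dispatch table, whereas
         `switch (x) { case 1: ... case 1000000: ... }' has a range far
         larger than 10 * count and is expanded as compares and branches.  */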
5027
5028 #ifndef CASE_VALUES_THRESHOLD
5029 #ifdef HAVE_casesi
5030 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
5031 #else
5032    /* If the machine does not have a case insn that compares the
5033 bounds, this means extra overhead for dispatch tables
5034 which raises the threshold for using them. */
5035 #define CASE_VALUES_THRESHOLD 5
5036 #endif /* HAVE_casesi */
5037 #endif /* CASE_VALUES_THRESHOLD */
5038
5039 else if (TREE_INT_CST_HIGH (range) != 0
5040 || count < (unsigned int) CASE_VALUES_THRESHOLD
5041 || ((unsigned HOST_WIDE_INT) (TREE_INT_CST_LOW (range))
5042 > 10 * count)
5043 #ifndef ASM_OUTPUT_ADDR_DIFF_ELT
5044 || flag_pic
5045 #endif
5046 || TREE_CODE (index_expr) == INTEGER_CST
5047 /* These will reduce to a constant. */
5048 || (TREE_CODE (index_expr) == CALL_EXPR
5049 && TREE_CODE (TREE_OPERAND (index_expr, 0)) == ADDR_EXPR
5050 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == FUNCTION_DECL
5051 && DECL_FUNCTION_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == BUILT_IN_CLASSIFY_TYPE)
5052 || (TREE_CODE (index_expr) == COMPOUND_EXPR
5053 && TREE_CODE (TREE_OPERAND (index_expr, 1)) == INTEGER_CST))
5054 {
5055 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5056
5057 /* If the index is a short or char that we do not have
5058 an insn to handle comparisons directly, convert it to
5059 a full integer now, rather than letting each comparison
5060 generate the conversion. */
5061
5062 if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
5063 && (cmp_optab->handlers[(int) GET_MODE(index)].insn_code
5064 == CODE_FOR_nothing))
5065 {
5066 enum machine_mode wider_mode;
5067 for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
5068 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
5069 if (cmp_optab->handlers[(int) wider_mode].insn_code
5070 != CODE_FOR_nothing)
5071 {
5072 index = convert_to_mode (wider_mode, index, unsignedp);
5073 break;
5074 }
5075 }
5076
5077 emit_queue ();
5078 do_pending_stack_adjust ();
5079
5080 index = protect_from_queue (index, 0);
5081 if (GET_CODE (index) == MEM)
5082 index = copy_to_reg (index);
5083 if (GET_CODE (index) == CONST_INT
5084 || TREE_CODE (index_expr) == INTEGER_CST)
5085 {
5086 /* Make a tree node with the proper constant value
5087 if we don't already have one. */
5088 if (TREE_CODE (index_expr) != INTEGER_CST)
5089 {
5090 index_expr
5091 = build_int_2 (INTVAL (index),
5092 unsignedp || INTVAL (index) >= 0 ? 0 : -1);
5093 index_expr = convert (index_type, index_expr);
5094 }
5095
5096 /* For constant index expressions we need only
5097              issue an unconditional branch to the appropriate
5098 target code. The job of removing any unreachable
5099              code is left to the optimization phase if the
5100 "-O" option is specified. */
5101 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5102 if (! tree_int_cst_lt (index_expr, n->low)
5103 && ! tree_int_cst_lt (n->high, index_expr))
5104 break;
5105
5106 if (n)
5107 emit_jump (label_rtx (n->code_label));
5108 else
5109 emit_jump (default_label);
5110 }
5111 else
5112 {
5113 /* If the index expression is not constant we generate
5114 a binary decision tree to select the appropriate
5115 target code. This is done as follows:
5116
5117 The list of cases is rearranged into a binary tree,
5118 nearly optimal assuming equal probability for each case.
5119
5120 The tree is transformed into RTL, eliminating
5121 redundant test conditions at the same time.
5122
5123 If program flow could reach the end of the
5124              decision tree, an unconditional jump to the
5125 default code is emitted. */
5126
5127 use_cost_table
5128 = (TREE_CODE (TREE_TYPE (orig_index)) != ENUMERAL_TYPE
5129 && estimate_case_costs (thiscase->data.case_stmt.case_list));
5130 balance_case_nodes (&thiscase->data.case_stmt.case_list,
5131 NULL_PTR);
5132 emit_case_nodes (index, thiscase->data.case_stmt.case_list,
5133 default_label, index_type);
5134 emit_jump_if_reachable (default_label);
5135 }
5136 }
5137 else
5138 {
5139 int win = 0;
5140 #ifdef HAVE_casesi
5141 if (HAVE_casesi)
5142 {
5143 enum machine_mode index_mode = SImode;
5144 int index_bits = GET_MODE_BITSIZE (index_mode);
5145 rtx op1, op2;
5146 enum machine_mode op_mode;
5147
5148 /* Convert the index to SImode. */
5149 if (GET_MODE_BITSIZE (TYPE_MODE (index_type))
5150 > GET_MODE_BITSIZE (index_mode))
5151 {
5152 enum machine_mode omode = TYPE_MODE (index_type);
5153 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
5154
5155 /* We must handle the endpoints in the original mode. */
5156 index_expr = build (MINUS_EXPR, index_type,
5157 index_expr, minval);
5158 minval = integer_zero_node;
5159 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5160 emit_cmp_insn (rangertx, index, LTU, NULL_RTX, omode, 1, 0);
5161 emit_jump_insn (gen_bltu (default_label));
5162 /* Now we can safely truncate. */
5163 index = convert_to_mode (index_mode, index, 0);
5164 }
5165 else
5166 {
5167 if (TYPE_MODE (index_type) != index_mode)
5168 {
5169 index_expr = convert (type_for_size (index_bits, 0),
5170 index_expr);
5171 index_type = TREE_TYPE (index_expr);
5172 }
5173
5174 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5175 }
5176 emit_queue ();
5177 index = protect_from_queue (index, 0);
5178 do_pending_stack_adjust ();
5179
5180 op_mode = insn_operand_mode[(int)CODE_FOR_casesi][0];
5181 if (! (*insn_operand_predicate[(int)CODE_FOR_casesi][0])
5182 (index, op_mode))
5183 index = copy_to_mode_reg (op_mode, index);
5184
5185 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
5186
5187 op_mode = insn_operand_mode[(int)CODE_FOR_casesi][1];
5188 if (! (*insn_operand_predicate[(int)CODE_FOR_casesi][1])
5189 (op1, op_mode))
5190 op1 = copy_to_mode_reg (op_mode, op1);
5191
5192 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
5193
5194 op_mode = insn_operand_mode[(int)CODE_FOR_casesi][2];
5195 if (! (*insn_operand_predicate[(int)CODE_FOR_casesi][2])
5196 (op2, op_mode))
5197 op2 = copy_to_mode_reg (op_mode, op2);
5198
5199 emit_jump_insn (gen_casesi (index, op1, op2,
5200 table_label, default_label));
5201 win = 1;
5202 }
5203 #endif
5204 #ifdef HAVE_tablejump
5205 if (! win && HAVE_tablejump)
5206 {
5207 index_expr = convert (thiscase->data.case_stmt.nominal_type,
5208 fold (build (MINUS_EXPR, index_type,
5209 index_expr, minval)));
5210 index_type = TREE_TYPE (index_expr);
5211 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5212 emit_queue ();
5213 index = protect_from_queue (index, 0);
5214 do_pending_stack_adjust ();
5215
5216 do_tablejump (index, TYPE_MODE (index_type),
5217 expand_expr (range, NULL_RTX, VOIDmode, 0),
5218 table_label, default_label);
5219 win = 1;
5220 }
5221 #endif
5222 if (! win)
5223 abort ();
5224
5225 /* Get table of labels to jump to, in order of case index. */
5226
5227 ncases = TREE_INT_CST_LOW (range) + 1;
5228 labelvec = (rtx *) alloca (ncases * sizeof (rtx));
5229 bzero ((char *) labelvec, ncases * sizeof (rtx));
5230
5231 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5232 {
5233 register HOST_WIDE_INT i
5234 = TREE_INT_CST_LOW (n->low) - TREE_INT_CST_LOW (orig_minval);
5235
5236 while (1)
5237 {
5238 labelvec[i]
5239 = gen_rtx_LABEL_REF (Pmode, label_rtx (n->code_label));
5240 if (i + TREE_INT_CST_LOW (orig_minval)
5241 == TREE_INT_CST_LOW (n->high))
5242 break;
5243 i++;
5244 }
5245 }
5246
5247 /* Fill in the gaps with the default. */
5248 for (i = 0; i < ncases; i++)
5249 if (labelvec[i] == 0)
5250 labelvec[i] = gen_rtx_LABEL_REF (Pmode, default_label);
5251
5252 /* Output the table. */
5253 emit_label (table_label);
5254
5255 if (CASE_VECTOR_PC_RELATIVE || flag_pic)
5256 emit_jump_insn (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE,
5257 gen_rtx_LABEL_REF (Pmode, table_label),
5258 gen_rtvec_v (ncases, labelvec),
5259 const0_rtx, const0_rtx, 0));
5260 else
5261 emit_jump_insn (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE,
5262 gen_rtvec_v (ncases, labelvec)));
5263
5264 /* If the case insn drops through the table,
5265 after the table we must jump to the default-label.
5266 Otherwise record no drop-through after the table. */
5267 #ifdef CASE_DROPS_THROUGH
5268 emit_jump (default_label);
5269 #else
5270 emit_barrier ();
5271 #endif
5272 }
5273
5274 before_case = squeeze_notes (NEXT_INSN (before_case), get_last_insn ());
5275 reorder_insns (before_case, get_last_insn (),
5276 thiscase->data.case_stmt.start);
5277 }
5278 else
5279 end_cleanup_deferral ();
5280
5281 if (thiscase->exit_label)
5282 emit_label (thiscase->exit_label);
5283
5284 POPSTACK (case_stack);
5285
5286 free_temp_slots ();
5287 }
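
/* Illustrative sketch, not part of the compiler: the dispatch-table branch
   above builds a vector indexed by (case value - minimum case value),
   pointing every slot covered by a case at that case's label and every
   remaining slot at the default label (ranges fill each slot they cover).
   The hypothetical helper below mirrors that construction with ordinary
   function pointers; all names in it are invented for illustration, and
   single-valued cases are assumed for brevity.  */

typedef void (*example_case_handler) ();

static void
example_fill_dispatch_table (table, ncases, minval, values, handlers,
			     count, default_handler)
     example_case_handler *table;
     int ncases;
     int minval;
     const int *values;
     example_case_handler *handlers;
     int count;
     example_case_handler default_handler;
{
  int i;

  /* Start with every slot aimed at the default handler, so the gaps
     end up exactly as the gap-filling loop above leaves them.  */
  for (i = 0; i < ncases; i++)
    table[i] = default_handler;

  /* Each explicit case value then claims its slot, using the same
     biased (value - minimum) index as the label vector.  */
  for (i = 0; i < count; i++)
    table[values[i] - minval] = handlers[i];
}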
5288
5289 /* Convert the tree NODE into a list linked by the right field, with the left
5290 field zeroed. RIGHT is used for recursion; it is a list to be placed
5291 rightmost in the resulting list. */
5292
5293 static struct case_node *
5294 case_tree2list (node, right)
5295 struct case_node *node, *right;
5296 {
5297 struct case_node *left;
5298
5299 if (node->right)
5300 right = case_tree2list (node->right, right);
5301
5302 node->right = right;
5303 if ((left = node->left))
5304 {
5305 node->left = 0;
5306 return case_tree2list (left, node);
5307 }
5308
5309 return node;
5310 }
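
/* Illustrative sketch, not part of the compiler: case_tree2list above
   flattens a binary search tree into an in-order list threaded through
   the `right' pointers, clearing the `left' pointers as it goes.  The
   hypothetical routine below performs the same flattening on a plain
   integer tree so the recursion pattern can be read on its own; the
   names are invented for illustration.  */

struct example_tree
{
  int value;
  struct example_tree *left, *right;
};

static struct example_tree *
example_tree2list (node, tail)
     struct example_tree *node, *tail;
{
  struct example_tree *left;

  if (node == 0)
    return tail;

  /* Flatten the right subtree first; it becomes everything that
     follows NODE in the list.  */
  node->right = example_tree2list (node->right, tail);

  /* Then flatten the left subtree in front of NODE.  */
  left = node->left;
  node->left = 0;
  return example_tree2list (left, node);
}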
5311
5312 /* Generate code to jump to LABEL if OP1 and OP2 are equal. */
5313
5314 static void
5315 do_jump_if_equal (op1, op2, label, unsignedp)
5316 rtx op1, op2, label;
5317 int unsignedp;
5318 {
5319 if (GET_CODE (op1) == CONST_INT
5320 && GET_CODE (op2) == CONST_INT)
5321 {
5322 if (INTVAL (op1) == INTVAL (op2))
5323 emit_jump (label);
5324 }
5325 else
5326 {
5327 enum machine_mode mode = GET_MODE (op1);
5328 if (mode == VOIDmode)
5329 mode = GET_MODE (op2);
5330 emit_cmp_insn (op1, op2, EQ, NULL_RTX, mode, unsignedp, 0);
5331 emit_jump_insn (gen_beq (label));
5332 }
5333 }
5334 \f
5335 /* Not all case values are encountered equally. This function
5336 uses a heuristic to weight case labels, in cases where that
5337 looks like a reasonable thing to do.
5338
5339 Right now, all we try to guess is text, and we establish the
5340 following weights:
5341
5342 chars above space: 16
5343 digits: 16
5344 default: 12
5345 space, punct: 8
5346 tab: 4
5347 newline: 2
5348 other "\" chars: 1
5349 remaining chars: 0
5350
5351 If we find any cases in the switch that are not either -1 or in the range
5352 of valid ASCII characters, or are control characters other than those
5353 commonly used with "\", don't treat this switch as scanning text.
5354
5355 Return 1 if these nodes are suitable for cost estimation, otherwise
5356 return 0. */
5357
5358 static int
5359 estimate_case_costs (node)
5360 case_node_ptr node;
5361 {
5362 tree min_ascii = build_int_2 (-1, -1);
5363 tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
5364 case_node_ptr n;
5365 int i;
5366
5367 /* If we haven't already made the cost table, make it now. Note that the
5368 lower bound of the table is -1, not zero. */
5369
5370 if (cost_table == NULL)
5371 {
5372 cost_table = ((short *) xmalloc (129 * sizeof (short))) + 1;
5373 bzero ((char *) (cost_table - 1), 129 * sizeof (short));
5374
5375 for (i = 0; i < 128; i++)
5376 {
5377 if (ISALNUM (i))
5378 cost_table[i] = 16;
5379 else if (ISPUNCT (i))
5380 cost_table[i] = 8;
5381 else if (ISCNTRL (i))
5382 cost_table[i] = -1;
5383 }
5384
5385 cost_table[' '] = 8;
5386 cost_table['\t'] = 4;
5387 cost_table['\0'] = 4;
5388 cost_table['\n'] = 2;
5389 cost_table['\f'] = 1;
5390 cost_table['\v'] = 1;
5391 cost_table['\b'] = 1;
5392 }
5393
5394 /* See if all the case expressions look like text. It is text if the
5395 constant is >= -1 and the highest constant is <= 127. Do all comparisons
5396 as signed arithmetic since we don't want to ever access cost_table with a
5397 value less than -1. Also check that none of the constants in a range
5398 are strange control characters. */
5399
5400 for (n = node; n; n = n->right)
5401 {
5402 if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
5403 return 0;
5404
5405 for (i = TREE_INT_CST_LOW (n->low); i <= TREE_INT_CST_LOW (n->high); i++)
5406 if (cost_table[i] < 0)
5407 return 0;
5408 }
5409
5410 /* All interesting values are within the range of interesting
5411 ASCII characters. */
5412 return 1;
5413 }
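
/* Illustrative sketch, not part of the compiler: the cost table above is
   allocated with 129 slots and the base pointer advanced by one element,
   so that -1 (the pseudo-value allowed alongside the ASCII range) is a
   legal subscript next to 0..127.  The hypothetical fragment below shows
   that offset-pointer idiom in isolation; the names and the stored
   values are invented for illustration.  */

static short *example_offset_table;

static void
example_make_offset_table ()
{
  /* One extra slot, then step the pointer forward, so that index -1
     refers to the first allocated slot.  */
  example_offset_table = ((short *) xmalloc (129 * sizeof (short))) + 1;
  bzero ((char *) (example_offset_table - 1), 129 * sizeof (short));

  example_offset_table[-1] = 1;		/* Lowest legal subscript.  */
  example_offset_table[127] = 1;	/* Highest legal subscript.  */
}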
5414
5415 /* Scan an ordered list of case nodes
5416 combining those with consecutive values or ranges.
5417
5418 E.g. three separate entries 1: 2: 3: become one entry 1..3: */
5419
5420 static void
5421 group_case_nodes (head)
5422 case_node_ptr head;
5423 {
5424 case_node_ptr node = head;
5425
5426 while (node)
5427 {
5428 rtx lb = next_real_insn (label_rtx (node->code_label));
5429 rtx lb2;
5430 case_node_ptr np = node;
5431
5432 /* Try to group the successors of NODE with NODE. */
5433 while (((np = np->right) != 0)
5434 /* Do they jump to the same place? */
5435 && ((lb2 = next_real_insn (label_rtx (np->code_label))) == lb
5436 || (lb != 0 && lb2 != 0
5437 && simplejump_p (lb)
5438 && simplejump_p (lb2)
5439 && rtx_equal_p (SET_SRC (PATTERN (lb)),
5440 SET_SRC (PATTERN (lb2)))))
5441 /* Are their ranges consecutive? */
5442 && tree_int_cst_equal (np->low,
5443 fold (build (PLUS_EXPR,
5444 TREE_TYPE (node->high),
5445 node->high,
5446 integer_one_node)))
5447 /* An overflow is not consecutive. */
5448 && tree_int_cst_lt (node->high,
5449 fold (build (PLUS_EXPR,
5450 TREE_TYPE (node->high),
5451 node->high,
5452 integer_one_node))))
5453 {
5454 node->high = np->high;
5455 }
5456 /* NP is the first node after NODE which can't be grouped with it.
5457 Delete the nodes in between, and move on to that node. */
5458 node->right = np;
5459 node = np;
5460 }
5461 }
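
/* Illustrative sketch, not part of the compiler: group_case_nodes above
   merges a node into its predecessor when both jump to the same place
   and the node's low value immediately follows the predecessor's high
   value.  The hypothetical routine below applies the same rule to a
   flat, ascending array of ranges, each tagged with an integer
   identifying its target; the overflow check done above is omitted for
   brevity and all names are invented for illustration.  */

struct example_range
{
  long low, high;
  int target;
};

static int
example_group_ranges (ranges, count)
     struct example_range *ranges;
     int count;
{
  int in, out;

  if (count == 0)
    return 0;

  /* Fold each range into the previously kept one whenever the target
     matches and the values are consecutive.  */
  for (out = 0, in = 1; in < count; in++)
    {
      if (ranges[in].target == ranges[out].target
	  && ranges[in].low == ranges[out].high + 1)
	ranges[out].high = ranges[in].high;
      else
	ranges[++out] = ranges[in];
    }

  return out + 1;		/* Number of ranges remaining.  */
}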
5462
5463 /* Take an ordered list of case nodes
5464 and transform them into a near optimal binary tree,
5465 on the assumption that any target code selection value is as
5466 likely as any other.
5467
5468 The transformation is performed by splitting the ordered
5469 list into two equal sections plus a pivot. The parts are
5470 then attached to the pivot as left and right branches. Each
5471 branch is then transformed recursively. */
5472
5473 static void
5474 balance_case_nodes (head, parent)
5475 case_node_ptr *head;
5476 case_node_ptr parent;
5477 {
5478 register case_node_ptr np;
5479
5480 np = *head;
5481 if (np)
5482 {
5483 int cost = 0;
5484 int i = 0;
5485 int ranges = 0;
5486 register case_node_ptr *npp;
5487 case_node_ptr left;
5488
5489 /* Count the number of entries on branch. Also count the ranges. */
5490
5491 while (np)
5492 {
5493 if (!tree_int_cst_equal (np->low, np->high))
5494 {
5495 ranges++;
5496 if (use_cost_table)
5497 cost += cost_table[TREE_INT_CST_LOW (np->high)];
5498 }
5499
5500 if (use_cost_table)
5501 cost += cost_table[TREE_INT_CST_LOW (np->low)];
5502
5503 i++;
5504 np = np->right;
5505 }
5506
5507 if (i > 2)
5508 {
5509 /* Split this list if it is long enough for that to help. */
5510 npp = head;
5511 left = *npp;
5512 if (use_cost_table)
5513 {
5514 /* Find the place in the list that bisects the list's total cost.
5515 Here I gets half the total cost. */
5516 int n_moved = 0;
5517 i = (cost + 1) / 2;
5518 while (1)
5519 {
5520 /* Skip nodes while their cost does not reach that amount. */
5521 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
5522 i -= cost_table[TREE_INT_CST_LOW ((*npp)->high)];
5523 i -= cost_table[TREE_INT_CST_LOW ((*npp)->low)];
5524 if (i <= 0)
5525 break;
5526 npp = &(*npp)->right;
5527 n_moved += 1;
5528 }
5529 if (n_moved == 0)
5530 {
5531 /* Leave this branch lopsided, but optimize left-hand
5532 side and fill in `parent' fields for right-hand side. */
5533 np = *head;
5534 np->parent = parent;
5535 balance_case_nodes (&np->left, np);
5536 for (; np->right; np = np->right)
5537 np->right->parent = np;
5538 return;
5539 }
5540 }
5541 /* If there are just three nodes, split at the middle one. */
5542 else if (i == 3)
5543 npp = &(*npp)->right;
5544 else
5545 {
5546 /* Find the place in the list that bisects the list's total cost,
5547 where ranges count as 2.
5548 Here I gets half the total cost. */
5549 i = (i + ranges + 1) / 2;
5550 while (1)
5551 {
5552 /* Skip nodes while their cost does not reach that amount. */
5553 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
5554 i--;
5555 i--;
5556 if (i <= 0)
5557 break;
5558 npp = &(*npp)->right;
5559 }
5560 }
5561 *head = np = *npp;
5562 *npp = 0;
5563 np->parent = parent;
5564 np->left = left;
5565
5566 /* Optimize each of the two split parts. */
5567 balance_case_nodes (&np->left, np);
5568 balance_case_nodes (&np->right, np);
5569 }
5570 else
5571 {
5572 /* Else leave this branch as one level,
5573 but fill in `parent' fields. */
5574 np = *head;
5575 np->parent = parent;
5576 for (; np->right; np = np->right)
5577 np->right->parent = np;
5578 }
5579 }
5580 }
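
/* Illustrative sketch, not part of the compiler: leaving the cost
   weighting aside, balance_case_nodes above picks a pivot near the
   middle of the ordered list, makes the nodes before it the left
   branch and the nodes after it the right branch, and recurses on both
   halves.  The hypothetical routine below performs that unweighted
   split on a simple node type (a list threaded through the `right'
   pointers on entry); the names are invented for illustration.  */

struct example_node
{
  long value;
  struct example_node *left, *right, *parent;
};

static void
example_balance (head, parent)
     struct example_node **head;
     struct example_node *parent;
{
  struct example_node **npp = head;
  struct example_node *np = *head;
  struct example_node *left;
  int count = 0, i;

  if (np == 0)
    return;

  /* Count the list, then step half way along it to find the pivot.  */
  for (; np; np = np->right)
    count++;
  for (i = count / 2; i > 0; i--)
    npp = &(*npp)->right;

  /* Detach the pivot: the nodes before it become the left branch and
     the nodes after it remain as the right branch.  */
  left = *head;
  np = *npp;
  *npp = 0;
  if (left == np)
    left = 0;
  *head = np;
  np->parent = parent;
  np->left = left;

  /* Balance each of the two halves in turn.  */
  example_balance (&np->left, np);
  example_balance (&np->right, np);
}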
5581 \f
5582 /* Search the parent sections of the case node tree
5583 to see if a test for the lower bound of NODE would be redundant.
5584 INDEX_TYPE is the type of the index expression.
5585
5586 The instructions to generate the case decision tree are
5587 output in the same order as nodes are processed, so it is
5588 known that if a parent node checks the range of the current
5589 node minus one, the current node is bounded at its lower
5590 span. Thus the test would be redundant. */
5591
5592 static int
5593 node_has_low_bound (node, index_type)
5594 case_node_ptr node;
5595 tree index_type;
5596 {
5597 tree low_minus_one;
5598 case_node_ptr pnode;
5599
5600 /* If the lower bound of this node is the lowest value in the index type,
5601 we need not test it. */
5602
5603 if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
5604 return 1;
5605
5606 /* If this node has a left branch, the value at the left must be less
5607 than that at this node, so it cannot be bounded at the bottom and
5608 we need not bother testing any further. */
5609
5610 if (node->left)
5611 return 0;
5612
5613 low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
5614 node->low, integer_one_node));
5615
5616 /* If the subtraction above overflowed, we can't verify anything.
5617 Otherwise, look for a parent that tests our value - 1. */
5618
5619 if (! tree_int_cst_lt (low_minus_one, node->low))
5620 return 0;
5621
5622 for (pnode = node->parent; pnode; pnode = pnode->parent)
5623 if (tree_int_cst_equal (low_minus_one, pnode->high))
5624 return 1;
5625
5626 return 0;
5627 }
5628
5629 /* Search the parent sections of the case node tree
5630 to see if a test for the upper bound of NODE would be redundant.
5631 INDEX_TYPE is the type of the index expression.
5632
5633 The instructions to generate the case decision tree are
5634 output in the same order as nodes are processed, so it is
5635 known that if a parent node checks the range of the current
5636 node plus one, the current node is bounded at its upper
5637 span. Thus the test would be redundant. */
5638
5639 static int
5640 node_has_high_bound (node, index_type)
5641 case_node_ptr node;
5642 tree index_type;
5643 {
5644 tree high_plus_one;
5645 case_node_ptr pnode;
5646
5647 /* If there is no upper bound, obviously no test is needed. */
5648
5649 if (TYPE_MAX_VALUE (index_type) == NULL)
5650 return 1;
5651
5652 /* If the upper bound of this node is the highest value in the type
5653 of the index expression, we need not test against it. */
5654
5655 if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
5656 return 1;
5657
5658 /* If this node has a right branch, the value at the right must be greater
5659 than that at this node, so it cannot be bounded at the top and
5660 we need not bother testing any further. */
5661
5662 if (node->right)
5663 return 0;
5664
5665 high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
5666 node->high, integer_one_node));
5667
5668 /* If the addition above overflowed, we can't verify anything.
5669 Otherwise, look for a parent that tests our value + 1. */
5670
5671 if (! tree_int_cst_lt (node->high, high_plus_one))
5672 return 0;
5673
5674 for (pnode = node->parent; pnode; pnode = pnode->parent)
5675 if (tree_int_cst_equal (high_plus_one, pnode->low))
5676 return 1;
5677
5678 return 0;
5679 }
5680
5681 /* Search the parent sections of the
5682 case node tree to see if both tests for the upper and lower
5683 bounds of NODE would be redundant. */
5684
5685 static int
5686 node_is_bounded (node, index_type)
5687 case_node_ptr node;
5688 tree index_type;
5689 {
5690 return (node_has_low_bound (node, index_type)
5691 && node_has_high_bound (node, index_type));
5692 }
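
/* Illustrative sketch, not part of the compiler: reusing the node type
   from the balancing sketch after balance_case_nodes above, the
   lower-bound redundancy test reduces to walking the parent chain for
   an ancestor that already tested our value minus one (the upper-bound
   test is symmetrical).  The hypothetical predicate below shows that
   walk without the overflow and type-extremum checks done above; the
   names are invented for illustration.  */

static int
example_has_low_bound (node)
     struct example_node *node;
{
  struct example_node *p;

  /* A left subtree means smaller values are still possible, so the
     test cannot be redundant.  */
  if (node->left)
    return 0;

  /* Otherwise the test is redundant only if some ancestor's value is
     exactly one below ours, since we can only get here on that
     ancestor's greater side.  */
  for (p = node->parent; p; p = p->parent)
    if (p->value == node->value - 1)
      return 1;

  return 0;
}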
5693
5694 /* Emit an unconditional jump to LABEL unless it would be dead code. */
5695
5696 static void
5697 emit_jump_if_reachable (label)
5698 rtx label;
5699 {
5700 if (GET_CODE (get_last_insn ()) != BARRIER)
5701 emit_jump (label);
5702 }
5703 \f
5704 /* Emit step-by-step code to select a case for the value of INDEX.
5705 The decision tree generated this way follows the form of the
5706 case-node binary tree NODE, whose nodes represent test conditions.
5707 INDEX_TYPE is the type of the index of the switch.
5708
5709 Care is taken to prune redundant tests from the decision tree
5710 by detecting any boundary conditions already checked by
5711 emitted rtx. (See node_has_high_bound, node_has_low_bound
5712 and node_is_bounded, above.)
5713
5714 Where the test conditions can be shown to be redundant, we emit
5715 an unconditional jump to the target code. As a further
5716 optimization, the subordinates of a tree node are examined to
5717 check for bounded nodes. In this case, conditional and/or
5718 unconditional jumps resulting from the boundary check for the
5719 current node are arranged to target the subordinates' associated
5720 code for out-of-bound conditions on the current node.
5721
5722 We can assume that when control reaches the code generated here,
5723 the index value has already been compared with the parents
5724 of this node, and determined to be on the same side of each parent
5725 as this node is. Thus, if this node tests for the value 51,
5726 and a parent tested for 52, we don't need to consider
5727 the possibility of a value greater than 51. If another parent
5728 tests for the value 50, then this node need not test anything. */
5729
5730 static void
5731 emit_case_nodes (index, node, default_label, index_type)
5732 rtx index;
5733 case_node_ptr node;
5734 rtx default_label;
5735 tree index_type;
5736 {
5737 /* If INDEX has an unsigned type, we must make unsigned branches. */
5738 int unsignedp = TREE_UNSIGNED (index_type);
5739 typedef rtx rtx_fn ();
5740 rtx_fn *gen_bgt_pat = unsignedp ? gen_bgtu : gen_bgt;
5741 rtx_fn *gen_bge_pat = unsignedp ? gen_bgeu : gen_bge;
5742 rtx_fn *gen_blt_pat = unsignedp ? gen_bltu : gen_blt;
5743 rtx_fn *gen_ble_pat = unsignedp ? gen_bleu : gen_ble;
5744 enum machine_mode mode = GET_MODE (index);
5745
5746 /* See if our parents have already tested everything for us.
5747 If they have, emit an unconditional jump for this node. */
5748 if (node_is_bounded (node, index_type))
5749 emit_jump (label_rtx (node->code_label));
5750
5751 else if (tree_int_cst_equal (node->low, node->high))
5752 {
5753 /* Node is single valued. First see if the index expression matches
5754 this node and then check our children, if any. */
5755
5756 do_jump_if_equal (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
5757 label_rtx (node->code_label), unsignedp);
5758
5759 if (node->right != 0 && node->left != 0)
5760 {
5761 /* This node has children on both sides.
5762 Dispatch to one side or the other
5763 by comparing the index value with this node's value.
5764 If one subtree is bounded, check that one first,
5765 so we can avoid real branches in the tree. */
5766
5767 if (node_is_bounded (node->right, index_type))
5768 {
5769 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5770 VOIDmode, 0),
5771 GT, NULL_RTX, mode, unsignedp, 0);
5772
5773 emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label)));
5774 emit_case_nodes (index, node->left, default_label, index_type);
5775 }
5776
5777 else if (node_is_bounded (node->left, index_type))
5778 {
5779 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5780 VOIDmode, 0),
5781 LT, NULL_RTX, mode, unsignedp, 0);
5782 emit_jump_insn ((*gen_blt_pat) (label_rtx (node->left->code_label)));
5783 emit_case_nodes (index, node->right, default_label, index_type);
5784 }
5785
5786 else
5787 {
5788 /* Neither node is bounded. First distinguish the two sides;
5789 then emit the code for one side at a time. */
5790
5791 tree test_label
5792 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
5793
5794 /* See if the value is on the right. */
5795 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5796 VOIDmode, 0),
5797 GT, NULL_RTX, mode, unsignedp, 0);
5798 emit_jump_insn ((*gen_bgt_pat) (label_rtx (test_label)));
5799
5800 /* Value must be on the left.
5801 Handle the left-hand subtree. */
5802 emit_case_nodes (index, node->left, default_label, index_type);
5803 /* If left-hand subtree does nothing,
5804 go to default. */
5805 emit_jump_if_reachable (default_label);
5806
5807 /* Code branches here for the right-hand subtree. */
5808 expand_label (test_label);
5809 emit_case_nodes (index, node->right, default_label, index_type);
5810 }
5811 }
5812
5813 else if (node->right != 0 && node->left == 0)
5814 {
5815 /* Here we have a right child but no left, so we issue a conditional
5816 branch to default and process the right child.
5817
5818 Omit the conditional branch to default if the right child has no
5819 children and is single valued; it costs too much space to save so little time. */
5820
5821 if (node->right->right || node->right->left
5822 || !tree_int_cst_equal (node->right->low, node->right->high))
5823 {
5824 if (!node_has_low_bound (node, index_type))
5825 {
5826 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5827 VOIDmode, 0),
5828 LT, NULL_RTX, mode, unsignedp, 0);
5829 emit_jump_insn ((*gen_blt_pat) (default_label));
5830 }
5831
5832 emit_case_nodes (index, node->right, default_label, index_type);
5833 }
5834 else
5835 /* We cannot process node->right normally
5836 since we haven't ruled out the numbers less than
5837 this node's value. So handle node->right explicitly. */
5838 do_jump_if_equal (index,
5839 expand_expr (node->right->low, NULL_RTX,
5840 VOIDmode, 0),
5841 label_rtx (node->right->code_label), unsignedp);
5842 }
5843
5844 else if (node->right == 0 && node->left != 0)
5845 {
5846 /* Just one subtree, on the left. */
5847
5848 #if 0 /* The following code and comment were formerly part
5849 of the condition here, but they didn't work
5850 and I don't understand what the idea was. -- rms. */
5851 /* If our "most probable entry" is less probable
5852 than the default label, emit a jump to
5853 the default label using condition codes
5854 already lying around. With no right branch,
5855 a branch-greater-than will get us to the default
5856 label correctly. */
5857 if (use_cost_table
5858 && cost_table[TREE_INT_CST_LOW (node->high)] < 12)
5859 ;
5860 #endif /* 0 */
5861 if (node->left->left || node->left->right
5862 || !tree_int_cst_equal (node->left->low, node->left->high))
5863 {
5864 if (!node_has_high_bound (node, index_type))
5865 {
5866 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5867 VOIDmode, 0),
5868 GT, NULL_RTX, mode, unsignedp, 0);
5869 emit_jump_insn ((*gen_bgt_pat) (default_label));
5870 }
5871
5872 emit_case_nodes (index, node->left, default_label, index_type);
5873 }
5874 else
5875 /* We cannot process node->left normally
5876 since we haven't ruled out the numbers greater than
5877 this node's value. So handle node->left explicitly. */
5878 do_jump_if_equal (index,
5879 expand_expr (node->left->low, NULL_RTX,
5880 VOIDmode, 0),
5881 label_rtx (node->left->code_label), unsignedp);
5882 }
5883 }
5884 else
5885 {
5886 /* Node is a range. These cases are very similar to those for a single
5887 value, except that we do not start by testing whether this node
5888 is the one to branch to. */
5889
5890 if (node->right != 0 && node->left != 0)
5891 {
5892 /* Node has subtrees on both sides.
5893 If the right-hand subtree is bounded,
5894 test for it first, since we can go straight there.
5895 Otherwise, we need to make a branch in the control structure,
5896 then handle the two subtrees. */
5897 tree test_label = 0;
5898
5899 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5900 VOIDmode, 0),
5901 GT, NULL_RTX, mode, unsignedp, 0);
5902
5903 if (node_is_bounded (node->right, index_type))
5904 /* Right hand node is fully bounded so we can eliminate any
5905 testing and branch directly to the target code. */
5906 emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label)));
5907 else
5908 {
5909 /* Right hand node requires testing.
5910 Branch to a label where we will handle it later. */
5911
5912 test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
5913 emit_jump_insn ((*gen_bgt_pat) (label_rtx (test_label)));
5914 }
5915
5916 /* Value belongs to this node or to the left-hand subtree. */
5917
5918 emit_cmp_insn (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
5919 GE, NULL_RTX, mode, unsignedp, 0);
5920 emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label)));
5921
5922 /* Handle the left-hand subtree. */
5923 emit_case_nodes (index, node->left, default_label, index_type);
5924
5925 /* If right node had to be handled later, do that now. */
5926
5927 if (test_label)
5928 {
5929 /* If the left-hand subtree fell through,
5930 don't let it fall into the right-hand subtree. */
5931 emit_jump_if_reachable (default_label);
5932
5933 expand_label (test_label);
5934 emit_case_nodes (index, node->right, default_label, index_type);
5935 }
5936 }
5937
5938 else if (node->right != 0 && node->left == 0)
5939 {
5940 /* Deal with values to the left of this node,
5941 if they are possible. */
5942 if (!node_has_low_bound (node, index_type))
5943 {
5944 emit_cmp_insn (index, expand_expr (node->low, NULL_RTX,
5945 VOIDmode, 0),
5946 LT, NULL_RTX, mode, unsignedp, 0);
5947 emit_jump_insn ((*gen_blt_pat) (default_label));
5948 }
5949
5950 /* Value belongs to this node or to the right-hand subtree. */
5951
5952 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5953 VOIDmode, 0),
5954 LE, NULL_RTX, mode, unsignedp, 0);
5955 emit_jump_insn ((*gen_ble_pat) (label_rtx (node->code_label)));
5956
5957 emit_case_nodes (index, node->right, default_label, index_type);
5958 }
5959
5960 else if (node->right == 0 && node->left != 0)
5961 {
5962 /* Deal with values to the right of this node,
5963 if they are possible. */
5964 if (!node_has_high_bound (node, index_type))
5965 {
5966 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5967 VOIDmode, 0),
5968 GT, NULL_RTX, mode, unsignedp, 0);
5969 emit_jump_insn ((*gen_bgt_pat) (default_label));
5970 }
5971
5972 /* Value belongs to this node or to the left-hand subtree. */
5973
5974 emit_cmp_insn (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
5975 GE, NULL_RTX, mode, unsignedp, 0);
5976 emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label)));
5977
5978 emit_case_nodes (index, node->left, default_label, index_type);
5979 }
5980
5981 else
5982 {
5983 /* Node has no children so we check low and high bounds to remove
5984 redundant tests. Only one of the bounds can exist,
5985 since otherwise this node is bounded--a case tested already. */
5986
5987 if (!node_has_high_bound (node, index_type))
5988 {
5989 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5990 VOIDmode, 0),
5991 GT, NULL_RTX, mode, unsignedp, 0);
5992 emit_jump_insn ((*gen_bgt_pat) (default_label));
5993 }
5994
5995 if (!node_has_low_bound (node, index_type))
5996 {
5997 emit_cmp_insn (index, expand_expr (node->low, NULL_RTX,
5998 VOIDmode, 0),
5999 LT, NULL_RTX, mode, unsignedp, 0);
6000 emit_jump_insn ((*gen_blt_pat) (default_label));
6001 }
6002
6003 emit_jump (label_rtx (node->code_label));
6004 }
6005 }
6006 }
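
/* Illustrative sketch, not part of the compiler: for a switch that ends
   up as a comparison tree, the code emitted by emit_case_nodes behaves
   like the hand-written function below.  The example assumes the three
   cases 10, 20..29 and 30..40 and a balanced tree with 20..29 as the
   pivot (the unweighted three-node split above picks the middle node).
   It returns a marker identifying the selected case, or -1 for the
   default; all names and values are invented for illustration.  */

static int
example_decision_tree (index)
     long index;
{
  /* Pivot node: the range 20..29.  Larger values go to the right
     subtree; the rest are resolved here or in the left subtree.  */
  if (index > 29)
    goto right_subtree;
  if (index >= 20)
    return 2029;			/* the range 20..29 */

  /* Left subtree: the single, childless value 10 needs only an
     equality test before falling through to the default.  */
  if (index == 10)
    return 10;
  return -1;				/* default */

 right_subtree:
  /* Right subtree: the range 30..40.  The test `index >= 30' is not
     emitted because the pivot's `index > 29' already proved it.  */
  if (index > 40)
    return -1;				/* default */
  return 3040;				/* the range 30..40 */
}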
6007 \f
6008 /* These routines are used by the loop unrolling code. They copy BLOCK trees
6009 so that the debugging info will be correct for the unrolled loop. */
6010
6011 /* Indexed by block number, contains a pointer to the N'th block node.
6012
6013 Allocated by the call to identify_blocks, then released after the call
6014 to reorder_blocks in the function unroll_block_trees. */
6015
6016 static tree *block_vector;
6017
6018 void
6019 find_loop_tree_blocks ()
6020 {
6021 tree block = DECL_INITIAL (current_function_decl);
6022
6023 block_vector = identify_blocks (block, get_insns ());
6024 }
6025
6026 void
6027 unroll_block_trees ()
6028 {
6029 tree block = DECL_INITIAL (current_function_decl);
6030
6031 reorder_blocks (block_vector, block, get_insns ());
6032
6033 /* Release any memory allocated by identify_blocks. */
6034 if (block_vector)
6035 free (block_vector);
6036 }