1 /* Expands front end tree to back end RTL for GNU C-Compiler
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
3 1998, 1999, 2000 Free Software Foundation, Inc.
4
5 This file is part of GNU CC.
6
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
11
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
21
22
23 /* This file handles the generation of rtl code from tree structure
24 above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
25 It also creates the rtl expressions for parameters and auto variables
26 and has full responsibility for allocating stack slots.
27
28 The functions whose names start with `expand_' are called by the
29 parser to generate RTL instructions for various kinds of constructs.
30
31 Some control and binding constructs require calling several such
32 functions at different times. For example, a simple if-then
33 is expanded by calling `expand_start_cond' (with the condition-expression
34 as argument) before parsing the then-clause and calling `expand_end_cond'
35 after parsing the then-clause. */
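/* An added illustrative sketch, not part of the original source: one
   plausible way a C front end could drive the protocol just described
   for `if (cond) stmt;'.  The exit-flag argument to expand_start_cond
   is assumed from its use elsewhere in this era of GCC; treat the
   details as a sketch, not a reference.  */
#if 0
static void
example_expand_if (tree cond, tree then_clause)
{
  expand_start_cond (cond, 0);     /* Emit the test; push a cond nesting.  */
  expand_expr_stmt (then_clause);  /* Expand the then-clause statement.  */
  expand_end_cond ();              /* Define the end label; pop the nesting.  */
}
#endif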
36
37 #include "config.h"
38 #include "system.h"
39
40 #include "rtl.h"
41 #include "tree.h"
42 #include "tm_p.h"
43 #include "flags.h"
44 #include "except.h"
45 #include "function.h"
46 #include "insn-flags.h"
47 #include "insn-config.h"
48 #include "insn-codes.h"
49 #include "expr.h"
50 #include "hard-reg-set.h"
51 #include "obstack.h"
52 #include "loop.h"
53 #include "recog.h"
54 #include "machmode.h"
55 #include "toplev.h"
56 #include "output.h"
57 #include "ggc.h"
58
59 #define obstack_chunk_alloc xmalloc
60 #define obstack_chunk_free free
61 struct obstack stmt_obstack;
62
63 /* Assume that case vectors are not pc-relative. */
64 #ifndef CASE_VECTOR_PC_RELATIVE
65 #define CASE_VECTOR_PC_RELATIVE 0
66 #endif
67
68 \f
69 /* Functions and data structures for expanding case statements. */
70
71 /* Case label structure, used to hold info on labels within case
72 statements. We handle "range" labels; for a single-value label
73 as in C, the high and low limits are the same.
74
75 An AVL tree of case nodes is initially created, and later transformed
76 to a list linked via the RIGHT fields in the nodes. Nodes with
77 higher case values are later in the list.
78
79 Switch statements can be output in one of two forms. A branch table
80 is used if there are more than a few labels and the labels are dense
81 within the range between the smallest and largest case value. If a
82 branch table is used, no further manipulations are done with the case
83 node chain.
84
85 The alternative to the use of a branch table is to generate a series
86 of compare and jump insns. When that is done, we use the LEFT, RIGHT,
87 and PARENT fields to hold a binary tree. Initially the tree is
88 totally unbalanced, with everything on the right. We balance the tree
89 with nodes on the left having lower case values than the parent
90 and nodes on the right having higher values. We then output the tree
91 in order. */
92
93 struct case_node
94 {
95 struct case_node *left; /* Left son in binary tree */
96 struct case_node *right; /* Right son in binary tree; also node chain */
97 struct case_node *parent; /* Parent of node in binary tree */
98 tree low; /* Lowest index value for this label */
99 tree high; /* Highest index value for this label */
100 tree code_label; /* Label to jump to when node matches */
101     int balance;                /* AVL balance factor, used while the tree is built */
102 };
103
104 typedef struct case_node case_node;
105 typedef struct case_node *case_node_ptr;
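/* Worked example (an editorial addition): for the GNU C switch below,
   the case list would contain one single-value node and one range node.
   Single-value labels have LOW == HIGH, as described above; `4 ... 8'
   is the GNU range-label extension.  */
#if 0
switch (i)
  {
  case 1:        /* node: low = 1, high = 1 */
    break;
  case 4 ... 8:  /* node: low = 4, high = 8 */
    break;
  }
#endif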
106
107 /* These are used by estimate_case_costs and balance_case_nodes. */
108
109 /* This must be a signed type, and non-ANSI compilers lack signed char. */
110 static short cost_table_[129];
111 static short *cost_table;
112 static int use_cost_table;
113 \f
114 /* Stack of control and binding constructs we are currently inside.
115
116 These constructs begin when you call `expand_start_WHATEVER'
117 and end when you call `expand_end_WHATEVER'. This stack records
118 info about how the construct began that tells the end-function
119 what to do. It also may provide information about the construct
120 to alter the behavior of other constructs within the body.
121 For example, they may affect the behavior of C `break' and `continue'.
122
123 Each construct gets one `struct nesting' object.
124 All of these objects are chained through the `all' field.
125 `nesting_stack' points to the first object (innermost construct).
126 The position of an entry on `nesting_stack' is in its `depth' field.
127
128 Each type of construct has its own individual stack.
129 For example, loops have `loop_stack'. Each object points to the
130 next object of the same type through the `next' field.
131
132 Some constructs are visible to `break' exit-statements and others
133 are not. Which constructs are visible depends on the language.
134 Therefore, the data structure allows each construct to be visible
135 or not, according to the args given when the construct is started.
136 The construct is visible if the `exit_label' field is non-null.
137 In that case, the value should be a CODE_LABEL rtx. */
138
139 struct nesting
140 {
141 struct nesting *all;
142 struct nesting *next;
143 int depth;
144 rtx exit_label;
145 union
146 {
147 /* For conds (if-then and if-then-else statements). */
148 struct
149 {
150 /* Label for the end of the if construct.
151 There is none if EXITFLAG was not set
152 and no `else' has been seen yet. */
153 rtx endif_label;
154 /* Label for the end of this alternative.
155 This may be the end of the if or the next else/elseif. */
156 rtx next_label;
157 } cond;
158 /* For loops. */
159 struct
160 {
161 /* Label at the top of the loop; place to loop back to. */
162 rtx start_label;
163 /* Label at the end of the whole construct. */
164 rtx end_label;
165 /* Label before a jump that branches to the end of the whole
166 construct. This is where destructors go if any. */
167 rtx alt_end_label;
168 /* Label for `continue' statement to jump to;
169 this is in front of the stepper of the loop. */
170 rtx continue_label;
171 } loop;
172 /* For variable binding contours. */
173 struct
174 {
175 /* Sequence number of this binding contour within the function,
176 in order of entry. */
177 int block_start_count;
178 /* Nonzero => value to restore stack to on exit. */
179 rtx stack_level;
180 /* The NOTE that starts this contour.
181 Used by expand_goto to check whether the destination
182 is within each contour or not. */
183 rtx first_insn;
184 /* Innermost containing binding contour that has a stack level. */
185 struct nesting *innermost_stack_block;
186 /* List of cleanups to be run on exit from this contour.
187 This is a list of expressions to be evaluated.
188 The TREE_PURPOSE of each link is the ..._DECL node
189 which the cleanup pertains to. */
190 tree cleanups;
191 /* List of cleanup-lists of blocks containing this block,
192 as they were at the locus where this block appears.
193 There is an element for each containing block,
194 ordered innermost containing block first.
195 The tail of this list can be 0,
196 if all remaining elements would be empty lists.
197 The element's TREE_VALUE is the cleanup-list of that block,
198 which may be null. */
199 tree outer_cleanups;
200 /* Chain of labels defined inside this binding contour.
201 For contours that have stack levels or cleanups. */
202 struct label_chain *label_chain;
203 /* Number of function calls seen, as of start of this block. */
204 int n_function_calls;
205       /* Nonzero if this is associated with an EH region.  */
206 int exception_region;
207 /* The saved target_temp_slot_level from our outer block.
208 We may reset target_temp_slot_level to be the level of
209 this block, if that is done, target_temp_slot_level
210 reverts to the saved target_temp_slot_level at the very
211 end of the block. */
212 int block_target_temp_slot_level;
213 /* True if we are currently emitting insns in an area of
214 output code that is controlled by a conditional
215 expression. This is used by the cleanup handling code to
216 generate conditional cleanup actions. */
217 int conditional_code;
218 /* A place to move the start of the exception region for any
219 of the conditional cleanups, must be at the end or after
220 the start of the last unconditional cleanup, and before any
221 conditional branch points. */
222 rtx last_unconditional_cleanup;
223 /* When in a conditional context, this is the specific
224 cleanup list associated with last_unconditional_cleanup,
225 where we place the conditionalized cleanups. */
226 tree *cleanup_ptr;
227 } block;
228 /* For switch (C) or case (Pascal) statements,
229 and also for dummies (see `expand_start_case_dummy'). */
230 struct
231 {
232 /* The insn after which the case dispatch should finally
233 be emitted. Zero for a dummy. */
234 rtx start;
235 /* A list of case labels; it is first built as an AVL tree.
236 During expand_end_case, this is converted to a list, and may be
237 rearranged into a nearly balanced binary tree. */
238 struct case_node *case_list;
239 /* Label to jump to if no case matches. */
240 tree default_label;
241 /* The expression to be dispatched on. */
242 tree index_expr;
243 /* Type that INDEX_EXPR should be converted to. */
244 tree nominal_type;
245 /* Number of range exprs in case statement. */
246 int num_ranges;
247 /* Name of this kind of statement, for warnings. */
248 const char *printname;
249 /* Used to save no_line_numbers till we see the first case label.
250 We set this to -1 when we see the first case label in this
251 case statement. */
252 int line_number_status;
253 } case_stmt;
254 } data;
255 };
256
257 /* Allocate and return a new `struct nesting'. */
258
259 #define ALLOC_NESTING() \
260 (struct nesting *) obstack_alloc (&stmt_obstack, sizeof (struct nesting))
261
262 /* Pop the nesting stack element by element until we pop off
263 the element which is at the top of STACK.
264 Update all the other stacks, popping off elements from them
265 as we pop them from nesting_stack. */
266
267 #define POPSTACK(STACK) \
268 do { struct nesting *target = STACK; \
269 struct nesting *this; \
270 do { this = nesting_stack; \
271 if (loop_stack == this) \
272 loop_stack = loop_stack->next; \
273 if (cond_stack == this) \
274 cond_stack = cond_stack->next; \
275 if (block_stack == this) \
276 block_stack = block_stack->next; \
277 if (stack_block_stack == this) \
278 stack_block_stack = stack_block_stack->next; \
279 if (case_stack == this) \
280 case_stack = case_stack->next; \
281 nesting_depth = nesting_stack->depth - 1; \
282 nesting_stack = this->all; \
283 obstack_free (&stmt_obstack, this); } \
284 while (this != target); } while (0)
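/* Sketch of the matching push sequence (an editorial addition with a
   hypothetical helper name; the real expand_start_* functions later in
   this file each do the equivalent inline for their own per-type
   stack).  Only fields documented above are used.  */
#if 0
static struct nesting *
example_push_nesting (struct nesting **type_stack)
{
  struct nesting *thisnest = ALLOC_NESTING ();

  thisnest->all = nesting_stack;   /* Chain of every pending construct.  */
  thisnest->next = *type_stack;    /* Chain of this construct type only.  */
  thisnest->depth = ++nesting_depth;
  thisnest->exit_label = 0;        /* Invisible to `break' unless set.  */

  *type_stack = thisnest;
  nesting_stack = thisnest;
  return thisnest;
}
#endif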
285 \f
286 /* In some cases it is impossible to generate code for a forward goto
287 until the label definition is seen. This happens when it may be necessary
288 for the goto to reset the stack pointer: we don't yet know how to do that.
289 So expand_goto puts an entry on this fixup list.
290 Each time a binding contour that resets the stack is exited,
291 we check each fixup.
292 If the target label has now been defined, we can insert the proper code. */
293
294 struct goto_fixup
295 {
296 /* Points to following fixup. */
297 struct goto_fixup *next;
298 /* Points to the insn before the jump insn.
299 If more code must be inserted, it goes after this insn. */
300 rtx before_jump;
301 /* The LABEL_DECL that this jump is jumping to, or 0
302 for break, continue or return. */
303 tree target;
304 /* The BLOCK for the place where this goto was found. */
305 tree context;
306 /* The CODE_LABEL rtx that this is jumping to. */
307 rtx target_rtl;
308 /* Number of binding contours started in current function
309 before the label reference. */
310 int block_start_count;
311 /* The outermost stack level that should be restored for this jump.
312 Each time a binding contour that resets the stack is exited,
313 if the target label is *not* yet defined, this slot is updated. */
314 rtx stack_level;
315 /* List of lists of cleanup expressions to be run by this goto.
316 There is one element for each block that this goto is within.
317 The tail of this list can be 0,
318 if all remaining elements would be empty.
319 The TREE_VALUE contains the cleanup list of that block as of the
320 time this goto was seen.
321 The TREE_ADDRESSABLE flag is 1 for a block that has been exited. */
322 tree cleanup_list_list;
323 };
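/* Example (an editorial addition): user code that forces such a fixup.
   The goto leaves a contour with a dynamic stack level before its
   target label has been defined, so the jump cannot be finished until
   `out' is seen.  `use' is a hypothetical function.  */
#if 0
void
f (int n)
{
  {
    char buf[n];        /* Variable-size block: stack level to restore.  */
    if (use (buf))
      goto out;         /* Target not yet defined: a fixup is recorded.  */
  }
 out:;
}
#endif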
324
325 /* Within any binding contour that must restore a stack level,
326 all labels are recorded with a chain of these structures. */
327
328 struct label_chain
329 {
330 /* Points to following fixup. */
331 struct label_chain *next;
332 tree label;
333 };
334
335 struct stmt_status
336 {
337 /* Chain of all pending binding contours. */
338 struct nesting *x_block_stack;
339
340   /* If any new stacks are added here, add them to the POPSTACK macro too.  */
341
342 /* Chain of all pending binding contours that restore stack levels
343 or have cleanups. */
344 struct nesting *x_stack_block_stack;
345
346 /* Chain of all pending conditional statements. */
347 struct nesting *x_cond_stack;
348
349 /* Chain of all pending loops. */
350 struct nesting *x_loop_stack;
351
352 /* Chain of all pending case or switch statements. */
353 struct nesting *x_case_stack;
354
355 /* Separate chain including all of the above,
356 chained through the `all' field. */
357 struct nesting *x_nesting_stack;
358
359 /* Number of entries on nesting_stack now. */
360 int x_nesting_depth;
361
362 /* Number of binding contours started so far in this function. */
363 int x_block_start_count;
364
365 /* Each time we expand an expression-statement,
366 record the expr's type and its RTL value here. */
367 tree x_last_expr_type;
368 rtx x_last_expr_value;
369
370 /* Nonzero if within a ({...}) grouping, in which case we must
371 always compute a value for each expr-stmt in case it is the last one. */
372 int x_expr_stmts_for_value;
373
374 /* Filename and line number of last line-number note,
375 whether we actually emitted it or not. */
376 const char *x_emit_filename;
377 int x_emit_lineno;
378
379 struct goto_fixup *x_goto_fixup_chain;
380 };
381
382 #define block_stack (cfun->stmt->x_block_stack)
383 #define stack_block_stack (cfun->stmt->x_stack_block_stack)
384 #define cond_stack (cfun->stmt->x_cond_stack)
385 #define loop_stack (cfun->stmt->x_loop_stack)
386 #define case_stack (cfun->stmt->x_case_stack)
387 #define nesting_stack (cfun->stmt->x_nesting_stack)
388 #define nesting_depth (cfun->stmt->x_nesting_depth)
389 #define current_block_start_count (cfun->stmt->x_block_start_count)
390 #define last_expr_type (cfun->stmt->x_last_expr_type)
391 #define last_expr_value (cfun->stmt->x_last_expr_value)
392 #define expr_stmts_for_value (cfun->stmt->x_expr_stmts_for_value)
393 #define emit_filename (cfun->stmt->x_emit_filename)
394 #define emit_lineno (cfun->stmt->x_emit_lineno)
395 #define goto_fixup_chain (cfun->stmt->x_goto_fixup_chain)
396
397 /* Non-zero if we are using EH to handle cleanups.  */
398 static int using_eh_for_cleanups_p = 0;
399
400 /* Character strings, each containing a single decimal digit. */
401 static char *digit_strings[10];
402
403
404 static int n_occurrences PARAMS ((int, const char *));
405 static void expand_goto_internal PARAMS ((tree, rtx, rtx));
406 static int expand_fixup PARAMS ((tree, rtx, rtx));
407 static rtx expand_nl_handler_label PARAMS ((rtx, rtx));
408 static void expand_nl_goto_receiver PARAMS ((void));
409 static void expand_nl_goto_receivers PARAMS ((struct nesting *));
410 static void fixup_gotos PARAMS ((struct nesting *, rtx, tree,
411 rtx, int));
412 static void expand_null_return_1 PARAMS ((rtx, int));
413 static void expand_value_return PARAMS ((rtx));
414 static int tail_recursion_args PARAMS ((tree, tree));
415 static void expand_cleanups PARAMS ((tree, tree, int, int));
416 static void check_seenlabel PARAMS ((void));
417 static void do_jump_if_equal PARAMS ((rtx, rtx, rtx, int));
418 static int estimate_case_costs PARAMS ((case_node_ptr));
419 static void group_case_nodes PARAMS ((case_node_ptr));
420 static void balance_case_nodes PARAMS ((case_node_ptr *,
421 case_node_ptr));
422 static int node_has_low_bound PARAMS ((case_node_ptr, tree));
423 static int node_has_high_bound PARAMS ((case_node_ptr, tree));
424 static int node_is_bounded PARAMS ((case_node_ptr, tree));
425 static void emit_jump_if_reachable PARAMS ((rtx));
426 static void emit_case_nodes PARAMS ((rtx, case_node_ptr, rtx, tree));
427 static int add_case_node PARAMS ((tree, tree, tree, tree *));
428 static struct case_node *case_tree2list PARAMS ((case_node *, case_node *));
429 static void mark_cond_nesting PARAMS ((struct nesting *));
430 static void mark_loop_nesting PARAMS ((struct nesting *));
431 static void mark_block_nesting PARAMS ((struct nesting *));
432 static void mark_case_nesting PARAMS ((struct nesting *));
433 static void mark_case_node PARAMS ((struct case_node *));
434 static void mark_goto_fixup PARAMS ((struct goto_fixup *));
435
436 \f
437 void
438 using_eh_for_cleanups ()
439 {
440 using_eh_for_cleanups_p = 1;
441 }
442
443 /* Mark N (known to be a cond-nesting) for GC. */
444
445 static void
446 mark_cond_nesting (n)
447 struct nesting *n;
448 {
449 while (n)
450 {
451 ggc_mark_rtx (n->exit_label);
452 ggc_mark_rtx (n->data.cond.endif_label);
453 ggc_mark_rtx (n->data.cond.next_label);
454
455 n = n->next;
456 }
457 }
458
459 /* Mark N (known to be a loop-nesting) for GC. */
460
461 static void
462 mark_loop_nesting (n)
463 struct nesting *n;
464 {
465
466 while (n)
467 {
468 ggc_mark_rtx (n->exit_label);
469 ggc_mark_rtx (n->data.loop.start_label);
470 ggc_mark_rtx (n->data.loop.end_label);
471 ggc_mark_rtx (n->data.loop.alt_end_label);
472 ggc_mark_rtx (n->data.loop.continue_label);
473
474 n = n->next;
475 }
476 }
477
478 /* Mark N (known to be a block-nesting) for GC. */
479
480 static void
481 mark_block_nesting (n)
482 struct nesting *n;
483 {
484 while (n)
485 {
486 struct label_chain *l;
487
488 ggc_mark_rtx (n->exit_label);
489 ggc_mark_rtx (n->data.block.stack_level);
490 ggc_mark_rtx (n->data.block.first_insn);
491 ggc_mark_tree (n->data.block.cleanups);
492 ggc_mark_tree (n->data.block.outer_cleanups);
493
494 for (l = n->data.block.label_chain; l != NULL; l = l->next)
495 ggc_mark_tree (l->label);
496
497 ggc_mark_rtx (n->data.block.last_unconditional_cleanup);
498
499 /* ??? cleanup_ptr never points outside the stack, does it? */
500
501 n = n->next;
502 }
503 }
504
505 /* Mark N (known to be a case-nesting) for GC. */
506
507 static void
508 mark_case_nesting (n)
509 struct nesting *n;
510 {
511 while (n)
512 {
513 ggc_mark_rtx (n->exit_label);
514 ggc_mark_rtx (n->data.case_stmt.start);
515
516 ggc_mark_tree (n->data.case_stmt.default_label);
517 ggc_mark_tree (n->data.case_stmt.index_expr);
518 ggc_mark_tree (n->data.case_stmt.nominal_type);
519
520 mark_case_node (n->data.case_stmt.case_list);
521 n = n->next;
522 }
523 }
524
525 /* Mark C for GC. */
526
527 static void
528 mark_case_node (c)
529 struct case_node *c;
530 {
531 if (c != 0)
532 {
533 ggc_mark_tree (c->low);
534 ggc_mark_tree (c->high);
535 ggc_mark_tree (c->code_label);
536
537 mark_case_node (c->right);
538 mark_case_node (c->left);
539 }
540 }
541
542 /* Mark G for GC. */
543
544 static void
545 mark_goto_fixup (g)
546 struct goto_fixup *g;
547 {
548 while (g)
549 {
550 ggc_mark (g);
551 ggc_mark_rtx (g->before_jump);
552 ggc_mark_tree (g->target);
553 ggc_mark_tree (g->context);
554 ggc_mark_rtx (g->target_rtl);
555 ggc_mark_rtx (g->stack_level);
556 ggc_mark_tree (g->cleanup_list_list);
557
558 g = g->next;
559 }
560 }
561
562 /* Clear out all parts of the state in F that can safely be discarded
563 after the function has been compiled, to let garbage collection
564 reclaim the memory. */
565
566 void
567 free_stmt_status (f)
568 struct function *f;
569 {
570 /* We're about to free the function obstack. If we hold pointers to
571 things allocated there, then we'll try to mark them when we do
572 GC. So, we clear them out here explicitly. */
573 if (f->stmt)
574 free (f->stmt);
575 f->stmt = NULL;
576 }
577
578 /* Mark P for GC. */
579
580 void
581 mark_stmt_status (p)
582 struct stmt_status *p;
583 {
584 if (p == 0)
585 return;
586
587 mark_block_nesting (p->x_block_stack);
588 mark_cond_nesting (p->x_cond_stack);
589 mark_loop_nesting (p->x_loop_stack);
590 mark_case_nesting (p->x_case_stack);
591
592 ggc_mark_tree (p->x_last_expr_type);
593   /* last_expr_value is only valid if last_expr_type is nonzero.  */
594 if (p->x_last_expr_type)
595 ggc_mark_rtx (p->x_last_expr_value);
596
597 mark_goto_fixup (p->x_goto_fixup_chain);
598 }
599
600 void
601 init_stmt ()
602 {
603 int i;
604
605 gcc_obstack_init (&stmt_obstack);
606
607 for (i = 0; i < 10; i++)
608 {
609 digit_strings[i] = ggc_alloc_string (NULL, 1);
610 digit_strings[i][0] = '0' + i;
611 }
612 ggc_add_string_root (digit_strings, 10);
613 }
614
615 void
616 init_stmt_for_function ()
617 {
618 cfun->stmt = (struct stmt_status *) xmalloc (sizeof (struct stmt_status));
619
620 /* We are not currently within any block, conditional, loop or case. */
621 block_stack = 0;
622 stack_block_stack = 0;
623 loop_stack = 0;
624 case_stack = 0;
625 cond_stack = 0;
626 nesting_stack = 0;
627 nesting_depth = 0;
628
629 current_block_start_count = 0;
630
631 /* No gotos have been expanded yet. */
632 goto_fixup_chain = 0;
633
634 /* We are not processing a ({...}) grouping. */
635 expr_stmts_for_value = 0;
636 last_expr_type = 0;
637 last_expr_value = NULL_RTX;
638 }
639 \f
640 /* Return nonzero if anything is pushed on the loop, condition, or case
641 stack. */
642 int
643 in_control_zone_p ()
644 {
645 return cond_stack || loop_stack || case_stack;
646 }
647
648 /* Record the current file and line. Called from emit_line_note. */
649 void
650 set_file_and_line_for_stmt (file, line)
651 const char *file;
652 int line;
653 {
654 /* If we're outputting an inline function, and we add a line note,
655 there may be no CFUN->STMT information. So, there's no need to
656 update it. */
657 if (cfun->stmt)
658 {
659 emit_filename = file;
660 emit_lineno = line;
661 }
662 }
663
664 /* Emit a no-op instruction. */
665
666 void
667 emit_nop ()
668 {
669 rtx last_insn;
670
671 last_insn = get_last_insn ();
672 if (!optimize
673 && (GET_CODE (last_insn) == CODE_LABEL
674 || (GET_CODE (last_insn) == NOTE
675 && prev_real_insn (last_insn) == 0)))
676 emit_insn (gen_nop ());
677 }
678 \f
679 /* Return the rtx-label that corresponds to a LABEL_DECL,
680 creating it if necessary. */
681
682 rtx
683 label_rtx (label)
684 tree label;
685 {
686 if (TREE_CODE (label) != LABEL_DECL)
687 abort ();
688
689 if (DECL_RTL (label))
690 return DECL_RTL (label);
691
692 return DECL_RTL (label) = gen_label_rtx ();
693 }
694
695 /* Add an unconditional jump to LABEL as the next sequential instruction. */
696
697 void
698 emit_jump (label)
699 rtx label;
700 {
701 do_pending_stack_adjust ();
702 emit_jump_insn (gen_jump (label));
703 emit_barrier ();
704 }
705
706 /* Emit code to jump to the address
707 specified by the pointer expression EXP. */
708
709 void
710 expand_computed_goto (exp)
711 tree exp;
712 {
713 rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);
714
715 #ifdef POINTERS_EXTEND_UNSIGNED
716 x = convert_memory_address (Pmode, x);
717 #endif
718
719 emit_queue ();
720   /* Be sure the jump target address is executable.  */
721 if (current_function_check_memory_usage)
722 emit_library_call (chkr_check_exec_libfunc, 1,
723 VOIDmode, 1, x, ptr_mode);
724
725 do_pending_stack_adjust ();
726 emit_indirect_jump (x);
727
728 current_function_has_computed_jump = 1;
729 }
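/* The source construct handled here is the GNU C computed goto
   (labels as values).  A minimal example (an editorial addition):  */
#if 0
void
dispatch (int i)
{
  static void *table[] = { &&a, &&b };
  goto *table[i];       /* expand_computed_goto sees the index expression.  */
 a:;
 b:;
}
#endif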
730 \f
731 /* Handle goto statements and the labels that they can go to. */
732
733 /* Specify the location in the RTL code of a label LABEL,
734 which is a LABEL_DECL tree node.
735
736 This is used for the kind of label that the user can jump to with a
737 goto statement, and for alternatives of a switch or case statement.
738 RTL labels generated for loops and conditionals don't go through here;
739 they are generated directly at the RTL level, by other functions below.
740
741 Note that this has nothing to do with defining label *names*.
742 Languages vary in how they do that and what that even means. */
743
744 void
745 expand_label (label)
746 tree label;
747 {
748 struct label_chain *p;
749
750 do_pending_stack_adjust ();
751 emit_label (label_rtx (label));
752 if (DECL_NAME (label))
753 LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));
754
755 if (stack_block_stack != 0)
756 {
757 p = (struct label_chain *) oballoc (sizeof (struct label_chain));
758 p->next = stack_block_stack->data.block.label_chain;
759 stack_block_stack->data.block.label_chain = p;
760 p->label = label;
761 }
762 }
763
764 /* Declare that LABEL (a LABEL_DECL) may be used for nonlocal gotos
765 from nested functions. */
766
767 void
768 declare_nonlocal_label (label)
769 tree label;
770 {
771 rtx slot = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
772
773 nonlocal_labels = tree_cons (NULL_TREE, label, nonlocal_labels);
774 LABEL_PRESERVE_P (label_rtx (label)) = 1;
775 if (nonlocal_goto_handler_slots == 0)
776 {
777 emit_stack_save (SAVE_NONLOCAL,
778 &nonlocal_goto_stack_level,
779 PREV_INSN (tail_recursion_reentry));
780 }
781 nonlocal_goto_handler_slots
782 = gen_rtx_EXPR_LIST (VOIDmode, slot, nonlocal_goto_handler_slots);
783 }
784
785 /* Generate RTL code for a `goto' statement with target label LABEL.
786 LABEL should be a LABEL_DECL tree node that was or will later be
787 defined with `expand_label'. */
788
789 void
790 expand_goto (label)
791 tree label;
792 {
793 tree context;
794
795 /* Check for a nonlocal goto to a containing function. */
796 context = decl_function_context (label);
797 if (context != 0 && context != current_function_decl)
798 {
799 struct function *p = find_function_data (context);
800 rtx label_ref = gen_rtx_LABEL_REF (Pmode, label_rtx (label));
801 rtx handler_slot, static_chain, save_area;
802 tree link;
803
804 /* Find the corresponding handler slot for this label. */
805 handler_slot = p->x_nonlocal_goto_handler_slots;
806 for (link = p->x_nonlocal_labels; TREE_VALUE (link) != label;
807 link = TREE_CHAIN (link))
808 handler_slot = XEXP (handler_slot, 1);
809 handler_slot = XEXP (handler_slot, 0);
810
811 p->has_nonlocal_label = 1;
812 current_function_has_nonlocal_goto = 1;
813 LABEL_REF_NONLOCAL_P (label_ref) = 1;
814
815 /* Copy the rtl for the slots so that they won't be shared in
816 case the virtual stack vars register gets instantiated differently
817 in the parent than in the child. */
818
819 static_chain = copy_to_reg (lookup_static_chain (label));
820
821 /* Get addr of containing function's current nonlocal goto handler,
822 which will do any cleanups and then jump to the label. */
823 handler_slot = copy_to_reg (replace_rtx (copy_rtx (handler_slot),
824 virtual_stack_vars_rtx,
825 static_chain));
826
827 /* Get addr of containing function's nonlocal save area. */
828 save_area = p->x_nonlocal_goto_stack_level;
829 if (save_area)
830 save_area = replace_rtx (copy_rtx (save_area),
831 virtual_stack_vars_rtx, static_chain);
832
833 #if HAVE_nonlocal_goto
834 if (HAVE_nonlocal_goto)
835 emit_insn (gen_nonlocal_goto (static_chain, handler_slot,
836 save_area, label_ref));
837 else
838 #endif
839 {
840 /* Restore frame pointer for containing function.
841 This sets the actual hard register used for the frame pointer
842 to the location of the function's incoming static chain info.
843 The non-local goto handler will then adjust it to contain the
844 proper value and reload the argument pointer, if needed. */
845 emit_move_insn (hard_frame_pointer_rtx, static_chain);
846 emit_stack_restore (SAVE_NONLOCAL, save_area, NULL_RTX);
847
848 /* USE of hard_frame_pointer_rtx added for consistency;
849 not clear if really needed. */
850 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
851 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
852 emit_indirect_jump (handler_slot);
853 }
854 }
855 else
856 expand_goto_internal (label, label_rtx (label), NULL_RTX);
857 }
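/* Example of the nonlocal case above (an editorial addition): a GNU C
   nested function jumping to a label in its containing function.  The
   containing frame's handler slot and save area, set up by
   declare_nonlocal_label, are what the code above consults.  */
#if 0
void
outer (void)
{
  __label__ done;                   /* Make `done' visible to inner.  */
  void inner (void) { goto done; }  /* Nonlocal goto.  */
  inner ();
 done:;
}
#endif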
858
859 /* Generate RTL code for a `goto' statement with target label BODY.
860    LABEL should be a CODE_LABEL rtx; we abort on anything else.
861 LAST_INSN, if non-0, is the rtx we should consider as the last
862 insn emitted (for the purposes of cleaning up a return). */
863
864 static void
865 expand_goto_internal (body, label, last_insn)
866 tree body;
867 rtx label;
868 rtx last_insn;
869 {
870 struct nesting *block;
871 rtx stack_level = 0;
872
873 if (GET_CODE (label) != CODE_LABEL)
874 abort ();
875
876 /* If label has already been defined, we can tell now
877 whether and how we must alter the stack level. */
878
879 if (PREV_INSN (label) != 0)
880 {
881 /* Find the innermost pending block that contains the label.
882 (Check containment by comparing insn-uids.)
883 Then restore the outermost stack level within that block,
884 and do cleanups of all blocks contained in it. */
885 for (block = block_stack; block; block = block->next)
886 {
887 if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
888 break;
889 if (block->data.block.stack_level != 0)
890 stack_level = block->data.block.stack_level;
891 /* Execute the cleanups for blocks we are exiting. */
892 if (block->data.block.cleanups != 0)
893 {
894 expand_cleanups (block->data.block.cleanups, NULL_TREE, 1, 1);
895 do_pending_stack_adjust ();
896 }
897 }
898
899 if (stack_level)
900 {
901 /* Ensure stack adjust isn't done by emit_jump, as this
902 would clobber the stack pointer. This one should be
903 deleted as dead by flow. */
904 clear_pending_stack_adjust ();
905 do_pending_stack_adjust ();
906
907 /* Don't do this adjust if it's to the end label and this function
908 is to return with a depressed stack pointer. */
909 if (label == return_label
910 && (((TREE_CODE (TREE_TYPE (current_function_decl))
911 == FUNCTION_TYPE)
912 && (TYPE_RETURNS_STACK_DEPRESSED
913 (TREE_TYPE (current_function_decl))))))
914 ;
915 else
916 emit_stack_restore (SAVE_BLOCK, stack_level, NULL_RTX);
917 }
918
919 if (body != 0 && DECL_TOO_LATE (body))
920 error ("jump to `%s' invalidly jumps into binding contour",
921 IDENTIFIER_POINTER (DECL_NAME (body)));
922 }
923 /* Label not yet defined: may need to put this goto
924 on the fixup list. */
925 else if (! expand_fixup (body, label, last_insn))
926 {
927 /* No fixup needed. Record that the label is the target
928 of at least one goto that has no fixup. */
929 if (body != 0)
930 TREE_ADDRESSABLE (body) = 1;
931 }
932
933 emit_jump (label);
934 }
935 \f
936 /* Generate if necessary a fixup for a goto
937 whose target label in tree structure (if any) is TREE_LABEL
938 and whose target in rtl is RTL_LABEL.
939
940 If LAST_INSN is nonzero, we pretend that the jump appears
941 after insn LAST_INSN instead of at the current point in the insn stream.
942
943 The fixup will be used later to insert insns just before the goto.
944 Those insns will restore the stack level as appropriate for the
945 target label, and will (in the case of C++) also invoke any object
946 destructors which have to be invoked when we exit the scopes which
947 are exited by the goto.
948
949 Value is nonzero if a fixup is made. */
950
951 static int
952 expand_fixup (tree_label, rtl_label, last_insn)
953 tree tree_label;
954 rtx rtl_label;
955 rtx last_insn;
956 {
957 struct nesting *block, *end_block;
958
959 /* See if we can recognize which block the label will be output in.
960 This is possible in some very common cases.
961 If we succeed, set END_BLOCK to that block.
962 Otherwise, set it to 0. */
963
964 if (cond_stack
965 && (rtl_label == cond_stack->data.cond.endif_label
966 || rtl_label == cond_stack->data.cond.next_label))
967 end_block = cond_stack;
968 /* If we are in a loop, recognize certain labels which
969 are likely targets. This reduces the number of fixups
970 we need to create. */
971 else if (loop_stack
972 && (rtl_label == loop_stack->data.loop.start_label
973 || rtl_label == loop_stack->data.loop.end_label
974 || rtl_label == loop_stack->data.loop.continue_label))
975 end_block = loop_stack;
976 else
977 end_block = 0;
978
979 /* Now set END_BLOCK to the binding level to which we will return. */
980
981 if (end_block)
982 {
983 struct nesting *next_block = end_block->all;
984 block = block_stack;
985
986 /* First see if the END_BLOCK is inside the innermost binding level.
987 If so, then no cleanups or stack levels are relevant. */
988 while (next_block && next_block != block)
989 next_block = next_block->all;
990
991 if (next_block)
992 return 0;
993
994 /* Otherwise, set END_BLOCK to the innermost binding level
995 which is outside the relevant control-structure nesting. */
996 next_block = block_stack->next;
997 for (block = block_stack; block != end_block; block = block->all)
998 if (block == next_block)
999 next_block = next_block->next;
1000 end_block = next_block;
1001 }
1002
1003 /* Does any containing block have a stack level or cleanups?
1004 If not, no fixup is needed, and that is the normal case
1005 (the only case, for standard C). */
1006 for (block = block_stack; block != end_block; block = block->next)
1007 if (block->data.block.stack_level != 0
1008 || block->data.block.cleanups != 0)
1009 break;
1010
1011 if (block != end_block)
1012 {
1013 /* Ok, a fixup is needed. Add a fixup to the list of such. */
1014 struct goto_fixup *fixup
1015 = (struct goto_fixup *) ggc_alloc (sizeof (struct goto_fixup));
1016 /* In case an old stack level is restored, make sure that comes
1017 after any pending stack adjust. */
1018 /* ?? If the fixup isn't to come at the present position,
1019 doing the stack adjust here isn't useful. Doing it with our
1020 settings at that location isn't useful either. Let's hope
1021 someone does it! */
1022 if (last_insn == 0)
1023 do_pending_stack_adjust ();
1024 fixup->target = tree_label;
1025 fixup->target_rtl = rtl_label;
1026
1027 /* Create a BLOCK node and a corresponding matched set of
1028 NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes at
1029 this point. The notes will encapsulate any and all fixup
1030 code which we might later insert at this point in the insn
1031 stream. Also, the BLOCK node will be the parent (i.e. the
1032 `SUPERBLOCK') of any other BLOCK nodes which we might create
1033 later on when we are expanding the fixup code.
1034
1035 Note that optimization passes (including expand_end_loop)
1036 might move the *_BLOCK notes away, so we use a NOTE_INSN_DELETED
1037 as a placeholder. */
1038
1039 {
1040 register rtx original_before_jump
1041 = last_insn ? last_insn : get_last_insn ();
1042 rtx start;
1043 rtx end;
1044 tree block;
1045
1046 block = make_node (BLOCK);
1047 TREE_USED (block) = 1;
1048
1049 if (!cfun->x_whole_function_mode_p)
1050 insert_block (block);
1051 else
1052 {
1053 BLOCK_CHAIN (block)
1054 = BLOCK_CHAIN (DECL_INITIAL (current_function_decl));
1055 BLOCK_CHAIN (DECL_INITIAL (current_function_decl))
1056 = block;
1057 }
1058
1059 start_sequence ();
1060 start = emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
1061 if (cfun->x_whole_function_mode_p)
1062 NOTE_BLOCK (start) = block;
1063 fixup->before_jump = emit_note (NULL_PTR, NOTE_INSN_DELETED);
1064 end = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
1065 if (cfun->x_whole_function_mode_p)
1066 NOTE_BLOCK (end) = block;
1067 fixup->context = block;
1068 end_sequence ();
1069 emit_insns_after (start, original_before_jump);
1070 }
1071
1072 fixup->block_start_count = current_block_start_count;
1073 fixup->stack_level = 0;
1074 fixup->cleanup_list_list
1075 = ((block->data.block.outer_cleanups
1076 || block->data.block.cleanups)
1077 ? tree_cons (NULL_TREE, block->data.block.cleanups,
1078 block->data.block.outer_cleanups)
1079 : 0);
1080 fixup->next = goto_fixup_chain;
1081 goto_fixup_chain = fixup;
1082 }
1083
1084 return block != 0;
1085 }
1086
1087
1088 \f
1089 /* Expand any needed fixups in the outermost binding level of the
1090 function. FIRST_INSN is the first insn in the function. */
1091
1092 void
1093 expand_fixups (first_insn)
1094 rtx first_insn;
1095 {
1096 fixup_gotos (NULL_PTR, NULL_RTX, NULL_TREE, first_insn, 0);
1097 }
1098
1099 /* When exiting a binding contour, process all pending gotos requiring fixups.
1100 THISBLOCK is the structure that describes the block being exited.
1101 STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
1102 CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
1103 FIRST_INSN is the insn that began this contour.
1104
1105 Gotos that jump out of this contour must restore the
1106 stack level and do the cleanups before actually jumping.
1107
1108 DONT_JUMP_IN nonzero means report error there is a jump into this
1109 contour from before the beginning of the contour.
1110 This is also done if STACK_LEVEL is nonzero. */
1111
1112 static void
1113 fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
1114 struct nesting *thisblock;
1115 rtx stack_level;
1116 tree cleanup_list;
1117 rtx first_insn;
1118 int dont_jump_in;
1119 {
1120 register struct goto_fixup *f, *prev;
1121
1122 /* F is the fixup we are considering; PREV is the previous one. */
1123 /* We run this loop in two passes so that cleanups of exited blocks
1124 are run first, and blocks that are exited are marked so
1125 afterwards. */
1126
1127 for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
1128 {
1129 /* Test for a fixup that is inactive because it is already handled. */
1130 if (f->before_jump == 0)
1131 {
1132 /* Delete inactive fixup from the chain, if that is easy to do. */
1133 if (prev != 0)
1134 prev->next = f->next;
1135 }
1136 /* Has this fixup's target label been defined?
1137 If so, we can finalize it. */
1138 else if (PREV_INSN (f->target_rtl) != 0)
1139 {
1140 register rtx cleanup_insns;
1141
1142 /* If this fixup jumped into this contour from before the beginning
1143 of this contour, report an error. This code used to use
1144 the first non-label insn after f->target_rtl, but that's
1145 wrong since such can be added, by things like put_var_into_stack
1146 and have INSN_UIDs that are out of the range of the block. */
1147 /* ??? Bug: this does not detect jumping in through intermediate
1148 blocks that have stack levels or cleanups.
1149 It detects only a problem with the innermost block
1150 around the label. */
1151 if (f->target != 0
1152 && (dont_jump_in || stack_level || cleanup_list)
1153 && INSN_UID (first_insn) < INSN_UID (f->target_rtl)
1154 && INSN_UID (first_insn) > INSN_UID (f->before_jump)
1155 && ! DECL_ERROR_ISSUED (f->target))
1156 {
1157 error_with_decl (f->target,
1158 "label `%s' used before containing binding contour");
1159 /* Prevent multiple errors for one label. */
1160 DECL_ERROR_ISSUED (f->target) = 1;
1161 }
1162
1163 /* We will expand the cleanups into a sequence of their own and
1164 then later on we will attach this new sequence to the insn
1165 stream just ahead of the actual jump insn. */
1166
1167 start_sequence ();
1168
1169 /* Temporarily restore the lexical context where we will
1170 logically be inserting the fixup code. We do this for the
1171 sake of getting the debugging information right. */
1172
1173 pushlevel (0);
1174 set_block (f->context);
1175
1176 /* Expand the cleanups for blocks this jump exits. */
1177 if (f->cleanup_list_list)
1178 {
1179 tree lists;
1180 for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
1181 /* Marked elements correspond to blocks that have been closed.
1182 Do their cleanups. */
1183 if (TREE_ADDRESSABLE (lists)
1184 && TREE_VALUE (lists) != 0)
1185 {
1186 expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
1187 /* Pop any pushes done in the cleanups,
1188 in case function is about to return. */
1189 do_pending_stack_adjust ();
1190 }
1191 }
1192
1193 /* Restore stack level for the biggest contour that this
1194 jump jumps out of. */
1195 if (f->stack_level
1196 && ! (f->target_rtl == return_label
1197 && ((TREE_CODE (TREE_TYPE (current_function_decl))
1198 == FUNCTION_TYPE)
1199 && (TYPE_RETURNS_STACK_DEPRESSED
1200 (TREE_TYPE (current_function_decl))))))
1201 emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);
1202
1203 /* Finish up the sequence containing the insns which implement the
1204 necessary cleanups, and then attach that whole sequence to the
1205 insn stream just ahead of the actual jump insn. Attaching it
1206          at that point ensures that any cleanups which are in fact
1207 implicit C++ object destructions (which must be executed upon
1208 leaving the block) appear (to the debugger) to be taking place
1209 in an area of the generated code where the object(s) being
1210 destructed are still "in scope". */
1211
1212 cleanup_insns = get_insns ();
1213 poplevel (1, 0, 0);
1214
1215 end_sequence ();
1216 emit_insns_after (cleanup_insns, f->before_jump);
1217
1218
1219 f->before_jump = 0;
1220 }
1221 }
1222
1223 /* For any still-undefined labels, do the cleanups for this block now.
1224 We must do this now since items in the cleanup list may go out
1225 of scope when the block ends. */
1226 for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
1227 if (f->before_jump != 0
1228 && PREV_INSN (f->target_rtl) == 0
1229 /* Label has still not appeared. If we are exiting a block with
1230 a stack level to restore, that started before the fixup,
1231 mark this stack level as needing restoration
1232 when the fixup is later finalized. */
1233 && thisblock != 0
1234 /* Note: if THISBLOCK == 0 and we have a label that hasn't appeared, it
1235 means the label is undefined. That's erroneous, but possible. */
1236 && (thisblock->data.block.block_start_count
1237 <= f->block_start_count))
1238 {
1239 tree lists = f->cleanup_list_list;
1240 rtx cleanup_insns;
1241
1242 for (; lists; lists = TREE_CHAIN (lists))
1243          /* If the element after this one corresponds to our containing
1244             block, then this element must be the one for this block.  */
1245 if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
1246 {
1247 start_sequence ();
1248 pushlevel (0);
1249 set_block (f->context);
1250 expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
1251 do_pending_stack_adjust ();
1252 cleanup_insns = get_insns ();
1253 poplevel (1, 0, 0);
1254 end_sequence ();
1255 if (cleanup_insns != 0)
1256 f->before_jump
1257 = emit_insns_after (cleanup_insns, f->before_jump);
1258
1259 f->cleanup_list_list = TREE_CHAIN (lists);
1260 }
1261
1262 if (stack_level)
1263 f->stack_level = stack_level;
1264 }
1265 }
1266 \f
1267 /* Return the number of times character C occurs in string S. */
1268 static int
1269 n_occurrences (c, s)
1270 int c;
1271 const char *s;
1272 {
1273 int n = 0;
1274 while (*s)
1275 n += (*s++ == c);
1276 return n;
1277 }
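/* For example, n_occurrences (',', "=r,m") is 1; this is how the
   number of constraint alternatives is counted below (an editorial
   note).  */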
1278 \f
1279 /* Generate RTL for an asm statement (explicit assembler code).
1280 BODY is a STRING_CST node containing the assembler code text,
1281 or an ADDR_EXPR containing a STRING_CST. */
1282
1283 void
1284 expand_asm (body)
1285 tree body;
1286 {
1287 if (current_function_check_memory_usage)
1288 {
1289 error ("`asm' cannot be used in function where memory usage is checked");
1290 return;
1291 }
1292
1293 if (TREE_CODE (body) == ADDR_EXPR)
1294 body = TREE_OPERAND (body, 0);
1295
1296 emit_insn (gen_rtx_ASM_INPUT (VOIDmode,
1297 TREE_STRING_POINTER (body)));
1298 last_expr_type = 0;
1299 }
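/* Example (an editorial addition): a bare asm with no operands is all
   this function handles; anything with operands goes through
   expand_asm_operands below.  */
#if 0
asm ("nop");            /* Becomes a single ASM_INPUT insn.  */
#endif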
1300
1301 /* Generate RTL for an asm statement with arguments.
1302 STRING is the instruction template.
1303 OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
1304 Each output or input has an expression in the TREE_VALUE and
1305 a constraint-string in the TREE_PURPOSE.
1306 CLOBBERS is a list of STRING_CST nodes each naming a hard register
1307 that is clobbered by this insn.
1308
1309 Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
1310 Some elements of OUTPUTS may be replaced with trees representing temporary
1311 values. The caller should copy those temporary values to the originally
1312 specified lvalues.
1313
1314 VOL nonzero means the insn is volatile; don't optimize it. */
1315
1316 void
1317 expand_asm_operands (string, outputs, inputs, clobbers, vol, filename, line)
1318 tree string, outputs, inputs, clobbers;
1319 int vol;
1320 const char *filename;
1321 int line;
1322 {
1323 rtvec argvec, constraints;
1324 rtx body;
1325 int ninputs = list_length (inputs);
1326 int noutputs = list_length (outputs);
1327 int ninout = 0;
1328 int nclobbers;
1329 tree tail;
1330 register int i;
1331 /* Vector of RTX's of evaluated output operands. */
1332 rtx *output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
1333 int *inout_opnum = (int *) alloca (noutputs * sizeof (int));
1334 rtx *real_output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
1335 enum machine_mode *inout_mode
1336 = (enum machine_mode *) alloca (noutputs * sizeof (enum machine_mode));
1337 /* The insn we have emitted. */
1338 rtx insn;
1339
1340 /* An ASM with no outputs needs to be treated as volatile, for now. */
1341 if (noutputs == 0)
1342 vol = 1;
1343
1344 if (current_function_check_memory_usage)
1345 {
1346 error ("`asm' cannot be used with `-fcheck-memory-usage'");
1347 return;
1348 }
1349
1350 #ifdef MD_ASM_CLOBBERS
1351 /* Sometimes we wish to automatically clobber registers across an asm.
1352 Case in point is when the i386 backend moved from cc0 to a hard reg --
1353    maintaining source-level compatibility means automatically clobbering
1354 the flags register. */
1355 MD_ASM_CLOBBERS (clobbers);
1356 #endif
1357
1364 /* Count the number of meaningful clobbered registers, ignoring what
1365 we would ignore later. */
1366 nclobbers = 0;
1367 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1368 {
1369 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1370
1371 i = decode_reg_name (regname);
1372 if (i >= 0 || i == -4)
1373 ++nclobbers;
1374 else if (i == -2)
1375 error ("unknown register name `%s' in `asm'", regname);
1376 }
1377
1378 last_expr_type = 0;
1379
1380 /* Check that the number of alternatives is constant across all
1381 operands. */
1382 if (outputs || inputs)
1383 {
1384 tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
1385 int nalternatives = n_occurrences (',', TREE_STRING_POINTER (tmp));
1386 tree next = inputs;
1387
1388 if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
1389 {
1390 error ("too many alternatives in `asm'");
1391 return;
1392 }
1393
1394 tmp = outputs;
1395 while (tmp)
1396 {
1397 const char *constraint = TREE_STRING_POINTER (TREE_PURPOSE (tmp));
1398
1399 if (n_occurrences (',', constraint) != nalternatives)
1400 {
1401 error ("operand constraints for `asm' differ in number of alternatives");
1402 return;
1403 }
1404
1405 if (TREE_CHAIN (tmp))
1406 tmp = TREE_CHAIN (tmp);
1407 else
1408 tmp = next, next = 0;
1409 }
1410 }
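  /* Example (an editorial addition): NALTERNATIVES above is the comma
     count, so with two alternatives every operand's constraint must
     have exactly one comma, as in
         asm ("mov %1,%0" : "=r,m" (dst) : "ri,r" (src));
     where dst and src are hypothetical C variables.  */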
1411
1412 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1413 {
1414 tree val = TREE_VALUE (tail);
1415 tree type = TREE_TYPE (val);
1416 char *constraint;
1417 char *p;
1418 int c_len;
1419 int j;
1420 int is_inout = 0;
1421 int allows_reg = 0;
1422 int allows_mem = 0;
1423
1424 /* If there's an erroneous arg, emit no insn. */
1425 if (TREE_TYPE (val) == error_mark_node)
1426 return;
1427
1428 /* Make sure constraint has `=' and does not have `+'. Also, see
1429 if it allows any register. Be liberal on the latter test, since
1430 the worst that happens if we get it wrong is we issue an error
1431 message. */
1432
1433 c_len = strlen (TREE_STRING_POINTER (TREE_PURPOSE (tail)));
1434 constraint = TREE_STRING_POINTER (TREE_PURPOSE (tail));
1435
1436 /* Allow the `=' or `+' to not be at the beginning of the string,
1437 since it wasn't explicitly documented that way, and there is a
1438 large body of code that puts it last. Swap the character to
1439 the front, so as not to uglify any place else. */
1440 switch (c_len)
1441 {
1442 default:
1443 if ((p = strchr (constraint, '=')) != NULL)
1444 break;
1445 if ((p = strchr (constraint, '+')) != NULL)
1446 break;
1447 case 0:
1448 error ("output operand constraint lacks `='");
1449 return;
1450 }
1451
1452 if (p != constraint)
1453 {
1454 j = *p;
1455 bcopy (constraint, constraint+1, p-constraint);
1456 *constraint = j;
1457
1458 warning ("output constraint `%c' for operand %d is not at the beginning", j, i);
1459 }
1460
1461 is_inout = constraint[0] == '+';
1462 /* Replace '+' with '='. */
1463 constraint[0] = '=';
1464 /* Make sure we can specify the matching operand. */
1465 if (is_inout && i > 9)
1466 {
1467 error ("output operand constraint %d contains `+'", i);
1468 return;
1469 }
1470
1471 for (j = 1; j < c_len; j++)
1472 switch (constraint[j])
1473 {
1474 case '+':
1475 case '=':
1476 error ("operand constraint contains '+' or '=' at illegal position.");
1477 return;
1478
1479 case '%':
1480 if (i + 1 == ninputs + noutputs)
1481 {
1482 error ("`%%' constraint used with last operand");
1483 return;
1484 }
1485 break;
1486
1487 case '?': case '!': case '*': case '&':
1488 case 'E': case 'F': case 'G': case 'H':
1489 case 's': case 'i': case 'n':
1490 case 'I': case 'J': case 'K': case 'L': case 'M':
1491 case 'N': case 'O': case 'P': case ',':
1492 break;
1493
1494 case '0': case '1': case '2': case '3': case '4':
1495 case '5': case '6': case '7': case '8': case '9':
1496 error ("matching constraint not valid in output operand");
1497 break;
1498
1499 case 'V': case 'm': case 'o':
1500 allows_mem = 1;
1501 break;
1502
1503 case '<': case '>':
1504 /* ??? Before flow, auto inc/dec insns are not supposed to exist,
1505 excepting those that expand_call created. So match memory
1506 and hope. */
1507 allows_mem = 1;
1508 break;
1509
1510 case 'g': case 'X':
1511 allows_reg = 1;
1512 allows_mem = 1;
1513 break;
1514
1515 case 'p': case 'r':
1516 allows_reg = 1;
1517 break;
1518
1519 default:
1520 if (! ISALPHA (constraint[j]))
1521 {
1522 error ("invalid punctuation `%c' in constraint",
1523 constraint[j]);
1524 return;
1525 }
1526 if (REG_CLASS_FROM_LETTER (constraint[j]) != NO_REGS)
1527 allows_reg = 1;
1528 #ifdef EXTRA_CONSTRAINT
1529 else
1530 {
1531 /* Otherwise we can't assume anything about the nature of
1532 the constraint except that it isn't purely registers.
1533 Treat it like "g" and hope for the best. */
1534 allows_reg = 1;
1535 allows_mem = 1;
1536 }
1537 #endif
1538 break;
1539 }
1540
1541 /* If an output operand is not a decl or indirect ref and our constraint
1542 allows a register, make a temporary to act as an intermediate.
1543 Make the asm insn write into that, then our caller will copy it to
1544 the real output operand. Likewise for promoted variables. */
1545
1546 real_output_rtx[i] = NULL_RTX;
1547 if ((TREE_CODE (val) == INDIRECT_REF
1548 && allows_mem)
1549 || (DECL_P (val)
1550 && (allows_mem || GET_CODE (DECL_RTL (val)) == REG)
1551 && ! (GET_CODE (DECL_RTL (val)) == REG
1552 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
1553 || ! allows_reg
1554 || is_inout)
1555 {
1556 if (! allows_reg)
1557 mark_addressable (TREE_VALUE (tail));
1558
1559 output_rtx[i]
1560 = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode,
1561 EXPAND_MEMORY_USE_WO);
1562
1563 if (! allows_reg && GET_CODE (output_rtx[i]) != MEM)
1564 error ("output number %d not directly addressable", i);
1565 if (! allows_mem && GET_CODE (output_rtx[i]) == MEM)
1566 {
1567 real_output_rtx[i] = protect_from_queue (output_rtx[i], 1);
1568 output_rtx[i] = gen_reg_rtx (GET_MODE (output_rtx[i]));
1569 if (is_inout)
1570 emit_move_insn (output_rtx[i], real_output_rtx[i]);
1571 }
1572 }
1573 else
1574 {
1575 output_rtx[i] = assign_temp (type, 0, 0, 1);
1576 TREE_VALUE (tail) = make_tree (type, output_rtx[i]);
1577 }
1578
1579 if (is_inout)
1580 {
1581 inout_mode[ninout] = TYPE_MODE (TREE_TYPE (TREE_VALUE (tail)));
1582 inout_opnum[ninout++] = i;
1583 }
1584 }
1585
1586 ninputs += ninout;
1587 if (ninputs + noutputs > MAX_RECOG_OPERANDS)
1588 {
1589 error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
1590 return;
1591 }
1592
1593 /* Make vectors for the expression-rtx and constraint strings. */
1594
1595 argvec = rtvec_alloc (ninputs);
1596 constraints = rtvec_alloc (ninputs);
1597
1598 body = gen_rtx_ASM_OPERANDS (VOIDmode, TREE_STRING_POINTER (string),
1599 empty_string, 0, argvec, constraints,
1600 filename, line);
1601
1602 MEM_VOLATILE_P (body) = vol;
1603
1604 /* Eval the inputs and put them into ARGVEC.
1605 Put their constraints into ASM_INPUTs and store in CONSTRAINTS. */
1606
1607 i = 0;
1608 for (tail = inputs; tail; tail = TREE_CHAIN (tail))
1609 {
1610 int j;
1611 int allows_reg = 0, allows_mem = 0;
1612 char *constraint, *orig_constraint;
1613 int c_len;
1614 rtx op;
1615
1616 /* If there's an erroneous arg, emit no insn,
1617 because the ASM_INPUT would get VOIDmode
1618 and that could cause a crash in reload. */
1619 if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
1620 return;
1621
1622 /* ??? Can this happen, and does the error message make any sense? */
1623 if (TREE_PURPOSE (tail) == NULL_TREE)
1624 {
1625 error ("hard register `%s' listed as input operand to `asm'",
1626 TREE_STRING_POINTER (TREE_VALUE (tail)) );
1627 return;
1628 }
1629
1630 c_len = strlen (TREE_STRING_POINTER (TREE_PURPOSE (tail)));
1631 constraint = TREE_STRING_POINTER (TREE_PURPOSE (tail));
1632 orig_constraint = constraint;
1633
1634 /* Make sure constraint has neither `=', `+', nor '&'. */
1635
1636 for (j = 0; j < c_len; j++)
1637 switch (constraint[j])
1638 {
1639 case '+': case '=': case '&':
1640 if (constraint == orig_constraint)
1641 {
1642 error ("input operand constraint contains `%c'",
1643 constraint[j]);
1644 return;
1645 }
1646 break;
1647
1648 case '%':
1649 if (constraint == orig_constraint
1650 && i + 1 == ninputs - ninout)
1651 {
1652 error ("`%%' constraint used with last operand");
1653 return;
1654 }
1655 break;
1656
1657 case 'V': case 'm': case 'o':
1658 allows_mem = 1;
1659 break;
1660
1661 case '<': case '>':
1662 case '?': case '!': case '*':
1663 case 'E': case 'F': case 'G': case 'H':
1664 case 's': case 'i': case 'n':
1665 case 'I': case 'J': case 'K': case 'L': case 'M':
1666 case 'N': case 'O': case 'P': case ',':
1667 break;
1668
1669 /* Whether or not a numeric constraint allows a register is
1670 decided by the matching constraint, and so there is no need
1671 to do anything special with them. We must handle them in
1672 the default case, so that we don't unnecessarily force
1673 operands to memory. */
1674 case '0': case '1': case '2': case '3': case '4':
1675 case '5': case '6': case '7': case '8': case '9':
1676 if (constraint[j] >= '0' + noutputs)
1677 {
1678 error
1679 ("matching constraint references invalid operand number");
1680 return;
1681 }
1682
1683 /* Try and find the real constraint for this dup. */
1684 if ((j == 0 && c_len == 1)
1685 || (j == 1 && c_len == 2 && constraint[0] == '%'))
1686 {
1687 tree o = outputs;
1688
1689 for (j = constraint[j] - '0'; j > 0; --j)
1690 o = TREE_CHAIN (o);
1691
1692 c_len = strlen (TREE_STRING_POINTER (TREE_PURPOSE (o)));
1693 constraint = TREE_STRING_POINTER (TREE_PURPOSE (o));
1694 j = 0;
1695 break;
1696 }
1697
1698 /* ... fall through ... */
1699
1700 case 'p': case 'r':
1701 allows_reg = 1;
1702 break;
1703
1704 case 'g': case 'X':
1705 allows_reg = 1;
1706 allows_mem = 1;
1707 break;
1708
1709 default:
1710 if (! ISALPHA (constraint[j]))
1711 {
1712 error ("invalid punctuation `%c' in constraint",
1713 constraint[j]);
1714 return;
1715 }
1716 if (REG_CLASS_FROM_LETTER (constraint[j]) != NO_REGS)
1717 allows_reg = 1;
1718 #ifdef EXTRA_CONSTRAINT
1719 else
1720 {
1721 /* Otherwise we can't assume anything about the nature of
1722 the constraint except that it isn't purely registers.
1723 Treat it like "g" and hope for the best. */
1724 allows_reg = 1;
1725 allows_mem = 1;
1726 }
1727 #endif
1728 break;
1729 }
1730
1731 if (! allows_reg && allows_mem)
1732 mark_addressable (TREE_VALUE (tail));
1733
1734 op = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode, 0);
1735
1736 if (asm_operand_ok (op, constraint) <= 0)
1737 {
1738 if (allows_reg)
1739 op = force_reg (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))), op);
1740 else if (!allows_mem)
1741 warning ("asm operand %d probably doesn't match constraints", i);
1742 else if (CONSTANT_P (op))
1743 op = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
1744 op);
1745 else if (GET_CODE (op) == REG
1746 || GET_CODE (op) == SUBREG
1747 || GET_CODE (op) == CONCAT)
1748 {
1749 tree type = TREE_TYPE (TREE_VALUE (tail));
1750 rtx memloc = assign_temp (type, 1, 1, 1);
1751
1752 emit_move_insn (memloc, op);
1753 op = memloc;
1754 }
1755
1756 else if (GET_CODE (op) == MEM && MEM_VOLATILE_P (op))
1757 /* We won't recognize volatile memory as available as a
1758 memory_operand at this point. Ignore it. */
1759 ;
1760 else if (queued_subexp_p (op))
1761 ;
1762 else
1763 /* ??? Leave this only until we have experience with what
1764 happens in combine and elsewhere when constraints are
1765 not satisfied. */
1766 warning ("asm operand %d probably doesn't match constraints", i);
1767 }
1768 XVECEXP (body, 3, i) = op;
1769
1770 XVECEXP (body, 4, i) /* constraints */
1771 = gen_rtx_ASM_INPUT (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
1772 orig_constraint);
1773 i++;
1774 }
1775
1776 /* Protect all the operands from the queue now that they have all been
1777 evaluated. */
1778
1779 for (i = 0; i < ninputs - ninout; i++)
1780 XVECEXP (body, 3, i) = protect_from_queue (XVECEXP (body, 3, i), 0);
1781
1782 for (i = 0; i < noutputs; i++)
1783 output_rtx[i] = protect_from_queue (output_rtx[i], 1);
1784
1785 /* For in-out operands, copy output rtx to input rtx. */
1786 for (i = 0; i < ninout; i++)
1787 {
1788 int j = inout_opnum[i];
1789
1790 XVECEXP (body, 3, ninputs - ninout + i) /* argvec */
1791 = output_rtx[j];
1792 XVECEXP (body, 4, ninputs - ninout + i) /* constraints */
1793 = gen_rtx_ASM_INPUT (inout_mode[i], digit_strings[j]);
1794 }
1795
1796 /* Now, for each output, construct an rtx
1797 (set OUTPUT (asm_operands INSN OUTPUTNUMBER OUTPUTCONSTRAINT
1798 ARGVEC CONSTRAINTS))
1799 If there is more than one, put them inside a PARALLEL. */
1800
1801 if (noutputs == 1 && nclobbers == 0)
1802 {
1803 XSTR (body, 1) = TREE_STRING_POINTER (TREE_PURPOSE (outputs));
1804 insn = emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
1805 }
1806
1807 else if (noutputs == 0 && nclobbers == 0)
1808 {
1809 /* No output operands: put in a raw ASM_OPERANDS rtx. */
1810 insn = emit_insn (body);
1811 }
1812
1813 else
1814 {
1815 rtx obody = body;
1816 int num = noutputs;
1817
1818 if (num == 0)
1819 num = 1;
1820
1821 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
1822
1823 /* For each output operand, store a SET. */
1824 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1825 {
1826 XVECEXP (body, 0, i)
1827 = gen_rtx_SET (VOIDmode,
1828 output_rtx[i],
1829 gen_rtx_ASM_OPERANDS
1830 (VOIDmode,
1831 TREE_STRING_POINTER (string),
1832 TREE_STRING_POINTER (TREE_PURPOSE (tail)),
1833 i, argvec, constraints,
1834 filename, line));
1835
1836 MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
1837 }
1838
1839 /* If there are no outputs (but there are some clobbers)
1840 store the bare ASM_OPERANDS into the PARALLEL. */
1841
1842 if (i == 0)
1843 XVECEXP (body, 0, i++) = obody;
1844
1845 /* Store (clobber REG) for each clobbered register specified. */
1846
1847 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1848 {
1849 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1850 int j = decode_reg_name (regname);
1851
1852 if (j < 0)
1853 {
1854 if (j == -3) /* `cc', which is not a register */
1855 continue;
1856
1857 if (j == -4) /* `memory', don't cache memory across asm */
1858 {
1859 XVECEXP (body, 0, i++)
1860 = gen_rtx_CLOBBER (VOIDmode,
1861 gen_rtx_MEM
1862 (BLKmode,
1863 gen_rtx_SCRATCH (VOIDmode)));
1864 continue;
1865 }
1866
1867 /* Ignore unknown register, error already signaled. */
1868 continue;
1869 }
1870
1871 /* Use QImode since that's guaranteed to clobber just one reg. */
1872 XVECEXP (body, 0, i++)
1873 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (QImode, j));
1874 }
1875
1876 insn = emit_insn (body);
1877 }
1878
1879 /* For any outputs that needed reloading into registers, spill them
1880 back to where they belong. */
1881 for (i = 0; i < noutputs; ++i)
1882 if (real_output_rtx[i])
1883 emit_move_insn (real_output_rtx[i], output_rtx[i]);
1884
1885 free_temp_slots ();
1886 }
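/* Illustrative example (a sketch, not part of this file): given

	asm ("addl %2,%0" : "=r" (sum) : "0" (sum), "g" (inc));

   the input constraint "0" is a matching constraint; the input loop
   above substitutes the real constraint string of output operand 0
   ("=r") when deciding that the input allows a register, and since
   there is exactly one output and no clobbers the result is a single
   SET whose source is the ASM_OPERANDS body.  */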
1887 \f
1888 /* Generate RTL to evaluate the expression EXP
1889 and remember it in case this is the VALUE in a ({... VALUE; }) constr. */
1890
1891 void
1892 expand_expr_stmt (exp)
1893 tree exp;
1894 {
1895 /* If -W, warn about statements with no side effects,
1896 except for an explicit cast to void (e.g. for assert()), and
1897 except inside a ({...}) where they may be useful. */
1898 if (expr_stmts_for_value == 0 && exp != error_mark_node)
1899 {
1900 if (! TREE_SIDE_EFFECTS (exp)
1901 && (extra_warnings || warn_unused_value)
1902 && !(TREE_CODE (exp) == CONVERT_EXPR
1903 && VOID_TYPE_P (TREE_TYPE (exp))))
1904 warning_with_file_and_line (emit_filename, emit_lineno,
1905 "statement with no effect");
1906 else if (warn_unused_value)
1907 warn_if_unused_value (exp);
1908 }
1909
1910 /* If EXP is of function type and we are expanding statements for
1911 value, convert it to pointer-to-function. */
1912 if (expr_stmts_for_value && TREE_CODE (TREE_TYPE (exp)) == FUNCTION_TYPE)
1913 exp = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (exp)), exp);
1914
1915 last_expr_type = TREE_TYPE (exp);
1916 last_expr_value = expand_expr (exp,
1917 (expr_stmts_for_value
1918 ? NULL_RTX : const0_rtx),
1919 VOIDmode, 0);
1920
1921 /* If all we do is reference a volatile value in memory,
1922 copy it to a register to be sure it is actually touched. */
1923 if (last_expr_value != 0 && GET_CODE (last_expr_value) == MEM
1924 && TREE_THIS_VOLATILE (exp))
1925 {
1926 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode)
1927 ;
1928 else if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
1929 copy_to_reg (last_expr_value);
1930 else
1931 {
1932 rtx lab = gen_label_rtx ();
1933
1934 /* Compare the value with itself to reference it. */
1935 emit_cmp_and_jump_insns (last_expr_value, last_expr_value, EQ,
1936 expand_expr (TYPE_SIZE (last_expr_type),
1937 NULL_RTX, VOIDmode, 0),
1938 BLKmode, 0,
1939 TYPE_ALIGN (last_expr_type) / BITS_PER_UNIT,
1940 lab);
1941 emit_label (lab);
1942 }
1943 }
1944
1945 /* If this expression is part of a ({...}) and is in memory, we may have
1946 to preserve temporaries. */
1947 preserve_temp_slots (last_expr_value);
1948
1949 /* Free any temporaries used to evaluate this expression. Any temporary
1950 used as a result of this expression will already have been preserved
1951 above. */
1952 free_temp_slots ();
1953
1954 emit_queue ();
1955 }
1956
1957 /* Warn if EXP contains any computations whose results are not used.
1958 Return 1 if a warning is printed; 0 otherwise. */
1959
1960 int
1961 warn_if_unused_value (exp)
1962 tree exp;
1963 {
1964 if (TREE_USED (exp))
1965 return 0;
1966
1967 switch (TREE_CODE (exp))
1968 {
1969 case PREINCREMENT_EXPR:
1970 case POSTINCREMENT_EXPR:
1971 case PREDECREMENT_EXPR:
1972 case POSTDECREMENT_EXPR:
1973 case MODIFY_EXPR:
1974 case INIT_EXPR:
1975 case TARGET_EXPR:
1976 case CALL_EXPR:
1977 case METHOD_CALL_EXPR:
1978 case RTL_EXPR:
1979 case TRY_CATCH_EXPR:
1980 case WITH_CLEANUP_EXPR:
1981 case EXIT_EXPR:
1982 /* We don't warn about COND_EXPR because it may be a useful
1983 construct if either arm contains a side effect. */
1984 case COND_EXPR:
1985 return 0;
1986
1987 case BIND_EXPR:
1988 /* For a binding, warn if no side effect within it. */
1989 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1990
1991 case SAVE_EXPR:
1992 return warn_if_unused_value (TREE_OPERAND (exp, 0));
1993
1994 case TRUTH_ORIF_EXPR:
1995 case TRUTH_ANDIF_EXPR:
1996 /* In && or ||, warn if 2nd operand has no side effect. */
1997 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1998
1999 case COMPOUND_EXPR:
2000 if (TREE_NO_UNUSED_WARNING (exp))
2001 return 0;
2002 if (warn_if_unused_value (TREE_OPERAND (exp, 0)))
2003 return 1;
2004 /* Let people do `(foo (), 0)' without a warning. */
2005 if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
2006 return 0;
2007 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2008
2009 case NOP_EXPR:
2010 case CONVERT_EXPR:
2011 case NON_LVALUE_EXPR:
2012 /* Don't warn about values cast to void. */
2013 if (VOID_TYPE_P (TREE_TYPE (exp)))
2014 return 0;
2015 /* Don't warn about conversions not explicit in the user's program. */
2016 if (TREE_NO_UNUSED_WARNING (exp))
2017 return 0;
2018 /* Assignment to a cast usually results in a cast of a modify.
2019 Don't complain about that. There can be an arbitrary number of
2020 casts before the modify, so we must loop until we find the first
2021 non-cast expression and then test to see if that is a modify. */
2022 {
2023 tree tem = TREE_OPERAND (exp, 0);
2024
2025 while (TREE_CODE (tem) == CONVERT_EXPR || TREE_CODE (tem) == NOP_EXPR)
2026 tem = TREE_OPERAND (tem, 0);
2027
2028 if (TREE_CODE (tem) == MODIFY_EXPR || TREE_CODE (tem) == INIT_EXPR
2029 || TREE_CODE (tem) == CALL_EXPR)
2030 return 0;
2031 }
2032 goto warn;
2033
2034 case INDIRECT_REF:
2035 /* Don't warn about automatic dereferencing of references, since
2036 the user cannot control it. */
2037 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
2038 return warn_if_unused_value (TREE_OPERAND (exp, 0));
2039 /* ... fall through ... */
2040
2041 default:
2042 /* Referencing a volatile value is a side effect, so don't warn. */
2043 if ((DECL_P (exp)
2044 || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
2045 && TREE_THIS_VOLATILE (exp))
2046 return 0;
2047
2048 /* If this is an expression which has no operands, there is no value
2049 to be unused. There are no such language-independent codes,
2050 but front ends may define such. */
2051 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'e'
2052 && TREE_CODE_LENGTH (TREE_CODE (exp)) == 0)
2053 return 0;
2054
2055 warn:
2056 warning_with_file_and_line (emit_filename, emit_lineno,
2057 "value computed is not used");
2058 return 1;
2059 }
2060 }
2061
2062 /* Clear out the memory of the last expression evaluated. */
2063
2064 void
2065 clear_last_expr ()
2066 {
2067 last_expr_type = 0;
2068 }
2069
2070 /* Begin a statement which will return a value.
2071 Return the RTL_EXPR for this statement expr.
2072 The caller must save that value and pass it to expand_end_stmt_expr. */
2073
2074 tree
2075 expand_start_stmt_expr ()
2076 {
2077 int momentary;
2078 tree t;
2079
2080 /* Make the RTL_EXPR node temporary, not momentary,
2081 so that rtl_expr_chain doesn't become garbage. */
2082 momentary = suspend_momentary ();
2083 t = make_node (RTL_EXPR);
2084 resume_momentary (momentary);
2085 do_pending_stack_adjust ();
2086 start_sequence_for_rtl_expr (t);
2087 NO_DEFER_POP;
2088 expr_stmts_for_value++;
2089 return t;
2090 }
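/* Usage sketch (hypothetical caller, not code from this file): a
   front end expanding the statement expression ({ stmt; val; })
   would pair these calls roughly as

	tree t = expand_start_stmt_expr ();
	expand_expr_stmt (stmt_tree);
	expand_expr_stmt (val_tree);
	t = expand_end_stmt_expr (t);

   where stmt_tree and val_tree are trees supplied by the parser;
   T then carries the statement expression's type, RTL value and
   insn sequence.  */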
2091
2092 /* Restore the previous state at the end of a statement that returns a value.
2093 Returns a tree node representing the statement's value and the
2094 insns to compute the value.
2095
2096 The nodes of that expression have been freed by now, so we cannot use them.
2097 But we don't want to do that anyway; the expression has already been
2098 evaluated and now we just want to use the value. So generate an RTL_EXPR
2099 with the proper type and RTL value.
2100
2101 If the last substatement was not an expression,
2102 return something with type `void'. */
2103
2104 tree
2105 expand_end_stmt_expr (t)
2106 tree t;
2107 {
2108 OK_DEFER_POP;
2109
2110 if (last_expr_type == 0)
2111 {
2112 last_expr_type = void_type_node;
2113 last_expr_value = const0_rtx;
2114 }
2115 else if (last_expr_value == 0)
2116 /* There are some cases where this can happen, such as when the
2117 statement is of void type. */
2118 last_expr_value = const0_rtx;
2119 else if (GET_CODE (last_expr_value) != REG && ! CONSTANT_P (last_expr_value))
2120 /* Remove any possible QUEUED. */
2121 last_expr_value = protect_from_queue (last_expr_value, 0);
2122
2123 emit_queue ();
2124
2125 TREE_TYPE (t) = last_expr_type;
2126 RTL_EXPR_RTL (t) = last_expr_value;
2127 RTL_EXPR_SEQUENCE (t) = get_insns ();
2128
2129 rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);
2130
2131 end_sequence ();
2132
2133 /* Don't consider deleting this expr or containing exprs at tree level. */
2134 TREE_SIDE_EFFECTS (t) = 1;
2135 /* Propagate volatility of the actual RTL expr. */
2136 TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value);
2137
2138 last_expr_type = 0;
2139 expr_stmts_for_value--;
2140
2141 return t;
2142 }
2143 \f
2144 /* Generate RTL for the start of an if-then. COND is the expression
2145 whose truth should be tested.
2146
2147 If EXITFLAG is nonzero, this conditional is visible to
2148 `exit_something'. */
2149
2150 void
2151 expand_start_cond (cond, exitflag)
2152 tree cond;
2153 int exitflag;
2154 {
2155 struct nesting *thiscond = ALLOC_NESTING ();
2156
2157 /* Make an entry on cond_stack for the cond we are entering. */
2158
2159 thiscond->next = cond_stack;
2160 thiscond->all = nesting_stack;
2161 thiscond->depth = ++nesting_depth;
2162 thiscond->data.cond.next_label = gen_label_rtx ();
2163 /* Before we encounter an `else', we don't need a separate exit label
2164 unless there are supposed to be exit statements
2165 to exit this conditional. */
2166 thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
2167 thiscond->data.cond.endif_label = thiscond->exit_label;
2168 cond_stack = thiscond;
2169 nesting_stack = thiscond;
2170
2171 do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
2172 }
2173
2174 /* Generate RTL between then-clause and the elseif-clause
2175 of an if-then-elseif-.... */
2176
2177 void
2178 expand_start_elseif (cond)
2179 tree cond;
2180 {
2181 if (cond_stack->data.cond.endif_label == 0)
2182 cond_stack->data.cond.endif_label = gen_label_rtx ();
2183 emit_jump (cond_stack->data.cond.endif_label);
2184 emit_label (cond_stack->data.cond.next_label);
2185 cond_stack->data.cond.next_label = gen_label_rtx ();
2186 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2187 }
2188
2189 /* Generate RTL between the then-clause and the else-clause
2190 of an if-then-else. */
2191
2192 void
2193 expand_start_else ()
2194 {
2195 if (cond_stack->data.cond.endif_label == 0)
2196 cond_stack->data.cond.endif_label = gen_label_rtx ();
2197
2198 emit_jump (cond_stack->data.cond.endif_label);
2199 emit_label (cond_stack->data.cond.next_label);
2200 cond_stack->data.cond.next_label = 0; /* No more _else or _elseif calls. */
2201 }
2202
2203 /* After calling expand_start_else, turn this "else" into an "else if"
2204 by providing another condition. */
2205
2206 void
2207 expand_elseif (cond)
2208 tree cond;
2209 {
2210 cond_stack->data.cond.next_label = gen_label_rtx ();
2211 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2212 }
2213
2214 /* Generate RTL for the end of an if-then.
2215 Pop the record for it off of cond_stack. */
2216
2217 void
2218 expand_end_cond ()
2219 {
2220 struct nesting *thiscond = cond_stack;
2221
2222 do_pending_stack_adjust ();
2223 if (thiscond->data.cond.next_label)
2224 emit_label (thiscond->data.cond.next_label);
2225 if (thiscond->data.cond.endif_label)
2226 emit_label (thiscond->data.cond.endif_label);
2227
2228 POPSTACK (cond_stack);
2229 last_expr_type = 0;
2230 }
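/* Call-sequence sketch (hypothetical front end, not code from this
   file): an if/else-if/else chain is expanded as

	expand_start_cond (cond1, 0);
	... expand then-clause 1 ...
	expand_start_elseif (cond2);
	... expand then-clause 2 ...
	expand_start_else ();
	... expand else-clause ...
	expand_end_cond ();  */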
2231
2232
2233 \f
2234 /* Generate RTL for the start of a loop. EXIT_FLAG is nonzero if this
2235 loop should be exited by `exit_something'. This is a loop for which
2236 `expand_continue' will jump to the top of the loop.
2237
2238 Make an entry on loop_stack to record the labels associated with
2239 this loop. */
2240
2241 struct nesting *
2242 expand_start_loop (exit_flag)
2243 int exit_flag;
2244 {
2245 register struct nesting *thisloop = ALLOC_NESTING ();
2246
2247 /* Make an entry on loop_stack for the loop we are entering. */
2248
2249 thisloop->next = loop_stack;
2250 thisloop->all = nesting_stack;
2251 thisloop->depth = ++nesting_depth;
2252 thisloop->data.loop.start_label = gen_label_rtx ();
2253 thisloop->data.loop.end_label = gen_label_rtx ();
2254 thisloop->data.loop.alt_end_label = 0;
2255 thisloop->data.loop.continue_label = thisloop->data.loop.start_label;
2256 thisloop->exit_label = exit_flag ? thisloop->data.loop.end_label : 0;
2257 loop_stack = thisloop;
2258 nesting_stack = thisloop;
2259
2260 do_pending_stack_adjust ();
2261 emit_queue ();
2262 emit_note (NULL_PTR, NOTE_INSN_LOOP_BEG);
2263 emit_label (thisloop->data.loop.start_label);
2264
2265 return thisloop;
2266 }
2267
2268 /* Like expand_start_loop but for a loop where the continuation point
2269 (for expand_continue_loop) will be specified explicitly. */
2270
2271 struct nesting *
2272 expand_start_loop_continue_elsewhere (exit_flag)
2273 int exit_flag;
2274 {
2275 struct nesting *thisloop = expand_start_loop (exit_flag);
2276 loop_stack->data.loop.continue_label = gen_label_rtx ();
2277 return thisloop;
2278 }
2279
2280 /* Specify the continuation point for a loop started with
2281 expand_start_loop_continue_elsewhere.
2282 Use this at the point in the code to which a continue statement
2283 should jump. */
2284
2285 void
2286 expand_loop_continue_here ()
2287 {
2288 do_pending_stack_adjust ();
2289 emit_note (NULL_PTR, NOTE_INSN_LOOP_CONT);
2290 emit_label (loop_stack->data.loop.continue_label);
2291 }
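/* Loop sketch (hypothetical front end, not code from this file): a
   C `for (init; cond; incr) body' can be expanded so that `continue'
   jumps to the increment code:

	expand_expr_stmt (init_tree);
	expand_start_loop_continue_elsewhere (1);
	expand_exit_loop_if_false (0, cond_tree);
	... expand BODY ...
	expand_loop_continue_here ();
	expand_expr_stmt (incr_tree);
	expand_end_loop ();

   Passing 0 as WHICHLOOP makes the exit test apply to the innermost
   loop on loop_stack.  */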
2292
2293 /* Finish a loop. Generate a jump back to the top and the loop-exit label.
2294 Pop the block off of loop_stack. */
2295
2296 void
2297 expand_end_loop ()
2298 {
2299 rtx start_label = loop_stack->data.loop.start_label;
2300 rtx insn = get_last_insn ();
2301 int needs_end_jump = 1;
2302
2303 /* Mark the continue-point at the top of the loop if none elsewhere. */
2304 if (start_label == loop_stack->data.loop.continue_label)
2305 emit_note_before (NOTE_INSN_LOOP_CONT, start_label);
2306
2307 do_pending_stack_adjust ();
2308
2309 /* If optimizing, perhaps reorder the loop.
2310 First, try to use a condjump near the end.
2311 expand_exit_loop_if_false ends loops with unconditional jumps,
2312 like this:
2313
2314 if (test) goto label;
2315 optional: cleanup
2316 goto loop_stack->data.loop.end_label
2317 barrier
2318 label:
2319
2320 If we find such a pattern, we can end the loop earlier. */
2321
2322 if (optimize
2323 && GET_CODE (insn) == CODE_LABEL
2324 && LABEL_NAME (insn) == NULL
2325 && GET_CODE (PREV_INSN (insn)) == BARRIER)
2326 {
2327 rtx label = insn;
2328 rtx jump = PREV_INSN (PREV_INSN (label));
2329
2330 if (GET_CODE (jump) == JUMP_INSN
2331 && GET_CODE (PATTERN (jump)) == SET
2332 && SET_DEST (PATTERN (jump)) == pc_rtx
2333 && GET_CODE (SET_SRC (PATTERN (jump))) == LABEL_REF
2334 && (XEXP (SET_SRC (PATTERN (jump)), 0)
2335 == loop_stack->data.loop.end_label))
2336 {
2337 rtx prev;
2338
2339 /* The test might be complex and reference LABEL multiple times,
2340 like the loop in loop_iterations to set vtop. To handle this,
2341 we move LABEL. */
2342 insn = PREV_INSN (label);
2343 reorder_insns (label, label, start_label);
2344
2345 for (prev = PREV_INSN (jump); ; prev = PREV_INSN (prev))
2346 {
2347 /* We ignore line number notes, but if we see any other note,
2348 in particular NOTE_INSN_BLOCK_*, NOTE_INSN_EH_REGION_*,
2349 NOTE_INSN_LOOP_*, we disable this optimization. */
2350 if (GET_CODE (prev) == NOTE)
2351 {
2352 if (NOTE_LINE_NUMBER (prev) < 0)
2353 break;
2354 continue;
2355 }
2356 if (GET_CODE (prev) == CODE_LABEL)
2357 break;
2358 if (GET_CODE (prev) == JUMP_INSN)
2359 {
2360 if (GET_CODE (PATTERN (prev)) == SET
2361 && SET_DEST (PATTERN (prev)) == pc_rtx
2362 && GET_CODE (SET_SRC (PATTERN (prev))) == IF_THEN_ELSE
2363 && (GET_CODE (XEXP (SET_SRC (PATTERN (prev)), 1))
2364 == LABEL_REF)
2365 && XEXP (XEXP (SET_SRC (PATTERN (prev)), 1), 0) == label)
2366 {
2367 XEXP (XEXP (SET_SRC (PATTERN (prev)), 1), 0)
2368 = start_label;
2369 emit_note_after (NOTE_INSN_LOOP_END, prev);
2370 needs_end_jump = 0;
2371 }
2372 break;
2373 }
2374 }
2375 }
2376 }
2377
2378 /* If the loop starts with a loop exit, roll that to the end where
2379 it will optimize together with the jump back.
2380
2381 We look for the conditional branch to the exit, except that once
2382 we find such a branch, we don't look past 30 instructions.
2383
2384 In more detail, if the loop presently looks like this (in pseudo-C):
2385
2386 start_label:
2387 if (test) goto end_label;
2388 body;
2389 goto start_label;
2390 end_label:
2391
2392 transform it to look like:
2393
2394 goto start_label;
2395 newstart_label:
2396 body;
2397 start_label:
2398 if (test) goto end_label;
2399 goto newstart_label;
2400 end_label:
2401
2402 Here, the `test' may actually consist of some reasonably complex
2403 code, terminating in a test. */
2404
2405 if (optimize
2406 && needs_end_jump
2407 &&
2408 ! (GET_CODE (insn) == JUMP_INSN
2409 && GET_CODE (PATTERN (insn)) == SET
2410 && SET_DEST (PATTERN (insn)) == pc_rtx
2411 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE))
2412 {
2413 int eh_regions = 0;
2414 int num_insns = 0;
2415 rtx last_test_insn = NULL_RTX;
2416
2417 /* Scan insns from the top of the loop looking for a qualified
2418 conditional exit. */
2419 for (insn = NEXT_INSN (loop_stack->data.loop.start_label); insn;
2420 insn = NEXT_INSN (insn))
2421 {
2422 if (GET_CODE (insn) == NOTE)
2423 {
2424 if (optimize < 2
2425 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2426 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2427 /* The code that actually moves the exit test will
2428 carefully leave BLOCK notes in their original
2429 location. That means, however, that we can't debug
2430 the exit test itself. So, we refuse to move code
2431 containing BLOCK notes at low optimization levels. */
2432 break;
2433
2434 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
2435 ++eh_regions;
2436 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
2437 {
2438 --eh_regions;
2439 if (eh_regions < 0)
2440 /* We've come to the end of an EH region, but
2441 never saw the beginning of that region. That
2442 means that an EH region begins before the top
2443 of the loop, and ends in the middle of it. The
2444 existence of such a situation violates a basic
2445 assumption in this code, since that would imply
2446 that even when EH_REGIONS is zero, we might
2447 move code out of an exception region. */
2448 abort ();
2449 }
2450
2451 /* We must not walk into a nested loop. */
2452 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
2453 break;
2454
2455 /* We already know this INSN is a NOTE, so there's no
2456 point in looking at it to see if it's a JUMP. */
2457 continue;
2458 }
2459
2460 if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == INSN)
2461 num_insns++;
2462
2463 if (last_test_insn && num_insns > 30)
2464 break;
2465
2466 if (eh_regions > 0)
2467 /* We don't want to move a partial EH region. Consider:
2468
2469 while ( ( { try {
2470 if (cond ()) 0;
2471 else {
2472 bar();
2473 1;
2474 }
2475 } catch (...) {
2476 1;
2477 } )) {
2478 body;
2479 }
2480
2481 This isn't legal C++, but here's what it's supposed to
2482 mean: if cond() is true, stop looping. Otherwise,
2483 call bar, and keep looping. In addition, if cond
2484 throws an exception, catch it and keep looping. Such
2485 constructs are certainly legal in LISP.
2486
2487 We should not move the `if (cond()) 0' test since then
2488 the EH-region for the try-block would be broken up.
2489 (In this case we would have the EH_BEG note for the `try'
2490 and `if cond()' but not the call to bar() or the
2491 EH_END note.)
2492
2493 So we don't look for tests within an EH region. */
2494 continue;
2495
2496 if (GET_CODE (insn) == JUMP_INSN
2497 && GET_CODE (PATTERN (insn)) == SET
2498 && SET_DEST (PATTERN (insn)) == pc_rtx)
2499 {
2500 /* This is indeed a jump. */
2501 rtx dest1 = NULL_RTX;
2502 rtx dest2 = NULL_RTX;
2503 rtx potential_last_test;
2504 if (GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE)
2505 {
2506 /* A conditional jump. */
2507 dest1 = XEXP (SET_SRC (PATTERN (insn)), 1);
2508 dest2 = XEXP (SET_SRC (PATTERN (insn)), 2);
2509 potential_last_test = insn;
2510 }
2511 else
2512 {
2513 /* An unconditional jump. */
2514 dest1 = SET_SRC (PATTERN (insn));
2515 /* Include the BARRIER after the JUMP. */
2516 potential_last_test = NEXT_INSN (insn);
2517 }
2518
2519 do {
2520 if (dest1 && GET_CODE (dest1) == LABEL_REF
2521 && ((XEXP (dest1, 0)
2522 == loop_stack->data.loop.alt_end_label)
2523 || (XEXP (dest1, 0)
2524 == loop_stack->data.loop.end_label)))
2525 {
2526 last_test_insn = potential_last_test;
2527 break;
2528 }
2529
2530 /* If this was a conditional jump, there may be
2531 another label at which we should look. */
2532 dest1 = dest2;
2533 dest2 = NULL_RTX;
2534 } while (dest1);
2535 }
2536 }
2537
2538 if (last_test_insn != 0 && last_test_insn != get_last_insn ())
2539 {
2540 /* We found one. Move everything from there up
2541 to the end of the loop, and add a jump into the loop
2542 to jump to there. */
2543 register rtx newstart_label = gen_label_rtx ();
2544 register rtx start_move = start_label;
2545 rtx next_insn;
2546
2547 /* If the start label is preceded by a NOTE_INSN_LOOP_CONT note,
2548 then we want to move this note also. */
2549 if (GET_CODE (PREV_INSN (start_move)) == NOTE
2550 && (NOTE_LINE_NUMBER (PREV_INSN (start_move))
2551 == NOTE_INSN_LOOP_CONT))
2552 start_move = PREV_INSN (start_move);
2553
2554 emit_label_after (newstart_label, PREV_INSN (start_move));
2555
2556 /* Actually move the insns. Start at the beginning, and
2557 keep copying insns until we've copied the
2558 last_test_insn. */
2559 for (insn = start_move; insn; insn = next_insn)
2560 {
2561 /* Figure out which insn comes after this one. We have
2562 to do this before we move INSN. */
2563 if (insn == last_test_insn)
2564 /* We've moved all the insns. */
2565 next_insn = NULL_RTX;
2566 else
2567 next_insn = NEXT_INSN (insn);
2568
2569 if (GET_CODE (insn) == NOTE
2570 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2571 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2572 /* We don't want to move NOTE_INSN_BLOCK_BEGs or
2573 NOTE_INSN_BLOCK_ENDs because the correct generation
2574 of debugging information depends on these appearing
2575 in the same order in the RTL and in the tree
2576 structure, where they are represented as BLOCKs.
2577 So, we don't move block notes. Of course, moving
2578 the code inside the block is likely to make it
2579 impossible to debug the instructions in the exit
2580 test, but such is the price of optimization. */
2581 continue;
2582
2583 /* Move the INSN. */
2584 reorder_insns (insn, insn, get_last_insn ());
2585 }
2586
2587 emit_jump_insn_after (gen_jump (start_label),
2588 PREV_INSN (newstart_label));
2589 emit_barrier_after (PREV_INSN (newstart_label));
2590 start_label = newstart_label;
2591 }
2592 }
2593
2594 if (needs_end_jump)
2595 {
2596 emit_jump (start_label);
2597 emit_note (NULL_PTR, NOTE_INSN_LOOP_END);
2598 }
2599 emit_label (loop_stack->data.loop.end_label);
2600
2601 POPSTACK (loop_stack);
2602
2603 last_expr_type = 0;
2604 }
2605
2606 /* Generate a jump to the current loop's continue-point.
2607 This is usually the top of the loop, but may be specified
2608 explicitly elsewhere. If not currently inside a loop,
2609 return 0 and do nothing; caller will print an error message. */
2610
2611 int
2612 expand_continue_loop (whichloop)
2613 struct nesting *whichloop;
2614 {
2615 last_expr_type = 0;
2616 if (whichloop == 0)
2617 whichloop = loop_stack;
2618 if (whichloop == 0)
2619 return 0;
2620 expand_goto_internal (NULL_TREE, whichloop->data.loop.continue_label,
2621 NULL_RTX);
2622 return 1;
2623 }
2624
2625 /* Generate a jump to exit the current loop. If not currently inside a loop,
2626 return 0 and do nothing; caller will print an error message. */
2627
2628 int
2629 expand_exit_loop (whichloop)
2630 struct nesting *whichloop;
2631 {
2632 last_expr_type = 0;
2633 if (whichloop == 0)
2634 whichloop = loop_stack;
2635 if (whichloop == 0)
2636 return 0;
2637 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label, NULL_RTX);
2638 return 1;
2639 }
2640
2641 /* Generate a conditional jump to exit the current loop if COND
2642 evaluates to zero. If not currently inside a loop,
2643 return 0 and do nothing; caller will print an error message. */
2644
2645 int
2646 expand_exit_loop_if_false (whichloop, cond)
2647 struct nesting *whichloop;
2648 tree cond;
2649 {
2650 rtx label = gen_label_rtx ();
2651 rtx last_insn;
2652 last_expr_type = 0;
2653
2654 if (whichloop == 0)
2655 whichloop = loop_stack;
2656 if (whichloop == 0)
2657 return 0;
2658 /* In order to handle fixups, we actually create a conditional jump
2659 around an unconditional branch to exit the loop. If fixups are
2660 necessary, they go before the unconditional branch. */
2661
2662
2663 do_jump (cond, NULL_RTX, label);
2664 last_insn = get_last_insn ();
2665 if (GET_CODE (last_insn) == CODE_LABEL)
2666 whichloop->data.loop.alt_end_label = last_insn;
2667 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label,
2668 NULL_RTX);
2669 emit_label (label);
2670
2671 return 1;
2672 }
2673
2674 /* Return nonzero if the loop nest is empty. Else return zero. */
2675
2676 int
2677 stmt_loop_nest_empty ()
2678 {
2679 /* cfun->stmt can be NULL if we are building a call to get the
2680 EH context for a setjmp/longjmp EH target and the current
2681 function was a deferred inline function. */
2682 return (cfun->stmt == NULL || loop_stack == NULL);
2683 }
2684
2685 /* Return non-zero if we should preserve sub-expressions as separate
2686 pseudos. We never do so if we aren't optimizing. We always do so
2687 if -fexpensive-optimizations.
2688
2689 Otherwise, we only do so if we are in the "early" part of a loop. I.e.,
2690 the loop may still be a small one. */
2691
2692 int
2693 preserve_subexpressions_p ()
2694 {
2695 rtx insn;
2696
2697 if (flag_expensive_optimizations)
2698 return 1;
2699
2700 if (optimize == 0 || cfun == 0 || cfun->stmt == 0 || loop_stack == 0)
2701 return 0;
2702
2703 insn = get_last_insn_anywhere ();
2704
2705 return (insn
2706 && (INSN_UID (insn) - INSN_UID (loop_stack->data.loop.start_label)
2707 < n_non_fixed_regs * 3));
2708
2709 }
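/* For instance (illustrative numbers): on a target with 30 non-fixed
   registers the cutoff is 90, so only insns whose UIDs fall within
   about 90 of the loop's start label count as "early".  */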
2710
2711 /* Generate a jump to exit the current loop, conditional, binding contour
2712 or case statement. Not all such constructs are visible to this function,
2713 only those started with EXIT_FLAG nonzero. Individual languages use
2714 the EXIT_FLAG parameter to control which kinds of constructs you can
2715 exit this way.
2716
2717 If not currently inside anything that can be exited,
2718 return 0 and do nothing; caller will print an error message. */
2719
2720 int
2721 expand_exit_something ()
2722 {
2723 struct nesting *n;
2724 last_expr_type = 0;
2725 for (n = nesting_stack; n; n = n->all)
2726 if (n->exit_label != 0)
2727 {
2728 expand_goto_internal (NULL_TREE, n->exit_label, NULL_RTX);
2729 return 1;
2730 }
2731
2732 return 0;
2733 }
2734 \f
2735 /* Generate RTL to return from the current function, with no value.
2736 (That is, we do not do anything about returning any value.) */
2737
2738 void
2739 expand_null_return ()
2740 {
2741 struct nesting *block = block_stack;
2742 rtx last_insn = get_last_insn ();
2743
2744 /* If this function was declared to return a value, but we
2745 didn't, clobber the return registers so that they are not
2746 propagated live to the rest of the function. */
2747 clobber_return_register ();
2748
2749 /* Does any pending block have cleanups? */
2750 while (block && block->data.block.cleanups == 0)
2751 block = block->next;
2752
2753 /* If yes, use a goto to return, since that runs cleanups. */
2754
2755 expand_null_return_1 (last_insn, block != 0);
2756 }
2757
2758 /* Generate RTL to return from the current function, with value VAL. */
2759
2760 static void
2761 expand_value_return (val)
2762 rtx val;
2763 {
2764 struct nesting *block = block_stack;
2765 rtx last_insn = get_last_insn ();
2766 rtx return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
2767
2768 /* Copy the value to the return location
2769 unless it's already there. */
2770
2771 if (return_reg != val)
2772 {
2773 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
2774 #ifdef PROMOTE_FUNCTION_RETURN
2775 int unsignedp = TREE_UNSIGNED (type);
2776 enum machine_mode old_mode
2777 = DECL_MODE (DECL_RESULT (current_function_decl));
2778 enum machine_mode mode
2779 = promote_mode (type, old_mode, &unsignedp, 1);
2780
2781 if (mode != old_mode)
2782 val = convert_modes (mode, old_mode, val, unsignedp);
2783 #endif
2784 if (GET_CODE (return_reg) == PARALLEL)
2785 emit_group_load (return_reg, val, int_size_in_bytes (type),
2786 TYPE_ALIGN (type));
2787 else
2788 emit_move_insn (return_reg, val);
2789 }
2790
2791 /* Does any pending block have cleanups? */
2792
2793 while (block && block->data.block.cleanups == 0)
2794 block = block->next;
2795
2796 /* If yes, use a goto to return, since that runs cleanups.
2797 Use LAST_INSN to put cleanups *before* the move insn emitted above. */
2798
2799 expand_null_return_1 (last_insn, block != 0);
2800 }
2801
2802 /* Output a return with no value. If LAST_INSN is nonzero,
2803 pretend that the return takes place after LAST_INSN.
2804 If USE_GOTO is nonzero then don't use a return instruction;
2805 go to the return label instead. This causes any cleanups
2806 of pending blocks to be executed normally. */
2807
2808 static void
2809 expand_null_return_1 (last_insn, use_goto)
2810 rtx last_insn;
2811 int use_goto;
2812 {
2813 rtx end_label = cleanup_label ? cleanup_label : return_label;
2814
2815 clear_pending_stack_adjust ();
2816 do_pending_stack_adjust ();
2817 last_expr_type = 0;
2818
2819 /* PCC-struct return always uses an epilogue. */
2820 if (current_function_returns_pcc_struct || use_goto)
2821 {
2822 if (end_label == 0)
2823 end_label = return_label = gen_label_rtx ();
2824 expand_goto_internal (NULL_TREE, end_label, last_insn);
2825 return;
2826 }
2827
2828 /* Otherwise output a simple return-insn if one is available,
2829 unless it won't do the job. */
2830 #ifdef HAVE_return
2831 if (HAVE_return && use_goto == 0 && cleanup_label == 0)
2832 {
2833 emit_jump_insn (gen_return ());
2834 emit_barrier ();
2835 return;
2836 }
2837 #endif
2838
2839 /* Otherwise jump to the epilogue. */
2840 expand_goto_internal (NULL_TREE, end_label, last_insn);
2841 }
2842 \f
2843 /* Generate RTL to evaluate the expression RETVAL and return it
2844 from the current function. */
2845
2846 void
2847 expand_return (retval)
2848 tree retval;
2849 {
2850 /* If there are any cleanups to be performed, then they will
2851 be inserted following LAST_INSN. It is desirable
2852 that the last_insn, for such purposes, should be the
2853 last insn before computing the return value. Otherwise, cleanups
2854 which call functions can clobber the return value. */
2855 /* ??? rms: I think that is erroneous, because in C++ it would
2856 run destructors on variables that might be used in the subsequent
2857 computation of the return value. */
2858 rtx last_insn = 0;
2859 rtx result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
2860 register rtx val = 0;
2861 tree retval_rhs;
2862 int cleanups;
2863
2864 /* If function wants no value, give it none. */
2865 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
2866 {
2867 expand_expr (retval, NULL_RTX, VOIDmode, 0);
2868 emit_queue ();
2869 expand_null_return ();
2870 return;
2871 }
2872
2873 /* Are any cleanups needed? E.g. C++ destructors to be run? */
2874 /* This is not sufficient. We also need to watch for cleanups of the
2875 expression we are about to expand. Unfortunately, we cannot know
2876 if it has cleanups until we expand it, and we want to change how we
2877 expand it depending upon if we need cleanups. We can't win. */
2878 #if 0
2879 cleanups = any_pending_cleanups (1);
2880 #else
2881 cleanups = 1;
2882 #endif
2883
2884 if (retval == error_mark_node)
2885 retval_rhs = NULL_TREE;
2886 else if (TREE_CODE (retval) == RESULT_DECL)
2887 retval_rhs = retval;
2888 else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
2889 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
2890 retval_rhs = TREE_OPERAND (retval, 1);
2891 else if (VOID_TYPE_P (TREE_TYPE (retval)))
2892 /* Recognize tail-recursive call to void function. */
2893 retval_rhs = retval;
2894 else
2895 retval_rhs = NULL_TREE;
2896
2897 /* Only use `last_insn' if there are cleanups which must be run. */
2898 if (cleanups || cleanup_label != 0)
2899 last_insn = get_last_insn ();
2900
2901 /* Distribute return down conditional expr if either of the sides
2902 may involve tail recursion (see test below). This enhances the number
2903 of tail recursions we see. Don't do this always since it can produce
2904 sub-optimal code in some cases and we distribute assignments into
2905 conditional expressions when it would help. */
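/* E.g. (a sketch): `return p ? f (x) : g (x);' is handled below as
   if it were `if (p) return f (x); else return g (x);', so that each
   arm can be recognized separately as a tail call.  */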
2906
2907 if (optimize && retval_rhs != 0
2908 && frame_offset == 0
2909 && TREE_CODE (retval_rhs) == COND_EXPR
2910 && (TREE_CODE (TREE_OPERAND (retval_rhs, 1)) == CALL_EXPR
2911 || TREE_CODE (TREE_OPERAND (retval_rhs, 2)) == CALL_EXPR))
2912 {
2913 rtx label = gen_label_rtx ();
2914 tree expr;
2915
2916 do_jump (TREE_OPERAND (retval_rhs, 0), label, NULL_RTX);
2917 start_cleanup_deferral ();
2918 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
2919 DECL_RESULT (current_function_decl),
2920 TREE_OPERAND (retval_rhs, 1));
2921 TREE_SIDE_EFFECTS (expr) = 1;
2922 expand_return (expr);
2923 emit_label (label);
2924
2925 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
2926 DECL_RESULT (current_function_decl),
2927 TREE_OPERAND (retval_rhs, 2));
2928 TREE_SIDE_EFFECTS (expr) = 1;
2929 expand_return (expr);
2930 end_cleanup_deferral ();
2931 return;
2932 }
2933
2934 /* If the result is an aggregate that is being returned in one (or more)
2935 registers, load the registers here. The compiler currently can't handle
2936 copying a BLKmode value into registers. We could put this code in a
2937 more general area (for use by everyone instead of just function
2938 call/return), but until this feature is generally usable it is kept here
2939 (and in expand_call). The value must go into a pseudo in case there
2940 are cleanups that will clobber the real return register. */
2941
2942 if (retval_rhs != 0
2943 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
2944 && GET_CODE (result_rtl) == REG)
2945 {
2946 int i;
2947 unsigned HOST_WIDE_INT bitpos, xbitpos;
2948 unsigned HOST_WIDE_INT big_endian_correction = 0;
2949 unsigned HOST_WIDE_INT bytes
2950 = int_size_in_bytes (TREE_TYPE (retval_rhs));
2951 int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2952 unsigned int bitsize
2953 = MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)), BITS_PER_WORD);
2954 rtx *result_pseudos = (rtx *) alloca (sizeof (rtx) * n_regs);
2955 rtx result_reg, src = NULL_RTX, dst = NULL_RTX;
2956 rtx result_val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
2957 enum machine_mode tmpmode, result_reg_mode;
2958
2959 /* Structures whose size is not a multiple of a word are aligned
2960 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2961 machine, this means we must skip the empty high order bytes when
2962 calculating the bit offset. */
2963 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2964 big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2965 * BITS_PER_UNIT));
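/* For instance (illustrative): with 32-bit words, a 6-byte
   structure has bytes % UNITS_PER_WORD == 2, so the correction
   is 32 - 2*8 == 16 bits of empty high-order space to skip.  */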
2966
2967 /* Copy the structure BITSIZE bits at a time. */
2968 for (bitpos = 0, xbitpos = big_endian_correction;
2969 bitpos < bytes * BITS_PER_UNIT;
2970 bitpos += bitsize, xbitpos += bitsize)
2971 {
2972 /* We need a new destination pseudo each time xbitpos is
2973 on a word boundary and when xbitpos == big_endian_correction
2974 (the first time through). */
2975 if (xbitpos % BITS_PER_WORD == 0
2976 || xbitpos == big_endian_correction)
2977 {
2978 /* Generate an appropriate register. */
2979 dst = gen_reg_rtx (word_mode);
2980 result_pseudos[xbitpos / BITS_PER_WORD] = dst;
2981
2982 /* Clobber the destination before we move anything into it. */
2983 emit_insn (gen_rtx_CLOBBER (VOIDmode, dst));
2984 }
2985
2986 /* We need a new source operand each time bitpos is on a word
2987 boundary. */
2988 if (bitpos % BITS_PER_WORD == 0)
2989 src = operand_subword_force (result_val,
2990 bitpos / BITS_PER_WORD,
2991 BLKmode);
2992
2993 /* Use bitpos for the source extraction (left justified) and
2994 xbitpos for the destination store (right justified). */
2995 store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
2996 extract_bit_field (src, bitsize,
2997 bitpos % BITS_PER_WORD, 1,
2998 NULL_RTX, word_mode, word_mode,
2999 bitsize, BITS_PER_WORD),
3000 bitsize, BITS_PER_WORD);
3001 }
3002
3003 /* Find the smallest integer mode large enough to hold the
3004 entire structure and use that mode instead of BLKmode
3005 on the USE insn for the return register. */
3006 bytes = int_size_in_bytes (TREE_TYPE (retval_rhs));
3007 for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3008 tmpmode != VOIDmode;
3009 tmpmode = GET_MODE_WIDER_MODE (tmpmode))
3010 {
3011 /* Have we found a large enough mode? */
3012 if (GET_MODE_SIZE (tmpmode) >= bytes)
3013 break;
3014 }
3015
3016 /* No suitable mode found. */
3017 if (tmpmode == VOIDmode)
3018 abort ();
3019
3020 PUT_MODE (result_rtl, tmpmode);
3021
3022 if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
3023 result_reg_mode = word_mode;
3024 else
3025 result_reg_mode = tmpmode;
3026 result_reg = gen_reg_rtx (result_reg_mode);
3027
3028 emit_queue ();
3029 for (i = 0; i < n_regs; i++)
3030 emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
3031 result_pseudos[i]);
3032
3033 if (tmpmode != result_reg_mode)
3034 result_reg = gen_lowpart (tmpmode, result_reg);
3035
3036 expand_value_return (result_reg);
3037 }
3038 else if (cleanups
3039 && retval_rhs != 0
3040 && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
3041 && (GET_CODE (result_rtl) == REG
3042 || (GET_CODE (result_rtl) == PARALLEL)))
3043 {
3044 /* Calculate the return value into a temporary (usually a pseudo
3045 reg). */
3046 val = assign_temp (TREE_TYPE (DECL_RESULT (current_function_decl)),
3047 0, 0, 1);
3048 val = expand_expr (retval_rhs, val, GET_MODE (val), 0);
3049 val = force_not_mem (val);
3050 emit_queue ();
3051 /* Return the calculated value, doing cleanups first. */
3052 expand_value_return (val);
3053 }
3054 else
3055 {
3056 /* No cleanups or no hard reg used;
3057 calculate value into hard return reg. */
3058 expand_expr (retval, const0_rtx, VOIDmode, 0);
3059 emit_queue ();
3060 expand_value_return (result_rtl);
3061 }
3062 }
3063
3064 /* Return 1 if the end of the generated RTX is not a barrier.
3065 This means code already compiled can drop through. */
3066
3067 int
3068 drop_through_at_end_p ()
3069 {
3070 rtx insn = get_last_insn ();
3071 while (insn && GET_CODE (insn) == NOTE)
3072 insn = PREV_INSN (insn);
3073 return insn && GET_CODE (insn) != BARRIER;
3074 }
3075 \f
3076 /* Attempt to optimize a potential tail recursion call into a goto.
3077 ARGUMENTS are the arguments to a CALL_EXPR; LAST_INSN indicates
3078 where to place the jump to the tail recursion label.
3079
3080 Return TRUE if the call was optimized into a goto. */
3081
3082 int
3083 optimize_tail_recursion (arguments, last_insn)
3084 tree arguments;
3085 rtx last_insn;
3086 {
3087 /* Finish checking validity, and if valid emit code to set the
3088 argument variables for the new call. */
3089 if (tail_recursion_args (arguments, DECL_ARGUMENTS (current_function_decl)))
3090 {
3091 if (tail_recursion_label == 0)
3092 {
3093 tail_recursion_label = gen_label_rtx ();
3094 emit_label_after (tail_recursion_label,
3095 tail_recursion_reentry);
3096 }
3097 emit_queue ();
3098 expand_goto_internal (NULL_TREE, tail_recursion_label, last_insn);
3099 emit_barrier ();
3100 return 1;
3101 }
3102 return 0;
3103 }
3104
3105 /* Emit code to alter this function's formal parms for a tail-recursive call.
3106 ACTUALS is a list of actual parameter expressions (chain of TREE_LISTs).
3107 FORMALS is the chain of decls of formals.
3108 Return 1 if this can be done;
3109 otherwise return 0 and do not emit any code. */
3110
3111 static int
3112 tail_recursion_args (actuals, formals)
3113 tree actuals, formals;
3114 {
3115 register tree a = actuals, f = formals;
3116 register int i;
3117 register rtx *argvec;
3118
3119 /* Check that number and types of actuals are compatible
3120 with the formals. This is not always true in valid C code.
3121 Also check that no formal needs to be addressable
3122 and that all formals are scalars. */
3123
3124 /* Also count the args. */
3125
3126 for (a = actuals, f = formals, i = 0; a && f; a = TREE_CHAIN (a), f = TREE_CHAIN (f), i++)
3127 {
3128 if (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_VALUE (a)))
3129 != TYPE_MAIN_VARIANT (TREE_TYPE (f)))
3130 return 0;
3131 if (GET_CODE (DECL_RTL (f)) != REG || DECL_MODE (f) == BLKmode)
3132 return 0;
3133 }
3134 if (a != 0 || f != 0)
3135 return 0;
3136
3137 /* Compute all the actuals. */
3138
3139 argvec = (rtx *) alloca (i * sizeof (rtx));
3140
3141 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
3142 argvec[i] = expand_expr (TREE_VALUE (a), NULL_RTX, VOIDmode, 0);
3143
3144 /* Find which actual values refer to current values of previous formals.
3145 Copy each of them now, before any formal is changed. */
3146
3147 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
3148 {
3149 int copy = 0;
3150 register int j;
3151 for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++)
3152 if (reg_mentioned_p (DECL_RTL (f), argvec[i]))
3153 { copy = 1; break; }
3154 if (copy)
3155 argvec[i] = copy_to_reg (argvec[i]);
3156 }
3157
3158 /* Store the values of the actuals into the formals. */
3159
3160 for (f = formals, a = actuals, i = 0; f;
3161 f = TREE_CHAIN (f), a = TREE_CHAIN (a), i++)
3162 {
3163 if (GET_MODE (DECL_RTL (f)) == GET_MODE (argvec[i]))
3164 emit_move_insn (DECL_RTL (f), argvec[i]);
3165 else
3166 convert_move (DECL_RTL (f), argvec[i],
3167 TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a))));
3168 }
3169
3170 free_temp_slots ();
3171 return 1;
3172 }
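/* Illustrative case (not from this file): for a swap such as

	int f (int a, int b) { ... return f (b, a); }

   the new value of `b' is the old `a', but `a' is stored first;
   the copy pass above therefore moves the actual value that still
   mentions `a' into a fresh pseudo before any formal is changed.  */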
3173 \f
3174 /* Generate the RTL code for entering a binding contour.
3175 The variables are declared one by one, by calls to `expand_decl'.
3176
3177 FLAGS is a bitwise or of the following flags:
3178
3179 1 - Nonzero if this construct should be visible to
3180 `exit_something'.
3181
3182 2 - Nonzero if this contour does not require a
3183 NOTE_INSN_BLOCK_BEG note. Virtually all calls from
3184 language-independent code should set this flag because they
3185 will not create corresponding BLOCK nodes. (There should be
3186 a one-to-one correspondence between NOTE_INSN_BLOCK_BEG notes
3187 and BLOCKs.) If this flag is set, MARK_ENDS should be zero
3188 when expand_end_bindings is called.
3189
3190 If we are creating a NOTE_INSN_BLOCK_BEG note, a BLOCK may
3191 optionally be supplied. If so, it becomes the NOTE_BLOCK for the
3192 note. */
3193
3194 void
3195 expand_start_bindings_and_block (flags, block)
3196 int flags;
3197 tree block;
3198 {
3199 struct nesting *thisblock = ALLOC_NESTING ();
3200 rtx note;
3201 int exit_flag = ((flags & 1) != 0);
3202 int block_flag = ((flags & 2) == 0);
3203
3204 /* If a BLOCK is supplied, then the caller should be requesting a
3205 NOTE_INSN_BLOCK_BEG note. */
3206 if (!block_flag && block)
3207 abort ();
3208
3209 /* Create a note to mark the beginning of the block. */
3210 if (block_flag)
3211 {
3212 note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
3213 NOTE_BLOCK (note) = block;
3214 }
3215 else
3216 note = emit_note (NULL_PTR, NOTE_INSN_DELETED);
3217
3218 /* Make an entry on block_stack for the block we are entering. */
3219
3220 thisblock->next = block_stack;
3221 thisblock->all = nesting_stack;
3222 thisblock->depth = ++nesting_depth;
3223 thisblock->data.block.stack_level = 0;
3224 thisblock->data.block.cleanups = 0;
3225 thisblock->data.block.n_function_calls = 0;
3226 thisblock->data.block.exception_region = 0;
3227 thisblock->data.block.block_target_temp_slot_level = target_temp_slot_level;
3228
3229 thisblock->data.block.conditional_code = 0;
3230 thisblock->data.block.last_unconditional_cleanup = note;
3231 /* When we insert instructions after the last unconditional cleanup,
3232 we don't adjust last_insn. That means that a later add_insn will
3233 clobber the instructions we've just added. The easiest way to
3234 fix this is to just insert another instruction here, so that the
3235 instructions inserted after the last unconditional cleanup are
3236 never the last instruction. */
3237 emit_note (NULL_PTR, NOTE_INSN_DELETED);
3238 thisblock->data.block.cleanup_ptr = &thisblock->data.block.cleanups;
3239
3240 if (block_stack
3241 && !(block_stack->data.block.cleanups == NULL_TREE
3242 && block_stack->data.block.outer_cleanups == NULL_TREE))
3243 thisblock->data.block.outer_cleanups
3244 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
3245 block_stack->data.block.outer_cleanups);
3246 else
3247 thisblock->data.block.outer_cleanups = 0;
3248 thisblock->data.block.label_chain = 0;
3249 thisblock->data.block.innermost_stack_block = stack_block_stack;
3250 thisblock->data.block.first_insn = note;
3251 thisblock->data.block.block_start_count = ++current_block_start_count;
3252 thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
3253 block_stack = thisblock;
3254 nesting_stack = thisblock;
3255
3256 /* Make a new level for allocating stack slots. */
3257 push_temp_slots ();
3258 }
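/* E.g. (sketch, mirroring the calls made later in this file):
   language-independent code writes

	expand_start_bindings (2);
	...
	expand_end_bindings (NULL_TREE, 0, 0);

   while a front end that has a real BLOCK node passes it in so that
   it becomes the NOTE_BLOCK of the NOTE_INSN_BLOCK_BEG note.  */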
3259
3260 /* Specify the scope of temporaries created by TARGET_EXPRs. Similar
3261 to CLEANUP_POINT_EXPR, but handles cases when a series of calls to
3262 expand_expr are made. After we end the region, we know that all
3263 space for all temporaries that were created by TARGET_EXPRs will be
3264 destroyed and their space freed for reuse. */
3265
3266 void
3267 expand_start_target_temps ()
3268 {
3269 /* This is so that even if the result is preserved, the space
3270 allocated will be freed, as we know that it is no longer in use. */
3271 push_temp_slots ();
3272
3273 /* Start a new binding layer that will keep track of all cleanup
3274 actions to be performed. */
3275 expand_start_bindings (2);
3276
3277 target_temp_slot_level = temp_slot_level;
3278 }
3279
3280 void
3281 expand_end_target_temps ()
3282 {
3283 expand_end_bindings (NULL_TREE, 0, 0);
3284
3285 /* This is so that even if the result is preserved, the space
3286 allocated will be freed, as we know that it is no longer in use. */
3287 pop_temp_slots ();
3288 }
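/* Usage sketch (hypothetical caller): wrapping the expansion of a
   single full statement so that TARGET_EXPR temporaries die at its
   end:

	expand_start_target_temps ();
	expand_expr_stmt (stmt_tree);
	expand_end_target_temps ();  */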
3289
3290 /* Given a pointer to a BLOCK node return non-zero if (and only if) the node
3291 in question represents the outermost pair of curly braces (i.e. the "body
3292 block") of a function or method.
3293
3294 For any BLOCK node representing a "body block" of a function or method, the
3295 BLOCK_SUPERCONTEXT of the node will point to another BLOCK node which
3296 represents the outermost (function) scope for the function or method (i.e.
3297 the one which includes the formal parameters). The BLOCK_SUPERCONTEXT of
3298 *that* node in turn will point to the relevant FUNCTION_DECL node. */
3299
3300 int
3301 is_body_block (stmt)
3302 register tree stmt;
3303 {
3304 if (TREE_CODE (stmt) == BLOCK)
3305 {
3306 tree parent = BLOCK_SUPERCONTEXT (stmt);
3307
3308 if (parent && TREE_CODE (parent) == BLOCK)
3309 {
3310 tree grandparent = BLOCK_SUPERCONTEXT (parent);
3311
3312 if (grandparent && TREE_CODE (grandparent) == FUNCTION_DECL)
3313 return 1;
3314 }
3315 }
3316
3317 return 0;
3318 }
3319
3320 /* Mark top block of block_stack as an implicit binding for an
3321 exception region. This is used to prevent infinite recursion when
3322 ending a binding with expand_end_bindings. It is only ever called
3323 by expand_eh_region_start, as that is the only way to create a
3324 block stack for an exception region. */
3325
3326 void
3327 mark_block_as_eh_region ()
3328 {
3329 block_stack->data.block.exception_region = 1;
3330 if (block_stack->next
3331 && block_stack->next->data.block.conditional_code)
3332 {
3333 block_stack->data.block.conditional_code
3334 = block_stack->next->data.block.conditional_code;
3335 block_stack->data.block.last_unconditional_cleanup
3336 = block_stack->next->data.block.last_unconditional_cleanup;
3337 block_stack->data.block.cleanup_ptr
3338 = block_stack->next->data.block.cleanup_ptr;
3339 }
3340 }
3341
3342 /* True if we are currently emitting insns in an area of output code
3343 that is controlled by a conditional expression. This is used by
3344 the cleanup handling code to generate conditional cleanup actions. */
3345
3346 int
3347 conditional_context ()
3348 {
3349 return block_stack && block_stack->data.block.conditional_code;
3350 }
3351
3352 /* Mark top block of block_stack as not for an implicit binding for an
3353 exception region. This is only ever done by expand_eh_region_end
3354 to let expand_end_bindings know that it is being called explicitly
3355 to end the binding layer for just the binding layer associated with
3356 the exception region, otherwise expand_end_bindings would try and
3357 end all implicit binding layers for exceptions regions, and then
3358 one normal binding layer. */
3359
3360 void
3361 mark_block_as_not_eh_region ()
3362 {
3363 block_stack->data.block.exception_region = 0;
3364 }
3365
3366 /* True if the top block of block_stack was marked as for an exception
3367 region by mark_block_as_eh_region. */
3368
3369 int
3370 is_eh_region ()
3371 {
3372 return cfun && block_stack && block_stack->data.block.exception_region;
3373 }
3374
3375 /* Emit a handler label for a nonlocal goto handler.
3376 Also emit code to store the handler label in SLOT before BEFORE_INSN. */
3377
3378 static rtx
3379 expand_nl_handler_label (slot, before_insn)
3380 rtx slot, before_insn;
3381 {
3382 rtx insns;
3383 rtx handler_label = gen_label_rtx ();
3384
3385 /* Don't let jump_optimize delete the handler. */
3386 LABEL_PRESERVE_P (handler_label) = 1;
3387
3388 start_sequence ();
3389 emit_move_insn (slot, gen_rtx_LABEL_REF (Pmode, handler_label));
3390 insns = get_insns ();
3391 end_sequence ();
3392 emit_insns_before (insns, before_insn);
3393
3394 emit_label (handler_label);
3395
3396 return handler_label;
3397 }
3398
3399 /* Emit code to restore vital registers at the beginning of a nonlocal goto
3400 handler. */
3401 static void
3402 expand_nl_goto_receiver ()
3403 {
3404 #ifdef HAVE_nonlocal_goto
3405 if (! HAVE_nonlocal_goto)
3406 #endif
3407 /* First adjust our frame pointer to its actual value. It was
3408 previously set to the start of the virtual area corresponding to
3409 the stacked variables when we branched here and now needs to be
3410 adjusted to the actual hardware fp value.
3411
3412 Assignments to virtual registers are converted by
3413 instantiate_virtual_regs into the corresponding assignment
3414 to the underlying register (fp in this case) that makes
3415 the original assignment true.
3416 So the following insn will actually be
3417 decrementing fp by STARTING_FRAME_OFFSET. */
3418 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
3419
3420 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3421 if (fixed_regs[ARG_POINTER_REGNUM])
3422 {
3423 #ifdef ELIMINABLE_REGS
3424 /* If the argument pointer can be eliminated in favor of the
3425 frame pointer, we don't need to restore it. We assume here
3426 that if such an elimination is present, it can always be used.
3427 This is the case on all known machines; if we don't make this
3428 assumption, we do unnecessary saving on many machines. */
3429 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
3430 size_t i;
3431
3432 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
3433 if (elim_regs[i].from == ARG_POINTER_REGNUM
3434 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
3435 break;
3436
3437 if (i == ARRAY_SIZE (elim_regs))
3438 #endif
3439 {
3440 /* Now restore our arg pointer from the address at which it
3441 was saved in our stack frame.
3442 If there hasn't been space allocated for it yet, make
3443 some now. */
3444 if (arg_pointer_save_area == 0)
3445 arg_pointer_save_area
3446 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
3447 emit_move_insn (virtual_incoming_args_rtx,
3448 /* We need a pseudo here, or else
3449 instantiate_virtual_regs_1 complains. */
3450 copy_to_reg (arg_pointer_save_area));
3451 }
3452 }
3453 #endif
3454
3455 #ifdef HAVE_nonlocal_goto_receiver
3456 if (HAVE_nonlocal_goto_receiver)
3457 emit_insn (gen_nonlocal_goto_receiver ());
3458 #endif
3459 }
3460
3461 /* Make handlers for nonlocal gotos taking place in the function calls in
3462 block THISBLOCK. */
3463
3464 static void
3465 expand_nl_goto_receivers (thisblock)
3466 struct nesting *thisblock;
3467 {
3468 tree link;
3469 rtx afterward = gen_label_rtx ();
3470 rtx insns, slot;
3471 rtx label_list;
3472 int any_invalid;
3473
3474 /* Record the handler address in the stack slot for that purpose,
3475 during this block, saving and restoring the outer value. */
3476 if (thisblock->next != 0)
3477 for (slot = nonlocal_goto_handler_slots; slot; slot = XEXP (slot, 1))
3478 {
3479 rtx save_receiver = gen_reg_rtx (Pmode);
3480 emit_move_insn (XEXP (slot, 0), save_receiver);
3481
3482 start_sequence ();
3483 emit_move_insn (save_receiver, XEXP (slot, 0));
3484 insns = get_insns ();
3485 end_sequence ();
3486 emit_insns_before (insns, thisblock->data.block.first_insn);
3487 }
3488
3489 /* Jump around the handlers; they run only when specially invoked. */
3490 emit_jump (afterward);
3491
3492 /* Make a separate handler for each label. */
3493 link = nonlocal_labels;
3494 slot = nonlocal_goto_handler_slots;
3495 label_list = NULL_RTX;
3496 for (; link; link = TREE_CHAIN (link), slot = XEXP (slot, 1))
3497 /* Skip any labels we shouldn't be able to jump to from here;
3498 we generate one special handler for all of them below, which just
3499 calls abort. */
3500 if (! DECL_TOO_LATE (TREE_VALUE (link)))
3501 {
3502 rtx lab;
3503 lab = expand_nl_handler_label (XEXP (slot, 0),
3504 thisblock->data.block.first_insn);
3505 label_list = gen_rtx_EXPR_LIST (VOIDmode, lab, label_list);
3506
3507 expand_nl_goto_receiver ();
3508
3509 /* Jump to the "real" nonlocal label. */
3510 expand_goto (TREE_VALUE (link));
3511 }
3512
3513 /* A second pass over all nonlocal labels; this time we handle those
3514 we should not be able to jump to at this point. */
3515 link = nonlocal_labels;
3516 slot = nonlocal_goto_handler_slots;
3517 any_invalid = 0;
3518 for (; link; link = TREE_CHAIN (link), slot = XEXP (slot, 1))
3519 if (DECL_TOO_LATE (TREE_VALUE (link)))
3520 {
3521 rtx lab;
3522 lab = expand_nl_handler_label (XEXP (slot, 0),
3523 thisblock->data.block.first_insn);
3524 label_list = gen_rtx_EXPR_LIST (VOIDmode, lab, label_list);
3525 any_invalid = 1;
3526 }
3527
3528 if (any_invalid)
3529 {
3530 expand_nl_goto_receiver ();
3531 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "abort"), 0,
3532 VOIDmode, 0);
3533 emit_barrier ();
3534 }
3535
3536 nonlocal_goto_handler_labels = label_list;
3537 emit_label (afterward);
3538 }
3539
3540 /* Warn about any unused VARS (which may contain nodes other than
3541 VAR_DECLs, but such nodes are ignored). The nodes are connected
3542 via the TREE_CHAIN field. */
3543
3544 void
3545 warn_about_unused_variables (vars)
3546 tree vars;
3547 {
3548 tree decl;
3549
3550 if (warn_unused_variable)
3551 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3552 if (TREE_CODE (decl) == VAR_DECL
3553 && ! TREE_USED (decl)
3554 && ! DECL_IN_SYSTEM_HEADER (decl)
3555 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
3556 warning_with_decl (decl, "unused variable `%s'");
3557 }
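/* Illustrative example (not from the sources): with -Wunused-variable,

	void f () { int n; }

   draws "unused variable `n'", while nameless, artificial, and
   system-header decls are deliberately exempted by the tests above.  */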
3558
3559 /* Generate RTL code to terminate a binding contour.
3560
3561 VARS is the chain of VAR_DECL nodes for the variables bound in this
3562 contour. There may actually be other nodes in this chain, but any
3563 nodes other than VAR_DECLS are ignored.
3564
3565 MARK_ENDS is nonzero if we should put a note at the beginning
3566 and end of this binding contour.
3567
3568 DONT_JUMP_IN is nonzero if it is not valid to jump into this contour.
3569 (That is true automatically if the contour has a saved stack level.) */
3570
3571 void
3572 expand_end_bindings (vars, mark_ends, dont_jump_in)
3573 tree vars;
3574 int mark_ends;
3575 int dont_jump_in;
3576 {
3577 register struct nesting *thisblock;
3578
3579 while (block_stack->data.block.exception_region)
3580 {
3581 /* Because we don't need or want a new temporary level and
3582 because we didn't create one in expand_eh_region_start,
3583 create a fake one now to avoid removing one in
3584 expand_end_bindings. */
3585 push_temp_slots ();
3586
3587 block_stack->data.block.exception_region = 0;
3588
3589 expand_end_bindings (NULL_TREE, 0, 0);
3590 }
3591
3592 /* Since expand_eh_region_start does an expand_start_bindings, we
3593 have to first end all the bindings that were created by
3594 expand_eh_region_start. */
3595
3596 thisblock = block_stack;
3597
3598 /* If any of the variables in this scope were not used, warn the
3599 user. */
3600 warn_about_unused_variables (vars);
3601
3602 if (thisblock->exit_label)
3603 {
3604 do_pending_stack_adjust ();
3605 emit_label (thisblock->exit_label);
3606 }
3607
3608 /* If necessary, make handlers for nonlocal gotos taking
3609 place in the function calls in this block. */
3610 if (function_call_count != thisblock->data.block.n_function_calls
3611 && nonlocal_labels
3612 /* Make handler for outermost block
3613 if there were any nonlocal gotos to this function. */
3614 && (thisblock->next == 0 ? current_function_has_nonlocal_label
3615 /* Make handler for inner block if it has something
3616 special to do when you jump out of it. */
3617 : (thisblock->data.block.cleanups != 0
3618 || thisblock->data.block.stack_level != 0)))
3619 expand_nl_goto_receivers (thisblock);
3620
3621 /* Don't allow jumping into a block that has a stack level.
3622 Cleanups are allowed, though. */
3623 if (dont_jump_in
3624 || thisblock->data.block.stack_level != 0)
3625 {
3626 struct label_chain *chain;
3627
3628 /* Any labels in this block are no longer valid to go to.
3629 Mark them to cause an error message. */
3630 for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
3631 {
3632 DECL_TOO_LATE (chain->label) = 1;
3633 /* If any goto without a fixup came to this label,
3634 that must be an error, because gotos without fixups
3635 come from outside all saved stack-levels. */
3636 if (TREE_ADDRESSABLE (chain->label))
3637 error_with_decl (chain->label,
3638 "label `%s' used before containing binding contour");
3639 }
3640 }
3641
3642 /* Restore stack level in effect before the block
3643 (only if variable-size objects allocated). */
3644 /* Perform any cleanups associated with the block. */
3645
3646 if (thisblock->data.block.stack_level != 0
3647 || thisblock->data.block.cleanups != 0)
3648 {
3649 int reachable;
3650 rtx insn;
3651
3652 /* Don't let cleanups affect ({...}) constructs. */
3653 int old_expr_stmts_for_value = expr_stmts_for_value;
3654 rtx old_last_expr_value = last_expr_value;
3655 tree old_last_expr_type = last_expr_type;
3656 expr_stmts_for_value = 0;
3657
3658 /* Only clean up here if this point can actually be reached. */
3659 insn = get_last_insn ();
3660 if (GET_CODE (insn) == NOTE)
3661 insn = prev_nonnote_insn (insn);
3662 reachable = (! insn || GET_CODE (insn) != BARRIER);
3663
3664 /* Do the cleanups. */
3665 expand_cleanups (thisblock->data.block.cleanups, NULL_TREE, 0, reachable);
3666 if (reachable)
3667 do_pending_stack_adjust ();
3668
3669 expr_stmts_for_value = old_expr_stmts_for_value;
3670 last_expr_value = old_last_expr_value;
3671 last_expr_type = old_last_expr_type;
3672
3673 /* Restore the stack level. */
3674
3675 if (reachable && thisblock->data.block.stack_level != 0)
3676 {
3677 emit_stack_restore (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3678 thisblock->data.block.stack_level, NULL_RTX);
3679 if (nonlocal_goto_handler_slots != 0)
3680 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level,
3681 NULL_RTX);
3682 }
3683
3684 /* Any gotos out of this block must also do these things.
3685 Also report any gotos with fixups that came to labels in this
3686 level. */
3687 fixup_gotos (thisblock,
3688 thisblock->data.block.stack_level,
3689 thisblock->data.block.cleanups,
3690 thisblock->data.block.first_insn,
3691 dont_jump_in);
3692 }
3693
3694 /* Mark the beginning and end of the scope if requested.
3695 We do this now, after running cleanups on the variables
3696 just going out of scope, so they are in scope for their cleanups. */
3697
3698 if (mark_ends)
3699 {
3700 rtx note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
3701 NOTE_BLOCK (note) = NOTE_BLOCK (thisblock->data.block.first_insn);
3702 }
3703 else
3704 /* Get rid of the beginning-mark if we don't make an end-mark. */
3705 NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;
3706
3707 /* Restore the temporary level of TARGET_EXPRs. */
3708 target_temp_slot_level = thisblock->data.block.block_target_temp_slot_level;
3709
3710 /* Restore block_stack level for containing block. */
3711
3712 stack_block_stack = thisblock->data.block.innermost_stack_block;
3713 POPSTACK (block_stack);
3714
3715 /* Pop the stack slot nesting and free any slots at this level. */
3716 pop_temp_slots ();
3717 }
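/* A minimal sketch (not taken from any particular front end) of how a
   compound statement is bracketed by these calls; real callers also
   manage temporaries and emit line notes, and the exact arguments vary
   by front end:

	expand_start_bindings (0);
	  ... expand_decl/expand_decl_init for each local, then the body ...
	expand_end_bindings (getdecls (), 1, 0);

   Here `getdecls' stands for whatever returns the chain of local
   VAR_DECLs; the name is illustrative.  */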
3718 \f
3719 /* Generate code to save the stack pointer at the start of the current block
3720 and set up to restore it on exit. */
3721
3722 void
3723 save_stack_pointer ()
3724 {
3725 struct nesting *thisblock = block_stack;
3726
3727 if (thisblock->data.block.stack_level == 0)
3728 {
3729 emit_stack_save (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3730 &thisblock->data.block.stack_level,
3731 thisblock->data.block.first_insn);
3732 stack_block_stack = thisblock;
3733 }
3734 }
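/* The saved level is what lets a block containing dynamically sized
   objects release their space on exit.  Illustrative example (C9X-style
   variable-length array):

	{ int n = f (); char buf[n]; use (buf); }	stack restored here

   expand_decl arranges this by calling save_stack_pointer before
   allocate_dynamic_stack_space.  */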
3735 \f
3736 /* Generate RTL for the automatic variable declaration DECL.
3737 (Other kinds of declarations are simply ignored if seen here.) */
3738
3739 void
3740 expand_decl (decl)
3741 register tree decl;
3742 {
3743 struct nesting *thisblock;
3744 tree type;
3745
3746 type = TREE_TYPE (decl);
3747
3748 /* Only automatic variables need any expansion done.
3749 Static and external variables, and external functions,
3750 will be handled by `assemble_variable' (called from finish_decl).
3751 TYPE_DECL and CONST_DECL require nothing.
3752 PARM_DECLs are handled in `assign_parms'. */
3753
3754 if (TREE_CODE (decl) != VAR_DECL)
3755 return;
3756 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
3757 return;
3758
3759 thisblock = block_stack;
3760
3761 /* Create the RTL representation for the variable. */
3762
3763 if (type == error_mark_node)
3764 DECL_RTL (decl) = gen_rtx_MEM (BLKmode, const0_rtx);
3765 else if (DECL_SIZE (decl) == 0)
3766 /* Variable with incomplete type. */
3767 {
3768 if (DECL_INITIAL (decl) == 0)
3769 /* Error message was already done; now avoid a crash. */
3770 DECL_RTL (decl) = assign_stack_temp (DECL_MODE (decl), 0, 1);
3771 else
3772 /* An initializer is going to decide the size of this array.
3773 Until we know the size, represent its address with a reg. */
3774 DECL_RTL (decl) = gen_rtx_MEM (BLKmode, gen_reg_rtx (Pmode));
3775
3776 set_mem_attributes (DECL_RTL (decl), decl, 1);
3777 }
3778 else if (DECL_MODE (decl) != BLKmode
3779 /* If -ffloat-store, don't put explicit float vars
3780 into regs. */
3781 && !(flag_float_store
3782 && TREE_CODE (type) == REAL_TYPE)
3783 && ! TREE_THIS_VOLATILE (decl)
3784 && ! TREE_ADDRESSABLE (decl)
3785 && (DECL_REGISTER (decl) || optimize)
3786 /* if -fcheck-memory-usage, check all variables. */
3787 && ! current_function_check_memory_usage)
3788 {
3789 /* Automatic variable that can go in a register. */
3790 int unsignedp = TREE_UNSIGNED (type);
3791 enum machine_mode reg_mode
3792 = promote_mode (type, DECL_MODE (decl), &unsignedp, 0);
3793
3794 DECL_RTL (decl) = gen_reg_rtx (reg_mode);
3795 mark_user_reg (DECL_RTL (decl));
3796
3797 if (POINTER_TYPE_P (type))
3798 mark_reg_pointer (DECL_RTL (decl),
3799 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl))));
3800
3801 maybe_set_unchanging (DECL_RTL (decl), decl);
3802 }
3803
3804 else if (TREE_CODE (DECL_SIZE_UNIT (decl)) == INTEGER_CST
3805 && ! (flag_stack_check && ! STACK_CHECK_BUILTIN
3806 && 0 < compare_tree_int (DECL_SIZE_UNIT (decl),
3807 STACK_CHECK_MAX_VAR_SIZE)))
3808 {
3809 /* Variable of fixed size that goes on the stack. */
3810 rtx oldaddr = 0;
3811 rtx addr;
3812
3813 /* If we previously made RTL for this decl, it must be an array
3814 whose size was determined by the initializer.
3815 The old address was a register; set that register now
3816 to the proper address. */
3817 if (DECL_RTL (decl) != 0)
3818 {
3819 if (GET_CODE (DECL_RTL (decl)) != MEM
3820 || GET_CODE (XEXP (DECL_RTL (decl), 0)) != REG)
3821 abort ();
3822 oldaddr = XEXP (DECL_RTL (decl), 0);
3823 }
3824
3825 DECL_RTL (decl) = assign_temp (TREE_TYPE (decl), 1, 1, 1);
3826
3827 /* Set alignment we actually gave this decl. */
3828 DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
3829 : GET_MODE_BITSIZE (DECL_MODE (decl)));
3830 DECL_USER_ALIGN (decl) = 0;
3831
3832 if (oldaddr)
3833 {
3834 addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
3835 if (addr != oldaddr)
3836 emit_move_insn (oldaddr, addr);
3837 }
3838 }
3839 else
3840 /* Dynamic-size object: must push space on the stack. */
3841 {
3842 rtx address, size;
3843
3844 /* Record the stack pointer on entry to this block, if we have
3845 not already done so. */
3846 do_pending_stack_adjust ();
3847 save_stack_pointer ();
3848
3849 /* In function-at-a-time mode, variable_size doesn't expand this,
3850 so do it now. */
3851 if (TREE_CODE (type) == ARRAY_TYPE && TYPE_DOMAIN (type))
3852 expand_expr (TYPE_MAX_VALUE (TYPE_DOMAIN (type)),
3853 const0_rtx, VOIDmode, 0);
3854
3855 /* Compute the variable's size, in bytes. */
3856 size = expand_expr (DECL_SIZE_UNIT (decl), NULL_RTX, VOIDmode, 0);
3857 free_temp_slots ();
3858
3859 /* Allocate space on the stack for the variable. Note that
3860 DECL_ALIGN says how the variable is to be aligned and we
3861 cannot use it to conclude anything about the alignment of
3862 the size. */
3863 address = allocate_dynamic_stack_space (size, NULL_RTX,
3864 TYPE_ALIGN (TREE_TYPE (decl)));
3865
3866 /* Reference the variable indirectly through that rtx. */
3867 DECL_RTL (decl) = gen_rtx_MEM (DECL_MODE (decl), address);
3868
3869 set_mem_attributes (DECL_RTL (decl), decl, 1);
3870
3871 /* Indicate the alignment we actually gave this variable. */
3872 #ifdef STACK_BOUNDARY
3873 DECL_ALIGN (decl) = STACK_BOUNDARY;
3874 #else
3875 DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
3876 #endif
3877 DECL_USER_ALIGN (decl) = 0;
3878 }
3879 }
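/* Summary of the cases above, in the order tested (illustrative):

	error type		(mem:BLK (const_int 0))
	incomplete type		stack temp, or (mem:BLK (reg)) awaiting the
				initializer that will determine the size
	register candidate	(reg) in the mode chosen by promote_mode
	fixed size on stack	stack slot from assign_temp
	variable size		space from allocate_dynamic_stack_space  */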
3880 \f
3881 /* Emit code to perform the initialization of a declaration DECL. */
3882
3883 void
3884 expand_decl_init (decl)
3885 tree decl;
3886 {
3887 int was_used = TREE_USED (decl);
3888
3889 /* If this is a CONST_DECL, we don't have to generate any code, but
3890 if DECL_INITIAL is a constant, call expand_expr to force TREE_CST_RTL
3891 to be set while in the obstack containing the constant. If we don't
3892 do this, we can lose if we have functions nested three deep and the middle
3893 function makes a CONST_DECL whose DECL_INITIAL is a STRING_CST while
3894 the innermost function is the first to expand that STRING_CST. */
3895 if (TREE_CODE (decl) == CONST_DECL)
3896 {
3897 if (DECL_INITIAL (decl) && TREE_CONSTANT (DECL_INITIAL (decl)))
3898 expand_expr (DECL_INITIAL (decl), NULL_RTX, VOIDmode,
3899 EXPAND_INITIALIZER);
3900 return;
3901 }
3902
3903 if (TREE_STATIC (decl))
3904 return;
3905
3906 /* Compute and store the initial value now. */
3907
3908 if (DECL_INITIAL (decl) == error_mark_node)
3909 {
3910 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
3911
3912 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
3913 || code == POINTER_TYPE || code == REFERENCE_TYPE)
3914 expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
3915 0, 0);
3916 emit_queue ();
3917 }
3918 else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
3919 {
3920 emit_line_note (DECL_SOURCE_FILE (decl), DECL_SOURCE_LINE (decl));
3921 expand_assignment (decl, DECL_INITIAL (decl), 0, 0);
3922 emit_queue ();
3923 }
3924
3925 /* Don't let the initialization count as "using" the variable. */
3926 TREE_USED (decl) = was_used;
3927
3928 /* Free any temporaries we made while initializing the decl. */
3929 preserve_temp_slots (NULL_RTX);
3930 free_temp_slots ();
3931 }
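/* For example, `int i = f ();' emits a line note at the declaration's
   source position followed by the same RTL as the assignment
   `i = f ()', without marking `i' as used (illustrative).  */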
3932
3933 /* CLEANUP is an expression to be executed at exit from this binding contour;
3934 for example, in C++, it might call the destructor for this variable.
3935
3936 We wrap CLEANUP in an UNSAVE_EXPR node, so that we can expand the
3937 CLEANUP multiple times and still have the correct semantics. This
3938 happens in exception handling and for gotos, returns, and breaks
3939 that leave the current scope.
3940
3941 If CLEANUP is nonzero and DECL is zero, we record a cleanup
3942 that is not associated with any particular variable. */
3943
3944 int
3945 expand_decl_cleanup (decl, cleanup)
3946 tree decl, cleanup;
3947 {
3948 struct nesting *thisblock;
3949
3950 /* Error if we are not in any block. */
3951 if (cfun == 0 || block_stack == 0)
3952 return 0;
3953
3954 thisblock = block_stack;
3955
3956 /* Record the cleanup if there is one. */
3957
3958 if (cleanup != 0)
3959 {
3960 tree t;
3961 rtx seq;
3962 tree *cleanups = &thisblock->data.block.cleanups;
3963 int cond_context = conditional_context ();
3964
3965 if (cond_context)
3966 {
3967 rtx flag = gen_reg_rtx (word_mode);
3968 rtx set_flag_0;
3969 tree cond;
3970
3971 start_sequence ();
3972 emit_move_insn (flag, const0_rtx);
3973 set_flag_0 = get_insns ();
3974 end_sequence ();
3975
3976 thisblock->data.block.last_unconditional_cleanup
3977 = emit_insns_after (set_flag_0,
3978 thisblock->data.block.last_unconditional_cleanup);
3979
3980 emit_move_insn (flag, const1_rtx);
3981
3982 /* All cleanups must be on the function_obstack. */
3983 push_obstacks_nochange ();
3984 resume_temporary_allocation ();
3985
3986 cond = build_decl (VAR_DECL, NULL_TREE, type_for_mode (word_mode, 1));
3987 DECL_RTL (cond) = flag;
3988
3989 /* Conditionalize the cleanup. */
3990 cleanup = build (COND_EXPR, void_type_node,
3991 truthvalue_conversion (cond),
3992 cleanup, integer_zero_node);
3993 cleanup = fold (cleanup);
3994
3995 pop_obstacks ();
3996
3997 cleanups = thisblock->data.block.cleanup_ptr;
3998 }
3999
4000 /* All cleanups must be on the function_obstack. */
4001 push_obstacks_nochange ();
4002 resume_temporary_allocation ();
4003 cleanup = unsave_expr (cleanup);
4004 pop_obstacks ();
4005
4006 t = *cleanups = temp_tree_cons (decl, cleanup, *cleanups);
4007
4008 if (! cond_context)
4009 /* If this block has a cleanup, it belongs in stack_block_stack. */
4010 stack_block_stack = thisblock;
4011
4012 if (cond_context)
4013 {
4014 start_sequence ();
4015 }
4016
4017 /* If this was optimized so that there is no exception region for the
4018 cleanup, then mark the TREE_LIST node, so that we can later tell
4019 if we need to call expand_eh_region_end. */
4020 if (! using_eh_for_cleanups_p
4021 || expand_eh_region_start_tree (decl, cleanup))
4022 TREE_ADDRESSABLE (t) = 1;
4023 /* If that started a new EH region, we're in a new block. */
4024 thisblock = block_stack;
4025
4026 if (cond_context)
4027 {
4028 seq = get_insns ();
4029 end_sequence ();
4030 if (seq)
4031 thisblock->data.block.last_unconditional_cleanup
4032 = emit_insns_after (seq,
4033 thisblock->data.block.last_unconditional_cleanup);
4034 }
4035 else
4036 {
4037 thisblock->data.block.last_unconditional_cleanup
4038 = get_last_insn ();
4039 /* When we insert instructions after the last unconditional cleanup,
4040 we don't adjust last_insn. That means that a later add_insn will
4041 clobber the instructions we've just added. The easiest way to
4042 fix this is to just insert another instruction here, so that the
4043 instructions inserted after the last unconditional cleanup are
4044 never the last instruction. */
4045 emit_note (NULL_PTR, NOTE_INSN_DELETED);
4046 thisblock->data.block.cleanup_ptr = &thisblock->data.block.cleanups;
4047 }
4048 }
4049 return 1;
4050 }
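/* Illustrative example of why conditional cleanups need the flag: in a
   C++ expression such as

	cond ? T ("x") : T ("y")

   each temporary's destructor must run at the end of the full
   expression only on the branch that actually constructed it.  The
   word_mode flag built above is cleared at the last unconditional
   point, set to 1 on the path that registers the cleanup, and tested
   by the COND_EXPR wrapped around the cleanup.  */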
4051
4052 /* Like expand_decl_cleanup, but suppress generating an exception handler
4053 to perform the cleanup. */
4054
4055 #if 0
4056 int
4057 expand_decl_cleanup_no_eh (decl, cleanup)
4058 tree decl, cleanup;
4059 {
4060 int save_eh = using_eh_for_cleanups_p;
4061 int result;
4062
4063 using_eh_for_cleanups_p = 0;
4064 result = expand_decl_cleanup (decl, cleanup);
4065 using_eh_for_cleanups_p = save_eh;
4066
4067 return result;
4068 }
4069 #endif
4070
4071 /* Arrange for the top element of the dynamic cleanup chain to be
4072 popped if we exit the current binding contour. DECL is the
4073 associated declaration, if any, otherwise NULL_TREE. If the
4074 current contour is left via an exception, then __sjthrow will pop
4075 the top element off the dynamic cleanup chain. The code that
4076 avoids doing the action we push into the cleanup chain in the
4077 exceptional case is contained in expand_cleanups.
4078
4079 This routine is only used by expand_eh_region_start, and that is
4080 the only way in which an exception region should be started. This
4081 routine is only used when using the setjmp/longjmp codegen method
4082 for exception handling. */
4083
4084 int
4085 expand_dcc_cleanup (decl)
4086 tree decl;
4087 {
4088 struct nesting *thisblock;
4089 tree cleanup;
4090
4091 /* Error if we are not in any block. */
4092 if (cfun == 0 || block_stack == 0)
4093 return 0;
4094 thisblock = block_stack;
4095
4096 /* Record the cleanup for the dynamic handler chain. */
4097
4098 /* All cleanups must be on the function_obstack. */
4099 push_obstacks_nochange ();
4100 resume_temporary_allocation ();
4101 cleanup = make_node (POPDCC_EXPR);
4102 pop_obstacks ();
4103
4104 /* Add the cleanup in a manner similar to expand_decl_cleanup. */
4105 thisblock->data.block.cleanups
4106 = temp_tree_cons (decl, cleanup, thisblock->data.block.cleanups);
4107
4108 /* If this block has a cleanup, it belongs in stack_block_stack. */
4109 stack_block_stack = thisblock;
4110 return 1;
4111 }
4112
4113 /* Arrange for the top element of the dynamic handler chain to be
4114 popped if we exit the current binding contour. DECL is the
4115 associated declaration, if any, otherwise NULL_TREE. If the current
4116 contour is left via an exception, then __sjthrow will pop the top
4117 element off the dynamic handler chain. The code that avoids doing
4118 the action we push into the handler chain in the exceptional case
4119 is contained in expand_cleanups.
4120
4121 This routine is only used by expand_eh_region_start, and that is
4122 the only way in which an exception region should be started. This
4123 routine is only used when using the setjmp/longjmp codegen method
4124 for exception handling. */
4125
4126 int
4127 expand_dhc_cleanup (decl)
4128 tree decl;
4129 {
4130 struct nesting *thisblock;
4131 tree cleanup;
4132
4133 /* Error if we are not in any block. */
4134 if (cfun == 0 || block_stack == 0)
4135 return 0;
4136 thisblock = block_stack;
4137
4138 /* Record the cleanup for the dynamic handler chain. */
4139
4140 /* All cleanups must be on the function_obstack. */
4141 push_obstacks_nochange ();
4142 resume_temporary_allocation ();
4143 cleanup = make_node (POPDHC_EXPR);
4144 pop_obstacks ();
4145
4146 /* Add the cleanup in a manner similar to expand_decl_cleanup. */
4147 thisblock->data.block.cleanups
4148 = temp_tree_cons (decl, cleanup, thisblock->data.block.cleanups);
4149
4150 /* If this block has a cleanup, it belongs in stack_block_stack. */
4151 stack_block_stack = thisblock;
4152 return 1;
4153 }
4154 \f
4155 /* DECL is an anonymous union. CLEANUP is a cleanup for DECL.
4156 DECL_ELTS is the list of elements that belong to DECL's type.
4157 In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup. */
4158
4159 void
4160 expand_anon_union_decl (decl, cleanup, decl_elts)
4161 tree decl, cleanup, decl_elts;
4162 {
4163 struct nesting *thisblock = cfun == 0 ? 0 : block_stack;
4164 rtx x;
4165 tree t;
4166
4167 /* If any of the elements are addressable, so is the entire union. */
4168 for (t = decl_elts; t; t = TREE_CHAIN (t))
4169 if (TREE_ADDRESSABLE (TREE_VALUE (t)))
4170 {
4171 TREE_ADDRESSABLE (decl) = 1;
4172 break;
4173 }
4174
4175 expand_decl (decl);
4176 expand_decl_cleanup (decl, cleanup);
4177 x = DECL_RTL (decl);
4178
4179 /* Go through the elements, assigning RTL to each. */
4180 for (t = decl_elts; t; t = TREE_CHAIN (t))
4181 {
4182 tree decl_elt = TREE_VALUE (t);
4183 tree cleanup_elt = TREE_PURPOSE (t);
4184 enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
4185
4186 /* Propagate the union's alignment to the elements. */
4187 DECL_ALIGN (decl_elt) = DECL_ALIGN (decl);
4188 DECL_USER_ALIGN (decl_elt) = DECL_USER_ALIGN (decl);
4189
4190 /* If the element has BLKmode and the union doesn't, the union is
4191 aligned such that the element doesn't need to have BLKmode, so
4192 change the element's mode to the appropriate one for its size. */
4193 if (mode == BLKmode && DECL_MODE (decl) != BLKmode)
4194 DECL_MODE (decl_elt) = mode
4195 = mode_for_size_tree (DECL_SIZE (decl_elt), MODE_INT, 1);
4196
4197 /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
4198 instead create a new MEM rtx with the proper mode. */
4199 if (GET_CODE (x) == MEM)
4200 {
4201 if (mode == GET_MODE (x))
4202 DECL_RTL (decl_elt) = x;
4203 else
4204 {
4205 DECL_RTL (decl_elt) = gen_rtx_MEM (mode, copy_rtx (XEXP (x, 0)));
4206 MEM_COPY_ATTRIBUTES (DECL_RTL (decl_elt), x);
4207 }
4208 }
4209 else if (GET_CODE (x) == REG)
4210 {
4211 if (mode == GET_MODE (x))
4212 DECL_RTL (decl_elt) = x;
4213 else
4214 DECL_RTL (decl_elt) = gen_rtx_SUBREG (mode, x, 0);
4215 }
4216 else
4217 abort ();
4218
4219 /* Record the cleanup if there is one. */
4220
4221 if (cleanup != 0)
4222 thisblock->data.block.cleanups
4223 = temp_tree_cons (decl_elt, cleanup_elt,
4224 thisblock->data.block.cleanups);
4225 }
4226 }
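/* Illustrative source construct (a C++ anonymous union):

	union { int i; float f; };

   DECL is the compiler-generated object for the whole union and
   DECL_ELTS lists `i' and `f'; the loop above makes each element's
   DECL_RTL alias DECL's storage via a retyped MEM, the same REG, or a
   SUBREG.  */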
4227 \f
4228 /* Expand a list of cleanups LIST.
4229 Elements may be expressions or may be nested lists.
4230
4231 If DONT_DO is nonnull, then any list-element
4232 whose TREE_PURPOSE matches DONT_DO is omitted.
4233 This is sometimes used to avoid a cleanup associated with
4234 a value that is being returned out of the scope.
4235
4236 If IN_FIXUP is non-zero, we are generating this cleanup for a fixup
4237 goto and handle protection regions specially in that case.
4238
4239 If REACHABLE, we emit code, otherwise just inform the exception handling
4240 code about this finalization. */
4241
4242 static void
4243 expand_cleanups (list, dont_do, in_fixup, reachable)
4244 tree list;
4245 tree dont_do;
4246 int in_fixup;
4247 int reachable;
4248 {
4249 tree tail;
4250 for (tail = list; tail; tail = TREE_CHAIN (tail))
4251 if (dont_do == 0 || TREE_PURPOSE (tail) != dont_do)
4252 {
4253 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4254 expand_cleanups (TREE_VALUE (tail), dont_do, in_fixup, reachable);
4255 else
4256 {
4257 if (! in_fixup)
4258 {
4259 tree cleanup = TREE_VALUE (tail);
4260
4261 /* See expand_d{h,c}c_cleanup for why we avoid this. */
4262 if (TREE_CODE (cleanup) != POPDHC_EXPR
4263 && TREE_CODE (cleanup) != POPDCC_EXPR
4264 /* See expand_eh_region_start_tree for this case. */
4265 && ! TREE_ADDRESSABLE (tail))
4266 {
4267 cleanup = protect_with_terminate (cleanup);
4268 expand_eh_region_end (cleanup);
4269 }
4270 }
4271
4272 if (reachable)
4273 {
4274 /* Cleanups may be run multiple times. For example,
4275 when exiting a binding contour, we expand the
4276 cleanups associated with that contour. When a goto
4277 within that binding contour has a target outside that
4278 contour, it will expand all cleanups from its scope to
4279 the target. Though the cleanups are expanded multiple
4280 times, the control paths are non-overlapping so the
4281 cleanups will not be executed twice. */
4282
4283 /* We may need to protect fixups with rethrow regions. */
4284 int protect = (in_fixup && ! TREE_ADDRESSABLE (tail));
4285
4286 if (protect)
4287 expand_fixup_region_start ();
4288
4289 /* The cleanup might contain try-blocks, so we have to
4290 preserve our current queue. */
4291 push_ehqueue ();
4292 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
4293 pop_ehqueue ();
4294 if (protect)
4295 expand_fixup_region_end (TREE_VALUE (tail));
4296 free_temp_slots ();
4297 }
4298 }
4299 }
4300 }
4301
4302 /* Mark the context we are emitting RTL for as a conditional
4303 context, so that any cleanup actions we register with
4304 expand_decl_init will be properly conditionalized when those
4305 cleanup actions are later performed. Must be called before any
4306 expression (tree) is expanded that is within a conditional context. */
4307
4308 void
4309 start_cleanup_deferral ()
4310 {
4311 /* block_stack can be NULL if we are inside the parameter list. It is
4312 OK to do nothing, because cleanups aren't possible here. */
4313 if (block_stack)
4314 ++block_stack->data.block.conditional_code;
4315 }
4316
4317 /* Mark the end of a conditional region of code. Because cleanup
4318 deferrals may be nested, we may still be in a conditional region
4319 after we end the currently deferred cleanups; only after we end all
4320 deferred cleanups are we back in unconditional code. */
4321
4322 void
4323 end_cleanup_deferral ()
4324 {
4325 /* block_stack can be NULL if we are inside the parameter list. It is
4326 OK to do nothing, because cleanups aren't possible here. */
4327 if (block_stack)
4328 --block_stack->data.block.conditional_code;
4329 }
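/* Typical pairing when expanding just one arm of a conditional (a
   sketch, not copied from any particular caller):

	start_cleanup_deferral ();
	expand_expr (then_clause, const0_rtx, VOIDmode, 0);
	end_cleanup_deferral ();

   so that expand_decl_cleanup sees conditional_context () as true
   while THEN_CLAUSE is being expanded.  */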
4330
4331 /* Move all cleanups from the current block_stack
4332 to the containing block_stack, where they are assumed to
4333 have been created. If anything can cause a temporary to
4334 be created, but not expanded for more than one level of
4335 block_stacks, then this code will have to change. */
4336
4337 void
4338 move_cleanups_up ()
4339 {
4340 struct nesting *block = block_stack;
4341 struct nesting *outer = block->next;
4342
4343 outer->data.block.cleanups
4344 = chainon (block->data.block.cleanups,
4345 outer->data.block.cleanups);
4346 block->data.block.cleanups = 0;
4347 }
4348
4349 tree
4350 last_cleanup_this_contour ()
4351 {
4352 if (block_stack == 0)
4353 return 0;
4354
4355 return block_stack->data.block.cleanups;
4356 }
4357
4358 /* Return 1 if there are any pending cleanups at this point.
4359 If THIS_CONTOUR is nonzero, check the current contour as well.
4360 Otherwise, look only at the contours that enclose this one. */
4361
4362 int
4363 any_pending_cleanups (this_contour)
4364 int this_contour;
4365 {
4366 struct nesting *block;
4367
4368 if (cfun == NULL || cfun->stmt == NULL || block_stack == 0)
4369 return 0;
4370
4371 if (this_contour && block_stack->data.block.cleanups != NULL)
4372 return 1;
4373 if (block_stack->data.block.cleanups == 0
4374 && block_stack->data.block.outer_cleanups == 0)
4375 return 0;
4376
4377 for (block = block_stack->next; block; block = block->next)
4378 if (block->data.block.cleanups != 0)
4379 return 1;
4380
4381 return 0;
4382 }
4383 \f
4384 /* Enter a case (Pascal) or switch (C) statement.
4385 Push a block onto case_stack and nesting_stack
4386 to accumulate the case-labels that are seen
4387 and to record the labels generated for the statement.
4388
4389 EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
4390 Otherwise, this construct is transparent for `exit_something'.
4391
4392 EXPR is the index-expression to be dispatched on.
4393 TYPE is its nominal type. We could simply convert EXPR to this type,
4394 but instead we take short cuts. */
4395
4396 void
4397 expand_start_case (exit_flag, expr, type, printname)
4398 int exit_flag;
4399 tree expr;
4400 tree type;
4401 const char *printname;
4402 {
4403 register struct nesting *thiscase = ALLOC_NESTING ();
4404
4405 /* Make an entry on case_stack for the case we are entering. */
4406
4407 thiscase->next = case_stack;
4408 thiscase->all = nesting_stack;
4409 thiscase->depth = ++nesting_depth;
4410 thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
4411 thiscase->data.case_stmt.case_list = 0;
4412 thiscase->data.case_stmt.index_expr = expr;
4413 thiscase->data.case_stmt.nominal_type = type;
4414 thiscase->data.case_stmt.default_label = 0;
4415 thiscase->data.case_stmt.num_ranges = 0;
4416 thiscase->data.case_stmt.printname = printname;
4417 thiscase->data.case_stmt.line_number_status = force_line_numbers ();
4418 case_stack = thiscase;
4419 nesting_stack = thiscase;
4420
4421 do_pending_stack_adjust ();
4422
4423 /* Make sure case_stmt.start points to something that won't
4424 need any transformation before expand_end_case. */
4425 if (GET_CODE (get_last_insn ()) != NOTE)
4426 emit_note (NULL_PTR, NOTE_INSN_DELETED);
4427
4428 thiscase->data.case_stmt.start = get_last_insn ();
4429
4430 start_cleanup_deferral ();
4431 }
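/* Sketch of the full expansion sequence for a C switch statement
   (illustrative; the error returns of pushcase are ignored here):

	expand_start_case (1, expr, type, "switch statement");
	  pushcase (value, convert, case_label, &duplicate);	per case
	  pushcase (NULL_TREE, convert, default_label, &duplicate);
	  ... expand the switch body ...
	expand_end_case (expr);
  */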
4432
4433
4434 /* Start a "dummy case statement" within which case labels are invalid
4435 and are not connected to any larger real case statement.
4436 This can be used if you don't want to let a case statement jump
4437 into the middle of certain kinds of constructs. */
4438
4439 void
4440 expand_start_case_dummy ()
4441 {
4442 register struct nesting *thiscase = ALLOC_NESTING ();
4443
4444 /* Make an entry on case_stack for the dummy. */
4445
4446 thiscase->next = case_stack;
4447 thiscase->all = nesting_stack;
4448 thiscase->depth = ++nesting_depth;
4449 thiscase->exit_label = 0;
4450 thiscase->data.case_stmt.case_list = 0;
4451 thiscase->data.case_stmt.start = 0;
4452 thiscase->data.case_stmt.nominal_type = 0;
4453 thiscase->data.case_stmt.default_label = 0;
4454 thiscase->data.case_stmt.num_ranges = 0;
4455 case_stack = thiscase;
4456 nesting_stack = thiscase;
4457 start_cleanup_deferral ();
4458 }
4459
4460 /* End a dummy case statement. */
4461
4462 void
4463 expand_end_case_dummy ()
4464 {
4465 end_cleanup_deferral ();
4466 POPSTACK (case_stack);
4467 }
4468
4469 /* Return the data type of the index-expression
4470 of the innermost case statement, or null if none. */
4471
4472 tree
4473 case_index_expr_type ()
4474 {
4475 if (case_stack)
4476 return TREE_TYPE (case_stack->data.case_stmt.index_expr);
4477 return 0;
4478 }
4479 \f
4480 static void
4481 check_seenlabel ()
4482 {
4483 /* If this is the first label, warn if any insns have been emitted. */
4484 if (case_stack->data.case_stmt.line_number_status >= 0)
4485 {
4486 rtx insn;
4487
4488 restore_line_number_status
4489 (case_stack->data.case_stmt.line_number_status);
4490 case_stack->data.case_stmt.line_number_status = -1;
4491
4492 for (insn = case_stack->data.case_stmt.start;
4493 insn;
4494 insn = NEXT_INSN (insn))
4495 {
4496 if (GET_CODE (insn) == CODE_LABEL)
4497 break;
4498 if (GET_CODE (insn) != NOTE
4499 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
4500 {
4501 do
4502 insn = PREV_INSN (insn);
4503 while (insn && (GET_CODE (insn) != NOTE || NOTE_LINE_NUMBER (insn) < 0));
4504
4505 /* If insn is zero, then there must have been a syntax error. */
4506 if (insn)
4507 warning_with_file_and_line (NOTE_SOURCE_FILE(insn),
4508 NOTE_LINE_NUMBER(insn),
4509 "unreachable code at beginning of %s",
4510 case_stack->data.case_stmt.printname);
4511 break;
4512 }
4513 }
4514 }
4515 }
4516
4517 /* Accumulate one case or default label inside a case or switch statement.
4518 VALUE is the value of the case (a null pointer, for a default label).
4519 The function CONVERTER, when applied to arguments T and V,
4520 converts the value V to the type T.
4521
4522 If not currently inside a case or switch statement, return 1 and do
4523 nothing. The caller will print a language-specific error message.
4524 If VALUE is a duplicate or overlaps, return 2 and do nothing
4525 except store the (first) duplicate node in *DUPLICATE.
4526 If VALUE is out of range, return 3 and do nothing.
4527 If we are jumping into the scope of a cleanup or var-sized array, return 5.
4528 Return 0 on success.
4529
4530 Extended to handle range statements. */
4531
4532 int
4533 pushcase (value, converter, label, duplicate)
4534 register tree value;
4535 tree (*converter) PARAMS ((tree, tree));
4536 register tree label;
4537 tree *duplicate;
4538 {
4539 tree index_type;
4540 tree nominal_type;
4541
4542 /* Fail if not inside a real case statement. */
4543 if (! (case_stack && case_stack->data.case_stmt.start))
4544 return 1;
4545
4546 if (stack_block_stack
4547 && stack_block_stack->depth > case_stack->depth)
4548 return 5;
4549
4550 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4551 nominal_type = case_stack->data.case_stmt.nominal_type;
4552
4553 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4554 if (index_type == error_mark_node)
4555 return 0;
4556
4557 /* Convert VALUE to the type in which the comparisons are nominally done. */
4558 if (value != 0)
4559 value = (*converter) (nominal_type, value);
4560
4561 check_seenlabel ();
4562
4563 /* Fail if this value is out of range for the actual type of the index
4564 (which may be narrower than NOMINAL_TYPE). */
4565 if (value != 0
4566 && (TREE_CONSTANT_OVERFLOW (value)
4567 || ! int_fits_type_p (value, index_type)))
4568 return 3;
4569
4570 /* Fail if this is a duplicate or overlaps another entry. */
4571 if (value == 0)
4572 {
4573 if (case_stack->data.case_stmt.default_label != 0)
4574 {
4575 *duplicate = case_stack->data.case_stmt.default_label;
4576 return 2;
4577 }
4578 case_stack->data.case_stmt.default_label = label;
4579 }
4580 else
4581 return add_case_node (value, value, label, duplicate);
4582
4583 expand_label (label);
4584 return 0;
4585 }
4586
4587 /* Like pushcase but this case applies to all values between VALUE1 and
4588 VALUE2 (inclusive). If VALUE1 is NULL, the range starts at the lowest
4589 value of the index type and ends at VALUE2. If VALUE2 is NULL, the range
4590 starts at VALUE1 and ends at the highest value of the index type.
4591 If both are NULL, this case applies to all values.
4592
4593 The return value is the same as that of pushcase but there is one
4594 additional error code: 4 means the specified range was empty. */
4595
4596 int
4597 pushcase_range (value1, value2, converter, label, duplicate)
4598 register tree value1, value2;
4599 tree (*converter) PARAMS ((tree, tree));
4600 register tree label;
4601 tree *duplicate;
4602 {
4603 tree index_type;
4604 tree nominal_type;
4605
4606 /* Fail if not inside a real case statement. */
4607 if (! (case_stack && case_stack->data.case_stmt.start))
4608 return 1;
4609
4610 if (stack_block_stack
4611 && stack_block_stack->depth > case_stack->depth)
4612 return 5;
4613
4614 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4615 nominal_type = case_stack->data.case_stmt.nominal_type;
4616
4617 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4618 if (index_type == error_mark_node)
4619 return 0;
4620
4621 check_seenlabel ();
4622
4623 /* Convert VALUEs to type in which the comparisons are nominally done
4624 and replace any unspecified value with the corresponding bound. */
4625 if (value1 == 0)
4626 value1 = TYPE_MIN_VALUE (index_type);
4627 if (value2 == 0)
4628 value2 = TYPE_MAX_VALUE (index_type);
4629
4630 /* Fail if the range is empty. Do this before any conversion since
4631 we want to allow out-of-range empty ranges. */
4632 if (value2 != 0 && tree_int_cst_lt (value2, value1))
4633 return 4;
4634
4635 /* If the max was unbounded, use the max of the nominal_type we are
4636 converting to. Do this after the < check above to suppress false
4637 positives. */
4638 if (value2 == 0)
4639 value2 = TYPE_MAX_VALUE (nominal_type);
4640
4641 value1 = (*converter) (nominal_type, value1);
4642 value2 = (*converter) (nominal_type, value2);
4643
4644 /* Fail if these values are out of range. */
4645 if (TREE_CONSTANT_OVERFLOW (value1)
4646 || ! int_fits_type_p (value1, index_type))
4647 return 3;
4648
4649 if (TREE_CONSTANT_OVERFLOW (value2)
4650 || ! int_fits_type_p (value2, index_type))
4651 return 3;
4652
4653 return add_case_node (value1, value2, label, duplicate);
4654 }
4655
4656 /* Do the actual insertion of a case label for pushcase and pushcase_range
4657 into case_stack->data.case_stmt.case_list. Use an AVL tree to avoid
4658 slowdown for large switch statements. */
4659
4660 static int
4661 add_case_node (low, high, label, duplicate)
4662 tree low, high;
4663 tree label;
4664 tree *duplicate;
4665 {
4666 struct case_node *p, **q, *r;
4667
4668 q = &case_stack->data.case_stmt.case_list;
4669 p = *q;
4670
4671 while ((r = *q))
4672 {
4673 p = r;
4674
4675 /* Keep going past elements distinctly greater than HIGH. */
4676 if (tree_int_cst_lt (high, p->low))
4677 q = &p->left;
4678
4679 /* or distinctly less than LOW. */
4680 else if (tree_int_cst_lt (p->high, low))
4681 q = &p->right;
4682
4683 else
4684 {
4685 /* We have an overlap; this is an error. */
4686 *duplicate = p->code_label;
4687 return 2;
4688 }
4689 }
4690
4691 /* Add this label to the chain, and succeed.
4692 Copy LOW, HIGH so they are on the temporary rather than the momentary
4693 obstack and will thus survive till the end of the case statement. */
4694
4695 r = (struct case_node *) oballoc (sizeof (struct case_node));
4696 r->low = copy_node (low);
4697
4698 /* If the bounds are equal, turn this into the one-value case. */
4699
4700 if (tree_int_cst_equal (low, high))
4701 r->high = r->low;
4702 else
4703 {
4704 r->high = copy_node (high);
4705 case_stack->data.case_stmt.num_ranges++;
4706 }
4707
4708 r->code_label = label;
4709 expand_label (label);
4710
4711 *q = r;
4712 r->parent = p;
4713 r->left = 0;
4714 r->right = 0;
4715 r->balance = 0;
4716
4717 while (p)
4718 {
4719 struct case_node *s;
4720
4721 if (r == p->left)
4722 {
4723 int b;
4724
4725 if (! (b = p->balance))
4726 /* Growth propagation from left side. */
4727 p->balance = -1;
4728 else if (b < 0)
4729 {
4730 if (r->balance < 0)
4731 {
4732 /* R-Rotation */
4733 if ((p->left = s = r->right))
4734 s->parent = p;
4735
4736 r->right = p;
4737 p->balance = 0;
4738 r->balance = 0;
4739 s = p->parent;
4740 p->parent = r;
4741
4742 if ((r->parent = s))
4743 {
4744 if (s->left == p)
4745 s->left = r;
4746 else
4747 s->right = r;
4748 }
4749 else
4750 case_stack->data.case_stmt.case_list = r;
4751 }
4752 else
4753 /* r->balance == +1 */
4754 {
4755 /* LR-Rotation */
4756
4757 int b2;
4758 struct case_node *t = r->right;
4759
4760 if ((p->left = s = t->right))
4761 s->parent = p;
4762
4763 t->right = p;
4764 if ((r->right = s = t->left))
4765 s->parent = r;
4766
4767 t->left = r;
4768 b = t->balance;
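	      /* Standard AVL double-rotation bookkeeping: derive the new
		 balance factors of P and R from T's old one.  If T was
		 left-heavy (B < 0), P ends up right-heavy and R balanced;
		 if T was right-heavy, P ends up balanced and R left-heavy;
		 if T was balanced, both end up balanced.  The branch-free
		 arithmetic below encodes this case analysis (and is
		 mirrored in the RL-rotation further down).  */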
4769 b2 = b < 0;
4770 p->balance = b2;
4771 b2 = -b2 - b;
4772 r->balance = b2;
4773 t->balance = 0;
4774 s = p->parent;
4775 p->parent = t;
4776 r->parent = t;
4777
4778 if ((t->parent = s))
4779 {
4780 if (s->left == p)
4781 s->left = t;
4782 else
4783 s->right = t;
4784 }
4785 else
4786 case_stack->data.case_stmt.case_list = t;
4787 }
4788 break;
4789 }
4790
4791 else
4792 {
4793 /* p->balance == +1; growth of left side balances the node. */
4794 p->balance = 0;
4795 break;
4796 }
4797 }
4798 else
4799 /* r == p->right */
4800 {
4801 int b;
4802
4803 if (! (b = p->balance))
4804 /* Growth propagation from right side. */
4805 p->balance++;
4806 else if (b > 0)
4807 {
4808 if (r->balance > 0)
4809 {
4810 /* L-Rotation */
4811
4812 if ((p->right = s = r->left))
4813 s->parent = p;
4814
4815 r->left = p;
4816 p->balance = 0;
4817 r->balance = 0;
4818 s = p->parent;
4819 p->parent = r;
4820 if ((r->parent = s))
4821 {
4822 if (s->left == p)
4823 s->left = r;
4824 else
4825 s->right = r;
4826 }
4827
4828 else
4829 case_stack->data.case_stmt.case_list = r;
4830 }
4831
4832 else
4833 /* r->balance == -1 */
4834 {
4835 /* RL-Rotation */
4836 int b2;
4837 struct case_node *t = r->left;
4838
4839 if ((p->right = s = t->left))
4840 s->parent = p;
4841
4842 t->left = p;
4843
4844 if ((r->left = s = t->right))
4845 s->parent = r;
4846
4847 t->right = r;
4848 b = t->balance;
4849 b2 = b < 0;
4850 r->balance = b2;
4851 b2 = -b2 - b;
4852 p->balance = b2;
4853 t->balance = 0;
4854 s = p->parent;
4855 p->parent = t;
4856 r->parent = t;
4857
4858 if ((t->parent = s))
4859 {
4860 if (s->left == p)
4861 s->left = t;
4862 else
4863 s->right = t;
4864 }
4865
4866 else
4867 case_stack->data.case_stmt.case_list = t;
4868 }
4869 break;
4870 }
4871 else
4872 {
4873 /* p->balance == -1; growth of right side balances the node. */
4874 p->balance = 0;
4875 break;
4876 }
4877 }
4878
4879 r = p;
4880 p = p->parent;
4881 }
4882
4883 return 0;
4884 }
4885
4886 \f
4887 /* Returns the number of possible values of TYPE.
4888 Returns -1 if the number is unknown, variable, or if the number does not
4889 fit in a HOST_WIDE_INT.
4890 Sets *SPARENESS to 2 if TYPE is an ENUMERAL_TYPE whose values
4891 do not increase monotonically (there may be duplicates);
4892 to 1 if the values increase monotonically, but not always by 1;
4893 otherwise sets it to 0. */
4894
4895 HOST_WIDE_INT
4896 all_cases_count (type, spareness)
4897 tree type;
4898 int *spareness;
4899 {
4900 tree t;
4901 HOST_WIDE_INT count, minval, lastval;
4902
4903 *spareness = 0;
4904
4905 switch (TREE_CODE (type))
4906 {
4907 case BOOLEAN_TYPE:
4908 count = 2;
4909 break;
4910
4911 case CHAR_TYPE:
4912 count = 1 << BITS_PER_UNIT;
4913 break;
4914
4915 default:
4916 case INTEGER_TYPE:
4917 if (TYPE_MAX_VALUE (type) != 0
4918 && 0 != (t = fold (build (MINUS_EXPR, type, TYPE_MAX_VALUE (type),
4919 TYPE_MIN_VALUE (type))))
4920 && 0 != (t = fold (build (PLUS_EXPR, type, t,
4921 convert (type, integer_zero_node))))
4922 && host_integerp (t, 1))
4923 count = tree_low_cst (t, 1);
4924 else
4925 return -1;
4926 break;
4927
4928 case ENUMERAL_TYPE:
4929 /* Don't waste time with enumeral types with huge values. */
4930 if (! host_integerp (TYPE_MIN_VALUE (type), 0)
4931 || TYPE_MAX_VALUE (type) == 0
4932 || ! host_integerp (TYPE_MAX_VALUE (type), 0))
4933 return -1;
4934
4935 lastval = minval = tree_low_cst (TYPE_MIN_VALUE (type), 0);
4936 count = 0;
4937
4938 for (t = TYPE_VALUES (type); t != NULL_TREE; t = TREE_CHAIN (t))
4939 {
4940 HOST_WIDE_INT thisval = tree_low_cst (TREE_VALUE (t), 0);
4941
4942 if (*spareness == 2 || thisval < lastval)
4943 *spareness = 2;
4944 else if (thisval != minval + count)
4945 *spareness = 1;
4946
4947 count++;
4948 }
4949 }
4950
4951 return count;
4952 }
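/* Illustrative examples for ordinary C enums:

	enum { A, B, C }		count 3, *spareness 0
	enum { A = 1, B = 2, C = 4 }	count 3, *spareness 1

   i.e. a dense enumeration yields 0 and a gappy but monotonically
   increasing one yields 1.  */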
4953
4954 #define BITARRAY_TEST(ARRAY, INDEX) \
4955 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
4956 & (1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR)))
4957 #define BITARRAY_SET(ARRAY, INDEX) \
4958 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
4959 |= 1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR))
4960
4961 /* Set the elements of the bitstring CASES_SEEN (which has length COUNT)
4962 with the case values we have seen, assuming the case expression
4963 has the given TYPE.
4964 SPARSENESS is as determined by all_cases_count.
4965
4966 The time needed is proportional to COUNT, unless
4967 SPARSENESS is 2, in which case quadratic time is needed. */
4968
4969 void
4970 mark_seen_cases (type, cases_seen, count, sparseness)
4971 tree type;
4972 unsigned char *cases_seen;
4973 HOST_WIDE_INT count;
4974 int sparseness;
4975 {
4976 tree next_node_to_try = NULL_TREE;
4977 HOST_WIDE_INT next_node_offset = 0;
4978
4979 register struct case_node *n, *root = case_stack->data.case_stmt.case_list;
4980 tree val = make_node (INTEGER_CST);
4981
4982 TREE_TYPE (val) = type;
4983 if (! root)
4984 ; /* Do nothing */
4985 else if (sparseness == 2)
4986 {
4987 tree t;
4988 unsigned HOST_WIDE_INT xlo;
4989
4990 /* This less efficient loop is only needed to handle
4991 duplicate case values (multiple enum constants
4992 with the same value). */
4993 TREE_TYPE (val) = TREE_TYPE (root->low);
4994 for (t = TYPE_VALUES (type), xlo = 0; t != NULL_TREE;
4995 t = TREE_CHAIN (t), xlo++)
4996 {
4997 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (TREE_VALUE (t));
4998 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (TREE_VALUE (t));
4999 n = root;
5000 do
5001 {
5002 /* Keep going past elements distinctly greater than VAL. */
5003 if (tree_int_cst_lt (val, n->low))
5004 n = n->left;
5005
5006 /* or distinctly less than VAL. */
5007 else if (tree_int_cst_lt (n->high, val))
5008 n = n->right;
5009
5010 else
5011 {
5012 /* We have found a matching range. */
5013 BITARRAY_SET (cases_seen, xlo);
5014 break;
5015 }
5016 }
5017 while (n);
5018 }
5019 }
5020 else
5021 {
5022 if (root->left)
5023 case_stack->data.case_stmt.case_list = root = case_tree2list (root, 0);
5024
5025 for (n = root; n; n = n->right)
5026 {
5027 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (n->low);
5028 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (n->low);
5029 while ( ! tree_int_cst_lt (n->high, val))
5030 {
5031 /* Calculate (into xlo) the "offset" of the integer (val).
5032 The element with lowest value has offset 0, the next smallest
5033 element has offset 1, etc. */
5034
5035 unsigned HOST_WIDE_INT xlo;
5036 HOST_WIDE_INT xhi;
5037 tree t;
5038
5039 if (sparseness && TYPE_VALUES (type) != NULL_TREE)
5040 {
5041 /* The TYPE_VALUES will be in increasing order, so
5042 start searching where we last ended. */
5043 t = next_node_to_try;
5044 xlo = next_node_offset;
5045 xhi = 0;
5046 for (;;)
5047 {
5048 if (t == NULL_TREE)
5049 {
5050 t = TYPE_VALUES (type);
5051 xlo = 0;
5052 }
5053 if (tree_int_cst_equal (val, TREE_VALUE (t)))
5054 {
5055 next_node_to_try = TREE_CHAIN (t);
5056 next_node_offset = xlo + 1;
5057 break;
5058 }
5059 xlo++;
5060 t = TREE_CHAIN (t);
5061 if (t == next_node_to_try)
5062 {
5063 xlo = -1;
5064 break;
5065 }
5066 }
5067 }
5068 else
5069 {
5070 t = TYPE_MIN_VALUE (type);
5071 if (t)
5072 neg_double (TREE_INT_CST_LOW (t), TREE_INT_CST_HIGH (t),
5073 &xlo, &xhi);
5074 else
5075 xlo = xhi = 0;
5076 add_double (xlo, xhi,
5077 TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
5078 &xlo, &xhi);
5079 }
5080
5081 if (xhi == 0 && xlo < (unsigned HOST_WIDE_INT) count)
5082 BITARRAY_SET (cases_seen, xlo);
5083
5084 add_double (TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
5085 1, 0,
5086 &TREE_INT_CST_LOW (val), &TREE_INT_CST_HIGH (val));
5087 }
5088 }
5089 }
5090 }
5091
5092 /* Called when the index of a switch statement is an enumerated type
5093 and there is no default label.
5094
5095 Checks that all enumeration literals are covered by the case
5096 expressions of a switch. Also, warn if there are any extra
5097 switch cases that are *not* elements of the enumerated type.
5098
5099 If all enumeration literals were covered by the case expressions,
5100 turn one of the expressions into the default expression since it should
5101 not be possible to fall through such a switch. */
5102
5103 void
5104 check_for_full_enumeration_handling (type)
5105 tree type;
5106 {
5107 register struct case_node *n;
5108 register tree chain;
5109 #if 0 /* variable used by 'if 0'ed code below. */
5110 register struct case_node **l;
5111 int all_values = 1;
5112 #endif
5113
5114 /* True iff the selector type is a numbered set mode. */
5115 int sparseness = 0;
5116
5117 /* The number of possible selector values. */
5118 HOST_WIDE_INT size;
5119
5120 /* For each possible selector value, a one iff it has been matched
5121 by a case value alternative. */
5122 unsigned char *cases_seen;
5123
5124 /* The allocated size of cases_seen, in chars. */
5125 HOST_WIDE_INT bytes_needed;
5126
5127 if (! warn_switch)
5128 return;
5129
5130 size = all_cases_count (type, &sparseness);
5131 bytes_needed = (size + HOST_BITS_PER_CHAR) / HOST_BITS_PER_CHAR;
5132
5133 if (size > 0 && size < 600000
5134 /* We deliberately use calloc here, not xcalloc, so that we can suppress
5135 this optimization if we don't have enough memory rather than
5136 aborting, as xmalloc would do. */
5137 && (cases_seen = (unsigned char *) calloc (bytes_needed, 1)) != NULL)
5138 {
5139 HOST_WIDE_INT i;
5140 tree v = TYPE_VALUES (type);
5141
5142 /* The time complexity of this code is normally O(N), where
5143 N is the number of members in the enumerated type.
5144 However, if type is an ENUMERAL_TYPE whose values do not
5145 increase monotonically, O(N*log(N)) time may be needed. */
5146
5147 mark_seen_cases (type, cases_seen, size, sparseness);
5148
5149 for (i = 0; v != NULL_TREE && i < size; i++, v = TREE_CHAIN (v))
5150 if (BITARRAY_TEST(cases_seen, i) == 0)
5151 warning ("enumeration value `%s' not handled in switch",
5152 IDENTIFIER_POINTER (TREE_PURPOSE (v)));
5153
5154 free (cases_seen);
5155 }
5156
5157 /* Now we go the other way around; we warn if there are case
5158 expressions that don't correspond to enumerators. This can
5159 occur since C and C++ don't enforce type-checking of
5160 assignments to enumeration variables. */
5161
5162 if (case_stack->data.case_stmt.case_list
5163 && case_stack->data.case_stmt.case_list->left)
5164 case_stack->data.case_stmt.case_list
5165 = case_tree2list (case_stack->data.case_stmt.case_list, 0);
5166 if (warn_switch)
5167 for (n = case_stack->data.case_stmt.case_list; n; n = n->right)
5168 {
5169 for (chain = TYPE_VALUES (type);
5170 chain && !tree_int_cst_equal (n->low, TREE_VALUE (chain));
5171 chain = TREE_CHAIN (chain))
5172 ;
5173
5174 if (!chain)
5175 {
5176 if (TYPE_NAME (type) == 0)
5177 warning ("case value `%ld' not in enumerated type",
5178 (long) TREE_INT_CST_LOW (n->low));
5179 else
5180 warning ("case value `%ld' not in enumerated type `%s'",
5181 (long) TREE_INT_CST_LOW (n->low),
5182 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
5183 == IDENTIFIER_NODE)
5184 ? TYPE_NAME (type)
5185 : DECL_NAME (TYPE_NAME (type))));
5186 }
5187 if (!tree_int_cst_equal (n->low, n->high))
5188 {
5189 for (chain = TYPE_VALUES (type);
5190 chain && !tree_int_cst_equal (n->high, TREE_VALUE (chain));
5191 chain = TREE_CHAIN (chain))
5192 ;
5193
5194 if (!chain)
5195 {
5196 if (TYPE_NAME (type) == 0)
5197 warning ("case value `%ld' not in enumerated type",
5198 (long) TREE_INT_CST_LOW (n->high));
5199 else
5200 warning ("case value `%ld' not in enumerated type `%s'",
5201 (long) TREE_INT_CST_LOW (n->high),
5202 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
5203 == IDENTIFIER_NODE)
5204 ? TYPE_NAME (type)
5205 : DECL_NAME (TYPE_NAME (type))));
5206 }
5207 }
5208 }
5209
5210 #if 0
5211 /* ??? This optimization is disabled because it causes valid programs to
5212 fail. ANSI C does not guarantee that an expression with enum type
5213 will have a value that is the same as one of the enumeration literals. */
5214
5215 /* If all values were found as case labels, make one of them the default
5216 label. Thus, this switch will never fall through. We arbitrarily pick
5217 the last one to make the default since this is likely the most
5218 efficient choice. */
5219
5220 if (all_values)
5221 {
5222 for (l = &case_stack->data.case_stmt.case_list;
5223 (*l)->right != 0;
5224 l = &(*l)->right)
5225 ;
5226
5227 case_stack->data.case_stmt.default_label = (*l)->code_label;
5228 *l = 0;
5229 }
5230 #endif /* 0 */
5231 }
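/* Illustrative diagnostics this check can produce under -Wswitch
   (assuming no default label, since the caller skips the check
   otherwise):

	enum e { RED, GREEN, BLUE } v;
	switch (v) { case RED: break; case GREEN: break; }
	  warning: enumeration value `BLUE' not handled in switch
	switch (v) { case 7: break; }
	  warning: case value `7' not in enumerated type `e'
  */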
5232
5233 \f
5234 /* Terminate a case (Pascal) or switch (C) statement
5235 in which ORIG_INDEX is the expression to be tested.
5236 Generate the code to test it and jump to the right place. */
5237
5238 void
5239 expand_end_case (orig_index)
5240 tree orig_index;
5241 {
5242 tree minval = NULL_TREE, maxval = NULL_TREE, range = NULL_TREE, orig_minval;
5243 rtx default_label = 0;
5244 register struct case_node *n;
5245 unsigned int count;
5246 rtx index;
5247 rtx table_label;
5248 int ncases;
5249 rtx *labelvec;
5250 register int i;
5251 rtx before_case;
5252 register struct nesting *thiscase = case_stack;
5253 tree index_expr, index_type;
5254 int unsignedp;
5255
5256 /* Don't crash due to previous errors. */
5257 if (thiscase == NULL)
5258 return;
5259
5260 table_label = gen_label_rtx ();
5261 index_expr = thiscase->data.case_stmt.index_expr;
5262 index_type = TREE_TYPE (index_expr);
5263 unsignedp = TREE_UNSIGNED (index_type);
5264
5265 do_pending_stack_adjust ();
5266
5267 /* This might get a spurious warning in the presence of a syntax error;
5268 it could be fixed by moving the call to check_seenlabel after the
5269 check for error_mark_node, and copying the code of check_seenlabel that
5270 deals with case_stack->data.case_stmt.line_number_status /
5271 restore_line_number_status in front of the call to end_cleanup_deferral.
5272 However, this might miss some useful warnings in the presence of
5273 non-syntax errors. */
5274 check_seenlabel ();
5275
5276 /* An ERROR_MARK occurs for various reasons including invalid data type. */
5277 if (index_type != error_mark_node)
5278 {
5279 /* If switch expression was an enumerated type, check that all
5280 enumeration literals are covered by the cases.
5281 No sense trying this if there's a default case, however. */
5282
5283 if (!thiscase->data.case_stmt.default_label
5284 && TREE_CODE (TREE_TYPE (orig_index)) == ENUMERAL_TYPE
5285 && TREE_CODE (index_expr) != INTEGER_CST)
5286 check_for_full_enumeration_handling (TREE_TYPE (orig_index));
5287
5288 /* If we don't have a default-label, create one here,
5289 after the body of the switch. */
5290 if (thiscase->data.case_stmt.default_label == 0)
5291 {
5292 thiscase->data.case_stmt.default_label
5293 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
5294 expand_label (thiscase->data.case_stmt.default_label);
5295 }
5296 default_label = label_rtx (thiscase->data.case_stmt.default_label);
5297
5298 before_case = get_last_insn ();
5299
5300 if (thiscase->data.case_stmt.case_list
5301 && thiscase->data.case_stmt.case_list->left)
5302 thiscase->data.case_stmt.case_list
5303 = case_tree2list (thiscase->data.case_stmt.case_list, 0);
5304
5305 /* Simplify the case-list before we count it. */
5306 group_case_nodes (thiscase->data.case_stmt.case_list);
5307
5308 /* Get upper and lower bounds of case values.
5309 Also convert all the case values to the index expr's data type. */
5310
5311 count = 0;
5312 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5313 {
5314 /* Check that low and high label values are integers. */
5315 if (TREE_CODE (n->low) != INTEGER_CST)
5316 abort ();
5317 if (TREE_CODE (n->high) != INTEGER_CST)
5318 abort ();
5319
5320 n->low = convert (index_type, n->low);
5321 n->high = convert (index_type, n->high);
5322
5323 /* Count the elements and track the largest and smallest
5324 of them (treating them as signed even if they are not). */
5325 if (count++ == 0)
5326 {
5327 minval = n->low;
5328 maxval = n->high;
5329 }
5330 else
5331 {
5332 if (INT_CST_LT (n->low, minval))
5333 minval = n->low;
5334 if (INT_CST_LT (maxval, n->high))
5335 maxval = n->high;
5336 }
5337 /* A range counts double, since it requires two compares. */
5338 if (! tree_int_cst_equal (n->low, n->high))
5339 count++;
5340 }
5341
5342 orig_minval = minval;
5343
5344 /* Compute span of values. */
5345 if (count != 0)
5346 range = fold (build (MINUS_EXPR, index_type, maxval, minval));
5347
5348 end_cleanup_deferral ();
5349
5350 if (count == 0)
5351 {
5352 expand_expr (index_expr, const0_rtx, VOIDmode, 0);
5353 emit_queue ();
5354 emit_jump (default_label);
5355 }
5356
5357 /* If the range of values is much bigger than the number of values,
5358 make a sequence of conditional branches instead of a dispatch.
5359 Also do it this way if the switch index is a constant,
5360 because then we can optimize it. */
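/* For instance (an illustration of the tests below, other conditions
   permitting): cases 1 through 6 (count 6, range 5) are dense enough
   for a dispatch table, while cases 1, 100 and 10000 (count 3,
   range 9999) fall to the compare-and-branch code, since the count is
   under the threshold and the range exceeds 10 * count. */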
5361
5362 #ifndef CASE_VALUES_THRESHOLD
5363 #ifdef HAVE_casesi
5364 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
5365 #else
5366 /* If the machine does not have a case insn that compares the
5367 bounds, dispatch tables carry extra overhead,
5368 which raises the threshold for using them. */
5369 #define CASE_VALUES_THRESHOLD 5
5370 #endif /* HAVE_casesi */
5371 #endif /* CASE_VALUES_THRESHOLD */
5372
5373 else if (count < CASE_VALUES_THRESHOLD
5374 || compare_tree_int (range, 10 * count) > 0
5375 /* RANGE may be signed, and really large ranges will show up
5376 as negative numbers. */
5377 || compare_tree_int (range, 0) < 0
5378 #ifndef ASM_OUTPUT_ADDR_DIFF_ELT
5379 || flag_pic
5380 #endif
5381 || TREE_CODE (index_expr) == INTEGER_CST
5382 /* These will reduce to a constant. */
5383 || (TREE_CODE (index_expr) == CALL_EXPR
5384 && TREE_CODE (TREE_OPERAND (index_expr, 0)) == ADDR_EXPR
5385 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == FUNCTION_DECL
5386 && DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == BUILT_IN_NORMAL
5387 && DECL_FUNCTION_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == BUILT_IN_CLASSIFY_TYPE)
5388 || (TREE_CODE (index_expr) == COMPOUND_EXPR
5389 && TREE_CODE (TREE_OPERAND (index_expr, 1)) == INTEGER_CST))
5390 {
5391 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5392
5393 /* If the index is a short or char for which we do not have
5394 an insn to handle comparisons directly, convert it to
5395 a full integer now, rather than letting each comparison
5396 generate the conversion. */
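/* For example, on a hypothetical machine with no HImode compare insn,
   an HImode index is widened to SImode once here instead of before
   every comparison in the decision tree. */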
5397
5398 if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
5399 && (cmp_optab->handlers[(int) GET_MODE (index)].insn_code
5400 == CODE_FOR_nothing))
5401 {
5402 enum machine_mode wider_mode;
5403 for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
5404 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
5405 if (cmp_optab->handlers[(int) wider_mode].insn_code
5406 != CODE_FOR_nothing)
5407 {
5408 index = convert_to_mode (wider_mode, index, unsignedp);
5409 break;
5410 }
5411 }
5412
5413 emit_queue ();
5414 do_pending_stack_adjust ();
5415
5416 index = protect_from_queue (index, 0);
5417 if (GET_CODE (index) == MEM)
5418 index = copy_to_reg (index);
5419 if (GET_CODE (index) == CONST_INT
5420 || TREE_CODE (index_expr) == INTEGER_CST)
5421 {
5422 /* Make a tree node with the proper constant value
5423 if we don't already have one. */
5424 if (TREE_CODE (index_expr) != INTEGER_CST)
5425 {
5426 index_expr
5427 = build_int_2 (INTVAL (index),
5428 unsignedp || INTVAL (index) >= 0 ? 0 : -1);
5429 index_expr = convert (index_type, index_expr);
5430 }
5431
5432 /* For constant index expressions we need only
5433 issue an unconditional branch to the appropriate
5434 target code. The job of removing any unreachable
5435 code is left to the optimization phase if the
5436 "-O" option is specified. */
5437 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5438 if (! tree_int_cst_lt (index_expr, n->low)
5439 && ! tree_int_cst_lt (n->high, index_expr))
5440 break;
5441
5442 if (n)
5443 emit_jump (label_rtx (n->code_label));
5444 else
5445 emit_jump (default_label);
5446 }
5447 else
5448 {
5449 /* If the index expression is not constant we generate
5450 a binary decision tree to select the appropriate
5451 target code. This is done as follows:
5452
5453 The list of cases is rearranged into a binary tree,
5454 nearly optimal assuming equal probability for each case.
5455
5456 The tree is transformed into RTL, eliminating
5457 redundant test conditions at the same time.
5458
5459 If program flow could reach the end of the
5460 decision tree an unconditional jump to the
5461 default code is emitted. */
5462
5463 use_cost_table
5464 = (TREE_CODE (TREE_TYPE (orig_index)) != ENUMERAL_TYPE
5465 && estimate_case_costs (thiscase->data.case_stmt.case_list));
5466 balance_case_nodes (&thiscase->data.case_stmt.case_list,
5467 NULL_PTR);
5468 emit_case_nodes (index, thiscase->data.case_stmt.case_list,
5469 default_label, index_type);
5470 emit_jump_if_reachable (default_label);
5471 }
5472 }
5473 else
5474 {
5475 int win = 0;
5476 #ifdef HAVE_casesi
5477 if (HAVE_casesi)
5478 {
5479 enum machine_mode index_mode = SImode;
5480 int index_bits = GET_MODE_BITSIZE (index_mode);
5481 rtx op1, op2;
5482 enum machine_mode op_mode;
5483
5484 /* Convert the index to SImode. */
5485 if (GET_MODE_BITSIZE (TYPE_MODE (index_type))
5486 > GET_MODE_BITSIZE (index_mode))
5487 {
5488 enum machine_mode omode = TYPE_MODE (index_type);
5489 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
5490
5491 /* We must handle the endpoints in the original mode. */
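/* E.g., a DImode index on a 32-bit target is range-checked in
   DImode first; only an in-range value is truncated to SImode. */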
5492 index_expr = build (MINUS_EXPR, index_type,
5493 index_expr, minval);
5494 minval = integer_zero_node;
5495 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5496 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
5497 omode, 1, 0, default_label);
5498 /* Now we can safely truncate. */
5499 index = convert_to_mode (index_mode, index, 0);
5500 }
5501 else
5502 {
5503 if (TYPE_MODE (index_type) != index_mode)
5504 {
5505 index_expr = convert (type_for_size (index_bits, 0),
5506 index_expr);
5507 index_type = TREE_TYPE (index_expr);
5508 }
5509
5510 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5511 }
5512 emit_queue ();
5513 index = protect_from_queue (index, 0);
5514 do_pending_stack_adjust ();
5515
5516 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
5517 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
5518 (index, op_mode))
5519 index = copy_to_mode_reg (op_mode, index);
5520
5521 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
5522
5523 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
5524 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
5525 (op1, op_mode))
5526 op1 = copy_to_mode_reg (op_mode, op1);
5527
5528 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
5529
5530 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
5531 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
5532 (op2, op_mode))
5533 op2 = copy_to_mode_reg (op_mode, op2);
5534
5535 emit_jump_insn (gen_casesi (index, op1, op2,
5536 table_label, default_label));
5537 win = 1;
5538 }
5539 #endif
5540 #ifdef HAVE_tablejump
5541 if (! win && HAVE_tablejump)
5542 {
5543 index_type = thiscase->data.case_stmt.nominal_type;
5544 index_expr = fold (build (MINUS_EXPR, index_type,
5545 convert (index_type, index_expr),
5546 convert (index_type, minval)));
5547 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5548 emit_queue ();
5549 index = protect_from_queue (index, 0);
5550 do_pending_stack_adjust ();
5551
5552 do_tablejump (index, TYPE_MODE (index_type),
5553 expand_expr (range, NULL_RTX, VOIDmode, 0),
5554 table_label, default_label);
5555 win = 1;
5556 }
5557 #endif
5558 if (! win)
5559 abort ();
5560
5561 /* Get table of labels to jump to, in order of case index. */
5562
5563 ncases = TREE_INT_CST_LOW (range) + 1;
5564 labelvec = (rtx *) alloca (ncases * sizeof (rtx));
5565 bzero ((char *) labelvec, ncases * sizeof (rtx));
5566
5567 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5568 {
5569 register HOST_WIDE_INT i
5570 = TREE_INT_CST_LOW (n->low) - TREE_INT_CST_LOW (orig_minval);
5571
5572 while (1)
5573 {
5574 labelvec[i]
5575 = gen_rtx_LABEL_REF (Pmode, label_rtx (n->code_label));
5576 if (i + TREE_INT_CST_LOW (orig_minval)
5577 == TREE_INT_CST_LOW (n->high))
5578 break;
5579 i++;
5580 }
5581 }
5582
5583 /* Fill in the gaps with the default. */
5584 for (i = 0; i < ncases; i++)
5585 if (labelvec[i] == 0)
5586 labelvec[i] = gen_rtx_LABEL_REF (Pmode, default_label);
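/* As a worked example (hypothetical case values): with cases 2, 3
   and 5, minval is 2, range is 3 and ncases is 4, so the vector
   built above is

	labelvec[0] = label for case 2
	labelvec[1] = label for case 3
	labelvec[2] = default_label	(the gap at value 4)
	labelvec[3] = label for case 5  */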
5587
5588 /* Output the table. */
5589 emit_label (table_label);
5590
5591 if (CASE_VECTOR_PC_RELATIVE || flag_pic)
5592 emit_jump_insn (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE,
5593 gen_rtx_LABEL_REF (Pmode, table_label),
5594 gen_rtvec_v (ncases, labelvec),
5595 const0_rtx, const0_rtx));
5596 else
5597 emit_jump_insn (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE,
5598 gen_rtvec_v (ncases, labelvec)));
5599
5600 /* If the case insn drops through the table,
5601 after the table we must jump to the default-label.
5602 Otherwise record no drop-through after the table. */
5603 #ifdef CASE_DROPS_THROUGH
5604 emit_jump (default_label);
5605 #else
5606 emit_barrier ();
5607 #endif
5608 }
5609
5610 before_case = squeeze_notes (NEXT_INSN (before_case), get_last_insn ());
5611 reorder_insns (before_case, get_last_insn (),
5612 thiscase->data.case_stmt.start);
5613 }
5614 else
5615 end_cleanup_deferral ();
5616
5617 if (thiscase->exit_label)
5618 emit_label (thiscase->exit_label);
5619
5620 POPSTACK (case_stack);
5621
5622 free_temp_slots ();
5623 }
5624
5625 /* Convert the tree NODE into a list linked by the right field, with the left
5626 field zeroed. RIGHT is used for recursion; it is a list to be placed
5627 rightmost in the resulting list. */
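/* For example, a tree whose root is 4 with left child 2 and right
   child 6 flattens into the ascending list 2 -> 4 -> 6, linked by
   the right fields. */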
5628
5629 static struct case_node *
5630 case_tree2list (node, right)
5631 struct case_node *node, *right;
5632 {
5633 struct case_node *left;
5634
5635 if (node->right)
5636 right = case_tree2list (node->right, right);
5637
5638 node->right = right;
5639 if ((left = node->left))
5640 {
5641 node->left = 0;
5642 return case_tree2list (left, node);
5643 }
5644
5645 return node;
5646 }
5647
5648 /* Generate code to jump to LABEL if OP1 and OP2 are equal. */
5649
5650 static void
5651 do_jump_if_equal (op1, op2, label, unsignedp)
5652 rtx op1, op2, label;
5653 int unsignedp;
5654 {
5655 if (GET_CODE (op1) == CONST_INT
5656 && GET_CODE (op2) == CONST_INT)
5657 {
5658 if (INTVAL (op1) == INTVAL (op2))
5659 emit_jump (label);
5660 }
5661 else
5662 {
5663 enum machine_mode mode = GET_MODE (op1);
5664 if (mode == VOIDmode)
5665 mode = GET_MODE (op2);
5666 emit_cmp_and_jump_insns (op1, op2, EQ, NULL_RTX, mode, unsignedp,
5667 0, label);
5668 }
5669 }
5670 \f
5671 /* Not all case values are encountered equally. This function
5672 uses a heuristic to weight case labels, in cases where that
5673 looks like a reasonable thing to do.
5674
5675 Right now, all we try to guess is text, and we establish the
5676 following weights:
5677
5678 chars above space: 16
5679 digits: 16
5680 default: 12
5681 space, punct: 8
5682 tab: 4
5683 newline: 2
5684 other "\" chars: 1
5685 remaining chars: 0
5686
5687 If we find any cases in the switch that are not either -1 or in the range
5688 of valid ASCII characters, or are control characters other than those
5689 commonly used with "\", don't treat this switch as scanning text.
5690
5691 Return 1 if these nodes are suitable for cost estimation, otherwise
5692 return 0. */
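/* Thus a switch over characters such as case ' ': case 'a': case '0':
   qualifies for the text weighting, whereas one containing case 1000:
   does not, since 1000 lies outside the range -1..127. */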
5693
5694 static int
5695 estimate_case_costs (node)
5696 case_node_ptr node;
5697 {
5698 tree min_ascii = build_int_2 (-1, -1);
5699 tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
5700 case_node_ptr n;
5701 int i;
5702
5703 /* If we haven't already made the cost table, make it now. Note that the
5704 lower bound of the table is -1, not zero. */
5705
5706 if (cost_table == NULL)
5707 {
5708 cost_table = cost_table_ + 1;
5709
5710 for (i = 0; i < 128; i++)
5711 {
5712 if (ISALNUM (i))
5713 cost_table[i] = 16;
5714 else if (ISPUNCT (i))
5715 cost_table[i] = 8;
5716 else if (ISCNTRL (i))
5717 cost_table[i] = -1;
5718 }
5719
5720 cost_table[' '] = 8;
5721 cost_table['\t'] = 4;
5722 cost_table['\0'] = 4;
5723 cost_table['\n'] = 2;
5724 cost_table['\f'] = 1;
5725 cost_table['\v'] = 1;
5726 cost_table['\b'] = 1;
5727 }
5728
5729 /* See if all the case expressions look like text. It is text if the
5730 constant is >= -1 and the highest constant is <= 127. Do all comparisons
5731 as signed arithmetic since we don't want to ever access cost_table with a
5732 value less than -1. Also check that none of the constants in a range
5733 are strange control characters. */
5734
5735 for (n = node; n; n = n->right)
5736 {
5737 if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
5738 return 0;
5739
5740 for (i = (HOST_WIDE_INT) TREE_INT_CST_LOW (n->low);
5741 i <= (HOST_WIDE_INT) TREE_INT_CST_LOW (n->high); i++)
5742 if (cost_table[i] < 0)
5743 return 0;
5744 }
5745
5746 /* All interesting values are within the range of interesting
5747 ASCII characters. */
5748 return 1;
5749 }
5750
5751 /* Scan an ordered list of case nodes
5752 combining those with consecutive values or ranges.
5753
5754 E.g., three separate entries 1: 2: 3: become one entry 1..3: */
5755
5756 static void
5757 group_case_nodes (head)
5758 case_node_ptr head;
5759 {
5760 case_node_ptr node = head;
5761
5762 while (node)
5763 {
5764 rtx lb = next_real_insn (label_rtx (node->code_label));
5765 rtx lb2;
5766 case_node_ptr np = node;
5767
5768 /* Try to group the successors of NODE with NODE. */
5769 while (((np = np->right) != 0)
5770 /* Do they jump to the same place? */
5771 && ((lb2 = next_real_insn (label_rtx (np->code_label))) == lb
5772 || (lb != 0 && lb2 != 0
5773 && simplejump_p (lb)
5774 && simplejump_p (lb2)
5775 && rtx_equal_p (SET_SRC (PATTERN (lb)),
5776 SET_SRC (PATTERN (lb2)))))
5777 /* Are their ranges consecutive? */
5778 && tree_int_cst_equal (np->low,
5779 fold (build (PLUS_EXPR,
5780 TREE_TYPE (node->high),
5781 node->high,
5782 integer_one_node)))
5783 /* An overflow is not consecutive. */
5784 && tree_int_cst_lt (node->high,
5785 fold (build (PLUS_EXPR,
5786 TREE_TYPE (node->high),
5787 node->high,
5788 integer_one_node))))
5789 {
5790 node->high = np->high;
5791 }
5792 /* NP is the first node after NODE which can't be grouped with it.
5793 Delete the nodes in between, and move on to that node. */
5794 node->right = np;
5795 node = np;
5796 }
5797 }
5798
5799 /* Take an ordered list of case nodes
5800 and transform them into a near optimal binary tree,
5801 on the assumption that any target code selection value is as
5802 likely as any other.
5803
5804 The transformation is performed by splitting the ordered
5805 list into two equal sections plus a pivot. The parts are
5806 then attached to the pivot as left and right branches. Each
5807 branch is then transformed recursively. */
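/* For example, the ordered list 1 -> 2 -> 3 -> 4 -> 5 (with no cost
   table in use) is split at the pivot 3, giving

		3
	       / \
	      1   4
	       \   \
	        2   5

   and each half is then balanced recursively. */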
5808
5809 static void
5810 balance_case_nodes (head, parent)
5811 case_node_ptr *head;
5812 case_node_ptr parent;
5813 {
5814 register case_node_ptr np;
5815
5816 np = *head;
5817 if (np)
5818 {
5819 int cost = 0;
5820 int i = 0;
5821 int ranges = 0;
5822 register case_node_ptr *npp;
5823 case_node_ptr left;
5824
5825 /* Count the number of entries on this branch. Also count the ranges. */
5826
5827 while (np)
5828 {
5829 if (!tree_int_cst_equal (np->low, np->high))
5830 {
5831 ranges++;
5832 if (use_cost_table)
5833 cost += cost_table[TREE_INT_CST_LOW (np->high)];
5834 }
5835
5836 if (use_cost_table)
5837 cost += cost_table[TREE_INT_CST_LOW (np->low)];
5838
5839 i++;
5840 np = np->right;
5841 }
5842
5843 if (i > 2)
5844 {
5845 /* Split this list if it is long enough for that to help. */
5846 npp = head;
5847 left = *npp;
5848 if (use_cost_table)
5849 {
5850 /* Find the place in the list that bisects the list's total cost;
5851 here I is set to half the total cost. */
5852 int n_moved = 0;
5853 i = (cost + 1) / 2;
5854 while (1)
5855 {
5856 /* Skip nodes while their cost does not reach that amount. */
5857 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
5858 i -= cost_table[TREE_INT_CST_LOW ((*npp)->high)];
5859 i -= cost_table[TREE_INT_CST_LOW ((*npp)->low)];
5860 if (i <= 0)
5861 break;
5862 npp = &(*npp)->right;
5863 n_moved += 1;
5864 }
5865 if (n_moved == 0)
5866 {
5867 /* Leave this branch lopsided, but optimize left-hand
5868 side and fill in `parent' fields for right-hand side. */
5869 np = *head;
5870 np->parent = parent;
5871 balance_case_nodes (&np->left, np);
5872 for (; np->right; np = np->right)
5873 np->right->parent = np;
5874 return;
5875 }
5876 }
5877 /* If there are just three nodes, split at the middle one. */
5878 else if (i == 3)
5879 npp = &(*npp)->right;
5880 else
5881 {
5882 /* Find the place in the list that bisects the list's total cost,
5883 where ranges count as 2;
5884 here I is set to half the total cost. */
5885 i = (i + ranges + 1) / 2;
5886 while (1)
5887 {
5888 /* Skip nodes while their cost does not reach that amount. */
5889 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
5890 i--;
5891 i--;
5892 if (i <= 0)
5893 break;
5894 npp = &(*npp)->right;
5895 }
5896 }
5897 *head = np = *npp;
5898 *npp = 0;
5899 np->parent = parent;
5900 np->left = left;
5901
5902 /* Optimize each of the two split parts. */
5903 balance_case_nodes (&np->left, np);
5904 balance_case_nodes (&np->right, np);
5905 }
5906 else
5907 {
5908 /* Else leave this branch as one level,
5909 but fill in `parent' fields. */
5910 np = *head;
5911 np->parent = parent;
5912 for (; np->right; np = np->right)
5913 np->right->parent = np;
5914 }
5915 }
5916 }
5917 \f
5918 /* Search the parent sections of the case node tree
5919 to see if a test for the lower bound of NODE would be redundant.
5920 INDEX_TYPE is the type of the index expression.
5921
5922 The instructions to generate the case decision tree are
5923 output in the same order as nodes are processed, so it is
5924 known that if a parent node checks the range of the current
5925 node minus one, the current node is bounded at its lower
5926 span. Thus the test would be redundant. */
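/* Concretely (a hypothetical case): if this node covers 50..60 and
   some parent already branched away on the value 49, the test
   `index >= 50' here would be redundant. */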
5927
5928 static int
5929 node_has_low_bound (node, index_type)
5930 case_node_ptr node;
5931 tree index_type;
5932 {
5933 tree low_minus_one;
5934 case_node_ptr pnode;
5935
5936 /* If the lower bound of this node is the lowest value in the index type,
5937 we need not test it. */
5938
5939 if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
5940 return 1;
5941
5942 /* If this node has a left branch, the value at the left must be less
5943 than that at this node, so it cannot be bounded at the bottom and
5944 we need not bother testing any further. */
5945
5946 if (node->left)
5947 return 0;
5948
5949 low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
5950 node->low, integer_one_node));
5951
5952 /* If the subtraction above overflowed, we can't verify anything.
5953 Otherwise, look for a parent that tests our value - 1. */
5954
5955 if (! tree_int_cst_lt (low_minus_one, node->low))
5956 return 0;
5957
5958 for (pnode = node->parent; pnode; pnode = pnode->parent)
5959 if (tree_int_cst_equal (low_minus_one, pnode->high))
5960 return 1;
5961
5962 return 0;
5963 }
5964
5965 /* Search the parent sections of the case node tree
5966 to see if a test for the upper bound of NODE would be redundant.
5967 INDEX_TYPE is the type of the index expression.
5968
5969 The instructions to generate the case decision tree are
5970 output in the same order as nodes are processed, so it is
5971 known that if a parent node checks the range of the current
5972 node plus one, the current node is bounded at its upper
5973 span. Thus the test would be redundant. */
5974
5975 static int
5976 node_has_high_bound (node, index_type)
5977 case_node_ptr node;
5978 tree index_type;
5979 {
5980 tree high_plus_one;
5981 case_node_ptr pnode;
5982
5983 /* If there is no upper bound, obviously no test is needed. */
5984
5985 if (TYPE_MAX_VALUE (index_type) == NULL)
5986 return 1;
5987
5988 /* If the upper bound of this node is the highest value in the type
5989 of the index expression, we need not test against it. */
5990
5991 if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
5992 return 1;
5993
5994 /* If this node has a right branch, the value at the right must be greater
5995 than that at this node, so it cannot be bounded at the top and
5996 we need not bother testing any further. */
5997
5998 if (node->right)
5999 return 0;
6000
6001 high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
6002 node->high, integer_one_node));
6003
6004 /* If the addition above overflowed, we can't verify anything.
6005 Otherwise, look for a parent that tests our value + 1. */
6006
6007 if (! tree_int_cst_lt (node->high, high_plus_one))
6008 return 0;
6009
6010 for (pnode = node->parent; pnode; pnode = pnode->parent)
6011 if (tree_int_cst_equal (high_plus_one, pnode->low))
6012 return 1;
6013
6014 return 0;
6015 }
6016
6017 /* Search the parent sections of the
6018 case node tree to see if both tests for the upper and lower
6019 bounds of NODE would be redundant. */
6020
6021 static int
6022 node_is_bounded (node, index_type)
6023 case_node_ptr node;
6024 tree index_type;
6025 {
6026 return (node_has_low_bound (node, index_type)
6027 && node_has_high_bound (node, index_type));
6028 }
6029
6030 /* Emit an unconditional jump to LABEL unless it would be dead code. */
6031
6032 static void
6033 emit_jump_if_reachable (label)
6034 rtx label;
6035 {
6036 if (GET_CODE (get_last_insn ()) != BARRIER)
6037 emit_jump (label);
6038 }
6039 \f
6040 /* Emit step-by-step code to select a case for the value of INDEX.
6041 The thus generated decision tree follows the form of the
6042 case-node binary tree NODE, whose nodes represent test conditions.
6043 INDEX_TYPE is the type of the index of the switch.
6044
6045 Care is taken to prune redundant tests from the decision tree
6046 by detecting any boundary conditions already checked by
6047 emitted rtx. (See node_has_high_bound, node_has_low_bound
6048 and node_is_bounded, above.)
6049
6050 Where the test conditions can be shown to be redundant we emit
6051 an unconditional jump to the target code. As a further
6052 optimization, the subordinates of a tree node are examined to
6053 check for bounded nodes. In this case conditional and/or
6054 unconditional jumps as a result of the boundary check for the
6055 current node are arranged to target the subordinate's associated
6056 code for out-of-bound conditions on the current node.
6057
6058 We can assume that when control reaches the code generated here,
6059 the index value has already been compared with the parents
6060 of this node, and determined to be on the same side of each parent
6061 as this node is. Thus, if this node tests for the value 51,
6062 and a parent tested for 52, we don't need to consider
6063 the possibility of a value greater than 51. If another parent
6064 tests for the value 50, then this node need not test anything. */
6065
6066 static void
6067 emit_case_nodes (index, node, default_label, index_type)
6068 rtx index;
6069 case_node_ptr node;
6070 rtx default_label;
6071 tree index_type;
6072 {
6073 /* If INDEX has an unsigned type, we must make unsigned branches. */
6074 int unsignedp = TREE_UNSIGNED (index_type);
6075 enum machine_mode mode = GET_MODE (index);
6076
6077 /* See if our parents have already tested everything for us.
6078 If they have, emit an unconditional jump for this node. */
6079 if (node_is_bounded (node, index_type))
6080 emit_jump (label_rtx (node->code_label));
6081
6082 else if (tree_int_cst_equal (node->low, node->high))
6083 {
6084 /* Node is single valued. First see if the index expression matches
6085 this node and then check our children, if any. */
6086
6087 do_jump_if_equal (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
6088 label_rtx (node->code_label), unsignedp);
6089
6090 if (node->right != 0 && node->left != 0)
6091 {
6092 /* This node has children on both sides.
6093 Dispatch to one side or the other
6094 by comparing the index value with this node's value.
6095 If one subtree is bounded, check that one first,
6096 so we can avoid real branches in the tree. */
6097
6098 if (node_is_bounded (node->right, index_type))
6099 {
6100 emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
6101 VOIDmode, 0),
6102 GT, NULL_RTX, mode, unsignedp, 0,
6103 label_rtx (node->right->code_label));
6104 emit_case_nodes (index, node->left, default_label, index_type);
6105 }
6106
6107 else if (node_is_bounded (node->left, index_type))
6108 {
6109 emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
6110 VOIDmode, 0),
6111 LT, NULL_RTX, mode, unsignedp, 0,
6112 label_rtx (node->left->code_label));
6113 emit_case_nodes (index, node->right, default_label, index_type);
6114 }
6115
6116 else
6117 {
6118 /* Neither node is bounded. First distinguish the two sides;
6119 then emit the code for one side at a time. */
6120
6121 tree test_label
6122 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
6123
6124 /* See if the value is on the right. */
6125 emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
6126 VOIDmode, 0),
6127 GT, NULL_RTX, mode, unsignedp, 0,
6128 label_rtx (test_label));
6129
6130 /* Value must be on the left.
6131 Handle the left-hand subtree. */
6132 emit_case_nodes (index, node->left, default_label, index_type);
6133 /* If left-hand subtree does nothing,
6134 go to default. */
6135 emit_jump_if_reachable (default_label);
6136
6137 /* Code branches here for the right-hand subtree. */
6138 expand_label (test_label);
6139 emit_case_nodes (index, node->right, default_label, index_type);
6140 }
6141 }
6142
6143 else if (node->right != 0 && node->left == 0)
6144 {
6145 /* Here we have a right child but no left, so we issue a conditional
6146 branch to default and process the right child.
6147
6148 Omit the conditional branch to default if all it would guard is a
6149 single right child; it costs too much space to save so little time. */
6150
6151 if (node->right->right || node->right->left
6152 || !tree_int_cst_equal (node->right->low, node->right->high))
6153 {
6154 if (!node_has_low_bound (node, index_type))
6155 {
6156 emit_cmp_and_jump_insns (index, expand_expr (node->high,
6157 NULL_RTX,
6158 VOIDmode, 0),
6159 LT, NULL_RTX, mode, unsignedp, 0,
6160 default_label);
6161 }
6162
6163 emit_case_nodes (index, node->right, default_label, index_type);
6164 }
6165 else
6166 /* We cannot process node->right normally
6167 since we haven't ruled out the numbers less than
6168 this node's value. So handle node->right explicitly. */
6169 do_jump_if_equal (index,
6170 expand_expr (node->right->low, NULL_RTX,
6171 VOIDmode, 0),
6172 label_rtx (node->right->code_label), unsignedp);
6173 }
6174
6175 else if (node->right == 0 && node->left != 0)
6176 {
6177 /* Just one subtree, on the left. */
6178
6179 #if 0 /* The following code and comment were formerly part
6180 of the condition here, but they didn't work
6181 and I don't understand what the idea was. -- rms. */
6182 /* If our "most probable entry" is less probable
6183 than the default label, emit a jump to
6184 the default label using condition codes
6185 already lying around. With no right branch,
6186 a branch-greater-than will get us to the default
6187 label correctly. */
6188 if (use_cost_table
6189 && cost_table[TREE_INT_CST_LOW (node->high)] < 12)
6190 ;
6191 #endif /* 0 */
6192 if (node->left->left || node->left->right
6193 || !tree_int_cst_equal (node->left->low, node->left->high))
6194 {
6195 if (!node_has_high_bound (node, index_type))
6196 {
6197 emit_cmp_and_jump_insns (index, expand_expr (node->high,
6198 NULL_RTX,
6199 VOIDmode, 0),
6200 GT, NULL_RTX, mode, unsignedp, 0,
6201 default_label);
6202 }
6203
6204 emit_case_nodes (index, node->left, default_label, index_type);
6205 }
6206 else
6207 /* We cannot process node->left normally
6208 since we haven't ruled out the numbers greater than
6209 this node's value. So handle node->left explicitly. */
6210 do_jump_if_equal (index,
6211 expand_expr (node->left->low, NULL_RTX,
6212 VOIDmode, 0),
6213 label_rtx (node->left->code_label), unsignedp);
6214 }
6215 }
6216 else
6217 {
6218 /* Node is a range. These cases are very similar to those for a single
6219 value, except that we do not start by testing whether this node
6220 is the one to branch to. */
6221
6222 if (node->right != 0 && node->left != 0)
6223 {
6224 /* Node has subtrees on both sides.
6225 If the right-hand subtree is bounded,
6226 test for it first, since we can go straight there.
6227 Otherwise, we need to make a branch in the control structure,
6228 then handle the two subtrees. */
6229 tree test_label = 0;
6230
6231
6232 if (node_is_bounded (node->right, index_type))
6233 /* Right hand node is fully bounded so we can eliminate any
6234 testing and branch directly to the target code. */
6235 emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
6236 VOIDmode, 0),
6237 GT, NULL_RTX, mode, unsignedp, 0,
6238 label_rtx (node->right->code_label));
6239 else
6240 {
6241 /* Right hand node requires testing.
6242 Branch to a label where we will handle it later. */
6243
6244 test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
6245 emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
6246 VOIDmode, 0),
6247 GT, NULL_RTX, mode, unsignedp, 0,
6248 label_rtx (test_label));
6249 }
6250
6251 /* Value belongs to this node or to the left-hand subtree. */
6252
6253 emit_cmp_and_jump_insns (index, expand_expr (node->low, NULL_RTX,
6254 VOIDmode, 0),
6255 GE, NULL_RTX, mode, unsignedp, 0,
6256 label_rtx (node->code_label));
6257
6258 /* Handle the left-hand subtree. */
6259 emit_case_nodes (index, node->left, default_label, index_type);
6260
6261 /* If right node had to be handled later, do that now. */
6262
6263 if (test_label)
6264 {
6265 /* If the left-hand subtree fell through,
6266 don't let it fall into the right-hand subtree. */
6267 emit_jump_if_reachable (default_label);
6268
6269 expand_label (test_label);
6270 emit_case_nodes (index, node->right, default_label, index_type);
6271 }
6272 }
6273
6274 else if (node->right != 0 && node->left == 0)
6275 {
6276 /* Deal with values to the left of this node,
6277 if they are possible. */
6278 if (!node_has_low_bound (node, index_type))
6279 {
6280 emit_cmp_and_jump_insns (index, expand_expr (node->low, NULL_RTX,
6281 VOIDmode, 0),
6282 LT, NULL_RTX, mode, unsignedp, 0,
6283 default_label);
6284 }
6285
6286 /* Value belongs to this node or to the right-hand subtree. */
6287
6288 emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
6289 VOIDmode, 0),
6290 LE, NULL_RTX, mode, unsignedp, 0,
6291 label_rtx (node->code_label));
6292
6293 emit_case_nodes (index, node->right, default_label, index_type);
6294 }
6295
6296 else if (node->right == 0 && node->left != 0)
6297 {
6298 /* Deal with values to the right of this node,
6299 if they are possible. */
6300 if (!node_has_high_bound (node, index_type))
6301 {
6302 emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
6303 VOIDmode, 0),
6304 GT, NULL_RTX, mode, unsignedp, 0,
6305 default_label);
6306 }
6307
6308 /* Value belongs to this node or to the left-hand subtree. */
6309
6310 emit_cmp_and_jump_insns (index, expand_expr (node->low, NULL_RTX,
6311 VOIDmode, 0),
6312 GE, NULL_RTX, mode, unsignedp, 0,
6313 label_rtx (node->code_label));
6314
6315 emit_case_nodes (index, node->left, default_label, index_type);
6316 }
6317
6318 else
6319 {
6320 /* Node has no children, so we check the low and high bounds to remove
6321 redundant tests. At most one of the bounds can be known,
6322 since otherwise this node is bounded--a case tested already. */
6323
6324 if (!node_has_high_bound (node, index_type))
6325 {
6326 emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
6327 VOIDmode, 0),
6328 GT, NULL_RTX, mode, unsignedp, 0,
6329 default_label);
6330 }
6331
6332 if (!node_has_low_bound (node, index_type))
6333 {
6334 emit_cmp_and_jump_insns (index, expand_expr (node->low, NULL_RTX,
6335 VOIDmode, 0),
6336 LT, NULL_RTX, mode, unsignedp, 0,
6337 default_label);
6338 }
6339
6340 emit_jump (label_rtx (node->code_label));
6341 }
6342 }
6343 }
6344 \f