1 /* Expands front end tree to back end RTL for GNU C-Compiler
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
3 1998, 1999, 2000, 2001, 2002 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 /* This file handles the generation of rtl code from tree structure
23 above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
24 It also creates the rtl expressions for parameters and auto variables
25 and has full responsibility for allocating stack slots.
26
27 The functions whose names start with `expand_' are called by the
28 parser to generate RTL instructions for various kinds of constructs.
29
30 Some control and binding constructs require calling several such
31 functions at different times. For example, a simple if-then
32 is expanded by calling `expand_start_cond' (with the condition-expression
33 as argument) before parsing the then-clause and calling `expand_end_cond'
34 after parsing the then-clause. */
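/* As an illustrative sketch (not a verbatim excerpt from any front
   end), expanding the C statement `if (cond) stmt;' might drive
   these entry points roughly like this:

       expand_start_cond (cond, 0);    -- emit the test, open construct
       ... expand RTL for `stmt' ...
       expand_end_cond ();             -- emit the join label

   The exact calls and arguments vary from one language front end
   to another.  */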
35
36 #include "config.h"
37 #include "system.h"
38
39 #include "rtl.h"
40 #include "tree.h"
41 #include "tm_p.h"
42 #include "flags.h"
43 #include "except.h"
44 #include "function.h"
45 #include "insn-config.h"
46 #include "expr.h"
47 #include "libfuncs.h"
48 #include "hard-reg-set.h"
49 #include "obstack.h"
50 #include "loop.h"
51 #include "recog.h"
52 #include "machmode.h"
53 #include "toplev.h"
54 #include "output.h"
55 #include "ggc.h"
56
57 #define obstack_chunk_alloc xmalloc
58 #define obstack_chunk_free free
59 struct obstack stmt_obstack;
60
61 /* Assume that case vectors are not pc-relative. */
62 #ifndef CASE_VECTOR_PC_RELATIVE
63 #define CASE_VECTOR_PC_RELATIVE 0
64 #endif
65 \f
66 /* Functions and data structures for expanding case statements. */
67
68 /* Case label structure, used to hold info on labels within case
69 statements. We handle "range" labels; for a single-value label
70 as in C, the high and low limits are the same.
71
72 An AVL tree of case nodes is initially created, and later transformed
73 to a list linked via the RIGHT fields in the nodes. Nodes with
74 higher case values are later in the list.
75
76 Switch statements can be output in one of two forms. A branch table
77 is used if there are more than a few labels and the labels are dense
78 within the range between the smallest and largest case value. If a
79 branch table is used, no further manipulations are done with the case
80 node chain.
81
82 The alternative to the use of a branch table is to generate a series
83 of compare and jump insns. When that is done, we use the LEFT, RIGHT,
84 and PARENT fields to hold a binary tree. Initially the tree is
85 totally unbalanced, with everything on the right. We balance the tree
86 with nodes on the left having lower case values than the parent
87 and nodes on the right having higher values. We then output the tree
88 in order. */
89
90 struct case_node
91 {
92 struct case_node *left; /* Left son in binary tree */
93 struct case_node *right; /* Right son in binary tree; also node chain */
94 struct case_node *parent; /* Parent of node in binary tree */
95 tree low; /* Lowest index value for this label */
96 tree high; /* Highest index value for this label */
97 tree code_label; /* Label to jump to when node matches */
98 int balance;
99 };
100
101 typedef struct case_node case_node;
102 typedef struct case_node *case_node_ptr;
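/* For illustration only: a GNU C range label such as `case 1 ... 3:'
   is recorded, conceptually, as a node with

       n->low  = build_int_2 (1, 0);   -- lowest value matched
       n->high = build_int_2 (3, 0);   -- highest value matched
       n->code_label = <the LABEL_DECL for that case>;

   while a plain `case 5:' has n->low equal to n->high.  This is a
   sketch; the real nodes are built later in this file.  */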
103
104 /* These are used by estimate_case_costs and balance_case_nodes. */
105
106 /* This must be a signed type; we use `short' because non-ANSI compilers lack `signed char'. */
107 static short cost_table_[129];
108 static int use_cost_table;
109 static int cost_table_initialized;
110
111 /* Special care is needed because we allow -1, but TREE_INT_CST_LOW
112 is unsigned. */
113 #define COST_TABLE(I) cost_table_[(unsigned HOST_WIDE_INT)((I) + 1)]
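/* For example, COST_TABLE (-1) expands to
   cost_table_[(unsigned HOST_WIDE_INT) ((-1) + 1)], i.e. cost_table_[0],
   so the index range -1 .. 127 maps onto the 129-element array without
   ever indexing with a negative (or sign-extended) value.  */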
114 \f
115 /* Stack of control and binding constructs we are currently inside.
116
117 These constructs begin when you call `expand_start_WHATEVER'
118 and end when you call `expand_end_WHATEVER'. This stack records
119 info about how the construct began that tells the end-function
120 what to do. It also may provide information about the construct
121 to alter the behavior of other constructs within the body.
122 For example, they may affect the behavior of C `break' and `continue'.
123
124 Each construct gets one `struct nesting' object.
125 All of these objects are chained through the `all' field.
126 `nesting_stack' points to the first object (innermost construct).
127 The position of an entry on `nesting_stack' is in its `depth' field.
128
129 Each type of construct has its own individual stack.
130 For example, loops have `loop_stack'. Each object points to the
131 next object of the same type through the `next' field.
132
133 Some constructs are visible to `break' exit-statements and others
134 are not. Which constructs are visible depends on the language.
135 Therefore, the data structure allows each construct to be visible
136 or not, according to the args given when the construct is started.
137 The construct is visible if the `exit_label' field is non-null.
138 In that case, the value should be a CODE_LABEL rtx. */
139
140 struct nesting
141 {
142 struct nesting *all;
143 struct nesting *next;
144 int depth;
145 rtx exit_label;
146 union
147 {
148 /* For conds (if-then and if-then-else statements). */
149 struct
150 {
151 /* Label for the end of the if construct.
152 There is none if EXITFLAG was not set
153 and no `else' has been seen yet. */
154 rtx endif_label;
155 /* Label for the end of this alternative.
156 This may be the end of the if or the next else/elseif. */
157 rtx next_label;
158 } cond;
159 /* For loops. */
160 struct
161 {
162 /* Label at the top of the loop; place to loop back to. */
163 rtx start_label;
164 /* Label at the end of the whole construct. */
165 rtx end_label;
166 /* Label before a jump that branches to the end of the whole
167 construct. This is where destructors go if any. */
168 rtx alt_end_label;
169 /* Label for `continue' statement to jump to;
170 this is in front of the stepper of the loop. */
171 rtx continue_label;
172 } loop;
173 /* For variable binding contours. */
174 struct
175 {
176 /* Sequence number of this binding contour within the function,
177 in order of entry. */
178 int block_start_count;
179 /* Nonzero => value to restore stack to on exit. */
180 rtx stack_level;
181 /* The NOTE that starts this contour.
182 Used by expand_goto to check whether the destination
183 is within each contour or not. */
184 rtx first_insn;
185 /* Innermost containing binding contour that has a stack level. */
186 struct nesting *innermost_stack_block;
187 /* List of cleanups to be run on exit from this contour.
188 This is a list of expressions to be evaluated.
189 The TREE_PURPOSE of each link is the ..._DECL node
190 which the cleanup pertains to. */
191 tree cleanups;
192 /* List of cleanup-lists of blocks containing this block,
193 as they were at the locus where this block appears.
194 There is an element for each containing block,
195 ordered innermost containing block first.
196 The tail of this list can be 0,
197 if all remaining elements would be empty lists.
198 The element's TREE_VALUE is the cleanup-list of that block,
199 which may be null. */
200 tree outer_cleanups;
201 /* Chain of labels defined inside this binding contour.
202 For contours that have stack levels or cleanups. */
203 struct label_chain *label_chain;
204 /* Number of function calls seen, as of start of this block. */
205 int n_function_calls;
206 /* Nonzero if this is associated with an EH region. */
207 int exception_region;
208 /* The saved target_temp_slot_level from our outer block.
209 We may reset target_temp_slot_level to be the level of
210 this block, if that is done, target_temp_slot_level
211 reverts to the saved target_temp_slot_level at the very
212 end of the block. */
213 int block_target_temp_slot_level;
214 /* True if we are currently emitting insns in an area of
215 output code that is controlled by a conditional
216 expression. This is used by the cleanup handling code to
217 generate conditional cleanup actions. */
218 int conditional_code;
219 /* A place to move the start of the exception region for any
220 of the conditional cleanups, must be at the end or after
221 the start of the last unconditional cleanup, and before any
222 conditional branch points. */
223 rtx last_unconditional_cleanup;
224 /* When in a conditional context, this is the specific
225 cleanup list associated with last_unconditional_cleanup,
226 where we place the conditionalized cleanups. */
227 tree *cleanup_ptr;
228 } block;
229 /* For switch (C) or case (Pascal) statements,
230 and also for dummies (see `expand_start_case_dummy'). */
231 struct
232 {
233 /* The insn after which the case dispatch should finally
234 be emitted. Zero for a dummy. */
235 rtx start;
236 /* A list of case labels; it is first built as an AVL tree.
237 During expand_end_case, this is converted to a list, and may be
238 rearranged into a nearly balanced binary tree. */
239 struct case_node *case_list;
240 /* Label to jump to if no case matches. */
241 tree default_label;
242 /* The expression to be dispatched on. */
243 tree index_expr;
244 /* Type that INDEX_EXPR should be converted to. */
245 tree nominal_type;
246 /* Name of this kind of statement, for warnings. */
247 const char *printname;
248 /* Used to save no_line_numbers till we see the first case label.
249 We set this to -1 when we see the first case label in this
250 case statement. */
251 int line_number_status;
252 } case_stmt;
253 } data;
254 };
255
256 /* Allocate and return a new `struct nesting'. */
257
258 #define ALLOC_NESTING() \
259 (struct nesting *) obstack_alloc (&stmt_obstack, sizeof (struct nesting))
260
261 /* Pop the nesting stack element by element until we pop off
262 the element which is at the top of STACK.
263 Update all the other stacks, popping off elements from them
264 as we pop them from nesting_stack. */
265
266 #define POPSTACK(STACK) \
267 do { struct nesting *target = STACK; \
268 struct nesting *this; \
269 do { this = nesting_stack; \
270 if (loop_stack == this) \
271 loop_stack = loop_stack->next; \
272 if (cond_stack == this) \
273 cond_stack = cond_stack->next; \
274 if (block_stack == this) \
275 block_stack = block_stack->next; \
276 if (stack_block_stack == this) \
277 stack_block_stack = stack_block_stack->next; \
278 if (case_stack == this) \
279 case_stack = case_stack->next; \
280 nesting_depth = nesting_stack->depth - 1; \
281 nesting_stack = this->all; \
282 obstack_free (&stmt_obstack, this); } \
283 while (this != target); } while (0)
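/* A hedged sketch of how these macros are used (see the real
   expand_start_... functions later in this file for details):

       struct nesting *thisloop = ALLOC_NESTING ();
       thisloop->next = loop_stack;
       thisloop->all = nesting_stack;
       thisloop->depth = ++nesting_depth;
       ... fill in thisloop->data.loop ...
       loop_stack = nesting_stack = thisloop;

   and the matching expand_end_... function eventually runs
   POPSTACK (loop_stack); to unwind everything pushed since.  */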
284 \f
285 /* In some cases it is impossible to generate code for a forward goto
286 until the label definition is seen. This happens when it may be necessary
287 for the goto to reset the stack pointer: we don't yet know how to do that.
288 So expand_goto puts an entry on this fixup list.
289 Each time a binding contour that resets the stack is exited,
290 we check each fixup.
291 If the target label has now been defined, we can insert the proper code. */
292
293 struct goto_fixup
294 {
295 /* Points to following fixup. */
296 struct goto_fixup *next;
297 /* Points to the insn before the jump insn.
298 If more code must be inserted, it goes after this insn. */
299 rtx before_jump;
300 /* The LABEL_DECL that this jump is jumping to, or 0
301 for break, continue or return. */
302 tree target;
303 /* The BLOCK for the place where this goto was found. */
304 tree context;
305 /* The CODE_LABEL rtx that this is jumping to. */
306 rtx target_rtl;
307 /* Number of binding contours started in current function
308 before the label reference. */
309 int block_start_count;
310 /* The outermost stack level that should be restored for this jump.
311 Each time a binding contour that resets the stack is exited,
312 if the target label is *not* yet defined, this slot is updated. */
313 rtx stack_level;
314 /* List of lists of cleanup expressions to be run by this goto.
315 There is one element for each block that this goto is within.
316 The tail of this list can be 0,
317 if all remaining elements would be empty.
318 The TREE_VALUE contains the cleanup list of that block as of the
319 time this goto was seen.
320 The TREE_ADDRESSABLE flag is 1 for a block that has been exited. */
321 tree cleanup_list_list;
322 };
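/* An illustrative case that needs a fixup (a sketch, not from any
   test suite):

       {
         {
           char buf[n];   -- variable size: this block has a stack level
           ...
           goto out;      -- `out' is not yet defined here
         }
        out: ;
       }

   When the goto is expanded, the label is still undefined, so
   expand_fixup queues a struct goto_fixup; when the inner contour is
   exited, fixup_gotos records the stack level that the jump must
   restore, and the code is patched in once `out' is seen.  */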
323
324 /* Within any binding contour that must restore a stack level,
325 all labels are recorded with a chain of these structures. */
326
327 struct label_chain
328 {
329 /* Points to following fixup. */
330 struct label_chain *next;
331 tree label;
332 };
333
334 struct stmt_status
335 {
336 /* Chain of all pending binding contours. */
337 struct nesting *x_block_stack;
338
339 /* If any new stacks are added here, add them to POPSTACKS too. */
340
341 /* Chain of all pending binding contours that restore stack levels
342 or have cleanups. */
343 struct nesting *x_stack_block_stack;
344
345 /* Chain of all pending conditional statements. */
346 struct nesting *x_cond_stack;
347
348 /* Chain of all pending loops. */
349 struct nesting *x_loop_stack;
350
351 /* Chain of all pending case or switch statements. */
352 struct nesting *x_case_stack;
353
354 /* Separate chain including all of the above,
355 chained through the `all' field. */
356 struct nesting *x_nesting_stack;
357
358 /* Number of entries on nesting_stack now. */
359 int x_nesting_depth;
360
361 /* Number of binding contours started so far in this function. */
362 int x_block_start_count;
363
364 /* Each time we expand an expression-statement,
365 record the expr's type and its RTL value here. */
366 tree x_last_expr_type;
367 rtx x_last_expr_value;
368
369 /* Nonzero if within a ({...}) grouping, in which case we must
370 always compute a value for each expr-stmt in case it is the last one. */
371 int x_expr_stmts_for_value;
372
373 /* Filename and line number of last line-number note,
374 whether we actually emitted it or not. */
375 const char *x_emit_filename;
376 int x_emit_lineno;
377
378 struct goto_fixup *x_goto_fixup_chain;
379 };
380
381 #define block_stack (cfun->stmt->x_block_stack)
382 #define stack_block_stack (cfun->stmt->x_stack_block_stack)
383 #define cond_stack (cfun->stmt->x_cond_stack)
384 #define loop_stack (cfun->stmt->x_loop_stack)
385 #define case_stack (cfun->stmt->x_case_stack)
386 #define nesting_stack (cfun->stmt->x_nesting_stack)
387 #define nesting_depth (cfun->stmt->x_nesting_depth)
388 #define current_block_start_count (cfun->stmt->x_block_start_count)
389 #define last_expr_type (cfun->stmt->x_last_expr_type)
390 #define last_expr_value (cfun->stmt->x_last_expr_value)
391 #define expr_stmts_for_value (cfun->stmt->x_expr_stmts_for_value)
392 #define emit_filename (cfun->stmt->x_emit_filename)
393 #define emit_lineno (cfun->stmt->x_emit_lineno)
394 #define goto_fixup_chain (cfun->stmt->x_goto_fixup_chain)
395
396 /* Nonzero if we are using EH to handle cleanups. */
397 static int using_eh_for_cleanups_p = 0;
398
399 static int n_occurrences PARAMS ((int, const char *));
400 static bool parse_input_constraint PARAMS ((const char **, int, int, int,
401 int, const char * const *,
402 bool *, bool *));
403 static void expand_goto_internal PARAMS ((tree, rtx, rtx));
404 static int expand_fixup PARAMS ((tree, rtx, rtx));
405 static rtx expand_nl_handler_label PARAMS ((rtx, rtx));
406 static void expand_nl_goto_receiver PARAMS ((void));
407 static void expand_nl_goto_receivers PARAMS ((struct nesting *));
408 static void fixup_gotos PARAMS ((struct nesting *, rtx, tree,
409 rtx, int));
410 static bool check_operand_nalternatives PARAMS ((tree, tree));
411 static bool check_unique_operand_names PARAMS ((tree, tree));
412 static tree resolve_operand_names PARAMS ((tree, tree, tree,
413 const char **));
414 static char *resolve_operand_name_1 PARAMS ((char *, tree, tree));
415 static void expand_null_return_1 PARAMS ((rtx));
416 static void expand_value_return PARAMS ((rtx));
417 static int tail_recursion_args PARAMS ((tree, tree));
418 static void expand_cleanups PARAMS ((tree, tree, int, int));
419 static void check_seenlabel PARAMS ((void));
420 static void do_jump_if_equal PARAMS ((rtx, rtx, rtx, int));
421 static int estimate_case_costs PARAMS ((case_node_ptr));
422 static void group_case_nodes PARAMS ((case_node_ptr));
423 static void balance_case_nodes PARAMS ((case_node_ptr *,
424 case_node_ptr));
425 static int node_has_low_bound PARAMS ((case_node_ptr, tree));
426 static int node_has_high_bound PARAMS ((case_node_ptr, tree));
427 static int node_is_bounded PARAMS ((case_node_ptr, tree));
428 static void emit_jump_if_reachable PARAMS ((rtx));
429 static void emit_case_nodes PARAMS ((rtx, case_node_ptr, rtx, tree));
430 static struct case_node *case_tree2list PARAMS ((case_node *, case_node *));
431 static void mark_cond_nesting PARAMS ((struct nesting *));
432 static void mark_loop_nesting PARAMS ((struct nesting *));
433 static void mark_block_nesting PARAMS ((struct nesting *));
434 static void mark_case_nesting PARAMS ((struct nesting *));
435 static void mark_case_node PARAMS ((struct case_node *));
436 static void mark_goto_fixup PARAMS ((struct goto_fixup *));
437 static void free_case_nodes PARAMS ((case_node_ptr));
438 \f
439 void
440 using_eh_for_cleanups ()
441 {
442 using_eh_for_cleanups_p = 1;
443 }
444
445 /* Mark N (known to be a cond-nesting) for GC. */
446
447 static void
448 mark_cond_nesting (n)
449 struct nesting *n;
450 {
451 while (n)
452 {
453 ggc_mark_rtx (n->exit_label);
454 ggc_mark_rtx (n->data.cond.endif_label);
455 ggc_mark_rtx (n->data.cond.next_label);
456
457 n = n->next;
458 }
459 }
460
461 /* Mark N (known to be a loop-nesting) for GC. */
462
463 static void
464 mark_loop_nesting (n)
465 struct nesting *n;
466 {
467
468 while (n)
469 {
470 ggc_mark_rtx (n->exit_label);
471 ggc_mark_rtx (n->data.loop.start_label);
472 ggc_mark_rtx (n->data.loop.end_label);
473 ggc_mark_rtx (n->data.loop.alt_end_label);
474 ggc_mark_rtx (n->data.loop.continue_label);
475
476 n = n->next;
477 }
478 }
479
480 /* Mark N (known to be a block-nesting) for GC. */
481
482 static void
483 mark_block_nesting (n)
484 struct nesting *n;
485 {
486 while (n)
487 {
488 struct label_chain *l;
489
490 ggc_mark_rtx (n->exit_label);
491 ggc_mark_rtx (n->data.block.stack_level);
492 ggc_mark_rtx (n->data.block.first_insn);
493 ggc_mark_tree (n->data.block.cleanups);
494 ggc_mark_tree (n->data.block.outer_cleanups);
495
496 for (l = n->data.block.label_chain; l != NULL; l = l->next)
497 {
498 ggc_mark (l);
499 ggc_mark_tree (l->label);
500 }
501
502 ggc_mark_rtx (n->data.block.last_unconditional_cleanup);
503
504 /* ??? cleanup_ptr never points outside the stack, does it? */
505
506 n = n->next;
507 }
508 }
509
510 /* Mark N (known to be a case-nesting) for GC. */
511
512 static void
513 mark_case_nesting (n)
514 struct nesting *n;
515 {
516 while (n)
517 {
518 ggc_mark_rtx (n->exit_label);
519 ggc_mark_rtx (n->data.case_stmt.start);
520
521 ggc_mark_tree (n->data.case_stmt.default_label);
522 ggc_mark_tree (n->data.case_stmt.index_expr);
523 ggc_mark_tree (n->data.case_stmt.nominal_type);
524
525 mark_case_node (n->data.case_stmt.case_list);
526 n = n->next;
527 }
528 }
529
530 /* Mark C for GC. */
531
532 static void
533 mark_case_node (c)
534 struct case_node *c;
535 {
536 if (c != 0)
537 {
538 ggc_mark_tree (c->low);
539 ggc_mark_tree (c->high);
540 ggc_mark_tree (c->code_label);
541
542 mark_case_node (c->right);
543 mark_case_node (c->left);
544 }
545 }
546
547 /* Mark G for GC. */
548
549 static void
550 mark_goto_fixup (g)
551 struct goto_fixup *g;
552 {
553 while (g)
554 {
555 ggc_mark (g);
556 ggc_mark_rtx (g->before_jump);
557 ggc_mark_tree (g->target);
558 ggc_mark_tree (g->context);
559 ggc_mark_rtx (g->target_rtl);
560 ggc_mark_rtx (g->stack_level);
561 ggc_mark_tree (g->cleanup_list_list);
562
563 g = g->next;
564 }
565 }
566
567 /* Clear out all parts of the state in F that can safely be discarded
568 after the function has been compiled, to let garbage collection
569 reclaim the memory. */
570
571 void
572 free_stmt_status (f)
573 struct function *f;
574 {
575 /* We're about to free the function obstack. If we hold pointers to
576 things allocated there, then we'll try to mark them when we do
577 GC. So, we clear them out here explicitly. */
578 if (f->stmt)
579 free (f->stmt);
580 f->stmt = NULL;
581 }
582
583 /* Mark P for GC. */
584
585 void
586 mark_stmt_status (p)
587 struct stmt_status *p;
588 {
589 if (p == 0)
590 return;
591
592 mark_block_nesting (p->x_block_stack);
593 mark_cond_nesting (p->x_cond_stack);
594 mark_loop_nesting (p->x_loop_stack);
595 mark_case_nesting (p->x_case_stack);
596
597 ggc_mark_tree (p->x_last_expr_type);
598 /* last_expr_value is only valid if last_expr_type is nonzero. */
599 if (p->x_last_expr_type)
600 ggc_mark_rtx (p->x_last_expr_value);
601
602 mark_goto_fixup (p->x_goto_fixup_chain);
603 }
604
605 void
606 init_stmt ()
607 {
608 gcc_obstack_init (&stmt_obstack);
609 }
610
611 void
612 init_stmt_for_function ()
613 {
614 cfun->stmt = (struct stmt_status *) xmalloc (sizeof (struct stmt_status));
615
616 /* We are not currently within any block, conditional, loop or case. */
617 block_stack = 0;
618 stack_block_stack = 0;
619 loop_stack = 0;
620 case_stack = 0;
621 cond_stack = 0;
622 nesting_stack = 0;
623 nesting_depth = 0;
624
625 current_block_start_count = 0;
626
627 /* No gotos have been expanded yet. */
628 goto_fixup_chain = 0;
629
630 /* We are not processing a ({...}) grouping. */
631 expr_stmts_for_value = 0;
632 last_expr_type = 0;
633 last_expr_value = NULL_RTX;
634 }
635 \f
636 /* Return nonzero if anything is pushed on the loop, condition, or case
637 stack. */
638 int
639 in_control_zone_p ()
640 {
641 return cond_stack || loop_stack || case_stack;
642 }
643
644 /* Record the current file and line. Called from emit_line_note. */
645 void
646 set_file_and_line_for_stmt (file, line)
647 const char *file;
648 int line;
649 {
650 /* If we're outputting an inline function, and we add a line note,
651 there may be no CFUN->STMT information. So, there's no need to
652 update it. */
653 if (cfun->stmt)
654 {
655 emit_filename = file;
656 emit_lineno = line;
657 }
658 }
659
660 /* Emit a no-op instruction. */
661
662 void
663 emit_nop ()
664 {
665 rtx last_insn;
666
667 last_insn = get_last_insn ();
668 if (!optimize
669 && (GET_CODE (last_insn) == CODE_LABEL
670 || (GET_CODE (last_insn) == NOTE
671 && prev_real_insn (last_insn) == 0)))
672 emit_insn (gen_nop ());
673 }
674 \f
675 /* Return the rtx-label that corresponds to a LABEL_DECL,
676 creating it if necessary. */
677
678 rtx
679 label_rtx (label)
680 tree label;
681 {
682 if (TREE_CODE (label) != LABEL_DECL)
683 abort ();
684
685 if (!DECL_RTL_SET_P (label))
686 SET_DECL_RTL (label, gen_label_rtx ());
687
688 return DECL_RTL (label);
689 }
690
691
692 /* Add an unconditional jump to LABEL as the next sequential instruction. */
693
694 void
695 emit_jump (label)
696 rtx label;
697 {
698 do_pending_stack_adjust ();
699 emit_jump_insn (gen_jump (label));
700 emit_barrier ();
701 }
702
703 /* Emit code to jump to the address
704 specified by the pointer expression EXP. */
705
706 void
707 expand_computed_goto (exp)
708 tree exp;
709 {
710 rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);
711
712 #ifdef POINTERS_EXTEND_UNSIGNED
713 if (GET_MODE (x) != Pmode)
714 x = convert_memory_address (Pmode, x);
715 #endif
716
717 emit_queue ();
718 do_pending_stack_adjust ();
719 emit_indirect_jump (x);
720
721 current_function_has_computed_jump = 1;
722 }
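/* This is reached for the GNU C computed-goto extension; for example
   (illustration only):

       void *dest[] = { &&lab1, &&lab2 };
       goto *dest[i];

   arrives here with EXP being the tree for `dest[i]'.  */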
723 \f
724 /* Handle goto statements and the labels that they can go to. */
725
726 /* Specify the location in the RTL code of a label LABEL,
727 which is a LABEL_DECL tree node.
728
729 This is used for the kind of label that the user can jump to with a
730 goto statement, and for alternatives of a switch or case statement.
731 RTL labels generated for loops and conditionals don't go through here;
732 they are generated directly at the RTL level, by other functions below.
733
734 Note that this has nothing to do with defining label *names*.
735 Languages vary in how they do that and what that even means. */
736
737 void
738 expand_label (label)
739 tree label;
740 {
741 struct label_chain *p;
742
743 do_pending_stack_adjust ();
744 emit_label (label_rtx (label));
745 if (DECL_NAME (label))
746 LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));
747
748 if (stack_block_stack != 0)
749 {
750 p = (struct label_chain *) ggc_alloc (sizeof (struct label_chain));
751 p->next = stack_block_stack->data.block.label_chain;
752 stack_block_stack->data.block.label_chain = p;
753 p->label = label;
754 }
755 }
756
757 /* Declare that LABEL (a LABEL_DECL) may be used for nonlocal gotos
758 from nested functions. */
759
760 void
761 declare_nonlocal_label (label)
762 tree label;
763 {
764 rtx slot = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
765
766 nonlocal_labels = tree_cons (NULL_TREE, label, nonlocal_labels);
767 LABEL_PRESERVE_P (label_rtx (label)) = 1;
768 if (nonlocal_goto_handler_slots == 0)
769 {
770 emit_stack_save (SAVE_NONLOCAL,
771 &nonlocal_goto_stack_level,
772 PREV_INSN (tail_recursion_reentry));
773 }
774 nonlocal_goto_handler_slots
775 = gen_rtx_EXPR_LIST (VOIDmode, slot, nonlocal_goto_handler_slots);
776 }
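/* Illustrative GNU C source that exercises this (a sketch, not a
   regression test):

       int f (void)
       {
         __label__ out;
         void g (void) { goto out; }   -- nonlocal goto back into f
         g ();
         return 0;
        out:
         return 1;
       }

   Here the front end calls declare_nonlocal_label for `out' in f,
   and expand_goto below emits the nonlocal jump from within g.  */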
777
778 /* Generate RTL code for a `goto' statement with target label LABEL.
779 LABEL should be a LABEL_DECL tree node that was or will later be
780 defined with `expand_label'. */
781
782 void
783 expand_goto (label)
784 tree label;
785 {
786 tree context;
787
788 /* Check for a nonlocal goto to a containing function. */
789 context = decl_function_context (label);
790 if (context != 0 && context != current_function_decl)
791 {
792 struct function *p = find_function_data (context);
793 rtx label_ref = gen_rtx_LABEL_REF (Pmode, label_rtx (label));
794 rtx handler_slot, static_chain, save_area, insn;
795 tree link;
796
797 /* Find the corresponding handler slot for this label. */
798 handler_slot = p->x_nonlocal_goto_handler_slots;
799 for (link = p->x_nonlocal_labels; TREE_VALUE (link) != label;
800 link = TREE_CHAIN (link))
801 handler_slot = XEXP (handler_slot, 1);
802 handler_slot = XEXP (handler_slot, 0);
803
804 p->has_nonlocal_label = 1;
805 current_function_has_nonlocal_goto = 1;
806 LABEL_REF_NONLOCAL_P (label_ref) = 1;
807
808 /* Copy the rtl for the slots so that they won't be shared in
809 case the virtual stack vars register gets instantiated differently
810 in the parent than in the child. */
811
812 static_chain = copy_to_reg (lookup_static_chain (label));
813
814 /* Get addr of containing function's current nonlocal goto handler,
815 which will do any cleanups and then jump to the label. */
816 handler_slot = copy_to_reg (replace_rtx (copy_rtx (handler_slot),
817 virtual_stack_vars_rtx,
818 static_chain));
819
820 /* Get addr of containing function's nonlocal save area. */
821 save_area = p->x_nonlocal_goto_stack_level;
822 if (save_area)
823 save_area = replace_rtx (copy_rtx (save_area),
824 virtual_stack_vars_rtx, static_chain);
825
826 #if HAVE_nonlocal_goto
827 if (HAVE_nonlocal_goto)
828 emit_insn (gen_nonlocal_goto (static_chain, handler_slot,
829 save_area, label_ref));
830 else
831 #endif
832 {
833 /* Restore frame pointer for containing function.
834 This sets the actual hard register used for the frame pointer
835 to the location of the function's incoming static chain info.
836 The non-local goto handler will then adjust it to contain the
837 proper value and reload the argument pointer, if needed. */
838 emit_move_insn (hard_frame_pointer_rtx, static_chain);
839 emit_stack_restore (SAVE_NONLOCAL, save_area, NULL_RTX);
840
841 /* USE of hard_frame_pointer_rtx added for consistency;
842 not clear if really needed. */
843 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
844 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
845 emit_indirect_jump (handler_slot);
846 }
847
848 /* Search backwards to the jump insn and mark it as a
849 non-local goto. */
850 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
851 {
852 if (GET_CODE (insn) == JUMP_INSN)
853 {
854 REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO,
855 const0_rtx, REG_NOTES (insn));
856 break;
857 }
858 else if (GET_CODE (insn) == CALL_INSN)
859 break;
860 }
861 }
862 else
863 expand_goto_internal (label, label_rtx (label), NULL_RTX);
864 }
865
866 /* Generate RTL code for a `goto' statement with target label BODY.
867 LABEL should be a CODE_LABEL.
868 LAST_INSN, if non-0, is the rtx we should consider as the last
869 insn emitted (for the purposes of cleaning up a return). */
870
871 static void
872 expand_goto_internal (body, label, last_insn)
873 tree body;
874 rtx label;
875 rtx last_insn;
876 {
877 struct nesting *block;
878 rtx stack_level = 0;
879
880 if (GET_CODE (label) != CODE_LABEL)
881 abort ();
882
883 /* If label has already been defined, we can tell now
884 whether and how we must alter the stack level. */
885
886 if (PREV_INSN (label) != 0)
887 {
888 /* Find the innermost pending block that contains the label.
889 (Check containment by comparing insn-uids.)
890 Then restore the outermost stack level within that block,
891 and do cleanups of all blocks contained in it. */
892 for (block = block_stack; block; block = block->next)
893 {
894 if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
895 break;
896 if (block->data.block.stack_level != 0)
897 stack_level = block->data.block.stack_level;
898 /* Execute the cleanups for blocks we are exiting. */
899 if (block->data.block.cleanups != 0)
900 {
901 expand_cleanups (block->data.block.cleanups, NULL_TREE, 1, 1);
902 do_pending_stack_adjust ();
903 }
904 }
905
906 if (stack_level)
907 {
908 /* Ensure stack adjust isn't done by emit_jump, as this
909 would clobber the stack pointer. This one should be
910 deleted as dead by flow. */
911 clear_pending_stack_adjust ();
912 do_pending_stack_adjust ();
913
914 /* Don't do this adjust if it's to the end label and this function
915 is to return with a depressed stack pointer. */
916 if (label == return_label
917 && (((TREE_CODE (TREE_TYPE (current_function_decl))
918 == FUNCTION_TYPE)
919 && (TYPE_RETURNS_STACK_DEPRESSED
920 (TREE_TYPE (current_function_decl))))))
921 ;
922 else
923 emit_stack_restore (SAVE_BLOCK, stack_level, NULL_RTX);
924 }
925
926 if (body != 0 && DECL_TOO_LATE (body))
927 error ("jump to `%s' invalidly jumps into binding contour",
928 IDENTIFIER_POINTER (DECL_NAME (body)));
929 }
930 /* Label not yet defined: may need to put this goto
931 on the fixup list. */
932 else if (! expand_fixup (body, label, last_insn))
933 {
934 /* No fixup needed. Record that the label is the target
935 of at least one goto that has no fixup. */
936 if (body != 0)
937 TREE_ADDRESSABLE (body) = 1;
938 }
939
940 emit_jump (label);
941 }
942 \f
943 /* Generate, if necessary, a fixup for a goto
944 whose target label in tree structure (if any) is TREE_LABEL
945 and whose target in rtl is RTL_LABEL.
946
947 If LAST_INSN is nonzero, we pretend that the jump appears
948 after insn LAST_INSN instead of at the current point in the insn stream.
949
950 The fixup will be used later to insert insns just before the goto.
951 Those insns will restore the stack level as appropriate for the
952 target label, and will (in the case of C++) also invoke any object
953 destructors which have to be invoked when we exit the scopes which
954 are exited by the goto.
955
956 Value is nonzero if a fixup is made. */
957
958 static int
959 expand_fixup (tree_label, rtl_label, last_insn)
960 tree tree_label;
961 rtx rtl_label;
962 rtx last_insn;
963 {
964 struct nesting *block, *end_block;
965
966 /* See if we can recognize which block the label will be output in.
967 This is possible in some very common cases.
968 If we succeed, set END_BLOCK to that block.
969 Otherwise, set it to 0. */
970
971 if (cond_stack
972 && (rtl_label == cond_stack->data.cond.endif_label
973 || rtl_label == cond_stack->data.cond.next_label))
974 end_block = cond_stack;
975 /* If we are in a loop, recognize certain labels which
976 are likely targets. This reduces the number of fixups
977 we need to create. */
978 else if (loop_stack
979 && (rtl_label == loop_stack->data.loop.start_label
980 || rtl_label == loop_stack->data.loop.end_label
981 || rtl_label == loop_stack->data.loop.continue_label))
982 end_block = loop_stack;
983 else
984 end_block = 0;
985
986 /* Now set END_BLOCK to the binding level to which we will return. */
987
988 if (end_block)
989 {
990 struct nesting *next_block = end_block->all;
991 block = block_stack;
992
993 /* First see if the END_BLOCK is inside the innermost binding level.
994 If so, then no cleanups or stack levels are relevant. */
995 while (next_block && next_block != block)
996 next_block = next_block->all;
997
998 if (next_block)
999 return 0;
1000
1001 /* Otherwise, set END_BLOCK to the innermost binding level
1002 which is outside the relevant control-structure nesting. */
1003 next_block = block_stack->next;
1004 for (block = block_stack; block != end_block; block = block->all)
1005 if (block == next_block)
1006 next_block = next_block->next;
1007 end_block = next_block;
1008 }
1009
1010 /* Does any containing block have a stack level or cleanups?
1011 If not, no fixup is needed, and that is the normal case
1012 (the only case, for standard C). */
1013 for (block = block_stack; block != end_block; block = block->next)
1014 if (block->data.block.stack_level != 0
1015 || block->data.block.cleanups != 0)
1016 break;
1017
1018 if (block != end_block)
1019 {
1020 /* Ok, a fixup is needed. Add a fixup to the list of such. */
1021 struct goto_fixup *fixup
1022 = (struct goto_fixup *) ggc_alloc (sizeof (struct goto_fixup));
1023 /* In case an old stack level is restored, make sure that comes
1024 after any pending stack adjust. */
1025 /* ?? If the fixup isn't to come at the present position,
1026 doing the stack adjust here isn't useful. Doing it with our
1027 settings at that location isn't useful either. Let's hope
1028 someone does it! */
1029 if (last_insn == 0)
1030 do_pending_stack_adjust ();
1031 fixup->target = tree_label;
1032 fixup->target_rtl = rtl_label;
1033
1034 /* Create a BLOCK node and a corresponding matched set of
1035 NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes at
1036 this point. The notes will encapsulate any and all fixup
1037 code which we might later insert at this point in the insn
1038 stream. Also, the BLOCK node will be the parent (i.e. the
1039 `SUPERBLOCK') of any other BLOCK nodes which we might create
1040 later on when we are expanding the fixup code.
1041
1042 Note that optimization passes (including expand_end_loop)
1043 might move the *_BLOCK notes away, so we use a NOTE_INSN_DELETED
1044 as a placeholder. */
1045
1046 {
1047 rtx original_before_jump
1048 = last_insn ? last_insn : get_last_insn ();
1049 rtx start;
1050 rtx end;
1051 tree block;
1052
1053 block = make_node (BLOCK);
1054 TREE_USED (block) = 1;
1055
1056 if (!cfun->x_whole_function_mode_p)
1057 insert_block (block);
1058 else
1059 {
1060 BLOCK_CHAIN (block)
1061 = BLOCK_CHAIN (DECL_INITIAL (current_function_decl));
1062 BLOCK_CHAIN (DECL_INITIAL (current_function_decl))
1063 = block;
1064 }
1065
1066 start_sequence ();
1067 start = emit_note (NULL, NOTE_INSN_BLOCK_BEG);
1068 if (cfun->x_whole_function_mode_p)
1069 NOTE_BLOCK (start) = block;
1070 fixup->before_jump = emit_note (NULL, NOTE_INSN_DELETED);
1071 end = emit_note (NULL, NOTE_INSN_BLOCK_END);
1072 if (cfun->x_whole_function_mode_p)
1073 NOTE_BLOCK (end) = block;
1074 fixup->context = block;
1075 end_sequence ();
1076 emit_insns_after (start, original_before_jump);
1077 }
1078
1079 fixup->block_start_count = current_block_start_count;
1080 fixup->stack_level = 0;
1081 fixup->cleanup_list_list
1082 = ((block->data.block.outer_cleanups
1083 || block->data.block.cleanups)
1084 ? tree_cons (NULL_TREE, block->data.block.cleanups,
1085 block->data.block.outer_cleanups)
1086 : 0);
1087 fixup->next = goto_fixup_chain;
1088 goto_fixup_chain = fixup;
1089 }
1090
1091 return block != 0;
1092 }
1093 \f
1094 /* Expand any needed fixups in the outermost binding level of the
1095 function. FIRST_INSN is the first insn in the function. */
1096
1097 void
1098 expand_fixups (first_insn)
1099 rtx first_insn;
1100 {
1101 fixup_gotos (NULL, NULL_RTX, NULL_TREE, first_insn, 0);
1102 }
1103
1104 /* When exiting a binding contour, process all pending gotos requiring fixups.
1105 THISBLOCK is the structure that describes the block being exited.
1106 STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
1107 CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
1108 FIRST_INSN is the insn that began this contour.
1109
1110 Gotos that jump out of this contour must restore the
1111 stack level and do the cleanups before actually jumping.
1112
1113 DONT_JUMP_IN nonzero means report an error if there is a jump into this
1114 contour from before the beginning of the contour.
1115 This is also done if STACK_LEVEL is nonzero. */
1116
1117 static void
1118 fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
1119 struct nesting *thisblock;
1120 rtx stack_level;
1121 tree cleanup_list;
1122 rtx first_insn;
1123 int dont_jump_in;
1124 {
1125 struct goto_fixup *f, *prev;
1126
1127 /* F is the fixup we are considering; PREV is the previous one. */
1128 /* We run this loop in two passes so that cleanups of exited blocks
1129 are run first, and blocks that are exited are marked so
1130 afterwards. */
1131
1132 for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
1133 {
1134 /* Test for a fixup that is inactive because it is already handled. */
1135 if (f->before_jump == 0)
1136 {
1137 /* Delete inactive fixup from the chain, if that is easy to do. */
1138 if (prev != 0)
1139 prev->next = f->next;
1140 }
1141 /* Has this fixup's target label been defined?
1142 If so, we can finalize it. */
1143 else if (PREV_INSN (f->target_rtl) != 0)
1144 {
1145 rtx cleanup_insns;
1146
1147 /* If this fixup jumped into this contour from before the beginning
1148 of this contour, report an error. This code used to use
1149 the first non-label insn after f->target_rtl, but that's
1150 wrong since such can be added, by things like put_var_into_stack
1151 and have INSN_UIDs that are out of the range of the block. */
1152 /* ??? Bug: this does not detect jumping in through intermediate
1153 blocks that have stack levels or cleanups.
1154 It detects only a problem with the innermost block
1155 around the label. */
1156 if (f->target != 0
1157 && (dont_jump_in || stack_level || cleanup_list)
1158 && INSN_UID (first_insn) < INSN_UID (f->target_rtl)
1159 && INSN_UID (first_insn) > INSN_UID (f->before_jump)
1160 && ! DECL_ERROR_ISSUED (f->target))
1161 {
1162 error_with_decl (f->target,
1163 "label `%s' used before containing binding contour");
1164 /* Prevent multiple errors for one label. */
1165 DECL_ERROR_ISSUED (f->target) = 1;
1166 }
1167
1168 /* We will expand the cleanups into a sequence of their own and
1169 then later on we will attach this new sequence to the insn
1170 stream just ahead of the actual jump insn. */
1171
1172 start_sequence ();
1173
1174 /* Temporarily restore the lexical context where we will
1175 logically be inserting the fixup code. We do this for the
1176 sake of getting the debugging information right. */
1177
1178 pushlevel (0);
1179 set_block (f->context);
1180
1181 /* Expand the cleanups for blocks this jump exits. */
1182 if (f->cleanup_list_list)
1183 {
1184 tree lists;
1185 for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
1186 /* Marked elements correspond to blocks that have been closed.
1187 Do their cleanups. */
1188 if (TREE_ADDRESSABLE (lists)
1189 && TREE_VALUE (lists) != 0)
1190 {
1191 expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
1192 /* Pop any pushes done in the cleanups,
1193 in case function is about to return. */
1194 do_pending_stack_adjust ();
1195 }
1196 }
1197
1198 /* Restore stack level for the biggest contour that this
1199 jump jumps out of. */
1200 if (f->stack_level
1201 && ! (f->target_rtl == return_label
1202 && ((TREE_CODE (TREE_TYPE (current_function_decl))
1203 == FUNCTION_TYPE)
1204 && (TYPE_RETURNS_STACK_DEPRESSED
1205 (TREE_TYPE (current_function_decl))))))
1206 emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);
1207
1208 /* Finish up the sequence containing the insns which implement the
1209 necessary cleanups, and then attach that whole sequence to the
1210 insn stream just ahead of the actual jump insn. Attaching it
1211 at that point insures that any cleanups which are in fact
1212 implicit C++ object destructions (which must be executed upon
1213 leaving the block) appear (to the debugger) to be taking place
1214 in an area of the generated code where the object(s) being
1215 destructed are still "in scope". */
1216
1217 cleanup_insns = get_insns ();
1218 poplevel (1, 0, 0);
1219
1220 end_sequence ();
1221 emit_insns_after (cleanup_insns, f->before_jump);
1222
1223 f->before_jump = 0;
1224 }
1225 }
1226
1227 /* For any still-undefined labels, do the cleanups for this block now.
1228 We must do this now since items in the cleanup list may go out
1229 of scope when the block ends. */
1230 for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
1231 if (f->before_jump != 0
1232 && PREV_INSN (f->target_rtl) == 0
1233 /* Label has still not appeared. If we are exiting a block with
1234 a stack level to restore, that started before the fixup,
1235 mark this stack level as needing restoration
1236 when the fixup is later finalized. */
1237 && thisblock != 0
1238 /* Note: if THISBLOCK == 0 and we have a label that hasn't appeared, it
1239 means the label is undefined. That's erroneous, but possible. */
1240 && (thisblock->data.block.block_start_count
1241 <= f->block_start_count))
1242 {
1243 tree lists = f->cleanup_list_list;
1244 rtx cleanup_insns;
1245
1246 for (; lists; lists = TREE_CHAIN (lists))
1247 /* If the following elt. corresponds to our containing block
1248 then the elt. must be for this block. */
1249 if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
1250 {
1251 start_sequence ();
1252 pushlevel (0);
1253 set_block (f->context);
1254 expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
1255 do_pending_stack_adjust ();
1256 cleanup_insns = get_insns ();
1257 poplevel (1, 0, 0);
1258 end_sequence ();
1259 if (cleanup_insns != 0)
1260 f->before_jump
1261 = emit_insns_after (cleanup_insns, f->before_jump);
1262
1263 f->cleanup_list_list = TREE_CHAIN (lists);
1264 }
1265
1266 if (stack_level)
1267 f->stack_level = stack_level;
1268 }
1269 }
1270 \f
1271 /* Return the number of times character C occurs in string S. */
1272 static int
1273 n_occurrences (c, s)
1274 int c;
1275 const char *s;
1276 {
1277 int n = 0;
1278 while (*s)
1279 n += (*s++ == c);
1280 return n;
1281 }
1282 \f
1283 /* Generate RTL for an asm statement (explicit assembler code).
1284 BODY is a STRING_CST node containing the assembler code text,
1285 or an ADDR_EXPR containing a STRING_CST. */
1286
1287 void
1288 expand_asm (body)
1289 tree body;
1290 {
1291 if (TREE_CODE (body) == ADDR_EXPR)
1292 body = TREE_OPERAND (body, 0);
1293
1294 emit_insn (gen_rtx_ASM_INPUT (VOIDmode,
1295 TREE_STRING_POINTER (body)));
1296 last_expr_type = 0;
1297 }
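/* For example, the statement `asm ("nop");' arrives here with BODY a
   STRING_CST holding "nop" and is emitted as a single ASM_INPUT rtx.
   (Illustration only.)  */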
1298
1299 /* Parse the output constraint pointed to by *CONSTRAINT_P. It is the
1300 OPERAND_NUMth output operand, indexed from zero. There are NINPUTS
1301 inputs and NOUTPUTS outputs to this extended-asm. Upon return,
1302 *ALLOWS_MEM will be TRUE iff the constraint allows the use of a
1303 memory operand. Similarly, *ALLOWS_REG will be TRUE iff the
1304 constraint allows the use of a register operand. And, *IS_INOUT
1305 will be true if the operand is read-write, i.e., if it is used as
1306 an input as well as an output. If *CONSTRAINT_P is not in
1307 canonical form, it will be made canonical. (Note that `+' will be
1308 replaced with `=' as part of this process.)
1309
1310 Returns TRUE if all went well; FALSE if an error occurred. */
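/* Some illustrative inputs and outcomes (a sketch, not an exhaustive
   specification): "=r" is already canonical and is left alone; "+r"
   sets *IS_INOUT and is rewritten to "=r"; "r=" draws a warning and
   is rewritten to "=r"; a bare "r" is rejected with the error
   "output operand constraint lacks `='".  */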
1311
1312 bool
1313 parse_output_constraint (constraint_p, operand_num, ninputs, noutputs,
1314 allows_mem, allows_reg, is_inout)
1315 const char **constraint_p;
1316 int operand_num;
1317 int ninputs;
1318 int noutputs;
1319 bool *allows_mem;
1320 bool *allows_reg;
1321 bool *is_inout;
1322 {
1323 const char *constraint = *constraint_p;
1324 const char *p;
1325
1326 /* Assume the constraint doesn't allow the use of either a register
1327 or memory. */
1328 *allows_mem = false;
1329 *allows_reg = false;
1330
1331 /* Allow the `=' or `+' to not be at the beginning of the string,
1332 since it wasn't explicitly documented that way, and there is a
1333 large body of code that puts it last. Swap the character to
1334 the front, so as not to uglify any place else. */
1335 p = strchr (constraint, '=');
1336 if (!p)
1337 p = strchr (constraint, '+');
1338
1339 /* If the string contains neither `=' nor `+', issue an error
1340 message. */
1341 if (!p)
1342 {
1343 error ("output operand constraint lacks `='");
1344 return false;
1345 }
1346
1347 /* If the constraint begins with `+', then the operand is both read
1348 from and written to. */
1349 *is_inout = (*p == '+');
1350
1351 /* Canonicalize the output constraint so that it begins with `='. */
1352 if (p != constraint || *is_inout)
1353 {
1354 char *buf;
1355 size_t c_len = strlen (constraint);
1356
1357 if (p != constraint)
1358 warning ("output constraint `%c' for operand %d is not at the beginning",
1359 *p, operand_num);
1360
1361 /* Make a copy of the constraint. */
1362 buf = alloca (c_len + 1);
1363 strcpy (buf, constraint);
1364 /* Swap the first character and the `=' or `+'. */
1365 buf[p - constraint] = buf[0];
1366 /* Make sure the first character is an `='. (Until we do this,
1367 it might be a `+'.) */
1368 buf[0] = '=';
1369 /* Replace the constraint with the canonicalized string. */
1370 *constraint_p = ggc_alloc_string (buf, c_len);
1371 constraint = *constraint_p;
1372 }
1373
1374 /* Loop through the constraint string. */
1375 for (p = constraint + 1; *p; ++p)
1376 switch (*p)
1377 {
1378 case '+':
1379 case '=':
1380 error ("operand constraint contains incorrectly positioned '+' or '='");
1381 return false;
1382
1383 case '%':
1384 if (operand_num + 1 == ninputs + noutputs)
1385 {
1386 error ("`%%' constraint used with last operand");
1387 return false;
1388 }
1389 break;
1390
1391 case 'V': case 'm': case 'o':
1392 *allows_mem = true;
1393 break;
1394
1395 case '?': case '!': case '*': case '&': case '#':
1396 case 'E': case 'F': case 'G': case 'H':
1397 case 's': case 'i': case 'n':
1398 case 'I': case 'J': case 'K': case 'L': case 'M':
1399 case 'N': case 'O': case 'P': case ',':
1400 break;
1401
1402 case '0': case '1': case '2': case '3': case '4':
1403 case '5': case '6': case '7': case '8': case '9':
1404 case '[':
1405 error ("matching constraint not valid in output operand");
1406 return false;
1407
1408 case '<': case '>':
1409 /* ??? Before flow, auto inc/dec insns are not supposed to exist,
1410 excepting those that expand_call created. So match memory
1411 and hope. */
1412 *allows_mem = true;
1413 break;
1414
1415 case 'g': case 'X':
1416 *allows_reg = true;
1417 *allows_mem = true;
1418 break;
1419
1420 case 'p': case 'r':
1421 *allows_reg = true;
1422 break;
1423
1424 default:
1425 if (!ISALPHA (*p))
1426 break;
1427 if (REG_CLASS_FROM_LETTER (*p) != NO_REGS)
1428 *allows_reg = true;
1429 #ifdef EXTRA_CONSTRAINT
1430 else
1431 {
1432 /* Otherwise we can't assume anything about the nature of
1433 the constraint except that it isn't purely registers.
1434 Treat it like "g" and hope for the best. */
1435 *allows_reg = true;
1436 *allows_mem = true;
1437 }
1438 #endif
1439 break;
1440 }
1441
1442 return true;
1443 }
1444
1445 /* Similar, but for input constraints. */
1446
1447 static bool
1448 parse_input_constraint (constraint_p, input_num, ninputs, noutputs, ninout,
1449 constraints, allows_mem, allows_reg)
1450 const char **constraint_p;
1451 int input_num;
1452 int ninputs;
1453 int noutputs;
1454 int ninout;
1455 const char * const * constraints;
1456 bool *allows_mem;
1457 bool *allows_reg;
1458 {
1459 const char *constraint = *constraint_p;
1460 const char *orig_constraint = constraint;
1461 size_t c_len = strlen (constraint);
1462 size_t j;
1463
1464 /* Assume the constraint doesn't allow the use of either
1465 a register or memory. */
1466 *allows_mem = false;
1467 *allows_reg = false;
1468
1469 /* Make sure constraint has neither `=', `+', nor `&'. */
1470
1471 for (j = 0; j < c_len; j++)
1472 switch (constraint[j])
1473 {
1474 case '+': case '=': case '&':
1475 if (constraint == orig_constraint)
1476 {
1477 error ("input operand constraint contains `%c'", constraint[j]);
1478 return false;
1479 }
1480 break;
1481
1482 case '%':
1483 if (constraint == orig_constraint
1484 && input_num + 1 == ninputs - ninout)
1485 {
1486 error ("`%%' constraint used with last operand");
1487 return false;
1488 }
1489 break;
1490
1491 case 'V': case 'm': case 'o':
1492 *allows_mem = true;
1493 break;
1494
1495 case '<': case '>':
1496 case '?': case '!': case '*': case '#':
1497 case 'E': case 'F': case 'G': case 'H':
1498 case 's': case 'i': case 'n':
1499 case 'I': case 'J': case 'K': case 'L': case 'M':
1500 case 'N': case 'O': case 'P': case ',':
1501 break;
1502
1503 /* Whether or not a numeric constraint allows a register is
1504 decided by the matching constraint, and so there is no need
1505 to do anything special with them. We must handle them in
1506 the default case, so that we don't unnecessarily force
1507 operands to memory. */
1508 case '0': case '1': case '2': case '3': case '4':
1509 case '5': case '6': case '7': case '8': case '9':
1510 {
1511 char *end;
1512 unsigned long match;
1513
1514 match = strtoul (constraint + j, &end, 10);
1515 if (match >= (unsigned long) noutputs)
1516 {
1517 error ("matching constraint references invalid operand number");
1518 return false;
1519 }
1520
1521 /* Try to find the real constraint for this dup. Only do this
1522 if the matching constraint is the only alternative. */
1523 if (*end == '\0'
1524 && (j == 0 || (j == 1 && constraint[0] == '%')))
1525 {
1526 constraint = constraints[match];
1527 *constraint_p = constraint;
1528 c_len = strlen (constraint);
1529 j = 0;
1530 break;
1531 }
1532 else
1533 j = end - constraint;
1534 }
1535 /* Fall through. */
1536
1537 case 'p': case 'r':
1538 *allows_reg = true;
1539 break;
1540
1541 case 'g': case 'X':
1542 *allows_reg = true;
1543 *allows_mem = true;
1544 break;
1545
1546 default:
1547 if (! ISALPHA (constraint[j]))
1548 {
1549 error ("invalid punctuation `%c' in constraint", constraint[j]);
1550 return false;
1551 }
1552 if (REG_CLASS_FROM_LETTER (constraint[j]) != NO_REGS)
1553 *allows_reg = true;
1554 #ifdef EXTRA_CONSTRAINT
1555 else
1556 {
1557 /* Otherwise we can't assume anything about the nature of
1558 the constraint except that it isn't purely registers.
1559 Treat it like "g" and hope for the best. */
1560 *allows_reg = true;
1561 *allows_mem = true;
1562 }
1563 #endif
1564 break;
1565 }
1566
1567 return true;
1568 }
1569
1570 /* Generate RTL for an asm statement with arguments.
1571 STRING is the instruction template.
1572 OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
1573 Each output or input has an expression in the TREE_VALUE and
1574 a tree list in TREE_PURPOSE which in turn contains a constraint
1575 name in TREE_VALUE (or NULL_TREE) and a constraint string
1576 in TREE_PURPOSE.
1577 CLOBBERS is a list of STRING_CST nodes each naming a hard register
1578 that is clobbered by this insn.
1579
1580 Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
1581 Some elements of OUTPUTS may be replaced with trees representing temporary
1582 values. The caller should copy those temporary values to the originally
1583 specified lvalues.
1584
1585 VOL nonzero means the insn is volatile; don't optimize it. */
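/* A hedged example of the interface: for the GNU C statement

       asm volatile ("add %1,%0" : "=r" (x) : "r" (y) : "cc");

   OUTPUTS is a one-element TREE_LIST whose TREE_VALUE is the tree for
   `x' and whose TREE_PURPOSE holds the constraint string "=r";
   INPUTS likewise holds `y' with "r"; CLOBBERS is a list holding the
   STRING_CST "cc"; and VOL is nonzero because of `volatile'.  */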
1586
1587 void
1588 expand_asm_operands (string, outputs, inputs, clobbers, vol, filename, line)
1589 tree string, outputs, inputs, clobbers;
1590 int vol;
1591 const char *filename;
1592 int line;
1593 {
1594 rtvec argvec, constraintvec;
1595 rtx body;
1596 int ninputs = list_length (inputs);
1597 int noutputs = list_length (outputs);
1598 int ninout;
1599 int nclobbers;
1600 tree tail;
1601 int i;
1602 /* Vector of RTX's of evaluated output operands. */
1603 rtx *output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
1604 int *inout_opnum = (int *) alloca (noutputs * sizeof (int));
1605 rtx *real_output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
1606 enum machine_mode *inout_mode
1607 = (enum machine_mode *) alloca (noutputs * sizeof (enum machine_mode));
1608 const char **constraints
1609 = (const char **) alloca ((noutputs + ninputs) * sizeof (const char *));
1610 /* The insn we have emitted. */
1611 rtx insn;
1612 int old_generating_concat_p = generating_concat_p;
1613
1614 /* An ASM with no outputs needs to be treated as volatile, for now. */
1615 if (noutputs == 0)
1616 vol = 1;
1617
1618 if (! check_operand_nalternatives (outputs, inputs))
1619 return;
1620
1621 if (! check_unique_operand_names (outputs, inputs))
1622 return;
1623
1624 string = resolve_operand_names (string, outputs, inputs, constraints);
1625
1626 #ifdef MD_ASM_CLOBBERS
1627 /* Sometimes we wish to automatically clobber registers across an asm.
1628 Case in point is when the i386 backend moved from cc0 to a hard reg --
1629 maintaining source-level compatibility means automatically clobbering
1630 the flags register. */
1631 MD_ASM_CLOBBERS (clobbers);
1632 #endif
1633
1634 /* Count the number of meaningful clobbered registers, ignoring what
1635 we would ignore later. */
1636 nclobbers = 0;
1637 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1638 {
1639 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1640
1641 i = decode_reg_name (regname);
1642 if (i >= 0 || i == -4)
1643 ++nclobbers;
1644 else if (i == -2)
1645 error ("unknown register name `%s' in `asm'", regname);
1646 }
1647
1648 last_expr_type = 0;
1649
1650 /* First pass over inputs and outputs checks validity and marks
1651 operands addressable if needed. */
1652
1653 ninout = 0;
1654 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1655 {
1656 tree val = TREE_VALUE (tail);
1657 tree type = TREE_TYPE (val);
1658 const char *constraint;
1659 bool is_inout;
1660 bool allows_reg;
1661 bool allows_mem;
1662
1663 /* If there's an erroneous arg, emit no insn. */
1664 if (type == error_mark_node)
1665 return;
1666
1667 /* Try to parse the output constraint. If that fails, there's
1668 no point in going further. */
1669 constraint = constraints[i];
1670 if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
1671 &allows_mem, &allows_reg, &is_inout))
1672 return;
1673
1674 if (! allows_reg
1675 && (allows_mem
1676 || is_inout
1677 || (DECL_P (val)
1678 && GET_CODE (DECL_RTL (val)) == REG
1679 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
1680 mark_addressable (val);
1681
1682 if (is_inout)
1683 ninout++;
1684 }
1685
1686 ninputs += ninout;
1687 if (ninputs + noutputs > MAX_RECOG_OPERANDS)
1688 {
1689 error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
1690 return;
1691 }
1692
1693 for (i = 0, tail = inputs; tail; i++, tail = TREE_CHAIN (tail))
1694 {
1695 bool allows_reg, allows_mem;
1696 const char *constraint;
1697
1698 /* If there's an erroneous arg, emit no insn, because the ASM_INPUT
1699 would get VOIDmode and that could cause a crash in reload. */
1700 if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
1701 return;
1702
1703 constraint = constraints[i + noutputs];
1704 if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
1705 constraints, &allows_mem, &allows_reg))
1706 return;
1707
1708 if (! allows_reg && allows_mem)
1709 mark_addressable (TREE_VALUE (tail));
1710 }
1711
1712 /* Second pass evaluates arguments. */
1713
1714 ninout = 0;
1715 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1716 {
1717 tree val = TREE_VALUE (tail);
1718 tree type = TREE_TYPE (val);
1719 bool is_inout;
1720 bool allows_reg;
1721 bool allows_mem;
1722
1723 if (!parse_output_constraint (&constraints[i], i, ninputs,
1724 noutputs, &allows_mem, &allows_reg,
1725 &is_inout))
1726 abort ();
1727
1728 /* If an output operand is not a decl or indirect ref and our constraint
1729 allows a register, make a temporary to act as an intermediate.
1730 Make the asm insn write into that, then our caller will copy it to
1731 the real output operand. Likewise for promoted variables. */
1732
1733 generating_concat_p = 0;
1734
1735 real_output_rtx[i] = NULL_RTX;
1736 if ((TREE_CODE (val) == INDIRECT_REF
1737 && allows_mem)
1738 || (DECL_P (val)
1739 && (allows_mem || GET_CODE (DECL_RTL (val)) == REG)
1740 && ! (GET_CODE (DECL_RTL (val)) == REG
1741 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
1742 || ! allows_reg
1743 || is_inout)
1744 {
1745 output_rtx[i] = expand_expr (val, NULL_RTX, VOIDmode, EXPAND_WRITE);
1746
1747 if (! allows_reg && GET_CODE (output_rtx[i]) != MEM)
1748 error ("output number %d not directly addressable", i);
1749 if ((! allows_mem && GET_CODE (output_rtx[i]) == MEM)
1750 || GET_CODE (output_rtx[i]) == CONCAT)
1751 {
1752 real_output_rtx[i] = protect_from_queue (output_rtx[i], 1);
1753 output_rtx[i] = gen_reg_rtx (GET_MODE (output_rtx[i]));
1754 if (is_inout)
1755 emit_move_insn (output_rtx[i], real_output_rtx[i]);
1756 }
1757 }
1758 else
1759 {
1760 output_rtx[i] = assign_temp (type, 0, 0, 1);
1761 TREE_VALUE (tail) = make_tree (type, output_rtx[i]);
1762 }
1763
1764 generating_concat_p = old_generating_concat_p;
1765
1766 if (is_inout)
1767 {
1768 inout_mode[ninout] = TYPE_MODE (type);
1769 inout_opnum[ninout++] = i;
1770 }
1771 }
1772
1773 /* Make vectors for the expression-rtx, constraint strings,
1774 and named operands. */
1775
1776 argvec = rtvec_alloc (ninputs);
1777 constraintvec = rtvec_alloc (ninputs);
1778
1779 body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
1780 : GET_MODE (output_rtx[0])),
1781 TREE_STRING_POINTER (string),
1782 empty_string, 0, argvec, constraintvec,
1783 filename, line);
1784
1785 MEM_VOLATILE_P (body) = vol;
1786
1787 /* Eval the inputs and put them into ARGVEC.
1788 Put their constraints into ASM_INPUTs and store in CONSTRAINTS. */
1789
1790 for (i = 0, tail = inputs; tail; tail = TREE_CHAIN (tail), ++i)
1791 {
1792 bool allows_reg, allows_mem;
1793 const char *constraint;
1794 tree val, type;
1795 rtx op;
1796
1797 constraint = constraints[i + noutputs];
1798 if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
1799 constraints, &allows_mem, &allows_reg))
1800 abort ();
1801
1802 generating_concat_p = 0;
1803
1804 val = TREE_VALUE (tail);
1805 type = TREE_TYPE (val);
1806 op = expand_expr (val, NULL_RTX, VOIDmode, 0);
1807
1808 /* Never pass a CONCAT to an ASM. */
1809 if (GET_CODE (op) == CONCAT)
1810 op = force_reg (GET_MODE (op), op);
1811
1812 if (asm_operand_ok (op, constraint) <= 0)
1813 {
1814 if (allows_reg)
1815 op = force_reg (TYPE_MODE (type), op);
1816 else if (!allows_mem)
1817 warning ("asm operand %d probably doesn't match constraints",
1818 i + noutputs);
1819 else if (CONSTANT_P (op))
1820 op = force_const_mem (TYPE_MODE (type), op);
1821 else if (GET_CODE (op) == REG
1822 || GET_CODE (op) == SUBREG
1823 || GET_CODE (op) == ADDRESSOF
1824 || GET_CODE (op) == CONCAT)
1825 {
1826 tree qual_type = build_qualified_type (type,
1827 (TYPE_QUALS (type)
1828 | TYPE_QUAL_CONST));
1829 rtx memloc = assign_temp (qual_type, 1, 1, 1);
1830
1831 emit_move_insn (memloc, op);
1832 op = memloc;
1833 }
1834
1835 else if (GET_CODE (op) == MEM && MEM_VOLATILE_P (op))
1836 {
1837 /* We won't recognize volatile memory as an acceptable
1838 memory_operand at this point. Ignore it. */
1839 }
1840 else if (queued_subexp_p (op))
1841 ;
1842 else
1843 /* ??? Leave this only until we have experience with what
1844 happens in combine and elsewhere when constraints are
1845 not satisfied. */
1846 warning ("asm operand %d probably doesn't match constraints",
1847 i + noutputs);
1848 }
1849
1850 generating_concat_p = old_generating_concat_p;
1851 ASM_OPERANDS_INPUT (body, i) = op;
1852
1853 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
1854 = gen_rtx_ASM_INPUT (TYPE_MODE (type), constraints[i + noutputs]);
1855 }
1856
1857 /* Protect all the operands from the queue now that they have all been
1858 evaluated. */
1859
1860 generating_concat_p = 0;
1861
1862 for (i = 0; i < ninputs - ninout; i++)
1863 ASM_OPERANDS_INPUT (body, i)
1864 = protect_from_queue (ASM_OPERANDS_INPUT (body, i), 0);
1865
1866 for (i = 0; i < noutputs; i++)
1867 output_rtx[i] = protect_from_queue (output_rtx[i], 1);
1868
1869 /* For in-out operands, copy output rtx to input rtx. */
1870 for (i = 0; i < ninout; i++)
1871 {
1872 int j = inout_opnum[i];
1873 char buffer[16];
1874
1875 ASM_OPERANDS_INPUT (body, ninputs - ninout + i)
1876 = output_rtx[j];
1877
1878 sprintf (buffer, "%d", j);
1879 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, ninputs - ninout + i)
1880 = gen_rtx_ASM_INPUT (inout_mode[i], ggc_alloc_string (buffer, -1));
1881 }
1882
1883 generating_concat_p = old_generating_concat_p;
1884
1885 /* Now, for each output, construct an rtx
1886 (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
1887 ARGVEC CONSTRAINTS OPNAMES))
1888 If there is more than one, put them inside a PARALLEL. */
1889
1890 if (noutputs == 1 && nclobbers == 0)
1891 {
1892 ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = constraints[0];
1893 insn = emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
1894 }
1895
1896 else if (noutputs == 0 && nclobbers == 0)
1897 {
1898 /* No output operands: put in a raw ASM_OPERANDS rtx. */
1899 insn = emit_insn (body);
1900 }
1901
1902 else
1903 {
1904 rtx obody = body;
1905 int num = noutputs;
1906
1907 if (num == 0)
1908 num = 1;
1909
1910 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
1911
1912 /* For each output operand, store a SET. */
1913 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1914 {
1915 XVECEXP (body, 0, i)
1916 = gen_rtx_SET (VOIDmode,
1917 output_rtx[i],
1918 gen_rtx_ASM_OPERANDS
1919 (GET_MODE (output_rtx[i]),
1920 TREE_STRING_POINTER (string),
1921 constraints[i], i, argvec, constraintvec,
1922 filename, line));
1923
1924 MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
1925 }
1926
1927 /* If there are no outputs (but there are some clobbers)
1928 store the bare ASM_OPERANDS into the PARALLEL. */
1929
1930 if (i == 0)
1931 XVECEXP (body, 0, i++) = obody;
1932
1933 /* Store (clobber REG) for each clobbered register specified. */
1934
1935 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1936 {
1937 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1938 int j = decode_reg_name (regname);
1939
1940 if (j < 0)
1941 {
1942 if (j == -3) /* `cc', which is not a register */
1943 continue;
1944
1945 if (j == -4) /* `memory', don't cache memory across asm */
1946 {
1947 XVECEXP (body, 0, i++)
1948 = gen_rtx_CLOBBER (VOIDmode,
1949 gen_rtx_MEM
1950 (BLKmode,
1951 gen_rtx_SCRATCH (VOIDmode)));
1952 continue;
1953 }
1954
1955 /* Ignore unknown register, error already signaled. */
1956 continue;
1957 }
1958
1959 /* Use QImode since that's guaranteed to clobber just one reg. */
1960 XVECEXP (body, 0, i++)
1961 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (QImode, j));
1962 }
1963
1964 insn = emit_insn (body);
1965 }
1966
1967 /* For any outputs that needed reloading into registers, spill them
1968 back to where they belong. */
1969 for (i = 0; i < noutputs; ++i)
1970 if (real_output_rtx[i])
1971 emit_move_insn (real_output_rtx[i], output_rtx[i]);
1972
1973 free_temp_slots ();
1974 }
1975
1976 /* A subroutine of expand_asm_operands. Check that all operands have
1977 the same number of alternatives. Return true if so. */
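/* E.g. the operands of
asm ("..." : "=r,m" (x) : "r,r" (y));
each offer two alternatives (one comma per constraint), which is
consistent; writing the input constraint as plain "r" would make the
counts differ and be rejected. */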
1978
1979 static bool
1980 check_operand_nalternatives (outputs, inputs)
1981 tree outputs, inputs;
1982 {
1983 if (outputs || inputs)
1984 {
1985 tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
1986 int nalternatives
1987 = n_occurrences (',', TREE_STRING_POINTER (TREE_VALUE (tmp)));
1988 tree next = inputs;
1989
1990 if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
1991 {
1992 error ("too many alternatives in `asm'");
1993 return false;
1994 }
1995
1996 tmp = outputs;
1997 while (tmp)
1998 {
1999 const char *constraint
2000 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (tmp)));
2001
2002 if (n_occurrences (',', constraint) != nalternatives)
2003 {
2004 error ("operand constraints for `asm' differ in number of alternatives");
2005 return false;
2006 }
2007
2008 if (TREE_CHAIN (tmp))
2009 tmp = TREE_CHAIN (tmp);
2010 else
2011 tmp = next, next = 0;
2012 }
2013 }
2014
2015 return true;
2016 }
2017
2018 /* A subroutine of expand_asm_operands. Check that all operand names
2019 are unique. Return true if so. We rely on the fact that these names
2020 are identifiers, and so have been canonicalized by get_identifier,
2021 so all we need are pointer comparisons. */
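/* E.g. this is rejected because [x] names two different operands:
asm ("..." : [x] "=r" (a) : [x] "r" (b)); */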
2022
2023 static bool
2024 check_unique_operand_names (outputs, inputs)
2025 tree outputs, inputs;
2026 {
2027 tree i, j;
2028
2029 for (i = outputs; i ; i = TREE_CHAIN (i))
2030 {
2031 tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
2032 if (! i_name)
2033 continue;
2034
2035 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
2036 if (i_name == TREE_PURPOSE (TREE_PURPOSE (j)))
2037 goto failure;
2038 }
2039
2040 for (i = inputs; i ; i = TREE_CHAIN (i))
2041 {
2042 tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
2043 if (! i_name)
2044 continue;
2045
2046 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
2047 if (i_name == TREE_PURPOSE (TREE_PURPOSE (j)))
2048 goto failure;
2049 for (j = outputs; j ; j = TREE_CHAIN (j))
2050 if (i_name == TREE_PURPOSE (TREE_PURPOSE (j)))
2051 goto failure;
2052 }
2053
2054 return true;
2055
2056 failure:
2057 error ("duplicate asm operand name '%s'",
2058 IDENTIFIER_POINTER (TREE_PURPOSE (TREE_PURPOSE (i))));
2059 return false;
2060 }
2061
2062 /* A subroutine of expand_asm_operands. Resolve the names of the operands
2063 in *POUTPUTS and *PINPUTS to numbers, and replace the name expansions in
2064 STRING and in the constraints to those numbers. */
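/* For example,
asm ("add %[inc], %[val]" : [val] "+r" (v) : [inc] "r" (i));
is rewritten as if the user had numbered the operands by hand
(outputs first, then inputs):
asm ("add %1, %0" : "+r" (v) : "r" (i)); */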
2065
2066 static tree
2067 resolve_operand_names (string, outputs, inputs, pconstraints)
2068 tree string;
2069 tree outputs, inputs;
2070 const char **pconstraints;
2071 {
2072 char *buffer = xstrdup (TREE_STRING_POINTER (string));
2073 char *p;
2074 tree t;
2075
2076 /* Assume that we will not need extra space to perform the substitution.
2077 This is because we get to remove '[' and ']', which means we cannot have
2078 a problem until we have more than 999 operands. */
2079
2080 p = buffer;
2081 while ((p = strchr (p, '%')) != NULL)
2082 {
2083 if (p[1] == '[')
2084 p += 1;
2085 else if (ISALPHA (p[1]) && p[2] == '[')
2086 p += 2;
2087 else
2088 {
2089 p += 1;
2090 continue;
2091 }
2092
2093 p = resolve_operand_name_1 (p, outputs, inputs);
2094 }
2095
2096 string = build_string (strlen (buffer), buffer);
2097 free (buffer);
2098
2099 /* Collect output constraints here because it's convenient.
2100 There should be no named operands here; this is verified
2101 in expand_asm_operands. */
2102 for (t = outputs; t ; t = TREE_CHAIN (t), pconstraints++)
2103 *pconstraints = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2104
2105 /* Substitute [<name>] in input constraint strings. */
2106 for (t = inputs; t ; t = TREE_CHAIN (t), pconstraints++)
2107 {
2108 const char *c = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2109 if (strchr (c, '[') == NULL)
2110 *pconstraints = c;
2111 else
2112 {
2113 p = buffer = xstrdup (c);
2114 while ((p = strchr (p, '[')) != NULL)
2115 p = resolve_operand_name_1 (p, outputs, inputs);
2116
2117 *pconstraints = ggc_alloc_string (buffer, -1);
2118 free (buffer);
2119 }
2120 }
2121
2122 return string;
2123 }
2124
2125 /* A subroutine of resolve_operand_names. P points to the '[' for a
2126 potential named operand of the form [<name>]. In place, replace
2127 the name and brackets with a number. Return a pointer to the
2128 balance of the string after substitution. */
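/* E.g. if P points at the '[' in "%[val], %2" and [val] resolves to
operand 0, the buffer is rewritten in place to "%0, %2" and the
returned pointer addresses the text just after the '0'. */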
2129
2130 static char *
2131 resolve_operand_name_1 (p, outputs, inputs)
2132 char *p;
2133 tree outputs, inputs;
2134 {
2135 char *q;
2136 int op;
2137 tree t;
2138 size_t len;
2139
2140 /* Collect the operand name. */
2141 q = strchr (p, ']');
2142 if (!q)
2143 {
2144 error ("missing close brace for named operand");
2145 return strchr (p, '\0');
2146 }
2147 len = q - p - 1;
2148
2149 /* Resolve the name to a number. */
2150 for (op = 0, t = outputs; t ; t = TREE_CHAIN (t), op++)
2151 {
2152 const char *c = IDENTIFIER_POINTER (TREE_PURPOSE (TREE_PURPOSE (t)));
2153 if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
2154 goto found;
2155 }
2156 for (t = inputs; t ; t = TREE_CHAIN (t), op++)
2157 {
2158 const char *c = IDENTIFIER_POINTER (TREE_PURPOSE (TREE_PURPOSE (t)));
2159 if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
2160 goto found;
2161 }
2162
2163 *q = '\0';
2164 error ("undefined named operand '%s'", p + 1);
2165 op = 0;
2166 found:
2167
2168 /* Replace the name with the number. Unfortunately, not all libraries
2169 get the return value of sprintf correct, so search for the end of the
2170 generated string by hand. */
2171 sprintf (p, "%d", op);
2172 p = strchr (p, '\0');
2173
2174 /* Verify the no-extra-buffer-space assumption. */
2175 if (p > q)
2176 abort ();
2177
2178 /* Shift the rest of the buffer down to fill the gap. */
2179 memmove (p, q + 1, strlen (q + 1) + 1);
2180
2181 return p;
2182 }
2183 \f
2184 /* Generate RTL to evaluate the expression EXP
2185 and remember it in case this is the VALUE in a ({... VALUE; }) construct.
2186 Provided just for backward-compatibility. expand_expr_stmt_value()
2187 should be used for new code. */
2188
2189 void
2190 expand_expr_stmt (exp)
2191 tree exp;
2192 {
2193 expand_expr_stmt_value (exp, -1, 1);
2194 }
2195
2196 /* Generate RTL to evaluate the expression EXP. WANT_VALUE tells
2197 whether to (1) save the value of the expression, (0) discard it or
2198 (-1) use expr_stmts_for_value to tell. The use of -1 is
2199 deprecated, and retained only for backward compatibility. */
2200
2201 void
2202 expand_expr_stmt_value (exp, want_value, maybe_last)
2203 tree exp;
2204 int want_value, maybe_last;
2205 {
2206 rtx value;
2207 tree type;
2208
2209 if (want_value == -1)
2210 want_value = expr_stmts_for_value != 0;
2211
2212 /* If -W, warn about statements with no side effects,
2213 except for an explicit cast to void (e.g. for assert()), and
2214 except for the last statement in ({...}), where the value may be useful. */
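/* E.g. `x + 1;' alone draws this warning, while `(void) (x + 1);'
and the final `v;' of `({ ...; v; })' do not. */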
2215 if (! want_value
2216 && (expr_stmts_for_value == 0 || ! maybe_last)
2217 && exp != error_mark_node)
2218 {
2219 if (! TREE_SIDE_EFFECTS (exp))
2220 {
2221 if ((extra_warnings || warn_unused_value)
2222 && !(TREE_CODE (exp) == CONVERT_EXPR
2223 && VOID_TYPE_P (TREE_TYPE (exp))))
2224 warning_with_file_and_line (emit_filename, emit_lineno,
2225 "statement with no effect");
2226 }
2227 else if (warn_unused_value)
2228 warn_if_unused_value (exp);
2229 }
2230
2231 /* If EXP is of function type and we are expanding statements for
2232 value, convert it to pointer-to-function. */
2233 if (want_value && TREE_CODE (TREE_TYPE (exp)) == FUNCTION_TYPE)
2234 exp = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (exp)), exp);
2235
2236 /* The call to `expand_expr' could cause last_expr_type and
2237 last_expr_value to get reset. Therefore, we set last_expr_value
2238 and last_expr_type *after* calling expand_expr. */
2239 value = expand_expr (exp, want_value ? NULL_RTX : const0_rtx,
2240 VOIDmode, 0);
2241 type = TREE_TYPE (exp);
2242
2243 /* If all we do is reference a volatile value in memory,
2244 copy it to a register to be sure it is actually touched. */
2245 if (value && GET_CODE (value) == MEM && TREE_THIS_VOLATILE (exp))
2246 {
2247 if (TYPE_MODE (type) == VOIDmode)
2248 ;
2249 else if (TYPE_MODE (type) != BLKmode)
2250 value = copy_to_reg (value);
2251 else
2252 {
2253 rtx lab = gen_label_rtx ();
2254
2255 /* Compare the value with itself to reference it. */
2256 emit_cmp_and_jump_insns (value, value, EQ,
2257 expand_expr (TYPE_SIZE (type),
2258 NULL_RTX, VOIDmode, 0),
2259 BLKmode, 0, lab);
2260 emit_label (lab);
2261 }
2262 }
2263
2264 /* If this expression is part of a ({...}) and is in memory, we may have
2265 to preserve temporaries. */
2266 preserve_temp_slots (value);
2267
2268 /* Free any temporaries used to evaluate this expression. Any temporary
2269 used as a result of this expression will already have been preserved
2270 above. */
2271 free_temp_slots ();
2272
2273 if (want_value)
2274 {
2275 last_expr_value = value;
2276 last_expr_type = type;
2277 }
2278
2279 emit_queue ();
2280 }
2281
2282 /* Warn if EXP contains any computations whose results are not used.
2283 Return 1 if a warning is printed; 0 otherwise. */
2284
2285 int
2286 warn_if_unused_value (exp)
2287 tree exp;
2288 {
2289 if (TREE_USED (exp))
2290 return 0;
2291
2292 /* Don't warn about void constructs. This includes casting to void,
2293 void function calls, and statement expressions with a final cast
2294 to void. */
2295 if (VOID_TYPE_P (TREE_TYPE (exp)))
2296 return 0;
2297
2298 /* If this is an expression with side effects, don't warn. */
2299 if (TREE_SIDE_EFFECTS (exp))
2300 return 0;
2301
2302 switch (TREE_CODE (exp))
2303 {
2304 case PREINCREMENT_EXPR:
2305 case POSTINCREMENT_EXPR:
2306 case PREDECREMENT_EXPR:
2307 case POSTDECREMENT_EXPR:
2308 case MODIFY_EXPR:
2309 case INIT_EXPR:
2310 case TARGET_EXPR:
2311 case CALL_EXPR:
2312 case METHOD_CALL_EXPR:
2313 case RTL_EXPR:
2314 case TRY_CATCH_EXPR:
2315 case WITH_CLEANUP_EXPR:
2316 case EXIT_EXPR:
2317 return 0;
2318
2319 case BIND_EXPR:
2320 /* For a binding, warn if no side effect within it. */
2321 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2322
2323 case SAVE_EXPR:
2324 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2325
2326 case TRUTH_ORIF_EXPR:
2327 case TRUTH_ANDIF_EXPR:
2328 /* In && or ||, warn if 2nd operand has no side effect. */
2329 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2330
2331 case COMPOUND_EXPR:
2332 if (TREE_NO_UNUSED_WARNING (exp))
2333 return 0;
2334 if (warn_if_unused_value (TREE_OPERAND (exp, 0)))
2335 return 1;
2336 /* Let people do `(foo (), 0)' without a warning. */
2337 if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
2338 return 0;
2339 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2340
2341 case NOP_EXPR:
2342 case CONVERT_EXPR:
2343 case NON_LVALUE_EXPR:
2344 /* Don't warn about conversions not explicit in the user's program. */
2345 if (TREE_NO_UNUSED_WARNING (exp))
2346 return 0;
2347 /* Assignment to a cast usually results in a cast of a modify.
2348 Don't complain about that. There can be an arbitrary number of
2349 casts before the modify, so we must loop until we find the first
2350 non-cast expression and then test to see if that is a modify. */
2351 {
2352 tree tem = TREE_OPERAND (exp, 0);
2353
2354 while (TREE_CODE (tem) == CONVERT_EXPR || TREE_CODE (tem) == NOP_EXPR)
2355 tem = TREE_OPERAND (tem, 0);
2356
2357 if (TREE_CODE (tem) == MODIFY_EXPR || TREE_CODE (tem) == INIT_EXPR
2358 || TREE_CODE (tem) == CALL_EXPR)
2359 return 0;
2360 }
2361 goto warn;
2362
2363 case INDIRECT_REF:
2364 /* Don't warn about automatic dereferencing of references, since
2365 the user cannot control it. */
2366 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
2367 return warn_if_unused_value (TREE_OPERAND (exp, 0));
2368 /* Fall through. */
2369
2370 default:
2371 /* Referencing a volatile value is a side effect, so don't warn. */
2372 if ((DECL_P (exp)
2373 || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
2374 && TREE_THIS_VOLATILE (exp))
2375 return 0;
2376
2377 /* If this is an expression which has no operands, there is no value
2378 to be unused. There are no such language-independent codes,
2379 but front ends may define such. */
2380 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'e'
2381 && TREE_CODE_LENGTH (TREE_CODE (exp)) == 0)
2382 return 0;
2383
2384 warn:
2385 warning_with_file_and_line (emit_filename, emit_lineno,
2386 "value computed is not used");
2387 return 1;
2388 }
2389 }
2390
2391 /* Clear out the memory of the last expression evaluated. */
2392
2393 void
2394 clear_last_expr ()
2395 {
2396 last_expr_type = 0;
2397 }
2398
2399 /* Begin a statement which will return a value.
2400 Return the RTL_EXPR for this statement expr.
2401 The caller must save that value and pass it to expand_end_stmt_expr. */
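/* This and expand_end_stmt_expr bracket the expansion of a GNU
statement expression, e.g.
int i = ({ int t = f (); t * 2; });
where the value of the last expression statement, t * 2, becomes
the value of the whole construct. */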
2402
2403 tree
2404 expand_start_stmt_expr ()
2405 {
2406 tree t;
2407
2408 /* Make the RTL_EXPR node temporary, not momentary,
2409 so that rtl_expr_chain doesn't become garbage. */
2410 t = make_node (RTL_EXPR);
2411 do_pending_stack_adjust ();
2412 start_sequence_for_rtl_expr (t);
2413 NO_DEFER_POP;
2414 expr_stmts_for_value++;
2415 last_expr_value = NULL_RTX;
2416 return t;
2417 }
2418
2419 /* Restore the previous state at the end of a statement that returns a value.
2420 Returns a tree node representing the statement's value and the
2421 insns to compute the value.
2422
2423 The nodes of that expression have been freed by now, so we cannot use them.
2424 But we don't want to do that anyway; the expression has already been
2425 evaluated and now we just want to use the value. So generate an RTL_EXPR
2426 with the proper type and RTL value.
2427
2428 If the last substatement was not an expression,
2429 return something with type `void'. */
2430
2431 tree
2432 expand_end_stmt_expr (t)
2433 tree t;
2434 {
2435 OK_DEFER_POP;
2436
2437 if (! last_expr_value || ! last_expr_type)
2438 {
2439 last_expr_value = const0_rtx;
2440 last_expr_type = void_type_node;
2441 }
2442 else if (GET_CODE (last_expr_value) != REG && ! CONSTANT_P (last_expr_value))
2443 /* Remove any possible QUEUED. */
2444 last_expr_value = protect_from_queue (last_expr_value, 0);
2445
2446 emit_queue ();
2447
2448 TREE_TYPE (t) = last_expr_type;
2449 RTL_EXPR_RTL (t) = last_expr_value;
2450 RTL_EXPR_SEQUENCE (t) = get_insns ();
2451
2452 rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);
2453
2454 end_sequence ();
2455
2456 /* Don't consider deleting this expr or containing exprs at tree level. */
2457 TREE_SIDE_EFFECTS (t) = 1;
2458 /* Propagate volatility of the actual RTL expr. */
2459 TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value);
2460
2461 last_expr_type = 0;
2462 expr_stmts_for_value--;
2463
2464 return t;
2465 }
2466 \f
2467 /* Generate RTL for the start of an if-then. COND is the expression
2468 whose truth should be tested.
2469
2470 If EXITFLAG is nonzero, this conditional is visible to
2471 `exit_something'. */
2472
2473 void
2474 expand_start_cond (cond, exitflag)
2475 tree cond;
2476 int exitflag;
2477 {
2478 struct nesting *thiscond = ALLOC_NESTING ();
2479
2480 /* Make an entry on cond_stack for the cond we are entering. */
2481
2482 thiscond->next = cond_stack;
2483 thiscond->all = nesting_stack;
2484 thiscond->depth = ++nesting_depth;
2485 thiscond->data.cond.next_label = gen_label_rtx ();
2486 /* Before we encounter an `else', we don't need a separate exit label
2487 unless there are supposed to be exit statements
2488 to exit this conditional. */
2489 thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
2490 thiscond->data.cond.endif_label = thiscond->exit_label;
2491 cond_stack = thiscond;
2492 nesting_stack = thiscond;
2493
2494 do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
2495 }
2496
2497 /* Generate RTL between the then-clause and the elseif-clause
2498 of an if-then-elseif-.... */
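/* A front end might expand `if (c1) A; else if (c2) B;' with the
protocol (a sketch, not any particular front end):
expand_start_cond (c1, 0);
... expand A ...
expand_start_elseif (c2);
... expand B ...
expand_end_cond (); */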
2499
2500 void
2501 expand_start_elseif (cond)
2502 tree cond;
2503 {
2504 if (cond_stack->data.cond.endif_label == 0)
2505 cond_stack->data.cond.endif_label = gen_label_rtx ();
2506 emit_jump (cond_stack->data.cond.endif_label);
2507 emit_label (cond_stack->data.cond.next_label);
2508 cond_stack->data.cond.next_label = gen_label_rtx ();
2509 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2510 }
2511
2512 /* Generate RTL between the then-clause and the else-clause
2513 of an if-then-else. */
2514
2515 void
2516 expand_start_else ()
2517 {
2518 if (cond_stack->data.cond.endif_label == 0)
2519 cond_stack->data.cond.endif_label = gen_label_rtx ();
2520
2521 emit_jump (cond_stack->data.cond.endif_label);
2522 emit_label (cond_stack->data.cond.next_label);
2523 cond_stack->data.cond.next_label = 0; /* No more _else or _elseif calls. */
2524 }
2525
2526 /* After calling expand_start_else, turn this "else" into an "else if"
2527 by providing another condition. */
2528
2529 void
2530 expand_elseif (cond)
2531 tree cond;
2532 {
2533 cond_stack->data.cond.next_label = gen_label_rtx ();
2534 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2535 }
2536
2537 /* Generate RTL for the end of an if-then.
2538 Pop the record for it off of cond_stack. */
2539
2540 void
2541 expand_end_cond ()
2542 {
2543 struct nesting *thiscond = cond_stack;
2544
2545 do_pending_stack_adjust ();
2546 if (thiscond->data.cond.next_label)
2547 emit_label (thiscond->data.cond.next_label);
2548 if (thiscond->data.cond.endif_label)
2549 emit_label (thiscond->data.cond.endif_label);
2550
2551 POPSTACK (cond_stack);
2552 last_expr_type = 0;
2553 }
2554 \f
2555 /* Generate RTL for the start of a loop. EXIT_FLAG is nonzero if this
2556 loop should be exited by `exit_something'. This is a loop for which
2557 `expand_continue' will jump to the top of the loop.
2558
2559 Make an entry on loop_stack to record the labels associated with
2560 this loop. */
2561
2562 struct nesting *
2563 expand_start_loop (exit_flag)
2564 int exit_flag;
2565 {
2566 struct nesting *thisloop = ALLOC_NESTING ();
2567
2568 /* Make an entry on loop_stack for the loop we are entering. */
2569
2570 thisloop->next = loop_stack;
2571 thisloop->all = nesting_stack;
2572 thisloop->depth = ++nesting_depth;
2573 thisloop->data.loop.start_label = gen_label_rtx ();
2574 thisloop->data.loop.end_label = gen_label_rtx ();
2575 thisloop->data.loop.alt_end_label = 0;
2576 thisloop->data.loop.continue_label = thisloop->data.loop.start_label;
2577 thisloop->exit_label = exit_flag ? thisloop->data.loop.end_label : 0;
2578 loop_stack = thisloop;
2579 nesting_stack = thisloop;
2580
2581 do_pending_stack_adjust ();
2582 emit_queue ();
2583 emit_note (NULL, NOTE_INSN_LOOP_BEG);
2584 emit_label (thisloop->data.loop.start_label);
2585
2586 return thisloop;
2587 }
2588
2589 /* Like expand_start_loop but for a loop where the continuation point
2590 (for expand_continue_loop) will be specified explicitly. */
2591
2592 struct nesting *
2593 expand_start_loop_continue_elsewhere (exit_flag)
2594 int exit_flag;
2595 {
2596 struct nesting *thisloop = expand_start_loop (exit_flag);
2597 loop_stack->data.loop.continue_label = gen_label_rtx ();
2598 return thisloop;
2599 }
2600
2601 /* Begin a null, aka do { } while (0) "loop". But since the contents
2602 of said loop can still contain a break, we must frob the loop nest. */
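/* E.g. in
do { if (p) break; } while (0)
the break still needs end_label as a jump target even though the
"loop" body runs exactly once. */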
2603
2604 struct nesting *
2605 expand_start_null_loop ()
2606 {
2607 struct nesting *thisloop = ALLOC_NESTING ();
2608
2609 /* Make an entry on loop_stack for the loop we are entering. */
2610
2611 thisloop->next = loop_stack;
2612 thisloop->all = nesting_stack;
2613 thisloop->depth = ++nesting_depth;
2614 thisloop->data.loop.start_label = emit_note (NULL, NOTE_INSN_DELETED);
2615 thisloop->data.loop.end_label = gen_label_rtx ();
2616 thisloop->data.loop.alt_end_label = NULL_RTX;
2617 thisloop->data.loop.continue_label = thisloop->data.loop.end_label;
2618 thisloop->exit_label = thisloop->data.loop.end_label;
2619 loop_stack = thisloop;
2620 nesting_stack = thisloop;
2621
2622 return thisloop;
2623 }
2624
2625 /* Specify the continuation point for a loop started with
2626 expand_start_loop_continue_elsewhere.
2627 Use this at the point in the code to which a continue statement
2628 should jump. */
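/* For a C `for (init; cond; incr) body', a front end would typically
emit, sketching the protocol with this file's primitives:
expand_start_loop_continue_elsewhere (1);
expand_exit_loop_if_false (0, cond);
... expand body ...
expand_loop_continue_here ();   <-- `continue' jumps here
... expand incr ...
expand_end_loop (); */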
2629
2630 void
2631 expand_loop_continue_here ()
2632 {
2633 do_pending_stack_adjust ();
2634 emit_note (NULL, NOTE_INSN_LOOP_CONT);
2635 emit_label (loop_stack->data.loop.continue_label);
2636 }
2637
2638 /* Finish a loop. Generate a jump back to the top and the loop-exit label.
2639 Pop the block off of loop_stack. */
2640
2641 void
2642 expand_end_loop ()
2643 {
2644 rtx start_label = loop_stack->data.loop.start_label;
2645 rtx insn = get_last_insn ();
2646 int needs_end_jump = 1;
2647
2648 /* Mark the continue-point at the top of the loop if none elsewhere. */
2649 if (start_label == loop_stack->data.loop.continue_label)
2650 emit_note_before (NOTE_INSN_LOOP_CONT, start_label);
2651
2652 do_pending_stack_adjust ();
2653
2654 /* If optimizing, perhaps reorder the loop.
2655 First, try to use a condjump near the end.
2656 expand_exit_loop_if_false ends loops with unconditional jumps,
2657 like this:
2658
2659 if (test) goto label;
2660 optional: cleanup
2661 goto loop_stack->data.loop.end_label
2662 barrier
2663 label:
2664
2665 If we find such a pattern, we can end the loop earlier. */
2666
2667 if (optimize
2668 && GET_CODE (insn) == CODE_LABEL
2669 && LABEL_NAME (insn) == NULL
2670 && GET_CODE (PREV_INSN (insn)) == BARRIER)
2671 {
2672 rtx label = insn;
2673 rtx jump = PREV_INSN (PREV_INSN (label));
2674
2675 if (GET_CODE (jump) == JUMP_INSN
2676 && GET_CODE (PATTERN (jump)) == SET
2677 && SET_DEST (PATTERN (jump)) == pc_rtx
2678 && GET_CODE (SET_SRC (PATTERN (jump))) == LABEL_REF
2679 && (XEXP (SET_SRC (PATTERN (jump)), 0)
2680 == loop_stack->data.loop.end_label))
2681 {
2682 rtx prev;
2683
2684 /* The test might be complex and reference LABEL multiple times,
2685 like the loop in loop_iterations to set vtop. To handle this,
2686 we move LABEL. */
2687 insn = PREV_INSN (label);
2688 reorder_insns (label, label, start_label);
2689
2690 for (prev = PREV_INSN (jump);; prev = PREV_INSN (prev))
2691 {
2692 /* We ignore line number notes, but if we see any other note,
2693 in particular NOTE_INSN_BLOCK_*, NOTE_INSN_EH_REGION_*,
2694 NOTE_INSN_LOOP_*, we disable this optimization. */
2695 if (GET_CODE (prev) == NOTE)
2696 {
2697 if (NOTE_LINE_NUMBER (prev) < 0)
2698 break;
2699 continue;
2700 }
2701 if (GET_CODE (prev) == CODE_LABEL)
2702 break;
2703 if (GET_CODE (prev) == JUMP_INSN)
2704 {
2705 if (GET_CODE (PATTERN (prev)) == SET
2706 && SET_DEST (PATTERN (prev)) == pc_rtx
2707 && GET_CODE (SET_SRC (PATTERN (prev))) == IF_THEN_ELSE
2708 && (GET_CODE (XEXP (SET_SRC (PATTERN (prev)), 1))
2709 == LABEL_REF)
2710 && XEXP (XEXP (SET_SRC (PATTERN (prev)), 1), 0) == label)
2711 {
2712 XEXP (XEXP (SET_SRC (PATTERN (prev)), 1), 0)
2713 = start_label;
2714 emit_note_after (NOTE_INSN_LOOP_END, prev);
2715 needs_end_jump = 0;
2716 }
2717 break;
2718 }
2719 }
2720 }
2721 }
2722
2723 /* If the loop starts with a loop exit, roll that to the end where
2724 it will optimize together with the jump back.
2725
2726 We look for the conditional branch to the exit, except that once
2727 we find such a branch, we don't look past 30 instructions.
2728
2729 In more detail, if the loop presently looks like this (in pseudo-C):
2730
2731 start_label:
2732 if (test) goto end_label;
2733 body;
2734 goto start_label;
2735 end_label:
2736
2737 transform it to look like:
2738
2739 goto start_label;
2740 newstart_label:
2741 body;
2742 start_label:
2743 if (test) goto end_label;
2744 goto newstart_label;
2745 end_label:
2746
2747 Here, the `test' may actually consist of some reasonably complex
2748 code, terminating in a test. */
2749
2750 if (optimize
2751 && needs_end_jump
2752 &&
2753 ! (GET_CODE (insn) == JUMP_INSN
2754 && GET_CODE (PATTERN (insn)) == SET
2755 && SET_DEST (PATTERN (insn)) == pc_rtx
2756 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE))
2757 {
2758 int eh_regions = 0;
2759 int num_insns = 0;
2760 rtx last_test_insn = NULL_RTX;
2761
2762 /* Scan insns from the top of the loop looking for a qualified
2763 conditional exit. */
2764 for (insn = NEXT_INSN (loop_stack->data.loop.start_label); insn;
2765 insn = NEXT_INSN (insn))
2766 {
2767 if (GET_CODE (insn) == NOTE)
2768 {
2769 if (optimize < 2
2770 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2771 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2772 /* The code that actually moves the exit test will
2773 carefully leave BLOCK notes in their original
2774 location. That means, however, that we can't debug
2775 the exit test itself. So, we refuse to move code
2776 containing BLOCK notes at low optimization levels. */
2777 break;
2778
2779 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
2780 ++eh_regions;
2781 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
2782 {
2783 --eh_regions;
2784 if (eh_regions < 0)
2785 /* We've come to the end of an EH region, but
2786 never saw the beginning of that region. That
2787 means that an EH region begins before the top
2788 of the loop, and ends in the middle of it. The
2789 existence of such a situation violates a basic
2790 assumption in this code, since that would imply
2791 that even when EH_REGIONS is zero, we might
2792 move code out of an exception region. */
2793 abort ();
2794 }
2795
2796 /* We must not walk into a nested loop. */
2797 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
2798 break;
2799
2800 /* We already know this INSN is a NOTE, so there's no
2801 point in looking at it to see if it's a JUMP. */
2802 continue;
2803 }
2804
2805 if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == INSN)
2806 num_insns++;
2807
2808 if (last_test_insn && num_insns > 30)
2809 break;
2810
2811 if (eh_regions > 0)
2812 /* We don't want to move a partial EH region. Consider:
2813
2814 while ( ( { try {
2815 if (cond ()) 0;
2816 else {
2817 bar();
2818 1;
2819 }
2820 } catch (...) {
2821 1;
2822 } )) {
2823 body;
2824 }
2825
2826 This isn't legal C++, but here's what it's supposed to
2827 mean: if cond() is true, stop looping. Otherwise,
2828 call bar, and keep looping. In addition, if cond
2829 throws an exception, catch it and keep looping. Such
2830 constructs are certainly legal in LISP.
2831
2832 We should not move the `if (cond()) 0' test since then
2833 the EH-region for the try-block would be broken up.
2834 (In this case we would move the EH_BEG note for the `try'
2835 and `if cond()' but not the call to bar() or the
2836 EH_END note.)
2837
2838 So we don't look for tests within an EH region. */
2839 continue;
2840
2841 if (GET_CODE (insn) == JUMP_INSN
2842 && GET_CODE (PATTERN (insn)) == SET
2843 && SET_DEST (PATTERN (insn)) == pc_rtx)
2844 {
2845 /* This is indeed a jump. */
2846 rtx dest1 = NULL_RTX;
2847 rtx dest2 = NULL_RTX;
2848 rtx potential_last_test;
2849 if (GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE)
2850 {
2851 /* A conditional jump. */
2852 dest1 = XEXP (SET_SRC (PATTERN (insn)), 1);
2853 dest2 = XEXP (SET_SRC (PATTERN (insn)), 2);
2854 potential_last_test = insn;
2855 }
2856 else
2857 {
2858 /* An unconditional jump. */
2859 dest1 = SET_SRC (PATTERN (insn));
2860 /* Include the BARRIER after the JUMP. */
2861 potential_last_test = NEXT_INSN (insn);
2862 }
2863
2864 do {
2865 if (dest1 && GET_CODE (dest1) == LABEL_REF
2866 && ((XEXP (dest1, 0)
2867 == loop_stack->data.loop.alt_end_label)
2868 || (XEXP (dest1, 0)
2869 == loop_stack->data.loop.end_label)))
2870 {
2871 last_test_insn = potential_last_test;
2872 break;
2873 }
2874
2875 /* If this was a conditional jump, there may be
2876 another label at which we should look. */
2877 dest1 = dest2;
2878 dest2 = NULL_RTX;
2879 } while (dest1);
2880 }
2881 }
2882
2883 if (last_test_insn != 0 && last_test_insn != get_last_insn ())
2884 {
2885 /* We found one. Move everything from there up
2886 to the end of the loop, and add a jump into the loop
2887 to jump to there. */
2888 rtx newstart_label = gen_label_rtx ();
2889 rtx start_move = start_label;
2890 rtx next_insn;
2891
2892 /* If the start label is preceded by a NOTE_INSN_LOOP_CONT note,
2893 then we want to move this note also. */
2894 if (GET_CODE (PREV_INSN (start_move)) == NOTE
2895 && (NOTE_LINE_NUMBER (PREV_INSN (start_move))
2896 == NOTE_INSN_LOOP_CONT))
2897 start_move = PREV_INSN (start_move);
2898
2899 emit_label_after (newstart_label, PREV_INSN (start_move));
2900
2901 /* Actually move the insns. Start at the beginning, and
2902 keep copying insns until we've copied the
2903 last_test_insn. */
2904 for (insn = start_move; insn; insn = next_insn)
2905 {
2906 /* Figure out which insn comes after this one. We have
2907 to do this before we move INSN. */
2908 if (insn == last_test_insn)
2909 /* We've moved all the insns. */
2910 next_insn = NULL_RTX;
2911 else
2912 next_insn = NEXT_INSN (insn);
2913
2914 if (GET_CODE (insn) == NOTE
2915 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2916 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2917 /* We don't want to move NOTE_INSN_BLOCK_BEGs or
2918 NOTE_INSN_BLOCK_ENDs because the correct generation
2919 of debugging information depends on these appearing
2920 in the same order in the RTL and in the tree
2921 structure, where they are represented as BLOCKs.
2922 So, we don't move block notes. Of course, moving
2923 the code inside the block is likely to make it
2924 impossible to debug the instructions in the exit
2925 test, but such is the price of optimization. */
2926 continue;
2927
2928 /* Move the INSN. */
2929 reorder_insns (insn, insn, get_last_insn ());
2930 }
2931
2932 emit_jump_insn_after (gen_jump (start_label),
2933 PREV_INSN (newstart_label));
2934 emit_barrier_after (PREV_INSN (newstart_label));
2935 start_label = newstart_label;
2936 }
2937 }
2938
2939 if (needs_end_jump)
2940 {
2941 emit_jump (start_label);
2942 emit_note (NULL, NOTE_INSN_LOOP_END);
2943 }
2944 emit_label (loop_stack->data.loop.end_label);
2945
2946 POPSTACK (loop_stack);
2947
2948 last_expr_type = 0;
2949 }
2950
2951 /* Finish a null loop, aka do { } while (0). */
2952
2953 void
2954 expand_end_null_loop ()
2955 {
2956 do_pending_stack_adjust ();
2957 emit_label (loop_stack->data.loop.end_label);
2958
2959 POPSTACK (loop_stack);
2960
2961 last_expr_type = 0;
2962 }
2963
2964 /* Generate a jump to the current loop's continue-point.
2965 This is usually the top of the loop, but may be specified
2966 explicitly elsewhere. If not currently inside a loop,
2967 return 0 and do nothing; caller will print an error message. */
2968
2969 int
2970 expand_continue_loop (whichloop)
2971 struct nesting *whichloop;
2972 {
2973 last_expr_type = 0;
2974 if (whichloop == 0)
2975 whichloop = loop_stack;
2976 if (whichloop == 0)
2977 return 0;
2978 expand_goto_internal (NULL_TREE, whichloop->data.loop.continue_label,
2979 NULL_RTX);
2980 return 1;
2981 }
2982
2983 /* Generate a jump to exit the current loop. If not currently inside a loop,
2984 return 0 and do nothing; caller will print an error message. */
2985
2986 int
2987 expand_exit_loop (whichloop)
2988 struct nesting *whichloop;
2989 {
2990 last_expr_type = 0;
2991 if (whichloop == 0)
2992 whichloop = loop_stack;
2993 if (whichloop == 0)
2994 return 0;
2995 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label, NULL_RTX);
2996 return 1;
2997 }
2998
2999 /* Generate a conditional jump to exit the current loop if COND
3000 evaluates to zero. If not currently inside a loop,
3001 return 0 and do nothing; caller will print an error message. */
3002
3003 int
3004 expand_exit_loop_if_false (whichloop, cond)
3005 struct nesting *whichloop;
3006 tree cond;
3007 {
3008 rtx label = gen_label_rtx ();
3009 rtx last_insn;
3010 last_expr_type = 0;
3011
3012 if (whichloop == 0)
3013 whichloop = loop_stack;
3014 if (whichloop == 0)
3015 return 0;
3016 /* In order to handle fixups, we actually create a conditional jump
3017 around an unconditional branch to exit the loop. If fixups are
3018 necessary, they go before the unconditional branch. */
3019
3020 do_jump (cond, NULL_RTX, label);
3021 last_insn = get_last_insn ();
3022 if (GET_CODE (last_insn) == CODE_LABEL)
3023 whichloop->data.loop.alt_end_label = last_insn;
3024 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label,
3025 NULL_RTX);
3026 emit_label (label);
3027
3028 return 1;
3029 }
3030
3031 /* Return nonzero if the loop nest is empty. Else return zero. */
3032
3033 int
3034 stmt_loop_nest_empty ()
3035 {
3036 /* cfun->stmt can be NULL if we are building a call to get the
3037 EH context for a setjmp/longjmp EH target and the current
3038 function was a deferred inline function. */
3039 return (cfun->stmt == NULL || loop_stack == NULL);
3040 }
3041
3042 /* Return non-zero if we should preserve sub-expressions as separate
3043 pseudos. We never do so if we aren't optimizing. We always do so
3044 if -fexpensive-optimizations.
3045
3046 Otherwise, we only do so if we are in the "early" part of a loop. I.e.,
3047 the loop may still be a small one. */
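/* The "early" test below is a heuristic on insn UIDs: with, say, 30
non-fixed registers, only roughly the first 90 insns after the
loop's start label qualify. */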
3048
3049 int
3050 preserve_subexpressions_p ()
3051 {
3052 rtx insn;
3053
3054 if (flag_expensive_optimizations)
3055 return 1;
3056
3057 if (optimize == 0 || cfun == 0 || cfun->stmt == 0 || loop_stack == 0)
3058 return 0;
3059
3060 insn = get_last_insn_anywhere ();
3061
3062 return (insn
3063 && (INSN_UID (insn) - INSN_UID (loop_stack->data.loop.start_label)
3064 < n_non_fixed_regs * 3));
3065
3066 }
3067
3068 /* Generate a jump to exit the current loop, conditional, binding contour
3069 or case statement. Not all such constructs are visible to this function,
3070 only those started with EXIT_FLAG nonzero. Individual languages use
3071 the EXIT_FLAG parameter to control which kinds of constructs you can
3072 exit this way.
3073
3074 If not currently inside anything that can be exited,
3075 return 0 and do nothing; caller will print an error message. */
3076
3077 int
3078 expand_exit_something ()
3079 {
3080 struct nesting *n;
3081 last_expr_type = 0;
3082 for (n = nesting_stack; n; n = n->all)
3083 if (n->exit_label != 0)
3084 {
3085 expand_goto_internal (NULL_TREE, n->exit_label, NULL_RTX);
3086 return 1;
3087 }
3088
3089 return 0;
3090 }
3091 \f
3092 /* Generate RTL to return from the current function, with no value.
3093 (That is, we do not do anything about returning any value.) */
3094
3095 void
3096 expand_null_return ()
3097 {
3098 rtx last_insn = get_last_insn ();
3099
3100 /* If this function was declared to return a value, but we
3101 didn't, clobber the return registers so that they are not
3102 propagated live to the rest of the function. */
3103 clobber_return_register ();
3104
3105 expand_null_return_1 (last_insn);
3106 }
3107
3108 /* Generate RTL to return from the current function, with value VAL. */
3109
3110 static void
3111 expand_value_return (val)
3112 rtx val;
3113 {
3114 rtx last_insn = get_last_insn ();
3115 rtx return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
3116
3117 /* Copy the value to the return location
3118 unless it's already there. */
3119
3120 if (return_reg != val)
3121 {
3122 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
3123 #ifdef PROMOTE_FUNCTION_RETURN
3124 int unsignedp = TREE_UNSIGNED (type);
3125 enum machine_mode old_mode
3126 = DECL_MODE (DECL_RESULT (current_function_decl));
3127 enum machine_mode mode
3128 = promote_mode (type, old_mode, &unsignedp, 1);
3129
3130 if (mode != old_mode)
3131 val = convert_modes (mode, old_mode, val, unsignedp);
3132 #endif
3133 if (GET_CODE (return_reg) == PARALLEL)
3134 emit_group_load (return_reg, val, int_size_in_bytes (type));
3135 else
3136 emit_move_insn (return_reg, val);
3137 }
3138
3139 expand_null_return_1 (last_insn);
3140 }
3141
3142 /* Output a return with no value. If LAST_INSN is nonzero,
3143 pretend that the return takes place after LAST_INSN. */
3144
3145 static void
3146 expand_null_return_1 (last_insn)
3147 rtx last_insn;
3148 {
3149 rtx end_label = cleanup_label ? cleanup_label : return_label;
3150
3151 clear_pending_stack_adjust ();
3152 do_pending_stack_adjust ();
3153 last_expr_type = 0;
3154
3155 if (end_label == 0)
3156 end_label = return_label = gen_label_rtx ();
3157 expand_goto_internal (NULL_TREE, end_label, last_insn);
3158 }
3159 \f
3160 /* Generate RTL to evaluate the expression RETVAL and return it
3161 from the current function. */
3162
3163 void
3164 expand_return (retval)
3165 tree retval;
3166 {
3167 /* If there are any cleanups to be performed, then they will
3168 be inserted following LAST_INSN. It is desirable
3169 that the last_insn, for such purposes, should be the
3170 last insn before computing the return value. Otherwise, cleanups
3171 which call functions can clobber the return value. */
3172 /* ??? rms: I think that is erroneous, because in C++ it would
3173 run destructors on variables that might be used in the subsequent
3174 computation of the return value. */
3175 rtx last_insn = 0;
3176 rtx result_rtl;
3177 rtx val = 0;
3178 tree retval_rhs;
3179
3180 /* If function wants no value, give it none. */
3181 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
3182 {
3183 expand_expr (retval, NULL_RTX, VOIDmode, 0);
3184 emit_queue ();
3185 expand_null_return ();
3186 return;
3187 }
3188
3189 if (retval == error_mark_node)
3190 {
3191 /* Treat this like a return of no value from a function that
3192 returns a value. */
3193 expand_null_return ();
3194 return;
3195 }
3196 else if (TREE_CODE (retval) == RESULT_DECL)
3197 retval_rhs = retval;
3198 else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
3199 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
3200 retval_rhs = TREE_OPERAND (retval, 1);
3201 else if (VOID_TYPE_P (TREE_TYPE (retval)))
3202 /* Recognize tail-recursive call to void function. */
3203 retval_rhs = retval;
3204 else
3205 retval_rhs = NULL_TREE;
3206
3207 last_insn = get_last_insn ();
3208
3209 /* Distribute return down conditional expr if either of the sides
3210 may involve tail recursion (see test below). This enhances the number
3211 of tail recursions we see. Don't always do this, since it can produce
3212 sub-optimal code in some cases and we distribute assignments into
3213 conditional expressions when it would help. */
3214
3215 if (optimize && retval_rhs != 0
3216 && frame_offset == 0
3217 && TREE_CODE (retval_rhs) == COND_EXPR
3218 && (TREE_CODE (TREE_OPERAND (retval_rhs, 1)) == CALL_EXPR
3219 || TREE_CODE (TREE_OPERAND (retval_rhs, 2)) == CALL_EXPR))
3220 {
3221 rtx label = gen_label_rtx ();
3222 tree expr;
3223
3224 do_jump (TREE_OPERAND (retval_rhs, 0), label, NULL_RTX);
3225 start_cleanup_deferral ();
3226 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
3227 DECL_RESULT (current_function_decl),
3228 TREE_OPERAND (retval_rhs, 1));
3229 TREE_SIDE_EFFECTS (expr) = 1;
3230 expand_return (expr);
3231 emit_label (label);
3232
3233 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
3234 DECL_RESULT (current_function_decl),
3235 TREE_OPERAND (retval_rhs, 2));
3236 TREE_SIDE_EFFECTS (expr) = 1;
3237 expand_return (expr);
3238 end_cleanup_deferral ();
3239 return;
3240 }
3241
3242 result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
3243
3244 /* If the result is an aggregate that is being returned in one (or more)
3245 registers, load the registers here. The compiler currently can't handle
3246 copying a BLKmode value into registers. We could put this code in a
3247 more general area (for use by everyone instead of just function
3248 call/return), but until this feature is generally usable it is kept here
3249 (and in expand_call). The value must go into a pseudo in case there
3250 are cleanups that will clobber the real return register. */
3251
3252 if (retval_rhs != 0
3253 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
3254 && GET_CODE (result_rtl) == REG)
3255 {
3256 int i;
3257 unsigned HOST_WIDE_INT bitpos, xbitpos;
3258 unsigned HOST_WIDE_INT big_endian_correction = 0;
3259 unsigned HOST_WIDE_INT bytes
3260 = int_size_in_bytes (TREE_TYPE (retval_rhs));
3261 int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
3262 unsigned int bitsize
3263 = MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)), BITS_PER_WORD);
3264 rtx *result_pseudos = (rtx *) alloca (sizeof (rtx) * n_regs);
3265 rtx result_reg, src = NULL_RTX, dst = NULL_RTX;
3266 rtx result_val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
3267 enum machine_mode tmpmode, result_reg_mode;
3268
3269 if (bytes == 0)
3270 {
3271 expand_null_return ();
3272 return;
3273 }
3274
3275 /* Structures whose size is not a multiple of a word are aligned
3276 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
3277 machine, this means we must skip the empty high order bytes when
3278 calculating the bit offset. */
3279 if (BYTES_BIG_ENDIAN
3280 && !FUNCTION_ARG_REG_LITTLE_ENDIAN
3281 && bytes % UNITS_PER_WORD)
3282 big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
3283 * BITS_PER_UNIT));
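/* E.g. for a 6-byte structure on a 32-bit big-endian target,
bytes % UNITS_PER_WORD == 2, so the correction is 32 - 16 == 16:
copying into the first destination word starts at bit offset 16
rather than 0. */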
3284
3285 /* Copy the structure BITSIZE bits at a time. */
3286 for (bitpos = 0, xbitpos = big_endian_correction;
3287 bitpos < bytes * BITS_PER_UNIT;
3288 bitpos += bitsize, xbitpos += bitsize)
3289 {
3290 /* We need a new destination pseudo each time xbitpos is
3291 on a word boundary and when xbitpos == big_endian_correction
3292 (the first time through). */
3293 if (xbitpos % BITS_PER_WORD == 0
3294 || xbitpos == big_endian_correction)
3295 {
3296 /* Generate an appropriate register. */
3297 dst = gen_reg_rtx (word_mode);
3298 result_pseudos[xbitpos / BITS_PER_WORD] = dst;
3299
3300 /* Clear the destination before we move anything into it. */
3301 emit_move_insn (dst, CONST0_RTX (GET_MODE (dst)));
3302 }
3303
3304 /* We need a new source operand each time bitpos is on a word
3305 boundary. */
3306 if (bitpos % BITS_PER_WORD == 0)
3307 src = operand_subword_force (result_val,
3308 bitpos / BITS_PER_WORD,
3309 BLKmode);
3310
3311 /* Use bitpos for the source extraction (left justified) and
3312 xbitpos for the destination store (right justified). */
3313 store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
3314 extract_bit_field (src, bitsize,
3315 bitpos % BITS_PER_WORD, 1,
3316 NULL_RTX, word_mode, word_mode,
3317 BITS_PER_WORD),
3318 BITS_PER_WORD);
3319 }
3320
3321 /* Find the smallest integer mode large enough to hold the
3322 entire structure and use that mode instead of BLKmode
3323 on the USE insn for the return register. */
3324 for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3325 tmpmode != VOIDmode;
3326 tmpmode = GET_MODE_WIDER_MODE (tmpmode))
3327 /* Have we found a large enough mode? */
3328 if (GET_MODE_SIZE (tmpmode) >= bytes)
3329 break;
3330
3331 /* No suitable mode found. */
3332 if (tmpmode == VOIDmode)
3333 abort ();
3334
3335 PUT_MODE (result_rtl, tmpmode);
3336
3337 if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
3338 result_reg_mode = word_mode;
3339 else
3340 result_reg_mode = tmpmode;
3341 result_reg = gen_reg_rtx (result_reg_mode);
3342
3343 emit_queue ();
3344 for (i = 0; i < n_regs; i++)
3345 emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
3346 result_pseudos[i]);
3347
3348 if (tmpmode != result_reg_mode)
3349 result_reg = gen_lowpart (tmpmode, result_reg);
3350
3351 expand_value_return (result_reg);
3352 }
3353 else if (retval_rhs != 0
3354 && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
3355 && (GET_CODE (result_rtl) == REG
3356 || (GET_CODE (result_rtl) == PARALLEL)))
3357 {
3358 /* Calculate the return value into a temporary (usually a pseudo
3359 reg). */
3360 tree ot = TREE_TYPE (DECL_RESULT (current_function_decl));
3361 tree nt = build_qualified_type (ot, TYPE_QUALS (ot) | TYPE_QUAL_CONST);
3362
3363 val = assign_temp (nt, 0, 0, 1);
3364 val = expand_expr (retval_rhs, val, GET_MODE (val), 0);
3365 val = force_not_mem (val);
3366 emit_queue ();
3367 /* Return the calculated value, doing cleanups first. */
3368 expand_value_return (val);
3369 }
3370 else
3371 {
3372 /* No cleanups or no hard reg used;
3373 calculate value into hard return reg. */
3374 expand_expr (retval, const0_rtx, VOIDmode, 0);
3375 emit_queue ();
3376 expand_value_return (result_rtl);
3377 }
3378 }
3379
3380 /* Return 1 if the end of the generated RTX is not a barrier.
3381 This means code already compiled can drop through. */
3382
3383 int
3384 drop_through_at_end_p ()
3385 {
3386 rtx insn = get_last_insn ();
3387 while (insn && GET_CODE (insn) == NOTE)
3388 insn = PREV_INSN (insn);
3389 return insn && GET_CODE (insn) != BARRIER;
3390 }
3391 \f
3392 /* Attempt to optimize a potential tail recursion call into a goto.
3393 ARGUMENTS are the arguments to a CALL_EXPR; LAST_INSN indicates
3394 where to place the jump to the tail recursion label.
3395
3396 Return TRUE if the call was optimized into a goto. */
3397
3398 int
3399 optimize_tail_recursion (arguments, last_insn)
3400 tree arguments;
3401 rtx last_insn;
3402 {
3403 /* Finish checking validity, and if valid emit code to set the
3404 argument variables for the new call. */
3405 if (tail_recursion_args (arguments, DECL_ARGUMENTS (current_function_decl)))
3406 {
3407 if (tail_recursion_label == 0)
3408 {
3409 tail_recursion_label = gen_label_rtx ();
3410 emit_label_after (tail_recursion_label,
3411 tail_recursion_reentry);
3412 }
3413 emit_queue ();
3414 expand_goto_internal (NULL_TREE, tail_recursion_label, last_insn);
3415 emit_barrier ();
3416 return 1;
3417 }
3418 return 0;
3419 }
3420
3421 /* Emit code to alter this function's formal parms for a tail-recursive call.
3422 ACTUALS is a list of actual parameter expressions (chain of TREE_LISTs).
3423 FORMALS is the chain of decls of formals.
3424 Return 1 if this can be done;
3425 otherwise return 0 and do not emit any code. */
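/* For example, for the self-call in
int f (int a, int b) { ... return f (b, a + 1); ... }
the rtx computed for a + 1 is copied to a fresh pseudo before the
assignment a = b is emitted, since that rtx still mentions the
register holding the old a. */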
3426
3427 static int
3428 tail_recursion_args (actuals, formals)
3429 tree actuals, formals;
3430 {
3431 tree a = actuals, f = formals;
3432 int i;
3433 rtx *argvec;
3434
3435 /* Check that number and types of actuals are compatible
3436 with the formals. This is not always true in valid C code.
3437 Also check that no formal needs to be addressable
3438 and that all formals are scalars. */
3439
3440 /* Also count the args. */
3441
3442 for (a = actuals, f = formals, i = 0; a && f; a = TREE_CHAIN (a), f = TREE_CHAIN (f), i++)
3443 {
3444 if (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_VALUE (a)))
3445 != TYPE_MAIN_VARIANT (TREE_TYPE (f)))
3446 return 0;
3447 if (GET_CODE (DECL_RTL (f)) != REG || DECL_MODE (f) == BLKmode)
3448 return 0;
3449 }
3450 if (a != 0 || f != 0)
3451 return 0;
3452
3453 /* Compute all the actuals. */
3454
3455 argvec = (rtx *) alloca (i * sizeof (rtx));
3456
3457 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
3458 argvec[i] = expand_expr (TREE_VALUE (a), NULL_RTX, VOIDmode, 0);
3459
3460 /* Find which actual values refer to current values of previous formals.
3461 Copy each of them now, before any formal is changed. */
3462
3463 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
3464 {
3465 int copy = 0;
3466 int j;
3467 for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++)
3468 if (reg_mentioned_p (DECL_RTL (f), argvec[i]))
3469 {
3470 copy = 1;
3471 break;
3472 }
3473 if (copy)
3474 argvec[i] = copy_to_reg (argvec[i]);
3475 }
3476
3477 /* Store the values of the actuals into the formals. */
3478
3479 for (f = formals, a = actuals, i = 0; f;
3480 f = TREE_CHAIN (f), a = TREE_CHAIN (a), i++)
3481 {
3482 if (GET_MODE (DECL_RTL (f)) == GET_MODE (argvec[i]))
3483 emit_move_insn (DECL_RTL (f), argvec[i]);
3484 else
3485 convert_move (DECL_RTL (f), argvec[i],
3486 TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a))));
3487 }
3488
3489 free_temp_slots ();
3490 return 1;
3491 }
3492 \f
3493 /* Generate the RTL code for entering a binding contour.
3494 The variables are declared one by one, by calls to `expand_decl'.
3495
3496 FLAGS is a bitwise or of the following flags:
3497
3498 1 - Nonzero if this construct should be visible to
3499 `exit_something'.
3500
3501 2 - Nonzero if this contour does not require a
3502 NOTE_INSN_BLOCK_BEG note. Virtually all calls from
3503 language-independent code should set this flag because they
3504 will not create corresponding BLOCK nodes. (There should be
3505 a one-to-one correspondence between NOTE_INSN_BLOCK_BEG notes
3506 and BLOCKs.) If this flag is set, MARK_ENDS should be zero
3507 when expand_end_bindings is called.
3508
3509 If we are creating a NOTE_INSN_BLOCK_BEG note, a BLOCK may
3510 optionally be supplied. If so, it becomes the NOTE_BLOCK for the
3511 note. */
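/* For instance, expand_start_target_temps below passes FLAGS == 2:
a contour invisible to `exit_something' that emits no
NOTE_INSN_BLOCK_BEG note. */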
3512
3513 void
3514 expand_start_bindings_and_block (flags, block)
3515 int flags;
3516 tree block;
3517 {
3518 struct nesting *thisblock = ALLOC_NESTING ();
3519 rtx note;
3520 int exit_flag = ((flags & 1) != 0);
3521 int block_flag = ((flags & 2) == 0);
3522
3523 /* If a BLOCK is supplied, then the caller should be requesting a
3524 NOTE_INSN_BLOCK_BEG note. */
3525 if (!block_flag && block)
3526 abort ();
3527
3528 /* Create a note to mark the beginning of the block. */
3529 if (block_flag)
3530 {
3531 note = emit_note (NULL, NOTE_INSN_BLOCK_BEG);
3532 NOTE_BLOCK (note) = block;
3533 }
3534 else
3535 note = emit_note (NULL, NOTE_INSN_DELETED);
3536
3537 /* Make an entry on block_stack for the block we are entering. */
3538
3539 thisblock->next = block_stack;
3540 thisblock->all = nesting_stack;
3541 thisblock->depth = ++nesting_depth;
3542 thisblock->data.block.stack_level = 0;
3543 thisblock->data.block.cleanups = 0;
3544 thisblock->data.block.n_function_calls = 0;
3545 thisblock->data.block.exception_region = 0;
3546 thisblock->data.block.block_target_temp_slot_level = target_temp_slot_level;
3547
3548 thisblock->data.block.conditional_code = 0;
3549 thisblock->data.block.last_unconditional_cleanup = note;
3550 /* When we insert instructions after the last unconditional cleanup,
3551 we don't adjust last_insn. That means that a later add_insn will
3552 clobber the instructions we've just added. The easiest way to
3553 fix this is to just insert another instruction here, so that the
3554 instructions inserted after the last unconditional cleanup are
3555 never the last instruction. */
3556 emit_note (NULL, NOTE_INSN_DELETED);
3557 thisblock->data.block.cleanup_ptr = &thisblock->data.block.cleanups;
3558
3559 if (block_stack
3560 && !(block_stack->data.block.cleanups == NULL_TREE
3561 && block_stack->data.block.outer_cleanups == NULL_TREE))
3562 thisblock->data.block.outer_cleanups
3563 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
3564 block_stack->data.block.outer_cleanups);
3565 else
3566 thisblock->data.block.outer_cleanups = 0;
3567 thisblock->data.block.label_chain = 0;
3568 thisblock->data.block.innermost_stack_block = stack_block_stack;
3569 thisblock->data.block.first_insn = note;
3570 thisblock->data.block.block_start_count = ++current_block_start_count;
3571 thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
3572 block_stack = thisblock;
3573 nesting_stack = thisblock;
3574
3575 /* Make a new level for allocating stack slots. */
3576 push_temp_slots ();
3577 }
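
/* A rough usage sketch (hypothetical caller, for illustration):
   language-independent code typically brackets a scope as

	expand_start_bindings (2);
	... expand declarations and statements ...
	expand_end_bindings (NULL_TREE, 0, 0);

   requesting no NOTE_INSN_BLOCK_BEG note, whereas a front end emitting
   a user-visible scope leaves flag bit 2 clear, may supply the BLOCK,
   and passes a nonzero MARK_ENDS to expand_end_bindings so that the
   BLOCK_BEG/BLOCK_END notes pair up with the BLOCK node.  */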
3578
3579 /* Specify the scope of temporaries created by TARGET_EXPRs. Similar
3580 to CLEANUP_POINT_EXPR, but handles cases when a series of calls to
3581 expand_expr are made. After we end the region, we know that all
3582 temporaries that were created by TARGET_EXPRs will have been
3583 destroyed and their space freed for reuse. */
3584
3585 void
3586 expand_start_target_temps ()
3587 {
3588 /* This is so that even if the result is preserved, the space
3589 allocated will be freed, as we know that it is no longer in use. */
3590 push_temp_slots ();
3591
3592 /* Start a new binding layer that will keep track of all cleanup
3593 actions to be performed. */
3594 expand_start_bindings (2);
3595
3596 target_temp_slot_level = temp_slot_level;
3597 }
3598
3599 void
3600 expand_end_target_temps ()
3601 {
3602 expand_end_bindings (NULL_TREE, 0, 0);
3603
3604 /* This is so that even if the result is preserved, the space
3605 allocated will be freed, as we know that it is no longer in use. */
3606 pop_temp_slots ();
3607 }
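
/* A rough usage sketch (hypothetical caller): a front end can wrap the
   expansion of one full-expression EXP as

	expand_start_target_temps ();
	expand_expr_stmt (exp);
	expand_end_target_temps ();

   so that temporaries created for TARGET_EXPRs inside EXP are freed at
   the end of the region instead of surviving to the end of the
   enclosing binding contour.  */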
3608
3609 /* Given a pointer to a BLOCK node return non-zero if (and only if) the node
3610 in question represents the outermost pair of curly braces (i.e. the "body
3611 block") of a function or method.
3612
3613 For any BLOCK node representing a "body block" of a function or method, the
3614 BLOCK_SUPERCONTEXT of the node will point to another BLOCK node which
3615 represents the outermost (function) scope for the function or method (i.e.
3616 the one which includes the formal parameters). The BLOCK_SUPERCONTEXT of
3617 *that* node in turn will point to the relevant FUNCTION_DECL node. */
3618
3619 int
3620 is_body_block (stmt)
3621 tree stmt;
3622 {
3623 if (TREE_CODE (stmt) == BLOCK)
3624 {
3625 tree parent = BLOCK_SUPERCONTEXT (stmt);
3626
3627 if (parent && TREE_CODE (parent) == BLOCK)
3628 {
3629 tree grandparent = BLOCK_SUPERCONTEXT (parent);
3630
3631 if (grandparent && TREE_CODE (grandparent) == FUNCTION_DECL)
3632 return 1;
3633 }
3634 }
3635
3636 return 0;
3637 }
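
/* For illustration, in the hypothetical function

	int f (int x) { int y; return x + y; }

   the chain is: the "body block" holding Y has as BLOCK_SUPERCONTEXT
   the outermost BLOCK (the function scope holding the parameter X),
   whose BLOCK_SUPERCONTEXT is the FUNCTION_DECL `f'; is_body_block
   returns 1 only for the inner BLOCK.  */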
3638
3639 /* True if we are currently emitting insns in an area of output code
3640 that is controlled by a conditional expression. This is used by
3641 the cleanup handling code to generate conditional cleanup actions. */
3642
3643 int
3644 conditional_context ()
3645 {
3646 return block_stack && block_stack->data.block.conditional_code;
3647 }
3648
3649 /* Return an opaque pointer to the current nesting level, so frontend code
3650 can check its own sanity. */
3651
3652 struct nesting *
3653 current_nesting_level ()
3654 {
3655 return cfun ? block_stack : 0;
3656 }
3657
3658 /* Emit a handler label for a nonlocal goto handler.
3659 Also emit code to store the handler label in SLOT before BEFORE_INSN. */
3660
3661 static rtx
3662 expand_nl_handler_label (slot, before_insn)
3663 rtx slot, before_insn;
3664 {
3665 rtx insns;
3666 rtx handler_label = gen_label_rtx ();
3667
3668 /* Don't let cleanup_cfg delete the handler. */
3669 LABEL_PRESERVE_P (handler_label) = 1;
3670
3671 start_sequence ();
3672 emit_move_insn (slot, gen_rtx_LABEL_REF (Pmode, handler_label));
3673 insns = get_insns ();
3674 end_sequence ();
3675 emit_insns_before (insns, before_insn);
3676
3677 emit_label (handler_label);
3678
3679 return handler_label;
3680 }
3681
3682 /* Emit code to restore vital registers at the beginning of a nonlocal goto
3683 handler. */
3684 static void
3685 expand_nl_goto_receiver ()
3686 {
3687 #ifdef HAVE_nonlocal_goto
3688 if (! HAVE_nonlocal_goto)
3689 #endif
3690 /* First adjust our frame pointer to its actual value. It was
3691 previously set to the start of the virtual area corresponding to
3692 the stacked variables when we branched here and now needs to be
3693 adjusted to the actual hardware fp value.
3694
3695 Assignments to virtual registers are converted by
3696 instantiate_virtual_regs into the corresponding assignment
3697 to the underlying register (fp in this case) that makes
3698 the original assignment true.
3699 So the following insn will actually be
3700 decrementing fp by STARTING_FRAME_OFFSET. */
3701 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
3702
3703 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3704 if (fixed_regs[ARG_POINTER_REGNUM])
3705 {
3706 #ifdef ELIMINABLE_REGS
3707 /* If the argument pointer can be eliminated in favor of the
3708 frame pointer, we don't need to restore it. We assume here
3709 that if such an elimination is present, it can always be used.
3710 This is the case on all known machines; if we don't make this
3711 assumption, we do unnecessary saving on many machines. */
3712 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
3713 size_t i;
3714
3715 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
3716 if (elim_regs[i].from == ARG_POINTER_REGNUM
3717 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
3718 break;
3719
3720 if (i == ARRAY_SIZE (elim_regs))
3721 #endif
3722 {
3723 /* Now restore our arg pointer from the address at which it
3724 was saved in our stack frame. */
3725 emit_move_insn (virtual_incoming_args_rtx,
3726 copy_to_reg (get_arg_pointer_save_area (cfun)));
3727 }
3728 }
3729 #endif
3730
3731 #ifdef HAVE_nonlocal_goto_receiver
3732 if (HAVE_nonlocal_goto_receiver)
3733 emit_insn (gen_nonlocal_goto_receiver ());
3734 #endif
3735 }
3736
3737 /* Make handlers for nonlocal gotos taking place in the function calls in
3738 block THISBLOCK. */
3739
3740 static void
3741 expand_nl_goto_receivers (thisblock)
3742 struct nesting *thisblock;
3743 {
3744 tree link;
3745 rtx afterward = gen_label_rtx ();
3746 rtx insns, slot;
3747 rtx label_list;
3748 int any_invalid;
3749
3750 /* Record the handler address in the stack slot for that purpose,
3751 during this block, saving and restoring the outer value. */
3752 if (thisblock->next != 0)
3753 for (slot = nonlocal_goto_handler_slots; slot; slot = XEXP (slot, 1))
3754 {
3755 rtx save_receiver = gen_reg_rtx (Pmode);
3756 emit_move_insn (XEXP (slot, 0), save_receiver);
3757
3758 start_sequence ();
3759 emit_move_insn (save_receiver, XEXP (slot, 0));
3760 insns = get_insns ();
3761 end_sequence ();
3762 emit_insns_before (insns, thisblock->data.block.first_insn);
3763 }
3764
3765 /* Jump around the handlers; they run only when specially invoked. */
3766 emit_jump (afterward);
3767
3768 /* Make a separate handler for each label. */
3769 link = nonlocal_labels;
3770 slot = nonlocal_goto_handler_slots;
3771 label_list = NULL_RTX;
3772 for (; link; link = TREE_CHAIN (link), slot = XEXP (slot, 1))
3773 /* Skip any labels we shouldn't be able to jump to from here;
3774 we generate one special handler for all of them below, which just
3775 calls abort. */
3776 if (! DECL_TOO_LATE (TREE_VALUE (link)))
3777 {
3778 rtx lab;
3779 lab = expand_nl_handler_label (XEXP (slot, 0),
3780 thisblock->data.block.first_insn);
3781 label_list = gen_rtx_EXPR_LIST (VOIDmode, lab, label_list);
3782
3783 expand_nl_goto_receiver ();
3784
3785 /* Jump to the "real" nonlocal label. */
3786 expand_goto (TREE_VALUE (link));
3787 }
3788
3789 /* A second pass over all nonlocal labels; this time we handle those
3790 we should not be able to jump to at this point. */
3791 link = nonlocal_labels;
3792 slot = nonlocal_goto_handler_slots;
3793 any_invalid = 0;
3794 for (; link; link = TREE_CHAIN (link), slot = XEXP (slot, 1))
3795 if (DECL_TOO_LATE (TREE_VALUE (link)))
3796 {
3797 rtx lab;
3798 lab = expand_nl_handler_label (XEXP (slot, 0),
3799 thisblock->data.block.first_insn);
3800 label_list = gen_rtx_EXPR_LIST (VOIDmode, lab, label_list);
3801 any_invalid = 1;
3802 }
3803
3804 if (any_invalid)
3805 {
3806 expand_nl_goto_receiver ();
3807 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "abort"), LCT_NORETURN,
3808 VOIDmode, 0);
3809 emit_barrier ();
3810 }
3811
3812 nonlocal_goto_handler_labels = label_list;
3813 emit_label (afterward);
3814 }
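
/* For illustration, the GNU C input that exercises this machinery looks
   roughly like (hypothetical code; `apply' is some function that may
   call G):

	int f (void)
	{
	  __label__ failure;
	  void g (int x) { if (x < 0) goto failure; }
	  apply (g);
	  return 0;
	failure:
	  return -1;
	}

   FAILURE is a nonlocal label, and the receivers emitted above restore
   the frame and argument pointers before control reaches it.  */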
3815
3816 /* Warn about any unused VARS (which may contain nodes other than
3817 VAR_DECLs, but such nodes are ignored). The nodes are connected
3818 via the TREE_CHAIN field. */
3819
3820 void
3821 warn_about_unused_variables (vars)
3822 tree vars;
3823 {
3824 tree decl;
3825
3826 if (warn_unused_variable)
3827 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3828 if (TREE_CODE (decl) == VAR_DECL
3829 && ! TREE_USED (decl)
3830 && ! DECL_IN_SYSTEM_HEADER (decl)
3831 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
3832 warning_with_decl (decl, "unused variable `%s'");
3833 }
3834
3835 /* Generate RTL code to terminate a binding contour.
3836
3837 VARS is the chain of VAR_DECL nodes for the variables bound in this
3838 contour. There may actually be other nodes in this chain, but any
3839 nodes other than VAR_DECLS are ignored.
3840
3841 MARK_ENDS is nonzero if we should put a note at the beginning
3842 and end of this binding contour.
3843
3844 DONT_JUMP_IN is nonzero if it is not valid to jump into this contour.
3845 (That is true automatically if the contour has a saved stack level.) */
3846
3847 void
3848 expand_end_bindings (vars, mark_ends, dont_jump_in)
3849 tree vars;
3850 int mark_ends;
3851 int dont_jump_in;
3852 {
3853 struct nesting *thisblock = block_stack;
3854
3855 /* If any of the variables in this scope were not used, warn the
3856 user. */
3857 warn_about_unused_variables (vars);
3858
3859 if (thisblock->exit_label)
3860 {
3861 do_pending_stack_adjust ();
3862 emit_label (thisblock->exit_label);
3863 }
3864
3865 /* If necessary, make handlers for nonlocal gotos taking
3866 place in the function calls in this block. */
3867 if (function_call_count != thisblock->data.block.n_function_calls
3868 && nonlocal_labels
3869 /* Make handler for outermost block
3870 if there were any nonlocal gotos to this function. */
3871 && (thisblock->next == 0 ? current_function_has_nonlocal_label
3872 /* Make handler for inner block if it has something
3873 special to do when you jump out of it. */
3874 : (thisblock->data.block.cleanups != 0
3875 || thisblock->data.block.stack_level != 0)))
3876 expand_nl_goto_receivers (thisblock);
3877
3878 /* Don't allow jumping into a block that has a stack level.
3879 Cleanups are allowed, though. */
3880 if (dont_jump_in
3881 || thisblock->data.block.stack_level != 0)
3882 {
3883 struct label_chain *chain;
3884
3885 /* Any labels in this block are no longer valid to go to.
3886 Mark them to cause an error message. */
3887 for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
3888 {
3889 DECL_TOO_LATE (chain->label) = 1;
3890 /* If any goto without a fixup came to this label,
3891 that must be an error, because gotos without fixups
3892 come from outside all saved stack-levels. */
3893 if (TREE_ADDRESSABLE (chain->label))
3894 error_with_decl (chain->label,
3895 "label `%s' used before containing binding contour");
3896 }
3897 }
3898
3899 /* Restore stack level in effect before the block
3900 (only if variable-size objects allocated). */
3901 /* Perform any cleanups associated with the block. */
3902
3903 if (thisblock->data.block.stack_level != 0
3904 || thisblock->data.block.cleanups != 0)
3905 {
3906 int reachable;
3907 rtx insn;
3908
3909 /* Don't let cleanups affect ({...}) constructs. */
3910 int old_expr_stmts_for_value = expr_stmts_for_value;
3911 rtx old_last_expr_value = last_expr_value;
3912 tree old_last_expr_type = last_expr_type;
3913 expr_stmts_for_value = 0;
3914
3915 /* Only clean up here if this point can actually be reached. */
3916 insn = get_last_insn ();
3917 if (GET_CODE (insn) == NOTE)
3918 insn = prev_nonnote_insn (insn);
3919 reachable = (! insn || GET_CODE (insn) != BARRIER);
3920
3921 /* Do the cleanups. */
3922 expand_cleanups (thisblock->data.block.cleanups, NULL_TREE, 0, reachable);
3923 if (reachable)
3924 do_pending_stack_adjust ();
3925
3926 expr_stmts_for_value = old_expr_stmts_for_value;
3927 last_expr_value = old_last_expr_value;
3928 last_expr_type = old_last_expr_type;
3929
3930 /* Restore the stack level. */
3931
3932 if (reachable && thisblock->data.block.stack_level != 0)
3933 {
3934 emit_stack_restore (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3935 thisblock->data.block.stack_level, NULL_RTX);
3936 if (nonlocal_goto_handler_slots != 0)
3937 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level,
3938 NULL_RTX);
3939 }
3940
3941 /* Any gotos out of this block must also do these things.
3942 Also report any gotos with fixups that came to labels in this
3943 level. */
3944 fixup_gotos (thisblock,
3945 thisblock->data.block.stack_level,
3946 thisblock->data.block.cleanups,
3947 thisblock->data.block.first_insn,
3948 dont_jump_in);
3949 }
3950
3951 /* Mark the beginning and end of the scope if requested.
3952 We do this now, after running cleanups on the variables
3953 just going out of scope, so they are in scope for their cleanups. */
3954
3955 if (mark_ends)
3956 {
3957 rtx note = emit_note (NULL, NOTE_INSN_BLOCK_END);
3958 NOTE_BLOCK (note) = NOTE_BLOCK (thisblock->data.block.first_insn);
3959 }
3960 else
3961 /* Get rid of the beginning-mark if we don't make an end-mark. */
3962 NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;
3963
3964 /* Restore the temporary level of TARGET_EXPRs. */
3965 target_temp_slot_level = thisblock->data.block.block_target_temp_slot_level;
3966
3967 /* Restore block_stack level for containing block. */
3968
3969 stack_block_stack = thisblock->data.block.innermost_stack_block;
3970 POPSTACK (block_stack);
3971
3972 /* Pop the stack slot nesting and free any slots at this level. */
3973 pop_temp_slots ();
3974 }
3975 \f
3976 /* Generate code to save the stack pointer at the start of the current block
3977 and set up to restore it on exit. */
3978
3979 void
3980 save_stack_pointer ()
3981 {
3982 struct nesting *thisblock = block_stack;
3983
3984 if (thisblock->data.block.stack_level == 0)
3985 {
3986 emit_stack_save (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3987 &thisblock->data.block.stack_level,
3988 thisblock->data.block.first_insn);
3989 stack_block_stack = thisblock;
3990 }
3991 }
3992 \f
3993 /* Generate RTL for the automatic variable declaration DECL.
3994 (Other kinds of declarations are simply ignored if seen here.) */
3995
3996 void
3997 expand_decl (decl)
3998 tree decl;
3999 {
4000 struct nesting *thisblock;
4001 tree type;
4002
4003 type = TREE_TYPE (decl);
4004
4005 /* For a CONST_DECL, set mode, alignment, and sizes from those of the
4006 type in case this node is used in a reference. */
4007 if (TREE_CODE (decl) == CONST_DECL)
4008 {
4009 DECL_MODE (decl) = TYPE_MODE (type);
4010 DECL_ALIGN (decl) = TYPE_ALIGN (type);
4011 DECL_SIZE (decl) = TYPE_SIZE (type);
4012 DECL_SIZE_UNIT (decl) = TYPE_SIZE_UNIT (type);
4013 return;
4014 }
4015
4016 /* Otherwise, only automatic variables need any expansion done. Static and
4017 external variables, and external functions, will be handled by
4018 `assemble_variable' (called from finish_decl). TYPE_DECL requires
4019 nothing. PARM_DECLs are handled in `assign_parms'. */
4020 if (TREE_CODE (decl) != VAR_DECL)
4021 return;
4022
4023 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
4024 return;
4025
4026 thisblock = block_stack;
4027
4028 /* Create the RTL representation for the variable. */
4029
4030 if (type == error_mark_node)
4031 SET_DECL_RTL (decl, gen_rtx_MEM (BLKmode, const0_rtx));
4032
4033 else if (DECL_SIZE (decl) == 0)
4034 /* Variable with incomplete type. */
4035 {
4036 rtx x;
4037 if (DECL_INITIAL (decl) == 0)
4038 /* Error message was already done; now avoid a crash. */
4039 x = gen_rtx_MEM (BLKmode, const0_rtx);
4040 else
4041 /* An initializer is going to decide the size of this array.
4042 Until we know the size, represent its address with a reg. */
4043 x = gen_rtx_MEM (BLKmode, gen_reg_rtx (Pmode));
4044
4045 set_mem_attributes (x, decl, 1);
4046 SET_DECL_RTL (decl, x);
4047 }
4048 else if (DECL_MODE (decl) != BLKmode
4049 /* If -ffloat-store, don't put explicit float vars
4050 into regs. */
4051 && !(flag_float_store
4052 && TREE_CODE (type) == REAL_TYPE)
4053 && ! TREE_THIS_VOLATILE (decl)
4054 && (DECL_REGISTER (decl) || optimize))
4055 {
4056 /* Automatic variable that can go in a register. */
4057 int unsignedp = TREE_UNSIGNED (type);
4058 enum machine_mode reg_mode
4059 = promote_mode (type, DECL_MODE (decl), &unsignedp, 0);
4060
4061 SET_DECL_RTL (decl, gen_reg_rtx (reg_mode));
4062
4063 if (GET_CODE (DECL_RTL (decl)) == REG)
4064 REGNO_DECL (REGNO (DECL_RTL (decl))) = decl;
4065 else if (GET_CODE (DECL_RTL (decl)) == CONCAT)
4066 {
4067 REGNO_DECL (REGNO (XEXP (DECL_RTL (decl), 0))) = decl;
4068 REGNO_DECL (REGNO (XEXP (DECL_RTL (decl), 1))) = decl;
4069 }
4070
4071 mark_user_reg (DECL_RTL (decl));
4072
4073 if (POINTER_TYPE_P (type))
4074 mark_reg_pointer (DECL_RTL (decl),
4075 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl))));
4076
4077 maybe_set_unchanging (DECL_RTL (decl), decl);
4078
4079 /* If something wants our address, try to use ADDRESSOF. */
4080 if (TREE_ADDRESSABLE (decl))
4081 put_var_into_stack (decl);
4082 }
4083
4084 else if (TREE_CODE (DECL_SIZE_UNIT (decl)) == INTEGER_CST
4085 && ! (flag_stack_check && ! STACK_CHECK_BUILTIN
4086 && 0 < compare_tree_int (DECL_SIZE_UNIT (decl),
4087 STACK_CHECK_MAX_VAR_SIZE)))
4088 {
4089 /* Variable of fixed size that goes on the stack. */
4090 rtx oldaddr = 0;
4091 rtx addr;
4092 rtx x;
4093
4094 /* If we previously made RTL for this decl, it must be an array
4095 whose size was determined by the initializer.
4096 The old address was a register; set that register now
4097 to the proper address. */
4098 if (DECL_RTL_SET_P (decl))
4099 {
4100 if (GET_CODE (DECL_RTL (decl)) != MEM
4101 || GET_CODE (XEXP (DECL_RTL (decl), 0)) != REG)
4102 abort ();
4103 oldaddr = XEXP (DECL_RTL (decl), 0);
4104 }
4105
4106 /* Set alignment we actually gave this decl. */
4107 DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
4108 : GET_MODE_BITSIZE (DECL_MODE (decl)));
4109 DECL_USER_ALIGN (decl) = 0;
4110
4111 x = assign_temp (TREE_TYPE (decl), 1, 1, 1);
4112 set_mem_attributes (x, decl, 1);
4113 SET_DECL_RTL (decl, x);
4114
4115 if (oldaddr)
4116 {
4117 addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
4118 if (addr != oldaddr)
4119 emit_move_insn (oldaddr, addr);
4120 }
4121 }
4122 else
4123 /* Dynamic-size object: must push space on the stack. */
4124 {
4125 rtx address, size, x;
4126
4127 /* Record the stack pointer on entry to the block, if we have
4128 not already done so. */
4129 do_pending_stack_adjust ();
4130 save_stack_pointer ();
4131
4132 /* In function-at-a-time mode, variable_size doesn't expand this,
4133 so do it now. */
4134 if (TREE_CODE (type) == ARRAY_TYPE && TYPE_DOMAIN (type))
4135 expand_expr (TYPE_MAX_VALUE (TYPE_DOMAIN (type)),
4136 const0_rtx, VOIDmode, 0);
4137
4138 /* Compute the variable's size, in bytes. */
4139 size = expand_expr (DECL_SIZE_UNIT (decl), NULL_RTX, VOIDmode, 0);
4140 free_temp_slots ();
4141
4142 /* Allocate space on the stack for the variable. Note that
4143 DECL_ALIGN says how the variable is to be aligned and we
4144 cannot use it to conclude anything about the alignment of
4145 the size. */
4146 address = allocate_dynamic_stack_space (size, NULL_RTX,
4147 TYPE_ALIGN (TREE_TYPE (decl)));
4148
4149 /* Reference the variable indirectly through that rtx. */
4150 x = gen_rtx_MEM (DECL_MODE (decl), address);
4151 set_mem_attributes (x, decl, 1);
4152 SET_DECL_RTL (decl, x);
4153
4155 /* Indicate the alignment we actually gave this variable. */
4156 #ifdef STACK_BOUNDARY
4157 DECL_ALIGN (decl) = STACK_BOUNDARY;
4158 #else
4159 DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
4160 #endif
4161 DECL_USER_ALIGN (decl) = 0;
4162 }
4163 }
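
/* For illustration, the main cases above correspond to locals such as
   the following (hypothetical code; the last declaration requires a
   front end with variable-sized arrays):

	void f (int n)
	{
	  int i;		-- scalar: may live in a pseudo register
	  char buf[16];		-- fixed size: stack slot via assign_temp
	  char vla[n];		-- dynamic: allocate_dynamic_stack_space
	}
   */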
4164 \f
4165 /* Emit code to perform the initialization of a declaration DECL. */
4166
4167 void
4168 expand_decl_init (decl)
4169 tree decl;
4170 {
4171 int was_used = TREE_USED (decl);
4172
4173 /* If this is a CONST_DECL, we don't have to generate any code. Likewise
4174 for static decls. */
4175 if (TREE_CODE (decl) == CONST_DECL
4176 || TREE_STATIC (decl))
4177 return;
4178
4179 /* Compute and store the initial value now. */
4180
4181 if (DECL_INITIAL (decl) == error_mark_node)
4182 {
4183 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
4184
4185 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
4186 || code == POINTER_TYPE || code == REFERENCE_TYPE)
4187 expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
4188 0, 0);
4189 emit_queue ();
4190 }
4191 else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
4192 {
4193 emit_line_note (DECL_SOURCE_FILE (decl), DECL_SOURCE_LINE (decl));
4194 expand_assignment (decl, DECL_INITIAL (decl), 0, 0);
4195 emit_queue ();
4196 }
4197
4198 /* Don't let the initialization count as "using" the variable. */
4199 TREE_USED (decl) = was_used;
4200
4201 /* Free any temporaries we made while initializing the decl. */
4202 preserve_temp_slots (NULL_RTX);
4203 free_temp_slots ();
4204 }
4205
4206 /* CLEANUP is an expression to be executed at exit from this binding contour;
4207 for example, in C++, it might call the destructor for this variable.
4208
4209 We wrap CLEANUP in an UNSAVE_EXPR node, so that we can expand the
4210 CLEANUP multiple times and have the correct semantics. This
4211 is needed for exception handling, and for gotos, returns, and
4212 breaks that leave the current scope.
4213
4214 If CLEANUP is nonzero and DECL is zero, we record a cleanup
4215 that is not associated with any particular variable. */
4216
4217 int
4218 expand_decl_cleanup (decl, cleanup)
4219 tree decl, cleanup;
4220 {
4221 struct nesting *thisblock;
4222
4223 /* Error if we are not in any block. */
4224 if (cfun == 0 || block_stack == 0)
4225 return 0;
4226
4227 thisblock = block_stack;
4228
4229 /* Record the cleanup if there is one. */
4230
4231 if (cleanup != 0)
4232 {
4233 tree t;
4234 rtx seq;
4235 tree *cleanups = &thisblock->data.block.cleanups;
4236 int cond_context = conditional_context ();
4237
4238 if (cond_context)
4239 {
4240 rtx flag = gen_reg_rtx (word_mode);
4241 rtx set_flag_0;
4242 tree cond;
4243
4244 start_sequence ();
4245 emit_move_insn (flag, const0_rtx);
4246 set_flag_0 = get_insns ();
4247 end_sequence ();
4248
4249 thisblock->data.block.last_unconditional_cleanup
4250 = emit_insns_after (set_flag_0,
4251 thisblock->data.block.last_unconditional_cleanup);
4252
4253 emit_move_insn (flag, const1_rtx);
4254
4255 cond = build_decl (VAR_DECL, NULL_TREE, type_for_mode (word_mode, 1));
4256 SET_DECL_RTL (cond, flag);
4257
4258 /* Conditionalize the cleanup. */
4259 cleanup = build (COND_EXPR, void_type_node,
4260 truthvalue_conversion (cond),
4261 cleanup, integer_zero_node);
4262 cleanup = fold (cleanup);
4263
4264 cleanups = thisblock->data.block.cleanup_ptr;
4265 }
4266
4267 cleanup = unsave_expr (cleanup);
4268
4269 t = *cleanups = tree_cons (decl, cleanup, *cleanups);
4270
4271 if (! cond_context)
4272 /* If this block has a cleanup, it belongs in stack_block_stack. */
4273 stack_block_stack = thisblock;
4274
4275 if (cond_context)
4276 {
4277 start_sequence ();
4278 }
4279
4280 if (! using_eh_for_cleanups_p)
4281 TREE_ADDRESSABLE (t) = 1;
4282 else
4283 expand_eh_region_start ();
4284
4285 if (cond_context)
4286 {
4287 seq = get_insns ();
4288 end_sequence ();
4289 if (seq)
4290 thisblock->data.block.last_unconditional_cleanup
4291 = emit_insns_after (seq,
4292 thisblock->data.block.last_unconditional_cleanup);
4293 }
4294 else
4295 {
4296 thisblock->data.block.last_unconditional_cleanup
4297 = get_last_insn ();
4298 /* When we insert instructions after the last unconditional cleanup,
4299 we don't adjust last_insn. That means that a later add_insn will
4300 clobber the instructions we've just added. The easiest way to
4301 fix this is to just insert another instruction here, so that the
4302 instructions inserted after the last unconditional cleanup are
4303 never the last instruction. */
4304 emit_note (NULL, NOTE_INSN_DELETED);
4305 thisblock->data.block.cleanup_ptr = &thisblock->data.block.cleanups;
4306 }
4307 }
4308 return 1;
4309 }
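
/* For illustration, for a hypothetical C++ local with a destructor,

	{ String s; ... }

   the front end expands the declaration and then registers roughly
   expand_decl_cleanup (s_decl, <call to the String destructor>), so
   that the destructor runs however the contour is left: by falling off
   the end, by goto or return, or by an exception.  */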
4310 \f
4311 /* DECL is an anonymous union. CLEANUP is a cleanup for DECL.
4312 DECL_ELTS is the list of elements that belong to DECL's type.
4313 In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup. */
4314
4315 void
4316 expand_anon_union_decl (decl, cleanup, decl_elts)
4317 tree decl, cleanup, decl_elts;
4318 {
4319 struct nesting *thisblock = cfun == 0 ? 0 : block_stack;
4320 rtx x;
4321 tree t;
4322
4323 /* If any of the elements are addressable, so is the entire union. */
4324 for (t = decl_elts; t; t = TREE_CHAIN (t))
4325 if (TREE_ADDRESSABLE (TREE_VALUE (t)))
4326 {
4327 TREE_ADDRESSABLE (decl) = 1;
4328 break;
4329 }
4330
4331 expand_decl (decl);
4332 expand_decl_cleanup (decl, cleanup);
4333 x = DECL_RTL (decl);
4334
4335 /* Go through the elements, assigning RTL to each. */
4336 for (t = decl_elts; t; t = TREE_CHAIN (t))
4337 {
4338 tree decl_elt = TREE_VALUE (t);
4339 tree cleanup_elt = TREE_PURPOSE (t);
4340 enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
4341
4342 /* Propagate the union's alignment to the elements. */
4343 DECL_ALIGN (decl_elt) = DECL_ALIGN (decl);
4344 DECL_USER_ALIGN (decl_elt) = DECL_USER_ALIGN (decl);
4345
4346 /* If the element has BLKmode and the union doesn't, the union is
4347 aligned such that the element doesn't need to have BLKmode, so
4348 change the element's mode to the appropriate one for its size. */
4349 if (mode == BLKmode && DECL_MODE (decl) != BLKmode)
4350 DECL_MODE (decl_elt) = mode
4351 = mode_for_size_tree (DECL_SIZE (decl_elt), MODE_INT, 1);
4352
4353 /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
4354 instead create a new MEM rtx with the proper mode. */
4355 if (GET_CODE (x) == MEM)
4356 {
4357 if (mode == GET_MODE (x))
4358 SET_DECL_RTL (decl_elt, x);
4359 else
4360 SET_DECL_RTL (decl_elt, adjust_address_nv (x, mode, 0));
4361 }
4362 else if (GET_CODE (x) == REG)
4363 {
4364 if (mode == GET_MODE (x))
4365 SET_DECL_RTL (decl_elt, x);
4366 else
4367 SET_DECL_RTL (decl_elt, gen_lowpart_SUBREG (mode, x));
4368 }
4369 else
4370 abort ();
4371
4372 /* Record the cleanup if there is one. */
4373
4374 if (cleanup != 0)
4375 thisblock->data.block.cleanups
4376 = tree_cons (decl_elt, cleanup_elt,
4377 thisblock->data.block.cleanups);
4378 }
4379 }
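
/* For illustration, for a hypothetical C++ anonymous union

	union { int i; float f; };

   the union itself gets a single rtx, and each member is then given
   the same storage reinterpreted in its own machine mode, via
   adjust_address_nv for memory or a lowpart SUBREG for a register.  */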
4380 \f
4381 /* Expand a list of cleanups LIST.
4382 Elements may be expressions or may be nested lists.
4383
4384 If DONT_DO is nonnull, then any list-element
4385 whose TREE_PURPOSE matches DONT_DO is omitted.
4386 This is sometimes used to avoid a cleanup associated with
4387 a value that is being returned out of the scope.
4388
4389 If IN_FIXUP is non-zero, we are generating this cleanup for a fixup
4390 goto and handle protection regions specially in that case.
4391
4392 If REACHABLE, we emit code, otherwise just inform the exception handling
4393 code about this finalization. */
4394
4395 static void
4396 expand_cleanups (list, dont_do, in_fixup, reachable)
4397 tree list;
4398 tree dont_do;
4399 int in_fixup;
4400 int reachable;
4401 {
4402 tree tail;
4403 for (tail = list; tail; tail = TREE_CHAIN (tail))
4404 if (dont_do == 0 || TREE_PURPOSE (tail) != dont_do)
4405 {
4406 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4407 expand_cleanups (TREE_VALUE (tail), dont_do, in_fixup, reachable);
4408 else
4409 {
4410 if (! in_fixup && using_eh_for_cleanups_p)
4411 expand_eh_region_end_cleanup (TREE_VALUE (tail));
4412
4413 if (reachable)
4414 {
4415 /* Cleanups may be run multiple times. For example,
4416 when exiting a binding contour, we expand the
4417 cleanups associated with that contour. When a goto
4418 within that binding contour has a target outside that
4419 contour, it will expand all cleanups from its scope to
4420 the target. Though the cleanups are expanded multiple
4421 times, the control paths are non-overlapping so the
4422 cleanups will not be executed twice. */
4423
4424 /* We may need to protect from outer cleanups. */
4425 if (in_fixup && using_eh_for_cleanups_p)
4426 {
4427 expand_eh_region_start ();
4428
4429 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
4430
4431 expand_eh_region_end_fixup (TREE_VALUE (tail));
4432 }
4433 else
4434 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
4435
4436 free_temp_slots ();
4437 }
4438 }
4439 }
4440 }
4441
4442 /* Mark the context in which we are emitting RTL as a conditional
4443 context, so that any cleanup actions we register with
4444 expand_decl_cleanup will be properly conditionalized when those
4445 cleanup actions are later performed. Must be called before any
4446 expression (tree) is expanded that is within a conditional context. */
4447
4448 void
4449 start_cleanup_deferral ()
4450 {
4451 /* block_stack can be NULL if we are inside the parameter list. It is
4452 OK to do nothing, because cleanups aren't possible here. */
4453 if (block_stack)
4454 ++block_stack->data.block.conditional_code;
4455 }
4456
4457 /* Mark the end of a conditional region of code. Because cleanup
4458 deferrals may be nested, we may still be in a conditional region
4459 after we end the currently deferred cleanups; only after we end all
4460 deferred cleanups are we back in unconditional code. */
4461
4462 void
4463 end_cleanup_deferral ()
4464 {
4465 /* block_stack can be NULL if we are inside the parameter list. It is
4466 OK to do nothing, because cleanups aren't possible here. */
4467 if (block_stack)
4468 --block_stack->data.block.conditional_code;
4469 }
4470
4471 /* Move all cleanups from the current block_stack
4472 to the containing block_stack, where they are assumed to
4473 have been created. If anything can cause a temporary to
4474 be created, but not expanded for more than one level of
4475 block_stacks, then this code will have to change. */
4476
4477 void
4478 move_cleanups_up ()
4479 {
4480 struct nesting *block = block_stack;
4481 struct nesting *outer = block->next;
4482
4483 outer->data.block.cleanups
4484 = chainon (block->data.block.cleanups,
4485 outer->data.block.cleanups);
4486 block->data.block.cleanups = 0;
4487 }
4488
4489 tree
4490 last_cleanup_this_contour ()
4491 {
4492 if (block_stack == 0)
4493 return 0;
4494
4495 return block_stack->data.block.cleanups;
4496 }
4497
4498 /* Return 1 if there are any pending cleanups at this point.
4499 If THIS_CONTOUR is nonzero, check the current contour as well.
4500 Otherwise, look only at the contours that enclose this one. */
4501
4502 int
4503 any_pending_cleanups (this_contour)
4504 int this_contour;
4505 {
4506 struct nesting *block;
4507
4508 if (cfun == NULL || cfun->stmt == NULL || block_stack == 0)
4509 return 0;
4510
4511 if (this_contour && block_stack->data.block.cleanups != NULL)
4512 return 1;
4513 if (block_stack->data.block.cleanups == 0
4514 && block_stack->data.block.outer_cleanups == 0)
4515 return 0;
4516
4517 for (block = block_stack->next; block; block = block->next)
4518 if (block->data.block.cleanups != 0)
4519 return 1;
4520
4521 return 0;
4522 }
4523 \f
4524 /* Enter a case (Pascal) or switch (C) statement.
4525 Push a block onto case_stack and nesting_stack
4526 to accumulate the case-labels that are seen
4527 and to record the labels generated for the statement.
4528
4529 EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
4530 Otherwise, this construct is transparent for `exit_something'.
4531
4532 EXPR is the index-expression to be dispatched on.
4533 TYPE is its nominal type. We could simply convert EXPR to this type,
4534 but instead we take short cuts. */
4535
4536 void
4537 expand_start_case (exit_flag, expr, type, printname)
4538 int exit_flag;
4539 tree expr;
4540 tree type;
4541 const char *printname;
4542 {
4543 struct nesting *thiscase = ALLOC_NESTING ();
4544
4545 /* Make an entry on case_stack for the case we are entering. */
4546
4547 thiscase->next = case_stack;
4548 thiscase->all = nesting_stack;
4549 thiscase->depth = ++nesting_depth;
4550 thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
4551 thiscase->data.case_stmt.case_list = 0;
4552 thiscase->data.case_stmt.index_expr = expr;
4553 thiscase->data.case_stmt.nominal_type = type;
4554 thiscase->data.case_stmt.default_label = 0;
4555 thiscase->data.case_stmt.printname = printname;
4556 thiscase->data.case_stmt.line_number_status = force_line_numbers ();
4557 case_stack = thiscase;
4558 nesting_stack = thiscase;
4559
4560 do_pending_stack_adjust ();
4561
4562 /* Make sure case_stmt.start points to something that won't
4563 need any transformation before expand_end_case. */
4564 if (GET_CODE (get_last_insn ()) != NOTE)
4565 emit_note (NULL, NOTE_INSN_DELETED);
4566
4567 thiscase->data.case_stmt.start = get_last_insn ();
4568
4569 start_cleanup_deferral ();
4570 }
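
/* A rough sketch of the whole sequence for a C switch (hypothetical
   caller; COND is the index expression):

	expand_start_case (1, cond, TREE_TYPE (cond), "switch statement");
	... expand the body, calling pushcase or pushcase_range
	    as each case label is seen ...
	expand_end_case (cond);

   PRINTNAME appears only in diagnostics, e.g. `unreachable code at
   beginning of switch statement'.  */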
4571
4572 /* Start a "dummy case statement" within which case labels are invalid
4573 and are not connected to any larger real case statement.
4574 This can be used if you don't want to let a case statement jump
4575 into the middle of certain kinds of constructs. */
4576
4577 void
4578 expand_start_case_dummy ()
4579 {
4580 struct nesting *thiscase = ALLOC_NESTING ();
4581
4582 /* Make an entry on case_stack for the dummy. */
4583
4584 thiscase->next = case_stack;
4585 thiscase->all = nesting_stack;
4586 thiscase->depth = ++nesting_depth;
4587 thiscase->exit_label = 0;
4588 thiscase->data.case_stmt.case_list = 0;
4589 thiscase->data.case_stmt.start = 0;
4590 thiscase->data.case_stmt.nominal_type = 0;
4591 thiscase->data.case_stmt.default_label = 0;
4592 case_stack = thiscase;
4593 nesting_stack = thiscase;
4594 start_cleanup_deferral ();
4595 }
4596
4597 /* End a dummy case statement. */
4598
4599 void
4600 expand_end_case_dummy ()
4601 {
4602 end_cleanup_deferral ();
4603 POPSTACK (case_stack);
4604 }
4605
4606 /* Return the data type of the index-expression
4607 of the innermost case statement, or null if none. */
4608
4609 tree
4610 case_index_expr_type ()
4611 {
4612 if (case_stack)
4613 return TREE_TYPE (case_stack->data.case_stmt.index_expr);
4614 return 0;
4615 }
4616 \f
4617 static void
4618 check_seenlabel ()
4619 {
4620 /* If this is the first label, warn if any insns have been emitted. */
4621 if (case_stack->data.case_stmt.line_number_status >= 0)
4622 {
4623 rtx insn;
4624
4625 restore_line_number_status
4626 (case_stack->data.case_stmt.line_number_status);
4627 case_stack->data.case_stmt.line_number_status = -1;
4628
4629 for (insn = case_stack->data.case_stmt.start;
4630 insn;
4631 insn = NEXT_INSN (insn))
4632 {
4633 if (GET_CODE (insn) == CODE_LABEL)
4634 break;
4635 if (GET_CODE (insn) != NOTE
4636 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
4637 {
4638 do
4639 insn = PREV_INSN (insn);
4640 while (insn && (GET_CODE (insn) != NOTE || NOTE_LINE_NUMBER (insn) < 0));
4641
4642 /* If insn is zero, then there must have been a syntax error. */
4643 if (insn)
4644 warning_with_file_and_line (NOTE_SOURCE_FILE (insn),
4645 NOTE_LINE_NUMBER (insn),
4646 "unreachable code at beginning of %s",
4647 case_stack->data.case_stmt.printname);
4648 break;
4649 }
4650 }
4651 }
4652 }
4653
4654 /* Accumulate one case or default label inside a case or switch statement.
4655 VALUE is the value of the case (a null pointer, for a default label).
4656 The function CONVERTER, when applied to arguments T and V,
4657 converts the value V to the type T.
4658
4659 If not currently inside a case or switch statement, return 1 and do
4660 nothing. The caller will print a language-specific error message.
4661 If VALUE is a duplicate or overlaps, return 2 and do nothing
4662 except store the (first) duplicate node in *DUPLICATE.
4663 If VALUE is out of range, return 3 and do nothing.
4664 If we are jumping into the scope of a cleanup or var-sized array, return 5.
4665 Return 0 on success.
4666
4667 Extended to handle range statements. */
4668
4669 int
4670 pushcase (value, converter, label, duplicate)
4671 tree value;
4672 tree (*converter) PARAMS ((tree, tree));
4673 tree label;
4674 tree *duplicate;
4675 {
4676 tree index_type;
4677 tree nominal_type;
4678
4679 /* Fail if not inside a real case statement. */
4680 if (! (case_stack && case_stack->data.case_stmt.start))
4681 return 1;
4682
4683 if (stack_block_stack
4684 && stack_block_stack->depth > case_stack->depth)
4685 return 5;
4686
4687 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4688 nominal_type = case_stack->data.case_stmt.nominal_type;
4689
4690 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4691 if (index_type == error_mark_node)
4692 return 0;
4693
4694 /* Convert VALUE to the type in which the comparisons are nominally done. */
4695 if (value != 0)
4696 value = (*converter) (nominal_type, value);
4697
4698 check_seenlabel ();
4699
4700 /* Fail if this value is out of range for the actual type of the index
4701 (which may be narrower than NOMINAL_TYPE). */
4702 if (value != 0
4703 && (TREE_CONSTANT_OVERFLOW (value)
4704 || ! int_fits_type_p (value, index_type)))
4705 return 3;
4706
4707 return add_case_node (value, value, label, duplicate);
4708 }
4709
4710 /* Like pushcase but this case applies to all values between VALUE1 and
4711 VALUE2 (inclusive). If VALUE1 is NULL, the range starts at the lowest
4712 value of the index type and ends at VALUE2. If VALUE2 is NULL, the range
4713 starts at VALUE1 and ends at the highest value of the index type.
4714 If both are NULL, this case applies to all values.
4715
4716 The return value is the same as that of pushcase but there is one
4717 additional error code: 4 means the specified range was empty. */
4718
4719 int
4720 pushcase_range (value1, value2, converter, label, duplicate)
4721 tree value1, value2;
4722 tree (*converter) PARAMS ((tree, tree));
4723 tree label;
4724 tree *duplicate;
4725 {
4726 tree index_type;
4727 tree nominal_type;
4728
4729 /* Fail if not inside a real case statement. */
4730 if (! (case_stack && case_stack->data.case_stmt.start))
4731 return 1;
4732
4733 if (stack_block_stack
4734 && stack_block_stack->depth > case_stack->depth)
4735 return 5;
4736
4737 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4738 nominal_type = case_stack->data.case_stmt.nominal_type;
4739
4740 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4741 if (index_type == error_mark_node)
4742 return 0;
4743
4744 check_seenlabel ();
4745
4746 /* Convert VALUEs to type in which the comparisons are nominally done
4747 and replace any unspecified value with the corresponding bound. */
4748 if (value1 == 0)
4749 value1 = TYPE_MIN_VALUE (index_type);
4750 if (value2 == 0)
4751 value2 = TYPE_MAX_VALUE (index_type);
4752
4753 /* Fail if the range is empty. Do this before any conversion since
4754 we want to allow out-of-range empty ranges. */
4755 if (value2 != 0 && tree_int_cst_lt (value2, value1))
4756 return 4;
4757
4758 /* If the max was unbounded, use the max of the nominal_type we are
4759 converting to. Do this after the < check above to suppress false
4760 positives. */
4761 if (value2 == 0)
4762 value2 = TYPE_MAX_VALUE (nominal_type);
4763
4764 value1 = (*converter) (nominal_type, value1);
4765 value2 = (*converter) (nominal_type, value2);
4766
4767 /* Fail if these values are out of range. */
4768 if (TREE_CONSTANT_OVERFLOW (value1)
4769 || ! int_fits_type_p (value1, index_type))
4770 return 3;
4771
4772 if (TREE_CONSTANT_OVERFLOW (value2)
4773 || ! int_fits_type_p (value2, index_type))
4774 return 3;
4775
4776 return add_case_node (value1, value2, label, duplicate);
4777 }
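
/* For illustration, a GNU C case range `case 1 ... 3:' arrives here as
   something like (hypothetical call; LABEL is the case's LABEL_DECL)

	err = pushcase_range (build_int_2 (1, 0), build_int_2 (3, 0),
			      convert, label, &duplicate);

   and the front end maps a nonzero ERR to a diagnostic: 2 = duplicate
   label, 3 = out of range, 4 = empty range, 5 = jump into a contour
   with cleanups or variable-sized objects.  */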
4778
4779 /* Do the actual insertion of a case label for pushcase and pushcase_range
4780 into case_stack->data.case_stmt.case_list. Use an AVL tree to avoid
4781 slowdown for large switch statements. */
4782
4783 int
4784 add_case_node (low, high, label, duplicate)
4785 tree low, high;
4786 tree label;
4787 tree *duplicate;
4788 {
4789 struct case_node *p, **q, *r;
4790
4791 /* If there's no HIGH value, then this is not a case range; it's
4792 just a simple case label. But that's just a degenerate case
4793 range. */
4794 if (!high)
4795 high = low;
4796
4797 /* Handle default labels specially. */
4798 if (!high && !low)
4799 {
4800 if (case_stack->data.case_stmt.default_label != 0)
4801 {
4802 *duplicate = case_stack->data.case_stmt.default_label;
4803 return 2;
4804 }
4805 case_stack->data.case_stmt.default_label = label;
4806 expand_label (label);
4807 return 0;
4808 }
4809
4810 q = &case_stack->data.case_stmt.case_list;
4811 p = *q;
4812
4813 while ((r = *q))
4814 {
4815 p = r;
4816
4817 /* Keep going past elements distinctly greater than HIGH. */
4818 if (tree_int_cst_lt (high, p->low))
4819 q = &p->left;
4820
4821 /* or distinctly less than LOW. */
4822 else if (tree_int_cst_lt (p->high, low))
4823 q = &p->right;
4824
4825 else
4826 {
4827 /* We have an overlap; this is an error. */
4828 *duplicate = p->code_label;
4829 return 2;
4830 }
4831 }
4832
4833 /* Add this label to the chain, and succeed. */
4834
4835 r = (struct case_node *) xmalloc (sizeof (struct case_node));
4836 r->low = low;
4837
4838 /* If the bounds are equal, turn this into the one-value case. */
4839 if (tree_int_cst_equal (low, high))
4840 r->high = r->low;
4841 else
4842 r->high = high;
4843
4844 r->code_label = label;
4845 expand_label (label);
4846
4847 *q = r;
4848 r->parent = p;
4849 r->left = 0;
4850 r->right = 0;
4851 r->balance = 0;
4852
4853 while (p)
4854 {
4855 struct case_node *s;
4856
4857 if (r == p->left)
4858 {
4859 int b;
4860
4861 if (! (b = p->balance))
4862 /* Growth propagation from left side. */
4863 p->balance = -1;
4864 else if (b < 0)
4865 {
4866 if (r->balance < 0)
4867 {
4868 /* R-Rotation */
4869 if ((p->left = s = r->right))
4870 s->parent = p;
4871
4872 r->right = p;
4873 p->balance = 0;
4874 r->balance = 0;
4875 s = p->parent;
4876 p->parent = r;
4877
4878 if ((r->parent = s))
4879 {
4880 if (s->left == p)
4881 s->left = r;
4882 else
4883 s->right = r;
4884 }
4885 else
4886 case_stack->data.case_stmt.case_list = r;
4887 }
4888 else
4889 /* r->balance == +1 */
4890 {
4891 /* LR-Rotation */
4892
4893 int b2;
4894 struct case_node *t = r->right;
4895
4896 if ((p->left = s = t->right))
4897 s->parent = p;
4898
4899 t->right = p;
4900 if ((r->right = s = t->left))
4901 s->parent = r;
4902
4903 t->left = r;
4904 b = t->balance;
4905 b2 = b < 0;
4906 p->balance = b2;
4907 b2 = -b2 - b;
4908 r->balance = b2;
4909 t->balance = 0;
4910 s = p->parent;
4911 p->parent = t;
4912 r->parent = t;
4913
4914 if ((t->parent = s))
4915 {
4916 if (s->left == p)
4917 s->left = t;
4918 else
4919 s->right = t;
4920 }
4921 else
4922 case_stack->data.case_stmt.case_list = t;
4923 }
4924 break;
4925 }
4926
4927 else
4928 {
4929 /* p->balance == +1; growth of left side balances the node. */
4930 p->balance = 0;
4931 break;
4932 }
4933 }
4934 else
4935 /* r == p->right */
4936 {
4937 int b;
4938
4939 if (! (b = p->balance))
4940 /* Growth propagation from right side. */
4941 p->balance++;
4942 else if (b > 0)
4943 {
4944 if (r->balance > 0)
4945 {
4946 /* L-Rotation */
4947
4948 if ((p->right = s = r->left))
4949 s->parent = p;
4950
4951 r->left = p;
4952 p->balance = 0;
4953 r->balance = 0;
4954 s = p->parent;
4955 p->parent = r;
4956 if ((r->parent = s))
4957 {
4958 if (s->left == p)
4959 s->left = r;
4960 else
4961 s->right = r;
4962 }
4963
4964 else
4965 case_stack->data.case_stmt.case_list = r;
4966 }
4967
4968 else
4969 /* r->balance == -1 */
4970 {
4971 /* RL-Rotation */
4972 int b2;
4973 struct case_node *t = r->left;
4974
4975 if ((p->right = s = t->left))
4976 s->parent = p;
4977
4978 t->left = p;
4979
4980 if ((r->left = s = t->right))
4981 s->parent = r;
4982
4983 t->right = r;
4984 b = t->balance;
4985 b2 = b < 0;
4986 r->balance = b2;
4987 b2 = -b2 - b;
4988 p->balance = b2;
4989 t->balance = 0;
4990 s = p->parent;
4991 p->parent = t;
4992 r->parent = t;
4993
4994 if ((t->parent = s))
4995 {
4996 if (s->left == p)
4997 s->left = t;
4998 else
4999 s->right = t;
5000 }
5001
5002 else
5003 case_stack->data.case_stmt.case_list = t;
5004 }
5005 break;
5006 }
5007 else
5008 {
5009 /* p->balance == -1; growth of right side balances the node. */
5010 p->balance = 0;
5011 break;
5012 }
5013 }
5014
5015 r = p;
5016 p = p->parent;
5017 }
5018
5019 return 0;
5020 }
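
/* For illustration: BALANCE is the height of the right subtree minus
   the height of the left, kept within {-1, 0, +1}.  Inserting the case
   values 1, 2 and 3 in that order makes node 1 right-heavy twice and
   triggers the simple L-rotation:

	1			      2
	 \		becomes	     / \
	  2			    1   3
	   \
	    3
   */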
5021 \f
5022 /* Returns the number of possible values of TYPE.
5023 Returns -1 if the number is unknown, variable, or if the number does not
5024 fit in a HOST_WIDE_INT.
5025 Sets *SPARENESS to 2 if TYPE is an ENUMERAL_TYPE whose values
5026 do not increase monotonically (there may be duplicates);
5027 to 1 if the values increase monotonically, but not always by 1;
5028 otherwise sets it to 0. */
5029
5030 HOST_WIDE_INT
5031 all_cases_count (type, spareness)
5032 tree type;
5033 int *spareness;
5034 {
5035 tree t;
5036 HOST_WIDE_INT count, minval, lastval;
5037
5038 *spareness = 0;
5039
5040 switch (TREE_CODE (type))
5041 {
5042 case BOOLEAN_TYPE:
5043 count = 2;
5044 break;
5045
5046 case CHAR_TYPE:
5047 count = 1 << BITS_PER_UNIT;
5048 break;
5049
5050 default:
5051 case INTEGER_TYPE:
5052 if (TYPE_MAX_VALUE (type) != 0
5053 && 0 != (t = fold (build (MINUS_EXPR, type, TYPE_MAX_VALUE (type),
5054 TYPE_MIN_VALUE (type))))
5055 && 0 != (t = fold (build (PLUS_EXPR, type, t,
5056 convert (type, integer_one_node))))
5057 && host_integerp (t, 1))
5058 count = tree_low_cst (t, 1);
5059 else
5060 return -1;
5061 break;
5062
5063 case ENUMERAL_TYPE:
5064 /* Don't waste time with enumeral types with huge values. */
5065 if (! host_integerp (TYPE_MIN_VALUE (type), 0)
5066 || TYPE_MAX_VALUE (type) == 0
5067 || ! host_integerp (TYPE_MAX_VALUE (type), 0))
5068 return -1;
5069
5070 lastval = minval = tree_low_cst (TYPE_MIN_VALUE (type), 0);
5071 count = 0;
5072
5073 for (t = TYPE_VALUES (type); t != NULL_TREE; t = TREE_CHAIN (t))
5074 {
5075 HOST_WIDE_INT thisval = tree_low_cst (TREE_VALUE (t), 0);
5076
5077 if (*spareness == 2 || thisval < lastval)
5078 *spareness = 2;
5079 else if (thisval != minval + count)
5080 *spareness = 1;
5081
5082 count++;
lastval = thisval;
5083 }
5084 }
5085
5086 return count;
5087 }
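
/* For illustration (hypothetical enums):

	enum a { P, Q, R };		returns 3, *SPARENESS == 0
	enum b { P = 0, Q = 4 };	returns 2, *SPARENESS == 1
	enum c { P = 1, Q = 0 };	returns 2, *SPARENESS == 2
   */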
5088
5089 #define BITARRAY_TEST(ARRAY, INDEX) \
5090 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
5091 & (1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR)))
5092 #define BITARRAY_SET(ARRAY, INDEX) \
5093 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
5094 |= 1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR))
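
/* For example, with HOST_BITS_PER_CHAR == 8, BITARRAY_SET (seen, 11)
   sets bit 3 of seen[1], and a later BITARRAY_TEST (seen, 11) is then
   nonzero.  */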
5095
5096 /* Set the elements of the bitstring CASES_SEEN (which has length COUNT),
5097 with the case values we have seen, assuming the case expression
5098 has the given TYPE.
5099 SPARSENESS is as determined by all_cases_count.
5100
5101 The time needed is proportional to COUNT, unless
5102 SPARSENESS is 2, in which case quadratic time is needed. */
5103
5104 void
5105 mark_seen_cases (type, cases_seen, count, sparseness)
5106 tree type;
5107 unsigned char *cases_seen;
5108 HOST_WIDE_INT count;
5109 int sparseness;
5110 {
5111 tree next_node_to_try = NULL_TREE;
5112 HOST_WIDE_INT next_node_offset = 0;
5113
5114 struct case_node *n, *root = case_stack->data.case_stmt.case_list;
5115 tree val = make_node (INTEGER_CST);
5116
5117 TREE_TYPE (val) = type;
5118 if (! root)
5119 /* Do nothing. */
5120 ;
5121 else if (sparseness == 2)
5122 {
5123 tree t;
5124 unsigned HOST_WIDE_INT xlo;
5125
5126 /* This less efficient loop is only needed to handle
5127 duplicate case values (multiple enum constants
5128 with the same value). */
5129 TREE_TYPE (val) = TREE_TYPE (root->low);
5130 for (t = TYPE_VALUES (type), xlo = 0; t != NULL_TREE;
5131 t = TREE_CHAIN (t), xlo++)
5132 {
5133 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (TREE_VALUE (t));
5134 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (TREE_VALUE (t));
5135 n = root;
5136 do
5137 {
5138 /* Keep going past elements distinctly greater than VAL. */
5139 if (tree_int_cst_lt (val, n->low))
5140 n = n->left;
5141
5142 /* or distinctly less than VAL. */
5143 else if (tree_int_cst_lt (n->high, val))
5144 n = n->right;
5145
5146 else
5147 {
5148 /* We have found a matching range. */
5149 BITARRAY_SET (cases_seen, xlo);
5150 break;
5151 }
5152 }
5153 while (n);
5154 }
5155 }
5156 else
5157 {
5158 if (root->left)
5159 case_stack->data.case_stmt.case_list = root = case_tree2list (root, 0);
5160
5161 for (n = root; n; n = n->right)
5162 {
5163 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (n->low);
5164 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (n->low);
5165 while (! tree_int_cst_lt (n->high, val))
5166 {
5167 /* Calculate (into xlo) the "offset" of the integer (val).
5168 The element with lowest value has offset 0, the next smallest
5169 element has offset 1, etc. */
5170
5171 unsigned HOST_WIDE_INT xlo;
5172 HOST_WIDE_INT xhi;
5173 tree t;
5174
5175 if (sparseness && TYPE_VALUES (type) != NULL_TREE)
5176 {
5177 /* The TYPE_VALUES will be in increasing order, so
5178 start searching where we last ended. */
5179 t = next_node_to_try;
5180 xlo = next_node_offset;
5181 xhi = 0;
5182 for (;;)
5183 {
5184 if (t == NULL_TREE)
5185 {
5186 t = TYPE_VALUES (type);
5187 xlo = 0;
5188 }
5189 if (tree_int_cst_equal (val, TREE_VALUE (t)))
5190 {
5191 next_node_to_try = TREE_CHAIN (t);
5192 next_node_offset = xlo + 1;
5193 break;
5194 }
5195 xlo++;
5196 t = TREE_CHAIN (t);
5197 if (t == next_node_to_try)
5198 {
5199 xlo = -1;
5200 break;
5201 }
5202 }
5203 }
5204 else
5205 {
5206 t = TYPE_MIN_VALUE (type);
5207 if (t)
5208 neg_double (TREE_INT_CST_LOW (t), TREE_INT_CST_HIGH (t),
5209 &xlo, &xhi);
5210 else
5211 xlo = xhi = 0;
5212 add_double (xlo, xhi,
5213 TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
5214 &xlo, &xhi);
5215 }
5216
5217 if (xhi == 0 && xlo < (unsigned HOST_WIDE_INT) count)
5218 BITARRAY_SET (cases_seen, xlo);
5219
5220 add_double (TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
5221 1, 0,
5222 &TREE_INT_CST_LOW (val), &TREE_INT_CST_HIGH (val));
5223 }
5224 }
5225 }
5226 }
5227
5228 /* Called when the index of a switch statement is an enumerated type
5229 and there is no default label.
5230
5231 Checks that all enumeration literals are covered by the case
5232 expressions of a switch. Also, warn if there are any extra
5233 switch cases that are *not* elements of the enumerated type.
5234
5235 If all enumeration literals were covered by the case expressions,
5236 turn one of the expressions into the default expression since it should
5237 not be possible to fall through such a switch. */
5238
5239 void
5240 check_for_full_enumeration_handling (type)
5241 tree type;
5242 {
5243 struct case_node *n;
5244 tree chain;
5245
5246 /* True iff the selector type is a numbered set mode. */
5247 int sparseness = 0;
5248
5249 /* The number of possible selector values. */
5250 HOST_WIDE_INT size;
5251
5252 /* For each possible selector value, a one iff it has been matched
5253 by a case value alternative. */
5254 unsigned char *cases_seen;
5255
5256 /* The allocated size of cases_seen, in chars. */
5257 HOST_WIDE_INT bytes_needed;
5258
5259 if (! warn_switch)
5260 return;
5261
5262 size = all_cases_count (type, &sparseness);
5263 bytes_needed = (size + HOST_BITS_PER_CHAR) / HOST_BITS_PER_CHAR;
5264
5265 if (size > 0 && size < 600000
5266 /* We deliberately use calloc here, not xcalloc, so that we can suppress
5267 this optimization if we don't have enough memory rather than
5268 aborting, as xmalloc would do. */
5269 && (cases_seen =
5270 (unsigned char *) really_call_calloc (bytes_needed, 1)) != NULL)
5271 {
5272 HOST_WIDE_INT i;
5273 tree v = TYPE_VALUES (type);
5274
5275 /* The time complexity of this code is normally O(N), where
5276 N is the number of members in the enumerated type.
5277 However, if TYPE is an ENUMERAL_TYPE whose values do not
5278 increase monotonically, O(N*log(N)) time may be needed. */
5279
5280 mark_seen_cases (type, cases_seen, size, sparseness);
5281
5282 for (i = 0; v != NULL_TREE && i < size; i++, v = TREE_CHAIN (v))
5283 if (BITARRAY_TEST (cases_seen, i) == 0)
5284 warning ("enumeration value `%s' not handled in switch",
5285 IDENTIFIER_POINTER (TREE_PURPOSE (v)));
5286
5287 free (cases_seen);
5288 }
5289
5290 /* Now we go the other way around; we warn if there are case
5291 expressions that don't correspond to enumerators. This can
5292 occur since C and C++ don't enforce type-checking of
5293 assignments to enumeration variables. */
5294
5295 if (case_stack->data.case_stmt.case_list
5296 && case_stack->data.case_stmt.case_list->left)
5297 case_stack->data.case_stmt.case_list
5298 = case_tree2list (case_stack->data.case_stmt.case_list, 0);
5299 if (warn_switch)
5300 for (n = case_stack->data.case_stmt.case_list; n; n = n->right)
5301 {
5302 for (chain = TYPE_VALUES (type);
5303 chain && !tree_int_cst_equal (n->low, TREE_VALUE (chain));
5304 chain = TREE_CHAIN (chain))
5305 ;
5306
5307 if (!chain)
5308 {
5309 if (TYPE_NAME (type) == 0)
5310 warning ("case value `%ld' not in enumerated type",
5311 (long) TREE_INT_CST_LOW (n->low));
5312 else
5313 warning ("case value `%ld' not in enumerated type `%s'",
5314 (long) TREE_INT_CST_LOW (n->low),
5315 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
5316 == IDENTIFIER_NODE)
5317 ? TYPE_NAME (type)
5318 : DECL_NAME (TYPE_NAME (type))));
5319 }
5320 if (!tree_int_cst_equal (n->low, n->high))
5321 {
5322 for (chain = TYPE_VALUES (type);
5323 chain && !tree_int_cst_equal (n->high, TREE_VALUE (chain));
5324 chain = TREE_CHAIN (chain))
5325 ;
5326
5327 if (!chain)
5328 {
5329 if (TYPE_NAME (type) == 0)
5330 warning ("case value `%ld' not in enumerated type",
5331 (long) TREE_INT_CST_LOW (n->high));
5332 else
5333 warning ("case value `%ld' not in enumerated type `%s'",
5334 (long) TREE_INT_CST_LOW (n->high),
5335 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
5336 == IDENTIFIER_NODE)
5337 ? TYPE_NAME (type)
5338 : DECL_NAME (TYPE_NAME (type))));
5339 }
5340 }
5341 }
5342 }
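
/* For illustration (hypothetical input): given

	enum color { RED, GREEN, BLUE };
	switch (c)			-- no default label
	  {
	  case RED: ...
	  case 7: ...
	  }

   the first pass warns that GREEN and BLUE are not handled in the
   switch, and the second pass warns `case value 7 not in enumerated
   type color'.  */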
5343
5344 /* Free CN, and its children. */
5345
5346 static void
5347 free_case_nodes (cn)
5348 case_node_ptr cn;
5349 {
5350 if (cn)
5351 {
5352 free_case_nodes (cn->left);
5353 free_case_nodes (cn->right);
5354 free (cn);
5355 }
5356 }
5357
5358 \f
5359
5360 /* Terminate a case (Pascal) or switch (C) statement
5361 in which ORIG_INDEX is the expression to be tested.
5362 Generate the code to test it and jump to the right place. */
5363
5364 void
5365 expand_end_case (orig_index)
5366 tree orig_index;
5367 {
5368 tree minval = NULL_TREE, maxval = NULL_TREE, range = NULL_TREE;
5369 rtx default_label = 0;
5370 struct case_node *n;
5371 unsigned int count;
5372 rtx index;
5373 rtx table_label;
5374 int ncases;
5375 rtx *labelvec;
5376 int i;
5377 rtx before_case, end;
5378 struct nesting *thiscase = case_stack;
5379 tree index_expr, index_type;
5380 int unsignedp;
5381
5382 /* Don't crash due to previous errors. */
5383 if (thiscase == NULL)
5384 return;
5385
5386 table_label = gen_label_rtx ();
5387 index_expr = thiscase->data.case_stmt.index_expr;
5388 index_type = TREE_TYPE (index_expr);
5389 unsignedp = TREE_UNSIGNED (index_type);
5390
5391 do_pending_stack_adjust ();
5392
5393 /* This might get a spurious warning in the presence of a syntax error;
5394 it could be fixed by moving the call to check_seenlabel after the
5395 check for error_mark_node, and copying the code of check_seenlabel that
5396 deals with case_stack->data.case_stmt.line_number_status /
5397 restore_line_number_status in front of the call to end_cleanup_deferral.
5398 However, this might miss some useful warnings in the presence of
5399 non-syntax errors. */
5400 check_seenlabel ();
5401
5402 /* An ERROR_MARK occurs for various reasons including invalid data type. */
5403 if (index_type != error_mark_node)
5404 {
5405 /* If switch expression was an enumerated type, check that all
5406 enumeration literals are covered by the cases.
5407 No sense trying this if there's a default case, however. */
5408
5409 if (!thiscase->data.case_stmt.default_label
5410 && TREE_CODE (TREE_TYPE (orig_index)) == ENUMERAL_TYPE
5411 && TREE_CODE (index_expr) != INTEGER_CST)
5412 check_for_full_enumeration_handling (TREE_TYPE (orig_index));
5413
5414 /* If we don't have a default-label, create one here,
5415 after the body of the switch. */
5416 if (thiscase->data.case_stmt.default_label == 0)
5417 {
5418 thiscase->data.case_stmt.default_label
5419 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
5420 expand_label (thiscase->data.case_stmt.default_label);
5421 }
5422 default_label = label_rtx (thiscase->data.case_stmt.default_label);
5423
5424 before_case = get_last_insn ();
5425
5426 if (thiscase->data.case_stmt.case_list
5427 && thiscase->data.case_stmt.case_list->left)
5428 thiscase->data.case_stmt.case_list
5429 = case_tree2list (thiscase->data.case_stmt.case_list, 0);
5430
5431 /* Simplify the case-list before we count it. */
5432 group_case_nodes (thiscase->data.case_stmt.case_list);
5433
5434 /* Get upper and lower bounds of case values.
5435 Also convert all the case values to the index expr's data type. */
5436
5437 count = 0;
5438 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5439 {
5440 /* Check that low and high label values are integers. */
5441 if (TREE_CODE (n->low) != INTEGER_CST)
5442 abort ();
5443 if (TREE_CODE (n->high) != INTEGER_CST)
5444 abort ();
5445
5446 n->low = convert (index_type, n->low);
5447 n->high = convert (index_type, n->high);
5448
5449 /* Count the elements and track the largest and smallest
5450 of them (treating them as signed even if they are not). */
5451 if (count++ == 0)
5452 {
5453 minval = n->low;
5454 maxval = n->high;
5455 }
5456 else
5457 {
5458 if (INT_CST_LT (n->low, minval))
5459 minval = n->low;
5460 if (INT_CST_LT (maxval, n->high))
5461 maxval = n->high;
5462 }
5463 /* A range counts double, since it requires two compares. */
5464 if (! tree_int_cst_equal (n->low, n->high))
5465 count++;
5466 }
5467
5468 /* Compute span of values. */
5469 if (count != 0)
5470 range = fold (build (MINUS_EXPR, index_type, maxval, minval));
5471
5472 end_cleanup_deferral ();
5473
5474 if (count == 0)
5475 {
5476 expand_expr (index_expr, const0_rtx, VOIDmode, 0);
5477 emit_queue ();
5478 emit_jump (default_label);
5479 }
5480
5481 /* If the range of values is much bigger than the number of values,
5482 make a sequence of conditional branches instead of a dispatch.
5483 If the switch-index is a constant, do it this way
5484 because we can optimize it. */
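      /* For instance, with case values {1, 2, 1000000} the count is 3 but
	 the range is 999999 > 10 * 3, so a branch table would consist
	 almost entirely of default entries; the compare-and-jump tree
	 emitted here is far smaller.  */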
5485
5486 else if (count < case_values_threshold ()
5487 || compare_tree_int (range, 10 * count) > 0
5488 /* RANGE may be signed, and really large ranges will show up
5489 as negative numbers. */
5490 || compare_tree_int (range, 0) < 0
5491 #ifndef ASM_OUTPUT_ADDR_DIFF_ELT
5492 || flag_pic
5493 #endif
5494 || TREE_CODE (index_expr) == INTEGER_CST
5495 || (TREE_CODE (index_expr) == COMPOUND_EXPR
5496 && TREE_CODE (TREE_OPERAND (index_expr, 1)) == INTEGER_CST))
5497 {
5498 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5499
5500 /* If the index is a short or char for which we do not have
5501 an insn to handle comparisons directly, convert it to
5502 a full integer now, rather than letting each comparison
5503 generate the conversion. */
5504
5505 if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
5506 && ! have_insn_for (COMPARE, GET_MODE (index)))
5507 {
5508 enum machine_mode wider_mode;
5509 for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
5510 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
5511 if (have_insn_for (COMPARE, wider_mode))
5512 {
5513 index = convert_to_mode (wider_mode, index, unsignedp);
5514 break;
5515 }
5516 }
5517
5518 emit_queue ();
5519 do_pending_stack_adjust ();
5520
5521 index = protect_from_queue (index, 0);
5522 if (GET_CODE (index) == MEM)
5523 index = copy_to_reg (index);
5524 if (GET_CODE (index) == CONST_INT
5525 || TREE_CODE (index_expr) == INTEGER_CST)
5526 {
5527 /* Make a tree node with the proper constant value
5528 if we don't already have one. */
5529 if (TREE_CODE (index_expr) != INTEGER_CST)
5530 {
5531 index_expr
5532 = build_int_2 (INTVAL (index),
5533 unsignedp || INTVAL (index) >= 0 ? 0 : -1);
5534 index_expr = convert (index_type, index_expr);
5535 }
5536
5537 /* For constant index expressions we need only
5538 issue an unconditional branch to the appropriate
5539 target code. The job of removing any unreachable
5540 code is left to the optimization phase if the
5541 "-O" option is specified. */
5542 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5543 if (! tree_int_cst_lt (index_expr, n->low)
5544 && ! tree_int_cst_lt (n->high, index_expr))
5545 break;
5546
5547 if (n)
5548 emit_jump (label_rtx (n->code_label));
5549 else
5550 emit_jump (default_label);
5551 }
5552 else
5553 {
5554 /* If the index expression is not constant we generate
5555 a binary decision tree to select the appropriate
5556 target code. This is done as follows:
5557
5558 The list of cases is rearranged into a binary tree,
5559 nearly optimal assuming equal probability for each case.
5560
5561 The tree is transformed into RTL, eliminating
5562 redundant test conditions at the same time.
5563
5564 If program flow could reach the end of the
5565 decision tree an unconditional jump to the
5566 default code is emitted. */
5567
5568 use_cost_table
5569 = (TREE_CODE (TREE_TYPE (orig_index)) != ENUMERAL_TYPE
5570 && estimate_case_costs (thiscase->data.case_stmt.case_list));
5571 balance_case_nodes (&thiscase->data.case_stmt.case_list, NULL);
5572 emit_case_nodes (index, thiscase->data.case_stmt.case_list,
5573 default_label, index_type);
5574 emit_jump_if_reachable (default_label);
5575 }
5576 }
5577 else
5578 {
5579 if (! try_casesi (index_type, index_expr, minval, range,
5580 table_label, default_label))
5581 {
5582 index_type = thiscase->data.case_stmt.nominal_type;
5583
5584 /* Index jumptables from zero for suitable values of
5585 minval to avoid a subtraction. */
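	  /* E.g. cases 1..5 are then emitted as a 0..5 table whose
	     slot 0 holds the default label, trading one dead entry
	     for not emitting an `index - 1' subtraction.  */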
5586 if (! optimize_size
5587 && compare_tree_int (minval, 0) > 0
5588 && compare_tree_int (minval, 3) < 0)
5589 {
5590 minval = integer_zero_node;
5591 range = maxval;
5592 }
5593
5594 if (! try_tablejump (index_type, index_expr, minval, range,
5595 table_label, default_label))
5596 abort ();
5597 }
5598
5599 /* Get table of labels to jump to, in order of case index. */
5600
5601 ncases = tree_low_cst (range, 0) + 1;
5602 labelvec = (rtx *) alloca (ncases * sizeof (rtx));
5603 memset ((char *) labelvec, 0, ncases * sizeof (rtx));
5604
5605 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5606 {
5607 /* Compute the low and high bounds relative to the minimum
5608 value since that should fit in a HOST_WIDE_INT while the
5609 actual values may not. */
5610 HOST_WIDE_INT i_low
5611 = tree_low_cst (fold (build (MINUS_EXPR, index_type,
5612 n->low, minval)), 1);
5613 HOST_WIDE_INT i_high
5614 = tree_low_cst (fold (build (MINUS_EXPR, index_type,
5615 n->high, minval)), 1);
5616 HOST_WIDE_INT i;
5617
5618 for (i = i_low; i <= i_high; i ++)
5619 labelvec[i]
5620 = gen_rtx_LABEL_REF (Pmode, label_rtx (n->code_label));
5621 }
5622
5623 /* Fill in the gaps with the default. */
5624 for (i = 0; i < ncases; i++)
5625 if (labelvec[i] == 0)
5626 labelvec[i] = gen_rtx_LABEL_REF (Pmode, default_label);
5627
5628 /* Output the table. */
5629 emit_label (table_label);
5630
5631 if (CASE_VECTOR_PC_RELATIVE || flag_pic)
5632 emit_jump_insn (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE,
5633 gen_rtx_LABEL_REF (Pmode, table_label),
5634 gen_rtvec_v (ncases, labelvec),
5635 const0_rtx, const0_rtx));
5636 else
5637 emit_jump_insn (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE,
5638 gen_rtvec_v (ncases, labelvec)));
5639
5640 /* If the case insn drops through the table,
5641 after the table we must jump to the default-label.
5642 Otherwise record no drop-through after the table. */
5643 #ifdef CASE_DROPS_THROUGH
5644 emit_jump (default_label);
5645 #else
5646 emit_barrier ();
5647 #endif
5648 }
5649
5650 before_case = NEXT_INSN (before_case);
5651 end = get_last_insn ();
5652 if (squeeze_notes (&before_case, &end))
5653 abort ();
5654 reorder_insns (before_case, end,
5655 thiscase->data.case_stmt.start);
5656 }
5657 else
5658 end_cleanup_deferral ();
5659
5660 if (thiscase->exit_label)
5661 emit_label (thiscase->exit_label);
5662
5663 free_case_nodes (case_stack->data.case_stmt.case_list);
5664 POPSTACK (case_stack);
5665
5666 free_temp_slots ();
5667 }
5668
5669 /* Convert the tree NODE into a list linked by the right field, with the left
5670 field zeroed. RIGHT is used for recursion; it is a list to be placed
5671 rightmost in the resulting list. */
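/* E.g. a tree whose root is 2, with LEFT child 1 and RIGHT child 3,
   is flattened into the list 1 -> 2 -> 3, linked through the RIGHT
   fields with every LEFT field zeroed.  */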
5672
5673 static struct case_node *
5674 case_tree2list (node, right)
5675 struct case_node *node, *right;
5676 {
5677 struct case_node *left;
5678
5679 if (node->right)
5680 right = case_tree2list (node->right, right);
5681
5682 node->right = right;
5683 if ((left = node->left))
5684 {
5685 node->left = 0;
5686 return case_tree2list (left, node);
5687 }
5688
5689 return node;
5690 }
5691
5692 /* Generate code to jump to LABEL if OP1 and OP2 are equal. */
5693
5694 static void
5695 do_jump_if_equal (op1, op2, label, unsignedp)
5696 rtx op1, op2, label;
5697 int unsignedp;
5698 {
5699 if (GET_CODE (op1) == CONST_INT && GET_CODE (op2) == CONST_INT)
5700 {
5701 if (INTVAL (op1) == INTVAL (op2))
5702 emit_jump (label);
5703 }
5704 else
5705 emit_cmp_and_jump_insns (op1, op2, EQ, NULL_RTX,
5706 (GET_MODE (op1) == VOIDmode
5707 ? GET_MODE (op2) : GET_MODE (op1)),
5708 unsignedp, label);
5709 }
5710 \f
5711 /* Not all case values are encountered equally. This function
5712 uses a heuristic to weight case labels, in cases where that
5713 looks like a reasonable thing to do.
5714
5715 Right now, all we try to guess is text, and we establish the
5716 following weights:
5717
5718 chars above space: 16
5719 digits: 16
5720 default: 12
5721 space, punct: 8
5722 tab: 4
5723 newline: 2
5724 other "\" chars: 1
5725 remaining chars: 0
5726
5727 If we find any cases in the switch that are not either -1 or in the range
5728 of valid ASCII characters, or are control characters other than those
5729 commonly used with "\", don't treat this switch as scanning text.
5730
5731 Return 1 if these nodes are suitable for cost estimation, otherwise
5732 return 0. */
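/* For instance (purely illustrative), a tokenizer's `switch (*p)'
   whose labels are ' ', '\t', '\n' and the range 'a' ... 'z' passes
   the ASCII screen below, so the tree splitter will weight the
   alphabetic range (16 per endpoint) far above the newline label (2).  */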
5733
5734 static int
5735 estimate_case_costs (node)
5736 case_node_ptr node;
5737 {
5738 tree min_ascii = integer_minus_one_node;
5739 tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
5740 case_node_ptr n;
5741 int i;
5742
5743 /* If we haven't already made the cost table, make it now. Note that the
5744 lower bound of the table is -1, not zero. */
5745
5746 if (! cost_table_initialized)
5747 {
5748 cost_table_initialized = 1;
5749
5750 for (i = 0; i < 128; i++)
5751 {
5752 if (ISALNUM (i))
5753 COST_TABLE (i) = 16;
5754 else if (ISPUNCT (i))
5755 COST_TABLE (i) = 8;
5756 else if (ISCNTRL (i))
5757 COST_TABLE (i) = -1;
5758 }
5759
5760 COST_TABLE (' ') = 8;
5761 COST_TABLE ('\t') = 4;
5762 COST_TABLE ('\0') = 4;
5763 COST_TABLE ('\n') = 2;
5764 COST_TABLE ('\f') = 1;
5765 COST_TABLE ('\v') = 1;
5766 COST_TABLE ('\b') = 1;
5767 }
5768
5769 /* See if all the case expressions look like text. It is text if the
5770 lowest constant is >= -1 and the highest is <= 127. Do all comparisons
5771 as signed arithmetic since we don't want to ever access cost_table with a
5772 value less than -1. Also check that none of the constants in a range
5773 are strange control characters. */
5774
5775 for (n = node; n; n = n->right)
5776 {
5777 if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
5778 return 0;
5779
5780 for (i = (HOST_WIDE_INT) TREE_INT_CST_LOW (n->low);
5781 i <= (HOST_WIDE_INT) TREE_INT_CST_LOW (n->high); i++)
5782 if (COST_TABLE (i) < 0)
5783 return 0;
5784 }
5785
5786 /* All interesting values are within the range of interesting
5787 ASCII characters. */
5788 return 1;
5789 }
5790
5791 /* Scan an ordered list of case nodes
5792 combining those with consecutive values or ranges.
5793
5794 E.g. three separate entries 1: 2: 3: become one entry 1..3: */
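/* Entries with distinct labels are also merged when both labels reach
   the same place, e.g. when `case 4:' and `case 5:' each hold only a
   simple jump to one shared body.  */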
5795
5796 static void
5797 group_case_nodes (head)
5798 case_node_ptr head;
5799 {
5800 case_node_ptr node = head;
5801
5802 while (node)
5803 {
5804 rtx lb = next_real_insn (label_rtx (node->code_label));
5805 rtx lb2;
5806 case_node_ptr np = node;
5807
5808 /* Try to group the successors of NODE with NODE. */
5809 while (((np = np->right) != 0)
5810 /* Do they jump to the same place? */
5811 && ((lb2 = next_real_insn (label_rtx (np->code_label))) == lb
5812 || (lb != 0 && lb2 != 0
5813 && simplejump_p (lb)
5814 && simplejump_p (lb2)
5815 && rtx_equal_p (SET_SRC (PATTERN (lb)),
5816 SET_SRC (PATTERN (lb2)))))
5817 /* Are their ranges consecutive? */
5818 && tree_int_cst_equal (np->low,
5819 fold (build (PLUS_EXPR,
5820 TREE_TYPE (node->high),
5821 node->high,
5822 integer_one_node)))
5823 /* An overflow is not consecutive. */
5824 && tree_int_cst_lt (node->high,
5825 fold (build (PLUS_EXPR,
5826 TREE_TYPE (node->high),
5827 node->high,
5828 integer_one_node))))
5829 {
5830 node->high = np->high;
5831 }
5832 /* NP is the first node after NODE which can't be grouped with it.
5833 Delete the nodes in between, and move on to that node. */
5834 node->right = np;
5835 node = np;
5836 }
5837 }
5838
5839 /* Take an ordered list of case nodes
5840 and transform them into a near optimal binary tree,
5841 on the assumption that any target code selection value is as
5842 likely as any other.
5843
5844 The transformation is performed by splitting the ordered
5845 list into two equal sections plus a pivot. The parts are
5846 then attached to the pivot as left and right branches. Each
5847 branch is then transformed recursively. */
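/* E.g. the ordered list 1 -> 2 -> 3 -> 4 -> 5 (with no cost table) is
   split at the pivot 3, which becomes the root; 1 -> 2 hangs off its
   LEFT and 4 -> 5 off its RIGHT, and each half is balanced in turn.  */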
5848
5849 static void
5850 balance_case_nodes (head, parent)
5851 case_node_ptr *head;
5852 case_node_ptr parent;
5853 {
5854 case_node_ptr np;
5855
5856 np = *head;
5857 if (np)
5858 {
5859 int cost = 0;
5860 int i = 0;
5861 int ranges = 0;
5862 case_node_ptr *npp;
5863 case_node_ptr left;
5864
5865 /* Count the number of entries on this branch. Also count the ranges. */
5866
5867 while (np)
5868 {
5869 if (!tree_int_cst_equal (np->low, np->high))
5870 {
5871 ranges++;
5872 if (use_cost_table)
5873 cost += COST_TABLE (TREE_INT_CST_LOW (np->high));
5874 }
5875
5876 if (use_cost_table)
5877 cost += COST_TABLE (TREE_INT_CST_LOW (np->low));
5878
5879 i++;
5880 np = np->right;
5881 }
5882
5883 if (i > 2)
5884 {
5885 /* Split this list if it is long enough for that to help. */
5886 npp = head;
5887 left = *npp;
5888 if (use_cost_table)
5889 {
5890 /* Find the place in the list that bisects the list's total cost;
5891 here I gets half the total cost. */
5892 int n_moved = 0;
5893 i = (cost + 1) / 2;
5894 while (1)
5895 {
5896 /* Skip nodes while their cost does not reach that amount. */
5897 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
5898 i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->high));
5899 i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->low));
5900 if (i <= 0)
5901 break;
5902 npp = &(*npp)->right;
5903 n_moved += 1;
5904 }
5905 if (n_moved == 0)
5906 {
5907 /* Leave this branch lopsided, but optimize left-hand
5908 side and fill in `parent' fields for right-hand side. */
5909 np = *head;
5910 np->parent = parent;
5911 balance_case_nodes (&np->left, np);
5912 for (; np->right; np = np->right)
5913 np->right->parent = np;
5914 return;
5915 }
5916 }
5917 /* If there are just three nodes, split at the middle one. */
5918 else if (i == 3)
5919 npp = &(*npp)->right;
5920 else
5921 {
5922 /* Find the place in the list that bisects the list's total cost,
5923 where ranges count as 2.
5924 Here I gets half the total cost. */
5925 i = (i + ranges + 1) / 2;
5926 while (1)
5927 {
5928 /* Skip nodes while their cost does not reach that amount. */
5929 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
5930 i--;
5931 i--;
5932 if (i <= 0)
5933 break;
5934 npp = &(*npp)->right;
5935 }
5936 }
5937 *head = np = *npp;
5938 *npp = 0;
5939 np->parent = parent;
5940 np->left = left;
5941
5942 /* Optimize each of the two split parts. */
5943 balance_case_nodes (&np->left, np);
5944 balance_case_nodes (&np->right, np);
5945 }
5946 else
5947 {
5948 /* Else leave this branch as one level,
5949 but fill in `parent' fields. */
5950 np = *head;
5951 np->parent = parent;
5952 for (; np->right; np = np->right)
5953 np->right->parent = np;
5954 }
5955 }
5956 }
5957 \f
5958 /* Search the parent sections of the case node tree
5959 to see if a test for the lower bound of NODE would be redundant.
5960 INDEX_TYPE is the type of the index expression.
5961
5962 The instructions to generate the case decision tree are
5963 output in the same order as nodes are processed so it is
5964 known that if a parent node checks the range of the current
5965 node minus one that the current node is bounded at its lower
5966 span. Thus the test would be redundant. */
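/* E.g. if some ancestor's range ends at 49 and this node starts at 50,
   control can reach this node only with values above 49, so an explicit
   `index >= 50' test would be redundant.  */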
5967
5968 static int
5969 node_has_low_bound (node, index_type)
5970 case_node_ptr node;
5971 tree index_type;
5972 {
5973 tree low_minus_one;
5974 case_node_ptr pnode;
5975
5976 /* If the lower bound of this node is the lowest value in the index type,
5977 we need not test it. */
5978
5979 if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
5980 return 1;
5981
5982 /* If this node has a left branch, the value at the left must be less
5983 than that at this node, so it cannot be bounded at the bottom and
5984 we need not bother testing any further. */
5985
5986 if (node->left)
5987 return 0;
5988
5989 low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
5990 node->low, integer_one_node));
5991
5992 /* If the subtraction above overflowed, we can't verify anything.
5993 Otherwise, look for a parent that tests our value - 1. */
5994
5995 if (! tree_int_cst_lt (low_minus_one, node->low))
5996 return 0;
5997
5998 for (pnode = node->parent; pnode; pnode = pnode->parent)
5999 if (tree_int_cst_equal (low_minus_one, pnode->high))
6000 return 1;
6001
6002 return 0;
6003 }
6004
6005 /* Search the parent sections of the case node tree
6006 to see if a test for the upper bound of NODE would be redundant.
6007 INDEX_TYPE is the type of the index expression.
6008
6009 The instructions to generate the case decision tree are
6010 output in the same order as nodes are processed so it is
6011 known that if a parent node checks the range of the current
6012 node plus one that the current node is bounded at its upper
6013 span. Thus the test would be redundant. */
6014
6015 static int
6016 node_has_high_bound (node, index_type)
6017 case_node_ptr node;
6018 tree index_type;
6019 {
6020 tree high_plus_one;
6021 case_node_ptr pnode;
6022
6023 /* If there is no upper bound, obviously no test is needed. */
6024
6025 if (TYPE_MAX_VALUE (index_type) == NULL)
6026 return 1;
6027
6028 /* If the upper bound of this node is the highest value in the type
6029 of the index expression, we need not test against it. */
6030
6031 if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
6032 return 1;
6033
6034 /* If this node has a right branch, the value at the right must be greater
6035 than that at this node, so it cannot be bounded at the top and
6036 we need not bother testing any further. */
6037
6038 if (node->right)
6039 return 0;
6040
6041 high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
6042 node->high, integer_one_node));
6043
6044 /* If the addition above overflowed, we can't verify anything.
6045 Otherwise, look for a parent that tests our value + 1. */
6046
6047 if (! tree_int_cst_lt (node->high, high_plus_one))
6048 return 0;
6049
6050 for (pnode = node->parent; pnode; pnode = pnode->parent)
6051 if (tree_int_cst_equal (high_plus_one, pnode->low))
6052 return 1;
6053
6054 return 0;
6055 }
6056
6057 /* Search the parent sections of the
6058 case node tree to see if both tests for the upper and lower
6059 bounds of NODE would be redundant. */
6060
6061 static int
6062 node_is_bounded (node, index_type)
6063 case_node_ptr node;
6064 tree index_type;
6065 {
6066 return (node_has_low_bound (node, index_type)
6067 && node_has_high_bound (node, index_type));
6068 }
6069
6070 /* Emit an unconditional jump to LABEL unless it would be dead code. */
6071
6072 static void
6073 emit_jump_if_reachable (label)
6074 rtx label;
6075 {
6076 if (GET_CODE (get_last_insn ()) != BARRIER)
6077 emit_jump (label);
6078 }
6079 \f
6080 /* Emit step-by-step code to select a case for the value of INDEX.
6081 The thus generated decision tree follows the form of the
6082 case-node binary tree NODE, whose nodes represent test conditions.
6083 INDEX_TYPE is the type of the index of the switch.
6084
6085 Care is taken to prune redundant tests from the decision tree
6086 by detecting any boundary conditions already checked by
6087 emitted rtx. (See node_has_high_bound, node_has_low_bound
6088 and node_is_bounded, above.)
6089
6090 Where the test conditions can be shown to be redundant we emit
6091 an unconditional jump to the target code. As a further
6092 optimization, the subordinates of a tree node are examined to
6093 check for bounded nodes. In this case conditional and/or
6094 unconditional jumps as a result of the boundary check for the
6095 current node are arranged to target the subordinate's associated
6096 code for out-of-bound conditions on the current node.
6097
6098 We can assume that when control reaches the code generated here,
6099 the index value has already been compared with the parents
6100 of this node, and determined to be on the same side of each parent
6101 as this node is. Thus, if this node tests for the value 51,
6102 and a parent tested for 52, we don't need to consider
6103 the possibility of a value greater than 51. If another parent
6104 tests for the value 50, then this node need not test anything. */
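/* Schematically, for a tree with root 5, left leaf 3 and right leaf 8,
   none of them bounded, the emitted skeleton is

	if (index == 5) goto L5;
	if (index > 5) goto Ltest;
	if (index == 3) goto L3;
	goto Ldefault;
     Ltest:
	if (index == 8) goto L8;

   with the final fall-through to the default emitted by the caller.  */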
6105
6106 static void
6107 emit_case_nodes (index, node, default_label, index_type)
6108 rtx index;
6109 case_node_ptr node;
6110 rtx default_label;
6111 tree index_type;
6112 {
6113 /* If INDEX has an unsigned type, we must make unsigned branches. */
6114 int unsignedp = TREE_UNSIGNED (index_type);
6115 enum machine_mode mode = GET_MODE (index);
6116 enum machine_mode imode = TYPE_MODE (index_type);
6117
6118 /* See if our parents have already tested everything for us.
6119 If they have, emit an unconditional jump for this node. */
6120 if (node_is_bounded (node, index_type))
6121 emit_jump (label_rtx (node->code_label));
6122
6123 else if (tree_int_cst_equal (node->low, node->high))
6124 {
6125 /* Node is single valued. First see if the index expression matches
6126 this node and then check our children, if any. */
6127
6128 do_jump_if_equal (index,
6129 convert_modes (mode, imode,
6130 expand_expr (node->low, NULL_RTX,
6131 VOIDmode, 0),
6132 unsignedp),
6133 label_rtx (node->code_label), unsignedp);
6134
6135 if (node->right != 0 && node->left != 0)
6136 {
6137 /* This node has children on both sides.
6138 Dispatch to one side or the other
6139 by comparing the index value with this node's value.
6140 If one subtree is bounded, check that one first,
6141 so we can avoid real branches in the tree. */
6142
6143 if (node_is_bounded (node->right, index_type))
6144 {
6145 emit_cmp_and_jump_insns (index,
6146 convert_modes
6147 (mode, imode,
6148 expand_expr (node->high, NULL_RTX,
6149 VOIDmode, 0),
6150 unsignedp),
6151 GT, NULL_RTX, mode, unsignedp,
6152 label_rtx (node->right->code_label));
6153 emit_case_nodes (index, node->left, default_label, index_type);
6154 }
6155
6156 else if (node_is_bounded (node->left, index_type))
6157 {
6158 emit_cmp_and_jump_insns (index,
6159 convert_modes
6160 (mode, imode,
6161 expand_expr (node->high, NULL_RTX,
6162 VOIDmode, 0),
6163 unsignedp),
6164 LT, NULL_RTX, mode, unsignedp,
6165 label_rtx (node->left->code_label));
6166 emit_case_nodes (index, node->right, default_label, index_type);
6167 }
6168
6169 else
6170 {
6171 /* Neither node is bounded. First distinguish the two sides;
6172 then emit the code for one side at a time. */
6173
6174 tree test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
6175
6176 /* See if the value is on the right. */
6177 emit_cmp_and_jump_insns (index,
6178 convert_modes
6179 (mode, imode,
6180 expand_expr (node->high, NULL_RTX,
6181 VOIDmode, 0),
6182 unsignedp),
6183 GT, NULL_RTX, mode, unsignedp,
6184 label_rtx (test_label));
6185
6186 /* Value must be on the left.
6187 Handle the left-hand subtree. */
6188 emit_case_nodes (index, node->left, default_label, index_type);
6189 /* If left-hand subtree does nothing,
6190 go to default. */
6191 emit_jump_if_reachable (default_label);
6192
6193 /* Code branches here for the right-hand subtree. */
6194 expand_label (test_label);
6195 emit_case_nodes (index, node->right, default_label, index_type);
6196 }
6197 }
6198
6199 else if (node->right != 0 && node->left == 0)
6200 {
6201 /* Here we have a right child but no left so we issue a conditional
6202 branch to default and process the right child.
6203
6204 Omit the conditional branch to default when the right child is just
6205 a single-valued leaf; it costs too much space to save so little time. */
6206
6207 if (node->right->right || node->right->left
6208 || !tree_int_cst_equal (node->right->low, node->right->high))
6209 {
6210 if (!node_has_low_bound (node, index_type))
6211 {
6212 emit_cmp_and_jump_insns (index,
6213 convert_modes
6214 (mode, imode,
6215 expand_expr (node->high, NULL_RTX,
6216 VOIDmode, 0),
6217 unsignedp),
6218 LT, NULL_RTX, mode, unsignedp,
6219 default_label);
6220 }
6221
6222 emit_case_nodes (index, node->right, default_label, index_type);
6223 }
6224 else
6225 /* We cannot process node->right normally
6226 since we haven't ruled out the numbers less than
6227 this node's value. So handle node->right explicitly. */
6228 do_jump_if_equal (index,
6229 convert_modes
6230 (mode, imode,
6231 expand_expr (node->right->low, NULL_RTX,
6232 VOIDmode, 0),
6233 unsignedp),
6234 label_rtx (node->right->code_label), unsignedp);
6235 }
6236
6237 else if (node->right == 0 && node->left != 0)
6238 {
6239 /* Just one subtree, on the left. */
6240 if (node->left->left || node->left->right
6241 || !tree_int_cst_equal (node->left->low, node->left->high))
6242 {
6243 if (!node_has_high_bound (node, index_type))
6244 {
6245 emit_cmp_and_jump_insns (index,
6246 convert_modes
6247 (mode, imode,
6248 expand_expr (node->high, NULL_RTX,
6249 VOIDmode, 0),
6250 unsignedp),
6251 GT, NULL_RTX, mode, unsignedp,
6252 default_label);
6253 }
6254
6255 emit_case_nodes (index, node->left, default_label, index_type);
6256 }
6257 else
6258 /* We cannot process node->left normally
6259 since we haven't ruled out the numbers greater than
6260 this node's value. So handle node->left explicitly. */
6261 do_jump_if_equal (index,
6262 convert_modes
6263 (mode, imode,
6264 expand_expr (node->left->low, NULL_RTX,
6265 VOIDmode, 0),
6266 unsignedp),
6267 label_rtx (node->left->code_label), unsignedp);
6268 }
6269 }
6270 else
6271 {
6272 /* Node is a range. These cases are very similar to those for a single
6273 value, except that we do not start by testing whether this node
6274 is the one to branch to. */
6275
6276 if (node->right != 0 && node->left != 0)
6277 {
6278 /* Node has subtrees on both sides.
6279 If the right-hand subtree is bounded,
6280 test for it first, since we can go straight there.
6281 Otherwise, we need to make a branch in the control structure,
6282 then handle the two subtrees. */
6283 tree test_label = 0;
6284
6285 if (node_is_bounded (node->right, index_type))
6286 /* Right hand node is fully bounded so we can eliminate any
6287 testing and branch directly to the target code. */
6288 emit_cmp_and_jump_insns (index,
6289 convert_modes
6290 (mode, imode,
6291 expand_expr (node->high, NULL_RTX,
6292 VOIDmode, 0),
6293 unsignedp),
6294 GT, NULL_RTX, mode, unsignedp,
6295 label_rtx (node->right->code_label));
6296 else
6297 {
6298 /* Right hand node requires testing.
6299 Branch to a label where we will handle it later. */
6300
6301 test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
6302 emit_cmp_and_jump_insns (index,
6303 convert_modes
6304 (mode, imode,
6305 expand_expr (node->high, NULL_RTX,
6306 VOIDmode, 0),
6307 unsignedp),
6308 GT, NULL_RTX, mode, unsignedp,
6309 label_rtx (test_label));
6310 }
6311
6312 /* Value belongs to this node or to the left-hand subtree. */
6313
6314 emit_cmp_and_jump_insns (index,
6315 convert_modes
6316 (mode, imode,
6317 expand_expr (node->low, NULL_RTX,
6318 VOIDmode, 0),
6319 unsignedp),
6320 GE, NULL_RTX, mode, unsignedp,
6321 label_rtx (node->code_label));
6322
6323 /* Handle the left-hand subtree. */
6324 emit_case_nodes (index, node->left, default_label, index_type);
6325
6326 /* If right node had to be handled later, do that now. */
6327
6328 if (test_label)
6329 {
6330 /* If the left-hand subtree fell through,
6331 don't let it fall into the right-hand subtree. */
6332 emit_jump_if_reachable (default_label);
6333
6334 expand_label (test_label);
6335 emit_case_nodes (index, node->right, default_label, index_type);
6336 }
6337 }
6338
6339 else if (node->right != 0 && node->left == 0)
6340 {
6341 /* Deal with values to the left of this node,
6342 if they are possible. */
6343 if (!node_has_low_bound (node, index_type))
6344 {
6345 emit_cmp_and_jump_insns (index,
6346 convert_modes
6347 (mode, imode,
6348 expand_expr (node->low, NULL_RTX,
6349 VOIDmode, 0),
6350 unsignedp),
6351 LT, NULL_RTX, mode, unsignedp,
6352 default_label);
6353 }
6354
6355 /* Value belongs to this node or to the right-hand subtree. */
6356
6357 emit_cmp_and_jump_insns (index,
6358 convert_modes
6359 (mode, imode,
6360 expand_expr (node->high, NULL_RTX,
6361 VOIDmode, 0),
6362 unsignedp),
6363 LE, NULL_RTX, mode, unsignedp,
6364 label_rtx (node->code_label));
6365
6366 emit_case_nodes (index, node->right, default_label, index_type);
6367 }
6368
6369 else if (node->right == 0 && node->left != 0)
6370 {
6371 /* Deal with values to the right of this node,
6372 if they are possible. */
6373 if (!node_has_high_bound (node, index_type))
6374 {
6375 emit_cmp_and_jump_insns (index,
6376 convert_modes
6377 (mode, imode,
6378 expand_expr (node->high, NULL_RTX,
6379 VOIDmode, 0),
6380 unsignedp),
6381 GT, NULL_RTX, mode, unsignedp,
6382 default_label);
6383 }
6384
6385 /* Value belongs to this node or to the left-hand subtree. */
6386
6387 emit_cmp_and_jump_insns (index,
6388 convert_modes
6389 (mode, imode,
6390 expand_expr (node->low, NULL_RTX,
6391 VOIDmode, 0),
6392 unsignedp),
6393 GE, NULL_RTX, mode, unsignedp,
6394 label_rtx (node->code_label));
6395
6396 emit_case_nodes (index, node->left, default_label, index_type);
6397 }
6398
6399 else
6400 {
6401 /* Node has no children so we check low and high bounds to remove
6402 redundant tests. At most one of the bounds can exist,
6403 since otherwise this node is bounded--a case tested already. */
6404 int high_bound = node_has_high_bound (node, index_type);
6405 int low_bound = node_has_low_bound (node, index_type);
6406
6407 if (!high_bound && low_bound)
6408 {
6409 emit_cmp_and_jump_insns (index,
6410 convert_modes
6411 (mode, imode,
6412 expand_expr (node->high, NULL_RTX,
6413 VOIDmode, 0),
6414 unsignedp),
6415 GT, NULL_RTX, mode, unsignedp,
6416 default_label);
6417 }
6418
6419 else if (!low_bound && high_bound)
6420 {
6421 emit_cmp_and_jump_insns (index,
6422 convert_modes
6423 (mode, imode,
6424 expand_expr (node->low, NULL_RTX,
6425 VOIDmode, 0),
6426 unsignedp),
6427 LT, NULL_RTX, mode, unsignedp,
6428 default_label);
6429 }
6430 else if (!low_bound && !high_bound)
6431 {
6432 /* Widen LOW and HIGH to the same width as INDEX. */
6433 tree type = type_for_mode (mode, unsignedp);
6434 tree low = build1 (CONVERT_EXPR, type, node->low);
6435 tree high = build1 (CONVERT_EXPR, type, node->high);
6436 rtx low_rtx, new_index, new_bound;
6437
6438 /* Instead of doing two branches, emit one unsigned branch for
6439 (index-low) > (high-low). */
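		  /* E.g. for the range 5..10 this tests
		     (unsigned) (index - 5) > 5, which holds exactly when
		     index < 5 (the subtraction wraps) or index > 10.  */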
6440 low_rtx = expand_expr (low, NULL_RTX, mode, 0);
6441 new_index = expand_simple_binop (mode, MINUS, index, low_rtx,
6442 NULL_RTX, unsignedp,
6443 OPTAB_WIDEN);
6444 new_bound = expand_expr (fold (build (MINUS_EXPR, type,
6445 high, low)),
6446 NULL_RTX, mode, 0);
6447
6448 emit_cmp_and_jump_insns (new_index, new_bound, GT, NULL_RTX,
6449 mode, 1, default_label);
6450 }
6451
6452 emit_jump (label_rtx (node->code_label));
6453 }
6454 }
6455 }