/* Expands front end tree to back end RTL for GCC
   Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/* This file handles the generation of rtl code from tree structure
   above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
   It also creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   The functions whose names start with `expand_' are called by the
   parser to generate RTL instructions for various kinds of constructs.

   Some control and binding constructs require calling several such
   functions at different times.  For example, a simple if-then
   is expanded by calling `expand_start_cond' (with the condition-expression
   as argument) before parsing the then-clause and calling `expand_end_cond'
   after parsing the then-clause.  */

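/* A minimal illustration of that protocol (a sketch only, not part of
   this file; COND and THEN_STMT stand for trees a front end would
   supply):

        expand_start_cond (cond, 0);     - emit the test, begin nesting
        expand_expr_stmt (then_stmt);    - expand the then-clause
        expand_end_cond ();              - emit the end-of-if label  */
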
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"

#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "insn-config.h"
#include "expr.h"
#include "libfuncs.h"
#include "hard-reg-set.h"
#include "loop.h"
#include "recog.h"
#include "machmode.h"
#include "toplev.h"
#include "output.h"
#include "ggc.h"
#include "langhooks.h"
#include "predict.h"
#include "optabs.h"
#include "target.h"
#include "regs.h"
\f
/* Functions and data structures for expanding case statements.  */

/* Case label structure, used to hold info on labels within case
   statements.  We handle "range" labels; for a single-value label
   as in C, the high and low limits are the same.

   An AVL tree of case nodes is initially created, and later transformed
   to a list linked via the RIGHT fields in the nodes.  Nodes with
   higher case values are later in the list.

   Switch statements can be output in one of two forms.  A branch table
   is used if there are more than a few labels and the labels are dense
   within the range between the smallest and largest case value.  If a
   branch table is used, no further manipulations are done with the case
   node chain.

   The alternative to the use of a branch table is to generate a series
   of compare and jump insns.  When that is done, we use the LEFT, RIGHT,
   and PARENT fields to hold a binary tree.  Initially the tree is
   totally unbalanced, with everything on the right.  We balance the tree
   with nodes on the left having lower case values than the parent
   and nodes on the right having higher values.  We then output the tree
   in order.  */

struct case_node GTY(())
{
  struct case_node *left;   /* Left son in binary tree */
  struct case_node *right;  /* Right son in binary tree; also node chain */
  struct case_node *parent; /* Parent of node in binary tree */
  tree low;                 /* Lowest index value for this label */
  tree high;                /* Highest index value for this label */
  tree code_label;          /* Label to jump to when node matches */
  int balance;
};

typedef struct case_node case_node;
typedef struct case_node *case_node_ptr;
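
/* For instance (illustrative): the GNU C range label `case 1 ... 5:'
   becomes one case_node with LOW = 1 and HIGH = 5, while a plain
   `case 3:' gets LOW = HIGH = 3.  */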

/* These are used by estimate_case_costs and balance_case_nodes.  */

/* This must be a signed type, and non-ANSI compilers lack signed char.  */
static short cost_table_[129];
static int use_cost_table;
static int cost_table_initialized;

/* Special care is needed because we allow -1, but TREE_INT_CST_LOW
   is unsigned.  */
#define COST_TABLE(I)  cost_table_[(unsigned HOST_WIDE_INT) ((I) + 1)]
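
/* So, for example, COST_TABLE (-1) refers to cost_table_[0] and
   COST_TABLE (127) to cost_table_[128]: every `char' value plus the
   -1 sentinel fits without a negative subscript.  */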
\f
/* Stack of control and binding constructs we are currently inside.

   These constructs begin when you call `expand_start_WHATEVER'
   and end when you call `expand_end_WHATEVER'.  This stack records
   info about how the construct began that tells the end-function
   what to do.  It also may provide information about the construct
   to alter the behavior of other constructs within the body.
   For example, they may affect the behavior of C `break' and `continue'.

   Each construct gets one `struct nesting' object.
   All of these objects are chained through the `all' field.
   `nesting_stack' points to the first object (innermost construct).
   The position of an entry on `nesting_stack' is in its `depth' field.

   Each type of construct has its own individual stack.
   For example, conds have `cond_stack'.  Each object points to the
   next object of the same type through the `next' field.

   Some constructs are visible to `break' exit-statements and others
   are not.  Which constructs are visible depends on the language.
   Therefore, the data structure allows each construct to be visible
   or not, according to the args given when the construct is started.
   The construct is visible if the `exit_label' field is non-null.
   In that case, the value should be a CODE_LABEL rtx.  */

struct nesting GTY(())
{
  struct nesting *all;
  struct nesting *next;
  int depth;
  rtx exit_label;
  enum nesting_desc {
    COND_NESTING,
    BLOCK_NESTING,
    CASE_NESTING
  } desc;
  union nesting_u
    {
      /* For conds (if-then and if-then-else statements).  */
      struct nesting_cond
        {
          /* Label for the end of the if construct.
             There is none if EXITFLAG was not set
             and no `else' has been seen yet.  */
          rtx endif_label;
          /* Label for the end of this alternative.
             This may be the end of the if or the next else/elseif.  */
          rtx next_label;
        } GTY ((tag ("COND_NESTING"))) cond;
      /* For variable binding contours.  */
      struct nesting_block
        {
          /* Sequence number of this binding contour within the function,
             in order of entry.  */
          int block_start_count;
          /* Nonzero => value to restore stack to on exit.  */
          rtx stack_level;
          /* The NOTE that starts this contour.
             Used by expand_goto to check whether the destination
             is within each contour or not.  */
          rtx first_insn;
          /* Innermost containing binding contour that has a stack level.  */
          struct nesting *innermost_stack_block;
          /* List of cleanups to be run on exit from this contour.
             This is a list of expressions to be evaluated.
             The TREE_PURPOSE of each link is the ..._DECL node
             which the cleanup pertains to.  */
          tree cleanups;
          /* List of cleanup-lists of blocks containing this block,
             as they were at the locus where this block appears.
             There is an element for each containing block,
             ordered innermost containing block first.
             The tail of this list can be 0,
             if all remaining elements would be empty lists.
             The element's TREE_VALUE is the cleanup-list of that block,
             which may be null.  */
          tree outer_cleanups;
          /* Chain of labels defined inside this binding contour.
             For contours that have stack levels or cleanups.  */
          struct label_chain *label_chain;
          /* Nonzero if this is associated with an EH region.  */
          int exception_region;
          /* The saved target_temp_slot_level from our outer block.
             We may reset target_temp_slot_level to be the level of
             this block, if that is done, target_temp_slot_level
             reverts to the saved target_temp_slot_level at the very
             end of the block.  */
          int block_target_temp_slot_level;
          /* True if we are currently emitting insns in an area of
             output code that is controlled by a conditional
             expression.  This is used by the cleanup handling code to
             generate conditional cleanup actions.  */
          int conditional_code;
          /* A place to move the start of the exception region for any
             of the conditional cleanups, must be at the end or after
             the start of the last unconditional cleanup, and before any
             conditional branch points.  */
          rtx last_unconditional_cleanup;
        } GTY ((tag ("BLOCK_NESTING"))) block;
      /* For switch (C) or case (Pascal) statements.  */
      struct nesting_case
        {
          /* The insn after which the case dispatch should finally
             be emitted.  Zero for a dummy.  */
          rtx start;
          /* A list of case labels; it is first built as an AVL tree.
             During expand_end_case, this is converted to a list, and may be
             rearranged into a nearly balanced binary tree.  */
          struct case_node *case_list;
          /* Label to jump to if no case matches.  */
          tree default_label;
          /* The expression to be dispatched on.  */
          tree index_expr;
          /* Type that INDEX_EXPR should be converted to.  */
          tree nominal_type;
          /* Name of this kind of statement, for warnings.  */
          const char *printname;
          /* Used to save no_line_numbers till we see the first case label.
             We set this to -1 when we see the first case label in this
             case statement.  */
          int line_number_status;
        } GTY ((tag ("CASE_NESTING"))) case_stmt;
    } GTY ((desc ("%1.desc"))) data;
};

/* Allocate and return a new `struct nesting'.  */

#define ALLOC_NESTING() ggc_alloc (sizeof (struct nesting))

/* Pop the nesting stack element by element until we pop off
   the element which is at the top of STACK.
   Update all the other stacks, popping off elements from them
   as we pop them from nesting_stack.  */

#define POPSTACK(STACK)                                  \
do { struct nesting *target = STACK;                     \
     struct nesting *this;                               \
     do { this = nesting_stack;                          \
          if (cond_stack == this)                        \
            cond_stack = cond_stack->next;               \
          if (block_stack == this)                       \
            block_stack = block_stack->next;             \
          if (stack_block_stack == this)                 \
            stack_block_stack = stack_block_stack->next; \
          if (case_stack == this)                        \
            case_stack = case_stack->next;               \
          nesting_depth = nesting_stack->depth - 1;      \
          nesting_stack = this->all; }                   \
     while (this != target); } while (0)
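
/* For example, `POPSTACK (case_stack);' unwinds nesting_stack down
   through the innermost pending switch statement, detaching each
   popped entry from whichever per-construct stack it heads.  */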
\f
/* In some cases it is impossible to generate code for a forward goto
   until the label definition is seen.  This happens when it may be necessary
   for the goto to reset the stack pointer: we don't yet know how to do that.
   So expand_goto puts an entry on this fixup list.
   Each time a binding contour that resets the stack is exited,
   we check each fixup.
   If the target label has now been defined, we can insert the proper code.  */

struct goto_fixup GTY(())
{
  /* Points to following fixup.  */
  struct goto_fixup *next;
  /* Points to the insn before the jump insn.
     If more code must be inserted, it goes after this insn.  */
  rtx before_jump;
  /* The LABEL_DECL that this jump is jumping to, or 0
     for break, continue or return.  */
  tree target;
  /* The BLOCK for the place where this goto was found.  */
  tree context;
  /* The CODE_LABEL rtx that this is jumping to.  */
  rtx target_rtl;
  /* Number of binding contours started in current function
     before the label reference.  */
  int block_start_count;
  /* The outermost stack level that should be restored for this jump.
     Each time a binding contour that resets the stack is exited,
     if the target label is *not* yet defined, this slot is updated.  */
  rtx stack_level;
  /* List of lists of cleanup expressions to be run by this goto.
     There is one element for each block that this goto is within.
     The tail of this list can be 0,
     if all remaining elements would be empty.
     The TREE_VALUE contains the cleanup list of that block as of the
     time this goto was seen.
     The TREE_ADDRESSABLE flag is 1 for a block that has been exited.  */
  tree cleanup_list_list;
};
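
/* A case that needs this machinery (illustrative GNU C):

        { int vla[n]; ... goto out; ... }
        out: ;

   When the goto is expanded, `out' is not yet defined and the goto
   exits a contour whose stack level must be restored, so a goto_fixup
   is queued; fixup_gotos later supplies the stack restore once the
   label has appeared.  */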

/* Within any binding contour that must restore a stack level,
   all labels are recorded with a chain of these structures.  */

struct label_chain GTY(())
{
  /* Points to the following label_chain entry.  */
  struct label_chain *next;
  tree label;
};

struct stmt_status GTY(())
{
  /* Chain of all pending binding contours.  */
  struct nesting * x_block_stack;

  /* If any new stacks are added here, add them to POPSTACK too.  */

  /* Chain of all pending binding contours that restore stack levels
     or have cleanups.  */
  struct nesting * x_stack_block_stack;

  /* Chain of all pending conditional statements.  */
  struct nesting * x_cond_stack;

  /* Chain of all pending case or switch statements.  */
  struct nesting * x_case_stack;

  /* Separate chain including all of the above,
     chained through the `all' field.  */
  struct nesting * x_nesting_stack;

  /* Number of entries on nesting_stack now.  */
  int x_nesting_depth;

  /* Number of binding contours started so far in this function.  */
  int x_block_start_count;

  /* Each time we expand an expression-statement,
     record the expr's type and its RTL value here.  */
  tree x_last_expr_type;
  rtx x_last_expr_value;
  rtx x_last_expr_alt_rtl;

  /* Nonzero if within a ({...}) grouping, in which case we must
     always compute a value for each expr-stmt in case it is the last one.  */
  int x_expr_stmts_for_value;

  /* Location of last line-number note, whether we actually
     emitted it or not.  */
  location_t x_emit_locus;

  struct goto_fixup *x_goto_fixup_chain;
};

#define block_stack (cfun->stmt->x_block_stack)
#define stack_block_stack (cfun->stmt->x_stack_block_stack)
#define cond_stack (cfun->stmt->x_cond_stack)
#define case_stack (cfun->stmt->x_case_stack)
#define nesting_stack (cfun->stmt->x_nesting_stack)
#define nesting_depth (cfun->stmt->x_nesting_depth)
#define current_block_start_count (cfun->stmt->x_block_start_count)
#define last_expr_type (cfun->stmt->x_last_expr_type)
#define last_expr_value (cfun->stmt->x_last_expr_value)
#define last_expr_alt_rtl (cfun->stmt->x_last_expr_alt_rtl)
#define expr_stmts_for_value (cfun->stmt->x_expr_stmts_for_value)
#define emit_locus (cfun->stmt->x_emit_locus)
#define goto_fixup_chain (cfun->stmt->x_goto_fixup_chain)

/* Nonzero if we are using EH to handle cleanups.  */
int using_eh_for_cleanups_p = 0;

static int n_occurrences (int, const char *);
static bool decl_conflicts_with_clobbers_p (tree, const HARD_REG_SET);
static void expand_goto_internal (tree, rtx, rtx);
static int expand_fixup (tree, rtx, rtx);
static void expand_nl_goto_receiver (void);
static void fixup_gotos (struct nesting *, rtx, tree, rtx, int);
static bool check_operand_nalternatives (tree, tree);
static bool check_unique_operand_names (tree, tree);
static char *resolve_operand_name_1 (char *, tree, tree);
static void expand_null_return_1 (rtx);
static enum br_predictor return_prediction (rtx);
static rtx shift_return_value (rtx);
static void expand_value_return (rtx);
static void expand_cleanups (tree, int, int);
static void check_seenlabel (void);
static void do_jump_if_equal (rtx, rtx, rtx, int);
static int estimate_case_costs (case_node_ptr);
static bool same_case_target_p (rtx, rtx);
static void strip_default_case_nodes (case_node_ptr *, rtx);
static bool lshift_cheap_p (void);
static int case_bit_test_cmp (const void *, const void *);
static void emit_case_bit_tests (tree, tree, tree, tree, case_node_ptr, rtx);
static void group_case_nodes (case_node_ptr);
static void balance_case_nodes (case_node_ptr *, case_node_ptr);
static int node_has_low_bound (case_node_ptr, tree);
static int node_has_high_bound (case_node_ptr, tree);
static int node_is_bounded (case_node_ptr, tree);
static void emit_jump_if_reachable (rtx);
static void emit_case_nodes (rtx, case_node_ptr, rtx, tree);
static struct case_node *case_tree2list (case_node *, case_node *);
\f
void
using_eh_for_cleanups (void)
{
  using_eh_for_cleanups_p = 1;
}

void
init_stmt_for_function (void)
{
  cfun->stmt = ggc_alloc_cleared (sizeof (struct stmt_status));
}
\f
/* Record the current file and line.  Called from emit_line_note.  */

void
set_file_and_line_for_stmt (location_t location)
{
  /* If we're outputting an inline function, and we add a line note,
     there may be no CFUN->STMT information.  So, there's no need to
     update it.  */
  if (cfun->stmt)
    emit_locus = location;
}

/* Emit a no-op instruction.  */

void
emit_nop (void)
{
  rtx last_insn;

  last_insn = get_last_insn ();
  if (!optimize
      && (GET_CODE (last_insn) == CODE_LABEL
          || (GET_CODE (last_insn) == NOTE
              && prev_real_insn (last_insn) == 0)))
    emit_insn (gen_nop ());
}
\f
/* Return the rtx-label that corresponds to a LABEL_DECL,
   creating it if necessary.  */

rtx
label_rtx (tree label)
{
  if (TREE_CODE (label) != LABEL_DECL)
    abort ();

  if (!DECL_RTL_SET_P (label))
    {
      rtx r = gen_label_rtx ();
      SET_DECL_RTL (label, r);
      if (FORCED_LABEL (label) || DECL_NONLOCAL (label))
        LABEL_PRESERVE_P (r) = 1;
    }

  return DECL_RTL (label);
}

/* As above, but also put it on the forced-reference list of the
   function that contains it.  */
rtx
force_label_rtx (tree label)
{
  rtx ref = label_rtx (label);
  tree function = decl_function_context (label);
  struct function *p;

  if (!function)
    abort ();

  if (function != current_function_decl)
    p = find_function_data (function);
  else
    p = cfun;

  p->expr->x_forced_labels = gen_rtx_EXPR_LIST (VOIDmode, ref,
                                                p->expr->x_forced_labels);
  return ref;
}

/* Add an unconditional jump to LABEL as the next sequential instruction.  */

void
emit_jump (rtx label)
{
  do_pending_stack_adjust ();
  emit_jump_insn (gen_jump (label));
  emit_barrier ();
}

/* Emit code to jump to the address
   specified by the pointer expression EXP.  */

void
expand_computed_goto (tree exp)
{
  rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);

  x = convert_memory_address (Pmode, x);

  emit_queue ();

  if (! cfun->computed_goto_common_label)
    {
      cfun->computed_goto_common_reg = copy_to_mode_reg (Pmode, x);
      cfun->computed_goto_common_label = gen_label_rtx ();

      do_pending_stack_adjust ();
      emit_label (cfun->computed_goto_common_label);
      emit_indirect_jump (cfun->computed_goto_common_reg);

      current_function_has_computed_jump = 1;
    }
  else
    {
      emit_move_insn (cfun->computed_goto_common_reg, x);
      emit_jump (cfun->computed_goto_common_label);
    }
}
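
/* So for GNU C's `goto *fp;' the first such statement in a function
   emits the one indirect jump; each later one just loads the common
   register and jumps to the common label, so a function with many
   computed gotos does not sprout one indirect jump apiece.  */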
\f
/* Handle goto statements and the labels that they can go to.  */

/* Specify the location in the RTL code of a label LABEL,
   which is a LABEL_DECL tree node.

   This is used for the kind of label that the user can jump to with a
   goto statement, and for alternatives of a switch or case statement.
   RTL labels generated for loops and conditionals don't go through here;
   they are generated directly at the RTL level, by other functions below.

   Note that this has nothing to do with defining label *names*.
   Languages vary in how they do that and what that even means.  */

void
expand_label (tree label)
{
  struct label_chain *p;
  rtx label_r = label_rtx (label);

  do_pending_stack_adjust ();
  emit_label (label_r);
  if (DECL_NAME (label))
    LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));

  if (DECL_NONLOCAL (label))
    {
      expand_nl_goto_receiver ();
      nonlocal_goto_handler_labels
        = gen_rtx_EXPR_LIST (VOIDmode, label_r,
                             nonlocal_goto_handler_labels);
    }

  if (FORCED_LABEL (label))
    forced_labels = gen_rtx_EXPR_LIST (VOIDmode, label_r, forced_labels);

  if (DECL_NONLOCAL (label) || FORCED_LABEL (label))
    maybe_set_first_label_num (label_r);

  if (stack_block_stack != 0)
    {
      p = ggc_alloc (sizeof (struct label_chain));
      p->next = stack_block_stack->data.block.label_chain;
      stack_block_stack->data.block.label_chain = p;
      p->label = label;
    }
}

/* Generate RTL code for a `goto' statement with target label LABEL.
   LABEL should be a LABEL_DECL tree node that was or will later be
   defined with `expand_label'.  */

void
expand_goto (tree label)
{
#ifdef ENABLE_CHECKING
  /* Check for a nonlocal goto to a containing function.  Should have
     gotten translated to __builtin_nonlocal_goto.  */
  tree context = decl_function_context (label);
  if (context != 0 && context != current_function_decl)
    abort ();
#endif

  expand_goto_internal (label, label_rtx (label), NULL_RTX);
}

/* Generate RTL code for a `goto' statement with target label BODY.
   LABEL should be a LABEL_REF.
   LAST_INSN, if non-0, is the rtx we should consider as the last
   insn emitted (for the purposes of cleaning up a return).  */

static void
expand_goto_internal (tree body, rtx label, rtx last_insn)
{
  struct nesting *block;
  rtx stack_level = 0;

  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If label has already been defined, we can tell now
     whether and how we must alter the stack level.  */

  if (PREV_INSN (label) != 0)
    {
      /* Find the innermost pending block that contains the label.
         (Check containment by comparing insn-uids.)
         Then restore the outermost stack level within that block,
         and do cleanups of all blocks contained in it.  */
      for (block = block_stack; block; block = block->next)
        {
          if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
            break;
          if (block->data.block.stack_level != 0)
            stack_level = block->data.block.stack_level;
          /* Execute the cleanups for blocks we are exiting.  */
          if (block->data.block.cleanups != 0)
            {
              expand_cleanups (block->data.block.cleanups, 1, 1);
              do_pending_stack_adjust ();
            }
        }

      if (stack_level)
        {
          /* Ensure stack adjust isn't done by emit_jump, as this
             would clobber the stack pointer.  This one should be
             deleted as dead by flow.  */
          clear_pending_stack_adjust ();
          do_pending_stack_adjust ();

          /* Don't do this adjust if it's to the end label and this function
             is to return with a depressed stack pointer.  */
          if (label == return_label
              && (((TREE_CODE (TREE_TYPE (current_function_decl))
                    == FUNCTION_TYPE)
                   && (TYPE_RETURNS_STACK_DEPRESSED
                       (TREE_TYPE (current_function_decl))))))
            ;
          else
            emit_stack_restore (SAVE_BLOCK, stack_level, NULL_RTX);
        }

      if (body != 0 && DECL_TOO_LATE (body))
        error ("jump to `%s' invalidly jumps into binding contour",
               IDENTIFIER_POINTER (DECL_NAME (body)));
    }
  /* Label not yet defined: may need to put this goto
     on the fixup list.  */
  else if (! expand_fixup (body, label, last_insn))
    {
      /* No fixup needed.  Record that the label is the target
         of at least one goto that has no fixup.  */
      if (body != 0)
        TREE_ADDRESSABLE (body) = 1;
    }

  emit_jump (label);
}
\f
/* Generate if necessary a fixup for a goto
   whose target label in tree structure (if any) is TREE_LABEL
   and whose target in rtl is RTL_LABEL.

   If LAST_INSN is nonzero, we pretend that the jump appears
   after insn LAST_INSN instead of at the current point in the insn stream.

   The fixup will be used later to insert insns just before the goto.
   Those insns will restore the stack level as appropriate for the
   target label, and will (in the case of C++) also invoke any object
   destructors which have to be invoked when we exit the scopes which
   are exited by the goto.

   Value is nonzero if a fixup is made.  */

static int
expand_fixup (tree tree_label, rtx rtl_label, rtx last_insn)
{
  struct nesting *block, *end_block;

  /* See if we can recognize which block the label will be output in.
     This is possible in some very common cases.
     If we succeed, set END_BLOCK to that block.
     Otherwise, set it to 0.  */

  if (cond_stack
      && (rtl_label == cond_stack->data.cond.endif_label
          || rtl_label == cond_stack->data.cond.next_label))
    end_block = cond_stack;
  else
    end_block = 0;

  /* Now set END_BLOCK to the binding level to which we will return.  */

  if (end_block)
    {
      struct nesting *next_block = end_block->all;
      block = block_stack;

      /* First see if the END_BLOCK is inside the innermost binding level.
         If so, then no cleanups or stack levels are relevant.  */
      while (next_block && next_block != block)
        next_block = next_block->all;

      if (next_block)
        return 0;

      /* Otherwise, set END_BLOCK to the innermost binding level
         which is outside the relevant control-structure nesting.  */
      next_block = block_stack->next;
      for (block = block_stack; block != end_block; block = block->all)
        if (block == next_block)
          next_block = next_block->next;
      end_block = next_block;
    }

  /* Does any containing block have a stack level or cleanups?
     If not, no fixup is needed, and that is the normal case
     (the only case, for standard C).  */
  for (block = block_stack; block != end_block; block = block->next)
    if (block->data.block.stack_level != 0
        || block->data.block.cleanups != 0)
      break;

  if (block != end_block)
    {
      /* Ok, a fixup is needed.  Add a fixup to the list of such.  */
      struct goto_fixup *fixup = ggc_alloc (sizeof (struct goto_fixup));
      /* In case an old stack level is restored, make sure that comes
         after any pending stack adjust.  */
      /* ?? If the fixup isn't to come at the present position,
         doing the stack adjust here isn't useful.  Doing it with our
         settings at that location isn't useful either.  Let's hope
         someone does it!  */
      if (last_insn == 0)
        do_pending_stack_adjust ();
      fixup->target = tree_label;
      fixup->target_rtl = rtl_label;

      /* Create a BLOCK node and a corresponding matched set of
         NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes at
         this point.  The notes will encapsulate any and all fixup
         code which we might later insert at this point in the insn
         stream.  Also, the BLOCK node will be the parent (i.e. the
         `SUPERBLOCK') of any other BLOCK nodes which we might create
         later on when we are expanding the fixup code.

         Note that optimization passes might move the *_BLOCK notes away,
         so we use a NOTE_INSN_DELETED as a placeholder.  */

      {
        rtx original_before_jump
          = last_insn ? last_insn : get_last_insn ();
        rtx start;
        rtx end;
        tree block;

        block = make_node (BLOCK);
        TREE_USED (block) = 1;

        if (!cfun->x_whole_function_mode_p)
          lang_hooks.decls.insert_block (block);
        else
          {
            BLOCK_CHAIN (block)
              = BLOCK_CHAIN (DECL_INITIAL (current_function_decl));
            BLOCK_CHAIN (DECL_INITIAL (current_function_decl))
              = block;
          }

        start_sequence ();
        start = emit_note (NOTE_INSN_BLOCK_BEG);
        if (cfun->x_whole_function_mode_p)
          NOTE_BLOCK (start) = block;
        fixup->before_jump = emit_note (NOTE_INSN_DELETED);
        end = emit_note (NOTE_INSN_BLOCK_END);
        if (cfun->x_whole_function_mode_p)
          NOTE_BLOCK (end) = block;
        fixup->context = block;
        end_sequence ();
        emit_insn_after (start, original_before_jump);
      }

      fixup->block_start_count = current_block_start_count;
      fixup->stack_level = 0;
      fixup->cleanup_list_list
        = ((block->data.block.outer_cleanups
            || block->data.block.cleanups)
           ? tree_cons (NULL_TREE, block->data.block.cleanups,
                        block->data.block.outer_cleanups)
           : 0);
      fixup->next = goto_fixup_chain;
      goto_fixup_chain = fixup;
    }

  return block != 0;
}
\f
/* Expand any needed fixups in the outermost binding level of the
   function.  FIRST_INSN is the first insn in the function.  */

void
expand_fixups (rtx first_insn)
{
  fixup_gotos (NULL, NULL_RTX, NULL_TREE, first_insn, 0);
}

/* When exiting a binding contour, process all pending gotos requiring fixups.
   THISBLOCK is the structure that describes the block being exited.
   STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
   CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
   FIRST_INSN is the insn that began this contour.

   Gotos that jump out of this contour must restore the
   stack level and do the cleanups before actually jumping.

   DONT_JUMP_IN positive means report error if there is a jump into this
   contour from before the beginning of the contour.  This is also done if
   STACK_LEVEL is nonzero unless DONT_JUMP_IN is negative.  */

static void
fixup_gotos (struct nesting *thisblock, rtx stack_level,
             tree cleanup_list, rtx first_insn, int dont_jump_in)
{
  struct goto_fixup *f, *prev;

  /* F is the fixup we are considering; PREV is the previous one.  */
  /* We run this loop in two passes so that cleanups of exited blocks
     are run first, and blocks that are exited are marked so
     afterwards.  */

  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    {
      /* Test for a fixup that is inactive because it is already handled.  */
      if (f->before_jump == 0)
        {
          /* Delete inactive fixup from the chain, if that is easy to do.  */
          if (prev != 0)
            prev->next = f->next;
        }
      /* Has this fixup's target label been defined?
         If so, we can finalize it.  */
      else if (PREV_INSN (f->target_rtl) != 0)
        {
          rtx cleanup_insns;

          /* If this fixup jumped into this contour from before the beginning
             of this contour, report an error.   This code used to use
             the first non-label insn after f->target_rtl, but that's
             wrong since such can be added, by things like put_var_into_stack
             and have INSN_UIDs that are out of the range of the block.  */
          /* ??? Bug: this does not detect jumping in through intermediate
             blocks that have stack levels or cleanups.
             It detects only a problem with the innermost block
             around the label.  */
          if (f->target != 0
              && (dont_jump_in > 0 || (dont_jump_in == 0 && stack_level)
                  || cleanup_list)
              && INSN_UID (first_insn) < INSN_UID (f->target_rtl)
              && INSN_UID (first_insn) > INSN_UID (f->before_jump)
              && ! DECL_ERROR_ISSUED (f->target))
            {
              error ("%Jlabel '%D' used before containing binding contour",
                     f->target, f->target);
              /* Prevent multiple errors for one label.  */
              DECL_ERROR_ISSUED (f->target) = 1;
            }

          /* We will expand the cleanups into a sequence of their own and
             then later on we will attach this new sequence to the insn
             stream just ahead of the actual jump insn.  */

          start_sequence ();

          /* Temporarily restore the lexical context where we will
             logically be inserting the fixup code.  We do this for the
             sake of getting the debugging information right.  */

          lang_hooks.decls.pushlevel (0);
          lang_hooks.decls.set_block (f->context);

          /* Expand the cleanups for blocks this jump exits.  */
          if (f->cleanup_list_list)
            {
              tree lists;
              for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
                /* Marked elements correspond to blocks that have been closed.
                   Do their cleanups.  */
                if (TREE_ADDRESSABLE (lists)
                    && TREE_VALUE (lists) != 0)
                  {
                    expand_cleanups (TREE_VALUE (lists), 1, 1);
                    /* Pop any pushes done in the cleanups,
                       in case function is about to return.  */
                    do_pending_stack_adjust ();
                  }
            }

          /* Restore stack level for the biggest contour that this
             jump jumps out of.  */
          if (f->stack_level
              && ! (f->target_rtl == return_label
                    && ((TREE_CODE (TREE_TYPE (current_function_decl))
                         == FUNCTION_TYPE)
                        && (TYPE_RETURNS_STACK_DEPRESSED
                            (TREE_TYPE (current_function_decl))))))
            emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);

          /* Finish up the sequence containing the insns which implement the
             necessary cleanups, and then attach that whole sequence to the
             insn stream just ahead of the actual jump insn.  Attaching it
             at that point ensures that any cleanups which are in fact
             implicit C++ object destructions (which must be executed upon
             leaving the block) appear (to the debugger) to be taking place
             in an area of the generated code where the object(s) being
             destructed are still "in scope".  */

          cleanup_insns = get_insns ();
          lang_hooks.decls.poplevel (1, 0, 0);

          end_sequence ();
          emit_insn_after (cleanup_insns, f->before_jump);

          f->before_jump = 0;
        }
    }

  /* For any still-undefined labels, do the cleanups for this block now.
     We must do this now since items in the cleanup list may go out
     of scope when the block ends.  */
  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    if (f->before_jump != 0
        && PREV_INSN (f->target_rtl) == 0
        /* Label has still not appeared.  If we are exiting a block with
           a stack level to restore, that started before the fixup,
           mark this stack level as needing restoration
           when the fixup is later finalized.  */
        && thisblock != 0
        /* Note: if THISBLOCK == 0 and we have a label that hasn't appeared, it
           means the label is undefined.  That's erroneous, but possible.  */
        && (thisblock->data.block.block_start_count
            <= f->block_start_count))
      {
        tree lists = f->cleanup_list_list;
        rtx cleanup_insns;

        for (; lists; lists = TREE_CHAIN (lists))
          /* If the following elt. corresponds to our containing block
             then the elt. must be for this block.  */
          if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
            {
              start_sequence ();
              lang_hooks.decls.pushlevel (0);
              lang_hooks.decls.set_block (f->context);
              expand_cleanups (TREE_VALUE (lists), 1, 1);
              do_pending_stack_adjust ();
              cleanup_insns = get_insns ();
              lang_hooks.decls.poplevel (1, 0, 0);
              end_sequence ();
              if (cleanup_insns != 0)
                f->before_jump
                  = emit_insn_after (cleanup_insns, f->before_jump);

              f->cleanup_list_list = TREE_CHAIN (lists);
            }

        if (stack_level)
          f->stack_level = stack_level;
      }
}
\f
/* Return the number of times character C occurs in string S.  */
static int
n_occurrences (int c, const char *s)
{
  int n = 0;
  while (*s)
    n += (*s++ == c);
  return n;
}
\f
/* Generate RTL for an asm statement (explicit assembler code).
   STRING is a STRING_CST node containing the assembler code text,
   or an ADDR_EXPR containing a STRING_CST.  VOL nonzero means the
   insn is volatile; don't optimize it.  */

void
expand_asm (tree string, int vol)
{
  rtx body;

  if (TREE_CODE (string) == ADDR_EXPR)
    string = TREE_OPERAND (string, 0);

  body = gen_rtx_ASM_INPUT (VOIDmode, TREE_STRING_POINTER (string));

  MEM_VOLATILE_P (body) = vol;

  emit_insn (body);

  clear_last_expr ();
}
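
/* For example, the C statement `asm volatile ("nop");' should arrive
   here with STRING holding the STRING_CST "nop" and VOL == 1, and
   becomes a single volatile ASM_INPUT body in the insn stream.  */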

/* Parse the output constraint pointed to by *CONSTRAINT_P.  It is the
   OPERAND_NUMth output operand, indexed from zero.  There are NINPUTS
   inputs and NOUTPUTS outputs to this extended-asm.  Upon return,
   *ALLOWS_MEM will be TRUE iff the constraint allows the use of a
   memory operand.  Similarly, *ALLOWS_REG will be TRUE iff the
   constraint allows the use of a register operand.  And, *IS_INOUT
   will be true if the operand is read-write, i.e., if it is used as
   an input as well as an output.  If *CONSTRAINT_P is not in
   canonical form, it will be made canonical.  (Note that `+' will be
   replaced with `=' as part of this process.)

   Returns TRUE if all went well; FALSE if an error occurred.  */

bool
parse_output_constraint (const char **constraint_p, int operand_num,
                         int ninputs, int noutputs, bool *allows_mem,
                         bool *allows_reg, bool *is_inout)
{
  const char *constraint = *constraint_p;
  const char *p;

  /* Assume the constraint doesn't allow the use of either a register
     or memory.  */
  *allows_mem = false;
  *allows_reg = false;

  /* Allow the `=' or `+' to not be at the beginning of the string,
     since it wasn't explicitly documented that way, and there is a
     large body of code that puts it last.  Swap the character to
     the front, so as not to uglify any place else.  */
  p = strchr (constraint, '=');
  if (!p)
    p = strchr (constraint, '+');

  /* If the string doesn't contain an `=', issue an error
     message.  */
  if (!p)
    {
      error ("output operand constraint lacks `='");
      return false;
    }

  /* If the constraint begins with `+', then the operand is both read
     from and written to.  */
  *is_inout = (*p == '+');

  /* Canonicalize the output constraint so that it begins with `='.  */
  if (p != constraint || is_inout)
    {
      char *buf;
      size_t c_len = strlen (constraint);

      if (p != constraint)
        warning ("output constraint `%c' for operand %d is not at the beginning",
                 *p, operand_num);

      /* Make a copy of the constraint.  */
      buf = alloca (c_len + 1);
      strcpy (buf, constraint);
      /* Swap the first character and the `=' or `+'.  */
      buf[p - constraint] = buf[0];
      /* Make sure the first character is an `='.  (Until we do this,
         it might be a `+'.)  */
      buf[0] = '=';
      /* Replace the constraint with the canonicalized string.  */
      *constraint_p = ggc_alloc_string (buf, c_len);
      constraint = *constraint_p;
    }

  /* Loop through the constraint string.  */
  for (p = constraint + 1; *p; p += CONSTRAINT_LEN (*p, p))
    switch (*p)
      {
      case '+':
      case '=':
        error ("operand constraint contains incorrectly positioned '+' or '='");
        return false;

      case '%':
        if (operand_num + 1 == ninputs + noutputs)
          {
            error ("`%%' constraint used with last operand");
            return false;
          }
        break;

      case 'V':  case 'm':  case 'o':
        *allows_mem = true;
        break;

      case '?':  case '!':  case '*':  case '&':  case '#':
      case 'E':  case 'F':  case 'G':  case 'H':
      case 's':  case 'i':  case 'n':
      case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
      case 'N':  case 'O':  case 'P':  case ',':
        break;

      case '0':  case '1':  case '2':  case '3':  case '4':
      case '5':  case '6':  case '7':  case '8':  case '9':
      case '[':
        error ("matching constraint not valid in output operand");
        return false;

      case '<':  case '>':
        /* ??? Before flow, auto inc/dec insns are not supposed to exist,
           excepting those that expand_call created.  So match memory
           and hope.  */
        *allows_mem = true;
        break;

      case 'g':  case 'X':
        *allows_reg = true;
        *allows_mem = true;
        break;

      case 'p': case 'r':
        *allows_reg = true;
        break;

      default:
        if (!ISALPHA (*p))
          break;
        if (REG_CLASS_FROM_CONSTRAINT (*p, p) != NO_REGS)
          *allows_reg = true;
#ifdef EXTRA_CONSTRAINT_STR
        else if (EXTRA_ADDRESS_CONSTRAINT (*p, p))
          *allows_reg = true;
        else if (EXTRA_MEMORY_CONSTRAINT (*p, p))
          *allows_mem = true;
        else
          {
            /* Otherwise we can't assume anything about the nature of
               the constraint except that it isn't purely registers.
               Treat it like "g" and hope for the best.  */
            *allows_reg = true;
            *allows_mem = true;
          }
#endif
        break;
      }

  return true;
}
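
/* A minimal usage sketch (assuming a lone output operand whose
   constraint was written in the legacy trailing form "r="):

        const char *c = "r=";
        bool mem, reg, inout;
        parse_output_constraint (&c, 0, 0, 1, &mem, &reg, &inout);

   On return C has been canonicalized to "=r", REG is true, and MEM
   and INOUT are false (a "not at the beginning" warning is also
   issued).  */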

/* Similar, but for input constraints.  */

bool
parse_input_constraint (const char **constraint_p, int input_num,
                        int ninputs, int noutputs, int ninout,
                        const char * const * constraints,
                        bool *allows_mem, bool *allows_reg)
{
  const char *constraint = *constraint_p;
  const char *orig_constraint = constraint;
  size_t c_len = strlen (constraint);
  size_t j;
  bool saw_match = false;

  /* Assume the constraint doesn't allow the use of either
     a register or memory.  */
  *allows_mem = false;
  *allows_reg = false;

  /* Make sure constraint has neither `=', `+', nor '&'.  */

  for (j = 0; j < c_len; j += CONSTRAINT_LEN (constraint[j], constraint+j))
    switch (constraint[j])
      {
      case '+':  case '=':  case '&':
        if (constraint == orig_constraint)
          {
            error ("input operand constraint contains `%c'", constraint[j]);
            return false;
          }
        break;

      case '%':
        if (constraint == orig_constraint
            && input_num + 1 == ninputs - ninout)
          {
            error ("`%%' constraint used with last operand");
            return false;
          }
        break;

      case 'V':  case 'm':  case 'o':
        *allows_mem = true;
        break;

      case '<':  case '>':
      case '?':  case '!':  case '*':  case '#':
      case 'E':  case 'F':  case 'G':  case 'H':
      case 's':  case 'i':  case 'n':
      case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
      case 'N':  case 'O':  case 'P':  case ',':
        break;

        /* Whether or not a numeric constraint allows a register is
           decided by the matching constraint, and so there is no need
           to do anything special with them.  We must handle them in
           the default case, so that we don't unnecessarily force
           operands to memory.  */
      case '0':  case '1':  case '2':  case '3':  case '4':
      case '5':  case '6':  case '7':  case '8':  case '9':
        {
          char *end;
          unsigned long match;

          saw_match = true;

          match = strtoul (constraint + j, &end, 10);
          if (match >= (unsigned long) noutputs)
            {
              error ("matching constraint references invalid operand number");
              return false;
            }

          /* Try and find the real constraint for this dup.  Only do this
             if the matching constraint is the only alternative.  */
          if (*end == '\0'
              && (j == 0 || (j == 1 && constraint[0] == '%')))
            {
              constraint = constraints[match];
              *constraint_p = constraint;
              c_len = strlen (constraint);
              j = 0;
              /* ??? At the end of the loop, we will skip the first part of
                 the matched constraint.  This assumes not only that the
                 other constraint is an output constraint, but also that
                 the '=' or '+' come first.  */
              break;
            }
          else
            j = end - constraint;
          /* Anticipate increment at end of loop.  */
          j--;
        }
        /* Fall through.  */

      case 'p':  case 'r':
        *allows_reg = true;
        break;

      case 'g':  case 'X':
        *allows_reg = true;
        *allows_mem = true;
        break;

      default:
        if (! ISALPHA (constraint[j]))
          {
            error ("invalid punctuation `%c' in constraint", constraint[j]);
            return false;
          }
        if (REG_CLASS_FROM_CONSTRAINT (constraint[j], constraint + j)
            != NO_REGS)
          *allows_reg = true;
#ifdef EXTRA_CONSTRAINT_STR
        else if (EXTRA_ADDRESS_CONSTRAINT (constraint[j], constraint + j))
          *allows_reg = true;
        else if (EXTRA_MEMORY_CONSTRAINT (constraint[j], constraint + j))
          *allows_mem = true;
        else
          {
            /* Otherwise we can't assume anything about the nature of
               the constraint except that it isn't purely registers.
               Treat it like "g" and hope for the best.  */
            *allows_reg = true;
            *allows_mem = true;
          }
#endif
        break;
      }

  if (saw_match && !*allows_reg)
    warning ("matching constraint does not allow a register");

  return true;
}
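
/* For instance (illustrative): given `asm ("..." : "=r" (x) : "0" (y));'
   the input constraint "0" is resolved against CONSTRAINTS[0], i.e.
   "=r", so the input is known to allow a register and is not forced
   to memory.  */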

/* INPUT is one of the input operands from EXPR, an ASM_EXPR.  Returns true
   if it is an operand which must be passed in memory (i.e. an "m"
   constraint), false otherwise.  */

bool
asm_op_is_mem_input (tree input, tree expr)
{
  const char *constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (input)));
  tree outputs = ASM_OUTPUTS (expr);
  int noutputs = list_length (outputs);
  const char **constraints
    = (const char **) alloca ((noutputs) * sizeof (const char *));
  int i = 0;
  bool allows_mem, allows_reg;
  tree t;

  /* Collect output constraints.  */
  for (t = outputs; t ; t = TREE_CHAIN (t), i++)
    constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));

  /* We pass 0 for input_num, ninputs and ninout; they are only used for
     error checking which will be done at expand time.  */
  parse_input_constraint (&constraint, 0, 0, noutputs, 0, constraints,
                          &allows_mem, &allows_reg);
  return (!allows_reg && allows_mem);
}

/* Check for overlap between registers marked in CLOBBERED_REGS and
   anything inappropriate in DECL.  Emit error and return TRUE for error,
   FALSE for ok.  */

static bool
decl_conflicts_with_clobbers_p (tree decl, const HARD_REG_SET clobbered_regs)
{
  /* Conflicts between asm-declared register variables and the clobber
     list are not allowed.  */
  if ((TREE_CODE (decl) == VAR_DECL || TREE_CODE (decl) == PARM_DECL)
      && DECL_REGISTER (decl)
      && REG_P (DECL_RTL (decl))
      && REGNO (DECL_RTL (decl)) < FIRST_PSEUDO_REGISTER)
    {
      rtx reg = DECL_RTL (decl);
      unsigned int regno;

      for (regno = REGNO (reg);
           regno < (REGNO (reg)
                    + hard_regno_nregs[REGNO (reg)][GET_MODE (reg)]);
           regno++)
        if (TEST_HARD_REG_BIT (clobbered_regs, regno))
          {
            error ("asm-specifier for variable `%s' conflicts with asm clobber list",
                   IDENTIFIER_POINTER (DECL_NAME (decl)));

            /* Reset registerness to stop multiple errors emitted for a
               single variable.  */
            DECL_REGISTER (decl) = 0;
            return true;
          }
    }
  return false;
}
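
/* An example of the conflict this catches (illustrative; the register
   name assumes an i386 target):

        register int x asm ("ebx");
        asm ("..." : "=r" (x) : : "ebx");

   Here X lives in a hard register that the asm also claims to clobber,
   so the error above is issued for that operand.  */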

/* Generate RTL for an asm statement with arguments.
   STRING is the instruction template.
   OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
   Each output or input has an expression in the TREE_VALUE and
   a tree list in TREE_PURPOSE which in turn contains a constraint
   name in TREE_VALUE (or NULL_TREE) and a constraint string
   in TREE_PURPOSE.
   CLOBBERS is a list of STRING_CST nodes each naming a hard register
   that is clobbered by this insn.

   Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
   Some elements of OUTPUTS may be replaced with trees representing temporary
   values.  The caller should copy those temporary values to the originally
   specified lvalues.

   VOL nonzero means the insn is volatile; don't optimize it.  */
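
/* To make the parameters concrete (illustrative, i386 syntax):

        asm ("addl %2,%0" : "=r" (out) : "0" (in1), "g" (in2));

   arrives with STRING = "addl %2,%0", one OUTPUTS entry whose
   TREE_VALUE is `out' and whose constraint string is "=r", and two
   INPUTS entries with constraints "0" and "g"; CLOBBERS is empty.  */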

void
expand_asm_operands (tree string, tree outputs, tree inputs,
                     tree clobbers, int vol, location_t locus)
{
  rtvec argvec, constraintvec;
  rtx body;
  int ninputs = list_length (inputs);
  int noutputs = list_length (outputs);
  int ninout;
  int nclobbers;
  HARD_REG_SET clobbered_regs;
  int clobber_conflict_found = 0;
  tree tail;
  tree t;
  int i;
  /* Vector of RTX's of evaluated output operands.  */
  rtx *output_rtx = alloca (noutputs * sizeof (rtx));
  int *inout_opnum = alloca (noutputs * sizeof (int));
  rtx *real_output_rtx = alloca (noutputs * sizeof (rtx));
  enum machine_mode *inout_mode
    = alloca (noutputs * sizeof (enum machine_mode));
  const char **constraints
    = alloca ((noutputs + ninputs) * sizeof (const char *));
  int old_generating_concat_p = generating_concat_p;

  /* An ASM with no outputs needs to be treated as volatile, for now.  */
  if (noutputs == 0)
    vol = 1;

  if (! check_operand_nalternatives (outputs, inputs))
    return;

  string = resolve_asm_operand_names (string, outputs, inputs);

  /* Collect constraints.  */
  i = 0;
  for (t = outputs; t ; t = TREE_CHAIN (t), i++)
    constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
  for (t = inputs; t ; t = TREE_CHAIN (t), i++)
    constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));

  /* Sometimes we wish to automatically clobber registers across an asm.
     Case in point is when the i386 backend moved from cc0 to a hard reg --
     maintaining source-level compatibility means automatically clobbering
     the flags register.  */
  clobbers = targetm.md_asm_clobbers (clobbers);

  /* Count the number of meaningful clobbered registers, ignoring what
     we would ignore later.  */
  nclobbers = 0;
  CLEAR_HARD_REG_SET (clobbered_regs);
  for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
    {
      const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));

      i = decode_reg_name (regname);
      if (i >= 0 || i == -4)
        ++nclobbers;
      else if (i == -2)
        error ("unknown register name `%s' in `asm'", regname);

      /* Mark clobbered registers.  */
      if (i >= 0)
        {
          /* Clobbering the PIC register is an error */
          if (i == (int) PIC_OFFSET_TABLE_REGNUM)
            {
              error ("PIC register `%s' clobbered in `asm'", regname);
              return;
            }

          SET_HARD_REG_BIT (clobbered_regs, i);
        }
    }

  clear_last_expr ();

  /* First pass over inputs and outputs checks validity and sets
     mark_addressable if needed.  */

  ninout = 0;
  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    {
      tree val = TREE_VALUE (tail);
      tree type = TREE_TYPE (val);
      const char *constraint;
      bool is_inout;
      bool allows_reg;
      bool allows_mem;

      /* If there's an erroneous arg, emit no insn.  */
      if (type == error_mark_node)
        return;

      /* Try to parse the output constraint.  If that fails, there's
         no point in going further.  */
      constraint = constraints[i];
      if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
                                    &allows_mem, &allows_reg, &is_inout))
        return;

      if (! allows_reg
          && (allows_mem
              || is_inout
              || (DECL_P (val)
                  && REG_P (DECL_RTL (val))
                  && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
        lang_hooks.mark_addressable (val);

      if (is_inout)
        ninout++;
    }

  ninputs += ninout;
  if (ninputs + noutputs > MAX_RECOG_OPERANDS)
    {
      error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
      return;
    }

  for (i = 0, tail = inputs; tail; i++, tail = TREE_CHAIN (tail))
    {
      bool allows_reg, allows_mem;
      const char *constraint;

      /* If there's an erroneous arg, emit no insn, because the ASM_INPUT
         would get VOIDmode and that could cause a crash in reload.  */
      if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
        return;

      constraint = constraints[i + noutputs];
      if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
                                    constraints, &allows_mem, &allows_reg))
        return;

      if (! allows_reg && allows_mem)
        lang_hooks.mark_addressable (TREE_VALUE (tail));
    }

  /* Second pass evaluates arguments.  */

  ninout = 0;
  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    {
      tree val = TREE_VALUE (tail);
      tree type = TREE_TYPE (val);
      bool is_inout;
      bool allows_reg;
      bool allows_mem;
      rtx op;

      if (!parse_output_constraint (&constraints[i], i, ninputs,
                                    noutputs, &allows_mem, &allows_reg,
                                    &is_inout))
        abort ();

      /* If an output operand is not a decl or indirect ref and our constraint
         allows a register, make a temporary to act as an intermediate.
         Make the asm insn write into that, then our caller will copy it to
         the real output operand.  Likewise for promoted variables.  */

      generating_concat_p = 0;

      real_output_rtx[i] = NULL_RTX;
      if ((TREE_CODE (val) == INDIRECT_REF
           && allows_mem)
          || (DECL_P (val)
              && (allows_mem || REG_P (DECL_RTL (val)))
              && ! (REG_P (DECL_RTL (val))
                    && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
          || ! allows_reg
          || is_inout)
        {
          op = expand_expr (val, NULL_RTX, VOIDmode, EXPAND_WRITE);
          if (GET_CODE (op) == MEM)
            op = validize_mem (op);

          if (! allows_reg && GET_CODE (op) != MEM)
            error ("output number %d not directly addressable", i);
          if ((! allows_mem && GET_CODE (op) == MEM)
              || GET_CODE (op) == CONCAT)
            {
              real_output_rtx[i] = protect_from_queue (op, 1);
              op = gen_reg_rtx (GET_MODE (op));
              if (is_inout)
                emit_move_insn (op, real_output_rtx[i]);
            }
        }
      else
        {
          op = assign_temp (type, 0, 0, 1);
          op = validize_mem (op);
          TREE_VALUE (tail) = make_tree (type, op);
        }
      output_rtx[i] = op;

      generating_concat_p = old_generating_concat_p;

      if (is_inout)
        {
          inout_mode[ninout] = TYPE_MODE (type);
          inout_opnum[ninout++] = i;
        }

      if (decl_conflicts_with_clobbers_p (val, clobbered_regs))
        clobber_conflict_found = 1;
    }

  /* Make vectors for the expression-rtx, constraint strings,
     and named operands.  */

  argvec = rtvec_alloc (ninputs);
  constraintvec = rtvec_alloc (ninputs);

  body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
                                : GET_MODE (output_rtx[0])),
                               TREE_STRING_POINTER (string),
                               empty_string, 0, argvec, constraintvec,
                               locus.file, locus.line);

  MEM_VOLATILE_P (body) = vol;

  /* Eval the inputs and put them into ARGVEC.
     Put their constraints into ASM_INPUTs and store in CONSTRAINTS.  */

  for (i = 0, tail = inputs; tail; tail = TREE_CHAIN (tail), ++i)
    {
      bool allows_reg, allows_mem;
      const char *constraint;
      tree val, type;
      rtx op;

      constraint = constraints[i + noutputs];
      if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
                                    constraints, &allows_mem, &allows_reg))
        abort ();

      generating_concat_p = 0;

      val = TREE_VALUE (tail);
      type = TREE_TYPE (val);
      op = expand_expr (val, NULL_RTX, VOIDmode,
                        (allows_mem && !allows_reg
                         ? EXPAND_MEMORY : EXPAND_NORMAL));

      /* Never pass a CONCAT to an ASM.  */
      if (GET_CODE (op) == CONCAT)
        op = force_reg (GET_MODE (op), op);
      else if (GET_CODE (op) == MEM)
        op = validize_mem (op);

      if (asm_operand_ok (op, constraint) <= 0)
        {
          if (allows_reg)
            op = force_reg (TYPE_MODE (type), op);
          else if (!allows_mem)
            warning ("asm operand %d probably doesn't match constraints",
                     i + noutputs);
          else if (GET_CODE (op) == MEM)
            {
              /* We won't recognize either volatile memory or memory
                 with a queued address as an available memory_operand
1627 at this point. Ignore it: clearly this *is* a memory. */
1628 }
1629 else
1630 {
1631 warning ("use of memory input without lvalue in "
1632 "asm operand %d is deprecated", i + noutputs);
1633
1634 if (CONSTANT_P (op))
1635 {
1636 rtx mem = force_const_mem (TYPE_MODE (type), op);
1637 if (mem)
1638 op = validize_mem (mem);
1639 else
1640 op = force_reg (TYPE_MODE (type), op);
1641 }
1642 if (REG_P (op)
1643 || GET_CODE (op) == SUBREG
1644 || GET_CODE (op) == ADDRESSOF
1645 || GET_CODE (op) == CONCAT)
1646 {
1647 tree qual_type = build_qualified_type (type,
1648 (TYPE_QUALS (type)
1649 | TYPE_QUAL_CONST));
1650 rtx memloc = assign_temp (qual_type, 1, 1, 1);
1651 memloc = validize_mem (memloc);
1652 emit_move_insn (memloc, op);
1653 op = memloc;
1654 }
1655 }
1656 }
1657
1658 generating_concat_p = old_generating_concat_p;
1659 ASM_OPERANDS_INPUT (body, i) = op;
1660
1661 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
1662 = gen_rtx_ASM_INPUT (TYPE_MODE (type), constraints[i + noutputs]);
1663
1664 if (decl_conflicts_with_clobbers_p (val, clobbered_regs))
1665 clobber_conflict_found = 1;
1666 }
1667
1668 /* Protect all the operands from the queue now that they have all been
1669 evaluated. */
1670
1671 generating_concat_p = 0;
1672
1673 for (i = 0; i < ninputs - ninout; i++)
1674 ASM_OPERANDS_INPUT (body, i)
1675 = protect_from_queue (ASM_OPERANDS_INPUT (body, i), 0);
1676
1677 for (i = 0; i < noutputs; i++)
1678 output_rtx[i] = protect_from_queue (output_rtx[i], 1);
1679
1680 /* For in-out operands, copy output rtx to input rtx. */
1681 for (i = 0; i < ninout; i++)
1682 {
1683 int j = inout_opnum[i];
1684 char buffer[16];
1685
1686 ASM_OPERANDS_INPUT (body, ninputs - ninout + i)
1687 = output_rtx[j];
1688
1689 sprintf (buffer, "%d", j);
1690 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, ninputs - ninout + i)
1691 = gen_rtx_ASM_INPUT (inout_mode[i], ggc_strdup (buffer));
1692 }
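  /* Illustration only (hypothetical source, not part of this file): a
     GNU C in-out operand such as

	int x;
	asm ("incl %0" : "+r" (x));

     reaches this loop as output 0 with is_inout set.  The loop adds a
     hidden input that reuses output_rtx[0], with the digit-string
     constraint "0" built by the sprintf above, tying the input and the
     output to the same location.  */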
1693
1694 generating_concat_p = old_generating_concat_p;
1695
1696 /* Now, for each output, construct an rtx
1697 (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
1698 ARGVEC CONSTRAINTS OPNAMES))
1699 If there is more than one, put them inside a PARALLEL. */
1700
1701 if (noutputs == 1 && nclobbers == 0)
1702 {
1703 ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = constraints[0];
1704 emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
1705 }
1706
1707 else if (noutputs == 0 && nclobbers == 0)
1708 {
1709 /* No output operands: put in a raw ASM_OPERANDS rtx. */
1710 emit_insn (body);
1711 }
1712
1713 else
1714 {
1715 rtx obody = body;
1716 int num = noutputs;
1717
1718 if (num == 0)
1719 num = 1;
1720
1721 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
1722
1723 /* For each output operand, store a SET. */
1724 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1725 {
1726 XVECEXP (body, 0, i)
1727 = gen_rtx_SET (VOIDmode,
1728 output_rtx[i],
1729 gen_rtx_ASM_OPERANDS
1730 (GET_MODE (output_rtx[i]),
1731 TREE_STRING_POINTER (string),
1732 constraints[i], i, argvec, constraintvec,
1733 locus.file, locus.line));
1734
1735 MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
1736 }
1737
1738 /* If there are no outputs (but there are some clobbers)
1739 store the bare ASM_OPERANDS into the PARALLEL. */
1740
1741 if (i == 0)
1742 XVECEXP (body, 0, i++) = obody;
1743
1744 /* Store (clobber REG) for each clobbered register specified. */
1745
1746 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1747 {
1748 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1749 int j = decode_reg_name (regname);
1750 rtx clobbered_reg;
1751
1752 if (j < 0)
1753 {
1754 if (j == -3) /* `cc', which is not a register */
1755 continue;
1756
1757 if (j == -4) /* `memory', don't cache memory across asm */
1758 {
1759 XVECEXP (body, 0, i++)
1760 = gen_rtx_CLOBBER (VOIDmode,
1761 gen_rtx_MEM
1762 (BLKmode,
1763 gen_rtx_SCRATCH (VOIDmode)));
1764 continue;
1765 }
1766
1767 /* Ignore unknown register, error already signaled. */
1768 continue;
1769 }
1770
1771 /* Use QImode since that's guaranteed to clobber just one reg. */
1772 clobbered_reg = gen_rtx_REG (QImode, j);
1773
1774 /* Sanity-check for any overlap between clobbers and the inputs
1775 and outputs that has not already been handled. Such overlap
1776 should have been detected and reported above. */
1777 if (!clobber_conflict_found)
1778 {
1779 int opno;
1780
1781 /* We test the old body (obody) contents to avoid tripping
1782 over the under-construction body. */
1783 for (opno = 0; opno < noutputs; opno++)
1784 if (reg_overlap_mentioned_p (clobbered_reg, output_rtx[opno]))
1785 internal_error ("asm clobber conflict with output operand");
1786
1787 for (opno = 0; opno < ninputs - ninout; opno++)
1788 if (reg_overlap_mentioned_p (clobbered_reg,
1789 ASM_OPERANDS_INPUT (obody, opno)))
1790 internal_error ("asm clobber conflict with input operand");
1791 }
1792
1793 XVECEXP (body, 0, i++)
1794 = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
1795 }
1796
1797 emit_insn (body);
1798 }
1799
1800 /* For any outputs that needed reloading into registers, spill them
1801 back to where they belong. */
1802 for (i = 0; i < noutputs; ++i)
1803 if (real_output_rtx[i])
1804 emit_move_insn (real_output_rtx[i], output_rtx[i]);
1805
1806 free_temp_slots ();
1807 }
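/* A hedged sketch of the insns emitted above, for a hypothetical

	asm ("incl %0" : "=r" (x) : : "cc", "memory");

   Because clobbers are present, the result is a PARALLEL roughly like

	(parallel [(set (reg x) (asm_operands "incl %0" "=r" 0 ...))
		   (clobber (mem:BLK (scratch)))])

   where `cc' is dropped on targets whose decode_reg_name returns -3
   for it, and `memory' becomes the (mem:BLK (scratch)) clobber.  A
   single-output asm with no clobbers would emit a bare SET instead,
   and one with neither outputs nor clobbers a bare ASM_OPERANDS.  */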
1808
1809 void
1810 expand_asm_expr (tree exp)
1811 {
1812 int noutputs, i;
1813 tree outputs, tail;
1814 tree *o;
1815
1816 if (ASM_INPUT_P (exp))
1817 {
1818 expand_asm (ASM_STRING (exp), ASM_VOLATILE_P (exp));
1819 return;
1820 }
1821
1822 outputs = ASM_OUTPUTS (exp);
1823 noutputs = list_length (outputs);
1824 /* o[I] is the place that output number I should be written. */
1825 o = (tree *) alloca (noutputs * sizeof (tree));
1826
1827 /* Record the contents of OUTPUTS before it is modified. */
1828 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1829 o[i] = TREE_VALUE (tail);
1830
1831 /* Generate the ASM_OPERANDS insn; store into the TREE_VALUEs of
1832 OUTPUTS some trees for where the values were actually stored. */
1833 expand_asm_operands (ASM_STRING (exp), outputs, ASM_INPUTS (exp),
1834 ASM_CLOBBERS (exp), ASM_VOLATILE_P (exp),
1835 input_location);
1836
1837 /* Copy all the intermediate outputs into the specified outputs. */
1838 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1839 {
1840 if (o[i] != TREE_VALUE (tail))
1841 {
1842 expand_assignment (o[i], TREE_VALUE (tail), 0);
1843 free_temp_slots ();
1844
1845 /* Restore the original value so that it's correct the next
1846 time we expand this function. */
1847 TREE_VALUE (tail) = o[i];
1848 }
1849 }
1850
1851 /* Those MODIFY_EXPRs could do autoincrements. */
1852 emit_queue ();
1853 }
1854
1855 /* A subroutine of expand_asm_operands. Check that all operands have
1856 the same number of alternatives. Return true if so. */
1857
1858 static bool
1859 check_operand_nalternatives (tree outputs, tree inputs)
1860 {
1861 if (outputs || inputs)
1862 {
1863 tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
1864 int nalternatives
1865 = n_occurrences (',', TREE_STRING_POINTER (TREE_VALUE (tmp)));
1866 tree next = inputs;
1867
1868 if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
1869 {
1870 error ("too many alternatives in `asm'");
1871 return false;
1872 }
1873
1874 tmp = outputs;
1875 while (tmp)
1876 {
1877 const char *constraint
1878 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (tmp)));
1879
1880 if (n_occurrences (',', constraint) != nalternatives)
1881 {
1882 error ("operand constraints for `asm' differ in number of alternatives");
1883 return false;
1884 }
1885
1886 if (TREE_CHAIN (tmp))
1887 tmp = TREE_CHAIN (tmp);
1888 else
1889 tmp = next, next = 0;
1890 }
1891 }
1892
1893 return true;
1894 }
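/* Illustration of the check above (hypothetical source): in

	asm ("..." : "=r,m" (x) : "r,m" (y));

   every constraint contains one comma, i.e. two alternatives, so the
   check passes; mixing "=r,m" with a plain "r" input would trigger the
   "differ in number of alternatives" error.  */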
1895
1896 /* A subroutine of expand_asm_operands. Check that all operand names
1897 are unique. Return true if so. The operand names are stored as
1898 STRING_CST nodes rather than identifiers, so we compare their
1899 contents with simple_cst_equal below. */
1900
1901 static bool
1902 check_unique_operand_names (tree outputs, tree inputs)
1903 {
1904 tree i, j;
1905
1906 for (i = outputs; i ; i = TREE_CHAIN (i))
1907 {
1908 tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
1909 if (! i_name)
1910 continue;
1911
1912 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
1913 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1914 goto failure;
1915 }
1916
1917 for (i = inputs; i ; i = TREE_CHAIN (i))
1918 {
1919 tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
1920 if (! i_name)
1921 continue;
1922
1923 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
1924 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1925 goto failure;
1926 for (j = outputs; j ; j = TREE_CHAIN (j))
1927 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1928 goto failure;
1929 }
1930
1931 return true;
1932
1933 failure:
1934 error ("duplicate asm operand name '%s'",
1935 TREE_STRING_POINTER (TREE_PURPOSE (TREE_PURPOSE (i))));
1936 return false;
1937 }
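/* Illustration of the failure detected above (hypothetical source):

	asm ("" : [val] "=r" (a), [val] "=r" (b));

   Both outputs use the symbolic name `val', so the simple_cst_equal
   comparison of the name strings reports a duplicate.  */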
1938
1939 /* A subroutine of expand_asm_operands. Resolve the names of the operands
1940 in OUTPUTS and INPUTS to numbers, and replace the name expansions in
1941 STRING and in the constraints with those numbers. */
1942
1943 tree
1944 resolve_asm_operand_names (tree string, tree outputs, tree inputs)
1945 {
1946 char *buffer;
1947 char *p;
1948 const char *c;
1949 tree t;
1950
1951 check_unique_operand_names (outputs, inputs);
1952
1953 /* Substitute [<name>] in input constraint strings. There should be no
1954 named operands in output constraints. */
1955 for (t = inputs; t ; t = TREE_CHAIN (t))
1956 {
1957 c = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
1958 if (strchr (c, '[') != NULL)
1959 {
1960 p = buffer = xstrdup (c);
1961 while ((p = strchr (p, '[')) != NULL)
1962 p = resolve_operand_name_1 (p, outputs, inputs);
1963 TREE_VALUE (TREE_PURPOSE (t))
1964 = build_string (strlen (buffer), buffer);
1965 free (buffer);
1966 }
1967 }
1968
1969 /* Now check for any needed substitutions in the template. */
1970 c = TREE_STRING_POINTER (string);
1971 while ((c = strchr (c, '%')) != NULL)
1972 {
1973 if (c[1] == '[')
1974 break;
1975 else if (ISALPHA (c[1]) && c[2] == '[')
1976 break;
1977 else
1978 {
1979 c += 1;
1980 continue;
1981 }
1982 }
1983
1984 if (c)
1985 {
1986 /* OK, we need to make a copy so we can perform the substitutions.
1987 Assume that we will not need extra space--we get to remove '['
1988 and ']', which means we cannot have a problem until we have more
1989 than 999 operands. */
1990 buffer = xstrdup (TREE_STRING_POINTER (string));
1991 p = buffer + (c - TREE_STRING_POINTER (string));
1992
1993 while ((p = strchr (p, '%')) != NULL)
1994 {
1995 if (p[1] == '[')
1996 p += 1;
1997 else if (ISALPHA (p[1]) && p[2] == '[')
1998 p += 2;
1999 else
2000 {
2001 p += 1;
2002 continue;
2003 }
2004
2005 p = resolve_operand_name_1 (p, outputs, inputs);
2006 }
2007
2008 string = build_string (strlen (buffer), buffer);
2009 free (buffer);
2010 }
2011
2012 return string;
2013 }
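/* A sketch of the substitution performed above, for hypothetical source

	asm ("mov %[src], %[dst]" : [dst] "=r" (x) : [src] "r" (y));

   Operands are numbered with outputs first, so [dst] is 0 and [src]
   is 1; the template becomes "mov %1, %0", and any [name] references
   inside input constraints are rewritten the same way.  */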
2014
2015 /* A subroutine of resolve_operand_names. P points to the '[' for a
2016 potential named operand of the form [<name>]. In place, replace
2017 the name and brackets with a number. Return a pointer to the
2018 balance of the string after substitution. */
2019
2020 static char *
2021 resolve_operand_name_1 (char *p, tree outputs, tree inputs)
2022 {
2023 char *q;
2024 int op;
2025 tree t;
2026 size_t len;
2027
2028 /* Collect the operand name. */
2029 q = strchr (p, ']');
2030 if (!q)
2031 {
2032 error ("missing close brace for named operand");
2033 return strchr (p, '\0');
2034 }
2035 len = q - p - 1;
2036
2037 /* Resolve the name to a number. */
2038 for (op = 0, t = outputs; t ; t = TREE_CHAIN (t), op++)
2039 {
2040 tree name = TREE_PURPOSE (TREE_PURPOSE (t));
2041 if (name)
2042 {
2043 const char *c = TREE_STRING_POINTER (name);
2044 if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
2045 goto found;
2046 }
2047 }
2048 for (t = inputs; t ; t = TREE_CHAIN (t), op++)
2049 {
2050 tree name = TREE_PURPOSE (TREE_PURPOSE (t));
2051 if (name)
2052 {
2053 const char *c = TREE_STRING_POINTER (name);
2054 if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
2055 goto found;
2056 }
2057 }
2058
2059 *q = '\0';
2060 error ("undefined named operand '%s'", p + 1);
2061 op = 0;
2062 found:
2063
2064 /* Replace the name with the number. Unfortunately, not all libraries
2065 get the return value of sprintf correct, so search for the end of the
2066 generated string by hand. */
2067 sprintf (p, "%d", op);
2068 p = strchr (p, '\0');
2069
2070 /* Verify the no extra buffer space assumption. */
2071 if (p > q)
2072 abort ();
2073
2074 /* Shift the rest of the buffer down to fill the gap. */
2075 memmove (p, q + 1, strlen (q + 1) + 1);
2076
2077 return p;
2078 }
2079 \f
2080 /* Generate RTL to evaluate the expression EXP
2081 and remember it in case this is the VALUE in a ({... VALUE; }) construct.
2082 Provided just for backward compatibility; expand_expr_stmt_value ()
2083 should be used for new code. */
2084
2085 void
2086 expand_expr_stmt (tree exp)
2087 {
2088 expand_expr_stmt_value (exp, -1, 1);
2089 }
2090
2091 /* Generate RTL to evaluate the expression EXP. WANT_VALUE tells
2092 whether to (1) save the value of the expression, (0) discard it or
2093 (-1) use expr_stmts_for_value to tell. The use of -1 is
2094 deprecated, and retained only for backward compatibility. */
2095
2096 void
2097 expand_expr_stmt_value (tree exp, int want_value, int maybe_last)
2098 {
2099 rtx value;
2100 tree type;
2101 rtx alt_rtl = NULL;
2102
2103 if (want_value == -1)
2104 want_value = expr_stmts_for_value != 0;
2105
2106 /* If -Wunused-value, warn about statements with no side effects,
2107 except for an explicit cast to void (e.g. for assert()), and
2108 except for the last statement in ({...}), where the value may be useful. */
2109 if (! want_value
2110 && (expr_stmts_for_value == 0 || ! maybe_last)
2111 && exp != error_mark_node
2112 && warn_unused_value)
2113 {
2114 if (TREE_SIDE_EFFECTS (exp))
2115 warn_if_unused_value (exp);
2116 else if (!VOID_TYPE_P (TREE_TYPE (exp)) && !TREE_NO_WARNING (exp))
2117 warning ("%Hstatement with no effect", &emit_locus);
2118 }
2119
2120 /* If EXP is of function type and we are expanding statements for
2121 value, convert it to pointer-to-function. */
2122 if (want_value && TREE_CODE (TREE_TYPE (exp)) == FUNCTION_TYPE)
2123 exp = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (exp)), exp);
2124
2125 /* The call to `expand_expr' could cause last_expr_type and
2126 last_expr_value to get reset. Therefore, we set last_expr_value
2127 and last_expr_type *after* calling expand_expr. */
2128 value = expand_expr_real (exp, want_value ? NULL_RTX : const0_rtx,
2129 VOIDmode, 0, &alt_rtl);
2130 type = TREE_TYPE (exp);
2131
2132 /* If all we do is reference a volatile value in memory,
2133 copy it to a register to be sure it is actually touched. */
2134 if (value && GET_CODE (value) == MEM && TREE_THIS_VOLATILE (exp))
2135 {
2136 if (TYPE_MODE (type) == VOIDmode)
2137 ;
2138 else if (TYPE_MODE (type) != BLKmode)
2139 value = copy_to_reg (value);
2140 else
2141 {
2142 rtx lab = gen_label_rtx ();
2143
2144 /* Compare the value with itself to reference it. */
2145 emit_cmp_and_jump_insns (value, value, EQ,
2146 expand_expr (TYPE_SIZE (type),
2147 NULL_RTX, VOIDmode, 0),
2148 BLKmode, 0, lab);
2149 emit_label (lab);
2150 }
2151 }
2152
2153 /* If this expression is part of a ({...}) and is in memory, we may have
2154 to preserve temporaries. */
2155 preserve_temp_slots (value);
2156
2157 /* Free any temporaries used to evaluate this expression. Any temporary
2158 used as a result of this expression will already have been preserved
2159 above. */
2160 free_temp_slots ();
2161
2162 if (want_value)
2163 {
2164 last_expr_value = value;
2165 last_expr_alt_rtl = alt_rtl;
2166 last_expr_type = type;
2167 }
2168
2169 emit_queue ();
2170 }
2171
2172 /* Warn if EXP contains any computations whose results are not used.
2173 Return 1 if a warning is printed; 0 otherwise. */
2174
2175 int
2176 warn_if_unused_value (tree exp)
2177 {
2178 if (TREE_USED (exp))
2179 return 0;
2180
2181 /* Don't warn about void constructs. This includes casting to void,
2182 void function calls, and statement expressions with a final cast
2183 to void. */
2184 if (VOID_TYPE_P (TREE_TYPE (exp)))
2185 return 0;
2186
2187 switch (TREE_CODE (exp))
2188 {
2189 case PREINCREMENT_EXPR:
2190 case POSTINCREMENT_EXPR:
2191 case PREDECREMENT_EXPR:
2192 case POSTDECREMENT_EXPR:
2193 case MODIFY_EXPR:
2194 case INIT_EXPR:
2195 case TARGET_EXPR:
2196 case CALL_EXPR:
2197 case RTL_EXPR:
2198 case TRY_CATCH_EXPR:
2199 case WITH_CLEANUP_EXPR:
2200 case EXIT_EXPR:
2201 return 0;
2202
2203 case BIND_EXPR:
2204 /* For a binding, warn if no side effect within it. */
2205 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2206
2207 case SAVE_EXPR:
2208 return warn_if_unused_value (TREE_OPERAND (exp, 0));
2209
2210 case TRUTH_ORIF_EXPR:
2211 case TRUTH_ANDIF_EXPR:
2212 /* In && or ||, warn if 2nd operand has no side effect. */
2213 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2214
2215 case COMPOUND_EXPR:
2216 if (TREE_NO_WARNING (exp))
2217 return 0;
2218 if (warn_if_unused_value (TREE_OPERAND (exp, 0)))
2219 return 1;
2220 /* Let people do `(foo (), 0)' without a warning. */
2221 if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
2222 return 0;
2223 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2224
2225 case NOP_EXPR:
2226 case CONVERT_EXPR:
2227 case NON_LVALUE_EXPR:
2228 /* Don't warn about conversions not explicit in the user's program. */
2229 if (TREE_NO_WARNING (exp))
2230 return 0;
2231 /* Assignment to a cast usually results in a cast of a modify.
2232 Don't complain about that. There can be an arbitrary number of
2233 casts before the modify, so we must loop until we find the first
2234 non-cast expression and then test to see if that is a modify. */
2235 {
2236 tree tem = TREE_OPERAND (exp, 0);
2237
2238 while (TREE_CODE (tem) == CONVERT_EXPR || TREE_CODE (tem) == NOP_EXPR)
2239 tem = TREE_OPERAND (tem, 0);
2240
2241 if (TREE_CODE (tem) == MODIFY_EXPR || TREE_CODE (tem) == INIT_EXPR
2242 || TREE_CODE (tem) == CALL_EXPR)
2243 return 0;
2244 }
2245 goto maybe_warn;
2246
2247 case INDIRECT_REF:
2248 /* Don't warn about automatic dereferencing of references, since
2249 the user cannot control it. */
2250 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
2251 return warn_if_unused_value (TREE_OPERAND (exp, 0));
2252 /* Fall through. */
2253
2254 default:
2255 /* Referencing a volatile value is a side effect, so don't warn. */
2256 if ((DECL_P (exp)
2257 || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
2258 && TREE_THIS_VOLATILE (exp))
2259 return 0;
2260
2261 /* If this is an expression which has no operands, there is no value
2262 to be unused. There are no such language-independent codes,
2263 but front ends may define such. */
2264 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'e'
2265 && TREE_CODE_LENGTH (TREE_CODE (exp)) == 0)
2266 return 0;
2267
2268 maybe_warn:
2269 /* If this is an expression with side effects, don't warn. */
2270 if (TREE_SIDE_EFFECTS (exp))
2271 return 0;
2272
2273 warning ("%Hvalue computed is not used", &emit_locus);
2274 return 1;
2275 }
2276 }
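/* Illustrative statements for the walk above (hypothetical C source):

	x + 1;		warns: the computed value is discarded
	(void) f ();	no warning: explicit cast to void
	y = f ();	no warning: MODIFY_EXPR is listed above
	*vp;		no warning if VP points to volatile: the
			reference itself is a side effect  */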
2277
2278 /* Clear out the memory of the last expression evaluated. */
2279
2280 void
2281 clear_last_expr (void)
2282 {
2283 last_expr_type = NULL_TREE;
2284 last_expr_value = NULL_RTX;
2285 last_expr_alt_rtl = NULL_RTX;
2286 }
2287
2288 /* Begin a statement-expression, i.e., a series of statements which
2289 may return a value. Return the RTL_EXPR for this statement expr.
2290 The caller must save that value and pass it to
2291 expand_end_stmt_expr. If HAS_SCOPE is nonzero, temporaries created
2292 in the statement-expression are deallocated at the end of the
2293 expression. */
2294
2295 tree
2296 expand_start_stmt_expr (int has_scope)
2297 {
2298 tree t;
2299
2300 /* Make the RTL_EXPR node temporary, not momentary,
2301 so that rtl_expr_chain doesn't become garbage. */
2302 t = make_node (RTL_EXPR);
2303 do_pending_stack_adjust ();
2304 if (has_scope)
2305 start_sequence_for_rtl_expr (t);
2306 else
2307 start_sequence ();
2308 NO_DEFER_POP;
2309 expr_stmts_for_value++;
2310 return t;
2311 }
2312
2313 /* Restore the previous state at the end of a statement that returns a value.
2314 Returns a tree node representing the statement's value and the
2315 insns to compute the value.
2316
2317 The nodes of that expression have been freed by now, so we cannot use them.
2318 But we don't want to do that anyway; the expression has already been
2319 evaluated and now we just want to use the value. So generate a RTL_EXPR
2320 with the proper type and RTL value.
2321
2322 If the last substatement was not an expression,
2323 return something with type `void'. */
2324
2325 tree
2326 expand_end_stmt_expr (tree t)
2327 {
2328 OK_DEFER_POP;
2329
2330 if (! last_expr_value || ! last_expr_type)
2331 {
2332 last_expr_value = const0_rtx;
2333 last_expr_alt_rtl = NULL_RTX;
2334 last_expr_type = void_type_node;
2335 }
2336 else if (!REG_P (last_expr_value) && ! CONSTANT_P (last_expr_value))
2337 /* Remove any possible QUEUED. */
2338 last_expr_value = protect_from_queue (last_expr_value, 0);
2339
2340 emit_queue ();
2341
2342 TREE_TYPE (t) = last_expr_type;
2343 RTL_EXPR_RTL (t) = last_expr_value;
2344 RTL_EXPR_ALT_RTL (t) = last_expr_alt_rtl;
2345 RTL_EXPR_SEQUENCE (t) = get_insns ();
2346
2347 rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);
2348
2349 end_sequence ();
2350
2351 /* Don't consider deleting this expr or containing exprs at tree level. */
2352 TREE_SIDE_EFFECTS (t) = 1;
2353 /* Propagate volatility of the actual RTL expr. */
2354 TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value);
2355
2356 clear_last_expr ();
2357 expr_stmts_for_value--;
2358
2359 return t;
2360 }
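/* Illustrative GNU C statement expression for the pair of functions
   above (front-end details elided):

	z = ({ int t = f (); t + 1; });

   The front end calls expand_start_stmt_expr before expanding the
   body; the value of the last statement, `t + 1', is recorded in
   last_expr_value, and expand_end_stmt_expr packages the accumulated
   insns and that value into the RTL_EXPR it returns.  */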
2361 \f
2362 /* Generate RTL for the start of an if-then. COND is the expression
2363 whose truth should be tested.
2364
2365 If EXITFLAG is nonzero, this conditional is visible to
2366 `exit_something'. */
2367
2368 void
2369 expand_start_cond (tree cond, int exitflag)
2370 {
2371 struct nesting *thiscond = ALLOC_NESTING ();
2372
2373 /* Make an entry on cond_stack for the cond we are entering. */
2374
2375 thiscond->desc = COND_NESTING;
2376 thiscond->next = cond_stack;
2377 thiscond->all = nesting_stack;
2378 thiscond->depth = ++nesting_depth;
2379 thiscond->data.cond.next_label = gen_label_rtx ();
2380 /* Before we encounter an `else', we don't need a separate exit label
2381 unless there are supposed to be exit statements
2382 to exit this conditional. */
2383 thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
2384 thiscond->data.cond.endif_label = thiscond->exit_label;
2385 cond_stack = thiscond;
2386 nesting_stack = thiscond;
2387
2388 do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
2389 }
2390
2391 /* Generate RTL between then-clause and the elseif-clause
2392 of an if-then-elseif-.... */
2393
2394 void
2395 expand_start_elseif (tree cond)
2396 {
2397 if (cond_stack->data.cond.endif_label == 0)
2398 cond_stack->data.cond.endif_label = gen_label_rtx ();
2399 emit_jump (cond_stack->data.cond.endif_label);
2400 emit_label (cond_stack->data.cond.next_label);
2401 cond_stack->data.cond.next_label = gen_label_rtx ();
2402 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2403 }
2404
2405 /* Generate RTL between the then-clause and the else-clause
2406 of an if-then-else. */
2407
2408 void
2409 expand_start_else (void)
2410 {
2411 if (cond_stack->data.cond.endif_label == 0)
2412 cond_stack->data.cond.endif_label = gen_label_rtx ();
2413
2414 emit_jump (cond_stack->data.cond.endif_label);
2415 emit_label (cond_stack->data.cond.next_label);
2416 cond_stack->data.cond.next_label = 0; /* No more _else or _elseif calls. */
2417 }
2418
2419 /* After calling expand_start_else, turn this "else" into an "else if"
2420 by providing another condition. */
2421
2422 void
2423 expand_elseif (tree cond)
2424 {
2425 cond_stack->data.cond.next_label = gen_label_rtx ();
2426 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2427 }
2428
2429 /* Generate RTL for the end of an if-then.
2430 Pop the record for it off of cond_stack. */
2431
2432 void
2433 expand_end_cond (void)
2434 {
2435 struct nesting *thiscond = cond_stack;
2436
2437 do_pending_stack_adjust ();
2438 if (thiscond->data.cond.next_label)
2439 emit_label (thiscond->data.cond.next_label);
2440 if (thiscond->data.cond.endif_label)
2441 emit_label (thiscond->data.cond.endif_label);
2442
2443 POPSTACK (cond_stack);
2444 clear_last_expr ();
2445 }
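/* Sketch of the calls a front end makes for a hypothetical

	if (a) s1; else if (b) s2; else s3;

	expand_start_cond (a, 0);	... expand s1 ...
	expand_start_elseif (b);	... expand s2 ...
	expand_start_else ();		... expand s3 ...
	expand_end_cond ();

   Each false condition jumps to next_label; the functions above emit
   the jumps to endif_label at the end of each clause.  */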
2446 \f
2447 /* Return nonzero if we should preserve sub-expressions as separate
2448 pseudos. We always do so if -fexpensive-optimizations;
2449 otherwise we do so only when optimizing. */
2450
2451 int
2452 preserve_subexpressions_p (void)
2453 {
2454 if (flag_expensive_optimizations)
2455 return 1;
2456
2457 if (optimize == 0 || cfun == 0 || cfun->stmt == 0)
2458 return 0;
2459
2460 return 1;
2461 }
2462
2463 \f
2464 /* Generate RTL to return from the current function, with no value.
2465 (That is, we do not do anything about returning any value.) */
2466
2467 void
2468 expand_null_return (void)
2469 {
2470 rtx last_insn;
2471
2472 last_insn = get_last_insn ();
2473
2474 /* If this function was declared to return a value, but we
2475 didn't, clobber the return registers so that they are not
2476 propagated live to the rest of the function. */
2477 clobber_return_register ();
2478
2479 expand_null_return_1 (last_insn);
2480 }
2481
2482 /* Generate RTL to return directly from the current function.
2483 (That is, we bypass any return value.) */
2484
2485 void
2486 expand_naked_return (void)
2487 {
2488 rtx last_insn, end_label;
2489
2490 last_insn = get_last_insn ();
2491 end_label = naked_return_label;
2492
2493 clear_pending_stack_adjust ();
2494 do_pending_stack_adjust ();
2495 clear_last_expr ();
2496
2497 if (end_label == 0)
2498 end_label = naked_return_label = gen_label_rtx ();
2499 expand_goto_internal (NULL_TREE, end_label, last_insn);
2500 }
2501
2502 /* Try to guess whether the return value indicates an error code. */
2503 static enum br_predictor
2504 return_prediction (rtx val)
2505 {
2506 /* Different heuristics for pointers and scalars. */
2507 if (POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (current_function_decl))))
2508 {
2509 /* NULL is usually not returned. */
2510 if (val == const0_rtx)
2511 return PRED_NULL_RETURN;
2512 }
2513 else
2514 {
2515 /* Negative return values are often used to indicate
2516 errors. */
2517 if (GET_CODE (val) == CONST_INT
2518 && INTVAL (val) < 0)
2519 return PRED_NEGATIVE_RETURN;
2520 /* Constant return values are also usually errors;
2521 zero and one often mean booleans, so exclude them from
2522 this heuristic. */
2523 if (CONSTANT_P (val)
2524 && (val != const0_rtx && val != const1_rtx))
2525 return PRED_CONST_RETURN;
2526 }
2527 return PRED_NO_PREDICTION;
2528 }
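/* Hypothetical examples of the heuristics above:

	return NULL;	pointer-valued function: PRED_NULL_RETURN
	return -1;	scalar function: PRED_NEGATIVE_RETURN
	return 42;	scalar function: PRED_CONST_RETURN
	return 0;	no prediction; 0 and 1 look like booleans  */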
2529
2530
2531 /* If the current function returns values in the most significant part
2532 of a register, shift return value VAL appropriately. The mode of
2533 the function's return type is known not to be BLKmode. */
2534
2535 static rtx
2536 shift_return_value (rtx val)
2537 {
2538 tree type;
2539
2540 type = TREE_TYPE (DECL_RESULT (current_function_decl));
2541 if (targetm.calls.return_in_msb (type))
2542 {
2543 rtx target;
2544 HOST_WIDE_INT shift;
2545
2546 target = DECL_RTL (DECL_RESULT (current_function_decl));
2547 shift = (GET_MODE_BITSIZE (GET_MODE (target))
2548 - BITS_PER_UNIT * int_size_in_bytes (type));
2549 if (shift > 0)
2550 val = expand_binop (GET_MODE (target), ashl_optab,
2551 gen_lowpart (GET_MODE (target), val),
2552 GEN_INT (shift), target, 1, OPTAB_WIDEN);
2553 }
2554 return val;
2555 }
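/* Worked example, assuming a return_in_msb target with 32-bit return
   registers: for a 3-byte structure, shift = 32 - 8*3 = 8, so VAL is
   shifted left 8 bits to occupy the most significant 24 bits of the
   return register.  */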
2556
2557
2558 /* Generate RTL to return from the current function, with value VAL. */
2559
2560 static void
2561 expand_value_return (rtx val)
2562 {
2563 rtx last_insn;
2564 rtx return_reg;
2565 enum br_predictor pred;
2566
2567 if (flag_guess_branch_prob
2568 && (pred = return_prediction (val)) != PRED_NO_PREDICTION)
2569 {
2570 /* Emit information for branch prediction. */
2571 rtx note;
2572
2573 note = emit_note (NOTE_INSN_PREDICTION);
2574
2575 NOTE_PREDICTION (note) = NOTE_PREDICT (pred, NOT_TAKEN);
2576
2577 }
2578
2579 last_insn = get_last_insn ();
2580 return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
2581
2582 /* Copy the value to the return location
2583 unless it's already there. */
2584
2585 if (return_reg != val)
2586 {
2587 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
2588 if (targetm.calls.promote_function_return (TREE_TYPE (current_function_decl)))
2589 {
2590 int unsignedp = TYPE_UNSIGNED (type);
2591 enum machine_mode old_mode
2592 = DECL_MODE (DECL_RESULT (current_function_decl));
2593 enum machine_mode mode
2594 = promote_mode (type, old_mode, &unsignedp, 1);
2595
2596 if (mode != old_mode)
2597 val = convert_modes (mode, old_mode, val, unsignedp);
2598 }
2599 if (GET_CODE (return_reg) == PARALLEL)
2600 emit_group_load (return_reg, val, type, int_size_in_bytes (type));
2601 else
2602 emit_move_insn (return_reg, val);
2603 }
2604
2605 expand_null_return_1 (last_insn);
2606 }
2607
2608 /* Output a return with no value. If LAST_INSN is nonzero,
2609 pretend that the return takes place after LAST_INSN. */
2610
2611 static void
2612 expand_null_return_1 (rtx last_insn)
2613 {
2614 rtx end_label = cleanup_label ? cleanup_label : return_label;
2615
2616 clear_pending_stack_adjust ();
2617 do_pending_stack_adjust ();
2618 clear_last_expr ();
2619
2620 if (end_label == 0)
2621 end_label = return_label = gen_label_rtx ();
2622 expand_goto_internal (NULL_TREE, end_label, last_insn);
2623 }
2624 \f
2625 /* Generate RTL to evaluate the expression RETVAL and return it
2626 from the current function. */
2627
2628 void
2629 expand_return (tree retval)
2630 {
2631 /* If there are any cleanups to be performed, then they will
2632 be inserted following LAST_INSN. It is desirable
2633 that the last_insn, for such purposes, should be the
2634 last insn before computing the return value. Otherwise, cleanups
2635 which call functions can clobber the return value. */
2636 /* ??? rms: I think that is erroneous, because in C++ it would
2637 run destructors on variables that might be used in the subsequent
2638 computation of the return value. */
2639 rtx last_insn = 0;
2640 rtx result_rtl;
2641 rtx val = 0;
2642 tree retval_rhs;
2643
2644 /* If function wants no value, give it none. */
2645 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
2646 {
2647 expand_expr (retval, NULL_RTX, VOIDmode, 0);
2648 emit_queue ();
2649 expand_null_return ();
2650 return;
2651 }
2652
2653 if (retval == error_mark_node)
2654 {
2655 /* Treat this like a return of no value from a function that
2656 returns a value. */
2657 expand_null_return ();
2658 return;
2659 }
2660 else if (TREE_CODE (retval) == RESULT_DECL)
2661 retval_rhs = retval;
2662 else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
2663 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
2664 retval_rhs = TREE_OPERAND (retval, 1);
2665 else
2666 retval_rhs = retval;
2667
2668 last_insn = get_last_insn ();
2669
2670 result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
2671
2672 /* If the result is an aggregate that is being returned in one (or more)
2673 registers, load the registers here. The compiler currently can't handle
2674 copying a BLKmode value into registers. We could put this code in a
2675 more general area (for use by everyone instead of just function
2676 call/return), but until this feature is generally usable it is kept here
2677 (and in expand_call). The value must go into a pseudo in case there
2678 are cleanups that will clobber the real return register. */
2679
2680 if (retval_rhs != 0
2681 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
2682 && REG_P (result_rtl))
2683 {
2684 int i;
2685 unsigned HOST_WIDE_INT bitpos, xbitpos;
2686 unsigned HOST_WIDE_INT padding_correction = 0;
2687 unsigned HOST_WIDE_INT bytes
2688 = int_size_in_bytes (TREE_TYPE (retval_rhs));
2689 int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2690 unsigned int bitsize
2691 = MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)), BITS_PER_WORD);
2692 rtx *result_pseudos = alloca (sizeof (rtx) * n_regs);
2693 rtx result_reg, src = NULL_RTX, dst = NULL_RTX;
2694 rtx result_val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
2695 enum machine_mode tmpmode, result_reg_mode;
2696
2697 if (bytes == 0)
2698 {
2699 expand_null_return ();
2700 return;
2701 }
2702
2703 /* If the structure doesn't take up a whole number of words, see
2704 whether the register value should be padded on the left or on
2705 the right. Set PADDING_CORRECTION to the number of padding
2706 bits needed on the left side.
2707
2708 In most ABIs, the structure will be returned at the least significant end of
2709 the register, which translates to right padding on little-endian
2710 targets and left padding on big-endian targets. The opposite
2711 holds if the structure is returned at the most significant
2712 end of the register. */
2713 if (bytes % UNITS_PER_WORD != 0
2714 && (targetm.calls.return_in_msb (TREE_TYPE (retval_rhs))
2715 ? !BYTES_BIG_ENDIAN
2716 : BYTES_BIG_ENDIAN))
2717 padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2718 * BITS_PER_UNIT));
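      /* Worked example, assuming 32-bit words: returning a 6-byte
	 BLKmode structure on a big-endian target (returned at the
	 least significant end) gives padding_correction
	 = 32 - 2*8 = 16, so the copy below starts 16 bits into the
	 first destination word.  */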
2719
2720 /* Copy the structure BITSIZE bits at a time. */
2721 for (bitpos = 0, xbitpos = padding_correction;
2722 bitpos < bytes * BITS_PER_UNIT;
2723 bitpos += bitsize, xbitpos += bitsize)
2724 {
2725 /* We need a new destination pseudo each time xbitpos is
2726 on a word boundary and when xbitpos == padding_correction
2727 (the first time through). */
2728 if (xbitpos % BITS_PER_WORD == 0
2729 || xbitpos == padding_correction)
2730 {
2731 /* Generate an appropriate register. */
2732 dst = gen_reg_rtx (word_mode);
2733 result_pseudos[xbitpos / BITS_PER_WORD] = dst;
2734
2735 /* Clear the destination before we move anything into it. */
2736 emit_move_insn (dst, CONST0_RTX (GET_MODE (dst)));
2737 }
2738
2739 /* We need a new source operand each time bitpos is on a word
2740 boundary. */
2741 if (bitpos % BITS_PER_WORD == 0)
2742 src = operand_subword_force (result_val,
2743 bitpos / BITS_PER_WORD,
2744 BLKmode);
2745
2746 /* Use bitpos for the source extraction (left justified) and
2747 xbitpos for the destination store (right justified). */
2748 store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
2749 extract_bit_field (src, bitsize,
2750 bitpos % BITS_PER_WORD, 1,
2751 NULL_RTX, word_mode, word_mode,
2752 BITS_PER_WORD),
2753 BITS_PER_WORD);
2754 }
2755
2756 tmpmode = GET_MODE (result_rtl);
2757 if (tmpmode == BLKmode)
2758 {
2759 /* Find the smallest integer mode large enough to hold the
2760 entire structure and use that mode instead of BLKmode
2761 on the USE insn for the return register. */
2762 for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2763 tmpmode != VOIDmode;
2764 tmpmode = GET_MODE_WIDER_MODE (tmpmode))
2765 /* Have we found a large enough mode? */
2766 if (GET_MODE_SIZE (tmpmode) >= bytes)
2767 break;
2768
2769 /* No suitable mode found. */
2770 if (tmpmode == VOIDmode)
2771 abort ();
2772
2773 PUT_MODE (result_rtl, tmpmode);
2774 }
2775
2776 if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
2777 result_reg_mode = word_mode;
2778 else
2779 result_reg_mode = tmpmode;
2780 result_reg = gen_reg_rtx (result_reg_mode);
2781
2782 emit_queue ();
2783 for (i = 0; i < n_regs; i++)
2784 emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
2785 result_pseudos[i]);
2786
2787 if (tmpmode != result_reg_mode)
2788 result_reg = gen_lowpart (tmpmode, result_reg);
2789
2790 expand_value_return (result_reg);
2791 }
2792 else if (retval_rhs != 0
2793 && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
2794 && (REG_P (result_rtl)
2795 || (GET_CODE (result_rtl) == PARALLEL)))
2796 {
2797 /* Calculate the return value into a temporary (usually a pseudo
2798 reg). */
2799 tree ot = TREE_TYPE (DECL_RESULT (current_function_decl));
2800 tree nt = build_qualified_type (ot, TYPE_QUALS (ot) | TYPE_QUAL_CONST);
2801
2802 val = assign_temp (nt, 0, 0, 1);
2803 val = expand_expr (retval_rhs, val, GET_MODE (val), 0);
2804 val = force_not_mem (val);
2805 emit_queue ();
2806 /* Return the calculated value, doing cleanups first. */
2807 expand_value_return (shift_return_value (val));
2808 }
2809 else
2810 {
2811 /* No cleanups or no hard reg used;
2812 calculate value into hard return reg. */
2813 expand_expr (retval, const0_rtx, VOIDmode, 0);
2814 emit_queue ();
2815 expand_value_return (result_rtl);
2816 }
2817 }
2818 \f
2819 /* Generate the RTL code for entering a binding contour.
2820 The variables are declared one by one, by calls to `expand_decl'.
2821
2822 FLAGS is a bitwise or of the following flags:
2823
2824 1 - Nonzero if this construct should be visible to
2825 `exit_something'.
2826
2827 2 - Nonzero if this contour does not require a
2828 NOTE_INSN_BLOCK_BEG note. Virtually all calls from
2829 language-independent code should set this flag because they
2830 will not create corresponding BLOCK nodes. (There should be
2831 a one-to-one correspondence between NOTE_INSN_BLOCK_BEG notes
2832 and BLOCKs.) If this flag is set, MARK_ENDS should be zero
2833 when expand_end_bindings is called.
2834
2835 If we are creating a NOTE_INSN_BLOCK_BEG note, a BLOCK may
2836 optionally be supplied. If so, it becomes the NOTE_BLOCK for the
2837 note. */
2838
2839 void
2840 expand_start_bindings_and_block (int flags, tree block)
2841 {
2842 struct nesting *thisblock = ALLOC_NESTING ();
2843 rtx note;
2844 int exit_flag = ((flags & 1) != 0);
2845 int block_flag = ((flags & 2) == 0);
2846
2847 /* If a BLOCK is supplied, then the caller should be requesting a
2848 NOTE_INSN_BLOCK_BEG note. */
2849 if (!block_flag && block)
2850 abort ();
2851
2852 /* Create a note to mark the beginning of the block. */
2853 if (block_flag && !cfun->dont_emit_block_notes)
2854 {
2855 note = emit_note (NOTE_INSN_BLOCK_BEG);
2856 NOTE_BLOCK (note) = block;
2857 }
2858 else
2859 note = emit_note (NOTE_INSN_DELETED);
2860
2861 /* Make an entry on block_stack for the block we are entering. */
2862
2863 thisblock->desc = BLOCK_NESTING;
2864 thisblock->next = block_stack;
2865 thisblock->all = nesting_stack;
2866 thisblock->depth = ++nesting_depth;
2867 thisblock->data.block.stack_level = 0;
2868 thisblock->data.block.cleanups = 0;
2869 thisblock->data.block.exception_region = 0;
2870 thisblock->data.block.block_target_temp_slot_level = target_temp_slot_level;
2871
2872 thisblock->data.block.conditional_code = 0;
2873 thisblock->data.block.last_unconditional_cleanup = note;
2874 /* When we insert instructions after the last unconditional cleanup,
2875 we don't adjust last_insn. That means that a later add_insn will
2876 clobber the instructions we've just added. The easiest way to
2877 fix this is to just insert another instruction here, so that the
2878 instructions inserted after the last unconditional cleanup are
2879 never the last instruction. */
2880 emit_note (NOTE_INSN_DELETED);
2881
2882 if (block_stack
2883 && !(block_stack->data.block.cleanups == NULL_TREE
2884 && block_stack->data.block.outer_cleanups == NULL_TREE))
2885 thisblock->data.block.outer_cleanups
2886 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
2887 block_stack->data.block.outer_cleanups);
2888 else
2889 thisblock->data.block.outer_cleanups = 0;
2890 thisblock->data.block.label_chain = 0;
2891 thisblock->data.block.innermost_stack_block = stack_block_stack;
2892 thisblock->data.block.first_insn = note;
2893 thisblock->data.block.block_start_count = ++current_block_start_count;
2894 thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
2895 block_stack = thisblock;
2896 nesting_stack = thisblock;
2897
2898 /* Make a new level for allocating stack slots. */
2899 push_temp_slots ();
2900 }
2901
2902 /* Specify the scope of temporaries created by TARGET_EXPRs. Similar
2903 to CLEANUP_POINT_EXPR, but handles cases in which a series of calls
2904 to expand_expr is made. After we end the region, we know that all
2905 space for all temporaries that were created by TARGET_EXPRs will be
2906 destroyed and their space freed for reuse. */
2907
2908 void
2909 expand_start_target_temps (void)
2910 {
2911 /* This is so that even if the result is preserved, the space
2912 allocated will be freed, as we know that it is no longer in use. */
2913 push_temp_slots ();
2914
2915 /* Start a new binding layer that will keep track of all cleanup
2916 actions to be performed. */
2917 expand_start_bindings (2);
2918
2919 target_temp_slot_level = temp_slot_level;
2920 }
2921
2922 void
2923 expand_end_target_temps (void)
2924 {
2925 expand_end_bindings (NULL_TREE, 0, 0);
2926
2927 /* This is so that even if the result is preserved, the space
2928 allocated will be freed, as we know that it is no longer in use. */
2929 pop_temp_slots ();
2930 }
2931
2932 /* Given a pointer to a BLOCK node return nonzero if (and only if) the node
2933 in question represents the outermost pair of curly braces (i.e. the "body
2934 block") of a function or method.
2935
2936 For any BLOCK node representing a "body block" of a function or method, the
2937 BLOCK_SUPERCONTEXT of the node will point to another BLOCK node which
2938 represents the outermost (function) scope for the function or method (i.e.
2939 the one which includes the formal parameters). The BLOCK_SUPERCONTEXT of
2940 *that* node in turn will point to the relevant FUNCTION_DECL node. */
2941
2942 int
2943 is_body_block (tree stmt)
2944 {
2945 if (lang_hooks.no_body_blocks)
2946 return 0;
2947
2948 if (TREE_CODE (stmt) == BLOCK)
2949 {
2950 tree parent = BLOCK_SUPERCONTEXT (stmt);
2951
2952 if (parent && TREE_CODE (parent) == BLOCK)
2953 {
2954 tree grandparent = BLOCK_SUPERCONTEXT (parent);
2955
2956 if (grandparent && TREE_CODE (grandparent) == FUNCTION_DECL)
2957 return 1;
2958 }
2959 }
2960
2961 return 0;
2962 }
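/* For a hypothetical

	void f (int p) { int v; ... }

   the BLOCK for the braces (containing `v') is the body block: its
   BLOCK_SUPERCONTEXT is the outermost scope BLOCK (containing `p'),
   whose BLOCK_SUPERCONTEXT is the FUNCTION_DECL for `f'.  */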
2963
2964 /* True if we are currently emitting insns in an area of output code
2965 that is controlled by a conditional expression. This is used by
2966 the cleanup handling code to generate conditional cleanup actions. */
2967
2968 int
2969 conditional_context (void)
2970 {
2971 return block_stack && block_stack->data.block.conditional_code;
2972 }
2973
2974 /* Return an opaque pointer to the current nesting level, so frontend code
2975 can check its own sanity. */
2976
2977 struct nesting *
2978 current_nesting_level (void)
2979 {
2980 return cfun ? block_stack : 0;
2981 }
2982
2983 /* Emit code to restore vital registers at the beginning of a nonlocal goto
2984 handler. */
2985 static void
2986 expand_nl_goto_receiver (void)
2987 {
2988 /* We clobber the FP when we get here, so we have to make sure it's
2989 marked as used by this function. */
2990 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
2991
2992 /* Mark the static chain as clobbered here so life information
2993 doesn't get messed up for it. */
2994 emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));
2995
2996 #ifdef HAVE_nonlocal_goto
2997 if (! HAVE_nonlocal_goto)
2998 #endif
2999 /* First adjust our frame pointer to its actual value. It was
3000 previously set to the start of the virtual area corresponding to
3001 the stacked variables when we branched here and now needs to be
3002 adjusted to the actual hardware fp value.
3003
3004 Assignments to virtual registers are converted by
3005 instantiate_virtual_regs into the corresponding assignment
3006 to the underlying register (fp in this case) that makes
3007 the original assignment true.
3008 So the following insn will actually be
3009 decrementing fp by STARTING_FRAME_OFFSET. */
3010 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
3011
3012 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3013 if (fixed_regs[ARG_POINTER_REGNUM])
3014 {
3015 #ifdef ELIMINABLE_REGS
3016 /* If the argument pointer can be eliminated in favor of the
3017 frame pointer, we don't need to restore it. We assume here
3018 that if such an elimination is present, it can always be used.
3019 This is the case on all known machines; if we don't make this
3020 assumption, we do unnecessary saving on many machines. */
3021 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
3022 size_t i;
3023
3024 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
3025 if (elim_regs[i].from == ARG_POINTER_REGNUM
3026 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
3027 break;
3028
3029 if (i == ARRAY_SIZE (elim_regs))
3030 #endif
3031 {
3032 /* Now restore our arg pointer from the address at which it
3033 was saved in our stack frame. */
3034 emit_move_insn (virtual_incoming_args_rtx,
3035 copy_to_reg (get_arg_pointer_save_area (cfun)));
3036 }
3037 }
3038 #endif
3039
3040 #ifdef HAVE_nonlocal_goto_receiver
3041 if (HAVE_nonlocal_goto_receiver)
3042 emit_insn (gen_nonlocal_goto_receiver ());
3043 #endif
3044
3045 /* @@@ This is a kludge. Not all machine descriptions define a blockage
3046 insn, but we must not allow the code we just generated to be reordered
3047 by scheduling. Specifically, the update of the frame pointer must
3048 happen immediately, not later. So emit an ASM_INPUT to act as a blockage
3049 insn. */
3050 emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
3051 }
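/* Context for the receiver above, as a hypothetical GNU C example:

	void f (void)
	{
	  __label__ l;
	  void g (void) { goto l; }
	  h (g);
	 l: ;
	}

   The goto in `g' is a nonlocal goto back into `f'; the code emitted
   above runs at `l' to restore f's frame pointer and, where needed,
   its argument pointer.  */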
3052
3053 /* Warn about any unused VARS (which may contain nodes other than
3054 VAR_DECLs, but such nodes are ignored). The nodes are connected
3055 via the TREE_CHAIN field. */
3056
3057 void
3058 warn_about_unused_variables (tree vars)
3059 {
3060 tree decl;
3061
3062 if (warn_unused_variable)
3063 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3064 if (TREE_CODE (decl) == VAR_DECL
3065 && ! TREE_USED (decl)
3066 && ! DECL_IN_SYSTEM_HEADER (decl)
3067 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
3068 warning ("%Junused variable '%D'", decl, decl);
3069 }
3070
3071 /* Generate RTL code to terminate a binding contour.
3072
3073 VARS is the chain of VAR_DECL nodes for the variables bound in this
3074 contour. There may actually be other nodes in this chain, but any
3075 nodes other than VAR_DECLS are ignored.
3076
3077 MARK_ENDS is nonzero if we should put a note at the beginning
3078 and end of this binding contour.
3079
3080 DONT_JUMP_IN is positive if it is not valid to jump into this contour,
3081 zero if we can jump into this contour only if it does not have a saved
3082 stack level, and negative if we are not to check for invalid use of
3083 labels (because the front end does that). */
3084
3085 void
3086 expand_end_bindings (tree vars, int mark_ends, int dont_jump_in)
3087 {
3088 struct nesting *thisblock = block_stack;
3089
3090 /* If any of the variables in this scope were not used, warn the
3091 user. */
3092 warn_about_unused_variables (vars);
3093
3094 if (thisblock->exit_label)
3095 {
3096 do_pending_stack_adjust ();
3097 emit_label (thisblock->exit_label);
3098 }
3099
3100 /* Don't allow jumping into a block that has a stack level.
3101 Cleanups are allowed, though. */
3102 if (dont_jump_in > 0
3103 || (dont_jump_in == 0 && thisblock->data.block.stack_level != 0))
3104 {
3105 struct label_chain *chain;
3106
3107 /* Any labels in this block are no longer valid to go to.
3108 Mark them to cause an error message. */
3109 for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
3110 {
3111 DECL_TOO_LATE (chain->label) = 1;
3112 /* If any goto without a fixup came to this label,
3113 that must be an error, because gotos without fixups
3114 come from outside all saved stack-levels. */
3115 if (TREE_ADDRESSABLE (chain->label))
3116 error ("%Jlabel '%D' used before containing binding contour",
3117 chain->label, chain->label);
3118 }
3119 }
3120
3121 /* Restore stack level in effect before the block
3122 (only if variable-size objects allocated). */
3123 /* Perform any cleanups associated with the block. */
3124
3125 if (thisblock->data.block.stack_level != 0
3126 || thisblock->data.block.cleanups != 0)
3127 {
3128 int reachable;
3129 rtx insn;
3130
3131 /* Don't let cleanups affect ({...}) constructs. */
3132 int old_expr_stmts_for_value = expr_stmts_for_value;
3133 rtx old_last_expr_value = last_expr_value;
3134 rtx old_last_expr_alt_rtl = last_expr_alt_rtl;
3135 tree old_last_expr_type = last_expr_type;
3136 expr_stmts_for_value = 0;
3137
3138 /* Only clean up here if this point can actually be reached. */
3139 insn = get_last_insn ();
3140 if (GET_CODE (insn) == NOTE)
3141 insn = prev_nonnote_insn (insn);
3142 reachable = (! insn || GET_CODE (insn) != BARRIER);
3143
3144 /* Do the cleanups. */
3145 expand_cleanups (thisblock->data.block.cleanups, 0, reachable);
3146 if (reachable)
3147 do_pending_stack_adjust ();
3148
3149 expr_stmts_for_value = old_expr_stmts_for_value;
3150 last_expr_value = old_last_expr_value;
3151 last_expr_alt_rtl = old_last_expr_alt_rtl;
3152 last_expr_type = old_last_expr_type;
3153
3154 /* Restore the stack level. */
3155
3156 if (reachable && thisblock->data.block.stack_level != 0)
3157 {
3158 emit_stack_restore (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3159 thisblock->data.block.stack_level, NULL_RTX);
3160 if (cfun->nonlocal_goto_save_area)
3161 update_nonlocal_goto_save_area ();
3162 }
3163
3164 /* Any gotos out of this block must also do these things.
3165 Also report any gotos with fixups that came to labels in this
3166 level. */
3167 fixup_gotos (thisblock,
3168 thisblock->data.block.stack_level,
3169 thisblock->data.block.cleanups,
3170 thisblock->data.block.first_insn,
3171 dont_jump_in);
3172 }
3173
3174 /* Mark the beginning and end of the scope if requested.
3175 We do this now, after running cleanups on the variables
3176 just going out of scope, so they are in scope for their cleanups. */
3177
3178 if (mark_ends && !cfun->dont_emit_block_notes)
3179 {
3180 rtx note = emit_note (NOTE_INSN_BLOCK_END);
3181 NOTE_BLOCK (note) = NOTE_BLOCK (thisblock->data.block.first_insn);
3182 }
3183 else
3184 /* Get rid of the beginning-mark if we don't make an end-mark. */
3185 NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;
3186
3187 /* Restore the temporary level of TARGET_EXPRs. */
3188 target_temp_slot_level = thisblock->data.block.block_target_temp_slot_level;
3189
3190 /* Restore block_stack level for containing block. */
3191
3192 stack_block_stack = thisblock->data.block.innermost_stack_block;
3193 POPSTACK (block_stack);
3194
3195 /* Pop the stack slot nesting and free any slots at this level. */
3196 pop_temp_slots ();
3197 }
3198 \f
3199 /* Generate code to save the stack pointer at the start of the current block
3200 and set up to restore it on exit. */
3201
3202 void
3203 save_stack_pointer (void)
3204 {
3205 struct nesting *thisblock = block_stack;
3206
3207 if (thisblock->data.block.stack_level == 0)
3208 {
3209 emit_stack_save (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3210 &thisblock->data.block.stack_level,
3211 thisblock->data.block.first_insn);
3212 stack_block_stack = thisblock;
3213 }
3214 }
3215 \f
3216 /* Generate RTL for the automatic variable declaration DECL.
3217 (Other kinds of declarations are simply ignored if seen here.) */
3218
3219 void
3220 expand_decl (tree decl)
3221 {
3222 tree type;
3223
3224 type = TREE_TYPE (decl);
3225
3226 /* For a CONST_DECL, set mode, alignment, and sizes from those of the
3227 type in case this node is used in a reference. */
3228 if (TREE_CODE (decl) == CONST_DECL)
3229 {
3230 DECL_MODE (decl) = TYPE_MODE (type);
3231 DECL_ALIGN (decl) = TYPE_ALIGN (type);
3232 DECL_SIZE (decl) = TYPE_SIZE (type);
3233 DECL_SIZE_UNIT (decl) = TYPE_SIZE_UNIT (type);
3234 return;
3235 }
3236
3237 /* Otherwise, only automatic variables need any expansion done. Static and
3238 external variables, and external functions, will be handled by
3239 `assemble_variable' (called from finish_decl). TYPE_DECL requires
3240 nothing. PARM_DECLs are handled in `assign_parms'. */
3241 if (TREE_CODE (decl) != VAR_DECL)
3242 return;
3243
3244 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
3245 return;
3246
3247 /* Create the RTL representation for the variable. */
3248
3249 if (type == error_mark_node)
3250 SET_DECL_RTL (decl, gen_rtx_MEM (BLKmode, const0_rtx));
3251
3252 else if (DECL_SIZE (decl) == 0)
3253 /* Variable with incomplete type. */
3254 {
3255 rtx x;
3256 if (DECL_INITIAL (decl) == 0)
3257 /* Error message was already done; now avoid a crash. */
3258 x = gen_rtx_MEM (BLKmode, const0_rtx);
3259 else
3260 /* An initializer is going to decide the size of this array.
3261 Until we know the size, represent its address with a reg. */
3262 x = gen_rtx_MEM (BLKmode, gen_reg_rtx (Pmode));
3263
3264 set_mem_attributes (x, decl, 1);
3265 SET_DECL_RTL (decl, x);
3266 }
3267 else if (DECL_MODE (decl) != BLKmode
3268 /* If -ffloat-store, don't put explicit float vars
3269 into regs. */
3270 && !(flag_float_store
3271 && TREE_CODE (type) == REAL_TYPE)
3272 && ! TREE_THIS_VOLATILE (decl)
3273 && ! DECL_NONLOCAL (decl)
3274 && (DECL_REGISTER (decl) || DECL_ARTIFICIAL (decl) || optimize))
3275 {
3276 /* Automatic variable that can go in a register. */
3277 int unsignedp = TYPE_UNSIGNED (type);
3278 enum machine_mode reg_mode
3279 = promote_mode (type, DECL_MODE (decl), &unsignedp, 0);
3280
3281 SET_DECL_RTL (decl, gen_reg_rtx (reg_mode));
3282
3283 /* Note if the object is a user variable. */
3284 if (!DECL_ARTIFICIAL (decl))
3285 {
3286 mark_user_reg (DECL_RTL (decl));
3287
3288 /* Trust user variables which have a pointer type to really
3289 be pointers. Do not trust compiler-generated temporaries:
3290 our type system is unreliable where pointer arithmetic is
3291 concerned, since it produces many compiler-generated
3292 objects with pointer types that are not really
3293 pointers. */
3294 if (POINTER_TYPE_P (type))
3295 mark_reg_pointer (DECL_RTL (decl),
3296 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl))));
3297 }
3298
3299 maybe_set_unchanging (DECL_RTL (decl), decl);
3300
3301 /* If something wants our address, try to use ADDRESSOF. */
3302 if (TREE_ADDRESSABLE (decl))
3303 put_var_into_stack (decl, /*rescan=*/false);
3304 }
3305
3306 else if (TREE_CODE (DECL_SIZE_UNIT (decl)) == INTEGER_CST
3307 && ! (flag_stack_check && ! STACK_CHECK_BUILTIN
3308 && 0 < compare_tree_int (DECL_SIZE_UNIT (decl),
3309 STACK_CHECK_MAX_VAR_SIZE)))
3310 {
3311 /* Variable of fixed size that goes on the stack. */
3312 rtx oldaddr = 0;
3313 rtx addr;
3314 rtx x;
3315
3316 /* If we previously made RTL for this decl, it must be an array
3317 whose size was determined by the initializer.
3318 The old address was a register; set that register now
3319 to the proper address. */
3320 if (DECL_RTL_SET_P (decl))
3321 {
3322 if (GET_CODE (DECL_RTL (decl)) != MEM
3323 || !REG_P (XEXP (DECL_RTL (decl), 0)))
3324 abort ();
3325 oldaddr = XEXP (DECL_RTL (decl), 0);
3326 }
3327
3328 /* Set alignment we actually gave this decl. */
3329 DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
3330 : GET_MODE_BITSIZE (DECL_MODE (decl)));
3331 DECL_USER_ALIGN (decl) = 0;
3332
3333 x = assign_temp (decl, 1, 1, 1);
3334 set_mem_attributes (x, decl, 1);
3335 SET_DECL_RTL (decl, x);
3336
3337 if (oldaddr)
3338 {
3339 addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
3340 if (addr != oldaddr)
3341 emit_move_insn (oldaddr, addr);
3342 }
3343 }
3344 else
3345 /* Dynamic-size object: must push space on the stack. */
3346 {
3347 rtx address, size, x;
3348
3349 /* Record the stack pointer on entry to the block, if we have
3350 not already done so. */
3351 do_pending_stack_adjust ();
3352 save_stack_pointer ();
3353
3354 /* Compute the variable's size, in bytes. This will expand any
3355 needed SAVE_EXPRs for the first time. */
3356 size = expand_expr (DECL_SIZE_UNIT (decl), NULL_RTX, VOIDmode, 0);
3357 free_temp_slots ();
3358
3359 /* Allocate space on the stack for the variable. Note that
3360 DECL_ALIGN says how the variable is to be aligned and we
3361 cannot use it to conclude anything about the alignment of
3362 the size. */
3363 address = allocate_dynamic_stack_space (size, NULL_RTX,
3364 TYPE_ALIGN (TREE_TYPE (decl)));
3365
3366 /* Reference the variable indirect through that rtx. */
3367 x = gen_rtx_MEM (DECL_MODE (decl), address);
3368 set_mem_attributes (x, decl, 1);
3369 SET_DECL_RTL (decl, x);
3370
3371
3372 /* Indicate the alignment we actually gave this variable. */
3373 #ifdef STACK_BOUNDARY
3374 DECL_ALIGN (decl) = STACK_BOUNDARY;
3375 #else
3376 DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
3377 #endif
3378 DECL_USER_ALIGN (decl) = 0;
3379 }
3380 }
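/* Hypothetical declarations and the cases above that handle them:

	int i;		register case: DECL_RTL is a pseudo REG
			(when the decl is eligible for a register)
	char buf[32];	fixed-size case: assign_temp supplies a MEM
	char vla[n];	dynamic case: allocate_dynamic_stack_space
			is called at run time  */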
3381 \f
3382 /* Emit code to allocate T_SIZE bytes of dynamic stack space for ALLOC. */
3383 void
3384 expand_stack_alloc (tree alloc, tree t_size)
3385 {
3386 rtx address, dest, size;
3387 tree var, type;
3388
3389 if (TREE_CODE (alloc) != ADDR_EXPR)
3390 abort ();
3391 var = TREE_OPERAND (alloc, 0);
3392 if (TREE_CODE (var) != VAR_DECL)
3393 abort ();
3394
3395 type = TREE_TYPE (var);
3396
3397 /* In function-at-a-time mode, variable_size doesn't expand this,
3398 so do it now. */
3399 if (TREE_CODE (type) == ARRAY_TYPE && TYPE_DOMAIN (type))
3400 expand_expr (TYPE_MAX_VALUE (TYPE_DOMAIN (type)),
3401 const0_rtx, VOIDmode, 0);
3402
3403 /* Compute the variable's size, in bytes. */
3404 size = expand_expr (t_size, NULL_RTX, VOIDmode, 0);
3405 free_temp_slots ();
3406
3407 /* Allocate space on the stack for the variable. */
3408 address = XEXP (DECL_RTL (var), 0);
3409 dest = allocate_dynamic_stack_space (size, address, TYPE_ALIGN (type));
3410 if (dest != address)
3411 emit_move_insn (address, dest);
3412
3413 /* Indicate the alignment we actually gave this variable. */
3414 #ifdef STACK_BOUNDARY
3415 DECL_ALIGN (var) = STACK_BOUNDARY;
3416 #else
3417 DECL_ALIGN (var) = BIGGEST_ALIGNMENT;
3418 #endif
3419 DECL_USER_ALIGN (var) = 0;
3420 }
3421
3422 /* Emit code to save the current value of the stack pointer. */
3423 rtx
3424 expand_stack_save (void)
3425 {
3426 rtx ret = NULL_RTX;
3427
3428 do_pending_stack_adjust ();
3429 emit_stack_save (SAVE_BLOCK, &ret, NULL_RTX);
3430 return ret;
3431 }
3432
3433 /* Emit code to restore the stack pointer from the value saved in VAR. */
3434 void
3435 expand_stack_restore (tree var)
3436 {
3437 rtx sa = DECL_RTL (var);
3438
3439 emit_stack_restore (SAVE_BLOCK, sa, NULL_RTX);
3440 }
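
/* Usage sketch (assumed pairing, not from the original source): a
   front end lowering a block that allocates variable stack space
   would bracket it like

       rtx sa = expand_stack_save ();
       SET_DECL_RTL (sa_var, sa);
       ... expand_stack_alloc (addr_of_vla, size_tree); ...
       expand_stack_restore (sa_var);

   so that the stack pointer saved on entry is reinstated on exit,
   releasing the dynamic allocation.  SA_VAR is an artificial
   VAR_DECL created by the front end; ADDR_OF_VLA (an ADDR_EXPR of
   the VLA's VAR_DECL) and SIZE_TREE are placeholder names.  */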
3441 \f
3442 /* Emit code to perform the initialization of a declaration DECL. */
3443
3444 void
3445 expand_decl_init (tree decl)
3446 {
3447 int was_used = TREE_USED (decl);
3448
3449 /* If this is a CONST_DECL, we don't have to generate any code. Likewise
3450 for static decls. */
3451 if (TREE_CODE (decl) == CONST_DECL
3452 || TREE_STATIC (decl))
3453 return;
3454
3455 /* Compute and store the initial value now. */
3456
3457 push_temp_slots ();
3458
3459 if (DECL_INITIAL (decl) == error_mark_node)
3460 {
3461 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
3462
3463 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
3464 || code == POINTER_TYPE || code == REFERENCE_TYPE)
3465 expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
3466 0);
3467 emit_queue ();
3468 }
3469 else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
3470 {
3471 emit_line_note (DECL_SOURCE_LOCATION (decl));
3472 expand_assignment (decl, DECL_INITIAL (decl), 0);
3473 emit_queue ();
3474 }
3475
3476 /* Don't let the initialization count as "using" the variable. */
3477 TREE_USED (decl) = was_used;
3478
3479 /* Free any temporaries we made while initializing the decl. */
3480 preserve_temp_slots (NULL_RTX);
3481 free_temp_slots ();
3482 pop_temp_slots ();
3483 }
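
/* Example (added commentary, not part of the original source): for

       int x = 5;

   the front end calls expand_decl_init on X's VAR_DECL after
   expand_decl, and the second branch above performs
   expand_assignment (decl, DECL_INITIAL (decl), 0).  When the
   initializer is error_mark_node, scalar types are instead
   zero-initialized, which limits cascading errors from an invalid
   initializer.  */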
3484
3485 /* CLEANUP is an expression to be executed at exit from this binding contour;
3486 for example, in C++, it might call the destructor for this variable.
3487
3488 We wrap CLEANUP in an UNSAVE_EXPR node, so that we can expand the
3489 CLEANUP multiple times and have the correct semantics. This
3490 happens in exception handling and for gotos, returns, and breaks
3491 that leave the current scope.
3492
3493 If CLEANUP is nonzero and DECL is zero, we record a cleanup
3494 that is not associated with any particular variable. */
3495
3496 int
3497 expand_decl_cleanup (tree decl, tree cleanup)
3498 {
3499 struct nesting *thisblock;
3500
3501 /* Error if we are not in any block. */
3502 if (cfun == 0 || block_stack == 0)
3503 return 0;
3504
3505 thisblock = block_stack;
3506
3507 /* Record the cleanup if there is one. */
3508
3509 if (cleanup != 0)
3510 {
3511 tree t;
3512 rtx seq;
3513 tree *cleanups = &thisblock->data.block.cleanups;
3514 int cond_context = conditional_context ();
3515
3516 if (cond_context)
3517 {
3518 rtx flag = gen_reg_rtx (word_mode);
3519 rtx set_flag_0;
3520 tree cond;
3521
3522 start_sequence ();
3523 emit_move_insn (flag, const0_rtx);
3524 set_flag_0 = get_insns ();
3525 end_sequence ();
3526
3527 thisblock->data.block.last_unconditional_cleanup
3528 = emit_insn_after (set_flag_0,
3529 thisblock->data.block.last_unconditional_cleanup);
3530
3531 emit_move_insn (flag, const1_rtx);
3532
3533 cond = build_decl (VAR_DECL, NULL_TREE,
3534 lang_hooks.types.type_for_mode (word_mode, 1));
3535 SET_DECL_RTL (cond, flag);
3536
3537 /* Conditionalize the cleanup. */
3538 cleanup = build (COND_EXPR, void_type_node,
3539 lang_hooks.truthvalue_conversion (cond),
3540 cleanup, integer_zero_node);
3541 cleanup = fold (cleanup);
3542
3543 cleanups = &thisblock->data.block.cleanups;
3544 }
3545
3546 cleanup = unsave_expr (cleanup);
3547
3548 t = *cleanups = tree_cons (decl, cleanup, *cleanups);
3549
3550 if (! cond_context)
3551 /* If this block has a cleanup, it belongs in stack_block_stack. */
3552 stack_block_stack = thisblock;
3553
3554 if (cond_context)
3555 {
3556 start_sequence ();
3557 }
3558
3559 if (! using_eh_for_cleanups_p)
3560 TREE_ADDRESSABLE (t) = 1;
3561 else
3562 expand_eh_region_start ();
3563
3564 if (cond_context)
3565 {
3566 seq = get_insns ();
3567 end_sequence ();
3568 if (seq)
3569 thisblock->data.block.last_unconditional_cleanup
3570 = emit_insn_after (seq,
3571 thisblock->data.block.last_unconditional_cleanup);
3572 }
3573 else
3574 {
3575 thisblock->data.block.last_unconditional_cleanup
3576 = get_last_insn ();
3577 /* When we insert instructions after the last unconditional cleanup,
3578 we don't adjust last_insn. That means that a later add_insn will
3579 clobber the instructions we've just added. The easiest way to
3580 fix this is to just insert another instruction here, so that the
3581 instructions inserted after the last unconditional cleanup are
3582 never the last instruction. */
3583 emit_note (NOTE_INSN_DELETED);
3584 }
3585 }
3586 return 1;
3587 }
3588
3589 /* Like expand_decl_cleanup, but maybe only run the cleanup if an exception
3590 is thrown. */
3591
3592 int
3593 expand_decl_cleanup_eh (tree decl, tree cleanup, int eh_only)
3594 {
3595 int ret = expand_decl_cleanup (decl, cleanup);
3596 if (cleanup && ret)
3597 {
3598 tree node = block_stack->data.block.cleanups;
3599 CLEANUP_EH_ONLY (node) = eh_only;
3600 }
3601 return ret;
3602 }
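
/* Usage sketch (assumed, C++-style; not from the original source):
   when expanding

       { Obj o; ... }

   a front end would call expand_decl_cleanup (o_decl, dtor_call)
   right after expanding the declaration, where O_DECL is O's
   VAR_DECL and DTOR_CALL a tree invoking the destructor.  The
   cleanup then runs on every exit from the contour, including
   gotos, returns and exceptions.  Calling expand_decl_cleanup_eh
   with EH_ONLY nonzero restricts it to the exception-unwinding
   path.  */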
3603 \f
3604 /* DECL is an anonymous union. CLEANUP is a cleanup for DECL.
3605 DECL_ELTS is the list of elements that belong to DECL's type.
3606 In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup. */
3607
3608 void
3609 expand_anon_union_decl (tree decl, tree cleanup, tree decl_elts)
3610 {
3611 struct nesting *thisblock = cfun == 0 ? 0 : block_stack;
3612 rtx x;
3613 tree t;
3614
3615 /* If any of the elements are addressable, so is the entire union. */
3616 for (t = decl_elts; t; t = TREE_CHAIN (t))
3617 if (TREE_ADDRESSABLE (TREE_VALUE (t)))
3618 {
3619 TREE_ADDRESSABLE (decl) = 1;
3620 break;
3621 }
3622
3623 expand_decl (decl);
3624 expand_decl_cleanup (decl, cleanup);
3625 x = DECL_RTL (decl);
3626
3627 /* Go through the elements, assigning RTL to each. */
3628 for (t = decl_elts; t; t = TREE_CHAIN (t))
3629 {
3630 tree decl_elt = TREE_VALUE (t);
3631 tree cleanup_elt = TREE_PURPOSE (t);
3632 enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
3633
3634 /* If any of the elements are used, so is the entire
3635 union. */
3636 if (TREE_USED (decl_elt))
3637 TREE_USED (decl) = 1;
3638
3639 /* Propagate the union's alignment to the elements. */
3640 DECL_ALIGN (decl_elt) = DECL_ALIGN (decl);
3641 DECL_USER_ALIGN (decl_elt) = DECL_USER_ALIGN (decl);
3642
3643 /* If the element has BLKmode and the union doesn't, the union is
3644 aligned such that the element doesn't need to have BLKmode, so
3645 change the element's mode to the appropriate one for its size. */
3646 if (mode == BLKmode && DECL_MODE (decl) != BLKmode)
3647 DECL_MODE (decl_elt) = mode
3648 = mode_for_size_tree (DECL_SIZE (decl_elt), MODE_INT, 1);
3649
3650 /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
3651 instead create a new MEM rtx with the proper mode. */
3652 if (GET_CODE (x) == MEM)
3653 {
3654 if (mode == GET_MODE (x))
3655 SET_DECL_RTL (decl_elt, x);
3656 else
3657 SET_DECL_RTL (decl_elt, adjust_address_nv (x, mode, 0));
3658 }
3659 else if (REG_P (x))
3660 {
3661 if (mode == GET_MODE (x))
3662 SET_DECL_RTL (decl_elt, x);
3663 else
3664 SET_DECL_RTL (decl_elt, gen_lowpart_SUBREG (mode, x));
3665 }
3666 else
3667 abort ();
3668
3669 /* Record the cleanup if there is one. */
3670
3671 if (cleanup != 0)
3672 thisblock->data.block.cleanups
3673 = tree_cons (decl_elt, cleanup_elt,
3674 thisblock->data.block.cleanups);
3675 }
3676 }
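
/* Example (added commentary, not part of the original source): for
   an anonymous union such as

       union { int i; float f; };

   DECL is the union itself and DECL_ELTS lists I and F.  The union
   gets a single slot, and the loop above makes each member's
   DECL_RTL alias it: the same MEM re-addressed with
   adjust_address_nv when the member's mode differs, or a lowpart
   SUBREG when the union lives in a register.  */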
3677 \f
3678 /* Expand a list of cleanups LIST.
3679 Elements may be expressions or may be nested lists.
3680
3681 If IN_FIXUP is nonzero, we are generating this cleanup for a fixup
3682 goto and handle protection regions specially in that case.
3683
3684 If REACHABLE, we emit code, otherwise just inform the exception handling
3685 code about this finalization. */
3686
3687 static void
3688 expand_cleanups (tree list, int in_fixup, int reachable)
3689 {
3690 tree tail;
3691 for (tail = list; tail; tail = TREE_CHAIN (tail))
3692 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3693 expand_cleanups (TREE_VALUE (tail), in_fixup, reachable);
3694 else
3695 {
3696 if (! in_fixup && using_eh_for_cleanups_p)
3697 expand_eh_region_end_cleanup (TREE_VALUE (tail));
3698
3699 if (reachable && !CLEANUP_EH_ONLY (tail))
3700 {
3701 /* Cleanups may be run multiple times. For example,
3702 when exiting a binding contour, we expand the
3703 cleanups associated with that contour. When a goto
3704 within that binding contour has a target outside that
3705 contour, it will expand all cleanups from its scope to
3706 the target. Though the cleanups are expanded multiple
3707 times, the control paths are non-overlapping so the
3708 cleanups will not be executed twice. */
3709
3710 /* We may need to protect from outer cleanups. */
3711 if (in_fixup && using_eh_for_cleanups_p)
3712 {
3713 expand_eh_region_start ();
3714
3715 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
3716
3717 expand_eh_region_end_fixup (TREE_VALUE (tail));
3718 }
3719 else
3720 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
3721
3722 free_temp_slots ();
3723 }
3724 }
3725 }
3726
3727 /* Mark the context we are emitting RTL for as a conditional
3728 context, so that any cleanup actions we register with
3729 expand_decl_cleanup will be properly conditionalized when those
3730 cleanup actions are later performed. Must be called before any
3731 expression (tree) is expanded that is within a conditional context.
3732
3733 void
3734 start_cleanup_deferral (void)
3735 {
3736 /* block_stack can be NULL if we are inside the parameter list. It is
3737 OK to do nothing, because cleanups aren't possible here. */
3738 if (block_stack)
3739 ++block_stack->data.block.conditional_code;
3740 }
3741
3742 /* Mark the end of a conditional region of code. Because cleanup
3743 deferrals may be nested, we may still be in a conditional region
3744 after we end the currently deferred cleanups; only after we end all
3745 deferred cleanups are we back in unconditional code. */
3746
3747 void
3748 end_cleanup_deferral (void)
3749 {
3750 /* block_stack can be NULL if we are inside the parameter list. It is
3751 OK to do nothing, because cleanups aren't possible here. */
3752 if (block_stack)
3753 --block_stack->data.block.conditional_code;
3754 }
3755
3756 tree
3757 last_cleanup_this_contour (void)
3758 {
3759 if (block_stack == 0)
3760 return 0;
3761
3762 return block_stack->data.block.cleanups;
3763 }
3764
3765
3766 /* Return nonzero if any containing block has a stack level or
3767 cleanups. */
3768
3769 int
3770 containing_blocks_have_cleanups_or_stack_level (void)
3771 {
3772 struct nesting *block;
3773
3774 for (block = block_stack; block; block = block->next)
3775 if (block->data.block.stack_level != 0
3776 || block->data.block.cleanups != 0)
3777 return 1;
3778
3779 return 0;
3780 }
3781
3782 /* Return 1 if there are any pending cleanups at this point.
3783 Check the current contour as well as contours that enclose
3784 the current contour. */
3785
3786 int
3787 any_pending_cleanups (void)
3788 {
3789 struct nesting *block;
3790
3791 if (cfun == NULL || cfun->stmt == NULL || block_stack == 0)
3792 return 0;
3793
3794 if (block_stack->data.block.cleanups != NULL)
3795 return 1;
3796
3797 if (block_stack->data.block.outer_cleanups == 0)
3798 return 0;
3799
3800 for (block = block_stack->next; block; block = block->next)
3801 if (block->data.block.cleanups != 0)
3802 return 1;
3803
3804 return 0;
3805 }
3806 \f
3807 /* Enter a case (Pascal) or switch (C) statement.
3808 Push a block onto case_stack and nesting_stack
3809 to accumulate the case-labels that are seen
3810 and to record the labels generated for the statement.
3811
3812 EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
3813 Otherwise, this construct is transparent for `exit_something'.
3814
3815 EXPR is the index-expression to be dispatched on.
3816 TYPE is its nominal type. We could simply convert EXPR to this type,
3817 but instead we take short cuts. */
3818
3819 void
3820 expand_start_case (int exit_flag, tree expr, tree type,
3821 const char *printname)
3822 {
3823 struct nesting *thiscase = ALLOC_NESTING ();
3824
3825 /* Make an entry on case_stack for the case we are entering. */
3826
3827 thiscase->desc = CASE_NESTING;
3828 thiscase->next = case_stack;
3829 thiscase->all = nesting_stack;
3830 thiscase->depth = ++nesting_depth;
3831 thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
3832 thiscase->data.case_stmt.case_list = 0;
3833 thiscase->data.case_stmt.index_expr = expr;
3834 thiscase->data.case_stmt.nominal_type = type;
3835 thiscase->data.case_stmt.default_label = 0;
3836 thiscase->data.case_stmt.printname = printname;
3837 thiscase->data.case_stmt.line_number_status = force_line_numbers ();
3838 case_stack = thiscase;
3839 nesting_stack = thiscase;
3840
3841 do_pending_stack_adjust ();
3842 emit_queue ();
3843
3844 /* Make sure case_stmt.start points to something that won't
3845 need any transformation before expand_end_case. */
3846 if (GET_CODE (get_last_insn ()) != NOTE)
3847 emit_note (NOTE_INSN_DELETED);
3848
3849 thiscase->data.case_stmt.start = get_last_insn ();
3850
3851 start_cleanup_deferral ();
3852 }
3853 \f
3854 static void
3855 check_seenlabel (void)
3856 {
3857 /* If this is the first label, warn if any insns have been emitted. */
3858 if (case_stack->data.case_stmt.line_number_status >= 0)
3859 {
3860 rtx insn;
3861
3862 restore_line_number_status
3863 (case_stack->data.case_stmt.line_number_status);
3864 case_stack->data.case_stmt.line_number_status = -1;
3865
3866 for (insn = case_stack->data.case_stmt.start;
3867 insn;
3868 insn = NEXT_INSN (insn))
3869 {
3870 if (GET_CODE (insn) == CODE_LABEL)
3871 break;
3872 if (GET_CODE (insn) != NOTE
3873 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
3874 {
3875 do
3876 insn = PREV_INSN (insn);
3877 while (insn && (GET_CODE (insn) != NOTE || NOTE_LINE_NUMBER (insn) < 0));
3878
3879 /* If insn is zero, then there must have been a syntax error. */
3880 if (insn)
3881 {
3882 location_t locus;
3883 locus.file = NOTE_SOURCE_FILE (insn);
3884 locus.line = NOTE_LINE_NUMBER (insn);
3885 warning ("%Hunreachable code at beginning of %s", &locus,
3886 case_stack->data.case_stmt.printname);
3887 }
3888 break;
3889 }
3890 }
3891 }
3892 }
3893
3894 /* Accumulate one case or default label inside a case or switch statement.
3895 VALUE is the value of the case (a null pointer, for a default label).
3896 The function CONVERTER, when applied to arguments T and V,
3897 converts the value V to the type T.
3898
3899 If not currently inside a case or switch statement, return 1 and do
3900 nothing. The caller will print a language-specific error message.
3901 If VALUE is a duplicate or overlaps, return 2 and do nothing
3902 except store the (first) duplicate node in *DUPLICATE.
3903 If VALUE is out of range, return 3 and do nothing.
3904 If we are jumping into the scope of a cleanup or var-sized array, return 5.
3905 Return 0 on success.
3906
3907 Extended to handle range statements. */
3908
3909 int
3910 pushcase (tree value, tree (*converter) (tree, tree), tree label,
3911 tree *duplicate)
3912 {
3913 tree index_type;
3914 tree nominal_type;
3915
3916 /* Fail if not inside a real case statement. */
3917 if (! (case_stack && case_stack->data.case_stmt.start))
3918 return 1;
3919
3920 if (stack_block_stack
3921 && stack_block_stack->depth > case_stack->depth)
3922 return 5;
3923
3924 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
3925 nominal_type = case_stack->data.case_stmt.nominal_type;
3926
3927 /* If the index is erroneous, avoid more problems: pretend to succeed. */
3928 if (index_type == error_mark_node)
3929 return 0;
3930
3931 /* Convert VALUE to the type in which the comparisons are nominally done. */
3932 if (value != 0)
3933 value = (*converter) (nominal_type, value);
3934
3935 check_seenlabel ();
3936
3937 /* Fail if this value is out of range for the actual type of the index
3938 (which may be narrower than NOMINAL_TYPE). */
3939 if (value != 0
3940 && (TREE_CONSTANT_OVERFLOW (value)
3941 || ! int_fits_type_p (value, index_type)))
3942 return 3;
3943
3944 return add_case_node (value, value, label, duplicate, false);
3945 }
3946
3947 /* Like pushcase but this case applies to all values between VALUE1 and
3948 VALUE2 (inclusive). If VALUE1 is NULL, the range starts at the lowest
3949 value of the index type and ends at VALUE2. If VALUE2 is NULL, the range
3950 starts at VALUE1 and ends at the highest value of the index type.
3951 If both are NULL, this case applies to all values.
3952
3953 The return value is the same as that of pushcase but there is one
3954 additional error code: 4 means the specified range was empty. */
3955
3956 int
3957 pushcase_range (tree value1, tree value2, tree (*converter) (tree, tree),
3958 tree label, tree *duplicate)
3959 {
3960 tree index_type;
3961 tree nominal_type;
3962
3963 /* Fail if not inside a real case statement. */
3964 if (! (case_stack && case_stack->data.case_stmt.start))
3965 return 1;
3966
3967 if (stack_block_stack
3968 && stack_block_stack->depth > case_stack->depth)
3969 return 5;
3970
3971 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
3972 nominal_type = case_stack->data.case_stmt.nominal_type;
3973
3974 /* If the index is erroneous, avoid more problems: pretend to succeed. */
3975 if (index_type == error_mark_node)
3976 return 0;
3977
3978 check_seenlabel ();
3979
3980 /* Convert VALUEs to type in which the comparisons are nominally done
3981 and replace any unspecified value with the corresponding bound. */
3982 if (value1 == 0)
3983 value1 = TYPE_MIN_VALUE (index_type);
3984 if (value2 == 0)
3985 value2 = TYPE_MAX_VALUE (index_type);
3986
3987 /* Fail if the range is empty. Do this before any conversion since
3988 we want to allow out-of-range empty ranges. */
3989 if (value2 != 0 && tree_int_cst_lt (value2, value1))
3990 return 4;
3991
3992 /* If the max was unbounded, use the max of the nominal_type we are
3993 converting to. Do this after the < check above to suppress false
3994 positives. */
3995 if (value2 == 0)
3996 value2 = TYPE_MAX_VALUE (nominal_type);
3997
3998 value1 = (*converter) (nominal_type, value1);
3999 value2 = (*converter) (nominal_type, value2);
4000
4001 /* Fail if these values are out of range. */
4002 if (TREE_CONSTANT_OVERFLOW (value1)
4003 || ! int_fits_type_p (value1, index_type))
4004 return 3;
4005
4006 if (TREE_CONSTANT_OVERFLOW (value2)
4007 || ! int_fits_type_p (value2, index_type))
4008 return 3;
4009
4010 return add_case_node (value1, value2, label, duplicate, false);
4011 }
4012
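/* Usage sketch (illustrative, not from the original source): a C
   front end would parse

       case 5:        and call  pushcase (value5, converter, lab, &dup);
       case 1 ... 3:  and call  pushcase_range (v1, v3, converter, lab, &dup);

   then map the return code to a diagnostic: 1 = not in a switch,
   2 = duplicate (the clashing label is stored in *DUP), 3 = out of
   range, 4 = empty range, 5 = jump into the scope of a cleanup or
   variable-sized array.  VALUE5, V1, V3, CONVERTER, LAB and DUP are
   placeholder names.  */
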
4013 /* Do the actual insertion of a case label for pushcase and pushcase_range
4014 into case_stack->data.case_stmt.case_list. Use an AVL tree to avoid
4015 slowdown for large switch statements. */
4016
4017 int
4018 add_case_node (tree low, tree high, tree label, tree *duplicate,
4019 bool dont_expand_label)
4020 {
4021 struct case_node *p, **q, *r;
4022
4023 /* If there's no HIGH value, then this is not a case range; it's
4024 just a simple case label. But that's just a degenerate case
4025 range. */
4026 if (!high)
4027 high = low;
4028
4029 /* Handle default labels specially. */
4030 if (!high && !low)
4031 {
4032 if (case_stack->data.case_stmt.default_label != 0)
4033 {
4034 *duplicate = case_stack->data.case_stmt.default_label;
4035 return 2;
4036 }
4037 case_stack->data.case_stmt.default_label = label;
4038 if (!dont_expand_label)
4039 expand_label (label);
4040 return 0;
4041 }
4042
4043 q = &case_stack->data.case_stmt.case_list;
4044 p = *q;
4045
4046 while ((r = *q))
4047 {
4048 p = r;
4049
4050 /* Keep going past elements distinctly greater than HIGH. */
4051 if (tree_int_cst_lt (high, p->low))
4052 q = &p->left;
4053
4054 /* or distinctly less than LOW. */
4055 else if (tree_int_cst_lt (p->high, low))
4056 q = &p->right;
4057
4058 else
4059 {
4060 /* We have an overlap; this is an error. */
4061 *duplicate = p->code_label;
4062 return 2;
4063 }
4064 }
4065
4066 /* Add this label to the chain, and succeed. */
4067
4068 r = ggc_alloc (sizeof (struct case_node));
4069 r->low = low;
4070
4071 /* If the bounds are equal, turn this into the one-value case. */
4072 if (tree_int_cst_equal (low, high))
4073 r->high = r->low;
4074 else
4075 r->high = high;
4076
4077 r->code_label = label;
4078 if (!dont_expand_label)
4079 expand_label (label);
4080
4081 *q = r;
4082 r->parent = p;
4083 r->left = 0;
4084 r->right = 0;
4085 r->balance = 0;
4086
4087 while (p)
4088 {
4089 struct case_node *s;
4090
4091 if (r == p->left)
4092 {
4093 int b;
4094
4095 if (! (b = p->balance))
4096 /* Growth propagation from left side. */
4097 p->balance = -1;
4098 else if (b < 0)
4099 {
4100 if (r->balance < 0)
4101 {
4102 /* R-Rotation */
4103 if ((p->left = s = r->right))
4104 s->parent = p;
4105
4106 r->right = p;
4107 p->balance = 0;
4108 r->balance = 0;
4109 s = p->parent;
4110 p->parent = r;
4111
4112 if ((r->parent = s))
4113 {
4114 if (s->left == p)
4115 s->left = r;
4116 else
4117 s->right = r;
4118 }
4119 else
4120 case_stack->data.case_stmt.case_list = r;
4121 }
4122 else
4123 /* r->balance == +1 */
4124 {
4125 /* LR-Rotation */
4126
4127 int b2;
4128 struct case_node *t = r->right;
4129
4130 if ((p->left = s = t->right))
4131 s->parent = p;
4132
4133 t->right = p;
4134 if ((r->right = s = t->left))
4135 s->parent = r;
4136
4137 t->left = r;
4138 b = t->balance;
4139 b2 = b < 0;
4140 p->balance = b2;
4141 b2 = -b2 - b;
4142 r->balance = b2;
4143 t->balance = 0;
4144 s = p->parent;
4145 p->parent = t;
4146 r->parent = t;
4147
4148 if ((t->parent = s))
4149 {
4150 if (s->left == p)
4151 s->left = t;
4152 else
4153 s->right = t;
4154 }
4155 else
4156 case_stack->data.case_stmt.case_list = t;
4157 }
4158 break;
4159 }
4160
4161 else
4162 {
4163 /* p->balance == +1; growth of left side balances the node. */
4164 p->balance = 0;
4165 break;
4166 }
4167 }
4168 else
4169 /* r == p->right */
4170 {
4171 int b;
4172
4173 if (! (b = p->balance))
4174 /* Growth propagation from right side. */
4175 p->balance++;
4176 else if (b > 0)
4177 {
4178 if (r->balance > 0)
4179 {
4180 /* L-Rotation */
4181
4182 if ((p->right = s = r->left))
4183 s->parent = p;
4184
4185 r->left = p;
4186 p->balance = 0;
4187 r->balance = 0;
4188 s = p->parent;
4189 p->parent = r;
4190 if ((r->parent = s))
4191 {
4192 if (s->left == p)
4193 s->left = r;
4194 else
4195 s->right = r;
4196 }
4197
4198 else
4199 case_stack->data.case_stmt.case_list = r;
4200 }
4201
4202 else
4203 /* r->balance == -1 */
4204 {
4205 /* RL-Rotation */
4206 int b2;
4207 struct case_node *t = r->left;
4208
4209 if ((p->right = s = t->left))
4210 s->parent = p;
4211
4212 t->left = p;
4213
4214 if ((r->left = s = t->right))
4215 s->parent = r;
4216
4217 t->right = r;
4218 b = t->balance;
4219 b2 = b < 0;
4220 r->balance = b2;
4221 b2 = -b2 - b;
4222 p->balance = b2;
4223 t->balance = 0;
4224 s = p->parent;
4225 p->parent = t;
4226 r->parent = t;
4227
4228 if ((t->parent = s))
4229 {
4230 if (s->left == p)
4231 s->left = t;
4232 else
4233 s->right = t;
4234 }
4235
4236 else
4237 case_stack->data.case_stmt.case_list = t;
4238 }
4239 break;
4240 }
4241 else
4242 {
4243 /* p->balance == -1; growth of right side balances the node. */
4244 p->balance = 0;
4245 break;
4246 }
4247 }
4248
4249 r = p;
4250 p = p->parent;
4251 }
4252
4253 return 0;
4254 }
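
/* Worked example (added commentary, not part of the original
   source): inserting case values 1, 2, 3 in that order exercises
   the rebalancing loop above.  After 2 is inserted, node 1 gets
   balance +1; inserting 3 then finds b > 0 and r->balance > 0, so
   the L-rotation runs: 2 becomes the root of case_list with 1 and
   3 as children and all balances reset to zero, keeping lookup
   depth logarithmic for large switches.  */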
4255 \f
4256 /* Maximum number of case bit tests. */
4257 #define MAX_CASE_BIT_TESTS 3
4258
4259 /* By default, enable case bit tests on targets with a word_mode shift-left insn. */
4260 #ifndef CASE_USE_BIT_TESTS
4261 #define CASE_USE_BIT_TESTS (ashl_optab->handlers[word_mode].insn_code \
4262 != CODE_FOR_nothing)
4263 #endif
4264
4265
4266 /* A case_bit_test represents a set of case nodes that may be
4267 selected from using a bit-wise comparison. HI and LO hold
4268 the integer to be tested against, LABEL contains the label
4269 to jump to upon success and BITS counts the number of case
4270 nodes handled by this test, typically the number of bits
4271 set in HI:LO. */
4272
4273 struct case_bit_test
4274 {
4275 HOST_WIDE_INT hi;
4276 HOST_WIDE_INT lo;
4277 rtx label;
4278 int bits;
4279 };
4280
4281 /* Determine whether "1 << x" is relatively cheap in word_mode. */
4282
4283 static bool
4284 lshift_cheap_p (void)
4285 {
4286 static bool init = false;
4287 static bool cheap = true;
4288
4289 if (!init)
4290 {
4291 rtx reg = gen_rtx_REG (word_mode, 10000);
4292 int cost = rtx_cost (gen_rtx_ASHIFT (word_mode, const1_rtx, reg), SET);
4293 cheap = cost < COSTS_N_INSNS (3);
4294 init = true;
4295 }
4296
4297 return cheap;
4298 }
4299
4300 /* Comparison function for qsort to order bit tests by decreasing
4301 number of case nodes, i.e. the node with the most cases gets
4302 tested first. */
4303
4304 static int
4305 case_bit_test_cmp (const void *p1, const void *p2)
4306 {
4307 const struct case_bit_test *d1 = p1;
4308 const struct case_bit_test *d2 = p2;
4309
4310 return d2->bits - d1->bits;
4311 }
4312
4313 /* Expand a switch statement by a short sequence of bit-wise
4314 comparisons. "switch(x)" is effectively converted into
4315 "if ((1 << (x-MINVAL)) & CST)" where CST and MINVAL are
4316 integer constants.
4317
4318 INDEX_EXPR is the value being switched on, which is of
4319 type INDEX_TYPE. MINVAL is the lowest case value in
4320 the case nodes, of INDEX_TYPE type, and RANGE is the highest
4321 value minus MINVAL, also of type INDEX_TYPE. NODES is
4322 the set of case nodes, and DEFAULT_LABEL is the label to
4323 branch to should none of the cases match.
4324
4325 There *MUST* be MAX_CASE_BIT_TESTS or fewer unique case
4326 node targets. */
4327
4328 static void
4329 emit_case_bit_tests (tree index_type, tree index_expr, tree minval,
4330 tree range, case_node_ptr nodes, rtx default_label)
4331 {
4332 struct case_bit_test test[MAX_CASE_BIT_TESTS];
4333 enum machine_mode mode;
4334 rtx expr, index, label;
4335 unsigned int i, j, lo, hi;
4336 struct case_node *n;
4337 unsigned int count;
4338
4339 count = 0;
4340 for (n = nodes; n; n = n->right)
4341 {
4342 label = label_rtx (n->code_label);
4343 for (i = 0; i < count; i++)
4344 if (same_case_target_p (label, test[i].label))
4345 break;
4346
4347 if (i == count)
4348 {
4349 if (count >= MAX_CASE_BIT_TESTS)
4350 abort ();
4351 test[i].hi = 0;
4352 test[i].lo = 0;
4353 test[i].label = label;
4354 test[i].bits = 1;
4355 count++;
4356 }
4357 else
4358 test[i].bits++;
4359
4360 lo = tree_low_cst (fold (build (MINUS_EXPR, index_type,
4361 n->low, minval)), 1);
4362 hi = tree_low_cst (fold (build (MINUS_EXPR, index_type,
4363 n->high, minval)), 1);
4364 for (j = lo; j <= hi; j++)
4365 if (j >= HOST_BITS_PER_WIDE_INT)
4366 test[i].hi |= (HOST_WIDE_INT) 1 << (j - HOST_BITS_PER_WIDE_INT);
4367 else
4368 test[i].lo |= (HOST_WIDE_INT) 1 << j;
4369 }
4370
4371 qsort (test, count, sizeof(*test), case_bit_test_cmp);
4372
4373 index_expr = fold (build (MINUS_EXPR, index_type,
4374 convert (index_type, index_expr),
4375 convert (index_type, minval)));
4376 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
4377 emit_queue ();
4378 index = protect_from_queue (index, 0);
4379 do_pending_stack_adjust ();
4380
4381 mode = TYPE_MODE (index_type);
4382 expr = expand_expr (range, NULL_RTX, VOIDmode, 0);
4383 emit_cmp_and_jump_insns (index, expr, GTU, NULL_RTX, mode, 1,
4384 default_label);
4385
4386 index = convert_to_mode (word_mode, index, 0);
4387 index = expand_binop (word_mode, ashl_optab, const1_rtx,
4388 index, NULL_RTX, 1, OPTAB_WIDEN);
4389
4390 for (i = 0; i < count; i++)
4391 {
4392 expr = immed_double_const (test[i].lo, test[i].hi, word_mode);
4393 expr = expand_binop (word_mode, and_optab, index, expr,
4394 NULL_RTX, 1, OPTAB_WIDEN);
4395 emit_cmp_and_jump_insns (expr, const0_rtx, NE, NULL_RTX,
4396 word_mode, 1, test[i].label);
4397 }
4398
4399 emit_jump (default_label);
4400 }
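
/* Worked example (added commentary, not part of the original
   source): for

       switch (x)
         {
         case 0: case 3: case 5: f (); break;
         case 1: case 2: g (); break;
         }

   MINVAL is 0 and RANGE is 5, giving two case_bit_test entries:
   mask 0x29 (bits 0, 3 and 5) for f's label and mask 0x06 (bits 1
   and 2) for g's label.  The emitted RTL behaves like

       if (x > 5) goto dflt;
       t = 1 << x;
       if (t & 0x29) goto L_f;
       if (t & 0x06) goto L_g;
       goto dflt;

   with the more populous mask tested first, per the qsort above.  */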
4401
4402 #ifndef HAVE_casesi
4403 #define HAVE_casesi 0
4404 #endif
4405
4406 #ifndef HAVE_tablejump
4407 #define HAVE_tablejump 0
4408 #endif
4409
4410 /* Terminate a case (Pascal) or switch (C) statement
4411 in which ORIG_INDEX is the expression to be tested.
4412 If ORIG_TYPE is not NULL, it is the original ORIG_INDEX
4413 type as given in the source before any compiler conversions.
4414 Generate the code to test it and jump to the right place. */
4415
4416 void
4417 expand_end_case_type (tree orig_index, tree orig_type)
4418 {
4419 tree minval = NULL_TREE, maxval = NULL_TREE, range = NULL_TREE;
4420 rtx default_label = 0;
4421 struct case_node *n, *m;
4422 unsigned int count, uniq;
4423 rtx index;
4424 rtx table_label;
4425 int ncases;
4426 rtx *labelvec;
4427 int i;
4428 rtx before_case, end, lab;
4429 struct nesting *thiscase = case_stack;
4430 tree index_expr, index_type;
4431 bool exit_done = false;
4432 int unsignedp;
4433
4434 /* Don't crash due to previous errors. */
4435 if (thiscase == NULL)
4436 return;
4437
4438 index_expr = thiscase->data.case_stmt.index_expr;
4439 index_type = TREE_TYPE (index_expr);
4440 unsignedp = TYPE_UNSIGNED (index_type);
4441 if (orig_type == NULL)
4442 orig_type = TREE_TYPE (orig_index);
4443
4444 do_pending_stack_adjust ();
4445
4446 /* This might get a spurious warning in the presence of a syntax error;
4447 it could be fixed by moving the call to check_seenlabel after the
4448 check for error_mark_node, and copying the code of check_seenlabel that
4449 deals with case_stack->data.case_stmt.line_number_status /
4450 restore_line_number_status in front of the call to end_cleanup_deferral.
4451 However, this might miss some useful warnings in the presence of
4452 non-syntax errors. */
4453 check_seenlabel ();
4454
4455 /* An ERROR_MARK occurs for various reasons including invalid data type. */
4456 if (index_type != error_mark_node)
4457 {
4458 /* If we don't have a default-label, create one here,
4459 after the body of the switch. */
4460 if (thiscase->data.case_stmt.default_label == 0)
4461 {
4462 thiscase->data.case_stmt.default_label
4463 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
4464 /* Share the exit label if possible. */
4465 if (thiscase->exit_label)
4466 {
4467 SET_DECL_RTL (thiscase->data.case_stmt.default_label,
4468 thiscase->exit_label);
4469 exit_done = true;
4470 }
4471 expand_label (thiscase->data.case_stmt.default_label);
4472 }
4473 default_label = label_rtx (thiscase->data.case_stmt.default_label);
4474
4475 before_case = get_last_insn ();
4476
4477 if (thiscase->data.case_stmt.case_list
4478 && thiscase->data.case_stmt.case_list->left)
4479 thiscase->data.case_stmt.case_list
4480 = case_tree2list (thiscase->data.case_stmt.case_list, 0);
4481
4482 /* Simplify the case-list before we count it. */
4483 group_case_nodes (thiscase->data.case_stmt.case_list);
4484 strip_default_case_nodes (&thiscase->data.case_stmt.case_list,
4485 default_label);
4486
4487 /* Get upper and lower bounds of case values.
4488 Also convert all the case values to the index expr's data type. */
4489
4490 uniq = 0;
4491 count = 0;
4492 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
4493 {
4494 /* Check low and high label values are integers. */
4495 if (TREE_CODE (n->low) != INTEGER_CST)
4496 abort ();
4497 if (TREE_CODE (n->high) != INTEGER_CST)
4498 abort ();
4499
4500 n->low = convert (index_type, n->low);
4501 n->high = convert (index_type, n->high);
4502
4503 /* Count the elements and track the largest and smallest
4504 of them (treating them as signed even if they are not). */
4505 if (count++ == 0)
4506 {
4507 minval = n->low;
4508 maxval = n->high;
4509 }
4510 else
4511 {
4512 if (INT_CST_LT (n->low, minval))
4513 minval = n->low;
4514 if (INT_CST_LT (maxval, n->high))
4515 maxval = n->high;
4516 }
4517 /* A range counts double, since it requires two compares. */
4518 if (! tree_int_cst_equal (n->low, n->high))
4519 count++;
4520
4521 /* Count the number of unique case node targets. */
4522 uniq++;
4523 lab = label_rtx (n->code_label);
4524 for (m = thiscase->data.case_stmt.case_list; m != n; m = m->right)
4525 if (same_case_target_p (label_rtx (m->code_label), lab))
4526 {
4527 uniq--;
4528 break;
4529 }
4530 }
4531
4532 /* Compute span of values. */
4533 if (count != 0)
4534 range = fold (build (MINUS_EXPR, index_type, maxval, minval));
4535
4536 end_cleanup_deferral ();
4537
4538 if (count == 0)
4539 {
4540 expand_expr (index_expr, const0_rtx, VOIDmode, 0);
4541 emit_queue ();
4542 emit_jump (default_label);
4543 }
4544
4545 /* Try implementing this switch statement by a short sequence of
4546 bit-wise comparisons. However, we let the binary-tree case
4547 below handle constant index expressions. */
4548 else if (CASE_USE_BIT_TESTS
4549 && ! TREE_CONSTANT (index_expr)
4550 && compare_tree_int (range, GET_MODE_BITSIZE (word_mode)) < 0
4551 && compare_tree_int (range, 0) > 0
4552 && lshift_cheap_p ()
4553 && ((uniq == 1 && count >= 3)
4554 || (uniq == 2 && count >= 5)
4555 || (uniq == 3 && count >= 6)))
4556 {
4557 /* If all the case values already fit in a word without
4558 subtracting MINVAL, we can omit the subtraction
4559 entirely. */
4560 if (compare_tree_int (minval, 0) > 0
4561 && compare_tree_int (maxval, GET_MODE_BITSIZE (word_mode)) < 0)
4562 {
4563 minval = integer_zero_node;
4564 range = maxval;
4565 }
4566 emit_case_bit_tests (index_type, index_expr, minval, range,
4567 thiscase->data.case_stmt.case_list,
4568 default_label);
4569 }
4570
4571 /* If range of values is much bigger than number of values,
4572 make a sequence of conditional branches instead of a dispatch.
4573 If the switch-index is a constant, do it this way
4574 because we can optimize it. */
4575
4576 else if (count < case_values_threshold ()
4577 || compare_tree_int (range,
4578 (optimize_size ? 3 : 10) * count) > 0
4579 /* RANGE may be signed, and really large ranges will show up
4580 as negative numbers. */
4581 || compare_tree_int (range, 0) < 0
4582 #ifndef ASM_OUTPUT_ADDR_DIFF_ELT
4583 || flag_pic
4584 #endif
4585 || TREE_CONSTANT (index_expr)
4586 /* If neither casesi nor tablejump is available, we can
4587 only go this way. */
4588 || (!HAVE_casesi && !HAVE_tablejump))
4589 {
4590 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
4591
4592 /* If the index is a short or char for which we do not have
4593 an insn to handle comparisons directly, convert it to
4594 a full integer now, rather than letting each comparison
4595 generate the conversion. */
4596
4597 if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
4598 && ! have_insn_for (COMPARE, GET_MODE (index)))
4599 {
4600 enum machine_mode wider_mode;
4601 for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
4602 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
4603 if (have_insn_for (COMPARE, wider_mode))
4604 {
4605 index = convert_to_mode (wider_mode, index, unsignedp);
4606 break;
4607 }
4608 }
4609
4610 emit_queue ();
4611 do_pending_stack_adjust ();
4612
4613 index = protect_from_queue (index, 0);
4614 if (GET_CODE (index) == MEM)
4615 index = copy_to_reg (index);
4616 if (GET_CODE (index) == CONST_INT
4617 || TREE_CODE (index_expr) == INTEGER_CST)
4618 {
4619 /* Make a tree node with the proper constant value
4620 if we don't already have one. */
4621 if (TREE_CODE (index_expr) != INTEGER_CST)
4622 {
4623 index_expr
4624 = build_int_2 (INTVAL (index),
4625 unsignedp || INTVAL (index) >= 0 ? 0 : -1);
4626 index_expr = convert (index_type, index_expr);
4627 }
4628
4629 /* For constant index expressions we need only
4630 issue an unconditional branch to the appropriate
4631 target code. The job of removing any unreachable
4632 code is left to the optimization phase if the
4633 "-O" option is specified. */
4634 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
4635 if (! tree_int_cst_lt (index_expr, n->low)
4636 && ! tree_int_cst_lt (n->high, index_expr))
4637 break;
4638
4639 if (n)
4640 emit_jump (label_rtx (n->code_label));
4641 else
4642 emit_jump (default_label);
4643 }
4644 else
4645 {
4646 /* If the index expression is not constant we generate
4647 a binary decision tree to select the appropriate
4648 target code. This is done as follows:
4649
4650 The list of cases is rearranged into a binary tree,
4651 nearly optimal assuming equal probability for each case.
4652
4653 The tree is transformed into RTL, eliminating
4654 redundant test conditions at the same time.
4655
4656 If program flow could reach the end of the
4657 decision tree an unconditional jump to the
4658 default code is emitted. */
4659
4660 use_cost_table
4661 = (TREE_CODE (orig_type) != ENUMERAL_TYPE
4662 && estimate_case_costs (thiscase->data.case_stmt.case_list));
4663 balance_case_nodes (&thiscase->data.case_stmt.case_list, NULL);
4664 emit_case_nodes (index, thiscase->data.case_stmt.case_list,
4665 default_label, index_type);
4666 emit_jump_if_reachable (default_label);
4667 }
4668 }
4669 else
4670 {
4671 table_label = gen_label_rtx ();
4672 if (! try_casesi (index_type, index_expr, minval, range,
4673 table_label, default_label))
4674 {
4675 index_type = thiscase->data.case_stmt.nominal_type;
4676
4677 /* Index jumptables from zero for suitable values of
4678 minval to avoid a subtraction. */
4679 if (! optimize_size
4680 && compare_tree_int (minval, 0) > 0
4681 && compare_tree_int (minval, 3) < 0)
4682 {
4683 minval = integer_zero_node;
4684 range = maxval;
4685 }
4686
4687 if (! try_tablejump (index_type, index_expr, minval, range,
4688 table_label, default_label))
4689 abort ();
4690 }
4691
4692 /* Get table of labels to jump to, in order of case index. */
4693
4694 ncases = tree_low_cst (range, 0) + 1;
4695 labelvec = alloca (ncases * sizeof (rtx));
4696 memset (labelvec, 0, ncases * sizeof (rtx));
4697
4698 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
4699 {
4700 /* Compute the low and high bounds relative to the minimum
4701 value since that should fit in a HOST_WIDE_INT while the
4702 actual values may not. */
4703 HOST_WIDE_INT i_low
4704 = tree_low_cst (fold (build (MINUS_EXPR, index_type,
4705 n->low, minval)), 1);
4706 HOST_WIDE_INT i_high
4707 = tree_low_cst (fold (build (MINUS_EXPR, index_type,
4708 n->high, minval)), 1);
4709 HOST_WIDE_INT i;
4710
4711 for (i = i_low; i <= i_high; i ++)
4712 labelvec[i]
4713 = gen_rtx_LABEL_REF (Pmode, label_rtx (n->code_label));
4714 }
4715
4716 /* Fill in the gaps with the default. */
4717 for (i = 0; i < ncases; i++)
4718 if (labelvec[i] == 0)
4719 labelvec[i] = gen_rtx_LABEL_REF (Pmode, default_label);
4720
4721 /* Output the table. */
4722 emit_label (table_label);
4723
4724 if (CASE_VECTOR_PC_RELATIVE || flag_pic)
4725 emit_jump_insn (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE,
4726 gen_rtx_LABEL_REF (Pmode, table_label),
4727 gen_rtvec_v (ncases, labelvec),
4728 const0_rtx, const0_rtx));
4729 else
4730 emit_jump_insn (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE,
4731 gen_rtvec_v (ncases, labelvec)));
4732
4733 /* If the case insn drops through the table,
4734 after the table we must jump to the default-label.
4735 Otherwise record no drop-through after the table. */
4736 #ifdef CASE_DROPS_THROUGH
4737 emit_jump (default_label);
4738 #else
4739 emit_barrier ();
4740 #endif
4741 }
4742
4743 before_case = NEXT_INSN (before_case);
4744 end = get_last_insn ();
4745 if (squeeze_notes (&before_case, &end))
4746 abort ();
4747 reorder_insns (before_case, end,
4748 thiscase->data.case_stmt.start);
4749 }
4750 else
4751 end_cleanup_deferral ();
4752
4753 if (thiscase->exit_label && !exit_done)
4754 emit_label (thiscase->exit_label);
4755
4756 POPSTACK (case_stack);
4757
4758 free_temp_slots ();
4759 }
4760
4761 /* Convert the tree NODE into a list linked by the right field, with the left
4762 field zeroed. RIGHT is used for recursion; it is a list to be placed
4763 rightmost in the resulting list. */
4764
4765 static struct case_node *
4766 case_tree2list (struct case_node *node, struct case_node *right)
4767 {
4768 struct case_node *left;
4769
4770 if (node->right)
4771 right = case_tree2list (node->right, right);
4772
4773 node->right = right;
4774 if ((left = node->left))
4775 {
4776 node->left = 0;
4777 return case_tree2list (left, node);
4778 }
4779
4780 return node;
4781 }
4782
4783 /* Generate code to jump to LABEL if OP1 and OP2 are equal. */
4784
4785 static void
4786 do_jump_if_equal (rtx op1, rtx op2, rtx label, int unsignedp)
4787 {
4788 if (GET_CODE (op1) == CONST_INT && GET_CODE (op2) == CONST_INT)
4789 {
4790 if (op1 == op2)
4791 emit_jump (label);
4792 }
4793 else
4794 emit_cmp_and_jump_insns (op1, op2, EQ, NULL_RTX,
4795 (GET_MODE (op1) == VOIDmode
4796 ? GET_MODE (op2) : GET_MODE (op1)),
4797 unsignedp, label);
4798 }
4799 \f
4800 /* Not all case values are encountered equally. This function
4801 uses a heuristic to weight case labels, in cases where that
4802 looks like a reasonable thing to do.
4803
4804 Right now, all we try to guess is text, and we establish the
4805 following weights:
4806
4807 chars above space: 16
4808 digits: 16
4809 default: 12
4810 space, punct: 8
4811 tab: 4
4812 newline: 2
4813 other "\" chars: 1
4814 remaining chars: 0
4815
4816 If we find any cases in the switch that are not either -1 or in the range
4817 of valid ASCII characters, or are control characters other than those
4818 commonly used with "\", don't treat this switch as scanning text.
4819
4820 Return 1 if these nodes are suitable for cost estimation, otherwise
4821 return 0. */
4822
4823 static int
4824 estimate_case_costs (case_node_ptr node)
4825 {
4826 tree min_ascii = integer_minus_one_node;
4827 tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
4828 case_node_ptr n;
4829 int i;
4830
4831 /* If we haven't already made the cost table, make it now. Note that the
4832 lower bound of the table is -1, not zero. */
4833
4834 if (! cost_table_initialized)
4835 {
4836 cost_table_initialized = 1;
4837
4838 for (i = 0; i < 128; i++)
4839 {
4840 if (ISALNUM (i))
4841 COST_TABLE (i) = 16;
4842 else if (ISPUNCT (i))
4843 COST_TABLE (i) = 8;
4844 else if (ISCNTRL (i))
4845 COST_TABLE (i) = -1;
4846 }
4847
4848 COST_TABLE (' ') = 8;
4849 COST_TABLE ('\t') = 4;
4850 COST_TABLE ('\0') = 4;
4851 COST_TABLE ('\n') = 2;
4852 COST_TABLE ('\f') = 1;
4853 COST_TABLE ('\v') = 1;
4854 COST_TABLE ('\b') = 1;
4855 }
4856
4857 /* See if all the case expressions look like text. It is text if the
4858 constant is >= -1 and the highest constant is <= 127. Do all comparisons
4859 as signed arithmetic since we don't want to ever access cost_table with a
4860 value less than -1. Also check that none of the constants in a range
4861 are strange control characters. */
4862
4863 for (n = node; n; n = n->right)
4864 {
4865 if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
4866 return 0;
4867
4868 for (i = (HOST_WIDE_INT) TREE_INT_CST_LOW (n->low);
4869 i <= (HOST_WIDE_INT) TREE_INT_CST_LOW (n->high); i++)
4870 if (COST_TABLE (i) < 0)
4871 return 0;
4872 }
4873
4874 /* All interesting values are within the range of interesting
4875 ASCII characters. */
4876 return 1;
4877 }
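
/* Example (added commentary, not part of the original source): a
   switch over 'a', 'b' and '0' looks like text, so this returns 1
   and each of those labels is weighted 16 via COST_TABLE.  A switch
   containing "case 200:" fails the max_ascii comparison above and
   returns 0, in which case balance_case_nodes falls back to
   weighting by node count alone.  */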
4878
4879 /* Determine whether two case labels branch to the same target. */
4880
4881 static bool
4882 same_case_target_p (rtx l1, rtx l2)
4883 {
4884 #if 0
4885 rtx i1, i2;
4886
4887 if (l1 == l2)
4888 return true;
4889
4890 i1 = next_real_insn (l1);
4891 i2 = next_real_insn (l2);
4892 if (i1 == i2)
4893 return true;
4894
4895 if (i1 && simplejump_p (i1))
4896 {
4897 l1 = XEXP (SET_SRC (PATTERN (i1)), 0);
4898 }
4899
4900 if (i2 && simplejump_p (i2))
4901 {
4902 l2 = XEXP (SET_SRC (PATTERN (i2)), 0);
4903 }
4904 #endif
4905 /* When coming from gimple, we usually won't have emitted either
4906 the labels or the body of the switch statement. The job being
4907 done here should be done via jump threading at the tree level.
4908 Cases that go the same place should have the same label. */
4909 return l1 == l2;
4910 }
4911
4912 /* Delete nodes that branch to the default label from a list of
4913 case nodes. E.g. "case 5: default:" becomes just "default:". */
4914
4915 static void
4916 strip_default_case_nodes (case_node_ptr *prev, rtx deflab)
4917 {
4918 case_node_ptr ptr;
4919
4920 while (*prev)
4921 {
4922 ptr = *prev;
4923 if (same_case_target_p (label_rtx (ptr->code_label), deflab))
4924 *prev = ptr->right;
4925 else
4926 prev = &ptr->right;
4927 }
4928 }
4929
4930 /* Scan an ordered list of case nodes
4931 combining those with consecutive values or ranges.
4932
4933 E.g. three separate entries 1: 2: 3: become one entry 1..3: */
4934
4935 static void
4936 group_case_nodes (case_node_ptr head)
4937 {
4938 case_node_ptr node = head;
4939
4940 while (node)
4941 {
4942 rtx lab;
4943 case_node_ptr np = node;
4944
4945 lab = label_rtx (node->code_label);
4946
4947 /* Try to group the successors of NODE with NODE. */
4948 while (((np = np->right) != 0)
4949 /* Do they jump to the same place? */
4950 && same_case_target_p (label_rtx (np->code_label), lab)
4951 /* Are their ranges consecutive? */
4952 && tree_int_cst_equal (np->low,
4953 fold (build (PLUS_EXPR,
4954 TREE_TYPE (node->high),
4955 node->high,
4956 integer_one_node)))
4957 /* An overflow is not consecutive. */
4958 && tree_int_cst_lt (node->high,
4959 fold (build (PLUS_EXPR,
4960 TREE_TYPE (node->high),
4961 node->high,
4962 integer_one_node))))
4963 {
4964 node->high = np->high;
4965 }
4966 /* NP is the first node after NODE which can't be grouped with it.
4967 Delete the nodes in between, and move on to that node. */
4968 node->right = np;
4969 node = np;
4970 }
4971 }
4972
4973 /* Take an ordered list of case nodes
4974 and transform them into a near optimal binary tree,
4975 on the assumption that any target code selection value is as
4976 likely as any other.
4977
4978 The transformation is performed by splitting the ordered
4979 list into two equal sections plus a pivot. The parts are
4980 then attached to the pivot as left and right branches. Each
4981 branch is then transformed recursively. */
4982
4983 static void
4984 balance_case_nodes (case_node_ptr *head, case_node_ptr parent)
4985 {
4986 case_node_ptr np;
4987
4988 np = *head;
4989 if (np)
4990 {
4991 int cost = 0;
4992 int i = 0;
4993 int ranges = 0;
4994 case_node_ptr *npp;
4995 case_node_ptr left;
4996
4997 /* Count the number of entries on branch. Also count the ranges. */
4998
4999 while (np)
5000 {
5001 if (!tree_int_cst_equal (np->low, np->high))
5002 {
5003 ranges++;
5004 if (use_cost_table)
5005 cost += COST_TABLE (TREE_INT_CST_LOW (np->high));
5006 }
5007
5008 if (use_cost_table)
5009 cost += COST_TABLE (TREE_INT_CST_LOW (np->low));
5010
5011 i++;
5012 np = np->right;
5013 }
5014
5015 if (i > 2)
5016 {
5017 /* Split this list if it is long enough for that to help. */
5018 npp = head;
5019 left = *npp;
5020 if (use_cost_table)
5021 {
5022 /* Find the place in the list that bisects the list's total cost.
5023 Here I gets half the total cost. */
5024 int n_moved = 0;
5025 i = (cost + 1) / 2;
5026 while (1)
5027 {
5028 /* Skip nodes while their cost does not reach that amount. */
5029 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
5030 i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->high));
5031 i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->low));
5032 if (i <= 0)
5033 break;
5034 npp = &(*npp)->right;
5035 n_moved += 1;
5036 }
5037 if (n_moved == 0)
5038 {
5039 /* Leave this branch lopsided, but optimize left-hand
5040 side and fill in `parent' fields for right-hand side. */
5041 np = *head;
5042 np->parent = parent;
5043 balance_case_nodes (&np->left, np);
5044 for (; np->right; np = np->right)
5045 np->right->parent = np;
5046 return;
5047 }
5048 }
5049 /* If there are just three nodes, split at the middle one. */
5050 else if (i == 3)
5051 npp = &(*npp)->right;
5052 else
5053 {
5054 /* Find the place in the list that bisects the list's total cost,
5055 where ranges count as 2.
5056 Here I gets half the total cost. */
5057 i = (i + ranges + 1) / 2;
5058 while (1)
5059 {
5060 /* Skip nodes while their cost does not reach that amount. */
5061 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
5062 i--;
5063 i--;
5064 if (i <= 0)
5065 break;
5066 npp = &(*npp)->right;
5067 }
5068 }
5069 *head = np = *npp;
5070 *npp = 0;
5071 np->parent = parent;
5072 np->left = left;
5073
5074 /* Optimize each of the two split parts. */
5075 balance_case_nodes (&np->left, np);
5076 balance_case_nodes (&np->right, np);
5077 }
5078 else
5079 {
5080 /* Else leave this branch as one level,
5081 but fill in `parent' fields. */
5082 np = *head;
5083 np->parent = parent;
5084 for (; np->right; np = np->right)
5085 np->right->parent = np;
5086 }
5087 }
5088 }
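
/* Worked example (added commentary, not part of the original
   source): with the cost table disabled, a list of seven
   single-value nodes 1..7 has i == 7 and ranges == 0, so the
   bisection index is (7 + 0 + 1) / 2 == 4: node 4 becomes the
   pivot, with 1..3 balanced recursively as its left branch and
   5..7 as its right.  A range node decrements the counter twice,
   since matching it takes two compares.  */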
5089 \f
5090 /* Search the parent sections of the case node tree
5091 to see if a test for the lower bound of NODE would be redundant.
5092 INDEX_TYPE is the type of the index expression.
5093
5094 The instructions to generate the case decision tree are
5095 output in the same order as nodes are processed, so it is
5096 known that if a parent node checks the range of the current
5097 node minus one, the current node is bounded at its lower
5098 span. Thus the test would be redundant. */
5099
5100 static int
5101 node_has_low_bound (case_node_ptr node, tree index_type)
5102 {
5103 tree low_minus_one;
5104 case_node_ptr pnode;
5105
5106 /* If the lower bound of this node is the lowest value in the index type,
5107 we need not test it. */
5108
5109 if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
5110 return 1;
5111
5112 /* If this node has a left branch, the value at the left must be less
5113 than that at this node, so it cannot be bounded at the bottom and
5114 we need not bother testing any further. */
5115
5116 if (node->left)
5117 return 0;
5118
5119 low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
5120 node->low, integer_one_node));
5121
5122 /* If the subtraction above overflowed, we can't verify anything.
5123 Otherwise, look for a parent that tests our value - 1. */
5124
5125 if (! tree_int_cst_lt (low_minus_one, node->low))
5126 return 0;
5127
5128 for (pnode = node->parent; pnode; pnode = pnode->parent)
5129 if (tree_int_cst_equal (low_minus_one, pnode->high))
5130 return 1;
5131
5132 return 0;
5133 }
5134
5135 /* Search the parent sections of the case node tree
5136 to see if a test for the upper bound of NODE would be redundant.
5137 INDEX_TYPE is the type of the index expression.
5138
5139 The instructions to generate the case decision tree are
5140 output in the same order as nodes are processed, so it is
5141 known that if a parent node checks the range of the current
5142 node plus one, the current node is bounded at its upper
5143 span. Thus the test would be redundant. */
5144
5145 static int
5146 node_has_high_bound (case_node_ptr node, tree index_type)
5147 {
5148 tree high_plus_one;
5149 case_node_ptr pnode;
5150
5151 /* If there is no upper bound, obviously no test is needed. */
5152
5153 if (TYPE_MAX_VALUE (index_type) == NULL)
5154 return 1;
5155
5156 /* If the upper bound of this node is the highest value in the type
5157 of the index expression, we need not test against it. */
5158
5159 if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
5160 return 1;
5161
5162 /* If this node has a right branch, the value at the right must be greater
5163 than that at this node, so it cannot be bounded at the top and
5164 we need not bother testing any further. */
5165
5166 if (node->right)
5167 return 0;
5168
5169 high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
5170 node->high, integer_one_node));
5171
5172 /* If the addition above overflowed, we can't verify anything.
5173 Otherwise, look for a parent that tests our value + 1. */
5174
5175 if (! tree_int_cst_lt (node->high, high_plus_one))
5176 return 0;
5177
5178 for (pnode = node->parent; pnode; pnode = pnode->parent)
5179 if (tree_int_cst_equal (high_plus_one, pnode->low))
5180 return 1;
5181
5182 return 0;
5183 }
5184
5185 /* Search the parent sections of the
5186 case node tree to see if both tests for the upper and lower
5187 bounds of NODE would be redundant. */
5188
5189 static int
5190 node_is_bounded (case_node_ptr node, tree index_type)
5191 {
5192 return (node_has_low_bound (node, index_type)
5193 && node_has_high_bound (node, index_type));
5194 }
5195
5196 /* Emit an unconditional jump to LABEL unless it would be dead code. */
5197
5198 static void
5199 emit_jump_if_reachable (rtx label)
5200 {
5201 if (GET_CODE (get_last_insn ()) != BARRIER)
5202 emit_jump (label);
5203 }
5204 \f
5205 /* Emit step-by-step code to select a case for the value of INDEX.
5206 The decision tree thus generated follows the form of the
5207 case-node binary tree NODE, whose nodes represent test conditions.
5208 INDEX_TYPE is the type of the index of the switch.
5209
5210 Care is taken to prune redundant tests from the decision tree
5211 by detecting any boundary conditions already checked by
5212 emitted rtx. (See node_has_high_bound, node_has_low_bound
5213 and node_is_bounded, above.)
5214
5215 Where the test conditions can be shown to be redundant we emit
5216 an unconditional jump to the target code. As a further
5217 optimization, the subordinates of a tree node are examined to
5218 check for bounded nodes. In this case conditional and/or
5219 unconditional jumps as a result of the boundary check for the
5220 current node are arranged to target the subordinates' associated
5221 code for out-of-bound conditions on the current node.
5222
5223 We can assume that when control reaches the code generated here,
5224 the index value has already been compared with the parents
5225 of this node, and determined to be on the same side of each parent
5226 as this node is. Thus, if this node tests for the value 51,
5227 and a parent tested for 52, we don't need to consider
5228 the possibility of a value greater than 51. If another parent
5229 tests for the value 50, then this node need not test anything. */
5230
5231 static void
5232 emit_case_nodes (rtx index, case_node_ptr node, rtx default_label,
5233 tree index_type)
5234 {
5235 /* If INDEX has an unsigned type, we must make unsigned branches. */
5236 int unsignedp = TYPE_UNSIGNED (index_type);
5237 enum machine_mode mode = GET_MODE (index);
5238 enum machine_mode imode = TYPE_MODE (index_type);
5239
5240 /* See if our parents have already tested everything for us.
5241 If they have, emit an unconditional jump for this node. */
5242 if (node_is_bounded (node, index_type))
5243 emit_jump (label_rtx (node->code_label));
5244
5245 else if (tree_int_cst_equal (node->low, node->high))
5246 {
5247 /* Node is single valued. First see if the index expression matches
5248 this node and then check our children, if any. */
5249
5250 do_jump_if_equal (index,
5251 convert_modes (mode, imode,
5252 expand_expr (node->low, NULL_RTX,
5253 VOIDmode, 0),
5254 unsignedp),
5255 label_rtx (node->code_label), unsignedp);
5256
5257 if (node->right != 0 && node->left != 0)
5258 {
5259 /* This node has children on both sides.
5260 Dispatch to one side or the other
5261 by comparing the index value with this node's value.
5262 If one subtree is bounded, check that one first,
5263 so we can avoid real branches in the tree. */
5264
5265 if (node_is_bounded (node->right, index_type))
5266 {
5267 emit_cmp_and_jump_insns (index,
5268 convert_modes
5269 (mode, imode,
5270 expand_expr (node->high, NULL_RTX,
5271 VOIDmode, 0),
5272 unsignedp),
5273 GT, NULL_RTX, mode, unsignedp,
5274 label_rtx (node->right->code_label));
5275 emit_case_nodes (index, node->left, default_label, index_type);
5276 }
5277
5278 else if (node_is_bounded (node->left, index_type))
5279 {
5280 emit_cmp_and_jump_insns (index,
5281 convert_modes
5282 (mode, imode,
5283 expand_expr (node->high, NULL_RTX,
5284 VOIDmode, 0),
5285 unsignedp),
5286 LT, NULL_RTX, mode, unsignedp,
5287 label_rtx (node->left->code_label));
5288 emit_case_nodes (index, node->right, default_label, index_type);
5289 }
5290
5291 /* If both children are single-valued cases with no
5292 children, finish up all the work. This way, we can save
5293 one ordered comparison. */
5294 else if (tree_int_cst_equal (node->right->low, node->right->high)
5295 && node->right->left == 0
5296 && node->right->right == 0
5297 && tree_int_cst_equal (node->left->low, node->left->high)
5298 && node->left->left == 0
5299 && node->left->right == 0)
5300 {
5301 /* Both children are single-valued leaves, so dispatch to each
5302 with a simple equality test, saving an ordered comparison. */

              /* See if the value matches what the right hand side
                 wants.  */
              do_jump_if_equal (index,
                                convert_modes (mode, imode,
                                               expand_expr (node->right->low,
                                                            NULL_RTX,
                                                            VOIDmode, 0),
                                               unsignedp),
                                label_rtx (node->right->code_label),
                                unsignedp);

              /* See if the value matches what the left hand side
                 wants.  */
              do_jump_if_equal (index,
                                convert_modes (mode, imode,
                                               expand_expr (node->left->low,
                                                            NULL_RTX,
                                                            VOIDmode, 0),
                                               unsignedp),
                                label_rtx (node->left->code_label),
                                unsignedp);
            }

          else
            {
              /* Neither node is bounded.  First distinguish the two sides;
                 then emit the code for one side at a time.  */

              tree test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);

              /* See if the value is on the right.  */
              emit_cmp_and_jump_insns (index,
                                       convert_modes
                                       (mode, imode,
                                        expand_expr (node->high, NULL_RTX,
                                                     VOIDmode, 0),
                                        unsignedp),
                                       GT, NULL_RTX, mode, unsignedp,
                                       label_rtx (test_label));

              /* Value must be on the left.
                 Handle the left-hand subtree.  */
              emit_case_nodes (index, node->left, default_label, index_type);
              /* If left-hand subtree does nothing,
                 go to default.  */
              emit_jump_if_reachable (default_label);

              /* Code branches here for the right-hand subtree.  */
              expand_label (test_label);
              emit_case_nodes (index, node->right, default_label, index_type);
            }
        }

      else if (node->right != 0 && node->left == 0)
        {
          /* Here we have a right child but no left so we issue a
             conditional branch to default and process the right child.

             Omit the conditional branch to default if all it would skip
             is a single leaf node on the right; it costs too much space
             to save so little time.  */
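          /* Hedged example: if this node tests 5 and the lone leaf on the
             right tests 6, the node's own equality jump was already emitted
             above, so a single test for 6 suffices; any other value falls
             through toward the default handling.  */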

          if (node->right->right || node->right->left
              || !tree_int_cst_equal (node->right->low, node->right->high))
            {
              if (!node_has_low_bound (node, index_type))
                {
                  emit_cmp_and_jump_insns (index,
                                           convert_modes
                                           (mode, imode,
                                            expand_expr (node->high, NULL_RTX,
                                                         VOIDmode, 0),
                                            unsignedp),
                                           LT, NULL_RTX, mode, unsignedp,
                                           default_label);
                }

              emit_case_nodes (index, node->right, default_label, index_type);
            }
          else
            /* We cannot process node->right normally
               since we haven't ruled out the numbers less than
               this node's value.  So handle node->right explicitly.  */
            do_jump_if_equal (index,
                              convert_modes
                              (mode, imode,
                               expand_expr (node->right->low, NULL_RTX,
                                            VOIDmode, 0),
                               unsignedp),
                              label_rtx (node->right->code_label), unsignedp);
        }

      else if (node->right == 0 && node->left != 0)
        {
          /* Just one subtree, on the left.  */
          if (node->left->left || node->left->right
              || !tree_int_cst_equal (node->left->low, node->left->high))
            {
              if (!node_has_high_bound (node, index_type))
                {
                  emit_cmp_and_jump_insns (index,
                                           convert_modes
                                           (mode, imode,
                                            expand_expr (node->high, NULL_RTX,
                                                         VOIDmode, 0),
                                            unsignedp),
                                           GT, NULL_RTX, mode, unsignedp,
                                           default_label);
                }

              emit_case_nodes (index, node->left, default_label, index_type);
            }
          else
            /* We cannot process node->left normally
               since we haven't ruled out the numbers greater than
               this node's value.  So handle node->left explicitly.  */
            do_jump_if_equal (index,
                              convert_modes
                              (mode, imode,
                               expand_expr (node->left->low, NULL_RTX,
                                            VOIDmode, 0),
                               unsignedp),
                              label_rtx (node->left->code_label), unsignedp);
        }
    }
  else
    {
      /* Node is a range.  These cases are very similar to those for a single
         value, except that we do not start by testing whether this node
         is the one to branch to.  */
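      /* Hedged example: for a range node covering 10 ... 20 with live
         subtrees on both sides, the usual emission is

            if (index > 20) goto right subtree (or its test label);
            if (index >= 10) goto this node's code label;
            ... compares for the left subtree ...

         i.e. one GT branch splits off the right side, then a GE test
         claims the range itself before the left subtree is handled.  */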

      if (node->right != 0 && node->left != 0)
        {
          /* Node has subtrees on both sides.
             If the right-hand subtree is bounded,
             test for it first, since we can go straight there.
             Otherwise, we need to make a branch in the control structure,
             then handle the two subtrees.  */
          tree test_label = 0;

          if (node_is_bounded (node->right, index_type))
            /* Right hand node is fully bounded so we can eliminate any
               testing and branch directly to the target code.  */
            emit_cmp_and_jump_insns (index,
                                     convert_modes
                                     (mode, imode,
                                      expand_expr (node->high, NULL_RTX,
                                                   VOIDmode, 0),
                                      unsignedp),
                                     GT, NULL_RTX, mode, unsignedp,
                                     label_rtx (node->right->code_label));
          else
            {
              /* Right hand node requires testing.
                 Branch to a label where we will handle it later.  */

              test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
              emit_cmp_and_jump_insns (index,
                                       convert_modes
                                       (mode, imode,
                                        expand_expr (node->high, NULL_RTX,
                                                     VOIDmode, 0),
                                        unsignedp),
                                       GT, NULL_RTX, mode, unsignedp,
                                       label_rtx (test_label));
            }

          /* Value belongs to this node or to the left-hand subtree.  */

          emit_cmp_and_jump_insns (index,
                                   convert_modes
                                   (mode, imode,
                                    expand_expr (node->low, NULL_RTX,
                                                 VOIDmode, 0),
                                    unsignedp),
                                   GE, NULL_RTX, mode, unsignedp,
                                   label_rtx (node->code_label));

          /* Handle the left-hand subtree.  */
          emit_case_nodes (index, node->left, default_label, index_type);

          /* If right node had to be handled later, do that now.  */

          if (test_label)
            {
              /* If the left-hand subtree fell through,
                 don't let it fall into the right-hand subtree.  */
              emit_jump_if_reachable (default_label);

              expand_label (test_label);
              emit_case_nodes (index, node->right, default_label, index_type);
            }
        }

      else if (node->right != 0 && node->left == 0)
        {
          /* Deal with values to the left of this node,
             if they are possible.  */
          if (!node_has_low_bound (node, index_type))
            {
              emit_cmp_and_jump_insns (index,
                                       convert_modes
                                       (mode, imode,
                                        expand_expr (node->low, NULL_RTX,
                                                     VOIDmode, 0),
                                        unsignedp),
                                       LT, NULL_RTX, mode, unsignedp,
                                       default_label);
            }

          /* Value belongs to this node or to the right-hand subtree.  */

          emit_cmp_and_jump_insns (index,
                                   convert_modes
                                   (mode, imode,
                                    expand_expr (node->high, NULL_RTX,
                                                 VOIDmode, 0),
                                    unsignedp),
                                   LE, NULL_RTX, mode, unsignedp,
                                   label_rtx (node->code_label));

          emit_case_nodes (index, node->right, default_label, index_type);
        }

      else if (node->right == 0 && node->left != 0)
        {
          /* Deal with values to the right of this node,
             if they are possible.  */
          if (!node_has_high_bound (node, index_type))
            {
              emit_cmp_and_jump_insns (index,
                                       convert_modes
                                       (mode, imode,
                                        expand_expr (node->high, NULL_RTX,
                                                     VOIDmode, 0),
                                        unsignedp),
                                       GT, NULL_RTX, mode, unsignedp,
                                       default_label);
            }

          /* Value belongs to this node or to the left-hand subtree.  */

          emit_cmp_and_jump_insns (index,
                                   convert_modes
                                   (mode, imode,
                                    expand_expr (node->low, NULL_RTX,
                                                 VOIDmode, 0),
                                    unsignedp),
                                   GE, NULL_RTX, mode, unsignedp,
                                   label_rtx (node->code_label));

          emit_case_nodes (index, node->left, default_label, index_type);
        }

      else
        {
          /* Node has no children so we check low and high bounds to remove
             redundant tests.  At most one of the bounds can be known, since
             otherwise this node is bounded--a case tested already.  */
          int high_bound = node_has_high_bound (node, index_type);
          int low_bound = node_has_low_bound (node, index_type);

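          /* Hedged summary of the cases below: with only a low bound
             known, a single `index > high' probe to default suffices;
             with only a high bound, a single `index < low' probe; with
             neither, the subtraction trick below folds both probes into
             one unsigned comparison.  */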
          if (!high_bound && low_bound)
            {
              emit_cmp_and_jump_insns (index,
                                       convert_modes
                                       (mode, imode,
                                        expand_expr (node->high, NULL_RTX,
                                                     VOIDmode, 0),
                                        unsignedp),
                                       GT, NULL_RTX, mode, unsignedp,
                                       default_label);
            }

          else if (!low_bound && high_bound)
            {
              emit_cmp_and_jump_insns (index,
                                       convert_modes
                                       (mode, imode,
                                        expand_expr (node->low, NULL_RTX,
                                                     VOIDmode, 0),
                                        unsignedp),
                                       LT, NULL_RTX, mode, unsignedp,
                                       default_label);
            }
          else if (!low_bound && !high_bound)
            {
              /* Widen LOW and HIGH to the same width as INDEX.  */
              tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
              tree low = build1 (CONVERT_EXPR, type, node->low);
              tree high = build1 (CONVERT_EXPR, type, node->high);
              rtx low_rtx, new_index, new_bound;

              /* Instead of doing two branches, emit one unsigned branch for
                 (index-low) > (high-low).  */
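              /* Worked example (hedged): for the range 3 ... 7, new_index
                 is index - 3 and new_bound is 4.  An index of 8 yields
                 5 > 4, while an index of 2 wraps to a huge unsigned value,
                 so the single unsigned GT branch below routes both
                 underflow and overflow to default_label.  */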
              low_rtx = expand_expr (low, NULL_RTX, mode, 0);
              new_index = expand_simple_binop (mode, MINUS, index, low_rtx,
                                               NULL_RTX, unsignedp,
                                               OPTAB_WIDEN);
              new_bound = expand_expr (fold (build (MINUS_EXPR, type,
                                                    high, low)),
                                       NULL_RTX, mode, 0);

              emit_cmp_and_jump_insns (new_index, new_bound, GT, NULL_RTX,
                                       mode, 1, default_label);
            }

          emit_jump (label_rtx (node->code_label));
        }
    }
}

#include "gt-stmt.h"