gcc/stmt.c
1 /* Expands front end tree to back end RTL for GCC
2 Copyright (C) 1987-2013 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* This file handles the generation of rtl code from tree structure
21 above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
22 The functions whose names start with `expand_' are called by the
23 expander to generate RTL instructions for various kinds of constructs. */
24
25 #include "config.h"
26 #include "system.h"
27 #include "coretypes.h"
28 #include "tm.h"
29
30 #include "rtl.h"
31 #include "hard-reg-set.h"
32 #include "tree.h"
33 #include "tm_p.h"
34 #include "flags.h"
35 #include "except.h"
36 #include "function.h"
37 #include "insn-config.h"
38 #include "expr.h"
39 #include "libfuncs.h"
40 #include "recog.h"
41 #include "machmode.h"
42 #include "diagnostic-core.h"
43 #include "output.h"
44 #include "ggc.h"
45 #include "langhooks.h"
46 #include "predict.h"
47 #include "optabs.h"
48 #include "target.h"
49 #include "gimple.h"
50 #include "regs.h"
51 #include "alloc-pool.h"
52 #include "pretty-print.h"
53 #include "pointer-set.h"
54 #include "params.h"
55 #include "dumpfile.h"
56
57 \f
58 /* Functions and data structures for expanding case statements. */
59
60 /* Case label structure, used to hold info on labels within case
61 statements. We handle "range" labels; for a single-value label
62 as in C, the high and low limits are the same.
63
64 We start with a vector of case nodes sorted in ascending order, and
65 the default label as the last element in the vector. Before expanding
66 to RTL, we transform this vector into a list linked via the RIGHT
67 fields in the case_node struct. Nodes with higher case values are
68 later in the list.
69
70 Switch statements can be output in three forms. A branch table is
71 used if there are more than a few labels and the labels are dense
72 within the range between the smallest and largest case value. If a
73 branch table is used, no further manipulations are done with the case
74 node chain.
75
76 The alternative to the use of a branch table is to generate a series
77 of compare and jump insns. When that is done, we use the LEFT, RIGHT,
78 and PARENT fields to hold a binary tree. Initially the tree is
79 totally unbalanced, with everything on the right. We balance the tree
80 with nodes on the left having lower case values than the parent
81 and nodes on the right having higher values. We then output the tree
82 in order.
83
84 For very small, suitable switch statements, we can generate a series
85 of simple bit test and branches instead. */
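/* As a concrete illustration: a dense switch such as

     switch (c) { case 'a': ...; case 'b': ...; case 'c': ...; default: ...; }

   would normally be emitted as a branch table, while a sparse one such as
   case 1 / case 1000 / case 1000000 is emitted as a balanced tree of
   compare-and-jump insns, and a switch whose few case values all fit in a
   word may instead become a series of bit tests. */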
86
87 struct case_node
88 {
89 struct case_node *left; /* Left son in binary tree */
90 struct case_node *right; /* Right son in binary tree; also node chain */
91 struct case_node *parent; /* Parent of node in binary tree */
92 tree low; /* Lowest index value for this label */
93 tree high; /* Highest index value for this label */
94 tree code_label; /* Label to jump to when node matches */
95 int prob; /* Probability of taking this case. */
96 /* Probability of reaching subtree rooted at this node */
97 int subtree_prob;
98 };
99
100 typedef struct case_node case_node;
101 typedef struct case_node *case_node_ptr;
102
103 extern basic_block label_to_block_fn (struct function *, tree);
104 \f
105 static int n_occurrences (int, const char *);
106 static bool tree_conflicts_with_clobbers_p (tree, HARD_REG_SET *);
107 static void expand_nl_goto_receiver (void);
108 static bool check_operand_nalternatives (tree, tree);
109 static bool check_unique_operand_names (tree, tree, tree);
110 static char *resolve_operand_name_1 (char *, tree, tree, tree);
111 static void expand_null_return_1 (void);
112 static void expand_value_return (rtx);
113 static void balance_case_nodes (case_node_ptr *, case_node_ptr);
114 static int node_has_low_bound (case_node_ptr, tree);
115 static int node_has_high_bound (case_node_ptr, tree);
116 static int node_is_bounded (case_node_ptr, tree);
117 static void emit_case_nodes (rtx, case_node_ptr, rtx, int, tree);
118 \f
119 /* Return the rtx-label that corresponds to a LABEL_DECL,
120 creating it if necessary. */
121
122 rtx
123 label_rtx (tree label)
124 {
125 gcc_assert (TREE_CODE (label) == LABEL_DECL);
126
127 if (!DECL_RTL_SET_P (label))
128 {
129 rtx r = gen_label_rtx ();
130 SET_DECL_RTL (label, r);
131 if (FORCED_LABEL (label) || DECL_NONLOCAL (label))
132 LABEL_PRESERVE_P (r) = 1;
133 }
134
135 return DECL_RTL (label);
136 }
137
138 /* As above, but also put it on the forced-reference list of the
139 function that contains it. */
140 rtx
141 force_label_rtx (tree label)
142 {
143 rtx ref = label_rtx (label);
144 tree function = decl_function_context (label);
145
146 gcc_assert (function);
147
148 forced_labels = gen_rtx_EXPR_LIST (VOIDmode, ref, forced_labels);
149 return ref;
150 }
151
152 /* Add an unconditional jump to LABEL as the next sequential instruction. */
153
154 void
155 emit_jump (rtx label)
156 {
157 do_pending_stack_adjust ();
158 emit_jump_insn (gen_jump (label));
159 emit_barrier ();
160 }
161
162 /* Emit code to jump to the address
163 specified by the pointer expression EXP. */
164
165 void
166 expand_computed_goto (tree exp)
167 {
168 rtx x = expand_normal (exp);
169
170 x = convert_memory_address (Pmode, x);
171
172 do_pending_stack_adjust ();
173 emit_indirect_jump (x);
174 }
175 \f
176 /* Handle goto statements and the labels that they can go to. */
177
178 /* Specify the location in the RTL code of a label LABEL,
179 which is a LABEL_DECL tree node.
180
181 This is used for the kind of label that the user can jump to with a
182 goto statement, and for alternatives of a switch or case statement.
183 RTL labels generated for loops and conditionals don't go through here;
184 they are generated directly at the RTL level, by other functions below.
185
186 Note that this has nothing to do with defining label *names*.
187 Languages vary in how they do that and what that even means. */
188
189 void
190 expand_label (tree label)
191 {
192 rtx label_r = label_rtx (label);
193
194 do_pending_stack_adjust ();
195 emit_label (label_r);
196 if (DECL_NAME (label))
197 LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));
198
199 if (DECL_NONLOCAL (label))
200 {
201 expand_nl_goto_receiver ();
202 nonlocal_goto_handler_labels
203 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
204 nonlocal_goto_handler_labels);
205 }
206
207 if (FORCED_LABEL (label))
208 forced_labels = gen_rtx_EXPR_LIST (VOIDmode, label_r, forced_labels);
209
210 if (DECL_NONLOCAL (label) || FORCED_LABEL (label))
211 maybe_set_first_label_num (label_r);
212 }
213
214 /* Generate RTL code for a `goto' statement with target label LABEL.
215 LABEL should be a LABEL_DECL tree node that was or will later be
216 defined with `expand_label'. */
217
218 void
219 expand_goto (tree label)
220 {
221 #ifdef ENABLE_CHECKING
222 /* Check for a nonlocal goto to a containing function. Should have
223 gotten translated to __builtin_nonlocal_goto. */
224 tree context = decl_function_context (label);
225 gcc_assert (!context || context == current_function_decl);
226 #endif
227
228 emit_jump (label_rtx (label));
229 }
230 \f
231 /* Return the number of times character C occurs in string S. */
232 static int
233 n_occurrences (int c, const char *s)
234 {
235 int n = 0;
236 while (*s)
237 n += (*s++ == c);
238 return n;
239 }
240 \f
241 /* Generate RTL for an asm statement (explicit assembler code).
242 STRING is a STRING_CST node containing the assembler code text,
243 or an ADDR_EXPR containing a STRING_CST. VOL nonzero means the
244 insn is volatile; don't optimize it. */
245
246 static void
247 expand_asm_loc (tree string, int vol, location_t locus)
248 {
249 rtx body;
250
251 if (TREE_CODE (string) == ADDR_EXPR)
252 string = TREE_OPERAND (string, 0);
253
254 body = gen_rtx_ASM_INPUT_loc (VOIDmode,
255 ggc_strdup (TREE_STRING_POINTER (string)),
256 locus);
257
258 MEM_VOLATILE_P (body) = vol;
259
260 emit_insn (body);
261 }
262
263 /* Parse the output constraint pointed to by *CONSTRAINT_P. It is the
264 OPERAND_NUMth output operand, indexed from zero. There are NINPUTS
265 inputs and NOUTPUTS outputs to this extended-asm. Upon return,
266 *ALLOWS_MEM will be TRUE iff the constraint allows the use of a
267 memory operand. Similarly, *ALLOWS_REG will be TRUE iff the
268 constraint allows the use of a register operand. And, *IS_INOUT
269 will be true if the operand is read-write, i.e., if it is used as
270 an input as well as an output. If *CONSTRAINT_P is not in
271 canonical form, it will be made canonical. (Note that `+' will be
272 replaced with `=' as part of this process.)
273
274 Returns TRUE if all went well; FALSE if an error occurred. */
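/* For example, the constraint "+r" is canonicalized here to "=r" with
   *IS_INOUT set to true, while a trailing form such as "r=" is rewritten
   to "=r" after a warning. */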
275
276 bool
277 parse_output_constraint (const char **constraint_p, int operand_num,
278 int ninputs, int noutputs, bool *allows_mem,
279 bool *allows_reg, bool *is_inout)
280 {
281 const char *constraint = *constraint_p;
282 const char *p;
283
284 /* Assume the constraint doesn't allow the use of either a register
285 or memory. */
286 *allows_mem = false;
287 *allows_reg = false;
288
289 /* Allow the `=' or `+' to not be at the beginning of the string,
290 since it wasn't explicitly documented that way, and there is a
291 large body of code that puts it last. Swap the character to
292 the front, so as not to uglify any place else. */
293 p = strchr (constraint, '=');
294 if (!p)
295 p = strchr (constraint, '+');
296
297 /* If the string doesn't contain an `=', issue an error
298 message. */
299 if (!p)
300 {
301 error ("output operand constraint lacks %<=%>");
302 return false;
303 }
304
305 /* If the constraint begins with `+', then the operand is both read
306 from and written to. */
307 *is_inout = (*p == '+');
308
309 /* Canonicalize the output constraint so that it begins with `='. */
310 if (p != constraint || *is_inout)
311 {
312 char *buf;
313 size_t c_len = strlen (constraint);
314
315 if (p != constraint)
316 warning (0, "output constraint %qc for operand %d "
317 "is not at the beginning",
318 *p, operand_num);
319
320 /* Make a copy of the constraint. */
321 buf = XALLOCAVEC (char, c_len + 1);
322 strcpy (buf, constraint);
323 /* Swap the first character and the `=' or `+'. */
324 buf[p - constraint] = buf[0];
325 /* Make sure the first character is an `='. (Until we do this,
326 it might be a `+'.) */
327 buf[0] = '=';
328 /* Replace the constraint with the canonicalized string. */
329 *constraint_p = ggc_alloc_string (buf, c_len);
330 constraint = *constraint_p;
331 }
332
333 /* Loop through the constraint string. */
334 for (p = constraint + 1; *p; p += CONSTRAINT_LEN (*p, p))
335 switch (*p)
336 {
337 case '+':
338 case '=':
339 error ("operand constraint contains incorrectly positioned "
340 "%<+%> or %<=%>");
341 return false;
342
343 case '%':
344 if (operand_num + 1 == ninputs + noutputs)
345 {
346 error ("%<%%%> constraint used with last operand");
347 return false;
348 }
349 break;
350
351 case 'V': case TARGET_MEM_CONSTRAINT: case 'o':
352 *allows_mem = true;
353 break;
354
355 case '?': case '!': case '*': case '&': case '#':
356 case 'E': case 'F': case 'G': case 'H':
357 case 's': case 'i': case 'n':
358 case 'I': case 'J': case 'K': case 'L': case 'M':
359 case 'N': case 'O': case 'P': case ',':
360 break;
361
362 case '0': case '1': case '2': case '3': case '4':
363 case '5': case '6': case '7': case '8': case '9':
364 case '[':
365 error ("matching constraint not valid in output operand");
366 return false;
367
368 case '<': case '>':
369 /* ??? Before flow, auto inc/dec insns are not supposed to exist,
370 excepting those that expand_call created. So match memory
371 and hope. */
372 *allows_mem = true;
373 break;
374
375 case 'g': case 'X':
376 *allows_reg = true;
377 *allows_mem = true;
378 break;
379
380 case 'p': case 'r':
381 *allows_reg = true;
382 break;
383
384 default:
385 if (!ISALPHA (*p))
386 break;
387 if (REG_CLASS_FROM_CONSTRAINT (*p, p) != NO_REGS)
388 *allows_reg = true;
389 #ifdef EXTRA_CONSTRAINT_STR
390 else if (EXTRA_ADDRESS_CONSTRAINT (*p, p))
391 *allows_reg = true;
392 else if (EXTRA_MEMORY_CONSTRAINT (*p, p))
393 *allows_mem = true;
394 else
395 {
396 /* Otherwise we can't assume anything about the nature of
397 the constraint except that it isn't purely registers.
398 Treat it like "g" and hope for the best. */
399 *allows_reg = true;
400 *allows_mem = true;
401 }
402 #endif
403 break;
404 }
405
406 return true;
407 }
408
409 /* Similar, but for input constraints. */
410
411 bool
412 parse_input_constraint (const char **constraint_p, int input_num,
413 int ninputs, int noutputs, int ninout,
414 const char * const * constraints,
415 bool *allows_mem, bool *allows_reg)
416 {
417 const char *constraint = *constraint_p;
418 const char *orig_constraint = constraint;
419 size_t c_len = strlen (constraint);
420 size_t j;
421 bool saw_match = false;
422
423 /* Assume the constraint doesn't allow the use of either
424 a register or memory. */
425 *allows_mem = false;
426 *allows_reg = false;
427
428 /* Make sure constraint has neither `=', `+', nor '&'. */
429
430 for (j = 0; j < c_len; j += CONSTRAINT_LEN (constraint[j], constraint+j))
431 switch (constraint[j])
432 {
433 case '+': case '=': case '&':
434 if (constraint == orig_constraint)
435 {
436 error ("input operand constraint contains %qc", constraint[j]);
437 return false;
438 }
439 break;
440
441 case '%':
442 if (constraint == orig_constraint
443 && input_num + 1 == ninputs - ninout)
444 {
445 error ("%<%%%> constraint used with last operand");
446 return false;
447 }
448 break;
449
450 case 'V': case TARGET_MEM_CONSTRAINT: case 'o':
451 *allows_mem = true;
452 break;
453
454 case '<': case '>':
455 case '?': case '!': case '*': case '#':
456 case 'E': case 'F': case 'G': case 'H':
457 case 's': case 'i': case 'n':
458 case 'I': case 'J': case 'K': case 'L': case 'M':
459 case 'N': case 'O': case 'P': case ',':
460 break;
461
462 /* Whether or not a numeric constraint allows a register is
463 decided by the matching constraint, and so there is no need
464 to do anything special with them. We must handle them in
465 the default case, so that we don't unnecessarily force
466 operands to memory. */
467 case '0': case '1': case '2': case '3': case '4':
468 case '5': case '6': case '7': case '8': case '9':
469 {
470 char *end;
471 unsigned long match;
472
473 saw_match = true;
474
475 match = strtoul (constraint + j, &end, 10);
476 if (match >= (unsigned long) noutputs)
477 {
478 error ("matching constraint references invalid operand number");
479 return false;
480 }
481
482 /* Try and find the real constraint for this dup. Only do this
483 if the matching constraint is the only alternative. */
484 if (*end == '\0'
485 && (j == 0 || (j == 1 && constraint[0] == '%')))
486 {
487 constraint = constraints[match];
488 *constraint_p = constraint;
489 c_len = strlen (constraint);
490 j = 0;
491 /* ??? At the end of the loop, we will skip the first part of
492 the matched constraint. This assumes not only that the
493 other constraint is an output constraint, but also that
494 the '=' or '+' come first. */
495 break;
496 }
497 else
498 j = end - constraint;
499 /* Anticipate increment at end of loop. */
500 j--;
501 }
502 /* Fall through. */
503
504 case 'p': case 'r':
505 *allows_reg = true;
506 break;
507
508 case 'g': case 'X':
509 *allows_reg = true;
510 *allows_mem = true;
511 break;
512
513 default:
514 if (! ISALPHA (constraint[j]))
515 {
516 error ("invalid punctuation %qc in constraint", constraint[j]);
517 return false;
518 }
519 if (REG_CLASS_FROM_CONSTRAINT (constraint[j], constraint + j)
520 != NO_REGS)
521 *allows_reg = true;
522 #ifdef EXTRA_CONSTRAINT_STR
523 else if (EXTRA_ADDRESS_CONSTRAINT (constraint[j], constraint + j))
524 *allows_reg = true;
525 else if (EXTRA_MEMORY_CONSTRAINT (constraint[j], constraint + j))
526 *allows_mem = true;
527 else
528 {
529 /* Otherwise we can't assume anything about the nature of
530 the constraint except that it isn't purely registers.
531 Treat it like "g" and hope for the best. */
532 *allows_reg = true;
533 *allows_mem = true;
534 }
535 #endif
536 break;
537 }
538
539 if (saw_match && !*allows_reg)
540 warning (0, "matching constraint does not allow a register");
541
542 return true;
543 }
544
545 /* Return DECL iff there's an overlap between *REGS and DECL, where DECL
546 can be an asm-declared register. Called via walk_tree. */
547
548 static tree
549 decl_overlaps_hard_reg_set_p (tree *declp, int *walk_subtrees ATTRIBUTE_UNUSED,
550 void *data)
551 {
552 tree decl = *declp;
553 const HARD_REG_SET *const regs = (const HARD_REG_SET *) data;
554
555 if (TREE_CODE (decl) == VAR_DECL)
556 {
557 if (DECL_HARD_REGISTER (decl)
558 && REG_P (DECL_RTL (decl))
559 && REGNO (DECL_RTL (decl)) < FIRST_PSEUDO_REGISTER)
560 {
561 rtx reg = DECL_RTL (decl);
562
563 if (overlaps_hard_reg_set_p (*regs, GET_MODE (reg), REGNO (reg)))
564 return decl;
565 }
566 walk_subtrees = 0;
567 }
568 else if (TYPE_P (decl) || TREE_CODE (decl) == PARM_DECL)
569 walk_subtrees = 0;
570 return NULL_TREE;
571 }
572
573 /* If there is an overlap between *REGS and DECL, return the first overlap
574 found. */
575 tree
576 tree_overlaps_hard_reg_set (tree decl, HARD_REG_SET *regs)
577 {
578 return walk_tree (&decl, decl_overlaps_hard_reg_set_p, regs, NULL);
579 }
580
 581 /* Check for overlap between registers marked in CLOBBERED_REGS and
 582 anything inappropriate in T. Emit an error and return true if an
 583 overlap is found, false otherwise. */
584
585 static bool
586 tree_conflicts_with_clobbers_p (tree t, HARD_REG_SET *clobbered_regs)
587 {
588 /* Conflicts between asm-declared register variables and the clobber
589 list are not allowed. */
590 tree overlap = tree_overlaps_hard_reg_set (t, clobbered_regs);
591
592 if (overlap)
593 {
594 error ("asm-specifier for variable %qE conflicts with asm clobber list",
595 DECL_NAME (overlap));
596
597 /* Reset registerness to stop multiple errors emitted for a single
598 variable. */
599 DECL_REGISTER (overlap) = 0;
600 return true;
601 }
602
603 return false;
604 }
605
606 /* Generate RTL for an asm statement with arguments.
607 STRING is the instruction template.
608 OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
609 Each output or input has an expression in the TREE_VALUE and
610 a tree list in TREE_PURPOSE which in turn contains a constraint
611 name in TREE_VALUE (or NULL_TREE) and a constraint string
612 in TREE_PURPOSE.
613 CLOBBERS is a list of STRING_CST nodes each naming a hard register
614 that is clobbered by this insn.
615
616 Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
617 Some elements of OUTPUTS may be replaced with trees representing temporary
618 values. The caller should copy those temporary values to the originally
619 specified lvalues.
620
621 VOL nonzero means the insn is volatile; don't optimize it. */
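/* As an (x86-flavored, purely illustrative) example, a statement like

     asm volatile ("rep; movsb"
                   : "=D" (dst), "=S" (src), "=c" (n)
                   : "0" (dst), "1" (src), "2" (n)
                   : "memory");

   arrives here with three OUTPUTS, three INPUTS whose matching
   constraints "0"/"1"/"2" tie them to the corresponding outputs, and a
   CLOBBERS list holding the "memory" clobber. */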
622
623 static void
624 expand_asm_operands (tree string, tree outputs, tree inputs,
625 tree clobbers, tree labels, int vol, location_t locus)
626 {
627 rtvec argvec, constraintvec, labelvec;
628 rtx body;
629 int ninputs = list_length (inputs);
630 int noutputs = list_length (outputs);
631 int nlabels = list_length (labels);
632 int ninout;
633 int nclobbers;
634 HARD_REG_SET clobbered_regs;
635 int clobber_conflict_found = 0;
636 tree tail;
637 tree t;
638 int i;
639 /* Vector of RTX's of evaluated output operands. */
640 rtx *output_rtx = XALLOCAVEC (rtx, noutputs);
641 int *inout_opnum = XALLOCAVEC (int, noutputs);
642 rtx *real_output_rtx = XALLOCAVEC (rtx, noutputs);
643 enum machine_mode *inout_mode = XALLOCAVEC (enum machine_mode, noutputs);
644 const char **constraints = XALLOCAVEC (const char *, noutputs + ninputs);
645 int old_generating_concat_p = generating_concat_p;
646
647 /* An ASM with no outputs needs to be treated as volatile, for now. */
648 if (noutputs == 0)
649 vol = 1;
650
651 if (! check_operand_nalternatives (outputs, inputs))
652 return;
653
654 string = resolve_asm_operand_names (string, outputs, inputs, labels);
655
656 /* Collect constraints. */
657 i = 0;
658 for (t = outputs; t ; t = TREE_CHAIN (t), i++)
659 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
660 for (t = inputs; t ; t = TREE_CHAIN (t), i++)
661 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
662
663 /* Sometimes we wish to automatically clobber registers across an asm.
664 Case in point is when the i386 backend moved from cc0 to a hard reg --
665 maintaining source-level compatibility means automatically clobbering
666 the flags register. */
667 clobbers = targetm.md_asm_clobbers (outputs, inputs, clobbers);
668
669 /* Count the number of meaningful clobbered registers, ignoring what
670 we would ignore later. */
671 nclobbers = 0;
672 CLEAR_HARD_REG_SET (clobbered_regs);
673 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
674 {
675 const char *regname;
676 int nregs;
677
678 if (TREE_VALUE (tail) == error_mark_node)
679 return;
680 regname = TREE_STRING_POINTER (TREE_VALUE (tail));
681
682 i = decode_reg_name_and_count (regname, &nregs);
683 if (i == -4)
684 ++nclobbers;
685 else if (i == -2)
686 error ("unknown register name %qs in %<asm%>", regname);
687
688 /* Mark clobbered registers. */
689 if (i >= 0)
690 {
691 int reg;
692
693 for (reg = i; reg < i + nregs; reg++)
694 {
695 ++nclobbers;
696
697 /* Clobbering the PIC register is an error. */
698 if (reg == (int) PIC_OFFSET_TABLE_REGNUM)
699 {
700 error ("PIC register clobbered by %qs in %<asm%>", regname);
701 return;
702 }
703
704 SET_HARD_REG_BIT (clobbered_regs, reg);
705 }
706 }
707 }
708
709 /* First pass over inputs and outputs checks validity and sets
710 mark_addressable if needed. */
711
712 ninout = 0;
713 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
714 {
715 tree val = TREE_VALUE (tail);
716 tree type = TREE_TYPE (val);
717 const char *constraint;
718 bool is_inout;
719 bool allows_reg;
720 bool allows_mem;
721
722 /* If there's an erroneous arg, emit no insn. */
723 if (type == error_mark_node)
724 return;
725
726 /* Try to parse the output constraint. If that fails, there's
727 no point in going further. */
728 constraint = constraints[i];
729 if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
730 &allows_mem, &allows_reg, &is_inout))
731 return;
732
733 if (! allows_reg
734 && (allows_mem
735 || is_inout
736 || (DECL_P (val)
737 && REG_P (DECL_RTL (val))
738 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
739 mark_addressable (val);
740
741 if (is_inout)
742 ninout++;
743 }
744
745 ninputs += ninout;
746 if (ninputs + noutputs > MAX_RECOG_OPERANDS)
747 {
748 error ("more than %d operands in %<asm%>", MAX_RECOG_OPERANDS);
749 return;
750 }
751
752 for (i = 0, tail = inputs; tail; i++, tail = TREE_CHAIN (tail))
753 {
754 bool allows_reg, allows_mem;
755 const char *constraint;
756
757 /* If there's an erroneous arg, emit no insn, because the ASM_INPUT
758 would get VOIDmode and that could cause a crash in reload. */
759 if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
760 return;
761
762 constraint = constraints[i + noutputs];
763 if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
764 constraints, &allows_mem, &allows_reg))
765 return;
766
767 if (! allows_reg && allows_mem)
768 mark_addressable (TREE_VALUE (tail));
769 }
770
771 /* Second pass evaluates arguments. */
772
773 /* Make sure stack is consistent for asm goto. */
774 if (nlabels > 0)
775 do_pending_stack_adjust ();
776
777 ninout = 0;
778 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
779 {
780 tree val = TREE_VALUE (tail);
781 tree type = TREE_TYPE (val);
782 bool is_inout;
783 bool allows_reg;
784 bool allows_mem;
785 rtx op;
786 bool ok;
787
788 ok = parse_output_constraint (&constraints[i], i, ninputs,
789 noutputs, &allows_mem, &allows_reg,
790 &is_inout);
791 gcc_assert (ok);
792
793 /* If an output operand is not a decl or indirect ref and our constraint
794 allows a register, make a temporary to act as an intermediate.
795 Make the asm insn write into that, then our caller will copy it to
796 the real output operand. Likewise for promoted variables. */
797
798 generating_concat_p = 0;
799
800 real_output_rtx[i] = NULL_RTX;
801 if ((TREE_CODE (val) == INDIRECT_REF
802 && allows_mem)
803 || (DECL_P (val)
804 && (allows_mem || REG_P (DECL_RTL (val)))
805 && ! (REG_P (DECL_RTL (val))
806 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
807 || ! allows_reg
808 || is_inout)
809 {
810 op = expand_expr (val, NULL_RTX, VOIDmode, EXPAND_WRITE);
811 if (MEM_P (op))
812 op = validize_mem (op);
813
814 if (! allows_reg && !MEM_P (op))
815 error ("output number %d not directly addressable", i);
816 if ((! allows_mem && MEM_P (op))
817 || GET_CODE (op) == CONCAT)
818 {
819 real_output_rtx[i] = op;
820 op = gen_reg_rtx (GET_MODE (op));
821 if (is_inout)
822 emit_move_insn (op, real_output_rtx[i]);
823 }
824 }
825 else
826 {
827 op = assign_temp (type, 0, 1);
828 op = validize_mem (op);
829 if (!MEM_P (op) && TREE_CODE (TREE_VALUE (tail)) == SSA_NAME)
830 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (TREE_VALUE (tail)), op);
831 TREE_VALUE (tail) = make_tree (type, op);
832 }
833 output_rtx[i] = op;
834
835 generating_concat_p = old_generating_concat_p;
836
837 if (is_inout)
838 {
839 inout_mode[ninout] = TYPE_MODE (type);
840 inout_opnum[ninout++] = i;
841 }
842
843 if (tree_conflicts_with_clobbers_p (val, &clobbered_regs))
844 clobber_conflict_found = 1;
845 }
846
847 /* Make vectors for the expression-rtx, constraint strings,
848 and named operands. */
849
850 argvec = rtvec_alloc (ninputs);
851 constraintvec = rtvec_alloc (ninputs);
852 labelvec = rtvec_alloc (nlabels);
853
854 body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
855 : GET_MODE (output_rtx[0])),
856 ggc_strdup (TREE_STRING_POINTER (string)),
857 empty_string, 0, argvec, constraintvec,
858 labelvec, locus);
859
860 MEM_VOLATILE_P (body) = vol;
861
862 /* Eval the inputs and put them into ARGVEC.
863 Put their constraints into ASM_INPUTs and store in CONSTRAINTS. */
864
865 for (i = 0, tail = inputs; tail; tail = TREE_CHAIN (tail), ++i)
866 {
867 bool allows_reg, allows_mem;
868 const char *constraint;
869 tree val, type;
870 rtx op;
871 bool ok;
872
873 constraint = constraints[i + noutputs];
874 ok = parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
875 constraints, &allows_mem, &allows_reg);
876 gcc_assert (ok);
877
878 generating_concat_p = 0;
879
880 val = TREE_VALUE (tail);
881 type = TREE_TYPE (val);
882 /* EXPAND_INITIALIZER will not generate code for valid initializer
883 constants, but will still generate code for other types of operand.
884 This is the behavior we want for constant constraints. */
885 op = expand_expr (val, NULL_RTX, VOIDmode,
886 allows_reg ? EXPAND_NORMAL
887 : allows_mem ? EXPAND_MEMORY
888 : EXPAND_INITIALIZER);
889
890 /* Never pass a CONCAT to an ASM. */
891 if (GET_CODE (op) == CONCAT)
892 op = force_reg (GET_MODE (op), op);
893 else if (MEM_P (op))
894 op = validize_mem (op);
895
896 if (asm_operand_ok (op, constraint, NULL) <= 0)
897 {
898 if (allows_reg && TYPE_MODE (type) != BLKmode)
899 op = force_reg (TYPE_MODE (type), op);
900 else if (!allows_mem)
901 warning (0, "asm operand %d probably doesn%'t match constraints",
902 i + noutputs);
903 else if (MEM_P (op))
904 {
 905 /* We won't recognize either volatile memory or memory
 906 with a queued address as a valid memory_operand
 907 at this point. Ignore it: clearly this *is* a memory. */
908 }
909 else
910 gcc_unreachable ();
911 }
912
913 generating_concat_p = old_generating_concat_p;
914 ASM_OPERANDS_INPUT (body, i) = op;
915
916 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
917 = gen_rtx_ASM_INPUT (TYPE_MODE (type),
918 ggc_strdup (constraints[i + noutputs]));
919
920 if (tree_conflicts_with_clobbers_p (val, &clobbered_regs))
921 clobber_conflict_found = 1;
922 }
923
924 /* Protect all the operands from the queue now that they have all been
925 evaluated. */
926
927 generating_concat_p = 0;
928
929 /* For in-out operands, copy output rtx to input rtx. */
930 for (i = 0; i < ninout; i++)
931 {
932 int j = inout_opnum[i];
933 char buffer[16];
934
935 ASM_OPERANDS_INPUT (body, ninputs - ninout + i)
936 = output_rtx[j];
937
938 sprintf (buffer, "%d", j);
939 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, ninputs - ninout + i)
940 = gen_rtx_ASM_INPUT (inout_mode[i], ggc_strdup (buffer));
941 }
942
943 /* Copy labels to the vector. */
944 for (i = 0, tail = labels; i < nlabels; ++i, tail = TREE_CHAIN (tail))
945 ASM_OPERANDS_LABEL (body, i)
946 = gen_rtx_LABEL_REF (Pmode, label_rtx (TREE_VALUE (tail)));
947
948 generating_concat_p = old_generating_concat_p;
949
950 /* Now, for each output, construct an rtx
951 (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
952 ARGVEC CONSTRAINTS OPNAMES))
953 If there is more than one, put them inside a PARALLEL. */
954
955 if (nlabels > 0 && nclobbers == 0)
956 {
957 gcc_assert (noutputs == 0);
958 emit_jump_insn (body);
959 }
960 else if (noutputs == 0 && nclobbers == 0)
961 {
962 /* No output operands: put in a raw ASM_OPERANDS rtx. */
963 emit_insn (body);
964 }
965 else if (noutputs == 1 && nclobbers == 0)
966 {
967 ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = ggc_strdup (constraints[0]);
968 emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
969 }
970 else
971 {
972 rtx obody = body;
973 int num = noutputs;
974
975 if (num == 0)
976 num = 1;
977
978 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
979
980 /* For each output operand, store a SET. */
981 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
982 {
983 XVECEXP (body, 0, i)
984 = gen_rtx_SET (VOIDmode,
985 output_rtx[i],
986 gen_rtx_ASM_OPERANDS
987 (GET_MODE (output_rtx[i]),
988 ggc_strdup (TREE_STRING_POINTER (string)),
989 ggc_strdup (constraints[i]),
990 i, argvec, constraintvec, labelvec, locus));
991
992 MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
993 }
994
995 /* If there are no outputs (but there are some clobbers)
996 store the bare ASM_OPERANDS into the PARALLEL. */
997
998 if (i == 0)
999 XVECEXP (body, 0, i++) = obody;
1000
1001 /* Store (clobber REG) for each clobbered register specified. */
1002
1003 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1004 {
1005 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1006 int reg, nregs;
1007 int j = decode_reg_name_and_count (regname, &nregs);
1008 rtx clobbered_reg;
1009
1010 if (j < 0)
1011 {
1012 if (j == -3) /* `cc', which is not a register */
1013 continue;
1014
1015 if (j == -4) /* `memory', don't cache memory across asm */
1016 {
1017 XVECEXP (body, 0, i++)
1018 = gen_rtx_CLOBBER (VOIDmode,
1019 gen_rtx_MEM
1020 (BLKmode,
1021 gen_rtx_SCRATCH (VOIDmode)));
1022 continue;
1023 }
1024
1025 /* Ignore unknown register, error already signaled. */
1026 continue;
1027 }
1028
1029 for (reg = j; reg < j + nregs; reg++)
1030 {
 1031 /* Use QImode since that's guaranteed to clobber just
 1032 one reg. */
1033 clobbered_reg = gen_rtx_REG (QImode, reg);
1034
 1035 /* Do a sanity check for overlap between the clobbers and,
 1036 respectively, the inputs and the outputs that has not been
 1037 handled. Such overlap should have been detected and
 1038 reported above. */
1039 if (!clobber_conflict_found)
1040 {
1041 int opno;
1042
1043 /* We test the old body (obody) contents to avoid
1044 tripping over the under-construction body. */
1045 for (opno = 0; opno < noutputs; opno++)
1046 if (reg_overlap_mentioned_p (clobbered_reg,
1047 output_rtx[opno]))
1048 internal_error
1049 ("asm clobber conflict with output operand");
1050
1051 for (opno = 0; opno < ninputs - ninout; opno++)
1052 if (reg_overlap_mentioned_p (clobbered_reg,
1053 ASM_OPERANDS_INPUT (obody,
1054 opno)))
1055 internal_error
1056 ("asm clobber conflict with input operand");
1057 }
1058
1059 XVECEXP (body, 0, i++)
1060 = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
1061 }
1062 }
1063
1064 if (nlabels > 0)
1065 emit_jump_insn (body);
1066 else
1067 emit_insn (body);
1068 }
1069
1070 /* For any outputs that needed reloading into registers, spill them
1071 back to where they belong. */
1072 for (i = 0; i < noutputs; ++i)
1073 if (real_output_rtx[i])
1074 emit_move_insn (real_output_rtx[i], output_rtx[i]);
1075
1076 crtl->has_asm_statement = 1;
1077 free_temp_slots ();
1078 }
1079
1080 void
1081 expand_asm_stmt (gimple stmt)
1082 {
1083 int noutputs;
1084 tree outputs, tail, t;
1085 tree *o;
1086 size_t i, n;
1087 const char *s;
1088 tree str, out, in, cl, labels;
1089 location_t locus = gimple_location (stmt);
1090
1091 /* Meh... convert the gimple asm operands into real tree lists.
1092 Eventually we should make all routines work on the vectors instead
1093 of relying on TREE_CHAIN. */
1094 out = NULL_TREE;
1095 n = gimple_asm_noutputs (stmt);
1096 if (n > 0)
1097 {
1098 t = out = gimple_asm_output_op (stmt, 0);
1099 for (i = 1; i < n; i++)
1100 t = TREE_CHAIN (t) = gimple_asm_output_op (stmt, i);
1101 }
1102
1103 in = NULL_TREE;
1104 n = gimple_asm_ninputs (stmt);
1105 if (n > 0)
1106 {
1107 t = in = gimple_asm_input_op (stmt, 0);
1108 for (i = 1; i < n; i++)
1109 t = TREE_CHAIN (t) = gimple_asm_input_op (stmt, i);
1110 }
1111
1112 cl = NULL_TREE;
1113 n = gimple_asm_nclobbers (stmt);
1114 if (n > 0)
1115 {
1116 t = cl = gimple_asm_clobber_op (stmt, 0);
1117 for (i = 1; i < n; i++)
1118 t = TREE_CHAIN (t) = gimple_asm_clobber_op (stmt, i);
1119 }
1120
1121 labels = NULL_TREE;
1122 n = gimple_asm_nlabels (stmt);
1123 if (n > 0)
1124 {
1125 t = labels = gimple_asm_label_op (stmt, 0);
1126 for (i = 1; i < n; i++)
1127 t = TREE_CHAIN (t) = gimple_asm_label_op (stmt, i);
1128 }
1129
1130 s = gimple_asm_string (stmt);
1131 str = build_string (strlen (s), s);
1132
1133 if (gimple_asm_input_p (stmt))
1134 {
1135 expand_asm_loc (str, gimple_asm_volatile_p (stmt), locus);
1136 return;
1137 }
1138
1139 outputs = out;
1140 noutputs = gimple_asm_noutputs (stmt);
1141 /* o[I] is the place that output number I should be written. */
1142 o = (tree *) alloca (noutputs * sizeof (tree));
1143
1144 /* Record the contents of OUTPUTS before it is modified. */
1145 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1146 o[i] = TREE_VALUE (tail);
1147
1148 /* Generate the ASM_OPERANDS insn; store into the TREE_VALUEs of
1149 OUTPUTS some trees for where the values were actually stored. */
1150 expand_asm_operands (str, outputs, in, cl, labels,
1151 gimple_asm_volatile_p (stmt), locus);
1152
1153 /* Copy all the intermediate outputs into the specified outputs. */
1154 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1155 {
1156 if (o[i] != TREE_VALUE (tail))
1157 {
1158 expand_assignment (o[i], TREE_VALUE (tail), false);
1159 free_temp_slots ();
1160
1161 /* Restore the original value so that it's correct the next
1162 time we expand this function. */
1163 TREE_VALUE (tail) = o[i];
1164 }
1165 }
1166 }
1167
1168 /* A subroutine of expand_asm_operands. Check that all operands have
1169 the same number of alternatives. Return true if so. */
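/* For example, the constraints "=r,m" and "ri,m" each describe two
   alternatives and so may appear together in one asm, whereas mixing
   "=r,m" with a plain "r" would be rejected here. */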
1170
1171 static bool
1172 check_operand_nalternatives (tree outputs, tree inputs)
1173 {
1174 if (outputs || inputs)
1175 {
1176 tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
1177 int nalternatives
1178 = n_occurrences (',', TREE_STRING_POINTER (TREE_VALUE (tmp)));
1179 tree next = inputs;
1180
1181 if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
1182 {
1183 error ("too many alternatives in %<asm%>");
1184 return false;
1185 }
1186
1187 tmp = outputs;
1188 while (tmp)
1189 {
1190 const char *constraint
1191 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (tmp)));
1192
1193 if (n_occurrences (',', constraint) != nalternatives)
1194 {
1195 error ("operand constraints for %<asm%> differ "
1196 "in number of alternatives");
1197 return false;
1198 }
1199
1200 if (TREE_CHAIN (tmp))
1201 tmp = TREE_CHAIN (tmp);
1202 else
1203 tmp = next, next = 0;
1204 }
1205 }
1206
1207 return true;
1208 }
1209
 1210 /* A subroutine of expand_asm_operands. Check that all operand names
 1211 are unique. Return true if so. The names are STRING_CST nodes
 1212 rather than shared identifiers, so they are compared with
 1213 simple_cst_equal rather than by pointer. */
1214
1215 static bool
1216 check_unique_operand_names (tree outputs, tree inputs, tree labels)
1217 {
1218 tree i, j, i_name = NULL_TREE;
1219
1220 for (i = outputs; i ; i = TREE_CHAIN (i))
1221 {
1222 i_name = TREE_PURPOSE (TREE_PURPOSE (i));
1223 if (! i_name)
1224 continue;
1225
1226 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
1227 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1228 goto failure;
1229 }
1230
1231 for (i = inputs; i ; i = TREE_CHAIN (i))
1232 {
1233 i_name = TREE_PURPOSE (TREE_PURPOSE (i));
1234 if (! i_name)
1235 continue;
1236
1237 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
1238 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1239 goto failure;
1240 for (j = outputs; j ; j = TREE_CHAIN (j))
1241 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1242 goto failure;
1243 }
1244
1245 for (i = labels; i ; i = TREE_CHAIN (i))
1246 {
1247 i_name = TREE_PURPOSE (i);
1248 if (! i_name)
1249 continue;
1250
1251 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
1252 if (simple_cst_equal (i_name, TREE_PURPOSE (j)))
1253 goto failure;
1254 for (j = inputs; j ; j = TREE_CHAIN (j))
1255 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1256 goto failure;
1257 }
1258
1259 return true;
1260
1261 failure:
1262 error ("duplicate asm operand name %qs", TREE_STRING_POINTER (i_name));
1263 return false;
1264 }
1265
1266 /* A subroutine of expand_asm_operands. Resolve the names of the operands
1267 in *POUTPUTS and *PINPUTS to numbers, and replace the name expansions in
1268 STRING and in the constraints to those numbers. */
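/* For example, given

     asm ("add %[sum], %[a], %[b]"
          : [sum] "=r" (s) : [a] "r" (x), [b] "r" (y));

   the template is rewritten to "add %0, %1, %2", and any [name]
   references inside the constraint strings are resolved to the same
   operand numbers. */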
1269
1270 tree
1271 resolve_asm_operand_names (tree string, tree outputs, tree inputs, tree labels)
1272 {
1273 char *buffer;
1274 char *p;
1275 const char *c;
1276 tree t;
1277
1278 check_unique_operand_names (outputs, inputs, labels);
1279
1280 /* Substitute [<name>] in input constraint strings. There should be no
1281 named operands in output constraints. */
1282 for (t = inputs; t ; t = TREE_CHAIN (t))
1283 {
1284 c = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
1285 if (strchr (c, '[') != NULL)
1286 {
1287 p = buffer = xstrdup (c);
1288 while ((p = strchr (p, '[')) != NULL)
1289 p = resolve_operand_name_1 (p, outputs, inputs, NULL);
1290 TREE_VALUE (TREE_PURPOSE (t))
1291 = build_string (strlen (buffer), buffer);
1292 free (buffer);
1293 }
1294 }
1295
1296 /* Now check for any needed substitutions in the template. */
1297 c = TREE_STRING_POINTER (string);
1298 while ((c = strchr (c, '%')) != NULL)
1299 {
1300 if (c[1] == '[')
1301 break;
1302 else if (ISALPHA (c[1]) && c[2] == '[')
1303 break;
1304 else
1305 {
1306 c += 1 + (c[1] == '%');
1307 continue;
1308 }
1309 }
1310
1311 if (c)
1312 {
1313 /* OK, we need to make a copy so we can perform the substitutions.
1314 Assume that we will not need extra space--we get to remove '['
1315 and ']', which means we cannot have a problem until we have more
1316 than 999 operands. */
1317 buffer = xstrdup (TREE_STRING_POINTER (string));
1318 p = buffer + (c - TREE_STRING_POINTER (string));
1319
1320 while ((p = strchr (p, '%')) != NULL)
1321 {
1322 if (p[1] == '[')
1323 p += 1;
1324 else if (ISALPHA (p[1]) && p[2] == '[')
1325 p += 2;
1326 else
1327 {
1328 p += 1 + (p[1] == '%');
1329 continue;
1330 }
1331
1332 p = resolve_operand_name_1 (p, outputs, inputs, labels);
1333 }
1334
1335 string = build_string (strlen (buffer), buffer);
1336 free (buffer);
1337 }
1338
1339 return string;
1340 }
1341
1342 /* A subroutine of resolve_operand_names. P points to the '[' for a
1343 potential named operand of the form [<name>]. In place, replace
1344 the name and brackets with a number. Return a pointer to the
1345 balance of the string after substitution. */
1346
1347 static char *
1348 resolve_operand_name_1 (char *p, tree outputs, tree inputs, tree labels)
1349 {
1350 char *q;
1351 int op;
1352 tree t;
1353
1354 /* Collect the operand name. */
1355 q = strchr (++p, ']');
1356 if (!q)
1357 {
1358 error ("missing close brace for named operand");
1359 return strchr (p, '\0');
1360 }
1361 *q = '\0';
1362
1363 /* Resolve the name to a number. */
1364 for (op = 0, t = outputs; t ; t = TREE_CHAIN (t), op++)
1365 {
1366 tree name = TREE_PURPOSE (TREE_PURPOSE (t));
1367 if (name && strcmp (TREE_STRING_POINTER (name), p) == 0)
1368 goto found;
1369 }
1370 for (t = inputs; t ; t = TREE_CHAIN (t), op++)
1371 {
1372 tree name = TREE_PURPOSE (TREE_PURPOSE (t));
1373 if (name && strcmp (TREE_STRING_POINTER (name), p) == 0)
1374 goto found;
1375 }
1376 for (t = labels; t ; t = TREE_CHAIN (t), op++)
1377 {
1378 tree name = TREE_PURPOSE (t);
1379 if (name && strcmp (TREE_STRING_POINTER (name), p) == 0)
1380 goto found;
1381 }
1382
1383 error ("undefined named operand %qs", identifier_to_locale (p));
1384 op = 0;
1385
1386 found:
1387 /* Replace the name with the number. Unfortunately, not all libraries
1388 get the return value of sprintf correct, so search for the end of the
1389 generated string by hand. */
1390 sprintf (--p, "%d", op);
1391 p = strchr (p, '\0');
1392
1393 /* Verify the no extra buffer space assumption. */
1394 gcc_assert (p <= q);
1395
1396 /* Shift the rest of the buffer down to fill the gap. */
1397 memmove (p, q + 1, strlen (q + 1) + 1);
1398
1399 return p;
1400 }
1401 \f
1402 /* Generate RTL to return from the current function, with no value.
1403 (That is, we do not do anything about returning any value.) */
1404
1405 void
1406 expand_null_return (void)
1407 {
1408 /* If this function was declared to return a value, but we
1409 didn't, clobber the return registers so that they are not
1410 propagated live to the rest of the function. */
1411 clobber_return_register ();
1412
1413 expand_null_return_1 ();
1414 }
1415
1416 /* Generate RTL to return directly from the current function.
1417 (That is, we bypass any return value.) */
1418
1419 void
1420 expand_naked_return (void)
1421 {
1422 rtx end_label;
1423
1424 clear_pending_stack_adjust ();
1425 do_pending_stack_adjust ();
1426
1427 end_label = naked_return_label;
1428 if (end_label == 0)
1429 end_label = naked_return_label = gen_label_rtx ();
1430
1431 emit_jump (end_label);
1432 }
1433
1434 /* Generate RTL to return from the current function, with value VAL. */
1435
1436 static void
1437 expand_value_return (rtx val)
1438 {
1439 /* Copy the value to the return location unless it's already there. */
1440
1441 tree decl = DECL_RESULT (current_function_decl);
1442 rtx return_reg = DECL_RTL (decl);
1443 if (return_reg != val)
1444 {
1445 tree funtype = TREE_TYPE (current_function_decl);
1446 tree type = TREE_TYPE (decl);
1447 int unsignedp = TYPE_UNSIGNED (type);
1448 enum machine_mode old_mode = DECL_MODE (decl);
1449 enum machine_mode mode;
1450 if (DECL_BY_REFERENCE (decl))
1451 mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 2);
1452 else
1453 mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 1);
1454
1455 if (mode != old_mode)
1456 val = convert_modes (mode, old_mode, val, unsignedp);
1457
1458 if (GET_CODE (return_reg) == PARALLEL)
1459 emit_group_load (return_reg, val, type, int_size_in_bytes (type));
1460 else
1461 emit_move_insn (return_reg, val);
1462 }
1463
1464 expand_null_return_1 ();
1465 }
1466
1467 /* Output a return with no value. */
1468
1469 static void
1470 expand_null_return_1 (void)
1471 {
1472 clear_pending_stack_adjust ();
1473 do_pending_stack_adjust ();
1474 emit_jump (return_label);
1475 }
1476 \f
1477 /* Generate RTL to evaluate the expression RETVAL and return it
1478 from the current function. */
1479
1480 void
1481 expand_return (tree retval)
1482 {
1483 rtx result_rtl;
1484 rtx val = 0;
1485 tree retval_rhs;
1486
1487 /* If function wants no value, give it none. */
1488 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
1489 {
1490 expand_normal (retval);
1491 expand_null_return ();
1492 return;
1493 }
1494
1495 if (retval == error_mark_node)
1496 {
1497 /* Treat this like a return of no value from a function that
1498 returns a value. */
1499 expand_null_return ();
1500 return;
1501 }
1502 else if ((TREE_CODE (retval) == MODIFY_EXPR
1503 || TREE_CODE (retval) == INIT_EXPR)
1504 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
1505 retval_rhs = TREE_OPERAND (retval, 1);
1506 else
1507 retval_rhs = retval;
1508
1509 result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
1510
1511 /* If we are returning the RESULT_DECL, then the value has already
1512 been stored into it, so we don't have to do anything special. */
1513 if (TREE_CODE (retval_rhs) == RESULT_DECL)
1514 expand_value_return (result_rtl);
1515
1516 /* If the result is an aggregate that is being returned in one (or more)
1517 registers, load the registers here. */
1518
1519 else if (retval_rhs != 0
1520 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
1521 && REG_P (result_rtl))
1522 {
1523 val = copy_blkmode_to_reg (GET_MODE (result_rtl), retval_rhs);
1524 if (val)
1525 {
1526 /* Use the mode of the result value on the return register. */
1527 PUT_MODE (result_rtl, GET_MODE (val));
1528 expand_value_return (val);
1529 }
1530 else
1531 expand_null_return ();
1532 }
1533 else if (retval_rhs != 0
1534 && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
1535 && (REG_P (result_rtl)
1536 || (GET_CODE (result_rtl) == PARALLEL)))
1537 {
1538 /* Calculate the return value into a temporary (usually a pseudo
1539 reg). */
1540 tree ot = TREE_TYPE (DECL_RESULT (current_function_decl));
1541 tree nt = build_qualified_type (ot, TYPE_QUALS (ot) | TYPE_QUAL_CONST);
1542
1543 val = assign_temp (nt, 0, 1);
1544 val = expand_expr (retval_rhs, val, GET_MODE (val), EXPAND_NORMAL);
1545 val = force_not_mem (val);
1546 /* Return the calculated value. */
1547 expand_value_return (val);
1548 }
1549 else
1550 {
1551 /* No hard reg used; calculate value into hard return reg. */
1552 expand_expr (retval, const0_rtx, VOIDmode, EXPAND_NORMAL);
1553 expand_value_return (result_rtl);
1554 }
1555 }
1556 \f
1557 /* Emit code to restore vital registers at the beginning of a nonlocal goto
1558 handler. */
1559 static void
1560 expand_nl_goto_receiver (void)
1561 {
1562 rtx chain;
1563
 1564 /* We clobber the FP when we get here, so we have to make sure it's
 1565 marked as used by this function. */
1566 emit_use (hard_frame_pointer_rtx);
1567
1568 /* Mark the static chain as clobbered here so life information
1569 doesn't get messed up for it. */
1570 chain = targetm.calls.static_chain (current_function_decl, true);
1571 if (chain && REG_P (chain))
1572 emit_clobber (chain);
1573
1574 #ifdef HAVE_nonlocal_goto
1575 if (! HAVE_nonlocal_goto)
1576 #endif
1577 /* First adjust our frame pointer to its actual value. It was
1578 previously set to the start of the virtual area corresponding to
1579 the stacked variables when we branched here and now needs to be
1580 adjusted to the actual hardware fp value.
1581
 1582 Assignments to virtual registers are converted by
1583 instantiate_virtual_regs into the corresponding assignment
1584 to the underlying register (fp in this case) that makes
1585 the original assignment true.
1586 So the following insn will actually be
1587 decrementing fp by STARTING_FRAME_OFFSET. */
1588 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
1589
1590 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
1591 if (fixed_regs[ARG_POINTER_REGNUM])
1592 {
1593 #ifdef ELIMINABLE_REGS
1594 /* If the argument pointer can be eliminated in favor of the
1595 frame pointer, we don't need to restore it. We assume here
1596 that if such an elimination is present, it can always be used.
1597 This is the case on all known machines; if we don't make this
1598 assumption, we do unnecessary saving on many machines. */
1599 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
1600 size_t i;
1601
1602 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
1603 if (elim_regs[i].from == ARG_POINTER_REGNUM
1604 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
1605 break;
1606
1607 if (i == ARRAY_SIZE (elim_regs))
1608 #endif
1609 {
1610 /* Now restore our arg pointer from the address at which it
1611 was saved in our stack frame. */
1612 emit_move_insn (crtl->args.internal_arg_pointer,
1613 copy_to_reg (get_arg_pointer_save_area ()));
1614 }
1615 }
1616 #endif
1617
1618 #ifdef HAVE_nonlocal_goto_receiver
1619 if (HAVE_nonlocal_goto_receiver)
1620 emit_insn (gen_nonlocal_goto_receiver ());
1621 #endif
1622
1623 /* We must not allow the code we just generated to be reordered by
1624 scheduling. Specifically, the update of the frame pointer must
1625 happen immediately, not later. */
1626 emit_insn (gen_blockage ());
1627 }
1628 \f
 1629 /* Emit code to save the current value of the stack pointer. */
1630 rtx
1631 expand_stack_save (void)
1632 {
1633 rtx ret = NULL_RTX;
1634
1635 do_pending_stack_adjust ();
1636 emit_stack_save (SAVE_BLOCK, &ret);
1637 return ret;
1638 }
1639
 1640 /* Emit code to restore the saved value of the stack pointer. */
1641 void
1642 expand_stack_restore (tree var)
1643 {
1644 rtx prev, sa = expand_normal (var);
1645
1646 sa = convert_memory_address (Pmode, sa);
1647
1648 prev = get_last_insn ();
1649 emit_stack_restore (SAVE_BLOCK, sa);
1650 fixup_args_size_notes (prev, get_last_insn (), 0);
1651 }
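/* These two routines are normally used as a pair: the middle end emits
   a stack-save before a block containing a VLA or alloca and a matching
   stack-restore at its end, and those operations reach RTL expansion
   through expand_stack_save and expand_stack_restore. */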
1652
1653 /* Generate code to jump to LABEL if OP0 and OP1 are equal in mode MODE. PROB
1654 is the probability of jumping to LABEL. */
1655 static void
1656 do_jump_if_equal (enum machine_mode mode, rtx op0, rtx op1, rtx label,
1657 int unsignedp, int prob)
1658 {
1659 gcc_assert (prob <= REG_BR_PROB_BASE);
1660 do_compare_rtx_and_jump (op0, op1, EQ, unsignedp, mode,
1661 NULL_RTX, NULL_RTX, label, prob);
1662 }
1663 \f
1664 /* Do the insertion of a case label into case_list. The labels are
1665 fed to us in descending order from the sorted vector of case labels used
1666 in the tree part of the middle end. So the list we construct is
1667 sorted in ascending order.
1668
1669 LABEL is the case label to be inserted. LOW and HIGH are the bounds
1670 against which the index is compared to jump to LABEL and PROB is the
1671 estimated probability LABEL is reached from the switch statement. */
1672
1673 static struct case_node *
1674 add_case_node (struct case_node *head, tree low, tree high,
1675 tree label, int prob, alloc_pool case_node_pool)
1676 {
1677 struct case_node *r;
1678
1679 gcc_checking_assert (low);
1680 gcc_checking_assert (high && (TREE_TYPE (low) == TREE_TYPE (high)));
1681
1682 /* Add this label to the chain. */
1683 r = (struct case_node *) pool_alloc (case_node_pool);
1684 r->low = low;
1685 r->high = high;
1686 r->code_label = label;
1687 r->parent = r->left = NULL;
1688 r->prob = prob;
1689 r->subtree_prob = prob;
1690 r->right = head;
1691 return r;
1692 }
1693 \f
1694 /* Dump ROOT, a list or tree of case nodes, to file. */
1695
1696 static void
1697 dump_case_nodes (FILE *f, struct case_node *root,
1698 int indent_step, int indent_level)
1699 {
1700 HOST_WIDE_INT low, high;
1701
1702 if (root == 0)
1703 return;
1704 indent_level++;
1705
1706 dump_case_nodes (f, root->left, indent_step, indent_level);
1707
1708 low = tree_low_cst (root->low, 0);
1709 high = tree_low_cst (root->high, 0);
1710
1711 fputs (";; ", f);
1712 if (high == low)
 1713 fprintf (f, "%*s" HOST_WIDE_INT_PRINT_DEC,
 1714 indent_step * indent_level, "", low);
 1715 else
 1716 fprintf (f, "%*s" HOST_WIDE_INT_PRINT_DEC " ... " HOST_WIDE_INT_PRINT_DEC,
1717 indent_step * indent_level, "", low, high);
1718 fputs ("\n", f);
1719
1720 dump_case_nodes (f, root->right, indent_step, indent_level);
1721 }
1722 \f
1723 #ifndef HAVE_casesi
1724 #define HAVE_casesi 0
1725 #endif
1726
1727 #ifndef HAVE_tablejump
1728 #define HAVE_tablejump 0
1729 #endif
1730
1731 /* Return the smallest number of different values for which it is best to use a
1732 jump-table instead of a tree of conditional branches. */
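/* The value normally comes from the --param case-values-threshold knob;
   a value of 0 (the default) defers to the target hook below. */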
1733
1734 static unsigned int
1735 case_values_threshold (void)
1736 {
1737 unsigned int threshold = PARAM_VALUE (PARAM_CASE_VALUES_THRESHOLD);
1738
1739 if (threshold == 0)
1740 threshold = targetm.case_values_threshold ();
1741
1742 return threshold;
1743 }
1744
1745 /* Return true if a switch should be expanded as a decision tree.
1746 RANGE is the difference between highest and lowest case.
1747 UNIQ is number of unique case node targets, not counting the default case.
1748 COUNT is the number of comparisons needed, not counting the default case. */
1749
1750 static bool
1751 expand_switch_as_decision_tree_p (tree range,
1752 unsigned int uniq ATTRIBUTE_UNUSED,
1753 unsigned int count)
1754 {
1755 int max_ratio;
1756
 1757 /* If neither casesi nor tablejump is available, or flag_jump_tables
1758 over-ruled us, we really have no choice. */
1759 if (!HAVE_casesi && !HAVE_tablejump)
1760 return true;
1761 if (!flag_jump_tables)
1762 return true;
1763 #ifndef ASM_OUTPUT_ADDR_DIFF_ELT
1764 if (flag_pic)
1765 return true;
1766 #endif
1767
 1768 /* If the switch is small enough that the cost of one
 1769 indirect jump on the target is higher than the cost of a
 1770 decision tree, go with the decision tree.
1771
1772 If range of values is much bigger than number of values,
1773 or if it is too large to represent in a HOST_WIDE_INT,
1774 make a sequence of conditional branches instead of a dispatch.
1775
1776 The definition of "much bigger" depends on whether we are
1777 optimizing for size or for speed. If the former, the maximum
1778 ratio range/count = 3, because this was found to be the optimal
1779 ratio for size on i686-pc-linux-gnu, see PR11823. The ratio
1780 10 is much older, and was probably selected after an extensive
1781 benchmarking investigation on numerous platforms. Or maybe it
1782 just made sense to someone at some point in the history of GCC,
1783 who knows... */
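  /* E.g. when optimizing for speed (max_ratio == 10), a switch needing
     count == 8 comparisons over a range of 200 values has 200 > 10 * 8,
     so it is expanded as a decision tree; with a range of 40, 40 <= 80,
     so a jump table is still possible (subject to the case-values
     threshold above). */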
1784 max_ratio = optimize_insn_for_size_p () ? 3 : 10;
1785 if (count < case_values_threshold ()
1786 || ! host_integerp (range, /*pos=*/1)
1787 || compare_tree_int (range, max_ratio * count) > 0)
1788 return true;
1789
1790 return false;
1791 }
1792
1793 /* Generate a decision tree, switching on INDEX_EXPR and jumping to
1794 one of the labels in CASE_LIST or to the DEFAULT_LABEL.
1795 DEFAULT_PROB is the estimated probability that it jumps to
1796 DEFAULT_LABEL.
1797
1798 We generate a binary decision tree to select the appropriate target
1799 code. This is done as follows:
1800
 1801 If the index is a short or char for which we do not have
 1802 an insn to handle comparisons directly, convert it to
 1803 a full integer now, rather than letting each comparison
 1804 generate the conversion.
1805
1806 Load the index into a register.
1807
1808 The list of cases is rearranged into a binary tree,
1809 nearly optimal assuming equal probability for each case.
1810
1811 The tree is transformed into RTL, eliminating redundant
1812 test conditions at the same time.
1813
1814 If program flow could reach the end of the decision tree
1815 an unconditional jump to the default code is emitted.
1816
1817 The above process is unaware of the CFG. The caller has to fix up
1818 the CFG itself. This is done in cfgexpand.c. */
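/* As a rough illustration, a switch over the values { 1, 2, 3, 10, 20 }
   might be balanced with 3 at the root, the nodes for 1 and 2 in the
   left subtree, the nodes for 10 and 20 in the right subtree, and a
   final unconditional jump to the default label for anything that falls
   through the comparisons. */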
1819
1820 static void
1821 emit_case_decision_tree (tree index_expr, tree index_type,
1822 struct case_node *case_list, rtx default_label,
1823 int default_prob)
1824 {
1825 rtx index = expand_normal (index_expr);
1826
1827 if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
1828 && ! have_insn_for (COMPARE, GET_MODE (index)))
1829 {
1830 int unsignedp = TYPE_UNSIGNED (index_type);
1831 enum machine_mode wider_mode;
1832 for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
1833 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
1834 if (have_insn_for (COMPARE, wider_mode))
1835 {
1836 index = convert_to_mode (wider_mode, index, unsignedp);
1837 break;
1838 }
1839 }
1840
1841 do_pending_stack_adjust ();
1842
1843 if (MEM_P (index))
1844 {
1845 index = copy_to_reg (index);
1846 if (TREE_CODE (index_expr) == SSA_NAME)
1847 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (index_expr), index);
1848 }
1849
1850 balance_case_nodes (&case_list, NULL);
1851
1852 if (dump_file && (dump_flags & TDF_DETAILS))
1853 {
1854 int indent_step = ceil_log2 (TYPE_PRECISION (index_type)) + 2;
1855 fprintf (dump_file, ";; Expanding GIMPLE switch as decision tree:\n");
1856 dump_case_nodes (dump_file, case_list, indent_step, 0);
1857 }
1858
1859 emit_case_nodes (index, case_list, default_label, default_prob, index_type);
1860 if (default_label)
1861 emit_jump (default_label);
1862 }
1863
1864 /* Return the sum of probabilities of outgoing edges of basic block BB. */
1865
1866 static int
1867 get_outgoing_edge_probs (basic_block bb)
1868 {
1869 edge e;
1870 edge_iterator ei;
1871 int prob_sum = 0;
1872 if (!bb)
1873 return 0;
1874 FOR_EACH_EDGE (e, ei, bb->succs)
1875 prob_sum += e->probability;
1876 return prob_sum;
1877 }
1878
1879 /* Computes the conditional probability of jumping to a target if the branch
1880 instruction is executed.
1881 TARGET_PROB is the estimated probability of jumping to a target relative
1882 to some basic block BB.
1883 BASE_PROB is the probability of reaching the branch instruction relative
1884 to the same basic block BB. */
1885
1886 static inline int
1887 conditional_probability (int target_prob, int base_prob)
1888 {
1889 if (base_prob > 0)
1890 {
1891 gcc_assert (target_prob >= 0);
1892 gcc_assert (target_prob <= base_prob);
1893 return GCOV_COMPUTE_SCALE (target_prob, base_prob);
1894 }
1895 return -1;
1896 }
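/* For example, if the branch is reached with probability BASE_PROB == 5000
   (out of REG_BR_PROB_BASE == 10000) and its target is reached with
   TARGET_PROB == 2500, the conditional probability is REG_BR_PROB_BASE / 2,
   i.e. the branch is expected to be taken half of the times it executes.  */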
1897
1898 /* Generate a dispatch table, switching on INDEX_EXPR and jumping to
1899 one of the labels in CASE_LIST or to the DEFAULT_LABEL.
1900 MINVAL, MAXVAL, and RANGE are the extrema and range of the case
1901 labels in CASE_LIST. STMT_BB is the basic block containing the statement.
1902
1903 First, a jump insn is emitted. We first try "casesi"; if that
1904 fails, we try "tablejump". A target *must* have one of them (or both).
1905
1906 Then, a table with the target labels is emitted.
1907
1908 The process is unaware of the CFG. The caller has to fix up
1909 the CFG itself. This is done in cfgexpand.c. */
1910
1911 static void
1912 emit_case_dispatch_table (tree index_expr, tree index_type,
1913 struct case_node *case_list, rtx default_label,
1914 tree minval, tree maxval, tree range,
1915 basic_block stmt_bb)
1916 {
1917 int i, ncases;
1918 struct case_node *n;
1919 rtx *labelvec;
1920 rtx fallback_label = label_rtx (case_list->code_label);
1921 rtx table_label = gen_label_rtx ();
1922 bool has_gaps = false;
1923 edge default_edge = stmt_bb ? EDGE_SUCC (stmt_bb, 0) : NULL;
1924 int default_prob = default_edge ? default_edge->probability : 0;
1925 int base = get_outgoing_edge_probs (stmt_bb);
1926 bool try_with_tablejump = false;
1927
1928 int new_default_prob = conditional_probability (default_prob,
1929 base);
1930
1931 if (! try_casesi (index_type, index_expr, minval, range,
1932 table_label, default_label, fallback_label,
1933 new_default_prob))
1934 {
1935 /* Index jumptables from zero for suitable values of minval to avoid
1936 a subtraction. For the rationale see:
1937 "http://gcc.gnu.org/ml/gcc-patches/2001-10/msg01234.html". */
1938 if (optimize_insn_for_speed_p ()
1939 && compare_tree_int (minval, 0) > 0
1940 && compare_tree_int (minval, 3) < 0)
1941 {
1942 minval = build_int_cst (index_type, 0);
1943 range = maxval;
1944 has_gaps = true;
1945 }
1946 try_with_tablejump = true;
1947 }
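/* For example, when optimizing for speed and the case values are
   1, 2 and 3, the code above resets MINVAL to 0 and RANGE to
   MAXVAL (3): the table gains an unused entry at index 0, filled
   with the default label below, but the run-time subtraction of
   MINVAL from the index is avoided.  */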
1948
1949 /* Get table of labels to jump to, in order of case index. */
1950
1951 ncases = tree_low_cst (range, 0) + 1;
1952 labelvec = XALLOCAVEC (rtx, ncases);
1953 memset (labelvec, 0, ncases * sizeof (rtx));
1954
1955 for (n = case_list; n; n = n->right)
1956 {
1957 /* Compute the low and high bounds relative to the minimum
1958 value since that should fit in a HOST_WIDE_INT while the
1959 actual values may not. */
1960 HOST_WIDE_INT i_low
1961 = tree_low_cst (fold_build2 (MINUS_EXPR, index_type,
1962 n->low, minval), 1);
1963 HOST_WIDE_INT i_high
1964 = tree_low_cst (fold_build2 (MINUS_EXPR, index_type,
1965 n->high, minval), 1);
1966 HOST_WIDE_INT i;
1967
1968 for (i = i_low; i <= i_high; i ++)
1969 labelvec[i]
1970 = gen_rtx_LABEL_REF (Pmode, label_rtx (n->code_label));
1971 }
1972
1973 /* Fill in the gaps with the default. We may have gaps at
1974 the beginning if we tried to avoid the minval subtraction,
1975 so substitute some label even if the default label was
1976 deemed unreachable. */
1977 if (!default_label)
1978 default_label = fallback_label;
1979 for (i = 0; i < ncases; i++)
1980 if (labelvec[i] == 0)
1981 {
1982 has_gaps = true;
1983 labelvec[i] = gen_rtx_LABEL_REF (Pmode, default_label);
1984 }
1985
1986 if (has_gaps)
1987 {
1988 /* There is at least one entry in the jump table that jumps
1989 to the default label. The default label can either be reached
1990 through the indirect jump or the direct conditional jump
1991 before that. Split the probability of reaching the
1992 default label among these two jumps. */
1993 new_default_prob = conditional_probability (default_prob/2,
1994 base);
1995 default_prob /= 2;
1996 base -= default_prob;
1997 }
1998 else
1999 {
2000 base -= default_prob;
2001 default_prob = 0;
2002 }
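/* For example, if the default edge had probability 3000 out of a BASE
   of 10000 and the table has gaps, the conditional jump in front of
   the table keeps 1500, the table's default entries account for the
   other 1500, and BASE drops to 8500 for the rescaling of the
   remaining edges below.  */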
2003
2004 if (default_edge)
2005 default_edge->probability = default_prob;
2006
2007 /* We have altered the probability of the default edge, so the probabilities
2008 of all other edges need to be adjusted so that they sum up to
2009 REG_BR_PROB_BASE. */
2010 if (base)
2011 {
2012 edge e;
2013 edge_iterator ei;
2014 FOR_EACH_EDGE (e, ei, stmt_bb->succs)
2015 e->probability = GCOV_COMPUTE_SCALE (e->probability, base);
2016 }
2017
2018 if (try_with_tablejump)
2019 {
2020 bool ok = try_tablejump (index_type, index_expr, minval, range,
2021 table_label, default_label, new_default_prob);
2022 gcc_assert (ok);
2023 }
2024 /* Output the table. */
2025 emit_label (table_label);
2026
2027 if (CASE_VECTOR_PC_RELATIVE || flag_pic)
2028 emit_jump_table_data (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE,
2029 gen_rtx_LABEL_REF (Pmode,
2030 table_label),
2031 gen_rtvec_v (ncases, labelvec),
2032 const0_rtx, const0_rtx));
2033 else
2034 emit_jump_table_data (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE,
2035 gen_rtvec_v (ncases, labelvec)));
2036
2037 /* Record no drop-through after the table. */
2038 emit_barrier ();
2039 }
2040
2041 /* Reset the aux field of all outgoing edges of basic block BB. */
2042
2043 static inline void
2044 reset_out_edges_aux (basic_block bb)
2045 {
2046 edge e;
2047 edge_iterator ei;
2048 FOR_EACH_EDGE (e, ei, bb->succs)
2049 e->aux = (void *)0;
2050 }
2051
2052 /* Compute the number of case labels that correspond to each outgoing edge of
2053 STMT. Record this information in the aux field of the edge. */
2054
2055 static inline void
2056 compute_cases_per_edge (gimple stmt)
2057 {
2058 basic_block bb = gimple_bb (stmt);
2059 reset_out_edges_aux (bb);
2060 int ncases = gimple_switch_num_labels (stmt);
2061 for (int i = ncases - 1; i >= 1; --i)
2062 {
2063 tree elt = gimple_switch_label (stmt, i);
2064 tree lab = CASE_LABEL (elt);
2065 basic_block case_bb = label_to_block_fn (cfun, lab);
2066 edge case_edge = find_edge (bb, case_bb);
2067 case_edge->aux = (void *)((intptr_t)(case_edge->aux) + 1);
2068 }
2069 }
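/* For example, if two case labels of STMT branch to the same basic block,
   the shared edge ends up with aux == (void *) 2; expand_case divides the
   edge probability by this count so that each case node gets an equal
   share of it.  */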
2070
2071 /* Expand a case (Pascal/Ada) or switch (C) statement STMT, which
2072 must be a GIMPLE_SWITCH. The index expression to be tested is
2073 gimple_switch_index (STMT). Generate the code to test it and
2074 jump to the label of the matching case, or to the default
2075 label if no case value matches. */
2076
2077 void
2078 expand_case (gimple stmt)
2079 {
2080 tree minval = NULL_TREE, maxval = NULL_TREE, range = NULL_TREE;
2081 rtx default_label = NULL_RTX;
2082 unsigned int count, uniq;
2083 int i;
2084 int ncases = gimple_switch_num_labels (stmt);
2085 tree index_expr = gimple_switch_index (stmt);
2086 tree index_type = TREE_TYPE (index_expr);
2087 tree elt;
2088 basic_block bb = gimple_bb (stmt);
2089
2090 /* A list of case labels; it is first built as a list and it may then
2091 be rearranged into a nearly balanced binary tree. */
2092 struct case_node *case_list = 0;
2093
2094 /* A pool for case nodes. */
2095 alloc_pool case_node_pool;
2096
2097 /* An ERROR_MARK occurs for various reasons including invalid data type.
2098 ??? Can this still happen, with GIMPLE and all? */
2099 if (index_type == error_mark_node)
2100 return;
2101
2102 /* cleanup_tree_cfg removes all SWITCH_EXPR whose index
2103 expression is an INTEGER_CST. */
2104 gcc_assert (TREE_CODE (index_expr) != INTEGER_CST);
2105
2106 case_node_pool = create_alloc_pool ("struct case_node pool",
2107 sizeof (struct case_node),
2108 100);
2109
2110 do_pending_stack_adjust ();
2111
2112 /* Find the default case target label. */
2113 default_label = label_rtx (CASE_LABEL (gimple_switch_default_label (stmt)));
2114 edge default_edge = EDGE_SUCC (bb, 0);
2115 int default_prob = default_edge->probability;
2116
2117 /* Get upper and lower bounds of case values. */
2118 elt = gimple_switch_label (stmt, 1);
2119 minval = fold_convert (index_type, CASE_LOW (elt));
2120 elt = gimple_switch_label (stmt, ncases - 1);
2121 if (CASE_HIGH (elt))
2122 maxval = fold_convert (index_type, CASE_HIGH (elt));
2123 else
2124 maxval = fold_convert (index_type, CASE_LOW (elt));
2125
2126 /* Compute span of values. */
2127 range = fold_build2 (MINUS_EXPR, index_type, maxval, minval);
2128
2129 /* Listify the labels queue and gather some numbers to decide
2130 how to expand this switch(). */
2131 uniq = 0;
2132 count = 0;
2133 struct pointer_set_t *seen_labels = pointer_set_create ();
2134 compute_cases_per_edge (stmt);
2135
2136 for (i = ncases - 1; i >= 1; --i)
2137 {
2138 elt = gimple_switch_label (stmt, i);
2139 tree low = CASE_LOW (elt);
2140 gcc_assert (low);
2141 tree high = CASE_HIGH (elt);
2142 gcc_assert (! high || tree_int_cst_lt (low, high));
2143 tree lab = CASE_LABEL (elt);
2144
2145 /* Count the elements.
2146 A range counts double, since it requires two compares. */
2147 count++;
2148 if (high)
2149 count++;
2150
2151 /* If we have not seen this label yet, then increase the
2152 number of unique case node targets seen. */
2153 if (!pointer_set_insert (seen_labels, lab))
2154 uniq++;
2155
2156 /* The bounds on the case range, LOW and HIGH, have to be converted
2157 to case's index type TYPE. Note that the original type of the
2158 case index in the source code is usually "lost" during
2159 gimplification due to type promotion, but the case labels retain the
2160 original type. Make sure to drop overflow flags. */
2161 low = fold_convert (index_type, low);
2162 if (TREE_OVERFLOW (low))
2163 low = build_int_cst_wide (index_type,
2164 TREE_INT_CST_LOW (low),
2165 TREE_INT_CST_HIGH (low));
2166
2167 /* The canonical form of a case label in GIMPLE is that a simple case
2168 has an empty CASE_HIGH. For the casesi and tablejump expanders,
2169 the back ends want simple cases to have high == low. */
2170 if (! high)
2171 high = low;
2172 high = fold_convert (index_type, high);
2173 if (TREE_OVERFLOW (high))
2174 high = build_int_cst_wide (index_type,
2175 TREE_INT_CST_LOW (high),
2176 TREE_INT_CST_HIGH (high));
2177
2178 basic_block case_bb = label_to_block_fn (cfun, lab);
2179 edge case_edge = find_edge (bb, case_bb);
2180 case_list = add_case_node (
2181 case_list, low, high, lab,
2182 case_edge->probability / (intptr_t)(case_edge->aux),
2183 case_node_pool);
2184 }
2185 pointer_set_destroy (seen_labels);
2186 reset_out_edges_aux (bb);
2187
2188 /* cleanup_tree_cfg removes all SWITCH_EXPR with a single
2189 destination, such as one with a default case only.
2190 It also removes cases that are out of range for the switch
2191 type, so we should never get a zero here. */
2192 gcc_assert (count > 0);
2193
2194 rtx before_case = get_last_insn ();
2195
2196 /* Decide how to expand this switch.
2197 The two options at this point are a dispatch table (casesi or
2198 tablejump) or a decision tree. */
2199
2200 if (expand_switch_as_decision_tree_p (range, uniq, count))
2201 emit_case_decision_tree (index_expr, index_type,
2202 case_list, default_label,
2203 default_prob);
2204 else
2205 emit_case_dispatch_table (index_expr, index_type,
2206 case_list, default_label,
2207 minval, maxval, range, bb);
2208
2209 reorder_insns (NEXT_INSN (before_case), get_last_insn (), before_case);
2210
2211 free_temp_slots ();
2212 free_alloc_pool (case_node_pool);
2213 }
2214
2215 /* Expand the dispatch to a short decrement chain if there are few cases
2216 to dispatch to. Likewise if neither casesi nor tablejump is available,
2217 or if flag_jump_tables is not set. Otherwise, expand as a casesi or a
2218 tablejump. The index mode is always the mode of integer_type_node.
2219 Trap if no case matches the index.
2220
2221 DISPATCH_INDEX is the index expression to switch on. It should be a
2222 memory or register operand.
2223
2224 DISPATCH_TABLE is a set of case labels. The set should be sorted in
2225 ascending order, be contiguous, starting with value 0, and contain only
2226 single-valued case labels. */
2227
2228 void
2229 expand_sjlj_dispatch_table (rtx dispatch_index,
2230 vec<tree> dispatch_table)
2231 {
2232 tree index_type = integer_type_node;
2233 enum machine_mode index_mode = TYPE_MODE (index_type);
2234
2235 int ncases = dispatch_table.length ();
2236
2237 do_pending_stack_adjust ();
2238 rtx before_case = get_last_insn ();
2239
2240 /* Expand as a decrement-chain if there are 5 or fewer dispatch
2241 labels. This covers more than 98% of the cases in libjava,
2242 and seems to be a reasonable compromise between the "old way"
2243 of expanding as a decision tree or dispatch table vs. the "new
2244 way" with decrement chain or dispatch table. */
2245 if (dispatch_table.length () <= 5
2246 || (!HAVE_casesi && !HAVE_tablejump)
2247 || !flag_jump_tables)
2248 {
2249 /* Expand the dispatch as a decrement chain:
2250
2251 "switch(index) {case 0: do_0; case 1: do_1; ...; case N: do_N;}"
2252
2253 ==>
2254
2255 if (index == 0) do_0; else index--;
2256 if (index == 0) do_1; else index--;
2257 ...
2258 if (index == 0) do_N; else index--;
2259
2260 This is more efficient than a dispatch table on most machines.
2261 The last "index--" is redundant but the code is trivially dead
2262 and will be cleaned up by later passes. */
2263 rtx index = copy_to_mode_reg (index_mode, dispatch_index);
2264 rtx zero = CONST0_RTX (index_mode);
2265 for (int i = 0; i < ncases; i++)
2266 {
2267 tree elt = dispatch_table[i];
2268 rtx lab = label_rtx (CASE_LABEL (elt));
2269 do_jump_if_equal (index_mode, index, zero, lab, 0, -1);
2270 force_expand_binop (index_mode, sub_optab,
2271 index, CONST1_RTX (index_mode),
2272 index, 0, OPTAB_DIRECT);
2273 }
2274 }
2275 else
2276 {
2277 /* Similar to expand_case, but much simpler. */
2278 struct case_node *case_list = 0;
2279 alloc_pool case_node_pool = create_alloc_pool ("struct sjlj_case pool",
2280 sizeof (struct case_node),
2281 ncases);
2282 tree index_expr = make_tree (index_type, dispatch_index);
2283 tree minval = build_int_cst (index_type, 0);
2284 tree maxval = CASE_LOW (dispatch_table.last ());
2285 tree range = maxval;
2286 rtx default_label = gen_label_rtx ();
2287
2288 for (int i = ncases - 1; i >= 0; --i)
2289 {
2290 tree elt = dispatch_table[i];
2291 tree low = CASE_LOW (elt);
2292 tree lab = CASE_LABEL (elt);
2293 case_list = add_case_node (case_list, low, low, lab, 0, case_node_pool);
2294 }
2295
2296 emit_case_dispatch_table (index_expr, index_type,
2297 case_list, default_label,
2298 minval, maxval, range,
2299 BLOCK_FOR_INSN (before_case));
2300 emit_label (default_label);
2301 free_alloc_pool (case_node_pool);
2302 }
2303
2304 /* Dispatching something not handled? Trap! */
2305 expand_builtin_trap ();
2306
2307 reorder_insns (NEXT_INSN (before_case), get_last_insn (), before_case);
2308
2309 free_temp_slots ();
2310 }
2311
2312 \f
2313 /* Take an ordered list of case nodes
2314 and transform them into a near optimal binary tree,
2315 on the assumption that any target code selection value is as
2316 likely as any other.
2317
2318 The transformation is performed by splitting the ordered
2319 list into two equal sections plus a pivot. The parts are
2320 then attached to the pivot as left and right branches. Each
2321 branch is then transformed recursively. */
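/* For example, an ordered chain of five single-valued nodes
   A -> B -> C -> D -> E is re-rooted at the pivot C, with A (and B
   chained to its right) as the left branch and D (and E chained to its
   right) as the right branch.  Chains of one or two nodes are not split
   further; only their parent links and subtree probabilities are
   filled in.  */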
2322
2323 static void
2324 balance_case_nodes (case_node_ptr *head, case_node_ptr parent)
2325 {
2326 case_node_ptr np;
2327
2328 np = *head;
2329 if (np)
2330 {
2331 int i = 0;
2332 int ranges = 0;
2333 case_node_ptr *npp;
2334 case_node_ptr left;
2335
2336 /* Count the number of entries on this branch. Also count the ranges. */
2337
2338 while (np)
2339 {
2340 if (!tree_int_cst_equal (np->low, np->high))
2341 ranges++;
2342
2343 i++;
2344 np = np->right;
2345 }
2346
2347 if (i > 2)
2348 {
2349 /* Split this list if it is long enough for that to help. */
2350 npp = head;
2351 left = *npp;
2352
2353 /* If there are just three nodes, split at the middle one. */
2354 if (i == 3)
2355 npp = &(*npp)->right;
2356 else
2357 {
2358 /* Find the place in the list that bisects the list's total cost,
2359 where ranges count as 2.
2360 Here I is set to half the total cost. */
2361 i = (i + ranges + 1) / 2;
2362 while (1)
2363 {
2364 /* Skip nodes while their cost does not reach that amount. */
2365 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
2366 i--;
2367 i--;
2368 if (i <= 0)
2369 break;
2370 npp = &(*npp)->right;
2371 }
2372 }
2373 *head = np = *npp;
2374 *npp = 0;
2375 np->parent = parent;
2376 np->left = left;
2377
2378 /* Optimize each of the two split parts. */
2379 balance_case_nodes (&np->left, np);
2380 balance_case_nodes (&np->right, np);
2381 np->subtree_prob = np->prob;
2382 np->subtree_prob += np->left->subtree_prob;
2383 np->subtree_prob += np->right->subtree_prob;
2384 }
2385 else
2386 {
2387 /* Else leave this branch as one level,
2388 but fill in `parent' fields. */
2389 np = *head;
2390 np->parent = parent;
2391 np->subtree_prob = np->prob;
2392 for (; np->right; np = np->right)
2393 {
2394 np->right->parent = np;
2395 (*head)->subtree_prob += np->right->subtree_prob;
2396 }
2397 }
2398 }
2399 }
2400 \f
2401 /* Search the parent sections of the case node tree
2402 to see if a test for the lower bound of NODE would be redundant.
2403 INDEX_TYPE is the type of the index expression.
2404
2405 The instructions to generate the case decision tree are
2406 output in the same order as nodes are processed, so it is
2407 known that if a parent node checks the range of the current
2408 node minus one, then the current node is bounded at its lower
2409 span. Thus the test would be redundant. */
2410
2411 static int
2412 node_has_low_bound (case_node_ptr node, tree index_type)
2413 {
2414 tree low_minus_one;
2415 case_node_ptr pnode;
2416
2417 /* If the lower bound of this node is the lowest value in the index type,
2418 we need not test it. */
2419
2420 if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
2421 return 1;
2422
2423 /* If this node has a left branch, the value at the left must be less
2424 than that at this node, so it cannot be bounded at the bottom and
2425 we need not bother testing any further. */
2426
2427 if (node->left)
2428 return 0;
2429
2430 low_minus_one = fold_build2 (MINUS_EXPR, TREE_TYPE (node->low),
2431 node->low,
2432 build_int_cst (TREE_TYPE (node->low), 1));
2433
2434 /* If the subtraction above overflowed, we can't verify anything.
2435 Otherwise, look for a parent that tests our value - 1. */
2436
2437 if (! tree_int_cst_lt (low_minus_one, node->low))
2438 return 0;
2439
2440 for (pnode = node->parent; pnode; pnode = pnode->parent)
2441 if (tree_int_cst_equal (low_minus_one, pnode->high))
2442 return 1;
2443
2444 return 0;
2445 }
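/* For example, node_has_low_bound returns 1 when a parent node handles
   the range [10, 20] and NODE->low is 21: the parent's test against 20
   (== 21 - 1) already guarantees that no value below 21 reaches NODE,
   so the lower-bound test can be omitted.  */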
2446
2447 /* Search the parent sections of the case node tree
2448 to see if a test for the upper bound of NODE would be redundant.
2449 INDEX_TYPE is the type of the index expression.
2450
2451 The instructions to generate the case decision tree are
2452 output in the same order as nodes are processed, so it is
2453 known that if a parent node checks the range of the current
2454 node plus one, then the current node is bounded at its upper
2455 span. Thus the test would be redundant. */
2456
2457 static int
2458 node_has_high_bound (case_node_ptr node, tree index_type)
2459 {
2460 tree high_plus_one;
2461 case_node_ptr pnode;
2462
2463 /* If there is no upper bound, obviously no test is needed. */
2464
2465 if (TYPE_MAX_VALUE (index_type) == NULL)
2466 return 1;
2467
2468 /* If the upper bound of this node is the highest value in the type
2469 of the index expression, we need not test against it. */
2470
2471 if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
2472 return 1;
2473
2474 /* If this node has a right branch, the value at the right must be greater
2475 than that at this node, so it cannot be bounded at the top and
2476 we need not bother testing any further. */
2477
2478 if (node->right)
2479 return 0;
2480
2481 high_plus_one = fold_build2 (PLUS_EXPR, TREE_TYPE (node->high),
2482 node->high,
2483 build_int_cst (TREE_TYPE (node->high), 1));
2484
2485 /* If the addition above overflowed, we can't verify anything.
2486 Otherwise, look for a parent that tests our value + 1. */
2487
2488 if (! tree_int_cst_lt (node->high, high_plus_one))
2489 return 0;
2490
2491 for (pnode = node->parent; pnode; pnode = pnode->parent)
2492 if (tree_int_cst_equal (high_plus_one, pnode->low))
2493 return 1;
2494
2495 return 0;
2496 }
2497
2498 /* Search the parent sections of the
2499 case node tree to see if both tests for the upper and lower
2500 bounds of NODE would be redundant. */
2501
2502 static int
2503 node_is_bounded (case_node_ptr node, tree index_type)
2504 {
2505 return (node_has_low_bound (node, index_type)
2506 && node_has_high_bound (node, index_type));
2507 }
2508 \f
2509
2510 /* Emit step-by-step code to select a case for the value of INDEX.
2511 The decision tree generated this way follows the form of the
2512 case-node binary tree NODE, whose nodes represent test conditions.
2513 INDEX_TYPE is the type of the index of the switch.
2514
2515 Care is taken to prune redundant tests from the decision tree
2516 by detecting any boundary conditions already checked by
2517 emitted rtx. (See node_has_high_bound, node_has_low_bound
2518 and node_is_bounded, above.)
2519
2520 Where the test conditions can be shown to be redundant we emit
2521 an unconditional jump to the target code. As a further
2522 optimization, the subordinates of a tree node are examined to
2523 check for bounded nodes. In this case conditional and/or
2524 unconditional jumps as a result of the boundary check for the
2525 current node are arranged to target the subordinates' associated
2526 code for out-of-bound conditions on the current node.
2527
2528 We can assume that when control reaches the code generated here,
2529 the index value has already been compared with the parents
2530 of this node, and determined to be on the same side of each parent
2531 as this node is. Thus, if this node tests for the value 51,
2532 and a parent tested for 52, we don't need to consider
2533 the possibility of a value greater than 51. If another parent
2534 tests for the value 50, then this node need not test anything. */
2535
2536 static void
2537 emit_case_nodes (rtx index, case_node_ptr node, rtx default_label,
2538 int default_prob, tree index_type)
2539 {
2540 /* If INDEX has an unsigned type, we must make unsigned branches. */
2541 int unsignedp = TYPE_UNSIGNED (index_type);
2542 int probability;
2543 int prob = node->prob, subtree_prob = node->subtree_prob;
2544 enum machine_mode mode = GET_MODE (index);
2545 enum machine_mode imode = TYPE_MODE (index_type);
2546
2547 /* Handle indices detected as constant during RTL expansion. */
2548 if (mode == VOIDmode)
2549 mode = imode;
2550
2551 /* See if our parents have already tested everything for us.
2552 If they have, emit an unconditional jump for this node. */
2553 if (node_is_bounded (node, index_type))
2554 emit_jump (label_rtx (node->code_label));
2555
2556 else if (tree_int_cst_equal (node->low, node->high))
2557 {
2558 probability = conditional_probability (prob, subtree_prob + default_prob);
2559 /* Node is single valued. First see if the index expression matches
2560 this node and then check our children, if any. */
2561 do_jump_if_equal (mode, index,
2562 convert_modes (mode, imode,
2563 expand_normal (node->low),
2564 unsignedp),
2565 label_rtx (node->code_label), unsignedp, probability);
2566 /* Since this case is taken at this point, subtract its probability
2567 from SUBTREE_PROB. */
2568 subtree_prob -= prob;
2569 if (node->right != 0 && node->left != 0)
2570 {
2571 /* This node has children on both sides.
2572 Dispatch to one side or the other
2573 by comparing the index value with this node's value.
2574 If one subtree is bounded, check that one first,
2575 so we can avoid real branches in the tree. */
2576
2577 if (node_is_bounded (node->right, index_type))
2578 {
2579 probability = conditional_probability (
2580 node->right->prob,
2581 subtree_prob + default_prob);
2582 emit_cmp_and_jump_insns (index,
2583 convert_modes
2584 (mode, imode,
2585 expand_normal (node->high),
2586 unsignedp),
2587 GT, NULL_RTX, mode, unsignedp,
2588 label_rtx (node->right->code_label),
2589 probability);
2590 emit_case_nodes (index, node->left, default_label, default_prob,
2591 index_type);
2592 }
2593
2594 else if (node_is_bounded (node->left, index_type))
2595 {
2596 probability = conditional_probability (
2597 node->left->prob,
2598 subtree_prob + default_prob);
2599 emit_cmp_and_jump_insns (index,
2600 convert_modes
2601 (mode, imode,
2602 expand_normal (node->high),
2603 unsignedp),
2604 LT, NULL_RTX, mode, unsignedp,
2605 label_rtx (node->left->code_label),
2606 probability);
2607 emit_case_nodes (index, node->right, default_label, default_prob, index_type);
2608 }
2609
2610 /* If both children are single-valued cases with no
2611 children, finish up all the work. This way, we can save
2612 one ordered comparison. */
2613 else if (tree_int_cst_equal (node->right->low, node->right->high)
2614 && node->right->left == 0
2615 && node->right->right == 0
2616 && tree_int_cst_equal (node->left->low, node->left->high)
2617 && node->left->left == 0
2618 && node->left->right == 0)
2619 {
2620 /* Neither child is bounded, but both are single-valued and
2621 childless, so just emit one equality test per child. */
2622
2623 /* See if the value matches what the right hand side
2624 wants. */
2625 probability = conditional_probability (
2626 node->right->prob,
2627 subtree_prob + default_prob);
2628 do_jump_if_equal (mode, index,
2629 convert_modes (mode, imode,
2630 expand_normal (node->right->low),
2631 unsignedp),
2632 label_rtx (node->right->code_label),
2633 unsignedp, probability);
2634
2635 /* See if the value matches what the left hand side
2636 wants. */
2637 probability = conditional_probability (
2638 node->left->prob,
2639 subtree_prob + default_prob);
2640 do_jump_if_equal (mode, index,
2641 convert_modes (mode, imode,
2642 expand_normal (node->left->low),
2643 unsignedp),
2644 label_rtx (node->left->code_label),
2645 unsignedp, probability);
2646 }
2647
2648 else
2649 {
2650 /* Neither node is bounded. First distinguish the two sides;
2651 then emit the code for one side at a time. */
2652
2653 tree test_label
2654 = build_decl (curr_insn_location (),
2655 LABEL_DECL, NULL_TREE, NULL_TREE);
2656
2657 /* The default label could be reached either through the right
2658 subtree or the left subtree. Divide the probability
2659 equally. */
2660 probability = conditional_probability (
2661 node->right->subtree_prob + default_prob/2,
2662 subtree_prob + default_prob);
2663 /* See if the value is on the right. */
2664 emit_cmp_and_jump_insns (index,
2665 convert_modes
2666 (mode, imode,
2667 expand_normal (node->high),
2668 unsignedp),
2669 GT, NULL_RTX, mode, unsignedp,
2670 label_rtx (test_label),
2671 probability);
2672 default_prob /= 2;
2673
2674 /* Value must be on the left.
2675 Handle the left-hand subtree. */
2676 emit_case_nodes (index, node->left, default_label, default_prob, index_type);
2677 /* If left-hand subtree does nothing,
2678 go to default. */
2679 if (default_label)
2680 emit_jump (default_label);
2681
2682 /* Code branches here for the right-hand subtree. */
2683 expand_label (test_label);
2684 emit_case_nodes (index, node->right, default_label, default_prob, index_type);
2685 }
2686 }
2687
2688 else if (node->right != 0 && node->left == 0)
2689 {
2690 /* Here we have a right child but no left child, so we issue a
2691 conditional branch to the default label and process the right child.
2692
2693 Omit the conditional branch to default if the right child
2694 does not have any children and is single valued; it would
2695 cost too much space to save so little time. */
2696
2697 if (node->right->right || node->right->left
2698 || !tree_int_cst_equal (node->right->low, node->right->high))
2699 {
2700 if (!node_has_low_bound (node, index_type))
2701 {
2702 probability = conditional_probability (
2703 default_prob/2,
2704 subtree_prob + default_prob);
2705 emit_cmp_and_jump_insns (index,
2706 convert_modes
2707 (mode, imode,
2708 expand_normal (node->high),
2709 unsignedp),
2710 LT, NULL_RTX, mode, unsignedp,
2711 default_label,
2712 probability);
2713 default_prob /= 2;
2714 }
2715
2716 emit_case_nodes (index, node->right, default_label, default_prob, index_type);
2717 }
2718 else
2719 {
2720 probability = conditional_probability (
2721 node->right->subtree_prob,
2722 subtree_prob + default_prob);
2723 /* We cannot process node->right normally
2724 since we haven't ruled out the numbers less than
2725 this node's value. So handle node->right explicitly. */
2726 do_jump_if_equal (mode, index,
2727 convert_modes
2728 (mode, imode,
2729 expand_normal (node->right->low),
2730 unsignedp),
2731 label_rtx (node->right->code_label), unsignedp, probability);
2732 }
2733 }
2734
2735 else if (node->right == 0 && node->left != 0)
2736 {
2737 /* Just one subtree, on the left. */
2738 if (node->left->left || node->left->right
2739 || !tree_int_cst_equal (node->left->low, node->left->high))
2740 {
2741 if (!node_has_high_bound (node, index_type))
2742 {
2743 probability = conditional_probability (
2744 default_prob/2,
2745 subtree_prob + default_prob);
2746 emit_cmp_and_jump_insns (index,
2747 convert_modes
2748 (mode, imode,
2749 expand_normal (node->high),
2750 unsignedp),
2751 GT, NULL_RTX, mode, unsignedp,
2752 default_label,
2753 probability);
2754 default_prob /= 2;
2755 }
2756
2757 emit_case_nodes (index, node->left, default_label,
2758 default_prob, index_type);
2759 }
2760 else
2761 {
2762 probability = conditional_probability (
2763 node->left->subtree_prob,
2764 subtree_prob + default_prob);
2765 /* We cannot process node->left normally
2766 since we haven't ruled out the numbers greater than
2767 this node's value. So handle node->left explicitly. */
2768 do_jump_if_equal (mode, index,
2769 convert_modes
2770 (mode, imode,
2771 expand_normal (node->left->low),
2772 unsignedp),
2773 label_rtx (node->left->code_label), unsignedp, probability);
2774 }
2775 }
2776 }
2777 else
2778 {
2779 /* Node is a range. These cases are very similar to those for a single
2780 value, except that we do not start by testing whether this node
2781 is the one to branch to. */
2782
2783 if (node->right != 0 && node->left != 0)
2784 {
2785 /* Node has subtrees on both sides.
2786 If the right-hand subtree is bounded,
2787 test for it first, since we can go straight there.
2788 Otherwise, we need to make a branch in the control structure,
2789 then handle the two subtrees. */
2790 tree test_label = 0;
2791
2792 if (node_is_bounded (node->right, index_type))
2793 {
2794 /* Right hand node is fully bounded so we can eliminate any
2795 testing and branch directly to the target code. */
2796 probability = conditional_probability (
2797 node->right->subtree_prob,
2798 subtree_prob + default_prob);
2799 emit_cmp_and_jump_insns (index,
2800 convert_modes
2801 (mode, imode,
2802 expand_normal (node->high),
2803 unsignedp),
2804 GT, NULL_RTX, mode, unsignedp,
2805 label_rtx (node->right->code_label),
2806 probability);
2807 }
2808 else
2809 {
2810 /* Right hand node requires testing.
2811 Branch to a label where we will handle it later. */
2812
2813 test_label = build_decl (curr_insn_location (),
2814 LABEL_DECL, NULL_TREE, NULL_TREE);
2815 probability = conditional_probability (
2816 node->right->subtree_prob + default_prob/2,
2817 subtree_prob + default_prob);
2818 emit_cmp_and_jump_insns (index,
2819 convert_modes
2820 (mode, imode,
2821 expand_normal (node->high),
2822 unsignedp),
2823 GT, NULL_RTX, mode, unsignedp,
2824 label_rtx (test_label),
2825 probability);
2826 default_prob /= 2;
2827 }
2828
2829 /* Value belongs to this node or to the left-hand subtree. */
2830
2831 probability = conditional_probability (
2832 prob,
2833 subtree_prob + default_prob);
2834 emit_cmp_and_jump_insns (index,
2835 convert_modes
2836 (mode, imode,
2837 expand_normal (node->low),
2838 unsignedp),
2839 GE, NULL_RTX, mode, unsignedp,
2840 label_rtx (node->code_label),
2841 probability);
2842
2843 /* Handle the left-hand subtree. */
2844 emit_case_nodes (index, node->left, default_label, default_prob, index_type);
2845
2846 /* If right node had to be handled later, do that now. */
2847
2848 if (test_label)
2849 {
2850 /* If the left-hand subtree fell through,
2851 don't let it fall into the right-hand subtree. */
2852 if (default_label)
2853 emit_jump (default_label);
2854
2855 expand_label (test_label);
2856 emit_case_nodes (index, node->right, default_label, default_prob, index_type);
2857 }
2858 }
2859
2860 else if (node->right != 0 && node->left == 0)
2861 {
2862 /* Deal with values to the left of this node,
2863 if they are possible. */
2864 if (!node_has_low_bound (node, index_type))
2865 {
2866 probability = conditional_probability (
2867 default_prob/2,
2868 subtree_prob + default_prob);
2869 emit_cmp_and_jump_insns (index,
2870 convert_modes
2871 (mode, imode,
2872 expand_normal (node->low),
2873 unsignedp),
2874 LT, NULL_RTX, mode, unsignedp,
2875 default_label,
2876 probability);
2877 default_prob /= 2;
2878 }
2879
2880 /* Value belongs to this node or to the right-hand subtree. */
2881
2882 probability = conditional_probability (
2883 prob,
2884 subtree_prob + default_prob);
2885 emit_cmp_and_jump_insns (index,
2886 convert_modes
2887 (mode, imode,
2888 expand_normal (node->high),
2889 unsignedp),
2890 LE, NULL_RTX, mode, unsignedp,
2891 label_rtx (node->code_label),
2892 probability);
2893
2894 emit_case_nodes (index, node->right, default_label, default_prob, index_type);
2895 }
2896
2897 else if (node->right == 0 && node->left != 0)
2898 {
2899 /* Deal with values to the right of this node,
2900 if they are possible. */
2901 if (!node_has_high_bound (node, index_type))
2902 {
2903 probability = conditional_probability (
2904 default_prob/2,
2905 subtree_prob + default_prob);
2906 emit_cmp_and_jump_insns (index,
2907 convert_modes
2908 (mode, imode,
2909 expand_normal (node->high),
2910 unsignedp),
2911 GT, NULL_RTX, mode, unsignedp,
2912 default_label,
2913 probability);
2914 default_prob /= 2;
2915 }
2916
2917 /* Value belongs to this node or to the left-hand subtree. */
2918
2919 probability = conditional_probability (
2920 prob,
2921 subtree_prob + default_prob);
2922 emit_cmp_and_jump_insns (index,
2923 convert_modes
2924 (mode, imode,
2925 expand_normal (node->low),
2926 unsignedp),
2927 GE, NULL_RTX, mode, unsignedp,
2928 label_rtx (node->code_label),
2929 probability);
2930
2931 emit_case_nodes (index, node->left, default_label, default_prob, index_type);
2932 }
2933
2934 else
2935 {
2936 /* Node has no children so we check low and high bounds to remove
2937 redundant tests. At most one of the bounds can already be implied,
2938 since otherwise this node would be bounded--a case handled above. */
2939 int high_bound = node_has_high_bound (node, index_type);
2940 int low_bound = node_has_low_bound (node, index_type);
2941
2942 if (!high_bound && low_bound)
2943 {
2944 probability = conditional_probability (
2945 default_prob,
2946 subtree_prob + default_prob);
2947 emit_cmp_and_jump_insns (index,
2948 convert_modes
2949 (mode, imode,
2950 expand_normal (node->high),
2951 unsignedp),
2952 GT, NULL_RTX, mode, unsignedp,
2953 default_label,
2954 probability);
2955 }
2956
2957 else if (!low_bound && high_bound)
2958 {
2959 probability = conditional_probability (
2960 default_prob,
2961 subtree_prob + default_prob);
2962 emit_cmp_and_jump_insns (index,
2963 convert_modes
2964 (mode, imode,
2965 expand_normal (node->low),
2966 unsignedp),
2967 LT, NULL_RTX, mode, unsignedp,
2968 default_label,
2969 probability);
2970 }
2971 else if (!low_bound && !high_bound)
2972 {
2973 /* Widen LOW and HIGH to the same width as INDEX. */
2974 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
2975 tree low = build1 (CONVERT_EXPR, type, node->low);
2976 tree high = build1 (CONVERT_EXPR, type, node->high);
2977 rtx low_rtx, new_index, new_bound;
2978
2979 /* Instead of doing two branches, emit one unsigned branch for
2980 (index-low) > (high-low). */
2981 low_rtx = expand_expr (low, NULL_RTX, mode, EXPAND_NORMAL);
2982 new_index = expand_simple_binop (mode, MINUS, index, low_rtx,
2983 NULL_RTX, unsignedp,
2984 OPTAB_WIDEN);
2985 new_bound = expand_expr (fold_build2 (MINUS_EXPR, type,
2986 high, low),
2987 NULL_RTX, mode, EXPAND_NORMAL);
2988
2989 probability = conditional_probability (
2990 default_prob,
2991 subtree_prob + default_prob);
2992 emit_cmp_and_jump_insns (new_index, new_bound, GT, NULL_RTX,
2993 mode, 1, default_label, probability);
2994 }
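/* Example of the single comparison above: for a childless range
   node covering [10, 20] with neither bound implied by a parent,
   instead of the two tests index < 10 and index > 20 we emit one
   unsigned test (index - 10) > 10 that jumps to the default
   label.  */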
2995
2996 emit_jump (label_rtx (node->code_label));
2997 }
2998 }
2999 }