1 /* A pass for lowering trees to RTL.
2 Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012
3 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "rtl.h"
27 #include "tm_p.h"
28 #include "basic-block.h"
29 #include "function.h"
30 #include "expr.h"
31 #include "langhooks.h"
32 #include "tree-flow.h"
33 #include "tree-pass.h"
34 #include "except.h"
35 #include "flags.h"
36 #include "diagnostic.h"
37 #include "gimple-pretty-print.h"
38 #include "toplev.h"
39 #include "debug.h"
40 #include "params.h"
41 #include "tree-inline.h"
42 #include "value-prof.h"
43 #include "target.h"
44 #include "ssaexpand.h"
45 #include "bitmap.h"
46 #include "sbitmap.h"
47 #include "cfgloop.h"
48 #include "regs.h" /* For reg_renumber. */
49 #include "insn-attr.h" /* For INSN_SCHEDULING. */
50
51 /* This variable holds information helping the rewriting of SSA trees
52 into RTL. */
53 struct ssaexpand SA;
54
55 /* This variable holds the currently expanded gimple statement for purposes
56 of communicating the profile info to the builtin expanders. */
57 gimple currently_expanding_gimple_stmt;
58
59 static rtx expand_debug_expr (tree);
60
61 /* Return an expression tree corresponding to the RHS of GIMPLE
62 statement STMT. */
63
64 tree
65 gimple_assign_rhs_to_tree (gimple stmt)
66 {
67 tree t;
68 enum gimple_rhs_class grhs_class;
69
70 grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));
71
72 if (grhs_class == GIMPLE_TERNARY_RHS)
73 t = build3 (gimple_assign_rhs_code (stmt),
74 TREE_TYPE (gimple_assign_lhs (stmt)),
75 gimple_assign_rhs1 (stmt),
76 gimple_assign_rhs2 (stmt),
77 gimple_assign_rhs3 (stmt));
78 else if (grhs_class == GIMPLE_BINARY_RHS)
79 t = build2 (gimple_assign_rhs_code (stmt),
80 TREE_TYPE (gimple_assign_lhs (stmt)),
81 gimple_assign_rhs1 (stmt),
82 gimple_assign_rhs2 (stmt));
83 else if (grhs_class == GIMPLE_UNARY_RHS)
84 t = build1 (gimple_assign_rhs_code (stmt),
85 TREE_TYPE (gimple_assign_lhs (stmt)),
86 gimple_assign_rhs1 (stmt));
87 else if (grhs_class == GIMPLE_SINGLE_RHS)
88 {
89 t = gimple_assign_rhs1 (stmt);
90 /* Avoid modifying this tree in place below. */
91 if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
92 && gimple_location (stmt) != EXPR_LOCATION (t))
93 || (gimple_block (stmt)
94 && currently_expanding_to_rtl
95 && EXPR_P (t)
96 && gimple_block (stmt) != TREE_BLOCK (t)))
97 t = copy_node (t);
98 }
99 else
100 gcc_unreachable ();
101
102 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
103 SET_EXPR_LOCATION (t, gimple_location (stmt));
104 if (gimple_block (stmt) && currently_expanding_to_rtl && EXPR_P (t))
105 TREE_BLOCK (t) = gimple_block (stmt);
106
107 return t;
108 }
109
110
111 #ifndef STACK_ALIGNMENT_NEEDED
112 #define STACK_ALIGNMENT_NEEDED 1
113 #endif
114
115 #define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)
116
117 /* Associate declaration T with storage space X. If T is not an
118 SSA name this is exactly SET_DECL_RTL; otherwise associate the
119 partition of T with X. */
120 static inline void
121 set_rtl (tree t, rtx x)
122 {
123 if (TREE_CODE (t) == SSA_NAME)
124 {
125 SA.partition_to_pseudo[var_to_partition (SA.map, t)] = x;
126 if (x && !MEM_P (x))
127 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (t), x);
128 /* For the benefit of debug information at -O0 (where vartracking
129 doesn't run) record the place also in the base DECL if it's
130 a normal variable (not a parameter). */
131 if (x && x != pc_rtx && TREE_CODE (SSA_NAME_VAR (t)) == VAR_DECL)
132 {
133 tree var = SSA_NAME_VAR (t);
134 /* If we don't yet have something recorded, just record it now. */
135 if (!DECL_RTL_SET_P (var))
136 SET_DECL_RTL (var, x);
137 /* If we have it set already to "multiple places" don't
138 change this. */
139 else if (DECL_RTL (var) == pc_rtx)
140 ;
141 /* If we have something recorded and it's not the same place
142 as we want to record now, we have multiple partitions for the
143 same base variable, with different places. We can't just
144 randomly choose one, hence we have to say that we don't know.
145 This only happens with optimization, and there var-tracking
146 will figure out the right thing. */
147 else if (DECL_RTL (var) != x)
148 SET_DECL_RTL (var, pc_rtx);
149 }
150 }
151 else
152 SET_DECL_RTL (t, x);
153 }
154
155 /* This structure holds data relevant to one variable that will be
156 placed in a stack slot. */
157 struct stack_var
158 {
159 /* The variable. */
160 tree decl;
161
162 /* Initially, the size of the variable. Later, the size of the partition,
163 if this variable becomes its partition's representative. */
164 HOST_WIDE_INT size;
165
166 /* The *byte* alignment required for this variable. Or, as with the
167 size, the alignment for this partition. */
168 unsigned int alignb;
169
170 /* The partition representative. */
171 size_t representative;
172
173 /* The next stack variable in the partition, or EOC. */
174 size_t next;
175
176 /* The numbers of conflicting stack variables. */
177 bitmap conflicts;
178 };
179
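/* End-of-chain marker for the per-partition list threaded through the
   "next" fields above. */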
180 #define EOC ((size_t)-1)
181
182 /* We have an array of such objects while deciding allocation. */
183 static struct stack_var *stack_vars;
184 static size_t stack_vars_alloc;
185 static size_t stack_vars_num;
186 static struct pointer_map_t *decl_to_stack_part;
187
188 /* An array of indices into stack_vars, sorted by stack_var_cmp
189 ("large"-alignment variables first, then by decreasing size). */
190 static size_t *stack_vars_sorted;
191
192 /* The phase of the stack frame. This is the known misalignment of
193 virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY. That is,
194 (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0. */
195 static int frame_phase;
196
197 /* Used during expand_used_vars to remember if we saw any decls for
198 which we'd like to enable stack smashing protection. */
199 static bool has_protected_decls;
200
201 /* Used during expand_used_vars. Remember if we saw a character buffer
202 smaller than our cutoff threshold. Used for -Wstack-protector. */
203 static bool has_short_buffer;
204
205 /* Compute the byte alignment to use for DECL. Ignore alignment
206 we can't satisfy given the expected alignment of the stack boundary. */
207
208 static unsigned int
209 align_local_variable (tree decl)
210 {
211 unsigned int align = LOCAL_DECL_ALIGNMENT (decl);
212 DECL_ALIGN (decl) = align;
213 return align / BITS_PER_UNIT;
214 }
215
216 /* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
217 Return the frame offset. */
218
219 static HOST_WIDE_INT
220 alloc_stack_frame_space (HOST_WIDE_INT size, unsigned HOST_WIDE_INT align)
221 {
222 HOST_WIDE_INT offset, new_frame_offset;
223
224 new_frame_offset = frame_offset;
225 if (FRAME_GROWS_DOWNWARD)
226 {
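/* Grow the frame toward lower addresses: subtract the size, round the
   result down to a multiple of ALIGN relative to the frame phase, and
   use that as the allocated offset. A quick sketch (assuming
   frame_offset == 0, frame_phase == 0, size == 12, align == 16): the
   resulting offset is -16. */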
227 new_frame_offset -= size + frame_phase;
228 new_frame_offset &= -align;
229 new_frame_offset += frame_phase;
230 offset = new_frame_offset;
231 }
232 else
233 {
234 new_frame_offset -= frame_phase;
235 new_frame_offset += align - 1;
236 new_frame_offset &= -align;
237 new_frame_offset += frame_phase;
238 offset = new_frame_offset;
239 new_frame_offset += size;
240 }
241 frame_offset = new_frame_offset;
242
243 if (frame_offset_overflow (frame_offset, cfun->decl))
244 frame_offset = offset = 0;
245
246 return offset;
247 }
248
249 /* Accumulate DECL into STACK_VARS. */
250
251 static void
252 add_stack_var (tree decl)
253 {
254 struct stack_var *v;
255
256 if (stack_vars_num >= stack_vars_alloc)
257 {
258 if (stack_vars_alloc)
259 stack_vars_alloc = stack_vars_alloc * 3 / 2;
260 else
261 stack_vars_alloc = 32;
262 stack_vars
263 = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
264 }
265 if (!decl_to_stack_part)
266 decl_to_stack_part = pointer_map_create ();
267
268 v = &stack_vars[stack_vars_num];
269 * (size_t *)pointer_map_insert (decl_to_stack_part, decl) = stack_vars_num;
270
271 v->decl = decl;
272 v->size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (decl)), 1);
273 /* Ensure that all variables have size, so that &a != &b for any two
274 variables that are simultaneously live. */
275 if (v->size == 0)
276 v->size = 1;
277 v->alignb = align_local_variable (SSAVAR (decl));
278 /* An alignment of zero can mightily confuse us later. */
279 gcc_assert (v->alignb != 0);
280
281 /* All variables are initially in their own partition. */
282 v->representative = stack_vars_num;
283 v->next = EOC;
284
285 /* All variables initially conflict with no other. */
286 v->conflicts = NULL;
287
288 /* Ensure that this decl doesn't get put onto the list twice. */
289 set_rtl (decl, pc_rtx);
290
291 stack_vars_num++;
292 }
293
294 /* Make the decls associated with luids X and Y conflict. */
295
296 static void
297 add_stack_var_conflict (size_t x, size_t y)
298 {
299 struct stack_var *a = &stack_vars[x];
300 struct stack_var *b = &stack_vars[y];
301 if (!a->conflicts)
302 a->conflicts = BITMAP_ALLOC (NULL);
303 if (!b->conflicts)
304 b->conflicts = BITMAP_ALLOC (NULL);
305 bitmap_set_bit (a->conflicts, y);
306 bitmap_set_bit (b->conflicts, x);
307 }
308
309 /* Check whether the decls associated with luids X and Y conflict. */
310
311 static bool
312 stack_var_conflict_p (size_t x, size_t y)
313 {
314 struct stack_var *a = &stack_vars[x];
315 struct stack_var *b = &stack_vars[y];
316 if (x == y)
317 return false;
318 /* Partitions containing an SSA name result from gimple registers
319 with things like unsupported modes. They are top-level and
320 hence conflict with everything else. */
321 if (TREE_CODE (a->decl) == SSA_NAME || TREE_CODE (b->decl) == SSA_NAME)
322 return true;
323
324 if (!a->conflicts || !b->conflicts)
325 return false;
326 return bitmap_bit_p (a->conflicts, y);
327 }
328
329 /* Callback for walk_stmt_load_store_addr_ops. If OP is a decl touched
330 by add_stack_var, enter its partition number into bitmap DATA. */
331
332 static bool
333 visit_op (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
334 {
335 bitmap active = (bitmap)data;
336 op = get_base_address (op);
337 if (op
338 && DECL_P (op)
339 && DECL_RTL_IF_SET (op) == pc_rtx)
340 {
341 size_t *v = (size_t *) pointer_map_contains (decl_to_stack_part, op);
342 if (v)
343 bitmap_set_bit (active, *v);
344 }
345 return false;
346 }
347
348 /* Callback for walk_stmt_load_store_addr_ops. If OP is a decl touched
349 by add_stack_var, record conflicts between it and all other currently
350 active partitions from bitmap DATA. */
351
352 static bool
353 visit_conflict (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
354 {
355 bitmap active = (bitmap)data;
356 op = get_base_address (op);
357 if (op
358 && DECL_P (op)
359 && DECL_RTL_IF_SET (op) == pc_rtx)
360 {
361 size_t *v =
362 (size_t *) pointer_map_contains (decl_to_stack_part, op);
363 if (v && bitmap_set_bit (active, *v))
364 {
365 size_t num = *v;
366 bitmap_iterator bi;
367 unsigned i;
368 gcc_assert (num < stack_vars_num);
369 EXECUTE_IF_SET_IN_BITMAP (active, 0, i, bi)
370 add_stack_var_conflict (num, i);
371 }
372 }
373 return false;
374 }
375
376 /* Helper routine for add_scope_conflicts, calculating the active partitions
377 at the end of BB, leaving the result in WORK. We're called to generate
378 conflicts when FOR_CONFLICT is true, otherwise we're just tracking
379 liveness. */
380
381 static void
382 add_scope_conflicts_1 (basic_block bb, bitmap work, bool for_conflict)
383 {
384 edge e;
385 edge_iterator ei;
386 gimple_stmt_iterator gsi;
387 bool (*visit)(gimple, tree, void *);
388
389 bitmap_clear (work);
390 FOR_EACH_EDGE (e, ei, bb->preds)
391 bitmap_ior_into (work, (bitmap)e->src->aux);
392
393 visit = visit_op;
394
395 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
396 {
397 gimple stmt = gsi_stmt (gsi);
398 walk_stmt_load_store_addr_ops (stmt, work, NULL, NULL, visit);
399 }
400 for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
401 {
402 gimple stmt = gsi_stmt (gsi);
403
404 if (gimple_clobber_p (stmt))
405 {
406 tree lhs = gimple_assign_lhs (stmt);
407 size_t *v;
408 /* Nested function lowering might introduce LHSs
409 that are COMPONENT_REFs. */
410 if (TREE_CODE (lhs) != VAR_DECL)
411 continue;
412 if (DECL_RTL_IF_SET (lhs) == pc_rtx
413 && (v = (size_t *)
414 pointer_map_contains (decl_to_stack_part, lhs)))
415 bitmap_clear_bit (work, *v);
416 }
417 else if (!is_gimple_debug (stmt))
418 {
419 if (for_conflict
420 && visit == visit_op)
421 {
422 /* If this is the first real instruction in this BB we need
423 to add conflicts for everything live at this point now.
424 Unlike classical liveness for named objects we can't
425 rely on seeing a def/use of the names we're interested in.
426 There might merely be indirect loads/stores. We'd not add any
427 conflicts for such partitions. */
428 bitmap_iterator bi;
429 unsigned i;
430 EXECUTE_IF_SET_IN_BITMAP (work, 0, i, bi)
431 {
432 unsigned j;
433 bitmap_iterator bj;
434 EXECUTE_IF_SET_IN_BITMAP (work, i + 1, j, bj)
435 add_stack_var_conflict (i, j);
436 }
437 visit = visit_conflict;
438 }
439 walk_stmt_load_store_addr_ops (stmt, work, visit, visit, visit);
440 }
441 }
442 }
443
444 /* Generate stack partition conflicts between all partitions that are
445 simultaneously live. */
446
447 static void
448 add_scope_conflicts (void)
449 {
450 basic_block bb;
451 bool changed;
452 bitmap work = BITMAP_ALLOC (NULL);
453
454 /* We approximate the live range of a stack variable by taking the first
455 mention of its name as starting point(s), and by the end-of-scope
456 death clobber added by gimplify as ending point(s) of the range.
457 This over-approximates if we have, for instance, moved an address-taken
458 operation upward without also moving a dereference of it upward.
459 But it is conservatively correct, as a variable can never hold values
460 before its name is mentioned at least once.
461
462 We then do a mostly classical bitmap liveness algorithm. */
463
464 FOR_ALL_BB (bb)
465 bb->aux = BITMAP_ALLOC (NULL);
466
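/* Iterate to a fixed point: recompute each block's live-at-end set into
   WORK and merge it into the block's accumulated set (bb->aux) until
   nothing changes. */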
467 changed = true;
468 while (changed)
469 {
470 changed = false;
471 FOR_EACH_BB (bb)
472 {
473 bitmap active = (bitmap)bb->aux;
474 add_scope_conflicts_1 (bb, work, false);
475 if (bitmap_ior_into (active, work))
476 changed = true;
477 }
478 }
479
480 FOR_EACH_BB (bb)
481 add_scope_conflicts_1 (bb, work, true);
482
483 BITMAP_FREE (work);
484 FOR_ALL_BB (bb)
485 BITMAP_FREE (bb->aux);
486 }
487
488 /* A subroutine of partition_stack_vars. A comparison function for qsort,
489 sorting an array of indices by the properties of the object. */
490
491 static int
492 stack_var_cmp (const void *a, const void *b)
493 {
494 size_t ia = *(const size_t *)a;
495 size_t ib = *(const size_t *)b;
496 unsigned int aligna = stack_vars[ia].alignb;
497 unsigned int alignb = stack_vars[ib].alignb;
498 HOST_WIDE_INT sizea = stack_vars[ia].size;
499 HOST_WIDE_INT sizeb = stack_vars[ib].size;
500 tree decla = stack_vars[ia].decl;
501 tree declb = stack_vars[ib].decl;
502 bool largea, largeb;
503 unsigned int uida, uidb;
504
505 /* Primary compare on "large" alignment. Large comes first. */
506 largea = (aligna * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
507 largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
508 if (largea != largeb)
509 return (int)largeb - (int)largea;
510
511 /* Secondary compare on size, decreasing. */
512 if (sizea > sizeb)
513 return -1;
514 if (sizea < sizeb)
515 return 1;
516
517 /* Tertiary compare on true alignment, decreasing. */
518 if (aligna < alignb)
519 return -1;
520 if (aligna > alignb)
521 return 1;
522
523 /* Final compare on ID for sort stability, increasing.
524 Two SSA names are compared by their version, SSA names come before
525 non-SSA names, and two normal decls are compared by their DECL_UID. */
526 if (TREE_CODE (decla) == SSA_NAME)
527 {
528 if (TREE_CODE (declb) == SSA_NAME)
529 uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
530 else
531 return -1;
532 }
533 else if (TREE_CODE (declb) == SSA_NAME)
534 return 1;
535 else
536 uida = DECL_UID (decla), uidb = DECL_UID (declb);
537 if (uida < uidb)
538 return 1;
539 if (uida > uidb)
540 return -1;
541 return 0;
542 }
543
544
545 /* If the points-to solution *PT points to variables that are in a partition
546 together with other variables, add all partition members to the pointed-to
547 variables bitmap. */
548
549 static void
550 add_partitioned_vars_to_ptset (struct pt_solution *pt,
551 struct pointer_map_t *decls_to_partitions,
552 struct pointer_set_t *visited, bitmap temp)
553 {
554 bitmap_iterator bi;
555 unsigned i;
556 bitmap *part;
557
558 if (pt->anything
559 || pt->vars == NULL
560 /* The pointed-to vars bitmap is shared; it is enough to
561 visit it once. */
562 || pointer_set_insert(visited, pt->vars))
563 return;
564
565 bitmap_clear (temp);
566
567 /* By using a temporary bitmap to store all members of the partitions
568 we have to add, we make sure to visit each of the partitions only
569 once. */
570 EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
571 if ((!temp
572 || !bitmap_bit_p (temp, i))
573 && (part = (bitmap *) pointer_map_contains (decls_to_partitions,
574 (void *)(size_t) i)))
575 bitmap_ior_into (temp, *part);
576 if (!bitmap_empty_p (temp))
577 bitmap_ior_into (pt->vars, temp);
578 }
579
580 /* Update points-to sets based on partition info, so we can use them on RTL.
581 The bitmaps representing stack partitions will be saved until expand,
582 where partitioned decls used as bases in memory expressions will be
583 rewritten. */
584
585 static void
586 update_alias_info_with_stack_vars (void)
587 {
588 struct pointer_map_t *decls_to_partitions = NULL;
589 size_t i, j;
590 tree var = NULL_TREE;
591
592 for (i = 0; i < stack_vars_num; i++)
593 {
594 bitmap part = NULL;
595 tree name;
596 struct ptr_info_def *pi;
597
598 /* Not interested in partitions with a single variable. */
599 if (stack_vars[i].representative != i
600 || stack_vars[i].next == EOC)
601 continue;
602
603 if (!decls_to_partitions)
604 {
605 decls_to_partitions = pointer_map_create ();
606 cfun->gimple_df->decls_to_pointers = pointer_map_create ();
607 }
608
609 /* Create an SSA_NAME that points to the partition for use
610 as base during alias-oracle queries on RTL for bases that
611 have been partitioned. */
612 if (var == NULL_TREE)
613 var = create_tmp_var (ptr_type_node, NULL);
614 name = make_ssa_name (var, NULL);
615
616 /* Create bitmaps representing partitions. They will be used for
617 points-to sets later, so use GGC alloc. */
618 part = BITMAP_GGC_ALLOC ();
619 for (j = i; j != EOC; j = stack_vars[j].next)
620 {
621 tree decl = stack_vars[j].decl;
622 unsigned int uid = DECL_PT_UID (decl);
623 /* We should never end up partitioning SSA names (though they
624 may end up on the stack). Neither should we allocate stack
625 space to something that is unused and thus unreferenced, except
626 for -O0 where we are preserving even unreferenced variables. */
627 gcc_assert (DECL_P (decl)
628 && (!optimize
629 || referenced_var_lookup (cfun, DECL_UID (decl))));
630 bitmap_set_bit (part, uid);
631 *((bitmap *) pointer_map_insert (decls_to_partitions,
632 (void *)(size_t) uid)) = part;
633 *((tree *) pointer_map_insert (cfun->gimple_df->decls_to_pointers,
634 decl)) = name;
635 }
636
637 /* Make the SSA name point to all partition members. */
638 pi = get_ptr_info (name);
639 pt_solution_set (&pi->pt, part, false);
640 }
641
642 /* Make all points-to sets that contain one member of a partition
643 contain all members of the partition. */
644 if (decls_to_partitions)
645 {
646 unsigned i;
647 struct pointer_set_t *visited = pointer_set_create ();
648 bitmap temp = BITMAP_ALLOC (NULL);
649
650 for (i = 1; i < num_ssa_names; i++)
651 {
652 tree name = ssa_name (i);
653 struct ptr_info_def *pi;
654
655 if (name
656 && POINTER_TYPE_P (TREE_TYPE (name))
657 && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
658 add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
659 visited, temp);
660 }
661
662 add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
663 decls_to_partitions, visited, temp);
664
665 pointer_set_destroy (visited);
666 pointer_map_destroy (decls_to_partitions);
667 BITMAP_FREE (temp);
668 }
669 }
670
671 /* A subroutine of partition_stack_vars. The UNION portion of a UNION/FIND
672 partitioning algorithm. Partitions A and B are known to be non-conflicting.
673 Merge them into a single partition A. */
674
675 static void
676 union_stack_vars (size_t a, size_t b)
677 {
678 struct stack_var *vb = &stack_vars[b];
679 bitmap_iterator bi;
680 unsigned u;
681
682 gcc_assert (stack_vars[b].next == EOC);
683 /* Add B to A's partition. */
684 stack_vars[b].next = stack_vars[a].next;
685 stack_vars[b].representative = a;
686 stack_vars[a].next = b;
687
688 /* Update the required alignment of partition A to account for B. */
689 if (stack_vars[a].alignb < stack_vars[b].alignb)
690 stack_vars[a].alignb = stack_vars[b].alignb;
691
692 /* Update the interference graph and merge the conflicts. */
693 if (vb->conflicts)
694 {
695 EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
696 add_stack_var_conflict (a, stack_vars[u].representative);
697 BITMAP_FREE (vb->conflicts);
698 }
699 }
700
701 /* A subroutine of expand_used_vars. Binpack the variables into
702 partitions constrained by the interference graph. The overall
703 algorithm used is as follows:
704
705 Sort the objects by size in descending order.
706 For each object A {
707 S = size(A)
708 loop {
709 Look for the largest non-conflicting object B with size <= S.
710 If there is no such object, stop.
711 UNION (A, B)
712 }
713 }
714 */
715
716 static void
717 partition_stack_vars (void)
718 {
719 size_t si, sj, n = stack_vars_num;
720
721 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
722 for (si = 0; si < n; ++si)
723 stack_vars_sorted[si] = si;
724
725 if (n == 1)
726 return;
727
728 qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_cmp);
729
730 for (si = 0; si < n; ++si)
731 {
732 size_t i = stack_vars_sorted[si];
733 unsigned int ialign = stack_vars[i].alignb;
734
735 /* Ignore objects that aren't partition representatives. If we
736 see a var that is not a partition representative, it must
737 have been merged earlier. */
738 if (stack_vars[i].representative != i)
739 continue;
740
741 for (sj = si + 1; sj < n; ++sj)
742 {
743 size_t j = stack_vars_sorted[sj];
744 unsigned int jalign = stack_vars[j].alignb;
745
746 /* Ignore objects that aren't partition representatives. */
747 if (stack_vars[j].representative != j)
748 continue;
749
750 /* Ignore conflicting objects. */
751 if (stack_var_conflict_p (i, j))
752 continue;
753
754 /* Do not mix objects of "small" (supported) alignment
755 and "large" (unsupported) alignment. */
756 if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
757 != (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
758 continue;
759
760 /* UNION the objects; J is merged into I's partition. */
761 union_stack_vars (i, j);
762 }
763 }
764
765 update_alias_info_with_stack_vars ();
766 }
767
768 /* A debugging aid for expand_used_vars. Dump the generated partitions. */
769
770 static void
771 dump_stack_var_partition (void)
772 {
773 size_t si, i, j, n = stack_vars_num;
774
775 for (si = 0; si < n; ++si)
776 {
777 i = stack_vars_sorted[si];
778
779 /* Skip variables that aren't partition representatives, for now. */
780 if (stack_vars[i].representative != i)
781 continue;
782
783 fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
784 " align %u\n", (unsigned long) i, stack_vars[i].size,
785 stack_vars[i].alignb);
786
787 for (j = i; j != EOC; j = stack_vars[j].next)
788 {
789 fputc ('\t', dump_file);
790 print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
791 }
792 fputc ('\n', dump_file);
793 }
794 }
795
796 /* Assign rtl to DECL at BASE + OFFSET. */
797
798 static void
799 expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
800 HOST_WIDE_INT offset)
801 {
802 unsigned align;
803 rtx x;
804
805 /* If this fails, we've overflowed the stack frame. Error nicely? */
806 gcc_assert (offset == trunc_int_for_mode (offset, Pmode));
807
808 x = plus_constant (Pmode, base, offset);
809 x = gen_rtx_MEM (DECL_MODE (SSAVAR (decl)), x);
810
811 if (TREE_CODE (decl) != SSA_NAME)
812 {
813 /* Set the alignment we actually gave this decl if it isn't an SSA name.
814 If it is, we generate stack slots only accidentally, so the alignment
815 isn't as important; we'll simply use the alignment that is already set. */
816 if (base == virtual_stack_vars_rtx)
817 offset -= frame_phase;
818 align = offset & -offset;
819 align *= BITS_PER_UNIT;
820 if (align == 0 || align > base_align)
821 align = base_align;
822
823 /* One would think that we could assert that we're not decreasing
824 alignment here, but (at least) the i386 port does exactly this
825 via the MINIMUM_ALIGNMENT hook. */
826
827 DECL_ALIGN (decl) = align;
828 DECL_USER_ALIGN (decl) = 0;
829 }
830
831 set_mem_attributes (x, SSAVAR (decl), true);
832 set_rtl (decl, x);
833 }
834
835 /* A subroutine of expand_used_vars. Give each partition representative
836 a unique location within the stack frame. Update each partition member
837 with that location. */
838
839 static void
840 expand_stack_vars (bool (*pred) (tree))
841 {
842 size_t si, i, j, n = stack_vars_num;
843 HOST_WIDE_INT large_size = 0, large_alloc = 0;
844 rtx large_base = NULL;
845 unsigned large_align = 0;
846 tree decl;
847
848 /* Determine if there are any variables requiring "large" alignment.
849 Since these are dynamically allocated, we only process these if
850 no predicate is involved. */
851 large_align = stack_vars[stack_vars_sorted[0]].alignb * BITS_PER_UNIT;
852 if (pred == NULL && large_align > MAX_SUPPORTED_STACK_ALIGNMENT)
853 {
854 /* Find the total size of these variables. */
855 for (si = 0; si < n; ++si)
856 {
857 unsigned alignb;
858
859 i = stack_vars_sorted[si];
860 alignb = stack_vars[i].alignb;
861
862 /* Stop when we get to the first decl with "small" alignment. */
863 if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
864 break;
865
866 /* Skip variables that aren't partition representatives. */
867 if (stack_vars[i].representative != i)
868 continue;
869
870 /* Skip variables that have already had rtl assigned. See also
871 add_stack_var where we perpetrate this pc_rtx hack. */
872 decl = stack_vars[i].decl;
873 if ((TREE_CODE (decl) == SSA_NAME
874 ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
875 : DECL_RTL (decl)) != pc_rtx)
876 continue;
877
878 large_size += alignb - 1;
879 large_size &= -(HOST_WIDE_INT)alignb;
880 large_size += stack_vars[i].size;
881 }
882
883 /* If there were any, allocate space. */
884 if (large_size > 0)
885 large_base = allocate_dynamic_stack_space (GEN_INT (large_size), 0,
886 large_align, true);
887 }
888
889 for (si = 0; si < n; ++si)
890 {
891 rtx base;
892 unsigned base_align, alignb;
893 HOST_WIDE_INT offset;
894
895 i = stack_vars_sorted[si];
896
897 /* Skip variables that aren't partition representatives, for now. */
898 if (stack_vars[i].representative != i)
899 continue;
900
901 /* Skip variables that have already had rtl assigned. See also
902 add_stack_var where we perpetrate this pc_rtx hack. */
903 decl = stack_vars[i].decl;
904 if ((TREE_CODE (decl) == SSA_NAME
905 ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
906 : DECL_RTL (decl)) != pc_rtx)
907 continue;
908
909 /* Check the predicate to see whether this variable should be
910 allocated in this pass. */
911 if (pred && !pred (decl))
912 continue;
913
914 alignb = stack_vars[i].alignb;
915 if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
916 {
917 offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
918 base = virtual_stack_vars_rtx;
919 base_align = crtl->max_used_stack_slot_alignment;
920 }
921 else
922 {
923 /* Large alignment is only processed in the last pass. */
924 if (pred)
925 continue;
926 gcc_assert (large_base != NULL);
927
928 large_alloc += alignb - 1;
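/* Round the running offset into the dynamically allocated area up to
   this variable's alignment, mirroring the accounting done above when
   large_size was computed. */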
929 large_alloc &= -(HOST_WIDE_INT)alignb;
930 offset = large_alloc;
931 large_alloc += stack_vars[i].size;
932
933 base = large_base;
934 base_align = large_align;
935 }
936
937 /* Create rtl for each variable based on its location within the
938 partition. */
939 for (j = i; j != EOC; j = stack_vars[j].next)
940 {
941 expand_one_stack_var_at (stack_vars[j].decl,
942 base, base_align,
943 offset);
944 }
945 }
946
947 gcc_assert (large_alloc == large_size);
948 }
949
950 /* Account for the sizes of all partitions and reset DECL_RTLs. */
951 static HOST_WIDE_INT
952 account_stack_vars (void)
953 {
954 size_t si, j, i, n = stack_vars_num;
955 HOST_WIDE_INT size = 0;
956
957 for (si = 0; si < n; ++si)
958 {
959 i = stack_vars_sorted[si];
960
961 /* Skip variables that aren't partition representatives, for now. */
962 if (stack_vars[i].representative != i)
963 continue;
964
965 size += stack_vars[i].size;
966 for (j = i; j != EOC; j = stack_vars[j].next)
967 set_rtl (stack_vars[j].decl, NULL);
968 }
969 return size;
970 }
971
972 /* A subroutine of expand_one_var. Called to immediately assign rtl
973 to a variable to be allocated in the stack frame. */
974
975 static void
976 expand_one_stack_var (tree var)
977 {
978 HOST_WIDE_INT size, offset;
979 unsigned byte_align;
980
981 size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (var)), 1);
982 byte_align = align_local_variable (SSAVAR (var));
983
984 /* We handle highly aligned variables in expand_stack_vars. */
985 gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT);
986
987 offset = alloc_stack_frame_space (size, byte_align);
988
989 expand_one_stack_var_at (var, virtual_stack_vars_rtx,
990 crtl->max_used_stack_slot_alignment, offset);
991 }
992
993 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
994 that will reside in a hard register. */
995
996 static void
997 expand_one_hard_reg_var (tree var)
998 {
999 rest_of_decl_compilation (var, 0, 0);
1000 }
1001
1002 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
1003 that will reside in a pseudo register. */
1004
1005 static void
1006 expand_one_register_var (tree var)
1007 {
1008 tree decl = SSAVAR (var);
1009 tree type = TREE_TYPE (decl);
1010 enum machine_mode reg_mode = promote_decl_mode (decl, NULL);
1011 rtx x = gen_reg_rtx (reg_mode);
1012
1013 set_rtl (var, x);
1014
1015 /* Note if the object is a user variable. */
1016 if (!DECL_ARTIFICIAL (decl))
1017 mark_user_reg (x);
1018
1019 if (POINTER_TYPE_P (type))
1020 mark_reg_pointer (x, get_pointer_alignment (var));
1021 }
1022
1023 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL that
1024 has some associated error, e.g. its type is error-mark. We just need
1025 to pick something that won't crash the rest of the compiler. */
1026
1027 static void
1028 expand_one_error_var (tree var)
1029 {
1030 enum machine_mode mode = DECL_MODE (var);
1031 rtx x;
1032
1033 if (mode == BLKmode)
1034 x = gen_rtx_MEM (BLKmode, const0_rtx);
1035 else if (mode == VOIDmode)
1036 x = const0_rtx;
1037 else
1038 x = gen_reg_rtx (mode);
1039
1040 SET_DECL_RTL (var, x);
1041 }
1042
1043 /* A subroutine of expand_one_var. VAR is a variable that will be
1044 allocated to the local stack frame. Return true if we wish to
1045 add VAR to STACK_VARS so that it will be coalesced with other
1046 variables. Return false to allocate VAR immediately.
1047
1048 This function is used to reduce the number of variables considered
1049 for coalescing, which reduces the size of the quadratic problem. */
1050
1051 static bool
1052 defer_stack_allocation (tree var, bool toplevel)
1053 {
1054 /* If stack protection is enabled, *all* stack variables must be deferred,
1055 so that we can re-order the strings to the top of the frame. */
1056 if (flag_stack_protect)
1057 return true;
1058
1059 /* We handle "large" alignment via dynamic allocation. We want to handle
1060 this extra complication in only one place, so defer them. */
1061 if (DECL_ALIGN (var) > MAX_SUPPORTED_STACK_ALIGNMENT)
1062 return true;
1063
1064 /* Variables in the outermost scope automatically conflict with
1065 every other variable. The only reason to want to defer them
1066 at all is that, after sorting, we can more efficiently pack
1067 small variables in the stack frame. Continue to defer at -O2. */
1068 if (toplevel && optimize < 2)
1069 return false;
1070
1071 /* Without optimization, *most* variables are allocated from the
1072 stack, which makes the quadratic problem large exactly when we
1073 want compilation to proceed as quickly as possible. On the
1074 other hand, we don't want the function's stack frame size to
1075 get completely out of hand. So we avoid adding scalars and
1076 "small" aggregates to the list at all. */
1077 if (optimize == 0 && tree_low_cst (DECL_SIZE_UNIT (var), 1) < 32)
1078 return false;
1079
1080 return true;
1081 }
1082
1083 /* A subroutine of expand_used_vars. Expand one variable according to
1084 its flavor. Variables to be placed on the stack are not actually
1085 expanded yet, merely recorded.
1086 When REALLY_EXPAND is false, only add stack values to be allocated.
1087 Return the stack usage this variable is supposed to take. */
1089
1090 static HOST_WIDE_INT
1091 expand_one_var (tree var, bool toplevel, bool really_expand)
1092 {
1093 unsigned int align = BITS_PER_UNIT;
1094 tree origvar = var;
1095
1096 var = SSAVAR (var);
1097
1098 if (TREE_TYPE (var) != error_mark_node && TREE_CODE (var) == VAR_DECL)
1099 {
1100 /* Because we don't know if VAR will be in a register or on the stack,
1101 we conservatively assume it will be on the stack even if VAR is
1102 eventually put into a register after the RA pass. For non-automatic
1103 variables, which won't be on the stack, we collect the alignment of the
1104 type and ignore user-specified alignment. */
1105 if (TREE_STATIC (var) || DECL_EXTERNAL (var))
1106 align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
1107 TYPE_MODE (TREE_TYPE (var)),
1108 TYPE_ALIGN (TREE_TYPE (var)));
1109 else if (DECL_HAS_VALUE_EXPR_P (var)
1110 || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var))))
1111 /* Don't consider debug-only variables with DECL_HAS_VALUE_EXPR_P set,
1112 or variables which were already assigned a stack slot by
1113 expand_one_stack_var_at - in the latter case DECL_ALIGN has already
1114 been changed to match the offset chosen for it. */
1115 align = crtl->stack_alignment_estimated;
1116 else
1117 align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));
1118
1119 /* If the variable alignment is very large we'll dynamically allocate
1120 it, which means that the in-frame portion is just a pointer. */
1121 if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1122 align = POINTER_SIZE;
1123 }
1124
1125 if (SUPPORTS_STACK_ALIGNMENT
1126 && crtl->stack_alignment_estimated < align)
1127 {
1128 /* stack_alignment_estimated shouldn't change after the stack
1129 realign decision has been made. */
1130 gcc_assert(!crtl->stack_realign_processed);
1131 crtl->stack_alignment_estimated = align;
1132 }
1133
1134 /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
1135 So here we only make sure stack_alignment_needed >= align. */
1136 if (crtl->stack_alignment_needed < align)
1137 crtl->stack_alignment_needed = align;
1138 if (crtl->max_used_stack_slot_alignment < align)
1139 crtl->max_used_stack_slot_alignment = align;
1140
1141 if (TREE_CODE (origvar) == SSA_NAME)
1142 {
1143 gcc_assert (TREE_CODE (var) != VAR_DECL
1144 || (!DECL_EXTERNAL (var)
1145 && !DECL_HAS_VALUE_EXPR_P (var)
1146 && !TREE_STATIC (var)
1147 && TREE_TYPE (var) != error_mark_node
1148 && !DECL_HARD_REGISTER (var)
1149 && really_expand));
1150 }
1151 if (TREE_CODE (var) != VAR_DECL && TREE_CODE (origvar) != SSA_NAME)
1152 ;
1153 else if (DECL_EXTERNAL (var))
1154 ;
1155 else if (DECL_HAS_VALUE_EXPR_P (var))
1156 ;
1157 else if (TREE_STATIC (var))
1158 ;
1159 else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
1160 ;
1161 else if (TREE_TYPE (var) == error_mark_node)
1162 {
1163 if (really_expand)
1164 expand_one_error_var (var);
1165 }
1166 else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
1167 {
1168 if (really_expand)
1169 expand_one_hard_reg_var (var);
1170 }
1171 else if (use_register_for_decl (var))
1172 {
1173 if (really_expand)
1174 expand_one_register_var (origvar);
1175 }
1176 else if (! valid_constant_size_p (DECL_SIZE_UNIT (var)))
1177 {
1178 /* Reject variables which cover more than half of the address-space. */
1179 if (really_expand)
1180 {
1181 error ("size of variable %q+D is too large", var);
1182 expand_one_error_var (var);
1183 }
1184 }
1185 else if (defer_stack_allocation (var, toplevel))
1186 add_stack_var (origvar);
1187 else
1188 {
1189 if (really_expand)
1190 expand_one_stack_var (origvar);
1191 return tree_low_cst (DECL_SIZE_UNIT (var), 1);
1192 }
1193 return 0;
1194 }
1195
1196 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1197 expanding variables. Those variables that can be put into registers
1198 are allocated pseudos; those that can't are put on the stack.
1199
1200 TOPLEVEL is true if this is the outermost BLOCK. */
1201
1202 static void
1203 expand_used_vars_for_block (tree block, bool toplevel)
1204 {
1205 tree t;
1206
1207 /* Expand all variables at this level. */
1208 for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1209 if (TREE_USED (t)
1210 && ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
1211 || !DECL_NONSHAREABLE (t)))
1212 expand_one_var (t, toplevel, true);
1213
1214 /* Recursively expand all variables in the sub-blocks. */
1215 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1216 expand_used_vars_for_block (t, false);
1217 }
1218
1219 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1220 and clear TREE_USED on all local variables. */
1221
1222 static void
1223 clear_tree_used (tree block)
1224 {
1225 tree t;
1226
1227 for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1228 /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
1229 if ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
1230 || !DECL_NONSHAREABLE (t))
1231 TREE_USED (t) = 0;
1232
1233 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1234 clear_tree_used (t);
1235 }
1236
1237 /* Examine TYPE and determine a bit mask of the following features. */
1238
1239 #define SPCT_HAS_LARGE_CHAR_ARRAY 1
1240 #define SPCT_HAS_SMALL_CHAR_ARRAY 2
1241 #define SPCT_HAS_ARRAY 4
1242 #define SPCT_HAS_AGGREGATE 8
1243
1244 static unsigned int
1245 stack_protect_classify_type (tree type)
1246 {
1247 unsigned int ret = 0;
1248 tree t;
1249
1250 switch (TREE_CODE (type))
1251 {
1252 case ARRAY_TYPE:
1253 t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
1254 if (t == char_type_node
1255 || t == signed_char_type_node
1256 || t == unsigned_char_type_node)
1257 {
1258 unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
1259 unsigned HOST_WIDE_INT len;
1260
1261 if (!TYPE_SIZE_UNIT (type)
1262 || !host_integerp (TYPE_SIZE_UNIT (type), 1))
1263 len = max;
1264 else
1265 len = tree_low_cst (TYPE_SIZE_UNIT (type), 1);
1266
1267 if (len < max)
1268 ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
1269 else
1270 ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
1271 }
1272 else
1273 ret = SPCT_HAS_ARRAY;
1274 break;
1275
1276 case UNION_TYPE:
1277 case QUAL_UNION_TYPE:
1278 case RECORD_TYPE:
1279 ret = SPCT_HAS_AGGREGATE;
1280 for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
1281 if (TREE_CODE (t) == FIELD_DECL)
1282 ret |= stack_protect_classify_type (TREE_TYPE (t));
1283 break;
1284
1285 default:
1286 break;
1287 }
1288
1289 return ret;
1290 }
1291
1292 /* Return nonzero if DECL should be segregated into the "vulnerable" upper
1293 part of the local stack frame. Remember if we ever return nonzero for
1294 any variable in this function. The return value is the phase number in
1295 which the variable should be allocated. */
1296
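/* Note that expand_used_vars expands phase 1 variables first, then
   phase 2, then everything else, so the protected character arrays are
   laid out before the remaining locals. */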
1297 static int
1298 stack_protect_decl_phase (tree decl)
1299 {
1300 unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
1301 int ret = 0;
1302
1303 if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
1304 has_short_buffer = true;
1305
1306 if (flag_stack_protect == 2)
1307 {
1308 if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
1309 && !(bits & SPCT_HAS_AGGREGATE))
1310 ret = 1;
1311 else if (bits & SPCT_HAS_ARRAY)
1312 ret = 2;
1313 }
1314 else
1315 ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;
1316
1317 if (ret)
1318 has_protected_decls = true;
1319
1320 return ret;
1321 }
1322
1323 /* Two helper routines that check for phase 1 and phase 2. These are used
1324 as callbacks for expand_stack_vars. */
1325
1326 static bool
1327 stack_protect_decl_phase_1 (tree decl)
1328 {
1329 return stack_protect_decl_phase (decl) == 1;
1330 }
1331
1332 static bool
1333 stack_protect_decl_phase_2 (tree decl)
1334 {
1335 return stack_protect_decl_phase (decl) == 2;
1336 }
1337
1338 /* Ensure that variables in different stack protection phases conflict
1339 so that they are not merged and share the same stack slot. */
1340
1341 static void
1342 add_stack_protection_conflicts (void)
1343 {
1344 size_t i, j, n = stack_vars_num;
1345 unsigned char *phase;
1346
1347 phase = XNEWVEC (unsigned char, n);
1348 for (i = 0; i < n; ++i)
1349 phase[i] = stack_protect_decl_phase (stack_vars[i].decl);
1350
1351 for (i = 0; i < n; ++i)
1352 {
1353 unsigned char ph_i = phase[i];
1354 for (j = 0; j < i; ++j)
1355 if (ph_i != phase[j])
1356 add_stack_var_conflict (i, j);
1357 }
1358
1359 XDELETEVEC (phase);
1360 }
1361
1362 /* Create a decl for the guard at the top of the stack frame. */
1363
1364 static void
1365 create_stack_guard (void)
1366 {
1367 tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
1368 VAR_DECL, NULL, ptr_type_node);
1369 TREE_THIS_VOLATILE (guard) = 1;
1370 TREE_USED (guard) = 1;
1371 expand_one_stack_var (guard);
1372 crtl->stack_protect_guard = guard;
1373 }
1374
1375 /* Prepare for expanding variables. */
1376 static void
1377 init_vars_expansion (void)
1378 {
1379 tree t;
1380 unsigned ix;
1381 /* Set TREE_USED on all variables in the local_decls. */
1382 FOR_EACH_LOCAL_DECL (cfun, ix, t)
1383 TREE_USED (t) = 1;
1384
1385 /* Clear TREE_USED on all variables associated with a block scope. */
1386 clear_tree_used (DECL_INITIAL (current_function_decl));
1387
1388 /* Initialize local stack smashing state. */
1389 has_protected_decls = false;
1390 has_short_buffer = false;
1391 }
1392
1393 /* Free up stack variable graph data. */
1394 static void
1395 fini_vars_expansion (void)
1396 {
1397 size_t i, n = stack_vars_num;
1398 for (i = 0; i < n; i++)
1399 BITMAP_FREE (stack_vars[i].conflicts);
1400 XDELETEVEC (stack_vars);
1401 XDELETEVEC (stack_vars_sorted);
1402 stack_vars = NULL;
1403 stack_vars_alloc = stack_vars_num = 0;
1404 pointer_map_destroy (decl_to_stack_part);
1405 decl_to_stack_part = NULL;
1406 }
1407
1408 /* Make a fair guess for the size of the stack frame of the function
1409 in NODE. This doesn't have to be exact, the result is only used in
1410 the inline heuristics. So we don't want to run the full stack var
1411 packing algorithm (which is quadratic in the number of stack vars).
1412 Instead, we calculate the total size of all stack vars. This turns
1413 out to be a pretty fair estimate -- packing of stack vars doesn't
1414 happen very often. */
1415
1416 HOST_WIDE_INT
1417 estimated_stack_frame_size (struct cgraph_node *node)
1418 {
1419 HOST_WIDE_INT size = 0;
1420 size_t i;
1421 tree var;
1422 tree old_cur_fun_decl = current_function_decl;
1423 struct function *fn = DECL_STRUCT_FUNCTION (node->symbol.decl);
1424
1425 current_function_decl = node->symbol.decl;
1426 push_cfun (fn);
1427
1428 FOR_EACH_LOCAL_DECL (fn, i, var)
1429 if (auto_var_in_fn_p (var, fn->decl))
1430 size += expand_one_var (var, true, false);
1431
1432 if (stack_vars_num > 0)
1433 {
1434 /* Fake sorting the stack vars for account_stack_vars (). */
1435 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
1436 for (i = 0; i < stack_vars_num; ++i)
1437 stack_vars_sorted[i] = i;
1438 size += account_stack_vars ();
1439 fini_vars_expansion ();
1440 }
1441 pop_cfun ();
1442 current_function_decl = old_cur_fun_decl;
1443 return size;
1444 }
1445
1446 /* Expand all variables used in the function. */
1447
1448 static void
1449 expand_used_vars (void)
1450 {
1451 tree var, outer_block = DECL_INITIAL (current_function_decl);
1452 VEC(tree,heap) *maybe_local_decls = NULL;
1453 unsigned i;
1454 unsigned len;
1455
1456 /* Compute the phase of the stack frame for this function. */
1457 {
1458 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1459 int off = STARTING_FRAME_OFFSET % align;
1460 frame_phase = off ? align - off : 0;
1461 }
1462
1463 init_vars_expansion ();
1464
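/* Expand the out-of-SSA name partitions first; expand_one_var gives
   each one either a pseudo register or a stack slot. */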
1465 for (i = 0; i < SA.map->num_partitions; i++)
1466 {
1467 tree var = partition_to_var (SA.map, i);
1468
1469 gcc_assert (is_gimple_reg (var));
1470 if (TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
1471 expand_one_var (var, true, true);
1472 else
1473 {
1474 /* This is a PARM_DECL or RESULT_DECL. For those partitions that
1475 contain the default def (representing the parm or result itself)
1476 we don't do anything here. But those which don't contain the
1477 default def (representing a temporary based on the parm/result)
1478 we need to allocate space just like for normal VAR_DECLs. */
1479 if (!bitmap_bit_p (SA.partition_has_default_def, i))
1480 {
1481 expand_one_var (var, true, true);
1482 gcc_assert (SA.partition_to_pseudo[i]);
1483 }
1484 }
1485 }
1486
1487 /* At this point all variables on the local_decls with TREE_USED
1488 set are not associated with any block scope. Lay them out. */
1489
1490 len = VEC_length (tree, cfun->local_decls);
1491 FOR_EACH_LOCAL_DECL (cfun, i, var)
1492 {
1493 bool expand_now = false;
1494
1495 /* Expanded above already. */
1496 if (is_gimple_reg (var))
1497 {
1498 TREE_USED (var) = 0;
1499 goto next;
1500 }
1501 /* We didn't set a block for static or extern because it's hard
1502 to tell the difference between a global variable (re)declared
1503 in a local scope, and one that's really declared there to
1504 begin with. And it doesn't really matter much, since we're
1505 not giving them stack space. Expand them now. */
1506 else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
1507 expand_now = true;
1508
1509 /* If the variable is not associated with any block, then it
1510 was created by the optimizers, and could be live anywhere
1511 in the function. */
1512 else if (TREE_USED (var))
1513 expand_now = true;
1514
1515 /* Finally, mark all variables on the list as used. We'll use
1516 this in a moment when we expand those associated with scopes. */
1517 TREE_USED (var) = 1;
1518
1519 if (expand_now)
1520 expand_one_var (var, true, true);
1521
1522 next:
1523 if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
1524 {
1525 rtx rtl = DECL_RTL_IF_SET (var);
1526
1527 /* Keep artificial non-ignored vars in cfun->local_decls
1528 chain until instantiate_decls. */
1529 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
1530 add_local_decl (cfun, var);
1531 else if (rtl == NULL_RTX)
1532 /* If rtl isn't set yet, which can happen e.g. with
1533 -fstack-protector, retry before returning from this
1534 function. */
1535 VEC_safe_push (tree, heap, maybe_local_decls, var);
1536 }
1537 }
1538
1539 /* We duplicated some of the decls in CFUN->LOCAL_DECLS.
1540
1541 +-----------------+-----------------+
1542 | ...processed... | ...duplicates...|
1543 +-----------------+-----------------+
1544 ^
1545 +-- LEN points here.
1546
1547 We just want the duplicates, as those are the artificial
1548 non-ignored vars that we want to keep until instantiate_decls.
1549 Move them down and truncate the array. */
1550 if (!VEC_empty (tree, cfun->local_decls))
1551 VEC_block_remove (tree, cfun->local_decls, 0, len);
1552
1553 /* At this point, all variables within the block tree with TREE_USED
1554 set are actually used by the optimized function. Lay them out. */
1555 expand_used_vars_for_block (outer_block, true);
1556
1557 if (stack_vars_num > 0)
1558 {
1559 add_scope_conflicts ();
1560
1561 /* If stack protection is enabled, we don't share space between
1562 vulnerable data and non-vulnerable data. */
1563 if (flag_stack_protect)
1564 add_stack_protection_conflicts ();
1565
1566 /* Now that we have collected all stack variables, and have computed a
1567 minimal interference graph, attempt to save some stack space. */
1568 partition_stack_vars ();
1569 if (dump_file)
1570 dump_stack_var_partition ();
1571 }
1572
1573 /* There are several conditions under which we should create a
1574 stack guard: protect-all, alloca used, protected decls present. */
1575 if (flag_stack_protect == 2
1576 || (flag_stack_protect
1577 && (cfun->calls_alloca || has_protected_decls)))
1578 create_stack_guard ();
1579
1580 /* Assign rtl to each variable based on these partitions. */
1581 if (stack_vars_num > 0)
1582 {
1583 /* Reorder decls to be protected by iterating over the variables
1584 array multiple times, and allocating out of each phase in turn. */
1585 /* ??? We could probably integrate this into the qsort we did
1586 earlier, such that we naturally see these variables first,
1587 and thus naturally allocate things in the right order. */
1588 if (has_protected_decls)
1589 {
1590 /* Phase 1 contains only character arrays. */
1591 expand_stack_vars (stack_protect_decl_phase_1);
1592
1593 /* Phase 2 contains other kinds of arrays. */
1594 if (flag_stack_protect == 2)
1595 expand_stack_vars (stack_protect_decl_phase_2);
1596 }
1597
1598 expand_stack_vars (NULL);
1599
1600 fini_vars_expansion ();
1601 }
1602
1603 /* If there were any artificial non-ignored vars without rtl
1604 found earlier, see if deferred stack allocation hasn't assigned
1605 rtl to them. */
1606 FOR_EACH_VEC_ELT_REVERSE (tree, maybe_local_decls, i, var)
1607 {
1608 rtx rtl = DECL_RTL_IF_SET (var);
1609
1610 /* Keep artificial non-ignored vars in cfun->local_decls
1611 chain until instantiate_decls. */
1612 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
1613 add_local_decl (cfun, var);
1614 }
1615 VEC_free (tree, heap, maybe_local_decls);
1616
1617 /* If the target requires that FRAME_OFFSET be aligned, do it. */
1618 if (STACK_ALIGNMENT_NEEDED)
1619 {
1620 HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1621 if (!FRAME_GROWS_DOWNWARD)
1622 frame_offset += align - 1;
1623 frame_offset &= -align;
1624 }
1625 }
1626
1627
1628 /* If we need to produce a detailed dump, print the tree representation
1629 for STMT to the dump file. SINCE is the last RTX after which the RTL
1630 generated for STMT should have been appended. */
1631
1632 static void
1633 maybe_dump_rtl_for_gimple_stmt (gimple stmt, rtx since)
1634 {
1635 if (dump_file && (dump_flags & TDF_DETAILS))
1636 {
1637 fprintf (dump_file, "\n;; ");
1638 print_gimple_stmt (dump_file, stmt, 0,
1639 TDF_SLIM | (dump_flags & TDF_LINENO));
1640 fprintf (dump_file, "\n");
1641
1642 print_rtl (dump_file, since ? NEXT_INSN (since) : since);
1643 }
1644 }
1645
1646 /* Maps the blocks that do not contain tree labels to rtx labels. */
1647
1648 static struct pointer_map_t *lab_rtx_for_bb;
1649
1650 /* Returns the label_rtx expression for a label starting basic block BB. */
1651
1652 static rtx
1653 label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
1654 {
1655 gimple_stmt_iterator gsi;
1656 tree lab;
1657 gimple lab_stmt;
1658 void **elt;
1659
1660 if (bb->flags & BB_RTL)
1661 return block_label (bb);
1662
1663 elt = pointer_map_contains (lab_rtx_for_bb, bb);
1664 if (elt)
1665 return (rtx) *elt;
1666
1667 /* Find the tree label if it is present. */
1668
1669 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1670 {
1671 lab_stmt = gsi_stmt (gsi);
1672 if (gimple_code (lab_stmt) != GIMPLE_LABEL)
1673 break;
1674
1675 lab = gimple_label_label (lab_stmt);
1676 if (DECL_NONLOCAL (lab))
1677 break;
1678
1679 return label_rtx (lab);
1680 }
1681
1682 elt = pointer_map_insert (lab_rtx_for_bb, bb);
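/* No usable source-level label was found; create a fresh rtx label for
   this block and remember it in the map. */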
1683 *elt = gen_label_rtx ();
1684 return (rtx) *elt;
1685 }
1686
1687
1688 /* A subroutine of expand_gimple_cond. Given E, a fallthrough edge
1689 of a basic block where we just expanded the conditional at the end,
1690 possibly clean up the CFG and instruction sequence. LAST is the
1691 last instruction before the just emitted jump sequence. */
1692
1693 static void
1694 maybe_cleanup_end_of_block (edge e, rtx last)
1695 {
1696 /* Special case: when jumpif decides that the condition is
1697 trivial it emits an unconditional jump (and the necessary
1698 barrier). But we still have two edges, the fallthru one is
1699 wrong. purge_dead_edges would clean this up later. Unfortunately
1700 we have to insert insns (and split edges) before
1701 find_many_sub_basic_blocks and hence before purge_dead_edges.
1702 But splitting edges might create new blocks which depend on the
1703 fact that if there are two edges there's no barrier. So the
1704 barrier would get lost and verify_flow_info would ICE. Instead
1705 of auditing all edge splitters to care for the barrier (which
1706 normally isn't there in a cleaned CFG), fix it here. */
1707 if (BARRIER_P (get_last_insn ()))
1708 {
1709 rtx insn;
1710 remove_edge (e);
1711 /* Now we have a single successor block; if we have insns to
1712 insert on the remaining edge, we will potentially insert
1713 them at the end of this block (if the dest block isn't feasible)
1714 in order to avoid splitting the edge. This insertion will take
1715 place in front of the last jump. But we might have emitted
1716 multiple jumps (conditional and one unconditional) to the
1717 same destination. Inserting in front of the last one then
1718 is a problem. See PR 40021. We fix this by deleting all
1719 jumps except the last unconditional one. */
1720 insn = PREV_INSN (get_last_insn ());
1721 /* Make sure we have an unconditional jump. Otherwise we're
1722 confused. */
1723 gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
1724 for (insn = PREV_INSN (insn); insn != last;)
1725 {
1726 insn = PREV_INSN (insn);
1727 if (JUMP_P (NEXT_INSN (insn)))
1728 {
1729 if (!any_condjump_p (NEXT_INSN (insn)))
1730 {
1731 gcc_assert (BARRIER_P (NEXT_INSN (NEXT_INSN (insn))));
1732 delete_insn (NEXT_INSN (NEXT_INSN (insn)));
1733 }
1734 delete_insn (NEXT_INSN (insn));
1735 }
1736 }
1737 }
1738 }
1739
1740 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_COND.
1741 Returns a new basic block if we've terminated the current basic
1742 block and created a new one. */
1743
1744 static basic_block
1745 expand_gimple_cond (basic_block bb, gimple stmt)
1746 {
1747 basic_block new_bb, dest;
1748 edge new_edge;
1749 edge true_edge;
1750 edge false_edge;
1751 rtx last2, last;
1752 enum tree_code code;
1753 tree op0, op1;
1754
1755 code = gimple_cond_code (stmt);
1756 op0 = gimple_cond_lhs (stmt);
1757 op1 = gimple_cond_rhs (stmt);
1758 /* We're sometimes presented with such code:
1759 D.123_1 = x < y;
1760 if (D.123_1 != 0)
1761 ...
1762 This would expand to two comparisons which then later might
1763 be cleaned up by combine. But some pattern matchers like if-conversion
1764 work better when there's only one compare, so make up for this
1765 here as a special exception if TER would have made the same change. */
1766 if (gimple_cond_single_var_p (stmt)
1767 && SA.values
1768 && TREE_CODE (op0) == SSA_NAME
1769 && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
1770 {
1771 gimple second = SSA_NAME_DEF_STMT (op0);
1772 if (gimple_code (second) == GIMPLE_ASSIGN)
1773 {
1774 enum tree_code code2 = gimple_assign_rhs_code (second);
1775 if (TREE_CODE_CLASS (code2) == tcc_comparison)
1776 {
1777 code = code2;
1778 op0 = gimple_assign_rhs1 (second);
1779 op1 = gimple_assign_rhs2 (second);
1780 }
1781 /* If jumps are cheap turn some more codes into
1782 jumpy sequences. */
1783 else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4)
1784 {
1785 if ((code2 == BIT_AND_EXPR
1786 && TYPE_PRECISION (TREE_TYPE (op0)) == 1
1787 && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
1788 || code2 == TRUTH_AND_EXPR)
1789 {
1790 code = TRUTH_ANDIF_EXPR;
1791 op0 = gimple_assign_rhs1 (second);
1792 op1 = gimple_assign_rhs2 (second);
1793 }
1794 else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
1795 {
1796 code = TRUTH_ORIF_EXPR;
1797 op0 = gimple_assign_rhs1 (second);
1798 op1 = gimple_assign_rhs2 (second);
1799 }
1800 }
1801 }
1802 }
1803
1804 last2 = last = get_last_insn ();
1805
1806 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
1807 set_curr_insn_source_location (gimple_location (stmt));
1808 set_curr_insn_block (gimple_block (stmt));
1809
1810 /* These flags have no purpose in RTL land. */
1811 true_edge->flags &= ~EDGE_TRUE_VALUE;
1812 false_edge->flags &= ~EDGE_FALSE_VALUE;
1813
1814 /* We can either have a pure conditional jump with one fallthru edge or
1815 a two-way jump that needs to be decomposed into two basic blocks. */
1816 if (false_edge->dest == bb->next_bb)
1817 {
1818 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
1819 true_edge->probability);
1820 maybe_dump_rtl_for_gimple_stmt (stmt, last);
1821 if (true_edge->goto_locus)
1822 {
1823 set_curr_insn_source_location (true_edge->goto_locus);
1824 set_curr_insn_block (true_edge->goto_block);
1825 true_edge->goto_locus = curr_insn_locator ();
1826 }
1827 true_edge->goto_block = NULL;
1828 false_edge->flags |= EDGE_FALLTHRU;
1829 maybe_cleanup_end_of_block (false_edge, last);
1830 return NULL;
1831 }
1832 if (true_edge->dest == bb->next_bb)
1833 {
1834 jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
1835 false_edge->probability);
1836 maybe_dump_rtl_for_gimple_stmt (stmt, last);
1837 if (false_edge->goto_locus)
1838 {
1839 set_curr_insn_source_location (false_edge->goto_locus);
1840 set_curr_insn_block (false_edge->goto_block);
1841 false_edge->goto_locus = curr_insn_locator ();
1842 }
1843 false_edge->goto_block = NULL;
1844 true_edge->flags |= EDGE_FALLTHRU;
1845 maybe_cleanup_end_of_block (true_edge, last);
1846 return NULL;
1847 }
1848
1849 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
1850 true_edge->probability);
1851 last = get_last_insn ();
1852 if (false_edge->goto_locus)
1853 {
1854 set_curr_insn_source_location (false_edge->goto_locus);
1855 set_curr_insn_block (false_edge->goto_block);
1856 false_edge->goto_locus = curr_insn_locator ();
1857 }
1858 false_edge->goto_block = NULL;
1859 emit_jump (label_rtx_for_bb (false_edge->dest));
1860
1861 BB_END (bb) = last;
1862 if (BARRIER_P (BB_END (bb)))
1863 BB_END (bb) = PREV_INSN (BB_END (bb));
1864 update_bb_for_insn (bb);
1865
1866 new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
1867 dest = false_edge->dest;
1868 redirect_edge_succ (false_edge, new_bb);
1869 false_edge->flags |= EDGE_FALLTHRU;
1870 new_bb->count = false_edge->count;
1871 new_bb->frequency = EDGE_FREQUENCY (false_edge);
1872 if (current_loops && bb->loop_father)
1873 add_bb_to_loop (new_bb, bb->loop_father);
1874 new_edge = make_edge (new_bb, dest, 0);
1875 new_edge->probability = REG_BR_PROB_BASE;
1876 new_edge->count = new_bb->count;
1877 if (BARRIER_P (BB_END (new_bb)))
1878 BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
1879 update_bb_for_insn (new_bb);
1880
1881 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
1882
1883 if (true_edge->goto_locus)
1884 {
1885 set_curr_insn_source_location (true_edge->goto_locus);
1886 set_curr_insn_block (true_edge->goto_block);
1887 true_edge->goto_locus = curr_insn_locator ();
1888 }
1889 true_edge->goto_block = NULL;
1890
1891 return new_bb;
1892 }
1893
1894 /* Mark all calls that can have a transaction restart. */
1895
1896 static void
1897 mark_transaction_restart_calls (gimple stmt)
1898 {
1899 struct tm_restart_node dummy;
1900 void **slot;
1901
1902 if (!cfun->gimple_df->tm_restart)
1903 return;
1904
1905 dummy.stmt = stmt;
1906 slot = htab_find_slot (cfun->gimple_df->tm_restart, &dummy, NO_INSERT);
1907 if (slot)
1908 {
1909 struct tm_restart_node *n = (struct tm_restart_node *) *slot;
1910 tree list = n->label_or_list;
1911 rtx insn;
1912
1913 for (insn = next_real_insn (get_last_insn ());
1914 !CALL_P (insn);
1915 insn = next_real_insn (insn))
1916 continue;
1917
1918 if (TREE_CODE (list) == LABEL_DECL)
1919 add_reg_note (insn, REG_TM, label_rtx (list));
1920 else
1921 for (; list ; list = TREE_CHAIN (list))
1922 add_reg_note (insn, REG_TM, label_rtx (TREE_VALUE (list)));
1923 }
1924 }
1925
1926 /* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
1927 statement STMT. */
1928
1929 static void
1930 expand_call_stmt (gimple stmt)
1931 {
1932 tree exp, decl, lhs;
1933 bool builtin_p;
1934 size_t i;
1935
1936 if (gimple_call_internal_p (stmt))
1937 {
1938 expand_internal_call (stmt);
1939 return;
1940 }
1941
1942 exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);
1943
1944 CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
1945 decl = gimple_call_fndecl (stmt);
1946 builtin_p = decl && DECL_BUILT_IN (decl);
1947
1948 /* If this is not a builtin function, the function type through which the
1949 call is made may be different from the type of the function. */
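/* For example (illustrative only), for a call through a cast such as
       ((int (*) (long)) foo) (0L)
   gimple_call_fntype is "int (long)" even if foo itself is declared
   with a different prototype, so CALL_EXPR_FN is rebuilt below with a
   pointer to that call-site type.  */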
1950 if (!builtin_p)
1951 CALL_EXPR_FN (exp)
1952 = fold_convert (build_pointer_type (gimple_call_fntype (stmt)),
1953 CALL_EXPR_FN (exp));
1954
1955 TREE_TYPE (exp) = gimple_call_return_type (stmt);
1956 CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);
1957
1958 for (i = 0; i < gimple_call_num_args (stmt); i++)
1959 {
1960 tree arg = gimple_call_arg (stmt, i);
1961 gimple def;
1962 /* Substitute TERed addresses into arguments of builtin functions so we have a
1963 chance to infer more correct alignment information. See PR39954. */
1964 if (builtin_p
1965 && TREE_CODE (arg) == SSA_NAME
1966 && (def = get_gimple_for_ssa_name (arg))
1967 && gimple_assign_rhs_code (def) == ADDR_EXPR)
1968 arg = gimple_assign_rhs1 (def);
1969 CALL_EXPR_ARG (exp, i) = arg;
1970 }
1971
1972 if (gimple_has_side_effects (stmt))
1973 TREE_SIDE_EFFECTS (exp) = 1;
1974
1975 if (gimple_call_nothrow_p (stmt))
1976 TREE_NOTHROW (exp) = 1;
1977
1978 CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
1979 CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
1980 if (decl
1981 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
1982 && (DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA
1983 || DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA_WITH_ALIGN))
1984 CALL_ALLOCA_FOR_VAR_P (exp) = gimple_call_alloca_for_var_p (stmt);
1985 else
1986 CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
1987 CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
1988 SET_EXPR_LOCATION (exp, gimple_location (stmt));
1989 TREE_BLOCK (exp) = gimple_block (stmt);
1990
1991 /* Ensure RTL is created for debug args. */
1992 if (decl && DECL_HAS_DEBUG_ARGS_P (decl))
1993 {
1994 VEC(tree, gc) **debug_args = decl_debug_args_lookup (decl);
1995 unsigned int ix;
1996 tree dtemp;
1997
1998 if (debug_args)
1999 for (ix = 1; VEC_iterate (tree, *debug_args, ix, dtemp); ix += 2)
2000 {
2001 gcc_assert (TREE_CODE (dtemp) == DEBUG_EXPR_DECL);
2002 expand_debug_expr (dtemp);
2003 }
2004 }
2005
2006 lhs = gimple_call_lhs (stmt);
2007 if (lhs)
2008 expand_assignment (lhs, exp, false);
2009 else
2010 expand_expr_real_1 (exp, const0_rtx, VOIDmode, EXPAND_NORMAL, NULL);
2011
2012 mark_transaction_restart_calls (stmt);
2013 }
2014
2015 /* A subroutine of expand_gimple_stmt, expanding one gimple statement
2016 STMT that doesn't require special handling for outgoing edges. That
2017 is, no tailcalls and no GIMPLE_COND. */
2018
2019 static void
2020 expand_gimple_stmt_1 (gimple stmt)
2021 {
2022 tree op0;
2023
2024 set_curr_insn_source_location (gimple_location (stmt));
2025 set_curr_insn_block (gimple_block (stmt));
2026
2027 switch (gimple_code (stmt))
2028 {
2029 case GIMPLE_GOTO:
2030 op0 = gimple_goto_dest (stmt);
2031 if (TREE_CODE (op0) == LABEL_DECL)
2032 expand_goto (op0);
2033 else
2034 expand_computed_goto (op0);
2035 break;
2036 case GIMPLE_LABEL:
2037 expand_label (gimple_label_label (stmt));
2038 break;
2039 case GIMPLE_NOP:
2040 case GIMPLE_PREDICT:
2041 break;
2042 case GIMPLE_SWITCH:
2043 expand_case (stmt);
2044 break;
2045 case GIMPLE_ASM:
2046 expand_asm_stmt (stmt);
2047 break;
2048 case GIMPLE_CALL:
2049 expand_call_stmt (stmt);
2050 break;
2051
2052 case GIMPLE_RETURN:
2053 op0 = gimple_return_retval (stmt);
2054
2055 if (op0 && op0 != error_mark_node)
2056 {
2057 tree result = DECL_RESULT (current_function_decl);
2058
2059 /* If we are not returning the current function's RESULT_DECL,
2060 build an assignment to it. */
2061 if (op0 != result)
2062 {
2063 /* I believe that a function's RESULT_DECL is unique. */
2064 gcc_assert (TREE_CODE (op0) != RESULT_DECL);
2065
2066 /* ??? We'd like to use simply expand_assignment here,
2067 but this fails if the value is of BLKmode but the return
2068 decl is a register. expand_return has special handling
2069 for this combination, which eventually should move
2070 to common code. See comments there. Until then, let's
2071 build a modify expression :-/ */
2072 op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
2073 result, op0);
2074 }
2075 }
2076 if (!op0)
2077 expand_null_return ();
2078 else
2079 expand_return (op0);
2080 break;
2081
2082 case GIMPLE_ASSIGN:
2083 {
2084 tree lhs = gimple_assign_lhs (stmt);
2085
2086 /* Tree expand used to fiddle with |= and &= of two bitfield
2087 COMPONENT_REFs here. This can't happen with gimple; the LHS
2088 of binary assigns must be a gimple reg. */
2089
2090 if (TREE_CODE (lhs) != SSA_NAME
2091 || get_gimple_rhs_class (gimple_expr_code (stmt))
2092 == GIMPLE_SINGLE_RHS)
2093 {
2094 tree rhs = gimple_assign_rhs1 (stmt);
2095 gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
2096 == GIMPLE_SINGLE_RHS);
2097 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs))
2098 SET_EXPR_LOCATION (rhs, gimple_location (stmt));
2099 if (TREE_CLOBBER_P (rhs))
2100 /* This is a clobber marking that this LHS is going
2101 out of scope. */
2102 ;
2103 else
2104 expand_assignment (lhs, rhs,
2105 gimple_assign_nontemporal_move_p (stmt));
2106 }
2107 else
2108 {
2109 rtx target, temp;
2110 bool nontemporal = gimple_assign_nontemporal_move_p (stmt);
2111 struct separate_ops ops;
2112 bool promoted = false;
2113
2114 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
2115 if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
2116 promoted = true;
2117
2118 ops.code = gimple_assign_rhs_code (stmt);
2119 ops.type = TREE_TYPE (lhs);
2120 switch (get_gimple_rhs_class (gimple_expr_code (stmt)))
2121 {
2122 case GIMPLE_TERNARY_RHS:
2123 ops.op2 = gimple_assign_rhs3 (stmt);
2124 /* Fallthru */
2125 case GIMPLE_BINARY_RHS:
2126 ops.op1 = gimple_assign_rhs2 (stmt);
2127 /* Fallthru */
2128 case GIMPLE_UNARY_RHS:
2129 ops.op0 = gimple_assign_rhs1 (stmt);
2130 break;
2131 default:
2132 gcc_unreachable ();
2133 }
2134 ops.location = gimple_location (stmt);
2135
2136 /* If we want to use a nontemporal store, force the value into a
2137 register first. If we store into a promoted register,
2138 don't expand directly to the target. */
2139 temp = nontemporal || promoted ? NULL_RTX : target;
2140 temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
2141 EXPAND_NORMAL);
2142
2143 if (temp == target)
2144 ;
2145 else if (promoted)
2146 {
2147 int unsignedp = SUBREG_PROMOTED_UNSIGNED_P (target);
2148 /* If TEMP is a VOIDmode constant, use convert_modes to make
2149 sure that we properly convert it. */
2150 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
2151 {
2152 temp = convert_modes (GET_MODE (target),
2153 TYPE_MODE (ops.type),
2154 temp, unsignedp);
2155 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
2156 GET_MODE (target), temp, unsignedp);
2157 }
2158
2159 convert_move (SUBREG_REG (target), temp, unsignedp);
2160 }
2161 else if (nontemporal && emit_storent_insn (target, temp))
2162 ;
2163 else
2164 {
2165 temp = force_operand (temp, target);
2166 if (temp != target)
2167 emit_move_insn (target, temp);
2168 }
2169 }
2170 }
2171 break;
2172
2173 default:
2174 gcc_unreachable ();
2175 }
2176 }
2177
2178 /* Expand one gimple statement STMT and return the last RTL instruction
2179 before any of the newly generated ones.
2180
2181 In addition to generating the necessary RTL instructions this also
2182 sets REG_EH_REGION notes if necessary and sets the current source
2183 location for diagnostics. */
2184
2185 static rtx
2186 expand_gimple_stmt (gimple stmt)
2187 {
2188 location_t saved_location = input_location;
2189 rtx last = get_last_insn ();
2190 int lp_nr;
2191
2192 gcc_assert (cfun);
2193
2194 /* We need to save and restore the current source location so that errors
2195 discovered during expansion are emitted with the right location. But
2196 it would be better if the diagnostic routines used the source location
2197 embedded in the tree nodes rather than globals. */
2198 if (gimple_has_location (stmt))
2199 input_location = gimple_location (stmt);
2200
2201 expand_gimple_stmt_1 (stmt);
2202
2203 /* Free any temporaries used to evaluate this statement. */
2204 free_temp_slots ();
2205
2206 input_location = saved_location;
2207
2208 /* Mark all insns that may trap. */
2209 lp_nr = lookup_stmt_eh_lp (stmt);
2210 if (lp_nr)
2211 {
2212 rtx insn;
2213 for (insn = next_real_insn (last); insn;
2214 insn = next_real_insn (insn))
2215 {
2216 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
2217 /* If we want exceptions for non-call insns, any
2218 may_trap_p instruction may throw. */
2219 && GET_CODE (PATTERN (insn)) != CLOBBER
2220 && GET_CODE (PATTERN (insn)) != USE
2221 && insn_could_throw_p (insn))
2222 make_reg_eh_region_note (insn, 0, lp_nr);
2223 }
2224 }
2225
2226 return last;
2227 }
2228
2229 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL
2230 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
2231 generated a tail call (something that might be denied by the ABI
2232 rules governing the call; see calls.c).
2233
2234 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
2235 can still reach the rest of BB. The case here is __builtin_sqrt,
2236 where the NaN result goes through the external function (with a
2237 tailcall) and the normal result happens via a sqrt instruction. */
2238
2239 static basic_block
2240 expand_gimple_tailcall (basic_block bb, gimple stmt, bool *can_fallthru)
2241 {
2242 rtx last2, last;
2243 edge e;
2244 edge_iterator ei;
2245 int probability;
2246 gcov_type count;
2247
2248 last2 = last = expand_gimple_stmt (stmt);
2249
2250 for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
2251 if (CALL_P (last) && SIBLING_CALL_P (last))
2252 goto found;
2253
2254 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2255
2256 *can_fallthru = true;
2257 return NULL;
2258
2259 found:
2260 /* ??? Wouldn't it be better to just reset any pending stack adjust?
2261 Any instructions emitted here are about to be deleted. */
2262 do_pending_stack_adjust ();
2263
2264 /* Remove any non-eh, non-abnormal edges that don't go to exit. */
2265 /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be
2266 EH or abnormal edges, we shouldn't have created a tail call in
2267 the first place. So it seems to me we should just be removing
2268 all edges here, or redirecting the existing fallthru edge to
2269 the exit block. */
2270
2271 probability = 0;
2272 count = 0;
2273
2274 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2275 {
2276 if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
2277 {
2278 if (e->dest != EXIT_BLOCK_PTR)
2279 {
2280 e->dest->count -= e->count;
2281 e->dest->frequency -= EDGE_FREQUENCY (e);
2282 if (e->dest->count < 0)
2283 e->dest->count = 0;
2284 if (e->dest->frequency < 0)
2285 e->dest->frequency = 0;
2286 }
2287 count += e->count;
2288 probability += e->probability;
2289 remove_edge (e);
2290 }
2291 else
2292 ei_next (&ei);
2293 }
2294
2295 /* This is somewhat ugly: the call_expr expander often emits instructions
2296 after the sibcall (to perform the function return). These confuse the
2297 find_many_sub_basic_blocks code, so we need to get rid of them. */
2298 last = NEXT_INSN (last);
2299 gcc_assert (BARRIER_P (last));
2300
2301 *can_fallthru = false;
2302 while (NEXT_INSN (last))
2303 {
2304 /* For instance, an sqrt builtin expander expands an if with a
2305 sibcall in the then-arm and a label for the else-arm. */
2306 if (LABEL_P (NEXT_INSN (last)))
2307 {
2308 *can_fallthru = true;
2309 break;
2310 }
2311 delete_insn (NEXT_INSN (last));
2312 }
2313
2314 e = make_edge (bb, EXIT_BLOCK_PTR, EDGE_ABNORMAL | EDGE_SIBCALL);
2315 e->probability += probability;
2316 e->count += count;
2317 BB_END (bb) = last;
2318 update_bb_for_insn (bb);
2319
2320 if (NEXT_INSN (last))
2321 {
2322 bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2323
2324 last = BB_END (bb);
2325 if (BARRIER_P (last))
2326 BB_END (bb) = PREV_INSN (last);
2327 }
2328
2329 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2330
2331 return bb;
2332 }
2333
2334 /* Return the difference between the floor and the truncated result of
2335 a signed division by OP1 with remainder MOD. */
2336 static rtx
2337 floor_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2338 {
2339 /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
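/* Worked example (illustrative): for -7 / 2 the truncated quotient
   is -3 with MOD = -1; MOD is nonzero and op1 / MOD = 2 / -1 < 0,
   so the adjustment is -1 and the floor result is -3 + -1 = -4.  */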
2340 return gen_rtx_IF_THEN_ELSE
2341 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2342 gen_rtx_IF_THEN_ELSE
2343 (mode, gen_rtx_LT (BImode,
2344 gen_rtx_DIV (mode, op1, mod),
2345 const0_rtx),
2346 constm1_rtx, const0_rtx),
2347 const0_rtx);
2348 }
2349
2350 /* Return the difference between the ceil and the truncated result of
2351 a signed division by OP1 with remainder MOD. */
2352 static rtx
2353 ceil_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2354 {
2355 /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
2356 return gen_rtx_IF_THEN_ELSE
2357 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2358 gen_rtx_IF_THEN_ELSE
2359 (mode, gen_rtx_GT (BImode,
2360 gen_rtx_DIV (mode, op1, mod),
2361 const0_rtx),
2362 const1_rtx, const0_rtx),
2363 const0_rtx);
2364 }
2365
2366 /* Return the difference between the ceil and the truncated result of
2367 an unsigned division by OP1 with remainder MOD. */
2368 static rtx
2369 ceil_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
2370 {
2371 /* (mod != 0 ? 1 : 0) */
2372 return gen_rtx_IF_THEN_ELSE
2373 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2374 const1_rtx, const0_rtx);
2375 }
2376
2377 /* Return the difference between the rounded and the truncated result
2378 of a signed division by OP1 with remainder MOD. Halfway cases are
2379 rounded away from zero, rather than to the nearest even number. */
2380 static rtx
2381 round_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2382 {
2383 /* (abs (mod) >= abs (op1) - abs (mod)
2384 ? (op1 / mod > 0 ? 1 : -1)
2385 : 0) */
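/* Worked example (illustrative): for 7 / 2 the truncated quotient is
   3 with MOD = 1; abs (MOD) = 1 >= abs (op1) - abs (MOD) = 1 and
   op1 / MOD = 2 > 0, so the adjustment is +1 and the halfway value
   3.5 is rounded away from zero to 4.  */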
2386 return gen_rtx_IF_THEN_ELSE
2387 (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
2388 gen_rtx_MINUS (mode,
2389 gen_rtx_ABS (mode, op1),
2390 gen_rtx_ABS (mode, mod))),
2391 gen_rtx_IF_THEN_ELSE
2392 (mode, gen_rtx_GT (BImode,
2393 gen_rtx_DIV (mode, op1, mod),
2394 const0_rtx),
2395 const1_rtx, constm1_rtx),
2396 const0_rtx);
2397 }
2398
2399 /* Return the difference between the rounded and the truncated result
2400 of an unsigned division by OP1 with remainder MOD. Halfway cases
2401 are rounded away from zero, rather than to the nearest even
2402 number. */
2403 static rtx
2404 round_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2405 {
2406 /* (mod >= op1 - mod ? 1 : 0) */
2407 return gen_rtx_IF_THEN_ELSE
2408 (mode, gen_rtx_GE (BImode, mod,
2409 gen_rtx_MINUS (mode, op1, mod)),
2410 const1_rtx, const0_rtx);
2411 }
2412
2413 /* Convert X to MODE, which must be Pmode or ptr_mode, without emitting
2414 any rtl. */
2415
2416 static rtx
2417 convert_debug_memory_address (enum machine_mode mode, rtx x,
2418 addr_space_t as)
2419 {
2420 enum machine_mode xmode = GET_MODE (x);
2421
2422 #ifndef POINTERS_EXTEND_UNSIGNED
2423 gcc_assert (mode == Pmode
2424 || mode == targetm.addr_space.address_mode (as));
2425 gcc_assert (xmode == mode || xmode == VOIDmode);
2426 #else
2427 rtx temp;
2428
2429 gcc_assert (targetm.addr_space.valid_pointer_mode (mode, as));
2430
2431 if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
2432 return x;
2433
2434 if (GET_MODE_PRECISION (mode) < GET_MODE_PRECISION (xmode))
2435 x = simplify_gen_subreg (mode, x, xmode,
2436 subreg_lowpart_offset
2437 (mode, xmode));
2438 else if (POINTERS_EXTEND_UNSIGNED > 0)
2439 x = gen_rtx_ZERO_EXTEND (mode, x);
2440 else if (!POINTERS_EXTEND_UNSIGNED)
2441 x = gen_rtx_SIGN_EXTEND (mode, x);
2442 else
2443 {
2444 switch (GET_CODE (x))
2445 {
2446 case SUBREG:
2447 if ((SUBREG_PROMOTED_VAR_P (x)
2448 || (REG_P (SUBREG_REG (x)) && REG_POINTER (SUBREG_REG (x)))
2449 || (GET_CODE (SUBREG_REG (x)) == PLUS
2450 && REG_P (XEXP (SUBREG_REG (x), 0))
2451 && REG_POINTER (XEXP (SUBREG_REG (x), 0))
2452 && CONST_INT_P (XEXP (SUBREG_REG (x), 1))))
2453 && GET_MODE (SUBREG_REG (x)) == mode)
2454 return SUBREG_REG (x);
2455 break;
2456 case LABEL_REF:
2457 temp = gen_rtx_LABEL_REF (mode, XEXP (x, 0));
2458 LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
2459 return temp;
2460 case SYMBOL_REF:
2461 temp = shallow_copy_rtx (x);
2462 PUT_MODE (temp, mode);
2463 return temp;
2464 case CONST:
2465 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
2466 if (temp)
2467 temp = gen_rtx_CONST (mode, temp);
2468 return temp;
2469 case PLUS:
2470 case MINUS:
2471 if (CONST_INT_P (XEXP (x, 1)))
2472 {
2473 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
2474 if (temp)
2475 return gen_rtx_fmt_ee (GET_CODE (x), mode, temp, XEXP (x, 1));
2476 }
2477 break;
2478 default:
2479 break;
2480 }
2481 /* Don't know how to express ptr_extend as an operation in debug info. */
2482 return NULL;
2483 }
2484 #endif /* POINTERS_EXTEND_UNSIGNED */
2485
2486 return x;
2487 }
2488
2489 /* Return an RTX equivalent to the value of the parameter DECL. */
2490
2491 static rtx
2492 expand_debug_parm_decl (tree decl)
2493 {
2494 rtx incoming = DECL_INCOMING_RTL (decl);
2495
2496 if (incoming
2497 && GET_MODE (incoming) != BLKmode
2498 && ((REG_P (incoming) && HARD_REGISTER_P (incoming))
2499 || (MEM_P (incoming)
2500 && REG_P (XEXP (incoming, 0))
2501 && HARD_REGISTER_P (XEXP (incoming, 0)))))
2502 {
2503 rtx rtl = gen_rtx_ENTRY_VALUE (GET_MODE (incoming));
2504
2505 #ifdef HAVE_window_save
2506 /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
2507 If the target machine has an explicit window save instruction, the
2508 actual entry value is the corresponding OUTGOING_REGNO instead. */
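/* For instance (illustrative only), on a register-window target such
   as SPARC an argument that arrives in %i0 was passed by the caller
   in %o0, so before the explicit window save the entry value still
   lives in %o0.  */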
2509 if (REG_P (incoming)
2510 && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
2511 incoming
2512 = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
2513 OUTGOING_REGNO (REGNO (incoming)), 0);
2514 else if (MEM_P (incoming))
2515 {
2516 rtx reg = XEXP (incoming, 0);
2517 if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
2518 {
2519 reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
2520 incoming = replace_equiv_address_nv (incoming, reg);
2521 }
2522 }
2523 #endif
2524
2525 ENTRY_VALUE_EXP (rtl) = incoming;
2526 return rtl;
2527 }
2528
2529 if (incoming
2530 && GET_MODE (incoming) != BLKmode
2531 && !TREE_ADDRESSABLE (decl)
2532 && MEM_P (incoming)
2533 && (XEXP (incoming, 0) == virtual_incoming_args_rtx
2534 || (GET_CODE (XEXP (incoming, 0)) == PLUS
2535 && XEXP (XEXP (incoming, 0), 0) == virtual_incoming_args_rtx
2536 && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
2537 return incoming;
2538
2539 return NULL_RTX;
2540 }
2541
2542 /* Return an RTX equivalent to the value of the tree expression EXP. */
2543
2544 static rtx
2545 expand_debug_expr (tree exp)
2546 {
2547 rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
2548 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
2549 enum machine_mode inner_mode = VOIDmode;
2550 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
2551 addr_space_t as;
2552
2553 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
2554 {
2555 case tcc_expression:
2556 switch (TREE_CODE (exp))
2557 {
2558 case COND_EXPR:
2559 case DOT_PROD_EXPR:
2560 case WIDEN_MULT_PLUS_EXPR:
2561 case WIDEN_MULT_MINUS_EXPR:
2562 case FMA_EXPR:
2563 goto ternary;
2564
2565 case TRUTH_ANDIF_EXPR:
2566 case TRUTH_ORIF_EXPR:
2567 case TRUTH_AND_EXPR:
2568 case TRUTH_OR_EXPR:
2569 case TRUTH_XOR_EXPR:
2570 goto binary;
2571
2572 case TRUTH_NOT_EXPR:
2573 goto unary;
2574
2575 default:
2576 break;
2577 }
2578 break;
2579
2580 ternary:
2581 op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
2582 if (!op2)
2583 return NULL_RTX;
2584 /* Fall through. */
2585
2586 binary:
2587 case tcc_binary:
2588 case tcc_comparison:
2589 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
2590 if (!op1)
2591 return NULL_RTX;
2592 /* Fall through. */
2593
2594 unary:
2595 case tcc_unary:
2596 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
2597 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2598 if (!op0)
2599 return NULL_RTX;
2600 break;
2601
2602 case tcc_type:
2603 case tcc_statement:
2604 gcc_unreachable ();
2605
2606 case tcc_constant:
2607 case tcc_exceptional:
2608 case tcc_declaration:
2609 case tcc_reference:
2610 case tcc_vl_exp:
2611 break;
2612 }
2613
2614 switch (TREE_CODE (exp))
2615 {
2616 case STRING_CST:
2617 if (!lookup_constant_def (exp))
2618 {
2619 if (strlen (TREE_STRING_POINTER (exp)) + 1
2620 != (size_t) TREE_STRING_LENGTH (exp))
2621 return NULL_RTX;
2622 op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
2623 op0 = gen_rtx_MEM (BLKmode, op0);
2624 set_mem_attributes (op0, exp, 0);
2625 return op0;
2626 }
2627 /* Fall through... */
2628
2629 case INTEGER_CST:
2630 case REAL_CST:
2631 case FIXED_CST:
2632 op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
2633 return op0;
2634
2635 case COMPLEX_CST:
2636 gcc_assert (COMPLEX_MODE_P (mode));
2637 op0 = expand_debug_expr (TREE_REALPART (exp));
2638 op1 = expand_debug_expr (TREE_IMAGPART (exp));
2639 return gen_rtx_CONCAT (mode, op0, op1);
2640
2641 case DEBUG_EXPR_DECL:
2642 op0 = DECL_RTL_IF_SET (exp);
2643
2644 if (op0)
2645 return op0;
2646
2647 op0 = gen_rtx_DEBUG_EXPR (mode);
2648 DEBUG_EXPR_TREE_DECL (op0) = exp;
2649 SET_DECL_RTL (exp, op0);
2650
2651 return op0;
2652
2653 case VAR_DECL:
2654 case PARM_DECL:
2655 case FUNCTION_DECL:
2656 case LABEL_DECL:
2657 case CONST_DECL:
2658 case RESULT_DECL:
2659 op0 = DECL_RTL_IF_SET (exp);
2660
2661 /* This decl was probably optimized away. */
2662 if (!op0)
2663 {
2664 if (TREE_CODE (exp) != VAR_DECL
2665 || DECL_EXTERNAL (exp)
2666 || !TREE_STATIC (exp)
2667 || !DECL_NAME (exp)
2668 || DECL_HARD_REGISTER (exp)
2669 || DECL_IN_CONSTANT_POOL (exp)
2670 || mode == VOIDmode)
2671 return NULL;
2672
2673 op0 = make_decl_rtl_for_debug (exp);
2674 if (!MEM_P (op0)
2675 || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
2676 || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
2677 return NULL;
2678 }
2679 else
2680 op0 = copy_rtx (op0);
2681
2682 if (GET_MODE (op0) == BLKmode
2683 /* If op0 is not BLKmode but mode is BLKmode, adjust_mode
2684 below would ICE. While it is likely a FE bug,
2685 try to be robust here. See PR43166. */
2686 || mode == BLKmode
2687 || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
2688 {
2689 gcc_assert (MEM_P (op0));
2690 op0 = adjust_address_nv (op0, mode, 0);
2691 return op0;
2692 }
2693
2694 /* Fall through. */
2695
2696 adjust_mode:
2697 case PAREN_EXPR:
2698 case NOP_EXPR:
2699 case CONVERT_EXPR:
2700 {
2701 inner_mode = GET_MODE (op0);
2702
2703 if (mode == inner_mode)
2704 return op0;
2705
2706 if (inner_mode == VOIDmode)
2707 {
2708 if (TREE_CODE (exp) == SSA_NAME)
2709 inner_mode = TYPE_MODE (TREE_TYPE (exp));
2710 else
2711 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
2712 if (mode == inner_mode)
2713 return op0;
2714 }
2715
2716 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
2717 {
2718 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
2719 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
2720 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
2721 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
2722 else
2723 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
2724 }
2725 else if (FLOAT_MODE_P (mode))
2726 {
2727 gcc_assert (TREE_CODE (exp) != SSA_NAME);
2728 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
2729 op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
2730 else
2731 op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
2732 }
2733 else if (FLOAT_MODE_P (inner_mode))
2734 {
2735 if (unsignedp)
2736 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
2737 else
2738 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
2739 }
2740 else if (CONSTANT_P (op0)
2741 || GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (inner_mode))
2742 op0 = simplify_gen_subreg (mode, op0, inner_mode,
2743 subreg_lowpart_offset (mode,
2744 inner_mode));
2745 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == tcc_unary
2746 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
2747 : unsignedp)
2748 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
2749 else
2750 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
2751
2752 return op0;
2753 }
2754
2755 case MEM_REF:
2756 if (!is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
2757 {
2758 tree newexp = fold_binary (MEM_REF, TREE_TYPE (exp),
2759 TREE_OPERAND (exp, 0),
2760 TREE_OPERAND (exp, 1));
2761 if (newexp)
2762 return expand_debug_expr (newexp);
2763 }
2764 /* FALLTHROUGH */
2765 case INDIRECT_REF:
2766 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
2767 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2768 if (!op0)
2769 return NULL;
2770
2771 if (TREE_CODE (exp) == MEM_REF)
2772 {
2773 if (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
2774 || (GET_CODE (op0) == PLUS
2775 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR))
2776 /* (mem (debug_implicit_ptr)) might confuse aliasing.
2777 Instead just use get_inner_reference. */
2778 goto component_ref;
2779
2780 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
2781 if (!op1 || !CONST_INT_P (op1))
2782 return NULL;
2783
2784 op0 = plus_constant (inner_mode, op0, INTVAL (op1));
2785 }
2786
2787 if (POINTER_TYPE_P (TREE_TYPE (exp)))
2788 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
2789 else
2790 as = ADDR_SPACE_GENERIC;
2791
2792 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
2793 op0, as);
2794 if (op0 == NULL_RTX)
2795 return NULL;
2796
2797 op0 = gen_rtx_MEM (mode, op0);
2798 set_mem_attributes (op0, exp, 0);
2799 if (TREE_CODE (exp) == MEM_REF
2800 && !is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
2801 set_mem_expr (op0, NULL_TREE);
2802 set_mem_addr_space (op0, as);
2803
2804 return op0;
2805
2806 case TARGET_MEM_REF:
2807 if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
2808 && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp), 0)))
2809 return NULL;
2810
2811 op0 = expand_debug_expr
2812 (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
2813 if (!op0)
2814 return NULL;
2815
2816 if (POINTER_TYPE_P (TREE_TYPE (exp)))
2817 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
2818 else
2819 as = ADDR_SPACE_GENERIC;
2820
2821 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
2822 op0, as);
2823 if (op0 == NULL_RTX)
2824 return NULL;
2825
2826 op0 = gen_rtx_MEM (mode, op0);
2827
2828 set_mem_attributes (op0, exp, 0);
2829 set_mem_addr_space (op0, as);
2830
2831 return op0;
2832
2833 component_ref:
2834 case ARRAY_REF:
2835 case ARRAY_RANGE_REF:
2836 case COMPONENT_REF:
2837 case BIT_FIELD_REF:
2838 case REALPART_EXPR:
2839 case IMAGPART_EXPR:
2840 case VIEW_CONVERT_EXPR:
2841 {
2842 enum machine_mode mode1;
2843 HOST_WIDE_INT bitsize, bitpos;
2844 tree offset;
2845 int volatilep = 0;
2846 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
2847 &mode1, &unsignedp, &volatilep, false);
2848 rtx orig_op0;
2849
2850 if (bitsize == 0)
2851 return NULL;
2852
2853 orig_op0 = op0 = expand_debug_expr (tem);
2854
2855 if (!op0)
2856 return NULL;
2857
2858 if (offset)
2859 {
2860 enum machine_mode addrmode, offmode;
2861
2862 if (!MEM_P (op0))
2863 return NULL;
2864
2865 op0 = XEXP (op0, 0);
2866 addrmode = GET_MODE (op0);
2867 if (addrmode == VOIDmode)
2868 addrmode = Pmode;
2869
2870 op1 = expand_debug_expr (offset);
2871 if (!op1)
2872 return NULL;
2873
2874 offmode = GET_MODE (op1);
2875 if (offmode == VOIDmode)
2876 offmode = TYPE_MODE (TREE_TYPE (offset));
2877
2878 if (addrmode != offmode)
2879 op1 = simplify_gen_subreg (addrmode, op1, offmode,
2880 subreg_lowpart_offset (addrmode,
2881 offmode));
2882
2883 /* Don't use offset_address here; we don't need a
2884 recognizable address, and we don't want to generate
2885 code. */
2886 op0 = gen_rtx_MEM (mode, simplify_gen_binary (PLUS, addrmode,
2887 op0, op1));
2888 }
2889
2890 if (MEM_P (op0))
2891 {
2892 if (mode1 == VOIDmode)
2893 /* Bitfield. */
2894 mode1 = smallest_mode_for_size (bitsize, MODE_INT);
2895 if (bitpos >= BITS_PER_UNIT)
2896 {
2897 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
2898 bitpos %= BITS_PER_UNIT;
2899 }
2900 else if (bitpos < 0)
2901 {
2902 HOST_WIDE_INT units
2903 = (-bitpos + BITS_PER_UNIT - 1) / BITS_PER_UNIT;
2904 op0 = adjust_address_nv (op0, mode1, units);
2905 bitpos += units * BITS_PER_UNIT;
2906 }
2907 else if (bitpos == 0 && bitsize == GET_MODE_BITSIZE (mode))
2908 op0 = adjust_address_nv (op0, mode, 0);
2909 else if (GET_MODE (op0) != mode1)
2910 op0 = adjust_address_nv (op0, mode1, 0);
2911 else
2912 op0 = copy_rtx (op0);
2913 if (op0 == orig_op0)
2914 op0 = shallow_copy_rtx (op0);
2915 set_mem_attributes (op0, exp, 0);
2916 }
2917
2918 if (bitpos == 0 && mode == GET_MODE (op0))
2919 return op0;
2920
2921 if (bitpos < 0)
2922 return NULL;
2923
2924 if (GET_MODE (op0) == BLKmode)
2925 return NULL;
2926
2927 if ((bitpos % BITS_PER_UNIT) == 0
2928 && bitsize == GET_MODE_BITSIZE (mode1))
2929 {
2930 enum machine_mode opmode = GET_MODE (op0);
2931
2932 if (opmode == VOIDmode)
2933 opmode = TYPE_MODE (TREE_TYPE (tem));
2934
2935 /* This condition may hold if we're expanding the address
2936 right past the end of an array that turned out not to
2937 be addressable (i.e., the address was only computed in
2938 debug stmts). The gen_subreg below would rightfully
2939 crash, and the address doesn't really exist, so just
2940 drop it. */
2941 if (bitpos >= GET_MODE_BITSIZE (opmode))
2942 return NULL;
2943
2944 if ((bitpos % GET_MODE_BITSIZE (mode)) == 0)
2945 return simplify_gen_subreg (mode, op0, opmode,
2946 bitpos / BITS_PER_UNIT);
2947 }
2948
2949 return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
2950 && TYPE_UNSIGNED (TREE_TYPE (exp))
2951 ? SIGN_EXTRACT
2952 : ZERO_EXTRACT, mode,
2953 GET_MODE (op0) != VOIDmode
2954 ? GET_MODE (op0)
2955 : TYPE_MODE (TREE_TYPE (tem)),
2956 op0, GEN_INT (bitsize), GEN_INT (bitpos));
2957 }
2958
2959 case ABS_EXPR:
2960 return simplify_gen_unary (ABS, mode, op0, mode);
2961
2962 case NEGATE_EXPR:
2963 return simplify_gen_unary (NEG, mode, op0, mode);
2964
2965 case BIT_NOT_EXPR:
2966 return simplify_gen_unary (NOT, mode, op0, mode);
2967
2968 case FLOAT_EXPR:
2969 return simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
2970 0)))
2971 ? UNSIGNED_FLOAT : FLOAT, mode, op0,
2972 inner_mode);
2973
2974 case FIX_TRUNC_EXPR:
2975 return simplify_gen_unary (unsignedp ? UNSIGNED_FIX : FIX, mode, op0,
2976 inner_mode);
2977
2978 case POINTER_PLUS_EXPR:
2979 /* For the rare target where pointers are not the same size as
2980 size_t, we need to check for mis-matched modes and correct
2981 the addend. */
2982 if (op0 && op1
2983 && GET_MODE (op0) != VOIDmode && GET_MODE (op1) != VOIDmode
2984 && GET_MODE (op0) != GET_MODE (op1))
2985 {
2986 if (GET_MODE_BITSIZE (GET_MODE (op0)) < GET_MODE_BITSIZE (GET_MODE (op1)))
2987 op1 = simplify_gen_unary (TRUNCATE, GET_MODE (op0), op1,
2988 GET_MODE (op1));
2989 else
2990 /* We always sign-extend, regardless of the signedness of
2991 the operand, because the operand is always unsigned
2992 here even if the original C expression is signed. */
2993 op1 = simplify_gen_unary (SIGN_EXTEND, GET_MODE (op0), op1,
2994 GET_MODE (op1));
2995 }
2996 /* Fall through. */
2997 case PLUS_EXPR:
2998 return simplify_gen_binary (PLUS, mode, op0, op1);
2999
3000 case MINUS_EXPR:
3001 return simplify_gen_binary (MINUS, mode, op0, op1);
3002
3003 case MULT_EXPR:
3004 return simplify_gen_binary (MULT, mode, op0, op1);
3005
3006 case RDIV_EXPR:
3007 case TRUNC_DIV_EXPR:
3008 case EXACT_DIV_EXPR:
3009 if (unsignedp)
3010 return simplify_gen_binary (UDIV, mode, op0, op1);
3011 else
3012 return simplify_gen_binary (DIV, mode, op0, op1);
3013
3014 case TRUNC_MOD_EXPR:
3015 return simplify_gen_binary (unsignedp ? UMOD : MOD, mode, op0, op1);
3016
3017 case FLOOR_DIV_EXPR:
3018 if (unsignedp)
3019 return simplify_gen_binary (UDIV, mode, op0, op1);
3020 else
3021 {
3022 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
3023 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3024 rtx adj = floor_sdiv_adjust (mode, mod, op1);
3025 return simplify_gen_binary (PLUS, mode, div, adj);
3026 }
3027
3028 case FLOOR_MOD_EXPR:
3029 if (unsignedp)
3030 return simplify_gen_binary (UMOD, mode, op0, op1);
3031 else
3032 {
3033 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3034 rtx adj = floor_sdiv_adjust (mode, mod, op1);
3035 adj = simplify_gen_unary (NEG, mode,
3036 simplify_gen_binary (MULT, mode, adj, op1),
3037 mode);
3038 return simplify_gen_binary (PLUS, mode, mod, adj);
3039 }
3040
3041 case CEIL_DIV_EXPR:
3042 if (unsignedp)
3043 {
3044 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
3045 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
3046 rtx adj = ceil_udiv_adjust (mode, mod, op1);
3047 return simplify_gen_binary (PLUS, mode, div, adj);
3048 }
3049 else
3050 {
3051 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
3052 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3053 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
3054 return simplify_gen_binary (PLUS, mode, div, adj);
3055 }
3056
3057 case CEIL_MOD_EXPR:
3058 if (unsignedp)
3059 {
3060 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
3061 rtx adj = ceil_udiv_adjust (mode, mod, op1);
3062 adj = simplify_gen_unary (NEG, mode,
3063 simplify_gen_binary (MULT, mode, adj, op1),
3064 mode);
3065 return simplify_gen_binary (PLUS, mode, mod, adj);
3066 }
3067 else
3068 {
3069 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3070 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
3071 adj = simplify_gen_unary (NEG, mode,
3072 simplify_gen_binary (MULT, mode, adj, op1),
3073 mode);
3074 return simplify_gen_binary (PLUS, mode, mod, adj);
3075 }
3076
3077 case ROUND_DIV_EXPR:
3078 if (unsignedp)
3079 {
3080 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
3081 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
3082 rtx adj = round_udiv_adjust (mode, mod, op1);
3083 return simplify_gen_binary (PLUS, mode, div, adj);
3084 }
3085 else
3086 {
3087 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
3088 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3089 rtx adj = round_sdiv_adjust (mode, mod, op1);
3090 return simplify_gen_binary (PLUS, mode, div, adj);
3091 }
3092
3093 case ROUND_MOD_EXPR:
3094 if (unsignedp)
3095 {
3096 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
3097 rtx adj = round_udiv_adjust (mode, mod, op1);
3098 adj = simplify_gen_unary (NEG, mode,
3099 simplify_gen_binary (MULT, mode, adj, op1),
3100 mode);
3101 return simplify_gen_binary (PLUS, mode, mod, adj);
3102 }
3103 else
3104 {
3105 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3106 rtx adj = round_sdiv_adjust (mode, mod, op1);
3107 adj = simplify_gen_unary (NEG, mode,
3108 simplify_gen_binary (MULT, mode, adj, op1),
3109 mode);
3110 return simplify_gen_binary (PLUS, mode, mod, adj);
3111 }
3112
3113 case LSHIFT_EXPR:
3114 return simplify_gen_binary (ASHIFT, mode, op0, op1);
3115
3116 case RSHIFT_EXPR:
3117 if (unsignedp)
3118 return simplify_gen_binary (LSHIFTRT, mode, op0, op1);
3119 else
3120 return simplify_gen_binary (ASHIFTRT, mode, op0, op1);
3121
3122 case LROTATE_EXPR:
3123 return simplify_gen_binary (ROTATE, mode, op0, op1);
3124
3125 case RROTATE_EXPR:
3126 return simplify_gen_binary (ROTATERT, mode, op0, op1);
3127
3128 case MIN_EXPR:
3129 return simplify_gen_binary (unsignedp ? UMIN : SMIN, mode, op0, op1);
3130
3131 case MAX_EXPR:
3132 return simplify_gen_binary (unsignedp ? UMAX : SMAX, mode, op0, op1);
3133
3134 case BIT_AND_EXPR:
3135 case TRUTH_AND_EXPR:
3136 return simplify_gen_binary (AND, mode, op0, op1);
3137
3138 case BIT_IOR_EXPR:
3139 case TRUTH_OR_EXPR:
3140 return simplify_gen_binary (IOR, mode, op0, op1);
3141
3142 case BIT_XOR_EXPR:
3143 case TRUTH_XOR_EXPR:
3144 return simplify_gen_binary (XOR, mode, op0, op1);
3145
3146 case TRUTH_ANDIF_EXPR:
3147 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
3148
3149 case TRUTH_ORIF_EXPR:
3150 return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
3151
3152 case TRUTH_NOT_EXPR:
3153 return simplify_gen_relational (EQ, mode, inner_mode, op0, const0_rtx);
3154
3155 case LT_EXPR:
3156 return simplify_gen_relational (unsignedp ? LTU : LT, mode, inner_mode,
3157 op0, op1);
3158
3159 case LE_EXPR:
3160 return simplify_gen_relational (unsignedp ? LEU : LE, mode, inner_mode,
3161 op0, op1);
3162
3163 case GT_EXPR:
3164 return simplify_gen_relational (unsignedp ? GTU : GT, mode, inner_mode,
3165 op0, op1);
3166
3167 case GE_EXPR:
3168 return simplify_gen_relational (unsignedp ? GEU : GE, mode, inner_mode,
3169 op0, op1);
3170
3171 case EQ_EXPR:
3172 return simplify_gen_relational (EQ, mode, inner_mode, op0, op1);
3173
3174 case NE_EXPR:
3175 return simplify_gen_relational (NE, mode, inner_mode, op0, op1);
3176
3177 case UNORDERED_EXPR:
3178 return simplify_gen_relational (UNORDERED, mode, inner_mode, op0, op1);
3179
3180 case ORDERED_EXPR:
3181 return simplify_gen_relational (ORDERED, mode, inner_mode, op0, op1);
3182
3183 case UNLT_EXPR:
3184 return simplify_gen_relational (UNLT, mode, inner_mode, op0, op1);
3185
3186 case UNLE_EXPR:
3187 return simplify_gen_relational (UNLE, mode, inner_mode, op0, op1);
3188
3189 case UNGT_EXPR:
3190 return simplify_gen_relational (UNGT, mode, inner_mode, op0, op1);
3191
3192 case UNGE_EXPR:
3193 return simplify_gen_relational (UNGE, mode, inner_mode, op0, op1);
3194
3195 case UNEQ_EXPR:
3196 return simplify_gen_relational (UNEQ, mode, inner_mode, op0, op1);
3197
3198 case LTGT_EXPR:
3199 return simplify_gen_relational (LTGT, mode, inner_mode, op0, op1);
3200
3201 case COND_EXPR:
3202 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
3203
3204 case COMPLEX_EXPR:
3205 gcc_assert (COMPLEX_MODE_P (mode));
3206 if (GET_MODE (op0) == VOIDmode)
3207 op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
3208 if (GET_MODE (op1) == VOIDmode)
3209 op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
3210 return gen_rtx_CONCAT (mode, op0, op1);
3211
3212 case CONJ_EXPR:
3213 if (GET_CODE (op0) == CONCAT)
3214 return gen_rtx_CONCAT (mode, XEXP (op0, 0),
3215 simplify_gen_unary (NEG, GET_MODE_INNER (mode),
3216 XEXP (op0, 1),
3217 GET_MODE_INNER (mode)));
3218 else
3219 {
3220 enum machine_mode imode = GET_MODE_INNER (mode);
3221 rtx re, im;
3222
3223 if (MEM_P (op0))
3224 {
3225 re = adjust_address_nv (op0, imode, 0);
3226 im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
3227 }
3228 else
3229 {
3230 enum machine_mode ifmode = int_mode_for_mode (mode);
3231 enum machine_mode ihmode = int_mode_for_mode (imode);
3232 rtx halfsize;
3233 if (ifmode == BLKmode || ihmode == BLKmode)
3234 return NULL;
3235 halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
3236 re = op0;
3237 if (mode != ifmode)
3238 re = gen_rtx_SUBREG (ifmode, re, 0);
3239 re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
3240 if (imode != ihmode)
3241 re = gen_rtx_SUBREG (imode, re, 0);
3242 im = copy_rtx (op0);
3243 if (mode != ifmode)
3244 im = gen_rtx_SUBREG (ifmode, im, 0);
3245 im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
3246 if (imode != ihmode)
3247 im = gen_rtx_SUBREG (imode, im, 0);
3248 }
3249 im = gen_rtx_NEG (imode, im);
3250 return gen_rtx_CONCAT (mode, re, im);
3251 }
3252
3253 case ADDR_EXPR:
3254 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
3255 if (!op0 || !MEM_P (op0))
3256 {
3257 if ((TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
3258 || TREE_CODE (TREE_OPERAND (exp, 0)) == PARM_DECL
3259 || TREE_CODE (TREE_OPERAND (exp, 0)) == RESULT_DECL)
3260 && (!TREE_ADDRESSABLE (TREE_OPERAND (exp, 0))
3261 || target_for_debug_bind (TREE_OPERAND (exp, 0))))
3262 return gen_rtx_DEBUG_IMPLICIT_PTR (mode, TREE_OPERAND (exp, 0));
3263
3264 if (handled_component_p (TREE_OPERAND (exp, 0)))
3265 {
3266 HOST_WIDE_INT bitoffset, bitsize, maxsize;
3267 tree decl
3268 = get_ref_base_and_extent (TREE_OPERAND (exp, 0),
3269 &bitoffset, &bitsize, &maxsize);
3270 if ((TREE_CODE (decl) == VAR_DECL
3271 || TREE_CODE (decl) == PARM_DECL
3272 || TREE_CODE (decl) == RESULT_DECL)
3273 && (!TREE_ADDRESSABLE (decl)
3274 || target_for_debug_bind (decl))
3275 && (bitoffset % BITS_PER_UNIT) == 0
3276 && bitsize > 0
3277 && bitsize == maxsize)
3278 {
3279 rtx base = gen_rtx_DEBUG_IMPLICIT_PTR (mode, decl);
3280 return plus_constant (mode, base, bitoffset / BITS_PER_UNIT);
3281 }
3282 }
3283
3284 return NULL;
3285 }
3286
3287 as = TYPE_ADDR_SPACE (TREE_TYPE (exp));
3288 op0 = convert_debug_memory_address (mode, XEXP (op0, 0), as);
3289
3290 return op0;
3291
3292 case VECTOR_CST:
3293 {
3294 unsigned i;
3295
3296 op0 = gen_rtx_CONCATN
3297 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
3298
3299 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
3300 {
3301 op1 = expand_debug_expr (VECTOR_CST_ELT (exp, i));
3302 if (!op1)
3303 return NULL;
3304 XVECEXP (op0, 0, i) = op1;
3305 }
3306
3307 return op0;
3308 }
3309
3310 case CONSTRUCTOR:
3311 if (TREE_CLOBBER_P (exp))
3312 return NULL;
3313 else if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
3314 {
3315 unsigned i;
3316 tree val;
3317
3318 op0 = gen_rtx_CONCATN
3319 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
3320
3321 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
3322 {
3323 op1 = expand_debug_expr (val);
3324 if (!op1)
3325 return NULL;
3326 XVECEXP (op0, 0, i) = op1;
3327 }
3328
3329 if (i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)))
3330 {
3331 op1 = expand_debug_expr
3332 (build_zero_cst (TREE_TYPE (TREE_TYPE (exp))));
3333
3334 if (!op1)
3335 return NULL;
3336
3337 for (; i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)); i++)
3338 XVECEXP (op0, 0, i) = op1;
3339 }
3340
3341 return op0;
3342 }
3343 else
3344 goto flag_unsupported;
3345
3346 case CALL_EXPR:
3347 /* ??? Maybe handle some builtins? */
3348 return NULL;
3349
3350 case SSA_NAME:
3351 {
3352 gimple g = get_gimple_for_ssa_name (exp);
3353 if (g)
3354 {
3355 op0 = expand_debug_expr (gimple_assign_rhs_to_tree (g));
3356 if (!op0)
3357 return NULL;
3358 }
3359 else
3360 {
3361 int part = var_to_partition (SA.map, exp);
3362
3363 if (part == NO_PARTITION)
3364 {
3365 /* If this is a reference to an incoming value of a parameter
3366 that is never used in the code, or whose incoming
3367 value is never used in the code, use the PARM_DECL's
3368 DECL_RTL if set. */
3369 if (SSA_NAME_IS_DEFAULT_DEF (exp)
3370 && TREE_CODE (SSA_NAME_VAR (exp)) == PARM_DECL)
3371 {
3372 op0 = expand_debug_parm_decl (SSA_NAME_VAR (exp));
3373 if (op0)
3374 goto adjust_mode;
3375 op0 = expand_debug_expr (SSA_NAME_VAR (exp));
3376 if (op0)
3377 goto adjust_mode;
3378 }
3379 return NULL;
3380 }
3381
3382 gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
3383
3384 op0 = copy_rtx (SA.partition_to_pseudo[part]);
3385 }
3386 goto adjust_mode;
3387 }
3388
3389 case ERROR_MARK:
3390 return NULL;
3391
3392 /* Vector stuff. For most of the codes we don't have rtl codes. */
3393 case REALIGN_LOAD_EXPR:
3394 case REDUC_MAX_EXPR:
3395 case REDUC_MIN_EXPR:
3396 case REDUC_PLUS_EXPR:
3397 case VEC_COND_EXPR:
3398 case VEC_LSHIFT_EXPR:
3399 case VEC_PACK_FIX_TRUNC_EXPR:
3400 case VEC_PACK_SAT_EXPR:
3401 case VEC_PACK_TRUNC_EXPR:
3402 case VEC_RSHIFT_EXPR:
3403 case VEC_UNPACK_FLOAT_HI_EXPR:
3404 case VEC_UNPACK_FLOAT_LO_EXPR:
3405 case VEC_UNPACK_HI_EXPR:
3406 case VEC_UNPACK_LO_EXPR:
3407 case VEC_WIDEN_MULT_HI_EXPR:
3408 case VEC_WIDEN_MULT_LO_EXPR:
3409 case VEC_WIDEN_MULT_EVEN_EXPR:
3410 case VEC_WIDEN_MULT_ODD_EXPR:
3411 case VEC_WIDEN_LSHIFT_HI_EXPR:
3412 case VEC_WIDEN_LSHIFT_LO_EXPR:
3413 case VEC_PERM_EXPR:
3414 return NULL;
3415
3416 /* Misc codes. */
3417 case ADDR_SPACE_CONVERT_EXPR:
3418 case FIXED_CONVERT_EXPR:
3419 case OBJ_TYPE_REF:
3420 case WITH_SIZE_EXPR:
3421 return NULL;
3422
3423 case DOT_PROD_EXPR:
3424 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3425 && SCALAR_INT_MODE_P (mode))
3426 {
3427 op0
3428 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3429 0)))
3430 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
3431 inner_mode);
3432 op1
3433 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3434 1)))
3435 ? ZERO_EXTEND : SIGN_EXTEND, mode, op1,
3436 inner_mode);
3437 op0 = simplify_gen_binary (MULT, mode, op0, op1);
3438 return simplify_gen_binary (PLUS, mode, op0, op2);
3439 }
3440 return NULL;
3441
3442 case WIDEN_MULT_EXPR:
3443 case WIDEN_MULT_PLUS_EXPR:
3444 case WIDEN_MULT_MINUS_EXPR:
3445 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3446 && SCALAR_INT_MODE_P (mode))
3447 {
3448 inner_mode = GET_MODE (op0);
3449 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
3450 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
3451 else
3452 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
3453 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
3454 op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
3455 else
3456 op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
3457 op0 = simplify_gen_binary (MULT, mode, op0, op1);
3458 if (TREE_CODE (exp) == WIDEN_MULT_EXPR)
3459 return op0;
3460 else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR)
3461 return simplify_gen_binary (PLUS, mode, op0, op2);
3462 else
3463 return simplify_gen_binary (MINUS, mode, op2, op0);
3464 }
3465 return NULL;
3466
3467 case MULT_HIGHPART_EXPR:
3468 /* ??? Similar to the above. */
3469 return NULL;
3470
3471 case WIDEN_SUM_EXPR:
3472 case WIDEN_LSHIFT_EXPR:
3473 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3474 && SCALAR_INT_MODE_P (mode))
3475 {
3476 op0
3477 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3478 0)))
3479 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
3480 inner_mode);
3481 return simplify_gen_binary (TREE_CODE (exp) == WIDEN_LSHIFT_EXPR
3482 ? ASHIFT : PLUS, mode, op0, op1);
3483 }
3484 return NULL;
3485
3486 case FMA_EXPR:
3487 return simplify_gen_ternary (FMA, mode, inner_mode, op0, op1, op2);
3488
3489 default:
3490 flag_unsupported:
3491 #ifdef ENABLE_CHECKING
3492 debug_tree (exp);
3493 gcc_unreachable ();
3494 #else
3495 return NULL;
3496 #endif
3497 }
3498 }
3499
3500 /* Return an RTX equivalent to the source bind value of the tree expression
3501 EXP. */
3502
3503 static rtx
3504 expand_debug_source_expr (tree exp)
3505 {
3506 rtx op0 = NULL_RTX;
3507 enum machine_mode mode = VOIDmode, inner_mode;
3508
3509 switch (TREE_CODE (exp))
3510 {
3511 case PARM_DECL:
3512 {
3513 mode = DECL_MODE (exp);
3514 op0 = expand_debug_parm_decl (exp);
3515 if (op0)
3516 break;
3517 /* See if this isn't an argument that has been completely
3518 optimized out. */
3519 if (!DECL_RTL_SET_P (exp)
3520 && !DECL_INCOMING_RTL (exp)
3521 && DECL_ABSTRACT_ORIGIN (current_function_decl))
3522 {
3523 tree aexp = exp;
3524 if (DECL_ABSTRACT_ORIGIN (exp))
3525 aexp = DECL_ABSTRACT_ORIGIN (exp);
3526 if (DECL_CONTEXT (aexp)
3527 == DECL_ABSTRACT_ORIGIN (current_function_decl))
3528 {
3529 VEC(tree, gc) **debug_args;
3530 unsigned int ix;
3531 tree ddecl;
3532 #ifdef ENABLE_CHECKING
3533 tree parm;
3534 for (parm = DECL_ARGUMENTS (current_function_decl);
3535 parm; parm = DECL_CHAIN (parm))
3536 gcc_assert (parm != exp
3537 && DECL_ABSTRACT_ORIGIN (parm) != aexp);
3538 #endif
3539 debug_args = decl_debug_args_lookup (current_function_decl);
3540 if (debug_args != NULL)
3541 {
3542 for (ix = 0; VEC_iterate (tree, *debug_args, ix, ddecl);
3543 ix += 2)
3544 if (ddecl == aexp)
3545 return gen_rtx_DEBUG_PARAMETER_REF (mode, aexp);
3546 }
3547 }
3548 }
3549 break;
3550 }
3551 default:
3552 break;
3553 }
3554
3555 if (op0 == NULL_RTX)
3556 return NULL_RTX;
3557
3558 inner_mode = GET_MODE (op0);
3559 if (mode == inner_mode)
3560 return op0;
3561
3562 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
3563 {
3564 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
3565 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
3566 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
3567 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
3568 else
3569 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
3570 }
3571 else if (FLOAT_MODE_P (mode))
3572 gcc_unreachable ();
3573 else if (FLOAT_MODE_P (inner_mode))
3574 {
3575 if (TYPE_UNSIGNED (TREE_TYPE (exp)))
3576 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
3577 else
3578 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
3579 }
3580 else if (CONSTANT_P (op0)
3581 || GET_MODE_BITSIZE (mode) <= GET_MODE_BITSIZE (inner_mode))
3582 op0 = simplify_gen_subreg (mode, op0, inner_mode,
3583 subreg_lowpart_offset (mode, inner_mode));
3584 else if (TYPE_UNSIGNED (TREE_TYPE (exp)))
3585 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
3586 else
3587 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
3588
3589 return op0;
3590 }
3591
3592 /* Expand the _LOCs in debug insns. We run this after expanding all
3593 regular insns, so that any variables referenced in the function
3594 will have their DECL_RTLs set. */
3595
3596 static void
3597 expand_debug_locations (void)
3598 {
3599 rtx insn;
3600 rtx last = get_last_insn ();
3601 int save_strict_alias = flag_strict_aliasing;
3602
3603 /* New alias sets while setting up memory attributes cause
3604 -fcompare-debug failures, even though they don't bring about any
3605 codegen changes. */
3606 flag_strict_aliasing = 0;
3607
3608 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3609 if (DEBUG_INSN_P (insn))
3610 {
3611 tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
3612 rtx val;
3613 enum machine_mode mode;
3614
3615 if (value == NULL_TREE)
3616 val = NULL_RTX;
3617 else
3618 {
3619 if (INSN_VAR_LOCATION_STATUS (insn)
3620 == VAR_INIT_STATUS_UNINITIALIZED)
3621 val = expand_debug_source_expr (value);
3622 else
3623 val = expand_debug_expr (value);
3624 gcc_assert (last == get_last_insn ());
3625 }
3626
3627 if (!val)
3628 val = gen_rtx_UNKNOWN_VAR_LOC ();
3629 else
3630 {
3631 mode = GET_MODE (INSN_VAR_LOCATION (insn));
3632
3633 gcc_assert (mode == GET_MODE (val)
3634 || (GET_MODE (val) == VOIDmode
3635 && (CONST_INT_P (val)
3636 || GET_CODE (val) == CONST_FIXED
3637 || GET_CODE (val) == CONST_DOUBLE
3638 || GET_CODE (val) == LABEL_REF)));
3639 }
3640
3641 INSN_VAR_LOCATION_LOC (insn) = val;
3642 }
3643
3644 flag_strict_aliasing = save_strict_alias;
3645 }
3646
3647 /* Expand basic block BB from GIMPLE trees to RTL. */
3648
3649 static basic_block
3650 expand_gimple_basic_block (basic_block bb)
3651 {
3652 gimple_stmt_iterator gsi;
3653 gimple_seq stmts;
3654 gimple stmt = NULL;
3655 rtx note, last;
3656 edge e;
3657 edge_iterator ei;
3658 void **elt;
3659
3660 if (dump_file)
3661 fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
3662 bb->index);
3663
3664 /* Note that since we are now transitioning from GIMPLE to RTL, we
3665 cannot use the gsi_*_bb() routines because they expect the basic
3666 block to be in GIMPLE, instead of RTL. Therefore, we need to
3667 access the BB sequence directly. */
3668 stmts = bb_seq (bb);
3669 bb->il.gimple.seq = NULL;
3670 bb->il.gimple.phi_nodes = NULL;
3671 rtl_profile_for_bb (bb);
3672 init_rtl_bb_info (bb);
3673 bb->flags |= BB_RTL;
3674
3675 /* Remove the RETURN_EXPR if we may fall through to the exit
3676 instead. */
3677 gsi = gsi_last (stmts);
3678 if (!gsi_end_p (gsi)
3679 && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
3680 {
3681 gimple ret_stmt = gsi_stmt (gsi);
3682
3683 gcc_assert (single_succ_p (bb));
3684 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR);
3685
3686 if (bb->next_bb == EXIT_BLOCK_PTR
3687 && !gimple_return_retval (ret_stmt))
3688 {
3689 gsi_remove (&gsi, false);
3690 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
3691 }
3692 }
3693
3694 gsi = gsi_start (stmts);
3695 if (!gsi_end_p (gsi))
3696 {
3697 stmt = gsi_stmt (gsi);
3698 if (gimple_code (stmt) != GIMPLE_LABEL)
3699 stmt = NULL;
3700 }
3701
3702 elt = pointer_map_contains (lab_rtx_for_bb, bb);
3703
3704 if (stmt || elt)
3705 {
3706 last = get_last_insn ();
3707
3708 if (stmt)
3709 {
3710 expand_gimple_stmt (stmt);
3711 gsi_next (&gsi);
3712 }
3713
3714 if (elt)
3715 emit_label ((rtx) *elt);
3716
3717 /* Java emits line number notes at the top of labels.
3718 ??? Make this go away once line number notes are obsoleted. */
3719 BB_HEAD (bb) = NEXT_INSN (last);
3720 if (NOTE_P (BB_HEAD (bb)))
3721 BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
3722 note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
3723
3724 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3725 }
3726 else
3727 note = BB_HEAD (bb) = emit_note (NOTE_INSN_BASIC_BLOCK);
3728
3729 NOTE_BASIC_BLOCK (note) = bb;
3730
3731 for (; !gsi_end_p (gsi); gsi_next (&gsi))
3732 {
3733 basic_block new_bb;
3734
3735 stmt = gsi_stmt (gsi);
3736
3737 /* If this statement is a non-debug one, and we generate debug
3738 insns, then it might be the last real use of a TERed
3739 SSA_NAME, while there are still some debug uses further
3740 down. Expanding the SSA name in those later debug uses by
3741 its RHS might lead to wrong debug info, as coalescing
3742 might place the operands of that RHS into the same
3743 pseudo as something else. Like so:
3744 a_1 = a_0 + 1; // Assume a_1 is TERed and a_0 is dead
3745 use(a_1);
3746 a_2 = ...
3747 #DEBUG ... => a_1
3748 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
3749 If we were now to expand a_1 by its RHS (a_0 + 1) in the debug use,
3750 the write to a_2 would actually have clobbered the place which
3751 formerly held a_0.
3752
3753 So, instead of that, we recognize the situation, and generate
3754 debug temporaries at the last real use of TERed SSA names:
3755 a_1 = a_0 + 1;
3756 #DEBUG #D1 => a_1
3757 use(a_1);
3758 a_2 = ...
3759 #DEBUG ... => #D1
3760 */
3761 if (MAY_HAVE_DEBUG_INSNS
3762 && SA.values
3763 && !is_gimple_debug (stmt))
3764 {
3765 ssa_op_iter iter;
3766 tree op;
3767 gimple def;
3768
3769 location_t sloc = get_curr_insn_source_location ();
3770 tree sblock = get_curr_insn_block ();
3771
3772 /* Look for SSA names that have their last use here (TERed
3773 names always have only one real use). */
3774 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
3775 if ((def = get_gimple_for_ssa_name (op)))
3776 {
3777 imm_use_iterator imm_iter;
3778 use_operand_p use_p;
3779 bool have_debug_uses = false;
3780
3781 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
3782 {
3783 if (gimple_debug_bind_p (USE_STMT (use_p)))
3784 {
3785 have_debug_uses = true;
3786 break;
3787 }
3788 }
3789
3790 if (have_debug_uses)
3791 {
3792 /* OP is a TERed SSA name, DEF is its defining
3793 statement, and OP is used in further debug
3794 instructions. Generate a debug temporary, and
3795 replace all uses of OP in debug insns with that
3796 temporary. */
3797 gimple debugstmt;
3798 tree value = gimple_assign_rhs_to_tree (def);
3799 tree vexpr = make_node (DEBUG_EXPR_DECL);
3800 rtx val;
3801 enum machine_mode mode;
3802
3803 set_curr_insn_source_location (gimple_location (def));
3804 set_curr_insn_block (gimple_block (def));
3805
3806 DECL_ARTIFICIAL (vexpr) = 1;
3807 TREE_TYPE (vexpr) = TREE_TYPE (value);
3808 if (DECL_P (value))
3809 mode = DECL_MODE (value);
3810 else
3811 mode = TYPE_MODE (TREE_TYPE (value));
3812 DECL_MODE (vexpr) = mode;
3813
3814 val = gen_rtx_VAR_LOCATION
3815 (mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
3816
3817 emit_debug_insn (val);
3818
3819 FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
3820 {
3821 if (!gimple_debug_bind_p (debugstmt))
3822 continue;
3823
3824 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
3825 SET_USE (use_p, vexpr);
3826
3827 update_stmt (debugstmt);
3828 }
3829 }
3830 }
3831 set_curr_insn_source_location (sloc);
3832 set_curr_insn_block (sblock);
3833 }
3834
3835 currently_expanding_gimple_stmt = stmt;
3836
3837 /* Expand this statement, then evaluate the resulting RTL and
3838 fixup the CFG accordingly. */
3839 if (gimple_code (stmt) == GIMPLE_COND)
3840 {
3841 new_bb = expand_gimple_cond (bb, stmt);
3842 if (new_bb)
3843 return new_bb;
3844 }
3845 else if (gimple_debug_bind_p (stmt))
3846 {
3847 location_t sloc = get_curr_insn_source_location ();
3848 tree sblock = get_curr_insn_block ();
3849 gimple_stmt_iterator nsi = gsi;
3850
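/* Process the whole run of consecutive debug bind stmts here; the
   surrounding insn location and block are restored once the run ends.  */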
3851 for (;;)
3852 {
3853 tree var = gimple_debug_bind_get_var (stmt);
3854 tree value;
3855 rtx val;
3856 enum machine_mode mode;
3857
3858 if (TREE_CODE (var) != DEBUG_EXPR_DECL
3859 && TREE_CODE (var) != LABEL_DECL
3860 && !target_for_debug_bind (var))
3861 goto delink_debug_stmt;
3862
3863 if (gimple_debug_bind_has_value_p (stmt))
3864 value = gimple_debug_bind_get_value (stmt);
3865 else
3866 value = NULL_TREE;
3867
3868 last = get_last_insn ();
3869
3870 set_curr_insn_source_location (gimple_location (stmt));
3871 set_curr_insn_block (gimple_block (stmt));
3872
3873 if (DECL_P (var))
3874 mode = DECL_MODE (var);
3875 else
3876 mode = TYPE_MODE (TREE_TYPE (var));
3877
3878 val = gen_rtx_VAR_LOCATION
3879 (mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
3880
3881 emit_debug_insn (val);
3882
3883 if (dump_file && (dump_flags & TDF_DETAILS))
3884 {
3885 /* We can't dump the insn with a TREE where an RTX
3886 is expected. */
3887 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
3888 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3889 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
3890 }
3891
3892 delink_debug_stmt:
3893 /* In order not to generate too many debug temporaries,
3894 we delink all uses of debug statements we already expanded.
3895 Therefore debug statements between definition and real
3896 use of TERed SSA names will continue to use the SSA name,
3897 and not be replaced with debug temps. */
3898 delink_stmt_imm_use (stmt);
3899
3900 gsi = nsi;
3901 gsi_next (&nsi);
3902 if (gsi_end_p (nsi))
3903 break;
3904 stmt = gsi_stmt (nsi);
3905 if (!gimple_debug_bind_p (stmt))
3906 break;
3907 }
3908
3909 set_curr_insn_source_location (sloc);
3910 set_curr_insn_block (sblock);
3911 }
3912 else if (gimple_debug_source_bind_p (stmt))
3913 {
3914 location_t sloc = get_curr_insn_source_location ();
3915 tree sblock = get_curr_insn_block ();
3916 tree var = gimple_debug_source_bind_get_var (stmt);
3917 tree value = gimple_debug_source_bind_get_value (stmt);
3918 rtx val;
3919 enum machine_mode mode;
3920
3921 last = get_last_insn ();
3922
3923 set_curr_insn_source_location (gimple_location (stmt));
3924 set_curr_insn_block (gimple_block (stmt));
3925
3926 mode = DECL_MODE (var);
3927
3928 val = gen_rtx_VAR_LOCATION (mode, var, (rtx)value,
3929 VAR_INIT_STATUS_UNINITIALIZED);
3930
3931 emit_debug_insn (val);
3932
3933 if (dump_file && (dump_flags & TDF_DETAILS))
3934 {
3935 /* We can't dump the insn with a TREE where an RTX
3936 is expected. */
3937 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
3938 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3939 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
3940 }
3941
3942 set_curr_insn_source_location (sloc);
3943 set_curr_insn_block (sblock);
3944 }
3945 else
3946 {
3947 if (is_gimple_call (stmt) && gimple_call_tail_p (stmt))
3948 {
3949 bool can_fallthru;
3950 new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru);
3951 if (new_bb)
3952 {
3953 if (can_fallthru)
3954 bb = new_bb;
3955 else
3956 return new_bb;
3957 }
3958 }
3959 else
3960 {
3961 def_operand_p def_p;
3962 def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
3963
3964 if (def_p != NULL)
3965 {
3966 /* Ignore this stmt if it is in the list of
3967 replaceable expressions. */
3968 if (SA.values
3969 && bitmap_bit_p (SA.values,
3970 SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
3971 continue;
3972 }
3973 last = expand_gimple_stmt (stmt);
3974 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3975 }
3976 }
3977 }
3978
3979 currently_expanding_gimple_stmt = NULL;
3980
3981 /* Expand implicit goto and convert goto_locus. */
3982 FOR_EACH_EDGE (e, ei, bb->succs)
3983 {
3984 if (e->goto_locus && e->goto_block)
3985 {
3986 set_curr_insn_source_location (e->goto_locus);
3987 set_curr_insn_block (e->goto_block);
3988 e->goto_locus = curr_insn_locator ();
3989 }
3990 e->goto_block = NULL;
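/* A fallthru edge can only reach the block that textually follows;
   for any other destination emit an explicit jump and clear the flag.  */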
3991 if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
3992 {
3993 emit_jump (label_rtx_for_bb (e->dest));
3994 e->flags &= ~EDGE_FALLTHRU;
3995 }
3996 }
3997
3998 /* Expanded RTL can create a jump in the last instruction of the block.
3999 Such a jump might later be assumed to be a jump to the successor and
4000 break edge insertion. We need to insert a dummy move to prevent this. PR41440. */
4001 if (single_succ_p (bb)
4002 && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
4003 && (last = get_last_insn ())
4004 && JUMP_P (last))
4005 {
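/* Emit a no-op move of a fresh pseudo to itself so the block no
   longer ends in a jump insn.  */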
4006 rtx dummy = gen_reg_rtx (SImode);
4007 emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
4008 }
4009
4010 do_pending_stack_adjust ();
4011
4012 /* Find the block tail. The last insn in the block is the insn
4013 before a barrier and/or table jump insn. */
4014 last = get_last_insn ();
4015 if (BARRIER_P (last))
4016 last = PREV_INSN (last);
4017 if (JUMP_TABLE_DATA_P (last))
4018 last = PREV_INSN (PREV_INSN (last));
4019 BB_END (bb) = last;
4020
4021 update_bb_for_insn (bb);
4022
4023 return bb;
4024 }
4025
4026
4027 /* Create a basic block for initialization code. */
4028
4029 static basic_block
4030 construct_init_block (void)
4031 {
4032 basic_block init_block, first_block;
4033 edge e = NULL;
4034 int flags;
4035
4036 /* Multiple entry points not supported yet. */
4037 gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR->succs) == 1);
4038 init_rtl_bb_info (ENTRY_BLOCK_PTR);
4039 init_rtl_bb_info (EXIT_BLOCK_PTR);
4040 ENTRY_BLOCK_PTR->flags |= BB_RTL;
4041 EXIT_BLOCK_PTR->flags |= BB_RTL;
4042
4043 e = EDGE_SUCC (ENTRY_BLOCK_PTR, 0);
4044
4045 /* When the entry edge points to the first basic block, we don't need
4046 a jump; otherwise we have to jump to the proper target. */
4047 if (e && e->dest != ENTRY_BLOCK_PTR->next_bb)
4048 {
4049 tree label = gimple_block_label (e->dest);
4050
4051 emit_jump (label_rtx (label));
4052 flags = 0;
4053 }
4054 else
4055 flags = EDGE_FALLTHRU;
4056
4057 init_block = create_basic_block (NEXT_INSN (get_insns ()),
4058 get_last_insn (),
4059 ENTRY_BLOCK_PTR);
4060 init_block->frequency = ENTRY_BLOCK_PTR->frequency;
4061 init_block->count = ENTRY_BLOCK_PTR->count;
4062 if (current_loops && ENTRY_BLOCK_PTR->loop_father)
4063 add_bb_to_loop (init_block, ENTRY_BLOCK_PTR->loop_father);
4064 if (e)
4065 {
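/* Splice the new init block in between the entry block and the
   first real basic block.  */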
4066 first_block = e->dest;
4067 redirect_edge_succ (e, init_block);
4068 e = make_edge (init_block, first_block, flags);
4069 }
4070 else
4071 e = make_edge (init_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
4072 e->probability = REG_BR_PROB_BASE;
4073 e->count = ENTRY_BLOCK_PTR->count;
4074
4075 update_bb_for_insn (init_block);
4076 return init_block;
4077 }
4078
4079 /* For each lexical block, set BLOCK_NUMBER to the depth at which it is
4080 found in the block tree. */
4081
4082 static void
4083 set_block_levels (tree block, int level)
4084 {
4085 while (block)
4086 {
4087 BLOCK_NUMBER (block) = level;
4088 set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
4089 block = BLOCK_CHAIN (block);
4090 }
4091 }
4092
4093 /* Create a block containing landing pads and similar stuff. */
4094
4095 static void
4096 construct_exit_block (void)
4097 {
4098 rtx head = get_last_insn ();
4099 rtx end;
4100 basic_block exit_block;
4101 edge e, e2;
4102 unsigned ix;
4103 edge_iterator ei;
4104 rtx orig_end = BB_END (EXIT_BLOCK_PTR->prev_bb);
4105
4106 rtl_profile_for_bb (EXIT_BLOCK_PTR);
4107
4108 /* Make sure the locus is set to the end of the function, so that
4109 epilogue line numbers and warnings are set properly. */
4110 if (cfun->function_end_locus != UNKNOWN_LOCATION)
4111 input_location = cfun->function_end_locus;
4112
4113 /* The following insns belong to the top scope. */
4114 set_curr_insn_block (DECL_INITIAL (current_function_decl));
4115
4116 /* Generate rtl for function exit. */
4117 expand_function_end ();
4118
4119 end = get_last_insn ();
4120 if (head == end)
4121 return;
4122 /* While emitting the function end we could have moved the end of the last
4123 basic block; restore it. */
4124 BB_END (EXIT_BLOCK_PTR->prev_bb) = orig_end;
4125 while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
4126 head = NEXT_INSN (head);
4127 exit_block = create_basic_block (NEXT_INSN (head), end,
4128 EXIT_BLOCK_PTR->prev_bb);
4129 exit_block->frequency = EXIT_BLOCK_PTR->frequency;
4130 exit_block->count = EXIT_BLOCK_PTR->count;
4131 if (current_loops && EXIT_BLOCK_PTR->loop_father)
4132 add_bb_to_loop (exit_block, EXIT_BLOCK_PTR->loop_father);
4133
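/* Redirect every non-abnormal predecessor edge of the exit block to the
   newly created exit_block; abnormal edges keep targeting EXIT_BLOCK_PTR.  */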
4134 ix = 0;
4135 while (ix < EDGE_COUNT (EXIT_BLOCK_PTR->preds))
4136 {
4137 e = EDGE_PRED (EXIT_BLOCK_PTR, ix);
4138 if (!(e->flags & EDGE_ABNORMAL))
4139 redirect_edge_succ (e, exit_block);
4140 else
4141 ix++;
4142 }
4143
4144 e = make_edge (exit_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
4145 e->probability = REG_BR_PROB_BASE;
4146 e->count = EXIT_BLOCK_PTR->count;
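/* The fallthru edge gets whatever count is left after subtracting the
   remaining (abnormal) predecessors; negative results from inconsistent
   profiles are clamped to zero below.  */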
4147 FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR->preds)
4148 if (e2 != e)
4149 {
4150 e->count -= e2->count;
4151 exit_block->count -= e2->count;
4152 exit_block->frequency -= EDGE_FREQUENCY (e2);
4153 }
4154 if (e->count < 0)
4155 e->count = 0;
4156 if (exit_block->count < 0)
4157 exit_block->count = 0;
4158 if (exit_block->frequency < 0)
4159 exit_block->frequency = 0;
4160 update_bb_for_insn (exit_block);
4161 }
4162
4163 /* Helper function for discover_nonconstant_array_refs.
4164 Look for ARRAY_REF nodes with non-constant indexes and mark them
4165 addressable. */
4166
4167 static tree
4168 discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
4169 void *data ATTRIBUTE_UNUSED)
4170 {
4171 tree t = *tp;
4172
4173 if (IS_TYPE_OR_DECL_P (t))
4174 *walk_subtrees = 0;
4175 else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4176 {
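/* Strip handled components and conversions whose offsets are invariant,
   until we reach either an ARRAY_REF with a variable index or the base
   object.  */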
4177 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4178 && is_gimple_min_invariant (TREE_OPERAND (t, 1))
4179 && (!TREE_OPERAND (t, 2)
4180 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
4181 || (TREE_CODE (t) == COMPONENT_REF
4182 && (!TREE_OPERAND (t,2)
4183 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
4184 || TREE_CODE (t) == BIT_FIELD_REF
4185 || TREE_CODE (t) == REALPART_EXPR
4186 || TREE_CODE (t) == IMAGPART_EXPR
4187 || TREE_CODE (t) == VIEW_CONVERT_EXPR
4188 || CONVERT_EXPR_P (t))
4189 t = TREE_OPERAND (t, 0);
4190
4191 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4192 {
4193 t = get_base_address (t);
4194 if (t && DECL_P (t)
4195 && DECL_MODE (t) != BLKmode)
4196 TREE_ADDRESSABLE (t) = 1;
4197 }
4198
4199 *walk_subtrees = 0;
4200 }
4201
4202 return NULL_TREE;
4203 }
4204
4205 /* RTL expansion is not able to compile array references with variable
4206 offsets for arrays stored in a single register. Discover such
4207 expressions and mark the variables as addressable to avoid this
4208 scenario. */
4209
4210 static void
4211 discover_nonconstant_array_refs (void)
4212 {
4213 basic_block bb;
4214 gimple_stmt_iterator gsi;
4215
4216 FOR_EACH_BB (bb)
4217 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4218 {
4219 gimple stmt = gsi_stmt (gsi);
4220 if (!is_gimple_debug (stmt))
4221 walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
4222 }
4223 }
4224
4225 /* This function sets crtl->args.internal_arg_pointer to a virtual
4226 register if DRAP is needed. The local register allocator will replace
4227 virtual_incoming_args_rtx with the virtual register. */
4228
4229 static void
4230 expand_stack_alignment (void)
4231 {
4232 rtx drap_rtx;
4233 unsigned int preferred_stack_boundary;
4234
4235 if (! SUPPORTS_STACK_ALIGNMENT)
4236 return;
4237
4238 if (cfun->calls_alloca
4239 || cfun->has_nonlocal_label
4240 || crtl->has_nonlocal_goto)
4241 crtl->need_drap = true;
4242
4243 /* Call update_stack_boundary here again to update incoming stack
4244 boundary. It may set incoming stack alignment to a different
4245 value after RTL expansion. TARGET_FUNCTION_OK_FOR_SIBCALL may
4246 use the minimum incoming stack alignment to check if it is OK
4247 to perform sibcall optimization since sibcall optimization will
4248 only align the outgoing stack to incoming stack boundary. */
4249 if (targetm.calls.update_stack_boundary)
4250 targetm.calls.update_stack_boundary ();
4251
4252 /* The incoming stack frame has to be aligned at least at
4253 parm_stack_boundary. */
4254 gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);
4255
4256 /* Update crtl->stack_alignment_estimated and use it later to align
4257 stack. We check PREFERRED_STACK_BOUNDARY if there may be non-call
4258 exceptions since callgraph doesn't collect incoming stack alignment
4259 in this case. */
4260 if (cfun->can_throw_non_call_exceptions
4261 && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
4262 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
4263 else
4264 preferred_stack_boundary = crtl->preferred_stack_boundary;
4265 if (preferred_stack_boundary > crtl->stack_alignment_estimated)
4266 crtl->stack_alignment_estimated = preferred_stack_boundary;
4267 if (preferred_stack_boundary > crtl->stack_alignment_needed)
4268 crtl->stack_alignment_needed = preferred_stack_boundary;
4269
4270 gcc_assert (crtl->stack_alignment_needed
4271 <= crtl->stack_alignment_estimated);
4272
4273 crtl->stack_realign_needed
4274 = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
4275 crtl->stack_realign_tried = crtl->stack_realign_needed;
4276
4277 crtl->stack_realign_processed = true;
4278
4279 /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
4280 alignment. */
4281 gcc_assert (targetm.calls.get_drap_rtx != NULL);
4282 drap_rtx = targetm.calls.get_drap_rtx ();
4283
4284 /* stack_realign_drap and drap_rtx must match. */
4285 gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));
4286
4287 /* Do nothing if NULL is returned, which means DRAP is not needed. */
4288 if (NULL != drap_rtx)
4289 {
4290 crtl->args.internal_arg_pointer = drap_rtx;
4291
4292 /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
4293 needed. */
4294 fixup_tail_calls ();
4295 }
4296 }
4297
4298 /* Translate the intermediate representation contained in the CFG
4299 from GIMPLE trees to RTL.
4300
4301 We do conversion per basic block and preserve/update the tree CFG.
4302 This implies we have to do some magic as the CFG can simultaneously
4303 consist of basic blocks containing RTL and GIMPLE trees. This can
4304 confuse the CFG hooks, so be careful not to manipulate the CFG during
4305 the expansion. */
4306
4307 static unsigned int
4308 gimple_expand_cfg (void)
4309 {
4310 basic_block bb, init_block;
4311 sbitmap blocks;
4312 edge_iterator ei;
4313 edge e;
4314 rtx var_seq;
4315 unsigned i;
4316
4317 timevar_push (TV_OUT_OF_SSA);
4318 rewrite_out_of_ssa (&SA);
4319 timevar_pop (TV_OUT_OF_SSA);
4320 SA.partition_to_pseudo = (rtx *)xcalloc (SA.map->num_partitions,
4321 sizeof (rtx));
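/* partition_to_pseudo maps each out-of-SSA partition to the rtx that
   will be assigned to it during expansion.  */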
4322
4323 /* Make sure all values used by the optimization passes have sane
4324 defaults. */
4325 reg_renumber = 0;
4326
4327 /* Some backends want to know that we are expanding to RTL. */
4328 currently_expanding_to_rtl = 1;
4329 /* Dominators are not kept up-to-date as we may create new basic-blocks. */
4330 free_dominance_info (CDI_DOMINATORS);
4331
4332 rtl_profile_for_bb (ENTRY_BLOCK_PTR);
4333
4334 insn_locators_alloc ();
4335 if (!DECL_IS_BUILTIN (current_function_decl))
4336 {
4337 /* Eventually, all FEs should explicitly set function_start_locus. */
4338 if (cfun->function_start_locus == UNKNOWN_LOCATION)
4339 set_curr_insn_source_location
4340 (DECL_SOURCE_LOCATION (current_function_decl));
4341 else
4342 set_curr_insn_source_location (cfun->function_start_locus);
4343 }
4344 else
4345 set_curr_insn_source_location (UNKNOWN_LOCATION);
4346 set_curr_insn_block (DECL_INITIAL (current_function_decl));
4347 prologue_locator = curr_insn_locator ();
4348
4349 #ifdef INSN_SCHEDULING
4350 init_sched_attrs ();
4351 #endif
4352
4353 /* Make sure the first insn is a note, even if we don't want linenums.
4354 This makes sure the first insn will never be deleted.
4355 Also, final expects a note to appear there. */
4356 emit_note (NOTE_INSN_DELETED);
4357
4358 /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE. */
4359 discover_nonconstant_array_refs ();
4360
4361 targetm.expand_to_rtl_hook ();
4362 crtl->stack_alignment_needed = STACK_BOUNDARY;
4363 crtl->max_used_stack_slot_alignment = STACK_BOUNDARY;
4364 crtl->stack_alignment_estimated = 0;
4365 crtl->preferred_stack_boundary = STACK_BOUNDARY;
4366 cfun->cfg->max_jumptable_ents = 0;
4367
4368 /* Resolve the function section. Some targets, like ARM EABI, rely on knowledge
4369 of the function section at expansion time to predict the distance of calls. */
4370 resolve_unique_section (current_function_decl, 0, flag_function_sections);
4371
4372 /* Expand the variables recorded during gimple lowering. */
4373 timevar_push (TV_VAR_EXPAND);
4374 start_sequence ();
4375
4376 expand_used_vars ();
4377
4378 var_seq = get_insns ();
4379 end_sequence ();
4380 timevar_pop (TV_VAR_EXPAND);
4381
4382 /* Honor stack protection warnings. */
4383 if (warn_stack_protect)
4384 {
4385 if (cfun->calls_alloca)
4386 warning (OPT_Wstack_protector,
4387 "stack protector not protecting local variables: "
4388 "variable length buffer");
4389 if (has_short_buffer && !crtl->stack_protect_guard)
4390 warning (OPT_Wstack_protector,
4391 "stack protector not protecting function: "
4392 "all local arrays are less than %d bytes long",
4393 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
4394 }
4395
4396 /* Set up parameters and prepare for return, for the function. */
4397 expand_function_start (current_function_decl);
4398
4399 /* If we emitted any instructions for setting up the variables,
4400 emit them before the FUNCTION_START note. */
4401 if (var_seq)
4402 {
4403 emit_insn_before (var_seq, parm_birth_insn);
4404
4405 /* In expand_function_end we'll insert the alloca save/restore
4406 before parm_birth_insn. We've just inserted an alloca call.
4407 Adjust the pointer to match. */
4408 parm_birth_insn = var_seq;
4409 }
4410
4411 /* Now that we also have the parameter RTXs, copy them over to our
4412 partitions. */
4413 for (i = 0; i < SA.map->num_partitions; i++)
4414 {
4415 tree var = SSA_NAME_VAR (partition_to_var (SA.map, i));
4416
4417 if (TREE_CODE (var) != VAR_DECL
4418 && !SA.partition_to_pseudo[i])
4419 SA.partition_to_pseudo[i] = DECL_RTL_IF_SET (var);
4420 gcc_assert (SA.partition_to_pseudo[i]);
4421
4422 /* If this decl was marked as living in multiple places, reset
4423 this now to NULL. */
4424 if (DECL_RTL_IF_SET (var) == pc_rtx)
4425 SET_DECL_RTL (var, NULL);
4426
4427 /* Some RTL parts really want to look at DECL_RTL(x) when x
4428 was a decl marked in REG_ATTR or MEM_ATTR. We could use
4429 SET_DECL_RTL here to make this available, but that would mean
4430 selecting one of the potentially many RTLs for one DECL. Instead
4431 of doing that we simply reset the MEM_EXPR of the RTL in question,
4432 so that nobody can get at it and hence nobody can call DECL_RTL on it. */
4433 if (!DECL_RTL_SET_P (var))
4434 {
4435 if (MEM_P (SA.partition_to_pseudo[i]))
4436 set_mem_expr (SA.partition_to_pseudo[i], NULL);
4437 }
4438 }
4439
4440 /* If we have a class containing differently aligned pointers
4441 we need to merge those into the corresponding RTL pointer
4442 alignment. */
4443 for (i = 1; i < num_ssa_names; i++)
4444 {
4445 tree name = ssa_name (i);
4446 int part;
4447 rtx r;
4448
4449 if (!name
4450 || !POINTER_TYPE_P (TREE_TYPE (name))
4451 /* We might have generated new SSA names in
4452 update_alias_info_with_stack_vars. They will have a NULL
4453 defining statement, and won't be part of the partitioning,
4454 so ignore those. */
4455 || !SSA_NAME_DEF_STMT (name))
4456 continue;
4457 part = var_to_partition (SA.map, name);
4458 if (part == NO_PARTITION)
4459 continue;
4460 r = SA.partition_to_pseudo[part];
4461 if (REG_P (r))
4462 mark_reg_pointer (r, get_pointer_alignment (name));
4463 }
4464
4465 /* If this function is `main', emit a call to `__main'
4466 to run global initializers, etc. */
4467 if (DECL_NAME (current_function_decl)
4468 && MAIN_NAME_P (DECL_NAME (current_function_decl))
4469 && DECL_FILE_SCOPE_P (current_function_decl))
4470 expand_main_function ();
4471
4472 /* Initialize the stack_protect_guard field. This must happen after the
4473 call to __main (if any) so that the external decl is initialized. */
4474 if (crtl->stack_protect_guard)
4475 stack_protect_prologue ();
4476
4477 expand_phi_nodes (&SA);
4478
4479 /* Register rtl specific functions for cfg. */
4480 rtl_register_cfg_hooks ();
4481
4482 init_block = construct_init_block ();
4483
4484 /* Clear EDGE_EXECUTABLE on the entry edge(s). It is cleared from the
4485 remaining edges later. */
4486 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
4487 e->flags &= ~EDGE_EXECUTABLE;
4488
4489 lab_rtx_for_bb = pointer_map_create ();
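/* Expand each basic block in turn.  expand_gimple_basic_block may create
   new blocks (e.g. for conditionals and tail calls) and returns the last
   block it filled, so iteration resumes after that block.  */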
4490 FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb)
4491 bb = expand_gimple_basic_block (bb);
4492
4493 if (MAY_HAVE_DEBUG_INSNS)
4494 expand_debug_locations ();
4495
4496 /* Free stuff we no longer need after GIMPLE optimizations. */
4497 free_dominance_info (CDI_DOMINATORS);
4498 free_dominance_info (CDI_POST_DOMINATORS);
4499 delete_tree_cfg_annotations ();
4500
4501 timevar_push (TV_OUT_OF_SSA);
4502 finish_out_of_ssa (&SA);
4503 timevar_pop (TV_OUT_OF_SSA);
4504
4505 timevar_push (TV_POST_EXPAND);
4506 /* We are no longer in SSA form. */
4507 cfun->gimple_df->in_ssa_p = false;
4508 if (current_loops)
4509 loops_state_clear (LOOP_CLOSED_SSA);
4510
4511 /* Expansion is used by optimization passes too; set maybe_hot_insn_p
4512 conservatively to true until they are all profile aware. */
4513 pointer_map_destroy (lab_rtx_for_bb);
4514 free_histograms ();
4515
4516 construct_exit_block ();
4517 set_curr_insn_block (DECL_INITIAL (current_function_decl));
4518 insn_locators_finalize ();
4519
4520 /* Zap the tree EH table. */
4521 set_eh_throw_stmt_table (cfun, NULL);
4522
4523 /* We need JUMP_LABEL to be set in order to redirect jumps, and hence
4524 to split edges, which edge insertion might do. */
4525 rebuild_jump_labels (get_insns ());
4526
4527 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
4528 {
4529 edge e;
4530 edge_iterator ei;
4531 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
4532 {
4533 if (e->insns.r)
4534 {
4535 rebuild_jump_labels_chain (e->insns.r);
4536 /* Avoid putting insns before parm_birth_insn. */
4537 if (e->src == ENTRY_BLOCK_PTR
4538 && single_succ_p (ENTRY_BLOCK_PTR)
4539 && parm_birth_insn)
4540 {
4541 rtx insns = e->insns.r;
4542 e->insns.r = NULL_RTX;
4543 emit_insn_after_noloc (insns, parm_birth_insn, e->dest);
4544 }
4545 else
4546 commit_one_edge_insertion (e);
4547 }
4548 else
4549 ei_next (&ei);
4550 }
4551 }
4552
4553 /* We're done expanding trees to RTL. */
4554 currently_expanding_to_rtl = 0;
4555
4556 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb, EXIT_BLOCK_PTR, next_bb)
4557 {
4558 edge e;
4559 edge_iterator ei;
4560 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
4561 {
4562 /* Clear EDGE_EXECUTABLE. This flag is never used in the backend. */
4563 e->flags &= ~EDGE_EXECUTABLE;
4564
4565 /* At the moment not all abnormal edges match the RTL
4566 representation. It is safe to remove them here as
4567 find_many_sub_basic_blocks will rediscover them.
4568 In the future we should get this fixed properly. */
4569 if ((e->flags & EDGE_ABNORMAL)
4570 && !(e->flags & EDGE_SIBCALL))
4571 remove_edge (e);
4572 else
4573 ei_next (&ei);
4574 }
4575 }
4576
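/* Expansion may have introduced new jumps and barriers inside what used
   to be single basic blocks; split those into sub-basic-blocks.  */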
4577 blocks = sbitmap_alloc (last_basic_block);
4578 sbitmap_ones (blocks);
4579 find_many_sub_basic_blocks (blocks);
4580 sbitmap_free (blocks);
4581 purge_all_dead_edges ();
4582
4583 expand_stack_alignment ();
4584
4585 /* Fixup REG_EQUIV notes in the prologue if there are tailcalls in this
4586 function. */
4587 if (crtl->tail_call_emit)
4588 fixup_tail_calls ();
4589
4590 /* After initial rtl generation, call back to finish generating
4591 exception support code. We need to do this before cleaning up
4592 the CFG as the code does not expect dead landing pads. */
4593 if (cfun->eh->region_tree != NULL)
4594 finish_eh_generation ();
4595
4596 /* Remove unreachable blocks, otherwise we cannot compute dominators
4597 which are needed for loop state verification. As a side-effect
4598 this also compacts blocks.
4599 ??? We cannot remove trivially dead insns here as for example
4600 the DRAP reg on i?86 is not magically live at this point.
4601 gcc.c-torture/execute/ipa-sra-2.c execution, -Os -m32 fails otherwise. */
4602 cleanup_cfg (CLEANUP_NO_INSN_DEL);
4603
4604 #ifdef ENABLE_CHECKING
4605 verify_flow_info ();
4606 #endif
4607
4608 /* Initialize pseudos allocated for hard registers. */
4609 emit_initial_value_sets ();
4610
4611 /* And finally unshare all RTL. */
4612 unshare_all_rtl ();
4613
4614 /* There's no need to defer outputting this function any more; we
4615 know we want to output it. */
4616 DECL_DEFER_OUTPUT (current_function_decl) = 0;
4617
4618 /* Now that we're done expanding trees to RTL, we shouldn't have any
4619 more CONCATs anywhere. */
4620 generating_concat_p = 0;
4621
4622 if (dump_file)
4623 {
4624 fprintf (dump_file,
4625 "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
4626 /* And the pass manager will dump RTL for us. */
4627 }
4628
4629 /* If we're emitting a nested function, make sure its parent gets
4630 emitted as well. Doing otherwise confuses debug info. */
4631 {
4632 tree parent;
4633 for (parent = DECL_CONTEXT (current_function_decl);
4634 parent != NULL_TREE;
4635 parent = get_containing_scope (parent))
4636 if (TREE_CODE (parent) == FUNCTION_DECL)
4637 TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
4638 }
4639
4640 /* We are now committed to emitting code for this function. Do any
4641 preparation, such as emitting abstract debug info for the inline
4642 function before it gets mangled by optimization. */
4643 if (cgraph_function_possibly_inlined_p (current_function_decl))
4644 (*debug_hooks->outlining_inline_function) (current_function_decl);
4645
4646 TREE_ASM_WRITTEN (current_function_decl) = 1;
4647
4648 /* After expanding, the return labels are no longer needed. */
4649 return_label = NULL;
4650 naked_return_label = NULL;
4651
4652 /* After expanding, the tm_restart map is no longer needed. */
4653 if (cfun->gimple_df->tm_restart)
4654 {
4655 htab_delete (cfun->gimple_df->tm_restart);
4656 cfun->gimple_df->tm_restart = NULL;
4657 }
4658
4659 /* Tag the blocks with a depth number so that change_scope can find
4660 the common parent easily. */
4661 set_block_levels (DECL_INITIAL (cfun->decl), 0);
4662 default_rtl_profile ();
4663
4664 timevar_pop (TV_POST_EXPAND);
4665
4666 return 0;
4667 }
4668
4669 struct rtl_opt_pass pass_expand =
4670 {
4671 {
4672 RTL_PASS,
4673 "expand", /* name */
4674 NULL, /* gate */
4675 gimple_expand_cfg, /* execute */
4676 NULL, /* sub */
4677 NULL, /* next */
4678 0, /* static_pass_number */
4679 TV_EXPAND, /* tv_id */
4680 PROP_ssa | PROP_gimple_leh | PROP_cfg
4681 | PROP_gimple_lcx, /* properties_required */
4682 PROP_rtl, /* properties_provided */
4683 PROP_ssa | PROP_trees, /* properties_destroyed */
4684 TODO_verify_ssa | TODO_verify_flow
4685 | TODO_verify_stmts, /* todo_flags_start */
4686 TODO_ggc_collect /* todo_flags_finish */
4687 }
4688 };