1 /* A pass for lowering trees to RTL.
2 Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012
3 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "rtl.h"
27 #include "tm_p.h"
28 #include "basic-block.h"
29 #include "function.h"
30 #include "expr.h"
31 #include "langhooks.h"
32 #include "tree-flow.h"
33 #include "tree-pass.h"
34 #include "except.h"
35 #include "flags.h"
36 #include "diagnostic.h"
37 #include "gimple-pretty-print.h"
38 #include "toplev.h"
39 #include "debug.h"
40 #include "params.h"
41 #include "tree-inline.h"
42 #include "value-prof.h"
43 #include "target.h"
44 #include "ssaexpand.h"
45 #include "bitmap.h"
46 #include "sbitmap.h"
47 #include "cfgloop.h"
48 #include "regs.h" /* For reg_renumber. */
49 #include "insn-attr.h" /* For INSN_SCHEDULING. */
50
51 /* This variable holds information helping the rewriting of SSA trees
52 into RTL. */
53 struct ssaexpand SA;
54
55 /* This variable holds the currently expanded gimple statement for purposes
56 of communicating the profile info to the builtin expanders. */
57 gimple currently_expanding_gimple_stmt;
58
59 static rtx expand_debug_expr (tree);
60
61 /* Return an expression tree corresponding to the RHS of GIMPLE
62 statement STMT. */
63
64 tree
65 gimple_assign_rhs_to_tree (gimple stmt)
66 {
67 tree t;
68 enum gimple_rhs_class grhs_class;
69
70 grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));
71
72 if (grhs_class == GIMPLE_TERNARY_RHS)
73 t = build3 (gimple_assign_rhs_code (stmt),
74 TREE_TYPE (gimple_assign_lhs (stmt)),
75 gimple_assign_rhs1 (stmt),
76 gimple_assign_rhs2 (stmt),
77 gimple_assign_rhs3 (stmt));
78 else if (grhs_class == GIMPLE_BINARY_RHS)
79 t = build2 (gimple_assign_rhs_code (stmt),
80 TREE_TYPE (gimple_assign_lhs (stmt)),
81 gimple_assign_rhs1 (stmt),
82 gimple_assign_rhs2 (stmt));
83 else if (grhs_class == GIMPLE_UNARY_RHS)
84 t = build1 (gimple_assign_rhs_code (stmt),
85 TREE_TYPE (gimple_assign_lhs (stmt)),
86 gimple_assign_rhs1 (stmt));
87 else if (grhs_class == GIMPLE_SINGLE_RHS)
88 {
89 t = gimple_assign_rhs1 (stmt);
90 /* Avoid modifying this tree in place below. */
91 if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
92 && gimple_location (stmt) != EXPR_LOCATION (t))
93 || (gimple_block (stmt)
94 && currently_expanding_to_rtl
95 && EXPR_P (t)))
96 t = copy_node (t);
97 }
98 else
99 gcc_unreachable ();
100
101 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
102 SET_EXPR_LOCATION (t, gimple_location (stmt));
103
104 return t;
105 }
106
107
108 #ifndef STACK_ALIGNMENT_NEEDED
109 #define STACK_ALIGNMENT_NEEDED 1
110 #endif
111
112 #define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)
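/* For illustration (the names are made up): SSAVAR applied to an SSA name
   such as i_7 yields the underlying decl i via SSA_NAME_VAR, while applied
   to a plain VAR_DECL or PARM_DECL it is the identity.  */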
113
114 /* Associate declaration T with storage space X. If T is not an
115 SSA name this is exactly SET_DECL_RTL; otherwise make the
116 partition of T associated with X. */
117 static inline void
118 set_rtl (tree t, rtx x)
119 {
120 if (TREE_CODE (t) == SSA_NAME)
121 {
122 SA.partition_to_pseudo[var_to_partition (SA.map, t)] = x;
123 if (x && !MEM_P (x))
124 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (t), x);
125 /* For the benefit of debug information at -O0 (where vartracking
126 doesn't run) record the place also in the base DECL if it's
127 a normal variable (not a parameter). */
128 if (x && x != pc_rtx && TREE_CODE (SSA_NAME_VAR (t)) == VAR_DECL)
129 {
130 tree var = SSA_NAME_VAR (t);
131 /* If we don't yet have something recorded, just record it now. */
132 if (!DECL_RTL_SET_P (var))
133 SET_DECL_RTL (var, x);
134 /* If we have it set already to "multiple places" don't
135 change this. */
136 else if (DECL_RTL (var) == pc_rtx)
137 ;
138 /* If we have something recorded and it's not the same place
139 as we want to record now, we have multiple partitions for the
140 same base variable, with different places. We can't just
141 randomly choose one, hence we have to say that we don't know.
142 This only happens with optimization, and there var-tracking
143 will figure out the right thing. */
144 else if (DECL_RTL (var) != x)
145 SET_DECL_RTL (var, pc_rtx);
146 }
147 }
148 else
149 SET_DECL_RTL (t, x);
150 }
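/* An illustrative scenario for the pc_rtx marker above (the names and
   pseudo numbers are made up): if two partitions of the same base variable
   "i" are expanded, the first set_rtl call may record (reg:SI 58) in
   DECL_RTL (i); when the second partition later gets (reg:SI 60), the
   DECL_RTL (var) != x test fires and DECL_RTL (i) is demoted to pc_rtx,
   leaving the per-partition locations to var-tracking.  */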
151
152 /* This structure holds data relevant to one variable that will be
153 placed in a stack slot. */
154 struct stack_var
155 {
156 /* The variable. */
157 tree decl;
158
159 /* Initially, the size of the variable. Later, the size of the partition,
160 if this variable becomes its partition's representative. */
161 HOST_WIDE_INT size;
162
163 /* The *byte* alignment required for this variable. Or, as with the
164 size, the alignment for this partition. */
165 unsigned int alignb;
166
167 /* The partition representative. */
168 size_t representative;
169
170 /* The next stack variable in the partition, or EOC. */
171 size_t next;
172
173 /* The numbers of conflicting stack variables. */
174 bitmap conflicts;
175 };
176
177 #define EOC ((size_t)-1)
178
179 /* We have an array of such objects while deciding allocation. */
180 static struct stack_var *stack_vars;
181 static size_t stack_vars_alloc;
182 static size_t stack_vars_num;
183 static struct pointer_map_t *decl_to_stack_part;
184
185 /* Conflict bitmaps go on this obstack. This allows us to destroy
186 all of them in one big sweep. */
187 static bitmap_obstack stack_var_bitmap_obstack;
188
189 /* An array of indices sorted by stack_var_cmp: "large"-aligned
190 variables first, then decreasing size. */
191 static size_t *stack_vars_sorted;
192
193 /* The phase of the stack frame. This is the known misalignment of
194 virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY. That is,
195 (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0. */
196 static int frame_phase;
197
198 /* Used during expand_used_vars to remember if we saw any decls for
199 which we'd like to enable stack smashing protection. */
200 static bool has_protected_decls;
201
202 /* Used during expand_used_vars. Remember if we saw a character buffer
203 smaller than our cutoff threshold. Used for -Wstack-protector. */
204 static bool has_short_buffer;
205
206 /* Compute the byte alignment to use for DECL. Ignore the alignment
207 we can't honor with the expected alignment of the stack boundary. */
208
209 static unsigned int
210 align_local_variable (tree decl)
211 {
212 unsigned int align = LOCAL_DECL_ALIGNMENT (decl);
213 DECL_ALIGN (decl) = align;
214 return align / BITS_PER_UNIT;
215 }
216
217 /* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
218 Return the frame offset. */
219
220 static HOST_WIDE_INT
221 alloc_stack_frame_space (HOST_WIDE_INT size, unsigned HOST_WIDE_INT align)
222 {
223 HOST_WIDE_INT offset, new_frame_offset;
224
225 new_frame_offset = frame_offset;
226 if (FRAME_GROWS_DOWNWARD)
227 {
228 new_frame_offset -= size + frame_phase;
229 new_frame_offset &= -align;
230 new_frame_offset += frame_phase;
231 offset = new_frame_offset;
232 }
233 else
234 {
235 new_frame_offset -= frame_phase;
236 new_frame_offset += align - 1;
237 new_frame_offset &= -align;
238 new_frame_offset += frame_phase;
239 offset = new_frame_offset;
240 new_frame_offset += size;
241 }
242 frame_offset = new_frame_offset;
243
244 if (frame_offset_overflow (frame_offset, cfun->decl))
245 frame_offset = offset = 0;
246
247 return offset;
248 }
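/* A worked example of the downward-growing case (the numbers are made up):
   with FRAME_GROWS_DOWNWARD, frame_phase == 0 and frame_offset == -12, a
   request for SIZE == 10 at ALIGN == 8 computes

       new_frame_offset = -12 - 10 = -22
       new_frame_offset &= -8          ->  -24

   so the variable is placed at offset -24, occupying bytes [-24, -14),
   and frame_offset becomes -24 for the next allocation.  */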
249
250 /* Accumulate DECL into STACK_VARS. */
251
252 static void
253 add_stack_var (tree decl)
254 {
255 struct stack_var *v;
256
257 if (stack_vars_num >= stack_vars_alloc)
258 {
259 if (stack_vars_alloc)
260 stack_vars_alloc = stack_vars_alloc * 3 / 2;
261 else
262 stack_vars_alloc = 32;
263 stack_vars
264 = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
265 }
266 if (!decl_to_stack_part)
267 decl_to_stack_part = pointer_map_create ();
268
269 v = &stack_vars[stack_vars_num];
270 * (size_t *)pointer_map_insert (decl_to_stack_part, decl) = stack_vars_num;
271
272 v->decl = decl;
273 v->size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (decl)), 1);
274 /* Ensure that all variables have size, so that &a != &b for any two
275 variables that are simultaneously live. */
276 if (v->size == 0)
277 v->size = 1;
278 v->alignb = align_local_variable (SSAVAR (decl));
279 /* An alignment of zero can mightily confuse us later. */
280 gcc_assert (v->alignb != 0);
281
282 /* All variables are initially in their own partition. */
283 v->representative = stack_vars_num;
284 v->next = EOC;
285
286 /* All variables initially conflict with no other. */
287 v->conflicts = NULL;
288
289 /* Ensure that this decl doesn't get put onto the list twice. */
290 set_rtl (decl, pc_rtx);
291
292 stack_vars_num++;
293 }
294
295 /* Make the decls associated with luids X and Y conflict. */
296
297 static void
298 add_stack_var_conflict (size_t x, size_t y)
299 {
300 struct stack_var *a = &stack_vars[x];
301 struct stack_var *b = &stack_vars[y];
302 if (!a->conflicts)
303 a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
304 if (!b->conflicts)
305 b->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
306 bitmap_set_bit (a->conflicts, y);
307 bitmap_set_bit (b->conflicts, x);
308 }
309
310 /* Check whether the decls associated with luids X and Y conflict. */
311
312 static bool
313 stack_var_conflict_p (size_t x, size_t y)
314 {
315 struct stack_var *a = &stack_vars[x];
316 struct stack_var *b = &stack_vars[y];
317 if (x == y)
318 return false;
319 /* Partitions containing an SSA name result from gimple registers
320 with things like unsupported modes. They are top-level and
321 hence conflict with everything else. */
322 if (TREE_CODE (a->decl) == SSA_NAME || TREE_CODE (b->decl) == SSA_NAME)
323 return true;
324
325 if (!a->conflicts || !b->conflicts)
326 return false;
327 return bitmap_bit_p (a->conflicts, y);
328 }
329
330 /* Callback for walk_stmt_ops. If OP is a decl touched by add_stack_var
331 enter its partition number into bitmap DATA. */
332
333 static bool
334 visit_op (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
335 {
336 bitmap active = (bitmap)data;
337 op = get_base_address (op);
338 if (op
339 && DECL_P (op)
340 && DECL_RTL_IF_SET (op) == pc_rtx)
341 {
342 size_t *v = (size_t *) pointer_map_contains (decl_to_stack_part, op);
343 if (v)
344 bitmap_set_bit (active, *v);
345 }
346 return false;
347 }
348
349 /* Callback for walk_stmt_ops. If OP is a decl touched by add_stack_var
350 record conflicts between it and all currently active other partitions
351 from bitmap DATA. */
352
353 static bool
354 visit_conflict (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
355 {
356 bitmap active = (bitmap)data;
357 op = get_base_address (op);
358 if (op
359 && DECL_P (op)
360 && DECL_RTL_IF_SET (op) == pc_rtx)
361 {
362 size_t *v =
363 (size_t *) pointer_map_contains (decl_to_stack_part, op);
364 if (v && bitmap_set_bit (active, *v))
365 {
366 size_t num = *v;
367 bitmap_iterator bi;
368 unsigned i;
369 gcc_assert (num < stack_vars_num);
370 EXECUTE_IF_SET_IN_BITMAP (active, 0, i, bi)
371 add_stack_var_conflict (num, i);
372 }
373 }
374 return false;
375 }
376
377 /* Helper routine for add_scope_conflicts, calculating the active partitions
378 at the end of BB, leaving the result in WORK. We're called to generate
379 conflicts when FOR_CONFLICT is true, otherwise we're just tracking
380 liveness. */
381
382 static void
383 add_scope_conflicts_1 (basic_block bb, bitmap work, bool for_conflict)
384 {
385 edge e;
386 edge_iterator ei;
387 gimple_stmt_iterator gsi;
388 bool (*visit)(gimple, tree, void *);
389
390 bitmap_clear (work);
391 FOR_EACH_EDGE (e, ei, bb->preds)
392 bitmap_ior_into (work, (bitmap)e->src->aux);
393
394 visit = visit_op;
395
396 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
397 {
398 gimple stmt = gsi_stmt (gsi);
399 walk_stmt_load_store_addr_ops (stmt, work, NULL, NULL, visit);
400 }
401 for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
402 {
403 gimple stmt = gsi_stmt (gsi);
404
405 if (gimple_clobber_p (stmt))
406 {
407 tree lhs = gimple_assign_lhs (stmt);
408 size_t *v;
409 /* Nested function lowering might introduce LHSs
410 that are COMPONENT_REFs. */
411 if (TREE_CODE (lhs) != VAR_DECL)
412 continue;
413 if (DECL_RTL_IF_SET (lhs) == pc_rtx
414 && (v = (size_t *)
415 pointer_map_contains (decl_to_stack_part, lhs)))
416 bitmap_clear_bit (work, *v);
417 }
418 else if (!is_gimple_debug (stmt))
419 {
420 if (for_conflict
421 && visit == visit_op)
422 {
423 /* If this is the first real instruction in this BB we need
424 to add conflicts for everything live at this point now.
425 Unlike classical liveness for named objects we can't
426 rely on seeing a def/use of the names we're interested in.
427 There might merely be indirect loads/stores. We'd not add any
428 conflicts for such partitions. */
429 bitmap_iterator bi;
430 unsigned i;
431 EXECUTE_IF_SET_IN_BITMAP (work, 0, i, bi)
432 {
433 struct stack_var *a = &stack_vars[i];
434 if (!a->conflicts)
435 a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
436 bitmap_ior_into (a->conflicts, work);
437 }
438 visit = visit_conflict;
439 }
440 walk_stmt_load_store_addr_ops (stmt, work, visit, visit, visit);
441 }
442 }
443 }
444
445 /* Generate stack partition conflicts between all partitions that are
446 simultaneously live. */
447
448 static void
449 add_scope_conflicts (void)
450 {
451 basic_block bb;
452 bool changed;
453 bitmap work = BITMAP_ALLOC (NULL);
454 int *rpo;
455 int n_bbs;
456
457 /* We approximate the live range of a stack variable by taking the first
458 mention of its name as starting point(s), and by the end-of-scope
459 death clobber added by gimplify as ending point(s) of the range.
460 This overapproximates in the case where we, for instance, moved an
461 address-taken operation upward without also moving a dereference to it upward.
462 But it's conservatively correct, as a variable can never hold values
463 before its name is mentioned at least once.
464
465 We then do a mostly classical bitmap liveness algorithm. */
466
467 FOR_ALL_BB (bb)
468 bb->aux = BITMAP_ALLOC (&stack_var_bitmap_obstack);
469
470 rpo = XNEWVEC (int, last_basic_block);
471 n_bbs = pre_and_rev_post_order_compute (NULL, rpo, false);
472
473 changed = true;
474 while (changed)
475 {
476 int i;
477 changed = false;
478 for (i = 0; i < n_bbs; i++)
479 {
480 bitmap active;
481 bb = BASIC_BLOCK (rpo[i]);
482 active = (bitmap)bb->aux;
483 add_scope_conflicts_1 (bb, work, false);
484 if (bitmap_ior_into (active, work))
485 changed = true;
486 }
487 }
488
489 FOR_EACH_BB (bb)
490 add_scope_conflicts_1 (bb, work, true);
491
492 free (rpo);
493 BITMAP_FREE (work);
494 FOR_ALL_BB (bb)
495 BITMAP_FREE (bb->aux);
496 }
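/* An illustrative GIMPLE fragment (made up) showing why the clobbers
   matter for the liveness computed above:

       a = ...;              <-- first mention, a becomes live
       use (a);
       a ={v} {CLOBBER};     <-- end-of-scope clobber, a leaves WORK
       b = ...;              <-- b only becomes live afterwards

   a and b are never live at the same time, so no conflict is recorded
   between them and partition_stack_vars may later share their slots.  */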
497
498 /* A subroutine of partition_stack_vars. A comparison function for qsort,
499 sorting an array of indices by the properties of the object. */
500
501 static int
502 stack_var_cmp (const void *a, const void *b)
503 {
504 size_t ia = *(const size_t *)a;
505 size_t ib = *(const size_t *)b;
506 unsigned int aligna = stack_vars[ia].alignb;
507 unsigned int alignb = stack_vars[ib].alignb;
508 HOST_WIDE_INT sizea = stack_vars[ia].size;
509 HOST_WIDE_INT sizeb = stack_vars[ib].size;
510 tree decla = stack_vars[ia].decl;
511 tree declb = stack_vars[ib].decl;
512 bool largea, largeb;
513 unsigned int uida, uidb;
514
515 /* Primary compare on "large" alignment. Large comes first. */
516 largea = (aligna * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
517 largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
518 if (largea != largeb)
519 return (int)largeb - (int)largea;
520
521 /* Secondary compare on size, decreasing */
522 if (sizea > sizeb)
523 return -1;
524 if (sizea < sizeb)
525 return 1;
526
527 /* Tertiary compare on true alignment, decreasing. */
528 if (aligna < alignb)
529 return -1;
530 if (aligna > alignb)
531 return 1;
532
533 /* Final compare on ID for sort stability, increasing.
534 Two SSA names are compared by their version, SSA names come before
535 non-SSA names, and two normal decls are compared by their DECL_UID. */
536 if (TREE_CODE (decla) == SSA_NAME)
537 {
538 if (TREE_CODE (declb) == SSA_NAME)
539 uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
540 else
541 return -1;
542 }
543 else if (TREE_CODE (declb) == SSA_NAME)
544 return 1;
545 else
546 uida = DECL_UID (decla), uidb = DECL_UID (declb);
547 if (uida < uidb)
548 return 1;
549 if (uida > uidb)
550 return -1;
551 return 0;
552 }
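/* For example (made-up sizes and alignments), with candidates

       A: size 64, "small" alignment
       B: size  8, "small" alignment
       C: size 32, "large" alignment

   the sorted order is C, A, B: variables needing "large" (dynamically
   realigned) stack alignment come first, the rest follow by decreasing
   size.  */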
553
554
555 /* If the points-to solution *PT points to variables that are in a partition
556 together with other variables, add all partition members to the pointed-to
557 variables bitmap. */
558
559 static void
560 add_partitioned_vars_to_ptset (struct pt_solution *pt,
561 struct pointer_map_t *decls_to_partitions,
562 struct pointer_set_t *visited, bitmap temp)
563 {
564 bitmap_iterator bi;
565 unsigned i;
566 bitmap *part;
567
568 if (pt->anything
569 || pt->vars == NULL
570 /* The pointed-to vars bitmap is shared; it is enough to
571 visit it once. */
572 || pointer_set_insert(visited, pt->vars))
573 return;
574
575 bitmap_clear (temp);
576
577 /* By using a temporary bitmap to store all members of the partitions
578 we have to add, we make sure to visit each of the partitions only
579 once. */
580 EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
581 if ((!temp
582 || !bitmap_bit_p (temp, i))
583 && (part = (bitmap *) pointer_map_contains (decls_to_partitions,
584 (void *)(size_t) i)))
585 bitmap_ior_into (temp, *part);
586 if (!bitmap_empty_p (temp))
587 bitmap_ior_into (pt->vars, temp);
588 }
589
590 /* Update points-to sets based on partition info, so we can use them on RTL.
591 The bitmaps representing stack partitions will be saved until expand,
592 where partitioned decls used as bases in memory expressions will be
593 rewritten. */
594
595 static void
596 update_alias_info_with_stack_vars (void)
597 {
598 struct pointer_map_t *decls_to_partitions = NULL;
599 size_t i, j;
600 tree var = NULL_TREE;
601
602 for (i = 0; i < stack_vars_num; i++)
603 {
604 bitmap part = NULL;
605 tree name;
606 struct ptr_info_def *pi;
607
608 /* Not interested in partitions with a single variable. */
609 if (stack_vars[i].representative != i
610 || stack_vars[i].next == EOC)
611 continue;
612
613 if (!decls_to_partitions)
614 {
615 decls_to_partitions = pointer_map_create ();
616 cfun->gimple_df->decls_to_pointers = pointer_map_create ();
617 }
618
619 /* Create an SSA_NAME that points to the partition for use
620 as base during alias-oracle queries on RTL for bases that
621 have been partitioned. */
622 if (var == NULL_TREE)
623 var = create_tmp_var (ptr_type_node, NULL);
624 name = make_ssa_name (var, NULL);
625
626 /* Create bitmaps representing partitions. They will be used for
627 points-to sets later, so use GGC alloc. */
628 part = BITMAP_GGC_ALLOC ();
629 for (j = i; j != EOC; j = stack_vars[j].next)
630 {
631 tree decl = stack_vars[j].decl;
632 unsigned int uid = DECL_PT_UID (decl);
633 bitmap_set_bit (part, uid);
634 *((bitmap *) pointer_map_insert (decls_to_partitions,
635 (void *)(size_t) uid)) = part;
636 *((tree *) pointer_map_insert (cfun->gimple_df->decls_to_pointers,
637 decl)) = name;
638 }
639
640 /* Make the SSA name point to all partition members. */
641 pi = get_ptr_info (name);
642 pt_solution_set (&pi->pt, part, false);
643 }
644
645 /* Make all points-to sets that contain one member of a partition
646 contain all members of the partition. */
647 if (decls_to_partitions)
648 {
649 unsigned i;
650 struct pointer_set_t *visited = pointer_set_create ();
651 bitmap temp = BITMAP_ALLOC (&stack_var_bitmap_obstack);
652
653 for (i = 1; i < num_ssa_names; i++)
654 {
655 tree name = ssa_name (i);
656 struct ptr_info_def *pi;
657
658 if (name
659 && POINTER_TYPE_P (TREE_TYPE (name))
660 && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
661 add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
662 visited, temp);
663 }
664
665 add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
666 decls_to_partitions, visited, temp);
667
668 pointer_set_destroy (visited);
669 pointer_map_destroy (decls_to_partitions);
670 BITMAP_FREE (temp);
671 }
672 }
673
674 /* A subroutine of partition_stack_vars. The UNION portion of a UNION/FIND
675 partitioning algorithm. Partitions A and B are known to be non-conflicting.
676 Merge them into a single partition A. */
677
678 static void
679 union_stack_vars (size_t a, size_t b)
680 {
681 struct stack_var *vb = &stack_vars[b];
682 bitmap_iterator bi;
683 unsigned u;
684
685 gcc_assert (stack_vars[b].next == EOC);
686 /* Add B to A's partition. */
687 stack_vars[b].next = stack_vars[a].next;
688 stack_vars[b].representative = a;
689 stack_vars[a].next = b;
690
691 /* Update the required alignment of partition A to account for B. */
692 if (stack_vars[a].alignb < stack_vars[b].alignb)
693 stack_vars[a].alignb = stack_vars[b].alignb;
694
695 /* Update the interference graph and merge the conflicts. */
696 if (vb->conflicts)
697 {
698 EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
699 add_stack_var_conflict (a, stack_vars[u].representative);
700 BITMAP_FREE (vb->conflicts);
701 }
702 }
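/* For instance (made-up indices): union_stack_vars (3, 7) links variable 7
   behind representative 3, so afterwards

       stack_vars[7].representative == 3
       stack_vars[3].next == 7

   while 3's alignb becomes the maximum of both and 7's conflicts are
   re-recorded against 3.  */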
703
704 /* A subroutine of expand_used_vars. Binpack the variables into
705 partitions constrained by the interference graph. The overall
706 algorithm used is as follows:
707
708 Sort the objects, "large"-aligned ones first, then by decreasing size.
709 For each object A in that order {
710 For each later object B in that order {
711 if B is still its own partition's representative
712 and B does not conflict with A
713 and B is in the same ("small" vs. "large") alignment class as A
714 UNION (A, B)
715 }
716 }
717 */
718
719 static void
720 partition_stack_vars (void)
721 {
722 size_t si, sj, n = stack_vars_num;
723
724 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
725 for (si = 0; si < n; ++si)
726 stack_vars_sorted[si] = si;
727
728 if (n == 1)
729 return;
730
731 qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_cmp);
732
733 for (si = 0; si < n; ++si)
734 {
735 size_t i = stack_vars_sorted[si];
736 unsigned int ialign = stack_vars[i].alignb;
737
738 /* Ignore objects that aren't partition representatives. If we
739 see a var that is not a partition representative, it must
740 have been merged earlier. */
741 if (stack_vars[i].representative != i)
742 continue;
743
744 for (sj = si + 1; sj < n; ++sj)
745 {
746 size_t j = stack_vars_sorted[sj];
747 unsigned int jalign = stack_vars[j].alignb;
748
749 /* Ignore objects that aren't partition representatives. */
750 if (stack_vars[j].representative != j)
751 continue;
752
753 /* Ignore conflicting objects. */
754 if (stack_var_conflict_p (i, j))
755 continue;
756
757 /* Do not mix objects of "small" (supported) alignment
758 and "large" (unsupported) alignment. */
759 if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
760 != (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
761 continue;
762
763 /* UNION the objects. */
764 union_stack_vars (i, j);
765 }
766 }
767
768 update_alias_info_with_stack_vars ();
769 }
770
771 /* A debugging aid for expand_used_vars. Dump the generated partitions. */
772
773 static void
774 dump_stack_var_partition (void)
775 {
776 size_t si, i, j, n = stack_vars_num;
777
778 for (si = 0; si < n; ++si)
779 {
780 i = stack_vars_sorted[si];
781
782 /* Skip variables that aren't partition representatives, for now. */
783 if (stack_vars[i].representative != i)
784 continue;
785
786 fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
787 " align %u\n", (unsigned long) i, stack_vars[i].size,
788 stack_vars[i].alignb);
789
790 for (j = i; j != EOC; j = stack_vars[j].next)
791 {
792 fputc ('\t', dump_file);
793 print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
794 }
795 fputc ('\n', dump_file);
796 }
797 }
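/* The dump produced above looks like this (made-up contents):

       Partition 2: size 80 align 16
               buf     tmp

   i.e. one header line per partition representative followed by a
   tab-separated line of its members.  */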
798
799 /* Assign rtl to DECL at BASE + OFFSET. */
800
801 static void
802 expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
803 HOST_WIDE_INT offset)
804 {
805 unsigned align;
806 rtx x;
807
808 /* If this fails, we've overflowed the stack frame. Error nicely? */
809 gcc_assert (offset == trunc_int_for_mode (offset, Pmode));
810
811 x = plus_constant (Pmode, base, offset);
812 x = gen_rtx_MEM (DECL_MODE (SSAVAR (decl)), x);
813
814 if (TREE_CODE (decl) != SSA_NAME)
815 {
816 /* Set the alignment we actually gave this decl if it isn't an SSA name.
817 If it is, we generate stack slots only accidentally, so it isn't as
818 important; we'll simply use the alignment that is already set. */
819 if (base == virtual_stack_vars_rtx)
820 offset -= frame_phase;
821 align = offset & -offset;
822 align *= BITS_PER_UNIT;
823 if (align == 0 || align > base_align)
824 align = base_align;
825
826 /* One would think that we could assert that we're not decreasing
827 alignment here, but (at least) the i386 port does exactly this
828 via the MINIMUM_ALIGNMENT hook. */
829
830 DECL_ALIGN (decl) = align;
831 DECL_USER_ALIGN (decl) = 0;
832 }
833
834 set_mem_attributes (x, SSAVAR (decl), true);
835 set_rtl (decl, x);
836 }
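/* A worked example of the alignment recorded above (made-up numbers,
   assuming BITS_PER_UNIT == 8): a decl placed at OFFSET == -24 from
   virtual_stack_vars_rtx with frame_phase == 0 gets

       align = (-24 & 24) * 8 = 8 * 8 = 64 bits

   capped at BASE_ALIGN, and that value becomes its DECL_ALIGN.  */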
837
838 /* A subroutine of expand_used_vars. Give each partition representative
839 a unique location within the stack frame. Update each partition member
840 with that location. */
841
842 static void
843 expand_stack_vars (bool (*pred) (tree))
844 {
845 size_t si, i, j, n = stack_vars_num;
846 HOST_WIDE_INT large_size = 0, large_alloc = 0;
847 rtx large_base = NULL;
848 unsigned large_align = 0;
849 tree decl;
850
851 /* Determine if there are any variables requiring "large" alignment.
852 Since these are dynamically allocated, we only process them when
853 no predicate is involved. */
854 large_align = stack_vars[stack_vars_sorted[0]].alignb * BITS_PER_UNIT;
855 if (pred == NULL && large_align > MAX_SUPPORTED_STACK_ALIGNMENT)
856 {
857 /* Find the total size of these variables. */
858 for (si = 0; si < n; ++si)
859 {
860 unsigned alignb;
861
862 i = stack_vars_sorted[si];
863 alignb = stack_vars[i].alignb;
864
865 /* Stop when we get to the first decl with "small" alignment. */
866 if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
867 break;
868
869 /* Skip variables that aren't partition representatives. */
870 if (stack_vars[i].representative != i)
871 continue;
872
873 /* Skip variables that have already had rtl assigned. See also
874 add_stack_var where we perpetrate this pc_rtx hack. */
875 decl = stack_vars[i].decl;
876 if ((TREE_CODE (decl) == SSA_NAME
877 ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
878 : DECL_RTL (decl)) != pc_rtx)
879 continue;
880
881 large_size += alignb - 1;
882 large_size &= -(HOST_WIDE_INT)alignb;
883 large_size += stack_vars[i].size;
884 }
885
886 /* If there were any, allocate space. */
887 if (large_size > 0)
888 large_base = allocate_dynamic_stack_space (GEN_INT (large_size), 0,
889 large_align, true);
890 }
891
892 for (si = 0; si < n; ++si)
893 {
894 rtx base;
895 unsigned base_align, alignb;
896 HOST_WIDE_INT offset;
897
898 i = stack_vars_sorted[si];
899
900 /* Skip variables that aren't partition representatives, for now. */
901 if (stack_vars[i].representative != i)
902 continue;
903
904 /* Skip variables that have already had rtl assigned. See also
905 add_stack_var where we perpetrate this pc_rtx hack. */
906 decl = stack_vars[i].decl;
907 if ((TREE_CODE (decl) == SSA_NAME
908 ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
909 : DECL_RTL (decl)) != pc_rtx)
910 continue;
911
912 /* Check the predicate to see whether this variable should be
913 allocated in this pass. */
914 if (pred && !pred (decl))
915 continue;
916
917 alignb = stack_vars[i].alignb;
918 if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
919 {
920 offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
921 base = virtual_stack_vars_rtx;
922 base_align = crtl->max_used_stack_slot_alignment;
923 }
924 else
925 {
926 /* Large alignment is only processed in the last pass. */
927 if (pred)
928 continue;
929 gcc_assert (large_base != NULL);
930
931 large_alloc += alignb - 1;
932 large_alloc &= -(HOST_WIDE_INT)alignb;
933 offset = large_alloc;
934 large_alloc += stack_vars[i].size;
935
936 base = large_base;
937 base_align = large_align;
938 }
939
940 /* Create rtl for each variable based on its location within the
941 partition. */
942 for (j = i; j != EOC; j = stack_vars[j].next)
943 {
944 expand_one_stack_var_at (stack_vars[j].decl,
945 base, base_align,
946 offset);
947 }
948 }
949
950 gcc_assert (large_alloc == large_size);
951 }
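/* For example (made-up sizes): two "large" variables of 100 bytes each at
   64-byte alignment end up at offsets 0 and 128 within the dynamically
   allocated block -- large_alloc is rounded up to a multiple of 64 before
   each placement -- and the final large_alloc of 228 matches large_size
   computed in the sizing loop above, satisfying the assert.  */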
952
953 /* Take into account all sizes of partitions and reset DECL_RTLs. */
954 static HOST_WIDE_INT
955 account_stack_vars (void)
956 {
957 size_t si, j, i, n = stack_vars_num;
958 HOST_WIDE_INT size = 0;
959
960 for (si = 0; si < n; ++si)
961 {
962 i = stack_vars_sorted[si];
963
964 /* Skip variables that aren't partition representatives, for now. */
965 if (stack_vars[i].representative != i)
966 continue;
967
968 size += stack_vars[i].size;
969 for (j = i; j != EOC; j = stack_vars[j].next)
970 set_rtl (stack_vars[j].decl, NULL);
971 }
972 return size;
973 }
974
975 /* A subroutine of expand_one_var. Called to immediately assign rtl
976 to a variable to be allocated in the stack frame. */
977
978 static void
979 expand_one_stack_var (tree var)
980 {
981 HOST_WIDE_INT size, offset;
982 unsigned byte_align;
983
984 size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (var)), 1);
985 byte_align = align_local_variable (SSAVAR (var));
986
987 /* We handle highly aligned variables in expand_stack_vars. */
988 gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT);
989
990 offset = alloc_stack_frame_space (size, byte_align);
991
992 expand_one_stack_var_at (var, virtual_stack_vars_rtx,
993 crtl->max_used_stack_slot_alignment, offset);
994 }
995
996 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
997 that will reside in a hard register. */
998
999 static void
1000 expand_one_hard_reg_var (tree var)
1001 {
1002 rest_of_decl_compilation (var, 0, 0);
1003 }
1004
1005 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
1006 that will reside in a pseudo register. */
1007
1008 static void
1009 expand_one_register_var (tree var)
1010 {
1011 tree decl = SSAVAR (var);
1012 tree type = TREE_TYPE (decl);
1013 enum machine_mode reg_mode = promote_decl_mode (decl, NULL);
1014 rtx x = gen_reg_rtx (reg_mode);
1015
1016 set_rtl (var, x);
1017
1018 /* Note if the object is a user variable. */
1019 if (!DECL_ARTIFICIAL (decl))
1020 mark_user_reg (x);
1021
1022 if (POINTER_TYPE_P (type))
1023 mark_reg_pointer (x, get_pointer_alignment (var));
1024 }
1025
1026 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL that
1027 has some associated error, e.g. its type is error-mark. We just need
1028 to pick something that won't crash the rest of the compiler. */
1029
1030 static void
1031 expand_one_error_var (tree var)
1032 {
1033 enum machine_mode mode = DECL_MODE (var);
1034 rtx x;
1035
1036 if (mode == BLKmode)
1037 x = gen_rtx_MEM (BLKmode, const0_rtx);
1038 else if (mode == VOIDmode)
1039 x = const0_rtx;
1040 else
1041 x = gen_reg_rtx (mode);
1042
1043 SET_DECL_RTL (var, x);
1044 }
1045
1046 /* A subroutine of expand_one_var. VAR is a variable that will be
1047 allocated to the local stack frame. Return true if we wish to
1048 add VAR to STACK_VARS so that it will be coalesced with other
1049 variables. Return false to allocate VAR immediately.
1050
1051 This function is used to reduce the number of variables considered
1052 for coalescing, which reduces the size of the quadratic problem. */
1053
1054 static bool
1055 defer_stack_allocation (tree var, bool toplevel)
1056 {
1057 /* If stack protection is enabled, *all* stack variables must be deferred,
1058 so that we can re-order the strings to the top of the frame. */
1059 if (flag_stack_protect)
1060 return true;
1061
1062 /* We handle "large" alignment via dynamic allocation. We want to handle
1063 this extra complication in only one place, so defer them. */
1064 if (DECL_ALIGN (var) > MAX_SUPPORTED_STACK_ALIGNMENT)
1065 return true;
1066
1067 /* Variables in the outermost scope automatically conflict with
1068 every other variable. The only reason to want to defer them
1069 at all is that, after sorting, we can more efficiently pack
1070 small variables in the stack frame. Continue to defer at -O2. */
1071 if (toplevel && optimize < 2)
1072 return false;
1073
1074 /* Without optimization, *most* variables are allocated from the
1075 stack, which makes the quadratic problem large exactly when we
1076 want compilation to proceed as quickly as possible. On the
1077 other hand, we don't want the function's stack frame size to
1078 get completely out of hand. So we avoid adding scalars and
1079 "small" aggregates to the list at all. */
1080 if (optimize == 0 && tree_low_cst (DECL_SIZE_UNIT (var), 1) < 32)
1081 return false;
1082
1083 return true;
1084 }
1085
1086 /* A subroutine of expand_used_vars. Expand one variable according to
1087 its flavor. Variables to be placed on the stack are not actually
1088 expanded yet, merely recorded.
1089 When REALLY_EXPAND is false, only add stack values to be allocated.
1090 Return the stack usage this variable is supposed to take.
1091 */
1092
1093 static HOST_WIDE_INT
1094 expand_one_var (tree var, bool toplevel, bool really_expand)
1095 {
1096 unsigned int align = BITS_PER_UNIT;
1097 tree origvar = var;
1098
1099 var = SSAVAR (var);
1100
1101 if (TREE_TYPE (var) != error_mark_node && TREE_CODE (var) == VAR_DECL)
1102 {
1103 /* Because we don't know if VAR will be in a register or on the stack,
1104 we conservatively assume it will be on the stack even if VAR is
1105 eventually put into a register after the RA pass. For non-automatic
1106 variables, which won't be on the stack, we collect the alignment of
1107 the type and ignore user-specified alignment. */
1108 if (TREE_STATIC (var) || DECL_EXTERNAL (var))
1109 align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
1110 TYPE_MODE (TREE_TYPE (var)),
1111 TYPE_ALIGN (TREE_TYPE (var)));
1112 else if (DECL_HAS_VALUE_EXPR_P (var)
1113 || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var))))
1114 /* Don't consider debug-only variables with DECL_HAS_VALUE_EXPR_P set
1115 or variables which have already been assigned a stack slot by
1116 expand_one_stack_var_at - in the latter case DECL_ALIGN has been
1117 changed according to the offset chosen for it. */
1118 align = crtl->stack_alignment_estimated;
1119 else
1120 align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));
1121
1122 /* If the variable alignment is very large we'll dynamically allocate
1123 it, which means that the in-frame portion is just a pointer. */
1124 if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1125 align = POINTER_SIZE;
1126 }
1127
1128 if (SUPPORTS_STACK_ALIGNMENT
1129 && crtl->stack_alignment_estimated < align)
1130 {
1131 /* stack_alignment_estimated shouldn't change after the stack
1132 realign decision has been made. */
1133 gcc_assert(!crtl->stack_realign_processed);
1134 crtl->stack_alignment_estimated = align;
1135 }
1136
1137 /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
1138 So here we only make sure stack_alignment_needed >= align. */
1139 if (crtl->stack_alignment_needed < align)
1140 crtl->stack_alignment_needed = align;
1141 if (crtl->max_used_stack_slot_alignment < align)
1142 crtl->max_used_stack_slot_alignment = align;
1143
1144 if (TREE_CODE (origvar) == SSA_NAME)
1145 {
1146 gcc_assert (TREE_CODE (var) != VAR_DECL
1147 || (!DECL_EXTERNAL (var)
1148 && !DECL_HAS_VALUE_EXPR_P (var)
1149 && !TREE_STATIC (var)
1150 && TREE_TYPE (var) != error_mark_node
1151 && !DECL_HARD_REGISTER (var)
1152 && really_expand));
1153 }
1154 if (TREE_CODE (var) != VAR_DECL && TREE_CODE (origvar) != SSA_NAME)
1155 ;
1156 else if (DECL_EXTERNAL (var))
1157 ;
1158 else if (DECL_HAS_VALUE_EXPR_P (var))
1159 ;
1160 else if (TREE_STATIC (var))
1161 ;
1162 else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
1163 ;
1164 else if (TREE_TYPE (var) == error_mark_node)
1165 {
1166 if (really_expand)
1167 expand_one_error_var (var);
1168 }
1169 else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
1170 {
1171 if (really_expand)
1172 expand_one_hard_reg_var (var);
1173 }
1174 else if (use_register_for_decl (var))
1175 {
1176 if (really_expand)
1177 expand_one_register_var (origvar);
1178 }
1179 else if (! valid_constant_size_p (DECL_SIZE_UNIT (var)))
1180 {
1181 /* Reject variables which cover more than half of the address-space. */
1182 if (really_expand)
1183 {
1184 error ("size of variable %q+D is too large", var);
1185 expand_one_error_var (var);
1186 }
1187 }
1188 else if (defer_stack_allocation (var, toplevel))
1189 add_stack_var (origvar);
1190 else
1191 {
1192 if (really_expand)
1193 expand_one_stack_var (origvar);
1194 return tree_low_cst (DECL_SIZE_UNIT (var), 1);
1195 }
1196 return 0;
1197 }
1198
1199 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1200 expanding variables. Those variables that can be put into registers
1201 are allocated pseudos; those that can't are put on the stack.
1202
1203 TOPLEVEL is true if this is the outermost BLOCK. */
1204
1205 static void
1206 expand_used_vars_for_block (tree block, bool toplevel)
1207 {
1208 tree t;
1209
1210 /* Expand all variables at this level. */
1211 for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1212 if (TREE_USED (t)
1213 && ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
1214 || !DECL_NONSHAREABLE (t)))
1215 expand_one_var (t, toplevel, true);
1216
1217 /* Expand all variables in the contained sub-blocks. */
1218 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1219 expand_used_vars_for_block (t, false);
1220 }
1221
1222 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1223 and clear TREE_USED on all local variables. */
1224
1225 static void
1226 clear_tree_used (tree block)
1227 {
1228 tree t;
1229
1230 for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1231 /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
1232 if ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
1233 || !DECL_NONSHAREABLE (t))
1234 TREE_USED (t) = 0;
1235
1236 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1237 clear_tree_used (t);
1238 }
1239
1240 /* Examine TYPE and determine a bit mask of the following features. */
1241
1242 #define SPCT_HAS_LARGE_CHAR_ARRAY 1
1243 #define SPCT_HAS_SMALL_CHAR_ARRAY 2
1244 #define SPCT_HAS_ARRAY 4
1245 #define SPCT_HAS_AGGREGATE 8
1246
1247 static unsigned int
1248 stack_protect_classify_type (tree type)
1249 {
1250 unsigned int ret = 0;
1251 tree t;
1252
1253 switch (TREE_CODE (type))
1254 {
1255 case ARRAY_TYPE:
1256 t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
1257 if (t == char_type_node
1258 || t == signed_char_type_node
1259 || t == unsigned_char_type_node)
1260 {
1261 unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
1262 unsigned HOST_WIDE_INT len;
1263
1264 if (!TYPE_SIZE_UNIT (type)
1265 || !host_integerp (TYPE_SIZE_UNIT (type), 1))
1266 len = max;
1267 else
1268 len = tree_low_cst (TYPE_SIZE_UNIT (type), 1);
1269
1270 if (len < max)
1271 ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
1272 else
1273 ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
1274 }
1275 else
1276 ret = SPCT_HAS_ARRAY;
1277 break;
1278
1279 case UNION_TYPE:
1280 case QUAL_UNION_TYPE:
1281 case RECORD_TYPE:
1282 ret = SPCT_HAS_AGGREGATE;
1283 for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
1284 if (TREE_CODE (t) == FIELD_DECL)
1285 ret |= stack_protect_classify_type (TREE_TYPE (t));
1286 break;
1287
1288 default:
1289 break;
1290 }
1291
1292 return ret;
1293 }
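/* A few examples of the classification (made up, assuming the default
   --param ssp-buffer-size=8):

       char big[64];            -> SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY
       char small[4];           -> SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY
       struct { int x[4]; } s;  -> SPCT_HAS_AGGREGATE | SPCT_HAS_ARRAY  */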
1294
1295 /* Return nonzero if DECL should be segregated into the "vulnerable" upper
1296 part of the local stack frame. Remember if we ever return nonzero for
1297 any variable in this function. The return value is the phase number in
1298 which the variable should be allocated. */
1299
1300 static int
1301 stack_protect_decl_phase (tree decl)
1302 {
1303 unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
1304 int ret = 0;
1305
1306 if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
1307 has_short_buffer = true;
1308
1309 if (flag_stack_protect == 2)
1310 {
1311 if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
1312 && !(bits & SPCT_HAS_AGGREGATE))
1313 ret = 1;
1314 else if (bits & SPCT_HAS_ARRAY)
1315 ret = 2;
1316 }
1317 else
1318 ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;
1319
1320 if (ret)
1321 has_protected_decls = true;
1322
1323 return ret;
1324 }
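/* Continuing the examples above: with -fstack-protector
   (flag_stack_protect == 1) only the large char array gets phase 1; with
   -fstack-protector-all (flag_stack_protect == 2) the small char array is
   also placed in phase 1 and the struct containing an array in phase 2.  */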
1325
1326 /* Two helper routines that check for phase 1 and phase 2. These are used
1327 as callbacks for expand_stack_vars. */
1328
1329 static bool
1330 stack_protect_decl_phase_1 (tree decl)
1331 {
1332 return stack_protect_decl_phase (decl) == 1;
1333 }
1334
1335 static bool
1336 stack_protect_decl_phase_2 (tree decl)
1337 {
1338 return stack_protect_decl_phase (decl) == 2;
1339 }
1340
1341 /* Ensure that variables in different stack protection phases conflict
1342 so that they are not merged and share the same stack slot. */
1343
1344 static void
1345 add_stack_protection_conflicts (void)
1346 {
1347 size_t i, j, n = stack_vars_num;
1348 unsigned char *phase;
1349
1350 phase = XNEWVEC (unsigned char, n);
1351 for (i = 0; i < n; ++i)
1352 phase[i] = stack_protect_decl_phase (stack_vars[i].decl);
1353
1354 for (i = 0; i < n; ++i)
1355 {
1356 unsigned char ph_i = phase[i];
1357 for (j = i + 1; j < n; ++j)
1358 if (ph_i != phase[j])
1359 add_stack_var_conflict (i, j);
1360 }
1361
1362 XDELETEVEC (phase);
1363 }
1364
1365 /* Create a decl for the guard at the top of the stack frame. */
1366
1367 static void
1368 create_stack_guard (void)
1369 {
1370 tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
1371 VAR_DECL, NULL, ptr_type_node);
1372 TREE_THIS_VOLATILE (guard) = 1;
1373 TREE_USED (guard) = 1;
1374 expand_one_stack_var (guard);
1375 crtl->stack_protect_guard = guard;
1376 }
1377
1378 /* Prepare for expanding variables. */
1379 static void
1380 init_vars_expansion (void)
1381 {
1382 /* Conflict bitmaps, and a few related temporary bitmaps, go here. */
1383 bitmap_obstack_initialize (&stack_var_bitmap_obstack);
1384
1385 /* A map from decl to stack partition. */
1386 decl_to_stack_part = pointer_map_create ();
1387
1388 /* Initialize local stack smashing state. */
1389 has_protected_decls = false;
1390 has_short_buffer = false;
1391 }
1392
1393 /* Free up stack variable graph data. */
1394 static void
1395 fini_vars_expansion (void)
1396 {
1397 bitmap_obstack_release (&stack_var_bitmap_obstack);
1398 if (stack_vars)
1399 XDELETEVEC (stack_vars);
1400 if (stack_vars_sorted)
1401 XDELETEVEC (stack_vars_sorted);
1402 stack_vars = NULL;
1403 stack_vars_sorted = NULL;
1404 stack_vars_alloc = stack_vars_num = 0;
1405 pointer_map_destroy (decl_to_stack_part);
1406 decl_to_stack_part = NULL;
1407 }
1408
1409 /* Make a fair guess for the size of the stack frame of the function
1410 in NODE. This doesn't have to be exact, the result is only used in
1411 the inline heuristics. So we don't want to run the full stack var
1412 packing algorithm (which is quadratic in the number of stack vars).
1413 Instead, we calculate the total size of all stack vars. This turns
1414 out to be a pretty fair estimate -- packing of stack vars doesn't
1415 happen very often. */
1416
1417 HOST_WIDE_INT
1418 estimated_stack_frame_size (struct cgraph_node *node)
1419 {
1420 HOST_WIDE_INT size = 0;
1421 size_t i;
1422 tree var;
1423 struct function *fn = DECL_STRUCT_FUNCTION (node->symbol.decl);
1424
1425 push_cfun (fn);
1426
1427 init_vars_expansion ();
1428
1429 FOR_EACH_LOCAL_DECL (fn, i, var)
1430 if (auto_var_in_fn_p (var, fn->decl))
1431 size += expand_one_var (var, true, false);
1432
1433 if (stack_vars_num > 0)
1434 {
1435 /* Fake sorting the stack vars for account_stack_vars (). */
1436 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
1437 for (i = 0; i < stack_vars_num; ++i)
1438 stack_vars_sorted[i] = i;
1439 size += account_stack_vars ();
1440 }
1441
1442 fini_vars_expansion ();
1443 pop_cfun ();
1444 return size;
1445 }
1446
1447 /* Expand all variables used in the function. */
1448
1449 static void
1450 expand_used_vars (void)
1451 {
1452 tree var, outer_block = DECL_INITIAL (current_function_decl);
1453 VEC(tree,heap) *maybe_local_decls = NULL;
1454 struct pointer_map_t *ssa_name_decls;
1455 unsigned i;
1456 unsigned len;
1457
1458 /* Compute the phase of the stack frame for this function. */
1459 {
1460 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1461 int off = STARTING_FRAME_OFFSET % align;
1462 frame_phase = off ? align - off : 0;
1463 }
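  /* A worked example (made-up target values): with
     PREFERRED_STACK_BOUNDARY == 128 and STARTING_FRAME_OFFSET == 8 this
     gives align == 16, off == 8 and hence frame_phase == 16 - 8 == 8.  */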
1464
1465 /* Set TREE_USED on all variables in the local_decls. */
1466 FOR_EACH_LOCAL_DECL (cfun, i, var)
1467 TREE_USED (var) = 1;
1468 /* Clear TREE_USED on all variables associated with a block scope. */
1469 clear_tree_used (DECL_INITIAL (current_function_decl));
1470
1471 init_vars_expansion ();
1472
1473 ssa_name_decls = pointer_map_create ();
1474 for (i = 0; i < SA.map->num_partitions; i++)
1475 {
1476 tree var = partition_to_var (SA.map, i);
1477
1478 gcc_assert (!virtual_operand_p (var));
1479
1480 /* Assign decls to each SSA name partition, share decls for partitions
1481 we could have coalesced (those with the same type). */
1482 if (SSA_NAME_VAR (var) == NULL_TREE)
1483 {
1484 void **slot = pointer_map_insert (ssa_name_decls, TREE_TYPE (var));
1485 if (!*slot)
1486 *slot = (void *) create_tmp_reg (TREE_TYPE (var), NULL);
1487 replace_ssa_name_symbol (var, (tree) *slot);
1488 }
1489
1490 if (TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
1491 expand_one_var (var, true, true);
1492 else
1493 {
1494 /* This is a PARM_DECL or RESULT_DECL. For those partitions that
1495 contain the default def (representing the parm or result itself)
1496 we don't do anything here. But those which don't contain the
1497 default def (representing a temporary based on the parm/result)
1498 we need to allocate space just like for normal VAR_DECLs. */
1499 if (!bitmap_bit_p (SA.partition_has_default_def, i))
1500 {
1501 expand_one_var (var, true, true);
1502 gcc_assert (SA.partition_to_pseudo[i]);
1503 }
1504 }
1505 }
1506 pointer_map_destroy (ssa_name_decls);
1507
1508 /* At this point all variables on the local_decls with TREE_USED
1509 set are not associated with any block scope. Lay them out. */
1510
1511 len = VEC_length (tree, cfun->local_decls);
1512 FOR_EACH_LOCAL_DECL (cfun, i, var)
1513 {
1514 bool expand_now = false;
1515
1516 /* Expanded above already. */
1517 if (is_gimple_reg (var))
1518 {
1519 TREE_USED (var) = 0;
1520 goto next;
1521 }
1522 /* We didn't set a block for static or extern because it's hard
1523 to tell the difference between a global variable (re)declared
1524 in a local scope, and one that's really declared there to
1525 begin with. And it doesn't really matter much, since we're
1526 not giving them stack space. Expand them now. */
1527 else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
1528 expand_now = true;
1529
1530 /* If the variable is not associated with any block, then it
1531 was created by the optimizers, and could be live anywhere
1532 in the function. */
1533 else if (TREE_USED (var))
1534 expand_now = true;
1535
1536 /* Finally, mark all variables on the list as used. We'll use
1537 this in a moment when we expand those associated with scopes. */
1538 TREE_USED (var) = 1;
1539
1540 if (expand_now)
1541 expand_one_var (var, true, true);
1542
1543 next:
1544 if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
1545 {
1546 rtx rtl = DECL_RTL_IF_SET (var);
1547
1548 /* Keep artificial non-ignored vars in cfun->local_decls
1549 chain until instantiate_decls. */
1550 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
1551 add_local_decl (cfun, var);
1552 else if (rtl == NULL_RTX)
1553 /* If rtl isn't set yet, which can happen e.g. with
1554 -fstack-protector, retry before returning from this
1555 function. */
1556 VEC_safe_push (tree, heap, maybe_local_decls, var);
1557 }
1558 }
1559
1560 /* We duplicated some of the decls in CFUN->LOCAL_DECLS.
1561
1562 +-----------------+-----------------+
1563 | ...processed... | ...duplicates...|
1564 +-----------------+-----------------+
1565 ^
1566 +-- LEN points here.
1567
1568 We just want the duplicates, as those are the artificial
1569 non-ignored vars that we want to keep until instantiate_decls.
1570 Move them down and truncate the array. */
1571 if (!VEC_empty (tree, cfun->local_decls))
1572 VEC_block_remove (tree, cfun->local_decls, 0, len);
1573
1574 /* At this point, all variables within the block tree with TREE_USED
1575 set are actually used by the optimized function. Lay them out. */
1576 expand_used_vars_for_block (outer_block, true);
1577
1578 if (stack_vars_num > 0)
1579 {
1580 add_scope_conflicts ();
1581
1582 /* If stack protection is enabled, we don't share space between
1583 vulnerable data and non-vulnerable data. */
1584 if (flag_stack_protect)
1585 add_stack_protection_conflicts ();
1586
1587 /* Now that we have collected all stack variables, and have computed a
1588 minimal interference graph, attempt to save some stack space. */
1589 partition_stack_vars ();
1590 if (dump_file)
1591 dump_stack_var_partition ();
1592 }
1593
1594 /* There are several conditions under which we should create a
1595 stack guard: protect-all, alloca used, protected decls present. */
1596 if (flag_stack_protect == 2
1597 || (flag_stack_protect
1598 && (cfun->calls_alloca || has_protected_decls)))
1599 create_stack_guard ();
1600
1601 /* Assign rtl to each variable based on these partitions. */
1602 if (stack_vars_num > 0)
1603 {
1604 /* Reorder decls to be protected by iterating over the variables
1605 array multiple times, and allocating out of each phase in turn. */
1606 /* ??? We could probably integrate this into the qsort we did
1607 earlier, such that we naturally see these variables first,
1608 and thus naturally allocate things in the right order. */
1609 if (has_protected_decls)
1610 {
1611 /* Phase 1 contains only character arrays. */
1612 expand_stack_vars (stack_protect_decl_phase_1);
1613
1614 /* Phase 2 contains other kinds of arrays. */
1615 if (flag_stack_protect == 2)
1616 expand_stack_vars (stack_protect_decl_phase_2);
1617 }
1618
1619 expand_stack_vars (NULL);
1620 }
1621
1622 fini_vars_expansion ();
1623
1624 /* If there were any artificial non-ignored vars without rtl
1625 found earlier, see if deferred stack allocation hasn't assigned
1626 rtl to them. */
1627 FOR_EACH_VEC_ELT_REVERSE (tree, maybe_local_decls, i, var)
1628 {
1629 rtx rtl = DECL_RTL_IF_SET (var);
1630
1631 /* Keep artificial non-ignored vars in cfun->local_decls
1632 chain until instantiate_decls. */
1633 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
1634 add_local_decl (cfun, var);
1635 }
1636 VEC_free (tree, heap, maybe_local_decls);
1637
1638 /* If the target requires that FRAME_OFFSET be aligned, do it. */
1639 if (STACK_ALIGNMENT_NEEDED)
1640 {
1641 HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1642 if (!FRAME_GROWS_DOWNWARD)
1643 frame_offset += align - 1;
1644 frame_offset &= -align;
1645 }
1646 }
1647
1648
1649 /* If we need to produce a detailed dump, print the tree representation
1650 for STMT to the dump file. SINCE is the last RTX after which the RTL
1651 generated for STMT should have been appended. */
1652
1653 static void
1654 maybe_dump_rtl_for_gimple_stmt (gimple stmt, rtx since)
1655 {
1656 if (dump_file && (dump_flags & TDF_DETAILS))
1657 {
1658 fprintf (dump_file, "\n;; ");
1659 print_gimple_stmt (dump_file, stmt, 0,
1660 TDF_SLIM | (dump_flags & TDF_LINENO));
1661 fprintf (dump_file, "\n");
1662
1663 print_rtl (dump_file, since ? NEXT_INSN (since) : since);
1664 }
1665 }
1666
1667 /* Maps the blocks that do not contain tree labels to rtx labels. */
1668
1669 static struct pointer_map_t *lab_rtx_for_bb;
1670
1671 /* Returns the label_rtx expression for a label starting basic block BB. */
1672
1673 static rtx
1674 label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
1675 {
1676 gimple_stmt_iterator gsi;
1677 tree lab;
1678 gimple lab_stmt;
1679 void **elt;
1680
1681 if (bb->flags & BB_RTL)
1682 return block_label (bb);
1683
1684 elt = pointer_map_contains (lab_rtx_for_bb, bb);
1685 if (elt)
1686 return (rtx) *elt;
1687
1688 /* Find the tree label if it is present. */
1689
1690 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1691 {
1692 lab_stmt = gsi_stmt (gsi);
1693 if (gimple_code (lab_stmt) != GIMPLE_LABEL)
1694 break;
1695
1696 lab = gimple_label_label (lab_stmt);
1697 if (DECL_NONLOCAL (lab))
1698 break;
1699
1700 return label_rtx (lab);
1701 }
1702
1703 elt = pointer_map_insert (lab_rtx_for_bb, bb);
1704 *elt = gen_label_rtx ();
1705 return (rtx) *elt;
1706 }
1707
1708
1709 /* A subroutine of expand_gimple_cond. Given E, a fallthrough edge
1710 of a basic block where we just expanded the conditional at the end,
1711 possibly clean up the CFG and instruction sequence. LAST is the
1712 last instruction before the just emitted jump sequence. */
1713
1714 static void
1715 maybe_cleanup_end_of_block (edge e, rtx last)
1716 {
1717 /* Special case: when jumpif decides that the condition is
1718 trivial it emits an unconditional jump (and the necessary
1719 barrier). But we still have two edges, the fallthru one is
1720 wrong. purge_dead_edges would clean this up later. Unfortunately
1721 we have to insert insns (and split edges) before
1722 find_many_sub_basic_blocks and hence before purge_dead_edges.
1723 But splitting edges might create new blocks which depend on the
1724 fact that if there are two edges there's no barrier. So the
1725 barrier would get lost and verify_flow_info would ICE. Instead
1726 of auditing all edge splitters to care for the barrier (which
1727 normally isn't there in a cleaned CFG), fix it here. */
1728 if (BARRIER_P (get_last_insn ()))
1729 {
1730 rtx insn;
1731 remove_edge (e);
1732 /* Now, we have a single successor block, if we have insns to
1733 insert on the remaining edge we potentially will insert
1734 it at the end of this block (if the dest block isn't feasible)
1735 in order to avoid splitting the edge. This insertion will take
1736 place in front of the last jump. But we might have emitted
1737 multiple jumps (conditional and one unconditional) to the
1738 same destination. Inserting in front of the last one then
1739 is a problem. See PR 40021. We fix this by deleting all
1740 jumps except the last unconditional one. */
1741 insn = PREV_INSN (get_last_insn ());
1742 /* Make sure we have an unconditional jump. Otherwise we're
1743 confused. */
1744 gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
1745 for (insn = PREV_INSN (insn); insn != last;)
1746 {
1747 insn = PREV_INSN (insn);
1748 if (JUMP_P (NEXT_INSN (insn)))
1749 {
1750 if (!any_condjump_p (NEXT_INSN (insn)))
1751 {
1752 gcc_assert (BARRIER_P (NEXT_INSN (NEXT_INSN (insn))));
1753 delete_insn (NEXT_INSN (NEXT_INSN (insn)));
1754 }
1755 delete_insn (NEXT_INSN (insn));
1756 }
1757 }
1758 }
1759 }
1760
1761 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_COND.
1762 Returns a new basic block if we've terminated the current basic
1763 block and created a new one. */
1764
1765 static basic_block
1766 expand_gimple_cond (basic_block bb, gimple stmt)
1767 {
1768 basic_block new_bb, dest;
1769 edge new_edge;
1770 edge true_edge;
1771 edge false_edge;
1772 rtx last2, last;
1773 enum tree_code code;
1774 tree op0, op1;
1775
1776 code = gimple_cond_code (stmt);
1777 op0 = gimple_cond_lhs (stmt);
1778 op1 = gimple_cond_rhs (stmt);
1779 /* We're sometimes presented with such code:
1780 D.123_1 = x < y;
1781 if (D.123_1 != 0)
1782 ...
1783 This would expand to two comparisons which then later might
1784 be cleaned up by combine. But some pattern matchers like if-conversion
1785 work better when there's only one compare, so make up for this
1786 here as a special exception if TER would have made the same change. */
1787 if (gimple_cond_single_var_p (stmt)
1788 && SA.values
1789 && TREE_CODE (op0) == SSA_NAME
1790 && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
1791 {
1792 gimple second = SSA_NAME_DEF_STMT (op0);
1793 if (gimple_code (second) == GIMPLE_ASSIGN)
1794 {
1795 enum tree_code code2 = gimple_assign_rhs_code (second);
1796 if (TREE_CODE_CLASS (code2) == tcc_comparison)
1797 {
1798 code = code2;
1799 op0 = gimple_assign_rhs1 (second);
1800 op1 = gimple_assign_rhs2 (second);
1801 }
1802 /* If jumps are cheap, turn some more codes into
1803 jumpy sequences. */
1804 else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4)
1805 {
1806 if ((code2 == BIT_AND_EXPR
1807 && TYPE_PRECISION (TREE_TYPE (op0)) == 1
1808 && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
1809 || code2 == TRUTH_AND_EXPR)
1810 {
1811 code = TRUTH_ANDIF_EXPR;
1812 op0 = gimple_assign_rhs1 (second);
1813 op1 = gimple_assign_rhs2 (second);
1814 }
1815 else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
1816 {
1817 code = TRUTH_ORIF_EXPR;
1818 op0 = gimple_assign_rhs1 (second);
1819 op1 = gimple_assign_rhs2 (second);
1820 }
1821 }
1822 }
1823 }
1824
1825 last2 = last = get_last_insn ();
1826
1827 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
1828 set_curr_insn_location (gimple_location (stmt));
1829
1830 /* These flags have no purpose in RTL land. */
1831 true_edge->flags &= ~EDGE_TRUE_VALUE;
1832 false_edge->flags &= ~EDGE_FALSE_VALUE;
1833
1834 /* We can either have a pure conditional jump with one fallthru edge or
1835 a two-way jump that needs to be decomposed into two basic blocks. */
1836 if (false_edge->dest == bb->next_bb)
1837 {
1838 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
1839 true_edge->probability);
1840 maybe_dump_rtl_for_gimple_stmt (stmt, last);
1841 if (true_edge->goto_locus != UNKNOWN_LOCATION)
1842 set_curr_insn_location (true_edge->goto_locus);
1843 false_edge->flags |= EDGE_FALLTHRU;
1844 maybe_cleanup_end_of_block (false_edge, last);
1845 return NULL;
1846 }
1847 if (true_edge->dest == bb->next_bb)
1848 {
1849 jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
1850 false_edge->probability);
1851 maybe_dump_rtl_for_gimple_stmt (stmt, last);
1852 if (false_edge->goto_locus != UNKNOWN_LOCATION)
1853 set_curr_insn_location (false_edge->goto_locus);
1854 true_edge->flags |= EDGE_FALLTHRU;
1855 maybe_cleanup_end_of_block (true_edge, last);
1856 return NULL;
1857 }
1858
1859 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
1860 true_edge->probability);
1861 last = get_last_insn ();
1862 if (false_edge->goto_locus != UNKNOWN_LOCATION)
1863 set_curr_insn_location (false_edge->goto_locus);
1864 emit_jump (label_rtx_for_bb (false_edge->dest));
1865
1866 BB_END (bb) = last;
1867 if (BARRIER_P (BB_END (bb)))
1868 BB_END (bb) = PREV_INSN (BB_END (bb));
1869 update_bb_for_insn (bb);
1870
1871 new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
1872 dest = false_edge->dest;
1873 redirect_edge_succ (false_edge, new_bb);
1874 false_edge->flags |= EDGE_FALLTHRU;
1875 new_bb->count = false_edge->count;
1876 new_bb->frequency = EDGE_FREQUENCY (false_edge);
1877 if (current_loops && bb->loop_father)
1878 add_bb_to_loop (new_bb, bb->loop_father);
1879 new_edge = make_edge (new_bb, dest, 0);
1880 new_edge->probability = REG_BR_PROB_BASE;
1881 new_edge->count = new_bb->count;
1882 if (BARRIER_P (BB_END (new_bb)))
1883 BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
1884 update_bb_for_insn (new_bb);
1885
1886 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
1887
1888 if (true_edge->goto_locus != UNKNOWN_LOCATION)
1889 {
1890 set_curr_insn_location (true_edge->goto_locus);
1891 true_edge->goto_locus = curr_insn_location ();
1892 }
1893
1894 return new_bb;
1895 }
1896
1897 /* Mark all calls that can have a transaction restart. */
1898
1899 static void
1900 mark_transaction_restart_calls (gimple stmt)
1901 {
1902 struct tm_restart_node dummy;
1903 void **slot;
1904
1905 if (!cfun->gimple_df->tm_restart)
1906 return;
1907
1908 dummy.stmt = stmt;
1909 slot = htab_find_slot (cfun->gimple_df->tm_restart, &dummy, NO_INSERT);
1910 if (slot)
1911 {
1912 struct tm_restart_node *n = (struct tm_restart_node *) *slot;
1913 tree list = n->label_or_list;
1914 rtx insn;
1915
1916 for (insn = next_real_insn (get_last_insn ());
1917 !CALL_P (insn);
1918 insn = next_real_insn (insn))
1919 continue;
1920
1921 if (TREE_CODE (list) == LABEL_DECL)
1922 add_reg_note (insn, REG_TM, label_rtx (list));
1923 else
1924 for (; list ; list = TREE_CHAIN (list))
1925 add_reg_note (insn, REG_TM, label_rtx (TREE_VALUE (list)));
1926 }
1927 }
1928
1929 /* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
1930 statement STMT. */
1931
1932 static void
1933 expand_call_stmt (gimple stmt)
1934 {
1935 tree exp, decl, lhs;
1936 bool builtin_p;
1937 size_t i;
1938
1939 if (gimple_call_internal_p (stmt))
1940 {
1941 expand_internal_call (stmt);
1942 return;
1943 }
1944
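  /* A CALL_EXPR reserves three operand slots ahead of the arguments:
     the operand count, the callee, and the static chain.  That is
     what the "+ 3" below accounts for.  */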
1945 exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);
1946
1947 CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
1948 decl = gimple_call_fndecl (stmt);
1949 builtin_p = decl && DECL_BUILT_IN (decl);
1950
1951 /* If this is not a builtin function, the function type through which the
1952 call is made may be different from the type of the function. */
1953 if (!builtin_p)
1954 CALL_EXPR_FN (exp)
1955 = fold_convert (build_pointer_type (gimple_call_fntype (stmt)),
1956 CALL_EXPR_FN (exp));
1957
1958 TREE_TYPE (exp) = gimple_call_return_type (stmt);
1959 CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);
1960
1961 for (i = 0; i < gimple_call_num_args (stmt); i++)
1962 {
1963 tree arg = gimple_call_arg (stmt, i);
1964 gimple def;
1965 /* TER substitutes addresses into arguments of builtin functions, giving
1966 us a chance to infer more accurate alignment information. See PR39954. */
1967 if (builtin_p
1968 && TREE_CODE (arg) == SSA_NAME
1969 && (def = get_gimple_for_ssa_name (arg))
1970 && gimple_assign_rhs_code (def) == ADDR_EXPR)
1971 arg = gimple_assign_rhs1 (def);
1972 CALL_EXPR_ARG (exp, i) = arg;
1973 }
1974
1975 if (gimple_has_side_effects (stmt))
1976 TREE_SIDE_EFFECTS (exp) = 1;
1977
1978 if (gimple_call_nothrow_p (stmt))
1979 TREE_NOTHROW (exp) = 1;
1980
1981 CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
1982 CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
1983 if (decl
1984 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
1985 && (DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA
1986 || DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA_WITH_ALIGN))
1987 CALL_ALLOCA_FOR_VAR_P (exp) = gimple_call_alloca_for_var_p (stmt);
1988 else
1989 CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
1990 CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
1991 SET_EXPR_LOCATION (exp, gimple_location (stmt));
1992
1993 /* Ensure RTL is created for debug args. */
1994 if (decl && DECL_HAS_DEBUG_ARGS_P (decl))
1995 {
1996 VEC(tree, gc) **debug_args = decl_debug_args_lookup (decl);
1997 unsigned int ix;
1998 tree dtemp;
1999
2000 if (debug_args)
2001 for (ix = 1; VEC_iterate (tree, *debug_args, ix, dtemp); ix += 2)
2002 {
2003 gcc_assert (TREE_CODE (dtemp) == DEBUG_EXPR_DECL);
2004 expand_debug_expr (dtemp);
2005 }
2006 }
2007
2008 lhs = gimple_call_lhs (stmt);
2009 if (lhs)
2010 expand_assignment (lhs, exp, false);
2011 else
2012 expand_expr_real_1 (exp, const0_rtx, VOIDmode, EXPAND_NORMAL, NULL);
2013
2014 mark_transaction_restart_calls (stmt);
2015 }
2016
2017 /* A subroutine of expand_gimple_stmt, expanding one gimple statement
2018 STMT that doesn't require special handling for outgoing edges. That
2019 is, no tailcalls and no GIMPLE_COND. */
2020
2021 static void
2022 expand_gimple_stmt_1 (gimple stmt)
2023 {
2024 tree op0;
2025
2026 set_curr_insn_location (gimple_location (stmt));
2027
2028 switch (gimple_code (stmt))
2029 {
2030 case GIMPLE_GOTO:
2031 op0 = gimple_goto_dest (stmt);
2032 if (TREE_CODE (op0) == LABEL_DECL)
2033 expand_goto (op0);
2034 else
2035 expand_computed_goto (op0);
2036 break;
2037 case GIMPLE_LABEL:
2038 expand_label (gimple_label_label (stmt));
2039 break;
2040 case GIMPLE_NOP:
2041 case GIMPLE_PREDICT:
2042 break;
2043 case GIMPLE_SWITCH:
2044 expand_case (stmt);
2045 break;
2046 case GIMPLE_ASM:
2047 expand_asm_stmt (stmt);
2048 break;
2049 case GIMPLE_CALL:
2050 expand_call_stmt (stmt);
2051 break;
2052
2053 case GIMPLE_RETURN:
2054 op0 = gimple_return_retval (stmt);
2055
2056 if (op0 && op0 != error_mark_node)
2057 {
2058 tree result = DECL_RESULT (current_function_decl);
2059
2060 /* If we are not returning the current function's RESULT_DECL,
2061 build an assignment to it. */
2062 if (op0 != result)
2063 {
2064 /* I believe that a function's RESULT_DECL is unique. */
2065 gcc_assert (TREE_CODE (op0) != RESULT_DECL);
2066
2067 /* ??? We'd like to use simply expand_assignment here,
2068 but this fails if the value is of BLKmode but the return
2069 decl is a register. expand_return has special handling
2070 for this combination, which eventually should move
2071 to common code. See comments there. Until then, let's
2072 build a modify expression :-/ */
2073 op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
2074 result, op0);
2075 }
2076 }
2077 if (!op0)
2078 expand_null_return ();
2079 else
2080 expand_return (op0);
2081 break;
2082
2083 case GIMPLE_ASSIGN:
2084 {
2085 tree lhs = gimple_assign_lhs (stmt);
2086
2087 /* Tree expand used to fiddle with |= and &= of two bitfield
2088 COMPONENT_REFs here. This can't happen with gimple, the LHS
2089 of binary assigns must be a gimple reg. */
2090
2091 if (TREE_CODE (lhs) != SSA_NAME
2092 || get_gimple_rhs_class (gimple_expr_code (stmt))
2093 == GIMPLE_SINGLE_RHS)
2094 {
2095 tree rhs = gimple_assign_rhs1 (stmt);
2096 gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
2097 == GIMPLE_SINGLE_RHS);
2098 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs))
2099 SET_EXPR_LOCATION (rhs, gimple_location (stmt));
2100 if (TREE_CLOBBER_P (rhs))
2101 /* This is a clobber marking the point at which this LHS goes
2102 out of scope. */
2103 ;
2104 else
2105 expand_assignment (lhs, rhs,
2106 gimple_assign_nontemporal_move_p (stmt));
2107 }
2108 else
2109 {
2110 rtx target, temp;
2111 bool nontemporal = gimple_assign_nontemporal_move_p (stmt);
2112 struct separate_ops ops;
2113 bool promoted = false;
2114
2115 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
2116 if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
2117 promoted = true;
2118
2119 ops.code = gimple_assign_rhs_code (stmt);
2120 ops.type = TREE_TYPE (lhs);
2121 switch (get_gimple_rhs_class (gimple_expr_code (stmt)))
2122 {
2123 case GIMPLE_TERNARY_RHS:
2124 ops.op2 = gimple_assign_rhs3 (stmt);
2125 /* Fallthru */
2126 case GIMPLE_BINARY_RHS:
2127 ops.op1 = gimple_assign_rhs2 (stmt);
2128 /* Fallthru */
2129 case GIMPLE_UNARY_RHS:
2130 ops.op0 = gimple_assign_rhs1 (stmt);
2131 break;
2132 default:
2133 gcc_unreachable ();
2134 }
2135 ops.location = gimple_location (stmt);
2136
2137 /* If we want to use a nontemporal store, force the value into a
2138 register first. If we store into a promoted register,
2139 don't expand directly to the target. */
2140 temp = nontemporal || promoted ? NULL_RTX : target;
2141 temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
2142 EXPAND_NORMAL);
2143
2144 if (temp == target)
2145 ;
2146 else if (promoted)
2147 {
2148 int unsignedp = SUBREG_PROMOTED_UNSIGNED_P (target);
2149 /* If TEMP is a VOIDmode constant, use convert_modes to make
2150 sure that we properly convert it. */
2151 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
2152 {
2153 temp = convert_modes (GET_MODE (target),
2154 TYPE_MODE (ops.type),
2155 temp, unsignedp);
2156 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
2157 GET_MODE (target), temp, unsignedp);
2158 }
2159
2160 convert_move (SUBREG_REG (target), temp, unsignedp);
2161 }
2162 else if (nontemporal && emit_storent_insn (target, temp))
2163 ;
2164 else
2165 {
2166 temp = force_operand (temp, target);
2167 if (temp != target)
2168 emit_move_insn (target, temp);
2169 }
2170 }
2171 }
2172 break;
2173
2174 default:
2175 gcc_unreachable ();
2176 }
2177 }
2178
2179 /* Expand one gimple statement STMT and return the last RTL instruction
2180 before any of the newly generated ones.
2181
2182 In addition to generating the necessary RTL instructions this also
2183 sets REG_EH_REGION notes if necessary and sets the current source
2184 location for diagnostics. */
2185
2186 static rtx
2187 expand_gimple_stmt (gimple stmt)
2188 {
2189 location_t saved_location = input_location;
2190 rtx last = get_last_insn ();
2191 int lp_nr;
2192
2193 gcc_assert (cfun);
2194
2195 /* We need to save and restore the current source location so that errors
2196 discovered during expansion are emitted with the right location. But
2197 it would be better if the diagnostic routines used the source location
2198 embedded in the tree nodes rather than globals. */
2199 if (gimple_has_location (stmt))
2200 input_location = gimple_location (stmt);
2201
2202 expand_gimple_stmt_1 (stmt);
2203
2204 /* Free any temporaries used to evaluate this statement. */
2205 free_temp_slots ();
2206
2207 input_location = saved_location;
2208
2209 /* Mark all insns that may trap. */
2210 lp_nr = lookup_stmt_eh_lp (stmt);
2211 if (lp_nr)
2212 {
2213 rtx insn;
2214 for (insn = next_real_insn (last); insn;
2215 insn = next_real_insn (insn))
2216 {
2217 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
2218 /* If we want exceptions for non-call insns, any
2219 may_trap_p instruction may throw. */
2220 && GET_CODE (PATTERN (insn)) != CLOBBER
2221 && GET_CODE (PATTERN (insn)) != USE
2222 && insn_could_throw_p (insn))
2223 make_reg_eh_region_note (insn, 0, lp_nr);
2224 }
2225 }
2226
2227 return last;
2228 }
2229
2230 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL
2231 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
2232 generated a tail call (something that might be denied by the ABI
2233 rules governing the call; see calls.c).
2234
2235 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
2236 can still reach the rest of BB. The case here is __builtin_sqrt,
2237 where the NaN result goes through the external function (with a
2238 tailcall) and the normal result happens via a sqrt instruction. */
2239
2240 static basic_block
2241 expand_gimple_tailcall (basic_block bb, gimple stmt, bool *can_fallthru)
2242 {
2243 rtx last2, last;
2244 edge e;
2245 edge_iterator ei;
2246 int probability;
2247 gcov_type count;
2248
2249 last2 = last = expand_gimple_stmt (stmt);
2250
2251 for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
2252 if (CALL_P (last) && SIBLING_CALL_P (last))
2253 goto found;
2254
2255 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2256
2257 *can_fallthru = true;
2258 return NULL;
2259
2260 found:
2261 /* ??? Wouldn't it be better to just reset any pending stack adjust?
2262 Any instructions emitted here are about to be deleted. */
2263 do_pending_stack_adjust ();
2264
2265 /* Remove any non-eh, non-abnormal edges that don't go to exit. */
2266 /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be
2267 EH or abnormal edges, we shouldn't have created a tail call in
2268 the first place. So it seems to me we should just be removing
2269 all edges here, or redirecting the existing fallthru edge to
2270 the exit block. */
2271
2272 probability = 0;
2273 count = 0;
2274
2275 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2276 {
2277 if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
2278 {
2279 if (e->dest != EXIT_BLOCK_PTR)
2280 {
2281 e->dest->count -= e->count;
2282 e->dest->frequency -= EDGE_FREQUENCY (e);
2283 if (e->dest->count < 0)
2284 e->dest->count = 0;
2285 if (e->dest->frequency < 0)
2286 e->dest->frequency = 0;
2287 }
2288 count += e->count;
2289 probability += e->probability;
2290 remove_edge (e);
2291 }
2292 else
2293 ei_next (&ei);
2294 }
2295
2296 /* This is somewhat ugly: the call_expr expander often emits instructions
2297 after the sibcall (to perform the function return). These confuse the
2298 find_many_sub_basic_blocks code, so we need to get rid of them. */
2299 last = NEXT_INSN (last);
2300 gcc_assert (BARRIER_P (last));
2301
2302 *can_fallthru = false;
2303 while (NEXT_INSN (last))
2304 {
2305 /* For instance, the sqrt builtin expander expands an if with a
2306 sibcall in the then-branch and a label for the else-branch. */
2307 if (LABEL_P (NEXT_INSN (last)))
2308 {
2309 *can_fallthru = true;
2310 break;
2311 }
2312 delete_insn (NEXT_INSN (last));
2313 }
2314
2315 e = make_edge (bb, EXIT_BLOCK_PTR, EDGE_ABNORMAL | EDGE_SIBCALL);
2316 e->probability += probability;
2317 e->count += count;
2318 BB_END (bb) = last;
2319 update_bb_for_insn (bb);
2320
2321 if (NEXT_INSN (last))
2322 {
2323 bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2324
2325 last = BB_END (bb);
2326 if (BARRIER_P (last))
2327 BB_END (bb) = PREV_INSN (last);
2328 }
2329
2330 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2331
2332 return bb;
2333 }
2334
2335 /* Return the difference between the floor and the truncated result of
2336 a signed division by OP1 with remainder MOD. */
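/* E.g. for -7 / 2 truncation yields -3 with remainder -1, while the
   floor result is -4, so the adjustment below evaluates to -1; it is
   0 whenever the remainder is zero or both operands have the same
   sign.  */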
2337 static rtx
2338 floor_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2339 {
2340 /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
2341 return gen_rtx_IF_THEN_ELSE
2342 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2343 gen_rtx_IF_THEN_ELSE
2344 (mode, gen_rtx_LT (BImode,
2345 gen_rtx_DIV (mode, op1, mod),
2346 const0_rtx),
2347 constm1_rtx, const0_rtx),
2348 const0_rtx);
2349 }
2350
2351 /* Return the difference between the ceil and the truncated result of
2352 a signed division by OP1 with remainder MOD. */
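/* E.g. for 7 / 2 truncation yields 3 with remainder 1, while the
   ceiling result is 4, so the adjustment below evaluates to 1; for
   -7 / 2 the truncated quotient already is the ceiling and the
   adjustment is 0.  */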
2353 static rtx
2354 ceil_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2355 {
2356 /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
2357 return gen_rtx_IF_THEN_ELSE
2358 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2359 gen_rtx_IF_THEN_ELSE
2360 (mode, gen_rtx_GT (BImode,
2361 gen_rtx_DIV (mode, op1, mod),
2362 const0_rtx),
2363 const1_rtx, const0_rtx),
2364 const0_rtx);
2365 }
2366
2367 /* Return the difference between the ceil and the truncated result of
2368 an unsigned division by OP1 with remainder MOD. */
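/* E.g. for 7u / 2u truncation yields 3 with remainder 1, while the
   ceiling result is 4; any nonzero remainder therefore yields an
   adjustment of 1.  */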
2369 static rtx
2370 ceil_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
2371 {
2372 /* (mod != 0 ? 1 : 0) */
2373 return gen_rtx_IF_THEN_ELSE
2374 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2375 const1_rtx, const0_rtx);
2376 }
2377
2378 /* Return the difference between the rounded and the truncated result
2379 of a signed division by OP1 with remainder MOD. Halfway cases are
2380 rounded away from zero, rather than to the nearest even number. */
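/* E.g. for 5 / 2 truncation yields 2 with remainder 1; since
   abs (1) >= abs (2) - abs (1), this halfway case is bumped away from
   zero and the adjustment below evaluates to 1, giving 3.  For -5 / 2
   it evaluates to -1, giving -3.  */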
2381 static rtx
2382 round_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2383 {
2384 /* (abs (mod) >= abs (op1) - abs (mod)
2385 ? (op1 / mod > 0 ? 1 : -1)
2386 : 0) */
2387 return gen_rtx_IF_THEN_ELSE
2388 (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
2389 gen_rtx_MINUS (mode,
2390 gen_rtx_ABS (mode, op1),
2391 gen_rtx_ABS (mode, mod))),
2392 gen_rtx_IF_THEN_ELSE
2393 (mode, gen_rtx_GT (BImode,
2394 gen_rtx_DIV (mode, op1, mod),
2395 const0_rtx),
2396 const1_rtx, constm1_rtx),
2397 const0_rtx);
2398 }
2399
2400 /* Return the difference between the rounded and the truncated result
2401 of an unsigned division by OP1 with remainder MOD. Halfway cases
2402 are rounded away from zero, rather than to the nearest even
2403 number. */
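/* E.g. for 5u / 2u truncation yields 2 with remainder 1; since
   1 >= 2 - 1 the adjustment below is 1, giving the rounded result 3,
   whereas for 7u / 3u the remainder 1 is below 3 - 1 and the
   adjustment is 0.  */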
2404 static rtx
2405 round_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2406 {
2407 /* (mod >= op1 - mod ? 1 : 0) */
2408 return gen_rtx_IF_THEN_ELSE
2409 (mode, gen_rtx_GE (BImode, mod,
2410 gen_rtx_MINUS (mode, op1, mod)),
2411 const1_rtx, const0_rtx);
2412 }
2413
2414 /* Convert X to MODE, which must be Pmode or ptr_mode, without emitting
2415 any rtl. */
2416
2417 static rtx
2418 convert_debug_memory_address (enum machine_mode mode, rtx x,
2419 addr_space_t as)
2420 {
2421 enum machine_mode xmode = GET_MODE (x);
2422
2423 #ifndef POINTERS_EXTEND_UNSIGNED
2424 gcc_assert (mode == Pmode
2425 || mode == targetm.addr_space.address_mode (as));
2426 gcc_assert (xmode == mode || xmode == VOIDmode);
2427 #else
2428 rtx temp;
2429
2430 gcc_assert (targetm.addr_space.valid_pointer_mode (mode, as));
2431
2432 if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
2433 return x;
2434
2435 if (GET_MODE_PRECISION (mode) < GET_MODE_PRECISION (xmode))
2436 x = simplify_gen_subreg (mode, x, xmode,
2437 subreg_lowpart_offset
2438 (mode, xmode));
2439 else if (POINTERS_EXTEND_UNSIGNED > 0)
2440 x = gen_rtx_ZERO_EXTEND (mode, x);
2441 else if (!POINTERS_EXTEND_UNSIGNED)
2442 x = gen_rtx_SIGN_EXTEND (mode, x);
2443 else
2444 {
2445 switch (GET_CODE (x))
2446 {
2447 case SUBREG:
2448 if ((SUBREG_PROMOTED_VAR_P (x)
2449 || (REG_P (SUBREG_REG (x)) && REG_POINTER (SUBREG_REG (x)))
2450 || (GET_CODE (SUBREG_REG (x)) == PLUS
2451 && REG_P (XEXP (SUBREG_REG (x), 0))
2452 && REG_POINTER (XEXP (SUBREG_REG (x), 0))
2453 && CONST_INT_P (XEXP (SUBREG_REG (x), 1))))
2454 && GET_MODE (SUBREG_REG (x)) == mode)
2455 return SUBREG_REG (x);
2456 break;
2457 case LABEL_REF:
2458 temp = gen_rtx_LABEL_REF (mode, XEXP (x, 0));
2459 LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
2460 return temp;
2461 case SYMBOL_REF:
2462 temp = shallow_copy_rtx (x);
2463 PUT_MODE (temp, mode);
2464 return temp;
2465 case CONST:
2466 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
2467 if (temp)
2468 temp = gen_rtx_CONST (mode, temp);
2469 return temp;
2470 case PLUS:
2471 case MINUS:
2472 if (CONST_INT_P (XEXP (x, 1)))
2473 {
2474 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
2475 if (temp)
2476 return gen_rtx_fmt_ee (GET_CODE (x), mode, temp, XEXP (x, 1));
2477 }
2478 break;
2479 default:
2480 break;
2481 }
2482 /* Don't know how to express ptr_extend as an operation in debug info. */
2483 return NULL;
2484 }
2485 #endif /* POINTERS_EXTEND_UNSIGNED */
2486
2487 return x;
2488 }
2489
2490 /* Return an RTX equivalent to the value of the parameter DECL. */
2491
2492 static rtx
2493 expand_debug_parm_decl (tree decl)
2494 {
2495 rtx incoming = DECL_INCOMING_RTL (decl);
2496
2497 if (incoming
2498 && GET_MODE (incoming) != BLKmode
2499 && ((REG_P (incoming) && HARD_REGISTER_P (incoming))
2500 || (MEM_P (incoming)
2501 && REG_P (XEXP (incoming, 0))
2502 && HARD_REGISTER_P (XEXP (incoming, 0)))))
2503 {
2504 rtx rtl = gen_rtx_ENTRY_VALUE (GET_MODE (incoming));
2505
2506 #ifdef HAVE_window_save
2507 /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
2508 If the target machine has an explicit window save instruction, the
2509 actual entry value is the corresponding OUTGOING_REGNO instead. */
2510 if (REG_P (incoming)
2511 && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
2512 incoming
2513 = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
2514 OUTGOING_REGNO (REGNO (incoming)), 0);
2515 else if (MEM_P (incoming))
2516 {
2517 rtx reg = XEXP (incoming, 0);
2518 if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
2519 {
2520 reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
2521 incoming = replace_equiv_address_nv (incoming, reg);
2522 }
2523 }
2524 #endif
2525
2526 ENTRY_VALUE_EXP (rtl) = incoming;
2527 return rtl;
2528 }
2529
2530 if (incoming
2531 && GET_MODE (incoming) != BLKmode
2532 && !TREE_ADDRESSABLE (decl)
2533 && MEM_P (incoming)
2534 && (XEXP (incoming, 0) == virtual_incoming_args_rtx
2535 || (GET_CODE (XEXP (incoming, 0)) == PLUS
2536 && XEXP (XEXP (incoming, 0), 0) == virtual_incoming_args_rtx
2537 && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
2538 return incoming;
2539
2540 return NULL_RTX;
2541 }
2542
2543 /* Return an RTX equivalent to the value of the tree expression EXP. */
2544
2545 static rtx
2546 expand_debug_expr (tree exp)
2547 {
2548 rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
2549 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
2550 enum machine_mode inner_mode = VOIDmode;
2551 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
2552 addr_space_t as;
2553
2554 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
2555 {
2556 case tcc_expression:
2557 switch (TREE_CODE (exp))
2558 {
2559 case COND_EXPR:
2560 case DOT_PROD_EXPR:
2561 case WIDEN_MULT_PLUS_EXPR:
2562 case WIDEN_MULT_MINUS_EXPR:
2563 case FMA_EXPR:
2564 goto ternary;
2565
2566 case TRUTH_ANDIF_EXPR:
2567 case TRUTH_ORIF_EXPR:
2568 case TRUTH_AND_EXPR:
2569 case TRUTH_OR_EXPR:
2570 case TRUTH_XOR_EXPR:
2571 goto binary;
2572
2573 case TRUTH_NOT_EXPR:
2574 goto unary;
2575
2576 default:
2577 break;
2578 }
2579 break;
2580
2581 ternary:
2582 op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
2583 if (!op2)
2584 return NULL_RTX;
2585 /* Fall through. */
2586
2587 binary:
2588 case tcc_binary:
2589 case tcc_comparison:
2590 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
2591 if (!op1)
2592 return NULL_RTX;
2593 /* Fall through. */
2594
2595 unary:
2596 case tcc_unary:
2597 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
2598 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2599 if (!op0)
2600 return NULL_RTX;
2601 break;
2602
2603 case tcc_type:
2604 case tcc_statement:
2605 gcc_unreachable ();
2606
2607 case tcc_constant:
2608 case tcc_exceptional:
2609 case tcc_declaration:
2610 case tcc_reference:
2611 case tcc_vl_exp:
2612 break;
2613 }
2614
2615 switch (TREE_CODE (exp))
2616 {
2617 case STRING_CST:
2618 if (!lookup_constant_def (exp))
2619 {
2620 if (strlen (TREE_STRING_POINTER (exp)) + 1
2621 != (size_t) TREE_STRING_LENGTH (exp))
2622 return NULL_RTX;
2623 op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
2624 op0 = gen_rtx_MEM (BLKmode, op0);
2625 set_mem_attributes (op0, exp, 0);
2626 return op0;
2627 }
2628 /* Fall through... */
2629
2630 case INTEGER_CST:
2631 case REAL_CST:
2632 case FIXED_CST:
2633 op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
2634 return op0;
2635
2636 case COMPLEX_CST:
2637 gcc_assert (COMPLEX_MODE_P (mode));
2638 op0 = expand_debug_expr (TREE_REALPART (exp));
2639 op1 = expand_debug_expr (TREE_IMAGPART (exp));
2640 return gen_rtx_CONCAT (mode, op0, op1);
2641
2642 case DEBUG_EXPR_DECL:
2643 op0 = DECL_RTL_IF_SET (exp);
2644
2645 if (op0)
2646 return op0;
2647
2648 op0 = gen_rtx_DEBUG_EXPR (mode);
2649 DEBUG_EXPR_TREE_DECL (op0) = exp;
2650 SET_DECL_RTL (exp, op0);
2651
2652 return op0;
2653
2654 case VAR_DECL:
2655 case PARM_DECL:
2656 case FUNCTION_DECL:
2657 case LABEL_DECL:
2658 case CONST_DECL:
2659 case RESULT_DECL:
2660 op0 = DECL_RTL_IF_SET (exp);
2661
2662 /* This decl was probably optimized away. */
2663 if (!op0)
2664 {
2665 if (TREE_CODE (exp) != VAR_DECL
2666 || DECL_EXTERNAL (exp)
2667 || !TREE_STATIC (exp)
2668 || !DECL_NAME (exp)
2669 || DECL_HARD_REGISTER (exp)
2670 || DECL_IN_CONSTANT_POOL (exp)
2671 || mode == VOIDmode)
2672 return NULL;
2673
2674 op0 = make_decl_rtl_for_debug (exp);
2675 if (!MEM_P (op0)
2676 || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
2677 || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
2678 return NULL;
2679 }
2680 else
2681 op0 = copy_rtx (op0);
2682
2683 if (GET_MODE (op0) == BLKmode
2684 /* If op0 is not BLKmode but MODE is BLKmode, adjust_mode
2685 below would ICE. While it is likely a FE bug,
2686 try to be robust here. See PR43166. */
2687 || mode == BLKmode
2688 || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
2689 {
2690 gcc_assert (MEM_P (op0));
2691 op0 = adjust_address_nv (op0, mode, 0);
2692 return op0;
2693 }
2694
2695 /* Fall through. */
2696
2697 adjust_mode:
2698 case PAREN_EXPR:
2699 case NOP_EXPR:
2700 case CONVERT_EXPR:
2701 {
2702 inner_mode = GET_MODE (op0);
2703
2704 if (mode == inner_mode)
2705 return op0;
2706
2707 if (inner_mode == VOIDmode)
2708 {
2709 if (TREE_CODE (exp) == SSA_NAME)
2710 inner_mode = TYPE_MODE (TREE_TYPE (exp));
2711 else
2712 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
2713 if (mode == inner_mode)
2714 return op0;
2715 }
2716
2717 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
2718 {
2719 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
2720 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
2721 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
2722 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
2723 else
2724 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
2725 }
2726 else if (FLOAT_MODE_P (mode))
2727 {
2728 gcc_assert (TREE_CODE (exp) != SSA_NAME);
2729 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
2730 op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
2731 else
2732 op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
2733 }
2734 else if (FLOAT_MODE_P (inner_mode))
2735 {
2736 if (unsignedp)
2737 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
2738 else
2739 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
2740 }
2741 else if (CONSTANT_P (op0)
2742 || GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (inner_mode))
2743 op0 = simplify_gen_subreg (mode, op0, inner_mode,
2744 subreg_lowpart_offset (mode,
2745 inner_mode));
2746 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == tcc_unary
2747 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
2748 : unsignedp)
2749 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
2750 else
2751 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
2752
2753 return op0;
2754 }
2755
2756 case MEM_REF:
2757 if (!is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
2758 {
2759 tree newexp = fold_binary (MEM_REF, TREE_TYPE (exp),
2760 TREE_OPERAND (exp, 0),
2761 TREE_OPERAND (exp, 1));
2762 if (newexp)
2763 return expand_debug_expr (newexp);
2764 }
2765 /* FALLTHROUGH */
2766 case INDIRECT_REF:
2767 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
2768 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2769 if (!op0)
2770 return NULL;
2771
2772 if (TREE_CODE (exp) == MEM_REF)
2773 {
2774 if (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
2775 || (GET_CODE (op0) == PLUS
2776 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR))
2777 /* (mem (debug_implicit_ptr)) might confuse aliasing.
2778 Instead just use get_inner_reference. */
2779 goto component_ref;
2780
2781 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
2782 if (!op1 || !CONST_INT_P (op1))
2783 return NULL;
2784
2785 op0 = plus_constant (inner_mode, op0, INTVAL (op1));
2786 }
2787
2788 if (POINTER_TYPE_P (TREE_TYPE (exp)))
2789 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
2790 else
2791 as = ADDR_SPACE_GENERIC;
2792
2793 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
2794 op0, as);
2795 if (op0 == NULL_RTX)
2796 return NULL;
2797
2798 op0 = gen_rtx_MEM (mode, op0);
2799 set_mem_attributes (op0, exp, 0);
2800 if (TREE_CODE (exp) == MEM_REF
2801 && !is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
2802 set_mem_expr (op0, NULL_TREE);
2803 set_mem_addr_space (op0, as);
2804
2805 return op0;
2806
2807 case TARGET_MEM_REF:
2808 if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
2809 && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp), 0)))
2810 return NULL;
2811
2812 op0 = expand_debug_expr
2813 (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
2814 if (!op0)
2815 return NULL;
2816
2817 if (POINTER_TYPE_P (TREE_TYPE (exp)))
2818 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
2819 else
2820 as = ADDR_SPACE_GENERIC;
2821
2822 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
2823 op0, as);
2824 if (op0 == NULL_RTX)
2825 return NULL;
2826
2827 op0 = gen_rtx_MEM (mode, op0);
2828
2829 set_mem_attributes (op0, exp, 0);
2830 set_mem_addr_space (op0, as);
2831
2832 return op0;
2833
2834 component_ref:
2835 case ARRAY_REF:
2836 case ARRAY_RANGE_REF:
2837 case COMPONENT_REF:
2838 case BIT_FIELD_REF:
2839 case REALPART_EXPR:
2840 case IMAGPART_EXPR:
2841 case VIEW_CONVERT_EXPR:
2842 {
2843 enum machine_mode mode1;
2844 HOST_WIDE_INT bitsize, bitpos;
2845 tree offset;
2846 int volatilep = 0;
2847 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
2848 &mode1, &unsignedp, &volatilep, false);
2849 rtx orig_op0;
2850
2851 if (bitsize == 0)
2852 return NULL;
2853
2854 orig_op0 = op0 = expand_debug_expr (tem);
2855
2856 if (!op0)
2857 return NULL;
2858
2859 if (offset)
2860 {
2861 enum machine_mode addrmode, offmode;
2862
2863 if (!MEM_P (op0))
2864 return NULL;
2865
2866 op0 = XEXP (op0, 0);
2867 addrmode = GET_MODE (op0);
2868 if (addrmode == VOIDmode)
2869 addrmode = Pmode;
2870
2871 op1 = expand_debug_expr (offset);
2872 if (!op1)
2873 return NULL;
2874
2875 offmode = GET_MODE (op1);
2876 if (offmode == VOIDmode)
2877 offmode = TYPE_MODE (TREE_TYPE (offset));
2878
2879 if (addrmode != offmode)
2880 op1 = simplify_gen_subreg (addrmode, op1, offmode,
2881 subreg_lowpart_offset (addrmode,
2882 offmode));
2883
2884 /* Don't use offset_address here; we don't need a
2885 recognizable address, and we don't want to generate
2886 code. */
2887 op0 = gen_rtx_MEM (mode, simplify_gen_binary (PLUS, addrmode,
2888 op0, op1));
2889 }
2890
2891 if (MEM_P (op0))
2892 {
2893 if (mode1 == VOIDmode)
2894 /* Bitfield. */
2895 mode1 = smallest_mode_for_size (bitsize, MODE_INT);
2896 if (bitpos >= BITS_PER_UNIT)
2897 {
2898 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
2899 bitpos %= BITS_PER_UNIT;
2900 }
2901 else if (bitpos < 0)
2902 {
2903 HOST_WIDE_INT units
2904 = (-bitpos + BITS_PER_UNIT - 1) / BITS_PER_UNIT;
2905 op0 = adjust_address_nv (op0, mode1, units);
2906 bitpos += units * BITS_PER_UNIT;
2907 }
2908 else if (bitpos == 0 && bitsize == GET_MODE_BITSIZE (mode))
2909 op0 = adjust_address_nv (op0, mode, 0);
2910 else if (GET_MODE (op0) != mode1)
2911 op0 = adjust_address_nv (op0, mode1, 0);
2912 else
2913 op0 = copy_rtx (op0);
2914 if (op0 == orig_op0)
2915 op0 = shallow_copy_rtx (op0);
2916 set_mem_attributes (op0, exp, 0);
2917 }
2918
2919 if (bitpos == 0 && mode == GET_MODE (op0))
2920 return op0;
2921
2922 if (bitpos < 0)
2923 return NULL;
2924
2925 if (GET_MODE (op0) == BLKmode)
2926 return NULL;
2927
2928 if ((bitpos % BITS_PER_UNIT) == 0
2929 && bitsize == GET_MODE_BITSIZE (mode1))
2930 {
2931 enum machine_mode opmode = GET_MODE (op0);
2932
2933 if (opmode == VOIDmode)
2934 opmode = TYPE_MODE (TREE_TYPE (tem));
2935
2936 /* This condition may hold if we're expanding the address
2937 right past the end of an array that turned out not to
2938 be addressable (i.e., the address was only computed in
2939 debug stmts). The gen_subreg below would rightfully
2940 crash, and the address doesn't really exist, so just
2941 drop it. */
2942 if (bitpos >= GET_MODE_BITSIZE (opmode))
2943 return NULL;
2944
2945 if ((bitpos % GET_MODE_BITSIZE (mode)) == 0)
2946 return simplify_gen_subreg (mode, op0, opmode,
2947 bitpos / BITS_PER_UNIT);
2948 }
2949
2950 return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
2951 && TYPE_UNSIGNED (TREE_TYPE (exp))
2952 ? SIGN_EXTRACT
2953 : ZERO_EXTRACT, mode,
2954 GET_MODE (op0) != VOIDmode
2955 ? GET_MODE (op0)
2956 : TYPE_MODE (TREE_TYPE (tem)),
2957 op0, GEN_INT (bitsize), GEN_INT (bitpos));
2958 }
2959
2960 case ABS_EXPR:
2961 return simplify_gen_unary (ABS, mode, op0, mode);
2962
2963 case NEGATE_EXPR:
2964 return simplify_gen_unary (NEG, mode, op0, mode);
2965
2966 case BIT_NOT_EXPR:
2967 return simplify_gen_unary (NOT, mode, op0, mode);
2968
2969 case FLOAT_EXPR:
2970 return simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
2971 0)))
2972 ? UNSIGNED_FLOAT : FLOAT, mode, op0,
2973 inner_mode);
2974
2975 case FIX_TRUNC_EXPR:
2976 return simplify_gen_unary (unsignedp ? UNSIGNED_FIX : FIX, mode, op0,
2977 inner_mode);
2978
2979 case POINTER_PLUS_EXPR:
2980 /* For the rare target where pointers are not the same size as
2981 size_t, we need to check for mis-matched modes and correct
2982 the addend. */
2983 if (op0 && op1
2984 && GET_MODE (op0) != VOIDmode && GET_MODE (op1) != VOIDmode
2985 && GET_MODE (op0) != GET_MODE (op1))
2986 {
2987 if (GET_MODE_BITSIZE (GET_MODE (op0)) < GET_MODE_BITSIZE (GET_MODE (op1)))
2988 op1 = simplify_gen_unary (TRUNCATE, GET_MODE (op0), op1,
2989 GET_MODE (op1));
2990 else
2991 /* We always sign-extend, regardless of the signedness of
2992 the operand, because the operand is always unsigned
2993 here even if the original C expression is signed. */
2994 op1 = simplify_gen_unary (SIGN_EXTEND, GET_MODE (op0), op1,
2995 GET_MODE (op1));
2996 }
2997 /* Fall through. */
2998 case PLUS_EXPR:
2999 return simplify_gen_binary (PLUS, mode, op0, op1);
3000
3001 case MINUS_EXPR:
3002 return simplify_gen_binary (MINUS, mode, op0, op1);
3003
3004 case MULT_EXPR:
3005 return simplify_gen_binary (MULT, mode, op0, op1);
3006
3007 case RDIV_EXPR:
3008 case TRUNC_DIV_EXPR:
3009 case EXACT_DIV_EXPR:
3010 if (unsignedp)
3011 return simplify_gen_binary (UDIV, mode, op0, op1);
3012 else
3013 return simplify_gen_binary (DIV, mode, op0, op1);
3014
3015 case TRUNC_MOD_EXPR:
3016 return simplify_gen_binary (unsignedp ? UMOD : MOD, mode, op0, op1);
3017
3018 case FLOOR_DIV_EXPR:
3019 if (unsignedp)
3020 return simplify_gen_binary (UDIV, mode, op0, op1);
3021 else
3022 {
3023 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
3024 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3025 rtx adj = floor_sdiv_adjust (mode, mod, op1);
3026 return simplify_gen_binary (PLUS, mode, div, adj);
3027 }
3028
3029 case FLOOR_MOD_EXPR:
3030 if (unsignedp)
3031 return simplify_gen_binary (UMOD, mode, op0, op1);
3032 else
3033 {
3034 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3035 rtx adj = floor_sdiv_adjust (mode, mod, op1);
3036 adj = simplify_gen_unary (NEG, mode,
3037 simplify_gen_binary (MULT, mode, adj, op1),
3038 mode);
3039 return simplify_gen_binary (PLUS, mode, mod, adj);
3040 }
3041
3042 case CEIL_DIV_EXPR:
3043 if (unsignedp)
3044 {
3045 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
3046 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
3047 rtx adj = ceil_udiv_adjust (mode, mod, op1);
3048 return simplify_gen_binary (PLUS, mode, div, adj);
3049 }
3050 else
3051 {
3052 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
3053 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3054 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
3055 return simplify_gen_binary (PLUS, mode, div, adj);
3056 }
3057
3058 case CEIL_MOD_EXPR:
3059 if (unsignedp)
3060 {
3061 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
3062 rtx adj = ceil_udiv_adjust (mode, mod, op1);
3063 adj = simplify_gen_unary (NEG, mode,
3064 simplify_gen_binary (MULT, mode, adj, op1),
3065 mode);
3066 return simplify_gen_binary (PLUS, mode, mod, adj);
3067 }
3068 else
3069 {
3070 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3071 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
3072 adj = simplify_gen_unary (NEG, mode,
3073 simplify_gen_binary (MULT, mode, adj, op1),
3074 mode);
3075 return simplify_gen_binary (PLUS, mode, mod, adj);
3076 }
3077
3078 case ROUND_DIV_EXPR:
3079 if (unsignedp)
3080 {
3081 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
3082 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
3083 rtx adj = round_udiv_adjust (mode, mod, op1);
3084 return simplify_gen_binary (PLUS, mode, div, adj);
3085 }
3086 else
3087 {
3088 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
3089 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3090 rtx adj = round_sdiv_adjust (mode, mod, op1);
3091 return simplify_gen_binary (PLUS, mode, div, adj);
3092 }
3093
3094 case ROUND_MOD_EXPR:
3095 if (unsignedp)
3096 {
3097 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
3098 rtx adj = round_udiv_adjust (mode, mod, op1);
3099 adj = simplify_gen_unary (NEG, mode,
3100 simplify_gen_binary (MULT, mode, adj, op1),
3101 mode);
3102 return simplify_gen_binary (PLUS, mode, mod, adj);
3103 }
3104 else
3105 {
3106 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3107 rtx adj = round_sdiv_adjust (mode, mod, op1);
3108 adj = simplify_gen_unary (NEG, mode,
3109 simplify_gen_binary (MULT, mode, adj, op1),
3110 mode);
3111 return simplify_gen_binary (PLUS, mode, mod, adj);
3112 }
3113
3114 case LSHIFT_EXPR:
3115 return simplify_gen_binary (ASHIFT, mode, op0, op1);
3116
3117 case RSHIFT_EXPR:
3118 if (unsignedp)
3119 return simplify_gen_binary (LSHIFTRT, mode, op0, op1);
3120 else
3121 return simplify_gen_binary (ASHIFTRT, mode, op0, op1);
3122
3123 case LROTATE_EXPR:
3124 return simplify_gen_binary (ROTATE, mode, op0, op1);
3125
3126 case RROTATE_EXPR:
3127 return simplify_gen_binary (ROTATERT, mode, op0, op1);
3128
3129 case MIN_EXPR:
3130 return simplify_gen_binary (unsignedp ? UMIN : SMIN, mode, op0, op1);
3131
3132 case MAX_EXPR:
3133 return simplify_gen_binary (unsignedp ? UMAX : SMAX, mode, op0, op1);
3134
3135 case BIT_AND_EXPR:
3136 case TRUTH_AND_EXPR:
3137 return simplify_gen_binary (AND, mode, op0, op1);
3138
3139 case BIT_IOR_EXPR:
3140 case TRUTH_OR_EXPR:
3141 return simplify_gen_binary (IOR, mode, op0, op1);
3142
3143 case BIT_XOR_EXPR:
3144 case TRUTH_XOR_EXPR:
3145 return simplify_gen_binary (XOR, mode, op0, op1);
3146
3147 case TRUTH_ANDIF_EXPR:
3148 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
3149
3150 case TRUTH_ORIF_EXPR:
3151 return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
3152
3153 case TRUTH_NOT_EXPR:
3154 return simplify_gen_relational (EQ, mode, inner_mode, op0, const0_rtx);
3155
3156 case LT_EXPR:
3157 return simplify_gen_relational (unsignedp ? LTU : LT, mode, inner_mode,
3158 op0, op1);
3159
3160 case LE_EXPR:
3161 return simplify_gen_relational (unsignedp ? LEU : LE, mode, inner_mode,
3162 op0, op1);
3163
3164 case GT_EXPR:
3165 return simplify_gen_relational (unsignedp ? GTU : GT, mode, inner_mode,
3166 op0, op1);
3167
3168 case GE_EXPR:
3169 return simplify_gen_relational (unsignedp ? GEU : GE, mode, inner_mode,
3170 op0, op1);
3171
3172 case EQ_EXPR:
3173 return simplify_gen_relational (EQ, mode, inner_mode, op0, op1);
3174
3175 case NE_EXPR:
3176 return simplify_gen_relational (NE, mode, inner_mode, op0, op1);
3177
3178 case UNORDERED_EXPR:
3179 return simplify_gen_relational (UNORDERED, mode, inner_mode, op0, op1);
3180
3181 case ORDERED_EXPR:
3182 return simplify_gen_relational (ORDERED, mode, inner_mode, op0, op1);
3183
3184 case UNLT_EXPR:
3185 return simplify_gen_relational (UNLT, mode, inner_mode, op0, op1);
3186
3187 case UNLE_EXPR:
3188 return simplify_gen_relational (UNLE, mode, inner_mode, op0, op1);
3189
3190 case UNGT_EXPR:
3191 return simplify_gen_relational (UNGT, mode, inner_mode, op0, op1);
3192
3193 case UNGE_EXPR:
3194 return simplify_gen_relational (UNGE, mode, inner_mode, op0, op1);
3195
3196 case UNEQ_EXPR:
3197 return simplify_gen_relational (UNEQ, mode, inner_mode, op0, op1);
3198
3199 case LTGT_EXPR:
3200 return simplify_gen_relational (LTGT, mode, inner_mode, op0, op1);
3201
3202 case COND_EXPR:
3203 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
3204
3205 case COMPLEX_EXPR:
3206 gcc_assert (COMPLEX_MODE_P (mode));
3207 if (GET_MODE (op0) == VOIDmode)
3208 op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
3209 if (GET_MODE (op1) == VOIDmode)
3210 op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
3211 return gen_rtx_CONCAT (mode, op0, op1);
3212
3213 case CONJ_EXPR:
3214 if (GET_CODE (op0) == CONCAT)
3215 return gen_rtx_CONCAT (mode, XEXP (op0, 0),
3216 simplify_gen_unary (NEG, GET_MODE_INNER (mode),
3217 XEXP (op0, 1),
3218 GET_MODE_INNER (mode)));
3219 else
3220 {
3221 enum machine_mode imode = GET_MODE_INNER (mode);
3222 rtx re, im;
3223
3224 if (MEM_P (op0))
3225 {
3226 re = adjust_address_nv (op0, imode, 0);
3227 im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
3228 }
3229 else
3230 {
3231 enum machine_mode ifmode = int_mode_for_mode (mode);
3232 enum machine_mode ihmode = int_mode_for_mode (imode);
3233 rtx halfsize;
3234 if (ifmode == BLKmode || ihmode == BLKmode)
3235 return NULL;
3236 halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
3237 re = op0;
3238 if (mode != ifmode)
3239 re = gen_rtx_SUBREG (ifmode, re, 0);
3240 re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
3241 if (imode != ihmode)
3242 re = gen_rtx_SUBREG (imode, re, 0);
3243 im = copy_rtx (op0);
3244 if (mode != ifmode)
3245 im = gen_rtx_SUBREG (ifmode, im, 0);
3246 im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
3247 if (imode != ihmode)
3248 im = gen_rtx_SUBREG (imode, im, 0);
3249 }
3250 im = gen_rtx_NEG (imode, im);
3251 return gen_rtx_CONCAT (mode, re, im);
3252 }
3253
3254 case ADDR_EXPR:
3255 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
3256 if (!op0 || !MEM_P (op0))
3257 {
3258 if ((TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
3259 || TREE_CODE (TREE_OPERAND (exp, 0)) == PARM_DECL
3260 || TREE_CODE (TREE_OPERAND (exp, 0)) == RESULT_DECL)
3261 && (!TREE_ADDRESSABLE (TREE_OPERAND (exp, 0))
3262 || target_for_debug_bind (TREE_OPERAND (exp, 0))))
3263 return gen_rtx_DEBUG_IMPLICIT_PTR (mode, TREE_OPERAND (exp, 0));
3264
3265 if (handled_component_p (TREE_OPERAND (exp, 0)))
3266 {
3267 HOST_WIDE_INT bitoffset, bitsize, maxsize;
3268 tree decl
3269 = get_ref_base_and_extent (TREE_OPERAND (exp, 0),
3270 &bitoffset, &bitsize, &maxsize);
3271 if ((TREE_CODE (decl) == VAR_DECL
3272 || TREE_CODE (decl) == PARM_DECL
3273 || TREE_CODE (decl) == RESULT_DECL)
3274 && (!TREE_ADDRESSABLE (decl)
3275 || target_for_debug_bind (decl))
3276 && (bitoffset % BITS_PER_UNIT) == 0
3277 && bitsize > 0
3278 && bitsize == maxsize)
3279 {
3280 rtx base = gen_rtx_DEBUG_IMPLICIT_PTR (mode, decl);
3281 return plus_constant (mode, base, bitoffset / BITS_PER_UNIT);
3282 }
3283 }
3284
3285 return NULL;
3286 }
3287
3288 as = TYPE_ADDR_SPACE (TREE_TYPE (exp));
3289 op0 = convert_debug_memory_address (mode, XEXP (op0, 0), as);
3290
3291 return op0;
3292
3293 case VECTOR_CST:
3294 {
3295 unsigned i;
3296
3297 op0 = gen_rtx_CONCATN
3298 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
3299
3300 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
3301 {
3302 op1 = expand_debug_expr (VECTOR_CST_ELT (exp, i));
3303 if (!op1)
3304 return NULL;
3305 XVECEXP (op0, 0, i) = op1;
3306 }
3307
3308 return op0;
3309 }
3310
3311 case CONSTRUCTOR:
3312 if (TREE_CLOBBER_P (exp))
3313 return NULL;
3314 else if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
3315 {
3316 unsigned i;
3317 tree val;
3318
3319 op0 = gen_rtx_CONCATN
3320 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
3321
3322 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
3323 {
3324 op1 = expand_debug_expr (val);
3325 if (!op1)
3326 return NULL;
3327 XVECEXP (op0, 0, i) = op1;
3328 }
3329
3330 if (i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)))
3331 {
3332 op1 = expand_debug_expr
3333 (build_zero_cst (TREE_TYPE (TREE_TYPE (exp))));
3334
3335 if (!op1)
3336 return NULL;
3337
3338 for (; i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)); i++)
3339 XVECEXP (op0, 0, i) = op1;
3340 }
3341
3342 return op0;
3343 }
3344 else
3345 goto flag_unsupported;
3346
3347 case CALL_EXPR:
3348 /* ??? Maybe handle some builtins? */
3349 return NULL;
3350
3351 case SSA_NAME:
3352 {
3353 gimple g = get_gimple_for_ssa_name (exp);
3354 if (g)
3355 {
3356 op0 = expand_debug_expr (gimple_assign_rhs_to_tree (g));
3357 if (!op0)
3358 return NULL;
3359 }
3360 else
3361 {
3362 int part = var_to_partition (SA.map, exp);
3363
3364 if (part == NO_PARTITION)
3365 {
3366 /* If this is a reference to an incoming value of a parameter
3367 that is never used in the code, or where the incoming
3368 value is never used, use the PARM_DECL's
3369 DECL_RTL if set. */
3370 if (SSA_NAME_IS_DEFAULT_DEF (exp)
3371 && TREE_CODE (SSA_NAME_VAR (exp)) == PARM_DECL)
3372 {
3373 op0 = expand_debug_parm_decl (SSA_NAME_VAR (exp));
3374 if (op0)
3375 goto adjust_mode;
3376 op0 = expand_debug_expr (SSA_NAME_VAR (exp));
3377 if (op0)
3378 goto adjust_mode;
3379 }
3380 return NULL;
3381 }
3382
3383 gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
3384
3385 op0 = copy_rtx (SA.partition_to_pseudo[part]);
3386 }
3387 goto adjust_mode;
3388 }
3389
3390 case ERROR_MARK:
3391 return NULL;
3392
3393 /* Vector stuff. For most of the codes we don't have rtl codes. */
3394 case REALIGN_LOAD_EXPR:
3395 case REDUC_MAX_EXPR:
3396 case REDUC_MIN_EXPR:
3397 case REDUC_PLUS_EXPR:
3398 case VEC_COND_EXPR:
3399 case VEC_LSHIFT_EXPR:
3400 case VEC_PACK_FIX_TRUNC_EXPR:
3401 case VEC_PACK_SAT_EXPR:
3402 case VEC_PACK_TRUNC_EXPR:
3403 case VEC_RSHIFT_EXPR:
3404 case VEC_UNPACK_FLOAT_HI_EXPR:
3405 case VEC_UNPACK_FLOAT_LO_EXPR:
3406 case VEC_UNPACK_HI_EXPR:
3407 case VEC_UNPACK_LO_EXPR:
3408 case VEC_WIDEN_MULT_HI_EXPR:
3409 case VEC_WIDEN_MULT_LO_EXPR:
3410 case VEC_WIDEN_MULT_EVEN_EXPR:
3411 case VEC_WIDEN_MULT_ODD_EXPR:
3412 case VEC_WIDEN_LSHIFT_HI_EXPR:
3413 case VEC_WIDEN_LSHIFT_LO_EXPR:
3414 case VEC_PERM_EXPR:
3415 return NULL;
3416
3417 /* Misc codes. */
3418 case ADDR_SPACE_CONVERT_EXPR:
3419 case FIXED_CONVERT_EXPR:
3420 case OBJ_TYPE_REF:
3421 case WITH_SIZE_EXPR:
3422 return NULL;
3423
3424 case DOT_PROD_EXPR:
3425 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3426 && SCALAR_INT_MODE_P (mode))
3427 {
3428 op0
3429 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3430 0)))
3431 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
3432 inner_mode);
3433 op1
3434 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3435 1)))
3436 ? ZERO_EXTEND : SIGN_EXTEND, mode, op1,
3437 inner_mode);
3438 op0 = simplify_gen_binary (MULT, mode, op0, op1);
3439 return simplify_gen_binary (PLUS, mode, op0, op2);
3440 }
3441 return NULL;
3442
3443 case WIDEN_MULT_EXPR:
3444 case WIDEN_MULT_PLUS_EXPR:
3445 case WIDEN_MULT_MINUS_EXPR:
3446 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3447 && SCALAR_INT_MODE_P (mode))
3448 {
3449 inner_mode = GET_MODE (op0);
3450 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
3451 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
3452 else
3453 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
3454 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
3455 op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
3456 else
3457 op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
3458 op0 = simplify_gen_binary (MULT, mode, op0, op1);
3459 if (TREE_CODE (exp) == WIDEN_MULT_EXPR)
3460 return op0;
3461 else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR)
3462 return simplify_gen_binary (PLUS, mode, op0, op2);
3463 else
3464 return simplify_gen_binary (MINUS, mode, op2, op0);
3465 }
3466 return NULL;
3467
3468 case MULT_HIGHPART_EXPR:
3469 /* ??? Similar to the above. */
3470 return NULL;
3471
3472 case WIDEN_SUM_EXPR:
3473 case WIDEN_LSHIFT_EXPR:
3474 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3475 && SCALAR_INT_MODE_P (mode))
3476 {
3477 op0
3478 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3479 0)))
3480 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
3481 inner_mode);
3482 return simplify_gen_binary (TREE_CODE (exp) == WIDEN_LSHIFT_EXPR
3483 ? ASHIFT : PLUS, mode, op0, op1);
3484 }
3485 return NULL;
3486
3487 case FMA_EXPR:
3488 return simplify_gen_ternary (FMA, mode, inner_mode, op0, op1, op2);
3489
3490 default:
3491 flag_unsupported:
3492 #ifdef ENABLE_CHECKING
3493 debug_tree (exp);
3494 gcc_unreachable ();
3495 #else
3496 return NULL;
3497 #endif
3498 }
3499 }
3500
3501 /* Return an RTX equivalent to the source bind value of the tree expression
3502 EXP. */
3503
3504 static rtx
3505 expand_debug_source_expr (tree exp)
3506 {
3507 rtx op0 = NULL_RTX;
3508 enum machine_mode mode = VOIDmode, inner_mode;
3509
3510 switch (TREE_CODE (exp))
3511 {
3512 case PARM_DECL:
3513 {
3514 mode = DECL_MODE (exp);
3515 op0 = expand_debug_parm_decl (exp);
3516 if (op0)
3517 break;
3518 /* See if this isn't an argument that has been completely
3519 optimized out. */
3520 if (!DECL_RTL_SET_P (exp)
3521 && !DECL_INCOMING_RTL (exp)
3522 && DECL_ABSTRACT_ORIGIN (current_function_decl))
3523 {
3524 tree aexp = DECL_ORIGIN (exp);
3525 if (DECL_CONTEXT (aexp)
3526 == DECL_ABSTRACT_ORIGIN (current_function_decl))
3527 {
3528 VEC(tree, gc) **debug_args;
3529 unsigned int ix;
3530 tree ddecl;
3531 debug_args = decl_debug_args_lookup (current_function_decl);
3532 if (debug_args != NULL)
3533 {
3534 for (ix = 0; VEC_iterate (tree, *debug_args, ix, ddecl);
3535 ix += 2)
3536 if (ddecl == aexp)
3537 return gen_rtx_DEBUG_PARAMETER_REF (mode, aexp);
3538 }
3539 }
3540 }
3541 break;
3542 }
3543 default:
3544 break;
3545 }
3546
3547 if (op0 == NULL_RTX)
3548 return NULL_RTX;
3549
3550 inner_mode = GET_MODE (op0);
3551 if (mode == inner_mode)
3552 return op0;
3553
3554 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
3555 {
3556 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
3557 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
3558 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
3559 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
3560 else
3561 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
3562 }
3563 else if (FLOAT_MODE_P (mode))
3564 gcc_unreachable ();
3565 else if (FLOAT_MODE_P (inner_mode))
3566 {
3567 if (TYPE_UNSIGNED (TREE_TYPE (exp)))
3568 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
3569 else
3570 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
3571 }
3572 else if (CONSTANT_P (op0)
3573 || GET_MODE_BITSIZE (mode) <= GET_MODE_BITSIZE (inner_mode))
3574 op0 = simplify_gen_subreg (mode, op0, inner_mode,
3575 subreg_lowpart_offset (mode, inner_mode));
3576 else if (TYPE_UNSIGNED (TREE_TYPE (exp)))
3577 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
3578 else
3579 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
3580
3581 return op0;
3582 }
3583
3584 /* Expand the _LOCs in debug insns. We run this after expanding all
3585 regular insns, so that any variables referenced in the function
3586 will have their DECL_RTLs set. */
3587
3588 static void
3589 expand_debug_locations (void)
3590 {
3591 rtx insn;
3592 rtx last = get_last_insn ();
3593 int save_strict_alias = flag_strict_aliasing;
3594
3595 /* New alias sets while setting up memory attributes cause
3596 -fcompare-debug failures, even though they don't bring about any
3597 codegen changes. */
3598 flag_strict_aliasing = 0;
3599
3600 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3601 if (DEBUG_INSN_P (insn))
3602 {
3603 tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
3604 rtx val;
3605 enum machine_mode mode;
3606
3607 if (value == NULL_TREE)
3608 val = NULL_RTX;
3609 else
3610 {
3611 if (INSN_VAR_LOCATION_STATUS (insn)
3612 == VAR_INIT_STATUS_UNINITIALIZED)
3613 val = expand_debug_source_expr (value);
3614 else
3615 val = expand_debug_expr (value);
3616 gcc_assert (last == get_last_insn ());
3617 }
3618
3619 if (!val)
3620 val = gen_rtx_UNKNOWN_VAR_LOC ();
3621 else
3622 {
3623 mode = GET_MODE (INSN_VAR_LOCATION (insn));
3624
3625 gcc_assert (mode == GET_MODE (val)
3626 || (GET_MODE (val) == VOIDmode
3627 && (CONST_INT_P (val)
3628 || GET_CODE (val) == CONST_FIXED
3629 || CONST_DOUBLE_AS_INT_P (val)
3630 || GET_CODE (val) == LABEL_REF)));
3631 }
3632
3633 INSN_VAR_LOCATION_LOC (insn) = val;
3634 }
3635
3636 flag_strict_aliasing = save_strict_alias;
3637 }
3638
3639 /* Expand basic block BB from GIMPLE trees to RTL. */
3640
3641 static basic_block
3642 expand_gimple_basic_block (basic_block bb)
3643 {
3644 gimple_stmt_iterator gsi;
3645 gimple_seq stmts;
3646 gimple stmt = NULL;
3647 rtx note, last;
3648 edge e;
3649 edge_iterator ei;
3650 void **elt;
3651
3652 if (dump_file)
3653 fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
3654 bb->index);
3655
3656 /* Note that since we are now transitioning from GIMPLE to RTL, we
3657 cannot use the gsi_*_bb() routines because they expect the basic
3658 block to be in GIMPLE, instead of RTL. Therefore, we need to
3659 access the BB sequence directly. */
3660 stmts = bb_seq (bb);
3661 bb->il.gimple.seq = NULL;
3662 bb->il.gimple.phi_nodes = NULL;
3663 rtl_profile_for_bb (bb);
3664 init_rtl_bb_info (bb);
3665 bb->flags |= BB_RTL;
3666
3667 /* Remove the RETURN_EXPR if we may fall through to the exit
3668 instead. */
3669 gsi = gsi_last (stmts);
3670 if (!gsi_end_p (gsi)
3671 && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
3672 {
3673 gimple ret_stmt = gsi_stmt (gsi);
3674
3675 gcc_assert (single_succ_p (bb));
3676 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR);
3677
3678 if (bb->next_bb == EXIT_BLOCK_PTR
3679 && !gimple_return_retval (ret_stmt))
3680 {
3681 gsi_remove (&gsi, false);
3682 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
3683 }
3684 }
3685
3686 gsi = gsi_start (stmts);
3687 if (!gsi_end_p (gsi))
3688 {
3689 stmt = gsi_stmt (gsi);
3690 if (gimple_code (stmt) != GIMPLE_LABEL)
3691 stmt = NULL;
3692 }
3693
3694 elt = pointer_map_contains (lab_rtx_for_bb, bb);
3695
3696 if (stmt || elt)
3697 {
3698 last = get_last_insn ();
3699
3700 if (stmt)
3701 {
3702 expand_gimple_stmt (stmt);
3703 gsi_next (&gsi);
3704 }
3705
3706 if (elt)
3707 emit_label ((rtx) *elt);
3708
3709 /* Java emits line number notes at the top of labels.
3710 ??? Make this go away once line number notes are obsoleted. */
3711 BB_HEAD (bb) = NEXT_INSN (last);
3712 if (NOTE_P (BB_HEAD (bb)))
3713 BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
3714 note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
3715
3716 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3717 }
3718 else
3719 note = BB_HEAD (bb) = emit_note (NOTE_INSN_BASIC_BLOCK);
3720
3721 NOTE_BASIC_BLOCK (note) = bb;
3722
3723 for (; !gsi_end_p (gsi); gsi_next (&gsi))
3724 {
3725 basic_block new_bb;
3726
3727 stmt = gsi_stmt (gsi);
3728
3729 /* If this statement is a non-debug one, and we generate debug
3730 insns, then this one might be the last real use of a TERed
3731 SSA_NAME, but where there are still some debug uses further
3732 down. Expanding the current SSA name by its RHS in such further
3733 debug uses might lead to wrong debug info, as coalescing
3734 might make the operands of such RHS be placed into the same
3735 pseudo as something else. Like so:
3736 a_1 = a_0 + 1; // Assume a_1 is TERed and a_0 is dead
3737 use(a_1);
3738 a_2 = ...
3739 #DEBUG ... => a_1
3740 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
3741 If we were now to expand a_1 by its RHS (a_0 + 1) in the debug use,
3742 the write to a_2 would actually have clobbered the place which
3743 formerly held a_0.
3744
3745 So, instead of that, we recognize the situation, and generate
3746 debug temporaries at the last real use of TERed SSA names:
3747 a_1 = a_0 + 1;
3748 #DEBUG #D1 => a_1
3749 use(a_1);
3750 a_2 = ...
3751 #DEBUG ... => #D1
3752 */
3753 if (MAY_HAVE_DEBUG_INSNS
3754 && SA.values
3755 && !is_gimple_debug (stmt))
3756 {
3757 ssa_op_iter iter;
3758 tree op;
3759 gimple def;
3760
3761 location_t sloc = curr_insn_location ();
3762
3763 /* Look for SSA names that have their last use here (TERed
3764 names always have only one real use). */
3765 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
3766 if ((def = get_gimple_for_ssa_name (op)))
3767 {
3768 imm_use_iterator imm_iter;
3769 use_operand_p use_p;
3770 bool have_debug_uses = false;
3771
3772 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
3773 {
3774 if (gimple_debug_bind_p (USE_STMT (use_p)))
3775 {
3776 have_debug_uses = true;
3777 break;
3778 }
3779 }
3780
3781 if (have_debug_uses)
3782 {
3783 /* OP is a TERed SSA name, with DEF its defining
3784 statement, and where OP is used in further debug
3785 instructions. Generate a debug temporary, and
3786 replace all uses of OP in debug insns with that
3787 temporary. */
3788 gimple debugstmt;
3789 tree value = gimple_assign_rhs_to_tree (def);
3790 tree vexpr = make_node (DEBUG_EXPR_DECL);
3791 rtx val;
3792 enum machine_mode mode;
3793
3794 set_curr_insn_location (gimple_location (def));
3795
3796 DECL_ARTIFICIAL (vexpr) = 1;
3797 TREE_TYPE (vexpr) = TREE_TYPE (value);
3798 if (DECL_P (value))
3799 mode = DECL_MODE (value);
3800 else
3801 mode = TYPE_MODE (TREE_TYPE (value));
3802 DECL_MODE (vexpr) = mode;
3803
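/* Note that VALUE is still a tree at this point; the cast below merely
   stashes it in the VAR_LOCATION, and expand_debug_locations later
   replaces it with a real RTL location expression.  */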
3804 val = gen_rtx_VAR_LOCATION
3805 (mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
3806
3807 emit_debug_insn (val);
3808
3809 FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
3810 {
3811 if (!gimple_debug_bind_p (debugstmt))
3812 continue;
3813
3814 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
3815 SET_USE (use_p, vexpr);
3816
3817 update_stmt (debugstmt);
3818 }
3819 }
3820 }
3821 set_curr_insn_location (sloc);
3822 }
3823
3824 currently_expanding_gimple_stmt = stmt;
3825
3826 /* Expand this statement, then evaluate the resulting RTL and
3827 fixup the CFG accordingly. */
3828 if (gimple_code (stmt) == GIMPLE_COND)
3829 {
3830 new_bb = expand_gimple_cond (bb, stmt);
3831 if (new_bb)
3832 return new_bb;
3833 }
3834 else if (gimple_debug_bind_p (stmt))
3835 {
3836 location_t sloc = curr_insn_location ();
3837 gimple_stmt_iterator nsi = gsi;
3838
3839 for (;;)
3840 {
3841 tree var = gimple_debug_bind_get_var (stmt);
3842 tree value;
3843 rtx val;
3844 enum machine_mode mode;
3845
3846 if (TREE_CODE (var) != DEBUG_EXPR_DECL
3847 && TREE_CODE (var) != LABEL_DECL
3848 && !target_for_debug_bind (var))
3849 goto delink_debug_stmt;
3850
3851 if (gimple_debug_bind_has_value_p (stmt))
3852 value = gimple_debug_bind_get_value (stmt);
3853 else
3854 value = NULL_TREE;
3855
3856 last = get_last_insn ();
3857
3858 set_curr_insn_location (gimple_location (stmt));
3859
3860 if (DECL_P (var))
3861 mode = DECL_MODE (var);
3862 else
3863 mode = TYPE_MODE (TREE_TYPE (var));
3864
3865 val = gen_rtx_VAR_LOCATION
3866 (mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
3867
3868 emit_debug_insn (val);
3869
3870 if (dump_file && (dump_flags & TDF_DETAILS))
3871 {
3872 /* We can't dump the insn with a TREE where an RTX
3873 is expected. */
3874 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
3875 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3876 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
3877 }
3878
3879 delink_debug_stmt:
3880 /* In order not to generate too many debug temporaries,
3881 we delink all uses of debug statements we already expanded.
3882 Therefore debug statements between definition and real
3883 use of TERed SSA names will continue to use the SSA name,
3884 and not be replaced with debug temps. */
3885 delink_stmt_imm_use (stmt);
3886
3887 gsi = nsi;
3888 gsi_next (&nsi);
3889 if (gsi_end_p (nsi))
3890 break;
3891 stmt = gsi_stmt (nsi);
3892 if (!gimple_debug_bind_p (stmt))
3893 break;
3894 }
3895
3896 set_curr_insn_location (sloc);
3897 }
3898 else if (gimple_debug_source_bind_p (stmt))
3899 {
3900 location_t sloc = curr_insn_location ();
3901 tree var = gimple_debug_source_bind_get_var (stmt);
3902 tree value = gimple_debug_source_bind_get_value (stmt);
3903 rtx val;
3904 enum machine_mode mode;
3905
3906 last = get_last_insn ();
3907
3908 set_curr_insn_location (gimple_location (stmt));
3909
3910 mode = DECL_MODE (var);
3911
3912 val = gen_rtx_VAR_LOCATION (mode, var, (rtx)value,
3913 VAR_INIT_STATUS_UNINITIALIZED);
3914
3915 emit_debug_insn (val);
3916
3917 if (dump_file && (dump_flags & TDF_DETAILS))
3918 {
3919 /* We can't dump the insn with a TREE where an RTX
3920 is expected. */
3921 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
3922 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3923 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
3924 }
3925
3926 set_curr_insn_location (sloc);
3927 }
3928 else
3929 {
3930 if (is_gimple_call (stmt) && gimple_call_tail_p (stmt))
3931 {
3932 bool can_fallthru;
3933 new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru);
3934 if (new_bb)
3935 {
3936 if (can_fallthru)
3937 bb = new_bb;
3938 else
3939 return new_bb;
3940 }
3941 }
3942 else
3943 {
3944 def_operand_p def_p;
3945 def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
3946
3947 if (def_p != NULL)
3948 {
3949 /* Ignore this stmt if it is in the list of
3950 replaceable expressions. */
3951 if (SA.values
3952 && bitmap_bit_p (SA.values,
3953 SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
3954 continue;
3955 }
3956 last = expand_gimple_stmt (stmt);
3957 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3958 }
3959 }
3960 }
3961
3962 currently_expanding_gimple_stmt = NULL;
3963
3964 /* Expand implicit goto and convert goto_locus. */
3965 FOR_EACH_EDGE (e, ei, bb->succs)
3966 {
3967 if (e->goto_locus != UNKNOWN_LOCATION)
3968 set_curr_insn_location (e->goto_locus);
3969 if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
3970 {
3971 emit_jump (label_rtx_for_bb (e->dest));
3972 e->flags &= ~EDGE_FALLTHRU;
3973 }
3974 }
3975
3976 /* Expanded RTL can create a jump as the last instruction of the block.
3977 Later this might be assumed to be a jump to the successor and break edge insertion.
3978 We need to insert a dummy move to prevent this. PR41440. */
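/* A move of a fresh pseudo into itself generates no useful code, but it
   guarantees that the block no longer ends in a jump; later RTL cleanups
   presumably delete the dummy insn again.  */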
3979 if (single_succ_p (bb)
3980 && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
3981 && (last = get_last_insn ())
3982 && JUMP_P (last))
3983 {
3984 rtx dummy = gen_reg_rtx (SImode);
3985 emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
3986 }
3987
3988 do_pending_stack_adjust ();
3989
3990 /* Find the block tail. The last insn in the block is the insn
3991 before a barrier and/or table jump insn. */
3992 last = get_last_insn ();
3993 if (BARRIER_P (last))
3994 last = PREV_INSN (last);
3995 if (JUMP_TABLE_DATA_P (last))
3996 last = PREV_INSN (PREV_INSN (last));
3997 BB_END (bb) = last;
3998
3999 update_bb_for_insn (bb);
4000
4001 return bb;
4002 }
4003
4004
4005 /* Create a basic block for initialization code. */
4006
4007 static basic_block
4008 construct_init_block (void)
4009 {
4010 basic_block init_block, first_block;
4011 edge e = NULL;
4012 int flags;
4013
4014 /* Multiple entry points not supported yet. */
4015 gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR->succs) == 1);
4016 init_rtl_bb_info (ENTRY_BLOCK_PTR);
4017 init_rtl_bb_info (EXIT_BLOCK_PTR);
4018 ENTRY_BLOCK_PTR->flags |= BB_RTL;
4019 EXIT_BLOCK_PTR->flags |= BB_RTL;
4020
4021 e = EDGE_SUCC (ENTRY_BLOCK_PTR, 0);
4022
4023 /* When the entry edge points to the first basic block, we don't need
4024 a jump; otherwise we have to jump to the proper target. */
4025 if (e && e->dest != ENTRY_BLOCK_PTR->next_bb)
4026 {
4027 tree label = gimple_block_label (e->dest);
4028
4029 emit_jump (label_rtx (label));
4030 flags = 0;
4031 }
4032 else
4033 flags = EDGE_FALLTHRU;
4034
4035 init_block = create_basic_block (NEXT_INSN (get_insns ()),
4036 get_last_insn (),
4037 ENTRY_BLOCK_PTR);
4038 init_block->frequency = ENTRY_BLOCK_PTR->frequency;
4039 init_block->count = ENTRY_BLOCK_PTR->count;
4040 if (current_loops && ENTRY_BLOCK_PTR->loop_father)
4041 add_bb_to_loop (init_block, ENTRY_BLOCK_PTR->loop_father);
4042 if (e)
4043 {
4044 first_block = e->dest;
4045 redirect_edge_succ (e, init_block);
4046 e = make_edge (init_block, first_block, flags);
4047 }
4048 else
4049 e = make_edge (init_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
4050 e->probability = REG_BR_PROB_BASE;
4051 e->count = ENTRY_BLOCK_PTR->count;
4052
4053 update_bb_for_insn (init_block);
4054 return init_block;
4055 }
4056
4057 /* For each lexical block, set BLOCK_NUMBER to the depth at which it is
4058 found in the block tree. */
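/* (The block passed in gets LEVEL, its subblocks LEVEL + 1, and so on;
   these depths later let change_scope find common parent blocks cheaply.)  */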
4059
4060 static void
4061 set_block_levels (tree block, int level)
4062 {
4063 while (block)
4064 {
4065 BLOCK_NUMBER (block) = level;
4066 set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
4067 block = BLOCK_CHAIN (block);
4068 }
4069 }
4070
4071 /* Create a block containing landing pads and similar stuff. */
4072
4073 static void
4074 construct_exit_block (void)
4075 {
4076 rtx head = get_last_insn ();
4077 rtx end;
4078 basic_block exit_block;
4079 edge e, e2;
4080 unsigned ix;
4081 edge_iterator ei;
4082 rtx orig_end = BB_END (EXIT_BLOCK_PTR->prev_bb);
4083
4084 rtl_profile_for_bb (EXIT_BLOCK_PTR);
4085
4086 /* Make sure the locus is set to the end of the function, so that
4087 epilogue line numbers and warnings are set properly. */
4088 if (LOCATION_LOCUS (cfun->function_end_locus) != UNKNOWN_LOCATION)
4089 input_location = cfun->function_end_locus;
4090
4091 /* Generate rtl for function exit. */
4092 expand_function_end ();
4093
4094 end = get_last_insn ();
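/* If expand_function_end emitted nothing, there is no exit block to
   construct.  */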
4095 if (head == end)
4096 return;
4097 /* While emitting the function end we could have moved the end of the
4098 last basic block. */
4099 BB_END (EXIT_BLOCK_PTR->prev_bb) = orig_end;
4100 while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
4101 head = NEXT_INSN (head);
4102 exit_block = create_basic_block (NEXT_INSN (head), end,
4103 EXIT_BLOCK_PTR->prev_bb);
4104 exit_block->frequency = EXIT_BLOCK_PTR->frequency;
4105 exit_block->count = EXIT_BLOCK_PTR->count;
4106 if (current_loops && EXIT_BLOCK_PTR->loop_father)
4107 add_bb_to_loop (exit_block, EXIT_BLOCK_PTR->loop_father);
4108
4109 ix = 0;
4110 while (ix < EDGE_COUNT (EXIT_BLOCK_PTR->preds))
4111 {
4112 e = EDGE_PRED (EXIT_BLOCK_PTR, ix);
4113 if (!(e->flags & EDGE_ABNORMAL))
4114 redirect_edge_succ (e, exit_block);
4115 else
4116 ix++;
4117 }
4118
4119 e = make_edge (exit_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
4120 e->probability = REG_BR_PROB_BASE;
4121 e->count = EXIT_BLOCK_PTR->count;
4122 FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR->preds)
4123 if (e2 != e)
4124 {
4125 e->count -= e2->count;
4126 exit_block->count -= e2->count;
4127 exit_block->frequency -= EDGE_FREQUENCY (e2);
4128 }
4129 if (e->count < 0)
4130 e->count = 0;
4131 if (exit_block->count < 0)
4132 exit_block->count = 0;
4133 if (exit_block->frequency < 0)
4134 exit_block->frequency = 0;
4135 update_bb_for_insn (exit_block);
4136 }
4137
4138 /* Helper function for discover_nonconstant_array_refs.
4139 Look for ARRAY_REF nodes with non-constant indexes and mark them
4140 addressable. */
4141
4142 static tree
4143 discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
4144 void *data ATTRIBUTE_UNUSED)
4145 {
4146 tree t = *tp;
4147
4148 if (IS_TYPE_OR_DECL_P (t))
4149 *walk_subtrees = 0;
4150 else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4151 {
4152 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4153 && is_gimple_min_invariant (TREE_OPERAND (t, 1))
4154 && (!TREE_OPERAND (t, 2)
4155 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
4156 || (TREE_CODE (t) == COMPONENT_REF
4157 && (!TREE_OPERAND (t,2)
4158 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
4159 || TREE_CODE (t) == BIT_FIELD_REF
4160 || TREE_CODE (t) == REALPART_EXPR
4161 || TREE_CODE (t) == IMAGPART_EXPR
4162 || TREE_CODE (t) == VIEW_CONVERT_EXPR
4163 || CONVERT_EXPR_P (t))
4164 t = TREE_OPERAND (t, 0);
4165
4166 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4167 {
4168 t = get_base_address (t);
4169 if (t && DECL_P (t)
4170 && DECL_MODE (t) != BLKmode)
4171 TREE_ADDRESSABLE (t) = 1;
4172 }
4173
4174 *walk_subtrees = 0;
4175 }
4176
4177 return NULL_TREE;
4178 }
4179
4180 /* RTL expansion is not able to compile array references with variable
4181 offsets for arrays stored in a single register. Discover such
4182 expressions and mark the variables as addressable to avoid this
4183 scenario. */
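/* For example, for an access "v[i]" where V could otherwise be kept in
   a single (vector) register and I is not a compile-time constant, V is
   marked TREE_ADDRESSABLE so that it is given a stack slot which the
   variable index can address.  */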
4184
4185 static void
4186 discover_nonconstant_array_refs (void)
4187 {
4188 basic_block bb;
4189 gimple_stmt_iterator gsi;
4190
4191 FOR_EACH_BB (bb)
4192 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4193 {
4194 gimple stmt = gsi_stmt (gsi);
4195 if (!is_gimple_debug (stmt))
4196 walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
4197 }
4198 }
4199
4200 /* This function sets crtl->args.internal_arg_pointer to a virtual
4201 register if DRAP is needed. The local register allocator will replace
4202 virtual_incoming_args_rtx with the virtual register. */
4203
4204 static void
4205 expand_stack_alignment (void)
4206 {
4207 rtx drap_rtx;
4208 unsigned int preferred_stack_boundary;
4209
4210 if (! SUPPORTS_STACK_ALIGNMENT)
4211 return;
4212
4213 if (cfun->calls_alloca
4214 || cfun->has_nonlocal_label
4215 || crtl->has_nonlocal_goto)
4216 crtl->need_drap = true;
4217
4218 /* Call update_stack_boundary here again to update incoming stack
4219 boundary. It may set incoming stack alignment to a different
4220 value after RTL expansion. TARGET_FUNCTION_OK_FOR_SIBCALL may
4221 use the minimum incoming stack alignment to check if it is OK
4222 to perform sibcall optimization since sibcall optimization will
4223 only align the outgoing stack to incoming stack boundary. */
4224 if (targetm.calls.update_stack_boundary)
4225 targetm.calls.update_stack_boundary ();
4226
4227 /* The incoming stack frame has to be aligned at least at
4228 parm_stack_boundary. */
4229 gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);
4230
4231 /* Update crtl->stack_alignment_estimated and use it later to align
4232 stack. We check PREFERRED_STACK_BOUNDARY if there may be non-call
4233 exceptions since callgraph doesn't collect incoming stack alignment
4234 in this case. */
4235 if (cfun->can_throw_non_call_exceptions
4236 && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
4237 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
4238 else
4239 preferred_stack_boundary = crtl->preferred_stack_boundary;
4240 if (preferred_stack_boundary > crtl->stack_alignment_estimated)
4241 crtl->stack_alignment_estimated = preferred_stack_boundary;
4242 if (preferred_stack_boundary > crtl->stack_alignment_needed)
4243 crtl->stack_alignment_needed = preferred_stack_boundary;
4244
4245 gcc_assert (crtl->stack_alignment_needed
4246 <= crtl->stack_alignment_estimated);
4247
4248 crtl->stack_realign_needed
4249 = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
4250 crtl->stack_realign_tried = crtl->stack_realign_needed;
4251
4252 crtl->stack_realign_processed = true;
4253
4254 /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
4255 alignment. */
4256 gcc_assert (targetm.calls.get_drap_rtx != NULL);
4257 drap_rtx = targetm.calls.get_drap_rtx ();
4258
4259 /* stack_realign_drap and drap_rtx must match. */
4260 gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));
4261
4262 /* Do nothing if NULL is returned, which means DRAP is not needed. */
4263 if (NULL != drap_rtx)
4264 {
4265 crtl->args.internal_arg_pointer = drap_rtx;
4266
4267 /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
4268 needed. */
4269 fixup_tail_calls ();
4270 }
4271 }
4272
4273 /* Translate the intermediate representation contained in the CFG
4274 from GIMPLE trees to RTL.
4275
4276 We do conversion per basic block and preserve/update the tree CFG.
4277 This implies we have to do some magic as the CFG can simultaneously
4278 consist of basic blocks containing RTL and GIMPLE trees. This can
4279 confuse the CFG hooks, so be careful to not manipulate CFG during
4280 the expansion. */
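/* Roughly, the steps below are: rewrite out of SSA form, expand the
   variables and the function prologue, build the init block, expand
   every basic block, expand the pending debug locations, build the
   exit block, and finally fix up the resulting RTL CFG.  */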
4281
4282 static unsigned int
4283 gimple_expand_cfg (void)
4284 {
4285 basic_block bb, init_block;
4286 sbitmap blocks;
4287 edge_iterator ei;
4288 edge e;
4289 rtx var_seq;
4290 unsigned i;
4291
4292 timevar_push (TV_OUT_OF_SSA);
4293 rewrite_out_of_ssa (&SA);
4294 timevar_pop (TV_OUT_OF_SSA);
4295 SA.partition_to_pseudo = XCNEWVEC (rtx, SA.map->num_partitions);
4296
4297 /* Make sure all values used by the optimization passes have sane
4298 defaults. */
4299 reg_renumber = 0;
4300
4301 /* Some backends want to know that we are expanding to RTL. */
4302 currently_expanding_to_rtl = 1;
4303 /* Dominators are not kept up-to-date as we may create new basic-blocks. */
4304 free_dominance_info (CDI_DOMINATORS);
4305
4306 rtl_profile_for_bb (ENTRY_BLOCK_PTR);
4307
4308 insn_locations_init ();
4309 if (!DECL_IS_BUILTIN (current_function_decl))
4310 {
4311 /* Eventually, all FEs should explicitly set function_start_locus. */
4312 if (LOCATION_LOCUS (cfun->function_start_locus) == UNKNOWN_LOCATION)
4313 set_curr_insn_location
4314 (DECL_SOURCE_LOCATION (current_function_decl));
4315 else
4316 set_curr_insn_location (cfun->function_start_locus);
4317 }
4318 else
4319 set_curr_insn_location (UNKNOWN_LOCATION);
4320 prologue_location = curr_insn_location ();
4321
4322 #ifdef INSN_SCHEDULING
4323 init_sched_attrs ();
4324 #endif
4325
4326 /* Make sure first insn is a note even if we don't want linenums.
4327 This makes sure the first insn will never be deleted.
4328 Also, final expects a note to appear there. */
4329 emit_note (NOTE_INSN_DELETED);
4330
4331 /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE. */
4332 discover_nonconstant_array_refs ();
4333
4334 targetm.expand_to_rtl_hook ();
4335 crtl->stack_alignment_needed = STACK_BOUNDARY;
4336 crtl->max_used_stack_slot_alignment = STACK_BOUNDARY;
4337 crtl->stack_alignment_estimated = 0;
4338 crtl->preferred_stack_boundary = STACK_BOUNDARY;
4339 cfun->cfg->max_jumptable_ents = 0;
4340
4341 /* Resolve the function section. Some targets, like ARM EABI, rely on knowledge
4342 of the function section at expansion time to predict the distance of calls. */
4343 resolve_unique_section (current_function_decl, 0, flag_function_sections);
4344
4345 /* Expand the variables recorded during gimple lowering. */
4346 timevar_push (TV_VAR_EXPAND);
4347 start_sequence ();
4348
4349 expand_used_vars ();
4350
4351 var_seq = get_insns ();
4352 end_sequence ();
4353 timevar_pop (TV_VAR_EXPAND);
4354
4355 /* Honor stack protection warnings. */
4356 if (warn_stack_protect)
4357 {
4358 if (cfun->calls_alloca)
4359 warning (OPT_Wstack_protector,
4360 "stack protector not protecting local variables: "
4361 "variable length buffer");
4362 if (has_short_buffer && !crtl->stack_protect_guard)
4363 warning (OPT_Wstack_protector,
4364 "stack protector not protecting function: "
4365 "all local arrays are less than %d bytes long",
4366 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
4367 }
4368
4369 /* Set up parameters and prepare for return, for the function. */
4370 expand_function_start (current_function_decl);
4371
4372 /* If we emitted any instructions for setting up the variables,
4373 emit them before the FUNCTION_START note. */
4374 if (var_seq)
4375 {
4376 emit_insn_before (var_seq, parm_birth_insn);
4377
4378 /* In expand_function_end we'll insert the alloca save/restore
4379 before parm_birth_insn. We've just inserted an alloca call.
4380 Adjust the pointer to match. */
4381 parm_birth_insn = var_seq;
4382 }
4383
4384 /* Now that we also have the parameter RTXs, copy them over to our
4385 partitions. */
4386 for (i = 0; i < SA.map->num_partitions; i++)
4387 {
4388 tree var = SSA_NAME_VAR (partition_to_var (SA.map, i));
4389
4390 if (TREE_CODE (var) != VAR_DECL
4391 && !SA.partition_to_pseudo[i])
4392 SA.partition_to_pseudo[i] = DECL_RTL_IF_SET (var);
4393 gcc_assert (SA.partition_to_pseudo[i]);
4394
4395 /* If this decl was marked as living in multiple places, reset
4396 this now to NULL. */
4397 if (DECL_RTL_IF_SET (var) == pc_rtx)
4398 SET_DECL_RTL (var, NULL);
4399
4400 /* Some RTL parts really want to look at DECL_RTL(x) when x
4401 was a decl marked in REG_ATTR or MEM_ATTR. We could use
4402 SET_DECL_RTL here to make this available, but that would mean
4403 selecting one of the potentially many RTLs for one DECL. Instead
4404 of doing that we simply reset the MEM_EXPR of the RTL in question,
4405 then nobody can get at it and hence nobody can call DECL_RTL on it. */
4406 if (!DECL_RTL_SET_P (var))
4407 {
4408 if (MEM_P (SA.partition_to_pseudo[i]))
4409 set_mem_expr (SA.partition_to_pseudo[i], NULL);
4410 }
4411 }
4412
4413 /* If we have a partition containing differently aligned pointers
4414 we need to merge those into the corresponding RTL pointer
4415 alignment. */
4416 for (i = 1; i < num_ssa_names; i++)
4417 {
4418 tree name = ssa_name (i);
4419 int part;
4420 rtx r;
4421
4422 if (!name
4423 /* We might have generated new SSA names in
4424 update_alias_info_with_stack_vars. They will have a NULL
4425 defining statement, and won't be part of the partitioning,
4426 so ignore them. */
4427 || !SSA_NAME_DEF_STMT (name))
4428 continue;
4429 part = var_to_partition (SA.map, name);
4430 if (part == NO_PARTITION)
4431 continue;
4432
4433 /* Adjust all partition members to get the underlying decl of
4434 the representative which we might have created in expand_one_var. */
4435 if (SSA_NAME_VAR (name) == NULL_TREE)
4436 {
4437 tree leader = partition_to_var (SA.map, part);
4438 gcc_assert (SSA_NAME_VAR (leader) != NULL_TREE);
4439 replace_ssa_name_symbol (name, SSA_NAME_VAR (leader));
4440 }
4441 if (!POINTER_TYPE_P (TREE_TYPE (name)))
4442 continue;
4443
4444 r = SA.partition_to_pseudo[part];
4445 if (REG_P (r))
4446 mark_reg_pointer (r, get_pointer_alignment (name));
4447 }
4448
4449 /* If this function is `main', emit a call to `__main'
4450 to run global initializers, etc. */
4451 if (DECL_NAME (current_function_decl)
4452 && MAIN_NAME_P (DECL_NAME (current_function_decl))
4453 && DECL_FILE_SCOPE_P (current_function_decl))
4454 expand_main_function ();
4455
4456 /* Initialize the stack_protect_guard field. This must happen after the
4457 call to __main (if any) so that the external decl is initialized. */
4458 if (crtl->stack_protect_guard)
4459 stack_protect_prologue ();
4460
4461 expand_phi_nodes (&SA);
4462
4463 /* Register rtl specific functions for cfg. */
4464 rtl_register_cfg_hooks ();
4465
4466 init_block = construct_init_block ();
4467
4468 /* Clear EDGE_EXECUTABLE on the entry edge(s). It is cleaned from the
4469 remaining edges later. */
4470 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
4471 e->flags &= ~EDGE_EXECUTABLE;
4472
4473 lab_rtx_for_bb = pointer_map_create ();
4474 FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb)
4475 bb = expand_gimple_basic_block (bb);
4476
4477 if (MAY_HAVE_DEBUG_INSNS)
4478 expand_debug_locations ();
4479
4480 /* Free stuff we no longer need after GIMPLE optimizations. */
4481 free_dominance_info (CDI_DOMINATORS);
4482 free_dominance_info (CDI_POST_DOMINATORS);
4483 delete_tree_cfg_annotations ();
4484
4485 timevar_push (TV_OUT_OF_SSA);
4486 finish_out_of_ssa (&SA);
4487 timevar_pop (TV_OUT_OF_SSA);
4488
4489 timevar_push (TV_POST_EXPAND);
4490 /* We are no longer in SSA form. */
4491 cfun->gimple_df->in_ssa_p = false;
4492 if (current_loops)
4493 loops_state_clear (LOOP_CLOSED_SSA);
4494
4495 /* Expansion is used by optimization passes too, set maybe_hot_insn_p
4496 conservatively to true until they are all profile aware. */
4497 pointer_map_destroy (lab_rtx_for_bb);
4498 free_histograms ();
4499
4500 construct_exit_block ();
4501 insn_locations_finalize ();
4502
4503 /* Zap the tree EH table. */
4504 set_eh_throw_stmt_table (cfun, NULL);
4505
4506 /* We need JUMP_LABEL to be set in order to redirect jumps and hence
4507 split edges, which edge insertions might do. */
4508 rebuild_jump_labels (get_insns ());
4509
4510 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
4511 {
4512 edge e;
4513 edge_iterator ei;
4514 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
4515 {
4516 if (e->insns.r)
4517 {
4518 rebuild_jump_labels_chain (e->insns.r);
4519 /* Avoid putting insns before parm_birth_insn. */
4520 if (e->src == ENTRY_BLOCK_PTR
4521 && single_succ_p (ENTRY_BLOCK_PTR)
4522 && parm_birth_insn)
4523 {
4524 rtx insns = e->insns.r;
4525 e->insns.r = NULL_RTX;
4526 emit_insn_after_noloc (insns, parm_birth_insn, e->dest);
4527 }
4528 else
4529 commit_one_edge_insertion (e);
4530 }
4531 else
4532 ei_next (&ei);
4533 }
4534 }
4535
4536 /* We're done expanding trees to RTL. */
4537 currently_expanding_to_rtl = 0;
4538
4539 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb, EXIT_BLOCK_PTR, next_bb)
4540 {
4541 edge e;
4542 edge_iterator ei;
4543 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
4544 {
4545 /* Clear EDGE_EXECUTABLE. This flag is never used in the backend. */
4546 e->flags &= ~EDGE_EXECUTABLE;
4547
4548 /* At the moment not all abnormal edges match the RTL
4549 representation. It is safe to remove them here as
4550 find_many_sub_basic_blocks will rediscover them.
4551 In the future we should get this fixed properly. */
4552 if ((e->flags & EDGE_ABNORMAL)
4553 && !(e->flags & EDGE_SIBCALL))
4554 remove_edge (e);
4555 else
4556 ei_next (&ei);
4557 }
4558 }
4559
4560 blocks = sbitmap_alloc (last_basic_block);
4561 sbitmap_ones (blocks);
4562 find_many_sub_basic_blocks (blocks);
4563 sbitmap_free (blocks);
4564 purge_all_dead_edges ();
4565
4566 expand_stack_alignment ();
4567
4568 /* Fixup REG_EQUIV notes in the prologue if there are tailcalls in this
4569 function. */
4570 if (crtl->tail_call_emit)
4571 fixup_tail_calls ();
4572
4573 /* After initial rtl generation, call back to finish generating
4574 exception support code. We need to do this before cleaning up
4575 the CFG as the code does not expect dead landing pads. */
4576 if (cfun->eh->region_tree != NULL)
4577 finish_eh_generation ();
4578
4579 /* Remove unreachable blocks, otherwise we cannot compute dominators
4580 which are needed for loop state verification. As a side-effect
4581 this also compacts blocks.
4582 ??? We cannot remove trivially dead insns here as for example
4583 the DRAP reg on i?86 is not magically live at this point.
4584 gcc.c-torture/execute/ipa-sra-2.c execution, -Os -m32 fails otherwise. */
4585 cleanup_cfg (CLEANUP_NO_INSN_DEL);
4586
4587 #ifdef ENABLE_CHECKING
4588 verify_flow_info ();
4589 #endif
4590
4591 /* Initialize pseudos allocated for hard registers. */
4592 emit_initial_value_sets ();
4593
4594 /* And finally unshare all RTL. */
4595 unshare_all_rtl ();
4596
4597 /* There's no need to defer outputting this function any more; we
4598 know we want to output it. */
4599 DECL_DEFER_OUTPUT (current_function_decl) = 0;
4600
4601 /* Now that we're done expanding trees to RTL, we shouldn't have any
4602 more CONCATs anywhere. */
4603 generating_concat_p = 0;
4604
4605 if (dump_file)
4606 {
4607 fprintf (dump_file,
4608 "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
4609 /* And the pass manager will dump RTL for us. */
4610 }
4611
4612 /* If we're emitting a nested function, make sure its parent gets
4613 emitted as well. Doing otherwise confuses debug info. */
4614 {
4615 tree parent;
4616 for (parent = DECL_CONTEXT (current_function_decl);
4617 parent != NULL_TREE;
4618 parent = get_containing_scope (parent))
4619 if (TREE_CODE (parent) == FUNCTION_DECL)
4620 TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
4621 }
4622
4623 /* We are now committed to emitting code for this function. Do any
4624 preparation, such as emitting abstract debug info for the inline function
4625 before it gets mangled by optimization. */
4626 if (cgraph_function_possibly_inlined_p (current_function_decl))
4627 (*debug_hooks->outlining_inline_function) (current_function_decl);
4628
4629 TREE_ASM_WRITTEN (current_function_decl) = 1;
4630
4631 /* After expanding, the return labels are no longer needed. */
4632 return_label = NULL;
4633 naked_return_label = NULL;
4634
4635 /* After expanding, the tm_restart map is no longer needed. */
4636 if (cfun->gimple_df->tm_restart)
4637 {
4638 htab_delete (cfun->gimple_df->tm_restart);
4639 cfun->gimple_df->tm_restart = NULL;
4640 }
4641
4642 /* Tag the blocks with a depth number so that change_scope can find
4643 the common parent easily. */
4644 set_block_levels (DECL_INITIAL (cfun->decl), 0);
4645 default_rtl_profile ();
4646
4647 timevar_pop (TV_POST_EXPAND);
4648
4649 return 0;
4650 }
4651
4652 struct rtl_opt_pass pass_expand =
4653 {
4654 {
4655 RTL_PASS,
4656 "expand", /* name */
4657 NULL, /* gate */
4658 gimple_expand_cfg, /* execute */
4659 NULL, /* sub */
4660 NULL, /* next */
4661 0, /* static_pass_number */
4662 TV_EXPAND, /* tv_id */
4663 PROP_ssa | PROP_gimple_leh | PROP_cfg
4664 | PROP_gimple_lcx, /* properties_required */
4665 PROP_rtl, /* properties_provided */
4666 PROP_ssa | PROP_trees, /* properties_destroyed */
4667 TODO_verify_ssa | TODO_verify_flow
4668 | TODO_verify_stmts, /* todo_flags_start */
4669 TODO_ggc_collect /* todo_flags_finish */
4670 }
4671 };