1 /* A pass for lowering trees to RTL.
2 Copyright (C) 2004-2013 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
10
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "tree.h"
25 #include "rtl.h"
26 #include "tm_p.h"
27 #include "basic-block.h"
28 #include "function.h"
29 #include "expr.h"
30 #include "langhooks.h"
31 #include "bitmap.h"
32 #include "gimple.h"
33 #include "gimple-ssa.h"
34 #include "cgraph.h"
35 #include "tree-cfg.h"
36 #include "tree-phinodes.h"
37 #include "ssa-iterators.h"
38 #include "tree-ssanames.h"
39 #include "tree-dfa.h"
40 #include "tree-ssa.h"
41 #include "tree-pass.h"
42 #include "except.h"
43 #include "flags.h"
44 #include "diagnostic.h"
45 #include "gimple-pretty-print.h"
46 #include "toplev.h"
47 #include "debug.h"
48 #include "params.h"
49 #include "tree-inline.h"
50 #include "value-prof.h"
51 #include "target.h"
52 #include "tree-outof-ssa.h"
53 #include "sbitmap.h"
54 #include "cfgloop.h"
55 #include "regs.h" /* For reg_renumber. */
56 #include "insn-attr.h" /* For INSN_SCHEDULING. */
57 #include "asan.h"
58 #include "tree-ssa-address.h"
59
60 /* This variable holds information helping the rewriting of SSA trees
61 into RTL. */
62 struct ssaexpand SA;
63
64 /* This variable holds the currently expanded gimple statement for purposes
65 of communicating the profile info to the builtin expanders. */
66 gimple currently_expanding_gimple_stmt;
67
68 static rtx expand_debug_expr (tree);
69
70 /* Return an expression tree corresponding to the RHS of GIMPLE
71 statement STMT. */
72
73 tree
74 gimple_assign_rhs_to_tree (gimple stmt)
75 {
76 tree t;
77 enum gimple_rhs_class grhs_class;
78
79 grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));
80
81 if (grhs_class == GIMPLE_TERNARY_RHS)
82 t = build3 (gimple_assign_rhs_code (stmt),
83 TREE_TYPE (gimple_assign_lhs (stmt)),
84 gimple_assign_rhs1 (stmt),
85 gimple_assign_rhs2 (stmt),
86 gimple_assign_rhs3 (stmt));
87 else if (grhs_class == GIMPLE_BINARY_RHS)
88 t = build2 (gimple_assign_rhs_code (stmt),
89 TREE_TYPE (gimple_assign_lhs (stmt)),
90 gimple_assign_rhs1 (stmt),
91 gimple_assign_rhs2 (stmt));
92 else if (grhs_class == GIMPLE_UNARY_RHS)
93 t = build1 (gimple_assign_rhs_code (stmt),
94 TREE_TYPE (gimple_assign_lhs (stmt)),
95 gimple_assign_rhs1 (stmt));
96 else if (grhs_class == GIMPLE_SINGLE_RHS)
97 {
98 t = gimple_assign_rhs1 (stmt);
99 /* Avoid modifying this tree in place below. */
100 if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
101 && gimple_location (stmt) != EXPR_LOCATION (t))
102 || (gimple_block (stmt)
103 && currently_expanding_to_rtl
104 && EXPR_P (t)))
105 t = copy_node (t);
106 }
107 else
108 gcc_unreachable ();
109
110 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
111 SET_EXPR_LOCATION (t, gimple_location (stmt));
112
113 return t;
114 }
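/* Illustrative example (not part of the pass; the SSA names below are
   hypothetical): for a GIMPLE assignment  a_1 = b_2 + c_3  the RHS class
   is GIMPLE_BINARY_RHS, so the code above rebuilds the tree as
   build2 (PLUS_EXPR, TREE_TYPE (a_1), b_2, c_3) and then copies the
   statement's location onto it.  */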
115
116
117 #ifndef STACK_ALIGNMENT_NEEDED
118 #define STACK_ALIGNMENT_NEEDED 1
119 #endif
120
121 #define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)
122
123 /* Associate declaration T with storage space X. If T is not an
124 SSA name this is exactly SET_DECL_RTL, otherwise make the
125 partition of T associated with X. */
126 static inline void
127 set_rtl (tree t, rtx x)
128 {
129 if (TREE_CODE (t) == SSA_NAME)
130 {
131 SA.partition_to_pseudo[var_to_partition (SA.map, t)] = x;
132 if (x && !MEM_P (x))
133 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (t), x);
134 /* For the benefit of debug information at -O0 (where vartracking
135 doesn't run) record the place also in the base DECL if it's
136 a normal variable (not a parameter). */
137 if (x && x != pc_rtx && TREE_CODE (SSA_NAME_VAR (t)) == VAR_DECL)
138 {
139 tree var = SSA_NAME_VAR (t);
140 /* If we don't yet have something recorded, just record it now. */
141 if (!DECL_RTL_SET_P (var))
142 SET_DECL_RTL (var, x);
143 /* If we have it set already to "multiple places" don't
144 change this. */
145 else if (DECL_RTL (var) == pc_rtx)
146 ;
147 /* If we have something recorded and it's not the same place
148 as we want to record now, we have multiple partitions for the
149 same base variable, with different places. We can't just
150 randomly choose one, hence we have to say that we don't know.
151 This only happens with optimization, and there var-tracking
152 will figure out the right thing. */
153 else if (DECL_RTL (var) != x)
154 SET_DECL_RTL (var, pc_rtx);
155 }
156 }
157 else
158 SET_DECL_RTL (t, x);
159 }
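/* Sketch of the -O0 bookkeeping above, with hypothetical names: if SSA
   names i_1 and i_3 of variable i land in different pseudos r100 and
   r101, the first set_rtl call records r100 in DECL_RTL (i), and the
   second, seeing a different location, downgrades DECL_RTL (i) to the
   pc_rtx sentinel meaning "multiple places"; with optimization,
   var-tracking later recovers the per-range locations.  */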
160
161 /* This structure holds data relevant to one variable that will be
162 placed in a stack slot. */
163 struct stack_var
164 {
165 /* The Variable. */
166 tree decl;
167
168 /* Initially, the size of the variable. Later, the size of the partition,
169 if this variable becomes its partition's representative. */
170 HOST_WIDE_INT size;
171
172 /* The *byte* alignment required for this variable. Or, as with the
173 size, the alignment for this partition. */
174 unsigned int alignb;
175
176 /* The partition representative. */
177 size_t representative;
178
179 /* The next stack variable in the partition, or EOC. */
180 size_t next;
181
182 /* The numbers of conflicting stack variables. */
183 bitmap conflicts;
184 };
185
186 #define EOC ((size_t)-1)
187
188 /* We have an array of such objects while deciding allocation. */
189 static struct stack_var *stack_vars;
190 static size_t stack_vars_alloc;
191 static size_t stack_vars_num;
192 static struct pointer_map_t *decl_to_stack_part;
193
194 /* Conflict bitmaps go on this obstack. This allows us to destroy
195 all of them in one big sweep. */
196 static bitmap_obstack stack_var_bitmap_obstack;
197
198 /* An array of indices such that stack_vars[stack_vars_sorted[i]].size
199 is non-increasing. */
200 static size_t *stack_vars_sorted;
201
202 /* The phase of the stack frame. This is the known misalignment of
203 virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY. That is,
204 (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0. */
205 static int frame_phase;
206
207 /* Used during expand_used_vars to remember if we saw any decls for
208 which we'd like to enable stack smashing protection. */
209 static bool has_protected_decls;
210
211 /* Used during expand_used_vars. Remember if we saw a character buffer
212 smaller than our cutoff threshold. Used for -Wstack-protector. */
213 static bool has_short_buffer;
214
215 /* Compute the byte alignment to use for DECL. Ignore alignment
216 we can't honor given the expected alignment of the stack boundary. */
217
218 static unsigned int
219 align_local_variable (tree decl)
220 {
221 unsigned int align = LOCAL_DECL_ALIGNMENT (decl);
222 DECL_ALIGN (decl) = align;
223 return align / BITS_PER_UNIT;
224 }
225
226 /* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
227 Return the frame offset. */
228
229 static HOST_WIDE_INT
230 alloc_stack_frame_space (HOST_WIDE_INT size, unsigned HOST_WIDE_INT align)
231 {
232 HOST_WIDE_INT offset, new_frame_offset;
233
234 new_frame_offset = frame_offset;
235 if (FRAME_GROWS_DOWNWARD)
236 {
237 new_frame_offset -= size + frame_phase;
238 new_frame_offset &= -align;
239 new_frame_offset += frame_phase;
240 offset = new_frame_offset;
241 }
242 else
243 {
244 new_frame_offset -= frame_phase;
245 new_frame_offset += align - 1;
246 new_frame_offset &= -align;
247 new_frame_offset += frame_phase;
248 offset = new_frame_offset;
249 new_frame_offset += size;
250 }
251 frame_offset = new_frame_offset;
252
253 if (frame_offset_overflow (frame_offset, cfun->decl))
254 frame_offset = offset = 0;
255
256 return offset;
257 }
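/* Worked example with illustrative numbers: on a FRAME_GROWS_DOWNWARD
   target with frame_phase == 0, frame_offset == -16, size == 12 and
   align == 8, the code above computes -16 - 12 == -28, then
   -28 & -8 == -32, so both the new frame_offset and the returned
   offset are -32 and the object occupies frame bytes [-32, -20).  */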
258
259 /* Accumulate DECL into STACK_VARS. */
260
261 static void
262 add_stack_var (tree decl)
263 {
264 struct stack_var *v;
265
266 if (stack_vars_num >= stack_vars_alloc)
267 {
268 if (stack_vars_alloc)
269 stack_vars_alloc = stack_vars_alloc * 3 / 2;
270 else
271 stack_vars_alloc = 32;
272 stack_vars
273 = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
274 }
275 if (!decl_to_stack_part)
276 decl_to_stack_part = pointer_map_create ();
277
278 v = &stack_vars[stack_vars_num];
279 * (size_t *)pointer_map_insert (decl_to_stack_part, decl) = stack_vars_num;
280
281 v->decl = decl;
282 v->size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (decl)), 1);
283 /* Ensure that all variables have size, so that &a != &b for any two
284 variables that are simultaneously live. */
285 if (v->size == 0)
286 v->size = 1;
287 v->alignb = align_local_variable (SSAVAR (decl));
288 /* An alignment of zero can mightily confuse us later. */
289 gcc_assert (v->alignb != 0);
290
291 /* All variables are initially in their own partition. */
292 v->representative = stack_vars_num;
293 v->next = EOC;
294
295 /* All variables initially conflict with no other. */
296 v->conflicts = NULL;
297
298 /* Ensure that this decl doesn't get put onto the list twice. */
299 set_rtl (decl, pc_rtx);
300
301 stack_vars_num++;
302 }
303
304 /* Make the decls associated with luids X and Y conflict. */
305
306 static void
307 add_stack_var_conflict (size_t x, size_t y)
308 {
309 struct stack_var *a = &stack_vars[x];
310 struct stack_var *b = &stack_vars[y];
311 if (!a->conflicts)
312 a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
313 if (!b->conflicts)
314 b->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
315 bitmap_set_bit (a->conflicts, y);
316 bitmap_set_bit (b->conflicts, x);
317 }
318
319 /* Check whether the decls associated with luids X and Y conflict. */
320
321 static bool
322 stack_var_conflict_p (size_t x, size_t y)
323 {
324 struct stack_var *a = &stack_vars[x];
325 struct stack_var *b = &stack_vars[y];
326 if (x == y)
327 return false;
328 /* Partitions containing an SSA name result from gimple registers
329 with things like unsupported modes. They are top-level and
330 hence conflict with everything else. */
331 if (TREE_CODE (a->decl) == SSA_NAME || TREE_CODE (b->decl) == SSA_NAME)
332 return true;
333
334 if (!a->conflicts || !b->conflicts)
335 return false;
336 return bitmap_bit_p (a->conflicts, y);
337 }
338
339 /* Callback for walk_stmt_ops. If OP is a decl touched by add_stack_var
340 enter its partition number into bitmap DATA. */
341
342 static bool
343 visit_op (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
344 {
345 bitmap active = (bitmap)data;
346 op = get_base_address (op);
347 if (op
348 && DECL_P (op)
349 && DECL_RTL_IF_SET (op) == pc_rtx)
350 {
351 size_t *v = (size_t *) pointer_map_contains (decl_to_stack_part, op);
352 if (v)
353 bitmap_set_bit (active, *v);
354 }
355 return false;
356 }
357
358 /* Callback for walk_stmt_ops. If OP is a decl touched by add_stack_var
359 record conflicts between it and all currently active other partitions
360 from bitmap DATA. */
361
362 static bool
363 visit_conflict (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
364 {
365 bitmap active = (bitmap)data;
366 op = get_base_address (op);
367 if (op
368 && DECL_P (op)
369 && DECL_RTL_IF_SET (op) == pc_rtx)
370 {
371 size_t *v =
372 (size_t *) pointer_map_contains (decl_to_stack_part, op);
373 if (v && bitmap_set_bit (active, *v))
374 {
375 size_t num = *v;
376 bitmap_iterator bi;
377 unsigned i;
378 gcc_assert (num < stack_vars_num);
379 EXECUTE_IF_SET_IN_BITMAP (active, 0, i, bi)
380 add_stack_var_conflict (num, i);
381 }
382 }
383 return false;
384 }
385
386 /* Helper routine for add_scope_conflicts, calculating the active partitions
387 at the end of BB, leaving the result in WORK. We're called to generate
388 conflicts when FOR_CONFLICT is true, otherwise we're just tracking
389 liveness. */
390
391 static void
392 add_scope_conflicts_1 (basic_block bb, bitmap work, bool for_conflict)
393 {
394 edge e;
395 edge_iterator ei;
396 gimple_stmt_iterator gsi;
397 bool (*visit)(gimple, tree, void *);
398
399 bitmap_clear (work);
400 FOR_EACH_EDGE (e, ei, bb->preds)
401 bitmap_ior_into (work, (bitmap)e->src->aux);
402
403 visit = visit_op;
404
405 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
406 {
407 gimple stmt = gsi_stmt (gsi);
408 walk_stmt_load_store_addr_ops (stmt, work, NULL, NULL, visit);
409 }
410 for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
411 {
412 gimple stmt = gsi_stmt (gsi);
413
414 if (gimple_clobber_p (stmt))
415 {
416 tree lhs = gimple_assign_lhs (stmt);
417 size_t *v;
418 /* Nested function lowering might introduce LHSs
419 that are COMPONENT_REFs. */
420 if (TREE_CODE (lhs) != VAR_DECL)
421 continue;
422 if (DECL_RTL_IF_SET (lhs) == pc_rtx
423 && (v = (size_t *)
424 pointer_map_contains (decl_to_stack_part, lhs)))
425 bitmap_clear_bit (work, *v);
426 }
427 else if (!is_gimple_debug (stmt))
428 {
429 if (for_conflict
430 && visit == visit_op)
431 {
432 /* If this is the first real instruction in this BB we need
433 to add conflicts for everything live at this point now.
434 Unlike classical liveness for named objects we can't
435 rely on seeing a def/use of the names we're interested in.
436 There might merely be indirect loads/stores. We'd not add any
437 conflicts for such partitions. */
438 bitmap_iterator bi;
439 unsigned i;
440 EXECUTE_IF_SET_IN_BITMAP (work, 0, i, bi)
441 {
442 struct stack_var *a = &stack_vars[i];
443 if (!a->conflicts)
444 a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
445 bitmap_ior_into (a->conflicts, work);
446 }
447 visit = visit_conflict;
448 }
449 walk_stmt_load_store_addr_ops (stmt, work, visit, visit, visit);
450 }
451 }
452 }
453
454 /* Generate stack partition conflicts between all partitions that are
455 simultaneously live. */
456
457 static void
458 add_scope_conflicts (void)
459 {
460 basic_block bb;
461 bool changed;
462 bitmap work = BITMAP_ALLOC (NULL);
463 int *rpo;
464 int n_bbs;
465
466 /* We approximate the live range of a stack variable by taking the first
467 mention of its name as starting point(s), and by the end-of-scope
468 death clobber added by gimplify as ending point(s) of the range.
469 This overapproximates in the case where we, for instance, moved an
470 address-taken operation upward without also moving a dereference of it.
471 But it's conservatively correct, as a variable can never hold values
472 before its name is mentioned at least once.
473
474 We then do a mostly classical bitmap liveness algorithm. */
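     Illustrative sketch on a hypothetical CFG: in a diamond
     bb1 -> {bb2, bb3} -> bb4 where variable A is first mentioned in bb2
     and B in bb3, the fixpoint loop below propagates both into bb4's
     live-in set, so the final add_scope_conflicts_1 walk over bb4
     records an A/B conflict and the two never share a stack slot.  */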
475
476 FOR_ALL_BB (bb)
477 bb->aux = BITMAP_ALLOC (&stack_var_bitmap_obstack);
478
479 rpo = XNEWVEC (int, last_basic_block);
480 n_bbs = pre_and_rev_post_order_compute (NULL, rpo, false);
481
482 changed = true;
483 while (changed)
484 {
485 int i;
486 changed = false;
487 for (i = 0; i < n_bbs; i++)
488 {
489 bitmap active;
490 bb = BASIC_BLOCK (rpo[i]);
491 active = (bitmap)bb->aux;
492 add_scope_conflicts_1 (bb, work, false);
493 if (bitmap_ior_into (active, work))
494 changed = true;
495 }
496 }
497
498 FOR_EACH_BB (bb)
499 add_scope_conflicts_1 (bb, work, true);
500
501 free (rpo);
502 BITMAP_FREE (work);
503 FOR_ALL_BB (bb)
504 BITMAP_FREE (bb->aux);
505 }
506
507 /* A subroutine of partition_stack_vars. A comparison function for qsort,
508 sorting an array of indices by the properties of the object. */
509
510 static int
511 stack_var_cmp (const void *a, const void *b)
512 {
513 size_t ia = *(const size_t *)a;
514 size_t ib = *(const size_t *)b;
515 unsigned int aligna = stack_vars[ia].alignb;
516 unsigned int alignb = stack_vars[ib].alignb;
517 HOST_WIDE_INT sizea = stack_vars[ia].size;
518 HOST_WIDE_INT sizeb = stack_vars[ib].size;
519 tree decla = stack_vars[ia].decl;
520 tree declb = stack_vars[ib].decl;
521 bool largea, largeb;
522 unsigned int uida, uidb;
523
524 /* Primary compare on "large" alignment. Large comes first. */
525 largea = (aligna * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
526 largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
527 if (largea != largeb)
528 return (int)largeb - (int)largea;
529
530 /* Secondary compare on size, decreasing. */
531 if (sizea > sizeb)
532 return -1;
533 if (sizea < sizeb)
534 return 1;
535
536 /* Tertiary compare on true alignment, decreasing. */
537 if (aligna < alignb)
538 return -1;
539 if (aligna > alignb)
540 return 1;
541
542 /* Final compare on ID for sort stability, increasing.
543 Two SSA names are compared by their version, SSA names come before
544 non-SSA names, and two normal decls are compared by their DECL_UID. */
545 if (TREE_CODE (decla) == SSA_NAME)
546 {
547 if (TREE_CODE (declb) == SSA_NAME)
548 uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
549 else
550 return -1;
551 }
552 else if (TREE_CODE (declb) == SSA_NAME)
553 return 1;
554 else
555 uida = DECL_UID (decla), uidb = DECL_UID (declb);
556 if (uida < uidb)
557 return 1;
558 if (uida > uidb)
559 return -1;
560 return 0;
561 }
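/* Resulting order, illustratively: any variable whose required alignment
   exceeds MAX_SUPPORTED_STACK_ALIGNMENT sorts before the "small"-aligned
   ones; among the rest a 32-byte object precedes an 8-byte one, equal
   sizes are ordered by decreasing alignment, and remaining ties fall
   back to SSA version or DECL_UID so the sort is stable.  */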
562
563
564 /* If the points-to solution *PT points to variables that are in a partition
565 together with other variables, add all partition members to the pointed-to
566 variables bitmap. */
567
568 static void
569 add_partitioned_vars_to_ptset (struct pt_solution *pt,
570 struct pointer_map_t *decls_to_partitions,
571 struct pointer_set_t *visited, bitmap temp)
572 {
573 bitmap_iterator bi;
574 unsigned i;
575 bitmap *part;
576
577 if (pt->anything
578 || pt->vars == NULL
579 /* The pointed-to vars bitmap is shared; it is enough to
580 visit it once. */
581 || pointer_set_insert (visited, pt->vars))
582 return;
583
584 bitmap_clear (temp);
585
586 /* By using a temporary bitmap to store all members of the partitions
587 we have to add, we make sure to visit each of the partitions only
588 once. */
589 EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
590 if ((!temp
591 || !bitmap_bit_p (temp, i))
592 && (part = (bitmap *) pointer_map_contains (decls_to_partitions,
593 (void *)(size_t) i)))
594 bitmap_ior_into (temp, *part);
595 if (!bitmap_empty_p (temp))
596 bitmap_ior_into (pt->vars, temp);
597 }
598
599 /* Update points-to sets based on partition info, so we can use them on RTL.
600 The bitmaps representing stack partitions will be saved until expand,
601 where partitioned decls used as bases in memory expressions will be
602 rewritten. */
603
604 static void
605 update_alias_info_with_stack_vars (void)
606 {
607 struct pointer_map_t *decls_to_partitions = NULL;
608 size_t i, j;
609 tree var = NULL_TREE;
610
611 for (i = 0; i < stack_vars_num; i++)
612 {
613 bitmap part = NULL;
614 tree name;
615 struct ptr_info_def *pi;
616
617 /* Not interested in partitions with a single variable. */
618 if (stack_vars[i].representative != i
619 || stack_vars[i].next == EOC)
620 continue;
621
622 if (!decls_to_partitions)
623 {
624 decls_to_partitions = pointer_map_create ();
625 cfun->gimple_df->decls_to_pointers = pointer_map_create ();
626 }
627
628 /* Create an SSA_NAME that points to the partition for use
629 as base during alias-oracle queries on RTL for bases that
630 have been partitioned. */
631 if (var == NULL_TREE)
632 var = create_tmp_var (ptr_type_node, NULL);
633 name = make_ssa_name (var, NULL);
634
635 /* Create bitmaps representing partitions. They will be used for
636 points-to sets later, so use GGC alloc. */
637 part = BITMAP_GGC_ALLOC ();
638 for (j = i; j != EOC; j = stack_vars[j].next)
639 {
640 tree decl = stack_vars[j].decl;
641 unsigned int uid = DECL_PT_UID (decl);
642 bitmap_set_bit (part, uid);
643 *((bitmap *) pointer_map_insert (decls_to_partitions,
644 (void *)(size_t) uid)) = part;
645 *((tree *) pointer_map_insert (cfun->gimple_df->decls_to_pointers,
646 decl)) = name;
647 if (TREE_ADDRESSABLE (decl))
648 TREE_ADDRESSABLE (name) = 1;
649 }
650
651 /* Make the SSA name point to all partition members. */
652 pi = get_ptr_info (name);
653 pt_solution_set (&pi->pt, part, false);
654 }
655
656 /* Make all points-to sets that contain one member of a partition
657 contain all members of the partition. */
658 if (decls_to_partitions)
659 {
660 unsigned i;
661 struct pointer_set_t *visited = pointer_set_create ();
662 bitmap temp = BITMAP_ALLOC (&stack_var_bitmap_obstack);
663
664 for (i = 1; i < num_ssa_names; i++)
665 {
666 tree name = ssa_name (i);
667 struct ptr_info_def *pi;
668
669 if (name
670 && POINTER_TYPE_P (TREE_TYPE (name))
671 && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
672 add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
673 visited, temp);
674 }
675
676 add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
677 decls_to_partitions, visited, temp);
678
679 pointer_set_destroy (visited);
680 pointer_map_destroy (decls_to_partitions);
681 BITMAP_FREE (temp);
682 }
683 }
684
685 /* A subroutine of partition_stack_vars. The UNION portion of a UNION/FIND
686 partitioning algorithm. Partitions A and B are known to be non-conflicting.
687 Merge them into a single partition A. */
688
689 static void
690 union_stack_vars (size_t a, size_t b)
691 {
692 struct stack_var *vb = &stack_vars[b];
693 bitmap_iterator bi;
694 unsigned u;
695
696 gcc_assert (stack_vars[b].next == EOC);
697 /* Add B to A's partition. */
698 stack_vars[b].next = stack_vars[a].next;
699 stack_vars[b].representative = a;
700 stack_vars[a].next = b;
701
702 /* Update the required alignment of partition A to account for B. */
703 if (stack_vars[a].alignb < stack_vars[b].alignb)
704 stack_vars[a].alignb = stack_vars[b].alignb;
705
706 /* Update the interference graph and merge the conflicts. */
707 if (vb->conflicts)
708 {
709 EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
710 add_stack_var_conflict (a, stack_vars[u].representative);
711 BITMAP_FREE (vb->conflicts);
712 }
713 }
714
715 /* A subroutine of expand_used_vars. Binpack the variables into
716 partitions constrained by the interference graph. The overall
717 algorithm used is as follows:
718
719 Sort the objects by size in descending order.
720 For each object A {
721 S = size(A)
722 O = 0
723 loop {
724 Look for the largest non-conflicting object B with size <= S.
725 UNION (A, B)
726 }
727 }
728 */
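/* Worked example with illustrative sizes: given objects of 32, 16, 16
   and 8 bytes where only the two 16-byte objects conflict, the loop
   below unions one 16-byte object and the 8-byte object into the
   32-byte representative (they all share its slot), leaving the other
   16-byte object alone, so the frame needs 32 + 16 bytes rather
   than 72.  */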
729
730 static void
731 partition_stack_vars (void)
732 {
733 size_t si, sj, n = stack_vars_num;
734
735 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
736 for (si = 0; si < n; ++si)
737 stack_vars_sorted[si] = si;
738
739 if (n == 1)
740 return;
741
742 qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_cmp);
743
744 for (si = 0; si < n; ++si)
745 {
746 size_t i = stack_vars_sorted[si];
747 unsigned int ialign = stack_vars[i].alignb;
748 HOST_WIDE_INT isize = stack_vars[i].size;
749
750 /* Ignore objects that aren't partition representatives. If we
751 see a var that is not a partition representative, it must
752 have been merged earlier. */
753 if (stack_vars[i].representative != i)
754 continue;
755
756 for (sj = si + 1; sj < n; ++sj)
757 {
758 size_t j = stack_vars_sorted[sj];
759 unsigned int jalign = stack_vars[j].alignb;
760 HOST_WIDE_INT jsize = stack_vars[j].size;
761
762 /* Ignore objects that aren't partition representatives. */
763 if (stack_vars[j].representative != j)
764 continue;
765
766 /* Do not mix objects of "small" (supported) alignment
767 and "large" (unsupported) alignment. */
768 if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
769 != (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
770 break;
771
772 /* For Address Sanitizer do not mix objects with different
773 sizes, as the shorter vars wouldn't be adequately protected.
774 Don't do that for "large" (unsupported) alignment objects;
775 those aren't protected anyway. */
776 if ((flag_sanitize & SANITIZE_ADDRESS) && isize != jsize
777 && ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
778 break;
779
780 /* Ignore conflicting objects. */
781 if (stack_var_conflict_p (i, j))
782 continue;
783
784 /* UNION the objects, placing J at OFFSET. */
785 union_stack_vars (i, j);
786 }
787 }
788
789 update_alias_info_with_stack_vars ();
790 }
791
792 /* A debugging aid for expand_used_vars. Dump the generated partitions. */
793
794 static void
795 dump_stack_var_partition (void)
796 {
797 size_t si, i, j, n = stack_vars_num;
798
799 for (si = 0; si < n; ++si)
800 {
801 i = stack_vars_sorted[si];
802
803 /* Skip variables that aren't partition representatives, for now. */
804 if (stack_vars[i].representative != i)
805 continue;
806
807 fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
808 " align %u\n", (unsigned long) i, stack_vars[i].size,
809 stack_vars[i].alignb);
810
811 for (j = i; j != EOC; j = stack_vars[j].next)
812 {
813 fputc ('\t', dump_file);
814 print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
815 }
816 fputc ('\n', dump_file);
817 }
818 }
819
820 /* Assign rtl to DECL at BASE + OFFSET. */
821
822 static void
823 expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
824 HOST_WIDE_INT offset)
825 {
826 unsigned align;
827 rtx x;
828
829 /* If this fails, we've overflowed the stack frame. Error nicely? */
830 gcc_assert (offset == trunc_int_for_mode (offset, Pmode));
831
832 x = plus_constant (Pmode, base, offset);
833 x = gen_rtx_MEM (DECL_MODE (SSAVAR (decl)), x);
834
835 if (TREE_CODE (decl) != SSA_NAME)
836 {
837 /* Set the alignment we actually gave this decl if it isn't an SSA name.
838 If it is, we generate stack slots only accidentally, so it isn't as
839 important; we'll simply use the alignment that is already set. */
840 if (base == virtual_stack_vars_rtx)
841 offset -= frame_phase;
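      /* offset & -offset isolates the lowest set bit of OFFSET, i.e. the
	 largest power of two dividing it; e.g. an offset of 24 (or -24)
	 yields 8 bytes, which is then scaled to bits below (illustrative
	 numbers).  */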
842 align = offset & -offset;
843 align *= BITS_PER_UNIT;
844 if (align == 0 || align > base_align)
845 align = base_align;
846
847 /* One would think that we could assert that we're not decreasing
848 alignment here, but (at least) the i386 port does exactly this
849 via the MINIMUM_ALIGNMENT hook. */
850
851 DECL_ALIGN (decl) = align;
852 DECL_USER_ALIGN (decl) = 0;
853 }
854
855 set_mem_attributes (x, SSAVAR (decl), true);
856 set_rtl (decl, x);
857 }
858
859 struct stack_vars_data
860 {
861 /* Vector of offset pairs: each pair is the end of some padding followed
862 by the start of the padding that needs Address Sanitizer protection.
863 The vector is in reverse order; the highest offset pairs come first. */
864 vec<HOST_WIDE_INT> asan_vec;
865
866 /* Vector of partition representative decls in between the paddings. */
867 vec<tree> asan_decl_vec;
868 };
869
870 /* A subroutine of expand_used_vars. Give each partition representative
871 a unique location within the stack frame. Update each partition member
872 with that location. */
873
874 static void
875 expand_stack_vars (bool (*pred) (size_t), struct stack_vars_data *data)
876 {
877 size_t si, i, j, n = stack_vars_num;
878 HOST_WIDE_INT large_size = 0, large_alloc = 0;
879 rtx large_base = NULL;
880 unsigned large_align = 0;
881 tree decl;
882
883 /* Determine if there are any variables requiring "large" alignment.
884 Since these are dynamically allocated, we only process these if
885 no predicate is involved. */
886 large_align = stack_vars[stack_vars_sorted[0]].alignb * BITS_PER_UNIT;
887 if (pred == NULL && large_align > MAX_SUPPORTED_STACK_ALIGNMENT)
888 {
889 /* Find the total size of these variables. */
890 for (si = 0; si < n; ++si)
891 {
892 unsigned alignb;
893
894 i = stack_vars_sorted[si];
895 alignb = stack_vars[i].alignb;
896
897 /* Stop when we get to the first decl with "small" alignment. */
898 if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
899 break;
900
901 /* Skip variables that aren't partition representatives. */
902 if (stack_vars[i].representative != i)
903 continue;
904
905 /* Skip variables that have already had rtl assigned. See also
906 add_stack_var where we perpetrate this pc_rtx hack. */
907 decl = stack_vars[i].decl;
908 if ((TREE_CODE (decl) == SSA_NAME
909 ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
910 : DECL_RTL (decl)) != pc_rtx)
911 continue;
912
913 large_size += alignb - 1;
914 large_size &= -(HOST_WIDE_INT)alignb;
915 large_size += stack_vars[i].size;
916 }
917
918 /* If there were any, allocate space. */
919 if (large_size > 0)
920 large_base = allocate_dynamic_stack_space (GEN_INT (large_size), 0,
921 large_align, true);
922 }
923
924 for (si = 0; si < n; ++si)
925 {
926 rtx base;
927 unsigned base_align, alignb;
928 HOST_WIDE_INT offset;
929
930 i = stack_vars_sorted[si];
931
932 /* Skip variables that aren't partition representatives, for now. */
933 if (stack_vars[i].representative != i)
934 continue;
935
936 /* Skip variables that have already had rtl assigned. See also
937 add_stack_var where we perpetrate this pc_rtx hack. */
938 decl = stack_vars[i].decl;
939 if ((TREE_CODE (decl) == SSA_NAME
940 ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
941 : DECL_RTL (decl)) != pc_rtx)
942 continue;
943
944 /* Check the predicate to see whether this variable should be
945 allocated in this pass. */
946 if (pred && !pred (i))
947 continue;
948
949 alignb = stack_vars[i].alignb;
950 if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
951 {
952 if ((flag_sanitize & SANITIZE_ADDRESS) && pred)
953 {
954 HOST_WIDE_INT prev_offset = frame_offset;
955 tree repr_decl = NULL_TREE;
956
957 offset
958 = alloc_stack_frame_space (stack_vars[i].size
959 + ASAN_RED_ZONE_SIZE,
960 MAX (alignb, ASAN_RED_ZONE_SIZE));
961 data->asan_vec.safe_push (prev_offset);
962 data->asan_vec.safe_push (offset + stack_vars[i].size);
963 /* Find best representative of the partition.
964 Prefer those with DECL_NAME, even better
965 satisfying asan_protect_stack_decl predicate. */
966 for (j = i; j != EOC; j = stack_vars[j].next)
967 if (asan_protect_stack_decl (stack_vars[j].decl)
968 && DECL_NAME (stack_vars[j].decl))
969 {
970 repr_decl = stack_vars[j].decl;
971 break;
972 }
973 else if (repr_decl == NULL_TREE
974 && DECL_P (stack_vars[j].decl)
975 && DECL_NAME (stack_vars[j].decl))
976 repr_decl = stack_vars[j].decl;
977 if (repr_decl == NULL_TREE)
978 repr_decl = stack_vars[i].decl;
979 data->asan_decl_vec.safe_push (repr_decl);
980 }
981 else
982 offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
983 base = virtual_stack_vars_rtx;
984 base_align = crtl->max_used_stack_slot_alignment;
985 }
986 else
987 {
988 /* Large alignment is only processed in the last pass. */
989 if (pred)
990 continue;
991 gcc_assert (large_base != NULL);
992
993 large_alloc += alignb - 1;
994 large_alloc &= -(HOST_WIDE_INT)alignb;
995 offset = large_alloc;
996 large_alloc += stack_vars[i].size;
997
998 base = large_base;
999 base_align = large_align;
1000 }
1001
1002 /* Create rtl for each variable based on their location within the
1003 partition. */
1004 for (j = i; j != EOC; j = stack_vars[j].next)
1005 {
1006 expand_one_stack_var_at (stack_vars[j].decl,
1007 base, base_align,
1008 offset);
1009 }
1010 }
1011
1012 gcc_assert (large_alloc == large_size);
1013 }
1014
1015 /* Take into account all sizes of partitions and reset DECL_RTLs. */
1016 static HOST_WIDE_INT
1017 account_stack_vars (void)
1018 {
1019 size_t si, j, i, n = stack_vars_num;
1020 HOST_WIDE_INT size = 0;
1021
1022 for (si = 0; si < n; ++si)
1023 {
1024 i = stack_vars_sorted[si];
1025
1026 /* Skip variables that aren't partition representatives, for now. */
1027 if (stack_vars[i].representative != i)
1028 continue;
1029
1030 size += stack_vars[i].size;
1031 for (j = i; j != EOC; j = stack_vars[j].next)
1032 set_rtl (stack_vars[j].decl, NULL);
1033 }
1034 return size;
1035 }
1036
1037 /* A subroutine of expand_one_var. Called to immediately assign rtl
1038 to a variable to be allocated in the stack frame. */
1039
1040 static void
1041 expand_one_stack_var (tree var)
1042 {
1043 HOST_WIDE_INT size, offset;
1044 unsigned byte_align;
1045
1046 size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (var)), 1);
1047 byte_align = align_local_variable (SSAVAR (var));
1048
1049 /* We handle highly aligned variables in expand_stack_vars. */
1050 gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT);
1051
1052 offset = alloc_stack_frame_space (size, byte_align);
1053
1054 expand_one_stack_var_at (var, virtual_stack_vars_rtx,
1055 crtl->max_used_stack_slot_alignment, offset);
1056 }
1057
1058 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
1059 that will reside in a hard register. */
1060
1061 static void
1062 expand_one_hard_reg_var (tree var)
1063 {
1064 rest_of_decl_compilation (var, 0, 0);
1065 }
1066
1067 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
1068 that will reside in a pseudo register. */
1069
1070 static void
1071 expand_one_register_var (tree var)
1072 {
1073 tree decl = SSAVAR (var);
1074 tree type = TREE_TYPE (decl);
1075 enum machine_mode reg_mode = promote_decl_mode (decl, NULL);
1076 rtx x = gen_reg_rtx (reg_mode);
1077
1078 set_rtl (var, x);
1079
1080 /* Note if the object is a user variable. */
1081 if (!DECL_ARTIFICIAL (decl))
1082 mark_user_reg (x);
1083
1084 if (POINTER_TYPE_P (type))
1085 mark_reg_pointer (x, get_pointer_alignment (var));
1086 }
1087
1088 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL that
1089 has some associated error, e.g. its type is error-mark. We just need
1090 to pick something that won't crash the rest of the compiler. */
1091
1092 static void
1093 expand_one_error_var (tree var)
1094 {
1095 enum machine_mode mode = DECL_MODE (var);
1096 rtx x;
1097
1098 if (mode == BLKmode)
1099 x = gen_rtx_MEM (BLKmode, const0_rtx);
1100 else if (mode == VOIDmode)
1101 x = const0_rtx;
1102 else
1103 x = gen_reg_rtx (mode);
1104
1105 SET_DECL_RTL (var, x);
1106 }
1107
1108 /* A subroutine of expand_one_var. VAR is a variable that will be
1109 allocated to the local stack frame. Return true if we wish to
1110 add VAR to STACK_VARS so that it will be coalesced with other
1111 variables. Return false to allocate VAR immediately.
1112
1113 This function is used to reduce the number of variables considered
1114 for coalescing, which reduces the size of the quadratic problem. */
1115
1116 static bool
1117 defer_stack_allocation (tree var, bool toplevel)
1118 {
1119 /* If stack protection is enabled, *all* stack variables must be deferred,
1120 so that we can re-order the strings to the top of the frame.
1121 Similarly for Address Sanitizer. */
1122 if (flag_stack_protect || (flag_sanitize & SANITIZE_ADDRESS))
1123 return true;
1124
1125 /* We handle "large" alignment via dynamic allocation. We want to handle
1126 this extra complication in only one place, so defer them. */
1127 if (DECL_ALIGN (var) > MAX_SUPPORTED_STACK_ALIGNMENT)
1128 return true;
1129
1130 /* Variables in the outermost scope automatically conflict with
1131 every other variable. The only reason to want to defer them
1132 at all is that, after sorting, we can more efficiently pack
1133 small variables in the stack frame. Continue to defer at -O2. */
1134 if (toplevel && optimize < 2)
1135 return false;
1136
1137 /* Without optimization, *most* variables are allocated from the
1138 stack, which makes the quadratic problem large exactly when we
1139 want compilation to proceed as quickly as possible. On the
1140 other hand, we don't want the function's stack frame size to
1141 get completely out of hand. So we avoid adding scalars and
1142 "small" aggregates to the list at all. */
1143 if (optimize == 0
1144 && (tree_low_cst (DECL_SIZE_UNIT (var), 1)
1145 < PARAM_VALUE (PARAM_MIN_SIZE_FOR_STACK_SHARING)))
1146 return false;
1147
1148 return true;
1149 }
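/* For instance (hypothetical decls): at -O0 a block-scope 4-byte int
   falls under PARAM_MIN_SIZE_FOR_STACK_SHARING and is allocated
   immediately, while a block-scope 1 KB buffer is deferred so it can
   later share a frame slot with non-conflicting buffers from disjoint
   scopes.  */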
1150
1151 /* A subroutine of expand_used_vars. Expand one variable according to
1152 its flavor. Variables to be placed on the stack are not actually
1153 expanded yet, merely recorded.
1154 When REALLY_EXPAND is false, only add stack values to be allocated.
1155 Return the stack usage this variable is supposed to take.
1156 */
1157
1158 static HOST_WIDE_INT
1159 expand_one_var (tree var, bool toplevel, bool really_expand)
1160 {
1161 unsigned int align = BITS_PER_UNIT;
1162 tree origvar = var;
1163
1164 var = SSAVAR (var);
1165
1166 if (TREE_TYPE (var) != error_mark_node && TREE_CODE (var) == VAR_DECL)
1167 {
1168 /* Because we don't know if VAR will be in a register or on the stack,
1169 we conservatively assume it will be on the stack even if VAR is
1170 eventually put into a register after the RA pass. For non-automatic
1171 variables, which won't be on stack, we collect alignment of
1172 type and ignore user specified alignment. */
1173 if (TREE_STATIC (var) || DECL_EXTERNAL (var))
1174 align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
1175 TYPE_MODE (TREE_TYPE (var)),
1176 TYPE_ALIGN (TREE_TYPE (var)));
1177 else if (DECL_HAS_VALUE_EXPR_P (var)
1178 || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var))))
1179 /* Don't consider debug only variables with DECL_HAS_VALUE_EXPR_P set
1180 or variables which were assigned a stack slot already by
1181 expand_one_stack_var_at - in the latter case DECL_ALIGN has been
1182 changed according to the offset chosen for it. */
1183 align = crtl->stack_alignment_estimated;
1184 else
1185 align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));
1186
1187 /* If the variable alignment is very large we'll dynamically allocate
1188 it, which means that the in-frame portion is just a pointer. */
1189 if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1190 align = POINTER_SIZE;
1191 }
1192
1193 if (SUPPORTS_STACK_ALIGNMENT
1194 && crtl->stack_alignment_estimated < align)
1195 {
1196 /* stack_alignment_estimated shouldn't change after stack
1197 realign decision is made. */
1198 gcc_assert (!crtl->stack_realign_processed);
1199 crtl->stack_alignment_estimated = align;
1200 }
1201
1202 /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
1203 So here we only make sure stack_alignment_needed >= align. */
1204 if (crtl->stack_alignment_needed < align)
1205 crtl->stack_alignment_needed = align;
1206 if (crtl->max_used_stack_slot_alignment < align)
1207 crtl->max_used_stack_slot_alignment = align;
1208
1209 if (TREE_CODE (origvar) == SSA_NAME)
1210 {
1211 gcc_assert (TREE_CODE (var) != VAR_DECL
1212 || (!DECL_EXTERNAL (var)
1213 && !DECL_HAS_VALUE_EXPR_P (var)
1214 && !TREE_STATIC (var)
1215 && TREE_TYPE (var) != error_mark_node
1216 && !DECL_HARD_REGISTER (var)
1217 && really_expand));
1218 }
1219 if (TREE_CODE (var) != VAR_DECL && TREE_CODE (origvar) != SSA_NAME)
1220 ;
1221 else if (DECL_EXTERNAL (var))
1222 ;
1223 else if (DECL_HAS_VALUE_EXPR_P (var))
1224 ;
1225 else if (TREE_STATIC (var))
1226 ;
1227 else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
1228 ;
1229 else if (TREE_TYPE (var) == error_mark_node)
1230 {
1231 if (really_expand)
1232 expand_one_error_var (var);
1233 }
1234 else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
1235 {
1236 if (really_expand)
1237 expand_one_hard_reg_var (var);
1238 }
1239 else if (use_register_for_decl (var))
1240 {
1241 if (really_expand)
1242 expand_one_register_var (origvar);
1243 }
1244 else if (! valid_constant_size_p (DECL_SIZE_UNIT (var)))
1245 {
1246 /* Reject variables which cover more than half of the address-space. */
1247 if (really_expand)
1248 {
1249 error ("size of variable %q+D is too large", var);
1250 expand_one_error_var (var);
1251 }
1252 }
1253 else if (defer_stack_allocation (var, toplevel))
1254 add_stack_var (origvar);
1255 else
1256 {
1257 if (really_expand)
1258 expand_one_stack_var (origvar);
1259 return tree_low_cst (DECL_SIZE_UNIT (var), 1);
1260 }
1261 return 0;
1262 }
1263
1264 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1265 expanding variables. Those variables that can be put into registers
1266 are allocated pseudos; those that can't are put on the stack.
1267
1268 TOPLEVEL is true if this is the outermost BLOCK. */
1269
1270 static void
1271 expand_used_vars_for_block (tree block, bool toplevel)
1272 {
1273 tree t;
1274
1275 /* Expand all variables at this level. */
1276 for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1277 if (TREE_USED (t)
1278 && ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
1279 || !DECL_NONSHAREABLE (t)))
1280 expand_one_var (t, toplevel, true);
1281
1282 /* Expand all variables at containing levels. */
1283 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1284 expand_used_vars_for_block (t, false);
1285 }
1286
1287 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1288 and clear TREE_USED on all local variables. */
1289
1290 static void
1291 clear_tree_used (tree block)
1292 {
1293 tree t;
1294
1295 for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1296 /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
1297 if ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
1298 || !DECL_NONSHAREABLE (t))
1299 TREE_USED (t) = 0;
1300
1301 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1302 clear_tree_used (t);
1303 }
1304
1305 enum {
1306 SPCT_FLAG_DEFAULT = 1,
1307 SPCT_FLAG_ALL = 2,
1308 SPCT_FLAG_STRONG = 3
1309 };
1310
1311 /* Examine TYPE and determine a bit mask of the following features. */
1312
1313 #define SPCT_HAS_LARGE_CHAR_ARRAY 1
1314 #define SPCT_HAS_SMALL_CHAR_ARRAY 2
1315 #define SPCT_HAS_ARRAY 4
1316 #define SPCT_HAS_AGGREGATE 8
1317
1318 static unsigned int
1319 stack_protect_classify_type (tree type)
1320 {
1321 unsigned int ret = 0;
1322 tree t;
1323
1324 switch (TREE_CODE (type))
1325 {
1326 case ARRAY_TYPE:
1327 t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
1328 if (t == char_type_node
1329 || t == signed_char_type_node
1330 || t == unsigned_char_type_node)
1331 {
1332 unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
1333 unsigned HOST_WIDE_INT len;
1334
1335 if (!TYPE_SIZE_UNIT (type)
1336 || !host_integerp (TYPE_SIZE_UNIT (type), 1))
1337 len = max;
1338 else
1339 len = tree_low_cst (TYPE_SIZE_UNIT (type), 1);
1340
1341 if (len < max)
1342 ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
1343 else
1344 ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
1345 }
1346 else
1347 ret = SPCT_HAS_ARRAY;
1348 break;
1349
1350 case UNION_TYPE:
1351 case QUAL_UNION_TYPE:
1352 case RECORD_TYPE:
1353 ret = SPCT_HAS_AGGREGATE;
1354 for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
1355 if (TREE_CODE (t) == FIELD_DECL)
1356 ret |= stack_protect_classify_type (TREE_TYPE (t));
1357 break;
1358
1359 default:
1360 break;
1361 }
1362
1363 return ret;
1364 }
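/* Illustrative classifications, assuming the default ssp-buffer-size of 8:
     char buf[4]             -> SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY
     char buf[64]            -> SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY
     int arr[16]             -> SPCT_HAS_ARRAY
     struct { char b[64]; }  -> SPCT_HAS_AGGREGATE | SPCT_HAS_LARGE_CHAR_ARRAY
                                | SPCT_HAS_ARRAY  */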
1365
1366 /* Return nonzero if DECL should be segregated into the "vulnerable" upper
1367 part of the local stack frame. Remember if we ever return nonzero for
1368 any variable in this function. The return value is the phase number in
1369 which the variable should be allocated. */
1370
1371 static int
1372 stack_protect_decl_phase (tree decl)
1373 {
1374 unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
1375 int ret = 0;
1376
1377 if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
1378 has_short_buffer = true;
1379
1380 if (flag_stack_protect == SPCT_FLAG_ALL
1381 || flag_stack_protect == SPCT_FLAG_STRONG)
1382 {
1383 if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
1384 && !(bits & SPCT_HAS_AGGREGATE))
1385 ret = 1;
1386 else if (bits & SPCT_HAS_ARRAY)
1387 ret = 2;
1388 }
1389 else
1390 ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;
1391
1392 if (ret)
1393 has_protected_decls = true;
1394
1395 return ret;
1396 }
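/* E.g. (illustrative): with -fstack-protector-all or
   -fstack-protector-strong, a plain character array is phase 1 and any
   other array-containing decl is phase 2; in the default mode only decls
   containing a large character array get phase 1, everything else stays
   in phase 0.  */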
1397
1398 /* Two helper routines that check for phase 1 and phase 2. These are used
1399 as callbacks for expand_stack_vars. */
1400
1401 static bool
1402 stack_protect_decl_phase_1 (size_t i)
1403 {
1404 return stack_protect_decl_phase (stack_vars[i].decl) == 1;
1405 }
1406
1407 static bool
1408 stack_protect_decl_phase_2 (size_t i)
1409 {
1410 return stack_protect_decl_phase (stack_vars[i].decl) == 2;
1411 }
1412
1413 /* A helper function that checks for the asan phase (with stack protector
1414 it is phase 3). This is used as a callback for expand_stack_vars.
1415 Returns true if any of the vars in the partition need to be protected. */
1416
1417 static bool
1418 asan_decl_phase_3 (size_t i)
1419 {
1420 while (i != EOC)
1421 {
1422 if (asan_protect_stack_decl (stack_vars[i].decl))
1423 return true;
1424 i = stack_vars[i].next;
1425 }
1426 return false;
1427 }
1428
1429 /* Ensure that variables in different stack protection phases conflict
1430 so that they are not merged and share the same stack slot. */
1431
1432 static void
1433 add_stack_protection_conflicts (void)
1434 {
1435 size_t i, j, n = stack_vars_num;
1436 unsigned char *phase;
1437
1438 phase = XNEWVEC (unsigned char, n);
1439 for (i = 0; i < n; ++i)
1440 phase[i] = stack_protect_decl_phase (stack_vars[i].decl);
1441
1442 for (i = 0; i < n; ++i)
1443 {
1444 unsigned char ph_i = phase[i];
1445 for (j = i + 1; j < n; ++j)
1446 if (ph_i != phase[j])
1447 add_stack_var_conflict (i, j);
1448 }
1449
1450 XDELETEVEC (phase);
1451 }
1452
1453 /* Create a decl for the guard at the top of the stack frame. */
1454
1455 static void
1456 create_stack_guard (void)
1457 {
1458 tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
1459 VAR_DECL, NULL, ptr_type_node);
1460 TREE_THIS_VOLATILE (guard) = 1;
1461 TREE_USED (guard) = 1;
1462 expand_one_stack_var (guard);
1463 crtl->stack_protect_guard = guard;
1464 }
1465
1466 /* Prepare for expanding variables. */
1467 static void
1468 init_vars_expansion (void)
1469 {
1470 /* Conflict bitmaps, and a few related temporary bitmaps, go here. */
1471 bitmap_obstack_initialize (&stack_var_bitmap_obstack);
1472
1473 /* A map from decl to stack partition. */
1474 decl_to_stack_part = pointer_map_create ();
1475
1476 /* Initialize local stack smashing state. */
1477 has_protected_decls = false;
1478 has_short_buffer = false;
1479 }
1480
1481 /* Free up stack variable graph data. */
1482 static void
1483 fini_vars_expansion (void)
1484 {
1485 bitmap_obstack_release (&stack_var_bitmap_obstack);
1486 if (stack_vars)
1487 XDELETEVEC (stack_vars);
1488 if (stack_vars_sorted)
1489 XDELETEVEC (stack_vars_sorted);
1490 stack_vars = NULL;
1491 stack_vars_sorted = NULL;
1492 stack_vars_alloc = stack_vars_num = 0;
1493 pointer_map_destroy (decl_to_stack_part);
1494 decl_to_stack_part = NULL;
1495 }
1496
1497 /* Make a fair guess for the size of the stack frame of the function
1498 in NODE. This doesn't have to be exact, the result is only used in
1499 the inline heuristics. So we don't want to run the full stack var
1500 packing algorithm (which is quadratic in the number of stack vars).
1501 Instead, we calculate the total size of all stack vars. This turns
1502 out to be a pretty fair estimate -- packing of stack vars doesn't
1503 happen very often. */
1504
1505 HOST_WIDE_INT
1506 estimated_stack_frame_size (struct cgraph_node *node)
1507 {
1508 HOST_WIDE_INT size = 0;
1509 size_t i;
1510 tree var;
1511 struct function *fn = DECL_STRUCT_FUNCTION (node->symbol.decl);
1512
1513 push_cfun (fn);
1514
1515 init_vars_expansion ();
1516
1517 FOR_EACH_LOCAL_DECL (fn, i, var)
1518 if (auto_var_in_fn_p (var, fn->decl))
1519 size += expand_one_var (var, true, false);
1520
1521 if (stack_vars_num > 0)
1522 {
1523 /* Fake sorting the stack vars for account_stack_vars (). */
1524 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
1525 for (i = 0; i < stack_vars_num; ++i)
1526 stack_vars_sorted[i] = i;
1527 size += account_stack_vars ();
1528 }
1529
1530 fini_vars_expansion ();
1531 pop_cfun ();
1532 return size;
1533 }
1534
1535 /* Helper routine to check if a record or union contains an array field. */
1536
1537 static int
1538 record_or_union_type_has_array_p (const_tree tree_type)
1539 {
1540 tree fields = TYPE_FIELDS (tree_type);
1541 tree f;
1542
1543 for (f = fields; f; f = DECL_CHAIN (f))
1544 if (TREE_CODE (f) == FIELD_DECL)
1545 {
1546 tree field_type = TREE_TYPE (f);
1547 if (RECORD_OR_UNION_TYPE_P (field_type)
1548 && record_or_union_type_has_array_p (field_type))
1549 return 1;
1550 if (TREE_CODE (field_type) == ARRAY_TYPE)
1551 return 1;
1552 }
1553 return 0;
1554 }
1555
1556 /* Expand all variables used in the function. */
1557
1558 static rtx
1559 expand_used_vars (void)
1560 {
1561 tree var, outer_block = DECL_INITIAL (current_function_decl);
1562 vec<tree> maybe_local_decls = vNULL;
1563 rtx var_end_seq = NULL_RTX;
1564 struct pointer_map_t *ssa_name_decls;
1565 unsigned i;
1566 unsigned len;
1567 bool gen_stack_protect_signal = false;
1568
1569 /* Compute the phase of the stack frame for this function. */
1570 {
1571 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1572 int off = STARTING_FRAME_OFFSET % align;
1573 frame_phase = off ? align - off : 0;
1574 }
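  /* Illustrative numbers: with a 128-bit PREFERRED_STACK_BOUNDARY
     (align == 16) and a STARTING_FRAME_OFFSET of 8, off is 8 and
     frame_phase becomes 16 - 8 == 8, keeping later allocations in
     alloc_stack_frame_space congruent to the frame's real phase.  */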
1575
1576 /* Set TREE_USED on all variables in the local_decls. */
1577 FOR_EACH_LOCAL_DECL (cfun, i, var)
1578 TREE_USED (var) = 1;
1579 /* Clear TREE_USED on all variables associated with a block scope. */
1580 clear_tree_used (DECL_INITIAL (current_function_decl));
1581
1582 init_vars_expansion ();
1583
1584 ssa_name_decls = pointer_map_create ();
1585 for (i = 0; i < SA.map->num_partitions; i++)
1586 {
1587 tree var = partition_to_var (SA.map, i);
1588
1589 gcc_assert (!virtual_operand_p (var));
1590
1591 /* Assign decls to each SSA name partition, share decls for partitions
1592 we could have coalesced (those with the same type). */
1593 if (SSA_NAME_VAR (var) == NULL_TREE)
1594 {
1595 void **slot = pointer_map_insert (ssa_name_decls, TREE_TYPE (var));
1596 if (!*slot)
1597 *slot = (void *) create_tmp_reg (TREE_TYPE (var), NULL);
1598 replace_ssa_name_symbol (var, (tree) *slot);
1599 }
1600
1601 if (TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
1602 expand_one_var (var, true, true);
1603 else
1604 {
1605 /* This is a PARM_DECL or RESULT_DECL. For those partitions that
1606 contain the default def (representing the parm or result itself)
1607 we don't do anything here. But those which don't contain the
1608 default def (representing a temporary based on the parm/result)
1609 we need to allocate space just like for normal VAR_DECLs. */
1610 if (!bitmap_bit_p (SA.partition_has_default_def, i))
1611 {
1612 expand_one_var (var, true, true);
1613 gcc_assert (SA.partition_to_pseudo[i]);
1614 }
1615 }
1616 }
1617 pointer_map_destroy (ssa_name_decls);
1618
1619 if (flag_stack_protect == SPCT_FLAG_STRONG)
1620 FOR_EACH_LOCAL_DECL (cfun, i, var)
1621 if (!is_global_var (var))
1622 {
1623 tree var_type = TREE_TYPE (var);
1624 /* Examine local referenced variables that have their addresses taken,
1625 contain an array, or are arrays. */
1626 if (TREE_CODE (var) == VAR_DECL
1627 && (TREE_CODE (var_type) == ARRAY_TYPE
1628 || TREE_ADDRESSABLE (var)
1629 || (RECORD_OR_UNION_TYPE_P (var_type)
1630 && record_or_union_type_has_array_p (var_type))))
1631 {
1632 gen_stack_protect_signal = true;
1633 break;
1634 }
1635 }
1636
1637 /* At this point all variables on the local_decls with TREE_USED
1638 set are not associated with any block scope. Lay them out. */
1639
1640 len = vec_safe_length (cfun->local_decls);
1641 FOR_EACH_LOCAL_DECL (cfun, i, var)
1642 {
1643 bool expand_now = false;
1644
1645 /* Expanded above already. */
1646 if (is_gimple_reg (var))
1647 {
1648 TREE_USED (var) = 0;
1649 goto next;
1650 }
1651 /* We didn't set a block for static or extern because it's hard
1652 to tell the difference between a global variable (re)declared
1653 in a local scope, and one that's really declared there to
1654 begin with. And it doesn't really matter much, since we're
1655 not giving them stack space. Expand them now. */
1656 else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
1657 expand_now = true;
1658
1659 /* If the variable is not associated with any block, then it
1660 was created by the optimizers, and could be live anywhere
1661 in the function. */
1662 else if (TREE_USED (var))
1663 expand_now = true;
1664
1665 /* Finally, mark all variables on the list as used. We'll use
1666 this in a moment when we expand those associated with scopes. */
1667 TREE_USED (var) = 1;
1668
1669 if (expand_now)
1670 expand_one_var (var, true, true);
1671
1672 next:
1673 if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
1674 {
1675 rtx rtl = DECL_RTL_IF_SET (var);
1676
1677 /* Keep artificial non-ignored vars in cfun->local_decls
1678 chain until instantiate_decls. */
1679 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
1680 add_local_decl (cfun, var);
1681 else if (rtl == NULL_RTX)
1682 /* If rtl isn't set yet, which can happen e.g. with
1683 -fstack-protector, retry before returning from this
1684 function. */
1685 maybe_local_decls.safe_push (var);
1686 }
1687 }
1688
1689 /* We duplicated some of the decls in CFUN->LOCAL_DECLS.
1690
1691 +-----------------+-----------------+
1692 | ...processed... | ...duplicates...|
1693 +-----------------+-----------------+
1694 ^
1695 +-- LEN points here.
1696
1697 We just want the duplicates, as those are the artificial
1698 non-ignored vars that we want to keep until instantiate_decls.
1699 Move them down and truncate the array. */
1700 if (!vec_safe_is_empty (cfun->local_decls))
1701 cfun->local_decls->block_remove (0, len);
1702
1703 /* At this point, all variables within the block tree with TREE_USED
1704 set are actually used by the optimized function. Lay them out. */
1705 expand_used_vars_for_block (outer_block, true);
1706
1707 if (stack_vars_num > 0)
1708 {
1709 add_scope_conflicts ();
1710
1711 /* If stack protection is enabled, we don't share space between
1712 vulnerable data and non-vulnerable data. */
1713 if (flag_stack_protect)
1714 add_stack_protection_conflicts ();
1715
1716 /* Now that we have collected all stack variables, and have computed a
1717 minimal interference graph, attempt to save some stack space. */
1718 partition_stack_vars ();
1719 if (dump_file)
1720 dump_stack_var_partition ();
1721 }
1722
1723 switch (flag_stack_protect)
1724 {
1725 case SPCT_FLAG_ALL:
1726 create_stack_guard ();
1727 break;
1728
1729 case SPCT_FLAG_STRONG:
1730 if (gen_stack_protect_signal
1731 || cfun->calls_alloca || has_protected_decls)
1732 create_stack_guard ();
1733 break;
1734
1735 case SPCT_FLAG_DEFAULT:
1736 if (cfun->calls_alloca || has_protected_decls)
1737 create_stack_guard ();
1738 break;
1739
1740 default:
1741 ;
1742 }
1743
1744 /* Assign rtl to each variable based on these partitions. */
1745 if (stack_vars_num > 0)
1746 {
1747 struct stack_vars_data data;
1748
1749 data.asan_vec = vNULL;
1750 data.asan_decl_vec = vNULL;
1751
1752 /* Reorder decls to be protected by iterating over the variables
1753 array multiple times, and allocating out of each phase in turn. */
1754 /* ??? We could probably integrate this into the qsort we did
1755 earlier, such that we naturally see these variables first,
1756 and thus naturally allocate things in the right order. */
1757 if (has_protected_decls)
1758 {
1759 /* Phase 1 contains only character arrays. */
1760 expand_stack_vars (stack_protect_decl_phase_1, &data);
1761
1762 /* Phase 2 contains other kinds of arrays. */
1763 if (flag_stack_protect == 2)
1764 expand_stack_vars (stack_protect_decl_phase_2, &data);
1765 }
1766
1767 if (flag_sanitize & SANITIZE_ADDRESS)
1768 /* Phase 3, any partitions that need asan protection
1769 in addition to phase 1 and 2. */
1770 expand_stack_vars (asan_decl_phase_3, &data);
1771
1772 if (!data.asan_vec.is_empty ())
1773 {
1774 HOST_WIDE_INT prev_offset = frame_offset;
1775 HOST_WIDE_INT offset
1776 = alloc_stack_frame_space (ASAN_RED_ZONE_SIZE,
1777 ASAN_RED_ZONE_SIZE);
1778 data.asan_vec.safe_push (prev_offset);
1779 data.asan_vec.safe_push (offset);
1780
1781 var_end_seq
1782 = asan_emit_stack_protection (virtual_stack_vars_rtx,
1783 data.asan_vec.address (),
1784 data.asan_decl_vec.address (),
1785 data.asan_vec.length ());
1786 }
1787
1788 expand_stack_vars (NULL, &data);
1789
1790 data.asan_vec.release ();
1791 data.asan_decl_vec.release ();
1792 }
1793
1794 fini_vars_expansion ();
1795
1796 /* If there were any artificial non-ignored vars without rtl
1797 found earlier, see if deferred stack allocation hasn't assigned
1798 rtl to them. */
1799 FOR_EACH_VEC_ELT_REVERSE (maybe_local_decls, i, var)
1800 {
1801 rtx rtl = DECL_RTL_IF_SET (var);
1802
1803 /* Keep artificial non-ignored vars in cfun->local_decls
1804 chain until instantiate_decls. */
1805 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
1806 add_local_decl (cfun, var);
1807 }
1808 maybe_local_decls.release ();
1809
1810 /* If the target requires that FRAME_OFFSET be aligned, do it. */
1811 if (STACK_ALIGNMENT_NEEDED)
1812 {
1813 HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1814 if (!FRAME_GROWS_DOWNWARD)
1815 frame_offset += align - 1;
1816 frame_offset &= -align;
1817 }
1818
1819 return var_end_seq;
1820 }
1821
1822
1823 /* If we need to produce a detailed dump, print the tree representation
1824 for STMT to the dump file. SINCE is the last RTX after which the RTL
1825 generated for STMT should have been appended. */
1826
1827 static void
1828 maybe_dump_rtl_for_gimple_stmt (gimple stmt, rtx since)
1829 {
1830 if (dump_file && (dump_flags & TDF_DETAILS))
1831 {
1832 fprintf (dump_file, "\n;; ");
1833 print_gimple_stmt (dump_file, stmt, 0,
1834 TDF_SLIM | (dump_flags & TDF_LINENO));
1835 fprintf (dump_file, "\n");
1836
1837 print_rtl (dump_file, since ? NEXT_INSN (since) : since);
1838 }
1839 }
1840
1841 /* Maps the blocks that do not contain tree labels to rtx labels. */
1842
1843 static struct pointer_map_t *lab_rtx_for_bb;
1844
1845 /* Returns the label_rtx expression for a label starting basic block BB. */
1846
1847 static rtx
1848 label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
1849 {
1850 gimple_stmt_iterator gsi;
1851 tree lab;
1852 gimple lab_stmt;
1853 void **elt;
1854
1855 if (bb->flags & BB_RTL)
1856 return block_label (bb);
1857
1858 elt = pointer_map_contains (lab_rtx_for_bb, bb);
1859 if (elt)
1860 return (rtx) *elt;
1861
1862 /* Find the tree label if it is present. */
1863
1864 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1865 {
1866 lab_stmt = gsi_stmt (gsi);
1867 if (gimple_code (lab_stmt) != GIMPLE_LABEL)
1868 break;
1869
1870 lab = gimple_label_label (lab_stmt);
1871 if (DECL_NONLOCAL (lab))
1872 break;
1873
1874 return label_rtx (lab);
1875 }
1876
1877 elt = pointer_map_insert (lab_rtx_for_bb, bb);
1878 *elt = gen_label_rtx ();
1879 return (rtx) *elt;
1880 }
1881
1882
1883 /* A subroutine of expand_gimple_cond. Given E, a fallthrough edge
1884 of a basic block where we just expanded the conditional at the end,
1885 possibly clean up the CFG and instruction sequence. LAST is the
1886 last instruction before the just emitted jump sequence. */
1887
1888 static void
1889 maybe_cleanup_end_of_block (edge e, rtx last)
1890 {
1891 /* Special case: when jumpif decides that the condition is
1892 trivial it emits an unconditional jump (and the necessary
1893 barrier). But we still have two edges, the fallthru one is
1894 wrong. purge_dead_edges would clean this up later. Unfortunately
1895 we have to insert insns (and split edges) before
1896 find_many_sub_basic_blocks and hence before purge_dead_edges.
1897 But splitting edges might create new blocks which depend on the
1898 fact that if there are two edges there's no barrier. So the
1899 barrier would get lost and verify_flow_info would ICE. Instead
1900 of auditing all edge splitters to care for the barrier (which
1901 normally isn't there in a cleaned CFG), fix it here. */
1902 if (BARRIER_P (get_last_insn ()))
1903 {
1904 rtx insn;
1905 remove_edge (e);
1906 /* Now we have a single successor block; if we have insns to
1907 insert on the remaining edge, we will potentially insert
1908 them at the end of this block (if the dest block isn't feasible)
1909 in order to avoid splitting the edge. This insertion will take
1910 place in front of the last jump. But we might have emitted
1911 multiple jumps (conditional and one unconditional) to the
1912 same destination. Inserting in front of the last one then
1913 is a problem. See PR 40021. We fix this by deleting all
1914 jumps except the last unconditional one. */
1915 insn = PREV_INSN (get_last_insn ());
1916 /* Make sure we have an unconditional jump. Otherwise we're
1917 confused. */
1918 gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
1919 for (insn = PREV_INSN (insn); insn != last;)
1920 {
1921 insn = PREV_INSN (insn);
1922 if (JUMP_P (NEXT_INSN (insn)))
1923 {
1924 if (!any_condjump_p (NEXT_INSN (insn)))
1925 {
1926 gcc_assert (BARRIER_P (NEXT_INSN (NEXT_INSN (insn))));
1927 delete_insn (NEXT_INSN (NEXT_INSN (insn)));
1928 }
1929 delete_insn (NEXT_INSN (insn));
1930 }
1931 }
1932 }
1933 }
1934
1935 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_COND.
1936 Returns a new basic block if we've terminated the current basic
1937 block and created a new one. */
1938
1939 static basic_block
1940 expand_gimple_cond (basic_block bb, gimple stmt)
1941 {
1942 basic_block new_bb, dest;
1943 edge new_edge;
1944 edge true_edge;
1945 edge false_edge;
1946 rtx last2, last;
1947 enum tree_code code;
1948 tree op0, op1;
1949
1950 code = gimple_cond_code (stmt);
1951 op0 = gimple_cond_lhs (stmt);
1952 op1 = gimple_cond_rhs (stmt);
1953 /* We're sometimes presented with such code:
1954 D.123_1 = x < y;
1955 if (D.123_1 != 0)
1956 ...
1957 This would expand to two comparisons which then later might
1958 be cleaned up by combine. But some pattern matchers like if-conversion
1959 work better when there's only one compare, so make up for this
1960 here as special exception if TER would have made the same change. */
1961 if (SA.values
1962 && TREE_CODE (op0) == SSA_NAME
1963 && TREE_CODE (TREE_TYPE (op0)) == BOOLEAN_TYPE
1964 && TREE_CODE (op1) == INTEGER_CST
1965 && ((gimple_cond_code (stmt) == NE_EXPR
1966 && integer_zerop (op1))
1967 || (gimple_cond_code (stmt) == EQ_EXPR
1968 && integer_onep (op1)))
1969 && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
1970 {
1971 gimple second = SSA_NAME_DEF_STMT (op0);
1972 if (gimple_code (second) == GIMPLE_ASSIGN)
1973 {
1974 enum tree_code code2 = gimple_assign_rhs_code (second);
1975 if (TREE_CODE_CLASS (code2) == tcc_comparison)
1976 {
1977 code = code2;
1978 op0 = gimple_assign_rhs1 (second);
1979 op1 = gimple_assign_rhs2 (second);
1980 }
1981 /* If jumps are cheap turn some more codes into
1982 jumpy sequences. */
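/* For instance, with cheap branches a GIMPLE sequence such as
     x_1 = a_2 & b_3;   (all of 1-bit precision)
     if (x_1 != 0) ...
   is expanded below as if it had been written a_2 && b_3, i.e. as two
   conditional jumps, instead of materializing the AND.  The SSA names
   used here are purely illustrative.  */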
1983 else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4)
1984 {
1985 if ((code2 == BIT_AND_EXPR
1986 && TYPE_PRECISION (TREE_TYPE (op0)) == 1
1987 && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
1988 || code2 == TRUTH_AND_EXPR)
1989 {
1990 code = TRUTH_ANDIF_EXPR;
1991 op0 = gimple_assign_rhs1 (second);
1992 op1 = gimple_assign_rhs2 (second);
1993 }
1994 else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
1995 {
1996 code = TRUTH_ORIF_EXPR;
1997 op0 = gimple_assign_rhs1 (second);
1998 op1 = gimple_assign_rhs2 (second);
1999 }
2000 }
2001 }
2002 }
2003
2004 last2 = last = get_last_insn ();
2005
2006 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2007 set_curr_insn_location (gimple_location (stmt));
2008
2009 /* These flags have no purpose in RTL land. */
2010 true_edge->flags &= ~EDGE_TRUE_VALUE;
2011 false_edge->flags &= ~EDGE_FALSE_VALUE;
2012
2013 /* We can either have a pure conditional jump with one fallthru edge or
2014 a two-way jump that needs to be decomposed into two basic blocks. */
2015 if (false_edge->dest == bb->next_bb)
2016 {
2017 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
2018 true_edge->probability);
2019 maybe_dump_rtl_for_gimple_stmt (stmt, last);
2020 if (true_edge->goto_locus != UNKNOWN_LOCATION)
2021 set_curr_insn_location (true_edge->goto_locus);
2022 false_edge->flags |= EDGE_FALLTHRU;
2023 maybe_cleanup_end_of_block (false_edge, last);
2024 return NULL;
2025 }
2026 if (true_edge->dest == bb->next_bb)
2027 {
2028 jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
2029 false_edge->probability);
2030 maybe_dump_rtl_for_gimple_stmt (stmt, last);
2031 if (false_edge->goto_locus != UNKNOWN_LOCATION)
2032 set_curr_insn_location (false_edge->goto_locus);
2033 true_edge->flags |= EDGE_FALLTHRU;
2034 maybe_cleanup_end_of_block (true_edge, last);
2035 return NULL;
2036 }
2037
2038 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
2039 true_edge->probability);
2040 last = get_last_insn ();
2041 if (false_edge->goto_locus != UNKNOWN_LOCATION)
2042 set_curr_insn_location (false_edge->goto_locus);
2043 emit_jump (label_rtx_for_bb (false_edge->dest));
2044
2045 BB_END (bb) = last;
2046 if (BARRIER_P (BB_END (bb)))
2047 BB_END (bb) = PREV_INSN (BB_END (bb));
2048 update_bb_for_insn (bb);
2049
2050 new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2051 dest = false_edge->dest;
2052 redirect_edge_succ (false_edge, new_bb);
2053 false_edge->flags |= EDGE_FALLTHRU;
2054 new_bb->count = false_edge->count;
2055 new_bb->frequency = EDGE_FREQUENCY (false_edge);
2056 if (current_loops && bb->loop_father)
2057 add_bb_to_loop (new_bb, bb->loop_father);
2058 new_edge = make_edge (new_bb, dest, 0);
2059 new_edge->probability = REG_BR_PROB_BASE;
2060 new_edge->count = new_bb->count;
2061 if (BARRIER_P (BB_END (new_bb)))
2062 BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
2063 update_bb_for_insn (new_bb);
2064
2065 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2066
2067 if (true_edge->goto_locus != UNKNOWN_LOCATION)
2068 {
2069 set_curr_insn_location (true_edge->goto_locus);
2070 true_edge->goto_locus = curr_insn_location ();
2071 }
2072
2073 return new_bb;
2074 }
2075
2076 /* Mark all calls that can have a transaction restart. */
2077
2078 static void
2079 mark_transaction_restart_calls (gimple stmt)
2080 {
2081 struct tm_restart_node dummy;
2082 void **slot;
2083
2084 if (!cfun->gimple_df->tm_restart)
2085 return;
2086
2087 dummy.stmt = stmt;
2088 slot = htab_find_slot (cfun->gimple_df->tm_restart, &dummy, NO_INSERT);
2089 if (slot)
2090 {
2091 struct tm_restart_node *n = (struct tm_restart_node *) *slot;
2092 tree list = n->label_or_list;
2093 rtx insn;
2094
2095 for (insn = next_real_insn (get_last_insn ());
2096 !CALL_P (insn);
2097 insn = next_real_insn (insn))
2098 continue;
2099
2100 if (TREE_CODE (list) == LABEL_DECL)
2101 add_reg_note (insn, REG_TM, label_rtx (list));
2102 else
2103 for (; list ; list = TREE_CHAIN (list))
2104 add_reg_note (insn, REG_TM, label_rtx (TREE_VALUE (list)));
2105 }
2106 }
2107
2108 /* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
2109 statement STMT. */
2110
2111 static void
2112 expand_call_stmt (gimple stmt)
2113 {
2114 tree exp, decl, lhs;
2115 bool builtin_p;
2116 size_t i;
2117
2118 if (gimple_call_internal_p (stmt))
2119 {
2120 expand_internal_call (stmt);
2121 return;
2122 }
2123
2124 exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);
2125
2126 CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
2127 decl = gimple_call_fndecl (stmt);
2128 builtin_p = decl && DECL_BUILT_IN (decl);
2129
2130 /* If this is not a builtin function, the function type through which the
2131 call is made may be different from the type of the function. */
2132 if (!builtin_p)
2133 CALL_EXPR_FN (exp)
2134 = fold_convert (build_pointer_type (gimple_call_fntype (stmt)),
2135 CALL_EXPR_FN (exp));
2136
2137 TREE_TYPE (exp) = gimple_call_return_type (stmt);
2138 CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);
2139
2140 for (i = 0; i < gimple_call_num_args (stmt); i++)
2141 {
2142 tree arg = gimple_call_arg (stmt, i);
2143 gimple def;
2144 /* We TER addresses into arguments of builtin functions so we have a
2145 chance to infer more correct alignment information. See PR39954. */
2146 if (builtin_p
2147 && TREE_CODE (arg) == SSA_NAME
2148 && (def = get_gimple_for_ssa_name (arg))
2149 && gimple_assign_rhs_code (def) == ADDR_EXPR)
2150 arg = gimple_assign_rhs1 (def);
2151 CALL_EXPR_ARG (exp, i) = arg;
2152 }
2153
2154 if (gimple_has_side_effects (stmt))
2155 TREE_SIDE_EFFECTS (exp) = 1;
2156
2157 if (gimple_call_nothrow_p (stmt))
2158 TREE_NOTHROW (exp) = 1;
2159
2160 CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
2161 CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
2162 if (decl
2163 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
2164 && (DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA
2165 || DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA_WITH_ALIGN))
2166 CALL_ALLOCA_FOR_VAR_P (exp) = gimple_call_alloca_for_var_p (stmt);
2167 else
2168 CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
2169 CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
2170 SET_EXPR_LOCATION (exp, gimple_location (stmt));
2171
2172 /* Ensure RTL is created for debug args. */
2173 if (decl && DECL_HAS_DEBUG_ARGS_P (decl))
2174 {
2175 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (decl);
2176 unsigned int ix;
2177 tree dtemp;
2178
2179 if (debug_args)
2180 for (ix = 1; (*debug_args)->iterate (ix, &dtemp); ix += 2)
2181 {
2182 gcc_assert (TREE_CODE (dtemp) == DEBUG_EXPR_DECL);
2183 expand_debug_expr (dtemp);
2184 }
2185 }
2186
2187 lhs = gimple_call_lhs (stmt);
2188 if (lhs)
2189 expand_assignment (lhs, exp, false);
2190 else
2191 expand_expr_real_1 (exp, const0_rtx, VOIDmode, EXPAND_NORMAL, NULL);
2192
2193 mark_transaction_restart_calls (stmt);
2194 }
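/* In effect, a statement such as x_1 = foo (a_2, 3) is re-wrapped above as
   the GENERIC call expression foo (a_2, 3) and handed to expand_assignment
   for x_1; a call without an lhs is expanded for its side effects only.
   The statement shown is only an illustration.  */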
2195
2196 /* A subroutine of expand_gimple_stmt, expanding one gimple statement
2197 STMT that doesn't require special handling for outgoing edges. That
2198 is, no tailcalls and no GIMPLE_COND. */
2199
2200 static void
2201 expand_gimple_stmt_1 (gimple stmt)
2202 {
2203 tree op0;
2204
2205 set_curr_insn_location (gimple_location (stmt));
2206
2207 switch (gimple_code (stmt))
2208 {
2209 case GIMPLE_GOTO:
2210 op0 = gimple_goto_dest (stmt);
2211 if (TREE_CODE (op0) == LABEL_DECL)
2212 expand_goto (op0);
2213 else
2214 expand_computed_goto (op0);
2215 break;
2216 case GIMPLE_LABEL:
2217 expand_label (gimple_label_label (stmt));
2218 break;
2219 case GIMPLE_NOP:
2220 case GIMPLE_PREDICT:
2221 break;
2222 case GIMPLE_SWITCH:
2223 expand_case (stmt);
2224 break;
2225 case GIMPLE_ASM:
2226 expand_asm_stmt (stmt);
2227 break;
2228 case GIMPLE_CALL:
2229 expand_call_stmt (stmt);
2230 break;
2231
2232 case GIMPLE_RETURN:
2233 op0 = gimple_return_retval (stmt);
2234
2235 if (op0 && op0 != error_mark_node)
2236 {
2237 tree result = DECL_RESULT (current_function_decl);
2238
2239 /* If we are not returning the current function's RESULT_DECL,
2240 build an assignment to it. */
2241 if (op0 != result)
2242 {
2243 /* I believe that a function's RESULT_DECL is unique. */
2244 gcc_assert (TREE_CODE (op0) != RESULT_DECL);
2245
2246 /* ??? We'd like to use simply expand_assignment here,
2247 but this fails if the value is of BLKmode but the return
2248 decl is a register. expand_return has special handling
2249 for this combination, which eventually should move
2250 to common code. See comments there. Until then, let's
2251 build a modify expression :-/ */
2252 op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
2253 result, op0);
2254 }
2255 }
2256 if (!op0)
2257 expand_null_return ();
2258 else
2259 expand_return (op0);
2260 break;
2261
2262 case GIMPLE_ASSIGN:
2263 {
2264 tree lhs = gimple_assign_lhs (stmt);
2265
2266 /* Tree expand used to fiddle with |= and &= of two bitfield
2267 COMPONENT_REFs here. This can't happen with gimple; the LHS
2268 of binary assigns must be a gimple reg. */
2269
2270 if (TREE_CODE (lhs) != SSA_NAME
2271 || get_gimple_rhs_class (gimple_expr_code (stmt))
2272 == GIMPLE_SINGLE_RHS)
2273 {
2274 tree rhs = gimple_assign_rhs1 (stmt);
2275 gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
2276 == GIMPLE_SINGLE_RHS);
2277 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs))
2278 SET_EXPR_LOCATION (rhs, gimple_location (stmt));
2279 if (TREE_CLOBBER_P (rhs))
2280 /* This is a clobber to mark the going out of scope for
2281 this LHS. */
2282 ;
2283 else
2284 expand_assignment (lhs, rhs,
2285 gimple_assign_nontemporal_move_p (stmt));
2286 }
2287 else
2288 {
2289 rtx target, temp;
2290 bool nontemporal = gimple_assign_nontemporal_move_p (stmt);
2291 struct separate_ops ops;
2292 bool promoted = false;
2293
2294 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
2295 if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
2296 promoted = true;
2297
2298 ops.code = gimple_assign_rhs_code (stmt);
2299 ops.type = TREE_TYPE (lhs);
2300 switch (get_gimple_rhs_class (gimple_expr_code (stmt)))
2301 {
2302 case GIMPLE_TERNARY_RHS:
2303 ops.op2 = gimple_assign_rhs3 (stmt);
2304 /* Fallthru */
2305 case GIMPLE_BINARY_RHS:
2306 ops.op1 = gimple_assign_rhs2 (stmt);
2307 /* Fallthru */
2308 case GIMPLE_UNARY_RHS:
2309 ops.op0 = gimple_assign_rhs1 (stmt);
2310 break;
2311 default:
2312 gcc_unreachable ();
2313 }
2314 ops.location = gimple_location (stmt);
2315
2316 /* If we want to use a nontemporal store, force the value into a
2317 register first. If we store into a promoted register,
2318 don't directly expand to target. */
2319 temp = nontemporal || promoted ? NULL_RTX : target;
2320 temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
2321 EXPAND_NORMAL);
2322
2323 if (temp == target)
2324 ;
2325 else if (promoted)
2326 {
2327 int unsignedp = SUBREG_PROMOTED_UNSIGNED_P (target);
2328 /* If TEMP is a VOIDmode constant, use convert_modes to make
2329 sure that we properly convert it. */
2330 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
2331 {
2332 temp = convert_modes (GET_MODE (target),
2333 TYPE_MODE (ops.type),
2334 temp, unsignedp);
2335 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
2336 GET_MODE (target), temp, unsignedp);
2337 }
2338
2339 convert_move (SUBREG_REG (target), temp, unsignedp);
2340 }
2341 else if (nontemporal && emit_storent_insn (target, temp))
2342 ;
2343 else
2344 {
2345 temp = force_operand (temp, target);
2346 if (temp != target)
2347 emit_move_insn (target, temp);
2348 }
2349 }
2350 }
2351 break;
2352
2353 default:
2354 gcc_unreachable ();
2355 }
2356 }
2357
2358 /* Expand one gimple statement STMT and return the last RTL instruction
2359 before any of the newly generated ones.
2360
2361 In addition to generating the necessary RTL instructions this also
2362 sets REG_EH_REGION notes if necessary and sets the current source
2363 location for diagnostics. */
2364
2365 static rtx
2366 expand_gimple_stmt (gimple stmt)
2367 {
2368 location_t saved_location = input_location;
2369 rtx last = get_last_insn ();
2370 int lp_nr;
2371
2372 gcc_assert (cfun);
2373
2374 /* We need to save and restore the current source location so that errors
2375 discovered during expansion are emitted with the right location. But
2376 it would be better if the diagnostic routines used the source location
2377 embedded in the tree nodes rather than globals. */
2378 if (gimple_has_location (stmt))
2379 input_location = gimple_location (stmt);
2380
2381 expand_gimple_stmt_1 (stmt);
2382
2383 /* Free any temporaries used to evaluate this statement. */
2384 free_temp_slots ();
2385
2386 input_location = saved_location;
2387
2388 /* Mark all insns that may trap. */
2389 lp_nr = lookup_stmt_eh_lp (stmt);
2390 if (lp_nr)
2391 {
2392 rtx insn;
2393 for (insn = next_real_insn (last); insn;
2394 insn = next_real_insn (insn))
2395 {
2396 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
2397 /* If we want exceptions for non-call insns, any
2398 may_trap_p instruction may throw. */
2399 && GET_CODE (PATTERN (insn)) != CLOBBER
2400 && GET_CODE (PATTERN (insn)) != USE
2401 && insn_could_throw_p (insn))
2402 make_reg_eh_region_note (insn, 0, lp_nr);
2403 }
2404 }
2405
2406 return last;
2407 }
2408
2409 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL
2410 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
2411 generated a tail call (something that might be denied by the ABI
2412 rules governing the call; see calls.c).
2413
2414 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
2415 can still reach the rest of BB. The case here is __builtin_sqrt,
2416 where the NaN result goes through the external function (with a
2417 tailcall) and the normal result happens via a sqrt instruction. */
2418
2419 static basic_block
2420 expand_gimple_tailcall (basic_block bb, gimple stmt, bool *can_fallthru)
2421 {
2422 rtx last2, last;
2423 edge e;
2424 edge_iterator ei;
2425 int probability;
2426 gcov_type count;
2427
2428 last2 = last = expand_gimple_stmt (stmt);
2429
2430 for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
2431 if (CALL_P (last) && SIBLING_CALL_P (last))
2432 goto found;
2433
2434 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2435
2436 *can_fallthru = true;
2437 return NULL;
2438
2439 found:
2440 /* ??? Wouldn't it be better to just reset any pending stack adjust?
2441 Any instructions emitted here are about to be deleted. */
2442 do_pending_stack_adjust ();
2443
2444 /* Remove any non-eh, non-abnormal edges that don't go to exit. */
2445 /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be
2446 EH or abnormal edges, we shouldn't have created a tail call in
2447 the first place. So it seems to me we should just be removing
2448 all edges here, or redirecting the existing fallthru edge to
2449 the exit block. */
2450
2451 probability = 0;
2452 count = 0;
2453
2454 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2455 {
2456 if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
2457 {
2458 if (e->dest != EXIT_BLOCK_PTR)
2459 {
2460 e->dest->count -= e->count;
2461 e->dest->frequency -= EDGE_FREQUENCY (e);
2462 if (e->dest->count < 0)
2463 e->dest->count = 0;
2464 if (e->dest->frequency < 0)
2465 e->dest->frequency = 0;
2466 }
2467 count += e->count;
2468 probability += e->probability;
2469 remove_edge (e);
2470 }
2471 else
2472 ei_next (&ei);
2473 }
2474
2475 /* This is somewhat ugly: the call_expr expander often emits instructions
2476 after the sibcall (to perform the function return). These confuse the
2477 find_many_sub_basic_blocks code, so we need to get rid of these. */
2478 last = NEXT_INSN (last);
2479 gcc_assert (BARRIER_P (last));
2480
2481 *can_fallthru = false;
2482 while (NEXT_INSN (last))
2483 {
2484 /* For instance, an sqrt builtin expander may expand into an if whose
2485 then-arm performs the sibcall and whose else-arm starts with a label. */
2486 if (LABEL_P (NEXT_INSN (last)))
2487 {
2488 *can_fallthru = true;
2489 break;
2490 }
2491 delete_insn (NEXT_INSN (last));
2492 }
2493
2494 e = make_edge (bb, EXIT_BLOCK_PTR, EDGE_ABNORMAL | EDGE_SIBCALL);
2495 e->probability += probability;
2496 e->count += count;
2497 BB_END (bb) = last;
2498 update_bb_for_insn (bb);
2499
2500 if (NEXT_INSN (last))
2501 {
2502 bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2503
2504 last = BB_END (bb);
2505 if (BARRIER_P (last))
2506 BB_END (bb) = PREV_INSN (last);
2507 }
2508
2509 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2510
2511 return bb;
2512 }
2513
2514 /* Return the difference between the floor and the truncated result of
2515 a signed division by OP1 with remainder MOD. */
2516 static rtx
2517 floor_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2518 {
2519 /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
2520 return gen_rtx_IF_THEN_ELSE
2521 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2522 gen_rtx_IF_THEN_ELSE
2523 (mode, gen_rtx_LT (BImode,
2524 gen_rtx_DIV (mode, op1, mod),
2525 const0_rtx),
2526 constm1_rtx, const0_rtx),
2527 const0_rtx);
2528 }
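/* A worked example of the formula above: for -7 / 2 the truncated quotient
   is -3 with remainder -1; OP1 / MOD = 2 / -1 is negative, so the adjustment
   is -1 and the floor result is -3 + -1 = -4.  */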
2529
2530 /* Return the difference between the ceil and the truncated result of
2531 a signed division by OP1 with remainder MOD. */
2532 static rtx
2533 ceil_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2534 {
2535 /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
2536 return gen_rtx_IF_THEN_ELSE
2537 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2538 gen_rtx_IF_THEN_ELSE
2539 (mode, gen_rtx_GT (BImode,
2540 gen_rtx_DIV (mode, op1, mod),
2541 const0_rtx),
2542 const1_rtx, const0_rtx),
2543 const0_rtx);
2544 }
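/* A worked example: for 7 / 2 the truncated quotient is 3 with remainder 1;
   OP1 / MOD = 2 / 1 is positive, so the adjustment is +1 and the ceiling
   result is 3 + 1 = 4.  */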
2545
2546 /* Return the difference between the ceil and the truncated result of
2547 an unsigned division by OP1 with remainder MOD. */
2548 static rtx
2549 ceil_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
2550 {
2551 /* (mod != 0 ? 1 : 0) */
2552 return gen_rtx_IF_THEN_ELSE
2553 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2554 const1_rtx, const0_rtx);
2555 }
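/* A worked example: for 7 / 2 (unsigned) the remainder 1 is nonzero, so the
   adjustment is +1 and the ceiling result is 3 + 1 = 4.  */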
2556
2557 /* Return the difference between the rounded and the truncated result
2558 of a signed division by OP1 with remainder MOD. Halfway cases are
2559 rounded away from zero, rather than to the nearest even number. */
2560 static rtx
2561 round_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2562 {
2563 /* (abs (mod) >= abs (op1) - abs (mod)
2564 ? (op1 / mod > 0 ? 1 : -1)
2565 : 0) */
2566 return gen_rtx_IF_THEN_ELSE
2567 (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
2568 gen_rtx_MINUS (mode,
2569 gen_rtx_ABS (mode, op1),
2570 gen_rtx_ABS (mode, mod))),
2571 gen_rtx_IF_THEN_ELSE
2572 (mode, gen_rtx_GT (BImode,
2573 gen_rtx_DIV (mode, op1, mod),
2574 const0_rtx),
2575 const1_rtx, constm1_rtx),
2576 const0_rtx);
2577 }
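/* A worked example: for 7 / 2 the truncated quotient is 3 with remainder 1;
   abs (MOD) = 1 equals abs (OP1) - abs (MOD) = 1 and OP1 / MOD is positive,
   so the adjustment is +1 and the rounded result is 4 (the halfway case 3.5
   rounds away from zero).  */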
2578
2579 /* Return the difference between the rounded and the truncated result
2580 of an unsigned division by OP1 with remainder MOD. Halfway cases
2581 are rounded away from zero, rather than to the nearest even
2582 number. */
2583 static rtx
2584 round_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2585 {
2586 /* (mod >= op1 - mod ? 1 : 0) */
2587 return gen_rtx_IF_THEN_ELSE
2588 (mode, gen_rtx_GE (BImode, mod,
2589 gen_rtx_MINUS (mode, op1, mod)),
2590 const1_rtx, const0_rtx);
2591 }
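/* A worked example: for 7 / 2 (unsigned) the remainder 1 satisfies
   MOD >= OP1 - MOD (1 >= 1), so the adjustment is +1 and the rounded result
   is 4; for 7 / 3 the remainder 1 is less than 3 - 1 = 2, so no adjustment
   is made and the result stays 2.  */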
2592
2593 /* Convert X to MODE, that must be Pmode or ptr_mode, without emitting
2594 any rtl. */
2595
2596 static rtx
2597 convert_debug_memory_address (enum machine_mode mode, rtx x,
2598 addr_space_t as)
2599 {
2600 enum machine_mode xmode = GET_MODE (x);
2601
2602 #ifndef POINTERS_EXTEND_UNSIGNED
2603 gcc_assert (mode == Pmode
2604 || mode == targetm.addr_space.address_mode (as));
2605 gcc_assert (xmode == mode || xmode == VOIDmode);
2606 #else
2607 rtx temp;
2608
2609 gcc_assert (targetm.addr_space.valid_pointer_mode (mode, as));
2610
2611 if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
2612 return x;
2613
2614 if (GET_MODE_PRECISION (mode) < GET_MODE_PRECISION (xmode))
2615 x = simplify_gen_subreg (mode, x, xmode,
2616 subreg_lowpart_offset
2617 (mode, xmode));
2618 else if (POINTERS_EXTEND_UNSIGNED > 0)
2619 x = gen_rtx_ZERO_EXTEND (mode, x);
2620 else if (!POINTERS_EXTEND_UNSIGNED)
2621 x = gen_rtx_SIGN_EXTEND (mode, x);
2622 else
2623 {
2624 switch (GET_CODE (x))
2625 {
2626 case SUBREG:
2627 if ((SUBREG_PROMOTED_VAR_P (x)
2628 || (REG_P (SUBREG_REG (x)) && REG_POINTER (SUBREG_REG (x)))
2629 || (GET_CODE (SUBREG_REG (x)) == PLUS
2630 && REG_P (XEXP (SUBREG_REG (x), 0))
2631 && REG_POINTER (XEXP (SUBREG_REG (x), 0))
2632 && CONST_INT_P (XEXP (SUBREG_REG (x), 1))))
2633 && GET_MODE (SUBREG_REG (x)) == mode)
2634 return SUBREG_REG (x);
2635 break;
2636 case LABEL_REF:
2637 temp = gen_rtx_LABEL_REF (mode, XEXP (x, 0));
2638 LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
2639 return temp;
2640 case SYMBOL_REF:
2641 temp = shallow_copy_rtx (x);
2642 PUT_MODE (temp, mode);
2643 return temp;
2644 case CONST:
2645 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
2646 if (temp)
2647 temp = gen_rtx_CONST (mode, temp);
2648 return temp;
2649 case PLUS:
2650 case MINUS:
2651 if (CONST_INT_P (XEXP (x, 1)))
2652 {
2653 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
2654 if (temp)
2655 return gen_rtx_fmt_ee (GET_CODE (x), mode, temp, XEXP (x, 1));
2656 }
2657 break;
2658 default:
2659 break;
2660 }
2661 /* Don't know how to express ptr_extend as an operation in debug info. */
2662 return NULL;
2663 }
2664 #endif /* POINTERS_EXTEND_UNSIGNED */
2665
2666 return x;
2667 }
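/* As a purely hypothetical illustration (not describing any particular
   target): with a 32-bit ptr_mode, a 64-bit Pmode and
   POINTERS_EXTEND_UNSIGNED defined to a positive value, narrowing a DImode
   address above yields a lowpart subreg and widening an SImode address
   yields (zero_extend:DI ...), in both cases without emitting any insns.  */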
2668
2669 /* Return an RTX equivalent to the value of the parameter DECL. */
2670
2671 static rtx
2672 expand_debug_parm_decl (tree decl)
2673 {
2674 rtx incoming = DECL_INCOMING_RTL (decl);
2675
2676 if (incoming
2677 && GET_MODE (incoming) != BLKmode
2678 && ((REG_P (incoming) && HARD_REGISTER_P (incoming))
2679 || (MEM_P (incoming)
2680 && REG_P (XEXP (incoming, 0))
2681 && HARD_REGISTER_P (XEXP (incoming, 0)))))
2682 {
2683 rtx rtl = gen_rtx_ENTRY_VALUE (GET_MODE (incoming));
2684
2685 #ifdef HAVE_window_save
2686 /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
2687 If the target machine has an explicit window save instruction, the
2688 actual entry value is the corresponding OUTGOING_REGNO instead. */
2689 if (REG_P (incoming)
2690 && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
2691 incoming
2692 = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
2693 OUTGOING_REGNO (REGNO (incoming)), 0);
2694 else if (MEM_P (incoming))
2695 {
2696 rtx reg = XEXP (incoming, 0);
2697 if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
2698 {
2699 reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
2700 incoming = replace_equiv_address_nv (incoming, reg);
2701 }
2702 else
2703 incoming = copy_rtx (incoming);
2704 }
2705 #endif
2706
2707 ENTRY_VALUE_EXP (rtl) = incoming;
2708 return rtl;
2709 }
2710
2711 if (incoming
2712 && GET_MODE (incoming) != BLKmode
2713 && !TREE_ADDRESSABLE (decl)
2714 && MEM_P (incoming)
2715 && (XEXP (incoming, 0) == virtual_incoming_args_rtx
2716 || (GET_CODE (XEXP (incoming, 0)) == PLUS
2717 && XEXP (XEXP (incoming, 0), 0) == virtual_incoming_args_rtx
2718 && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
2719 return copy_rtx (incoming);
2720
2721 return NULL_RTX;
2722 }
2723
2724 /* Return an RTX equivalent to the value of the tree expression EXP. */
2725
2726 static rtx
2727 expand_debug_expr (tree exp)
2728 {
2729 rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
2730 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
2731 enum machine_mode inner_mode = VOIDmode;
2732 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
2733 addr_space_t as;
2734
2735 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
2736 {
2737 case tcc_expression:
2738 switch (TREE_CODE (exp))
2739 {
2740 case COND_EXPR:
2741 case DOT_PROD_EXPR:
2742 case WIDEN_MULT_PLUS_EXPR:
2743 case WIDEN_MULT_MINUS_EXPR:
2744 case FMA_EXPR:
2745 goto ternary;
2746
2747 case TRUTH_ANDIF_EXPR:
2748 case TRUTH_ORIF_EXPR:
2749 case TRUTH_AND_EXPR:
2750 case TRUTH_OR_EXPR:
2751 case TRUTH_XOR_EXPR:
2752 goto binary;
2753
2754 case TRUTH_NOT_EXPR:
2755 goto unary;
2756
2757 default:
2758 break;
2759 }
2760 break;
2761
2762 ternary:
2763 op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
2764 if (!op2)
2765 return NULL_RTX;
2766 /* Fall through. */
2767
2768 binary:
2769 case tcc_binary:
2770 case tcc_comparison:
2771 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
2772 if (!op1)
2773 return NULL_RTX;
2774 /* Fall through. */
2775
2776 unary:
2777 case tcc_unary:
2778 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
2779 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2780 if (!op0)
2781 return NULL_RTX;
2782 break;
2783
2784 case tcc_type:
2785 case tcc_statement:
2786 gcc_unreachable ();
2787
2788 case tcc_constant:
2789 case tcc_exceptional:
2790 case tcc_declaration:
2791 case tcc_reference:
2792 case tcc_vl_exp:
2793 break;
2794 }
2795
2796 switch (TREE_CODE (exp))
2797 {
2798 case STRING_CST:
2799 if (!lookup_constant_def (exp))
2800 {
2801 if (strlen (TREE_STRING_POINTER (exp)) + 1
2802 != (size_t) TREE_STRING_LENGTH (exp))
2803 return NULL_RTX;
2804 op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
2805 op0 = gen_rtx_MEM (BLKmode, op0);
2806 set_mem_attributes (op0, exp, 0);
2807 return op0;
2808 }
2809 /* Fall through... */
2810
2811 case INTEGER_CST:
2812 case REAL_CST:
2813 case FIXED_CST:
2814 op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
2815 return op0;
2816
2817 case COMPLEX_CST:
2818 gcc_assert (COMPLEX_MODE_P (mode));
2819 op0 = expand_debug_expr (TREE_REALPART (exp));
2820 op1 = expand_debug_expr (TREE_IMAGPART (exp));
2821 return gen_rtx_CONCAT (mode, op0, op1);
2822
2823 case DEBUG_EXPR_DECL:
2824 op0 = DECL_RTL_IF_SET (exp);
2825
2826 if (op0)
2827 return op0;
2828
2829 op0 = gen_rtx_DEBUG_EXPR (mode);
2830 DEBUG_EXPR_TREE_DECL (op0) = exp;
2831 SET_DECL_RTL (exp, op0);
2832
2833 return op0;
2834
2835 case VAR_DECL:
2836 case PARM_DECL:
2837 case FUNCTION_DECL:
2838 case LABEL_DECL:
2839 case CONST_DECL:
2840 case RESULT_DECL:
2841 op0 = DECL_RTL_IF_SET (exp);
2842
2843 /* This decl was probably optimized away. */
2844 if (!op0)
2845 {
2846 if (TREE_CODE (exp) != VAR_DECL
2847 || DECL_EXTERNAL (exp)
2848 || !TREE_STATIC (exp)
2849 || !DECL_NAME (exp)
2850 || DECL_HARD_REGISTER (exp)
2851 || DECL_IN_CONSTANT_POOL (exp)
2852 || mode == VOIDmode)
2853 return NULL;
2854
2855 op0 = make_decl_rtl_for_debug (exp);
2856 if (!MEM_P (op0)
2857 || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
2858 || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
2859 return NULL;
2860 }
2861 else
2862 op0 = copy_rtx (op0);
2863
2864 if (GET_MODE (op0) == BLKmode
2865 /* If op0 is not BLKmode, but BLKmode is, adjust_mode
2866 below would ICE. While it is likely a FE bug,
2867 try to be robust here. See PR43166. */
2868 || mode == BLKmode
2869 || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
2870 {
2871 gcc_assert (MEM_P (op0));
2872 op0 = adjust_address_nv (op0, mode, 0);
2873 return op0;
2874 }
2875
2876 /* Fall through. */
2877
2878 adjust_mode:
2879 case PAREN_EXPR:
2880 case NOP_EXPR:
2881 case CONVERT_EXPR:
2882 {
2883 inner_mode = GET_MODE (op0);
2884
2885 if (mode == inner_mode)
2886 return op0;
2887
2888 if (inner_mode == VOIDmode)
2889 {
2890 if (TREE_CODE (exp) == SSA_NAME)
2891 inner_mode = TYPE_MODE (TREE_TYPE (exp));
2892 else
2893 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
2894 if (mode == inner_mode)
2895 return op0;
2896 }
2897
2898 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
2899 {
2900 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
2901 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
2902 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
2903 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
2904 else
2905 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
2906 }
2907 else if (FLOAT_MODE_P (mode))
2908 {
2909 gcc_assert (TREE_CODE (exp) != SSA_NAME);
2910 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
2911 op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
2912 else
2913 op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
2914 }
2915 else if (FLOAT_MODE_P (inner_mode))
2916 {
2917 if (unsignedp)
2918 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
2919 else
2920 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
2921 }
2922 else if (CONSTANT_P (op0)
2923 || GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (inner_mode))
2924 op0 = simplify_gen_subreg (mode, op0, inner_mode,
2925 subreg_lowpart_offset (mode,
2926 inner_mode));
2927 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == tcc_unary
2928 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
2929 : unsignedp)
2930 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
2931 else
2932 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
2933
2934 return op0;
2935 }
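/* For example, a debug reference to a signed SImode value in a DImode
   context is represented above as (sign_extend:DI ...); the conversion is
   built with the simplify_* routines so no insns are emitted.  */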
2936
2937 case MEM_REF:
2938 if (!is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
2939 {
2940 tree newexp = fold_binary (MEM_REF, TREE_TYPE (exp),
2941 TREE_OPERAND (exp, 0),
2942 TREE_OPERAND (exp, 1));
2943 if (newexp)
2944 return expand_debug_expr (newexp);
2945 }
2946 /* FALLTHROUGH */
2947 case INDIRECT_REF:
2948 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
2949 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2950 if (!op0)
2951 return NULL;
2952
2953 if (TREE_CODE (exp) == MEM_REF)
2954 {
2955 if (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
2956 || (GET_CODE (op0) == PLUS
2957 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR))
2958 /* (mem (debug_implicit_ptr)) might confuse aliasing.
2959 Instead just use get_inner_reference. */
2960 goto component_ref;
2961
2962 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
2963 if (!op1 || !CONST_INT_P (op1))
2964 return NULL;
2965
2966 op0 = plus_constant (inner_mode, op0, INTVAL (op1));
2967 }
2968
2969 if (POINTER_TYPE_P (TREE_TYPE (exp)))
2970 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
2971 else
2972 as = ADDR_SPACE_GENERIC;
2973
2974 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
2975 op0, as);
2976 if (op0 == NULL_RTX)
2977 return NULL;
2978
2979 op0 = gen_rtx_MEM (mode, op0);
2980 set_mem_attributes (op0, exp, 0);
2981 if (TREE_CODE (exp) == MEM_REF
2982 && !is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
2983 set_mem_expr (op0, NULL_TREE);
2984 set_mem_addr_space (op0, as);
2985
2986 return op0;
2987
2988 case TARGET_MEM_REF:
2989 if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
2990 && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp), 0)))
2991 return NULL;
2992
2993 op0 = expand_debug_expr
2994 (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
2995 if (!op0)
2996 return NULL;
2997
2998 if (POINTER_TYPE_P (TREE_TYPE (exp)))
2999 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
3000 else
3001 as = ADDR_SPACE_GENERIC;
3002
3003 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
3004 op0, as);
3005 if (op0 == NULL_RTX)
3006 return NULL;
3007
3008 op0 = gen_rtx_MEM (mode, op0);
3009
3010 set_mem_attributes (op0, exp, 0);
3011 set_mem_addr_space (op0, as);
3012
3013 return op0;
3014
3015 component_ref:
3016 case ARRAY_REF:
3017 case ARRAY_RANGE_REF:
3018 case COMPONENT_REF:
3019 case BIT_FIELD_REF:
3020 case REALPART_EXPR:
3021 case IMAGPART_EXPR:
3022 case VIEW_CONVERT_EXPR:
3023 {
3024 enum machine_mode mode1;
3025 HOST_WIDE_INT bitsize, bitpos;
3026 tree offset;
3027 int volatilep = 0;
3028 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
3029 &mode1, &unsignedp, &volatilep, false);
3030 rtx orig_op0;
3031
3032 if (bitsize == 0)
3033 return NULL;
3034
3035 orig_op0 = op0 = expand_debug_expr (tem);
3036
3037 if (!op0)
3038 return NULL;
3039
3040 if (offset)
3041 {
3042 enum machine_mode addrmode, offmode;
3043
3044 if (!MEM_P (op0))
3045 return NULL;
3046
3047 op0 = XEXP (op0, 0);
3048 addrmode = GET_MODE (op0);
3049 if (addrmode == VOIDmode)
3050 addrmode = Pmode;
3051
3052 op1 = expand_debug_expr (offset);
3053 if (!op1)
3054 return NULL;
3055
3056 offmode = GET_MODE (op1);
3057 if (offmode == VOIDmode)
3058 offmode = TYPE_MODE (TREE_TYPE (offset));
3059
3060 if (addrmode != offmode)
3061 op1 = simplify_gen_subreg (addrmode, op1, offmode,
3062 subreg_lowpart_offset (addrmode,
3063 offmode));
3064
3065 /* Don't use offset_address here; we don't need a
3066 recognizable address, and we don't want to generate
3067 code. */
3068 op0 = gen_rtx_MEM (mode, simplify_gen_binary (PLUS, addrmode,
3069 op0, op1));
3070 }
3071
3072 if (MEM_P (op0))
3073 {
3074 if (mode1 == VOIDmode)
3075 /* Bitfield. */
3076 mode1 = smallest_mode_for_size (bitsize, MODE_INT);
3077 if (bitpos >= BITS_PER_UNIT)
3078 {
3079 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
3080 bitpos %= BITS_PER_UNIT;
3081 }
3082 else if (bitpos < 0)
3083 {
3084 HOST_WIDE_INT units
3085 = (-bitpos + BITS_PER_UNIT - 1) / BITS_PER_UNIT;
3086 op0 = adjust_address_nv (op0, mode1, units);
3087 bitpos += units * BITS_PER_UNIT;
3088 }
3089 else if (bitpos == 0 && bitsize == GET_MODE_BITSIZE (mode))
3090 op0 = adjust_address_nv (op0, mode, 0);
3091 else if (GET_MODE (op0) != mode1)
3092 op0 = adjust_address_nv (op0, mode1, 0);
3093 else
3094 op0 = copy_rtx (op0);
3095 if (op0 == orig_op0)
3096 op0 = shallow_copy_rtx (op0);
3097 set_mem_attributes (op0, exp, 0);
3098 }
3099
3100 if (bitpos == 0 && mode == GET_MODE (op0))
3101 return op0;
3102
3103 if (bitpos < 0)
3104 return NULL;
3105
3106 if (GET_MODE (op0) == BLKmode)
3107 return NULL;
3108
3109 if ((bitpos % BITS_PER_UNIT) == 0
3110 && bitsize == GET_MODE_BITSIZE (mode1))
3111 {
3112 enum machine_mode opmode = GET_MODE (op0);
3113
3114 if (opmode == VOIDmode)
3115 opmode = TYPE_MODE (TREE_TYPE (tem));
3116
3117 /* This condition may hold if we're expanding the address
3118 right past the end of an array that turned out not to
3119 be addressable (i.e., the address was only computed in
3120 debug stmts). The gen_subreg below would rightfully
3121 crash, and the address doesn't really exist, so just
3122 drop it. */
3123 if (bitpos >= GET_MODE_BITSIZE (opmode))
3124 return NULL;
3125
3126 if ((bitpos % GET_MODE_BITSIZE (mode)) == 0)
3127 return simplify_gen_subreg (mode, op0, opmode,
3128 bitpos / BITS_PER_UNIT);
3129 }
3130
3131 return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
3132 && TYPE_UNSIGNED (TREE_TYPE (exp))
3133 ? SIGN_EXTRACT
3134 : ZERO_EXTRACT, mode,
3135 GET_MODE (op0) != VOIDmode
3136 ? GET_MODE (op0)
3137 : TYPE_MODE (TREE_TYPE (tem)),
3138 op0, GEN_INT (bitsize), GEN_INT (bitpos));
3139 }
3140
3141 case ABS_EXPR:
3142 return simplify_gen_unary (ABS, mode, op0, mode);
3143
3144 case NEGATE_EXPR:
3145 return simplify_gen_unary (NEG, mode, op0, mode);
3146
3147 case BIT_NOT_EXPR:
3148 return simplify_gen_unary (NOT, mode, op0, mode);
3149
3150 case FLOAT_EXPR:
3151 return simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3152 0)))
3153 ? UNSIGNED_FLOAT : FLOAT, mode, op0,
3154 inner_mode);
3155
3156 case FIX_TRUNC_EXPR:
3157 return simplify_gen_unary (unsignedp ? UNSIGNED_FIX : FIX, mode, op0,
3158 inner_mode);
3159
3160 case POINTER_PLUS_EXPR:
3161 /* For the rare target where pointers are not the same size as
3162 size_t, we need to check for mis-matched modes and correct
3163 the addend. */
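/* Purely as a hypothetical illustration: if pointers were SImode but
   sizetype only HImode, the HImode addend OP1 would be sign-extended to
   SImode below before the PLUS is formed.  */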
3164 if (op0 && op1
3165 && GET_MODE (op0) != VOIDmode && GET_MODE (op1) != VOIDmode
3166 && GET_MODE (op0) != GET_MODE (op1))
3167 {
3168 if (GET_MODE_BITSIZE (GET_MODE (op0)) < GET_MODE_BITSIZE (GET_MODE (op1))
3169 /* If OP0 is a partial mode, then we must truncate, even if it has
3170 the same bitsize as OP1 as GCC's representation of partial modes
3171 is opaque. */
3172 || (GET_MODE_CLASS (GET_MODE (op0)) == MODE_PARTIAL_INT
3173 && GET_MODE_BITSIZE (GET_MODE (op0)) == GET_MODE_BITSIZE (GET_MODE (op1))))
3174 op1 = simplify_gen_unary (TRUNCATE, GET_MODE (op0), op1,
3175 GET_MODE (op1));
3176 else
3177 /* We always sign-extend, regardless of the signedness of
3178 the operand, because the operand is always unsigned
3179 here even if the original C expression is signed. */
3180 op1 = simplify_gen_unary (SIGN_EXTEND, GET_MODE (op0), op1,
3181 GET_MODE (op1));
3182 }
3183 /* Fall through. */
3184 case PLUS_EXPR:
3185 return simplify_gen_binary (PLUS, mode, op0, op1);
3186
3187 case MINUS_EXPR:
3188 return simplify_gen_binary (MINUS, mode, op0, op1);
3189
3190 case MULT_EXPR:
3191 return simplify_gen_binary (MULT, mode, op0, op1);
3192
3193 case RDIV_EXPR:
3194 case TRUNC_DIV_EXPR:
3195 case EXACT_DIV_EXPR:
3196 if (unsignedp)
3197 return simplify_gen_binary (UDIV, mode, op0, op1);
3198 else
3199 return simplify_gen_binary (DIV, mode, op0, op1);
3200
3201 case TRUNC_MOD_EXPR:
3202 return simplify_gen_binary (unsignedp ? UMOD : MOD, mode, op0, op1);
3203
3204 case FLOOR_DIV_EXPR:
3205 if (unsignedp)
3206 return simplify_gen_binary (UDIV, mode, op0, op1);
3207 else
3208 {
3209 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
3210 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3211 rtx adj = floor_sdiv_adjust (mode, mod, op1);
3212 return simplify_gen_binary (PLUS, mode, div, adj);
3213 }
3214
3215 case FLOOR_MOD_EXPR:
3216 if (unsignedp)
3217 return simplify_gen_binary (UMOD, mode, op0, op1);
3218 else
3219 {
3220 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3221 rtx adj = floor_sdiv_adjust (mode, mod, op1);
3222 adj = simplify_gen_unary (NEG, mode,
3223 simplify_gen_binary (MULT, mode, adj, op1),
3224 mode);
3225 return simplify_gen_binary (PLUS, mode, mod, adj);
3226 }
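/* A worked example of the identity used above: for -7 floor-mod 2 the
   truncated remainder is -1 and the floor adjustment of the quotient is -1,
   so the result is -1 - (-1 * 2) = 1.  */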
3227
3228 case CEIL_DIV_EXPR:
3229 if (unsignedp)
3230 {
3231 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
3232 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
3233 rtx adj = ceil_udiv_adjust (mode, mod, op1);
3234 return simplify_gen_binary (PLUS, mode, div, adj);
3235 }
3236 else
3237 {
3238 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
3239 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3240 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
3241 return simplify_gen_binary (PLUS, mode, div, adj);
3242 }
3243
3244 case CEIL_MOD_EXPR:
3245 if (unsignedp)
3246 {
3247 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
3248 rtx adj = ceil_udiv_adjust (mode, mod, op1);
3249 adj = simplify_gen_unary (NEG, mode,
3250 simplify_gen_binary (MULT, mode, adj, op1),
3251 mode);
3252 return simplify_gen_binary (PLUS, mode, mod, adj);
3253 }
3254 else
3255 {
3256 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3257 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
3258 adj = simplify_gen_unary (NEG, mode,
3259 simplify_gen_binary (MULT, mode, adj, op1),
3260 mode);
3261 return simplify_gen_binary (PLUS, mode, mod, adj);
3262 }
3263
3264 case ROUND_DIV_EXPR:
3265 if (unsignedp)
3266 {
3267 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
3268 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
3269 rtx adj = round_udiv_adjust (mode, mod, op1);
3270 return simplify_gen_binary (PLUS, mode, div, adj);
3271 }
3272 else
3273 {
3274 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
3275 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3276 rtx adj = round_sdiv_adjust (mode, mod, op1);
3277 return simplify_gen_binary (PLUS, mode, div, adj);
3278 }
3279
3280 case ROUND_MOD_EXPR:
3281 if (unsignedp)
3282 {
3283 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
3284 rtx adj = round_udiv_adjust (mode, mod, op1);
3285 adj = simplify_gen_unary (NEG, mode,
3286 simplify_gen_binary (MULT, mode, adj, op1),
3287 mode);
3288 return simplify_gen_binary (PLUS, mode, mod, adj);
3289 }
3290 else
3291 {
3292 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3293 rtx adj = round_sdiv_adjust (mode, mod, op1);
3294 adj = simplify_gen_unary (NEG, mode,
3295 simplify_gen_binary (MULT, mode, adj, op1),
3296 mode);
3297 return simplify_gen_binary (PLUS, mode, mod, adj);
3298 }
3299
3300 case LSHIFT_EXPR:
3301 return simplify_gen_binary (ASHIFT, mode, op0, op1);
3302
3303 case RSHIFT_EXPR:
3304 if (unsignedp)
3305 return simplify_gen_binary (LSHIFTRT, mode, op0, op1);
3306 else
3307 return simplify_gen_binary (ASHIFTRT, mode, op0, op1);
3308
3309 case LROTATE_EXPR:
3310 return simplify_gen_binary (ROTATE, mode, op0, op1);
3311
3312 case RROTATE_EXPR:
3313 return simplify_gen_binary (ROTATERT, mode, op0, op1);
3314
3315 case MIN_EXPR:
3316 return simplify_gen_binary (unsignedp ? UMIN : SMIN, mode, op0, op1);
3317
3318 case MAX_EXPR:
3319 return simplify_gen_binary (unsignedp ? UMAX : SMAX, mode, op0, op1);
3320
3321 case BIT_AND_EXPR:
3322 case TRUTH_AND_EXPR:
3323 return simplify_gen_binary (AND, mode, op0, op1);
3324
3325 case BIT_IOR_EXPR:
3326 case TRUTH_OR_EXPR:
3327 return simplify_gen_binary (IOR, mode, op0, op1);
3328
3329 case BIT_XOR_EXPR:
3330 case TRUTH_XOR_EXPR:
3331 return simplify_gen_binary (XOR, mode, op0, op1);
3332
3333 case TRUTH_ANDIF_EXPR:
3334 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
3335
3336 case TRUTH_ORIF_EXPR:
3337 return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
3338
3339 case TRUTH_NOT_EXPR:
3340 return simplify_gen_relational (EQ, mode, inner_mode, op0, const0_rtx);
3341
3342 case LT_EXPR:
3343 return simplify_gen_relational (unsignedp ? LTU : LT, mode, inner_mode,
3344 op0, op1);
3345
3346 case LE_EXPR:
3347 return simplify_gen_relational (unsignedp ? LEU : LE, mode, inner_mode,
3348 op0, op1);
3349
3350 case GT_EXPR:
3351 return simplify_gen_relational (unsignedp ? GTU : GT, mode, inner_mode,
3352 op0, op1);
3353
3354 case GE_EXPR:
3355 return simplify_gen_relational (unsignedp ? GEU : GE, mode, inner_mode,
3356 op0, op1);
3357
3358 case EQ_EXPR:
3359 return simplify_gen_relational (EQ, mode, inner_mode, op0, op1);
3360
3361 case NE_EXPR:
3362 return simplify_gen_relational (NE, mode, inner_mode, op0, op1);
3363
3364 case UNORDERED_EXPR:
3365 return simplify_gen_relational (UNORDERED, mode, inner_mode, op0, op1);
3366
3367 case ORDERED_EXPR:
3368 return simplify_gen_relational (ORDERED, mode, inner_mode, op0, op1);
3369
3370 case UNLT_EXPR:
3371 return simplify_gen_relational (UNLT, mode, inner_mode, op0, op1);
3372
3373 case UNLE_EXPR:
3374 return simplify_gen_relational (UNLE, mode, inner_mode, op0, op1);
3375
3376 case UNGT_EXPR:
3377 return simplify_gen_relational (UNGT, mode, inner_mode, op0, op1);
3378
3379 case UNGE_EXPR:
3380 return simplify_gen_relational (UNGE, mode, inner_mode, op0, op1);
3381
3382 case UNEQ_EXPR:
3383 return simplify_gen_relational (UNEQ, mode, inner_mode, op0, op1);
3384
3385 case LTGT_EXPR:
3386 return simplify_gen_relational (LTGT, mode, inner_mode, op0, op1);
3387
3388 case COND_EXPR:
3389 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
3390
3391 case COMPLEX_EXPR:
3392 gcc_assert (COMPLEX_MODE_P (mode));
3393 if (GET_MODE (op0) == VOIDmode)
3394 op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
3395 if (GET_MODE (op1) == VOIDmode)
3396 op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
3397 return gen_rtx_CONCAT (mode, op0, op1);
3398
3399 case CONJ_EXPR:
3400 if (GET_CODE (op0) == CONCAT)
3401 return gen_rtx_CONCAT (mode, XEXP (op0, 0),
3402 simplify_gen_unary (NEG, GET_MODE_INNER (mode),
3403 XEXP (op0, 1),
3404 GET_MODE_INNER (mode)));
3405 else
3406 {
3407 enum machine_mode imode = GET_MODE_INNER (mode);
3408 rtx re, im;
3409
3410 if (MEM_P (op0))
3411 {
3412 re = adjust_address_nv (op0, imode, 0);
3413 im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
3414 }
3415 else
3416 {
3417 enum machine_mode ifmode = int_mode_for_mode (mode);
3418 enum machine_mode ihmode = int_mode_for_mode (imode);
3419 rtx halfsize;
3420 if (ifmode == BLKmode || ihmode == BLKmode)
3421 return NULL;
3422 halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
3423 re = op0;
3424 if (mode != ifmode)
3425 re = gen_rtx_SUBREG (ifmode, re, 0);
3426 re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
3427 if (imode != ihmode)
3428 re = gen_rtx_SUBREG (imode, re, 0);
3429 im = copy_rtx (op0);
3430 if (mode != ifmode)
3431 im = gen_rtx_SUBREG (ifmode, im, 0);
3432 im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
3433 if (imode != ihmode)
3434 im = gen_rtx_SUBREG (imode, im, 0);
3435 }
3436 im = gen_rtx_NEG (imode, im);
3437 return gen_rtx_CONCAT (mode, re, im);
3438 }
3439
3440 case ADDR_EXPR:
3441 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
3442 if (!op0 || !MEM_P (op0))
3443 {
3444 if ((TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
3445 || TREE_CODE (TREE_OPERAND (exp, 0)) == PARM_DECL
3446 || TREE_CODE (TREE_OPERAND (exp, 0)) == RESULT_DECL)
3447 && (!TREE_ADDRESSABLE (TREE_OPERAND (exp, 0))
3448 || target_for_debug_bind (TREE_OPERAND (exp, 0))))
3449 return gen_rtx_DEBUG_IMPLICIT_PTR (mode, TREE_OPERAND (exp, 0));
3450
3451 if (handled_component_p (TREE_OPERAND (exp, 0)))
3452 {
3453 HOST_WIDE_INT bitoffset, bitsize, maxsize;
3454 tree decl
3455 = get_ref_base_and_extent (TREE_OPERAND (exp, 0),
3456 &bitoffset, &bitsize, &maxsize);
3457 if ((TREE_CODE (decl) == VAR_DECL
3458 || TREE_CODE (decl) == PARM_DECL
3459 || TREE_CODE (decl) == RESULT_DECL)
3460 && (!TREE_ADDRESSABLE (decl)
3461 || target_for_debug_bind (decl))
3462 && (bitoffset % BITS_PER_UNIT) == 0
3463 && bitsize > 0
3464 && bitsize == maxsize)
3465 {
3466 rtx base = gen_rtx_DEBUG_IMPLICIT_PTR (mode, decl);
3467 return plus_constant (mode, base, bitoffset / BITS_PER_UNIT);
3468 }
3469 }
3470
3471 if (TREE_CODE (TREE_OPERAND (exp, 0)) == MEM_REF
3472 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
3473 == ADDR_EXPR)
3474 {
3475 op0 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
3476 0));
3477 if (op0 != NULL
3478 && (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
3479 || (GET_CODE (op0) == PLUS
3480 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR
3481 && CONST_INT_P (XEXP (op0, 1)))))
3482 {
3483 op1 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
3484 1));
3485 if (!op1 || !CONST_INT_P (op1))
3486 return NULL;
3487
3488 return plus_constant (mode, op0, INTVAL (op1));
3489 }
3490 }
3491
3492 return NULL;
3493 }
3494
3495 as = TYPE_ADDR_SPACE (TREE_TYPE (exp));
3496 op0 = convert_debug_memory_address (mode, XEXP (op0, 0), as);
3497
3498 return op0;
3499
3500 case VECTOR_CST:
3501 {
3502 unsigned i;
3503
3504 op0 = gen_rtx_CONCATN
3505 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
3506
3507 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
3508 {
3509 op1 = expand_debug_expr (VECTOR_CST_ELT (exp, i));
3510 if (!op1)
3511 return NULL;
3512 XVECEXP (op0, 0, i) = op1;
3513 }
3514
3515 return op0;
3516 }
3517
3518 case CONSTRUCTOR:
3519 if (TREE_CLOBBER_P (exp))
3520 return NULL;
3521 else if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
3522 {
3523 unsigned i;
3524 tree val;
3525
3526 op0 = gen_rtx_CONCATN
3527 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
3528
3529 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
3530 {
3531 op1 = expand_debug_expr (val);
3532 if (!op1)
3533 return NULL;
3534 XVECEXP (op0, 0, i) = op1;
3535 }
3536
3537 if (i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)))
3538 {
3539 op1 = expand_debug_expr
3540 (build_zero_cst (TREE_TYPE (TREE_TYPE (exp))));
3541
3542 if (!op1)
3543 return NULL;
3544
3545 for (; i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)); i++)
3546 XVECEXP (op0, 0, i) = op1;
3547 }
3548
3549 return op0;
3550 }
3551 else
3552 goto flag_unsupported;
3553
3554 case CALL_EXPR:
3555 /* ??? Maybe handle some builtins? */
3556 return NULL;
3557
3558 case SSA_NAME:
3559 {
3560 gimple g = get_gimple_for_ssa_name (exp);
3561 if (g)
3562 {
3563 op0 = expand_debug_expr (gimple_assign_rhs_to_tree (g));
3564 if (!op0)
3565 return NULL;
3566 }
3567 else
3568 {
3569 int part = var_to_partition (SA.map, exp);
3570
3571 if (part == NO_PARTITION)
3572 {
3573 /* If this is a reference to the incoming value of a parameter
3574 that is never used in the code, or whose incoming
3575 value is never used in the code, use the PARM_DECL's
3576 DECL_RTL if set. */
3577 if (SSA_NAME_IS_DEFAULT_DEF (exp)
3578 && TREE_CODE (SSA_NAME_VAR (exp)) == PARM_DECL)
3579 {
3580 op0 = expand_debug_parm_decl (SSA_NAME_VAR (exp));
3581 if (op0)
3582 goto adjust_mode;
3583 op0 = expand_debug_expr (SSA_NAME_VAR (exp));
3584 if (op0)
3585 goto adjust_mode;
3586 }
3587 return NULL;
3588 }
3589
3590 gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
3591
3592 op0 = copy_rtx (SA.partition_to_pseudo[part]);
3593 }
3594 goto adjust_mode;
3595 }
3596
3597 case ERROR_MARK:
3598 return NULL;
3599
3600 /* Vector stuff. For most of the codes we don't have rtl codes. */
3601 case REALIGN_LOAD_EXPR:
3602 case REDUC_MAX_EXPR:
3603 case REDUC_MIN_EXPR:
3604 case REDUC_PLUS_EXPR:
3605 case VEC_COND_EXPR:
3606 case VEC_LSHIFT_EXPR:
3607 case VEC_PACK_FIX_TRUNC_EXPR:
3608 case VEC_PACK_SAT_EXPR:
3609 case VEC_PACK_TRUNC_EXPR:
3610 case VEC_RSHIFT_EXPR:
3611 case VEC_UNPACK_FLOAT_HI_EXPR:
3612 case VEC_UNPACK_FLOAT_LO_EXPR:
3613 case VEC_UNPACK_HI_EXPR:
3614 case VEC_UNPACK_LO_EXPR:
3615 case VEC_WIDEN_MULT_HI_EXPR:
3616 case VEC_WIDEN_MULT_LO_EXPR:
3617 case VEC_WIDEN_MULT_EVEN_EXPR:
3618 case VEC_WIDEN_MULT_ODD_EXPR:
3619 case VEC_WIDEN_LSHIFT_HI_EXPR:
3620 case VEC_WIDEN_LSHIFT_LO_EXPR:
3621 case VEC_PERM_EXPR:
3622 return NULL;
3623
3624 /* Misc codes. */
3625 case ADDR_SPACE_CONVERT_EXPR:
3626 case FIXED_CONVERT_EXPR:
3627 case OBJ_TYPE_REF:
3628 case WITH_SIZE_EXPR:
3629 return NULL;
3630
3631 case DOT_PROD_EXPR:
3632 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3633 && SCALAR_INT_MODE_P (mode))
3634 {
3635 op0
3636 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3637 0)))
3638 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
3639 inner_mode);
3640 op1
3641 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3642 1)))
3643 ? ZERO_EXTEND : SIGN_EXTEND, mode, op1,
3644 inner_mode);
3645 op0 = simplify_gen_binary (MULT, mode, op0, op1);
3646 return simplify_gen_binary (PLUS, mode, op0, op2);
3647 }
3648 return NULL;
3649
3650 case WIDEN_MULT_EXPR:
3651 case WIDEN_MULT_PLUS_EXPR:
3652 case WIDEN_MULT_MINUS_EXPR:
3653 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3654 && SCALAR_INT_MODE_P (mode))
3655 {
3656 inner_mode = GET_MODE (op0);
3657 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
3658 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
3659 else
3660 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
3661 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
3662 op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
3663 else
3664 op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
3665 op0 = simplify_gen_binary (MULT, mode, op0, op1);
3666 if (TREE_CODE (exp) == WIDEN_MULT_EXPR)
3667 return op0;
3668 else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR)
3669 return simplify_gen_binary (PLUS, mode, op0, op2);
3670 else
3671 return simplify_gen_binary (MINUS, mode, op2, op0);
3672 }
3673 return NULL;
3674
3675 case MULT_HIGHPART_EXPR:
3676 /* ??? Similar to the above. */
3677 return NULL;
3678
3679 case WIDEN_SUM_EXPR:
3680 case WIDEN_LSHIFT_EXPR:
3681 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3682 && SCALAR_INT_MODE_P (mode))
3683 {
3684 op0
3685 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3686 0)))
3687 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
3688 inner_mode);
3689 return simplify_gen_binary (TREE_CODE (exp) == WIDEN_LSHIFT_EXPR
3690 ? ASHIFT : PLUS, mode, op0, op1);
3691 }
3692 return NULL;
3693
3694 case FMA_EXPR:
3695 return simplify_gen_ternary (FMA, mode, inner_mode, op0, op1, op2);
3696
3697 default:
3698 flag_unsupported:
3699 #ifdef ENABLE_CHECKING
3700 debug_tree (exp);
3701 gcc_unreachable ();
3702 #else
3703 return NULL;
3704 #endif
3705 }
3706 }
3707
3708 /* Return an RTX equivalent to the source bind value of the tree expression
3709 EXP. */
3710
3711 static rtx
3712 expand_debug_source_expr (tree exp)
3713 {
3714 rtx op0 = NULL_RTX;
3715 enum machine_mode mode = VOIDmode, inner_mode;
3716
3717 switch (TREE_CODE (exp))
3718 {
3719 case PARM_DECL:
3720 {
3721 mode = DECL_MODE (exp);
3722 op0 = expand_debug_parm_decl (exp);
3723 if (op0)
3724 break;
3725 /* Check whether this is an argument that has been completely
3726 optimized out. */
3727 if (!DECL_RTL_SET_P (exp)
3728 && !DECL_INCOMING_RTL (exp)
3729 && DECL_ABSTRACT_ORIGIN (current_function_decl))
3730 {
3731 tree aexp = DECL_ORIGIN (exp);
3732 if (DECL_CONTEXT (aexp)
3733 == DECL_ABSTRACT_ORIGIN (current_function_decl))
3734 {
3735 vec<tree, va_gc> **debug_args;
3736 unsigned int ix;
3737 tree ddecl;
3738 debug_args = decl_debug_args_lookup (current_function_decl);
3739 if (debug_args != NULL)
3740 {
3741 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl);
3742 ix += 2)
3743 if (ddecl == aexp)
3744 return gen_rtx_DEBUG_PARAMETER_REF (mode, aexp);
3745 }
3746 }
3747 }
3748 break;
3749 }
3750 default:
3751 break;
3752 }
3753
3754 if (op0 == NULL_RTX)
3755 return NULL_RTX;
3756
3757 inner_mode = GET_MODE (op0);
3758 if (mode == inner_mode)
3759 return op0;
3760
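/* Convert OP0 from its own mode to the mode of the PARM_DECL: a float-to-float conversion, a float/integer conversion, or a plain subreg/extension for integer modes.  */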
3761 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
3762 {
3763 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
3764 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
3765 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
3766 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
3767 else
3768 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
3769 }
3770 else if (FLOAT_MODE_P (mode))
3771 gcc_unreachable ();
3772 else if (FLOAT_MODE_P (inner_mode))
3773 {
3774 if (TYPE_UNSIGNED (TREE_TYPE (exp)))
3775 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
3776 else
3777 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
3778 }
3779 else if (CONSTANT_P (op0)
3780 || GET_MODE_BITSIZE (mode) <= GET_MODE_BITSIZE (inner_mode))
3781 op0 = simplify_gen_subreg (mode, op0, inner_mode,
3782 subreg_lowpart_offset (mode, inner_mode));
3783 else if (TYPE_UNSIGNED (TREE_TYPE (exp)))
3784 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
3785 else
3786 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
3787
3788 return op0;
3789 }
3790
3791 /* Ensure INSN_VAR_LOCATION_LOC (insn) doesn't have unbounded complexity.
3792 Allow 4 levels of rtl nesting for most rtl codes, and if we see anything
3793 deeper than that, create DEBUG_EXPRs and emit DEBUG_INSNs before INSN. */
3794
3795 static void
3796 avoid_complex_debug_insns (rtx insn, rtx *exp_p, int depth)
3797 {
3798 rtx exp = *exp_p;
3799
3800 if (exp == NULL_RTX)
3801 return;
3802
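/* Non-MEM leaf objects (registers, constants, ...) and CLOBBERs need no splitting; leave them alone.  */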
3803 if ((OBJECT_P (exp) && !MEM_P (exp)) || GET_CODE (exp) == CLOBBER)
3804 return;
3805
3806 if (depth == 4)
3807 {
3808 /* Create DEBUG_EXPR (and DEBUG_EXPR_DECL). */
3809 rtx dval = make_debug_expr_from_rtl (exp);
3810
3811 /* Emit a debug bind insn before INSN. */
3812 rtx bind = gen_rtx_VAR_LOCATION (GET_MODE (exp),
3813 DEBUG_EXPR_TREE_DECL (dval), exp,
3814 VAR_INIT_STATUS_INITIALIZED);
3815
3816 emit_debug_insn_before (bind, insn);
3817 *exp_p = dval;
3818 return;
3819 }
3820
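/* Otherwise recurse into each rtx operand ('e') and into the elements of rtx vectors ('E'/'V'), one nesting level deeper.  */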
3821 const char *format_ptr = GET_RTX_FORMAT (GET_CODE (exp));
3822 int i, j;
3823 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (exp)); i++)
3824 switch (*format_ptr++)
3825 {
3826 case 'e':
3827 avoid_complex_debug_insns (insn, &XEXP (exp, i), depth + 1);
3828 break;
3829
3830 case 'E':
3831 case 'V':
3832 for (j = 0; j < XVECLEN (exp, i); j++)
3833 avoid_complex_debug_insns (insn, &XVECEXP (exp, i, j), depth + 1);
3834 break;
3835
3836 default:
3837 break;
3838 }
3839 }
3840
3841 /* Expand the _LOCs in debug insns. We run this after expanding all
3842 regular insns, so that any variables referenced in the function
3843 will have their DECL_RTLs set. */
3844
3845 static void
3846 expand_debug_locations (void)
3847 {
3848 rtx insn;
3849 rtx last = get_last_insn ();
3850 int save_strict_alias = flag_strict_aliasing;
3851
3852 /* New alias sets created while setting up memory attributes cause
3853 -fcompare-debug failures, even though they don't bring about any
3854 codegen changes. */
3855 flag_strict_aliasing = 0;
3856
3857 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3858 if (DEBUG_INSN_P (insn))
3859 {
3860 tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
3861 rtx val, prev_insn, insn2;
3862 enum machine_mode mode;
3863
3864 if (value == NULL_TREE)
3865 val = NULL_RTX;
3866 else
3867 {
3868 if (INSN_VAR_LOCATION_STATUS (insn)
3869 == VAR_INIT_STATUS_UNINITIALIZED)
3870 val = expand_debug_source_expr (value);
3871 else
3872 val = expand_debug_expr (value);
3873 gcc_assert (last == get_last_insn ());
3874 }
3875
3876 if (!val)
3877 val = gen_rtx_UNKNOWN_VAR_LOC ();
3878 else
3879 {
3880 mode = GET_MODE (INSN_VAR_LOCATION (insn));
3881
3882 gcc_assert (mode == GET_MODE (val)
3883 || (GET_MODE (val) == VOIDmode
3884 && (CONST_SCALAR_INT_P (val)
3885 || GET_CODE (val) == CONST_FIXED
3886 || GET_CODE (val) == LABEL_REF)));
3887 }
3888
3889 INSN_VAR_LOCATION_LOC (insn) = val;
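/* Split overly deep locations; avoid_complex_debug_insns may emit new debug binds just before INSN, so walk back over those as well.  */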
3890 prev_insn = PREV_INSN (insn);
3891 for (insn2 = insn; insn2 != prev_insn; insn2 = PREV_INSN (insn2))
3892 avoid_complex_debug_insns (insn2, &INSN_VAR_LOCATION_LOC (insn2), 0);
3893 }
3894
3895 flag_strict_aliasing = save_strict_alias;
3896 }
3897
3898 /* Expand basic block BB from GIMPLE trees to RTL. */
3899
3900 static basic_block
3901 expand_gimple_basic_block (basic_block bb, bool disable_tail_calls)
3902 {
3903 gimple_stmt_iterator gsi;
3904 gimple_seq stmts;
3905 gimple stmt = NULL;
3906 rtx note, last;
3907 edge e;
3908 edge_iterator ei;
3909 void **elt;
3910
3911 if (dump_file)
3912 fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
3913 bb->index);
3914
3915 /* Note that since we are now transitioning from GIMPLE to RTL, we
3916 cannot use the gsi_*_bb() routines because they expect the basic
3917 block to be in GIMPLE, instead of RTL. Therefore, we need to
3918 access the BB sequence directly. */
3919 stmts = bb_seq (bb);
3920 bb->il.gimple.seq = NULL;
3921 bb->il.gimple.phi_nodes = NULL;
3922 rtl_profile_for_bb (bb);
3923 init_rtl_bb_info (bb);
3924 bb->flags |= BB_RTL;
3925
3926 /* Remove the RETURN_EXPR if we may fall through to the exit
3927 instead. */
3928 gsi = gsi_last (stmts);
3929 if (!gsi_end_p (gsi)
3930 && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
3931 {
3932 gimple ret_stmt = gsi_stmt (gsi);
3933
3934 gcc_assert (single_succ_p (bb));
3935 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR);
3936
3937 if (bb->next_bb == EXIT_BLOCK_PTR
3938 && !gimple_return_retval (ret_stmt))
3939 {
3940 gsi_remove (&gsi, false);
3941 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
3942 }
3943 }
3944
3945 gsi = gsi_start (stmts);
3946 if (!gsi_end_p (gsi))
3947 {
3948 stmt = gsi_stmt (gsi);
3949 if (gimple_code (stmt) != GIMPLE_LABEL)
3950 stmt = NULL;
3951 }
3952
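/* A label rtx may already have been recorded for this block (see label_rtx_for_bb); if so, it must be emitted here.  */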
3953 elt = pointer_map_contains (lab_rtx_for_bb, bb);
3954
3955 if (stmt || elt)
3956 {
3957 last = get_last_insn ();
3958
3959 if (stmt)
3960 {
3961 expand_gimple_stmt (stmt);
3962 gsi_next (&gsi);
3963 }
3964
3965 if (elt)
3966 emit_label ((rtx) *elt);
3967
3968 /* Java emits line number notes at the top of labels.
3969 ??? Make this go away once line number notes are obsoleted. */
3970 BB_HEAD (bb) = NEXT_INSN (last);
3971 if (NOTE_P (BB_HEAD (bb)))
3972 BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
3973 note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
3974
3975 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3976 }
3977 else
3978 note = BB_HEAD (bb) = emit_note (NOTE_INSN_BASIC_BLOCK);
3979
3980 NOTE_BASIC_BLOCK (note) = bb;
3981
3982 for (; !gsi_end_p (gsi); gsi_next (&gsi))
3983 {
3984 basic_block new_bb;
3985
3986 stmt = gsi_stmt (gsi);
3987
3988 /* If this statement is a non-debug one, and we generate debug
3989 insns, then this one might be the last real use of a TERed
3990 SSA_NAME, but where there are still some debug uses further
3991 down. Expanding the current SSA name in such further debug
3992 uses by its RHS might lead to wrong debug info, as coalescing
3993 might make the operands of such RHS be placed into the same
3994 pseudo as something else. Like so:
3995 a_1 = a_0 + 1; // Assume a_1 is TERed and a_0 is dead
3996 use(a_1);
3997 a_2 = ...
3998 #DEBUG ... => a_1
3999 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
4000 If we were now to expand a_1 by its RHS (a_0 + 1) in the debug use,
4001 the write to a_2 would actually have clobbered the place which
4002 formerly held a_0.
4003
4004 So, instead of that, we recognize the situation, and generate
4005 debug temporaries at the last real use of TERed SSA names:
4006 a_1 = a_0 + 1;
4007 #DEBUG #D1 => a_1
4008 use(a_1);
4009 a_2 = ...
4010 #DEBUG ... => #D1
4011 */
4012 if (MAY_HAVE_DEBUG_INSNS
4013 && SA.values
4014 && !is_gimple_debug (stmt))
4015 {
4016 ssa_op_iter iter;
4017 tree op;
4018 gimple def;
4019
4020 location_t sloc = curr_insn_location ();
4021
4022 /* Look for SSA names that have their last use here (TERed
4023 names always have only one real use). */
4024 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
4025 if ((def = get_gimple_for_ssa_name (op)))
4026 {
4027 imm_use_iterator imm_iter;
4028 use_operand_p use_p;
4029 bool have_debug_uses = false;
4030
4031 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
4032 {
4033 if (gimple_debug_bind_p (USE_STMT (use_p)))
4034 {
4035 have_debug_uses = true;
4036 break;
4037 }
4038 }
4039
4040 if (have_debug_uses)
4041 {
4042 /* OP is a TERed SSA name, with DEF its defining
4043 statement, and OP is used in further debug
4044 instructions. Generate a debug temporary, and
4045 replace all uses of OP in debug insns with that
4046 temporary. */
4047 gimple debugstmt;
4048 tree value = gimple_assign_rhs_to_tree (def);
4049 tree vexpr = make_node (DEBUG_EXPR_DECL);
4050 rtx val;
4051 enum machine_mode mode;
4052
4053 set_curr_insn_location (gimple_location (def));
4054
4055 DECL_ARTIFICIAL (vexpr) = 1;
4056 TREE_TYPE (vexpr) = TREE_TYPE (value);
4057 if (DECL_P (value))
4058 mode = DECL_MODE (value);
4059 else
4060 mode = TYPE_MODE (TREE_TYPE (value));
4061 DECL_MODE (vexpr) = mode;
4062
4063 val = gen_rtx_VAR_LOCATION
4064 (mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
4065
4066 emit_debug_insn (val);
4067
4068 FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
4069 {
4070 if (!gimple_debug_bind_p (debugstmt))
4071 continue;
4072
4073 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
4074 SET_USE (use_p, vexpr);
4075
4076 update_stmt (debugstmt);
4077 }
4078 }
4079 }
4080 set_curr_insn_location (sloc);
4081 }
4082
4083 currently_expanding_gimple_stmt = stmt;
4084
4085 /* Expand this statement, then evaluate the resulting RTL and
4086 fixup the CFG accordingly. */
4087 if (gimple_code (stmt) == GIMPLE_COND)
4088 {
4089 new_bb = expand_gimple_cond (bb, stmt);
4090 if (new_bb)
4091 return new_bb;
4092 }
4093 else if (gimple_debug_bind_p (stmt))
4094 {
4095 location_t sloc = curr_insn_location ();
4096 gimple_stmt_iterator nsi = gsi;
4097
4098 for (;;)
4099 {
4100 tree var = gimple_debug_bind_get_var (stmt);
4101 tree value;
4102 rtx val;
4103 enum machine_mode mode;
4104
4105 if (TREE_CODE (var) != DEBUG_EXPR_DECL
4106 && TREE_CODE (var) != LABEL_DECL
4107 && !target_for_debug_bind (var))
4108 goto delink_debug_stmt;
4109
4110 if (gimple_debug_bind_has_value_p (stmt))
4111 value = gimple_debug_bind_get_value (stmt);
4112 else
4113 value = NULL_TREE;
4114
4115 last = get_last_insn ();
4116
4117 set_curr_insn_location (gimple_location (stmt));
4118
4119 if (DECL_P (var))
4120 mode = DECL_MODE (var);
4121 else
4122 mode = TYPE_MODE (TREE_TYPE (var));
4123
4124 val = gen_rtx_VAR_LOCATION
4125 (mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
4126
4127 emit_debug_insn (val);
4128
4129 if (dump_file && (dump_flags & TDF_DETAILS))
4130 {
4131 /* We can't dump the insn with a TREE where an RTX
4132 is expected. */
4133 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
4134 maybe_dump_rtl_for_gimple_stmt (stmt, last);
4135 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
4136 }
4137
4138 delink_debug_stmt:
4139 /* In order not to generate too many debug temporaries,
4140 we delink all uses in debug statements we have already expanded.
4141 Therefore debug statements between the definition and the real
4142 use of TERed SSA names will continue to use the SSA name,
4143 and will not be replaced with debug temps. */
4144 delink_stmt_imm_use (stmt);
4145
4146 gsi = nsi;
4147 gsi_next (&nsi);
4148 if (gsi_end_p (nsi))
4149 break;
4150 stmt = gsi_stmt (nsi);
4151 if (!gimple_debug_bind_p (stmt))
4152 break;
4153 }
4154
4155 set_curr_insn_location (sloc);
4156 }
4157 else if (gimple_debug_source_bind_p (stmt))
4158 {
4159 location_t sloc = curr_insn_location ();
4160 tree var = gimple_debug_source_bind_get_var (stmt);
4161 tree value = gimple_debug_source_bind_get_value (stmt);
4162 rtx val;
4163 enum machine_mode mode;
4164
4165 last = get_last_insn ();
4166
4167 set_curr_insn_location (gimple_location (stmt));
4168
4169 mode = DECL_MODE (var);
4170
4171 val = gen_rtx_VAR_LOCATION (mode, var, (rtx)value,
4172 VAR_INIT_STATUS_UNINITIALIZED);
4173
4174 emit_debug_insn (val);
4175
4176 if (dump_file && (dump_flags & TDF_DETAILS))
4177 {
4178 /* We can't dump the insn with a TREE where an RTX
4179 is expected. */
4180 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
4181 maybe_dump_rtl_for_gimple_stmt (stmt, last);
4182 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
4183 }
4184
4185 set_curr_insn_location (sloc);
4186 }
4187 else
4188 {
4189 if (is_gimple_call (stmt)
4190 && gimple_call_tail_p (stmt)
4191 && disable_tail_calls)
4192 gimple_call_set_tail (stmt, false);
4193
4194 if (is_gimple_call (stmt) && gimple_call_tail_p (stmt))
4195 {
4196 bool can_fallthru;
4197 new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru);
4198 if (new_bb)
4199 {
4200 if (can_fallthru)
4201 bb = new_bb;
4202 else
4203 return new_bb;
4204 }
4205 }
4206 else
4207 {
4208 def_operand_p def_p;
4209 def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
4210
4211 if (def_p != NULL)
4212 {
4213 /* Ignore this stmt if it is in the list of
4214 replaceable expressions. */
4215 if (SA.values
4216 && bitmap_bit_p (SA.values,
4217 SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
4218 continue;
4219 }
4220 last = expand_gimple_stmt (stmt);
4221 maybe_dump_rtl_for_gimple_stmt (stmt, last);
4222 }
4223 }
4224 }
4225
4226 currently_expanding_gimple_stmt = NULL;
4227
4228 /* Expand implicit goto and convert goto_locus. */
4229 FOR_EACH_EDGE (e, ei, bb->succs)
4230 {
4231 if (e->goto_locus != UNKNOWN_LOCATION)
4232 set_curr_insn_location (e->goto_locus);
4233 if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
4234 {
4235 emit_jump (label_rtx_for_bb (e->dest));
4236 e->flags &= ~EDGE_FALLTHRU;
4237 }
4238 }
4239
4240 /* Expanded RTL can create a jump in the last instruction of the block.
4241 Such a jump might later be assumed to be a jump to the successor and break edge insertion.
4242 We need to insert a dummy move to prevent this. PR41440. */
4243 if (single_succ_p (bb)
4244 && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
4245 && (last = get_last_insn ())
4246 && JUMP_P (last))
4247 {
4248 rtx dummy = gen_reg_rtx (SImode);
4249 emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
4250 }
4251
4252 do_pending_stack_adjust ();
4253
4254 /* Find the block tail. The last insn in the block is the insn
4255 before a barrier and/or table jump insn. */
4256 last = get_last_insn ();
4257 if (BARRIER_P (last))
4258 last = PREV_INSN (last);
4259 if (JUMP_TABLE_DATA_P (last))
4260 last = PREV_INSN (PREV_INSN (last));
4261 BB_END (bb) = last;
4262
4263 update_bb_for_insn (bb);
4264
4265 return bb;
4266 }
4267
4268
4269 /* Create a basic block for initialization code. */
4270
4271 static basic_block
4272 construct_init_block (void)
4273 {
4274 basic_block init_block, first_block;
4275 edge e = NULL;
4276 int flags;
4277
4278 /* Multiple entry points not supported yet. */
4279 gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR->succs) == 1);
4280 init_rtl_bb_info (ENTRY_BLOCK_PTR);
4281 init_rtl_bb_info (EXIT_BLOCK_PTR);
4282 ENTRY_BLOCK_PTR->flags |= BB_RTL;
4283 EXIT_BLOCK_PTR->flags |= BB_RTL;
4284
4285 e = EDGE_SUCC (ENTRY_BLOCK_PTR, 0);
4286
4287 /* When the entry edge points to the first basic block, we don't need a jump;
4288 otherwise we have to jump to the proper target. */
4289 if (e && e->dest != ENTRY_BLOCK_PTR->next_bb)
4290 {
4291 tree label = gimple_block_label (e->dest);
4292
4293 emit_jump (label_rtx (label));
4294 flags = 0;
4295 }
4296 else
4297 flags = EDGE_FALLTHRU;
4298
4299 init_block = create_basic_block (NEXT_INSN (get_insns ()),
4300 get_last_insn (),
4301 ENTRY_BLOCK_PTR);
4302 init_block->frequency = ENTRY_BLOCK_PTR->frequency;
4303 init_block->count = ENTRY_BLOCK_PTR->count;
4304 if (current_loops && ENTRY_BLOCK_PTR->loop_father)
4305 add_bb_to_loop (init_block, ENTRY_BLOCK_PTR->loop_father);
4306 if (e)
4307 {
4308 first_block = e->dest;
4309 redirect_edge_succ (e, init_block);
4310 e = make_edge (init_block, first_block, flags);
4311 }
4312 else
4313 e = make_edge (init_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
4314 e->probability = REG_BR_PROB_BASE;
4315 e->count = ENTRY_BLOCK_PTR->count;
4316
4317 update_bb_for_insn (init_block);
4318 return init_block;
4319 }
4320
4321 /* For each lexical block, set BLOCK_NUMBER to the depth at which it is
4322 found in the block tree. */
4323
4324 static void
4325 set_block_levels (tree block, int level)
4326 {
4327 while (block)
4328 {
4329 BLOCK_NUMBER (block) = level;
4330 set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
4331 block = BLOCK_CHAIN (block);
4332 }
4333 }
4334
4335 /* Create a block containing landing pads and similar stuff. */
4336
4337 static void
4338 construct_exit_block (void)
4339 {
4340 rtx head = get_last_insn ();
4341 rtx end;
4342 basic_block exit_block;
4343 edge e, e2;
4344 unsigned ix;
4345 edge_iterator ei;
4346 rtx orig_end = BB_END (EXIT_BLOCK_PTR->prev_bb);
4347
4348 rtl_profile_for_bb (EXIT_BLOCK_PTR);
4349
4350 /* Make sure the locus is set to the end of the function, so that
4351 epilogue line numbers and warnings are set properly. */
4352 if (LOCATION_LOCUS (cfun->function_end_locus) != UNKNOWN_LOCATION)
4353 input_location = cfun->function_end_locus;
4354
4355 /* Generate rtl for function exit. */
4356 expand_function_end ();
4357
4358 end = get_last_insn ();
4359 if (head == end)
4360 return;
4361 /* While emitting the function end we could have moved the end of the last
4362 basic block. */
4363 BB_END (EXIT_BLOCK_PTR->prev_bb) = orig_end;
4364 while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
4365 head = NEXT_INSN (head);
4366 exit_block = create_basic_block (NEXT_INSN (head), end,
4367 EXIT_BLOCK_PTR->prev_bb);
4368 exit_block->frequency = EXIT_BLOCK_PTR->frequency;
4369 exit_block->count = EXIT_BLOCK_PTR->count;
4370 if (current_loops && EXIT_BLOCK_PTR->loop_father)
4371 add_bb_to_loop (exit_block, EXIT_BLOCK_PTR->loop_father);
4372
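/* Redirect every non-abnormal predecessor edge of the exit block to the new exit_block; abnormal edges stay where they are.  */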
4373 ix = 0;
4374 while (ix < EDGE_COUNT (EXIT_BLOCK_PTR->preds))
4375 {
4376 e = EDGE_PRED (EXIT_BLOCK_PTR, ix);
4377 if (!(e->flags & EDGE_ABNORMAL))
4378 redirect_edge_succ (e, exit_block);
4379 else
4380 ix++;
4381 }
4382
4383 e = make_edge (exit_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
4384 e->probability = REG_BR_PROB_BASE;
4385 e->count = EXIT_BLOCK_PTR->count;
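/* Subtract from the new fallthru edge whatever still reaches the exit block via the remaining (abnormal) edges; negative results are clamped to zero below.  */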
4386 FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR->preds)
4387 if (e2 != e)
4388 {
4389 e->count -= e2->count;
4390 exit_block->count -= e2->count;
4391 exit_block->frequency -= EDGE_FREQUENCY (e2);
4392 }
4393 if (e->count < 0)
4394 e->count = 0;
4395 if (exit_block->count < 0)
4396 exit_block->count = 0;
4397 if (exit_block->frequency < 0)
4398 exit_block->frequency = 0;
4399 update_bb_for_insn (exit_block);
4400 }
4401
4402 /* Helper function for discover_nonconstant_array_refs.
4403 Look for ARRAY_REF nodes with non-constant indexes and mark them
4404 addressable. */
4405
4406 static tree
4407 discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
4408 void *data ATTRIBUTE_UNUSED)
4409 {
4410 tree t = *tp;
4411
4412 if (IS_TYPE_OR_DECL_P (t))
4413 *walk_subtrees = 0;
4414 else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4415 {
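/* Strip handled components whose offsets are invariant (including array refs with constant indexes) to reach an inner ARRAY_REF with a variable index, if any.  */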
4416 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4417 && is_gimple_min_invariant (TREE_OPERAND (t, 1))
4418 && (!TREE_OPERAND (t, 2)
4419 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
4420 || (TREE_CODE (t) == COMPONENT_REF
4421 && (!TREE_OPERAND (t, 2)
4422 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
4423 || TREE_CODE (t) == BIT_FIELD_REF
4424 || TREE_CODE (t) == REALPART_EXPR
4425 || TREE_CODE (t) == IMAGPART_EXPR
4426 || TREE_CODE (t) == VIEW_CONVERT_EXPR
4427 || CONVERT_EXPR_P (t))
4428 t = TREE_OPERAND (t, 0);
4429
4430 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4431 {
4432 t = get_base_address (t);
4433 if (t && DECL_P (t)
4434 && DECL_MODE (t) != BLKmode)
4435 TREE_ADDRESSABLE (t) = 1;
4436 }
4437
4438 *walk_subtrees = 0;
4439 }
4440
4441 return NULL_TREE;
4442 }
4443
4444 /* RTL expansion is not able to compile array references with variable
4445 offsets for arrays stored in a single register. Discover such
4446 expressions and mark such variables as addressable to avoid this
4447 scenario. */
4448
4449 static void
4450 discover_nonconstant_array_refs (void)
4451 {
4452 basic_block bb;
4453 gimple_stmt_iterator gsi;
4454
4455 FOR_EACH_BB (bb)
4456 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4457 {
4458 gimple stmt = gsi_stmt (gsi);
4459 if (!is_gimple_debug (stmt))
4460 walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
4461 }
4462 }
4463
4464 /* This function sets crtl->args.internal_arg_pointer to a virtual
4465 register if DRAP is needed. The local register allocator will replace
4466 virtual_incoming_args_rtx with the virtual register. */
4467
4468 static void
4469 expand_stack_alignment (void)
4470 {
4471 rtx drap_rtx;
4472 unsigned int preferred_stack_boundary;
4473
4474 if (! SUPPORTS_STACK_ALIGNMENT)
4475 return;
4476
4477 if (cfun->calls_alloca
4478 || cfun->has_nonlocal_label
4479 || crtl->has_nonlocal_goto)
4480 crtl->need_drap = true;
4481
4482 /* Call update_stack_boundary here again to update incoming stack
4483 boundary. It may set incoming stack alignment to a different
4484 value after RTL expansion. TARGET_FUNCTION_OK_FOR_SIBCALL may
4485 use the minimum incoming stack alignment to check if it is OK
4486 to perform sibcall optimization since sibcall optimization will
4487 only align the outgoing stack to incoming stack boundary. */
4488 if (targetm.calls.update_stack_boundary)
4489 targetm.calls.update_stack_boundary ();
4490
4491 /* The incoming stack frame has to be aligned at least at
4492 parm_stack_boundary. */
4493 gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);
4494
4495 /* Update crtl->stack_alignment_estimated and use it later to align
4496 stack. We check PREFERRED_STACK_BOUNDARY if there may be non-call
4497 exceptions since callgraph doesn't collect incoming stack alignment
4498 in this case. */
4499 if (cfun->can_throw_non_call_exceptions
4500 && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
4501 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
4502 else
4503 preferred_stack_boundary = crtl->preferred_stack_boundary;
4504 if (preferred_stack_boundary > crtl->stack_alignment_estimated)
4505 crtl->stack_alignment_estimated = preferred_stack_boundary;
4506 if (preferred_stack_boundary > crtl->stack_alignment_needed)
4507 crtl->stack_alignment_needed = preferred_stack_boundary;
4508
4509 gcc_assert (crtl->stack_alignment_needed
4510 <= crtl->stack_alignment_estimated);
4511
4512 crtl->stack_realign_needed
4513 = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
4514 crtl->stack_realign_tried = crtl->stack_realign_needed;
4515
4516 crtl->stack_realign_processed = true;
4517
4518 /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
4519 alignment. */
4520 gcc_assert (targetm.calls.get_drap_rtx != NULL);
4521 drap_rtx = targetm.calls.get_drap_rtx ();
4522
4523 /* stack_realign_drap and drap_rtx must match. */
4524 gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));
4525
4526 /* Do nothing if NULL is returned, which means DRAP is not needed. */
4527 if (NULL != drap_rtx)
4528 {
4529 crtl->args.internal_arg_pointer = drap_rtx;
4530
4531 /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
4532 needed. */
4533 fixup_tail_calls ();
4534 }
4535 }
4536
4537 /* Translate the intermediate representation contained in the CFG
4538 from GIMPLE trees to RTL.
4539
4540 We do conversion per basic block and preserve/update the tree CFG.
4541 This implies we have to do some magic as the CFG can simultaneously
4542 consist of basic blocks containing RTL and GIMPLE trees. This can
4543 confuse the CFG hooks, so be careful not to manipulate the CFG during
4544 the expansion. */
4545
4546 static unsigned int
4547 gimple_expand_cfg (void)
4548 {
4549 basic_block bb, init_block;
4550 sbitmap blocks;
4551 edge_iterator ei;
4552 edge e;
4553 rtx var_seq, var_ret_seq;
4554 unsigned i;
4555
4556 timevar_push (TV_OUT_OF_SSA);
4557 rewrite_out_of_ssa (&SA);
4558 timevar_pop (TV_OUT_OF_SSA);
4559 SA.partition_to_pseudo = XCNEWVEC (rtx, SA.map->num_partitions);
4560
4561 /* Make sure all values used by the optimization passes have sane
4562 defaults. */
4563 reg_renumber = 0;
4564
4565 /* Some backends want to know that we are expanding to RTL. */
4566 currently_expanding_to_rtl = 1;
4567 /* Dominators are not kept up-to-date as we may create new basic-blocks. */
4568 free_dominance_info (CDI_DOMINATORS);
4569
4570 rtl_profile_for_bb (ENTRY_BLOCK_PTR);
4571
4572 insn_locations_init ();
4573 if (!DECL_IS_BUILTIN (current_function_decl))
4574 {
4575 /* Eventually, all FEs should explicitly set function_start_locus. */
4576 if (LOCATION_LOCUS (cfun->function_start_locus) == UNKNOWN_LOCATION)
4577 set_curr_insn_location
4578 (DECL_SOURCE_LOCATION (current_function_decl));
4579 else
4580 set_curr_insn_location (cfun->function_start_locus);
4581 }
4582 else
4583 set_curr_insn_location (UNKNOWN_LOCATION);
4584 prologue_location = curr_insn_location ();
4585
4586 #ifdef INSN_SCHEDULING
4587 init_sched_attrs ();
4588 #endif
4589
4590 /* Make sure the first insn is a note even if we don't want linenums.
4591 This guarantees that the first insn will never be deleted.
4592 Also, final expects a note to appear there. */
4593 emit_note (NOTE_INSN_DELETED);
4594
4595 /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE. */
4596 discover_nonconstant_array_refs ();
4597
4598 targetm.expand_to_rtl_hook ();
4599 crtl->stack_alignment_needed = STACK_BOUNDARY;
4600 crtl->max_used_stack_slot_alignment = STACK_BOUNDARY;
4601 crtl->stack_alignment_estimated = 0;
4602 crtl->preferred_stack_boundary = STACK_BOUNDARY;
4603 cfun->cfg->max_jumptable_ents = 0;
4604
4605 /* Resolve the function section. Some targets, like ARM EABI, rely on knowledge
4606 of the function section at expansion time to predict the distance of calls. */
4607 resolve_unique_section (current_function_decl, 0, flag_function_sections);
4608
4609 /* Expand the variables recorded during gimple lowering. */
4610 timevar_push (TV_VAR_EXPAND);
4611 start_sequence ();
4612
4613 var_ret_seq = expand_used_vars ();
4614
4615 var_seq = get_insns ();
4616 end_sequence ();
4617 timevar_pop (TV_VAR_EXPAND);
4618
4619 /* Honor stack protection warnings. */
4620 if (warn_stack_protect)
4621 {
4622 if (cfun->calls_alloca)
4623 warning (OPT_Wstack_protector,
4624 "stack protector not protecting local variables: "
4625 "variable length buffer");
4626 if (has_short_buffer && !crtl->stack_protect_guard)
4627 warning (OPT_Wstack_protector,
4628 "stack protector not protecting function: "
4629 "all local arrays are less than %d bytes long",
4630 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
4631 }
4632
4633 /* Set up parameters and prepare for return, for the function. */
4634 expand_function_start (current_function_decl);
4635
4636 /* If we emitted any instructions for setting up the variables,
4637 emit them before the FUNCTION_START note. */
4638 if (var_seq)
4639 {
4640 emit_insn_before (var_seq, parm_birth_insn);
4641
4642 /* In expand_function_end we'll insert the alloca save/restore
4643 before parm_birth_insn. We've just inserted an alloca call.
4644 Adjust the pointer to match. */
4645 parm_birth_insn = var_seq;
4646 }
4647
4648 /* Now that we also have the parameter RTXs, copy them over to our
4649 partitions. */
4650 for (i = 0; i < SA.map->num_partitions; i++)
4651 {
4652 tree var = SSA_NAME_VAR (partition_to_var (SA.map, i));
4653
4654 if (TREE_CODE (var) != VAR_DECL
4655 && !SA.partition_to_pseudo[i])
4656 SA.partition_to_pseudo[i] = DECL_RTL_IF_SET (var);
4657 gcc_assert (SA.partition_to_pseudo[i]);
4658
4659 /* If this decl was marked as living in multiple places, reset
4660 this now to NULL. */
4661 if (DECL_RTL_IF_SET (var) == pc_rtx)
4662 SET_DECL_RTL (var, NULL);
4663
4664 /* Some RTL parts really want to look at DECL_RTL(x) when x
4665 was a decl marked in REG_ATTR or MEM_ATTR. We could use
4666 SET_DECL_RTL here to make this available, but that would mean
4667 selecting one of the potentially many RTLs for one DECL. Instead
4668 of doing that we simply reset the MEM_EXPR of the RTL in question,
4669 so nobody can get at it and hence nobody can call DECL_RTL on it. */
4670 if (!DECL_RTL_SET_P (var))
4671 {
4672 if (MEM_P (SA.partition_to_pseudo[i]))
4673 set_mem_expr (SA.partition_to_pseudo[i], NULL);
4674 }
4675 }
4676
4677 /* If we have a class containing differently aligned pointers
4678 we need to merge those into the corresponding RTL pointer
4679 alignment. */
4680 for (i = 1; i < num_ssa_names; i++)
4681 {
4682 tree name = ssa_name (i);
4683 int part;
4684 rtx r;
4685
4686 if (!name
4687 /* We might have generated new SSA names in
4688 update_alias_info_with_stack_vars. They will have NULL
4689 defining statements, and won't be part of the partitioning,
4690 so ignore those. */
4691 || !SSA_NAME_DEF_STMT (name))
4692 continue;
4693 part = var_to_partition (SA.map, name);
4694 if (part == NO_PARTITION)
4695 continue;
4696
4697 /* Adjust all partition members to get the underlying decl of
4698 the representative which we might have created in expand_one_var. */
4699 if (SSA_NAME_VAR (name) == NULL_TREE)
4700 {
4701 tree leader = partition_to_var (SA.map, part);
4702 gcc_assert (SSA_NAME_VAR (leader) != NULL_TREE);
4703 replace_ssa_name_symbol (name, SSA_NAME_VAR (leader));
4704 }
4705 if (!POINTER_TYPE_P (TREE_TYPE (name)))
4706 continue;
4707
4708 r = SA.partition_to_pseudo[part];
4709 if (REG_P (r))
4710 mark_reg_pointer (r, get_pointer_alignment (name));
4711 }
4712
4713 /* If this function is `main', emit a call to `__main'
4714 to run global initializers, etc. */
4715 if (DECL_NAME (current_function_decl)
4716 && MAIN_NAME_P (DECL_NAME (current_function_decl))
4717 && DECL_FILE_SCOPE_P (current_function_decl))
4718 expand_main_function ();
4719
4720 /* Initialize the stack_protect_guard field. This must happen after the
4721 call to __main (if any) so that the external decl is initialized. */
4722 if (crtl->stack_protect_guard)
4723 stack_protect_prologue ();
4724
4725 expand_phi_nodes (&SA);
4726
4727 /* Register rtl specific functions for cfg. */
4728 rtl_register_cfg_hooks ();
4729
4730 init_block = construct_init_block ();
4731
4732 /* Clear EDGE_EXECUTABLE on the entry edge(s). It is cleaned from the
4733 remaining edges later. */
4734 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
4735 e->flags &= ~EDGE_EXECUTABLE;
4736
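/* Expand each basic block in turn. expand_gimple_basic_block may create new blocks (for conditions and tail calls) and returns the last block it expanded into.  */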
4737 lab_rtx_for_bb = pointer_map_create ();
4738 FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb)
4739 bb = expand_gimple_basic_block (bb, var_ret_seq != NULL_RTX);
4740
4741 if (MAY_HAVE_DEBUG_INSNS)
4742 expand_debug_locations ();
4743
4744 /* Free stuff we no longer need after GIMPLE optimizations. */
4745 free_dominance_info (CDI_DOMINATORS);
4746 free_dominance_info (CDI_POST_DOMINATORS);
4747 delete_tree_cfg_annotations ();
4748
4749 timevar_push (TV_OUT_OF_SSA);
4750 finish_out_of_ssa (&SA);
4751 timevar_pop (TV_OUT_OF_SSA);
4752
4753 timevar_push (TV_POST_EXPAND);
4754 /* We are no longer in SSA form. */
4755 cfun->gimple_df->in_ssa_p = false;
4756 if (current_loops)
4757 loops_state_clear (LOOP_CLOSED_SSA);
4758
4759 /* Expansion is used by optimization passes too; set maybe_hot_insn_p
4760 conservatively to true until they are all profile aware. */
4761 pointer_map_destroy (lab_rtx_for_bb);
4762 free_histograms ();
4763
4764 construct_exit_block ();
4765 insn_locations_finalize ();
4766
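/* If expand_used_vars queued a sequence to be emitted on function return, place it right after the return label, past the basic block note if one directly follows.  */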
4767 if (var_ret_seq)
4768 {
4769 rtx after = return_label;
4770 rtx next = NEXT_INSN (after);
4771 if (next && NOTE_INSN_BASIC_BLOCK_P (next))
4772 after = next;
4773 emit_insn_after (var_ret_seq, after);
4774 }
4775
4776 /* Zap the tree EH table. */
4777 set_eh_throw_stmt_table (cfun, NULL);
4778
4779 /* We need JUMP_LABEL to be set in order to redirect jumps, and hence
4780 to split edges, which edge insertion might do. */
4781 rebuild_jump_labels (get_insns ());
4782
4783 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
4784 {
4785 edge e;
4786 edge_iterator ei;
4787 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
4788 {
4789 if (e->insns.r)
4790 {
4791 rebuild_jump_labels_chain (e->insns.r);
4792 /* Put insns after parm birth, but before
4793 NOTE_INSN_FUNCTION_BEG. */
4794 if (e->src == ENTRY_BLOCK_PTR
4795 && single_succ_p (ENTRY_BLOCK_PTR))
4796 {
4797 rtx insns = e->insns.r;
4798 e->insns.r = NULL_RTX;
4799 if (NOTE_P (parm_birth_insn)
4800 && NOTE_KIND (parm_birth_insn) == NOTE_INSN_FUNCTION_BEG)
4801 emit_insn_before_noloc (insns, parm_birth_insn, e->dest);
4802 else
4803 emit_insn_after_noloc (insns, parm_birth_insn, e->dest);
4804 }
4805 else
4806 commit_one_edge_insertion (e);
4807 }
4808 else
4809 ei_next (&ei);
4810 }
4811 }
4812
4813 /* We're done expanding trees to RTL. */
4814 currently_expanding_to_rtl = 0;
4815
4816 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb, EXIT_BLOCK_PTR, next_bb)
4817 {
4818 edge e;
4819 edge_iterator ei;
4820 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
4821 {
4822 /* Clear EDGE_EXECUTABLE. This flag is never used in the backend. */
4823 e->flags &= ~EDGE_EXECUTABLE;
4824
4825 /* At the moment not all abnormal edges match the RTL
4826 representation. It is safe to remove them here as
4827 find_many_sub_basic_blocks will rediscover them.
4828 In the future we should get this fixed properly. */
4829 if ((e->flags & EDGE_ABNORMAL)
4830 && !(e->flags & EDGE_SIBCALL))
4831 remove_edge (e);
4832 else
4833 ei_next (&ei);
4834 }
4835 }
4836
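/* The expanded insns may contain new jumps and labels inside what used to be single GIMPLE blocks; find_many_sub_basic_blocks splits those into proper basic blocks.  */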
4837 blocks = sbitmap_alloc (last_basic_block);
4838 bitmap_ones (blocks);
4839 find_many_sub_basic_blocks (blocks);
4840 sbitmap_free (blocks);
4841 purge_all_dead_edges ();
4842
4843 expand_stack_alignment ();
4844
4845 /* Fixup REG_EQUIV notes in the prologue if there are tailcalls in this
4846 function. */
4847 if (crtl->tail_call_emit)
4848 fixup_tail_calls ();
4849
4850 /* After initial rtl generation, call back to finish generating
4851 exception support code. We need to do this before cleaning up
4852 the CFG as the code does not expect dead landing pads. */
4853 if (cfun->eh->region_tree != NULL)
4854 finish_eh_generation ();
4855
4856 /* Remove unreachable blocks, otherwise we cannot compute dominators
4857 which are needed for loop state verification. As a side-effect
4858 this also compacts blocks.
4859 ??? We cannot remove trivially dead insns here as for example
4860 the DRAP reg on i?86 is not magically live at this point.
4861 gcc.c-torture/execute/ipa-sra-2.c execution, -Os -m32 fails otherwise. */
4862 cleanup_cfg (CLEANUP_NO_INSN_DEL);
4863
4864 #ifdef ENABLE_CHECKING
4865 verify_flow_info ();
4866 #endif
4867
4868 /* Initialize pseudos allocated for hard registers. */
4869 emit_initial_value_sets ();
4870
4871 /* And finally unshare all RTL. */
4872 unshare_all_rtl ();
4873
4874 /* There's no need to defer outputting this function any more; we
4875 know we want to output it. */
4876 DECL_DEFER_OUTPUT (current_function_decl) = 0;
4877
4878 /* Now that we're done expanding trees to RTL, we shouldn't have any
4879 more CONCATs anywhere. */
4880 generating_concat_p = 0;
4881
4882 if (dump_file)
4883 {
4884 fprintf (dump_file,
4885 "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
4886 /* And the pass manager will dump RTL for us. */
4887 }
4888
4889 /* If we're emitting a nested function, make sure its parent gets
4890 emitted as well. Doing otherwise confuses debug info. */
4891 {
4892 tree parent;
4893 for (parent = DECL_CONTEXT (current_function_decl);
4894 parent != NULL_TREE;
4895 parent = get_containing_scope (parent))
4896 if (TREE_CODE (parent) == FUNCTION_DECL)
4897 TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
4898 }
4899
4900 /* We are now committed to emitting code for this function. Do any
4901 preparation, such as emitting abstract debug info for the inline
4902 before it gets mangled by optimization. */
4903 if (cgraph_function_possibly_inlined_p (current_function_decl))
4904 (*debug_hooks->outlining_inline_function) (current_function_decl);
4905
4906 TREE_ASM_WRITTEN (current_function_decl) = 1;
4907
4908 /* After expanding, the return labels are no longer needed. */
4909 return_label = NULL;
4910 naked_return_label = NULL;
4911
4912 /* After expanding, the tm_restart map is no longer needed. */
4913 if (cfun->gimple_df->tm_restart)
4914 {
4915 htab_delete (cfun->gimple_df->tm_restart);
4916 cfun->gimple_df->tm_restart = NULL;
4917 }
4918
4919 /* Tag the blocks with a depth number so that change_scope can find
4920 the common parent easily. */
4921 set_block_levels (DECL_INITIAL (cfun->decl), 0);
4922 default_rtl_profile ();
4923
4924 timevar_pop (TV_POST_EXPAND);
4925
4926 return 0;
4927 }
4928
4929 namespace {
4930
4931 const pass_data pass_data_expand =
4932 {
4933 RTL_PASS, /* type */
4934 "expand", /* name */
4935 OPTGROUP_NONE, /* optinfo_flags */
4936 false, /* has_gate */
4937 true, /* has_execute */
4938 TV_EXPAND, /* tv_id */
4939 ( PROP_ssa | PROP_gimple_leh | PROP_cfg
4940 | PROP_gimple_lcx
4941 | PROP_gimple_lvec ), /* properties_required */
4942 PROP_rtl, /* properties_provided */
4943 ( PROP_ssa | PROP_trees ), /* properties_destroyed */
4944 ( TODO_verify_ssa | TODO_verify_flow
4945 | TODO_verify_stmts ), /* todo_flags_start */
4946 0, /* todo_flags_finish */
4947 };
4948
4949 class pass_expand : public rtl_opt_pass
4950 {
4951 public:
4952 pass_expand (gcc::context *ctxt)
4953 : rtl_opt_pass (pass_data_expand, ctxt)
4954 {}
4955
4956 /* opt_pass methods: */
4957 unsigned int execute () { return gimple_expand_cfg (); }
4958
4959 }; // class pass_expand
4960
4961 } // anon namespace
4962
4963 rtl_opt_pass *
4964 make_pass_expand (gcc::context *ctxt)
4965 {
4966 return new pass_expand (ctxt);
4967 }