[gcc.git] / gcc / cfgexpand.c
1 /* A pass for lowering trees to RTL.
2 Copyright (C) 2004-2020 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
10
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "target.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "cfghooks.h"
29 #include "tree-pass.h"
30 #include "memmodel.h"
31 #include "tm_p.h"
32 #include "ssa.h"
33 #include "optabs.h"
34 #include "regs.h" /* For reg_renumber. */
35 #include "emit-rtl.h"
36 #include "recog.h"
37 #include "cgraph.h"
38 #include "diagnostic.h"
39 #include "fold-const.h"
40 #include "varasm.h"
41 #include "stor-layout.h"
42 #include "stmt.h"
43 #include "print-tree.h"
44 #include "cfgrtl.h"
45 #include "cfganal.h"
46 #include "cfgbuild.h"
47 #include "cfgcleanup.h"
48 #include "dojump.h"
49 #include "explow.h"
50 #include "calls.h"
51 #include "expr.h"
52 #include "internal-fn.h"
53 #include "tree-eh.h"
54 #include "gimple-iterator.h"
55 #include "gimple-expr.h"
56 #include "gimple-walk.h"
57 #include "tree-cfg.h"
58 #include "tree-dfa.h"
59 #include "tree-ssa.h"
60 #include "except.h"
61 #include "gimple-pretty-print.h"
62 #include "toplev.h"
63 #include "debug.h"
64 #include "tree-inline.h"
65 #include "value-prof.h"
66 #include "tree-ssa-live.h"
67 #include "tree-outof-ssa.h"
68 #include "cfgloop.h"
69 #include "insn-attr.h" /* For INSN_SCHEDULING. */
70 #include "stringpool.h"
71 #include "attribs.h"
72 #include "asan.h"
73 #include "tree-ssa-address.h"
74 #include "output.h"
75 #include "builtins.h"
76
77 /* Some systems use __main in a way incompatible with its use in gcc; in these
78 cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
79 give the same symbol without quotes for an alternative entry point. You
80 must define both, or neither. */
81 #ifndef NAME__MAIN
82 #define NAME__MAIN "__main"
83 #endif
84
85 /* This variable holds information helping the rewriting of SSA trees
86 into RTL. */
87 struct ssaexpand SA;
88
89 /* This variable holds the currently expanded gimple statement for purposes
90 of communicating the profile info to the builtin expanders. */
91 gimple *currently_expanding_gimple_stmt;
92
93 static rtx expand_debug_expr (tree);
94
95 static bool defer_stack_allocation (tree, bool);
96
97 static void record_alignment_for_reg_var (unsigned int);
98
99 /* Return an expression tree corresponding to the RHS of GIMPLE
100 statement STMT. */
101
102 tree
103 gimple_assign_rhs_to_tree (gimple *stmt)
104 {
105 tree t;
106 switch (gimple_assign_rhs_class (stmt))
107 {
108 case GIMPLE_TERNARY_RHS:
109 t = build3 (gimple_assign_rhs_code (stmt),
110 TREE_TYPE (gimple_assign_lhs (stmt)),
111 gimple_assign_rhs1 (stmt), gimple_assign_rhs2 (stmt),
112 gimple_assign_rhs3 (stmt));
113 break;
114 case GIMPLE_BINARY_RHS:
115 t = build2 (gimple_assign_rhs_code (stmt),
116 TREE_TYPE (gimple_assign_lhs (stmt)),
117 gimple_assign_rhs1 (stmt), gimple_assign_rhs2 (stmt));
118 break;
119 case GIMPLE_UNARY_RHS:
120 t = build1 (gimple_assign_rhs_code (stmt),
121 TREE_TYPE (gimple_assign_lhs (stmt)),
122 gimple_assign_rhs1 (stmt));
123 break;
124 case GIMPLE_SINGLE_RHS:
125 {
126 t = gimple_assign_rhs1 (stmt);
127 /* Avoid modifying this tree in place below. */
128 if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
129 && gimple_location (stmt) != EXPR_LOCATION (t))
130 || (gimple_block (stmt) && currently_expanding_to_rtl
131 && EXPR_P (t)))
132 t = copy_node (t);
133 break;
134 }
135 default:
136 gcc_unreachable ();
137 }
138
139 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
140 SET_EXPR_LOCATION (t, gimple_location (stmt));
141
142 return t;
143 }
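/* Editor's note (illustrative example, not part of the original source):
   for a GIMPLE assignment "a_1 = b_2 + c_3" the rhs class is
   GIMPLE_BINARY_RHS and the rhs code is PLUS_EXPR, so the function above
   returns build2 (PLUS_EXPR, TREE_TYPE (a_1), b_2, c_3) and then copies
   the statement's location onto the freshly built tree.  */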
144
145
146 #ifndef STACK_ALIGNMENT_NEEDED
147 #define STACK_ALIGNMENT_NEEDED 1
148 #endif
149
150 #define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)
151
152 /* Choose either CUR or NEXT as the leader DECL for a partition.
153 Prefer ignored decls, to simplify debug dumps and reduce the ambiguity
154 arising from the same user variable being in multiple partitions (this is
155 less likely for compiler-introduced temps). */
156
157 static tree
158 leader_merge (tree cur, tree next)
159 {
160 if (cur == NULL || cur == next)
161 return next;
162
163 if (DECL_P (cur) && DECL_IGNORED_P (cur))
164 return cur;
165
166 if (DECL_P (next) && DECL_IGNORED_P (next))
167 return next;
168
169 return cur;
170 }
171
172 /* Associate declaration T with storage space X. If T is not an
173 SSA name this is exactly SET_DECL_RTL, otherwise make the
174 partition of T associated with X. */
175 static inline void
176 set_rtl (tree t, rtx x)
177 {
178 gcc_checking_assert (!x
179 || !(TREE_CODE (t) == SSA_NAME || is_gimple_reg (t))
180 || (use_register_for_decl (t)
181 ? (REG_P (x)
182 || (GET_CODE (x) == CONCAT
183 && (REG_P (XEXP (x, 0))
184 || SUBREG_P (XEXP (x, 0)))
185 && (REG_P (XEXP (x, 1))
186 || SUBREG_P (XEXP (x, 1))))
187 /* We need to accept PARALLELs for RESULT_DECLs
188 because of vector types with BLKmode returned
189 in multiple registers, but they are supposed
190 to be uncoalesced. */
191 || (GET_CODE (x) == PARALLEL
192 && SSAVAR (t)
193 && TREE_CODE (SSAVAR (t)) == RESULT_DECL
194 && (GET_MODE (x) == BLKmode
195 || !flag_tree_coalesce_vars)))
196 : (MEM_P (x) || x == pc_rtx
197 || (GET_CODE (x) == CONCAT
198 && MEM_P (XEXP (x, 0))
199 && MEM_P (XEXP (x, 1))))));
200 /* Check that the RTL for SSA_NAMEs and gimple-reg PARM_DECLs and
201 RESULT_DECLs has the expected mode. For memory, we accept
202 unpromoted modes, since that's what we're likely to get. For
203 PARM_DECLs and RESULT_DECLs, we'll have been called by
204 set_parm_rtl, which will give us the default def, so we don't
205 have to compute it ourselves. For RESULT_DECLs, we accept mode
206 mismatches too, as long as we have BLKmode or are not coalescing
207 across variables, so that we don't reject BLKmode PARALLELs or
208 unpromoted REGs. */
209 gcc_checking_assert (!x || x == pc_rtx || TREE_CODE (t) != SSA_NAME
210 || (SSAVAR (t)
211 && TREE_CODE (SSAVAR (t)) == RESULT_DECL
212 && (promote_ssa_mode (t, NULL) == BLKmode
213 || !flag_tree_coalesce_vars))
214 || !use_register_for_decl (t)
215 || GET_MODE (x) == promote_ssa_mode (t, NULL));
216
217 if (x)
218 {
219 bool skip = false;
220 tree cur = NULL_TREE;
221 rtx xm = x;
222
223 retry:
224 if (MEM_P (xm))
225 cur = MEM_EXPR (xm);
226 else if (REG_P (xm))
227 cur = REG_EXPR (xm);
228 else if (SUBREG_P (xm))
229 {
230 gcc_assert (subreg_lowpart_p (xm));
231 xm = SUBREG_REG (xm);
232 goto retry;
233 }
234 else if (GET_CODE (xm) == CONCAT)
235 {
236 xm = XEXP (xm, 0);
237 goto retry;
238 }
239 else if (GET_CODE (xm) == PARALLEL)
240 {
241 xm = XVECEXP (xm, 0, 0);
242 gcc_assert (GET_CODE (xm) == EXPR_LIST);
243 xm = XEXP (xm, 0);
244 goto retry;
245 }
246 else if (xm == pc_rtx)
247 skip = true;
248 else
249 gcc_unreachable ();
250
251 tree next = skip ? cur : leader_merge (cur, SSAVAR (t) ? SSAVAR (t) : t);
252
253 if (cur != next)
254 {
255 if (MEM_P (x))
256 set_mem_attributes (x,
257 next && TREE_CODE (next) == SSA_NAME
258 ? TREE_TYPE (next)
259 : next, true);
260 else
261 set_reg_attrs_for_decl_rtl (next, x);
262 }
263 }
264
265 if (TREE_CODE (t) == SSA_NAME)
266 {
267 int part = var_to_partition (SA.map, t);
268 if (part != NO_PARTITION)
269 {
270 if (SA.partition_to_pseudo[part])
271 gcc_assert (SA.partition_to_pseudo[part] == x);
272 else if (x != pc_rtx)
273 SA.partition_to_pseudo[part] = x;
274 }
275 /* For the benefit of debug information at -O0 (where
276 vartracking doesn't run) record the place also in the base
277 DECL. For PARMs and RESULTs, do so only when setting the
278 default def. */
279 if (x && x != pc_rtx && SSA_NAME_VAR (t)
280 && (VAR_P (SSA_NAME_VAR (t))
281 || SSA_NAME_IS_DEFAULT_DEF (t)))
282 {
283 tree var = SSA_NAME_VAR (t);
284 /* If we don't yet have something recorded, just record it now. */
285 if (!DECL_RTL_SET_P (var))
286 SET_DECL_RTL (var, x);
287 /* If we have it set already to "multiple places" don't
288 change this. */
289 else if (DECL_RTL (var) == pc_rtx)
290 ;
291 /* If we have something recorded and it's not the same place
292 as we want to record now, we have multiple partitions for the
293 same base variable, with different places. We can't just
294 randomly choose one, hence we have to say that we don't know.
295 This only happens with optimization, and there var-tracking
296 will figure out the right thing. */
297 else if (DECL_RTL (var) != x)
298 SET_DECL_RTL (var, pc_rtx);
299 }
300 }
301 else
302 SET_DECL_RTL (t, x);
303 }
304
305 /* This structure holds data relevant to one variable that will be
306 placed in a stack slot. */
307 class stack_var
308 {
309 public:
310 /* The variable. */
311 tree decl;
312
313 /* Initially, the size of the variable. Later, the size of the partition,
314 if this variable becomes its partition's representative. */
315 poly_uint64 size;
316
317 /* The *byte* alignment required for this variable. Or, as with the
318 size, the alignment for this partition. */
319 unsigned int alignb;
320
321 /* The partition representative. */
322 size_t representative;
323
324 /* The next stack variable in the partition, or EOC. */
325 size_t next;
326
327 /* A bitmap holding the indices of conflicting stack variables. */
328 bitmap conflicts;
329 };
330
331 #define EOC ((size_t)-1)
332
333 /* We have an array of such objects while deciding allocation. */
334 static class stack_var *stack_vars;
335 static size_t stack_vars_alloc;
336 static size_t stack_vars_num;
337 static hash_map<tree, size_t> *decl_to_stack_part;
338
339 /* Conflict bitmaps go on this obstack. This allows us to destroy
340 all of them in one big sweep. */
341 static bitmap_obstack stack_var_bitmap_obstack;
342
343 /* An array of indices into stack_vars, sorted by stack_var_cmp: "large"
344 alignment entries first, then by size in non-increasing order. */
345 static size_t *stack_vars_sorted;
346
347 /* The phase of the stack frame. This is the known misalignment of
348 virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY. That is,
349 (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0. */
350 static int frame_phase;
351
352 /* Used during expand_used_vars to remember if we saw any decls for
353 which we'd like to enable stack smashing protection. */
354 static bool has_protected_decls;
355
356 /* Used during expand_used_vars. Remember if we saw a character buffer
357 smaller than our cutoff threshold. Used for -Wstack-protector. */
358 static bool has_short_buffer;
359
360 /* Compute the byte alignment to use for DECL. Ignore alignment
361 we cannot satisfy given the expected alignment of the stack boundary. */
362
363 static unsigned int
364 align_local_variable (tree decl, bool really_expand)
365 {
366 unsigned int align;
367
368 if (TREE_CODE (decl) == SSA_NAME)
369 {
370 tree type = TREE_TYPE (decl);
371 machine_mode mode = TYPE_MODE (type);
372
373 align = TYPE_ALIGN (type);
374 if (mode != BLKmode
375 && align < GET_MODE_ALIGNMENT (mode))
376 align = GET_MODE_ALIGNMENT (mode);
377 }
378 else
379 {
380 align = LOCAL_DECL_ALIGNMENT (decl);
381 /* Don't change DECL_ALIGN when called from estimated_stack_frame_size.
382 That is done before IPA and could bump alignment based on host
383 backend even for offloaded code which wants different
384 LOCAL_DECL_ALIGNMENT. */
385 if (really_expand)
386 SET_DECL_ALIGN (decl, align);
387 }
388 return align / BITS_PER_UNIT;
389 }
390
391 /* Align given offset BASE to ALIGN. Round up if ALIGN_UP is true,
392 truncate down otherwise. Return the aligned BASE value. */
393
394 static inline unsigned HOST_WIDE_INT
395 align_base (HOST_WIDE_INT base, unsigned HOST_WIDE_INT align, bool align_up)
396 {
397 return align_up ? (base + align - 1) & -align : base & -align;
398 }
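/* Editor's sketch, not part of cfgexpand.c: a self-contained demo of the
   align_base bit trick above.  It assumes ALIGN is a power of two, so that
   -align is a mask with the low bits cleared.  The name demo_align_base
   and the sample values are illustrative only.  */

#include <stdio.h>

static unsigned long long
demo_align_base (long long base, unsigned long long align, int align_up)
{
  /* Same expression as align_base: round up or truncate down to ALIGN.  */
  return align_up ? (base + align - 1) & -align : base & -align;
}

int
main (void)
{
  printf ("%llu\n", demo_align_base (13, 8, 1));  /* prints 16 (rounded up) */
  printf ("%llu\n", demo_align_base (13, 8, 0));  /* prints 8 (truncated down) */
  return 0;
}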
399
400 /* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
401 Return the frame offset. */
402
403 static poly_int64
404 alloc_stack_frame_space (poly_int64 size, unsigned HOST_WIDE_INT align)
405 {
406 poly_int64 offset, new_frame_offset;
407
408 if (FRAME_GROWS_DOWNWARD)
409 {
410 new_frame_offset
411 = aligned_lower_bound (frame_offset - frame_phase - size,
412 align) + frame_phase;
413 offset = new_frame_offset;
414 }
415 else
416 {
417 new_frame_offset
418 = aligned_upper_bound (frame_offset - frame_phase,
419 align) + frame_phase;
420 offset = new_frame_offset;
421 new_frame_offset += size;
422 }
423 frame_offset = new_frame_offset;
424
425 if (frame_offset_overflow (frame_offset, cfun->decl))
426 frame_offset = offset = 0;
427
428 return offset;
429 }
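/* Editor's sketch, not part of cfgexpand.c: a simplified model of the
   FRAME_GROWS_DOWNWARD case of alloc_stack_frame_space above, using plain
   integers instead of poly_int64 and assuming frame_phase == 0 and a
   power-of-two ALIGN.  The names demo_frame_offset and demo_alloc are
   illustrative only.  */

#include <stdio.h>

static long long demo_frame_offset = 0;

static long long
demo_alloc (long long size, long long align)
{
  /* aligned_lower_bound (frame_offset - size, align): grow the frame
     downward, then round the new offset down to a multiple of ALIGN.  */
  long long off = (demo_frame_offset - size) & -align;
  demo_frame_offset = off;
  return off;
}

int
main (void)
{
  printf ("%lld\n", demo_alloc (12, 8));  /* prints -16 */
  printf ("%lld\n", demo_alloc (4, 4));   /* prints -20 */
  return 0;
}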
430
431 /* Accumulate DECL into STACK_VARS. */
432
433 static void
434 add_stack_var (tree decl, bool really_expand)
435 {
436 class stack_var *v;
437
438 if (stack_vars_num >= stack_vars_alloc)
439 {
440 if (stack_vars_alloc)
441 stack_vars_alloc = stack_vars_alloc * 3 / 2;
442 else
443 stack_vars_alloc = 32;
444 stack_vars
445 = XRESIZEVEC (class stack_var, stack_vars, stack_vars_alloc);
446 }
447 if (!decl_to_stack_part)
448 decl_to_stack_part = new hash_map<tree, size_t>;
449
450 v = &stack_vars[stack_vars_num];
451 decl_to_stack_part->put (decl, stack_vars_num);
452
453 v->decl = decl;
454 tree size = TREE_CODE (decl) == SSA_NAME
455 ? TYPE_SIZE_UNIT (TREE_TYPE (decl))
456 : DECL_SIZE_UNIT (decl);
457 v->size = tree_to_poly_uint64 (size);
458 /* Ensure that all variables have size, so that &a != &b for any two
459 variables that are simultaneously live. */
460 if (known_eq (v->size, 0U))
461 v->size = 1;
462 v->alignb = align_local_variable (decl, really_expand);
463 /* An alignment of zero can mightily confuse us later. */
464 gcc_assert (v->alignb != 0);
465
466 /* All variables are initially in their own partition. */
467 v->representative = stack_vars_num;
468 v->next = EOC;
469
470 /* All variables initially conflict with no other. */
471 v->conflicts = NULL;
472
473 /* Ensure that this decl doesn't get put onto the list twice. */
474 set_rtl (decl, pc_rtx);
475
476 stack_vars_num++;
477 }
478
479 /* Make the decls associated with luids X and Y conflict. */
480
481 static void
482 add_stack_var_conflict (size_t x, size_t y)
483 {
484 class stack_var *a = &stack_vars[x];
485 class stack_var *b = &stack_vars[y];
486 if (x == y)
487 return;
488 if (!a->conflicts)
489 a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
490 if (!b->conflicts)
491 b->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
492 bitmap_set_bit (a->conflicts, y);
493 bitmap_set_bit (b->conflicts, x);
494 }
495
496 /* Check whether the decls associated with luids X and Y conflict. */
497
498 static bool
499 stack_var_conflict_p (size_t x, size_t y)
500 {
501 class stack_var *a = &stack_vars[x];
502 class stack_var *b = &stack_vars[y];
503 if (x == y)
504 return false;
505 /* Partitions containing an SSA name result from gimple registers
506 with things like unsupported modes. They are top-level and
507 hence conflict with everything else. */
508 if (TREE_CODE (a->decl) == SSA_NAME || TREE_CODE (b->decl) == SSA_NAME)
509 return true;
510
511 if (!a->conflicts || !b->conflicts)
512 return false;
513 return bitmap_bit_p (a->conflicts, y);
514 }
515
516 /* Callback for walk_stmt_ops. If OP is a decl touched by add_stack_var
517 enter its partition number into bitmap DATA. */
518
519 static bool
520 visit_op (gimple *, tree op, tree, void *data)
521 {
522 bitmap active = (bitmap)data;
523 op = get_base_address (op);
524 if (op
525 && DECL_P (op)
526 && DECL_RTL_IF_SET (op) == pc_rtx)
527 {
528 size_t *v = decl_to_stack_part->get (op);
529 if (v)
530 bitmap_set_bit (active, *v);
531 }
532 return false;
533 }
534
535 /* Callback for walk_stmt_ops. If OP is a decl touched by add_stack_var
536 record conflicts between it and all currently active other partitions
537 from bitmap DATA. */
538
539 static bool
540 visit_conflict (gimple *, tree op, tree, void *data)
541 {
542 bitmap active = (bitmap)data;
543 op = get_base_address (op);
544 if (op
545 && DECL_P (op)
546 && DECL_RTL_IF_SET (op) == pc_rtx)
547 {
548 size_t *v = decl_to_stack_part->get (op);
549 if (v && bitmap_set_bit (active, *v))
550 {
551 size_t num = *v;
552 bitmap_iterator bi;
553 unsigned i;
554 gcc_assert (num < stack_vars_num);
555 EXECUTE_IF_SET_IN_BITMAP (active, 0, i, bi)
556 add_stack_var_conflict (num, i);
557 }
558 }
559 return false;
560 }
561
562 /* Helper routine for add_scope_conflicts, calculating the active partitions
563 at the end of BB, leaving the result in WORK. We're called to generate
564 conflicts when FOR_CONFLICT is true, otherwise we're just tracking
565 liveness. */
566
567 static void
568 add_scope_conflicts_1 (basic_block bb, bitmap work, bool for_conflict)
569 {
570 edge e;
571 edge_iterator ei;
572 gimple_stmt_iterator gsi;
573 walk_stmt_load_store_addr_fn visit;
574
575 bitmap_clear (work);
576 FOR_EACH_EDGE (e, ei, bb->preds)
577 bitmap_ior_into (work, (bitmap)e->src->aux);
578
579 visit = visit_op;
580
581 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
582 {
583 gimple *stmt = gsi_stmt (gsi);
584 walk_stmt_load_store_addr_ops (stmt, work, NULL, NULL, visit);
585 }
586 for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
587 {
588 gimple *stmt = gsi_stmt (gsi);
589
590 if (gimple_clobber_p (stmt))
591 {
592 tree lhs = gimple_assign_lhs (stmt);
593 size_t *v;
594 /* Nested function lowering might introduce LHSs
595 that are COMPONENT_REFs. */
596 if (!VAR_P (lhs))
597 continue;
598 if (DECL_RTL_IF_SET (lhs) == pc_rtx
599 && (v = decl_to_stack_part->get (lhs)))
600 bitmap_clear_bit (work, *v);
601 }
602 else if (!is_gimple_debug (stmt))
603 {
604 if (for_conflict
605 && visit == visit_op)
606 {
607 /* If this is the first real instruction in this BB we need
608 to add conflicts for everything live at this point now.
609 Unlike classical liveness for named objects we can't
610 rely on seeing a def/use of the names we're interested in.
611 There might merely be indirect loads/stores. We'd not add any
612 conflicts for such partitions. */
613 bitmap_iterator bi;
614 unsigned i;
615 EXECUTE_IF_SET_IN_BITMAP (work, 0, i, bi)
616 {
617 class stack_var *a = &stack_vars[i];
618 if (!a->conflicts)
619 a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
620 bitmap_ior_into (a->conflicts, work);
621 }
622 visit = visit_conflict;
623 }
624 walk_stmt_load_store_addr_ops (stmt, work, visit, visit, visit);
625 }
626 }
627 }
628
629 /* Generate stack partition conflicts between all partitions that are
630 simultaneously live. */
631
632 static void
633 add_scope_conflicts (void)
634 {
635 basic_block bb;
636 bool changed;
637 bitmap work = BITMAP_ALLOC (NULL);
638 int *rpo;
639 int n_bbs;
640
641 /* We approximate the live range of a stack variable by taking the first
642 mention of its name as starting point(s), and by the end-of-scope
643 death clobber added by gimplify as ending point(s) of the range.
644 This overapproximates in the case where, for instance, an address-taken
645 operation was moved upward without also moving a dereference of it.
646 But it is conservatively correct, as a variable can never hold values
647 before its name is mentioned at least once.
648
649 We then do a mostly classical bitmap liveness algorithm. */
650
651 FOR_ALL_BB_FN (bb, cfun)
652 bb->aux = BITMAP_ALLOC (&stack_var_bitmap_obstack);
653
654 rpo = XNEWVEC (int, last_basic_block_for_fn (cfun));
655 n_bbs = pre_and_rev_post_order_compute (NULL, rpo, false);
656
657 changed = true;
658 while (changed)
659 {
660 int i;
661 changed = false;
662 for (i = 0; i < n_bbs; i++)
663 {
664 bitmap active;
665 bb = BASIC_BLOCK_FOR_FN (cfun, rpo[i]);
666 active = (bitmap)bb->aux;
667 add_scope_conflicts_1 (bb, work, false);
668 if (bitmap_ior_into (active, work))
669 changed = true;
670 }
671 }
672
673 FOR_EACH_BB_FN (bb, cfun)
674 add_scope_conflicts_1 (bb, work, true);
675
676 free (rpo);
677 BITMAP_FREE (work);
678 FOR_ALL_BB_FN (bb, cfun)
679 BITMAP_FREE (bb->aux);
680 }
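/* Editor's note (illustrative example, not part of the original source):
   given

     { int a[32]; use (a); }
     { int b[32]; use (b); }

   each array is first mentioned inside its own block and killed by the
   gimplifier's end-of-scope clobber, so the dataflow above never sees the
   two arrays live simultaneously, records no conflict between them, and
   the later partitioning is free to place both in the same stack slot.  */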
681
682 /* A subroutine of partition_stack_vars. A comparison function for qsort,
683 sorting an array of indices by the properties of the object. */
684
685 static int
686 stack_var_cmp (const void *a, const void *b)
687 {
688 size_t ia = *(const size_t *)a;
689 size_t ib = *(const size_t *)b;
690 unsigned int aligna = stack_vars[ia].alignb;
691 unsigned int alignb = stack_vars[ib].alignb;
692 poly_int64 sizea = stack_vars[ia].size;
693 poly_int64 sizeb = stack_vars[ib].size;
694 tree decla = stack_vars[ia].decl;
695 tree declb = stack_vars[ib].decl;
696 bool largea, largeb;
697 unsigned int uida, uidb;
698
699 /* Primary compare on "large" alignment. Large comes first. */
700 largea = (aligna * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
701 largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
702 if (largea != largeb)
703 return (int)largeb - (int)largea;
704
705 /* Secondary compare on size, decreasing. */
706 int diff = compare_sizes_for_sort (sizeb, sizea);
707 if (diff != 0)
708 return diff;
709
710 /* Tertiary compare on true alignment, decreasing. */
711 if (aligna < alignb)
712 return -1;
713 if (aligna > alignb)
714 return 1;
715
716 /* Final compare on ID for sort stability, increasing.
717 Two SSA names are compared by their version, SSA names come before
718 non-SSA names, and two normal decls are compared by their DECL_UID. */
719 if (TREE_CODE (decla) == SSA_NAME)
720 {
721 if (TREE_CODE (declb) == SSA_NAME)
722 uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
723 else
724 return -1;
725 }
726 else if (TREE_CODE (declb) == SSA_NAME)
727 return 1;
728 else
729 uida = DECL_UID (decla), uidb = DECL_UID (declb);
730 if (uida < uidb)
731 return 1;
732 if (uida > uidb)
733 return -1;
734 return 0;
735 }
736
737 struct part_traits : unbounded_int_hashmap_traits <size_t, bitmap> {};
738 typedef hash_map<size_t, bitmap, part_traits> part_hashmap;
739
740 /* If the points-to solution *PT points to variables that are in a partition
741 together with other variables, add all partition members to the pointed-to
742 variables bitmap. */
743
744 static void
745 add_partitioned_vars_to_ptset (struct pt_solution *pt,
746 part_hashmap *decls_to_partitions,
747 hash_set<bitmap> *visited, bitmap temp)
748 {
749 bitmap_iterator bi;
750 unsigned i;
751 bitmap *part;
752
753 if (pt->anything
754 || pt->vars == NULL
755 /* The pointed-to vars bitmap is shared, it is enough to
756 visit it once. */
757 || visited->add (pt->vars))
758 return;
759
760 bitmap_clear (temp);
761
762 /* By using a temporary bitmap to store all members of the partitions
763 we have to add we make sure to visit each of the partitions only
764 once. */
765 EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
766 if ((!temp
767 || !bitmap_bit_p (temp, i))
768 && (part = decls_to_partitions->get (i)))
769 bitmap_ior_into (temp, *part);
770 if (!bitmap_empty_p (temp))
771 bitmap_ior_into (pt->vars, temp);
772 }
773
774 /* Update points-to sets based on partition info, so we can use them on RTL.
775 The bitmaps representing stack partitions will be saved until expand,
776 where partitioned decls used as bases in memory expressions will be
777 rewritten. */
778
779 static void
780 update_alias_info_with_stack_vars (void)
781 {
782 part_hashmap *decls_to_partitions = NULL;
783 size_t i, j;
784 tree var = NULL_TREE;
785
786 for (i = 0; i < stack_vars_num; i++)
787 {
788 bitmap part = NULL;
789 tree name;
790 struct ptr_info_def *pi;
791
792 /* Not interested in partitions with a single variable. */
793 if (stack_vars[i].representative != i
794 || stack_vars[i].next == EOC)
795 continue;
796
797 if (!decls_to_partitions)
798 {
799 decls_to_partitions = new part_hashmap;
800 cfun->gimple_df->decls_to_pointers = new hash_map<tree, tree>;
801 }
802
803 /* Create an SSA_NAME that points to the partition for use
804 as base during alias-oracle queries on RTL for bases that
805 have been partitioned. */
806 if (var == NULL_TREE)
807 var = create_tmp_var (ptr_type_node);
808 name = make_ssa_name (var);
809
810 /* Create bitmaps representing partitions. They will be used for
811 points-to sets later, so use GGC alloc. */
812 part = BITMAP_GGC_ALLOC ();
813 for (j = i; j != EOC; j = stack_vars[j].next)
814 {
815 tree decl = stack_vars[j].decl;
816 unsigned int uid = DECL_PT_UID (decl);
817 bitmap_set_bit (part, uid);
818 decls_to_partitions->put (uid, part);
819 cfun->gimple_df->decls_to_pointers->put (decl, name);
820 if (TREE_ADDRESSABLE (decl))
821 TREE_ADDRESSABLE (name) = 1;
822 }
823
824 /* Make the SSA name point to all partition members. */
825 pi = get_ptr_info (name);
826 pt_solution_set (&pi->pt, part, false);
827 }
828
829 /* Make all points-to sets that contain one member of a partition
830 contain all members of the partition. */
831 if (decls_to_partitions)
832 {
833 unsigned i;
834 tree name;
835 hash_set<bitmap> visited;
836 bitmap temp = BITMAP_ALLOC (&stack_var_bitmap_obstack);
837
838 FOR_EACH_SSA_NAME (i, name, cfun)
839 {
840 struct ptr_info_def *pi;
841
842 if (POINTER_TYPE_P (TREE_TYPE (name))
843 && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
844 add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
845 &visited, temp);
846 }
847
848 add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
849 decls_to_partitions, &visited, temp);
850
851 delete decls_to_partitions;
852 BITMAP_FREE (temp);
853 }
854 }
855
856 /* A subroutine of partition_stack_vars. The UNION portion of a UNION/FIND
857 partitioning algorithm. Partitions A and B are known to be non-conflicting.
858 Merge them into a single partition A. */
859
860 static void
861 union_stack_vars (size_t a, size_t b)
862 {
863 class stack_var *vb = &stack_vars[b];
864 bitmap_iterator bi;
865 unsigned u;
866
867 gcc_assert (stack_vars[b].next == EOC);
868 /* Add B to A's partition. */
869 stack_vars[b].next = stack_vars[a].next;
870 stack_vars[b].representative = a;
871 stack_vars[a].next = b;
872
873 /* Make sure A is big enough to hold B. */
874 stack_vars[a].size = upper_bound (stack_vars[a].size, stack_vars[b].size);
875
876 /* Update the required alignment of partition A to account for B. */
877 if (stack_vars[a].alignb < stack_vars[b].alignb)
878 stack_vars[a].alignb = stack_vars[b].alignb;
879
880 /* Update the interference graph and merge the conflicts. */
881 if (vb->conflicts)
882 {
883 EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
884 add_stack_var_conflict (a, stack_vars[u].representative);
885 BITMAP_FREE (vb->conflicts);
886 }
887 }
888
889 /* A subroutine of expand_used_vars. Binpack the variables into
890 partitions constrained by the interference graph. The overall
891 algorithm used is as follows:
892
893 Sort the objects by size in descending order.
894 For each object A {
895 S = size(A)
896 O = 0
897 loop {
898 Look for the largest non-conflicting object B with size <= S.
899 UNION (A, B)
900 }
901 }
902 */
903
904 static void
905 partition_stack_vars (void)
906 {
907 size_t si, sj, n = stack_vars_num;
908
909 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
910 for (si = 0; si < n; ++si)
911 stack_vars_sorted[si] = si;
912
913 if (n == 1)
914 return;
915
916 qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_cmp);
917
918 for (si = 0; si < n; ++si)
919 {
920 size_t i = stack_vars_sorted[si];
921 unsigned int ialign = stack_vars[i].alignb;
922 poly_int64 isize = stack_vars[i].size;
923
924 /* Ignore objects that aren't partition representatives. If we
925 see a var that is not a partition representative, it must
926 have been merged earlier. */
927 if (stack_vars[i].representative != i)
928 continue;
929
930 for (sj = si + 1; sj < n; ++sj)
931 {
932 size_t j = stack_vars_sorted[sj];
933 unsigned int jalign = stack_vars[j].alignb;
934 poly_int64 jsize = stack_vars[j].size;
935
936 /* Ignore objects that aren't partition representatives. */
937 if (stack_vars[j].representative != j)
938 continue;
939
940 /* Do not mix objects of "small" (supported) alignment
941 and "large" (unsupported) alignment. */
942 if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
943 != (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
944 break;
945
946 /* For Address Sanitizer do not mix objects with different
947 sizes, as the shorter vars wouldn't be adequately protected.
948 Don't do that for "large" (unsupported) alignment objects,
949 those aren't protected anyway. */
950 if (asan_sanitize_stack_p ()
951 && maybe_ne (isize, jsize)
952 && ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
953 break;
954
955 /* Ignore conflicting objects. */
956 if (stack_var_conflict_p (i, j))
957 continue;
958
959 /* UNION the objects, placing J at OFFSET. */
960 union_stack_vars (i, j);
961 }
962 }
963
964 update_alias_info_with_stack_vars ();
965 }
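/* Editor's note (illustrative example, not part of the original source):
   with three non-conflicting partition representatives of sizes 32, 16
   and 8 bytes (and no AddressSanitizer size restriction), the sorted
   order is 32, 16, 8; the 32-byte variable absorbs the other two via
   union_stack_vars, so the resulting partition keeps the maximum size
   (32) and the maximum alignment, and all three decls later share a
   single frame slot.  */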
966
967 /* A debugging aid for expand_used_vars. Dump the generated partitions. */
968
969 static void
970 dump_stack_var_partition (void)
971 {
972 size_t si, i, j, n = stack_vars_num;
973
974 for (si = 0; si < n; ++si)
975 {
976 i = stack_vars_sorted[si];
977
978 /* Skip variables that aren't partition representatives, for now. */
979 if (stack_vars[i].representative != i)
980 continue;
981
982 fprintf (dump_file, "Partition %lu: size ", (unsigned long) i);
983 print_dec (stack_vars[i].size, dump_file);
984 fprintf (dump_file, " align %u\n", stack_vars[i].alignb);
985
986 for (j = i; j != EOC; j = stack_vars[j].next)
987 {
988 fputc ('\t', dump_file);
989 print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
990 }
991 fputc ('\n', dump_file);
992 }
993 }
994
995 /* Assign rtl to DECL at BASE + OFFSET. */
996
997 static void
998 expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
999 poly_int64 offset)
1000 {
1001 unsigned align;
1002 rtx x;
1003
1004 /* If this fails, we've overflowed the stack frame. Error nicely? */
1005 gcc_assert (known_eq (offset, trunc_int_for_mode (offset, Pmode)));
1006
1007 x = plus_constant (Pmode, base, offset);
1008 x = gen_rtx_MEM (TREE_CODE (decl) == SSA_NAME
1009 ? TYPE_MODE (TREE_TYPE (decl))
1010 : DECL_MODE (decl), x);
1011
1012 /* Set the alignment we actually gave this decl if it isn't an SSA name.
1013 If it is, we generate stack slots only accidentally so it isn't as
1014 important; we'll simply set the alignment directly on the MEM. */
1015
1016 if (base == virtual_stack_vars_rtx)
1017 offset -= frame_phase;
1018 align = known_alignment (offset);
1019 align *= BITS_PER_UNIT;
1020 if (align == 0 || align > base_align)
1021 align = base_align;
1022
1023 if (TREE_CODE (decl) != SSA_NAME)
1024 {
1025 /* One would think that we could assert that we're not decreasing
1026 alignment here, but (at least) the i386 port does exactly this
1027 via the MINIMUM_ALIGNMENT hook. */
1028
1029 SET_DECL_ALIGN (decl, align);
1030 DECL_USER_ALIGN (decl) = 0;
1031 }
1032
1033 set_rtl (decl, x);
1034
1035 set_mem_align (x, align);
1036 }
1037
1038 class stack_vars_data
1039 {
1040 public:
1041 /* Vector of offset pairs, always end of some padding followed
1042 by start of the padding that needs Address Sanitizer protection.
1043 The vector is in reversed order: highest offset pairs come first. */
1044 auto_vec<HOST_WIDE_INT> asan_vec;
1045
1046 /* Vector of partition representative decls in between the paddings. */
1047 auto_vec<tree> asan_decl_vec;
1048
1049 /* Base pseudo register for Address Sanitizer protected automatic vars. */
1050 rtx asan_base;
1051
1052 /* Alignment needed for the Address Sanitizer protected automatic vars. */
1053 unsigned int asan_alignb;
1054 };
1055
1056 /* A subroutine of expand_used_vars. Give each partition representative
1057 a unique location within the stack frame. Update each partition member
1058 with that location. */
1059
1060 static void
1061 expand_stack_vars (bool (*pred) (size_t), class stack_vars_data *data)
1062 {
1063 size_t si, i, j, n = stack_vars_num;
1064 poly_uint64 large_size = 0, large_alloc = 0;
1065 rtx large_base = NULL;
1066 unsigned large_align = 0;
1067 bool large_allocation_done = false;
1068 tree decl;
1069
1070 /* Determine if there are any variables requiring "large" alignment.
1071 Since these are dynamically allocated, we only process these if
1072 no predicate is involved. */
1073 large_align = stack_vars[stack_vars_sorted[0]].alignb * BITS_PER_UNIT;
1074 if (pred == NULL && large_align > MAX_SUPPORTED_STACK_ALIGNMENT)
1075 {
1076 /* Find the total size of these variables. */
1077 for (si = 0; si < n; ++si)
1078 {
1079 unsigned alignb;
1080
1081 i = stack_vars_sorted[si];
1082 alignb = stack_vars[i].alignb;
1083
1084 /* All "large" alignment decls come before all "small" alignment
1085 decls, but "large" alignment decls are not sorted based on
1086 their alignment. Increase large_align to track the largest
1087 required alignment. */
1088 if ((alignb * BITS_PER_UNIT) > large_align)
1089 large_align = alignb * BITS_PER_UNIT;
1090
1091 /* Stop when we get to the first decl with "small" alignment. */
1092 if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
1093 break;
1094
1095 /* Skip variables that aren't partition representatives. */
1096 if (stack_vars[i].representative != i)
1097 continue;
1098
1099 /* Skip variables that have already had rtl assigned. See also
1100 add_stack_var where we perpetrate this pc_rtx hack. */
1101 decl = stack_vars[i].decl;
1102 if (TREE_CODE (decl) == SSA_NAME
1103 ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)] != NULL_RTX
1104 : DECL_RTL (decl) != pc_rtx)
1105 continue;
1106
1107 large_size = aligned_upper_bound (large_size, alignb);
1108 large_size += stack_vars[i].size;
1109 }
1110 }
1111
1112 for (si = 0; si < n; ++si)
1113 {
1114 rtx base;
1115 unsigned base_align, alignb;
1116 poly_int64 offset;
1117
1118 i = stack_vars_sorted[si];
1119
1120 /* Skip variables that aren't partition representatives, for now. */
1121 if (stack_vars[i].representative != i)
1122 continue;
1123
1124 /* Skip variables that have already had rtl assigned. See also
1125 add_stack_var where we perpetrate this pc_rtx hack. */
1126 decl = stack_vars[i].decl;
1127 if (TREE_CODE (decl) == SSA_NAME
1128 ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)] != NULL_RTX
1129 : DECL_RTL (decl) != pc_rtx)
1130 continue;
1131
1132 /* Check the predicate to see whether this variable should be
1133 allocated in this pass. */
1134 if (pred && !pred (i))
1135 continue;
1136
1137 alignb = stack_vars[i].alignb;
1138 if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
1139 {
1140 base = virtual_stack_vars_rtx;
1141 /* ASAN description strings don't yet have a syntax for expressing
1142 polynomial offsets. */
1143 HOST_WIDE_INT prev_offset;
1144 if (asan_sanitize_stack_p ()
1145 && pred
1146 && frame_offset.is_constant (&prev_offset)
1147 && stack_vars[i].size.is_constant ())
1148 {
1149 if (data->asan_vec.is_empty ())
1150 {
1151 alloc_stack_frame_space (0, ASAN_RED_ZONE_SIZE);
1152 prev_offset = frame_offset.to_constant ();
1153 }
1154 prev_offset = align_base (prev_offset,
1155 ASAN_MIN_RED_ZONE_SIZE,
1156 !FRAME_GROWS_DOWNWARD);
1157 tree repr_decl = NULL_TREE;
1158 unsigned HOST_WIDE_INT size
1159 = asan_var_and_redzone_size (stack_vars[i].size.to_constant ());
1160 if (data->asan_vec.is_empty ())
1161 size = MAX (size, ASAN_RED_ZONE_SIZE);
1162
1163 unsigned HOST_WIDE_INT alignment = MAX (alignb,
1164 ASAN_MIN_RED_ZONE_SIZE);
1165 offset = alloc_stack_frame_space (size, alignment);
1166
1167 data->asan_vec.safe_push (prev_offset);
1168 /* Allocating a constant amount of space from a constant
1169 starting offset must give a constant result. */
1170 data->asan_vec.safe_push ((offset + stack_vars[i].size)
1171 .to_constant ());
1172 /* Find the best representative of the partition.
1173 Prefer those with a DECL_NAME, and better still those
1174 satisfying the asan_protect_stack_decl predicate. */
1175 for (j = i; j != EOC; j = stack_vars[j].next)
1176 if (asan_protect_stack_decl (stack_vars[j].decl)
1177 && DECL_NAME (stack_vars[j].decl))
1178 {
1179 repr_decl = stack_vars[j].decl;
1180 break;
1181 }
1182 else if (repr_decl == NULL_TREE
1183 && DECL_P (stack_vars[j].decl)
1184 && DECL_NAME (stack_vars[j].decl))
1185 repr_decl = stack_vars[j].decl;
1186 if (repr_decl == NULL_TREE)
1187 repr_decl = stack_vars[i].decl;
1188 data->asan_decl_vec.safe_push (repr_decl);
1189
1190 /* Make sure a representative is unpoisoned if another
1191 variable in the partition is handled by
1192 use-after-scope sanitization. */
1193 if (asan_handled_variables != NULL
1194 && !asan_handled_variables->contains (repr_decl))
1195 {
1196 for (j = i; j != EOC; j = stack_vars[j].next)
1197 if (asan_handled_variables->contains (stack_vars[j].decl))
1198 break;
1199 if (j != EOC)
1200 asan_handled_variables->add (repr_decl);
1201 }
1202
1203 data->asan_alignb = MAX (data->asan_alignb, alignb);
1204 if (data->asan_base == NULL)
1205 data->asan_base = gen_reg_rtx (Pmode);
1206 base = data->asan_base;
1207
1208 if (!STRICT_ALIGNMENT)
1209 base_align = crtl->max_used_stack_slot_alignment;
1210 else
1211 base_align = MAX (crtl->max_used_stack_slot_alignment,
1212 GET_MODE_ALIGNMENT (SImode)
1213 << ASAN_SHADOW_SHIFT);
1214 }
1215 else
1216 {
1217 offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
1218 base_align = crtl->max_used_stack_slot_alignment;
1219 }
1220 }
1221 else
1222 {
1223 /* Large alignment is only processed in the last pass. */
1224 if (pred)
1225 continue;
1226
1227 /* If there were any variables requiring "large" alignment, allocate
1228 space. */
1229 if (maybe_ne (large_size, 0U) && ! large_allocation_done)
1230 {
1231 poly_int64 loffset;
1232 rtx large_allocsize;
1233
1234 large_allocsize = gen_int_mode (large_size, Pmode);
1235 get_dynamic_stack_size (&large_allocsize, 0, large_align, NULL);
1236 loffset = alloc_stack_frame_space
1237 (rtx_to_poly_int64 (large_allocsize),
1238 PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT);
1239 large_base = get_dynamic_stack_base (loffset, large_align);
1240 large_allocation_done = true;
1241 }
1242 gcc_assert (large_base != NULL);
1243
1244 large_alloc = aligned_upper_bound (large_alloc, alignb);
1245 offset = large_alloc;
1246 large_alloc += stack_vars[i].size;
1247
1248 base = large_base;
1249 base_align = large_align;
1250 }
1251
1252 /* Create rtl for each variable based on their location within the
1253 partition. */
1254 for (j = i; j != EOC; j = stack_vars[j].next)
1255 {
1256 expand_one_stack_var_at (stack_vars[j].decl,
1257 base, base_align,
1258 offset);
1259 }
1260 }
1261
1262 gcc_assert (known_eq (large_alloc, large_size));
1263 }
1264
1265 /* Take into account all sizes of partitions and reset DECL_RTLs. */
1266 static poly_uint64
1267 account_stack_vars (void)
1268 {
1269 size_t si, j, i, n = stack_vars_num;
1270 poly_uint64 size = 0;
1271
1272 for (si = 0; si < n; ++si)
1273 {
1274 i = stack_vars_sorted[si];
1275
1276 /* Skip variables that aren't partition representatives, for now. */
1277 if (stack_vars[i].representative != i)
1278 continue;
1279
1280 size += stack_vars[i].size;
1281 for (j = i; j != EOC; j = stack_vars[j].next)
1282 set_rtl (stack_vars[j].decl, NULL);
1283 }
1284 return size;
1285 }
1286
1287 /* Record the RTL assignment X for the default def of PARM. */
1288
1289 extern void
1290 set_parm_rtl (tree parm, rtx x)
1291 {
1292 gcc_assert (TREE_CODE (parm) == PARM_DECL
1293 || TREE_CODE (parm) == RESULT_DECL);
1294
1295 if (x && !MEM_P (x))
1296 {
1297 unsigned int align = MINIMUM_ALIGNMENT (TREE_TYPE (parm),
1298 TYPE_MODE (TREE_TYPE (parm)),
1299 TYPE_ALIGN (TREE_TYPE (parm)));
1300
1301 /* If the variable alignment is very large we'll dynamically
1302 allocate it, which means that the in-frame portion is just a
1303 pointer. ??? We've got a pseudo for sure here, do we
1304 actually dynamically allocate its spilling area if needed?
1305 ??? Isn't it a problem when Pmode alignment also exceeds
1306 MAX_SUPPORTED_STACK_ALIGNMENT, as can happen on cris and lm32? */
1307 if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1308 align = GET_MODE_ALIGNMENT (Pmode);
1309
1310 record_alignment_for_reg_var (align);
1311 }
1312
1313 tree ssa = ssa_default_def (cfun, parm);
1314 if (!ssa)
1315 return set_rtl (parm, x);
1316
1317 int part = var_to_partition (SA.map, ssa);
1318 gcc_assert (part != NO_PARTITION);
1319
1320 bool changed = bitmap_bit_p (SA.partitions_for_parm_default_defs, part);
1321 gcc_assert (changed);
1322
1323 set_rtl (ssa, x);
1324 gcc_assert (DECL_RTL (parm) == x);
1325 }
1326
1327 /* A subroutine of expand_one_var. Called to immediately assign rtl
1328 to a variable to be allocated in the stack frame. */
1329
1330 static void
1331 expand_one_stack_var_1 (tree var)
1332 {
1333 poly_uint64 size;
1334 poly_int64 offset;
1335 unsigned byte_align;
1336
1337 if (TREE_CODE (var) == SSA_NAME)
1338 {
1339 tree type = TREE_TYPE (var);
1340 size = tree_to_poly_uint64 (TYPE_SIZE_UNIT (type));
1341 }
1342 else
1343 size = tree_to_poly_uint64 (DECL_SIZE_UNIT (var));
1344
1345 byte_align = align_local_variable (var, true);
1346
1347 /* We handle highly aligned variables in expand_stack_vars. */
1348 gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT);
1349
1350 offset = alloc_stack_frame_space (size, byte_align);
1351
1352 expand_one_stack_var_at (var, virtual_stack_vars_rtx,
1353 crtl->max_used_stack_slot_alignment, offset);
1354 }
1355
1356 /* Wrapper for expand_one_stack_var_1 that checks SSA_NAMEs are
1357 already assigned some MEM. */
1358
1359 static void
1360 expand_one_stack_var (tree var)
1361 {
1362 if (TREE_CODE (var) == SSA_NAME)
1363 {
1364 int part = var_to_partition (SA.map, var);
1365 if (part != NO_PARTITION)
1366 {
1367 rtx x = SA.partition_to_pseudo[part];
1368 gcc_assert (x);
1369 gcc_assert (MEM_P (x));
1370 return;
1371 }
1372 }
1373
1374 return expand_one_stack_var_1 (var);
1375 }
1376
1377 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
1378 that will reside in a hard register. */
1379
1380 static void
1381 expand_one_hard_reg_var (tree var)
1382 {
1383 rest_of_decl_compilation (var, 0, 0);
1384 }
1385
1386 /* Record the alignment requirements of some variable assigned to a
1387 pseudo. */
1388
1389 static void
1390 record_alignment_for_reg_var (unsigned int align)
1391 {
1392 if (SUPPORTS_STACK_ALIGNMENT
1393 && crtl->stack_alignment_estimated < align)
1394 {
1395 /* stack_alignment_estimated shouldn't change after stack
1396 realign decision has been made. */
1397 gcc_assert (!crtl->stack_realign_processed);
1398 crtl->stack_alignment_estimated = align;
1399 }
1400
1401 /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
1402 So here we only make sure stack_alignment_needed >= align. */
1403 if (crtl->stack_alignment_needed < align)
1404 crtl->stack_alignment_needed = align;
1405 if (crtl->max_used_stack_slot_alignment < align)
1406 crtl->max_used_stack_slot_alignment = align;
1407 }
1408
1409 /* Create RTL for an SSA partition. */
1410
1411 static void
1412 expand_one_ssa_partition (tree var)
1413 {
1414 int part = var_to_partition (SA.map, var);
1415 gcc_assert (part != NO_PARTITION);
1416
1417 if (SA.partition_to_pseudo[part])
1418 return;
1419
1420 unsigned int align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
1421 TYPE_MODE (TREE_TYPE (var)),
1422 TYPE_ALIGN (TREE_TYPE (var)));
1423
1424 /* If the variable alignment is very large we'll dynamically allocate
1425 it, which means that the in-frame portion is just a pointer. */
1426 if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1427 align = GET_MODE_ALIGNMENT (Pmode);
1428
1429 record_alignment_for_reg_var (align);
1430
1431 if (!use_register_for_decl (var))
1432 {
1433 if (defer_stack_allocation (var, true))
1434 add_stack_var (var, true);
1435 else
1436 expand_one_stack_var_1 (var);
1437 return;
1438 }
1439
1440 machine_mode reg_mode = promote_ssa_mode (var, NULL);
1441 rtx x = gen_reg_rtx (reg_mode);
1442
1443 set_rtl (var, x);
1444
1445 /* For a promoted variable, X will not be used directly but wrapped in a
1446 SUBREG with SUBREG_PROMOTED_VAR_P set, which means that the RTL land
1447 will assume that its upper bits can be inferred from its lower bits.
1448 Therefore, if X isn't initialized on every path from the entry, then
1449 we must do it manually in order to fulfill the above assumption. */
1450 if (reg_mode != TYPE_MODE (TREE_TYPE (var))
1451 && bitmap_bit_p (SA.partitions_for_undefined_values, part))
1452 emit_move_insn (x, CONST0_RTX (reg_mode));
1453 }
1454
1455 /* Record the association between the RTL generated for partition PART
1456 and the underlying variable of the SSA_NAME VAR. */
1457
1458 static void
1459 adjust_one_expanded_partition_var (tree var)
1460 {
1461 if (!var)
1462 return;
1463
1464 tree decl = SSA_NAME_VAR (var);
1465
1466 int part = var_to_partition (SA.map, var);
1467 if (part == NO_PARTITION)
1468 return;
1469
1470 rtx x = SA.partition_to_pseudo[part];
1471
1472 gcc_assert (x);
1473
1474 set_rtl (var, x);
1475
1476 if (!REG_P (x))
1477 return;
1478
1479 /* Note if the object is a user variable. */
1480 if (decl && !DECL_ARTIFICIAL (decl))
1481 mark_user_reg (x);
1482
1483 if (POINTER_TYPE_P (decl ? TREE_TYPE (decl) : TREE_TYPE (var)))
1484 mark_reg_pointer (x, get_pointer_alignment (var));
1485 }
1486
1487 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
1488 that will reside in a pseudo register. */
1489
1490 static void
1491 expand_one_register_var (tree var)
1492 {
1493 if (TREE_CODE (var) == SSA_NAME)
1494 {
1495 int part = var_to_partition (SA.map, var);
1496 if (part != NO_PARTITION)
1497 {
1498 rtx x = SA.partition_to_pseudo[part];
1499 gcc_assert (x);
1500 gcc_assert (REG_P (x));
1501 return;
1502 }
1503 gcc_unreachable ();
1504 }
1505
1506 tree decl = var;
1507 tree type = TREE_TYPE (decl);
1508 machine_mode reg_mode = promote_decl_mode (decl, NULL);
1509 rtx x = gen_reg_rtx (reg_mode);
1510
1511 set_rtl (var, x);
1512
1513 /* Note if the object is a user variable. */
1514 if (!DECL_ARTIFICIAL (decl))
1515 mark_user_reg (x);
1516
1517 if (POINTER_TYPE_P (type))
1518 mark_reg_pointer (x, get_pointer_alignment (var));
1519 }
1520
1521 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL that
1522 has some associated error, e.g. its type is error-mark. We just need
1523 to pick something that won't crash the rest of the compiler. */
1524
1525 static void
1526 expand_one_error_var (tree var)
1527 {
1528 machine_mode mode = DECL_MODE (var);
1529 rtx x;
1530
1531 if (mode == BLKmode)
1532 x = gen_rtx_MEM (BLKmode, const0_rtx);
1533 else if (mode == VOIDmode)
1534 x = const0_rtx;
1535 else
1536 x = gen_reg_rtx (mode);
1537
1538 SET_DECL_RTL (var, x);
1539 }
1540
1541 /* A subroutine of expand_one_var. VAR is a variable that will be
1542 allocated to the local stack frame. Return true if we wish to
1543 add VAR to STACK_VARS so that it will be coalesced with other
1544 variables. Return false to allocate VAR immediately.
1545
1546 This function is used to reduce the number of variables considered
1547 for coalescing, which reduces the size of the quadratic problem. */
1548
1549 static bool
1550 defer_stack_allocation (tree var, bool toplevel)
1551 {
1552 tree size_unit = TREE_CODE (var) == SSA_NAME
1553 ? TYPE_SIZE_UNIT (TREE_TYPE (var))
1554 : DECL_SIZE_UNIT (var);
1555 poly_uint64 size;
1556
1557 /* Whether the variable is small enough for immediate allocation not to be
1558 a problem with regard to the frame size. */
1559 bool smallish
1560 = (poly_int_tree_p (size_unit, &size)
1561 && (estimated_poly_value (size)
1562 < param_min_size_for_stack_sharing));
1563
1564 /* If stack protection is enabled, *all* stack variables must be deferred,
1565 so that we can re-order the strings to the top of the frame.
1566 Similarly for Address Sanitizer. */
1567 if (flag_stack_protect || asan_sanitize_stack_p ())
1568 return true;
1569
1570 unsigned int align = TREE_CODE (var) == SSA_NAME
1571 ? TYPE_ALIGN (TREE_TYPE (var))
1572 : DECL_ALIGN (var);
1573
1574 /* We handle "large" alignment via dynamic allocation. We want to handle
1575 this extra complication in only one place, so defer them. */
1576 if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1577 return true;
1578
1579 bool ignored = TREE_CODE (var) == SSA_NAME
1580 ? !SSAVAR (var) || DECL_IGNORED_P (SSA_NAME_VAR (var))
1581 : DECL_IGNORED_P (var);
1582
1583 /* When optimization is enabled, DECL_IGNORED_P variables originally scoped
1584 might be detached from their block and appear at toplevel when we reach
1585 here. We want to coalesce them with variables from other blocks when
1586 the immediate contribution to the frame size would be noticeable. */
1587 if (toplevel && optimize > 0 && ignored && !smallish)
1588 return true;
1589
1590 /* Variables declared in the outermost scope automatically conflict
1591 with every other variable. The only reason to want to defer them
1592 at all is that, after sorting, we can more efficiently pack
1593 small variables in the stack frame. Continue to defer at -O2. */
1594 if (toplevel && optimize < 2)
1595 return false;
1596
1597 /* Without optimization, *most* variables are allocated from the
1598 stack, which makes the quadratic problem large exactly when we
1599 want compilation to proceed as quickly as possible. On the
1600 other hand, we don't want the function's stack frame size to
1601 get completely out of hand. So we avoid adding scalars and
1602 "small" aggregates to the list at all. */
1603 if (optimize == 0 && smallish)
1604 return false;
1605
1606 return true;
1607 }
1608
1609 /* A subroutine of expand_used_vars. Expand one variable according to
1610 its flavor. Variables to be placed on the stack are not actually
1611 expanded yet, merely recorded.
1612 When REALLY_EXPAND is false, only add stack values to be allocated.
1613 Return the stack usage this variable is supposed to take.
1614 */
1615
1616 static poly_uint64
1617 expand_one_var (tree var, bool toplevel, bool really_expand)
1618 {
1619 unsigned int align = BITS_PER_UNIT;
1620 tree origvar = var;
1621
1622 var = SSAVAR (var);
1623
1624 if (TREE_TYPE (var) != error_mark_node && VAR_P (var))
1625 {
1626 if (is_global_var (var))
1627 return 0;
1628
1629 /* Because we don't know if VAR will be in a register or on the stack,
1630 we conservatively assume it will be on the stack even if VAR is
1631 eventually put into a register after the RA pass. For non-automatic
1632 variables, which won't be on stack, we collect alignment of
1633 type and ignore user specified alignment. Similarly for
1634 SSA_NAMEs for which use_register_for_decl returns true. */
1635 if (TREE_STATIC (var)
1636 || DECL_EXTERNAL (var)
1637 || (TREE_CODE (origvar) == SSA_NAME && use_register_for_decl (var)))
1638 align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
1639 TYPE_MODE (TREE_TYPE (var)),
1640 TYPE_ALIGN (TREE_TYPE (var)));
1641 else if (DECL_HAS_VALUE_EXPR_P (var)
1642 || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var))))
1643 /* Don't consider debug only variables with DECL_HAS_VALUE_EXPR_P set
1644 or variables which were assigned a stack slot already by
1645 expand_one_stack_var_at - in the latter case DECL_ALIGN has been
1646 changed from the offset chosen to it. */
1647 align = crtl->stack_alignment_estimated;
1648 else
1649 align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));
1650
1651 /* If the variable alignment is very large we'll dynamically allocate
1652 it, which means that the in-frame portion is just a pointer. */
1653 if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1654 align = GET_MODE_ALIGNMENT (Pmode);
1655 }
1656
1657 record_alignment_for_reg_var (align);
1658
1659 poly_uint64 size;
1660 if (TREE_CODE (origvar) == SSA_NAME)
1661 {
1662 gcc_assert (!VAR_P (var)
1663 || (!DECL_EXTERNAL (var)
1664 && !DECL_HAS_VALUE_EXPR_P (var)
1665 && !TREE_STATIC (var)
1666 && TREE_TYPE (var) != error_mark_node
1667 && !DECL_HARD_REGISTER (var)
1668 && really_expand));
1669 }
1670 if (!VAR_P (var) && TREE_CODE (origvar) != SSA_NAME)
1671 ;
1672 else if (DECL_EXTERNAL (var))
1673 ;
1674 else if (DECL_HAS_VALUE_EXPR_P (var))
1675 ;
1676 else if (TREE_STATIC (var))
1677 ;
1678 else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
1679 ;
1680 else if (TREE_TYPE (var) == error_mark_node)
1681 {
1682 if (really_expand)
1683 expand_one_error_var (var);
1684 }
1685 else if (VAR_P (var) && DECL_HARD_REGISTER (var))
1686 {
1687 if (really_expand)
1688 {
1689 expand_one_hard_reg_var (var);
1690 if (!DECL_HARD_REGISTER (var))
1691 /* Invalid register specification. */
1692 expand_one_error_var (var);
1693 }
1694 }
1695 else if (use_register_for_decl (var))
1696 {
1697 if (really_expand)
1698 expand_one_register_var (origvar);
1699 }
1700 else if (!poly_int_tree_p (DECL_SIZE_UNIT (var), &size)
1701 || !valid_constant_size_p (DECL_SIZE_UNIT (var)))
1702 {
1703 /* Reject variables which cover more than half of the address-space. */
1704 if (really_expand)
1705 {
1706 if (DECL_NONLOCAL_FRAME (var))
1707 error_at (DECL_SOURCE_LOCATION (current_function_decl),
1708 "total size of local objects is too large");
1709 else
1710 error_at (DECL_SOURCE_LOCATION (var),
1711 "size of variable %q+D is too large", var);
1712 expand_one_error_var (var);
1713 }
1714 }
1715 else if (defer_stack_allocation (var, toplevel))
1716 add_stack_var (origvar, really_expand);
1717 else
1718 {
1719 if (really_expand)
1720 {
1721 if (lookup_attribute ("naked",
1722 DECL_ATTRIBUTES (current_function_decl)))
1723 error ("cannot allocate stack for variable %q+D, naked function",
1724 var);
1725
1726 expand_one_stack_var (origvar);
1727 }
1728 return size;
1729 }
1730 return 0;
1731 }
1732
1733 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1734 expanding variables. Those variables that can be put into registers
1735 are allocated pseudos; those that can't are put on the stack.
1736
1737 TOPLEVEL is true if this is the outermost BLOCK. */
1738
1739 static void
1740 expand_used_vars_for_block (tree block, bool toplevel)
1741 {
1742 tree t;
1743
1744 /* Expand all variables at this level. */
1745 for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1746 if (TREE_USED (t)
1747 && ((!VAR_P (t) && TREE_CODE (t) != RESULT_DECL)
1748 || !DECL_NONSHAREABLE (t)))
1749 expand_one_var (t, toplevel, true);
1750
1751 /* Expand all variables at containing levels. */
1752 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1753 expand_used_vars_for_block (t, false);
1754 }
1755
1756 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1757 and clear TREE_USED on all local variables. */
1758
1759 static void
1760 clear_tree_used (tree block)
1761 {
1762 tree t;
1763
1764 for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1765 /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
1766 if ((!VAR_P (t) && TREE_CODE (t) != RESULT_DECL)
1767 || !DECL_NONSHAREABLE (t))
1768 TREE_USED (t) = 0;
1769
1770 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1771 clear_tree_used (t);
1772 }
1773
1774 /* Examine TYPE and determine a bit mask of the following features. */
1775
1776 #define SPCT_HAS_LARGE_CHAR_ARRAY 1
1777 #define SPCT_HAS_SMALL_CHAR_ARRAY 2
1778 #define SPCT_HAS_ARRAY 4
1779 #define SPCT_HAS_AGGREGATE 8
1780
1781 static unsigned int
1782 stack_protect_classify_type (tree type)
1783 {
1784 unsigned int ret = 0;
1785 tree t;
1786
1787 switch (TREE_CODE (type))
1788 {
1789 case ARRAY_TYPE:
1790 t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
1791 if (t == char_type_node
1792 || t == signed_char_type_node
1793 || t == unsigned_char_type_node)
1794 {
1795 unsigned HOST_WIDE_INT max = param_ssp_buffer_size;
1796 unsigned HOST_WIDE_INT len;
1797
1798 if (!TYPE_SIZE_UNIT (type)
1799 || !tree_fits_uhwi_p (TYPE_SIZE_UNIT (type)))
1800 len = max;
1801 else
1802 len = tree_to_uhwi (TYPE_SIZE_UNIT (type));
1803
1804 if (len < max)
1805 ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
1806 else
1807 ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
1808 }
1809 else
1810 ret = SPCT_HAS_ARRAY;
1811 break;
1812
1813 case UNION_TYPE:
1814 case QUAL_UNION_TYPE:
1815 case RECORD_TYPE:
1816 ret = SPCT_HAS_AGGREGATE;
1817 for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
1818 if (TREE_CODE (t) == FIELD_DECL)
1819 ret |= stack_protect_classify_type (TREE_TYPE (t));
1820 break;
1821
1822 default:
1823 break;
1824 }
1825
1826 return ret;
1827 }
1828
1829 /* Return nonzero if DECL should be segregated into the "vulnerable" upper
1830 part of the local stack frame. Remember if we ever return nonzero for
1831 any variable in this function. The return value is the phase number in
1832 which the variable should be allocated. */
1833
1834 static int
1835 stack_protect_decl_phase (tree decl)
1836 {
1837 unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
1838 int ret = 0;
1839
1840 if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
1841 has_short_buffer = true;
1842
1843 tree attribs = DECL_ATTRIBUTES (current_function_decl);
1844 if (!lookup_attribute ("no_stack_protector", attribs)
1845 && (flag_stack_protect == SPCT_FLAG_ALL
1846 || flag_stack_protect == SPCT_FLAG_STRONG
1847 || (flag_stack_protect == SPCT_FLAG_EXPLICIT
1848 && lookup_attribute ("stack_protect", attribs))))
1849 {
1850 if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
1851 && !(bits & SPCT_HAS_AGGREGATE))
1852 ret = 1;
1853 else if (bits & SPCT_HAS_ARRAY)
1854 ret = 2;
1855 }
1856 else
1857 ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;
1858
1859 if (ret)
1860 has_protected_decls = true;
1861
1862 return ret;
1863 }
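/* A rough example of the resulting phases under -fstack-protector-strong:
   "char buf[64]" is phase 1 (character arrays), "int v[16]" is phase 2
   (other arrays), and a plain scalar such as "int i" is phase 0 and is not
   segregated.  With plain -fstack-protector, roughly speaking, only large
   character arrays end up in phase 1.  */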
1864
1865 /* Two helper routines that check for phase 1 and phase 2. These are used
1866 as callbacks for expand_stack_vars. */
1867
1868 static bool
1869 stack_protect_decl_phase_1 (size_t i)
1870 {
1871 return stack_protect_decl_phase (stack_vars[i].decl) == 1;
1872 }
1873
1874 static bool
1875 stack_protect_decl_phase_2 (size_t i)
1876 {
1877 return stack_protect_decl_phase (stack_vars[i].decl) == 2;
1878 }
1879
1880 /* A helper function that checks for the asan phase (with the stack protector
1881 it is phase 3). This is used as a callback for expand_stack_vars.
1882 Returns true if any of the vars in the partition need to be protected. */
1883
1884 static bool
1885 asan_decl_phase_3 (size_t i)
1886 {
1887 while (i != EOC)
1888 {
1889 if (asan_protect_stack_decl (stack_vars[i].decl))
1890 return true;
1891 i = stack_vars[i].next;
1892 }
1893 return false;
1894 }
1895
1896 /* Ensure that variables in different stack protection phases conflict
1897 so that they are not merged and share the same stack slot.
1898 Return true if there are any address taken variables. */
1899
1900 static bool
1901 add_stack_protection_conflicts (void)
1902 {
1903 size_t i, j, n = stack_vars_num;
1904 unsigned char *phase;
1905 bool ret = false;
1906
1907 phase = XNEWVEC (unsigned char, n);
1908 for (i = 0; i < n; ++i)
1909 {
1910 phase[i] = stack_protect_decl_phase (stack_vars[i].decl);
1911 if (TREE_ADDRESSABLE (stack_vars[i].decl))
1912 ret = true;
1913 }
1914
1915 for (i = 0; i < n; ++i)
1916 {
1917 unsigned char ph_i = phase[i];
1918 for (j = i + 1; j < n; ++j)
1919 if (ph_i != phase[j])
1920 add_stack_var_conflict (i, j);
1921 }
1922
1923 XDELETEVEC (phase);
1924 return ret;
1925 }
1926
1927 /* Create a decl for the guard at the top of the stack frame. */
1928
1929 static void
1930 create_stack_guard (void)
1931 {
1932 tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
1933 VAR_DECL, NULL, ptr_type_node);
1934 TREE_THIS_VOLATILE (guard) = 1;
1935 TREE_USED (guard) = 1;
1936 expand_one_stack_var (guard);
1937 crtl->stack_protect_guard = guard;
1938 }
1939
1940 /* Prepare for expanding variables. */
1941 static void
1942 init_vars_expansion (void)
1943 {
1944 /* Conflict bitmaps, and a few related temporary bitmaps, go here. */
1945 bitmap_obstack_initialize (&stack_var_bitmap_obstack);
1946
1947 /* A map from decl to stack partition. */
1948 decl_to_stack_part = new hash_map<tree, size_t>;
1949
1950 /* Initialize local stack smashing state. */
1951 has_protected_decls = false;
1952 has_short_buffer = false;
1953 }
1954
1955 /* Free up stack variable graph data. */
1956 static void
1957 fini_vars_expansion (void)
1958 {
1959 bitmap_obstack_release (&stack_var_bitmap_obstack);
1960 if (stack_vars)
1961 XDELETEVEC (stack_vars);
1962 if (stack_vars_sorted)
1963 XDELETEVEC (stack_vars_sorted);
1964 stack_vars = NULL;
1965 stack_vars_sorted = NULL;
1966 stack_vars_alloc = stack_vars_num = 0;
1967 delete decl_to_stack_part;
1968 decl_to_stack_part = NULL;
1969 }
1970
1971 /* Make a fair guess for the size of the stack frame of the function
1972 in NODE. This doesn't have to be exact; the result is only used in
1973 the inline heuristics. So we don't want to run the full stack var
1974 packing algorithm (which is quadratic in the number of stack vars).
1975 Instead, we calculate the total size of all stack vars. This turns
1976 out to be a pretty fair estimate -- packing of stack vars doesn't
1977 happen very often. */
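/* A rough example: a function whose locals are "int a[100]" and "char b[16]"
   is estimated at about 400 + 16 bytes (plus padding and alignment), even if
   packing could later let the two variables share a stack slot.  */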
1978
1979 HOST_WIDE_INT
1980 estimated_stack_frame_size (struct cgraph_node *node)
1981 {
1982 poly_int64 size = 0;
1983 size_t i;
1984 tree var;
1985 struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
1986
1987 push_cfun (fn);
1988
1989 init_vars_expansion ();
1990
1991 FOR_EACH_LOCAL_DECL (fn, i, var)
1992 if (auto_var_in_fn_p (var, fn->decl))
1993 size += expand_one_var (var, true, false);
1994
1995 if (stack_vars_num > 0)
1996 {
1997 /* Fake sorting the stack vars for account_stack_vars (). */
1998 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
1999 for (i = 0; i < stack_vars_num; ++i)
2000 stack_vars_sorted[i] = i;
2001 size += account_stack_vars ();
2002 }
2003
2004 fini_vars_expansion ();
2005 pop_cfun ();
2006 return estimated_poly_value (size);
2007 }
2008
2009 /* Check if the current function has calls that use a return slot. */
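/* For example, a call such as "struct big s = f ();" where f returns its
   value in memory (aggregate_value_p) makes this return true; under
   -fstack-protector-strong that in turn forces a stack guard, since the
   return slot lives in the caller's frame.  Calls to internal functions are
   assumed never to use a return slot.  */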
2010
2011 static bool
2012 stack_protect_return_slot_p ()
2013 {
2014 basic_block bb;
2015
2016 FOR_ALL_BB_FN (bb, cfun)
2017 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
2018 !gsi_end_p (gsi); gsi_next (&gsi))
2019 {
2020 gimple *stmt = gsi_stmt (gsi);
2021 /* This assumes that calls to internal-only functions never
2022 use a return slot. */
2023 if (is_gimple_call (stmt)
2024 && !gimple_call_internal_p (stmt)
2025 && aggregate_value_p (TREE_TYPE (gimple_call_fntype (stmt)),
2026 gimple_call_fndecl (stmt)))
2027 return true;
2028 }
2029 return false;
2030 }
2031
2032 /* Expand all variables used in the function. */
2033
2034 static rtx_insn *
2035 expand_used_vars (void)
2036 {
2037 tree var, outer_block = DECL_INITIAL (current_function_decl);
2038 auto_vec<tree> maybe_local_decls;
2039 rtx_insn *var_end_seq = NULL;
2040 unsigned i;
2041 unsigned len;
2042 bool gen_stack_protect_signal = false;
2043
2044 /* Compute the phase of the stack frame for this function. */
2045 {
2046 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
2047 int off = targetm.starting_frame_offset () % align;
2048 frame_phase = off ? align - off : 0;
2049 }
2050
2051 /* Set TREE_USED on all variables in the local_decls. */
2052 FOR_EACH_LOCAL_DECL (cfun, i, var)
2053 TREE_USED (var) = 1;
2054 /* Clear TREE_USED on all variables associated with a block scope. */
2055 clear_tree_used (DECL_INITIAL (current_function_decl));
2056
2057 init_vars_expansion ();
2058
2059 if (targetm.use_pseudo_pic_reg ())
2060 pic_offset_table_rtx = gen_reg_rtx (Pmode);
2061
2062 for (i = 0; i < SA.map->num_partitions; i++)
2063 {
2064 if (bitmap_bit_p (SA.partitions_for_parm_default_defs, i))
2065 continue;
2066
2067 tree var = partition_to_var (SA.map, i);
2068
2069 gcc_assert (!virtual_operand_p (var));
2070
2071 expand_one_ssa_partition (var);
2072 }
2073
2074 if (flag_stack_protect == SPCT_FLAG_STRONG)
2075 gen_stack_protect_signal = stack_protect_return_slot_p ();
2076
2077 /* At this point all variables on the local_decls with TREE_USED
2078 set are not associated with any block scope. Lay them out. */
2079
2080 len = vec_safe_length (cfun->local_decls);
2081 FOR_EACH_LOCAL_DECL (cfun, i, var)
2082 {
2083 bool expand_now = false;
2084
2085 /* Expanded above already. */
2086 if (is_gimple_reg (var))
2087 {
2088 TREE_USED (var) = 0;
2089 goto next;
2090 }
2091 /* We didn't set a block for static or extern because it's hard
2092 to tell the difference between a global variable (re)declared
2093 in a local scope, and one that's really declared there to
2094 begin with. And it doesn't really matter much, since we're
2095 not giving them stack space. Expand them now. */
2096 else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
2097 expand_now = true;
2098
2099 /* Expand variables not associated with any block now. Those created by
2100 the optimizers could be live anywhere in the function. Those that
2101 could possibly have been scoped originally and detached from their
2102 block will have their allocation deferred so we coalesce them with
2103 others when optimization is enabled. */
2104 else if (TREE_USED (var))
2105 expand_now = true;
2106
2107 /* Finally, mark all variables on the list as used. We'll use
2108 this in a moment when we expand those associated with scopes. */
2109 TREE_USED (var) = 1;
2110
2111 if (expand_now)
2112 expand_one_var (var, true, true);
2113
2114 next:
2115 if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
2116 {
2117 rtx rtl = DECL_RTL_IF_SET (var);
2118
2119 /* Keep artificial non-ignored vars in cfun->local_decls
2120 chain until instantiate_decls. */
2121 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
2122 add_local_decl (cfun, var);
2123 else if (rtl == NULL_RTX)
2124 /* If rtl isn't set yet, which can happen e.g. with
2125 -fstack-protector, retry before returning from this
2126 function. */
2127 maybe_local_decls.safe_push (var);
2128 }
2129 }
2130
2131 /* We duplicated some of the decls in CFUN->LOCAL_DECLS.
2132
2133 +-----------------+-----------------+
2134 | ...processed... | ...duplicates...|
2135 +-----------------+-----------------+
2136 ^
2137 +-- LEN points here.
2138
2139 We just want the duplicates, as those are the artificial
2140 non-ignored vars that we want to keep until instantiate_decls.
2141 Move them down and truncate the array. */
2142 if (!vec_safe_is_empty (cfun->local_decls))
2143 cfun->local_decls->block_remove (0, len);
2144
2145 /* At this point, all variables within the block tree with TREE_USED
2146 set are actually used by the optimized function. Lay them out. */
2147 expand_used_vars_for_block (outer_block, true);
2148
2149 tree attribs = DECL_ATTRIBUTES (current_function_decl);
2150 if (stack_vars_num > 0)
2151 {
2152 bool has_addressable_vars = false;
2153
2154 add_scope_conflicts ();
2155
2156 /* If stack protection is enabled, we don't share space between
2157 vulnerable data and non-vulnerable data. */
2158 if (flag_stack_protect != 0
2159 && !lookup_attribute ("no_stack_protector", attribs)
2160 && (flag_stack_protect != SPCT_FLAG_EXPLICIT
2161 || (flag_stack_protect == SPCT_FLAG_EXPLICIT
2162 && lookup_attribute ("stack_protect", attribs))))
2163 has_addressable_vars = add_stack_protection_conflicts ();
2164
2165 if (flag_stack_protect == SPCT_FLAG_STRONG && has_addressable_vars)
2166 gen_stack_protect_signal = true;
2167
2168 /* Now that we have collected all stack variables, and have computed a
2169 minimal interference graph, attempt to save some stack space. */
2170 partition_stack_vars ();
2171 if (dump_file)
2172 dump_stack_var_partition ();
2173 }
2174
2175
2176 if (!lookup_attribute ("no_stack_protector", attribs))
2177 switch (flag_stack_protect)
2178 {
2179 case SPCT_FLAG_ALL:
2180 create_stack_guard ();
2181 break;
2182
2183 case SPCT_FLAG_STRONG:
2184 if (gen_stack_protect_signal
2185 || cfun->calls_alloca
2186 || has_protected_decls
2187 || lookup_attribute ("stack_protect",
2188 DECL_ATTRIBUTES (current_function_decl)))
2189 create_stack_guard ();
2190 break;
2191
2192 case SPCT_FLAG_DEFAULT:
2193 if (cfun->calls_alloca
2194 || has_protected_decls
2195 || lookup_attribute ("stack_protect",
2196 DECL_ATTRIBUTES (current_function_decl)))
2197 create_stack_guard ();
2198 break;
2199
2200 case SPCT_FLAG_EXPLICIT:
2201 if (lookup_attribute ("stack_protect",
2202 DECL_ATTRIBUTES (current_function_decl)))
2203 create_stack_guard ();
2204 break;
2205
2206 default:
2207 break;
2208 }
2209
2210 /* Assign rtl to each variable based on these partitions. */
2211 if (stack_vars_num > 0)
2212 {
2213 class stack_vars_data data;
2214
2215 data.asan_base = NULL_RTX;
2216 data.asan_alignb = 0;
2217
2218 /* Reorder decls to be protected by iterating over the variables
2219 array multiple times, and allocating out of each phase in turn. */
2220 /* ??? We could probably integrate this into the qsort we did
2221 earlier, such that we naturally see these variables first,
2222 and thus naturally allocate things in the right order. */
2223 if (has_protected_decls)
2224 {
2225 /* Phase 1 contains only character arrays. */
2226 expand_stack_vars (stack_protect_decl_phase_1, &data);
2227
2228 /* Phase 2 contains other kinds of arrays. */
2229 if (!lookup_attribute ("no_stack_protector", attribs)
2230 && (flag_stack_protect == SPCT_FLAG_ALL
2231 || flag_stack_protect == SPCT_FLAG_STRONG
2232 || (flag_stack_protect == SPCT_FLAG_EXPLICIT
2233 && lookup_attribute ("stack_protect", attribs))))
2234 expand_stack_vars (stack_protect_decl_phase_2, &data);
2235 }
2236
2237 if (asan_sanitize_stack_p ())
2238 /* Phase 3, any partitions that need asan protection
2239 in addition to phase 1 and 2. */
2240 expand_stack_vars (asan_decl_phase_3, &data);
2241
2242 /* ASAN description strings don't yet have a syntax for expressing
2243 polynomial offsets. */
2244 HOST_WIDE_INT prev_offset;
2245 if (!data.asan_vec.is_empty ()
2246 && frame_offset.is_constant (&prev_offset))
2247 {
2248 HOST_WIDE_INT offset, sz, redzonesz;
2249 redzonesz = ASAN_RED_ZONE_SIZE;
2250 sz = data.asan_vec[0] - prev_offset;
2251 if (data.asan_alignb > ASAN_RED_ZONE_SIZE
2252 && data.asan_alignb <= 4096
2253 && sz + ASAN_RED_ZONE_SIZE >= (int) data.asan_alignb)
2254 redzonesz = ((sz + ASAN_RED_ZONE_SIZE + data.asan_alignb - 1)
2255 & ~(data.asan_alignb - HOST_WIDE_INT_1)) - sz;
2256 /* Allocating a constant amount of space from a constant
2257 starting offset must give a constant result. */
2258 offset = (alloc_stack_frame_space (redzonesz, ASAN_RED_ZONE_SIZE)
2259 .to_constant ());
2260 data.asan_vec.safe_push (prev_offset);
2261 data.asan_vec.safe_push (offset);
2262 /* Leave space for alignment if STRICT_ALIGNMENT. */
2263 if (STRICT_ALIGNMENT)
2264 alloc_stack_frame_space ((GET_MODE_ALIGNMENT (SImode)
2265 << ASAN_SHADOW_SHIFT)
2266 / BITS_PER_UNIT, 1);
2267
2268 var_end_seq
2269 = asan_emit_stack_protection (virtual_stack_vars_rtx,
2270 data.asan_base,
2271 data.asan_alignb,
2272 data.asan_vec.address (),
2273 data.asan_decl_vec.address (),
2274 data.asan_vec.length ());
2275 }
2276
2277 expand_stack_vars (NULL, &data);
2278 }
2279
2280 if (asan_sanitize_allocas_p () && cfun->calls_alloca)
2281 var_end_seq = asan_emit_allocas_unpoison (virtual_stack_dynamic_rtx,
2282 virtual_stack_vars_rtx,
2283 var_end_seq);
2284
2285 fini_vars_expansion ();
2286
2287 /* If there were any artificial non-ignored vars without rtl
2288 found earlier, see if deferred stack allocation hasn't assigned
2289 rtl to them. */
2290 FOR_EACH_VEC_ELT_REVERSE (maybe_local_decls, i, var)
2291 {
2292 rtx rtl = DECL_RTL_IF_SET (var);
2293
2294 /* Keep artificial non-ignored vars in cfun->local_decls
2295 chain until instantiate_decls. */
2296 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
2297 add_local_decl (cfun, var);
2298 }
2299
2300 /* If the target requires that FRAME_OFFSET be aligned, do it. */
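/* For instance, on a target with a 16-byte PREFERRED_STACK_BOUNDARY and a
   downward-growing frame, a frame_offset of -20 is rounded down to -32.  */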
2301 if (STACK_ALIGNMENT_NEEDED)
2302 {
2303 HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
2304 if (FRAME_GROWS_DOWNWARD)
2305 frame_offset = aligned_lower_bound (frame_offset, align);
2306 else
2307 frame_offset = aligned_upper_bound (frame_offset, align);
2308 }
2309
2310 return var_end_seq;
2311 }
2312
2313
2314 /* If we need to produce a detailed dump, print the tree representation
2315 for STMT to the dump file. SINCE is the last RTX after which the RTL
2316 generated for STMT should have been appended. */
2317
2318 static void
2319 maybe_dump_rtl_for_gimple_stmt (gimple *stmt, rtx_insn *since)
2320 {
2321 if (dump_file && (dump_flags & TDF_DETAILS))
2322 {
2323 fprintf (dump_file, "\n;; ");
2324 print_gimple_stmt (dump_file, stmt, 0,
2325 TDF_SLIM | (dump_flags & TDF_LINENO));
2326 fprintf (dump_file, "\n");
2327
2328 print_rtl (dump_file, since ? NEXT_INSN (since) : since);
2329 }
2330 }
2331
2332 /* Maps the blocks that do not contain tree labels to rtx labels. */
2333
2334 static hash_map<basic_block, rtx_code_label *> *lab_rtx_for_bb;
2335
2336 /* Returns the label_rtx expression for a label starting basic block BB. */
2337
2338 static rtx_code_label *
2339 label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
2340 {
2341 gimple_stmt_iterator gsi;
2342 tree lab;
2343
2344 if (bb->flags & BB_RTL)
2345 return block_label (bb);
2346
2347 rtx_code_label **elt = lab_rtx_for_bb->get (bb);
2348 if (elt)
2349 return *elt;
2350
2351 /* Find the tree label if it is present. */
2352
2353 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2354 {
2355 glabel *lab_stmt;
2356
2357 lab_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
2358 if (!lab_stmt)
2359 break;
2360
2361 lab = gimple_label_label (lab_stmt);
2362 if (DECL_NONLOCAL (lab))
2363 break;
2364
2365 return jump_target_rtx (lab);
2366 }
2367
2368 rtx_code_label *l = gen_label_rtx ();
2369 lab_rtx_for_bb->put (bb, l);
2370 return l;
2371 }
2372
2373
2374 /* A subroutine of expand_gimple_cond. Given E, a fallthrough edge
2375 of a basic block where we just expanded the conditional at the end,
2376 possibly clean up the CFG and instruction sequence. LAST is the
2377 last instruction before the just emitted jump sequence. */
2378
2379 static void
2380 maybe_cleanup_end_of_block (edge e, rtx_insn *last)
2381 {
2382 /* Special case: when jumpif decides that the condition is
2383 trivial it emits an unconditional jump (and the necessary
2384 barrier). But we still have two edges, the fallthru one is
2385 wrong. purge_dead_edges would clean this up later. Unfortunately
2386 we have to insert insns (and split edges) before
2387 find_many_sub_basic_blocks and hence before purge_dead_edges.
2388 But splitting edges might create new blocks which depend on the
2389 fact that if there are two edges there's no barrier. So the
2390 barrier would get lost and verify_flow_info would ICE. Instead
2391 of auditing all edge splitters to care for the barrier (which
2392 normally isn't there in a cleaned CFG), fix it here. */
2393 if (BARRIER_P (get_last_insn ()))
2394 {
2395 rtx_insn *insn;
2396 remove_edge (e);
2397 /* Now we have a single successor block; if we have insns to
2398 insert on the remaining edge, we will potentially insert
2399 them at the end of this block (if the dest block isn't feasible)
2400 in order to avoid splitting the edge. This insertion will take
2401 place in front of the last jump. But we might have emitted
2402 multiple jumps (conditional and one unconditional) to the
2403 same destination. Inserting in front of the last one then
2404 is a problem. See PR 40021. We fix this by deleting all
2405 jumps except the last unconditional one. */
2406 insn = PREV_INSN (get_last_insn ());
2407 /* Make sure we have an unconditional jump. Otherwise we're
2408 confused. */
2409 gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
2410 for (insn = PREV_INSN (insn); insn != last;)
2411 {
2412 insn = PREV_INSN (insn);
2413 if (JUMP_P (NEXT_INSN (insn)))
2414 {
2415 if (!any_condjump_p (NEXT_INSN (insn)))
2416 {
2417 gcc_assert (BARRIER_P (NEXT_INSN (NEXT_INSN (insn))));
2418 delete_insn (NEXT_INSN (NEXT_INSN (insn)));
2419 }
2420 delete_insn (NEXT_INSN (insn));
2421 }
2422 }
2423 }
2424 }
2425
2426 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_COND.
2427 Returns a new basic block if we've terminated the current basic
2428 block and created a new one. */
2429
2430 static basic_block
2431 expand_gimple_cond (basic_block bb, gcond *stmt)
2432 {
2433 basic_block new_bb, dest;
2434 edge true_edge;
2435 edge false_edge;
2436 rtx_insn *last2, *last;
2437 enum tree_code code;
2438 tree op0, op1;
2439
2440 code = gimple_cond_code (stmt);
2441 op0 = gimple_cond_lhs (stmt);
2442 op1 = gimple_cond_rhs (stmt);
2443 /* We're sometimes presented with such code:
2444 D.123_1 = x < y;
2445 if (D.123_1 != 0)
2446 ...
2447 This would expand to two comparisons which then later might
2448 be cleaned up by combine. But some pattern matchers like if-conversion
2449 work better when there's only one compare, so make up for this
2450 here as a special exception if TER would have made the same change. */
2451 if (SA.values
2452 && TREE_CODE (op0) == SSA_NAME
2453 && TREE_CODE (TREE_TYPE (op0)) == BOOLEAN_TYPE
2454 && TREE_CODE (op1) == INTEGER_CST
2455 && ((gimple_cond_code (stmt) == NE_EXPR
2456 && integer_zerop (op1))
2457 || (gimple_cond_code (stmt) == EQ_EXPR
2458 && integer_onep (op1)))
2459 && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
2460 {
2461 gimple *second = SSA_NAME_DEF_STMT (op0);
2462 if (gimple_code (second) == GIMPLE_ASSIGN)
2463 {
2464 enum tree_code code2 = gimple_assign_rhs_code (second);
2465 if (TREE_CODE_CLASS (code2) == tcc_comparison)
2466 {
2467 code = code2;
2468 op0 = gimple_assign_rhs1 (second);
2469 op1 = gimple_assign_rhs2 (second);
2470 }
2471 /* If jumps are cheap and the target does not support conditional
2472 compare, turn some more codes into jumpy sequences. */
2473 else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4
2474 && targetm.gen_ccmp_first == NULL)
2475 {
2476 if ((code2 == BIT_AND_EXPR
2477 && TYPE_PRECISION (TREE_TYPE (op0)) == 1
2478 && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
2479 || code2 == TRUTH_AND_EXPR)
2480 {
2481 code = TRUTH_ANDIF_EXPR;
2482 op0 = gimple_assign_rhs1 (second);
2483 op1 = gimple_assign_rhs2 (second);
2484 }
2485 else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
2486 {
2487 code = TRUTH_ORIF_EXPR;
2488 op0 = gimple_assign_rhs1 (second);
2489 op1 = gimple_assign_rhs2 (second);
2490 }
2491 }
2492 }
2493 }
2494
2495 /* If it is beneficial, optimize (x % C1) == C2 or (x % C1) != C2
2496 into (x - C2) * C3 < C4. */
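/* For instance, for a 32-bit unsigned x, "x % 3 == 0" can be tested as
   "x * 0xAAAAAAABu <= 0x55555555u" using the multiplicative inverse of 3
   modulo 2^32; maybe_optimize_mod_cmp performs this kind of rewrite when it
   is expected to be cheaper than the division.  */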
2497 if ((code == EQ_EXPR || code == NE_EXPR)
2498 && TREE_CODE (op0) == SSA_NAME
2499 && TREE_CODE (op1) == INTEGER_CST)
2500 code = maybe_optimize_mod_cmp (code, &op0, &op1);
2501
2502 last2 = last = get_last_insn ();
2503
2504 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2505 set_curr_insn_location (gimple_location (stmt));
2506
2507 /* These flags have no purpose in RTL land. */
2508 true_edge->flags &= ~EDGE_TRUE_VALUE;
2509 false_edge->flags &= ~EDGE_FALSE_VALUE;
2510
2511 /* We can either have a pure conditional jump with one fallthru edge or
2512 two-way jump that needs to be decomposed into two basic blocks. */
2513 if (false_edge->dest == bb->next_bb)
2514 {
2515 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
2516 true_edge->probability);
2517 maybe_dump_rtl_for_gimple_stmt (stmt, last);
2518 if (true_edge->goto_locus != UNKNOWN_LOCATION)
2519 set_curr_insn_location (true_edge->goto_locus);
2520 false_edge->flags |= EDGE_FALLTHRU;
2521 maybe_cleanup_end_of_block (false_edge, last);
2522 return NULL;
2523 }
2524 if (true_edge->dest == bb->next_bb)
2525 {
2526 jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
2527 false_edge->probability);
2528 maybe_dump_rtl_for_gimple_stmt (stmt, last);
2529 if (false_edge->goto_locus != UNKNOWN_LOCATION)
2530 set_curr_insn_location (false_edge->goto_locus);
2531 true_edge->flags |= EDGE_FALLTHRU;
2532 maybe_cleanup_end_of_block (true_edge, last);
2533 return NULL;
2534 }
2535
2536 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
2537 true_edge->probability);
2538 last = get_last_insn ();
2539 if (false_edge->goto_locus != UNKNOWN_LOCATION)
2540 set_curr_insn_location (false_edge->goto_locus);
2541 emit_jump (label_rtx_for_bb (false_edge->dest));
2542
2543 BB_END (bb) = last;
2544 if (BARRIER_P (BB_END (bb)))
2545 BB_END (bb) = PREV_INSN (BB_END (bb));
2546 update_bb_for_insn (bb);
2547
2548 new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2549 dest = false_edge->dest;
2550 redirect_edge_succ (false_edge, new_bb);
2551 false_edge->flags |= EDGE_FALLTHRU;
2552 new_bb->count = false_edge->count ();
2553 loop_p loop = find_common_loop (bb->loop_father, dest->loop_father);
2554 add_bb_to_loop (new_bb, loop);
2555 if (loop->latch == bb
2556 && loop->header == dest)
2557 loop->latch = new_bb;
2558 make_single_succ_edge (new_bb, dest, 0);
2559 if (BARRIER_P (BB_END (new_bb)))
2560 BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
2561 update_bb_for_insn (new_bb);
2562
2563 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2564
2565 if (true_edge->goto_locus != UNKNOWN_LOCATION)
2566 {
2567 set_curr_insn_location (true_edge->goto_locus);
2568 true_edge->goto_locus = curr_insn_location ();
2569 }
2570
2571 return new_bb;
2572 }
2573
2574 /* Mark all calls that can have a transaction restart. */
2575
2576 static void
2577 mark_transaction_restart_calls (gimple *stmt)
2578 {
2579 struct tm_restart_node dummy;
2580 tm_restart_node **slot;
2581
2582 if (!cfun->gimple_df->tm_restart)
2583 return;
2584
2585 dummy.stmt = stmt;
2586 slot = cfun->gimple_df->tm_restart->find_slot (&dummy, NO_INSERT);
2587 if (slot)
2588 {
2589 struct tm_restart_node *n = *slot;
2590 tree list = n->label_or_list;
2591 rtx_insn *insn;
2592
2593 for (insn = next_real_insn (get_last_insn ());
2594 !CALL_P (insn);
2595 insn = next_real_insn (insn))
2596 continue;
2597
2598 if (TREE_CODE (list) == LABEL_DECL)
2599 add_reg_note (insn, REG_TM, label_rtx (list));
2600 else
2601 for (; list ; list = TREE_CHAIN (list))
2602 add_reg_note (insn, REG_TM, label_rtx (TREE_VALUE (list)));
2603 }
2604 }
2605
2606 /* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
2607 statement STMT. */
2608
2609 static void
2610 expand_call_stmt (gcall *stmt)
2611 {
2612 tree exp, decl, lhs;
2613 bool builtin_p;
2614 size_t i;
2615
2616 if (gimple_call_internal_p (stmt))
2617 {
2618 expand_internal_call (stmt);
2619 return;
2620 }
2621
2622 /* If this is a call to a built-in function and it has no effect other
2623 than setting the lhs, try to implement it using an internal function
2624 instead. */
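/* For example, a call to __builtin_popcount whose only effect is setting
   the lhs may be expanded via the internal function IFN_POPCOUNT when the
   target provides a suitable optab for it.  */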
2625 decl = gimple_call_fndecl (stmt);
2626 if (gimple_call_lhs (stmt)
2627 && !gimple_has_side_effects (stmt)
2628 && (optimize || (decl && called_as_built_in (decl))))
2629 {
2630 internal_fn ifn = replacement_internal_fn (stmt);
2631 if (ifn != IFN_LAST)
2632 {
2633 expand_internal_call (ifn, stmt);
2634 return;
2635 }
2636 }
2637
2638 exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);
2639
2640 CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
2641 builtin_p = decl && fndecl_built_in_p (decl);
2642
2643 /* If this is not a builtin function, the function type through which the
2644 call is made may be different from the type of the function. */
2645 if (!builtin_p)
2646 CALL_EXPR_FN (exp)
2647 = fold_convert (build_pointer_type (gimple_call_fntype (stmt)),
2648 CALL_EXPR_FN (exp));
2649
2650 TREE_TYPE (exp) = gimple_call_return_type (stmt);
2651 CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);
2652
2653 for (i = 0; i < gimple_call_num_args (stmt); i++)
2654 {
2655 tree arg = gimple_call_arg (stmt, i);
2656 gimple *def;
2657 /* TER forwards addresses into arguments of builtin functions so we have
2658 a chance to infer more correct alignment information. See PR39954. */
2659 if (builtin_p
2660 && TREE_CODE (arg) == SSA_NAME
2661 && (def = get_gimple_for_ssa_name (arg))
2662 && gimple_assign_rhs_code (def) == ADDR_EXPR)
2663 arg = gimple_assign_rhs1 (def);
2664 CALL_EXPR_ARG (exp, i) = arg;
2665 }
2666
2667 if (gimple_has_side_effects (stmt))
2668 TREE_SIDE_EFFECTS (exp) = 1;
2669
2670 if (gimple_call_nothrow_p (stmt))
2671 TREE_NOTHROW (exp) = 1;
2672
2673 if (gimple_no_warning_p (stmt))
2674 TREE_NO_WARNING (exp) = 1;
2675
2676 CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
2677 CALL_EXPR_MUST_TAIL_CALL (exp) = gimple_call_must_tail_p (stmt);
2678 CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
2679 if (decl
2680 && fndecl_built_in_p (decl, BUILT_IN_NORMAL)
2681 && ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (decl)))
2682 CALL_ALLOCA_FOR_VAR_P (exp) = gimple_call_alloca_for_var_p (stmt);
2683 else
2684 CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
2685 CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
2686 CALL_EXPR_BY_DESCRIPTOR (exp) = gimple_call_by_descriptor_p (stmt);
2687 SET_EXPR_LOCATION (exp, gimple_location (stmt));
2688
2689 /* Ensure RTL is created for debug args. */
2690 if (decl && DECL_HAS_DEBUG_ARGS_P (decl))
2691 {
2692 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (decl);
2693 unsigned int ix;
2694 tree dtemp;
2695
2696 if (debug_args)
2697 for (ix = 1; (*debug_args)->iterate (ix, &dtemp); ix += 2)
2698 {
2699 gcc_assert (TREE_CODE (dtemp) == DEBUG_EXPR_DECL);
2700 expand_debug_expr (dtemp);
2701 }
2702 }
2703
2704 rtx_insn *before_call = get_last_insn ();
2705 lhs = gimple_call_lhs (stmt);
2706 if (lhs)
2707 expand_assignment (lhs, exp, false);
2708 else
2709 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
2710
2711 /* If the gimple call is an indirect call and has the 'nocf_check'
2712 attribute, find the generated CALL insn and mark it so that no
2713 control-flow verification is needed. */
2714 if (gimple_call_nocf_check_p (stmt)
2715 && !gimple_call_fndecl (stmt))
2716 {
2717 rtx_insn *last = get_last_insn ();
2718 while (!CALL_P (last)
2719 && last != before_call)
2720 last = PREV_INSN (last);
2721
2722 if (last != before_call)
2723 add_reg_note (last, REG_CALL_NOCF_CHECK, const0_rtx);
2724 }
2725
2726 mark_transaction_restart_calls (stmt);
2727 }
2728
2729
2730 /* Generate RTL for an asm statement (explicit assembler code).
2731 STRING is a STRING_CST node containing the assembler code text,
2732 or an ADDR_EXPR containing a STRING_CST. VOL nonzero means the
2733 insn is volatile; don't optimize it. */
2734
2735 static void
2736 expand_asm_loc (tree string, int vol, location_t locus)
2737 {
2738 rtx body;
2739
2740 body = gen_rtx_ASM_INPUT_loc (VOIDmode,
2741 ggc_strdup (TREE_STRING_POINTER (string)),
2742 locus);
2743
2744 MEM_VOLATILE_P (body) = vol;
2745
2746 /* Non-empty basic ASM implicitly clobbers memory. */
2747 if (TREE_STRING_LENGTH (string) != 0)
2748 {
2749 rtx asm_op, clob;
2750 unsigned i, nclobbers;
2751 auto_vec<rtx> input_rvec, output_rvec;
2752 auto_vec<const char *> constraints;
2753 auto_vec<rtx> clobber_rvec;
2754 HARD_REG_SET clobbered_regs;
2755 CLEAR_HARD_REG_SET (clobbered_regs);
2756
2757 clob = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
2758 clobber_rvec.safe_push (clob);
2759
2760 if (targetm.md_asm_adjust)
2761 targetm.md_asm_adjust (output_rvec, input_rvec,
2762 constraints, clobber_rvec,
2763 clobbered_regs);
2764
2765 asm_op = body;
2766 nclobbers = clobber_rvec.length ();
2767 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (1 + nclobbers));
2768
2769 XVECEXP (body, 0, 0) = asm_op;
2770 for (i = 0; i < nclobbers; i++)
2771 XVECEXP (body, 0, i + 1) = gen_rtx_CLOBBER (VOIDmode, clobber_rvec[i]);
2772 }
2773
2774 emit_insn (body);
2775 }
2776
2777 /* Return the number of times character C occurs in string S. */
2778 static int
2779 n_occurrences (int c, const char *s)
2780 {
2781 int n = 0;
2782 while (*s)
2783 n += (*s++ == c);
2784 return n;
2785 }
2786
2787 /* A subroutine of expand_asm_operands. Check that all operands have
2788 the same number of alternatives. Return true if so. */
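/* For example, pairing an output constraint "=r,m" with an input constraint
   "r,o" is fine (two alternatives each), whereas pairing "=r,m" with a plain
   "r" is diagnosed, since the alternative counts (comma counts) differ.  */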
2789
2790 static bool
2791 check_operand_nalternatives (const vec<const char *> &constraints)
2792 {
2793 unsigned len = constraints.length();
2794 if (len > 0)
2795 {
2796 int nalternatives = n_occurrences (',', constraints[0]);
2797
2798 if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
2799 {
2800 error ("too many alternatives in %<asm%>");
2801 return false;
2802 }
2803
2804 for (unsigned i = 1; i < len; ++i)
2805 if (n_occurrences (',', constraints[i]) != nalternatives)
2806 {
2807 error ("operand constraints for %<asm%> differ "
2808 "in number of alternatives");
2809 return false;
2810 }
2811 }
2812 return true;
2813 }
2814
2815 /* Check for overlap between registers marked in CLOBBERED_REGS and
2816 anything inappropriate in T. Emit an error and return true if a
2817 conflict is found, false if everything is OK. */
2818
2819 static bool
2820 tree_conflicts_with_clobbers_p (tree t, HARD_REG_SET *clobbered_regs)
2821 {
2822 /* Conflicts between asm-declared register variables and the clobber
2823 list are not allowed. */
2824 tree overlap = tree_overlaps_hard_reg_set (t, clobbered_regs);
2825
2826 if (overlap)
2827 {
2828 error ("%<asm%> specifier for variable %qE conflicts with "
2829 "%<asm%> clobber list",
2830 DECL_NAME (overlap));
2831
2832 /* Reset registerness to stop multiple errors emitted for a single
2833 variable. */
2834 DECL_REGISTER (overlap) = 0;
2835 return true;
2836 }
2837
2838 return false;
2839 }
2840
2841 /* Check that the given REGNO spanning NREGS is a valid
2842 asm clobber operand. Some HW registers cannot be
2843 saved/restored, hence they should not be clobbered by
2844 asm statements. */
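/* For instance, clobbering the PIC register is rejected with an error,
   while listing the stack pointer register (e.g. "sp" on AArch64) only
   triggers a -Wdeprecated warning and records crtl->sp_is_clobbered_by_asm.  */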
2845 static bool
2846 asm_clobber_reg_is_valid (int regno, int nregs, const char *regname)
2847 {
2848 bool is_valid = true;
2849 HARD_REG_SET regset;
2850
2851 CLEAR_HARD_REG_SET (regset);
2852
2853 add_range_to_hard_reg_set (&regset, regno, nregs);
2854
2855 /* Clobbering the PIC register is an error. */
2856 if (PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
2857 && overlaps_hard_reg_set_p (regset, Pmode, PIC_OFFSET_TABLE_REGNUM))
2858 {
2859 /* ??? Diagnose during gimplification? */
2860 error ("PIC register clobbered by %qs in %<asm%>", regname);
2861 is_valid = false;
2862 }
2863 else if (!in_hard_reg_set_p
2864 (accessible_reg_set, reg_raw_mode[regno], regno))
2865 {
2866 /* ??? Diagnose during gimplification? */
2867 error ("the register %qs cannot be clobbered in %<asm%>"
2868 " for the current target", regname);
2869 is_valid = false;
2870 }
2871
2872 /* Clobbering the stack pointer register is deprecated. GCC expects
2873 the value of the stack pointer after an asm statement to be the same
2874 as it was before, so no asm can validly clobber the stack pointer in
2875 the usual sense. Adding the stack pointer to the clobber list has
2876 traditionally had some undocumented and somewhat obscure side-effects. */
2877 if (overlaps_hard_reg_set_p (regset, Pmode, STACK_POINTER_REGNUM))
2878 {
2879 crtl->sp_is_clobbered_by_asm = true;
2880 if (warning (OPT_Wdeprecated, "listing the stack pointer register"
2881 " %qs in a clobber list is deprecated", regname))
2882 inform (input_location, "the value of the stack pointer after"
2883 " an %<asm%> statement must be the same as it was before"
2884 " the statement");
2885 }
2886
2887 return is_valid;
2888 }
2889
2890 /* Generate RTL for an asm statement with arguments.
2891 STRING is the instruction template.
2892 OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
2893 Each output or input has an expression in the TREE_VALUE and
2894 a tree list in TREE_PURPOSE which in turn contains a constraint
2895 name in TREE_VALUE (or NULL_TREE) and a constraint string
2896 in TREE_PURPOSE.
2897 CLOBBERS is a list of STRING_CST nodes each naming a hard register
2898 that is clobbered by this insn.
2899
2900 LABELS is a list of labels, and if LABELS is non-NULL, FALLTHRU_BB
2901 should be the fallthru basic block of the asm goto.
2902
2903 Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
2904 Some elements of OUTPUTS may be replaced with trees representing temporary
2905 values. The caller should copy those temporary values to the originally
2906 specified lvalues.
2907
2908 VOL nonzero means the insn is volatile; don't optimize it. */
2909
2910 static void
2911 expand_asm_stmt (gasm *stmt)
2912 {
2913 class save_input_location
2914 {
2915 location_t old;
2916
2917 public:
2918 explicit save_input_location(location_t where)
2919 {
2920 old = input_location;
2921 input_location = where;
2922 }
2923
2924 ~save_input_location()
2925 {
2926 input_location = old;
2927 }
2928 };
2929
2930 location_t locus = gimple_location (stmt);
2931
2932 if (gimple_asm_input_p (stmt))
2933 {
2934 const char *s = gimple_asm_string (stmt);
2935 tree string = build_string (strlen (s), s);
2936 expand_asm_loc (string, gimple_asm_volatile_p (stmt), locus);
2937 return;
2938 }
2939
2940 /* There are some legacy diagnostics in here; this also avoids a
2941 sixth parameter to targetm.md_asm_adjust. */
2942 save_input_location s_i_l(locus);
2943
2944 unsigned noutputs = gimple_asm_noutputs (stmt);
2945 unsigned ninputs = gimple_asm_ninputs (stmt);
2946 unsigned nlabels = gimple_asm_nlabels (stmt);
2947 unsigned i;
2948
2949 /* ??? Diagnose during gimplification? */
2950 if (ninputs + noutputs + nlabels > MAX_RECOG_OPERANDS)
2951 {
2952 error ("more than %d operands in %<asm%>", MAX_RECOG_OPERANDS);
2953 return;
2954 }
2955
2956 auto_vec<tree, MAX_RECOG_OPERANDS> output_tvec;
2957 auto_vec<tree, MAX_RECOG_OPERANDS> input_tvec;
2958 auto_vec<const char *, MAX_RECOG_OPERANDS> constraints;
2959
2960 /* Copy the gimple vectors into new vectors that we can manipulate. */
2961
2962 output_tvec.safe_grow (noutputs, true);
2963 input_tvec.safe_grow (ninputs, true);
2964 constraints.safe_grow (noutputs + ninputs, true);
2965
2966 for (i = 0; i < noutputs; ++i)
2967 {
2968 tree t = gimple_asm_output_op (stmt, i);
2969 output_tvec[i] = TREE_VALUE (t);
2970 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2971 }
2972 for (i = 0; i < ninputs; i++)
2973 {
2974 tree t = gimple_asm_input_op (stmt, i);
2975 input_tvec[i] = TREE_VALUE (t);
2976 constraints[i + noutputs]
2977 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2978 }
2979
2980 /* ??? Diagnose during gimplification? */
2981 if (! check_operand_nalternatives (constraints))
2982 return;
2983
2984 /* Count the number of meaningful clobbered registers, ignoring what
2985 we would ignore later. */
2986 auto_vec<rtx> clobber_rvec;
2987 HARD_REG_SET clobbered_regs;
2988 CLEAR_HARD_REG_SET (clobbered_regs);
2989
2990 if (unsigned n = gimple_asm_nclobbers (stmt))
2991 {
2992 clobber_rvec.reserve (n);
2993 for (i = 0; i < n; i++)
2994 {
2995 tree t = gimple_asm_clobber_op (stmt, i);
2996 const char *regname = TREE_STRING_POINTER (TREE_VALUE (t));
2997 int nregs, j;
2998
2999 j = decode_reg_name_and_count (regname, &nregs);
3000 if (j < 0)
3001 {
3002 if (j == -2)
3003 {
3004 /* ??? Diagnose during gimplification? */
3005 error ("unknown register name %qs in %<asm%>", regname);
3006 }
3007 else if (j == -4)
3008 {
3009 rtx x = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
3010 clobber_rvec.safe_push (x);
3011 }
3012 else
3013 {
3014 /* Otherwise we should have -1 == empty string
3015 or -3 == cc, which is not a register. */
3016 gcc_assert (j == -1 || j == -3);
3017 }
3018 }
3019 else
3020 for (int reg = j; reg < j + nregs; reg++)
3021 {
3022 if (!asm_clobber_reg_is_valid (reg, nregs, regname))
3023 return;
3024
3025 SET_HARD_REG_BIT (clobbered_regs, reg);
3026 rtx x = gen_rtx_REG (reg_raw_mode[reg], reg);
3027 clobber_rvec.safe_push (x);
3028 }
3029 }
3030 }
3031
3032 /* First pass over inputs and outputs checks validity and sets
3033 mark_addressable if needed. */
3034 /* ??? Diagnose during gimplification? */
3035
3036 for (i = 0; i < noutputs; ++i)
3037 {
3038 tree val = output_tvec[i];
3039 tree type = TREE_TYPE (val);
3040 const char *constraint;
3041 bool is_inout;
3042 bool allows_reg;
3043 bool allows_mem;
3044
3045 /* Try to parse the output constraint. If that fails, there's
3046 no point in going further. */
3047 constraint = constraints[i];
3048 if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
3049 &allows_mem, &allows_reg, &is_inout))
3050 return;
3051
3052 /* If the output is a hard register, verify it doesn't conflict with
3053 any other operand's possible hard register use. */
3054 if (DECL_P (val)
3055 && REG_P (DECL_RTL (val))
3056 && HARD_REGISTER_P (DECL_RTL (val)))
3057 {
3058 unsigned j, output_hregno = REGNO (DECL_RTL (val));
3059 bool early_clobber_p = strchr (constraints[i], '&') != NULL;
3060 unsigned long match;
3061
3062 /* Verify the other outputs do not use the same hard register. */
3063 for (j = i + 1; j < noutputs; ++j)
3064 if (DECL_P (output_tvec[j])
3065 && REG_P (DECL_RTL (output_tvec[j]))
3066 && HARD_REGISTER_P (DECL_RTL (output_tvec[j]))
3067 && output_hregno == REGNO (DECL_RTL (output_tvec[j])))
3068 error ("invalid hard register usage between output operands");
3069
3070 /* Verify matching constraint operands use the same hard register
3071 and that the non-matching constraint operands do not use the same
3072 hard register if the output is an early clobber operand. */
3073 for (j = 0; j < ninputs; ++j)
3074 if (DECL_P (input_tvec[j])
3075 && REG_P (DECL_RTL (input_tvec[j]))
3076 && HARD_REGISTER_P (DECL_RTL (input_tvec[j])))
3077 {
3078 unsigned input_hregno = REGNO (DECL_RTL (input_tvec[j]));
3079 switch (*constraints[j + noutputs])
3080 {
3081 case '0': case '1': case '2': case '3': case '4':
3082 case '5': case '6': case '7': case '8': case '9':
3083 match = strtoul (constraints[j + noutputs], NULL, 10);
3084 break;
3085 default:
3086 match = ULONG_MAX;
3087 break;
3088 }
3089 if (i == match
3090 && output_hregno != input_hregno)
3091 error ("invalid hard register usage between output operand "
3092 "and matching constraint operand");
3093 else if (early_clobber_p
3094 && i != match
3095 && output_hregno == input_hregno)
3096 error ("invalid hard register usage between earlyclobber "
3097 "operand and input operand");
3098 }
3099 }
3100
3101 if (! allows_reg
3102 && (allows_mem
3103 || is_inout
3104 || (DECL_P (val)
3105 && REG_P (DECL_RTL (val))
3106 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
3107 mark_addressable (val);
3108 }
3109
3110 for (i = 0; i < ninputs; ++i)
3111 {
3112 bool allows_reg, allows_mem;
3113 const char *constraint;
3114
3115 constraint = constraints[i + noutputs];
3116 if (! parse_input_constraint (&constraint, i, ninputs, noutputs, 0,
3117 constraints.address (),
3118 &allows_mem, &allows_reg))
3119 return;
3120
3121 if (! allows_reg && allows_mem)
3122 mark_addressable (input_tvec[i]);
3123 }
3124
3125 /* Second pass evaluates arguments. */
3126
3127 /* Make sure stack is consistent for asm goto. */
3128 if (nlabels > 0)
3129 do_pending_stack_adjust ();
3130 int old_generating_concat_p = generating_concat_p;
3131
3132 /* Vector of RTX's of evaluated output operands. */
3133 auto_vec<rtx, MAX_RECOG_OPERANDS> output_rvec;
3134 auto_vec<int, MAX_RECOG_OPERANDS> inout_opnum;
3135 rtx_insn *after_rtl_seq = NULL, *after_rtl_end = NULL;
3136
3137 output_rvec.safe_grow (noutputs, true);
3138
3139 for (i = 0; i < noutputs; ++i)
3140 {
3141 tree val = output_tvec[i];
3142 tree type = TREE_TYPE (val);
3143 bool is_inout, allows_reg, allows_mem, ok;
3144 rtx op;
3145
3146 ok = parse_output_constraint (&constraints[i], i, ninputs,
3147 noutputs, &allows_mem, &allows_reg,
3148 &is_inout);
3149 gcc_assert (ok);
3150
3151 /* If an output operand is not a decl or indirect ref and our constraint
3152 allows a register, make a temporary to act as an intermediate.
3153 Make the asm insn write into that, then we will copy it to
3154 the real output operand. Likewise for promoted variables. */
3155
3156 generating_concat_p = 0;
3157
3158 if ((TREE_CODE (val) == INDIRECT_REF && allows_mem)
3159 || (DECL_P (val)
3160 && (allows_mem || REG_P (DECL_RTL (val)))
3161 && ! (REG_P (DECL_RTL (val))
3162 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
3163 || ! allows_reg
3164 || is_inout
3165 || TREE_ADDRESSABLE (type))
3166 {
3167 op = expand_expr (val, NULL_RTX, VOIDmode,
3168 !allows_reg ? EXPAND_MEMORY : EXPAND_WRITE);
3169 if (MEM_P (op))
3170 op = validize_mem (op);
3171
3172 if (! allows_reg && !MEM_P (op))
3173 error ("output number %d not directly addressable", i);
3174 if ((! allows_mem && MEM_P (op) && GET_MODE (op) != BLKmode)
3175 || GET_CODE (op) == CONCAT)
3176 {
3177 rtx old_op = op;
3178 op = gen_reg_rtx (GET_MODE (op));
3179
3180 generating_concat_p = old_generating_concat_p;
3181
3182 if (is_inout)
3183 emit_move_insn (op, old_op);
3184
3185 push_to_sequence2 (after_rtl_seq, after_rtl_end);
3186 emit_move_insn (old_op, op);
3187 after_rtl_seq = get_insns ();
3188 after_rtl_end = get_last_insn ();
3189 end_sequence ();
3190 }
3191 }
3192 else
3193 {
3194 op = assign_temp (type, 0, 1);
3195 op = validize_mem (op);
3196 if (!MEM_P (op) && TREE_CODE (val) == SSA_NAME)
3197 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (val), op);
3198
3199 generating_concat_p = old_generating_concat_p;
3200
3201 push_to_sequence2 (after_rtl_seq, after_rtl_end);
3202 expand_assignment (val, make_tree (type, op), false);
3203 after_rtl_seq = get_insns ();
3204 after_rtl_end = get_last_insn ();
3205 end_sequence ();
3206 }
3207 output_rvec[i] = op;
3208
3209 if (is_inout)
3210 inout_opnum.safe_push (i);
3211 }
3212
3213 auto_vec<rtx, MAX_RECOG_OPERANDS> input_rvec;
3214 auto_vec<machine_mode, MAX_RECOG_OPERANDS> input_mode;
3215
3216 input_rvec.safe_grow (ninputs, true);
3217 input_mode.safe_grow (ninputs, true);
3218
3219 generating_concat_p = 0;
3220
3221 for (i = 0; i < ninputs; ++i)
3222 {
3223 tree val = input_tvec[i];
3224 tree type = TREE_TYPE (val);
3225 bool allows_reg, allows_mem, ok;
3226 const char *constraint;
3227 rtx op;
3228
3229 constraint = constraints[i + noutputs];
3230 ok = parse_input_constraint (&constraint, i, ninputs, noutputs, 0,
3231 constraints.address (),
3232 &allows_mem, &allows_reg);
3233 gcc_assert (ok);
3234
3235 /* EXPAND_INITIALIZER will not generate code for valid initializer
3236 constants, but will still generate code for other types of operand.
3237 This is the behavior we want for constant constraints. */
3238 op = expand_expr (val, NULL_RTX, VOIDmode,
3239 allows_reg ? EXPAND_NORMAL
3240 : allows_mem ? EXPAND_MEMORY
3241 : EXPAND_INITIALIZER);
3242
3243 /* Never pass a CONCAT to an ASM. */
3244 if (GET_CODE (op) == CONCAT)
3245 op = force_reg (GET_MODE (op), op);
3246 else if (MEM_P (op))
3247 op = validize_mem (op);
3248
3249 if (asm_operand_ok (op, constraint, NULL) <= 0)
3250 {
3251 if (allows_reg && TYPE_MODE (type) != BLKmode)
3252 op = force_reg (TYPE_MODE (type), op);
3253 else if (!allows_mem)
3254 warning (0, "%<asm%> operand %d probably does not match "
3255 "constraints",
3256 i + noutputs);
3257 else if (MEM_P (op))
3258 {
3259 /* We won't recognize either volatile memory or memory
3260 with a queued address as an available memory_operand
3261 at this point. Ignore it: clearly this *is* a memory. */
3262 }
3263 else
3264 gcc_unreachable ();
3265 }
3266 input_rvec[i] = op;
3267 input_mode[i] = TYPE_MODE (type);
3268 }
3269
3270 /* For in-out operands, copy output rtx to input rtx. */
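/* E.g. for an in-out operand declared with a "+r" constraint, the evaluated
   output rtx is appended here as an extra input whose constraint becomes the
   output's operand number (such as "0"), the usual matching-constraint
   representation.  */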
3271 unsigned ninout = inout_opnum.length();
3272 for (i = 0; i < ninout; i++)
3273 {
3274 int j = inout_opnum[i];
3275 rtx o = output_rvec[j];
3276
3277 input_rvec.safe_push (o);
3278 input_mode.safe_push (GET_MODE (o));
3279
3280 char buffer[16];
3281 sprintf (buffer, "%d", j);
3282 constraints.safe_push (ggc_strdup (buffer));
3283 }
3284 ninputs += ninout;
3285
3286 /* Sometimes we wish to automatically clobber registers across an asm.
3287 Case in point is when the i386 backend moved from cc0 to a hard reg --
3288 maintaining source-level compatibility means automatically clobbering
3289 the flags register. */
3290 rtx_insn *after_md_seq = NULL;
3291 if (targetm.md_asm_adjust)
3292 after_md_seq = targetm.md_asm_adjust (output_rvec, input_rvec,
3293 constraints, clobber_rvec,
3294 clobbered_regs);
3295
3296 /* Do not allow the hook to change the output and input count,
3297 lest it mess up the operand numbering. */
3298 gcc_assert (output_rvec.length() == noutputs);
3299 gcc_assert (input_rvec.length() == ninputs);
3300 gcc_assert (constraints.length() == noutputs + ninputs);
3301
3302 /* But it certainly can adjust the clobbers. */
3303 unsigned nclobbers = clobber_rvec.length ();
3304
3305 /* Third pass checks for easy conflicts. */
3306 /* ??? Why are we doing this on trees instead of rtx? */
3307
3308 bool clobber_conflict_found = 0;
3309 for (i = 0; i < noutputs; ++i)
3310 if (tree_conflicts_with_clobbers_p (output_tvec[i], &clobbered_regs))
3311 clobber_conflict_found = 1;
3312 for (i = 0; i < ninputs - ninout; ++i)
3313 if (tree_conflicts_with_clobbers_p (input_tvec[i], &clobbered_regs))
3314 clobber_conflict_found = 1;
3315
3316 /* Make vectors for the expression-rtx, constraint strings,
3317 and named operands. */
3318
3319 rtvec argvec = rtvec_alloc (ninputs);
3320 rtvec constraintvec = rtvec_alloc (ninputs);
3321 rtvec labelvec = rtvec_alloc (nlabels);
3322
3323 rtx body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
3324 : GET_MODE (output_rvec[0])),
3325 ggc_strdup (gimple_asm_string (stmt)),
3326 "", 0, argvec, constraintvec,
3327 labelvec, locus);
3328 MEM_VOLATILE_P (body) = gimple_asm_volatile_p (stmt);
3329
3330 for (i = 0; i < ninputs; ++i)
3331 {
3332 ASM_OPERANDS_INPUT (body, i) = input_rvec[i];
3333 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
3334 = gen_rtx_ASM_INPUT_loc (input_mode[i],
3335 constraints[i + noutputs],
3336 locus);
3337 }
3338
3339 /* Copy labels to the vector. */
3340 rtx_code_label *fallthru_label = NULL;
3341 if (nlabels > 0)
3342 {
3343 basic_block fallthru_bb = NULL;
3344 edge fallthru = find_fallthru_edge (gimple_bb (stmt)->succs);
3345 if (fallthru)
3346 fallthru_bb = fallthru->dest;
3347
3348 for (i = 0; i < nlabels; ++i)
3349 {
3350 tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
3351 rtx_insn *r;
3352 /* If asm goto has any labels in the fallthru basic block, use
3353 a label that we emit immediately after the asm goto. Expansion
3354 may insert further instructions into the same basic block after
3355 asm goto and if we don't do this, insertion of instructions on
3356 the fallthru edge might misbehave. See PR58670. */
3357 if (fallthru_bb && label_to_block (cfun, label) == fallthru_bb)
3358 {
3359 if (fallthru_label == NULL_RTX)
3360 fallthru_label = gen_label_rtx ();
3361 r = fallthru_label;
3362 }
3363 else
3364 r = label_rtx (label);
3365 ASM_OPERANDS_LABEL (body, i) = gen_rtx_LABEL_REF (Pmode, r);
3366 }
3367 }
3368
3369 /* Now, for each output, construct an rtx
3370 (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
3371 ARGVEC CONSTRAINTS OPNAMES))
3372 If there is more than one, put them inside a PARALLEL. */
3373
3374 if (nlabels > 0 && nclobbers == 0)
3375 {
3376 gcc_assert (noutputs == 0);
3377 emit_jump_insn (body);
3378 }
3379 else if (noutputs == 0 && nclobbers == 0)
3380 {
3381 /* No output operands: put in a raw ASM_OPERANDS rtx. */
3382 emit_insn (body);
3383 }
3384 else if (noutputs == 1 && nclobbers == 0)
3385 {
3386 ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = constraints[0];
3387 emit_insn (gen_rtx_SET (output_rvec[0], body));
3388 }
3389 else
3390 {
3391 rtx obody = body;
3392 int num = noutputs;
3393
3394 if (num == 0)
3395 num = 1;
3396
3397 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
3398
3399 /* For each output operand, store a SET. */
3400 for (i = 0; i < noutputs; ++i)
3401 {
3402 rtx src, o = output_rvec[i];
3403 if (i == 0)
3404 {
3405 ASM_OPERANDS_OUTPUT_CONSTRAINT (obody) = constraints[0];
3406 src = obody;
3407 }
3408 else
3409 {
3410 src = gen_rtx_ASM_OPERANDS (GET_MODE (o),
3411 ASM_OPERANDS_TEMPLATE (obody),
3412 constraints[i], i, argvec,
3413 constraintvec, labelvec, locus);
3414 MEM_VOLATILE_P (src) = gimple_asm_volatile_p (stmt);
3415 }
3416 XVECEXP (body, 0, i) = gen_rtx_SET (o, src);
3417 }
3418
3419 /* If there are no outputs (but there are some clobbers)
3420 store the bare ASM_OPERANDS into the PARALLEL. */
3421 if (i == 0)
3422 XVECEXP (body, 0, i++) = obody;
3423
3424 /* Store (clobber REG) for each clobbered register specified. */
3425 for (unsigned j = 0; j < nclobbers; ++j)
3426 {
3427 rtx clobbered_reg = clobber_rvec[j];
3428
3429 /* Do a sanity check for overlap between clobbers and the respective
3430 inputs and outputs that hasn't been handled. Such overlap
3431 should have been detected and reported above. */
3432 if (!clobber_conflict_found && REG_P (clobbered_reg))
3433 {
3434 /* We test the old body (obody) contents to avoid
3435 tripping over the under-construction body. */
3436 for (unsigned k = 0; k < noutputs; ++k)
3437 if (reg_overlap_mentioned_p (clobbered_reg, output_rvec[k]))
3438 internal_error ("%<asm%> clobber conflict with "
3439 "output operand");
3440
3441 for (unsigned k = 0; k < ninputs - ninout; ++k)
3442 if (reg_overlap_mentioned_p (clobbered_reg, input_rvec[k]))
3443 internal_error ("%<asm%> clobber conflict with "
3444 "input operand");
3445 }
3446
3447 XVECEXP (body, 0, i++) = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
3448 }
3449
3450 if (nlabels > 0)
3451 emit_jump_insn (body);
3452 else
3453 emit_insn (body);
3454 }
3455
3456 generating_concat_p = old_generating_concat_p;
3457
3458 if (fallthru_label)
3459 emit_label (fallthru_label);
3460
3461 if (after_md_seq)
3462 emit_insn (after_md_seq);
3463 if (after_rtl_seq)
3464 emit_insn (after_rtl_seq);
3465
3466 free_temp_slots ();
3467 crtl->has_asm_statement = 1;
3468 }
3469
3470 /* Emit code to jump to the address
3471 specified by the pointer expression EXP. */
3472
3473 static void
3474 expand_computed_goto (tree exp)
3475 {
3476 rtx x = expand_normal (exp);
3477
3478 do_pending_stack_adjust ();
3479 emit_indirect_jump (x);
3480 }
3481
3482 /* Generate RTL code for a `goto' statement with target label LABEL.
3483 LABEL should be a LABEL_DECL tree node that was or will later be
3484 defined with `expand_label'. */
3485
3486 static void
3487 expand_goto (tree label)
3488 {
3489 if (flag_checking)
3490 {
3491 /* Check for a nonlocal goto to a containing function. Should have
3492 gotten translated to __builtin_nonlocal_goto. */
3493 tree context = decl_function_context (label);
3494 gcc_assert (!context || context == current_function_decl);
3495 }
3496
3497 emit_jump (jump_target_rtx (label));
3498 }
3499
3500 /* Output a return with no value. */
3501
3502 static void
3503 expand_null_return_1 (void)
3504 {
3505 clear_pending_stack_adjust ();
3506 do_pending_stack_adjust ();
3507 emit_jump (return_label);
3508 }
3509
3510 /* Generate RTL to return from the current function, with no value.
3511 (That is, we do not do anything about returning any value.) */
3512
3513 void
3514 expand_null_return (void)
3515 {
3516 /* If this function was declared to return a value, but we
3517 didn't, clobber the return registers so that they are not
3518 propagated live to the rest of the function. */
3519 clobber_return_register ();
3520
3521 expand_null_return_1 ();
3522 }
3523
3524 /* Generate RTL to return from the current function, with value VAL. */
3525
3526 static void
3527 expand_value_return (rtx val)
3528 {
3529 /* Copy the value to the return location unless it's already there. */
3530
3531 tree decl = DECL_RESULT (current_function_decl);
3532 rtx return_reg = DECL_RTL (decl);
3533 if (return_reg != val)
3534 {
3535 tree funtype = TREE_TYPE (current_function_decl);
3536 tree type = TREE_TYPE (decl);
3537 int unsignedp = TYPE_UNSIGNED (type);
3538 machine_mode old_mode = DECL_MODE (decl);
3539 machine_mode mode;
3540 if (DECL_BY_REFERENCE (decl))
3541 mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 2);
3542 else
3543 mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 1);
3544
3545 if (mode != old_mode)
3546 val = convert_modes (mode, old_mode, val, unsignedp);
3547
3548 if (GET_CODE (return_reg) == PARALLEL)
3549 emit_group_load (return_reg, val, type, int_size_in_bytes (type));
3550 else
3551 emit_move_insn (return_reg, val);
3552 }
3553
3554 expand_null_return_1 ();
3555 }
3556
3557 /* Generate RTL to evaluate the expression RETVAL and return it
3558 from the current function. */
3559
3560 static void
3561 expand_return (tree retval)
3562 {
3563 rtx result_rtl;
3564 rtx val = 0;
3565 tree retval_rhs;
3566
3567 /* If function wants no value, give it none. */
3568 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
3569 {
3570 expand_normal (retval);
3571 expand_null_return ();
3572 return;
3573 }
3574
3575 if (retval == error_mark_node)
3576 {
3577 /* Treat this like a return of no value from a function that
3578 returns a value. */
3579 expand_null_return ();
3580 return;
3581 }
3582 else if ((TREE_CODE (retval) == MODIFY_EXPR
3583 || TREE_CODE (retval) == INIT_EXPR)
3584 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
3585 retval_rhs = TREE_OPERAND (retval, 1);
3586 else
3587 retval_rhs = retval;
3588
3589 result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
3590
3591 /* If we are returning the RESULT_DECL, then the value has already
3592 been stored into it, so we don't have to do anything special. */
3593 if (TREE_CODE (retval_rhs) == RESULT_DECL)
3594 expand_value_return (result_rtl);
3595
3596 /* If the result is an aggregate that is being returned in one (or more)
3597 registers, load the registers here. */
3598
3599 else if (retval_rhs != 0
3600 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
3601 && REG_P (result_rtl))
3602 {
3603 val = copy_blkmode_to_reg (GET_MODE (result_rtl), retval_rhs);
3604 if (val)
3605 {
3606 /* Use the mode of the result value on the return register. */
3607 PUT_MODE (result_rtl, GET_MODE (val));
3608 expand_value_return (val);
3609 }
3610 else
3611 expand_null_return ();
3612 }
3613 else if (retval_rhs != 0
3614 && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
3615 && (REG_P (result_rtl)
3616 || (GET_CODE (result_rtl) == PARALLEL)))
3617 {
3618 /* Compute the return value into a temporary (usually a pseudo reg). */
3619 val
3620 = assign_temp (TREE_TYPE (DECL_RESULT (current_function_decl)), 0, 1);
3621 val = expand_expr (retval_rhs, val, GET_MODE (val), EXPAND_NORMAL);
3622 val = force_not_mem (val);
3623 expand_value_return (val);
3624 }
3625 else
3626 {
3627 /* No hard reg used; calculate value into hard return reg. */
3628 expand_expr (retval, const0_rtx, VOIDmode, EXPAND_NORMAL);
3629 expand_value_return (result_rtl);
3630 }
3631 }
3632
3633 /* Expand a clobber of LHS. If LHS is stored in a multi-part
3634 register, tell the rtl optimizers that its value is no longer
3635 needed. */
3636
3637 static void
3638 expand_clobber (tree lhs)
3639 {
3640 if (DECL_P (lhs))
3641 {
3642 rtx decl_rtl = DECL_RTL_IF_SET (lhs);
3643 if (decl_rtl && REG_P (decl_rtl))
3644 {
3645 machine_mode decl_mode = GET_MODE (decl_rtl);
3646 if (maybe_gt (GET_MODE_SIZE (decl_mode),
3647 REGMODE_NATURAL_SIZE (decl_mode)))
3648 emit_clobber (decl_rtl);
3649 }
3650 }
3651 }
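
/* Illustration, assuming a typical 64-bit target: a TImode pseudo is
   wider than REGMODE_NATURAL_SIZE for its mode, so the clobber above is
   emitted and the optimizers may drop both halves; a single-word DImode
   pseudo is skipped, since no multi-part liveness needs to be cut
   short.  */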
3652
3653 /* A subroutine of expand_gimple_stmt, expanding one gimple statement
3654 STMT that doesn't require special handling for outgoing edges. That
3655 is, no tail calls and no GIMPLE_COND. */
3656
3657 static void
3658 expand_gimple_stmt_1 (gimple *stmt)
3659 {
3660 tree op0;
3661
3662 set_curr_insn_location (gimple_location (stmt));
3663
3664 switch (gimple_code (stmt))
3665 {
3666 case GIMPLE_GOTO:
3667 op0 = gimple_goto_dest (stmt);
3668 if (TREE_CODE (op0) == LABEL_DECL)
3669 expand_goto (op0);
3670 else
3671 expand_computed_goto (op0);
3672 break;
3673 case GIMPLE_LABEL:
3674 expand_label (gimple_label_label (as_a <glabel *> (stmt)));
3675 break;
3676 case GIMPLE_NOP:
3677 case GIMPLE_PREDICT:
3678 break;
3679 case GIMPLE_SWITCH:
3680 {
3681 gswitch *swtch = as_a <gswitch *> (stmt);
3682 if (gimple_switch_num_labels (swtch) == 1)
3683 expand_goto (CASE_LABEL (gimple_switch_default_label (swtch)));
3684 else
3685 expand_case (swtch);
3686 }
3687 break;
3688 case GIMPLE_ASM:
3689 expand_asm_stmt (as_a <gasm *> (stmt));
3690 break;
3691 case GIMPLE_CALL:
3692 expand_call_stmt (as_a <gcall *> (stmt));
3693 break;
3694
3695 case GIMPLE_RETURN:
3696 {
3697 op0 = gimple_return_retval (as_a <greturn *> (stmt));
3698
3699 /* If a return doesn't have a location, it very likely represents
3700 multiple user returns so we cannot let it inherit the location
3701 of the last statement of the previous basic block in RTL. */
3702 if (!gimple_has_location (stmt))
3703 set_curr_insn_location (cfun->function_end_locus);
3704
3705 if (op0 && op0 != error_mark_node)
3706 {
3707 tree result = DECL_RESULT (current_function_decl);
3708
3709 /* If we are not returning the current function's RESULT_DECL,
3710 build an assignment to it. */
3711 if (op0 != result)
3712 {
3713 /* I believe that a function's RESULT_DECL is unique. */
3714 gcc_assert (TREE_CODE (op0) != RESULT_DECL);
3715
3716 /* ??? We'd like to simply use expand_assignment here,
3717 but this fails if the value is of BLKmode but the return
3718 decl is a register. expand_return has special handling
3719 for this combination, which eventually should move
3720 to common code. See comments there. Until then, let's
3721 build a modify expression :-/ */
3722 op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
3723 result, op0);
3724 }
3725 }
3726
3727 if (!op0)
3728 expand_null_return ();
3729 else
3730 expand_return (op0);
3731 }
3732 break;
3733
3734 case GIMPLE_ASSIGN:
3735 {
3736 gassign *assign_stmt = as_a <gassign *> (stmt);
3737 tree lhs = gimple_assign_lhs (assign_stmt);
3738
3739 /* Tree expand used to fiddle with |= and &= of two bitfield
3740 COMPONENT_REFs here. This can't happen with gimple; the LHS
3741 of binary assigns must be a gimple reg. */
3742
3743 if (TREE_CODE (lhs) != SSA_NAME
3744 || gimple_assign_rhs_class (assign_stmt) == GIMPLE_SINGLE_RHS)
3745 {
3746 tree rhs = gimple_assign_rhs1 (assign_stmt);
3747 gcc_assert (gimple_assign_rhs_class (assign_stmt)
3748 == GIMPLE_SINGLE_RHS);
3749 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs)
3750 /* Do not put locations on possibly shared trees. */
3751 && !is_gimple_min_invariant (rhs))
3752 SET_EXPR_LOCATION (rhs, gimple_location (stmt));
3753 if (TREE_CLOBBER_P (rhs))
3754 /* This is a clobber to mark the going out of scope for
3755 this LHS. */
3756 expand_clobber (lhs);
3757 else
3758 expand_assignment (lhs, rhs,
3759 gimple_assign_nontemporal_move_p (
3760 assign_stmt));
3761 }
3762 else
3763 {
3764 rtx target, temp;
3765 bool nontemporal = gimple_assign_nontemporal_move_p (assign_stmt);
3766 struct separate_ops ops;
3767 bool promoted = false;
3768
3769 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
3770 if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3771 promoted = true;
3772
3773 ops.code = gimple_assign_rhs_code (assign_stmt);
3774 ops.type = TREE_TYPE (lhs);
3775 switch (get_gimple_rhs_class (ops.code))
3776 {
3777 case GIMPLE_TERNARY_RHS:
3778 ops.op2 = gimple_assign_rhs3 (assign_stmt);
3779 /* Fallthru */
3780 case GIMPLE_BINARY_RHS:
3781 ops.op1 = gimple_assign_rhs2 (assign_stmt);
3782 /* Fallthru */
3783 case GIMPLE_UNARY_RHS:
3784 ops.op0 = gimple_assign_rhs1 (assign_stmt);
3785 break;
3786 default:
3787 gcc_unreachable ();
3788 }
3789 ops.location = gimple_location (stmt);
3790
3791 /* If we want to use a nontemporal store, force the value into a
3792 register first. If we store into a promoted register,
3793 don't expand directly to target. */
3794 temp = nontemporal || promoted ? NULL_RTX : target;
3795 temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
3796 EXPAND_NORMAL);
3797
3798 if (temp == target)
3799 ;
3800 else if (promoted)
3801 {
3802 int unsignedp = SUBREG_PROMOTED_SIGN (target);
3803 /* If TEMP is a VOIDmode constant, use convert_modes to make
3804 sure that we properly convert it. */
3805 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3806 {
3807 temp = convert_modes (GET_MODE (target),
3808 TYPE_MODE (ops.type),
3809 temp, unsignedp);
3810 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3811 GET_MODE (target), temp, unsignedp);
3812 }
3813
3814 convert_move (SUBREG_REG (target), temp, unsignedp);
3815 }
3816 else if (nontemporal && emit_storent_insn (target, temp))
3817 ;
3818 else
3819 {
3820 temp = force_operand (temp, target);
3821 if (temp != target)
3822 emit_move_insn (target, temp);
3823 }
3824 }
3825 }
3826 break;
3827
3828 default:
3829 gcc_unreachable ();
3830 }
3831 }
3832
3833 /* Expand one gimple statement STMT and return the last RTL instruction
3834 before any of the newly generated ones.
3835
3836 In addition to generating the necessary RTL instructions this also
3837 sets REG_EH_REGION notes if necessary and sets the current source
3838 location for diagnostics. */
3839
3840 static rtx_insn *
3841 expand_gimple_stmt (gimple *stmt)
3842 {
3843 location_t saved_location = input_location;
3844 rtx_insn *last = get_last_insn ();
3845 int lp_nr;
3846
3847 gcc_assert (cfun);
3848
3849 /* We need to save and restore the current source location so that errors
3850 discovered during expansion are emitted with the right location. But
3851 it would be better if the diagnostic routines used the source location
3852 embedded in the tree nodes rather than globals. */
3853 if (gimple_has_location (stmt))
3854 input_location = gimple_location (stmt);
3855
3856 expand_gimple_stmt_1 (stmt);
3857
3858 /* Free any temporaries used to evaluate this statement. */
3859 free_temp_slots ();
3860
3861 input_location = saved_location;
3862
3863 /* Mark all insns that may trap. */
3864 lp_nr = lookup_stmt_eh_lp (stmt);
3865 if (lp_nr)
3866 {
3867 rtx_insn *insn;
3868 for (insn = next_real_insn (last); insn;
3869 insn = next_real_insn (insn))
3870 {
3871 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
3872 /* If we want exceptions for non-call insns, any
3873 may_trap_p instruction may throw. */
3874 && GET_CODE (PATTERN (insn)) != CLOBBER
3875 && GET_CODE (PATTERN (insn)) != USE
3876 && insn_could_throw_p (insn))
3877 make_reg_eh_region_note (insn, 0, lp_nr);
3878 }
3879 }
3880
3881 return last;
3882 }
3883
3884 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL
3885 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
3886 generated a tail call (something that might be denied by the ABI
3887 rules governing the call; see calls.c).
3888
3889 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
3890 can still reach the rest of BB. The case here is __builtin_sqrt,
3891 where the NaN result goes through the external function (with a
3892 tailcall) and the normal result happens via a sqrt instruction. */
3893
3894 static basic_block
3895 expand_gimple_tailcall (basic_block bb, gcall *stmt, bool *can_fallthru)
3896 {
3897 rtx_insn *last2, *last;
3898 edge e;
3899 edge_iterator ei;
3900 profile_probability probability;
3901
3902 last2 = last = expand_gimple_stmt (stmt);
3903
3904 for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
3905 if (CALL_P (last) && SIBLING_CALL_P (last))
3906 goto found;
3907
3908 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
3909
3910 *can_fallthru = true;
3911 return NULL;
3912
3913 found:
3914 /* ??? Wouldn't it be better to just reset any pending stack adjust?
3915 Any instructions emitted here are about to be deleted. */
3916 do_pending_stack_adjust ();
3917
3918 /* Remove any non-eh, non-abnormal edges that don't go to exit. */
3919 /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be
3920 EH or abnormal edges, we shouldn't have created a tail call in
3921 the first place. So it seems to me we should just be removing
3922 all edges here, or redirecting the existing fallthru edge to
3923 the exit block. */
3924
3925 probability = profile_probability::never ();
3926
3927 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3928 {
3929 if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
3930 {
3931 if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
3932 e->dest->count -= e->count ();
3933 probability += e->probability;
3934 remove_edge (e);
3935 }
3936 else
3937 ei_next (&ei);
3938 }
3939
3940 /* This is somewhat ugly: the call_expr expander often emits instructions
3941 after the sibcall (to perform the function return). These confuse the
3942 find_many_sub_basic_blocks code, so we need to get rid of them. */
3943 last = NEXT_INSN (last);
3944 gcc_assert (BARRIER_P (last));
3945
3946 *can_fallthru = false;
3947 while (NEXT_INSN (last))
3948 {
3949 /* For instance, the sqrt builtin expander may expand an `if' with a
3950 sibcall in the `then' arm and a label for the `else' arm. */
3951 if (LABEL_P (NEXT_INSN (last)))
3952 {
3953 *can_fallthru = true;
3954 break;
3955 }
3956 delete_insn (NEXT_INSN (last));
3957 }
3958
3959 e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_ABNORMAL
3960 | EDGE_SIBCALL);
3961 e->probability = probability;
3962 BB_END (bb) = last;
3963 update_bb_for_insn (bb);
3964
3965 if (NEXT_INSN (last))
3966 {
3967 bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
3968
3969 last = BB_END (bb);
3970 if (BARRIER_P (last))
3971 BB_END (bb) = PREV_INSN (last);
3972 }
3973
3974 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
3975
3976 return bb;
3977 }
3978
3979 /* Return the difference between the floor and the truncated result of
3980 a signed division by OP1 with remainder MOD. */
3981 static rtx
3982 floor_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
3983 {
3984 /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
3985 return gen_rtx_IF_THEN_ELSE
3986 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3987 gen_rtx_IF_THEN_ELSE
3988 (mode, gen_rtx_LT (BImode,
3989 gen_rtx_DIV (mode, op1, mod),
3990 const0_rtx),
3991 constm1_rtx, const0_rtx),
3992 const0_rtx);
3993 }
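
/* Worked example (illustrative values): for -7 / 2 the truncated
   quotient is -3 with remainder MOD = -1, while the floor quotient is
   -4. MOD != 0 and OP1 / MOD = 2 / -1 < 0, so the adjustment is -1.  */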
3994
3995 /* Return the difference between the ceil and the truncated result of
3996 a signed division by OP1 with remainder MOD. */
3997 static rtx
3998 ceil_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
3999 {
4000 /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
4001 return gen_rtx_IF_THEN_ELSE
4002 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
4003 gen_rtx_IF_THEN_ELSE
4004 (mode, gen_rtx_GT (BImode,
4005 gen_rtx_DIV (mode, op1, mod),
4006 const0_rtx),
4007 const1_rtx, const0_rtx),
4008 const0_rtx);
4009 }
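
/* Likewise, for 7 / 2 the truncated quotient is 3 with remainder
   MOD = 1, while the ceiling quotient is 4. MOD != 0 and
   OP1 / MOD = 2 / 1 > 0, so the adjustment is +1.  */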
4010
4011 /* Return the difference between the ceil and the truncated result of
4012 an unsigned division by OP1 with remainder MOD. */
4013 static rtx
4014 ceil_udiv_adjust (machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
4015 {
4016 /* (mod != 0 ? 1 : 0) */
4017 return gen_rtx_IF_THEN_ELSE
4018 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
4019 const1_rtx, const0_rtx);
4020 }
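
/* E.g., for unsigned 7 / 2 the truncated quotient is 3 with remainder
   MOD = 1; any nonzero remainder means the ceiling is one higher, so
   the adjustment is 1.  */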
4021
4022 /* Return the difference between the rounded and the truncated result
4023 of a signed division by OP1 with remainder MOD. Halfway cases are
4024 rounded away from zero, rather than to the nearest even number. */
4025 static rtx
4026 round_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
4027 {
4028 /* (abs (mod) >= abs (op1) - abs (mod)
4029 ? (op1 / mod > 0 ? 1 : -1)
4030 : 0) */
4031 return gen_rtx_IF_THEN_ELSE
4032 (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
4033 gen_rtx_MINUS (mode,
4034 gen_rtx_ABS (mode, op1),
4035 gen_rtx_ABS (mode, mod))),
4036 gen_rtx_IF_THEN_ELSE
4037 (mode, gen_rtx_GT (BImode,
4038 gen_rtx_DIV (mode, op1, mod),
4039 const0_rtx),
4040 const1_rtx, constm1_rtx),
4041 const0_rtx);
4042 }
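
/* Worked example (illustrative values): for 7 / 2 the truncated
   quotient is 3 with remainder MOD = 1; abs (MOD) = 1 is not less than
   abs (OP1) - abs (MOD) = 1 and OP1 / MOD > 0, so the adjustment is +1
   and the rounded result is 4 (the halfway case 3.5 rounds away from
   zero).  */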
4043
4044 /* Return the difference between the rounded and the truncated result
4045 of an unsigned division by OP1 with remainder MOD. Halfway cases
4046 are rounded away from zero, rather than to the nearest even
4047 number. */
4048 static rtx
4049 round_udiv_adjust (machine_mode mode, rtx mod, rtx op1)
4050 {
4051 /* (mod >= op1 - mod ? 1 : 0) */
4052 return gen_rtx_IF_THEN_ELSE
4053 (mode, gen_rtx_GE (BImode, mod,
4054 gen_rtx_MINUS (mode, op1, mod)),
4055 const1_rtx, const0_rtx);
4056 }
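
/* E.g., for unsigned 7 / 2 the remainder MOD = 1 satisfies
   MOD >= OP1 - MOD = 1, so the adjustment is 1 and the rounded result
   is 4.  */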
4057
4058 /* Convert X to MODE, which must be Pmode or ptr_mode, without emitting
4059 any rtl. */
4060
4061 static rtx
4062 convert_debug_memory_address (scalar_int_mode mode, rtx x,
4063 addr_space_t as)
4064 {
4065 #ifndef POINTERS_EXTEND_UNSIGNED
4066 gcc_assert (mode == Pmode
4067 || mode == targetm.addr_space.address_mode (as));
4068 gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
4069 #else
4070 rtx temp;
4071
4072 gcc_assert (targetm.addr_space.valid_pointer_mode (mode, as));
4073
4074 if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
4075 return x;
4076
4077 /* X must have some form of address mode already. */
4078 scalar_int_mode xmode = as_a <scalar_int_mode> (GET_MODE (x));
4079 if (GET_MODE_PRECISION (mode) < GET_MODE_PRECISION (xmode))
4080 x = lowpart_subreg (mode, x, xmode);
4081 else if (POINTERS_EXTEND_UNSIGNED > 0)
4082 x = gen_rtx_ZERO_EXTEND (mode, x);
4083 else if (!POINTERS_EXTEND_UNSIGNED)
4084 x = gen_rtx_SIGN_EXTEND (mode, x);
4085 else
4086 {
4087 switch (GET_CODE (x))
4088 {
4089 case SUBREG:
4090 if ((SUBREG_PROMOTED_VAR_P (x)
4091 || (REG_P (SUBREG_REG (x)) && REG_POINTER (SUBREG_REG (x)))
4092 || (GET_CODE (SUBREG_REG (x)) == PLUS
4093 && REG_P (XEXP (SUBREG_REG (x), 0))
4094 && REG_POINTER (XEXP (SUBREG_REG (x), 0))
4095 && CONST_INT_P (XEXP (SUBREG_REG (x), 1))))
4096 && GET_MODE (SUBREG_REG (x)) == mode)
4097 return SUBREG_REG (x);
4098 break;
4099 case LABEL_REF:
4100 temp = gen_rtx_LABEL_REF (mode, label_ref_label (x));
4101 LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
4102 return temp;
4103 case SYMBOL_REF:
4104 temp = shallow_copy_rtx (x);
4105 PUT_MODE (temp, mode);
4106 return temp;
4107 case CONST:
4108 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
4109 if (temp)
4110 temp = gen_rtx_CONST (mode, temp);
4111 return temp;
4112 case PLUS:
4113 case MINUS:
4114 if (CONST_INT_P (XEXP (x, 1)))
4115 {
4116 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
4117 if (temp)
4118 return gen_rtx_fmt_ee (GET_CODE (x), mode, temp, XEXP (x, 1));
4119 }
4120 break;
4121 default:
4122 break;
4123 }
4124 /* Don't know how to express ptr_extend as an operation in debug info. */
4125 return NULL;
4126 }
4127 #endif /* POINTERS_EXTEND_UNSIGNED */
4128
4129 return x;
4130 }
4131
4132 /* Map from SSA_NAMEs to corresponding DEBUG_EXPR_DECLs created
4133 by avoid_deep_ter_for_debug. */
4134
4135 static hash_map<tree, tree> *deep_ter_debug_map;
4136
4137 /* Split too-deep TER chains for debug stmts using debug temporaries. */
4138
4139 static void
4140 avoid_deep_ter_for_debug (gimple *stmt, int depth)
4141 {
4142 use_operand_p use_p;
4143 ssa_op_iter iter;
4144 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
4145 {
4146 tree use = USE_FROM_PTR (use_p);
4147 if (TREE_CODE (use) != SSA_NAME || SSA_NAME_IS_DEFAULT_DEF (use))
4148 continue;
4149 gimple *g = get_gimple_for_ssa_name (use);
4150 if (g == NULL)
4151 continue;
4152 if (depth > 6 && !stmt_ends_bb_p (g))
4153 {
4154 if (deep_ter_debug_map == NULL)
4155 deep_ter_debug_map = new hash_map<tree, tree>;
4156
4157 tree &vexpr = deep_ter_debug_map->get_or_insert (use);
4158 if (vexpr != NULL)
4159 continue;
4160 vexpr = make_node (DEBUG_EXPR_DECL);
4161 gimple *def_temp = gimple_build_debug_bind (vexpr, use, g);
4162 DECL_ARTIFICIAL (vexpr) = 1;
4163 TREE_TYPE (vexpr) = TREE_TYPE (use);
4164 SET_DECL_MODE (vexpr, TYPE_MODE (TREE_TYPE (use)));
4165 gimple_stmt_iterator gsi = gsi_for_stmt (g);
4166 gsi_insert_after (&gsi, def_temp, GSI_NEW_STMT);
4167 avoid_deep_ter_for_debug (def_temp, 0);
4168 }
4169 else
4170 avoid_deep_ter_for_debug (g, depth + 1);
4171 }
4172 }
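
/* A sketch of the effect, with hypothetical names and the depth
   threshold of 6 used above: given a single-use chain
   a1 = ...; a2 = f (a1); ...; a9 = f (a8); that TER would otherwise
   fold into one large debug expression, the deeper links are rebound to
   fresh DEBUG_EXPR_DECLs by debug bind stmts inserted right after their
   defining statements, keeping each debug expression shallow.  */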
4173
4174 /* Return an RTX equivalent to the value of the parameter DECL. */
4175
4176 static rtx
4177 expand_debug_parm_decl (tree decl)
4178 {
4179 rtx incoming = DECL_INCOMING_RTL (decl);
4180
4181 if (incoming
4182 && GET_MODE (incoming) != BLKmode
4183 && ((REG_P (incoming) && HARD_REGISTER_P (incoming))
4184 || (MEM_P (incoming)
4185 && REG_P (XEXP (incoming, 0))
4186 && HARD_REGISTER_P (XEXP (incoming, 0)))))
4187 {
4188 rtx rtl = gen_rtx_ENTRY_VALUE (GET_MODE (incoming));
4189
4190 #ifdef HAVE_window_save
4191 /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
4192 If the target machine has an explicit window save instruction, the
4193 actual entry value is the corresponding OUTGOING_REGNO instead. */
4194 if (REG_P (incoming)
4195 && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
4196 incoming
4197 = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
4198 OUTGOING_REGNO (REGNO (incoming)), 0);
4199 else if (MEM_P (incoming))
4200 {
4201 rtx reg = XEXP (incoming, 0);
4202 if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
4203 {
4204 reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
4205 incoming = replace_equiv_address_nv (incoming, reg);
4206 }
4207 else
4208 incoming = copy_rtx (incoming);
4209 }
4210 #endif
4211
4212 ENTRY_VALUE_EXP (rtl) = incoming;
4213 return rtl;
4214 }
4215
4216 if (incoming
4217 && GET_MODE (incoming) != BLKmode
4218 && !TREE_ADDRESSABLE (decl)
4219 && MEM_P (incoming)
4220 && (XEXP (incoming, 0) == virtual_incoming_args_rtx
4221 || (GET_CODE (XEXP (incoming, 0)) == PLUS
4222 && XEXP (XEXP (incoming, 0), 0) == virtual_incoming_args_rtx
4223 && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
4224 return copy_rtx (incoming);
4225
4226 return NULL_RTX;
4227 }
4228
4229 /* Return an RTX equivalent to the value of the tree expression EXP. */
4230
4231 static rtx
4232 expand_debug_expr (tree exp)
4233 {
4234 rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
4235 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4236 machine_mode inner_mode = VOIDmode;
4237 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4238 addr_space_t as;
4239 scalar_int_mode op0_mode, op1_mode, addr_mode;
4240
4241 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
4242 {
4243 case tcc_expression:
4244 switch (TREE_CODE (exp))
4245 {
4246 case COND_EXPR:
4247 case DOT_PROD_EXPR:
4248 case SAD_EXPR:
4249 case WIDEN_MULT_PLUS_EXPR:
4250 case WIDEN_MULT_MINUS_EXPR:
4251 goto ternary;
4252
4253 case TRUTH_ANDIF_EXPR:
4254 case TRUTH_ORIF_EXPR:
4255 case TRUTH_AND_EXPR:
4256 case TRUTH_OR_EXPR:
4257 case TRUTH_XOR_EXPR:
4258 goto binary;
4259
4260 case TRUTH_NOT_EXPR:
4261 goto unary;
4262
4263 default:
4264 break;
4265 }
4266 break;
4267
4268 ternary:
4269 op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
4270 if (!op2)
4271 return NULL_RTX;
4272 /* Fall through. */
4273
4274 binary:
4275 case tcc_binary:
4276 if (mode == BLKmode)
4277 return NULL_RTX;
4278 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
4279 if (!op1)
4280 return NULL_RTX;
4281 switch (TREE_CODE (exp))
4282 {
4283 case LSHIFT_EXPR:
4284 case RSHIFT_EXPR:
4285 case LROTATE_EXPR:
4286 case RROTATE_EXPR:
4287 case WIDEN_LSHIFT_EXPR:
4288 /* Ensure second operand isn't wider than the first one. */
4289 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
4290 if (is_a <scalar_int_mode> (inner_mode, &op1_mode)
4291 && (GET_MODE_UNIT_PRECISION (mode)
4292 < GET_MODE_PRECISION (op1_mode)))
4293 op1 = lowpart_subreg (GET_MODE_INNER (mode), op1, op1_mode);
4294 break;
4295 default:
4296 break;
4297 }
4298 /* Fall through. */
4299
4300 unary:
4301 case tcc_unary:
4302 if (mode == BLKmode)
4303 return NULL_RTX;
4304 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
4305 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
4306 if (!op0)
4307 return NULL_RTX;
4308 break;
4309
4310 case tcc_comparison:
4311 unsignedp = TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
4312 goto binary;
4313
4314 case tcc_type:
4315 case tcc_statement:
4316 gcc_unreachable ();
4317
4318 case tcc_constant:
4319 case tcc_exceptional:
4320 case tcc_declaration:
4321 case tcc_reference:
4322 case tcc_vl_exp:
4323 break;
4324 }
4325
4326 switch (TREE_CODE (exp))
4327 {
4328 case STRING_CST:
4329 if (!lookup_constant_def (exp))
4330 {
4331 if (strlen (TREE_STRING_POINTER (exp)) + 1
4332 != (size_t) TREE_STRING_LENGTH (exp))
4333 return NULL_RTX;
4334 op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
4335 op0 = gen_rtx_MEM (BLKmode, op0);
4336 set_mem_attributes (op0, exp, 0);
4337 return op0;
4338 }
4339 /* Fall through. */
4340
4341 case INTEGER_CST:
4342 case REAL_CST:
4343 case FIXED_CST:
4344 op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
4345 return op0;
4346
4347 case POLY_INT_CST:
4348 return immed_wide_int_const (poly_int_cst_value (exp), mode);
4349
4350 case COMPLEX_CST:
4351 gcc_assert (COMPLEX_MODE_P (mode));
4352 op0 = expand_debug_expr (TREE_REALPART (exp));
4353 op1 = expand_debug_expr (TREE_IMAGPART (exp));
4354 return gen_rtx_CONCAT (mode, op0, op1);
4355
4356 case DEBUG_EXPR_DECL:
4357 op0 = DECL_RTL_IF_SET (exp);
4358
4359 if (op0)
4360 return op0;
4361
4362 op0 = gen_rtx_DEBUG_EXPR (mode);
4363 DEBUG_EXPR_TREE_DECL (op0) = exp;
4364 SET_DECL_RTL (exp, op0);
4365
4366 return op0;
4367
4368 case VAR_DECL:
4369 case PARM_DECL:
4370 case FUNCTION_DECL:
4371 case LABEL_DECL:
4372 case CONST_DECL:
4373 case RESULT_DECL:
4374 op0 = DECL_RTL_IF_SET (exp);
4375
4376 /* This decl was probably optimized away. */
4377 if (!op0
4378 /* At least label RTXen are sometimes replaced by
4379 NOTE_INSN_DELETED_LABEL. Any notes here are not
4380 handled by copy_rtx. */
4381 || NOTE_P (op0))
4382 {
4383 if (!VAR_P (exp)
4384 || DECL_EXTERNAL (exp)
4385 || !TREE_STATIC (exp)
4386 || !DECL_NAME (exp)
4387 || DECL_HARD_REGISTER (exp)
4388 || DECL_IN_CONSTANT_POOL (exp)
4389 || mode == VOIDmode)
4390 return NULL;
4391
4392 op0 = make_decl_rtl_for_debug (exp);
4393 if (!MEM_P (op0)
4394 || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
4395 || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
4396 return NULL;
4397 }
4398 else
4399 op0 = copy_rtx (op0);
4400
4401 if (GET_MODE (op0) == BLKmode
4402 /* If op0 is not BLKmode, but mode is, adjust_mode
4403 below would ICE. While it is likely a FE bug,
4404 try to be robust here. See PR43166. */
4405 || mode == BLKmode
4406 || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
4407 {
4408 gcc_assert (MEM_P (op0));
4409 op0 = adjust_address_nv (op0, mode, 0);
4410 return op0;
4411 }
4412
4413 /* Fall through. */
4414
4415 adjust_mode:
4416 case PAREN_EXPR:
4417 CASE_CONVERT:
4418 {
4419 inner_mode = GET_MODE (op0);
4420
4421 if (mode == inner_mode)
4422 return op0;
4423
4424 if (inner_mode == VOIDmode)
4425 {
4426 if (TREE_CODE (exp) == SSA_NAME)
4427 inner_mode = TYPE_MODE (TREE_TYPE (exp));
4428 else
4429 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
4430 if (mode == inner_mode)
4431 return op0;
4432 }
4433
4434 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
4435 {
4436 if (GET_MODE_UNIT_BITSIZE (mode)
4437 == GET_MODE_UNIT_BITSIZE (inner_mode))
4438 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
4439 else if (GET_MODE_UNIT_BITSIZE (mode)
4440 < GET_MODE_UNIT_BITSIZE (inner_mode))
4441 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
4442 else
4443 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
4444 }
4445 else if (FLOAT_MODE_P (mode))
4446 {
4447 gcc_assert (TREE_CODE (exp) != SSA_NAME);
4448 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
4449 op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
4450 else
4451 op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
4452 }
4453 else if (FLOAT_MODE_P (inner_mode))
4454 {
4455 if (unsignedp)
4456 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
4457 else
4458 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
4459 }
4460 else if (GET_MODE_UNIT_PRECISION (mode)
4461 == GET_MODE_UNIT_PRECISION (inner_mode))
4462 op0 = lowpart_subreg (mode, op0, inner_mode);
4463 else if (GET_MODE_UNIT_PRECISION (mode)
4464 < GET_MODE_UNIT_PRECISION (inner_mode))
4465 op0 = simplify_gen_unary (TRUNCATE, mode, op0, inner_mode);
4466 else if (UNARY_CLASS_P (exp)
4467 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
4468 : unsignedp)
4469 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
4470 else
4471 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
4472
4473 return op0;
4474 }
4475
4476 case MEM_REF:
4477 if (!is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
4478 {
4479 tree newexp = fold_binary (MEM_REF, TREE_TYPE (exp),
4480 TREE_OPERAND (exp, 0),
4481 TREE_OPERAND (exp, 1));
4482 if (newexp)
4483 return expand_debug_expr (newexp);
4484 }
4485 /* FALLTHROUGH */
4486 case INDIRECT_REF:
4487 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
4488 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
4489 if (!op0)
4490 return NULL;
4491
4492 if (TREE_CODE (exp) == MEM_REF)
4493 {
4494 if (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
4495 || (GET_CODE (op0) == PLUS
4496 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR))
4497 /* (mem (debug_implicit_ptr)) might confuse aliasing.
4498 Instead just use get_inner_reference. */
4499 goto component_ref;
4500
4501 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
4502 poly_int64 offset;
4503 if (!op1 || !poly_int_rtx_p (op1, &offset))
4504 return NULL;
4505
4506 op0 = plus_constant (inner_mode, op0, offset);
4507 }
4508
4509 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
4510
4511 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
4512 op0, as);
4513 if (op0 == NULL_RTX)
4514 return NULL;
4515
4516 op0 = gen_rtx_MEM (mode, op0);
4517 set_mem_attributes (op0, exp, 0);
4518 if (TREE_CODE (exp) == MEM_REF
4519 && !is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
4520 set_mem_expr (op0, NULL_TREE);
4521 set_mem_addr_space (op0, as);
4522
4523 return op0;
4524
4525 case TARGET_MEM_REF:
4526 if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
4527 && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp), 0)))
4528 return NULL;
4529
4530 op0 = expand_debug_expr
4531 (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
4532 if (!op0)
4533 return NULL;
4534
4535 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
4536 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
4537 op0, as);
4538 if (op0 == NULL_RTX)
4539 return NULL;
4540
4541 op0 = gen_rtx_MEM (mode, op0);
4542
4543 set_mem_attributes (op0, exp, 0);
4544 set_mem_addr_space (op0, as);
4545
4546 return op0;
4547
4548 component_ref:
4549 case ARRAY_REF:
4550 case ARRAY_RANGE_REF:
4551 case COMPONENT_REF:
4552 case BIT_FIELD_REF:
4553 case REALPART_EXPR:
4554 case IMAGPART_EXPR:
4555 case VIEW_CONVERT_EXPR:
4556 {
4557 machine_mode mode1;
4558 poly_int64 bitsize, bitpos;
4559 tree offset;
4560 int reversep, volatilep = 0;
4561 tree tem
4562 = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode1,
4563 &unsignedp, &reversep, &volatilep);
4564 rtx orig_op0;
4565
4566 if (known_eq (bitsize, 0))
4567 return NULL;
4568
4569 orig_op0 = op0 = expand_debug_expr (tem);
4570
4571 if (!op0)
4572 return NULL;
4573
4574 if (offset)
4575 {
4576 machine_mode addrmode, offmode;
4577
4578 if (!MEM_P (op0))
4579 return NULL;
4580
4581 op0 = XEXP (op0, 0);
4582 addrmode = GET_MODE (op0);
4583 if (addrmode == VOIDmode)
4584 addrmode = Pmode;
4585
4586 op1 = expand_debug_expr (offset);
4587 if (!op1)
4588 return NULL;
4589
4590 offmode = GET_MODE (op1);
4591 if (offmode == VOIDmode)
4592 offmode = TYPE_MODE (TREE_TYPE (offset));
4593
4594 if (addrmode != offmode)
4595 op1 = lowpart_subreg (addrmode, op1, offmode);
4596
4597 /* Don't use offset_address here, we don't need a
4598 recognizable address, and we don't want to generate
4599 code. */
4600 op0 = gen_rtx_MEM (mode, simplify_gen_binary (PLUS, addrmode,
4601 op0, op1));
4602 }
4603
4604 if (MEM_P (op0))
4605 {
4606 if (mode1 == VOIDmode)
4607 {
4608 if (maybe_gt (bitsize, MAX_BITSIZE_MODE_ANY_INT))
4609 return NULL;
4610 /* Bitfield. */
4611 mode1 = smallest_int_mode_for_size (bitsize);
4612 }
4613 poly_int64 bytepos = bits_to_bytes_round_down (bitpos);
4614 if (maybe_ne (bytepos, 0))
4615 {
4616 op0 = adjust_address_nv (op0, mode1, bytepos);
4617 bitpos = num_trailing_bits (bitpos);
4618 }
4619 else if (known_eq (bitpos, 0)
4620 && known_eq (bitsize, GET_MODE_BITSIZE (mode)))
4621 op0 = adjust_address_nv (op0, mode, 0);
4622 else if (GET_MODE (op0) != mode1)
4623 op0 = adjust_address_nv (op0, mode1, 0);
4624 else
4625 op0 = copy_rtx (op0);
4626 if (op0 == orig_op0)
4627 op0 = shallow_copy_rtx (op0);
4628 if (TREE_CODE (tem) != SSA_NAME)
4629 set_mem_attributes (op0, exp, 0);
4630 }
4631
4632 if (known_eq (bitpos, 0) && mode == GET_MODE (op0))
4633 return op0;
4634
4635 if (maybe_lt (bitpos, 0))
4636 return NULL;
4637
4638 if (GET_MODE (op0) == BLKmode || mode == BLKmode)
4639 return NULL;
4640
4641 poly_int64 bytepos;
4642 if (multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
4643 && known_eq (bitsize, GET_MODE_BITSIZE (mode1)))
4644 {
4645 machine_mode opmode = GET_MODE (op0);
4646
4647 if (opmode == VOIDmode)
4648 opmode = TYPE_MODE (TREE_TYPE (tem));
4649
4650 /* This condition may hold if we're expanding the address
4651 right past the end of an array that turned out not to
4652 be addressable (i.e., the address was only computed in
4653 debug stmts). The gen_subreg below would rightfully
4654 crash, and the address doesn't really exist, so just
4655 drop it. */
4656 if (known_ge (bitpos, GET_MODE_BITSIZE (opmode)))
4657 return NULL;
4658
4659 if (multiple_p (bitpos, GET_MODE_BITSIZE (mode)))
4660 return simplify_gen_subreg (mode, op0, opmode, bytepos);
4661 }
4662
4663 return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
4664 && TYPE_UNSIGNED (TREE_TYPE (exp))
4665 ? SIGN_EXTRACT
4666 : ZERO_EXTRACT, mode,
4667 GET_MODE (op0) != VOIDmode
4668 ? GET_MODE (op0)
4669 : TYPE_MODE (TREE_TYPE (tem)),
4670 op0, gen_int_mode (bitsize, word_mode),
4671 gen_int_mode (bitpos, word_mode));
4672 }
4673
4674 case ABS_EXPR:
4675 case ABSU_EXPR:
4676 return simplify_gen_unary (ABS, mode, op0, mode);
4677
4678 case NEGATE_EXPR:
4679 return simplify_gen_unary (NEG, mode, op0, mode);
4680
4681 case BIT_NOT_EXPR:
4682 return simplify_gen_unary (NOT, mode, op0, mode);
4683
4684 case FLOAT_EXPR:
4685 return simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
4686 0)))
4687 ? UNSIGNED_FLOAT : FLOAT, mode, op0,
4688 inner_mode);
4689
4690 case FIX_TRUNC_EXPR:
4691 return simplify_gen_unary (unsignedp ? UNSIGNED_FIX : FIX, mode, op0,
4692 inner_mode);
4693
4694 case POINTER_PLUS_EXPR:
4695 /* For the rare target where pointers are not the same size as
4696 size_t, we need to check for mis-matched modes and correct
4697 the addend. */
4698 if (op0 && op1
4699 && is_a <scalar_int_mode> (GET_MODE (op0), &op0_mode)
4700 && is_a <scalar_int_mode> (GET_MODE (op1), &op1_mode)
4701 && op0_mode != op1_mode)
4702 {
4703 if (GET_MODE_BITSIZE (op0_mode) < GET_MODE_BITSIZE (op1_mode)
4704 /* If OP0 is a partial mode, then we must truncate, even
4705 if it has the same bitsize as OP1, since GCC's
4706 representation of partial modes is opaque. */
4707 || (GET_MODE_CLASS (op0_mode) == MODE_PARTIAL_INT
4708 && (GET_MODE_BITSIZE (op0_mode)
4709 == GET_MODE_BITSIZE (op1_mode))))
4710 op1 = simplify_gen_unary (TRUNCATE, op0_mode, op1, op1_mode);
4711 else
4712 /* We always sign-extend, regardless of the signedness of
4713 the operand, because the operand is always unsigned
4714 here even if the original C expression is signed. */
4715 op1 = simplify_gen_unary (SIGN_EXTEND, op0_mode, op1, op1_mode);
4716 }
4717 /* Fall through. */
4718 case PLUS_EXPR:
4719 return simplify_gen_binary (PLUS, mode, op0, op1);
4720
4721 case MINUS_EXPR:
4722 case POINTER_DIFF_EXPR:
4723 return simplify_gen_binary (MINUS, mode, op0, op1);
4724
4725 case MULT_EXPR:
4726 return simplify_gen_binary (MULT, mode, op0, op1);
4727
4728 case RDIV_EXPR:
4729 case TRUNC_DIV_EXPR:
4730 case EXACT_DIV_EXPR:
4731 if (unsignedp)
4732 return simplify_gen_binary (UDIV, mode, op0, op1);
4733 else
4734 return simplify_gen_binary (DIV, mode, op0, op1);
4735
4736 case TRUNC_MOD_EXPR:
4737 return simplify_gen_binary (unsignedp ? UMOD : MOD, mode, op0, op1);
4738
4739 case FLOOR_DIV_EXPR:
4740 if (unsignedp)
4741 return simplify_gen_binary (UDIV, mode, op0, op1);
4742 else
4743 {
4744 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4745 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4746 rtx adj = floor_sdiv_adjust (mode, mod, op1);
4747 return simplify_gen_binary (PLUS, mode, div, adj);
4748 }
4749
4750 case FLOOR_MOD_EXPR:
4751 if (unsignedp)
4752 return simplify_gen_binary (UMOD, mode, op0, op1);
4753 else
4754 {
4755 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4756 rtx adj = floor_sdiv_adjust (mode, mod, op1);
4757 adj = simplify_gen_unary (NEG, mode,
4758 simplify_gen_binary (MULT, mode, adj, op1),
4759 mode);
4760 return simplify_gen_binary (PLUS, mode, mod, adj);
4761 }
4762
4763 case CEIL_DIV_EXPR:
4764 if (unsignedp)
4765 {
4766 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
4767 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4768 rtx adj = ceil_udiv_adjust (mode, mod, op1);
4769 return simplify_gen_binary (PLUS, mode, div, adj);
4770 }
4771 else
4772 {
4773 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4774 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4775 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
4776 return simplify_gen_binary (PLUS, mode, div, adj);
4777 }
4778
4779 case CEIL_MOD_EXPR:
4780 if (unsignedp)
4781 {
4782 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4783 rtx adj = ceil_udiv_adjust (mode, mod, op1);
4784 adj = simplify_gen_unary (NEG, mode,
4785 simplify_gen_binary (MULT, mode, adj, op1),
4786 mode);
4787 return simplify_gen_binary (PLUS, mode, mod, adj);
4788 }
4789 else
4790 {
4791 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4792 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
4793 adj = simplify_gen_unary (NEG, mode,
4794 simplify_gen_binary (MULT, mode, adj, op1),
4795 mode);
4796 return simplify_gen_binary (PLUS, mode, mod, adj);
4797 }
4798
4799 case ROUND_DIV_EXPR:
4800 if (unsignedp)
4801 {
4802 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
4803 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4804 rtx adj = round_udiv_adjust (mode, mod, op1);
4805 return simplify_gen_binary (PLUS, mode, div, adj);
4806 }
4807 else
4808 {
4809 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4810 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4811 rtx adj = round_sdiv_adjust (mode, mod, op1);
4812 return simplify_gen_binary (PLUS, mode, div, adj);
4813 }
4814
4815 case ROUND_MOD_EXPR:
4816 if (unsignedp)
4817 {
4818 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4819 rtx adj = round_udiv_adjust (mode, mod, op1);
4820 adj = simplify_gen_unary (NEG, mode,
4821 simplify_gen_binary (MULT, mode, adj, op1),
4822 mode);
4823 return simplify_gen_binary (PLUS, mode, mod, adj);
4824 }
4825 else
4826 {
4827 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4828 rtx adj = round_sdiv_adjust (mode, mod, op1);
4829 adj = simplify_gen_unary (NEG, mode,
4830 simplify_gen_binary (MULT, mode, adj, op1),
4831 mode);
4832 return simplify_gen_binary (PLUS, mode, mod, adj);
4833 }
4834
4835 case LSHIFT_EXPR:
4836 return simplify_gen_binary (ASHIFT, mode, op0, op1);
4837
4838 case RSHIFT_EXPR:
4839 if (unsignedp)
4840 return simplify_gen_binary (LSHIFTRT, mode, op0, op1);
4841 else
4842 return simplify_gen_binary (ASHIFTRT, mode, op0, op1);
4843
4844 case LROTATE_EXPR:
4845 return simplify_gen_binary (ROTATE, mode, op0, op1);
4846
4847 case RROTATE_EXPR:
4848 return simplify_gen_binary (ROTATERT, mode, op0, op1);
4849
4850 case MIN_EXPR:
4851 return simplify_gen_binary (unsignedp ? UMIN : SMIN, mode, op0, op1);
4852
4853 case MAX_EXPR:
4854 return simplify_gen_binary (unsignedp ? UMAX : SMAX, mode, op0, op1);
4855
4856 case BIT_AND_EXPR:
4857 case TRUTH_AND_EXPR:
4858 return simplify_gen_binary (AND, mode, op0, op1);
4859
4860 case BIT_IOR_EXPR:
4861 case TRUTH_OR_EXPR:
4862 return simplify_gen_binary (IOR, mode, op0, op1);
4863
4864 case BIT_XOR_EXPR:
4865 case TRUTH_XOR_EXPR:
4866 return simplify_gen_binary (XOR, mode, op0, op1);
4867
4868 case TRUTH_ANDIF_EXPR:
4869 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
4870
4871 case TRUTH_ORIF_EXPR:
4872 return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
4873
4874 case TRUTH_NOT_EXPR:
4875 return simplify_gen_relational (EQ, mode, inner_mode, op0, const0_rtx);
4876
4877 case LT_EXPR:
4878 return simplify_gen_relational (unsignedp ? LTU : LT, mode, inner_mode,
4879 op0, op1);
4880
4881 case LE_EXPR:
4882 return simplify_gen_relational (unsignedp ? LEU : LE, mode, inner_mode,
4883 op0, op1);
4884
4885 case GT_EXPR:
4886 return simplify_gen_relational (unsignedp ? GTU : GT, mode, inner_mode,
4887 op0, op1);
4888
4889 case GE_EXPR:
4890 return simplify_gen_relational (unsignedp ? GEU : GE, mode, inner_mode,
4891 op0, op1);
4892
4893 case EQ_EXPR:
4894 return simplify_gen_relational (EQ, mode, inner_mode, op0, op1);
4895
4896 case NE_EXPR:
4897 return simplify_gen_relational (NE, mode, inner_mode, op0, op1);
4898
4899 case UNORDERED_EXPR:
4900 return simplify_gen_relational (UNORDERED, mode, inner_mode, op0, op1);
4901
4902 case ORDERED_EXPR:
4903 return simplify_gen_relational (ORDERED, mode, inner_mode, op0, op1);
4904
4905 case UNLT_EXPR:
4906 return simplify_gen_relational (UNLT, mode, inner_mode, op0, op1);
4907
4908 case UNLE_EXPR:
4909 return simplify_gen_relational (UNLE, mode, inner_mode, op0, op1);
4910
4911 case UNGT_EXPR:
4912 return simplify_gen_relational (UNGT, mode, inner_mode, op0, op1);
4913
4914 case UNGE_EXPR:
4915 return simplify_gen_relational (UNGE, mode, inner_mode, op0, op1);
4916
4917 case UNEQ_EXPR:
4918 return simplify_gen_relational (UNEQ, mode, inner_mode, op0, op1);
4919
4920 case LTGT_EXPR:
4921 return simplify_gen_relational (LTGT, mode, inner_mode, op0, op1);
4922
4923 case COND_EXPR:
4924 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
4925
4926 case COMPLEX_EXPR:
4927 gcc_assert (COMPLEX_MODE_P (mode));
4928 if (GET_MODE (op0) == VOIDmode)
4929 op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
4930 if (GET_MODE (op1) == VOIDmode)
4931 op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
4932 return gen_rtx_CONCAT (mode, op0, op1);
4933
4934 case CONJ_EXPR:
4935 if (GET_CODE (op0) == CONCAT)
4936 return gen_rtx_CONCAT (mode, XEXP (op0, 0),
4937 simplify_gen_unary (NEG, GET_MODE_INNER (mode),
4938 XEXP (op0, 1),
4939 GET_MODE_INNER (mode)));
4940 else
4941 {
4942 scalar_mode imode = GET_MODE_INNER (mode);
4943 rtx re, im;
4944
4945 if (MEM_P (op0))
4946 {
4947 re = adjust_address_nv (op0, imode, 0);
4948 im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
4949 }
4950 else
4951 {
4952 scalar_int_mode ifmode;
4953 scalar_int_mode ihmode;
4954 rtx halfsize;
4955 if (!int_mode_for_mode (mode).exists (&ifmode)
4956 || !int_mode_for_mode (imode).exists (&ihmode))
4957 return NULL;
4958 halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
4959 re = op0;
4960 if (mode != ifmode)
4961 re = gen_rtx_SUBREG (ifmode, re, 0);
4962 re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
4963 if (imode != ihmode)
4964 re = gen_rtx_SUBREG (imode, re, 0);
4965 im = copy_rtx (op0);
4966 if (mode != ifmode)
4967 im = gen_rtx_SUBREG (ifmode, im, 0);
4968 im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
4969 if (imode != ihmode)
4970 im = gen_rtx_SUBREG (imode, im, 0);
4971 }
4972 im = gen_rtx_NEG (imode, im);
4973 return gen_rtx_CONCAT (mode, re, im);
4974 }
4975
4976 case ADDR_EXPR:
4977 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
4978 if (!op0 || !MEM_P (op0))
4979 {
4980 if ((TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
4981 || TREE_CODE (TREE_OPERAND (exp, 0)) == PARM_DECL
4982 || TREE_CODE (TREE_OPERAND (exp, 0)) == RESULT_DECL)
4983 && (!TREE_ADDRESSABLE (TREE_OPERAND (exp, 0))
4984 || target_for_debug_bind (TREE_OPERAND (exp, 0))))
4985 return gen_rtx_DEBUG_IMPLICIT_PTR (mode, TREE_OPERAND (exp, 0));
4986
4987 if (handled_component_p (TREE_OPERAND (exp, 0)))
4988 {
4989 poly_int64 bitoffset, bitsize, maxsize, byteoffset;
4990 bool reverse;
4991 tree decl
4992 = get_ref_base_and_extent (TREE_OPERAND (exp, 0), &bitoffset,
4993 &bitsize, &maxsize, &reverse);
4994 if ((VAR_P (decl)
4995 || TREE_CODE (decl) == PARM_DECL
4996 || TREE_CODE (decl) == RESULT_DECL)
4997 && (!TREE_ADDRESSABLE (decl)
4998 || target_for_debug_bind (decl))
4999 && multiple_p (bitoffset, BITS_PER_UNIT, &byteoffset)
5000 && known_gt (bitsize, 0)
5001 && known_eq (bitsize, maxsize))
5002 {
5003 rtx base = gen_rtx_DEBUG_IMPLICIT_PTR (mode, decl);
5004 return plus_constant (mode, base, byteoffset);
5005 }
5006 }
5007
5008 if (TREE_CODE (TREE_OPERAND (exp, 0)) == MEM_REF
5009 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5010 == ADDR_EXPR)
5011 {
5012 op0 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
5013 0));
5014 if (op0 != NULL
5015 && (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
5016 || (GET_CODE (op0) == PLUS
5017 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR
5018 && CONST_INT_P (XEXP (op0, 1)))))
5019 {
5020 op1 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
5021 1));
5022 poly_int64 offset;
5023 if (!op1 || !poly_int_rtx_p (op1, &offset))
5024 return NULL;
5025
5026 return plus_constant (mode, op0, offset);
5027 }
5028 }
5029
5030 return NULL;
5031 }
5032
5033 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
5034 addr_mode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
5035 op0 = convert_debug_memory_address (addr_mode, XEXP (op0, 0), as);
5036
5037 return op0;
5038
5039 case VECTOR_CST:
5040 {
5041 unsigned HOST_WIDE_INT i, nelts;
5042
5043 if (!VECTOR_CST_NELTS (exp).is_constant (&nelts))
5044 return NULL;
5045
5046 op0 = gen_rtx_CONCATN (mode, rtvec_alloc (nelts));
5047
5048 for (i = 0; i < nelts; ++i)
5049 {
5050 op1 = expand_debug_expr (VECTOR_CST_ELT (exp, i));
5051 if (!op1)
5052 return NULL;
5053 XVECEXP (op0, 0, i) = op1;
5054 }
5055
5056 return op0;
5057 }
5058
5059 case CONSTRUCTOR:
5060 if (TREE_CLOBBER_P (exp))
5061 return NULL;
5062 else if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
5063 {
5064 unsigned i;
5065 unsigned HOST_WIDE_INT nelts;
5066 tree val;
5067
5068 if (!TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)).is_constant (&nelts))
5069 goto flag_unsupported;
5070
5071 op0 = gen_rtx_CONCATN (mode, rtvec_alloc (nelts));
5072
5073 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
5074 {
5075 op1 = expand_debug_expr (val);
5076 if (!op1)
5077 return NULL;
5078 XVECEXP (op0, 0, i) = op1;
5079 }
5080
5081 if (i < nelts)
5082 {
5083 op1 = expand_debug_expr
5084 (build_zero_cst (TREE_TYPE (TREE_TYPE (exp))));
5085
5086 if (!op1)
5087 return NULL;
5088
5089 for (; i < nelts; i++)
5090 XVECEXP (op0, 0, i) = op1;
5091 }
5092
5093 return op0;
5094 }
5095 else
5096 goto flag_unsupported;
5097
5098 case CALL_EXPR:
5099 /* ??? Maybe handle some builtins? */
5100 return NULL;
5101
5102 case SSA_NAME:
5103 {
5104 gimple *g = get_gimple_for_ssa_name (exp);
5105 if (g)
5106 {
5107 tree t = NULL_TREE;
5108 if (deep_ter_debug_map)
5109 {
5110 tree *slot = deep_ter_debug_map->get (exp);
5111 if (slot)
5112 t = *slot;
5113 }
5114 if (t == NULL_TREE)
5115 t = gimple_assign_rhs_to_tree (g);
5116 op0 = expand_debug_expr (t);
5117 if (!op0)
5118 return NULL;
5119 }
5120 else
5121 {
5122 /* If this is a reference to an incoming value of
5123 a parameter that is never used in the code, or where the
5124 incoming value is never used in the code, use
5125 PARM_DECL's DECL_RTL if set. */
5126 if (SSA_NAME_IS_DEFAULT_DEF (exp)
5127 && SSA_NAME_VAR (exp)
5128 && TREE_CODE (SSA_NAME_VAR (exp)) == PARM_DECL
5129 && has_zero_uses (exp))
5130 {
5131 op0 = expand_debug_parm_decl (SSA_NAME_VAR (exp));
5132 if (op0)
5133 goto adjust_mode;
5134 op0 = expand_debug_expr (SSA_NAME_VAR (exp));
5135 if (op0)
5136 goto adjust_mode;
5137 }
5138
5139 int part = var_to_partition (SA.map, exp);
5140
5141 if (part == NO_PARTITION)
5142 return NULL;
5143
5144 gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
5145
5146 op0 = copy_rtx (SA.partition_to_pseudo[part]);
5147 }
5148 goto adjust_mode;
5149 }
5150
5151 case ERROR_MARK:
5152 return NULL;
5153
5154 /* Vector stuff. For most of the codes we don't have rtl codes. */
5155 case REALIGN_LOAD_EXPR:
5156 case VEC_COND_EXPR:
5157 case VEC_PACK_FIX_TRUNC_EXPR:
5158 case VEC_PACK_FLOAT_EXPR:
5159 case VEC_PACK_SAT_EXPR:
5160 case VEC_PACK_TRUNC_EXPR:
5161 case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
5162 case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
5163 case VEC_UNPACK_FLOAT_HI_EXPR:
5164 case VEC_UNPACK_FLOAT_LO_EXPR:
5165 case VEC_UNPACK_HI_EXPR:
5166 case VEC_UNPACK_LO_EXPR:
5167 case VEC_WIDEN_MULT_HI_EXPR:
5168 case VEC_WIDEN_MULT_LO_EXPR:
5169 case VEC_WIDEN_MULT_EVEN_EXPR:
5170 case VEC_WIDEN_MULT_ODD_EXPR:
5171 case VEC_WIDEN_LSHIFT_HI_EXPR:
5172 case VEC_WIDEN_LSHIFT_LO_EXPR:
5173 case VEC_PERM_EXPR:
5174 case VEC_DUPLICATE_EXPR:
5175 case VEC_SERIES_EXPR:
5176 case SAD_EXPR:
5177 return NULL;
5178
5179 /* Misc codes. */
5180 case ADDR_SPACE_CONVERT_EXPR:
5181 case FIXED_CONVERT_EXPR:
5182 case OBJ_TYPE_REF:
5183 case WITH_SIZE_EXPR:
5184 case BIT_INSERT_EXPR:
5185 return NULL;
5186
5187 case DOT_PROD_EXPR:
5188 if (SCALAR_INT_MODE_P (GET_MODE (op0))
5189 && SCALAR_INT_MODE_P (mode))
5190 {
5191 op0
5192 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
5193 0)))
5194 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
5195 inner_mode);
5196 op1
5197 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
5198 1)))
5199 ? ZERO_EXTEND : SIGN_EXTEND, mode, op1,
5200 inner_mode);
5201 op0 = simplify_gen_binary (MULT, mode, op0, op1);
5202 return simplify_gen_binary (PLUS, mode, op0, op2);
5203 }
5204 return NULL;
5205
5206 case WIDEN_MULT_EXPR:
5207 case WIDEN_MULT_PLUS_EXPR:
5208 case WIDEN_MULT_MINUS_EXPR:
5209 if (SCALAR_INT_MODE_P (GET_MODE (op0))
5210 && SCALAR_INT_MODE_P (mode))
5211 {
5212 inner_mode = GET_MODE (op0);
5213 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
5214 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
5215 else
5216 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
5217 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
5218 op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
5219 else
5220 op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
5221 op0 = simplify_gen_binary (MULT, mode, op0, op1);
5222 if (TREE_CODE (exp) == WIDEN_MULT_EXPR)
5223 return op0;
5224 else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR)
5225 return simplify_gen_binary (PLUS, mode, op0, op2);
5226 else
5227 return simplify_gen_binary (MINUS, mode, op2, op0);
5228 }
5229 return NULL;
5230
5231 case MULT_HIGHPART_EXPR:
5232 /* ??? Similar to the above. */
5233 return NULL;
5234
5235 case WIDEN_SUM_EXPR:
5236 case WIDEN_LSHIFT_EXPR:
5237 if (SCALAR_INT_MODE_P (GET_MODE (op0))
5238 && SCALAR_INT_MODE_P (mode))
5239 {
5240 op0
5241 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
5242 0)))
5243 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
5244 inner_mode);
5245 return simplify_gen_binary (TREE_CODE (exp) == WIDEN_LSHIFT_EXPR
5246 ? ASHIFT : PLUS, mode, op0, op1);
5247 }
5248 return NULL;
5249
5250 default:
5251 flag_unsupported:
5252 if (flag_checking)
5253 {
5254 debug_tree (exp);
5255 gcc_unreachable ();
5256 }
5257 return NULL;
5258 }
5259 }
5260
5261 /* Return an RTX equivalent to the source bind value of the tree expression
5262 EXP. */
5263
5264 static rtx
5265 expand_debug_source_expr (tree exp)
5266 {
5267 rtx op0 = NULL_RTX;
5268 machine_mode mode = VOIDmode, inner_mode;
5269
5270 switch (TREE_CODE (exp))
5271 {
5272 case VAR_DECL:
5273 if (DECL_ABSTRACT_ORIGIN (exp))
5274 return expand_debug_source_expr (DECL_ABSTRACT_ORIGIN (exp));
5275 break;
5276 case PARM_DECL:
5277 {
5278 mode = DECL_MODE (exp);
5279 op0 = expand_debug_parm_decl (exp);
5280 if (op0)
5281 break;
5282 /* See if this isn't an argument that has been completely
5283 optimized out. */
5284 if (!DECL_RTL_SET_P (exp)
5285 && !DECL_INCOMING_RTL (exp)
5286 && DECL_ABSTRACT_ORIGIN (current_function_decl))
5287 {
5288 tree aexp = DECL_ORIGIN (exp);
5289 if (DECL_CONTEXT (aexp)
5290 == DECL_ABSTRACT_ORIGIN (current_function_decl))
5291 {
5292 vec<tree, va_gc> **debug_args;
5293 unsigned int ix;
5294 tree ddecl;
5295 debug_args = decl_debug_args_lookup (current_function_decl);
5296 if (debug_args != NULL)
5297 {
5298 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl);
5299 ix += 2)
5300 if (ddecl == aexp)
5301 return gen_rtx_DEBUG_PARAMETER_REF (mode, aexp);
5302 }
5303 }
5304 }
5305 break;
5306 }
5307 default:
5308 break;
5309 }
5310
5311 if (op0 == NULL_RTX)
5312 return NULL_RTX;
5313
5314 inner_mode = GET_MODE (op0);
5315 if (mode == inner_mode)
5316 return op0;
5317
5318 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
5319 {
5320 if (GET_MODE_UNIT_BITSIZE (mode)
5321 == GET_MODE_UNIT_BITSIZE (inner_mode))
5322 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
5323 else if (GET_MODE_UNIT_BITSIZE (mode)
5324 < GET_MODE_UNIT_BITSIZE (inner_mode))
5325 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
5326 else
5327 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
5328 }
5329 else if (FLOAT_MODE_P (mode))
5330 gcc_unreachable ();
5331 else if (FLOAT_MODE_P (inner_mode))
5332 {
5333 if (TYPE_UNSIGNED (TREE_TYPE (exp)))
5334 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
5335 else
5336 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
5337 }
5338 else if (GET_MODE_UNIT_PRECISION (mode)
5339 == GET_MODE_UNIT_PRECISION (inner_mode))
5340 op0 = lowpart_subreg (mode, op0, inner_mode);
5341 else if (GET_MODE_UNIT_PRECISION (mode)
5342 < GET_MODE_UNIT_PRECISION (inner_mode))
5343 op0 = simplify_gen_unary (TRUNCATE, mode, op0, inner_mode);
5344 else if (TYPE_UNSIGNED (TREE_TYPE (exp)))
5345 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
5346 else
5347 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
5348
5349 return op0;
5350 }
5351
5352 /* Ensure INSN_VAR_LOCATION_LOC (insn) doesn't have unbounded complexity.
5353 Allow 4 levels of rtl nesting for most rtl codes, and if we see anything
5354 deeper than that, create DEBUG_EXPRs and emit DEBUG_INSNs before INSN. */
5355
5356 static void
5357 avoid_complex_debug_insns (rtx_insn *insn, rtx *exp_p, int depth)
5358 {
5359 rtx exp = *exp_p;
5360
5361 if (exp == NULL_RTX)
5362 return;
5363
5364 if ((OBJECT_P (exp) && !MEM_P (exp)) || GET_CODE (exp) == CLOBBER)
5365 return;
5366
5367 if (depth == 4)
5368 {
5369 /* Create DEBUG_EXPR (and DEBUG_EXPR_DECL). */
5370 rtx dval = make_debug_expr_from_rtl (exp);
5371
5372 /* Emit a debug bind insn before INSN. */
5373 rtx bind = gen_rtx_VAR_LOCATION (GET_MODE (exp),
5374 DEBUG_EXPR_TREE_DECL (dval), exp,
5375 VAR_INIT_STATUS_INITIALIZED);
5376
5377 emit_debug_insn_before (bind, insn);
5378 *exp_p = dval;
5379 return;
5380 }
5381
5382 const char *format_ptr = GET_RTX_FORMAT (GET_CODE (exp));
5383 int i, j;
5384 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (exp)); i++)
5385 switch (*format_ptr++)
5386 {
5387 case 'e':
5388 avoid_complex_debug_insns (insn, &XEXP (exp, i), depth + 1);
5389 break;
5390
5391 case 'E':
5392 case 'V':
5393 for (j = 0; j < XVECLEN (exp, i); j++)
5394 avoid_complex_debug_insns (insn, &XVECEXP (exp, i, j), depth + 1);
5395 break;
5396
5397 default:
5398 break;
5399 }
5400 }
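
/* Sketch of the behaviour, under the depth limit of 4 used above:
   walking a location such as
   (plus (mult (plus (mult (reg) ...) ...) ...) ...), once the recursion
   reaches depth 4 the remaining subexpression is pulled out into a
   fresh DEBUG_EXPR, a VAR_LOCATION bind for it is emitted before INSN,
   and the original location refers to the new DEBUG_EXPR instead.  */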
5401
5402 /* Expand the _LOCs in debug insns. We run this after expanding all
5403 regular insns, so that any variables referenced in the function
5404 will have their DECL_RTLs set. */
5405
5406 static void
5407 expand_debug_locations (void)
5408 {
5409 rtx_insn *insn;
5410 rtx_insn *last = get_last_insn ();
5411 int save_strict_alias = flag_strict_aliasing;
5412
5413 /* New alias sets while setting up memory attributes cause
5414 -fcompare-debug failures, even though they don't bring about any
5415 codegen changes. */
5416 flag_strict_aliasing = 0;
5417
5418 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5419 if (DEBUG_BIND_INSN_P (insn))
5420 {
5421 tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
5422 rtx val;
5423 rtx_insn *prev_insn, *insn2;
5424 machine_mode mode;
5425
5426 if (value == NULL_TREE)
5427 val = NULL_RTX;
5428 else
5429 {
5430 if (INSN_VAR_LOCATION_STATUS (insn)
5431 == VAR_INIT_STATUS_UNINITIALIZED)
5432 val = expand_debug_source_expr (value);
5433 /* The avoid_deep_ter_for_debug function inserts
5434 debug bind stmts after SSA_NAME definition, with the
5435 SSA_NAME as the whole bind location. Temporarily disable
5436 expansion of that SSA_NAME into the DEBUG_EXPR_DECL
5437 being defined in this DEBUG_INSN. */
5438 else if (deep_ter_debug_map && TREE_CODE (value) == SSA_NAME)
5439 {
5440 tree *slot = deep_ter_debug_map->get (value);
5441 if (slot)
5442 {
5443 if (*slot == INSN_VAR_LOCATION_DECL (insn))
5444 *slot = NULL_TREE;
5445 else
5446 slot = NULL;
5447 }
5448 val = expand_debug_expr (value);
5449 if (slot)
5450 *slot = INSN_VAR_LOCATION_DECL (insn);
5451 }
5452 else
5453 val = expand_debug_expr (value);
5454 gcc_assert (last == get_last_insn ());
5455 }
5456
5457 if (!val)
5458 val = gen_rtx_UNKNOWN_VAR_LOC ();
5459 else
5460 {
5461 mode = GET_MODE (INSN_VAR_LOCATION (insn));
5462
5463 gcc_assert (mode == GET_MODE (val)
5464 || (GET_MODE (val) == VOIDmode
5465 && (CONST_SCALAR_INT_P (val)
5466 || GET_CODE (val) == CONST_FIXED
5467 || GET_CODE (val) == LABEL_REF)));
5468 }
5469
5470 INSN_VAR_LOCATION_LOC (insn) = val;
5471 prev_insn = PREV_INSN (insn);
5472 for (insn2 = insn; insn2 != prev_insn; insn2 = PREV_INSN (insn2))
5473 avoid_complex_debug_insns (insn2, &INSN_VAR_LOCATION_LOC (insn2), 0);
5474 }
5475
5476 flag_strict_aliasing = save_strict_alias;
5477 }
5478
5479 /* Swap the operands of commutative operations so that the more
5480 expensive operand is expanded first. */
5481
5482 static void
5483 reorder_operands (basic_block bb)
5484 {
5485 unsigned int *lattice; /* Hold cost of each statement. */
5486 unsigned int i = 0, n = 0;
5487 gimple_stmt_iterator gsi;
5488 gimple_seq stmts;
5489 gimple *stmt;
5490 bool swap;
5491 tree op0, op1;
5492 ssa_op_iter iter;
5493 use_operand_p use_p;
5494 gimple *def0, *def1;
5495
5496 /* Compute cost of each statement using estimate_num_insns. */
5497 stmts = bb_seq (bb);
5498 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
5499 {
5500 stmt = gsi_stmt (gsi);
5501 if (!is_gimple_debug (stmt))
5502 gimple_set_uid (stmt, n++);
5503 }
5504 lattice = XNEWVEC (unsigned int, n);
5505 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
5506 {
5507 unsigned cost;
5508 stmt = gsi_stmt (gsi);
5509 if (is_gimple_debug (stmt))
5510 continue;
5511 cost = estimate_num_insns (stmt, &eni_size_weights);
5512 lattice[i] = cost;
5513 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
5514 {
5515 tree use = USE_FROM_PTR (use_p);
5516 gimple *def_stmt;
5517 if (TREE_CODE (use) != SSA_NAME)
5518 continue;
5519 def_stmt = get_gimple_for_ssa_name (use);
5520 if (!def_stmt)
5521 continue;
5522 lattice[i] += lattice[gimple_uid (def_stmt)];
5523 }
5524 i++;
5525 if (!is_gimple_assign (stmt)
5526 || !commutative_tree_code (gimple_assign_rhs_code (stmt)))
5527 continue;
5528 op0 = gimple_op (stmt, 1);
5529 op1 = gimple_op (stmt, 2);
5530 if (TREE_CODE (op0) != SSA_NAME
5531 || TREE_CODE (op1) != SSA_NAME)
5532 continue;
5533 /* Swap operands if the second one is more expensive. */
5534 def0 = get_gimple_for_ssa_name (op0);
5535 def1 = get_gimple_for_ssa_name (op1);
5536 if (!def1)
5537 continue;
5538 swap = false;
5539 if (!def0 || lattice[gimple_uid (def1)] > lattice[gimple_uid (def0)])
5540 swap = true;
5541 if (swap)
5542 {
5543 if (dump_file && (dump_flags & TDF_DETAILS))
5544 {
5545 fprintf (dump_file, "Swap operands in stmt:\n");
5546 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
5547 fprintf (dump_file, "Cost left opnd=%d, right opnd=%d\n",
5548 def0 ? lattice[gimple_uid (def0)] : 0,
5549 lattice[gimple_uid (def1)]);
5550 }
5551 swap_ssa_operands (stmt, gimple_assign_rhs1_ptr (stmt),
5552 gimple_assign_rhs2_ptr (stmt));
5553 }
5554 }
5555 XDELETE (lattice);
5556 }
5557
5558 /* Expand basic block BB from GIMPLE trees to RTL. */
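/* For a simple non-control statement the translation is direct; e.g.
   (illustratively) the GIMPLE assignment
     a_1 = b_2 + c_3;
   becomes an insn roughly of the form
     (insn (set (reg:SI <a_1>) (plus:SI (reg:SI <b_2>) (reg:SI <c_3>))))
   while conditionals, tail calls and debug statements get the special
   handling below.  */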
5559
5560 static basic_block
5561 expand_gimple_basic_block (basic_block bb, bool disable_tail_calls)
5562 {
5563 gimple_stmt_iterator gsi;
5564 gimple_seq stmts;
5565 gimple *stmt = NULL;
5566 rtx_note *note = NULL;
5567 rtx_insn *last;
5568 edge e;
5569 edge_iterator ei;
5570
5571 if (dump_file)
5572 fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
5573 bb->index);
5574
5575 /* Note that since we are now transitioning from GIMPLE to RTL, we
5576 cannot use the gsi_*_bb() routines because they expect the basic
5577 block to be in GIMPLE, instead of RTL. Therefore, we need to
5578 access the BB sequence directly. */
5579 if (optimize)
5580 reorder_operands (bb);
5581 stmts = bb_seq (bb);
5582 bb->il.gimple.seq = NULL;
5583 bb->il.gimple.phi_nodes = NULL;
5584 rtl_profile_for_bb (bb);
5585 init_rtl_bb_info (bb);
5586 bb->flags |= BB_RTL;
5587
5588 /* Remove the RETURN_EXPR if we may fall through to the exit
5589 instead. */
5590 gsi = gsi_last (stmts);
5591 if (!gsi_end_p (gsi)
5592 && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
5593 {
5594 greturn *ret_stmt = as_a <greturn *> (gsi_stmt (gsi));
5595
5596 gcc_assert (single_succ_p (bb));
5597 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun));
5598
5599 if (bb->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
5600 && !gimple_return_retval (ret_stmt))
5601 {
5602 gsi_remove (&gsi, false);
5603 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
5604 }
5605 }
5606
5607 gsi = gsi_start (stmts);
5608 if (!gsi_end_p (gsi))
5609 {
5610 stmt = gsi_stmt (gsi);
5611 if (gimple_code (stmt) != GIMPLE_LABEL)
5612 stmt = NULL;
5613 }
5614
5615 rtx_code_label **elt = lab_rtx_for_bb->get (bb);
5616
5617 if (stmt || elt)
5618 {
5619 gcc_checking_assert (!note);
5620 last = get_last_insn ();
5621
5622 if (stmt)
5623 {
5624 expand_gimple_stmt (stmt);
5625 gsi_next (&gsi);
5626 }
5627
5628 if (elt)
5629 emit_label (*elt);
5630
5631 BB_HEAD (bb) = NEXT_INSN (last);
5632 if (NOTE_P (BB_HEAD (bb)))
5633 BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
5634 gcc_assert (LABEL_P (BB_HEAD (bb)));
5635 note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
5636
5637 maybe_dump_rtl_for_gimple_stmt (stmt, last);
5638 }
5639 else
5640 BB_HEAD (bb) = note = emit_note (NOTE_INSN_BASIC_BLOCK);
5641
5642 if (note)
5643 NOTE_BASIC_BLOCK (note) = bb;
5644
5645 for (; !gsi_end_p (gsi); gsi_next (&gsi))
5646 {
5647 basic_block new_bb;
5648
5649 stmt = gsi_stmt (gsi);
5650
5651 /* If this statement is a non-debug one, and we generate debug
5652 insns, then this one might be the last real use of a TERed
5653 SSA_NAME, while there are still some debug uses further
5654 down. Expanding the current SSA name in those later debug
5655 uses by its RHS might lead to wrong debug info, as coalescing
5656 might place the operands of that RHS into the same
5657 pseudo as something else. Like so:
5658 a_1 = a_0 + 1; // Assume a_1 is TERed and a_0 is dead
5659 use(a_1);
5660 a_2 = ...
5661 #DEBUG ... => a_1
5662 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
5663 If we were now to expand a_1 by its RHS (a_0 + 1) in the debug use,
5664 the write to a_2 would actually have clobbered the place which
5665 formerly held a_0.
5666
5667 So, instead of that, we recognize the situation, and generate
5668 debug temporaries at the last real use of TERed SSA names:
5669 a_1 = a_0 + 1;
5670 #DEBUG #D1 => a_1
5671 use(a_1);
5672 a_2 = ...
5673 #DEBUG ... => #D1
5674 */
5675 if (MAY_HAVE_DEBUG_BIND_INSNS
5676 && SA.values
5677 && !is_gimple_debug (stmt))
5678 {
5679 ssa_op_iter iter;
5680 tree op;
5681 gimple *def;
5682
5683 location_t sloc = curr_insn_location ();
5684
5685 /* Look for SSA names that have their last use here (TERed
5686 names always have only one real use). */
5687 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
5688 if ((def = get_gimple_for_ssa_name (op)))
5689 {
5690 imm_use_iterator imm_iter;
5691 use_operand_p use_p;
5692 bool have_debug_uses = false;
5693
5694 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
5695 {
5696 if (gimple_debug_bind_p (USE_STMT (use_p)))
5697 {
5698 have_debug_uses = true;
5699 break;
5700 }
5701 }
5702
5703 if (have_debug_uses)
5704 {
5705 /* OP is a TERed SSA name, with DEF its defining
5706 statement, and where OP is used in further debug
5707 instructions. Generate a debug temporary, and
5708 replace all uses of OP in debug insns with that
5709 temporary. */
5710 gimple *debugstmt;
5711 tree value = gimple_assign_rhs_to_tree (def);
5712 tree vexpr = make_node (DEBUG_EXPR_DECL);
5713 rtx val;
5714 machine_mode mode;
5715
5716 set_curr_insn_location (gimple_location (def));
5717
5718 DECL_ARTIFICIAL (vexpr) = 1;
5719 TREE_TYPE (vexpr) = TREE_TYPE (value);
5720 if (DECL_P (value))
5721 mode = DECL_MODE (value);
5722 else
5723 mode = TYPE_MODE (TREE_TYPE (value));
5724 SET_DECL_MODE (vexpr, mode);
5725
5726 val = gen_rtx_VAR_LOCATION
5727 (mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
5728
5729 emit_debug_insn (val);
5730
5731 FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
5732 {
5733 if (!gimple_debug_bind_p (debugstmt))
5734 continue;
5735
5736 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
5737 SET_USE (use_p, vexpr);
5738
5739 update_stmt (debugstmt);
5740 }
5741 }
5742 }
5743 set_curr_insn_location (sloc);
5744 }
5745
5746 currently_expanding_gimple_stmt = stmt;
5747
5748 /* Expand this statement, then evaluate the resulting RTL and
5749 fixup the CFG accordingly. */
5750 if (gimple_code (stmt) == GIMPLE_COND)
5751 {
5752 new_bb = expand_gimple_cond (bb, as_a <gcond *> (stmt));
5753 if (new_bb)
5754 return new_bb;
5755 }
5756 else if (is_gimple_debug (stmt))
5757 {
5758 location_t sloc = curr_insn_location ();
5759 gimple_stmt_iterator nsi = gsi;
5760
5761 for (;;)
5762 {
5763 tree var;
5764 tree value = NULL_TREE;
5765 rtx val = NULL_RTX;
5766 machine_mode mode;
5767
5768 if (!gimple_debug_nonbind_marker_p (stmt))
5769 {
5770 if (gimple_debug_bind_p (stmt))
5771 {
5772 var = gimple_debug_bind_get_var (stmt);
5773
5774 if (TREE_CODE (var) != DEBUG_EXPR_DECL
5775 && TREE_CODE (var) != LABEL_DECL
5776 && !target_for_debug_bind (var))
5777 goto delink_debug_stmt;
5778
5779 if (DECL_P (var))
5780 mode = DECL_MODE (var);
5781 else
5782 mode = TYPE_MODE (TREE_TYPE (var));
5783
5784 if (gimple_debug_bind_has_value_p (stmt))
5785 value = gimple_debug_bind_get_value (stmt);
5786
5787 val = gen_rtx_VAR_LOCATION
5788 (mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
5789 }
5790 else if (gimple_debug_source_bind_p (stmt))
5791 {
5792 var = gimple_debug_source_bind_get_var (stmt);
5793
5794 value = gimple_debug_source_bind_get_value (stmt);
5795
5796 mode = DECL_MODE (var);
5797
5798 val = gen_rtx_VAR_LOCATION (mode, var, (rtx)value,
5799 VAR_INIT_STATUS_UNINITIALIZED);
5800 }
5801 else
5802 gcc_unreachable ();
5803 }
5804 /* If this function was first compiled with markers
5805 enabled, but they're now disabled (e.g. by LTO), drop
5806 them on the floor. */
5807 else if (gimple_debug_nonbind_marker_p (stmt)
5808 && !MAY_HAVE_DEBUG_MARKER_INSNS)
5809 goto delink_debug_stmt;
5810 else if (gimple_debug_begin_stmt_p (stmt))
5811 val = GEN_RTX_DEBUG_MARKER_BEGIN_STMT_PAT ();
5812 else if (gimple_debug_inline_entry_p (stmt))
5813 {
5814 tree block = gimple_block (stmt);
5815
5816 if (block)
5817 val = GEN_RTX_DEBUG_MARKER_INLINE_ENTRY_PAT ();
5818 else
5819 goto delink_debug_stmt;
5820 }
5821 else
5822 gcc_unreachable ();
5823
5824 last = get_last_insn ();
5825
5826 set_curr_insn_location (gimple_location (stmt));
5827
5828 emit_debug_insn (val);
5829
5830 if (dump_file && (dump_flags & TDF_DETAILS))
5831 {
5832 /* We can't dump the insn with a TREE where an RTX
5833 is expected. */
5834 if (GET_CODE (val) == VAR_LOCATION)
5835 {
5836 gcc_checking_assert (PAT_VAR_LOCATION_LOC (val) == (rtx)value);
5837 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
5838 }
5839 maybe_dump_rtl_for_gimple_stmt (stmt, last);
5840 if (GET_CODE (val) == VAR_LOCATION)
5841 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
5842 }
5843
5844 delink_debug_stmt:
5845 /* In order not to generate too many debug temporaries,
5846 we delink all uses of debug statements we already expanded.
5847 Therefore debug statements between definition and real
5848 use of TERed SSA names will continue to use the SSA name,
5849 and not be replaced with debug temps. */
5850 delink_stmt_imm_use (stmt);
5851
5852 gsi = nsi;
5853 gsi_next (&nsi);
5854 if (gsi_end_p (nsi))
5855 break;
5856 stmt = gsi_stmt (nsi);
5857 if (!is_gimple_debug (stmt))
5858 break;
5859 }
5860
5861 set_curr_insn_location (sloc);
5862 }
5863 else
5864 {
5865 gcall *call_stmt = dyn_cast <gcall *> (stmt);
5866 if (call_stmt
5867 && gimple_call_tail_p (call_stmt)
5868 && disable_tail_calls)
5869 gimple_call_set_tail (call_stmt, false);
5870
5871 if (call_stmt && gimple_call_tail_p (call_stmt))
5872 {
5873 bool can_fallthru;
5874 new_bb = expand_gimple_tailcall (bb, call_stmt, &can_fallthru);
5875 if (new_bb)
5876 {
5877 if (can_fallthru)
5878 bb = new_bb;
5879 else
5880 return new_bb;
5881 }
5882 }
5883 else
5884 {
5885 def_operand_p def_p;
5886 def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
5887
5888 if (def_p != NULL)
5889 {
5890 /* Ignore this stmt if it is in the list of
5891 replaceable expressions. */
5892 if (SA.values
5893 && bitmap_bit_p (SA.values,
5894 SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
5895 continue;
5896 }
5897 last = expand_gimple_stmt (stmt);
5898 maybe_dump_rtl_for_gimple_stmt (stmt, last);
5899 }
5900 }
5901 }
5902
5903 currently_expanding_gimple_stmt = NULL;
5904
5905 /* Expand implicit goto and convert goto_locus. */
5906 FOR_EACH_EDGE (e, ei, bb->succs)
5907 {
5908 if (e->goto_locus != UNKNOWN_LOCATION)
5909 set_curr_insn_location (e->goto_locus);
5910 if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
5911 {
5912 emit_jump (label_rtx_for_bb (e->dest));
5913 e->flags &= ~EDGE_FALLTHRU;
5914 }
5915 }
5916
5917 /* Expanded RTL can create a jump in the last instruction of the block.
5918 This might later be assumed to be a jump to the successor and break edge insertion.
5919 We need to insert a dummy move to prevent this. PR41440. */
5920 if (single_succ_p (bb)
5921 && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
5922 && (last = get_last_insn ())
5923 && (JUMP_P (last)
5924 || (DEBUG_INSN_P (last)
5925 && JUMP_P (prev_nondebug_insn (last)))))
5926 {
5927 rtx dummy = gen_reg_rtx (SImode);
5928 emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
5929 }
5930
5931 do_pending_stack_adjust ();
5932
5933 /* Find the block tail. The last insn in the block is the insn
5934 before a barrier and/or table jump insn. */
5935 last = get_last_insn ();
5936 if (BARRIER_P (last))
5937 last = PREV_INSN (last);
5938 if (JUMP_TABLE_DATA_P (last))
5939 last = PREV_INSN (PREV_INSN (last));
5940 if (BARRIER_P (last))
5941 last = PREV_INSN (last);
5942 BB_END (bb) = last;
5943
5944 update_bb_for_insn (bb);
5945
5946 return bb;
5947 }
5948
5949
5950 /* Create a basic block for initialization code. */
5951
5952 static basic_block
5953 construct_init_block (void)
5954 {
5955 basic_block init_block, first_block;
5956 edge e = NULL;
5957 int flags;
5958
5959 /* Multiple entry points not supported yet. */
5960 gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs) == 1);
5961 init_rtl_bb_info (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5962 init_rtl_bb_info (EXIT_BLOCK_PTR_FOR_FN (cfun));
5963 ENTRY_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL;
5964 EXIT_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL;
5965
5966 e = EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun), 0);
5967
5968 /* When the entry edge points to the first basic block, we don't need a
5969 jump; otherwise we have to jump to the proper target. */
5970 if (e && e->dest != ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb)
5971 {
5972 tree label = gimple_block_label (e->dest);
5973
5974 emit_jump (jump_target_rtx (label));
5975 flags = 0;
5976 }
5977 else
5978 flags = EDGE_FALLTHRU;
5979
5980 init_block = create_basic_block (NEXT_INSN (get_insns ()),
5981 get_last_insn (),
5982 ENTRY_BLOCK_PTR_FOR_FN (cfun));
5983 init_block->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
5984 add_bb_to_loop (init_block, ENTRY_BLOCK_PTR_FOR_FN (cfun)->loop_father);
5985 if (e)
5986 {
5987 first_block = e->dest;
5988 redirect_edge_succ (e, init_block);
5989 make_single_succ_edge (init_block, first_block, flags);
5990 }
5991 else
5992 make_single_succ_edge (init_block, EXIT_BLOCK_PTR_FOR_FN (cfun),
5993 EDGE_FALLTHRU);
5994
5995 update_bb_for_insn (init_block);
5996 return init_block;
5997 }
5998
5999 /* For each lexical block, set BLOCK_NUMBER to the depth at which it is
6000 found in the block tree. */
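/* E.g. (illustrative), for a block tree
     BLOCK_0 { BLOCK_1 { BLOCK_2 } BLOCK_3 }
   where BLOCK_1 and BLOCK_3 are subblocks of BLOCK_0 and BLOCK_2 is a
   subblock of BLOCK_1, the assigned levels are 0, 1, 2 and 1.  */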
6001
6002 static void
6003 set_block_levels (tree block, int level)
6004 {
6005 while (block)
6006 {
6007 BLOCK_NUMBER (block) = level;
6008 set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
6009 block = BLOCK_CHAIN (block);
6010 }
6011 }
6012
6013 /* Create a block containing landing pads and similar stuff. */
6014
6015 static void
6016 construct_exit_block (void)
6017 {
6018 rtx_insn *head = get_last_insn ();
6019 rtx_insn *end;
6020 basic_block exit_block;
6021 edge e, e2;
6022 unsigned ix;
6023 edge_iterator ei;
6024 basic_block prev_bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
6025 rtx_insn *orig_end = BB_END (prev_bb);
6026
6027 rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));
6028
6029 /* Make sure the locus is set to the end of the function, so that
6030 epilogue line numbers and warnings are set properly. */
6031 if (LOCATION_LOCUS (cfun->function_end_locus) != UNKNOWN_LOCATION)
6032 input_location = cfun->function_end_locus;
6033
6034 /* Generate rtl for function exit. */
6035 expand_function_end ();
6036
6037 end = get_last_insn ();
6038 if (head == end)
6039 return;
6040 /* While emitting the function end we could have moved the end of the
6041 last basic block. */
6042 BB_END (prev_bb) = orig_end;
6043 while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
6044 head = NEXT_INSN (head);
6045 /* But make sure exit_block starts with RETURN_LABEL, otherwise the
6046 bb count bookkeeping will be confused. Any instructions before that
6047 label are emitted for the case where PREV_BB falls through into the
6048 exit block, so append those instructions to prev_bb in that case. */
6049 if (NEXT_INSN (head) != return_label)
6050 {
6051 while (NEXT_INSN (head) != return_label)
6052 {
6053 if (!NOTE_P (NEXT_INSN (head)))
6054 BB_END (prev_bb) = NEXT_INSN (head);
6055 head = NEXT_INSN (head);
6056 }
6057 }
6058 exit_block = create_basic_block (NEXT_INSN (head), end, prev_bb);
6059 exit_block->count = EXIT_BLOCK_PTR_FOR_FN (cfun)->count;
6060 add_bb_to_loop (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun)->loop_father);
6061
6062 ix = 0;
6063 while (ix < EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds))
6064 {
6065 e = EDGE_PRED (EXIT_BLOCK_PTR_FOR_FN (cfun), ix);
6066 if (!(e->flags & EDGE_ABNORMAL))
6067 redirect_edge_succ (e, exit_block);
6068 else
6069 ix++;
6070 }
6071
6072 e = make_single_succ_edge (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun),
6073 EDGE_FALLTHRU);
6074 FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
6075 if (e2 != e)
6076 {
6077 exit_block->count -= e2->count ();
6078 }
6079 update_bb_for_insn (exit_block);
6080 }
6081
6082 /* Helper function for discover_nonconstant_array_refs.
6083 Look for ARRAY_REF nodes with non-constant indexes and mark them
6084 addressable. */
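/* E.g. (illustrative):
     int a[2];  ...  return a[i];
   where I is not constant: A might otherwise be expanded into a single
   register, which RTL expansion cannot index at a variable offset, so A
   is marked TREE_ADDRESSABLE and kept in memory.  */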
6085
6086 static tree
6087 discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
6088 void *data ATTRIBUTE_UNUSED)
6089 {
6090 tree t = *tp;
6091
6092 if (IS_TYPE_OR_DECL_P (t))
6093 *walk_subtrees = 0;
6094 else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
6095 {
6096 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
6097 && is_gimple_min_invariant (TREE_OPERAND (t, 1))
6098 && (!TREE_OPERAND (t, 2)
6099 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
6100 || (TREE_CODE (t) == COMPONENT_REF
6101 && (!TREE_OPERAND (t,2)
6102 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
6103 || TREE_CODE (t) == BIT_FIELD_REF
6104 || TREE_CODE (t) == REALPART_EXPR
6105 || TREE_CODE (t) == IMAGPART_EXPR
6106 || TREE_CODE (t) == VIEW_CONVERT_EXPR
6107 || CONVERT_EXPR_P (t))
6108 t = TREE_OPERAND (t, 0);
6109
6110 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
6111 {
6112 t = get_base_address (t);
6113 if (t && DECL_P (t)
6114 && DECL_MODE (t) != BLKmode)
6115 TREE_ADDRESSABLE (t) = 1;
6116 }
6117
6118 *walk_subtrees = 0;
6119 }
6120 /* References of size POLY_INT_CST to a fixed-size object must go
6121 through memory. It's more efficient to force that here than
6122 to create temporary slots on the fly. */
6123 else if ((TREE_CODE (t) == MEM_REF || TREE_CODE (t) == TARGET_MEM_REF)
6124 && TYPE_SIZE (TREE_TYPE (t))
6125 && POLY_INT_CST_P (TYPE_SIZE (TREE_TYPE (t))))
6126 {
6127 tree base = get_base_address (t);
6128 if (base
6129 && DECL_P (base)
6130 && DECL_MODE (base) != BLKmode
6131 && GET_MODE_SIZE (DECL_MODE (base)).is_constant ())
6132 TREE_ADDRESSABLE (base) = 1;
6133 *walk_subtrees = 0;
6134 }
6135
6136 return NULL_TREE;
6137 }
6138
6139 /* If there's a chance that T will get a pseudo, and it would be of float mode
6140 while the actual access is via an integer mode (a lowered memcpy or similar
6141 access), avoid the register expansion if that float mode is likely not
6142 suitable for storing raw bits (like XFmode on i?86). */
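/* Illustrative case (hypothetical source) on i?86:
     long double d;
     unsigned long long u;
     __builtin_memcpy (&u, &d, sizeof u);
   The lowered copy reads D via an integer mode; if D lived in an XFmode
   register, its raw storage bits could not be recovered reliably, so D
   is marked addressable and kept in memory.  */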
6143
6144 static void
6145 avoid_type_punning_on_regs (tree t)
6146 {
6147 machine_mode access_mode = TYPE_MODE (TREE_TYPE (t));
6148 if (access_mode != BLKmode
6149 && !SCALAR_INT_MODE_P (access_mode))
6150 return;
6151 tree base = get_base_address (t);
6152 if (DECL_P (base)
6153 && !TREE_ADDRESSABLE (base)
6154 && FLOAT_MODE_P (DECL_MODE (base))
6155 && maybe_lt (GET_MODE_PRECISION (DECL_MODE (base)),
6156 GET_MODE_BITSIZE (GET_MODE_INNER (DECL_MODE (base))))
6157 /* Double check in the expensive way we really would get a pseudo. */
6158 && use_register_for_decl (base))
6159 TREE_ADDRESSABLE (base) = 1;
6160 }
6161
6162 /* RTL expansion is not able to compile array references with variable
6163 offsets for arrays stored in a single register. Discover such
6164 expressions and mark variables as addressable to avoid this
6165 scenario. */
6166
6167 static void
6168 discover_nonconstant_array_refs (void)
6169 {
6170 basic_block bb;
6171 gimple_stmt_iterator gsi;
6172
6173 FOR_EACH_BB_FN (bb, cfun)
6174 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6175 {
6176 gimple *stmt = gsi_stmt (gsi);
6177 if (!is_gimple_debug (stmt))
6178 {
6179 walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
6180 gcall *call = dyn_cast <gcall *> (stmt);
6181 if (call && gimple_call_internal_p (call))
6182 switch (gimple_call_internal_fn (call))
6183 {
6184 case IFN_LOAD_LANES:
6185 /* The source must be a MEM. */
6186 mark_addressable (gimple_call_arg (call, 0));
6187 break;
6188 case IFN_STORE_LANES:
6189 /* The destination must be a MEM. */
6190 mark_addressable (gimple_call_lhs (call));
6191 break;
6192 default:
6193 break;
6194 }
6195 if (gimple_vdef (stmt))
6196 {
6197 tree t = gimple_get_lhs (stmt);
6198 if (t && REFERENCE_CLASS_P (t))
6199 avoid_type_punning_on_regs (t);
6200 }
6201 }
6202 }
6203 }
6204
6205 /* This function sets crtl->args.internal_arg_pointer to a virtual
6206 register if DRAP is needed. Local register allocator will replace
6207 virtual_incoming_args_rtx with the virtual register. */
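/* An illustrative (hypothetical) case where DRAP is needed on x86:
     void f (int n)
     {
       float v[8] __attribute__ ((aligned (32)));
       char *p = __builtin_alloca (n);
       ...
     }
   Once the stack is realigned for V, incoming arguments can no longer be
   reached at a fixed offset from the realigned stack, so a separate
   dynamic realign argument pointer (DRAP) is used to address them.  */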
6208
6209 static void
6210 expand_stack_alignment (void)
6211 {
6212 rtx drap_rtx;
6213 unsigned int preferred_stack_boundary;
6214
6215 if (! SUPPORTS_STACK_ALIGNMENT)
6216 return;
6217
6218 if (cfun->calls_alloca
6219 || cfun->has_nonlocal_label
6220 || crtl->has_nonlocal_goto)
6221 crtl->need_drap = true;
6222
6223 /* Call update_stack_boundary here again to update incoming stack
6224 boundary. It may set incoming stack alignment to a different
6225 value after RTL expansion. TARGET_FUNCTION_OK_FOR_SIBCALL may
6226 use the minimum incoming stack alignment to check if it is OK
6227 to perform sibcall optimization since sibcall optimization will
6228 only align the outgoing stack to incoming stack boundary. */
6229 if (targetm.calls.update_stack_boundary)
6230 targetm.calls.update_stack_boundary ();
6231
6232 /* The incoming stack frame has to be aligned at least at
6233 parm_stack_boundary. */
6234 gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);
6235
6236 /* Update crtl->stack_alignment_estimated and use it later to align
6237 stack. We check PREFERRED_STACK_BOUNDARY if there may be non-call
6238 exceptions since callgraph doesn't collect incoming stack alignment
6239 in this case. */
6240 if (cfun->can_throw_non_call_exceptions
6241 && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
6242 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
6243 else
6244 preferred_stack_boundary = crtl->preferred_stack_boundary;
6245 if (preferred_stack_boundary > crtl->stack_alignment_estimated)
6246 crtl->stack_alignment_estimated = preferred_stack_boundary;
6247 if (preferred_stack_boundary > crtl->stack_alignment_needed)
6248 crtl->stack_alignment_needed = preferred_stack_boundary;
6249
6250 gcc_assert (crtl->stack_alignment_needed
6251 <= crtl->stack_alignment_estimated);
6252
6253 crtl->stack_realign_needed
6254 = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
6255 crtl->stack_realign_tried = crtl->stack_realign_needed;
6256
6257 crtl->stack_realign_processed = true;
6258
6259 /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
6260 alignment. */
6261 gcc_assert (targetm.calls.get_drap_rtx != NULL);
6262 drap_rtx = targetm.calls.get_drap_rtx ();
6263
6264 /* stack_realign_drap and drap_rtx must match. */
6265 gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));
6266
6267 /* Do nothing if NULL is returned, which means DRAP is not needed. */
6268 if (drap_rtx != NULL)
6269 {
6270 crtl->args.internal_arg_pointer = drap_rtx;
6271
6272 /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
6273 needed. */
6274 fixup_tail_calls ();
6275 }
6276 }
6277 \f
6278
6279 static void
6280 expand_main_function (void)
6281 {
6282 #if (defined(INVOKE__main) \
6283 || (!defined(HAS_INIT_SECTION) \
6284 && !defined(INIT_SECTION_ASM_OP) \
6285 && !defined(INIT_ARRAY_SECTION_ASM_OP)))
6286 emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode);
6287 #endif
6288 }
6289 \f
6290
6291 /* Expand code to initialize the stack_protect_guard. This is invoked at
6292 the beginning of a function to be protected. */
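/* Conceptually (illustrative), this emits the guard initialization
     guard_slot = __stack_chk_guard;
   using the target's combined address+copy or plain copy patterns when
   available, so the guard value is not left in a spillable register,
   and falling back to an ordinary move otherwise.  */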
6293
6294 static void
6295 stack_protect_prologue (void)
6296 {
6297 tree guard_decl = targetm.stack_protect_guard ();
6298 rtx x, y;
6299
6300 crtl->stack_protect_guard_decl = guard_decl;
6301 x = expand_normal (crtl->stack_protect_guard);
6302
6303 if (targetm.have_stack_protect_combined_set () && guard_decl)
6304 {
6305 gcc_assert (DECL_P (guard_decl));
6306 y = DECL_RTL (guard_decl);
6307
6308 /* Allow the target to compute the address of Y and copy it to X without
6309 leaking Y into a register. This combined address + copy pattern
6310 allows the target to prevent spilling of any intermediate results by
6311 splitting it after the register allocator. */
6312 if (rtx_insn *insn = targetm.gen_stack_protect_combined_set (x, y))
6313 {
6314 emit_insn (insn);
6315 return;
6316 }
6317 }
6318
6319 if (guard_decl)
6320 y = expand_normal (guard_decl);
6321 else
6322 y = const0_rtx;
6323
6324 /* Allow the target to copy from Y to X without leaking Y into a
6325 register. */
6326 if (targetm.have_stack_protect_set ())
6327 if (rtx_insn *insn = targetm.gen_stack_protect_set (x, y))
6328 {
6329 emit_insn (insn);
6330 return;
6331 }
6332
6333 /* Otherwise do a straight move. */
6334 emit_move_insn (x, y);
6335 }
6336
6337 /* Translate the intermediate representation contained in the CFG
6338 from GIMPLE trees to RTL.
6339
6340 We do conversion per basic block and preserve/update the tree CFG.
6341 This implies we have to do some magic as the CFG can simultaneously
6342 consist of basic blocks containing RTL and GIMPLE trees. This can
6343 confuse the CFG hooks, so be careful to not manipulate CFG during
6344 the expansion. */
6345
6346 namespace {
6347
6348 const pass_data pass_data_expand =
6349 {
6350 RTL_PASS, /* type */
6351 "expand", /* name */
6352 OPTGROUP_NONE, /* optinfo_flags */
6353 TV_EXPAND, /* tv_id */
6354 ( PROP_ssa | PROP_gimple_leh | PROP_cfg
6355 | PROP_gimple_lcx
6356 | PROP_gimple_lvec
6357 | PROP_gimple_lva), /* properties_required */
6358 PROP_rtl, /* properties_provided */
6359 ( PROP_ssa | PROP_trees ), /* properties_destroyed */
6360 0, /* todo_flags_start */
6361 0, /* todo_flags_finish */
6362 };
6363
6364 class pass_expand : public rtl_opt_pass
6365 {
6366 public:
6367 pass_expand (gcc::context *ctxt)
6368 : rtl_opt_pass (pass_data_expand, ctxt)
6369 {}
6370
6371 /* opt_pass methods: */
6372 virtual unsigned int execute (function *);
6373
6374 }; // class pass_expand
6375
6376 unsigned int
6377 pass_expand::execute (function *fun)
6378 {
6379 basic_block bb, init_block;
6380 edge_iterator ei;
6381 edge e;
6382 rtx_insn *var_seq, *var_ret_seq;
6383 unsigned i;
6384
6385 timevar_push (TV_OUT_OF_SSA);
6386 rewrite_out_of_ssa (&SA);
6387 timevar_pop (TV_OUT_OF_SSA);
6388 SA.partition_to_pseudo = XCNEWVEC (rtx, SA.map->num_partitions);
6389
6390 if (MAY_HAVE_DEBUG_BIND_STMTS && flag_tree_ter)
6391 {
6392 gimple_stmt_iterator gsi;
6393 FOR_EACH_BB_FN (bb, cfun)
6394 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6395 if (gimple_debug_bind_p (gsi_stmt (gsi)))
6396 avoid_deep_ter_for_debug (gsi_stmt (gsi), 0);
6397 }
6398
6399 /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE. */
6400 discover_nonconstant_array_refs ();
6401
6402 /* Make sure all values used by the optimization passes have sane
6403 defaults. */
6404 reg_renumber = 0;
6405
6406 /* Some backends want to know that we are expanding to RTL. */
6407 currently_expanding_to_rtl = 1;
6408 /* Dominators are not kept up-to-date as we may create new basic-blocks. */
6409 free_dominance_info (CDI_DOMINATORS);
6410
6411 rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
6412
6413 insn_locations_init ();
6414 if (!DECL_IS_UNDECLARED_BUILTIN (current_function_decl))
6415 {
6416 /* Eventually, all FEs should explicitly set function_start_locus. */
6417 if (LOCATION_LOCUS (fun->function_start_locus) == UNKNOWN_LOCATION)
6418 set_curr_insn_location
6419 (DECL_SOURCE_LOCATION (current_function_decl));
6420 else
6421 set_curr_insn_location (fun->function_start_locus);
6422 }
6423 else
6424 set_curr_insn_location (UNKNOWN_LOCATION);
6425 prologue_location = curr_insn_location ();
6426
6427 #ifdef INSN_SCHEDULING
6428 init_sched_attrs ();
6429 #endif
6430
6431 /* Make sure the first insn is a note even if we don't want linenums.
6432 This makes sure the first insn will never be deleted.
6433 Also, final expects a note to appear there. */
6434 emit_note (NOTE_INSN_DELETED);
6435
6436 targetm.expand_to_rtl_hook ();
6437 crtl->init_stack_alignment ();
6438 fun->cfg->max_jumptable_ents = 0;
6439
6440 /* Resolve the function section. Some targets, like ARM EABI, rely on knowledge
6441 of the function section at expansion time to predict the distance of calls. */
6442 resolve_unique_section (current_function_decl, 0, flag_function_sections);
6443
6444 /* Expand the variables recorded during gimple lowering. */
6445 timevar_push (TV_VAR_EXPAND);
6446 start_sequence ();
6447
6448 var_ret_seq = expand_used_vars ();
6449
6450 var_seq = get_insns ();
6451 end_sequence ();
6452 timevar_pop (TV_VAR_EXPAND);
6453
6454 /* Honor stack protection warnings. */
6455 if (warn_stack_protect)
6456 {
6457 if (fun->calls_alloca)
6458 warning (OPT_Wstack_protector,
6459 "stack protector not protecting local variables: "
6460 "variable length buffer");
6461 if (has_short_buffer && !crtl->stack_protect_guard)
6462 warning (OPT_Wstack_protector,
6463 "stack protector not protecting function: "
6464 "all local arrays are less than %d bytes long",
6465 (int) param_ssp_buffer_size);
6466 }
6467
6468 /* Set up parameters and prepare for return, for the function. */
6469 expand_function_start (current_function_decl);
6470
6471 /* If we emitted any instructions for setting up the variables,
6472 emit them before the FUNCTION_START note. */
6473 if (var_seq)
6474 {
6475 emit_insn_before (var_seq, parm_birth_insn);
6476
6477 /* In expand_function_end we'll insert the alloca save/restore
6478 before parm_birth_insn. We've just inserted an alloca call.
6479 Adjust the pointer to match. */
6480 parm_birth_insn = var_seq;
6481 }
6482
6483 /* Now propagate the RTL assignment of each partition to the
6484 underlying var of each SSA_NAME. */
6485 tree name;
6486
6487 FOR_EACH_SSA_NAME (i, name, cfun)
6488 {
6489 /* We might have generated new SSA names in
6490 update_alias_info_with_stack_vars. They will have NULL
6491 defining statements, and won't be part of the partitioning,
6492 so ignore those. */
6493 if (!SSA_NAME_DEF_STMT (name))
6494 continue;
6495
6496 adjust_one_expanded_partition_var (name);
6497 }
6498
6499 /* Clean up RTL of variables that straddle across multiple
6500 partitions, and check that the rtl of any PARM_DECLs that are not
6501 cleaned up is that of their default defs. */
6502 FOR_EACH_SSA_NAME (i, name, cfun)
6503 {
6504 int part;
6505
6506 /* We might have generated new SSA names in
6507 update_alias_info_with_stack_vars. They will have NULL
6508 defining statements, and won't be part of the partitioning,
6509 so ignore those. */
6510 if (!SSA_NAME_DEF_STMT (name))
6511 continue;
6512 part = var_to_partition (SA.map, name);
6513 if (part == NO_PARTITION)
6514 continue;
6515
6516 /* If this decl was marked as living in multiple places, reset
6517 this now to NULL. */
6518 tree var = SSA_NAME_VAR (name);
6519 if (var && DECL_RTL_IF_SET (var) == pc_rtx)
6520 SET_DECL_RTL (var, NULL);
6521 /* Check that the pseudos chosen by assign_parms are those of
6522 the corresponding default defs. */
6523 else if (SSA_NAME_IS_DEFAULT_DEF (name)
6524 && (TREE_CODE (var) == PARM_DECL
6525 || TREE_CODE (var) == RESULT_DECL))
6526 {
6527 rtx in = DECL_RTL_IF_SET (var);
6528 gcc_assert (in);
6529 rtx out = SA.partition_to_pseudo[part];
6530 gcc_assert (in == out);
6531
6532 /* Now reset VAR's RTL to IN, so that the _EXPR attrs match
6533 those expected by debug backends for each parm and for
6534 the result. This is particularly important for stabs,
6535 whose register elimination from parm's DECL_RTL may cause
6536 -fcompare-debug differences as SET_DECL_RTL changes reg's
6537 attrs. So, make sure the RTL already has the parm as the
6538 EXPR, so that it won't change. */
6539 SET_DECL_RTL (var, NULL_RTX);
6540 if (MEM_P (in))
6541 set_mem_attributes (in, var, true);
6542 SET_DECL_RTL (var, in);
6543 }
6544 }
6545
6546 /* If this function is `main', emit a call to `__main'
6547 to run global initializers, etc. */
6548 if (DECL_NAME (current_function_decl)
6549 && MAIN_NAME_P (DECL_NAME (current_function_decl))
6550 && DECL_FILE_SCOPE_P (current_function_decl))
6551 expand_main_function ();
6552
6553 /* Initialize the stack_protect_guard field. This must happen after the
6554 call to __main (if any) so that the external decl is initialized. */
6555 if (crtl->stack_protect_guard && targetm.stack_protect_runtime_enabled_p ())
6556 stack_protect_prologue ();
6557
6558 expand_phi_nodes (&SA);
6559
6560 /* Release any stale SSA redirection data. */
6561 redirect_edge_var_map_empty ();
6562
6563 /* Register rtl specific functions for cfg. */
6564 rtl_register_cfg_hooks ();
6565
6566 init_block = construct_init_block ();
6567
6568 /* Clear EDGE_EXECUTABLE on the entry edge(s). It is cleaned from the
6569 remaining edges later. */
6570 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (fun)->succs)
6571 e->flags &= ~EDGE_EXECUTABLE;
6572
6573 /* If the function has too many markers, drop them while expanding. */
6574 if (cfun->debug_marker_count
6575 >= param_max_debug_marker_count)
6576 cfun->debug_nonbind_markers = false;
6577
6578 lab_rtx_for_bb = new hash_map<basic_block, rtx_code_label *>;
6579 FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR_FOR_FN (fun),
6580 next_bb)
6581 bb = expand_gimple_basic_block (bb, var_ret_seq != NULL_RTX);
6582
6583 if (MAY_HAVE_DEBUG_BIND_INSNS)
6584 expand_debug_locations ();
6585
6586 if (deep_ter_debug_map)
6587 {
6588 delete deep_ter_debug_map;
6589 deep_ter_debug_map = NULL;
6590 }
6591
6592 /* Free stuff we no longer need after GIMPLE optimizations. */
6593 free_dominance_info (CDI_DOMINATORS);
6594 free_dominance_info (CDI_POST_DOMINATORS);
6595 delete_tree_cfg_annotations (fun);
6596
6597 timevar_push (TV_OUT_OF_SSA);
6598 finish_out_of_ssa (&SA);
6599 timevar_pop (TV_OUT_OF_SSA);
6600
6601 timevar_push (TV_POST_EXPAND);
6602 /* We are no longer in SSA form. */
6603 fun->gimple_df->in_ssa_p = false;
6604 loops_state_clear (LOOP_CLOSED_SSA);
6605
6606 /* Expansion is used by optimization passes too, set maybe_hot_insn_p
6607 conservatively to true until they are all profile aware. */
6608 delete lab_rtx_for_bb;
6609 free_histograms (fun);
6610
6611 construct_exit_block ();
6612 insn_locations_finalize ();
6613
6614 if (var_ret_seq)
6615 {
6616 rtx_insn *after = return_label;
6617 rtx_insn *next = NEXT_INSN (after);
6618 if (next && NOTE_INSN_BASIC_BLOCK_P (next))
6619 after = next;
6620 emit_insn_after (var_ret_seq, after);
6621 }
6622
6623 /* Zap the tree EH table. */
6624 set_eh_throw_stmt_table (fun, NULL);
6625
6626 /* We need JUMP_LABEL to be set in order to redirect jumps, and hence
6627 to split edges, which edge insertions might do. */
6628 rebuild_jump_labels (get_insns ());
6629
6630 /* If we have a single successor to the entry block, put the pending insns
6631 after parm birth, but before NOTE_INSN_FUNCTION_BEG. */
6632 if (single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (fun)))
6633 {
6634 edge e = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (fun));
6635 if (e->insns.r)
6636 {
6637 rtx_insn *insns = e->insns.r;
6638 e->insns.r = NULL;
6639 rebuild_jump_labels_chain (insns);
6640 if (NOTE_P (parm_birth_insn)
6641 && NOTE_KIND (parm_birth_insn) == NOTE_INSN_FUNCTION_BEG)
6642 emit_insn_before_noloc (insns, parm_birth_insn, e->dest);
6643 else
6644 emit_insn_after_noloc (insns, parm_birth_insn, e->dest);
6645 }
6646 }
6647
6648 /* Otherwise, as well as for other edges, take the usual way. */
6649 commit_edge_insertions ();
6650
6651 /* We're done expanding trees to RTL. */
6652 currently_expanding_to_rtl = 0;
6653
6654 flush_mark_addressable_queue ();
6655
6656 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (fun)->next_bb,
6657 EXIT_BLOCK_PTR_FOR_FN (fun), next_bb)
6658 {
6659 edge e;
6660 edge_iterator ei;
6661 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
6662 {
6663 /* Clear EDGE_EXECUTABLE. This flag is never used in the backend. */
6664 e->flags &= ~EDGE_EXECUTABLE;
6665
6666 /* At the moment not all abnormal edges match the RTL
6667 representation. It is safe to remove them here as
6668 find_many_sub_basic_blocks will rediscover them.
6669 In the future we should get this fixed properly. */
6670 if ((e->flags & EDGE_ABNORMAL)
6671 && !(e->flags & EDGE_SIBCALL))
6672 remove_edge (e);
6673 else
6674 ei_next (&ei);
6675 }
6676 }
6677
6678 auto_sbitmap blocks (last_basic_block_for_fn (fun));
6679 bitmap_ones (blocks);
6680 find_many_sub_basic_blocks (blocks);
6681 purge_all_dead_edges ();
6682
6683 /* After initial rtl generation, call back to finish generating
6684 exception support code. We need to do this before cleaning up
6685 the CFG as the code does not expect dead landing pads. */
6686 if (fun->eh->region_tree != NULL)
6687 finish_eh_generation ();
6688
6689 /* Call expand_stack_alignment after finishing all
6690 updates to crtl->preferred_stack_boundary. */
6691 expand_stack_alignment ();
6692
6693 /* Fixup REG_EQUIV notes in the prologue if there are tailcalls in this
6694 function. */
6695 if (crtl->tail_call_emit)
6696 fixup_tail_calls ();
6697
6698 unsigned HOST_WIDE_INT patch_area_size = function_entry_patch_area_size;
6699 unsigned HOST_WIDE_INT patch_area_entry = function_entry_patch_area_start;
6700
6701 tree patchable_function_entry_attr
6702 = lookup_attribute ("patchable_function_entry",
6703 DECL_ATTRIBUTES (cfun->decl));
6704 if (patchable_function_entry_attr)
6705 {
6706 tree pp_val = TREE_VALUE (patchable_function_entry_attr);
6707 tree patchable_function_entry_value1 = TREE_VALUE (pp_val);
6708
6709 patch_area_size = tree_to_uhwi (patchable_function_entry_value1);
6710 patch_area_entry = 0;
6711 if (TREE_CHAIN (pp_val) != NULL_TREE)
6712 {
6713 tree patchable_function_entry_value2
6714 = TREE_VALUE (TREE_CHAIN (pp_val));
6715 patch_area_entry = tree_to_uhwi (patchable_function_entry_value2);
6716 }
6717 }
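  /* For example (illustrative), with
       __attribute__ ((patchable_function_entry (5, 2)))
     PATCH_AREA_SIZE becomes 5 and PATCH_AREA_ENTRY becomes 2, i.e. two of
     the five NOPs are placed before the function entry point and the
     remaining three after it.  */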
6718
6719 if (patch_area_entry > patch_area_size)
6720 {
6721 if (patch_area_size > 0)
6722 warning (OPT_Wattributes,
6723 "patchable function entry %wu exceeds size %wu",
6724 patch_area_entry, patch_area_size);
6725 patch_area_entry = 0;
6726 }
6727
6728 crtl->patch_area_size = patch_area_size;
6729 crtl->patch_area_entry = patch_area_entry;
6730
6731 /* BB subdivision may have created basic blocks that are only reachable
6732 from unlikely bbs but not marked as such in the profile. */
6733 if (optimize)
6734 propagate_unlikely_bbs_forward ();
6735
6736 /* Remove unreachable blocks, otherwise we cannot compute dominators
6737 which are needed for loop state verification. As a side-effect
6738 this also compacts blocks.
6739 ??? We cannot remove trivially dead insns here as for example
6740 the DRAP reg on i?86 is not magically live at this point.
6741 gcc.c-torture/execute/ipa-sra-2.c execution, -Os -m32 fails otherwise. */
6742 cleanup_cfg (CLEANUP_NO_INSN_DEL);
6743
6744 checking_verify_flow_info ();
6745
6746 /* Initialize pseudos allocated for hard registers. */
6747 emit_initial_value_sets ();
6748
6749 /* And finally unshare all RTL. */
6750 unshare_all_rtl ();
6751
6752 /* There's no need to defer outputting this function any more; we
6753 know we want to output it. */
6754 DECL_DEFER_OUTPUT (current_function_decl) = 0;
6755
6756 /* Now that we're done expanding trees to RTL, we shouldn't have any
6757 more CONCATs anywhere. */
6758 generating_concat_p = 0;
6759
6760 if (dump_file)
6761 {
6762 fprintf (dump_file,
6763 "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
6764 /* And the pass manager will dump RTL for us. */
6765 }
6766
6767 /* If we're emitting a nested function, make sure its parent gets
6768 emitted as well. Doing otherwise confuses debug info. */
6769 {
6770 tree parent;
6771 for (parent = DECL_CONTEXT (current_function_decl);
6772 parent != NULL_TREE;
6773 parent = get_containing_scope (parent))
6774 if (TREE_CODE (parent) == FUNCTION_DECL)
6775 TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
6776 }
6777
6778 TREE_ASM_WRITTEN (current_function_decl) = 1;
6779
6780 /* After expanding, the return labels are no longer needed. */
6781 return_label = NULL;
6782 naked_return_label = NULL;
6783
6784 /* After expanding, the tm_restart map is no longer needed. */
6785 if (fun->gimple_df->tm_restart)
6786 fun->gimple_df->tm_restart = NULL;
6787
6788 /* Tag the blocks with a depth number so that change_scope can find
6789 the common parent easily. */
6790 set_block_levels (DECL_INITIAL (fun->decl), 0);
6791 default_rtl_profile ();
6792
6793 /* For -dx discard loops now, otherwise IL verify in clean_state will
6794 ICE. */
6795 if (rtl_dump_and_exit)
6796 {
6797 cfun->curr_properties &= ~PROP_loops;
6798 loop_optimizer_finalize ();
6799 }
6800
6801 timevar_pop (TV_POST_EXPAND);
6802
6803 return 0;
6804 }
6805
6806 } // anon namespace
6807
6808 rtl_opt_pass *
6809 make_pass_expand (gcc::context *ctxt)
6810 {
6811 return new pass_expand (ctxt);
6812 }