c++: Correct the handling of alignof(expr) [PR88115]
gcc/gimplify.c
1 /* Tree lowering pass. This pass converts the GENERIC functions-as-trees
2 tree representation into the GIMPLE form.
3 Copyright (C) 2002-2020 Free Software Foundation, Inc.
4 Major work done by Sebastian Pop <s.pop@laposte.net>,
5 Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "backend.h"
27 #include "target.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "memmodel.h"
31 #include "tm_p.h"
32 #include "gimple.h"
33 #include "gimple-predict.h"
34 #include "tree-pass.h" /* FIXME: only for PROP_gimple_any */
35 #include "ssa.h"
36 #include "cgraph.h"
37 #include "tree-pretty-print.h"
38 #include "diagnostic-core.h"
39 #include "alias.h"
40 #include "fold-const.h"
41 #include "calls.h"
42 #include "varasm.h"
43 #include "stmt.h"
44 #include "expr.h"
45 #include "gimple-fold.h"
46 #include "tree-eh.h"
47 #include "gimplify.h"
48 #include "gimple-iterator.h"
49 #include "stor-layout.h"
50 #include "print-tree.h"
51 #include "tree-iterator.h"
52 #include "tree-inline.h"
53 #include "langhooks.h"
54 #include "tree-cfg.h"
55 #include "tree-ssa.h"
56 #include "omp-general.h"
57 #include "omp-low.h"
58 #include "gimple-low.h"
59 #include "gomp-constants.h"
60 #include "splay-tree.h"
61 #include "gimple-walk.h"
62 #include "langhooks-def.h" /* FIXME: for lhd_set_decl_assembler_name */
63 #include "builtins.h"
64 #include "stringpool.h"
65 #include "attribs.h"
66 #include "asan.h"
67 #include "dbgcnt.h"
68 #include "omp-offload.h"
69 #include "context.h"
70 #include "tree-nested.h"
71
72 /* Hash set of poisoned variables in a bind expr. */
73 static hash_set<tree> *asan_poisoned_variables = NULL;
74
75 enum gimplify_omp_var_data
76 {
77 GOVD_SEEN = 0x000001,
78 GOVD_EXPLICIT = 0x000002,
79 GOVD_SHARED = 0x000004,
80 GOVD_PRIVATE = 0x000008,
81 GOVD_FIRSTPRIVATE = 0x000010,
82 GOVD_LASTPRIVATE = 0x000020,
83 GOVD_REDUCTION = 0x000040,
84 GOVD_LOCAL = 0x000080,
85 GOVD_MAP = 0x000100,
86 GOVD_DEBUG_PRIVATE = 0x000200,
87 GOVD_PRIVATE_OUTER_REF = 0x000400,
88 GOVD_LINEAR = 0x000800,
89 GOVD_ALIGNED = 0x001000,
90
91 /* Flag for GOVD_MAP: don't copy back. */
92 GOVD_MAP_TO_ONLY = 0x002000,
93
94 /* Flag for GOVD_LINEAR or GOVD_LASTPRIVATE: no outer reference. */
95 GOVD_LINEAR_LASTPRIVATE_NO_OUTER = 0x004000,
96
97 GOVD_MAP_0LEN_ARRAY = 0x008000,
98
99 /* Flag for GOVD_MAP, if it is an "always, to" or "always, tofrom" mapping. */
100 GOVD_MAP_ALWAYS_TO = 0x010000,
101
102 /* Flag for shared vars that are or might be stored to in the region. */
103 GOVD_WRITTEN = 0x020000,
104
105 /* Flag for GOVD_MAP, if it is a forced mapping. */
106 GOVD_MAP_FORCE = 0x040000,
107
108 /* Flag for GOVD_MAP: must be present already. */
109 GOVD_MAP_FORCE_PRESENT = 0x080000,
110
111 /* Flag for GOVD_MAP: only allocate. */
112 GOVD_MAP_ALLOC_ONLY = 0x100000,
113
114 /* Flag for GOVD_MAP: only copy back. */
115 GOVD_MAP_FROM_ONLY = 0x200000,
116
117 GOVD_NONTEMPORAL = 0x400000,
118
119 /* Flag for GOVD_LASTPRIVATE: conditional modifier. */
120 GOVD_LASTPRIVATE_CONDITIONAL = 0x800000,
121
122 GOVD_CONDTEMP = 0x1000000,
123
124 /* Flag for GOVD_REDUCTION: inscan seen in {in,ex}clusive clause. */
125 GOVD_REDUCTION_INSCAN = 0x2000000,
126
127 /* Flag for GOVD_MAP: (struct) vars that have pointer attachments for
128 fields. */
129 GOVD_MAP_HAS_ATTACHMENTS = 0x4000000,
130
131 GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
132 | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LINEAR
133 | GOVD_LOCAL)
134 };
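/* The splay tree in each gimplify_omp_ctx maps a decl to a mask of the
   flags above. As a minimal illustration, gimple_add_tmp_var below
   records a temporary that is local to the nearest enclosing parallel
   roughly as

     omp_add_variable (ctx, tmp, GOVD_LOCAL | GOVD_SEEN);

   and a later query can mask the recorded value with
   GOVD_DATA_SHARE_CLASS to recover the data-sharing kind. */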
135
136
137 enum omp_region_type
138 {
139 ORT_WORKSHARE = 0x00,
140 ORT_TASKGROUP = 0x01,
141 ORT_SIMD = 0x04,
142
143 ORT_PARALLEL = 0x08,
144 ORT_COMBINED_PARALLEL = ORT_PARALLEL | 1,
145
146 ORT_TASK = 0x10,
147 ORT_UNTIED_TASK = ORT_TASK | 1,
148 ORT_TASKLOOP = ORT_TASK | 2,
149 ORT_UNTIED_TASKLOOP = ORT_UNTIED_TASK | 2,
150
151 ORT_TEAMS = 0x20,
152 ORT_COMBINED_TEAMS = ORT_TEAMS | 1,
153 ORT_HOST_TEAMS = ORT_TEAMS | 2,
154 ORT_COMBINED_HOST_TEAMS = ORT_COMBINED_TEAMS | 2,
155
156 /* Data region. */
157 ORT_TARGET_DATA = 0x40,
158
159 /* Data region with offloading. */
160 ORT_TARGET = 0x80,
161 ORT_COMBINED_TARGET = ORT_TARGET | 1,
162 ORT_IMPLICIT_TARGET = ORT_TARGET | 2,
163
164 /* OpenACC variants. */
165 ORT_ACC = 0x100, /* A generic OpenACC region. */
166 ORT_ACC_DATA = ORT_ACC | ORT_TARGET_DATA, /* Data construct. */
167 ORT_ACC_PARALLEL = ORT_ACC | ORT_TARGET, /* Parallel construct. */
168 ORT_ACC_KERNELS = ORT_ACC | ORT_TARGET | 2, /* Kernels construct. */
169 ORT_ACC_SERIAL = ORT_ACC | ORT_TARGET | 4, /* Serial construct. */
170 ORT_ACC_HOST_DATA = ORT_ACC | ORT_TARGET_DATA | 2, /* Host data. */
171
172 /* Dummy OpenMP region, used to disable expansion of
173 DECL_VALUE_EXPRs in taskloop pre body. */
174 ORT_NONE = 0x200
175 };
176
177 /* Gimplify hashtable helper. */
178
179 struct gimplify_hasher : free_ptr_hash <elt_t>
180 {
181 static inline hashval_t hash (const elt_t *);
182 static inline bool equal (const elt_t *, const elt_t *);
183 };
184
185 struct gimplify_ctx
186 {
187 struct gimplify_ctx *prev_context;
188
189 vec<gbind *> bind_expr_stack;
190 tree temps;
191 gimple_seq conditional_cleanups;
192 tree exit_label;
193 tree return_temp;
194
195 vec<tree> case_labels;
196 hash_set<tree> *live_switch_vars;
197 /* The formal temporary table. Should this be persistent? */
198 hash_table<gimplify_hasher> *temp_htab;
199
200 int conditions;
201 unsigned into_ssa : 1;
202 unsigned allow_rhs_cond_expr : 1;
203 unsigned in_cleanup_point_expr : 1;
204 unsigned keep_stack : 1;
205 unsigned save_stack : 1;
206 unsigned in_switch_expr : 1;
207 };
208
209 enum gimplify_defaultmap_kind
210 {
211 GDMK_SCALAR,
212 GDMK_AGGREGATE,
213 GDMK_ALLOCATABLE,
214 GDMK_POINTER
215 };
216
217 struct gimplify_omp_ctx
218 {
219 struct gimplify_omp_ctx *outer_context;
220 splay_tree variables;
221 hash_set<tree> *privatized_types;
222 tree clauses;
223 /* Iteration variables in an OMP_FOR. */
224 vec<tree> loop_iter_var;
225 location_t location;
226 enum omp_clause_default_kind default_kind;
227 enum omp_region_type region_type;
228 enum tree_code code;
229 bool combined_loop;
230 bool distribute;
231 bool target_firstprivatize_array_bases;
232 bool add_safelen1;
233 bool order_concurrent;
234 int defaultmap[4];
235 };
236
237 static struct gimplify_ctx *gimplify_ctxp;
238 static struct gimplify_omp_ctx *gimplify_omp_ctxp;
239 static bool in_omp_construct;
240
241 /* Forward declaration. */
242 static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
243 static hash_map<tree, tree> *oacc_declare_returns;
244 static enum gimplify_status gimplify_expr (tree *, gimple_seq *, gimple_seq *,
245 bool (*) (tree), fallback_t, bool);
246
247 /* Shorter alias for gimple_seq_add_stmt_without_update, for use in
248 gimplify.c only. */
249
250 static inline void
251 gimplify_seq_add_stmt (gimple_seq *seq_p, gimple *gs)
252 {
253 gimple_seq_add_stmt_without_update (seq_p, gs);
254 }
255
256 /* Append sequence SRC to the end of sequence *DST_P. If *DST_P is
257 NULL, a new sequence is allocated. This function is
258 similar to gimple_seq_add_seq, but does not scan the operands.
259 During gimplification, we need to manipulate statement sequences
260 before the def/use vectors have been constructed. */
261
262 static void
263 gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
264 {
265 gimple_stmt_iterator si;
266
267 if (src == NULL)
268 return;
269
270 si = gsi_last (*dst_p);
271 gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
272 }
273
274
275 /* Pointer to a list of allocated gimplify_ctx structs to be used for pushing
276 and popping gimplify contexts. */
277
278 static struct gimplify_ctx *ctx_pool = NULL;
279
280 /* Return a gimplify context struct from the pool. */
281
282 static inline struct gimplify_ctx *
283 ctx_alloc (void)
284 {
285 struct gimplify_ctx * c = ctx_pool;
286
287 if (c)
288 ctx_pool = c->prev_context;
289 else
290 c = XNEW (struct gimplify_ctx);
291
292 memset (c, '\0', sizeof (*c));
293 return c;
294 }
295
296 /* Put gimplify context C back into the pool. */
297
298 static inline void
299 ctx_free (struct gimplify_ctx *c)
300 {
301 c->prev_context = ctx_pool;
302 ctx_pool = c;
303 }
304
305 /* Free allocated ctx stack memory. */
306
307 void
308 free_gimplify_stack (void)
309 {
310 struct gimplify_ctx *c;
311
312 while ((c = ctx_pool))
313 {
314 ctx_pool = c->prev_context;
315 free (c);
316 }
317 }
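/* A sketch of the intended pairing (the callers, e.g. gimplify_body,
   live further down or in other files):

     push_gimplify_context ();     takes a struct from ctx_pool
     ... gimplify the body ...
     pop_gimplify_context (bind);  returns the struct via ctx_free
     free_gimplify_stack ();       once, at the end of compilation

   so after the pool is warm, entering a context allocates nothing. */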
318
319
320 /* Set up a context for the gimplifier. */
321
322 void
323 push_gimplify_context (bool in_ssa, bool rhs_cond_ok)
324 {
325 struct gimplify_ctx *c = ctx_alloc ();
326
327 c->prev_context = gimplify_ctxp;
328 gimplify_ctxp = c;
329 gimplify_ctxp->into_ssa = in_ssa;
330 gimplify_ctxp->allow_rhs_cond_expr = rhs_cond_ok;
331 }
332
333 /* Tear down a context for the gimplifier. If BODY is non-null, then
334 put the temporaries into the outer BIND_EXPR. Otherwise, put them
335 in the local_decls.
336
337 BODY is not a sequence, but the first tuple in a sequence. */
338
339 void
340 pop_gimplify_context (gimple *body)
341 {
342 struct gimplify_ctx *c = gimplify_ctxp;
343
344 gcc_assert (c
345 && (!c->bind_expr_stack.exists ()
346 || c->bind_expr_stack.is_empty ()));
347 c->bind_expr_stack.release ();
348 gimplify_ctxp = c->prev_context;
349
350 if (body)
351 declare_vars (c->temps, body, false);
352 else
353 record_vars (c->temps);
354
355 delete c->temp_htab;
356 c->temp_htab = NULL;
357 ctx_free (c);
358 }
359
360 /* Push a GIMPLE_BIND tuple onto the stack of bindings. */
361
362 static void
363 gimple_push_bind_expr (gbind *bind_stmt)
364 {
365 gimplify_ctxp->bind_expr_stack.reserve (8);
366 gimplify_ctxp->bind_expr_stack.safe_push (bind_stmt);
367 }
368
369 /* Pop the topmost element off the stack of bindings. */
370
371 static void
372 gimple_pop_bind_expr (void)
373 {
374 gimplify_ctxp->bind_expr_stack.pop ();
375 }
376
377 /* Return the topmost element of the stack of bindings. */
378
379 gbind *
380 gimple_current_bind_expr (void)
381 {
382 return gimplify_ctxp->bind_expr_stack.last ();
383 }
384
385 /* Return the stack of bindings created during gimplification. */
386
387 vec<gbind *>
388 gimple_bind_expr_stack (void)
389 {
390 return gimplify_ctxp->bind_expr_stack;
391 }
392
393 /* Return true iff there is a COND_EXPR between us and the innermost
394 CLEANUP_POINT_EXPR. This info is used by gimple_push_cleanup. */
395
396 static bool
397 gimple_conditional_context (void)
398 {
399 return gimplify_ctxp->conditions > 0;
400 }
401
402 /* Note that we've entered a COND_EXPR. */
403
404 static void
405 gimple_push_condition (void)
406 {
407 #ifdef ENABLE_GIMPLE_CHECKING
408 if (gimplify_ctxp->conditions == 0)
409 gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
410 #endif
411 ++(gimplify_ctxp->conditions);
412 }
413
414 /* Note that we've left a COND_EXPR. If we're back at unconditional scope
415 now, add any conditional cleanups we've seen to the prequeue. */
416
417 static void
418 gimple_pop_condition (gimple_seq *pre_p)
419 {
420 int conds = --(gimplify_ctxp->conditions);
421
422 gcc_assert (conds >= 0);
423 if (conds == 0)
424 {
425 gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
426 gimplify_ctxp->conditional_cleanups = NULL;
427 }
428 }
429
430 /* A stable comparison routine for use with splay trees and DECLs. */
431
432 static int
433 splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
434 {
435 tree a = (tree) xa;
436 tree b = (tree) xb;
437
438 return DECL_UID (a) - DECL_UID (b);
439 }
440
441 /* Create a new omp construct that deals with variable remapping. */
442
443 static struct gimplify_omp_ctx *
444 new_omp_context (enum omp_region_type region_type)
445 {
446 struct gimplify_omp_ctx *c;
447
448 c = XCNEW (struct gimplify_omp_ctx);
449 c->outer_context = gimplify_omp_ctxp;
450 c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
451 c->privatized_types = new hash_set<tree>;
452 c->location = input_location;
453 c->region_type = region_type;
454 if ((region_type & ORT_TASK) == 0)
455 c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
456 else
457 c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;
458 c->defaultmap[GDMK_SCALAR] = GOVD_MAP;
459 c->defaultmap[GDMK_AGGREGATE] = GOVD_MAP;
460 c->defaultmap[GDMK_ALLOCATABLE] = GOVD_MAP;
461 c->defaultmap[GDMK_POINTER] = GOVD_MAP;
462
463 return c;
464 }
465
466 /* Destroy an omp construct that deals with variable remapping. */
467
468 static void
469 delete_omp_context (struct gimplify_omp_ctx *c)
470 {
471 splay_tree_delete (c->variables);
472 delete c->privatized_types;
473 c->loop_iter_var.release ();
474 XDELETE (c);
475 }
476
477 static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
478 static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);
479
480 /* Both gimplify the statement T and append it to *SEQ_P. This function
481 behaves exactly as gimplify_stmt, but you don't have to pass T as a
482 reference. */
483
484 void
485 gimplify_and_add (tree t, gimple_seq *seq_p)
486 {
487 gimplify_stmt (&t, seq_p);
488 }
489
490 /* Gimplify statement T into sequence *SEQ_P, and return the first
491 tuple in the sequence of generated tuples for this statement.
492 Return NULL if gimplifying T produced no tuples. */
493
494 static gimple *
495 gimplify_and_return_first (tree t, gimple_seq *seq_p)
496 {
497 gimple_stmt_iterator last = gsi_last (*seq_p);
498
499 gimplify_and_add (t, seq_p);
500
501 if (!gsi_end_p (last))
502 {
503 gsi_next (&last);
504 return gsi_stmt (last);
505 }
506 else
507 return gimple_seq_first_stmt (*seq_p);
508 }
509
510 /* Returns true iff T is a valid RHS for an assignment to an un-renamed
511 LHS, or for a call argument. */
512
513 static bool
514 is_gimple_mem_rhs (tree t)
515 {
516 /* If we're dealing with a renamable type, either source or dest must be
517 a renamed variable. */
518 if (is_gimple_reg_type (TREE_TYPE (t)))
519 return is_gimple_val (t);
520 else
521 return is_gimple_val (t) || is_gimple_lvalue (t);
522 }
523
524 /* Return true if T is a CALL_EXPR or an expression that can be
525 assigned to a temporary. Note that this predicate should only be
526 used during gimplification. See the rationale for this in
527 gimplify_modify_expr. */
528
529 static bool
530 is_gimple_reg_rhs_or_call (tree t)
531 {
532 return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
533 || TREE_CODE (t) == CALL_EXPR);
534 }
535
536 /* Return true if T is a valid memory RHS or a CALL_EXPR. Note that
537 this predicate should only be used during gimplification. See the
538 rationale for this in gimplify_modify_expr. */
539
540 static bool
541 is_gimple_mem_rhs_or_call (tree t)
542 {
543 /* If we're dealing with a renamable type, either source or dest must be
544 a renamed variable. */
545 if (is_gimple_reg_type (TREE_TYPE (t)))
546 return is_gimple_val (t);
547 else
548 return (is_gimple_val (t)
549 || is_gimple_lvalue (t)
550 || TREE_CLOBBER_P (t)
551 || TREE_CODE (t) == CALL_EXPR);
552 }
553
554 /* Create a temporary with a name derived from VAL. Subroutine of
555 lookup_tmp_var; nobody else should call this function. */
556
557 static inline tree
558 create_tmp_from_val (tree val)
559 {
560 /* Drop all qualifiers and address-space information from the value type. */
561 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (val));
562 tree var = create_tmp_var (type, get_name (val));
563 return var;
564 }
565
566 /* Create a temporary to hold the value of VAL. If IS_FORMAL, try to reuse
567 an existing expression temporary. */
568
569 static tree
570 lookup_tmp_var (tree val, bool is_formal)
571 {
572 tree ret;
573
574 /* If not optimizing, never really reuse a temporary. local-alloc
575 won't allocate any variable that is used in more than one basic
576 block, which means it will go into memory, causing much extra
577 work in reload and final and poorer code generation, outweighing
578 the extra memory allocation here. */
579 if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
580 ret = create_tmp_from_val (val);
581 else
582 {
583 elt_t elt, *elt_p;
584 elt_t **slot;
585
586 elt.val = val;
587 if (!gimplify_ctxp->temp_htab)
588 gimplify_ctxp->temp_htab = new hash_table<gimplify_hasher> (1000);
589 slot = gimplify_ctxp->temp_htab->find_slot (&elt, INSERT);
590 if (*slot == NULL)
591 {
592 elt_p = XNEW (elt_t);
593 elt_p->val = val;
594 elt_p->temp = ret = create_tmp_from_val (val);
595 *slot = elt_p;
596 }
597 else
598 {
599 elt_p = *slot;
600 ret = elt_p->temp;
601 }
602 }
603
604 return ret;
605 }
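/* For illustration, assuming -O and no side effects: gimplifying
   a = (b + c) * (b + c) with formal temporaries yields roughly

     D.1 = b + c;     first lookup_tmp_var (b + c) creates D.1
     a = D.1 * D.1;   second lookup finds the temp_htab entry

   whereas at -O0, or when VAL has side effects, every request gets a
   fresh temporary. (D.1 stands for the usual anonymous temp name.) */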
606
607 /* Helper for get_formal_tmp_var and get_initialized_tmp_var. */
608
609 static tree
610 internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
611 bool is_formal, bool allow_ssa)
612 {
613 tree t, mod;
614
615 /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
616 can create an INIT_EXPR and convert it into a GIMPLE_CALL below. */
617 gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
618 fb_rvalue);
619
620 if (allow_ssa
621 && gimplify_ctxp->into_ssa
622 && is_gimple_reg_type (TREE_TYPE (val)))
623 {
624 t = make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val)));
625 if (! gimple_in_ssa_p (cfun))
626 {
627 const char *name = get_name (val);
628 if (name)
629 SET_SSA_NAME_VAR_OR_IDENTIFIER (t, create_tmp_var_name (name));
630 }
631 }
632 else
633 t = lookup_tmp_var (val, is_formal);
634
635 mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));
636
637 SET_EXPR_LOCATION (mod, EXPR_LOC_OR_LOC (val, input_location));
638
639 /* gimplify_modify_expr might want to reduce this further. */
640 gimplify_and_add (mod, pre_p);
641 ggc_free (mod);
642
643 return t;
644 }
645
646 /* Return a formal temporary variable initialized with VAL. PRE_P is as
647 in gimplify_expr. Only use this function if:
648
649 1) The value of the unfactored expression represented by VAL will not
650 change between the initialization and use of the temporary, and
651 2) The temporary will not be otherwise modified.
652
653 For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
654 and #2 means it is inappropriate for && temps.
655
656 For other cases, use get_initialized_tmp_var instead. */
657
658 tree
659 get_formal_tmp_var (tree val, gimple_seq *pre_p)
660 {
661 return internal_get_tmp_var (val, pre_p, NULL, true, true);
662 }
663
664 /* Return a temporary variable initialized with VAL. PRE_P and POST_P
665 are as in gimplify_expr. */
666
667 tree
668 get_initialized_tmp_var (tree val, gimple_seq *pre_p,
669 gimple_seq *post_p /* = NULL */,
670 bool allow_ssa /* = true */)
671 {
672 return internal_get_tmp_var (val, pre_p, post_p, false, allow_ssa);
673 }
674
675 /* Declare all the variables in VARS in the scope of bind statement GS.
676 If DEBUG_INFO is true, generate debug info for them; otherwise don't. */
677
678 void
679 declare_vars (tree vars, gimple *gs, bool debug_info)
680 {
681 tree last = vars;
682 if (last)
683 {
684 tree temps, block;
685
686 gbind *scope = as_a <gbind *> (gs);
687
688 temps = nreverse (last);
689
690 block = gimple_bind_block (scope);
691 gcc_assert (!block || TREE_CODE (block) == BLOCK);
692 if (!block || !debug_info)
693 {
694 DECL_CHAIN (last) = gimple_bind_vars (scope);
695 gimple_bind_set_vars (scope, temps);
696 }
697 else
698 {
699 /* We need to attach the nodes both to the BIND_EXPR and to its
700 associated BLOCK for debugging purposes. The key point here
701 is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
702 is a subchain of the BIND_EXPR_VARS of the BIND_EXPR. */
703 if (BLOCK_VARS (block))
704 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
705 else
706 {
707 gimple_bind_set_vars (scope,
708 chainon (gimple_bind_vars (scope), temps));
709 BLOCK_VARS (block) = temps;
710 }
711 }
712 }
713 }
714
715 /* For VAR a VAR_DECL of variable size, try to find a constant upper bound
716 for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly. Abort if
717 no such upper bound can be obtained. */
718
719 static void
720 force_constant_size (tree var)
721 {
722 /* The only attempt we make is by querying the maximum size of objects
723 of the variable's type. */
724
725 HOST_WIDE_INT max_size;
726
727 gcc_assert (VAR_P (var));
728
729 max_size = max_int_size_in_bytes (TREE_TYPE (var));
730
731 gcc_assert (max_size >= 0);
732
733 DECL_SIZE_UNIT (var)
734 = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
735 DECL_SIZE (var)
736 = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
737 }
738
739 /* Push the temporary variable TMP into the current binding. */
740
741 void
742 gimple_add_tmp_var_fn (struct function *fn, tree tmp)
743 {
744 gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));
745
746 /* Later processing assumes that the object size is constant, which might
747 not be true at this point. Force the use of a constant upper bound in
748 this case. */
749 if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp)))
750 force_constant_size (tmp);
751
752 DECL_CONTEXT (tmp) = fn->decl;
753 DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;
754
755 record_vars_into (tmp, fn->decl);
756 }
757
758 /* Push the temporary variable TMP into the current binding. */
759
760 void
761 gimple_add_tmp_var (tree tmp)
762 {
763 gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));
764
765 /* Later processing assumes that the object size is constant, which might
766 not be true at this point. Force the use of a constant upper bound in
767 this case. */
768 if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp)))
769 force_constant_size (tmp);
770
771 DECL_CONTEXT (tmp) = current_function_decl;
772 DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;
773
774 if (gimplify_ctxp)
775 {
776 DECL_CHAIN (tmp) = gimplify_ctxp->temps;
777 gimplify_ctxp->temps = tmp;
778
779 /* Mark temporaries local within the nearest enclosing parallel. */
780 if (gimplify_omp_ctxp)
781 {
782 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
783 int flag = GOVD_LOCAL;
784 while (ctx
785 && (ctx->region_type == ORT_WORKSHARE
786 || ctx->region_type == ORT_TASKGROUP
787 || ctx->region_type == ORT_SIMD
788 || ctx->region_type == ORT_ACC))
789 {
790 if (ctx->region_type == ORT_SIMD
791 && TREE_ADDRESSABLE (tmp)
792 && !TREE_STATIC (tmp))
793 {
794 if (TREE_CODE (DECL_SIZE_UNIT (tmp)) != INTEGER_CST)
795 ctx->add_safelen1 = true;
796 else
797 flag = GOVD_PRIVATE;
798 break;
799 }
800 ctx = ctx->outer_context;
801 }
802 if (ctx)
803 omp_add_variable (ctx, tmp, flag | GOVD_SEEN);
804 }
805 }
806 else if (cfun)
807 record_vars (tmp);
808 else
809 {
810 gimple_seq body_seq;
811
812 /* This case is for nested functions. We need to expose the locals
813 they create. */
814 body_seq = gimple_body (current_function_decl);
815 declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
816 }
817 }
818
819
820 \f
821 /* This page contains routines to unshare tree nodes, i.e. to duplicate tree
822 nodes that are referenced more than once in GENERIC functions. This is
823 necessary because gimplification (translation into GIMPLE) is performed
824 by modifying tree nodes in-place, so gimplification of a shared node in a
825 first context could generate an invalid GIMPLE form in a second context.
826
827 This is achieved with a simple mark/copy/unmark algorithm that walks the
828 GENERIC representation top-down, marks nodes with TREE_VISITED the first
829 time it encounters them, duplicates them if they already have TREE_VISITED
830 set, and finally removes the TREE_VISITED marks it has set.
831
832 The algorithm works only at the function level, i.e. it generates a GENERIC
833 representation of a function with no nodes shared within the function when
834 passed a GENERIC function (except for nodes that are allowed to be shared).
835
836 At the global level, it is also necessary to unshare tree nodes that are
837 referenced in more than one function, for the same aforementioned reason.
838 This requires some cooperation from the front-end. There are 2 strategies:
839
840 1. Manual unsharing. The front-end needs to call unshare_expr on every
841 expression that might end up being shared across functions.
842
843 2. Deep unsharing. This is an extension of regular unsharing. Instead
844 of calling unshare_expr on expressions that might be shared across
845 functions, the front-end pre-marks them with TREE_VISITED. This will
846 ensure that they are unshared on the first reference within functions
847 when the regular unsharing algorithm runs. The counterpart is that
848 this algorithm must look deeper than for manual unsharing, which is
849 specified by LANG_HOOKS_DEEP_UNSHARING.
850
851 If there are only few specific cases of node sharing across functions, it is
852 probably easier for a front-end to unshare the expressions manually. On the
853 contrary, if the expressions generated at the global level are as widespread
854 as expressions generated within functions, deep unsharing is very likely the
855 way to go. */
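/* A concrete example of the problem being solved: if a front-end reuses
   one PLUS_EXPR node <x + 1> as an operand of two different statements,

     y = (x + 1);        node gimplified in place here
     if ((x + 1) > 0)    same node; would see the modified tree

   then gimplifying the first statement would corrupt the second. The
   walk below marks the node TREE_VISITED on the first encounter, hands
   later encounters a fresh copy via mostly_copy_tree_r, and finally
   clears the marks with unmark_visited. */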
856
857 /* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
858 These nodes model computations that must be done once. If we were to
859 unshare something like SAVE_EXPR(i++), the gimplification process would
860 create wrong code. However, if DATA is non-null, it must hold a pointer
861 set that is used to unshare the subtrees of these nodes. */
862
863 static tree
864 mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
865 {
866 tree t = *tp;
867 enum tree_code code = TREE_CODE (t);
868
869 /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
870 copy their subtrees if we can make sure to do it only once. */
871 if (code == SAVE_EXPR || code == TARGET_EXPR || code == BIND_EXPR)
872 {
873 if (data && !((hash_set<tree> *)data)->add (t))
874 ;
875 else
876 *walk_subtrees = 0;
877 }
878
879 /* Stop at types, decls, constants like copy_tree_r. */
880 else if (TREE_CODE_CLASS (code) == tcc_type
881 || TREE_CODE_CLASS (code) == tcc_declaration
882 || TREE_CODE_CLASS (code) == tcc_constant)
883 *walk_subtrees = 0;
884
885 /* Cope with the statement expression extension. */
886 else if (code == STATEMENT_LIST)
887 ;
888
889 /* Leave the bulk of the work to copy_tree_r itself. */
890 else
891 copy_tree_r (tp, walk_subtrees, NULL);
892
893 return NULL_TREE;
894 }
895
896 /* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
897 If *TP has been visited already, then *TP is deeply copied by calling
898 mostly_copy_tree_r. DATA is passed to mostly_copy_tree_r unmodified. */
899
900 static tree
901 copy_if_shared_r (tree *tp, int *walk_subtrees, void *data)
902 {
903 tree t = *tp;
904 enum tree_code code = TREE_CODE (t);
905
906 /* Skip types, decls, and constants. But we do want to look at their
907 types and the bounds of types. Mark them as visited so we properly
908 unmark their subtrees on the unmark pass. If we've already seen them,
909 don't look down further. */
910 if (TREE_CODE_CLASS (code) == tcc_type
911 || TREE_CODE_CLASS (code) == tcc_declaration
912 || TREE_CODE_CLASS (code) == tcc_constant)
913 {
914 if (TREE_VISITED (t))
915 *walk_subtrees = 0;
916 else
917 TREE_VISITED (t) = 1;
918 }
919
920 /* If this node has been visited already, unshare it and don't look
921 any deeper. */
922 else if (TREE_VISITED (t))
923 {
924 walk_tree (tp, mostly_copy_tree_r, data, NULL);
925 *walk_subtrees = 0;
926 }
927
928 /* Otherwise, mark the node as visited and keep looking. */
929 else
930 TREE_VISITED (t) = 1;
931
932 return NULL_TREE;
933 }
934
935 /* Unshare most of the shared trees rooted at *TP. DATA is passed to the
936 copy_if_shared_r callback unmodified. */
937
938 void
939 copy_if_shared (tree *tp, void *data)
940 {
941 walk_tree (tp, copy_if_shared_r, data, NULL);
942 }
943
944 /* Unshare all the trees in the body of FNDECL, as well as in the bodies of
945 any nested functions. */
946
947 static void
948 unshare_body (tree fndecl)
949 {
950 struct cgraph_node *cgn = cgraph_node::get (fndecl);
951 /* If the language requires deep unsharing, we need a pointer set to make
952 sure we don't repeatedly unshare subtrees of unshareable nodes. */
953 hash_set<tree> *visited
954 = lang_hooks.deep_unsharing ? new hash_set<tree> : NULL;
955
956 copy_if_shared (&DECL_SAVED_TREE (fndecl), visited);
957 copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl)), visited);
958 copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)), visited);
959
960 delete visited;
961
962 if (cgn)
963 for (cgn = first_nested_function (cgn); cgn;
964 cgn = next_nested_function (cgn))
965 unshare_body (cgn->decl);
966 }
967
968 /* Callback for walk_tree to unmark the visited trees rooted at *TP.
969 Subtrees are walked until the first unvisited node is encountered. */
970
971 static tree
972 unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
973 {
974 tree t = *tp;
975
976 /* If this node has been visited, unmark it and keep looking. */
977 if (TREE_VISITED (t))
978 TREE_VISITED (t) = 0;
979
980 /* Otherwise, don't look any deeper. */
981 else
982 *walk_subtrees = 0;
983
984 return NULL_TREE;
985 }
986
987 /* Unmark the visited trees rooted at *TP. */
988
989 static inline void
990 unmark_visited (tree *tp)
991 {
992 walk_tree (tp, unmark_visited_r, NULL, NULL);
993 }
994
995 /* Likewise, but mark all trees as not visited. */
996
997 static void
998 unvisit_body (tree fndecl)
999 {
1000 struct cgraph_node *cgn = cgraph_node::get (fndecl);
1001
1002 unmark_visited (&DECL_SAVED_TREE (fndecl));
1003 unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl)));
1004 unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)));
1005
1006 if (cgn)
1007 for (cgn = first_nested_function (cgn);
1008 cgn; cgn = next_nested_function (cgn))
1009 unvisit_body (cgn->decl);
1010 }
1011
1012 /* Unconditionally make an unshared copy of EXPR. This is used for
1013 stored expressions which span multiple functions, such as BINFO_VTABLE,
1014 since the normal unsharing process can't tell that they're shared. */
1015
1016 tree
1017 unshare_expr (tree expr)
1018 {
1019 walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
1020 return expr;
1021 }
1022
1023 /* Worker for unshare_expr_without_location. */
1024
1025 static tree
1026 prune_expr_location (tree *tp, int *walk_subtrees, void *)
1027 {
1028 if (EXPR_P (*tp))
1029 SET_EXPR_LOCATION (*tp, UNKNOWN_LOCATION);
1030 else
1031 *walk_subtrees = 0;
1032 return NULL_TREE;
1033 }
1034
1035 /* Similar to unshare_expr but also prune all expression locations
1036 from EXPR. */
1037
1038 tree
1039 unshare_expr_without_location (tree expr)
1040 {
1041 walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
1042 if (EXPR_P (expr))
1043 walk_tree (&expr, prune_expr_location, NULL, NULL);
1044 return expr;
1045 }
1046
1047 /* Return the EXPR_LOCATION of EXPR, if it (maybe recursively) has
1048 one, OR_ELSE otherwise. The location of a STATEMENT_LIST
1049 comprising at least one DEBUG_BEGIN_STMT followed by exactly one
1050 EXPR is the location of the EXPR. */
1051
1052 static location_t
1053 rexpr_location (tree expr, location_t or_else = UNKNOWN_LOCATION)
1054 {
1055 if (!expr)
1056 return or_else;
1057
1058 if (EXPR_HAS_LOCATION (expr))
1059 return EXPR_LOCATION (expr);
1060
1061 if (TREE_CODE (expr) != STATEMENT_LIST)
1062 return or_else;
1063
1064 tree_stmt_iterator i = tsi_start (expr);
1065
1066 bool found = false;
1067 while (!tsi_end_p (i) && TREE_CODE (tsi_stmt (i)) == DEBUG_BEGIN_STMT)
1068 {
1069 found = true;
1070 tsi_next (&i);
1071 }
1072
1073 if (!found || !tsi_one_before_end_p (i))
1074 return or_else;
1075
1076 return rexpr_location (tsi_stmt (i), or_else);
1077 }
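/* Example: with -g, a statement may arrive wrapped as

     STATEMENT_LIST {
       DEBUG_BEGIN_STMT
       x = y + 1;        carries the real location
     }

   rexpr_location skips the DEBUG_BEGIN_STMTs and, since exactly one
   statement follows them, recurses into the assignment to return its
   location; any other shape yields OR_ELSE. */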
1078
1079 /* Return TRUE iff EXPR (maybe recursively) has a location; see
1080 rexpr_location for the potential recursion. */
1081
1082 static inline bool
1083 rexpr_has_location (tree expr)
1084 {
1085 return rexpr_location (expr) != UNKNOWN_LOCATION;
1086 }
1087
1088 \f
1089 /* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
1090 contain statements and have a value. Assign its value to a temporary
1091 and give it void_type_node. Return the temporary, or NULL_TREE if
1092 WRAPPER was already void. */
1093
1094 tree
1095 voidify_wrapper_expr (tree wrapper, tree temp)
1096 {
1097 tree type = TREE_TYPE (wrapper);
1098 if (type && !VOID_TYPE_P (type))
1099 {
1100 tree *p;
1101
1102 /* Set p to point to the body of the wrapper. Loop until we find
1103 something that isn't a wrapper. */
1104 for (p = &wrapper; p && *p; )
1105 {
1106 switch (TREE_CODE (*p))
1107 {
1108 case BIND_EXPR:
1109 TREE_SIDE_EFFECTS (*p) = 1;
1110 TREE_TYPE (*p) = void_type_node;
1111 /* For a BIND_EXPR, the body is operand 1. */
1112 p = &BIND_EXPR_BODY (*p);
1113 break;
1114
1115 case CLEANUP_POINT_EXPR:
1116 case TRY_FINALLY_EXPR:
1117 case TRY_CATCH_EXPR:
1118 TREE_SIDE_EFFECTS (*p) = 1;
1119 TREE_TYPE (*p) = void_type_node;
1120 p = &TREE_OPERAND (*p, 0);
1121 break;
1122
1123 case STATEMENT_LIST:
1124 {
1125 tree_stmt_iterator i = tsi_last (*p);
1126 TREE_SIDE_EFFECTS (*p) = 1;
1127 TREE_TYPE (*p) = void_type_node;
1128 p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
1129 }
1130 break;
1131
1132 case COMPOUND_EXPR:
1133 /* Advance to the last statement. Set all container types to
1134 void. */
1135 for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
1136 {
1137 TREE_SIDE_EFFECTS (*p) = 1;
1138 TREE_TYPE (*p) = void_type_node;
1139 }
1140 break;
1141
1142 case TRANSACTION_EXPR:
1143 TREE_SIDE_EFFECTS (*p) = 1;
1144 TREE_TYPE (*p) = void_type_node;
1145 p = &TRANSACTION_EXPR_BODY (*p);
1146 break;
1147
1148 default:
1149 /* Assume that any tree upon which voidify_wrapper_expr is
1150 directly called is a wrapper, and that its body is op0. */
1151 if (p == &wrapper)
1152 {
1153 TREE_SIDE_EFFECTS (*p) = 1;
1154 TREE_TYPE (*p) = void_type_node;
1155 p = &TREE_OPERAND (*p, 0);
1156 break;
1157 }
1158 goto out;
1159 }
1160 }
1161
1162 out:
1163 if (p == NULL || IS_EMPTY_STMT (*p))
1164 temp = NULL_TREE;
1165 else if (temp)
1166 {
1167 /* The wrapper is on the RHS of an assignment that we're pushing
1168 down. */
1169 gcc_assert (TREE_CODE (temp) == INIT_EXPR
1170 || TREE_CODE (temp) == MODIFY_EXPR);
1171 TREE_OPERAND (temp, 1) = *p;
1172 *p = temp;
1173 }
1174 else
1175 {
1176 temp = create_tmp_var (type, "retval");
1177 *p = build2 (INIT_EXPR, type, temp, *p);
1178 }
1179
1180 return temp;
1181 }
1182
1183 return NULL_TREE;
1184 }
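/* A sketch of the effect on a GNU statement expression: the RHS of

     y = ({ int i = f (); i + 1; });

   is a BIND_EXPR whose value is i + 1. voidify_wrapper_expr pushes the
   assignment down onto that last value, giving roughly

     ({ int i = f (); y = i + 1; });

   marks every wrapper void_type_node on the way, and returns the
   assignment (or a fresh "retval" temporary when none was passed in)
   for the caller to use in place of the wrapper. */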
1185
1186 /* Prepare calls to builtins to SAVE and RESTORE the stack as well as
1187 a temporary through which they communicate. */
1188
1189 static void
1190 build_stack_save_restore (gcall **save, gcall **restore)
1191 {
1192 tree tmp_var;
1193
1194 *save = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE), 0);
1195 tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
1196 gimple_call_set_lhs (*save, tmp_var);
1197
1198 *restore
1199 = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE),
1200 1, tmp_var);
1201 }
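/* gimplify_bind_expr below wires the pair into a GIMPLE_TRY_FINALLY, so
   a block whose VLA stack space must be reclaimed ends up roughly as

     saved_stack.1 = __builtin_stack_save ();
     try
       {
         ... body using the VLA storage ...
       }
     finally
       {
         __builtin_stack_restore (saved_stack.1);
       }

   (saved_stack.1 stands for the temporary created above.) */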
1202
1203 /* Generate an IFN_ASAN_MARK call that poisons the shadow memory of variable DECL. */
1204
1205 static tree
1206 build_asan_poison_call_expr (tree decl)
1207 {
1208 /* Do not poison variables that have size equal to zero. */
1209 tree unit_size = DECL_SIZE_UNIT (decl);
1210 if (zerop (unit_size))
1211 return NULL_TREE;
1212
1213 tree base = build_fold_addr_expr (decl);
1214
1215 return build_call_expr_internal_loc (UNKNOWN_LOCATION, IFN_ASAN_MARK,
1216 void_type_node, 3,
1217 build_int_cst (integer_type_node,
1218 ASAN_MARK_POISON),
1219 base, unit_size);
1220 }
1221
1222 /* Generate an IFN_ASAN_MARK call that poisons or unpoisons, depending
1223 on the POISON flag, the shadow memory of variable DECL. The call is
1224 inserted at the position identified by iterator IT; the BEFORE flag
1225 selects whether it goes before or after that position. */
1226
1227 static void
1228 asan_poison_variable (tree decl, bool poison, gimple_stmt_iterator *it,
1229 bool before)
1230 {
1231 tree unit_size = DECL_SIZE_UNIT (decl);
1232 tree base = build_fold_addr_expr (decl);
1233
1234 /* Do not poison variables that have size equal to zero. */
1235 if (zerop (unit_size))
1236 return;
1237
1238 /* All stack variables must be aligned to the ASAN shadow granularity
1239 (ASAN_SHADOW_GRANULARITY bytes). */
1240 if (DECL_ALIGN_UNIT (decl) <= ASAN_SHADOW_GRANULARITY)
1241 SET_DECL_ALIGN (decl, BITS_PER_UNIT * ASAN_SHADOW_GRANULARITY);
1242
1243 HOST_WIDE_INT flags = poison ? ASAN_MARK_POISON : ASAN_MARK_UNPOISON;
1244
1245 gimple *g
1246 = gimple_build_call_internal (IFN_ASAN_MARK, 3,
1247 build_int_cst (integer_type_node, flags),
1248 base, unit_size);
1249
1250 if (before)
1251 gsi_insert_before (it, g, GSI_NEW_STMT);
1252 else
1253 gsi_insert_after (it, g, GSI_NEW_STMT);
1254 }
1255
1256 /* Generate an IFN_ASAN_MARK internal call that, depending on the POISON
1257 flag, either poisons or unpoisons DECL. The created statement is
1258 appended to the SEQ_P gimple sequence. */
1259
1260 static void
1261 asan_poison_variable (tree decl, bool poison, gimple_seq *seq_p)
1262 {
1263 gimple_stmt_iterator it = gsi_last (*seq_p);
1264 bool before = false;
1265
1266 if (gsi_end_p (it))
1267 before = true;
1268
1269 asan_poison_variable (decl, poison, &it, before);
1270 }
1271
1272 /* qsort comparison function: order VAR_DECLs A and B by DECL_UID. */
1273
1274 static int
1275 sort_by_decl_uid (const void *a, const void *b)
1276 {
1277 const tree *t1 = (const tree *)a;
1278 const tree *t2 = (const tree *)b;
1279
1280 int uid1 = DECL_UID (*t1);
1281 int uid2 = DECL_UID (*t2);
1282
1283 if (uid1 < uid2)
1284 return -1;
1285 else if (uid1 > uid2)
1286 return 1;
1287 else
1288 return 0;
1289 }
1290
1291 /* Generate an IFN_ASAN_MARK internal call for each variable in
1292 VARIABLES, poisoning or unpoisoning according to the POISON flag.
1293 The created statements are appended to the SEQ_P gimple sequence. */
1294
1295 static void
1296 asan_poison_variables (hash_set<tree> *variables, bool poison, gimple_seq *seq_p)
1297 {
1298 unsigned c = variables->elements ();
1299 if (c == 0)
1300 return;
1301
1302 auto_vec<tree> sorted_variables (c);
1303
1304 for (hash_set<tree>::iterator it = variables->begin ();
1305 it != variables->end (); ++it)
1306 sorted_variables.safe_push (*it);
1307
1308 sorted_variables.qsort (sort_by_decl_uid);
1309
1310 unsigned i;
1311 tree var;
1312 FOR_EACH_VEC_ELT (sorted_variables, i, var)
1313 {
1314 asan_poison_variable (var, poison, seq_p);
1315
1316 /* Add the use_after_scope_memory attribute to the variable in order
1317 to prevent it from being rewritten into SSA form. */
1318 if (!lookup_attribute (ASAN_USE_AFTER_SCOPE_ATTRIBUTE,
1319 DECL_ATTRIBUTES (var)))
1320 DECL_ATTRIBUTES (var)
1321 = tree_cons (get_identifier (ASAN_USE_AFTER_SCOPE_ATTRIBUTE),
1322 integer_one_node,
1323 DECL_ATTRIBUTES (var));
1324 }
1325 }
1326
1327 /* Gimplify a BIND_EXPR. Just voidify and recurse. */
1328
1329 static enum gimplify_status
1330 gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
1331 {
1332 tree bind_expr = *expr_p;
1333 bool old_keep_stack = gimplify_ctxp->keep_stack;
1334 bool old_save_stack = gimplify_ctxp->save_stack;
1335 tree t;
1336 gbind *bind_stmt;
1337 gimple_seq body, cleanup;
1338 gcall *stack_save;
1339 location_t start_locus = 0, end_locus = 0;
1340 tree ret_clauses = NULL;
1341
1342 tree temp = voidify_wrapper_expr (bind_expr, NULL);
1343
1344 /* Mark variables seen in this bind expr. */
1345 for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
1346 {
1347 if (VAR_P (t))
1348 {
1349 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
1350
1351 /* Mark variable as local. */
1352 if (ctx && ctx->region_type != ORT_NONE && !DECL_EXTERNAL (t))
1353 {
1354 if (! DECL_SEEN_IN_BIND_EXPR_P (t)
1355 || splay_tree_lookup (ctx->variables,
1356 (splay_tree_key) t) == NULL)
1357 {
1358 int flag = GOVD_LOCAL;
1359 if (ctx->region_type == ORT_SIMD
1360 && TREE_ADDRESSABLE (t)
1361 && !TREE_STATIC (t))
1362 {
1363 if (TREE_CODE (DECL_SIZE_UNIT (t)) != INTEGER_CST)
1364 ctx->add_safelen1 = true;
1365 else
1366 flag = GOVD_PRIVATE;
1367 }
1368 omp_add_variable (ctx, t, flag | GOVD_SEEN);
1369 }
1370 /* Static locals inside of target construct or offloaded
1371 routines need to be "omp declare target". */
1372 if (TREE_STATIC (t))
1373 for (; ctx; ctx = ctx->outer_context)
1374 if ((ctx->region_type & ORT_TARGET) != 0)
1375 {
1376 if (!lookup_attribute ("omp declare target",
1377 DECL_ATTRIBUTES (t)))
1378 {
1379 tree id = get_identifier ("omp declare target");
1380 DECL_ATTRIBUTES (t)
1381 = tree_cons (id, NULL_TREE, DECL_ATTRIBUTES (t));
1382 varpool_node *node = varpool_node::get (t);
1383 if (node)
1384 {
1385 node->offloadable = 1;
1386 if (ENABLE_OFFLOADING && !DECL_EXTERNAL (t))
1387 {
1388 g->have_offload = true;
1389 if (!in_lto_p)
1390 vec_safe_push (offload_vars, t);
1391 }
1392 }
1393 }
1394 break;
1395 }
1396 }
1397
1398 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
1399
1400 if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
1401 cfun->has_local_explicit_reg_vars = true;
1402 }
1403 }
1404
1405 bind_stmt = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
1406 BIND_EXPR_BLOCK (bind_expr));
1407 gimple_push_bind_expr (bind_stmt);
1408
1409 gimplify_ctxp->keep_stack = false;
1410 gimplify_ctxp->save_stack = false;
1411
1412 /* Gimplify the body into the GIMPLE_BIND tuple's body. */
1413 body = NULL;
1414 gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
1415 gimple_bind_set_body (bind_stmt, body);
1416
1417 /* Source location wise, the cleanup code (stack_restore and clobbers)
1418 belongs to the end of the block, so propagate what we have. The
1419 stack_save operation belongs to the beginning of block, which we can
1420 infer from the bind_expr directly if the block has no explicit
1421 assignment. */
1422 if (BIND_EXPR_BLOCK (bind_expr))
1423 {
1424 end_locus = BLOCK_SOURCE_END_LOCATION (BIND_EXPR_BLOCK (bind_expr));
1425 start_locus = BLOCK_SOURCE_LOCATION (BIND_EXPR_BLOCK (bind_expr));
1426 }
1427 if (start_locus == 0)
1428 start_locus = EXPR_LOCATION (bind_expr);
1429
1430 cleanup = NULL;
1431 stack_save = NULL;
1432
1433 /* If the code both contains VLAs and calls alloca, then we cannot reclaim
1434 the stack space allocated to the VLAs. */
1435 if (gimplify_ctxp->save_stack && !gimplify_ctxp->keep_stack)
1436 {
1437 gcall *stack_restore;
1438
1439 /* Save stack on entry and restore it on exit. Add a try_finally
1440 block to achieve this. */
1441 build_stack_save_restore (&stack_save, &stack_restore);
1442
1443 gimple_set_location (stack_save, start_locus);
1444 gimple_set_location (stack_restore, end_locus);
1445
1446 gimplify_seq_add_stmt (&cleanup, stack_restore);
1447 }
1448
1449 /* Add clobbers for all variables that go out of scope. */
1450 for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
1451 {
1452 if (VAR_P (t)
1453 && !is_global_var (t)
1454 && DECL_CONTEXT (t) == current_function_decl)
1455 {
1456 if (!DECL_HARD_REGISTER (t)
1457 && !TREE_THIS_VOLATILE (t)
1458 && !DECL_HAS_VALUE_EXPR_P (t)
1459 /* Only care for variables that have to be in memory. Others
1460 will be rewritten into SSA names, hence moved to the
1461 top-level. */
1462 && !is_gimple_reg (t)
1463 && flag_stack_reuse != SR_NONE)
1464 {
1465 tree clobber = build_clobber (TREE_TYPE (t));
1466 gimple *clobber_stmt;
1467 clobber_stmt = gimple_build_assign (t, clobber);
1468 gimple_set_location (clobber_stmt, end_locus);
1469 gimplify_seq_add_stmt (&cleanup, clobber_stmt);
1470 }
1471
1472 if (flag_openacc && oacc_declare_returns != NULL)
1473 {
1474 tree key = t;
1475 if (DECL_HAS_VALUE_EXPR_P (key))
1476 {
1477 key = DECL_VALUE_EXPR (key);
1478 if (TREE_CODE (key) == INDIRECT_REF)
1479 key = TREE_OPERAND (key, 0);
1480 }
1481 tree *c = oacc_declare_returns->get (key);
1482 if (c != NULL)
1483 {
1484 if (ret_clauses)
1485 OMP_CLAUSE_CHAIN (*c) = ret_clauses;
1486
1487 ret_clauses = unshare_expr (*c);
1488
1489 oacc_declare_returns->remove (key);
1490
1491 if (oacc_declare_returns->is_empty ())
1492 {
1493 delete oacc_declare_returns;
1494 oacc_declare_returns = NULL;
1495 }
1496 }
1497 }
1498 }
1499
1500 if (asan_poisoned_variables != NULL
1501 && asan_poisoned_variables->contains (t))
1502 {
1503 asan_poisoned_variables->remove (t);
1504 asan_poison_variable (t, true, &cleanup);
1505 }
1506
1507 if (gimplify_ctxp->live_switch_vars != NULL
1508 && gimplify_ctxp->live_switch_vars->contains (t))
1509 gimplify_ctxp->live_switch_vars->remove (t);
1510 }
1511
1512 if (ret_clauses)
1513 {
1514 gomp_target *stmt;
1515 gimple_stmt_iterator si = gsi_start (cleanup);
1516
1517 stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
1518 ret_clauses);
1519 gsi_insert_seq_before_without_update (&si, stmt, GSI_NEW_STMT);
1520 }
1521
1522 if (cleanup)
1523 {
1524 gtry *gs;
1525 gimple_seq new_body;
1526
1527 new_body = NULL;
1528 gs = gimple_build_try (gimple_bind_body (bind_stmt), cleanup,
1529 GIMPLE_TRY_FINALLY);
1530
1531 if (stack_save)
1532 gimplify_seq_add_stmt (&new_body, stack_save);
1533 gimplify_seq_add_stmt (&new_body, gs);
1534 gimple_bind_set_body (bind_stmt, new_body);
1535 }
1536
1537 /* keep_stack propagates all the way up to the outermost BIND_EXPR. */
1538 if (!gimplify_ctxp->keep_stack)
1539 gimplify_ctxp->keep_stack = old_keep_stack;
1540 gimplify_ctxp->save_stack = old_save_stack;
1541
1542 gimple_pop_bind_expr ();
1543
1544 gimplify_seq_add_stmt (pre_p, bind_stmt);
1545
1546 if (temp)
1547 {
1548 *expr_p = temp;
1549 return GS_OK;
1550 }
1551
1552 *expr_p = NULL_TREE;
1553 return GS_ALL_DONE;
1554 }
1555
1556 /* Maybe add early return predict statement to PRE_P sequence. */
1557
1558 static void
1559 maybe_add_early_return_predict_stmt (gimple_seq *pre_p)
1560 {
1561 /* If we are in a conditional context, add a PREDICT statement. */
1562 if (gimple_conditional_context ())
1563 {
1564 gimple *predict = gimple_build_predict (PRED_TREE_EARLY_RETURN,
1565 NOT_TAKEN);
1566 gimplify_seq_add_stmt (pre_p, predict);
1567 }
1568 }
1569
1570 /* Gimplify a RETURN_EXPR. If the expression to be returned is not a
1571 GIMPLE value, it is assigned to a new temporary and the statement is
1572 re-written to return the temporary.
1573
1574 PRE_P points to the sequence where side effects that must happen before
1575 STMT should be stored. */
1576
1577 static enum gimplify_status
1578 gimplify_return_expr (tree stmt, gimple_seq *pre_p)
1579 {
1580 greturn *ret;
1581 tree ret_expr = TREE_OPERAND (stmt, 0);
1582 tree result_decl, result;
1583
1584 if (ret_expr == error_mark_node)
1585 return GS_ERROR;
1586
1587 if (!ret_expr
1588 || TREE_CODE (ret_expr) == RESULT_DECL)
1589 {
1590 maybe_add_early_return_predict_stmt (pre_p);
1591 greturn *ret = gimple_build_return (ret_expr);
1592 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1593 gimplify_seq_add_stmt (pre_p, ret);
1594 return GS_ALL_DONE;
1595 }
1596
1597 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
1598 result_decl = NULL_TREE;
1599 else if (TREE_CODE (ret_expr) == COMPOUND_EXPR)
1600 {
1601 /* Used in C++ for handling EH cleanup of the return value if a local
1602 cleanup throws. Assume the front-end knows what it's doing. */
1603 result_decl = DECL_RESULT (current_function_decl);
1604 /* But crash if we end up trying to modify ret_expr below. */
1605 ret_expr = NULL_TREE;
1606 }
1607 else
1608 {
1609 result_decl = TREE_OPERAND (ret_expr, 0);
1610
1611 /* See through a return by reference. */
1612 if (TREE_CODE (result_decl) == INDIRECT_REF)
1613 result_decl = TREE_OPERAND (result_decl, 0);
1614
1615 gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
1616 || TREE_CODE (ret_expr) == INIT_EXPR)
1617 && TREE_CODE (result_decl) == RESULT_DECL);
1618 }
1619
1620 /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
1621 Recall that aggregate_value_p is FALSE for any aggregate type that is
1622 returned in registers. If we're returning values in registers, then
1623 we don't want to extend the lifetime of the RESULT_DECL, particularly
1624 across another call. In addition, for those aggregates for which
1625 hard_function_value generates a PARALLEL, we'll die during normal
1626 expansion of structure assignments; there's special code in expand_return
1627 to handle this case that does not exist in expand_expr. */
1628 if (!result_decl)
1629 result = NULL_TREE;
1630 else if (aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
1631 {
1632 if (!poly_int_tree_p (DECL_SIZE (result_decl)))
1633 {
1634 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl)))
1635 gimplify_type_sizes (TREE_TYPE (result_decl), pre_p);
1636 /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
1637 should be effectively allocated by the caller, i.e. all calls to
1638 this function must be subject to the Return Slot Optimization. */
1639 gimplify_one_sizepos (&DECL_SIZE (result_decl), pre_p);
1640 gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl), pre_p);
1641 }
1642 result = result_decl;
1643 }
1644 else if (gimplify_ctxp->return_temp)
1645 result = gimplify_ctxp->return_temp;
1646 else
1647 {
1648 result = create_tmp_reg (TREE_TYPE (result_decl));
1649
1650 /* ??? With complex control flow (usually involving abnormal edges),
1651 we can wind up warning about an uninitialized value for this. Due
1652 to how this variable is constructed and initialized, this is never
1653 true. Give up and never warn. */
1654 TREE_NO_WARNING (result) = 1;
1655
1656 gimplify_ctxp->return_temp = result;
1657 }
1658
1659 /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
1660 Then gimplify the whole thing. */
1661 if (result != result_decl)
1662 TREE_OPERAND (ret_expr, 0) = result;
1663
1664 gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);
1665
1666 maybe_add_early_return_predict_stmt (pre_p);
1667 ret = gimple_build_return (result);
1668 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1669 gimplify_seq_add_stmt (pre_p, ret);
1670
1671 return GS_ALL_DONE;
1672 }
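/* Illustration: in a function returning int in a register,

     return a + b;

   is rewritten along the lines of

     retval.1 = a + b;      retval.1 is gimplify_ctxp->return_temp
     return retval.1;

   while a result satisfying aggregate_value_p keeps the RESULT_DECL
   itself, so the caller-provided return slot is written directly. */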
1673
1674 /* Gimplify a variable-length array DECL. */
1675
1676 static void
1677 gimplify_vla_decl (tree decl, gimple_seq *seq_p)
1678 {
1679 /* This is a variable-sized decl. Simplify its size and mark it
1680 for deferred expansion. */
1681 tree t, addr, ptr_type;
1682
1683 gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
1684 gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);
1685
1686 /* Don't mess with a DECL_VALUE_EXPR set by the front-end. */
1687 if (DECL_HAS_VALUE_EXPR_P (decl))
1688 return;
1689
1690 /* All occurrences of this decl in final gimplified code will be
1691 replaced by indirection. Setting DECL_VALUE_EXPR does two
1692 things: First, it lets the rest of the gimplifier know what
1693 replacement to use. Second, it lets the debug info know
1694 where to find the value. */
1695 ptr_type = build_pointer_type (TREE_TYPE (decl));
1696 addr = create_tmp_var (ptr_type, get_name (decl));
1697 DECL_IGNORED_P (addr) = 0;
1698 t = build_fold_indirect_ref (addr);
1699 TREE_THIS_NOTRAP (t) = 1;
1700 SET_DECL_VALUE_EXPR (decl, t);
1701 DECL_HAS_VALUE_EXPR_P (decl) = 1;
1702
1703 t = build_alloca_call_expr (DECL_SIZE_UNIT (decl), DECL_ALIGN (decl),
1704 max_int_size_in_bytes (TREE_TYPE (decl)));
1705 /* The call has been built for a variable-sized object. */
1706 CALL_ALLOCA_FOR_VAR_P (t) = 1;
1707 t = fold_convert (ptr_type, t);
1708 t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
1709
1710 gimplify_and_add (t, seq_p);
1711
1712 /* Record the dynamic allocation associated with DECL if requested. */
1713 if (flag_callgraph_info & CALLGRAPH_INFO_DYNAMIC_ALLOC)
1714 record_dynamic_alloc (decl);
1715 }
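/* Sketch for "int a[n];" with non-constant N: after the size positions
   are gimplified, the emitted sequence is roughly

     D.1 = (sizetype) n * 4;
     a.2 = __builtin_alloca_with_align (D.1, 32);

   with CALL_ALLOCA_FOR_VAR_P set on the call, and DECL_VALUE_EXPR (a)
   set to *a.2 so every later use of A becomes an indirection through
   the temporary pointer. (D.1 and a.2 stand for the usual anonymous
   temporaries; 32 is DECL_ALIGN in bits for a 4-byte int.) */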
1716
1717 /* A helper function to be called via walk_tree. Mark all labels under *TP
1718 as being forced. To be called for DECL_INITIAL of static variables. */
1719
1720 static tree
1721 force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1722 {
1723 if (TYPE_P (*tp))
1724 *walk_subtrees = 0;
1725 if (TREE_CODE (*tp) == LABEL_DECL)
1726 {
1727 FORCED_LABEL (*tp) = 1;
1728 cfun->has_forced_label_in_static = 1;
1729 }
1730
1731 return NULL_TREE;
1732 }
1733
1734 /* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
1735 and initialization explicit. */
1736
1737 static enum gimplify_status
1738 gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
1739 {
1740 tree stmt = *stmt_p;
1741 tree decl = DECL_EXPR_DECL (stmt);
1742
1743 *stmt_p = NULL_TREE;
1744
1745 if (TREE_TYPE (decl) == error_mark_node)
1746 return GS_ERROR;
1747
1748 if ((TREE_CODE (decl) == TYPE_DECL
1749 || VAR_P (decl))
1750 && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
1751 {
1752 gimplify_type_sizes (TREE_TYPE (decl), seq_p);
1753 if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
1754 gimplify_type_sizes (TREE_TYPE (TREE_TYPE (decl)), seq_p);
1755 }
1756
1757 /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
1758 in case its size expressions contain problematic nodes like CALL_EXPR. */
1759 if (TREE_CODE (decl) == TYPE_DECL
1760 && DECL_ORIGINAL_TYPE (decl)
1761 && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl)))
1762 {
1763 gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl), seq_p);
1764 if (TREE_CODE (DECL_ORIGINAL_TYPE (decl)) == REFERENCE_TYPE)
1765 gimplify_type_sizes (TREE_TYPE (DECL_ORIGINAL_TYPE (decl)), seq_p);
1766 }
1767
1768 if (VAR_P (decl) && !DECL_EXTERNAL (decl))
1769 {
1770 tree init = DECL_INITIAL (decl);
1771 bool is_vla = false;
1772
1773 poly_uint64 size;
1774 if (!poly_int_tree_p (DECL_SIZE_UNIT (decl), &size)
1775 || (!TREE_STATIC (decl)
1776 && flag_stack_check == GENERIC_STACK_CHECK
1777 && maybe_gt (size,
1778 (unsigned HOST_WIDE_INT) STACK_CHECK_MAX_VAR_SIZE)))
1779 {
1780 gimplify_vla_decl (decl, seq_p);
1781 is_vla = true;
1782 }
1783
1784 if (asan_poisoned_variables
1785 && !is_vla
1786 && TREE_ADDRESSABLE (decl)
1787 && !TREE_STATIC (decl)
1788 && !DECL_HAS_VALUE_EXPR_P (decl)
1789 && DECL_ALIGN (decl) <= MAX_SUPPORTED_STACK_ALIGNMENT
1790 && dbg_cnt (asan_use_after_scope)
1791 && !gimplify_omp_ctxp)
1792 {
1793 asan_poisoned_variables->add (decl);
1794 asan_poison_variable (decl, false, seq_p);
1795 if (!DECL_ARTIFICIAL (decl) && gimplify_ctxp->live_switch_vars)
1796 gimplify_ctxp->live_switch_vars->add (decl);
1797 }
1798
1799 /* Some front ends do not explicitly declare all anonymous
1800 artificial variables. We compensate here by declaring the
1801 variables, though it would be better if the front ends would
1802 explicitly declare them. */
1803 if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
1804 && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
1805 gimple_add_tmp_var (decl);
1806
1807 if (init && init != error_mark_node)
1808 {
1809 if (!TREE_STATIC (decl))
1810 {
1811 DECL_INITIAL (decl) = NULL_TREE;
1812 init = build2 (INIT_EXPR, void_type_node, decl, init);
1813 gimplify_and_add (init, seq_p);
1814 ggc_free (init);
1815 }
1816 else
1817 /* We must still examine initializers for static variables
1818 as they may contain a label address. */
1819 walk_tree (&init, force_labels_r, NULL, NULL);
1820 }
1821 }
1822
1823 return GS_ALL_DONE;
1824 }
1825
1826 /* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body
1827 and replacing the LOOP_EXPR with goto, but if the loop contains an
1828 EXIT_EXPR, we need to append a label for it to jump to. */
1829
1830 static enum gimplify_status
1831 gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
1832 {
1833 tree saved_label = gimplify_ctxp->exit_label;
1834 tree start_label = create_artificial_label (UNKNOWN_LOCATION);
1835
1836 gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));
1837
1838 gimplify_ctxp->exit_label = NULL_TREE;
1839
1840 gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);
1841
1842 gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));
1843
1844 if (gimplify_ctxp->exit_label)
1845 gimplify_seq_add_stmt (pre_p,
1846 gimple_build_label (gimplify_ctxp->exit_label));
1847
1848 gimplify_ctxp->exit_label = saved_label;
1849
1850 *expr_p = NULL;
1851 return GS_ALL_DONE;
1852 }
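/* The resulting shape, sketched:

     start_label:
       ... gimplified LOOP_EXPR_BODY ...
       goto start_label;
     exit_label:

   where the trailing label is emitted only if gimplifying the body saw
   an EXIT_EXPR and therefore created gimplify_ctxp->exit_label. */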
1853
1854 /* Gimplify a statement list onto a sequence. These may be created either
1855 by an enlightened front-end, or by shortcut_cond_expr. */
1856
1857 static enum gimplify_status
1858 gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
1859 {
1860 tree temp = voidify_wrapper_expr (*expr_p, NULL);
1861
1862 tree_stmt_iterator i = tsi_start (*expr_p);
1863
1864 while (!tsi_end_p (i))
1865 {
1866 gimplify_stmt (tsi_stmt_ptr (i), pre_p);
1867 tsi_delink (&i);
1868 }
1869
1870 if (temp)
1871 {
1872 *expr_p = temp;
1873 return GS_OK;
1874 }
1875
1876 return GS_ALL_DONE;
1877 }
1878
1879 /* Callback for walk_gimple_seq. */
1880
1881 static tree
1882 warn_switch_unreachable_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
1883 struct walk_stmt_info *wi)
1884 {
1885 gimple *stmt = gsi_stmt (*gsi_p);
1886
1887 *handled_ops_p = true;
1888 switch (gimple_code (stmt))
1889 {
1890 case GIMPLE_TRY:
1891 /* A compiler-generated cleanup or a user-written try block.
1892 If it's empty, don't dive into it--that would result in
1893 worse location info. */
1894 if (gimple_try_eval (stmt) == NULL)
1895 {
1896 wi->info = stmt;
1897 return integer_zero_node;
1898 }
1899 /* Fall through. */
1900 case GIMPLE_BIND:
1901 case GIMPLE_CATCH:
1902 case GIMPLE_EH_FILTER:
1903 case GIMPLE_TRANSACTION:
1904 /* Walk the sub-statements. */
1905 *handled_ops_p = false;
1906 break;
1907
1908 case GIMPLE_DEBUG:
1909 /* Ignore these. We may generate them before declarations that
1910 are never executed. If there's something to warn about,
1911 there will be non-debug stmts too, and we'll catch those. */
1912 break;
1913
1914 case GIMPLE_CALL:
1915 if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
1916 {
1917 *handled_ops_p = false;
1918 break;
1919 }
1920 /* Fall through. */
1921 default:
1922 /* Save the first "real" statement (not a decl/lexical scope/...). */
1923 wi->info = stmt;
1924 return integer_zero_node;
1925 }
1926 return NULL_TREE;
1927 }
1928
1929 /* Possibly warn about unreachable statements between switch's controlling
1930 expression and the first case. SEQ is the body of a switch expression. */
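
/* For instance, with -Wswitch-unreachable this (hypothetical) fragment
   warns on the call, which can never execute:

     switch (x)
       {
	 foo ();   <-- warning: statement will never be executed
       case 1:
	 break;
       }

   Compiler-generated gotos before the first case, as in Duff's device,
   are deliberately not warned about. */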
1931
1932 static void
1933 maybe_warn_switch_unreachable (gimple_seq seq)
1934 {
1935 if (!warn_switch_unreachable
1936 /* This warning doesn't play well with Fortran when optimizations
1937 are on. */
1938 || lang_GNU_Fortran ()
1939 || seq == NULL)
1940 return;
1941
1942 struct walk_stmt_info wi;
1943 memset (&wi, 0, sizeof (wi));
1944 walk_gimple_seq (seq, warn_switch_unreachable_r, NULL, &wi);
1945 gimple *stmt = (gimple *) wi.info;
1946
1947 if (stmt && gimple_code (stmt) != GIMPLE_LABEL)
1948 {
1949 if (gimple_code (stmt) == GIMPLE_GOTO
1950 && TREE_CODE (gimple_goto_dest (stmt)) == LABEL_DECL
1951 && DECL_ARTIFICIAL (gimple_goto_dest (stmt)))
1952 /* Don't warn for compiler-generated gotos. These occur
1953 in Duff's devices, for example. */;
1954 else
1955 warning_at (gimple_location (stmt), OPT_Wswitch_unreachable,
1956 "statement will never be executed");
1957 }
1958 }
1959
1960
1961 /* A label entry that pairs a label with a location. */
1962 struct label_entry
1963 {
1964 tree label;
1965 location_t loc;
1966 };
1967
1968 /* Find LABEL in vector of label entries VEC. */
1969
1970 static struct label_entry *
1971 find_label_entry (const auto_vec<struct label_entry> *vec, tree label)
1972 {
1973 unsigned int i;
1974 struct label_entry *l;
1975
1976 FOR_EACH_VEC_ELT (*vec, i, l)
1977 if (l->label == label)
1978 return l;
1979 return NULL;
1980 }
1981
1982 /* Return true if LABEL, a LABEL_DECL, represents a case label
1983 in a vector of labels CASES. */
1984
1985 static bool
1986 case_label_p (const vec<tree> *cases, tree label)
1987 {
1988 unsigned int i;
1989 tree l;
1990
1991 FOR_EACH_VEC_ELT (*cases, i, l)
1992 if (CASE_LABEL (l) == label)
1993 return true;
1994 return false;
1995 }
1996
1997 /* Find the last nondebug statement in a scope STMT. */
1998
1999 static gimple *
2000 last_stmt_in_scope (gimple *stmt)
2001 {
2002 if (!stmt)
2003 return NULL;
2004
2005 switch (gimple_code (stmt))
2006 {
2007 case GIMPLE_BIND:
2008 {
2009 gbind *bind = as_a <gbind *> (stmt);
2010 stmt = gimple_seq_last_nondebug_stmt (gimple_bind_body (bind));
2011 return last_stmt_in_scope (stmt);
2012 }
2013
2014 case GIMPLE_TRY:
2015 {
2016 gtry *try_stmt = as_a <gtry *> (stmt);
2017 stmt = gimple_seq_last_nondebug_stmt (gimple_try_eval (try_stmt));
2018 gimple *last_eval = last_stmt_in_scope (stmt);
2019 if (gimple_stmt_may_fallthru (last_eval)
2020 && (last_eval == NULL
2021 || !gimple_call_internal_p (last_eval, IFN_FALLTHROUGH))
2022 && gimple_try_kind (try_stmt) == GIMPLE_TRY_FINALLY)
2023 {
2024 stmt = gimple_seq_last_nondebug_stmt (gimple_try_cleanup (try_stmt));
2025 return last_stmt_in_scope (stmt);
2026 }
2027 else
2028 return last_eval;
2029 }
2030
2031 case GIMPLE_DEBUG:
2032 gcc_unreachable ();
2033
2034 default:
2035 return stmt;
2036 }
2037 }
2038
2039 /* Collect interesting labels in LABELS and return the statement preceding
2040 another case label, or a user-defined label. Store a location useful
2041 to give warnings at *PREVLOC (usually the location of the returned
2042 statement or of its surrounding scope). */
2043
2044 static gimple *
2045 collect_fallthrough_labels (gimple_stmt_iterator *gsi_p,
2046 auto_vec <struct label_entry> *labels,
2047 location_t *prevloc)
2048 {
2049 gimple *prev = NULL;
2050
2051 *prevloc = UNKNOWN_LOCATION;
2052 do
2053 {
2054 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND)
2055 {
2056 /* Recognize the special GIMPLE_BIND added by gimplify_switch_expr,
2057 which starts on a GIMPLE_SWITCH and ends with a break label.
2058 Handle that as a single statement that can fall through. */
2059 gbind *bind = as_a <gbind *> (gsi_stmt (*gsi_p));
2060 gimple *first = gimple_seq_first_stmt (gimple_bind_body (bind));
2061 gimple *last = gimple_seq_last_stmt (gimple_bind_body (bind));
2062 if (last
2063 && gimple_code (first) == GIMPLE_SWITCH
2064 && gimple_code (last) == GIMPLE_LABEL)
2065 {
2066 tree label = gimple_label_label (as_a <glabel *> (last));
2067 if (SWITCH_BREAK_LABEL_P (label))
2068 {
2069 prev = bind;
2070 gsi_next (gsi_p);
2071 continue;
2072 }
2073 }
2074 }
2075 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND
2076 || gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_TRY)
2077 {
2078 /* Nested scope. Only look at the last statement of
2079 the innermost scope. */
2080 location_t bind_loc = gimple_location (gsi_stmt (*gsi_p));
2081 gimple *last = last_stmt_in_scope (gsi_stmt (*gsi_p));
2082 if (last)
2083 {
2084 prev = last;
2085 /* It might be a label without a location. Use the
2086 location of the scope then. */
2087 if (!gimple_has_location (prev))
2088 *prevloc = bind_loc;
2089 }
2090 gsi_next (gsi_p);
2091 continue;
2092 }
2093
2094 /* Ifs are tricky. */
2095 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_COND)
2096 {
2097 gcond *cond_stmt = as_a <gcond *> (gsi_stmt (*gsi_p));
2098 tree false_lab = gimple_cond_false_label (cond_stmt);
2099 location_t if_loc = gimple_location (cond_stmt);
2100
2101 /* If we have e.g.
2102 if (i > 1) goto <D.2259>; else goto D;
2103 we can't do much with the else-branch. */
2104 if (!DECL_ARTIFICIAL (false_lab))
2105 break;
2106
2107 /* Go on until the false label, then one step back. */
2108 for (; !gsi_end_p (*gsi_p); gsi_next (gsi_p))
2109 {
2110 gimple *stmt = gsi_stmt (*gsi_p);
2111 if (gimple_code (stmt) == GIMPLE_LABEL
2112 && gimple_label_label (as_a <glabel *> (stmt)) == false_lab)
2113 break;
2114 }
2115
2116 /* Not found? Oops. */
2117 if (gsi_end_p (*gsi_p))
2118 break;
2119
2120 struct label_entry l = { false_lab, if_loc };
2121 labels->safe_push (l);
2122
2123 /* Go to the last statement of the then branch. */
2124 gsi_prev (gsi_p);
2125
2126 /* if (i != 0) goto <D.1759>; else goto <D.1760>;
2127 <D.1759>:
2128 <stmt>;
2129 goto <D.1761>;
2130 <D.1760>:
2131 */
2132 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_GOTO
2133 && !gimple_has_location (gsi_stmt (*gsi_p)))
2134 {
2135 /* Look at the statement before; it might be an
2136 attribute fallthrough, in which case don't warn. */
2137 gsi_prev (gsi_p);
2138 bool fallthru_before_dest
2139 = gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_FALLTHROUGH);
2140 gsi_next (gsi_p);
2141 tree goto_dest = gimple_goto_dest (gsi_stmt (*gsi_p));
2142 if (!fallthru_before_dest)
2143 {
2144 struct label_entry l = { goto_dest, if_loc };
2145 labels->safe_push (l);
2146 }
2147 }
2148 /* And move back. */
2149 gsi_next (gsi_p);
2150 }
2151
2152 /* Remember the last statement. Skip labels that are of no interest
2153 to us. */
2154 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
2155 {
2156 tree label = gimple_label_label (as_a <glabel *> (gsi_stmt (*gsi_p)));
2157 if (find_label_entry (labels, label))
2158 prev = gsi_stmt (*gsi_p);
2159 }
2160 else if (gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_ASAN_MARK))
2161 ;
2162 else if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_PREDICT)
2163 ;
2164 else if (!is_gimple_debug (gsi_stmt (*gsi_p)))
2165 prev = gsi_stmt (*gsi_p);
2166 gsi_next (gsi_p);
2167 }
2168 while (!gsi_end_p (*gsi_p)
2169 /* Stop if we find a case or a user-defined label. */
2170 && (gimple_code (gsi_stmt (*gsi_p)) != GIMPLE_LABEL
2171 || !gimple_has_location (gsi_stmt (*gsi_p))));
2172
2173 if (prev && gimple_has_location (prev))
2174 *prevloc = gimple_location (prev);
2175 return prev;
2176 }
2177
2178 /* Return true if the switch fallthrough warning should occur. LABEL is
2179 the label statement that we're falling through to. */
2180
2181 static bool
2182 should_warn_for_implicit_fallthrough (gimple_stmt_iterator *gsi_p, tree label)
2183 {
2184 gimple_stmt_iterator gsi = *gsi_p;
2185
2186 /* Don't warn if the label is marked with a "falls through" comment. */
2187 if (FALLTHROUGH_LABEL_P (label))
2188 return false;
2189
2190 /* Don't warn for non-case labels followed by a statement:
2191 case 0:
2192 foo ();
2193 label:
2194 bar ();
2195 as these are likely intentional. */
2196 if (!case_label_p (&gimplify_ctxp->case_labels, label))
2197 {
2198 tree l;
2199 while (!gsi_end_p (gsi)
2200 && gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
2201 && (l = gimple_label_label (as_a <glabel *> (gsi_stmt (gsi))))
2202 && !case_label_p (&gimplify_ctxp->case_labels, l))
2203 gsi_next_nondebug (&gsi);
2204 if (gsi_end_p (gsi) || gimple_code (gsi_stmt (gsi)) != GIMPLE_LABEL)
2205 return false;
2206 }
2207
2208 /* Don't warn for terminated branches, i.e. when the subsequent case label
2209 immediately breaks. */
2210 gsi = *gsi_p;
2211
2212 /* Skip all immediately following labels. */
2213 while (!gsi_end_p (gsi)
2214 && (gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
2215 || gimple_code (gsi_stmt (gsi)) == GIMPLE_PREDICT))
2216 gsi_next_nondebug (&gsi);
2217
2218 /* { ... something; default:; } */
2219 if (gsi_end_p (gsi)
2220 /* { ... something; default: break; } or
2221 { ... something; default: goto L; } */
2222 || gimple_code (gsi_stmt (gsi)) == GIMPLE_GOTO
2223 /* { ... something; default: return; } */
2224 || gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
2225 return false;
2226
2227 return true;
2228 }
2229
2230 /* Callback for walk_gimple_seq. */
2231
2232 static tree
2233 warn_implicit_fallthrough_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
2234 struct walk_stmt_info *)
2235 {
2236 gimple *stmt = gsi_stmt (*gsi_p);
2237
2238 *handled_ops_p = true;
2239 switch (gimple_code (stmt))
2240 {
2241 case GIMPLE_TRY:
2242 case GIMPLE_BIND:
2243 case GIMPLE_CATCH:
2244 case GIMPLE_EH_FILTER:
2245 case GIMPLE_TRANSACTION:
2246 /* Walk the sub-statements. */
2247 *handled_ops_p = false;
2248 break;
2249
2250 /* Find a sequence of form:
2251
2252 GIMPLE_LABEL
2253 [...]
2254 <may fallthru stmt>
2255 GIMPLE_LABEL
2256
2257 and possibly warn. */
2258 case GIMPLE_LABEL:
2259 {
2260 /* Found a label. Skip all immediately following labels. */
2261 while (!gsi_end_p (*gsi_p)
2262 && gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
2263 gsi_next_nondebug (gsi_p);
2264
2265 /* There might be no more statements. */
2266 if (gsi_end_p (*gsi_p))
2267 return integer_zero_node;
2268
2269 /* Vector of labels that fall through. */
2270 auto_vec <struct label_entry> labels;
2271 location_t prevloc;
2272 gimple *prev = collect_fallthrough_labels (gsi_p, &labels, &prevloc);
2273
2274 /* There might be no more statements. */
2275 if (gsi_end_p (*gsi_p))
2276 return integer_zero_node;
2277
2278 gimple *next = gsi_stmt (*gsi_p);
2279 tree label;
2280 /* If what follows is a label, then we may have a fallthrough. */
2281 if (gimple_code (next) == GIMPLE_LABEL
2282 && gimple_has_location (next)
2283 && (label = gimple_label_label (as_a <glabel *> (next)))
2284 && prev != NULL)
2285 {
2286 struct label_entry *l;
2287 bool warned_p = false;
2288 auto_diagnostic_group d;
2289 if (!should_warn_for_implicit_fallthrough (gsi_p, label))
2290 /* Quiet. */;
2291 else if (gimple_code (prev) == GIMPLE_LABEL
2292 && (label = gimple_label_label (as_a <glabel *> (prev)))
2293 && (l = find_label_entry (&labels, label)))
2294 warned_p = warning_at (l->loc, OPT_Wimplicit_fallthrough_,
2295 "this statement may fall through");
2296 else if (!gimple_call_internal_p (prev, IFN_FALLTHROUGH)
2297 /* Try to be clever and don't warn when the statement
2298 can't actually fall through. */
2299 && gimple_stmt_may_fallthru (prev)
2300 && prevloc != UNKNOWN_LOCATION)
2301 warned_p = warning_at (prevloc,
2302 OPT_Wimplicit_fallthrough_,
2303 "this statement may fall through");
2304 if (warned_p)
2305 inform (gimple_location (next), "here");
2306
2307 /* Mark this label as processed so as to prevent multiple
2308 warnings in nested switches. */
2309 FALLTHROUGH_LABEL_P (label) = true;
2310
2311 /* So that the next warn_implicit_fallthrough_r call will start looking for
2312 a new sequence starting with this label. */
2313 gsi_prev (gsi_p);
2314 }
2315 }
2316 break;
2317 default:
2318 break;
2319 }
2320 return NULL_TREE;
2321 }
2322
2323 /* Warn when a switch case falls through. */
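
/* For example:

     switch (x)
       {
       case 0:
	 foo ();   <-- warning: this statement may fall through
       case 1:     <-- note: here
	 bar ();
	 break;
       }

   The warning can be silenced with __attribute__((fallthrough)) (GNU C),
   [[fallthrough]] (C++17) or, depending on the -Wimplicit-fallthrough=
   level, a "falls through" comment before the next case label. */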
2324
2325 static void
2326 maybe_warn_implicit_fallthrough (gimple_seq seq)
2327 {
2328 if (!warn_implicit_fallthrough)
2329 return;
2330
2331 /* This warning is meant for C/C++/ObjC/ObjC++ only. */
2332 if (!(lang_GNU_C ()
2333 || lang_GNU_CXX ()
2334 || lang_GNU_OBJC ()))
2335 return;
2336
2337 struct walk_stmt_info wi;
2338 memset (&wi, 0, sizeof (wi));
2339 walk_gimple_seq (seq, warn_implicit_fallthrough_r, NULL, &wi);
2340 }
2341
2342 /* Callback for walk_gimple_seq. */
2343
2344 static tree
2345 expand_FALLTHROUGH_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
2346 struct walk_stmt_info *wi)
2347 {
2348 gimple *stmt = gsi_stmt (*gsi_p);
2349
2350 *handled_ops_p = true;
2351 switch (gimple_code (stmt))
2352 {
2353 case GIMPLE_TRY:
2354 case GIMPLE_BIND:
2355 case GIMPLE_CATCH:
2356 case GIMPLE_EH_FILTER:
2357 case GIMPLE_TRANSACTION:
2358 /* Walk the sub-statements. */
2359 *handled_ops_p = false;
2360 break;
2361 case GIMPLE_CALL:
2362 if (gimple_call_internal_p (stmt, IFN_FALLTHROUGH))
2363 {
2364 gsi_remove (gsi_p, true);
2365 if (gsi_end_p (*gsi_p))
2366 {
2367 *static_cast<location_t *>(wi->info) = gimple_location (stmt);
2368 return integer_zero_node;
2369 }
2370
2371 bool found = false;
2372 location_t loc = gimple_location (stmt);
2373
2374 gimple_stmt_iterator gsi2 = *gsi_p;
2375 stmt = gsi_stmt (gsi2);
2376 if (gimple_code (stmt) == GIMPLE_GOTO && !gimple_has_location (stmt))
2377 {
2378 /* Go on until the artificial label. */
2379 tree goto_dest = gimple_goto_dest (stmt);
2380 for (; !gsi_end_p (gsi2); gsi_next (&gsi2))
2381 {
2382 if (gimple_code (gsi_stmt (gsi2)) == GIMPLE_LABEL
2383 && gimple_label_label (as_a <glabel *> (gsi_stmt (gsi2)))
2384 == goto_dest)
2385 break;
2386 }
2387
2388 /* Not found? Stop. */
2389 if (gsi_end_p (gsi2))
2390 break;
2391
2392 /* Look one past it. */
2393 gsi_next (&gsi2);
2394 }
2395
2396 /* We're looking for a case label or default label here. */
2397 while (!gsi_end_p (gsi2))
2398 {
2399 stmt = gsi_stmt (gsi2);
2400 if (gimple_code (stmt) == GIMPLE_LABEL)
2401 {
2402 tree label = gimple_label_label (as_a <glabel *> (stmt));
2403 if (gimple_has_location (stmt) && DECL_ARTIFICIAL (label))
2404 {
2405 found = true;
2406 break;
2407 }
2408 }
2409 else if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
2410 ;
2411 else if (!is_gimple_debug (stmt))
2412 /* Anything else is not expected. */
2413 break;
2414 gsi_next (&gsi2);
2415 }
2416 if (!found)
2417 pedwarn (loc, 0, "attribute %<fallthrough%> not preceding "
2418 "a case label or default label");
2419 }
2420 break;
2421 default:
2422 break;
2423 }
2424 return NULL_TREE;
2425 }
2426
2427 /* Expand all FALLTHROUGH () calls in SEQ. */
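
/* For instance, this is diagnosed because the attribute is not
   immediately followed by a case or default label:

     switch (x)
       {
       case 0:
	 __attribute__((fallthrough));
	 foo ();   <-- attribute 'fallthrough' not preceding
	 break;        a case label or default label
       }

   as is a [[fallthrough]]; that ends the switch body, which the C++
   standard makes ill-formed; see [dcl.attr.fallthrough]. */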
2428
2429 static void
2430 expand_FALLTHROUGH (gimple_seq *seq_p)
2431 {
2432 struct walk_stmt_info wi;
2433 location_t loc;
2434 memset (&wi, 0, sizeof (wi));
2435 wi.info = (void *) &loc;
2436 walk_gimple_seq_mod (seq_p, expand_FALLTHROUGH_r, NULL, &wi);
2437 if (wi.callback_result == integer_zero_node)
2438 /* We've found [[fallthrough]]; at the end of a switch, which the C++
2439 standard says is ill-formed; see [dcl.attr.fallthrough]. */
2440 pedwarn (loc, 0, "attribute %<fallthrough%> not preceding "
2441 "a case label or default label");
2442 }
2443
2444 \f
2445 /* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
2446 branch to. */
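
/* For example (labels invented),

     switch (x) { case 1: foo (); break; default: bar (); }

   gimplifies to roughly

     switch (x) <default: <D.3>, case 1: <D.1>>
     <D.1>:
     foo ();
     goto <D.2>;
     <D.3>:
     bar ();
     <D.2>:

   i.e. a GIMPLE_SWITCH listing its case labels, followed by the lowered
   body. */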
2447
2448 static enum gimplify_status
2449 gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
2450 {
2451 tree switch_expr = *expr_p;
2452 gimple_seq switch_body_seq = NULL;
2453 enum gimplify_status ret;
2454 tree index_type = TREE_TYPE (switch_expr);
2455 if (index_type == NULL_TREE)
2456 index_type = TREE_TYPE (SWITCH_COND (switch_expr));
2457
2458 ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
2459 fb_rvalue);
2460 if (ret == GS_ERROR || ret == GS_UNHANDLED)
2461 return ret;
2462
2463 if (SWITCH_BODY (switch_expr))
2464 {
2465 vec<tree> labels;
2466 vec<tree> saved_labels;
2467 hash_set<tree> *saved_live_switch_vars = NULL;
2468 tree default_case = NULL_TREE;
2469 gswitch *switch_stmt;
2470
2471 /* Save old labels, get new ones from body, then restore the old
2472 labels. Collect everything from the switch body to append afterwards. */
2473 saved_labels = gimplify_ctxp->case_labels;
2474 gimplify_ctxp->case_labels.create (8);
2475
2476 /* Do not create live_switch_vars if SWITCH_BODY is not a BIND_EXPR. */
2477 saved_live_switch_vars = gimplify_ctxp->live_switch_vars;
2478 tree_code body_type = TREE_CODE (SWITCH_BODY (switch_expr));
2479 if (body_type == BIND_EXPR || body_type == STATEMENT_LIST)
2480 gimplify_ctxp->live_switch_vars = new hash_set<tree> (4);
2481 else
2482 gimplify_ctxp->live_switch_vars = NULL;
2483
2484 bool old_in_switch_expr = gimplify_ctxp->in_switch_expr;
2485 gimplify_ctxp->in_switch_expr = true;
2486
2487 gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);
2488
2489 gimplify_ctxp->in_switch_expr = old_in_switch_expr;
2490 maybe_warn_switch_unreachable (switch_body_seq);
2491 maybe_warn_implicit_fallthrough (switch_body_seq);
2492 /* Only do this for the outermost GIMPLE_SWITCH. */
2493 if (!gimplify_ctxp->in_switch_expr)
2494 expand_FALLTHROUGH (&switch_body_seq);
2495
2496 labels = gimplify_ctxp->case_labels;
2497 gimplify_ctxp->case_labels = saved_labels;
2498
2499 if (gimplify_ctxp->live_switch_vars)
2500 {
2501 gcc_assert (gimplify_ctxp->live_switch_vars->is_empty ());
2502 delete gimplify_ctxp->live_switch_vars;
2503 }
2504 gimplify_ctxp->live_switch_vars = saved_live_switch_vars;
2505
2506 preprocess_case_label_vec_for_gimple (labels, index_type,
2507 &default_case);
2508
2509 bool add_bind = false;
2510 if (!default_case)
2511 {
2512 glabel *new_default;
2513
2514 default_case
2515 = build_case_label (NULL_TREE, NULL_TREE,
2516 create_artificial_label (UNKNOWN_LOCATION));
2517 if (old_in_switch_expr)
2518 {
2519 SWITCH_BREAK_LABEL_P (CASE_LABEL (default_case)) = 1;
2520 add_bind = true;
2521 }
2522 new_default = gimple_build_label (CASE_LABEL (default_case));
2523 gimplify_seq_add_stmt (&switch_body_seq, new_default);
2524 }
2525 else if (old_in_switch_expr)
2526 {
2527 gimple *last = gimple_seq_last_stmt (switch_body_seq);
2528 if (last && gimple_code (last) == GIMPLE_LABEL)
2529 {
2530 tree label = gimple_label_label (as_a <glabel *> (last));
2531 if (SWITCH_BREAK_LABEL_P (label))
2532 add_bind = true;
2533 }
2534 }
2535
2536 switch_stmt = gimple_build_switch (SWITCH_COND (switch_expr),
2537 default_case, labels);
2538 /* For the benefit of -Wimplicit-fallthrough, if switch_body_seq
2539 ends with a GIMPLE_LABEL holding SWITCH_BREAK_LABEL_P LABEL_DECL,
2540 wrap the GIMPLE_SWITCH up to that GIMPLE_LABEL into a GIMPLE_BIND,
2541 so that we can easily find the start and end of the switch
2542 statement. */
2543 if (add_bind)
2544 {
2545 gimple_seq bind_body = NULL;
2546 gimplify_seq_add_stmt (&bind_body, switch_stmt);
2547 gimple_seq_add_seq (&bind_body, switch_body_seq);
2548 gbind *bind = gimple_build_bind (NULL_TREE, bind_body, NULL_TREE);
2549 gimple_set_location (bind, EXPR_LOCATION (switch_expr));
2550 gimplify_seq_add_stmt (pre_p, bind);
2551 }
2552 else
2553 {
2554 gimplify_seq_add_stmt (pre_p, switch_stmt);
2555 gimplify_seq_add_seq (pre_p, switch_body_seq);
2556 }
2557 labels.release ();
2558 }
2559 else
2560 gcc_unreachable ();
2561
2562 return GS_ALL_DONE;
2563 }
2564
2565 /* Gimplify the LABEL_EXPR pointed to by EXPR_P. */
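
/* Label attributes feed branch prediction here; e.g. (a sketch)

     unlikely: __attribute__ ((cold));
       handle_error ();

   adds a GIMPLE_PREDICT marking jumps to this label as not taken. */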
2566
2567 static enum gimplify_status
2568 gimplify_label_expr (tree *expr_p, gimple_seq *pre_p)
2569 {
2570 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
2571 == current_function_decl);
2572
2573 tree label = LABEL_EXPR_LABEL (*expr_p);
2574 glabel *label_stmt = gimple_build_label (label);
2575 gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
2576 gimplify_seq_add_stmt (pre_p, label_stmt);
2577
2578 if (lookup_attribute ("cold", DECL_ATTRIBUTES (label)))
2579 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_COLD_LABEL,
2580 NOT_TAKEN));
2581 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label)))
2582 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_HOT_LABEL,
2583 TAKEN));
2584
2585 return GS_ALL_DONE;
2586 }
2587
2588 /* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P. */
2589
2590 static enum gimplify_status
2591 gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
2592 {
2593 struct gimplify_ctx *ctxp;
2594 glabel *label_stmt;
2595
2596 /* Invalid programs can play Duff's Device type games with, for example,
2597 #pragma omp parallel. At least in the C front end, we don't
2598 detect such invalid branches until after gimplification, in the
2599 diagnose_omp_blocks pass. */
2600 for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
2601 if (ctxp->case_labels.exists ())
2602 break;
2603
2604 tree label = CASE_LABEL (*expr_p);
2605 label_stmt = gimple_build_label (label);
2606 gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
2607 ctxp->case_labels.safe_push (*expr_p);
2608 gimplify_seq_add_stmt (pre_p, label_stmt);
2609
2610 if (lookup_attribute ("cold", DECL_ATTRIBUTES (label)))
2611 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_COLD_LABEL,
2612 NOT_TAKEN));
2613 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label)))
2614 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_HOT_LABEL,
2615 TAKEN));
2616
2617 return GS_ALL_DONE;
2618 }
2619
2620 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
2621 if necessary. */
2622
2623 tree
2624 build_and_jump (tree *label_p)
2625 {
2626 if (label_p == NULL)
2627 /* If there's nowhere to jump, just fall through. */
2628 return NULL_TREE;
2629
2630 if (*label_p == NULL_TREE)
2631 {
2632 tree label = create_artificial_label (UNKNOWN_LOCATION);
2633 *label_p = label;
2634 }
2635
2636 return build1 (GOTO_EXPR, void_type_node, *label_p);
2637 }
2638
2639 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
2640 This also involves building a label to jump to and communicating it to
2641 gimplify_loop_expr through gimplify_ctxp->exit_label. */
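
/* That is, EXIT_EXPR <cond> becomes the equivalent of

     if (cond) goto exit_label;

   with exit_label created on demand and emitted at the end of the loop
   by gimplify_loop_expr. */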
2642
2643 static enum gimplify_status
2644 gimplify_exit_expr (tree *expr_p)
2645 {
2646 tree cond = TREE_OPERAND (*expr_p, 0);
2647 tree expr;
2648
2649 expr = build_and_jump (&gimplify_ctxp->exit_label);
2650 expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
2651 *expr_p = expr;
2652
2653 return GS_OK;
2654 }
2655
2656 /* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is
2657 different from its canonical type, wrap the whole thing inside a
2658 NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
2659 type.
2660
2661 The canonical type of a COMPONENT_REF is the type of the field being
2662 referenced--unless the field is a bit-field which can be read directly
2663 in a smaller mode, in which case the canonical type is the
2664 sign-appropriate type corresponding to that mode. */
2665
2666 static void
2667 canonicalize_component_ref (tree *expr_p)
2668 {
2669 tree expr = *expr_p;
2670 tree type;
2671
2672 gcc_assert (TREE_CODE (expr) == COMPONENT_REF);
2673
2674 if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
2675 type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
2676 else
2677 type = TREE_TYPE (TREE_OPERAND (expr, 1));
2678
2679 /* One could argue that all the stuff below is not necessary for
2680 the non-bitfield case and declare it a FE error if type
2681 adjustment would be needed. */
2682 if (TREE_TYPE (expr) != type)
2683 {
2684 #ifdef ENABLE_TYPES_CHECKING
2685 tree old_type = TREE_TYPE (expr);
2686 #endif
2687 int type_quals;
2688
2689 /* We need to preserve qualifiers and propagate them from
2690 operand 0. */
2691 type_quals = TYPE_QUALS (type)
2692 | TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
2693 if (TYPE_QUALS (type) != type_quals)
2694 type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);
2695
2696 /* Set the type of the COMPONENT_REF to the underlying type. */
2697 TREE_TYPE (expr) = type;
2698
2699 #ifdef ENABLE_TYPES_CHECKING
2700 /* It is now a FE error if the conversion from the canonical
2701 type to the original expression type is not useless. */
2702 gcc_assert (useless_type_conversion_p (old_type, type));
2703 #endif
2704 }
2705 }
2706
2707 /* If a NOP conversion is changing a pointer to array of foo to a pointer
2708 to foo, embed that change in the ADDR_EXPR by converting
2709 T array[U];
2710 (T *)&array
2711 ==>
2712 &array[L]
2713 where L is the lower bound. For simplicity, only do this for a constant
2714 lower bound.
2715 The constraint is that the type of &array[L] is trivially convertible
2716 to T *. */
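
/* A concrete instance: given "int a[10];", the conversion in
   "(int *) &a" is folded here into "&a[0]". */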
2717
2718 static void
2719 canonicalize_addr_expr (tree *expr_p)
2720 {
2721 tree expr = *expr_p;
2722 tree addr_expr = TREE_OPERAND (expr, 0);
2723 tree datype, ddatype, pddatype;
2724
2725 /* We simplify only conversions from an ADDR_EXPR to a pointer type. */
2726 if (!POINTER_TYPE_P (TREE_TYPE (expr))
2727 || TREE_CODE (addr_expr) != ADDR_EXPR)
2728 return;
2729
2730 /* The addr_expr type should be a pointer to an array. */
2731 datype = TREE_TYPE (TREE_TYPE (addr_expr));
2732 if (TREE_CODE (datype) != ARRAY_TYPE)
2733 return;
2734
2735 /* The pointer to element type shall be trivially convertible to
2736 the expression pointer type. */
2737 ddatype = TREE_TYPE (datype);
2738 pddatype = build_pointer_type (ddatype);
2739 if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
2740 pddatype))
2741 return;
2742
2743 /* The lower bound and element sizes must be constant. */
2744 if (!TYPE_SIZE_UNIT (ddatype)
2745 || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
2746 || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
2747 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
2748 return;
2749
2750 /* All checks succeeded. Build a new node to merge the cast. */
2751 *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
2752 TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
2753 NULL_TREE, NULL_TREE);
2754 *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);
2755
2756 /* We may have stripped a required restrict qualifier above. */
2757 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
2758 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
2759 }
2760
2761 /* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions
2762 underneath as appropriate. */
2763
2764 static enum gimplify_status
2765 gimplify_conversion (tree *expr_p)
2766 {
2767 location_t loc = EXPR_LOCATION (*expr_p);
2768 gcc_assert (CONVERT_EXPR_P (*expr_p));
2769
2770 /* First strip away all but the outermost conversion. */
2771 STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));
2772
2773 /* And remove the outermost conversion if it's useless. */
2774 if (tree_ssa_useless_type_conversion (*expr_p))
2775 *expr_p = TREE_OPERAND (*expr_p, 0);
2776
2777 /* If we still have a conversion at the toplevel,
2778 then canonicalize some constructs. */
2779 if (CONVERT_EXPR_P (*expr_p))
2780 {
2781 tree sub = TREE_OPERAND (*expr_p, 0);
2782
2783 /* If a NOP conversion is changing the type of a COMPONENT_REF
2784 expression, then canonicalize its type now in order to expose more
2785 redundant conversions. */
2786 if (TREE_CODE (sub) == COMPONENT_REF)
2787 canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));
2788
2789 /* If a NOP conversion is changing a pointer to array of foo
2790 to a pointer to foo, embed that change in the ADDR_EXPR. */
2791 else if (TREE_CODE (sub) == ADDR_EXPR)
2792 canonicalize_addr_expr (expr_p);
2793 }
2794
2795 /* If we have a conversion to a non-register type force the
2796 use of a VIEW_CONVERT_EXPR instead. */
2797 if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
2798 *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
2799 TREE_OPERAND (*expr_p, 0));
2800
2801 /* Canonicalize CONVERT_EXPR to NOP_EXPR. */
2802 if (TREE_CODE (*expr_p) == CONVERT_EXPR)
2803 TREE_SET_CODE (*expr_p, NOP_EXPR);
2804
2805 return GS_OK;
2806 }
2807
2808 /* Gimplify a VAR_DECL or PARM_DECL. Return GS_OK if we expanded a
2809 DECL_VALUE_EXPR, and it's worth re-examining things. */
2810
2811 static enum gimplify_status
2812 gimplify_var_or_parm_decl (tree *expr_p)
2813 {
2814 tree decl = *expr_p;
2815
2816 /* ??? If this is a local variable, and it has not been seen in any
2817 outer BIND_EXPR, then it's probably the result of a duplicate
2818 declaration, for which we've already issued an error. It would
2819 be really nice if the front end wouldn't leak these at all.
2820 Currently the only known culprit is C++ destructors, as seen
2821 in g++.old-deja/g++.jason/binding.C. */
2822 if (VAR_P (decl)
2823 && !DECL_SEEN_IN_BIND_EXPR_P (decl)
2824 && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
2825 && decl_function_context (decl) == current_function_decl)
2826 {
2827 gcc_assert (seen_error ());
2828 return GS_ERROR;
2829 }
2830
2831 /* When within an OMP context, notice uses of variables. */
2832 if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
2833 return GS_ALL_DONE;
2834
2835 /* If the decl is an alias for another expression, substitute it now. */
2836 if (DECL_HAS_VALUE_EXPR_P (decl))
2837 {
2838 *expr_p = unshare_expr (DECL_VALUE_EXPR (decl));
2839 return GS_OK;
2840 }
2841
2842 return GS_ALL_DONE;
2843 }
2844
2845 /* Recalculate the value of the TREE_SIDE_EFFECTS flag for T. */
2846
2847 static void
2848 recalculate_side_effects (tree t)
2849 {
2850 enum tree_code code = TREE_CODE (t);
2851 int len = TREE_OPERAND_LENGTH (t);
2852 int i;
2853
2854 switch (TREE_CODE_CLASS (code))
2855 {
2856 case tcc_expression:
2857 switch (code)
2858 {
2859 case INIT_EXPR:
2860 case MODIFY_EXPR:
2861 case VA_ARG_EXPR:
2862 case PREDECREMENT_EXPR:
2863 case PREINCREMENT_EXPR:
2864 case POSTDECREMENT_EXPR:
2865 case POSTINCREMENT_EXPR:
2866 /* All of these have side-effects, no matter what their
2867 operands are. */
2868 return;
2869
2870 default:
2871 break;
2872 }
2873 /* Fall through. */
2874
2875 case tcc_comparison: /* a comparison expression */
2876 case tcc_unary: /* a unary arithmetic expression */
2877 case tcc_binary: /* a binary arithmetic expression */
2878 case tcc_reference: /* a reference */
2879 case tcc_vl_exp: /* a function call */
2880 TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
2881 for (i = 0; i < len; ++i)
2882 {
2883 tree op = TREE_OPERAND (t, i);
2884 if (op && TREE_SIDE_EFFECTS (op))
2885 TREE_SIDE_EFFECTS (t) = 1;
2886 }
2887 break;
2888
2889 case tcc_constant:
2890 /* No side-effects. */
2891 return;
2892
2893 default:
2894 gcc_unreachable ();
2895 }
2896 }
2897
2898 /* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
2899 node *EXPR_P.
2900
2901 compound_lval
2902 : min_lval '[' val ']'
2903 | min_lval '.' ID
2904 | compound_lval '[' val ']'
2905 | compound_lval '.' ID
2906
2907 This is not part of the original SIMPLE definition, which separates
2908 array and member references, but it seems reasonable to handle them
2909 together. Also, this way we don't run into problems with union
2910 aliasing; gcc requires that for accesses through a union to alias, the
2911 union reference must be explicit, which was not always the case when we
2912 were splitting up array and member refs.
2913
2914 PRE_P points to the sequence where side effects that must happen before
2915 *EXPR_P should be stored.
2916
2917 POST_P points to the sequence where side effects that must happen after
2918 *EXPR_P should be stored. */
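
/* For example, for "a[i + 1].f" the index expression is gimplified into
   a register value (temporary name invented):

     t = i + 1;
     ... a[t].f ...

   rather than copying a[i + 1] into an aggregate temporary. */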
2919
2920 static enum gimplify_status
2921 gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
2922 fallback_t fallback)
2923 {
2924 tree *p;
2925 enum gimplify_status ret = GS_ALL_DONE, tret;
2926 int i;
2927 location_t loc = EXPR_LOCATION (*expr_p);
2928 tree expr = *expr_p;
2929
2930 /* Create a stack of the subexpressions so later we can walk them in
2931 order from inner to outer. */
2932 auto_vec<tree, 10> expr_stack;
2933
2934 /* We can handle anything that get_inner_reference can deal with. */
2935 for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
2936 {
2937 restart:
2938 /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs. */
2939 if (TREE_CODE (*p) == INDIRECT_REF)
2940 *p = fold_indirect_ref_loc (loc, *p);
2941
2942 if (handled_component_p (*p))
2943 ;
2944 /* Expand DECL_VALUE_EXPR now. In some cases that may expose
2945 additional COMPONENT_REFs. */
2946 else if ((VAR_P (*p) || TREE_CODE (*p) == PARM_DECL)
2947 && gimplify_var_or_parm_decl (p) == GS_OK)
2948 goto restart;
2949 else
2950 break;
2951
2952 expr_stack.safe_push (*p);
2953 }
2954
2955 gcc_assert (expr_stack.length ());
2956
2957 /* Now EXPR_STACK is a stack of pointers to all the refs we've
2958 walked through and P points to the innermost expression.
2959
2960 Java requires that we elaborate nodes in source order. That
2961 means we must gimplify the inner expression followed by each of
2962 the indices, in order. But we can't gimplify the inner
2963 expression until we deal with any variable bounds, sizes, or
2964 positions in order to deal with PLACEHOLDER_EXPRs.
2965
2966 So we do this in three steps. First we deal with the annotations
2967 for any variables in the components, then we gimplify the base,
2968 then we gimplify any indices, from left to right. */
2969 for (i = expr_stack.length () - 1; i >= 0; i--)
2970 {
2971 tree t = expr_stack[i];
2972
2973 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
2974 {
2975 /* Gimplify the low bound and element type size and put them into
2976 the ARRAY_REF. If these values are set, they have already been
2977 gimplified. */
2978 if (TREE_OPERAND (t, 2) == NULL_TREE)
2979 {
2980 tree low = unshare_expr (array_ref_low_bound (t));
2981 if (!is_gimple_min_invariant (low))
2982 {
2983 TREE_OPERAND (t, 2) = low;
2984 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
2985 post_p, is_gimple_reg,
2986 fb_rvalue);
2987 ret = MIN (ret, tret);
2988 }
2989 }
2990 else
2991 {
2992 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
2993 is_gimple_reg, fb_rvalue);
2994 ret = MIN (ret, tret);
2995 }
2996
2997 if (TREE_OPERAND (t, 3) == NULL_TREE)
2998 {
2999 tree elmt_size = array_ref_element_size (t);
3000 if (!is_gimple_min_invariant (elmt_size))
3001 {
3002 elmt_size = unshare_expr (elmt_size);
3003 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
3004 tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));
3005
3006 /* Divide the element size by the alignment of the element
3007 type (above). */
3008 elmt_size = size_binop_loc (loc, EXACT_DIV_EXPR,
3009 elmt_size, factor);
3010
3011 TREE_OPERAND (t, 3) = elmt_size;
3012 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
3013 post_p, is_gimple_reg,
3014 fb_rvalue);
3015 ret = MIN (ret, tret);
3016 }
3017 }
3018 else
3019 {
3020 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
3021 is_gimple_reg, fb_rvalue);
3022 ret = MIN (ret, tret);
3023 }
3024 }
3025 else if (TREE_CODE (t) == COMPONENT_REF)
3026 {
3027 /* Set the field offset into T and gimplify it. */
3028 if (TREE_OPERAND (t, 2) == NULL_TREE)
3029 {
3030 tree offset = component_ref_field_offset (t);
3031 if (!is_gimple_min_invariant (offset))
3032 {
3033 offset = unshare_expr (offset);
3034 tree field = TREE_OPERAND (t, 1);
3035 tree factor
3036 = size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);
3037
3038 /* Divide the offset by its alignment. */
3039 offset = size_binop_loc (loc, EXACT_DIV_EXPR,
3040 offset, factor);
3041
3042 TREE_OPERAND (t, 2) = offset;
3043 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
3044 post_p, is_gimple_reg,
3045 fb_rvalue);
3046 ret = MIN (ret, tret);
3047 }
3048 }
3049 else
3050 {
3051 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
3052 is_gimple_reg, fb_rvalue);
3053 ret = MIN (ret, tret);
3054 }
3055 }
3056 }
3057
3058 /* Step 2 is to gimplify the base expression. Make sure lvalue is set
3059 so as to match the min_lval predicate. Failure to do so may result
3060 in the creation of large aggregate temporaries. */
3061 tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
3062 fallback | fb_lvalue);
3063 ret = MIN (ret, tret);
3064
3065 /* And finally, the indices and operands of ARRAY_REF. During this
3066 loop we also remove any useless conversions. */
3067 for (; expr_stack.length () > 0; )
3068 {
3069 tree t = expr_stack.pop ();
3070
3071 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3072 {
3073 /* Gimplify the dimension. */
3074 if (!is_gimple_min_invariant (TREE_OPERAND (t, 1)))
3075 {
3076 tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
3077 is_gimple_val, fb_rvalue);
3078 ret = MIN (ret, tret);
3079 }
3080 }
3081
3082 STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));
3083
3084 /* The innermost expression P may have originally had
3085 TREE_SIDE_EFFECTS set which would have caused all the outer
3086 expressions in *EXPR_P leading to P to also have had
3087 TREE_SIDE_EFFECTS set. */
3088 recalculate_side_effects (t);
3089 }
3090
3091 /* If the outermost expression is a COMPONENT_REF, canonicalize its type. */
3092 if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
3093 {
3094 canonicalize_component_ref (expr_p);
3095 }
3096
3097 expr_stack.release ();
3098
3099 gcc_assert (*expr_p == expr || ret != GS_ALL_DONE);
3100
3101 return ret;
3102 }
3103
3104 /* Gimplify the self modifying expression pointed to by EXPR_P
3105 (++, --, +=, -=).
3106
3107 PRE_P points to the list where side effects that must happen before
3108 *EXPR_P should be stored.
3109
3110 POST_P points to the list where side effects that must happen after
3111 *EXPR_P should be stored.
3112
3113 WANT_VALUE is nonzero iff we want to use the value of this expression
3114 in another expression.
3115
3116 ARITH_TYPE is the type the computation should be performed in. */
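
/* For example (temporary name invented), the postfix "b = a++" becomes

     t = a;
     a = t + 1;
     b = t;

   while the prefix "++a" simply becomes "a = a + 1". Pointer arithmetic
   goes through POINTER_PLUS_EXPR instead; see below. */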
3117
3118 enum gimplify_status
3119 gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3120 bool want_value, tree arith_type)
3121 {
3122 enum tree_code code;
3123 tree lhs, lvalue, rhs, t1;
3124 gimple_seq post = NULL, *orig_post_p = post_p;
3125 bool postfix;
3126 enum tree_code arith_code;
3127 enum gimplify_status ret;
3128 location_t loc = EXPR_LOCATION (*expr_p);
3129
3130 code = TREE_CODE (*expr_p);
3131
3132 gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
3133 || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);
3134
3135 /* Prefix or postfix? */
3136 if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
3137 /* Faster to treat as prefix if result is not used. */
3138 postfix = want_value;
3139 else
3140 postfix = false;
3141
3142 /* For postfix, make sure the inner expression's post side effects
3143 are executed after side effects from this expression. */
3144 if (postfix)
3145 post_p = &post;
3146
3147 /* Add or subtract? */
3148 if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
3149 arith_code = PLUS_EXPR;
3150 else
3151 arith_code = MINUS_EXPR;
3152
3153 /* Gimplify the LHS into a GIMPLE lvalue. */
3154 lvalue = TREE_OPERAND (*expr_p, 0);
3155 ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
3156 if (ret == GS_ERROR)
3157 return ret;
3158
3159 /* Extract the operands to the arithmetic operation. */
3160 lhs = lvalue;
3161 rhs = TREE_OPERAND (*expr_p, 1);
3162
3163 /* For a postfix operator, we evaluate the LHS to an rvalue and then use
3164 that as the result value and in the postqueue operation. */
3165 if (postfix)
3166 {
3167 ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
3168 if (ret == GS_ERROR)
3169 return ret;
3170
3171 lhs = get_initialized_tmp_var (lhs, pre_p);
3172 }
3173
3174 /* For pointer increment and decrement, use POINTER_PLUS_EXPR. */
3175 if (POINTER_TYPE_P (TREE_TYPE (lhs)))
3176 {
3177 rhs = convert_to_ptrofftype_loc (loc, rhs);
3178 if (arith_code == MINUS_EXPR)
3179 rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
3180 t1 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (*expr_p), lhs, rhs);
3181 }
3182 else
3183 t1 = fold_convert (TREE_TYPE (*expr_p),
3184 fold_build2 (arith_code, arith_type,
3185 fold_convert (arith_type, lhs),
3186 fold_convert (arith_type, rhs)));
3187
3188 if (postfix)
3189 {
3190 gimplify_assign (lvalue, t1, pre_p);
3191 gimplify_seq_add_seq (orig_post_p, post);
3192 *expr_p = lhs;
3193 return GS_ALL_DONE;
3194 }
3195 else
3196 {
3197 *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
3198 return GS_OK;
3199 }
3200 }
3201
3202 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
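
/* A sketch: when a variable-sized value is copied, e.g. "*p = *q" where
   both sides have a type whose size N is only known at run time, the
   source becomes

     WITH_SIZE_EXPR <*q, N>

   so that gimplify_modify_expr can later lower the copy to a
   __builtin_memcpy of the right length. */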
3203
3204 static void
3205 maybe_with_size_expr (tree *expr_p)
3206 {
3207 tree expr = *expr_p;
3208 tree type = TREE_TYPE (expr);
3209 tree size;
3210
3211 /* If we've already wrapped this or the type is error_mark_node, we can't do
3212 anything. */
3213 if (TREE_CODE (expr) == WITH_SIZE_EXPR
3214 || type == error_mark_node)
3215 return;
3216
3217 /* If the size isn't known or is a constant, we have nothing to do. */
3218 size = TYPE_SIZE_UNIT (type);
3219 if (!size || poly_int_tree_p (size))
3220 return;
3221
3222 /* Otherwise, make a WITH_SIZE_EXPR. */
3223 size = unshare_expr (size);
3224 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
3225 *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
3226 }
3227
3228 /* Helper for gimplify_call_expr. Gimplify a single argument *ARG_P.
3229 Store any side-effects in PRE_P. CALL_LOCATION is the location of
3230 the CALL_EXPR. If ALLOW_SSA is set the actual parameter may be
3231 gimplified to an SSA name. */
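
/* E.g. a large struct argument is passed as a plain lvalue, while a
   scalar argument such as "i + 1" is pulled into a temporary (or an SSA
   name when ALLOW_SSA) so the call sees a simple value. */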
3232
3233 enum gimplify_status
3234 gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location,
3235 bool allow_ssa)
3236 {
3237 bool (*test) (tree);
3238 fallback_t fb;
3239
3240 /* In general, we allow lvalues for function arguments to avoid
3241 extra overhead of copying large aggregates out of even larger
3242 aggregates into temporaries only to copy the temporaries to
3243 the argument list. Make optimizers happy by pulling out to
3244 temporaries those types that fit in registers. */
3245 if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
3246 test = is_gimple_val, fb = fb_rvalue;
3247 else
3248 {
3249 test = is_gimple_lvalue, fb = fb_either;
3250 /* Also strip a TARGET_EXPR that would force an extra copy. */
3251 if (TREE_CODE (*arg_p) == TARGET_EXPR)
3252 {
3253 tree init = TARGET_EXPR_INITIAL (*arg_p);
3254 if (init
3255 && !VOID_TYPE_P (TREE_TYPE (init)))
3256 *arg_p = init;
3257 }
3258 }
3259
3260 /* If this is a variable sized type, we must remember the size. */
3261 maybe_with_size_expr (arg_p);
3262
3263 /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c. */
3264 /* Make sure arguments have the same location as the function call
3265 itself. */
3266 protected_set_expr_location (*arg_p, call_location);
3267
3268 /* There is a sequence point before a function call. Side effects in
3269 the argument list must occur before the actual call. So, when
3270 gimplifying arguments, force gimplify_expr to use an internal
3271 post queue which is then appended to the end of PRE_P. */
3272 return gimplify_expr (arg_p, pre_p, NULL, test, fb, allow_ssa);
3273 }
3274
3275 /* Don't fold inside offloading or taskreg regions: it can break code by
3276 adding decl references that weren't in the source. We'll do it during
3277 omplower pass instead. */
3278
3279 static bool
3280 maybe_fold_stmt (gimple_stmt_iterator *gsi)
3281 {
3282 struct gimplify_omp_ctx *ctx;
3283 for (ctx = gimplify_omp_ctxp; ctx; ctx = ctx->outer_context)
3284 if ((ctx->region_type & (ORT_TARGET | ORT_PARALLEL | ORT_TASK)) != 0)
3285 return false;
3286 else if ((ctx->region_type & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
3287 return false;
3288 /* Delay folding of builtins until the IL is in consistent state
3289 so the diagnostic machinery can do a better job. */
3290 if (gimple_call_builtin_p (gsi_stmt (*gsi)))
3291 return false;
3292 return fold_stmt (gsi);
3293 }
3294
3295 /* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
3296 WANT_VALUE is true if the result of the call is desired. */
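
/* One effect of the early builtin folding below: a call with constant
   arguments such as

     n = strlen ("hello");

   may be replaced by its value (here 5) via fold_call_expr before the
   CALL_EXPR itself is gimplified, so no call remains to lower. */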
3297
3298 static enum gimplify_status
3299 gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
3300 {
3301 tree fndecl, parms, p, fnptrtype;
3302 enum gimplify_status ret;
3303 int i, nargs;
3304 gcall *call;
3305 bool builtin_va_start_p = false;
3306 location_t loc = EXPR_LOCATION (*expr_p);
3307
3308 gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);
3309
3310 /* For reliable diagnostics during inlining, it is necessary that
3311 every call_expr be annotated with file and line. */
3312 if (! EXPR_HAS_LOCATION (*expr_p))
3313 SET_EXPR_LOCATION (*expr_p, input_location);
3314
3315 /* Gimplify internal functions created in the FEs. */
3316 if (CALL_EXPR_FN (*expr_p) == NULL_TREE)
3317 {
3318 if (want_value)
3319 return GS_ALL_DONE;
3320
3321 nargs = call_expr_nargs (*expr_p);
3322 enum internal_fn ifn = CALL_EXPR_IFN (*expr_p);
3323 auto_vec<tree> vargs (nargs);
3324
3325 for (i = 0; i < nargs; i++)
3326 {
3327 gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
3328 EXPR_LOCATION (*expr_p));
3329 vargs.quick_push (CALL_EXPR_ARG (*expr_p, i));
3330 }
3331
3332 gcall *call = gimple_build_call_internal_vec (ifn, vargs);
3333 gimple_call_set_nothrow (call, TREE_NOTHROW (*expr_p));
3334 gimplify_seq_add_stmt (pre_p, call);
3335 return GS_ALL_DONE;
3336 }
3337
3338 /* This may be a call to a builtin function.
3339
3340 Builtin function calls may be transformed into different
3341 (and more efficient) builtin function calls under certain
3342 circumstances. Unfortunately, gimplification can muck things
3343 up enough that the builtin expanders are not aware that certain
3344 transformations are still valid.
3345
3346 So we attempt transformation/gimplification of the call before
3347 we gimplify the CALL_EXPR. At this time we do not manage to
3348 transform all calls in the same manner as the expanders do, but
3349 we do transform most of them. */
3350 fndecl = get_callee_fndecl (*expr_p);
3351 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
3352 switch (DECL_FUNCTION_CODE (fndecl))
3353 {
3354 CASE_BUILT_IN_ALLOCA:
3355 /* If the call has been built for a variable-sized object, then we
3356 want to restore the stack level when the enclosing BIND_EXPR is
3357 exited to reclaim the allocated space; otherwise, we precisely
3358 need to do the opposite and preserve the latest stack level. */
3359 if (CALL_ALLOCA_FOR_VAR_P (*expr_p))
3360 gimplify_ctxp->save_stack = true;
3361 else
3362 gimplify_ctxp->keep_stack = true;
3363 break;
3364
3365 case BUILT_IN_VA_START:
3366 {
3367 builtin_va_start_p = TRUE;
3368 if (call_expr_nargs (*expr_p) < 2)
3369 {
3370 error ("too few arguments to function %<va_start%>");
3371 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
3372 return GS_OK;
3373 }
3374
3375 if (fold_builtin_next_arg (*expr_p, true))
3376 {
3377 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
3378 return GS_OK;
3379 }
3380 break;
3381 }
3382
3383 case BUILT_IN_EH_RETURN:
3384 cfun->calls_eh_return = true;
3385 break;
3386
3387 default:
3388 ;
3389 }
3390 if (fndecl && fndecl_built_in_p (fndecl))
3391 {
3392 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
3393 if (new_tree && new_tree != *expr_p)
3394 {
3395 /* There was a transformation of this call which computes the
3396 same value, but in a more efficient way. Return and try
3397 again. */
3398 *expr_p = new_tree;
3399 return GS_OK;
3400 }
3401 }
3402
3403 /* Remember the original function pointer type. */
3404 fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
3405
3406 if (flag_openmp
3407 && fndecl
3408 && cfun
3409 && (cfun->curr_properties & PROP_gimple_any) == 0)
3410 {
3411 tree variant = omp_resolve_declare_variant (fndecl);
3412 if (variant != fndecl)
3413 CALL_EXPR_FN (*expr_p) = build1 (ADDR_EXPR, fnptrtype, variant);
3414 }
3415
3416 /* There is a sequence point before the call, so any side effects in
3417 the calling expression must occur before the actual call. Force
3418 gimplify_expr to use an internal post queue. */
3419 ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
3420 is_gimple_call_addr, fb_rvalue);
3421
3422 nargs = call_expr_nargs (*expr_p);
3423
3424 /* Get argument types for verification. */
3425 fndecl = get_callee_fndecl (*expr_p);
3426 parms = NULL_TREE;
3427 if (fndecl)
3428 parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3429 else
3430 parms = TYPE_ARG_TYPES (TREE_TYPE (fnptrtype));
3431
3432 if (fndecl && DECL_ARGUMENTS (fndecl))
3433 p = DECL_ARGUMENTS (fndecl);
3434 else if (parms)
3435 p = parms;
3436 else
3437 p = NULL_TREE;
3438 for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
3439 ;
3440
3441 /* If the last argument is __builtin_va_arg_pack () and it is not
3442 passed as a named argument, decrease the number of CALL_EXPR
3443 arguments and set instead the CALL_EXPR_VA_ARG_PACK flag. */
3444 if (!p
3445 && i < nargs
3446 && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
3447 {
3448 tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
3449 tree last_arg_fndecl = get_callee_fndecl (last_arg);
3450
3451 if (last_arg_fndecl
3452 && fndecl_built_in_p (last_arg_fndecl, BUILT_IN_VA_ARG_PACK))
3453 {
3454 tree call = *expr_p;
3455
3456 --nargs;
3457 *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
3458 CALL_EXPR_FN (call),
3459 nargs, CALL_EXPR_ARGP (call));
3460
3461 /* Copy all CALL_EXPR flags, location and block, except
3462 CALL_EXPR_VA_ARG_PACK flag. */
3463 CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
3464 CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
3465 CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
3466 = CALL_EXPR_RETURN_SLOT_OPT (call);
3467 CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
3468 SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));
3469
3470 /* Set CALL_EXPR_VA_ARG_PACK. */
3471 CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
3472 }
3473 }
3474
3475 /* If the call returns twice then after building the CFG the call
3476 argument computations will no longer dominate the call because
3477 we add an abnormal incoming edge to the call. So do not use SSA
3478 vars there. */
3479 bool returns_twice = call_expr_flags (*expr_p) & ECF_RETURNS_TWICE;
3480
3481 /* Gimplify the function arguments. */
3482 if (nargs > 0)
3483 {
3484 for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
3485 PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
3486 PUSH_ARGS_REVERSED ? i-- : i++)
3487 {
3488 enum gimplify_status t;
3489
3490 /* Avoid gimplifying the second argument to va_start, which needs to
3491 be the plain PARM_DECL. */
3492 if ((i != 1) || !builtin_va_start_p)
3493 {
3494 t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
3495 EXPR_LOCATION (*expr_p), ! returns_twice);
3496
3497 if (t == GS_ERROR)
3498 ret = GS_ERROR;
3499 }
3500 }
3501 }
3502
3503 /* Gimplify the static chain. */
3504 if (CALL_EXPR_STATIC_CHAIN (*expr_p))
3505 {
3506 if (fndecl && !DECL_STATIC_CHAIN (fndecl))
3507 CALL_EXPR_STATIC_CHAIN (*expr_p) = NULL;
3508 else
3509 {
3510 enum gimplify_status t;
3511 t = gimplify_arg (&CALL_EXPR_STATIC_CHAIN (*expr_p), pre_p,
3512 EXPR_LOCATION (*expr_p), ! returns_twice);
3513 if (t == GS_ERROR)
3514 ret = GS_ERROR;
3515 }
3516 }
3517
3518 /* Verify the function result. */
3519 if (want_value && fndecl
3520 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype))))
3521 {
3522 error_at (loc, "using result of function returning %<void%>");
3523 ret = GS_ERROR;
3524 }
3525
3526 /* Try this again in case gimplification exposed something. */
3527 if (ret != GS_ERROR)
3528 {
3529 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
3530
3531 if (new_tree && new_tree != *expr_p)
3532 {
3533 /* There was a transformation of this call which computes the
3534 same value, but in a more efficient way. Return and try
3535 again. */
3536 *expr_p = new_tree;
3537 return GS_OK;
3538 }
3539 }
3540 else
3541 {
3542 *expr_p = error_mark_node;
3543 return GS_ERROR;
3544 }
3545
3546 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on the
3547 CALL_EXPR. This allows us to eliminate redundant or useless
3548 calls to "const" functions. */
3549 if (TREE_CODE (*expr_p) == CALL_EXPR)
3550 {
3551 int flags = call_expr_flags (*expr_p);
3552 if (flags & (ECF_CONST | ECF_PURE)
3553 /* An infinite loop is considered a side effect. */
3554 && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
3555 TREE_SIDE_EFFECTS (*expr_p) = 0;
3556 }
3557
3558 /* If the value is not needed by the caller, emit a new GIMPLE_CALL
3559 and clear *EXPR_P. Otherwise, leave *EXPR_P in its gimplified
3560 form and delegate the creation of a GIMPLE_CALL to
3561 gimplify_modify_expr. This is always possible because when
3562 WANT_VALUE is true, the caller wants the result of this call into
3563 a temporary, which means that we will emit an INIT_EXPR in
3564 internal_get_tmp_var which will then be handled by
3565 gimplify_modify_expr. */
3566 if (!want_value)
3567 {
3568 /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
3569 have to do is replicate it as a GIMPLE_CALL tuple. */
3570 gimple_stmt_iterator gsi;
3571 call = gimple_build_call_from_tree (*expr_p, fnptrtype);
3572 notice_special_calls (call);
3573 gimplify_seq_add_stmt (pre_p, call);
3574 gsi = gsi_last (*pre_p);
3575 maybe_fold_stmt (&gsi);
3576 *expr_p = NULL_TREE;
3577 }
3578 else
3579 /* Remember the original function type. */
3580 CALL_EXPR_FN (*expr_p) = build1 (NOP_EXPR, fnptrtype,
3581 CALL_EXPR_FN (*expr_p));
3582
3583 return ret;
3584 }
3585
3586 /* Handle shortcut semantics in the predicate operand of a COND_EXPR by
3587 rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
3588
3589 TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
3590 condition is true or false, respectively. If null, we should generate
3591 our own to skip over the evaluation of this specific expression.
3592
3593 LOCUS is the source location of the COND_EXPR.
3594
3595 This function is the tree equivalent of do_jump.
3596
3597 shortcut_cond_r should only be called by shortcut_cond_expr. */
3598
3599 static tree
3600 shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
3601 location_t locus)
3602 {
3603 tree local_label = NULL_TREE;
3604 tree t, expr = NULL;
3605
3606 /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
3607 retain the shortcut semantics. Just insert the gotos here;
3608 shortcut_cond_expr will append the real blocks later. */
3609 if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
3610 {
3611 location_t new_locus;
3612
3613 /* Turn if (a && b) into
3614
3615 if (a); else goto no;
3616 if (b) goto yes; else goto no;
3617 (no:) */
3618
3619 if (false_label_p == NULL)
3620 false_label_p = &local_label;
3621
3622 /* Keep the original source location on the first 'if'. */
3623 t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus);
3624 append_to_statement_list (t, &expr);
3625
3626 /* Set the source location of the && on the second 'if'. */
3627 new_locus = rexpr_location (pred, locus);
3628 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
3629 new_locus);
3630 append_to_statement_list (t, &expr);
3631 }
3632 else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
3633 {
3634 location_t new_locus;
3635
3636 /* Turn if (a || b) into
3637
3638 if (a) goto yes;
3639 if (b) goto yes; else goto no;
3640 (yes:) */
3641
3642 if (true_label_p == NULL)
3643 true_label_p = &local_label;
3644
3645 /* Keep the original source location on the first 'if'. */
3646 t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus);
3647 append_to_statement_list (t, &expr);
3648
3649 /* Set the source location of the || on the second 'if'. */
3650 new_locus = rexpr_location (pred, locus);
3651 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
3652 new_locus);
3653 append_to_statement_list (t, &expr);
3654 }
3655 else if (TREE_CODE (pred) == COND_EXPR
3656 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 1)))
3657 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 2))))
3658 {
3659 location_t new_locus;
3660
3661 /* As long as we're messing with gotos, turn if (a ? b : c) into
3662 if (a)
3663 if (b) goto yes; else goto no;
3664 else
3665 if (c) goto yes; else goto no;
3666
3667 Don't do this if one of the arms has void type, which can happen
3668 in C++ when the arm is throw. */
3669
3670 /* Keep the original source location on the first 'if'. Set the source
3671 location of the ? on the second 'if'. */
3672 new_locus = rexpr_location (pred, locus);
3673 expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
3674 shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
3675 false_label_p, locus),
3676 shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
3677 false_label_p, new_locus));
3678 }
3679 else
3680 {
3681 expr = build3 (COND_EXPR, void_type_node, pred,
3682 build_and_jump (true_label_p),
3683 build_and_jump (false_label_p));
3684 SET_EXPR_LOCATION (expr, locus);
3685 }
3686
3687 if (local_label)
3688 {
3689 t = build1 (LABEL_EXPR, void_type_node, local_label);
3690 append_to_statement_list (t, &expr);
3691 }
3692
3693 return expr;
3694 }
3695
3696 /* If EXPR is a GOTO_EXPR, return it. If it is a STATEMENT_LIST, skip
3697 any of its leading DEBUG_BEGIN_STMTS and recurse on the subsequent
3698 statement, if it is the last one. Otherwise, return NULL. */
3699
3700 static tree
3701 find_goto (tree expr)
3702 {
3703 if (!expr)
3704 return NULL_TREE;
3705
3706 if (TREE_CODE (expr) == GOTO_EXPR)
3707 return expr;
3708
3709 if (TREE_CODE (expr) != STATEMENT_LIST)
3710 return NULL_TREE;
3711
3712 tree_stmt_iterator i = tsi_start (expr);
3713
3714 while (!tsi_end_p (i) && TREE_CODE (tsi_stmt (i)) == DEBUG_BEGIN_STMT)
3715 tsi_next (&i);
3716
3717 if (!tsi_one_before_end_p (i))
3718 return NULL_TREE;
3719
3720 return find_goto (tsi_stmt (i));
3721 }
3722
3723 /* Same as find_goto, except that it returns NULL if the destination
3724 is not a LABEL_DECL. */
3725
3726 static inline tree
3727 find_goto_label (tree expr)
3728 {
3729 tree dest = find_goto (expr);
3730 if (dest && TREE_CODE (GOTO_DESTINATION (dest)) == LABEL_DECL)
3731 return dest;
3732 return NULL_TREE;
3733 }
3734
3735 /* Given a conditional expression EXPR with short-circuit boolean
3736 predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
3737 predicate apart into the equivalent sequence of conditionals. */
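/* For example, assuming C-like input,

     if (a && b) c (); else d ();

   is conceptually rewritten into

     if (a) ; else goto no;
     if (b) ; else goto no;
     c (); goto end;
     no: d ();
     end: ;

   A sketch only: the labels shown are invented, and which jumps are
   actually emitted depends on the side effects of the arms and on the
   label-hijacking logic below. */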
3738
3739 static tree
3740 shortcut_cond_expr (tree expr)
3741 {
3742 tree pred = TREE_OPERAND (expr, 0);
3743 tree then_ = TREE_OPERAND (expr, 1);
3744 tree else_ = TREE_OPERAND (expr, 2);
3745 tree true_label, false_label, end_label, t;
3746 tree *true_label_p;
3747 tree *false_label_p;
3748 bool emit_end, emit_false, jump_over_else;
3749 bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
3750 bool else_se = else_ && TREE_SIDE_EFFECTS (else_);
3751
3752 /* First do simple transformations. */
3753 if (!else_se)
3754 {
3755 /* If there is no 'else', turn
3756 if (a && b) then c
3757 into
3758 if (a) if (b) then c. */
3759 while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
3760 {
3761 /* Keep the original source location on the first 'if'. */
3762 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
3763 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
3764 /* Set the source location of the && on the second 'if'. */
3765 if (rexpr_has_location (pred))
3766 SET_EXPR_LOCATION (expr, rexpr_location (pred));
3767 then_ = shortcut_cond_expr (expr);
3768 then_se = then_ && TREE_SIDE_EFFECTS (then_);
3769 pred = TREE_OPERAND (pred, 0);
3770 expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
3771 SET_EXPR_LOCATION (expr, locus);
3772 }
3773 }
3774
3775 if (!then_se)
3776 {
3777 /* If there is no 'then', turn
3778 if (a || b); else d
3779 into
3780 if (a); else if (b); else d. */
3781 while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
3782 {
3783 /* Keep the original source location on the first 'if'. */
3784 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
3785 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
3786 /* Set the source location of the || on the second 'if'. */
3787 if (rexpr_has_location (pred))
3788 SET_EXPR_LOCATION (expr, rexpr_location (pred));
3789 else_ = shortcut_cond_expr (expr);
3790 else_se = else_ && TREE_SIDE_EFFECTS (else_);
3791 pred = TREE_OPERAND (pred, 0);
3792 expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
3793 SET_EXPR_LOCATION (expr, locus);
3794 }
3795 }
3796
3797 /* If we're done, great. */
3798 if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
3799 && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
3800 return expr;
3801
3802 /* Otherwise we need to mess with gotos. Change
3803 if (a) c; else d;
3804 to
3805 if (a); else goto no;
3806 c; goto end;
3807 no: d; end:
3808 and recursively gimplify the condition. */
3809
3810 true_label = false_label = end_label = NULL_TREE;
3811
3812 /* If our arms just jump somewhere, hijack those labels so we don't
3813 generate jumps to jumps. */
3814
3815 if (tree then_goto = find_goto_label (then_))
3816 {
3817 true_label = GOTO_DESTINATION (then_goto);
3818 then_ = NULL;
3819 then_se = false;
3820 }
3821
3822 if (tree else_goto = find_goto_label (else_))
3823 {
3824 false_label = GOTO_DESTINATION (else_goto);
3825 else_ = NULL;
3826 else_se = false;
3827 }
3828
3829 /* If we aren't hijacking a label for the 'then' branch, it falls through. */
3830 if (true_label)
3831 true_label_p = &true_label;
3832 else
3833 true_label_p = NULL;
3834
3835 /* The 'else' branch also needs a label if it contains interesting code. */
3836 if (false_label || else_se)
3837 false_label_p = &false_label;
3838 else
3839 false_label_p = NULL;
3840
3841 /* If there was nothing else in our arms, just forward the label(s). */
3842 if (!then_se && !else_se)
3843 return shortcut_cond_r (pred, true_label_p, false_label_p,
3844 EXPR_LOC_OR_LOC (expr, input_location));
3845
3846 /* If our last subexpression already has a terminal label, reuse it. */
3847 if (else_se)
3848 t = expr_last (else_);
3849 else if (then_se)
3850 t = expr_last (then_);
3851 else
3852 t = NULL;
3853 if (t && TREE_CODE (t) == LABEL_EXPR)
3854 end_label = LABEL_EXPR_LABEL (t);
3855
3856 /* If we don't care about jumping to the 'else' branch, jump to the end
3857 if the condition is false. */
3858 if (!false_label_p)
3859 false_label_p = &end_label;
3860
3861 /* We only want to emit these labels if we aren't hijacking them. */
3862 emit_end = (end_label == NULL_TREE);
3863 emit_false = (false_label == NULL_TREE);
3864
3865 /* We only emit the jump over the else clause if we have to--if the
3866 then clause may fall through. Otherwise we can wind up with a
3867 useless jump and a useless label at the end of gimplified code,
3868 which will cause us to think that this conditional as a whole
3869 falls through even if it doesn't. If we then inline a function
3870 which ends with such a condition, that can cause us to issue an
3871 inappropriate warning about control reaching the end of a
3872 non-void function. */
3873 jump_over_else = block_may_fallthru (then_);
3874
3875 pred = shortcut_cond_r (pred, true_label_p, false_label_p,
3876 EXPR_LOC_OR_LOC (expr, input_location));
3877
3878 expr = NULL;
3879 append_to_statement_list (pred, &expr);
3880
3881 append_to_statement_list (then_, &expr);
3882 if (else_se)
3883 {
3884 if (jump_over_else)
3885 {
3886 tree last = expr_last (expr);
3887 t = build_and_jump (&end_label);
3888 if (rexpr_has_location (last))
3889 SET_EXPR_LOCATION (t, rexpr_location (last));
3890 append_to_statement_list (t, &expr);
3891 }
3892 if (emit_false)
3893 {
3894 t = build1 (LABEL_EXPR, void_type_node, false_label);
3895 append_to_statement_list (t, &expr);
3896 }
3897 append_to_statement_list (else_, &expr);
3898 }
3899 if (emit_end && end_label)
3900 {
3901 t = build1 (LABEL_EXPR, void_type_node, end_label);
3902 append_to_statement_list (t, &expr);
3903 }
3904
3905 return expr;
3906 }
3907
3908 /* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */
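/* E.g. an int-valued condition such as `a & b' comes back as roughly
   `(bool) (a & b)', while comparisons and TRUTH_* expressions simply
   get their type set to boolean_type_node in place (a sketch; the
   actual conversion is produced by fold_convert_loc below). */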
3909
3910 tree
3911 gimple_boolify (tree expr)
3912 {
3913 tree type = TREE_TYPE (expr);
3914 location_t loc = EXPR_LOCATION (expr);
3915
3916 if (TREE_CODE (expr) == NE_EXPR
3917 && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR
3918 && integer_zerop (TREE_OPERAND (expr, 1)))
3919 {
3920 tree call = TREE_OPERAND (expr, 0);
3921 tree fn = get_callee_fndecl (call);
3922
3923 /* For __builtin_expect ((long) (x), y) recurse into x as well
3924 if x is truth_value_p. */
3925 if (fn
3926 && fndecl_built_in_p (fn, BUILT_IN_EXPECT)
3927 && call_expr_nargs (call) == 2)
3928 {
3929 tree arg = CALL_EXPR_ARG (call, 0);
3930 if (arg)
3931 {
3932 if (TREE_CODE (arg) == NOP_EXPR
3933 && TREE_TYPE (arg) == TREE_TYPE (call))
3934 arg = TREE_OPERAND (arg, 0);
3935 if (truth_value_p (TREE_CODE (arg)))
3936 {
3937 arg = gimple_boolify (arg);
3938 CALL_EXPR_ARG (call, 0)
3939 = fold_convert_loc (loc, TREE_TYPE (call), arg);
3940 }
3941 }
3942 }
3943 }
3944
3945 switch (TREE_CODE (expr))
3946 {
3947 case TRUTH_AND_EXPR:
3948 case TRUTH_OR_EXPR:
3949 case TRUTH_XOR_EXPR:
3950 case TRUTH_ANDIF_EXPR:
3951 case TRUTH_ORIF_EXPR:
3952 /* Also boolify the arguments of truth exprs. */
3953 TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
3954 /* FALLTHRU */
3955
3956 case TRUTH_NOT_EXPR:
3957 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
3958
3959 /* These expressions always produce boolean results. */
3960 if (TREE_CODE (type) != BOOLEAN_TYPE)
3961 TREE_TYPE (expr) = boolean_type_node;
3962 return expr;
3963
3964 case ANNOTATE_EXPR:
3965 switch ((enum annot_expr_kind) TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)))
3966 {
3967 case annot_expr_ivdep_kind:
3968 case annot_expr_unroll_kind:
3969 case annot_expr_no_vector_kind:
3970 case annot_expr_vector_kind:
3971 case annot_expr_parallel_kind:
3972 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
3973 if (TREE_CODE (type) != BOOLEAN_TYPE)
3974 TREE_TYPE (expr) = boolean_type_node;
3975 return expr;
3976 default:
3977 gcc_unreachable ();
3978 }
3979
3980 default:
3981 if (COMPARISON_CLASS_P (expr))
3982 {
3983 /* These expressions always produce boolean results. */
3984 if (TREE_CODE (type) != BOOLEAN_TYPE)
3985 TREE_TYPE (expr) = boolean_type_node;
3986 return expr;
3987 }
3988 /* Other expressions that get here must have boolean values, but
3989 might need to be converted to the appropriate mode. */
3990 if (TREE_CODE (type) == BOOLEAN_TYPE)
3991 return expr;
3992 return fold_convert_loc (loc, boolean_type_node, expr);
3993 }
3994 }
3995
3996 /* Given a conditional expression *EXPR_P without side effects, gimplify
3997 its operands. New statements are inserted to PRE_P. */
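/* E.g. a side-effect-free `p ? x : y' used as a value may come out
   roughly as

     t = p != 0;
     ... t ? x : y ...

   i.e. the COND_EXPR itself survives with gimplified operands instead
   of being expanded into branches (illustrative sketch only). */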
3998
3999 static enum gimplify_status
4000 gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
4001 {
4002 tree expr = *expr_p, cond;
4003 enum gimplify_status ret, tret;
4004 enum tree_code code;
4005
4006 cond = gimple_boolify (COND_EXPR_COND (expr));
4007
4008 /* We need to handle && and || specially, as their gimplification
4009 creates pure COND_EXPRs, which would otherwise lead to an infinite cycle. */
4010 code = TREE_CODE (cond);
4011 if (code == TRUTH_ANDIF_EXPR)
4012 TREE_SET_CODE (cond, TRUTH_AND_EXPR);
4013 else if (code == TRUTH_ORIF_EXPR)
4014 TREE_SET_CODE (cond, TRUTH_OR_EXPR);
4015 ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_condexpr, fb_rvalue);
4016 COND_EXPR_COND (*expr_p) = cond;
4017
4018 tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
4019 is_gimple_val, fb_rvalue);
4020 ret = MIN (ret, tret);
4021 tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
4022 is_gimple_val, fb_rvalue);
4023
4024 return MIN (ret, tret);
4025 }
4026
4027 /* Return true if evaluating EXPR could trap.
4028 EXPR is GENERIC, while tree_could_trap_p can be called
4029 only on GIMPLE. */
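/* E.g. an integer `x / y' could trap (division by zero), as could `*p'
   or `a[i]' in general, whereas `x + 1' on gimple values cannot; the
   recursion below also catches traps buried inside operands, such as
   the division in `(x / y) + 1'. */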
4030
4031 bool
4032 generic_expr_could_trap_p (tree expr)
4033 {
4034 unsigned i, n;
4035
4036 if (!expr || is_gimple_val (expr))
4037 return false;
4038
4039 if (!EXPR_P (expr) || tree_could_trap_p (expr))
4040 return true;
4041
4042 n = TREE_OPERAND_LENGTH (expr);
4043 for (i = 0; i < n; i++)
4044 if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
4045 return true;
4046
4047 return false;
4048 }
4049
4050 /* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
4051 into
4052
4053     if (p)              if (p)
4054       t1 = a;             a;
4055     else          or    else
4056       t1 = b;             b;
4057     t1;
4058
4059 The second form is used when *EXPR_P is of type void.
4060
4061 PRE_P points to the list where side effects that must happen before
4062 *EXPR_P should be stored. */
4063
4064 static enum gimplify_status
4065 gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
4066 {
4067 tree expr = *expr_p;
4068 tree type = TREE_TYPE (expr);
4069 location_t loc = EXPR_LOCATION (expr);
4070 tree tmp, arm1, arm2;
4071 enum gimplify_status ret;
4072 tree label_true, label_false, label_cont;
4073 bool have_then_clause_p, have_else_clause_p;
4074 gcond *cond_stmt;
4075 enum tree_code pred_code;
4076 gimple_seq seq = NULL;
4077
4078 /* If this COND_EXPR has a value, copy the values into a temporary within
4079 the arms. */
4080 if (!VOID_TYPE_P (type))
4081 {
4082 tree then_ = TREE_OPERAND (expr, 1), else_ = TREE_OPERAND (expr, 2);
4083 tree result;
4084
4085 /* If either an rvalue is ok or we do not require an lvalue, create the
4086 temporary. But we cannot do that if the type is addressable. */
4087 if (((fallback & fb_rvalue) || !(fallback & fb_lvalue))
4088 && !TREE_ADDRESSABLE (type))
4089 {
4090 if (gimplify_ctxp->allow_rhs_cond_expr
4091 /* If either branch has side effects or could trap, it can't be
4092 evaluated unconditionally. */
4093 && !TREE_SIDE_EFFECTS (then_)
4094 && !generic_expr_could_trap_p (then_)
4095 && !TREE_SIDE_EFFECTS (else_)
4096 && !generic_expr_could_trap_p (else_))
4097 return gimplify_pure_cond_expr (expr_p, pre_p);
4098
4099 tmp = create_tmp_var (type, "iftmp");
4100 result = tmp;
4101 }
4102
4103 /* Otherwise, only create and copy references to the values. */
4104 else
4105 {
4106 type = build_pointer_type (type);
4107
4108 if (!VOID_TYPE_P (TREE_TYPE (then_)))
4109 then_ = build_fold_addr_expr_loc (loc, then_);
4110
4111 if (!VOID_TYPE_P (TREE_TYPE (else_)))
4112 else_ = build_fold_addr_expr_loc (loc, else_);
4113
4114 expr
4115 = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_);
4116
4117 tmp = create_tmp_var (type, "iftmp");
4118 result = build_simple_mem_ref_loc (loc, tmp);
4119 }
4120
4121 /* Build the new then clause, `tmp = then_;'. But don't build the
4122 assignment if the value is void; in C++ an arm can be void if it's a throw. */
4123 if (!VOID_TYPE_P (TREE_TYPE (then_)))
4124 TREE_OPERAND (expr, 1) = build2 (INIT_EXPR, type, tmp, then_);
4125
4126 /* Similarly, build the new else clause, `tmp = else_;'. */
4127 if (!VOID_TYPE_P (TREE_TYPE (else_)))
4128 TREE_OPERAND (expr, 2) = build2 (INIT_EXPR, type, tmp, else_);
4129
4130 TREE_TYPE (expr) = void_type_node;
4131 recalculate_side_effects (expr);
4132
4133 /* Move the COND_EXPR to the prequeue. */
4134 gimplify_stmt (&expr, pre_p);
4135
4136 *expr_p = result;
4137 return GS_ALL_DONE;
4138 }
4139
4140 /* Remove any COMPOUND_EXPR so the following cases will be caught. */
4141 STRIP_TYPE_NOPS (TREE_OPERAND (expr, 0));
4142 if (TREE_CODE (TREE_OPERAND (expr, 0)) == COMPOUND_EXPR)
4143 gimplify_compound_expr (&TREE_OPERAND (expr, 0), pre_p, true);
4144
4145 /* Make sure the condition has BOOLEAN_TYPE. */
4146 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
4147
4148 /* Break apart && and || conditions. */
4149 if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
4150 || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
4151 {
4152 expr = shortcut_cond_expr (expr);
4153
4154 if (expr != *expr_p)
4155 {
4156 *expr_p = expr;
4157
4158 /* We can't rely on gimplify_expr to re-gimplify the expanded
4159 form properly, as cleanups might cause the target labels to be
4160 wrapped in a TRY_FINALLY_EXPR. To prevent that, we need to
4161 set up a conditional context. */
4162 gimple_push_condition ();
4163 gimplify_stmt (expr_p, &seq);
4164 gimple_pop_condition (pre_p);
4165 gimple_seq_add_seq (pre_p, seq);
4166
4167 return GS_ALL_DONE;
4168 }
4169 }
4170
4171 /* Now do the normal gimplification. */
4172
4173 /* Gimplify condition. */
4174 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL,
4175 is_gimple_condexpr_for_cond, fb_rvalue);
4176 if (ret == GS_ERROR)
4177 return GS_ERROR;
4178 gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);
4179
4180 gimple_push_condition ();
4181
4182 have_then_clause_p = have_else_clause_p = false;
4183 label_true = find_goto_label (TREE_OPERAND (expr, 1));
4184 if (label_true
4185 && DECL_CONTEXT (GOTO_DESTINATION (label_true)) == current_function_decl
4186 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
4187 have different locations, otherwise we end up with incorrect
4188 location information on the branches. */
4189 && (optimize
4190 || !EXPR_HAS_LOCATION (expr)
4191 || !rexpr_has_location (label_true)
4192 || EXPR_LOCATION (expr) == rexpr_location (label_true)))
4193 {
4194 have_then_clause_p = true;
4195 label_true = GOTO_DESTINATION (label_true);
4196 }
4197 else
4198 label_true = create_artificial_label (UNKNOWN_LOCATION);
4199 label_false = find_goto_label (TREE_OPERAND (expr, 2));
4200 if (label_false
4201 && DECL_CONTEXT (GOTO_DESTINATION (label_false)) == current_function_decl
4202 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
4203 have different locations, otherwise we end up with incorrect
4204 location information on the branches. */
4205 && (optimize
4206 || !EXPR_HAS_LOCATION (expr)
4207 || !rexpr_has_location (label_false)
4208 || EXPR_LOCATION (expr) == rexpr_location (label_false)))
4209 {
4210 have_else_clause_p = true;
4211 label_false = GOTO_DESTINATION (label_false);
4212 }
4213 else
4214 label_false = create_artificial_label (UNKNOWN_LOCATION);
4215
4216 gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
4217 &arm2);
4218 cond_stmt = gimple_build_cond (pred_code, arm1, arm2, label_true,
4219 label_false);
4220 gimple_set_no_warning (cond_stmt, TREE_NO_WARNING (COND_EXPR_COND (expr)));
4221 gimplify_seq_add_stmt (&seq, cond_stmt);
4222 gimple_stmt_iterator gsi = gsi_last (seq);
4223 maybe_fold_stmt (&gsi);
4224
4225 label_cont = NULL_TREE;
4226 if (!have_then_clause_p)
4227 {
4228 /* For if (...) {} else { code; } put label_true after
4229 the else block. */
4230 if (TREE_OPERAND (expr, 1) == NULL_TREE
4231 && !have_else_clause_p
4232 && TREE_OPERAND (expr, 2) != NULL_TREE)
4233 label_cont = label_true;
4234 else
4235 {
4236 gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
4237 have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
4238 /* For if (...) { code; } else {} or
4239 if (...) { code; } else goto label; or
4240 if (...) { code; return; } else { ... }
4241 label_cont isn't needed. */
4242 if (!have_else_clause_p
4243 && TREE_OPERAND (expr, 2) != NULL_TREE
4244 && gimple_seq_may_fallthru (seq))
4245 {
4246 gimple *g;
4247 label_cont = create_artificial_label (UNKNOWN_LOCATION);
4248
4249 g = gimple_build_goto (label_cont);
4250
4251 /* GIMPLE_COND's are very low level; they have embedded
4252 gotos. This particular embedded goto should not be marked
4253 with the location of the original COND_EXPR, as it would
4254 correspond to the COND_EXPR's condition, not the ELSE or the
4255 THEN arms. To avoid marking it with the wrong location, flag
4256 it as "no location". */
4257 gimple_set_do_not_emit_location (g);
4258
4259 gimplify_seq_add_stmt (&seq, g);
4260 }
4261 }
4262 }
4263 if (!have_else_clause_p)
4264 {
4265 gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
4266 have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
4267 }
4268 if (label_cont)
4269 gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));
4270
4271 gimple_pop_condition (pre_p);
4272 gimple_seq_add_seq (pre_p, seq);
4273
4274 if (ret == GS_ERROR)
4275 ; /* Do nothing. */
4276 else if (have_then_clause_p || have_else_clause_p)
4277 ret = GS_ALL_DONE;
4278 else
4279 {
4280 /* Both arms are empty; replace the COND_EXPR with its predicate. */
4281 expr = TREE_OPERAND (expr, 0);
4282 gimplify_stmt (&expr, pre_p);
4283 }
4284
4285 *expr_p = NULL;
4286 return ret;
4287 }
4288
4289 /* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
4290 to be marked addressable.
4291
4292 We cannot rely on such an expression being directly markable if a temporary
4293 has been created by the gimplification. In this case, we create another
4294 temporary and initialize it with a copy, which will become a store after we
4295 mark it addressable. This can happen if the front-end passed us something
4296 that it could not mark addressable yet, like a Fortran pass-by-reference
4297 parameter (int) floatvar. */
4298
4299 static void
4300 prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p)
4301 {
4302 while (handled_component_p (*expr_p))
4303 expr_p = &TREE_OPERAND (*expr_p, 0);
4304 if (is_gimple_reg (*expr_p))
4305 {
4306 /* Do not allow an SSA name as the temporary. */
4307 tree var = get_initialized_tmp_var (*expr_p, seq_p, NULL, false);
4308 DECL_NOT_GIMPLE_REG_P (var) = 1;
4309 *expr_p = var;
4310 }
4311 }
4312
4313 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4314 a call to __builtin_memcpy. */
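/* E.g. an aggregate copy `a = b' handled here becomes, roughly,

     __builtin_memcpy (&a, &b, SIZE);

   where SIZE is the size value passed in by the caller (a sketch; the
   call is additionally marked with alloca_for_var, see below). */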
4315
4316 static enum gimplify_status
4317 gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
4318 gimple_seq *seq_p)
4319 {
4320 tree t, to, to_ptr, from, from_ptr;
4321 gcall *gs;
4322 location_t loc = EXPR_LOCATION (*expr_p);
4323
4324 to = TREE_OPERAND (*expr_p, 0);
4325 from = TREE_OPERAND (*expr_p, 1);
4326
4327 /* Mark the RHS addressable. Beware that it may not be possible to do so
4328 directly if a temporary has been created by the gimplification. */
4329 prepare_gimple_addressable (&from, seq_p);
4330
4331 mark_addressable (from);
4332 from_ptr = build_fold_addr_expr_loc (loc, from);
4333 gimplify_arg (&from_ptr, seq_p, loc);
4334
4335 mark_addressable (to);
4336 to_ptr = build_fold_addr_expr_loc (loc, to);
4337 gimplify_arg (&to_ptr, seq_p, loc);
4338
4339 t = builtin_decl_implicit (BUILT_IN_MEMCPY);
4340
4341 gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);
4342 gimple_call_set_alloca_for_var (gs, true);
4343
4344 if (want_value)
4345 {
4346 /* tmp = memcpy() */
4347 t = create_tmp_var (TREE_TYPE (to_ptr));
4348 gimple_call_set_lhs (gs, t);
4349 gimplify_seq_add_stmt (seq_p, gs);
4350
4351 *expr_p = build_simple_mem_ref (t);
4352 return GS_ALL_DONE;
4353 }
4354
4355 gimplify_seq_add_stmt (seq_p, gs);
4356 *expr_p = NULL;
4357 return GS_ALL_DONE;
4358 }
4359
4360 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4361 a call to __builtin_memset. In this case we know that the RHS is
4362 a CONSTRUCTOR with an empty element list. */
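/* E.g. zero-initialization of an aggregate, `a = (struct S){ };',
   handled here becomes, roughly,

     __builtin_memset (&a, 0, SIZE);

   with SIZE supplied by the caller (illustrative sketch). */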
4363
4364 static enum gimplify_status
4365 gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
4366 gimple_seq *seq_p)
4367 {
4368 tree t, from, to, to_ptr;
4369 gcall *gs;
4370 location_t loc = EXPR_LOCATION (*expr_p);
4371
4372 /* Assert our assumptions, to abort instead of producing wrong code
4373 silently if they are not met. Beware that the RHS CONSTRUCTOR might
4374 not be immediately exposed. */
4375 from = TREE_OPERAND (*expr_p, 1);
4376 if (TREE_CODE (from) == WITH_SIZE_EXPR)
4377 from = TREE_OPERAND (from, 0);
4378
4379 gcc_assert (TREE_CODE (from) == CONSTRUCTOR
4380 && vec_safe_is_empty (CONSTRUCTOR_ELTS (from)));
4381
4382 /* Now proceed. */
4383 to = TREE_OPERAND (*expr_p, 0);
4384
4385 to_ptr = build_fold_addr_expr_loc (loc, to);
4386 gimplify_arg (&to_ptr, seq_p, loc);
4387 t = builtin_decl_implicit (BUILT_IN_MEMSET);
4388
4389 gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);
4390
4391 if (want_value)
4392 {
4393 /* tmp = memset() */
4394 t = create_tmp_var (TREE_TYPE (to_ptr));
4395 gimple_call_set_lhs (gs, t);
4396 gimplify_seq_add_stmt (seq_p, gs);
4397
4398 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
4399 return GS_ALL_DONE;
4400 }
4401
4402 gimplify_seq_add_stmt (seq_p, gs);
4403 *expr_p = NULL;
4404 return GS_ALL_DONE;
4405 }
4406
4407 /* A subroutine of gimplify_init_ctor_preeval. Called via walk_tree,
4408 determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
4409 assignment. Return non-null if we detect a potential overlap. */
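/* E.g. in

     a = (struct S){ 1, f (&a), *p };

   the call and the indirection might read the object being stored to,
   so such values get flagged by the walk below and forced into
   temporaries before any clearing or stores happen (a rough
   illustration; f and p are invented). */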
4410
4411 struct gimplify_init_ctor_preeval_data
4412 {
4413 /* The base decl of the lhs object. May be NULL, in which case we
4414 have to assume the lhs is indirect. */
4415 tree lhs_base_decl;
4416
4417 /* The alias set of the lhs object. */
4418 alias_set_type lhs_alias_set;
4419 };
4420
4421 static tree
4422 gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
4423 {
4424 struct gimplify_init_ctor_preeval_data *data
4425 = (struct gimplify_init_ctor_preeval_data *) xdata;
4426 tree t = *tp;
4427
4428 /* If we find the base object, obviously we have overlap. */
4429 if (data->lhs_base_decl == t)
4430 return t;
4431
4432 /* If the constructor component is indirect, determine if we have a
4433 potential overlap with the lhs. The only bits of information we
4434 have to go on at this point are addressability and alias sets. */
4435 if ((INDIRECT_REF_P (t)
4436 || TREE_CODE (t) == MEM_REF)
4437 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
4438 && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
4439 return t;
4440
4441 /* If the constructor component is a call, determine if it can hide a
4442 potential overlap with the lhs through an INDIRECT_REF like above.
4443 ??? Ugh - this is completely broken. In fact this whole analysis
4444 doesn't look conservative. */
4445 if (TREE_CODE (t) == CALL_EXPR)
4446 {
4447 tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));
4448
4449 for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
4450 if (POINTER_TYPE_P (TREE_VALUE (type))
4451 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
4452 && alias_sets_conflict_p (data->lhs_alias_set,
4453 get_alias_set
4454 (TREE_TYPE (TREE_VALUE (type)))))
4455 return t;
4456 }
4457
4458 if (IS_TYPE_OR_DECL_P (t))
4459 *walk_subtrees = 0;
4460 return NULL;
4461 }
4462
4463 /* A subroutine of gimplify_init_constructor. Pre-evaluate EXPR,
4464 force values that overlap with the lhs (as described by *DATA)
4465 into temporaries. */
4466
4467 static void
4468 gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
4469 struct gimplify_init_ctor_preeval_data *data)
4470 {
4471 enum gimplify_status one;
4472
4473 /* If the value is constant, then there's nothing to pre-evaluate. */
4474 if (TREE_CONSTANT (*expr_p))
4475 {
4476 /* Ensure it does not have side effects, it might contain a reference to
4477 the object we're initializing. */
4478 gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
4479 return;
4480 }
4481
4482 /* If the type has non-trivial constructors, we can't pre-evaluate. */
4483 if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
4484 return;
4485
4486 /* Recurse for nested constructors. */
4487 if (TREE_CODE (*expr_p) == CONSTRUCTOR)
4488 {
4489 unsigned HOST_WIDE_INT ix;
4490 constructor_elt *ce;
4491 vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (*expr_p);
4492
4493 FOR_EACH_VEC_SAFE_ELT (v, ix, ce)
4494 gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);
4495
4496 return;
4497 }
4498
4499 /* If this is a variable sized type, we must remember the size. */
4500 maybe_with_size_expr (expr_p);
4501
4502 /* Gimplify the constructor element to something appropriate for the rhs
4503 of a MODIFY_EXPR. Given that we know the LHS is an aggregate, we know
4504 the gimplifier will consider this a store to memory. Doing this
4505 gimplification now means that we won't have to deal with complicated
4506 language-specific trees, nor trees like SAVE_EXPR that can induce
4507 exponential search behavior. */
4508 one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
4509 if (one == GS_ERROR)
4510 {
4511 *expr_p = NULL;
4512 return;
4513 }
4514
4515 /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
4516 with the lhs, since "a = { .x=a }" doesn't make sense. This will
4517 always be true for all scalars, since is_gimple_mem_rhs insists on a
4518 temporary variable for them. */
4519 if (DECL_P (*expr_p))
4520 return;
4521
4522 /* If this is of variable size, we have no choice but to assume it doesn't
4523 overlap since we can't make a temporary for it. */
4524 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
4525 return;
4526
4527 /* Otherwise, we must search for overlap ... */
4528 if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
4529 return;
4530
4531 /* ... and if found, force the value into a temporary. */
4532 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
4533 }
4534
4535 /* A subroutine of gimplify_init_ctor_eval. Create a loop for
4536 a RANGE_EXPR in a CONSTRUCTOR for an array.
4537
4538 var = lower;
4539 loop_entry:
4540 object[var] = value;
4541 if (var == upper)
4542 goto loop_exit;
4543 var = var + 1;
4544 goto loop_entry;
4545 loop_exit:
4546
4547 We increment var _after_ the loop exit check because we might otherwise
4548 fail if upper == TYPE_MAX_VALUE (type for upper).
4549
4550 Note that we never have to deal with SAVE_EXPRs here, because this has
4551 already been taken care of for us, in gimplify_init_ctor_preeval(). */
4552
4553 static void gimplify_init_ctor_eval (tree, vec<constructor_elt, va_gc> *,
4554 gimple_seq *, bool);
4555
4556 static void
4557 gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
4558 tree value, tree array_elt_type,
4559 gimple_seq *pre_p, bool cleared)
4560 {
4561 tree loop_entry_label, loop_exit_label, fall_thru_label;
4562 tree var, var_type, cref, tmp;
4563
4564 loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
4565 loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
4566 fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);
4567
4568 /* Create and initialize the index variable. */
4569 var_type = TREE_TYPE (upper);
4570 var = create_tmp_var (var_type);
4571 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));
4572
4573 /* Add the loop entry label. */
4574 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));
4575
4576 /* Build the reference. */
4577 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
4578 var, NULL_TREE, NULL_TREE);
4579
4580 /* If we are a constructor, just call gimplify_init_ctor_eval to do
4581 the store. Otherwise just assign value to the reference. */
4582
4583 if (TREE_CODE (value) == CONSTRUCTOR)
4584 /* NB we might have to call ourself recursively through
4585 gimplify_init_ctor_eval if the value is a constructor. */
4586 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
4587 pre_p, cleared);
4588 else
4589 gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));
4590
4591 /* We exit the loop when the index var is equal to the upper bound. */
4592 gimplify_seq_add_stmt (pre_p,
4593 gimple_build_cond (EQ_EXPR, var, upper,
4594 loop_exit_label, fall_thru_label));
4595
4596 gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));
4597
4598 /* Otherwise, increment the index var... */
4599 tmp = build2 (PLUS_EXPR, var_type, var,
4600 fold_convert (var_type, integer_one_node));
4601 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));
4602
4603 /* ...and jump back to the loop entry. */
4604 gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));
4605
4606 /* Add the loop exit label. */
4607 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
4608 }
4609
4610 /* Return true if FDECL is accessing a field that is zero sized. */
4611
4612 static bool
4613 zero_sized_field_decl (const_tree fdecl)
4614 {
4615 if (TREE_CODE (fdecl) == FIELD_DECL && DECL_SIZE (fdecl)
4616 && integer_zerop (DECL_SIZE (fdecl)))
4617 return true;
4618 return false;
4619 }
4620
4621 /* Return true if TYPE is zero sized. */
4622
4623 static bool
4624 zero_sized_type (const_tree type)
4625 {
4626 if (AGGREGATE_TYPE_P (type) && TYPE_SIZE (type)
4627 && integer_zerop (TYPE_SIZE (type)))
4628 return true;
4629 return false;
4630 }
4631
4632 /* A subroutine of gimplify_init_constructor. Generate individual
4633 MODIFY_EXPRs for a CONSTRUCTOR. OBJECT is the LHS against which the
4634 assignments should happen. ELTS is the CONSTRUCTOR_ELTS of the
4635 CONSTRUCTOR. CLEARED is true if the entire LHS object has been
4636 zeroed first. */
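/* E.g. with CLEARED true,

     int a[4] = { [0] = 1, [3] = 7 };

   has already been zeroed en masse, so only `a[0] = 1; a[3] = 7;' are
   emitted here; with CLEARED false, every explicit element is stored
   (an illustrative C-level sketch). */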
4637
4638 static void
4639 gimplify_init_ctor_eval (tree object, vec<constructor_elt, va_gc> *elts,
4640 gimple_seq *pre_p, bool cleared)
4641 {
4642 tree array_elt_type = NULL;
4643 unsigned HOST_WIDE_INT ix;
4644 tree purpose, value;
4645
4646 if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
4647 array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));
4648
4649 FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
4650 {
4651 tree cref;
4652
4653 /* NULL values are created above for gimplification errors. */
4654 if (value == NULL)
4655 continue;
4656
4657 if (cleared && initializer_zerop (value))
4658 continue;
4659
4660 /* ??? Here's to hoping the front end fills in all of the indices,
4661 so we don't have to figure out what's missing ourselves. */
4662 gcc_assert (purpose);
4663
4664 /* Skip zero-sized fields, unless value has side-effects. This can
4665 happen with calls to functions returning a zero-sized type, which
4666 we shouldn't discard. As a number of downstream passes don't
4667 expect sets of zero-sized fields, we rely on the gimplification of
4668 the MODIFY_EXPR we make below to drop the assignment statement. */
4669 if (! TREE_SIDE_EFFECTS (value) && zero_sized_field_decl (purpose))
4670 continue;
4671
4672 /* If we have a RANGE_EXPR, we have to build a loop to assign the
4673 whole range. */
4674 if (TREE_CODE (purpose) == RANGE_EXPR)
4675 {
4676 tree lower = TREE_OPERAND (purpose, 0);
4677 tree upper = TREE_OPERAND (purpose, 1);
4678
4679 /* If the lower bound is equal to upper, just treat it as if
4680 upper was the index. */
4681 if (simple_cst_equal (lower, upper))
4682 purpose = upper;
4683 else
4684 {
4685 gimplify_init_ctor_eval_range (object, lower, upper, value,
4686 array_elt_type, pre_p, cleared);
4687 continue;
4688 }
4689 }
4690
4691 if (array_elt_type)
4692 {
4693 /* Do not use bitsizetype for ARRAY_REF indices. */
4694 if (TYPE_DOMAIN (TREE_TYPE (object)))
4695 purpose
4696 = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
4697 purpose);
4698 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
4699 purpose, NULL_TREE, NULL_TREE);
4700 }
4701 else
4702 {
4703 gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
4704 cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
4705 unshare_expr (object), purpose, NULL_TREE);
4706 }
4707
4708 if (TREE_CODE (value) == CONSTRUCTOR
4709 && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
4710 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
4711 pre_p, cleared);
4712 else
4713 {
4714 tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
4715 gimplify_and_add (init, pre_p);
4716 ggc_free (init);
4717 }
4718 }
4719 }
4720
4721 /* Return the appropriate RHS predicate for this LHS. */
4722
4723 gimple_predicate
4724 rhs_predicate_for (tree lhs)
4725 {
4726 if (is_gimple_reg (lhs))
4727 return is_gimple_reg_rhs_or_call;
4728 else
4729 return is_gimple_mem_rhs_or_call;
4730 }
4731
4732 /* Return the initial guess for an appropriate RHS predicate for this LHS,
4733 before the LHS has been gimplified. */
4734
4735 static gimple_predicate
4736 initial_rhs_predicate_for (tree lhs)
4737 {
4738 if (is_gimple_reg_type (TREE_TYPE (lhs)))
4739 return is_gimple_reg_rhs_or_call;
4740 else
4741 return is_gimple_mem_rhs_or_call;
4742 }
4743
4744 /* Gimplify a C99 compound literal expression. This just means adding
4745 the DECL_EXPR before the current statement and using its anonymous
4746 decl instead. */
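/* E.g., assuming C input,

     p = &(int){ 42 };

   becomes roughly

     D.1234 = 42;
     p = &D.1234;

   where D.1234 stands for the literal's anonymous decl (sketch; the
   name is invented for illustration). */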
4747
4748 static enum gimplify_status
4749 gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p,
4750 bool (*gimple_test_f) (tree),
4751 fallback_t fallback)
4752 {
4753 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
4754 tree decl = DECL_EXPR_DECL (decl_s);
4755 tree init = DECL_INITIAL (decl);
4756 /* Mark the decl as addressable if the compound literal
4757 expression is addressable now, otherwise it is marked too late
4758 after we gimplify the initialization expression. */
4759 if (TREE_ADDRESSABLE (*expr_p))
4760 TREE_ADDRESSABLE (decl) = 1;
4761 /* Otherwise, if we don't need an lvalue and have a literal, directly
4762 substitute it. Check if it matches the gimple predicate, as
4763 otherwise we'd generate a new temporary, and we can as well just
4764 use the decl we already have. */
4765 else if (!TREE_ADDRESSABLE (decl)
4766 && !TREE_THIS_VOLATILE (decl)
4767 && init
4768 && (fallback & fb_lvalue) == 0
4769 && gimple_test_f (init))
4770 {
4771 *expr_p = init;
4772 return GS_OK;
4773 }
4774
4775 /* If the decl is not addressable, then it is being used in some
4776 expression or on the right hand side of a statement, and it can
4777 be put into a readonly data section. */
4778 if (!TREE_ADDRESSABLE (decl) && (fallback & fb_lvalue) == 0)
4779 TREE_READONLY (decl) = 1;
4780
4781 /* This decl isn't mentioned in the enclosing block, so add it to the
4782 list of temps. FIXME it seems a bit of a kludge to say that
4783 anonymous artificial vars aren't pushed, but everything else is. */
4784 if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
4785 gimple_add_tmp_var (decl);
4786
4787 gimplify_and_add (decl_s, pre_p);
4788 *expr_p = decl;
4789 return GS_OK;
4790 }
4791
4792 /* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
4793 return a new CONSTRUCTOR if something changed. */
4794
4795 static tree
4796 optimize_compound_literals_in_ctor (tree orig_ctor)
4797 {
4798 tree ctor = orig_ctor;
4799 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (ctor);
4800 unsigned int idx, num = vec_safe_length (elts);
4801
4802 for (idx = 0; idx < num; idx++)
4803 {
4804 tree value = (*elts)[idx].value;
4805 tree newval = value;
4806 if (TREE_CODE (value) == CONSTRUCTOR)
4807 newval = optimize_compound_literals_in_ctor (value);
4808 else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
4809 {
4810 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
4811 tree decl = DECL_EXPR_DECL (decl_s);
4812 tree init = DECL_INITIAL (decl);
4813
4814 if (!TREE_ADDRESSABLE (value)
4815 && !TREE_ADDRESSABLE (decl)
4816 && init
4817 && TREE_CODE (init) == CONSTRUCTOR)
4818 newval = optimize_compound_literals_in_ctor (init);
4819 }
4820 if (newval == value)
4821 continue;
4822
4823 if (ctor == orig_ctor)
4824 {
4825 ctor = copy_node (orig_ctor);
4826 CONSTRUCTOR_ELTS (ctor) = vec_safe_copy (elts);
4827 elts = CONSTRUCTOR_ELTS (ctor);
4828 }
4829 (*elts)[idx].value = newval;
4830 }
4831 return ctor;
4832 }
4833
4834 /* A subroutine of gimplify_modify_expr. Break out elements of a
4835 CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
4836
4837 Note that we still need to clear any elements that don't have explicit
4838 initializers, so if not all elements are initialized we keep the
4839 original MODIFY_EXPR, we just remove all of the constructor elements.
4840
4841 If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
4842 GS_ERROR if we would have to create a temporary when gimplifying
4843 this constructor. Otherwise, return GS_OK.
4844
4845 If NOTIFY_TEMP_CREATION is false, just do the gimplification. */
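/* E.g. a mostly-zero local initializer such as

     int a[100] = { [17] = 5 };

   is typically lowered to a block clear followed by the single nonzero
   store, while a large constant initializer of a read-only variable may
   instead be promoted to static storage (rough sketches of the
   heuristics applied below). */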
4846
4847 static enum gimplify_status
4848 gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
4849 bool want_value, bool notify_temp_creation)
4850 {
4851 tree object, ctor, type;
4852 enum gimplify_status ret;
4853 vec<constructor_elt, va_gc> *elts;
4854
4855 gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);
4856
4857 if (!notify_temp_creation)
4858 {
4859 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
4860 is_gimple_lvalue, fb_lvalue);
4861 if (ret == GS_ERROR)
4862 return ret;
4863 }
4864
4865 object = TREE_OPERAND (*expr_p, 0);
4866 ctor = TREE_OPERAND (*expr_p, 1)
4867 = optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
4868 type = TREE_TYPE (ctor);
4869 elts = CONSTRUCTOR_ELTS (ctor);
4870 ret = GS_ALL_DONE;
4871
4872 switch (TREE_CODE (type))
4873 {
4874 case RECORD_TYPE:
4875 case UNION_TYPE:
4876 case QUAL_UNION_TYPE:
4877 case ARRAY_TYPE:
4878 {
4879 /* Use readonly data for initializers of this or smaller size
4880 regardless of the num_nonzero_elements / num_unique_nonzero_elements
4881 ratio. */
4882 const HOST_WIDE_INT min_unique_size = 64;
4883 /* If num_nonzero_elements / num_unique_nonzero_elements ratio
4884 is smaller than this, use readonly data. */
4885 const int unique_nonzero_ratio = 8;
4886 /* True if a single access of the object must be ensured. This is the
4887 case if the target is volatile, the type is non-addressable and more
4888 than one field needs to be assigned. */
4889 const bool ensure_single_access
4890 = TREE_THIS_VOLATILE (object)
4891 && !TREE_ADDRESSABLE (type)
4892 && vec_safe_length (elts) > 1;
4893 struct gimplify_init_ctor_preeval_data preeval_data;
4894 HOST_WIDE_INT num_ctor_elements, num_nonzero_elements;
4895 HOST_WIDE_INT num_unique_nonzero_elements;
4896 bool cleared, complete_p, valid_const_initializer;
4897
4898 /* Aggregate types must lower constructors to initialization of
4899 individual elements. The exception is that a CONSTRUCTOR node
4900 with no elements indicates zero-initialization of the whole. */
4901 if (vec_safe_is_empty (elts))
4902 {
4903 if (notify_temp_creation)
4904 return GS_OK;
4905 break;
4906 }
4907
4908 /* Fetch information about the constructor to direct later processing.
4909 We might want to make static versions of it in various cases, and
4910 can only do so if it is known to be a valid constant initializer. */
4911 valid_const_initializer
4912 = categorize_ctor_elements (ctor, &num_nonzero_elements,
4913 &num_unique_nonzero_elements,
4914 &num_ctor_elements, &complete_p);
4915
4916 /* If a const aggregate variable is being initialized, then it
4917 should never be a loss to promote the variable to be static. */
4918 if (valid_const_initializer
4919 && num_nonzero_elements > 1
4920 && TREE_READONLY (object)
4921 && VAR_P (object)
4922 && !DECL_REGISTER (object)
4923 && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object))
4924 /* For ctors that have many repeated nonzero elements
4925 represented through RANGE_EXPRs, prefer initializing
4926 those through runtime loops over copies of large amounts
4927 of data from readonly data section. */
4928 && (num_unique_nonzero_elements
4929 > num_nonzero_elements / unique_nonzero_ratio
4930 || ((unsigned HOST_WIDE_INT) int_size_in_bytes (type)
4931 <= (unsigned HOST_WIDE_INT) min_unique_size)))
4932 {
4933 if (notify_temp_creation)
4934 return GS_ERROR;
4935
4936 DECL_INITIAL (object) = ctor;
4937 TREE_STATIC (object) = 1;
4938 if (!DECL_NAME (object))
4939 DECL_NAME (object) = create_tmp_var_name ("C");
4940 walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);
4941
4942 /* ??? C++ doesn't automatically append a .<number> to the
4943 assembler name, and even when it does, it looks at FE private
4944 data structures to figure out what that number should be,
4945 which are not set for this variable. I suppose this is
4946 important for local statics for inline functions, which aren't
4947 "local" in the object file sense. So in order to get a unique
4948 TU-local symbol, we must invoke the lhd version now. */
4949 lhd_set_decl_assembler_name (object);
4950
4951 *expr_p = NULL_TREE;
4952 break;
4953 }
4954
4955 /* If there are "lots" of initialized elements, even discounting
4956 those that are not address constants (and thus *must* be
4957 computed at runtime), then partition the constructor into
4958 constant and non-constant parts. Block copy the constant
4959 parts in, then generate code for the non-constant parts. */
4960 /* TODO. There's code in cp/typeck.c to do this. */
4961
4962 if (int_size_in_bytes (TREE_TYPE (ctor)) < 0)
4963 /* store_constructor will ignore the clearing of variable-sized
4964 objects. Initializers for such objects must explicitly set
4965 every field that needs to be set. */
4966 cleared = false;
4967 else if (!complete_p)
4968 /* If the constructor isn't complete, clear the whole object
4969 beforehand, unless CONSTRUCTOR_NO_CLEARING is set on it.
4970
4971 ??? This ought not to be needed. For any element not present
4972 in the initializer, we should simply set them to zero. Except
4973 we'd need to *find* the elements that are not present, and that
4974 requires trickery to avoid quadratic compile-time behavior in
4975 large cases or excessive memory use in small cases. */
4976 cleared = !CONSTRUCTOR_NO_CLEARING (ctor);
4977 else if (num_ctor_elements - num_nonzero_elements
4978 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
4979 && num_nonzero_elements < num_ctor_elements / 4)
4980 /* If there are "lots" of zeros, it's more efficient to clear
4981 the memory and then set the nonzero elements. */
4982 cleared = true;
4983 else if (ensure_single_access && num_nonzero_elements == 0)
4984 /* If a single access to the target must be ensured and all elements
4985 are zero, then it's optimal to clear whatever their number. */
4986 cleared = true;
4987 else
4988 cleared = false;
4989
4990 /* If there are "lots" of initialized elements, and all of them
4991 are valid address constants, then the entire initializer can
4992 be dropped to memory, and then memcpy'd out. Don't do this
4993 for sparse arrays, though, as it's more efficient to follow
4994 the standard CONSTRUCTOR behavior of memset followed by
4995 individual element initialization. Also don't do this for small
4996 all-zero initializers (which aren't big enough to merit
4997 clearing), and don't try to make bitwise copies of
4998 TREE_ADDRESSABLE types. */
4999 if (valid_const_initializer
5000 && complete_p
5001 && !(cleared || num_nonzero_elements == 0)
5002 && !TREE_ADDRESSABLE (type))
5003 {
5004 HOST_WIDE_INT size = int_size_in_bytes (type);
5005 unsigned int align;
5006
5007 /* ??? We can still get unbounded array types, at least
5008 from the C++ front end. This seems wrong, but attempt
5009 to work around it for now. */
5010 if (size < 0)
5011 {
5012 size = int_size_in_bytes (TREE_TYPE (object));
5013 if (size >= 0)
5014 TREE_TYPE (ctor) = type = TREE_TYPE (object);
5015 }
5016
5017 /* Find the maximum alignment we can assume for the object. */
5018 /* ??? Make use of DECL_OFFSET_ALIGN. */
5019 if (DECL_P (object))
5020 align = DECL_ALIGN (object);
5021 else
5022 align = TYPE_ALIGN (type);
5023
5024 /* Do a block move either if the size is so small as to make
5025 each individual move a sub-unit move on average, or if it
5026 is so large as to make individual moves inefficient. */
5027 if (size > 0
5028 && num_nonzero_elements > 1
5029 /* For ctors that have many repeated nonzero elements
5030 represented through RANGE_EXPRs, prefer initializing
5031 those through runtime loops over copies of large amounts
5032 of data from readonly data section. */
5033 && (num_unique_nonzero_elements
5034 > num_nonzero_elements / unique_nonzero_ratio
5035 || size <= min_unique_size)
5036 && (size < num_nonzero_elements
5037 || !can_move_by_pieces (size, align)))
5038 {
5039 if (notify_temp_creation)
5040 return GS_ERROR;
5041
5042 walk_tree (&ctor, force_labels_r, NULL, NULL);
5043 ctor = tree_output_constant_def (ctor);
5044 if (!useless_type_conversion_p (type, TREE_TYPE (ctor)))
5045 ctor = build1 (VIEW_CONVERT_EXPR, type, ctor);
5046 TREE_OPERAND (*expr_p, 1) = ctor;
5047
5048 /* This is no longer an assignment of a CONSTRUCTOR, but
5049 we still may have processing to do on the LHS. So
5050 pretend we didn't do anything here to let that happen. */
5051 return GS_UNHANDLED;
5052 }
5053 }
5054
5055 /* If a single access to the target must be ensured and there are
5056 nonzero elements or the zero elements are not assigned en masse,
5057 initialize the target from a temporary. */
5058 if (ensure_single_access && (num_nonzero_elements > 0 || !cleared))
5059 {
5060 if (notify_temp_creation)
5061 return GS_ERROR;
5062
5063 tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type));
5064 TREE_OPERAND (*expr_p, 0) = temp;
5065 *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
5066 *expr_p,
5067 build2 (MODIFY_EXPR, void_type_node,
5068 object, temp));
5069 return GS_OK;
5070 }
5071
5072 if (notify_temp_creation)
5073 return GS_OK;
5074
5075 /* If there are nonzero elements and if needed, pre-evaluate to capture
5076 elements overlapping with the lhs into temporaries. We must do this
5077 before clearing to fetch the values before they are zeroed-out. */
5078 if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR)
5079 {
5080 preeval_data.lhs_base_decl = get_base_address (object);
5081 if (!DECL_P (preeval_data.lhs_base_decl))
5082 preeval_data.lhs_base_decl = NULL;
5083 preeval_data.lhs_alias_set = get_alias_set (object);
5084
5085 gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
5086 pre_p, post_p, &preeval_data);
5087 }
5088
5089 bool ctor_has_side_effects_p
5090 = TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p, 1));
5091
5092 if (cleared)
5093 {
5094 /* Zap the CONSTRUCTOR element list, which simplifies this case.
5095 Note that we still have to gimplify, in order to handle the
5096 case of variable sized types. Avoid shared tree structures. */
5097 CONSTRUCTOR_ELTS (ctor) = NULL;
5098 TREE_SIDE_EFFECTS (ctor) = 0;
5099 object = unshare_expr (object);
5100 gimplify_stmt (expr_p, pre_p);
5101 }
5102
5103 /* If we have not block cleared the object, or if there are nonzero
5104 elements in the constructor, or if the constructor has side effects,
5105 add assignments to the individual scalar fields of the object. */
5106 if (!cleared
5107 || num_nonzero_elements > 0
5108 || ctor_has_side_effects_p)
5109 gimplify_init_ctor_eval (object, elts, pre_p, cleared);
5110
5111 *expr_p = NULL_TREE;
5112 }
5113 break;
5114
5115 case COMPLEX_TYPE:
5116 {
5117 tree r, i;
5118
5119 if (notify_temp_creation)
5120 return GS_OK;
5121
5122 /* Extract the real and imaginary parts out of the ctor. */
5123 gcc_assert (elts->length () == 2);
5124 r = (*elts)[0].value;
5125 i = (*elts)[1].value;
5126 if (r == NULL || i == NULL)
5127 {
5128 tree zero = build_zero_cst (TREE_TYPE (type));
5129 if (r == NULL)
5130 r = zero;
5131 if (i == NULL)
5132 i = zero;
5133 }
5134
5135 /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
5136 represent creation of a complex value. */
5137 if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
5138 {
5139 ctor = build_complex (type, r, i);
5140 TREE_OPERAND (*expr_p, 1) = ctor;
5141 }
5142 else
5143 {
5144 ctor = build2 (COMPLEX_EXPR, type, r, i);
5145 TREE_OPERAND (*expr_p, 1) = ctor;
5146 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
5147 pre_p,
5148 post_p,
5149 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
5150 fb_rvalue);
5151 }
5152 }
5153 break;
5154
5155 case VECTOR_TYPE:
5156 {
5157 unsigned HOST_WIDE_INT ix;
5158 constructor_elt *ce;
5159
5160 if (notify_temp_creation)
5161 return GS_OK;
5162
5163 /* Go ahead and simplify constant constructors to VECTOR_CST. */
5164 if (TREE_CONSTANT (ctor))
5165 {
5166 bool constant_p = true;
5167 tree value;
5168
5169 /* Even when ctor is constant, it might contain non-*_CST
5170 elements, such as addresses or trapping values like
5171 1.0/0.0 - 1.0/0.0. Such expressions don't belong
5172 in VECTOR_CST nodes. */
5173 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
5174 if (!CONSTANT_CLASS_P (value))
5175 {
5176 constant_p = false;
5177 break;
5178 }
5179
5180 if (constant_p)
5181 {
5182 TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
5183 break;
5184 }
5185
5186 TREE_CONSTANT (ctor) = 0;
5187 }
5188
5189 /* Vector types use CONSTRUCTOR all the way through gimple
5190 compilation as a general initializer. */
5191 FOR_EACH_VEC_SAFE_ELT (elts, ix, ce)
5192 {
5193 enum gimplify_status tret;
5194 tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
5195 fb_rvalue);
5196 if (tret == GS_ERROR)
5197 ret = GS_ERROR;
5198 else if (TREE_STATIC (ctor)
5199 && !initializer_constant_valid_p (ce->value,
5200 TREE_TYPE (ce->value)))
5201 TREE_STATIC (ctor) = 0;
5202 }
5203 if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
5204 TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
5205 }
5206 break;
5207
5208 default:
5209 /* So how did we get a CONSTRUCTOR for a scalar type? */
5210 gcc_unreachable ();
5211 }
5212
5213 if (ret == GS_ERROR)
5214 return GS_ERROR;
5215 /* If we have gimplified both sides of the initializer but have
5216 not emitted an assignment, do so now. */
5217 if (*expr_p)
5218 {
5219 tree lhs = TREE_OPERAND (*expr_p, 0);
5220 tree rhs = TREE_OPERAND (*expr_p, 1);
5221 if (want_value && object == lhs)
5222 lhs = unshare_expr (lhs);
5223 gassign *init = gimple_build_assign (lhs, rhs);
5224 gimplify_seq_add_stmt (pre_p, init);
5225 }
5226 if (want_value)
5227 {
5228 *expr_p = object;
5229 return GS_OK;
5230 }
5231 else
5232 {
5233 *expr_p = NULL;
5234 return GS_ALL_DONE;
5235 }
5236 }
5237
5238 /* Given a pointer value OP0, return a simplified version of an
5239 indirection through OP0, or NULL_TREE if no simplification is
5240 possible. This may only be applied to a rhs of an expression.
5241 Note that the resulting type may be different from the type pointed
5242 to in the sense that it is still compatible from the langhooks
5243 point of view. */
5244
5245 static tree
5246 gimple_fold_indirect_ref_rhs (tree t)
5247 {
5248 return gimple_fold_indirect_ref (t);
5249 }
5250
5251 /* Subroutine of gimplify_modify_expr to do simplifications of
5252 MODIFY_EXPRs based on the code of the RHS. We loop for as long as
5253 something changes. */
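/* E.g. an initialization from a temporary,

     x = TARGET_EXPR <D.1234, f ()>;

   can usually be shortened to `x = f ();' by the TARGET_EXPR case
   below, and a RHS like `*(const A *) &y' folds back to `y' in the
   INDIRECT_REF case (illustrative sketches; D.1234 is an invented
   temporary). */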
5254
5255 static enum gimplify_status
5256 gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
5257 gimple_seq *pre_p, gimple_seq *post_p,
5258 bool want_value)
5259 {
5260 enum gimplify_status ret = GS_UNHANDLED;
5261 bool changed;
5262
5263 do
5264 {
5265 changed = false;
5266 switch (TREE_CODE (*from_p))
5267 {
5268 case VAR_DECL:
5269 /* If we're assigning from a read-only variable initialized with
5270 a constructor and not volatile, do the direct assignment from
5271 the constructor, but only if the target is not volatile either
5272 since this latter assignment might end up being done on a per
5273 field basis. However, if the target is volatile and the type
5274 is aggregate and non-addressable, gimplify_init_constructor
5275 knows that it needs to ensure a single access to the target
5276 and it will return GS_OK only in this case. */
5277 if (TREE_READONLY (*from_p)
5278 && DECL_INITIAL (*from_p)
5279 && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR
5280 && !TREE_THIS_VOLATILE (*from_p)
5281 && (!TREE_THIS_VOLATILE (*to_p)
5282 || (AGGREGATE_TYPE_P (TREE_TYPE (*to_p))
5283 && !TREE_ADDRESSABLE (TREE_TYPE (*to_p)))))
5284 {
5285 tree old_from = *from_p;
5286 enum gimplify_status subret;
5287
5288 /* Move the constructor into the RHS. */
5289 *from_p = unshare_expr (DECL_INITIAL (*from_p));
5290
5291 /* Let's see if gimplify_init_constructor will need to put
5292 it in memory. */
5293 subret = gimplify_init_constructor (expr_p, NULL, NULL,
5294 false, true);
5295 if (subret == GS_ERROR)
5296 {
5297 /* If so, revert the change. */
5298 *from_p = old_from;
5299 }
5300 else
5301 {
5302 ret = GS_OK;
5303 changed = true;
5304 }
5305 }
5306 break;
5307 case INDIRECT_REF:
5308 {
5309 /* If we have code like
5310
5311 *(const A*)(A*)&x
5312
5313 where the type of "x" is a (possibly cv-qualified) variant
5314 of "A", treat the entire expression as identical to "x".
5315 This kind of code arises in C++ when an object is bound
5316 to a const reference, and if "x" is a TARGET_EXPR we want
5317 to take advantage of the optimization below. */
5318 bool volatile_p = TREE_THIS_VOLATILE (*from_p);
5319 tree t = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
5320 if (t)
5321 {
5322 if (TREE_THIS_VOLATILE (t) != volatile_p)
5323 {
5324 if (DECL_P (t))
5325 t = build_simple_mem_ref_loc (EXPR_LOCATION (*from_p),
5326 build_fold_addr_expr (t));
5327 if (REFERENCE_CLASS_P (t))
5328 TREE_THIS_VOLATILE (t) = volatile_p;
5329 }
5330 *from_p = t;
5331 ret = GS_OK;
5332 changed = true;
5333 }
5334 break;
5335 }
5336
5337 case TARGET_EXPR:
5338 {
5339 /* If we are initializing something from a TARGET_EXPR, strip the
5340 TARGET_EXPR and initialize it directly, if possible. This can't
5341 be done if the initializer is void, since that implies that the
5342 temporary is set in some non-trivial way.
5343
5344 ??? What about code that pulls out the temp and uses it
5345 elsewhere? I think that such code never uses the TARGET_EXPR as
5346 an initializer. If I'm wrong, we'll die because the temp won't
5347 have any RTL. In that case, I guess we'll need to replace
5348 references somehow. */
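/* For illustration, a sketch (D.1 being a made-up temporary slot):
   x = TARGET_EXPR <D.1, f (y)>;
   becomes roughly
   x = f (y);
   eliding D.1 entirely. */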
5349 tree init = TARGET_EXPR_INITIAL (*from_p);
5350
5351 if (init
5352 && (TREE_CODE (*expr_p) != MODIFY_EXPR
5353 || !TARGET_EXPR_NO_ELIDE (*from_p))
5354 && !VOID_TYPE_P (TREE_TYPE (init)))
5355 {
5356 *from_p = init;
5357 ret = GS_OK;
5358 changed = true;
5359 }
5360 }
5361 break;
5362
5363 case COMPOUND_EXPR:
5364 /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
5365 caught. */
5366 gimplify_compound_expr (from_p, pre_p, true);
5367 ret = GS_OK;
5368 changed = true;
5369 break;
5370
5371 case CONSTRUCTOR:
5372 /* If we already made some changes, let the front end have a
5373 crack at this before we break it down. */
5374 if (ret != GS_UNHANDLED)
5375 break;
5376 /* If we're initializing from a CONSTRUCTOR, break this into
5377 individual MODIFY_EXPRs. */
5378 return gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
5379 false);
5380
5381 case COND_EXPR:
5382 /* If we're assigning to a non-register type, push the assignment
5383 down into the branches. This is mandatory for ADDRESSABLE types,
5384 since we cannot generate temporaries for such, but it saves a
5385 copy in other cases as well. */
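/* Schematically, for a non-register type T this rewrites
   x = c ? a : b;
   into roughly
   if (c) x = a; else x = b;
   so that no temporary of type T is needed. */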
5386 if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
5387 {
5388 /* This code should mirror the code in gimplify_cond_expr. */
5389 enum tree_code code = TREE_CODE (*expr_p);
5390 tree cond = *from_p;
5391 tree result = *to_p;
5392
5393 ret = gimplify_expr (&result, pre_p, post_p,
5394 is_gimple_lvalue, fb_lvalue);
5395 if (ret != GS_ERROR)
5396 ret = GS_OK;
5397
5398 /* If we are going to write RESULT more than once, clear
5399 its TREE_READONLY flag; otherwise we might incorrectly promote
5400 the variable to static const and initialize it at compile
5401 time in one of the branches. */
5402 if (VAR_P (result)
5403 && TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node
5404 && TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
5405 TREE_READONLY (result) = 0;
5406 if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
5407 TREE_OPERAND (cond, 1)
5408 = build2 (code, void_type_node, result,
5409 TREE_OPERAND (cond, 1));
5410 if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
5411 TREE_OPERAND (cond, 2)
5412 = build2 (code, void_type_node, unshare_expr (result),
5413 TREE_OPERAND (cond, 2));
5414
5415 TREE_TYPE (cond) = void_type_node;
5416 recalculate_side_effects (cond);
5417
5418 if (want_value)
5419 {
5420 gimplify_and_add (cond, pre_p);
5421 *expr_p = unshare_expr (result);
5422 }
5423 else
5424 *expr_p = cond;
5425 return ret;
5426 }
5427 break;
5428
5429 case CALL_EXPR:
5430 /* For calls that return in memory, give *to_p as the CALL_EXPR's
5431 return slot so that we don't generate a temporary. */
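/* For illustration, a sketch: given
   struct S s = f ();
   with S returned in memory, setting the flag lets expansion pass
   the address of "s" as the return slot, in effect
   f (&s);
   rather than returning into a temporary and copying it into "s". */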
5432 if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
5433 && aggregate_value_p (*from_p, *from_p))
5434 {
5435 bool use_target;
5436
5437 if (!(rhs_predicate_for (*to_p))(*from_p))
5438 /* If we need a temporary, *to_p isn't accurate. */
5439 use_target = false;
5440 /* It's OK to use the return slot directly unless it's an NRV. */
5441 else if (TREE_CODE (*to_p) == RESULT_DECL
5442 && DECL_NAME (*to_p) == NULL_TREE
5443 && needs_to_live_in_memory (*to_p))
5444 use_target = true;
5445 else if (is_gimple_reg_type (TREE_TYPE (*to_p))
5446 || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
5447 /* Don't force regs into memory. */
5448 use_target = false;
5449 else if (TREE_CODE (*expr_p) == INIT_EXPR)
5450 /* It's OK to use the target directly if it's being
5451 initialized. */
5452 use_target = true;
5453 else if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (*to_p)))
5454 != INTEGER_CST)
5455 /* Always use the target and thus RSO for variable-sized types.
5456 GIMPLE cannot deal with a variable-sized assignment
5457 embedded in a call statement. */
5458 use_target = true;
5459 else if (TREE_CODE (*to_p) != SSA_NAME
5460 && (!is_gimple_variable (*to_p)
5461 || needs_to_live_in_memory (*to_p)))
5462 /* Don't use the original target if it's already addressable;
5463 if its address escapes, and the called function uses the
5464 NRV optimization, a conforming program could see *to_p
5465 change before the called function returns; see c++/19317.
5466 When optimizing, the return_slot pass marks more functions
5467 as safe after we have escape info. */
5468 use_target = false;
5469 else
5470 use_target = true;
5471
5472 if (use_target)
5473 {
5474 CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
5475 mark_addressable (*to_p);
5476 }
5477 }
5478 break;
5479
5480 case WITH_SIZE_EXPR:
5481 /* Likewise for calls that return an aggregate of non-constant size,
5482 since we would not be able to generate a temporary at all. */
5483 if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR)
5484 {
5485 *from_p = TREE_OPERAND (*from_p, 0);
5486 /* We don't change ret in this case because the
5487 WITH_SIZE_EXPR might have been added in
5488 gimplify_modify_expr, so returning GS_OK would lead to an
5489 infinite loop. */
5490 changed = true;
5491 }
5492 break;
5493
5494 /* If we're initializing from a container, push the initialization
5495 inside it. */
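/* For illustration, with a GNU statement expression, roughly:
   x = ({ stmt; val; });
   becomes
   ({ stmt; x = val; });
   via the voidify_wrapper_expr call below. */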
5496 case CLEANUP_POINT_EXPR:
5497 case BIND_EXPR:
5498 case STATEMENT_LIST:
5499 {
5500 tree wrap = *from_p;
5501 tree t;
5502
5503 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
5504 fb_lvalue);
5505 if (ret != GS_ERROR)
5506 ret = GS_OK;
5507
5508 t = voidify_wrapper_expr (wrap, *expr_p);
5509 gcc_assert (t == *expr_p);
5510
5511 if (want_value)
5512 {
5513 gimplify_and_add (wrap, pre_p);
5514 *expr_p = unshare_expr (*to_p);
5515 }
5516 else
5517 *expr_p = wrap;
5518 return GS_OK;
5519 }
5520
5521 case COMPOUND_LITERAL_EXPR:
5522 {
5523 tree complit = TREE_OPERAND (*expr_p, 1);
5524 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
5525 tree decl = DECL_EXPR_DECL (decl_s);
5526 tree init = DECL_INITIAL (decl);
5527
5528 /* struct T x = (struct T) { 0, 1, 2 } can be optimized
5529 into struct T x = { 0, 1, 2 } if the address of the
5530 compound literal has never been taken. */
5531 if (!TREE_ADDRESSABLE (complit)
5532 && !TREE_ADDRESSABLE (decl)
5533 && init)
5534 {
5535 *expr_p = copy_node (*expr_p);
5536 TREE_OPERAND (*expr_p, 1) = init;
5537 return GS_OK;
5538 }
5539 }
5540
5541 default:
5542 break;
5543 }
5544 }
5545 while (changed);
5546
5547 return ret;
5548 }
5549
5550
5551 /* Return true if T looks like a valid GIMPLE statement. */
5552
5553 static bool
5554 is_gimple_stmt (tree t)
5555 {
5556 const enum tree_code code = TREE_CODE (t);
5557
5558 switch (code)
5559 {
5560 case NOP_EXPR:
5561 /* The only valid NOP_EXPR is the empty statement. */
5562 return IS_EMPTY_STMT (t);
5563
5564 case BIND_EXPR:
5565 case COND_EXPR:
5566 /* These are only valid if they're void. */
5567 return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));
5568
5569 case SWITCH_EXPR:
5570 case GOTO_EXPR:
5571 case RETURN_EXPR:
5572 case LABEL_EXPR:
5573 case CASE_LABEL_EXPR:
5574 case TRY_CATCH_EXPR:
5575 case TRY_FINALLY_EXPR:
5576 case EH_FILTER_EXPR:
5577 case CATCH_EXPR:
5578 case ASM_EXPR:
5579 case STATEMENT_LIST:
5580 case OACC_PARALLEL:
5581 case OACC_KERNELS:
5582 case OACC_SERIAL:
5583 case OACC_DATA:
5584 case OACC_HOST_DATA:
5585 case OACC_DECLARE:
5586 case OACC_UPDATE:
5587 case OACC_ENTER_DATA:
5588 case OACC_EXIT_DATA:
5589 case OACC_CACHE:
5590 case OMP_PARALLEL:
5591 case OMP_FOR:
5592 case OMP_SIMD:
5593 case OMP_DISTRIBUTE:
5594 case OMP_LOOP:
5595 case OACC_LOOP:
5596 case OMP_SCAN:
5597 case OMP_SECTIONS:
5598 case OMP_SECTION:
5599 case OMP_SINGLE:
5600 case OMP_MASTER:
5601 case OMP_TASKGROUP:
5602 case OMP_ORDERED:
5603 case OMP_CRITICAL:
5604 case OMP_TASK:
5605 case OMP_TARGET:
5606 case OMP_TARGET_DATA:
5607 case OMP_TARGET_UPDATE:
5608 case OMP_TARGET_ENTER_DATA:
5609 case OMP_TARGET_EXIT_DATA:
5610 case OMP_TASKLOOP:
5611 case OMP_TEAMS:
5612 /* These are always void. */
5613 return true;
5614
5615 case CALL_EXPR:
5616 case MODIFY_EXPR:
5617 case PREDICT_EXPR:
5618 /* These are valid regardless of their type. */
5619 return true;
5620
5621 default:
5622 return false;
5623 }
5624 }
5625
5626
5627 /* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
5628 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a gimple register.
5629
5630 IMPORTANT NOTE: This promotion is performed by introducing a load of the
5631 other, unmodified part of the complex object just before the total store.
5632 As a consequence, if the object is still uninitialized, an undefined value
5633 will be loaded into a register, which may result in a spurious exception
5634 if the register is floating-point and the value happens to be a signaling
5635 NaN for example. Then the fully-fledged complex operations lowering
5636 pass, followed by a DCE pass, is necessary in order to fix things up. */
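/* For illustration, a sketch (D.1 being a made-up formal temporary):
   __real__ c = x;
   becomes roughly
   D.1 = __imag__ c;
   c = COMPLEX_EXPR <x, D.1>; */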
5637
5638 static enum gimplify_status
5639 gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
5640 bool want_value)
5641 {
5642 enum tree_code code, ocode;
5643 tree lhs, rhs, new_rhs, other, realpart, imagpart;
5644
5645 lhs = TREE_OPERAND (*expr_p, 0);
5646 rhs = TREE_OPERAND (*expr_p, 1);
5647 code = TREE_CODE (lhs);
5648 lhs = TREE_OPERAND (lhs, 0);
5649
5650 ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
5651 other = build1 (ocode, TREE_TYPE (rhs), lhs);
5652 TREE_NO_WARNING (other) = 1;
5653 other = get_formal_tmp_var (other, pre_p);
5654
5655 realpart = code == REALPART_EXPR ? rhs : other;
5656 imagpart = code == REALPART_EXPR ? other : rhs;
5657
5658 if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
5659 new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
5660 else
5661 new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
5662
5663 gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
5664 *expr_p = (want_value) ? rhs : NULL_TREE;
5665
5666 return GS_ALL_DONE;
5667 }
5668
5669 /* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
5670
5671 modify_expr
5672 : varname '=' rhs
5673 | '*' ID '=' rhs
5674
5675 PRE_P points to the list where side effects that must happen before
5676 *EXPR_P should be stored.
5677
5678 POST_P points to the list where side effects that must happen after
5679 *EXPR_P should be stored.
5680
5681 WANT_VALUE is nonzero iff we want to use the value of this expression
5682 in another expression. */
5683
5684 static enum gimplify_status
5685 gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
5686 bool want_value)
5687 {
5688 tree *from_p = &TREE_OPERAND (*expr_p, 1);
5689 tree *to_p = &TREE_OPERAND (*expr_p, 0);
5690 enum gimplify_status ret = GS_UNHANDLED;
5691 gimple *assign;
5692 location_t loc = EXPR_LOCATION (*expr_p);
5693 gimple_stmt_iterator gsi;
5694
5695 gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
5696 || TREE_CODE (*expr_p) == INIT_EXPR);
5697
5698 /* Trying to simplify a clobber using normal logic doesn't work,
5699 so handle it here. */
5700 if (TREE_CLOBBER_P (*from_p))
5701 {
5702 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
5703 if (ret == GS_ERROR)
5704 return ret;
5705 gcc_assert (!want_value);
5706 if (!VAR_P (*to_p) && TREE_CODE (*to_p) != MEM_REF)
5707 {
5708 tree addr = get_initialized_tmp_var (build_fold_addr_expr (*to_p),
5709 pre_p, post_p);
5710 *to_p = build_simple_mem_ref_loc (EXPR_LOCATION (*to_p), addr);
5711 }
5712 gimplify_seq_add_stmt (pre_p, gimple_build_assign (*to_p, *from_p));
5713 *expr_p = NULL;
5714 return GS_ALL_DONE;
5715 }
5716
5717 /* Insert pointer conversions required by the middle-end that are not
5718 required by the frontend. This fixes middle-end type checking for,
5719 for example, gcc.dg/redecl-6.c. */
5720 if (POINTER_TYPE_P (TREE_TYPE (*to_p)))
5721 {
5722 STRIP_USELESS_TYPE_CONVERSION (*from_p);
5723 if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
5724 *from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
5725 }
5726
5727 /* See if any simplifications can be done based on what the RHS is. */
5728 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
5729 want_value);
5730 if (ret != GS_UNHANDLED)
5731 return ret;
5732
5733 /* For zero-sized types, only gimplify the left-hand side and right-hand
5734 side as statements and throw away the assignment. Do this after
5735 gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
5736 types properly. */
5737 if (zero_sized_type (TREE_TYPE (*from_p))
5738 && !want_value
5739 /* Don't do this for calls that return addressable types, expand_call
5740 relies on those having a lhs. */
5741 && !(TREE_ADDRESSABLE (TREE_TYPE (*from_p))
5742 && TREE_CODE (*from_p) == CALL_EXPR))
5743 {
5744 gimplify_stmt (from_p, pre_p);
5745 gimplify_stmt (to_p, pre_p);
5746 *expr_p = NULL_TREE;
5747 return GS_ALL_DONE;
5748 }
5749
5750 /* If the value being copied is of variable width, compute the length
5751 of the copy into a WITH_SIZE_EXPR. Note that we need to do this
5752 before gimplifying any of the operands so that we can resolve any
5753 PLACEHOLDER_EXPRs in the size. Also note that the RTL expander uses
5754 the size of the expression to be copied, not of the destination, so
5755 that is what we must do here. */
5756 maybe_with_size_expr (from_p);
5757
5758 /* As a special case, we have to temporarily allow for assignments
5759 with a CALL_EXPR on the RHS. Since in GIMPLE a function call is
5760 a toplevel statement, when gimplifying the GENERIC expression
5761 MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
5762 GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.
5763
5764 Instead, we need to create the tuple GIMPLE_CALL <a, foo>. To
5765 prevent gimplify_expr from trying to create a new temporary for
5766 foo's LHS, we tell it that it should only gimplify until it
5767 reaches the CALL_EXPR. On return from gimplify_expr, the newly
5768 created GIMPLE_CALL <foo> will be the last statement in *PRE_P
5769 and all we need to do here is set 'a' to be its LHS. */
5770
5771 /* Gimplify the RHS first for C++17 and bug 71104. */
5772 gimple_predicate initial_pred = initial_rhs_predicate_for (*to_p);
5773 ret = gimplify_expr (from_p, pre_p, post_p, initial_pred, fb_rvalue);
5774 if (ret == GS_ERROR)
5775 return ret;
5776
5777 /* Then gimplify the LHS. */
5778 /* If we gimplified the RHS to a CALL_EXPR and that call may return
5779 twice we have to make sure to gimplify into non-SSA as otherwise
5780 the abnormal edge added later will make those defs not dominate
5781 their uses.
5782 ??? Technically this applies only to the registers used in the
5783 resulting non-register *TO_P. */
5784 bool saved_into_ssa = gimplify_ctxp->into_ssa;
5785 if (saved_into_ssa
5786 && TREE_CODE (*from_p) == CALL_EXPR
5787 && call_expr_flags (*from_p) & ECF_RETURNS_TWICE)
5788 gimplify_ctxp->into_ssa = false;
5789 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
5790 gimplify_ctxp->into_ssa = saved_into_ssa;
5791 if (ret == GS_ERROR)
5792 return ret;
5793
5794 /* Now that the LHS is gimplified, re-gimplify the RHS if our initial
5795 guess for the predicate was wrong. */
5796 gimple_predicate final_pred = rhs_predicate_for (*to_p);
5797 if (final_pred != initial_pred)
5798 {
5799 ret = gimplify_expr (from_p, pre_p, post_p, final_pred, fb_rvalue);
5800 if (ret == GS_ERROR)
5801 return ret;
5802 }
5803
5804 /* In case of a va_arg internal fn wrapped in a WITH_SIZE_EXPR, add the type
5805 size as an argument to the call. */
5806 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
5807 {
5808 tree call = TREE_OPERAND (*from_p, 0);
5809 tree vlasize = TREE_OPERAND (*from_p, 1);
5810
5811 if (TREE_CODE (call) == CALL_EXPR
5812 && CALL_EXPR_IFN (call) == IFN_VA_ARG)
5813 {
5814 int nargs = call_expr_nargs (call);
5815 tree type = TREE_TYPE (call);
5816 tree ap = CALL_EXPR_ARG (call, 0);
5817 tree tag = CALL_EXPR_ARG (call, 1);
5818 tree aptag = CALL_EXPR_ARG (call, 2);
5819 tree newcall = build_call_expr_internal_loc (EXPR_LOCATION (call),
5820 IFN_VA_ARG, type,
5821 nargs + 1, ap, tag,
5822 aptag, vlasize);
5823 TREE_OPERAND (*from_p, 0) = newcall;
5824 }
5825 }
5826
5827 /* Now see if the above changed *from_p to something we handle specially. */
5828 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
5829 want_value);
5830 if (ret != GS_UNHANDLED)
5831 return ret;
5832
5833 /* If we've got a variable sized assignment between two lvalues (i.e. does
5834 not involve a call), then we can make things a bit more straightforward
5835 by converting the assignment to memcpy or memset. */
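/* Schematically, for a variable-sized type whose size gimplified to SZ:
   a = b; becomes roughly __builtin_memcpy (&a, &b, SZ);
   a = {}; becomes roughly __builtin_memset (&a, 0, SZ); */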
5836 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
5837 {
5838 tree from = TREE_OPERAND (*from_p, 0);
5839 tree size = TREE_OPERAND (*from_p, 1);
5840
5841 if (TREE_CODE (from) == CONSTRUCTOR)
5842 return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);
5843
5844 if (is_gimple_addressable (from))
5845 {
5846 *from_p = from;
5847 return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
5848 pre_p);
5849 }
5850 }
5851
5852 /* Transform partial stores to non-addressable complex variables into
5853 total stores. This allows us to use real instead of virtual operands
5854 for these variables, which improves optimization. */
5855 if ((TREE_CODE (*to_p) == REALPART_EXPR
5856 || TREE_CODE (*to_p) == IMAGPART_EXPR)
5857 && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
5858 return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);
5859
5860 /* Try to alleviate the effects of the gimplification creating artificial
5861 temporaries (see for example is_gimple_reg_rhs) on the debug info, but
5862 make sure not to create DECL_DEBUG_EXPR links across functions. */
5863 if (!gimplify_ctxp->into_ssa
5864 && VAR_P (*from_p)
5865 && DECL_IGNORED_P (*from_p)
5866 && DECL_P (*to_p)
5867 && !DECL_IGNORED_P (*to_p)
5868 && decl_function_context (*to_p) == current_function_decl
5869 && decl_function_context (*from_p) == current_function_decl)
5870 {
5871 if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
5872 DECL_NAME (*from_p)
5873 = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
5874 DECL_HAS_DEBUG_EXPR_P (*from_p) = 1;
5875 SET_DECL_DEBUG_EXPR (*from_p, *to_p);
5876 }
5877
5878 if (want_value && TREE_THIS_VOLATILE (*to_p))
5879 *from_p = get_initialized_tmp_var (*from_p, pre_p, post_p);
5880
5881 if (TREE_CODE (*from_p) == CALL_EXPR)
5882 {
5883 /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
5884 instead of a GIMPLE_ASSIGN. */
5885 gcall *call_stmt;
5886 if (CALL_EXPR_FN (*from_p) == NULL_TREE)
5887 {
5888 /* Gimplify internal functions created in the FEs. */
5889 int nargs = call_expr_nargs (*from_p), i;
5890 enum internal_fn ifn = CALL_EXPR_IFN (*from_p);
5891 auto_vec<tree> vargs (nargs);
5892
5893 for (i = 0; i < nargs; i++)
5894 {
5895 gimplify_arg (&CALL_EXPR_ARG (*from_p, i), pre_p,
5896 EXPR_LOCATION (*from_p));
5897 vargs.quick_push (CALL_EXPR_ARG (*from_p, i));
5898 }
5899 call_stmt = gimple_build_call_internal_vec (ifn, vargs);
5900 gimple_call_set_nothrow (call_stmt, TREE_NOTHROW (*from_p));
5901 gimple_set_location (call_stmt, EXPR_LOCATION (*expr_p));
5902 }
5903 else
5904 {
5905 tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*from_p));
5906 CALL_EXPR_FN (*from_p) = TREE_OPERAND (CALL_EXPR_FN (*from_p), 0);
5907 STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p));
5908 tree fndecl = get_callee_fndecl (*from_p);
5909 if (fndecl
5910 && fndecl_built_in_p (fndecl, BUILT_IN_EXPECT)
5911 && call_expr_nargs (*from_p) == 3)
5912 call_stmt = gimple_build_call_internal (IFN_BUILTIN_EXPECT, 3,
5913 CALL_EXPR_ARG (*from_p, 0),
5914 CALL_EXPR_ARG (*from_p, 1),
5915 CALL_EXPR_ARG (*from_p, 2));
5916 else
5917 {
5918 call_stmt = gimple_build_call_from_tree (*from_p, fnptrtype);
5919 }
5920 }
5921 notice_special_calls (call_stmt);
5922 if (!gimple_call_noreturn_p (call_stmt) || !should_remove_lhs_p (*to_p))
5923 gimple_call_set_lhs (call_stmt, *to_p);
5924 else if (TREE_CODE (*to_p) == SSA_NAME)
5925 /* The above is somewhat premature; avoid ICEing later for an
5926 SSA name without a definition. We may have uses in the GIMPLE IL.
5927 ??? This doesn't make it a default-def. */
5928 SSA_NAME_DEF_STMT (*to_p) = gimple_build_nop ();
5929
5930 assign = call_stmt;
5931 }
5932 else
5933 {
5934 assign = gimple_build_assign (*to_p, *from_p);
5935 gimple_set_location (assign, EXPR_LOCATION (*expr_p));
5936 if (COMPARISON_CLASS_P (*from_p))
5937 gimple_set_no_warning (assign, TREE_NO_WARNING (*from_p));
5938 }
5939
5940 if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
5941 {
5942 /* We should have got an SSA name from the start. */
5943 gcc_assert (TREE_CODE (*to_p) == SSA_NAME
5944 || ! gimple_in_ssa_p (cfun));
5945 }
5946
5947 gimplify_seq_add_stmt (pre_p, assign);
5948 gsi = gsi_last (*pre_p);
5949 maybe_fold_stmt (&gsi);
5950
5951 if (want_value)
5952 {
5953 *expr_p = TREE_THIS_VOLATILE (*to_p) ? *from_p : unshare_expr (*to_p);
5954 return GS_OK;
5955 }
5956 else
5957 *expr_p = NULL;
5958
5959 return GS_ALL_DONE;
5960 }
5961
5962 /* Gimplify a comparison between two variable-sized objects. Do this
5963 with a call to BUILT_IN_MEMCMP. */
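/* Schematically:
   a == b
   becomes roughly
   __builtin_memcmp (&a, &b, SZ) == 0
   where SZ is the size computed from the variable-sized type of "a". */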
5964
5965 static enum gimplify_status
5966 gimplify_variable_sized_compare (tree *expr_p)
5967 {
5968 location_t loc = EXPR_LOCATION (*expr_p);
5969 tree op0 = TREE_OPERAND (*expr_p, 0);
5970 tree op1 = TREE_OPERAND (*expr_p, 1);
5971 tree t, arg, dest, src, expr;
5972
5973 arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
5974 arg = unshare_expr (arg);
5975 arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
5976 src = build_fold_addr_expr_loc (loc, op1);
5977 dest = build_fold_addr_expr_loc (loc, op0);
5978 t = builtin_decl_implicit (BUILT_IN_MEMCMP);
5979 t = build_call_expr_loc (loc, t, 3, dest, src, arg);
5980
5981 expr
5982 = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
5983 SET_EXPR_LOCATION (expr, loc);
5984 *expr_p = expr;
5985
5986 return GS_OK;
5987 }
5988
5989 /* Gimplify a comparison between two aggregate objects of integral scalar
5990 mode as a comparison between the bitwise equivalent scalar values. */
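/* For illustration: comparing two 8-byte structs of integral DImode
   becomes roughly
   VIEW_CONVERT_EXPR <uint64> (a) == VIEW_CONVERT_EXPR <uint64> (b)
   (a sketch; the actual scalar type is whatever type_for_mode returns). */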
5991
5992 static enum gimplify_status
5993 gimplify_scalar_mode_aggregate_compare (tree *expr_p)
5994 {
5995 location_t loc = EXPR_LOCATION (*expr_p);
5996 tree op0 = TREE_OPERAND (*expr_p, 0);
5997 tree op1 = TREE_OPERAND (*expr_p, 1);
5998
5999 tree type = TREE_TYPE (op0);
6000 tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
6001
6002 op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
6003 op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);
6004
6005 *expr_p
6006 = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
6007
6008 return GS_OK;
6009 }
6010
6011 /* Gimplify an expression sequence. This function gimplifies each
6012 expression and rewrites the original expression with the last
6013 expression of the sequence in GIMPLE form.
6014
6015 PRE_P points to the list where the side effects for all the
6016 expressions in the sequence will be emitted.
6017
6018 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
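/* For illustration: gimplifying
   x = (a++, b++, c);
   emits the statements for "a++" and "b++" into PRE_P and leaves
   x = c;
   as the remaining expression (a sketch). */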
6019
6020 static enum gimplify_status
6021 gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
6022 {
6023 tree t = *expr_p;
6024
6025 do
6026 {
6027 tree *sub_p = &TREE_OPERAND (t, 0);
6028
6029 if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
6030 gimplify_compound_expr (sub_p, pre_p, false);
6031 else
6032 gimplify_stmt (sub_p, pre_p);
6033
6034 t = TREE_OPERAND (t, 1);
6035 }
6036 while (TREE_CODE (t) == COMPOUND_EXPR);
6037
6038 *expr_p = t;
6039 if (want_value)
6040 return GS_OK;
6041 else
6042 {
6043 gimplify_stmt (expr_p, pre_p);
6044 return GS_ALL_DONE;
6045 }
6046 }
6047
6048 /* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
6049 gimplify. After gimplification, EXPR_P will point to a new temporary
6050 that holds the original value of the SAVE_EXPR node.
6051
6052 PRE_P points to the list where side effects that must happen before
6053 *EXPR_P should be stored. */
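/* For illustration, a sketch (D.1 being a made-up temporary): the first
   gimplification of SAVE_EXPR <n * 4> emits
   D.1 = n * 4;
   and this and every later occurrence of the same SAVE_EXPR node is
   replaced by D.1, so the operand is evaluated only once. */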
6054
6055 static enum gimplify_status
6056 gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6057 {
6058 enum gimplify_status ret = GS_ALL_DONE;
6059 tree val;
6060
6061 gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
6062 val = TREE_OPERAND (*expr_p, 0);
6063
6064 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
6065 if (!SAVE_EXPR_RESOLVED_P (*expr_p))
6066 {
6067 /* The operand may be a void-valued expression. It is
6068 being executed only for its side effects. */
6069 if (TREE_TYPE (val) == void_type_node)
6070 {
6071 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
6072 is_gimple_stmt, fb_none);
6073 val = NULL;
6074 }
6075 else
6076 /* The temporary may not be an SSA name as later abnormal and EH
6077 control flow may invalidate use/def domination. When in SSA
6078 form then assume there are no such issues and SAVE_EXPRs only
6079 appear via GENERIC foldings. */
6080 val = get_initialized_tmp_var (val, pre_p, post_p,
6081 gimple_in_ssa_p (cfun));
6082
6083 TREE_OPERAND (*expr_p, 0) = val;
6084 SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
6085 }
6086
6087 *expr_p = val;
6088
6089 return ret;
6090 }
6091
6092 /* Rewrite the ADDR_EXPR node pointed to by EXPR_P
6093
6094 unary_expr
6095 : ...
6096 | '&' varname
6097 ...
6098
6099 PRE_P points to the list where side effects that must happen before
6100 *EXPR_P should be stored.
6101
6102 POST_P points to the list where side effects that must happen after
6103 *EXPR_P should be stored. */
6104
6105 static enum gimplify_status
6106 gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6107 {
6108 tree expr = *expr_p;
6109 tree op0 = TREE_OPERAND (expr, 0);
6110 enum gimplify_status ret;
6111 location_t loc = EXPR_LOCATION (*expr_p);
6112
6113 switch (TREE_CODE (op0))
6114 {
6115 case INDIRECT_REF:
6116 do_indirect_ref:
6117 /* Check if we are dealing with an expression of the form '&*ptr'.
6118 While the front end folds away '&*ptr' into 'ptr', these
6119 expressions may be generated internally by the compiler (e.g.,
6120 builtins like __builtin_va_end). */
6121 /* Caution: the silent array decomposition semantics we allow for
6122 ADDR_EXPR means we can't always discard the pair. */
6123 /* Gimplification of the ADDR_EXPR operand may drop
6124 cv-qualification conversions, so make sure we add them if
6125 needed. */
6126 {
6127 tree op00 = TREE_OPERAND (op0, 0);
6128 tree t_expr = TREE_TYPE (expr);
6129 tree t_op00 = TREE_TYPE (op00);
6130
6131 if (!useless_type_conversion_p (t_expr, t_op00))
6132 op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
6133 *expr_p = op00;
6134 ret = GS_OK;
6135 }
6136 break;
6137
6138 case VIEW_CONVERT_EXPR:
6139 /* Take the address of our operand and then convert it to the type of
6140 this ADDR_EXPR.
6141
6142 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
6143 all clear. The impact of this transformation is even less clear. */
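/* Schematically:
   &VIEW_CONVERT_EXPR <T> (x)
   becomes roughly
   (T *) &x */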
6144
6145 /* If the operand is a useless conversion, look through it. Doing so
6146 guarantees that the ADDR_EXPR and its operand will remain of the
6147 same type. */
6148 if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
6149 op0 = TREE_OPERAND (op0, 0);
6150
6151 *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
6152 build_fold_addr_expr_loc (loc,
6153 TREE_OPERAND (op0, 0)));
6154 ret = GS_OK;
6155 break;
6156
6157 case MEM_REF:
6158 if (integer_zerop (TREE_OPERAND (op0, 1)))
6159 goto do_indirect_ref;
6160
6161 /* fall through */
6162
6163 default:
6164 /* If we see a call to a declared builtin or see its address
6165 being taken (we can unify those cases here) then we can mark
6166 the builtin for implicit generation by GCC. */
6167 if (TREE_CODE (op0) == FUNCTION_DECL
6168 && fndecl_built_in_p (op0, BUILT_IN_NORMAL)
6169 && builtin_decl_declared_p (DECL_FUNCTION_CODE (op0)))
6170 set_builtin_decl_implicit_p (DECL_FUNCTION_CODE (op0), true);
6171
6172 /* We use fb_either here because the C frontend sometimes takes
6173 the address of a call that returns a struct; see
6174 gcc.dg/c99-array-lval-1.c. The gimplifier will correctly make
6175 the implied temporary explicit. */
6176
6177 /* Make the operand addressable. */
6178 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
6179 is_gimple_addressable, fb_either);
6180 if (ret == GS_ERROR)
6181 break;
6182
6183 /* Then mark it. Beware that it may not be possible to do so directly
6184 if a temporary has been created by the gimplification. */
6185 prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p);
6186
6187 op0 = TREE_OPERAND (expr, 0);
6188
6189 /* For various reasons, the gimplification of the expression
6190 may have made a new INDIRECT_REF. */
6191 if (TREE_CODE (op0) == INDIRECT_REF
6192 || (TREE_CODE (op0) == MEM_REF
6193 && integer_zerop (TREE_OPERAND (op0, 1))))
6194 goto do_indirect_ref;
6195
6196 mark_addressable (TREE_OPERAND (expr, 0));
6197
6198 /* The FEs may end up building ADDR_EXPRs early on a decl with
6199 an incomplete type. Re-build ADDR_EXPRs in canonical form
6200 here. */
6201 if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr))))
6202 *expr_p = build_fold_addr_expr (op0);
6203
6204 /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly. */
6205 recompute_tree_invariant_for_addr_expr (*expr_p);
6206
6207 /* If we re-built the ADDR_EXPR add a conversion to the original type
6208 if required. */
6209 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
6210 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
6211
6212 break;
6213 }
6214
6215 return ret;
6216 }
6217
6218 /* Gimplify the operands of an ASM_EXPR. Input operands should be a gimple
6219 value; output operands should be a gimple lvalue. */
6220
6221 static enum gimplify_status
6222 gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6223 {
6224 tree expr;
6225 int noutputs;
6226 const char **oconstraints;
6227 int i;
6228 tree link;
6229 const char *constraint;
6230 bool allows_mem, allows_reg, is_inout;
6231 enum gimplify_status ret, tret;
6232 gasm *stmt;
6233 vec<tree, va_gc> *inputs;
6234 vec<tree, va_gc> *outputs;
6235 vec<tree, va_gc> *clobbers;
6236 vec<tree, va_gc> *labels;
6237 tree link_next;
6238
6239 expr = *expr_p;
6240 noutputs = list_length (ASM_OUTPUTS (expr));
6241 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
6242
6243 inputs = NULL;
6244 outputs = NULL;
6245 clobbers = NULL;
6246 labels = NULL;
6247
6248 ret = GS_ALL_DONE;
6249 link_next = NULL_TREE;
6250 for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
6251 {
6252 bool ok;
6253 size_t constraint_len;
6254
6255 link_next = TREE_CHAIN (link);
6256
6257 oconstraints[i]
6258 = constraint
6259 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
6260 constraint_len = strlen (constraint);
6261 if (constraint_len == 0)
6262 continue;
6263
6264 ok = parse_output_constraint (&constraint, i, 0, 0,
6265 &allows_mem, &allows_reg, &is_inout);
6266 if (!ok)
6267 {
6268 ret = GS_ERROR;
6269 is_inout = false;
6270 }
6271
6272 /* If we can't make copies, we can only accept memory.
6273 Similarly for VLAs. */
6274 tree outtype = TREE_TYPE (TREE_VALUE (link));
6275 if (outtype != error_mark_node
6276 && (TREE_ADDRESSABLE (outtype)
6277 || !COMPLETE_TYPE_P (outtype)
6278 || !tree_fits_poly_uint64_p (TYPE_SIZE_UNIT (outtype))))
6279 {
6280 if (allows_mem)
6281 allows_reg = 0;
6282 else
6283 {
6284 error ("impossible constraint in %<asm%>");
6285 error ("non-memory output %d must stay in memory", i);
6286 return GS_ERROR;
6287 }
6288 }
6289
6290 if (!allows_reg && allows_mem)
6291 mark_addressable (TREE_VALUE (link));
6292
6293 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6294 is_inout ? is_gimple_min_lval : is_gimple_lvalue,
6295 fb_lvalue | fb_mayfail);
6296 if (tret == GS_ERROR)
6297 {
6298 error ("invalid lvalue in %<asm%> output %d", i);
6299 ret = tret;
6300 }
6301
6302 /* If the constraint does not allow memory make sure we gimplify
6303 it to a register if it is not already but its base is. This
6304 happens for complex and vector components. */
6305 if (!allows_mem)
6306 {
6307 tree op = TREE_VALUE (link);
6308 if (! is_gimple_val (op)
6309 && is_gimple_reg_type (TREE_TYPE (op))
6310 && is_gimple_reg (get_base_address (op)))
6311 {
6312 tree tem = create_tmp_reg (TREE_TYPE (op));
6313 tree ass;
6314 if (is_inout)
6315 {
6316 ass = build2 (MODIFY_EXPR, TREE_TYPE (tem),
6317 tem, unshare_expr (op));
6318 gimplify_and_add (ass, pre_p);
6319 }
6320 ass = build2 (MODIFY_EXPR, TREE_TYPE (tem), op, tem);
6321 gimplify_and_add (ass, post_p);
6322
6323 TREE_VALUE (link) = tem;
6324 tret = GS_OK;
6325 }
6326 }
6327
6328 vec_safe_push (outputs, link);
6329 TREE_CHAIN (link) = NULL_TREE;
6330
6331 if (is_inout)
6332 {
6333 /* An input/output operand. To give the optimizers more
6334 flexibility, split it into separate input and output
6335 operands. */
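/* For illustration: an operand "+r" (x) becomes, schematically, an
   output "=r" (x) plus a matching input "0" (x) that refers back to
   the output by its operand number. */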
6336 tree input;
6337 /* Buffer big enough to format a 32-bit UINT_MAX into. */
6338 char buf[11];
6339
6340 /* Turn the in/out constraint into an output constraint. */
6341 char *p = xstrdup (constraint);
6342 p[0] = '=';
6343 TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);
6344
6345 /* And add a matching input constraint. */
6346 if (allows_reg)
6347 {
6348 sprintf (buf, "%u", i);
6349
6350 /* If there are multiple alternatives in the constraint,
6351 handle each of them individually. Those that allow a register
6352 will be replaced with the operand number; the others will stay
6353 unchanged. */
6354 if (strchr (p, ',') != NULL)
6355 {
6356 size_t len = 0, buflen = strlen (buf);
6357 char *beg, *end, *str, *dst;
6358
6359 for (beg = p + 1;;)
6360 {
6361 end = strchr (beg, ',');
6362 if (end == NULL)
6363 end = strchr (beg, '\0');
6364 if ((size_t) (end - beg) < buflen)
6365 len += buflen + 1;
6366 else
6367 len += end - beg + 1;
6368 if (*end)
6369 beg = end + 1;
6370 else
6371 break;
6372 }
6373
6374 str = (char *) alloca (len);
6375 for (beg = p + 1, dst = str;;)
6376 {
6377 const char *tem;
6378 bool mem_p, reg_p, inout_p;
6379
6380 end = strchr (beg, ',');
6381 if (end)
6382 *end = '\0';
6383 beg[-1] = '=';
6384 tem = beg - 1;
6385 parse_output_constraint (&tem, i, 0, 0,
6386 &mem_p, &reg_p, &inout_p);
6387 if (dst != str)
6388 *dst++ = ',';
6389 if (reg_p)
6390 {
6391 memcpy (dst, buf, buflen);
6392 dst += buflen;
6393 }
6394 else
6395 {
6396 if (end)
6397 len = end - beg;
6398 else
6399 len = strlen (beg);
6400 memcpy (dst, beg, len);
6401 dst += len;
6402 }
6403 if (end)
6404 beg = end + 1;
6405 else
6406 break;
6407 }
6408 *dst = '\0';
6409 input = build_string (dst - str, str);
6410 }
6411 else
6412 input = build_string (strlen (buf), buf);
6413 }
6414 else
6415 input = build_string (constraint_len - 1, constraint + 1);
6416
6417 free (p);
6418
6419 input = build_tree_list (build_tree_list (NULL_TREE, input),
6420 unshare_expr (TREE_VALUE (link)));
6421 ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
6422 }
6423 }
6424
6425 link_next = NULL_TREE;
6426 for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
6427 {
6428 link_next = TREE_CHAIN (link);
6429 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
6430 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
6431 oconstraints, &allows_mem, &allows_reg);
6432
6433 /* If we can't make copies, we can only accept memory. */
6434 tree intype = TREE_TYPE (TREE_VALUE (link));
6435 if (intype != error_mark_node
6436 && (TREE_ADDRESSABLE (intype)
6437 || !COMPLETE_TYPE_P (intype)
6438 || !tree_fits_poly_uint64_p (TYPE_SIZE_UNIT (intype))))
6439 {
6440 if (allows_mem)
6441 allows_reg = 0;
6442 else
6443 {
6444 error ("impossible constraint in %<asm%>");
6445 error ("non-memory input %d must stay in memory", i);
6446 return GS_ERROR;
6447 }
6448 }
6449
6450 /* If the operand is a memory input, it should be an lvalue. */
6451 if (!allows_reg && allows_mem)
6452 {
6453 tree inputv = TREE_VALUE (link);
6454 STRIP_NOPS (inputv);
6455 if (TREE_CODE (inputv) == PREDECREMENT_EXPR
6456 || TREE_CODE (inputv) == PREINCREMENT_EXPR
6457 || TREE_CODE (inputv) == POSTDECREMENT_EXPR
6458 || TREE_CODE (inputv) == POSTINCREMENT_EXPR
6459 || TREE_CODE (inputv) == MODIFY_EXPR)
6460 TREE_VALUE (link) = error_mark_node;
6461 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6462 is_gimple_lvalue, fb_lvalue | fb_mayfail);
6463 if (tret != GS_ERROR)
6464 {
6465 /* Unlike output operands, memory inputs are not guaranteed
6466 to be lvalues by the FE, and while the expressions are
6467 marked addressable there, if it is e.g. a statement
6468 expression, temporaries in it might not end up being
6469 addressable. They might already be used in the IL and thus
6470 it is too late to make them addressable now. */
6471 tree x = TREE_VALUE (link);
6472 while (handled_component_p (x))
6473 x = TREE_OPERAND (x, 0);
6474 if (TREE_CODE (x) == MEM_REF
6475 && TREE_CODE (TREE_OPERAND (x, 0)) == ADDR_EXPR)
6476 x = TREE_OPERAND (TREE_OPERAND (x, 0), 0);
6477 if ((VAR_P (x)
6478 || TREE_CODE (x) == PARM_DECL
6479 || TREE_CODE (x) == RESULT_DECL)
6480 && !TREE_ADDRESSABLE (x)
6481 && is_gimple_reg (x))
6482 {
6483 warning_at (EXPR_LOC_OR_LOC (TREE_VALUE (link),
6484 input_location), 0,
6485 "memory input %d is not directly addressable",
6486 i);
6487 prepare_gimple_addressable (&TREE_VALUE (link), pre_p);
6488 }
6489 }
6490 mark_addressable (TREE_VALUE (link));
6491 if (tret == GS_ERROR)
6492 {
6493 error_at (EXPR_LOC_OR_LOC (TREE_VALUE (link), input_location),
6494 "memory input %d is not directly addressable", i);
6495 ret = tret;
6496 }
6497 }
6498 else
6499 {
6500 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6501 is_gimple_asm_val, fb_rvalue);
6502 if (tret == GS_ERROR)
6503 ret = tret;
6504 }
6505
6506 TREE_CHAIN (link) = NULL_TREE;
6507 vec_safe_push (inputs, link);
6508 }
6509
6510 link_next = NULL_TREE;
6511 for (link = ASM_CLOBBERS (expr); link; ++i, link = link_next)
6512 {
6513 link_next = TREE_CHAIN (link);
6514 TREE_CHAIN (link) = NULL_TREE;
6515 vec_safe_push (clobbers, link);
6516 }
6517
6518 link_next = NULL_TREE;
6519 for (link = ASM_LABELS (expr); link; ++i, link = link_next)
6520 {
6521 link_next = TREE_CHAIN (link);
6522 TREE_CHAIN (link) = NULL_TREE;
6523 vec_safe_push (labels, link);
6524 }
6525
6526 /* Do not add ASMs with errors to the gimple IL stream. */
6527 if (ret != GS_ERROR)
6528 {
6529 stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
6530 inputs, outputs, clobbers, labels);
6531
6532 gimple_asm_set_volatile (stmt, ASM_VOLATILE_P (expr) || noutputs == 0);
6533 gimple_asm_set_input (stmt, ASM_INPUT_P (expr));
6534 gimple_asm_set_inline (stmt, ASM_INLINE_P (expr));
6535
6536 gimplify_seq_add_stmt (pre_p, stmt);
6537 }
6538
6539 return ret;
6540 }
6541
6542 /* Gimplify a CLEANUP_POINT_EXPR. Currently this works by adding
6543 GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
6544 gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
6545 return to this function.
6546
6547 FIXME should we complexify the prequeue handling instead? Or use flags
6548 for all the cleanups and let the optimizer tighten them up? The current
6549 code seems pretty fragile; it will break on a cleanup within any
6550 non-conditional nesting. But any such nesting would be broken, anyway;
6551 we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
6552 and continues out of it. We can do that at the RTL level, though, so
6553 having an optimizer to tighten up try/finally regions would be a Good
6554 Thing. */
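/* For illustration, a sketch:
   CLEANUP_POINT_EXPR <{ stmt1; WITH_CLEANUP_EXPR <cleanup>; stmt2; }>
   becomes
   stmt1;
   try { stmt2; } finally { cleanup; }
   while a cleanup with no following statements is simply emitted
   inline. */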
6555
6556 static enum gimplify_status
6557 gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
6558 {
6559 gimple_stmt_iterator iter;
6560 gimple_seq body_sequence = NULL;
6561
6562 tree temp = voidify_wrapper_expr (*expr_p, NULL);
6563
6564 /* We only care about the number of conditions between the innermost
6565 CLEANUP_POINT_EXPR and the cleanup. So save and reset the count and
6566 any cleanups collected outside the CLEANUP_POINT_EXPR. */
6567 int old_conds = gimplify_ctxp->conditions;
6568 gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
6569 bool old_in_cleanup_point_expr = gimplify_ctxp->in_cleanup_point_expr;
6570 gimplify_ctxp->conditions = 0;
6571 gimplify_ctxp->conditional_cleanups = NULL;
6572 gimplify_ctxp->in_cleanup_point_expr = true;
6573
6574 gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);
6575
6576 gimplify_ctxp->conditions = old_conds;
6577 gimplify_ctxp->conditional_cleanups = old_cleanups;
6578 gimplify_ctxp->in_cleanup_point_expr = old_in_cleanup_point_expr;
6579
6580 for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
6581 {
6582 gimple *wce = gsi_stmt (iter);
6583
6584 if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
6585 {
6586 if (gsi_one_before_end_p (iter))
6587 {
6588 /* Note that gsi_insert_seq_before and gsi_remove do not
6589 scan operands, unlike some other sequence mutators. */
6590 if (!gimple_wce_cleanup_eh_only (wce))
6591 gsi_insert_seq_before_without_update (&iter,
6592 gimple_wce_cleanup (wce),
6593 GSI_SAME_STMT);
6594 gsi_remove (&iter, true);
6595 break;
6596 }
6597 else
6598 {
6599 gtry *gtry;
6600 gimple_seq seq;
6601 enum gimple_try_flags kind;
6602
6603 if (gimple_wce_cleanup_eh_only (wce))
6604 kind = GIMPLE_TRY_CATCH;
6605 else
6606 kind = GIMPLE_TRY_FINALLY;
6607 seq = gsi_split_seq_after (iter);
6608
6609 gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
6610 /* Do not use gsi_replace here, as it may scan operands.
6611 We want to do a simple structural modification only. */
6612 gsi_set_stmt (&iter, gtry);
6613 iter = gsi_start (gtry->eval);
6614 }
6615 }
6616 else
6617 gsi_next (&iter);
6618 }
6619
6620 gimplify_seq_add_seq (pre_p, body_sequence);
6621 if (temp)
6622 {
6623 *expr_p = temp;
6624 return GS_OK;
6625 }
6626 else
6627 {
6628 *expr_p = NULL;
6629 return GS_ALL_DONE;
6630 }
6631 }
6632
6633 /* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP
6634 is the cleanup action required. EH_ONLY is true if the cleanup should
6635 only be executed if an exception is thrown, not on normal exit.
6636 If FORCE_UNCOND is true, perform the cleanup unconditionally; this is
6637 only valid for clobbers. */
6638
6639 static void
6640 gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p,
6641 bool force_uncond = false)
6642 {
6643 gimple *wce;
6644 gimple_seq cleanup_stmts = NULL;
6645
6646 /* Errors can result in improperly nested cleanups, which results in
6647 confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR. */
6648 if (seen_error ())
6649 return;
6650
6651 if (gimple_conditional_context ())
6652 {
6653 /* If we're in a conditional context, this is more complex. We only
6654 want to run the cleanup if we actually ran the initialization that
6655 necessitates it, but we want to run it after the end of the
6656 conditional context. So we wrap the try/finally around the
6657 condition and use a flag to determine whether or not to actually
6658 run the destructor. Thus
6659
6660 test ? f(A()) : 0
6661
6662 becomes (approximately)
6663
6664 flag = 0;
6665 try {
6666 if (test) { A::A(temp); flag = 1; val = f(temp); }
6667 else { val = 0; }
6668 } finally {
6669 if (flag) A::~A(temp);
6670 }
6671 val
6672 */
6673 if (force_uncond)
6674 {
6675 gimplify_stmt (&cleanup, &cleanup_stmts);
6676 wce = gimple_build_wce (cleanup_stmts);
6677 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
6678 }
6679 else
6680 {
6681 tree flag = create_tmp_var (boolean_type_node, "cleanup");
6682 gassign *ffalse = gimple_build_assign (flag, boolean_false_node);
6683 gassign *ftrue = gimple_build_assign (flag, boolean_true_node);
6684
6685 cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
6686 gimplify_stmt (&cleanup, &cleanup_stmts);
6687 wce = gimple_build_wce (cleanup_stmts);
6688
6689 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
6690 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
6691 gimplify_seq_add_stmt (pre_p, ftrue);
6692
6693 /* Because of this manipulation, and the EH edges that jump
6694 threading cannot redirect, the temporary (VAR) will appear
6695 to be used uninitialized. Don't warn. */
6696 TREE_NO_WARNING (var) = 1;
6697 }
6698 }
6699 else
6700 {
6701 gimplify_stmt (&cleanup, &cleanup_stmts);
6702 wce = gimple_build_wce (cleanup_stmts);
6703 gimple_wce_set_cleanup_eh_only (wce, eh_only);
6704 gimplify_seq_add_stmt (pre_p, wce);
6705 }
6706 }
6707
6708 /* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */
6709
6710 static enum gimplify_status
6711 gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6712 {
6713 tree targ = *expr_p;
6714 tree temp = TARGET_EXPR_SLOT (targ);
6715 tree init = TARGET_EXPR_INITIAL (targ);
6716 enum gimplify_status ret;
6717
6718 bool unpoison_empty_seq = false;
6719 gimple_stmt_iterator unpoison_it;
6720
6721 if (init)
6722 {
6723 tree cleanup = NULL_TREE;
6724
6725 /* A TARGET_EXPR temp isn't part of the enclosing block, so add it
6726 to the temps list. Also handle variable-length TARGET_EXPRs. */
6727 if (!poly_int_tree_p (DECL_SIZE (temp)))
6728 {
6729 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
6730 gimplify_type_sizes (TREE_TYPE (temp), pre_p);
6731 gimplify_vla_decl (temp, pre_p);
6732 }
6733 else
6734 {
6735 /* Save the location where we need to place the unpoisoning. It's
6736 possible that the variable will later become needs_to_live_in_memory. */
6737 unpoison_it = gsi_last (*pre_p);
6738 unpoison_empty_seq = gsi_end_p (unpoison_it);
6739
6740 gimple_add_tmp_var (temp);
6741 }
6742
6743 /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
6744 expression is supposed to initialize the slot. */
6745 if (VOID_TYPE_P (TREE_TYPE (init)))
6746 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
6747 else
6748 {
6749 tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
6750 init = init_expr;
6751 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
6752 init = NULL;
6753 ggc_free (init_expr);
6754 }
6755 if (ret == GS_ERROR)
6756 {
6757 /* PR c++/28266 Make sure this is expanded only once. */
6758 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
6759 return GS_ERROR;
6760 }
6761 if (init)
6762 gimplify_and_add (init, pre_p);
6763
6764 /* If needed, push the cleanup for the temp. */
6765 if (TARGET_EXPR_CLEANUP (targ))
6766 {
6767 if (CLEANUP_EH_ONLY (targ))
6768 gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
6769 CLEANUP_EH_ONLY (targ), pre_p);
6770 else
6771 cleanup = TARGET_EXPR_CLEANUP (targ);
6772 }
6773
6774 /* Add a clobber for the temporary going out of scope, like
6775 gimplify_bind_expr. */
6776 if (gimplify_ctxp->in_cleanup_point_expr
6777 && needs_to_live_in_memory (temp))
6778 {
6779 if (flag_stack_reuse == SR_ALL)
6780 {
6781 tree clobber = build_clobber (TREE_TYPE (temp));
6782 clobber = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, clobber);
6783 gimple_push_cleanup (temp, clobber, false, pre_p, true);
6784 }
6785 if (asan_poisoned_variables
6786 && DECL_ALIGN (temp) <= MAX_SUPPORTED_STACK_ALIGNMENT
6787 && !TREE_STATIC (temp)
6788 && dbg_cnt (asan_use_after_scope)
6789 && !gimplify_omp_ctxp)
6790 {
6791 tree asan_cleanup = build_asan_poison_call_expr (temp);
6792 if (asan_cleanup)
6793 {
6794 if (unpoison_empty_seq)
6795 unpoison_it = gsi_start (*pre_p);
6796
6797 asan_poison_variable (temp, false, &unpoison_it,
6798 unpoison_empty_seq);
6799 gimple_push_cleanup (temp, asan_cleanup, false, pre_p);
6800 }
6801 }
6802 }
6803 if (cleanup)
6804 gimple_push_cleanup (temp, cleanup, false, pre_p);
6805
6806 /* Only expand this once. */
6807 TREE_OPERAND (targ, 3) = init;
6808 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
6809 }
6810 else
6811 /* We should have expanded this before. */
6812 gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));
6813
6814 *expr_p = temp;
6815 return GS_OK;
6816 }
6817
6818 /* Gimplification of expression trees. */
6819
6820 /* Gimplify an expression which appears in statement context. The
6821 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is
6822 NULL, a new sequence is allocated.
6823
6824 Return true if we actually added a statement to the queue. */
6825
6826 bool
6827 gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
6828 {
6829 gimple_seq_node last;
6830
6831 last = gimple_seq_last (*seq_p);
6832 gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
6833 return last != gimple_seq_last (*seq_p);
6834 }
6835
6836 /* Add FIRSTPRIVATE entries for DECL to CTX and the surrounding OpenMP
6837 parallels. If entries already exist, force them to be some flavor of private.
6838 If there is no enclosing parallel, do nothing. */
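/* For illustration: if DECL is the gimplified size of a VLA referenced
   inside "#pragma omp parallel", it is entered here as firstprivate so
   that each thread receives the value computed outside the region; an
   existing SHARED entry is likewise forced to FIRSTPRIVATE below
   (a sketch). */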
6839
6840 void
6841 omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
6842 {
6843 splay_tree_node n;
6844
6845 if (decl == NULL || !DECL_P (decl) || ctx->region_type == ORT_NONE)
6846 return;
6847
6848 do
6849 {
6850 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6851 if (n != NULL)
6852 {
6853 if (n->value & GOVD_SHARED)
6854 n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
6855 else if (n->value & GOVD_MAP)
6856 n->value |= GOVD_MAP_TO_ONLY;
6857 else
6858 return;
6859 }
6860 else if ((ctx->region_type & ORT_TARGET) != 0)
6861 {
6862 if (ctx->defaultmap[GDMK_SCALAR] & GOVD_FIRSTPRIVATE)
6863 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
6864 else
6865 omp_add_variable (ctx, decl, GOVD_MAP | GOVD_MAP_TO_ONLY);
6866 }
6867 else if (ctx->region_type != ORT_WORKSHARE
6868 && ctx->region_type != ORT_TASKGROUP
6869 && ctx->region_type != ORT_SIMD
6870 && ctx->region_type != ORT_ACC
6871 && !(ctx->region_type & ORT_TARGET_DATA))
6872 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
6873
6874 ctx = ctx->outer_context;
6875 }
6876 while (ctx);
6877 }
6878
6879 /* Similarly for each of the type sizes of TYPE. */
6880
6881 static void
6882 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
6883 {
6884 if (type == NULL || type == error_mark_node)
6885 return;
6886 type = TYPE_MAIN_VARIANT (type);
6887
6888 if (ctx->privatized_types->add (type))
6889 return;
6890
6891 switch (TREE_CODE (type))
6892 {
6893 case INTEGER_TYPE:
6894 case ENUMERAL_TYPE:
6895 case BOOLEAN_TYPE:
6896 case REAL_TYPE:
6897 case FIXED_POINT_TYPE:
6898 omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
6899 omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
6900 break;
6901
6902 case ARRAY_TYPE:
6903 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
6904 omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
6905 break;
6906
6907 case RECORD_TYPE:
6908 case UNION_TYPE:
6909 case QUAL_UNION_TYPE:
6910 {
6911 tree field;
6912 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
6913 if (TREE_CODE (field) == FIELD_DECL)
6914 {
6915 omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
6916 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
6917 }
6918 }
6919 break;
6920
6921 case POINTER_TYPE:
6922 case REFERENCE_TYPE:
6923 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
6924 break;
6925
6926 default:
6927 break;
6928 }
6929
6930 omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
6931 omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
6932 lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
6933 }
6934
6935 /* Add an entry for DECL in the OMP context CTX with FLAGS. */
6936
6937 static void
6938 omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
6939 {
6940 splay_tree_node n;
6941 unsigned int nflags;
6942 tree t;
6943
6944 if (error_operand_p (decl) || ctx->region_type == ORT_NONE)
6945 return;
6946
6947 /* Never elide decls whose type has TREE_ADDRESSABLE set. This means
6948 there are constructors involved somewhere. The exception is a shared
6949 clause; there is nothing privatized in that case. */
6950 if ((flags & GOVD_SHARED) == 0
6951 && (TREE_ADDRESSABLE (TREE_TYPE (decl))
6952 || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl))))
6953 flags |= GOVD_SEEN;
6954
6955 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6956 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
6957 {
6958 /* We shouldn't be re-adding the decl with the same data
6959 sharing class. */
6960 gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
6961 nflags = n->value | flags;
6962 /* The only combination of data sharing classes we should see is
6963 FIRSTPRIVATE and LASTPRIVATE. However, OpenACC permits
6964 reduction variables to be used in data sharing clauses. */
6965 gcc_assert ((ctx->region_type & ORT_ACC) != 0
6966 || ((nflags & GOVD_DATA_SHARE_CLASS)
6967 == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE))
6968 || (flags & GOVD_DATA_SHARE_CLASS) == 0);
6969 n->value = nflags;
6970 return;
6971 }
6972
6973 /* When adding a variable-sized variable, we have to handle all sorts
6974 of additional bits of data: the pointer replacement variable, and
6975 the parameters of the type. */
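/* For illustration: a VLA "int a[n];" has a DECL_VALUE_EXPR of the
   form *a.ptr (a sketch; the pointer's name is invented), and it is
   that pointer replacement variable, together with the gimplified
   size expressions, that gets privatized below. */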
6976 if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
6977 {
6978 /* Add the pointer replacement variable as PRIVATE if the variable
6979 replacement is private, else FIRSTPRIVATE since we'll need the
6980 address of the original variable either for SHARED, or for the
6981 copy into or out of the context. */
6982 if (!(flags & GOVD_LOCAL) && ctx->region_type != ORT_TASKGROUP)
6983 {
6984 if (flags & GOVD_MAP)
6985 nflags = GOVD_MAP | GOVD_MAP_TO_ONLY | GOVD_EXPLICIT;
6986 else if (flags & GOVD_PRIVATE)
6987 nflags = GOVD_PRIVATE;
6988 else if (((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
6989 && (flags & GOVD_FIRSTPRIVATE))
6990 || (ctx->region_type == ORT_TARGET_DATA
6991 && (flags & GOVD_DATA_SHARE_CLASS) == 0))
6992 nflags = GOVD_PRIVATE | GOVD_EXPLICIT;
6993 else
6994 nflags = GOVD_FIRSTPRIVATE;
6995 nflags |= flags & GOVD_SEEN;
6996 t = DECL_VALUE_EXPR (decl);
6997 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
6998 t = TREE_OPERAND (t, 0);
6999 gcc_assert (DECL_P (t));
7000 omp_add_variable (ctx, t, nflags);
7001 }
7002
7003 /* Add all of the variable and type parameters (which should have
7004 been gimplified to a formal temporary) as FIRSTPRIVATE. */
7005 omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
7006 omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
7007 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
7008
7009 /* The variable-sized variable itself is never SHARED, only some form
7010 of PRIVATE. The sharing would take place via the pointer variable
7011 which we remapped above. */
7012 if (flags & GOVD_SHARED)
7013 flags = GOVD_SHARED | GOVD_DEBUG_PRIVATE
7014 | (flags & (GOVD_SEEN | GOVD_EXPLICIT));
7015
7016 /* We're going to make use of the TYPE_SIZE_UNIT at least in the
7017 alloca statement we generate for the variable, so make sure it
7018 is available. This isn't automatically needed for the SHARED
7019 case, since we won't be allocating local storage then.
7020 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
7021 in this case omp_notice_variable will be called later
7022 on when it is gimplified. */
7023 else if (! (flags & (GOVD_LOCAL | GOVD_MAP))
7024 && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl))))
7025 omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
7026 }
7027 else if ((flags & (GOVD_MAP | GOVD_LOCAL)) == 0
7028 && lang_hooks.decls.omp_privatize_by_reference (decl))
7029 {
7030 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
7031
7032 /* Similar to the direct variable-sized case above, we'll need the
7033 size of references being privatized. */
7034 if ((flags & GOVD_SHARED) == 0)
7035 {
7036 t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
7037 if (DECL_P (t))
7038 omp_notice_variable (ctx, t, true);
7039 }
7040 }
7041
7042 if (n != NULL)
7043 n->value |= flags;
7044 else
7045 splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
7046
7047 /* For reduction clauses in OpenACC loop directives, by default create a
7048 copy clause on the enclosing parallel construct for carrying back the
7049 results. */
7050 if (ctx->region_type == ORT_ACC && (flags & GOVD_REDUCTION))
7051 {
7052 struct gimplify_omp_ctx *outer_ctx = ctx->outer_context;
7053 while (outer_ctx)
7054 {
7055 n = splay_tree_lookup (outer_ctx->variables, (splay_tree_key)decl);
7056 if (n != NULL)
7057 {
7058 /* Ignore local variables and explicitly declared clauses. */
7059 if (n->value & (GOVD_LOCAL | GOVD_EXPLICIT))
7060 break;
7061 else if (outer_ctx->region_type == ORT_ACC_KERNELS)
7062 {
7063 /* According to the OpenACC spec, such a reduction variable
7064 should already have a copy map on a kernels construct;
7065 verify that here. */
7066 gcc_assert (!(n->value & GOVD_FIRSTPRIVATE)
7067 && (n->value & GOVD_MAP));
7068 }
7069 else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
7070 {
7071 /* Remove firstprivate and make it a copy map. */
7072 n->value &= ~GOVD_FIRSTPRIVATE;
7073 n->value |= GOVD_MAP;
7074 }
7075 }
7076 else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
7077 {
7078 splay_tree_insert (outer_ctx->variables, (splay_tree_key)decl,
7079 GOVD_MAP | GOVD_SEEN);
7080 break;
7081 }
7082 outer_ctx = outer_ctx->outer_context;
7083 }
7084 }
7085 }
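/* For illustration of the OpenACC reduction handling just above: given

     #pragma acc parallel
     #pragma acc loop reduction (+:sum)

   the reduction variable 'sum' receives an implicit GOVD_MAP (copy-like)
   clause on the enclosing parallel construct so the result is carried
   back. */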
7086
7087 /* Notice a threadprivate variable DECL used in OMP context CTX.
7088 This just prints out diagnostics about threadprivate variable uses
7089 in untied tasks, target regions and order(concurrent) regions.
7090 If DECL2 is non-NULL, prevent this warning on that variable. */
7091
7092 static bool
7093 omp_notice_threadprivate_variable (struct gimplify_omp_ctx *ctx, tree decl,
7094 tree decl2)
7095 {
7096 splay_tree_node n;
7097 struct gimplify_omp_ctx *octx;
7098
7099 for (octx = ctx; octx; octx = octx->outer_context)
7100 if ((octx->region_type & ORT_TARGET) != 0
7101 || octx->order_concurrent)
7102 {
7103 n = splay_tree_lookup (octx->variables, (splay_tree_key)decl);
7104 if (n == NULL)
7105 {
7106 if (octx->order_concurrent)
7107 {
7108 error ("threadprivate variable %qE used in a region with"
7109 " %<order(concurrent)%> clause", DECL_NAME (decl));
7110 inform (octx->location, "enclosing region");
7111 }
7112 else
7113 {
7114 error ("threadprivate variable %qE used in target region",
7115 DECL_NAME (decl));
7116 inform (octx->location, "enclosing target region");
7117 }
7118 splay_tree_insert (octx->variables, (splay_tree_key)decl, 0);
7119 }
7120 if (decl2)
7121 splay_tree_insert (octx->variables, (splay_tree_key)decl2, 0);
7122 }
7123
7124 if (ctx->region_type != ORT_UNTIED_TASK)
7125 return false;
7126 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7127 if (n == NULL)
7128 {
7129 error ("threadprivate variable %qE used in untied task",
7130 DECL_NAME (decl));
7131 inform (ctx->location, "enclosing task");
7132 splay_tree_insert (ctx->variables, (splay_tree_key)decl, 0);
7133 }
7134 if (decl2)
7135 splay_tree_insert (ctx->variables, (splay_tree_key)decl2, 0);
7136 return false;
7137 }
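/* A sketch of code that triggers the untied-task diagnostic above:

     int v;
     #pragma omp threadprivate (v)
     ...
     #pragma omp task untied
     v++;  // error: threadprivate variable 'v' used in untied task
*/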
7138
7139 /* Return true if global var DECL is device resident. */
7140
7141 static bool
7142 device_resident_p (tree decl)
7143 {
7144 tree attr = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (decl));
7145
7146 if (!attr)
7147 return false;
7148
7149 for (tree t = TREE_VALUE (attr); t; t = TREE_PURPOSE (t))
7150 {
7151 tree c = TREE_VALUE (t);
7152 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DEVICE_RESIDENT)
7153 return true;
7154 }
7155
7156 return false;
7157 }
7158
7159 /* Return true if DECL has an ACC DECLARE attribute. */
7160
7161 static bool
7162 is_oacc_declared (tree decl)
7163 {
7164 tree t = TREE_CODE (decl) == MEM_REF ? TREE_OPERAND (decl, 0) : decl;
7165 tree declared = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (t));
7166 return declared != NULL_TREE;
7167 }
7168
7169 /* Determine outer default flags for DECL mentioned in an OMP region
7170 but not declared in an enclosing clause.
7171
7172 ??? Some compiler-generated variables (like SAVE_EXPRs) could be
7173 remapped firstprivate instead of shared. To some extent this is
7174 addressed in omp_firstprivatize_type_sizes, but not
7175 effectively. */
7176
7177 static unsigned
7178 omp_default_clause (struct gimplify_omp_ctx *ctx, tree decl,
7179 bool in_code, unsigned flags)
7180 {
7181 enum omp_clause_default_kind default_kind = ctx->default_kind;
7182 enum omp_clause_default_kind kind;
7183
7184 kind = lang_hooks.decls.omp_predetermined_sharing (decl);
7185 if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
7186 default_kind = kind;
7187 else if (VAR_P (decl) && TREE_STATIC (decl) && DECL_IN_CONSTANT_POOL (decl))
7188 default_kind = OMP_CLAUSE_DEFAULT_SHARED;
7189
7190 switch (default_kind)
7191 {
7192 case OMP_CLAUSE_DEFAULT_NONE:
7193 {
7194 const char *rtype;
7195
7196 if (ctx->region_type & ORT_PARALLEL)
7197 rtype = "parallel";
7198 else if ((ctx->region_type & ORT_TASKLOOP) == ORT_TASKLOOP)
7199 rtype = "taskloop";
7200 else if (ctx->region_type & ORT_TASK)
7201 rtype = "task";
7202 else if (ctx->region_type & ORT_TEAMS)
7203 rtype = "teams";
7204 else
7205 gcc_unreachable ();
7206
7207 error ("%qE not specified in enclosing %qs",
7208 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rtype);
7209 inform (ctx->location, "enclosing %qs", rtype);
7210 }
7211 /* FALLTHRU */
7212 case OMP_CLAUSE_DEFAULT_SHARED:
7213 flags |= GOVD_SHARED;
7214 break;
7215 case OMP_CLAUSE_DEFAULT_PRIVATE:
7216 flags |= GOVD_PRIVATE;
7217 break;
7218 case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
7219 flags |= GOVD_FIRSTPRIVATE;
7220 break;
7221 case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
7222 /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED. */
7223 gcc_assert ((ctx->region_type & ORT_TASK) != 0);
7224 if (struct gimplify_omp_ctx *octx = ctx->outer_context)
7225 {
7226 omp_notice_variable (octx, decl, in_code);
7227 for (; octx; octx = octx->outer_context)
7228 {
7229 splay_tree_node n2;
7230
7231 n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
7232 if ((octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)) != 0
7233 && (n2 == NULL || (n2->value & GOVD_DATA_SHARE_CLASS) == 0))
7234 continue;
7235 if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
7236 {
7237 flags |= GOVD_FIRSTPRIVATE;
7238 goto found_outer;
7239 }
7240 if ((octx->region_type & (ORT_PARALLEL | ORT_TEAMS)) != 0)
7241 {
7242 flags |= GOVD_SHARED;
7243 goto found_outer;
7244 }
7245 }
7246 }
7247
7248 if (TREE_CODE (decl) == PARM_DECL
7249 || (!is_global_var (decl)
7250 && DECL_CONTEXT (decl) == current_function_decl))
7251 flags |= GOVD_FIRSTPRIVATE;
7252 else
7253 flags |= GOVD_SHARED;
7254 found_outer:
7255 break;
7256
7257 default:
7258 gcc_unreachable ();
7259 }
7260
7261 return flags;
7262 }
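/* E.g. the default(none) handling above diagnoses:

     int x = 0;
     #pragma omp parallel default(none)
     x++;  // error: 'x' not specified in enclosing 'parallel'
*/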
7263
7264
7265 /* Determine outer default flags for DECL mentioned in an OACC region
7266 but not declared in an enclosing clause. */
7267
7268 static unsigned
7269 oacc_default_clause (struct gimplify_omp_ctx *ctx, tree decl, unsigned flags)
7270 {
7271 const char *rkind;
7272 bool on_device = false;
7273 bool is_private = false;
7274 bool declared = is_oacc_declared (decl);
7275 tree type = TREE_TYPE (decl);
7276
7277 if (lang_hooks.decls.omp_privatize_by_reference (decl))
7278 type = TREE_TYPE (type);
7279
7280 /* For Fortran COMMON blocks, only used variables in those blocks are
7281 transferred and remapped. The block itself will have a private clause to
7282 avoid transferring the data twice.
7283 The hook evaluates to false by default. For a variable in Fortran's COMMON
7284 or EQUIVALENCE block, it returns 'true' (as we have shared=false) - as only
7285 the variables in such a COMMON/EQUIVALENCE block shall be privatized, not
7286 the whole block. For C++ and Fortran, it can also be true under certain
7287 other conditions, if DECL_HAS_VALUE_EXPR is set. */
7288 if (RECORD_OR_UNION_TYPE_P (type))
7289 is_private = lang_hooks.decls.omp_disregard_value_expr (decl, false);
7290
7291 if ((ctx->region_type & (ORT_ACC_PARALLEL | ORT_ACC_KERNELS)) != 0
7292 && is_global_var (decl)
7293 && device_resident_p (decl)
7294 && !is_private)
7295 {
7296 on_device = true;
7297 flags |= GOVD_MAP_TO_ONLY;
7298 }
7299
7300 switch (ctx->region_type)
7301 {
7302 case ORT_ACC_KERNELS:
7303 rkind = "kernels";
7304
7305 if (is_private)
7306 flags |= GOVD_FIRSTPRIVATE;
7307 else if (AGGREGATE_TYPE_P (type))
7308 {
7309 /* Aggregates default to 'present_or_copy', or 'present'. */
7310 if (ctx->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
7311 flags |= GOVD_MAP;
7312 else
7313 flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
7314 }
7315 else
7316 /* Scalars default to 'copy'. */
7317 flags |= GOVD_MAP | GOVD_MAP_FORCE;
7318
7319 break;
7320
7321 case ORT_ACC_PARALLEL:
7322 case ORT_ACC_SERIAL:
7323 rkind = ctx->region_type == ORT_ACC_PARALLEL ? "parallel" : "serial";
7324
7325 if (is_private)
7326 flags |= GOVD_FIRSTPRIVATE;
7327 else if (on_device || declared)
7328 flags |= GOVD_MAP;
7329 else if (AGGREGATE_TYPE_P (type))
7330 {
7331 /* Aggregates default to 'present_or_copy', or 'present'. */
7332 if (ctx->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
7333 flags |= GOVD_MAP;
7334 else
7335 flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
7336 }
7337 else
7338 /* Scalars default to 'firstprivate'. */
7339 flags |= GOVD_FIRSTPRIVATE;
7340
7341 break;
7342
7343 default:
7344 gcc_unreachable ();
7345 }
7346
7347 if (DECL_ARTIFICIAL (decl))
7348 ; /* We can get compiler-generated decls, and should not complain
7349 about them. */
7350 else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_NONE)
7351 {
7352 error ("%qE not specified in enclosing OpenACC %qs construct",
7353 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rkind);
7354 inform (ctx->location, "enclosing OpenACC %qs construct", rkind);
7355 }
7356 else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_PRESENT)
7357 ; /* Handled above. */
7358 else
7359 gcc_checking_assert (ctx->default_kind == OMP_CLAUSE_DEFAULT_SHARED);
7360
7361 return flags;
7362 }
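/* Summary of the defaults chosen above (absent default(none) or
   default(present)): on 'kernels', aggregates get a 'present_or_copy'
   map and scalars a forced 'copy' map; on 'parallel' and 'serial',
   aggregates likewise get a 'present_or_copy' map while scalars default
   to 'firstprivate'. */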
7363
7364 /* Record the fact that DECL was used within the OMP context CTX.
7365 IN_CODE is true when real code uses DECL, and false when we should
7366 merely emit default(none) errors. Return true if DECL is going to
7367 be remapped and thus DECL shouldn't be gimplified into its
7368 DECL_VALUE_EXPR (if any). */
7369
7370 static bool
7371 omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
7372 {
7373 splay_tree_node n;
7374 unsigned flags = in_code ? GOVD_SEEN : 0;
7375 bool ret = false, shared;
7376
7377 if (error_operand_p (decl))
7378 return false;
7379
7380 if (ctx->region_type == ORT_NONE)
7381 return lang_hooks.decls.omp_disregard_value_expr (decl, false);
7382
7383 if (is_global_var (decl))
7384 {
7385 /* Threadprivate variables are predetermined. */
7386 if (DECL_THREAD_LOCAL_P (decl))
7387 return omp_notice_threadprivate_variable (ctx, decl, NULL_TREE);
7388
7389 if (DECL_HAS_VALUE_EXPR_P (decl))
7390 {
7391 if (ctx->region_type & ORT_ACC)
7392 /* For OpenACC, defer expansion of the value to avoid transferring
7393 privatized common block data instead of the im-/explicitly transferred
7394 variables which are in common blocks. */
7395 ;
7396 else
7397 {
7398 tree value = get_base_address (DECL_VALUE_EXPR (decl));
7399
7400 if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
7401 return omp_notice_threadprivate_variable (ctx, decl, value);
7402 }
7403 }
7404
7405 if (gimplify_omp_ctxp->outer_context == NULL
7406 && VAR_P (decl)
7407 && oacc_get_fn_attrib (current_function_decl))
7408 {
7409 location_t loc = DECL_SOURCE_LOCATION (decl);
7410
7411 if (lookup_attribute ("omp declare target link",
7412 DECL_ATTRIBUTES (decl)))
7413 {
7414 error_at (loc,
7415 "%qE with %<link%> clause used in %<routine%> function",
7416 DECL_NAME (decl));
7417 return false;
7418 }
7419 else if (!lookup_attribute ("omp declare target",
7420 DECL_ATTRIBUTES (decl)))
7421 {
7422 error_at (loc,
7423 "%qE requires a %<declare%> directive for use "
7424 "in a %<routine%> function", DECL_NAME (decl));
7425 return false;
7426 }
7427 }
7428 }
7429
7430 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7431 if ((ctx->region_type & ORT_TARGET) != 0)
7432 {
7433 if (ctx->region_type & ORT_ACC)
7434 /* For OpenACC, as remarked above, defer expansion. */
7435 shared = false;
7436 else
7437 shared = true;
7438
7439 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
7440 if (n == NULL)
7441 {
7442 unsigned nflags = flags;
7443 if ((ctx->region_type & ORT_ACC) == 0)
7444 {
7445 bool is_declare_target = false;
7446 if (is_global_var (decl)
7447 && varpool_node::get_create (decl)->offloadable)
7448 {
7449 struct gimplify_omp_ctx *octx;
7450 for (octx = ctx->outer_context;
7451 octx; octx = octx->outer_context)
7452 {
7453 n = splay_tree_lookup (octx->variables,
7454 (splay_tree_key)decl);
7455 if (n
7456 && (n->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED
7457 && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
7458 break;
7459 }
7460 is_declare_target = octx == NULL;
7461 }
7462 if (!is_declare_target)
7463 {
7464 int gdmk;
7465 enum omp_clause_defaultmap_kind kind;
7466 if (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
7467 || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
7468 && (TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
7469 == POINTER_TYPE)))
7470 gdmk = GDMK_POINTER;
7471 else if (lang_hooks.decls.omp_scalar_p (decl))
7472 gdmk = GDMK_SCALAR;
7473 else
7474 gdmk = GDMK_AGGREGATE;
7475 kind = lang_hooks.decls.omp_predetermined_mapping (decl);
7476 if (kind != OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED)
7477 {
7478 if (kind == OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE)
7479 nflags |= GOVD_FIRSTPRIVATE;
7480 else if (kind == OMP_CLAUSE_DEFAULTMAP_TO)
7481 nflags |= GOVD_MAP | GOVD_MAP_TO_ONLY;
7482 else
7483 gcc_unreachable ();
7484 }
7485 else if (ctx->defaultmap[gdmk] == 0)
7486 {
7487 tree d = lang_hooks.decls.omp_report_decl (decl);
7488 error ("%qE not specified in enclosing %<target%>",
7489 DECL_NAME (d));
7490 inform (ctx->location, "enclosing %<target%>");
7491 }
7492 else if (ctx->defaultmap[gdmk]
7493 & (GOVD_MAP_0LEN_ARRAY | GOVD_FIRSTPRIVATE))
7494 nflags |= ctx->defaultmap[gdmk];
7495 else
7496 {
7497 gcc_assert (ctx->defaultmap[gdmk] & GOVD_MAP);
7498 nflags |= ctx->defaultmap[gdmk] & ~GOVD_MAP;
7499 }
7500 }
7501 }
7502
7503 struct gimplify_omp_ctx *octx = ctx->outer_context;
7504 if ((ctx->region_type & ORT_ACC) && octx)
7505 {
7506 /* Look in outer OpenACC contexts to see if there's a
7507 data attribute for this variable. */
7508 omp_notice_variable (octx, decl, in_code);
7509
7510 for (; octx; octx = octx->outer_context)
7511 {
7512 if (!(octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)))
7513 break;
7514 splay_tree_node n2
7515 = splay_tree_lookup (octx->variables,
7516 (splay_tree_key) decl);
7517 if (n2)
7518 {
7519 if (octx->region_type == ORT_ACC_HOST_DATA)
7520 error ("variable %qE declared in enclosing "
7521 "%<host_data%> region", DECL_NAME (decl));
7522 nflags |= GOVD_MAP;
7523 if (octx->region_type == ORT_ACC_DATA
7524 && (n2->value & GOVD_MAP_0LEN_ARRAY))
7525 nflags |= GOVD_MAP_0LEN_ARRAY;
7526 goto found_outer;
7527 }
7528 }
7529 }
7530
7531 if ((nflags & ~(GOVD_MAP_TO_ONLY | GOVD_MAP_FROM_ONLY
7532 | GOVD_MAP_ALLOC_ONLY)) == flags)
7533 {
7534 tree type = TREE_TYPE (decl);
7535
7536 if (gimplify_omp_ctxp->target_firstprivatize_array_bases
7537 && lang_hooks.decls.omp_privatize_by_reference (decl))
7538 type = TREE_TYPE (type);
7539 if (!lang_hooks.types.omp_mappable_type (type))
7540 {
7541 error ("%qD referenced in target region does not have "
7542 "a mappable type", decl);
7543 nflags |= GOVD_MAP | GOVD_EXPLICIT;
7544 }
7545 else
7546 {
7547 if ((ctx->region_type & ORT_ACC) != 0)
7548 nflags = oacc_default_clause (ctx, decl, flags);
7549 else
7550 nflags |= GOVD_MAP;
7551 }
7552 }
7553 found_outer:
7554 omp_add_variable (ctx, decl, nflags);
7555 }
7556 else
7557 {
7558 /* If nothing changed, there's nothing left to do. */
7559 if ((n->value & flags) == flags)
7560 return ret;
7561 flags |= n->value;
7562 n->value = flags;
7563 }
7564 goto do_outer;
7565 }
7566
7567 if (n == NULL)
7568 {
7569 if (ctx->region_type == ORT_WORKSHARE
7570 || ctx->region_type == ORT_TASKGROUP
7571 || ctx->region_type == ORT_SIMD
7572 || ctx->region_type == ORT_ACC
7573 || (ctx->region_type & ORT_TARGET_DATA) != 0)
7574 goto do_outer;
7575
7576 flags = omp_default_clause (ctx, decl, in_code, flags);
7577
7578 if ((flags & GOVD_PRIVATE)
7579 && lang_hooks.decls.omp_private_outer_ref (decl))
7580 flags |= GOVD_PRIVATE_OUTER_REF;
7581
7582 omp_add_variable (ctx, decl, flags);
7583
7584 shared = (flags & GOVD_SHARED) != 0;
7585 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
7586 goto do_outer;
7587 }
7588
7589 if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
7590 && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
7591 && DECL_SIZE (decl))
7592 {
7593 if (TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
7594 {
7595 splay_tree_node n2;
7596 tree t = DECL_VALUE_EXPR (decl);
7597 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
7598 t = TREE_OPERAND (t, 0);
7599 gcc_assert (DECL_P (t));
7600 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
7601 n2->value |= GOVD_SEEN;
7602 }
7603 else if (lang_hooks.decls.omp_privatize_by_reference (decl)
7604 && TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)))
7605 && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))))
7606 != INTEGER_CST))
7607 {
7608 splay_tree_node n2;
7609 tree t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
7610 gcc_assert (DECL_P (t));
7611 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
7612 if (n2)
7613 omp_notice_variable (ctx, t, true);
7614 }
7615 }
7616
7617 if (ctx->region_type & ORT_ACC)
7618 /* For OpenACC, as remarked above, defer expansion. */
7619 shared = false;
7620 else
7621 shared = ((flags | n->value) & GOVD_SHARED) != 0;
7622 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
7623
7624 /* If nothing changed, there's nothing left to do. */
7625 if ((n->value & flags) == flags)
7626 return ret;
7627 flags |= n->value;
7628 n->value = flags;
7629
7630 do_outer:
7631 /* If the variable is private in the current context, then we don't
7632 need to propagate anything to an outer context. */
7633 if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
7634 return ret;
7635 if ((flags & (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7636 == (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7637 return ret;
7638 if ((flags & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
7639 | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7640 == (GOVD_LASTPRIVATE | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7641 return ret;
7642 if (ctx->outer_context
7643 && omp_notice_variable (ctx->outer_context, decl, in_code))
7644 return true;
7645 return ret;
7646 }
7647
7648 /* Verify that DECL is private within CTX. If there's specific information
7649 to the contrary in the innermost scope, generate an error. */
7650
7651 static bool
7652 omp_is_private (struct gimplify_omp_ctx *ctx, tree decl, int simd)
7653 {
7654 splay_tree_node n;
7655
7656 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7657 if (n != NULL)
7658 {
7659 if (n->value & GOVD_SHARED)
7660 {
7661 if (ctx == gimplify_omp_ctxp)
7662 {
7663 if (simd)
7664 error ("iteration variable %qE is predetermined linear",
7665 DECL_NAME (decl));
7666 else
7667 error ("iteration variable %qE should be private",
7668 DECL_NAME (decl));
7669 n->value = GOVD_PRIVATE;
7670 return true;
7671 }
7672 else
7673 return false;
7674 }
7675 else if ((n->value & GOVD_EXPLICIT) != 0
7676 && (ctx == gimplify_omp_ctxp
7677 || (ctx->region_type == ORT_COMBINED_PARALLEL
7678 && gimplify_omp_ctxp->outer_context == ctx)))
7679 {
7680 if ((n->value & GOVD_FIRSTPRIVATE) != 0)
7681 error ("iteration variable %qE should not be firstprivate",
7682 DECL_NAME (decl));
7683 else if ((n->value & GOVD_REDUCTION) != 0)
7684 error ("iteration variable %qE should not be reduction",
7685 DECL_NAME (decl));
7686 else if (simd != 1 && (n->value & GOVD_LINEAR) != 0)
7687 error ("iteration variable %qE should not be linear",
7688 DECL_NAME (decl));
7689 }
7690 return (ctx == gimplify_omp_ctxp
7691 || (ctx->region_type == ORT_COMBINED_PARALLEL
7692 && gimplify_omp_ctxp->outer_context == ctx));
7693 }
7694
7695 if (ctx->region_type != ORT_WORKSHARE
7696 && ctx->region_type != ORT_TASKGROUP
7697 && ctx->region_type != ORT_SIMD
7698 && ctx->region_type != ORT_ACC)
7699 return false;
7700 else if (ctx->outer_context)
7701 return omp_is_private (ctx->outer_context, decl, simd);
7702 return false;
7703 }
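/* For example, the diagnostics above fire for code such as

     int i;
     #pragma omp parallel for shared (i)
     for (i = 0; i < n; i++)  // error: iteration variable 'i' should
       ;                      // be private
*/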
7704
7705 /* Return true if DECL is private within a parallel region
7706 that binds to the current construct's context or in the parallel
7707 region's REDUCTION clause. */
7708
7709 static bool
7710 omp_check_private (struct gimplify_omp_ctx *ctx, tree decl, bool copyprivate)
7711 {
7712 splay_tree_node n;
7713
7714 do
7715 {
7716 ctx = ctx->outer_context;
7717 if (ctx == NULL)
7718 {
7719 if (is_global_var (decl))
7720 return false;
7721
7722 /* References might be private, but they might be shared too.
7723 When checking for copyprivate, assume they might be
7724 private; otherwise assume they might be shared. */
7725 if (copyprivate)
7726 return true;
7727
7728 if (lang_hooks.decls.omp_privatize_by_reference (decl))
7729 return false;
7730
7731 /* Treat C++ privatized non-static data members outside
7732 of the privatization the same. */
7733 if (omp_member_access_dummy_var (decl))
7734 return false;
7735
7736 return true;
7737 }
7738
7739 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
7740
7741 if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
7742 && (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0))
7743 continue;
7744
7745 if (n != NULL)
7746 {
7747 if ((n->value & GOVD_LOCAL) != 0
7748 && omp_member_access_dummy_var (decl))
7749 return false;
7750 return (n->value & GOVD_SHARED) == 0;
7751 }
7752 }
7753 while (ctx->region_type == ORT_WORKSHARE
7754 || ctx->region_type == ORT_TASKGROUP
7755 || ctx->region_type == ORT_SIMD
7756 || ctx->region_type == ORT_ACC);
7757 return false;
7758 }
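/* E.g. for

     #pragma omp parallel private (x)
     {
       #pragma omp single copyprivate (x)
       x = f ();
     }

   omp_check_private returns true for 'x' in the single's context, as 'x'
   is private in the parallel region the single binds to. */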
7759
7760 /* Callback for walk_tree to find a DECL_EXPR for the given DECL. */
7761
7762 static tree
7763 find_decl_expr (tree *tp, int *walk_subtrees, void *data)
7764 {
7765 tree t = *tp;
7766
7767 /* If this is the DECL_EXPR for the DECL we're looking for, return it. */
7768 if (TREE_CODE (t) == DECL_EXPR && DECL_EXPR_DECL (t) == (tree) data)
7769 return t;
7770
7771 if (IS_TYPE_OR_DECL_P (t))
7772 *walk_subtrees = 0;
7773 return NULL_TREE;
7774 }
7775
7776 /* If *LIST_P contains any OpenMP depend clauses with iterators,
7777 lower all the depend clauses by populating the corresponding depend
7778 array. Returns 0 if there are no such depend clauses, 2 if all
7779 depend clauses should be removed, and 1 otherwise. */
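/* For example, a depend clause using an iterator, such as

     #pragma omp task depend (iterator (i = 0 : n), in : a[i])

   is lowered here into a depend array: loops generated below store the
   addresses &a[0] .. &a[n-1] into the array at run time, and the clause
   list gains a clause pointing at that array. */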
7780
7781 static int
7782 gimplify_omp_depend (tree *list_p, gimple_seq *pre_p)
7783 {
7784 tree c;
7785 gimple *g;
7786 size_t n[4] = { 0, 0, 0, 0 };
7787 bool unused[4];
7788 tree counts[4] = { NULL_TREE, NULL_TREE, NULL_TREE, NULL_TREE };
7789 tree last_iter = NULL_TREE, last_count = NULL_TREE;
7790 size_t i, j;
7791 location_t first_loc = UNKNOWN_LOCATION;
7792
7793 for (c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
7794 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
7795 {
7796 switch (OMP_CLAUSE_DEPEND_KIND (c))
7797 {
7798 case OMP_CLAUSE_DEPEND_IN:
7799 i = 2;
7800 break;
7801 case OMP_CLAUSE_DEPEND_OUT:
7802 case OMP_CLAUSE_DEPEND_INOUT:
7803 i = 0;
7804 break;
7805 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
7806 i = 1;
7807 break;
7808 case OMP_CLAUSE_DEPEND_DEPOBJ:
7809 i = 3;
7810 break;
7811 case OMP_CLAUSE_DEPEND_SOURCE:
7812 case OMP_CLAUSE_DEPEND_SINK:
7813 continue;
7814 default:
7815 gcc_unreachable ();
7816 }
7817 tree t = OMP_CLAUSE_DECL (c);
7818 if (first_loc == UNKNOWN_LOCATION)
7819 first_loc = OMP_CLAUSE_LOCATION (c);
7820 if (TREE_CODE (t) == TREE_LIST
7821 && TREE_PURPOSE (t)
7822 && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
7823 {
7824 if (TREE_PURPOSE (t) != last_iter)
7825 {
7826 tree tcnt = size_one_node;
7827 for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
7828 {
7829 if (gimplify_expr (&TREE_VEC_ELT (it, 1), pre_p, NULL,
7830 is_gimple_val, fb_rvalue) == GS_ERROR
7831 || gimplify_expr (&TREE_VEC_ELT (it, 2), pre_p, NULL,
7832 is_gimple_val, fb_rvalue) == GS_ERROR
7833 || gimplify_expr (&TREE_VEC_ELT (it, 3), pre_p, NULL,
7834 is_gimple_val, fb_rvalue) == GS_ERROR
7835 || (gimplify_expr (&TREE_VEC_ELT (it, 4), pre_p, NULL,
7836 is_gimple_val, fb_rvalue)
7837 == GS_ERROR))
7838 return 2;
7839 tree var = TREE_VEC_ELT (it, 0);
7840 tree begin = TREE_VEC_ELT (it, 1);
7841 tree end = TREE_VEC_ELT (it, 2);
7842 tree step = TREE_VEC_ELT (it, 3);
7843 tree orig_step = TREE_VEC_ELT (it, 4);
7844 tree type = TREE_TYPE (var);
7845 tree stype = TREE_TYPE (step);
7846 location_t loc = DECL_SOURCE_LOCATION (var);
7847 tree endmbegin;
7848 /* Compute count for this iterator as
7849 orig_step > 0
7850 ? (begin < end ? (end - begin + (step - 1)) / step : 0)
7851 : (begin > end ? (end - begin + (step + 1)) / step : 0)
7852 and compute product of those for the entire depend
7853 clause. */
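/* E.g. begin=0, end=10, step=3 with orig_step > 0 gives
   count = (10 - 0 + (3 - 1)) / 3 = 4, matching the
   iterations i = 0, 3, 6, 9. */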
7854 if (POINTER_TYPE_P (type))
7855 endmbegin = fold_build2_loc (loc, POINTER_DIFF_EXPR,
7856 stype, end, begin);
7857 else
7858 endmbegin = fold_build2_loc (loc, MINUS_EXPR, type,
7859 end, begin);
7860 tree stepm1 = fold_build2_loc (loc, MINUS_EXPR, stype,
7861 step,
7862 build_int_cst (stype, 1));
7863 tree stepp1 = fold_build2_loc (loc, PLUS_EXPR, stype, step,
7864 build_int_cst (stype, 1));
7865 tree pos = fold_build2_loc (loc, PLUS_EXPR, stype,
7866 unshare_expr (endmbegin),
7867 stepm1);
7868 pos = fold_build2_loc (loc, TRUNC_DIV_EXPR, stype,
7869 pos, step);
7870 tree neg = fold_build2_loc (loc, PLUS_EXPR, stype,
7871 endmbegin, stepp1);
7872 if (TYPE_UNSIGNED (stype))
7873 {
7874 neg = fold_build1_loc (loc, NEGATE_EXPR, stype, neg);
7875 step = fold_build1_loc (loc, NEGATE_EXPR, stype, step);
7876 }
7877 neg = fold_build2_loc (loc, TRUNC_DIV_EXPR, stype,
7878 neg, step);
7879 step = NULL_TREE;
7880 tree cond = fold_build2_loc (loc, LT_EXPR,
7881 boolean_type_node,
7882 begin, end);
7883 pos = fold_build3_loc (loc, COND_EXPR, stype, cond, pos,
7884 build_int_cst (stype, 0));
7885 cond = fold_build2_loc (loc, LT_EXPR, boolean_type_node,
7886 end, begin);
7887 neg = fold_build3_loc (loc, COND_EXPR, stype, cond, neg,
7888 build_int_cst (stype, 0));
7889 tree osteptype = TREE_TYPE (orig_step);
7890 cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
7891 orig_step,
7892 build_int_cst (osteptype, 0));
7893 tree cnt = fold_build3_loc (loc, COND_EXPR, stype,
7894 cond, pos, neg);
7895 cnt = fold_convert_loc (loc, sizetype, cnt);
7896 if (gimplify_expr (&cnt, pre_p, NULL, is_gimple_val,
7897 fb_rvalue) == GS_ERROR)
7898 return 2;
7899 tcnt = size_binop_loc (loc, MULT_EXPR, tcnt, cnt);
7900 }
7901 if (gimplify_expr (&tcnt, pre_p, NULL, is_gimple_val,
7902 fb_rvalue) == GS_ERROR)
7903 return 2;
7904 last_iter = TREE_PURPOSE (t);
7905 last_count = tcnt;
7906 }
7907 if (counts[i] == NULL_TREE)
7908 counts[i] = last_count;
7909 else
7910 counts[i] = size_binop_loc (OMP_CLAUSE_LOCATION (c),
7911 PLUS_EXPR, counts[i], last_count);
7912 }
7913 else
7914 n[i]++;
7915 }
7916 for (i = 0; i < 4; i++)
7917 if (counts[i])
7918 break;
7919 if (i == 4)
7920 return 0;
7921
7922 tree total = size_zero_node;
7923 for (i = 0; i < 4; i++)
7924 {
7925 unused[i] = counts[i] == NULL_TREE && n[i] == 0;
7926 if (counts[i] == NULL_TREE)
7927 counts[i] = size_zero_node;
7928 if (n[i])
7929 counts[i] = size_binop (PLUS_EXPR, counts[i], size_int (n[i]));
7930 if (gimplify_expr (&counts[i], pre_p, NULL, is_gimple_val,
7931 fb_rvalue) == GS_ERROR)
7932 return 2;
7933 total = size_binop (PLUS_EXPR, total, counts[i]);
7934 }
7935
7936 if (gimplify_expr (&total, pre_p, NULL, is_gimple_val, fb_rvalue)
7937 == GS_ERROR)
7938 return 2;
7939 bool is_old = unused[1] && unused[3];
7940 tree totalpx = size_binop (PLUS_EXPR, unshare_expr (total),
7941 size_int (is_old ? 1 : 4));
7942 tree type = build_array_type (ptr_type_node, build_index_type (totalpx));
7943 tree array = create_tmp_var_raw (type);
7944 TREE_ADDRESSABLE (array) = 1;
7945 if (!poly_int_tree_p (totalpx))
7946 {
7947 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (array)))
7948 gimplify_type_sizes (TREE_TYPE (array), pre_p);
7949 if (gimplify_omp_ctxp)
7950 {
7951 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
7952 while (ctx
7953 && (ctx->region_type == ORT_WORKSHARE
7954 || ctx->region_type == ORT_TASKGROUP
7955 || ctx->region_type == ORT_SIMD
7956 || ctx->region_type == ORT_ACC))
7957 ctx = ctx->outer_context;
7958 if (ctx)
7959 omp_add_variable (ctx, array, GOVD_LOCAL | GOVD_SEEN);
7960 }
7961 gimplify_vla_decl (array, pre_p);
7962 }
7963 else
7964 gimple_add_tmp_var (array);
7965 tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
7966 NULL_TREE);
7967 tree tem;
7968 if (!is_old)
7969 {
7970 tem = build2 (MODIFY_EXPR, void_type_node, r,
7971 build_int_cst (ptr_type_node, 0));
7972 gimplify_and_add (tem, pre_p);
7973 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
7974 NULL_TREE);
7975 }
7976 tem = build2 (MODIFY_EXPR, void_type_node, r,
7977 fold_convert (ptr_type_node, total));
7978 gimplify_and_add (tem, pre_p);
7979 for (i = 1; i < (is_old ? 2 : 4); i++)
7980 {
7981 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (i + !is_old),
7982 NULL_TREE, NULL_TREE);
7983 tem = build2 (MODIFY_EXPR, void_type_node, r, counts[i - 1]);
7984 gimplify_and_add (tem, pre_p);
7985 }
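/* At this point the header of the depend array is laid out as
     array[0] = 0, array[1] = total, array[2..4] = per-kind counts
   in the general case, or as
     array[0] = total, array[1] = out/inout count
   when only the OpenMP 4.5 kinds are used (is_old). The element
   addresses are stored from index 5 (resp. 2) onwards below. */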
7986
7987 tree cnts[4];
7988 for (j = 4; j; j--)
7989 if (!unused[j - 1])
7990 break;
7991 for (i = 0; i < 4; i++)
7992 {
7993 if (i && (i >= j || unused[i - 1]))
7994 {
7995 cnts[i] = cnts[i - 1];
7996 continue;
7997 }
7998 cnts[i] = create_tmp_var (sizetype);
7999 if (i == 0)
8000 g = gimple_build_assign (cnts[i], size_int (is_old ? 2 : 5));
8001 else
8002 {
8003 tree t;
8004 if (is_old)
8005 t = size_binop (PLUS_EXPR, counts[0], size_int (2));
8006 else
8007 t = size_binop (PLUS_EXPR, cnts[i - 1], counts[i - 1]);
8008 if (gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue)
8009 == GS_ERROR)
8010 return 2;
8011 g = gimple_build_assign (cnts[i], t);
8012 }
8013 gimple_seq_add_stmt (pre_p, g);
8014 }
8015
8016 last_iter = NULL_TREE;
8017 tree last_bind = NULL_TREE;
8018 tree *last_body = NULL;
8019 for (c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
8020 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
8021 {
8022 switch (OMP_CLAUSE_DEPEND_KIND (c))
8023 {
8024 case OMP_CLAUSE_DEPEND_IN:
8025 i = 2;
8026 break;
8027 case OMP_CLAUSE_DEPEND_OUT:
8028 case OMP_CLAUSE_DEPEND_INOUT:
8029 i = 0;
8030 break;
8031 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
8032 i = 1;
8033 break;
8034 case OMP_CLAUSE_DEPEND_DEPOBJ:
8035 i = 3;
8036 break;
8037 case OMP_CLAUSE_DEPEND_SOURCE:
8038 case OMP_CLAUSE_DEPEND_SINK:
8039 continue;
8040 default:
8041 gcc_unreachable ();
8042 }
8043 tree t = OMP_CLAUSE_DECL (c);
8044 if (TREE_CODE (t) == TREE_LIST
8045 && TREE_PURPOSE (t)
8046 && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
8047 {
8048 if (TREE_PURPOSE (t) != last_iter)
8049 {
8050 if (last_bind)
8051 gimplify_and_add (last_bind, pre_p);
8052 tree block = TREE_VEC_ELT (TREE_PURPOSE (t), 5);
8053 last_bind = build3 (BIND_EXPR, void_type_node,
8054 BLOCK_VARS (block), NULL, block);
8055 TREE_SIDE_EFFECTS (last_bind) = 1;
8056 SET_EXPR_LOCATION (last_bind, OMP_CLAUSE_LOCATION (c));
8057 tree *p = &BIND_EXPR_BODY (last_bind);
8058 for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
8059 {
8060 tree var = TREE_VEC_ELT (it, 0);
8061 tree begin = TREE_VEC_ELT (it, 1);
8062 tree end = TREE_VEC_ELT (it, 2);
8063 tree step = TREE_VEC_ELT (it, 3);
8064 tree orig_step = TREE_VEC_ELT (it, 4);
8065 tree type = TREE_TYPE (var);
8066 location_t loc = DECL_SOURCE_LOCATION (var);
8067 /* Emit:
8068 var = begin;
8069 goto cond_label;
8070 beg_label:
8071 ...
8072 var = var + step;
8073 cond_label:
8074 if (orig_step > 0) {
8075 if (var < end) goto beg_label;
8076 } else {
8077 if (var > end) goto beg_label;
8078 }
8079 for each iterator, with inner iterators added to
8080 the ... above. */
8081 tree beg_label = create_artificial_label (loc);
8082 tree cond_label = NULL_TREE;
8083 tem = build2_loc (loc, MODIFY_EXPR, void_type_node,
8084 var, begin);
8085 append_to_statement_list_force (tem, p);
8086 tem = build_and_jump (&cond_label);
8087 append_to_statement_list_force (tem, p);
8088 tem = build1 (LABEL_EXPR, void_type_node, beg_label);
8089 append_to_statement_list (tem, p);
8090 tree bind = build3 (BIND_EXPR, void_type_node, NULL_TREE,
8091 NULL_TREE, NULL_TREE);
8092 TREE_SIDE_EFFECTS (bind) = 1;
8093 SET_EXPR_LOCATION (bind, loc);
8094 append_to_statement_list_force (bind, p);
8095 if (POINTER_TYPE_P (type))
8096 tem = build2_loc (loc, POINTER_PLUS_EXPR, type,
8097 var, fold_convert_loc (loc, sizetype,
8098 step));
8099 else
8100 tem = build2_loc (loc, PLUS_EXPR, type, var, step);
8101 tem = build2_loc (loc, MODIFY_EXPR, void_type_node,
8102 var, tem);
8103 append_to_statement_list_force (tem, p);
8104 tem = build1 (LABEL_EXPR, void_type_node, cond_label);
8105 append_to_statement_list (tem, p);
8106 tree cond = fold_build2_loc (loc, LT_EXPR,
8107 boolean_type_node,
8108 var, end);
8109 tree pos
8110 = fold_build3_loc (loc, COND_EXPR, void_type_node,
8111 cond, build_and_jump (&beg_label),
8112 void_node);
8113 cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
8114 var, end);
8115 tree neg
8116 = fold_build3_loc (loc, COND_EXPR, void_type_node,
8117 cond, build_and_jump (&beg_label),
8118 void_node);
8119 tree osteptype = TREE_TYPE (orig_step);
8120 cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
8121 orig_step,
8122 build_int_cst (osteptype, 0));
8123 tem = fold_build3_loc (loc, COND_EXPR, void_type_node,
8124 cond, pos, neg);
8125 append_to_statement_list_force (tem, p);
8126 p = &BIND_EXPR_BODY (bind);
8127 }
8128 last_body = p;
8129 }
8130 last_iter = TREE_PURPOSE (t);
8131 if (TREE_CODE (TREE_VALUE (t)) == COMPOUND_EXPR)
8132 {
8133 append_to_statement_list (TREE_OPERAND (TREE_VALUE (t),
8134 0), last_body);
8135 TREE_VALUE (t) = TREE_OPERAND (TREE_VALUE (t), 1);
8136 }
8137 if (error_operand_p (TREE_VALUE (t)))
8138 return 2;
8139 TREE_VALUE (t) = build_fold_addr_expr (TREE_VALUE (t));
8140 r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
8141 NULL_TREE, NULL_TREE);
8142 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
8143 void_type_node, r, TREE_VALUE (t));
8144 append_to_statement_list_force (tem, last_body);
8145 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
8146 void_type_node, cnts[i],
8147 size_binop (PLUS_EXPR, cnts[i], size_int (1)));
8148 append_to_statement_list_force (tem, last_body);
8149 TREE_VALUE (t) = null_pointer_node;
8150 }
8151 else
8152 {
8153 if (last_bind)
8154 {
8155 gimplify_and_add (last_bind, pre_p);
8156 last_bind = NULL_TREE;
8157 }
8158 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
8159 {
8160 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
8161 NULL, is_gimple_val, fb_rvalue);
8162 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
8163 }
8164 if (error_operand_p (OMP_CLAUSE_DECL (c)))
8165 return 2;
8166 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
8167 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
8168 is_gimple_val, fb_rvalue) == GS_ERROR)
8169 return 2;
8170 r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
8171 NULL_TREE, NULL_TREE);
8172 tem = build2 (MODIFY_EXPR, void_type_node, r, OMP_CLAUSE_DECL (c));
8173 gimplify_and_add (tem, pre_p);
8174 g = gimple_build_assign (cnts[i], size_binop (PLUS_EXPR, cnts[i],
8175 size_int (1)));
8176 gimple_seq_add_stmt (pre_p, g);
8177 }
8178 }
8179 if (last_bind)
8180 gimplify_and_add (last_bind, pre_p);
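/* Sanity check: each counter must have ended up exactly at the end of
   its section of the array; if the counts computed above disagree with
   the number of addresses actually stored, emit a trap. */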
8181 tree cond = boolean_false_node;
8182 if (is_old)
8183 {
8184 if (!unused[0])
8185 cond = build2_loc (first_loc, NE_EXPR, boolean_type_node, cnts[0],
8186 size_binop_loc (first_loc, PLUS_EXPR, counts[0],
8187 size_int (2)));
8188 if (!unused[2])
8189 cond = build2_loc (first_loc, TRUTH_OR_EXPR, boolean_type_node, cond,
8190 build2_loc (first_loc, NE_EXPR, boolean_type_node,
8191 cnts[2],
8192 size_binop_loc (first_loc, PLUS_EXPR,
8193 totalpx,
8194 size_int (1))));
8195 }
8196 else
8197 {
8198 tree prev = size_int (5);
8199 for (i = 0; i < 4; i++)
8200 {
8201 if (unused[i])
8202 continue;
8203 prev = size_binop_loc (first_loc, PLUS_EXPR, counts[i], prev);
8204 cond = build2_loc (first_loc, TRUTH_OR_EXPR, boolean_type_node, cond,
8205 build2_loc (first_loc, NE_EXPR, boolean_type_node,
8206 cnts[i], unshare_expr (prev)));
8207 }
8208 }
8209 tem = build3_loc (first_loc, COND_EXPR, void_type_node, cond,
8210 build_call_expr_loc (first_loc,
8211 builtin_decl_explicit (BUILT_IN_TRAP),
8212 0), void_node);
8213 gimplify_and_add (tem, pre_p);
8214 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
8215 OMP_CLAUSE_DEPEND_KIND (c) = OMP_CLAUSE_DEPEND_LAST;
8216 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
8217 OMP_CLAUSE_CHAIN (c) = *list_p;
8218 *list_p = c;
8219 return 1;
8220 }
8221
8222 /* Insert a GOMP_MAP_ALLOC or GOMP_MAP_RELEASE node following a
8223 GOMP_MAP_STRUCT mapping. C is an always_pointer mapping. STRUCT_NODE is
8224 the struct node to insert the new mapping after (when the struct node is
8225 initially created). PREV_NODE is the first of two or three mappings for a
8226 pointer, and is either:
8227 - the node before C, when a pair of mappings is used, e.g. for a C/C++
8228 array section.
8229 - not the node before C. This is true when we have a reference-to-pointer
8230 type (with a mapping for the reference and for the pointer), or for
8231 Fortran derived-type mappings with a GOMP_MAP_TO_PSET.
8232 If SCP is non-null, the new node is inserted before *SCP.
8233 If SCP is null, the new node is inserted before PREV_NODE.
8234 The return value is:
8235 - PREV_NODE, if SCP is non-null.
8236 - The newly-created ALLOC or RELEASE node, if SCP is null.
8237 - The second newly-created ALLOC or RELEASE node, if we are mapping a
8238 reference to a pointer. */
8239
8240 static tree
8241 insert_struct_comp_map (enum tree_code code, tree c, tree struct_node,
8242 tree prev_node, tree *scp)
8243 {
8244 enum gomp_map_kind mkind
8245 = (code == OMP_TARGET_EXIT_DATA || code == OACC_EXIT_DATA)
8246 ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
8247
8248 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_MAP);
8249 tree cl = scp ? prev_node : c2;
8250 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
8251 OMP_CLAUSE_DECL (c2) = unshare_expr (OMP_CLAUSE_DECL (c));
8252 OMP_CLAUSE_CHAIN (c2) = scp ? *scp : prev_node;
8253 if (OMP_CLAUSE_CHAIN (prev_node) != c
8254 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (prev_node)) == OMP_CLAUSE_MAP
8255 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (prev_node))
8256 == GOMP_MAP_TO_PSET))
8257 OMP_CLAUSE_SIZE (c2) = OMP_CLAUSE_SIZE (OMP_CLAUSE_CHAIN (prev_node));
8258 else
8259 OMP_CLAUSE_SIZE (c2) = TYPE_SIZE_UNIT (ptr_type_node);
8260 if (struct_node)
8261 OMP_CLAUSE_CHAIN (struct_node) = c2;
8262
8263 /* We might need to create an additional mapping if we have a reference to a
8264 pointer (in C++). Don't do this if we have something other than a
8265 GOMP_MAP_ALWAYS_POINTER or GOMP_MAP_ATTACH_DETACH, e.g. a GOMP_MAP_TO_PSET. */
8266 if (OMP_CLAUSE_CHAIN (prev_node) != c
8267 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (prev_node)) == OMP_CLAUSE_MAP
8268 && ((OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (prev_node))
8269 == GOMP_MAP_ALWAYS_POINTER)
8270 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (prev_node))
8271 == GOMP_MAP_ATTACH_DETACH)))
8272 {
8273 tree c4 = OMP_CLAUSE_CHAIN (prev_node);
8274 tree c3 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_MAP);
8275 OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
8276 OMP_CLAUSE_DECL (c3) = unshare_expr (OMP_CLAUSE_DECL (c4));
8277 OMP_CLAUSE_SIZE (c3) = TYPE_SIZE_UNIT (ptr_type_node);
8278 OMP_CLAUSE_CHAIN (c3) = prev_node;
8279 if (!scp)
8280 OMP_CLAUSE_CHAIN (c2) = c3;
8281 else
8282 cl = c3;
8283 }
8284
8285 if (scp)
8286 *scp = c2;
8287
8288 return cl;
8289 }
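/* For illustration: for a struct member mapping such as
   'map(to: s.p[0:n])' on 'omp target enter data', the function above
   inserts a GOMP_MAP_ALLOC node for the pointer member 's.p' after the
   GOMP_MAP_STRUCT node (or a GOMP_MAP_RELEASE node on exit data), so the
   pointer itself has device storage independent of the data it points
   to. */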
8290
8291 /* Strip ARRAY_REFS or an indirect ref off BASE, find the containing object,
8292 and set *BITPOSP and *POFFSETP to the bit and byte offsets of the access.
8293 If BASE_REF is non-NULL and the containing object is a reference, set
8294 *BASE_REF to that reference before dereferencing the object.
8295 If BASE_REF is NULL, check that the containing object is a COMPONENT_REF or
8296 has array type, else return NULL. */
8297
8298 static tree
8299 extract_base_bit_offset (tree base, tree *base_ref, poly_int64 *bitposp,
8300 poly_offset_int *poffsetp)
8301 {
8302 tree offset;
8303 poly_int64 bitsize, bitpos;
8304 machine_mode mode;
8305 int unsignedp, reversep, volatilep = 0;
8306 poly_offset_int poffset;
8307
8308 if (base_ref)
8309 {
8310 *base_ref = NULL_TREE;
8311
8312 while (TREE_CODE (base) == ARRAY_REF)
8313 base = TREE_OPERAND (base, 0);
8314
8315 if (TREE_CODE (base) == INDIRECT_REF)
8316 base = TREE_OPERAND (base, 0);
8317 }
8318 else
8319 {
8320 if (TREE_CODE (base) == ARRAY_REF)
8321 {
8322 while (TREE_CODE (base) == ARRAY_REF)
8323 base = TREE_OPERAND (base, 0);
8324 if (TREE_CODE (base) != COMPONENT_REF
8325 || TREE_CODE (TREE_TYPE (base)) != ARRAY_TYPE)
8326 return NULL_TREE;
8327 }
8328 else if (TREE_CODE (base) == INDIRECT_REF
8329 && TREE_CODE (TREE_OPERAND (base, 0)) == COMPONENT_REF
8330 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (base, 0)))
8331 == REFERENCE_TYPE))
8332 base = TREE_OPERAND (base, 0);
8333 }
8334
8335 base = get_inner_reference (base, &bitsize, &bitpos, &offset, &mode,
8336 &unsignedp, &reversep, &volatilep);
8337
8338 tree orig_base = base;
8339
8340 if ((TREE_CODE (base) == INDIRECT_REF
8341 || (TREE_CODE (base) == MEM_REF
8342 && integer_zerop (TREE_OPERAND (base, 1))))
8343 && DECL_P (TREE_OPERAND (base, 0))
8344 && TREE_CODE (TREE_TYPE (TREE_OPERAND (base, 0))) == REFERENCE_TYPE)
8345 base = TREE_OPERAND (base, 0);
8346
8347 gcc_assert (offset == NULL_TREE || poly_int_tree_p (offset));
8348
8349 if (offset)
8350 poffset = wi::to_poly_offset (offset);
8351 else
8352 poffset = 0;
8353
8354 if (maybe_ne (bitpos, 0))
8355 poffset += bits_to_bytes_round_down (bitpos);
8356
8357 *bitposp = bitpos;
8358 *poffsetp = poffset;
8359
8360 /* Set *BASE_REF if BASE was a dereferenced reference variable. */
8361 if (base_ref && orig_base != base)
8362 *base_ref = orig_base;
8363
8364 return base;
8365 }
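/* E.g. for BASE 's.x[3]' with BASE_REF NULL: the ARRAY_REF is stripped,
   get_inner_reference decomposes the remaining COMPONENT_REF 's.x', and
   's' is returned with *POFFSETP set to the byte offset of the array
   member 'x' within 's'. */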
8366
8367 /* Returns true if EXPR is or contains (as a sub-component) BASE_PTR. */
8368
8369 static bool
8370 is_or_contains_p (tree expr, tree base_ptr)
8371 {
8372 while (expr != base_ptr)
8373 if (TREE_CODE (base_ptr) == COMPONENT_REF)
8374 base_ptr = TREE_OPERAND (base_ptr, 0);
8375 else
8376 break;
8377 return expr == base_ptr;
8378 }
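/* E.g. is_or_contains_p (s, s.a.b) is true: stripping COMPONENT_REFs off
   the base pointer 's.a.b' eventually yields 's' itself. */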
8379
8380 /* Implement OpenMP 5.x map ordering rules for target directives. There are
8381 several rules, with some level of ambiguity; hopefully we can at least
8382 collect the complexity here in one place. */
8383
8384 static void
8385 omp_target_reorder_clauses (tree *list_p)
8386 {
8387 /* Collect refs to alloc/release/delete maps. */
8388 auto_vec<tree, 32> ard;
8389 tree *cp = list_p;
8390 while (*cp != NULL_TREE)
8391 if (OMP_CLAUSE_CODE (*cp) == OMP_CLAUSE_MAP
8392 && (OMP_CLAUSE_MAP_KIND (*cp) == GOMP_MAP_ALLOC
8393 || OMP_CLAUSE_MAP_KIND (*cp) == GOMP_MAP_RELEASE
8394 || OMP_CLAUSE_MAP_KIND (*cp) == GOMP_MAP_DELETE))
8395 {
8396 /* Unlink cp and push to ard. */
8397 tree c = *cp;
8398 tree nc = OMP_CLAUSE_CHAIN (c);
8399 *cp = nc;
8400 ard.safe_push (c);
8401
8402 /* Any associated pointer type maps should also move along. */
8403 while (*cp != NULL_TREE
8404 && OMP_CLAUSE_CODE (*cp) == OMP_CLAUSE_MAP
8405 && (OMP_CLAUSE_MAP_KIND (*cp) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
8406 || OMP_CLAUSE_MAP_KIND (*cp) == GOMP_MAP_FIRSTPRIVATE_POINTER
8407 || OMP_CLAUSE_MAP_KIND (*cp) == GOMP_MAP_ATTACH_DETACH
8408 || OMP_CLAUSE_MAP_KIND (*cp) == GOMP_MAP_POINTER
8409 || OMP_CLAUSE_MAP_KIND (*cp) == GOMP_MAP_ALWAYS_POINTER
8410 || OMP_CLAUSE_MAP_KIND (*cp) == GOMP_MAP_TO_PSET))
8411 {
8412 c = *cp;
8413 nc = OMP_CLAUSE_CHAIN (c);
8414 *cp = nc;
8415 ard.safe_push (c);
8416 }
8417 }
8418 else
8419 cp = &OMP_CLAUSE_CHAIN (*cp);
8420
8421 /* Link alloc/release/delete maps to the end of list. */
8422 for (unsigned int i = 0; i < ard.length (); i++)
8423 {
8424 *cp = ard[i];
8425 cp = &OMP_CLAUSE_CHAIN (ard[i]);
8426 }
8427 *cp = NULL_TREE;
8428
8429 /* OpenMP 5.0 requires that pointer variables are mapped before
8430 their use as base pointers. */
8431 auto_vec<tree *, 32> atf;
8432 for (tree *cp = list_p; *cp; cp = &OMP_CLAUSE_CHAIN (*cp))
8433 if (OMP_CLAUSE_CODE (*cp) == OMP_CLAUSE_MAP)
8434 {
8435 /* Collect alloc, to, from, to/from clause tree pointers. */
8436 gomp_map_kind k = OMP_CLAUSE_MAP_KIND (*cp);
8437 if (k == GOMP_MAP_ALLOC
8438 || k == GOMP_MAP_TO
8439 || k == GOMP_MAP_FROM
8440 || k == GOMP_MAP_TOFROM
8441 || k == GOMP_MAP_ALWAYS_TO
8442 || k == GOMP_MAP_ALWAYS_FROM
8443 || k == GOMP_MAP_ALWAYS_TOFROM)
8444 atf.safe_push (cp);
8445 }
8446
8447 for (unsigned int i = 0; i < atf.length (); i++)
8448 if (atf[i])
8449 {
8450 tree *cp = atf[i];
8451 tree decl = OMP_CLAUSE_DECL (*cp);
8452 if (TREE_CODE (decl) == INDIRECT_REF || TREE_CODE (decl) == MEM_REF)
8453 {
8454 tree base_ptr = TREE_OPERAND (decl, 0);
8455 STRIP_TYPE_NOPS (base_ptr);
8456 for (unsigned int j = i + 1; j < atf.length (); j++)
8457 {
8458 tree *cp2 = atf[j];
8459 tree decl2 = OMP_CLAUSE_DECL (*cp2);
8460 if (is_or_contains_p (decl2, base_ptr))
8461 {
8462 /* Move *cp2 to before *cp. */
8463 tree c = *cp2;
8464 *cp2 = OMP_CLAUSE_CHAIN (c);
8465 OMP_CLAUSE_CHAIN (c) = *cp;
8466 *cp = c;
8467 atf[j] = NULL;
8468 }
8469 }
8470 }
8471 }
8472 }
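/* For instance, if a 'map(to: p)' clause appears after a map whose decl
   dereferences 'p' (e.g. one produced for an array section 'p[:n]'), the
   second pass above moves the mapping of 'p' in front of the clause that
   uses it as a base pointer. */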
8473
8474 /* Scan the OMP clauses in *LIST_P, installing mappings into a new
8475 omp context and, where needed, into enclosing omp contexts. */
8476
8477 static void
8478 gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
8479 enum omp_region_type region_type,
8480 enum tree_code code)
8481 {
8482 struct gimplify_omp_ctx *ctx, *outer_ctx;
8483 tree c;
8484 hash_map<tree, tree> *struct_map_to_clause = NULL;
8485 hash_set<tree> *struct_deref_set = NULL;
8486 tree *prev_list_p = NULL, *orig_list_p = list_p;
8487 int handled_depend_iterators = -1;
8488 int nowait = -1;
8489
8490 ctx = new_omp_context (region_type);
8491 ctx->code = code;
8492 outer_ctx = ctx->outer_context;
8493 if (code == OMP_TARGET)
8494 {
8495 if (!lang_GNU_Fortran ())
8496 ctx->defaultmap[GDMK_POINTER] = GOVD_MAP | GOVD_MAP_0LEN_ARRAY;
8497 ctx->defaultmap[GDMK_SCALAR] = GOVD_FIRSTPRIVATE;
8498 }
8499 if (!lang_GNU_Fortran ())
8500 switch (code)
8501 {
8502 case OMP_TARGET:
8503 case OMP_TARGET_DATA:
8504 case OMP_TARGET_ENTER_DATA:
8505 case OMP_TARGET_EXIT_DATA:
8506 case OACC_DECLARE:
8507 case OACC_HOST_DATA:
8508 case OACC_PARALLEL:
8509 case OACC_KERNELS:
8510 ctx->target_firstprivatize_array_bases = true;
8511 default:
8512 break;
8513 }
8514
8515 if (code == OMP_TARGET
8516 || code == OMP_TARGET_DATA
8517 || code == OMP_TARGET_ENTER_DATA
8518 || code == OMP_TARGET_EXIT_DATA)
8519 omp_target_reorder_clauses (list_p);
8520
8521 while ((c = *list_p) != NULL)
8522 {
8523 bool remove = false;
8524 bool notice_outer = true;
8525 const char *check_non_private = NULL;
8526 unsigned int flags;
8527 tree decl;
8528
8529 switch (OMP_CLAUSE_CODE (c))
8530 {
8531 case OMP_CLAUSE_PRIVATE:
8532 flags = GOVD_PRIVATE | GOVD_EXPLICIT;
8533 if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
8534 {
8535 flags |= GOVD_PRIVATE_OUTER_REF;
8536 OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
8537 }
8538 else
8539 notice_outer = false;
8540 goto do_add;
8541 case OMP_CLAUSE_SHARED:
8542 flags = GOVD_SHARED | GOVD_EXPLICIT;
8543 goto do_add;
8544 case OMP_CLAUSE_FIRSTPRIVATE:
8545 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
8546 check_non_private = "firstprivate";
8547 goto do_add;
8548 case OMP_CLAUSE_LASTPRIVATE:
8549 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
8550 switch (code)
8551 {
8552 case OMP_DISTRIBUTE:
8553 error_at (OMP_CLAUSE_LOCATION (c),
8554 "conditional %<lastprivate%> clause on "
8555 "%qs construct", "distribute");
8556 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
8557 break;
8558 case OMP_TASKLOOP:
8559 error_at (OMP_CLAUSE_LOCATION (c),
8560 "conditional %<lastprivate%> clause on "
8561 "%qs construct", "taskloop");
8562 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
8563 break;
8564 default:
8565 break;
8566 }
8567 flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
8568 if (code != OMP_LOOP)
8569 check_non_private = "lastprivate";
8570 decl = OMP_CLAUSE_DECL (c);
8571 if (error_operand_p (decl))
8572 goto do_add;
8573 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
8574 && !lang_hooks.decls.omp_scalar_p (decl))
8575 {
8576 error_at (OMP_CLAUSE_LOCATION (c),
8577 "non-scalar variable %qD in conditional "
8578 "%<lastprivate%> clause", decl);
8579 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
8580 }
8581 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
8582 flags |= GOVD_LASTPRIVATE_CONDITIONAL;
8583 if (outer_ctx
8584 && (outer_ctx->region_type == ORT_COMBINED_PARALLEL
8585 || ((outer_ctx->region_type & ORT_COMBINED_TEAMS)
8586 == ORT_COMBINED_TEAMS))
8587 && splay_tree_lookup (outer_ctx->variables,
8588 (splay_tree_key) decl) == NULL)
8589 {
8590 omp_add_variable (outer_ctx, decl, GOVD_SHARED | GOVD_SEEN);
8591 if (outer_ctx->outer_context)
8592 omp_notice_variable (outer_ctx->outer_context, decl, true);
8593 }
8594 else if (outer_ctx
8595 && (outer_ctx->region_type & ORT_TASK) != 0
8596 && outer_ctx->combined_loop
8597 && splay_tree_lookup (outer_ctx->variables,
8598 (splay_tree_key) decl) == NULL)
8599 {
8600 omp_add_variable (outer_ctx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
8601 if (outer_ctx->outer_context)
8602 omp_notice_variable (outer_ctx->outer_context, decl, true);
8603 }
8604 else if (outer_ctx
8605 && (outer_ctx->region_type == ORT_WORKSHARE
8606 || outer_ctx->region_type == ORT_ACC)
8607 && outer_ctx->combined_loop
8608 && splay_tree_lookup (outer_ctx->variables,
8609 (splay_tree_key) decl) == NULL
8610 && !omp_check_private (outer_ctx, decl, false))
8611 {
8612 omp_add_variable (outer_ctx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
8613 if (outer_ctx->outer_context
8614 && (outer_ctx->outer_context->region_type
8615 == ORT_COMBINED_PARALLEL)
8616 && splay_tree_lookup (outer_ctx->outer_context->variables,
8617 (splay_tree_key) decl) == NULL)
8618 {
8619 struct gimplify_omp_ctx *octx = outer_ctx->outer_context;
8620 omp_add_variable (octx, decl, GOVD_SHARED | GOVD_SEEN);
8621 if (octx->outer_context)
8622 {
8623 octx = octx->outer_context;
8624 if (octx->region_type == ORT_WORKSHARE
8625 && octx->combined_loop
8626 && splay_tree_lookup (octx->variables,
8627 (splay_tree_key) decl) == NULL
8628 && !omp_check_private (octx, decl, false))
8629 {
8630 omp_add_variable (octx, decl,
8631 GOVD_LASTPRIVATE | GOVD_SEEN);
8632 octx = octx->outer_context;
8633 if (octx
8634 && ((octx->region_type & ORT_COMBINED_TEAMS)
8635 == ORT_COMBINED_TEAMS)
8636 && (splay_tree_lookup (octx->variables,
8637 (splay_tree_key) decl)
8638 == NULL))
8639 {
8640 omp_add_variable (octx, decl,
8641 GOVD_SHARED | GOVD_SEEN);
8642 octx = octx->outer_context;
8643 }
8644 }
8645 if (octx)
8646 omp_notice_variable (octx, decl, true);
8647 }
8648 }
8649 else if (outer_ctx->outer_context)
8650 omp_notice_variable (outer_ctx->outer_context, decl, true);
8651 }
8652 goto do_add;
8653 case OMP_CLAUSE_REDUCTION:
8654 if (OMP_CLAUSE_REDUCTION_TASK (c))
8655 {
8656 if (region_type == ORT_WORKSHARE)
8657 {
8658 if (nowait == -1)
8659 nowait = omp_find_clause (*list_p,
8660 OMP_CLAUSE_NOWAIT) != NULL_TREE;
8661 if (nowait
8662 && (outer_ctx == NULL
8663 || outer_ctx->region_type != ORT_COMBINED_PARALLEL))
8664 {
8665 error_at (OMP_CLAUSE_LOCATION (c),
8666 "%<task%> reduction modifier on a construct "
8667 "with a %<nowait%> clause");
8668 OMP_CLAUSE_REDUCTION_TASK (c) = 0;
8669 }
8670 }
8671 else if ((region_type & ORT_PARALLEL) != ORT_PARALLEL)
8672 {
8673 error_at (OMP_CLAUSE_LOCATION (c),
8674 "invalid %<task%> reduction modifier on construct "
8675 "other than %<parallel%>, %qs or %<sections%>",
8676 lang_GNU_Fortran () ? "do" : "for");
8677 OMP_CLAUSE_REDUCTION_TASK (c) = 0;
8678 }
8679 }
8680 if (OMP_CLAUSE_REDUCTION_INSCAN (c))
8681 switch (code)
8682 {
8683 case OMP_SECTIONS:
8684 error_at (OMP_CLAUSE_LOCATION (c),
8685 "%<inscan%> %<reduction%> clause on "
8686 "%qs construct", "sections");
8687 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
8688 break;
8689 case OMP_PARALLEL:
8690 error_at (OMP_CLAUSE_LOCATION (c),
8691 "%<inscan%> %<reduction%> clause on "
8692 "%qs construct", "parallel");
8693 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
8694 break;
8695 case OMP_TEAMS:
8696 error_at (OMP_CLAUSE_LOCATION (c),
8697 "%<inscan%> %<reduction%> clause on "
8698 "%qs construct", "teams");
8699 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
8700 break;
8701 case OMP_TASKLOOP:
8702 error_at (OMP_CLAUSE_LOCATION (c),
8703 "%<inscan%> %<reduction%> clause on "
8704 "%qs construct", "taskloop");
8705 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
8706 break;
8707 default:
8708 break;
8709 }
8710 /* FALLTHRU */
8711 case OMP_CLAUSE_IN_REDUCTION:
8712 case OMP_CLAUSE_TASK_REDUCTION:
8713 flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
8714 /* OpenACC permits reductions on private variables. */
8715 if (!(region_type & ORT_ACC)
8716 /* taskgroup is actually not a worksharing region. */
8717 && code != OMP_TASKGROUP)
8718 check_non_private = omp_clause_code_name[OMP_CLAUSE_CODE (c)];
8719 decl = OMP_CLAUSE_DECL (c);
8720 if (TREE_CODE (decl) == MEM_REF)
8721 {
8722 tree type = TREE_TYPE (decl);
8723 if (gimplify_expr (&TYPE_MAX_VALUE (TYPE_DOMAIN (type)), pre_p,
8724 NULL, is_gimple_val, fb_rvalue, false)
8725 == GS_ERROR)
8726 {
8727 remove = true;
8728 break;
8729 }
8730 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
8731 if (DECL_P (v))
8732 {
8733 omp_firstprivatize_variable (ctx, v);
8734 omp_notice_variable (ctx, v, true);
8735 }
8736 decl = TREE_OPERAND (decl, 0);
8737 if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
8738 {
8739 if (gimplify_expr (&TREE_OPERAND (decl, 1), pre_p,
8740 NULL, is_gimple_val, fb_rvalue, false)
8741 == GS_ERROR)
8742 {
8743 remove = true;
8744 break;
8745 }
8746 v = TREE_OPERAND (decl, 1);
8747 if (DECL_P (v))
8748 {
8749 omp_firstprivatize_variable (ctx, v);
8750 omp_notice_variable (ctx, v, true);
8751 }
8752 decl = TREE_OPERAND (decl, 0);
8753 }
8754 if (TREE_CODE (decl) == ADDR_EXPR
8755 || TREE_CODE (decl) == INDIRECT_REF)
8756 decl = TREE_OPERAND (decl, 0);
8757 }
8758 goto do_add_decl;
8759 case OMP_CLAUSE_LINEAR:
8760 if (gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c), pre_p, NULL,
8761 is_gimple_val, fb_rvalue) == GS_ERROR)
8762 {
8763 remove = true;
8764 break;
8765 }
8766 else
8767 {
8768 if (code == OMP_SIMD
8769 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
8770 {
8771 struct gimplify_omp_ctx *octx = outer_ctx;
8772 if (octx
8773 && octx->region_type == ORT_WORKSHARE
8774 && octx->combined_loop
8775 && !octx->distribute)
8776 {
8777 if (octx->outer_context
8778 && (octx->outer_context->region_type
8779 == ORT_COMBINED_PARALLEL))
8780 octx = octx->outer_context->outer_context;
8781 else
8782 octx = octx->outer_context;
8783 }
8784 if (octx
8785 && octx->region_type == ORT_WORKSHARE
8786 && octx->combined_loop
8787 && octx->distribute)
8788 {
8789 error_at (OMP_CLAUSE_LOCATION (c),
8790 "%<linear%> clause for variable other than "
8791 "loop iterator specified on construct "
8792 "combined with %<distribute%>");
8793 remove = true;
8794 break;
8795 }
8796 }
8797 /* For combined #pragma omp parallel for simd, need to put
8798 lastprivate and perhaps firstprivate too on the
8799 parallel. Similarly for #pragma omp for simd. */
8800 struct gimplify_omp_ctx *octx = outer_ctx;
8801 decl = NULL_TREE;
8802 do
8803 {
8804 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
8805 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
8806 break;
8807 decl = OMP_CLAUSE_DECL (c);
8808 if (error_operand_p (decl))
8809 {
8810 decl = NULL_TREE;
8811 break;
8812 }
8813 flags = GOVD_SEEN;
8814 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
8815 flags |= GOVD_FIRSTPRIVATE;
8816 if (!OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
8817 flags |= GOVD_LASTPRIVATE;
8818 if (octx
8819 && octx->region_type == ORT_WORKSHARE
8820 && octx->combined_loop)
8821 {
8822 if (octx->outer_context
8823 && (octx->outer_context->region_type
8824 == ORT_COMBINED_PARALLEL))
8825 octx = octx->outer_context;
8826 else if (omp_check_private (octx, decl, false))
8827 break;
8828 }
8829 else if (octx
8830 && (octx->region_type & ORT_TASK) != 0
8831 && octx->combined_loop)
8832 ;
8833 else if (octx
8834 && octx->region_type == ORT_COMBINED_PARALLEL
8835 && ctx->region_type == ORT_WORKSHARE
8836 && octx == outer_ctx)
8837 flags = GOVD_SEEN | GOVD_SHARED;
8838 else if (octx
8839 && ((octx->region_type & ORT_COMBINED_TEAMS)
8840 == ORT_COMBINED_TEAMS))
8841 flags = GOVD_SEEN | GOVD_SHARED;
8842 else if (octx
8843 && octx->region_type == ORT_COMBINED_TARGET)
8844 {
8845 flags &= ~GOVD_LASTPRIVATE;
8846 if (flags == GOVD_SEEN)
8847 break;
8848 }
8849 else
8850 break;
8851 splay_tree_node on
8852 = splay_tree_lookup (octx->variables,
8853 (splay_tree_key) decl);
8854 if (on && (on->value & GOVD_DATA_SHARE_CLASS) != 0)
8855 {
8856 octx = NULL;
8857 break;
8858 }
8859 omp_add_variable (octx, decl, flags);
8860 if (octx->outer_context == NULL)
8861 break;
8862 octx = octx->outer_context;
8863 }
8864 while (1);
8865 if (octx
8866 && decl
8867 && (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
8868 || !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
8869 omp_notice_variable (octx, decl, true);
8870 }
8871 flags = GOVD_LINEAR | GOVD_EXPLICIT;
8872 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
8873 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
8874 {
8875 notice_outer = false;
8876 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
8877 }
8878 goto do_add;
8879
8880 case OMP_CLAUSE_MAP:
8881 decl = OMP_CLAUSE_DECL (c);
8882 if (error_operand_p (decl))
8883 remove = true;
8884 switch (code)
8885 {
8886 case OMP_TARGET:
8887 break;
8888 case OACC_DATA:
8889 if (TREE_CODE (TREE_TYPE (decl)) != ARRAY_TYPE)
8890 break;
8891 /* FALLTHRU */
8892 case OMP_TARGET_DATA:
8893 case OMP_TARGET_ENTER_DATA:
8894 case OMP_TARGET_EXIT_DATA:
8895 case OACC_ENTER_DATA:
8896 case OACC_EXIT_DATA:
8897 case OACC_HOST_DATA:
8898 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
8899 || (OMP_CLAUSE_MAP_KIND (c)
8900 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
8901 /* For target {,enter ,exit }data only the array slice is
8902 mapped, but not the pointer to it. */
8903 remove = true;
8904 break;
8905 default:
8906 break;
8907 }
8908 /* For Fortran, not only the pointer to the data is mapped but also
8909 the address of the pointer, the array descriptor etc.; for
8910 'exit data' - and in particular for 'delete:' - having an 'alloc:'
8911 does not make sense. Likewise, for 'update' only transferring the
8912 data itself is needed as the rest has been handled in previous
8913 directives. However, for 'exit data', the array descriptor needs
8914 to be deleted; hence, we turn the MAP_TO_PSET into a MAP_DELETE (or MAP_RELEASE).
8915
8916 NOTE: Generally, it is not safe to perform "enter data" operations
8917 on arrays where the data *or the descriptor* may go out of scope
8918 before a corresponding "exit data" operation -- and such a
8919 descriptor may be synthesized temporarily, e.g. to pass an
8920 explicit-shape array to a function expecting an assumed-shape
8921 argument. Performing "enter data" inside the called function
8922 would thus be problematic. */
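/* Hedged example (added commentary): for a Fortran allocatable array A,

     !$omp target exit data map(delete: a)

   maps both the data and the array descriptor; the descriptor's
   GOMP_MAP_TO_PSET entry is rewritten just below into a delete or
   release matching the preceding clause.  */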
8923 if (code == OMP_TARGET_EXIT_DATA
8924 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_TO_PSET)
8925 OMP_CLAUSE_SET_MAP_KIND (c, OMP_CLAUSE_MAP_KIND (*prev_list_p)
8926 == GOMP_MAP_DELETE
8927 ? GOMP_MAP_DELETE : GOMP_MAP_RELEASE);
8928 else if ((code == OMP_TARGET_EXIT_DATA || code == OMP_TARGET_UPDATE)
8929 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
8930 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_TO_PSET))
8931 remove = true;
8932
8933 if (remove)
8934 break;
8935 if (DECL_P (decl) && outer_ctx && (region_type & ORT_ACC))
8936 {
8937 struct gimplify_omp_ctx *octx;
8938 for (octx = outer_ctx; octx; octx = octx->outer_context)
8939 {
8940 if (octx->region_type != ORT_ACC_HOST_DATA)
8941 break;
8942 splay_tree_node n2
8943 = splay_tree_lookup (octx->variables,
8944 (splay_tree_key) decl);
8945 if (n2)
8946 error_at (OMP_CLAUSE_LOCATION (c), "variable %qE "
8947 "declared in enclosing %<host_data%> region",
8948 DECL_NAME (decl));
8949 }
8950 }
8951 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
8952 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
8953 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
8954 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
8955 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
8956 {
8957 remove = true;
8958 break;
8959 }
8960 else if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
8961 || (OMP_CLAUSE_MAP_KIND (c)
8962 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
8963 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
8964 && TREE_CODE (OMP_CLAUSE_SIZE (c)) != INTEGER_CST)
8965 {
8966 OMP_CLAUSE_SIZE (c)
8967 = get_initialized_tmp_var (OMP_CLAUSE_SIZE (c), pre_p, NULL,
8968 false);
8969 if ((region_type & ORT_TARGET) != 0)
8970 omp_add_variable (ctx, OMP_CLAUSE_SIZE (c),
8971 GOVD_FIRSTPRIVATE | GOVD_SEEN);
8972 }
8973
8974 if (!DECL_P (decl))
8975 {
8976 tree d = decl, *pd;
8977 if (TREE_CODE (d) == ARRAY_REF)
8978 {
8979 while (TREE_CODE (d) == ARRAY_REF)
8980 d = TREE_OPERAND (d, 0);
8981 if (TREE_CODE (d) == COMPONENT_REF
8982 && TREE_CODE (TREE_TYPE (d)) == ARRAY_TYPE)
8983 decl = d;
8984 }
8985 pd = &OMP_CLAUSE_DECL (c);
8986 if (d == decl
8987 && TREE_CODE (decl) == INDIRECT_REF
8988 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
8989 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
8990 == REFERENCE_TYPE))
8991 {
8992 pd = &TREE_OPERAND (decl, 0);
8993 decl = TREE_OPERAND (decl, 0);
8994 }
8995 bool indir_p = false;
8996 tree orig_decl = decl;
8997 tree decl_ref = NULL_TREE;
8998 if ((region_type & (ORT_ACC | ORT_TARGET | ORT_TARGET_DATA)) != 0
8999 && TREE_CODE (*pd) == COMPONENT_REF
9000 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH
9001 && code != OACC_UPDATE)
9002 {
9003 while (TREE_CODE (decl) == COMPONENT_REF)
9004 {
9005 decl = TREE_OPERAND (decl, 0);
9006 if (((TREE_CODE (decl) == MEM_REF
9007 && integer_zerop (TREE_OPERAND (decl, 1)))
9008 || INDIRECT_REF_P (decl))
9009 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
9010 == POINTER_TYPE))
9011 {
9012 indir_p = true;
9013 decl = TREE_OPERAND (decl, 0);
9014 }
9015 if (TREE_CODE (decl) == INDIRECT_REF
9016 && DECL_P (TREE_OPERAND (decl, 0))
9017 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
9018 == REFERENCE_TYPE))
9019 {
9020 decl_ref = decl;
9021 decl = TREE_OPERAND (decl, 0);
9022 }
9023 }
9024 }
9025 else if (TREE_CODE (decl) == COMPONENT_REF)
9026 {
9027 while (TREE_CODE (decl) == COMPONENT_REF)
9028 decl = TREE_OPERAND (decl, 0);
9029 if (TREE_CODE (decl) == INDIRECT_REF
9030 && DECL_P (TREE_OPERAND (decl, 0))
9031 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
9032 == REFERENCE_TYPE))
9033 decl = TREE_OPERAND (decl, 0);
9034 }
9035 if (decl != orig_decl && DECL_P (decl) && indir_p)
9036 {
9037 gomp_map_kind k
9038 = ((code == OACC_EXIT_DATA || code == OMP_TARGET_EXIT_DATA)
9039 ? GOMP_MAP_DETACH : GOMP_MAP_ATTACH);
9040 /* We have a dereference of a struct member. Make this an
9041 attach/detach operation, and ensure the base pointer is
9042 mapped as a FIRSTPRIVATE_POINTER. */
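/* Illustrative sketch (added commentary; S, A and N are made up): for
   a clause mapping S->A[0:N], where S is a pointer to struct, the
   attach/detach below covers the pointer member, and for constructs
   other than enter data a zero-length alloc plus a
   FIRSTPRIVATE_POINTER clause for the base pointer S are synthesized
   as well.  */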
9043 OMP_CLAUSE_SET_MAP_KIND (c, k);
9044 flags = GOVD_MAP | GOVD_SEEN | GOVD_EXPLICIT;
9045 tree next_clause = OMP_CLAUSE_CHAIN (c);
9046 if (k == GOMP_MAP_ATTACH
9047 && code != OACC_ENTER_DATA
9048 && code != OMP_TARGET_ENTER_DATA
9049 && (!next_clause
9050 || (OMP_CLAUSE_CODE (next_clause) != OMP_CLAUSE_MAP)
9051 || (OMP_CLAUSE_MAP_KIND (next_clause)
9052 != GOMP_MAP_POINTER)
9053 || OMP_CLAUSE_DECL (next_clause) != decl)
9054 && (!struct_deref_set
9055 || !struct_deref_set->contains (decl)))
9056 {
9057 if (!struct_deref_set)
9058 struct_deref_set = new hash_set<tree> ();
9059 /* As well as the attach, we also need a
9060 FIRSTPRIVATE_POINTER clause to properly map the
9061 pointer to the struct base. */
9062 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
9063 OMP_CLAUSE_MAP);
9064 OMP_CLAUSE_SET_MAP_KIND (c2, GOMP_MAP_ALLOC);
9065 OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c2)
9066 = 1;
9067 tree charptr_zero
9068 = build_int_cst (build_pointer_type (char_type_node),
9069 0);
9070 OMP_CLAUSE_DECL (c2)
9071 = build2 (MEM_REF, char_type_node,
9072 decl_ref ? decl_ref : decl, charptr_zero);
9073 OMP_CLAUSE_SIZE (c2) = size_zero_node;
9074 tree c3 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
9075 OMP_CLAUSE_MAP);
9076 OMP_CLAUSE_SET_MAP_KIND (c3,
9077 GOMP_MAP_FIRSTPRIVATE_POINTER);
9078 OMP_CLAUSE_DECL (c3) = decl;
9079 OMP_CLAUSE_SIZE (c3) = size_zero_node;
9080 tree mapgrp = *prev_list_p;
9081 *prev_list_p = c2;
9082 OMP_CLAUSE_CHAIN (c3) = mapgrp;
9083 OMP_CLAUSE_CHAIN (c2) = c3;
9084
9085 struct_deref_set->add (decl);
9086 }
9087 goto do_add_decl;
9088 }
9089 /* An "attach/detach" operation on an update directive should
9090 behave as a GOMP_MAP_ALWAYS_POINTER. Beware that
9091 unlike attach or detach map kinds, GOMP_MAP_ALWAYS_POINTER
9092 depends on the previous mapping. */
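/* Hedged example (added commentary): e.g. updating an array section
   reached through a struct's pointer member; the node is treated as an
   always-pointer update relative to the previously mapped data rather
   than as a real attach/detach.  */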
9093 if (code == OACC_UPDATE
9094 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
9095 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_ALWAYS_POINTER);
9096 if (DECL_P (decl)
9097 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_TO_PSET
9098 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ATTACH
9099 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_DETACH
9100 && code != OACC_UPDATE
9101 && code != OMP_TARGET_UPDATE)
9102 {
9103 if (error_operand_p (decl))
9104 {
9105 remove = true;
9106 break;
9107 }
9108
9109 tree stype = TREE_TYPE (decl);
9110 if (TREE_CODE (stype) == REFERENCE_TYPE)
9111 stype = TREE_TYPE (stype);
9112 if (TYPE_SIZE_UNIT (stype) == NULL
9113 || TREE_CODE (TYPE_SIZE_UNIT (stype)) != INTEGER_CST)
9114 {
9115 error_at (OMP_CLAUSE_LOCATION (c),
9116 "mapping field %qE of variable length "
9117 "structure", OMP_CLAUSE_DECL (c));
9118 remove = true;
9119 break;
9120 }
9121
9122 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER
9123 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
9124 {
9125 /* Error recovery. */
9126 if (prev_list_p == NULL)
9127 {
9128 remove = true;
9129 break;
9130 }
9131 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
9132 {
9133 tree ch = OMP_CLAUSE_CHAIN (*prev_list_p);
9134 if (ch == NULL_TREE || OMP_CLAUSE_CHAIN (ch) != c)
9135 {
9136 remove = true;
9137 break;
9138 }
9139 }
9140 }
9141
9142 poly_offset_int offset1;
9143 poly_int64 bitpos1;
9144 tree base_ref;
9145
9146 tree base
9147 = extract_base_bit_offset (OMP_CLAUSE_DECL (c), &base_ref,
9148 &bitpos1, &offset1);
9149
9150 gcc_assert (base == decl);
9151
9152 splay_tree_node n
9153 = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
9154 bool ptr = (OMP_CLAUSE_MAP_KIND (c)
9155 == GOMP_MAP_ALWAYS_POINTER);
9156 bool attach_detach = (OMP_CLAUSE_MAP_KIND (c)
9157 == GOMP_MAP_ATTACH_DETACH);
9158 bool attach = OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
9159 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH;
9160 bool has_attachments = false;
9161 /* For OpenACC, pointers in structs should trigger an
9162 attach action. */
9163 if (attach_detach
9164 && ((region_type & (ORT_ACC | ORT_TARGET | ORT_TARGET_DATA))
9165 || code == OMP_TARGET_ENTER_DATA
9166 || code == OMP_TARGET_EXIT_DATA))
9168 {
9169 /* Turn a GOMP_MAP_ATTACH_DETACH clause into a
9170 GOMP_MAP_ATTACH or GOMP_MAP_DETACH clause after we
9171 have detected a case that needs a GOMP_MAP_STRUCT
9172 mapping added. */
9173 gomp_map_kind k
9174 = ((code == OACC_EXIT_DATA || code == OMP_TARGET_EXIT_DATA)
9175 ? GOMP_MAP_DETACH : GOMP_MAP_ATTACH);
9176 OMP_CLAUSE_SET_MAP_KIND (c, k);
9177 has_attachments = true;
9178 }
9179 if (n == NULL || (n->value & GOVD_MAP) == 0)
9180 {
9181 tree l = build_omp_clause (OMP_CLAUSE_LOCATION (c),
9182 OMP_CLAUSE_MAP);
9183 gomp_map_kind k = attach ? GOMP_MAP_FORCE_PRESENT
9184 : GOMP_MAP_STRUCT;
9185
9186 OMP_CLAUSE_SET_MAP_KIND (l, k);
9187 if (base_ref)
9188 OMP_CLAUSE_DECL (l) = unshare_expr (base_ref);
9189 else
9190 OMP_CLAUSE_DECL (l) = decl;
9191 OMP_CLAUSE_SIZE (l)
9192 = (!attach
9193 ? size_int (1)
9194 : DECL_P (OMP_CLAUSE_DECL (l))
9195 ? DECL_SIZE_UNIT (OMP_CLAUSE_DECL (l))
9196 : TYPE_SIZE_UNIT (TREE_TYPE (OMP_CLAUSE_DECL (l))));
9197 if (struct_map_to_clause == NULL)
9198 struct_map_to_clause = new hash_map<tree, tree>;
9199 struct_map_to_clause->put (decl, l);
9200 if (ptr || attach_detach)
9201 {
9202 insert_struct_comp_map (code, c, l, *prev_list_p,
9203 NULL);
9204 *prev_list_p = l;
9205 prev_list_p = NULL;
9206 }
9207 else
9208 {
9209 OMP_CLAUSE_CHAIN (l) = c;
9210 *list_p = l;
9211 list_p = &OMP_CLAUSE_CHAIN (l);
9212 }
9213 if (base_ref && code == OMP_TARGET)
9214 {
9215 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
9216 OMP_CLAUSE_MAP);
9217 enum gomp_map_kind mkind
9218 = GOMP_MAP_FIRSTPRIVATE_REFERENCE;
9219 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
9220 OMP_CLAUSE_DECL (c2) = decl;
9221 OMP_CLAUSE_SIZE (c2) = size_zero_node;
9222 OMP_CLAUSE_CHAIN (c2) = OMP_CLAUSE_CHAIN (l);
9223 OMP_CLAUSE_CHAIN (l) = c2;
9224 }
9225 flags = GOVD_MAP | GOVD_EXPLICIT;
9226 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c))
9227 || ptr
9228 || attach_detach)
9229 flags |= GOVD_SEEN;
9230 if (has_attachments)
9231 flags |= GOVD_MAP_HAS_ATTACHMENTS;
9232 goto do_add_decl;
9233 }
9234 else if (struct_map_to_clause)
9235 {
9236 tree *osc = struct_map_to_clause->get (decl);
9237 tree *sc = NULL, *scp = NULL;
9238 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c))
9239 || ptr
9240 || attach_detach)
9241 n->value |= GOVD_SEEN;
9242 sc = &OMP_CLAUSE_CHAIN (*osc);
9243 if (*sc != c
9244 && (OMP_CLAUSE_MAP_KIND (*sc)
9245 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
9246 sc = &OMP_CLAUSE_CHAIN (*sc);
9247 /* Here "prev_list_p" is the end of the inserted
9248 alloc/release nodes after the struct node, OSC. */
9249 for (; *sc != c; sc = &OMP_CLAUSE_CHAIN (*sc))
9250 if ((ptr || attach_detach) && sc == prev_list_p)
9251 break;
9252 else if (TREE_CODE (OMP_CLAUSE_DECL (*sc))
9253 != COMPONENT_REF
9254 && (TREE_CODE (OMP_CLAUSE_DECL (*sc))
9255 != INDIRECT_REF)
9256 && (TREE_CODE (OMP_CLAUSE_DECL (*sc))
9257 != ARRAY_REF))
9258 break;
9259 else
9260 {
9261 tree sc_decl = OMP_CLAUSE_DECL (*sc);
9262 poly_offset_int offsetn;
9263 poly_int64 bitposn;
9264 tree base
9265 = extract_base_bit_offset (sc_decl, NULL,
9266 &bitposn, &offsetn);
9267 if (base != decl)
9268 break;
9269 if (scp)
9270 continue;
9271 if ((region_type & ORT_ACC) != 0)
9272 {
9273 /* This duplicate checking code is currently only
9274 enabled for OpenACC. */
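/* Illustrative example (added commentary; S is made up): naming the
   same member twice, as in 'copy (s.a, s.a)', reaches the
   "appears more than once" diagnostic below.  */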
9275 tree d1 = OMP_CLAUSE_DECL (*sc);
9276 tree d2 = OMP_CLAUSE_DECL (c);
9277 while (TREE_CODE (d1) == ARRAY_REF)
9278 d1 = TREE_OPERAND (d1, 0);
9279 while (TREE_CODE (d2) == ARRAY_REF)
9280 d2 = TREE_OPERAND (d2, 0);
9281 if (TREE_CODE (d1) == INDIRECT_REF)
9282 d1 = TREE_OPERAND (d1, 0);
9283 if (TREE_CODE (d2) == INDIRECT_REF)
9284 d2 = TREE_OPERAND (d2, 0);
9285 while (TREE_CODE (d1) == COMPONENT_REF)
9286 if (TREE_CODE (d2) == COMPONENT_REF
9287 && TREE_OPERAND (d1, 1)
9288 == TREE_OPERAND (d2, 1))
9289 {
9290 d1 = TREE_OPERAND (d1, 0);
9291 d2 = TREE_OPERAND (d2, 0);
9292 }
9293 else
9294 break;
9295 if (d1 == d2)
9296 {
9297 error_at (OMP_CLAUSE_LOCATION (c),
9298 "%qE appears more than once in map "
9299 "clauses", OMP_CLAUSE_DECL (c));
9300 remove = true;
9301 break;
9302 }
9303 }
9304 if (maybe_lt (offset1, offsetn)
9305 || (known_eq (offset1, offsetn)
9306 && maybe_lt (bitpos1, bitposn)))
9307 {
9308 if (ptr || attach_detach)
9309 scp = sc;
9310 else
9311 break;
9312 }
9313 }
9314 if (remove)
9315 break;
9316 if (!attach)
9317 OMP_CLAUSE_SIZE (*osc)
9318 = size_binop (PLUS_EXPR, OMP_CLAUSE_SIZE (*osc),
9319 size_one_node);
9320 if (ptr || attach_detach)
9321 {
9322 tree cl = insert_struct_comp_map (code, c, NULL,
9323 *prev_list_p, scp);
9324 if (sc == prev_list_p)
9325 {
9326 *sc = cl;
9327 prev_list_p = NULL;
9328 }
9329 else
9330 {
9331 *prev_list_p = OMP_CLAUSE_CHAIN (c);
9332 list_p = prev_list_p;
9333 prev_list_p = NULL;
9334 OMP_CLAUSE_CHAIN (c) = *sc;
9335 *sc = cl;
9336 continue;
9337 }
9338 }
9339 else if (*sc != c)
9340 {
9341 *list_p = OMP_CLAUSE_CHAIN (c);
9342 OMP_CLAUSE_CHAIN (c) = *sc;
9343 *sc = c;
9344 continue;
9345 }
9346 }
9347 }
9348
9349 if (gimplify_expr (pd, pre_p, NULL, is_gimple_lvalue, fb_lvalue)
9350 == GS_ERROR)
9351 {
9352 remove = true;
9353 break;
9354 }
9355
9356 if (!remove
9357 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_POINTER
9358 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ATTACH_DETACH
9359 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_TO_PSET
9360 && OMP_CLAUSE_CHAIN (c)
9361 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c)) == OMP_CLAUSE_MAP
9362 && ((OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
9363 == GOMP_MAP_ALWAYS_POINTER)
9364 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
9365 == GOMP_MAP_ATTACH_DETACH)
9366 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
9367 == GOMP_MAP_TO_PSET)))
9368 prev_list_p = list_p;
9369
9370 break;
9371 }
9372 else
9373 {
9374 /* DECL_P (decl) == true */
9375 tree *sc;
9376 if (struct_map_to_clause
9377 && (sc = struct_map_to_clause->get (decl)) != NULL
9378 && OMP_CLAUSE_MAP_KIND (*sc) == GOMP_MAP_STRUCT
9379 && decl == OMP_CLAUSE_DECL (*sc))
9380 {
9381 /* We have found a map of the whole structure after a
9382 leading GOMP_MAP_STRUCT has been created, so rewrite the
9383 leading clause into a map of the whole structure
9384 variable, and remove the current one.
9385 TODO: we should be able to remove some maps of the
9386 following structure element maps if they are of
9387 compatible TO/FROM/ALLOC type. */
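/* Illustrative example (added commentary; S is made up): for clauses
   such as

     map(tofrom: s.a, s.b) map(tofrom: s)

   the whole-struct map arrives after the synthesized GOMP_MAP_STRUCT
   node; the struct node takes over the map kind and size, and the
   later clause is dropped.  */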
9388 OMP_CLAUSE_SET_MAP_KIND (*sc, OMP_CLAUSE_MAP_KIND (c));
9389 OMP_CLAUSE_SIZE (*sc) = unshare_expr (OMP_CLAUSE_SIZE (c));
9390 remove = true;
9391 break;
9392 }
9393 }
9394 flags = GOVD_MAP | GOVD_EXPLICIT;
9395 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TO
9396 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TOFROM)
9397 flags |= GOVD_MAP_ALWAYS_TO;
9398
9399 if ((code == OMP_TARGET
9400 || code == OMP_TARGET_DATA
9401 || code == OMP_TARGET_ENTER_DATA
9402 || code == OMP_TARGET_EXIT_DATA)
9403 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
9404 {
9405 for (struct gimplify_omp_ctx *octx = outer_ctx; octx;
9406 octx = octx->outer_context)
9407 {
9408 splay_tree_node n
9409 = splay_tree_lookup (octx->variables,
9410 (splay_tree_key) OMP_CLAUSE_DECL (c));
9411 /* If this is contained in an outer OpenMP region as a
9412 firstprivate value, remove the attach/detach. */
9413 if (n && (n->value & GOVD_FIRSTPRIVATE))
9414 {
9415 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FIRSTPRIVATE_POINTER);
9416 goto do_add;
9417 }
9418 }
9419
9420 enum gomp_map_kind map_kind = (code == OMP_TARGET_EXIT_DATA
9421 ? GOMP_MAP_DETACH
9422 : GOMP_MAP_ATTACH);
9423 OMP_CLAUSE_SET_MAP_KIND (c, map_kind);
9424 }
9425
9426 goto do_add;
9427
9428 case OMP_CLAUSE_DEPEND:
9429 if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
9430 {
9431 tree deps = OMP_CLAUSE_DECL (c);
9432 while (deps && TREE_CODE (deps) == TREE_LIST)
9433 {
9434 if (TREE_CODE (TREE_PURPOSE (deps)) == TRUNC_DIV_EXPR
9435 && DECL_P (TREE_OPERAND (TREE_PURPOSE (deps), 1)))
9436 gimplify_expr (&TREE_OPERAND (TREE_PURPOSE (deps), 1),
9437 pre_p, NULL, is_gimple_val, fb_rvalue);
9438 deps = TREE_CHAIN (deps);
9439 }
9440 break;
9441 }
9442 else if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE)
9443 break;
9444 if (handled_depend_iterators == -1)
9445 handled_depend_iterators = gimplify_omp_depend (list_p, pre_p);
9446 if (handled_depend_iterators)
9447 {
9448 if (handled_depend_iterators == 2)
9449 remove = true;
9450 break;
9451 }
9452 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
9453 {
9454 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
9455 NULL, is_gimple_val, fb_rvalue);
9456 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
9457 }
9458 if (error_operand_p (OMP_CLAUSE_DECL (c)))
9459 {
9460 remove = true;
9461 break;
9462 }
9463 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
9464 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
9465 is_gimple_val, fb_rvalue) == GS_ERROR)
9466 {
9467 remove = true;
9468 break;
9469 }
9470 break;
9471
9472 case OMP_CLAUSE_TO:
9473 case OMP_CLAUSE_FROM:
9474 case OMP_CLAUSE__CACHE_:
9475 decl = OMP_CLAUSE_DECL (c);
9476 if (error_operand_p (decl))
9477 {
9478 remove = true;
9479 break;
9480 }
9481 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
9482 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
9483 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
9484 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
9485 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
9486 {
9487 remove = true;
9488 break;
9489 }
9490 if (!DECL_P (decl))
9491 {
9492 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p,
9493 NULL, is_gimple_lvalue, fb_lvalue)
9494 == GS_ERROR)
9495 {
9496 remove = true;
9497 break;
9498 }
9499 break;
9500 }
9501 goto do_notice;
9502
9503 case OMP_CLAUSE_USE_DEVICE_PTR:
9504 case OMP_CLAUSE_USE_DEVICE_ADDR:
9505 flags = GOVD_EXPLICIT;
9506 goto do_add;
9507
9508 case OMP_CLAUSE_IS_DEVICE_PTR:
9509 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
9510 goto do_add;
9511
9512 do_add:
9513 decl = OMP_CLAUSE_DECL (c);
9514 do_add_decl:
9515 if (error_operand_p (decl))
9516 {
9517 remove = true;
9518 break;
9519 }
9520 if (DECL_NAME (decl) == NULL_TREE && (flags & GOVD_SHARED) == 0)
9521 {
9522 tree t = omp_member_access_dummy_var (decl);
9523 if (t)
9524 {
9525 tree v = DECL_VALUE_EXPR (decl);
9526 DECL_NAME (decl) = DECL_NAME (TREE_OPERAND (v, 1));
9527 if (outer_ctx)
9528 omp_notice_variable (outer_ctx, t, true);
9529 }
9530 }
9531 if (code == OACC_DATA
9532 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
9533 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
9534 flags |= GOVD_MAP_0LEN_ARRAY;
9535 omp_add_variable (ctx, decl, flags);
9536 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
9537 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
9538 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
9539 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9540 {
9541 omp_add_variable (ctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
9542 GOVD_LOCAL | GOVD_SEEN);
9543 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c)
9544 && walk_tree (&OMP_CLAUSE_REDUCTION_INIT (c),
9545 find_decl_expr,
9546 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
9547 NULL) == NULL_TREE)
9548 omp_add_variable (ctx,
9549 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
9550 GOVD_LOCAL | GOVD_SEEN);
9551 gimplify_omp_ctxp = ctx;
9552 push_gimplify_context ();
9553
9554 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
9555 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
9556
9557 gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
9558 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
9559 pop_gimplify_context
9560 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
9561 push_gimplify_context ();
9562 gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
9563 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
9564 pop_gimplify_context
9565 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
9566 OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
9567 OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;
9568
9569 gimplify_omp_ctxp = outer_ctx;
9570 }
9571 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
9572 && OMP_CLAUSE_LASTPRIVATE_STMT (c))
9573 {
9574 gimplify_omp_ctxp = ctx;
9575 push_gimplify_context ();
9576 if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
9577 {
9578 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
9579 NULL, NULL);
9580 TREE_SIDE_EFFECTS (bind) = 1;
9581 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
9582 OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
9583 }
9584 gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
9585 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
9586 pop_gimplify_context
9587 (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
9588 OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;
9589
9590 gimplify_omp_ctxp = outer_ctx;
9591 }
9592 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
9593 && OMP_CLAUSE_LINEAR_STMT (c))
9594 {
9595 gimplify_omp_ctxp = ctx;
9596 push_gimplify_context ();
9597 if (TREE_CODE (OMP_CLAUSE_LINEAR_STMT (c)) != BIND_EXPR)
9598 {
9599 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
9600 NULL, NULL);
9601 TREE_SIDE_EFFECTS (bind) = 1;
9602 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LINEAR_STMT (c);
9603 OMP_CLAUSE_LINEAR_STMT (c) = bind;
9604 }
9605 gimplify_and_add (OMP_CLAUSE_LINEAR_STMT (c),
9606 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
9607 pop_gimplify_context
9608 (gimple_seq_first_stmt (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c)));
9609 OMP_CLAUSE_LINEAR_STMT (c) = NULL_TREE;
9610
9611 gimplify_omp_ctxp = outer_ctx;
9612 }
9613 if (notice_outer)
9614 goto do_notice;
9615 break;
9616
9617 case OMP_CLAUSE_COPYIN:
9618 case OMP_CLAUSE_COPYPRIVATE:
9619 decl = OMP_CLAUSE_DECL (c);
9620 if (error_operand_p (decl))
9621 {
9622 remove = true;
9623 break;
9624 }
9625 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_COPYPRIVATE
9626 && !remove
9627 && !omp_check_private (ctx, decl, true))
9628 {
9629 remove = true;
9630 if (is_global_var (decl))
9631 {
9632 if (DECL_THREAD_LOCAL_P (decl))
9633 remove = false;
9634 else if (DECL_HAS_VALUE_EXPR_P (decl))
9635 {
9636 tree value = get_base_address (DECL_VALUE_EXPR (decl));
9637
9638 if (value
9639 && DECL_P (value)
9640 && DECL_THREAD_LOCAL_P (value))
9641 remove = false;
9642 }
9643 }
9644 if (remove)
9645 error_at (OMP_CLAUSE_LOCATION (c),
9646 "copyprivate variable %qE is not threadprivate"
9647 " or private in outer context", DECL_NAME (decl));
9648 }
9649 do_notice:
9650 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
9651 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
9652 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
9653 && outer_ctx
9654 && ((region_type & ORT_TASKLOOP) == ORT_TASKLOOP
9655 || (region_type == ORT_WORKSHARE
9656 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
9657 && (OMP_CLAUSE_REDUCTION_INSCAN (c)
9658 || code == OMP_LOOP)))
9659 && (outer_ctx->region_type == ORT_COMBINED_PARALLEL
9660 || (code == OMP_LOOP
9661 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
9662 && ((outer_ctx->region_type & ORT_COMBINED_TEAMS)
9663 == ORT_COMBINED_TEAMS))))
9664 {
9665 splay_tree_node on
9666 = splay_tree_lookup (outer_ctx->variables,
9667 (splay_tree_key)decl);
9668 if (on == NULL || (on->value & GOVD_DATA_SHARE_CLASS) == 0)
9669 {
9670 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
9671 && TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
9672 && (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
9673 || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
9674 && (TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
9675 == POINTER_TYPE))))
9676 omp_firstprivatize_variable (outer_ctx, decl);
9677 else
9678 {
9679 omp_add_variable (outer_ctx, decl,
9680 GOVD_SEEN | GOVD_SHARED);
9681 if (outer_ctx->outer_context)
9682 omp_notice_variable (outer_ctx->outer_context, decl,
9683 true);
9684 }
9685 }
9686 }
9687 if (outer_ctx)
9688 omp_notice_variable (outer_ctx, decl, true);
9689 if (check_non_private
9690 && region_type == ORT_WORKSHARE
9691 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
9692 || decl == OMP_CLAUSE_DECL (c)
9693 || (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
9694 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
9695 == ADDR_EXPR
9696 || (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
9697 == POINTER_PLUS_EXPR
9698 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND
9699 (OMP_CLAUSE_DECL (c), 0), 0))
9700 == ADDR_EXPR)))))
9701 && omp_check_private (ctx, decl, false))
9702 {
9703 error ("%s variable %qE is private in outer context",
9704 check_non_private, DECL_NAME (decl));
9705 remove = true;
9706 }
9707 break;
9708
9709 case OMP_CLAUSE_IF:
9710 if (OMP_CLAUSE_IF_MODIFIER (c) != ERROR_MARK
9711 && OMP_CLAUSE_IF_MODIFIER (c) != code)
9712 {
9713 const char *p[2];
9714 for (int i = 0; i < 2; i++)
9715 switch (i ? OMP_CLAUSE_IF_MODIFIER (c) : code)
9716 {
9717 case VOID_CST: p[i] = "cancel"; break;
9718 case OMP_PARALLEL: p[i] = "parallel"; break;
9719 case OMP_SIMD: p[i] = "simd"; break;
9720 case OMP_TASK: p[i] = "task"; break;
9721 case OMP_TASKLOOP: p[i] = "taskloop"; break;
9722 case OMP_TARGET_DATA: p[i] = "target data"; break;
9723 case OMP_TARGET: p[i] = "target"; break;
9724 case OMP_TARGET_UPDATE: p[i] = "target update"; break;
9725 case OMP_TARGET_ENTER_DATA:
9726 p[i] = "target enter data"; break;
9727 case OMP_TARGET_EXIT_DATA: p[i] = "target exit data"; break;
9728 default: gcc_unreachable ();
9729 }
9730 error_at (OMP_CLAUSE_LOCATION (c),
9731 "expected %qs %<if%> clause modifier rather than %qs",
9732 p[0], p[1]);
9733 remove = true;
9734 }
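/* Illustrative example (added commentary; X is made up):

     #pragma omp target if (parallel: x)

   reaches the diagnostic above, since the modifier names a directive
   other than the one carrying the clause.  */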
9735 /* Fall through. */
9736
9737 case OMP_CLAUSE_FINAL:
9738 OMP_CLAUSE_OPERAND (c, 0)
9739 = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
9740 /* Fall through. */
9741
9742 case OMP_CLAUSE_SCHEDULE:
9743 case OMP_CLAUSE_NUM_THREADS:
9744 case OMP_CLAUSE_NUM_TEAMS:
9745 case OMP_CLAUSE_THREAD_LIMIT:
9746 case OMP_CLAUSE_DIST_SCHEDULE:
9747 case OMP_CLAUSE_DEVICE:
9748 case OMP_CLAUSE_PRIORITY:
9749 case OMP_CLAUSE_GRAINSIZE:
9750 case OMP_CLAUSE_NUM_TASKS:
9751 case OMP_CLAUSE_HINT:
9752 case OMP_CLAUSE_ASYNC:
9753 case OMP_CLAUSE_WAIT:
9754 case OMP_CLAUSE_NUM_GANGS:
9755 case OMP_CLAUSE_NUM_WORKERS:
9756 case OMP_CLAUSE_VECTOR_LENGTH:
9757 case OMP_CLAUSE_WORKER:
9758 case OMP_CLAUSE_VECTOR:
9759 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
9760 is_gimple_val, fb_rvalue) == GS_ERROR)
9761 remove = true;
9762 break;
9763
9764 case OMP_CLAUSE_GANG:
9765 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
9766 is_gimple_val, fb_rvalue) == GS_ERROR)
9767 remove = true;
9768 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 1), pre_p, NULL,
9769 is_gimple_val, fb_rvalue) == GS_ERROR)
9770 remove = true;
9771 break;
9772
9773 case OMP_CLAUSE_NOWAIT:
9774 nowait = 1;
9775 break;
9776
9777 case OMP_CLAUSE_ORDERED:
9778 case OMP_CLAUSE_UNTIED:
9779 case OMP_CLAUSE_COLLAPSE:
9780 case OMP_CLAUSE_TILE:
9781 case OMP_CLAUSE_AUTO:
9782 case OMP_CLAUSE_SEQ:
9783 case OMP_CLAUSE_INDEPENDENT:
9784 case OMP_CLAUSE_MERGEABLE:
9785 case OMP_CLAUSE_PROC_BIND:
9786 case OMP_CLAUSE_SAFELEN:
9787 case OMP_CLAUSE_SIMDLEN:
9788 case OMP_CLAUSE_NOGROUP:
9789 case OMP_CLAUSE_THREADS:
9790 case OMP_CLAUSE_SIMD:
9791 case OMP_CLAUSE_BIND:
9792 case OMP_CLAUSE_IF_PRESENT:
9793 case OMP_CLAUSE_FINALIZE:
9794 break;
9795
9796 case OMP_CLAUSE_ORDER:
9797 ctx->order_concurrent = true;
9798 break;
9799
9800 case OMP_CLAUSE_DEFAULTMAP:
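/* Illustrative example (added commentary): e.g.
   'defaultmap (firstprivate: scalar)' sets ctx->defaultmap[GDMK_SCALAR]
   to GOVD_FIRSTPRIVATE below, so scalars without an explicit clause on
   the construct become firstprivate.  */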
9801 enum gimplify_defaultmap_kind gdmkmin, gdmkmax;
9802 switch (OMP_CLAUSE_DEFAULTMAP_CATEGORY (c))
9803 {
9804 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED:
9805 gdmkmin = GDMK_SCALAR;
9806 gdmkmax = GDMK_POINTER;
9807 break;
9808 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_SCALAR:
9809 gdmkmin = gdmkmax = GDMK_SCALAR;
9810 break;
9811 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_AGGREGATE:
9812 gdmkmin = gdmkmax = GDMK_AGGREGATE;
9813 break;
9814 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_ALLOCATABLE:
9815 gdmkmin = gdmkmax = GDMK_ALLOCATABLE;
9816 break;
9817 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_POINTER:
9818 gdmkmin = gdmkmax = GDMK_POINTER;
9819 break;
9820 default:
9821 gcc_unreachable ();
9822 }
9823 for (int gdmk = gdmkmin; gdmk <= gdmkmax; gdmk++)
9824 switch (OMP_CLAUSE_DEFAULTMAP_BEHAVIOR (c))
9825 {
9826 case OMP_CLAUSE_DEFAULTMAP_ALLOC:
9827 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_ALLOC_ONLY;
9828 break;
9829 case OMP_CLAUSE_DEFAULTMAP_TO:
9830 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_TO_ONLY;
9831 break;
9832 case OMP_CLAUSE_DEFAULTMAP_FROM:
9833 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_FROM_ONLY;
9834 break;
9835 case OMP_CLAUSE_DEFAULTMAP_TOFROM:
9836 ctx->defaultmap[gdmk] = GOVD_MAP;
9837 break;
9838 case OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE:
9839 ctx->defaultmap[gdmk] = GOVD_FIRSTPRIVATE;
9840 break;
9841 case OMP_CLAUSE_DEFAULTMAP_NONE:
9842 ctx->defaultmap[gdmk] = 0;
9843 break;
9844 case OMP_CLAUSE_DEFAULTMAP_DEFAULT:
9845 switch (gdmk)
9846 {
9847 case GDMK_SCALAR:
9848 ctx->defaultmap[gdmk] = GOVD_FIRSTPRIVATE;
9849 break;
9850 case GDMK_AGGREGATE:
9851 case GDMK_ALLOCATABLE:
9852 ctx->defaultmap[gdmk] = GOVD_MAP;
9853 break;
9854 case GDMK_POINTER:
9855 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_0LEN_ARRAY;
9856 break;
9857 default:
9858 gcc_unreachable ();
9859 }
9860 break;
9861 default:
9862 gcc_unreachable ();
9863 }
9864 break;
9865
9866 case OMP_CLAUSE_ALIGNED:
9867 decl = OMP_CLAUSE_DECL (c);
9868 if (error_operand_p (decl))
9869 {
9870 remove = true;
9871 break;
9872 }
9873 if (gimplify_expr (&OMP_CLAUSE_ALIGNED_ALIGNMENT (c), pre_p, NULL,
9874 is_gimple_val, fb_rvalue) == GS_ERROR)
9875 {
9876 remove = true;
9877 break;
9878 }
9879 if (!is_global_var (decl)
9880 && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
9881 omp_add_variable (ctx, decl, GOVD_ALIGNED);
9882 break;
9883
9884 case OMP_CLAUSE_NONTEMPORAL:
9885 decl = OMP_CLAUSE_DECL (c);
9886 if (error_operand_p (decl))
9887 {
9888 remove = true;
9889 break;
9890 }
9891 omp_add_variable (ctx, decl, GOVD_NONTEMPORAL);
9892 break;
9893
9894 case OMP_CLAUSE_ALLOCATE:
9895 decl = OMP_CLAUSE_DECL (c);
9896 if (error_operand_p (decl))
9897 {
9898 remove = true;
9899 break;
9900 }
9901 if (gimplify_expr (&OMP_CLAUSE_ALLOCATE_ALLOCATOR (c), pre_p, NULL,
9902 is_gimple_val, fb_rvalue) == GS_ERROR)
9903 {
9904 remove = true;
9905 break;
9906 }
9907 else if (code == OMP_TASKLOOP
9908 && OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)
9909 && (TREE_CODE (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c))
9910 != INTEGER_CST))
9911 OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)
9912 = get_initialized_tmp_var (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c),
9913 pre_p, NULL, false);
9914 break;
9915
9916 case OMP_CLAUSE_DEFAULT:
9917 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
9918 break;
9919
9920 case OMP_CLAUSE_INCLUSIVE:
9921 case OMP_CLAUSE_EXCLUSIVE:
9922 decl = OMP_CLAUSE_DECL (c);
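/* Illustrative example (added commentary; S, I and N are made up): in

     #pragma omp for reduction (inscan, +: s)
     for (i = 0; i < n; i++)
       {
         s += a[i];
         #pragma omp scan inclusive (s)
         b[i] = s;
       }

   S must appear in the containing construct's inscan reduction clause;
   otherwise the diagnostic below is emitted.  */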
9923 {
9924 splay_tree_node n = splay_tree_lookup (outer_ctx->variables,
9925 (splay_tree_key) decl);
9926 if (n == NULL || (n->value & GOVD_REDUCTION) == 0)
9927 {
9928 error_at (OMP_CLAUSE_LOCATION (c),
9929 "%qD specified in %qs clause but not in %<inscan%> "
9930 "%<reduction%> clause on the containing construct",
9931 decl, omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
9932 remove = true;
9933 }
9934 else
9935 {
9936 n->value |= GOVD_REDUCTION_INSCAN;
9937 if (outer_ctx->region_type == ORT_SIMD
9938 && outer_ctx->outer_context
9939 && outer_ctx->outer_context->region_type == ORT_WORKSHARE)
9940 {
9941 n = splay_tree_lookup (outer_ctx->outer_context->variables,
9942 (splay_tree_key) decl);
9943 if (n && (n->value & GOVD_REDUCTION) != 0)
9944 n->value |= GOVD_REDUCTION_INSCAN;
9945 }
9946 }
9947 }
9948 break;
9949
9950 default:
9951 gcc_unreachable ();
9952 }
9953
9954 if (code == OACC_DATA
9955 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
9956 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
9957 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
9958 remove = true;
9959 if (remove)
9960 *list_p = OMP_CLAUSE_CHAIN (c);
9961 else
9962 list_p = &OMP_CLAUSE_CHAIN (c);
9963 }
9964
9965 ctx->clauses = *orig_list_p;
9966 gimplify_omp_ctxp = ctx;
9967 if (struct_map_to_clause)
9968 delete struct_map_to_clause;
9969 if (struct_deref_set)
9970 delete struct_deref_set;
9971 }
9972
9973 /* Return true if DECL is a candidate for shared to firstprivate
9974 optimization. We only consider non-addressable scalars that are
9975 not too big and are not references. */
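/* Illustrative note (added commentary): a plain 'int x' local qualifies;
   an array, a variable whose address is taken, or a C++ reference does
   not, and neither does anything larger than four pointers' worth of
   bytes.  */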
9976
9977 static bool
9978 omp_shared_to_firstprivate_optimizable_decl_p (tree decl)
9979 {
9980 if (TREE_ADDRESSABLE (decl))
9981 return false;
9982 tree type = TREE_TYPE (decl);
9983 if (!is_gimple_reg_type (type)
9984 || TREE_CODE (type) == REFERENCE_TYPE
9985 || TREE_ADDRESSABLE (type))
9986 return false;
9987 /* Don't optimize too large decls, as each thread/task will have
9988 its own. */
9989 HOST_WIDE_INT len = int_size_in_bytes (type);
9990 if (len == -1 || len > 4 * POINTER_SIZE / BITS_PER_UNIT)
9991 return false;
9992 if (lang_hooks.decls.omp_privatize_by_reference (decl))
9993 return false;
9994 return true;
9995 }
9996
9997 /* Helper function of omp_find_stores_op and gimplify_adjust_omp_clauses*.
9998 For a DECL that satisfies omp_shared_to_firstprivate_optimizable_decl_p,
9999 mark it as GOVD_WRITTEN in the outer contexts. */
10000
10001 static void
10002 omp_mark_stores (struct gimplify_omp_ctx *ctx, tree decl)
10003 {
10004 for (; ctx; ctx = ctx->outer_context)
10005 {
10006 splay_tree_node n = splay_tree_lookup (ctx->variables,
10007 (splay_tree_key) decl);
10008 if (n == NULL)
10009 continue;
10010 else if (n->value & GOVD_SHARED)
10011 {
10012 n->value |= GOVD_WRITTEN;
10013 return;
10014 }
10015 else if (n->value & GOVD_DATA_SHARE_CLASS)
10016 return;
10017 }
10018 }
10019
10020 /* Helper callback for walk_gimple_seq to discover possible stores
10021 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
10022 GOVD_WRITTEN if they are GOVD_SHARED in some outer context
10023 for those. */
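/* Illustrative note (added commentary; X is made up): for a store such
   as 'x.f = 1' or '(*&x) = 1' the loop below peels the handled
   components and the MEM_REF-of-ADDR_EXPR wrapper down to the base
   decl X before marking it.  */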
10024
10025 static tree
10026 omp_find_stores_op (tree *tp, int *walk_subtrees, void *data)
10027 {
10028 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
10029
10030 *walk_subtrees = 0;
10031 if (!wi->is_lhs)
10032 return NULL_TREE;
10033
10034 tree op = *tp;
10035 do
10036 {
10037 if (handled_component_p (op))
10038 op = TREE_OPERAND (op, 0);
10039 else if ((TREE_CODE (op) == MEM_REF || TREE_CODE (op) == TARGET_MEM_REF)
10040 && TREE_CODE (TREE_OPERAND (op, 0)) == ADDR_EXPR)
10041 op = TREE_OPERAND (TREE_OPERAND (op, 0), 0);
10042 else
10043 break;
10044 }
10045 while (1);
10046 if (!DECL_P (op) || !omp_shared_to_firstprivate_optimizable_decl_p (op))
10047 return NULL_TREE;
10048
10049 omp_mark_stores (gimplify_omp_ctxp, op);
10050 return NULL_TREE;
10051 }
10052
10053 /* Statement-level counterpart of omp_find_stores_op: helper callback
10054 for walk_gimple_seq to discover possible stores to
10055 omp_shared_to_firstprivate_optimizable_decl_p decls and set
10056 GOVD_WRITTEN if they are GOVD_SHARED in some outer context. */
10057
10058 static tree
10059 omp_find_stores_stmt (gimple_stmt_iterator *gsi_p,
10060 bool *handled_ops_p,
10061 struct walk_stmt_info *wi)
10062 {
10063 gimple *stmt = gsi_stmt (*gsi_p);
10064 switch (gimple_code (stmt))
10065 {
10066 /* Don't recurse into OpenMP constructs for which
10067 gimplify_adjust_omp_clauses has already handled the bodies,
10068 but do walk gimple_omp_for_pre_body. */
10069 case GIMPLE_OMP_FOR:
10070 *handled_ops_p = true;
10071 if (gimple_omp_for_pre_body (stmt))
10072 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
10073 omp_find_stores_stmt, omp_find_stores_op, wi);
10074 break;
10075 case GIMPLE_OMP_PARALLEL:
10076 case GIMPLE_OMP_TASK:
10077 case GIMPLE_OMP_SECTIONS:
10078 case GIMPLE_OMP_SINGLE:
10079 case GIMPLE_OMP_TARGET:
10080 case GIMPLE_OMP_TEAMS:
10081 case GIMPLE_OMP_CRITICAL:
10082 *handled_ops_p = true;
10083 break;
10084 default:
10085 break;
10086 }
10087 return NULL_TREE;
10088 }
10089
10090 struct gimplify_adjust_omp_clauses_data
10091 {
10092 tree *list_p;
10093 gimple_seq *pre_p;
10094 };
10095
10096 /* Splay-tree callback: synthesize the implicit clause (MAP, SHARED, PRIVATE,
10097 FIRSTPRIVATE, ...) for each variable used in the context without an explicit clause. */
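/* Illustrative example (added commentary; X is made up): in

     int x = 1;
     #pragma omp parallel
     x++;

   X is seen in the region but has no explicit clause, so this callback
   synthesizes the implicit 'shared(x)' on the parallel.  */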
10098
10099 static int
10100 gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
10101 {
10102 tree *list_p = ((struct gimplify_adjust_omp_clauses_data *) data)->list_p;
10103 gimple_seq *pre_p
10104 = ((struct gimplify_adjust_omp_clauses_data *) data)->pre_p;
10105 tree decl = (tree) n->key;
10106 unsigned flags = n->value;
10107 enum omp_clause_code code;
10108 tree clause;
10109 bool private_debug;
10110
10111 if (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
10112 && (flags & GOVD_LASTPRIVATE_CONDITIONAL) != 0)
10113 flags = GOVD_SHARED | GOVD_SEEN | GOVD_WRITTEN;
10114 if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
10115 return 0;
10116 if ((flags & GOVD_SEEN) == 0)
10117 return 0;
10118 if ((flags & GOVD_MAP_HAS_ATTACHMENTS) != 0)
10119 return 0;
10120 if (flags & GOVD_DEBUG_PRIVATE)
10121 {
10122 gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_SHARED);
10123 private_debug = true;
10124 }
10125 else if (flags & GOVD_MAP)
10126 private_debug = false;
10127 else
10128 private_debug
10129 = lang_hooks.decls.omp_private_debug_clause (decl,
10130 !!(flags & GOVD_SHARED));
10131 if (private_debug)
10132 code = OMP_CLAUSE_PRIVATE;
10133 else if (flags & GOVD_MAP)
10134 {
10135 code = OMP_CLAUSE_MAP;
10136 if ((gimplify_omp_ctxp->region_type & ORT_ACC) == 0
10137 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
10138 {
10139 error ("%<_Atomic%> %qD in implicit %<map%> clause", decl);
10140 return 0;
10141 }
10142 if (VAR_P (decl)
10143 && DECL_IN_CONSTANT_POOL (decl)
10144 && !lookup_attribute ("omp declare target",
10145 DECL_ATTRIBUTES (decl)))
10146 {
10147 tree id = get_identifier ("omp declare target");
10148 DECL_ATTRIBUTES (decl)
10149 = tree_cons (id, NULL_TREE, DECL_ATTRIBUTES (decl));
10150 varpool_node *node = varpool_node::get (decl);
10151 if (node)
10152 {
10153 node->offloadable = 1;
10154 if (ENABLE_OFFLOADING)
10155 g->have_offload = true;
10156 }
10157 }
10158 }
10159 else if (flags & GOVD_SHARED)
10160 {
10161 if (is_global_var (decl))
10162 {
10163 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
10164 while (ctx != NULL)
10165 {
10166 splay_tree_node on
10167 = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
10168 if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
10169 | GOVD_PRIVATE | GOVD_REDUCTION
10170 | GOVD_LINEAR | GOVD_MAP)) != 0)
10171 break;
10172 ctx = ctx->outer_context;
10173 }
10174 if (ctx == NULL)
10175 return 0;
10176 }
10177 code = OMP_CLAUSE_SHARED;
10178 }
10179 else if (flags & GOVD_PRIVATE)
10180 code = OMP_CLAUSE_PRIVATE;
10181 else if (flags & GOVD_FIRSTPRIVATE)
10182 {
10183 code = OMP_CLAUSE_FIRSTPRIVATE;
10184 if ((gimplify_omp_ctxp->region_type & ORT_TARGET)
10185 && (gimplify_omp_ctxp->region_type & ORT_ACC) == 0
10186 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
10187 {
10188 error ("%<_Atomic%> %qD in implicit %<firstprivate%> clause on "
10189 "%<target%> construct", decl);
10190 return 0;
10191 }
10192 }
10193 else if (flags & GOVD_LASTPRIVATE)
10194 code = OMP_CLAUSE_LASTPRIVATE;
10195 else if (flags & (GOVD_ALIGNED | GOVD_NONTEMPORAL))
10196 return 0;
10197 else if (flags & GOVD_CONDTEMP)
10198 {
10199 code = OMP_CLAUSE__CONDTEMP_;
10200 gimple_add_tmp_var (decl);
10201 }
10202 else
10203 gcc_unreachable ();
10204
10205 if (((flags & GOVD_LASTPRIVATE)
10206 || (code == OMP_CLAUSE_SHARED && (flags & GOVD_WRITTEN)))
10207 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
10208 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
10209
10210 tree chain = *list_p;
10211 clause = build_omp_clause (input_location, code);
10212 OMP_CLAUSE_DECL (clause) = decl;
10213 OMP_CLAUSE_CHAIN (clause) = chain;
10214 if (private_debug)
10215 OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
10216 else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
10217 OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
10218 else if (code == OMP_CLAUSE_SHARED
10219 && (flags & GOVD_WRITTEN) == 0
10220 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
10221 OMP_CLAUSE_SHARED_READONLY (clause) = 1;
10222 else if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_EXPLICIT) == 0)
10223 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (clause) = 1;
10224 else if (code == OMP_CLAUSE_MAP && (flags & GOVD_MAP_0LEN_ARRAY) != 0)
10225 {
10226 tree nc = build_omp_clause (input_location, OMP_CLAUSE_MAP);
10227 OMP_CLAUSE_DECL (nc) = decl;
10228 if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
10229 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
10230 OMP_CLAUSE_DECL (clause)
10231 = build_simple_mem_ref_loc (input_location, decl);
10232 OMP_CLAUSE_DECL (clause)
10233 = build2 (MEM_REF, char_type_node, OMP_CLAUSE_DECL (clause),
10234 build_int_cst (build_pointer_type (char_type_node), 0));
10235 OMP_CLAUSE_SIZE (clause) = size_zero_node;
10236 OMP_CLAUSE_SIZE (nc) = size_zero_node;
10237 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_ALLOC);
10238 OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (clause) = 1;
10239 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
10240 OMP_CLAUSE_CHAIN (nc) = chain;
10241 OMP_CLAUSE_CHAIN (clause) = nc;
10242 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
10243 gimplify_omp_ctxp = ctx->outer_context;
10244 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (clause), 0),
10245 pre_p, NULL, is_gimple_val, fb_rvalue);
10246 gimplify_omp_ctxp = ctx;
10247 }
10248 else if (code == OMP_CLAUSE_MAP)
10249 {
10250 int kind;
10251 /* Not all combinations of these GOVD_MAP flags are actually valid. */
10252 switch (flags & (GOVD_MAP_TO_ONLY
10253 | GOVD_MAP_FORCE
10254 | GOVD_MAP_FORCE_PRESENT
10255 | GOVD_MAP_ALLOC_ONLY
10256 | GOVD_MAP_FROM_ONLY))
10257 {
10258 case 0:
10259 kind = GOMP_MAP_TOFROM;
10260 break;
10261 case GOVD_MAP_FORCE:
10262 kind = GOMP_MAP_TOFROM | GOMP_MAP_FLAG_FORCE;
10263 break;
10264 case GOVD_MAP_TO_ONLY:
10265 kind = GOMP_MAP_TO;
10266 break;
10267 case GOVD_MAP_FROM_ONLY:
10268 kind = GOMP_MAP_FROM;
10269 break;
10270 case GOVD_MAP_ALLOC_ONLY:
10271 kind = GOMP_MAP_ALLOC;
10272 break;
10273 case GOVD_MAP_TO_ONLY | GOVD_MAP_FORCE:
10274 kind = GOMP_MAP_TO | GOMP_MAP_FLAG_FORCE;
10275 break;
10276 case GOVD_MAP_FORCE_PRESENT:
10277 kind = GOMP_MAP_FORCE_PRESENT;
10278 break;
10279 default:
10280 gcc_unreachable ();
10281 }
10282 OMP_CLAUSE_SET_MAP_KIND (clause, kind);
10283 if (DECL_SIZE (decl)
10284 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
10285 {
10286 tree decl2 = DECL_VALUE_EXPR (decl);
10287 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
10288 decl2 = TREE_OPERAND (decl2, 0);
10289 gcc_assert (DECL_P (decl2));
10290 tree mem = build_simple_mem_ref (decl2);
10291 OMP_CLAUSE_DECL (clause) = mem;
10292 OMP_CLAUSE_SIZE (clause) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
10293 if (gimplify_omp_ctxp->outer_context)
10294 {
10295 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
10296 omp_notice_variable (ctx, decl2, true);
10297 omp_notice_variable (ctx, OMP_CLAUSE_SIZE (clause), true);
10298 }
10299 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
10300 OMP_CLAUSE_MAP);
10301 OMP_CLAUSE_DECL (nc) = decl;
10302 OMP_CLAUSE_SIZE (nc) = size_zero_node;
10303 if (gimplify_omp_ctxp->target_firstprivatize_array_bases)
10304 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
10305 else
10306 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
10307 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
10308 OMP_CLAUSE_CHAIN (clause) = nc;
10309 }
10310 else if (gimplify_omp_ctxp->target_firstprivatize_array_bases
10311 && lang_hooks.decls.omp_privatize_by_reference (decl))
10312 {
10313 OMP_CLAUSE_DECL (clause) = build_simple_mem_ref (decl);
10314 OMP_CLAUSE_SIZE (clause)
10315 = unshare_expr (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))));
10316 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
10317 gimplify_omp_ctxp = ctx->outer_context;
10318 gimplify_expr (&OMP_CLAUSE_SIZE (clause),
10319 pre_p, NULL, is_gimple_val, fb_rvalue);
10320 gimplify_omp_ctxp = ctx;
10321 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
10322 OMP_CLAUSE_MAP);
10323 OMP_CLAUSE_DECL (nc) = decl;
10324 OMP_CLAUSE_SIZE (nc) = size_zero_node;
10325 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_REFERENCE);
10326 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
10327 OMP_CLAUSE_CHAIN (clause) = nc;
10328 }
10329 else
10330 OMP_CLAUSE_SIZE (clause) = DECL_SIZE_UNIT (decl);
10331 }
10332 if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_LASTPRIVATE) != 0)
10333 {
10334 tree nc = build_omp_clause (input_location, OMP_CLAUSE_LASTPRIVATE);
10335 OMP_CLAUSE_DECL (nc) = decl;
10336 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (nc) = 1;
10337 OMP_CLAUSE_CHAIN (nc) = chain;
10338 OMP_CLAUSE_CHAIN (clause) = nc;
10339 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
10340 gimplify_omp_ctxp = ctx->outer_context;
10341 lang_hooks.decls.omp_finish_clause (nc, pre_p,
10342 (ctx->region_type & ORT_ACC) != 0);
10343 gimplify_omp_ctxp = ctx;
10344 }
10345 *list_p = clause;
10346 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
10347 gimplify_omp_ctxp = ctx->outer_context;
10348 lang_hooks.decls.omp_finish_clause (clause, pre_p,
10349 (ctx->region_type & ORT_ACC) != 0);
10350 if (gimplify_omp_ctxp)
10351 for (; clause != chain; clause = OMP_CLAUSE_CHAIN (clause))
10352 if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_MAP
10353 && DECL_P (OMP_CLAUSE_SIZE (clause)))
10354 omp_notice_variable (gimplify_omp_ctxp, OMP_CLAUSE_SIZE (clause),
10355 true);
10356 gimplify_omp_ctxp = ctx;
10357 return 0;
10358 }
10359
10360 static void
10361 gimplify_adjust_omp_clauses (gimple_seq *pre_p, gimple_seq body, tree *list_p,
10362 enum tree_code code)
10363 {
10364 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
10365 tree *orig_list_p = list_p;
10366 tree c, decl;
10367 bool has_inscan_reductions = false;
10368
10369 if (body)
10370 {
10371 struct gimplify_omp_ctx *octx;
10372 for (octx = ctx; octx; octx = octx->outer_context)
10373 if ((octx->region_type & (ORT_PARALLEL | ORT_TASK | ORT_TEAMS)) != 0)
10374 break;
10375 if (octx)
10376 {
10377 struct walk_stmt_info wi;
10378 memset (&wi, 0, sizeof (wi));
10379 walk_gimple_seq (body, omp_find_stores_stmt,
10380 omp_find_stores_op, &wi);
10381 }
10382 }
10383
10384 if (ctx->add_safelen1)
10385 {
10386 /* If there are VLAs in the body of the simd loop, prevent
10387 vectorization by adding 'safelen(1)'. */
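/* Illustrative example (added commentary; I, N and BUF are made up):
   a simd body containing a VLA, e.g.

     #pragma omp simd
     for (i = 0; i < n; i++)
       { char buf[n]; buf[0] = i; }

   gets 'safelen(1)', i.e. no two iterations may run concurrently.  */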
10388 gcc_assert (ctx->region_type == ORT_SIMD);
10389 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
10390 OMP_CLAUSE_SAFELEN_EXPR (c) = integer_one_node;
10391 OMP_CLAUSE_CHAIN (c) = *list_p;
10392 *list_p = c;
10393 list_p = &OMP_CLAUSE_CHAIN (c);
10394 }
10395
10396 if (ctx->region_type == ORT_WORKSHARE
10397 && ctx->outer_context
10398 && ctx->outer_context->region_type == ORT_COMBINED_PARALLEL)
10399 {
10400 for (c = ctx->outer_context->clauses; c; c = OMP_CLAUSE_CHAIN (c))
10401 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
10402 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
10403 {
10404 decl = OMP_CLAUSE_DECL (c);
10405 splay_tree_node n
10406 = splay_tree_lookup (ctx->outer_context->variables,
10407 (splay_tree_key) decl);
10408 gcc_checking_assert (!splay_tree_lookup (ctx->variables,
10409 (splay_tree_key) decl));
10410 omp_add_variable (ctx, decl, n->value);
10411 tree c2 = copy_node (c);
10412 OMP_CLAUSE_CHAIN (c2) = *list_p;
10413 *list_p = c2;
10414 if ((n->value & GOVD_FIRSTPRIVATE) == 0)
10415 continue;
10416 c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
10417 OMP_CLAUSE_FIRSTPRIVATE);
10418 OMP_CLAUSE_DECL (c2) = decl;
10419 OMP_CLAUSE_CHAIN (c2) = *list_p;
10420 *list_p = c2;
10421 }
10422 }
10423 while ((c = *list_p) != NULL)
10424 {
10425 splay_tree_node n;
10426 bool remove = false;
10427
10428 switch (OMP_CLAUSE_CODE (c))
10429 {
10430 case OMP_CLAUSE_FIRSTPRIVATE:
10431 if ((ctx->region_type & ORT_TARGET)
10432 && (ctx->region_type & ORT_ACC) == 0
10433 && TYPE_ATOMIC (strip_array_types
10434 (TREE_TYPE (OMP_CLAUSE_DECL (c)))))
10435 {
10436 error_at (OMP_CLAUSE_LOCATION (c),
10437 "%<_Atomic%> %qD in %<firstprivate%> clause on "
10438 "%<target%> construct", OMP_CLAUSE_DECL (c));
10439 remove = true;
10440 break;
10441 }
10442 /* FALLTHRU */
10443 case OMP_CLAUSE_PRIVATE:
10444 case OMP_CLAUSE_SHARED:
10445 case OMP_CLAUSE_LINEAR:
10446 decl = OMP_CLAUSE_DECL (c);
10447 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
10448 remove = !(n->value & GOVD_SEEN);
10449 if ((n->value & GOVD_LASTPRIVATE_CONDITIONAL) != 0
10450 && code == OMP_PARALLEL
10451 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
10452 remove = true;
10453 if (! remove)
10454 {
10455 bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
10456 if ((n->value & GOVD_DEBUG_PRIVATE)
10457 || lang_hooks.decls.omp_private_debug_clause (decl, shared))
10458 {
10459 gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
10460 || ((n->value & GOVD_DATA_SHARE_CLASS)
10461 == GOVD_SHARED));
10462 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
10463 OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
10464 }
10465 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
10466 && (n->value & GOVD_WRITTEN) == 0
10467 && DECL_P (decl)
10468 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
10469 OMP_CLAUSE_SHARED_READONLY (c) = 1;
10470 else if (DECL_P (decl)
10471 && ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
10472 && (n->value & GOVD_WRITTEN) != 0)
10473 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
10474 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
10475 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
10476 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
10477 }
10478 break;
10479
10480 case OMP_CLAUSE_LASTPRIVATE:
10481 /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
10482 accurately reflect the presence of a FIRSTPRIVATE clause. */
10483 decl = OMP_CLAUSE_DECL (c);
10484 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
10485 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
10486 = (n->value & GOVD_FIRSTPRIVATE) != 0;
10487 if (code == OMP_DISTRIBUTE
10488 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
10489 {
10490 remove = true;
10491 error_at (OMP_CLAUSE_LOCATION (c),
10492 "same variable used in %<firstprivate%> and "
10493 "%<lastprivate%> clauses on %<distribute%> "
10494 "construct");
10495 }
10496 if (!remove
10497 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
10498 && DECL_P (decl)
10499 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
10500 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
10501 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) && code == OMP_PARALLEL)
10502 remove = true;
10503 break;
10504
10505 case OMP_CLAUSE_ALIGNED:
10506 decl = OMP_CLAUSE_DECL (c);
10507 if (!is_global_var (decl))
10508 {
10509 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
10510 remove = n == NULL || !(n->value & GOVD_SEEN);
10511 if (!remove && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
10512 {
10513 struct gimplify_omp_ctx *octx;
10514 if (n != NULL
10515 && (n->value & (GOVD_DATA_SHARE_CLASS
10516 & ~GOVD_FIRSTPRIVATE)))
10517 remove = true;
10518 else
10519 for (octx = ctx->outer_context; octx;
10520 octx = octx->outer_context)
10521 {
10522 n = splay_tree_lookup (octx->variables,
10523 (splay_tree_key) decl);
10524 if (n == NULL)
10525 continue;
10526 if (n->value & GOVD_LOCAL)
10527 break;
10528 /* We have to avoid assigning a shared variable
10529 to itself when trying to add
10530 __builtin_assume_aligned. */
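/* Hedged note (added commentary; P is made up): e.g. for

     #pragma omp simd aligned (p:32)

   the lowering emits p = __builtin_assume_aligned (p, 32), which must
   not source P from a shared copy of itself.  */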
10531 if (n->value & GOVD_SHARED)
10532 {
10533 remove = true;
10534 break;
10535 }
10536 }
10537 }
10538 }
10539 else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
10540 {
10541 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
10542 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
10543 remove = true;
10544 }
10545 break;
10546
10547 case OMP_CLAUSE_NONTEMPORAL:
10548 decl = OMP_CLAUSE_DECL (c);
10549 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
10550 remove = n == NULL || !(n->value & GOVD_SEEN);
10551 break;
10552
10553 case OMP_CLAUSE_MAP:
10554 if (code == OMP_TARGET_EXIT_DATA
10555 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER)
10556 {
10557 remove = true;
10558 break;
10559 }
10560 decl = OMP_CLAUSE_DECL (c);
10561 /* Data clauses associated with reductions must be
10562 compatible with present_or_copy. Warn and adjust the clause
10563 if that is not the case. */
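/* Illustrative example (added commentary; SUM is made up): for

     #pragma acc parallel copyin (sum) reduction (+: sum)

   the copyin is promoted below to present_or_copy (tofrom), since the
   reduction result must be copied back out.  */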
10564 if (ctx->region_type == ORT_ACC_PARALLEL
10565 || ctx->region_type == ORT_ACC_SERIAL)
10566 {
10567 tree t = DECL_P (decl) ? decl : TREE_OPERAND (decl, 0);
10568 n = NULL;
10569
10570 if (DECL_P (t))
10571 n = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
10572
10573 if (n && (n->value & GOVD_REDUCTION))
10574 {
10575 enum gomp_map_kind kind = OMP_CLAUSE_MAP_KIND (c);
10576
10577 OMP_CLAUSE_MAP_IN_REDUCTION (c) = 1;
10578 if ((kind & GOMP_MAP_TOFROM) != GOMP_MAP_TOFROM
10579 && kind != GOMP_MAP_FORCE_PRESENT
10580 && kind != GOMP_MAP_POINTER)
10581 {
10582 warning_at (OMP_CLAUSE_LOCATION (c), 0,
10583 "incompatible data clause with reduction "
10584 "on %qE; promoting to %<present_or_copy%>",
10585 DECL_NAME (t));
10586 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
10587 }
10588 }
10589 }
10590 if (!DECL_P (decl))
10591 {
10592 if ((ctx->region_type & ORT_TARGET) != 0
10593 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
10594 {
10595 if (TREE_CODE (decl) == INDIRECT_REF
10596 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
10597 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
10598 == REFERENCE_TYPE))
10599 decl = TREE_OPERAND (decl, 0);
10600 if (TREE_CODE (decl) == COMPONENT_REF)
10601 {
10602 while (TREE_CODE (decl) == COMPONENT_REF)
10603 decl = TREE_OPERAND (decl, 0);
10604 if (DECL_P (decl))
10605 {
10606 n = splay_tree_lookup (ctx->variables,
10607 (splay_tree_key) decl);
10608 if (!(n->value & GOVD_SEEN))
10609 remove = true;
10610 }
10611 }
10612 }
10613 break;
10614 }
10615 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
10616 if ((ctx->region_type & ORT_TARGET) != 0
10617 && !(n->value & GOVD_SEEN)
10618 && GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) == 0
10619 && (!is_global_var (decl)
10620 || !lookup_attribute ("omp declare target link",
10621 DECL_ATTRIBUTES (decl))))
10622 {
10623 remove = true;
10624 		  /* For struct element mappings, if the struct is never referenced
10625 		     in the target block and none of the mappings has the always
10626 		     modifier, remove all the struct element mappings, which
10627 		     immediately follow the GOMP_MAP_STRUCT map clause.  */
10628 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT)
10629 {
10630 HOST_WIDE_INT cnt = tree_to_shwi (OMP_CLAUSE_SIZE (c));
10631 while (cnt--)
10632 OMP_CLAUSE_CHAIN (c)
10633 = OMP_CLAUSE_CHAIN (OMP_CLAUSE_CHAIN (c));
10634 }
10635 }
10636 else if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT
10637 && (code == OMP_TARGET_EXIT_DATA
10638 || code == OACC_EXIT_DATA))
10639 remove = true;
10640 else if (DECL_SIZE (decl)
10641 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST
10642 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_POINTER
10643 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
10644 && (OMP_CLAUSE_MAP_KIND (c)
10645 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
10646 {
10647 /* For GOMP_MAP_FORCE_DEVICEPTR, we'll never enter here, because
10648 for these, TREE_CODE (DECL_SIZE (decl)) will always be
10649 INTEGER_CST. */
10650 gcc_assert (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FORCE_DEVICEPTR);
10651
10652 tree decl2 = DECL_VALUE_EXPR (decl);
10653 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
10654 decl2 = TREE_OPERAND (decl2, 0);
10655 gcc_assert (DECL_P (decl2));
10656 tree mem = build_simple_mem_ref (decl2);
10657 OMP_CLAUSE_DECL (c) = mem;
10658 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
10659 if (ctx->outer_context)
10660 {
10661 omp_notice_variable (ctx->outer_context, decl2, true);
10662 omp_notice_variable (ctx->outer_context,
10663 OMP_CLAUSE_SIZE (c), true);
10664 }
10665 if (((ctx->region_type & ORT_TARGET) != 0
10666 || !ctx->target_firstprivatize_array_bases)
10667 && ((n->value & GOVD_SEEN) == 0
10668 || (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE)) == 0))
10669 {
10670 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
10671 OMP_CLAUSE_MAP);
10672 OMP_CLAUSE_DECL (nc) = decl;
10673 OMP_CLAUSE_SIZE (nc) = size_zero_node;
10674 if (ctx->target_firstprivatize_array_bases)
10675 OMP_CLAUSE_SET_MAP_KIND (nc,
10676 GOMP_MAP_FIRSTPRIVATE_POINTER);
10677 else
10678 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
10679 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (c);
10680 OMP_CLAUSE_CHAIN (c) = nc;
10681 c = nc;
10682 }
10683 }
10684 else
10685 {
10686 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
10687 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
10688 gcc_assert ((n->value & GOVD_SEEN) == 0
10689 || ((n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
10690 == 0));
10691 }
10692 break;
10693
10694 case OMP_CLAUSE_TO:
10695 case OMP_CLAUSE_FROM:
10696 case OMP_CLAUSE__CACHE_:
10697 decl = OMP_CLAUSE_DECL (c);
10698 if (!DECL_P (decl))
10699 break;
10700 if (DECL_SIZE (decl)
10701 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
10702 {
10703 tree decl2 = DECL_VALUE_EXPR (decl);
10704 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
10705 decl2 = TREE_OPERAND (decl2, 0);
10706 gcc_assert (DECL_P (decl2));
10707 tree mem = build_simple_mem_ref (decl2);
10708 OMP_CLAUSE_DECL (c) = mem;
10709 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
10710 if (ctx->outer_context)
10711 {
10712 omp_notice_variable (ctx->outer_context, decl2, true);
10713 omp_notice_variable (ctx->outer_context,
10714 OMP_CLAUSE_SIZE (c), true);
10715 }
10716 }
10717 else if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
10718 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
10719 break;
10720
10721 case OMP_CLAUSE_REDUCTION:
10722 if (OMP_CLAUSE_REDUCTION_INSCAN (c))
10723 {
10724 decl = OMP_CLAUSE_DECL (c);
10725 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
10726 if ((n->value & GOVD_REDUCTION_INSCAN) == 0)
10727 {
10728 remove = true;
10729 error_at (OMP_CLAUSE_LOCATION (c),
10730 "%qD specified in %<inscan%> %<reduction%> clause "
10731 "but not in %<scan%> directive clause", decl);
10732 break;
10733 }
10734 has_inscan_reductions = true;
10735 }
10736 /* FALLTHRU */
10737 case OMP_CLAUSE_IN_REDUCTION:
10738 case OMP_CLAUSE_TASK_REDUCTION:
10739 decl = OMP_CLAUSE_DECL (c);
10740 /* OpenACC reductions need a present_or_copy data clause.
10741 	   Add one if necessary.  Emit an error when the reduction is private.  */
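	/* (Illustrative: "reduction(+:x)" on an OpenACC parallel without any
	   data clause for "x" gets an implicit map(tofrom:x) built below.)  */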
10742 if (ctx->region_type == ORT_ACC_PARALLEL
10743 || ctx->region_type == ORT_ACC_SERIAL)
10744 {
10745 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
10746 if (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
10747 {
10748 remove = true;
10749 error_at (OMP_CLAUSE_LOCATION (c), "invalid private "
10750 "reduction on %qE", DECL_NAME (decl));
10751 }
10752 else if ((n->value & GOVD_MAP) == 0)
10753 {
10754 tree next = OMP_CLAUSE_CHAIN (c);
10755 tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_MAP);
10756 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_TOFROM);
10757 OMP_CLAUSE_DECL (nc) = decl;
10758 OMP_CLAUSE_CHAIN (c) = nc;
10759 lang_hooks.decls.omp_finish_clause (nc, pre_p,
10760 (ctx->region_type
10761 & ORT_ACC) != 0);
10762 while (1)
10763 {
10764 OMP_CLAUSE_MAP_IN_REDUCTION (nc) = 1;
10765 if (OMP_CLAUSE_CHAIN (nc) == NULL)
10766 break;
10767 nc = OMP_CLAUSE_CHAIN (nc);
10768 }
10769 OMP_CLAUSE_CHAIN (nc) = next;
10770 n->value |= GOVD_MAP;
10771 }
10772 }
10773 if (DECL_P (decl)
10774 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
10775 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
10776 break;
10777 case OMP_CLAUSE_COPYIN:
10778 case OMP_CLAUSE_COPYPRIVATE:
10779 case OMP_CLAUSE_IF:
10780 case OMP_CLAUSE_NUM_THREADS:
10781 case OMP_CLAUSE_NUM_TEAMS:
10782 case OMP_CLAUSE_THREAD_LIMIT:
10783 case OMP_CLAUSE_DIST_SCHEDULE:
10784 case OMP_CLAUSE_DEVICE:
10785 case OMP_CLAUSE_SCHEDULE:
10786 case OMP_CLAUSE_NOWAIT:
10787 case OMP_CLAUSE_ORDERED:
10788 case OMP_CLAUSE_DEFAULT:
10789 case OMP_CLAUSE_UNTIED:
10790 case OMP_CLAUSE_COLLAPSE:
10791 case OMP_CLAUSE_FINAL:
10792 case OMP_CLAUSE_MERGEABLE:
10793 case OMP_CLAUSE_PROC_BIND:
10794 case OMP_CLAUSE_SAFELEN:
10795 case OMP_CLAUSE_SIMDLEN:
10796 case OMP_CLAUSE_DEPEND:
10797 case OMP_CLAUSE_PRIORITY:
10798 case OMP_CLAUSE_GRAINSIZE:
10799 case OMP_CLAUSE_NUM_TASKS:
10800 case OMP_CLAUSE_NOGROUP:
10801 case OMP_CLAUSE_THREADS:
10802 case OMP_CLAUSE_SIMD:
10803 case OMP_CLAUSE_HINT:
10804 case OMP_CLAUSE_DEFAULTMAP:
10805 case OMP_CLAUSE_ORDER:
10806 case OMP_CLAUSE_BIND:
10807 case OMP_CLAUSE_USE_DEVICE_PTR:
10808 case OMP_CLAUSE_USE_DEVICE_ADDR:
10809 case OMP_CLAUSE_IS_DEVICE_PTR:
10810 case OMP_CLAUSE_ASYNC:
10811 case OMP_CLAUSE_WAIT:
10812 case OMP_CLAUSE_INDEPENDENT:
10813 case OMP_CLAUSE_NUM_GANGS:
10814 case OMP_CLAUSE_NUM_WORKERS:
10815 case OMP_CLAUSE_VECTOR_LENGTH:
10816 case OMP_CLAUSE_GANG:
10817 case OMP_CLAUSE_WORKER:
10818 case OMP_CLAUSE_VECTOR:
10819 case OMP_CLAUSE_AUTO:
10820 case OMP_CLAUSE_SEQ:
10821 case OMP_CLAUSE_TILE:
10822 case OMP_CLAUSE_IF_PRESENT:
10823 case OMP_CLAUSE_FINALIZE:
10824 case OMP_CLAUSE_INCLUSIVE:
10825 case OMP_CLAUSE_EXCLUSIVE:
10826 case OMP_CLAUSE_ALLOCATE:
10827 break;
10828
10829 default:
10830 gcc_unreachable ();
10831 }
10832
10833 if (remove)
10834 *list_p = OMP_CLAUSE_CHAIN (c);
10835 else
10836 list_p = &OMP_CLAUSE_CHAIN (c);
10837 }
10838
10839 /* Add in any implicit data sharing. */
10840 struct gimplify_adjust_omp_clauses_data data;
10841 data.list_p = list_p;
10842 data.pre_p = pre_p;
10843 splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, &data);
10844
10845 if (has_inscan_reductions)
10846 for (c = *orig_list_p; c; c = OMP_CLAUSE_CHAIN (c))
10847 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
10848 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
10849 {
10850 error_at (OMP_CLAUSE_LOCATION (c),
10851 "%<inscan%> %<reduction%> clause used together with "
10852 "%<linear%> clause for a variable other than loop "
10853 "iterator");
10854 break;
10855 }
10856
10857 gimplify_omp_ctxp = ctx->outer_context;
10858 delete_omp_context (ctx);
10859 }
10860
10861 /* Return 0 if the CONSTRUCTS selectors don't match the OpenMP context,
10862    -1 if it is unknown yet (simd is involved and won't be known until
10863    vectorization), and 1 if they do match.  If SCORES is non-NULL, it
10864    should point to an array of at least 2*NCONSTRUCTS+2 ints, which will
10865    be filled with the positions of the CONSTRUCTS (position -1 if one
10866    will never match) followed by the number of constructs in the OpenMP
10867    context construct trait.  If the score depends on whether the code
10868    will be in a declare simd clone or not, the function returns 2 and
10869    there will be two sets of scores: the first for the case that it is
10870    not in a declare simd clone, the other for the case that it is.  */
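/* Illustrative, simplified example: in a context such as
     #pragma omp target
       #pragma omp teams
         #pragma omp parallel
   CONSTRUCTS whose codes appear, in order, among those constructs match
   and yield 1; if OMP_SIMD were involved, the answer could depend on
   vectorization, hence the -1 "unknown yet" result.  */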
10871
10872 int
10873 omp_construct_selector_matches (enum tree_code *constructs, int nconstructs,
10874 int *scores)
10875 {
10876 int matched = 0, cnt = 0;
10877 bool simd_seen = false;
10878 bool target_seen = false;
10879 int declare_simd_cnt = -1;
10880 auto_vec<enum tree_code, 16> codes;
10881 for (struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp; ctx;)
10882 {
10883 if (((ctx->region_type & ORT_PARALLEL) && ctx->code == OMP_PARALLEL)
10884 || ((ctx->region_type & (ORT_TARGET | ORT_IMPLICIT_TARGET | ORT_ACC))
10885 == ORT_TARGET && ctx->code == OMP_TARGET)
10886 || ((ctx->region_type & ORT_TEAMS) && ctx->code == OMP_TEAMS)
10887 || (ctx->region_type == ORT_WORKSHARE && ctx->code == OMP_FOR)
10888 || (ctx->region_type == ORT_SIMD
10889 && ctx->code == OMP_SIMD
10890 && !omp_find_clause (ctx->clauses, OMP_CLAUSE_BIND)))
10891 {
10892 ++cnt;
10893 if (scores)
10894 codes.safe_push (ctx->code);
10895 else if (matched < nconstructs && ctx->code == constructs[matched])
10896 {
10897 if (ctx->code == OMP_SIMD)
10898 {
10899 if (matched)
10900 return 0;
10901 simd_seen = true;
10902 }
10903 ++matched;
10904 }
10905 if (ctx->code == OMP_TARGET)
10906 {
10907 if (scores == NULL)
10908 return matched < nconstructs ? 0 : simd_seen ? -1 : 1;
10909 target_seen = true;
10910 break;
10911 }
10912 }
10913 else if (ctx->region_type == ORT_WORKSHARE
10914 && ctx->code == OMP_LOOP
10915 && ctx->outer_context
10916 && ctx->outer_context->region_type == ORT_COMBINED_PARALLEL
10917 && ctx->outer_context->outer_context
10918 && ctx->outer_context->outer_context->code == OMP_LOOP
10919 && ctx->outer_context->outer_context->distribute)
10920 ctx = ctx->outer_context->outer_context;
10921 ctx = ctx->outer_context;
10922 }
10923 if (!target_seen
10924 && lookup_attribute ("omp declare simd",
10925 DECL_ATTRIBUTES (current_function_decl)))
10926 {
10927       /* Declare simd is a maybe case; it is supposed to be added only to the
10928 	 clones created by omp-simd-clone.c, not to the base function.  */
10929 declare_simd_cnt = cnt++;
10930 if (scores)
10931 codes.safe_push (OMP_SIMD);
10932 else if (cnt == 0
10933 && constructs[0] == OMP_SIMD)
10934 {
10935 gcc_assert (matched == 0);
10936 simd_seen = true;
10937 if (++matched == nconstructs)
10938 return -1;
10939 }
10940 }
10941 if (tree attr = lookup_attribute ("omp declare variant variant",
10942 DECL_ATTRIBUTES (current_function_decl)))
10943 {
10944 enum tree_code variant_constructs[5];
10945 int variant_nconstructs = 0;
10946 if (!target_seen)
10947 variant_nconstructs
10948 = omp_constructor_traits_to_codes (TREE_VALUE (attr),
10949 variant_constructs);
10950 for (int i = 0; i < variant_nconstructs; i++)
10951 {
10952 ++cnt;
10953 if (scores)
10954 codes.safe_push (variant_constructs[i]);
10955 else if (matched < nconstructs
10956 && variant_constructs[i] == constructs[matched])
10957 {
10958 if (variant_constructs[i] == OMP_SIMD)
10959 {
10960 if (matched)
10961 return 0;
10962 simd_seen = true;
10963 }
10964 ++matched;
10965 }
10966 }
10967 }
10968 if (!target_seen
10969 && lookup_attribute ("omp declare target block",
10970 DECL_ATTRIBUTES (current_function_decl)))
10971 {
10972 if (scores)
10973 codes.safe_push (OMP_TARGET);
10974 else if (matched < nconstructs && constructs[matched] == OMP_TARGET)
10975 ++matched;
10976 }
10977 if (scores)
10978 {
10979 for (int pass = 0; pass < (declare_simd_cnt == -1 ? 1 : 2); pass++)
10980 {
10981 int j = codes.length () - 1;
10982 for (int i = nconstructs - 1; i >= 0; i--)
10983 {
10984 while (j >= 0
10985 && (pass != 0 || declare_simd_cnt != j)
10986 && constructs[i] != codes[j])
10987 --j;
10988 if (pass == 0 && declare_simd_cnt != -1 && j > declare_simd_cnt)
10989 *scores++ = j - 1;
10990 else
10991 *scores++ = j;
10992 }
10993 *scores++ = ((pass == 0 && declare_simd_cnt != -1)
10994 ? codes.length () - 1 : codes.length ());
10995 }
10996 return declare_simd_cnt == -1 ? 1 : 2;
10997 }
10998 if (matched == nconstructs)
10999 return simd_seen ? -1 : 1;
11000 return 0;
11001 }
11002
11003 /* Gimplify OACC_CACHE. */
11004
11005 static void
11006 gimplify_oacc_cache (tree *expr_p, gimple_seq *pre_p)
11007 {
11008 tree expr = *expr_p;
11009
11010 gimplify_scan_omp_clauses (&OACC_CACHE_CLAUSES (expr), pre_p, ORT_ACC,
11011 OACC_CACHE);
11012 gimplify_adjust_omp_clauses (pre_p, NULL, &OACC_CACHE_CLAUSES (expr),
11013 OACC_CACHE);
11014
11015 /* TODO: Do something sensible with this information. */
11016
11017 *expr_p = NULL_TREE;
11018 }
11019
11020 /* Helper function of gimplify_oacc_declare.  Its purpose is to translate,
11021    if required, the 'kind' in CLAUSE into an 'entry' kind and an 'exit'
11022    kind.  The entry kind replaces the one in CLAUSE, while the exit kind
11023    is used in a new omp_clause that is returned to the caller.  */
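/* For instance, as implemented by the switch below:
     GOMP_MAP_ALLOC  -> entry kind unchanged,        exit GOMP_MAP_RELEASE
     GOMP_MAP_FROM   -> entry GOMP_MAP_FORCE_ALLOC,  exit GOMP_MAP_FROM
     GOMP_MAP_TOFROM -> entry GOMP_MAP_TO,           exit GOMP_MAP_FROM
   Kinds that need no exit clause (e.g. GOMP_MAP_TO, GOMP_MAP_POINTER)
   return NULL.  */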
11024
11025 static tree
11026 gimplify_oacc_declare_1 (tree clause)
11027 {
11028 HOST_WIDE_INT kind, new_op;
11029 bool ret = false;
11030 tree c = NULL;
11031
11032 kind = OMP_CLAUSE_MAP_KIND (clause);
11033
11034 switch (kind)
11035 {
11036 case GOMP_MAP_ALLOC:
11037 new_op = GOMP_MAP_RELEASE;
11038 ret = true;
11039 break;
11040
11041 case GOMP_MAP_FROM:
11042 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_ALLOC);
11043 new_op = GOMP_MAP_FROM;
11044 ret = true;
11045 break;
11046
11047 case GOMP_MAP_TOFROM:
11048 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_TO);
11049 new_op = GOMP_MAP_FROM;
11050 ret = true;
11051 break;
11052
11053 case GOMP_MAP_DEVICE_RESIDENT:
11054 case GOMP_MAP_FORCE_DEVICEPTR:
11055 case GOMP_MAP_FORCE_PRESENT:
11056 case GOMP_MAP_LINK:
11057 case GOMP_MAP_POINTER:
11058 case GOMP_MAP_TO:
11059 break;
11060
11061 default:
11062 gcc_unreachable ();
11063 break;
11064 }
11065
11066 if (ret)
11067 {
11068 c = build_omp_clause (OMP_CLAUSE_LOCATION (clause), OMP_CLAUSE_MAP);
11069 OMP_CLAUSE_SET_MAP_KIND (c, new_op);
11070 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clause);
11071 }
11072
11073 return c;
11074 }
11075
11076 /* Gimplify OACC_DECLARE. */
11077
11078 static void
11079 gimplify_oacc_declare (tree *expr_p, gimple_seq *pre_p)
11080 {
11081 tree expr = *expr_p;
11082 gomp_target *stmt;
11083 tree clauses, t, decl;
11084
11085 clauses = OACC_DECLARE_CLAUSES (expr);
11086
11087 gimplify_scan_omp_clauses (&clauses, pre_p, ORT_TARGET_DATA, OACC_DECLARE);
11088 gimplify_adjust_omp_clauses (pre_p, NULL, &clauses, OACC_DECLARE);
11089
11090 for (t = clauses; t; t = OMP_CLAUSE_CHAIN (t))
11091 {
11092 decl = OMP_CLAUSE_DECL (t);
11093
11094 if (TREE_CODE (decl) == MEM_REF)
11095 decl = TREE_OPERAND (decl, 0);
11096
11097 if (VAR_P (decl) && !is_oacc_declared (decl))
11098 {
11099 tree attr = get_identifier ("oacc declare target");
11100 DECL_ATTRIBUTES (decl) = tree_cons (attr, NULL_TREE,
11101 DECL_ATTRIBUTES (decl));
11102 }
11103
11104 if (VAR_P (decl)
11105 && !is_global_var (decl)
11106 && DECL_CONTEXT (decl) == current_function_decl)
11107 {
11108 tree c = gimplify_oacc_declare_1 (t);
11109 if (c)
11110 {
11111 if (oacc_declare_returns == NULL)
11112 oacc_declare_returns = new hash_map<tree, tree>;
11113
11114 oacc_declare_returns->put (decl, c);
11115 }
11116 }
11117
11118 if (gimplify_omp_ctxp)
11119 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_SEEN);
11120 }
11121
11122 stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
11123 clauses);
11124
11125 gimplify_seq_add_stmt (pre_p, stmt);
11126
11127 *expr_p = NULL_TREE;
11128 }
11129
11130 /* Gimplify the contents of an OMP_PARALLEL statement. This involves
11131 gimplification of the body, as well as scanning the body for used
11132 variables. We need to do this scan now, because variable-sized
11133 decls will be decomposed during gimplification. */
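/* Rough sketch of the transformation (simplified):
     #pragma omp parallel <clauses>
       body;
   becomes
     GIMPLE_OMP_PARALLEL <adjusted clauses>
       <gimplified body>
   where the clauses are scanned before and adjusted after gimplifying the
   body, so data sharing for variables only created or noticed during
   gimplification can still be recorded.  */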
11134
11135 static void
11136 gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
11137 {
11138 tree expr = *expr_p;
11139 gimple *g;
11140 gimple_seq body = NULL;
11141
11142 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
11143 OMP_PARALLEL_COMBINED (expr)
11144 ? ORT_COMBINED_PARALLEL
11145 : ORT_PARALLEL, OMP_PARALLEL);
11146
11147 push_gimplify_context ();
11148
11149 g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
11150 if (gimple_code (g) == GIMPLE_BIND)
11151 pop_gimplify_context (g);
11152 else
11153 pop_gimplify_context (NULL);
11154
11155 gimplify_adjust_omp_clauses (pre_p, body, &OMP_PARALLEL_CLAUSES (expr),
11156 OMP_PARALLEL);
11157
11158 g = gimple_build_omp_parallel (body,
11159 OMP_PARALLEL_CLAUSES (expr),
11160 NULL_TREE, NULL_TREE);
11161 if (OMP_PARALLEL_COMBINED (expr))
11162 gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
11163 gimplify_seq_add_stmt (pre_p, g);
11164 *expr_p = NULL_TREE;
11165 }
11166
11167 /* Gimplify the contents of an OMP_TASK statement. This involves
11168 gimplification of the body, as well as scanning the body for used
11169 variables. We need to do this scan now, because variable-sized
11170 decls will be decomposed during gimplification. */
11171
11172 static void
11173 gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
11174 {
11175 tree expr = *expr_p;
11176 gimple *g;
11177 gimple_seq body = NULL;
11178
11179 if (OMP_TASK_BODY (expr) == NULL_TREE)
11180 for (tree c = OMP_TASK_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
11181 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
11182 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_MUTEXINOUTSET)
11183 {
11184 error_at (OMP_CLAUSE_LOCATION (c),
11185 "%<mutexinoutset%> kind in %<depend%> clause on a "
11186 "%<taskwait%> construct");
11187 break;
11188 }
11189
11190 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p,
11191 omp_find_clause (OMP_TASK_CLAUSES (expr),
11192 OMP_CLAUSE_UNTIED)
11193 ? ORT_UNTIED_TASK : ORT_TASK, OMP_TASK);
11194
11195 if (OMP_TASK_BODY (expr))
11196 {
11197 push_gimplify_context ();
11198
11199 g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
11200 if (gimple_code (g) == GIMPLE_BIND)
11201 pop_gimplify_context (g);
11202 else
11203 pop_gimplify_context (NULL);
11204 }
11205
11206 gimplify_adjust_omp_clauses (pre_p, body, &OMP_TASK_CLAUSES (expr),
11207 OMP_TASK);
11208
11209 g = gimple_build_omp_task (body,
11210 OMP_TASK_CLAUSES (expr),
11211 NULL_TREE, NULL_TREE,
11212 NULL_TREE, NULL_TREE, NULL_TREE);
11213 if (OMP_TASK_BODY (expr) == NULL_TREE)
11214 gimple_omp_task_set_taskwait_p (g, true);
11215 gimplify_seq_add_stmt (pre_p, g);
11216 *expr_p = NULL_TREE;
11217 }
11218
11219 /* Helper function for gimplify_omp_for. If *TP is not a gimple constant,
11220 force it into a temporary initialized in PRE_P and add firstprivate clause
11221 to ORIG_FOR_STMT. */
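/* Hypothetical example: for a taskloop whose end expression is "n * 2",
   roughly
     D.tmp = n * 2;             <- evaluated into PRE_P
     firstprivate(D.tmp)        <- clause added to ORIG_FOR_STMT
   so the bound is computed once before the taskloop region ("D.tmp" is an
   invented name for the temporary).  */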
11222
11223 static void
11224 gimplify_omp_taskloop_expr (tree type, tree *tp, gimple_seq *pre_p,
11225 tree orig_for_stmt)
11226 {
11227 if (*tp == NULL || is_gimple_constant (*tp))
11228 return;
11229
11230 *tp = get_initialized_tmp_var (*tp, pre_p, NULL, false);
11231   /* A reference-to-pointer conversion is considered useless,
11232      but it is significant for the firstprivate clause.  Force
11233      it here.  */
11234 if (type
11235 && TREE_CODE (type) == POINTER_TYPE
11236 && TREE_CODE (TREE_TYPE (*tp)) == REFERENCE_TYPE)
11237 {
11238 tree v = create_tmp_var (TYPE_MAIN_VARIANT (type));
11239 tree m = build2 (INIT_EXPR, TREE_TYPE (v), v, *tp);
11240 gimplify_and_add (m, pre_p);
11241 *tp = v;
11242 }
11243
11244 tree c = build_omp_clause (input_location, OMP_CLAUSE_FIRSTPRIVATE);
11245 OMP_CLAUSE_DECL (c) = *tp;
11246 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
11247 OMP_FOR_CLAUSES (orig_for_stmt) = c;
11248 }
11249
11250 /* Gimplify the gross structure of an OMP_FOR statement. */
11251
11252 static enum gimplify_status
11253 gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
11254 {
11255 tree for_stmt, orig_for_stmt, inner_for_stmt = NULL_TREE, decl, var, t;
11256 enum gimplify_status ret = GS_ALL_DONE;
11257 enum gimplify_status tret;
11258 gomp_for *gfor;
11259 gimple_seq for_body, for_pre_body;
11260 int i;
11261 bitmap has_decl_expr = NULL;
11262 enum omp_region_type ort = ORT_WORKSHARE;
11263 bool openacc = TREE_CODE (*expr_p) == OACC_LOOP;
11264
11265 orig_for_stmt = for_stmt = *expr_p;
11266
11267 bool loop_p = (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_BIND)
11268 != NULL_TREE);
11269 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
11270 {
11271 tree *data[4] = { NULL, NULL, NULL, NULL };
11272 gcc_assert (TREE_CODE (for_stmt) != OACC_LOOP);
11273 inner_for_stmt = walk_tree (&OMP_FOR_BODY (for_stmt),
11274 find_combined_omp_for, data, NULL);
11275 if (inner_for_stmt == NULL_TREE)
11276 {
11277 gcc_assert (seen_error ());
11278 *expr_p = NULL_TREE;
11279 return GS_ERROR;
11280 }
11281 if (data[2] && OMP_FOR_PRE_BODY (*data[2]))
11282 {
11283 append_to_statement_list_force (OMP_FOR_PRE_BODY (*data[2]),
11284 &OMP_FOR_PRE_BODY (for_stmt));
11285 OMP_FOR_PRE_BODY (*data[2]) = NULL_TREE;
11286 }
11287 if (OMP_FOR_PRE_BODY (inner_for_stmt))
11288 {
11289 append_to_statement_list_force (OMP_FOR_PRE_BODY (inner_for_stmt),
11290 &OMP_FOR_PRE_BODY (for_stmt));
11291 OMP_FOR_PRE_BODY (inner_for_stmt) = NULL_TREE;
11292 }
11293
11294 if (data[0])
11295 {
11296 /* We have some statements or variable declarations in between
11297 the composite construct directives. Move them around the
11298 inner_for_stmt. */
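	  /* (Sketch: each intervening statement list is spliced out and the
	     inner_for_stmt is re-wrapped in a fresh BIND_EXPR, so that once
	     gimplification restarts the directives are directly nested.)  */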
11299 data[0] = expr_p;
11300 for (i = 0; i < 3; i++)
11301 if (data[i])
11302 {
11303 tree t = *data[i];
11304 if (i < 2 && data[i + 1] == &OMP_BODY (t))
11305 data[i + 1] = data[i];
11306 *data[i] = OMP_BODY (t);
11307 tree body = build3 (BIND_EXPR, void_type_node, NULL_TREE,
11308 NULL_TREE, make_node (BLOCK));
11309 OMP_BODY (t) = body;
11310 append_to_statement_list_force (inner_for_stmt,
11311 &BIND_EXPR_BODY (body));
11312 *data[3] = t;
11313 data[3] = tsi_stmt_ptr (tsi_start (BIND_EXPR_BODY (body)));
11314 gcc_assert (*data[3] == inner_for_stmt);
11315 }
11316 return GS_OK;
11317 }
11318
11319 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (inner_for_stmt)); i++)
11320 if (!loop_p
11321 && OMP_FOR_ORIG_DECLS (inner_for_stmt)
11322 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
11323 i)) == TREE_LIST
11324 && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
11325 i)))
11326 {
11327 tree orig = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt), i);
11328 /* Class iterators aren't allowed on OMP_SIMD, so the only
11329 case we need to solve is distribute parallel for. They are
11330 allowed on the loop construct, but that is already handled
11331 in gimplify_omp_loop. */
11332 gcc_assert (TREE_CODE (inner_for_stmt) == OMP_FOR
11333 && TREE_CODE (for_stmt) == OMP_DISTRIBUTE
11334 && data[1]);
11335 tree orig_decl = TREE_PURPOSE (orig);
11336 tree last = TREE_VALUE (orig);
11337 tree *pc;
11338 for (pc = &OMP_FOR_CLAUSES (inner_for_stmt);
11339 *pc; pc = &OMP_CLAUSE_CHAIN (*pc))
11340 if ((OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_PRIVATE
11341 || OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_LASTPRIVATE)
11342 && OMP_CLAUSE_DECL (*pc) == orig_decl)
11343 break;
11344 if (*pc == NULL_TREE)
11345 {
11346 tree *spc;
11347 for (spc = &OMP_PARALLEL_CLAUSES (*data[1]);
11348 *spc; spc = &OMP_CLAUSE_CHAIN (*spc))
11349 if (OMP_CLAUSE_CODE (*spc) == OMP_CLAUSE_PRIVATE
11350 && OMP_CLAUSE_DECL (*spc) == orig_decl)
11351 break;
11352 if (*spc)
11353 {
11354 tree c = *spc;
11355 *spc = OMP_CLAUSE_CHAIN (c);
11356 OMP_CLAUSE_CHAIN (c) = NULL_TREE;
11357 *pc = c;
11358 }
11359 }
11360 if (*pc == NULL_TREE)
11361 ;
11362 else if (OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_PRIVATE)
11363 {
11364 	      /* A private clause will appear only on inner_for_stmt.
11365 		 Change it into firstprivate, and add a private clause
11366 		 on for_stmt.  */
11367 tree c = copy_node (*pc);
11368 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
11369 OMP_FOR_CLAUSES (for_stmt) = c;
11370 OMP_CLAUSE_CODE (*pc) = OMP_CLAUSE_FIRSTPRIVATE;
11371 lang_hooks.decls.omp_finish_clause (*pc, pre_p, openacc);
11372 }
11373 else
11374 {
11375 	      /* A lastprivate clause will appear on both inner_for_stmt
11376 		 and for_stmt.  Add a firstprivate clause to
11377 		 inner_for_stmt.  */
11378 tree c = build_omp_clause (OMP_CLAUSE_LOCATION (*pc),
11379 OMP_CLAUSE_FIRSTPRIVATE);
11380 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (*pc);
11381 OMP_CLAUSE_CHAIN (c) = *pc;
11382 *pc = c;
11383 lang_hooks.decls.omp_finish_clause (*pc, pre_p, openacc);
11384 }
11385 tree c = build_omp_clause (UNKNOWN_LOCATION,
11386 OMP_CLAUSE_FIRSTPRIVATE);
11387 OMP_CLAUSE_DECL (c) = last;
11388 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
11389 OMP_PARALLEL_CLAUSES (*data[1]) = c;
11390 c = build_omp_clause (UNKNOWN_LOCATION,
11391 *pc ? OMP_CLAUSE_SHARED
11392 : OMP_CLAUSE_FIRSTPRIVATE);
11393 OMP_CLAUSE_DECL (c) = orig_decl;
11394 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
11395 OMP_PARALLEL_CLAUSES (*data[1]) = c;
11396 }
11397       /* Similarly, take care of C++ range-for temporaries; those should
11398 	 be firstprivate on OMP_PARALLEL if any.  */
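      /* (Illustrative: for "for (auto v : rng)" the artificial __for_range
	 and __for_end decls created by the C++ front end receive
	 firstprivate clauses on the enclosing OMP_PARALLEL below.)  */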
11399 if (data[1])
11400 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (inner_for_stmt)); i++)
11401 if (OMP_FOR_ORIG_DECLS (inner_for_stmt)
11402 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
11403 i)) == TREE_LIST
11404 && TREE_CHAIN (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
11405 i)))
11406 {
11407 tree orig
11408 = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt), i);
11409 tree v = TREE_CHAIN (orig);
11410 tree c = build_omp_clause (UNKNOWN_LOCATION,
11411 OMP_CLAUSE_FIRSTPRIVATE);
11412 /* First add firstprivate clause for the __for_end artificial
11413 decl. */
11414 OMP_CLAUSE_DECL (c) = TREE_VEC_ELT (v, 1);
11415 if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c)))
11416 == REFERENCE_TYPE)
11417 OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c) = 1;
11418 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
11419 OMP_PARALLEL_CLAUSES (*data[1]) = c;
11420 if (TREE_VEC_ELT (v, 0))
11421 {
11422 /* And now the same for __for_range artificial decl if it
11423 exists. */
11424 c = build_omp_clause (UNKNOWN_LOCATION,
11425 OMP_CLAUSE_FIRSTPRIVATE);
11426 OMP_CLAUSE_DECL (c) = TREE_VEC_ELT (v, 0);
11427 if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c)))
11428 == REFERENCE_TYPE)
11429 OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c) = 1;
11430 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
11431 OMP_PARALLEL_CLAUSES (*data[1]) = c;
11432 }
11433 }
11434 }
11435
11436 switch (TREE_CODE (for_stmt))
11437 {
11438 case OMP_FOR:
11439 if (OMP_FOR_NON_RECTANGULAR (inner_for_stmt ? inner_for_stmt : for_stmt))
11440 {
11441 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
11442 OMP_CLAUSE_SCHEDULE))
11443 error_at (EXPR_LOCATION (for_stmt),
11444 "%qs clause may not appear on non-rectangular %qs",
11445 "schedule", "for");
11446 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ORDERED))
11447 error_at (EXPR_LOCATION (for_stmt),
11448 "%qs clause may not appear on non-rectangular %qs",
11449 "ordered", "for");
11450 }
11451 break;
11452 case OMP_DISTRIBUTE:
11453 if (OMP_FOR_NON_RECTANGULAR (inner_for_stmt ? inner_for_stmt : for_stmt)
11454 && omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
11455 OMP_CLAUSE_DIST_SCHEDULE))
11456 error_at (EXPR_LOCATION (for_stmt),
11457 "%qs clause may not appear on non-rectangular %qs",
11458 "dist_schedule", "distribute");
11459 break;
11460 case OACC_LOOP:
11461 ort = ORT_ACC;
11462 break;
11463 case OMP_TASKLOOP:
11464 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_UNTIED))
11465 ort = ORT_UNTIED_TASKLOOP;
11466 else
11467 ort = ORT_TASKLOOP;
11468 break;
11469 case OMP_SIMD:
11470 ort = ORT_SIMD;
11471 break;
11472 default:
11473 gcc_unreachable ();
11474 }
11475
11476 /* Set OMP_CLAUSE_LINEAR_NO_COPYIN flag on explicit linear
11477 clause for the IV. */
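  /* (Illustrative: for "#pragma omp simd linear(i)" where "i" is the single
     loop iterator, copying the outer value in is unnecessary because the IV
     is set by the loop's own initialization.)  */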
11478 if (ort == ORT_SIMD && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
11479 {
11480 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), 0);
11481 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
11482 decl = TREE_OPERAND (t, 0);
11483 for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
11484 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
11485 && OMP_CLAUSE_DECL (c) == decl)
11486 {
11487 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
11488 break;
11489 }
11490 }
11491
11492 if (TREE_CODE (for_stmt) != OMP_TASKLOOP)
11493 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p, ort,
11494 loop_p && TREE_CODE (for_stmt) != OMP_SIMD
11495 ? OMP_LOOP : TREE_CODE (for_stmt));
11496
11497 if (TREE_CODE (for_stmt) == OMP_DISTRIBUTE)
11498 gimplify_omp_ctxp->distribute = true;
11499
11500 /* Handle OMP_FOR_INIT. */
11501 for_pre_body = NULL;
11502 if ((ort == ORT_SIMD
11503 || (inner_for_stmt && TREE_CODE (inner_for_stmt) == OMP_SIMD))
11504 && OMP_FOR_PRE_BODY (for_stmt))
11505 {
11506 has_decl_expr = BITMAP_ALLOC (NULL);
11507 if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == DECL_EXPR
11508 && TREE_CODE (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt)))
11509 == VAR_DECL)
11510 {
11511 t = OMP_FOR_PRE_BODY (for_stmt);
11512 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
11513 }
11514 else if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == STATEMENT_LIST)
11515 {
11516 tree_stmt_iterator si;
11517 for (si = tsi_start (OMP_FOR_PRE_BODY (for_stmt)); !tsi_end_p (si);
11518 tsi_next (&si))
11519 {
11520 t = tsi_stmt (si);
11521 if (TREE_CODE (t) == DECL_EXPR
11522 && TREE_CODE (DECL_EXPR_DECL (t)) == VAR_DECL)
11523 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
11524 }
11525 }
11526 }
11527 if (OMP_FOR_PRE_BODY (for_stmt))
11528 {
11529 if (TREE_CODE (for_stmt) != OMP_TASKLOOP || gimplify_omp_ctxp)
11530 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
11531 else
11532 {
11533 struct gimplify_omp_ctx ctx;
11534 memset (&ctx, 0, sizeof (ctx));
11535 ctx.region_type = ORT_NONE;
11536 gimplify_omp_ctxp = &ctx;
11537 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
11538 gimplify_omp_ctxp = NULL;
11539 }
11540 }
11541 OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;
11542
11543 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
11544 for_stmt = inner_for_stmt;
11545
11546   /* For taskloop, we need to gimplify the start, end and step before the
11547      taskloop, outside of the taskloop omp context.  */
11548 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
11549 {
11550 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
11551 {
11552 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
11553 gimple_seq *for_pre_p = (gimple_seq_empty_p (for_pre_body)
11554 ? pre_p : &for_pre_body);
11555 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
11556 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
11557 {
11558 tree v = TREE_OPERAND (t, 1);
11559 gimplify_omp_taskloop_expr (type, &TREE_VEC_ELT (v, 1),
11560 for_pre_p, orig_for_stmt);
11561 gimplify_omp_taskloop_expr (type, &TREE_VEC_ELT (v, 2),
11562 for_pre_p, orig_for_stmt);
11563 }
11564 else
11565 gimplify_omp_taskloop_expr (type, &TREE_OPERAND (t, 1), for_pre_p,
11566 orig_for_stmt);
11567
11568 /* Handle OMP_FOR_COND. */
11569 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
11570 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
11571 {
11572 tree v = TREE_OPERAND (t, 1);
11573 gimplify_omp_taskloop_expr (type, &TREE_VEC_ELT (v, 1),
11574 for_pre_p, orig_for_stmt);
11575 gimplify_omp_taskloop_expr (type, &TREE_VEC_ELT (v, 2),
11576 for_pre_p, orig_for_stmt);
11577 }
11578 else
11579 gimplify_omp_taskloop_expr (type, &TREE_OPERAND (t, 1), for_pre_p,
11580 orig_for_stmt);
11581
11582 /* Handle OMP_FOR_INCR. */
11583 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
11584 if (TREE_CODE (t) == MODIFY_EXPR)
11585 {
11586 decl = TREE_OPERAND (t, 0);
11587 t = TREE_OPERAND (t, 1);
11588 tree *tp = &TREE_OPERAND (t, 1);
11589 if (TREE_CODE (t) == PLUS_EXPR && *tp == decl)
11590 tp = &TREE_OPERAND (t, 0);
11591
11592 gimplify_omp_taskloop_expr (NULL_TREE, tp, for_pre_p,
11593 orig_for_stmt);
11594 }
11595 }
11596
11597 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (orig_for_stmt), pre_p, ort,
11598 OMP_TASKLOOP);
11599 }
11600
11601 if (orig_for_stmt != for_stmt)
11602 gimplify_omp_ctxp->combined_loop = true;
11603
11604 for_body = NULL;
11605 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
11606 == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
11607 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
11608 == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));
11609
11610 tree c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ORDERED);
11611 bool is_doacross = false;
11612 if (c && OMP_CLAUSE_ORDERED_EXPR (c))
11613 {
11614 is_doacross = true;
11615 gimplify_omp_ctxp->loop_iter_var.create (TREE_VEC_LENGTH
11616 (OMP_FOR_INIT (for_stmt))
11617 * 2);
11618 }
11619 int collapse = 1, tile = 0;
11620 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_COLLAPSE);
11621 if (c)
11622 collapse = tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (c));
11623 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_TILE);
11624 if (c)
11625 tile = list_length (OMP_CLAUSE_TILE_LIST (c));
11626 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
11627 {
11628 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
11629 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
11630 decl = TREE_OPERAND (t, 0);
11631 gcc_assert (DECL_P (decl));
11632 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
11633 || POINTER_TYPE_P (TREE_TYPE (decl)));
11634 if (is_doacross)
11635 {
11636 if (TREE_CODE (for_stmt) == OMP_FOR && OMP_FOR_ORIG_DECLS (for_stmt))
11637 {
11638 tree orig_decl = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
11639 if (TREE_CODE (orig_decl) == TREE_LIST)
11640 {
11641 orig_decl = TREE_PURPOSE (orig_decl);
11642 if (!orig_decl)
11643 orig_decl = decl;
11644 }
11645 gimplify_omp_ctxp->loop_iter_var.quick_push (orig_decl);
11646 }
11647 else
11648 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
11649 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
11650 }
11651
11652 /* Make sure the iteration variable is private. */
11653 tree c = NULL_TREE;
11654 tree c2 = NULL_TREE;
11655 if (orig_for_stmt != for_stmt)
11656 {
11657 /* Preserve this information until we gimplify the inner simd. */
11658 if (has_decl_expr
11659 && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
11660 TREE_PRIVATE (t) = 1;
11661 }
11662 else if (ort == ORT_SIMD)
11663 {
11664 splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
11665 (splay_tree_key) decl);
11666 omp_is_private (gimplify_omp_ctxp, decl,
11667 1 + (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
11668 != 1));
11669 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
11670 {
11671 omp_notice_variable (gimplify_omp_ctxp, decl, true);
11672 if (n->value & GOVD_LASTPRIVATE_CONDITIONAL)
11673 for (tree c3 = omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
11674 OMP_CLAUSE_LASTPRIVATE);
11675 c3; c3 = omp_find_clause (OMP_CLAUSE_CHAIN (c3),
11676 OMP_CLAUSE_LASTPRIVATE))
11677 if (OMP_CLAUSE_DECL (c3) == decl)
11678 {
11679 warning_at (OMP_CLAUSE_LOCATION (c3), 0,
11680 "conditional %<lastprivate%> on loop "
11681 "iterator %qD ignored", decl);
11682 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c3) = 0;
11683 n->value &= ~GOVD_LASTPRIVATE_CONDITIONAL;
11684 }
11685 }
11686 else if (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1 && !loop_p)
11687 {
11688 c = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
11689 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
11690 unsigned int flags = GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN;
11691 if ((has_decl_expr
11692 && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
11693 || TREE_PRIVATE (t))
11694 {
11695 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
11696 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
11697 }
11698 struct gimplify_omp_ctx *outer
11699 = gimplify_omp_ctxp->outer_context;
11700 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
11701 {
11702 if (outer->region_type == ORT_WORKSHARE
11703 && outer->combined_loop)
11704 {
11705 n = splay_tree_lookup (outer->variables,
11706 (splay_tree_key)decl);
11707 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
11708 {
11709 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
11710 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
11711 }
11712 else
11713 {
11714 struct gimplify_omp_ctx *octx = outer->outer_context;
11715 if (octx
11716 && octx->region_type == ORT_COMBINED_PARALLEL
11717 && octx->outer_context
11718 && (octx->outer_context->region_type
11719 == ORT_WORKSHARE)
11720 && octx->outer_context->combined_loop)
11721 {
11722 octx = octx->outer_context;
11723 n = splay_tree_lookup (octx->variables,
11724 (splay_tree_key)decl);
11725 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
11726 {
11727 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
11728 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
11729 }
11730 }
11731 }
11732 }
11733 }
11734
11735 OMP_CLAUSE_DECL (c) = decl;
11736 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
11737 OMP_FOR_CLAUSES (for_stmt) = c;
11738 omp_add_variable (gimplify_omp_ctxp, decl, flags);
11739 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
11740 {
11741 if (outer->region_type == ORT_WORKSHARE
11742 && outer->combined_loop)
11743 {
11744 if (outer->outer_context
11745 && (outer->outer_context->region_type
11746 == ORT_COMBINED_PARALLEL))
11747 outer = outer->outer_context;
11748 else if (omp_check_private (outer, decl, false))
11749 outer = NULL;
11750 }
11751 else if (((outer->region_type & ORT_TASKLOOP)
11752 == ORT_TASKLOOP)
11753 && outer->combined_loop
11754 && !omp_check_private (gimplify_omp_ctxp,
11755 decl, false))
11756 ;
11757 else if (outer->region_type != ORT_COMBINED_PARALLEL)
11758 {
11759 omp_notice_variable (outer, decl, true);
11760 outer = NULL;
11761 }
11762 if (outer)
11763 {
11764 n = splay_tree_lookup (outer->variables,
11765 (splay_tree_key)decl);
11766 if (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
11767 {
11768 omp_add_variable (outer, decl,
11769 GOVD_LASTPRIVATE | GOVD_SEEN);
11770 if (outer->region_type == ORT_COMBINED_PARALLEL
11771 && outer->outer_context
11772 && (outer->outer_context->region_type
11773 == ORT_WORKSHARE)
11774 && outer->outer_context->combined_loop)
11775 {
11776 outer = outer->outer_context;
11777 n = splay_tree_lookup (outer->variables,
11778 (splay_tree_key)decl);
11779 if (omp_check_private (outer, decl, false))
11780 outer = NULL;
11781 else if (n == NULL
11782 || ((n->value & GOVD_DATA_SHARE_CLASS)
11783 == 0))
11784 omp_add_variable (outer, decl,
11785 GOVD_LASTPRIVATE
11786 | GOVD_SEEN);
11787 else
11788 outer = NULL;
11789 }
11790 if (outer && outer->outer_context
11791 && ((outer->outer_context->region_type
11792 & ORT_COMBINED_TEAMS) == ORT_COMBINED_TEAMS
11793 || (((outer->region_type & ORT_TASKLOOP)
11794 == ORT_TASKLOOP)
11795 && (outer->outer_context->region_type
11796 == ORT_COMBINED_PARALLEL))))
11797 {
11798 outer = outer->outer_context;
11799 n = splay_tree_lookup (outer->variables,
11800 (splay_tree_key)decl);
11801 if (n == NULL
11802 || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
11803 omp_add_variable (outer, decl,
11804 GOVD_SHARED | GOVD_SEEN);
11805 else
11806 outer = NULL;
11807 }
11808 if (outer && outer->outer_context)
11809 omp_notice_variable (outer->outer_context, decl,
11810 true);
11811 }
11812 }
11813 }
11814 }
11815 else
11816 {
11817 bool lastprivate
11818 = (!has_decl_expr
11819 || !bitmap_bit_p (has_decl_expr, DECL_UID (decl)));
11820 if (TREE_PRIVATE (t))
11821 lastprivate = false;
11822 if (loop_p && OMP_FOR_ORIG_DECLS (for_stmt))
11823 {
11824 tree elt = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
11825 if (TREE_CODE (elt) == TREE_LIST && TREE_PURPOSE (elt))
11826 lastprivate = false;
11827 }
11828
11829 struct gimplify_omp_ctx *outer
11830 = gimplify_omp_ctxp->outer_context;
11831 if (outer && lastprivate)
11832 {
11833 if (outer->region_type == ORT_WORKSHARE
11834 && outer->combined_loop)
11835 {
11836 n = splay_tree_lookup (outer->variables,
11837 (splay_tree_key)decl);
11838 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
11839 {
11840 lastprivate = false;
11841 outer = NULL;
11842 }
11843 else if (outer->outer_context
11844 && (outer->outer_context->region_type
11845 == ORT_COMBINED_PARALLEL))
11846 outer = outer->outer_context;
11847 else if (omp_check_private (outer, decl, false))
11848 outer = NULL;
11849 }
11850 else if (((outer->region_type & ORT_TASKLOOP)
11851 == ORT_TASKLOOP)
11852 && outer->combined_loop
11853 && !omp_check_private (gimplify_omp_ctxp,
11854 decl, false))
11855 ;
11856 else if (outer->region_type != ORT_COMBINED_PARALLEL)
11857 {
11858 omp_notice_variable (outer, decl, true);
11859 outer = NULL;
11860 }
11861 if (outer)
11862 {
11863 n = splay_tree_lookup (outer->variables,
11864 (splay_tree_key)decl);
11865 if (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
11866 {
11867 omp_add_variable (outer, decl,
11868 GOVD_LASTPRIVATE | GOVD_SEEN);
11869 if (outer->region_type == ORT_COMBINED_PARALLEL
11870 && outer->outer_context
11871 && (outer->outer_context->region_type
11872 == ORT_WORKSHARE)
11873 && outer->outer_context->combined_loop)
11874 {
11875 outer = outer->outer_context;
11876 n = splay_tree_lookup (outer->variables,
11877 (splay_tree_key)decl);
11878 if (omp_check_private (outer, decl, false))
11879 outer = NULL;
11880 else if (n == NULL
11881 || ((n->value & GOVD_DATA_SHARE_CLASS)
11882 == 0))
11883 omp_add_variable (outer, decl,
11884 GOVD_LASTPRIVATE
11885 | GOVD_SEEN);
11886 else
11887 outer = NULL;
11888 }
11889 if (outer && outer->outer_context
11890 && ((outer->outer_context->region_type
11891 & ORT_COMBINED_TEAMS) == ORT_COMBINED_TEAMS
11892 || (((outer->region_type & ORT_TASKLOOP)
11893 == ORT_TASKLOOP)
11894 && (outer->outer_context->region_type
11895 == ORT_COMBINED_PARALLEL))))
11896 {
11897 outer = outer->outer_context;
11898 n = splay_tree_lookup (outer->variables,
11899 (splay_tree_key)decl);
11900 if (n == NULL
11901 || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
11902 omp_add_variable (outer, decl,
11903 GOVD_SHARED | GOVD_SEEN);
11904 else
11905 outer = NULL;
11906 }
11907 if (outer && outer->outer_context)
11908 omp_notice_variable (outer->outer_context, decl,
11909 true);
11910 }
11911 }
11912 }
11913
11914 c = build_omp_clause (input_location,
11915 lastprivate ? OMP_CLAUSE_LASTPRIVATE
11916 : OMP_CLAUSE_PRIVATE);
11917 OMP_CLAUSE_DECL (c) = decl;
11918 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
11919 OMP_FOR_CLAUSES (for_stmt) = c;
11920 omp_add_variable (gimplify_omp_ctxp, decl,
11921 (lastprivate ? GOVD_LASTPRIVATE : GOVD_PRIVATE)
11922 | GOVD_EXPLICIT | GOVD_SEEN);
11923 c = NULL_TREE;
11924 }
11925 }
11926 else if (omp_is_private (gimplify_omp_ctxp, decl, 0))
11927 {
11928 omp_notice_variable (gimplify_omp_ctxp, decl, true);
11929 splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
11930 (splay_tree_key) decl);
11931 if (n && (n->value & GOVD_LASTPRIVATE_CONDITIONAL))
11932 for (tree c3 = omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
11933 OMP_CLAUSE_LASTPRIVATE);
11934 c3; c3 = omp_find_clause (OMP_CLAUSE_CHAIN (c3),
11935 OMP_CLAUSE_LASTPRIVATE))
11936 if (OMP_CLAUSE_DECL (c3) == decl)
11937 {
11938 warning_at (OMP_CLAUSE_LOCATION (c3), 0,
11939 "conditional %<lastprivate%> on loop "
11940 "iterator %qD ignored", decl);
11941 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c3) = 0;
11942 n->value &= ~GOVD_LASTPRIVATE_CONDITIONAL;
11943 }
11944 }
11945 else
11946 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
11947
11948 /* If DECL is not a gimple register, create a temporary variable to act
11949 as an iteration counter. This is valid, since DECL cannot be
11950 modified in the body of the loop. Similarly for any iteration vars
11951 in simd with collapse > 1 where the iterator vars must be
11952 lastprivate. */
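      /* (Illustrative: if the iterator "i" is addressable, a temporary such
	 as "i.0" takes its place in the loop control, and "i = i.0" is
	 emitted at the start of the loop body so the user-visible "i" stays
	 current.)  */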
11953 if (orig_for_stmt != for_stmt)
11954 var = decl;
11955 else if (!is_gimple_reg (decl)
11956 || (ort == ORT_SIMD
11957 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1))
11958 {
11959 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
11960 /* Make sure omp_add_variable is not called on it prematurely.
11961 We call it ourselves a few lines later. */
11962 gimplify_omp_ctxp = NULL;
11963 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
11964 gimplify_omp_ctxp = ctx;
11965 TREE_OPERAND (t, 0) = var;
11966
11967 gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));
11968
11969 if (ort == ORT_SIMD
11970 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
11971 {
11972 c2 = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
11973 OMP_CLAUSE_LINEAR_NO_COPYIN (c2) = 1;
11974 OMP_CLAUSE_LINEAR_NO_COPYOUT (c2) = 1;
11975 OMP_CLAUSE_DECL (c2) = var;
11976 OMP_CLAUSE_CHAIN (c2) = OMP_FOR_CLAUSES (for_stmt);
11977 OMP_FOR_CLAUSES (for_stmt) = c2;
11978 omp_add_variable (gimplify_omp_ctxp, var,
11979 GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN);
11980 if (c == NULL_TREE)
11981 {
11982 c = c2;
11983 c2 = NULL_TREE;
11984 }
11985 }
11986 else
11987 omp_add_variable (gimplify_omp_ctxp, var,
11988 GOVD_PRIVATE | GOVD_SEEN);
11989 }
11990 else
11991 var = decl;
11992
11993 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
11994 {
11995 tree lb = TREE_OPERAND (t, 1);
11996 tret = gimplify_expr (&TREE_VEC_ELT (lb, 1), &for_pre_body, NULL,
11997 is_gimple_val, fb_rvalue, false);
11998 ret = MIN (ret, tret);
11999 tret = gimplify_expr (&TREE_VEC_ELT (lb, 2), &for_pre_body, NULL,
12000 is_gimple_val, fb_rvalue, false);
12001 }
12002 else
12003 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
12004 is_gimple_val, fb_rvalue, false);
12005 ret = MIN (ret, tret);
12006 if (ret == GS_ERROR)
12007 return ret;
12008
12009 /* Handle OMP_FOR_COND. */
12010 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
12011 gcc_assert (COMPARISON_CLASS_P (t));
12012 gcc_assert (TREE_OPERAND (t, 0) == decl);
12013
12014 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
12015 {
12016 tree ub = TREE_OPERAND (t, 1);
12017 tret = gimplify_expr (&TREE_VEC_ELT (ub, 1), &for_pre_body, NULL,
12018 is_gimple_val, fb_rvalue, false);
12019 ret = MIN (ret, tret);
12020 tret = gimplify_expr (&TREE_VEC_ELT (ub, 2), &for_pre_body, NULL,
12021 is_gimple_val, fb_rvalue, false);
12022 }
12023 else
12024 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
12025 is_gimple_val, fb_rvalue, false);
12026 ret = MIN (ret, tret);
12027
12028 /* Handle OMP_FOR_INCR. */
12029 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
12030 switch (TREE_CODE (t))
12031 {
12032 case PREINCREMENT_EXPR:
12033 case POSTINCREMENT_EXPR:
12034 {
12035 tree decl = TREE_OPERAND (t, 0);
12036 /* c_omp_for_incr_canonicalize_ptr() should have been
12037 called to massage things appropriately. */
12038 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
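	    /* (E.g. a pointer increment "p++" is expected to arrive here
	       already canonicalized as "p = p + 1", which the MODIFY_EXPR
	       case below handles.)  */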
12039
12040 if (orig_for_stmt != for_stmt)
12041 break;
12042 t = build_int_cst (TREE_TYPE (decl), 1);
12043 if (c)
12044 OMP_CLAUSE_LINEAR_STEP (c) = t;
12045 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
12046 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
12047 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
12048 break;
12049 }
12050
12051 case PREDECREMENT_EXPR:
12052 case POSTDECREMENT_EXPR:
12053 /* c_omp_for_incr_canonicalize_ptr() should have been
12054 called to massage things appropriately. */
12055 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
12056 if (orig_for_stmt != for_stmt)
12057 break;
12058 t = build_int_cst (TREE_TYPE (decl), -1);
12059 if (c)
12060 OMP_CLAUSE_LINEAR_STEP (c) = t;
12061 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
12062 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
12063 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
12064 break;
12065
12066 case MODIFY_EXPR:
12067 gcc_assert (TREE_OPERAND (t, 0) == decl);
12068 TREE_OPERAND (t, 0) = var;
12069
12070 t = TREE_OPERAND (t, 1);
12071 switch (TREE_CODE (t))
12072 {
12073 case PLUS_EXPR:
12074 if (TREE_OPERAND (t, 1) == decl)
12075 {
12076 TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
12077 TREE_OPERAND (t, 0) = var;
12078 break;
12079 }
12080
12081 /* Fallthru. */
12082 case MINUS_EXPR:
12083 case POINTER_PLUS_EXPR:
12084 gcc_assert (TREE_OPERAND (t, 0) == decl);
12085 TREE_OPERAND (t, 0) = var;
12086 break;
12087 default:
12088 gcc_unreachable ();
12089 }
12090
12091 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
12092 is_gimple_val, fb_rvalue, false);
12093 ret = MIN (ret, tret);
12094 if (c)
12095 {
12096 tree step = TREE_OPERAND (t, 1);
12097 tree stept = TREE_TYPE (decl);
12098 if (POINTER_TYPE_P (stept))
12099 stept = sizetype;
12100 step = fold_convert (stept, step);
12101 if (TREE_CODE (t) == MINUS_EXPR)
12102 step = fold_build1 (NEGATE_EXPR, stept, step);
12103 OMP_CLAUSE_LINEAR_STEP (c) = step;
12104 if (step != TREE_OPERAND (t, 1))
12105 {
12106 tret = gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c),
12107 &for_pre_body, NULL,
12108 is_gimple_val, fb_rvalue, false);
12109 ret = MIN (ret, tret);
12110 }
12111 }
12112 break;
12113
12114 default:
12115 gcc_unreachable ();
12116 }
12117
12118 if (c2)
12119 {
12120 gcc_assert (c);
12121 OMP_CLAUSE_LINEAR_STEP (c2) = OMP_CLAUSE_LINEAR_STEP (c);
12122 }
12123
12124 if ((var != decl || collapse > 1 || tile) && orig_for_stmt == for_stmt)
12125 {
12126 for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
12127 if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
12128 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
12129 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
12130 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)
12131 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) == NULL))
12132 && OMP_CLAUSE_DECL (c) == decl)
12133 {
12134 if (is_doacross && (collapse == 1 || i >= collapse))
12135 t = var;
12136 else
12137 {
12138 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
12139 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
12140 gcc_assert (TREE_OPERAND (t, 0) == var);
12141 t = TREE_OPERAND (t, 1);
12142 gcc_assert (TREE_CODE (t) == PLUS_EXPR
12143 || TREE_CODE (t) == MINUS_EXPR
12144 || TREE_CODE (t) == POINTER_PLUS_EXPR);
12145 gcc_assert (TREE_OPERAND (t, 0) == var);
12146 t = build2 (TREE_CODE (t), TREE_TYPE (decl),
12147 is_doacross ? var : decl,
12148 TREE_OPERAND (t, 1));
12149 }
12150 gimple_seq *seq;
12151 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
12152 seq = &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c);
12153 else
12154 seq = &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c);
12155 push_gimplify_context ();
12156 gimplify_assign (decl, t, seq);
12157 gimple *bind = NULL;
12158 if (gimplify_ctxp->temps)
12159 {
12160 bind = gimple_build_bind (NULL_TREE, *seq, NULL_TREE);
12161 *seq = NULL;
12162 gimplify_seq_add_stmt (seq, bind);
12163 }
12164 pop_gimplify_context (bind);
12165 }
12166 }
12167 if (OMP_FOR_NON_RECTANGULAR (for_stmt) && var != decl)
12168 for (int j = i + 1; j < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); j++)
12169 {
12170 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), j);
12171 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
12172 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC
12173 && TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) == decl)
12174 TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) = var;
12175 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), j);
12176 gcc_assert (COMPARISON_CLASS_P (t));
12177 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC
12178 && TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) == decl)
12179 TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) = var;
12180 }
12181 }
12182
12183 BITMAP_FREE (has_decl_expr);
12184
12185 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP
12186 || (loop_p && orig_for_stmt == for_stmt))
12187 {
12188 push_gimplify_context ();
12189 if (TREE_CODE (OMP_FOR_BODY (orig_for_stmt)) != BIND_EXPR)
12190 {
12191 OMP_FOR_BODY (orig_for_stmt)
12192 = build3 (BIND_EXPR, void_type_node, NULL,
12193 OMP_FOR_BODY (orig_for_stmt), NULL);
12194 TREE_SIDE_EFFECTS (OMP_FOR_BODY (orig_for_stmt)) = 1;
12195 }
12196 }
12197
12198 gimple *g = gimplify_and_return_first (OMP_FOR_BODY (orig_for_stmt),
12199 &for_body);
12200
12201 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP
12202 || (loop_p && orig_for_stmt == for_stmt))
12203 {
12204 if (gimple_code (g) == GIMPLE_BIND)
12205 pop_gimplify_context (g);
12206 else
12207 pop_gimplify_context (NULL);
12208 }
12209
12210 if (orig_for_stmt != for_stmt)
12211 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
12212 {
12213 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
12214 decl = TREE_OPERAND (t, 0);
12215 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
12216 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
12217 gimplify_omp_ctxp = ctx->outer_context;
12218 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
12219 gimplify_omp_ctxp = ctx;
12220 omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
12221 TREE_OPERAND (t, 0) = var;
12222 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
12223 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
12224 TREE_OPERAND (TREE_OPERAND (t, 1), 0) = var;
12225 if (OMP_FOR_NON_RECTANGULAR (for_stmt))
12226 for (int j = i + 1;
12227 j < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); j++)
12228 {
12229 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), j);
12230 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
12231 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC
12232 && TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) == decl)
12233 {
12234 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
12235 TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) = var;
12236 }
12237 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), j);
12238 gcc_assert (COMPARISON_CLASS_P (t));
12239 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC
12240 && TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) == decl)
12241 {
12242 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
12243 TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) = var;
12244 }
12245 }
12246 }
12247
12248 gimplify_adjust_omp_clauses (pre_p, for_body,
12249 &OMP_FOR_CLAUSES (orig_for_stmt),
12250 TREE_CODE (orig_for_stmt));
12251
12252 int kind;
12253 switch (TREE_CODE (orig_for_stmt))
12254 {
12255 case OMP_FOR: kind = GF_OMP_FOR_KIND_FOR; break;
12256 case OMP_SIMD: kind = GF_OMP_FOR_KIND_SIMD; break;
12257 case OMP_DISTRIBUTE: kind = GF_OMP_FOR_KIND_DISTRIBUTE; break;
12258 case OMP_TASKLOOP: kind = GF_OMP_FOR_KIND_TASKLOOP; break;
12259 case OACC_LOOP: kind = GF_OMP_FOR_KIND_OACC_LOOP; break;
12260 default:
12261 gcc_unreachable ();
12262 }
12263 if (loop_p && kind == GF_OMP_FOR_KIND_SIMD)
12264 {
12265 gimplify_seq_add_seq (pre_p, for_pre_body);
12266 for_pre_body = NULL;
12267 }
12268 gfor = gimple_build_omp_for (for_body, kind, OMP_FOR_CLAUSES (orig_for_stmt),
12269 TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
12270 for_pre_body);
12271 if (orig_for_stmt != for_stmt)
12272 gimple_omp_for_set_combined_p (gfor, true);
12273 if (gimplify_omp_ctxp
12274 && (gimplify_omp_ctxp->combined_loop
12275 || (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
12276 && gimplify_omp_ctxp->outer_context
12277 && gimplify_omp_ctxp->outer_context->combined_loop)))
12278 {
12279 gimple_omp_for_set_combined_into_p (gfor, true);
12280 if (gimplify_omp_ctxp->combined_loop)
12281 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_SIMD);
12282 else
12283 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_FOR);
12284 }
12285
12286 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
12287 {
12288 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
12289 gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
12290 gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
12291 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
12292 gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
12293 gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
12294 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
12295 gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
12296 }
12297
12298   /* OMP_TASKLOOP is gimplified as two GIMPLE_OMP_FOR taskloop
12299      constructs with a GIMPLE_OMP_TASK sandwiched in between them.
12300      The outer taskloop computes the number of iterations and the counts
12301      for collapsed loops, and holds the taskloop-specific clauses.
12302      The task construct stands for the effect of data sharing on the
12303      explicit task it creates, and the inner taskloop stands for the
12304      expansion of the static loop inside the explicit task construct.  */
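  /* I.e., roughly:
       GIMPLE_OMP_FOR (taskloop; outer clauses)
	 GIMPLE_OMP_TASK (task clauses)
	   GIMPLE_OMP_FOR (taskloop; inner clauses)
	     <loop body>  */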
12305 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
12306 {
12307 tree *gfor_clauses_ptr = gimple_omp_for_clauses_ptr (gfor);
12308 tree task_clauses = NULL_TREE;
12309 tree c = *gfor_clauses_ptr;
12310 tree *gtask_clauses_ptr = &task_clauses;
12311 tree outer_for_clauses = NULL_TREE;
12312 tree *gforo_clauses_ptr = &outer_for_clauses;
12313 bitmap lastprivate_uids = NULL;
12314 if (omp_find_clause (c, OMP_CLAUSE_ALLOCATE))
12315 {
12316 c = omp_find_clause (c, OMP_CLAUSE_LASTPRIVATE);
12317 if (c)
12318 {
12319 lastprivate_uids = BITMAP_ALLOC (NULL);
12320 for (; c; c = omp_find_clause (OMP_CLAUSE_CHAIN (c),
12321 OMP_CLAUSE_LASTPRIVATE))
12322 bitmap_set_bit (lastprivate_uids,
12323 DECL_UID (OMP_CLAUSE_DECL (c)));
12324 }
12325 c = *gfor_clauses_ptr;
12326 }
12327 for (; c; c = OMP_CLAUSE_CHAIN (c))
12328 switch (OMP_CLAUSE_CODE (c))
12329 {
12330 /* These clauses are allowed on the task; move them there. */
12331 case OMP_CLAUSE_SHARED:
12332 case OMP_CLAUSE_FIRSTPRIVATE:
12333 case OMP_CLAUSE_DEFAULT:
12334 case OMP_CLAUSE_IF:
12335 case OMP_CLAUSE_UNTIED:
12336 case OMP_CLAUSE_FINAL:
12337 case OMP_CLAUSE_MERGEABLE:
12338 case OMP_CLAUSE_PRIORITY:
12339 case OMP_CLAUSE_REDUCTION:
12340 case OMP_CLAUSE_IN_REDUCTION:
12341 *gtask_clauses_ptr = c;
12342 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
12343 break;
12344 case OMP_CLAUSE_PRIVATE:
12345 if (OMP_CLAUSE_PRIVATE_TASKLOOP_IV (c))
12346 {
12347 /* We want private on the outer taskloop and firstprivate
12348 on the task. */
12349 *gtask_clauses_ptr
12350 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
12351 OMP_CLAUSE_FIRSTPRIVATE);
12352 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
12353 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL,
12354 openacc);
12355 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
12356 *gforo_clauses_ptr = c;
12357 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
12358 }
12359 else
12360 {
12361 *gtask_clauses_ptr = c;
12362 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
12363 }
12364 break;
12365 /* These clauses go onto the outer taskloop. */
12366 case OMP_CLAUSE_GRAINSIZE:
12367 case OMP_CLAUSE_NUM_TASKS:
12368 case OMP_CLAUSE_NOGROUP:
12369 *gforo_clauses_ptr = c;
12370 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
12371 break;
12372 /* The collapse clause is duplicated on both taskloops. */
12373 case OMP_CLAUSE_COLLAPSE:
12374 *gfor_clauses_ptr = c;
12375 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
12376 *gforo_clauses_ptr = copy_node (c);
12377 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
12378 break;
12379 /* For lastprivate, keep the clause on the inner taskloop, and add
12380 a shared clause on the task. If the same decl is also firstprivate,
12381 also add a firstprivate clause on the inner taskloop. */
12382 case OMP_CLAUSE_LASTPRIVATE:
12383 if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c))
12384 {
12385 /* For taskloop C++ lastprivate IVs, we want:
12386 1) private on outer taskloop
12387 2) firstprivate and shared on task
12388 3) lastprivate on inner taskloop */
12389 *gtask_clauses_ptr
12390 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
12391 OMP_CLAUSE_FIRSTPRIVATE);
12392 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
12393 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL,
12394 openacc);
12395 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
12396 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c) = 1;
12397 *gforo_clauses_ptr = build_omp_clause (OMP_CLAUSE_LOCATION (c),
12398 OMP_CLAUSE_PRIVATE);
12399 OMP_CLAUSE_DECL (*gforo_clauses_ptr) = OMP_CLAUSE_DECL (c);
12400 OMP_CLAUSE_PRIVATE_TASKLOOP_IV (*gforo_clauses_ptr) = 1;
12401 TREE_TYPE (*gforo_clauses_ptr) = TREE_TYPE (c);
12402 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
12403 }
12404 *gfor_clauses_ptr = c;
12405 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
12406 *gtask_clauses_ptr
12407 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_SHARED);
12408 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
12409 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
12410 OMP_CLAUSE_SHARED_FIRSTPRIVATE (*gtask_clauses_ptr) = 1;
12411 gtask_clauses_ptr
12412 = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
12413 break;
12414 /* The allocate clause is duplicated on the task and the inner taskloop
12415 if the decl is lastprivate; otherwise it is put just on the task. */
12416 case OMP_CLAUSE_ALLOCATE:
12417 if (lastprivate_uids
12418 && bitmap_bit_p (lastprivate_uids,
12419 DECL_UID (OMP_CLAUSE_DECL (c))))
12420 {
12421 if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)
12422 && DECL_P (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)))
12423 {
12424 /* Additionally, put a firstprivate clause on the task
12425 for the allocator if it is not constant. */
12426 *gtask_clauses_ptr
12427 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
12428 OMP_CLAUSE_FIRSTPRIVATE);
12429 OMP_CLAUSE_DECL (*gtask_clauses_ptr)
12430 = OMP_CLAUSE_ALLOCATE_ALLOCATOR (c);
12431 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
12432 }
12433 *gfor_clauses_ptr = c;
12434 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
12435 *gtask_clauses_ptr = copy_node (c);
12436 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
12437 }
12438 else
12439 {
12440 *gtask_clauses_ptr = c;
12441 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
12442 }
12443 break;
12444 default:
12445 gcc_unreachable ();
12446 }
12447 *gfor_clauses_ptr = NULL_TREE;
12448 *gtask_clauses_ptr = NULL_TREE;
12449 *gforo_clauses_ptr = NULL_TREE;
12450 BITMAP_FREE (lastprivate_uids);
12451 g = gimple_build_bind (NULL_TREE, gfor, NULL_TREE);
12452 g = gimple_build_omp_task (g, task_clauses, NULL_TREE, NULL_TREE,
12453 NULL_TREE, NULL_TREE, NULL_TREE);
12454 gimple_omp_task_set_taskloop_p (g, true);
12455 g = gimple_build_bind (NULL_TREE, g, NULL_TREE);
12456 gomp_for *gforo
12457 = gimple_build_omp_for (g, GF_OMP_FOR_KIND_TASKLOOP, outer_for_clauses,
12458 gimple_omp_for_collapse (gfor),
12459 gimple_omp_for_pre_body (gfor));
12460 gimple_omp_for_set_pre_body (gfor, NULL);
12461 gimple_omp_for_set_combined_p (gforo, true);
12462 gimple_omp_for_set_combined_into_p (gfor, true);
12463 for (i = 0; i < (int) gimple_omp_for_collapse (gfor); i++)
12464 {
12465 tree type = TREE_TYPE (gimple_omp_for_index (gfor, i));
12466 tree v = create_tmp_var (type);
12467 gimple_omp_for_set_index (gforo, i, v);
12468 t = unshare_expr (gimple_omp_for_initial (gfor, i));
12469 gimple_omp_for_set_initial (gforo, i, t);
12470 gimple_omp_for_set_cond (gforo, i,
12471 gimple_omp_for_cond (gfor, i));
12472 t = unshare_expr (gimple_omp_for_final (gfor, i));
12473 gimple_omp_for_set_final (gforo, i, t);
12474 t = unshare_expr (gimple_omp_for_incr (gfor, i));
12475 gcc_assert (TREE_OPERAND (t, 0) == gimple_omp_for_index (gfor, i));
12476 TREE_OPERAND (t, 0) = v;
12477 gimple_omp_for_set_incr (gforo, i, t);
12478 t = build_omp_clause (input_location, OMP_CLAUSE_PRIVATE);
12479 OMP_CLAUSE_DECL (t) = v;
12480 OMP_CLAUSE_CHAIN (t) = gimple_omp_for_clauses (gforo);
12481 gimple_omp_for_set_clauses (gforo, t);
12482 if (OMP_FOR_NON_RECTANGULAR (for_stmt))
12483 {
12484 tree *p1 = NULL, *p2 = NULL;
12485 t = gimple_omp_for_initial (gforo, i);
12486 if (TREE_CODE (t) == TREE_VEC)
12487 p1 = &TREE_VEC_ELT (t, 0);
12488 t = gimple_omp_for_final (gforo, i);
12489 if (TREE_CODE (t) == TREE_VEC)
12490 {
12491 if (p1)
12492 p2 = &TREE_VEC_ELT (t, 0);
12493 else
12494 p1 = &TREE_VEC_ELT (t, 0);
12495 }
12496 if (p1)
12497 {
12498 int j;
12499 for (j = 0; j < i; j++)
12500 if (*p1 == gimple_omp_for_index (gfor, j))
12501 {
12502 *p1 = gimple_omp_for_index (gforo, j);
12503 if (p2)
12504 *p2 = *p1;
12505 break;
12506 }
12507 gcc_assert (j < i);
12508 }
12509 }
12510 }
12511 gimplify_seq_add_stmt (pre_p, gforo);
12512 }
12513 else
12514 gimplify_seq_add_stmt (pre_p, gfor);
12515
12516 if (TREE_CODE (orig_for_stmt) == OMP_FOR)
12517 {
12518 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
12519 unsigned lastprivate_conditional = 0;
12520 while (ctx
12521 && (ctx->region_type == ORT_TARGET_DATA
12522 || ctx->region_type == ORT_TASKGROUP))
12523 ctx = ctx->outer_context;
12524 if (ctx && (ctx->region_type & ORT_PARALLEL) != 0)
12525 for (tree c = gimple_omp_for_clauses (gfor);
12526 c; c = OMP_CLAUSE_CHAIN (c))
12527 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
12528 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
12529 ++lastprivate_conditional;
12530 if (lastprivate_conditional)
12531 {
12532 struct omp_for_data fd;
12533 omp_extract_for_data (gfor, &fd, NULL);
12534 tree type = build_array_type_nelts (unsigned_type_for (fd.iter_type),
12535 lastprivate_conditional);
12536 tree var = create_tmp_var_raw (type);
12537 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
12538 OMP_CLAUSE_DECL (c) = var;
12539 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (gfor);
12540 gimple_omp_for_set_clauses (gfor, c);
12541 omp_add_variable (ctx, var, GOVD_CONDTEMP | GOVD_SEEN);
12542 }
12543 }
12544 else if (TREE_CODE (orig_for_stmt) == OMP_SIMD)
12545 {
12546 unsigned lastprivate_conditional = 0;
12547 for (tree c = gimple_omp_for_clauses (gfor); c; c = OMP_CLAUSE_CHAIN (c))
12548 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
12549 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
12550 ++lastprivate_conditional;
12551 if (lastprivate_conditional)
12552 {
12553 struct omp_for_data fd;
12554 omp_extract_for_data (gfor, &fd, NULL);
12555 tree type = unsigned_type_for (fd.iter_type);
12556 while (lastprivate_conditional--)
12557 {
12558 tree c = build_omp_clause (UNKNOWN_LOCATION,
12559 OMP_CLAUSE__CONDTEMP_);
12560 OMP_CLAUSE_DECL (c) = create_tmp_var (type);
12561 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (gfor);
12562 gimple_omp_for_set_clauses (gfor, c);
12563 }
12564 }
12565 }
12566
12567 if (ret != GS_ALL_DONE)
12568 return GS_ERROR;
12569 *expr_p = NULL_TREE;
12570 return GS_ALL_DONE;
12571 }
12572
12573 /* Helper for gimplify_omp_loop, called through walk_tree. */
12574
12575 static tree
12576 replace_reduction_placeholders (tree *tp, int *walk_subtrees, void *data)
12577 {
12578 if (DECL_P (*tp))
12579 {
12580 tree *d = (tree *) data;
12581 if (*tp == OMP_CLAUSE_REDUCTION_PLACEHOLDER (d[0]))
12582 {
12583 *tp = OMP_CLAUSE_REDUCTION_PLACEHOLDER (d[1]);
12584 *walk_subtrees = 0;
12585 }
12586 else if (*tp == OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (d[0]))
12587 {
12588 *tp = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (d[1]);
12589 *walk_subtrees = 0;
12590 }
12591 }
12592 return NULL_TREE;
12593 }
12594
12595 /* Gimplify the gross structure of an OMP_LOOP statement. */
12596
12597 static enum gimplify_status
12598 gimplify_omp_loop (tree *expr_p, gimple_seq *pre_p)
12599 {
12600 tree for_stmt = *expr_p;
12601 tree clauses = OMP_FOR_CLAUSES (for_stmt);
12602 struct gimplify_omp_ctx *octx = gimplify_omp_ctxp;
12603 enum omp_clause_bind_kind kind = OMP_CLAUSE_BIND_THREAD;
12604 int i;
12605
12606 /* If order is not present, the behavior is as if order(concurrent)
12607 appeared. */
12608 tree order = omp_find_clause (clauses, OMP_CLAUSE_ORDER);
12609 if (order == NULL_TREE)
12610 {
12611 order = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_ORDER);
12612 OMP_CLAUSE_CHAIN (order) = clauses;
12613 OMP_FOR_CLAUSES (for_stmt) = clauses = order;
12614 }
12615
12616 tree bind = omp_find_clause (clauses, OMP_CLAUSE_BIND);
12617 if (bind == NULL_TREE)
12618 {
12619 if (!flag_openmp) /* flag_openmp_simd */
12620 ;
12621 else if (octx && (octx->region_type & ORT_TEAMS) != 0)
12622 kind = OMP_CLAUSE_BIND_TEAMS;
12623 else if (octx && (octx->region_type & ORT_PARALLEL) != 0)
12624 kind = OMP_CLAUSE_BIND_PARALLEL;
12625 else
12626 {
12627 for (; octx; octx = octx->outer_context)
12628 {
12629 if ((octx->region_type & ORT_ACC) != 0
12630 || octx->region_type == ORT_NONE
12631 || octx->region_type == ORT_IMPLICIT_TARGET)
12632 continue;
12633 break;
12634 }
12635 if (octx == NULL && !in_omp_construct)
12636 error_at (EXPR_LOCATION (for_stmt),
12637 "%<bind%> clause not specified on a %<loop%> "
12638 "construct not nested inside another OpenMP construct");
12639 }
12640 bind = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_BIND);
12641 OMP_CLAUSE_CHAIN (bind) = clauses;
12642 OMP_CLAUSE_BIND_KIND (bind) = kind;
12643 OMP_FOR_CLAUSES (for_stmt) = bind;
12644 }
12645 else
12646 switch (OMP_CLAUSE_BIND_KIND (bind))
12647 {
12648 case OMP_CLAUSE_BIND_THREAD:
12649 break;
12650 case OMP_CLAUSE_BIND_PARALLEL:
12651 if (!flag_openmp) /* flag_openmp_simd */
12652 {
12653 OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
12654 break;
12655 }
12656 for (; octx; octx = octx->outer_context)
12657 if (octx->region_type == ORT_SIMD
12658 && omp_find_clause (octx->clauses, OMP_CLAUSE_BIND) == NULL_TREE)
12659 {
12660 error_at (EXPR_LOCATION (for_stmt),
12661 "%<bind(parallel)%> on a %<loop%> construct nested "
12662 "inside %<simd%> construct");
12663 OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
12664 break;
12665 }
12666 kind = OMP_CLAUSE_BIND_PARALLEL;
12667 break;
12668 case OMP_CLAUSE_BIND_TEAMS:
12669 if (!flag_openmp) /* flag_openmp_simd */
12670 {
12671 OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
12672 break;
12673 }
12674 if ((octx
12675 && octx->region_type != ORT_IMPLICIT_TARGET
12676 && octx->region_type != ORT_NONE
12677 && (octx->region_type & ORT_TEAMS) == 0)
12678 || in_omp_construct)
12679 {
12680 error_at (EXPR_LOCATION (for_stmt),
12681 "%<bind(teams)%> on a %<loop%> region not strictly "
12682 "nested inside of a %<teams%> region");
12683 OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
12684 break;
12685 }
12686 kind = OMP_CLAUSE_BIND_TEAMS;
12687 break;
12688 default:
12689 gcc_unreachable ();
12690 }
12691
12692 for (tree *pc = &OMP_FOR_CLAUSES (for_stmt); *pc; )
12693 switch (OMP_CLAUSE_CODE (*pc))
12694 {
12695 case OMP_CLAUSE_REDUCTION:
12696 if (OMP_CLAUSE_REDUCTION_INSCAN (*pc))
12697 {
12698 error_at (OMP_CLAUSE_LOCATION (*pc),
12699 "%<inscan%> %<reduction%> clause on "
12700 "%qs construct", "loop");
12701 OMP_CLAUSE_REDUCTION_INSCAN (*pc) = 0;
12702 }
12703 if (OMP_CLAUSE_REDUCTION_TASK (*pc))
12704 {
12705 error_at (OMP_CLAUSE_LOCATION (*pc),
12706 "invalid %<task%> reduction modifier on construct "
12707 "other than %<parallel%>, %qs or %<sections%>",
12708 lang_GNU_Fortran () ? "do" : "for");
12709 OMP_CLAUSE_REDUCTION_TASK (*pc) = 0;
12710 }
12711 pc = &OMP_CLAUSE_CHAIN (*pc);
12712 break;
12713 case OMP_CLAUSE_LASTPRIVATE:
12714 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
12715 {
12716 tree t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
12717 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
12718 if (OMP_CLAUSE_DECL (*pc) == TREE_OPERAND (t, 0))
12719 break;
12720 if (OMP_FOR_ORIG_DECLS (for_stmt)
12721 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt),
12722 i)) == TREE_LIST
12723 && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt),
12724 i)))
12725 {
12726 tree orig = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
12727 if (OMP_CLAUSE_DECL (*pc) == TREE_PURPOSE (orig))
12728 break;
12729 }
12730 }
12731 if (i == TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)))
12732 {
12733 error_at (OMP_CLAUSE_LOCATION (*pc),
12734 "%<lastprivate%> clause on a %<loop%> construct refers "
12735 "to a variable %qD which is not the loop iterator",
12736 OMP_CLAUSE_DECL (*pc));
12737 *pc = OMP_CLAUSE_CHAIN (*pc);
12738 break;
12739 }
12740 pc = &OMP_CLAUSE_CHAIN (*pc);
12741 break;
12742 default:
12743 pc = &OMP_CLAUSE_CHAIN (*pc);
12744 break;
12745 }
12746
12747 TREE_SET_CODE (for_stmt, OMP_SIMD);
12748
12749 int last;
12750 switch (kind)
12751 {
12752 case OMP_CLAUSE_BIND_THREAD: last = 0; break;
12753 case OMP_CLAUSE_BIND_PARALLEL: last = 1; break;
12754 case OMP_CLAUSE_BIND_TEAMS: last = 2; break;
12755 }
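/* Illustrative sketch (assumed mapping, not generated verbatim): after
   the OMP_SIMD rewrite above, bind(thread) leaves a plain
   "#pragma omp simd"; bind(parallel) wraps it into a worksharing
   "#pragma omp for simd" binding to the enclosing parallel; and
   bind(teams) builds "#pragma omp distribute parallel for simd"
   via the two passes below. */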
12756 for (int pass = 1; pass <= last; pass++)
12757 {
12758 if (pass == 2)
12759 {
12760 tree bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
12761 append_to_statement_list (*expr_p, &BIND_EXPR_BODY (bind));
12762 *expr_p = make_node (OMP_PARALLEL);
12763 TREE_TYPE (*expr_p) = void_type_node;
12764 OMP_PARALLEL_BODY (*expr_p) = bind;
12765 OMP_PARALLEL_COMBINED (*expr_p) = 1;
12766 SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (for_stmt));
12767 tree *pc = &OMP_PARALLEL_CLAUSES (*expr_p);
12768 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
12769 if (OMP_FOR_ORIG_DECLS (for_stmt)
12770 && (TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i))
12771 == TREE_LIST))
12772 {
12773 tree elt = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
12774 if (TREE_PURPOSE (elt) && TREE_VALUE (elt))
12775 {
12776 *pc = build_omp_clause (UNKNOWN_LOCATION,
12777 OMP_CLAUSE_FIRSTPRIVATE);
12778 OMP_CLAUSE_DECL (*pc) = TREE_VALUE (elt);
12779 pc = &OMP_CLAUSE_CHAIN (*pc);
12780 }
12781 }
12782 }
12783 tree t = make_node (pass == 2 ? OMP_DISTRIBUTE : OMP_FOR);
12784 tree *pc = &OMP_FOR_CLAUSES (t);
12785 TREE_TYPE (t) = void_type_node;
12786 OMP_FOR_BODY (t) = *expr_p;
12787 SET_EXPR_LOCATION (t, EXPR_LOCATION (for_stmt));
12788 for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
12789 switch (OMP_CLAUSE_CODE (c))
12790 {
12791 case OMP_CLAUSE_BIND:
12792 case OMP_CLAUSE_ORDER:
12793 case OMP_CLAUSE_COLLAPSE:
12794 *pc = copy_node (c);
12795 pc = &OMP_CLAUSE_CHAIN (*pc);
12796 break;
12797 case OMP_CLAUSE_PRIVATE:
12798 case OMP_CLAUSE_FIRSTPRIVATE:
12799 /* Only needed on innermost. */
12800 break;
12801 case OMP_CLAUSE_LASTPRIVATE:
12802 if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c) && pass != last)
12803 {
12804 *pc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
12805 OMP_CLAUSE_FIRSTPRIVATE);
12806 OMP_CLAUSE_DECL (*pc) = OMP_CLAUSE_DECL (c);
12807 lang_hooks.decls.omp_finish_clause (*pc, NULL, false);
12808 pc = &OMP_CLAUSE_CHAIN (*pc);
12809 }
12810 *pc = copy_node (c);
12811 OMP_CLAUSE_LASTPRIVATE_STMT (*pc) = NULL_TREE;
12812 TREE_TYPE (*pc) = unshare_expr (TREE_TYPE (c));
12813 if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c))
12814 {
12815 if (pass != last)
12816 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (*pc) = 1;
12817 else
12818 lang_hooks.decls.omp_finish_clause (*pc, NULL, false);
12819 OMP_CLAUSE_LASTPRIVATE_LOOP_IV (*pc) = 0;
12820 }
12821 pc = &OMP_CLAUSE_CHAIN (*pc);
12822 break;
12823 case OMP_CLAUSE_REDUCTION:
12824 *pc = copy_node (c);
12825 OMP_CLAUSE_DECL (*pc) = unshare_expr (OMP_CLAUSE_DECL (c));
12826 TREE_TYPE (*pc) = unshare_expr (TREE_TYPE (c));
12827 OMP_CLAUSE_REDUCTION_INIT (*pc)
12828 = unshare_expr (OMP_CLAUSE_REDUCTION_INIT (c));
12829 OMP_CLAUSE_REDUCTION_MERGE (*pc)
12830 = unshare_expr (OMP_CLAUSE_REDUCTION_MERGE (c));
12831 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc))
12832 {
12833 OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc)
12834 = copy_node (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c));
12835 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc))
12836 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc)
12837 = copy_node (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c));
12838 tree nc = *pc;
12839 tree data[2] = { c, nc };
12840 walk_tree_without_duplicates (&OMP_CLAUSE_REDUCTION_INIT (nc),
12841 replace_reduction_placeholders,
12842 data);
12843 walk_tree_without_duplicates (&OMP_CLAUSE_REDUCTION_MERGE (nc),
12844 replace_reduction_placeholders,
12845 data);
12846 }
12847 pc = &OMP_CLAUSE_CHAIN (*pc);
12848 break;
12849 default:
12850 gcc_unreachable ();
12851 }
12852 *pc = NULL_TREE;
12853 *expr_p = t;
12854 }
12855 return gimplify_omp_for (expr_p, pre_p);
12856 }
12857
12858
12859 /* Helper function of optimize_target_teams, find OMP_TEAMS inside
12860 of OMP_TARGET's body. */
12861
12862 static tree
12863 find_omp_teams (tree *tp, int *walk_subtrees, void *)
12864 {
12865 *walk_subtrees = 0;
12866 switch (TREE_CODE (*tp))
12867 {
12868 case OMP_TEAMS:
12869 return *tp;
12870 case BIND_EXPR:
12871 case STATEMENT_LIST:
12872 *walk_subtrees = 1;
12873 break;
12874 default:
12875 break;
12876 }
12877 return NULL_TREE;
12878 }
12879
12880 /* Helper function of optimize_target_teams, determine if the expression
12881 can be safely computed on the host before entering the target construct. */
12882
12883 static tree
12884 computable_teams_clause (tree *tp, int *walk_subtrees, void *)
12885 {
12886 splay_tree_node n;
12887
12888 if (TYPE_P (*tp))
12889 {
12890 *walk_subtrees = 0;
12891 return NULL_TREE;
12892 }
12893 switch (TREE_CODE (*tp))
12894 {
12895 case VAR_DECL:
12896 case PARM_DECL:
12897 case RESULT_DECL:
12898 *walk_subtrees = 0;
12899 if (error_operand_p (*tp)
12900 || !INTEGRAL_TYPE_P (TREE_TYPE (*tp))
12901 || DECL_HAS_VALUE_EXPR_P (*tp)
12902 || DECL_THREAD_LOCAL_P (*tp)
12903 || TREE_SIDE_EFFECTS (*tp)
12904 || TREE_THIS_VOLATILE (*tp))
12905 return *tp;
12906 if (is_global_var (*tp)
12907 && (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (*tp))
12908 || lookup_attribute ("omp declare target link",
12909 DECL_ATTRIBUTES (*tp))))
12910 return *tp;
12911 if (VAR_P (*tp)
12912 && !DECL_SEEN_IN_BIND_EXPR_P (*tp)
12913 && !is_global_var (*tp)
12914 && decl_function_context (*tp) == current_function_decl)
12915 return *tp;
12916 n = splay_tree_lookup (gimplify_omp_ctxp->variables,
12917 (splay_tree_key) *tp);
12918 if (n == NULL)
12919 {
12920 if (gimplify_omp_ctxp->defaultmap[GDMK_SCALAR] & GOVD_FIRSTPRIVATE)
12921 return NULL_TREE;
12922 return *tp;
12923 }
12924 else if (n->value & GOVD_LOCAL)
12925 return *tp;
12926 else if (n->value & GOVD_FIRSTPRIVATE)
12927 return NULL_TREE;
12928 else if ((n->value & (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
12929 == (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
12930 return NULL_TREE;
12931 return *tp;
12932 case INTEGER_CST:
12933 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
12934 return *tp;
12935 return NULL_TREE;
12936 case TARGET_EXPR:
12937 if (TARGET_EXPR_INITIAL (*tp)
12938 || TREE_CODE (TARGET_EXPR_SLOT (*tp)) != VAR_DECL)
12939 return *tp;
12940 return computable_teams_clause (&TARGET_EXPR_SLOT (*tp),
12941 walk_subtrees, NULL);
12942 /* Allow some reasonable subset of integral arithmetic. */
12943 case PLUS_EXPR:
12944 case MINUS_EXPR:
12945 case MULT_EXPR:
12946 case TRUNC_DIV_EXPR:
12947 case CEIL_DIV_EXPR:
12948 case FLOOR_DIV_EXPR:
12949 case ROUND_DIV_EXPR:
12950 case TRUNC_MOD_EXPR:
12951 case CEIL_MOD_EXPR:
12952 case FLOOR_MOD_EXPR:
12953 case ROUND_MOD_EXPR:
12954 case RDIV_EXPR:
12955 case EXACT_DIV_EXPR:
12956 case MIN_EXPR:
12957 case MAX_EXPR:
12958 case LSHIFT_EXPR:
12959 case RSHIFT_EXPR:
12960 case BIT_IOR_EXPR:
12961 case BIT_XOR_EXPR:
12962 case BIT_AND_EXPR:
12963 case NEGATE_EXPR:
12964 case ABS_EXPR:
12965 case BIT_NOT_EXPR:
12966 case NON_LVALUE_EXPR:
12967 CASE_CONVERT:
12968 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
12969 return *tp;
12970 return NULL_TREE;
12971 /* And disallow anything else, except for comparisons. */
12972 default:
12973 if (COMPARISON_CLASS_P (*tp))
12974 return NULL_TREE;
12975 return *tp;
12976 }
12977 }
12978
12979 /* Try to determine if the num_teams and/or thread_limit expressions
12980 can have their values determined already before entering the
12981 target construct.
12982 INTEGER_CSTs trivially can; so can
12983 integral decls that are firstprivate (explicitly or implicitly)
12984 or explicitly map(always, to:) or map(always, tofrom:) on the target
12985 region, and expressions involving simple arithmetic on those.
12986 Function calls are not ok, nor is dereferencing something, etc.
12987 Add NUM_TEAMS and THREAD_LIMIT clauses to the OMP_CLAUSES of
12988 EXPR based on what we find:
12989 0 stands for clause not specified at all, use implementation default
12990 -1 stands for value that can't be determined easily before entering
12991 the target construct.
12992 If teams construct is not present at all, use 1 for num_teams
12993 and 0 for thread_limit (only one team is involved, and the thread
12994 limit is implementation defined). */
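/* For instance (an illustrative sketch, not from the source):

   #pragma omp target firstprivate(n)
   #pragma omp teams num_teams(n + 1)

   lets n + 1 be evaluated on the host before entering the target region,
   whereas num_teams(foo ()) cannot be precomputed and is recorded
   as -1. */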
12995
12996 static void
12997 optimize_target_teams (tree target, gimple_seq *pre_p)
12998 {
12999 tree body = OMP_BODY (target);
13000 tree teams = walk_tree (&body, find_omp_teams, NULL, NULL);
13001 tree num_teams = integer_zero_node;
13002 tree thread_limit = integer_zero_node;
13003 location_t num_teams_loc = EXPR_LOCATION (target);
13004 location_t thread_limit_loc = EXPR_LOCATION (target);
13005 tree c, *p, expr;
13006 struct gimplify_omp_ctx *target_ctx = gimplify_omp_ctxp;
13007
13008 if (teams == NULL_TREE)
13009 num_teams = integer_one_node;
13010 else
13011 for (c = OMP_TEAMS_CLAUSES (teams); c; c = OMP_CLAUSE_CHAIN (c))
13012 {
13013 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NUM_TEAMS)
13014 {
13015 p = &num_teams;
13016 num_teams_loc = OMP_CLAUSE_LOCATION (c);
13017 }
13018 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREAD_LIMIT)
13019 {
13020 p = &thread_limit;
13021 thread_limit_loc = OMP_CLAUSE_LOCATION (c);
13022 }
13023 else
13024 continue;
13025 expr = OMP_CLAUSE_OPERAND (c, 0);
13026 if (TREE_CODE (expr) == INTEGER_CST)
13027 {
13028 *p = expr;
13029 continue;
13030 }
13031 if (walk_tree (&expr, computable_teams_clause, NULL, NULL))
13032 {
13033 *p = integer_minus_one_node;
13034 continue;
13035 }
13036 *p = expr;
13037 gimplify_omp_ctxp = gimplify_omp_ctxp->outer_context;
13038 if (gimplify_expr (p, pre_p, NULL, is_gimple_val, fb_rvalue, false)
13039 == GS_ERROR)
13040 {
13041 gimplify_omp_ctxp = target_ctx;
13042 *p = integer_minus_one_node;
13043 continue;
13044 }
13045 gimplify_omp_ctxp = target_ctx;
13046 if (!DECL_P (expr) && TREE_CODE (expr) != TARGET_EXPR)
13047 OMP_CLAUSE_OPERAND (c, 0) = *p;
13048 }
13049 c = build_omp_clause (thread_limit_loc, OMP_CLAUSE_THREAD_LIMIT);
13050 OMP_CLAUSE_THREAD_LIMIT_EXPR (c) = thread_limit;
13051 OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
13052 OMP_TARGET_CLAUSES (target) = c;
13053 c = build_omp_clause (num_teams_loc, OMP_CLAUSE_NUM_TEAMS);
13054 OMP_CLAUSE_NUM_TEAMS_EXPR (c) = num_teams;
13055 OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
13056 OMP_TARGET_CLAUSES (target) = c;
13057 }
13058
13059 /* Gimplify the gross structure of several OMP constructs. */
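/* E.g. (illustrative) for OMP_TARGET_DATA the gimplified body is wrapped
   below as
     try { BODY } finally { GOMP_target_end_data (); }
   (the OpenACC data constructs use GOACC_data_end instead), so the data
   region is closed even when the body exits abnormally. */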
13060
13061 static void
13062 gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
13063 {
13064 tree expr = *expr_p;
13065 gimple *stmt;
13066 gimple_seq body = NULL;
13067 enum omp_region_type ort;
13068
13069 switch (TREE_CODE (expr))
13070 {
13071 case OMP_SECTIONS:
13072 case OMP_SINGLE:
13073 ort = ORT_WORKSHARE;
13074 break;
13075 case OMP_TARGET:
13076 ort = OMP_TARGET_COMBINED (expr) ? ORT_COMBINED_TARGET : ORT_TARGET;
13077 break;
13078 case OACC_KERNELS:
13079 ort = ORT_ACC_KERNELS;
13080 break;
13081 case OACC_PARALLEL:
13082 ort = ORT_ACC_PARALLEL;
13083 break;
13084 case OACC_SERIAL:
13085 ort = ORT_ACC_SERIAL;
13086 break;
13087 case OACC_DATA:
13088 ort = ORT_ACC_DATA;
13089 break;
13090 case OMP_TARGET_DATA:
13091 ort = ORT_TARGET_DATA;
13092 break;
13093 case OMP_TEAMS:
13094 ort = OMP_TEAMS_COMBINED (expr) ? ORT_COMBINED_TEAMS : ORT_TEAMS;
13095 if (gimplify_omp_ctxp == NULL
13096 || gimplify_omp_ctxp->region_type == ORT_IMPLICIT_TARGET)
13097 ort = (enum omp_region_type) (ort | ORT_HOST_TEAMS);
13098 break;
13099 case OACC_HOST_DATA:
13100 ort = ORT_ACC_HOST_DATA;
13101 break;
13102 default:
13103 gcc_unreachable ();
13104 }
13105
13106 bool save_in_omp_construct = in_omp_construct;
13107 if ((ort & ORT_ACC) == 0)
13108 in_omp_construct = false;
13109 gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ort,
13110 TREE_CODE (expr));
13111 if (TREE_CODE (expr) == OMP_TARGET)
13112 optimize_target_teams (expr, pre_p);
13113 if ((ort & (ORT_TARGET | ORT_TARGET_DATA)) != 0
13114 || (ort & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
13115 {
13116 push_gimplify_context ();
13117 gimple *g = gimplify_and_return_first (OMP_BODY (expr), &body);
13118 if (gimple_code (g) == GIMPLE_BIND)
13119 pop_gimplify_context (g);
13120 else
13121 pop_gimplify_context (NULL);
13122 if ((ort & ORT_TARGET_DATA) != 0)
13123 {
13124 enum built_in_function end_ix;
13125 switch (TREE_CODE (expr))
13126 {
13127 case OACC_DATA:
13128 case OACC_HOST_DATA:
13129 end_ix = BUILT_IN_GOACC_DATA_END;
13130 break;
13131 case OMP_TARGET_DATA:
13132 end_ix = BUILT_IN_GOMP_TARGET_END_DATA;
13133 break;
13134 default:
13135 gcc_unreachable ();
13136 }
13137 tree fn = builtin_decl_explicit (end_ix);
13138 g = gimple_build_call (fn, 0);
13139 gimple_seq cleanup = NULL;
13140 gimple_seq_add_stmt (&cleanup, g);
13141 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
13142 body = NULL;
13143 gimple_seq_add_stmt (&body, g);
13144 }
13145 }
13146 else
13147 gimplify_and_add (OMP_BODY (expr), &body);
13148 gimplify_adjust_omp_clauses (pre_p, body, &OMP_CLAUSES (expr),
13149 TREE_CODE (expr));
13150 in_omp_construct = save_in_omp_construct;
13151
13152 switch (TREE_CODE (expr))
13153 {
13154 case OACC_DATA:
13155 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_DATA,
13156 OMP_CLAUSES (expr));
13157 break;
13158 case OACC_HOST_DATA:
13159 if (omp_find_clause (OMP_CLAUSES (expr), OMP_CLAUSE_IF_PRESENT))
13160 {
13161 for (tree c = OMP_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
13162 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
13163 OMP_CLAUSE_USE_DEVICE_PTR_IF_PRESENT (c) = 1;
13164 }
13165
13166 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_HOST_DATA,
13167 OMP_CLAUSES (expr));
13168 break;
13169 case OACC_KERNELS:
13170 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_KERNELS,
13171 OMP_CLAUSES (expr));
13172 break;
13173 case OACC_PARALLEL:
13174 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_PARALLEL,
13175 OMP_CLAUSES (expr));
13176 break;
13177 case OACC_SERIAL:
13178 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_SERIAL,
13179 OMP_CLAUSES (expr));
13180 break;
13181 case OMP_SECTIONS:
13182 stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
13183 break;
13184 case OMP_SINGLE:
13185 stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
13186 break;
13187 case OMP_TARGET:
13188 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_REGION,
13189 OMP_CLAUSES (expr));
13190 break;
13191 case OMP_TARGET_DATA:
13192 /* Put use_device_{ptr,addr} clauses last, as map clauses are supposed
13193 to be evaluated before the use_device_{ptr,addr} clauses if they
13194 refer to the same variables. */
13195 {
13196 tree use_device_clauses;
13197 tree *pc, *uc = &use_device_clauses;
13198 for (pc = &OMP_CLAUSES (expr); *pc; )
13199 if (OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_USE_DEVICE_PTR
13200 || OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_USE_DEVICE_ADDR)
13201 {
13202 *uc = *pc;
13203 *pc = OMP_CLAUSE_CHAIN (*pc);
13204 uc = &OMP_CLAUSE_CHAIN (*uc);
13205 }
13206 else
13207 pc = &OMP_CLAUSE_CHAIN (*pc);
13208 *uc = NULL_TREE;
13209 *pc = use_device_clauses;
13210 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_DATA,
13211 OMP_CLAUSES (expr));
13212 }
13213 break;
13214 case OMP_TEAMS:
13215 stmt = gimple_build_omp_teams (body, OMP_CLAUSES (expr));
13216 if ((ort & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
13217 gimple_omp_teams_set_host (as_a <gomp_teams *> (stmt), true);
13218 break;
13219 default:
13220 gcc_unreachable ();
13221 }
13222
13223 gimplify_seq_add_stmt (pre_p, stmt);
13224 *expr_p = NULL_TREE;
13225 }
13226
13227 /* Gimplify the gross structure of OpenACC enter/exit data, update, and OpenMP
13228 target update constructs. */
13229
13230 static void
13231 gimplify_omp_target_update (tree *expr_p, gimple_seq *pre_p)
13232 {
13233 tree expr = *expr_p;
13234 int kind;
13235 gomp_target *stmt;
13236 enum omp_region_type ort = ORT_WORKSHARE;
13237
13238 switch (TREE_CODE (expr))
13239 {
13240 case OACC_ENTER_DATA:
13241 case OACC_EXIT_DATA:
13242 kind = GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA;
13243 ort = ORT_ACC;
13244 break;
13245 case OACC_UPDATE:
13246 kind = GF_OMP_TARGET_KIND_OACC_UPDATE;
13247 ort = ORT_ACC;
13248 break;
13249 case OMP_TARGET_UPDATE:
13250 kind = GF_OMP_TARGET_KIND_UPDATE;
13251 break;
13252 case OMP_TARGET_ENTER_DATA:
13253 kind = GF_OMP_TARGET_KIND_ENTER_DATA;
13254 break;
13255 case OMP_TARGET_EXIT_DATA:
13256 kind = GF_OMP_TARGET_KIND_EXIT_DATA;
13257 break;
13258 default:
13259 gcc_unreachable ();
13260 }
13261 gimplify_scan_omp_clauses (&OMP_STANDALONE_CLAUSES (expr), pre_p,
13262 ort, TREE_CODE (expr));
13263 gimplify_adjust_omp_clauses (pre_p, NULL, &OMP_STANDALONE_CLAUSES (expr),
13264 TREE_CODE (expr));
13265 if (TREE_CODE (expr) == OACC_UPDATE
13266 && omp_find_clause (OMP_STANDALONE_CLAUSES (expr),
13267 OMP_CLAUSE_IF_PRESENT))
13268 {
13269 /* The runtime uses GOMP_MAP_{TO,FROM} to denote the if_present
13270 clause. */
13271 for (tree c = OMP_STANDALONE_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
13272 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP)
13273 switch (OMP_CLAUSE_MAP_KIND (c))
13274 {
13275 case GOMP_MAP_FORCE_TO:
13276 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TO);
13277 break;
13278 case GOMP_MAP_FORCE_FROM:
13279 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FROM);
13280 break;
13281 default:
13282 break;
13283 }
13284 }
13285 else if (TREE_CODE (expr) == OACC_EXIT_DATA
13286 && omp_find_clause (OMP_STANDALONE_CLAUSES (expr),
13287 OMP_CLAUSE_FINALIZE))
13288 {
13289 /* Use GOMP_MAP_DELETE/GOMP_MAP_FORCE_FROM to denote "finalize"
13290 semantics. */
13291 bool have_clause = false;
13292 for (tree c = OMP_STANDALONE_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
13293 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP)
13294 switch (OMP_CLAUSE_MAP_KIND (c))
13295 {
13296 case GOMP_MAP_FROM:
13297 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FORCE_FROM);
13298 have_clause = true;
13299 break;
13300 case GOMP_MAP_RELEASE:
13301 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_DELETE);
13302 have_clause = true;
13303 break;
13304 case GOMP_MAP_TO_PSET:
13305 /* Fortran arrays with descriptors must map that descriptor when
13306 doing standalone "attach" operations (in OpenACC). In that
13307 case GOMP_MAP_TO_PSET appears by itself with no preceding
13308 clause (see trans-openmp.c:gfc_trans_omp_clauses). */
13309 break;
13310 case GOMP_MAP_POINTER:
13311 /* TODO PR92929: we may see these here, but they'll always follow
13312 one of the clauses above, and will be handled by libgomp as
13313 one group, so no handling required here. */
13314 gcc_assert (have_clause);
13315 break;
13316 case GOMP_MAP_DETACH:
13317 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FORCE_DETACH);
13318 have_clause = false;
13319 break;
13320 case GOMP_MAP_STRUCT:
13321 have_clause = false;
13322 break;
13323 default:
13324 gcc_unreachable ();
13325 }
13326 }
13327 stmt = gimple_build_omp_target (NULL, kind, OMP_STANDALONE_CLAUSES (expr));
13328
13329 gimplify_seq_add_stmt (pre_p, stmt);
13330 *expr_p = NULL_TREE;
13331 }
13332
13333 /* A subroutine of gimplify_omp_atomic. The front end is supposed to have
13334 stabilized the lhs of the atomic operation as *ADDR. Return true if
13335 EXPR is this stabilized form. */
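/* E.g. (illustrative) for "#pragma omp atomic" on a variable x, the
   front end passes ADDR = &x and references x inside the operation
   as *&x; casts between type variants (say volatile int vs. int)
   are looked through when comparing. */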
13336
13337 static bool
13338 goa_lhs_expr_p (tree expr, tree addr)
13339 {
13340 /* Also include casts to other type variants. The C front end is fond
13341 of adding these for e.g. volatile variables. This is like
13342 STRIP_TYPE_NOPS but includes the main variant lookup. */
13343 STRIP_USELESS_TYPE_CONVERSION (expr);
13344
13345 if (TREE_CODE (expr) == INDIRECT_REF)
13346 {
13347 expr = TREE_OPERAND (expr, 0);
13348 while (expr != addr
13349 && (CONVERT_EXPR_P (expr)
13350 || TREE_CODE (expr) == NON_LVALUE_EXPR)
13351 && TREE_CODE (expr) == TREE_CODE (addr)
13352 && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr)))
13353 {
13354 expr = TREE_OPERAND (expr, 0);
13355 addr = TREE_OPERAND (addr, 0);
13356 }
13357 if (expr == addr)
13358 return true;
13359 return (TREE_CODE (addr) == ADDR_EXPR
13360 && TREE_CODE (expr) == ADDR_EXPR
13361 && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
13362 }
13363 if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
13364 return true;
13365 return false;
13366 }
13367
13368 /* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR. If an
13369 expression does not involve the lhs, evaluate it into a temporary.
13370 Return 1 if the lhs appeared as a subexpression, 0 if it did not,
13371 or -1 if an error was encountered. */
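/* E.g. (illustrative) when stabilizing "x + foo (y)" with LHS_ADDR = &x,
   the x subexpression is rewritten to LHS_VAR, while foo (y), which does
   not involve the lhs, is evaluated into a temporary in PRE_P. */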
13372
13373 static int
13374 goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
13375 tree lhs_var)
13376 {
13377 tree expr = *expr_p;
13378 int saw_lhs;
13379
13380 if (goa_lhs_expr_p (expr, lhs_addr))
13381 {
13382 *expr_p = lhs_var;
13383 return 1;
13384 }
13385 if (is_gimple_val (expr))
13386 return 0;
13387
13388 saw_lhs = 0;
13389 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
13390 {
13391 case tcc_binary:
13392 case tcc_comparison:
13393 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
13394 lhs_var);
13395 /* FALLTHRU */
13396 case tcc_unary:
13397 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
13398 lhs_var);
13399 break;
13400 case tcc_expression:
13401 switch (TREE_CODE (expr))
13402 {
13403 case TRUTH_ANDIF_EXPR:
13404 case TRUTH_ORIF_EXPR:
13405 case TRUTH_AND_EXPR:
13406 case TRUTH_OR_EXPR:
13407 case TRUTH_XOR_EXPR:
13408 case BIT_INSERT_EXPR:
13409 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
13410 lhs_addr, lhs_var);
13411 /* FALLTHRU */
13412 case TRUTH_NOT_EXPR:
13413 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
13414 lhs_addr, lhs_var);
13415 break;
13416 case COMPOUND_EXPR:
13417 /* Break out any preevaluations from cp_build_modify_expr. */
13418 for (; TREE_CODE (expr) == COMPOUND_EXPR;
13419 expr = TREE_OPERAND (expr, 1))
13420 gimplify_stmt (&TREE_OPERAND (expr, 0), pre_p);
13421 *expr_p = expr;
13422 return goa_stabilize_expr (expr_p, pre_p, lhs_addr, lhs_var);
13423 default:
13424 break;
13425 }
13426 break;
13427 case tcc_reference:
13428 if (TREE_CODE (expr) == BIT_FIELD_REF)
13429 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
13430 lhs_addr, lhs_var);
13431 break;
13432 default:
13433 break;
13434 }
13435
13436 if (saw_lhs == 0)
13437 {
13438 enum gimplify_status gs;
13439 gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
13440 if (gs != GS_ALL_DONE)
13441 saw_lhs = -1;
13442 }
13443
13444 return saw_lhs;
13445 }
13446
13447 /* Gimplify an OMP_ATOMIC statement. */
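/* Illustrative sketch (not generated verbatim): "#pragma omp atomic
   capture" with { v = x; x = x + 1; } is lowered to a
   GIMPLE_OMP_ATOMIC_LOAD of x into a temporary, the gimplified rhs
   computation, and a GIMPLE_OMP_ATOMIC_STORE; for OMP_ATOMIC_CAPTURE_OLD
   the load is marked as needing its value, for OMP_ATOMIC_CAPTURE_NEW
   the store is. */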
13448
13449 static enum gimplify_status
13450 gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
13451 {
13452 tree addr = TREE_OPERAND (*expr_p, 0);
13453 tree rhs = TREE_CODE (*expr_p) == OMP_ATOMIC_READ
13454 ? NULL : TREE_OPERAND (*expr_p, 1);
13455 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
13456 tree tmp_load;
13457 gomp_atomic_load *loadstmt;
13458 gomp_atomic_store *storestmt;
13459
13460 tmp_load = create_tmp_reg (type);
13461 if (rhs && goa_stabilize_expr (&rhs, pre_p, addr, tmp_load) < 0)
13462 return GS_ERROR;
13463
13464 if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
13465 != GS_ALL_DONE)
13466 return GS_ERROR;
13467
13468 loadstmt = gimple_build_omp_atomic_load (tmp_load, addr,
13469 OMP_ATOMIC_MEMORY_ORDER (*expr_p));
13470 gimplify_seq_add_stmt (pre_p, loadstmt);
13471 if (rhs)
13472 {
13473 /* BIT_INSERT_EXPR is not valid for non-integral bitfield
13474 representatives. Use BIT_FIELD_REF on the lhs instead. */
13475 if (TREE_CODE (rhs) == BIT_INSERT_EXPR
13476 && !INTEGRAL_TYPE_P (TREE_TYPE (tmp_load)))
13477 {
13478 tree bitpos = TREE_OPERAND (rhs, 2);
13479 tree op1 = TREE_OPERAND (rhs, 1);
13480 tree bitsize;
13481 tree tmp_store = tmp_load;
13482 if (TREE_CODE (*expr_p) == OMP_ATOMIC_CAPTURE_OLD)
13483 tmp_store = get_initialized_tmp_var (tmp_load, pre_p);
13484 if (INTEGRAL_TYPE_P (TREE_TYPE (op1)))
13485 bitsize = bitsize_int (TYPE_PRECISION (TREE_TYPE (op1)));
13486 else
13487 bitsize = TYPE_SIZE (TREE_TYPE (op1));
13488 gcc_assert (TREE_OPERAND (rhs, 0) == tmp_load);
13489 tree t = build2_loc (EXPR_LOCATION (rhs),
13490 MODIFY_EXPR, void_type_node,
13491 build3_loc (EXPR_LOCATION (rhs), BIT_FIELD_REF,
13492 TREE_TYPE (op1), tmp_store, bitsize,
13493 bitpos), op1);
13494 gimplify_and_add (t, pre_p);
13495 rhs = tmp_store;
13496 }
13497 if (gimplify_expr (&rhs, pre_p, NULL, is_gimple_val, fb_rvalue)
13498 != GS_ALL_DONE)
13499 return GS_ERROR;
13500 }
13501
13502 if (TREE_CODE (*expr_p) == OMP_ATOMIC_READ)
13503 rhs = tmp_load;
13504 storestmt
13505 = gimple_build_omp_atomic_store (rhs, OMP_ATOMIC_MEMORY_ORDER (*expr_p));
13506 gimplify_seq_add_stmt (pre_p, storestmt);
13507 switch (TREE_CODE (*expr_p))
13508 {
13509 case OMP_ATOMIC_READ:
13510 case OMP_ATOMIC_CAPTURE_OLD:
13511 *expr_p = tmp_load;
13512 gimple_omp_atomic_set_need_value (loadstmt);
13513 break;
13514 case OMP_ATOMIC_CAPTURE_NEW:
13515 *expr_p = rhs;
13516 gimple_omp_atomic_set_need_value (storestmt);
13517 break;
13518 default:
13519 *expr_p = NULL;
13520 break;
13521 }
13522
13523 return GS_ALL_DONE;
13524 }
13525
13526 /* Gimplify a TRANSACTION_EXPR. This involves gimplification of the
13527 body, and adding some EH bits. */
13528
13529 static enum gimplify_status
13530 gimplify_transaction (tree *expr_p, gimple_seq *pre_p)
13531 {
13532 tree expr = *expr_p, temp, tbody = TRANSACTION_EXPR_BODY (expr);
13533 gimple *body_stmt;
13534 gtransaction *trans_stmt;
13535 gimple_seq body = NULL;
13536 int subcode = 0;
13537
13538 /* Wrap the transaction body in a BIND_EXPR so we have a context
13539 in which to put decls for OMP. */
13540 if (TREE_CODE (tbody) != BIND_EXPR)
13541 {
13542 tree bind = build3 (BIND_EXPR, void_type_node, NULL, tbody, NULL);
13543 TREE_SIDE_EFFECTS (bind) = 1;
13544 SET_EXPR_LOCATION (bind, EXPR_LOCATION (tbody));
13545 TRANSACTION_EXPR_BODY (expr) = bind;
13546 }
13547
13548 push_gimplify_context ();
13549 temp = voidify_wrapper_expr (*expr_p, NULL);
13550
13551 body_stmt = gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr), &body);
13552 pop_gimplify_context (body_stmt);
13553
13554 trans_stmt = gimple_build_transaction (body);
13555 if (TRANSACTION_EXPR_OUTER (expr))
13556 subcode = GTMA_IS_OUTER;
13557 else if (TRANSACTION_EXPR_RELAXED (expr))
13558 subcode = GTMA_IS_RELAXED;
13559 gimple_transaction_set_subcode (trans_stmt, subcode);
13560
13561 gimplify_seq_add_stmt (pre_p, trans_stmt);
13562
13563 if (temp)
13564 {
13565 *expr_p = temp;
13566 return GS_OK;
13567 }
13568
13569 *expr_p = NULL_TREE;
13570 return GS_ALL_DONE;
13571 }
13572
13573 /* Gimplify an OMP_ORDERED construct. EXPR is the tree version. BODY
13574 is the OMP_BODY of the original EXPR (which has already been
13575 gimplified so it's not present in the EXPR).
13576
13577 Return the gimplified GIMPLE_OMP_ORDERED tuple. */
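/* E.g. (illustrative) inside a loop with "#pragma omp for ordered(2)",
   a nested "#pragma omp ordered depend(sink: i - 1, j)" must list the
   two iteration variables in order; mismatched names or a wrong count
   are diagnosed below. */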
13578
13579 static gimple *
13580 gimplify_omp_ordered (tree expr, gimple_seq body)
13581 {
13582 tree c, decls;
13583 int failures = 0;
13584 unsigned int i;
13585 tree source_c = NULL_TREE;
13586 tree sink_c = NULL_TREE;
13587
13588 if (gimplify_omp_ctxp)
13589 {
13590 for (c = OMP_ORDERED_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
13591 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
13592 && gimplify_omp_ctxp->loop_iter_var.is_empty ()
13593 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK
13594 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE))
13595 {
13596 error_at (OMP_CLAUSE_LOCATION (c),
13597 "%<ordered%> construct with %<depend%> clause must be "
13598 "closely nested inside a loop with %<ordered%> clause "
13599 "with a parameter");
13600 failures++;
13601 }
13602 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
13603 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
13604 {
13605 bool fail = false;
13606 for (decls = OMP_CLAUSE_DECL (c), i = 0;
13607 decls && TREE_CODE (decls) == TREE_LIST;
13608 decls = TREE_CHAIN (decls), ++i)
13609 if (i >= gimplify_omp_ctxp->loop_iter_var.length () / 2)
13610 continue;
13611 else if (TREE_VALUE (decls)
13612 != gimplify_omp_ctxp->loop_iter_var[2 * i])
13613 {
13614 error_at (OMP_CLAUSE_LOCATION (c),
13615 "variable %qE is not an iteration "
13616 "of outermost loop %d, expected %qE",
13617 TREE_VALUE (decls), i + 1,
13618 gimplify_omp_ctxp->loop_iter_var[2 * i]);
13619 fail = true;
13620 failures++;
13621 }
13622 else
13623 TREE_VALUE (decls)
13624 = gimplify_omp_ctxp->loop_iter_var[2 * i + 1];
13625 if (!fail && i != gimplify_omp_ctxp->loop_iter_var.length () / 2)
13626 {
13627 error_at (OMP_CLAUSE_LOCATION (c),
13628 "number of variables in %<depend%> clause with "
13629 "%<sink%> modifier does not match number of "
13630 "iteration variables");
13631 failures++;
13632 }
13633 sink_c = c;
13634 }
13635 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
13636 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE)
13637 {
13638 if (source_c)
13639 {
13640 error_at (OMP_CLAUSE_LOCATION (c),
13641 "more than one %<depend%> clause with %<source%> "
13642 "modifier on an %<ordered%> construct");
13643 failures++;
13644 }
13645 else
13646 source_c = c;
13647 }
13648 }
13649 if (source_c && sink_c)
13650 {
13651 error_at (OMP_CLAUSE_LOCATION (source_c),
13652 "%<depend%> clause with %<source%> modifier specified "
13653 "together with %<depend%> clauses with %<sink%> modifier "
13654 "on the same construct");
13655 failures++;
13656 }
13657
13658 if (failures)
13659 return gimple_build_nop ();
13660 return gimple_build_omp_ordered (body, OMP_ORDERED_CLAUSES (expr));
13661 }
13662
13663 /* Convert the GENERIC expression tree *EXPR_P to GIMPLE. If the
13664 expression produces a value to be used as an operand inside a GIMPLE
13665 statement, the value will be stored back in *EXPR_P. This value will
13666 be a tree of class tcc_declaration, tcc_constant, tcc_reference or
13667 an SSA_NAME. The corresponding sequence of GIMPLE statements is
13668 emitted in PRE_P and POST_P.
13669
13670 Additionally, this process may overwrite parts of the input
13671 expression during gimplification. Ideally, it should be
13672 possible to do non-destructive gimplification.
13673
13674 EXPR_P points to the GENERIC expression to convert to GIMPLE. If
13675 the expression needs to evaluate to a value to be used as
13676 an operand in a GIMPLE statement, this value will be stored in
13677 *EXPR_P on exit. This happens when the caller specifies one
13678 of fb_lvalue or fb_rvalue fallback flags.
13679
13680 PRE_P will contain the sequence of GIMPLE statements corresponding
13681 to the evaluation of EXPR and all the side-effects that must
13682 be executed before the main expression. On exit, the last
13683 statement of PRE_P is the core statement being gimplified. For
13684 instance, when gimplifying 'if (++a)' the last statement in
13685 PRE_P will be 'if (t.1)' where t.1 is the result of
13686 pre-incrementing 'a'.
13687
13688 POST_P will contain the sequence of GIMPLE statements corresponding
13689 to the evaluation of all the side-effects that must be executed
13690 after the main expression. If this is NULL, the post
13691 side-effects are stored at the end of PRE_P.
13692
13693 The reason why the output is split in two is to handle post
13694 side-effects explicitly. In some cases, an expression may have
13695 inner and outer post side-effects which need to be emitted in
13696 an order different from the one given by the recursive
13697 traversal. For instance, for the expression (*p--)++ the post
13698 side-effects of '--' must actually occur *after* the post
13699 side-effects of '++'. However, gimplification will first visit
13700 the inner expression, so if a separate POST sequence was not
13701 used, the resulting sequence would be:
13702
13703 1 t.1 = *p
13704 2 p = p - 1
13705 3 t.2 = t.1 + 1
13706 4 *p = t.2
13707
13708 However, the post-decrement operation in line #2 must not be
13709 evaluated until after the store to *p at line #4, so the
13710 correct sequence should be:
13711
13712 1 t.1 = *p
13713 2 t.2 = t.1 + 1
13714 3 *p = t.2
13715 4 p = p - 1
13716
13717 So, by specifying a separate post queue, it is possible
13718 to emit the post side-effects in the correct order.
13719 If POST_P is NULL, an internal queue will be used. Before
13720 returning to the caller, the sequence POST_P is appended to
13721 the main output sequence PRE_P.
13722
13723 GIMPLE_TEST_F points to a function that takes a tree T and
13724 returns nonzero if T is in the GIMPLE form requested by the
13725 caller. The GIMPLE predicates are in gimple.c.
13726
13727 FALLBACK tells the function what sort of a temporary we want if
13728 gimplification cannot produce an expression that complies with
13729 GIMPLE_TEST_F.
13730
13731 fb_none means that no temporary should be generated
13732 fb_rvalue means that an rvalue is OK to generate
13733 fb_lvalue means that an lvalue is OK to generate
13734 fb_either means that either is OK, but an lvalue is preferable.
13735 fb_mayfail means that gimplification may fail (in which case
13736 GS_ERROR will be returned)
13737
13738 The return value is either GS_ERROR or GS_ALL_DONE, since this
13739 function iterates until EXPR is completely gimplified or an error
13740 occurs. */
13741
13742 enum gimplify_status
13743 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
13744 bool (*gimple_test_f) (tree), fallback_t fallback)
13745 {
13746 tree tmp;
13747 gimple_seq internal_pre = NULL;
13748 gimple_seq internal_post = NULL;
13749 tree save_expr;
13750 bool is_statement;
13751 location_t saved_location;
13752 enum gimplify_status ret;
13753 gimple_stmt_iterator pre_last_gsi, post_last_gsi;
13754 tree label;
13755
13756 save_expr = *expr_p;
13757 if (save_expr == NULL_TREE)
13758 return GS_ALL_DONE;
13759
13760 /* If we are gimplifying a top-level statement, PRE_P must be valid. */
13761 is_statement = gimple_test_f == is_gimple_stmt;
13762 if (is_statement)
13763 gcc_assert (pre_p);
13764
13765 /* Consistency checks. */
13766 if (gimple_test_f == is_gimple_reg)
13767 gcc_assert (fallback & (fb_rvalue | fb_lvalue));
13768 else if (gimple_test_f == is_gimple_val
13769 || gimple_test_f == is_gimple_call_addr
13770 || gimple_test_f == is_gimple_condexpr
13771 || gimple_test_f == is_gimple_condexpr_for_cond
13772 || gimple_test_f == is_gimple_mem_rhs
13773 || gimple_test_f == is_gimple_mem_rhs_or_call
13774 || gimple_test_f == is_gimple_reg_rhs
13775 || gimple_test_f == is_gimple_reg_rhs_or_call
13776 || gimple_test_f == is_gimple_asm_val
13777 || gimple_test_f == is_gimple_mem_ref_addr)
13778 gcc_assert (fallback & fb_rvalue);
13779 else if (gimple_test_f == is_gimple_min_lval
13780 || gimple_test_f == is_gimple_lvalue)
13781 gcc_assert (fallback & fb_lvalue);
13782 else if (gimple_test_f == is_gimple_addressable)
13783 gcc_assert (fallback & fb_either);
13784 else if (gimple_test_f == is_gimple_stmt)
13785 gcc_assert (fallback == fb_none);
13786 else
13787 {
13788 /* We should have recognized the GIMPLE_TEST_F predicate to
13789 know what kind of fallback to use in case a temporary is
13790 needed to hold the value or address of *EXPR_P. */
13791 gcc_unreachable ();
13792 }
13793
13794 /* We used to check the predicate here and return immediately if it
13795 succeeds. This is wrong; the design is for gimplification to be
13796 idempotent, and for the predicates to only test for valid forms, not
13797 whether they are fully simplified. */
13798 if (pre_p == NULL)
13799 pre_p = &internal_pre;
13800
13801 if (post_p == NULL)
13802 post_p = &internal_post;
13803
13804 /* Remember the last statements added to PRE_P and POST_P. Every
13805 new statement added by the gimplification helpers needs to be
13806 annotated with location information. To centralize the
13807 responsibility, we remember the last statement that had been
13808 added to both queues before gimplifying *EXPR_P. If
13809 gimplification produces new statements in PRE_P and POST_P, those
13810 statements will be annotated with the same location information
13811 as *EXPR_P. */
13812 pre_last_gsi = gsi_last (*pre_p);
13813 post_last_gsi = gsi_last (*post_p);
13814
13815 saved_location = input_location;
13816 if (save_expr != error_mark_node
13817 && EXPR_HAS_LOCATION (*expr_p))
13818 input_location = EXPR_LOCATION (*expr_p);
13819
13820 /* Loop over the specific gimplifiers until the toplevel node
13821 remains the same. */
13822 do
13823 {
13824 /* Strip away as many useless type conversions as possible
13825 at the toplevel. */
13826 STRIP_USELESS_TYPE_CONVERSION (*expr_p);
13827
13828 /* Remember the expr. */
13829 save_expr = *expr_p;
13830
13831 /* Die, die, die, my darling. */
13832 if (error_operand_p (save_expr))
13833 {
13834 ret = GS_ERROR;
13835 break;
13836 }
13837
13838 /* Do any language-specific gimplification. */
13839 ret = ((enum gimplify_status)
13840 lang_hooks.gimplify_expr (expr_p, pre_p, post_p));
13841 if (ret == GS_OK)
13842 {
13843 if (*expr_p == NULL_TREE)
13844 break;
13845 if (*expr_p != save_expr)
13846 continue;
13847 }
13848 else if (ret != GS_UNHANDLED)
13849 break;
13850
13851 /* Make sure that all the cases set 'ret' appropriately. */
13852 ret = GS_UNHANDLED;
13853 switch (TREE_CODE (*expr_p))
13854 {
13855 /* First deal with the special cases. */
13856
13857 case POSTINCREMENT_EXPR:
13858 case POSTDECREMENT_EXPR:
13859 case PREINCREMENT_EXPR:
13860 case PREDECREMENT_EXPR:
13861 ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
13862 fallback != fb_none,
13863 TREE_TYPE (*expr_p));
13864 break;
13865
13866 case VIEW_CONVERT_EXPR:
13867 if ((fallback & fb_rvalue)
13868 && is_gimple_reg_type (TREE_TYPE (*expr_p))
13869 && is_gimple_reg_type (TREE_TYPE (TREE_OPERAND (*expr_p, 0))))
13870 {
13871 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
13872 post_p, is_gimple_val, fb_rvalue);
13873 recalculate_side_effects (*expr_p);
13874 break;
13875 }
13876 /* Fallthru. */
13877
13878 case ARRAY_REF:
13879 case ARRAY_RANGE_REF:
13880 case REALPART_EXPR:
13881 case IMAGPART_EXPR:
13882 case COMPONENT_REF:
13883 ret = gimplify_compound_lval (expr_p, pre_p, post_p,
13884 fallback ? fallback : fb_rvalue);
13885 break;
13886
13887 case COND_EXPR:
13888 ret = gimplify_cond_expr (expr_p, pre_p, fallback);
13889
13890 /* C99 code may assign to an array in a structure value of a
13891 conditional expression, and this has undefined behavior
13892 only on execution, so create a temporary if an lvalue is
13893 required. */
13894 if (fallback == fb_lvalue)
13895 {
13896 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
13897 mark_addressable (*expr_p);
13898 ret = GS_OK;
13899 }
13900 break;
13901
13902 case CALL_EXPR:
13903 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
13904
13905 /* C99 code may assign to an array in a structure returned
13906 from a function, and this has undefined behavior only on
13907 execution, so create a temporary if an lvalue is
13908 required. */
13909 if (fallback == fb_lvalue)
13910 {
13911 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
13912 mark_addressable (*expr_p);
13913 ret = GS_OK;
13914 }
13915 break;
13916
13917 case TREE_LIST:
13918 gcc_unreachable ();
13919
13920 case COMPOUND_EXPR:
13921 ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
13922 break;
13923
13924 case COMPOUND_LITERAL_EXPR:
13925 ret = gimplify_compound_literal_expr (expr_p, pre_p,
13926 gimple_test_f, fallback);
13927 break;
13928
13929 case MODIFY_EXPR:
13930 case INIT_EXPR:
13931 ret = gimplify_modify_expr (expr_p, pre_p, post_p,
13932 fallback != fb_none);
13933 break;
13934
13935 case TRUTH_ANDIF_EXPR:
13936 case TRUTH_ORIF_EXPR:
13937 {
13938 /* Preserve the original type of the expression and the
13939 source location of the outer expression. */
13940 tree org_type = TREE_TYPE (*expr_p);
13941 *expr_p = gimple_boolify (*expr_p);
13942 *expr_p = build3_loc (input_location, COND_EXPR,
13943 org_type, *expr_p,
13944 fold_convert_loc
13945 (input_location,
13946 org_type, boolean_true_node),
13947 fold_convert_loc
13948 (input_location,
13949 org_type, boolean_false_node));
13950 ret = GS_OK;
13951 break;
13952 }
13953
13954 case TRUTH_NOT_EXPR:
13955 {
13956 tree type = TREE_TYPE (*expr_p);
13957 /* The parsers are careful to generate TRUTH_NOT_EXPR
13958 only with operands that are always zero or one.
13959 We do not fold here but handle the only interesting case
13960 manually, as fold may re-introduce the TRUTH_NOT_EXPR. */
13961 *expr_p = gimple_boolify (*expr_p);
13962 if (TYPE_PRECISION (TREE_TYPE (*expr_p)) == 1)
13963 *expr_p = build1_loc (input_location, BIT_NOT_EXPR,
13964 TREE_TYPE (*expr_p),
13965 TREE_OPERAND (*expr_p, 0));
13966 else
13967 *expr_p = build2_loc (input_location, BIT_XOR_EXPR,
13968 TREE_TYPE (*expr_p),
13969 TREE_OPERAND (*expr_p, 0),
13970 build_int_cst (TREE_TYPE (*expr_p), 1));
13971 if (!useless_type_conversion_p (type, TREE_TYPE (*expr_p)))
13972 *expr_p = fold_convert_loc (input_location, type, *expr_p);
13973 ret = GS_OK;
13974 break;
13975 }
13976
13977 case ADDR_EXPR:
13978 ret = gimplify_addr_expr (expr_p, pre_p, post_p);
13979 break;
13980
13981 case ANNOTATE_EXPR:
13982 {
13983 tree cond = TREE_OPERAND (*expr_p, 0);
13984 tree kind = TREE_OPERAND (*expr_p, 1);
13985 tree data = TREE_OPERAND (*expr_p, 2);
13986 tree type = TREE_TYPE (cond);
13987 if (!INTEGRAL_TYPE_P (type))
13988 {
13989 *expr_p = cond;
13990 ret = GS_OK;
13991 break;
13992 }
13993 tree tmp = create_tmp_var (type);
13994 gimplify_arg (&cond, pre_p, EXPR_LOCATION (*expr_p));
13995 gcall *call
13996 = gimple_build_call_internal (IFN_ANNOTATE, 3, cond, kind, data);
13997 gimple_call_set_lhs (call, tmp);
13998 gimplify_seq_add_stmt (pre_p, call);
13999 *expr_p = tmp;
14000 ret = GS_ALL_DONE;
14001 break;
14002 }
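/* Sketch of the lowering: ANNOTATE_EXPR <cond, kind, data> with an
   integral condition becomes a call to the internal function, i.e.
     tmp = .ANNOTATE (cond, kind, data);
   emitted into *PRE_P, and *EXPR_P is replaced by tmp.  */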
14003
14004 case VA_ARG_EXPR:
14005 ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
14006 break;
14007
14008 CASE_CONVERT:
14009 if (IS_EMPTY_STMT (*expr_p))
14010 {
14011 ret = GS_ALL_DONE;
14012 break;
14013 }
14014
14015 if (VOID_TYPE_P (TREE_TYPE (*expr_p))
14016 || fallback == fb_none)
14017 {
14018 /* Just strip a conversion to void (or in void context) and
14019 try again. */
14020 *expr_p = TREE_OPERAND (*expr_p, 0);
14021 ret = GS_OK;
14022 break;
14023 }
14024
14025 ret = gimplify_conversion (expr_p);
14026 if (ret == GS_ERROR)
14027 break;
14028 if (*expr_p != save_expr)
14029 break;
14030 /* FALLTHRU */
14031
14032 case FIX_TRUNC_EXPR:
14033 /* unary_expr: ... | '(' cast ')' val | ... */
14034 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
14035 is_gimple_val, fb_rvalue);
14036 recalculate_side_effects (*expr_p);
14037 break;
14038
14039 case INDIRECT_REF:
14040 {
14041 bool volatilep = TREE_THIS_VOLATILE (*expr_p);
14042 bool notrap = TREE_THIS_NOTRAP (*expr_p);
14043 tree saved_ptr_type = TREE_TYPE (TREE_OPERAND (*expr_p, 0));
14044
14045 *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
14046 if (*expr_p != save_expr)
14047 {
14048 ret = GS_OK;
14049 break;
14050 }
14051
14052 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
14053 is_gimple_reg, fb_rvalue);
14054 if (ret == GS_ERROR)
14055 break;
14056
14057 recalculate_side_effects (*expr_p);
14058 *expr_p = fold_build2_loc (input_location, MEM_REF,
14059 TREE_TYPE (*expr_p),
14060 TREE_OPERAND (*expr_p, 0),
14061 build_int_cst (saved_ptr_type, 0));
14062 TREE_THIS_VOLATILE (*expr_p) = volatilep;
14063 TREE_THIS_NOTRAP (*expr_p) = notrap;
14064 ret = GS_OK;
14065 break;
14066 }
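/* Sketch: when fold_indirect_ref cannot simplify "*p", the pointer p
   is gimplified to a register and the reference is rebuilt as
   MEM_REF <p, 0>, preserving the original volatility, notrap flag
   and pointer type.  */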
14067
14068 /* We arrive here through the various re-gimplification paths. */
14069 case MEM_REF:
14070 /* First try re-folding the whole thing. */
14071 tmp = fold_binary (MEM_REF, TREE_TYPE (*expr_p),
14072 TREE_OPERAND (*expr_p, 0),
14073 TREE_OPERAND (*expr_p, 1));
14074 if (tmp)
14075 {
14076 REF_REVERSE_STORAGE_ORDER (tmp)
14077 = REF_REVERSE_STORAGE_ORDER (*expr_p);
14078 *expr_p = tmp;
14079 recalculate_side_effects (*expr_p);
14080 ret = GS_OK;
14081 break;
14082 }
14083 /* Avoid re-gimplifying the address operand if it is already
14084 in suitable form. Re-gimplifying would mark the address
14085 operand addressable. Always gimplify when not in SSA form
14086 as we still may have to gimplify decls with value-exprs. */
14087 if (!gimplify_ctxp || !gimple_in_ssa_p (cfun)
14088 || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p, 0)))
14089 {
14090 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
14091 is_gimple_mem_ref_addr, fb_rvalue);
14092 if (ret == GS_ERROR)
14093 break;
14094 }
14095 recalculate_side_effects (*expr_p);
14096 ret = GS_ALL_DONE;
14097 break;
14098
14099 /* Constants need not be gimplified. */
14100 case INTEGER_CST:
14101 case REAL_CST:
14102 case FIXED_CST:
14103 case STRING_CST:
14104 case COMPLEX_CST:
14105 case VECTOR_CST:
14106 /* Drop the overflow flag on constants; we do not want
14107 that in the GIMPLE IL. */
14108 if (TREE_OVERFLOW_P (*expr_p))
14109 *expr_p = drop_tree_overflow (*expr_p);
14110 ret = GS_ALL_DONE;
14111 break;
14112
14113 case CONST_DECL:
14114 /* If we require an lvalue, such as for ADDR_EXPR, retain the
14115 CONST_DECL node. Otherwise the decl is replaceable by its
14116 value. */
14117 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
14118 if (fallback & fb_lvalue)
14119 ret = GS_ALL_DONE;
14120 else
14121 {
14122 *expr_p = DECL_INITIAL (*expr_p);
14123 ret = GS_OK;
14124 }
14125 break;
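/* For example, a use of the enumerator GREEN from
   "enum color { GREEN = 1 }" is replaced by its DECL_INITIAL, the
   INTEGER_CST 1, unless an lvalue was requested.  */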
14126
14127 case DECL_EXPR:
14128 ret = gimplify_decl_expr (expr_p, pre_p);
14129 break;
14130
14131 case BIND_EXPR:
14132 ret = gimplify_bind_expr (expr_p, pre_p);
14133 break;
14134
14135 case LOOP_EXPR:
14136 ret = gimplify_loop_expr (expr_p, pre_p);
14137 break;
14138
14139 case SWITCH_EXPR:
14140 ret = gimplify_switch_expr (expr_p, pre_p);
14141 break;
14142
14143 case EXIT_EXPR:
14144 ret = gimplify_exit_expr (expr_p);
14145 break;
14146
14147 case GOTO_EXPR:
14148 /* If the target is not a LABEL_DECL, then it is a computed jump
14149 and the target needs to be gimplified. */
14150 if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
14151 {
14152 ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
14153 NULL, is_gimple_val, fb_rvalue);
14154 if (ret == GS_ERROR)
14155 break;
14156 }
14157 gimplify_seq_add_stmt (pre_p,
14158 gimple_build_goto (GOTO_DESTINATION (*expr_p)));
14159 ret = GS_ALL_DONE;
14160 break;
14161
14162 case PREDICT_EXPR:
14163 gimplify_seq_add_stmt (pre_p,
14164 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
14165 PREDICT_EXPR_OUTCOME (*expr_p)));
14166 ret = GS_ALL_DONE;
14167 break;
14168
14169 case LABEL_EXPR:
14170 ret = gimplify_label_expr (expr_p, pre_p);
14171 label = LABEL_EXPR_LABEL (*expr_p);
14172 gcc_assert (decl_function_context (label) == current_function_decl);
14173
14174 /* If the label is used in a goto statement, or the address of the
14175 label is taken, we need to unpoison all variables that were seen
14176 so far. Doing so prevents us from reporting false positives. */
14177 if (asan_poisoned_variables
14178 && asan_used_labels != NULL
14179 && asan_used_labels->contains (label))
14180 asan_poison_variables (asan_poisoned_variables, false, pre_p);
14181 break;
14182
14183 case CASE_LABEL_EXPR:
14184 ret = gimplify_case_label_expr (expr_p, pre_p);
14185
14186 if (gimplify_ctxp->live_switch_vars)
14187 asan_poison_variables (gimplify_ctxp->live_switch_vars, false,
14188 pre_p);
14189 break;
14190
14191 case RETURN_EXPR:
14192 ret = gimplify_return_expr (*expr_p, pre_p);
14193 break;
14194
14195 case CONSTRUCTOR:
14196 /* Don't reduce this in place; let gimplify_init_constructor work its
14197 magic. But if we're just elaborating this for side effects, just
14198 gimplify any element that has side-effects. */
14199 if (fallback == fb_none)
14200 {
14201 unsigned HOST_WIDE_INT ix;
14202 tree val;
14203 tree temp = NULL_TREE;
14204 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p), ix, val)
14205 if (TREE_SIDE_EFFECTS (val))
14206 append_to_statement_list (val, &temp);
14207
14208 *expr_p = temp;
14209 ret = temp ? GS_OK : GS_ALL_DONE;
14210 }
14211 /* C99 code may assign to an array in a constructed
14212 structure or union, and this has undefined behavior only
14213 on execution, so create a temporary if an lvalue is
14214 required. */
14215 else if (fallback == fb_lvalue)
14216 {
14217 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
14218 mark_addressable (*expr_p);
14219 ret = GS_OK;
14220 }
14221 else
14222 ret = GS_ALL_DONE;
14223 break;
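/* Illustration of the fb_none path: if the value of
   "(struct S){ f (), 0, g () }" is unused, only the side-effecting
   calls f () and g () are kept as statements; the constructor itself
   is dropped.  */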
14224
14225 /* The following are special cases that are not handled by the
14226 original GIMPLE grammar. */
14227
14228 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
14229 eliminated. */
14230 case SAVE_EXPR:
14231 ret = gimplify_save_expr (expr_p, pre_p, post_p);
14232 break;
14233
14234 case BIT_FIELD_REF:
14235 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
14236 post_p, is_gimple_lvalue, fb_either);
14237 recalculate_side_effects (*expr_p);
14238 break;
14239
14240 case TARGET_MEM_REF:
14241 {
14242 enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;
14243
14244 if (TMR_BASE (*expr_p))
14245 r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
14246 post_p, is_gimple_mem_ref_addr, fb_either);
14247 if (TMR_INDEX (*expr_p))
14248 r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
14249 post_p, is_gimple_val, fb_rvalue);
14250 if (TMR_INDEX2 (*expr_p))
14251 r1 = gimplify_expr (&TMR_INDEX2 (*expr_p), pre_p,
14252 post_p, is_gimple_val, fb_rvalue);
14253 /* TMR_STEP and TMR_OFFSET are always integer constants. */
14254 ret = MIN (r0, r1);
14255 }
14256 break;
14257
14258 case NON_LVALUE_EXPR:
14259 /* This should have been stripped above. */
14260 gcc_unreachable ();
14261
14262 case ASM_EXPR:
14263 ret = gimplify_asm_expr (expr_p, pre_p, post_p);
14264 break;
14265
14266 case TRY_FINALLY_EXPR:
14267 case TRY_CATCH_EXPR:
14268 {
14269 gimple_seq eval, cleanup;
14270 gtry *try_;
14271
14272 /* Calls to destructors are generated automatically in the
14273 FINALLY/CATCH block. They should have UNKNOWN_LOCATION. However,
14274 gimplify_call_expr will reset such call stmts to input_location
14275 if it finds the stmt's location to be unknown. To prevent that
14276 resetting for destructors, we set input_location to UNKNOWN_LOCATION.
14277 Note that this only affects the destructor calls in the FINALLY/CATCH
14278 block; input_location is automatically restored to its original
14279 value by the end of gimplify_expr. */
14280 input_location = UNKNOWN_LOCATION;
14281 eval = cleanup = NULL;
14282 gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
14283 if (TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
14284 && TREE_CODE (TREE_OPERAND (*expr_p, 1)) == EH_ELSE_EXPR)
14285 {
14286 gimple_seq n = NULL, e = NULL;
14287 gimplify_and_add (TREE_OPERAND (TREE_OPERAND (*expr_p, 1),
14288 0), &n);
14289 gimplify_and_add (TREE_OPERAND (TREE_OPERAND (*expr_p, 1),
14290 1), &e);
14291 if (!gimple_seq_empty_p (n) && !gimple_seq_empty_p (e))
14292 {
14293 geh_else *stmt = gimple_build_eh_else (n, e);
14294 gimple_seq_add_stmt (&cleanup, stmt);
14295 }
14296 }
14297 else
14298 gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
14299 /* Don't create bogus GIMPLE_TRY with empty cleanup. */
14300 if (gimple_seq_empty_p (cleanup))
14301 {
14302 gimple_seq_add_seq (pre_p, eval);
14303 ret = GS_ALL_DONE;
14304 break;
14305 }
14306 try_ = gimple_build_try (eval, cleanup,
14307 TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
14308 ? GIMPLE_TRY_FINALLY
14309 : GIMPLE_TRY_CATCH);
14310 if (EXPR_HAS_LOCATION (save_expr))
14311 gimple_set_location (try_, EXPR_LOCATION (save_expr));
14312 else if (LOCATION_LOCUS (saved_location) != UNKNOWN_LOCATION)
14313 gimple_set_location (try_, saved_location);
14314 if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
14315 gimple_try_set_catch_is_cleanup (try_,
14316 TRY_CATCH_IS_CLEANUP (*expr_p));
14317 gimplify_seq_add_stmt (pre_p, try_);
14318 ret = GS_ALL_DONE;
14319 break;
14320 }
14321
14322 case CLEANUP_POINT_EXPR:
14323 ret = gimplify_cleanup_point_expr (expr_p, pre_p);
14324 break;
14325
14326 case TARGET_EXPR:
14327 ret = gimplify_target_expr (expr_p, pre_p, post_p);
14328 break;
14329
14330 case CATCH_EXPR:
14331 {
14332 gimple *c;
14333 gimple_seq handler = NULL;
14334 gimplify_and_add (CATCH_BODY (*expr_p), &handler);
14335 c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
14336 gimplify_seq_add_stmt (pre_p, c);
14337 ret = GS_ALL_DONE;
14338 break;
14339 }
14340
14341 case EH_FILTER_EXPR:
14342 {
14343 gimple *ehf;
14344 gimple_seq failure = NULL;
14345
14346 gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
14347 ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
14348 gimple_set_no_warning (ehf, TREE_NO_WARNING (*expr_p));
14349 gimplify_seq_add_stmt (pre_p, ehf);
14350 ret = GS_ALL_DONE;
14351 break;
14352 }
14353
14354 case OBJ_TYPE_REF:
14355 {
14356 enum gimplify_status r0, r1;
14357 r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
14358 post_p, is_gimple_val, fb_rvalue);
14359 r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
14360 post_p, is_gimple_val, fb_rvalue);
14361 TREE_SIDE_EFFECTS (*expr_p) = 0;
14362 ret = MIN (r0, r1);
14363 }
14364 break;
14365
14366 case LABEL_DECL:
14367 /* We get here when taking the address of a label. We mark
14368 the label as "forced", meaning it can never be removed and
14369 it is a potential target for any computed goto. */
14370 FORCED_LABEL (*expr_p) = 1;
14371 ret = GS_ALL_DONE;
14372 break;
14373
14374 case STATEMENT_LIST:
14375 ret = gimplify_statement_list (expr_p, pre_p);
14376 break;
14377
14378 case WITH_SIZE_EXPR:
14379 {
14380 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
14381 post_p == &internal_post ? NULL : post_p,
14382 gimple_test_f, fallback);
14383 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
14384 is_gimple_val, fb_rvalue);
14385 ret = GS_ALL_DONE;
14386 }
14387 break;
14388
14389 case VAR_DECL:
14390 case PARM_DECL:
14391 ret = gimplify_var_or_parm_decl (expr_p);
14392 break;
14393
14394 case RESULT_DECL:
14395 /* When within an OMP context, notice uses of variables. */
14396 if (gimplify_omp_ctxp)
14397 omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
14398 ret = GS_ALL_DONE;
14399 break;
14400
14401 case DEBUG_EXPR_DECL:
14402 gcc_unreachable ();
14403
14404 case DEBUG_BEGIN_STMT:
14405 gimplify_seq_add_stmt (pre_p,
14406 gimple_build_debug_begin_stmt
14407 (TREE_BLOCK (*expr_p),
14408 EXPR_LOCATION (*expr_p)));
14409 ret = GS_ALL_DONE;
14410 *expr_p = NULL;
14411 break;
14412
14413 case SSA_NAME:
14414 /* Allow callbacks into the gimplifier during optimization. */
14415 ret = GS_ALL_DONE;
14416 break;
14417
14418 case OMP_PARALLEL:
14419 gimplify_omp_parallel (expr_p, pre_p);
14420 ret = GS_ALL_DONE;
14421 break;
14422
14423 case OMP_TASK:
14424 gimplify_omp_task (expr_p, pre_p);
14425 ret = GS_ALL_DONE;
14426 break;
14427
14428 case OMP_FOR:
14429 case OMP_SIMD:
14430 case OMP_DISTRIBUTE:
14431 case OMP_TASKLOOP:
14432 case OACC_LOOP:
14433 ret = gimplify_omp_for (expr_p, pre_p);
14434 break;
14435
14436 case OMP_LOOP:
14437 ret = gimplify_omp_loop (expr_p, pre_p);
14438 break;
14439
14440 case OACC_CACHE:
14441 gimplify_oacc_cache (expr_p, pre_p);
14442 ret = GS_ALL_DONE;
14443 break;
14444
14445 case OACC_DECLARE:
14446 gimplify_oacc_declare (expr_p, pre_p);
14447 ret = GS_ALL_DONE;
14448 break;
14449
14450 case OACC_HOST_DATA:
14451 case OACC_DATA:
14452 case OACC_KERNELS:
14453 case OACC_PARALLEL:
14454 case OACC_SERIAL:
14455 case OMP_SECTIONS:
14456 case OMP_SINGLE:
14457 case OMP_TARGET:
14458 case OMP_TARGET_DATA:
14459 case OMP_TEAMS:
14460 gimplify_omp_workshare (expr_p, pre_p);
14461 ret = GS_ALL_DONE;
14462 break;
14463
14464 case OACC_ENTER_DATA:
14465 case OACC_EXIT_DATA:
14466 case OACC_UPDATE:
14467 case OMP_TARGET_UPDATE:
14468 case OMP_TARGET_ENTER_DATA:
14469 case OMP_TARGET_EXIT_DATA:
14470 gimplify_omp_target_update (expr_p, pre_p);
14471 ret = GS_ALL_DONE;
14472 break;
14473
14474 case OMP_SECTION:
14475 case OMP_MASTER:
14476 case OMP_ORDERED:
14477 case OMP_CRITICAL:
14478 case OMP_SCAN:
14479 {
14480 gimple_seq body = NULL;
14481 gimple *g;
14482 bool saved_in_omp_construct = in_omp_construct;
14483
14484 in_omp_construct = true;
14485 gimplify_and_add (OMP_BODY (*expr_p), &body);
14486 in_omp_construct = saved_in_omp_construct;
14487 switch (TREE_CODE (*expr_p))
14488 {
14489 case OMP_SECTION:
14490 g = gimple_build_omp_section (body);
14491 break;
14492 case OMP_MASTER:
14493 g = gimple_build_omp_master (body);
14494 break;
14495 case OMP_ORDERED:
14496 g = gimplify_omp_ordered (*expr_p, body);
14497 break;
14498 case OMP_CRITICAL:
14499 gimplify_scan_omp_clauses (&OMP_CRITICAL_CLAUSES (*expr_p),
14500 pre_p, ORT_WORKSHARE, OMP_CRITICAL);
14501 gimplify_adjust_omp_clauses (pre_p, body,
14502 &OMP_CRITICAL_CLAUSES (*expr_p),
14503 OMP_CRITICAL);
14504 g = gimple_build_omp_critical (body,
14505 OMP_CRITICAL_NAME (*expr_p),
14506 OMP_CRITICAL_CLAUSES (*expr_p));
14507 break;
14508 case OMP_SCAN:
14509 gimplify_scan_omp_clauses (&OMP_SCAN_CLAUSES (*expr_p),
14510 pre_p, ORT_WORKSHARE, OMP_SCAN);
14511 gimplify_adjust_omp_clauses (pre_p, body,
14512 &OMP_SCAN_CLAUSES (*expr_p),
14513 OMP_SCAN);
14514 g = gimple_build_omp_scan (body, OMP_SCAN_CLAUSES (*expr_p));
14515 break;
14516 default:
14517 gcc_unreachable ();
14518 }
14519 gimplify_seq_add_stmt (pre_p, g);
14520 ret = GS_ALL_DONE;
14521 break;
14522 }
14523
14524 case OMP_TASKGROUP:
14525 {
14526 gimple_seq body = NULL;
14527
14528 tree *pclauses = &OMP_TASKGROUP_CLAUSES (*expr_p);
14529 bool saved_in_omp_construct = in_omp_construct;
14530 gimplify_scan_omp_clauses (pclauses, pre_p, ORT_TASKGROUP,
14531 OMP_TASKGROUP);
14532 gimplify_adjust_omp_clauses (pre_p, NULL, pclauses, OMP_TASKGROUP);
14533
14534 in_omp_construct = true;
14535 gimplify_and_add (OMP_BODY (*expr_p), &body);
14536 in_omp_construct = saved_in_omp_construct;
14537 gimple_seq cleanup = NULL;
14538 tree fn = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_END);
14539 gimple *g = gimple_build_call (fn, 0);
14540 gimple_seq_add_stmt (&cleanup, g);
14541 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
14542 body = NULL;
14543 gimple_seq_add_stmt (&body, g);
14544 g = gimple_build_omp_taskgroup (body, *pclauses);
14545 gimplify_seq_add_stmt (pre_p, g);
14546 ret = GS_ALL_DONE;
14547 break;
14548 }
14549
14550 case OMP_ATOMIC:
14551 case OMP_ATOMIC_READ:
14552 case OMP_ATOMIC_CAPTURE_OLD:
14553 case OMP_ATOMIC_CAPTURE_NEW:
14554 ret = gimplify_omp_atomic (expr_p, pre_p);
14555 break;
14556
14557 case TRANSACTION_EXPR:
14558 ret = gimplify_transaction (expr_p, pre_p);
14559 break;
14560
14561 case TRUTH_AND_EXPR:
14562 case TRUTH_OR_EXPR:
14563 case TRUTH_XOR_EXPR:
14564 {
14565 tree orig_type = TREE_TYPE (*expr_p);
14566 tree new_type, xop0, xop1;
14567 *expr_p = gimple_boolify (*expr_p);
14568 new_type = TREE_TYPE (*expr_p);
14569 if (!useless_type_conversion_p (orig_type, new_type))
14570 {
14571 *expr_p = fold_convert_loc (input_location, orig_type, *expr_p);
14572 ret = GS_OK;
14573 break;
14574 }
14575
14576 /* Boolified binary truth expressions are semantically equivalent
14577 to bitwise binary expressions. Canonicalize them to the
14578 bitwise variant. */
14579 switch (TREE_CODE (*expr_p))
14580 {
14581 case TRUTH_AND_EXPR:
14582 TREE_SET_CODE (*expr_p, BIT_AND_EXPR);
14583 break;
14584 case TRUTH_OR_EXPR:
14585 TREE_SET_CODE (*expr_p, BIT_IOR_EXPR);
14586 break;
14587 case TRUTH_XOR_EXPR:
14588 TREE_SET_CODE (*expr_p, BIT_XOR_EXPR);
14589 break;
14590 default:
14591 break;
14592 }
14593 /* Now make sure that operands have compatible type to
14594 expression's new_type. */
14595 xop0 = TREE_OPERAND (*expr_p, 0);
14596 xop1 = TREE_OPERAND (*expr_p, 1);
14597 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop0)))
14598 TREE_OPERAND (*expr_p, 0) = fold_convert_loc (input_location,
14599 new_type,
14600 xop0);
14601 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop1)))
14602 TREE_OPERAND (*expr_p, 1) = fold_convert_loc (input_location,
14603 new_type,
14604 xop1);
14605 /* Continue classified as tcc_binary. */
14606 goto expr_2;
14607 }
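/* Sketch of the canonicalization: the non-short-circuiting
   TRUTH_AND_EXPR <a, b> is boolified and turned into
   BIT_AND_EXPR <a, b>, with both operands converted to the boolean
   type if needed, and then handled by the common binary code at
   expr_2 below.  */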
14608
14609 case VEC_COND_EXPR:
14610 goto expr_3;
14611
14612 case VEC_PERM_EXPR:
14613 /* Classified as tcc_expression. */
14614 goto expr_3;
14615
14616 case BIT_INSERT_EXPR:
14617 /* Argument 3 is a constant. */
14618 goto expr_2;
14619
14620 case POINTER_PLUS_EXPR:
14621 {
14622 enum gimplify_status r0, r1;
14623 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
14624 post_p, is_gimple_val, fb_rvalue);
14625 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
14626 post_p, is_gimple_val, fb_rvalue);
14627 recalculate_side_effects (*expr_p);
14628 ret = MIN (r0, r1);
14629 break;
14630 }
14631
14632 default:
14633 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
14634 {
14635 case tcc_comparison:
14636 /* Handle comparison of objects of non-scalar-mode aggregates
14637 with a call to memcmp. It would be nice to only have to do
14638 this for variable-sized objects, but then we'd have to allow
14639 the same nest of reference nodes we allow for MODIFY_EXPR and
14640 that's too complex.
14641 
14642 Compare scalar-mode aggregates as scalar-mode values. Using
14643 memcmp for them would be very inefficient at best, and is
14644 plain wrong if bitfields are involved. */
14645 {
14646 tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
14647
14648 /* Vector comparisons need no boolification. */
14649 if (TREE_CODE (type) == VECTOR_TYPE)
14650 goto expr_2;
14651 else if (!AGGREGATE_TYPE_P (type))
14652 {
14653 tree org_type = TREE_TYPE (*expr_p);
14654 *expr_p = gimple_boolify (*expr_p);
14655 if (!useless_type_conversion_p (org_type,
14656 TREE_TYPE (*expr_p)))
14657 {
14658 *expr_p = fold_convert_loc (input_location,
14659 org_type, *expr_p);
14660 ret = GS_OK;
14661 }
14662 else
14663 goto expr_2;
14664 }
14665 else if (TYPE_MODE (type) != BLKmode)
14666 ret = gimplify_scalar_mode_aggregate_compare (expr_p);
14667 else
14668 ret = gimplify_variable_sized_compare (expr_p);
14669
14670 break;
14671 }
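/* Illustrative outcomes: "x == y" on a BLKmode struct goes through
   the variable-sized (memcmp-based) compare; on a small struct with
   an integer mode it is compared as a scalar-mode value; and on
   vectors or ordinary scalars it falls through to the binary
   handling with boolification as needed.  */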
14672
14673 /* If *EXPR_P does not need to be special-cased, handle it
14674 according to its class. */
14675 case tcc_unary:
14676 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
14677 post_p, is_gimple_val, fb_rvalue);
14678 break;
14679
14680 case tcc_binary:
14681 expr_2:
14682 {
14683 enum gimplify_status r0, r1;
14684
14685 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
14686 post_p, is_gimple_val, fb_rvalue);
14687 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
14688 post_p, is_gimple_val, fb_rvalue);
14689
14690 ret = MIN (r0, r1);
14691 break;
14692 }
14693
14694 expr_3:
14695 {
14696 enum gimplify_status r0, r1, r2;
14697
14698 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
14699 post_p, is_gimple_val, fb_rvalue);
14700 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
14701 post_p, is_gimple_val, fb_rvalue);
14702 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
14703 post_p, is_gimple_val, fb_rvalue);
14704
14705 ret = MIN (MIN (r0, r1), r2);
14706 break;
14707 }
14708
14709 case tcc_declaration:
14710 case tcc_constant:
14711 ret = GS_ALL_DONE;
14712 goto dont_recalculate;
14713
14714 default:
14715 gcc_unreachable ();
14716 }
14717
14718 recalculate_side_effects (*expr_p);
14719
14720 dont_recalculate:
14721 break;
14722 }
14723
14724 gcc_assert (*expr_p || ret != GS_OK);
14725 }
14726 while (ret == GS_OK);
14727
14728 /* If we encountered an error_mark somewhere nested inside, either
14729 stub out the statement or propagate the error back out. */
14730 if (ret == GS_ERROR)
14731 {
14732 if (is_statement)
14733 *expr_p = NULL;
14734 goto out;
14735 }
14736
14737 /* This was only valid as a return value from the langhook, which
14738 we handled. Make sure it doesn't escape from any other context. */
14739 gcc_assert (ret != GS_UNHANDLED);
14740
14741 if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
14742 {
14743 /* We aren't looking for a value, and we don't have a valid
14744 statement. If it doesn't have side-effects, throw it away.
14745 We can also get here with code such as "*&&L;", where L is
14746 a LABEL_DECL that is marked as FORCED_LABEL. */
14747 if (TREE_CODE (*expr_p) == LABEL_DECL
14748 || !TREE_SIDE_EFFECTS (*expr_p))
14749 *expr_p = NULL;
14750 else if (!TREE_THIS_VOLATILE (*expr_p))
14751 {
14752 /* This is probably a _REF that contains something nested that
14753 has side effects. Recurse through the operands to find it. */
14754 enum tree_code code = TREE_CODE (*expr_p);
14755
14756 switch (code)
14757 {
14758 case COMPONENT_REF:
14759 case REALPART_EXPR:
14760 case IMAGPART_EXPR:
14761 case VIEW_CONVERT_EXPR:
14762 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
14763 gimple_test_f, fallback);
14764 break;
14765
14766 case ARRAY_REF:
14767 case ARRAY_RANGE_REF:
14768 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
14769 gimple_test_f, fallback);
14770 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
14771 gimple_test_f, fallback);
14772 break;
14773
14774 default:
14775 /* Anything else with side-effects must be converted to
14776 a valid statement before we get here. */
14777 gcc_unreachable ();
14778 }
14779
14780 *expr_p = NULL;
14781 }
14782 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
14783 && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode)
14784 {
14785 /* Historically, the compiler has treated a bare reference
14786 to a non-BLKmode volatile lvalue as forcing a load. */
14787 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
14788
14789 /* Normally, we do not want to create a temporary for a
14790 TREE_ADDRESSABLE type because such a type should not be
14791 copied by bitwise-assignment. However, we make an
14792 exception here, as all we are doing here is ensuring that
14793 we read the bytes that make up the type. We use
14794 create_tmp_var_raw because create_tmp_var will abort when
14795 given a TREE_ADDRESSABLE type. */
14796 tree tmp = create_tmp_var_raw (type, "vol");
14797 gimple_add_tmp_var (tmp);
14798 gimplify_assign (tmp, *expr_p, pre_p);
14799 *expr_p = NULL;
14800 }
14801 else
14802 /* We can't do anything useful with a volatile reference to
14803 an incomplete type, so just throw it away. Likewise for
14804 a BLKmode type, since any implicit inner load should
14805 already have been turned into an explicit one by the
14806 gimplification process. */
14807 *expr_p = NULL;
14808 }
14809
14810 /* If we are gimplifying at the statement level, we're done. Tack
14811 everything together and return. */
14812 if (fallback == fb_none || is_statement)
14813 {
14814 /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
14815 it out for GC to reclaim it. */
14816 *expr_p = NULL_TREE;
14817
14818 if (!gimple_seq_empty_p (internal_pre)
14819 || !gimple_seq_empty_p (internal_post))
14820 {
14821 gimplify_seq_add_seq (&internal_pre, internal_post);
14822 gimplify_seq_add_seq (pre_p, internal_pre);
14823 }
14824
14825 /* The result of gimplifying *EXPR_P is going to be the last few
14826 statements in *PRE_P and *POST_P. Add location information
14827 to all the statements that were added by the gimplification
14828 helpers. */
14829 if (!gimple_seq_empty_p (*pre_p))
14830 annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);
14831
14832 if (!gimple_seq_empty_p (*post_p))
14833 annotate_all_with_location_after (*post_p, post_last_gsi,
14834 input_location);
14835
14836 goto out;
14837 }
14838
14839 #ifdef ENABLE_GIMPLE_CHECKING
14840 if (*expr_p)
14841 {
14842 enum tree_code code = TREE_CODE (*expr_p);
14843 /* These expressions should already be in gimple IR form. */
14844 gcc_assert (code != MODIFY_EXPR
14845 && code != ASM_EXPR
14846 && code != BIND_EXPR
14847 && code != CATCH_EXPR
14848 && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
14849 && code != EH_FILTER_EXPR
14850 && code != GOTO_EXPR
14851 && code != LABEL_EXPR
14852 && code != LOOP_EXPR
14853 && code != SWITCH_EXPR
14854 && code != TRY_FINALLY_EXPR
14855 && code != EH_ELSE_EXPR
14856 && code != OACC_PARALLEL
14857 && code != OACC_KERNELS
14858 && code != OACC_SERIAL
14859 && code != OACC_DATA
14860 && code != OACC_HOST_DATA
14861 && code != OACC_DECLARE
14862 && code != OACC_UPDATE
14863 && code != OACC_ENTER_DATA
14864 && code != OACC_EXIT_DATA
14865 && code != OACC_CACHE
14866 && code != OMP_CRITICAL
14867 && code != OMP_FOR
14868 && code != OACC_LOOP
14869 && code != OMP_MASTER
14870 && code != OMP_TASKGROUP
14871 && code != OMP_ORDERED
14872 && code != OMP_PARALLEL
14873 && code != OMP_SCAN
14874 && code != OMP_SECTIONS
14875 && code != OMP_SECTION
14876 && code != OMP_SINGLE);
14877 }
14878 #endif
14879
14880 /* Otherwise we're gimplifying a subexpression, so the resulting
14881 value is interesting. If it's a valid operand that matches
14882 GIMPLE_TEST_F, we're done, unless we are handling some
14883 post-effects internally, in which case we need to copy into
14884 a temporary before adding the post-effects to POST_P. */
14885 if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
14886 goto out;
14887
14888 /* Otherwise, we need to create a new temporary for the gimplified
14889 expression. */
14890
14891 /* We can't return an lvalue if we have an internal postqueue. The
14892 object the lvalue refers to would (probably) be modified by the
14893 postqueue; we need to copy the value out first, which means an
14894 rvalue. */
14895 if ((fallback & fb_lvalue)
14896 && gimple_seq_empty_p (internal_post)
14897 && is_gimple_addressable (*expr_p))
14898 {
14899 /* An lvalue will do. Take the address of the expression, store it
14900 in a temporary, and replace the expression with an INDIRECT_REF of
14901 that temporary. */
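/* Sketch: for an addressable expression E this emits "tmp = &E;" and
   rewrites *EXPR_P as MEM_REF <tmp, 0>, giving the MEM_REF a type
   realigned to the object's actual alignment when that differs from
   the type's default.  */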
14902 tree ref_alias_type = reference_alias_ptr_type (*expr_p);
14903 unsigned int ref_align = get_object_alignment (*expr_p);
14904 tree ref_type = TREE_TYPE (*expr_p);
14905 tmp = build_fold_addr_expr_loc (input_location, *expr_p);
14906 gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
14907 if (TYPE_ALIGN (ref_type) != ref_align)
14908 ref_type = build_aligned_type (ref_type, ref_align);
14909 *expr_p = build2 (MEM_REF, ref_type,
14910 tmp, build_zero_cst (ref_alias_type));
14911 }
14912 else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
14913 {
14914 /* An rvalue will do. Assign the gimplified expression into a
14915 new temporary TMP and replace the original expression with
14916 TMP. First, make sure that the expression has a type so that
14917 it can be assigned into a temporary. */
14918 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
14919 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
14920 }
14921 else
14922 {
14923 #ifdef ENABLE_GIMPLE_CHECKING
14924 if (!(fallback & fb_mayfail))
14925 {
14926 fprintf (stderr, "gimplification failed:\n");
14927 print_generic_expr (stderr, *expr_p);
14928 debug_tree (*expr_p);
14929 internal_error ("gimplification failed");
14930 }
14931 #endif
14932 gcc_assert (fallback & fb_mayfail);
14933
14934 /* If this is an asm statement, and the user asked for the
14935 impossible, don't die. Fail and let gimplify_asm_expr
14936 issue an error. */
14937 ret = GS_ERROR;
14938 goto out;
14939 }
14940
14941 /* Make sure the temporary matches our predicate. */
14942 gcc_assert ((*gimple_test_f) (*expr_p));
14943
14944 if (!gimple_seq_empty_p (internal_post))
14945 {
14946 annotate_all_with_location (internal_post, input_location);
14947 gimplify_seq_add_seq (pre_p, internal_post);
14948 }
14949
14950 out:
14951 input_location = saved_location;
14952 return ret;
14953 }
14954
14955 /* Like gimplify_expr but make sure the gimplified result is not itself
14956 an SSA name (but a decl if it otherwise would be). Temporaries
14957 required by evaluating *EXPR_P may still be SSA names. */
14958
14959 static enum gimplify_status
14960 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
14961 bool (*gimple_test_f) (tree), fallback_t fallback,
14962 bool allow_ssa)
14963 {
14964 bool was_ssa_name_p = TREE_CODE (*expr_p) == SSA_NAME;
14965 enum gimplify_status ret = gimplify_expr (expr_p, pre_p, post_p,
14966 gimple_test_f, fallback);
14967 if (! allow_ssa
14968 && TREE_CODE (*expr_p) == SSA_NAME)
14969 {
14970 tree name = *expr_p;
14971 if (was_ssa_name_p)
14972 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, NULL, false);
14973 else
14974 {
14975 /* Avoid the extra copy if possible. */
14976 *expr_p = create_tmp_reg (TREE_TYPE (name));
14977 if (!gimple_nop_p (SSA_NAME_DEF_STMT (name)))
14978 gimple_set_lhs (SSA_NAME_DEF_STMT (name), *expr_p);
14979 release_ssa_name (name);
14980 }
14981 }
14982 return ret;
14983 }
14984
14985 /* Look through TYPE for variable-sized objects and gimplify each such
14986 size that we find. Add to LIST_P any statements generated. */
14987
14988 void
14989 gimplify_type_sizes (tree type, gimple_seq *list_p)
14990 {
14991 tree field, t;
14992
14993 if (type == NULL || type == error_mark_node)
14994 return;
14995
14996 /* We first do the main variant, then copy into any other variants. */
14997 type = TYPE_MAIN_VARIANT (type);
14998
14999 /* Avoid infinite recursion. */
15000 if (TYPE_SIZES_GIMPLIFIED (type))
15001 return;
15002
15003 TYPE_SIZES_GIMPLIFIED (type) = 1;
15004
15005 switch (TREE_CODE (type))
15006 {
15007 case INTEGER_TYPE:
15008 case ENUMERAL_TYPE:
15009 case BOOLEAN_TYPE:
15010 case REAL_TYPE:
15011 case FIXED_POINT_TYPE:
15012 gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
15013 gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);
15014
15015 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
15016 {
15017 TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
15018 TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
15019 }
15020 break;
15021
15022 case ARRAY_TYPE:
15023 /* These types may not have declarations, so handle them here. */
15024 gimplify_type_sizes (TREE_TYPE (type), list_p);
15025 gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
15026 /* Ensure VLA bounds aren't removed: at -O0 they should be variables
15027 with assigned stack slots, and at -O1+ with -g they should be
15028 tracked by VTA. */
15029 if (!(TYPE_NAME (type)
15030 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
15031 && DECL_IGNORED_P (TYPE_NAME (type)))
15032 && TYPE_DOMAIN (type)
15033 && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
15034 {
15035 t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
15036 if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
15037 DECL_IGNORED_P (t) = 0;
15038 t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
15039 if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
15040 DECL_IGNORED_P (t) = 0;
15041 }
15042 break;
15043
15044 case RECORD_TYPE:
15045 case UNION_TYPE:
15046 case QUAL_UNION_TYPE:
15047 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
15048 if (TREE_CODE (field) == FIELD_DECL)
15049 {
15050 gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
15051 gimplify_one_sizepos (&DECL_SIZE (field), list_p);
15052 gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
15053 gimplify_type_sizes (TREE_TYPE (field), list_p);
15054 }
15055 break;
15056
15057 case POINTER_TYPE:
15058 case REFERENCE_TYPE:
15059 /* We used to recurse on the pointed-to type here, which turned out to
15060 be incorrect because its definition might refer to variables not
15061 yet initialized at this point if a forward declaration is involved.
15062
15063 It was actually useful for anonymous pointed-to types to ensure
15064 that the sizes evaluation dominates every possible later use of the
15065 values. Restricting to such types here would be safe since there
15066 is no possible forward declaration around, but would introduce an
15067 undesirable middle-end semantic to anonymity. We then defer to
15068 front-ends the responsibility of ensuring that the sizes are
15069 evaluated both early and late enough, e.g. by attaching artificial
15070 type declarations to the tree. */
15071 break;
15072
15073 default:
15074 break;
15075 }
15076
15077 gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
15078 gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);
15079
15080 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
15081 {
15082 TYPE_SIZE (t) = TYPE_SIZE (type);
15083 TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
15084 TYPE_SIZES_GIMPLIFIED (t) = 1;
15085 }
15086 }
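/* For instance, with the C99 VLA type of "int a[n]" the array
   domain's bounds and the type's TYPE_SIZE contain expressions in n;
   gimplify_one_sizepos below evaluates each such size into a
   temporary and appends the computation to the statement list.  */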
15087
15088 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
15089 a size or position, has had all of its SAVE_EXPRs evaluated.
15090 We add any required statements to *STMT_P. */
15091
15092 void
15093 gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
15094 {
15095 tree expr = *expr_p;
15096
15097 /* We don't do anything if the value isn't there, is constant, or contains
15098 a PLACEHOLDER_EXPR. We also don't want to do anything if it's already
15099 a VAR_DECL. If it's a VAR_DECL from another function, the gimplifier
15100 will want to replace it with a new variable, but that will cause problems
15101 if this type is from outside the function. It's OK to have that here. */
15102 if (expr == NULL_TREE
15103 || is_gimple_constant (expr)
15104 || TREE_CODE (expr) == VAR_DECL
15105 || CONTAINS_PLACEHOLDER_P (expr))
15106 return;
15107
15108 *expr_p = unshare_expr (expr);
15109
15110 /* SSA names in decl/type fields are a bad idea - they'll get reclaimed
15111 if the def vanishes. */
15112 gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue, false);
15113
15114 /* If expr wasn't already is_gimple_sizepos or is_gimple_constant from the
15115 FE, ensure that it is a VAR_DECL; otherwise we might handle some decls
15116 through gimplify_vla_decl even when all their sizes are INTEGER_CSTs. */
15117 if (is_gimple_constant (*expr_p))
15118 *expr_p = get_initialized_tmp_var (*expr_p, stmt_p, NULL, false);
15119 }
15120
15121 /* Gimplify the body of statements of FNDECL and return a GIMPLE_BIND node
15122 containing the sequence of corresponding GIMPLE statements. If DO_PARMS
15123 is true, also gimplify the parameters. */
15124
15125 gbind *
15126 gimplify_body (tree fndecl, bool do_parms)
15127 {
15128 location_t saved_location = input_location;
15129 gimple_seq parm_stmts, parm_cleanup = NULL, seq;
15130 gimple *outer_stmt;
15131 gbind *outer_bind;
15132
15133 timevar_push (TV_TREE_GIMPLIFY);
15134
15135 init_tree_ssa (cfun);
15136
15137 /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
15138 gimplification. */
15139 default_rtl_profile ();
15140
15141 gcc_assert (gimplify_ctxp == NULL);
15142 push_gimplify_context (true);
15143
15144 if (flag_openacc || flag_openmp)
15145 {
15146 gcc_assert (gimplify_omp_ctxp == NULL);
15147 if (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (fndecl)))
15148 gimplify_omp_ctxp = new_omp_context (ORT_IMPLICIT_TARGET);
15149 }
15150
15151 /* Unshare most shared trees in the body and in that of any nested functions.
15152 It would seem we don't have to do this for nested functions because
15153 they are supposed to be output and then the outer function gimplified
15154 first, but the g++ front end doesn't always do it that way. */
15155 unshare_body (fndecl);
15156 unvisit_body (fndecl);
15157
15158 /* Make sure input_location isn't set to something weird. */
15159 input_location = DECL_SOURCE_LOCATION (fndecl);
15160
15161 /* Resolve callee-copies. This has to be done before processing
15162 the body so that DECL_VALUE_EXPR gets processed correctly. */
15163 parm_stmts = do_parms ? gimplify_parameters (&parm_cleanup) : NULL;
15164
15165 /* Gimplify the function's body. */
15166 seq = NULL;
15167 gimplify_stmt (&DECL_SAVED_TREE (fndecl), &seq);
15168 outer_stmt = gimple_seq_first_nondebug_stmt (seq);
15169 if (!outer_stmt)
15170 {
15171 outer_stmt = gimple_build_nop ();
15172 gimplify_seq_add_stmt (&seq, outer_stmt);
15173 }
15174
15175 /* The body must contain exactly one statement, a GIMPLE_BIND. If this is
15176 not the case, wrap everything in a GIMPLE_BIND to make it so. */
15177 if (gimple_code (outer_stmt) == GIMPLE_BIND
15178 && (gimple_seq_first_nondebug_stmt (seq)
15179 == gimple_seq_last_nondebug_stmt (seq)))
15180 {
15181 outer_bind = as_a <gbind *> (outer_stmt);
15182 if (gimple_seq_first_stmt (seq) != outer_stmt
15183 || gimple_seq_last_stmt (seq) != outer_stmt)
15184 {
15185 /* If there are debug stmts before or after outer_stmt, move them
15186 inside of outer_bind body. */
15187 gimple_stmt_iterator gsi = gsi_for_stmt (outer_stmt, &seq);
15188 gimple_seq second_seq = NULL;
15189 if (gimple_seq_first_stmt (seq) != outer_stmt
15190 && gimple_seq_last_stmt (seq) != outer_stmt)
15191 {
15192 second_seq = gsi_split_seq_after (gsi);
15193 gsi_remove (&gsi, false);
15194 }
15195 else if (gimple_seq_first_stmt (seq) != outer_stmt)
15196 gsi_remove (&gsi, false);
15197 else
15198 {
15199 gsi_remove (&gsi, false);
15200 second_seq = seq;
15201 seq = NULL;
15202 }
15203 gimple_seq_add_seq_without_update (&seq,
15204 gimple_bind_body (outer_bind));
15205 gimple_seq_add_seq_without_update (&seq, second_seq);
15206 gimple_bind_set_body (outer_bind, seq);
15207 }
15208 }
15209 else
15210 outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);
15211
15212 DECL_SAVED_TREE (fndecl) = NULL_TREE;
15213
15214 /* If we had callee-copies statements, insert them at the beginning
15215 of the function and clear DECL_VALUE_EXPR_P on the parameters. */
15216 if (!gimple_seq_empty_p (parm_stmts))
15217 {
15218 tree parm;
15219
15220 gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
15221 if (parm_cleanup)
15222 {
15223 gtry *g = gimple_build_try (parm_stmts, parm_cleanup,
15224 GIMPLE_TRY_FINALLY);
15225 parm_stmts = NULL;
15226 gimple_seq_add_stmt (&parm_stmts, g);
15227 }
15228 gimple_bind_set_body (outer_bind, parm_stmts);
15229
15230 for (parm = DECL_ARGUMENTS (current_function_decl);
15231 parm; parm = DECL_CHAIN (parm))
15232 if (DECL_HAS_VALUE_EXPR_P (parm))
15233 {
15234 DECL_HAS_VALUE_EXPR_P (parm) = 0;
15235 DECL_IGNORED_P (parm) = 0;
15236 }
15237 }
15238
15239 if ((flag_openacc || flag_openmp || flag_openmp_simd)
15240 && gimplify_omp_ctxp)
15241 {
15242 delete_omp_context (gimplify_omp_ctxp);
15243 gimplify_omp_ctxp = NULL;
15244 }
15245
15246 pop_gimplify_context (outer_bind);
15247 gcc_assert (gimplify_ctxp == NULL);
15248
15249 if (flag_checking && !seen_error ())
15250 verify_gimple_in_seq (gimple_bind_body (outer_bind));
15251
15252 timevar_pop (TV_TREE_GIMPLIFY);
15253 input_location = saved_location;
15254
15255 return outer_bind;
15256 }
15257
15258 typedef char *char_p; /* For DEF_VEC_P. */
15259
15260 /* Return whether we should exclude FNDECL from instrumentation. */
15261
15262 static bool
15263 flag_instrument_functions_exclude_p (tree fndecl)
15264 {
15265 vec<char_p> *v;
15266
15267 v = (vec<char_p> *) flag_instrument_functions_exclude_functions;
15268 if (v && v->length () > 0)
15269 {
15270 const char *name;
15271 int i;
15272 char *s;
15273
15274 name = lang_hooks.decl_printable_name (fndecl, 1);
15275 FOR_EACH_VEC_ELT (*v, i, s)
15276 if (strstr (name, s) != NULL)
15277 return true;
15278 }
15279
15280 v = (vec<char_p> *) flag_instrument_functions_exclude_files;
15281 if (v && v->length () > 0)
15282 {
15283 const char *name;
15284 int i;
15285 char *s;
15286
15287 name = DECL_SOURCE_FILE (fndecl);
15288 FOR_EACH_VEC_ELT (*v, i, s)
15289 if (strstr (name, s) != NULL)
15290 return true;
15291 }
15292
15293 return false;
15294 }
15295
15296 /* Entry point to the gimplification pass. FNDECL is the FUNCTION_DECL
15297 node for the function we want to gimplify.
15298
15299 Return the sequence of GIMPLE statements corresponding to the body
15300 of FNDECL. */
15301
15302 void
15303 gimplify_function_tree (tree fndecl)
15304 {
15305 gimple_seq seq;
15306 gbind *bind;
15307
15308 gcc_assert (!gimple_body (fndecl));
15309
15310 if (DECL_STRUCT_FUNCTION (fndecl))
15311 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
15312 else
15313 push_struct_function (fndecl);
15314
15315 /* Tentatively set PROP_gimple_lva here, and reset it in gimplify_va_arg_expr
15316 if necessary. */
15317 cfun->curr_properties |= PROP_gimple_lva;
15318
15319 if (asan_sanitize_use_after_scope () && sanitize_flags_p (SANITIZE_ADDRESS))
15320 asan_poisoned_variables = new hash_set<tree> ();
15321 bind = gimplify_body (fndecl, true);
15322 if (asan_poisoned_variables)
15323 {
15324 delete asan_poisoned_variables;
15325 asan_poisoned_variables = NULL;
15326 }
15327
15328 /* The tree body of the function is no longer needed, replace it
15329 with the new GIMPLE body. */
15330 seq = NULL;
15331 gimple_seq_add_stmt (&seq, bind);
15332 gimple_set_body (fndecl, seq);
15333
15334 /* If we're instrumenting function entry/exit, then prepend the call to
15335 the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
15336 catch the exit hook. */
15337 /* ??? Add some way to ignore exceptions for this TFE. */
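/* A sketch of the shape built below, assuming the standard
   __cyg_profile hooks:
     return_addr = __builtin_return_address (0);
     __cyg_profile_func_enter (this_fn_addr, return_addr);
     try {
       <original body>
     } finally {
       return_addr = __builtin_return_address (0);
       __cyg_profile_func_exit (this_fn_addr, return_addr);
     }  */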
15338 if (flag_instrument_function_entry_exit
15339 && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
15340 /* Do not instrument extern inline functions. */
15341 && !(DECL_DECLARED_INLINE_P (fndecl)
15342 && DECL_EXTERNAL (fndecl)
15343 && DECL_DISREGARD_INLINE_LIMITS (fndecl))
15344 && !flag_instrument_functions_exclude_p (fndecl))
15345 {
15346 tree x;
15347 gbind *new_bind;
15348 gimple *tf;
15349 gimple_seq cleanup = NULL, body = NULL;
15350 tree tmp_var, this_fn_addr;
15351 gcall *call;
15352
15353 /* The instrumentation hooks aren't going to call the instrumented
15354 function and the address they receive is expected to be matchable
15355 against symbol addresses. Make sure we don't create a trampoline,
15356 in case the current function is nested. */
15357 this_fn_addr = build_fold_addr_expr (current_function_decl);
15358 TREE_NO_TRAMPOLINE (this_fn_addr) = 1;
15359
15360 x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
15361 call = gimple_build_call (x, 1, integer_zero_node);
15362 tmp_var = create_tmp_var (ptr_type_node, "return_addr");
15363 gimple_call_set_lhs (call, tmp_var);
15364 gimplify_seq_add_stmt (&cleanup, call);
15365 x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_EXIT);
15366 call = gimple_build_call (x, 2, this_fn_addr, tmp_var);
15367 gimplify_seq_add_stmt (&cleanup, call);
15368 tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);
15369
15370 x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
15371 call = gimple_build_call (x, 1, integer_zero_node);
15372 tmp_var = create_tmp_var (ptr_type_node, "return_addr");
15373 gimple_call_set_lhs (call, tmp_var);
15374 gimplify_seq_add_stmt (&body, call);
15375 x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_ENTER);
15376 call = gimple_build_call (x, 2, this_fn_addr, tmp_var);
15377 gimplify_seq_add_stmt (&body, call);
15378 gimplify_seq_add_stmt (&body, tf);
15379 new_bind = gimple_build_bind (NULL, body, NULL);
15380
15381 /* Replace the current function body with the body
15382 wrapped in the try/finally TF. */
15383 seq = NULL;
15384 gimple_seq_add_stmt (&seq, new_bind);
15385 gimple_set_body (fndecl, seq);
15386 bind = new_bind;
15387 }
15388
15389 if (sanitize_flags_p (SANITIZE_THREAD)
15390 && param_tsan_instrument_func_entry_exit)
15391 {
15392 gcall *call = gimple_build_call_internal (IFN_TSAN_FUNC_EXIT, 0);
15393 gimple *tf = gimple_build_try (seq, call, GIMPLE_TRY_FINALLY);
15394 gbind *new_bind = gimple_build_bind (NULL, tf, NULL);
15395 /* Replace the current function body with the body
15396 wrapped in the try/finally TF. */
15397 seq = NULL;
15398 gimple_seq_add_stmt (&seq, new_bind);
15399 gimple_set_body (fndecl, seq);
15400 }
15401
15402 DECL_SAVED_TREE (fndecl) = NULL_TREE;
15403 cfun->curr_properties |= PROP_gimple_any;
15404
15405 pop_cfun ();
15406
15407 dump_function (TDI_gimple, fndecl);
15408 }
15409
15410 /* Return a dummy expression of type TYPE in order to keep going after an
15411 error. */
15412
15413 static tree
15414 dummy_object (tree type)
15415 {
15416 tree t = build_int_cst (build_pointer_type (type), 0);
15417 return build2 (MEM_REF, type, t, t);
15418 }
15419
15420 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
15421 builtin function, but a very special sort of operator. */
15422
15423 enum gimplify_status
15424 gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p,
15425 gimple_seq *post_p ATTRIBUTE_UNUSED)
15426 {
15427 tree promoted_type, have_va_type;
15428 tree valist = TREE_OPERAND (*expr_p, 0);
15429 tree type = TREE_TYPE (*expr_p);
15430 tree t, tag, aptag;
15431 location_t loc = EXPR_LOCATION (*expr_p);
15432
15433 /* Verify that valist is of the proper type. */
15434 have_va_type = TREE_TYPE (valist);
15435 if (have_va_type == error_mark_node)
15436 return GS_ERROR;
15437 have_va_type = targetm.canonical_va_list_type (have_va_type);
15438 if (have_va_type == NULL_TREE
15439 && POINTER_TYPE_P (TREE_TYPE (valist)))
15440 /* Handle 'Case 1: Not an array type' from c-common.c/build_va_arg. */
15441 have_va_type
15442 = targetm.canonical_va_list_type (TREE_TYPE (TREE_TYPE (valist)));
15443 gcc_assert (have_va_type != NULL_TREE);
15444
15445 /* Generate a diagnostic for requesting data of a type that cannot
15446 be passed through `...' due to type promotion at the call site. */
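/* E.g. va_arg (ap, char) is diagnosed: a char argument is promoted
   to int when passed through `...', so the callee must fetch an int
   (and likewise float is promoted to double).  */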
15447 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
15448 != type)
15449 {
15450 static bool gave_help;
15451 bool warned;
15452 /* Use the expansion point to handle cases such as passing bool (defined
15453 in a system header) through `...'. */
15454 location_t xloc
15455 = expansion_point_location_if_in_system_header (loc);
15456
15457 /* Unfortunately, this is merely undefined, rather than a constraint
15458 violation, so we cannot make this an error. If this call is never
15459 executed, the program is still strictly conforming. */
15460 auto_diagnostic_group d;
15461 warned = warning_at (xloc, 0,
15462 "%qT is promoted to %qT when passed through %<...%>",
15463 type, promoted_type);
15464 if (!gave_help && warned)
15465 {
15466 gave_help = true;
15467 inform (xloc, "(so you should pass %qT not %qT to %<va_arg%>)",
15468 promoted_type, type);
15469 }
15470
15471 /* We can, however, treat "undefined" any way we please.
15472 Call abort to encourage the user to fix the program. */
15473 if (warned)
15474 inform (xloc, "if this code is reached, the program will abort");
15475 /* Before the abort, allow the evaluation of the va_list
15476 expression to exit or longjmp. */
15477 gimplify_and_add (valist, pre_p);
15478 t = build_call_expr_loc (loc,
15479 builtin_decl_implicit (BUILT_IN_TRAP), 0);
15480 gimplify_and_add (t, pre_p);
15481
15482 /* This is dead code, but go ahead and finish so that the
15483 mode of the result comes out right. */
15484 *expr_p = dummy_object (type);
15485 return GS_ALL_DONE;
15486 }
15487
15488 tag = build_int_cst (build_pointer_type (type), 0);
15489 aptag = build_int_cst (TREE_TYPE (valist), 0);
15490
15491 *expr_p = build_call_expr_internal_loc (loc, IFN_VA_ARG, type, 3,
15492 valist, tag, aptag);
15493
15494 /* Clear the tentatively set PROP_gimple_lva, to indicate that IFN_VA_ARG
15495 needs to be expanded. */
15496 cfun->curr_properties &= ~PROP_gimple_lva;
15497
15498 return GS_OK;
15499 }
15500
15501 /* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.
15502
15503 DST/SRC are the destination and source respectively. You can pass
15504 ungimplified trees in DST or SRC, in which case they will be
15505 converted to a gimple operand if necessary.
15506
15507 This function returns the newly created GIMPLE_ASSIGN tuple. */
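/* For example (illustrative): if DST is the variable x and SRC the
   GENERIC tree "a + b", gimplify_assign (x, src, pre_p) appends
     x = a + b;
   to *SEQ_P, first gimplifying the operands if they are not yet
   valid GIMPLE operands.  */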
15508
15509 gimple *
15510 gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
15511 {
15512 tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
15513 gimplify_and_add (t, seq_p);
15514 ggc_free (t);
15515 return gimple_seq_last_stmt (*seq_p);
15516 }
15517
15518 inline hashval_t
15519 gimplify_hasher::hash (const elt_t *p)
15520 {
15521 tree t = p->val;
15522 return iterative_hash_expr (t, 0);
15523 }
15524
15525 inline bool
15526 gimplify_hasher::equal (const elt_t *p1, const elt_t *p2)
15527 {
15528 tree t1 = p1->val;
15529 tree t2 = p2->val;
15530 enum tree_code code = TREE_CODE (t1);
15531
15532 if (TREE_CODE (t2) != code
15533 || TREE_TYPE (t1) != TREE_TYPE (t2))
15534 return false;
15535
15536 if (!operand_equal_p (t1, t2, 0))
15537 return false;
15538
15539 /* Only allow them to compare equal if they also hash equal; otherwise
15540 results are nondeterministic, and we fail bootstrap comparison. */
15541 gcc_checking_assert (hash (p1) == hash (p2));
15542
15543 return true;
15544 }