/* Tree lowering pass.  This pass converts the GENERIC functions-as-trees
   tree representation into the GIMPLE form.
   Copyright (C) 2002-2020 Free Software Foundation, Inc.
   Major work done by Sebastian Pop <s.pop@laposte.net>,
   Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "memmodel.h"
#include "tm_p.h"
#include "gimple.h"
#include "gimple-predict.h"
#include "tree-pass.h"		/* FIXME: only for PROP_gimple_any */
#include "ssa.h"
#include "cgraph.h"
#include "tree-pretty-print.h"
#include "diagnostic-core.h"
#include "alias.h"
#include "fold-const.h"
#include "calls.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "tree-cfg.h"
#include "tree-ssa.h"
#include "omp-general.h"
#include "omp-low.h"
#include "gimple-low.h"
#include "gomp-constants.h"
#include "splay-tree.h"
#include "gimple-walk.h"
#include "langhooks-def.h"	/* FIXME: for lhd_set_decl_assembler_name */
#include "builtins.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "dbgcnt.h"
#include "omp-offload.h"
#include "context.h"
#include "tree-nested.h"

/* Hash set of poisoned variables in a bind expr.  */
static hash_set<tree> *asan_poisoned_variables = NULL;

enum gimplify_omp_var_data
{
  GOVD_SEEN = 0x000001,
  GOVD_EXPLICIT = 0x000002,
  GOVD_SHARED = 0x000004,
  GOVD_PRIVATE = 0x000008,
  GOVD_FIRSTPRIVATE = 0x000010,
  GOVD_LASTPRIVATE = 0x000020,
  GOVD_REDUCTION = 0x000040,
  GOVD_LOCAL = 0x000080,
  GOVD_MAP = 0x000100,
  GOVD_DEBUG_PRIVATE = 0x000200,
  GOVD_PRIVATE_OUTER_REF = 0x000400,
  GOVD_LINEAR = 0x000800,
  GOVD_ALIGNED = 0x001000,

  /* Flag for GOVD_MAP: don't copy back.  */
  GOVD_MAP_TO_ONLY = 0x002000,

  /* Flag for GOVD_LINEAR or GOVD_LASTPRIVATE: no outer reference.  */
  GOVD_LINEAR_LASTPRIVATE_NO_OUTER = 0x004000,

  GOVD_MAP_0LEN_ARRAY = 0x008000,

  /* Flag for GOVD_MAP, if it is an always, to or always, tofrom mapping.  */
  GOVD_MAP_ALWAYS_TO = 0x010000,

  /* Flag for shared vars that are or might be stored to in the region.  */
  GOVD_WRITTEN = 0x020000,

  /* Flag for GOVD_MAP, if it is a forced mapping.  */
  GOVD_MAP_FORCE = 0x040000,

  /* Flag for GOVD_MAP: must be present already.  */
  GOVD_MAP_FORCE_PRESENT = 0x080000,

  /* Flag for GOVD_MAP: only allocate.  */
  GOVD_MAP_ALLOC_ONLY = 0x100000,

  /* Flag for GOVD_MAP: only copy back.  */
  GOVD_MAP_FROM_ONLY = 0x200000,

  GOVD_NONTEMPORAL = 0x400000,

  /* Flag for GOVD_LASTPRIVATE: conditional modifier.  */
  GOVD_LASTPRIVATE_CONDITIONAL = 0x800000,

  GOVD_CONDTEMP = 0x1000000,

  /* Flag for GOVD_REDUCTION: inscan seen in {in,ex}clusive clause.  */
  GOVD_REDUCTION_INSCAN = 0x2000000,

  /* Flag for GOVD_MAP: (struct) vars that have pointer attachments for
     fields.  */
  GOVD_MAP_HAS_ATTACHMENTS = 0x4000000,

  GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
			   | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LINEAR
			   | GOVD_LOCAL)
};
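
/* The GOVD_* values above are bit flags and are combined when recorded
   for a decl.  For example, a variable listed in an explicit firstprivate
   clause is typically entered as GOVD_FIRSTPRIVATE | GOVD_EXPLICIT, and
   GOVD_SEEN is or'ed in once the variable is actually referenced in the
   region.  */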


enum omp_region_type
{
  ORT_WORKSHARE = 0x00,
  ORT_TASKGROUP = 0x01,
  ORT_SIMD = 0x04,

  ORT_PARALLEL = 0x08,
  ORT_COMBINED_PARALLEL = ORT_PARALLEL | 1,

  ORT_TASK = 0x10,
  ORT_UNTIED_TASK = ORT_TASK | 1,
  ORT_TASKLOOP = ORT_TASK | 2,
  ORT_UNTIED_TASKLOOP = ORT_UNTIED_TASK | 2,

  ORT_TEAMS = 0x20,
  ORT_COMBINED_TEAMS = ORT_TEAMS | 1,
  ORT_HOST_TEAMS = ORT_TEAMS | 2,
  ORT_COMBINED_HOST_TEAMS = ORT_COMBINED_TEAMS | 2,

  /* Data region.  */
  ORT_TARGET_DATA = 0x40,

  /* Data region with offloading.  */
  ORT_TARGET = 0x80,
  ORT_COMBINED_TARGET = ORT_TARGET | 1,
  ORT_IMPLICIT_TARGET = ORT_TARGET | 2,

  /* OpenACC variants.  */
  ORT_ACC = 0x100,  /* A generic OpenACC region.  */
  ORT_ACC_DATA = ORT_ACC | ORT_TARGET_DATA,  /* Data construct.  */
  ORT_ACC_PARALLEL = ORT_ACC | ORT_TARGET,  /* Parallel construct.  */
  ORT_ACC_KERNELS = ORT_ACC | ORT_TARGET | 2,  /* Kernels construct.  */
  ORT_ACC_SERIAL = ORT_ACC | ORT_TARGET | 4,  /* Serial construct.  */
  ORT_ACC_HOST_DATA = ORT_ACC | ORT_TARGET_DATA | 2,  /* Host data.  */

  /* Dummy OpenMP region, used to disable expansion of
     DECL_VALUE_EXPRs in taskloop pre body.  */
  ORT_NONE = 0x200
};

/* Gimplify hashtable helper.  */

struct gimplify_hasher : free_ptr_hash <elt_t>
{
  static inline hashval_t hash (const elt_t *);
  static inline bool equal (const elt_t *, const elt_t *);
};

struct gimplify_ctx
{
  struct gimplify_ctx *prev_context;

  vec<gbind *> bind_expr_stack;
  tree temps;
  gimple_seq conditional_cleanups;
  tree exit_label;
  tree return_temp;

  vec<tree> case_labels;
  hash_set<tree> *live_switch_vars;
  /* The formal temporary table.  Should this be persistent?  */
  hash_table<gimplify_hasher> *temp_htab;

  int conditions;
  unsigned into_ssa : 1;
  unsigned allow_rhs_cond_expr : 1;
  unsigned in_cleanup_point_expr : 1;
  unsigned keep_stack : 1;
  unsigned save_stack : 1;
  unsigned in_switch_expr : 1;
};

enum gimplify_defaultmap_kind
{
  GDMK_SCALAR,
  GDMK_AGGREGATE,
  GDMK_ALLOCATABLE,
  GDMK_POINTER
};

struct gimplify_omp_ctx
{
  struct gimplify_omp_ctx *outer_context;
  splay_tree variables;
  hash_set<tree> *privatized_types;
  tree clauses;
  /* Iteration variables in an OMP_FOR.  */
  vec<tree> loop_iter_var;
  location_t location;
  enum omp_clause_default_kind default_kind;
  enum omp_region_type region_type;
  enum tree_code code;
  bool combined_loop;
  bool distribute;
  bool target_firstprivatize_array_bases;
  bool add_safelen1;
  bool order_concurrent;
  int defaultmap[4];
};

static struct gimplify_ctx *gimplify_ctxp;
static struct gimplify_omp_ctx *gimplify_omp_ctxp;
static bool in_omp_construct;

/* Forward declaration.  */
static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
static hash_map<tree, tree> *oacc_declare_returns;
static enum gimplify_status gimplify_expr (tree *, gimple_seq *, gimple_seq *,
					   bool (*) (tree), fallback_t, bool);
/* Shorter alias for gimple_seq_add_stmt_without_update, for use in
   gimplify.c only.  */

static inline void
gimplify_seq_add_stmt (gimple_seq *seq_p, gimple *gs)
{
  gimple_seq_add_stmt_without_update (seq_p, gs);
}

/* Append sequence SRC to the end of sequence *DST_P.  If *DST_P is
   NULL, a new sequence is allocated.  This function is
   similar to gimple_seq_add_seq, but does not scan the operands.
   During gimplification, we need to manipulate statement sequences
   before the def/use vectors have been constructed.  */

static void
gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
{
  gimple_stmt_iterator si;

  if (src == NULL)
    return;

  si = gsi_last (*dst_p);
  gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
}


/* Pointer to a list of allocated gimplify_ctx structs to be used for pushing
   and popping gimplify contexts.  */

static struct gimplify_ctx *ctx_pool = NULL;

/* Return a gimplify context struct from the pool.  */

static inline struct gimplify_ctx *
ctx_alloc (void)
{
  struct gimplify_ctx * c = ctx_pool;

  if (c)
    ctx_pool = c->prev_context;
  else
    c = XNEW (struct gimplify_ctx);

  memset (c, '\0', sizeof (*c));
  return c;
}

/* Put gimplify context C back into the pool.  */

static inline void
ctx_free (struct gimplify_ctx *c)
{
  c->prev_context = ctx_pool;
  ctx_pool = c;
}

/* Free allocated ctx stack memory.  */

void
free_gimplify_stack (void)
{
  struct gimplify_ctx *c;

  while ((c = ctx_pool))
    {
      ctx_pool = c->prev_context;
      free (c);
    }
}


/* Set up a context for the gimplifier.  */

void
push_gimplify_context (bool in_ssa, bool rhs_cond_ok)
{
  struct gimplify_ctx *c = ctx_alloc ();

  c->prev_context = gimplify_ctxp;
  gimplify_ctxp = c;
  gimplify_ctxp->into_ssa = in_ssa;
  gimplify_ctxp->allow_rhs_cond_expr = rhs_cond_ok;
}

/* Tear down a context for the gimplifier.  If BODY is non-null, then
   put the temporaries into the outer BIND_EXPR.  Otherwise, put them
   in the local_decls.

   BODY is not a sequence, but the first tuple in a sequence.  */

void
pop_gimplify_context (gimple *body)
{
  struct gimplify_ctx *c = gimplify_ctxp;

  gcc_assert (c
	      && (!c->bind_expr_stack.exists ()
		  || c->bind_expr_stack.is_empty ()));
  c->bind_expr_stack.release ();
  gimplify_ctxp = c->prev_context;

  if (body)
    declare_vars (c->temps, body, false);
  else
    record_vars (c->temps);

  delete c->temp_htab;
  c->temp_htab = NULL;
  ctx_free (c);
}

/* Push a GIMPLE_BIND tuple onto the stack of bindings.  */

static void
gimple_push_bind_expr (gbind *bind_stmt)
{
  gimplify_ctxp->bind_expr_stack.reserve (8);
  gimplify_ctxp->bind_expr_stack.safe_push (bind_stmt);
}

/* Pop the first element off the stack of bindings.  */

static void
gimple_pop_bind_expr (void)
{
  gimplify_ctxp->bind_expr_stack.pop ();
}

/* Return the first element of the stack of bindings.  */

gbind *
gimple_current_bind_expr (void)
{
  return gimplify_ctxp->bind_expr_stack.last ();
}

/* Return the stack of bindings created during gimplification.  */

vec<gbind *>
gimple_bind_expr_stack (void)
{
  return gimplify_ctxp->bind_expr_stack;
}

/* Return true iff there is a COND_EXPR between us and the innermost
   CLEANUP_POINT_EXPR.  This info is used by gimple_push_cleanup.  */

static bool
gimple_conditional_context (void)
{
  return gimplify_ctxp->conditions > 0;
}

/* Note that we've entered a COND_EXPR.  */

static void
gimple_push_condition (void)
{
#ifdef ENABLE_GIMPLE_CHECKING
  if (gimplify_ctxp->conditions == 0)
    gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
#endif
  ++(gimplify_ctxp->conditions);
}

/* Note that we've left a COND_EXPR.  If we're back at unconditional scope
   now, add any conditional cleanups we've seen to the prequeue.  */

static void
gimple_pop_condition (gimple_seq *pre_p)
{
  int conds = --(gimplify_ctxp->conditions);

  gcc_assert (conds >= 0);
  if (conds == 0)
    {
      gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
      gimplify_ctxp->conditional_cleanups = NULL;
    }
}

/* A stable comparison routine for use with splay trees and DECLs.  */

static int
splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
{
  tree a = (tree) xa;
  tree b = (tree) xb;

  return DECL_UID (a) - DECL_UID (b);
}

/* Create a new omp construct that deals with variable remapping.  */

static struct gimplify_omp_ctx *
new_omp_context (enum omp_region_type region_type)
{
  struct gimplify_omp_ctx *c;

  c = XCNEW (struct gimplify_omp_ctx);
  c->outer_context = gimplify_omp_ctxp;
  c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
  c->privatized_types = new hash_set<tree>;
  c->location = input_location;
  c->region_type = region_type;
  if ((region_type & ORT_TASK) == 0)
    c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
  else
    c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;
  c->defaultmap[GDMK_SCALAR] = GOVD_MAP;
  c->defaultmap[GDMK_AGGREGATE] = GOVD_MAP;
  c->defaultmap[GDMK_ALLOCATABLE] = GOVD_MAP;
  c->defaultmap[GDMK_POINTER] = GOVD_MAP;

  return c;
}

/* Destroy an omp construct that deals with variable remapping.  */

static void
delete_omp_context (struct gimplify_omp_ctx *c)
{
  splay_tree_delete (c->variables);
  delete c->privatized_types;
  c->loop_iter_var.release ();
  XDELETE (c);
}

static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);

/* Both gimplify the statement T and append it to *SEQ_P.  This function
   behaves exactly as gimplify_stmt, but you don't have to pass T as a
   reference.  */

void
gimplify_and_add (tree t, gimple_seq *seq_p)
{
  gimplify_stmt (&t, seq_p);
}

/* Gimplify statement T into sequence *SEQ_P, and return the first
   tuple in the sequence of generated tuples for this statement.
   Return NULL if gimplifying T produced no tuples.  */

static gimple *
gimplify_and_return_first (tree t, gimple_seq *seq_p)
{
  gimple_stmt_iterator last = gsi_last (*seq_p);

  gimplify_and_add (t, seq_p);

  if (!gsi_end_p (last))
    {
      gsi_next (&last);
      return gsi_stmt (last);
    }
  else
    return gimple_seq_first_stmt (*seq_p);
}

/* Returns true iff T is a valid RHS for an assignment to an un-renamed
   LHS, or for a call argument.  */

static bool
is_gimple_mem_rhs (tree t)
{
  /* If we're dealing with a renamable type, either source or dest must be
     a renamed variable.  */
  if (is_gimple_reg_type (TREE_TYPE (t)))
    return is_gimple_val (t);
  else
    return is_gimple_val (t) || is_gimple_lvalue (t);
}

/* Return true if T is a CALL_EXPR or an expression that can be
   assigned to a temporary.  Note that this predicate should only be
   used during gimplification.  See the rationale for this in
   gimplify_modify_expr.  */

static bool
is_gimple_reg_rhs_or_call (tree t)
{
  return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
	  || TREE_CODE (t) == CALL_EXPR);
}

/* Return true if T is a valid memory RHS or a CALL_EXPR.  Note that
   this predicate should only be used during gimplification.  See the
   rationale for this in gimplify_modify_expr.  */

static bool
is_gimple_mem_rhs_or_call (tree t)
{
  /* If we're dealing with a renamable type, either source or dest must be
     a renamed variable.  */
  if (is_gimple_reg_type (TREE_TYPE (t)))
    return is_gimple_val (t);
  else
    return (is_gimple_val (t)
	    || is_gimple_lvalue (t)
	    || TREE_CLOBBER_P (t)
	    || TREE_CODE (t) == CALL_EXPR);
}

/* Create a temporary with a name derived from VAL.  Subroutine of
   lookup_tmp_var; nobody else should call this function.  */

static inline tree
create_tmp_from_val (tree val)
{
  /* Drop all qualifiers and address-space information from the value type.  */
  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (val));
  tree var = create_tmp_var (type, get_name (val));
  return var;
}

/* Create a temporary to hold the value of VAL.  If IS_FORMAL, try to reuse
   an existing expression temporary.  */

static tree
lookup_tmp_var (tree val, bool is_formal)
{
  tree ret;

  /* If not optimizing, never really reuse a temporary.  local-alloc
     won't allocate any variable that is used in more than one basic
     block, which means it will go into memory, causing much extra
     work in reload and final and poorer code generation, outweighing
     the extra memory allocation here.  */
  if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
    ret = create_tmp_from_val (val);
  else
    {
      elt_t elt, *elt_p;
      elt_t **slot;

      elt.val = val;
      if (!gimplify_ctxp->temp_htab)
	gimplify_ctxp->temp_htab = new hash_table<gimplify_hasher> (1000);
      slot = gimplify_ctxp->temp_htab->find_slot (&elt, INSERT);
      if (*slot == NULL)
	{
	  elt_p = XNEW (elt_t);
	  elt_p->val = val;
	  elt_p->temp = ret = create_tmp_from_val (val);
	  *slot = elt_p;
	}
      else
	{
	  elt_p = *slot;
	  ret = elt_p->temp;
	}
    }

  return ret;
}

/* Helper for get_formal_tmp_var and get_initialized_tmp_var.  */

static tree
internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
		      bool is_formal, bool allow_ssa)
{
  tree t, mod;

  /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
     can create an INIT_EXPR and convert it into a GIMPLE_CALL below.  */
  gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
		 fb_rvalue);

  if (allow_ssa
      && gimplify_ctxp->into_ssa
      && is_gimple_reg_type (TREE_TYPE (val)))
    {
      t = make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val)));
      if (! gimple_in_ssa_p (cfun))
	{
	  const char *name = get_name (val);
	  if (name)
	    SET_SSA_NAME_VAR_OR_IDENTIFIER (t, create_tmp_var_name (name));
	}
    }
  else
    t = lookup_tmp_var (val, is_formal);

  mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));

  SET_EXPR_LOCATION (mod, EXPR_LOC_OR_LOC (val, input_location));

  /* gimplify_modify_expr might want to reduce this further.  */
  gimplify_and_add (mod, pre_p);
  ggc_free (mod);

  return t;
}

/* Return a formal temporary variable initialized with VAL.  PRE_P is as
   in gimplify_expr.  Only use this function if:

   1) The value of the unfactored expression represented by VAL will not
      change between the initialization and use of the temporary, and
   2) The temporary will not be otherwise modified.

   For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
   and #2 means it is inappropriate for && temps.

   For other cases, use get_initialized_tmp_var instead.  */

tree
get_formal_tmp_var (tree val, gimple_seq *pre_p)
{
  return internal_get_tmp_var (val, pre_p, NULL, true, true);
}
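
/* For instance, when gimplifying "a = b + c; d = b + c;" at -O with formal
   temporaries, the second request for a temporary holding "b + c" can be
   satisfied by the temp_htab lookup above, reusing the temporary created
   for the first occurrence instead of making a new one.  */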

/* Return a temporary variable initialized with VAL.  PRE_P and POST_P
   are as in gimplify_expr.  */

tree
get_initialized_tmp_var (tree val, gimple_seq *pre_p,
			 gimple_seq *post_p /* = NULL */,
			 bool allow_ssa /* = true */)
{
  return internal_get_tmp_var (val, pre_p, post_p, false, allow_ssa);
}

/* Declare all the variables in VARS in SCOPE.  If DEBUG_INFO is true,
   generate debug info for them; otherwise don't.  */

void
declare_vars (tree vars, gimple *gs, bool debug_info)
{
  tree last = vars;
  if (last)
    {
      tree temps, block;

      gbind *scope = as_a <gbind *> (gs);

      temps = nreverse (last);

      block = gimple_bind_block (scope);
      gcc_assert (!block || TREE_CODE (block) == BLOCK);
      if (!block || !debug_info)
	{
	  DECL_CHAIN (last) = gimple_bind_vars (scope);
	  gimple_bind_set_vars (scope, temps);
	}
      else
	{
	  /* We need to attach the nodes both to the BIND_EXPR and to its
	     associated BLOCK for debugging purposes.  The key point here
	     is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
	     is a subchain of the BIND_EXPR_VARS of the BIND_EXPR.  */
	  if (BLOCK_VARS (block))
	    BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
	  else
	    {
	      gimple_bind_set_vars (scope,
				    chainon (gimple_bind_vars (scope), temps));
	      BLOCK_VARS (block) = temps;
	    }
	}
    }
}

/* For VAR a VAR_DECL of variable size, try to find a constant upper bound
   for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly.  Abort if
   no such upper bound can be obtained.  */

static void
force_constant_size (tree var)
{
  /* The only attempt we make is by querying the maximum size of objects
     of the variable's type.  */

  HOST_WIDE_INT max_size;

  gcc_assert (VAR_P (var));

  max_size = max_int_size_in_bytes (TREE_TYPE (var));

  gcc_assert (max_size >= 0);

  DECL_SIZE_UNIT (var)
    = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
  DECL_SIZE (var)
    = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
}

/* Push the temporary variable TMP into the current binding.  */

void
gimple_add_tmp_var_fn (struct function *fn, tree tmp)
{
  gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));

  /* Later processing assumes that the object size is constant, which might
     not be true at this point.  Force the use of a constant upper bound in
     this case.  */
  if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp)))
    force_constant_size (tmp);

  DECL_CONTEXT (tmp) = fn->decl;
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;

  record_vars_into (tmp, fn->decl);
}

/* Push the temporary variable TMP into the current binding.  */

void
gimple_add_tmp_var (tree tmp)
{
  gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));

  /* Later processing assumes that the object size is constant, which might
     not be true at this point.  Force the use of a constant upper bound in
     this case.  */
  if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp)))
    force_constant_size (tmp);

  DECL_CONTEXT (tmp) = current_function_decl;
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;

  if (gimplify_ctxp)
    {
      DECL_CHAIN (tmp) = gimplify_ctxp->temps;
      gimplify_ctxp->temps = tmp;

      /* Mark temporaries local within the nearest enclosing parallel.  */
      if (gimplify_omp_ctxp)
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  int flag = GOVD_LOCAL;
	  while (ctx
		 && (ctx->region_type == ORT_WORKSHARE
		     || ctx->region_type == ORT_TASKGROUP
		     || ctx->region_type == ORT_SIMD
		     || ctx->region_type == ORT_ACC))
	    {
	      if (ctx->region_type == ORT_SIMD
		  && TREE_ADDRESSABLE (tmp)
		  && !TREE_STATIC (tmp))
		{
		  if (TREE_CODE (DECL_SIZE_UNIT (tmp)) != INTEGER_CST)
		    ctx->add_safelen1 = true;
		  else
		    flag = GOVD_PRIVATE;
		  break;
		}
	      ctx = ctx->outer_context;
	    }
	  if (ctx)
	    omp_add_variable (ctx, tmp, flag | GOVD_SEEN);
	}
    }
  else if (cfun)
    record_vars (tmp);
  else
    {
      gimple_seq body_seq;

      /* This case is for nested functions.  We need to expose the locals
	 they create.  */
      body_seq = gimple_body (current_function_decl);
      declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
    }
}


\f
/* This page contains routines to unshare tree nodes, i.e. to duplicate tree
   nodes that are referenced more than once in GENERIC functions.  This is
   necessary because gimplification (translation into GIMPLE) is performed
   by modifying tree nodes in-place, so gimplification of a shared node in a
   first context could generate an invalid GIMPLE form in a second context.

   This is achieved with a simple mark/copy/unmark algorithm that walks the
   GENERIC representation top-down, marks nodes with TREE_VISITED the first
   time it encounters them, duplicates them if they already have TREE_VISITED
   set, and finally removes the TREE_VISITED marks it has set.

   The algorithm works only at the function level, i.e. it generates a GENERIC
   representation of a function with no nodes shared within the function when
   passed a GENERIC function (except for nodes that are allowed to be shared).

   At the global level, it is also necessary to unshare tree nodes that are
   referenced in more than one function, for the same aforementioned reason.
   This requires some cooperation from the front-end.  There are 2 strategies:

   1. Manual unsharing.  The front-end needs to call unshare_expr on every
      expression that might end up being shared across functions.

   2. Deep unsharing.  This is an extension of regular unsharing.  Instead
      of calling unshare_expr on expressions that might be shared across
      functions, the front-end pre-marks them with TREE_VISITED.  This will
      ensure that they are unshared on the first reference within functions
      when the regular unsharing algorithm runs.  The counterpart is that
      this algorithm must look deeper than for manual unsharing, which is
      specified by LANG_HOOKS_DEEP_UNSHARING.

   If there are only few specific cases of node sharing across functions, it is
   probably easier for a front-end to unshare the expressions manually.  On the
   contrary, if the expressions generated at the global level are as widespread
   as expressions generated within functions, deep unsharing is very likely the
   way to go.  */
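
/* As a concrete illustration: if a front-end reuses a single tree node for
   "&x" in two different statements, gimplifying the first statement could
   rewrite that node in place and thereby corrupt the second one; the
   mark/copy/unmark walk below gives the second reference its own copy
   before that can happen.  */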

/* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
   These nodes model computations that must be done once.  If we were to
   unshare something like SAVE_EXPR(i++), the gimplification process would
   create wrong code.  However, if DATA is non-null, it must hold a pointer
   set that is used to unshare the subtrees of these nodes.  */

static tree
mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
     copy their subtrees if we can make sure to do it only once.  */
  if (code == SAVE_EXPR || code == TARGET_EXPR || code == BIND_EXPR)
    {
      if (data && !((hash_set<tree> *)data)->add (t))
	;
      else
	*walk_subtrees = 0;
    }

  /* Stop at types, decls, constants like copy_tree_r.  */
  else if (TREE_CODE_CLASS (code) == tcc_type
	   || TREE_CODE_CLASS (code) == tcc_declaration
	   || TREE_CODE_CLASS (code) == tcc_constant)
    *walk_subtrees = 0;

  /* Cope with the statement expression extension.  */
  else if (code == STATEMENT_LIST)
    ;

  /* Leave the bulk of the work to copy_tree_r itself.  */
  else
    copy_tree_r (tp, walk_subtrees, NULL);

  return NULL_TREE;
}

/* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
   If *TP has been visited already, then *TP is deeply copied by calling
   mostly_copy_tree_r.  DATA is passed to mostly_copy_tree_r unmodified.  */

static tree
copy_if_shared_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Skip types, decls, and constants.  But we do want to look at their
     types and the bounds of types.  Mark them as visited so we properly
     unmark their subtrees on the unmark pass.  If we've already seen them,
     don't look down further.  */
  if (TREE_CODE_CLASS (code) == tcc_type
      || TREE_CODE_CLASS (code) == tcc_declaration
      || TREE_CODE_CLASS (code) == tcc_constant)
    {
      if (TREE_VISITED (t))
	*walk_subtrees = 0;
      else
	TREE_VISITED (t) = 1;
    }

  /* If this node has been visited already, unshare it and don't look
     any deeper.  */
  else if (TREE_VISITED (t))
    {
      walk_tree (tp, mostly_copy_tree_r, data, NULL);
      *walk_subtrees = 0;
    }

  /* Otherwise, mark the node as visited and keep looking.  */
  else
    TREE_VISITED (t) = 1;

  return NULL_TREE;
}

/* Unshare most of the shared trees rooted at *TP.  DATA is passed to the
   copy_if_shared_r callback unmodified.  */

void
copy_if_shared (tree *tp, void *data)
{
  walk_tree (tp, copy_if_shared_r, data, NULL);
}

/* Unshare all the trees in the body of FNDECL, as well as in the bodies of
   any nested functions.  */

static void
unshare_body (tree fndecl)
{
  struct cgraph_node *cgn = cgraph_node::get (fndecl);
  /* If the language requires deep unsharing, we need a pointer set to make
     sure we don't repeatedly unshare subtrees of unshareable nodes.  */
  hash_set<tree> *visited
    = lang_hooks.deep_unsharing ? new hash_set<tree> : NULL;

  copy_if_shared (&DECL_SAVED_TREE (fndecl), visited);
  copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl)), visited);
  copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)), visited);

  delete visited;

  if (cgn)
    for (cgn = first_nested_function (cgn); cgn;
	 cgn = next_nested_function (cgn))
      unshare_body (cgn->decl);
}

/* Callback for walk_tree to unmark the visited trees rooted at *TP.
   Subtrees are walked until the first unvisited node is encountered.  */

static tree
unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;

  /* If this node has been visited, unmark it and keep looking.  */
  if (TREE_VISITED (t))
    TREE_VISITED (t) = 0;

  /* Otherwise, don't look any deeper.  */
  else
    *walk_subtrees = 0;

  return NULL_TREE;
}

/* Unmark the visited trees rooted at *TP.  */

static inline void
unmark_visited (tree *tp)
{
  walk_tree (tp, unmark_visited_r, NULL, NULL);
}

/* Likewise, but mark all trees as not visited.  */

static void
unvisit_body (tree fndecl)
{
  struct cgraph_node *cgn = cgraph_node::get (fndecl);

  unmark_visited (&DECL_SAVED_TREE (fndecl));
  unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl)));
  unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)));

  if (cgn)
    for (cgn = first_nested_function (cgn);
	 cgn; cgn = next_nested_function (cgn))
      unvisit_body (cgn->decl);
}

/* Unconditionally make an unshared copy of EXPR.  This is used when using
   stored expressions which span multiple functions, such as BINFO_VTABLE,
   as the normal unsharing process can't tell that they're shared.  */

tree
unshare_expr (tree expr)
{
  walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
  return expr;
}

/* Worker for unshare_expr_without_location.  */

static tree
prune_expr_location (tree *tp, int *walk_subtrees, void *)
{
  if (EXPR_P (*tp))
    SET_EXPR_LOCATION (*tp, UNKNOWN_LOCATION);
  else
    *walk_subtrees = 0;
  return NULL_TREE;
}

/* Similar to unshare_expr but also prune all expression locations
   from EXPR.  */

tree
unshare_expr_without_location (tree expr)
{
  walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
  if (EXPR_P (expr))
    walk_tree (&expr, prune_expr_location, NULL, NULL);
  return expr;
}

/* Return the EXPR_LOCATION of EXPR, if it (maybe recursively) has
   one, OR_ELSE otherwise.  The location of a STATEMENT_LIST
   comprising at least one DEBUG_BEGIN_STMT followed by exactly one
   EXPR is the location of the EXPR.  */

static location_t
rexpr_location (tree expr, location_t or_else = UNKNOWN_LOCATION)
{
  if (!expr)
    return or_else;

  if (EXPR_HAS_LOCATION (expr))
    return EXPR_LOCATION (expr);

  if (TREE_CODE (expr) != STATEMENT_LIST)
    return or_else;

  tree_stmt_iterator i = tsi_start (expr);

  bool found = false;
  while (!tsi_end_p (i) && TREE_CODE (tsi_stmt (i)) == DEBUG_BEGIN_STMT)
    {
      found = true;
      tsi_next (&i);
    }

  if (!found || !tsi_one_before_end_p (i))
    return or_else;

  return rexpr_location (tsi_stmt (i), or_else);
}
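
/* So, for instance, a STATEMENT_LIST of the form

     DEBUG_BEGIN_STMT; DEBUG_BEGIN_STMT; x = 1;

   has the location of "x = 1", whereas a list containing more than one
   statement after the debug markers yields OR_ELSE.  */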

/* Return TRUE iff EXPR (maybe recursively) has a location; see
   rexpr_location for the potential recursion.  */

static inline bool
rexpr_has_location (tree expr)
{
  return rexpr_location (expr) != UNKNOWN_LOCATION;
}

\f
/* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
   contain statements and have a value.  Assign its value to a temporary
   and give it void_type_node.  Return the temporary, or NULL_TREE if
   WRAPPER was already void.  */
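
/* For example, when a statement expression appears on the RHS of an
   assignment, "x = ({ f (); 42; })" is rewritten so that the assignment
   is pushed down to the last value-producing statement, giving
   "({ f (); x = 42; })", with the wrapper itself given void type.  */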

tree
voidify_wrapper_expr (tree wrapper, tree temp)
{
  tree type = TREE_TYPE (wrapper);
  if (type && !VOID_TYPE_P (type))
    {
      tree *p;

      /* Set p to point to the body of the wrapper.  Loop until we find
	 something that isn't a wrapper.  */
      for (p = &wrapper; p && *p; )
	{
	  switch (TREE_CODE (*p))
	    {
	    case BIND_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      /* For a BIND_EXPR, the body is operand 1.  */
	      p = &BIND_EXPR_BODY (*p);
	      break;

	    case CLEANUP_POINT_EXPR:
	    case TRY_FINALLY_EXPR:
	    case TRY_CATCH_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TREE_OPERAND (*p, 0);
	      break;

	    case STATEMENT_LIST:
	      {
		tree_stmt_iterator i = tsi_last (*p);
		TREE_SIDE_EFFECTS (*p) = 1;
		TREE_TYPE (*p) = void_type_node;
		p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
	      }
	      break;

	    case COMPOUND_EXPR:
	      /* Advance to the last statement.  Set all container types to
		 void.  */
	      for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		}
	      break;

	    case TRANSACTION_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TRANSACTION_EXPR_BODY (*p);
	      break;

	    default:
	      /* Assume that any tree upon which voidify_wrapper_expr is
		 directly called is a wrapper, and that its body is op0.  */
	      if (p == &wrapper)
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		  p = &TREE_OPERAND (*p, 0);
		  break;
		}
	      goto out;
	    }
	}

    out:
      if (p == NULL || IS_EMPTY_STMT (*p))
	temp = NULL_TREE;
      else if (temp)
	{
	  /* The wrapper is on the RHS of an assignment that we're pushing
	     down.  */
	  gcc_assert (TREE_CODE (temp) == INIT_EXPR
		      || TREE_CODE (temp) == MODIFY_EXPR);
	  TREE_OPERAND (temp, 1) = *p;
	  *p = temp;
	}
      else
	{
	  temp = create_tmp_var (type, "retval");
	  *p = build2 (INIT_EXPR, type, temp, *p);
	}

      return temp;
    }

  return NULL_TREE;
}

/* Prepare calls to builtins to SAVE and RESTORE the stack as well as
   a temporary through which they communicate.  */

static void
build_stack_save_restore (gcall **save, gcall **restore)
{
  tree tmp_var;

  *save = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE), 0);
  tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
  gimple_call_set_lhs (*save, tmp_var);

  *restore
    = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE),
			 1, tmp_var);
}
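
/* The pair of calls built above corresponds to the GIMPLE

     saved_stack.N = __builtin_stack_save ();
     ...
     __builtin_stack_restore (saved_stack.N);

   which gimplify_bind_expr wraps around blocks whose VLA stack space
   should be reclaimed on exit.  */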

/* Generate an IFN_ASAN_MARK call that poisons the shadow memory of the
   variable DECL.  */

static tree
build_asan_poison_call_expr (tree decl)
{
  /* Do not poison variables that have size equal to zero.  */
  tree unit_size = DECL_SIZE_UNIT (decl);
  if (zerop (unit_size))
    return NULL_TREE;

  tree base = build_fold_addr_expr (decl);

  return build_call_expr_internal_loc (UNKNOWN_LOCATION, IFN_ASAN_MARK,
				       void_type_node, 3,
				       build_int_cst (integer_type_node,
						      ASAN_MARK_POISON),
				       base, unit_size);
}

/* Generate an IFN_ASAN_MARK call that poisons or unpoisons, depending on
   the POISON flag, the shadow memory of variable DECL.  The call is
   inserted at the location identified by iterator IT; the BEFORE flag
   selects whether it goes before or after that point.  */

static void
asan_poison_variable (tree decl, bool poison, gimple_stmt_iterator *it,
		      bool before)
{
  tree unit_size = DECL_SIZE_UNIT (decl);
  tree base = build_fold_addr_expr (decl);

  /* Do not poison variables that have size equal to zero.  */
  if (zerop (unit_size))
    return;

  /* It's necessary to have all stack variables aligned to ASAN granularity
     bytes.  */
  gcc_assert (!hwasan_sanitize_p () || hwasan_sanitize_stack_p ());
  unsigned shadow_granularity
    = hwasan_sanitize_p () ? HWASAN_TAG_GRANULE_SIZE : ASAN_SHADOW_GRANULARITY;
  if (DECL_ALIGN_UNIT (decl) <= shadow_granularity)
    SET_DECL_ALIGN (decl, BITS_PER_UNIT * shadow_granularity);

  HOST_WIDE_INT flags = poison ? ASAN_MARK_POISON : ASAN_MARK_UNPOISON;

  gimple *g
    = gimple_build_call_internal (IFN_ASAN_MARK, 3,
				  build_int_cst (integer_type_node, flags),
				  base, unit_size);

  if (before)
    gsi_insert_before (it, g, GSI_NEW_STMT);
  else
    gsi_insert_after (it, g, GSI_NEW_STMT);
}
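
/* In GIMPLE dumps the internal call built here appears as

     ASAN_MARK (POISON, &decl, size);

   or with UNPOISON when POISON is false.  */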

/* Generate IFN_ASAN_MARK internal call that depending on POISON flag
   either poisons or unpoisons a DECL.  Created statement is appended
   to SEQ_P gimple sequence.  */

static void
asan_poison_variable (tree decl, bool poison, gimple_seq *seq_p)
{
  gimple_stmt_iterator it = gsi_last (*seq_p);
  bool before = false;

  if (gsi_end_p (it))
    before = true;

  asan_poison_variable (decl, poison, &it, before);
}

/* Sort pair of VAR_DECLs A and B by DECL_UID.  */

static int
sort_by_decl_uid (const void *a, const void *b)
{
  const tree *t1 = (const tree *)a;
  const tree *t2 = (const tree *)b;

  int uid1 = DECL_UID (*t1);
  int uid2 = DECL_UID (*t2);

  if (uid1 < uid2)
    return -1;
  else if (uid1 > uid2)
    return 1;
  else
    return 0;
}

/* Generate IFN_ASAN_MARK internal call for all VARIABLES
   depending on POISON flag.  Created statement is appended
   to SEQ_P gimple sequence.  */

static void
asan_poison_variables (hash_set<tree> *variables, bool poison, gimple_seq *seq_p)
{
  unsigned c = variables->elements ();
  if (c == 0)
    return;

  auto_vec<tree> sorted_variables (c);

  for (hash_set<tree>::iterator it = variables->begin ();
       it != variables->end (); ++it)
    sorted_variables.safe_push (*it);

  sorted_variables.qsort (sort_by_decl_uid);

  unsigned i;
  tree var;
  FOR_EACH_VEC_ELT (sorted_variables, i, var)
    {
      asan_poison_variable (var, poison, seq_p);

      /* Add use_after_scope_memory attribute for the variable in order
	 to prevent it from being rewritten into SSA form.  */
      if (!lookup_attribute (ASAN_USE_AFTER_SCOPE_ATTRIBUTE,
			     DECL_ATTRIBUTES (var)))
	DECL_ATTRIBUTES (var)
	  = tree_cons (get_identifier (ASAN_USE_AFTER_SCOPE_ATTRIBUTE),
		       integer_one_node,
		       DECL_ATTRIBUTES (var));
    }
}

/* Gimplify a BIND_EXPR.  Just voidify and recurse.  */

static enum gimplify_status
gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree bind_expr = *expr_p;
  bool old_keep_stack = gimplify_ctxp->keep_stack;
  bool old_save_stack = gimplify_ctxp->save_stack;
  tree t;
  gbind *bind_stmt;
  gimple_seq body, cleanup;
  gcall *stack_save;
  location_t start_locus = 0, end_locus = 0;
  tree ret_clauses = NULL;

  tree temp = voidify_wrapper_expr (bind_expr, NULL);

  /* Mark variables seen in this bind expr.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
    {
      if (VAR_P (t))
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;

	  /* Mark variable as local.  */
	  if (ctx && ctx->region_type != ORT_NONE && !DECL_EXTERNAL (t))
	    {
	      if (! DECL_SEEN_IN_BIND_EXPR_P (t)
		  || splay_tree_lookup (ctx->variables,
					(splay_tree_key) t) == NULL)
		{
		  int flag = GOVD_LOCAL;
		  if (ctx->region_type == ORT_SIMD
		      && TREE_ADDRESSABLE (t)
		      && !TREE_STATIC (t))
		    {
		      if (TREE_CODE (DECL_SIZE_UNIT (t)) != INTEGER_CST)
			ctx->add_safelen1 = true;
		      else
			flag = GOVD_PRIVATE;
		    }
		  omp_add_variable (ctx, t, flag | GOVD_SEEN);
		}
	      /* Static locals inside of target construct or offloaded
		 routines need to be "omp declare target".  */
	      if (TREE_STATIC (t))
		for (; ctx; ctx = ctx->outer_context)
		  if ((ctx->region_type & ORT_TARGET) != 0)
		    {
		      if (!lookup_attribute ("omp declare target",
					     DECL_ATTRIBUTES (t)))
			{
			  tree id = get_identifier ("omp declare target");
			  DECL_ATTRIBUTES (t)
			    = tree_cons (id, NULL_TREE, DECL_ATTRIBUTES (t));
			  varpool_node *node = varpool_node::get (t);
			  if (node)
			    {
			      node->offloadable = 1;
			      if (ENABLE_OFFLOADING && !DECL_EXTERNAL (t))
				{
				  g->have_offload = true;
				  if (!in_lto_p)
				    vec_safe_push (offload_vars, t);
				}
			    }
			}
		      break;
		    }
	    }

	  DECL_SEEN_IN_BIND_EXPR_P (t) = 1;

	  if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
	    cfun->has_local_explicit_reg_vars = true;
	}
    }

  bind_stmt = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
				 BIND_EXPR_BLOCK (bind_expr));
  gimple_push_bind_expr (bind_stmt);

  gimplify_ctxp->keep_stack = false;
  gimplify_ctxp->save_stack = false;

  /* Gimplify the body into the GIMPLE_BIND tuple's body.  */
  body = NULL;
  gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
  gimple_bind_set_body (bind_stmt, body);

  /* Source location wise, the cleanup code (stack_restore and clobbers)
     belongs to the end of the block, so propagate what we have.  The
     stack_save operation belongs to the beginning of block, which we can
     infer from the bind_expr directly if the block has no explicit
     assignment.  */
  if (BIND_EXPR_BLOCK (bind_expr))
    {
      end_locus = BLOCK_SOURCE_END_LOCATION (BIND_EXPR_BLOCK (bind_expr));
      start_locus = BLOCK_SOURCE_LOCATION (BIND_EXPR_BLOCK (bind_expr));
    }
  if (start_locus == 0)
    start_locus = EXPR_LOCATION (bind_expr);

  cleanup = NULL;
  stack_save = NULL;

  /* If the code both contains VLAs and calls alloca, then we cannot reclaim
     the stack space allocated to the VLAs.  */
  if (gimplify_ctxp->save_stack && !gimplify_ctxp->keep_stack)
    {
      gcall *stack_restore;

      /* Save stack on entry and restore it on exit.  Add a try_finally
	 block to achieve this.  */
      build_stack_save_restore (&stack_save, &stack_restore);

      gimple_set_location (stack_save, start_locus);
      gimple_set_location (stack_restore, end_locus);

      gimplify_seq_add_stmt (&cleanup, stack_restore);
    }

  /* Add clobbers for all variables that go out of scope.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
    {
      if (VAR_P (t)
	  && !is_global_var (t)
	  && DECL_CONTEXT (t) == current_function_decl)
	{
	  if (!DECL_HARD_REGISTER (t)
	      && !TREE_THIS_VOLATILE (t)
	      && !DECL_HAS_VALUE_EXPR_P (t)
	      /* Only care for variables that have to be in memory.  Others
		 will be rewritten into SSA names, hence moved to the
		 top-level.  */
	      && !is_gimple_reg (t)
	      && flag_stack_reuse != SR_NONE)
	    {
	      tree clobber = build_clobber (TREE_TYPE (t));
	      gimple *clobber_stmt;
	      clobber_stmt = gimple_build_assign (t, clobber);
	      gimple_set_location (clobber_stmt, end_locus);
	      gimplify_seq_add_stmt (&cleanup, clobber_stmt);
	    }

	  if (flag_openacc && oacc_declare_returns != NULL)
	    {
	      tree key = t;
	      if (DECL_HAS_VALUE_EXPR_P (key))
		{
		  key = DECL_VALUE_EXPR (key);
		  if (TREE_CODE (key) == INDIRECT_REF)
		    key = TREE_OPERAND (key, 0);
		}
	      tree *c = oacc_declare_returns->get (key);
	      if (c != NULL)
		{
		  if (ret_clauses)
		    OMP_CLAUSE_CHAIN (*c) = ret_clauses;

		  ret_clauses = unshare_expr (*c);

		  oacc_declare_returns->remove (key);

		  if (oacc_declare_returns->is_empty ())
		    {
		      delete oacc_declare_returns;
		      oacc_declare_returns = NULL;
		    }
		}
	    }
	}

      if (asan_poisoned_variables != NULL
	  && asan_poisoned_variables->contains (t))
	{
	  asan_poisoned_variables->remove (t);
	  asan_poison_variable (t, true, &cleanup);
	}

      if (gimplify_ctxp->live_switch_vars != NULL
	  && gimplify_ctxp->live_switch_vars->contains (t))
	gimplify_ctxp->live_switch_vars->remove (t);
    }

  if (ret_clauses)
    {
      gomp_target *stmt;
      gimple_stmt_iterator si = gsi_start (cleanup);

      stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
				      ret_clauses);
      gsi_insert_seq_before_without_update (&si, stmt, GSI_NEW_STMT);
    }

  if (cleanup)
    {
      gtry *gs;
      gimple_seq new_body;

      new_body = NULL;
      gs = gimple_build_try (gimple_bind_body (bind_stmt), cleanup,
			     GIMPLE_TRY_FINALLY);

      if (stack_save)
	gimplify_seq_add_stmt (&new_body, stack_save);
      gimplify_seq_add_stmt (&new_body, gs);
      gimple_bind_set_body (bind_stmt, new_body);
    }

  /* keep_stack propagates all the way up to the outermost BIND_EXPR.  */
  if (!gimplify_ctxp->keep_stack)
    gimplify_ctxp->keep_stack = old_keep_stack;
  gimplify_ctxp->save_stack = old_save_stack;

  gimple_pop_bind_expr ();

  gimplify_seq_add_stmt (pre_p, bind_stmt);

  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL_TREE;
  return GS_ALL_DONE;
}
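
/* For a block that needs both a stack save and cleanups, the net effect
   of the above is a GIMPLE_BIND whose body has the shape

     saved_stack.N = __builtin_stack_save ();
     try
       {
	 ... gimplified body ...
       }
     finally
       {
	 __builtin_stack_restore (saved_stack.N);
	 var = {CLOBBER};
       }

   so both stack space and variable storage are reclaimed on every exit
   from the block.  */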

/* Maybe add early return predict statement to PRE_P sequence.  */

static void
maybe_add_early_return_predict_stmt (gimple_seq *pre_p)
{
  /* If we are in a conditional context, add a PREDICT statement marking
     the early return as not taken.  */
  if (gimple_conditional_context ())
    {
      gimple *predict = gimple_build_predict (PRED_TREE_EARLY_RETURN,
					      NOT_TAKEN);
      gimplify_seq_add_stmt (pre_p, predict);
    }
}

/* Gimplify a RETURN_EXPR.  If the expression to be returned is not a
   GIMPLE value, it is assigned to a new temporary and the statement is
   re-written to return the temporary.

   PRE_P points to the sequence where side effects that must happen before
   STMT should be stored.  */

static enum gimplify_status
gimplify_return_expr (tree stmt, gimple_seq *pre_p)
{
  greturn *ret;
  tree ret_expr = TREE_OPERAND (stmt, 0);
  tree result_decl, result;

  if (ret_expr == error_mark_node)
    return GS_ERROR;

  if (!ret_expr
      || TREE_CODE (ret_expr) == RESULT_DECL)
    {
      maybe_add_early_return_predict_stmt (pre_p);
      greturn *ret = gimple_build_return (ret_expr);
      gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
      gimplify_seq_add_stmt (pre_p, ret);
      return GS_ALL_DONE;
    }

  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
    result_decl = NULL_TREE;
  else if (TREE_CODE (ret_expr) == COMPOUND_EXPR)
    {
      /* Used in C++ for handling EH cleanup of the return value if a local
	 cleanup throws.  Assume the front-end knows what it's doing.  */
      result_decl = DECL_RESULT (current_function_decl);
      /* But crash if we end up trying to modify ret_expr below.  */
      ret_expr = NULL_TREE;
    }
  else
    {
      result_decl = TREE_OPERAND (ret_expr, 0);

      /* See through a return by reference.  */
      if (TREE_CODE (result_decl) == INDIRECT_REF)
	result_decl = TREE_OPERAND (result_decl, 0);

      gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
		   || TREE_CODE (ret_expr) == INIT_EXPR)
		  && TREE_CODE (result_decl) == RESULT_DECL);
    }

  /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
     Recall that aggregate_value_p is FALSE for any aggregate type that is
     returned in registers.  If we're returning values in registers, then
     we don't want to extend the lifetime of the RESULT_DECL, particularly
     across another call.  In addition, for those aggregates for which
     hard_function_value generates a PARALLEL, we'll die during normal
     expansion of structure assignments; there's special code in expand_return
     to handle this case that does not exist in expand_expr.  */
  if (!result_decl)
    result = NULL_TREE;
  else if (aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
    {
      if (!poly_int_tree_p (DECL_SIZE (result_decl)))
	{
	  if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl)))
	    gimplify_type_sizes (TREE_TYPE (result_decl), pre_p);
	  /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
	     should be effectively allocated by the caller, i.e. all calls to
	     this function must be subject to the Return Slot Optimization.  */
	  gimplify_one_sizepos (&DECL_SIZE (result_decl), pre_p);
	  gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl), pre_p);
	}
      result = result_decl;
    }
  else if (gimplify_ctxp->return_temp)
    result = gimplify_ctxp->return_temp;
  else
    {
      result = create_tmp_reg (TREE_TYPE (result_decl));

      /* ??? With complex control flow (usually involving abnormal edges),
	 we can wind up warning about an uninitialized value for this.  Due
	 to how this variable is constructed and initialized, this is never
	 true.  Give up and never warn.  */
      TREE_NO_WARNING (result) = 1;

      gimplify_ctxp->return_temp = result;
    }

  /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
     Then gimplify the whole thing.  */
  if (result != result_decl)
    TREE_OPERAND (ret_expr, 0) = result;

  gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);

  maybe_add_early_return_predict_stmt (pre_p);
  ret = gimple_build_return (result);
  gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
  gimplify_seq_add_stmt (pre_p, ret);

  return GS_ALL_DONE;
}
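
/* For example, "return a + b;" in a function returning int becomes
   roughly

     tmp = a + b;
     return tmp;

   where TMP is the single return temporary shared by all return
   statements in the function (gimplify_ctxp->return_temp).  */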

/* Gimplify a variable-length array DECL.  */

static void
gimplify_vla_decl (tree decl, gimple_seq *seq_p)
{
  /* This is a variable-sized decl.  Simplify its size and mark it
     for deferred expansion.  */
  tree t, addr, ptr_type;

  gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
  gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);

  /* Don't mess with a DECL_VALUE_EXPR set by the front-end.  */
  if (DECL_HAS_VALUE_EXPR_P (decl))
    return;

  /* All occurrences of this decl in final gimplified code will be
     replaced by indirection.  Setting DECL_VALUE_EXPR does two
     things: First, it lets the rest of the gimplifier know what
     replacement to use.  Second, it lets the debug info know
     where to find the value.  */
  ptr_type = build_pointer_type (TREE_TYPE (decl));
  addr = create_tmp_var (ptr_type, get_name (decl));
  DECL_IGNORED_P (addr) = 0;
  t = build_fold_indirect_ref (addr);
  TREE_THIS_NOTRAP (t) = 1;
  SET_DECL_VALUE_EXPR (decl, t);
  DECL_HAS_VALUE_EXPR_P (decl) = 1;

  t = build_alloca_call_expr (DECL_SIZE_UNIT (decl), DECL_ALIGN (decl),
			      max_int_size_in_bytes (TREE_TYPE (decl)));
  /* The call has been built for a variable-sized object.  */
  CALL_ALLOCA_FOR_VAR_P (t) = 1;
  t = fold_convert (ptr_type, t);
  t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);

  gimplify_and_add (t, seq_p);

  /* Record the dynamic allocation associated with DECL if requested.  */
  if (flag_callgraph_info & CALLGRAPH_INFO_DYNAMIC_ALLOC)
    record_dynamic_alloc (decl);
}
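
/* For instance, "int a[n];" is lowered to roughly

     a.N = __builtin_alloca_with_align (sz, align);

   with DECL_VALUE_EXPR (a) set to "*a.N", so all later uses of the array
   are rewritten as indirections through the temporary pointer.  */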

/* A helper function to be called via walk_tree.  Mark all labels under *TP
   as being forced.  To be called for DECL_INITIAL of static variables.  */

static tree
force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  if (TYPE_P (*tp))
    *walk_subtrees = 0;
  if (TREE_CODE (*tp) == LABEL_DECL)
    {
      FORCED_LABEL (*tp) = 1;
      cfun->has_forced_label_in_static = 1;
    }

  return NULL_TREE;
}

/* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
   and initialization explicit.  */

static enum gimplify_status
gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
{
  tree stmt = *stmt_p;
  tree decl = DECL_EXPR_DECL (stmt);

  *stmt_p = NULL_TREE;

  if (TREE_TYPE (decl) == error_mark_node)
    return GS_ERROR;

  if ((TREE_CODE (decl) == TYPE_DECL
       || VAR_P (decl))
      && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
    {
      gimplify_type_sizes (TREE_TYPE (decl), seq_p);
      if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
	gimplify_type_sizes (TREE_TYPE (TREE_TYPE (decl)), seq_p);
    }

  /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
     in case its size expressions contain problematic nodes like CALL_EXPR.  */
  if (TREE_CODE (decl) == TYPE_DECL
      && DECL_ORIGINAL_TYPE (decl)
      && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl)))
    {
      gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl), seq_p);
      if (TREE_CODE (DECL_ORIGINAL_TYPE (decl)) == REFERENCE_TYPE)
	gimplify_type_sizes (TREE_TYPE (DECL_ORIGINAL_TYPE (decl)), seq_p);
    }

  if (VAR_P (decl) && !DECL_EXTERNAL (decl))
    {
      tree init = DECL_INITIAL (decl);
      bool is_vla = false;

      poly_uint64 size;
      if (!poly_int_tree_p (DECL_SIZE_UNIT (decl), &size)
	  || (!TREE_STATIC (decl)
	      && flag_stack_check == GENERIC_STACK_CHECK
	      && maybe_gt (size,
			   (unsigned HOST_WIDE_INT) STACK_CHECK_MAX_VAR_SIZE)))
	{
	  gimplify_vla_decl (decl, seq_p);
	  is_vla = true;
	}

      if (asan_poisoned_variables
	  && !is_vla
	  && TREE_ADDRESSABLE (decl)
	  && !TREE_STATIC (decl)
	  && !DECL_HAS_VALUE_EXPR_P (decl)
	  && DECL_ALIGN (decl) <= MAX_SUPPORTED_STACK_ALIGNMENT
	  && dbg_cnt (asan_use_after_scope)
	  && !gimplify_omp_ctxp)
	{
	  asan_poisoned_variables->add (decl);
	  asan_poison_variable (decl, false, seq_p);
	  if (!DECL_ARTIFICIAL (decl) && gimplify_ctxp->live_switch_vars)
	    gimplify_ctxp->live_switch_vars->add (decl);
	}

      /* Some front ends do not explicitly declare all anonymous
	 artificial variables.  We compensate here by declaring the
	 variables, though it would be better if the front ends would
	 explicitly declare them.  */
      if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
	  && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
	gimple_add_tmp_var (decl);

      if (init && init != error_mark_node)
	{
	  if (!TREE_STATIC (decl))
	    {
	      DECL_INITIAL (decl) = NULL_TREE;
	      init = build2 (INIT_EXPR, void_type_node, decl, init);
	      gimplify_and_add (init, seq_p);
	      ggc_free (init);
	    }
	  else
	    /* We must still examine initializers for static variables
	       as they may contain a label address.  */
	    walk_tree (&init, force_labels_r, NULL, NULL);
	}
    }

  return GS_ALL_DONE;
}

/* Gimplify a LOOP_EXPR.  Normally this just involves gimplifying the body
   and replacing the LOOP_EXPR with goto, but if the loop contains an
   EXIT_EXPR, we need to append a label for it to jump to.  */

static enum gimplify_status
gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree saved_label = gimplify_ctxp->exit_label;
  tree start_label = create_artificial_label (UNKNOWN_LOCATION);

  gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));

  gimplify_ctxp->exit_label = NULL_TREE;

  gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);

  gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));

  if (gimplify_ctxp->exit_label)
    gimplify_seq_add_stmt (pre_p,
			   gimple_build_label (gimplify_ctxp->exit_label));

  gimplify_ctxp->exit_label = saved_label;

  *expr_p = NULL;
  return GS_ALL_DONE;
}
1856
1857 /* Gimplify a statement list onto a sequence. These may be created either
1858 by an enlightened front-end, or by shortcut_cond_expr. */
1859
1860 static enum gimplify_status
1861 gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
1862 {
1863 tree temp = voidify_wrapper_expr (*expr_p, NULL);
1864
1865 tree_stmt_iterator i = tsi_start (*expr_p);
1866
1867 while (!tsi_end_p (i))
1868 {
1869 gimplify_stmt (tsi_stmt_ptr (i), pre_p);
1870 tsi_delink (&i);
1871 }
1872
1873 if (temp)
1874 {
1875 *expr_p = temp;
1876 return GS_OK;
1877 }
1878
1879 return GS_ALL_DONE;
1880 }
1881
1882 /* Callback for walk_gimple_seq. */
1883
1884 static tree
1885 warn_switch_unreachable_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
1886 struct walk_stmt_info *wi)
1887 {
1888 gimple *stmt = gsi_stmt (*gsi_p);
1889
1890 *handled_ops_p = true;
1891 switch (gimple_code (stmt))
1892 {
1893 case GIMPLE_TRY:
1894 /* A compiler-generated cleanup or a user-written try block.
1895 If it's empty, don't dive into it--that would result in
1896 worse location info. */
1897 if (gimple_try_eval (stmt) == NULL)
1898 {
1899 wi->info = stmt;
1900 return integer_zero_node;
1901 }
1902 /* Fall through. */
1903 case GIMPLE_BIND:
1904 case GIMPLE_CATCH:
1905 case GIMPLE_EH_FILTER:
1906 case GIMPLE_TRANSACTION:
1907 /* Walk the sub-statements. */
1908 *handled_ops_p = false;
1909 break;
1910
1911 case GIMPLE_DEBUG:
1912 /* Ignore these. We may generate them before declarations that
1913 are never executed. If there's something to warn about,
1914 there will be non-debug stmts too, and we'll catch those. */
1915 break;
1916
1917 case GIMPLE_CALL:
1918 if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
1919 {
1920 *handled_ops_p = false;
1921 break;
1922 }
1923 /* Fall through. */
1924 default:
1925 /* Save the first "real" statement (not a decl/lexical scope/...). */
1926 wi->info = stmt;
1927 return integer_zero_node;
1928 }
1929 return NULL_TREE;
1930 }
1931
1932 /* Possibly warn about unreachable statements between switch's controlling
1933 expression and the first case. SEQ is the body of a switch expression. */
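/* For instance (illustrative):

     switch (x)
       {
         foo ();   <-- "statement will never be executed"
       case 1:
         break;
       }

   because the call sits between the controlling expression and the
   first case label. */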
1934
1935 static void
1936 maybe_warn_switch_unreachable (gimple_seq seq)
1937 {
1938 if (!warn_switch_unreachable
1939 /* This warning doesn't play well with Fortran when optimizations
1940 are on. */
1941 || lang_GNU_Fortran ()
1942 || seq == NULL)
1943 return;
1944
1945 struct walk_stmt_info wi;
1946 memset (&wi, 0, sizeof (wi));
1947 walk_gimple_seq (seq, warn_switch_unreachable_r, NULL, &wi);
1948 gimple *stmt = (gimple *) wi.info;
1949
1950 if (stmt && gimple_code (stmt) != GIMPLE_LABEL)
1951 {
1952 if (gimple_code (stmt) == GIMPLE_GOTO
1953 && TREE_CODE (gimple_goto_dest (stmt)) == LABEL_DECL
1954 && DECL_ARTIFICIAL (gimple_goto_dest (stmt)))
1955 /* Don't warn for compiler-generated gotos. These occur
1956 in Duff's devices, for example. */;
1957 else
1958 warning_at (gimple_location (stmt), OPT_Wswitch_unreachable,
1959 "statement will never be executed");
1960 }
1961 }
1962
1963
1964 /* A label entry that pairs label and a location. */
1965 struct label_entry
1966 {
1967 tree label;
1968 location_t loc;
1969 };
1970
1971 /* Find LABEL in vector of label entries VEC. */
1972
1973 static struct label_entry *
1974 find_label_entry (const auto_vec<struct label_entry> *vec, tree label)
1975 {
1976 unsigned int i;
1977 struct label_entry *l;
1978
1979 FOR_EACH_VEC_ELT (*vec, i, l)
1980 if (l->label == label)
1981 return l;
1982 return NULL;
1983 }
1984
1985 /* Return true if LABEL, a LABEL_DECL, represents a case label
1986 in a vector of labels CASES. */
1987
1988 static bool
1989 case_label_p (const vec<tree> *cases, tree label)
1990 {
1991 unsigned int i;
1992 tree l;
1993
1994 FOR_EACH_VEC_ELT (*cases, i, l)
1995 if (CASE_LABEL (l) == label)
1996 return true;
1997 return false;
1998 }
1999
2000 /* Find the last nondebug statement in a scope STMT. */
2001
2002 static gimple *
2003 last_stmt_in_scope (gimple *stmt)
2004 {
2005 if (!stmt)
2006 return NULL;
2007
2008 switch (gimple_code (stmt))
2009 {
2010 case GIMPLE_BIND:
2011 {
2012 gbind *bind = as_a <gbind *> (stmt);
2013 stmt = gimple_seq_last_nondebug_stmt (gimple_bind_body (bind));
2014 return last_stmt_in_scope (stmt);
2015 }
2016
2017 case GIMPLE_TRY:
2018 {
2019 gtry *try_stmt = as_a <gtry *> (stmt);
2020 stmt = gimple_seq_last_nondebug_stmt (gimple_try_eval (try_stmt));
2021 gimple *last_eval = last_stmt_in_scope (stmt);
2022 if (gimple_stmt_may_fallthru (last_eval)
2023 && (last_eval == NULL
2024 || !gimple_call_internal_p (last_eval, IFN_FALLTHROUGH))
2025 && gimple_try_kind (try_stmt) == GIMPLE_TRY_FINALLY)
2026 {
2027 stmt = gimple_seq_last_nondebug_stmt (gimple_try_cleanup (try_stmt));
2028 return last_stmt_in_scope (stmt);
2029 }
2030 else
2031 return last_eval;
2032 }
2033
2034 case GIMPLE_DEBUG:
2035 gcc_unreachable ();
2036
2037 default:
2038 return stmt;
2039 }
2040 }
2041
2042 /* Collect interesting labels in LABELS and return the statement preceding
2043 another case label, or a user-defined label. Store a location useful
2044 to give warnings at *PREVLOC (usually the location of the returned
2045 statement or of its surrounding scope). */
2046
2047 static gimple *
2048 collect_fallthrough_labels (gimple_stmt_iterator *gsi_p,
2049 auto_vec <struct label_entry> *labels,
2050 location_t *prevloc)
2051 {
2052 gimple *prev = NULL;
2053
2054 *prevloc = UNKNOWN_LOCATION;
2055 do
2056 {
2057 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND)
2058 {
2059 /* Recognize the special GIMPLE_BIND added by gimplify_switch_expr,
2060 which starts on a GIMPLE_SWITCH and ends with a break label.
2061 Handle that as a single statement that can fall through. */
2062 gbind *bind = as_a <gbind *> (gsi_stmt (*gsi_p));
2063 gimple *first = gimple_seq_first_stmt (gimple_bind_body (bind));
2064 gimple *last = gimple_seq_last_stmt (gimple_bind_body (bind));
2065 if (last
2066 && gimple_code (first) == GIMPLE_SWITCH
2067 && gimple_code (last) == GIMPLE_LABEL)
2068 {
2069 tree label = gimple_label_label (as_a <glabel *> (last));
2070 if (SWITCH_BREAK_LABEL_P (label))
2071 {
2072 prev = bind;
2073 gsi_next (gsi_p);
2074 continue;
2075 }
2076 }
2077 }
2078 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND
2079 || gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_TRY)
2080 {
2081 /* Nested scope. Only look at the last statement of
2082 the innermost scope. */
2083 location_t bind_loc = gimple_location (gsi_stmt (*gsi_p));
2084 gimple *last = last_stmt_in_scope (gsi_stmt (*gsi_p));
2085 if (last)
2086 {
2087 prev = last;
2088 /* It might be a label without a location. Use the
2089 location of the scope then. */
2090 if (!gimple_has_location (prev))
2091 *prevloc = bind_loc;
2092 }
2093 gsi_next (gsi_p);
2094 continue;
2095 }
2096
2097 /* Ifs are tricky: follow both arms to see what may fall through. */
2098 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_COND)
2099 {
2100 gcond *cond_stmt = as_a <gcond *> (gsi_stmt (*gsi_p));
2101 tree false_lab = gimple_cond_false_label (cond_stmt);
2102 location_t if_loc = gimple_location (cond_stmt);
2103
2104 /* If we have e.g.
2105 if (i > 1) goto <D.2259>; else goto D;
2106 we can't do much with the else-branch. */
2107 if (!DECL_ARTIFICIAL (false_lab))
2108 break;
2109
2110 /* Go on until the false label, then one step back. */
2111 for (; !gsi_end_p (*gsi_p); gsi_next (gsi_p))
2112 {
2113 gimple *stmt = gsi_stmt (*gsi_p);
2114 if (gimple_code (stmt) == GIMPLE_LABEL
2115 && gimple_label_label (as_a <glabel *> (stmt)) == false_lab)
2116 break;
2117 }
2118
2119 /* Not found? Oops. */
2120 if (gsi_end_p (*gsi_p))
2121 break;
2122
2123 struct label_entry l = { false_lab, if_loc };
2124 labels->safe_push (l);
2125
2126 /* Go to the last statement of the then branch. */
2127 gsi_prev (gsi_p);
2128
2129 /* if (i != 0) goto <D.1759>; else goto <D.1760>;
2130 <D.1759>:
2131 <stmt>;
2132 goto <D.1761>;
2133 <D.1760>:
2134 */
2135 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_GOTO
2136 && !gimple_has_location (gsi_stmt (*gsi_p)))
2137 {
2138 /* Look at the statement before, it might be
2139 attribute fallthrough, in which case don't warn. */
2140 gsi_prev (gsi_p);
2141 bool fallthru_before_dest
2142 = gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_FALLTHROUGH);
2143 gsi_next (gsi_p);
2144 tree goto_dest = gimple_goto_dest (gsi_stmt (*gsi_p));
2145 if (!fallthru_before_dest)
2146 {
2147 struct label_entry l = { goto_dest, if_loc };
2148 labels->safe_push (l);
2149 }
2150 }
2151 /* And move back. */
2152 gsi_next (gsi_p);
2153 }
2154
2155 /* Remember the last statement. Skip labels that are of no interest
2156 to us. */
2157 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
2158 {
2159 tree label = gimple_label_label (as_a <glabel *> (gsi_stmt (*gsi_p)));
2160 if (find_label_entry (labels, label))
2161 prev = gsi_stmt (*gsi_p);
2162 }
2163 else if (gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_ASAN_MARK))
2164 ;
2165 else if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_PREDICT)
2166 ;
2167 else if (!is_gimple_debug (gsi_stmt (*gsi_p)))
2168 prev = gsi_stmt (*gsi_p);
2169 gsi_next (gsi_p);
2170 }
2171 while (!gsi_end_p (*gsi_p)
2172 /* Stop if we find a case or a user-defined label. */
2173 && (gimple_code (gsi_stmt (*gsi_p)) != GIMPLE_LABEL
2174 || !gimple_has_location (gsi_stmt (*gsi_p))));
2175
2176 if (prev && gimple_has_location (prev))
2177 *prevloc = gimple_location (prev);
2178 return prev;
2179 }
2180
2181 /* Return true if the switch fallthrough warning should occur. LABEL is
2182 the label statement that we're falling through to. */
2183
2184 static bool
2185 should_warn_for_implicit_fallthrough (gimple_stmt_iterator *gsi_p, tree label)
2186 {
2187 gimple_stmt_iterator gsi = *gsi_p;
2188
2189 /* Don't warn if the label is marked with a "falls through" comment. */
2190 if (FALLTHROUGH_LABEL_P (label))
2191 return false;
2192
2193 /* Don't warn for non-case labels followed by a statement:
2194 case 0:
2195 foo ();
2196 label:
2197 bar ();
2198 as these are likely intentional. */
2199 if (!case_label_p (&gimplify_ctxp->case_labels, label))
2200 {
2201 tree l;
2202 while (!gsi_end_p (gsi)
2203 && gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
2204 && (l = gimple_label_label (as_a <glabel *> (gsi_stmt (gsi))))
2205 && !case_label_p (&gimplify_ctxp->case_labels, l))
2206 gsi_next_nondebug (&gsi);
2207 if (gsi_end_p (gsi) || gimple_code (gsi_stmt (gsi)) != GIMPLE_LABEL)
2208 return false;
2209 }
2210
2211 /* Don't warn for terminated branches, i.e. when the code following the
2212 subsequent case labels immediately breaks. */
2213 gsi = *gsi_p;
2214
2215 /* Skip all immediately following labels. */
2216 while (!gsi_end_p (gsi)
2217 && (gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
2218 || gimple_code (gsi_stmt (gsi)) == GIMPLE_PREDICT))
2219 gsi_next_nondebug (&gsi);
2220
2221 /* { ... something; default:; } */
2222 if (gsi_end_p (gsi)
2223 /* { ... something; default: break; } or
2224 { ... something; default: goto L; } */
2225 || gimple_code (gsi_stmt (gsi)) == GIMPLE_GOTO
2226 /* { ... something; default: return; } */
2227 || gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
2228 return false;
2229
2230 return true;
2231 }
2232
2233 /* Callback for walk_gimple_seq. */
2234
2235 static tree
2236 warn_implicit_fallthrough_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
2237 struct walk_stmt_info *)
2238 {
2239 gimple *stmt = gsi_stmt (*gsi_p);
2240
2241 *handled_ops_p = true;
2242 switch (gimple_code (stmt))
2243 {
2244 case GIMPLE_TRY:
2245 case GIMPLE_BIND:
2246 case GIMPLE_CATCH:
2247 case GIMPLE_EH_FILTER:
2248 case GIMPLE_TRANSACTION:
2249 /* Walk the sub-statements. */
2250 *handled_ops_p = false;
2251 break;
2252
2253 /* Find a sequence of form:
2254
2255 GIMPLE_LABEL
2256 [...]
2257 <may fallthru stmt>
2258 GIMPLE_LABEL
2259
2260 and possibly warn. */
2261 case GIMPLE_LABEL:
2262 {
2263 /* Found a label. Skip all immediately following labels. */
2264 while (!gsi_end_p (*gsi_p)
2265 && gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
2266 gsi_next_nondebug (gsi_p);
2267
2268 /* There might be no more statements. */
2269 if (gsi_end_p (*gsi_p))
2270 return integer_zero_node;
2271
2272 /* Vector of labels that fall through. */
2273 auto_vec <struct label_entry> labels;
2274 location_t prevloc;
2275 gimple *prev = collect_fallthrough_labels (gsi_p, &labels, &prevloc);
2276
2277 /* There might be no more statements. */
2278 if (gsi_end_p (*gsi_p))
2279 return integer_zero_node;
2280
2281 gimple *next = gsi_stmt (*gsi_p);
2282 tree label;
2283 /* If what follows is a label, then we may have a fallthrough. */
2284 if (gimple_code (next) == GIMPLE_LABEL
2285 && gimple_has_location (next)
2286 && (label = gimple_label_label (as_a <glabel *> (next)))
2287 && prev != NULL)
2288 {
2289 struct label_entry *l;
2290 bool warned_p = false;
2291 auto_diagnostic_group d;
2292 if (!should_warn_for_implicit_fallthrough (gsi_p, label))
2293 /* Quiet. */;
2294 else if (gimple_code (prev) == GIMPLE_LABEL
2295 && (label = gimple_label_label (as_a <glabel *> (prev)))
2296 && (l = find_label_entry (&labels, label)))
2297 warned_p = warning_at (l->loc, OPT_Wimplicit_fallthrough_,
2298 "this statement may fall through");
2299 else if (!gimple_call_internal_p (prev, IFN_FALLTHROUGH)
2300 /* Try to be clever and don't warn when the statement
2301 can't actually fall through. */
2302 && gimple_stmt_may_fallthru (prev)
2303 && prevloc != UNKNOWN_LOCATION)
2304 warned_p = warning_at (prevloc,
2305 OPT_Wimplicit_fallthrough_,
2306 "this statement may fall through");
2307 if (warned_p)
2308 inform (gimple_location (next), "here");
2309
2310 /* Mark this label as processed so as to prevent multiple
2311 warnings in nested switches. */
2312 FALLTHROUGH_LABEL_P (label) = true;
2313
2314 /* So that next warn_implicit_fallthrough_r will start looking for
2315 a new sequence starting with this label. */
2316 gsi_prev (gsi_p);
2317 }
2318 }
2319 break;
2320 default:
2321 break;
2322 }
2323 return NULL_TREE;
2324 }
2325
2326 /* Warn when a switch case falls through. */
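/* For example (illustrative):

     switch (x)
       {
       case 0:
         foo ();   <-- "this statement may fall through"
       case 1:     <-- "here"
         bar ();
       }

   unless case 0 ends in a jump or is annotated as falling through. */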
2327
2328 static void
2329 maybe_warn_implicit_fallthrough (gimple_seq seq)
2330 {
2331 if (!warn_implicit_fallthrough)
2332 return;
2333
2334 /* This warning is meant for C/C++/ObjC/ObjC++ only. */
2335 if (!(lang_GNU_C ()
2336 || lang_GNU_CXX ()
2337 || lang_GNU_OBJC ()))
2338 return;
2339
2340 struct walk_stmt_info wi;
2341 memset (&wi, 0, sizeof (wi));
2342 walk_gimple_seq (seq, warn_implicit_fallthrough_r, NULL, &wi);
2343 }
2344
2345 /* Callback for walk_gimple_seq. */
2346
2347 static tree
2348 expand_FALLTHROUGH_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
2349 struct walk_stmt_info *wi)
2350 {
2351 gimple *stmt = gsi_stmt (*gsi_p);
2352
2353 *handled_ops_p = true;
2354 switch (gimple_code (stmt))
2355 {
2356 case GIMPLE_TRY:
2357 case GIMPLE_BIND:
2358 case GIMPLE_CATCH:
2359 case GIMPLE_EH_FILTER:
2360 case GIMPLE_TRANSACTION:
2361 /* Walk the sub-statements. */
2362 *handled_ops_p = false;
2363 break;
2364 case GIMPLE_CALL:
2365 if (gimple_call_internal_p (stmt, IFN_FALLTHROUGH))
2366 {
2367 gsi_remove (gsi_p, true);
2368 if (gsi_end_p (*gsi_p))
2369 {
2370 *static_cast<location_t *>(wi->info) = gimple_location (stmt);
2371 return integer_zero_node;
2372 }
2373
2374 bool found = false;
2375 location_t loc = gimple_location (stmt);
2376
2377 gimple_stmt_iterator gsi2 = *gsi_p;
2378 stmt = gsi_stmt (gsi2);
2379 if (gimple_code (stmt) == GIMPLE_GOTO && !gimple_has_location (stmt))
2380 {
2381 /* Go on until the artificial label. */
2382 tree goto_dest = gimple_goto_dest (stmt);
2383 for (; !gsi_end_p (gsi2); gsi_next (&gsi2))
2384 {
2385 if (gimple_code (gsi_stmt (gsi2)) == GIMPLE_LABEL
2386 && gimple_label_label (as_a <glabel *> (gsi_stmt (gsi2)))
2387 == goto_dest)
2388 break;
2389 }
2390
2391 /* Not found? Stop. */
2392 if (gsi_end_p (gsi2))
2393 break;
2394
2395 /* Look one past it. */
2396 gsi_next (&gsi2);
2397 }
2398
2399 /* We're looking for a case label or default label here. */
2400 while (!gsi_end_p (gsi2))
2401 {
2402 stmt = gsi_stmt (gsi2);
2403 if (gimple_code (stmt) == GIMPLE_LABEL)
2404 {
2405 tree label = gimple_label_label (as_a <glabel *> (stmt));
2406 if (gimple_has_location (stmt) && DECL_ARTIFICIAL (label))
2407 {
2408 found = true;
2409 break;
2410 }
2411 }
2412 else if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
2413 ;
2414 else if (!is_gimple_debug (stmt))
2415 /* Anything else is not expected. */
2416 break;
2417 gsi_next (&gsi2);
2418 }
2419 if (!found)
2420 pedwarn (loc, 0, "attribute %<fallthrough%> not preceding "
2421 "a case label or default label");
2422 }
2423 break;
2424 default:
2425 break;
2426 }
2427 return NULL_TREE;
2428 }
2429
2430 /* Expand all FALLTHROUGH () calls in SEQ. */
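/* Illustrative sketch: for

     case 0:
       foo ();
       __attribute__ ((fallthrough));
     case 1:

   the front end emits an IFN_FALLTHROUGH call after foo ();
   expand_FALLTHROUGH_r removes it again and verifies that a case or
   default label indeed follows, diagnosing misplaced attributes. */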
2431
2432 static void
2433 expand_FALLTHROUGH (gimple_seq *seq_p)
2434 {
2435 struct walk_stmt_info wi;
2436 location_t loc;
2437 memset (&wi, 0, sizeof (wi));
2438 wi.info = (void *) &loc;
2439 walk_gimple_seq_mod (seq_p, expand_FALLTHROUGH_r, NULL, &wi);
2440 if (wi.callback_result == integer_zero_node)
2441 /* We've found [[fallthrough]]; at the end of a switch, which the C++
2442 standard says is ill-formed; see [dcl.attr.fallthrough]. */
2443 pedwarn (loc, 0, "attribute %<fallthrough%> not preceding "
2444 "a case label or default label");
2445 }
2446
2447 \f
2448 /* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
2449 branch to. */
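/* Roughly (illustrative dump, label names invented):

     switch (x) { case 0: a = 1; break; default: a = 2; }

   gimplifies to

     switch (x) <default: <D.2>, case 0: <D.1>>
     <D.1>:
     a = 1;
     goto <D.3>;
     <D.2>:
     a = 2;
     <D.3>: */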
2450
2451 static enum gimplify_status
2452 gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
2453 {
2454 tree switch_expr = *expr_p;
2455 gimple_seq switch_body_seq = NULL;
2456 enum gimplify_status ret;
2457 tree index_type = TREE_TYPE (switch_expr);
2458 if (index_type == NULL_TREE)
2459 index_type = TREE_TYPE (SWITCH_COND (switch_expr));
2460
2461 ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
2462 fb_rvalue);
2463 if (ret == GS_ERROR || ret == GS_UNHANDLED)
2464 return ret;
2465
2466 if (SWITCH_BODY (switch_expr))
2467 {
2468 vec<tree> labels;
2469 vec<tree> saved_labels;
2470 hash_set<tree> *saved_live_switch_vars = NULL;
2471 tree default_case = NULL_TREE;
2472 gswitch *switch_stmt;
2473
2474 /* Save old labels, get new ones from body, then restore the old
2475 labels. Save all the things from the switch body to append after. */
2476 saved_labels = gimplify_ctxp->case_labels;
2477 gimplify_ctxp->case_labels.create (8);
2478
2479 /* Do not create live_switch_vars if SWITCH_BODY is not a BIND_EXPR
2480 or a STATEMENT_LIST. */
2480 saved_live_switch_vars = gimplify_ctxp->live_switch_vars;
2481 tree_code body_type = TREE_CODE (SWITCH_BODY (switch_expr));
2482 if (body_type == BIND_EXPR || body_type == STATEMENT_LIST)
2483 gimplify_ctxp->live_switch_vars = new hash_set<tree> (4);
2484 else
2485 gimplify_ctxp->live_switch_vars = NULL;
2486
2487 bool old_in_switch_expr = gimplify_ctxp->in_switch_expr;
2488 gimplify_ctxp->in_switch_expr = true;
2489
2490 gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);
2491
2492 gimplify_ctxp->in_switch_expr = old_in_switch_expr;
2493 maybe_warn_switch_unreachable (switch_body_seq);
2494 maybe_warn_implicit_fallthrough (switch_body_seq);
2495 /* Only do this for the outermost GIMPLE_SWITCH. */
2496 if (!gimplify_ctxp->in_switch_expr)
2497 expand_FALLTHROUGH (&switch_body_seq);
2498
2499 labels = gimplify_ctxp->case_labels;
2500 gimplify_ctxp->case_labels = saved_labels;
2501
2502 if (gimplify_ctxp->live_switch_vars)
2503 {
2504 gcc_assert (gimplify_ctxp->live_switch_vars->is_empty ());
2505 delete gimplify_ctxp->live_switch_vars;
2506 }
2507 gimplify_ctxp->live_switch_vars = saved_live_switch_vars;
2508
2509 preprocess_case_label_vec_for_gimple (labels, index_type,
2510 &default_case);
2511
2512 bool add_bind = false;
2513 if (!default_case)
2514 {
2515 glabel *new_default;
2516
2517 default_case
2518 = build_case_label (NULL_TREE, NULL_TREE,
2519 create_artificial_label (UNKNOWN_LOCATION));
2520 if (old_in_switch_expr)
2521 {
2522 SWITCH_BREAK_LABEL_P (CASE_LABEL (default_case)) = 1;
2523 add_bind = true;
2524 }
2525 new_default = gimple_build_label (CASE_LABEL (default_case));
2526 gimplify_seq_add_stmt (&switch_body_seq, new_default);
2527 }
2528 else if (old_in_switch_expr)
2529 {
2530 gimple *last = gimple_seq_last_stmt (switch_body_seq);
2531 if (last && gimple_code (last) == GIMPLE_LABEL)
2532 {
2533 tree label = gimple_label_label (as_a <glabel *> (last));
2534 if (SWITCH_BREAK_LABEL_P (label))
2535 add_bind = true;
2536 }
2537 }
2538
2539 switch_stmt = gimple_build_switch (SWITCH_COND (switch_expr),
2540 default_case, labels);
2541 /* For the benefit of -Wimplicit-fallthrough, if switch_body_seq
2542 ends with a GIMPLE_LABEL holding SWITCH_BREAK_LABEL_P LABEL_DECL,
2543 wrap the GIMPLE_SWITCH up to that GIMPLE_LABEL into a GIMPLE_BIND,
2544 so that we can easily find the start and end of the switch
2545 statement. */
2546 if (add_bind)
2547 {
2548 gimple_seq bind_body = NULL;
2549 gimplify_seq_add_stmt (&bind_body, switch_stmt);
2550 gimple_seq_add_seq (&bind_body, switch_body_seq);
2551 gbind *bind = gimple_build_bind (NULL_TREE, bind_body, NULL_TREE);
2552 gimple_set_location (bind, EXPR_LOCATION (switch_expr));
2553 gimplify_seq_add_stmt (pre_p, bind);
2554 }
2555 else
2556 {
2557 gimplify_seq_add_stmt (pre_p, switch_stmt);
2558 gimplify_seq_add_seq (pre_p, switch_body_seq);
2559 }
2560 labels.release ();
2561 }
2562 else
2563 gcc_unreachable ();
2564
2565 return GS_ALL_DONE;
2566 }
2567
2568 /* Gimplify the LABEL_EXPR pointed to by EXPR_P. */
2569
2570 static enum gimplify_status
2571 gimplify_label_expr (tree *expr_p, gimple_seq *pre_p)
2572 {
2573 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
2574 == current_function_decl);
2575
2576 tree label = LABEL_EXPR_LABEL (*expr_p);
2577 glabel *label_stmt = gimple_build_label (label);
2578 gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
2579 gimplify_seq_add_stmt (pre_p, label_stmt);
2580
2581 if (lookup_attribute ("cold", DECL_ATTRIBUTES (label)))
2582 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_COLD_LABEL,
2583 NOT_TAKEN));
2584 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label)))
2585 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_HOT_LABEL,
2586 TAKEN));
2587
2588 return GS_ALL_DONE;
2589 }
2590
2591 /* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P. */
2592
2593 static enum gimplify_status
2594 gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
2595 {
2596 struct gimplify_ctx *ctxp;
2597 glabel *label_stmt;
2598
2599 /* Invalid programs can play Duff's Device type games with, for example,
2600 #pragma omp parallel. At least in the C front end, we don't
2601 detect such invalid branches until after gimplification, in the
2602 diagnose_omp_blocks pass. */
2603 for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
2604 if (ctxp->case_labels.exists ())
2605 break;
2606
2607 tree label = CASE_LABEL (*expr_p);
2608 label_stmt = gimple_build_label (label);
2609 gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
2610 ctxp->case_labels.safe_push (*expr_p);
2611 gimplify_seq_add_stmt (pre_p, label_stmt);
2612
2613 if (lookup_attribute ("cold", DECL_ATTRIBUTES (label)))
2614 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_COLD_LABEL,
2615 NOT_TAKEN));
2616 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label)))
2617 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_HOT_LABEL,
2618 TAKEN));
2619
2620 return GS_ALL_DONE;
2621 }
2622
2623 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
2624 if necessary. */
2625
2626 tree
2627 build_and_jump (tree *label_p)
2628 {
2629 if (label_p == NULL)
2630 /* If there's nowhere to jump, just fall through. */
2631 return NULL_TREE;
2632
2633 if (*label_p == NULL_TREE)
2634 {
2635 tree label = create_artificial_label (UNKNOWN_LOCATION);
2636 *label_p = label;
2637 }
2638
2639 return build1 (GOTO_EXPR, void_type_node, *label_p);
2640 }
2641
2642 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
2643 This also involves building a label to jump to and communicating it to
2644 gimplify_loop_expr through gimplify_ctxp->exit_label. */
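/* E.g. (illustrative): EXIT_EXPR <COND> simply becomes

     if (COND) goto exit_label;

   where exit_label is created on demand and later emitted by
   gimplify_loop_expr. */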
2645
2646 static enum gimplify_status
2647 gimplify_exit_expr (tree *expr_p)
2648 {
2649 tree cond = TREE_OPERAND (*expr_p, 0);
2650 tree expr;
2651
2652 expr = build_and_jump (&gimplify_ctxp->exit_label);
2653 expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
2654 *expr_p = expr;
2655
2656 return GS_OK;
2657 }
2658
2659 /* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is
2660 different from its canonical type, wrap the whole thing inside a
2661 NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
2662 type.
2663
2664 The canonical type of a COMPONENT_REF is the type of the field being
2665 referenced--unless the field is a bit-field which can be read directly
2666 in a smaller mode, in which case the canonical type is the
2667 sign-appropriate type corresponding to that mode. */
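/* Illustrative, target-dependent example: given

     struct S { int f : 8; } s;

   a read of s.f may be performed directly in QImode on some targets,
   in which case the canonical type of the COMPONENT_REF is signed
   char rather than int. */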
2668
2669 static void
2670 canonicalize_component_ref (tree *expr_p)
2671 {
2672 tree expr = *expr_p;
2673 tree type;
2674
2675 gcc_assert (TREE_CODE (expr) == COMPONENT_REF);
2676
2677 if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
2678 type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
2679 else
2680 type = TREE_TYPE (TREE_OPERAND (expr, 1));
2681
2682 /* One could argue that all the stuff below is not necessary for
2683 the non-bitfield case, and that needing a type adjustment there
2684 should be declared a front-end error. */
2685 if (TREE_TYPE (expr) != type)
2686 {
2687 #ifdef ENABLE_TYPES_CHECKING
2688 tree old_type = TREE_TYPE (expr);
2689 #endif
2690 int type_quals;
2691
2692 /* We need to preserve qualifiers and propagate them from
2693 operand 0. */
2694 type_quals = TYPE_QUALS (type)
2695 | TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
2696 if (TYPE_QUALS (type) != type_quals)
2697 type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);
2698
2699 /* Set the type of the COMPONENT_REF to the underlying type. */
2700 TREE_TYPE (expr) = type;
2701
2702 #ifdef ENABLE_TYPES_CHECKING
2703 /* It is now a FE error, if the conversion from the canonical
2704 type to the original expression type is not useless. */
2705 gcc_assert (useless_type_conversion_p (old_type, type));
2706 #endif
2707 }
2708 }
2709
2710 /* If a NOP conversion is changing a pointer to array of foo to a pointer
2711 to foo, embed that change in the ADDR_EXPR by converting
2712 T array[U];
2713 (T *)&array
2714 ==>
2715 &array[L]
2716 where L is the lower bound. For simplicity, only do this for constant
2717 lower bound.
2718 The constraint is that the type of &array[L] is trivially convertible
2719 to T *. */
2720
2721 static void
2722 canonicalize_addr_expr (tree *expr_p)
2723 {
2724 tree expr = *expr_p;
2725 tree addr_expr = TREE_OPERAND (expr, 0);
2726 tree datype, ddatype, pddatype;
2727
2728 /* We simplify only conversions from an ADDR_EXPR to a pointer type. */
2729 if (!POINTER_TYPE_P (TREE_TYPE (expr))
2730 || TREE_CODE (addr_expr) != ADDR_EXPR)
2731 return;
2732
2733 /* The addr_expr type should be a pointer to an array. */
2734 datype = TREE_TYPE (TREE_TYPE (addr_expr));
2735 if (TREE_CODE (datype) != ARRAY_TYPE)
2736 return;
2737
2738 /* The pointer to element type shall be trivially convertible to
2739 the expression pointer type. */
2740 ddatype = TREE_TYPE (datype);
2741 pddatype = build_pointer_type (ddatype);
2742 if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
2743 pddatype))
2744 return;
2745
2746 /* The lower bound and element sizes must be constant. */
2747 if (!TYPE_SIZE_UNIT (ddatype)
2748 || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
2749 || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
2750 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
2751 return;
2752
2753 /* All checks succeeded. Build a new node to merge the cast. */
2754 *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
2755 TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
2756 NULL_TREE, NULL_TREE);
2757 *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);
2758
2759 /* We can have stripped a required restrict qualifier above. */
2760 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
2761 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
2762 }
2763
2764 /* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions
2765 underneath as appropriate. */
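/* E.g. (illustrative): in a chain such as (T1) (T2) (T3) x the inner
   conversions are stripped first, the outermost one is dropped if it
   is useless, and a remaining conversion to a non-register type
   (e.g. an aggregate) is rewritten as a VIEW_CONVERT_EXPR. */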
2766
2767 static enum gimplify_status
2768 gimplify_conversion (tree *expr_p)
2769 {
2770 location_t loc = EXPR_LOCATION (*expr_p);
2771 gcc_assert (CONVERT_EXPR_P (*expr_p));
2772
2773 /* Then strip away all but the outermost conversion. */
2774 STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));
2775
2776 /* And remove the outermost conversion if it's useless. */
2777 if (tree_ssa_useless_type_conversion (*expr_p))
2778 *expr_p = TREE_OPERAND (*expr_p, 0);
2779
2780 /* If we still have a conversion at the toplevel,
2781 then canonicalize some constructs. */
2782 if (CONVERT_EXPR_P (*expr_p))
2783 {
2784 tree sub = TREE_OPERAND (*expr_p, 0);
2785
2786 /* If a NOP conversion is changing the type of a COMPONENT_REF
2787 expression, then canonicalize its type now in order to expose more
2788 redundant conversions. */
2789 if (TREE_CODE (sub) == COMPONENT_REF)
2790 canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));
2791
2792 /* If a NOP conversion is changing a pointer to array of foo
2793 to a pointer to foo, embed that change in the ADDR_EXPR. */
2794 else if (TREE_CODE (sub) == ADDR_EXPR)
2795 canonicalize_addr_expr (expr_p);
2796 }
2797
2798 /* If we have a conversion to a non-register type force the
2799 use of a VIEW_CONVERT_EXPR instead. */
2800 if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
2801 *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
2802 TREE_OPERAND (*expr_p, 0));
2803
2804 /* Canonicalize CONVERT_EXPR to NOP_EXPR. */
2805 if (TREE_CODE (*expr_p) == CONVERT_EXPR)
2806 TREE_SET_CODE (*expr_p, NOP_EXPR);
2807
2808 return GS_OK;
2809 }
2810
2811 /* Gimplify a VAR_DECL or PARM_DECL. Return GS_OK if we expanded a
2812 DECL_VALUE_EXPR, and it's worth re-examining things. */
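/* E.g. (illustrative): for a VLA 'int a[n]', gimplify_vla_decl gives
   A a DECL_VALUE_EXPR along the lines of *a.ptr, so a use of A is
   replaced by that expression here and gimplified again. */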
2813
2814 static enum gimplify_status
2815 gimplify_var_or_parm_decl (tree *expr_p)
2816 {
2817 tree decl = *expr_p;
2818
2819 /* ??? If this is a local variable, and it has not been seen in any
2820 outer BIND_EXPR, then it's probably the result of a duplicate
2821 declaration, for which we've already issued an error. It would
2822 be really nice if the front end wouldn't leak these at all.
2823 Currently the only known culprit is C++ destructors, as seen
2824 in g++.old-deja/g++.jason/binding.C. */
2825 if (VAR_P (decl)
2826 && !DECL_SEEN_IN_BIND_EXPR_P (decl)
2827 && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
2828 && decl_function_context (decl) == current_function_decl)
2829 {
2830 gcc_assert (seen_error ());
2831 return GS_ERROR;
2832 }
2833
2834 /* When within an OMP context, notice uses of variables. */
2835 if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
2836 return GS_ALL_DONE;
2837
2838 /* If the decl is an alias for another expression, substitute it now. */
2839 if (DECL_HAS_VALUE_EXPR_P (decl))
2840 {
2841 *expr_p = unshare_expr (DECL_VALUE_EXPR (decl));
2842 return GS_OK;
2843 }
2844
2845 return GS_ALL_DONE;
2846 }
2847
2848 /* Recalculate the value of the TREE_SIDE_EFFECTS flag for T. */
2849
2850 static void
2851 recalculate_side_effects (tree t)
2852 {
2853 enum tree_code code = TREE_CODE (t);
2854 int len = TREE_OPERAND_LENGTH (t);
2855 int i;
2856
2857 switch (TREE_CODE_CLASS (code))
2858 {
2859 case tcc_expression:
2860 switch (code)
2861 {
2862 case INIT_EXPR:
2863 case MODIFY_EXPR:
2864 case VA_ARG_EXPR:
2865 case PREDECREMENT_EXPR:
2866 case PREINCREMENT_EXPR:
2867 case POSTDECREMENT_EXPR:
2868 case POSTINCREMENT_EXPR:
2869 /* All of these have side-effects, no matter what their
2870 operands are. */
2871 return;
2872
2873 default:
2874 break;
2875 }
2876 /* Fall through. */
2877
2878 case tcc_comparison: /* a comparison expression */
2879 case tcc_unary: /* a unary arithmetic expression */
2880 case tcc_binary: /* a binary arithmetic expression */
2881 case tcc_reference: /* a reference */
2882 case tcc_vl_exp: /* a function call */
2883 TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
2884 for (i = 0; i < len; ++i)
2885 {
2886 tree op = TREE_OPERAND (t, i);
2887 if (op && TREE_SIDE_EFFECTS (op))
2888 TREE_SIDE_EFFECTS (t) = 1;
2889 }
2890 break;
2891
2892 case tcc_constant:
2893 /* No side-effects. */
2894 return;
2895
2896 default:
2897 gcc_unreachable ();
2898 }
2899 }
2900
2901 /* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
2902 node *EXPR_P.
2903
2904 compound_lval
2905 : min_lval '[' val ']'
2906 | min_lval '.' ID
2907 | compound_lval '[' val ']'
2908 | compound_lval '.' ID
2909
2910 This is not part of the original SIMPLE definition, which separates
2911 array and member references, but it seems reasonable to handle them
2912 together. Also, this way we don't run into problems with union
2913 aliasing; gcc requires that for accesses through a union to alias, the
2914 union reference must be explicit, which was not always the case when we
2915 were splitting up array and member refs.
2916
2917 PRE_P points to the sequence where side effects that must happen before
2918 *EXPR_P should be stored.
2919
2920 POST_P points to the sequence where side effects that must happen after
2921 *EXPR_P should be stored. */
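/* For instance (illustrative): gimplifying a[i + 1].f as an rvalue
   pushes the index computation onto PRE_P:

     t1 = i + 1;
     ... = a[t1].f;

   while any variable low bounds, element sizes and field offsets are
   gimplified into operands 2 and 3 of the ARRAY_REF/COMPONENT_REF. */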
2922
2923 static enum gimplify_status
2924 gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
2925 fallback_t fallback)
2926 {
2927 tree *p;
2928 enum gimplify_status ret = GS_ALL_DONE, tret;
2929 int i;
2930 location_t loc = EXPR_LOCATION (*expr_p);
2931 tree expr = *expr_p;
2932
2933 /* Create a stack of the subexpressions so later we can walk them in
2934 order from inner to outer. */
2935 auto_vec<tree, 10> expr_stack;
2936
2937 /* We can handle anything that get_inner_reference can deal with. */
2938 for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
2939 {
2940 restart:
2941 /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs. */
2942 if (TREE_CODE (*p) == INDIRECT_REF)
2943 *p = fold_indirect_ref_loc (loc, *p);
2944
2945 if (handled_component_p (*p))
2946 ;
2947 /* Expand DECL_VALUE_EXPR now. In some cases that may expose
2948 additional COMPONENT_REFs. */
2949 else if ((VAR_P (*p) || TREE_CODE (*p) == PARM_DECL)
2950 && gimplify_var_or_parm_decl (p) == GS_OK)
2951 goto restart;
2952 else
2953 break;
2954
2955 expr_stack.safe_push (*p);
2956 }
2957
2958 gcc_assert (expr_stack.length ());
2959
2960 /* Now EXPR_STACK is a stack of pointers to all the refs we've
2961 walked through and P points to the innermost expression.
2962
2963 Java requires that we elaborate nodes in source order. That
2964 means we must gimplify the inner expression followed by each of
2965 the indices, in order. But we can't gimplify the inner
2966 expression until we deal with any variable bounds, sizes, or
2967 positions in order to deal with PLACEHOLDER_EXPRs.
2968
2969 So we do this in three steps. First we deal with the annotations
2970 for any variables in the components, then we gimplify the base,
2971 then we gimplify any indices, from left to right. */
2972 for (i = expr_stack.length () - 1; i >= 0; i--)
2973 {
2974 tree t = expr_stack[i];
2975
2976 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
2977 {
2978 /* Gimplify the low bound and element type size and put them into
2979 the ARRAY_REF. If these values are set, they have already been
2980 gimplified. */
2981 if (TREE_OPERAND (t, 2) == NULL_TREE)
2982 {
2983 tree low = unshare_expr (array_ref_low_bound (t));
2984 if (!is_gimple_min_invariant (low))
2985 {
2986 TREE_OPERAND (t, 2) = low;
2987 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
2988 post_p, is_gimple_reg,
2989 fb_rvalue);
2990 ret = MIN (ret, tret);
2991 }
2992 }
2993 else
2994 {
2995 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
2996 is_gimple_reg, fb_rvalue);
2997 ret = MIN (ret, tret);
2998 }
2999
3000 if (TREE_OPERAND (t, 3) == NULL_TREE)
3001 {
3002 tree elmt_size = array_ref_element_size (t);
3003 if (!is_gimple_min_invariant (elmt_size))
3004 {
3005 elmt_size = unshare_expr (elmt_size);
3006 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
3007 tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));
3008
3009 /* Divide the element size by the alignment of the element
3010 type (above). */
3011 elmt_size = size_binop_loc (loc, EXACT_DIV_EXPR,
3012 elmt_size, factor);
3013
3014 TREE_OPERAND (t, 3) = elmt_size;
3015 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
3016 post_p, is_gimple_reg,
3017 fb_rvalue);
3018 ret = MIN (ret, tret);
3019 }
3020 }
3021 else
3022 {
3023 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
3024 is_gimple_reg, fb_rvalue);
3025 ret = MIN (ret, tret);
3026 }
3027 }
3028 else if (TREE_CODE (t) == COMPONENT_REF)
3029 {
3030 /* Set the field offset into T and gimplify it. */
3031 if (TREE_OPERAND (t, 2) == NULL_TREE)
3032 {
3033 tree offset = component_ref_field_offset (t);
3034 if (!is_gimple_min_invariant (offset))
3035 {
3036 offset = unshare_expr (offset);
3037 tree field = TREE_OPERAND (t, 1);
3038 tree factor
3039 = size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);
3040
3041 /* Divide the offset by its alignment. */
3042 offset = size_binop_loc (loc, EXACT_DIV_EXPR,
3043 offset, factor);
3044
3045 TREE_OPERAND (t, 2) = offset;
3046 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
3047 post_p, is_gimple_reg,
3048 fb_rvalue);
3049 ret = MIN (ret, tret);
3050 }
3051 }
3052 else
3053 {
3054 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
3055 is_gimple_reg, fb_rvalue);
3056 ret = MIN (ret, tret);
3057 }
3058 }
3059 }
3060
3061 /* Step 2 is to gimplify the base expression. Make sure lvalue is set
3062 so as to match the min_lval predicate. Failure to do so may result
3063 in the creation of large aggregate temporaries. */
3064 tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
3065 fallback | fb_lvalue);
3066 ret = MIN (ret, tret);
3067
3068 /* And finally, the indices and operands of ARRAY_REF. During this
3069 loop we also remove any useless conversions. */
3070 for (; expr_stack.length () > 0; )
3071 {
3072 tree t = expr_stack.pop ();
3073
3074 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3075 {
3076 /* Gimplify the dimension. */
3077 if (!is_gimple_min_invariant (TREE_OPERAND (t, 1)))
3078 {
3079 tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
3080 is_gimple_val, fb_rvalue);
3081 ret = MIN (ret, tret);
3082 }
3083 }
3084
3085 STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));
3086
3087 /* The innermost expression P may have originally had
3088 TREE_SIDE_EFFECTS set which would have caused all the outer
3089 expressions in *EXPR_P leading to P to also have had
3090 TREE_SIDE_EFFECTS set. */
3091 recalculate_side_effects (t);
3092 }
3093
3094 /* If the outermost expression is a COMPONENT_REF, canonicalize its type. */
3095 if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
3096 {
3097 canonicalize_component_ref (expr_p);
3098 }
3099
3100 expr_stack.release ();
3101
3102 gcc_assert (*expr_p == expr || ret != GS_ALL_DONE);
3103
3104 return ret;
3105 }
3106
3107 /* Gimplify the self modifying expression pointed to by EXPR_P
3108 (++, --, +=, -=).
3109
3110 PRE_P points to the list where side effects that must happen before
3111 *EXPR_P should be stored.
3112
3113 POST_P points to the list where side effects that must happen after
3114 *EXPR_P should be stored.
3115
3116 WANT_VALUE is nonzero iff we want to use the value of this expression
3117 in another expression.
3118
3119 ARITH_TYPE is the type the computation should be performed in. */
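/* E.g. (illustrative): with WANT_VALUE set, 'i++' gimplifies to

     t = i;
     i = t + 1;

   and *EXPR_P becomes t; for a pointer 'p++' the addition is instead
   emitted as a POINTER_PLUS_EXPR with a sizetype offset. */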
3120
3121 enum gimplify_status
3122 gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3123 bool want_value, tree arith_type)
3124 {
3125 enum tree_code code;
3126 tree lhs, lvalue, rhs, t1;
3127 gimple_seq post = NULL, *orig_post_p = post_p;
3128 bool postfix;
3129 enum tree_code arith_code;
3130 enum gimplify_status ret;
3131 location_t loc = EXPR_LOCATION (*expr_p);
3132
3133 code = TREE_CODE (*expr_p);
3134
3135 gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
3136 || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);
3137
3138 /* Prefix or postfix? */
3139 if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
3140 /* Faster to treat as prefix if result is not used. */
3141 postfix = want_value;
3142 else
3143 postfix = false;
3144
3145 /* For postfix, make sure the inner expression's post side effects
3146 are executed after side effects from this expression. */
3147 if (postfix)
3148 post_p = &post;
3149
3150 /* Add or subtract? */
3151 if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
3152 arith_code = PLUS_EXPR;
3153 else
3154 arith_code = MINUS_EXPR;
3155
3156 /* Gimplify the LHS into a GIMPLE lvalue. */
3157 lvalue = TREE_OPERAND (*expr_p, 0);
3158 ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
3159 if (ret == GS_ERROR)
3160 return ret;
3161
3162 /* Extract the operands to the arithmetic operation. */
3163 lhs = lvalue;
3164 rhs = TREE_OPERAND (*expr_p, 1);
3165
3166 /* For a postfix operator, we evaluate the LHS to an rvalue and then use
3167 that both as the result value and in the postqueue operation. */
3168 if (postfix)
3169 {
3170 ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
3171 if (ret == GS_ERROR)
3172 return ret;
3173
3174 lhs = get_initialized_tmp_var (lhs, pre_p);
3175 }
3176
3177 /* For pointer increment and decrement, use POINTER_PLUS_EXPR. */
3178 if (POINTER_TYPE_P (TREE_TYPE (lhs)))
3179 {
3180 rhs = convert_to_ptrofftype_loc (loc, rhs);
3181 if (arith_code == MINUS_EXPR)
3182 rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
3183 t1 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (*expr_p), lhs, rhs);
3184 }
3185 else
3186 t1 = fold_convert (TREE_TYPE (*expr_p),
3187 fold_build2 (arith_code, arith_type,
3188 fold_convert (arith_type, lhs),
3189 fold_convert (arith_type, rhs)));
3190
3191 if (postfix)
3192 {
3193 gimplify_assign (lvalue, t1, pre_p);
3194 gimplify_seq_add_seq (orig_post_p, post);
3195 *expr_p = lhs;
3196 return GS_ALL_DONE;
3197 }
3198 else
3199 {
3200 *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
3201 return GS_OK;
3202 }
3203 }
3204
3205 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
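/* E.g. (illustrative): passing an object whose type has
   TYPE_SIZE_UNIT n * 4 wraps the expression as

     WITH_SIZE_EXPR <expr, n * 4>

   so later passes still know how many bytes are involved. */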
3206
3207 static void
3208 maybe_with_size_expr (tree *expr_p)
3209 {
3210 tree expr = *expr_p;
3211 tree type = TREE_TYPE (expr);
3212 tree size;
3213
3214 /* If we've already wrapped this or the type is error_mark_node, we can't do
3215 anything. */
3216 if (TREE_CODE (expr) == WITH_SIZE_EXPR
3217 || type == error_mark_node)
3218 return;
3219
3220 /* If the size isn't known or is a constant, we have nothing to do. */
3221 size = TYPE_SIZE_UNIT (type);
3222 if (!size || poly_int_tree_p (size))
3223 return;
3224
3225 /* Otherwise, make a WITH_SIZE_EXPR. */
3226 size = unshare_expr (size);
3227 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
3228 *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
3229 }
3230
3231 /* Helper for gimplify_call_expr. Gimplify a single argument *ARG_P.
3232 Store any side-effects in PRE_P. CALL_LOCATION is the location of
3233 the CALL_EXPR. If ALLOW_SSA is set the actual parameter may be
3234 gimplified to an SSA name. */
3235
3236 enum gimplify_status
3237 gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location,
3238 bool allow_ssa)
3239 {
3240 bool (*test) (tree);
3241 fallback_t fb;
3242
3243 /* In general, we allow lvalues for function arguments to avoid
3244 extra overhead of copying large aggregates out of even larger
3245 aggregates into temporaries only to copy the temporaries to
3246 the argument list. Make optimizers happy by pulling out to
3247 temporaries those types that fit in registers. */
3248 if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
3249 test = is_gimple_val, fb = fb_rvalue;
3250 else
3251 {
3252 test = is_gimple_lvalue, fb = fb_either;
3253 /* Also strip a TARGET_EXPR that would force an extra copy. */
3254 if (TREE_CODE (*arg_p) == TARGET_EXPR)
3255 {
3256 tree init = TARGET_EXPR_INITIAL (*arg_p);
3257 if (init
3258 && !VOID_TYPE_P (TREE_TYPE (init)))
3259 *arg_p = init;
3260 }
3261 }
3262
3263 /* If this is a variable sized type, we must remember the size. */
3264 maybe_with_size_expr (arg_p);
3265
3266 /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c. */
3267 /* Make sure arguments have the same location as the function call
3268 itself. */
3269 protected_set_expr_location (*arg_p, call_location);
3270
3271 /* There is a sequence point before a function call. Side effects in
3272 the argument list must occur before the actual call. So, when
3273 gimplifying arguments, force gimplify_expr to use an internal
3274 post queue which is then appended to the end of PRE_P. */
3275 return gimplify_expr (arg_p, pre_p, NULL, test, fb, allow_ssa);
3276 }
3277
3278 /* Don't fold inside offloading or taskreg regions: it can break code by
3279 adding decl references that weren't in the source. We'll do it during
3280 the omplower pass instead. */
3281
3282 static bool
3283 maybe_fold_stmt (gimple_stmt_iterator *gsi)
3284 {
3285 struct gimplify_omp_ctx *ctx;
3286 for (ctx = gimplify_omp_ctxp; ctx; ctx = ctx->outer_context)
3287 if ((ctx->region_type & (ORT_TARGET | ORT_PARALLEL | ORT_TASK)) != 0)
3288 return false;
3289 else if ((ctx->region_type & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
3290 return false;
3291 /* Delay folding of builtins until the IL is in a consistent state
3292 so the diagnostic machinery can do a better job. */
3293 if (gimple_call_builtin_p (gsi_stmt (*gsi)))
3294 return false;
3295 return fold_stmt (gsi);
3296 }
3297
3298 /* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
3299 WANT_VALUE is true if the result of the call is desired. */
3300
3301 static enum gimplify_status
3302 gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
3303 {
3304 tree fndecl, parms, p, fnptrtype;
3305 enum gimplify_status ret;
3306 int i, nargs;
3307 gcall *call;
3308 bool builtin_va_start_p = false;
3309 location_t loc = EXPR_LOCATION (*expr_p);
3310
3311 gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);
3312
3313 /* For reliable diagnostics during inlining, it is necessary that
3314 every call_expr be annotated with file and line. */
3315 if (! EXPR_HAS_LOCATION (*expr_p))
3316 SET_EXPR_LOCATION (*expr_p, input_location);
3317
3318 /* Gimplify internal functions created in the FEs. */
3319 if (CALL_EXPR_FN (*expr_p) == NULL_TREE)
3320 {
3321 if (want_value)
3322 return GS_ALL_DONE;
3323
3324 nargs = call_expr_nargs (*expr_p);
3325 enum internal_fn ifn = CALL_EXPR_IFN (*expr_p);
3326 auto_vec<tree> vargs (nargs);
3327
3328 for (i = 0; i < nargs; i++)
3329 {
3330 gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
3331 EXPR_LOCATION (*expr_p));
3332 vargs.quick_push (CALL_EXPR_ARG (*expr_p, i));
3333 }
3334
3335 gcall *call = gimple_build_call_internal_vec (ifn, vargs);
3336 gimple_call_set_nothrow (call, TREE_NOTHROW (*expr_p));
3337 gimplify_seq_add_stmt (pre_p, call);
3338 return GS_ALL_DONE;
3339 }
3340
3341 /* This may be a call to a builtin function.
3342
3343 Builtin function calls may be transformed into different
3344 (and more efficient) builtin function calls under certain
3345 circumstances. Unfortunately, gimplification can muck things
3346 up enough that the builtin expanders are not aware that certain
3347 transformations are still valid.
3348
3349 So we attempt transformation/gimplification of the call before
3350 we gimplify the CALL_EXPR. At this time we do not manage to
3351 transform all calls in the same manner as the expanders do, but
3352 we do transform most of them. */
3353 fndecl = get_callee_fndecl (*expr_p);
3354 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
3355 switch (DECL_FUNCTION_CODE (fndecl))
3356 {
3357 CASE_BUILT_IN_ALLOCA:
3358 /* If the call has been built for a variable-sized object, then we
3359 want to restore the stack level when the enclosing BIND_EXPR is
3360 exited to reclaim the allocated space; otherwise, we precisely
3361 need to do the opposite and preserve the latest stack level. */
3362 if (CALL_ALLOCA_FOR_VAR_P (*expr_p))
3363 gimplify_ctxp->save_stack = true;
3364 else
3365 gimplify_ctxp->keep_stack = true;
3366 break;
3367
3368 case BUILT_IN_VA_START:
3369 {
3370 builtin_va_start_p = TRUE;
3371 if (call_expr_nargs (*expr_p) < 2)
3372 {
3373 error ("too few arguments to function %<va_start%>");
3374 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
3375 return GS_OK;
3376 }
3377
3378 if (fold_builtin_next_arg (*expr_p, true))
3379 {
3380 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
3381 return GS_OK;
3382 }
3383 break;
3384 }
3385
3386 case BUILT_IN_EH_RETURN:
3387 cfun->calls_eh_return = true;
3388 break;
3389
3390 case BUILT_IN_CLEAR_PADDING:
3391 if (call_expr_nargs (*expr_p) == 1)
3392 {
3393 /* Remember the original type of the argument in an internal
3394 dummy second argument, as in GIMPLE pointer conversions are
3395 useless. */
3396 p = CALL_EXPR_ARG (*expr_p, 0);
3397 *expr_p
3398 = build_call_expr_loc (EXPR_LOCATION (*expr_p), fndecl, 2, p,
3399 build_zero_cst (TREE_TYPE (p)));
3400 return GS_OK;
3401 }
3402 break;
3403
3404 default:
3405 ;
3406 }
3407 if (fndecl && fndecl_built_in_p (fndecl))
3408 {
3409 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
3410 if (new_tree && new_tree != *expr_p)
3411 {
3412 /* There was a transformation of this call which computes the
3413 same value, but in a more efficient way. Return and try
3414 again. */
3415 *expr_p = new_tree;
3416 return GS_OK;
3417 }
3418 }
3419
3420 /* Remember the original function pointer type. */
3421 fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
3422
3423 if (flag_openmp
3424 && fndecl
3425 && cfun
3426 && (cfun->curr_properties & PROP_gimple_any) == 0)
3427 {
3428 tree variant = omp_resolve_declare_variant (fndecl);
3429 if (variant != fndecl)
3430 CALL_EXPR_FN (*expr_p) = build1 (ADDR_EXPR, fnptrtype, variant);
3431 }
3432
3433 /* There is a sequence point before the call, so any side effects in
3434 the calling expression must occur before the actual call. Force
3435 gimplify_expr to use an internal post queue. */
3436 ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
3437 is_gimple_call_addr, fb_rvalue);
3438
3439 nargs = call_expr_nargs (*expr_p);
3440
3441 /* Get argument types for verification. */
3442 fndecl = get_callee_fndecl (*expr_p);
3443 parms = NULL_TREE;
3444 if (fndecl)
3445 parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3446 else
3447 parms = TYPE_ARG_TYPES (TREE_TYPE (fnptrtype));
3448
3449 if (fndecl && DECL_ARGUMENTS (fndecl))
3450 p = DECL_ARGUMENTS (fndecl);
3451 else if (parms)
3452 p = parms;
3453 else
3454 p = NULL_TREE;
3455 for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
3456 ;
3457
3458 /* If the last argument is __builtin_va_arg_pack () and it is not
3459 passed as a named argument, decrease the number of CALL_EXPR
3460 arguments and set instead the CALL_EXPR_VA_ARG_PACK flag. */
3461 if (!p
3462 && i < nargs
3463 && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
3464 {
3465 tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
3466 tree last_arg_fndecl = get_callee_fndecl (last_arg);
3467
3468 if (last_arg_fndecl
3469 && fndecl_built_in_p (last_arg_fndecl, BUILT_IN_VA_ARG_PACK))
3470 {
3471 tree call = *expr_p;
3472
3473 --nargs;
3474 *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
3475 CALL_EXPR_FN (call),
3476 nargs, CALL_EXPR_ARGP (call));
3477
3478 /* Copy all CALL_EXPR flags, location and block, except
3479 CALL_EXPR_VA_ARG_PACK flag. */
3480 CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
3481 CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
3482 CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
3483 = CALL_EXPR_RETURN_SLOT_OPT (call);
3484 CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
3485 SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));
3486
3487 /* Set CALL_EXPR_VA_ARG_PACK. */
3488 CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
3489 }
3490 }
3491
3492 /* If the call returns twice then after building the CFG the call
3493 argument computations will no longer dominate the call because
3494 we add an abnormal incoming edge to the call. So do not use SSA
3495 vars there. */
3496 bool returns_twice = call_expr_flags (*expr_p) & ECF_RETURNS_TWICE;
3497
3498 /* Gimplify the function arguments. */
3499 if (nargs > 0)
3500 {
3501 for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
3502 PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
3503 PUSH_ARGS_REVERSED ? i-- : i++)
3504 {
3505 enum gimplify_status t;
3506
3507 /* Avoid gimplifying the second argument to va_start, which needs to
3508 be the plain PARM_DECL. */
3509 if ((i != 1) || !builtin_va_start_p)
3510 {
3511 t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
3512 EXPR_LOCATION (*expr_p), ! returns_twice);
3513
3514 if (t == GS_ERROR)
3515 ret = GS_ERROR;
3516 }
3517 }
3518 }
3519
3520 /* Gimplify the static chain. */
3521 if (CALL_EXPR_STATIC_CHAIN (*expr_p))
3522 {
3523 if (fndecl && !DECL_STATIC_CHAIN (fndecl))
3524 CALL_EXPR_STATIC_CHAIN (*expr_p) = NULL;
3525 else
3526 {
3527 enum gimplify_status t;
3528 t = gimplify_arg (&CALL_EXPR_STATIC_CHAIN (*expr_p), pre_p,
3529 EXPR_LOCATION (*expr_p), ! returns_twice);
3530 if (t == GS_ERROR)
3531 ret = GS_ERROR;
3532 }
3533 }
3534
3535 /* Verify the function result. */
3536 if (want_value && fndecl
3537 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype))))
3538 {
3539 error_at (loc, "using result of function returning %<void%>");
3540 ret = GS_ERROR;
3541 }
3542
3543 /* Try this again in case gimplification exposed something. */
3544 if (ret != GS_ERROR)
3545 {
3546 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
3547
3548 if (new_tree && new_tree != *expr_p)
3549 {
3550 /* There was a transformation of this call which computes the
3551 same value, but in a more efficient way. Return and try
3552 again. */
3553 *expr_p = new_tree;
3554 return GS_OK;
3555 }
3556 }
3557 else
3558 {
3559 *expr_p = error_mark_node;
3560 return GS_ERROR;
3561 }
3562
3563 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
3564 decl. This allows us to eliminate redundant or useless
3565 calls to "const" functions. */
3566 if (TREE_CODE (*expr_p) == CALL_EXPR)
3567 {
3568 int flags = call_expr_flags (*expr_p);
3569 if (flags & (ECF_CONST | ECF_PURE)
3570 /* An infinite loop is considered a side effect. */
3571 && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
3572 TREE_SIDE_EFFECTS (*expr_p) = 0;
3573 }
3574
3575 /* If the value is not needed by the caller, emit a new GIMPLE_CALL
3576 and clear *EXPR_P. Otherwise, leave *EXPR_P in its gimplified
3577 form and delegate the creation of a GIMPLE_CALL to
3578 gimplify_modify_expr. This is always possible because when
3579 WANT_VALUE is true, the caller wants the result of this call into
3580 a temporary, which means that we will emit an INIT_EXPR in
3581 internal_get_tmp_var which will then be handled by
3582 gimplify_modify_expr. */
3583 if (!want_value)
3584 {
3585 /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
3586 have to do is replicate it as a GIMPLE_CALL tuple. */
3587 gimple_stmt_iterator gsi;
3588 call = gimple_build_call_from_tree (*expr_p, fnptrtype);
3589 notice_special_calls (call);
3590 gimplify_seq_add_stmt (pre_p, call);
3591 gsi = gsi_last (*pre_p);
3592 maybe_fold_stmt (&gsi);
3593 *expr_p = NULL_TREE;
3594 }
3595 else
3596 /* Remember the original function type. */
3597 CALL_EXPR_FN (*expr_p) = build1 (NOP_EXPR, fnptrtype,
3598 CALL_EXPR_FN (*expr_p));
3599
3600 return ret;
3601 }
3602
3603 /* Handle shortcut semantics in the predicate operand of a COND_EXPR by
3604 rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
3605
3606 TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
3607 condition is true or false, respectively. If null, we should generate
3608 our own to skip over the evaluation of this specific expression.
3609
3610 LOCUS is the source location of the COND_EXPR.
3611
3612 This function is the tree equivalent of do_jump.
3613
3614 shortcut_cond_r should only be called by shortcut_cond_expr. */
3615
3616 static tree
3617 shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
3618 location_t locus)
3619 {
3620 tree local_label = NULL_TREE;
3621 tree t, expr = NULL;
3622
3623 /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
3624 retain the shortcut semantics. Just insert the gotos here;
3625 shortcut_cond_expr will append the real blocks later. */
3626 if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
3627 {
3628 location_t new_locus;
3629
3630 /* Turn if (a && b) into
3631
3632 if (a); else goto no;
3633 if (b) goto yes; else goto no;
3634 (no:) */
3635
3636 if (false_label_p == NULL)
3637 false_label_p = &local_label;
3638
3639 /* Keep the original source location on the first 'if'. */
3640 t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus);
3641 append_to_statement_list (t, &expr);
3642
3643 /* Set the source location of the && on the second 'if'. */
3644 new_locus = rexpr_location (pred, locus);
3645 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
3646 new_locus);
3647 append_to_statement_list (t, &expr);
3648 }
3649 else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
3650 {
3651 location_t new_locus;
3652
3653 /* Turn if (a || b) into
3654
3655 if (a) goto yes;
3656 if (b) goto yes; else goto no;
3657 (yes:) */
3658
3659 if (true_label_p == NULL)
3660 true_label_p = &local_label;
3661
3662 /* Keep the original source location on the first 'if'. */
3663 t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus);
3664 append_to_statement_list (t, &expr);
3665
3666 /* Set the source location of the || on the second 'if'. */
3667 new_locus = rexpr_location (pred, locus);
3668 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
3669 new_locus);
3670 append_to_statement_list (t, &expr);
3671 }
3672 else if (TREE_CODE (pred) == COND_EXPR
3673 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 1)))
3674 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 2))))
3675 {
3676 location_t new_locus;
3677
3678 /* As long as we're messing with gotos, turn if (a ? b : c) into
3679 if (a)
3680 if (b) goto yes; else goto no;
3681 else
3682 if (c) goto yes; else goto no;
3683
3684 Don't do this if one of the arms has void type, which can happen
3685 in C++ when the arm is throw. */
3686
3687 /* Keep the original source location on the first 'if'. Set the source
3688 location of the ? on the second 'if'. */
3689 new_locus = rexpr_location (pred, locus);
3690 expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
3691 shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
3692 false_label_p, locus),
3693 shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
3694 false_label_p, new_locus));
3695 }
3696 else
3697 {
3698 expr = build3 (COND_EXPR, void_type_node, pred,
3699 build_and_jump (true_label_p),
3700 build_and_jump (false_label_p));
3701 SET_EXPR_LOCATION (expr, locus);
3702 }
3703
3704 if (local_label)
3705 {
3706 t = build1 (LABEL_EXPR, void_type_node, local_label);
3707 append_to_statement_list (t, &expr);
3708 }
3709
3710 return expr;
3711 }
3712
3713 /* If EXPR is a GOTO_EXPR, return it. If it is a STATEMENT_LIST, skip
3714 any of its leading DEBUG_BEGIN_STMTS and recurse on the subsequent
3715 statement, if it is the last one. Otherwise, return NULL. */
3716
3717 static tree
3718 find_goto (tree expr)
3719 {
3720 if (!expr)
3721 return NULL_TREE;
3722
3723 if (TREE_CODE (expr) == GOTO_EXPR)
3724 return expr;
3725
3726 if (TREE_CODE (expr) != STATEMENT_LIST)
3727 return NULL_TREE;
3728
3729 tree_stmt_iterator i = tsi_start (expr);
3730
3731 while (!tsi_end_p (i) && TREE_CODE (tsi_stmt (i)) == DEBUG_BEGIN_STMT)
3732 tsi_next (&i);
3733
3734 if (!tsi_one_before_end_p (i))
3735 return NULL_TREE;
3736
3737 return find_goto (tsi_stmt (i));
3738 }
3739
3740 /* Same as find_goto, except that it returns NULL if the destination
3741 is not a LABEL_DECL. */
3742
3743 static inline tree
3744 find_goto_label (tree expr)
3745 {
3746 tree dest = find_goto (expr);
3747 if (dest && TREE_CODE (GOTO_DESTINATION (dest)) == LABEL_DECL)
3748 return dest;
3749 return NULL_TREE;
3750 }
3751
3752 /* Given a conditional expression EXPR with short-circuit boolean
3753 predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
3754 predicate apart into the equivalent sequence of conditionals. */
3755
3756 static tree
3757 shortcut_cond_expr (tree expr)
3758 {
3759 tree pred = TREE_OPERAND (expr, 0);
3760 tree then_ = TREE_OPERAND (expr, 1);
3761 tree else_ = TREE_OPERAND (expr, 2);
3762 tree true_label, false_label, end_label, t;
3763 tree *true_label_p;
3764 tree *false_label_p;
3765 bool emit_end, emit_false, jump_over_else;
3766 bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
3767 bool else_se = else_ && TREE_SIDE_EFFECTS (else_);
3768
3769 /* First do simple transformations. */
3770 if (!else_se)
3771 {
3772 /* If there is no 'else', turn
3773 if (a && b) then c
3774 into
3775 if (a) if (b) then c. */
3776 while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
3777 {
3778 /* Keep the original source location on the first 'if'. */
3779 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
3780 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
3781 /* Set the source location of the && on the second 'if'. */
3782 if (rexpr_has_location (pred))
3783 SET_EXPR_LOCATION (expr, rexpr_location (pred));
3784 then_ = shortcut_cond_expr (expr);
3785 then_se = then_ && TREE_SIDE_EFFECTS (then_);
3786 pred = TREE_OPERAND (pred, 0);
3787 expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
3788 SET_EXPR_LOCATION (expr, locus);
3789 }
3790 }
3791
3792 if (!then_se)
3793 {
3794 /* If there is no 'then', turn
3795 if (a || b); else d
3796 into
3797 if (a); else if (b); else d. */
3798 while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
3799 {
3800 /* Keep the original source location on the first 'if'. */
3801 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
3802 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
3803 /* Set the source location of the || on the second 'if'. */
3804 if (rexpr_has_location (pred))
3805 SET_EXPR_LOCATION (expr, rexpr_location (pred));
3806 else_ = shortcut_cond_expr (expr);
3807 else_se = else_ && TREE_SIDE_EFFECTS (else_);
3808 pred = TREE_OPERAND (pred, 0);
3809 expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
3810 SET_EXPR_LOCATION (expr, locus);
3811 }
3812 }
3813
3814 /* If we're done, great. */
3815 if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
3816 && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
3817 return expr;
3818
3819 /* Otherwise we need to mess with gotos. Change
3820 if (a) c; else d;
3821 to
3822 if (a); else goto no;
3823 c; goto end;
3824 no: d; end:
3825 and recursively gimplify the condition. */
3826
3827 true_label = false_label = end_label = NULL_TREE;
3828
3829 /* If our arms just jump somewhere, hijack those labels so we don't
3830 generate jumps to jumps. */
3831
3832 if (tree then_goto = find_goto_label (then_))
3833 {
3834 true_label = GOTO_DESTINATION (then_goto);
3835 then_ = NULL;
3836 then_se = false;
3837 }
3838
3839 if (tree else_goto = find_goto_label (else_))
3840 {
3841 false_label = GOTO_DESTINATION (else_goto);
3842 else_ = NULL;
3843 else_se = false;
3844 }
3845
3846 /* If we aren't hijacking a label for the 'then' branch, it falls through. */
3847 if (true_label)
3848 true_label_p = &true_label;
3849 else
3850 true_label_p = NULL;
3851
3852 /* The 'else' branch also needs a label if it contains interesting code. */
3853 if (false_label || else_se)
3854 false_label_p = &false_label;
3855 else
3856 false_label_p = NULL;
3857
3858 /* If there was nothing else in our arms, just forward the label(s). */
3859 if (!then_se && !else_se)
3860 return shortcut_cond_r (pred, true_label_p, false_label_p,
3861 EXPR_LOC_OR_LOC (expr, input_location));
3862
3863 /* If our last subexpression already has a terminal label, reuse it. */
3864 if (else_se)
3865 t = expr_last (else_);
3866 else if (then_se)
3867 t = expr_last (then_);
3868 else
3869 t = NULL;
3870 if (t && TREE_CODE (t) == LABEL_EXPR)
3871 end_label = LABEL_EXPR_LABEL (t);
3872
3873 /* If we don't care about jumping to the 'else' branch, jump to the end
3874 if the condition is false. */
3875 if (!false_label_p)
3876 false_label_p = &end_label;
3877
3878 /* We only want to emit these labels if we aren't hijacking them. */
3879 emit_end = (end_label == NULL_TREE);
3880 emit_false = (false_label == NULL_TREE);
3881
3882 /* We only emit the jump over the else clause if we have to--if the
3883 then clause may fall through. Otherwise we can wind up with a
3884 useless jump and a useless label at the end of gimplified code,
3885 which will cause us to think that this conditional as a whole
3886 falls through even if it doesn't. If we then inline a function
3887 which ends with such a condition, that can cause us to issue an
3888 inappropriate warning about control reaching the end of a
3889 non-void function. */
3890 jump_over_else = block_may_fallthru (then_);
3891
3892 pred = shortcut_cond_r (pred, true_label_p, false_label_p,
3893 EXPR_LOC_OR_LOC (expr, input_location));
3894
3895 expr = NULL;
3896 append_to_statement_list (pred, &expr);
3897
3898 append_to_statement_list (then_, &expr);
3899 if (else_se)
3900 {
3901 if (jump_over_else)
3902 {
3903 tree last = expr_last (expr);
3904 t = build_and_jump (&end_label);
3905 if (rexpr_has_location (last))
3906 SET_EXPR_LOCATION (t, rexpr_location (last));
3907 append_to_statement_list (t, &expr);
3908 }
3909 if (emit_false)
3910 {
3911 t = build1 (LABEL_EXPR, void_type_node, false_label);
3912 append_to_statement_list (t, &expr);
3913 }
3914 append_to_statement_list (else_, &expr);
3915 }
3916 if (emit_end && end_label)
3917 {
3918 t = build1 (LABEL_EXPR, void_type_node, end_label);
3919 append_to_statement_list (t, &expr);
3920 }
3921
3922 return expr;
3923 }
3924
3925 /* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */
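
/* For example (illustration only, not from the original comment): in C
   the result of && has type int, so a condition like

     a && b

   arrives as a TRUTH_ANDIF_EXPR of integer type; gimple_boolify gives
   it (and comparisons such as a < b) boolean_type_node, recursing into
   the operands, and converts any other boolean-valued operand with
   fold_convert.  */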
3926
3927 tree
3928 gimple_boolify (tree expr)
3929 {
3930 tree type = TREE_TYPE (expr);
3931 location_t loc = EXPR_LOCATION (expr);
3932
3933 if (TREE_CODE (expr) == NE_EXPR
3934 && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR
3935 && integer_zerop (TREE_OPERAND (expr, 1)))
3936 {
3937 tree call = TREE_OPERAND (expr, 0);
3938 tree fn = get_callee_fndecl (call);
3939
3940 /* For __builtin_expect ((long) (x), y) recurse into x as well
3941 if x is truth_value_p. */
3942 if (fn
3943 && fndecl_built_in_p (fn, BUILT_IN_EXPECT)
3944 && call_expr_nargs (call) == 2)
3945 {
3946 tree arg = CALL_EXPR_ARG (call, 0);
3947 if (arg)
3948 {
3949 if (TREE_CODE (arg) == NOP_EXPR
3950 && TREE_TYPE (arg) == TREE_TYPE (call))
3951 arg = TREE_OPERAND (arg, 0);
3952 if (truth_value_p (TREE_CODE (arg)))
3953 {
3954 arg = gimple_boolify (arg);
3955 CALL_EXPR_ARG (call, 0)
3956 = fold_convert_loc (loc, TREE_TYPE (call), arg);
3957 }
3958 }
3959 }
3960 }
3961
3962 switch (TREE_CODE (expr))
3963 {
3964 case TRUTH_AND_EXPR:
3965 case TRUTH_OR_EXPR:
3966 case TRUTH_XOR_EXPR:
3967 case TRUTH_ANDIF_EXPR:
3968 case TRUTH_ORIF_EXPR:
3969 /* Also boolify the arguments of truth exprs. */
3970 TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
3971 /* FALLTHRU */
3972
3973 case TRUTH_NOT_EXPR:
3974 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
3975
3976 /* These expressions always produce boolean results. */
3977 if (TREE_CODE (type) != BOOLEAN_TYPE)
3978 TREE_TYPE (expr) = boolean_type_node;
3979 return expr;
3980
3981 case ANNOTATE_EXPR:
3982 switch ((enum annot_expr_kind) TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)))
3983 {
3984 case annot_expr_ivdep_kind:
3985 case annot_expr_unroll_kind:
3986 case annot_expr_no_vector_kind:
3987 case annot_expr_vector_kind:
3988 case annot_expr_parallel_kind:
3989 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
3990 if (TREE_CODE (type) != BOOLEAN_TYPE)
3991 TREE_TYPE (expr) = boolean_type_node;
3992 return expr;
3993 default:
3994 gcc_unreachable ();
3995 }
3996
3997 default:
3998 if (COMPARISON_CLASS_P (expr))
3999 {
4000 /* These expressions always produce boolean results. */
4001 if (TREE_CODE (type) != BOOLEAN_TYPE)
4002 TREE_TYPE (expr) = boolean_type_node;
4003 return expr;
4004 }
4005 /* Other expressions that get here must have boolean values, but
4006 might need to be converted to the appropriate mode. */
4007 if (TREE_CODE (type) == BOOLEAN_TYPE)
4008 return expr;
4009 return fold_convert_loc (loc, boolean_type_node, expr);
4010 }
4011 }
4012
4013 /* Given a conditional expression *EXPR_P without side effects, gimplify
4014 its operands. New statements are inserted to PRE_P. */
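
/* An illustrative sketch (example only): for a value-producing

     x = (p && q) ? a : b;

   whose arms neither trap nor have side effects, the && below is
   demoted to TRUTH_AND_EXPR so that gimplifying the predicate cannot
   create a fresh pure COND_EXPR, and the conditional is kept as an
   expression rather than lowered to control flow.  */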
4015
4016 static enum gimplify_status
4017 gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
4018 {
4019 tree expr = *expr_p, cond;
4020 enum gimplify_status ret, tret;
4021 enum tree_code code;
4022
4023 cond = gimple_boolify (COND_EXPR_COND (expr));
4024
4025 /* We need to handle && and || specially, as their gimplification
4026 creates pure COND_EXPRs, which would otherwise lead to an infinite cycle. */
4027 code = TREE_CODE (cond);
4028 if (code == TRUTH_ANDIF_EXPR)
4029 TREE_SET_CODE (cond, TRUTH_AND_EXPR);
4030 else if (code == TRUTH_ORIF_EXPR)
4031 TREE_SET_CODE (cond, TRUTH_OR_EXPR);
4032 ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_condexpr, fb_rvalue);
4033 COND_EXPR_COND (*expr_p) = cond;
4034
4035 tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
4036 is_gimple_val, fb_rvalue);
4037 ret = MIN (ret, tret);
4038 tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
4039 is_gimple_val, fb_rvalue);
4040
4041 return MIN (ret, tret);
4042 }
4043
4044 /* Return true if evaluating EXPR could trap.
4045 EXPR is GENERIC, while tree_could_trap_p can be called
4046 only on GIMPLE. */
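
/* For example (illustration only):

     generic_expr_could_trap_p (x)       => false for a bare decl
     generic_expr_could_trap_p (x / y)   => true, y may be zero
     generic_expr_could_trap_p (x + 1)   => false for integer x

   assuming default (non-trapping) integer overflow semantics.  */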
4047
4048 bool
4049 generic_expr_could_trap_p (tree expr)
4050 {
4051 unsigned i, n;
4052
4053 if (!expr || is_gimple_val (expr))
4054 return false;
4055
4056 if (!EXPR_P (expr) || tree_could_trap_p (expr))
4057 return true;
4058
4059 n = TREE_OPERAND_LENGTH (expr);
4060 for (i = 0; i < n; i++)
4061 if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
4062 return true;
4063
4064 return false;
4065 }
4066
4067 /* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
4068 into
4069
4070 if (p)                  if (p)
4071   t1 = a;                 a;
4072 else            or      else
4073   t1 = b;                 b;
4074 t1;
4075
4076 The second form is used when *EXPR_P is of type void.
4077
4078 PRE_P points to the list where side effects that must happen before
4079 *EXPR_P should be stored. */
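
/* As a concrete illustration (example only): for

     x = p ? f () : g ();

   the first form yields roughly

     if (p) iftmp = f (); else iftmp = g ();
     x = iftmp;

   with "iftmp" the temporary created below.  */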
4080
4081 static enum gimplify_status
4082 gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
4083 {
4084 tree expr = *expr_p;
4085 tree type = TREE_TYPE (expr);
4086 location_t loc = EXPR_LOCATION (expr);
4087 tree tmp, arm1, arm2;
4088 enum gimplify_status ret;
4089 tree label_true, label_false, label_cont;
4090 bool have_then_clause_p, have_else_clause_p;
4091 gcond *cond_stmt;
4092 enum tree_code pred_code;
4093 gimple_seq seq = NULL;
4094
4095 /* If this COND_EXPR has a value, copy the values into a temporary within
4096 the arms. */
4097 if (!VOID_TYPE_P (type))
4098 {
4099 tree then_ = TREE_OPERAND (expr, 1), else_ = TREE_OPERAND (expr, 2);
4100 tree result;
4101
4102 /* If either an rvalue is ok or we do not require an lvalue, create the
4103 temporary. But we cannot do that if the type is addressable. */
4104 if (((fallback & fb_rvalue) || !(fallback & fb_lvalue))
4105 && !TREE_ADDRESSABLE (type))
4106 {
4107 if (gimplify_ctxp->allow_rhs_cond_expr
4108 /* If either branch has side effects or could trap, it can't be
4109 evaluated unconditionally. */
4110 && !TREE_SIDE_EFFECTS (then_)
4111 && !generic_expr_could_trap_p (then_)
4112 && !TREE_SIDE_EFFECTS (else_)
4113 && !generic_expr_could_trap_p (else_))
4114 return gimplify_pure_cond_expr (expr_p, pre_p);
4115
4116 tmp = create_tmp_var (type, "iftmp");
4117 result = tmp;
4118 }
4119
4120 /* Otherwise, only create and copy references to the values. */
4121 else
4122 {
4123 type = build_pointer_type (type);
4124
4125 if (!VOID_TYPE_P (TREE_TYPE (then_)))
4126 then_ = build_fold_addr_expr_loc (loc, then_);
4127
4128 if (!VOID_TYPE_P (TREE_TYPE (else_)))
4129 else_ = build_fold_addr_expr_loc (loc, else_);
4130
4131 expr
4132 = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_);
4133
4134 tmp = create_tmp_var (type, "iftmp");
4135 result = build_simple_mem_ref_loc (loc, tmp);
4136 }
4137
4138 /* Build the new then clause, `tmp = then_;'. But don't build the
4139 assignment if the value is void; in C++ it can be if it's a throw. */
4140 if (!VOID_TYPE_P (TREE_TYPE (then_)))
4141 TREE_OPERAND (expr, 1) = build2 (INIT_EXPR, type, tmp, then_);
4142
4143 /* Similarly, build the new else clause, `tmp = else_;'. */
4144 if (!VOID_TYPE_P (TREE_TYPE (else_)))
4145 TREE_OPERAND (expr, 2) = build2 (INIT_EXPR, type, tmp, else_);
4146
4147 TREE_TYPE (expr) = void_type_node;
4148 recalculate_side_effects (expr);
4149
4150 /* Move the COND_EXPR to the prequeue. */
4151 gimplify_stmt (&expr, pre_p);
4152
4153 *expr_p = result;
4154 return GS_ALL_DONE;
4155 }
4156
4157 /* Remove any COMPOUND_EXPR so the following cases will be caught. */
4158 STRIP_TYPE_NOPS (TREE_OPERAND (expr, 0));
4159 if (TREE_CODE (TREE_OPERAND (expr, 0)) == COMPOUND_EXPR)
4160 gimplify_compound_expr (&TREE_OPERAND (expr, 0), pre_p, true);
4161
4162 /* Make sure the condition has BOOLEAN_TYPE. */
4163 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
4164
4165 /* Break apart && and || conditions. */
4166 if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
4167 || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
4168 {
4169 expr = shortcut_cond_expr (expr);
4170
4171 if (expr != *expr_p)
4172 {
4173 *expr_p = expr;
4174
4175 /* We can't rely on gimplify_expr to re-gimplify the expanded
4176 form properly, as cleanups might cause the target labels to be
4177 wrapped in a TRY_FINALLY_EXPR. To prevent that, we need to
4178 set up a conditional context. */
4179 gimple_push_condition ();
4180 gimplify_stmt (expr_p, &seq);
4181 gimple_pop_condition (pre_p);
4182 gimple_seq_add_seq (pre_p, seq);
4183
4184 return GS_ALL_DONE;
4185 }
4186 }
4187
4188 /* Now do the normal gimplification. */
4189
4190 /* Gimplify condition. */
4191 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL,
4192 is_gimple_condexpr_for_cond, fb_rvalue);
4193 if (ret == GS_ERROR)
4194 return GS_ERROR;
4195 gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);
4196
4197 gimple_push_condition ();
4198
4199 have_then_clause_p = have_else_clause_p = false;
4200 label_true = find_goto_label (TREE_OPERAND (expr, 1));
4201 if (label_true
4202 && DECL_CONTEXT (GOTO_DESTINATION (label_true)) == current_function_decl
4203 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
4204 have different locations, otherwise we end up with incorrect
4205 location information on the branches. */
4206 && (optimize
4207 || !EXPR_HAS_LOCATION (expr)
4208 || !rexpr_has_location (label_true)
4209 || EXPR_LOCATION (expr) == rexpr_location (label_true)))
4210 {
4211 have_then_clause_p = true;
4212 label_true = GOTO_DESTINATION (label_true);
4213 }
4214 else
4215 label_true = create_artificial_label (UNKNOWN_LOCATION);
4216 label_false = find_goto_label (TREE_OPERAND (expr, 2));
4217 if (label_false
4218 && DECL_CONTEXT (GOTO_DESTINATION (label_false)) == current_function_decl
4219 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
4220 have different locations, otherwise we end up with incorrect
4221 location information on the branches. */
4222 && (optimize
4223 || !EXPR_HAS_LOCATION (expr)
4224 || !rexpr_has_location (label_false)
4225 || EXPR_LOCATION (expr) == rexpr_location (label_false)))
4226 {
4227 have_else_clause_p = true;
4228 label_false = GOTO_DESTINATION (label_false);
4229 }
4230 else
4231 label_false = create_artificial_label (UNKNOWN_LOCATION);
4232
4233 gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
4234 &arm2);
4235 cond_stmt = gimple_build_cond (pred_code, arm1, arm2, label_true,
4236 label_false);
4237 gimple_set_no_warning (cond_stmt, TREE_NO_WARNING (COND_EXPR_COND (expr)));
4238 gimplify_seq_add_stmt (&seq, cond_stmt);
4239 gimple_stmt_iterator gsi = gsi_last (seq);
4240 maybe_fold_stmt (&gsi);
4241
4242 label_cont = NULL_TREE;
4243 if (!have_then_clause_p)
4244 {
4245 /* For if (...) {} else { code; } put label_true after
4246 the else block. */
4247 if (TREE_OPERAND (expr, 1) == NULL_TREE
4248 && !have_else_clause_p
4249 && TREE_OPERAND (expr, 2) != NULL_TREE)
4250 label_cont = label_true;
4251 else
4252 {
4253 gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
4254 have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
4255 /* For if (...) { code; } else {} or
4256 if (...) { code; } else goto label; or
4257 if (...) { code; return; } else { ... }
4258 label_cont isn't needed. */
4259 if (!have_else_clause_p
4260 && TREE_OPERAND (expr, 2) != NULL_TREE
4261 && gimple_seq_may_fallthru (seq))
4262 {
4263 gimple *g;
4264 label_cont = create_artificial_label (UNKNOWN_LOCATION);
4265
4266 g = gimple_build_goto (label_cont);
4267
4268 /* GIMPLE_CONDs are very low level; they have embedded
4269 gotos. This particular embedded goto should not be marked
4270 with the location of the original COND_EXPR, as it would
4271 correspond to the COND_EXPR's condition, not the ELSE or the
4272 THEN arms. To avoid marking it with the wrong location, flag
4273 it as "no location". */
4274 gimple_set_do_not_emit_location (g);
4275
4276 gimplify_seq_add_stmt (&seq, g);
4277 }
4278 }
4279 }
4280 if (!have_else_clause_p)
4281 {
4282 gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
4283 have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
4284 }
4285 if (label_cont)
4286 gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));
4287
4288 gimple_pop_condition (pre_p);
4289 gimple_seq_add_seq (pre_p, seq);
4290
4291 if (ret == GS_ERROR)
4292 ; /* Do nothing. */
4293 else if (have_then_clause_p || have_else_clause_p)
4294 ret = GS_ALL_DONE;
4295 else
4296 {
4297 /* Both arms are empty; replace the COND_EXPR with its predicate. */
4298 expr = TREE_OPERAND (expr, 0);
4299 gimplify_stmt (&expr, pre_p);
4300 }
4301
4302 *expr_p = NULL;
4303 return ret;
4304 }
4305
4306 /* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
4307 to be marked addressable.
4308
4309 We cannot rely on such an expression being directly markable if a temporary
4310 has been created by the gimplification. In this case, we create another
4311 temporary and initialize it with a copy, which will become a store after we
4312 mark it addressable. This can happen if the front-end passed us something
4313 that it could not mark addressable yet, like a Fortran pass-by-reference
4314 parameter (int) floatvar. */
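
/* A minimal sketch (illustration only): if gimplification has already
   reduced the expression to a register temporary, say

     tmp_1 = (int) floatvar;

   then tmp_1 itself cannot be marked addressable, so we copy it into a
   fresh non-register temporary and mark that instead.  */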
4315
4316 static void
4317 prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p)
4318 {
4319 while (handled_component_p (*expr_p))
4320 expr_p = &TREE_OPERAND (*expr_p, 0);
4321 if (is_gimple_reg (*expr_p))
4322 {
4323 /* Do not allow an SSA name as the temporary. */
4324 tree var = get_initialized_tmp_var (*expr_p, seq_p, NULL, false);
4325 DECL_NOT_GIMPLE_REG_P (var) = 1;
4326 *expr_p = var;
4327 }
4328 }
4329
4330 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4331 a call to __builtin_memcpy. */
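
/* Illustrative sketch (example only): for an aggregate assignment

     a = b;

   of variable-sized type, this emits roughly

     __builtin_memcpy (&a, &b, size);

   after making sure both operands are addressable.  */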
4332
4333 static enum gimplify_status
4334 gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
4335 gimple_seq *seq_p)
4336 {
4337 tree t, to, to_ptr, from, from_ptr;
4338 gcall *gs;
4339 location_t loc = EXPR_LOCATION (*expr_p);
4340
4341 to = TREE_OPERAND (*expr_p, 0);
4342 from = TREE_OPERAND (*expr_p, 1);
4343
4344 /* Mark the RHS addressable. Beware that it may not be possible to do so
4345 directly if a temporary has been created by the gimplification. */
4346 prepare_gimple_addressable (&from, seq_p);
4347
4348 mark_addressable (from);
4349 from_ptr = build_fold_addr_expr_loc (loc, from);
4350 gimplify_arg (&from_ptr, seq_p, loc);
4351
4352 mark_addressable (to);
4353 to_ptr = build_fold_addr_expr_loc (loc, to);
4354 gimplify_arg (&to_ptr, seq_p, loc);
4355
4356 t = builtin_decl_implicit (BUILT_IN_MEMCPY);
4357
4358 gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);
4359 gimple_call_set_alloca_for_var (gs, true);
4360
4361 if (want_value)
4362 {
4363 /* tmp = memcpy() */
4364 t = create_tmp_var (TREE_TYPE (to_ptr));
4365 gimple_call_set_lhs (gs, t);
4366 gimplify_seq_add_stmt (seq_p, gs);
4367
4368 *expr_p = build_simple_mem_ref (t);
4369 return GS_ALL_DONE;
4370 }
4371
4372 gimplify_seq_add_stmt (seq_p, gs);
4373 *expr_p = NULL;
4374 return GS_ALL_DONE;
4375 }
4376
4377 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4378 a call to __builtin_memset. In this case we know that the RHS is
4379 a CONSTRUCTOR with an empty element list. */
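
/* Illustrative sketch (example only): for a clearing assignment such as

     a = (struct s) {};

   this emits roughly

     __builtin_memset (&a, 0, size);  */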
4380
4381 static enum gimplify_status
4382 gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
4383 gimple_seq *seq_p)
4384 {
4385 tree t, from, to, to_ptr;
4386 gcall *gs;
4387 location_t loc = EXPR_LOCATION (*expr_p);
4388
4389 /* Assert our assumptions, to abort instead of producing wrong code
4390 silently if they are not met. Beware that the RHS CONSTRUCTOR might
4391 not be immediately exposed. */
4392 from = TREE_OPERAND (*expr_p, 1);
4393 if (TREE_CODE (from) == WITH_SIZE_EXPR)
4394 from = TREE_OPERAND (from, 0);
4395
4396 gcc_assert (TREE_CODE (from) == CONSTRUCTOR
4397 && vec_safe_is_empty (CONSTRUCTOR_ELTS (from)));
4398
4399 /* Now proceed. */
4400 to = TREE_OPERAND (*expr_p, 0);
4401
4402 to_ptr = build_fold_addr_expr_loc (loc, to);
4403 gimplify_arg (&to_ptr, seq_p, loc);
4404 t = builtin_decl_implicit (BUILT_IN_MEMSET);
4405
4406 gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);
4407
4408 if (want_value)
4409 {
4410 /* tmp = memset() */
4411 t = create_tmp_var (TREE_TYPE (to_ptr));
4412 gimple_call_set_lhs (gs, t);
4413 gimplify_seq_add_stmt (seq_p, gs);
4414
4415 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
4416 return GS_ALL_DONE;
4417 }
4418
4419 gimplify_seq_add_stmt (seq_p, gs);
4420 *expr_p = NULL;
4421 return GS_ALL_DONE;
4422 }
4423
4424 /* A subroutine of gimplify_init_ctor_preeval. Called via walk_tree,
4425 determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
4426 assignment. Return non-null if we detect a potential overlap. */
4427
4428 struct gimplify_init_ctor_preeval_data
4429 {
4430 /* The base decl of the lhs object. May be NULL, in which case we
4431 have to assume the lhs is indirect. */
4432 tree lhs_base_decl;
4433
4434 /* The alias set of the lhs object. */
4435 alias_set_type lhs_alias_set;
4436 };
4437
4438 static tree
4439 gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
4440 {
4441 struct gimplify_init_ctor_preeval_data *data
4442 = (struct gimplify_init_ctor_preeval_data *) xdata;
4443 tree t = *tp;
4444
4445 /* If we find the base object, obviously we have overlap. */
4446 if (data->lhs_base_decl == t)
4447 return t;
4448
4449 /* If the constructor component is indirect, determine if we have a
4450 potential overlap with the lhs. The only bits of information we
4451 have to go on at this point are addressability and alias sets. */
4452 if ((INDIRECT_REF_P (t)
4453 || TREE_CODE (t) == MEM_REF)
4454 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
4455 && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
4456 return t;
4457
4458 /* If the constructor component is a call, determine if it can hide a
4459 potential overlap with the lhs through an INDIRECT_REF like above.
4460 ??? Ugh - this is completely broken. In fact this whole analysis
4461 doesn't look conservative. */
4462 if (TREE_CODE (t) == CALL_EXPR)
4463 {
4464 tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));
4465
4466 for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
4467 if (POINTER_TYPE_P (TREE_VALUE (type))
4468 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
4469 && alias_sets_conflict_p (data->lhs_alias_set,
4470 get_alias_set
4471 (TREE_TYPE (TREE_VALUE (type)))))
4472 return t;
4473 }
4474
4475 if (IS_TYPE_OR_DECL_P (t))
4476 *walk_subtrees = 0;
4477 return NULL;
4478 }
4479
4480 /* A subroutine of gimplify_init_constructor. Pre-evaluate EXPR,
4481 force values that overlap with the lhs (as described by *DATA)
4482 into temporaries. */
4483
4484 static void
4485 gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
4486 struct gimplify_init_ctor_preeval_data *data)
4487 {
4488 enum gimplify_status one;
4489
4490 /* If the value is constant, then there's nothing to pre-evaluate. */
4491 if (TREE_CONSTANT (*expr_p))
4492 {
4493 /* Ensure it does not have side effects, it might contain a reference to
4494 the object we're initializing. */
4495 gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
4496 return;
4497 }
4498
4499 /* If the type has non-trivial constructors, we can't pre-evaluate. */
4500 if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
4501 return;
4502
4503 /* Recurse for nested constructors. */
4504 if (TREE_CODE (*expr_p) == CONSTRUCTOR)
4505 {
4506 unsigned HOST_WIDE_INT ix;
4507 constructor_elt *ce;
4508 vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (*expr_p);
4509
4510 FOR_EACH_VEC_SAFE_ELT (v, ix, ce)
4511 gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);
4512
4513 return;
4514 }
4515
4516 /* If this is a variable sized type, we must remember the size. */
4517 maybe_with_size_expr (expr_p);
4518
4519 /* Gimplify the constructor element to something appropriate for the rhs
4520 of a MODIFY_EXPR. Given that we know the LHS is an aggregate, we know
4521 the gimplifier will consider this a store to memory. Doing this
4522 gimplification now means that we won't have to deal with complicated
4523 language-specific trees, nor trees like SAVE_EXPR that can induce
4524 exponential search behavior. */
4525 one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
4526 if (one == GS_ERROR)
4527 {
4528 *expr_p = NULL;
4529 return;
4530 }
4531
4532 /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
4533 with the lhs, since "a = { .x=a }" doesn't make sense. This will
4534 always be true for all scalars, since is_gimple_mem_rhs insists on a
4535 temporary variable for them. */
4536 if (DECL_P (*expr_p))
4537 return;
4538
4539 /* If this is of variable size, we have no choice but to assume it doesn't
4540 overlap since we can't make a temporary for it. */
4541 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
4542 return;
4543
4544 /* Otherwise, we must search for overlap ... */
4545 if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
4546 return;
4547
4548 /* ... and if found, force the value into a temporary. */
4549 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
4550 }
4551
4552 /* A subroutine of gimplify_init_ctor_eval. Create a loop for
4553 a RANGE_EXPR in a CONSTRUCTOR for an array.
4554
4555 var = lower;
4556 loop_entry:
4557 object[var] = value;
4558 if (var == upper)
4559 goto loop_exit;
4560 var = var + 1;
4561 goto loop_entry;
4562 loop_exit:
4563
4564 We increment var _after_ the loop exit check because we might otherwise
4565 fail if upper == TYPE_MAX_VALUE (type for upper).
4566
4567 Note that we never have to deal with SAVE_EXPRs here, because this has
4568 already been taken care of for us, in gimplify_init_ctor_preeval(). */
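
/* Illustrative example (GNU C, not from the original comment): a
   designated range initializer such as

     int a[1000] = { [0 ... 999] = 5 };

   reaches here as a RANGE_EXPR and is expanded into the loop above
   instead of 1000 individual stores.  */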
4569
4570 static void gimplify_init_ctor_eval (tree, vec<constructor_elt, va_gc> *,
4571 gimple_seq *, bool);
4572
4573 static void
4574 gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
4575 tree value, tree array_elt_type,
4576 gimple_seq *pre_p, bool cleared)
4577 {
4578 tree loop_entry_label, loop_exit_label, fall_thru_label;
4579 tree var, var_type, cref, tmp;
4580
4581 loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
4582 loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
4583 fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);
4584
4585 /* Create and initialize the index variable. */
4586 var_type = TREE_TYPE (upper);
4587 var = create_tmp_var (var_type);
4588 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));
4589
4590 /* Add the loop entry label. */
4591 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));
4592
4593 /* Build the reference. */
4594 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
4595 var, NULL_TREE, NULL_TREE);
4596
4597 /* If the value is itself a CONSTRUCTOR, call gimplify_init_ctor_eval
4598 to do the store. Otherwise just assign value to the reference. */
4599
4600 if (TREE_CODE (value) == CONSTRUCTOR)
4601 /* NB we might have to call ourselves recursively through
4602 gimplify_init_ctor_eval if the value is a constructor. */
4603 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
4604 pre_p, cleared);
4605 else
4606 gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));
4607
4608 /* We exit the loop when the index var is equal to the upper bound. */
4609 gimplify_seq_add_stmt (pre_p,
4610 gimple_build_cond (EQ_EXPR, var, upper,
4611 loop_exit_label, fall_thru_label));
4612
4613 gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));
4614
4615 /* Otherwise, increment the index var... */
4616 tmp = build2 (PLUS_EXPR, var_type, var,
4617 fold_convert (var_type, integer_one_node));
4618 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));
4619
4620 /* ...and jump back to the loop entry. */
4621 gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));
4622
4623 /* Add the loop exit label. */
4624 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
4625 }
4626
4627 /* Return true if FDECL is a FIELD_DECL of zero size. */
4628
4629 static bool
4630 zero_sized_field_decl (const_tree fdecl)
4631 {
4632 if (TREE_CODE (fdecl) == FIELD_DECL && DECL_SIZE (fdecl)
4633 && integer_zerop (DECL_SIZE (fdecl)))
4634 return true;
4635 return false;
4636 }
4637
4638 /* Return true if TYPE is zero sized. */
4639
4640 static bool
4641 zero_sized_type (const_tree type)
4642 {
4643 if (AGGREGATE_TYPE_P (type) && TYPE_SIZE (type)
4644 && integer_zerop (TYPE_SIZE (type)))
4645 return true;
4646 return false;
4647 }
4648
4649 /* A subroutine of gimplify_init_constructor. Generate individual
4650 MODIFY_EXPRs for a CONSTRUCTOR. OBJECT is the LHS against which the
4651 assignments should happen. ELTS is the CONSTRUCTOR_ELTS of the
4652 CONSTRUCTOR. CLEARED is true if the entire LHS object has been
4653 zeroed first. */
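
/* Illustrative example (sketch only): for

     struct S x = { .a = 1, .b = f () };

   with CLEARED false this emits

     x.a = 1;
     x.b = f ();

   while with CLEARED true the zero-valued initializers are skipped.  */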
4654
4655 static void
4656 gimplify_init_ctor_eval (tree object, vec<constructor_elt, va_gc> *elts,
4657 gimple_seq *pre_p, bool cleared)
4658 {
4659 tree array_elt_type = NULL;
4660 unsigned HOST_WIDE_INT ix;
4661 tree purpose, value;
4662
4663 if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
4664 array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));
4665
4666 FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
4667 {
4668 tree cref;
4669
4670 /* NULL values are created above for gimplification errors. */
4671 if (value == NULL)
4672 continue;
4673
4674 if (cleared && initializer_zerop (value))
4675 continue;
4676
4677 /* ??? Here's to hoping the front end fills in all of the indices,
4678 so we don't have to figure out what's missing ourselves. */
4679 gcc_assert (purpose);
4680
4681 /* Skip zero-sized fields, unless value has side-effects. This can
4682 happen with calls to functions returning a zero-sized type, which
4683 we shouldn't discard. As a number of downstream passes don't
4684 expect sets of zero-sized fields, we rely on the gimplification of
4685 the MODIFY_EXPR we make below to drop the assignment statement. */
4686 if (! TREE_SIDE_EFFECTS (value) && zero_sized_field_decl (purpose))
4687 continue;
4688
4689 /* If we have a RANGE_EXPR, we have to build a loop to assign the
4690 whole range. */
4691 if (TREE_CODE (purpose) == RANGE_EXPR)
4692 {
4693 tree lower = TREE_OPERAND (purpose, 0);
4694 tree upper = TREE_OPERAND (purpose, 1);
4695
4696 /* If the lower bound is equal to upper, just treat it as if
4697 upper was the index. */
4698 if (simple_cst_equal (lower, upper))
4699 purpose = upper;
4700 else
4701 {
4702 gimplify_init_ctor_eval_range (object, lower, upper, value,
4703 array_elt_type, pre_p, cleared);
4704 continue;
4705 }
4706 }
4707
4708 if (array_elt_type)
4709 {
4710 /* Do not use bitsizetype for ARRAY_REF indices. */
4711 if (TYPE_DOMAIN (TREE_TYPE (object)))
4712 purpose
4713 = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
4714 purpose);
4715 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
4716 purpose, NULL_TREE, NULL_TREE);
4717 }
4718 else
4719 {
4720 gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
4721 cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
4722 unshare_expr (object), purpose, NULL_TREE);
4723 }
4724
4725 if (TREE_CODE (value) == CONSTRUCTOR
4726 && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
4727 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
4728 pre_p, cleared);
4729 else
4730 {
4731 tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
4732 gimplify_and_add (init, pre_p);
4733 ggc_free (init);
4734 }
4735 }
4736 }
4737
4738 /* Return the appropriate RHS predicate for this LHS. */
4739
4740 gimple_predicate
4741 rhs_predicate_for (tree lhs)
4742 {
4743 if (is_gimple_reg (lhs))
4744 return is_gimple_reg_rhs_or_call;
4745 else
4746 return is_gimple_mem_rhs_or_call;
4747 }
4748
4749 /* Return the initial guess for an appropriate RHS predicate for this LHS,
4750 before the LHS has been gimplified. */
4751
4752 static gimple_predicate
4753 initial_rhs_predicate_for (tree lhs)
4754 {
4755 if (is_gimple_reg_type (TREE_TYPE (lhs)))
4756 return is_gimple_reg_rhs_or_call;
4757 else
4758 return is_gimple_mem_rhs_or_call;
4759 }
4760
4761 /* Gimplify a C99 compound literal expression. This just means adding
4762 the DECL_EXPR before the current statement and using its anonymous
4763 decl instead. */
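
/* Illustrative example (sketch only; D.1234 is a made-up name for the
   anonymous decl): for

     p = &(struct s) { 1, 2 };

   the DECL_EXPR is emitted ahead of the statement and the literal is
   replaced by its decl, roughly

     D.1234 = (struct s) { 1, 2 };
     p = &D.1234;  */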
4764
4765 static enum gimplify_status
4766 gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p,
4767 bool (*gimple_test_f) (tree),
4768 fallback_t fallback)
4769 {
4770 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
4771 tree decl = DECL_EXPR_DECL (decl_s);
4772 tree init = DECL_INITIAL (decl);
4773 /* Mark the decl as addressable if the compound literal
4774 expression is addressable now, otherwise it is marked too late
4775 after we gimplify the initialization expression. */
4776 if (TREE_ADDRESSABLE (*expr_p))
4777 TREE_ADDRESSABLE (decl) = 1;
4778 /* Otherwise, if we don't need an lvalue and have a literal, directly
4779 substitute it. Check if it matches the gimple predicate, as
4780 otherwise we'd generate a new temporary, and we can as well just
4781 use the decl we already have. */
4782 else if (!TREE_ADDRESSABLE (decl)
4783 && !TREE_THIS_VOLATILE (decl)
4784 && init
4785 && (fallback & fb_lvalue) == 0
4786 && gimple_test_f (init))
4787 {
4788 *expr_p = init;
4789 return GS_OK;
4790 }
4791
4792 /* If the decl is not addressable, then it is being used in some
4793 expression or on the right hand side of a statement, and it can
4794 be put into a readonly data section. */
4795 if (!TREE_ADDRESSABLE (decl) && (fallback & fb_lvalue) == 0)
4796 TREE_READONLY (decl) = 1;
4797
4798 /* This decl isn't mentioned in the enclosing block, so add it to the
4799 list of temps. FIXME it seems a bit of a kludge to say that
4800 anonymous artificial vars aren't pushed, but everything else is. */
4801 if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
4802 gimple_add_tmp_var (decl);
4803
4804 gimplify_and_add (decl_s, pre_p);
4805 *expr_p = decl;
4806 return GS_OK;
4807 }
4808
4809 /* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
4810 return a new CONSTRUCTOR if something changed. */
4811
4812 static tree
4813 optimize_compound_literals_in_ctor (tree orig_ctor)
4814 {
4815 tree ctor = orig_ctor;
4816 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (ctor);
4817 unsigned int idx, num = vec_safe_length (elts);
4818
4819 for (idx = 0; idx < num; idx++)
4820 {
4821 tree value = (*elts)[idx].value;
4822 tree newval = value;
4823 if (TREE_CODE (value) == CONSTRUCTOR)
4824 newval = optimize_compound_literals_in_ctor (value);
4825 else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
4826 {
4827 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
4828 tree decl = DECL_EXPR_DECL (decl_s);
4829 tree init = DECL_INITIAL (decl);
4830
4831 if (!TREE_ADDRESSABLE (value)
4832 && !TREE_ADDRESSABLE (decl)
4833 && init
4834 && TREE_CODE (init) == CONSTRUCTOR)
4835 newval = optimize_compound_literals_in_ctor (init);
4836 }
4837 if (newval == value)
4838 continue;
4839
4840 if (ctor == orig_ctor)
4841 {
4842 ctor = copy_node (orig_ctor);
4843 CONSTRUCTOR_ELTS (ctor) = vec_safe_copy (elts);
4844 elts = CONSTRUCTOR_ELTS (ctor);
4845 }
4846 (*elts)[idx].value = newval;
4847 }
4848 return ctor;
4849 }
4850
4851 /* A subroutine of gimplify_modify_expr. Break out elements of a
4852 CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
4853
4854 Note that we still need to clear any elements that don't have explicit
4855 initializers, so if not all elements are initialized we keep the
4856 original MODIFY_EXPR; we just remove all of the constructor elements.
4857
4858 If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
4859 GS_ERROR if we would have to create a temporary when gimplifying
4860 this constructor. Otherwise, return GS_OK.
4861
4862 If NOTIFY_TEMP_CREATION is false, just do the gimplification. */
4863
4864 static enum gimplify_status
4865 gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
4866 bool want_value, bool notify_temp_creation)
4867 {
4868 tree object, ctor, type;
4869 enum gimplify_status ret;
4870 vec<constructor_elt, va_gc> *elts;
4871
4872 gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);
4873
4874 if (!notify_temp_creation)
4875 {
4876 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
4877 is_gimple_lvalue, fb_lvalue);
4878 if (ret == GS_ERROR)
4879 return ret;
4880 }
4881
4882 object = TREE_OPERAND (*expr_p, 0);
4883 ctor = TREE_OPERAND (*expr_p, 1)
4884 = optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
4885 type = TREE_TYPE (ctor);
4886 elts = CONSTRUCTOR_ELTS (ctor);
4887 ret = GS_ALL_DONE;
4888
4889 switch (TREE_CODE (type))
4890 {
4891 case RECORD_TYPE:
4892 case UNION_TYPE:
4893 case QUAL_UNION_TYPE:
4894 case ARRAY_TYPE:
4895 {
4896 /* Use readonly data for initializers of this or smaller size
4897 regardless of the num_nonzero_elements / num_unique_nonzero_elements
4898 ratio. */
4899 const HOST_WIDE_INT min_unique_size = 64;
4900 /* If num_nonzero_elements / num_unique_nonzero_elements ratio
4901 is smaller than this, use readonly data. */
4902 const int unique_nonzero_ratio = 8;
4903 /* True if a single access of the object must be ensured. This is the
4904 case if the target is volatile, the type is non-addressable and more
4905 than one field needs to be assigned. */
4906 const bool ensure_single_access
4907 = TREE_THIS_VOLATILE (object)
4908 && !TREE_ADDRESSABLE (type)
4909 && vec_safe_length (elts) > 1;
4910 struct gimplify_init_ctor_preeval_data preeval_data;
4911 HOST_WIDE_INT num_ctor_elements, num_nonzero_elements;
4912 HOST_WIDE_INT num_unique_nonzero_elements;
4913 bool cleared, complete_p, valid_const_initializer;
4914
4915 /* Aggregate types must lower constructors to initialization of
4916 individual elements. The exception is that a CONSTRUCTOR node
4917 with no elements indicates zero-initialization of the whole. */
4918 if (vec_safe_is_empty (elts))
4919 {
4920 if (notify_temp_creation)
4921 return GS_OK;
4922 break;
4923 }
4924
4925 /* Fetch information about the constructor to direct later processing.
4926 We might want to make static versions of it in various cases, and
4927 can only do so if it is known to be a valid constant initializer. */
4928 valid_const_initializer
4929 = categorize_ctor_elements (ctor, &num_nonzero_elements,
4930 &num_unique_nonzero_elements,
4931 &num_ctor_elements, &complete_p);
4932
4933 /* If a const aggregate variable is being initialized, then it
4934 should never be a loss to promote the variable to be static. */
4935 if (valid_const_initializer
4936 && num_nonzero_elements > 1
4937 && TREE_READONLY (object)
4938 && VAR_P (object)
4939 && !DECL_REGISTER (object)
4940 && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object))
4941 /* For ctors that have many repeated nonzero elements
4942 represented through RANGE_EXPRs, prefer initializing
4943 those through runtime loops over copies of large amounts
4944 of data from readonly data section. */
4945 && (num_unique_nonzero_elements
4946 > num_nonzero_elements / unique_nonzero_ratio
4947 || ((unsigned HOST_WIDE_INT) int_size_in_bytes (type)
4948 <= (unsigned HOST_WIDE_INT) min_unique_size)))
4949 {
4950 if (notify_temp_creation)
4951 return GS_ERROR;
4952
4953 DECL_INITIAL (object) = ctor;
4954 TREE_STATIC (object) = 1;
4955 if (!DECL_NAME (object))
4956 DECL_NAME (object) = create_tmp_var_name ("C");
4957 walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);
4958
4959 /* ??? C++ doesn't automatically append a .<number> to the
4960 assembler name, and even when it does, it looks at FE private
4961 data structures to figure out what that number should be,
4962 which are not set for this variable. I suppose this is
4963 important for local statics for inline functions, which aren't
4964 "local" in the object file sense. So in order to get a unique
4965 TU-local symbol, we must invoke the lhd version now. */
4966 lhd_set_decl_assembler_name (object);
4967
4968 *expr_p = NULL_TREE;
4969 break;
4970 }
4971
4972 /* If there are "lots" of initialized elements, even discounting
4973 those that are not address constants (and thus *must* be
4974 computed at runtime), then partition the constructor into
4975 constant and non-constant parts. Block copy the constant
4976 parts in, then generate code for the non-constant parts. */
4977 /* TODO. There's code in cp/typeck.c to do this. */
4978
4979 if (int_size_in_bytes (TREE_TYPE (ctor)) < 0)
4980 /* store_constructor will ignore the clearing of variable-sized
4981 objects. Initializers for such objects must explicitly set
4982 every field that needs to be set. */
4983 cleared = false;
4984 else if (!complete_p)
4985 /* If the constructor isn't complete, clear the whole object
4986 beforehand, unless CONSTRUCTOR_NO_CLEARING is set on it.
4987
4988 ??? This ought not to be needed. For any element not present
4989 in the initializer, we should simply set it to zero. Except
4990 we'd need to *find* the elements that are not present, and that
4991 requires trickery to avoid quadratic compile-time behavior in
4992 large cases or excessive memory use in small cases. */
4993 cleared = !CONSTRUCTOR_NO_CLEARING (ctor);
4994 else if (num_ctor_elements - num_nonzero_elements
4995 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
4996 && num_nonzero_elements < num_ctor_elements / 4)
4997 /* If there are "lots" of zeros, it's more efficient to clear
4998 the memory and then set the nonzero elements. */
4999 cleared = true;
5000 else if (ensure_single_access && num_nonzero_elements == 0)
5001 /* If a single access to the target must be ensured and all elements
5002 are zero, then it's optimal to clear regardless of their number. */
5003 cleared = true;
5004 else
5005 cleared = false;
5006
5007 /* If there are "lots" of initialized elements, and all of them
5008 are valid address constants, then the entire initializer can
5009 be dropped to memory, and then memcpy'd out. Don't do this
5010 for sparse arrays, though, as it's more efficient to follow
5011 the standard CONSTRUCTOR behavior of memset followed by
5012 individual element initialization. Also don't do this for small
5013 all-zero initializers (which aren't big enough to merit
5014 clearing), and don't try to make bitwise copies of
5015 TREE_ADDRESSABLE types. */
5016 if (valid_const_initializer
5017 && complete_p
5018 && !(cleared || num_nonzero_elements == 0)
5019 && !TREE_ADDRESSABLE (type))
5020 {
5021 HOST_WIDE_INT size = int_size_in_bytes (type);
5022 unsigned int align;
5023
5024 /* ??? We can still get unbounded array types, at least
5025 from the C++ front end. This seems wrong, but attempt
5026 to work around it for now. */
5027 if (size < 0)
5028 {
5029 size = int_size_in_bytes (TREE_TYPE (object));
5030 if (size >= 0)
5031 TREE_TYPE (ctor) = type = TREE_TYPE (object);
5032 }
5033
5034 /* Find the maximum alignment we can assume for the object. */
5035 /* ??? Make use of DECL_OFFSET_ALIGN. */
5036 if (DECL_P (object))
5037 align = DECL_ALIGN (object);
5038 else
5039 align = TYPE_ALIGN (type);
5040
5041 /* Do a block move either if the size is so small as to make
5042 each individual move a sub-unit move on average, or if it
5043 is so large as to make individual moves inefficient. */
5044 if (size > 0
5045 && num_nonzero_elements > 1
5046 /* For ctors that have many repeated nonzero elements
5047 represented through RANGE_EXPRs, prefer initializing
5048 those through runtime loops over copies of large amounts
5049 of data from readonly data section. */
5050 && (num_unique_nonzero_elements
5051 > num_nonzero_elements / unique_nonzero_ratio
5052 || size <= min_unique_size)
5053 && (size < num_nonzero_elements
5054 || !can_move_by_pieces (size, align)))
5055 {
5056 if (notify_temp_creation)
5057 return GS_ERROR;
5058
5059 walk_tree (&ctor, force_labels_r, NULL, NULL);
5060 ctor = tree_output_constant_def (ctor);
5061 if (!useless_type_conversion_p (type, TREE_TYPE (ctor)))
5062 ctor = build1 (VIEW_CONVERT_EXPR, type, ctor);
5063 TREE_OPERAND (*expr_p, 1) = ctor;
5064
5065 /* This is no longer an assignment of a CONSTRUCTOR, but
5066 we still may have processing to do on the LHS. So
5067 pretend we didn't do anything here to let that happen. */
5068 return GS_UNHANDLED;
5069 }
5070 }
5071
5072 /* If a single access to the target must be ensured and there are
5073 nonzero elements or the zero elements are not assigned en masse,
5074 initialize the target from a temporary. */
5075 if (ensure_single_access && (num_nonzero_elements > 0 || !cleared))
5076 {
5077 if (notify_temp_creation)
5078 return GS_ERROR;
5079
5080 tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type));
5081 TREE_OPERAND (*expr_p, 0) = temp;
5082 *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
5083 *expr_p,
5084 build2 (MODIFY_EXPR, void_type_node,
5085 object, temp));
5086 return GS_OK;
5087 }
5088
5089 if (notify_temp_creation)
5090 return GS_OK;
5091
5092 /* If there are nonzero elements and if needed, pre-evaluate to capture
5093 elements overlapping with the lhs into temporaries. We must do this
5094 before clearing to fetch the values before they are zeroed-out. */
5095 if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR)
5096 {
5097 preeval_data.lhs_base_decl = get_base_address (object);
5098 if (!DECL_P (preeval_data.lhs_base_decl))
5099 preeval_data.lhs_base_decl = NULL;
5100 preeval_data.lhs_alias_set = get_alias_set (object);
5101
5102 gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
5103 pre_p, post_p, &preeval_data);
5104 }
5105
5106 bool ctor_has_side_effects_p
5107 = TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p, 1));
5108
5109 if (cleared)
5110 {
5111 /* Zap the CONSTRUCTOR element list, which simplifies this case.
5112 Note that we still have to gimplify, in order to handle the
5113 case of variable sized types. Avoid shared tree structures. */
5114 CONSTRUCTOR_ELTS (ctor) = NULL;
5115 TREE_SIDE_EFFECTS (ctor) = 0;
5116 object = unshare_expr (object);
5117 gimplify_stmt (expr_p, pre_p);
5118 }
5119
5120 /* If we have not block cleared the object, or if there are nonzero
5121 elements in the constructor, or if the constructor has side effects,
5122 add assignments to the individual scalar fields of the object. */
5123 if (!cleared
5124 || num_nonzero_elements > 0
5125 || ctor_has_side_effects_p)
5126 gimplify_init_ctor_eval (object, elts, pre_p, cleared);
5127
5128 *expr_p = NULL_TREE;
5129 }
5130 break;
5131
5132 case COMPLEX_TYPE:
5133 {
5134 tree r, i;
5135
5136 if (notify_temp_creation)
5137 return GS_OK;
5138
5139 /* Extract the real and imaginary parts out of the ctor. */
5140 gcc_assert (elts->length () == 2);
5141 r = (*elts)[0].value;
5142 i = (*elts)[1].value;
5143 if (r == NULL || i == NULL)
5144 {
5145 tree zero = build_zero_cst (TREE_TYPE (type));
5146 if (r == NULL)
5147 r = zero;
5148 if (i == NULL)
5149 i = zero;
5150 }
5151
5152 /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
5153 represent creation of a complex value. */
5154 if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
5155 {
5156 ctor = build_complex (type, r, i);
5157 TREE_OPERAND (*expr_p, 1) = ctor;
5158 }
5159 else
5160 {
5161 ctor = build2 (COMPLEX_EXPR, type, r, i);
5162 TREE_OPERAND (*expr_p, 1) = ctor;
5163 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
5164 pre_p,
5165 post_p,
5166 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
5167 fb_rvalue);
5168 }
5169 }
5170 break;
5171
5172 case VECTOR_TYPE:
5173 {
5174 unsigned HOST_WIDE_INT ix;
5175 constructor_elt *ce;
5176
5177 if (notify_temp_creation)
5178 return GS_OK;
5179
5180 /* Go ahead and simplify constant constructors to VECTOR_CST. */
5181 if (TREE_CONSTANT (ctor))
5182 {
5183 bool constant_p = true;
5184 tree value;
5185
5186 /* Even when ctor is constant, it might contain non-*_CST
5187 elements, such as addresses or trapping values like
5188 1.0/0.0 - 1.0/0.0. Such expressions don't belong
5189 in VECTOR_CST nodes. */
5190 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
5191 if (!CONSTANT_CLASS_P (value))
5192 {
5193 constant_p = false;
5194 break;
5195 }
5196
5197 if (constant_p)
5198 {
5199 TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
5200 break;
5201 }
5202
5203 TREE_CONSTANT (ctor) = 0;
5204 }
5205
5206 /* Vector types use CONSTRUCTOR all the way through gimple
5207 compilation as a general initializer. */
5208 FOR_EACH_VEC_SAFE_ELT (elts, ix, ce)
5209 {
5210 enum gimplify_status tret;
5211 tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
5212 fb_rvalue);
5213 if (tret == GS_ERROR)
5214 ret = GS_ERROR;
5215 else if (TREE_STATIC (ctor)
5216 && !initializer_constant_valid_p (ce->value,
5217 TREE_TYPE (ce->value)))
5218 TREE_STATIC (ctor) = 0;
5219 }
5220 if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
5221 TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
5222 }
5223 break;
5224
5225 default:
5226 /* So how did we get a CONSTRUCTOR for a scalar type? */
5227 gcc_unreachable ();
5228 }
5229
5230 if (ret == GS_ERROR)
5231 return GS_ERROR;
5232 /* If we have gimplified both sides of the initializer but have
5233 not emitted an assignment, do so now. */
5234 if (*expr_p)
5235 {
5236 tree lhs = TREE_OPERAND (*expr_p, 0);
5237 tree rhs = TREE_OPERAND (*expr_p, 1);
5238 if (want_value && object == lhs)
5239 lhs = unshare_expr (lhs);
5240 gassign *init = gimple_build_assign (lhs, rhs);
5241 gimplify_seq_add_stmt (pre_p, init);
5242 }
5243 if (want_value)
5244 {
5245 *expr_p = object;
5246 return GS_OK;
5247 }
5248 else
5249 {
5250 *expr_p = NULL;
5251 return GS_ALL_DONE;
5252 }
5253 }
5254
5255 /* Given a pointer value OP0, return a simplified version of an
5256 indirection through OP0, or NULL_TREE if no simplification is
5257 possible. This may only be applied to a rhs of an expression.
5258 Note that the resulting type may be different from the type pointed
5259 to in the sense that it is still compatible from the langhooks
5260 point of view. */
5261
5262 static tree
5263 gimple_fold_indirect_ref_rhs (tree t)
5264 {
5265 return gimple_fold_indirect_ref (t);
5266 }
5267
5268 /* Subroutine of gimplify_modify_expr to do simplifications of
5269 MODIFY_EXPRs based on the code of the RHS. We loop for as long as
5270 something changes. */
5271
5272 static enum gimplify_status
5273 gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
5274 gimple_seq *pre_p, gimple_seq *post_p,
5275 bool want_value)
5276 {
5277 enum gimplify_status ret = GS_UNHANDLED;
5278 bool changed;
5279
5280 do
5281 {
5282 changed = false;
5283 switch (TREE_CODE (*from_p))
5284 {
5285 case VAR_DECL:
5286 /* If we're assigning from a read-only variable initialized with
5287 a constructor and not volatile, do the direct assignment from
5288 the constructor, but only if the target is not volatile either
5289 since this latter assignment might end up being done on a per
5290 field basis. However, if the target is volatile and the type
5291 is aggregate and non-addressable, gimplify_init_constructor
5292 knows that it needs to ensure a single access to the target
5293 and it will return GS_OK only in this case. */
5294 if (TREE_READONLY (*from_p)
5295 && DECL_INITIAL (*from_p)
5296 && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR
5297 && !TREE_THIS_VOLATILE (*from_p)
5298 && (!TREE_THIS_VOLATILE (*to_p)
5299 || (AGGREGATE_TYPE_P (TREE_TYPE (*to_p))
5300 && !TREE_ADDRESSABLE (TREE_TYPE (*to_p)))))
5301 {
5302 tree old_from = *from_p;
5303 enum gimplify_status subret;
5304
5305 /* Move the constructor into the RHS. */
5306 *from_p = unshare_expr (DECL_INITIAL (*from_p));
5307
5308 /* Let's see if gimplify_init_constructor will need to put
5309 it in memory. */
5310 subret = gimplify_init_constructor (expr_p, NULL, NULL,
5311 false, true);
5312 if (subret == GS_ERROR)
5313 {
5314 /* If so, revert the change. */
5315 *from_p = old_from;
5316 }
5317 else
5318 {
5319 ret = GS_OK;
5320 changed = true;
5321 }
5322 }
5323 break;
5324 case INDIRECT_REF:
5325 {
5326 /* If we have code like
5327
5328 *(const A*)(A*)&x
5329
5330 where the type of "x" is a (possibly cv-qualified variant
5331 of "A"), treat the entire expression as identical to "x".
5332 This kind of code arises in C++ when an object is bound
5333 to a const reference, and if "x" is a TARGET_EXPR we want
5334 to take advantage of the optimization below. */
5335 bool volatile_p = TREE_THIS_VOLATILE (*from_p);
5336 tree t = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
5337 if (t)
5338 {
5339 if (TREE_THIS_VOLATILE (t) != volatile_p)
5340 {
5341 if (DECL_P (t))
5342 t = build_simple_mem_ref_loc (EXPR_LOCATION (*from_p),
5343 build_fold_addr_expr (t));
5344 if (REFERENCE_CLASS_P (t))
5345 TREE_THIS_VOLATILE (t) = volatile_p;
5346 }
5347 *from_p = t;
5348 ret = GS_OK;
5349 changed = true;
5350 }
5351 break;
5352 }
5353
5354 case TARGET_EXPR:
5355 {
5356 /* If we are initializing something from a TARGET_EXPR, strip the
5357 TARGET_EXPR and initialize it directly, if possible. This can't
5358 be done if the initializer is void, since that implies that the
5359 temporary is set in some non-trivial way.
5360
5361 ??? What about code that pulls out the temp and uses it
5362 elsewhere? I think that such code never uses the TARGET_EXPR as
5363 an initializer. If I'm wrong, we'll die because the temp won't
5364 have any RTL. In that case, I guess we'll need to replace
5365 references somehow. */
5366 tree init = TARGET_EXPR_INITIAL (*from_p);
5367
5368 if (init
5369 && (TREE_CODE (*expr_p) != MODIFY_EXPR
5370 || !TARGET_EXPR_NO_ELIDE (*from_p))
5371 && !VOID_TYPE_P (TREE_TYPE (init)))
5372 {
5373 *from_p = init;
5374 ret = GS_OK;
5375 changed = true;
5376 }
5377 }
5378 break;
5379
5380 case COMPOUND_EXPR:
5381 /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
5382 caught. */
5383 gimplify_compound_expr (from_p, pre_p, true);
5384 ret = GS_OK;
5385 changed = true;
5386 break;
5387
5388 case CONSTRUCTOR:
5389 /* If we already made some changes, let the front end have a
5390 crack at this before we break it down. */
5391 if (ret != GS_UNHANDLED)
5392 break;
5393 /* If we're initializing from a CONSTRUCTOR, break this into
5394 individual MODIFY_EXPRs. */
5395 return gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
5396 false);
5397
5398 case COND_EXPR:
5399 /* If we're assigning to a non-register type, push the assignment
5400 down into the branches. This is mandatory for ADDRESSABLE types,
5401 since we cannot generate temporaries for such, but it saves a
5402 copy in other cases as well. */
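/* E.g. for an aggregate "a", the assignment "a = b ? c : d;" is
   rewritten here as, roughly, "if (b) a = c; else a = d;".  */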
5403 if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
5404 {
5405 /* This code should mirror the code in gimplify_cond_expr. */
5406 enum tree_code code = TREE_CODE (*expr_p);
5407 tree cond = *from_p;
5408 tree result = *to_p;
5409
5410 ret = gimplify_expr (&result, pre_p, post_p,
5411 is_gimple_lvalue, fb_lvalue);
5412 if (ret != GS_ERROR)
5413 ret = GS_OK;
5414
5415 /* If we are going to write RESULT more than once, clear
5416 TREE_READONLY flag, otherwise we might incorrectly promote
5417 the variable to static const and initialize it at compile
5418 time in one of the branches. */
5419 if (VAR_P (result)
5420 && TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node
5421 && TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
5422 TREE_READONLY (result) = 0;
5423 if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
5424 TREE_OPERAND (cond, 1)
5425 = build2 (code, void_type_node, result,
5426 TREE_OPERAND (cond, 1));
5427 if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
5428 TREE_OPERAND (cond, 2)
5429 = build2 (code, void_type_node, unshare_expr (result),
5430 TREE_OPERAND (cond, 2));
5431
5432 TREE_TYPE (cond) = void_type_node;
5433 recalculate_side_effects (cond);
5434
5435 if (want_value)
5436 {
5437 gimplify_and_add (cond, pre_p);
5438 *expr_p = unshare_expr (result);
5439 }
5440 else
5441 *expr_p = cond;
5442 return ret;
5443 }
5444 break;
5445
5446 case CALL_EXPR:
5447 /* For calls that return in memory, give *to_p as the CALL_EXPR's
5448 return slot so that we don't generate a temporary. */
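/* I.e. for "x = foo ()" where foo returns its aggregate result in
   memory, the call may be marked to construct its value directly
   into "x" rather than into a temporary that is then copied.  */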
5449 if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
5450 && aggregate_value_p (*from_p, *from_p))
5451 {
5452 bool use_target;
5453
5454 if (!(rhs_predicate_for (*to_p))(*from_p))
5455 /* If we need a temporary, *to_p isn't accurate. */
5456 use_target = false;
5457 /* It's OK to use the return slot directly unless it's an NRV. */
5458 else if (TREE_CODE (*to_p) == RESULT_DECL
5459 && DECL_NAME (*to_p) == NULL_TREE
5460 && needs_to_live_in_memory (*to_p))
5461 use_target = true;
5462 else if (is_gimple_reg_type (TREE_TYPE (*to_p))
5463 || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
5464 /* Don't force regs into memory. */
5465 use_target = false;
5466 else if (TREE_CODE (*expr_p) == INIT_EXPR)
5467 /* It's OK to use the target directly if it's being
5468 initialized. */
5469 use_target = true;
5470 else if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (*to_p)))
5471 != INTEGER_CST)
5472 /* Always use the target and thus RSO for variable-sized types.
5473 GIMPLE cannot deal with a variable-sized assignment
5474 embedded in a call statement. */
5475 use_target = true;
5476 else if (TREE_CODE (*to_p) != SSA_NAME
5477 && (!is_gimple_variable (*to_p)
5478 || needs_to_live_in_memory (*to_p)))
5479 /* Don't use the original target if it's already addressable;
5480 if its address escapes, and the called function uses the
5481 NRV optimization, a conforming program could see *to_p
5482 change before the called function returns; see c++/19317.
5483 When optimizing, the return_slot pass marks more functions
5484 as safe after we have escape info. */
5485 use_target = false;
5486 else
5487 use_target = true;
5488
5489 if (use_target)
5490 {
5491 CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
5492 mark_addressable (*to_p);
5493 }
5494 }
5495 break;
5496
5497 case WITH_SIZE_EXPR:
5498 /* Likewise for calls that return an aggregate of non-constant size,
5499 since we would not be able to generate a temporary at all. */
5500 if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR)
5501 {
5502 *from_p = TREE_OPERAND (*from_p, 0);
5503 /* We don't change ret in this case because the
5504 WITH_SIZE_EXPR might have been added in
5505 gimplify_modify_expr, so returning GS_OK would lead to an
5506 infinite loop. */
5507 changed = true;
5508 }
5509 break;
5510
5511 /* If we're initializing from a container, push the initialization
5512 inside it. */
5513 case CLEANUP_POINT_EXPR:
5514 case BIND_EXPR:
5515 case STATEMENT_LIST:
5516 {
5517 tree wrap = *from_p;
5518 tree t;
5519
5520 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
5521 fb_lvalue);
5522 if (ret != GS_ERROR)
5523 ret = GS_OK;
5524
5525 t = voidify_wrapper_expr (wrap, *expr_p);
5526 gcc_assert (t == *expr_p);
5527
5528 if (want_value)
5529 {
5530 gimplify_and_add (wrap, pre_p);
5531 *expr_p = unshare_expr (*to_p);
5532 }
5533 else
5534 *expr_p = wrap;
5535 return GS_OK;
5536 }
5537
5538 case NOP_EXPR:
5539 /* Pull out compound literal expressions from a NOP_EXPR.
5540 Those are created in the C FE to drop qualifiers during
5541 lvalue conversion. */
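/* E.g. for "s = (const struct S) { 1 };" the C FE wraps the literal
   in a qualifier-dropping NOP_EXPR; stripping the useless conversion
   lets the COMPOUND_LITERAL_EXPR case below see the literal.  */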
5542 if ((TREE_CODE (TREE_OPERAND (*from_p, 0)) == COMPOUND_LITERAL_EXPR)
5543 && tree_ssa_useless_type_conversion (*from_p))
5544 {
5545 *from_p = TREE_OPERAND (*from_p, 0);
5546 ret = GS_OK;
5547 changed = true;
5548 }
5549 break;
5550
5551 case COMPOUND_LITERAL_EXPR:
5552 {
5553 tree complit = TREE_OPERAND (*expr_p, 1);
5554 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
5555 tree decl = DECL_EXPR_DECL (decl_s);
5556 tree init = DECL_INITIAL (decl);
5557
5558 /* struct T x = (struct T) { 0, 1, 2 } can be optimized
5559 into struct T x = { 0, 1, 2 } if the address of the
5560 compound literal has never been taken. */
5561 if (!TREE_ADDRESSABLE (complit)
5562 && !TREE_ADDRESSABLE (decl)
5563 && init)
5564 {
5565 *expr_p = copy_node (*expr_p);
5566 TREE_OPERAND (*expr_p, 1) = init;
5567 return GS_OK;
5568 }
5569 }
5570
5571 default:
5572 break;
5573 }
5574 }
5575 while (changed);
5576
5577 return ret;
5578 }
5579
5580
5581 /* Return true if T looks like a valid GIMPLE statement. */
5582
5583 static bool
5584 is_gimple_stmt (tree t)
5585 {
5586 const enum tree_code code = TREE_CODE (t);
5587
5588 switch (code)
5589 {
5590 case NOP_EXPR:
5591 /* The only valid NOP_EXPR is the empty statement. */
5592 return IS_EMPTY_STMT (t);
5593
5594 case BIND_EXPR:
5595 case COND_EXPR:
5596 /* These are only valid if they're void. */
5597 return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));
5598
5599 case SWITCH_EXPR:
5600 case GOTO_EXPR:
5601 case RETURN_EXPR:
5602 case LABEL_EXPR:
5603 case CASE_LABEL_EXPR:
5604 case TRY_CATCH_EXPR:
5605 case TRY_FINALLY_EXPR:
5606 case EH_FILTER_EXPR:
5607 case CATCH_EXPR:
5608 case ASM_EXPR:
5609 case STATEMENT_LIST:
5610 case OACC_PARALLEL:
5611 case OACC_KERNELS:
5612 case OACC_SERIAL:
5613 case OACC_DATA:
5614 case OACC_HOST_DATA:
5615 case OACC_DECLARE:
5616 case OACC_UPDATE:
5617 case OACC_ENTER_DATA:
5618 case OACC_EXIT_DATA:
5619 case OACC_CACHE:
5620 case OMP_PARALLEL:
5621 case OMP_FOR:
5622 case OMP_SIMD:
5623 case OMP_DISTRIBUTE:
5624 case OMP_LOOP:
5625 case OACC_LOOP:
5626 case OMP_SCAN:
5627 case OMP_SECTIONS:
5628 case OMP_SECTION:
5629 case OMP_SINGLE:
5630 case OMP_MASTER:
5631 case OMP_TASKGROUP:
5632 case OMP_ORDERED:
5633 case OMP_CRITICAL:
5634 case OMP_TASK:
5635 case OMP_TARGET:
5636 case OMP_TARGET_DATA:
5637 case OMP_TARGET_UPDATE:
5638 case OMP_TARGET_ENTER_DATA:
5639 case OMP_TARGET_EXIT_DATA:
5640 case OMP_TASKLOOP:
5641 case OMP_TEAMS:
5642 /* These are always void. */
5643 return true;
5644
5645 case CALL_EXPR:
5646 case MODIFY_EXPR:
5647 case PREDICT_EXPR:
5648 /* These are valid regardless of their type. */
5649 return true;
5650
5651 default:
5652 return false;
5653 }
5654 }
5655
5656
5657 /* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
5658 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a gimple register.
5659
5660 IMPORTANT NOTE: This promotion is performed by introducing a load of the
5661 other, unmodified part of the complex object just before the total store.
5662 As a consequence, if the object is still uninitialized, an undefined value
5663 will be loaded into a register, which may result in a spurious exception
5664 if the register is floating-point and the value happens to be a signaling
5665 NaN for example. Then the fully-fledged complex operations lowering pass
5666 followed by a DCE pass are necessary in order to fix things up. */
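/* As a sketch, for a non-addressable _Complex variable "z", the
   partial store

     __real z = r;

   is rewritten here as, roughly,

     tmp = __imag z;
     z = COMPLEX_EXPR <r, tmp>;

   where "tmp" stands for the formal temporary holding the unmodified
   part.  */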
5667
5668 static enum gimplify_status
5669 gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
5670 bool want_value)
5671 {
5672 enum tree_code code, ocode;
5673 tree lhs, rhs, new_rhs, other, realpart, imagpart;
5674
5675 lhs = TREE_OPERAND (*expr_p, 0);
5676 rhs = TREE_OPERAND (*expr_p, 1);
5677 code = TREE_CODE (lhs);
5678 lhs = TREE_OPERAND (lhs, 0);
5679
5680 ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
5681 other = build1 (ocode, TREE_TYPE (rhs), lhs);
5682 TREE_NO_WARNING (other) = 1;
5683 other = get_formal_tmp_var (other, pre_p);
5684
5685 realpart = code == REALPART_EXPR ? rhs : other;
5686 imagpart = code == REALPART_EXPR ? other : rhs;
5687
5688 if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
5689 new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
5690 else
5691 new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
5692
5693 gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
5694 *expr_p = (want_value) ? rhs : NULL_TREE;
5695
5696 return GS_ALL_DONE;
5697 }
5698
5699 /* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
5700
5701 modify_expr
5702 : varname '=' rhs
5703 | '*' ID '=' rhs
5704
5705 PRE_P points to the list where side effects that must happen before
5706 *EXPR_P should be stored.
5707
5708 POST_P points to the list where side effects that must happen after
5709 *EXPR_P should be stored.
5710
5711 WANT_VALUE is nonzero iff we want to use the value of this expression
5712 in another expression. */
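/* For example, gimplifying "x = y + z" ends up emitting the tuple
   GIMPLE_ASSIGN <x, y + z> onto PRE_P; if WANT_VALUE is set, *EXPR_P
   is then replaced by "x" so the enclosing expression can reuse the
   stored value.  */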
5713
5714 static enum gimplify_status
5715 gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
5716 bool want_value)
5717 {
5718 tree *from_p = &TREE_OPERAND (*expr_p, 1);
5719 tree *to_p = &TREE_OPERAND (*expr_p, 0);
5720 enum gimplify_status ret = GS_UNHANDLED;
5721 gimple *assign;
5722 location_t loc = EXPR_LOCATION (*expr_p);
5723 gimple_stmt_iterator gsi;
5724
5725 gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
5726 || TREE_CODE (*expr_p) == INIT_EXPR);
5727
5728 /* Trying to simplify a clobber using normal logic doesn't work,
5729 so handle it here. */
5730 if (TREE_CLOBBER_P (*from_p))
5731 {
5732 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
5733 if (ret == GS_ERROR)
5734 return ret;
5735 gcc_assert (!want_value);
5736 if (!VAR_P (*to_p) && TREE_CODE (*to_p) != MEM_REF)
5737 {
5738 tree addr = get_initialized_tmp_var (build_fold_addr_expr (*to_p),
5739 pre_p, post_p);
5740 *to_p = build_simple_mem_ref_loc (EXPR_LOCATION (*to_p), addr);
5741 }
5742 gimplify_seq_add_stmt (pre_p, gimple_build_assign (*to_p, *from_p));
5743 *expr_p = NULL;
5744 return GS_ALL_DONE;
5745 }
5746
5747 /* Insert pointer conversions required by the middle-end that are not
5748 required by the frontend. This fixes middle-end type checking
5749 for, for example, gcc.dg/redecl-6.c. */
5750 if (POINTER_TYPE_P (TREE_TYPE (*to_p)))
5751 {
5752 STRIP_USELESS_TYPE_CONVERSION (*from_p);
5753 if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
5754 *from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
5755 }
5756
5757 /* See if any simplifications can be done based on what the RHS is. */
5758 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
5759 want_value);
5760 if (ret != GS_UNHANDLED)
5761 return ret;
5762
5763 /* For zero-sized types, only gimplify the left-hand side and right-hand
5764 side as statements and throw away the assignment. Do this after
5765 gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
5766 types properly. */
5767 if (zero_sized_type (TREE_TYPE (*from_p))
5768 && !want_value
5769 /* Don't do this for calls that return addressable types, expand_call
5770 relies on those having a lhs. */
5771 && !(TREE_ADDRESSABLE (TREE_TYPE (*from_p))
5772 && TREE_CODE (*from_p) == CALL_EXPR))
5773 {
5774 gimplify_stmt (from_p, pre_p);
5775 gimplify_stmt (to_p, pre_p);
5776 *expr_p = NULL_TREE;
5777 return GS_ALL_DONE;
5778 }
5779
5780 /* If the value being copied is of variable width, compute the length
5781 of the copy into a WITH_SIZE_EXPR. Note that we need to do this
5782 before gimplifying any of the operands so that we can resolve any
5783 PLACEHOLDER_EXPRs in the size. Also note that the RTL expander uses
5784 the size of the expression to be copied, not of the destination, so
5785 that is what we must do here. */
5786 maybe_with_size_expr (from_p);
5787
5788 /* As a special case, we have to temporarily allow for assignments
5789 with a CALL_EXPR on the RHS. Since in GIMPLE a function call is
5790 a toplevel statement, when gimplifying the GENERIC expression
5791 MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
5792 GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.
5793
5794 Instead, we need to create the tuple GIMPLE_CALL <a, foo>. To
5795 prevent gimplify_expr from trying to create a new temporary for
5796 foo's LHS, we tell it that it should only gimplify until it
5797 reaches the CALL_EXPR. On return from gimplify_expr, the newly
5798 created GIMPLE_CALL <foo> will be the last statement in *PRE_P
5799 and all we need to do here is set 'a' to be its LHS. */
5800
5801 /* Gimplify the RHS first for C++17 and bug 71104. */
5802 gimple_predicate initial_pred = initial_rhs_predicate_for (*to_p);
5803 ret = gimplify_expr (from_p, pre_p, post_p, initial_pred, fb_rvalue);
5804 if (ret == GS_ERROR)
5805 return ret;
5806
5807 /* Then gimplify the LHS. */
5808 /* If we gimplified the RHS to a CALL_EXPR and that call may return
5809 twice we have to make sure to gimplify into non-SSA as otherwise
5810 the abnormal edge added later will make those defs not dominate
5811 their uses.
5812 ??? Technically this applies only to the registers used in the
5813 resulting non-register *TO_P. */
5814 bool saved_into_ssa = gimplify_ctxp->into_ssa;
5815 if (saved_into_ssa
5816 && TREE_CODE (*from_p) == CALL_EXPR
5817 && call_expr_flags (*from_p) & ECF_RETURNS_TWICE)
5818 gimplify_ctxp->into_ssa = false;
5819 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
5820 gimplify_ctxp->into_ssa = saved_into_ssa;
5821 if (ret == GS_ERROR)
5822 return ret;
5823
5824 /* Now that the LHS is gimplified, re-gimplify the RHS if our initial
5825 guess for the predicate was wrong. */
5826 gimple_predicate final_pred = rhs_predicate_for (*to_p);
5827 if (final_pred != initial_pred)
5828 {
5829 ret = gimplify_expr (from_p, pre_p, post_p, final_pred, fb_rvalue);
5830 if (ret == GS_ERROR)
5831 return ret;
5832 }
5833
5834 /* In case of a va_arg internal fn wrapped in a WITH_SIZE_EXPR, add the type
5835 size as argument to the call. */
5836 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
5837 {
5838 tree call = TREE_OPERAND (*from_p, 0);
5839 tree vlasize = TREE_OPERAND (*from_p, 1);
5840
5841 if (TREE_CODE (call) == CALL_EXPR
5842 && CALL_EXPR_IFN (call) == IFN_VA_ARG)
5843 {
5844 int nargs = call_expr_nargs (call);
5845 tree type = TREE_TYPE (call);
5846 tree ap = CALL_EXPR_ARG (call, 0);
5847 tree tag = CALL_EXPR_ARG (call, 1);
5848 tree aptag = CALL_EXPR_ARG (call, 2);
5849 tree newcall = build_call_expr_internal_loc (EXPR_LOCATION (call),
5850 IFN_VA_ARG, type,
5851 nargs + 1, ap, tag,
5852 aptag, vlasize);
5853 TREE_OPERAND (*from_p, 0) = newcall;
5854 }
5855 }
5856
5857 /* Now see if the above changed *from_p to something we handle specially. */
5858 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
5859 want_value);
5860 if (ret != GS_UNHANDLED)
5861 return ret;
5862
5863 /* If we've got a variable-sized assignment between two lvalues (i.e. one
5864 that does not involve a call), then we can make things a bit more
5865 straightforward by converting the assignment to memcpy or memset. */
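/* E.g. a variable-sized "a = b" between addressable lvalues becomes,
   roughly, a call "memcpy (&a, &b, size)", while initialization from
   a CONSTRUCTOR is turned into a memset of the destination.  */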
5866 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
5867 {
5868 tree from = TREE_OPERAND (*from_p, 0);
5869 tree size = TREE_OPERAND (*from_p, 1);
5870
5871 if (TREE_CODE (from) == CONSTRUCTOR)
5872 return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);
5873
5874 if (is_gimple_addressable (from))
5875 {
5876 *from_p = from;
5877 return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
5878 pre_p);
5879 }
5880 }
5881
5882 /* Transform partial stores to non-addressable complex variables into
5883 total stores. This allows us to use real instead of virtual operands
5884 for these variables, which improves optimization. */
5885 if ((TREE_CODE (*to_p) == REALPART_EXPR
5886 || TREE_CODE (*to_p) == IMAGPART_EXPR)
5887 && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
5888 return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);
5889
5890 /* Try to alleviate the effects of the gimplification creating artificial
5891 temporaries (see for example is_gimple_reg_rhs) on the debug info, but
5892 make sure not to create DECL_DEBUG_EXPR links across functions. */
5893 if (!gimplify_ctxp->into_ssa
5894 && VAR_P (*from_p)
5895 && DECL_IGNORED_P (*from_p)
5896 && DECL_P (*to_p)
5897 && !DECL_IGNORED_P (*to_p)
5898 && decl_function_context (*to_p) == current_function_decl
5899 && decl_function_context (*from_p) == current_function_decl)
5900 {
5901 if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
5902 DECL_NAME (*from_p)
5903 = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
5904 DECL_HAS_DEBUG_EXPR_P (*from_p) = 1;
5905 SET_DECL_DEBUG_EXPR (*from_p, *to_p);
5906 }
5907
5908 if (want_value && TREE_THIS_VOLATILE (*to_p))
5909 *from_p = get_initialized_tmp_var (*from_p, pre_p, post_p);
5910
5911 if (TREE_CODE (*from_p) == CALL_EXPR)
5912 {
5913 /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
5914 instead of a GIMPLE_ASSIGN. */
5915 gcall *call_stmt;
5916 if (CALL_EXPR_FN (*from_p) == NULL_TREE)
5917 {
5918 /* Gimplify internal functions created in the FEs. */
5919 int nargs = call_expr_nargs (*from_p), i;
5920 enum internal_fn ifn = CALL_EXPR_IFN (*from_p);
5921 auto_vec<tree> vargs (nargs);
5922
5923 for (i = 0; i < nargs; i++)
5924 {
5925 gimplify_arg (&CALL_EXPR_ARG (*from_p, i), pre_p,
5926 EXPR_LOCATION (*from_p));
5927 vargs.quick_push (CALL_EXPR_ARG (*from_p, i));
5928 }
5929 call_stmt = gimple_build_call_internal_vec (ifn, vargs);
5930 gimple_call_set_nothrow (call_stmt, TREE_NOTHROW (*from_p));
5931 gimple_set_location (call_stmt, EXPR_LOCATION (*expr_p));
5932 }
5933 else
5934 {
5935 tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*from_p));
5936 CALL_EXPR_FN (*from_p) = TREE_OPERAND (CALL_EXPR_FN (*from_p), 0);
5937 STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p));
5938 tree fndecl = get_callee_fndecl (*from_p);
5939 if (fndecl
5940 && fndecl_built_in_p (fndecl, BUILT_IN_EXPECT)
5941 && call_expr_nargs (*from_p) == 3)
5942 call_stmt = gimple_build_call_internal (IFN_BUILTIN_EXPECT, 3,
5943 CALL_EXPR_ARG (*from_p, 0),
5944 CALL_EXPR_ARG (*from_p, 1),
5945 CALL_EXPR_ARG (*from_p, 2));
5946 else
5947 {
5948 call_stmt = gimple_build_call_from_tree (*from_p, fnptrtype);
5949 }
5950 }
5951 notice_special_calls (call_stmt);
5952 if (!gimple_call_noreturn_p (call_stmt) || !should_remove_lhs_p (*to_p))
5953 gimple_call_set_lhs (call_stmt, *to_p);
5954 else if (TREE_CODE (*to_p) == SSA_NAME)
5955 /* The above is somewhat premature, avoid ICEing later for an
5956 SSA name without a definition. We may have uses in the GIMPLE IL.
5957 ??? This doesn't make it a default-def. */
5958 SSA_NAME_DEF_STMT (*to_p) = gimple_build_nop ();
5959
5960 assign = call_stmt;
5961 }
5962 else
5963 {
5964 assign = gimple_build_assign (*to_p, *from_p);
5965 gimple_set_location (assign, EXPR_LOCATION (*expr_p));
5966 if (COMPARISON_CLASS_P (*from_p))
5967 gimple_set_no_warning (assign, TREE_NO_WARNING (*from_p));
5968 }
5969
5970 if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
5971 {
5972 /* We should have got an SSA name from the start. */
5973 gcc_assert (TREE_CODE (*to_p) == SSA_NAME
5974 || ! gimple_in_ssa_p (cfun));
5975 }
5976
5977 gimplify_seq_add_stmt (pre_p, assign);
5978 gsi = gsi_last (*pre_p);
5979 maybe_fold_stmt (&gsi);
5980
5981 if (want_value)
5982 {
5983 *expr_p = TREE_THIS_VOLATILE (*to_p) ? *from_p : unshare_expr (*to_p);
5984 return GS_OK;
5985 }
5986 else
5987 *expr_p = NULL;
5988
5989 return GS_ALL_DONE;
5990 }
5991
5992 /* Gimplify a comparison between two variable-sized objects. Do this
5993 with a call to BUILT_IN_MEMCMP. */
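/* E.g. a comparison "a == b" of two variable-length arrays becomes,
   roughly, "memcmp (&a, &b, size) == 0", where "size" is the
   (placeholder-substituted) size in bytes of the first operand.  */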
5994
5995 static enum gimplify_status
5996 gimplify_variable_sized_compare (tree *expr_p)
5997 {
5998 location_t loc = EXPR_LOCATION (*expr_p);
5999 tree op0 = TREE_OPERAND (*expr_p, 0);
6000 tree op1 = TREE_OPERAND (*expr_p, 1);
6001 tree t, arg, dest, src, expr;
6002
6003 arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
6004 arg = unshare_expr (arg);
6005 arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
6006 src = build_fold_addr_expr_loc (loc, op1);
6007 dest = build_fold_addr_expr_loc (loc, op0);
6008 t = builtin_decl_implicit (BUILT_IN_MEMCMP);
6009 t = build_call_expr_loc (loc, t, 3, dest, src, arg);
6010
6011 expr
6012 = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
6013 SET_EXPR_LOCATION (expr, loc);
6014 *expr_p = expr;
6015
6016 return GS_OK;
6017 }
6018
6019 /* Gimplify a comparison between two aggregate objects of integral scalar
6020 mode as a comparison between the bitwise equivalent scalar values. */
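/* E.g. two 8-byte structures whose TYPE_MODE is an integer mode are
   compared as, roughly,

     VIEW_CONVERT_EXPR<uint64> (a) == VIEW_CONVERT_EXPR<uint64> (b)

   using the unsigned integer type that matches the aggregate's mode
   ("uint64" here is illustrative only).  */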
6021
6022 static enum gimplify_status
6023 gimplify_scalar_mode_aggregate_compare (tree *expr_p)
6024 {
6025 location_t loc = EXPR_LOCATION (*expr_p);
6026 tree op0 = TREE_OPERAND (*expr_p, 0);
6027 tree op1 = TREE_OPERAND (*expr_p, 1);
6028
6029 tree type = TREE_TYPE (op0);
6030 tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
6031
6032 op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
6033 op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);
6034
6035 *expr_p
6036 = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
6037
6038 return GS_OK;
6039 }
6040
6041 /* Gimplify an expression sequence. This function gimplifies each
6042 expression and rewrites the original expression with the last
6043 expression of the sequence in GIMPLE form.
6044
6045 PRE_P points to the list where the side effects for all the
6046 expressions in the sequence will be emitted.
6047
6048 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
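/* E.g. gimplifying "(a = 1, b = 2, a + b)" with WANT_VALUE set emits
   "a = 1;" and "b = 2;" onto PRE_P and leaves "a + b" in *EXPR_P for
   the caller to gimplify further.  */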
6049
6050 static enum gimplify_status
6051 gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
6052 {
6053 tree t = *expr_p;
6054
6055 do
6056 {
6057 tree *sub_p = &TREE_OPERAND (t, 0);
6058
6059 if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
6060 gimplify_compound_expr (sub_p, pre_p, false);
6061 else
6062 gimplify_stmt (sub_p, pre_p);
6063
6064 t = TREE_OPERAND (t, 1);
6065 }
6066 while (TREE_CODE (t) == COMPOUND_EXPR);
6067
6068 *expr_p = t;
6069 if (want_value)
6070 return GS_OK;
6071 else
6072 {
6073 gimplify_stmt (expr_p, pre_p);
6074 return GS_ALL_DONE;
6075 }
6076 }
6077
6078 /* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
6079 gimplify. After gimplification, EXPR_P will point to a new temporary
6080 that holds the original value of the SAVE_EXPR node.
6081
6082 PRE_P points to the list where side effects that must happen before
6083 *EXPR_P should be stored. */
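/* E.g. the first time SAVE_EXPR <n * 4> is gimplified, "n * 4" is
   evaluated into a temporary, the SAVE_EXPR is marked resolved, and
   every later occurrence reuses that temporary, guaranteeing a single
   evaluation.  */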
6084
6085 static enum gimplify_status
6086 gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6087 {
6088 enum gimplify_status ret = GS_ALL_DONE;
6089 tree val;
6090
6091 gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
6092 val = TREE_OPERAND (*expr_p, 0);
6093
6094 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
6095 if (!SAVE_EXPR_RESOLVED_P (*expr_p))
6096 {
6097 /* The operand may be a void-valued expression. It is
6098 being executed only for its side-effects. */
6099 if (TREE_TYPE (val) == void_type_node)
6100 {
6101 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
6102 is_gimple_stmt, fb_none);
6103 val = NULL;
6104 }
6105 else
6106 /* The temporary may not be an SSA name as later abnormal and EH
6107 control flow may invalidate use/def domination. When in SSA
6108 form then assume there are no such issues and SAVE_EXPRs only
6109 appear via GENERIC foldings. */
6110 val = get_initialized_tmp_var (val, pre_p, post_p,
6111 gimple_in_ssa_p (cfun));
6112
6113 TREE_OPERAND (*expr_p, 0) = val;
6114 SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
6115 }
6116
6117 *expr_p = val;
6118
6119 return ret;
6120 }
6121
6122 /* Rewrite the ADDR_EXPR node pointed to by EXPR_P
6123
6124 unary_expr
6125 : ...
6126 | '&' varname
6127 ...
6128
6129 PRE_P points to the list where side effects that must happen before
6130 *EXPR_P should be stored.
6131
6132 POST_P points to the list where side effects that must happen after
6133 *EXPR_P should be stored. */
6134
6135 static enum gimplify_status
6136 gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6137 {
6138 tree expr = *expr_p;
6139 tree op0 = TREE_OPERAND (expr, 0);
6140 enum gimplify_status ret;
6141 location_t loc = EXPR_LOCATION (*expr_p);
6142
6143 switch (TREE_CODE (op0))
6144 {
6145 case INDIRECT_REF:
6146 do_indirect_ref:
6147 /* Check if we are dealing with an expression of the form '&*ptr'.
6148 While the front end folds away '&*ptr' into 'ptr', these
6149 expressions may be generated internally by the compiler (e.g.,
6150 builtins like __builtin_va_end). */
6151 /* Caution: the silent array decomposition semantics we allow for
6152 ADDR_EXPR mean we can't always discard the pair. */
6153 /* Gimplification of the ADDR_EXPR operand may drop
6154 cv-qualification conversions, so make sure we add them if
6155 needed. */
6156 {
6157 tree op00 = TREE_OPERAND (op0, 0);
6158 tree t_expr = TREE_TYPE (expr);
6159 tree t_op00 = TREE_TYPE (op00);
6160
6161 if (!useless_type_conversion_p (t_expr, t_op00))
6162 op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
6163 *expr_p = op00;
6164 ret = GS_OK;
6165 }
6166 break;
6167
6168 case VIEW_CONVERT_EXPR:
6169 /* Take the address of our operand and then convert it to the type of
6170 this ADDR_EXPR.
6171
6172 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
6173 all clear. The impact of this transformation is even less clear. */
6174
6175 /* If the operand is a useless conversion, look through it. Doing so
6176 guarantees that the ADDR_EXPR and its operand will remain of the
6177 same type. */
6178 if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
6179 op0 = TREE_OPERAND (op0, 0);
6180
6181 *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
6182 build_fold_addr_expr_loc (loc,
6183 TREE_OPERAND (op0, 0)));
6184 ret = GS_OK;
6185 break;
6186
6187 case MEM_REF:
6188 if (integer_zerop (TREE_OPERAND (op0, 1)))
6189 goto do_indirect_ref;
6190
6191 /* fall through */
6192
6193 default:
6194 /* If we see a call to a declared builtin or see its address
6195 being taken (we can unify those cases here) then we can mark
6196 the builtin for implicit generation by GCC. */
6197 if (TREE_CODE (op0) == FUNCTION_DECL
6198 && fndecl_built_in_p (op0, BUILT_IN_NORMAL)
6199 && builtin_decl_declared_p (DECL_FUNCTION_CODE (op0)))
6200 set_builtin_decl_implicit_p (DECL_FUNCTION_CODE (op0), true);
6201
6202 /* We use fb_either here because the C frontend sometimes takes
6203 the address of a call that returns a struct; see
6204 gcc.dg/c99-array-lval-1.c. The gimplifier will correctly make
6205 the implied temporary explicit. */
6206
6207 /* Make the operand addressable. */
6208 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
6209 is_gimple_addressable, fb_either);
6210 if (ret == GS_ERROR)
6211 break;
6212
6213 /* Then mark it. Beware that it may not be possible to do so directly
6214 if a temporary has been created by the gimplification. */
6215 prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p);
6216
6217 op0 = TREE_OPERAND (expr, 0);
6218
6219 /* For various reasons, the gimplification of the expression
6220 may have made a new INDIRECT_REF. */
6221 if (TREE_CODE (op0) == INDIRECT_REF
6222 || (TREE_CODE (op0) == MEM_REF
6223 && integer_zerop (TREE_OPERAND (op0, 1))))
6224 goto do_indirect_ref;
6225
6226 mark_addressable (TREE_OPERAND (expr, 0));
6227
6228 /* The FEs may end up building ADDR_EXPRs early on a decl with
6229 an incomplete type. Re-build ADDR_EXPRs in canonical form
6230 here. */
6231 if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr))))
6232 *expr_p = build_fold_addr_expr (op0);
6233
6234 /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly. */
6235 recompute_tree_invariant_for_addr_expr (*expr_p);
6236
6237 /* If we re-built the ADDR_EXPR add a conversion to the original type
6238 if required. */
6239 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
6240 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
6241
6242 break;
6243 }
6244
6245 return ret;
6246 }
6247
6248 /* Gimplify the operands of an ASM_EXPR. Input operands should be a gimple
6249 value; output operands should be a gimple lvalue. */
6250
6251 static enum gimplify_status
6252 gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6253 {
6254 tree expr;
6255 int noutputs;
6256 const char **oconstraints;
6257 int i;
6258 tree link;
6259 const char *constraint;
6260 bool allows_mem, allows_reg, is_inout;
6261 enum gimplify_status ret, tret;
6262 gasm *stmt;
6263 vec<tree, va_gc> *inputs;
6264 vec<tree, va_gc> *outputs;
6265 vec<tree, va_gc> *clobbers;
6266 vec<tree, va_gc> *labels;
6267 tree link_next;
6268
6269 expr = *expr_p;
6270 noutputs = list_length (ASM_OUTPUTS (expr));
6271 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
6272
6273 inputs = NULL;
6274 outputs = NULL;
6275 clobbers = NULL;
6276 labels = NULL;
6277
6278 ret = GS_ALL_DONE;
6279 link_next = NULL_TREE;
6280 for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
6281 {
6282 bool ok;
6283 size_t constraint_len;
6284
6285 link_next = TREE_CHAIN (link);
6286
6287 oconstraints[i]
6288 = constraint
6289 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
6290 constraint_len = strlen (constraint);
6291 if (constraint_len == 0)
6292 continue;
6293
6294 ok = parse_output_constraint (&constraint, i, 0, 0,
6295 &allows_mem, &allows_reg, &is_inout);
6296 if (!ok)
6297 {
6298 ret = GS_ERROR;
6299 is_inout = false;
6300 }
6301
6302 /* If we can't make copies, we can only accept memory.
6303 Similarly for VLAs. */
6304 tree outtype = TREE_TYPE (TREE_VALUE (link));
6305 if (outtype != error_mark_node
6306 && (TREE_ADDRESSABLE (outtype)
6307 || !COMPLETE_TYPE_P (outtype)
6308 || !tree_fits_poly_uint64_p (TYPE_SIZE_UNIT (outtype))))
6309 {
6310 if (allows_mem)
6311 allows_reg = 0;
6312 else
6313 {
6314 error ("impossible constraint in %<asm%>");
6315 error ("non-memory output %d must stay in memory", i);
6316 return GS_ERROR;
6317 }
6318 }
6319
6320 if (!allows_reg && allows_mem)
6321 mark_addressable (TREE_VALUE (link));
6322
6323 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6324 is_inout ? is_gimple_min_lval : is_gimple_lvalue,
6325 fb_lvalue | fb_mayfail);
6326 if (tret == GS_ERROR)
6327 {
6328 error ("invalid lvalue in %<asm%> output %d", i);
6329 ret = tret;
6330 }
6331
6332 /* If the constraint does not allow memory make sure we gimplify
6333 it to a register if it is not already but its base is. This
6334 happens for complex and vector components. */
6335 if (!allows_mem)
6336 {
6337 tree op = TREE_VALUE (link);
6338 if (! is_gimple_val (op)
6339 && is_gimple_reg_type (TREE_TYPE (op))
6340 && is_gimple_reg (get_base_address (op)))
6341 {
6342 tree tem = create_tmp_reg (TREE_TYPE (op));
6343 tree ass;
6344 if (is_inout)
6345 {
6346 ass = build2 (MODIFY_EXPR, TREE_TYPE (tem),
6347 tem, unshare_expr (op));
6348 gimplify_and_add (ass, pre_p);
6349 }
6350 ass = build2 (MODIFY_EXPR, TREE_TYPE (tem), op, tem);
6351 gimplify_and_add (ass, post_p);
6352
6353 TREE_VALUE (link) = tem;
6354 tret = GS_OK;
6355 }
6356 }
6357
6358 vec_safe_push (outputs, link);
6359 TREE_CHAIN (link) = NULL_TREE;
6360
6361 if (is_inout)
6362 {
6363 /* An input/output operand. To give the optimizers more
6364 flexibility, split it into separate input and output
6365 operands. */
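/* E.g. the in/out constraint "+r" on output operand 0 is turned
   into the output constraint "=r" plus a new matching input
   constraint "0" referring back to that output.  */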
6366 tree input;
6367 /* Buffer big enough to format a 32-bit UINT_MAX into. */
6368 char buf[11];
6369
6370 /* Turn the in/out constraint into an output constraint. */
6371 char *p = xstrdup (constraint);
6372 p[0] = '=';
6373 TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);
6374
6375 /* And add a matching input constraint. */
6376 if (allows_reg)
6377 {
6378 sprintf (buf, "%u", i);
6379
6380 /* If there are multiple alternatives in the constraint,
6381 handle each of them individually. Those that allow register
6382 will be replaced with operand number, the others will stay
6383 unchanged. */
6384 if (strchr (p, ',') != NULL)
6385 {
6386 size_t len = 0, buflen = strlen (buf);
6387 char *beg, *end, *str, *dst;
6388
6389 for (beg = p + 1;;)
6390 {
6391 end = strchr (beg, ',');
6392 if (end == NULL)
6393 end = strchr (beg, '\0');
6394 if ((size_t) (end - beg) < buflen)
6395 len += buflen + 1;
6396 else
6397 len += end - beg + 1;
6398 if (*end)
6399 beg = end + 1;
6400 else
6401 break;
6402 }
6403
6404 str = (char *) alloca (len);
6405 for (beg = p + 1, dst = str;;)
6406 {
6407 const char *tem;
6408 bool mem_p, reg_p, inout_p;
6409
6410 end = strchr (beg, ',');
6411 if (end)
6412 *end = '\0';
6413 beg[-1] = '=';
6414 tem = beg - 1;
6415 parse_output_constraint (&tem, i, 0, 0,
6416 &mem_p, &reg_p, &inout_p);
6417 if (dst != str)
6418 *dst++ = ',';
6419 if (reg_p)
6420 {
6421 memcpy (dst, buf, buflen);
6422 dst += buflen;
6423 }
6424 else
6425 {
6426 if (end)
6427 len = end - beg;
6428 else
6429 len = strlen (beg);
6430 memcpy (dst, beg, len);
6431 dst += len;
6432 }
6433 if (end)
6434 beg = end + 1;
6435 else
6436 break;
6437 }
6438 *dst = '\0';
6439 input = build_string (dst - str, str);
6440 }
6441 else
6442 input = build_string (strlen (buf), buf);
6443 }
6444 else
6445 input = build_string (constraint_len - 1, constraint + 1);
6446
6447 free (p);
6448
6449 input = build_tree_list (build_tree_list (NULL_TREE, input),
6450 unshare_expr (TREE_VALUE (link)));
6451 ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
6452 }
6453 }
6454
6455 link_next = NULL_TREE;
6456 for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
6457 {
6458 link_next = TREE_CHAIN (link);
6459 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
6460 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
6461 oconstraints, &allows_mem, &allows_reg);
6462
6463 /* If we can't make copies, we can only accept memory. */
6464 tree intype = TREE_TYPE (TREE_VALUE (link));
6465 if (intype != error_mark_node
6466 && (TREE_ADDRESSABLE (intype)
6467 || !COMPLETE_TYPE_P (intype)
6468 || !tree_fits_poly_uint64_p (TYPE_SIZE_UNIT (intype))))
6469 {
6470 if (allows_mem)
6471 allows_reg = 0;
6472 else
6473 {
6474 error ("impossible constraint in %<asm%>");
6475 error ("non-memory input %d must stay in memory", i);
6476 return GS_ERROR;
6477 }
6478 }
6479
6480 /* If the operand is a memory input, it should be an lvalue. */
6481 if (!allows_reg && allows_mem)
6482 {
6483 tree inputv = TREE_VALUE (link);
6484 STRIP_NOPS (inputv);
6485 if (TREE_CODE (inputv) == PREDECREMENT_EXPR
6486 || TREE_CODE (inputv) == PREINCREMENT_EXPR
6487 || TREE_CODE (inputv) == POSTDECREMENT_EXPR
6488 || TREE_CODE (inputv) == POSTINCREMENT_EXPR
6489 || TREE_CODE (inputv) == MODIFY_EXPR)
6490 TREE_VALUE (link) = error_mark_node;
6491 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6492 is_gimple_lvalue, fb_lvalue | fb_mayfail);
6493 if (tret != GS_ERROR)
6494 {
6495 /* Unlike output operands, memory inputs are not guaranteed
6496 to be lvalues by the FE, and while the expressions are
6497 marked addressable there, if the input is e.g. a statement
6498 expression, temporaries in it might not end up being
6499 addressable. They might already be used in the IL, and thus
6500 it is too late to make them addressable now. */
6501 tree x = TREE_VALUE (link);
6502 while (handled_component_p (x))
6503 x = TREE_OPERAND (x, 0);
6504 if (TREE_CODE (x) == MEM_REF
6505 && TREE_CODE (TREE_OPERAND (x, 0)) == ADDR_EXPR)
6506 x = TREE_OPERAND (TREE_OPERAND (x, 0), 0);
6507 if ((VAR_P (x)
6508 || TREE_CODE (x) == PARM_DECL
6509 || TREE_CODE (x) == RESULT_DECL)
6510 && !TREE_ADDRESSABLE (x)
6511 && is_gimple_reg (x))
6512 {
6513 warning_at (EXPR_LOC_OR_LOC (TREE_VALUE (link),
6514 input_location), 0,
6515 "memory input %d is not directly addressable",
6516 i);
6517 prepare_gimple_addressable (&TREE_VALUE (link), pre_p);
6518 }
6519 }
6520 mark_addressable (TREE_VALUE (link));
6521 if (tret == GS_ERROR)
6522 {
6523 error_at (EXPR_LOC_OR_LOC (TREE_VALUE (link), input_location),
6524 "memory input %d is not directly addressable", i);
6525 ret = tret;
6526 }
6527 }
6528 else
6529 {
6530 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6531 is_gimple_asm_val, fb_rvalue);
6532 if (tret == GS_ERROR)
6533 ret = tret;
6534 }
6535
6536 TREE_CHAIN (link) = NULL_TREE;
6537 vec_safe_push (inputs, link);
6538 }
6539
6540 link_next = NULL_TREE;
6541 for (link = ASM_CLOBBERS (expr); link; ++i, link = link_next)
6542 {
6543 link_next = TREE_CHAIN (link);
6544 TREE_CHAIN (link) = NULL_TREE;
6545 vec_safe_push (clobbers, link);
6546 }
6547
6548 link_next = NULL_TREE;
6549 for (link = ASM_LABELS (expr); link; ++i, link = link_next)
6550 {
6551 link_next = TREE_CHAIN (link);
6552 TREE_CHAIN (link) = NULL_TREE;
6553 vec_safe_push (labels, link);
6554 }
6555
6556 /* Do not add ASMs with errors to the gimple IL stream. */
6557 if (ret != GS_ERROR)
6558 {
6559 stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
6560 inputs, outputs, clobbers, labels);
6561
6562 gimple_asm_set_volatile (stmt, ASM_VOLATILE_P (expr) || noutputs == 0);
6563 gimple_asm_set_input (stmt, ASM_INPUT_P (expr));
6564 gimple_asm_set_inline (stmt, ASM_INLINE_P (expr));
6565
6566 gimplify_seq_add_stmt (pre_p, stmt);
6567 }
6568
6569 return ret;
6570 }
6571
6572 /* Gimplify a CLEANUP_POINT_EXPR. Currently this works by adding
6573 GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
6574 gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
6575 return to this function.
6576
6577 FIXME should we complexify the prequeue handling instead? Or use flags
6578 for all the cleanups and let the optimizer tighten them up? The current
6579 code seems pretty fragile; it will break on a cleanup within any
6580 non-conditional nesting. But any such nesting would be broken, anyway;
6581 we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
6582 and continues out of it. We can do that at the RTL level, though, so
6583 having an optimizer to tighten up try/finally regions would be a Good
6584 Thing. */
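/* As a sketch, a body of the form

     stmt1; WITH_CLEANUP_EXPR <cleanup>; stmt2;

   is rewritten below into, roughly,

     stmt1; try { stmt2; } finally { cleanup; }

   (or a TRY_CATCH when the cleanup is EH-only).  */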
6585
6586 static enum gimplify_status
6587 gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
6588 {
6589 gimple_stmt_iterator iter;
6590 gimple_seq body_sequence = NULL;
6591
6592 tree temp = voidify_wrapper_expr (*expr_p, NULL);
6593
6594 /* We only care about the number of conditions between the innermost
6595 CLEANUP_POINT_EXPR and the cleanup. So save and reset the count and
6596 any cleanups collected outside the CLEANUP_POINT_EXPR. */
6597 int old_conds = gimplify_ctxp->conditions;
6598 gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
6599 bool old_in_cleanup_point_expr = gimplify_ctxp->in_cleanup_point_expr;
6600 gimplify_ctxp->conditions = 0;
6601 gimplify_ctxp->conditional_cleanups = NULL;
6602 gimplify_ctxp->in_cleanup_point_expr = true;
6603
6604 gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);
6605
6606 gimplify_ctxp->conditions = old_conds;
6607 gimplify_ctxp->conditional_cleanups = old_cleanups;
6608 gimplify_ctxp->in_cleanup_point_expr = old_in_cleanup_point_expr;
6609
6610 for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
6611 {
6612 gimple *wce = gsi_stmt (iter);
6613
6614 if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
6615 {
6616 if (gsi_one_before_end_p (iter))
6617 {
6618 /* Note that gsi_insert_seq_before and gsi_remove do not
6619 scan operands, unlike some other sequence mutators. */
6620 if (!gimple_wce_cleanup_eh_only (wce))
6621 gsi_insert_seq_before_without_update (&iter,
6622 gimple_wce_cleanup (wce),
6623 GSI_SAME_STMT);
6624 gsi_remove (&iter, true);
6625 break;
6626 }
6627 else
6628 {
6629 gtry *gtry;
6630 gimple_seq seq;
6631 enum gimple_try_flags kind;
6632
6633 if (gimple_wce_cleanup_eh_only (wce))
6634 kind = GIMPLE_TRY_CATCH;
6635 else
6636 kind = GIMPLE_TRY_FINALLY;
6637 seq = gsi_split_seq_after (iter);
6638
6639 gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
6640 /* Do not use gsi_replace here, as it may scan operands.
6641 We want to do a simple structural modification only. */
6642 gsi_set_stmt (&iter, gtry);
6643 iter = gsi_start (gtry->eval);
6644 }
6645 }
6646 else
6647 gsi_next (&iter);
6648 }
6649
6650 gimplify_seq_add_seq (pre_p, body_sequence);
6651 if (temp)
6652 {
6653 *expr_p = temp;
6654 return GS_OK;
6655 }
6656 else
6657 {
6658 *expr_p = NULL;
6659 return GS_ALL_DONE;
6660 }
6661 }
6662
6663 /* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP
6664 is the cleanup action required. EH_ONLY is true if the cleanup should
6665 only be executed if an exception is thrown, not on normal exit.
6666 If FORCE_UNCOND is true perform the cleanup unconditionally; this is
6667 only valid for clobbers. */
6668
6669 static void
6670 gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p,
6671 bool force_uncond = false)
6672 {
6673 gimple *wce;
6674 gimple_seq cleanup_stmts = NULL;
6675
6676 /* Errors can result in improperly nested cleanups, which causes
6677 confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR. */
6678 if (seen_error ())
6679 return;
6680
6681 if (gimple_conditional_context ())
6682 {
6683 /* If we're in a conditional context, this is more complex. We only
6684 want to run the cleanup if we actually ran the initialization that
6685 necessitates it, but we want to run it after the end of the
6686 conditional context. So we wrap the try/finally around the
6687 condition and use a flag to determine whether or not to actually
6688 run the destructor. Thus
6689
6690 test ? f(A()) : 0
6691
6692 becomes (approximately)
6693
6694 flag = 0;
6695 try {
6696 if (test) { A::A(temp); flag = 1; val = f(temp); }
6697 else { val = 0; }
6698 } finally {
6699 if (flag) A::~A(temp);
6700 }
6701 val
6702 */
6703 if (force_uncond)
6704 {
6705 gimplify_stmt (&cleanup, &cleanup_stmts);
6706 wce = gimple_build_wce (cleanup_stmts);
6707 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
6708 }
6709 else
6710 {
6711 tree flag = create_tmp_var (boolean_type_node, "cleanup");
6712 gassign *ffalse = gimple_build_assign (flag, boolean_false_node);
6713 gassign *ftrue = gimple_build_assign (flag, boolean_true_node);
6714
6715 cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
6716 gimplify_stmt (&cleanup, &cleanup_stmts);
6717 wce = gimple_build_wce (cleanup_stmts);
6718
6719 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
6720 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
6721 gimplify_seq_add_stmt (pre_p, ftrue);
6722
6723 /* Because of this manipulation, and the EH edges that jump
6724 threading cannot redirect, the temporary (VAR) will appear
6725 to be used uninitialized. Don't warn. */
6726 TREE_NO_WARNING (var) = 1;
6727 }
6728 }
6729 else
6730 {
6731 gimplify_stmt (&cleanup, &cleanup_stmts);
6732 wce = gimple_build_wce (cleanup_stmts);
6733 gimple_wce_set_cleanup_eh_only (wce, eh_only);
6734 gimplify_seq_add_stmt (pre_p, wce);
6735 }
6736 }
6737
6738 /* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */
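/* E.g. a full-expression temporary TARGET_EXPR <D.1, f ()> is lowered
   here to "D.1 = f ();" emitted onto PRE_P (plus any cleanup pushed
   for D.1), and *EXPR_P becomes just "D.1".  */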
6739
6740 static enum gimplify_status
6741 gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6742 {
6743 tree targ = *expr_p;
6744 tree temp = TARGET_EXPR_SLOT (targ);
6745 tree init = TARGET_EXPR_INITIAL (targ);
6746 enum gimplify_status ret;
6747
6748 bool unpoison_empty_seq = false;
6749 gimple_stmt_iterator unpoison_it;
6750
6751 if (init)
6752 {
6753 tree cleanup = NULL_TREE;
6754
6755 /* TARGET_EXPR temps aren't part of the enclosing block, so add the
6756 temp to the temps list. Also handle variable-length TARGET_EXPRs. */
6757 if (!poly_int_tree_p (DECL_SIZE (temp)))
6758 {
6759 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
6760 gimplify_type_sizes (TREE_TYPE (temp), pre_p);
6761 gimplify_vla_decl (temp, pre_p);
6762 }
6763 else
6764 {
6765 /* Save the location where we need to place unpoisoning. It's possible
6766 that the variable will later turn out to need to live in memory. */
6767 unpoison_it = gsi_last (*pre_p);
6768 unpoison_empty_seq = gsi_end_p (unpoison_it);
6769
6770 gimple_add_tmp_var (temp);
6771 }
6772
6773 /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
6774 expression is supposed to initialize the slot. */
6775 if (VOID_TYPE_P (TREE_TYPE (init)))
6776 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
6777 else
6778 {
6779 tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
6780 init = init_expr;
6781 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
6782 init = NULL;
6783 ggc_free (init_expr);
6784 }
6785 if (ret == GS_ERROR)
6786 {
6787 /* PR c++/28266 Make sure this is expanded only once. */
6788 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
6789 return GS_ERROR;
6790 }
6791 if (init)
6792 gimplify_and_add (init, pre_p);
6793
6794 /* If needed, push the cleanup for the temp. */
6795 if (TARGET_EXPR_CLEANUP (targ))
6796 {
6797 if (CLEANUP_EH_ONLY (targ))
6798 gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
6799 CLEANUP_EH_ONLY (targ), pre_p);
6800 else
6801 cleanup = TARGET_EXPR_CLEANUP (targ);
6802 }
6803
6804 /* Add a clobber for the temporary going out of scope, like
6805 gimplify_bind_expr. */
6806 if (gimplify_ctxp->in_cleanup_point_expr
6807 && needs_to_live_in_memory (temp))
6808 {
6809 if (flag_stack_reuse == SR_ALL)
6810 {
6811 tree clobber = build_clobber (TREE_TYPE (temp));
6812 clobber = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, clobber);
6813 gimple_push_cleanup (temp, clobber, false, pre_p, true);
6814 }
6815 if (asan_poisoned_variables
6816 && DECL_ALIGN (temp) <= MAX_SUPPORTED_STACK_ALIGNMENT
6817 && !TREE_STATIC (temp)
6818 && dbg_cnt (asan_use_after_scope)
6819 && !gimplify_omp_ctxp)
6820 {
6821 tree asan_cleanup = build_asan_poison_call_expr (temp);
6822 if (asan_cleanup)
6823 {
6824 if (unpoison_empty_seq)
6825 unpoison_it = gsi_start (*pre_p);
6826
6827 asan_poison_variable (temp, false, &unpoison_it,
6828 unpoison_empty_seq);
6829 gimple_push_cleanup (temp, asan_cleanup, false, pre_p);
6830 }
6831 }
6832 }
6833 if (cleanup)
6834 gimple_push_cleanup (temp, cleanup, false, pre_p);
6835
6836 /* Only expand this once. */
6837 TREE_OPERAND (targ, 3) = init;
6838 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
6839 }
6840 else
6841 /* We should have expanded this before. */
6842 gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));
6843
6844 *expr_p = temp;
6845 return GS_OK;
6846 }
6847
6848 /* Gimplification of expression trees. */
6849
6850 /* Gimplify an expression which appears at statement context. The
6851 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is
6852 NULL, a new sequence is allocated.
6853
6854 Return true if we actually added a statement to the queue. */
6855
6856 bool
6857 gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
6858 {
6859 gimple_seq_node last;
6860
6861 last = gimple_seq_last (*seq_p);
6862 gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
6863 return last != gimple_seq_last (*seq_p);
6864 }
6865
6866 /* Add FIRSTPRIVATE entries for DECL in the surrounding OpenMP parallels
6867 to CTX. If entries already exist, force them to be some flavor of private.
6868 If there is no enclosing parallel, do nothing. */
6869
6870 void
6871 omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
6872 {
6873 splay_tree_node n;
6874
6875 if (decl == NULL || !DECL_P (decl) || ctx->region_type == ORT_NONE)
6876 return;
6877
6878 do
6879 {
6880 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6881 if (n != NULL)
6882 {
6883 if (n->value & GOVD_SHARED)
6884 n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
6885 else if (n->value & GOVD_MAP)
6886 n->value |= GOVD_MAP_TO_ONLY;
6887 else
6888 return;
6889 }
6890 else if ((ctx->region_type & ORT_TARGET) != 0)
6891 {
6892 if (ctx->defaultmap[GDMK_SCALAR] & GOVD_FIRSTPRIVATE)
6893 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
6894 else
6895 omp_add_variable (ctx, decl, GOVD_MAP | GOVD_MAP_TO_ONLY);
6896 }
6897 else if (ctx->region_type != ORT_WORKSHARE
6898 && ctx->region_type != ORT_TASKGROUP
6899 && ctx->region_type != ORT_SIMD
6900 && ctx->region_type != ORT_ACC
6901 && !(ctx->region_type & ORT_TARGET_DATA))
6902 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
6903
6904 ctx = ctx->outer_context;
6905 }
6906 while (ctx);
6907 }
6908
6909 /* Similarly for each of the type sizes of TYPE. */
6910
6911 static void
6912 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
6913 {
6914 if (type == NULL || type == error_mark_node)
6915 return;
6916 type = TYPE_MAIN_VARIANT (type);
6917
6918 if (ctx->privatized_types->add (type))
6919 return;
6920
6921 switch (TREE_CODE (type))
6922 {
6923 case INTEGER_TYPE:
6924 case ENUMERAL_TYPE:
6925 case BOOLEAN_TYPE:
6926 case REAL_TYPE:
6927 case FIXED_POINT_TYPE:
6928 omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
6929 omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
6930 break;
6931
6932 case ARRAY_TYPE:
6933 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
6934 omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
6935 break;
6936
6937 case RECORD_TYPE:
6938 case UNION_TYPE:
6939 case QUAL_UNION_TYPE:
6940 {
6941 tree field;
6942 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
6943 if (TREE_CODE (field) == FIELD_DECL)
6944 {
6945 omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
6946 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
6947 }
6948 }
6949 break;
6950
6951 case POINTER_TYPE:
6952 case REFERENCE_TYPE:
6953 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
6954 break;
6955
6956 default:
6957 break;
6958 }
6959
6960 omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
6961 omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
6962 lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
6963 }
6964
6965 /* Add an entry for DECL in the OMP context CTX with FLAGS. */
6966
6967 static void
6968 omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
6969 {
6970 splay_tree_node n;
6971 unsigned int nflags;
6972 tree t;
6973
6974 if (error_operand_p (decl) || ctx->region_type == ORT_NONE)
6975 return;
6976
6977 /* Never elide decls whose type has TREE_ADDRESSABLE set. This means
6978 there are constructors involved somewhere. The exception is a shared
6979 clause: there is nothing privatized in that case. */
6980 if ((flags & GOVD_SHARED) == 0
6981 && (TREE_ADDRESSABLE (TREE_TYPE (decl))
6982 || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl))))
6983 flags |= GOVD_SEEN;
6984
6985 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6986 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
6987 {
6988 /* We shouldn't be re-adding the decl with the same data
6989 sharing class. */
6990 gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
6991 nflags = n->value | flags;
6992 /* The only combination of data sharing classes we should see is
6993 FIRSTPRIVATE and LASTPRIVATE. However, OpenACC permits
6994 reduction variables to be used in data sharing clauses. */
6995 gcc_assert ((ctx->region_type & ORT_ACC) != 0
6996 || ((nflags & GOVD_DATA_SHARE_CLASS)
6997 == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE))
6998 || (flags & GOVD_DATA_SHARE_CLASS) == 0);
6999 n->value = nflags;
7000 return;
7001 }
7002
7003 /* When adding a variable-sized variable, we have to handle all sorts
7004 of additional bits of data: the pointer replacement variable, and
7005 the parameters of the type. */
7006 if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
7007 {
7008 /* Add the pointer replacement variable as PRIVATE if the variable
7009 replacement is private, else FIRSTPRIVATE since we'll need the
7010 address of the original variable either for SHARED, or for the
7011 copy into or out of the context. */
7012 if (!(flags & GOVD_LOCAL) && ctx->region_type != ORT_TASKGROUP)
7013 {
7014 if (flags & GOVD_MAP)
7015 nflags = GOVD_MAP | GOVD_MAP_TO_ONLY | GOVD_EXPLICIT;
7016 else if (flags & GOVD_PRIVATE)
7017 nflags = GOVD_PRIVATE;
7018 else if (((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
7019 && (flags & GOVD_FIRSTPRIVATE))
7020 || (ctx->region_type == ORT_TARGET_DATA
7021 && (flags & GOVD_DATA_SHARE_CLASS) == 0))
7022 nflags = GOVD_PRIVATE | GOVD_EXPLICIT;
7023 else
7024 nflags = GOVD_FIRSTPRIVATE;
7025 nflags |= flags & GOVD_SEEN;
7026 t = DECL_VALUE_EXPR (decl);
7027 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
7028 t = TREE_OPERAND (t, 0);
7029 gcc_assert (DECL_P (t));
7030 omp_add_variable (ctx, t, nflags);
7031 }
7032
7033 /* Add all of the variable and type parameters (which should have
7034 been gimplified to a formal temporary) as FIRSTPRIVATE. */
7035 omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
7036 omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
7037 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
7038
7039 /* The variable-sized variable itself is never SHARED, only some form
7040 of PRIVATE. The sharing would take place via the pointer variable
7041 which we remapped above. */
7042 if (flags & GOVD_SHARED)
7043 flags = GOVD_SHARED | GOVD_DEBUG_PRIVATE
7044 | (flags & (GOVD_SEEN | GOVD_EXPLICIT));
7045
7046 /* We're going to make use of the TYPE_SIZE_UNIT at least in the
7047 alloca statement we generate for the variable, so make sure it
7048 is available. This isn't automatically needed for the SHARED
7049 case, since we won't be allocating local storage then.
7050 For local variables TYPE_SIZE_UNIT might not be gimplified yet;
7051 in that case omp_notice_variable will be called later on,
7052 when it is gimplified. */
7053 else if (! (flags & (GOVD_LOCAL | GOVD_MAP))
7054 && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl))))
7055 omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
7056 }
7057 else if ((flags & (GOVD_MAP | GOVD_LOCAL)) == 0
7058 && lang_hooks.decls.omp_privatize_by_reference (decl))
7059 {
7060 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
7061
7062 /* Similar to the direct variable sized case above, we'll need the
7063 size of references being privatized. */
7064 if ((flags & GOVD_SHARED) == 0)
7065 {
7066 t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
7067 if (DECL_P (t))
7068 omp_notice_variable (ctx, t, true);
7069 }
7070 }
7071
7072 if (n != NULL)
7073 n->value |= flags;
7074 else
7075 splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
7076
7077 /* For reduction clauses in OpenACC loop directives, by default create a
7078 copy clause on the enclosing parallel construct for carrying back the
7079 results. */
7080 if (ctx->region_type == ORT_ACC && (flags & GOVD_REDUCTION))
7081 {
7082 struct gimplify_omp_ctx *outer_ctx = ctx->outer_context;
7083 while (outer_ctx)
7084 {
7085 n = splay_tree_lookup (outer_ctx->variables, (splay_tree_key)decl);
7086 if (n != NULL)
7087 {
7088 /* Ignore local variables and explicitly declared clauses. */
7089 if (n->value & (GOVD_LOCAL | GOVD_EXPLICIT))
7090 break;
7091 else if (outer_ctx->region_type == ORT_ACC_KERNELS)
7092 {
7093 /* According to the OpenACC spec, such a reduction variable
7094 should already have a copy map on a kernels construct;
7095 verify that here. */
7096 gcc_assert (!(n->value & GOVD_FIRSTPRIVATE)
7097 && (n->value & GOVD_MAP));
7098 }
7099 else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
7100 {
7101 /* Remove firstprivate and make it a copy map. */
7102 n->value &= ~GOVD_FIRSTPRIVATE;
7103 n->value |= GOVD_MAP;
7104 }
7105 }
7106 else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
7107 {
7108 splay_tree_insert (outer_ctx->variables, (splay_tree_key)decl,
7109 GOVD_MAP | GOVD_SEEN);
7110 break;
7111 }
7112 outer_ctx = outer_ctx->outer_context;
7113 }
7114 }
7115 }
7116
7117 /* Notice a threadprivate variable DECL used in OMP context CTX.
7118 This just prints out diagnostics about threadprivate variable uses
7119 in untied tasks, target regions and order(concurrent) regions.
7120 If DECL2 is non-NULL, prevent this warning on that variable. */
7121
7122 static bool
7123 omp_notice_threadprivate_variable (struct gimplify_omp_ctx *ctx, tree decl,
7124 tree decl2)
7125 {
7126 splay_tree_node n;
7127 struct gimplify_omp_ctx *octx;
7128
7129 for (octx = ctx; octx; octx = octx->outer_context)
7130 if ((octx->region_type & ORT_TARGET) != 0
7131 || octx->order_concurrent)
7132 {
7133 n = splay_tree_lookup (octx->variables, (splay_tree_key)decl);
7134 if (n == NULL)
7135 {
7136 if (octx->order_concurrent)
7137 {
7138 error ("threadprivate variable %qE used in a region with"
7139 " %<order(concurrent)%> clause", DECL_NAME (decl));
7140 inform (octx->location, "enclosing region");
7141 }
7142 else
7143 {
7144 error ("threadprivate variable %qE used in target region",
7145 DECL_NAME (decl));
7146 inform (octx->location, "enclosing target region");
7147 }
7148 splay_tree_insert (octx->variables, (splay_tree_key)decl, 0);
7149 }
7150 if (decl2)
7151 splay_tree_insert (octx->variables, (splay_tree_key)decl2, 0);
7152 }
7153
7154 if (ctx->region_type != ORT_UNTIED_TASK)
7155 return false;
7156 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7157 if (n == NULL)
7158 {
7159 error ("threadprivate variable %qE used in untied task",
7160 DECL_NAME (decl));
7161 inform (ctx->location, "enclosing task");
7162 splay_tree_insert (ctx->variables, (splay_tree_key)decl, 0);
7163 }
7164 if (decl2)
7165 splay_tree_insert (ctx->variables, (splay_tree_key)decl2, 0);
7166 return false;
7167 }
7168
7169 /* Return true if global var DECL is device resident. */
7170
7171 static bool
7172 device_resident_p (tree decl)
7173 {
7174 tree attr = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (decl));
7175
7176 if (!attr)
7177 return false;
7178
7179 for (tree t = TREE_VALUE (attr); t; t = TREE_PURPOSE (t))
7180 {
7181 tree c = TREE_VALUE (t);
7182 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DEVICE_RESIDENT)
7183 return true;
7184 }
7185
7186 return false;
7187 }
7188
7189 /* Return true if DECL has an ACC DECLARE attribute. */
7190
7191 static bool
7192 is_oacc_declared (tree decl)
7193 {
7194 tree t = TREE_CODE (decl) == MEM_REF ? TREE_OPERAND (decl, 0) : decl;
7195 tree declared = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (t));
7196 return declared != NULL_TREE;
7197 }
7198
7199 /* Determine outer default flags for DECL mentioned in an OMP region
7200 but not declared in an enclosing clause.
7201
7202 ??? Some compiler-generated variables (like SAVE_EXPRs) could be
7203 remapped firstprivate instead of shared. To some extent this is
7204 addressed in omp_firstprivatize_type_sizes, but not
7205 effectively. */
7206
7207 static unsigned
7208 omp_default_clause (struct gimplify_omp_ctx *ctx, tree decl,
7209 bool in_code, unsigned flags)
7210 {
7211 enum omp_clause_default_kind default_kind = ctx->default_kind;
7212 enum omp_clause_default_kind kind;
7213
7214 kind = lang_hooks.decls.omp_predetermined_sharing (decl);
7215 if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
7216 default_kind = kind;
7217 else if (VAR_P (decl) && TREE_STATIC (decl) && DECL_IN_CONSTANT_POOL (decl))
7218 default_kind = OMP_CLAUSE_DEFAULT_SHARED;
7219
7220 switch (default_kind)
7221 {
7222 case OMP_CLAUSE_DEFAULT_NONE:
7223 {
7224 const char *rtype;
7225
7226 if (ctx->region_type & ORT_PARALLEL)
7227 rtype = "parallel";
7228 else if ((ctx->region_type & ORT_TASKLOOP) == ORT_TASKLOOP)
7229 rtype = "taskloop";
7230 else if (ctx->region_type & ORT_TASK)
7231 rtype = "task";
7232 else if (ctx->region_type & ORT_TEAMS)
7233 rtype = "teams";
7234 else
7235 gcc_unreachable ();
7236
7237 error ("%qE not specified in enclosing %qs",
7238 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rtype);
7239 inform (ctx->location, "enclosing %qs", rtype);
7240 }
7241 /* FALLTHRU */
7242 case OMP_CLAUSE_DEFAULT_SHARED:
7243 flags |= GOVD_SHARED;
7244 break;
7245 case OMP_CLAUSE_DEFAULT_PRIVATE:
7246 flags |= GOVD_PRIVATE;
7247 break;
7248 case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
7249 flags |= GOVD_FIRSTPRIVATE;
7250 break;
7251 case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
7252 /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED. */
7253 gcc_assert ((ctx->region_type & ORT_TASK) != 0);
7254 if (struct gimplify_omp_ctx *octx = ctx->outer_context)
7255 {
7256 omp_notice_variable (octx, decl, in_code);
7257 for (; octx; octx = octx->outer_context)
7258 {
7259 splay_tree_node n2;
7260
7261 n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
7262 if ((octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)) != 0
7263 && (n2 == NULL || (n2->value & GOVD_DATA_SHARE_CLASS) == 0))
7264 continue;
7265 if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
7266 {
7267 flags |= GOVD_FIRSTPRIVATE;
7268 goto found_outer;
7269 }
7270 if ((octx->region_type & (ORT_PARALLEL | ORT_TEAMS)) != 0)
7271 {
7272 flags |= GOVD_SHARED;
7273 goto found_outer;
7274 }
7275 }
7276 }
7277
7278 if (TREE_CODE (decl) == PARM_DECL
7279 || (!is_global_var (decl)
7280 && DECL_CONTEXT (decl) == current_function_decl))
7281 flags |= GOVD_FIRSTPRIVATE;
7282 else
7283 flags |= GOVD_SHARED;
7284 found_outer:
7285 break;
7286
7287 default:
7288 gcc_unreachable ();
7289 }
7290
7291 return flags;
7292 }
7293
7294
7295 /* Determine outer default flags for DECL mentioned in an OACC region
7296 but not declared in an enclosing clause. */
7297
7298 static unsigned
7299 oacc_default_clause (struct gimplify_omp_ctx *ctx, tree decl, unsigned flags)
7300 {
7301 const char *rkind;
7302 bool on_device = false;
7303 bool is_private = false;
7304 bool declared = is_oacc_declared (decl);
7305 tree type = TREE_TYPE (decl);
7306
7307 if (lang_hooks.decls.omp_privatize_by_reference (decl))
7308 type = TREE_TYPE (type);
7309
7310 /* For Fortran COMMON blocks, only the used variables in those blocks are
7311 transferred and remapped. The block itself will have a private clause to
7312 avoid transferring the data twice.
7313 The hook evaluates to false by default. For a variable in Fortran's COMMON
7314 or EQUIVALENCE block, it returns 'true' (as we have shared=false) - as only
7315 the variables in such a COMMON/EQUIVALENCE block shall be privatized, not
7316 the whole block. For C++ and Fortran, it can also be true under certain
7317 other conditions, if DECL_HAS_VALUE_EXPR is set. */
7318 if (RECORD_OR_UNION_TYPE_P (type))
7319 is_private = lang_hooks.decls.omp_disregard_value_expr (decl, false);
7320
7321 if ((ctx->region_type & (ORT_ACC_PARALLEL | ORT_ACC_KERNELS)) != 0
7322 && is_global_var (decl)
7323 && device_resident_p (decl)
7324 && !is_private)
7325 {
7326 on_device = true;
7327 flags |= GOVD_MAP_TO_ONLY;
7328 }
7329
7330 switch (ctx->region_type)
7331 {
7332 case ORT_ACC_KERNELS:
7333 rkind = "kernels";
7334
7335 if (is_private)
7336 flags |= GOVD_FIRSTPRIVATE;
7337 else if (AGGREGATE_TYPE_P (type))
7338 {
7339 /* Aggregates default to 'present_or_copy', or 'present'. */
7340 if (ctx->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
7341 flags |= GOVD_MAP;
7342 else
7343 flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
7344 }
7345 else
7346 /* Scalars default to 'copy'. */
7347 flags |= GOVD_MAP | GOVD_MAP_FORCE;
7348
7349 break;
7350
7351 case ORT_ACC_PARALLEL:
7352 case ORT_ACC_SERIAL:
7353 rkind = ctx->region_type == ORT_ACC_PARALLEL ? "parallel" : "serial";
7354
7355 if (is_private)
7356 flags |= GOVD_FIRSTPRIVATE;
7357 else if (on_device || declared)
7358 flags |= GOVD_MAP;
7359 else if (AGGREGATE_TYPE_P (type))
7360 {
7361 /* Aggregates default to 'present_or_copy', or 'present'. */
7362 if (ctx->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
7363 flags |= GOVD_MAP;
7364 else
7365 flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
7366 }
7367 else
7368 /* Scalars default to 'firstprivate'. */
7369 flags |= GOVD_FIRSTPRIVATE;
7370
7371 break;
7372
7373 default:
7374 gcc_unreachable ();
7375 }
7376
7377 if (DECL_ARTIFICIAL (decl))
7378 ; /* We can get compiler-generated decls, and should not complain
7379 about them. */
7380 else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_NONE)
7381 {
7382 error ("%qE not specified in enclosing OpenACC %qs construct",
7383 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rkind);
7384 inform (ctx->location, "enclosing OpenACC %qs construct", rkind);
7385 }
7386 else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_PRESENT)
7387 ; /* Handled above. */
7388 else
7389 gcc_checking_assert (ctx->default_kind == OMP_CLAUSE_DEFAULT_SHARED);
7390
7391 return flags;
7392 }
7393
7394 /* Record the fact that DECL was used within the OMP context CTX.
7395 IN_CODE is true when real code uses DECL, and false when we should
7396 merely emit default(none) errors. Return true if DECL is going to
7397 be remapped and thus DECL shouldn't be gimplified into its
7398 DECL_VALUE_EXPR (if any). */
7399
7400 static bool
7401 omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
7402 {
7403 splay_tree_node n;
7404 unsigned flags = in_code ? GOVD_SEEN : 0;
7405 bool ret = false, shared;
7406
7407 if (error_operand_p (decl))
7408 return false;
7409
7410 if (ctx->region_type == ORT_NONE)
7411 return lang_hooks.decls.omp_disregard_value_expr (decl, false);
7412
7413 if (is_global_var (decl))
7414 {
7415 /* Threadprivate variables are predetermined. */
7416 if (DECL_THREAD_LOCAL_P (decl))
7417 return omp_notice_threadprivate_variable (ctx, decl, NULL_TREE);
7418
7419 if (DECL_HAS_VALUE_EXPR_P (decl))
7420 {
7421 if (ctx->region_type & ORT_ACC)
7422 /* For OpenACC, defer expansion of the value to avoid transferring
7423 privatized common block data instead of the implicitly or explicitly
7424 transferred variables that are in common blocks. */
7425 ;
7426 else
7427 {
7428 tree value = get_base_address (DECL_VALUE_EXPR (decl));
7429
7430 if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
7431 return omp_notice_threadprivate_variable (ctx, decl, value);
7432 }
7433 }
7434
7435 if (gimplify_omp_ctxp->outer_context == NULL
7436 && VAR_P (decl)
7437 && oacc_get_fn_attrib (current_function_decl))
7438 {
7439 location_t loc = DECL_SOURCE_LOCATION (decl);
7440
7441 if (lookup_attribute ("omp declare target link",
7442 DECL_ATTRIBUTES (decl)))
7443 {
7444 error_at (loc,
7445 "%qE with %<link%> clause used in %<routine%> function",
7446 DECL_NAME (decl));
7447 return false;
7448 }
7449 else if (!lookup_attribute ("omp declare target",
7450 DECL_ATTRIBUTES (decl)))
7451 {
7452 error_at (loc,
7453 "%qE requires a %<declare%> directive for use "
7454 "in a %<routine%> function", DECL_NAME (decl));
7455 return false;
7456 }
7457 }
7458 }
7459
7460 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7461 if ((ctx->region_type & ORT_TARGET) != 0)
7462 {
7463 if (ctx->region_type & ORT_ACC)
7464 /* For OpenACC, as remarked above, defer expansion. */
7465 shared = false;
7466 else
7467 shared = true;
7468
7469 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
7470 if (n == NULL)
7471 {
7472 unsigned nflags = flags;
7473 if ((ctx->region_type & ORT_ACC) == 0)
7474 {
7475 bool is_declare_target = false;
7476 if (is_global_var (decl)
7477 && varpool_node::get_create (decl)->offloadable)
7478 {
7479 struct gimplify_omp_ctx *octx;
7480 for (octx = ctx->outer_context;
7481 octx; octx = octx->outer_context)
7482 {
7483 n = splay_tree_lookup (octx->variables,
7484 (splay_tree_key)decl);
7485 if (n
7486 && (n->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED
7487 && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
7488 break;
7489 }
7490 is_declare_target = octx == NULL;
7491 }
7492 if (!is_declare_target)
7493 {
7494 int gdmk;
7495 enum omp_clause_defaultmap_kind kind;
7496 if (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
7497 || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
7498 && (TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
7499 == POINTER_TYPE)))
7500 gdmk = GDMK_POINTER;
7501 else if (lang_hooks.decls.omp_scalar_p (decl))
7502 gdmk = GDMK_SCALAR;
7503 else
7504 gdmk = GDMK_AGGREGATE;
7505 kind = lang_hooks.decls.omp_predetermined_mapping (decl);
7506 if (kind != OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED)
7507 {
7508 if (kind == OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE)
7509 nflags |= GOVD_FIRSTPRIVATE;
7510 else if (kind == OMP_CLAUSE_DEFAULTMAP_TO)
7511 nflags |= GOVD_MAP | GOVD_MAP_TO_ONLY;
7512 else
7513 gcc_unreachable ();
7514 }
7515 else if (ctx->defaultmap[gdmk] == 0)
7516 {
7517 tree d = lang_hooks.decls.omp_report_decl (decl);
7518 error ("%qE not specified in enclosing %<target%>",
7519 DECL_NAME (d));
7520 inform (ctx->location, "enclosing %<target%>");
7521 }
7522 else if (ctx->defaultmap[gdmk]
7523 & (GOVD_MAP_0LEN_ARRAY | GOVD_FIRSTPRIVATE))
7524 nflags |= ctx->defaultmap[gdmk];
7525 else
7526 {
7527 gcc_assert (ctx->defaultmap[gdmk] & GOVD_MAP);
7528 nflags |= ctx->defaultmap[gdmk] & ~GOVD_MAP;
7529 }
7530 }
7531 }
7532
7533 struct gimplify_omp_ctx *octx = ctx->outer_context;
7534 if ((ctx->region_type & ORT_ACC) && octx)
7535 {
7536 /* Look in outer OpenACC contexts, to see if there's a
7537 data attribute for this variable. */
7538 omp_notice_variable (octx, decl, in_code);
7539
7540 for (; octx; octx = octx->outer_context)
7541 {
7542 if (!(octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)))
7543 break;
7544 splay_tree_node n2
7545 = splay_tree_lookup (octx->variables,
7546 (splay_tree_key) decl);
7547 if (n2)
7548 {
7549 if (octx->region_type == ORT_ACC_HOST_DATA)
7550 error ("variable %qE declared in enclosing "
7551 "%<host_data%> region", DECL_NAME (decl));
7552 nflags |= GOVD_MAP;
7553 if (octx->region_type == ORT_ACC_DATA
7554 && (n2->value & GOVD_MAP_0LEN_ARRAY))
7555 nflags |= GOVD_MAP_0LEN_ARRAY;
7556 goto found_outer;
7557 }
7558 }
7559 }
7560
7561 if ((nflags & ~(GOVD_MAP_TO_ONLY | GOVD_MAP_FROM_ONLY
7562 | GOVD_MAP_ALLOC_ONLY)) == flags)
7563 {
7564 tree type = TREE_TYPE (decl);
7565
7566 if (gimplify_omp_ctxp->target_firstprivatize_array_bases
7567 && lang_hooks.decls.omp_privatize_by_reference (decl))
7568 type = TREE_TYPE (type);
7569 if (!lang_hooks.types.omp_mappable_type (type))
7570 {
7571 error ("%qD referenced in target region does not have "
7572 "a mappable type", decl);
7573 nflags |= GOVD_MAP | GOVD_EXPLICIT;
7574 }
7575 else
7576 {
7577 if ((ctx->region_type & ORT_ACC) != 0)
7578 nflags = oacc_default_clause (ctx, decl, flags);
7579 else
7580 nflags |= GOVD_MAP;
7581 }
7582 }
7583 found_outer:
7584 omp_add_variable (ctx, decl, nflags);
7585 }
7586 else
7587 {
7588 /* If nothing changed, there's nothing left to do. */
7589 if ((n->value & flags) == flags)
7590 return ret;
7591 flags |= n->value;
7592 n->value = flags;
7593 }
7594 goto do_outer;
7595 }
7596
7597 if (n == NULL)
7598 {
7599 if (ctx->region_type == ORT_WORKSHARE
7600 || ctx->region_type == ORT_TASKGROUP
7601 || ctx->region_type == ORT_SIMD
7602 || ctx->region_type == ORT_ACC
7603 || (ctx->region_type & ORT_TARGET_DATA) != 0)
7604 goto do_outer;
7605
7606 flags = omp_default_clause (ctx, decl, in_code, flags);
7607
7608 if ((flags & GOVD_PRIVATE)
7609 && lang_hooks.decls.omp_private_outer_ref (decl))
7610 flags |= GOVD_PRIVATE_OUTER_REF;
7611
7612 omp_add_variable (ctx, decl, flags);
7613
7614 shared = (flags & GOVD_SHARED) != 0;
7615 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
7616 goto do_outer;
7617 }
7618
7619 if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
7620 && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
7621 && DECL_SIZE (decl))
7622 {
7623 if (TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
7624 {
7625 splay_tree_node n2;
7626 tree t = DECL_VALUE_EXPR (decl);
7627 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
7628 t = TREE_OPERAND (t, 0);
7629 gcc_assert (DECL_P (t));
7630 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
7631 n2->value |= GOVD_SEEN;
7632 }
7633 else if (lang_hooks.decls.omp_privatize_by_reference (decl)
7634 && TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)))
7635 && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))))
7636 != INTEGER_CST))
7637 {
7638 splay_tree_node n2;
7639 tree t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
7640 gcc_assert (DECL_P (t));
7641 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
7642 if (n2)
7643 omp_notice_variable (ctx, t, true);
7644 }
7645 }
7646
7647 if (ctx->region_type & ORT_ACC)
7648 /* For OpenACC, as remarked above, defer expansion. */
7649 shared = false;
7650 else
7651 shared = ((flags | n->value) & GOVD_SHARED) != 0;
7652 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
7653
7654 /* If nothing changed, there's nothing left to do. */
7655 if ((n->value & flags) == flags)
7656 return ret;
7657 flags |= n->value;
7658 n->value = flags;
7659
7660 do_outer:
7661 /* If the variable is private in the current context, then we don't
7662 need to propagate anything to an outer context. */
7663 if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
7664 return ret;
7665 if ((flags & (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7666 == (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7667 return ret;
7668 if ((flags & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
7669 | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7670 == (GOVD_LASTPRIVATE | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7671 return ret;
7672 if (ctx->outer_context
7673 && omp_notice_variable (ctx->outer_context, decl, in_code))
7674 return true;
7675 return ret;
7676 }
7677
7678 /* Verify that DECL is private within CTX. If there's specific information
7679 to the contrary in the innermost scope, generate an error. */
7680
7681 static bool
7682 omp_is_private (struct gimplify_omp_ctx *ctx, tree decl, int simd)
7683 {
7684 splay_tree_node n;
7685
7686 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7687 if (n != NULL)
7688 {
7689 if (n->value & GOVD_SHARED)
7690 {
7691 if (ctx == gimplify_omp_ctxp)
7692 {
7693 if (simd)
7694 error ("iteration variable %qE is predetermined linear",
7695 DECL_NAME (decl));
7696 else
7697 error ("iteration variable %qE should be private",
7698 DECL_NAME (decl));
7699 n->value = GOVD_PRIVATE;
7700 return true;
7701 }
7702 else
7703 return false;
7704 }
7705 else if ((n->value & GOVD_EXPLICIT) != 0
7706 && (ctx == gimplify_omp_ctxp
7707 || (ctx->region_type == ORT_COMBINED_PARALLEL
7708 && gimplify_omp_ctxp->outer_context == ctx)))
7709 {
7710 if ((n->value & GOVD_FIRSTPRIVATE) != 0)
7711 error ("iteration variable %qE should not be firstprivate",
7712 DECL_NAME (decl));
7713 else if ((n->value & GOVD_REDUCTION) != 0)
7714 error ("iteration variable %qE should not be reduction",
7715 DECL_NAME (decl));
7716 else if (simd != 1 && (n->value & GOVD_LINEAR) != 0)
7717 error ("iteration variable %qE should not be linear",
7718 DECL_NAME (decl));
7719 }
7720 return (ctx == gimplify_omp_ctxp
7721 || (ctx->region_type == ORT_COMBINED_PARALLEL
7722 && gimplify_omp_ctxp->outer_context == ctx));
7723 }
7724
7725 if (ctx->region_type != ORT_WORKSHARE
7726 && ctx->region_type != ORT_TASKGROUP
7727 && ctx->region_type != ORT_SIMD
7728 && ctx->region_type != ORT_ACC)
7729 return false;
7730 else if (ctx->outer_context)
7731 return omp_is_private (ctx->outer_context, decl, simd);
7732 return false;
7733 }
7734
7735 /* Return true if DECL is private within a parallel region
7736 that binds to the current construct's context or in the parallel
7737 region's REDUCTION clause. */
7738
7739 static bool
7740 omp_check_private (struct gimplify_omp_ctx *ctx, tree decl, bool copyprivate)
7741 {
7742 splay_tree_node n;
7743
7744 do
7745 {
7746 ctx = ctx->outer_context;
7747 if (ctx == NULL)
7748 {
7749 if (is_global_var (decl))
7750 return false;
7751
7752 /* References might be private, but might be shared too.
7753 When checking for copyprivate, assume they might be
7754 private; otherwise assume they might be shared. */
7755 if (copyprivate)
7756 return true;
7757
7758 if (lang_hooks.decls.omp_privatize_by_reference (decl))
7759 return false;
7760
7761 /* Treat C++ privatized non-static data members outside
7762 of the privatization the same. */
7763 if (omp_member_access_dummy_var (decl))
7764 return false;
7765
7766 return true;
7767 }
7768
7769 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
7770
7771 if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
7772 && (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0))
7773 continue;
7774
7775 if (n != NULL)
7776 {
7777 if ((n->value & GOVD_LOCAL) != 0
7778 && omp_member_access_dummy_var (decl))
7779 return false;
7780 return (n->value & GOVD_SHARED) == 0;
7781 }
7782 }
7783 while (ctx->region_type == ORT_WORKSHARE
7784 || ctx->region_type == ORT_TASKGROUP
7785 || ctx->region_type == ORT_SIMD
7786 || ctx->region_type == ORT_ACC);
7787 return false;
7788 }
7789
7790 /* Callback for walk_tree to find a DECL_EXPR for the given DECL. */
7791
7792 static tree
7793 find_decl_expr (tree *tp, int *walk_subtrees, void *data)
7794 {
7795 tree t = *tp;
7796
7797 /* Return this node if it is the DECL_EXPR for the DECL we look for. */
7798 if (TREE_CODE (t) == DECL_EXPR && DECL_EXPR_DECL (t) == (tree) data)
7799 return t;
7800
7801 if (IS_TYPE_OR_DECL_P (t))
7802 *walk_subtrees = 0;
7803 return NULL_TREE;
7804 }
7805
7806 /* If *LIST_P contains any OpenMP depend clauses with iterators,
7807 lower all the depend clauses by populating the corresponding depend
7808 array. Returns 0 if there are no such depend clauses, or
7809 2 if all depend clauses should be removed, 1 otherwise. */
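/* In outline (summarizing the code below): the depend clauses are lowered
   into stores of the depended-on addresses into a single array, prefixed
   by a small header of counts, and a new OMP_CLAUSE_DEPEND clause with
   OMP_CLAUSE_DEPEND_LAST kind whose decl is the address of that array is
   prepended to *LIST_P; see the layout summary further down. */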
7810
7811 static int
7812 gimplify_omp_depend (tree *list_p, gimple_seq *pre_p)
7813 {
7814 tree c;
7815 gimple *g;
7816 size_t n[4] = { 0, 0, 0, 0 };
7817 bool unused[4];
7818 tree counts[4] = { NULL_TREE, NULL_TREE, NULL_TREE, NULL_TREE };
7819 tree last_iter = NULL_TREE, last_count = NULL_TREE;
7820 size_t i, j;
7821 location_t first_loc = UNKNOWN_LOCATION;
7822
7823 for (c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
7824 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
7825 {
7826 switch (OMP_CLAUSE_DEPEND_KIND (c))
7827 {
7828 case OMP_CLAUSE_DEPEND_IN:
7829 i = 2;
7830 break;
7831 case OMP_CLAUSE_DEPEND_OUT:
7832 case OMP_CLAUSE_DEPEND_INOUT:
7833 i = 0;
7834 break;
7835 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
7836 i = 1;
7837 break;
7838 case OMP_CLAUSE_DEPEND_DEPOBJ:
7839 i = 3;
7840 break;
7841 case OMP_CLAUSE_DEPEND_SOURCE:
7842 case OMP_CLAUSE_DEPEND_SINK:
7843 continue;
7844 default:
7845 gcc_unreachable ();
7846 }
7847 tree t = OMP_CLAUSE_DECL (c);
7848 if (first_loc == UNKNOWN_LOCATION)
7849 first_loc = OMP_CLAUSE_LOCATION (c);
7850 if (TREE_CODE (t) == TREE_LIST
7851 && TREE_PURPOSE (t)
7852 && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
7853 {
7854 if (TREE_PURPOSE (t) != last_iter)
7855 {
7856 tree tcnt = size_one_node;
7857 for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
7858 {
7859 if (gimplify_expr (&TREE_VEC_ELT (it, 1), pre_p, NULL,
7860 is_gimple_val, fb_rvalue) == GS_ERROR
7861 || gimplify_expr (&TREE_VEC_ELT (it, 2), pre_p, NULL,
7862 is_gimple_val, fb_rvalue) == GS_ERROR
7863 || gimplify_expr (&TREE_VEC_ELT (it, 3), pre_p, NULL,
7864 is_gimple_val, fb_rvalue) == GS_ERROR
7865 || (gimplify_expr (&TREE_VEC_ELT (it, 4), pre_p, NULL,
7866 is_gimple_val, fb_rvalue)
7867 == GS_ERROR))
7868 return 2;
7869 tree var = TREE_VEC_ELT (it, 0);
7870 tree begin = TREE_VEC_ELT (it, 1);
7871 tree end = TREE_VEC_ELT (it, 2);
7872 tree step = TREE_VEC_ELT (it, 3);
7873 tree orig_step = TREE_VEC_ELT (it, 4);
7874 tree type = TREE_TYPE (var);
7875 tree stype = TREE_TYPE (step);
7876 location_t loc = DECL_SOURCE_LOCATION (var);
7877 tree endmbegin;
7878 /* Compute count for this iterator as
7879 orig_step > 0
7880 ? (begin < end ? (end - begin + (step - 1)) / step : 0)
7881 : (begin > end ? (end - begin + (step + 1)) / step : 0)
7882 and compute product of those for the entire depend
7883 clause. */
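/* E.g. for begin=0, end=10, step=3, orig_step=3 this computes
   (10 - 0 + 2) / 3 = 4 iterations (0, 3, 6, 9), and for begin=10,
   end=0, step=-3 it computes (0 - 10 - 2) / -3 = 4 iterations
   (10, 7, 4, 1). */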
7884 if (POINTER_TYPE_P (type))
7885 endmbegin = fold_build2_loc (loc, POINTER_DIFF_EXPR,
7886 stype, end, begin);
7887 else
7888 endmbegin = fold_build2_loc (loc, MINUS_EXPR, type,
7889 end, begin);
7890 tree stepm1 = fold_build2_loc (loc, MINUS_EXPR, stype,
7891 step,
7892 build_int_cst (stype, 1));
7893 tree stepp1 = fold_build2_loc (loc, PLUS_EXPR, stype, step,
7894 build_int_cst (stype, 1));
7895 tree pos = fold_build2_loc (loc, PLUS_EXPR, stype,
7896 unshare_expr (endmbegin),
7897 stepm1);
7898 pos = fold_build2_loc (loc, TRUNC_DIV_EXPR, stype,
7899 pos, step);
7900 tree neg = fold_build2_loc (loc, PLUS_EXPR, stype,
7901 endmbegin, stepp1);
7902 if (TYPE_UNSIGNED (stype))
7903 {
7904 neg = fold_build1_loc (loc, NEGATE_EXPR, stype, neg);
7905 step = fold_build1_loc (loc, NEGATE_EXPR, stype, step);
7906 }
7907 neg = fold_build2_loc (loc, TRUNC_DIV_EXPR, stype,
7908 neg, step);
7909 step = NULL_TREE;
7910 tree cond = fold_build2_loc (loc, LT_EXPR,
7911 boolean_type_node,
7912 begin, end);
7913 pos = fold_build3_loc (loc, COND_EXPR, stype, cond, pos,
7914 build_int_cst (stype, 0));
7915 cond = fold_build2_loc (loc, LT_EXPR, boolean_type_node,
7916 end, begin);
7917 neg = fold_build3_loc (loc, COND_EXPR, stype, cond, neg,
7918 build_int_cst (stype, 0));
7919 tree osteptype = TREE_TYPE (orig_step);
7920 cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
7921 orig_step,
7922 build_int_cst (osteptype, 0));
7923 tree cnt = fold_build3_loc (loc, COND_EXPR, stype,
7924 cond, pos, neg);
7925 cnt = fold_convert_loc (loc, sizetype, cnt);
7926 if (gimplify_expr (&cnt, pre_p, NULL, is_gimple_val,
7927 fb_rvalue) == GS_ERROR)
7928 return 2;
7929 tcnt = size_binop_loc (loc, MULT_EXPR, tcnt, cnt);
7930 }
7931 if (gimplify_expr (&tcnt, pre_p, NULL, is_gimple_val,
7932 fb_rvalue) == GS_ERROR)
7933 return 2;
7934 last_iter = TREE_PURPOSE (t);
7935 last_count = tcnt;
7936 }
7937 if (counts[i] == NULL_TREE)
7938 counts[i] = last_count;
7939 else
7940 counts[i] = size_binop_loc (OMP_CLAUSE_LOCATION (c),
7941 PLUS_EXPR, counts[i], last_count);
7942 }
7943 else
7944 n[i]++;
7945 }
7946 for (i = 0; i < 4; i++)
7947 if (counts[i])
7948 break;
7949 if (i == 4)
7950 return 0;
7951
7952 tree total = size_zero_node;
7953 for (i = 0; i < 4; i++)
7954 {
7955 unused[i] = counts[i] == NULL_TREE && n[i] == 0;
7956 if (counts[i] == NULL_TREE)
7957 counts[i] = size_zero_node;
7958 if (n[i])
7959 counts[i] = size_binop (PLUS_EXPR, counts[i], size_int (n[i]));
7960 if (gimplify_expr (&counts[i], pre_p, NULL, is_gimple_val,
7961 fb_rvalue) == GS_ERROR)
7962 return 2;
7963 total = size_binop (PLUS_EXPR, total, counts[i]);
7964 }
7965
7966 if (gimplify_expr (&total, pre_p, NULL, is_gimple_val, fb_rvalue)
7967 == GS_ERROR)
7968 return 2;
7969 bool is_old = unused[1] && unused[3];
7970 tree totalpx = size_binop (PLUS_EXPR, unshare_expr (total),
7971 size_int (is_old ? 1 : 4));
7972 tree type = build_array_type (ptr_type_node, build_index_type (totalpx));
7973 tree array = create_tmp_var_raw (type);
7974 TREE_ADDRESSABLE (array) = 1;
7975 if (!poly_int_tree_p (totalpx))
7976 {
7977 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (array)))
7978 gimplify_type_sizes (TREE_TYPE (array), pre_p);
7979 if (gimplify_omp_ctxp)
7980 {
7981 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
7982 while (ctx
7983 && (ctx->region_type == ORT_WORKSHARE
7984 || ctx->region_type == ORT_TASKGROUP
7985 || ctx->region_type == ORT_SIMD
7986 || ctx->region_type == ORT_ACC))
7987 ctx = ctx->outer_context;
7988 if (ctx)
7989 omp_add_variable (ctx, array, GOVD_LOCAL | GOVD_SEEN);
7990 }
7991 gimplify_vla_decl (array, pre_p);
7992 }
7993 else
7994 gimple_add_tmp_var (array);
7995 tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
7996 NULL_TREE);
7997 tree tem;
7998 if (!is_old)
7999 {
8000 tem = build2 (MODIFY_EXPR, void_type_node, r,
8001 build_int_cst (ptr_type_node, 0));
8002 gimplify_and_add (tem, pre_p);
8003 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
8004 NULL_TREE);
8005 }
8006 tem = build2 (MODIFY_EXPR, void_type_node, r,
8007 fold_convert (ptr_type_node, total));
8008 gimplify_and_add (tem, pre_p);
8009 for (i = 1; i < (is_old ? 2 : 4); i++)
8010 {
8011 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (i + !is_old),
8012 NULL_TREE, NULL_TREE);
8013 tem = build2 (MODIFY_EXPR, void_type_node, r, counts[i - 1]);
8014 gimplify_and_add (tem, pre_p);
8015 }
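/* At this point the header of the depend array is fully initialized;
   to summarize the stores above: with the OpenMP 5.0 layout (!is_old)
     array[0] = 0, array[1] = total,
     array[2], array[3], array[4] = counts of the out/inout,
     mutexinoutset and in entries,
   and with the old layout (is_old)
     array[0] = total, array[1] = count of the out/inout entries.
   The addresses themselves are stored starting at index 5, resp. 2. */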
8016
8017 tree cnts[4];
8018 for (j = 4; j; j--)
8019 if (!unused[j - 1])
8020 break;
8021 for (i = 0; i < 4; i++)
8022 {
8023 if (i && (i >= j || unused[i - 1]))
8024 {
8025 cnts[i] = cnts[i - 1];
8026 continue;
8027 }
8028 cnts[i] = create_tmp_var (sizetype);
8029 if (i == 0)
8030 g = gimple_build_assign (cnts[i], size_int (is_old ? 2 : 5));
8031 else
8032 {
8033 tree t;
8034 if (is_old)
8035 t = size_binop (PLUS_EXPR, counts[0], size_int (2));
8036 else
8037 t = size_binop (PLUS_EXPR, cnts[i - 1], counts[i - 1]);
8038 if (gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue)
8039 == GS_ERROR)
8040 return 2;
8041 g = gimple_build_assign (cnts[i], t);
8042 }
8043 gimple_seq_add_stmt (pre_p, g);
8044 }
8045
8046 last_iter = NULL_TREE;
8047 tree last_bind = NULL_TREE;
8048 tree *last_body = NULL;
8049 for (c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
8050 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
8051 {
8052 switch (OMP_CLAUSE_DEPEND_KIND (c))
8053 {
8054 case OMP_CLAUSE_DEPEND_IN:
8055 i = 2;
8056 break;
8057 case OMP_CLAUSE_DEPEND_OUT:
8058 case OMP_CLAUSE_DEPEND_INOUT:
8059 i = 0;
8060 break;
8061 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
8062 i = 1;
8063 break;
8064 case OMP_CLAUSE_DEPEND_DEPOBJ:
8065 i = 3;
8066 break;
8067 case OMP_CLAUSE_DEPEND_SOURCE:
8068 case OMP_CLAUSE_DEPEND_SINK:
8069 continue;
8070 default:
8071 gcc_unreachable ();
8072 }
8073 tree t = OMP_CLAUSE_DECL (c);
8074 if (TREE_CODE (t) == TREE_LIST
8075 && TREE_PURPOSE (t)
8076 && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
8077 {
8078 if (TREE_PURPOSE (t) != last_iter)
8079 {
8080 if (last_bind)
8081 gimplify_and_add (last_bind, pre_p);
8082 tree block = TREE_VEC_ELT (TREE_PURPOSE (t), 5);
8083 last_bind = build3 (BIND_EXPR, void_type_node,
8084 BLOCK_VARS (block), NULL, block);
8085 TREE_SIDE_EFFECTS (last_bind) = 1;
8086 SET_EXPR_LOCATION (last_bind, OMP_CLAUSE_LOCATION (c));
8087 tree *p = &BIND_EXPR_BODY (last_bind);
8088 for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
8089 {
8090 tree var = TREE_VEC_ELT (it, 0);
8091 tree begin = TREE_VEC_ELT (it, 1);
8092 tree end = TREE_VEC_ELT (it, 2);
8093 tree step = TREE_VEC_ELT (it, 3);
8094 tree orig_step = TREE_VEC_ELT (it, 4);
8095 tree type = TREE_TYPE (var);
8096 location_t loc = DECL_SOURCE_LOCATION (var);
8097 /* Emit:
8098 var = begin;
8099 goto cond_label;
8100 beg_label:
8101 ...
8102 var = var + step;
8103 cond_label:
8104 if (orig_step > 0) {
8105 if (var < end) goto beg_label;
8106 } else {
8107 if (var > end) goto beg_label;
8108 }
8109 for each iterator, with inner iterators added to
8110 the ... above. */
8111 tree beg_label = create_artificial_label (loc);
8112 tree cond_label = NULL_TREE;
8113 tem = build2_loc (loc, MODIFY_EXPR, void_type_node,
8114 var, begin);
8115 append_to_statement_list_force (tem, p);
8116 tem = build_and_jump (&cond_label);
8117 append_to_statement_list_force (tem, p);
8118 tem = build1 (LABEL_EXPR, void_type_node, beg_label);
8119 append_to_statement_list (tem, p);
8120 tree bind = build3 (BIND_EXPR, void_type_node, NULL_TREE,
8121 NULL_TREE, NULL_TREE);
8122 TREE_SIDE_EFFECTS (bind) = 1;
8123 SET_EXPR_LOCATION (bind, loc);
8124 append_to_statement_list_force (bind, p);
8125 if (POINTER_TYPE_P (type))
8126 tem = build2_loc (loc, POINTER_PLUS_EXPR, type,
8127 var, fold_convert_loc (loc, sizetype,
8128 step));
8129 else
8130 tem = build2_loc (loc, PLUS_EXPR, type, var, step);
8131 tem = build2_loc (loc, MODIFY_EXPR, void_type_node,
8132 var, tem);
8133 append_to_statement_list_force (tem, p);
8134 tem = build1 (LABEL_EXPR, void_type_node, cond_label);
8135 append_to_statement_list (tem, p);
8136 tree cond = fold_build2_loc (loc, LT_EXPR,
8137 boolean_type_node,
8138 var, end);
8139 tree pos
8140 = fold_build3_loc (loc, COND_EXPR, void_type_node,
8141 cond, build_and_jump (&beg_label),
8142 void_node);
8143 cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
8144 var, end);
8145 tree neg
8146 = fold_build3_loc (loc, COND_EXPR, void_type_node,
8147 cond, build_and_jump (&beg_label),
8148 void_node);
8149 tree osteptype = TREE_TYPE (orig_step);
8150 cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
8151 orig_step,
8152 build_int_cst (osteptype, 0));
8153 tem = fold_build3_loc (loc, COND_EXPR, void_type_node,
8154 cond, pos, neg);
8155 append_to_statement_list_force (tem, p);
8156 p = &BIND_EXPR_BODY (bind);
8157 }
8158 last_body = p;
8159 }
8160 last_iter = TREE_PURPOSE (t);
8161 if (TREE_CODE (TREE_VALUE (t)) == COMPOUND_EXPR)
8162 {
8163 append_to_statement_list (TREE_OPERAND (TREE_VALUE (t),
8164 0), last_body);
8165 TREE_VALUE (t) = TREE_OPERAND (TREE_VALUE (t), 1);
8166 }
8167 if (error_operand_p (TREE_VALUE (t)))
8168 return 2;
8169 TREE_VALUE (t) = build_fold_addr_expr (TREE_VALUE (t));
8170 r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
8171 NULL_TREE, NULL_TREE);
8172 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
8173 void_type_node, r, TREE_VALUE (t));
8174 append_to_statement_list_force (tem, last_body);
8175 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
8176 void_type_node, cnts[i],
8177 size_binop (PLUS_EXPR, cnts[i], size_int (1)));
8178 append_to_statement_list_force (tem, last_body);
8179 TREE_VALUE (t) = null_pointer_node;
8180 }
8181 else
8182 {
8183 if (last_bind)
8184 {
8185 gimplify_and_add (last_bind, pre_p);
8186 last_bind = NULL_TREE;
8187 }
8188 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
8189 {
8190 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
8191 NULL, is_gimple_val, fb_rvalue);
8192 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
8193 }
8194 if (error_operand_p (OMP_CLAUSE_DECL (c)))
8195 return 2;
8196 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
8197 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
8198 is_gimple_val, fb_rvalue) == GS_ERROR)
8199 return 2;
8200 r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
8201 NULL_TREE, NULL_TREE);
8202 tem = build2 (MODIFY_EXPR, void_type_node, r, OMP_CLAUSE_DECL (c));
8203 gimplify_and_add (tem, pre_p);
8204 g = gimple_build_assign (cnts[i], size_binop (PLUS_EXPR, cnts[i],
8205 size_int (1)));
8206 gimple_seq_add_stmt (pre_p, g);
8207 }
8208 }
8209 if (last_bind)
8210 gimplify_and_add (last_bind, pre_p);
8211 tree cond = boolean_false_node;
8212 if (is_old)
8213 {
8214 if (!unused[0])
8215 cond = build2_loc (first_loc, NE_EXPR, boolean_type_node, cnts[0],
8216 size_binop_loc (first_loc, PLUS_EXPR, counts[0],
8217 size_int (2)));
8218 if (!unused[2])
8219 cond = build2_loc (first_loc, TRUTH_OR_EXPR, boolean_type_node, cond,
8220 build2_loc (first_loc, NE_EXPR, boolean_type_node,
8221 cnts[2],
8222 size_binop_loc (first_loc, PLUS_EXPR,
8223 totalpx,
8224 size_int (1))));
8225 }
8226 else
8227 {
8228 tree prev = size_int (5);
8229 for (i = 0; i < 4; i++)
8230 {
8231 if (unused[i])
8232 continue;
8233 prev = size_binop_loc (first_loc, PLUS_EXPR, counts[i], prev);
8234 cond = build2_loc (first_loc, TRUTH_OR_EXPR, boolean_type_node, cond,
8235 build2_loc (first_loc, NE_EXPR, boolean_type_node,
8236 cnts[i], unshare_expr (prev)));
8237 }
8238 }
8239 tem = build3_loc (first_loc, COND_EXPR, void_type_node, cond,
8240 build_call_expr_loc (first_loc,
8241 builtin_decl_explicit (BUILT_IN_TRAP),
8242 0), void_node);
8243 gimplify_and_add (tem, pre_p);
8244 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
8245 OMP_CLAUSE_DEPEND_KIND (c) = OMP_CLAUSE_DEPEND_LAST;
8246 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
8247 OMP_CLAUSE_CHAIN (c) = *list_p;
8248 *list_p = c;
8249 return 1;
8250 }
8251
8252 /* Insert a GOMP_MAP_ALLOC or GOMP_MAP_RELEASE node following a
8253 GOMP_MAP_STRUCT mapping. C is an always_pointer mapping. STRUCT_NODE is
8254 the struct node to insert the new mapping after (when the struct node is
8255 initially created). PREV_NODE is the first of two or three mappings for a
8256 pointer, and is either:
8257 - the node before C, when a pair of mappings is used, e.g. for a C/C++
8258 array section.
8259 - not the node before C. This is true when we have a reference-to-pointer
8260 type (with a mapping for the reference and for the pointer), or for
8261 Fortran derived-type mappings with a GOMP_MAP_TO_PSET.
8262 If SCP is non-null, the new node is inserted before *SCP.
8263 If SCP is null, the new node is inserted before PREV_NODE.
8264 The return value is:
8265 - PREV_NODE, if SCP is non-null.
8266 - The newly-created ALLOC or RELEASE node, if SCP is null.
8267 - The second newly-created ALLOC or RELEASE node, if we are mapping a
8268 reference to a pointer. */
8269
8270 static tree
8271 insert_struct_comp_map (enum tree_code code, tree c, tree struct_node,
8272 tree prev_node, tree *scp)
8273 {
8274 enum gomp_map_kind mkind
8275 = (code == OMP_TARGET_EXIT_DATA || code == OACC_EXIT_DATA)
8276 ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
8277
8278 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_MAP);
8279 tree cl = scp ? prev_node : c2;
8280 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
8281 OMP_CLAUSE_DECL (c2) = unshare_expr (OMP_CLAUSE_DECL (c));
8282 OMP_CLAUSE_CHAIN (c2) = scp ? *scp : prev_node;
8283 if (OMP_CLAUSE_CHAIN (prev_node) != c
8284 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (prev_node)) == OMP_CLAUSE_MAP
8285 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (prev_node))
8286 == GOMP_MAP_TO_PSET))
8287 OMP_CLAUSE_SIZE (c2) = OMP_CLAUSE_SIZE (OMP_CLAUSE_CHAIN (prev_node));
8288 else
8289 OMP_CLAUSE_SIZE (c2) = TYPE_SIZE_UNIT (ptr_type_node);
8290 if (struct_node)
8291 OMP_CLAUSE_CHAIN (struct_node) = c2;
8292
8293 /* We might need to create an additional mapping if we have a reference to a
8294 pointer (in C++). Don't do this if we have something other than a
8295 GOMP_MAP_ALWAYS_POINTER or GOMP_MAP_ATTACH_DETACH though, i.e. a GOMP_MAP_TO_PSET. */
8296 if (OMP_CLAUSE_CHAIN (prev_node) != c
8297 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (prev_node)) == OMP_CLAUSE_MAP
8298 && ((OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (prev_node))
8299 == GOMP_MAP_ALWAYS_POINTER)
8300 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (prev_node))
8301 == GOMP_MAP_ATTACH_DETACH)))
8302 {
8303 tree c4 = OMP_CLAUSE_CHAIN (prev_node);
8304 tree c3 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_MAP);
8305 OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
8306 OMP_CLAUSE_DECL (c3) = unshare_expr (OMP_CLAUSE_DECL (c4));
8307 OMP_CLAUSE_SIZE (c3) = TYPE_SIZE_UNIT (ptr_type_node);
8308 OMP_CLAUSE_CHAIN (c3) = prev_node;
8309 if (!scp)
8310 OMP_CLAUSE_CHAIN (c2) = c3;
8311 else
8312 cl = c3;
8313 }
8314
8315 if (scp)
8316 *scp = c2;
8317
8318 return cl;
8319 }
8320
8321 /* Strip ARRAY_REFs or an indirect ref off BASE, find the containing object,
8322 and set *BITPOSP to the bit offset and *POFFSETP to the byte offset of the access.
8323 If BASE_REF is non-NULL and the containing object is a reference, set
8324 *BASE_REF to that reference before dereferencing the object.
8325 If BASE_REF is NULL, check that the containing object is a COMPONENT_REF or
8326 has array type, else return NULL. */
8327
8328 static tree
8329 extract_base_bit_offset (tree base, tree *base_ref, poly_int64 *bitposp,
8330 poly_offset_int *poffsetp)
8331 {
8332 tree offset;
8333 poly_int64 bitsize, bitpos;
8334 machine_mode mode;
8335 int unsignedp, reversep, volatilep = 0;
8336 poly_offset_int poffset;
8337
8338 if (base_ref)
8339 {
8340 *base_ref = NULL_TREE;
8341
8342 while (TREE_CODE (base) == ARRAY_REF)
8343 base = TREE_OPERAND (base, 0);
8344
8345 if (TREE_CODE (base) == INDIRECT_REF)
8346 base = TREE_OPERAND (base, 0);
8347 }
8348 else
8349 {
8350 if (TREE_CODE (base) == ARRAY_REF)
8351 {
8352 while (TREE_CODE (base) == ARRAY_REF)
8353 base = TREE_OPERAND (base, 0);
8354 if (TREE_CODE (base) != COMPONENT_REF
8355 || TREE_CODE (TREE_TYPE (base)) != ARRAY_TYPE)
8356 return NULL_TREE;
8357 }
8358 else if (TREE_CODE (base) == INDIRECT_REF
8359 && TREE_CODE (TREE_OPERAND (base, 0)) == COMPONENT_REF
8360 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (base, 0)))
8361 == REFERENCE_TYPE))
8362 base = TREE_OPERAND (base, 0);
8363 }
8364
8365 base = get_inner_reference (base, &bitsize, &bitpos, &offset, &mode,
8366 &unsignedp, &reversep, &volatilep);
8367
8368 tree orig_base = base;
8369
8370 if ((TREE_CODE (base) == INDIRECT_REF
8371 || (TREE_CODE (base) == MEM_REF
8372 && integer_zerop (TREE_OPERAND (base, 1))))
8373 && DECL_P (TREE_OPERAND (base, 0))
8374 && TREE_CODE (TREE_TYPE (TREE_OPERAND (base, 0))) == REFERENCE_TYPE)
8375 base = TREE_OPERAND (base, 0);
8376
8377 gcc_assert (offset == NULL_TREE || poly_int_tree_p (offset));
8378
8379 if (offset)
8380 poffset = wi::to_poly_offset (offset);
8381 else
8382 poffset = 0;
8383
8384 if (maybe_ne (bitpos, 0))
8385 poffset += bits_to_bytes_round_down (bitpos);
8386
8387 *bitposp = bitpos;
8388 *poffsetp = poffset;
8389
8390 /* Set *BASE_REF if BASE was a dereferenced reference variable. */
8391 if (base_ref && orig_base != base)
8392 *base_ref = orig_base;
8393
8394 return base;
8395 }
8396
8397 /* Returns true if EXPR is or contains (as a sub-component) BASE_PTR. */
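/* E.g. is_or_contains_p (s, s.a.b) and is_or_contains_p (p, p) are true,
   whereas is_or_contains_p (q, p) for an unrelated q is false. */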
8398
8399 static bool
8400 is_or_contains_p (tree expr, tree base_ptr)
8401 {
8402 while (expr != base_ptr)
8403 if (TREE_CODE (base_ptr) == COMPONENT_REF)
8404 base_ptr = TREE_OPERAND (base_ptr, 0);
8405 else
8406 break;
8407 return expr == base_ptr;
8408 }
8409
8410 /* Implement OpenMP 5.x map ordering rules for target directives. There are
8411 several rules, with some level of ambiguity; hopefully we can at least
8412 collect the complexity here in one place. */
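/* E.g. alloc/release/delete maps are linked to the end of the clause list,
   and a to/from/tofrom/alloc map of a pointer P is moved in front of an
   earlier map whose decl dereferences P (or a structure containing P), so
   that P is mapped before its use as a base-pointer. */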
8413
8414 static void
8415 omp_target_reorder_clauses (tree *list_p)
8416 {
8417 /* Collect refs to alloc/release/delete maps. */
8418 auto_vec<tree, 32> ard;
8419 tree *cp = list_p;
8420 while (*cp != NULL_TREE)
8421 if (OMP_CLAUSE_CODE (*cp) == OMP_CLAUSE_MAP
8422 && (OMP_CLAUSE_MAP_KIND (*cp) == GOMP_MAP_ALLOC
8423 || OMP_CLAUSE_MAP_KIND (*cp) == GOMP_MAP_RELEASE
8424 || OMP_CLAUSE_MAP_KIND (*cp) == GOMP_MAP_DELETE))
8425 {
8426 /* Unlink cp and push to ard. */
8427 tree c = *cp;
8428 tree nc = OMP_CLAUSE_CHAIN (c);
8429 *cp = nc;
8430 ard.safe_push (c);
8431
8432 /* Any associated pointer type maps should also move along. */
8433 while (*cp != NULL_TREE
8434 && OMP_CLAUSE_CODE (*cp) == OMP_CLAUSE_MAP
8435 && (OMP_CLAUSE_MAP_KIND (*cp) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
8436 || OMP_CLAUSE_MAP_KIND (*cp) == GOMP_MAP_FIRSTPRIVATE_POINTER
8437 || OMP_CLAUSE_MAP_KIND (*cp) == GOMP_MAP_ATTACH_DETACH
8438 || OMP_CLAUSE_MAP_KIND (*cp) == GOMP_MAP_POINTER
8439 || OMP_CLAUSE_MAP_KIND (*cp) == GOMP_MAP_ALWAYS_POINTER
8440 || OMP_CLAUSE_MAP_KIND (*cp) == GOMP_MAP_TO_PSET))
8441 {
8442 c = *cp;
8443 nc = OMP_CLAUSE_CHAIN (c);
8444 *cp = nc;
8445 ard.safe_push (c);
8446 }
8447 }
8448 else
8449 cp = &OMP_CLAUSE_CHAIN (*cp);
8450
8451 /* Link alloc/release/delete maps to the end of the list. */
8452 for (unsigned int i = 0; i < ard.length (); i++)
8453 {
8454 *cp = ard[i];
8455 cp = &OMP_CLAUSE_CHAIN (ard[i]);
8456 }
8457 *cp = NULL_TREE;
8458
8459 /* OpenMP 5.0 requires that a pointer variable is mapped before
8460 its use as a base-pointer. */
8461 auto_vec<tree *, 32> atf;
8462 for (tree *cp = list_p; *cp; cp = &OMP_CLAUSE_CHAIN (*cp))
8463 if (OMP_CLAUSE_CODE (*cp) == OMP_CLAUSE_MAP)
8464 {
8465 /* Collect alloc, to, from, to/from clause tree pointers. */
8466 gomp_map_kind k = OMP_CLAUSE_MAP_KIND (*cp);
8467 if (k == GOMP_MAP_ALLOC
8468 || k == GOMP_MAP_TO
8469 || k == GOMP_MAP_FROM
8470 || k == GOMP_MAP_TOFROM
8471 || k == GOMP_MAP_ALWAYS_TO
8472 || k == GOMP_MAP_ALWAYS_FROM
8473 || k == GOMP_MAP_ALWAYS_TOFROM)
8474 atf.safe_push (cp);
8475 }
8476
8477 for (unsigned int i = 0; i < atf.length (); i++)
8478 if (atf[i])
8479 {
8480 tree *cp = atf[i];
8481 tree decl = OMP_CLAUSE_DECL (*cp);
8482 if (TREE_CODE (decl) == INDIRECT_REF || TREE_CODE (decl) == MEM_REF)
8483 {
8484 tree base_ptr = TREE_OPERAND (decl, 0);
8485 STRIP_TYPE_NOPS (base_ptr);
8486 for (unsigned int j = i + 1; j < atf.length (); j++)
8487 {
8488 tree *cp2 = atf[j];
8489 tree decl2 = OMP_CLAUSE_DECL (*cp2);
8490 if (is_or_contains_p (decl2, base_ptr))
8491 {
8492 /* Move *cp2 to before *cp. */
8493 tree c = *cp2;
8494 *cp2 = OMP_CLAUSE_CHAIN (c);
8495 OMP_CLAUSE_CHAIN (c) = *cp;
8496 *cp = c;
8497 atf[j] = NULL;
8498 }
8499 }
8500 }
8501 }
8502 }
8503
8504 /* Scan the OMP clauses in *LIST_P, installing mappings into a new
8505 omp context as well as into the previous omp contexts. */
8506
8507 static void
8508 gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
8509 enum omp_region_type region_type,
8510 enum tree_code code)
8511 {
8512 struct gimplify_omp_ctx *ctx, *outer_ctx;
8513 tree c;
8514 hash_map<tree, tree> *struct_map_to_clause = NULL;
8515 hash_set<tree> *struct_deref_set = NULL;
8516 tree *prev_list_p = NULL, *orig_list_p = list_p;
8517 int handled_depend_iterators = -1;
8518 int nowait = -1;
8519
8520 ctx = new_omp_context (region_type);
8521 ctx->code = code;
8522 outer_ctx = ctx->outer_context;
8523 if (code == OMP_TARGET)
8524 {
8525 if (!lang_GNU_Fortran ())
8526 ctx->defaultmap[GDMK_POINTER] = GOVD_MAP | GOVD_MAP_0LEN_ARRAY;
8527 ctx->defaultmap[GDMK_SCALAR] = GOVD_FIRSTPRIVATE;
8528 }
8529 if (!lang_GNU_Fortran ())
8530 switch (code)
8531 {
8532 case OMP_TARGET:
8533 case OMP_TARGET_DATA:
8534 case OMP_TARGET_ENTER_DATA:
8535 case OMP_TARGET_EXIT_DATA:
8536 case OACC_DECLARE:
8537 case OACC_HOST_DATA:
8538 case OACC_PARALLEL:
8539 case OACC_KERNELS:
8540 ctx->target_firstprivatize_array_bases = true;
8541 default:
8542 break;
8543 }
8544
8545 if (code == OMP_TARGET
8546 || code == OMP_TARGET_DATA
8547 || code == OMP_TARGET_ENTER_DATA
8548 || code == OMP_TARGET_EXIT_DATA)
8549 omp_target_reorder_clauses (list_p);
8550
8551 while ((c = *list_p) != NULL)
8552 {
8553 bool remove = false;
8554 bool notice_outer = true;
8555 const char *check_non_private = NULL;
8556 unsigned int flags;
8557 tree decl;
8558
8559 switch (OMP_CLAUSE_CODE (c))
8560 {
8561 case OMP_CLAUSE_PRIVATE:
8562 flags = GOVD_PRIVATE | GOVD_EXPLICIT;
8563 if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
8564 {
8565 flags |= GOVD_PRIVATE_OUTER_REF;
8566 OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
8567 }
8568 else
8569 notice_outer = false;
8570 goto do_add;
8571 case OMP_CLAUSE_SHARED:
8572 flags = GOVD_SHARED | GOVD_EXPLICIT;
8573 goto do_add;
8574 case OMP_CLAUSE_FIRSTPRIVATE:
8575 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
8576 check_non_private = "firstprivate";
8577 goto do_add;
8578 case OMP_CLAUSE_LASTPRIVATE:
8579 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
8580 switch (code)
8581 {
8582 case OMP_DISTRIBUTE:
8583 error_at (OMP_CLAUSE_LOCATION (c),
8584 "conditional %<lastprivate%> clause on "
8585 "%qs construct", "distribute");
8586 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
8587 break;
8588 case OMP_TASKLOOP:
8589 error_at (OMP_CLAUSE_LOCATION (c),
8590 "conditional %<lastprivate%> clause on "
8591 "%qs construct", "taskloop");
8592 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
8593 break;
8594 default:
8595 break;
8596 }
8597 flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
8598 if (code != OMP_LOOP)
8599 check_non_private = "lastprivate";
8600 decl = OMP_CLAUSE_DECL (c);
8601 if (error_operand_p (decl))
8602 goto do_add;
8603 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
8604 && !lang_hooks.decls.omp_scalar_p (decl))
8605 {
8606 error_at (OMP_CLAUSE_LOCATION (c),
8607 "non-scalar variable %qD in conditional "
8608 "%<lastprivate%> clause", decl);
8609 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
8610 }
8611 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
8612 flags |= GOVD_LASTPRIVATE_CONDITIONAL;
8613 if (outer_ctx
8614 && (outer_ctx->region_type == ORT_COMBINED_PARALLEL
8615 || ((outer_ctx->region_type & ORT_COMBINED_TEAMS)
8616 == ORT_COMBINED_TEAMS))
8617 && splay_tree_lookup (outer_ctx->variables,
8618 (splay_tree_key) decl) == NULL)
8619 {
8620 omp_add_variable (outer_ctx, decl, GOVD_SHARED | GOVD_SEEN);
8621 if (outer_ctx->outer_context)
8622 omp_notice_variable (outer_ctx->outer_context, decl, true);
8623 }
8624 else if (outer_ctx
8625 && (outer_ctx->region_type & ORT_TASK) != 0
8626 && outer_ctx->combined_loop
8627 && splay_tree_lookup (outer_ctx->variables,
8628 (splay_tree_key) decl) == NULL)
8629 {
8630 omp_add_variable (outer_ctx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
8631 if (outer_ctx->outer_context)
8632 omp_notice_variable (outer_ctx->outer_context, decl, true);
8633 }
8634 else if (outer_ctx
8635 && (outer_ctx->region_type == ORT_WORKSHARE
8636 || outer_ctx->region_type == ORT_ACC)
8637 && outer_ctx->combined_loop
8638 && splay_tree_lookup (outer_ctx->variables,
8639 (splay_tree_key) decl) == NULL
8640 && !omp_check_private (outer_ctx, decl, false))
8641 {
8642 omp_add_variable (outer_ctx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
8643 if (outer_ctx->outer_context
8644 && (outer_ctx->outer_context->region_type
8645 == ORT_COMBINED_PARALLEL)
8646 && splay_tree_lookup (outer_ctx->outer_context->variables,
8647 (splay_tree_key) decl) == NULL)
8648 {
8649 struct gimplify_omp_ctx *octx = outer_ctx->outer_context;
8650 omp_add_variable (octx, decl, GOVD_SHARED | GOVD_SEEN);
8651 if (octx->outer_context)
8652 {
8653 octx = octx->outer_context;
8654 if (octx->region_type == ORT_WORKSHARE
8655 && octx->combined_loop
8656 && splay_tree_lookup (octx->variables,
8657 (splay_tree_key) decl) == NULL
8658 && !omp_check_private (octx, decl, false))
8659 {
8660 omp_add_variable (octx, decl,
8661 GOVD_LASTPRIVATE | GOVD_SEEN);
8662 octx = octx->outer_context;
8663 if (octx
8664 && ((octx->region_type & ORT_COMBINED_TEAMS)
8665 == ORT_COMBINED_TEAMS)
8666 && (splay_tree_lookup (octx->variables,
8667 (splay_tree_key) decl)
8668 == NULL))
8669 {
8670 omp_add_variable (octx, decl,
8671 GOVD_SHARED | GOVD_SEEN);
8672 octx = octx->outer_context;
8673 }
8674 }
8675 if (octx)
8676 omp_notice_variable (octx, decl, true);
8677 }
8678 }
8679 else if (outer_ctx->outer_context)
8680 omp_notice_variable (outer_ctx->outer_context, decl, true);
8681 }
8682 goto do_add;
8683 case OMP_CLAUSE_REDUCTION:
8684 if (OMP_CLAUSE_REDUCTION_TASK (c))
8685 {
8686 if (region_type == ORT_WORKSHARE)
8687 {
8688 if (nowait == -1)
8689 nowait = omp_find_clause (*list_p,
8690 OMP_CLAUSE_NOWAIT) != NULL_TREE;
8691 if (nowait
8692 && (outer_ctx == NULL
8693 || outer_ctx->region_type != ORT_COMBINED_PARALLEL))
8694 {
8695 error_at (OMP_CLAUSE_LOCATION (c),
8696 "%<task%> reduction modifier on a construct "
8697 "with a %<nowait%> clause");
8698 OMP_CLAUSE_REDUCTION_TASK (c) = 0;
8699 }
8700 }
8701 else if ((region_type & ORT_PARALLEL) != ORT_PARALLEL)
8702 {
8703 error_at (OMP_CLAUSE_LOCATION (c),
8704 "invalid %<task%> reduction modifier on construct "
8705 "other than %<parallel%>, %qs or %<sections%>",
8706 lang_GNU_Fortran () ? "do" : "for");
8707 OMP_CLAUSE_REDUCTION_TASK (c) = 0;
8708 }
8709 }
8710 if (OMP_CLAUSE_REDUCTION_INSCAN (c))
8711 switch (code)
8712 {
8713 case OMP_SECTIONS:
8714 error_at (OMP_CLAUSE_LOCATION (c),
8715 "%<inscan%> %<reduction%> clause on "
8716 "%qs construct", "sections");
8717 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
8718 break;
8719 case OMP_PARALLEL:
8720 error_at (OMP_CLAUSE_LOCATION (c),
8721 "%<inscan%> %<reduction%> clause on "
8722 "%qs construct", "parallel");
8723 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
8724 break;
8725 case OMP_TEAMS:
8726 error_at (OMP_CLAUSE_LOCATION (c),
8727 "%<inscan%> %<reduction%> clause on "
8728 "%qs construct", "teams");
8729 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
8730 break;
8731 case OMP_TASKLOOP:
8732 error_at (OMP_CLAUSE_LOCATION (c),
8733 "%<inscan%> %<reduction%> clause on "
8734 "%qs construct", "taskloop");
8735 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
8736 break;
8737 default:
8738 break;
8739 }
8740 /* FALLTHRU */
8741 case OMP_CLAUSE_IN_REDUCTION:
8742 case OMP_CLAUSE_TASK_REDUCTION:
8743 flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
8744 /* OpenACC permits reductions on private variables. */
8745 if (!(region_type & ORT_ACC)
8746 /* taskgroup is actually not a worksharing region. */
8747 && code != OMP_TASKGROUP)
8748 check_non_private = omp_clause_code_name[OMP_CLAUSE_CODE (c)];
8749 decl = OMP_CLAUSE_DECL (c);
8750 if (TREE_CODE (decl) == MEM_REF)
8751 {
8752 tree type = TREE_TYPE (decl);
8753 if (gimplify_expr (&TYPE_MAX_VALUE (TYPE_DOMAIN (type)), pre_p,
8754 NULL, is_gimple_val, fb_rvalue, false)
8755 == GS_ERROR)
8756 {
8757 remove = true;
8758 break;
8759 }
8760 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
8761 if (DECL_P (v))
8762 {
8763 omp_firstprivatize_variable (ctx, v);
8764 omp_notice_variable (ctx, v, true);
8765 }
8766 decl = TREE_OPERAND (decl, 0);
8767 if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
8768 {
8769 if (gimplify_expr (&TREE_OPERAND (decl, 1), pre_p,
8770 NULL, is_gimple_val, fb_rvalue, false)
8771 == GS_ERROR)
8772 {
8773 remove = true;
8774 break;
8775 }
8776 v = TREE_OPERAND (decl, 1);
8777 if (DECL_P (v))
8778 {
8779 omp_firstprivatize_variable (ctx, v);
8780 omp_notice_variable (ctx, v, true);
8781 }
8782 decl = TREE_OPERAND (decl, 0);
8783 }
8784 if (TREE_CODE (decl) == ADDR_EXPR
8785 || TREE_CODE (decl) == INDIRECT_REF)
8786 decl = TREE_OPERAND (decl, 0);
8787 }
8788 goto do_add_decl;
8789 case OMP_CLAUSE_LINEAR:
8790 if (gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c), pre_p, NULL,
8791 is_gimple_val, fb_rvalue) == GS_ERROR)
8792 {
8793 remove = true;
8794 break;
8795 }
8796 else
8797 {
8798 if (code == OMP_SIMD
8799 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
8800 {
8801 struct gimplify_omp_ctx *octx = outer_ctx;
8802 if (octx
8803 && octx->region_type == ORT_WORKSHARE
8804 && octx->combined_loop
8805 && !octx->distribute)
8806 {
8807 if (octx->outer_context
8808 && (octx->outer_context->region_type
8809 == ORT_COMBINED_PARALLEL))
8810 octx = octx->outer_context->outer_context;
8811 else
8812 octx = octx->outer_context;
8813 }
8814 if (octx
8815 && octx->region_type == ORT_WORKSHARE
8816 && octx->combined_loop
8817 && octx->distribute)
8818 {
8819 error_at (OMP_CLAUSE_LOCATION (c),
8820 "%<linear%> clause for variable other than "
8821 "loop iterator specified on construct "
8822 "combined with %<distribute%>");
8823 remove = true;
8824 break;
8825 }
8826 }
8827 /* For a combined #pragma omp parallel for simd, we need to put
8828 lastprivate and perhaps firstprivate too on the
8829 parallel.  Similarly for #pragma omp for simd.  */
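/* Added note: walk outward through the enclosing contexts, registering
   the variable with the appropriate flags at each level, and stop as
   soon as a context already gives it a data-sharing class.  */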
8830 struct gimplify_omp_ctx *octx = outer_ctx;
8831 decl = NULL_TREE;
8832 do
8833 {
8834 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
8835 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
8836 break;
8837 decl = OMP_CLAUSE_DECL (c);
8838 if (error_operand_p (decl))
8839 {
8840 decl = NULL_TREE;
8841 break;
8842 }
8843 flags = GOVD_SEEN;
8844 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
8845 flags |= GOVD_FIRSTPRIVATE;
8846 if (!OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
8847 flags |= GOVD_LASTPRIVATE;
8848 if (octx
8849 && octx->region_type == ORT_WORKSHARE
8850 && octx->combined_loop)
8851 {
8852 if (octx->outer_context
8853 && (octx->outer_context->region_type
8854 == ORT_COMBINED_PARALLEL))
8855 octx = octx->outer_context;
8856 else if (omp_check_private (octx, decl, false))
8857 break;
8858 }
8859 else if (octx
8860 && (octx->region_type & ORT_TASK) != 0
8861 && octx->combined_loop)
8862 ;
8863 else if (octx
8864 && octx->region_type == ORT_COMBINED_PARALLEL
8865 && ctx->region_type == ORT_WORKSHARE
8866 && octx == outer_ctx)
8867 flags = GOVD_SEEN | GOVD_SHARED;
8868 else if (octx
8869 && ((octx->region_type & ORT_COMBINED_TEAMS)
8870 == ORT_COMBINED_TEAMS))
8871 flags = GOVD_SEEN | GOVD_SHARED;
8872 else if (octx
8873 && octx->region_type == ORT_COMBINED_TARGET)
8874 {
8875 flags &= ~GOVD_LASTPRIVATE;
8876 if (flags == GOVD_SEEN)
8877 break;
8878 }
8879 else
8880 break;
8881 splay_tree_node on
8882 = splay_tree_lookup (octx->variables,
8883 (splay_tree_key) decl);
8884 if (on && (on->value & GOVD_DATA_SHARE_CLASS) != 0)
8885 {
8886 octx = NULL;
8887 break;
8888 }
8889 omp_add_variable (octx, decl, flags);
8890 if (octx->outer_context == NULL)
8891 break;
8892 octx = octx->outer_context;
8893 }
8894 while (1);
8895 if (octx
8896 && decl
8897 && (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
8898 || !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
8899 omp_notice_variable (octx, decl, true);
8900 }
8901 flags = GOVD_LINEAR | GOVD_EXPLICIT;
8902 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
8903 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
8904 {
8905 notice_outer = false;
8906 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
8907 }
8908 goto do_add;
8909
8910 case OMP_CLAUSE_MAP:
8911 decl = OMP_CLAUSE_DECL (c);
8912 if (error_operand_p (decl))
8913 remove = true;
8914 switch (code)
8915 {
8916 case OMP_TARGET:
8917 break;
8918 case OACC_DATA:
8919 if (TREE_CODE (TREE_TYPE (decl)) != ARRAY_TYPE)
8920 break;
8921 /* FALLTHRU */
8922 case OMP_TARGET_DATA:
8923 case OMP_TARGET_ENTER_DATA:
8924 case OMP_TARGET_EXIT_DATA:
8925 case OACC_ENTER_DATA:
8926 case OACC_EXIT_DATA:
8927 case OACC_HOST_DATA:
8928 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
8929 || (OMP_CLAUSE_MAP_KIND (c)
8930 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
8931 /* For target {,enter ,exit }data only the array slice is
8932 mapped, but not the pointer to it. */
8933 remove = true;
8934 break;
8935 default:
8936 break;
8937 }
8938 /* For Fortran, not only the pointer to the data is mapped but also
8939 the address of the pointer, the array descriptor etc.; for
8940 'exit data' - and in particular for 'delete:' - having an 'alloc:'
8941 does not make sense. Likewise, for 'update' only transferring the
8942 data itself is needed as the rest has been handled in previous
8943 directives.  However, for 'exit data', the array descriptor needs to be
8944 deleted as well; hence, we turn the MAP_TO_PSET into a MAP_DELETE or MAP_RELEASE, matching the preceding map kind.
8945
8946 NOTE: Generally, it is not safe to perform "enter data" operations
8947 on arrays where the data *or the descriptor* may go out of scope
8948 before a corresponding "exit data" operation -- and such a
8949 descriptor may be synthesized temporarily, e.g. to pass an
8950 explicit-shape array to a function expecting an assumed-shape
8951 argument. Performing "enter data" inside the called function
8952 would thus be problematic. */
8953 if (code == OMP_TARGET_EXIT_DATA
8954 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_TO_PSET)
8955 OMP_CLAUSE_SET_MAP_KIND (c, OMP_CLAUSE_MAP_KIND (*prev_list_p)
8956 == GOMP_MAP_DELETE
8957 ? GOMP_MAP_DELETE : GOMP_MAP_RELEASE);
8958 else if ((code == OMP_TARGET_EXIT_DATA || code == OMP_TARGET_UPDATE)
8959 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
8960 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_TO_PSET))
8961 remove = true;
8962
8963 if (remove)
8964 break;
8965 if (DECL_P (decl) && outer_ctx && (region_type & ORT_ACC))
8966 {
8967 struct gimplify_omp_ctx *octx;
8968 for (octx = outer_ctx; octx; octx = octx->outer_context)
8969 {
8970 if (octx->region_type != ORT_ACC_HOST_DATA)
8971 break;
8972 splay_tree_node n2
8973 = splay_tree_lookup (octx->variables,
8974 (splay_tree_key) decl);
8975 if (n2)
8976 error_at (OMP_CLAUSE_LOCATION (c), "variable %qE "
8977 "declared in enclosing %<host_data%> region",
8978 DECL_NAME (decl));
8979 }
8980 }
8981 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
8982 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
8983 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
8984 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
8985 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
8986 {
8987 remove = true;
8988 break;
8989 }
8990 else if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
8991 || (OMP_CLAUSE_MAP_KIND (c)
8992 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
8993 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
8994 && TREE_CODE (OMP_CLAUSE_SIZE (c)) != INTEGER_CST)
8995 {
8996 OMP_CLAUSE_SIZE (c)
8997 = get_initialized_tmp_var (OMP_CLAUSE_SIZE (c), pre_p, NULL,
8998 false);
8999 if ((region_type & ORT_TARGET) != 0)
9000 omp_add_variable (ctx, OMP_CLAUSE_SIZE (c),
9001 GOVD_FIRSTPRIVATE | GOVD_SEEN);
9002 }
9003
9004 if (!DECL_P (decl))
9005 {
9006 tree d = decl, *pd;
9007 if (TREE_CODE (d) == ARRAY_REF)
9008 {
9009 while (TREE_CODE (d) == ARRAY_REF)
9010 d = TREE_OPERAND (d, 0);
9011 if (TREE_CODE (d) == COMPONENT_REF
9012 && TREE_CODE (TREE_TYPE (d)) == ARRAY_TYPE)
9013 decl = d;
9014 }
9015 pd = &OMP_CLAUSE_DECL (c);
9016 if (d == decl
9017 && TREE_CODE (decl) == INDIRECT_REF
9018 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
9019 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
9020 == REFERENCE_TYPE))
9021 {
9022 pd = &TREE_OPERAND (decl, 0);
9023 decl = TREE_OPERAND (decl, 0);
9024 }
9025 bool indir_p = false;
9026 tree orig_decl = decl;
9027 tree decl_ref = NULL_TREE;
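/* Added note: for an attach/detach mapping of a structure component,
   e.g. map (tofrom: s->ptr[0:n]), peel off the COMPONENT_REFs (and any
   intervening dereference) to reach the base variable.  */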
9028 if ((region_type & (ORT_ACC | ORT_TARGET | ORT_TARGET_DATA)) != 0
9029 && TREE_CODE (*pd) == COMPONENT_REF
9030 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH
9031 && code != OACC_UPDATE)
9032 {
9033 while (TREE_CODE (decl) == COMPONENT_REF)
9034 {
9035 decl = TREE_OPERAND (decl, 0);
9036 if (((TREE_CODE (decl) == MEM_REF
9037 && integer_zerop (TREE_OPERAND (decl, 1)))
9038 || INDIRECT_REF_P (decl))
9039 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
9040 == POINTER_TYPE))
9041 {
9042 indir_p = true;
9043 decl = TREE_OPERAND (decl, 0);
9044 }
9045 if (TREE_CODE (decl) == INDIRECT_REF
9046 && DECL_P (TREE_OPERAND (decl, 0))
9047 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
9048 == REFERENCE_TYPE))
9049 {
9050 decl_ref = decl;
9051 decl = TREE_OPERAND (decl, 0);
9052 }
9053 }
9054 }
9055 else if (TREE_CODE (decl) == COMPONENT_REF)
9056 {
9057 while (TREE_CODE (decl) == COMPONENT_REF)
9058 decl = TREE_OPERAND (decl, 0);
9059 if (TREE_CODE (decl) == INDIRECT_REF
9060 && DECL_P (TREE_OPERAND (decl, 0))
9061 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
9062 == REFERENCE_TYPE))
9063 decl = TREE_OPERAND (decl, 0);
9064 }
9065 if (decl != orig_decl && DECL_P (decl) && indir_p)
9066 {
9067 gomp_map_kind k
9068 = ((code == OACC_EXIT_DATA || code == OMP_TARGET_EXIT_DATA)
9069 ? GOMP_MAP_DETACH : GOMP_MAP_ATTACH);
9070 /* We have a dereference of a struct member. Make this an
9071 attach/detach operation, and ensure the base pointer is
9072 mapped as a FIRSTPRIVATE_POINTER. */
9073 OMP_CLAUSE_SET_MAP_KIND (c, k);
9074 flags = GOVD_MAP | GOVD_SEEN | GOVD_EXPLICIT;
9075 tree next_clause = OMP_CLAUSE_CHAIN (c);
9076 if (k == GOMP_MAP_ATTACH
9077 && code != OACC_ENTER_DATA
9078 && code != OMP_TARGET_ENTER_DATA
9079 && (!next_clause
9080 || (OMP_CLAUSE_CODE (next_clause) != OMP_CLAUSE_MAP)
9081 || (OMP_CLAUSE_MAP_KIND (next_clause)
9082 != GOMP_MAP_POINTER)
9083 || OMP_CLAUSE_DECL (next_clause) != decl)
9084 && (!struct_deref_set
9085 || !struct_deref_set->contains (decl)))
9086 {
9087 if (!struct_deref_set)
9088 struct_deref_set = new hash_set<tree> ();
9089 /* As well as the attach, we also need a
9090 FIRSTPRIVATE_POINTER clause to properly map the
9091 pointer to the struct base. */
9092 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
9093 OMP_CLAUSE_MAP);
9094 OMP_CLAUSE_SET_MAP_KIND (c2, GOMP_MAP_ALLOC);
9095 OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c2)
9096 = 1;
9097 tree charptr_zero
9098 = build_int_cst (build_pointer_type (char_type_node),
9099 0);
9100 OMP_CLAUSE_DECL (c2)
9101 = build2 (MEM_REF, char_type_node,
9102 decl_ref ? decl_ref : decl, charptr_zero);
9103 OMP_CLAUSE_SIZE (c2) = size_zero_node;
9104 tree c3 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
9105 OMP_CLAUSE_MAP);
9106 OMP_CLAUSE_SET_MAP_KIND (c3,
9107 GOMP_MAP_FIRSTPRIVATE_POINTER);
9108 OMP_CLAUSE_DECL (c3) = decl;
9109 OMP_CLAUSE_SIZE (c3) = size_zero_node;
9110 tree mapgrp = *prev_list_p;
9111 *prev_list_p = c2;
9112 OMP_CLAUSE_CHAIN (c3) = mapgrp;
9113 OMP_CLAUSE_CHAIN (c2) = c3;
9114
9115 struct_deref_set->add (decl);
9116 }
9117 goto do_add_decl;
9118 }
9119 /* An "attach/detach" operation on an update directive should
9120 behave as a GOMP_MAP_ALWAYS_POINTER. Beware that
9121 unlike attach or detach map kinds, GOMP_MAP_ALWAYS_POINTER
9122 depends on the previous mapping. */
9123 if (code == OACC_UPDATE
9124 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
9125 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_ALWAYS_POINTER);
9126 if (DECL_P (decl)
9127 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_TO_PSET
9128 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ATTACH
9129 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_DETACH
9130 && code != OACC_UPDATE
9131 && code != OMP_TARGET_UPDATE)
9132 {
9133 if (error_operand_p (decl))
9134 {
9135 remove = true;
9136 break;
9137 }
9138
9139 tree stype = TREE_TYPE (decl);
9140 if (TREE_CODE (stype) == REFERENCE_TYPE)
9141 stype = TREE_TYPE (stype);
9142 if (TYPE_SIZE_UNIT (stype) == NULL
9143 || TREE_CODE (TYPE_SIZE_UNIT (stype)) != INTEGER_CST)
9144 {
9145 error_at (OMP_CLAUSE_LOCATION (c),
9146 "mapping field %qE of variable length "
9147 "structure", OMP_CLAUSE_DECL (c));
9148 remove = true;
9149 break;
9150 }
9151
9152 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER
9153 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
9154 {
9155 /* Error recovery. */
9156 if (prev_list_p == NULL)
9157 {
9158 remove = true;
9159 break;
9160 }
9161 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
9162 {
9163 tree ch = OMP_CLAUSE_CHAIN (*prev_list_p);
9164 if (ch == NULL_TREE || OMP_CLAUSE_CHAIN (ch) != c)
9165 {
9166 remove = true;
9167 break;
9168 }
9169 }
9170 }
9171
9172 poly_offset_int offset1;
9173 poly_int64 bitpos1;
9174 tree base_ref;
9175
9176 tree base
9177 = extract_base_bit_offset (OMP_CLAUSE_DECL (c), &base_ref,
9178 &bitpos1, &offset1);
9179
9180 gcc_assert (base == decl);
9181
9182 splay_tree_node n
9183 = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
9184 bool ptr = (OMP_CLAUSE_MAP_KIND (c)
9185 == GOMP_MAP_ALWAYS_POINTER);
9186 bool attach_detach = (OMP_CLAUSE_MAP_KIND (c)
9187 == GOMP_MAP_ATTACH_DETACH);
9188 bool attach = OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
9189 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH;
9190 bool has_attachments = false;
9191 /* For OpenACC, pointers in structs should trigger an
9192 attach action. */
9193 if (attach_detach
9194 && ((region_type & (ORT_ACC | ORT_TARGET | ORT_TARGET_DATA))
9195 || code == OMP_TARGET_ENTER_DATA
9196 || code == OMP_TARGET_EXIT_DATA))
9198 {
9199 /* Turn a GOMP_MAP_ATTACH_DETACH clause into a
9200 GOMP_MAP_ATTACH or GOMP_MAP_DETACH clause after we
9201 have detected a case that needs a GOMP_MAP_STRUCT
9202 mapping added. */
9203 gomp_map_kind k
9204 = ((code == OACC_EXIT_DATA || code == OMP_TARGET_EXIT_DATA)
9205 ? GOMP_MAP_DETACH : GOMP_MAP_ATTACH);
9206 OMP_CLAUSE_SET_MAP_KIND (c, k);
9207 has_attachments = true;
9208 }
9209 if (n == NULL || (n->value & GOVD_MAP) == 0)
9210 {
9211 tree l = build_omp_clause (OMP_CLAUSE_LOCATION (c),
9212 OMP_CLAUSE_MAP);
9213 gomp_map_kind k = attach ? GOMP_MAP_FORCE_PRESENT
9214 : GOMP_MAP_STRUCT;
9215
9216 OMP_CLAUSE_SET_MAP_KIND (l, k);
9217 if (base_ref)
9218 OMP_CLAUSE_DECL (l) = unshare_expr (base_ref);
9219 else
9220 OMP_CLAUSE_DECL (l) = decl;
9221 OMP_CLAUSE_SIZE (l)
9222 = (!attach
9223 ? size_int (1)
9224 : DECL_P (OMP_CLAUSE_DECL (l))
9225 ? DECL_SIZE_UNIT (OMP_CLAUSE_DECL (l))
9226 : TYPE_SIZE_UNIT (TREE_TYPE (OMP_CLAUSE_DECL (l))));
9227 if (struct_map_to_clause == NULL)
9228 struct_map_to_clause = new hash_map<tree, tree>;
9229 struct_map_to_clause->put (decl, l);
9230 if (ptr || attach_detach)
9231 {
9232 insert_struct_comp_map (code, c, l, *prev_list_p,
9233 NULL);
9234 *prev_list_p = l;
9235 prev_list_p = NULL;
9236 }
9237 else
9238 {
9239 OMP_CLAUSE_CHAIN (l) = c;
9240 *list_p = l;
9241 list_p = &OMP_CLAUSE_CHAIN (l);
9242 }
9243 if (base_ref && code == OMP_TARGET)
9244 {
9245 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
9246 OMP_CLAUSE_MAP);
9247 enum gomp_map_kind mkind
9248 = GOMP_MAP_FIRSTPRIVATE_REFERENCE;
9249 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
9250 OMP_CLAUSE_DECL (c2) = decl;
9251 OMP_CLAUSE_SIZE (c2) = size_zero_node;
9252 OMP_CLAUSE_CHAIN (c2) = OMP_CLAUSE_CHAIN (l);
9253 OMP_CLAUSE_CHAIN (l) = c2;
9254 }
9255 flags = GOVD_MAP | GOVD_EXPLICIT;
9256 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c))
9257 || ptr
9258 || attach_detach)
9259 flags |= GOVD_SEEN;
9260 if (has_attachments)
9261 flags |= GOVD_MAP_HAS_ATTACHMENTS;
9262 goto do_add_decl;
9263 }
9264 else if (struct_map_to_clause)
9265 {
9266 tree *osc = struct_map_to_clause->get (decl);
9267 tree *sc = NULL, *scp = NULL;
9268 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c))
9269 || ptr
9270 || attach_detach)
9271 n->value |= GOVD_SEEN;
9272 sc = &OMP_CLAUSE_CHAIN (*osc);
9273 if (*sc != c
9274 && (OMP_CLAUSE_MAP_KIND (*sc)
9275 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
9276 sc = &OMP_CLAUSE_CHAIN (*sc);
9277 /* Here "prev_list_p" is the end of the inserted
9278 alloc/release nodes after the struct node, OSC. */
9279 for (; *sc != c; sc = &OMP_CLAUSE_CHAIN (*sc))
9280 if ((ptr || attach_detach) && sc == prev_list_p)
9281 break;
9282 else if (TREE_CODE (OMP_CLAUSE_DECL (*sc))
9283 != COMPONENT_REF
9284 && (TREE_CODE (OMP_CLAUSE_DECL (*sc))
9285 != INDIRECT_REF)
9286 && (TREE_CODE (OMP_CLAUSE_DECL (*sc))
9287 != ARRAY_REF))
9288 break;
9289 else
9290 {
9291 tree sc_decl = OMP_CLAUSE_DECL (*sc);
9292 poly_offset_int offsetn;
9293 poly_int64 bitposn;
9294 tree base
9295 = extract_base_bit_offset (sc_decl, NULL,
9296 &bitposn, &offsetn);
9297 if (base != decl)
9298 break;
9299 if (scp)
9300 continue;
9301 if ((region_type & ORT_ACC) != 0)
9302 {
9303 /* This duplicate checking code is currently only
9304 enabled for OpenACC. */
9305 tree d1 = OMP_CLAUSE_DECL (*sc);
9306 tree d2 = OMP_CLAUSE_DECL (c);
9307 while (TREE_CODE (d1) == ARRAY_REF)
9308 d1 = TREE_OPERAND (d1, 0);
9309 while (TREE_CODE (d2) == ARRAY_REF)
9310 d2 = TREE_OPERAND (d2, 0);
9311 if (TREE_CODE (d1) == INDIRECT_REF)
9312 d1 = TREE_OPERAND (d1, 0);
9313 if (TREE_CODE (d2) == INDIRECT_REF)
9314 d2 = TREE_OPERAND (d2, 0);
9315 while (TREE_CODE (d1) == COMPONENT_REF)
9316 if (TREE_CODE (d2) == COMPONENT_REF
9317 && TREE_OPERAND (d1, 1)
9318 == TREE_OPERAND (d2, 1))
9319 {
9320 d1 = TREE_OPERAND (d1, 0);
9321 d2 = TREE_OPERAND (d2, 0);
9322 }
9323 else
9324 break;
9325 if (d1 == d2)
9326 {
9327 error_at (OMP_CLAUSE_LOCATION (c),
9328 "%qE appears more than once in map "
9329 "clauses", OMP_CLAUSE_DECL (c));
9330 remove = true;
9331 break;
9332 }
9333 }
9334 if (maybe_lt (offset1, offsetn)
9335 || (known_eq (offset1, offsetn)
9336 && maybe_lt (bitpos1, bitposn)))
9337 {
9338 if (ptr || attach_detach)
9339 scp = sc;
9340 else
9341 break;
9342 }
9343 }
9344 if (remove)
9345 break;
9346 if (!attach)
9347 OMP_CLAUSE_SIZE (*osc)
9348 = size_binop (PLUS_EXPR, OMP_CLAUSE_SIZE (*osc),
9349 size_one_node);
9350 if (ptr || attach_detach)
9351 {
9352 tree cl = insert_struct_comp_map (code, c, NULL,
9353 *prev_list_p, scp);
9354 if (sc == prev_list_p)
9355 {
9356 *sc = cl;
9357 prev_list_p = NULL;
9358 }
9359 else
9360 {
9361 *prev_list_p = OMP_CLAUSE_CHAIN (c);
9362 list_p = prev_list_p;
9363 prev_list_p = NULL;
9364 OMP_CLAUSE_CHAIN (c) = *sc;
9365 *sc = cl;
9366 continue;
9367 }
9368 }
9369 else if (*sc != c)
9370 {
9371 *list_p = OMP_CLAUSE_CHAIN (c);
9372 OMP_CLAUSE_CHAIN (c) = *sc;
9373 *sc = c;
9374 continue;
9375 }
9376 }
9377 }
9378
9379 if (gimplify_expr (pd, pre_p, NULL, is_gimple_lvalue, fb_lvalue)
9380 == GS_ERROR)
9381 {
9382 remove = true;
9383 break;
9384 }
9385
9386 if (!remove
9387 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_POINTER
9388 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ATTACH_DETACH
9389 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_TO_PSET
9390 && OMP_CLAUSE_CHAIN (c)
9391 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c)) == OMP_CLAUSE_MAP
9392 && ((OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
9393 == GOMP_MAP_ALWAYS_POINTER)
9394 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
9395 == GOMP_MAP_ATTACH_DETACH)
9396 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
9397 == GOMP_MAP_TO_PSET)))
9398 prev_list_p = list_p;
9399
9400 break;
9401 }
9402 else
9403 {
9404 /* DECL_P (decl) == true */
9405 tree *sc;
9406 if (struct_map_to_clause
9407 && (sc = struct_map_to_clause->get (decl)) != NULL
9408 && OMP_CLAUSE_MAP_KIND (*sc) == GOMP_MAP_STRUCT
9409 && decl == OMP_CLAUSE_DECL (*sc))
9410 {
9411 /* We have found a map of the whole structure after a
9412 leading GOMP_MAP_STRUCT has been created, so turn the
9413 leading clause into a map of the whole structure
9414 variable, and remove the current one.
9415 TODO: we should also be able to remove some of the
9416 following structure element maps if they are of a
9417 compatible TO/FROM/ALLOC kind.  */
9418 OMP_CLAUSE_SET_MAP_KIND (*sc, OMP_CLAUSE_MAP_KIND (c));
9419 OMP_CLAUSE_SIZE (*sc) = unshare_expr (OMP_CLAUSE_SIZE (c));
9420 remove = true;
9421 break;
9422 }
9423 }
9424 flags = GOVD_MAP | GOVD_EXPLICIT;
9425 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TO
9426 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TOFROM)
9427 flags |= GOVD_MAP_ALWAYS_TO;
9428
9429 if ((code == OMP_TARGET
9430 || code == OMP_TARGET_DATA
9431 || code == OMP_TARGET_ENTER_DATA
9432 || code == OMP_TARGET_EXIT_DATA)
9433 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
9434 {
9435 for (struct gimplify_omp_ctx *octx = outer_ctx; octx;
9436 octx = octx->outer_context)
9437 {
9438 splay_tree_node n
9439 = splay_tree_lookup (octx->variables,
9440 (splay_tree_key) OMP_CLAUSE_DECL (c));
9441 /* If this is contained in an outer OpenMP region as a
9442 firstprivate value, remove the attach/detach. */
9443 if (n && (n->value & GOVD_FIRSTPRIVATE))
9444 {
9445 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FIRSTPRIVATE_POINTER);
9446 goto do_add;
9447 }
9448 }
9449
9450 enum gomp_map_kind map_kind = (code == OMP_TARGET_EXIT_DATA
9451 ? GOMP_MAP_DETACH
9452 : GOMP_MAP_ATTACH);
9453 OMP_CLAUSE_SET_MAP_KIND (c, map_kind);
9454 }
9455
9456 goto do_add;
9457
9458 case OMP_CLAUSE_DEPEND:
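/* Added note: depend (sink: ...) and depend (source) come from doacross
   loops, e.g. #pragma omp ordered depend (sink: i-1); only expressions
   inside the sink vector need gimplifying here.  Other depend kinds are
   lowered below by taking the address of the dependence object.  */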
9459 if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
9460 {
9461 tree deps = OMP_CLAUSE_DECL (c);
9462 while (deps && TREE_CODE (deps) == TREE_LIST)
9463 {
9464 if (TREE_CODE (TREE_PURPOSE (deps)) == TRUNC_DIV_EXPR
9465 && DECL_P (TREE_OPERAND (TREE_PURPOSE (deps), 1)))
9466 gimplify_expr (&TREE_OPERAND (TREE_PURPOSE (deps), 1),
9467 pre_p, NULL, is_gimple_val, fb_rvalue);
9468 deps = TREE_CHAIN (deps);
9469 }
9470 break;
9471 }
9472 else if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE)
9473 break;
9474 if (handled_depend_iterators == -1)
9475 handled_depend_iterators = gimplify_omp_depend (list_p, pre_p);
9476 if (handled_depend_iterators)
9477 {
9478 if (handled_depend_iterators == 2)
9479 remove = true;
9480 break;
9481 }
9482 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
9483 {
9484 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
9485 NULL, is_gimple_val, fb_rvalue);
9486 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
9487 }
9488 if (error_operand_p (OMP_CLAUSE_DECL (c)))
9489 {
9490 remove = true;
9491 break;
9492 }
9493 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
9494 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
9495 is_gimple_val, fb_rvalue) == GS_ERROR)
9496 {
9497 remove = true;
9498 break;
9499 }
9500 break;
9501
9502 case OMP_CLAUSE_TO:
9503 case OMP_CLAUSE_FROM:
9504 case OMP_CLAUSE__CACHE_:
9505 decl = OMP_CLAUSE_DECL (c);
9506 if (error_operand_p (decl))
9507 {
9508 remove = true;
9509 break;
9510 }
9511 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
9512 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
9513 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
9514 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
9515 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
9516 {
9517 remove = true;
9518 break;
9519 }
9520 if (!DECL_P (decl))
9521 {
9522 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p,
9523 NULL, is_gimple_lvalue, fb_lvalue)
9524 == GS_ERROR)
9525 {
9526 remove = true;
9527 break;
9528 }
9529 break;
9530 }
9531 goto do_notice;
9532
9533 case OMP_CLAUSE_USE_DEVICE_PTR:
9534 case OMP_CLAUSE_USE_DEVICE_ADDR:
9535 flags = GOVD_EXPLICIT;
9536 goto do_add;
9537
9538 case OMP_CLAUSE_IS_DEVICE_PTR:
9539 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
9540 goto do_add;
9541
9542 do_add:
9543 decl = OMP_CLAUSE_DECL (c);
9544 do_add_decl:
9545 if (error_operand_p (decl))
9546 {
9547 remove = true;
9548 break;
9549 }
9550 if (DECL_NAME (decl) == NULL_TREE && (flags & GOVD_SHARED) == 0)
9551 {
9552 tree t = omp_member_access_dummy_var (decl);
9553 if (t)
9554 {
9555 tree v = DECL_VALUE_EXPR (decl);
9556 DECL_NAME (decl) = DECL_NAME (TREE_OPERAND (v, 1));
9557 if (outer_ctx)
9558 omp_notice_variable (outer_ctx, t, true);
9559 }
9560 }
9561 if (code == OACC_DATA
9562 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
9563 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
9564 flags |= GOVD_MAP_0LEN_ARRAY;
9565 omp_add_variable (ctx, decl, flags);
9566 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
9567 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
9568 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
9569 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9570 {
9571 omp_add_variable (ctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
9572 GOVD_LOCAL | GOVD_SEEN);
9573 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c)
9574 && walk_tree (&OMP_CLAUSE_REDUCTION_INIT (c),
9575 find_decl_expr,
9576 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
9577 NULL) == NULL_TREE)
9578 omp_add_variable (ctx,
9579 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
9580 GOVD_LOCAL | GOVD_SEEN);
9581 gimplify_omp_ctxp = ctx;
9582 push_gimplify_context ();
9583
9584 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
9585 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
9586
9587 gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
9588 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
9589 pop_gimplify_context
9590 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
9591 push_gimplify_context ();
9592 gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
9593 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
9594 pop_gimplify_context
9595 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
9596 OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
9597 OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;
9598
9599 gimplify_omp_ctxp = outer_ctx;
9600 }
9601 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
9602 && OMP_CLAUSE_LASTPRIVATE_STMT (c))
9603 {
9604 gimplify_omp_ctxp = ctx;
9605 push_gimplify_context ();
9606 if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
9607 {
9608 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
9609 NULL, NULL);
9610 TREE_SIDE_EFFECTS (bind) = 1;
9611 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
9612 OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
9613 }
9614 gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
9615 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
9616 pop_gimplify_context
9617 (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
9618 OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;
9619
9620 gimplify_omp_ctxp = outer_ctx;
9621 }
9622 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
9623 && OMP_CLAUSE_LINEAR_STMT (c))
9624 {
9625 gimplify_omp_ctxp = ctx;
9626 push_gimplify_context ();
9627 if (TREE_CODE (OMP_CLAUSE_LINEAR_STMT (c)) != BIND_EXPR)
9628 {
9629 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
9630 NULL, NULL);
9631 TREE_SIDE_EFFECTS (bind) = 1;
9632 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LINEAR_STMT (c);
9633 OMP_CLAUSE_LINEAR_STMT (c) = bind;
9634 }
9635 gimplify_and_add (OMP_CLAUSE_LINEAR_STMT (c),
9636 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
9637 pop_gimplify_context
9638 (gimple_seq_first_stmt (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c)));
9639 OMP_CLAUSE_LINEAR_STMT (c) = NULL_TREE;
9640
9641 gimplify_omp_ctxp = outer_ctx;
9642 }
9643 if (notice_outer)
9644 goto do_notice;
9645 break;
9646
9647 case OMP_CLAUSE_COPYIN:
9648 case OMP_CLAUSE_COPYPRIVATE:
9649 decl = OMP_CLAUSE_DECL (c);
9650 if (error_operand_p (decl))
9651 {
9652 remove = true;
9653 break;
9654 }
9655 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_COPYPRIVATE
9656 && !remove
9657 && !omp_check_private (ctx, decl, true))
9658 {
9659 remove = true;
9660 if (is_global_var (decl))
9661 {
9662 if (DECL_THREAD_LOCAL_P (decl))
9663 remove = false;
9664 else if (DECL_HAS_VALUE_EXPR_P (decl))
9665 {
9666 tree value = get_base_address (DECL_VALUE_EXPR (decl));
9667
9668 if (value
9669 && DECL_P (value)
9670 && DECL_THREAD_LOCAL_P (value))
9671 remove = false;
9672 }
9673 }
9674 if (remove)
9675 error_at (OMP_CLAUSE_LOCATION (c),
9676 "copyprivate variable %qE is not threadprivate"
9677 " or private in outer context", DECL_NAME (decl));
9678 }
9679 do_notice:
9680 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
9681 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
9682 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
9683 && outer_ctx
9684 && ((region_type & ORT_TASKLOOP) == ORT_TASKLOOP
9685 || (region_type == ORT_WORKSHARE
9686 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
9687 && (OMP_CLAUSE_REDUCTION_INSCAN (c)
9688 || code == OMP_LOOP)))
9689 && (outer_ctx->region_type == ORT_COMBINED_PARALLEL
9690 || (code == OMP_LOOP
9691 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
9692 && ((outer_ctx->region_type & ORT_COMBINED_TEAMS)
9693 == ORT_COMBINED_TEAMS))))
9694 {
9695 splay_tree_node on
9696 = splay_tree_lookup (outer_ctx->variables,
9697 (splay_tree_key)decl);
9698 if (on == NULL || (on->value & GOVD_DATA_SHARE_CLASS) == 0)
9699 {
9700 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
9701 && TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
9702 && (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
9703 || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
9704 && (TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
9705 == POINTER_TYPE))))
9706 omp_firstprivatize_variable (outer_ctx, decl);
9707 else
9708 {
9709 omp_add_variable (outer_ctx, decl,
9710 GOVD_SEEN | GOVD_SHARED);
9711 if (outer_ctx->outer_context)
9712 omp_notice_variable (outer_ctx->outer_context, decl,
9713 true);
9714 }
9715 }
9716 }
9717 if (outer_ctx)
9718 omp_notice_variable (outer_ctx, decl, true);
9719 if (check_non_private
9720 && region_type == ORT_WORKSHARE
9721 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
9722 || decl == OMP_CLAUSE_DECL (c)
9723 || (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
9724 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
9725 == ADDR_EXPR
9726 || (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
9727 == POINTER_PLUS_EXPR
9728 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND
9729 (OMP_CLAUSE_DECL (c), 0), 0))
9730 == ADDR_EXPR)))))
9731 && omp_check_private (ctx, decl, false))
9732 {
9733 error ("%s variable %qE is private in outer context",
9734 check_non_private, DECL_NAME (decl));
9735 remove = true;
9736 }
9737 break;
9738
9739 case OMP_CLAUSE_IF:
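/* Added note: an if clause may carry a directive-name modifier, e.g.
   #pragma omp target parallel if (target: cond); diagnose a modifier
   that does not name the construct being gimplified.  */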
9740 if (OMP_CLAUSE_IF_MODIFIER (c) != ERROR_MARK
9741 && OMP_CLAUSE_IF_MODIFIER (c) != code)
9742 {
9743 const char *p[2];
9744 for (int i = 0; i < 2; i++)
9745 switch (i ? OMP_CLAUSE_IF_MODIFIER (c) : code)
9746 {
9747 case VOID_CST: p[i] = "cancel"; break;
9748 case OMP_PARALLEL: p[i] = "parallel"; break;
9749 case OMP_SIMD: p[i] = "simd"; break;
9750 case OMP_TASK: p[i] = "task"; break;
9751 case OMP_TASKLOOP: p[i] = "taskloop"; break;
9752 case OMP_TARGET_DATA: p[i] = "target data"; break;
9753 case OMP_TARGET: p[i] = "target"; break;
9754 case OMP_TARGET_UPDATE: p[i] = "target update"; break;
9755 case OMP_TARGET_ENTER_DATA:
9756 p[i] = "target enter data"; break;
9757 case OMP_TARGET_EXIT_DATA: p[i] = "target exit data"; break;
9758 default: gcc_unreachable ();
9759 }
9760 error_at (OMP_CLAUSE_LOCATION (c),
9761 "expected %qs %<if%> clause modifier rather than %qs",
9762 p[0], p[1]);
9763 remove = true;
9764 }
9765 /* Fall through. */
9766
9767 case OMP_CLAUSE_FINAL:
9768 OMP_CLAUSE_OPERAND (c, 0)
9769 = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
9770 /* Fall through. */
9771
9772 case OMP_CLAUSE_SCHEDULE:
9773 case OMP_CLAUSE_NUM_THREADS:
9774 case OMP_CLAUSE_NUM_TEAMS:
9775 case OMP_CLAUSE_THREAD_LIMIT:
9776 case OMP_CLAUSE_DIST_SCHEDULE:
9777 case OMP_CLAUSE_DEVICE:
9778 case OMP_CLAUSE_PRIORITY:
9779 case OMP_CLAUSE_GRAINSIZE:
9780 case OMP_CLAUSE_NUM_TASKS:
9781 case OMP_CLAUSE_HINT:
9782 case OMP_CLAUSE_ASYNC:
9783 case OMP_CLAUSE_WAIT:
9784 case OMP_CLAUSE_NUM_GANGS:
9785 case OMP_CLAUSE_NUM_WORKERS:
9786 case OMP_CLAUSE_VECTOR_LENGTH:
9787 case OMP_CLAUSE_WORKER:
9788 case OMP_CLAUSE_VECTOR:
9789 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
9790 is_gimple_val, fb_rvalue) == GS_ERROR)
9791 remove = true;
9792 break;
9793
9794 case OMP_CLAUSE_GANG:
9795 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
9796 is_gimple_val, fb_rvalue) == GS_ERROR)
9797 remove = true;
9798 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 1), pre_p, NULL,
9799 is_gimple_val, fb_rvalue) == GS_ERROR)
9800 remove = true;
9801 break;
9802
9803 case OMP_CLAUSE_NOWAIT:
9804 nowait = 1;
9805 break;
9806
9807 case OMP_CLAUSE_ORDERED:
9808 case OMP_CLAUSE_UNTIED:
9809 case OMP_CLAUSE_COLLAPSE:
9810 case OMP_CLAUSE_TILE:
9811 case OMP_CLAUSE_AUTO:
9812 case OMP_CLAUSE_SEQ:
9813 case OMP_CLAUSE_INDEPENDENT:
9814 case OMP_CLAUSE_MERGEABLE:
9815 case OMP_CLAUSE_PROC_BIND:
9816 case OMP_CLAUSE_SAFELEN:
9817 case OMP_CLAUSE_SIMDLEN:
9818 case OMP_CLAUSE_NOGROUP:
9819 case OMP_CLAUSE_THREADS:
9820 case OMP_CLAUSE_SIMD:
9821 case OMP_CLAUSE_BIND:
9822 case OMP_CLAUSE_IF_PRESENT:
9823 case OMP_CLAUSE_FINALIZE:
9824 break;
9825
9826 case OMP_CLAUSE_ORDER:
9827 ctx->order_concurrent = true;
9828 break;
9829
9830 case OMP_CLAUSE_DEFAULTMAP:
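/* Added note: defaultmap (behavior[:category]), e.g.
   defaultmap (tofrom: scalar).  When the category is omitted the
   behavior applies to all categories, hence the [gdmkmin, gdmkmax]
   range computed below.  */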
9831 enum gimplify_defaultmap_kind gdmkmin, gdmkmax;
9832 switch (OMP_CLAUSE_DEFAULTMAP_CATEGORY (c))
9833 {
9834 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED:
9835 gdmkmin = GDMK_SCALAR;
9836 gdmkmax = GDMK_POINTER;
9837 break;
9838 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_SCALAR:
9839 gdmkmin = gdmkmax = GDMK_SCALAR;
9840 break;
9841 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_AGGREGATE:
9842 gdmkmin = gdmkmax = GDMK_AGGREGATE;
9843 break;
9844 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_ALLOCATABLE:
9845 gdmkmin = gdmkmax = GDMK_ALLOCATABLE;
9846 break;
9847 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_POINTER:
9848 gdmkmin = gdmkmax = GDMK_POINTER;
9849 break;
9850 default:
9851 gcc_unreachable ();
9852 }
9853 for (int gdmk = gdmkmin; gdmk <= gdmkmax; gdmk++)
9854 switch (OMP_CLAUSE_DEFAULTMAP_BEHAVIOR (c))
9855 {
9856 case OMP_CLAUSE_DEFAULTMAP_ALLOC:
9857 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_ALLOC_ONLY;
9858 break;
9859 case OMP_CLAUSE_DEFAULTMAP_TO:
9860 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_TO_ONLY;
9861 break;
9862 case OMP_CLAUSE_DEFAULTMAP_FROM:
9863 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_FROM_ONLY;
9864 break;
9865 case OMP_CLAUSE_DEFAULTMAP_TOFROM:
9866 ctx->defaultmap[gdmk] = GOVD_MAP;
9867 break;
9868 case OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE:
9869 ctx->defaultmap[gdmk] = GOVD_FIRSTPRIVATE;
9870 break;
9871 case OMP_CLAUSE_DEFAULTMAP_NONE:
9872 ctx->defaultmap[gdmk] = 0;
9873 break;
9874 case OMP_CLAUSE_DEFAULTMAP_DEFAULT:
9875 switch (gdmk)
9876 {
9877 case GDMK_SCALAR:
9878 ctx->defaultmap[gdmk] = GOVD_FIRSTPRIVATE;
9879 break;
9880 case GDMK_AGGREGATE:
9881 case GDMK_ALLOCATABLE:
9882 ctx->defaultmap[gdmk] = GOVD_MAP;
9883 break;
9884 case GDMK_POINTER:
9885 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_0LEN_ARRAY;
9886 break;
9887 default:
9888 gcc_unreachable ();
9889 }
9890 break;
9891 default:
9892 gcc_unreachable ();
9893 }
9894 break;
9895
9896 case OMP_CLAUSE_ALIGNED:
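/* Added note: aligned (list[:alignment]), e.g.
   #pragma omp simd aligned (p:32).  The alignment expression is
   gimplified, and local pointer decls are recorded as GOVD_ALIGNED
   (later used to emit __builtin_assume_aligned).  */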
9897 decl = OMP_CLAUSE_DECL (c);
9898 if (error_operand_p (decl))
9899 {
9900 remove = true;
9901 break;
9902 }
9903 if (gimplify_expr (&OMP_CLAUSE_ALIGNED_ALIGNMENT (c), pre_p, NULL,
9904 is_gimple_val, fb_rvalue) == GS_ERROR)
9905 {
9906 remove = true;
9907 break;
9908 }
9909 if (!is_global_var (decl)
9910 && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
9911 omp_add_variable (ctx, decl, GOVD_ALIGNED);
9912 break;
9913
9914 case OMP_CLAUSE_NONTEMPORAL:
9915 decl = OMP_CLAUSE_DECL (c);
9916 if (error_operand_p (decl))
9917 {
9918 remove = true;
9919 break;
9920 }
9921 omp_add_variable (ctx, decl, GOVD_NONTEMPORAL);
9922 break;
9923
9924 case OMP_CLAUSE_ALLOCATE:
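/* Added note: allocate ([allocator:] list), e.g.
   allocate (omp_high_bw_mem_alloc: x).  A non-constant allocator
   expression may need to be evaluated into a temporary first.  */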
9925 decl = OMP_CLAUSE_DECL (c);
9926 if (error_operand_p (decl))
9927 {
9928 remove = true;
9929 break;
9930 }
9931 if (gimplify_expr (&OMP_CLAUSE_ALLOCATE_ALLOCATOR (c), pre_p, NULL,
9932 is_gimple_val, fb_rvalue) == GS_ERROR)
9933 {
9934 remove = true;
9935 break;
9936 }
9937 else if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c) == NULL_TREE
9938 || (TREE_CODE (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c))
9939 == INTEGER_CST))
9940 ;
9941 else if (code == OMP_TASKLOOP
9942 || !DECL_P (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)))
9943 OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)
9944 = get_initialized_tmp_var (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c),
9945 pre_p, NULL, false);
9946 break;
9947
9948 case OMP_CLAUSE_DEFAULT:
9949 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
9950 break;
9951
9952 case OMP_CLAUSE_INCLUSIVE:
9953 case OMP_CLAUSE_EXCLUSIVE:
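/* Added note: inclusive and exclusive clauses appear on the scan
   directive, e.g. #pragma omp scan inclusive (sum), and must name
   variables listed in an inscan reduction clause on the enclosing
   construct.  */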
9954 decl = OMP_CLAUSE_DECL (c);
9955 {
9956 splay_tree_node n = splay_tree_lookup (outer_ctx->variables,
9957 (splay_tree_key) decl);
9958 if (n == NULL || (n->value & GOVD_REDUCTION) == 0)
9959 {
9960 error_at (OMP_CLAUSE_LOCATION (c),
9961 "%qD specified in %qs clause but not in %<inscan%> "
9962 "%<reduction%> clause on the containing construct",
9963 decl, omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
9964 remove = true;
9965 }
9966 else
9967 {
9968 n->value |= GOVD_REDUCTION_INSCAN;
9969 if (outer_ctx->region_type == ORT_SIMD
9970 && outer_ctx->outer_context
9971 && outer_ctx->outer_context->region_type == ORT_WORKSHARE)
9972 {
9973 n = splay_tree_lookup (outer_ctx->outer_context->variables,
9974 (splay_tree_key) decl);
9975 if (n && (n->value & GOVD_REDUCTION) != 0)
9976 n->value |= GOVD_REDUCTION_INSCAN;
9977 }
9978 }
9979 }
9980 break;
9981
9982 default:
9983 gcc_unreachable ();
9984 }
9985
9986 if (code == OACC_DATA
9987 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
9988 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
9989 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
9990 remove = true;
9991 if (remove)
9992 *list_p = OMP_CLAUSE_CHAIN (c);
9993 else
9994 list_p = &OMP_CLAUSE_CHAIN (c);
9995 }
9996
9997 ctx->clauses = *orig_list_p;
9998 gimplify_omp_ctxp = ctx;
9999 if (struct_map_to_clause)
10000 delete struct_map_to_clause;
10001 if (struct_deref_set)
10002 delete struct_deref_set;
10003 }
10004
10005 /* Return true if DECL is a candidate for shared to firstprivate
10006 optimization.  We only consider non-addressable scalars that
10007 are not too large and are not references.  */
10008
10009 static bool
10010 omp_shared_to_firstprivate_optimizable_decl_p (tree decl)
10011 {
10012 if (TREE_ADDRESSABLE (decl))
10013 return false;
10014 tree type = TREE_TYPE (decl);
10015 if (!is_gimple_reg_type (type)
10016 || TREE_CODE (type) == REFERENCE_TYPE
10017 || TREE_ADDRESSABLE (type))
10018 return false;
10019 /* Don't optimize too large decls, as each thread/task will have
10020 its own. */
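/* Added note: the limit is four pointers' worth of data, e.g. 32 bytes
   on a 64-bit target (POINTER_SIZE is in bits).  */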
10021 HOST_WIDE_INT len = int_size_in_bytes (type);
10022 if (len == -1 || len > 4 * POINTER_SIZE / BITS_PER_UNIT)
10023 return false;
10024 if (lang_hooks.decls.omp_privatize_by_reference (decl))
10025 return false;
10026 return true;
10027 }
10028
10029 /* Helper function of omp_find_stores_op and gimplify_adjust_omp_clauses*.
10030 For a DECL satisfying omp_shared_to_firstprivate_optimizable_decl_p,
10031 mark it as GOVD_WRITTEN in the closest enclosing GOVD_SHARED context.  */
10032
10033 static void
10034 omp_mark_stores (struct gimplify_omp_ctx *ctx, tree decl)
10035 {
10036 for (; ctx; ctx = ctx->outer_context)
10037 {
10038 splay_tree_node n = splay_tree_lookup (ctx->variables,
10039 (splay_tree_key) decl);
10040 if (n == NULL)
10041 continue;
10042 else if (n->value & GOVD_SHARED)
10043 {
10044 n->value |= GOVD_WRITTEN;
10045 return;
10046 }
10047 else if (n->value & GOVD_DATA_SHARE_CLASS)
10048 return;
10049 }
10050 }
10051
10052 /* Helper callback for walk_gimple_seq to discover possible stores
10053 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
10054 GOVD_WRITTEN for those that are GOVD_SHARED in some outer
10055 context.  */
10056
10057 static tree
10058 omp_find_stores_op (tree *tp, int *walk_subtrees, void *data)
10059 {
10060 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
10061
10062 *walk_subtrees = 0;
10063 if (!wi->is_lhs)
10064 return NULL_TREE;
10065
10066 tree op = *tp;
10067 do
10068 {
10069 if (handled_component_p (op))
10070 op = TREE_OPERAND (op, 0);
10071 else if ((TREE_CODE (op) == MEM_REF || TREE_CODE (op) == TARGET_MEM_REF)
10072 && TREE_CODE (TREE_OPERAND (op, 0)) == ADDR_EXPR)
10073 op = TREE_OPERAND (TREE_OPERAND (op, 0), 0);
10074 else
10075 break;
10076 }
10077 while (1);
10078 if (!DECL_P (op) || !omp_shared_to_firstprivate_optimizable_decl_p (op))
10079 return NULL_TREE;
10080
10081 omp_mark_stores (gimplify_omp_ctxp, op);
10082 return NULL_TREE;
10083 }
10084
10085 /* Statement-level counterpart of omp_find_stores_op for
10086 walk_gimple_seq: decides which statements' operands are
10087 scanned for such stores and which OpenMP constructs are
10088 not recursed into.  */
10089
10090 static tree
10091 omp_find_stores_stmt (gimple_stmt_iterator *gsi_p,
10092 bool *handled_ops_p,
10093 struct walk_stmt_info *wi)
10094 {
10095 gimple *stmt = gsi_stmt (*gsi_p);
10096 switch (gimple_code (stmt))
10097 {
10098 /* Don't recurse into OpenMP constructs whose bodies
10099 gimplify_adjust_omp_clauses has already handled, but do
10100 walk gimple_omp_for_pre_body.  */
10101 case GIMPLE_OMP_FOR:
10102 *handled_ops_p = true;
10103 if (gimple_omp_for_pre_body (stmt))
10104 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
10105 omp_find_stores_stmt, omp_find_stores_op, wi);
10106 break;
10107 case GIMPLE_OMP_PARALLEL:
10108 case GIMPLE_OMP_TASK:
10109 case GIMPLE_OMP_SECTIONS:
10110 case GIMPLE_OMP_SINGLE:
10111 case GIMPLE_OMP_TARGET:
10112 case GIMPLE_OMP_TEAMS:
10113 case GIMPLE_OMP_CRITICAL:
10114 *handled_ops_p = true;
10115 break;
10116 default:
10117 break;
10118 }
10119 return NULL_TREE;
10120 }
10121
10122 struct gimplify_adjust_omp_clauses_data
10123 {
10124 tree *list_p;
10125 gimple_seq *pre_p;
10126 };
10127
10128 /* splay_tree_foreach callback: add an implicit clause to *LIST_P for
10129 each variable in the context that was seen but has no explicit clause.  */
10130
10131 static int
10132 gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
10133 {
10134 tree *list_p = ((struct gimplify_adjust_omp_clauses_data *) data)->list_p;
10135 gimple_seq *pre_p
10136 = ((struct gimplify_adjust_omp_clauses_data *) data)->pre_p;
10137 tree decl = (tree) n->key;
10138 unsigned flags = n->value;
10139 enum omp_clause_code code;
10140 tree clause;
10141 bool private_debug;
10142
10143 if (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
10144 && (flags & GOVD_LASTPRIVATE_CONDITIONAL) != 0)
10145 flags = GOVD_SHARED | GOVD_SEEN | GOVD_WRITTEN;
10146 if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
10147 return 0;
10148 if ((flags & GOVD_SEEN) == 0)
10149 return 0;
10150 if ((flags & GOVD_MAP_HAS_ATTACHMENTS) != 0)
10151 return 0;
10152 if (flags & GOVD_DEBUG_PRIVATE)
10153 {
10154 gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_SHARED);
10155 private_debug = true;
10156 }
10157 else if (flags & GOVD_MAP)
10158 private_debug = false;
10159 else
10160 private_debug
10161 = lang_hooks.decls.omp_private_debug_clause (decl,
10162 !!(flags & GOVD_SHARED));
10163 if (private_debug)
10164 code = OMP_CLAUSE_PRIVATE;
10165 else if (flags & GOVD_MAP)
10166 {
10167 code = OMP_CLAUSE_MAP;
10168 if ((gimplify_omp_ctxp->region_type & ORT_ACC) == 0
10169 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
10170 {
10171 error ("%<_Atomic%> %qD in implicit %<map%> clause", decl);
10172 return 0;
10173 }
10174 if (VAR_P (decl)
10175 && DECL_IN_CONSTANT_POOL (decl)
10176 && !lookup_attribute ("omp declare target",
10177 DECL_ATTRIBUTES (decl)))
10178 {
10179 tree id = get_identifier ("omp declare target");
10180 DECL_ATTRIBUTES (decl)
10181 = tree_cons (id, NULL_TREE, DECL_ATTRIBUTES (decl));
10182 varpool_node *node = varpool_node::get (decl);
10183 if (node)
10184 {
10185 node->offloadable = 1;
10186 if (ENABLE_OFFLOADING)
10187 g->have_offload = true;
10188 }
10189 }
10190 }
10191 else if (flags & GOVD_SHARED)
10192 {
10193 if (is_global_var (decl))
10194 {
10195 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
10196 while (ctx != NULL)
10197 {
10198 splay_tree_node on
10199 = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
10200 if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
10201 | GOVD_PRIVATE | GOVD_REDUCTION
10202 | GOVD_LINEAR | GOVD_MAP)) != 0)
10203 break;
10204 ctx = ctx->outer_context;
10205 }
10206 if (ctx == NULL)
10207 return 0;
10208 }
10209 code = OMP_CLAUSE_SHARED;
10210 }
10211 else if (flags & GOVD_PRIVATE)
10212 code = OMP_CLAUSE_PRIVATE;
10213 else if (flags & GOVD_FIRSTPRIVATE)
10214 {
10215 code = OMP_CLAUSE_FIRSTPRIVATE;
10216 if ((gimplify_omp_ctxp->region_type & ORT_TARGET)
10217 && (gimplify_omp_ctxp->region_type & ORT_ACC) == 0
10218 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
10219 {
10220 error ("%<_Atomic%> %qD in implicit %<firstprivate%> clause on "
10221 "%<target%> construct", decl);
10222 return 0;
10223 }
10224 }
10225 else if (flags & GOVD_LASTPRIVATE)
10226 code = OMP_CLAUSE_LASTPRIVATE;
10227 else if (flags & (GOVD_ALIGNED | GOVD_NONTEMPORAL))
10228 return 0;
10229 else if (flags & GOVD_CONDTEMP)
10230 {
10231 code = OMP_CLAUSE__CONDTEMP_;
10232 gimple_add_tmp_var (decl);
10233 }
10234 else
10235 gcc_unreachable ();
10236
10237 if (((flags & GOVD_LASTPRIVATE)
10238 || (code == OMP_CLAUSE_SHARED && (flags & GOVD_WRITTEN)))
10239 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
10240 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
10241
10242 tree chain = *list_p;
10243 clause = build_omp_clause (input_location, code);
10244 OMP_CLAUSE_DECL (clause) = decl;
10245 OMP_CLAUSE_CHAIN (clause) = chain;
10246 if (private_debug)
10247 OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
10248 else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
10249 OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
10250 else if (code == OMP_CLAUSE_SHARED
10251 && (flags & GOVD_WRITTEN) == 0
10252 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
10253 OMP_CLAUSE_SHARED_READONLY (clause) = 1;
10254 else if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_EXPLICIT) == 0)
10255 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (clause) = 1;
10256 else if (code == OMP_CLAUSE_MAP && (flags & GOVD_MAP_0LEN_ARRAY) != 0)
10257 {
10258 tree nc = build_omp_clause (input_location, OMP_CLAUSE_MAP);
10259 OMP_CLAUSE_DECL (nc) = decl;
10260 if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
10261 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
10262 OMP_CLAUSE_DECL (clause)
10263 = build_simple_mem_ref_loc (input_location, decl);
10264 OMP_CLAUSE_DECL (clause)
10265 = build2 (MEM_REF, char_type_node, OMP_CLAUSE_DECL (clause),
10266 build_int_cst (build_pointer_type (char_type_node), 0));
10267 OMP_CLAUSE_SIZE (clause) = size_zero_node;
10268 OMP_CLAUSE_SIZE (nc) = size_zero_node;
10269 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_ALLOC);
10270 OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (clause) = 1;
10271 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
10272 OMP_CLAUSE_CHAIN (nc) = chain;
10273 OMP_CLAUSE_CHAIN (clause) = nc;
10274 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
10275 gimplify_omp_ctxp = ctx->outer_context;
10276 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (clause), 0),
10277 pre_p, NULL, is_gimple_val, fb_rvalue);
10278 gimplify_omp_ctxp = ctx;
10279 }
10280 else if (code == OMP_CLAUSE_MAP)
10281 {
10282 int kind;
10283 /* Not all combinations of these GOVD_MAP flags are actually valid. */
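/* Added note: e.g. defaultmap (alloc: ...) recorded
   GOVD_MAP | GOVD_MAP_ALLOC_ONLY above, which lowers to
   GOMP_MAP_ALLOC here.  */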
10284 switch (flags & (GOVD_MAP_TO_ONLY
10285 | GOVD_MAP_FORCE
10286 | GOVD_MAP_FORCE_PRESENT
10287 | GOVD_MAP_ALLOC_ONLY
10288 | GOVD_MAP_FROM_ONLY))
10289 {
10290 case 0:
10291 kind = GOMP_MAP_TOFROM;
10292 break;
10293 case GOVD_MAP_FORCE:
10294 kind = GOMP_MAP_TOFROM | GOMP_MAP_FLAG_FORCE;
10295 break;
10296 case GOVD_MAP_TO_ONLY:
10297 kind = GOMP_MAP_TO;
10298 break;
10299 case GOVD_MAP_FROM_ONLY:
10300 kind = GOMP_MAP_FROM;
10301 break;
10302 case GOVD_MAP_ALLOC_ONLY:
10303 kind = GOMP_MAP_ALLOC;
10304 break;
10305 case GOVD_MAP_TO_ONLY | GOVD_MAP_FORCE:
10306 kind = GOMP_MAP_TO | GOMP_MAP_FLAG_FORCE;
10307 break;
10308 case GOVD_MAP_FORCE_PRESENT:
10309 kind = GOMP_MAP_FORCE_PRESENT;
10310 break;
10311 default:
10312 gcc_unreachable ();
10313 }
10314 OMP_CLAUSE_SET_MAP_KIND (clause, kind);
10315 if (DECL_SIZE (decl)
10316 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
10317 {
10318 tree decl2 = DECL_VALUE_EXPR (decl);
10319 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
10320 decl2 = TREE_OPERAND (decl2, 0);
10321 gcc_assert (DECL_P (decl2));
10322 tree mem = build_simple_mem_ref (decl2);
10323 OMP_CLAUSE_DECL (clause) = mem;
10324 OMP_CLAUSE_SIZE (clause) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
10325 if (gimplify_omp_ctxp->outer_context)
10326 {
10327 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
10328 omp_notice_variable (ctx, decl2, true);
10329 omp_notice_variable (ctx, OMP_CLAUSE_SIZE (clause), true);
10330 }
10331 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
10332 OMP_CLAUSE_MAP);
10333 OMP_CLAUSE_DECL (nc) = decl;
10334 OMP_CLAUSE_SIZE (nc) = size_zero_node;
10335 if (gimplify_omp_ctxp->target_firstprivatize_array_bases)
10336 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
10337 else
10338 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
10339 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
10340 OMP_CLAUSE_CHAIN (clause) = nc;
10341 }
10342 else if (gimplify_omp_ctxp->target_firstprivatize_array_bases
10343 && lang_hooks.decls.omp_privatize_by_reference (decl))
10344 {
10345 OMP_CLAUSE_DECL (clause) = build_simple_mem_ref (decl);
10346 OMP_CLAUSE_SIZE (clause)
10347 = unshare_expr (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))));
10348 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
10349 gimplify_omp_ctxp = ctx->outer_context;
10350 gimplify_expr (&OMP_CLAUSE_SIZE (clause),
10351 pre_p, NULL, is_gimple_val, fb_rvalue);
10352 gimplify_omp_ctxp = ctx;
10353 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
10354 OMP_CLAUSE_MAP);
10355 OMP_CLAUSE_DECL (nc) = decl;
10356 OMP_CLAUSE_SIZE (nc) = size_zero_node;
10357 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_REFERENCE);
10358 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
10359 OMP_CLAUSE_CHAIN (clause) = nc;
10360 }
10361 else
10362 OMP_CLAUSE_SIZE (clause) = DECL_SIZE_UNIT (decl);
10363 }
10364 if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_LASTPRIVATE) != 0)
10365 {
10366 tree nc = build_omp_clause (input_location, OMP_CLAUSE_LASTPRIVATE);
10367 OMP_CLAUSE_DECL (nc) = decl;
10368 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (nc) = 1;
10369 OMP_CLAUSE_CHAIN (nc) = chain;
10370 OMP_CLAUSE_CHAIN (clause) = nc;
10371 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
10372 gimplify_omp_ctxp = ctx->outer_context;
10373 lang_hooks.decls.omp_finish_clause (nc, pre_p,
10374 (ctx->region_type & ORT_ACC) != 0);
10375 gimplify_omp_ctxp = ctx;
10376 }
10377 *list_p = clause;
10378 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
10379 gimplify_omp_ctxp = ctx->outer_context;
10380 lang_hooks.decls.omp_finish_clause (clause, pre_p,
10381 (ctx->region_type & ORT_ACC) != 0);
10382 if (gimplify_omp_ctxp)
10383 for (; clause != chain; clause = OMP_CLAUSE_CHAIN (clause))
10384 if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_MAP
10385 && DECL_P (OMP_CLAUSE_SIZE (clause)))
10386 omp_notice_variable (gimplify_omp_ctxp, OMP_CLAUSE_SIZE (clause),
10387 true);
10388 gimplify_omp_ctxp = ctx;
10389 return 0;
10390 }
10391
10392 static void
10393 gimplify_adjust_omp_clauses (gimple_seq *pre_p, gimple_seq body, tree *list_p,
10394 enum tree_code code)
10395 {
10396 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
10397 tree *orig_list_p = list_p;
10398 tree c, decl;
10399 bool has_inscan_reductions = false;
10400
10401 if (body)
10402 {
10403 struct gimplify_omp_ctx *octx;
10404 for (octx = ctx; octx; octx = octx->outer_context)
10405 if ((octx->region_type & (ORT_PARALLEL | ORT_TASK | ORT_TEAMS)) != 0)
10406 break;
10407 if (octx)
10408 {
10409 struct walk_stmt_info wi;
10410 memset (&wi, 0, sizeof (wi));
10411 walk_gimple_seq (body, omp_find_stores_stmt,
10412 omp_find_stores_op, &wi);
10413 }
10414 }
10415
10416 if (ctx->add_safelen1)
10417 {
10418 /* If there are VLAs in the body of a simd loop, prevent
10419 vectorization by adding a safelen(1) clause.  */
10420 gcc_assert (ctx->region_type == ORT_SIMD);
10421 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
10422 OMP_CLAUSE_SAFELEN_EXPR (c) = integer_one_node;
10423 OMP_CLAUSE_CHAIN (c) = *list_p;
10424 *list_p = c;
10425 list_p = &OMP_CLAUSE_CHAIN (c);
10426 }
10427
10428 if (ctx->region_type == ORT_WORKSHARE
10429 && ctx->outer_context
10430 && ctx->outer_context->region_type == ORT_COMBINED_PARALLEL)
10431 {
10432 for (c = ctx->outer_context->clauses; c; c = OMP_CLAUSE_CHAIN (c))
10433 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
10434 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
10435 {
10436 decl = OMP_CLAUSE_DECL (c);
10437 splay_tree_node n
10438 = splay_tree_lookup (ctx->outer_context->variables,
10439 (splay_tree_key) decl);
10440 gcc_checking_assert (!splay_tree_lookup (ctx->variables,
10441 (splay_tree_key) decl));
10442 omp_add_variable (ctx, decl, n->value);
10443 tree c2 = copy_node (c);
10444 OMP_CLAUSE_CHAIN (c2) = *list_p;
10445 *list_p = c2;
10446 if ((n->value & GOVD_FIRSTPRIVATE) == 0)
10447 continue;
10448 c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
10449 OMP_CLAUSE_FIRSTPRIVATE);
10450 OMP_CLAUSE_DECL (c2) = decl;
10451 OMP_CLAUSE_CHAIN (c2) = *list_p;
10452 *list_p = c2;
10453 }
10454 }
10455 while ((c = *list_p) != NULL)
10456 {
10457 splay_tree_node n;
10458 bool remove = false;
10459
10460 switch (OMP_CLAUSE_CODE (c))
10461 {
10462 case OMP_CLAUSE_FIRSTPRIVATE:
10463 if ((ctx->region_type & ORT_TARGET)
10464 && (ctx->region_type & ORT_ACC) == 0
10465 && TYPE_ATOMIC (strip_array_types
10466 (TREE_TYPE (OMP_CLAUSE_DECL (c)))))
10467 {
10468 error_at (OMP_CLAUSE_LOCATION (c),
10469 "%<_Atomic%> %qD in %<firstprivate%> clause on "
10470 "%<target%> construct", OMP_CLAUSE_DECL (c));
10471 remove = true;
10472 break;
10473 }
10474 /* FALLTHRU */
10475 case OMP_CLAUSE_PRIVATE:
10476 case OMP_CLAUSE_SHARED:
10477 case OMP_CLAUSE_LINEAR:
10478 decl = OMP_CLAUSE_DECL (c);
10479 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
10480 remove = !(n->value & GOVD_SEEN);
10481 if ((n->value & GOVD_LASTPRIVATE_CONDITIONAL) != 0
10482 && code == OMP_PARALLEL
10483 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
10484 remove = true;
10485 if (! remove)
10486 {
10487 bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
10488 if ((n->value & GOVD_DEBUG_PRIVATE)
10489 || lang_hooks.decls.omp_private_debug_clause (decl, shared))
10490 {
10491 gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
10492 || ((n->value & GOVD_DATA_SHARE_CLASS)
10493 == GOVD_SHARED));
10494 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
10495 OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
10496 }
10497 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
10498 && (n->value & GOVD_WRITTEN) == 0
10499 && DECL_P (decl)
10500 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
10501 OMP_CLAUSE_SHARED_READONLY (c) = 1;
10502 else if (DECL_P (decl)
10503 && ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
10504 && (n->value & GOVD_WRITTEN) != 0)
10505 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
10506 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
10507 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
10508 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
10509 }
10510 else
10511 n->value &= ~GOVD_EXPLICIT;
10512 break;
10513
10514 case OMP_CLAUSE_LASTPRIVATE:
10515 /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
10516 accurately reflect the presence of a FIRSTPRIVATE clause. */
10517 decl = OMP_CLAUSE_DECL (c);
10518 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
10519 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
10520 = (n->value & GOVD_FIRSTPRIVATE) != 0;
10521 if (code == OMP_DISTRIBUTE
10522 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
10523 {
10524 remove = true;
10525 error_at (OMP_CLAUSE_LOCATION (c),
10526 "same variable used in %<firstprivate%> and "
10527 "%<lastprivate%> clauses on %<distribute%> "
10528 "construct");
10529 }
10530 if (!remove
10531 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
10532 && DECL_P (decl)
10533 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
10534 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
10535 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) && code == OMP_PARALLEL)
10536 remove = true;
10537 break;
10538
10539 case OMP_CLAUSE_ALIGNED:
10540 decl = OMP_CLAUSE_DECL (c);
10541 if (!is_global_var (decl))
10542 {
10543 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
10544 remove = n == NULL || !(n->value & GOVD_SEEN);
10545 if (!remove && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
10546 {
10547 struct gimplify_omp_ctx *octx;
10548 if (n != NULL
10549 && (n->value & (GOVD_DATA_SHARE_CLASS
10550 & ~GOVD_FIRSTPRIVATE)))
10551 remove = true;
10552 else
10553 for (octx = ctx->outer_context; octx;
10554 octx = octx->outer_context)
10555 {
10556 n = splay_tree_lookup (octx->variables,
10557 (splay_tree_key) decl);
10558 if (n == NULL)
10559 continue;
10560 if (n->value & GOVD_LOCAL)
10561 break;
10562 /* We have to avoid assigning a shared variable
10563 to itself when trying to add
10564 __builtin_assume_aligned. */
10565 if (n->value & GOVD_SHARED)
10566 {
10567 remove = true;
10568 break;
10569 }
10570 }
10571 }
10572 }
10573 else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
10574 {
10575 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
10576 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
10577 remove = true;
10578 }
10579 break;
10580
10581 case OMP_CLAUSE_NONTEMPORAL:
10582 decl = OMP_CLAUSE_DECL (c);
10583 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
10584 remove = n == NULL || !(n->value & GOVD_SEEN);
10585 break;
10586
10587 case OMP_CLAUSE_MAP:
10588 if (code == OMP_TARGET_EXIT_DATA
10589 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER)
10590 {
10591 remove = true;
10592 break;
10593 }
10594 decl = OMP_CLAUSE_DECL (c);
10595 /* Data clauses associated with reductions must be
10596 compatible with present_or_copy. Warn and adjust the clause
10597 if that is not the case. */
10598 if (ctx->region_type == ORT_ACC_PARALLEL
10599 || ctx->region_type == ORT_ACC_SERIAL)
10600 {
10601 tree t = DECL_P (decl) ? decl : TREE_OPERAND (decl, 0);
10602 n = NULL;
10603
10604 if (DECL_P (t))
10605 n = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
10606
10607 if (n && (n->value & GOVD_REDUCTION))
10608 {
10609 enum gomp_map_kind kind = OMP_CLAUSE_MAP_KIND (c);
10610
10611 OMP_CLAUSE_MAP_IN_REDUCTION (c) = 1;
10612 if ((kind & GOMP_MAP_TOFROM) != GOMP_MAP_TOFROM
10613 && kind != GOMP_MAP_FORCE_PRESENT
10614 && kind != GOMP_MAP_POINTER)
10615 {
10616 warning_at (OMP_CLAUSE_LOCATION (c), 0,
10617 "incompatible data clause with reduction "
10618 "on %qE; promoting to %<present_or_copy%>",
10619 DECL_NAME (t));
10620 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
10621 }
10622 }
10623 }
10624 if (!DECL_P (decl))
10625 {
10626 if ((ctx->region_type & ORT_TARGET) != 0
10627 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
10628 {
10629 if (TREE_CODE (decl) == INDIRECT_REF
10630 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
10631 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
10632 == REFERENCE_TYPE))
10633 decl = TREE_OPERAND (decl, 0);
10634 if (TREE_CODE (decl) == COMPONENT_REF)
10635 {
10636 while (TREE_CODE (decl) == COMPONENT_REF)
10637 decl = TREE_OPERAND (decl, 0);
10638 if (DECL_P (decl))
10639 {
10640 n = splay_tree_lookup (ctx->variables,
10641 (splay_tree_key) decl);
10642 if (!(n->value & GOVD_SEEN))
10643 remove = true;
10644 }
10645 }
10646 }
10647 break;
10648 }
10649 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
10650 if ((ctx->region_type & ORT_TARGET) != 0
10651 && !(n->value & GOVD_SEEN)
10652 && GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) == 0
10653 && (!is_global_var (decl)
10654 || !lookup_attribute ("omp declare target link",
10655 DECL_ATTRIBUTES (decl))))
10656 {
10657 remove = true;
10658 /* For struct element mappings, if the struct is never referenced
10659 in the target block and none of the mappings has an always modifier,
10660 remove all the struct element mappings, which immediately
10661 follow the GOMP_MAP_STRUCT map clause. */
10662 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT)
10663 {
10664 HOST_WIDE_INT cnt = tree_to_shwi (OMP_CLAUSE_SIZE (c));
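/* OMP_CLAUSE_SIZE of a GOMP_MAP_STRUCT clause is the number of
   element mappings chained immediately after it, so unlink that
   many following clauses here.  */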
10665 while (cnt--)
10666 OMP_CLAUSE_CHAIN (c)
10667 = OMP_CLAUSE_CHAIN (OMP_CLAUSE_CHAIN (c));
10668 }
10669 }
10670 else if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT
10671 && (code == OMP_TARGET_EXIT_DATA
10672 || code == OACC_EXIT_DATA))
10673 remove = true;
10674 else if (DECL_SIZE (decl)
10675 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST
10676 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_POINTER
10677 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
10678 && (OMP_CLAUSE_MAP_KIND (c)
10679 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
10680 {
10681 /* For GOMP_MAP_FORCE_DEVICEPTR, we'll never enter here, because
10682 for these, TREE_CODE (DECL_SIZE (decl)) will always be
10683 INTEGER_CST. */
10684 gcc_assert (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FORCE_DEVICEPTR);
10685
10686 tree decl2 = DECL_VALUE_EXPR (decl);
10687 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
10688 decl2 = TREE_OPERAND (decl2, 0);
10689 gcc_assert (DECL_P (decl2));
10690 tree mem = build_simple_mem_ref (decl2);
10691 OMP_CLAUSE_DECL (c) = mem;
10692 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
10693 if (ctx->outer_context)
10694 {
10695 omp_notice_variable (ctx->outer_context, decl2, true);
10696 omp_notice_variable (ctx->outer_context,
10697 OMP_CLAUSE_SIZE (c), true);
10698 }
10699 if (((ctx->region_type & ORT_TARGET) != 0
10700 || !ctx->target_firstprivatize_array_bases)
10701 && ((n->value & GOVD_SEEN) == 0
10702 || (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE)) == 0))
10703 {
10704 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
10705 OMP_CLAUSE_MAP);
10706 OMP_CLAUSE_DECL (nc) = decl;
10707 OMP_CLAUSE_SIZE (nc) = size_zero_node;
10708 if (ctx->target_firstprivatize_array_bases)
10709 OMP_CLAUSE_SET_MAP_KIND (nc,
10710 GOMP_MAP_FIRSTPRIVATE_POINTER);
10711 else
10712 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
10713 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (c);
10714 OMP_CLAUSE_CHAIN (c) = nc;
10715 c = nc;
10716 }
10717 }
10718 else
10719 {
10720 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
10721 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
10722 gcc_assert ((n->value & GOVD_SEEN) == 0
10723 || ((n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
10724 == 0));
10725 }
10726 break;
10727
10728 case OMP_CLAUSE_TO:
10729 case OMP_CLAUSE_FROM:
10730 case OMP_CLAUSE__CACHE_:
10731 decl = OMP_CLAUSE_DECL (c);
10732 if (!DECL_P (decl))
10733 break;
10734 if (DECL_SIZE (decl)
10735 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
10736 {
10737 tree decl2 = DECL_VALUE_EXPR (decl);
10738 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
10739 decl2 = TREE_OPERAND (decl2, 0);
10740 gcc_assert (DECL_P (decl2));
10741 tree mem = build_simple_mem_ref (decl2);
10742 OMP_CLAUSE_DECL (c) = mem;
10743 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
10744 if (ctx->outer_context)
10745 {
10746 omp_notice_variable (ctx->outer_context, decl2, true);
10747 omp_notice_variable (ctx->outer_context,
10748 OMP_CLAUSE_SIZE (c), true);
10749 }
10750 }
10751 else if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
10752 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
10753 break;
10754
10755 case OMP_CLAUSE_REDUCTION:
10756 if (OMP_CLAUSE_REDUCTION_INSCAN (c))
10757 {
10758 decl = OMP_CLAUSE_DECL (c);
10759 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
10760 if ((n->value & GOVD_REDUCTION_INSCAN) == 0)
10761 {
10762 remove = true;
10763 error_at (OMP_CLAUSE_LOCATION (c),
10764 "%qD specified in %<inscan%> %<reduction%> clause "
10765 "but not in %<scan%> directive clause", decl);
10766 break;
10767 }
10768 has_inscan_reductions = true;
10769 }
10770 /* FALLTHRU */
10771 case OMP_CLAUSE_IN_REDUCTION:
10772 case OMP_CLAUSE_TASK_REDUCTION:
10773 decl = OMP_CLAUSE_DECL (c);
10774 /* OpenACC reductions need a present_or_copy data clause.
10775 Add one if necessary. Emit an error when the reduction is private. */
10776 if (ctx->region_type == ORT_ACC_PARALLEL
10777 || ctx->region_type == ORT_ACC_SERIAL)
10778 {
10779 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
10780 if (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
10781 {
10782 remove = true;
10783 error_at (OMP_CLAUSE_LOCATION (c), "invalid private "
10784 "reduction on %qE", DECL_NAME (decl));
10785 }
10786 else if ((n->value & GOVD_MAP) == 0)
10787 {
10788 tree next = OMP_CLAUSE_CHAIN (c);
10789 tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_MAP);
10790 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_TOFROM);
10791 OMP_CLAUSE_DECL (nc) = decl;
10792 OMP_CLAUSE_CHAIN (c) = nc;
10793 lang_hooks.decls.omp_finish_clause (nc, pre_p,
10794 (ctx->region_type
10795 & ORT_ACC) != 0);
10796 while (1)
10797 {
10798 OMP_CLAUSE_MAP_IN_REDUCTION (nc) = 1;
10799 if (OMP_CLAUSE_CHAIN (nc) == NULL)
10800 break;
10801 nc = OMP_CLAUSE_CHAIN (nc);
10802 }
10803 OMP_CLAUSE_CHAIN (nc) = next;
10804 n->value |= GOVD_MAP;
10805 }
10806 }
10807 if (DECL_P (decl)
10808 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
10809 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
10810 break;
10811
10812 case OMP_CLAUSE_ALLOCATE:
10813 decl = OMP_CLAUSE_DECL (c);
10814 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
10815 if (n != NULL && !(n->value & GOVD_SEEN))
10816 {
10817 if ((n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE | GOVD_LINEAR))
10818 != 0
10819 && (n->value & (GOVD_REDUCTION | GOVD_LASTPRIVATE)) == 0)
10820 remove = true;
10821 }
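/* A non-constant allocator must itself be available inside the
   region, so make sure it is noticed in this context (by default
   firstprivatized on parallel, target, task and host teams
   regions).  */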
10822 if (!remove
10823 && OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)
10824 && TREE_CODE (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)) != INTEGER_CST
10825 && ((ctx->region_type & (ORT_PARALLEL | ORT_TARGET)) != 0
10826 || (ctx->region_type & ORT_TASKLOOP) == ORT_TASK
10827 || (ctx->region_type & ORT_HOST_TEAMS) == ORT_HOST_TEAMS))
10828 {
10829 tree allocator = OMP_CLAUSE_ALLOCATE_ALLOCATOR (c);
10830 n = splay_tree_lookup (ctx->variables, (splay_tree_key) allocator);
10831 if (n == NULL)
10832 {
10833 enum omp_clause_default_kind default_kind
10834 = ctx->default_kind;
10835 ctx->default_kind = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
10836 omp_notice_variable (ctx, OMP_CLAUSE_ALLOCATE_ALLOCATOR (c),
10837 true);
10838 ctx->default_kind = default_kind;
10839 }
10840 else
10841 omp_notice_variable (ctx, OMP_CLAUSE_ALLOCATE_ALLOCATOR (c),
10842 true);
10843 }
10844 break;
10845
10846 case OMP_CLAUSE_COPYIN:
10847 case OMP_CLAUSE_COPYPRIVATE:
10848 case OMP_CLAUSE_IF:
10849 case OMP_CLAUSE_NUM_THREADS:
10850 case OMP_CLAUSE_NUM_TEAMS:
10851 case OMP_CLAUSE_THREAD_LIMIT:
10852 case OMP_CLAUSE_DIST_SCHEDULE:
10853 case OMP_CLAUSE_DEVICE:
10854 case OMP_CLAUSE_SCHEDULE:
10855 case OMP_CLAUSE_NOWAIT:
10856 case OMP_CLAUSE_ORDERED:
10857 case OMP_CLAUSE_DEFAULT:
10858 case OMP_CLAUSE_UNTIED:
10859 case OMP_CLAUSE_COLLAPSE:
10860 case OMP_CLAUSE_FINAL:
10861 case OMP_CLAUSE_MERGEABLE:
10862 case OMP_CLAUSE_PROC_BIND:
10863 case OMP_CLAUSE_SAFELEN:
10864 case OMP_CLAUSE_SIMDLEN:
10865 case OMP_CLAUSE_DEPEND:
10866 case OMP_CLAUSE_PRIORITY:
10867 case OMP_CLAUSE_GRAINSIZE:
10868 case OMP_CLAUSE_NUM_TASKS:
10869 case OMP_CLAUSE_NOGROUP:
10870 case OMP_CLAUSE_THREADS:
10871 case OMP_CLAUSE_SIMD:
10872 case OMP_CLAUSE_HINT:
10873 case OMP_CLAUSE_DEFAULTMAP:
10874 case OMP_CLAUSE_ORDER:
10875 case OMP_CLAUSE_BIND:
10876 case OMP_CLAUSE_USE_DEVICE_PTR:
10877 case OMP_CLAUSE_USE_DEVICE_ADDR:
10878 case OMP_CLAUSE_IS_DEVICE_PTR:
10879 case OMP_CLAUSE_ASYNC:
10880 case OMP_CLAUSE_WAIT:
10881 case OMP_CLAUSE_INDEPENDENT:
10882 case OMP_CLAUSE_NUM_GANGS:
10883 case OMP_CLAUSE_NUM_WORKERS:
10884 case OMP_CLAUSE_VECTOR_LENGTH:
10885 case OMP_CLAUSE_GANG:
10886 case OMP_CLAUSE_WORKER:
10887 case OMP_CLAUSE_VECTOR:
10888 case OMP_CLAUSE_AUTO:
10889 case OMP_CLAUSE_SEQ:
10890 case OMP_CLAUSE_TILE:
10891 case OMP_CLAUSE_IF_PRESENT:
10892 case OMP_CLAUSE_FINALIZE:
10893 case OMP_CLAUSE_INCLUSIVE:
10894 case OMP_CLAUSE_EXCLUSIVE:
10895 break;
10896
10897 default:
10898 gcc_unreachable ();
10899 }
10900
10901 if (remove)
10902 *list_p = OMP_CLAUSE_CHAIN (c);
10903 else
10904 list_p = &OMP_CLAUSE_CHAIN (c);
10905 }
10906
10907 /* Add in any implicit data sharing. */
10908 struct gimplify_adjust_omp_clauses_data data;
10909 data.list_p = list_p;
10910 data.pre_p = pre_p;
10911 splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, &data);
10912
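/* An inscan reduction may not be combined with a linear clause on a
   variable other than the loop iterator, e.g. (a sketch):
     #pragma omp for reduction (inscan, +: r) linear (x: 1)  */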
10913 if (has_inscan_reductions)
10914 for (c = *orig_list_p; c; c = OMP_CLAUSE_CHAIN (c))
10915 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
10916 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
10917 {
10918 error_at (OMP_CLAUSE_LOCATION (c),
10919 "%<inscan%> %<reduction%> clause used together with "
10920 "%<linear%> clause for a variable other than loop "
10921 "iterator");
10922 break;
10923 }
10924
10925 gimplify_omp_ctxp = ctx->outer_context;
10926 delete_omp_context (ctx);
10927 }
10928
10929 /* Return 0 if the CONSTRUCTS selectors don't match the OpenMP context,
10930 1 if they do, and -1 if it is not known yet (simd is involved, so it
10931 won't be known until vectorization). If SCORES is non-NULL, it should
10932 point to an array of at least 2*NCONSTRUCTS+2 ints, which will be
10933 filled with the positions of the CONSTRUCTS (position -1 if a construct
10934 will never match) followed by the number of constructs in the OpenMP
10935 context construct trait. If the score depends on whether the code will
10936 be in a declare simd clone or not, the function returns 2 and there
10937 will be two sets of scores, the first one for the case that it is not
10938 in a declare simd clone, the other that it is. */
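/* For example (a sketch): for a 'declare variant' selector
     match (construct = {parallel, for})
   CONSTRUCTS would be { OMP_PARALLEL, OMP_FOR } with NCONSTRUCTS 2, and
   the result says whether the enclosing OpenMP context contains that
   ordered (sub)sequence of constructs.  */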
10939
10940 int
10941 omp_construct_selector_matches (enum tree_code *constructs, int nconstructs,
10942 int *scores)
10943 {
10944 int matched = 0, cnt = 0;
10945 bool simd_seen = false;
10946 bool target_seen = false;
10947 int declare_simd_cnt = -1;
10948 auto_vec<enum tree_code, 16> codes;
10949 for (struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp; ctx;)
10950 {
10951 if (((ctx->region_type & ORT_PARALLEL) && ctx->code == OMP_PARALLEL)
10952 || ((ctx->region_type & (ORT_TARGET | ORT_IMPLICIT_TARGET | ORT_ACC))
10953 == ORT_TARGET && ctx->code == OMP_TARGET)
10954 || ((ctx->region_type & ORT_TEAMS) && ctx->code == OMP_TEAMS)
10955 || (ctx->region_type == ORT_WORKSHARE && ctx->code == OMP_FOR)
10956 || (ctx->region_type == ORT_SIMD
10957 && ctx->code == OMP_SIMD
10958 && !omp_find_clause (ctx->clauses, OMP_CLAUSE_BIND)))
10959 {
10960 ++cnt;
10961 if (scores)
10962 codes.safe_push (ctx->code);
10963 else if (matched < nconstructs && ctx->code == constructs[matched])
10964 {
10965 if (ctx->code == OMP_SIMD)
10966 {
10967 if (matched)
10968 return 0;
10969 simd_seen = true;
10970 }
10971 ++matched;
10972 }
10973 if (ctx->code == OMP_TARGET)
10974 {
10975 if (scores == NULL)
10976 return matched < nconstructs ? 0 : simd_seen ? -1 : 1;
10977 target_seen = true;
10978 break;
10979 }
10980 }
10981 else if (ctx->region_type == ORT_WORKSHARE
10982 && ctx->code == OMP_LOOP
10983 && ctx->outer_context
10984 && ctx->outer_context->region_type == ORT_COMBINED_PARALLEL
10985 && ctx->outer_context->outer_context
10986 && ctx->outer_context->outer_context->code == OMP_LOOP
10987 && ctx->outer_context->outer_context->distribute)
10988 ctx = ctx->outer_context->outer_context;
10989 ctx = ctx->outer_context;
10990 }
10991 if (!target_seen
10992 && lookup_attribute ("omp declare simd",
10993 DECL_ATTRIBUTES (current_function_decl)))
10994 {
10995 /* Declare simd is a "maybe" case; it is supposed to be added only to
10996 the clones created by omp-simd-clone.c, not to the base function. */
10997 declare_simd_cnt = cnt++;
10998 if (scores)
10999 codes.safe_push (OMP_SIMD);
11000 else if (cnt == 0
11001 && constructs[0] == OMP_SIMD)
11002 {
11003 gcc_assert (matched == 0);
11004 simd_seen = true;
11005 if (++matched == nconstructs)
11006 return -1;
11007 }
11008 }
11009 if (tree attr = lookup_attribute ("omp declare variant variant",
11010 DECL_ATTRIBUTES (current_function_decl)))
11011 {
11012 enum tree_code variant_constructs[5];
11013 int variant_nconstructs = 0;
11014 if (!target_seen)
11015 variant_nconstructs
11016 = omp_constructor_traits_to_codes (TREE_VALUE (attr),
11017 variant_constructs);
11018 for (int i = 0; i < variant_nconstructs; i++)
11019 {
11020 ++cnt;
11021 if (scores)
11022 codes.safe_push (variant_constructs[i]);
11023 else if (matched < nconstructs
11024 && variant_constructs[i] == constructs[matched])
11025 {
11026 if (variant_constructs[i] == OMP_SIMD)
11027 {
11028 if (matched)
11029 return 0;
11030 simd_seen = true;
11031 }
11032 ++matched;
11033 }
11034 }
11035 }
11036 if (!target_seen
11037 && lookup_attribute ("omp declare target block",
11038 DECL_ATTRIBUTES (current_function_decl)))
11039 {
11040 if (scores)
11041 codes.safe_push (OMP_TARGET);
11042 else if (matched < nconstructs && constructs[matched] == OMP_TARGET)
11043 ++matched;
11044 }
11045 if (scores)
11046 {
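/* Fill SCORES. When a declare simd clone is involved, two passes are
   made: the first set of scores is for the case that the clone's simd
   construct is absent, the second for the case that it is present.
   Positions are found by matching CONSTRUCTS against the collected
   CODES from the innermost construct outwards.  */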
11047 for (int pass = 0; pass < (declare_simd_cnt == -1 ? 1 : 2); pass++)
11048 {
11049 int j = codes.length () - 1;
11050 for (int i = nconstructs - 1; i >= 0; i--)
11051 {
11052 while (j >= 0
11053 && (pass != 0 || declare_simd_cnt != j)
11054 && constructs[i] != codes[j])
11055 --j;
11056 if (pass == 0 && declare_simd_cnt != -1 && j > declare_simd_cnt)
11057 *scores++ = j - 1;
11058 else
11059 *scores++ = j;
11060 }
11061 *scores++ = ((pass == 0 && declare_simd_cnt != -1)
11062 ? codes.length () - 1 : codes.length ());
11063 }
11064 return declare_simd_cnt == -1 ? 1 : 2;
11065 }
11066 if (matched == nconstructs)
11067 return simd_seen ? -1 : 1;
11068 return 0;
11069 }
11070
11071 /* Gimplify OACC_CACHE. */
11072
11073 static void
11074 gimplify_oacc_cache (tree *expr_p, gimple_seq *pre_p)
11075 {
11076 tree expr = *expr_p;
11077
11078 gimplify_scan_omp_clauses (&OACC_CACHE_CLAUSES (expr), pre_p, ORT_ACC,
11079 OACC_CACHE);
11080 gimplify_adjust_omp_clauses (pre_p, NULL, &OACC_CACHE_CLAUSES (expr),
11081 OACC_CACHE);
11082
11083 /* TODO: Do something sensible with this information. */
11084
11085 *expr_p = NULL_TREE;
11086 }
11087
11088 /* Helper function of gimplify_oacc_declare. If required, translate the
11089 'kind' in CLAUSE into an 'entry' kind and an 'exit' kind. The entry
11090 kind will replace the one in CLAUSE, while the exit kind will be used
11091 in a new omp_clause and returned to the caller. */
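/* For instance, a 'copy' clause (GOMP_MAP_TOFROM) on 'acc declare' is
   split into a GOMP_MAP_TO for the entry and a new GOMP_MAP_FROM
   clause returned for the exit.  */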
11092
11093 static tree
11094 gimplify_oacc_declare_1 (tree clause)
11095 {
11096 HOST_WIDE_INT kind, new_op;
11097 bool ret = false;
11098 tree c = NULL;
11099
11100 kind = OMP_CLAUSE_MAP_KIND (clause);
11101
11102 switch (kind)
11103 {
11104 case GOMP_MAP_ALLOC:
11105 new_op = GOMP_MAP_RELEASE;
11106 ret = true;
11107 break;
11108
11109 case GOMP_MAP_FROM:
11110 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_ALLOC);
11111 new_op = GOMP_MAP_FROM;
11112 ret = true;
11113 break;
11114
11115 case GOMP_MAP_TOFROM:
11116 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_TO);
11117 new_op = GOMP_MAP_FROM;
11118 ret = true;
11119 break;
11120
11121 case GOMP_MAP_DEVICE_RESIDENT:
11122 case GOMP_MAP_FORCE_DEVICEPTR:
11123 case GOMP_MAP_FORCE_PRESENT:
11124 case GOMP_MAP_LINK:
11125 case GOMP_MAP_POINTER:
11126 case GOMP_MAP_TO:
11127 break;
11128
11129 default:
11130 gcc_unreachable ();
11131 break;
11132 }
11133
11134 if (ret)
11135 {
11136 c = build_omp_clause (OMP_CLAUSE_LOCATION (clause), OMP_CLAUSE_MAP);
11137 OMP_CLAUSE_SET_MAP_KIND (c, new_op);
11138 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clause);
11139 }
11140
11141 return c;
11142 }
11143
11144 /* Gimplify OACC_DECLARE. */
11145
11146 static void
11147 gimplify_oacc_declare (tree *expr_p, gimple_seq *pre_p)
11148 {
11149 tree expr = *expr_p;
11150 gomp_target *stmt;
11151 tree clauses, t, decl;
11152
11153 clauses = OACC_DECLARE_CLAUSES (expr);
11154
11155 gimplify_scan_omp_clauses (&clauses, pre_p, ORT_TARGET_DATA, OACC_DECLARE);
11156 gimplify_adjust_omp_clauses (pre_p, NULL, &clauses, OACC_DECLARE);
11157
11158 for (t = clauses; t; t = OMP_CLAUSE_CHAIN (t))
11159 {
11160 decl = OMP_CLAUSE_DECL (t);
11161
11162 if (TREE_CODE (decl) == MEM_REF)
11163 decl = TREE_OPERAND (decl, 0);
11164
11165 if (VAR_P (decl) && !is_oacc_declared (decl))
11166 {
11167 tree attr = get_identifier ("oacc declare target");
11168 DECL_ATTRIBUTES (decl) = tree_cons (attr, NULL_TREE,
11169 DECL_ATTRIBUTES (decl));
11170 }
11171
11172 if (VAR_P (decl)
11173 && !is_global_var (decl)
11174 && DECL_CONTEXT (decl) == current_function_decl)
11175 {
11176 tree c = gimplify_oacc_declare_1 (t);
11177 if (c)
11178 {
11179 if (oacc_declare_returns == NULL)
11180 oacc_declare_returns = new hash_map<tree, tree>;
11181
11182 oacc_declare_returns->put (decl, c);
11183 }
11184 }
11185
11186 if (gimplify_omp_ctxp)
11187 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_SEEN);
11188 }
11189
11190 stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
11191 clauses);
11192
11193 gimplify_seq_add_stmt (pre_p, stmt);
11194
11195 *expr_p = NULL_TREE;
11196 }
11197
11198 /* Gimplify the contents of an OMP_PARALLEL statement. This involves
11199 gimplification of the body, as well as scanning the body for used
11200 variables. We need to do this scan now, because variable-sized
11201 decls will be decomposed during gimplification. */
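/* E.g. (a sketch): a variable-length array 'int a[n]' referenced in
   the body is rewritten into a pointer dereference as it is
   gimplified, so the scan has to happen as part of this
   gimplification, not after it.  */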
11202
11203 static void
11204 gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
11205 {
11206 tree expr = *expr_p;
11207 gimple *g;
11208 gimple_seq body = NULL;
11209
11210 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
11211 OMP_PARALLEL_COMBINED (expr)
11212 ? ORT_COMBINED_PARALLEL
11213 : ORT_PARALLEL, OMP_PARALLEL);
11214
11215 push_gimplify_context ();
11216
11217 g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
11218 if (gimple_code (g) == GIMPLE_BIND)
11219 pop_gimplify_context (g);
11220 else
11221 pop_gimplify_context (NULL);
11222
11223 gimplify_adjust_omp_clauses (pre_p, body, &OMP_PARALLEL_CLAUSES (expr),
11224 OMP_PARALLEL);
11225
11226 g = gimple_build_omp_parallel (body,
11227 OMP_PARALLEL_CLAUSES (expr),
11228 NULL_TREE, NULL_TREE);
11229 if (OMP_PARALLEL_COMBINED (expr))
11230 gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
11231 gimplify_seq_add_stmt (pre_p, g);
11232 *expr_p = NULL_TREE;
11233 }
11234
11235 /* Gimplify the contents of an OMP_TASK statement. This involves
11236 gimplification of the body, as well as scanning the body for used
11237 variables. We need to do this scan now, because variable-sized
11238 decls will be decomposed during gimplification. */
11239
11240 static void
11241 gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
11242 {
11243 tree expr = *expr_p;
11244 gimple *g;
11245 gimple_seq body = NULL;
11246
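/* An OMP_TASK with a NULL body represents '#pragma omp taskwait' with
   depend clauses, e.g. (a sketch) '#pragma omp taskwait depend (in: x)';
   mutexinoutset is the one depend kind not allowed on it.  */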
11247 if (OMP_TASK_BODY (expr) == NULL_TREE)
11248 for (tree c = OMP_TASK_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
11249 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
11250 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_MUTEXINOUTSET)
11251 {
11252 error_at (OMP_CLAUSE_LOCATION (c),
11253 "%<mutexinoutset%> kind in %<depend%> clause on a "
11254 "%<taskwait%> construct");
11255 break;
11256 }
11257
11258 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p,
11259 omp_find_clause (OMP_TASK_CLAUSES (expr),
11260 OMP_CLAUSE_UNTIED)
11261 ? ORT_UNTIED_TASK : ORT_TASK, OMP_TASK);
11262
11263 if (OMP_TASK_BODY (expr))
11264 {
11265 push_gimplify_context ();
11266
11267 g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
11268 if (gimple_code (g) == GIMPLE_BIND)
11269 pop_gimplify_context (g);
11270 else
11271 pop_gimplify_context (NULL);
11272 }
11273
11274 gimplify_adjust_omp_clauses (pre_p, body, &OMP_TASK_CLAUSES (expr),
11275 OMP_TASK);
11276
11277 g = gimple_build_omp_task (body,
11278 OMP_TASK_CLAUSES (expr),
11279 NULL_TREE, NULL_TREE,
11280 NULL_TREE, NULL_TREE, NULL_TREE);
11281 if (OMP_TASK_BODY (expr) == NULL_TREE)
11282 gimple_omp_task_set_taskwait_p (g, true);
11283 gimplify_seq_add_stmt (pre_p, g);
11284 *expr_p = NULL_TREE;
11285 }
11286
11287 /* Helper function for gimplify_omp_for. If *TP is not a gimple constant,
11288 force it into a temporary initialized in PRE_P and add a firstprivate
11289 clause for it to ORIG_FOR_STMT. */
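/* E.g. (a sketch): for a '#pragma omp taskloop' iterating while
   'i < n + m', the bound N + M is evaluated into a temporary before
   the construct and that temporary is made firstprivate on it, so the
   deferred task uses the value from the point the taskloop was
   encountered.  */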
11290
11291 static void
11292 gimplify_omp_taskloop_expr (tree type, tree *tp, gimple_seq *pre_p,
11293 tree orig_for_stmt)
11294 {
11295 if (*tp == NULL || is_gimple_constant (*tp))
11296 return;
11297
11298 *tp = get_initialized_tmp_var (*tp, pre_p, NULL, false);
11299 /* A reference-to-pointer conversion is considered useless,
11300 but it is significant for the firstprivate clause. Force
11301 the conversion here. */
11302 if (type
11303 && TREE_CODE (type) == POINTER_TYPE
11304 && TREE_CODE (TREE_TYPE (*tp)) == REFERENCE_TYPE)
11305 {
11306 tree v = create_tmp_var (TYPE_MAIN_VARIANT (type));
11307 tree m = build2 (INIT_EXPR, TREE_TYPE (v), v, *tp);
11308 gimplify_and_add (m, pre_p);
11309 *tp = v;
11310 }
11311
11312 tree c = build_omp_clause (input_location, OMP_CLAUSE_FIRSTPRIVATE);
11313 OMP_CLAUSE_DECL (c) = *tp;
11314 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
11315 OMP_FOR_CLAUSES (orig_for_stmt) = c;
11316 }
11317
11318 /* Gimplify the gross structure of an OMP_FOR statement. */
11319
11320 static enum gimplify_status
11321 gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
11322 {
11323 tree for_stmt, orig_for_stmt, inner_for_stmt = NULL_TREE, decl, var, t;
11324 enum gimplify_status ret = GS_ALL_DONE;
11325 enum gimplify_status tret;
11326 gomp_for *gfor;
11327 gimple_seq for_body, for_pre_body;
11328 int i;
11329 bitmap has_decl_expr = NULL;
11330 enum omp_region_type ort = ORT_WORKSHARE;
11331 bool openacc = TREE_CODE (*expr_p) == OACC_LOOP;
11332
11333 orig_for_stmt = for_stmt = *expr_p;
11334
11335 bool loop_p = (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_BIND)
11336 != NULL_TREE);
11337 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
11338 {
11339 tree *data[4] = { NULL, NULL, NULL, NULL };
11340 gcc_assert (TREE_CODE (for_stmt) != OACC_LOOP);
11341 inner_for_stmt = walk_tree (&OMP_FOR_BODY (for_stmt),
11342 find_combined_omp_for, data, NULL);
11343 if (inner_for_stmt == NULL_TREE)
11344 {
11345 gcc_assert (seen_error ());
11346 *expr_p = NULL_TREE;
11347 return GS_ERROR;
11348 }
11349 if (data[2] && OMP_FOR_PRE_BODY (*data[2]))
11350 {
11351 append_to_statement_list_force (OMP_FOR_PRE_BODY (*data[2]),
11352 &OMP_FOR_PRE_BODY (for_stmt));
11353 OMP_FOR_PRE_BODY (*data[2]) = NULL_TREE;
11354 }
11355 if (OMP_FOR_PRE_BODY (inner_for_stmt))
11356 {
11357 append_to_statement_list_force (OMP_FOR_PRE_BODY (inner_for_stmt),
11358 &OMP_FOR_PRE_BODY (for_stmt));
11359 OMP_FOR_PRE_BODY (inner_for_stmt) = NULL_TREE;
11360 }
11361
11362 if (data[0])
11363 {
11364 /* We have some statements or variable declarations in between
11365 the composite construct directives. Move them so that they
11366 end up wrapped around the inner_for_stmt. */
11367 data[0] = expr_p;
11368 for (i = 0; i < 3; i++)
11369 if (data[i])
11370 {
11371 tree t = *data[i];
11372 if (i < 2 && data[i + 1] == &OMP_BODY (t))
11373 data[i + 1] = data[i];
11374 *data[i] = OMP_BODY (t);
11375 tree body = build3 (BIND_EXPR, void_type_node, NULL_TREE,
11376 NULL_TREE, make_node (BLOCK));
11377 OMP_BODY (t) = body;
11378 append_to_statement_list_force (inner_for_stmt,
11379 &BIND_EXPR_BODY (body));
11380 *data[3] = t;
11381 data[3] = tsi_stmt_ptr (tsi_start (BIND_EXPR_BODY (body)));
11382 gcc_assert (*data[3] == inner_for_stmt);
11383 }
11384 return GS_OK;
11385 }
11386
11387 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (inner_for_stmt)); i++)
11388 if (!loop_p
11389 && OMP_FOR_ORIG_DECLS (inner_for_stmt)
11390 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
11391 i)) == TREE_LIST
11392 && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
11393 i)))
11394 {
11395 tree orig = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt), i);
11396 /* Class iterators aren't allowed on OMP_SIMD, so the only
11397 case we need to solve is distribute parallel for. They are
11398 allowed on the loop construct, but that is already handled
11399 in gimplify_omp_loop. */
11400 gcc_assert (TREE_CODE (inner_for_stmt) == OMP_FOR
11401 && TREE_CODE (for_stmt) == OMP_DISTRIBUTE
11402 && data[1]);
11403 tree orig_decl = TREE_PURPOSE (orig);
11404 tree last = TREE_VALUE (orig);
11405 tree *pc;
11406 for (pc = &OMP_FOR_CLAUSES (inner_for_stmt);
11407 *pc; pc = &OMP_CLAUSE_CHAIN (*pc))
11408 if ((OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_PRIVATE
11409 || OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_LASTPRIVATE)
11410 && OMP_CLAUSE_DECL (*pc) == orig_decl)
11411 break;
11412 if (*pc == NULL_TREE)
11413 {
11414 tree *spc;
11415 for (spc = &OMP_PARALLEL_CLAUSES (*data[1]);
11416 *spc; spc = &OMP_CLAUSE_CHAIN (*spc))
11417 if (OMP_CLAUSE_CODE (*spc) == OMP_CLAUSE_PRIVATE
11418 && OMP_CLAUSE_DECL (*spc) == orig_decl)
11419 break;
11420 if (*spc)
11421 {
11422 tree c = *spc;
11423 *spc = OMP_CLAUSE_CHAIN (c);
11424 OMP_CLAUSE_CHAIN (c) = NULL_TREE;
11425 *pc = c;
11426 }
11427 }
11428 if (*pc == NULL_TREE)
11429 ;
11430 else if (OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_PRIVATE)
11431 {
11432 /* private clause will appear only on inner_for_stmt.
11433 Change it into firstprivate, and add private clause
11434 on for_stmt. */
11435 tree c = copy_node (*pc);
11436 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
11437 OMP_FOR_CLAUSES (for_stmt) = c;
11438 OMP_CLAUSE_CODE (*pc) = OMP_CLAUSE_FIRSTPRIVATE;
11439 lang_hooks.decls.omp_finish_clause (*pc, pre_p, openacc);
11440 }
11441 else
11442 {
11443 /* lastprivate clause will appear on both inner_for_stmt
11444 and for_stmt. Add firstprivate clause to
11445 inner_for_stmt. */
11446 tree c = build_omp_clause (OMP_CLAUSE_LOCATION (*pc),
11447 OMP_CLAUSE_FIRSTPRIVATE);
11448 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (*pc);
11449 OMP_CLAUSE_CHAIN (c) = *pc;
11450 *pc = c;
11451 lang_hooks.decls.omp_finish_clause (*pc, pre_p, openacc);
11452 }
11453 tree c = build_omp_clause (UNKNOWN_LOCATION,
11454 OMP_CLAUSE_FIRSTPRIVATE);
11455 OMP_CLAUSE_DECL (c) = last;
11456 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
11457 OMP_PARALLEL_CLAUSES (*data[1]) = c;
11458 c = build_omp_clause (UNKNOWN_LOCATION,
11459 *pc ? OMP_CLAUSE_SHARED
11460 : OMP_CLAUSE_FIRSTPRIVATE);
11461 OMP_CLAUSE_DECL (c) = orig_decl;
11462 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
11463 OMP_PARALLEL_CLAUSES (*data[1]) = c;
11464 }
11465 /* Similarly, take care of C++ range for temporaries; those should
11466 be firstprivate on the OMP_PARALLEL if there is one. */
11467 if (data[1])
11468 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (inner_for_stmt)); i++)
11469 if (OMP_FOR_ORIG_DECLS (inner_for_stmt)
11470 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
11471 i)) == TREE_LIST
11472 && TREE_CHAIN (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
11473 i)))
11474 {
11475 tree orig
11476 = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt), i);
11477 tree v = TREE_CHAIN (orig);
11478 tree c = build_omp_clause (UNKNOWN_LOCATION,
11479 OMP_CLAUSE_FIRSTPRIVATE);
11480 /* First add firstprivate clause for the __for_end artificial
11481 decl. */
11482 OMP_CLAUSE_DECL (c) = TREE_VEC_ELT (v, 1);
11483 if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c)))
11484 == REFERENCE_TYPE)
11485 OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c) = 1;
11486 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
11487 OMP_PARALLEL_CLAUSES (*data[1]) = c;
11488 if (TREE_VEC_ELT (v, 0))
11489 {
11490 /* And now the same for __for_range artificial decl if it
11491 exists. */
11492 c = build_omp_clause (UNKNOWN_LOCATION,
11493 OMP_CLAUSE_FIRSTPRIVATE);
11494 OMP_CLAUSE_DECL (c) = TREE_VEC_ELT (v, 0);
11495 if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c)))
11496 == REFERENCE_TYPE)
11497 OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c) = 1;
11498 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
11499 OMP_PARALLEL_CLAUSES (*data[1]) = c;
11500 }
11501 }
11502 }
11503
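/* A non-rectangular loop nest is one where an inner loop's bounds
   depend on an outer iteration variable, e.g. (a sketch):
     #pragma omp for collapse(2)
     for (i = 0; i < n; i++)
       for (j = i; j < n; j++)
	 ...
   Some clauses cannot be supported on such nests and are diagnosed
   below.  */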
11504 switch (TREE_CODE (for_stmt))
11505 {
11506 case OMP_FOR:
11507 if (OMP_FOR_NON_RECTANGULAR (inner_for_stmt ? inner_for_stmt : for_stmt))
11508 {
11509 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
11510 OMP_CLAUSE_SCHEDULE))
11511 error_at (EXPR_LOCATION (for_stmt),
11512 "%qs clause may not appear on non-rectangular %qs",
11513 "schedule", "for");
11514 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ORDERED))
11515 error_at (EXPR_LOCATION (for_stmt),
11516 "%qs clause may not appear on non-rectangular %qs",
11517 "ordered", "for");
11518 }
11519 break;
11520 case OMP_DISTRIBUTE:
11521 if (OMP_FOR_NON_RECTANGULAR (inner_for_stmt ? inner_for_stmt : for_stmt)
11522 && omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
11523 OMP_CLAUSE_DIST_SCHEDULE))
11524 error_at (EXPR_LOCATION (for_stmt),
11525 "%qs clause may not appear on non-rectangular %qs",
11526 "dist_schedule", "distribute");
11527 break;
11528 case OACC_LOOP:
11529 ort = ORT_ACC;
11530 break;
11531 case OMP_TASKLOOP:
11532 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_UNTIED))
11533 ort = ORT_UNTIED_TASKLOOP;
11534 else
11535 ort = ORT_TASKLOOP;
11536 break;
11537 case OMP_SIMD:
11538 ort = ORT_SIMD;
11539 break;
11540 default:
11541 gcc_unreachable ();
11542 }
11543
11544 /* Set OMP_CLAUSE_LINEAR_NO_COPYIN flag on explicit linear
11545 clause for the IV. */
11546 if (ort == ORT_SIMD && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
11547 {
11548 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), 0);
11549 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
11550 decl = TREE_OPERAND (t, 0);
11551 for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
11552 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
11553 && OMP_CLAUSE_DECL (c) == decl)
11554 {
11555 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
11556 break;
11557 }
11558 }
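/* E.g. (hypothetical): for '#pragma omp simd linear (i)' where I is
   the single loop iterator, nothing needs to be copied in, since the
   loop assigns I itself.  */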
11559
11560 if (TREE_CODE (for_stmt) != OMP_TASKLOOP)
11561 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p, ort,
11562 loop_p && TREE_CODE (for_stmt) != OMP_SIMD
11563 ? OMP_LOOP : TREE_CODE (for_stmt));
11564
11565 if (TREE_CODE (for_stmt) == OMP_DISTRIBUTE)
11566 gimplify_omp_ctxp->distribute = true;
11567
11568 /* Handle OMP_FOR_INIT. */
11569 for_pre_body = NULL;
11570 if ((ort == ORT_SIMD
11571 || (inner_for_stmt && TREE_CODE (inner_for_stmt) == OMP_SIMD))
11572 && OMP_FOR_PRE_BODY (for_stmt))
11573 {
11574 has_decl_expr = BITMAP_ALLOC (NULL);
11575 if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == DECL_EXPR
11576 && TREE_CODE (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt)))
11577 == VAR_DECL)
11578 {
11579 t = OMP_FOR_PRE_BODY (for_stmt);
11580 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
11581 }
11582 else if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == STATEMENT_LIST)
11583 {
11584 tree_stmt_iterator si;
11585 for (si = tsi_start (OMP_FOR_PRE_BODY (for_stmt)); !tsi_end_p (si);
11586 tsi_next (&si))
11587 {
11588 t = tsi_stmt (si);
11589 if (TREE_CODE (t) == DECL_EXPR
11590 && TREE_CODE (DECL_EXPR_DECL (t)) == VAR_DECL)
11591 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
11592 }
11593 }
11594 }
11595 if (OMP_FOR_PRE_BODY (for_stmt))
11596 {
11597 if (TREE_CODE (for_stmt) != OMP_TASKLOOP || gimplify_omp_ctxp)
11598 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
11599 else
11600 {
11601 struct gimplify_omp_ctx ctx;
11602 memset (&ctx, 0, sizeof (ctx));
11603 ctx.region_type = ORT_NONE;
11604 gimplify_omp_ctxp = &ctx;
11605 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
11606 gimplify_omp_ctxp = NULL;
11607 }
11608 }
11609 OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;
11610
11611 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
11612 for_stmt = inner_for_stmt;
11613
11614 /* For a taskloop, we need to gimplify the start, end and step expressions
11615 before the taskloop, outside of the taskloop omp context. */
11616 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
11617 {
11618 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
11619 {
11620 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
11621 gimple_seq *for_pre_p = (gimple_seq_empty_p (for_pre_body)
11622 ? pre_p : &for_pre_body);
11623 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
11624 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
11625 {
11626 tree v = TREE_OPERAND (t, 1);
11627 gimplify_omp_taskloop_expr (type, &TREE_VEC_ELT (v, 1),
11628 for_pre_p, orig_for_stmt);
11629 gimplify_omp_taskloop_expr (type, &TREE_VEC_ELT (v, 2),
11630 for_pre_p, orig_for_stmt);
11631 }
11632 else
11633 gimplify_omp_taskloop_expr (type, &TREE_OPERAND (t, 1), for_pre_p,
11634 orig_for_stmt);
11635
11636 /* Handle OMP_FOR_COND. */
11637 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
11638 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
11639 {
11640 tree v = TREE_OPERAND (t, 1);
11641 gimplify_omp_taskloop_expr (type, &TREE_VEC_ELT (v, 1),
11642 for_pre_p, orig_for_stmt);
11643 gimplify_omp_taskloop_expr (type, &TREE_VEC_ELT (v, 2),
11644 for_pre_p, orig_for_stmt);
11645 }
11646 else
11647 gimplify_omp_taskloop_expr (type, &TREE_OPERAND (t, 1), for_pre_p,
11648 orig_for_stmt);
11649
11650 /* Handle OMP_FOR_INCR. */
11651 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
11652 if (TREE_CODE (t) == MODIFY_EXPR)
11653 {
11654 decl = TREE_OPERAND (t, 0);
11655 t = TREE_OPERAND (t, 1);
11656 tree *tp = &TREE_OPERAND (t, 1);
11657 if (TREE_CODE (t) == PLUS_EXPR && *tp == decl)
11658 tp = &TREE_OPERAND (t, 0);
11659
11660 gimplify_omp_taskloop_expr (NULL_TREE, tp, for_pre_p,
11661 orig_for_stmt);
11662 }
11663 }
11664
11665 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (orig_for_stmt), pre_p, ort,
11666 OMP_TASKLOOP);
11667 }
11668
11669 if (orig_for_stmt != for_stmt)
11670 gimplify_omp_ctxp->combined_loop = true;
11671
11672 for_body = NULL;
11673 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
11674 == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
11675 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
11676 == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));
11677
11678 tree c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ORDERED);
11679 bool is_doacross = false;
11680 if (c && OMP_CLAUSE_ORDERED_EXPR (c))
11681 {
11682 is_doacross = true;
11683 gimplify_omp_ctxp->loop_iter_var.create (TREE_VEC_LENGTH
11684 (OMP_FOR_INIT (for_stmt))
11685 * 2);
11686 }
11687 int collapse = 1, tile = 0;
11688 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_COLLAPSE);
11689 if (c)
11690 collapse = tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (c));
11691 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_TILE);
11692 if (c)
11693 tile = list_length (OMP_CLAUSE_TILE_LIST (c));
11694 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ALLOCATE);
11695 hash_set<tree> *allocate_uids = NULL;
11696 if (c)
11697 {
11698 allocate_uids = new hash_set<tree>;
11699 for (; c; c = OMP_CLAUSE_CHAIN (c))
11700 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_ALLOCATE)
11701 allocate_uids->add (OMP_CLAUSE_DECL (c));
11702 }
11703 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
11704 {
11705 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
11706 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
11707 decl = TREE_OPERAND (t, 0);
11708 gcc_assert (DECL_P (decl));
11709 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
11710 || POINTER_TYPE_P (TREE_TYPE (decl)));
11711 if (is_doacross)
11712 {
11713 if (TREE_CODE (for_stmt) == OMP_FOR && OMP_FOR_ORIG_DECLS (for_stmt))
11714 {
11715 tree orig_decl = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
11716 if (TREE_CODE (orig_decl) == TREE_LIST)
11717 {
11718 orig_decl = TREE_PURPOSE (orig_decl);
11719 if (!orig_decl)
11720 orig_decl = decl;
11721 }
11722 gimplify_omp_ctxp->loop_iter_var.quick_push (orig_decl);
11723 }
11724 else
11725 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
11726 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
11727 }
11728
11729 /* Make sure the iteration variable is private. */
11730 tree c = NULL_TREE;
11731 tree c2 = NULL_TREE;
11732 if (orig_for_stmt != for_stmt)
11733 {
11734 /* Preserve this information until we gimplify the inner simd. */
11735 if (has_decl_expr
11736 && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
11737 TREE_PRIVATE (t) = 1;
11738 }
11739 else if (ort == ORT_SIMD)
11740 {
11741 splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
11742 (splay_tree_key) decl);
11743 omp_is_private (gimplify_omp_ctxp, decl,
11744 1 + (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
11745 != 1));
11746 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
11747 {
11748 omp_notice_variable (gimplify_omp_ctxp, decl, true);
11749 if (n->value & GOVD_LASTPRIVATE_CONDITIONAL)
11750 for (tree c3 = omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
11751 OMP_CLAUSE_LASTPRIVATE);
11752 c3; c3 = omp_find_clause (OMP_CLAUSE_CHAIN (c3),
11753 OMP_CLAUSE_LASTPRIVATE))
11754 if (OMP_CLAUSE_DECL (c3) == decl)
11755 {
11756 warning_at (OMP_CLAUSE_LOCATION (c3), 0,
11757 "conditional %<lastprivate%> on loop "
11758 "iterator %qD ignored", decl);
11759 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c3) = 0;
11760 n->value &= ~GOVD_LASTPRIVATE_CONDITIONAL;
11761 }
11762 }
11763 else if (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1 && !loop_p)
11764 {
11765 c = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
11766 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
11767 unsigned int flags = GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN;
11768 if ((has_decl_expr
11769 && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
11770 || TREE_PRIVATE (t))
11771 {
11772 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
11773 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
11774 }
11775 struct gimplify_omp_ctx *outer
11776 = gimplify_omp_ctxp->outer_context;
11777 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
11778 {
11779 if (outer->region_type == ORT_WORKSHARE
11780 && outer->combined_loop)
11781 {
11782 n = splay_tree_lookup (outer->variables,
11783 (splay_tree_key)decl);
11784 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
11785 {
11786 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
11787 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
11788 }
11789 else
11790 {
11791 struct gimplify_omp_ctx *octx = outer->outer_context;
11792 if (octx
11793 && octx->region_type == ORT_COMBINED_PARALLEL
11794 && octx->outer_context
11795 && (octx->outer_context->region_type
11796 == ORT_WORKSHARE)
11797 && octx->outer_context->combined_loop)
11798 {
11799 octx = octx->outer_context;
11800 n = splay_tree_lookup (octx->variables,
11801 (splay_tree_key)decl);
11802 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
11803 {
11804 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
11805 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
11806 }
11807 }
11808 }
11809 }
11810 }
11811
11812 OMP_CLAUSE_DECL (c) = decl;
11813 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
11814 OMP_FOR_CLAUSES (for_stmt) = c;
11815 omp_add_variable (gimplify_omp_ctxp, decl, flags);
11816 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
11817 {
11818 if (outer->region_type == ORT_WORKSHARE
11819 && outer->combined_loop)
11820 {
11821 if (outer->outer_context
11822 && (outer->outer_context->region_type
11823 == ORT_COMBINED_PARALLEL))
11824 outer = outer->outer_context;
11825 else if (omp_check_private (outer, decl, false))
11826 outer = NULL;
11827 }
11828 else if (((outer->region_type & ORT_TASKLOOP)
11829 == ORT_TASKLOOP)
11830 && outer->combined_loop
11831 && !omp_check_private (gimplify_omp_ctxp,
11832 decl, false))
11833 ;
11834 else if (outer->region_type != ORT_COMBINED_PARALLEL)
11835 {
11836 omp_notice_variable (outer, decl, true);
11837 outer = NULL;
11838 }
11839 if (outer)
11840 {
11841 n = splay_tree_lookup (outer->variables,
11842 (splay_tree_key)decl);
11843 if (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
11844 {
11845 omp_add_variable (outer, decl,
11846 GOVD_LASTPRIVATE | GOVD_SEEN);
11847 if (outer->region_type == ORT_COMBINED_PARALLEL
11848 && outer->outer_context
11849 && (outer->outer_context->region_type
11850 == ORT_WORKSHARE)
11851 && outer->outer_context->combined_loop)
11852 {
11853 outer = outer->outer_context;
11854 n = splay_tree_lookup (outer->variables,
11855 (splay_tree_key)decl);
11856 if (omp_check_private (outer, decl, false))
11857 outer = NULL;
11858 else if (n == NULL
11859 || ((n->value & GOVD_DATA_SHARE_CLASS)
11860 == 0))
11861 omp_add_variable (outer, decl,
11862 GOVD_LASTPRIVATE
11863 | GOVD_SEEN);
11864 else
11865 outer = NULL;
11866 }
11867 if (outer && outer->outer_context
11868 && ((outer->outer_context->region_type
11869 & ORT_COMBINED_TEAMS) == ORT_COMBINED_TEAMS
11870 || (((outer->region_type & ORT_TASKLOOP)
11871 == ORT_TASKLOOP)
11872 && (outer->outer_context->region_type
11873 == ORT_COMBINED_PARALLEL))))
11874 {
11875 outer = outer->outer_context;
11876 n = splay_tree_lookup (outer->variables,
11877 (splay_tree_key)decl);
11878 if (n == NULL
11879 || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
11880 omp_add_variable (outer, decl,
11881 GOVD_SHARED | GOVD_SEEN);
11882 else
11883 outer = NULL;
11884 }
11885 if (outer && outer->outer_context)
11886 omp_notice_variable (outer->outer_context, decl,
11887 true);
11888 }
11889 }
11890 }
11891 }
11892 else
11893 {
11894 bool lastprivate
11895 = (!has_decl_expr
11896 || !bitmap_bit_p (has_decl_expr, DECL_UID (decl)));
11897 if (TREE_PRIVATE (t))
11898 lastprivate = false;
11899 if (loop_p && OMP_FOR_ORIG_DECLS (for_stmt))
11900 {
11901 tree elt = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
11902 if (TREE_CODE (elt) == TREE_LIST && TREE_PURPOSE (elt))
11903 lastprivate = false;
11904 }
11905
11906 struct gimplify_omp_ctx *outer
11907 = gimplify_omp_ctxp->outer_context;
11908 if (outer && lastprivate)
11909 {
11910 if (outer->region_type == ORT_WORKSHARE
11911 && outer->combined_loop)
11912 {
11913 n = splay_tree_lookup (outer->variables,
11914 (splay_tree_key)decl);
11915 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
11916 {
11917 lastprivate = false;
11918 outer = NULL;
11919 }
11920 else if (outer->outer_context
11921 && (outer->outer_context->region_type
11922 == ORT_COMBINED_PARALLEL))
11923 outer = outer->outer_context;
11924 else if (omp_check_private (outer, decl, false))
11925 outer = NULL;
11926 }
11927 else if (((outer->region_type & ORT_TASKLOOP)
11928 == ORT_TASKLOOP)
11929 && outer->combined_loop
11930 && !omp_check_private (gimplify_omp_ctxp,
11931 decl, false))
11932 ;
11933 else if (outer->region_type != ORT_COMBINED_PARALLEL)
11934 {
11935 omp_notice_variable (outer, decl, true);
11936 outer = NULL;
11937 }
11938 if (outer)
11939 {
11940 n = splay_tree_lookup (outer->variables,
11941 (splay_tree_key)decl);
11942 if (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
11943 {
11944 omp_add_variable (outer, decl,
11945 GOVD_LASTPRIVATE | GOVD_SEEN);
11946 if (outer->region_type == ORT_COMBINED_PARALLEL
11947 && outer->outer_context
11948 && (outer->outer_context->region_type
11949 == ORT_WORKSHARE)
11950 && outer->outer_context->combined_loop)
11951 {
11952 outer = outer->outer_context;
11953 n = splay_tree_lookup (outer->variables,
11954 (splay_tree_key)decl);
11955 if (omp_check_private (outer, decl, false))
11956 outer = NULL;
11957 else if (n == NULL
11958 || ((n->value & GOVD_DATA_SHARE_CLASS)
11959 == 0))
11960 omp_add_variable (outer, decl,
11961 GOVD_LASTPRIVATE
11962 | GOVD_SEEN);
11963 else
11964 outer = NULL;
11965 }
11966 if (outer && outer->outer_context
11967 && ((outer->outer_context->region_type
11968 & ORT_COMBINED_TEAMS) == ORT_COMBINED_TEAMS
11969 || (((outer->region_type & ORT_TASKLOOP)
11970 == ORT_TASKLOOP)
11971 && (outer->outer_context->region_type
11972 == ORT_COMBINED_PARALLEL))))
11973 {
11974 outer = outer->outer_context;
11975 n = splay_tree_lookup (outer->variables,
11976 (splay_tree_key)decl);
11977 if (n == NULL
11978 || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
11979 omp_add_variable (outer, decl,
11980 GOVD_SHARED | GOVD_SEEN);
11981 else
11982 outer = NULL;
11983 }
11984 if (outer && outer->outer_context)
11985 omp_notice_variable (outer->outer_context, decl,
11986 true);
11987 }
11988 }
11989 }
11990
11991 c = build_omp_clause (input_location,
11992 lastprivate ? OMP_CLAUSE_LASTPRIVATE
11993 : OMP_CLAUSE_PRIVATE);
11994 OMP_CLAUSE_DECL (c) = decl;
11995 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
11996 OMP_FOR_CLAUSES (for_stmt) = c;
11997 omp_add_variable (gimplify_omp_ctxp, decl,
11998 (lastprivate ? GOVD_LASTPRIVATE : GOVD_PRIVATE)
11999 | GOVD_EXPLICIT | GOVD_SEEN);
12000 c = NULL_TREE;
12001 }
12002 }
12003 else if (omp_is_private (gimplify_omp_ctxp, decl, 0))
12004 {
12005 omp_notice_variable (gimplify_omp_ctxp, decl, true);
12006 splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
12007 (splay_tree_key) decl);
12008 if (n && (n->value & GOVD_LASTPRIVATE_CONDITIONAL))
12009 for (tree c3 = omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
12010 OMP_CLAUSE_LASTPRIVATE);
12011 c3; c3 = omp_find_clause (OMP_CLAUSE_CHAIN (c3),
12012 OMP_CLAUSE_LASTPRIVATE))
12013 if (OMP_CLAUSE_DECL (c3) == decl)
12014 {
12015 warning_at (OMP_CLAUSE_LOCATION (c3), 0,
12016 "conditional %<lastprivate%> on loop "
12017 "iterator %qD ignored", decl);
12018 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c3) = 0;
12019 n->value &= ~GOVD_LASTPRIVATE_CONDITIONAL;
12020 }
12021 }
12022 else
12023 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
12024
12025 /* If DECL is not a gimple register, create a temporary variable to act
12026 as an iteration counter. This is valid, since DECL cannot be
12027 modified in the body of the loop. Similarly for any iteration vars
12028 in simd with collapse > 1 where the iterator vars must be
12029 lastprivate. And similarly for vars mentioned in allocate clauses. */
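/* E.g. (a sketch): if the iterator V is addressable, the loop runs on
   a fresh temporary and 'V = tmp' is emitted at the start of the loop
   body so V's user-visible value stays in sync.  */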
12030 if (orig_for_stmt != for_stmt)
12031 var = decl;
12032 else if (!is_gimple_reg (decl)
12033 || (ort == ORT_SIMD
12034 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1)
12035 || (allocate_uids && allocate_uids->contains (decl)))
12036 {
12037 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
12038 /* Make sure omp_add_variable is not called on it prematurely.
12039 We call it ourselves a few lines later. */
12040 gimplify_omp_ctxp = NULL;
12041 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
12042 gimplify_omp_ctxp = ctx;
12043 TREE_OPERAND (t, 0) = var;
12044
12045 gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));
12046
12047 if (ort == ORT_SIMD
12048 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
12049 {
12050 c2 = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
12051 OMP_CLAUSE_LINEAR_NO_COPYIN (c2) = 1;
12052 OMP_CLAUSE_LINEAR_NO_COPYOUT (c2) = 1;
12053 OMP_CLAUSE_DECL (c2) = var;
12054 OMP_CLAUSE_CHAIN (c2) = OMP_FOR_CLAUSES (for_stmt);
12055 OMP_FOR_CLAUSES (for_stmt) = c2;
12056 omp_add_variable (gimplify_omp_ctxp, var,
12057 GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN);
12058 if (c == NULL_TREE)
12059 {
12060 c = c2;
12061 c2 = NULL_TREE;
12062 }
12063 }
12064 else
12065 omp_add_variable (gimplify_omp_ctxp, var,
12066 GOVD_PRIVATE | GOVD_SEEN);
12067 }
12068 else
12069 var = decl;
12070
12071 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
12072 {
12073 tree lb = TREE_OPERAND (t, 1);
12074 tret = gimplify_expr (&TREE_VEC_ELT (lb, 1), &for_pre_body, NULL,
12075 is_gimple_val, fb_rvalue, false);
12076 ret = MIN (ret, tret);
12077 tret = gimplify_expr (&TREE_VEC_ELT (lb, 2), &for_pre_body, NULL,
12078 is_gimple_val, fb_rvalue, false);
12079 }
12080 else
12081 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
12082 is_gimple_val, fb_rvalue, false);
12083 ret = MIN (ret, tret);
12084 if (ret == GS_ERROR)
12085 return ret;
12086
12087 /* Handle OMP_FOR_COND. */
12088 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
12089 gcc_assert (COMPARISON_CLASS_P (t));
12090 gcc_assert (TREE_OPERAND (t, 0) == decl);
12091
12092 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
12093 {
12094 tree ub = TREE_OPERAND (t, 1);
12095 tret = gimplify_expr (&TREE_VEC_ELT (ub, 1), &for_pre_body, NULL,
12096 is_gimple_val, fb_rvalue, false);
12097 ret = MIN (ret, tret);
12098 tret = gimplify_expr (&TREE_VEC_ELT (ub, 2), &for_pre_body, NULL,
12099 is_gimple_val, fb_rvalue, false);
12100 }
12101 else
12102 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
12103 is_gimple_val, fb_rvalue, false);
12104 ret = MIN (ret, tret);
12105
12106 /* Handle OMP_FOR_INCR. */
12107 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
12108 switch (TREE_CODE (t))
12109 {
12110 case PREINCREMENT_EXPR:
12111 case POSTINCREMENT_EXPR:
12112 {
12113 tree decl = TREE_OPERAND (t, 0);
12114 /* c_omp_for_incr_canonicalize_ptr() should have been
12115 called to massage things appropriately. */
12116 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
12117
12118 if (orig_for_stmt != for_stmt)
12119 break;
12120 t = build_int_cst (TREE_TYPE (decl), 1);
12121 if (c)
12122 OMP_CLAUSE_LINEAR_STEP (c) = t;
12123 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
12124 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
12125 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
12126 break;
12127 }
12128
12129 case PREDECREMENT_EXPR:
12130 case POSTDECREMENT_EXPR:
12131 /* c_omp_for_incr_canonicalize_ptr() should have been
12132 called to massage things appropriately. */
12133 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
12134 if (orig_for_stmt != for_stmt)
12135 break;
12136 t = build_int_cst (TREE_TYPE (decl), -1);
12137 if (c)
12138 OMP_CLAUSE_LINEAR_STEP (c) = t;
12139 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
12140 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
12141 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
12142 break;
12143
12144 case MODIFY_EXPR:
12145 gcc_assert (TREE_OPERAND (t, 0) == decl);
12146 TREE_OPERAND (t, 0) = var;
12147
12148 t = TREE_OPERAND (t, 1);
12149 switch (TREE_CODE (t))
12150 {
12151 case PLUS_EXPR:
12152 if (TREE_OPERAND (t, 1) == decl)
12153 {
12154 TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
12155 TREE_OPERAND (t, 0) = var;
12156 break;
12157 }
12158
12159 /* Fallthru. */
12160 case MINUS_EXPR:
12161 case POINTER_PLUS_EXPR:
12162 gcc_assert (TREE_OPERAND (t, 0) == decl);
12163 TREE_OPERAND (t, 0) = var;
12164 break;
12165 default:
12166 gcc_unreachable ();
12167 }
12168
12169 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
12170 is_gimple_val, fb_rvalue, false);
12171 ret = MIN (ret, tret);
12172 if (c)
12173 {
12174 tree step = TREE_OPERAND (t, 1);
12175 tree stept = TREE_TYPE (decl);
12176 if (POINTER_TYPE_P (stept))
12177 stept = sizetype;
12178 step = fold_convert (stept, step);
12179 if (TREE_CODE (t) == MINUS_EXPR)
12180 step = fold_build1 (NEGATE_EXPR, stept, step);
12181 OMP_CLAUSE_LINEAR_STEP (c) = step;
12182 if (step != TREE_OPERAND (t, 1))
12183 {
12184 tret = gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c),
12185 &for_pre_body, NULL,
12186 is_gimple_val, fb_rvalue, false);
12187 ret = MIN (ret, tret);
12188 }
12189 }
12190 break;
12191
12192 default:
12193 gcc_unreachable ();
12194 }
12195
12196 if (c2)
12197 {
12198 gcc_assert (c);
12199 OMP_CLAUSE_LINEAR_STEP (c2) = OMP_CLAUSE_LINEAR_STEP (c);
12200 }
12201
12202 if ((var != decl || collapse > 1 || tile) && orig_for_stmt == for_stmt)
12203 {
12204 for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
12205 if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
12206 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
12207 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
12208 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)
12209 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) == NULL))
12210 && OMP_CLAUSE_DECL (c) == decl)
12211 {
12212 if (is_doacross && (collapse == 1 || i >= collapse))
12213 t = var;
12214 else
12215 {
12216 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
12217 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
12218 gcc_assert (TREE_OPERAND (t, 0) == var);
12219 t = TREE_OPERAND (t, 1);
12220 gcc_assert (TREE_CODE (t) == PLUS_EXPR
12221 || TREE_CODE (t) == MINUS_EXPR
12222 || TREE_CODE (t) == POINTER_PLUS_EXPR);
12223 gcc_assert (TREE_OPERAND (t, 0) == var);
12224 t = build2 (TREE_CODE (t), TREE_TYPE (decl),
12225 is_doacross ? var : decl,
12226 TREE_OPERAND (t, 1));
12227 }
12228 gimple_seq *seq;
12229 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
12230 seq = &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c);
12231 else
12232 seq = &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c);
12233 push_gimplify_context ();
12234 gimplify_assign (decl, t, seq);
12235 gimple *bind = NULL;
12236 if (gimplify_ctxp->temps)
12237 {
12238 bind = gimple_build_bind (NULL_TREE, *seq, NULL_TREE);
12239 *seq = NULL;
12240 gimplify_seq_add_stmt (seq, bind);
12241 }
12242 pop_gimplify_context (bind);
12243 }
12244 }
12245 if (OMP_FOR_NON_RECTANGULAR (for_stmt) && var != decl)
12246 for (int j = i + 1; j < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); j++)
12247 {
12248 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), j);
12249 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
12250 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC
12251 && TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) == decl)
12252 TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) = var;
12253 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), j);
12254 gcc_assert (COMPARISON_CLASS_P (t));
12255 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC
12256 && TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) == decl)
12257 TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) = var;
12258 }
12259 }
12260
12261 BITMAP_FREE (has_decl_expr);
12262 delete allocate_uids;
12263
12264 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP
12265 || (loop_p && orig_for_stmt == for_stmt))
12266 {
12267 push_gimplify_context ();
12268 if (TREE_CODE (OMP_FOR_BODY (orig_for_stmt)) != BIND_EXPR)
12269 {
12270 OMP_FOR_BODY (orig_for_stmt)
12271 = build3 (BIND_EXPR, void_type_node, NULL,
12272 OMP_FOR_BODY (orig_for_stmt), NULL);
12273 TREE_SIDE_EFFECTS (OMP_FOR_BODY (orig_for_stmt)) = 1;
12274 }
12275 }
12276
12277 gimple *g = gimplify_and_return_first (OMP_FOR_BODY (orig_for_stmt),
12278 &for_body);
12279
12280 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP
12281 || (loop_p && orig_for_stmt == for_stmt))
12282 {
12283 if (gimple_code (g) == GIMPLE_BIND)
12284 pop_gimplify_context (g);
12285 else
12286 pop_gimplify_context (NULL);
12287 }
12288
12289 if (orig_for_stmt != for_stmt)
12290 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
12291 {
12292 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
12293 decl = TREE_OPERAND (t, 0);
12294 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
12295 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
12296 gimplify_omp_ctxp = ctx->outer_context;
12297 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
12298 gimplify_omp_ctxp = ctx;
12299 omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
12300 TREE_OPERAND (t, 0) = var;
12301 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
12302 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
12303 TREE_OPERAND (TREE_OPERAND (t, 1), 0) = var;
12304 if (OMP_FOR_NON_RECTANGULAR (for_stmt))
12305 for (int j = i + 1;
12306 j < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); j++)
12307 {
12308 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), j);
12309 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
12310 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC
12311 && TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) == decl)
12312 {
12313 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
12314 TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) = var;
12315 }
12316 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), j);
12317 gcc_assert (COMPARISON_CLASS_P (t));
12318 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC
12319 && TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) == decl)
12320 {
12321 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
12322 TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) = var;
12323 }
12324 }
12325 }
12326
12327 gimplify_adjust_omp_clauses (pre_p, for_body,
12328 &OMP_FOR_CLAUSES (orig_for_stmt),
12329 TREE_CODE (orig_for_stmt));
12330
12331 int kind;
12332 switch (TREE_CODE (orig_for_stmt))
12333 {
12334 case OMP_FOR: kind = GF_OMP_FOR_KIND_FOR; break;
12335 case OMP_SIMD: kind = GF_OMP_FOR_KIND_SIMD; break;
12336 case OMP_DISTRIBUTE: kind = GF_OMP_FOR_KIND_DISTRIBUTE; break;
12337 case OMP_TASKLOOP: kind = GF_OMP_FOR_KIND_TASKLOOP; break;
12338 case OACC_LOOP: kind = GF_OMP_FOR_KIND_OACC_LOOP; break;
12339 default:
12340 gcc_unreachable ();
12341 }
12342 if (loop_p && kind == GF_OMP_FOR_KIND_SIMD)
12343 {
12344 gimplify_seq_add_seq (pre_p, for_pre_body);
12345 for_pre_body = NULL;
12346 }
12347 gfor = gimple_build_omp_for (for_body, kind, OMP_FOR_CLAUSES (orig_for_stmt),
12348 TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
12349 for_pre_body);
12350 if (orig_for_stmt != for_stmt)
12351 gimple_omp_for_set_combined_p (gfor, true);
12352 if (gimplify_omp_ctxp
12353 && (gimplify_omp_ctxp->combined_loop
12354 || (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
12355 && gimplify_omp_ctxp->outer_context
12356 && gimplify_omp_ctxp->outer_context->combined_loop)))
12357 {
12358 gimple_omp_for_set_combined_into_p (gfor, true);
12359 if (gimplify_omp_ctxp->combined_loop)
12360 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_SIMD);
12361 else
12362 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_FOR);
12363 }
12364
12365 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
12366 {
12367 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
12368 gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
12369 gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
12370 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
12371 gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
12372 gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
12373 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
12374 gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
12375 }
12376
12377 /* OMP_TASKLOOP is gimplified as two GIMPLE_OMP_FOR taskloop
12378 constructs with a GIMPLE_OMP_TASK sandwiched in between them.
12379 The outer taskloop computes the number of iterations (the counts
12380 for collapsed loops) and holds the taskloop-specific clauses.
12381 The task construct stands for the effect of data sharing on the
12382 explicit task it creates, and the inner taskloop stands for the
12383 expansion of the static loop inside the explicit task construct. */
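/* An illustrative sketch (not literal compiler output): for a loop
   iteration variable I,

     #pragma omp taskloop lastprivate (i)

   has its clauses distributed roughly as

     outer taskloop:  private (i)
     task:            firstprivate (i) shared (i)
     inner taskloop:  lastprivate (i)

   matching the clause splitting done below.  */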
12384 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
12385 {
12386 tree *gfor_clauses_ptr = gimple_omp_for_clauses_ptr (gfor);
12387 tree task_clauses = NULL_TREE;
12388 tree c = *gfor_clauses_ptr;
12389 tree *gtask_clauses_ptr = &task_clauses;
12390 tree outer_for_clauses = NULL_TREE;
12391 tree *gforo_clauses_ptr = &outer_for_clauses;
12392 bitmap lastprivate_uids = NULL;
12393 if (omp_find_clause (c, OMP_CLAUSE_ALLOCATE))
12394 {
12395 c = omp_find_clause (c, OMP_CLAUSE_LASTPRIVATE);
12396 if (c)
12397 {
12398 lastprivate_uids = BITMAP_ALLOC (NULL);
12399 for (; c; c = omp_find_clause (OMP_CLAUSE_CHAIN (c),
12400 OMP_CLAUSE_LASTPRIVATE))
12401 bitmap_set_bit (lastprivate_uids,
12402 DECL_UID (OMP_CLAUSE_DECL (c)));
12403 }
12404 c = *gfor_clauses_ptr;
12405 }
12406 for (; c; c = OMP_CLAUSE_CHAIN (c))
12407 switch (OMP_CLAUSE_CODE (c))
12408 {
12409 /* These clauses are allowed on task, move them there. */
12410 case OMP_CLAUSE_SHARED:
12411 case OMP_CLAUSE_FIRSTPRIVATE:
12412 case OMP_CLAUSE_DEFAULT:
12413 case OMP_CLAUSE_IF:
12414 case OMP_CLAUSE_UNTIED:
12415 case OMP_CLAUSE_FINAL:
12416 case OMP_CLAUSE_MERGEABLE:
12417 case OMP_CLAUSE_PRIORITY:
12418 case OMP_CLAUSE_REDUCTION:
12419 case OMP_CLAUSE_IN_REDUCTION:
12420 *gtask_clauses_ptr = c;
12421 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
12422 break;
12423 case OMP_CLAUSE_PRIVATE:
12424 if (OMP_CLAUSE_PRIVATE_TASKLOOP_IV (c))
12425 {
12426 /* We want private on outer for and firstprivate
12427 on task. */
12428 *gtask_clauses_ptr
12429 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
12430 OMP_CLAUSE_FIRSTPRIVATE);
12431 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
12432 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL,
12433 openacc);
12434 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
12435 *gforo_clauses_ptr = c;
12436 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
12437 }
12438 else
12439 {
12440 *gtask_clauses_ptr = c;
12441 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
12442 }
12443 break;
12444 /* These clauses go into outer taskloop clauses. */
12445 case OMP_CLAUSE_GRAINSIZE:
12446 case OMP_CLAUSE_NUM_TASKS:
12447 case OMP_CLAUSE_NOGROUP:
12448 *gforo_clauses_ptr = c;
12449 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
12450 break;
12451 /* Collapse clause we duplicate on both taskloops. */
12452 case OMP_CLAUSE_COLLAPSE:
12453 *gfor_clauses_ptr = c;
12454 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
12455 *gforo_clauses_ptr = copy_node (c);
12456 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
12457 break;
12458 /* For lastprivate, keep the clause on the inner taskloop, and add
12459 a shared clause on the task. If the same decl is also firstprivate,
12460 also add a firstprivate clause on the inner taskloop. */
12461 case OMP_CLAUSE_LASTPRIVATE:
12462 if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c))
12463 {
12464 /* For taskloop C++ lastprivate IVs, we want:
12465 1) private on outer taskloop
12466 2) firstprivate and shared on task
12467 3) lastprivate on inner taskloop */
12468 *gtask_clauses_ptr
12469 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
12470 OMP_CLAUSE_FIRSTPRIVATE);
12471 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
12472 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL,
12473 openacc);
12474 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
12475 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c) = 1;
12476 *gforo_clauses_ptr = build_omp_clause (OMP_CLAUSE_LOCATION (c),
12477 OMP_CLAUSE_PRIVATE);
12478 OMP_CLAUSE_DECL (*gforo_clauses_ptr) = OMP_CLAUSE_DECL (c);
12479 OMP_CLAUSE_PRIVATE_TASKLOOP_IV (*gforo_clauses_ptr) = 1;
12480 TREE_TYPE (*gforo_clauses_ptr) = TREE_TYPE (c);
12481 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
12482 }
12483 *gfor_clauses_ptr = c;
12484 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
12485 *gtask_clauses_ptr
12486 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_SHARED);
12487 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
12488 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
12489 OMP_CLAUSE_SHARED_FIRSTPRIVATE (*gtask_clauses_ptr) = 1;
12490 gtask_clauses_ptr
12491 = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
12492 break;
12493 /* Allocate clause we duplicate on task and inner taskloop
12494 if the decl is lastprivate, otherwise just put on task. */
12495 case OMP_CLAUSE_ALLOCATE:
12496 if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)
12497 && DECL_P (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)))
12498 {
12499 /* Additionally, put firstprivate clause on task
12500 for the allocator if it is not constant. */
12501 *gtask_clauses_ptr
12502 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
12503 OMP_CLAUSE_FIRSTPRIVATE);
12504 OMP_CLAUSE_DECL (*gtask_clauses_ptr)
12505 = OMP_CLAUSE_ALLOCATE_ALLOCATOR (c);
12506 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
12507 }
12508 if (lastprivate_uids
12509 && bitmap_bit_p (lastprivate_uids,
12510 DECL_UID (OMP_CLAUSE_DECL (c))))
12511 {
12512 *gfor_clauses_ptr = c;
12513 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
12514 *gtask_clauses_ptr = copy_node (c);
12515 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
12516 }
12517 else
12518 {
12519 *gtask_clauses_ptr = c;
12520 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
12521 }
12522 break;
12523 default:
12524 gcc_unreachable ();
12525 }
12526 *gfor_clauses_ptr = NULL_TREE;
12527 *gtask_clauses_ptr = NULL_TREE;
12528 *gforo_clauses_ptr = NULL_TREE;
12529 BITMAP_FREE (lastprivate_uids);
12530 g = gimple_build_bind (NULL_TREE, gfor, NULL_TREE);
12531 g = gimple_build_omp_task (g, task_clauses, NULL_TREE, NULL_TREE,
12532 NULL_TREE, NULL_TREE, NULL_TREE);
12533 gimple_omp_task_set_taskloop_p (g, true);
12534 g = gimple_build_bind (NULL_TREE, g, NULL_TREE);
12535 gomp_for *gforo
12536 = gimple_build_omp_for (g, GF_OMP_FOR_KIND_TASKLOOP, outer_for_clauses,
12537 gimple_omp_for_collapse (gfor),
12538 gimple_omp_for_pre_body (gfor));
12539 gimple_omp_for_set_pre_body (gfor, NULL);
12540 gimple_omp_for_set_combined_p (gforo, true);
12541 gimple_omp_for_set_combined_into_p (gfor, true);
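/* Give the outer taskloop fresh, private iteration variables, copying
   the inner loop's bounds, condition and step over to it.  */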
12542 for (i = 0; i < (int) gimple_omp_for_collapse (gfor); i++)
12543 {
12544 tree type = TREE_TYPE (gimple_omp_for_index (gfor, i));
12545 tree v = create_tmp_var (type);
12546 gimple_omp_for_set_index (gforo, i, v);
12547 t = unshare_expr (gimple_omp_for_initial (gfor, i));
12548 gimple_omp_for_set_initial (gforo, i, t);
12549 gimple_omp_for_set_cond (gforo, i,
12550 gimple_omp_for_cond (gfor, i));
12551 t = unshare_expr (gimple_omp_for_final (gfor, i));
12552 gimple_omp_for_set_final (gforo, i, t);
12553 t = unshare_expr (gimple_omp_for_incr (gfor, i));
12554 gcc_assert (TREE_OPERAND (t, 0) == gimple_omp_for_index (gfor, i));
12555 TREE_OPERAND (t, 0) = v;
12556 gimple_omp_for_set_incr (gforo, i, t);
12557 t = build_omp_clause (input_location, OMP_CLAUSE_PRIVATE);
12558 OMP_CLAUSE_DECL (t) = v;
12559 OMP_CLAUSE_CHAIN (t) = gimple_omp_for_clauses (gforo);
12560 gimple_omp_for_set_clauses (gforo, t);
12561 if (OMP_FOR_NON_RECTANGULAR (for_stmt))
12562 {
12563 tree *p1 = NULL, *p2 = NULL;
12564 t = gimple_omp_for_initial (gforo, i);
12565 if (TREE_CODE (t) == TREE_VEC)
12566 p1 = &TREE_VEC_ELT (t, 0);
12567 t = gimple_omp_for_final (gforo, i);
12568 if (TREE_CODE (t) == TREE_VEC)
12569 {
12570 if (p1)
12571 p2 = &TREE_VEC_ELT (t, 0);
12572 else
12573 p1 = &TREE_VEC_ELT (t, 0);
12574 }
12575 if (p1)
12576 {
12577 int j;
12578 for (j = 0; j < i; j++)
12579 if (*p1 == gimple_omp_for_index (gfor, j))
12580 {
12581 *p1 = gimple_omp_for_index (gforo, j);
12582 if (p2)
12583 *p2 = *p1;
12584 break;
12585 }
12586 gcc_assert (j < i);
12587 }
12588 }
12589 }
12590 gimplify_seq_add_stmt (pre_p, gforo);
12591 }
12592 else
12593 gimplify_seq_add_stmt (pre_p, gfor);
12594
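/* For lastprivate (conditional:) clauses, create _condtemp_ temporaries:
   on a worksharing loop inside a parallel, one array of counters
   communicated to the parallel; on a simd, one scalar counter per
   clause.  */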
12595 if (TREE_CODE (orig_for_stmt) == OMP_FOR)
12596 {
12597 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
12598 unsigned lastprivate_conditional = 0;
12599 while (ctx
12600 && (ctx->region_type == ORT_TARGET_DATA
12601 || ctx->region_type == ORT_TASKGROUP))
12602 ctx = ctx->outer_context;
12603 if (ctx && (ctx->region_type & ORT_PARALLEL) != 0)
12604 for (tree c = gimple_omp_for_clauses (gfor);
12605 c; c = OMP_CLAUSE_CHAIN (c))
12606 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
12607 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
12608 ++lastprivate_conditional;
12609 if (lastprivate_conditional)
12610 {
12611 struct omp_for_data fd;
12612 omp_extract_for_data (gfor, &fd, NULL);
12613 tree type = build_array_type_nelts (unsigned_type_for (fd.iter_type),
12614 lastprivate_conditional);
12615 tree var = create_tmp_var_raw (type);
12616 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
12617 OMP_CLAUSE_DECL (c) = var;
12618 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (gfor);
12619 gimple_omp_for_set_clauses (gfor, c);
12620 omp_add_variable (ctx, var, GOVD_CONDTEMP | GOVD_SEEN);
12621 }
12622 }
12623 else if (TREE_CODE (orig_for_stmt) == OMP_SIMD)
12624 {
12625 unsigned lastprivate_conditional = 0;
12626 for (tree c = gimple_omp_for_clauses (gfor); c; c = OMP_CLAUSE_CHAIN (c))
12627 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
12628 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
12629 ++lastprivate_conditional;
12630 if (lastprivate_conditional)
12631 {
12632 struct omp_for_data fd;
12633 omp_extract_for_data (gfor, &fd, NULL);
12634 tree type = unsigned_type_for (fd.iter_type);
12635 while (lastprivate_conditional--)
12636 {
12637 tree c = build_omp_clause (UNKNOWN_LOCATION,
12638 OMP_CLAUSE__CONDTEMP_);
12639 OMP_CLAUSE_DECL (c) = create_tmp_var (type);
12640 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (gfor);
12641 gimple_omp_for_set_clauses (gfor, c);
12642 }
12643 }
12644 }
12645
12646 if (ret != GS_ALL_DONE)
12647 return GS_ERROR;
12648 *expr_p = NULL_TREE;
12649 return GS_ALL_DONE;
12650 }
12651
12652 /* Helper for gimplify_omp_loop, called through walk_tree. */
12653
12654 static tree
12655 replace_reduction_placeholders (tree *tp, int *walk_subtrees, void *data)
12656 {
12657 if (DECL_P (*tp))
12658 {
12659 tree *d = (tree *) data;
12660 if (*tp == OMP_CLAUSE_REDUCTION_PLACEHOLDER (d[0]))
12661 {
12662 *tp = OMP_CLAUSE_REDUCTION_PLACEHOLDER (d[1]);
12663 *walk_subtrees = 0;
12664 }
12665 else if (*tp == OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (d[0]))
12666 {
12667 *tp = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (d[1]);
12668 *walk_subtrees = 0;
12669 }
12670 }
12671 return NULL_TREE;
12672 }
12673
12674 /* Gimplify the gross structure of an OMP_LOOP statement. */
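/* In outline (a summary of the lowering below): the loop construct
   itself becomes an OMP_SIMD; with an effective bind (parallel) it is
   wrapped in an OMP_FOR, and with bind (teams) additionally in a
   combined OMP_PARALLEL and an OMP_DISTRIBUTE, i.e. roughly
   "distribute parallel for simd", before being handed back to
   gimplify_omp_for.  */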
12675
12676 static enum gimplify_status
12677 gimplify_omp_loop (tree *expr_p, gimple_seq *pre_p)
12678 {
12679 tree for_stmt = *expr_p;
12680 tree clauses = OMP_FOR_CLAUSES (for_stmt);
12681 struct gimplify_omp_ctx *octx = gimplify_omp_ctxp;
12682 enum omp_clause_bind_kind kind = OMP_CLAUSE_BIND_THREAD;
12683 int i;
12684
12685 /* If order is not present, the behavior is as if order(concurrent)
12686 appeared. */
12687 tree order = omp_find_clause (clauses, OMP_CLAUSE_ORDER);
12688 if (order == NULL_TREE)
12689 {
12690 order = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_ORDER);
12691 OMP_CLAUSE_CHAIN (order) = clauses;
12692 OMP_FOR_CLAUSES (for_stmt) = clauses = order;
12693 }
12694
12695 tree bind = omp_find_clause (clauses, OMP_CLAUSE_BIND);
12696 if (bind == NULL_TREE)
12697 {
12698 if (!flag_openmp) /* flag_openmp_simd */
12699 ;
12700 else if (octx && (octx->region_type & ORT_TEAMS) != 0)
12701 kind = OMP_CLAUSE_BIND_TEAMS;
12702 else if (octx && (octx->region_type & ORT_PARALLEL) != 0)
12703 kind = OMP_CLAUSE_BIND_PARALLEL;
12704 else
12705 {
12706 for (; octx; octx = octx->outer_context)
12707 {
12708 if ((octx->region_type & ORT_ACC) != 0
12709 || octx->region_type == ORT_NONE
12710 || octx->region_type == ORT_IMPLICIT_TARGET)
12711 continue;
12712 break;
12713 }
12714 if (octx == NULL && !in_omp_construct)
12715 error_at (EXPR_LOCATION (for_stmt),
12716 "%<bind%> clause not specified on a %<loop%> "
12717 "construct not nested inside another OpenMP construct");
12718 }
12719 bind = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_BIND);
12720 OMP_CLAUSE_CHAIN (bind) = clauses;
12721 OMP_CLAUSE_BIND_KIND (bind) = kind;
12722 OMP_FOR_CLAUSES (for_stmt) = bind;
12723 }
12724 else
12725 switch (OMP_CLAUSE_BIND_KIND (bind))
12726 {
12727 case OMP_CLAUSE_BIND_THREAD:
12728 break;
12729 case OMP_CLAUSE_BIND_PARALLEL:
12730 if (!flag_openmp) /* flag_openmp_simd */
12731 {
12732 OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
12733 break;
12734 }
12735 for (; octx; octx = octx->outer_context)
12736 if (octx->region_type == ORT_SIMD
12737 && omp_find_clause (octx->clauses, OMP_CLAUSE_BIND) == NULL_TREE)
12738 {
12739 error_at (EXPR_LOCATION (for_stmt),
12740 "%<bind(parallel)%> on a %<loop%> construct nested "
12741 "inside %<simd%> construct");
12742 OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
12743 break;
12744 }
12745 kind = OMP_CLAUSE_BIND_PARALLEL;
12746 break;
12747 case OMP_CLAUSE_BIND_TEAMS:
12748 if (!flag_openmp) /* flag_openmp_simd */
12749 {
12750 OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
12751 break;
12752 }
12753 if ((octx
12754 && octx->region_type != ORT_IMPLICIT_TARGET
12755 && octx->region_type != ORT_NONE
12756 && (octx->region_type & ORT_TEAMS) == 0)
12757 || in_omp_construct)
12758 {
12759 error_at (EXPR_LOCATION (for_stmt),
12760 "%<bind(teams)%> on a %<loop%> region not strictly "
12761 "nested inside of a %<teams%> region");
12762 OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
12763 break;
12764 }
12765 kind = OMP_CLAUSE_BIND_TEAMS;
12766 break;
12767 default:
12768 gcc_unreachable ();
12769 }
12770
12771 for (tree *pc = &OMP_FOR_CLAUSES (for_stmt); *pc; )
12772 switch (OMP_CLAUSE_CODE (*pc))
12773 {
12774 case OMP_CLAUSE_REDUCTION:
12775 if (OMP_CLAUSE_REDUCTION_INSCAN (*pc))
12776 {
12777 error_at (OMP_CLAUSE_LOCATION (*pc),
12778 "%<inscan%> %<reduction%> clause on "
12779 "%qs construct", "loop");
12780 OMP_CLAUSE_REDUCTION_INSCAN (*pc) = 0;
12781 }
12782 if (OMP_CLAUSE_REDUCTION_TASK (*pc))
12783 {
12784 error_at (OMP_CLAUSE_LOCATION (*pc),
12785 "invalid %<task%> reduction modifier on construct "
12786 "other than %<parallel%>, %qs or %<sections%>",
12787 lang_GNU_Fortran () ? "do" : "for");
12788 OMP_CLAUSE_REDUCTION_TASK (*pc) = 0;
12789 }
12790 pc = &OMP_CLAUSE_CHAIN (*pc);
12791 break;
12792 case OMP_CLAUSE_LASTPRIVATE:
12793 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
12794 {
12795 tree t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
12796 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
12797 if (OMP_CLAUSE_DECL (*pc) == TREE_OPERAND (t, 0))
12798 break;
12799 if (OMP_FOR_ORIG_DECLS (for_stmt)
12800 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt),
12801 i)) == TREE_LIST
12802 && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt),
12803 i)))
12804 {
12805 tree orig = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
12806 if (OMP_CLAUSE_DECL (*pc) == TREE_PURPOSE (orig))
12807 break;
12808 }
12809 }
12810 if (i == TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)))
12811 {
12812 error_at (OMP_CLAUSE_LOCATION (*pc),
12813 "%<lastprivate%> clause on a %<loop%> construct refers "
12814 "to a variable %qD which is not the loop iterator",
12815 OMP_CLAUSE_DECL (*pc));
12816 *pc = OMP_CLAUSE_CHAIN (*pc);
12817 break;
12818 }
12819 pc = &OMP_CLAUSE_CHAIN (*pc);
12820 break;
12821 default:
12822 pc = &OMP_CLAUSE_CHAIN (*pc);
12823 break;
12824 }
12825
12826 TREE_SET_CODE (for_stmt, OMP_SIMD);
12827
12828 int last;
12829 switch (kind)
12830 {
12831 case OMP_CLAUSE_BIND_THREAD: last = 0; break;
12832 case OMP_CLAUSE_BIND_PARALLEL: last = 1; break;
12833 case OMP_CLAUSE_BIND_TEAMS: last = 2; break;
12834 }
12835 for (int pass = 1; pass <= last; pass++)
12836 {
12837 if (pass == 2)
12838 {
12839 tree bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
12840 append_to_statement_list (*expr_p, &BIND_EXPR_BODY (bind));
12841 *expr_p = make_node (OMP_PARALLEL);
12842 TREE_TYPE (*expr_p) = void_type_node;
12843 OMP_PARALLEL_BODY (*expr_p) = bind;
12844 OMP_PARALLEL_COMBINED (*expr_p) = 1;
12845 SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (for_stmt));
12846 tree *pc = &OMP_PARALLEL_CLAUSES (*expr_p);
12847 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
12848 if (OMP_FOR_ORIG_DECLS (for_stmt)
12849 && (TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i))
12850 == TREE_LIST))
12851 {
12852 tree elt = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
12853 if (TREE_PURPOSE (elt) && TREE_VALUE (elt))
12854 {
12855 *pc = build_omp_clause (UNKNOWN_LOCATION,
12856 OMP_CLAUSE_FIRSTPRIVATE);
12857 OMP_CLAUSE_DECL (*pc) = TREE_VALUE (elt);
12858 pc = &OMP_CLAUSE_CHAIN (*pc);
12859 }
12860 }
12861 }
12862 tree t = make_node (pass == 2 ? OMP_DISTRIBUTE : OMP_FOR);
12863 tree *pc = &OMP_FOR_CLAUSES (t);
12864 TREE_TYPE (t) = void_type_node;
12865 OMP_FOR_BODY (t) = *expr_p;
12866 SET_EXPR_LOCATION (t, EXPR_LOCATION (for_stmt));
12867 for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
12868 switch (OMP_CLAUSE_CODE (c))
12869 {
12870 case OMP_CLAUSE_BIND:
12871 case OMP_CLAUSE_ORDER:
12872 case OMP_CLAUSE_COLLAPSE:
12873 *pc = copy_node (c);
12874 pc = &OMP_CLAUSE_CHAIN (*pc);
12875 break;
12876 case OMP_CLAUSE_PRIVATE:
12877 case OMP_CLAUSE_FIRSTPRIVATE:
12878 /* Only needed on innermost. */
12879 break;
12880 case OMP_CLAUSE_LASTPRIVATE:
12881 if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c) && pass != last)
12882 {
12883 *pc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
12884 OMP_CLAUSE_FIRSTPRIVATE);
12885 OMP_CLAUSE_DECL (*pc) = OMP_CLAUSE_DECL (c);
12886 lang_hooks.decls.omp_finish_clause (*pc, NULL, false);
12887 pc = &OMP_CLAUSE_CHAIN (*pc);
12888 }
12889 *pc = copy_node (c);
12890 OMP_CLAUSE_LASTPRIVATE_STMT (*pc) = NULL_TREE;
12891 TREE_TYPE (*pc) = unshare_expr (TREE_TYPE (c));
12892 if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c))
12893 {
12894 if (pass != last)
12895 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (*pc) = 1;
12896 else
12897 lang_hooks.decls.omp_finish_clause (*pc, NULL, false);
12898 OMP_CLAUSE_LASTPRIVATE_LOOP_IV (*pc) = 0;
12899 }
12900 pc = &OMP_CLAUSE_CHAIN (*pc);
12901 break;
12902 case OMP_CLAUSE_REDUCTION:
12903 *pc = copy_node (c);
12904 OMP_CLAUSE_DECL (*pc) = unshare_expr (OMP_CLAUSE_DECL (c));
12905 TREE_TYPE (*pc) = unshare_expr (TREE_TYPE (c));
12906 OMP_CLAUSE_REDUCTION_INIT (*pc)
12907 = unshare_expr (OMP_CLAUSE_REDUCTION_INIT (c));
12908 OMP_CLAUSE_REDUCTION_MERGE (*pc)
12909 = unshare_expr (OMP_CLAUSE_REDUCTION_MERGE (c));
12910 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc))
12911 {
12912 OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc)
12913 = copy_node (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c));
12914 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc))
12915 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc)
12916 = copy_node (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c));
12917 tree nc = *pc;
12918 tree data[2] = { c, nc };
12919 walk_tree_without_duplicates (&OMP_CLAUSE_REDUCTION_INIT (nc),
12920 replace_reduction_placeholders,
12921 data);
12922 walk_tree_without_duplicates (&OMP_CLAUSE_REDUCTION_MERGE (nc),
12923 replace_reduction_placeholders,
12924 data);
12925 }
12926 pc = &OMP_CLAUSE_CHAIN (*pc);
12927 break;
12928 default:
12929 gcc_unreachable ();
12930 }
12931 *pc = NULL_TREE;
12932 *expr_p = t;
12933 }
12934 return gimplify_omp_for (expr_p, pre_p);
12935 }
12936
12937
12938 /* Helper function of optimize_target_teams, find OMP_TEAMS inside
12939 of OMP_TARGET's body. */
12940
12941 static tree
12942 find_omp_teams (tree *tp, int *walk_subtrees, void *)
12943 {
12944 *walk_subtrees = 0;
12945 switch (TREE_CODE (*tp))
12946 {
12947 case OMP_TEAMS:
12948 return *tp;
12949 case BIND_EXPR:
12950 case STATEMENT_LIST:
12951 *walk_subtrees = 1;
12952 break;
12953 default:
12954 break;
12955 }
12956 return NULL_TREE;
12957 }
12958
12959 /* Helper function of optimize_target_teams, determine if the expression
12960 can be computed safely before the target construct on the host. */
12961
12962 static tree
12963 computable_teams_clause (tree *tp, int *walk_subtrees, void *)
12964 {
12965 splay_tree_node n;
12966
12967 if (TYPE_P (*tp))
12968 {
12969 *walk_subtrees = 0;
12970 return NULL_TREE;
12971 }
12972 switch (TREE_CODE (*tp))
12973 {
12974 case VAR_DECL:
12975 case PARM_DECL:
12976 case RESULT_DECL:
12977 *walk_subtrees = 0;
12978 if (error_operand_p (*tp)
12979 || !INTEGRAL_TYPE_P (TREE_TYPE (*tp))
12980 || DECL_HAS_VALUE_EXPR_P (*tp)
12981 || DECL_THREAD_LOCAL_P (*tp)
12982 || TREE_SIDE_EFFECTS (*tp)
12983 || TREE_THIS_VOLATILE (*tp))
12984 return *tp;
12985 if (is_global_var (*tp)
12986 && (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (*tp))
12987 || lookup_attribute ("omp declare target link",
12988 DECL_ATTRIBUTES (*tp))))
12989 return *tp;
12990 if (VAR_P (*tp)
12991 && !DECL_SEEN_IN_BIND_EXPR_P (*tp)
12992 && !is_global_var (*tp)
12993 && decl_function_context (*tp) == current_function_decl)
12994 return *tp;
12995 n = splay_tree_lookup (gimplify_omp_ctxp->variables,
12996 (splay_tree_key) *tp);
12997 if (n == NULL)
12998 {
12999 if (gimplify_omp_ctxp->defaultmap[GDMK_SCALAR] & GOVD_FIRSTPRIVATE)
13000 return NULL_TREE;
13001 return *tp;
13002 }
13003 else if (n->value & GOVD_LOCAL)
13004 return *tp;
13005 else if (n->value & GOVD_FIRSTPRIVATE)
13006 return NULL_TREE;
13007 else if ((n->value & (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
13008 == (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
13009 return NULL_TREE;
13010 return *tp;
13011 case INTEGER_CST:
13012 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
13013 return *tp;
13014 return NULL_TREE;
13015 case TARGET_EXPR:
13016 if (TARGET_EXPR_INITIAL (*tp)
13017 || TREE_CODE (TARGET_EXPR_SLOT (*tp)) != VAR_DECL)
13018 return *tp;
13019 return computable_teams_clause (&TARGET_EXPR_SLOT (*tp),
13020 walk_subtrees, NULL);
13021 /* Allow some reasonable subset of integral arithmetic. */
13022 case PLUS_EXPR:
13023 case MINUS_EXPR:
13024 case MULT_EXPR:
13025 case TRUNC_DIV_EXPR:
13026 case CEIL_DIV_EXPR:
13027 case FLOOR_DIV_EXPR:
13028 case ROUND_DIV_EXPR:
13029 case TRUNC_MOD_EXPR:
13030 case CEIL_MOD_EXPR:
13031 case FLOOR_MOD_EXPR:
13032 case ROUND_MOD_EXPR:
13033 case RDIV_EXPR:
13034 case EXACT_DIV_EXPR:
13035 case MIN_EXPR:
13036 case MAX_EXPR:
13037 case LSHIFT_EXPR:
13038 case RSHIFT_EXPR:
13039 case BIT_IOR_EXPR:
13040 case BIT_XOR_EXPR:
13041 case BIT_AND_EXPR:
13042 case NEGATE_EXPR:
13043 case ABS_EXPR:
13044 case BIT_NOT_EXPR:
13045 case NON_LVALUE_EXPR:
13046 CASE_CONVERT:
13047 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
13048 return *tp;
13049 return NULL_TREE;
13050 /* And disallow anything else, except for comparisons. */
13051 default:
13052 if (COMPARISON_CLASS_P (*tp))
13053 return NULL_TREE;
13054 return *tp;
13055 }
13056 }
13057
13058 /* Try to determine whether the num_teams and/or thread_limit
13059 expressions can have their values determined already before
13060 entering the target construct.
13061 INTEGER_CSTs trivially can, and so can integral decls that are
13062 firstprivate (explicitly or implicitly) or explicitly
13063 map(always, to:) or map(always, tofrom:) on the target region,
13064 as can expressions involving simple arithmetic on those.
13065 Function calls are not ok, nor is dereferencing something, etc.
13066 Add NUM_TEAMS and THREAD_LIMIT clauses to the OMP_CLAUSES of
13067 EXPR based on what we find:
13068 0 stands for a clause not specified at all, use implementation default;
13069 -1 stands for a value that can't be determined easily before
13070 entering the target construct.
13071 If a teams construct is not present at all, use 1 for num_teams
13072 and 0 for thread_limit (only one team is involved, and the thread
13073 limit is implementation defined). */
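/* For example (an illustrative sketch): in

     #pragma omp target
     #pragma omp teams num_teams (n + 1) thread_limit (f ())

   with N firstprivate on the target, "n + 1" can be evaluated on the
   host and becomes the NUM_TEAMS expression, while "f ()" involves a
   call and is recorded as -1.  */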
13074
13075 static void
13076 optimize_target_teams (tree target, gimple_seq *pre_p)
13077 {
13078 tree body = OMP_BODY (target);
13079 tree teams = walk_tree (&body, find_omp_teams, NULL, NULL);
13080 tree num_teams = integer_zero_node;
13081 tree thread_limit = integer_zero_node;
13082 location_t num_teams_loc = EXPR_LOCATION (target);
13083 location_t thread_limit_loc = EXPR_LOCATION (target);
13084 tree c, *p, expr;
13085 struct gimplify_omp_ctx *target_ctx = gimplify_omp_ctxp;
13086
13087 if (teams == NULL_TREE)
13088 num_teams = integer_one_node;
13089 else
13090 for (c = OMP_TEAMS_CLAUSES (teams); c; c = OMP_CLAUSE_CHAIN (c))
13091 {
13092 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NUM_TEAMS)
13093 {
13094 p = &num_teams;
13095 num_teams_loc = OMP_CLAUSE_LOCATION (c);
13096 }
13097 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREAD_LIMIT)
13098 {
13099 p = &thread_limit;
13100 thread_limit_loc = OMP_CLAUSE_LOCATION (c);
13101 }
13102 else
13103 continue;
13104 expr = OMP_CLAUSE_OPERAND (c, 0);
13105 if (TREE_CODE (expr) == INTEGER_CST)
13106 {
13107 *p = expr;
13108 continue;
13109 }
13110 if (walk_tree (&expr, computable_teams_clause, NULL, NULL))
13111 {
13112 *p = integer_minus_one_node;
13113 continue;
13114 }
13115 *p = expr;
13116 gimplify_omp_ctxp = gimplify_omp_ctxp->outer_context;
13117 if (gimplify_expr (p, pre_p, NULL, is_gimple_val, fb_rvalue, false)
13118 == GS_ERROR)
13119 {
13120 gimplify_omp_ctxp = target_ctx;
13121 *p = integer_minus_one_node;
13122 continue;
13123 }
13124 gimplify_omp_ctxp = target_ctx;
13125 if (!DECL_P (expr) && TREE_CODE (expr) != TARGET_EXPR)
13126 OMP_CLAUSE_OPERAND (c, 0) = *p;
13127 }
13128 c = build_omp_clause (thread_limit_loc, OMP_CLAUSE_THREAD_LIMIT);
13129 OMP_CLAUSE_THREAD_LIMIT_EXPR (c) = thread_limit;
13130 OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
13131 OMP_TARGET_CLAUSES (target) = c;
13132 c = build_omp_clause (num_teams_loc, OMP_CLAUSE_NUM_TEAMS);
13133 OMP_CLAUSE_NUM_TEAMS_EXPR (c) = num_teams;
13134 OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
13135 OMP_TARGET_CLAUSES (target) = c;
13136 }
13137
13138 /* Gimplify the gross structure of several OMP constructs. */
13139
13140 static void
13141 gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
13142 {
13143 tree expr = *expr_p;
13144 gimple *stmt;
13145 gimple_seq body = NULL;
13146 enum omp_region_type ort;
13147
13148 switch (TREE_CODE (expr))
13149 {
13150 case OMP_SECTIONS:
13151 case OMP_SINGLE:
13152 ort = ORT_WORKSHARE;
13153 break;
13154 case OMP_TARGET:
13155 ort = OMP_TARGET_COMBINED (expr) ? ORT_COMBINED_TARGET : ORT_TARGET;
13156 break;
13157 case OACC_KERNELS:
13158 ort = ORT_ACC_KERNELS;
13159 break;
13160 case OACC_PARALLEL:
13161 ort = ORT_ACC_PARALLEL;
13162 break;
13163 case OACC_SERIAL:
13164 ort = ORT_ACC_SERIAL;
13165 break;
13166 case OACC_DATA:
13167 ort = ORT_ACC_DATA;
13168 break;
13169 case OMP_TARGET_DATA:
13170 ort = ORT_TARGET_DATA;
13171 break;
13172 case OMP_TEAMS:
13173 ort = OMP_TEAMS_COMBINED (expr) ? ORT_COMBINED_TEAMS : ORT_TEAMS;
13174 if (gimplify_omp_ctxp == NULL
13175 || gimplify_omp_ctxp->region_type == ORT_IMPLICIT_TARGET)
13176 ort = (enum omp_region_type) (ort | ORT_HOST_TEAMS);
13177 break;
13178 case OACC_HOST_DATA:
13179 ort = ORT_ACC_HOST_DATA;
13180 break;
13181 default:
13182 gcc_unreachable ();
13183 }
13184
13185 bool save_in_omp_construct = in_omp_construct;
13186 if ((ort & ORT_ACC) == 0)
13187 in_omp_construct = false;
13188 gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ort,
13189 TREE_CODE (expr));
13190 if (TREE_CODE (expr) == OMP_TARGET)
13191 optimize_target_teams (expr, pre_p);
13192 if ((ort & (ORT_TARGET | ORT_TARGET_DATA)) != 0
13193 || (ort & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
13194 {
13195 push_gimplify_context ();
13196 gimple *g = gimplify_and_return_first (OMP_BODY (expr), &body);
13197 if (gimple_code (g) == GIMPLE_BIND)
13198 pop_gimplify_context (g);
13199 else
13200 pop_gimplify_context (NULL);
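/* For data regions, arrange for the matching "end data" builtin to be
   called whenever the body is left, by wrapping the body in a
   GIMPLE_TRY_FINALLY.  */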
13201 if ((ort & ORT_TARGET_DATA) != 0)
13202 {
13203 enum built_in_function end_ix;
13204 switch (TREE_CODE (expr))
13205 {
13206 case OACC_DATA:
13207 case OACC_HOST_DATA:
13208 end_ix = BUILT_IN_GOACC_DATA_END;
13209 break;
13210 case OMP_TARGET_DATA:
13211 end_ix = BUILT_IN_GOMP_TARGET_END_DATA;
13212 break;
13213 default:
13214 gcc_unreachable ();
13215 }
13216 tree fn = builtin_decl_explicit (end_ix);
13217 g = gimple_build_call (fn, 0);
13218 gimple_seq cleanup = NULL;
13219 gimple_seq_add_stmt (&cleanup, g);
13220 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
13221 body = NULL;
13222 gimple_seq_add_stmt (&body, g);
13223 }
13224 }
13225 else
13226 gimplify_and_add (OMP_BODY (expr), &body);
13227 gimplify_adjust_omp_clauses (pre_p, body, &OMP_CLAUSES (expr),
13228 TREE_CODE (expr));
13229 in_omp_construct = save_in_omp_construct;
13230
13231 switch (TREE_CODE (expr))
13232 {
13233 case OACC_DATA:
13234 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_DATA,
13235 OMP_CLAUSES (expr));
13236 break;
13237 case OACC_HOST_DATA:
13238 if (omp_find_clause (OMP_CLAUSES (expr), OMP_CLAUSE_IF_PRESENT))
13239 {
13240 for (tree c = OMP_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
13241 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
13242 OMP_CLAUSE_USE_DEVICE_PTR_IF_PRESENT (c) = 1;
13243 }
13244
13245 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_HOST_DATA,
13246 OMP_CLAUSES (expr));
13247 break;
13248 case OACC_KERNELS:
13249 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_KERNELS,
13250 OMP_CLAUSES (expr));
13251 break;
13252 case OACC_PARALLEL:
13253 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_PARALLEL,
13254 OMP_CLAUSES (expr));
13255 break;
13256 case OACC_SERIAL:
13257 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_SERIAL,
13258 OMP_CLAUSES (expr));
13259 break;
13260 case OMP_SECTIONS:
13261 stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
13262 break;
13263 case OMP_SINGLE:
13264 stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
13265 break;
13266 case OMP_TARGET:
13267 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_REGION,
13268 OMP_CLAUSES (expr));
13269 break;
13270 case OMP_TARGET_DATA:
13271 /* Put use_device_{ptr,addr} clauses last, as map clauses are supposed
13272 to be evaluated before the use_device_{ptr,addr} clauses if they
13273 refer to the same variables. */
13274 {
13275 tree use_device_clauses;
13276 tree *pc, *uc = &use_device_clauses;
13277 for (pc = &OMP_CLAUSES (expr); *pc; )
13278 if (OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_USE_DEVICE_PTR
13279 || OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_USE_DEVICE_ADDR)
13280 {
13281 *uc = *pc;
13282 *pc = OMP_CLAUSE_CHAIN (*pc);
13283 uc = &OMP_CLAUSE_CHAIN (*uc);
13284 }
13285 else
13286 pc = &OMP_CLAUSE_CHAIN (*pc);
13287 *uc = NULL_TREE;
13288 *pc = use_device_clauses;
13289 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_DATA,
13290 OMP_CLAUSES (expr));
13291 }
13292 break;
13293 case OMP_TEAMS:
13294 stmt = gimple_build_omp_teams (body, OMP_CLAUSES (expr));
13295 if ((ort & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
13296 gimple_omp_teams_set_host (as_a <gomp_teams *> (stmt), true);
13297 break;
13298 default:
13299 gcc_unreachable ();
13300 }
13301
13302 gimplify_seq_add_stmt (pre_p, stmt);
13303 *expr_p = NULL_TREE;
13304 }
13305
13306 /* Gimplify the gross structure of OpenACC enter/exit data, update, and OpenMP
13307 target enter/exit data and target update constructs. */
13308
13309 static void
13310 gimplify_omp_target_update (tree *expr_p, gimple_seq *pre_p)
13311 {
13312 tree expr = *expr_p;
13313 int kind;
13314 gomp_target *stmt;
13315 enum omp_region_type ort = ORT_WORKSHARE;
13316
13317 switch (TREE_CODE (expr))
13318 {
13319 case OACC_ENTER_DATA:
13320 case OACC_EXIT_DATA:
13321 kind = GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA;
13322 ort = ORT_ACC;
13323 break;
13324 case OACC_UPDATE:
13325 kind = GF_OMP_TARGET_KIND_OACC_UPDATE;
13326 ort = ORT_ACC;
13327 break;
13328 case OMP_TARGET_UPDATE:
13329 kind = GF_OMP_TARGET_KIND_UPDATE;
13330 break;
13331 case OMP_TARGET_ENTER_DATA:
13332 kind = GF_OMP_TARGET_KIND_ENTER_DATA;
13333 break;
13334 case OMP_TARGET_EXIT_DATA:
13335 kind = GF_OMP_TARGET_KIND_EXIT_DATA;
13336 break;
13337 default:
13338 gcc_unreachable ();
13339 }
13340 gimplify_scan_omp_clauses (&OMP_STANDALONE_CLAUSES (expr), pre_p,
13341 ort, TREE_CODE (expr));
13342 gimplify_adjust_omp_clauses (pre_p, NULL, &OMP_STANDALONE_CLAUSES (expr),
13343 TREE_CODE (expr));
13344 if (TREE_CODE (expr) == OACC_UPDATE
13345 && omp_find_clause (OMP_STANDALONE_CLAUSES (expr),
13346 OMP_CLAUSE_IF_PRESENT))
13347 {
13348 /* The runtime uses GOMP_MAP_{TO,FROM} to denote the if_present
13349 clause. */
13350 for (tree c = OMP_STANDALONE_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
13351 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP)
13352 switch (OMP_CLAUSE_MAP_KIND (c))
13353 {
13354 case GOMP_MAP_FORCE_TO:
13355 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TO);
13356 break;
13357 case GOMP_MAP_FORCE_FROM:
13358 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FROM);
13359 break;
13360 default:
13361 break;
13362 }
13363 }
13364 else if (TREE_CODE (expr) == OACC_EXIT_DATA
13365 && omp_find_clause (OMP_STANDALONE_CLAUSES (expr),
13366 OMP_CLAUSE_FINALIZE))
13367 {
13368 /* Use GOMP_MAP_DELETE/GOMP_MAP_FORCE_FROM to denote "finalize"
13369 semantics. */
13370 bool have_clause = false;
13371 for (tree c = OMP_STANDALONE_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
13372 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP)
13373 switch (OMP_CLAUSE_MAP_KIND (c))
13374 {
13375 case GOMP_MAP_FROM:
13376 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FORCE_FROM);
13377 have_clause = true;
13378 break;
13379 case GOMP_MAP_RELEASE:
13380 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_DELETE);
13381 have_clause = true;
13382 break;
13383 case GOMP_MAP_TO_PSET:
13384 /* Fortran arrays with descriptors must map that descriptor when
13385 doing standalone "attach" operations (in OpenACC). In that
13386 case GOMP_MAP_TO_PSET appears by itself with no preceding
13387 clause (see trans-openmp.c:gfc_trans_omp_clauses). */
13388 break;
13389 case GOMP_MAP_POINTER:
13390 /* TODO PR92929: we may see these here, but they'll always follow
13391 one of the clauses above, and will be handled by libgomp as
13392 one group, so no handling required here. */
13393 gcc_assert (have_clause);
13394 break;
13395 case GOMP_MAP_DETACH:
13396 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FORCE_DETACH);
13397 have_clause = false;
13398 break;
13399 case GOMP_MAP_STRUCT:
13400 have_clause = false;
13401 break;
13402 default:
13403 gcc_unreachable ();
13404 }
13405 }
13406 stmt = gimple_build_omp_target (NULL, kind, OMP_STANDALONE_CLAUSES (expr));
13407
13408 gimplify_seq_add_stmt (pre_p, stmt);
13409 *expr_p = NULL_TREE;
13410 }
13411
13412 /* A subroutine of gimplify_omp_atomic. The front end is supposed to have
13413 stabilized the lhs of the atomic operation as *ADDR. Return true if
13414 EXPR is this stabilized form. */
13415
13416 static bool
13417 goa_lhs_expr_p (tree expr, tree addr)
13418 {
13419 /* Also include casts to other type variants. The C front end is fond
13420 of adding these for e.g. volatile variables. This is like
13421 STRIP_TYPE_NOPS but includes the main variant lookup. */
13422 STRIP_USELESS_TYPE_CONVERSION (expr);
13423
13424 if (TREE_CODE (expr) == INDIRECT_REF)
13425 {
13426 expr = TREE_OPERAND (expr, 0);
13427 while (expr != addr
13428 && (CONVERT_EXPR_P (expr)
13429 || TREE_CODE (expr) == NON_LVALUE_EXPR)
13430 && TREE_CODE (expr) == TREE_CODE (addr)
13431 && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr)))
13432 {
13433 expr = TREE_OPERAND (expr, 0);
13434 addr = TREE_OPERAND (addr, 0);
13435 }
13436 if (expr == addr)
13437 return true;
13438 return (TREE_CODE (addr) == ADDR_EXPR
13439 && TREE_CODE (expr) == ADDR_EXPR
13440 && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
13441 }
13442 if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
13443 return true;
13444 return false;
13445 }
13446
13447 /* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR. If an
13448 expression does not involve the lhs, evaluate it into a temporary.
13449 Return 1 if the lhs appeared as a subexpression, 0 if it did not,
13450 or -1 if an error was encountered. */
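/* E.g. for "#pragma omp atomic" on "x = x + foo (y)" with LHS_ADDR &x
   (an illustrative sketch), the occurrence of x is rewritten to LHS_VAR
   while foo (y) is evaluated into a temporary in PRE_P, so that only
   the load and store of x remain inside the atomic region.  */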
13451
13452 static int
13453 goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
13454 tree lhs_var)
13455 {
13456 tree expr = *expr_p;
13457 int saw_lhs;
13458
13459 if (goa_lhs_expr_p (expr, lhs_addr))
13460 {
13461 *expr_p = lhs_var;
13462 return 1;
13463 }
13464 if (is_gimple_val (expr))
13465 return 0;
13466
13467 saw_lhs = 0;
13468 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
13469 {
13470 case tcc_binary:
13471 case tcc_comparison:
13472 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
13473 lhs_var);
13474 /* FALLTHRU */
13475 case tcc_unary:
13476 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
13477 lhs_var);
13478 break;
13479 case tcc_expression:
13480 switch (TREE_CODE (expr))
13481 {
13482 case TRUTH_ANDIF_EXPR:
13483 case TRUTH_ORIF_EXPR:
13484 case TRUTH_AND_EXPR:
13485 case TRUTH_OR_EXPR:
13486 case TRUTH_XOR_EXPR:
13487 case BIT_INSERT_EXPR:
13488 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
13489 lhs_addr, lhs_var);
13490 /* FALLTHRU */
13491 case TRUTH_NOT_EXPR:
13492 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
13493 lhs_addr, lhs_var);
13494 break;
13495 case COMPOUND_EXPR:
13496 /* Break out any preevaluations from cp_build_modify_expr. */
13497 for (; TREE_CODE (expr) == COMPOUND_EXPR;
13498 expr = TREE_OPERAND (expr, 1))
13499 gimplify_stmt (&TREE_OPERAND (expr, 0), pre_p);
13500 *expr_p = expr;
13501 return goa_stabilize_expr (expr_p, pre_p, lhs_addr, lhs_var);
13502 default:
13503 break;
13504 }
13505 break;
13506 case tcc_reference:
13507 if (TREE_CODE (expr) == BIT_FIELD_REF)
13508 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
13509 lhs_addr, lhs_var);
13510 break;
13511 default:
13512 break;
13513 }
13514
13515 if (saw_lhs == 0)
13516 {
13517 enum gimplify_status gs;
13518 gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
13519 if (gs != GS_ALL_DONE)
13520 saw_lhs = -1;
13521 }
13522
13523 return saw_lhs;
13524 }
13525
13526 /* Gimplify an OMP_ATOMIC statement. */
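/* Roughly (an illustrative sketch), "#pragma omp atomic capture
   v = x += 1" becomes a GIMPLE_OMP_ATOMIC_LOAD of x into a temporary,
   the gimplified addition, and a GIMPLE_OMP_ATOMIC_STORE of the new
   value, with *EXPR_P left pointing at the captured value.  */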
13527
13528 static enum gimplify_status
13529 gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
13530 {
13531 tree addr = TREE_OPERAND (*expr_p, 0);
13532 tree rhs = TREE_CODE (*expr_p) == OMP_ATOMIC_READ
13533 ? NULL : TREE_OPERAND (*expr_p, 1);
13534 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
13535 tree tmp_load;
13536 gomp_atomic_load *loadstmt;
13537 gomp_atomic_store *storestmt;
13538
13539 tmp_load = create_tmp_reg (type);
13540 if (rhs && goa_stabilize_expr (&rhs, pre_p, addr, tmp_load) < 0)
13541 return GS_ERROR;
13542
13543 if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
13544 != GS_ALL_DONE)
13545 return GS_ERROR;
13546
13547 loadstmt = gimple_build_omp_atomic_load (tmp_load, addr,
13548 OMP_ATOMIC_MEMORY_ORDER (*expr_p));
13549 gimplify_seq_add_stmt (pre_p, loadstmt);
13550 if (rhs)
13551 {
13552 /* BIT_INSERT_EXPR is not valid for non-integral bitfield
13553 representatives. Use BIT_FIELD_REF on the lhs instead. */
13554 if (TREE_CODE (rhs) == BIT_INSERT_EXPR
13555 && !INTEGRAL_TYPE_P (TREE_TYPE (tmp_load)))
13556 {
13557 tree bitpos = TREE_OPERAND (rhs, 2);
13558 tree op1 = TREE_OPERAND (rhs, 1);
13559 tree bitsize;
13560 tree tmp_store = tmp_load;
13561 if (TREE_CODE (*expr_p) == OMP_ATOMIC_CAPTURE_OLD)
13562 tmp_store = get_initialized_tmp_var (tmp_load, pre_p);
13563 if (INTEGRAL_TYPE_P (TREE_TYPE (op1)))
13564 bitsize = bitsize_int (TYPE_PRECISION (TREE_TYPE (op1)));
13565 else
13566 bitsize = TYPE_SIZE (TREE_TYPE (op1));
13567 gcc_assert (TREE_OPERAND (rhs, 0) == tmp_load);
13568 tree t = build2_loc (EXPR_LOCATION (rhs),
13569 MODIFY_EXPR, void_type_node,
13570 build3_loc (EXPR_LOCATION (rhs), BIT_FIELD_REF,
13571 TREE_TYPE (op1), tmp_store, bitsize,
13572 bitpos), op1);
13573 gimplify_and_add (t, pre_p);
13574 rhs = tmp_store;
13575 }
13576 if (gimplify_expr (&rhs, pre_p, NULL, is_gimple_val, fb_rvalue)
13577 != GS_ALL_DONE)
13578 return GS_ERROR;
13579 }
13580
13581 if (TREE_CODE (*expr_p) == OMP_ATOMIC_READ)
13582 rhs = tmp_load;
13583 storestmt
13584 = gimple_build_omp_atomic_store (rhs, OMP_ATOMIC_MEMORY_ORDER (*expr_p));
13585 gimplify_seq_add_stmt (pre_p, storestmt);
13586 switch (TREE_CODE (*expr_p))
13587 {
13588 case OMP_ATOMIC_READ:
13589 case OMP_ATOMIC_CAPTURE_OLD:
13590 *expr_p = tmp_load;
13591 gimple_omp_atomic_set_need_value (loadstmt);
13592 break;
13593 case OMP_ATOMIC_CAPTURE_NEW:
13594 *expr_p = rhs;
13595 gimple_omp_atomic_set_need_value (storestmt);
13596 break;
13597 default:
13598 *expr_p = NULL;
13599 break;
13600 }
13601
13602 return GS_ALL_DONE;
13603 }
13604
13605 /* Gimplify a TRANSACTION_EXPR. This involves gimplification of the
13606 body, and adding some EH bits. */
13607
13608 static enum gimplify_status
13609 gimplify_transaction (tree *expr_p, gimple_seq *pre_p)
13610 {
13611 tree expr = *expr_p, temp, tbody = TRANSACTION_EXPR_BODY (expr);
13612 gimple *body_stmt;
13613 gtransaction *trans_stmt;
13614 gimple_seq body = NULL;
13615 int subcode = 0;
13616
13617 /* Wrap the transaction body in a BIND_EXPR so we have a context
13618 in which to put decls for OMP. */
13619 if (TREE_CODE (tbody) != BIND_EXPR)
13620 {
13621 tree bind = build3 (BIND_EXPR, void_type_node, NULL, tbody, NULL);
13622 TREE_SIDE_EFFECTS (bind) = 1;
13623 SET_EXPR_LOCATION (bind, EXPR_LOCATION (tbody));
13624 TRANSACTION_EXPR_BODY (expr) = bind;
13625 }
13626
13627 push_gimplify_context ();
13628 temp = voidify_wrapper_expr (*expr_p, NULL);
13629
13630 body_stmt = gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr), &body);
13631 pop_gimplify_context (body_stmt);
13632
13633 trans_stmt = gimple_build_transaction (body);
13634 if (TRANSACTION_EXPR_OUTER (expr))
13635 subcode = GTMA_IS_OUTER;
13636 else if (TRANSACTION_EXPR_RELAXED (expr))
13637 subcode = GTMA_IS_RELAXED;
13638 gimple_transaction_set_subcode (trans_stmt, subcode);
13639
13640 gimplify_seq_add_stmt (pre_p, trans_stmt);
13641
13642 if (temp)
13643 {
13644 *expr_p = temp;
13645 return GS_OK;
13646 }
13647
13648 *expr_p = NULL_TREE;
13649 return GS_ALL_DONE;
13650 }
13651
13652 /* Gimplify an OMP_ORDERED construct. EXPR is the tree version. BODY
13653 is the OMP_BODY of the original EXPR (which has already been
13654 gimplified so it's not present in the EXPR).
13655
13656 Return the gimplified GIMPLE_OMP_ORDERED tuple. */
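/* E.g. inside a loop with an "ordered (2)" clause over iterators i and
   j, a "#pragma omp ordered depend (sink: i - 1, j)" must name exactly
   those two iteration variables, in order; the checks below diagnose
   any mismatch (an illustrative sketch).  */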
13657
13658 static gimple *
13659 gimplify_omp_ordered (tree expr, gimple_seq body)
13660 {
13661 tree c, decls;
13662 int failures = 0;
13663 unsigned int i;
13664 tree source_c = NULL_TREE;
13665 tree sink_c = NULL_TREE;
13666
13667 if (gimplify_omp_ctxp)
13668 {
13669 for (c = OMP_ORDERED_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
13670 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
13671 && gimplify_omp_ctxp->loop_iter_var.is_empty ()
13672 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK
13673 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE))
13674 {
13675 error_at (OMP_CLAUSE_LOCATION (c),
13676 "%<ordered%> construct with %<depend%> clause must be "
13677 "closely nested inside a loop with %<ordered%> clause "
13678 "with a parameter");
13679 failures++;
13680 }
13681 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
13682 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
13683 {
13684 bool fail = false;
13685 for (decls = OMP_CLAUSE_DECL (c), i = 0;
13686 decls && TREE_CODE (decls) == TREE_LIST;
13687 decls = TREE_CHAIN (decls), ++i)
13688 if (i >= gimplify_omp_ctxp->loop_iter_var.length () / 2)
13689 continue;
13690 else if (TREE_VALUE (decls)
13691 != gimplify_omp_ctxp->loop_iter_var[2 * i])
13692 {
13693 error_at (OMP_CLAUSE_LOCATION (c),
13694 "variable %qE is not an iteration "
13695 "of outermost loop %d, expected %qE",
13696 TREE_VALUE (decls), i + 1,
13697 gimplify_omp_ctxp->loop_iter_var[2 * i]);
13698 fail = true;
13699 failures++;
13700 }
13701 else
13702 TREE_VALUE (decls)
13703 = gimplify_omp_ctxp->loop_iter_var[2 * i + 1];
13704 if (!fail && i != gimplify_omp_ctxp->loop_iter_var.length () / 2)
13705 {
13706 error_at (OMP_CLAUSE_LOCATION (c),
13707 "number of variables in %<depend%> clause with "
13708 "%<sink%> modifier does not match number of "
13709 "iteration variables");
13710 failures++;
13711 }
13712 sink_c = c;
13713 }
13714 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
13715 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE)
13716 {
13717 if (source_c)
13718 {
13719 error_at (OMP_CLAUSE_LOCATION (c),
13720 "more than one %<depend%> clause with %<source%> "
13721 "modifier on an %<ordered%> construct");
13722 failures++;
13723 }
13724 else
13725 source_c = c;
13726 }
13727 }
13728 if (source_c && sink_c)
13729 {
13730 error_at (OMP_CLAUSE_LOCATION (source_c),
13731 "%<depend%> clause with %<source%> modifier specified "
13732 "together with %<depend%> clauses with %<sink%> modifier "
13733 "on the same construct");
13734 failures++;
13735 }
13736
13737 if (failures)
13738 return gimple_build_nop ();
13739 return gimple_build_omp_ordered (body, OMP_ORDERED_CLAUSES (expr));
13740 }
13741
13742 /* Convert the GENERIC expression tree *EXPR_P to GIMPLE. If the
13743 expression produces a value to be used as an operand inside a GIMPLE
13744 statement, the value will be stored back in *EXPR_P. This value will
13745 be a tree of class tcc_declaration, tcc_constant, tcc_reference or
13746 an SSA_NAME. The corresponding sequence of GIMPLE statements is
13747 emitted in PRE_P and POST_P.
13748
13749 Additionally, this process may overwrite parts of the input
13750 expression during gimplification. Ideally, it should be
13751 possible to do non-destructive gimplification.
13752
13753 EXPR_P points to the GENERIC expression to convert to GIMPLE. If
13754 the expression needs to evaluate to a value to be used as
13755 an operand in a GIMPLE statement, this value will be stored in
13756 *EXPR_P on exit. This happens when the caller specifies one
13757 of fb_lvalue or fb_rvalue fallback flags.
13758
13759 PRE_P will contain the sequence of GIMPLE statements corresponding
13760 to the evaluation of EXPR and all the side-effects that must
13761 be executed before the main expression. On exit, the last
13762 statement of PRE_P is the core statement being gimplified. For
13763 instance, when gimplifying 'if (++a)' the last statement in
13764 PRE_P will be 'if (t.1)' where t.1 is the result of
13765 pre-incrementing 'a'.
13766
13767 POST_P will contain the sequence of GIMPLE statements corresponding
13768 to the evaluation of all the side-effects that must be executed
13769 after the main expression. If this is NULL, the post
13770 side-effects are stored at the end of PRE_P.
13771
13772 The reason why the output is split in two is to handle post
13773 side-effects explicitly. In some cases, an expression may have
13774 inner and outer post side-effects which need to be emitted in
13775 an order different from the one given by the recursive
13776 traversal. For instance, for the expression (*p--)++ the post
13777 side-effects of '--' must actually occur *after* the post
13778 side-effects of '++'. However, gimplification will first visit
13779 the inner expression, so if a separate POST sequence was not
13780 used, the resulting sequence would be:
13781
13782 1 t.1 = *p
13783 2 p = p - 1
13784 3 t.2 = t.1 + 1
13785 4 *p = t.2
13786
13787 However, the post-decrement operation in line #2 must not be
13788 evaluated until after the store to *p at line #4, so the
13789 correct sequence should be:
13790
13791 1 t.1 = *p
13792 2 t.2 = t.1 + 1
13793 3 *p = t.2
13794 4 p = p - 1
13795
13796 So, by specifying a separate post queue, it is possible
13797 to emit the post side-effects in the correct order.
13798 If POST_P is NULL, an internal queue will be used. Before
13799 returning to the caller, the sequence POST_P is appended to
13800 the main output sequence PRE_P.
13801
13802 GIMPLE_TEST_F points to a function that takes a tree T and
13803 returns nonzero if T is in the GIMPLE form requested by the
13804 caller. The GIMPLE predicates are in gimple.c.
13805
13806 FALLBACK tells the function what sort of a temporary we want if
13807 gimplification cannot produce an expression that complies with
13808 GIMPLE_TEST_F.
13809
13810 fb_none means that no temporary should be generated
13811 fb_rvalue means that an rvalue is OK to generate
13812 fb_lvalue means that an lvalue is OK to generate
13813 fb_either means that either is OK, but an lvalue is preferable.
13814 fb_mayfail means that gimplification may fail (in which case
13815 GS_ERROR will be returned)
13816
13817 The return value is either GS_ERROR or GS_ALL_DONE, since this
13818 function iterates until EXPR is completely gimplified or an error
13819 occurs. */
13820
13821 enum gimplify_status
13822 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
13823 bool (*gimple_test_f) (tree), fallback_t fallback)
13824 {
13825 tree tmp;
13826 gimple_seq internal_pre = NULL;
13827 gimple_seq internal_post = NULL;
13828 tree save_expr;
13829 bool is_statement;
13830 location_t saved_location;
13831 enum gimplify_status ret;
13832 gimple_stmt_iterator pre_last_gsi, post_last_gsi;
13833 tree label;
13834
13835 save_expr = *expr_p;
13836 if (save_expr == NULL_TREE)
13837 return GS_ALL_DONE;
13838
13839 /* If we are gimplifying a top-level statement, PRE_P must be valid. */
13840 is_statement = gimple_test_f == is_gimple_stmt;
13841 if (is_statement)
13842 gcc_assert (pre_p);
13843
13844 /* Consistency checks. */
13845 if (gimple_test_f == is_gimple_reg)
13846 gcc_assert (fallback & (fb_rvalue | fb_lvalue));
13847 else if (gimple_test_f == is_gimple_val
13848 || gimple_test_f == is_gimple_call_addr
13849 || gimple_test_f == is_gimple_condexpr
13850 || gimple_test_f == is_gimple_condexpr_for_cond
13851 || gimple_test_f == is_gimple_mem_rhs
13852 || gimple_test_f == is_gimple_mem_rhs_or_call
13853 || gimple_test_f == is_gimple_reg_rhs
13854 || gimple_test_f == is_gimple_reg_rhs_or_call
13855 || gimple_test_f == is_gimple_asm_val
13856 || gimple_test_f == is_gimple_mem_ref_addr)
13857 gcc_assert (fallback & fb_rvalue);
13858 else if (gimple_test_f == is_gimple_min_lval
13859 || gimple_test_f == is_gimple_lvalue)
13860 gcc_assert (fallback & fb_lvalue);
13861 else if (gimple_test_f == is_gimple_addressable)
13862 gcc_assert (fallback & fb_either);
13863 else if (gimple_test_f == is_gimple_stmt)
13864 gcc_assert (fallback == fb_none);
13865 else
13866 {
13867 /* We should have recognized the GIMPLE_TEST_F predicate to
13868 know what kind of fallback to use in case a temporary is
13869 needed to hold the value or address of *EXPR_P. */
13870 gcc_unreachable ();
13871 }
13872
13873 /* We used to check the predicate here and return immediately if it
13874 succeeds. This is wrong; the design is for gimplification to be
13875 idempotent, and for the predicates to only test for valid forms, not
13876 whether they are fully simplified. */
13877 if (pre_p == NULL)
13878 pre_p = &internal_pre;
13879
13880 if (post_p == NULL)
13881 post_p = &internal_post;
13882
13883 /* Remember the last statements added to PRE_P and POST_P. Every
13884 new statement added by the gimplification helpers needs to be
13885 annotated with location information. To centralize the
13886 responsibility, we remember the last statement that had been
13887 added to both queues before gimplifying *EXPR_P. If
13888 gimplification produces new statements in PRE_P and POST_P, those
13889 statements will be annotated with the same location information
13890 as *EXPR_P. */
13891 pre_last_gsi = gsi_last (*pre_p);
13892 post_last_gsi = gsi_last (*post_p);
13893
13894 saved_location = input_location;
13895 if (save_expr != error_mark_node
13896 && EXPR_HAS_LOCATION (*expr_p))
13897 input_location = EXPR_LOCATION (*expr_p);
13898
13899 /* Loop over the specific gimplifiers until the toplevel node
13900 remains the same. */
13901 do
13902 {
13903 /* Strip away as many useless type conversions as possible
13904 at the toplevel. */
13905 STRIP_USELESS_TYPE_CONVERSION (*expr_p);
13906
13907 /* Remember the expr. */
13908 save_expr = *expr_p;
13909
13910 /* Die, die, die, my darling. */
13911 if (error_operand_p (save_expr))
13912 {
13913 ret = GS_ERROR;
13914 break;
13915 }
13916
13917 /* Do any language-specific gimplification. */
13918 ret = ((enum gimplify_status)
13919 lang_hooks.gimplify_expr (expr_p, pre_p, post_p));
13920 if (ret == GS_OK)
13921 {
13922 if (*expr_p == NULL_TREE)
13923 break;
13924 if (*expr_p != save_expr)
13925 continue;
13926 }
13927 else if (ret != GS_UNHANDLED)
13928 break;
13929
13930 /* Make sure that all the cases set 'ret' appropriately. */
13931 ret = GS_UNHANDLED;
13932 switch (TREE_CODE (*expr_p))
13933 {
13934 /* First deal with the special cases. */
13935
13936 case POSTINCREMENT_EXPR:
13937 case POSTDECREMENT_EXPR:
13938 case PREINCREMENT_EXPR:
13939 case PREDECREMENT_EXPR:
13940 ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
13941 fallback != fb_none,
13942 TREE_TYPE (*expr_p));
13943 break;
13944
13945 case VIEW_CONVERT_EXPR:
13946 if ((fallback & fb_rvalue)
13947 && is_gimple_reg_type (TREE_TYPE (*expr_p))
13948 && is_gimple_reg_type (TREE_TYPE (TREE_OPERAND (*expr_p, 0))))
13949 {
13950 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
13951 post_p, is_gimple_val, fb_rvalue);
13952 recalculate_side_effects (*expr_p);
13953 break;
13954 }
13955 /* Fallthru. */
13956
13957 case ARRAY_REF:
13958 case ARRAY_RANGE_REF:
13959 case REALPART_EXPR:
13960 case IMAGPART_EXPR:
13961 case COMPONENT_REF:
13962 ret = gimplify_compound_lval (expr_p, pre_p, post_p,
13963 fallback ? fallback : fb_rvalue);
13964 break;
13965
13966 case COND_EXPR:
13967 ret = gimplify_cond_expr (expr_p, pre_p, fallback);
13968
13969 /* C99 code may assign to an array in a structure value of a
13970 conditional expression, and this has undefined behavior
13971 only on execution, so create a temporary if an lvalue is
13972 required. */
13973 if (fallback == fb_lvalue)
13974 {
13975 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
13976 mark_addressable (*expr_p);
13977 ret = GS_OK;
13978 }
13979 break;
13980
13981 case CALL_EXPR:
13982 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
13983
13984 /* C99 code may assign to an array in a structure returned
13985 from a function, and this has undefined behavior only on
13986 execution, so create a temporary if an lvalue is
13987 required. */
13988 if (fallback == fb_lvalue)
13989 {
13990 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
13991 mark_addressable (*expr_p);
13992 ret = GS_OK;
13993 }
13994 break;
13995
13996 case TREE_LIST:
13997 gcc_unreachable ();
13998
13999 case COMPOUND_EXPR:
14000 ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
14001 break;
14002
14003 case COMPOUND_LITERAL_EXPR:
14004 ret = gimplify_compound_literal_expr (expr_p, pre_p,
14005 gimple_test_f, fallback);
14006 break;
14007
14008 case MODIFY_EXPR:
14009 case INIT_EXPR:
14010 ret = gimplify_modify_expr (expr_p, pre_p, post_p,
14011 fallback != fb_none);
14012 break;
14013
14014 case TRUTH_ANDIF_EXPR:
14015 case TRUTH_ORIF_EXPR:
14016 {
14017 /* Preserve the original type of the expression and the
14018 source location of the outer expression. */
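/* Illustrative sketch: an int-typed "a && b" is boolified and then
rewritten as the COND_EXPR "(a && b) ? 1 : 0" (with the 1/0 folded
back to the original type), which gimplify_cond_expr will lower on
the next iteration of this loop. */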
14019 tree org_type = TREE_TYPE (*expr_p);
14020 *expr_p = gimple_boolify (*expr_p);
14021 *expr_p = build3_loc (input_location, COND_EXPR,
14022 org_type, *expr_p,
14023 fold_convert_loc
14024 (input_location,
14025 org_type, boolean_true_node),
14026 fold_convert_loc
14027 (input_location,
14028 org_type, boolean_false_node));
14029 ret = GS_OK;
14030 break;
14031 }
14032
14033 case TRUTH_NOT_EXPR:
14034 {
14035 tree type = TREE_TYPE (*expr_p);
14036 /* The parsers are careful to generate TRUTH_NOT_EXPR
14037 only with operands that are always zero or one.
14038 We do not fold here but handle the only interesting case
14039 manually, as fold may re-introduce the TRUTH_NOT_EXPR. */
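/* Sketch of the rewrite: for a one-bit boolified operand B, "!B"
becomes "~B" (BIT_NOT_EXPR); for wider precision it becomes
"B ^ 1" (BIT_XOR_EXPR), converted back to TYPE if needed. */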
14040 *expr_p = gimple_boolify (*expr_p);
14041 if (TYPE_PRECISION (TREE_TYPE (*expr_p)) == 1)
14042 *expr_p = build1_loc (input_location, BIT_NOT_EXPR,
14043 TREE_TYPE (*expr_p),
14044 TREE_OPERAND (*expr_p, 0));
14045 else
14046 *expr_p = build2_loc (input_location, BIT_XOR_EXPR,
14047 TREE_TYPE (*expr_p),
14048 TREE_OPERAND (*expr_p, 0),
14049 build_int_cst (TREE_TYPE (*expr_p), 1));
14050 if (!useless_type_conversion_p (type, TREE_TYPE (*expr_p)))
14051 *expr_p = fold_convert_loc (input_location, type, *expr_p);
14052 ret = GS_OK;
14053 break;
14054 }
14055
14056 case ADDR_EXPR:
14057 ret = gimplify_addr_expr (expr_p, pre_p, post_p);
14058 break;
14059
14060 case ANNOTATE_EXPR:
14061 {
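/* ANNOTATE_EXPR carries loop-annotation hints (e.g. from
"#pragma GCC ivdep") on a condition COND. It is lowered to an
IFN_ANNOTATE internal call whose result replaces the expression;
non-integral conditions simply drop the annotation. */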
14062 tree cond = TREE_OPERAND (*expr_p, 0);
14063 tree kind = TREE_OPERAND (*expr_p, 1);
14064 tree data = TREE_OPERAND (*expr_p, 2);
14065 tree type = TREE_TYPE (cond);
14066 if (!INTEGRAL_TYPE_P (type))
14067 {
14068 *expr_p = cond;
14069 ret = GS_OK;
14070 break;
14071 }
14072 tree tmp = create_tmp_var (type);
14073 gimplify_arg (&cond, pre_p, EXPR_LOCATION (*expr_p));
14074 gcall *call
14075 = gimple_build_call_internal (IFN_ANNOTATE, 3, cond, kind, data);
14076 gimple_call_set_lhs (call, tmp);
14077 gimplify_seq_add_stmt (pre_p, call);
14078 *expr_p = tmp;
14079 ret = GS_ALL_DONE;
14080 break;
14081 }
14082
14083 case VA_ARG_EXPR:
14084 ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
14085 break;
14086
14087 CASE_CONVERT:
14088 if (IS_EMPTY_STMT (*expr_p))
14089 {
14090 ret = GS_ALL_DONE;
14091 break;
14092 }
14093
14094 if (VOID_TYPE_P (TREE_TYPE (*expr_p))
14095 || fallback == fb_none)
14096 {
14097 /* Just strip a conversion to void (or in void context) and
14098 try again. */
14099 *expr_p = TREE_OPERAND (*expr_p, 0);
14100 ret = GS_OK;
14101 break;
14102 }
14103
14104 ret = gimplify_conversion (expr_p);
14105 if (ret == GS_ERROR)
14106 break;
14107 if (*expr_p != save_expr)
14108 break;
14109 /* FALLTHRU */
14110
14111 case FIX_TRUNC_EXPR:
14112 /* unary_expr: ... | '(' cast ')' val | ... */
14113 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
14114 is_gimple_val, fb_rvalue);
14115 recalculate_side_effects (*expr_p);
14116 break;
14117
14118 case INDIRECT_REF:
14119 {
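/* Rough shape of the lowering: an "*p" that fold_indirect_ref
cannot simplify ends up as "MEM_REF (p, 0)", with the original
pointer type kept in the offset operand and the volatile/notrap
flags carried over. */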
14120 bool volatilep = TREE_THIS_VOLATILE (*expr_p);
14121 bool notrap = TREE_THIS_NOTRAP (*expr_p);
14122 tree saved_ptr_type = TREE_TYPE (TREE_OPERAND (*expr_p, 0));
14123
14124 *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
14125 if (*expr_p != save_expr)
14126 {
14127 ret = GS_OK;
14128 break;
14129 }
14130
14131 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
14132 is_gimple_reg, fb_rvalue);
14133 if (ret == GS_ERROR)
14134 break;
14135
14136 recalculate_side_effects (*expr_p);
14137 *expr_p = fold_build2_loc (input_location, MEM_REF,
14138 TREE_TYPE (*expr_p),
14139 TREE_OPERAND (*expr_p, 0),
14140 build_int_cst (saved_ptr_type, 0));
14141 TREE_THIS_VOLATILE (*expr_p) = volatilep;
14142 TREE_THIS_NOTRAP (*expr_p) = notrap;
14143 ret = GS_OK;
14144 break;
14145 }
14146
14147 /* We arrive here through the various re-gimplification paths. */
14148 case MEM_REF:
14149 /* First try re-folding the whole thing. */
14150 tmp = fold_binary (MEM_REF, TREE_TYPE (*expr_p),
14151 TREE_OPERAND (*expr_p, 0),
14152 TREE_OPERAND (*expr_p, 1));
14153 if (tmp)
14154 {
14155 REF_REVERSE_STORAGE_ORDER (tmp)
14156 = REF_REVERSE_STORAGE_ORDER (*expr_p);
14157 *expr_p = tmp;
14158 recalculate_side_effects (*expr_p);
14159 ret = GS_OK;
14160 break;
14161 }
14162 /* Avoid re-gimplifying the address operand if it is already
14163 in suitable form. Re-gimplifying would mark the address
14164 operand addressable. Always gimplify when not in SSA form
14165 as we still may have to gimplify decls with value-exprs. */
14166 if (!gimplify_ctxp || !gimple_in_ssa_p (cfun)
14167 || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p, 0)))
14168 {
14169 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
14170 is_gimple_mem_ref_addr, fb_rvalue);
14171 if (ret == GS_ERROR)
14172 break;
14173 }
14174 recalculate_side_effects (*expr_p);
14175 ret = GS_ALL_DONE;
14176 break;
14177
14178 /* Constants need not be gimplified. */
14179 case INTEGER_CST:
14180 case REAL_CST:
14181 case FIXED_CST:
14182 case STRING_CST:
14183 case COMPLEX_CST:
14184 case VECTOR_CST:
14185 /* Drop the overflow flag on constants, we do not want
14186 that in the GIMPLE IL. */
14187 if (TREE_OVERFLOW_P (*expr_p))
14188 *expr_p = drop_tree_overflow (*expr_p);
14189 ret = GS_ALL_DONE;
14190 break;
14191
14192 case CONST_DECL:
14193 /* If we require an lvalue, such as for ADDR_EXPR, retain the
14194 CONST_DECL node. Otherwise the decl is replaceable by its
14195 value. */
14196 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
14197 if (fallback & fb_lvalue)
14198 ret = GS_ALL_DONE;
14199 else
14200 {
14201 *expr_p = DECL_INITIAL (*expr_p);
14202 ret = GS_OK;
14203 }
14204 break;
14205
14206 case DECL_EXPR:
14207 ret = gimplify_decl_expr (expr_p, pre_p);
14208 break;
14209
14210 case BIND_EXPR:
14211 ret = gimplify_bind_expr (expr_p, pre_p);
14212 break;
14213
14214 case LOOP_EXPR:
14215 ret = gimplify_loop_expr (expr_p, pre_p);
14216 break;
14217
14218 case SWITCH_EXPR:
14219 ret = gimplify_switch_expr (expr_p, pre_p);
14220 break;
14221
14222 case EXIT_EXPR:
14223 ret = gimplify_exit_expr (expr_p);
14224 break;
14225
14226 case GOTO_EXPR:
14227 /* If the target is not a LABEL_DECL, then it is a computed jump
14228 and the target needs to be gimplified. */
14229 if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
14230 {
14231 ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
14232 NULL, is_gimple_val, fb_rvalue);
14233 if (ret == GS_ERROR)
14234 break;
14235 }
14236 gimplify_seq_add_stmt (pre_p,
14237 gimple_build_goto (GOTO_DESTINATION (*expr_p)));
14238 ret = GS_ALL_DONE;
14239 break;
14240
14241 case PREDICT_EXPR:
14242 gimplify_seq_add_stmt (pre_p,
14243 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
14244 PREDICT_EXPR_OUTCOME (*expr_p)));
14245 ret = GS_ALL_DONE;
14246 break;
14247
14248 case LABEL_EXPR:
14249 ret = gimplify_label_expr (expr_p, pre_p);
14250 label = LABEL_EXPR_LABEL (*expr_p);
14251 gcc_assert (decl_function_context (label) == current_function_decl);
14252
14253 /* If the label is used in a goto statement, or the address of the
14254 label is taken, we need to unpoison all variables that were seen so
14255 far. Doing so prevents us from reporting false positives. */
14256 if (asan_poisoned_variables
14257 && asan_used_labels != NULL
14258 && asan_used_labels->contains (label))
14259 asan_poison_variables (asan_poisoned_variables, false, pre_p);
14260 break;
14261
14262 case CASE_LABEL_EXPR:
14263 ret = gimplify_case_label_expr (expr_p, pre_p);
14264
14265 if (gimplify_ctxp->live_switch_vars)
14266 asan_poison_variables (gimplify_ctxp->live_switch_vars, false,
14267 pre_p);
14268 break;
14269
14270 case RETURN_EXPR:
14271 ret = gimplify_return_expr (*expr_p, pre_p);
14272 break;
14273
14274 case CONSTRUCTOR:
14275 /* Don't reduce this in place; let gimplify_init_constructor work its
14276 magic. But if we're only elaborating this for side effects, just
14277 gimplify any element that has side-effects. */
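/* E.g. when "(struct S){ f (), 0 }" is evaluated only for effect,
just the call "f ()" survives in the statement list (sketch). */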
14278 if (fallback == fb_none)
14279 {
14280 unsigned HOST_WIDE_INT ix;
14281 tree val;
14282 tree temp = NULL_TREE;
14283 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p), ix, val)
14284 if (TREE_SIDE_EFFECTS (val))
14285 append_to_statement_list (val, &temp);
14286
14287 *expr_p = temp;
14288 ret = temp ? GS_OK : GS_ALL_DONE;
14289 }
14290 /* C99 code may assign to an array in a constructed
14291 structure or union, and this has undefined behavior only
14292 on execution, so create a temporary if an lvalue is
14293 required. */
14294 else if (fallback == fb_lvalue)
14295 {
14296 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
14297 mark_addressable (*expr_p);
14298 ret = GS_OK;
14299 }
14300 else
14301 ret = GS_ALL_DONE;
14302 break;
14303
14304 /* The following are special cases that are not handled by the
14305 original GIMPLE grammar. */
14306
14307 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
14308 eliminated. */
14309 case SAVE_EXPR:
14310 ret = gimplify_save_expr (expr_p, pre_p, post_p);
14311 break;
14312
14313 case BIT_FIELD_REF:
14314 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
14315 post_p, is_gimple_lvalue, fb_either);
14316 recalculate_side_effects (*expr_p);
14317 break;
14318
14319 case TARGET_MEM_REF:
14320 {
14321 enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;
14322
14323 if (TMR_BASE (*expr_p))
14324 r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
14325 post_p, is_gimple_mem_ref_addr, fb_either);
14326 if (TMR_INDEX (*expr_p))
14327 r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
14328 post_p, is_gimple_val, fb_rvalue);
14329 if (TMR_INDEX2 (*expr_p))
14330 r1 = gimplify_expr (&TMR_INDEX2 (*expr_p), pre_p,
14331 post_p, is_gimple_val, fb_rvalue);
14332 /* TMR_STEP and TMR_OFFSET are always integer constants. */
14333 ret = MIN (r0, r1);
14334 }
14335 break;
14336
14337 case NON_LVALUE_EXPR:
14338 /* This should have been stripped above. */
14339 gcc_unreachable ();
14340
14341 case ASM_EXPR:
14342 ret = gimplify_asm_expr (expr_p, pre_p, post_p);
14343 break;
14344
14345 case TRY_FINALLY_EXPR:
14346 case TRY_CATCH_EXPR:
14347 {
14348 gimple_seq eval, cleanup;
14349 gtry *try_;
14350
14351 /* Calls to destructors are generated automatically in FINALLY/CATCH
14352 blocks. Their location should be UNKNOWN_LOCATION. However,
14353 gimplify_call_expr will reset these call stmts to input_location
14354 if it finds a stmt's location is unknown. To prevent that resetting
14355 for destructors, we set input_location to unknown.
14356 Note that this only affects the destructor calls in the FINALLY/CATCH
14357 block; input_location is automatically restored to its original value
14358 by the end of gimplify_expr. */
14359 input_location = UNKNOWN_LOCATION;
14360 eval = cleanup = NULL;
14361 gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
14362 if (TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
14363 && TREE_CODE (TREE_OPERAND (*expr_p, 1)) == EH_ELSE_EXPR)
14364 {
14365 gimple_seq n = NULL, e = NULL;
14366 gimplify_and_add (TREE_OPERAND (TREE_OPERAND (*expr_p, 1),
14367 0), &n);
14368 gimplify_and_add (TREE_OPERAND (TREE_OPERAND (*expr_p, 1),
14369 1), &e);
14370 if (!gimple_seq_empty_p (n) && !gimple_seq_empty_p (e))
14371 {
14372 geh_else *stmt = gimple_build_eh_else (n, e);
14373 gimple_seq_add_stmt (&cleanup, stmt);
14374 }
14375 }
14376 else
14377 gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
14378 /* Don't create bogus GIMPLE_TRY with empty cleanup. */
14379 if (gimple_seq_empty_p (cleanup))
14380 {
14381 gimple_seq_add_seq (pre_p, eval);
14382 ret = GS_ALL_DONE;
14383 break;
14384 }
14385 try_ = gimple_build_try (eval, cleanup,
14386 TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
14387 ? GIMPLE_TRY_FINALLY
14388 : GIMPLE_TRY_CATCH);
14389 if (EXPR_HAS_LOCATION (save_expr))
14390 gimple_set_location (try_, EXPR_LOCATION (save_expr));
14391 else if (LOCATION_LOCUS (saved_location) != UNKNOWN_LOCATION)
14392 gimple_set_location (try_, saved_location);
14393 if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
14394 gimple_try_set_catch_is_cleanup (try_,
14395 TRY_CATCH_IS_CLEANUP (*expr_p));
14396 gimplify_seq_add_stmt (pre_p, try_);
14397 ret = GS_ALL_DONE;
14398 break;
14399 }
14400
14401 case CLEANUP_POINT_EXPR:
14402 ret = gimplify_cleanup_point_expr (expr_p, pre_p);
14403 break;
14404
14405 case TARGET_EXPR:
14406 ret = gimplify_target_expr (expr_p, pre_p, post_p);
14407 break;
14408
14409 case CATCH_EXPR:
14410 {
14411 gimple *c;
14412 gimple_seq handler = NULL;
14413 gimplify_and_add (CATCH_BODY (*expr_p), &handler);
14414 c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
14415 gimplify_seq_add_stmt (pre_p, c);
14416 ret = GS_ALL_DONE;
14417 break;
14418 }
14419
14420 case EH_FILTER_EXPR:
14421 {
14422 gimple *ehf;
14423 gimple_seq failure = NULL;
14424
14425 gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
14426 ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
14427 gimple_set_no_warning (ehf, TREE_NO_WARNING (*expr_p));
14428 gimplify_seq_add_stmt (pre_p, ehf);
14429 ret = GS_ALL_DONE;
14430 break;
14431 }
14432
14433 case OBJ_TYPE_REF:
14434 {
14435 enum gimplify_status r0, r1;
14436 r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
14437 post_p, is_gimple_val, fb_rvalue);
14438 r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
14439 post_p, is_gimple_val, fb_rvalue);
14440 TREE_SIDE_EFFECTS (*expr_p) = 0;
14441 ret = MIN (r0, r1);
14442 }
14443 break;
14444
14445 case LABEL_DECL:
14446 /* We get here when taking the address of a label. We mark
14447 the label as "forced", meaning it can never be removed and
14448 it is a potential target for any computed goto. */
14449 FORCED_LABEL (*expr_p) = 1;
14450 ret = GS_ALL_DONE;
14451 break;
14452
14453 case STATEMENT_LIST:
14454 ret = gimplify_statement_list (expr_p, pre_p);
14455 break;
14456
14457 case WITH_SIZE_EXPR:
14458 {
14459 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
14460 post_p == &internal_post ? NULL : post_p,
14461 gimple_test_f, fallback);
14462 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
14463 is_gimple_val, fb_rvalue);
14464 ret = GS_ALL_DONE;
14465 }
14466 break;
14467
14468 case VAR_DECL:
14469 case PARM_DECL:
14470 ret = gimplify_var_or_parm_decl (expr_p);
14471 break;
14472
14473 case RESULT_DECL:
14474 /* When within an OMP context, notice uses of variables. */
14475 if (gimplify_omp_ctxp)
14476 omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
14477 ret = GS_ALL_DONE;
14478 break;
14479
14480 case DEBUG_EXPR_DECL:
14481 gcc_unreachable ();
14482
14483 case DEBUG_BEGIN_STMT:
14484 gimplify_seq_add_stmt (pre_p,
14485 gimple_build_debug_begin_stmt
14486 (TREE_BLOCK (*expr_p),
14487 EXPR_LOCATION (*expr_p)));
14488 ret = GS_ALL_DONE;
14489 *expr_p = NULL;
14490 break;
14491
14492 case SSA_NAME:
14493 /* Allow callbacks into the gimplifier during optimization. */
14494 ret = GS_ALL_DONE;
14495 break;
14496
14497 case OMP_PARALLEL:
14498 gimplify_omp_parallel (expr_p, pre_p);
14499 ret = GS_ALL_DONE;
14500 break;
14501
14502 case OMP_TASK:
14503 gimplify_omp_task (expr_p, pre_p);
14504 ret = GS_ALL_DONE;
14505 break;
14506
14507 case OMP_FOR:
14508 case OMP_SIMD:
14509 case OMP_DISTRIBUTE:
14510 case OMP_TASKLOOP:
14511 case OACC_LOOP:
14512 ret = gimplify_omp_for (expr_p, pre_p);
14513 break;
14514
14515 case OMP_LOOP:
14516 ret = gimplify_omp_loop (expr_p, pre_p);
14517 break;
14518
14519 case OACC_CACHE:
14520 gimplify_oacc_cache (expr_p, pre_p);
14521 ret = GS_ALL_DONE;
14522 break;
14523
14524 case OACC_DECLARE:
14525 gimplify_oacc_declare (expr_p, pre_p);
14526 ret = GS_ALL_DONE;
14527 break;
14528
14529 case OACC_HOST_DATA:
14530 case OACC_DATA:
14531 case OACC_KERNELS:
14532 case OACC_PARALLEL:
14533 case OACC_SERIAL:
14534 case OMP_SECTIONS:
14535 case OMP_SINGLE:
14536 case OMP_TARGET:
14537 case OMP_TARGET_DATA:
14538 case OMP_TEAMS:
14539 gimplify_omp_workshare (expr_p, pre_p);
14540 ret = GS_ALL_DONE;
14541 break;
14542
14543 case OACC_ENTER_DATA:
14544 case OACC_EXIT_DATA:
14545 case OACC_UPDATE:
14546 case OMP_TARGET_UPDATE:
14547 case OMP_TARGET_ENTER_DATA:
14548 case OMP_TARGET_EXIT_DATA:
14549 gimplify_omp_target_update (expr_p, pre_p);
14550 ret = GS_ALL_DONE;
14551 break;
14552
14553 case OMP_SECTION:
14554 case OMP_MASTER:
14555 case OMP_ORDERED:
14556 case OMP_CRITICAL:
14557 case OMP_SCAN:
14558 {
14559 gimple_seq body = NULL;
14560 gimple *g;
14561 bool saved_in_omp_construct = in_omp_construct;
14562
14563 in_omp_construct = true;
14564 gimplify_and_add (OMP_BODY (*expr_p), &body);
14565 in_omp_construct = saved_in_omp_construct;
14566 switch (TREE_CODE (*expr_p))
14567 {
14568 case OMP_SECTION:
14569 g = gimple_build_omp_section (body);
14570 break;
14571 case OMP_MASTER:
14572 g = gimple_build_omp_master (body);
14573 break;
14574 case OMP_ORDERED:
14575 g = gimplify_omp_ordered (*expr_p, body);
14576 break;
14577 case OMP_CRITICAL:
14578 gimplify_scan_omp_clauses (&OMP_CRITICAL_CLAUSES (*expr_p),
14579 pre_p, ORT_WORKSHARE, OMP_CRITICAL);
14580 gimplify_adjust_omp_clauses (pre_p, body,
14581 &OMP_CRITICAL_CLAUSES (*expr_p),
14582 OMP_CRITICAL);
14583 g = gimple_build_omp_critical (body,
14584 OMP_CRITICAL_NAME (*expr_p),
14585 OMP_CRITICAL_CLAUSES (*expr_p));
14586 break;
14587 case OMP_SCAN:
14588 gimplify_scan_omp_clauses (&OMP_SCAN_CLAUSES (*expr_p),
14589 pre_p, ORT_WORKSHARE, OMP_SCAN);
14590 gimplify_adjust_omp_clauses (pre_p, body,
14591 &OMP_SCAN_CLAUSES (*expr_p),
14592 OMP_SCAN);
14593 g = gimple_build_omp_scan (body, OMP_SCAN_CLAUSES (*expr_p));
14594 break;
14595 default:
14596 gcc_unreachable ();
14597 }
14598 gimplify_seq_add_stmt (pre_p, g);
14599 ret = GS_ALL_DONE;
14600 break;
14601 }
14602
14603 case OMP_TASKGROUP:
14604 {
14605 gimple_seq body = NULL;
14606
14607 tree *pclauses = &OMP_TASKGROUP_CLAUSES (*expr_p);
14608 bool saved_in_omp_construct = in_omp_construct;
14609 gimplify_scan_omp_clauses (pclauses, pre_p, ORT_TASKGROUP,
14610 OMP_TASKGROUP);
14611 gimplify_adjust_omp_clauses (pre_p, NULL, pclauses, OMP_TASKGROUP);
14612
14613 in_omp_construct = true;
14614 gimplify_and_add (OMP_BODY (*expr_p), &body);
14615 in_omp_construct = saved_in_omp_construct;
14616 gimple_seq cleanup = NULL;
14617 tree fn = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_END);
14618 gimple *g = gimple_build_call (fn, 0);
14619 gimple_seq_add_stmt (&cleanup, g);
14620 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
14621 body = NULL;
14622 gimple_seq_add_stmt (&body, g);
14623 g = gimple_build_omp_taskgroup (body, *pclauses);
14624 gimplify_seq_add_stmt (pre_p, g);
14625 ret = GS_ALL_DONE;
14626 break;
14627 }
14628
14629 case OMP_ATOMIC:
14630 case OMP_ATOMIC_READ:
14631 case OMP_ATOMIC_CAPTURE_OLD:
14632 case OMP_ATOMIC_CAPTURE_NEW:
14633 ret = gimplify_omp_atomic (expr_p, pre_p);
14634 break;
14635
14636 case TRANSACTION_EXPR:
14637 ret = gimplify_transaction (expr_p, pre_p);
14638 break;
14639
14640 case TRUTH_AND_EXPR:
14641 case TRUTH_OR_EXPR:
14642 case TRUTH_XOR_EXPR:
14643 {
14644 tree orig_type = TREE_TYPE (*expr_p);
14645 tree new_type, xop0, xop1;
14646 *expr_p = gimple_boolify (*expr_p);
14647 new_type = TREE_TYPE (*expr_p);
14648 if (!useless_type_conversion_p (orig_type, new_type))
14649 {
14650 *expr_p = fold_convert_loc (input_location, orig_type, *expr_p);
14651 ret = GS_OK;
14652 break;
14653 }
14654
14655 /* Boolified binary truth expressions are semantically equivalent
14656 to bitwise binary expressions. Canonicalize them to the
14657 bitwise variant. */
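/* Sketch: a boolified TRUTH_AND_EXPR <a, b> is simply retagged in
place as BIT_AND_EXPR <a, b>; with operands known to be zero or
one the two are equivalent. */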
14658 switch (TREE_CODE (*expr_p))
14659 {
14660 case TRUTH_AND_EXPR:
14661 TREE_SET_CODE (*expr_p, BIT_AND_EXPR);
14662 break;
14663 case TRUTH_OR_EXPR:
14664 TREE_SET_CODE (*expr_p, BIT_IOR_EXPR);
14665 break;
14666 case TRUTH_XOR_EXPR:
14667 TREE_SET_CODE (*expr_p, BIT_XOR_EXPR);
14668 break;
14669 default:
14670 break;
14671 }
14672 /* Now make sure that operands have compatible type to
14673 expression's new_type. */
14674 xop0 = TREE_OPERAND (*expr_p, 0);
14675 xop1 = TREE_OPERAND (*expr_p, 1);
14676 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop0)))
14677 TREE_OPERAND (*expr_p, 0) = fold_convert_loc (input_location,
14678 new_type,
14679 xop0);
14680 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop1)))
14681 TREE_OPERAND (*expr_p, 1) = fold_convert_loc (input_location,
14682 new_type,
14683 xop1);
14684 /* Continue classified as tcc_binary. */
14685 goto expr_2;
14686 }
14687
14688 case VEC_COND_EXPR:
14689 goto expr_3;
14690
14691 case VEC_PERM_EXPR:
14692 /* Classified as tcc_expression. */
14693 goto expr_3;
14694
14695 case BIT_INSERT_EXPR:
14696 /* Argument 3 is a constant. */
14697 goto expr_2;
14698
14699 case POINTER_PLUS_EXPR:
14700 {
14701 enum gimplify_status r0, r1;
14702 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
14703 post_p, is_gimple_val, fb_rvalue);
14704 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
14705 post_p, is_gimple_val, fb_rvalue);
14706 recalculate_side_effects (*expr_p);
14707 ret = MIN (r0, r1);
14708 break;
14709 }
14710
14711 default:
14712 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
14713 {
14714 case tcc_comparison:
14715 /* Handle comparisons of aggregate objects of non-scalar mode
14716 with a call to memcmp. It would be nice to only have to do
14717 this for variable-sized objects, but then we'd have to allow
14718 the same nest of reference nodes we allow for MODIFY_EXPR and
14719 that's too complex.
14720
14721 Compare scalar mode aggregates as scalar mode values. Using
14722 memcmp for them would be very inefficient at best, and is
14723 plain wrong if bitfields are involved. */
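/* Rough picture: "s1 == s2" on BLKmode structs is lowered via
gimplify_variable_sized_compare to something like
"memcmp (&s1, &s2, sizeof (s1)) == 0", while a scalar-mode
aggregate is compared as its mode's scalar value. */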
14724 {
14725 tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
14726
14727 /* Vector comparisons need no boolification. */
14728 if (TREE_CODE (type) == VECTOR_TYPE)
14729 goto expr_2;
14730 else if (!AGGREGATE_TYPE_P (type))
14731 {
14732 tree org_type = TREE_TYPE (*expr_p);
14733 *expr_p = gimple_boolify (*expr_p);
14734 if (!useless_type_conversion_p (org_type,
14735 TREE_TYPE (*expr_p)))
14736 {
14737 *expr_p = fold_convert_loc (input_location,
14738 org_type, *expr_p);
14739 ret = GS_OK;
14740 }
14741 else
14742 goto expr_2;
14743 }
14744 else if (TYPE_MODE (type) != BLKmode)
14745 ret = gimplify_scalar_mode_aggregate_compare (expr_p);
14746 else
14747 ret = gimplify_variable_sized_compare (expr_p);
14748
14749 break;
14750 }
14751
14752 /* If *EXPR_P does not need to be special-cased, handle it
14753 according to its class. */
14754 case tcc_unary:
14755 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
14756 post_p, is_gimple_val, fb_rvalue);
14757 break;
14758
14759 case tcc_binary:
14760 expr_2:
14761 {
14762 enum gimplify_status r0, r1;
14763
14764 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
14765 post_p, is_gimple_val, fb_rvalue);
14766 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
14767 post_p, is_gimple_val, fb_rvalue);
14768
14769 ret = MIN (r0, r1);
14770 break;
14771 }
14772
14773 expr_3:
14774 {
14775 enum gimplify_status r0, r1, r2;
14776
14777 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
14778 post_p, is_gimple_val, fb_rvalue);
14779 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
14780 post_p, is_gimple_val, fb_rvalue);
14781 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
14782 post_p, is_gimple_val, fb_rvalue);
14783
14784 ret = MIN (MIN (r0, r1), r2);
14785 break;
14786 }
14787
14788 case tcc_declaration:
14789 case tcc_constant:
14790 ret = GS_ALL_DONE;
14791 goto dont_recalculate;
14792
14793 default:
14794 gcc_unreachable ();
14795 }
14796
14797 recalculate_side_effects (*expr_p);
14798
14799 dont_recalculate:
14800 break;
14801 }
14802
14803 gcc_assert (*expr_p || ret != GS_OK);
14804 }
14805 while (ret == GS_OK);
14806
14807 /* If we encountered an error_mark somewhere nested inside, either
14808 stub out the statement or propagate the error back out. */
14809 if (ret == GS_ERROR)
14810 {
14811 if (is_statement)
14812 *expr_p = NULL;
14813 goto out;
14814 }
14815
14816 /* This was only valid as a return value from the langhook, which
14817 we handled. Make sure it doesn't escape from any other context. */
14818 gcc_assert (ret != GS_UNHANDLED);
14819
14820 if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
14821 {
14822 /* We aren't looking for a value, and we don't have a valid
14823 statement. If it doesn't have side-effects, throw it away.
14824 We can also get here with code such as "*&&L;", where L is
14825 a LABEL_DECL that is marked as FORCED_LABEL. */
14826 if (TREE_CODE (*expr_p) == LABEL_DECL
14827 || !TREE_SIDE_EFFECTS (*expr_p))
14828 *expr_p = NULL;
14829 else if (!TREE_THIS_VOLATILE (*expr_p))
14830 {
14831 /* This is probably a _REF that contains something nested that
14832 has side effects. Recurse through the operands to find it. */
14833 enum tree_code code = TREE_CODE (*expr_p);
14834
14835 switch (code)
14836 {
14837 case COMPONENT_REF:
14838 case REALPART_EXPR:
14839 case IMAGPART_EXPR:
14840 case VIEW_CONVERT_EXPR:
14841 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
14842 gimple_test_f, fallback);
14843 break;
14844
14845 case ARRAY_REF:
14846 case ARRAY_RANGE_REF:
14847 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
14848 gimple_test_f, fallback);
14849 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
14850 gimple_test_f, fallback);
14851 break;
14852
14853 default:
14854 /* Anything else with side-effects must be converted to
14855 a valid statement before we get here. */
14856 gcc_unreachable ();
14857 }
14858
14859 *expr_p = NULL;
14860 }
14861 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
14862 && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode)
14863 {
14864 /* Historically, the compiler has treated a bare reference
14865 to a non-BLKmode volatile lvalue as forcing a load. */
14866 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
14867
14868 /* Normally, we do not want to create a temporary for a
14869 TREE_ADDRESSABLE type because such a type should not be
14870 copied by bitwise-assignment. However, we make an
14871 exception here, as all we are doing is ensuring that
14872 we read the bytes that make up the type. We use
14873 create_tmp_var_raw because create_tmp_var will abort when
14874 given a TREE_ADDRESSABLE type. */
14875 tree tmp = create_tmp_var_raw (type, "vol");
14876 gimple_add_tmp_var (tmp);
14877 gimplify_assign (tmp, *expr_p, pre_p);
14878 *expr_p = NULL;
14879 }
14880 else
14881 /* We can't do anything useful with a volatile reference to
14882 an incomplete type, so just throw it away. Likewise for
14883 a BLKmode type, since any implicit inner load should
14884 already have been turned into an explicit one by the
14885 gimplification process. */
14886 *expr_p = NULL;
14887 }
14888
14889 /* If we are gimplifying at the statement level, we're done. Tack
14890 everything together and return. */
14891 if (fallback == fb_none || is_statement)
14892 {
14893 /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
14894 it out for GC to reclaim it. */
14895 *expr_p = NULL_TREE;
14896
14897 if (!gimple_seq_empty_p (internal_pre)
14898 || !gimple_seq_empty_p (internal_post))
14899 {
14900 gimplify_seq_add_seq (&internal_pre, internal_post);
14901 gimplify_seq_add_seq (pre_p, internal_pre);
14902 }
14903
14904 /* The result of gimplifying *EXPR_P is going to be the last few
14905 statements in *PRE_P and *POST_P. Add location information
14906 to all the statements that were added by the gimplification
14907 helpers. */
14908 if (!gimple_seq_empty_p (*pre_p))
14909 annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);
14910
14911 if (!gimple_seq_empty_p (*post_p))
14912 annotate_all_with_location_after (*post_p, post_last_gsi,
14913 input_location);
14914
14915 goto out;
14916 }
14917
14918 #ifdef ENABLE_GIMPLE_CHECKING
14919 if (*expr_p)
14920 {
14921 enum tree_code code = TREE_CODE (*expr_p);
14922 /* These expressions should already be in gimple IR form. */
14923 gcc_assert (code != MODIFY_EXPR
14924 && code != ASM_EXPR
14925 && code != BIND_EXPR
14926 && code != CATCH_EXPR
14927 && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
14928 && code != EH_FILTER_EXPR
14929 && code != GOTO_EXPR
14930 && code != LABEL_EXPR
14931 && code != LOOP_EXPR
14932 && code != SWITCH_EXPR
14933 && code != TRY_FINALLY_EXPR
14934 && code != EH_ELSE_EXPR
14935 && code != OACC_PARALLEL
14936 && code != OACC_KERNELS
14937 && code != OACC_SERIAL
14938 && code != OACC_DATA
14939 && code != OACC_HOST_DATA
14940 && code != OACC_DECLARE
14941 && code != OACC_UPDATE
14942 && code != OACC_ENTER_DATA
14943 && code != OACC_EXIT_DATA
14944 && code != OACC_CACHE
14945 && code != OMP_CRITICAL
14946 && code != OMP_FOR
14947 && code != OACC_LOOP
14948 && code != OMP_MASTER
14949 && code != OMP_TASKGROUP
14950 && code != OMP_ORDERED
14951 && code != OMP_PARALLEL
14952 && code != OMP_SCAN
14953 && code != OMP_SECTIONS
14954 && code != OMP_SECTION
14955 && code != OMP_SINGLE);
14956 }
14957 #endif
14958
14959 /* Otherwise we're gimplifying a subexpression, so the resulting
14960 value is interesting. If it's a valid operand that matches
14961 GIMPLE_TEST_F, we're done. Unless we are handling some
14962 post-effects internally; if that's the case, we need to copy into
14963 a temporary before adding the post-effects to POST_P. */
14964 if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
14965 goto out;
14966
14967 /* Otherwise, we need to create a new temporary for the gimplified
14968 expression. */
14969
14970 /* We can't return an lvalue if we have an internal postqueue. The
14971 object the lvalue refers to would (probably) be modified by the
14972 postqueue; we need to copy the value out first, which means an
14973 rvalue. */
14974 if ((fallback & fb_lvalue)
14975 && gimple_seq_empty_p (internal_post)
14976 && is_gimple_addressable (*expr_p))
14977 {
14978 /* An lvalue will do. Take the address of the expression, store it
14979 in a temporary, and replace the expression with an INDIRECT_REF of
14980 that temporary. */
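/* I.e. EXPR roughly becomes "MEM_REF <tmp, 0>" after "tmp = &EXPR"
has been emitted to the pre-queue, with the reference type adjusted
to record the actual alignment of the object (sketch). */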
14981 tree ref_alias_type = reference_alias_ptr_type (*expr_p);
14982 unsigned int ref_align = get_object_alignment (*expr_p);
14983 tree ref_type = TREE_TYPE (*expr_p);
14984 tmp = build_fold_addr_expr_loc (input_location, *expr_p);
14985 gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
14986 if (TYPE_ALIGN (ref_type) != ref_align)
14987 ref_type = build_aligned_type (ref_type, ref_align);
14988 *expr_p = build2 (MEM_REF, ref_type,
14989 tmp, build_zero_cst (ref_alias_type));
14990 }
14991 else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
14992 {
14993 /* An rvalue will do. Assign the gimplified expression into a
14994 new temporary TMP and replace the original expression with
14995 TMP. First, make sure that the expression has a type so that
14996 it can be assigned into a temporary. */
14997 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
14998 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
14999 }
15000 else
15001 {
15002 #ifdef ENABLE_GIMPLE_CHECKING
15003 if (!(fallback & fb_mayfail))
15004 {
15005 fprintf (stderr, "gimplification failed:\n");
15006 print_generic_expr (stderr, *expr_p);
15007 debug_tree (*expr_p);
15008 internal_error ("gimplification failed");
15009 }
15010 #endif
15011 gcc_assert (fallback & fb_mayfail);
15012
15013 /* If this is an asm statement, and the user asked for the
15014 impossible, don't die. Fail and let gimplify_asm_expr
15015 issue an error. */
15016 ret = GS_ERROR;
15017 goto out;
15018 }
15019
15020 /* Make sure the temporary matches our predicate. */
15021 gcc_assert ((*gimple_test_f) (*expr_p));
15022
15023 if (!gimple_seq_empty_p (internal_post))
15024 {
15025 annotate_all_with_location (internal_post, input_location);
15026 gimplify_seq_add_seq (pre_p, internal_post);
15027 }
15028
15029 out:
15030 input_location = saved_location;
15031 return ret;
15032 }
15033
15034 /* Like gimplify_expr but make sure the gimplified result is not itself
15035 an SSA name (a decl is used instead, if need be). Temporaries required
15036 by evaluating *EXPR_P may still be SSA names. */
15037
15038 static enum gimplify_status
15039 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
15040 bool (*gimple_test_f) (tree), fallback_t fallback,
15041 bool allow_ssa)
15042 {
15043 bool was_ssa_name_p = TREE_CODE (*expr_p) == SSA_NAME;
15044 enum gimplify_status ret = gimplify_expr (expr_p, pre_p, post_p,
15045 gimple_test_f, fallback);
15046 if (! allow_ssa
15047 && TREE_CODE (*expr_p) == SSA_NAME)
15048 {
15049 tree name = *expr_p;
15050 if (was_ssa_name_p)
15051 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, NULL, false);
15052 else
15053 {
15054 /* Avoid the extra copy if possible. */
15055 *expr_p = create_tmp_reg (TREE_TYPE (name));
15056 if (!gimple_nop_p (SSA_NAME_DEF_STMT (name)))
15057 gimple_set_lhs (SSA_NAME_DEF_STMT (name), *expr_p);
15058 release_ssa_name (name);
15059 }
15060 }
15061 return ret;
15062 }
15063
15064 /* Look through TYPE for variable-sized objects and gimplify each such
15065 size that we find. Add to LIST_P any statements generated. */
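/* For example (a sketch): given the C99 VLA type of "int a[n + 1]",
the size expressions hanging off the type ("n + 1" scaled to bytes,
and so on) are gimplified here, so statements computing them into
temporaries land on LIST_P and the type records gimple-valid sizes. */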
15066
15067 void
15068 gimplify_type_sizes (tree type, gimple_seq *list_p)
15069 {
15070 tree field, t;
15071
15072 if (type == NULL || type == error_mark_node)
15073 return;
15074
15075 /* We first do the main variant, then copy into any other variants. */
15076 type = TYPE_MAIN_VARIANT (type);
15077
15078 /* Avoid infinite recursion. */
15079 if (TYPE_SIZES_GIMPLIFIED (type))
15080 return;
15081
15082 TYPE_SIZES_GIMPLIFIED (type) = 1;
15083
15084 switch (TREE_CODE (type))
15085 {
15086 case INTEGER_TYPE:
15087 case ENUMERAL_TYPE:
15088 case BOOLEAN_TYPE:
15089 case REAL_TYPE:
15090 case FIXED_POINT_TYPE:
15091 gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
15092 gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);
15093
15094 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
15095 {
15096 TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
15097 TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
15098 }
15099 break;
15100
15101 case ARRAY_TYPE:
15102 /* These types may not have declarations, so handle them here. */
15103 gimplify_type_sizes (TREE_TYPE (type), list_p);
15104 gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
15105 /* Ensure VLA bounds aren't removed: for -O0 they should be variables
15106 with assigned stack slots, and for -O1+ with -g they should be
15107 tracked by VTA. */
15108 if (!(TYPE_NAME (type)
15109 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
15110 && DECL_IGNORED_P (TYPE_NAME (type)))
15111 && TYPE_DOMAIN (type)
15112 && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
15113 {
15114 t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
15115 if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
15116 DECL_IGNORED_P (t) = 0;
15117 t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
15118 if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
15119 DECL_IGNORED_P (t) = 0;
15120 }
15121 break;
15122
15123 case RECORD_TYPE:
15124 case UNION_TYPE:
15125 case QUAL_UNION_TYPE:
15126 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
15127 if (TREE_CODE (field) == FIELD_DECL)
15128 {
15129 gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
15130 gimplify_one_sizepos (&DECL_SIZE (field), list_p);
15131 gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
15132 gimplify_type_sizes (TREE_TYPE (field), list_p);
15133 }
15134 break;
15135
15136 case POINTER_TYPE:
15137 case REFERENCE_TYPE:
15138 /* We used to recurse on the pointed-to type here, which turned out to
15139 be incorrect because its definition might refer to variables not
15140 yet initialized at this point if a forward declaration is involved.
15141
15142 It was actually useful for anonymous pointed-to types to ensure
15143 that the sizes evaluation dominates every possible later use of the
15144 values. Restricting to such types here would be safe since there
15145 is no possible forward declaration around, but would introduce an
15146 undesirable middle-end semantic to anonymity. We then defer to
15147 front-ends the responsibility of ensuring that the sizes are
15148 evaluated both early and late enough, e.g. by attaching artificial
15149 type declarations to the tree. */
15150 break;
15151
15152 default:
15153 break;
15154 }
15155
15156 gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
15157 gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);
15158
15159 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
15160 {
15161 TYPE_SIZE (t) = TYPE_SIZE (type);
15162 TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
15163 TYPE_SIZES_GIMPLIFIED (t) = 1;
15164 }
15165 }
15166
15167 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
15168 a size or position, has had all of its SAVE_EXPRs evaluated.
15169 We add any required statements to *STMT_P. */
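/* E.g. a SAVE_EXPR-wrapped field size in a variably-sized record is
unshared and evaluated into an rvalue here; if it gimplifies to a
constant it is still forced into a temporary VAR_DECL (sketch). */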
15170
15171 void
15172 gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
15173 {
15174 tree expr = *expr_p;
15175
15176 /* We don't do anything if the value isn't there, is constant, or contains
15177 a PLACEHOLDER_EXPR. We also don't want to do anything if it's already
15178 a VAR_DECL. If it's a VAR_DECL from another function, the gimplifier
15179 will want to replace it with a new variable, but that will cause problems
15180 if this type is from outside the function. It's OK to have that here. */
15181 if (expr == NULL_TREE
15182 || is_gimple_constant (expr)
15183 || TREE_CODE (expr) == VAR_DECL
15184 || CONTAINS_PLACEHOLDER_P (expr))
15185 return;
15186
15187 *expr_p = unshare_expr (expr);
15188
15189 /* SSA names in decl/type fields are a bad idea - they'll get reclaimed
15190 if the def vanishes. */
15191 gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue, false);
15192
15193 /* If expr wasn't already is_gimple_sizepos or is_gimple_constant from the
15194 FE, ensure that it is a VAR_DECL, otherwise we might handle some decls
15195 as gimplify_vla_decl even when they would have all sizes INTEGER_CSTs. */
15196 if (is_gimple_constant (*expr_p))
15197 *expr_p = get_initialized_tmp_var (*expr_p, stmt_p, NULL, false);
15198 }
15199
15200 /* Gimplify the body of statements of FNDECL and return a GIMPLE_BIND node
15201 containing the sequence of corresponding GIMPLE statements. If DO_PARMS
15202 is true, also gimplify the parameters. */
15203
15204 gbind *
15205 gimplify_body (tree fndecl, bool do_parms)
15206 {
15207 location_t saved_location = input_location;
15208 gimple_seq parm_stmts, parm_cleanup = NULL, seq;
15209 gimple *outer_stmt;
15210 gbind *outer_bind;
15211
15212 timevar_push (TV_TREE_GIMPLIFY);
15213
15214 init_tree_ssa (cfun);
15215
15216 /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
15217 gimplification. */
15218 default_rtl_profile ();
15219
15220 gcc_assert (gimplify_ctxp == NULL);
15221 push_gimplify_context (true);
15222
15223 if (flag_openacc || flag_openmp)
15224 {
15225 gcc_assert (gimplify_omp_ctxp == NULL);
15226 if (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (fndecl)))
15227 gimplify_omp_ctxp = new_omp_context (ORT_IMPLICIT_TARGET);
15228 }
15229
15230 /* Unshare most shared trees in the body and in that of any nested functions.
15231 It would seem we don't have to do this for nested functions because
15232 they are supposed to be output and then the outer function gimplified
15233 first, but the g++ front end doesn't always do it that way. */
15234 unshare_body (fndecl);
15235 unvisit_body (fndecl);
15236
15237 /* Make sure input_location isn't set to something weird. */
15238 input_location = DECL_SOURCE_LOCATION (fndecl);
15239
15240 /* Resolve callee-copies. This has to be done before processing
15241 the body so that DECL_VALUE_EXPR gets processed correctly. */
15242 parm_stmts = do_parms ? gimplify_parameters (&parm_cleanup) : NULL;
15243
15244 /* Gimplify the function's body. */
15245 seq = NULL;
15246 gimplify_stmt (&DECL_SAVED_TREE (fndecl), &seq);
15247 outer_stmt = gimple_seq_first_nondebug_stmt (seq);
15248 if (!outer_stmt)
15249 {
15250 outer_stmt = gimple_build_nop ();
15251 gimplify_seq_add_stmt (&seq, outer_stmt);
15252 }
15253
15254 /* The body must contain exactly one statement, a GIMPLE_BIND. If this is
15255 not the case, wrap everything in a GIMPLE_BIND to make it so. */
15256 if (gimple_code (outer_stmt) == GIMPLE_BIND
15257 && (gimple_seq_first_nondebug_stmt (seq)
15258 == gimple_seq_last_nondebug_stmt (seq)))
15259 {
15260 outer_bind = as_a <gbind *> (outer_stmt);
15261 if (gimple_seq_first_stmt (seq) != outer_stmt
15262 || gimple_seq_last_stmt (seq) != outer_stmt)
15263 {
15264 /* If there are debug stmts before or after outer_stmt, move them
15265 inside of outer_bind body. */
15266 gimple_stmt_iterator gsi = gsi_for_stmt (outer_stmt, &seq);
15267 gimple_seq second_seq = NULL;
15268 if (gimple_seq_first_stmt (seq) != outer_stmt
15269 && gimple_seq_last_stmt (seq) != outer_stmt)
15270 {
15271 second_seq = gsi_split_seq_after (gsi);
15272 gsi_remove (&gsi, false);
15273 }
15274 else if (gimple_seq_first_stmt (seq) != outer_stmt)
15275 gsi_remove (&gsi, false);
15276 else
15277 {
15278 gsi_remove (&gsi, false);
15279 second_seq = seq;
15280 seq = NULL;
15281 }
15282 gimple_seq_add_seq_without_update (&seq,
15283 gimple_bind_body (outer_bind));
15284 gimple_seq_add_seq_without_update (&seq, second_seq);
15285 gimple_bind_set_body (outer_bind, seq);
15286 }
15287 }
15288 else
15289 outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);
15290
15291 DECL_SAVED_TREE (fndecl) = NULL_TREE;
15292
15293 /* If we had callee-copies statements, insert them at the beginning
15294 of the function and clear DECL_VALUE_EXPR_P on the parameters. */
15295 if (!gimple_seq_empty_p (parm_stmts))
15296 {
15297 tree parm;
15298
15299 gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
15300 if (parm_cleanup)
15301 {
15302 gtry *g = gimple_build_try (parm_stmts, parm_cleanup,
15303 GIMPLE_TRY_FINALLY);
15304 parm_stmts = NULL;
15305 gimple_seq_add_stmt (&parm_stmts, g);
15306 }
15307 gimple_bind_set_body (outer_bind, parm_stmts);
15308
15309 for (parm = DECL_ARGUMENTS (current_function_decl);
15310 parm; parm = DECL_CHAIN (parm))
15311 if (DECL_HAS_VALUE_EXPR_P (parm))
15312 {
15313 DECL_HAS_VALUE_EXPR_P (parm) = 0;
15314 DECL_IGNORED_P (parm) = 0;
15315 }
15316 }
15317
15318 if ((flag_openacc || flag_openmp || flag_openmp_simd)
15319 && gimplify_omp_ctxp)
15320 {
15321 delete_omp_context (gimplify_omp_ctxp);
15322 gimplify_omp_ctxp = NULL;
15323 }
15324
15325 pop_gimplify_context (outer_bind);
15326 gcc_assert (gimplify_ctxp == NULL);
15327
15328 if (flag_checking && !seen_error ())
15329 verify_gimple_in_seq (gimple_bind_body (outer_bind));
15330
15331 timevar_pop (TV_TREE_GIMPLIFY);
15332 input_location = saved_location;
15333
15334 return outer_bind;
15335 }
15336
15337 typedef char *char_p; /* For DEF_VEC_P. */
15338
15339 /* Return whether we should exclude FNDECL from instrumentation. */
15340
15341 static bool
15342 flag_instrument_functions_exclude_p (tree fndecl)
15343 {
15344 vec<char_p> *v;
15345
15346 v = (vec<char_p> *) flag_instrument_functions_exclude_functions;
15347 if (v && v->length () > 0)
15348 {
15349 const char *name;
15350 int i;
15351 char *s;
15352
15353 name = lang_hooks.decl_printable_name (fndecl, 1);
15354 FOR_EACH_VEC_ELT (*v, i, s)
15355 if (strstr (name, s) != NULL)
15356 return true;
15357 }
15358
15359 v = (vec<char_p> *) flag_instrument_functions_exclude_files;
15360 if (v && v->length () > 0)
15361 {
15362 const char *name;
15363 int i;
15364 char *s;
15365
15366 name = DECL_SOURCE_FILE (fndecl);
15367 FOR_EACH_VEC_ELT (*v, i, s)
15368 if (strstr (name, s) != NULL)
15369 return true;
15370 }
15371
15372 return false;
15373 }
15374
15375 /* Entry point to the gimplification pass. FNDECL is the FUNCTION_DECL
15376 node for the function we want to gimplify.
15377
15378 Return the sequence of GIMPLE statements corresponding to the body
15379 of FNDECL. */
15380
15381 void
15382 gimplify_function_tree (tree fndecl)
15383 {
15384 gimple_seq seq;
15385 gbind *bind;
15386
15387 gcc_assert (!gimple_body (fndecl));
15388
15389 if (DECL_STRUCT_FUNCTION (fndecl))
15390 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
15391 else
15392 push_struct_function (fndecl);
15393
15394 /* Tentatively set PROP_gimple_lva here, and reset it in gimplify_va_arg_expr
15395 if necessary. */
15396 cfun->curr_properties |= PROP_gimple_lva;
15397
15398 if (asan_sanitize_use_after_scope ())
15399 asan_poisoned_variables = new hash_set<tree> ();
15400 bind = gimplify_body (fndecl, true);
15401 if (asan_poisoned_variables)
15402 {
15403 delete asan_poisoned_variables;
15404 asan_poisoned_variables = NULL;
15405 }
15406
15407 /* The tree body of the function is no longer needed, replace it
15408 with the new GIMPLE body. */
15409 seq = NULL;
15410 gimple_seq_add_stmt (&seq, bind);
15411 gimple_set_body (fndecl, seq);
15412
15413 /* If we're instrumenting function entry/exit, then prepend the call to
15414 the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
15415 catch the exit hook. */
15416 /* ??? Add some way to ignore exceptions for this TFE. */
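/* Sketch of the instrumented shape, in pseudo-GIMPLE:
t1 = __builtin_return_address (0);
__cyg_profile_func_enter (f, t1);
try
<original body>
finally
t2 = __builtin_return_address (0);
__cyg_profile_func_exit (f, t2); */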
15417 if (flag_instrument_function_entry_exit
15418 && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
15419 /* Do not instrument extern inline functions. */
15420 && !(DECL_DECLARED_INLINE_P (fndecl)
15421 && DECL_EXTERNAL (fndecl)
15422 && DECL_DISREGARD_INLINE_LIMITS (fndecl))
15423 && !flag_instrument_functions_exclude_p (fndecl))
15424 {
15425 tree x;
15426 gbind *new_bind;
15427 gimple *tf;
15428 gimple_seq cleanup = NULL, body = NULL;
15429 tree tmp_var, this_fn_addr;
15430 gcall *call;
15431
15432 /* The instrumentation hooks aren't going to call the instrumented
15433 function and the address they receive is expected to be matchable
15434 against symbol addresses. Make sure we don't create a trampoline,
15435 in case the current function is nested. */
15436 this_fn_addr = build_fold_addr_expr (current_function_decl);
15437 TREE_NO_TRAMPOLINE (this_fn_addr) = 1;
15438
15439 x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
15440 call = gimple_build_call (x, 1, integer_zero_node);
15441 tmp_var = create_tmp_var (ptr_type_node, "return_addr");
15442 gimple_call_set_lhs (call, tmp_var);
15443 gimplify_seq_add_stmt (&cleanup, call);
15444 x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_EXIT);
15445 call = gimple_build_call (x, 2, this_fn_addr, tmp_var);
15446 gimplify_seq_add_stmt (&cleanup, call);
15447 tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);
15448
15449 x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
15450 call = gimple_build_call (x, 1, integer_zero_node);
15451 tmp_var = create_tmp_var (ptr_type_node, "return_addr");
15452 gimple_call_set_lhs (call, tmp_var);
15453 gimplify_seq_add_stmt (&body, call);
15454 x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_ENTER);
15455 call = gimple_build_call (x, 2, this_fn_addr, tmp_var);
15456 gimplify_seq_add_stmt (&body, call);
15457 gimplify_seq_add_stmt (&body, tf);
15458 new_bind = gimple_build_bind (NULL, body, NULL);
15459
15460 /* Replace the current function body with the body
15461 wrapped in the try/finally TF. */
15462 seq = NULL;
15463 gimple_seq_add_stmt (&seq, new_bind);
15464 gimple_set_body (fndecl, seq);
15465 bind = new_bind;
15466 }
15467
15468 if (sanitize_flags_p (SANITIZE_THREAD)
15469 && param_tsan_instrument_func_entry_exit)
15470 {
15471 gcall *call = gimple_build_call_internal (IFN_TSAN_FUNC_EXIT, 0);
15472 gimple *tf = gimple_build_try (seq, call, GIMPLE_TRY_FINALLY);
15473 gbind *new_bind = gimple_build_bind (NULL, tf, NULL);
15474 /* Replace the current function body with the body
15475 wrapped in the try/finally TF. */
15476 seq = NULL;
15477 gimple_seq_add_stmt (&seq, new_bind);
15478 gimple_set_body (fndecl, seq);
15479 }
15480
15481 DECL_SAVED_TREE (fndecl) = NULL_TREE;
15482 cfun->curr_properties |= PROP_gimple_any;
15483
15484 pop_cfun ();
15485
15486 dump_function (TDI_gimple, fndecl);
15487 }
15488
15489 /* Return a dummy expression of type TYPE in order to keep going after an
15490 error. */
15491
15492 static tree
15493 dummy_object (tree type)
15494 {
15495 tree t = build_int_cst (build_pointer_type (type), 0);
15496 return build2 (MEM_REF, type, t, t);
15497 }
15498
15499 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
15500 builtin function, but a very special sort of operator. */
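/* Sketch of the result: "va_arg (ap, T)" is rewritten as an
IFN_VA_ARG internal call taking the va_list plus two null-pointer
type tags; PROP_gimple_lva is cleared below so the pass manager
knows the call must still be expanded after inlining. */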
15501
15502 enum gimplify_status
15503 gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p,
15504 gimple_seq *post_p ATTRIBUTE_UNUSED)
15505 {
15506 tree promoted_type, have_va_type;
15507 tree valist = TREE_OPERAND (*expr_p, 0);
15508 tree type = TREE_TYPE (*expr_p);
15509 tree t, tag, aptag;
15510 location_t loc = EXPR_LOCATION (*expr_p);
15511
15512 /* Verify that valist is of the proper type. */
15513 have_va_type = TREE_TYPE (valist);
15514 if (have_va_type == error_mark_node)
15515 return GS_ERROR;
15516 have_va_type = targetm.canonical_va_list_type (have_va_type);
15517 if (have_va_type == NULL_TREE
15518 && POINTER_TYPE_P (TREE_TYPE (valist)))
15519 /* Handle 'Case 1: Not an array type' from c-common.c/build_va_arg. */
15520 have_va_type
15521 = targetm.canonical_va_list_type (TREE_TYPE (TREE_TYPE (valist)));
15522 gcc_assert (have_va_type != NULL_TREE);
15523
15524 /* Generate a diagnostic for requesting data of a type that cannot
15525 be passed through `...' due to type promotion at the call site. */
15526 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
15527 != type)
15528 {
15529 static bool gave_help;
15530 bool warned;
15531 /* Use the expansion point to handle cases such as passing bool (defined
15532 in a system header) through `...'. */
15533 location_t xloc
15534 = expansion_point_location_if_in_system_header (loc);
15535
15536 /* Unfortunately, this is merely undefined, rather than a constraint
15537 violation, so we cannot make this an error. If this call is never
15538 executed, the program is still strictly conforming. */
15539 auto_diagnostic_group d;
15540 warned = warning_at (xloc, 0,
15541 "%qT is promoted to %qT when passed through %<...%>",
15542 type, promoted_type);
15543 if (!gave_help && warned)
15544 {
15545 gave_help = true;
15546 inform (xloc, "(so you should pass %qT not %qT to %<va_arg%>)",
15547 promoted_type, type);
15548 }
15549
15550 /* We can, however, treat "undefined" any way we please.
15551 Emit a trap to encourage the user to fix the program. */
15552 if (warned)
15553 inform (xloc, "if this code is reached, the program will abort");
15554 /* Before the abort, allow the evaluation of the va_list
15555 expression to exit or longjmp. */
15556 gimplify_and_add (valist, pre_p);
15557 t = build_call_expr_loc (loc,
15558 builtin_decl_implicit (BUILT_IN_TRAP), 0);
15559 gimplify_and_add (t, pre_p);
15560
15561 /* This is dead code, but go ahead and finish so that the
15562 mode of the result comes out right. */
15563 *expr_p = dummy_object (type);
15564 return GS_ALL_DONE;
15565 }
15566
15567 tag = build_int_cst (build_pointer_type (type), 0);
15568 aptag = build_int_cst (TREE_TYPE (valist), 0);
15569
15570 *expr_p = build_call_expr_internal_loc (loc, IFN_VA_ARG, type, 3,
15571 valist, tag, aptag);
15572
15573 /* Clear the tentatively set PROP_gimple_lva, to indicate that IFN_VA_ARG
15574 needs to be expanded. */
15575 cfun->curr_properties &= ~PROP_gimple_lva;
15576
15577 return GS_OK;
15578 }
15579
15580 /* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.
15581
15582 DST/SRC are the destination and source respectively. You can pass
15583 ungimplified trees in DST or SRC, in which case they will be
15584 converted to a gimple operand if necessary.
15585
15586 This function returns the newly created GIMPLE_ASSIGN tuple. */
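/* Typical use (sketch):
gimplify_assign (tmp, expr, pre_p);
appends "tmp = expr" to *PRE_P, gimplifying EXPR first if it is not
yet a valid GIMPLE operand. */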
15587
15588 gimple *
15589 gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
15590 {
15591 tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
15592 gimplify_and_add (t, seq_p);
15593 ggc_free (t);
15594 return gimple_seq_last_stmt (*seq_p);
15595 }
15596
15597 inline hashval_t
15598 gimplify_hasher::hash (const elt_t *p)
15599 {
15600 tree t = p->val;
15601 return iterative_hash_expr (t, 0);
15602 }
15603
15604 inline bool
15605 gimplify_hasher::equal (const elt_t *p1, const elt_t *p2)
15606 {
15607 tree t1 = p1->val;
15608 tree t2 = p2->val;
15609 enum tree_code code = TREE_CODE (t1);
15610
15611 if (TREE_CODE (t2) != code
15612 || TREE_TYPE (t1) != TREE_TYPE (t2))
15613 return false;
15614
15615 if (!operand_equal_p (t1, t2, 0))
15616 return false;
15617
15618 /* Only allow them to compare equal if they also hash equal; otherwise
15619 results are nondeterministic, and we fail bootstrap comparison. */
15620 gcc_checking_assert (hash (p1) == hash (p2));
15621
15622 return true;
15623 }