compile, runtime: permit anonymous and empty fields in C header
[gcc.git] / gcc / gimplify.c
1 /* Tree lowering pass. This pass converts the GENERIC functions-as-trees
2 tree representation into the GIMPLE form.
3 Copyright (C) 2002-2019 Free Software Foundation, Inc.
4 Major work done by Sebastian Pop <s.pop@laposte.net>,
5 Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "backend.h"
27 #include "target.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "memmodel.h"
31 #include "tm_p.h"
32 #include "gimple.h"
33 #include "gimple-predict.h"
34 #include "tree-pass.h" /* FIXME: only for PROP_gimple_any */
35 #include "ssa.h"
36 #include "cgraph.h"
37 #include "tree-pretty-print.h"
38 #include "diagnostic-core.h"
39 #include "alias.h"
40 #include "fold-const.h"
41 #include "calls.h"
42 #include "varasm.h"
43 #include "stmt.h"
44 #include "expr.h"
45 #include "gimple-fold.h"
46 #include "tree-eh.h"
47 #include "gimplify.h"
48 #include "gimple-iterator.h"
49 #include "stor-layout.h"
50 #include "print-tree.h"
51 #include "tree-iterator.h"
52 #include "tree-inline.h"
53 #include "langhooks.h"
54 #include "tree-cfg.h"
55 #include "tree-ssa.h"
56 #include "omp-general.h"
57 #include "omp-low.h"
58 #include "gimple-low.h"
59 #include "gomp-constants.h"
60 #include "splay-tree.h"
61 #include "gimple-walk.h"
62 #include "langhooks-def.h" /* FIXME: for lhd_set_decl_assembler_name */
63 #include "builtins.h"
64 #include "stringpool.h"
65 #include "attribs.h"
66 #include "asan.h"
67 #include "dbgcnt.h"
68 #include "omp-offload.h"
69 #include "context.h"
70
/* Hash set of poisoned variables in a bind expr.  NULL when empty/unused;
   NOTE(review): presumably tied to -fsanitize=address use-after-scope
   poisoning — the allocating code is outside this chunk.  */
static hash_set<tree> *asan_poisoned_variables = NULL;
73
/* Data-sharing / mapping flags for a variable inside an OMP region.
   These are OR-ed together into a bitmask, so each enumerator is a
   distinct bit (plus the GOVD_DATA_SHARE_CLASS mask at the end).  */
enum gimplify_omp_var_data
{
  GOVD_SEEN = 0x000001,
  GOVD_EXPLICIT = 0x000002,
  GOVD_SHARED = 0x000004,
  GOVD_PRIVATE = 0x000008,
  GOVD_FIRSTPRIVATE = 0x000010,
  GOVD_LASTPRIVATE = 0x000020,
  GOVD_REDUCTION = 0x000040,
  /* Normalized to six hex digits for consistency with the other flags
     (value unchanged: 0x00080 == 0x000080).  */
  GOVD_LOCAL = 0x000080,
  GOVD_MAP = 0x000100,
  GOVD_DEBUG_PRIVATE = 0x000200,
  GOVD_PRIVATE_OUTER_REF = 0x000400,
  GOVD_LINEAR = 0x000800,
  GOVD_ALIGNED = 0x001000,

  /* Flag for GOVD_MAP: don't copy back.  */
  GOVD_MAP_TO_ONLY = 0x002000,

  /* Flag for GOVD_LINEAR or GOVD_LASTPRIVATE: no outer reference.  */
  GOVD_LINEAR_LASTPRIVATE_NO_OUTER = 0x004000,

  GOVD_MAP_0LEN_ARRAY = 0x008000,

  /* Flag for GOVD_MAP, if it is always, to or always, tofrom mapping.  */
  GOVD_MAP_ALWAYS_TO = 0x010000,

  /* Flag for shared vars that are or might be stored to in the region.  */
  GOVD_WRITTEN = 0x020000,

  /* Flag for GOVD_MAP, if it is a forced mapping.  */
  GOVD_MAP_FORCE = 0x040000,

  /* Flag for GOVD_MAP: must be present already.  */
  GOVD_MAP_FORCE_PRESENT = 0x080000,

  /* Flag for GOVD_MAP: only allocate.  */
  GOVD_MAP_ALLOC_ONLY = 0x100000,

  /* Flag for GOVD_MAP: only copy back.  */
  GOVD_MAP_FROM_ONLY = 0x200000,

  GOVD_NONTEMPORAL = 0x400000,

  /* Flag for GOVD_LASTPRIVATE: conditional modifier.  */
  GOVD_LASTPRIVATE_CONDITIONAL = 0x800000,

  GOVD_CONDTEMP = 0x1000000,

  /* Flag for GOVD_REDUCTION: inscan seen in {in,ex}clusive clause.  */
  GOVD_REDUCTION_INSCAN = 0x2000000,

  /* Mask of the bits that classify a variable's data-sharing.  */
  GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
			   | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LINEAR
			   | GOVD_LOCAL)
};
130
131
/* Kind of OMP/OpenACC region being gimplified.  The low bits of some
   values act as modifiers on a base region kind (e.g. ORT_COMBINED_*,
   ORT_UNTIED_*), so related kinds can be tested with a mask.  */
enum omp_region_type
{
  ORT_WORKSHARE = 0x00,
  ORT_TASKGROUP = 0x01,
  ORT_SIMD = 0x04,

  ORT_PARALLEL = 0x08,
  ORT_COMBINED_PARALLEL = ORT_PARALLEL | 1,

  ORT_TASK = 0x10,
  ORT_UNTIED_TASK = ORT_TASK | 1,
  ORT_TASKLOOP = ORT_TASK | 2,
  ORT_UNTIED_TASKLOOP = ORT_UNTIED_TASK | 2,

  ORT_TEAMS = 0x20,
  ORT_COMBINED_TEAMS = ORT_TEAMS | 1,
  ORT_HOST_TEAMS = ORT_TEAMS | 2,
  ORT_COMBINED_HOST_TEAMS = ORT_COMBINED_TEAMS | 2,

  /* Data region.  */
  ORT_TARGET_DATA = 0x40,

  /* Data region with offloading.  */
  ORT_TARGET = 0x80,
  ORT_COMBINED_TARGET = ORT_TARGET | 1,
  ORT_IMPLICIT_TARGET = ORT_TARGET | 2,

  /* OpenACC variants: the ORT_ACC bit is combined with the target/data
     kinds above so code can test for "any ACC region" with one mask.  */
  ORT_ACC = 0x100,  /* A generic OpenACC region.  */
  ORT_ACC_DATA = ORT_ACC | ORT_TARGET_DATA,  /* Data construct.  */
  ORT_ACC_PARALLEL = ORT_ACC | ORT_TARGET,  /* Parallel construct */
  ORT_ACC_KERNELS = ORT_ACC | ORT_TARGET | 2,  /* Kernels construct.  */
  ORT_ACC_HOST_DATA = ORT_ACC | ORT_TARGET_DATA | 2,  /* Host data.  */

  /* Dummy OpenMP region, used to disable expansion of
     DECL_VALUE_EXPRs in taskloop pre body.  */
  ORT_NONE = 0x200
};
170
/* Gimplify hashtable helper: hashing/equality traits over elt_t (the
   val->temp pairs stored in gimplify_ctx::temp_htab).  Definitions of
   hash/equal are elsewhere in the file.  */

struct gimplify_hasher : free_ptr_hash <elt_t>
{
  static inline hashval_t hash (const elt_t *);
  static inline bool equal (const elt_t *, const elt_t *);
};
178
/* Per-function gimplification state.  Instances are pooled (see
   ctx_alloc/ctx_free) and chained through prev_context.  */
struct gimplify_ctx
{
  struct gimplify_ctx *prev_context;	/* Enclosing context, or NULL.  */

  vec<gbind *> bind_expr_stack;		/* Currently open GIMPLE_BINDs.  */
  tree temps;				/* Chain of temporaries created.  */
  gimple_seq conditional_cleanups;	/* Cleanups queued inside COND_EXPRs.  */
  tree exit_label;
  tree return_temp;

  vec<tree> case_labels;
  hash_set<tree> *live_switch_vars;	/* Vars live in the current switch.  */
  /* The formal temporary table.  Should this be persistent?  */
  hash_table<gimplify_hasher> *temp_htab;

  int conditions;			/* COND_EXPR nesting depth.  */
  unsigned into_ssa : 1;		/* Create SSA names for temporaries.  */
  unsigned allow_rhs_cond_expr : 1;	/* Permit COND_EXPR on RHS.  */
  unsigned in_cleanup_point_expr : 1;
  unsigned keep_stack : 1;
  unsigned save_stack : 1;
  unsigned in_switch_expr : 1;
};
202
/* Categories of variables for OpenMP defaultmap handling; used as
   indices into gimplify_omp_ctx::defaultmap.  */
enum gimplify_defaultmap_kind
{
  GDMK_SCALAR,
  GDMK_AGGREGATE,
  GDMK_ALLOCATABLE,
  GDMK_POINTER
};
210
/* Per-OMP-region gimplification state, chained through outer_context
   for nested regions.  Created by new_omp_context, destroyed by
   delete_omp_context.  */
struct gimplify_omp_ctx
{
  struct gimplify_omp_ctx *outer_context;  /* Enclosing region, or NULL.  */
  splay_tree variables;			/* DECL -> GOVD_* flags, keyed by UID.  */
  hash_set<tree> *privatized_types;
  tree clauses;
  /* Iteration variables in an OMP_FOR.  */
  vec<tree> loop_iter_var;
  location_t location;			/* Where the region's directive is.  */
  enum omp_clause_default_kind default_kind;
  enum omp_region_type region_type;
  bool combined_loop;
  bool distribute;
  bool target_firstprivatize_array_bases;
  bool add_safelen1;
  bool order_concurrent;
  int defaultmap[4];			/* Indexed by gimplify_defaultmap_kind.  */
};
229
/* The innermost gimplification context currently in effect.  */
static struct gimplify_ctx *gimplify_ctxp;
/* The innermost OMP/ACC region context, or NULL outside any region.  */
static struct gimplify_omp_ctx *gimplify_omp_ctxp;
static bool in_omp_construct;

/* Forward declaration.  */
static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
/* NOTE(review): used by OpenACC 'declare' handling; populated outside
   this chunk.  */
static hash_map<tree, tree> *oacc_declare_returns;
static enum gimplify_status gimplify_expr (tree *, gimple_seq *, gimple_seq *,
					   bool (*) (tree), fallback_t, bool);
239
/* Shorter alias name for the above function for use in gimplify.c
   only.  Appends GS to *SEQ_P without updating def/use information,
   which does not exist yet during gimplification.  */

static inline void
gimplify_seq_add_stmt (gimple_seq *seq_p, gimple *gs)
{
  gimple_seq_add_stmt_without_update (seq_p, gs);
}
248
249 /* Append sequence SRC to the end of sequence *DST_P. If *DST_P is
250 NULL, a new sequence is allocated. This function is
251 similar to gimple_seq_add_seq, but does not scan the operands.
252 During gimplification, we need to manipulate statement sequences
253 before the def/use vectors have been constructed. */
254
255 static void
256 gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
257 {
258 gimple_stmt_iterator si;
259
260 if (src == NULL)
261 return;
262
263 si = gsi_last (*dst_p);
264 gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
265 }
266
267
/* Pointer to a list of allocated gimplify_ctx structs to be used for pushing
   and popping gimplify contexts.  Freed structs are kept here (linked via
   prev_context) for reuse; see ctx_alloc/ctx_free/free_gimplify_stack.  */

static struct gimplify_ctx *ctx_pool = NULL;
272
273 /* Return a gimplify context struct from the pool. */
274
275 static inline struct gimplify_ctx *
276 ctx_alloc (void)
277 {
278 struct gimplify_ctx * c = ctx_pool;
279
280 if (c)
281 ctx_pool = c->prev_context;
282 else
283 c = XNEW (struct gimplify_ctx);
284
285 memset (c, '\0', sizeof (*c));
286 return c;
287 }
288
/* Put gimplify context C back into the pool.  C's contents are not
   cleared here; ctx_alloc zeroes a struct when it is reused.  */

static inline void
ctx_free (struct gimplify_ctx *c)
{
  c->prev_context = ctx_pool;
  ctx_pool = c;
}
297
298 /* Free allocated ctx stack memory. */
299
300 void
301 free_gimplify_stack (void)
302 {
303 struct gimplify_ctx *c;
304
305 while ((c = ctx_pool))
306 {
307 ctx_pool = c->prev_context;
308 free (c);
309 }
310 }
311
312
313 /* Set up a context for the gimplifier. */
314
315 void
316 push_gimplify_context (bool in_ssa, bool rhs_cond_ok)
317 {
318 struct gimplify_ctx *c = ctx_alloc ();
319
320 c->prev_context = gimplify_ctxp;
321 gimplify_ctxp = c;
322 gimplify_ctxp->into_ssa = in_ssa;
323 gimplify_ctxp->allow_rhs_cond_expr = rhs_cond_ok;
324 }
325
/* Tear down a context for the gimplifier.  If BODY is non-null, then
   put the temporaries into the outer BIND_EXPR.  Otherwise, put them
   in the local_decls.

   BODY is not a sequence, but the first tuple in a sequence.  */

void
pop_gimplify_context (gimple *body)
{
  struct gimplify_ctx *c = gimplify_ctxp;

  /* All GIMPLE_BINDs opened in this context must have been closed.  */
  gcc_assert (c
	      && (!c->bind_expr_stack.exists ()
		  || c->bind_expr_stack.is_empty ()));
  c->bind_expr_stack.release ();
  gimplify_ctxp = c->prev_context;

  if (body)
    declare_vars (c->temps, body, false);
  else
    record_vars (c->temps);

  /* The formal-temporary table is per-context; free it before the
     struct returns to the pool.  */
  delete c->temp_htab;
  c->temp_htab = NULL;
  ctx_free (c);
}
352
/* Push a GIMPLE_BIND tuple onto the stack of bindings.  The reserve
   pre-grows the vector (no-op once capacity exists) before pushing.  */

static void
gimple_push_bind_expr (gbind *bind_stmt)
{
  gimplify_ctxp->bind_expr_stack.reserve (8);
  gimplify_ctxp->bind_expr_stack.safe_push (bind_stmt);
}
361
/* Pop the first element off the stack of bindings (i.e. the most
   recently pushed, innermost GIMPLE_BIND).  */

static void
gimple_pop_bind_expr (void)
{
  gimplify_ctxp->bind_expr_stack.pop ();
}
369
/* Return the first element of the stack of bindings, i.e. the
   innermost enclosing GIMPLE_BIND.  The stack must be non-empty.  */

gbind *
gimple_current_bind_expr (void)
{
  return gimplify_ctxp->bind_expr_stack.last ();
}
377
/* Return the stack of bindings created during gimplification.
   Returned by value; the vec shares storage with the context's own
   stack, so callers must not release it.  */

vec<gbind *>
gimple_bind_expr_stack (void)
{
  return gimplify_ctxp->bind_expr_stack;
}
385
/* Return true iff there is a COND_EXPR between us and the innermost
   CLEANUP_POINT_EXPR.  This info is used by gimple_push_cleanup.  */

static bool
gimple_conditional_context (void)
{
  return gimplify_ctxp->conditions > 0;
}
394
/* Note that we've entered a COND_EXPR by bumping the nesting count.  */

static void
gimple_push_condition (void)
{
#ifdef ENABLE_GIMPLE_CHECKING
  /* On entry to the outermost conditional, no conditional cleanups may
     have been left behind by a previous conditional region.  */
  if (gimplify_ctxp->conditions == 0)
    gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
#endif
  ++(gimplify_ctxp->conditions);
}
406
407 /* Note that we've left a COND_EXPR. If we're back at unconditional scope
408 now, add any conditional cleanups we've seen to the prequeue. */
409
410 static void
411 gimple_pop_condition (gimple_seq *pre_p)
412 {
413 int conds = --(gimplify_ctxp->conditions);
414
415 gcc_assert (conds >= 0);
416 if (conds == 0)
417 {
418 gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
419 gimplify_ctxp->conditional_cleanups = NULL;
420 }
421 }
422
423 /* A stable comparison routine for use with splay trees and DECLs. */
424
425 static int
426 splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
427 {
428 tree a = (tree) xa;
429 tree b = (tree) xb;
430
431 return DECL_UID (a) - DECL_UID (b);
432 }
433
434 /* Create a new omp construct that deals with variable remapping. */
435
436 static struct gimplify_omp_ctx *
437 new_omp_context (enum omp_region_type region_type)
438 {
439 struct gimplify_omp_ctx *c;
440
441 c = XCNEW (struct gimplify_omp_ctx);
442 c->outer_context = gimplify_omp_ctxp;
443 c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
444 c->privatized_types = new hash_set<tree>;
445 c->location = input_location;
446 c->region_type = region_type;
447 if ((region_type & ORT_TASK) == 0)
448 c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
449 else
450 c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;
451 c->defaultmap[GDMK_SCALAR] = GOVD_MAP;
452 c->defaultmap[GDMK_AGGREGATE] = GOVD_MAP;
453 c->defaultmap[GDMK_ALLOCATABLE] = GOVD_MAP;
454 c->defaultmap[GDMK_POINTER] = GOVD_MAP;
455
456 return c;
457 }
458
/* Destroy an omp construct that deals with variable remapping,
   releasing the splay tree, the privatized-type set, the loop
   iteration-variable vector and the context struct itself.  */

static void
delete_omp_context (struct gimplify_omp_ctx *c)
{
  splay_tree_delete (c->variables);
  delete c->privatized_types;
  c->loop_iter_var.release ();
  XDELETE (c);
}
469
/* Forward declarations; definitions appear later in the file.  */
static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);
472
/* Both gimplify the statement T and append it to *SEQ_P.  This function
   behaves exactly as gimplify_stmt, but you don't have to pass T as a
   reference (the possibly-rewritten tree is discarded).  */

void
gimplify_and_add (tree t, gimple_seq *seq_p)
{
  gimplify_stmt (&t, seq_p);
}
482
/* Gimplify statement T into sequence *SEQ_P, and return the first
   tuple in the sequence of generated tuples for this statement.
   Return NULL if gimplifying T produced no tuples.  */

static gimple *
gimplify_and_return_first (tree t, gimple_seq *seq_p)
{
  /* Remember the old tail of *SEQ_P so the first newly generated
     statement can be located after gimplification.  */
  gimple_stmt_iterator last = gsi_last (*seq_p);

  gimplify_and_add (t, seq_p);

  if (!gsi_end_p (last))
    {
      /* *SEQ_P was non-empty: new statements begin just past the old
	 tail.  */
      gsi_next (&last);
      return gsi_stmt (last);
    }
  else
    /* *SEQ_P was empty, so any output starts at its head (NULL if the
       sequence is still empty).  */
    return gimple_seq_first_stmt (*seq_p);
}
502
503 /* Returns true iff T is a valid RHS for an assignment to an un-renamed
504 LHS, or for a call argument. */
505
506 static bool
507 is_gimple_mem_rhs (tree t)
508 {
509 /* If we're dealing with a renamable type, either source or dest must be
510 a renamed variable. */
511 if (is_gimple_reg_type (TREE_TYPE (t)))
512 return is_gimple_val (t);
513 else
514 return is_gimple_val (t) || is_gimple_lvalue (t);
515 }
516
517 /* Return true if T is a CALL_EXPR or an expression that can be
518 assigned to a temporary. Note that this predicate should only be
519 used during gimplification. See the rationale for this in
520 gimplify_modify_expr. */
521
522 static bool
523 is_gimple_reg_rhs_or_call (tree t)
524 {
525 return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
526 || TREE_CODE (t) == CALL_EXPR);
527 }
528
529 /* Return true if T is a valid memory RHS or a CALL_EXPR. Note that
530 this predicate should only be used during gimplification. See the
531 rationale for this in gimplify_modify_expr. */
532
533 static bool
534 is_gimple_mem_rhs_or_call (tree t)
535 {
536 /* If we're dealing with a renamable type, either source or dest must be
537 a renamed variable. */
538 if (is_gimple_reg_type (TREE_TYPE (t)))
539 return is_gimple_val (t);
540 else
541 return (is_gimple_val (t)
542 || is_gimple_lvalue (t)
543 || TREE_CLOBBER_P (t)
544 || TREE_CODE (t) == CALL_EXPR);
545 }
546
547 /* Create a temporary with a name derived from VAL. Subroutine of
548 lookup_tmp_var; nobody else should call this function. */
549
550 static inline tree
551 create_tmp_from_val (tree val)
552 {
553 /* Drop all qualifiers and address-space information from the value type. */
554 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (val));
555 tree var = create_tmp_var (type, get_name (val));
556 if (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
557 || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE)
558 DECL_GIMPLE_REG_P (var) = 1;
559 return var;
560 }
561
/* Create a temporary to hold the value of VAL.  If IS_FORMAL, try to reuse
   an existing expression temporary via the per-context hash table.  */

static tree
lookup_tmp_var (tree val, bool is_formal)
{
  tree ret;

  /* If not optimizing, never really reuse a temporary.  local-alloc
     won't allocate any variable that is used in more than one basic
     block, which means it will go into memory, causing much extra
     work in reload and final and poorer code generation, outweighing
     the extra memory allocation here.  */
  if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
    ret = create_tmp_from_val (val);
  else
    {
      elt_t elt, *elt_p;
      elt_t **slot;

      /* Probe the table with a stack-allocated key; the table is
	 created lazily on first use.  */
      elt.val = val;
      if (!gimplify_ctxp->temp_htab)
	gimplify_ctxp->temp_htab = new hash_table<gimplify_hasher> (1000);
      slot = gimplify_ctxp->temp_htab->find_slot (&elt, INSERT);
      if (*slot == NULL)
	{
	  /* First time we see VAL: create the temporary and record the
	     pairing for later reuse.  */
	  elt_p = XNEW (elt_t);
	  elt_p->val = val;
	  elt_p->temp = ret = create_tmp_from_val (val);
	  *slot = elt_p;
	}
      else
	{
	  /* Reuse the temporary made for an identical expression.  */
	  elt_p = *slot;
	  ret = elt_p->temp;
	}
    }

  return ret;
}
602
/* Helper for get_formal_tmp_var and get_initialized_tmp_var.
   Gimplify VAL, create a temporary (an SSA name when ALLOW_SSA and the
   context permits, otherwise a VAR_DECL via lookup_tmp_var), emit the
   initializing assignment into *PRE_P, and return the temporary.  */

static tree
internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
		      bool is_formal, bool allow_ssa)
{
  tree t, mod;

  /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
     can create an INIT_EXPR and convert it into a GIMPLE_CALL below.  */
  gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
		 fb_rvalue);

  if (allow_ssa
      && gimplify_ctxp->into_ssa
      && is_gimple_reg_type (TREE_TYPE (val)))
    {
      t = make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val)));
      if (! gimple_in_ssa_p (cfun))
	{
	  /* Outside SSA form proper, attach a name derived from VAL to
	     the new SSA name for readable dumps.  */
	  const char *name = get_name (val);
	  if (name)
	    SET_SSA_NAME_VAR_OR_IDENTIFIER (t, create_tmp_var_name (name));
	}
    }
  else
    t = lookup_tmp_var (val, is_formal);

  mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));

  SET_EXPR_LOCATION (mod, EXPR_LOC_OR_LOC (val, input_location));

  /* gimplify_modify_expr might want to reduce this further.  */
  gimplify_and_add (mod, pre_p);
  ggc_free (mod);

  return t;
}
641
/* Return a formal temporary variable initialized with VAL.  PRE_P is as
   in gimplify_expr.  Only use this function if:

   1) The value of the unfactored expression represented by VAL will not
   change between the initialization and use of the temporary, and
   2) The temporary will not be otherwise modified.

   For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
   and #2 means it is inappropriate for && temps.

   For other cases, use get_initialized_tmp_var instead.  */

tree
get_formal_tmp_var (tree val, gimple_seq *pre_p)
{
  /* is_formal=true enables reuse of an existing expression temporary;
     allow_ssa=true permits an SSA-name temporary when in SSA form.  */
  return internal_get_tmp_var (val, pre_p, NULL, true, true);
}
659
/* Return a temporary variable initialized with VAL.  PRE_P and POST_P
   are as in gimplify_expr.  Unlike get_formal_tmp_var, the temporary is
   never shared with another expression (is_formal=false).  */

tree
get_initialized_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
			 bool allow_ssa)
{
  return internal_get_tmp_var (val, pre_p, post_p, false, allow_ssa);
}
669
/* Declare all the variables in VARS in SCOPE.  If DEBUG_INFO is true,
   generate debug info for them; otherwise don't.  GS must be a
   GIMPLE_BIND.  VARS is consumed: it is reversed in place and chained
   onto the bind's variable list.  */

void
declare_vars (tree vars, gimple *gs, bool debug_info)
{
  tree last = vars;
  if (last)
    {
      tree temps, block;

      gbind *scope = as_a <gbind *> (gs);

      /* After nreverse, TEMPS is the reversed chain and LAST is its
	 final element (the original head).  */
      temps = nreverse (last);

      block = gimple_bind_block (scope);
      gcc_assert (!block || TREE_CODE (block) == BLOCK);
      if (!block || !debug_info)
	{
	  /* No BLOCK to attach to: just prepend TEMPS to the bind's
	     variable chain.  */
	  DECL_CHAIN (last) = gimple_bind_vars (scope);
	  gimple_bind_set_vars (scope, temps);
	}
      else
	{
	  /* We need to attach the nodes both to the BIND_EXPR and to its
	     associated BLOCK for debugging purposes.  The key point here
	     is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
	     is a subchain of the BIND_EXPR_VARS of the BIND_EXPR.  */
	  if (BLOCK_VARS (block))
	    BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
	  else
	    {
	      gimple_bind_set_vars (scope,
				    chainon (gimple_bind_vars (scope), temps));
	      BLOCK_VARS (block) = temps;
	    }
	}
    }
}
709
710 /* For VAR a VAR_DECL of variable size, try to find a constant upper bound
711 for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly. Abort if
712 no such upper bound can be obtained. */
713
714 static void
715 force_constant_size (tree var)
716 {
717 /* The only attempt we make is by querying the maximum size of objects
718 of the variable's type. */
719
720 HOST_WIDE_INT max_size;
721
722 gcc_assert (VAR_P (var));
723
724 max_size = max_int_size_in_bytes (TREE_TYPE (var));
725
726 gcc_assert (max_size >= 0);
727
728 DECL_SIZE_UNIT (var)
729 = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
730 DECL_SIZE (var)
731 = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
732 }
733
/* Push the temporary variable TMP into the binding of function FN,
   recording it in FN's local declarations.  TMP must not already be
   chained or seen in a bind expr.  */

void
gimple_add_tmp_var_fn (struct function *fn, tree tmp)
{
  gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));

  /* Later processing assumes that the object size is constant, which might
     not be true at this point.  Force the use of a constant upper bound in
     this case.  */
  if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp)))
    force_constant_size (tmp);

  DECL_CONTEXT (tmp) = fn->decl;
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;

  record_vars_into (tmp, fn->decl);
}
752
/* Push the temporary variable TMP into the current binding.  Depending
   on context, TMP is recorded in the gimplify context's temporaries, in
   the current function's locals, or declared in a nested function's
   outermost bind.  */

void
gimple_add_tmp_var (tree tmp)
{
  gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));

  /* Later processing assumes that the object size is constant, which might
     not be true at this point.  Force the use of a constant upper bound in
     this case.  */
  if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp)))
    force_constant_size (tmp);

  DECL_CONTEXT (tmp) = current_function_decl;
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;

  if (gimplify_ctxp)
    {
      /* Chain TMP onto the current gimplify context's temporaries.  */
      DECL_CHAIN (tmp) = gimplify_ctxp->temps;
      gimplify_ctxp->temps = tmp;

      /* Mark temporaries local within the nearest enclosing parallel.  */
      if (gimplify_omp_ctxp)
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  int flag = GOVD_LOCAL;
	  /* Walk outward past non-parallel region kinds to find the
	     context that should own TMP.  */
	  while (ctx
		 && (ctx->region_type == ORT_WORKSHARE
		     || ctx->region_type == ORT_TASKGROUP
		     || ctx->region_type == ORT_SIMD
		     || ctx->region_type == ORT_ACC))
	    {
	      /* Addressable non-static temporaries in a SIMD region are
		 privatized when their size is constant; otherwise the
		 region is marked to get safelen(1).  */
	      if (ctx->region_type == ORT_SIMD
		  && TREE_ADDRESSABLE (tmp)
		  && !TREE_STATIC (tmp))
		{
		  if (TREE_CODE (DECL_SIZE_UNIT (tmp)) != INTEGER_CST)
		    ctx->add_safelen1 = true;
		  else
		    flag = GOVD_PRIVATE;
		  break;
		}
	      ctx = ctx->outer_context;
	    }
	  if (ctx)
	    omp_add_variable (ctx, tmp, flag | GOVD_SEEN);
	}
    }
  else if (cfun)
    record_vars (tmp);
  else
    {
      gimple_seq body_seq;

      /* This case is for nested functions.  We need to expose the locals
	 they create.  */
      body_seq = gimple_body (current_function_decl);
      declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
    }
}
813
814
815 \f
816 /* This page contains routines to unshare tree nodes, i.e. to duplicate tree
817 nodes that are referenced more than once in GENERIC functions. This is
818 necessary because gimplification (translation into GIMPLE) is performed
819 by modifying tree nodes in-place, so gimplication of a shared node in a
820 first context could generate an invalid GIMPLE form in a second context.
821
822 This is achieved with a simple mark/copy/unmark algorithm that walks the
823 GENERIC representation top-down, marks nodes with TREE_VISITED the first
824 time it encounters them, duplicates them if they already have TREE_VISITED
825 set, and finally removes the TREE_VISITED marks it has set.
826
827 The algorithm works only at the function level, i.e. it generates a GENERIC
828 representation of a function with no nodes shared within the function when
829 passed a GENERIC function (except for nodes that are allowed to be shared).
830
831 At the global level, it is also necessary to unshare tree nodes that are
832 referenced in more than one function, for the same aforementioned reason.
833 This requires some cooperation from the front-end. There are 2 strategies:
834
835 1. Manual unsharing. The front-end needs to call unshare_expr on every
836 expression that might end up being shared across functions.
837
838 2. Deep unsharing. This is an extension of regular unsharing. Instead
839 of calling unshare_expr on expressions that might be shared across
840 functions, the front-end pre-marks them with TREE_VISITED. This will
841 ensure that they are unshared on the first reference within functions
842 when the regular unsharing algorithm runs. The counterpart is that
843 this algorithm must look deeper than for manual unsharing, which is
844 specified by LANG_HOOKS_DEEP_UNSHARING.
845
846 If there are only few specific cases of node sharing across functions, it is
847 probably easier for a front-end to unshare the expressions manually. On the
848 contrary, if the expressions generated at the global level are as widespread
849 as expressions generated within functions, deep unsharing is very likely the
850 way to go. */
851
/* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
   These nodes model computations that must be done once.  If we were to
   unshare something like SAVE_EXPR(i++), the gimplification process would
   create wrong code.  However, if DATA is non-null, it must hold a pointer
   set that is used to unshare the subtrees of these nodes.  */

static tree
mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
     copy their subtrees if we can make sure to do it only once.  */
  if (code == SAVE_EXPR || code == TARGET_EXPR || code == BIND_EXPR)
    {
      /* hash_set::add returns false on first insertion, so the subtree
	 walk proceeds only the first time T is encountered.  */
      if (data && !((hash_set<tree> *)data)->add (t))
	;
      else
	*walk_subtrees = 0;
    }

  /* Stop at types, decls, constants like copy_tree_r.  */
  else if (TREE_CODE_CLASS (code) == tcc_type
	   || TREE_CODE_CLASS (code) == tcc_declaration
	   || TREE_CODE_CLASS (code) == tcc_constant)
    *walk_subtrees = 0;

  /* Cope with the statement expression extension.  */
  else if (code == STATEMENT_LIST)
    ;

  /* Leave the bulk of the work to copy_tree_r itself.  */
  else
    copy_tree_r (tp, walk_subtrees, NULL);

  return NULL_TREE;
}
890
/* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
   If *TP has been visited already, then *TP is deeply copied by calling
   mostly_copy_tree_r.  DATA is passed to mostly_copy_tree_r unmodified.  */

static tree
copy_if_shared_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Skip types, decls, and constants.  But we do want to look at their
     types and the bounds of types.  Mark them as visited so we properly
     unmark their subtrees on the unmark pass.  If we've already seen them,
     don't look down further.  */
  if (TREE_CODE_CLASS (code) == tcc_type
      || TREE_CODE_CLASS (code) == tcc_declaration
      || TREE_CODE_CLASS (code) == tcc_constant)
    {
      if (TREE_VISITED (t))
	*walk_subtrees = 0;
      else
	TREE_VISITED (t) = 1;
    }

  /* If this node has been visited already, unshare it and don't look
     any deeper.  The nested walk_tree performs the actual copy.  */
  else if (TREE_VISITED (t))
    {
      walk_tree (tp, mostly_copy_tree_r, data, NULL);
      *walk_subtrees = 0;
    }

  /* Otherwise, mark the node as visited and keep looking.  */
  else
    TREE_VISITED (t) = 1;

  return NULL_TREE;
}
929
/* Unshare most of the shared trees rooted at *TP.  DATA is passed to the
   copy_if_shared_r callback unmodified (a hash_set<tree>* or NULL; see
   mostly_copy_tree_r).  */

static inline void
copy_if_shared (tree *tp, void *data)
{
  walk_tree (tp, copy_if_shared_r, data, NULL);
}
938
/* Unshare all the trees in the body of FNDECL, as well as in the bodies of
   any nested functions.  Covers the saved tree plus the size expressions
   of the result decl.  */

static void
unshare_body (tree fndecl)
{
  struct cgraph_node *cgn = cgraph_node::get (fndecl);
  /* If the language requires deep unsharing, we need a pointer set to make
     sure we don't repeatedly unshare subtrees of unshareable nodes.  */
  hash_set<tree> *visited
    = lang_hooks.deep_unsharing ? new hash_set<tree> : NULL;

  copy_if_shared (&DECL_SAVED_TREE (fndecl), visited);
  copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl)), visited);
  copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)), visited);

  delete visited;

  /* Recurse into nested functions known to the callgraph.  */
  if (cgn)
    for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
      unshare_body (cgn->decl);
}
961
962 /* Callback for walk_tree to unmark the visited trees rooted at *TP.
963 Subtrees are walked until the first unvisited node is encountered. */
964
965 static tree
966 unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
967 {
968 tree t = *tp;
969
970 /* If this node has been visited, unmark it and keep looking. */
971 if (TREE_VISITED (t))
972 TREE_VISITED (t) = 0;
973
974 /* Otherwise, don't look any deeper. */
975 else
976 *walk_subtrees = 0;
977
978 return NULL_TREE;
979 }
980
/* Unmark the visited trees rooted at *TP (clear TREE_VISITED set by
   the copy_if_shared pass).  */

static inline void
unmark_visited (tree *tp)
{
  walk_tree (tp, unmark_visited_r, NULL, NULL);
}
988
/* Likewise, but mark all trees as not visited: clear TREE_VISITED on
   FNDECL's body and result-decl sizes, and on any nested functions.  */

static void
unvisit_body (tree fndecl)
{
  struct cgraph_node *cgn = cgraph_node::get (fndecl);

  unmark_visited (&DECL_SAVED_TREE (fndecl));
  unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl)));
  unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)));

  /* Recurse into nested functions known to the callgraph.  */
  if (cgn)
    for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
      unvisit_body (cgn->decl);
}
1004
/* Unconditionally make an unshared copy of EXPR.  This is used when using
   stored expressions which span multiple functions, such as BINFO_VTABLE,
   as the normal unsharing process can't tell that they're shared.  */

tree
unshare_expr (tree expr)
{
  /* walk_tree rewrites EXPR in place via the &expr pointer.  */
  walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
  return expr;
}
1015
1016 /* Worker for unshare_expr_without_location. */
1017
1018 static tree
1019 prune_expr_location (tree *tp, int *walk_subtrees, void *)
1020 {
1021 if (EXPR_P (*tp))
1022 SET_EXPR_LOCATION (*tp, UNKNOWN_LOCATION);
1023 else
1024 *walk_subtrees = 0;
1025 return NULL_TREE;
1026 }
1027
/* Similar to unshare_expr but also prune all expression locations
   from EXPR, so the copy carries no source positions.  */

tree
unshare_expr_without_location (tree expr)
{
  walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
  /* Only expressions can carry a location worth pruning.  */
  if (EXPR_P (expr))
    walk_tree (&expr, prune_expr_location, NULL, NULL);
  return expr;
}
1039
/* Return the EXPR_LOCATION of EXPR, if it (maybe recursively) has
   one, OR_ELSE otherwise.  The location of a STATEMENT_LIST
   comprising at least one DEBUG_BEGIN_STMT followed by exactly one
   EXPR is the location of the EXPR.  */

static location_t
rexpr_location (tree expr, location_t or_else = UNKNOWN_LOCATION)
{
  if (!expr)
    return or_else;

  if (EXPR_HAS_LOCATION (expr))
    return EXPR_LOCATION (expr);

  if (TREE_CODE (expr) != STATEMENT_LIST)
    return or_else;

  tree_stmt_iterator i = tsi_start (expr);

  /* Skip the leading run of DEBUG_BEGIN_STMTs; FOUND records that the
     list really starts with at least one.  */
  bool found = false;
  while (!tsi_end_p (i) && TREE_CODE (tsi_stmt (i)) == DEBUG_BEGIN_STMT)
    {
      found = true;
      tsi_next (&i);
    }

  /* Only the pattern "one or more DEBUG_BEGIN_STMTs then exactly one
     statement" qualifies; anything else yields OR_ELSE.  */
  if (!found || !tsi_one_before_end_p (i))
    return or_else;

  /* That single trailing statement may itself be such a list.  */
  return rexpr_location (tsi_stmt (i), or_else);
}
1071
1072 /* Return TRUE iff EXPR (maybe recursively) has a location; see
1073 rexpr_location for the potential recursion. */
1074
1075 static inline bool
1076 rexpr_has_location (tree expr)
1077 {
1078 return rexpr_location (expr) != UNKNOWN_LOCATION;
1079 }
1080
1081 \f
/* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
   contain statements and have a value.  Assign its value to a temporary
   and give it void_type_node.  Return the temporary, or NULL_TREE if
   WRAPPER was already void.

   If TEMP is non-NULL it must be an INIT_EXPR or MODIFY_EXPR whose RHS
   will be replaced by the wrapper's innermost value; otherwise a fresh
   "retval" temporary is created to capture the value.  */

tree
voidify_wrapper_expr (tree wrapper, tree temp)
{
  tree type = TREE_TYPE (wrapper);
  if (type && !VOID_TYPE_P (type))
    {
      tree *p;

      /* Set p to point to the body of the wrapper.  Loop until we find
	 something that isn't a wrapper.  */
      for (p = &wrapper; p && *p; )
	{
	  switch (TREE_CODE (*p))
	    {
	    case BIND_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      /* For a BIND_EXPR, the body is operand 1.  */
	      p = &BIND_EXPR_BODY (*p);
	      break;

	    case CLEANUP_POINT_EXPR:
	    case TRY_FINALLY_EXPR:
	    case TRY_CATCH_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      /* For these wrappers, the value-producing part is op0.  */
	      p = &TREE_OPERAND (*p, 0);
	      break;

	    case STATEMENT_LIST:
	      {
		/* The value of a statement list is its last statement;
		   an empty list has no value at all (P becomes NULL).  */
		tree_stmt_iterator i = tsi_last (*p);
		TREE_SIDE_EFFECTS (*p) = 1;
		TREE_TYPE (*p) = void_type_node;
		p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
	      }
	      break;

	    case COMPOUND_EXPR:
	      /* Advance to the last statement.  Set all container types to
		 void.  */
	      for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		}
	      break;

	    case TRANSACTION_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TRANSACTION_EXPR_BODY (*p);
	      break;

	    default:
	      /* Assume that any tree upon which voidify_wrapper_expr is
		 directly called is a wrapper, and that its body is op0.  */
	      if (p == &wrapper)
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		  p = &TREE_OPERAND (*p, 0);
		  break;
		}
	      goto out;
	    }
	}

    out:
      /* At this point P points at the innermost value-producing
	 expression, or is NULL/empty when there is no value.  */
      if (p == NULL || IS_EMPTY_STMT (*p))
	temp = NULL_TREE;
      else if (temp)
	{
	  /* The wrapper is on the RHS of an assignment that we're pushing
	     down.  */
	  gcc_assert (TREE_CODE (temp) == INIT_EXPR
		      || TREE_CODE (temp) == MODIFY_EXPR);
	  TREE_OPERAND (temp, 1) = *p;
	  *p = temp;
	}
      else
	{
	  /* No assignment was supplied; capture the value in a fresh
	     temporary so the wrapper itself can be made void.  */
	  temp = create_tmp_var (type, "retval");
	  *p = build2 (INIT_EXPR, type, temp, *p);
	}

      return temp;
    }

  return NULL_TREE;
}
1178
1179 /* Prepare calls to builtins to SAVE and RESTORE the stack as well as
1180 a temporary through which they communicate. */
1181
1182 static void
1183 build_stack_save_restore (gcall **save, gcall **restore)
1184 {
1185 tree tmp_var;
1186
1187 *save = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE), 0);
1188 tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
1189 gimple_call_set_lhs (*save, tmp_var);
1190
1191 *restore
1192 = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE),
1193 1, tmp_var);
1194 }
1195
1196 /* Generate IFN_ASAN_MARK call that poisons shadow of a for DECL variable. */
1197
1198 static tree
1199 build_asan_poison_call_expr (tree decl)
1200 {
1201 /* Do not poison variables that have size equal to zero. */
1202 tree unit_size = DECL_SIZE_UNIT (decl);
1203 if (zerop (unit_size))
1204 return NULL_TREE;
1205
1206 tree base = build_fold_addr_expr (decl);
1207
1208 return build_call_expr_internal_loc (UNKNOWN_LOCATION, IFN_ASAN_MARK,
1209 void_type_node, 3,
1210 build_int_cst (integer_type_node,
1211 ASAN_MARK_POISON),
1212 base, unit_size);
1213 }
1214
1215 /* Generate IFN_ASAN_MARK call that would poison or unpoison, depending
1216 on POISON flag, shadow memory of a DECL variable. The call will be
1217 put on location identified by IT iterator, where BEFORE flag drives
1218 position where the stmt will be put. */
1219
1220 static void
1221 asan_poison_variable (tree decl, bool poison, gimple_stmt_iterator *it,
1222 bool before)
1223 {
1224 tree unit_size = DECL_SIZE_UNIT (decl);
1225 tree base = build_fold_addr_expr (decl);
1226
1227 /* Do not poison variables that have size equal to zero. */
1228 if (zerop (unit_size))
1229 return;
1230
1231 /* It's necessary to have all stack variables aligned to ASAN granularity
1232 bytes. */
1233 if (DECL_ALIGN_UNIT (decl) <= ASAN_SHADOW_GRANULARITY)
1234 SET_DECL_ALIGN (decl, BITS_PER_UNIT * ASAN_SHADOW_GRANULARITY);
1235
1236 HOST_WIDE_INT flags = poison ? ASAN_MARK_POISON : ASAN_MARK_UNPOISON;
1237
1238 gimple *g
1239 = gimple_build_call_internal (IFN_ASAN_MARK, 3,
1240 build_int_cst (integer_type_node, flags),
1241 base, unit_size);
1242
1243 if (before)
1244 gsi_insert_before (it, g, GSI_NEW_STMT);
1245 else
1246 gsi_insert_after (it, g, GSI_NEW_STMT);
1247 }
1248
1249 /* Generate IFN_ASAN_MARK internal call that depending on POISON flag
1250 either poisons or unpoisons a DECL. Created statement is appended
1251 to SEQ_P gimple sequence. */
1252
1253 static void
1254 asan_poison_variable (tree decl, bool poison, gimple_seq *seq_p)
1255 {
1256 gimple_stmt_iterator it = gsi_last (*seq_p);
1257 bool before = false;
1258
1259 if (gsi_end_p (it))
1260 before = true;
1261
1262 asan_poison_variable (decl, poison, &it, before);
1263 }
1264
1265 /* Sort pair of VAR_DECLs A and B by DECL_UID. */
1266
1267 static int
1268 sort_by_decl_uid (const void *a, const void *b)
1269 {
1270 const tree *t1 = (const tree *)a;
1271 const tree *t2 = (const tree *)b;
1272
1273 int uid1 = DECL_UID (*t1);
1274 int uid2 = DECL_UID (*t2);
1275
1276 if (uid1 < uid2)
1277 return -1;
1278 else if (uid1 > uid2)
1279 return 1;
1280 else
1281 return 0;
1282 }
1283
1284 /* Generate IFN_ASAN_MARK internal call for all VARIABLES
1285 depending on POISON flag. Created statement is appended
1286 to SEQ_P gimple sequence. */
1287
1288 static void
1289 asan_poison_variables (hash_set<tree> *variables, bool poison, gimple_seq *seq_p)
1290 {
1291 unsigned c = variables->elements ();
1292 if (c == 0)
1293 return;
1294
1295 auto_vec<tree> sorted_variables (c);
1296
1297 for (hash_set<tree>::iterator it = variables->begin ();
1298 it != variables->end (); ++it)
1299 sorted_variables.safe_push (*it);
1300
1301 sorted_variables.qsort (sort_by_decl_uid);
1302
1303 unsigned i;
1304 tree var;
1305 FOR_EACH_VEC_ELT (sorted_variables, i, var)
1306 {
1307 asan_poison_variable (var, poison, seq_p);
1308
1309 /* Add use_after_scope_memory attribute for the variable in order
1310 to prevent re-written into SSA. */
1311 if (!lookup_attribute (ASAN_USE_AFTER_SCOPE_ATTRIBUTE,
1312 DECL_ATTRIBUTES (var)))
1313 DECL_ATTRIBUTES (var)
1314 = tree_cons (get_identifier (ASAN_USE_AFTER_SCOPE_ATTRIBUTE),
1315 integer_one_node,
1316 DECL_ATTRIBUTES (var));
1317 }
1318 }
1319
/* Gimplify a BIND_EXPR.  Just voidify and recurse.

   *EXPR_P is replaced by the BIND_EXPR's value temporary (returning
   GS_OK) when the expression had one, or NULL_TREE (GS_ALL_DONE)
   otherwise; the resulting GIMPLE_BIND is appended to *PRE_P.  */

static enum gimplify_status
gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree bind_expr = *expr_p;
  /* Save the context's stack flags; they are restored after the body is
     gimplified so each BIND_EXPR is handled independently.  */
  bool old_keep_stack = gimplify_ctxp->keep_stack;
  bool old_save_stack = gimplify_ctxp->save_stack;
  tree t;
  gbind *bind_stmt;
  gimple_seq body, cleanup;
  gcall *stack_save;
  location_t start_locus = 0, end_locus = 0;
  tree ret_clauses = NULL;

  tree temp = voidify_wrapper_expr (bind_expr, NULL);

  /* Mark variables seen in this bind expr.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
    {
      if (VAR_P (t))
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;

	  /* Mark variable as local.  */
	  if (ctx && ctx->region_type != ORT_NONE && !DECL_EXTERNAL (t))
	    {
	      if (! DECL_SEEN_IN_BIND_EXPR_P (t)
		  || splay_tree_lookup (ctx->variables,
					(splay_tree_key) t) == NULL)
		{
		  int flag = GOVD_LOCAL;
		  /* Addressable non-static locals in a SIMD region are
		     privatized when their size is constant; otherwise
		     force safelen(1) on the region.  */
		  if (ctx->region_type == ORT_SIMD
		      && TREE_ADDRESSABLE (t)
		      && !TREE_STATIC (t))
		    {
		      if (TREE_CODE (DECL_SIZE_UNIT (t)) != INTEGER_CST)
			ctx->add_safelen1 = true;
		      else
			flag = GOVD_PRIVATE;
		    }
		  omp_add_variable (ctx, t, flag | GOVD_SEEN);
		}
	      /* Static locals inside of target construct or offloaded
		 routines need to be "omp declare target".  */
	      if (TREE_STATIC (t))
		for (; ctx; ctx = ctx->outer_context)
		  if ((ctx->region_type & ORT_TARGET) != 0)
		    {
		      if (!lookup_attribute ("omp declare target",
					     DECL_ATTRIBUTES (t)))
			{
			  tree id = get_identifier ("omp declare target");
			  DECL_ATTRIBUTES (t)
			    = tree_cons (id, NULL_TREE, DECL_ATTRIBUTES (t));
			  varpool_node *node = varpool_node::get (t);
			  if (node)
			    {
			      node->offloadable = 1;
			      if (ENABLE_OFFLOADING && !DECL_EXTERNAL (t))
				{
				  g->have_offload = true;
				  if (!in_lto_p)
				    vec_safe_push (offload_vars, t);
				}
			    }
			}
		      break;
		    }
	    }

	  DECL_SEEN_IN_BIND_EXPR_P (t) = 1;

	  if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
	    cfun->has_local_explicit_reg_vars = true;
	}

      /* Preliminarily mark non-addressed complex variables as eligible
	 for promotion to gimple registers.  We'll transform their uses
	 as we find them.  */
      if ((TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
	   || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
	  && !TREE_THIS_VOLATILE (t)
	  && (VAR_P (t) && !DECL_HARD_REGISTER (t))
	  && !needs_to_live_in_memory (t))
	DECL_GIMPLE_REG_P (t) = 1;
    }

  bind_stmt = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
                                 BIND_EXPR_BLOCK (bind_expr));
  gimple_push_bind_expr (bind_stmt);

  /* Reset so we can detect stack save/alloca activity coming from this
     body; the saved values are merged back in below.  */
  gimplify_ctxp->keep_stack = false;
  gimplify_ctxp->save_stack = false;

  /* Gimplify the body into the GIMPLE_BIND tuple's body.  */
  body = NULL;
  gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
  gimple_bind_set_body (bind_stmt, body);

  /* Source location wise, the cleanup code (stack_restore and clobbers)
     belongs to the end of the block, so propagate what we have.  The
     stack_save operation belongs to the beginning of block, which we can
     infer from the bind_expr directly if the block has no explicit
     assignment.  */
  if (BIND_EXPR_BLOCK (bind_expr))
    {
      end_locus = BLOCK_SOURCE_END_LOCATION (BIND_EXPR_BLOCK (bind_expr));
      start_locus = BLOCK_SOURCE_LOCATION (BIND_EXPR_BLOCK (bind_expr));
    }
  if (start_locus == 0)
    start_locus = EXPR_LOCATION (bind_expr);

  cleanup = NULL;
  stack_save = NULL;

  /* If the code both contains VLAs and calls alloca, then we cannot reclaim
     the stack space allocated to the VLAs.  */
  if (gimplify_ctxp->save_stack && !gimplify_ctxp->keep_stack)
    {
      gcall *stack_restore;

      /* Save stack on entry and restore it on exit.  Add a try_finally
	 block to achieve this.  */
      build_stack_save_restore (&stack_save, &stack_restore);

      gimple_set_location (stack_save, start_locus);
      gimple_set_location (stack_restore, end_locus);

      gimplify_seq_add_stmt (&cleanup, stack_restore);
    }

  /* Add clobbers for all variables that go out of scope.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
    {
      if (VAR_P (t)
	  && !is_global_var (t)
	  && DECL_CONTEXT (t) == current_function_decl)
	{
	  if (!DECL_HARD_REGISTER (t)
	      && !TREE_THIS_VOLATILE (t)
	      && !DECL_HAS_VALUE_EXPR_P (t)
	      /* Only care for variables that have to be in memory.  Others
		 will be rewritten into SSA names, hence moved to the
		 top-level.  */
	      && !is_gimple_reg (t)
	      && flag_stack_reuse != SR_NONE)
	    {
	      tree clobber = build_clobber (TREE_TYPE (t));
	      gimple *clobber_stmt;
	      clobber_stmt = gimple_build_assign (t, clobber);
	      gimple_set_location (clobber_stmt, end_locus);
	      gimplify_seq_add_stmt (&cleanup, clobber_stmt);
	    }

	  /* Collect OpenACC "declare" clauses registered for this
	     variable; they are emitted as one OACC_DECLARE below.  */
	  if (flag_openacc && oacc_declare_returns != NULL)
	    {
	      tree *c = oacc_declare_returns->get (t);
	      if (c != NULL)
		{
		  if (ret_clauses)
		    OMP_CLAUSE_CHAIN (*c) = ret_clauses;

		  ret_clauses = *c;

		  oacc_declare_returns->remove (t);

		  if (oacc_declare_returns->is_empty ())
		    {
		      delete oacc_declare_returns;
		      oacc_declare_returns = NULL;
		    }
		}
	    }
	}

      /* Re-poison variables that were unpoisoned for this scope.  */
      if (asan_poisoned_variables != NULL
	  && asan_poisoned_variables->contains (t))
	{
	  asan_poisoned_variables->remove (t);
	  asan_poison_variable (t, true, &cleanup);
	}

      if (gimplify_ctxp->live_switch_vars != NULL
	  && gimplify_ctxp->live_switch_vars->contains (t))
	gimplify_ctxp->live_switch_vars->remove (t);
    }

  if (ret_clauses)
    {
      gomp_target *stmt;
      gimple_stmt_iterator si = gsi_start (cleanup);

      stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
				      ret_clauses);
      gsi_insert_seq_before_without_update (&si, stmt, GSI_NEW_STMT);
    }

  if (cleanup)
    {
      gtry *gs;
      gimple_seq new_body;

      /* Wrap the body in a try/finally running the cleanup (stack
	 restore, clobbers, poisoning) on every exit path.  */
      new_body = NULL;
      gs = gimple_build_try (gimple_bind_body (bind_stmt), cleanup,
			     GIMPLE_TRY_FINALLY);

      if (stack_save)
	gimplify_seq_add_stmt (&new_body, stack_save);
      gimplify_seq_add_stmt (&new_body, gs);
      gimple_bind_set_body (bind_stmt, new_body);
    }

  /* keep_stack propagates all the way up to the outermost BIND_EXPR.  */
  if (!gimplify_ctxp->keep_stack)
    gimplify_ctxp->keep_stack = old_keep_stack;
  gimplify_ctxp->save_stack = old_save_stack;

  gimple_pop_bind_expr ();

  gimplify_seq_add_stmt (pre_p, bind_stmt);

  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL_TREE;
  return GS_ALL_DONE;
}
1551
1552 /* Maybe add early return predict statement to PRE_P sequence. */
1553
1554 static void
1555 maybe_add_early_return_predict_stmt (gimple_seq *pre_p)
1556 {
1557 /* If we are not in a conditional context, add PREDICT statement. */
1558 if (gimple_conditional_context ())
1559 {
1560 gimple *predict = gimple_build_predict (PRED_TREE_EARLY_RETURN,
1561 NOT_TAKEN);
1562 gimplify_seq_add_stmt (pre_p, predict);
1563 }
1564 }
1565
1566 /* Gimplify a RETURN_EXPR. If the expression to be returned is not a
1567 GIMPLE value, it is assigned to a new temporary and the statement is
1568 re-written to return the temporary.
1569
1570 PRE_P points to the sequence where side effects that must happen before
1571 STMT should be stored. */
1572
1573 static enum gimplify_status
1574 gimplify_return_expr (tree stmt, gimple_seq *pre_p)
1575 {
1576 greturn *ret;
1577 tree ret_expr = TREE_OPERAND (stmt, 0);
1578 tree result_decl, result;
1579
1580 if (ret_expr == error_mark_node)
1581 return GS_ERROR;
1582
1583 if (!ret_expr
1584 || TREE_CODE (ret_expr) == RESULT_DECL)
1585 {
1586 maybe_add_early_return_predict_stmt (pre_p);
1587 greturn *ret = gimple_build_return (ret_expr);
1588 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1589 gimplify_seq_add_stmt (pre_p, ret);
1590 return GS_ALL_DONE;
1591 }
1592
1593 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
1594 result_decl = NULL_TREE;
1595 else
1596 {
1597 result_decl = TREE_OPERAND (ret_expr, 0);
1598
1599 /* See through a return by reference. */
1600 if (TREE_CODE (result_decl) == INDIRECT_REF)
1601 result_decl = TREE_OPERAND (result_decl, 0);
1602
1603 gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
1604 || TREE_CODE (ret_expr) == INIT_EXPR)
1605 && TREE_CODE (result_decl) == RESULT_DECL);
1606 }
1607
1608 /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
1609 Recall that aggregate_value_p is FALSE for any aggregate type that is
1610 returned in registers. If we're returning values in registers, then
1611 we don't want to extend the lifetime of the RESULT_DECL, particularly
1612 across another call. In addition, for those aggregates for which
1613 hard_function_value generates a PARALLEL, we'll die during normal
1614 expansion of structure assignments; there's special code in expand_return
1615 to handle this case that does not exist in expand_expr. */
1616 if (!result_decl)
1617 result = NULL_TREE;
1618 else if (aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
1619 {
1620 if (TREE_CODE (DECL_SIZE (result_decl)) != INTEGER_CST)
1621 {
1622 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl)))
1623 gimplify_type_sizes (TREE_TYPE (result_decl), pre_p);
1624 /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
1625 should be effectively allocated by the caller, i.e. all calls to
1626 this function must be subject to the Return Slot Optimization. */
1627 gimplify_one_sizepos (&DECL_SIZE (result_decl), pre_p);
1628 gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl), pre_p);
1629 }
1630 result = result_decl;
1631 }
1632 else if (gimplify_ctxp->return_temp)
1633 result = gimplify_ctxp->return_temp;
1634 else
1635 {
1636 result = create_tmp_reg (TREE_TYPE (result_decl));
1637
1638 /* ??? With complex control flow (usually involving abnormal edges),
1639 we can wind up warning about an uninitialized value for this. Due
1640 to how this variable is constructed and initialized, this is never
1641 true. Give up and never warn. */
1642 TREE_NO_WARNING (result) = 1;
1643
1644 gimplify_ctxp->return_temp = result;
1645 }
1646
1647 /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
1648 Then gimplify the whole thing. */
1649 if (result != result_decl)
1650 TREE_OPERAND (ret_expr, 0) = result;
1651
1652 gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);
1653
1654 maybe_add_early_return_predict_stmt (pre_p);
1655 ret = gimple_build_return (result);
1656 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1657 gimplify_seq_add_stmt (pre_p, ret);
1658
1659 return GS_ALL_DONE;
1660 }
1661
1662 /* Gimplify a variable-length array DECL. */
1663
1664 static void
1665 gimplify_vla_decl (tree decl, gimple_seq *seq_p)
1666 {
1667 /* This is a variable-sized decl. Simplify its size and mark it
1668 for deferred expansion. */
1669 tree t, addr, ptr_type;
1670
1671 gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
1672 gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);
1673
1674 /* Don't mess with a DECL_VALUE_EXPR set by the front-end. */
1675 if (DECL_HAS_VALUE_EXPR_P (decl))
1676 return;
1677
1678 /* All occurrences of this decl in final gimplified code will be
1679 replaced by indirection. Setting DECL_VALUE_EXPR does two
1680 things: First, it lets the rest of the gimplifier know what
1681 replacement to use. Second, it lets the debug info know
1682 where to find the value. */
1683 ptr_type = build_pointer_type (TREE_TYPE (decl));
1684 addr = create_tmp_var (ptr_type, get_name (decl));
1685 DECL_IGNORED_P (addr) = 0;
1686 t = build_fold_indirect_ref (addr);
1687 TREE_THIS_NOTRAP (t) = 1;
1688 SET_DECL_VALUE_EXPR (decl, t);
1689 DECL_HAS_VALUE_EXPR_P (decl) = 1;
1690
1691 t = build_alloca_call_expr (DECL_SIZE_UNIT (decl), DECL_ALIGN (decl),
1692 max_int_size_in_bytes (TREE_TYPE (decl)));
1693 /* The call has been built for a variable-sized object. */
1694 CALL_ALLOCA_FOR_VAR_P (t) = 1;
1695 t = fold_convert (ptr_type, t);
1696 t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
1697
1698 gimplify_and_add (t, seq_p);
1699 }
1700
1701 /* A helper function to be called via walk_tree. Mark all labels under *TP
1702 as being forced. To be called for DECL_INITIAL of static variables. */
1703
1704 static tree
1705 force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1706 {
1707 if (TYPE_P (*tp))
1708 *walk_subtrees = 0;
1709 if (TREE_CODE (*tp) == LABEL_DECL)
1710 {
1711 FORCED_LABEL (*tp) = 1;
1712 cfun->has_forced_label_in_static = 1;
1713 }
1714
1715 return NULL_TREE;
1716 }
1717
/* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
   and initialization explicit.  Side-effect statements (type-size
   gimplification, VLA allocation, ASAN poisoning, initialization) are
   appended to *SEQ_P; *STMT_P itself is consumed (set to NULL_TREE).  */

static enum gimplify_status
gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
{
  tree stmt = *stmt_p;
  tree decl = DECL_EXPR_DECL (stmt);

  *stmt_p = NULL_TREE;

  if (TREE_TYPE (decl) == error_mark_node)
    return GS_ERROR;

  /* Make sure the declared type's size expressions are gimplified.  */
  if ((TREE_CODE (decl) == TYPE_DECL
       || VAR_P (decl))
      && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
    {
      gimplify_type_sizes (TREE_TYPE (decl), seq_p);
      if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
	gimplify_type_sizes (TREE_TYPE (TREE_TYPE (decl)), seq_p);
    }

  /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
     in case its size expressions contain problematic nodes like CALL_EXPR.  */
  if (TREE_CODE (decl) == TYPE_DECL
      && DECL_ORIGINAL_TYPE (decl)
      && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl)))
    {
      gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl), seq_p);
      if (TREE_CODE (DECL_ORIGINAL_TYPE (decl)) == REFERENCE_TYPE)
	gimplify_type_sizes (TREE_TYPE (DECL_ORIGINAL_TYPE (decl)), seq_p);
    }

  if (VAR_P (decl) && !DECL_EXTERNAL (decl))
    {
      tree init = DECL_INITIAL (decl);
      bool is_vla = false;

      /* Variable-sized decls, and large decls under generic stack
	 checking, get explicit alloca-based allocation.  */
      if (TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
	  || (!TREE_STATIC (decl)
	      && flag_stack_check == GENERIC_STACK_CHECK
	      && compare_tree_int (DECL_SIZE_UNIT (decl),
				   STACK_CHECK_MAX_VAR_SIZE) > 0))
	{
	  gimplify_vla_decl (decl, seq_p);
	  is_vla = true;
	}

      /* Track the variable for use-after-scope poisoning; the decl is
	 unpoisoned here (start of scope) and re-poisoned when its
	 BIND_EXPR closes.  */
      if (asan_poisoned_variables
	  && !is_vla
	  && TREE_ADDRESSABLE (decl)
	  && !TREE_STATIC (decl)
	  && !DECL_HAS_VALUE_EXPR_P (decl)
	  && DECL_ALIGN (decl) <= MAX_SUPPORTED_STACK_ALIGNMENT
	  && dbg_cnt (asan_use_after_scope)
	  && !gimplify_omp_ctxp)
	{
	  asan_poisoned_variables->add (decl);
	  asan_poison_variable (decl, false, seq_p);
	  if (!DECL_ARTIFICIAL (decl) && gimplify_ctxp->live_switch_vars)
	    gimplify_ctxp->live_switch_vars->add (decl);
	}

      /* Some front ends do not explicitly declare all anonymous
	 artificial variables.  We compensate here by declaring the
	 variables, though it would be better if the front ends would
	 explicitly declare them.  */
      if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
	  && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
	gimple_add_tmp_var (decl);

      if (init && init != error_mark_node)
	{
	  if (!TREE_STATIC (decl))
	    {
	      /* Turn the initializer into an explicit INIT_EXPR and
		 gimplify it; the original tree is no longer needed.  */
	      DECL_INITIAL (decl) = NULL_TREE;
	      init = build2 (INIT_EXPR, void_type_node, decl, init);
	      gimplify_and_add (init, seq_p);
	      ggc_free (init);
	    }
	  else
	    /* We must still examine initializers for static variables
	       as they may contain a label address.  */
	    walk_tree (&init, force_labels_r, NULL, NULL);
	}
    }

  return GS_ALL_DONE;
}
1808
1809 /* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body
1810 and replacing the LOOP_EXPR with goto, but if the loop contains an
1811 EXIT_EXPR, we need to append a label for it to jump to. */
1812
1813 static enum gimplify_status
1814 gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
1815 {
1816 tree saved_label = gimplify_ctxp->exit_label;
1817 tree start_label = create_artificial_label (UNKNOWN_LOCATION);
1818
1819 gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));
1820
1821 gimplify_ctxp->exit_label = NULL_TREE;
1822
1823 gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);
1824
1825 gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));
1826
1827 if (gimplify_ctxp->exit_label)
1828 gimplify_seq_add_stmt (pre_p,
1829 gimple_build_label (gimplify_ctxp->exit_label));
1830
1831 gimplify_ctxp->exit_label = saved_label;
1832
1833 *expr_p = NULL;
1834 return GS_ALL_DONE;
1835 }
1836
1837 /* Gimplify a statement list onto a sequence. These may be created either
1838 by an enlightened front-end, or by shortcut_cond_expr. */
1839
1840 static enum gimplify_status
1841 gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
1842 {
1843 tree temp = voidify_wrapper_expr (*expr_p, NULL);
1844
1845 tree_stmt_iterator i = tsi_start (*expr_p);
1846
1847 while (!tsi_end_p (i))
1848 {
1849 gimplify_stmt (tsi_stmt_ptr (i), pre_p);
1850 tsi_delink (&i);
1851 }
1852
1853 if (temp)
1854 {
1855 *expr_p = temp;
1856 return GS_OK;
1857 }
1858
1859 return GS_ALL_DONE;
1860 }
1861
/* Callback for walk_gimple_seq used by maybe_warn_switch_unreachable.
   Records in WI->info the first "real" (warnable) statement found and
   stops the walk by returning non-NULL; returns NULL_TREE to keep
   walking.  */

static tree
warn_switch_unreachable_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
			   struct walk_stmt_info *wi)
{
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;
  switch (gimple_code (stmt))
    {
    case GIMPLE_TRY:
      /* A compiler-generated cleanup or a user-written try block.
	 If it's empty, don't dive into it--that would result in
	 worse location info.  */
      if (gimple_try_eval (stmt) == NULL)
	{
	  wi->info = stmt;
	  return integer_zero_node;
	}
      /* Fall through.  */
    case GIMPLE_BIND:
    case GIMPLE_CATCH:
    case GIMPLE_EH_FILTER:
    case GIMPLE_TRANSACTION:
      /* Walk the sub-statements.  */
      *handled_ops_p = false;
      break;

    case GIMPLE_DEBUG:
      /* Ignore these.  We may generate them before declarations that
	 are never executed.  If there's something to warn about,
	 there will be non-debug stmts too, and we'll catch those.  */
      break;

    case GIMPLE_CALL:
      /* ASAN_MARK calls are instrumentation, not user code; keep
	 walking past them.  */
      if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
	{
	  *handled_ops_p = false;
	  break;
	}
      /* Fall through.  */
    default:
      /* Save the first "real" statement (not a decl/lexical scope/...).  */
      wi->info = stmt;
      return integer_zero_node;
    }
  return NULL_TREE;
}
1911
1912 /* Possibly warn about unreachable statements between switch's controlling
1913 expression and the first case. SEQ is the body of a switch expression. */
1914
1915 static void
1916 maybe_warn_switch_unreachable (gimple_seq seq)
1917 {
1918 if (!warn_switch_unreachable
1919 /* This warning doesn't play well with Fortran when optimizations
1920 are on. */
1921 || lang_GNU_Fortran ()
1922 || seq == NULL)
1923 return;
1924
1925 struct walk_stmt_info wi;
1926 memset (&wi, 0, sizeof (wi));
1927 walk_gimple_seq (seq, warn_switch_unreachable_r, NULL, &wi);
1928 gimple *stmt = (gimple *) wi.info;
1929
1930 if (stmt && gimple_code (stmt) != GIMPLE_LABEL)
1931 {
1932 if (gimple_code (stmt) == GIMPLE_GOTO
1933 && TREE_CODE (gimple_goto_dest (stmt)) == LABEL_DECL
1934 && DECL_ARTIFICIAL (gimple_goto_dest (stmt)))
1935 /* Don't warn for compiler-generated gotos. These occur
1936 in Duff's devices, for example. */;
1937 else
1938 warning_at (gimple_location (stmt), OPT_Wswitch_unreachable,
1939 "statement will never be executed");
1940 }
1941 }
1942
1943
/* A label entry that pairs label and a location.  Used by the implicit
   switch-fallthrough detection below.  */
struct label_entry
{
  tree label;		/* The LABEL_DECL itself.  */
  location_t loc;	/* A location suitable for diagnostics about it.  */
};
1950
1951 /* Find LABEL in vector of label entries VEC. */
1952
1953 static struct label_entry *
1954 find_label_entry (const auto_vec<struct label_entry> *vec, tree label)
1955 {
1956 unsigned int i;
1957 struct label_entry *l;
1958
1959 FOR_EACH_VEC_ELT (*vec, i, l)
1960 if (l->label == label)
1961 return l;
1962 return NULL;
1963 }
1964
1965 /* Return true if LABEL, a LABEL_DECL, represents a case label
1966 in a vector of labels CASES. */
1967
1968 static bool
1969 case_label_p (const vec<tree> *cases, tree label)
1970 {
1971 unsigned int i;
1972 tree l;
1973
1974 FOR_EACH_VEC_ELT (*cases, i, l)
1975 if (CASE_LABEL (l) == label)
1976 return true;
1977 return false;
1978 }
1979
1980 /* Find the last nondebug statement in a scope STMT. */
1981
1982 static gimple *
1983 last_stmt_in_scope (gimple *stmt)
1984 {
1985 if (!stmt)
1986 return NULL;
1987
1988 switch (gimple_code (stmt))
1989 {
1990 case GIMPLE_BIND:
1991 {
1992 gbind *bind = as_a <gbind *> (stmt);
1993 stmt = gimple_seq_last_nondebug_stmt (gimple_bind_body (bind));
1994 return last_stmt_in_scope (stmt);
1995 }
1996
1997 case GIMPLE_TRY:
1998 {
1999 gtry *try_stmt = as_a <gtry *> (stmt);
2000 stmt = gimple_seq_last_nondebug_stmt (gimple_try_eval (try_stmt));
2001 gimple *last_eval = last_stmt_in_scope (stmt);
2002 if (gimple_stmt_may_fallthru (last_eval)
2003 && (last_eval == NULL
2004 || !gimple_call_internal_p (last_eval, IFN_FALLTHROUGH))
2005 && gimple_try_kind (try_stmt) == GIMPLE_TRY_FINALLY)
2006 {
2007 stmt = gimple_seq_last_nondebug_stmt (gimple_try_cleanup (try_stmt));
2008 return last_stmt_in_scope (stmt);
2009 }
2010 else
2011 return last_eval;
2012 }
2013
2014 case GIMPLE_DEBUG:
2015 gcc_unreachable ();
2016
2017 default:
2018 return stmt;
2019 }
2020 }
2021
/* Collect interesting labels in LABELS and return the statement preceding
   another case label, or a user-defined label.  Store a location useful
   to give warnings at *PREVLOC (usually the location of the returned
   statement or of its surrounding scope).  GSI_P is advanced to the
   stopping label (or the end of the sequence).  */

static gimple *
collect_fallthrough_labels (gimple_stmt_iterator *gsi_p,
			    auto_vec <struct label_entry> *labels,
			    location_t *prevloc)
{
  gimple *prev = NULL;

  *prevloc = UNKNOWN_LOCATION;
  do
    {
      if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND)
	{
	  /* Recognize the special GIMPLE_BIND added by gimplify_switch_expr,
	     which starts on a GIMPLE_SWITCH and ends with a break label.
	     Handle that as a single statement that can fall through.  */
	  gbind *bind = as_a <gbind *> (gsi_stmt (*gsi_p));
	  gimple *first = gimple_seq_first_stmt (gimple_bind_body (bind));
	  gimple *last = gimple_seq_last_stmt (gimple_bind_body (bind));
	  if (last
	      && gimple_code (first) == GIMPLE_SWITCH
	      && gimple_code (last) == GIMPLE_LABEL)
	    {
	      tree label = gimple_label_label (as_a <glabel *> (last));
	      if (SWITCH_BREAK_LABEL_P (label))
		{
		  prev = bind;
		  gsi_next (gsi_p);
		  continue;
		}
	    }
	}
      if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND
	  || gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_TRY)
	{
	  /* Nested scope.  Only look at the last statement of
	     the innermost scope.  */
	  location_t bind_loc = gimple_location (gsi_stmt (*gsi_p));
	  gimple *last = last_stmt_in_scope (gsi_stmt (*gsi_p));
	  if (last)
	    {
	      prev = last;
	      /* It might be a label without a location.  Use the
		 location of the scope then.  */
	      if (!gimple_has_location (prev))
		*prevloc = bind_loc;
	    }
	  gsi_next (gsi_p);
	  continue;
	}

      /* Ifs are tricky.  */
      if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_COND)
	{
	  gcond *cond_stmt = as_a <gcond *> (gsi_stmt (*gsi_p));
	  tree false_lab = gimple_cond_false_label (cond_stmt);
	  location_t if_loc = gimple_location (cond_stmt);

	  /* If we have e.g.
	       if (i > 1) goto <D.2259>; else goto D;
	     we can't do much with the else-branch.  */
	  if (!DECL_ARTIFICIAL (false_lab))
	    break;

	  /* Go on until the false label, then one step back.  */
	  for (; !gsi_end_p (*gsi_p); gsi_next (gsi_p))
	    {
	      gimple *stmt = gsi_stmt (*gsi_p);
	      if (gimple_code (stmt) == GIMPLE_LABEL
		  && gimple_label_label (as_a <glabel *> (stmt)) == false_lab)
		break;
	    }

	  /* Not found?  Oops.  */
	  if (gsi_end_p (*gsi_p))
	    break;

	  /* The false label is a potential fallthrough target; record it
	     with the location of the condition.  */
	  struct label_entry l = { false_lab, if_loc };
	  labels->safe_push (l);

	  /* Go to the last statement of the then branch.  */
	  gsi_prev (gsi_p);

	  /* if (i != 0) goto <D.1759>; else goto <D.1760>;
	     <D.1759>:
	     <stmt>;
	     goto <D.1761>;
	     <D.1760>:
	   */
	  if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_GOTO
	      && !gimple_has_location (gsi_stmt (*gsi_p)))
	    {
	      /* Look at the statement before, it might be
		 attribute fallthrough, in which case don't warn.  */
	      gsi_prev (gsi_p);
	      bool fallthru_before_dest
		= gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_FALLTHROUGH);
	      gsi_next (gsi_p);
	      tree goto_dest = gimple_goto_dest (gsi_stmt (*gsi_p));
	      if (!fallthru_before_dest)
		{
		  struct label_entry l = { goto_dest, if_loc };
		  labels->safe_push (l);
		}
	    }
	  /* And move back.  */
	  gsi_next (gsi_p);
	}

      /* Remember the last statement.  Skip labels that are of no interest
	 to us.  */
      if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
	{
	  tree label = gimple_label_label (as_a <glabel *> (gsi_stmt (*gsi_p)));
	  if (find_label_entry (labels, label))
	    prev = gsi_stmt (*gsi_p);
	}
      else if (gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_ASAN_MARK))
	;
      else if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_PREDICT)
	;
      else if (!is_gimple_debug (gsi_stmt (*gsi_p)))
	prev = gsi_stmt (*gsi_p);
      gsi_next (gsi_p);
    }
  while (!gsi_end_p (*gsi_p)
	 /* Stop if we find a case or a user-defined label.  */
	 && (gimple_code (gsi_stmt (*gsi_p)) != GIMPLE_LABEL
	     || !gimple_has_location (gsi_stmt (*gsi_p))));

  /* Prefer the precise location of the last real statement, when it
     has one.  */
  if (prev && gimple_has_location (prev))
    *prevloc = gimple_location (prev);
  return prev;
}
2160
/* Return true if the switch fallthrough warning should occur.  LABEL is
   the label statement that we're falling through to; GSI_P points at that
   label statement inside the gimplified switch body.  */

static bool
should_warn_for_implicit_fallthrough (gimple_stmt_iterator *gsi_p, tree label)
{
  gimple_stmt_iterator gsi = *gsi_p;

  /* Don't warn if the label is marked with a "falls through" comment.  */
  if (FALLTHROUGH_LABEL_P (label))
    return false;

  /* Don't warn for non-case labels followed by a statement:
       case 0:
	 foo ();
       label:
	 bar ();
     as these are likely intentional.  */
  if (!case_label_p (&gimplify_ctxp->case_labels, label))
    {
      tree l;
      /* Skip the run of user labels; if it does not end at a case label,
	 the fallthrough is into ordinary labeled code and we stay quiet.  */
      while (!gsi_end_p (gsi)
	     && gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
	     && (l = gimple_label_label (as_a <glabel *> (gsi_stmt (gsi))))
	     && !case_label_p (&gimplify_ctxp->case_labels, l))
	gsi_next_nondebug (&gsi);
      if (gsi_end_p (gsi) || gimple_code (gsi_stmt (gsi)) != GIMPLE_LABEL)
	return false;
    }

  /* Don't warn for terminated branches, i.e. when the subsequent case label
     immediately breaks.  */
  gsi = *gsi_p;

  /* Skip all immediately following labels (and branch-prediction hints).  */
  while (!gsi_end_p (gsi)
	 && (gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
	     || gimple_code (gsi_stmt (gsi)) == GIMPLE_PREDICT))
    gsi_next_nondebug (&gsi);

  /* { ... something; default:; } */
  if (gsi_end_p (gsi)
      /* { ... something; default: break; } or
	 { ... something; default: goto L; } */
      || gimple_code (gsi_stmt (gsi)) == GIMPLE_GOTO
      /* { ... something; default: return; } */
      || gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
    return false;

  return true;
}
2212
/* Callback for walk_gimple_seq.  Emits -Wimplicit-fallthrough diagnostics
   for label-to-label fallthrough inside switch bodies.  Returning a
   non-NULL value terminates the walk.  */

static tree
warn_implicit_fallthrough_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
			     struct walk_stmt_info *)
{
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;
  switch (gimple_code (stmt))
    {
    case GIMPLE_TRY:
    case GIMPLE_BIND:
    case GIMPLE_CATCH:
    case GIMPLE_EH_FILTER:
    case GIMPLE_TRANSACTION:
      /* Walk the sub-statements.  */
      *handled_ops_p = false;
      break;

    /* Find a sequence of form:

       GIMPLE_LABEL
       [...]
       <may fallthru stmt>
       GIMPLE_LABEL

       and possibly warn.  */
    case GIMPLE_LABEL:
      {
	/* Found a label.  Skip all immediately following labels.  */
	while (!gsi_end_p (*gsi_p)
	       && gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
	  gsi_next_nondebug (gsi_p);

	/* There might be no more statements.  */
	if (gsi_end_p (*gsi_p))
	  return integer_zero_node;

	/* Vector of labels that fall through.  */
	auto_vec <struct label_entry> labels;
	location_t prevloc;
	gimple *prev = collect_fallthrough_labels (gsi_p, &labels, &prevloc);

	/* There might be no more statements.  */
	if (gsi_end_p (*gsi_p))
	  return integer_zero_node;

	gimple *next = gsi_stmt (*gsi_p);
	tree label;
	/* If what follows is a label, then we may have a fallthrough.  */
	if (gimple_code (next) == GIMPLE_LABEL
	    && gimple_has_location (next)
	    && (label = gimple_label_label (as_a <glabel *> (next)))
	    && prev != NULL)
	  {
	    struct label_entry *l;
	    bool warned_p = false;
	    auto_diagnostic_group d;
	    if (!should_warn_for_implicit_fallthrough (gsi_p, label))
	      /* Quiet.  */;
	    else if (gimple_code (prev) == GIMPLE_LABEL
		     && (label = gimple_label_label (as_a <glabel *> (prev)))
		     && (l = find_label_entry (&labels, label)))
	      /* The last statement before the target label is itself one of
		 the fall-through labels: warn at the location recorded when
		 it was collected.  */
	      warned_p = warning_at (l->loc, OPT_Wimplicit_fallthrough_,
				     "this statement may fall through");
	    else if (!gimple_call_internal_p (prev, IFN_FALLTHROUGH)
		     /* Try to be clever and don't warn when the statement
			can't actually fall through.  */
		     && gimple_stmt_may_fallthru (prev)
		     && prevloc != UNKNOWN_LOCATION)
	      warned_p = warning_at (prevloc,
				     OPT_Wimplicit_fallthrough_,
				     "this statement may fall through");
	    if (warned_p)
	      inform (gimple_location (next), "here");

	    /* Mark this label as processed so as to prevent multiple
	       warnings in nested switches.  */
	    FALLTHROUGH_LABEL_P (label) = true;

	    /* So that next warn_implicit_fallthrough_r will start looking for
	       a new sequence starting with this label.  */
	    gsi_prev (gsi_p);
	  }
      }
      break;
    default:
      break;
    }
  return NULL_TREE;
}
2305
2306 /* Warn when a switch case falls through. */
2307
2308 static void
2309 maybe_warn_implicit_fallthrough (gimple_seq seq)
2310 {
2311 if (!warn_implicit_fallthrough)
2312 return;
2313
2314 /* This warning is meant for C/C++/ObjC/ObjC++ only. */
2315 if (!(lang_GNU_C ()
2316 || lang_GNU_CXX ()
2317 || lang_GNU_OBJC ()))
2318 return;
2319
2320 struct walk_stmt_info wi;
2321 memset (&wi, 0, sizeof (wi));
2322 walk_gimple_seq (seq, warn_implicit_fallthrough_r, NULL, &wi);
2323 }
2324
/* Callback for walk_gimple_seq.  Removes IFN_FALLTHROUGH marker calls and
   diagnoses those not immediately preceding a case/default label.  On a
   marker at the very end of the sequence, stores its location through
   WI->info and returns integer_zero_node to stop the walk.  */

static tree
expand_FALLTHROUGH_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
		      struct walk_stmt_info *wi)
{
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;
  switch (gimple_code (stmt))
    {
    case GIMPLE_TRY:
    case GIMPLE_BIND:
    case GIMPLE_CATCH:
    case GIMPLE_EH_FILTER:
    case GIMPLE_TRANSACTION:
      /* Walk the sub-statements.  */
      *handled_ops_p = false;
      break;
    case GIMPLE_CALL:
      if (gimple_call_internal_p (stmt, IFN_FALLTHROUGH))
	{
	  /* Drop the marker; the iterator now points at the statement
	     that followed it.  */
	  gsi_remove (gsi_p, true);
	  if (gsi_end_p (*gsi_p))
	    {
	      *static_cast<location_t *>(wi->info) = gimple_location (stmt);
	      return integer_zero_node;
	    }

	  bool found = false;
	  location_t loc = gimple_location (stmt);

	  gimple_stmt_iterator gsi2 = *gsi_p;
	  stmt = gsi_stmt (gsi2);
	  if (gimple_code (stmt) == GIMPLE_GOTO && !gimple_has_location (stmt))
	    {
	      /* Go on until the artificial label.  */
	      tree goto_dest = gimple_goto_dest (stmt);
	      for (; !gsi_end_p (gsi2); gsi_next (&gsi2))
		{
		  if (gimple_code (gsi_stmt (gsi2)) == GIMPLE_LABEL
		      && gimple_label_label (as_a <glabel *> (gsi_stmt (gsi2)))
			 == goto_dest)
		    break;
		}

	      /* Not found?  Stop.  */
	      if (gsi_end_p (gsi2))
		break;

	      /* Look one past it.  */
	      gsi_next (&gsi2);
	    }

	  /* We're looking for a case label or default label here.  */
	  while (!gsi_end_p (gsi2))
	    {
	      stmt = gsi_stmt (gsi2);
	      if (gimple_code (stmt) == GIMPLE_LABEL)
		{
		  tree label = gimple_label_label (as_a <glabel *> (stmt));
		  if (gimple_has_location (stmt) && DECL_ARTIFICIAL (label))
		    {
		      found = true;
		      break;
		    }
		}
	      else if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
		;
	      else if (!is_gimple_debug (stmt))
		/* Anything else is not expected.  */
		break;
	      gsi_next (&gsi2);
	    }
	  if (!found)
	    warning_at (loc, 0, "attribute %<fallthrough%> not preceding "
			"a case label or default label");
	}
      break;
    default:
      break;
    }
  return NULL_TREE;
}
2409
2410 /* Expand all FALLTHROUGH () calls in SEQ. */
2411
2412 static void
2413 expand_FALLTHROUGH (gimple_seq *seq_p)
2414 {
2415 struct walk_stmt_info wi;
2416 location_t loc;
2417 memset (&wi, 0, sizeof (wi));
2418 wi.info = (void *) &loc;
2419 walk_gimple_seq_mod (seq_p, expand_FALLTHROUGH_r, NULL, &wi);
2420 if (wi.callback_result == integer_zero_node)
2421 /* We've found [[fallthrough]]; at the end of a switch, which the C++
2422 standard says is ill-formed; see [dcl.attr.fallthrough]. */
2423 warning_at (loc, 0, "attribute %<fallthrough%> not preceding "
2424 "a case label or default label");
2425 }
2426
2427 \f
/* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
   branch to.  *EXPR_P is the SWITCH_EXPR; the resulting GIMPLE_SWITCH
   and body are appended to PRE_P.  */

static enum gimplify_status
gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree switch_expr = *expr_p;
  gimple_seq switch_body_seq = NULL;
  enum gimplify_status ret;
  tree index_type = TREE_TYPE (switch_expr);
  if (index_type == NULL_TREE)
    index_type = TREE_TYPE (SWITCH_COND (switch_expr));

  ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
		       fb_rvalue);
  if (ret == GS_ERROR || ret == GS_UNHANDLED)
    return ret;

  if (SWITCH_BODY (switch_expr))
    {
      vec<tree> labels;
      vec<tree> saved_labels;
      hash_set<tree> *saved_live_switch_vars = NULL;
      tree default_case = NULL_TREE;
      gswitch *switch_stmt;

      /* Save old labels, get new ones from body, then restore the old
	 labels.  Save all the things from the switch body to append after.  */
      saved_labels = gimplify_ctxp->case_labels;
      gimplify_ctxp->case_labels.create (8);

      /* Do not create live_switch_vars if SWITCH_BODY is not a BIND_EXPR.  */
      saved_live_switch_vars = gimplify_ctxp->live_switch_vars;
      tree_code body_type = TREE_CODE (SWITCH_BODY (switch_expr));
      if (body_type == BIND_EXPR || body_type == STATEMENT_LIST)
	gimplify_ctxp->live_switch_vars = new hash_set<tree> (4);
      else
	gimplify_ctxp->live_switch_vars = NULL;

      bool old_in_switch_expr = gimplify_ctxp->in_switch_expr;
      gimplify_ctxp->in_switch_expr = true;

      gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);

      gimplify_ctxp->in_switch_expr = old_in_switch_expr;
      maybe_warn_switch_unreachable (switch_body_seq);
      maybe_warn_implicit_fallthrough (switch_body_seq);
      /* Only do this for the outermost GIMPLE_SWITCH.  */
      if (!gimplify_ctxp->in_switch_expr)
	expand_FALLTHROUGH (&switch_body_seq);

      labels = gimplify_ctxp->case_labels;
      gimplify_ctxp->case_labels = saved_labels;

      /* All live switch variables must have been handled by the body.  */
      if (gimplify_ctxp->live_switch_vars)
	{
	  gcc_assert (gimplify_ctxp->live_switch_vars->is_empty ());
	  delete gimplify_ctxp->live_switch_vars;
	}
      gimplify_ctxp->live_switch_vars = saved_live_switch_vars;

      preprocess_case_label_vec_for_gimple (labels, index_type,
					    &default_case);

      bool add_bind = false;
      if (!default_case)
	{
	  glabel *new_default;

	  /* Synthesize a default label at the end of the body.  */
	  default_case
	    = build_case_label (NULL_TREE, NULL_TREE,
				create_artificial_label (UNKNOWN_LOCATION));
	  if (old_in_switch_expr)
	    {
	      SWITCH_BREAK_LABEL_P (CASE_LABEL (default_case)) = 1;
	      add_bind = true;
	    }
	  new_default = gimple_build_label (CASE_LABEL (default_case));
	  gimplify_seq_add_stmt (&switch_body_seq, new_default);
	}
      else if (old_in_switch_expr)
	{
	  gimple *last = gimple_seq_last_stmt (switch_body_seq);
	  if (last && gimple_code (last) == GIMPLE_LABEL)
	    {
	      tree label = gimple_label_label (as_a <glabel *> (last));
	      if (SWITCH_BREAK_LABEL_P (label))
		add_bind = true;
	    }
	}

      switch_stmt = gimple_build_switch (SWITCH_COND (switch_expr),
					 default_case, labels);
      /* For the benefit of -Wimplicit-fallthrough, if switch_body_seq
	 ends with a GIMPLE_LABEL holding SWITCH_BREAK_LABEL_P LABEL_DECL,
	 wrap the GIMPLE_SWITCH up to that GIMPLE_LABEL into a GIMPLE_BIND,
	 so that we can easily find the start and end of the switch
	 statement.  */
      if (add_bind)
	{
	  gimple_seq bind_body = NULL;
	  gimplify_seq_add_stmt (&bind_body, switch_stmt);
	  gimple_seq_add_seq (&bind_body, switch_body_seq);
	  gbind *bind = gimple_build_bind (NULL_TREE, bind_body, NULL_TREE);
	  gimple_set_location (bind, EXPR_LOCATION (switch_expr));
	  gimplify_seq_add_stmt (pre_p, bind);
	}
      else
	{
	  gimplify_seq_add_stmt (pre_p, switch_stmt);
	  gimplify_seq_add_seq (pre_p, switch_body_seq);
	}
      labels.release ();
    }
  else
    gcc_unreachable ();

  return GS_ALL_DONE;
}
2547
2548 /* Gimplify the LABEL_EXPR pointed to by EXPR_P. */
2549
2550 static enum gimplify_status
2551 gimplify_label_expr (tree *expr_p, gimple_seq *pre_p)
2552 {
2553 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
2554 == current_function_decl);
2555
2556 tree label = LABEL_EXPR_LABEL (*expr_p);
2557 glabel *label_stmt = gimple_build_label (label);
2558 gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
2559 gimplify_seq_add_stmt (pre_p, label_stmt);
2560
2561 if (lookup_attribute ("cold", DECL_ATTRIBUTES (label)))
2562 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_COLD_LABEL,
2563 NOT_TAKEN));
2564 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label)))
2565 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_HOT_LABEL,
2566 TAKEN));
2567
2568 return GS_ALL_DONE;
2569 }
2570
2571 /* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P. */
2572
2573 static enum gimplify_status
2574 gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
2575 {
2576 struct gimplify_ctx *ctxp;
2577 glabel *label_stmt;
2578
2579 /* Invalid programs can play Duff's Device type games with, for example,
2580 #pragma omp parallel. At least in the C front end, we don't
2581 detect such invalid branches until after gimplification, in the
2582 diagnose_omp_blocks pass. */
2583 for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
2584 if (ctxp->case_labels.exists ())
2585 break;
2586
2587 tree label = CASE_LABEL (*expr_p);
2588 label_stmt = gimple_build_label (label);
2589 gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
2590 ctxp->case_labels.safe_push (*expr_p);
2591 gimplify_seq_add_stmt (pre_p, label_stmt);
2592
2593 if (lookup_attribute ("cold", DECL_ATTRIBUTES (label)))
2594 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_COLD_LABEL,
2595 NOT_TAKEN));
2596 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label)))
2597 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_HOT_LABEL,
2598 TAKEN));
2599
2600 return GS_ALL_DONE;
2601 }
2602
2603 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
2604 if necessary. */
2605
2606 tree
2607 build_and_jump (tree *label_p)
2608 {
2609 if (label_p == NULL)
2610 /* If there's nowhere to jump, just fall through. */
2611 return NULL_TREE;
2612
2613 if (*label_p == NULL_TREE)
2614 {
2615 tree label = create_artificial_label (UNKNOWN_LOCATION);
2616 *label_p = label;
2617 }
2618
2619 return build1 (GOTO_EXPR, void_type_node, *label_p);
2620 }
2621
2622 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
2623 This also involves building a label to jump to and communicating it to
2624 gimplify_loop_expr through gimplify_ctxp->exit_label. */
2625
2626 static enum gimplify_status
2627 gimplify_exit_expr (tree *expr_p)
2628 {
2629 tree cond = TREE_OPERAND (*expr_p, 0);
2630 tree expr;
2631
2632 expr = build_and_jump (&gimplify_ctxp->exit_label);
2633 expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
2634 *expr_p = expr;
2635
2636 return GS_OK;
2637 }
2638
/* *EXPR_P is a COMPONENT_REF being used as an rvalue.  If its type is
   different from its canonical type, wrap the whole thing inside a
   NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
   type.

   The canonical type of a COMPONENT_REF is the type of the field being
   referenced--unless the field is a bit-field which can be read directly
   in a smaller mode, in which case the canonical type is the
   sign-appropriate type corresponding to that mode.  */

static void
canonicalize_component_ref (tree *expr_p)
{
  tree expr = *expr_p;
  tree type;

  gcc_assert (TREE_CODE (expr) == COMPONENT_REF);

  /* For integral results, prefer the narrowest mode the field can be
     read in; otherwise the canonical type is the field's own type.  */
  if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
    type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
  else
    type = TREE_TYPE (TREE_OPERAND (expr, 1));

  /* One could argue that all the stuff below is not necessary for
     the non-bitfield case and declare it a FE error if type
     adjustment would be needed.  */
  if (TREE_TYPE (expr) != type)
    {
#ifdef ENABLE_TYPES_CHECKING
      tree old_type = TREE_TYPE (expr);
#endif
      int type_quals;

      /* We need to preserve qualifiers and propagate them from
	 operand 0.  */
      type_quals = TYPE_QUALS (type)
	| TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
      if (TYPE_QUALS (type) != type_quals)
	type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);

      /* Set the type of the COMPONENT_REF to the underlying type.  */
      TREE_TYPE (expr) = type;

#ifdef ENABLE_TYPES_CHECKING
      /* It is now a FE error, if the conversion from the canonical
	 type to the original expression type is not useless.  */
      gcc_assert (useless_type_conversion_p (old_type, type));
#endif
    }
}
2689
2690 /* If a NOP conversion is changing a pointer to array of foo to a pointer
2691 to foo, embed that change in the ADDR_EXPR by converting
2692 T array[U];
2693 (T *)&array
2694 ==>
2695 &array[L]
2696 where L is the lower bound. For simplicity, only do this for constant
2697 lower bound.
2698 The constraint is that the type of &array[L] is trivially convertible
2699 to T *. */
2700
2701 static void
2702 canonicalize_addr_expr (tree *expr_p)
2703 {
2704 tree expr = *expr_p;
2705 tree addr_expr = TREE_OPERAND (expr, 0);
2706 tree datype, ddatype, pddatype;
2707
2708 /* We simplify only conversions from an ADDR_EXPR to a pointer type. */
2709 if (!POINTER_TYPE_P (TREE_TYPE (expr))
2710 || TREE_CODE (addr_expr) != ADDR_EXPR)
2711 return;
2712
2713 /* The addr_expr type should be a pointer to an array. */
2714 datype = TREE_TYPE (TREE_TYPE (addr_expr));
2715 if (TREE_CODE (datype) != ARRAY_TYPE)
2716 return;
2717
2718 /* The pointer to element type shall be trivially convertible to
2719 the expression pointer type. */
2720 ddatype = TREE_TYPE (datype);
2721 pddatype = build_pointer_type (ddatype);
2722 if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
2723 pddatype))
2724 return;
2725
2726 /* The lower bound and element sizes must be constant. */
2727 if (!TYPE_SIZE_UNIT (ddatype)
2728 || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
2729 || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
2730 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
2731 return;
2732
2733 /* All checks succeeded. Build a new node to merge the cast. */
2734 *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
2735 TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
2736 NULL_TREE, NULL_TREE);
2737 *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);
2738
2739 /* We can have stripped a required restrict qualifier above. */
2740 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
2741 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
2742 }
2743
/* *EXPR_P is a NOP_EXPR or CONVERT_EXPR.  Remove it and/or other conversions
   underneath as appropriate.  Always returns GS_OK; the caller re-examines
   the (possibly rewritten) expression.  */

static enum gimplify_status
gimplify_conversion (tree *expr_p)
{
  location_t loc = EXPR_LOCATION (*expr_p);
  gcc_assert (CONVERT_EXPR_P (*expr_p));

  /* Then strip away all but the outermost conversion.  */
  STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));

  /* And remove the outermost conversion if it's useless.  */
  if (tree_ssa_useless_type_conversion (*expr_p))
    *expr_p = TREE_OPERAND (*expr_p, 0);

  /* If we still have a conversion at the toplevel,
     then canonicalize some constructs.  */
  if (CONVERT_EXPR_P (*expr_p))
    {
      tree sub = TREE_OPERAND (*expr_p, 0);

      /* If a NOP conversion is changing the type of a COMPONENT_REF
	 expression, then canonicalize its type now in order to expose more
	 redundant conversions.  */
      if (TREE_CODE (sub) == COMPONENT_REF)
	canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));

      /* If a NOP conversion is changing a pointer to array of foo
	 to a pointer to foo, embed that change in the ADDR_EXPR.  */
      else if (TREE_CODE (sub) == ADDR_EXPR)
	canonicalize_addr_expr (expr_p);
    }

  /* If we have a conversion to a non-register type force the
     use of a VIEW_CONVERT_EXPR instead.  */
  if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
    *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
			       TREE_OPERAND (*expr_p, 0));

  /* Canonicalize CONVERT_EXPR to NOP_EXPR.  */
  if (TREE_CODE (*expr_p) == CONVERT_EXPR)
    TREE_SET_CODE (*expr_p, NOP_EXPR);

  return GS_OK;
}
2790
2791 /* Gimplify a VAR_DECL or PARM_DECL. Return GS_OK if we expanded a
2792 DECL_VALUE_EXPR, and it's worth re-examining things. */
2793
2794 static enum gimplify_status
2795 gimplify_var_or_parm_decl (tree *expr_p)
2796 {
2797 tree decl = *expr_p;
2798
2799 /* ??? If this is a local variable, and it has not been seen in any
2800 outer BIND_EXPR, then it's probably the result of a duplicate
2801 declaration, for which we've already issued an error. It would
2802 be really nice if the front end wouldn't leak these at all.
2803 Currently the only known culprit is C++ destructors, as seen
2804 in g++.old-deja/g++.jason/binding.C. */
2805 if (VAR_P (decl)
2806 && !DECL_SEEN_IN_BIND_EXPR_P (decl)
2807 && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
2808 && decl_function_context (decl) == current_function_decl)
2809 {
2810 gcc_assert (seen_error ());
2811 return GS_ERROR;
2812 }
2813
2814 /* When within an OMP context, notice uses of variables. */
2815 if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
2816 return GS_ALL_DONE;
2817
2818 /* If the decl is an alias for another expression, substitute it now. */
2819 if (DECL_HAS_VALUE_EXPR_P (decl))
2820 {
2821 *expr_p = unshare_expr (DECL_VALUE_EXPR (decl));
2822 return GS_OK;
2823 }
2824
2825 return GS_ALL_DONE;
2826 }
2827
2828 /* Recalculate the value of the TREE_SIDE_EFFECTS flag for T. */
2829
2830 static void
2831 recalculate_side_effects (tree t)
2832 {
2833 enum tree_code code = TREE_CODE (t);
2834 int len = TREE_OPERAND_LENGTH (t);
2835 int i;
2836
2837 switch (TREE_CODE_CLASS (code))
2838 {
2839 case tcc_expression:
2840 switch (code)
2841 {
2842 case INIT_EXPR:
2843 case MODIFY_EXPR:
2844 case VA_ARG_EXPR:
2845 case PREDECREMENT_EXPR:
2846 case PREINCREMENT_EXPR:
2847 case POSTDECREMENT_EXPR:
2848 case POSTINCREMENT_EXPR:
2849 /* All of these have side-effects, no matter what their
2850 operands are. */
2851 return;
2852
2853 default:
2854 break;
2855 }
2856 /* Fall through. */
2857
2858 case tcc_comparison: /* a comparison expression */
2859 case tcc_unary: /* a unary arithmetic expression */
2860 case tcc_binary: /* a binary arithmetic expression */
2861 case tcc_reference: /* a reference */
2862 case tcc_vl_exp: /* a function call */
2863 TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
2864 for (i = 0; i < len; ++i)
2865 {
2866 tree op = TREE_OPERAND (t, i);
2867 if (op && TREE_SIDE_EFFECTS (op))
2868 TREE_SIDE_EFFECTS (t) = 1;
2869 }
2870 break;
2871
2872 case tcc_constant:
2873 /* No side-effects. */
2874 return;
2875
2876 default:
2877 gcc_unreachable ();
2878 }
2879 }
2880
/* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
   node *EXPR_P.

      compound_lval
	      : min_lval '[' val ']'
	      | min_lval '.' ID
	      | compound_lval '[' val ']'
	      | compound_lval '.' ID

   This is not part of the original SIMPLE definition, which separates
   array and member references, but it seems reasonable to handle them
   together.  Also, this way we don't run into problems with union
   aliasing; gcc requires that for accesses through a union to alias, the
   union reference must be explicit, which was not always the case when we
   were splitting up array and member refs.

   PRE_P points to the sequence where side effects that must happen before
     *EXPR_P should be stored.

   POST_P points to the sequence where side effects that must happen after
     *EXPR_P should be stored.

   FALLBACK indicates what kinds of temporaries (rvalue and/or lvalue)
   may be used when gimplifying subexpressions.  Returns the minimum of
   the statuses of all the sub-gimplifications performed.  */

static enum gimplify_status
gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			fallback_t fallback)
{
  tree *p;
  enum gimplify_status ret = GS_ALL_DONE, tret;
  int i;
  location_t loc = EXPR_LOCATION (*expr_p);
  tree expr = *expr_p;

  /* Create a stack of the subexpressions so later we can walk them in
     order from inner to outer.  */
  auto_vec<tree, 10> expr_stack;

  /* We can handle anything that get_inner_reference can deal with.  */
  for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
    {
    restart:
      /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs.  */
      if (TREE_CODE (*p) == INDIRECT_REF)
	*p = fold_indirect_ref_loc (loc, *p);

      if (handled_component_p (*p))
	;
      /* Expand DECL_VALUE_EXPR now.  In some cases that may expose
	 additional COMPONENT_REFs.  */
      else if ((VAR_P (*p) || TREE_CODE (*p) == PARM_DECL)
	       && gimplify_var_or_parm_decl (p) == GS_OK)
	goto restart;
      else
	break;

      expr_stack.safe_push (*p);
    }

  gcc_assert (expr_stack.length ());

  /* Now EXPR_STACK is a stack of pointers to all the refs we've
     walked through and P points to the innermost expression.

     Java requires that we elaborated nodes in source order.  That
     means we must gimplify the inner expression followed by each of
     the indices, in order.  But we can't gimplify the inner
     expression until we deal with any variable bounds, sizes, or
     positions in order to deal with PLACEHOLDER_EXPRs.

     So we do this in three steps.  First we deal with the annotations
     for any variables in the components, then we gimplify the base,
     then we gimplify any indices, from left to right.  */
  for (i = expr_stack.length () - 1; i >= 0; i--)
    {
      tree t = expr_stack[i];

      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
	{
	  /* Gimplify the low bound and element type size and put them into
	     the ARRAY_REF.  If these values are set, they have already been
	     gimplified.  */
	  if (TREE_OPERAND (t, 2) == NULL_TREE)
	    {
	      tree low = unshare_expr (array_ref_low_bound (t));
	      if (!is_gimple_min_invariant (low))
		{
		  TREE_OPERAND (t, 2) = low;
		  tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
					post_p, is_gimple_reg,
					fb_rvalue);
		  ret = MIN (ret, tret);
		}
	    }
	  else
	    {
	      tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
				    is_gimple_reg, fb_rvalue);
	      ret = MIN (ret, tret);
	    }

	  if (TREE_OPERAND (t, 3) == NULL_TREE)
	    {
	      tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
	      tree elmt_size = unshare_expr (array_ref_element_size (t));
	      tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));

	      /* Divide the element size by the alignment of the element
		 type (above).  */
	      elmt_size
		= size_binop_loc (loc, EXACT_DIV_EXPR, elmt_size, factor);

	      if (!is_gimple_min_invariant (elmt_size))
		{
		  TREE_OPERAND (t, 3) = elmt_size;
		  tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
					post_p, is_gimple_reg,
					fb_rvalue);
		  ret = MIN (ret, tret);
		}
	    }
	  else
	    {
	      tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
				    is_gimple_reg, fb_rvalue);
	      ret = MIN (ret, tret);
	    }
	}
      else if (TREE_CODE (t) == COMPONENT_REF)
	{
	  /* Set the field offset into T and gimplify it.  */
	  if (TREE_OPERAND (t, 2) == NULL_TREE)
	    {
	      tree offset = unshare_expr (component_ref_field_offset (t));
	      tree field = TREE_OPERAND (t, 1);
	      tree factor
		= size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);

	      /* Divide the offset by its alignment.  */
	      offset = size_binop_loc (loc, EXACT_DIV_EXPR, offset, factor);

	      if (!is_gimple_min_invariant (offset))
		{
		  TREE_OPERAND (t, 2) = offset;
		  tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
					post_p, is_gimple_reg,
					fb_rvalue);
		  ret = MIN (ret, tret);
		}
	    }
	  else
	    {
	      tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
				    is_gimple_reg, fb_rvalue);
	      ret = MIN (ret, tret);
	    }
	}
    }

  /* Step 2 is to gimplify the base expression.  Make sure lvalue is set
     so as to match the min_lval predicate.  Failure to do so may result
     in the creation of large aggregate temporaries.  */
  tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
			fallback | fb_lvalue);
  ret = MIN (ret, tret);

  /* And finally, the indices and operands of ARRAY_REF.  During this
     loop we also remove any useless conversions.  */
  for (; expr_stack.length () > 0; )
    {
      tree t = expr_stack.pop ();

      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
	{
	  /* Gimplify the dimension.  */
	  if (!is_gimple_min_invariant (TREE_OPERAND (t, 1)))
	    {
	      tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
				    is_gimple_val, fb_rvalue);
	      ret = MIN (ret, tret);
	    }
	}

      STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));

      /* The innermost expression P may have originally had
	 TREE_SIDE_EFFECTS set which would have caused all the outer
	 expressions in *EXPR_P leading to P to also have had
	 TREE_SIDE_EFFECTS set.  */
      recalculate_side_effects (t);
    }

  /* If the outermost expression is a COMPONENT_REF, canonicalize its type.  */
  if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
    {
      canonicalize_component_ref (expr_p);
    }

  expr_stack.release ();

  gcc_assert (*expr_p == expr || ret != GS_ALL_DONE);

  return ret;
}
3083
/* Gimplify the self modifying expression pointed to by EXPR_P
   (++, --, +=, -=).

   PRE_P points to the list where side effects that must happen before
   *EXPR_P should be stored.

   POST_P points to the list where side effects that must happen after
   *EXPR_P should be stored.

   WANT_VALUE is nonzero iff we want to use the value of this expression
   in another expression.

   ARITH_TYPE is the type the computation should be performed in.

   Returns GS_ALL_DONE for the postfix case (the result is the saved
   pre-modification value), GS_OK for the prefix case (the caller still
   has to gimplify the MODIFY_EXPR left in *EXPR_P), or GS_ERROR.  */

enum gimplify_status
gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			bool want_value, tree arith_type)
{
  enum tree_code code;
  tree lhs, lvalue, rhs, t1;
  /* POST collects the inner expression's post-side-effects so they can be
     re-queued after this expression's own update (postfix case only).  */
  gimple_seq post = NULL, *orig_post_p = post_p;
  bool postfix;
  enum tree_code arith_code;
  enum gimplify_status ret;
  location_t loc = EXPR_LOCATION (*expr_p);

  code = TREE_CODE (*expr_p);

  gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
	      || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);

  /* Prefix or postfix?  */
  if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
    /* Faster to treat as prefix if result is not used.  */
    postfix = want_value;
  else
    postfix = false;

  /* For postfix, make sure the inner expression's post side effects
     are executed after side effects from this expression.  */
  if (postfix)
    post_p = &post;

  /* Add or subtract?  */
  if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
    arith_code = PLUS_EXPR;
  else
    arith_code = MINUS_EXPR;

  /* Gimplify the LHS into a GIMPLE lvalue.  */
  lvalue = TREE_OPERAND (*expr_p, 0);
  ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
  if (ret == GS_ERROR)
    return ret;

  /* Extract the operands to the arithmetic operation.  */
  lhs = lvalue;
  rhs = TREE_OPERAND (*expr_p, 1);

  /* For postfix operator, we evaluate the LHS to an rvalue and then use
     that as the result value and in the postqueue operation.  */
  if (postfix)
    {
      ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
      if (ret == GS_ERROR)
	return ret;

      /* Save the pre-modification value in a temporary; that temporary
	 becomes the value of the whole expression.  */
      lhs = get_initialized_tmp_var (lhs, pre_p, NULL);
    }

  /* For POINTERs increment, use POINTER_PLUS_EXPR.  */
  if (POINTER_TYPE_P (TREE_TYPE (lhs)))
    {
      rhs = convert_to_ptrofftype_loc (loc, rhs);
      /* POINTER_PLUS_EXPR has no MINUS counterpart; negate the offset
	 instead.  */
      if (arith_code == MINUS_EXPR)
	rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
      t1 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (*expr_p), lhs, rhs);
    }
  else
    /* Perform the arithmetic in ARITH_TYPE, then convert back to the
       expression's type.  */
    t1 = fold_convert (TREE_TYPE (*expr_p),
		       fold_build2 (arith_code, arith_type,
				    fold_convert (arith_type, lhs),
				    fold_convert (arith_type, rhs)));

  if (postfix)
    {
      /* Emit the update now, then flush the inner expression's deferred
	 post-queue after it; the saved temporary is the result.  */
      gimplify_assign (lvalue, t1, pre_p);
      gimplify_seq_add_seq (orig_post_p, post);
      *expr_p = lhs;
      return GS_ALL_DONE;
    }
  else
    {
      /* Prefix: leave a plain assignment for the caller to gimplify.  */
      *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
      return GS_OK;
    }
}
3181
3182 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
3183
3184 static void
3185 maybe_with_size_expr (tree *expr_p)
3186 {
3187 tree expr = *expr_p;
3188 tree type = TREE_TYPE (expr);
3189 tree size;
3190
3191 /* If we've already wrapped this or the type is error_mark_node, we can't do
3192 anything. */
3193 if (TREE_CODE (expr) == WITH_SIZE_EXPR
3194 || type == error_mark_node)
3195 return;
3196
3197 /* If the size isn't known or is a constant, we have nothing to do. */
3198 size = TYPE_SIZE_UNIT (type);
3199 if (!size || poly_int_tree_p (size))
3200 return;
3201
3202 /* Otherwise, make a WITH_SIZE_EXPR. */
3203 size = unshare_expr (size);
3204 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
3205 *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
3206 }
3207
3208 /* Helper for gimplify_call_expr. Gimplify a single argument *ARG_P
3209 Store any side-effects in PRE_P. CALL_LOCATION is the location of
3210 the CALL_EXPR. If ALLOW_SSA is set the actual parameter may be
3211 gimplified to an SSA name. */
3212
3213 enum gimplify_status
3214 gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location,
3215 bool allow_ssa)
3216 {
3217 bool (*test) (tree);
3218 fallback_t fb;
3219
3220 /* In general, we allow lvalues for function arguments to avoid
3221 extra overhead of copying large aggregates out of even larger
3222 aggregates into temporaries only to copy the temporaries to
3223 the argument list. Make optimizers happy by pulling out to
3224 temporaries those types that fit in registers. */
3225 if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
3226 test = is_gimple_val, fb = fb_rvalue;
3227 else
3228 {
3229 test = is_gimple_lvalue, fb = fb_either;
3230 /* Also strip a TARGET_EXPR that would force an extra copy. */
3231 if (TREE_CODE (*arg_p) == TARGET_EXPR)
3232 {
3233 tree init = TARGET_EXPR_INITIAL (*arg_p);
3234 if (init
3235 && !VOID_TYPE_P (TREE_TYPE (init)))
3236 *arg_p = init;
3237 }
3238 }
3239
3240 /* If this is a variable sized type, we must remember the size. */
3241 maybe_with_size_expr (arg_p);
3242
3243 /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c. */
3244 /* Make sure arguments have the same location as the function call
3245 itself. */
3246 protected_set_expr_location (*arg_p, call_location);
3247
3248 /* There is a sequence point before a function call. Side effects in
3249 the argument list must occur before the actual call. So, when
3250 gimplifying arguments, force gimplify_expr to use an internal
3251 post queue which is then appended to the end of PRE_P. */
3252 return gimplify_expr (arg_p, pre_p, NULL, test, fb, allow_ssa);
3253 }
3254
3255 /* Don't fold inside offloading or taskreg regions: it can break code by
3256 adding decl references that weren't in the source. We'll do it during
3257 omplower pass instead. */
3258
3259 static bool
3260 maybe_fold_stmt (gimple_stmt_iterator *gsi)
3261 {
3262 struct gimplify_omp_ctx *ctx;
3263 for (ctx = gimplify_omp_ctxp; ctx; ctx = ctx->outer_context)
3264 if ((ctx->region_type & (ORT_TARGET | ORT_PARALLEL | ORT_TASK)) != 0)
3265 return false;
3266 else if ((ctx->region_type & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
3267 return false;
3268 /* Delay folding of builtins until the IL is in consistent state
3269 so the diagnostic machinery can do a better job. */
3270 if (gimple_call_builtin_p (gsi_stmt (*gsi)))
3271 return false;
3272 return fold_stmt (gsi);
3273 }
3274
/* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
   WANT_VALUE is true if the result of the call is desired.

   When !WANT_VALUE a GIMPLE_CALL is emitted into PRE_P and *EXPR_P is
   cleared; otherwise *EXPR_P is left as a gimplified CALL_EXPR for
   gimplify_modify_expr to consume.  Returns GS_OK when the expression
   was rewritten and must be regimplified, GS_ALL_DONE on completion,
   or GS_ERROR.  */

static enum gimplify_status
gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
{
  tree fndecl, parms, p, fnptrtype;
  enum gimplify_status ret;
  int i, nargs;
  gcall *call;
  bool builtin_va_start_p = false;
  location_t loc = EXPR_LOCATION (*expr_p);

  gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);

  /* For reliable diagnostics during inlining, it is necessary that
     every call_expr be annotated with file and line.  */
  if (! EXPR_HAS_LOCATION (*expr_p))
    SET_EXPR_LOCATION (*expr_p, input_location);

  /* Gimplify internal functions created in the FEs.  Such calls have no
     function operand (CALL_EXPR_FN is NULL) and are rebuilt directly as
     GIMPLE_CALL tuples here.  */
  if (CALL_EXPR_FN (*expr_p) == NULL_TREE)
    {
      if (want_value)
	return GS_ALL_DONE;

      nargs = call_expr_nargs (*expr_p);
      enum internal_fn ifn = CALL_EXPR_IFN (*expr_p);
      auto_vec<tree> vargs (nargs);

      for (i = 0; i < nargs; i++)
	{
	  /* NOTE(review): the gimplify_status of gimplify_arg is ignored
	     here, unlike the argument loop below — confirm whether GS_ERROR
	     can occur on this path.  */
	  gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
			EXPR_LOCATION (*expr_p));
	  vargs.quick_push (CALL_EXPR_ARG (*expr_p, i));
	}

      /* This declaration shadows the outer CALL; only the inner one is
	 used in this scope.  */
      gcall *call = gimple_build_call_internal_vec (ifn, vargs);
      gimple_call_set_nothrow (call, TREE_NOTHROW (*expr_p));
      gimplify_seq_add_stmt (pre_p, call);
      return GS_ALL_DONE;
    }

  /* This may be a call to a builtin function.

     Builtin function calls may be transformed into different
     (and more efficient) builtin function calls under certain
     circumstances.  Unfortunately, gimplification can muck things
     up enough that the builtin expanders are not aware that certain
     transformations are still valid.

     So we attempt transformation/gimplification of the call before
     we gimplify the CALL_EXPR.  At this time we do not manage to
     transform all calls in the same manner as the expanders do, but
     we do transform most of them.  */
  fndecl = get_callee_fndecl (*expr_p);
  if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      CASE_BUILT_IN_ALLOCA:
	/* If the call has been built for a variable-sized object, then we
	   want to restore the stack level when the enclosing BIND_EXPR is
	   exited to reclaim the allocated space; otherwise, we precisely
	   need to do the opposite and preserve the latest stack level.  */
	if (CALL_ALLOCA_FOR_VAR_P (*expr_p))
	  gimplify_ctxp->save_stack = true;
	else
	  gimplify_ctxp->keep_stack = true;
	break;

      case BUILT_IN_VA_START:
	{
	  /* Remember this so the argument loop below skips gimplifying
	     the second argument, which must stay a plain PARM_DECL.  */
	  builtin_va_start_p = TRUE;
	  if (call_expr_nargs (*expr_p) < 2)
	    {
	      error ("too few arguments to function %<va_start%>");
	      *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
	      return GS_OK;
	    }

	  if (fold_builtin_next_arg (*expr_p, true))
	    {
	      *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
	      return GS_OK;
	    }
	  break;
	}

      case BUILT_IN_EH_RETURN:
	cfun->calls_eh_return = true;
	break;

      default:
	;
      }
  if (fndecl && fndecl_built_in_p (fndecl))
    {
      tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
      if (new_tree && new_tree != *expr_p)
	{
	  /* There was a transformation of this call which computes the
	     same value, but in a more efficient way.  Return and try
	     again.  */
	  *expr_p = new_tree;
	  return GS_OK;
	}
    }

  /* Remember the original function pointer type, before the function
     operand itself is gimplified away below.  */
  fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));

  /* There is a sequence point before the call, so any side effects in
     the calling expression must occur before the actual call.  Force
     gimplify_expr to use an internal post queue.  */
  ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
		       is_gimple_call_addr, fb_rvalue);

  nargs = call_expr_nargs (*expr_p);

  /* Get argument types for verification.  */
  fndecl = get_callee_fndecl (*expr_p);
  parms = NULL_TREE;
  if (fndecl)
    parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  else
    parms = TYPE_ARG_TYPES (TREE_TYPE (fnptrtype));

  if (fndecl && DECL_ARGUMENTS (fndecl))
    p = DECL_ARGUMENTS (fndecl);
  else if (parms)
    p = parms;
  else
    p = NULL_TREE;
  /* Walk P alongside the actual arguments; P ends up NULL iff the call
     passes at least as many arguments as there are named parameters.  */
  for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
    ;

  /* If the last argument is __builtin_va_arg_pack () and it is not
     passed as a named argument, decrease the number of CALL_EXPR
     arguments and set instead the CALL_EXPR_VA_ARG_PACK flag.  */
  if (!p
      && i < nargs
      && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
    {
      tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
      tree last_arg_fndecl = get_callee_fndecl (last_arg);

      if (last_arg_fndecl
	  && fndecl_built_in_p (last_arg_fndecl, BUILT_IN_VA_ARG_PACK))
	{
	  tree call = *expr_p;

	  --nargs;
	  *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
					  CALL_EXPR_FN (call),
					  nargs, CALL_EXPR_ARGP (call));

	  /* Copy all CALL_EXPR flags, location and block, except
	     CALL_EXPR_VA_ARG_PACK flag.  */
	  CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
	  CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
	  CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
	    = CALL_EXPR_RETURN_SLOT_OPT (call);
	  CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
	  SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));

	  /* Set CALL_EXPR_VA_ARG_PACK.  */
	  CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
	}
    }

  /* If the call returns twice then after building the CFG the call
     argument computations will no longer dominate the call because
     we add an abnormal incoming edge to the call.  So do not use SSA
     vars there.  */
  bool returns_twice = call_expr_flags (*expr_p) & ECF_RETURNS_TWICE;

  /* Gimplify the function arguments.  The traversal direction follows
     the target's argument-push order.  */
  if (nargs > 0)
    {
      for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
	   PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
	   PUSH_ARGS_REVERSED ? i-- : i++)
	{
	  enum gimplify_status t;

	  /* Avoid gimplifying the second argument to va_start, which needs to
	     be the plain PARM_DECL.  */
	  if ((i != 1) || !builtin_va_start_p)
	    {
	      t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
				EXPR_LOCATION (*expr_p), ! returns_twice);

	      if (t == GS_ERROR)
		ret = GS_ERROR;
	    }
	}
    }

  /* Gimplify the static chain.  */
  if (CALL_EXPR_STATIC_CHAIN (*expr_p))
    {
      if (fndecl && !DECL_STATIC_CHAIN (fndecl))
	CALL_EXPR_STATIC_CHAIN (*expr_p) = NULL;
      else
	{
	  enum gimplify_status t;
	  t = gimplify_arg (&CALL_EXPR_STATIC_CHAIN (*expr_p), pre_p,
			    EXPR_LOCATION (*expr_p), ! returns_twice);
	  if (t == GS_ERROR)
	    ret = GS_ERROR;
	}
    }

  /* Verify the function result.  */
  if (want_value && fndecl
      && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype))))
    {
      error_at (loc, "using result of function returning %<void%>");
      ret = GS_ERROR;
    }

  /* Try this again in case gimplification exposed something.  */
  if (ret != GS_ERROR)
    {
      tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);

      if (new_tree && new_tree != *expr_p)
	{
	  /* There was a transformation of this call which computes the
	     same value, but in a more efficient way.  Return and try
	     again.  */
	  *expr_p = new_tree;
	  return GS_OK;
	}
    }
  else
    {
      *expr_p = error_mark_node;
      return GS_ERROR;
    }

  /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
     decl.  This allows us to eliminate redundant or useless
     calls to "const" functions.  */
  if (TREE_CODE (*expr_p) == CALL_EXPR)
    {
      int flags = call_expr_flags (*expr_p);
      if (flags & (ECF_CONST | ECF_PURE)
	  /* An infinite loop is considered a side effect.  */
	  && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
	TREE_SIDE_EFFECTS (*expr_p) = 0;
    }

  /* If the value is not needed by the caller, emit a new GIMPLE_CALL
     and clear *EXPR_P.  Otherwise, leave *EXPR_P in its gimplified
     form and delegate the creation of a GIMPLE_CALL to
     gimplify_modify_expr.  This is always possible because when
     WANT_VALUE is true, the caller wants the result of this call into
     a temporary, which means that we will emit an INIT_EXPR in
     internal_get_tmp_var which will then be handled by
     gimplify_modify_expr.  */
  if (!want_value)
    {
      /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
	 have to do is replicate it as a GIMPLE_CALL tuple.  */
      gimple_stmt_iterator gsi;
      call = gimple_build_call_from_tree (*expr_p, fnptrtype);
      notice_special_calls (call);
      gimplify_seq_add_stmt (pre_p, call);
      gsi = gsi_last (*pre_p);
      maybe_fold_stmt (&gsi);
      *expr_p = NULL_TREE;
    }
  else
    /* Remember the original function type.  */
    CALL_EXPR_FN (*expr_p) = build1 (NOP_EXPR, fnptrtype,
				     CALL_EXPR_FN (*expr_p));

  return ret;
}
3555
/* Handle shortcut semantics in the predicate operand of a COND_EXPR by
   rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.

   TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
   condition is true or false, respectively.  If null, we should generate
   our own to skip over the evaluation of this specific expression.

   LOCUS is the source location of the COND_EXPR.

   This function is the tree equivalent of do_jump.

   shortcut_cond_r should only be called by shortcut_cond_expr.  */

static tree
shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
		 location_t locus)
{
  /* LOCAL_LABEL is created lazily when a missing true/false target is
     needed; it is emitted at the end of the generated sequence so that
     the corresponding branch falls through past it.  */
  tree local_label = NULL_TREE;
  tree t, expr = NULL;

  /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
     retain the shortcut semantics.  Just insert the gotos here;
     shortcut_cond_expr will append the real blocks later.  */
  if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
    {
      location_t new_locus;

      /* Turn if (a && b) into

	 if (a); else goto no;
	 if (b) goto yes; else goto no;
	 (no:) */

      if (false_label_p == NULL)
	false_label_p = &local_label;

      /* Keep the original source location on the first 'if'.  */
      t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus);
      append_to_statement_list (t, &expr);

      /* Set the source location of the && on the second 'if'.  */
      new_locus = rexpr_location (pred, locus);
      t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
			   new_locus);
      append_to_statement_list (t, &expr);
    }
  else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
    {
      location_t new_locus;

      /* Turn if (a || b) into

	 if (a) goto yes;
	 if (b) goto yes; else goto no;
	 (yes:) */

      if (true_label_p == NULL)
	true_label_p = &local_label;

      /* Keep the original source location on the first 'if'.  */
      t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus);
      append_to_statement_list (t, &expr);

      /* Set the source location of the || on the second 'if'.  */
      new_locus = rexpr_location (pred, locus);
      t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
			   new_locus);
      append_to_statement_list (t, &expr);
    }
  else if (TREE_CODE (pred) == COND_EXPR
	   && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 1)))
	   && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 2))))
    {
      location_t new_locus;

      /* As long as we're messing with gotos, turn if (a ? b : c) into
	 if (a)
	   if (b) goto yes; else goto no;
	 else
	   if (c) goto yes; else goto no;

	 Don't do this if one of the arms has void type, which can happen
	 in C++ when the arm is throw.  */

      /* Keep the original source location on the first 'if'.  Set the source
	 location of the ? on the second 'if'.  */
      new_locus = rexpr_location (pred, locus);
      expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
		     shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
				      false_label_p, locus),
		     shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
				      false_label_p, new_locus));
    }
  else
    {
      /* Base case: a simple predicate becomes a two-way conditional jump.
	 build_and_jump creates the label on demand through the pointer.  */
      expr = build3 (COND_EXPR, void_type_node, pred,
		     build_and_jump (true_label_p),
		     build_and_jump (false_label_p));
      SET_EXPR_LOCATION (expr, locus);
    }

  if (local_label)
    {
      /* If we created a fall-through target above, emit it here.  */
      t = build1 (LABEL_EXPR, void_type_node, local_label);
      append_to_statement_list (t, &expr);
    }

  return expr;
}
3665
3666 /* If EXPR is a GOTO_EXPR, return it. If it is a STATEMENT_LIST, skip
3667 any of its leading DEBUG_BEGIN_STMTS and recurse on the subsequent
3668 statement, if it is the last one. Otherwise, return NULL. */
3669
3670 static tree
3671 find_goto (tree expr)
3672 {
3673 if (!expr)
3674 return NULL_TREE;
3675
3676 if (TREE_CODE (expr) == GOTO_EXPR)
3677 return expr;
3678
3679 if (TREE_CODE (expr) != STATEMENT_LIST)
3680 return NULL_TREE;
3681
3682 tree_stmt_iterator i = tsi_start (expr);
3683
3684 while (!tsi_end_p (i) && TREE_CODE (tsi_stmt (i)) == DEBUG_BEGIN_STMT)
3685 tsi_next (&i);
3686
3687 if (!tsi_one_before_end_p (i))
3688 return NULL_TREE;
3689
3690 return find_goto (tsi_stmt (i));
3691 }
3692
3693 /* Same as find_goto, except that it returns NULL if the destination
3694 is not a LABEL_DECL. */
3695
3696 static inline tree
3697 find_goto_label (tree expr)
3698 {
3699 tree dest = find_goto (expr);
3700 if (dest && TREE_CODE (GOTO_DESTINATION (dest)) == LABEL_DECL)
3701 return dest;
3702 return NULL_TREE;
3703 }
3704
/* Given a conditional expression EXPR with short-circuit boolean
   predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
   predicate apart into the equivalent sequence of conditionals.
   Returns either the (possibly rewritten) COND_EXPR or a STATEMENT_LIST
   of simple conditionals, jumps and labels.  */

static tree
shortcut_cond_expr (tree expr)
{
  tree pred = TREE_OPERAND (expr, 0);
  tree then_ = TREE_OPERAND (expr, 1);
  tree else_ = TREE_OPERAND (expr, 2);
  tree true_label, false_label, end_label, t;
  tree *true_label_p;
  tree *false_label_p;
  bool emit_end, emit_false, jump_over_else;
  bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
  bool else_se = else_ && TREE_SIDE_EFFECTS (else_);

  /* First do simple transformations.  */
  if (!else_se)
    {
      /* If there is no 'else', turn
	   if (a && b) then c
	 into
	   if (a) if (b) then c.  */
      while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
	{
	  /* Keep the original source location on the first 'if'.  */
	  location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
	  TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
	  /* Set the source location of the && on the second 'if'.  */
	  if (rexpr_has_location (pred))
	    SET_EXPR_LOCATION (expr, rexpr_location (pred));
	  /* Recursively flatten the inner conditional, then wrap it as
	     the 'then' arm of a new conditional on the left operand.  */
	  then_ = shortcut_cond_expr (expr);
	  then_se = then_ && TREE_SIDE_EFFECTS (then_);
	  pred = TREE_OPERAND (pred, 0);
	  expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
	  SET_EXPR_LOCATION (expr, locus);
	}
    }

  if (!then_se)
    {
      /* If there is no 'then', turn
	   if (a || b); else d
	 into
	   if (a); else if (b); else d.  */
      while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
	{
	  /* Keep the original source location on the first 'if'.  */
	  location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
	  TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
	  /* Set the source location of the || on the second 'if'.  */
	  if (rexpr_has_location (pred))
	    SET_EXPR_LOCATION (expr, rexpr_location (pred));
	  else_ = shortcut_cond_expr (expr);
	  else_se = else_ && TREE_SIDE_EFFECTS (else_);
	  pred = TREE_OPERAND (pred, 0);
	  expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
	  SET_EXPR_LOCATION (expr, locus);
	}
    }

  /* If we're done, great.  */
  if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
      && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
    return expr;

  /* Otherwise we need to mess with gotos.  Change
       if (a) c; else d;
     to
       if (a); else goto no;
       c; goto end;
       no: d; end:
     and recursively gimplify the condition.  */

  true_label = false_label = end_label = NULL_TREE;

  /* If our arms just jump somewhere, hijack those labels so we don't
     generate jumps to jumps.  */

  if (tree then_goto = find_goto_label (then_))
    {
      true_label = GOTO_DESTINATION (then_goto);
      then_ = NULL;
      then_se = false;
    }

  if (tree else_goto = find_goto_label (else_))
    {
      false_label = GOTO_DESTINATION (else_goto);
      else_ = NULL;
      else_se = false;
    }

  /* If we aren't hijacking a label for the 'then' branch, it falls through.  */
  if (true_label)
    true_label_p = &true_label;
  else
    true_label_p = NULL;

  /* The 'else' branch also needs a label if it contains interesting code.  */
  if (false_label || else_se)
    false_label_p = &false_label;
  else
    false_label_p = NULL;

  /* If there was nothing else in our arms, just forward the label(s).  */
  if (!then_se && !else_se)
    return shortcut_cond_r (pred, true_label_p, false_label_p,
			    EXPR_LOC_OR_LOC (expr, input_location));

  /* If our last subexpression already has a terminal label, reuse it.  */
  if (else_se)
    t = expr_last (else_);
  else if (then_se)
    t = expr_last (then_);
  else
    t = NULL;
  if (t && TREE_CODE (t) == LABEL_EXPR)
    end_label = LABEL_EXPR_LABEL (t);

  /* If we don't care about jumping to the 'else' branch, jump to the end
     if the condition is false.  */
  if (!false_label_p)
    false_label_p = &end_label;

  /* We only want to emit these labels if we aren't hijacking them.
     Snapshot this now: shortcut_cond_r below may fill the labels in
     through the pointers.  */
  emit_end = (end_label == NULL_TREE);
  emit_false = (false_label == NULL_TREE);

  /* We only emit the jump over the else clause if we have to--if the
     then clause may fall through.  Otherwise we can wind up with a
     useless jump and a useless label at the end of gimplified code,
     which will cause us to think that this conditional as a whole
     falls through even if it doesn't.  If we then inline a function
     which ends with such a condition, that can cause us to issue an
     inappropriate warning about control reaching the end of a
     non-void function.  */
  jump_over_else = block_may_fallthru (then_);

  pred = shortcut_cond_r (pred, true_label_p, false_label_p,
			  EXPR_LOC_OR_LOC (expr, input_location));

  /* Assemble: condition jumps, then-arm, optional jump over the else,
     optional false label, else-arm, optional end label.  */
  expr = NULL;
  append_to_statement_list (pred, &expr);

  append_to_statement_list (then_, &expr);
  if (else_se)
    {
      if (jump_over_else)
	{
	  tree last = expr_last (expr);
	  t = build_and_jump (&end_label);
	  if (rexpr_has_location (last))
	    SET_EXPR_LOCATION (t, rexpr_location (last));
	  append_to_statement_list (t, &expr);
	}
      if (emit_false)
	{
	  t = build1 (LABEL_EXPR, void_type_node, false_label);
	  append_to_statement_list (t, &expr);
	}
      append_to_statement_list (else_, &expr);
    }
  if (emit_end && end_label)
    {
      t = build1 (LABEL_EXPR, void_type_node, end_label);
      append_to_statement_list (t, &expr);
    }

  return expr;
}
3877
/* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE.
   Truth expressions and comparisons are retyped in place (and their
   operands boolified recursively); anything else is converted via
   fold_convert.  */

tree
gimple_boolify (tree expr)
{
  tree type = TREE_TYPE (expr);
  location_t loc = EXPR_LOCATION (expr);

  if (TREE_CODE (expr) == NE_EXPR
      && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR
      && integer_zerop (TREE_OPERAND (expr, 1)))
    {
      tree call = TREE_OPERAND (expr, 0);
      tree fn = get_callee_fndecl (call);

      /* For __builtin_expect ((long) (x), y) recurse into x as well
	 if x is truth_value_p.  */
      if (fn
	  && fndecl_built_in_p (fn, BUILT_IN_EXPECT)
	  && call_expr_nargs (call) == 2)
	{
	  tree arg = CALL_EXPR_ARG (call, 0);
	  if (arg)
	    {
	      /* Look through the implicit cast to the builtin's long
		 argument type.  */
	      if (TREE_CODE (arg) == NOP_EXPR
		  && TREE_TYPE (arg) == TREE_TYPE (call))
		arg = TREE_OPERAND (arg, 0);
	      if (truth_value_p (TREE_CODE (arg)))
		{
		  arg = gimple_boolify (arg);
		  CALL_EXPR_ARG (call, 0)
		    = fold_convert_loc (loc, TREE_TYPE (call), arg);
		}
	    }
	}
    }

  switch (TREE_CODE (expr))
    {
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      /* Also boolify the arguments of truth exprs.  */
      TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
      /* FALLTHRU */

    case TRUTH_NOT_EXPR:
      TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));

      /* These expressions always produce boolean results.  */
      if (TREE_CODE (type) != BOOLEAN_TYPE)
	TREE_TYPE (expr) = boolean_type_node;
      return expr;

    case ANNOTATE_EXPR:
      /* Loop annotations wrap a condition; boolify the wrapped
	 expression and retype the annotation itself.  */
      switch ((enum annot_expr_kind) TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)))
	{
	case annot_expr_ivdep_kind:
	case annot_expr_unroll_kind:
	case annot_expr_no_vector_kind:
	case annot_expr_vector_kind:
	case annot_expr_parallel_kind:
	  TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
	  if (TREE_CODE (type) != BOOLEAN_TYPE)
	    TREE_TYPE (expr) = boolean_type_node;
	  return expr;
	default:
	  gcc_unreachable ();
	}

    default:
      if (COMPARISON_CLASS_P (expr))
	{
	  /* These expressions always produce boolean results.  */
	  if (TREE_CODE (type) != BOOLEAN_TYPE)
	    TREE_TYPE (expr) = boolean_type_node;
	  return expr;
	}
      /* Other expressions that get here must have boolean values, but
	 might need to be converted to the appropriate mode.  */
      if (TREE_CODE (type) == BOOLEAN_TYPE)
	return expr;
      return fold_convert_loc (loc, boolean_type_node, expr);
    }
}
3965
/* Given a conditional expression *EXPR_P without side effects, gimplify
   its operands.  New statements are inserted to PRE_P.

   Note that both arms are gimplified unconditionally (both will be
   evaluated); the caller (gimplify_cond_expr) has already verified that
   neither arm has side effects or could trap, so this is safe.  */

static enum gimplify_status
gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree expr = *expr_p, cond;
  enum gimplify_status ret, tret;
  enum tree_code code;

  cond = gimple_boolify (COND_EXPR_COND (expr));

  /* We need to handle && and || specially, as their gimplification
     creates pure cond_expr, thus leading to an infinite cycle otherwise.
     Rewriting them in place to the non-short-circuit forms is valid here
     because both operands will be evaluated anyway.  */
  code = TREE_CODE (cond);
  if (code == TRUTH_ANDIF_EXPR)
    TREE_SET_CODE (cond, TRUTH_AND_EXPR);
  else if (code == TRUTH_ORIF_EXPR)
    TREE_SET_CODE (cond, TRUTH_OR_EXPR);
  ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_condexpr, fb_rvalue);
  /* Store the gimplified condition back (gimplify_expr may have replaced
     the tree COND points to).  */
  COND_EXPR_COND (*expr_p) = cond;

  tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
			is_gimple_val, fb_rvalue);
  ret = MIN (ret, tret);
  tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
			is_gimple_val, fb_rvalue);

  /* Return the worst status seen across all three operands.  */
  return MIN (ret, tret);
}
3996
3997 /* Return true if evaluating EXPR could trap.
3998 EXPR is GENERIC, while tree_could_trap_p can be called
3999 only on GIMPLE. */
4000
4001 bool
4002 generic_expr_could_trap_p (tree expr)
4003 {
4004 unsigned i, n;
4005
4006 if (!expr || is_gimple_val (expr))
4007 return false;
4008
4009 if (!EXPR_P (expr) || tree_could_trap_p (expr))
4010 return true;
4011
4012 n = TREE_OPERAND_LENGTH (expr);
4013 for (i = 0; i < n; i++)
4014 if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
4015 return true;
4016
4017 return false;
4018 }
4019
/* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
   into

   if (p)			if (p)
     t1 = a;			  a;
   else			or	else
     t1 = b;			  b;
   t1;

   The second form is used when *EXPR_P is of type void.

   PRE_P points to the list where side effects that must happen before
   *EXPR_P should be stored.

   FALLBACK indicates what kind of result (rvalue/lvalue) the caller can
   accept; it decides whether the value of a non-void COND_EXPR is carried
   in a scalar temporary or through a pointer temporary.

   Returns GS_ALL_DONE on success, GS_ERROR if gimplifying the condition
   failed.  */

static enum gimplify_status
gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
{
  tree expr = *expr_p;
  tree type = TREE_TYPE (expr);
  location_t loc = EXPR_LOCATION (expr);
  tree tmp, arm1, arm2;
  enum gimplify_status ret;
  tree label_true, label_false, label_cont;
  bool have_then_clause_p, have_else_clause_p;
  gcond *cond_stmt;
  enum tree_code pred_code;
  gimple_seq seq = NULL;

  /* If this COND_EXPR has a value, copy the values into a temporary within
     the arms.  */
  if (!VOID_TYPE_P (type))
    {
      tree then_ = TREE_OPERAND (expr, 1), else_ = TREE_OPERAND (expr, 2);
      tree result;

      /* If either an rvalue is ok or we do not require an lvalue, create the
	 temporary.  But we cannot do that if the type is addressable.  */
      if (((fallback & fb_rvalue) || !(fallback & fb_lvalue))
	  && !TREE_ADDRESSABLE (type))
	{
	  /* When the context allows it and both arms are safe to evaluate
	     unconditionally, keep the conditional as a single expression
	     instead of expanding control flow.  */
	  if (gimplify_ctxp->allow_rhs_cond_expr
	      /* If either branch has side effects or could trap, it can't be
		 evaluated unconditionally.  */
	      && !TREE_SIDE_EFFECTS (then_)
	      && !generic_expr_could_trap_p (then_)
	      && !TREE_SIDE_EFFECTS (else_)
	      && !generic_expr_could_trap_p (else_))
	    return gimplify_pure_cond_expr (expr_p, pre_p);

	  tmp = create_tmp_var (type, "iftmp");
	  result = tmp;
	}

      /* Otherwise, only create and copy references to the values.  */
      else
	{
	  /* Carry the result through a pointer temporary: each arm stores
	     the address of its value, and the overall result is *tmp.  */
	  type = build_pointer_type (type);

	  if (!VOID_TYPE_P (TREE_TYPE (then_)))
	    then_ = build_fold_addr_expr_loc (loc, then_);

	  if (!VOID_TYPE_P (TREE_TYPE (else_)))
	    else_ = build_fold_addr_expr_loc (loc, else_);

	  expr
	    = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_);

	  tmp = create_tmp_var (type, "iftmp");
	  result = build_simple_mem_ref_loc (loc, tmp);
	}

      /* Build the new then clause, `tmp = then_;'.  But don't build the
	 assignment if the value is void; in C++ it can be if it's a throw.  */
      if (!VOID_TYPE_P (TREE_TYPE (then_)))
	TREE_OPERAND (expr, 1) = build2 (INIT_EXPR, type, tmp, then_);

      /* Similarly, build the new else clause, `tmp = else_;'.  */
      if (!VOID_TYPE_P (TREE_TYPE (else_)))
	TREE_OPERAND (expr, 2) = build2 (INIT_EXPR, type, tmp, else_);

      /* The COND_EXPR is now void; it only exists for its side effects.  */
      TREE_TYPE (expr) = void_type_node;
      recalculate_side_effects (expr);

      /* Move the COND_EXPR to the prequeue.  */
      gimplify_stmt (&expr, pre_p);

      *expr_p = result;
      return GS_ALL_DONE;
    }

  /* Remove any COMPOUND_EXPR so the following cases will be caught.  */
  STRIP_TYPE_NOPS (TREE_OPERAND (expr, 0));
  if (TREE_CODE (TREE_OPERAND (expr, 0)) == COMPOUND_EXPR)
    gimplify_compound_expr (&TREE_OPERAND (expr, 0), pre_p, true);

  /* Make sure the condition has BOOLEAN_TYPE.  */
  TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));

  /* Break apart && and || conditions.  */
  if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
      || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
    {
      expr = shortcut_cond_expr (expr);

      if (expr != *expr_p)
	{
	  *expr_p = expr;

	  /* We can't rely on gimplify_expr to re-gimplify the expanded
	     form properly, as cleanups might cause the target labels to be
	     wrapped in a TRY_FINALLY_EXPR.  To prevent that, we need to
	     set up a conditional context.  */
	  gimple_push_condition ();
	  gimplify_stmt (expr_p, &seq);
	  gimple_pop_condition (pre_p);
	  gimple_seq_add_seq (pre_p, seq);

	  return GS_ALL_DONE;
	}
    }

  /* Now do the normal gimplification.  */

  /* Gimplify condition.  */
  ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL, is_gimple_condexpr,
		       fb_rvalue);
  if (ret == GS_ERROR)
    return GS_ERROR;
  gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);

  /* The arms are gimplified into SEQ inside a conditional context so that
     cleanups are handled correctly; SEQ is appended to PRE_P at the end.  */
  gimple_push_condition ();

  have_then_clause_p = have_else_clause_p = false;
  /* If the THEN arm is just a goto to a label in this function, branch
     directly to that label from the GIMPLE_COND instead of emitting a
     fresh label followed by a goto.  */
  label_true = find_goto_label (TREE_OPERAND (expr, 1));
  if (label_true
      && DECL_CONTEXT (GOTO_DESTINATION (label_true)) == current_function_decl
      /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
	 have different locations, otherwise we end up with incorrect
	 location information on the branches.  */
      && (optimize
	  || !EXPR_HAS_LOCATION (expr)
	  || !rexpr_has_location (label_true)
	  || EXPR_LOCATION (expr) == rexpr_location (label_true)))
    {
      have_then_clause_p = true;
      label_true = GOTO_DESTINATION (label_true);
    }
  else
    label_true = create_artificial_label (UNKNOWN_LOCATION);
  /* Likewise for the ELSE arm.  */
  label_false = find_goto_label (TREE_OPERAND (expr, 2));
  if (label_false
      && DECL_CONTEXT (GOTO_DESTINATION (label_false)) == current_function_decl
      /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
	 have different locations, otherwise we end up with incorrect
	 location information on the branches.  */
      && (optimize
	  || !EXPR_HAS_LOCATION (expr)
	  || !rexpr_has_location (label_false)
	  || EXPR_LOCATION (expr) == rexpr_location (label_false)))
    {
      have_else_clause_p = true;
      label_false = GOTO_DESTINATION (label_false);
    }
  else
    label_false = create_artificial_label (UNKNOWN_LOCATION);

  /* Emit the GIMPLE_COND itself, decomposing the boolean condition into
     a comparison code and two operands.  */
  gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
				 &arm2);
  cond_stmt = gimple_build_cond (pred_code, arm1, arm2, label_true,
				 label_false);
  gimple_set_no_warning (cond_stmt, TREE_NO_WARNING (COND_EXPR_COND (expr)));
  gimplify_seq_add_stmt (&seq, cond_stmt);
  gimple_stmt_iterator gsi = gsi_last (seq);
  maybe_fold_stmt (&gsi);

  /* LABEL_CONT, when set, is the join point emitted after both arms.  */
  label_cont = NULL_TREE;
  if (!have_then_clause_p)
    {
      /* For if (...) {} else { code; } put label_true after
	 the else block.  */
      if (TREE_OPERAND (expr, 1) == NULL_TREE
	  && !have_else_clause_p
	  && TREE_OPERAND (expr, 2) != NULL_TREE)
	label_cont = label_true;
      else
	{
	  gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
	  have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
	  /* For if (...) { code; } else {} or
	     if (...) { code; } else goto label; or
	     if (...) { code; return; } else { ... }
	     label_cont isn't needed.  */
	  if (!have_else_clause_p
	      && TREE_OPERAND (expr, 2) != NULL_TREE
	      && gimple_seq_may_fallthru (seq))
	    {
	      gimple *g;
	      label_cont = create_artificial_label (UNKNOWN_LOCATION);

	      g = gimple_build_goto (label_cont);

	      /* GIMPLE_COND's are very low level; they have embedded
		 gotos.  This particular embedded goto should not be marked
		 with the location of the original COND_EXPR, as it would
		 correspond to the COND_EXPR's condition, not the ELSE or the
		 THEN arms.  To avoid marking it with the wrong location, flag
		 it as "no location".  */
	      gimple_set_do_not_emit_location (g);

	      gimplify_seq_add_stmt (&seq, g);
	    }
	}
    }
  if (!have_else_clause_p)
    {
      gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
      have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
    }
  if (label_cont)
    gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));

  gimple_pop_condition (pre_p);
  gimple_seq_add_seq (pre_p, seq);

  if (ret == GS_ERROR)
    ; /* Do nothing.  */
  else if (have_then_clause_p || have_else_clause_p)
    ret = GS_ALL_DONE;
  else
    {
      /* Both arms are empty; replace the COND_EXPR with its predicate.  */
      expr = TREE_OPERAND (expr, 0);
      gimplify_stmt (&expr, pre_p);
    }

  *expr_p = NULL;
  return ret;
}
4258
4259 /* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
4260 to be marked addressable.
4261
4262 We cannot rely on such an expression being directly markable if a temporary
4263 has been created by the gimplification. In this case, we create another
4264 temporary and initialize it with a copy, which will become a store after we
4265 mark it addressable. This can happen if the front-end passed us something
4266 that it could not mark addressable yet, like a Fortran pass-by-reference
4267 parameter (int) floatvar. */
4268
4269 static void
4270 prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p)
4271 {
4272 while (handled_component_p (*expr_p))
4273 expr_p = &TREE_OPERAND (*expr_p, 0);
4274 if (is_gimple_reg (*expr_p))
4275 {
4276 /* Do not allow an SSA name as the temporary. */
4277 tree var = get_initialized_tmp_var (*expr_p, seq_p, NULL, false);
4278 DECL_GIMPLE_REG_P (var) = 0;
4279 *expr_p = var;
4280 }
4281 }
4282
4283 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4284 a call to __builtin_memcpy. */
4285
4286 static enum gimplify_status
4287 gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
4288 gimple_seq *seq_p)
4289 {
4290 tree t, to, to_ptr, from, from_ptr;
4291 gcall *gs;
4292 location_t loc = EXPR_LOCATION (*expr_p);
4293
4294 to = TREE_OPERAND (*expr_p, 0);
4295 from = TREE_OPERAND (*expr_p, 1);
4296
4297 /* Mark the RHS addressable. Beware that it may not be possible to do so
4298 directly if a temporary has been created by the gimplification. */
4299 prepare_gimple_addressable (&from, seq_p);
4300
4301 mark_addressable (from);
4302 from_ptr = build_fold_addr_expr_loc (loc, from);
4303 gimplify_arg (&from_ptr, seq_p, loc);
4304
4305 mark_addressable (to);
4306 to_ptr = build_fold_addr_expr_loc (loc, to);
4307 gimplify_arg (&to_ptr, seq_p, loc);
4308
4309 t = builtin_decl_implicit (BUILT_IN_MEMCPY);
4310
4311 gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);
4312
4313 if (want_value)
4314 {
4315 /* tmp = memcpy() */
4316 t = create_tmp_var (TREE_TYPE (to_ptr));
4317 gimple_call_set_lhs (gs, t);
4318 gimplify_seq_add_stmt (seq_p, gs);
4319
4320 *expr_p = build_simple_mem_ref (t);
4321 return GS_ALL_DONE;
4322 }
4323
4324 gimplify_seq_add_stmt (seq_p, gs);
4325 *expr_p = NULL;
4326 return GS_ALL_DONE;
4327 }
4328
4329 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4330 a call to __builtin_memset. In this case we know that the RHS is
4331 a CONSTRUCTOR with an empty element list. */
4332
4333 static enum gimplify_status
4334 gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
4335 gimple_seq *seq_p)
4336 {
4337 tree t, from, to, to_ptr;
4338 gcall *gs;
4339 location_t loc = EXPR_LOCATION (*expr_p);
4340
4341 /* Assert our assumptions, to abort instead of producing wrong code
4342 silently if they are not met. Beware that the RHS CONSTRUCTOR might
4343 not be immediately exposed. */
4344 from = TREE_OPERAND (*expr_p, 1);
4345 if (TREE_CODE (from) == WITH_SIZE_EXPR)
4346 from = TREE_OPERAND (from, 0);
4347
4348 gcc_assert (TREE_CODE (from) == CONSTRUCTOR
4349 && vec_safe_is_empty (CONSTRUCTOR_ELTS (from)));
4350
4351 /* Now proceed. */
4352 to = TREE_OPERAND (*expr_p, 0);
4353
4354 to_ptr = build_fold_addr_expr_loc (loc, to);
4355 gimplify_arg (&to_ptr, seq_p, loc);
4356 t = builtin_decl_implicit (BUILT_IN_MEMSET);
4357
4358 gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);
4359
4360 if (want_value)
4361 {
4362 /* tmp = memset() */
4363 t = create_tmp_var (TREE_TYPE (to_ptr));
4364 gimple_call_set_lhs (gs, t);
4365 gimplify_seq_add_stmt (seq_p, gs);
4366
4367 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
4368 return GS_ALL_DONE;
4369 }
4370
4371 gimplify_seq_add_stmt (seq_p, gs);
4372 *expr_p = NULL;
4373 return GS_ALL_DONE;
4374 }
4375
/* A subroutine of gimplify_init_ctor_preeval.  Called via walk_tree,
   determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
   assignment.  Return non-null if we detect a potential overlap.  */

/* Context describing the LHS of the assignment, threaded through the
   walk_tree callback gimplify_init_ctor_preeval_1 below.  */
struct gimplify_init_ctor_preeval_data
{
  /* The base decl of the lhs object.  May be NULL, in which case we
     have to assume the lhs is indirect.  */
  tree lhs_base_decl;

  /* The alias set of the lhs object.  */
  alias_set_type lhs_alias_set;
};
4389
4390 static tree
4391 gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
4392 {
4393 struct gimplify_init_ctor_preeval_data *data
4394 = (struct gimplify_init_ctor_preeval_data *) xdata;
4395 tree t = *tp;
4396
4397 /* If we find the base object, obviously we have overlap. */
4398 if (data->lhs_base_decl == t)
4399 return t;
4400
4401 /* If the constructor component is indirect, determine if we have a
4402 potential overlap with the lhs. The only bits of information we
4403 have to go on at this point are addressability and alias sets. */
4404 if ((INDIRECT_REF_P (t)
4405 || TREE_CODE (t) == MEM_REF)
4406 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
4407 && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
4408 return t;
4409
4410 /* If the constructor component is a call, determine if it can hide a
4411 potential overlap with the lhs through an INDIRECT_REF like above.
4412 ??? Ugh - this is completely broken. In fact this whole analysis
4413 doesn't look conservative. */
4414 if (TREE_CODE (t) == CALL_EXPR)
4415 {
4416 tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));
4417
4418 for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
4419 if (POINTER_TYPE_P (TREE_VALUE (type))
4420 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
4421 && alias_sets_conflict_p (data->lhs_alias_set,
4422 get_alias_set
4423 (TREE_TYPE (TREE_VALUE (type)))))
4424 return t;
4425 }
4426
4427 if (IS_TYPE_OR_DECL_P (t))
4428 *walk_subtrees = 0;
4429 return NULL;
4430 }
4431
/* A subroutine of gimplify_init_constructor.  Pre-evaluate EXPR,
   force values that overlap with the lhs (as described by *DATA)
   into temporaries.

   EXPR_P points to a constructor element value and is updated in place.
   PRE_P and POST_P collect statements to emit before and after the
   assignment, respectively.  */

static void
gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			    struct gimplify_init_ctor_preeval_data *data)
{
  enum gimplify_status one;

  /* If the value is constant, then there's nothing to pre-evaluate.  */
  if (TREE_CONSTANT (*expr_p))
    {
      /* Ensure it does not have side effects, it might contain a reference to
	 the object we're initializing.  */
      gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
      return;
    }

  /* If the type has non-trivial constructors, we can't pre-evaluate.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
    return;

  /* Recurse for nested constructors.  */
  if (TREE_CODE (*expr_p) == CONSTRUCTOR)
    {
      unsigned HOST_WIDE_INT ix;
      constructor_elt *ce;
      vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (*expr_p);

      FOR_EACH_VEC_SAFE_ELT (v, ix, ce)
	gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);

      return;
    }

  /* If this is a variable sized type, we must remember the size.  */
  maybe_with_size_expr (expr_p);

  /* Gimplify the constructor element to something appropriate for the rhs
     of a MODIFY_EXPR.  Given that we know the LHS is an aggregate, we know
     the gimplifier will consider this a store to memory.  Doing this
     gimplification now means that we won't have to deal with complicated
     language-specific trees, nor trees like SAVE_EXPR that can induce
     exponential search behavior.  */
  one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
  if (one == GS_ERROR)
    {
      /* NULL marks the element as failed; the caller's element loop skips
	 NULL values.  */
      *expr_p = NULL;
      return;
    }

  /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
     with the lhs, since "a = { .x=a }" doesn't make sense.  This will
     always be true for all scalars, since is_gimple_mem_rhs insists on a
     temporary variable for them.  */
  if (DECL_P (*expr_p))
    return;

  /* If this is of variable size, we have no choice but to assume it doesn't
     overlap since we can't make a temporary for it.  */
  if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
    return;

  /* Otherwise, we must search for overlap ...  */
  if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
    return;

  /* ... and if found, force the value into a temporary.  */
  *expr_p = get_formal_tmp_var (*expr_p, pre_p);
}
4503
/* A subroutine of gimplify_init_ctor_eval.  Create a loop for
   a RANGE_EXPR in a CONSTRUCTOR for an array.

      var = lower;
    loop_entry:
      object[var] = value;
      if (var == upper)
	goto loop_exit;
      var = var + 1;
      goto loop_entry;
    loop_exit:

   We increment var _after_ the loop exit check because we might otherwise
   fail if upper == TYPE_MAX_VALUE (type for upper).

   Note that we never have to deal with SAVE_EXPRs here, because this has
   already been taken care of for us, in gimplify_init_ctor_preeval().

   OBJECT is the LHS array, LOWER/UPPER the inclusive index bounds, VALUE
   the element initializer, ARRAY_ELT_TYPE the element type, and CLEARED
   whether the whole object was zeroed beforehand.  GIMPLE statements are
   appended to PRE_P.  */

static void gimplify_init_ctor_eval (tree, vec<constructor_elt, va_gc> *,
				     gimple_seq *, bool);

static void
gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
			       tree value, tree array_elt_type,
			       gimple_seq *pre_p, bool cleared)
{
  tree loop_entry_label, loop_exit_label, fall_thru_label;
  tree var, var_type, cref, tmp;

  loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
  loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
  fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);

  /* Create and initialize the index variable.  */
  var_type = TREE_TYPE (upper);
  var = create_tmp_var (var_type);
  gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));

  /* Add the loop entry label.  */
  gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));

  /* Build the reference.  */
  cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
		 var, NULL_TREE, NULL_TREE);

  /* If we are a constructor, just call gimplify_init_ctor_eval to do
     the store.  Otherwise just assign value to the reference.  */

  if (TREE_CODE (value) == CONSTRUCTOR)
    /* NB we might have to call ourself recursively through
       gimplify_init_ctor_eval if the value is a constructor.  */
    gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
			     pre_p, cleared);
  else
    gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));

  /* We exit the loop when the index var is equal to the upper bound.  */
  gimplify_seq_add_stmt (pre_p,
			 gimple_build_cond (EQ_EXPR, var, upper,
					    loop_exit_label, fall_thru_label));

  gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));

  /* Otherwise, increment the index var...  */
  tmp = build2 (PLUS_EXPR, var_type, var,
		fold_convert (var_type, integer_one_node));
  gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));

  /* ...and jump back to the loop entry.  */
  gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));

  /* Add the loop exit label.  */
  gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
}
4578
4579 /* Return true if FDECL is accessing a field that is zero sized. */
4580
4581 static bool
4582 zero_sized_field_decl (const_tree fdecl)
4583 {
4584 if (TREE_CODE (fdecl) == FIELD_DECL && DECL_SIZE (fdecl)
4585 && integer_zerop (DECL_SIZE (fdecl)))
4586 return true;
4587 return false;
4588 }
4589
4590 /* Return true if TYPE is zero sized. */
4591
4592 static bool
4593 zero_sized_type (const_tree type)
4594 {
4595 if (AGGREGATE_TYPE_P (type) && TYPE_SIZE (type)
4596 && integer_zerop (TYPE_SIZE (type)))
4597 return true;
4598 return false;
4599 }
4600
/* A subroutine of gimplify_init_constructor.  Generate individual
   MODIFY_EXPRs for a CONSTRUCTOR.  OBJECT is the LHS against which the
   assignments should happen.  ELTS is the CONSTRUCTOR_ELTS of the
   CONSTRUCTOR.  CLEARED is true if the entire LHS object has been
   zeroed first.  GIMPLE statements are appended to PRE_P.  */

static void
gimplify_init_ctor_eval (tree object, vec<constructor_elt, va_gc> *elts,
			 gimple_seq *pre_p, bool cleared)
{
  tree array_elt_type = NULL;
  unsigned HOST_WIDE_INT ix;
  tree purpose, value;

  /* A non-NULL element type signals that OBJECT is an array, so PURPOSEs
     are indices (or RANGE_EXPRs) rather than FIELD_DECLs.  */
  if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
    array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));

  FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
    {
      tree cref;

      /* NULL values are created above for gimplification errors.  */
      if (value == NULL)
	continue;

      /* A zero value is already covered by the whole-object clearing.  */
      if (cleared && initializer_zerop (value))
	continue;

      /* ??? Here's to hoping the front end fills in all of the indices,
	 so we don't have to figure out what's missing ourselves.  */
      gcc_assert (purpose);

      /* Skip zero-sized fields, unless value has side-effects.  This can
	 happen with calls to functions returning a zero-sized type, which
	 we shouldn't discard.  As a number of downstream passes don't
	 expect sets of zero-sized fields, we rely on the gimplification of
	 the MODIFY_EXPR we make below to drop the assignment statement.  */
      if (! TREE_SIDE_EFFECTS (value) && zero_sized_field_decl (purpose))
	continue;

      /* If we have a RANGE_EXPR, we have to build a loop to assign the
	 whole range.  */
      if (TREE_CODE (purpose) == RANGE_EXPR)
	{
	  tree lower = TREE_OPERAND (purpose, 0);
	  tree upper = TREE_OPERAND (purpose, 1);

	  /* If the lower bound is equal to upper, just treat it as if
	     upper was the index.  */
	  if (simple_cst_equal (lower, upper))
	    purpose = upper;
	  else
	    {
	      gimplify_init_ctor_eval_range (object, lower, upper, value,
					     array_elt_type, pre_p, cleared);
	      continue;
	    }
	}

      /* Build the reference to the element being initialized: either an
	 ARRAY_REF or a COMPONENT_REF, depending on OBJECT's type.  */
      if (array_elt_type)
	{
	  /* Do not use bitsizetype for ARRAY_REF indices.  */
	  if (TYPE_DOMAIN (TREE_TYPE (object)))
	    purpose
	      = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
			      purpose);
	  cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
			 purpose, NULL_TREE, NULL_TREE);
	}
      else
	{
	  gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
	  cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
			 unshare_expr (object), purpose, NULL_TREE);
	}

      /* Nested aggregate constructors recurse; everything else becomes a
	 plain INIT_EXPR that is gimplified in place.  */
      if (TREE_CODE (value) == CONSTRUCTOR
	  && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
	gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
				 pre_p, cleared);
      else
	{
	  tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
	  gimplify_and_add (init, pre_p);
	  ggc_free (init);
	}
    }
}
4689
4690 /* Return the appropriate RHS predicate for this LHS. */
4691
4692 gimple_predicate
4693 rhs_predicate_for (tree lhs)
4694 {
4695 if (is_gimple_reg (lhs))
4696 return is_gimple_reg_rhs_or_call;
4697 else
4698 return is_gimple_mem_rhs_or_call;
4699 }
4700
4701 /* Return the initial guess for an appropriate RHS predicate for this LHS,
4702 before the LHS has been gimplified. */
4703
4704 static gimple_predicate
4705 initial_rhs_predicate_for (tree lhs)
4706 {
4707 if (is_gimple_reg_type (TREE_TYPE (lhs)))
4708 return is_gimple_reg_rhs_or_call;
4709 else
4710 return is_gimple_mem_rhs_or_call;
4711 }
4712
/* Gimplify a C99 compound literal expression.  This just means adding
   the DECL_EXPR before the current statement and using its anonymous
   decl instead.

   GIMPLE_TEST_F is the predicate the result must satisfy; FALLBACK says
   whether the caller needs an lvalue.  Returns GS_OK, with *EXPR_P set
   either to the literal's initializer directly or to the literal's
   anonymous decl.  */

static enum gimplify_status
gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p,
				bool (*gimple_test_f) (tree),
				fallback_t fallback)
{
  tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
  tree decl = DECL_EXPR_DECL (decl_s);
  tree init = DECL_INITIAL (decl);
  /* Mark the decl as addressable if the compound literal
     expression is addressable now, otherwise it is marked too late
     after we gimplify the initialization expression.  */
  if (TREE_ADDRESSABLE (*expr_p))
    TREE_ADDRESSABLE (decl) = 1;
  /* Otherwise, if we don't need an lvalue and have a literal directly
     substitute it.  Check if it matches the gimple predicate, as
     otherwise we'd generate a new temporary, and we can as well just
     use the decl we already have.  */
  else if (!TREE_ADDRESSABLE (decl)
	   && !TREE_THIS_VOLATILE (decl)
	   && init
	   && (fallback & fb_lvalue) == 0
	   && gimple_test_f (init))
    {
      *expr_p = init;
      return GS_OK;
    }

  /* Preliminarily mark non-addressed complex variables as eligible
     for promotion to gimple registers.  We'll transform their uses
     as we find them.  */
  if ((TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE
       || TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE)
      && !TREE_THIS_VOLATILE (decl)
      && !needs_to_live_in_memory (decl))
    DECL_GIMPLE_REG_P (decl) = 1;

  /* If the decl is not addressable, then it is being used in some
     expression or on the right hand side of a statement, and it can
     be put into a readonly data section.  */
  if (!TREE_ADDRESSABLE (decl) && (fallback & fb_lvalue) == 0)
    TREE_READONLY (decl) = 1;

  /* This decl isn't mentioned in the enclosing block, so add it to the
     list of temps.  FIXME it seems a bit of a kludge to say that
     anonymous artificial vars aren't pushed, but everything else is.  */
  if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
    gimple_add_tmp_var (decl);

  gimplify_and_add (decl_s, pre_p);
  *expr_p = decl;
  return GS_OK;
}
4769
4770 /* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
4771 return a new CONSTRUCTOR if something changed. */
4772
4773 static tree
4774 optimize_compound_literals_in_ctor (tree orig_ctor)
4775 {
4776 tree ctor = orig_ctor;
4777 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (ctor);
4778 unsigned int idx, num = vec_safe_length (elts);
4779
4780 for (idx = 0; idx < num; idx++)
4781 {
4782 tree value = (*elts)[idx].value;
4783 tree newval = value;
4784 if (TREE_CODE (value) == CONSTRUCTOR)
4785 newval = optimize_compound_literals_in_ctor (value);
4786 else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
4787 {
4788 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
4789 tree decl = DECL_EXPR_DECL (decl_s);
4790 tree init = DECL_INITIAL (decl);
4791
4792 if (!TREE_ADDRESSABLE (value)
4793 && !TREE_ADDRESSABLE (decl)
4794 && init
4795 && TREE_CODE (init) == CONSTRUCTOR)
4796 newval = optimize_compound_literals_in_ctor (init);
4797 }
4798 if (newval == value)
4799 continue;
4800
4801 if (ctor == orig_ctor)
4802 {
4803 ctor = copy_node (orig_ctor);
4804 CONSTRUCTOR_ELTS (ctor) = vec_safe_copy (elts);
4805 elts = CONSTRUCTOR_ELTS (ctor);
4806 }
4807 (*elts)[idx].value = newval;
4808 }
4809 return ctor;
4810 }
4811
4812 /* A subroutine of gimplify_modify_expr. Break out elements of a
4813 CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
4814
4815 Note that we still need to clear any elements that don't have explicit
4816 initializers, so if not all elements are initialized we keep the
4817 original MODIFY_EXPR, we just remove all of the constructor elements.
4818
4819 If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
4820 GS_ERROR if we would have to create a temporary when gimplifying
4821 this constructor. Otherwise, return GS_OK.
4822
4823 If NOTIFY_TEMP_CREATION is false, just do the gimplification. */
4824
4825 static enum gimplify_status
4826 gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
4827 bool want_value, bool notify_temp_creation)
4828 {
4829 tree object, ctor, type;
4830 enum gimplify_status ret;
4831 vec<constructor_elt, va_gc> *elts;
4832
4833 gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);
4834
4835 if (!notify_temp_creation)
4836 {
4837 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
4838 is_gimple_lvalue, fb_lvalue);
4839 if (ret == GS_ERROR)
4840 return ret;
4841 }
4842
4843 object = TREE_OPERAND (*expr_p, 0);
4844 ctor = TREE_OPERAND (*expr_p, 1)
4845 = optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
4846 type = TREE_TYPE (ctor);
4847 elts = CONSTRUCTOR_ELTS (ctor);
4848 ret = GS_ALL_DONE;
4849
4850 switch (TREE_CODE (type))
4851 {
4852 case RECORD_TYPE:
4853 case UNION_TYPE:
4854 case QUAL_UNION_TYPE:
4855 case ARRAY_TYPE:
4856 {
4857 struct gimplify_init_ctor_preeval_data preeval_data;
4858 HOST_WIDE_INT num_ctor_elements, num_nonzero_elements;
4859 HOST_WIDE_INT num_unique_nonzero_elements;
4860 bool cleared, complete_p, valid_const_initializer;
4861 /* Use readonly data for initializers of this or smaller size
4862 regardless of the num_nonzero_elements / num_unique_nonzero_elements
4863 ratio. */
4864 const HOST_WIDE_INT min_unique_size = 64;
4865 /* If num_nonzero_elements / num_unique_nonzero_elements ratio
4866 is smaller than this, use readonly data. */
4867 const int unique_nonzero_ratio = 8;
4868
4869 /* Aggregate types must lower constructors to initialization of
4870 individual elements. The exception is that a CONSTRUCTOR node
4871 with no elements indicates zero-initialization of the whole. */
4872 if (vec_safe_is_empty (elts))
4873 {
4874 if (notify_temp_creation)
4875 return GS_OK;
4876 break;
4877 }
4878
4879 /* Fetch information about the constructor to direct later processing.
4880 We might want to make static versions of it in various cases, and
4881 can only do so if it known to be a valid constant initializer. */
4882 valid_const_initializer
4883 = categorize_ctor_elements (ctor, &num_nonzero_elements,
4884 &num_unique_nonzero_elements,
4885 &num_ctor_elements, &complete_p);
4886
4887 /* If a const aggregate variable is being initialized, then it
4888 should never be a lose to promote the variable to be static. */
4889 if (valid_const_initializer
4890 && num_nonzero_elements > 1
4891 && TREE_READONLY (object)
4892 && VAR_P (object)
4893 && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object))
4894 /* For ctors that have many repeated nonzero elements
4895 represented through RANGE_EXPRs, prefer initializing
4896 those through runtime loops over copies of large amounts
4897 of data from readonly data section. */
4898 && (num_unique_nonzero_elements
4899 > num_nonzero_elements / unique_nonzero_ratio
4900 || ((unsigned HOST_WIDE_INT) int_size_in_bytes (type)
4901 <= (unsigned HOST_WIDE_INT) min_unique_size)))
4902 {
4903 if (notify_temp_creation)
4904 return GS_ERROR;
4905 DECL_INITIAL (object) = ctor;
4906 TREE_STATIC (object) = 1;
4907 if (!DECL_NAME (object))
4908 DECL_NAME (object) = create_tmp_var_name ("C");
4909 walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);
4910
4911 /* ??? C++ doesn't automatically append a .<number> to the
4912 assembler name, and even when it does, it looks at FE private
4913 data structures to figure out what that number should be,
4914 which are not set for this variable. I suppose this is
4915 important for local statics for inline functions, which aren't
4916 "local" in the object file sense. So in order to get a unique
4917 TU-local symbol, we must invoke the lhd version now. */
4918 lhd_set_decl_assembler_name (object);
4919
4920 *expr_p = NULL_TREE;
4921 break;
4922 }
4923
4924 /* If there are "lots" of initialized elements, even discounting
4925 those that are not address constants (and thus *must* be
4926 computed at runtime), then partition the constructor into
4927 constant and non-constant parts. Block copy the constant
4928 parts in, then generate code for the non-constant parts. */
4929 /* TODO. There's code in cp/typeck.c to do this. */
4930
4931 if (int_size_in_bytes (TREE_TYPE (ctor)) < 0)
4932 /* store_constructor will ignore the clearing of variable-sized
4933 objects. Initializers for such objects must explicitly set
4934 every field that needs to be set. */
4935 cleared = false;
4936 else if (!complete_p)
4937 /* If the constructor isn't complete, clear the whole object
4938 beforehand, unless CONSTRUCTOR_NO_CLEARING is set on it.
4939
4940 ??? This ought not to be needed. For any element not present
4941 in the initializer, we should simply set them to zero. Except
4942 we'd need to *find* the elements that are not present, and that
4943 requires trickery to avoid quadratic compile-time behavior in
4944 large cases or excessive memory use in small cases. */
4945 cleared = !CONSTRUCTOR_NO_CLEARING (ctor);
4946 else if (num_ctor_elements - num_nonzero_elements
4947 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
4948 && num_nonzero_elements < num_ctor_elements / 4)
4949 /* If there are "lots" of zeros, it's more efficient to clear
4950 the memory and then set the nonzero elements. */
4951 cleared = true;
4952 else
4953 cleared = false;
4954
4955 /* If there are "lots" of initialized elements, and all of them
4956 are valid address constants, then the entire initializer can
4957 be dropped to memory, and then memcpy'd out. Don't do this
4958 for sparse arrays, though, as it's more efficient to follow
4959 the standard CONSTRUCTOR behavior of memset followed by
4960 individual element initialization. Also don't do this for small
4961 all-zero initializers (which aren't big enough to merit
4962 clearing), and don't try to make bitwise copies of
4963 TREE_ADDRESSABLE types. */
4964
4965 if (valid_const_initializer
4966 && !(cleared || num_nonzero_elements == 0)
4967 && !TREE_ADDRESSABLE (type))
4968 {
4969 HOST_WIDE_INT size = int_size_in_bytes (type);
4970 unsigned int align;
4971
4972 /* ??? We can still get unbounded array types, at least
4973 from the C++ front end. This seems wrong, but attempt
4974 to work around it for now. */
4975 if (size < 0)
4976 {
4977 size = int_size_in_bytes (TREE_TYPE (object));
4978 if (size >= 0)
4979 TREE_TYPE (ctor) = type = TREE_TYPE (object);
4980 }
4981
4982 /* Find the maximum alignment we can assume for the object. */
4983 /* ??? Make use of DECL_OFFSET_ALIGN. */
4984 if (DECL_P (object))
4985 align = DECL_ALIGN (object);
4986 else
4987 align = TYPE_ALIGN (type);
4988
4989 /* Do a block move either if the size is so small as to make
4990 each individual move a sub-unit move on average, or if it
4991 is so large as to make individual moves inefficient. */
4992 if (size > 0
4993 && num_nonzero_elements > 1
4994 /* For ctors that have many repeated nonzero elements
4995 represented through RANGE_EXPRs, prefer initializing
4996 those through runtime loops over copies of large amounts
4997 of data from readonly data section. */
4998 && (num_unique_nonzero_elements
4999 > num_nonzero_elements / unique_nonzero_ratio
5000 || size <= min_unique_size)
5001 && (size < num_nonzero_elements
5002 || !can_move_by_pieces (size, align)))
5003 {
5004 if (notify_temp_creation)
5005 return GS_ERROR;
5006
5007 walk_tree (&ctor, force_labels_r, NULL, NULL);
5008 ctor = tree_output_constant_def (ctor);
5009 if (!useless_type_conversion_p (type, TREE_TYPE (ctor)))
5010 ctor = build1 (VIEW_CONVERT_EXPR, type, ctor);
5011 TREE_OPERAND (*expr_p, 1) = ctor;
5012
5013 /* This is no longer an assignment of a CONSTRUCTOR, but
5014 we still may have processing to do on the LHS. So
5015 pretend we didn't do anything here to let that happen. */
5016 return GS_UNHANDLED;
5017 }
5018 }
5019
5020 /* If the target is volatile, we have non-zero elements and more than
5021 one field to assign, initialize the target from a temporary. */
5022 if (TREE_THIS_VOLATILE (object)
5023 && !TREE_ADDRESSABLE (type)
5024 && (num_nonzero_elements > 0 || !cleared)
5025 && vec_safe_length (elts) > 1)
5026 {
5027 tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type));
5028 TREE_OPERAND (*expr_p, 0) = temp;
5029 *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
5030 *expr_p,
5031 build2 (MODIFY_EXPR, void_type_node,
5032 object, temp));
5033 return GS_OK;
5034 }
5035
5036 if (notify_temp_creation)
5037 return GS_OK;
5038
5039 /* If there are nonzero elements and if needed, pre-evaluate to capture
5040 elements overlapping with the lhs into temporaries. We must do this
5041 before clearing to fetch the values before they are zeroed-out. */
5042 if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR)
5043 {
5044 preeval_data.lhs_base_decl = get_base_address (object);
5045 if (!DECL_P (preeval_data.lhs_base_decl))
5046 preeval_data.lhs_base_decl = NULL;
5047 preeval_data.lhs_alias_set = get_alias_set (object);
5048
5049 gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
5050 pre_p, post_p, &preeval_data);
5051 }
5052
5053 bool ctor_has_side_effects_p
5054 = TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p, 1));
5055
5056 if (cleared)
5057 {
5058 /* Zap the CONSTRUCTOR element list, which simplifies this case.
5059 Note that we still have to gimplify, in order to handle the
5060 case of variable sized types. Avoid shared tree structures. */
5061 CONSTRUCTOR_ELTS (ctor) = NULL;
5062 TREE_SIDE_EFFECTS (ctor) = 0;
5063 object = unshare_expr (object);
5064 gimplify_stmt (expr_p, pre_p);
5065 }
5066
5067 /* If we have not block cleared the object, or if there are nonzero
5068 elements in the constructor, or if the constructor has side effects,
5069 add assignments to the individual scalar fields of the object. */
5070 if (!cleared
5071 || num_nonzero_elements > 0
5072 || ctor_has_side_effects_p)
5073 gimplify_init_ctor_eval (object, elts, pre_p, cleared);
5074
5075 *expr_p = NULL_TREE;
5076 }
5077 break;
5078
5079 case COMPLEX_TYPE:
5080 {
5081 tree r, i;
5082
5083 if (notify_temp_creation)
5084 return GS_OK;
5085
5086 /* Extract the real and imaginary parts out of the ctor. */
5087 gcc_assert (elts->length () == 2);
5088 r = (*elts)[0].value;
5089 i = (*elts)[1].value;
5090 if (r == NULL || i == NULL)
5091 {
5092 tree zero = build_zero_cst (TREE_TYPE (type));
5093 if (r == NULL)
5094 r = zero;
5095 if (i == NULL)
5096 i = zero;
5097 }
5098
5099 /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
5100 represent creation of a complex value. */
5101 if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
5102 {
5103 ctor = build_complex (type, r, i);
5104 TREE_OPERAND (*expr_p, 1) = ctor;
5105 }
5106 else
5107 {
5108 ctor = build2 (COMPLEX_EXPR, type, r, i);
5109 TREE_OPERAND (*expr_p, 1) = ctor;
5110 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
5111 pre_p,
5112 post_p,
5113 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
5114 fb_rvalue);
5115 }
5116 }
5117 break;
5118
5119 case VECTOR_TYPE:
5120 {
5121 unsigned HOST_WIDE_INT ix;
5122 constructor_elt *ce;
5123
5124 if (notify_temp_creation)
5125 return GS_OK;
5126
5127 /* Go ahead and simplify constant constructors to VECTOR_CST. */
5128 if (TREE_CONSTANT (ctor))
5129 {
5130 bool constant_p = true;
5131 tree value;
5132
5133 /* Even when ctor is constant, it might contain non-*_CST
5134 elements, such as addresses or trapping values like
5135 1.0/0.0 - 1.0/0.0. Such expressions don't belong
5136 in VECTOR_CST nodes. */
5137 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
5138 if (!CONSTANT_CLASS_P (value))
5139 {
5140 constant_p = false;
5141 break;
5142 }
5143
5144 if (constant_p)
5145 {
5146 TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
5147 break;
5148 }
5149
5150 TREE_CONSTANT (ctor) = 0;
5151 }
5152
5153 /* Vector types use CONSTRUCTOR all the way through gimple
5154 compilation as a general initializer. */
5155 FOR_EACH_VEC_SAFE_ELT (elts, ix, ce)
5156 {
5157 enum gimplify_status tret;
5158 tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
5159 fb_rvalue);
5160 if (tret == GS_ERROR)
5161 ret = GS_ERROR;
5162 else if (TREE_STATIC (ctor)
5163 && !initializer_constant_valid_p (ce->value,
5164 TREE_TYPE (ce->value)))
5165 TREE_STATIC (ctor) = 0;
5166 }
5167 if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
5168 TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
5169 }
5170 break;
5171
5172 default:
5173 /* So how did we get a CONSTRUCTOR for a scalar type? */
5174 gcc_unreachable ();
5175 }
5176
5177 if (ret == GS_ERROR)
5178 return GS_ERROR;
5179 /* If we have gimplified both sides of the initializer but have
5180 not emitted an assignment, do so now. */
5181 if (*expr_p)
5182 {
5183 tree lhs = TREE_OPERAND (*expr_p, 0);
5184 tree rhs = TREE_OPERAND (*expr_p, 1);
5185 if (want_value && object == lhs)
5186 lhs = unshare_expr (lhs);
5187 gassign *init = gimple_build_assign (lhs, rhs);
5188 gimplify_seq_add_stmt (pre_p, init);
5189 }
5190 if (want_value)
5191 {
5192 *expr_p = object;
5193 return GS_OK;
5194 }
5195 else
5196 {
5197 *expr_p = NULL;
5198 return GS_ALL_DONE;
5199 }
5200 }
5201
/* Given a pointer value OP0, return a simplified version of an
   indirection through OP0, or NULL_TREE if no simplification is
   possible.  This may only be applied to a rhs of an expression.
   Note that the resulting type may be different from the type pointed
   to in the sense that it is still compatible from the langhooks
   point of view. */

static tree
gimple_fold_indirect_ref_rhs (tree t)
{
  /* Currently just a thin wrapper around the generic folder; kept as a
     separate function so RHS-only folding rules can be added here
     without affecting other users of gimple_fold_indirect_ref.  */
  return gimple_fold_indirect_ref (t);
}
5214
/* Subroutine of gimplify_modify_expr to do simplifications of
   MODIFY_EXPRs based on the code of the RHS.  We loop for as long as
   something changes.

   *EXPR_P is the assignment; *FROM_P and *TO_P point at its RHS and LHS
   operands.  PRE_P/POST_P receive side-effect statements.  WANT_VALUE is
   true when the caller uses the value of the assignment.

   Returns GS_UNHANDLED when no simplification applied (caller proceeds
   with the generic path), GS_OK/GS_ALL_DONE when a rewrite was made, or
   GS_ERROR on failure.  */

static enum gimplify_status
gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
			  gimple_seq *pre_p, gimple_seq *post_p,
			  bool want_value)
{
  enum gimplify_status ret = GS_UNHANDLED;
  bool changed;

  /* Iterate to a fixpoint: each rewrite may expose another opportunity
     (e.g. stripping a COMPOUND_EXPR can reveal a CONSTRUCTOR).  */
  do
    {
      changed = false;
      switch (TREE_CODE (*from_p))
	{
	case VAR_DECL:
	  /* If we're assigning from a read-only variable initialized with
	     a constructor, do the direct assignment from the constructor,
	     but only if neither source nor target are volatile since this
	     latter assignment might end up being done on a per-field basis.  */
	  if (DECL_INITIAL (*from_p)
	      && TREE_READONLY (*from_p)
	      && !TREE_THIS_VOLATILE (*from_p)
	      && !TREE_THIS_VOLATILE (*to_p)
	      && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR)
	    {
	      tree old_from = *from_p;
	      enum gimplify_status subret;

	      /* Move the constructor into the RHS.  */
	      *from_p = unshare_expr (DECL_INITIAL (*from_p));

	      /* Let's see if gimplify_init_constructor will need to put
		 it in memory.  Run in "notify only" mode (last arg true)
		 so no code is emitted yet.  */
	      subret = gimplify_init_constructor (expr_p, NULL, NULL,
						  false, true);
	      if (subret == GS_ERROR)
		{
		  /* If so, revert the change.  */
		  *from_p = old_from;
		}
	      else
		{
		  ret = GS_OK;
		  changed = true;
		}
	    }
	  break;
	case INDIRECT_REF:
	  {
	    /* If we have code like

	     *(const A*)(A*)&x

	     where the type of "x" is a (possibly cv-qualified variant
	     of "A"), treat the entire expression as identical to "x".
	     This kind of code arises in C++ when an object is bound
	     to a const reference, and if "x" is a TARGET_EXPR we want
	     to take advantage of the optimization below.  */
	    bool volatile_p = TREE_THIS_VOLATILE (*from_p);
	    tree t = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
	    if (t)
	      {
		if (TREE_THIS_VOLATILE (t) != volatile_p)
		  {
		    /* Preserve the volatility of the original access:
		       re-wrap a bare DECL in a MEM_REF so the flag has
		       somewhere to live.  */
		    if (DECL_P (t))
		      t = build_simple_mem_ref_loc (EXPR_LOCATION (*from_p),
						    build_fold_addr_expr (t));
		    if (REFERENCE_CLASS_P (t))
		      TREE_THIS_VOLATILE (t) = volatile_p;
		  }
		*from_p = t;
		ret = GS_OK;
		changed = true;
	      }
	    break;
	  }

	case TARGET_EXPR:
	  {
	    /* If we are initializing something from a TARGET_EXPR, strip the
	       TARGET_EXPR and initialize it directly, if possible.  This can't
	       be done if the initializer is void, since that implies that the
	       temporary is set in some non-trivial way.

	       ??? What about code that pulls out the temp and uses it
	       elsewhere? I think that such code never uses the TARGET_EXPR as
	       an initializer.  If I'm wrong, we'll die because the temp won't
	       have any RTL.  In that case, I guess we'll need to replace
	       references somehow.  */
	    tree init = TARGET_EXPR_INITIAL (*from_p);

	    if (init
		&& (TREE_CODE (*expr_p) != MODIFY_EXPR
		    || !TARGET_EXPR_NO_ELIDE (*from_p))
		&& !VOID_TYPE_P (TREE_TYPE (init)))
	      {
		*from_p = init;
		ret = GS_OK;
		changed = true;
	      }
	  }
	  break;

	case COMPOUND_EXPR:
	  /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
	     caught.  */
	  gimplify_compound_expr (from_p, pre_p, true);
	  ret = GS_OK;
	  changed = true;
	  break;

	case CONSTRUCTOR:
	  /* If we already made some changes, let the front end have a
	     crack at this before we break it down.  */
	  if (ret != GS_UNHANDLED)
	    break;
	  /* If we're initializing from a CONSTRUCTOR, break this into
	     individual MODIFY_EXPRs.  */
	  return gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
					    false);

	case COND_EXPR:
	  /* If we're assigning to a non-register type, push the assignment
	     down into the branches.  This is mandatory for ADDRESSABLE types,
	     since we cannot generate temporaries for such, but it saves a
	     copy in other cases as well.  */
	  if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
	    {
	      /* This code should mirror the code in gimplify_cond_expr. */
	      enum tree_code code = TREE_CODE (*expr_p);
	      tree cond = *from_p;
	      tree result = *to_p;

	      ret = gimplify_expr (&result, pre_p, post_p,
				   is_gimple_lvalue, fb_lvalue);
	      if (ret != GS_ERROR)
		ret = GS_OK;

	      /* If we are going to write RESULT more than once, clear
		 TREE_READONLY flag, otherwise we might incorrectly promote
		 the variable to static const and initialize it at compile
		 time in one of the branches.  */
	      if (VAR_P (result)
		  && TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node
		  && TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
		TREE_READONLY (result) = 0;
	      if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
		TREE_OPERAND (cond, 1)
		  = build2 (code, void_type_node, result,
			    TREE_OPERAND (cond, 1));
	      if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
		TREE_OPERAND (cond, 2)
		  = build2 (code, void_type_node, unshare_expr (result),
			    TREE_OPERAND (cond, 2));

	      TREE_TYPE (cond) = void_type_node;
	      recalculate_side_effects (cond);

	      if (want_value)
		{
		  gimplify_and_add (cond, pre_p);
		  *expr_p = unshare_expr (result);
		}
	      else
		*expr_p = cond;
	      return ret;
	    }
	  break;

	case CALL_EXPR:
	  /* For calls that return in memory, give *to_p as the CALL_EXPR's
	     return slot so that we don't generate a temporary.  */
	  if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
	      && aggregate_value_p (*from_p, *from_p))
	    {
	      bool use_target;

	      if (!(rhs_predicate_for (*to_p))(*from_p))
		/* If we need a temporary, *to_p isn't accurate.  */
		use_target = false;
	      /* It's OK to use the return slot directly unless it's an NRV. */
	      else if (TREE_CODE (*to_p) == RESULT_DECL
		       && DECL_NAME (*to_p) == NULL_TREE
		       && needs_to_live_in_memory (*to_p))
		use_target = true;
	      else if (is_gimple_reg_type (TREE_TYPE (*to_p))
		       || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
		/* Don't force regs into memory.  */
		use_target = false;
	      else if (TREE_CODE (*expr_p) == INIT_EXPR)
		/* It's OK to use the target directly if it's being
		   initialized.  */
		use_target = true;
	      else if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (*to_p)))
		       != INTEGER_CST)
		/* Always use the target and thus RSO for variable-sized types.
		   GIMPLE cannot deal with a variable-sized assignment
		   embedded in a call statement.  */
		use_target = true;
	      else if (TREE_CODE (*to_p) != SSA_NAME
		      && (!is_gimple_variable (*to_p)
			  || needs_to_live_in_memory (*to_p)))
		/* Don't use the original target if it's already addressable;
		   if its address escapes, and the called function uses the
		   NRV optimization, a conforming program could see *to_p
		   change before the called function returns; see c++/19317.
		   When optimizing, the return_slot pass marks more functions
		   as safe after we have escape info.  */
		use_target = false;
	      else
		use_target = true;

	      if (use_target)
		{
		  CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
		  mark_addressable (*to_p);
		}
	    }
	  break;

	case WITH_SIZE_EXPR:
	  /* Likewise for calls that return an aggregate of non-constant size,
	     since we would not be able to generate a temporary at all.  */
	  if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR)
	    {
	      *from_p = TREE_OPERAND (*from_p, 0);
	      /* We don't change ret in this case because the
		 WITH_SIZE_EXPR might have been added in
		 gimplify_modify_expr, so returning GS_OK would lead to an
		 infinite loop.  */
	      changed = true;
	    }
	  break;

	  /* If we're initializing from a container, push the initialization
	     inside it.  */
	case CLEANUP_POINT_EXPR:
	case BIND_EXPR:
	case STATEMENT_LIST:
	  {
	    tree wrap = *from_p;
	    tree t;

	    ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
				 fb_lvalue);
	    if (ret != GS_ERROR)
	      ret = GS_OK;

	    t = voidify_wrapper_expr (wrap, *expr_p);
	    gcc_assert (t == *expr_p);

	    if (want_value)
	      {
		gimplify_and_add (wrap, pre_p);
		*expr_p = unshare_expr (*to_p);
	      }
	    else
	      *expr_p = wrap;
	    return GS_OK;
	  }

	case COMPOUND_LITERAL_EXPR:
	  {
	    tree complit = TREE_OPERAND (*expr_p, 1);
	    tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
	    tree decl = DECL_EXPR_DECL (decl_s);
	    tree init = DECL_INITIAL (decl);

	    /* struct T x = (struct T) { 0, 1, 2 } can be optimized
	       into struct T x = { 0, 1, 2 } if the address of the
	       compound literal has never been taken.  */
	    if (!TREE_ADDRESSABLE (complit)
		&& !TREE_ADDRESSABLE (decl)
		&& init)
	      {
		*expr_p = copy_node (*expr_p);
		TREE_OPERAND (*expr_p, 1) = init;
		return GS_OK;
	      }
	  }
	  /* FALLTHRU -- when the literal cannot be flattened, fall through
	     to the default no-op.  */

	default:
	  break;
	}
    }
  while (changed);

  return ret;
}
5507
5508
5509 /* Return true if T looks like a valid GIMPLE statement. */
5510
5511 static bool
5512 is_gimple_stmt (tree t)
5513 {
5514 const enum tree_code code = TREE_CODE (t);
5515
5516 switch (code)
5517 {
5518 case NOP_EXPR:
5519 /* The only valid NOP_EXPR is the empty statement. */
5520 return IS_EMPTY_STMT (t);
5521
5522 case BIND_EXPR:
5523 case COND_EXPR:
5524 /* These are only valid if they're void. */
5525 return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));
5526
5527 case SWITCH_EXPR:
5528 case GOTO_EXPR:
5529 case RETURN_EXPR:
5530 case LABEL_EXPR:
5531 case CASE_LABEL_EXPR:
5532 case TRY_CATCH_EXPR:
5533 case TRY_FINALLY_EXPR:
5534 case EH_FILTER_EXPR:
5535 case CATCH_EXPR:
5536 case ASM_EXPR:
5537 case STATEMENT_LIST:
5538 case OACC_PARALLEL:
5539 case OACC_KERNELS:
5540 case OACC_DATA:
5541 case OACC_HOST_DATA:
5542 case OACC_DECLARE:
5543 case OACC_UPDATE:
5544 case OACC_ENTER_DATA:
5545 case OACC_EXIT_DATA:
5546 case OACC_CACHE:
5547 case OMP_PARALLEL:
5548 case OMP_FOR:
5549 case OMP_SIMD:
5550 case OMP_DISTRIBUTE:
5551 case OMP_LOOP:
5552 case OACC_LOOP:
5553 case OMP_SCAN:
5554 case OMP_SECTIONS:
5555 case OMP_SECTION:
5556 case OMP_SINGLE:
5557 case OMP_MASTER:
5558 case OMP_TASKGROUP:
5559 case OMP_ORDERED:
5560 case OMP_CRITICAL:
5561 case OMP_TASK:
5562 case OMP_TARGET:
5563 case OMP_TARGET_DATA:
5564 case OMP_TARGET_UPDATE:
5565 case OMP_TARGET_ENTER_DATA:
5566 case OMP_TARGET_EXIT_DATA:
5567 case OMP_TASKLOOP:
5568 case OMP_TEAMS:
5569 /* These are always void. */
5570 return true;
5571
5572 case CALL_EXPR:
5573 case MODIFY_EXPR:
5574 case PREDICT_EXPR:
5575 /* These are valid regardless of their type. */
5576 return true;
5577
5578 default:
5579 return false;
5580 }
5581 }
5582
5583
5584 /* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
5585 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with
5586 DECL_GIMPLE_REG_P set.
5587
5588 IMPORTANT NOTE: This promotion is performed by introducing a load of the
5589 other, unmodified part of the complex object just before the total store.
5590 As a consequence, if the object is still uninitialized, an undefined value
5591 will be loaded into a register, which may result in a spurious exception
5592 if the register is floating-point and the value happens to be a signaling
5593 NaN for example. Then the fully-fledged complex operations lowering pass
5594 followed by a DCE pass are necessary in order to fix things up. */
5595
5596 static enum gimplify_status
5597 gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
5598 bool want_value)
5599 {
5600 enum tree_code code, ocode;
5601 tree lhs, rhs, new_rhs, other, realpart, imagpart;
5602
5603 lhs = TREE_OPERAND (*expr_p, 0);
5604 rhs = TREE_OPERAND (*expr_p, 1);
5605 code = TREE_CODE (lhs);
5606 lhs = TREE_OPERAND (lhs, 0);
5607
5608 ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
5609 other = build1 (ocode, TREE_TYPE (rhs), lhs);
5610 TREE_NO_WARNING (other) = 1;
5611 other = get_formal_tmp_var (other, pre_p);
5612
5613 realpart = code == REALPART_EXPR ? rhs : other;
5614 imagpart = code == REALPART_EXPR ? other : rhs;
5615
5616 if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
5617 new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
5618 else
5619 new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
5620
5621 gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
5622 *expr_p = (want_value) ? rhs : NULL_TREE;
5623
5624 return GS_ALL_DONE;
5625 }
5626
/* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.

      modify_expr
	      : varname '=' rhs
	      | '*' ID '=' rhs

    PRE_P points to the list where side effects that must happen before
	*EXPR_P should be stored.

    POST_P points to the list where side effects that must happen after
	*EXPR_P should be stored.

    WANT_VALUE is nonzero iff we want to use the value of this expression
	in another expression.

    Returns GS_OK when WANT_VALUE (with *EXPR_P set to the LHS or RHS),
    GS_ALL_DONE otherwise, or GS_ERROR on failure.  Also handles
    INIT_EXPR.  */

static enum gimplify_status
gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
		      bool want_value)
{
  tree *from_p = &TREE_OPERAND (*expr_p, 1);
  tree *to_p = &TREE_OPERAND (*expr_p, 0);
  enum gimplify_status ret = GS_UNHANDLED;
  gimple *assign;
  location_t loc = EXPR_LOCATION (*expr_p);
  gimple_stmt_iterator gsi;

  gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
	      || TREE_CODE (*expr_p) == INIT_EXPR);

  /* Trying to simplify a clobber using normal logic doesn't work,
     so handle it here.  */
  if (TREE_CLOBBER_P (*from_p))
    {
      ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
      if (ret == GS_ERROR)
	return ret;
      gcc_assert (!want_value);
      if (!VAR_P (*to_p) && TREE_CODE (*to_p) != MEM_REF)
	{
	  /* Force the clobbered lvalue into a MEM_REF through a
	     temporary holding its address.  */
	  tree addr = get_initialized_tmp_var (build_fold_addr_expr (*to_p),
					       pre_p, post_p);
	  *to_p = build_simple_mem_ref_loc (EXPR_LOCATION (*to_p), addr);
	}
      gimplify_seq_add_stmt (pre_p, gimple_build_assign (*to_p, *from_p));
      *expr_p = NULL;
      return GS_ALL_DONE;
    }

  /* Insert pointer conversions required by the middle-end that are not
     required by the frontend.  This fixes middle-end type checking for
     for example gcc.dg/redecl-6.c.  */
  if (POINTER_TYPE_P (TREE_TYPE (*to_p)))
    {
      STRIP_USELESS_TYPE_CONVERSION (*from_p);
      if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
	*from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
    }

  /* See if any simplifications can be done based on what the RHS is.  */
  ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
				  want_value);
  if (ret != GS_UNHANDLED)
    return ret;

  /* For zero sized types only gimplify the left hand side and right hand
     side as statements and throw away the assignment.  Do this after
     gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
     types properly.  */
  if (zero_sized_type (TREE_TYPE (*from_p))
      && !want_value
      /* Don't do this for calls that return addressable types, expand_call
	 relies on those having a lhs.  */
      && !(TREE_ADDRESSABLE (TREE_TYPE (*from_p))
	   && TREE_CODE (*from_p) == CALL_EXPR))
    {
      gimplify_stmt (from_p, pre_p);
      gimplify_stmt (to_p, pre_p);
      *expr_p = NULL_TREE;
      return GS_ALL_DONE;
    }

  /* If the value being copied is of variable width, compute the length
     of the copy into a WITH_SIZE_EXPR.  Note that we need to do this
     before gimplifying any of the operands so that we can resolve any
     PLACEHOLDER_EXPRs in the size.  Also note that the RTL expander uses
     the size of the expression to be copied, not of the destination, so
     that is what we must do here.  */
  maybe_with_size_expr (from_p);

  /* As a special case, we have to temporarily allow for assignments
     with a CALL_EXPR on the RHS.  Since in GIMPLE a function call is
     a toplevel statement, when gimplifying the GENERIC expression
     MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
     GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.

     Instead, we need to create the tuple GIMPLE_CALL <a, foo>.  To
     prevent gimplify_expr from trying to create a new temporary for
     foo's LHS, we tell it that it should only gimplify until it
     reaches the CALL_EXPR.  On return from gimplify_expr, the newly
     created GIMPLE_CALL <foo> will be the last statement in *PRE_P
     and all we need to do here is set 'a' to be its LHS.  */

  /* Gimplify the RHS first for C++17 and bug 71104.  */
  gimple_predicate initial_pred = initial_rhs_predicate_for (*to_p);
  ret = gimplify_expr (from_p, pre_p, post_p, initial_pred, fb_rvalue);
  if (ret == GS_ERROR)
    return ret;

  /* Then gimplify the LHS.  */
  /* If we gimplified the RHS to a CALL_EXPR and that call may return
     twice we have to make sure to gimplify into non-SSA as otherwise
     the abnormal edge added later will make those defs not dominate
     their uses.
     ???  Technically this applies only to the registers used in the
     resulting non-register *TO_P.  */
  bool saved_into_ssa = gimplify_ctxp->into_ssa;
  if (saved_into_ssa
      && TREE_CODE (*from_p) == CALL_EXPR
      && call_expr_flags (*from_p) & ECF_RETURNS_TWICE)
    gimplify_ctxp->into_ssa = false;
  ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
  gimplify_ctxp->into_ssa = saved_into_ssa;
  if (ret == GS_ERROR)
    return ret;

  /* Now that the LHS is gimplified, re-gimplify the RHS if our initial
     guess for the predicate was wrong.  */
  gimple_predicate final_pred = rhs_predicate_for (*to_p);
  if (final_pred != initial_pred)
    {
      ret = gimplify_expr (from_p, pre_p, post_p, final_pred, fb_rvalue);
      if (ret == GS_ERROR)
	return ret;
    }

  /* In case of va_arg internal fn wrappped in a WITH_SIZE_EXPR, add the type
     size as argument to the call.  */
  if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
    {
      tree call = TREE_OPERAND (*from_p, 0);
      tree vlasize = TREE_OPERAND (*from_p, 1);

      if (TREE_CODE (call) == CALL_EXPR
	  && CALL_EXPR_IFN (call) == IFN_VA_ARG)
	{
	  /* Rebuild the IFN_VA_ARG call with the VLA size appended as an
	     extra trailing argument.  */
	  int nargs = call_expr_nargs (call);
	  tree type = TREE_TYPE (call);
	  tree ap = CALL_EXPR_ARG (call, 0);
	  tree tag = CALL_EXPR_ARG (call, 1);
	  tree aptag = CALL_EXPR_ARG (call, 2);
	  tree newcall = build_call_expr_internal_loc (EXPR_LOCATION (call),
						       IFN_VA_ARG, type,
						       nargs + 1, ap, tag,
						       aptag, vlasize);
	  TREE_OPERAND (*from_p, 0) = newcall;
	}
    }

  /* Now see if the above changed *from_p to something we handle specially.  */
  ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
				  want_value);
  if (ret != GS_UNHANDLED)
    return ret;

  /* If we've got a variable sized assignment between two lvalues (i.e. does
     not involve a call), then we can make things a bit more straightforward
     by converting the assignment to memcpy or memset.  */
  if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
    {
      tree from = TREE_OPERAND (*from_p, 0);
      tree size = TREE_OPERAND (*from_p, 1);

      if (TREE_CODE (from) == CONSTRUCTOR)
	return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);

      if (is_gimple_addressable (from))
	{
	  *from_p = from;
	  return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
						 pre_p);
	}
    }

  /* Transform partial stores to non-addressable complex variables into
     total stores.  This allows us to use real instead of virtual operands
     for these variables, which improves optimization.  */
  if ((TREE_CODE (*to_p) == REALPART_EXPR
       || TREE_CODE (*to_p) == IMAGPART_EXPR)
      && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
    return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);

  /* Try to alleviate the effects of the gimplification creating artificial
     temporaries (see for example is_gimple_reg_rhs) on the debug info, but
     make sure not to create DECL_DEBUG_EXPR links across functions.  */
  if (!gimplify_ctxp->into_ssa
      && VAR_P (*from_p)
      && DECL_IGNORED_P (*from_p)
      && DECL_P (*to_p)
      && !DECL_IGNORED_P (*to_p)
      && decl_function_context (*to_p) == current_function_decl
      && decl_function_context (*from_p) == current_function_decl)
    {
      if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
	DECL_NAME (*from_p)
	  = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
      DECL_HAS_DEBUG_EXPR_P (*from_p) = 1;
      SET_DECL_DEBUG_EXPR (*from_p, *to_p);
   }

  /* For a volatile LHS, evaluating the assignment's value must not
     re-read the LHS; capture the RHS in a temporary instead.  */
  if (want_value && TREE_THIS_VOLATILE (*to_p))
    *from_p = get_initialized_tmp_var (*from_p, pre_p, post_p);

  if (TREE_CODE (*from_p) == CALL_EXPR)
    {
      /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
	 instead of a GIMPLE_ASSIGN.  */
      gcall *call_stmt;
      if (CALL_EXPR_FN (*from_p) == NULL_TREE)
	{
	  /* Gimplify internal functions created in the FEs.  */
	  int nargs = call_expr_nargs (*from_p), i;
	  enum internal_fn ifn = CALL_EXPR_IFN (*from_p);
	  auto_vec<tree> vargs (nargs);

	  for (i = 0; i < nargs; i++)
	    {
	      gimplify_arg (&CALL_EXPR_ARG (*from_p, i), pre_p,
			    EXPR_LOCATION (*from_p));
	      vargs.quick_push (CALL_EXPR_ARG (*from_p, i));
	    }
	  call_stmt = gimple_build_call_internal_vec (ifn, vargs);
	  gimple_call_set_nothrow (call_stmt, TREE_NOTHROW (*from_p));
	  gimple_set_location (call_stmt, EXPR_LOCATION (*expr_p));
	}
      else
	{
	  tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*from_p));
	  CALL_EXPR_FN (*from_p) = TREE_OPERAND (CALL_EXPR_FN (*from_p), 0);
	  STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p));
	  tree fndecl = get_callee_fndecl (*from_p);
	  /* __builtin_expect with three arguments is lowered to the
	     internal function form here.  */
	  if (fndecl
	      && fndecl_built_in_p (fndecl, BUILT_IN_EXPECT)
	      && call_expr_nargs (*from_p) == 3)
	    call_stmt = gimple_build_call_internal (IFN_BUILTIN_EXPECT, 3,
						    CALL_EXPR_ARG (*from_p, 0),
						    CALL_EXPR_ARG (*from_p, 1),
						    CALL_EXPR_ARG (*from_p, 2));
	  else
	    {
	      call_stmt = gimple_build_call_from_tree (*from_p, fnptrtype);
	    }
	}
      notice_special_calls (call_stmt);
      if (!gimple_call_noreturn_p (call_stmt) || !should_remove_lhs_p (*to_p))
	gimple_call_set_lhs (call_stmt, *to_p);
      else if (TREE_CODE (*to_p) == SSA_NAME)
	/* The above is somewhat premature, avoid ICEing later for a
	   SSA name w/o a definition.  We may have uses in the GIMPLE IL.
	   ???  This doesn't make it a default-def.  */
	SSA_NAME_DEF_STMT (*to_p) = gimple_build_nop ();

      assign = call_stmt;
    }
  else
    {
      assign = gimple_build_assign (*to_p, *from_p);
      gimple_set_location (assign, EXPR_LOCATION (*expr_p));
      if (COMPARISON_CLASS_P (*from_p))
	gimple_set_no_warning (assign, TREE_NO_WARNING (*from_p));
    }

  if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
    {
      /* We should have got an SSA name from the start.  */
      gcc_assert (TREE_CODE (*to_p) == SSA_NAME
		  || ! gimple_in_ssa_p (cfun));
    }

  /* Emit the statement, then try to fold it in place.  */
  gimplify_seq_add_stmt (pre_p, assign);
  gsi = gsi_last (*pre_p);
  maybe_fold_stmt (&gsi);

  if (want_value)
    {
      /* For a volatile LHS, use the temporary captured above rather
	 than re-reading the LHS.  */
      *expr_p = TREE_THIS_VOLATILE (*to_p) ? *from_p : unshare_expr (*to_p);
      return GS_OK;
    }
  else
    *expr_p = NULL;

  return GS_ALL_DONE;
}
5919
5920 /* Gimplify a comparison between two variable-sized objects. Do this
5921 with a call to BUILT_IN_MEMCMP. */
5922
5923 static enum gimplify_status
5924 gimplify_variable_sized_compare (tree *expr_p)
5925 {
5926 location_t loc = EXPR_LOCATION (*expr_p);
5927 tree op0 = TREE_OPERAND (*expr_p, 0);
5928 tree op1 = TREE_OPERAND (*expr_p, 1);
5929 tree t, arg, dest, src, expr;
5930
5931 arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
5932 arg = unshare_expr (arg);
5933 arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
5934 src = build_fold_addr_expr_loc (loc, op1);
5935 dest = build_fold_addr_expr_loc (loc, op0);
5936 t = builtin_decl_implicit (BUILT_IN_MEMCMP);
5937 t = build_call_expr_loc (loc, t, 3, dest, src, arg);
5938
5939 expr
5940 = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
5941 SET_EXPR_LOCATION (expr, loc);
5942 *expr_p = expr;
5943
5944 return GS_OK;
5945 }
5946
5947 /* Gimplify a comparison between two aggregate objects of integral scalar
5948 mode as a comparison between the bitwise equivalent scalar values. */
5949
5950 static enum gimplify_status
5951 gimplify_scalar_mode_aggregate_compare (tree *expr_p)
5952 {
5953 location_t loc = EXPR_LOCATION (*expr_p);
5954 tree op0 = TREE_OPERAND (*expr_p, 0);
5955 tree op1 = TREE_OPERAND (*expr_p, 1);
5956
5957 tree type = TREE_TYPE (op0);
5958 tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
5959
5960 op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
5961 op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);
5962
5963 *expr_p
5964 = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
5965
5966 return GS_OK;
5967 }
5968
5969 /* Gimplify an expression sequence. This function gimplifies each
5970 expression and rewrites the original expression with the last
5971 expression of the sequence in GIMPLE form.
5972
5973 PRE_P points to the list where the side effects for all the
5974 expressions in the sequence will be emitted.
5975
5976 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
5977
5978 static enum gimplify_status
5979 gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
5980 {
5981 tree t = *expr_p;
5982
5983 do
5984 {
5985 tree *sub_p = &TREE_OPERAND (t, 0);
5986
5987 if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
5988 gimplify_compound_expr (sub_p, pre_p, false);
5989 else
5990 gimplify_stmt (sub_p, pre_p);
5991
5992 t = TREE_OPERAND (t, 1);
5993 }
5994 while (TREE_CODE (t) == COMPOUND_EXPR);
5995
5996 *expr_p = t;
5997 if (want_value)
5998 return GS_OK;
5999 else
6000 {
6001 gimplify_stmt (expr_p, pre_p);
6002 return GS_ALL_DONE;
6003 }
6004 }
6005
6006 /* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
6007 gimplify. After gimplification, EXPR_P will point to a new temporary
6008 that holds the original value of the SAVE_EXPR node.
6009
6010 PRE_P points to the list where side effects that must happen before
6011 *EXPR_P should be stored. */
6012
6013 static enum gimplify_status
6014 gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6015 {
6016 enum gimplify_status ret = GS_ALL_DONE;
6017 tree val;
6018
6019 gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
6020 val = TREE_OPERAND (*expr_p, 0);
6021
6022 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
6023 if (!SAVE_EXPR_RESOLVED_P (*expr_p))
6024 {
6025 /* The operand may be a void-valued expression. It is
6026 being executed only for its side-effects. */
6027 if (TREE_TYPE (val) == void_type_node)
6028 {
6029 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
6030 is_gimple_stmt, fb_none);
6031 val = NULL;
6032 }
6033 else
6034 /* The temporary may not be an SSA name as later abnormal and EH
6035 control flow may invalidate use/def domination. When in SSA
6036 form then assume there are no such issues and SAVE_EXPRs only
6037 appear via GENERIC foldings. */
6038 val = get_initialized_tmp_var (val, pre_p, post_p,
6039 gimple_in_ssa_p (cfun));
6040
6041 TREE_OPERAND (*expr_p, 0) = val;
6042 SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
6043 }
6044
6045 *expr_p = val;
6046
6047 return ret;
6048 }
6049
/* Rewrite the ADDR_EXPR node pointed to by EXPR_P

	unary_expr
		: ...
		| '&' varname
		...

   PRE_P points to the list where side effects that must happen before
   *EXPR_P should be stored.

   POST_P points to the list where side effects that must happen after
   *EXPR_P should be stored.

   Returns GS_OK when *EXPR_P was rewritten and may need further
   gimplification, or GS_ERROR on failure.  */

static enum gimplify_status
gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree expr = *expr_p;
  tree op0 = TREE_OPERAND (expr, 0);
  enum gimplify_status ret;
  location_t loc = EXPR_LOCATION (*expr_p);

  switch (TREE_CODE (op0))
    {
    case INDIRECT_REF:
    do_indirect_ref:
      /* Check if we are dealing with an expression of the form '&*ptr'.
	 While the front end folds away '&*ptr' into 'ptr', these
	 expressions may be generated internally by the compiler (e.g.,
	 builtins like __builtin_va_end).  */
      /* Caution: the silent array decomposition semantics we allow for
	 ADDR_EXPR means we can't always discard the pair.  */
      /* Gimplification of the ADDR_EXPR operand may drop
	 cv-qualification conversions, so make sure we add them if
	 needed.  */
      {
	/* '&*p' collapses to 'p', possibly with a conversion to
	   restore the pointer type of the original ADDR_EXPR.  */
	tree op00 = TREE_OPERAND (op0, 0);
	tree t_expr = TREE_TYPE (expr);
	tree t_op00 = TREE_TYPE (op00);

       if (!useless_type_conversion_p (t_expr, t_op00))
	  op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
	*expr_p = op00;
	ret = GS_OK;
      }
      break;

    case VIEW_CONVERT_EXPR:
      /* Take the address of our operand and then convert it to the type of
	 this ADDR_EXPR.

	 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
	 all clear.  The impact of this transformation is even less clear.  */

      /* If the operand is a useless conversion, look through it.  Doing so
	 guarantees that the ADDR_EXPR and its operand will remain of the
	 same type.  */
      if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
	op0 = TREE_OPERAND (op0, 0);

      /* &VIEW_CONVERT_EXPR<T>(x) becomes (T *) &x.  */
      *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
				  build_fold_addr_expr_loc (loc,
							TREE_OPERAND (op0, 0)));
      ret = GS_OK;
      break;

    case MEM_REF:
      /* A MEM_REF with zero offset is equivalent to an INDIRECT_REF,
	 so '&MEM[p, 0]' can be collapsed the same way as '&*p'.  */
      if (integer_zerop (TREE_OPERAND (op0, 1)))
	goto do_indirect_ref;

      /* fall through */

    default:
      /* If we see a call to a declared builtin or see its address
	 being taken (we can unify those cases here) then we can mark
	 the builtin for implicit generation by GCC.  */
      if (TREE_CODE (op0) == FUNCTION_DECL
	  && fndecl_built_in_p (op0, BUILT_IN_NORMAL)
	  && builtin_decl_declared_p (DECL_FUNCTION_CODE (op0)))
	set_builtin_decl_implicit_p (DECL_FUNCTION_CODE (op0), true);

      /* We use fb_either here because the C frontend sometimes takes
	 the address of a call that returns a struct; see
	 gcc.dg/c99-array-lval-1.c.  The gimplifier will correctly make
	 the implied temporary explicit.  */

      /* Make the operand addressable.  */
      ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
			   is_gimple_addressable, fb_either);
      if (ret == GS_ERROR)
	break;

      /* Then mark it.  Beware that it may not be possible to do so directly
	 if a temporary has been created by the gimplification.  */
      prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p);

      op0 = TREE_OPERAND (expr, 0);

      /* For various reasons, the gimplification of the expression
	 may have made a new INDIRECT_REF.  */
      if (TREE_CODE (op0) == INDIRECT_REF)
	goto do_indirect_ref;

      mark_addressable (TREE_OPERAND (expr, 0));

      /* The FEs may end up building ADDR_EXPRs early on a decl with
	 an incomplete type.  Re-build ADDR_EXPRs in canonical form
	 here.  */
      if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr))))
	*expr_p = build_fold_addr_expr (op0);

      /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly.  */
      recompute_tree_invariant_for_addr_expr (*expr_p);

      /* If we re-built the ADDR_EXPR add a conversion to the original type
	 if required.  */
      if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
	*expr_p = fold_convert (TREE_TYPE (expr), *expr_p);

      break;
    }

  return ret;
}
6173
/* Gimplify the operands of an ASM_EXPR.  Input operands should be a gimple
   value; output operands should be a gimple lvalue.

   The operand lists of EXPR are rebuilt into vectors (outputs, inputs,
   clobbers, labels) and, unless an error was diagnosed, a GIMPLE_ASM is
   appended to *PRE_P.  In/out ("+") operands are split into matched
   output and input operands.  Returns GS_ALL_DONE on success or
   GS_ERROR if any operand failed to gimplify or had an impossible
   constraint.  */

static enum gimplify_status
gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree expr;
  int noutputs;
  const char **oconstraints;
  int i;
  tree link;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;
  enum gimplify_status ret, tret;
  gasm *stmt;
  vec<tree, va_gc> *inputs;
  vec<tree, va_gc> *outputs;
  vec<tree, va_gc> *clobbers;
  vec<tree, va_gc> *labels;
  tree link_next;

  expr = *expr_p;
  noutputs = list_length (ASM_OUTPUTS (expr));
  /* Remember each output constraint string so that input constraints
     with matching-operand digits can be validated against them.  */
  oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));

  inputs = NULL;
  outputs = NULL;
  clobbers = NULL;
  labels = NULL;

  ret = GS_ALL_DONE;
  link_next = NULL_TREE;
  /* First pass: the output operands.  */
  for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
    {
      bool ok;
      size_t constraint_len;

      link_next = TREE_CHAIN (link);

      oconstraints[i]
	= constraint
	= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      constraint_len = strlen (constraint);
      if (constraint_len == 0)
	continue;

      ok = parse_output_constraint (&constraint, i, 0, 0,
				    &allows_mem, &allows_reg, &is_inout);
      if (!ok)
	{
	  ret = GS_ERROR;
	  is_inout = false;
	}

      /* If we can't make copies, we can only accept memory.  */
      if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (link))))
	{
	  if (allows_mem)
	    allows_reg = 0;
	  else
	    {
	      error ("impossible constraint in %<asm%>");
	      error ("non-memory output %d must stay in memory", i);
	      return GS_ERROR;
	    }
	}

      if (!allows_reg && allows_mem)
	mark_addressable (TREE_VALUE (link));

      /* In/out operands need only a minimal lvalue since the same
	 expression is evaluated twice (once as output, once as input).  */
      tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
			    is_inout ? is_gimple_min_lval : is_gimple_lvalue,
			    fb_lvalue | fb_mayfail);
      if (tret == GS_ERROR)
	{
	  error ("invalid lvalue in %<asm%> output %d", i);
	  ret = tret;
	}

      /* If the constraint does not allow memory make sure we gimplify
	 it to a register if it is not already but its base is.  This
	 happens for complex and vector components.  */
      if (!allows_mem)
	{
	  tree op = TREE_VALUE (link);
	  if (! is_gimple_val (op)
	      && is_gimple_reg_type (TREE_TYPE (op))
	      && is_gimple_reg (get_base_address (op)))
	    {
	      tree tem = create_tmp_reg (TREE_TYPE (op));
	      tree ass;
	      if (is_inout)
		{
		  /* Preload the temporary with the current value for
		     the input half of an in/out operand.  */
		  ass = build2 (MODIFY_EXPR, TREE_TYPE (tem),
				tem, unshare_expr (op));
		  gimplify_and_add (ass, pre_p);
		}
	      /* Copy the result back from the temporary afterwards.  */
	      ass = build2 (MODIFY_EXPR, TREE_TYPE (tem), op, tem);
	      gimplify_and_add (ass, post_p);

	      TREE_VALUE (link) = tem;
	      tret = GS_OK;
	    }
	}

      vec_safe_push (outputs, link);
      TREE_CHAIN (link) = NULL_TREE;

      if (is_inout)
	{
	  /* An input/output operand.  To give the optimizers more
	     flexibility, split it into separate input and output
	     operands.  */
	  tree input;
	  /* Buffer big enough to format a 32-bit UINT_MAX into.  */
	  char buf[11];

	  /* Turn the in/out constraint into an output constraint.  */
	  char *p = xstrdup (constraint);
	  p[0] = '=';
	  TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);

	  /* And add a matching input constraint.  */
	  if (allows_reg)
	    {
	      sprintf (buf, "%u", i);

	      /* If there are multiple alternatives in the constraint,
		 handle each of them individually.  Those that allow register
		 will be replaced with operand number, the others will stay
		 unchanged.  */
	      if (strchr (p, ',') != NULL)
		{
		  size_t len = 0, buflen = strlen (buf);
		  char *beg, *end, *str, *dst;

		  /* First pass over the alternatives computes the length
		     of the rewritten constraint string.  */
		  for (beg = p + 1;;)
		    {
		      end = strchr (beg, ',');
		      if (end == NULL)
			end = strchr (beg, '\0');
		      if ((size_t) (end - beg) < buflen)
			len += buflen + 1;
		      else
			len += end - beg + 1;
		      if (*end)
			beg = end + 1;
		      else
			break;
		    }

		  /* Second pass copies each alternative, substituting the
		     operand number for register alternatives.  */
		  str = (char *) alloca (len);
		  for (beg = p + 1, dst = str;;)
		    {
		      const char *tem;
		      bool mem_p, reg_p, inout_p;

		      end = strchr (beg, ',');
		      if (end)
			*end = '\0';
		      /* Temporarily prefix '=' so the alternative parses
			 as an output constraint.  */
		      beg[-1] = '=';
		      tem = beg - 1;
		      parse_output_constraint (&tem, i, 0, 0,
					       &mem_p, &reg_p, &inout_p);
		      if (dst != str)
			*dst++ = ',';
		      if (reg_p)
			{
			  memcpy (dst, buf, buflen);
			  dst += buflen;
			}
		      else
			{
			  if (end)
			    len = end - beg;
			  else
			    len = strlen (beg);
			  memcpy (dst, beg, len);
			  dst += len;
			}
		      if (end)
			beg = end + 1;
		      else
			break;
		    }
		  *dst = '\0';
		  input = build_string (dst - str, str);
		}
	      else
		input = build_string (strlen (buf), buf);
	    }
	  else
	    /* Memory-only in/out: the input constraint is the original
	       one minus the leading '+'.  */
	    input = build_string (constraint_len - 1, constraint + 1);

	  free (p);

	  input = build_tree_list (build_tree_list (NULL_TREE, input),
				   unshare_expr (TREE_VALUE (link)));
	  ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
	}
    }

  link_next = NULL_TREE;
  /* Second pass: the input operands, including those just appended by
     the in/out splitting above.  */
  for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
    {
      link_next = TREE_CHAIN (link);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0,
			      oconstraints, &allows_mem, &allows_reg);

      /* If we can't make copies, we can only accept memory.  */
      if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (link))))
	{
	  if (allows_mem)
	    allows_reg = 0;
	  else
	    {
	      error ("impossible constraint in %<asm%>");
	      error ("non-memory input %d must stay in memory", i);
	      return GS_ERROR;
	    }
	}

      /* If the operand is a memory input, it should be an lvalue.  */
      if (!allows_reg && allows_mem)
	{
	  tree inputv = TREE_VALUE (link);
	  STRIP_NOPS (inputv);
	  /* Side-effecting expressions cannot themselves serve as memory
	     inputs; replace them so we error out instead of ICEing.  */
	  if (TREE_CODE (inputv) == PREDECREMENT_EXPR
	      || TREE_CODE (inputv) == PREINCREMENT_EXPR
	      || TREE_CODE (inputv) == POSTDECREMENT_EXPR
	      || TREE_CODE (inputv) == POSTINCREMENT_EXPR
	      || TREE_CODE (inputv) == MODIFY_EXPR)
	    TREE_VALUE (link) = error_mark_node;
	  tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
				is_gimple_lvalue, fb_lvalue | fb_mayfail);
	  if (tret != GS_ERROR)
	    {
	      /* Unlike output operands, memory inputs are not guaranteed
		 to be lvalues by the FE, and while the expressions are
		 marked addressable there, if it is e.g. a statement
		 expression, temporaries in it might not end up being
		 addressable.  They might be already used in the IL and thus
		 it is too late to make them addressable now though.  */
	      tree x = TREE_VALUE (link);
	      while (handled_component_p (x))
		x = TREE_OPERAND (x, 0);
	      if (TREE_CODE (x) == MEM_REF
		  && TREE_CODE (TREE_OPERAND (x, 0)) == ADDR_EXPR)
		x = TREE_OPERAND (TREE_OPERAND (x, 0), 0);
	      if ((VAR_P (x)
		   || TREE_CODE (x) == PARM_DECL
		   || TREE_CODE (x) == RESULT_DECL)
		  && !TREE_ADDRESSABLE (x)
		  && is_gimple_reg (x))
		{
		  warning_at (EXPR_LOC_OR_LOC (TREE_VALUE (link),
					       input_location), 0,
			      "memory input %d is not directly addressable",
			      i);
		  prepare_gimple_addressable (&TREE_VALUE (link), pre_p);
		}
	    }
	  mark_addressable (TREE_VALUE (link));
	  if (tret == GS_ERROR)
	    {
	      error_at (EXPR_LOC_OR_LOC (TREE_VALUE (link), input_location),
			"memory input %d is not directly addressable", i);
	      ret = tret;
	    }
	}
      else
	{
	  tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
				is_gimple_asm_val, fb_rvalue);
	  if (tret == GS_ERROR)
	    ret = tret;
	}

      TREE_CHAIN (link) = NULL_TREE;
      vec_safe_push (inputs, link);
    }

  /* Clobbers and goto-labels need no gimplification; just move them
     from the TREE_LIST chains into vectors.  */
  link_next = NULL_TREE;
  for (link = ASM_CLOBBERS (expr); link; ++i, link = link_next)
    {
      link_next = TREE_CHAIN (link);
      TREE_CHAIN (link) = NULL_TREE;
      vec_safe_push (clobbers, link);
    }

  link_next = NULL_TREE;
  for (link = ASM_LABELS (expr); link; ++i, link = link_next)
    {
      link_next = TREE_CHAIN (link);
      TREE_CHAIN (link) = NULL_TREE;
      vec_safe_push (labels, link);
    }

  /* Do not add ASMs with errors to the gimple IL stream.  */
  if (ret != GS_ERROR)
    {
      stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
				   inputs, outputs, clobbers, labels);

      /* An asm without outputs is implicitly volatile, like in RTL.  */
      gimple_asm_set_volatile (stmt, ASM_VOLATILE_P (expr) || noutputs == 0);
      gimple_asm_set_input (stmt, ASM_INPUT_P (expr));
      gimple_asm_set_inline (stmt, ASM_INLINE_P (expr));

      gimplify_seq_add_stmt (pre_p, stmt);
    }

  return ret;
}
6488
/* Gimplify a CLEANUP_POINT_EXPR.  Currently this works by adding
   GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
   gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
   return to this function.

   FIXME should we complexify the prequeue handling instead?  Or use flags
   for all the cleanups and let the optimizer tighten them up?  The current
   code seems pretty fragile; it will break on a cleanup within any
   non-conditional nesting.  But any such nesting would be broken, anyway;
   we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
   and continues out of it.  We can do that at the RTL level, though, so
   having an optimizer to tighten up try/finally regions would be a Good
   Thing.  */

static enum gimplify_status
gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
{
  gimple_stmt_iterator iter;
  gimple_seq body_sequence = NULL;

  /* If the wrapped expression produces a value, voidify it and get a
     temporary to return instead.  */
  tree temp = voidify_wrapper_expr (*expr_p, NULL);

  /* We only care about the number of conditions between the innermost
     CLEANUP_POINT_EXPR and the cleanup.  So save and reset the count and
     any cleanups collected outside the CLEANUP_POINT_EXPR.  */
  int old_conds = gimplify_ctxp->conditions;
  gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
  bool old_in_cleanup_point_expr = gimplify_ctxp->in_cleanup_point_expr;
  gimplify_ctxp->conditions = 0;
  gimplify_ctxp->conditional_cleanups = NULL;
  gimplify_ctxp->in_cleanup_point_expr = true;

  /* Gimplifying the body may push GIMPLE_WITH_CLEANUP_EXPR markers into
     BODY_SEQUENCE via gimple_push_cleanup.  */
  gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);

  gimplify_ctxp->conditions = old_conds;
  gimplify_ctxp->conditional_cleanups = old_cleanups;
  gimplify_ctxp->in_cleanup_point_expr = old_in_cleanup_point_expr;

  /* Convert each WCE marker into a GIMPLE_TRY covering the statements
     that follow it in the sequence.  */
  for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
    {
      gimple *wce = gsi_stmt (iter);

      if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
	{
	  if (gsi_one_before_end_p (iter))
	    {
	      /* Note that gsi_insert_seq_before and gsi_remove do not
		 scan operands, unlike some other sequence mutators.  */
	      /* Nothing follows the cleanup, so no TRY is needed: inline
		 the cleanup (unless it is EH-only, in which case it can
		 never run) and drop the marker.  */
	      if (!gimple_wce_cleanup_eh_only (wce))
		gsi_insert_seq_before_without_update (&iter,
						      gimple_wce_cleanup (wce),
						      GSI_SAME_STMT);
	      gsi_remove (&iter, true);
	      break;
	    }
	  else
	    {
	      gtry *gtry;
	      gimple_seq seq;
	      enum gimple_try_flags kind;

	      if (gimple_wce_cleanup_eh_only (wce))
		kind = GIMPLE_TRY_CATCH;
	      else
		kind = GIMPLE_TRY_FINALLY;
	      seq = gsi_split_seq_after (iter);

	      gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
	      /* Do not use gsi_replace here, as it may scan operands.
		 We want to do a simple structural modification only.  */
	      gsi_set_stmt (&iter, gtry);
	      /* Continue scanning inside the new TRY body for further
		 WCE markers.  */
	      iter = gsi_start (gtry->eval);
	    }
	}
      else
	gsi_next (&iter);
    }

  gimplify_seq_add_seq (pre_p, body_sequence);
  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }
  else
    {
      *expr_p = NULL;
      return GS_ALL_DONE;
    }
}
6579
/* Insert a cleanup marker for gimplify_cleanup_point_expr.  CLEANUP
   is the cleanup action required.  EH_ONLY is true if the cleanup should
   only be executed if an exception is thrown, not on normal exit.
   If FORCE_UNCOND is true perform the cleanup unconditionally; this is
   only valid for clobbers.

   VAR is the temporary the cleanup protects; it is only used to suppress
   uninitialized-use warnings in the conditional case.  The marker is
   pushed either onto *PRE_P or onto the context's conditional-cleanup
   queue, to be resolved by gimplify_cleanup_point_expr.  */

static void
gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p,
		     bool force_uncond = false)
{
  gimple *wce;
  gimple_seq cleanup_stmts = NULL;

  /* Errors can result in improperly nested cleanups.  Which results in
     confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR.  */
  if (seen_error ())
    return;

  if (gimple_conditional_context ())
    {
      /* If we're in a conditional context, this is more complex.  We only
	 want to run the cleanup if we actually ran the initialization that
	 necessitates it, but we want to run it after the end of the
	 conditional context.  So we wrap the try/finally around the
	 condition and use a flag to determine whether or not to actually
	 run the destructor.  Thus

	   test ? f(A()) : 0

	 becomes (approximately)

	   flag = 0;
	   try {
	     if (test) { A::A(temp); flag = 1; val = f(temp); }
	     else { val = 0; }
	   } finally {
	     if (flag) A::~A(temp);
	   }
	   val
      */
      if (force_uncond)
	{
	  /* Clobbers may run unconditionally, so no flag is needed.  */
	  gimplify_stmt (&cleanup, &cleanup_stmts);
	  wce = gimple_build_wce (cleanup_stmts);
	  gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
	}
      else
	{
	  tree flag = create_tmp_var (boolean_type_node, "cleanup");
	  gassign *ffalse = gimple_build_assign (flag, boolean_false_node);
	  gassign *ftrue = gimple_build_assign (flag, boolean_true_node);

	  /* Guard the cleanup with the flag: if (flag) cleanup;  */
	  cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
	  gimplify_stmt (&cleanup, &cleanup_stmts);
	  wce = gimple_build_wce (cleanup_stmts);

	  /* flag = false before the conditional context, flag = true at
	     the point of initialization (*PRE_P).  */
	  gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
	  gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
	  gimplify_seq_add_stmt (pre_p, ftrue);

	  /* Because of this manipulation, and the EH edges that jump
	     threading cannot redirect, the temporary (VAR) will appear
	     to be used uninitialized.  Don't warn.  */
	  TREE_NO_WARNING (var) = 1;
	}
    }
  else
    {
      gimplify_stmt (&cleanup, &cleanup_stmts);
      wce = gimple_build_wce (cleanup_stmts);
      gimple_wce_set_cleanup_eh_only (wce, eh_only);
      gimplify_seq_add_stmt (pre_p, wce);
    }
}
6654
/* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR.

   EXPR_P points to the TARGET_EXPR; on success *EXPR_P is replaced by the
   slot temporary, the initializer is emitted into *PRE_P, and any cleanup
   (destructor, stack-slot clobber, asan unpoison) is registered via
   gimple_push_cleanup.  Returns GS_OK, or GS_ERROR if the initializer
   failed to gimplify.  */

static enum gimplify_status
gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree targ = *expr_p;
  tree temp = TARGET_EXPR_SLOT (targ);
  tree init = TARGET_EXPR_INITIAL (targ);
  enum gimplify_status ret;

  bool unpoison_empty_seq = false;
  gimple_stmt_iterator unpoison_it;

  if (init)
    {
      tree cleanup = NULL_TREE;

      /* TARGET_EXPR temps aren't part of the enclosing block, so add it
	 to the temps list.  Handle also variable length TARGET_EXPRs.  */
      if (TREE_CODE (DECL_SIZE (temp)) != INTEGER_CST)
	{
	  if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
	    gimplify_type_sizes (TREE_TYPE (temp), pre_p);
	  gimplify_vla_decl (temp, pre_p);
	}
      else
	{
	  /* Save location where we need to place unpoisoning.  It's possible
	     that a variable will be converted to needs_to_live_in_memory.  */
	  unpoison_it = gsi_last (*pre_p);
	  unpoison_empty_seq = gsi_end_p (unpoison_it);

	  gimple_add_tmp_var (temp);
	}

      /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
	 expression is supposed to initialize the slot.  */
      if (VOID_TYPE_P (TREE_TYPE (init)))
	ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
      else
	{
	  /* Otherwise wrap it in temp = init and gimplify that; the
	     INIT_EXPR shell can be freed once fully gimplified.  */
	  tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
	  init = init_expr;
	  ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
	  init = NULL;
	  ggc_free (init_expr);
	}
      if (ret == GS_ERROR)
	{
	  /* PR c++/28266 Make sure this is expanded only once.  */
	  TARGET_EXPR_INITIAL (targ) = NULL_TREE;
	  return GS_ERROR;
	}
      if (init)
	gimplify_and_add (init, pre_p);

      /* If needed, push the cleanup for the temp.  */
      if (TARGET_EXPR_CLEANUP (targ))
	{
	  if (CLEANUP_EH_ONLY (targ))
	    gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
				 CLEANUP_EH_ONLY (targ), pre_p);
	  else
	    /* Non-EH cleanups are pushed last, below, after any clobber
	       and asan cleanups.  */
	    cleanup = TARGET_EXPR_CLEANUP (targ);
	}

      /* Add a clobber for the temporary going out of scope, like
	 gimplify_bind_expr.  */
      if (gimplify_ctxp->in_cleanup_point_expr
	  && needs_to_live_in_memory (temp))
	{
	  if (flag_stack_reuse == SR_ALL)
	    {
	      tree clobber = build_clobber (TREE_TYPE (temp));
	      clobber = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, clobber);
	      gimple_push_cleanup (temp, clobber, false, pre_p, true);
	    }
	  if (asan_poisoned_variables
	      && DECL_ALIGN (temp) <= MAX_SUPPORTED_STACK_ALIGNMENT
	      && !TREE_STATIC (temp)
	      && dbg_cnt (asan_use_after_scope)
	      && !gimplify_omp_ctxp)
	    {
	      tree asan_cleanup = build_asan_poison_call_expr (temp);
	      if (asan_cleanup)
		{
		  if (unpoison_empty_seq)
		    unpoison_it = gsi_start (*pre_p);

		  /* Unpoison before the initializer runs; re-poison via
		     the pushed cleanup when the temp dies.  */
		  asan_poison_variable (temp, false, &unpoison_it,
					unpoison_empty_seq);
		  gimple_push_cleanup (temp, asan_cleanup, false, pre_p);
		}
	    }
	}
      if (cleanup)
	gimple_push_cleanup (temp, cleanup, false, pre_p);

      /* Only expand this once.  */
      TREE_OPERAND (targ, 3) = init;
      TARGET_EXPR_INITIAL (targ) = NULL_TREE;
    }
  else
    /* We should have expanded this before.  */
    gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));

  *expr_p = temp;
  return GS_OK;
}
6764
6765 /* Gimplification of expression trees. */
6766
6767 /* Gimplify an expression which appears at statement context. The
6768 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is
6769 NULL, a new sequence is allocated.
6770
6771 Return true if we actually added a statement to the queue. */
6772
6773 bool
6774 gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
6775 {
6776 gimple_seq_node last;
6777
6778 last = gimple_seq_last (*seq_p);
6779 gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
6780 return last != gimple_seq_last (*seq_p);
6781 }
6782
6783 /* Add FIRSTPRIVATE entries for DECL in the OpenMP the surrounding parallels
6784 to CTX. If entries already exist, force them to be some flavor of private.
6785 If there is no enclosing parallel, do nothing. */
6786
6787 void
6788 omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
6789 {
6790 splay_tree_node n;
6791
6792 if (decl == NULL || !DECL_P (decl) || ctx->region_type == ORT_NONE)
6793 return;
6794
6795 do
6796 {
6797 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6798 if (n != NULL)
6799 {
6800 if (n->value & GOVD_SHARED)
6801 n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
6802 else if (n->value & GOVD_MAP)
6803 n->value |= GOVD_MAP_TO_ONLY;
6804 else
6805 return;
6806 }
6807 else if ((ctx->region_type & ORT_TARGET) != 0)
6808 {
6809 if (ctx->defaultmap[GDMK_SCALAR] & GOVD_FIRSTPRIVATE)
6810 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
6811 else
6812 omp_add_variable (ctx, decl, GOVD_MAP | GOVD_MAP_TO_ONLY);
6813 }
6814 else if (ctx->region_type != ORT_WORKSHARE
6815 && ctx->region_type != ORT_TASKGROUP
6816 && ctx->region_type != ORT_SIMD
6817 && ctx->region_type != ORT_ACC
6818 && !(ctx->region_type & ORT_TARGET_DATA))
6819 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
6820
6821 ctx = ctx->outer_context;
6822 }
6823 while (ctx);
6824 }
6825
6826 /* Similarly for each of the type sizes of TYPE. */
6827
6828 static void
6829 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
6830 {
6831 if (type == NULL || type == error_mark_node)
6832 return;
6833 type = TYPE_MAIN_VARIANT (type);
6834
6835 if (ctx->privatized_types->add (type))
6836 return;
6837
6838 switch (TREE_CODE (type))
6839 {
6840 case INTEGER_TYPE:
6841 case ENUMERAL_TYPE:
6842 case BOOLEAN_TYPE:
6843 case REAL_TYPE:
6844 case FIXED_POINT_TYPE:
6845 omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
6846 omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
6847 break;
6848
6849 case ARRAY_TYPE:
6850 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
6851 omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
6852 break;
6853
6854 case RECORD_TYPE:
6855 case UNION_TYPE:
6856 case QUAL_UNION_TYPE:
6857 {
6858 tree field;
6859 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
6860 if (TREE_CODE (field) == FIELD_DECL)
6861 {
6862 omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
6863 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
6864 }
6865 }
6866 break;
6867
6868 case POINTER_TYPE:
6869 case REFERENCE_TYPE:
6870 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
6871 break;
6872
6873 default:
6874 break;
6875 }
6876
6877 omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
6878 omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
6879 lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
6880 }
6881
6882 /* Add an entry for DECL in the OMP context CTX with FLAGS. */
6883
6884 static void
6885 omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
6886 {
6887 splay_tree_node n;
6888 unsigned int nflags;
6889 tree t;
6890
6891 if (error_operand_p (decl) || ctx->region_type == ORT_NONE)
6892 return;
6893
6894 /* Never elide decls whose type has TREE_ADDRESSABLE set. This means
6895 there are constructors involved somewhere. Exception is a shared clause,
6896 there is nothing privatized in that case. */
6897 if ((flags & GOVD_SHARED) == 0
6898 && (TREE_ADDRESSABLE (TREE_TYPE (decl))
6899 || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl))))
6900 flags |= GOVD_SEEN;
6901
6902 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6903 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
6904 {
6905 /* We shouldn't be re-adding the decl with the same data
6906 sharing class. */
6907 gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
6908 nflags = n->value | flags;
6909 /* The only combination of data sharing classes we should see is
6910 FIRSTPRIVATE and LASTPRIVATE. However, OpenACC permits
6911 reduction variables to be used in data sharing clauses. */
6912 gcc_assert ((ctx->region_type & ORT_ACC) != 0
6913 || ((nflags & GOVD_DATA_SHARE_CLASS)
6914 == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE))
6915 || (flags & GOVD_DATA_SHARE_CLASS) == 0);
6916 n->value = nflags;
6917 return;
6918 }
6919
6920 /* When adding a variable-sized variable, we have to handle all sorts
6921 of additional bits of data: the pointer replacement variable, and
6922 the parameters of the type. */
6923 if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
6924 {
6925 /* Add the pointer replacement variable as PRIVATE if the variable
6926 replacement is private, else FIRSTPRIVATE since we'll need the
6927 address of the original variable either for SHARED, or for the
6928 copy into or out of the context. */
6929 if (!(flags & GOVD_LOCAL) && ctx->region_type != ORT_TASKGROUP)
6930 {
6931 if (flags & GOVD_MAP)
6932 nflags = GOVD_MAP | GOVD_MAP_TO_ONLY | GOVD_EXPLICIT;
6933 else if (flags & GOVD_PRIVATE)
6934 nflags = GOVD_PRIVATE;
6935 else if (((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
6936 && (flags & GOVD_FIRSTPRIVATE))
6937 || (ctx->region_type == ORT_TARGET_DATA
6938 && (flags & GOVD_DATA_SHARE_CLASS) == 0))
6939 nflags = GOVD_PRIVATE | GOVD_EXPLICIT;
6940 else
6941 nflags = GOVD_FIRSTPRIVATE;
6942 nflags |= flags & GOVD_SEEN;
6943 t = DECL_VALUE_EXPR (decl);
6944 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
6945 t = TREE_OPERAND (t, 0);
6946 gcc_assert (DECL_P (t));
6947 omp_add_variable (ctx, t, nflags);
6948 }
6949
6950 /* Add all of the variable and type parameters (which should have
6951 been gimplified to a formal temporary) as FIRSTPRIVATE. */
6952 omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
6953 omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
6954 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
6955
6956 /* The variable-sized variable itself is never SHARED, only some form
6957 of PRIVATE. The sharing would take place via the pointer variable
6958 which we remapped above. */
6959 if (flags & GOVD_SHARED)
6960 flags = GOVD_SHARED | GOVD_DEBUG_PRIVATE
6961 | (flags & (GOVD_SEEN | GOVD_EXPLICIT));
6962
6963 /* We're going to make use of the TYPE_SIZE_UNIT at least in the
6964 alloca statement we generate for the variable, so make sure it
6965 is available. This isn't automatically needed for the SHARED
6966 case, since we won't be allocating local storage then.
6967 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
6968 in this case omp_notice_variable will be called later
6969 on when it is gimplified. */
6970 else if (! (flags & (GOVD_LOCAL | GOVD_MAP))
6971 && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl))))
6972 omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
6973 }
6974 else if ((flags & (GOVD_MAP | GOVD_LOCAL)) == 0
6975 && lang_hooks.decls.omp_privatize_by_reference (decl))
6976 {
6977 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
6978
6979 /* Similar to the direct variable sized case above, we'll need the
6980 size of references being privatized. */
6981 if ((flags & GOVD_SHARED) == 0)
6982 {
6983 t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
6984 if (DECL_P (t))
6985 omp_notice_variable (ctx, t, true);
6986 }
6987 }
6988
6989 if (n != NULL)
6990 n->value |= flags;
6991 else
6992 splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
6993
6994 /* For reductions clauses in OpenACC loop directives, by default create a
6995 copy clause on the enclosing parallel construct for carrying back the
6996 results. */
6997 if (ctx->region_type == ORT_ACC && (flags & GOVD_REDUCTION))
6998 {
6999 struct gimplify_omp_ctx *outer_ctx = ctx->outer_context;
7000 while (outer_ctx)
7001 {
7002 n = splay_tree_lookup (outer_ctx->variables, (splay_tree_key)decl);
7003 if (n != NULL)
7004 {
7005 /* Ignore local variables and explicitly declared clauses. */
7006 if (n->value & (GOVD_LOCAL | GOVD_EXPLICIT))
7007 break;
7008 else if (outer_ctx->region_type == ORT_ACC_KERNELS)
7009 {
7010 /* According to the OpenACC spec, such a reduction variable
7011 should already have a copy map on a kernels construct,
7012 verify that here. */
7013 gcc_assert (!(n->value & GOVD_FIRSTPRIVATE)
7014 && (n->value & GOVD_MAP));
7015 }
7016 else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
7017 {
7018 /* Remove firstprivate and make it a copy map. */
7019 n->value &= ~GOVD_FIRSTPRIVATE;
7020 n->value |= GOVD_MAP;
7021 }
7022 }
7023 else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
7024 {
7025 splay_tree_insert (outer_ctx->variables, (splay_tree_key)decl,
7026 GOVD_MAP | GOVD_SEEN);
7027 break;
7028 }
7029 outer_ctx = outer_ctx->outer_context;
7030 }
7031 }
7032 }
7033
/* Notice a threadprivate variable DECL used in OMP context CTX.
   This prints out diagnostics about threadprivate variable uses in
   untied tasks, and in enclosing target or order(concurrent) regions
   where the variable has no data clause.  If DECL2 is non-NULL,
   prevent this warning on that variable (used for the base variable
   of a DECL_VALUE_EXPR).  Always returns false: threadprivate
   variables are never remapped.  */

static bool
omp_notice_threadprivate_variable (struct gimplify_omp_ctx *ctx, tree decl,
				   tree decl2)
{
  splay_tree_node n;
  struct gimplify_omp_ctx *octx;

  /* Walk all enclosing contexts; target regions and regions with an
     order(concurrent) clause may not reference threadprivate variables
     that are not mentioned in a data clause there.  */
  for (octx = ctx; octx; octx = octx->outer_context)
    if ((octx->region_type & ORT_TARGET) != 0
	|| octx->order_concurrent)
      {
	n = splay_tree_lookup (octx->variables, (splay_tree_key)decl);
	if (n == NULL)
	  {
	    if (octx->order_concurrent)
	      {
		error ("threadprivate variable %qE used in a region with"
		       " %<order(concurrent)%> clause", DECL_NAME (decl));
		error_at (octx->location, "enclosing region");
	      }
	    else
	      {
		error ("threadprivate variable %qE used in target region",
		       DECL_NAME (decl));
		error_at (octx->location, "enclosing target region");
	      }
	    /* Record DECL in the context so the diagnostic is only
	       emitted once per region.  */
	    splay_tree_insert (octx->variables, (splay_tree_key)decl, 0);
	  }
	/* Suppress a future diagnostic on the related DECL2 as well.  */
	if (decl2)
	  splay_tree_insert (octx->variables, (splay_tree_key)decl2, 0);
      }

  /* The untied-task diagnostic only applies to the innermost context.  */
  if (ctx->region_type != ORT_UNTIED_TASK)
    return false;
  n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
  if (n == NULL)
    {
      error ("threadprivate variable %qE used in untied task",
	     DECL_NAME (decl));
      error_at (ctx->location, "enclosing task");
      splay_tree_insert (ctx->variables, (splay_tree_key)decl, 0);
    }
  if (decl2)
    splay_tree_insert (ctx->variables, (splay_tree_key)decl2, 0);
  return false;
}
7085
7086 /* Return true if global var DECL is device resident. */
7087
7088 static bool
7089 device_resident_p (tree decl)
7090 {
7091 tree attr = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (decl));
7092
7093 if (!attr)
7094 return false;
7095
7096 for (tree t = TREE_VALUE (attr); t; t = TREE_PURPOSE (t))
7097 {
7098 tree c = TREE_VALUE (t);
7099 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DEVICE_RESIDENT)
7100 return true;
7101 }
7102
7103 return false;
7104 }
7105
7106 /* Return true if DECL has an ACC DECLARE attribute. */
7107
7108 static bool
7109 is_oacc_declared (tree decl)
7110 {
7111 tree t = TREE_CODE (decl) == MEM_REF ? TREE_OPERAND (decl, 0) : decl;
7112 tree declared = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (t));
7113 return declared != NULL_TREE;
7114 }
7115
/* Determine outer default flags for DECL mentioned in an OMP region
   but not declared in an enclosing clause.

   ??? Some compiler-generated variables (like SAVE_EXPRs) could be
   remapped firstprivate instead of shared.  To some extent this is
   addressed in omp_firstprivatize_type_sizes, but not
   effectively.  */

static unsigned
omp_default_clause (struct gimplify_omp_ctx *ctx, tree decl,
		    bool in_code, unsigned flags)
{
  enum omp_clause_default_kind default_kind = ctx->default_kind;
  enum omp_clause_default_kind kind;

  /* A language-predetermined sharing overrides the region's default
     clause.  */
  kind = lang_hooks.decls.omp_predetermined_sharing (decl);
  if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
    default_kind = kind;

  switch (default_kind)
    {
    case OMP_CLAUSE_DEFAULT_NONE:
      {
	const char *rtype;

	/* Name the innermost enclosing region kind for the diagnostic.
	   ORT_TASKLOOP is tested with full equality because its bits
	   overlap ORT_TASK.  */
	if (ctx->region_type & ORT_PARALLEL)
	  rtype = "parallel";
	else if ((ctx->region_type & ORT_TASKLOOP) == ORT_TASKLOOP)
	  rtype = "taskloop";
	else if (ctx->region_type & ORT_TASK)
	  rtype = "task";
	else if (ctx->region_type & ORT_TEAMS)
	  rtype = "teams";
	else
	  gcc_unreachable ();

	error ("%qE not specified in enclosing %qs",
	       DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rtype);
	error_at (ctx->location, "enclosing %qs", rtype);
      }
      /* FALLTHRU */
    case OMP_CLAUSE_DEFAULT_SHARED:
      flags |= GOVD_SHARED;
      break;
    case OMP_CLAUSE_DEFAULT_PRIVATE:
      flags |= GOVD_PRIVATE;
      break;
    case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
      flags |= GOVD_FIRSTPRIVATE;
      break;
    case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
      /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED.  */
      gcc_assert ((ctx->region_type & ORT_TASK) != 0);
      if (struct gimplify_omp_ctx *octx = ctx->outer_context)
	{
	  /* Make sure the outer contexts have noticed the variable, then
	     walk outward to find a determining context.  */
	  omp_notice_variable (octx, decl, in_code);
	  for (; octx; octx = octx->outer_context)
	    {
	      splay_tree_node n2;

	      n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
	      /* Target regions without a data-sharing class for DECL do
		 not determine its sharing; keep looking outward.  */
	      if ((octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)) != 0
		  && (n2 == NULL || (n2->value & GOVD_DATA_SHARE_CLASS) == 0))
		continue;
	      /* Any non-shared sharing class outside makes the task copy
		 the value in; shared stays shared.  */
	      if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
		{
		  flags |= GOVD_FIRSTPRIVATE;
		  goto found_outer;
		}
	      if ((octx->region_type & (ORT_PARALLEL | ORT_TEAMS)) != 0)
		{
		  flags |= GOVD_SHARED;
		  goto found_outer;
		}
	    }
	}

      /* No determining outer context: locals and parameters of the
	 current function default to firstprivate, globals to shared.  */
      if (TREE_CODE (decl) == PARM_DECL
	  || (!is_global_var (decl)
	      && DECL_CONTEXT (decl) == current_function_decl))
	flags |= GOVD_FIRSTPRIVATE;
      else
	flags |= GOVD_SHARED;
    found_outer:
      break;

    default:
      gcc_unreachable ();
    }

  return flags;
}
7208
7209
/* Determine outer default flags for DECL mentioned in an OACC region
   but not declared in an enclosing clause.  Returns the updated FLAGS;
   may emit a diagnostic under default(none).  */

static unsigned
oacc_default_clause (struct gimplify_omp_ctx *ctx, tree decl, unsigned flags)
{
  const char *rkind;
  bool on_device = false;
  bool declared = is_oacc_declared (decl);
  tree type = TREE_TYPE (decl);

  /* For by-reference privatization, classify by the referenced type.  */
  if (lang_hooks.decls.omp_privatize_by_reference (decl))
    type = TREE_TYPE (type);

  /* Globals already resident on the device only need a 'to' mapping.  */
  if ((ctx->region_type & (ORT_ACC_PARALLEL | ORT_ACC_KERNELS)) != 0
      && is_global_var (decl)
      && device_resident_p (decl))
    {
      on_device = true;
      flags |= GOVD_MAP_TO_ONLY;
    }

  switch (ctx->region_type)
    {
    case ORT_ACC_KERNELS:
      rkind = "kernels";

      if (AGGREGATE_TYPE_P (type))
	{
	  /* Aggregates default to 'present_or_copy', or 'present'.  */
	  if (ctx->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
	    flags |= GOVD_MAP;
	  else
	    flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
	}
      else
	/* Scalars default to 'copy'.  */
	flags |= GOVD_MAP | GOVD_MAP_FORCE;

      break;

    case ORT_ACC_PARALLEL:
      rkind = "parallel";

      /* Device-resident or 'acc declare'd variables are mapped.  */
      if (on_device || declared)
	flags |= GOVD_MAP;
      else if (AGGREGATE_TYPE_P (type))
	{
	  /* Aggregates default to 'present_or_copy', or 'present'.  */
	  if (ctx->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
	    flags |= GOVD_MAP;
	  else
	    flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
	}
      else
	/* Scalars default to 'firstprivate'.  */
	flags |= GOVD_FIRSTPRIVATE;

      break;

    default:
      gcc_unreachable ();
    }

  if (DECL_ARTIFICIAL (decl))
    ; /* We can get compiler-generated decls, and should not complain
	 about them.  */
  else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_NONE)
    {
      error ("%qE not specified in enclosing OpenACC %qs construct",
	     DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rkind);
      inform (ctx->location, "enclosing OpenACC %qs construct", rkind);
    }
  else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_PRESENT)
    ; /* Handled above.  */
  else
    gcc_checking_assert (ctx->default_kind == OMP_CLAUSE_DEFAULT_SHARED);

  return flags;
}
7290
/* Record the fact that DECL was used within the OMP context CTX.
   IN_CODE is true when real code uses DECL, and false when we should
   merely emit default(none) errors.  Return true if DECL is going to
   be remapped and thus DECL shouldn't be gimplified into its
   DECL_VALUE_EXPR (if any).  */

static bool
omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
{
  splay_tree_node n;
  unsigned flags = in_code ? GOVD_SEEN : 0;
  bool ret = false, shared;

  if (error_operand_p (decl))
    return false;

  if (ctx->region_type == ORT_NONE)
    return lang_hooks.decls.omp_disregard_value_expr (decl, false);

  if (is_global_var (decl))
    {
      /* Threadprivate variables are predetermined.  */
      if (DECL_THREAD_LOCAL_P (decl))
	return omp_notice_threadprivate_variable (ctx, decl, NULL_TREE);

      /* A value expr whose base is thread-local makes DECL effectively
	 threadprivate too; suppress a second diagnostic on the base.  */
      if (DECL_HAS_VALUE_EXPR_P (decl))
	{
	  tree value = get_base_address (DECL_VALUE_EXPR (decl));

	  if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
	    return omp_notice_threadprivate_variable (ctx, decl, value);
	}

      /* In the body of an OpenACC 'routine' function, global variables
	 must be marked 'declare' (and not with the 'link' clause).  */
      if (gimplify_omp_ctxp->outer_context == NULL
	  && VAR_P (decl)
	  && oacc_get_fn_attrib (current_function_decl))
	{
	  location_t loc = DECL_SOURCE_LOCATION (decl);

	  if (lookup_attribute ("omp declare target link",
				DECL_ATTRIBUTES (decl)))
	    {
	      error_at (loc,
			"%qE with %<link%> clause used in %<routine%> function",
			DECL_NAME (decl));
	      return false;
	    }
	  else if (!lookup_attribute ("omp declare target",
				      DECL_ATTRIBUTES (decl)))
	    {
	      error_at (loc,
			"%qE requires a %<declare%> directive for use "
			"in a %<routine%> function", DECL_NAME (decl));
	      return false;
	    }
	}
    }

  n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
  if ((ctx->region_type & ORT_TARGET) != 0)
    {
      ret = lang_hooks.decls.omp_disregard_value_expr (decl, true);
      if (n == NULL)
	{
	  /* DECL isn't mentioned in any clause of this target region;
	     compute its implicit data-mapping attribute.  */
	  unsigned nflags = flags;
	  if ((ctx->region_type & ORT_ACC) == 0)
	    {
	      /* OpenMP target: apply the defaultmap classification unless
		 DECL is an offloadable ("declare target") global with no
		 overriding sharing in any outer context.  */
	      bool is_declare_target = false;
	      if (is_global_var (decl)
		  && varpool_node::get_create (decl)->offloadable)
		{
		  struct gimplify_omp_ctx *octx;
		  for (octx = ctx->outer_context;
		       octx; octx = octx->outer_context)
		    {
		      n = splay_tree_lookup (octx->variables,
					     (splay_tree_key)decl);
		      if (n
			  && (n->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED
			  && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
			break;
		    }
		  is_declare_target = octx == NULL;
		}
	      if (!is_declare_target)
		{
		  /* Classify DECL as pointer, scalar or aggregate for the
		     defaultmap lookup.  A reference to pointer counts as
		     pointer.  */
		  int gdmk;
		  if (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
		      || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
			  && (TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
			      == POINTER_TYPE)))
		    gdmk = GDMK_POINTER;
		  else if (lang_hooks.decls.omp_scalar_p (decl))
		    gdmk = GDMK_SCALAR;
		  else
		    gdmk = GDMK_AGGREGATE;
		  /* defaultmap[gdmk] == 0 means defaultmap(none) for this
		     category: using DECL implicitly is an error.  */
		  if (ctx->defaultmap[gdmk] == 0)
		    {
		      tree d = lang_hooks.decls.omp_report_decl (decl);
		      error ("%qE not specified in enclosing %<target%>",
			     DECL_NAME (d));
		      error_at (ctx->location, "enclosing %<target%>");
		    }
		  else if (ctx->defaultmap[gdmk]
			   & (GOVD_MAP_0LEN_ARRAY | GOVD_FIRSTPRIVATE))
		    nflags |= ctx->defaultmap[gdmk];
		  else
		    {
		      gcc_assert (ctx->defaultmap[gdmk] & GOVD_MAP);
		      nflags |= ctx->defaultmap[gdmk] & ~GOVD_MAP;
		    }
		}
	    }

	  struct gimplify_omp_ctx *octx = ctx->outer_context;
	  if ((ctx->region_type & ORT_ACC) && octx)
	    {
	      /* Look in outer OpenACC contexts, to see if there's a
		 data attribute for this variable.  */
	      omp_notice_variable (octx, decl, in_code);

	      for (; octx; octx = octx->outer_context)
		{
		  if (!(octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)))
		    break;
		  splay_tree_node n2
		    = splay_tree_lookup (octx->variables,
					 (splay_tree_key) decl);
		  if (n2)
		    {
		      if (octx->region_type == ORT_ACC_HOST_DATA)
			error ("variable %qE declared in enclosing "
			       "%<host_data%> region", DECL_NAME (decl));
		      nflags |= GOVD_MAP;
		      if (octx->region_type == ORT_ACC_DATA
			  && (n2->value & GOVD_MAP_0LEN_ARRAY))
			nflags |= GOVD_MAP_0LEN_ARRAY;
		      goto found_outer;
		    }
		}
	    }

	  /* If the defaultmap added nothing beyond the TO/FROM/ALLOC
	     modifier bits, fall back to the default mapping rules.  */
	  if ((nflags & ~(GOVD_MAP_TO_ONLY | GOVD_MAP_FROM_ONLY
			  | GOVD_MAP_ALLOC_ONLY)) == flags)
	    {
	      tree type = TREE_TYPE (decl);

	      if (gimplify_omp_ctxp->target_firstprivatize_array_bases
		  && lang_hooks.decls.omp_privatize_by_reference (decl))
		type = TREE_TYPE (type);
	      if (!lang_hooks.types.omp_mappable_type (type))
		{
		  error ("%qD referenced in target region does not have "
			 "a mappable type", decl);
		  nflags |= GOVD_MAP | GOVD_EXPLICIT;
		}
	      else
		{
		  if ((ctx->region_type & ORT_ACC) != 0)
		    nflags = oacc_default_clause (ctx, decl, flags);
		  else
		    nflags |= GOVD_MAP;
		}
	    }
	found_outer:
	  omp_add_variable (ctx, decl, nflags);
	}
      else
	{
	  /* If nothing changed, there's nothing left to do.  */
	  if ((n->value & flags) == flags)
	    return ret;
	  flags |= n->value;
	  n->value = flags;
	}
      goto do_outer;
    }

  if (n == NULL)
    {
      /* Regions with no data-sharing of their own just forward the
	 query to the enclosing context.  */
      if (ctx->region_type == ORT_WORKSHARE
	  || ctx->region_type == ORT_TASKGROUP
	  || ctx->region_type == ORT_SIMD
	  || ctx->region_type == ORT_ACC
	  || (ctx->region_type & ORT_TARGET_DATA) != 0)
	goto do_outer;

      flags = omp_default_clause (ctx, decl, in_code, flags);

      if ((flags & GOVD_PRIVATE)
	  && lang_hooks.decls.omp_private_outer_ref (decl))
	flags |= GOVD_PRIVATE_OUTER_REF;

      omp_add_variable (ctx, decl, flags);

      shared = (flags & GOVD_SHARED) != 0;
      ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
      goto do_outer;
    }

  /* First real use of a variable-sized or by-reference DECL: mark the
     auxiliary size/pointer decls as seen as well.  */
  if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
      && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
      && DECL_SIZE (decl))
    {
      if (TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
	{
	  /* Variable-sized DECL: its DECL_VALUE_EXPR is *ptr; mark the
	     replacement pointer variable as seen.  */
	  splay_tree_node n2;
	  tree t = DECL_VALUE_EXPR (decl);
	  gcc_assert (TREE_CODE (t) == INDIRECT_REF);
	  t = TREE_OPERAND (t, 0);
	  gcc_assert (DECL_P (t));
	  n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
	  n2->value |= GOVD_SEEN;
	}
      else if (lang_hooks.decls.omp_privatize_by_reference (decl)
	       && TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)))
	       && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))))
		   != INTEGER_CST))
	{
	  /* By-reference DECL with a non-constant pointee size: notice
	     the size decl if it is tracked in this context.  */
	  splay_tree_node n2;
	  tree t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
	  gcc_assert (DECL_P (t));
	  n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
	  if (n2)
	    omp_notice_variable (ctx, t, true);
	}
    }

  shared = ((flags | n->value) & GOVD_SHARED) != 0;
  ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);

  /* If nothing changed, there's nothing left to do.  */
  if ((n->value & flags) == flags)
    return ret;
  flags |= n->value;
  n->value = flags;

 do_outer:
  /* If the variable is private in the current context, then we don't
     need to propagate anything to an outer context.  */
  if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
    return ret;
  if ((flags & (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
      == (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
    return ret;
  if ((flags & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
		| GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
      == (GOVD_LASTPRIVATE | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
    return ret;
  if (ctx->outer_context
      && omp_notice_variable (ctx->outer_context, decl, in_code))
    return true;
  return ret;
}
7545
/* Verify that DECL is private within CTX.  If there's specific information
   to the contrary in the innermost scope, generate an error.  SIMD is
   non-zero when checking a simd iteration variable (and 1 for the case
   where linear is also disallowed).  Returns true if CTX (or, for a
   combined parallel, its inner context) is the context that determines
   DECL's privatization.  */

static bool
omp_is_private (struct gimplify_omp_ctx *ctx, tree decl, int simd)
{
  splay_tree_node n;

  n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
  if (n != NULL)
    {
      if (n->value & GOVD_SHARED)
	{
	  if (ctx == gimplify_omp_ctxp)
	    {
	      if (simd)
		error ("iteration variable %qE is predetermined linear",
		       DECL_NAME (decl));
	      else
		error ("iteration variable %qE should be private",
		       DECL_NAME (decl));
	      /* Force privatization so gimplification can proceed.  */
	      n->value = GOVD_PRIVATE;
	      return true;
	    }
	  else
	    return false;
	}
      else if ((n->value & GOVD_EXPLICIT) != 0
	       && (ctx == gimplify_omp_ctxp
		   || (ctx->region_type == ORT_COMBINED_PARALLEL
		       && gimplify_omp_ctxp->outer_context == ctx)))
	{
	  /* Explicit clauses on the iteration variable that conflict
	     with its predetermined sharing are diagnosed here.  */
	  if ((n->value & GOVD_FIRSTPRIVATE) != 0)
	    error ("iteration variable %qE should not be firstprivate",
		   DECL_NAME (decl));
	  else if ((n->value & GOVD_REDUCTION) != 0)
	    error ("iteration variable %qE should not be reduction",
		   DECL_NAME (decl));
	  else if (simd != 1 && (n->value & GOVD_LINEAR) != 0)
	    error ("iteration variable %qE should not be linear",
		   DECL_NAME (decl));
	}
      return (ctx == gimplify_omp_ctxp
	      || (ctx->region_type == ORT_COMBINED_PARALLEL
		  && gimplify_omp_ctxp->outer_context == ctx));
    }

  /* Not mentioned here: only transparent region kinds forward the
     query to the enclosing context.  */
  if (ctx->region_type != ORT_WORKSHARE
      && ctx->region_type != ORT_TASKGROUP
      && ctx->region_type != ORT_SIMD
      && ctx->region_type != ORT_ACC)
    return false;
  else if (ctx->outer_context)
    return omp_is_private (ctx->outer_context, decl, simd);
  return false;
}
7602
/* Return true if DECL is private within a parallel region
   that binds to the current construct's context or in parallel
   region's REDUCTION clause.  COPYPRIVATE selects the assumption made
   for by-reference variables when no determining context is found.  */

static bool
omp_check_private (struct gimplify_omp_ctx *ctx, tree decl, bool copyprivate)
{
  splay_tree_node n;

  do
    {
      ctx = ctx->outer_context;
      if (ctx == NULL)
	{
	  /* Ran out of contexts without finding DECL.  */
	  if (is_global_var (decl))
	    return false;

	  /* References might be private, but might be shared too,
	     when checking for copyprivate, assume they might be
	     private, otherwise assume they might be shared.  */
	  if (copyprivate)
	    return true;

	  if (lang_hooks.decls.omp_privatize_by_reference (decl))
	    return false;

	  /* Treat C++ privatized non-static data members outside
	     of the privatization the same.  */
	  if (omp_member_access_dummy_var (decl))
	    return false;

	  return true;
	}

      n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);

      /* Target regions with no data-sharing class for DECL do not
	 determine its privateness; keep walking outward.  */
      if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
	  && (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0))
	continue;

      if (n != NULL)
	{
	  if ((n->value & GOVD_LOCAL) != 0
	      && omp_member_access_dummy_var (decl))
	    return false;
	  /* Private exactly when not shared in this context.  */
	  return (n->value & GOVD_SHARED) == 0;
	}
    }
  /* Only look through region kinds that don't determine sharing
     themselves.  */
  while (ctx->region_type == ORT_WORKSHARE
	 || ctx->region_type == ORT_TASKGROUP
	 || ctx->region_type == ORT_SIMD
	 || ctx->region_type == ORT_ACC);
  return false;
}
7657
7658 /* Callback for walk_tree to find a DECL_EXPR for the given DECL. */
7659
7660 static tree
7661 find_decl_expr (tree *tp, int *walk_subtrees, void *data)
7662 {
7663 tree t = *tp;
7664
7665 /* If this node has been visited, unmark it and keep looking. */
7666 if (TREE_CODE (t) == DECL_EXPR && DECL_EXPR_DECL (t) == (tree) data)
7667 return t;
7668
7669 if (IS_TYPE_OR_DECL_P (t))
7670 *walk_subtrees = 0;
7671 return NULL_TREE;
7672 }
7673
7674 /* If *LIST_P contains any OpenMP depend clauses with iterators,
7675 lower all the depend clauses by populating corresponding depend
7676 array. Returns 0 if there are no such depend clauses, or
7677 2 if all depend clauses should be removed, 1 otherwise. */
7678
7679 static int
7680 gimplify_omp_depend (tree *list_p, gimple_seq *pre_p)
7681 {
7682 tree c;
7683 gimple *g;
7684 size_t n[4] = { 0, 0, 0, 0 };
7685 bool unused[4];
7686 tree counts[4] = { NULL_TREE, NULL_TREE, NULL_TREE, NULL_TREE };
7687 tree last_iter = NULL_TREE, last_count = NULL_TREE;
7688 size_t i, j;
7689 location_t first_loc = UNKNOWN_LOCATION;
7690
7691 for (c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
7692 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
7693 {
7694 switch (OMP_CLAUSE_DEPEND_KIND (c))
7695 {
7696 case OMP_CLAUSE_DEPEND_IN:
7697 i = 2;
7698 break;
7699 case OMP_CLAUSE_DEPEND_OUT:
7700 case OMP_CLAUSE_DEPEND_INOUT:
7701 i = 0;
7702 break;
7703 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
7704 i = 1;
7705 break;
7706 case OMP_CLAUSE_DEPEND_DEPOBJ:
7707 i = 3;
7708 break;
7709 case OMP_CLAUSE_DEPEND_SOURCE:
7710 case OMP_CLAUSE_DEPEND_SINK:
7711 continue;
7712 default:
7713 gcc_unreachable ();
7714 }
7715 tree t = OMP_CLAUSE_DECL (c);
7716 if (first_loc == UNKNOWN_LOCATION)
7717 first_loc = OMP_CLAUSE_LOCATION (c);
7718 if (TREE_CODE (t) == TREE_LIST
7719 && TREE_PURPOSE (t)
7720 && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
7721 {
7722 if (TREE_PURPOSE (t) != last_iter)
7723 {
7724 tree tcnt = size_one_node;
7725 for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
7726 {
7727 if (gimplify_expr (&TREE_VEC_ELT (it, 1), pre_p, NULL,
7728 is_gimple_val, fb_rvalue) == GS_ERROR
7729 || gimplify_expr (&TREE_VEC_ELT (it, 2), pre_p, NULL,
7730 is_gimple_val, fb_rvalue) == GS_ERROR
7731 || gimplify_expr (&TREE_VEC_ELT (it, 3), pre_p, NULL,
7732 is_gimple_val, fb_rvalue) == GS_ERROR
7733 || (gimplify_expr (&TREE_VEC_ELT (it, 4), pre_p, NULL,
7734 is_gimple_val, fb_rvalue)
7735 == GS_ERROR))
7736 return 2;
7737 tree var = TREE_VEC_ELT (it, 0);
7738 tree begin = TREE_VEC_ELT (it, 1);
7739 tree end = TREE_VEC_ELT (it, 2);
7740 tree step = TREE_VEC_ELT (it, 3);
7741 tree orig_step = TREE_VEC_ELT (it, 4);
7742 tree type = TREE_TYPE (var);
7743 tree stype = TREE_TYPE (step);
7744 location_t loc = DECL_SOURCE_LOCATION (var);
7745 tree endmbegin;
7746 /* Compute count for this iterator as
7747 orig_step > 0
7748 ? (begin < end ? (end - begin + (step - 1)) / step : 0)
7749 : (begin > end ? (end - begin + (step + 1)) / step : 0)
7750 and compute product of those for the entire depend
7751 clause. */
7752 if (POINTER_TYPE_P (type))
7753 endmbegin = fold_build2_loc (loc, POINTER_DIFF_EXPR,
7754 stype, end, begin);
7755 else
7756 endmbegin = fold_build2_loc (loc, MINUS_EXPR, type,
7757 end, begin);
7758 tree stepm1 = fold_build2_loc (loc, MINUS_EXPR, stype,
7759 step,
7760 build_int_cst (stype, 1));
7761 tree stepp1 = fold_build2_loc (loc, PLUS_EXPR, stype, step,
7762 build_int_cst (stype, 1));
7763 tree pos = fold_build2_loc (loc, PLUS_EXPR, stype,
7764 unshare_expr (endmbegin),
7765 stepm1);
7766 pos = fold_build2_loc (loc, TRUNC_DIV_EXPR, stype,
7767 pos, step);
7768 tree neg = fold_build2_loc (loc, PLUS_EXPR, stype,
7769 endmbegin, stepp1);
7770 if (TYPE_UNSIGNED (stype))
7771 {
7772 neg = fold_build1_loc (loc, NEGATE_EXPR, stype, neg);
7773 step = fold_build1_loc (loc, NEGATE_EXPR, stype, step);
7774 }
7775 neg = fold_build2_loc (loc, TRUNC_DIV_EXPR, stype,
7776 neg, step);
7777 step = NULL_TREE;
7778 tree cond = fold_build2_loc (loc, LT_EXPR,
7779 boolean_type_node,
7780 begin, end);
7781 pos = fold_build3_loc (loc, COND_EXPR, stype, cond, pos,
7782 build_int_cst (stype, 0));
7783 cond = fold_build2_loc (loc, LT_EXPR, boolean_type_node,
7784 end, begin);
7785 neg = fold_build3_loc (loc, COND_EXPR, stype, cond, neg,
7786 build_int_cst (stype, 0));
7787 tree osteptype = TREE_TYPE (orig_step);
7788 cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
7789 orig_step,
7790 build_int_cst (osteptype, 0));
7791 tree cnt = fold_build3_loc (loc, COND_EXPR, stype,
7792 cond, pos, neg);
7793 cnt = fold_convert_loc (loc, sizetype, cnt);
7794 if (gimplify_expr (&cnt, pre_p, NULL, is_gimple_val,
7795 fb_rvalue) == GS_ERROR)
7796 return 2;
7797 tcnt = size_binop_loc (loc, MULT_EXPR, tcnt, cnt);
7798 }
7799 if (gimplify_expr (&tcnt, pre_p, NULL, is_gimple_val,
7800 fb_rvalue) == GS_ERROR)
7801 return 2;
7802 last_iter = TREE_PURPOSE (t);
7803 last_count = tcnt;
7804 }
7805 if (counts[i] == NULL_TREE)
7806 counts[i] = last_count;
7807 else
7808 counts[i] = size_binop_loc (OMP_CLAUSE_LOCATION (c),
7809 PLUS_EXPR, counts[i], last_count);
7810 }
7811 else
7812 n[i]++;
7813 }
7814 for (i = 0; i < 4; i++)
7815 if (counts[i])
7816 break;
7817 if (i == 4)
7818 return 0;
7819
7820 tree total = size_zero_node;
7821 for (i = 0; i < 4; i++)
7822 {
7823 unused[i] = counts[i] == NULL_TREE && n[i] == 0;
7824 if (counts[i] == NULL_TREE)
7825 counts[i] = size_zero_node;
7826 if (n[i])
7827 counts[i] = size_binop (PLUS_EXPR, counts[i], size_int (n[i]));
7828 if (gimplify_expr (&counts[i], pre_p, NULL, is_gimple_val,
7829 fb_rvalue) == GS_ERROR)
7830 return 2;
7831 total = size_binop (PLUS_EXPR, total, counts[i]);
7832 }
7833
7834 if (gimplify_expr (&total, pre_p, NULL, is_gimple_val, fb_rvalue)
7835 == GS_ERROR)
7836 return 2;
7837 bool is_old = unused[1] && unused[3];
7838 tree totalpx = size_binop (PLUS_EXPR, unshare_expr (total),
7839 size_int (is_old ? 1 : 4));
7840 tree type = build_array_type (ptr_type_node, build_index_type (totalpx));
7841 tree array = create_tmp_var_raw (type);
7842 TREE_ADDRESSABLE (array) = 1;
7843 if (TREE_CODE (totalpx) != INTEGER_CST)
7844 {
7845 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (array)))
7846 gimplify_type_sizes (TREE_TYPE (array), pre_p);
7847 if (gimplify_omp_ctxp)
7848 {
7849 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
7850 while (ctx
7851 && (ctx->region_type == ORT_WORKSHARE
7852 || ctx->region_type == ORT_TASKGROUP
7853 || ctx->region_type == ORT_SIMD
7854 || ctx->region_type == ORT_ACC))
7855 ctx = ctx->outer_context;
7856 if (ctx)
7857 omp_add_variable (ctx, array, GOVD_LOCAL | GOVD_SEEN);
7858 }
7859 gimplify_vla_decl (array, pre_p);
7860 }
7861 else
7862 gimple_add_tmp_var (array);
7863 tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
7864 NULL_TREE);
7865 tree tem;
7866 if (!is_old)
7867 {
7868 tem = build2 (MODIFY_EXPR, void_type_node, r,
7869 build_int_cst (ptr_type_node, 0));
7870 gimplify_and_add (tem, pre_p);
7871 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
7872 NULL_TREE);
7873 }
7874 tem = build2 (MODIFY_EXPR, void_type_node, r,
7875 fold_convert (ptr_type_node, total));
7876 gimplify_and_add (tem, pre_p);
7877 for (i = 1; i < (is_old ? 2 : 4); i++)
7878 {
7879 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (i + !is_old),
7880 NULL_TREE, NULL_TREE);
7881 tem = build2 (MODIFY_EXPR, void_type_node, r, counts[i - 1]);
7882 gimplify_and_add (tem, pre_p);
7883 }
7884
7885 tree cnts[4];
7886 for (j = 4; j; j--)
7887 if (!unused[j - 1])
7888 break;
7889 for (i = 0; i < 4; i++)
7890 {
7891 if (i && (i >= j || unused[i - 1]))
7892 {
7893 cnts[i] = cnts[i - 1];
7894 continue;
7895 }
7896 cnts[i] = create_tmp_var (sizetype);
7897 if (i == 0)
7898 g = gimple_build_assign (cnts[i], size_int (is_old ? 2 : 5));
7899 else
7900 {
7901 tree t;
7902 if (is_old)
7903 t = size_binop (PLUS_EXPR, counts[0], size_int (2));
7904 else
7905 t = size_binop (PLUS_EXPR, cnts[i - 1], counts[i - 1]);
7906 if (gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue)
7907 == GS_ERROR)
7908 return 2;
7909 g = gimple_build_assign (cnts[i], t);
7910 }
7911 gimple_seq_add_stmt (pre_p, g);
7912 }
7913
7914 last_iter = NULL_TREE;
7915 tree last_bind = NULL_TREE;
7916 tree *last_body = NULL;
7917 for (c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
7918 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
7919 {
7920 switch (OMP_CLAUSE_DEPEND_KIND (c))
7921 {
7922 case OMP_CLAUSE_DEPEND_IN:
7923 i = 2;
7924 break;
7925 case OMP_CLAUSE_DEPEND_OUT:
7926 case OMP_CLAUSE_DEPEND_INOUT:
7927 i = 0;
7928 break;
7929 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
7930 i = 1;
7931 break;
7932 case OMP_CLAUSE_DEPEND_DEPOBJ:
7933 i = 3;
7934 break;
7935 case OMP_CLAUSE_DEPEND_SOURCE:
7936 case OMP_CLAUSE_DEPEND_SINK:
7937 continue;
7938 default:
7939 gcc_unreachable ();
7940 }
7941 tree t = OMP_CLAUSE_DECL (c);
7942 if (TREE_CODE (t) == TREE_LIST
7943 && TREE_PURPOSE (t)
7944 && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
7945 {
7946 if (TREE_PURPOSE (t) != last_iter)
7947 {
7948 if (last_bind)
7949 gimplify_and_add (last_bind, pre_p);
7950 tree block = TREE_VEC_ELT (TREE_PURPOSE (t), 5);
7951 last_bind = build3 (BIND_EXPR, void_type_node,
7952 BLOCK_VARS (block), NULL, block);
7953 TREE_SIDE_EFFECTS (last_bind) = 1;
7954 SET_EXPR_LOCATION (last_bind, OMP_CLAUSE_LOCATION (c));
7955 tree *p = &BIND_EXPR_BODY (last_bind);
7956 for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
7957 {
7958 tree var = TREE_VEC_ELT (it, 0);
7959 tree begin = TREE_VEC_ELT (it, 1);
7960 tree end = TREE_VEC_ELT (it, 2);
7961 tree step = TREE_VEC_ELT (it, 3);
7962 tree orig_step = TREE_VEC_ELT (it, 4);
7963 tree type = TREE_TYPE (var);
7964 location_t loc = DECL_SOURCE_LOCATION (var);
7965 /* Emit:
7966 var = begin;
7967 goto cond_label;
7968 beg_label:
7969 ...
7970 var = var + step;
7971 cond_label:
7972 if (orig_step > 0) {
7973 if (var < end) goto beg_label;
7974 } else {
7975 if (var > end) goto beg_label;
7976 }
7977 for each iterator, with inner iterators added to
7978 the ... above. */
7979 tree beg_label = create_artificial_label (loc);
7980 tree cond_label = NULL_TREE;
7981 tem = build2_loc (loc, MODIFY_EXPR, void_type_node,
7982 var, begin);
7983 append_to_statement_list_force (tem, p);
7984 tem = build_and_jump (&cond_label);
7985 append_to_statement_list_force (tem, p);
7986 tem = build1 (LABEL_EXPR, void_type_node, beg_label);
7987 append_to_statement_list (tem, p);
7988 tree bind = build3 (BIND_EXPR, void_type_node, NULL_TREE,
7989 NULL_TREE, NULL_TREE);
7990 TREE_SIDE_EFFECTS (bind) = 1;
7991 SET_EXPR_LOCATION (bind, loc);
7992 append_to_statement_list_force (bind, p);
7993 if (POINTER_TYPE_P (type))
7994 tem = build2_loc (loc, POINTER_PLUS_EXPR, type,
7995 var, fold_convert_loc (loc, sizetype,
7996 step));
7997 else
7998 tem = build2_loc (loc, PLUS_EXPR, type, var, step);
7999 tem = build2_loc (loc, MODIFY_EXPR, void_type_node,
8000 var, tem);
8001 append_to_statement_list_force (tem, p);
8002 tem = build1 (LABEL_EXPR, void_type_node, cond_label);
8003 append_to_statement_list (tem, p);
8004 tree cond = fold_build2_loc (loc, LT_EXPR,
8005 boolean_type_node,
8006 var, end);
8007 tree pos
8008 = fold_build3_loc (loc, COND_EXPR, void_type_node,
8009 cond, build_and_jump (&beg_label),
8010 void_node);
8011 cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
8012 var, end);
8013 tree neg
8014 = fold_build3_loc (loc, COND_EXPR, void_type_node,
8015 cond, build_and_jump (&beg_label),
8016 void_node);
8017 tree osteptype = TREE_TYPE (orig_step);
8018 cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
8019 orig_step,
8020 build_int_cst (osteptype, 0));
8021 tem = fold_build3_loc (loc, COND_EXPR, void_type_node,
8022 cond, pos, neg);
8023 append_to_statement_list_force (tem, p);
8024 p = &BIND_EXPR_BODY (bind);
8025 }
8026 last_body = p;
8027 }
8028 last_iter = TREE_PURPOSE (t);
8029 if (TREE_CODE (TREE_VALUE (t)) == COMPOUND_EXPR)
8030 {
8031 append_to_statement_list (TREE_OPERAND (TREE_VALUE (t),
8032 0), last_body);
8033 TREE_VALUE (t) = TREE_OPERAND (TREE_VALUE (t), 1);
8034 }
8035 if (error_operand_p (TREE_VALUE (t)))
8036 return 2;
8037 TREE_VALUE (t) = build_fold_addr_expr (TREE_VALUE (t));
8038 r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
8039 NULL_TREE, NULL_TREE);
8040 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
8041 void_type_node, r, TREE_VALUE (t));
8042 append_to_statement_list_force (tem, last_body);
8043 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
8044 void_type_node, cnts[i],
8045 size_binop (PLUS_EXPR, cnts[i], size_int (1)));
8046 append_to_statement_list_force (tem, last_body);
8047 TREE_VALUE (t) = null_pointer_node;
8048 }
8049 else
8050 {
8051 if (last_bind)
8052 {
8053 gimplify_and_add (last_bind, pre_p);
8054 last_bind = NULL_TREE;
8055 }
8056 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
8057 {
8058 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
8059 NULL, is_gimple_val, fb_rvalue);
8060 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
8061 }
8062 if (error_operand_p (OMP_CLAUSE_DECL (c)))
8063 return 2;
8064 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
8065 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
8066 is_gimple_val, fb_rvalue) == GS_ERROR)
8067 return 2;
8068 r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
8069 NULL_TREE, NULL_TREE);
8070 tem = build2 (MODIFY_EXPR, void_type_node, r, OMP_CLAUSE_DECL (c));
8071 gimplify_and_add (tem, pre_p);
8072 g = gimple_build_assign (cnts[i], size_binop (PLUS_EXPR, cnts[i],
8073 size_int (1)));
8074 gimple_seq_add_stmt (pre_p, g);
8075 }
8076 }
8077 if (last_bind)
8078 gimplify_and_add (last_bind, pre_p);
8079 tree cond = boolean_false_node;
8080 if (is_old)
8081 {
8082 if (!unused[0])
8083 cond = build2_loc (first_loc, NE_EXPR, boolean_type_node, cnts[0],
8084 size_binop_loc (first_loc, PLUS_EXPR, counts[0],
8085 size_int (2)));
8086 if (!unused[2])
8087 cond = build2_loc (first_loc, TRUTH_OR_EXPR, boolean_type_node, cond,
8088 build2_loc (first_loc, NE_EXPR, boolean_type_node,
8089 cnts[2],
8090 size_binop_loc (first_loc, PLUS_EXPR,
8091 totalpx,
8092 size_int (1))));
8093 }
8094 else
8095 {
8096 tree prev = size_int (5);
8097 for (i = 0; i < 4; i++)
8098 {
8099 if (unused[i])
8100 continue;
8101 prev = size_binop_loc (first_loc, PLUS_EXPR, counts[i], prev);
8102 cond = build2_loc (first_loc, TRUTH_OR_EXPR, boolean_type_node, cond,
8103 build2_loc (first_loc, NE_EXPR, boolean_type_node,
8104 cnts[i], unshare_expr (prev)));
8105 }
8106 }
8107 tem = build3_loc (first_loc, COND_EXPR, void_type_node, cond,
8108 build_call_expr_loc (first_loc,
8109 builtin_decl_explicit (BUILT_IN_TRAP),
8110 0), void_node);
8111 gimplify_and_add (tem, pre_p);
8112 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
8113 OMP_CLAUSE_DEPEND_KIND (c) = OMP_CLAUSE_DEPEND_LAST;
8114 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
8115 OMP_CLAUSE_CHAIN (c) = *list_p;
8116 *list_p = c;
8117 return 1;
8118 }
8119
8120 /* Scan the OMP clauses in *LIST_P, installing mappings into a newly
8121    created omp context and, where needed, into enclosing omp contexts.  */
8122
8123 static void
8124 gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
8125 enum omp_region_type region_type,
8126 enum tree_code code)
8127 {
8128 struct gimplify_omp_ctx *ctx, *outer_ctx;
8129 tree c;
8130 hash_map<tree, tree> *struct_map_to_clause = NULL;
8131 tree *prev_list_p = NULL, *orig_list_p = list_p;
8132 int handled_depend_iterators = -1;
8133 int nowait = -1;
8134
8135 ctx = new_omp_context (region_type);
8136 outer_ctx = ctx->outer_context;
8137 if (code == OMP_TARGET)
8138 {
8139 if (!lang_GNU_Fortran ())
8140 ctx->defaultmap[GDMK_POINTER] = GOVD_MAP | GOVD_MAP_0LEN_ARRAY;
8141 ctx->defaultmap[GDMK_SCALAR] = GOVD_FIRSTPRIVATE;
8142 }
8143 if (!lang_GNU_Fortran ())
8144 switch (code)
8145 {
8146 case OMP_TARGET:
8147 case OMP_TARGET_DATA:
8148 case OMP_TARGET_ENTER_DATA:
8149 case OMP_TARGET_EXIT_DATA:
8150 case OACC_DECLARE:
8151 case OACC_HOST_DATA:
8152 case OACC_PARALLEL:
8153 case OACC_KERNELS:
8154 ctx->target_firstprivatize_array_bases = true;
8155 default:
8156 break;
8157 }
8158
8159 while ((c = *list_p) != NULL)
8160 {
8161 bool remove = false;
8162 bool notice_outer = true;
8163 const char *check_non_private = NULL;
8164 unsigned int flags;
8165 tree decl;
8166
8167 switch (OMP_CLAUSE_CODE (c))
8168 {
8169 case OMP_CLAUSE_PRIVATE:
8170 flags = GOVD_PRIVATE | GOVD_EXPLICIT;
8171 if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
8172 {
8173 flags |= GOVD_PRIVATE_OUTER_REF;
8174 OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
8175 }
8176 else
8177 notice_outer = false;
8178 goto do_add;
8179 case OMP_CLAUSE_SHARED:
8180 flags = GOVD_SHARED | GOVD_EXPLICIT;
8181 goto do_add;
8182 case OMP_CLAUSE_FIRSTPRIVATE:
8183 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
8184 check_non_private = "firstprivate";
8185 goto do_add;
8186 case OMP_CLAUSE_LASTPRIVATE:
8187 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
8188 switch (code)
8189 {
8190 case OMP_DISTRIBUTE:
8191 error_at (OMP_CLAUSE_LOCATION (c),
8192 "conditional %<lastprivate%> clause on "
8193 "%qs construct", "distribute");
8194 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
8195 break;
8196 case OMP_TASKLOOP:
8197 error_at (OMP_CLAUSE_LOCATION (c),
8198 "conditional %<lastprivate%> clause on "
8199 "%qs construct", "taskloop");
8200 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
8201 break;
8202 default:
8203 break;
8204 }
8205 flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
8206 if (code != OMP_LOOP)
8207 check_non_private = "lastprivate";
8208 decl = OMP_CLAUSE_DECL (c);
8209 if (error_operand_p (decl))
8210 goto do_add;
8211 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
8212 && !lang_hooks.decls.omp_scalar_p (decl))
8213 {
8214 error_at (OMP_CLAUSE_LOCATION (c),
8215 "non-scalar variable %qD in conditional "
8216 "%<lastprivate%> clause", decl);
8217 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
8218 }
8219 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
8220 flags |= GOVD_LASTPRIVATE_CONDITIONAL;
8221 if (outer_ctx
8222 && (outer_ctx->region_type == ORT_COMBINED_PARALLEL
8223 || ((outer_ctx->region_type & ORT_COMBINED_TEAMS)
8224 == ORT_COMBINED_TEAMS))
8225 && splay_tree_lookup (outer_ctx->variables,
8226 (splay_tree_key) decl) == NULL)
8227 {
8228 omp_add_variable (outer_ctx, decl, GOVD_SHARED | GOVD_SEEN);
8229 if (outer_ctx->outer_context)
8230 omp_notice_variable (outer_ctx->outer_context, decl, true);
8231 }
8232 else if (outer_ctx
8233 && (outer_ctx->region_type & ORT_TASK) != 0
8234 && outer_ctx->combined_loop
8235 && splay_tree_lookup (outer_ctx->variables,
8236 (splay_tree_key) decl) == NULL)
8237 {
8238 omp_add_variable (outer_ctx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
8239 if (outer_ctx->outer_context)
8240 omp_notice_variable (outer_ctx->outer_context, decl, true);
8241 }
8242 else if (outer_ctx
8243 && (outer_ctx->region_type == ORT_WORKSHARE
8244 || outer_ctx->region_type == ORT_ACC)
8245 && outer_ctx->combined_loop
8246 && splay_tree_lookup (outer_ctx->variables,
8247 (splay_tree_key) decl) == NULL
8248 && !omp_check_private (outer_ctx, decl, false))
8249 {
8250 omp_add_variable (outer_ctx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
8251 if (outer_ctx->outer_context
8252 && (outer_ctx->outer_context->region_type
8253 == ORT_COMBINED_PARALLEL)
8254 && splay_tree_lookup (outer_ctx->outer_context->variables,
8255 (splay_tree_key) decl) == NULL)
8256 {
8257 struct gimplify_omp_ctx *octx = outer_ctx->outer_context;
8258 omp_add_variable (octx, decl, GOVD_SHARED | GOVD_SEEN);
8259 if (octx->outer_context)
8260 {
8261 octx = octx->outer_context;
8262 if (octx->region_type == ORT_WORKSHARE
8263 && octx->combined_loop
8264 && splay_tree_lookup (octx->variables,
8265 (splay_tree_key) decl) == NULL
8266 && !omp_check_private (octx, decl, false))
8267 {
8268 omp_add_variable (octx, decl,
8269 GOVD_LASTPRIVATE | GOVD_SEEN);
8270 octx = octx->outer_context;
8271 if (octx
8272 && ((octx->region_type & ORT_COMBINED_TEAMS)
8273 == ORT_COMBINED_TEAMS)
8274 && (splay_tree_lookup (octx->variables,
8275 (splay_tree_key) decl)
8276 == NULL))
8277 {
8278 omp_add_variable (octx, decl,
8279 GOVD_SHARED | GOVD_SEEN);
8280 octx = octx->outer_context;
8281 }
8282 }
8283 if (octx)
8284 omp_notice_variable (octx, decl, true);
8285 }
8286 }
8287 else if (outer_ctx->outer_context)
8288 omp_notice_variable (outer_ctx->outer_context, decl, true);
8289 }
8290 goto do_add;
8291 case OMP_CLAUSE_REDUCTION:
8292 if (OMP_CLAUSE_REDUCTION_TASK (c))
8293 {
8294 if (region_type == ORT_WORKSHARE)
8295 {
8296 if (nowait == -1)
8297 nowait = omp_find_clause (*list_p,
8298 OMP_CLAUSE_NOWAIT) != NULL_TREE;
8299 if (nowait
8300 && (outer_ctx == NULL
8301 || outer_ctx->region_type != ORT_COMBINED_PARALLEL))
8302 {
8303 error_at (OMP_CLAUSE_LOCATION (c),
8304 "%<task%> reduction modifier on a construct "
8305 "with a %<nowait%> clause");
8306 OMP_CLAUSE_REDUCTION_TASK (c) = 0;
8307 }
8308 }
8309 else if ((region_type & ORT_PARALLEL) != ORT_PARALLEL)
8310 {
8311 error_at (OMP_CLAUSE_LOCATION (c),
8312 "invalid %<task%> reduction modifier on construct "
8313 "other than %<parallel%>, %<for%> or %<sections%>");
8314 OMP_CLAUSE_REDUCTION_TASK (c) = 0;
8315 }
8316 }
8317 if (OMP_CLAUSE_REDUCTION_INSCAN (c))
8318 switch (code)
8319 {
8320 case OMP_SECTIONS:
8321 error_at (OMP_CLAUSE_LOCATION (c),
8322 "%<inscan%> %<reduction%> clause on "
8323 "%qs construct", "sections");
8324 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
8325 break;
8326 case OMP_PARALLEL:
8327 error_at (OMP_CLAUSE_LOCATION (c),
8328 "%<inscan%> %<reduction%> clause on "
8329 "%qs construct", "parallel");
8330 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
8331 break;
8332 case OMP_TEAMS:
8333 error_at (OMP_CLAUSE_LOCATION (c),
8334 "%<inscan%> %<reduction%> clause on "
8335 "%qs construct", "teams");
8336 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
8337 break;
8338 case OMP_TASKLOOP:
8339 error_at (OMP_CLAUSE_LOCATION (c),
8340 "%<inscan%> %<reduction%> clause on "
8341 "%qs construct", "taskloop");
8342 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
8343 break;
8344 default:
8345 break;
8346 }
8347 /* FALLTHRU */
8348 case OMP_CLAUSE_IN_REDUCTION:
8349 case OMP_CLAUSE_TASK_REDUCTION:
8350 flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
8351 /* OpenACC permits reductions on private variables. */
8352 if (!(region_type & ORT_ACC)
8353 /* taskgroup is actually not a worksharing region. */
8354 && code != OMP_TASKGROUP)
8355 check_non_private = omp_clause_code_name[OMP_CLAUSE_CODE (c)];
8356 decl = OMP_CLAUSE_DECL (c);
8357 if (TREE_CODE (decl) == MEM_REF)
8358 {
8359 tree type = TREE_TYPE (decl);
8360 if (gimplify_expr (&TYPE_MAX_VALUE (TYPE_DOMAIN (type)), pre_p,
8361 NULL, is_gimple_val, fb_rvalue, false)
8362 == GS_ERROR)
8363 {
8364 remove = true;
8365 break;
8366 }
8367 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
8368 if (DECL_P (v))
8369 {
8370 omp_firstprivatize_variable (ctx, v);
8371 omp_notice_variable (ctx, v, true);
8372 }
8373 decl = TREE_OPERAND (decl, 0);
8374 if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
8375 {
8376 if (gimplify_expr (&TREE_OPERAND (decl, 1), pre_p,
8377 NULL, is_gimple_val, fb_rvalue, false)
8378 == GS_ERROR)
8379 {
8380 remove = true;
8381 break;
8382 }
8383 v = TREE_OPERAND (decl, 1);
8384 if (DECL_P (v))
8385 {
8386 omp_firstprivatize_variable (ctx, v);
8387 omp_notice_variable (ctx, v, true);
8388 }
8389 decl = TREE_OPERAND (decl, 0);
8390 }
8391 if (TREE_CODE (decl) == ADDR_EXPR
8392 || TREE_CODE (decl) == INDIRECT_REF)
8393 decl = TREE_OPERAND (decl, 0);
8394 }
8395 goto do_add_decl;
8396 case OMP_CLAUSE_LINEAR:
8397 if (gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c), pre_p, NULL,
8398 is_gimple_val, fb_rvalue) == GS_ERROR)
8399 {
8400 remove = true;
8401 break;
8402 }
8403 else
8404 {
8405 if (code == OMP_SIMD
8406 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
8407 {
8408 struct gimplify_omp_ctx *octx = outer_ctx;
8409 if (octx
8410 && octx->region_type == ORT_WORKSHARE
8411 && octx->combined_loop
8412 && !octx->distribute)
8413 {
8414 if (octx->outer_context
8415 && (octx->outer_context->region_type
8416 == ORT_COMBINED_PARALLEL))
8417 octx = octx->outer_context->outer_context;
8418 else
8419 octx = octx->outer_context;
8420 }
8421 if (octx
8422 && octx->region_type == ORT_WORKSHARE
8423 && octx->combined_loop
8424 && octx->distribute)
8425 {
8426 error_at (OMP_CLAUSE_LOCATION (c),
8427 "%<linear%> clause for variable other than "
8428 "loop iterator specified on construct "
8429 "combined with %<distribute%>");
8430 remove = true;
8431 break;
8432 }
8433 }
8434 /* For combined #pragma omp parallel for simd, need to put
8435 lastprivate and perhaps firstprivate too on the
8436 parallel. Similarly for #pragma omp for simd. */
8437 struct gimplify_omp_ctx *octx = outer_ctx;
8438 decl = NULL_TREE;
8439 do
8440 {
8441 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
8442 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
8443 break;
8444 decl = OMP_CLAUSE_DECL (c);
8445 if (error_operand_p (decl))
8446 {
8447 decl = NULL_TREE;
8448 break;
8449 }
8450 flags = GOVD_SEEN;
8451 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
8452 flags |= GOVD_FIRSTPRIVATE;
8453 if (!OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
8454 flags |= GOVD_LASTPRIVATE;
8455 if (octx
8456 && octx->region_type == ORT_WORKSHARE
8457 && octx->combined_loop)
8458 {
8459 if (octx->outer_context
8460 && (octx->outer_context->region_type
8461 == ORT_COMBINED_PARALLEL))
8462 octx = octx->outer_context;
8463 else if (omp_check_private (octx, decl, false))
8464 break;
8465 }
8466 else if (octx
8467 && (octx->region_type & ORT_TASK) != 0
8468 && octx->combined_loop)
8469 ;
8470 else if (octx
8471 && octx->region_type == ORT_COMBINED_PARALLEL
8472 && ctx->region_type == ORT_WORKSHARE
8473 && octx == outer_ctx)
8474 flags = GOVD_SEEN | GOVD_SHARED;
8475 else if (octx
8476 && ((octx->region_type & ORT_COMBINED_TEAMS)
8477 == ORT_COMBINED_TEAMS))
8478 flags = GOVD_SEEN | GOVD_SHARED;
8479 else if (octx
8480 && octx->region_type == ORT_COMBINED_TARGET)
8481 {
8482 flags &= ~GOVD_LASTPRIVATE;
8483 if (flags == GOVD_SEEN)
8484 break;
8485 }
8486 else
8487 break;
8488 splay_tree_node on
8489 = splay_tree_lookup (octx->variables,
8490 (splay_tree_key) decl);
8491 if (on && (on->value & GOVD_DATA_SHARE_CLASS) != 0)
8492 {
8493 octx = NULL;
8494 break;
8495 }
8496 omp_add_variable (octx, decl, flags);
8497 if (octx->outer_context == NULL)
8498 break;
8499 octx = octx->outer_context;
8500 }
8501 while (1);
8502 if (octx
8503 && decl
8504 && (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
8505 || !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
8506 omp_notice_variable (octx, decl, true);
8507 }
8508 flags = GOVD_LINEAR | GOVD_EXPLICIT;
8509 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
8510 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
8511 {
8512 notice_outer = false;
8513 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
8514 }
8515 goto do_add;
8516
8517 case OMP_CLAUSE_MAP:
8518 decl = OMP_CLAUSE_DECL (c);
8519 if (error_operand_p (decl))
8520 remove = true;
8521 switch (code)
8522 {
8523 case OMP_TARGET:
8524 break;
8525 case OACC_DATA:
8526 if (TREE_CODE (TREE_TYPE (decl)) != ARRAY_TYPE)
8527 break;
8528 /* FALLTHRU */
8529 case OMP_TARGET_DATA:
8530 case OMP_TARGET_ENTER_DATA:
8531 case OMP_TARGET_EXIT_DATA:
8532 case OACC_ENTER_DATA:
8533 case OACC_EXIT_DATA:
8534 case OACC_HOST_DATA:
8535 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
8536 || (OMP_CLAUSE_MAP_KIND (c)
8537 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
8538 /* For target {,enter ,exit }data only the array slice is
8539 mapped, but not the pointer to it. */
8540 remove = true;
8541 break;
8542 default:
8543 break;
8544 }
8545 if (remove)
8546 break;
8547 if (DECL_P (decl) && outer_ctx && (region_type & ORT_ACC))
8548 {
8549 struct gimplify_omp_ctx *octx;
8550 for (octx = outer_ctx; octx; octx = octx->outer_context)
8551 {
8552 if (octx->region_type != ORT_ACC_HOST_DATA)
8553 break;
8554 splay_tree_node n2
8555 = splay_tree_lookup (octx->variables,
8556 (splay_tree_key) decl);
8557 if (n2)
8558 error_at (OMP_CLAUSE_LOCATION (c), "variable %qE "
8559 "declared in enclosing %<host_data%> region",
8560 DECL_NAME (decl));
8561 }
8562 }
8563 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
8564 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
8565 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
8566 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
8567 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
8568 {
8569 remove = true;
8570 break;
8571 }
8572 else if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
8573 || (OMP_CLAUSE_MAP_KIND (c)
8574 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
8575 && TREE_CODE (OMP_CLAUSE_SIZE (c)) != INTEGER_CST)
8576 {
8577 OMP_CLAUSE_SIZE (c)
8578 = get_initialized_tmp_var (OMP_CLAUSE_SIZE (c), pre_p, NULL,
8579 false);
8580 omp_add_variable (ctx, OMP_CLAUSE_SIZE (c),
8581 GOVD_FIRSTPRIVATE | GOVD_SEEN);
8582 }
8583 if (!DECL_P (decl))
8584 {
8585 tree d = decl, *pd;
8586 if (TREE_CODE (d) == ARRAY_REF)
8587 {
8588 while (TREE_CODE (d) == ARRAY_REF)
8589 d = TREE_OPERAND (d, 0);
8590 if (TREE_CODE (d) == COMPONENT_REF
8591 && TREE_CODE (TREE_TYPE (d)) == ARRAY_TYPE)
8592 decl = d;
8593 }
8594 pd = &OMP_CLAUSE_DECL (c);
8595 if (d == decl
8596 && TREE_CODE (decl) == INDIRECT_REF
8597 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
8598 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
8599 == REFERENCE_TYPE))
8600 {
8601 pd = &TREE_OPERAND (decl, 0);
8602 decl = TREE_OPERAND (decl, 0);
8603 }
8604 if (TREE_CODE (decl) == COMPONENT_REF)
8605 {
8606 while (TREE_CODE (decl) == COMPONENT_REF)
8607 decl = TREE_OPERAND (decl, 0);
8608 if (TREE_CODE (decl) == INDIRECT_REF
8609 && DECL_P (TREE_OPERAND (decl, 0))
8610 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
8611 == REFERENCE_TYPE))
8612 decl = TREE_OPERAND (decl, 0);
8613 }
8614 if (gimplify_expr (pd, pre_p, NULL, is_gimple_lvalue, fb_lvalue)
8615 == GS_ERROR)
8616 {
8617 remove = true;
8618 break;
8619 }
8620 if (DECL_P (decl))
8621 {
8622 if (error_operand_p (decl))
8623 {
8624 remove = true;
8625 break;
8626 }
8627
8628 tree stype = TREE_TYPE (decl);
8629 if (TREE_CODE (stype) == REFERENCE_TYPE)
8630 stype = TREE_TYPE (stype);
8631 if (TYPE_SIZE_UNIT (stype) == NULL
8632 || TREE_CODE (TYPE_SIZE_UNIT (stype)) != INTEGER_CST)
8633 {
8634 error_at (OMP_CLAUSE_LOCATION (c),
8635 "mapping field %qE of variable length "
8636 "structure", OMP_CLAUSE_DECL (c));
8637 remove = true;
8638 break;
8639 }
8640
8641 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER)
8642 {
8643 /* Error recovery. */
8644 if (prev_list_p == NULL)
8645 {
8646 remove = true;
8647 break;
8648 }
8649 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
8650 {
8651 tree ch = OMP_CLAUSE_CHAIN (*prev_list_p);
8652 if (ch == NULL_TREE || OMP_CLAUSE_CHAIN (ch) != c)
8653 {
8654 remove = true;
8655 break;
8656 }
8657 }
8658 }
8659
8660 tree offset;
8661 poly_int64 bitsize, bitpos;
8662 machine_mode mode;
8663 int unsignedp, reversep, volatilep = 0;
8664 tree base = OMP_CLAUSE_DECL (c);
8665 while (TREE_CODE (base) == ARRAY_REF)
8666 base = TREE_OPERAND (base, 0);
8667 if (TREE_CODE (base) == INDIRECT_REF)
8668 base = TREE_OPERAND (base, 0);
8669 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
8670 &mode, &unsignedp, &reversep,
8671 &volatilep);
8672 tree orig_base = base;
8673 if ((TREE_CODE (base) == INDIRECT_REF
8674 || (TREE_CODE (base) == MEM_REF
8675 && integer_zerop (TREE_OPERAND (base, 1))))
8676 && DECL_P (TREE_OPERAND (base, 0))
8677 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (base, 0)))
8678 == REFERENCE_TYPE))
8679 base = TREE_OPERAND (base, 0);
8680 gcc_assert (base == decl
8681 && (offset == NULL_TREE
8682 || poly_int_tree_p (offset)));
8683
8684 splay_tree_node n
8685 = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
8686 bool ptr = (OMP_CLAUSE_MAP_KIND (c)
8687 == GOMP_MAP_ALWAYS_POINTER);
8688 if (n == NULL || (n->value & GOVD_MAP) == 0)
8689 {
8690 tree l = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8691 OMP_CLAUSE_MAP);
8692 OMP_CLAUSE_SET_MAP_KIND (l, GOMP_MAP_STRUCT);
8693 if (orig_base != base)
8694 OMP_CLAUSE_DECL (l) = unshare_expr (orig_base);
8695 else
8696 OMP_CLAUSE_DECL (l) = decl;
8697 OMP_CLAUSE_SIZE (l) = size_int (1);
8698 if (struct_map_to_clause == NULL)
8699 struct_map_to_clause = new hash_map<tree, tree>;
8700 struct_map_to_clause->put (decl, l);
8701 if (ptr)
8702 {
8703 enum gomp_map_kind mkind
8704 = code == OMP_TARGET_EXIT_DATA
8705 ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
8706 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8707 OMP_CLAUSE_MAP);
8708 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
8709 OMP_CLAUSE_DECL (c2)
8710 = unshare_expr (OMP_CLAUSE_DECL (c));
8711 OMP_CLAUSE_CHAIN (c2) = *prev_list_p;
8712 OMP_CLAUSE_SIZE (c2)
8713 = TYPE_SIZE_UNIT (ptr_type_node);
8714 OMP_CLAUSE_CHAIN (l) = c2;
8715 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
8716 {
8717 tree c4 = OMP_CLAUSE_CHAIN (*prev_list_p);
8718 tree c3
8719 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8720 OMP_CLAUSE_MAP);
8721 OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
8722 OMP_CLAUSE_DECL (c3)
8723 = unshare_expr (OMP_CLAUSE_DECL (c4));
8724 OMP_CLAUSE_SIZE (c3)
8725 = TYPE_SIZE_UNIT (ptr_type_node);
8726 OMP_CLAUSE_CHAIN (c3) = *prev_list_p;
8727 OMP_CLAUSE_CHAIN (c2) = c3;
8728 }
8729 *prev_list_p = l;
8730 prev_list_p = NULL;
8731 }
8732 else
8733 {
8734 OMP_CLAUSE_CHAIN (l) = c;
8735 *list_p = l;
8736 list_p = &OMP_CLAUSE_CHAIN (l);
8737 }
8738 if (orig_base != base && code == OMP_TARGET)
8739 {
8740 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8741 OMP_CLAUSE_MAP);
8742 enum gomp_map_kind mkind
8743 = GOMP_MAP_FIRSTPRIVATE_REFERENCE;
8744 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
8745 OMP_CLAUSE_DECL (c2) = decl;
8746 OMP_CLAUSE_SIZE (c2) = size_zero_node;
8747 OMP_CLAUSE_CHAIN (c2) = OMP_CLAUSE_CHAIN (l);
8748 OMP_CLAUSE_CHAIN (l) = c2;
8749 }
8750 flags = GOVD_MAP | GOVD_EXPLICIT;
8751 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) || ptr)
8752 flags |= GOVD_SEEN;
8753 goto do_add_decl;
8754 }
8755 else
8756 {
8757 tree *osc = struct_map_to_clause->get (decl);
8758 tree *sc = NULL, *scp = NULL;
8759 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) || ptr)
8760 n->value |= GOVD_SEEN;
8761 poly_offset_int o1, o2;
8762 if (offset)
8763 o1 = wi::to_poly_offset (offset);
8764 else
8765 o1 = 0;
8766 if (maybe_ne (bitpos, 0))
8767 o1 += bits_to_bytes_round_down (bitpos);
8768 sc = &OMP_CLAUSE_CHAIN (*osc);
8769 if (*sc != c
8770 && (OMP_CLAUSE_MAP_KIND (*sc)
8771 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
8772 sc = &OMP_CLAUSE_CHAIN (*sc);
8773 for (; *sc != c; sc = &OMP_CLAUSE_CHAIN (*sc))
8774 if (ptr && sc == prev_list_p)
8775 break;
8776 else if (TREE_CODE (OMP_CLAUSE_DECL (*sc))
8777 != COMPONENT_REF
8778 && (TREE_CODE (OMP_CLAUSE_DECL (*sc))
8779 != INDIRECT_REF)
8780 && (TREE_CODE (OMP_CLAUSE_DECL (*sc))
8781 != ARRAY_REF))
8782 break;
8783 else
8784 {
8785 tree offset2;
8786 poly_int64 bitsize2, bitpos2;
8787 base = OMP_CLAUSE_DECL (*sc);
8788 if (TREE_CODE (base) == ARRAY_REF)
8789 {
8790 while (TREE_CODE (base) == ARRAY_REF)
8791 base = TREE_OPERAND (base, 0);
8792 if (TREE_CODE (base) != COMPONENT_REF
8793 || (TREE_CODE (TREE_TYPE (base))
8794 != ARRAY_TYPE))
8795 break;
8796 }
8797 else if (TREE_CODE (base) == INDIRECT_REF
8798 && (TREE_CODE (TREE_OPERAND (base, 0))
8799 == COMPONENT_REF)
8800 && (TREE_CODE (TREE_TYPE
8801 (TREE_OPERAND (base, 0)))
8802 == REFERENCE_TYPE))
8803 base = TREE_OPERAND (base, 0);
8804 base = get_inner_reference (base, &bitsize2,
8805 &bitpos2, &offset2,
8806 &mode, &unsignedp,
8807 &reversep, &volatilep);
8808 if ((TREE_CODE (base) == INDIRECT_REF
8809 || (TREE_CODE (base) == MEM_REF
8810 && integer_zerop (TREE_OPERAND (base,
8811 1))))
8812 && DECL_P (TREE_OPERAND (base, 0))
8813 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (base,
8814 0)))
8815 == REFERENCE_TYPE))
8816 base = TREE_OPERAND (base, 0);
8817 if (base != decl)
8818 break;
8819 if (scp)
8820 continue;
8821 gcc_assert (offset == NULL_TREE
8822 || poly_int_tree_p (offset));
8823 tree d1 = OMP_CLAUSE_DECL (*sc);
8824 tree d2 = OMP_CLAUSE_DECL (c);
8825 while (TREE_CODE (d1) == ARRAY_REF)
8826 d1 = TREE_OPERAND (d1, 0);
8827 while (TREE_CODE (d2) == ARRAY_REF)
8828 d2 = TREE_OPERAND (d2, 0);
8829 if (TREE_CODE (d1) == INDIRECT_REF)
8830 d1 = TREE_OPERAND (d1, 0);
8831 if (TREE_CODE (d2) == INDIRECT_REF)
8832 d2 = TREE_OPERAND (d2, 0);
8833 while (TREE_CODE (d1) == COMPONENT_REF)
8834 if (TREE_CODE (d2) == COMPONENT_REF
8835 && TREE_OPERAND (d1, 1)
8836 == TREE_OPERAND (d2, 1))
8837 {
8838 d1 = TREE_OPERAND (d1, 0);
8839 d2 = TREE_OPERAND (d2, 0);
8840 }
8841 else
8842 break;
8843 if (d1 == d2)
8844 {
8845 error_at (OMP_CLAUSE_LOCATION (c),
8846 "%qE appears more than once in map "
8847 "clauses", OMP_CLAUSE_DECL (c));
8848 remove = true;
8849 break;
8850 }
8851 if (offset2)
8852 o2 = wi::to_poly_offset (offset2);
8853 else
8854 o2 = 0;
8855 o2 += bits_to_bytes_round_down (bitpos2);
8856 if (maybe_lt (o1, o2)
8857 || (known_eq (o1, o2)
8858 && maybe_lt (bitpos, bitpos2)))
8859 {
8860 if (ptr)
8861 scp = sc;
8862 else
8863 break;
8864 }
8865 }
8866 if (remove)
8867 break;
8868 OMP_CLAUSE_SIZE (*osc)
8869 = size_binop (PLUS_EXPR, OMP_CLAUSE_SIZE (*osc),
8870 size_one_node);
8871 if (ptr)
8872 {
8873 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8874 OMP_CLAUSE_MAP);
8875 tree cl = NULL_TREE;
8876 enum gomp_map_kind mkind
8877 = code == OMP_TARGET_EXIT_DATA
8878 ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
8879 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
8880 OMP_CLAUSE_DECL (c2)
8881 = unshare_expr (OMP_CLAUSE_DECL (c));
8882 OMP_CLAUSE_CHAIN (c2) = scp ? *scp : *prev_list_p;
8883 OMP_CLAUSE_SIZE (c2)
8884 = TYPE_SIZE_UNIT (ptr_type_node);
8885 cl = scp ? *prev_list_p : c2;
8886 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
8887 {
8888 tree c4 = OMP_CLAUSE_CHAIN (*prev_list_p);
8889 tree c3
8890 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8891 OMP_CLAUSE_MAP);
8892 OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
8893 OMP_CLAUSE_DECL (c3)
8894 = unshare_expr (OMP_CLAUSE_DECL (c4));
8895 OMP_CLAUSE_SIZE (c3)
8896 = TYPE_SIZE_UNIT (ptr_type_node);
8897 OMP_CLAUSE_CHAIN (c3) = *prev_list_p;
8898 if (!scp)
8899 OMP_CLAUSE_CHAIN (c2) = c3;
8900 else
8901 cl = c3;
8902 }
8903 if (scp)
8904 *scp = c2;
8905 if (sc == prev_list_p)
8906 {
8907 *sc = cl;
8908 prev_list_p = NULL;
8909 }
8910 else
8911 {
8912 *prev_list_p = OMP_CLAUSE_CHAIN (c);
8913 list_p = prev_list_p;
8914 prev_list_p = NULL;
8915 OMP_CLAUSE_CHAIN (c) = *sc;
8916 *sc = cl;
8917 continue;
8918 }
8919 }
8920 else if (*sc != c)
8921 {
8922 *list_p = OMP_CLAUSE_CHAIN (c);
8923 OMP_CLAUSE_CHAIN (c) = *sc;
8924 *sc = c;
8925 continue;
8926 }
8927 }
8928 }
8929 if (!remove
8930 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_POINTER
8931 && OMP_CLAUSE_CHAIN (c)
8932 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c)) == OMP_CLAUSE_MAP
8933 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
8934 == GOMP_MAP_ALWAYS_POINTER))
8935 prev_list_p = list_p;
8936 break;
8937 }
8938 flags = GOVD_MAP | GOVD_EXPLICIT;
8939 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TO
8940 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TOFROM)
8941 flags |= GOVD_MAP_ALWAYS_TO;
8942 goto do_add;
8943
8944 case OMP_CLAUSE_DEPEND:
8945 if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
8946 {
8947 tree deps = OMP_CLAUSE_DECL (c);
8948 while (deps && TREE_CODE (deps) == TREE_LIST)
8949 {
8950 if (TREE_CODE (TREE_PURPOSE (deps)) == TRUNC_DIV_EXPR
8951 && DECL_P (TREE_OPERAND (TREE_PURPOSE (deps), 1)))
8952 gimplify_expr (&TREE_OPERAND (TREE_PURPOSE (deps), 1),
8953 pre_p, NULL, is_gimple_val, fb_rvalue);
8954 deps = TREE_CHAIN (deps);
8955 }
8956 break;
8957 }
8958 else if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE)
8959 break;
8960 if (handled_depend_iterators == -1)
8961 handled_depend_iterators = gimplify_omp_depend (list_p, pre_p);
8962 if (handled_depend_iterators)
8963 {
8964 if (handled_depend_iterators == 2)
8965 remove = true;
8966 break;
8967 }
8968 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
8969 {
8970 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
8971 NULL, is_gimple_val, fb_rvalue);
8972 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
8973 }
8974 if (error_operand_p (OMP_CLAUSE_DECL (c)))
8975 {
8976 remove = true;
8977 break;
8978 }
8979 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
8980 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
8981 is_gimple_val, fb_rvalue) == GS_ERROR)
8982 {
8983 remove = true;
8984 break;
8985 }
8986 break;
8987
8988 case OMP_CLAUSE_TO:
8989 case OMP_CLAUSE_FROM:
8990 case OMP_CLAUSE__CACHE_:
8991 decl = OMP_CLAUSE_DECL (c);
8992 if (error_operand_p (decl))
8993 {
8994 remove = true;
8995 break;
8996 }
8997 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
8998 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
8999 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
9000 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
9001 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
9002 {
9003 remove = true;
9004 break;
9005 }
9006 if (!DECL_P (decl))
9007 {
9008 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p,
9009 NULL, is_gimple_lvalue, fb_lvalue)
9010 == GS_ERROR)
9011 {
9012 remove = true;
9013 break;
9014 }
9015 break;
9016 }
9017 goto do_notice;
9018
9019 case OMP_CLAUSE_USE_DEVICE_PTR:
9020 case OMP_CLAUSE_USE_DEVICE_ADDR:
9021 flags = GOVD_EXPLICIT;
9022 goto do_add;
9023
9024 case OMP_CLAUSE_IS_DEVICE_PTR:
9025 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
9026 goto do_add;
9027
9028 do_add:
9029 decl = OMP_CLAUSE_DECL (c);
9030 do_add_decl:
9031 if (error_operand_p (decl))
9032 {
9033 remove = true;
9034 break;
9035 }
9036 if (DECL_NAME (decl) == NULL_TREE && (flags & GOVD_SHARED) == 0)
9037 {
9038 tree t = omp_member_access_dummy_var (decl);
9039 if (t)
9040 {
9041 tree v = DECL_VALUE_EXPR (decl);
9042 DECL_NAME (decl) = DECL_NAME (TREE_OPERAND (v, 1));
9043 if (outer_ctx)
9044 omp_notice_variable (outer_ctx, t, true);
9045 }
9046 }
9047 if (code == OACC_DATA
9048 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
9049 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
9050 flags |= GOVD_MAP_0LEN_ARRAY;
9051 omp_add_variable (ctx, decl, flags);
9052 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
9053 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
9054 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
9055 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9056 {
9057 omp_add_variable (ctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
9058 GOVD_LOCAL | GOVD_SEEN);
9059 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c)
9060 && walk_tree (&OMP_CLAUSE_REDUCTION_INIT (c),
9061 find_decl_expr,
9062 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
9063 NULL) == NULL_TREE)
9064 omp_add_variable (ctx,
9065 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
9066 GOVD_LOCAL | GOVD_SEEN);
9067 gimplify_omp_ctxp = ctx;
9068 push_gimplify_context ();
9069
9070 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
9071 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
9072
9073 gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
9074 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
9075 pop_gimplify_context
9076 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
9077 push_gimplify_context ();
9078 gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
9079 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
9080 pop_gimplify_context
9081 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
9082 OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
9083 OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;
9084
9085 gimplify_omp_ctxp = outer_ctx;
9086 }
9087 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
9088 && OMP_CLAUSE_LASTPRIVATE_STMT (c))
9089 {
9090 gimplify_omp_ctxp = ctx;
9091 push_gimplify_context ();
9092 if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
9093 {
9094 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
9095 NULL, NULL);
9096 TREE_SIDE_EFFECTS (bind) = 1;
9097 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
9098 OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
9099 }
9100 gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
9101 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
9102 pop_gimplify_context
9103 (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
9104 OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;
9105
9106 gimplify_omp_ctxp = outer_ctx;
9107 }
9108 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
9109 && OMP_CLAUSE_LINEAR_STMT (c))
9110 {
9111 gimplify_omp_ctxp = ctx;
9112 push_gimplify_context ();
9113 if (TREE_CODE (OMP_CLAUSE_LINEAR_STMT (c)) != BIND_EXPR)
9114 {
9115 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
9116 NULL, NULL);
9117 TREE_SIDE_EFFECTS (bind) = 1;
9118 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LINEAR_STMT (c);
9119 OMP_CLAUSE_LINEAR_STMT (c) = bind;
9120 }
9121 gimplify_and_add (OMP_CLAUSE_LINEAR_STMT (c),
9122 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
9123 pop_gimplify_context
9124 (gimple_seq_first_stmt (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c)));
9125 OMP_CLAUSE_LINEAR_STMT (c) = NULL_TREE;
9126
9127 gimplify_omp_ctxp = outer_ctx;
9128 }
9129 if (notice_outer)
9130 goto do_notice;
9131 break;
9132
9133 case OMP_CLAUSE_COPYIN:
9134 case OMP_CLAUSE_COPYPRIVATE:
9135 decl = OMP_CLAUSE_DECL (c);
9136 if (error_operand_p (decl))
9137 {
9138 remove = true;
9139 break;
9140 }
9141 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_COPYPRIVATE
9142 && !remove
9143 && !omp_check_private (ctx, decl, true))
9144 {
9145 remove = true;
9146 if (is_global_var (decl))
9147 {
9148 if (DECL_THREAD_LOCAL_P (decl))
9149 remove = false;
9150 else if (DECL_HAS_VALUE_EXPR_P (decl))
9151 {
9152 tree value = get_base_address (DECL_VALUE_EXPR (decl));
9153
9154 if (value
9155 && DECL_P (value)
9156 && DECL_THREAD_LOCAL_P (value))
9157 remove = false;
9158 }
9159 }
9160 if (remove)
9161 error_at (OMP_CLAUSE_LOCATION (c),
9162 "copyprivate variable %qE is not threadprivate"
9163 " or private in outer context", DECL_NAME (decl));
9164 }
9165 do_notice:
9166 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
9167 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
9168 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
9169 && outer_ctx
9170 && ((region_type & ORT_TASKLOOP) == ORT_TASKLOOP
9171 || (region_type == ORT_WORKSHARE
9172 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
9173 && (OMP_CLAUSE_REDUCTION_INSCAN (c)
9174 || code == OMP_LOOP)))
9175 && (outer_ctx->region_type == ORT_COMBINED_PARALLEL
9176 || (code == OMP_LOOP
9177 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
9178 && ((outer_ctx->region_type & ORT_COMBINED_TEAMS)
9179 == ORT_COMBINED_TEAMS))))
9180 {
9181 splay_tree_node on
9182 = splay_tree_lookup (outer_ctx->variables,
9183 (splay_tree_key)decl);
9184 if (on == NULL || (on->value & GOVD_DATA_SHARE_CLASS) == 0)
9185 {
9186 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
9187 && TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
9188 && (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
9189 || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
9190 && (TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
9191 == POINTER_TYPE))))
9192 omp_firstprivatize_variable (outer_ctx, decl);
9193 else
9194 omp_add_variable (outer_ctx, decl,
9195 GOVD_SEEN | GOVD_SHARED);
9196 omp_notice_variable (outer_ctx, decl, true);
9197 }
9198 }
9199 if (outer_ctx)
9200 omp_notice_variable (outer_ctx, decl, true);
9201 if (check_non_private
9202 && region_type == ORT_WORKSHARE
9203 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
9204 || decl == OMP_CLAUSE_DECL (c)
9205 || (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
9206 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
9207 == ADDR_EXPR
9208 || (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
9209 == POINTER_PLUS_EXPR
9210 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND
9211 (OMP_CLAUSE_DECL (c), 0), 0))
9212 == ADDR_EXPR)))))
9213 && omp_check_private (ctx, decl, false))
9214 {
9215 error ("%s variable %qE is private in outer context",
9216 check_non_private, DECL_NAME (decl));
9217 remove = true;
9218 }
9219 break;
9220
9221 case OMP_CLAUSE_IF:
9222 if (OMP_CLAUSE_IF_MODIFIER (c) != ERROR_MARK
9223 && OMP_CLAUSE_IF_MODIFIER (c) != code)
9224 {
9225 const char *p[2];
9226 for (int i = 0; i < 2; i++)
9227 switch (i ? OMP_CLAUSE_IF_MODIFIER (c) : code)
9228 {
9229 case VOID_CST: p[i] = "cancel"; break;
9230 case OMP_PARALLEL: p[i] = "parallel"; break;
9231 case OMP_SIMD: p[i] = "simd"; break;
9232 case OMP_TASK: p[i] = "task"; break;
9233 case OMP_TASKLOOP: p[i] = "taskloop"; break;
9234 case OMP_TARGET_DATA: p[i] = "target data"; break;
9235 case OMP_TARGET: p[i] = "target"; break;
9236 case OMP_TARGET_UPDATE: p[i] = "target update"; break;
9237 case OMP_TARGET_ENTER_DATA:
9238 p[i] = "target enter data"; break;
9239 case OMP_TARGET_EXIT_DATA: p[i] = "target exit data"; break;
9240 default: gcc_unreachable ();
9241 }
9242 error_at (OMP_CLAUSE_LOCATION (c),
9243 "expected %qs %<if%> clause modifier rather than %qs",
9244 p[0], p[1]);
9245 remove = true;
9246 }
9247 /* Fall through. */
9248
9249 case OMP_CLAUSE_FINAL:
9250 OMP_CLAUSE_OPERAND (c, 0)
9251 = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
9252 /* Fall through. */
9253
9254 case OMP_CLAUSE_SCHEDULE:
9255 case OMP_CLAUSE_NUM_THREADS:
9256 case OMP_CLAUSE_NUM_TEAMS:
9257 case OMP_CLAUSE_THREAD_LIMIT:
9258 case OMP_CLAUSE_DIST_SCHEDULE:
9259 case OMP_CLAUSE_DEVICE:
9260 case OMP_CLAUSE_PRIORITY:
9261 case OMP_CLAUSE_GRAINSIZE:
9262 case OMP_CLAUSE_NUM_TASKS:
9263 case OMP_CLAUSE_HINT:
9264 case OMP_CLAUSE_ASYNC:
9265 case OMP_CLAUSE_WAIT:
9266 case OMP_CLAUSE_NUM_GANGS:
9267 case OMP_CLAUSE_NUM_WORKERS:
9268 case OMP_CLAUSE_VECTOR_LENGTH:
9269 case OMP_CLAUSE_WORKER:
9270 case OMP_CLAUSE_VECTOR:
9271 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
9272 is_gimple_val, fb_rvalue) == GS_ERROR)
9273 remove = true;
9274 break;
9275
9276 case OMP_CLAUSE_GANG:
9277 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
9278 is_gimple_val, fb_rvalue) == GS_ERROR)
9279 remove = true;
9280 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 1), pre_p, NULL,
9281 is_gimple_val, fb_rvalue) == GS_ERROR)
9282 remove = true;
9283 break;
9284
9285 case OMP_CLAUSE_NOWAIT:
9286 nowait = 1;
9287 break;
9288
9289 case OMP_CLAUSE_ORDERED:
9290 case OMP_CLAUSE_UNTIED:
9291 case OMP_CLAUSE_COLLAPSE:
9292 case OMP_CLAUSE_TILE:
9293 case OMP_CLAUSE_AUTO:
9294 case OMP_CLAUSE_SEQ:
9295 case OMP_CLAUSE_INDEPENDENT:
9296 case OMP_CLAUSE_MERGEABLE:
9297 case OMP_CLAUSE_PROC_BIND:
9298 case OMP_CLAUSE_SAFELEN:
9299 case OMP_CLAUSE_SIMDLEN:
9300 case OMP_CLAUSE_NOGROUP:
9301 case OMP_CLAUSE_THREADS:
9302 case OMP_CLAUSE_SIMD:
9303 case OMP_CLAUSE_BIND:
9304 case OMP_CLAUSE_IF_PRESENT:
9305 case OMP_CLAUSE_FINALIZE:
9306 break;
9307
9308 case OMP_CLAUSE_ORDER:
9309 ctx->order_concurrent = true;
9310 break;
9311
9312 case OMP_CLAUSE_DEFAULTMAP:
9313 enum gimplify_defaultmap_kind gdmkmin, gdmkmax;
9314 switch (OMP_CLAUSE_DEFAULTMAP_CATEGORY (c))
9315 {
9316 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED:
9317 gdmkmin = GDMK_SCALAR;
9318 gdmkmax = GDMK_POINTER;
9319 break;
9320 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_SCALAR:
9321 gdmkmin = gdmkmax = GDMK_SCALAR;
9322 break;
9323 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_AGGREGATE:
9324 gdmkmin = gdmkmax = GDMK_AGGREGATE;
9325 break;
9326 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_ALLOCATABLE:
9327 gdmkmin = gdmkmax = GDMK_ALLOCATABLE;
9328 break;
9329 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_POINTER:
9330 gdmkmin = gdmkmax = GDMK_POINTER;
9331 break;
9332 default:
9333 gcc_unreachable ();
9334 }
9335 for (int gdmk = gdmkmin; gdmk <= gdmkmax; gdmk++)
9336 switch (OMP_CLAUSE_DEFAULTMAP_BEHAVIOR (c))
9337 {
9338 case OMP_CLAUSE_DEFAULTMAP_ALLOC:
9339 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_ALLOC_ONLY;
9340 break;
9341 case OMP_CLAUSE_DEFAULTMAP_TO:
9342 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_TO_ONLY;
9343 break;
9344 case OMP_CLAUSE_DEFAULTMAP_FROM:
9345 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_FROM_ONLY;
9346 break;
9347 case OMP_CLAUSE_DEFAULTMAP_TOFROM:
9348 ctx->defaultmap[gdmk] = GOVD_MAP;
9349 break;
9350 case OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE:
9351 ctx->defaultmap[gdmk] = GOVD_FIRSTPRIVATE;
9352 break;
9353 case OMP_CLAUSE_DEFAULTMAP_NONE:
9354 ctx->defaultmap[gdmk] = 0;
9355 break;
9356 case OMP_CLAUSE_DEFAULTMAP_DEFAULT:
9357 switch (gdmk)
9358 {
9359 case GDMK_SCALAR:
9360 ctx->defaultmap[gdmk] = GOVD_FIRSTPRIVATE;
9361 break;
9362 case GDMK_AGGREGATE:
9363 case GDMK_ALLOCATABLE:
9364 ctx->defaultmap[gdmk] = GOVD_MAP;
9365 break;
9366 case GDMK_POINTER:
9367 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_0LEN_ARRAY;
9368 break;
9369 default:
9370 gcc_unreachable ();
9371 }
9372 break;
9373 default:
9374 gcc_unreachable ();
9375 }
9376 break;
9377
9378 case OMP_CLAUSE_ALIGNED:
9379 decl = OMP_CLAUSE_DECL (c);
9380 if (error_operand_p (decl))
9381 {
9382 remove = true;
9383 break;
9384 }
9385 if (gimplify_expr (&OMP_CLAUSE_ALIGNED_ALIGNMENT (c), pre_p, NULL,
9386 is_gimple_val, fb_rvalue) == GS_ERROR)
9387 {
9388 remove = true;
9389 break;
9390 }
9391 if (!is_global_var (decl)
9392 && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
9393 omp_add_variable (ctx, decl, GOVD_ALIGNED);
9394 break;
9395
9396 case OMP_CLAUSE_NONTEMPORAL:
9397 decl = OMP_CLAUSE_DECL (c);
9398 if (error_operand_p (decl))
9399 {
9400 remove = true;
9401 break;
9402 }
9403 omp_add_variable (ctx, decl, GOVD_NONTEMPORAL);
9404 break;
9405
9406 case OMP_CLAUSE_DEFAULT:
9407 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
9408 break;
9409
9410 case OMP_CLAUSE_INCLUSIVE:
9411 case OMP_CLAUSE_EXCLUSIVE:
9412 decl = OMP_CLAUSE_DECL (c);
9413 {
9414 splay_tree_node n = splay_tree_lookup (outer_ctx->variables,
9415 (splay_tree_key) decl);
9416 if (n == NULL || (n->value & GOVD_REDUCTION) == 0)
9417 {
9418 error_at (OMP_CLAUSE_LOCATION (c),
9419 "%qD specified in %qs clause but not in %<inscan%> "
9420 "%<reduction%> clause on the containing construct",
9421 decl, omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
9422 remove = true;
9423 }
9424 else
9425 {
9426 n->value |= GOVD_REDUCTION_INSCAN;
9427 if (outer_ctx->region_type == ORT_SIMD
9428 && outer_ctx->outer_context
9429 && outer_ctx->outer_context->region_type == ORT_WORKSHARE)
9430 {
9431 n = splay_tree_lookup (outer_ctx->outer_context->variables,
9432 (splay_tree_key) decl);
9433 if (n && (n->value & GOVD_REDUCTION) != 0)
9434 n->value |= GOVD_REDUCTION_INSCAN;
9435 }
9436 }
9437 }
9438 break;
9439
9440 default:
9441 gcc_unreachable ();
9442 }
9443
9444 if (code == OACC_DATA
9445 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
9446 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
9447 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
9448 remove = true;
9449 if (remove)
9450 *list_p = OMP_CLAUSE_CHAIN (c);
9451 else
9452 list_p = &OMP_CLAUSE_CHAIN (c);
9453 }
9454
9455 ctx->clauses = *orig_list_p;
9456 gimplify_omp_ctxp = ctx;
9457 if (struct_map_to_clause)
9458 delete struct_map_to_clause;
9459 }
9460
9461 /* Return true if DECL is a candidate for shared to firstprivate
9462 optimization. We only consider non-addressable scalars, not
9463 too big, and not references. */
9464
9465 static bool
9466 omp_shared_to_firstprivate_optimizable_decl_p (tree decl)
9467 {
9468 if (TREE_ADDRESSABLE (decl))
9469 return false;
9470 tree type = TREE_TYPE (decl);
9471 if (!is_gimple_reg_type (type)
9472 || TREE_CODE (type) == REFERENCE_TYPE
9473 || TREE_ADDRESSABLE (type))
9474 return false;
9475 /* Don't optimize too large decls, as each thread/task will have
9476 its own. */
9477 HOST_WIDE_INT len = int_size_in_bytes (type);
9478 if (len == -1 || len > 4 * POINTER_SIZE / BITS_PER_UNIT)
9479 return false;
9480 if (lang_hooks.decls.omp_privatize_by_reference (decl))
9481 return false;
9482 return true;
9483 }
9484
9485 /* Helper function of omp_find_stores_op and gimplify_adjust_omp_clauses*.
9486 For omp_shared_to_firstprivate_optimizable_decl_p decl mark it as
9487 GOVD_WRITTEN in outer contexts. */
9488
9489 static void
9490 omp_mark_stores (struct gimplify_omp_ctx *ctx, tree decl)
9491 {
9492 for (; ctx; ctx = ctx->outer_context)
9493 {
9494 splay_tree_node n = splay_tree_lookup (ctx->variables,
9495 (splay_tree_key) decl);
9496 if (n == NULL)
9497 continue;
9498 else if (n->value & GOVD_SHARED)
9499 {
9500 n->value |= GOVD_WRITTEN;
9501 return;
9502 }
9503 else if (n->value & GOVD_DATA_SHARE_CLASS)
9504 return;
9505 }
9506 }
9507
9508 /* Helper callback for walk_gimple_seq to discover possible stores
9509 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
9510 GOVD_WRITTEN if they are GOVD_SHARED in some outer context
9511 for those. */
9512
9513 static tree
9514 omp_find_stores_op (tree *tp, int *walk_subtrees, void *data)
9515 {
9516 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
9517
9518 *walk_subtrees = 0;
9519 if (!wi->is_lhs)
9520 return NULL_TREE;
9521
9522 tree op = *tp;
9523 do
9524 {
9525 if (handled_component_p (op))
9526 op = TREE_OPERAND (op, 0);
9527 else if ((TREE_CODE (op) == MEM_REF || TREE_CODE (op) == TARGET_MEM_REF)
9528 && TREE_CODE (TREE_OPERAND (op, 0)) == ADDR_EXPR)
9529 op = TREE_OPERAND (TREE_OPERAND (op, 0), 0);
9530 else
9531 break;
9532 }
9533 while (1);
9534 if (!DECL_P (op) || !omp_shared_to_firstprivate_optimizable_decl_p (op))
9535 return NULL_TREE;
9536
9537 omp_mark_stores (gimplify_omp_ctxp, op);
9538 return NULL_TREE;
9539 }
9540
9541 /* Helper callback for walk_gimple_seq to discover possible stores
9542 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
9543 GOVD_WRITTEN if they are GOVD_SHARED in some outer context
9544 for those. */
9545
9546 static tree
9547 omp_find_stores_stmt (gimple_stmt_iterator *gsi_p,
9548 bool *handled_ops_p,
9549 struct walk_stmt_info *wi)
9550 {
9551 gimple *stmt = gsi_stmt (*gsi_p);
9552 switch (gimple_code (stmt))
9553 {
9554 /* Don't recurse on OpenMP constructs for which
9555 gimplify_adjust_omp_clauses already handled the bodies,
9556 except handle gimple_omp_for_pre_body. */
9557 case GIMPLE_OMP_FOR:
9558 *handled_ops_p = true;
9559 if (gimple_omp_for_pre_body (stmt))
9560 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
9561 omp_find_stores_stmt, omp_find_stores_op, wi);
9562 break;
9563 case GIMPLE_OMP_PARALLEL:
9564 case GIMPLE_OMP_TASK:
9565 case GIMPLE_OMP_SECTIONS:
9566 case GIMPLE_OMP_SINGLE:
9567 case GIMPLE_OMP_TARGET:
9568 case GIMPLE_OMP_TEAMS:
9569 case GIMPLE_OMP_CRITICAL:
9570 *handled_ops_p = true;
9571 break;
9572 default:
9573 break;
9574 }
9575 return NULL_TREE;
9576 }
9577
/* Bundle of arguments for gimplify_adjust_omp_clauses_1, which is
   invoked through a splay-tree traversal callback and therefore only
   receives a single opaque data pointer.  */

struct gimplify_adjust_omp_clauses_data
{
  /* Head of the clause list new implicit clauses are prepended to.  */
  tree *list_p;
  /* Statement sequence that receives any gimplification side effects.  */
  gimple_seq *pre_p;
};
9583
/* For all variables that were not actually used within the context,
   remove PRIVATE, SHARED, and FIRSTPRIVATE clauses.  */

/* Splay-tree callback: for the variable N->key with data-sharing flags
   N->value recorded during scanning, build the corresponding implicit
   clause(s) and prepend them to DATA->list_p.  Always returns 0 so the
   traversal continues.  */

static int
gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
{
  tree *list_p = ((struct gimplify_adjust_omp_clauses_data *) data)->list_p;
  gimple_seq *pre_p
    = ((struct gimplify_adjust_omp_clauses_data *) data)->pre_p;
  tree decl = (tree) n->key;
  unsigned flags = n->value;
  enum omp_clause_code code;
  tree clause;
  bool private_debug;

  /* Conditional lastprivate on a combined parallel is represented as
     shared/seen/written here.  */
  if (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
      && (flags & GOVD_LASTPRIVATE_CONDITIONAL) != 0)
    flags = GOVD_SHARED | GOVD_SEEN | GOVD_WRITTEN;
  /* Explicit clauses were emitted by the user; local vars need none.  */
  if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
    return 0;
  /* Variables never referenced in the region get no implicit clause.  */
  if ((flags & GOVD_SEEN) == 0)
    return 0;
  if (flags & GOVD_DEBUG_PRIVATE)
    {
      gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_SHARED);
      private_debug = true;
    }
  else if (flags & GOVD_MAP)
    private_debug = false;
  else
    /* Let the frontend decide whether to demote to private for
       debug-info purposes.  */
    private_debug
      = lang_hooks.decls.omp_private_debug_clause (decl,
						   !!(flags & GOVD_SHARED));
  /* Pick the clause code from the recorded GOVD_* flags.  */
  if (private_debug)
    code = OMP_CLAUSE_PRIVATE;
  else if (flags & GOVD_MAP)
    {
      code = OMP_CLAUSE_MAP;
      if ((gimplify_omp_ctxp->region_type & ORT_ACC) == 0
	  && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
	{
	  error ("%<_Atomic%> %qD in implicit %<map%> clause", decl);
	  return 0;
	}
    }
  else if (flags & GOVD_SHARED)
    {
      if (is_global_var (decl))
	{
	  /* Globals only need an implicit shared clause when some
	     enclosing context privatizes or maps them.  */
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
	  while (ctx != NULL)
	    {
	      splay_tree_node on
		= splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	      if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
				      | GOVD_PRIVATE | GOVD_REDUCTION
				      | GOVD_LINEAR | GOVD_MAP)) != 0)
		break;
	      ctx = ctx->outer_context;
	    }
	  if (ctx == NULL)
	    return 0;
	}
      code = OMP_CLAUSE_SHARED;
    }
  else if (flags & GOVD_PRIVATE)
    code = OMP_CLAUSE_PRIVATE;
  else if (flags & GOVD_FIRSTPRIVATE)
    {
      code = OMP_CLAUSE_FIRSTPRIVATE;
      if ((gimplify_omp_ctxp->region_type & ORT_TARGET)
	  && (gimplify_omp_ctxp->region_type & ORT_ACC) == 0
	  && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
	{
	  error ("%<_Atomic%> %qD in implicit %<firstprivate%> clause on "
		 "%<target%> construct", decl);
	  return 0;
	}
    }
  else if (flags & GOVD_LASTPRIVATE)
    code = OMP_CLAUSE_LASTPRIVATE;
  else if (flags & (GOVD_ALIGNED | GOVD_NONTEMPORAL))
    /* These are handled via their explicit clauses; nothing implicit.  */
    return 0;
  else if (flags & GOVD_CONDTEMP)
    {
      code = OMP_CLAUSE__CONDTEMP_;
      gimple_add_tmp_var (decl);
    }
  else
    gcc_unreachable ();

  /* A lastprivate copy-out, or a written shared variable, counts as a
     store visible to outer contexts for the shared->firstprivate
     optimization.  */
  if (((flags & GOVD_LASTPRIVATE)
       || (code == OMP_CLAUSE_SHARED && (flags & GOVD_WRITTEN)))
      && omp_shared_to_firstprivate_optimizable_decl_p (decl))
    omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);

  /* Build the new clause and prepend it to the list.  */
  tree chain = *list_p;
  clause = build_omp_clause (input_location, code);
  OMP_CLAUSE_DECL (clause) = decl;
  OMP_CLAUSE_CHAIN (clause) = chain;
  if (private_debug)
    OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
  else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
    OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
  else if (code == OMP_CLAUSE_SHARED
	   && (flags & GOVD_WRITTEN) == 0
	   && omp_shared_to_firstprivate_optimizable_decl_p (decl))
    OMP_CLAUSE_SHARED_READONLY (clause) = 1;
  else if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_EXPLICIT) == 0)
    OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (clause) = 1;
  else if (code == OMP_CLAUSE_MAP && (flags & GOVD_MAP_0LEN_ARRAY) != 0)
    {
      /* Possibly zero-length array section: emit a zero-sized ALLOC map
	 of the dereferenced pointer plus a firstprivate-pointer map of
	 the pointer itself.  */
      tree nc = build_omp_clause (input_location, OMP_CLAUSE_MAP);
      OMP_CLAUSE_DECL (nc) = decl;
      if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
	OMP_CLAUSE_DECL (clause)
	  = build_simple_mem_ref_loc (input_location, decl);
      OMP_CLAUSE_DECL (clause)
	= build2 (MEM_REF, char_type_node, OMP_CLAUSE_DECL (clause),
		  build_int_cst (build_pointer_type (char_type_node), 0));
      OMP_CLAUSE_SIZE (clause) = size_zero_node;
      OMP_CLAUSE_SIZE (nc) = size_zero_node;
      OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_ALLOC);
      OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (clause) = 1;
      OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
      OMP_CLAUSE_CHAIN (nc) = chain;
      OMP_CLAUSE_CHAIN (clause) = nc;
      /* Gimplify the address in the outer context so temporaries end up
	 outside the construct.  */
      struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
      gimplify_omp_ctxp = ctx->outer_context;
      gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (clause), 0),
		     pre_p, NULL, is_gimple_val, fb_rvalue);
      gimplify_omp_ctxp = ctx;
    }
  else if (code == OMP_CLAUSE_MAP)
    {
      int kind;
      /* Not all combinations of these GOVD_MAP flags are actually valid.  */
      switch (flags & (GOVD_MAP_TO_ONLY
		       | GOVD_MAP_FORCE
		       | GOVD_MAP_FORCE_PRESENT
		       | GOVD_MAP_ALLOC_ONLY
		       | GOVD_MAP_FROM_ONLY))
	{
	case 0:
	  kind = GOMP_MAP_TOFROM;
	  break;
	case GOVD_MAP_FORCE:
	  kind = GOMP_MAP_TOFROM | GOMP_MAP_FLAG_FORCE;
	  break;
	case GOVD_MAP_TO_ONLY:
	  kind = GOMP_MAP_TO;
	  break;
	case GOVD_MAP_FROM_ONLY:
	  kind = GOMP_MAP_FROM;
	  break;
	case GOVD_MAP_ALLOC_ONLY:
	  kind = GOMP_MAP_ALLOC;
	  break;
	case GOVD_MAP_TO_ONLY | GOVD_MAP_FORCE:
	  kind = GOMP_MAP_TO | GOMP_MAP_FLAG_FORCE;
	  break;
	case GOVD_MAP_FORCE_PRESENT:
	  kind = GOMP_MAP_FORCE_PRESENT;
	  break;
	default:
	  gcc_unreachable ();
	}
      OMP_CLAUSE_SET_MAP_KIND (clause, kind);
      if (DECL_SIZE (decl)
	  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
	{
	  /* Variable-sized decl (e.g. VLA): map the underlying storage
	     through the decl's value expr, and add a pointer map.  */
	  tree decl2 = DECL_VALUE_EXPR (decl);
	  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
	  decl2 = TREE_OPERAND (decl2, 0);
	  gcc_assert (DECL_P (decl2));
	  tree mem = build_simple_mem_ref (decl2);
	  OMP_CLAUSE_DECL (clause) = mem;
	  OMP_CLAUSE_SIZE (clause) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
	  if (gimplify_omp_ctxp->outer_context)
	    {
	      struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
	      omp_notice_variable (ctx, decl2, true);
	      omp_notice_variable (ctx, OMP_CLAUSE_SIZE (clause), true);
	    }
	  tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
				      OMP_CLAUSE_MAP);
	  OMP_CLAUSE_DECL (nc) = decl;
	  OMP_CLAUSE_SIZE (nc) = size_zero_node;
	  if (gimplify_omp_ctxp->target_firstprivatize_array_bases)
	    OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
	  else
	    OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
	  OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
	  OMP_CLAUSE_CHAIN (clause) = nc;
	}
      else if (gimplify_omp_ctxp->target_firstprivatize_array_bases
	       && lang_hooks.decls.omp_privatize_by_reference (decl))
	{
	  /* Reference-typed decl: map the referenced object and add a
	     firstprivate-reference map for the reference itself.  */
	  OMP_CLAUSE_DECL (clause) = build_simple_mem_ref (decl);
	  OMP_CLAUSE_SIZE (clause)
	    = unshare_expr (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))));
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  gimplify_omp_ctxp = ctx->outer_context;
	  gimplify_expr (&OMP_CLAUSE_SIZE (clause),
			 pre_p, NULL, is_gimple_val, fb_rvalue);
	  gimplify_omp_ctxp = ctx;
	  tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
				      OMP_CLAUSE_MAP);
	  OMP_CLAUSE_DECL (nc) = decl;
	  OMP_CLAUSE_SIZE (nc) = size_zero_node;
	  OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_REFERENCE);
	  OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
	  OMP_CLAUSE_CHAIN (clause) = nc;
	}
      else
	OMP_CLAUSE_SIZE (clause) = DECL_SIZE_UNIT (decl);
    }
  /* A variable that is both firstprivate and lastprivate gets the extra
     lastprivate clause here.  */
  if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_LASTPRIVATE) != 0)
    {
      tree nc = build_omp_clause (input_location, OMP_CLAUSE_LASTPRIVATE);
      OMP_CLAUSE_DECL (nc) = decl;
      OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (nc) = 1;
      OMP_CLAUSE_CHAIN (nc) = chain;
      OMP_CLAUSE_CHAIN (clause) = nc;
      struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
      gimplify_omp_ctxp = ctx->outer_context;
      lang_hooks.decls.omp_finish_clause (nc, pre_p);
      gimplify_omp_ctxp = ctx;
    }
  *list_p = clause;
  /* Let the frontend finalize the clause with the outer context
     current, then notice any DECL-valued map sizes on the new
     clauses.  */
  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
  gimplify_omp_ctxp = ctx->outer_context;
  lang_hooks.decls.omp_finish_clause (clause, pre_p);
  if (gimplify_omp_ctxp)
    for (; clause != chain; clause = OMP_CLAUSE_CHAIN (clause))
      if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_MAP
	  && DECL_P (OMP_CLAUSE_SIZE (clause)))
	omp_notice_variable (gimplify_omp_ctxp, OMP_CLAUSE_SIZE (clause),
			     true);
  gimplify_omp_ctxp = ctx;
  return 0;
}
9827
9828 static void
9829 gimplify_adjust_omp_clauses (gimple_seq *pre_p, gimple_seq body, tree *list_p,
9830 enum tree_code code)
9831 {
9832 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
9833 tree *orig_list_p = list_p;
9834 tree c, decl;
9835 bool has_inscan_reductions = false;
9836
9837 if (body)
9838 {
9839 struct gimplify_omp_ctx *octx;
9840 for (octx = ctx; octx; octx = octx->outer_context)
9841 if ((octx->region_type & (ORT_PARALLEL | ORT_TASK | ORT_TEAMS)) != 0)
9842 break;
9843 if (octx)
9844 {
9845 struct walk_stmt_info wi;
9846 memset (&wi, 0, sizeof (wi));
9847 walk_gimple_seq (body, omp_find_stores_stmt,
9848 omp_find_stores_op, &wi);
9849 }
9850 }
9851
9852 if (ctx->add_safelen1)
9853 {
9854 /* If there are VLAs in the body of simd loop, prevent
9855 vectorization. */
9856 gcc_assert (ctx->region_type == ORT_SIMD);
9857 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
9858 OMP_CLAUSE_SAFELEN_EXPR (c) = integer_one_node;
9859 OMP_CLAUSE_CHAIN (c) = *list_p;
9860 *list_p = c;
9861 list_p = &OMP_CLAUSE_CHAIN (c);
9862 }
9863
9864 if (ctx->region_type == ORT_WORKSHARE
9865 && ctx->outer_context
9866 && ctx->outer_context->region_type == ORT_COMBINED_PARALLEL)
9867 {
9868 for (c = ctx->outer_context->clauses; c; c = OMP_CLAUSE_CHAIN (c))
9869 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
9870 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
9871 {
9872 decl = OMP_CLAUSE_DECL (c);
9873 splay_tree_node n
9874 = splay_tree_lookup (ctx->outer_context->variables,
9875 (splay_tree_key) decl);
9876 gcc_checking_assert (!splay_tree_lookup (ctx->variables,
9877 (splay_tree_key) decl));
9878 omp_add_variable (ctx, decl, n->value);
9879 tree c2 = copy_node (c);
9880 OMP_CLAUSE_CHAIN (c2) = *list_p;
9881 *list_p = c2;
9882 if ((n->value & GOVD_FIRSTPRIVATE) == 0)
9883 continue;
9884 c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
9885 OMP_CLAUSE_FIRSTPRIVATE);
9886 OMP_CLAUSE_DECL (c2) = decl;
9887 OMP_CLAUSE_CHAIN (c2) = *list_p;
9888 *list_p = c2;
9889 }
9890 }
9891 while ((c = *list_p) != NULL)
9892 {
9893 splay_tree_node n;
9894 bool remove = false;
9895
9896 switch (OMP_CLAUSE_CODE (c))
9897 {
9898 case OMP_CLAUSE_FIRSTPRIVATE:
9899 if ((ctx->region_type & ORT_TARGET)
9900 && (ctx->region_type & ORT_ACC) == 0
9901 && TYPE_ATOMIC (strip_array_types
9902 (TREE_TYPE (OMP_CLAUSE_DECL (c)))))
9903 {
9904 error_at (OMP_CLAUSE_LOCATION (c),
9905 "%<_Atomic%> %qD in %<firstprivate%> clause on "
9906 "%<target%> construct", OMP_CLAUSE_DECL (c));
9907 remove = true;
9908 break;
9909 }
9910 /* FALLTHRU */
9911 case OMP_CLAUSE_PRIVATE:
9912 case OMP_CLAUSE_SHARED:
9913 case OMP_CLAUSE_LINEAR:
9914 decl = OMP_CLAUSE_DECL (c);
9915 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
9916 remove = !(n->value & GOVD_SEEN);
9917 if ((n->value & GOVD_LASTPRIVATE_CONDITIONAL) != 0
9918 && code == OMP_PARALLEL
9919 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
9920 remove = true;
9921 if (! remove)
9922 {
9923 bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
9924 if ((n->value & GOVD_DEBUG_PRIVATE)
9925 || lang_hooks.decls.omp_private_debug_clause (decl, shared))
9926 {
9927 gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
9928 || ((n->value & GOVD_DATA_SHARE_CLASS)
9929 == GOVD_SHARED));
9930 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
9931 OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
9932 }
9933 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
9934 && (n->value & GOVD_WRITTEN) == 0
9935 && DECL_P (decl)
9936 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
9937 OMP_CLAUSE_SHARED_READONLY (c) = 1;
9938 else if (DECL_P (decl)
9939 && ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
9940 && (n->value & GOVD_WRITTEN) != 0)
9941 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
9942 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
9943 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
9944 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
9945 }
9946 break;
9947
9948 case OMP_CLAUSE_LASTPRIVATE:
9949 /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
9950 accurately reflect the presence of a FIRSTPRIVATE clause. */
9951 decl = OMP_CLAUSE_DECL (c);
9952 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
9953 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
9954 = (n->value & GOVD_FIRSTPRIVATE) != 0;
9955 if (code == OMP_DISTRIBUTE
9956 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
9957 {
9958 remove = true;
9959 error_at (OMP_CLAUSE_LOCATION (c),
9960 "same variable used in %<firstprivate%> and "
9961 "%<lastprivate%> clauses on %<distribute%> "
9962 "construct");
9963 }
9964 if (!remove
9965 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
9966 && DECL_P (decl)
9967 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
9968 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
9969 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) && code == OMP_PARALLEL)
9970 remove = true;
9971 break;
9972
9973 case OMP_CLAUSE_ALIGNED:
9974 decl = OMP_CLAUSE_DECL (c);
9975 if (!is_global_var (decl))
9976 {
9977 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
9978 remove = n == NULL || !(n->value & GOVD_SEEN);
9979 if (!remove && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
9980 {
9981 struct gimplify_omp_ctx *octx;
9982 if (n != NULL
9983 && (n->value & (GOVD_DATA_SHARE_CLASS
9984 & ~GOVD_FIRSTPRIVATE)))
9985 remove = true;
9986 else
9987 for (octx = ctx->outer_context; octx;
9988 octx = octx->outer_context)
9989 {
9990 n = splay_tree_lookup (octx->variables,
9991 (splay_tree_key) decl);
9992 if (n == NULL)
9993 continue;
9994 if (n->value & GOVD_LOCAL)
9995 break;
9996 /* We have to avoid assigning a shared variable
9997 to itself when trying to add
9998 __builtin_assume_aligned. */
9999 if (n->value & GOVD_SHARED)
10000 {
10001 remove = true;
10002 break;
10003 }
10004 }
10005 }
10006 }
10007 else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
10008 {
10009 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
10010 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
10011 remove = true;
10012 }
10013 break;
10014
10015 case OMP_CLAUSE_NONTEMPORAL:
10016 decl = OMP_CLAUSE_DECL (c);
10017 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
10018 remove = n == NULL || !(n->value & GOVD_SEEN);
10019 break;
10020
10021 case OMP_CLAUSE_MAP:
10022 if (code == OMP_TARGET_EXIT_DATA
10023 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER)
10024 {
10025 remove = true;
10026 break;
10027 }
10028 decl = OMP_CLAUSE_DECL (c);
10029 /* Data clauses associated with acc parallel reductions must be
10030 compatible with present_or_copy. Warn and adjust the clause
10031 if that is not the case. */
10032 if (ctx->region_type == ORT_ACC_PARALLEL)
10033 {
10034 tree t = DECL_P (decl) ? decl : TREE_OPERAND (decl, 0);
10035 n = NULL;
10036
10037 if (DECL_P (t))
10038 n = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
10039
10040 if (n && (n->value & GOVD_REDUCTION))
10041 {
10042 enum gomp_map_kind kind = OMP_CLAUSE_MAP_KIND (c);
10043
10044 OMP_CLAUSE_MAP_IN_REDUCTION (c) = 1;
10045 if ((kind & GOMP_MAP_TOFROM) != GOMP_MAP_TOFROM
10046 && kind != GOMP_MAP_FORCE_PRESENT
10047 && kind != GOMP_MAP_POINTER)
10048 {
10049 warning_at (OMP_CLAUSE_LOCATION (c), 0,
10050 "incompatible data clause with reduction "
10051 "on %qE; promoting to %<present_or_copy%>",
10052 DECL_NAME (t));
10053 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
10054 }
10055 }
10056 }
10057 if (!DECL_P (decl))
10058 {
10059 if ((ctx->region_type & ORT_TARGET) != 0
10060 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
10061 {
10062 if (TREE_CODE (decl) == INDIRECT_REF
10063 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
10064 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
10065 == REFERENCE_TYPE))
10066 decl = TREE_OPERAND (decl, 0);
10067 if (TREE_CODE (decl) == COMPONENT_REF)
10068 {
10069 while (TREE_CODE (decl) == COMPONENT_REF)
10070 decl = TREE_OPERAND (decl, 0);
10071 if (DECL_P (decl))
10072 {
10073 n = splay_tree_lookup (ctx->variables,
10074 (splay_tree_key) decl);
10075 if (!(n->value & GOVD_SEEN))
10076 remove = true;
10077 }
10078 }
10079 }
10080 break;
10081 }
10082 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
10083 if ((ctx->region_type & ORT_TARGET) != 0
10084 && !(n->value & GOVD_SEEN)
10085 && GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) == 0
10086 && (!is_global_var (decl)
10087 || !lookup_attribute ("omp declare target link",
10088 DECL_ATTRIBUTES (decl))))
10089 {
10090 remove = true;
10091 /* For struct element mapping, if struct is never referenced
10092 in target block and none of the mapping has always modifier,
10093 remove all the struct element mappings, which immediately
10094 follow the GOMP_MAP_STRUCT map clause. */
10095 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT)
10096 {
10097 HOST_WIDE_INT cnt = tree_to_shwi (OMP_CLAUSE_SIZE (c));
10098 while (cnt--)
10099 OMP_CLAUSE_CHAIN (c)
10100 = OMP_CLAUSE_CHAIN (OMP_CLAUSE_CHAIN (c));
10101 }
10102 }
10103 else if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT
10104 && code == OMP_TARGET_EXIT_DATA)
10105 remove = true;
10106 else if (DECL_SIZE (decl)
10107 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST
10108 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_POINTER
10109 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
10110 && (OMP_CLAUSE_MAP_KIND (c)
10111 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
10112 {
10113 /* For GOMP_MAP_FORCE_DEVICEPTR, we'll never enter here, because
10114 for these, TREE_CODE (DECL_SIZE (decl)) will always be
10115 INTEGER_CST. */
10116 gcc_assert (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FORCE_DEVICEPTR);
10117
10118 tree decl2 = DECL_VALUE_EXPR (decl);
10119 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
10120 decl2 = TREE_OPERAND (decl2, 0);
10121 gcc_assert (DECL_P (decl2));
10122 tree mem = build_simple_mem_ref (decl2);
10123 OMP_CLAUSE_DECL (c) = mem;
10124 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
10125 if (ctx->outer_context)
10126 {
10127 omp_notice_variable (ctx->outer_context, decl2, true);
10128 omp_notice_variable (ctx->outer_context,
10129 OMP_CLAUSE_SIZE (c), true);
10130 }
10131 if (((ctx->region_type & ORT_TARGET) != 0
10132 || !ctx->target_firstprivatize_array_bases)
10133 && ((n->value & GOVD_SEEN) == 0
10134 || (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE)) == 0))
10135 {
10136 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
10137 OMP_CLAUSE_MAP);
10138 OMP_CLAUSE_DECL (nc) = decl;
10139 OMP_CLAUSE_SIZE (nc) = size_zero_node;
10140 if (ctx->target_firstprivatize_array_bases)
10141 OMP_CLAUSE_SET_MAP_KIND (nc,
10142 GOMP_MAP_FIRSTPRIVATE_POINTER);
10143 else
10144 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
10145 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (c);
10146 OMP_CLAUSE_CHAIN (c) = nc;
10147 c = nc;
10148 }
10149 }
10150 else
10151 {
10152 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
10153 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
10154 gcc_assert ((n->value & GOVD_SEEN) == 0
10155 || ((n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
10156 == 0));
10157 }
10158 break;
10159
10160 case OMP_CLAUSE_TO:
10161 case OMP_CLAUSE_FROM:
10162 case OMP_CLAUSE__CACHE_:
10163 decl = OMP_CLAUSE_DECL (c);
10164 if (!DECL_P (decl))
10165 break;
10166 if (DECL_SIZE (decl)
10167 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
10168 {
10169 tree decl2 = DECL_VALUE_EXPR (decl);
10170 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
10171 decl2 = TREE_OPERAND (decl2, 0);
10172 gcc_assert (DECL_P (decl2));
10173 tree mem = build_simple_mem_ref (decl2);
10174 OMP_CLAUSE_DECL (c) = mem;
10175 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
10176 if (ctx->outer_context)
10177 {
10178 omp_notice_variable (ctx->outer_context, decl2, true);
10179 omp_notice_variable (ctx->outer_context,
10180 OMP_CLAUSE_SIZE (c), true);
10181 }
10182 }
10183 else if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
10184 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
10185 break;
10186
10187 case OMP_CLAUSE_REDUCTION:
10188 if (OMP_CLAUSE_REDUCTION_INSCAN (c))
10189 {
10190 decl = OMP_CLAUSE_DECL (c);
10191 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
10192 if ((n->value & GOVD_REDUCTION_INSCAN) == 0)
10193 {
10194 remove = true;
10195 error_at (OMP_CLAUSE_LOCATION (c),
10196 "%qD specified in %<inscan%> %<reduction%> clause "
10197 "but not in %<scan%> directive clause", decl);
10198 break;
10199 }
10200 has_inscan_reductions = true;
10201 }
10202 /* FALLTHRU */
10203 case OMP_CLAUSE_IN_REDUCTION:
10204 case OMP_CLAUSE_TASK_REDUCTION:
10205 decl = OMP_CLAUSE_DECL (c);
10206 /* OpenACC reductions need a present_or_copy data clause.
10207 Add one if necessary. Emit error when the reduction is private. */
10208 if (ctx->region_type == ORT_ACC_PARALLEL)
10209 {
10210 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
10211 if (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
10212 {
10213 remove = true;
10214 error_at (OMP_CLAUSE_LOCATION (c), "invalid private "
10215 "reduction on %qE", DECL_NAME (decl));
10216 }
10217 else if ((n->value & GOVD_MAP) == 0)
10218 {
10219 tree next = OMP_CLAUSE_CHAIN (c);
10220 tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_MAP);
10221 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_TOFROM);
10222 OMP_CLAUSE_DECL (nc) = decl;
10223 OMP_CLAUSE_CHAIN (c) = nc;
10224 lang_hooks.decls.omp_finish_clause (nc, pre_p);
10225 while (1)
10226 {
10227 OMP_CLAUSE_MAP_IN_REDUCTION (nc) = 1;
10228 if (OMP_CLAUSE_CHAIN (nc) == NULL)
10229 break;
10230 nc = OMP_CLAUSE_CHAIN (nc);
10231 }
10232 OMP_CLAUSE_CHAIN (nc) = next;
10233 n->value |= GOVD_MAP;
10234 }
10235 }
10236 if (DECL_P (decl)
10237 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
10238 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
10239 break;
10240 case OMP_CLAUSE_COPYIN:
10241 case OMP_CLAUSE_COPYPRIVATE:
10242 case OMP_CLAUSE_IF:
10243 case OMP_CLAUSE_NUM_THREADS:
10244 case OMP_CLAUSE_NUM_TEAMS:
10245 case OMP_CLAUSE_THREAD_LIMIT:
10246 case OMP_CLAUSE_DIST_SCHEDULE:
10247 case OMP_CLAUSE_DEVICE:
10248 case OMP_CLAUSE_SCHEDULE:
10249 case OMP_CLAUSE_NOWAIT:
10250 case OMP_CLAUSE_ORDERED:
10251 case OMP_CLAUSE_DEFAULT:
10252 case OMP_CLAUSE_UNTIED:
10253 case OMP_CLAUSE_COLLAPSE:
10254 case OMP_CLAUSE_FINAL:
10255 case OMP_CLAUSE_MERGEABLE:
10256 case OMP_CLAUSE_PROC_BIND:
10257 case OMP_CLAUSE_SAFELEN:
10258 case OMP_CLAUSE_SIMDLEN:
10259 case OMP_CLAUSE_DEPEND:
10260 case OMP_CLAUSE_PRIORITY:
10261 case OMP_CLAUSE_GRAINSIZE:
10262 case OMP_CLAUSE_NUM_TASKS:
10263 case OMP_CLAUSE_NOGROUP:
10264 case OMP_CLAUSE_THREADS:
10265 case OMP_CLAUSE_SIMD:
10266 case OMP_CLAUSE_HINT:
10267 case OMP_CLAUSE_DEFAULTMAP:
10268 case OMP_CLAUSE_ORDER:
10269 case OMP_CLAUSE_BIND:
10270 case OMP_CLAUSE_USE_DEVICE_PTR:
10271 case OMP_CLAUSE_USE_DEVICE_ADDR:
10272 case OMP_CLAUSE_IS_DEVICE_PTR:
10273 case OMP_CLAUSE_ASYNC:
10274 case OMP_CLAUSE_WAIT:
10275 case OMP_CLAUSE_INDEPENDENT:
10276 case OMP_CLAUSE_NUM_GANGS:
10277 case OMP_CLAUSE_NUM_WORKERS:
10278 case OMP_CLAUSE_VECTOR_LENGTH:
10279 case OMP_CLAUSE_GANG:
10280 case OMP_CLAUSE_WORKER:
10281 case OMP_CLAUSE_VECTOR:
10282 case OMP_CLAUSE_AUTO:
10283 case OMP_CLAUSE_SEQ:
10284 case OMP_CLAUSE_TILE:
10285 case OMP_CLAUSE_IF_PRESENT:
10286 case OMP_CLAUSE_FINALIZE:
10287 case OMP_CLAUSE_INCLUSIVE:
10288 case OMP_CLAUSE_EXCLUSIVE:
10289 break;
10290
10291 default:
10292 gcc_unreachable ();
10293 }
10294
10295 if (remove)
10296 *list_p = OMP_CLAUSE_CHAIN (c);
10297 else
10298 list_p = &OMP_CLAUSE_CHAIN (c);
10299 }
10300
10301 /* Add in any implicit data sharing. */
10302 struct gimplify_adjust_omp_clauses_data data;
10303 data.list_p = list_p;
10304 data.pre_p = pre_p;
10305 splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, &data);
10306
10307 if (has_inscan_reductions)
10308 for (c = *orig_list_p; c; c = OMP_CLAUSE_CHAIN (c))
10309 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
10310 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
10311 {
10312 error_at (OMP_CLAUSE_LOCATION (c),
10313 "%<inscan%> %<reduction%> clause used together with "
10314 "%<linear%> clause for a variable other than loop "
10315 "iterator");
10316 break;
10317 }
10318
10319 gimplify_omp_ctxp = ctx->outer_context;
10320 delete_omp_context (ctx);
10321 }
10322
10323 /* Gimplify OACC_CACHE. */
10324
10325 static void
10326 gimplify_oacc_cache (tree *expr_p, gimple_seq *pre_p)
10327 {
10328 tree expr = *expr_p;
10329
10330 gimplify_scan_omp_clauses (&OACC_CACHE_CLAUSES (expr), pre_p, ORT_ACC,
10331 OACC_CACHE);
10332 gimplify_adjust_omp_clauses (pre_p, NULL, &OACC_CACHE_CLAUSES (expr),
10333 OACC_CACHE);
10334
10335 /* TODO: Do something sensible with this information. */
10336
10337 *expr_p = NULL_TREE;
10338 }
10339
10340 /* Helper function of gimplify_oacc_declare. The helper's purpose is to,
10341 if required, translate 'kind' in CLAUSE into an 'entry' kind and 'exit'
10342 kind. The entry kind will replace the one in CLAUSE, while the exit
10343 kind will be used in a new omp_clause and returned to the caller. */
10344
10345 static tree
10346 gimplify_oacc_declare_1 (tree clause)
10347 {
10348 HOST_WIDE_INT kind, new_op;
10349 bool ret = false;
10350 tree c = NULL;
10351
10352 kind = OMP_CLAUSE_MAP_KIND (clause);
10353
10354 switch (kind)
10355 {
10356 case GOMP_MAP_ALLOC:
10357 new_op = GOMP_MAP_RELEASE;
10358 ret = true;
10359 break;
10360
10361 case GOMP_MAP_FROM:
10362 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_ALLOC);
10363 new_op = GOMP_MAP_FROM;
10364 ret = true;
10365 break;
10366
10367 case GOMP_MAP_TOFROM:
10368 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_TO);
10369 new_op = GOMP_MAP_FROM;
10370 ret = true;
10371 break;
10372
10373 case GOMP_MAP_DEVICE_RESIDENT:
10374 case GOMP_MAP_FORCE_DEVICEPTR:
10375 case GOMP_MAP_FORCE_PRESENT:
10376 case GOMP_MAP_LINK:
10377 case GOMP_MAP_POINTER:
10378 case GOMP_MAP_TO:
10379 break;
10380
10381 default:
10382 gcc_unreachable ();
10383 break;
10384 }
10385
10386 if (ret)
10387 {
10388 c = build_omp_clause (OMP_CLAUSE_LOCATION (clause), OMP_CLAUSE_MAP);
10389 OMP_CLAUSE_SET_MAP_KIND (c, new_op);
10390 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clause);
10391 }
10392
10393 return c;
10394 }
10395
10396 /* Gimplify OACC_DECLARE. */
10397
static void
gimplify_oacc_declare (tree *expr_p, gimple_seq *pre_p)
{
  tree expr = *expr_p;
  gomp_target *stmt;
  tree clauses, t, decl;

  clauses = OACC_DECLARE_CLAUSES (expr);

  gimplify_scan_omp_clauses (&clauses, pre_p, ORT_TARGET_DATA, OACC_DECLARE);
  gimplify_adjust_omp_clauses (pre_p, NULL, &clauses, OACC_DECLARE);

  for (t = clauses; t; t = OMP_CLAUSE_CHAIN (t))
    {
      decl = OMP_CLAUSE_DECL (t);

      /* The clause decl may be a MEM_REF (e.g. for an array section);
	 the underlying variable is its first operand.  */
      if (TREE_CODE (decl) == MEM_REF)
	decl = TREE_OPERAND (decl, 0);

      /* Mark the variable as appearing in an OpenACC declare directive,
	 so is_oacc_declared returns true for it from now on.  */
      if (VAR_P (decl) && !is_oacc_declared (decl))
	{
	  tree attr = get_identifier ("oacc declare target");
	  DECL_ATTRIBUTES (decl) = tree_cons (attr, NULL_TREE,
					      DECL_ATTRIBUTES (decl));
	}

      /* For function-local variables, compute the matching 'exit' kind
	 clause (see gimplify_oacc_declare_1) and record it in
	 oacc_declare_returns, keyed by the decl, to be emitted on
	 function exit.  */
      if (VAR_P (decl)
	  && !is_global_var (decl)
	  && DECL_CONTEXT (decl) == current_function_decl)
	{
	  tree c = gimplify_oacc_declare_1 (t);
	  if (c)
	    {
	      if (oacc_declare_returns == NULL)
		oacc_declare_returns = new hash_map<tree, tree>;

	      oacc_declare_returns->put (decl, c);
	    }
	}

      /* Register the variable with the enclosing OMP context, if any.  */
      if (gimplify_omp_ctxp)
	omp_add_variable (gimplify_omp_ctxp, decl, GOVD_SEEN);
    }

  /* Lower the directive to a GIMPLE_OMP_TARGET statement carrying the
     adjusted clauses; the original tree is consumed.  */
  stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
				  clauses);

  gimplify_seq_add_stmt (pre_p, stmt);

  *expr_p = NULL_TREE;
}
10449
10450 /* Gimplify the contents of an OMP_PARALLEL statement. This involves
10451 gimplification of the body, as well as scanning the body for used
10452 variables. We need to do this scan now, because variable-sized
10453 decls will be decomposed during gimplification. */
10454
10455 static void
10456 gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
10457 {
10458 tree expr = *expr_p;
10459 gimple *g;
10460 gimple_seq body = NULL;
10461
10462 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
10463 OMP_PARALLEL_COMBINED (expr)
10464 ? ORT_COMBINED_PARALLEL
10465 : ORT_PARALLEL, OMP_PARALLEL);
10466
10467 push_gimplify_context ();
10468
10469 g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
10470 if (gimple_code (g) == GIMPLE_BIND)
10471 pop_gimplify_context (g);
10472 else
10473 pop_gimplify_context (NULL);
10474
10475 gimplify_adjust_omp_clauses (pre_p, body, &OMP_PARALLEL_CLAUSES (expr),
10476 OMP_PARALLEL);
10477
10478 g = gimple_build_omp_parallel (body,
10479 OMP_PARALLEL_CLAUSES (expr),
10480 NULL_TREE, NULL_TREE);
10481 if (OMP_PARALLEL_COMBINED (expr))
10482 gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
10483 gimplify_seq_add_stmt (pre_p, g);
10484 *expr_p = NULL_TREE;
10485 }
10486
10487 /* Gimplify the contents of an OMP_TASK statement. This involves
10488 gimplification of the body, as well as scanning the body for used
10489 variables. We need to do this scan now, because variable-sized
10490 decls will be decomposed during gimplification. */
10491
10492 static void
10493 gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
10494 {
10495 tree expr = *expr_p;
10496 gimple *g;
10497 gimple_seq body = NULL;
10498
10499 if (OMP_TASK_BODY (expr) == NULL_TREE)
10500 for (tree c = OMP_TASK_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
10501 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
10502 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_MUTEXINOUTSET)
10503 {
10504 error_at (OMP_CLAUSE_LOCATION (c),
10505 "%<mutexinoutset%> kind in %<depend%> clause on a "
10506 "%<taskwait%> construct");
10507 break;
10508 }
10509
10510 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p,
10511 omp_find_clause (OMP_TASK_CLAUSES (expr),
10512 OMP_CLAUSE_UNTIED)
10513 ? ORT_UNTIED_TASK : ORT_TASK, OMP_TASK);
10514
10515 if (OMP_TASK_BODY (expr))
10516 {
10517 push_gimplify_context ();
10518
10519 g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
10520 if (gimple_code (g) == GIMPLE_BIND)
10521 pop_gimplify_context (g);
10522 else
10523 pop_gimplify_context (NULL);
10524 }
10525
10526 gimplify_adjust_omp_clauses (pre_p, body, &OMP_TASK_CLAUSES (expr),
10527 OMP_TASK);
10528
10529 g = gimple_build_omp_task (body,
10530 OMP_TASK_CLAUSES (expr),
10531 NULL_TREE, NULL_TREE,
10532 NULL_TREE, NULL_TREE, NULL_TREE);
10533 if (OMP_TASK_BODY (expr) == NULL_TREE)
10534 gimple_omp_task_set_taskwait_p (g, true);
10535 gimplify_seq_add_stmt (pre_p, g);
10536 *expr_p = NULL_TREE;
10537 }
10538
10539 /* Helper function of gimplify_omp_for, find OMP_FOR resp. OMP_SIMD
10540 with non-NULL OMP_FOR_INIT. Also, fill in pdata array,
10541 pdata[0] non-NULL if there is anything non-trivial in between, pdata[1]
10542 is address of OMP_PARALLEL in between if any, pdata[2] is address of
10543 OMP_FOR in between if any and pdata[3] is address of the inner
10544 OMP_FOR/OMP_SIMD. */
10545
10546 static tree
10547 find_combined_omp_for (tree *tp, int *walk_subtrees, void *data)
10548 {
10549 tree **pdata = (tree **) data;
10550 *walk_subtrees = 0;
10551 switch (TREE_CODE (*tp))
10552 {
10553 case OMP_FOR:
10554 if (OMP_FOR_INIT (*tp) != NULL_TREE)
10555 {
10556 pdata[3] = tp;
10557 return *tp;
10558 }
10559 pdata[2] = tp;
10560 *walk_subtrees = 1;
10561 break;
10562 case OMP_SIMD:
10563 if (OMP_FOR_INIT (*tp) != NULL_TREE)
10564 {
10565 pdata[3] = tp;
10566 return *tp;
10567 }
10568 break;
10569 case BIND_EXPR:
10570 if (BIND_EXPR_VARS (*tp)
10571 || (BIND_EXPR_BLOCK (*tp)
10572 && BLOCK_VARS (BIND_EXPR_BLOCK (*tp))))
10573 pdata[0] = tp;
10574 *walk_subtrees = 1;
10575 break;
10576 case STATEMENT_LIST:
10577 if (!tsi_one_before_end_p (tsi_start (*tp)))
10578 pdata[0] = tp;
10579 *walk_subtrees = 1;
10580 break;
10581 case TRY_FINALLY_EXPR:
10582 pdata[0] = tp;
10583 *walk_subtrees = 1;
10584 break;
10585 case OMP_PARALLEL:
10586 pdata[1] = tp;
10587 *walk_subtrees = 1;
10588 break;
10589 default:
10590 break;
10591 }
10592 return NULL_TREE;
10593 }
10594
10595 /* Gimplify the gross structure of an OMP_FOR statement. */
10596
10597 static enum gimplify_status
10598 gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
10599 {
10600 tree for_stmt, orig_for_stmt, inner_for_stmt = NULL_TREE, decl, var, t;
10601 enum gimplify_status ret = GS_ALL_DONE;
10602 enum gimplify_status tret;
10603 gomp_for *gfor;
10604 gimple_seq for_body, for_pre_body;
10605 int i;
10606 bitmap has_decl_expr = NULL;
10607 enum omp_region_type ort = ORT_WORKSHARE;
10608
10609 orig_for_stmt = for_stmt = *expr_p;
10610
10611 bool loop_p = (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_BIND)
10612 != NULL_TREE);
10613 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
10614 {
10615 tree *data[4] = { NULL, NULL, NULL, NULL };
10616 gcc_assert (TREE_CODE (for_stmt) != OACC_LOOP);
10617 inner_for_stmt = walk_tree (&OMP_FOR_BODY (for_stmt),
10618 find_combined_omp_for, data, NULL);
10619 if (inner_for_stmt == NULL_TREE)
10620 {
10621 gcc_assert (seen_error ());
10622 *expr_p = NULL_TREE;
10623 return GS_ERROR;
10624 }
10625 if (data[2] && OMP_FOR_PRE_BODY (*data[2]))
10626 {
10627 append_to_statement_list_force (OMP_FOR_PRE_BODY (*data[2]),
10628 &OMP_FOR_PRE_BODY (for_stmt));
10629 OMP_FOR_PRE_BODY (*data[2]) = NULL_TREE;
10630 }
10631 if (OMP_FOR_PRE_BODY (inner_for_stmt))
10632 {
10633 append_to_statement_list_force (OMP_FOR_PRE_BODY (inner_for_stmt),
10634 &OMP_FOR_PRE_BODY (for_stmt));
10635 OMP_FOR_PRE_BODY (inner_for_stmt) = NULL_TREE;
10636 }
10637
10638 if (data[0])
10639 {
10640 /* We have some statements or variable declarations in between
10641 the composite construct directives. Move them around the
10642 inner_for_stmt. */
10643 data[0] = expr_p;
10644 for (i = 0; i < 3; i++)
10645 if (data[i])
10646 {
10647 tree t = *data[i];
10648 if (i < 2 && data[i + 1] == &OMP_BODY (t))
10649 data[i + 1] = data[i];
10650 *data[i] = OMP_BODY (t);
10651 tree body = build3 (BIND_EXPR, void_type_node, NULL_TREE,
10652 NULL_TREE, make_node (BLOCK));
10653 OMP_BODY (t) = body;
10654 append_to_statement_list_force (inner_for_stmt,
10655 &BIND_EXPR_BODY (body));
10656 *data[3] = t;
10657 data[3] = tsi_stmt_ptr (tsi_start (BIND_EXPR_BODY (body)));
10658 gcc_assert (*data[3] == inner_for_stmt);
10659 }
10660 return GS_OK;
10661 }
10662
10663 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (inner_for_stmt)); i++)
10664 if (!loop_p
10665 && OMP_FOR_ORIG_DECLS (inner_for_stmt)
10666 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
10667 i)) == TREE_LIST
10668 && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
10669 i)))
10670 {
10671 tree orig = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt), i);
10672 /* Class iterators aren't allowed on OMP_SIMD, so the only
10673 case we need to solve is distribute parallel for. They are
10674 allowed on the loop construct, but that is already handled
10675 in gimplify_omp_loop. */
10676 gcc_assert (TREE_CODE (inner_for_stmt) == OMP_FOR
10677 && TREE_CODE (for_stmt) == OMP_DISTRIBUTE
10678 && data[1]);
10679 tree orig_decl = TREE_PURPOSE (orig);
10680 tree last = TREE_VALUE (orig);
10681 tree *pc;
10682 for (pc = &OMP_FOR_CLAUSES (inner_for_stmt);
10683 *pc; pc = &OMP_CLAUSE_CHAIN (*pc))
10684 if ((OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_PRIVATE
10685 || OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_LASTPRIVATE)
10686 && OMP_CLAUSE_DECL (*pc) == orig_decl)
10687 break;
10688 if (*pc == NULL_TREE)
10689 {
10690 tree *spc;
10691 for (spc = &OMP_PARALLEL_CLAUSES (*data[1]);
10692 *spc; spc = &OMP_CLAUSE_CHAIN (*spc))
10693 if (OMP_CLAUSE_CODE (*spc) == OMP_CLAUSE_PRIVATE
10694 && OMP_CLAUSE_DECL (*spc) == orig_decl)
10695 break;
10696 if (*spc)
10697 {
10698 tree c = *spc;
10699 *spc = OMP_CLAUSE_CHAIN (c);
10700 OMP_CLAUSE_CHAIN (c) = NULL_TREE;
10701 *pc = c;
10702 }
10703 }
10704 if (*pc == NULL_TREE)
10705 ;
10706 else if (OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_PRIVATE)
10707 {
10708 /* private clause will appear only on inner_for_stmt.
10709 Change it into firstprivate, and add private clause
10710 on for_stmt. */
10711 tree c = copy_node (*pc);
10712 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
10713 OMP_FOR_CLAUSES (for_stmt) = c;
10714 OMP_CLAUSE_CODE (*pc) = OMP_CLAUSE_FIRSTPRIVATE;
10715 lang_hooks.decls.omp_finish_clause (*pc, pre_p);
10716 }
10717 else
10718 {
10719 /* lastprivate clause will appear on both inner_for_stmt
10720 and for_stmt. Add firstprivate clause to
10721 inner_for_stmt. */
10722 tree c = build_omp_clause (OMP_CLAUSE_LOCATION (*pc),
10723 OMP_CLAUSE_FIRSTPRIVATE);
10724 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (*pc);
10725 OMP_CLAUSE_CHAIN (c) = *pc;
10726 *pc = c;
10727 lang_hooks.decls.omp_finish_clause (*pc, pre_p);
10728 }
10729 tree c = build_omp_clause (UNKNOWN_LOCATION,
10730 OMP_CLAUSE_FIRSTPRIVATE);
10731 OMP_CLAUSE_DECL (c) = last;
10732 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
10733 OMP_PARALLEL_CLAUSES (*data[1]) = c;
10734 c = build_omp_clause (UNKNOWN_LOCATION,
10735 *pc ? OMP_CLAUSE_SHARED
10736 : OMP_CLAUSE_FIRSTPRIVATE);
10737 OMP_CLAUSE_DECL (c) = orig_decl;
10738 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
10739 OMP_PARALLEL_CLAUSES (*data[1]) = c;
10740 }
10741 /* Similarly, take care of C++ range for temporaries, those should
10742 be firstprivate on OMP_PARALLEL if any. */
10743 if (data[1])
10744 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (inner_for_stmt)); i++)
10745 if (OMP_FOR_ORIG_DECLS (inner_for_stmt)
10746 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
10747 i)) == TREE_LIST
10748 && TREE_CHAIN (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
10749 i)))
10750 {
10751 tree orig
10752 = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt), i);
10753 tree v = TREE_CHAIN (orig);
10754 tree c = build_omp_clause (UNKNOWN_LOCATION,
10755 OMP_CLAUSE_FIRSTPRIVATE);
10756 /* First add firstprivate clause for the __for_end artificial
10757 decl. */
10758 OMP_CLAUSE_DECL (c) = TREE_VEC_ELT (v, 1);
10759 if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c)))
10760 == REFERENCE_TYPE)
10761 OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c) = 1;
10762 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
10763 OMP_PARALLEL_CLAUSES (*data[1]) = c;
10764 if (TREE_VEC_ELT (v, 0))
10765 {
10766 /* And now the same for __for_range artificial decl if it
10767 exists. */
10768 c = build_omp_clause (UNKNOWN_LOCATION,
10769 OMP_CLAUSE_FIRSTPRIVATE);
10770 OMP_CLAUSE_DECL (c) = TREE_VEC_ELT (v, 0);
10771 if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c)))
10772 == REFERENCE_TYPE)
10773 OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c) = 1;
10774 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
10775 OMP_PARALLEL_CLAUSES (*data[1]) = c;
10776 }
10777 }
10778 }
10779
10780 switch (TREE_CODE (for_stmt))
10781 {
10782 case OMP_FOR:
10783 case OMP_DISTRIBUTE:
10784 break;
10785 case OACC_LOOP:
10786 ort = ORT_ACC;
10787 break;
10788 case OMP_TASKLOOP:
10789 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_UNTIED))
10790 ort = ORT_UNTIED_TASKLOOP;
10791 else
10792 ort = ORT_TASKLOOP;
10793 break;
10794 case OMP_SIMD:
10795 ort = ORT_SIMD;
10796 break;
10797 default:
10798 gcc_unreachable ();
10799 }
10800
10801 /* Set OMP_CLAUSE_LINEAR_NO_COPYIN flag on explicit linear
10802 clause for the IV. */
10803 if (ort == ORT_SIMD && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
10804 {
10805 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), 0);
10806 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
10807 decl = TREE_OPERAND (t, 0);
10808 for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
10809 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
10810 && OMP_CLAUSE_DECL (c) == decl)
10811 {
10812 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
10813 break;
10814 }
10815 }
10816
10817 if (TREE_CODE (for_stmt) != OMP_TASKLOOP)
10818 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p, ort,
10819 loop_p && TREE_CODE (for_stmt) != OMP_SIMD
10820 ? OMP_LOOP : TREE_CODE (for_stmt));
10821
10822 if (TREE_CODE (for_stmt) == OMP_DISTRIBUTE)
10823 gimplify_omp_ctxp->distribute = true;
10824
10825 /* Handle OMP_FOR_INIT. */
10826 for_pre_body = NULL;
10827 if ((ort == ORT_SIMD
10828 || (inner_for_stmt && TREE_CODE (inner_for_stmt) == OMP_SIMD))
10829 && OMP_FOR_PRE_BODY (for_stmt))
10830 {
10831 has_decl_expr = BITMAP_ALLOC (NULL);
10832 if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == DECL_EXPR
10833 && TREE_CODE (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt)))
10834 == VAR_DECL)
10835 {
10836 t = OMP_FOR_PRE_BODY (for_stmt);
10837 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
10838 }
10839 else if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == STATEMENT_LIST)
10840 {
10841 tree_stmt_iterator si;
10842 for (si = tsi_start (OMP_FOR_PRE_BODY (for_stmt)); !tsi_end_p (si);
10843 tsi_next (&si))
10844 {
10845 t = tsi_stmt (si);
10846 if (TREE_CODE (t) == DECL_EXPR
10847 && TREE_CODE (DECL_EXPR_DECL (t)) == VAR_DECL)
10848 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
10849 }
10850 }
10851 }
10852 if (OMP_FOR_PRE_BODY (for_stmt))
10853 {
10854 if (TREE_CODE (for_stmt) != OMP_TASKLOOP || gimplify_omp_ctxp)
10855 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
10856 else
10857 {
10858 struct gimplify_omp_ctx ctx;
10859 memset (&ctx, 0, sizeof (ctx));
10860 ctx.region_type = ORT_NONE;
10861 gimplify_omp_ctxp = &ctx;
10862 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
10863 gimplify_omp_ctxp = NULL;
10864 }
10865 }
10866 OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;
10867
10868 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
10869 for_stmt = inner_for_stmt;
10870
10871 /* For taskloop, need to gimplify the start, end and step before the
10872 taskloop, outside of the taskloop omp context. */
10873 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
10874 {
10875 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
10876 {
10877 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
10878 if (!is_gimple_constant (TREE_OPERAND (t, 1)))
10879 {
10880 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
10881 TREE_OPERAND (t, 1)
10882 = get_initialized_tmp_var (TREE_OPERAND (t, 1),
10883 gimple_seq_empty_p (for_pre_body)
10884 ? pre_p : &for_pre_body, NULL,
10885 false);
10886 /* Reference to pointer conversion is considered useless,
10887 but is significant for firstprivate clause. Force it
10888 here. */
10889 if (TREE_CODE (type) == POINTER_TYPE
10890 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 1)))
10891 == REFERENCE_TYPE))
10892 {
10893 tree v = create_tmp_var (TYPE_MAIN_VARIANT (type));
10894 tree m = build2 (INIT_EXPR, TREE_TYPE (v), v,
10895 TREE_OPERAND (t, 1));
10896 gimplify_and_add (m, gimple_seq_empty_p (for_pre_body)
10897 ? pre_p : &for_pre_body);
10898 TREE_OPERAND (t, 1) = v;
10899 }
10900 tree c = build_omp_clause (input_location,
10901 OMP_CLAUSE_FIRSTPRIVATE);
10902 OMP_CLAUSE_DECL (c) = TREE_OPERAND (t, 1);
10903 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
10904 OMP_FOR_CLAUSES (orig_for_stmt) = c;
10905 }
10906
10907 /* Handle OMP_FOR_COND. */
10908 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
10909 if (!is_gimple_constant (TREE_OPERAND (t, 1)))
10910 {
10911 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
10912 TREE_OPERAND (t, 1)
10913 = get_initialized_tmp_var (TREE_OPERAND (t, 1),
10914 gimple_seq_empty_p (for_pre_body)
10915 ? pre_p : &for_pre_body, NULL,
10916 false);
10917 /* Reference to pointer conversion is considered useless,
10918 but is significant for firstprivate clause. Force it
10919 here. */
10920 if (TREE_CODE (type) == POINTER_TYPE
10921 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 1)))
10922 == REFERENCE_TYPE))
10923 {
10924 tree v = create_tmp_var (TYPE_MAIN_VARIANT (type));
10925 tree m = build2 (INIT_EXPR, TREE_TYPE (v), v,
10926 TREE_OPERAND (t, 1));
10927 gimplify_and_add (m, gimple_seq_empty_p (for_pre_body)
10928 ? pre_p : &for_pre_body);
10929 TREE_OPERAND (t, 1) = v;
10930 }
10931 tree c = build_omp_clause (input_location,
10932 OMP_CLAUSE_FIRSTPRIVATE);
10933 OMP_CLAUSE_DECL (c) = TREE_OPERAND (t, 1);
10934 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
10935 OMP_FOR_CLAUSES (orig_for_stmt) = c;
10936 }
10937
10938 /* Handle OMP_FOR_INCR. */
10939 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
10940 if (TREE_CODE (t) == MODIFY_EXPR)
10941 {
10942 decl = TREE_OPERAND (t, 0);
10943 t = TREE_OPERAND (t, 1);
10944 tree *tp = &TREE_OPERAND (t, 1);
10945 if (TREE_CODE (t) == PLUS_EXPR && *tp == decl)
10946 tp = &TREE_OPERAND (t, 0);
10947
10948 if (!is_gimple_constant (*tp))
10949 {
10950 gimple_seq *seq = gimple_seq_empty_p (for_pre_body)
10951 ? pre_p : &for_pre_body;
10952 *tp = get_initialized_tmp_var (*tp, seq, NULL, false);
10953 tree c = build_omp_clause (input_location,
10954 OMP_CLAUSE_FIRSTPRIVATE);
10955 OMP_CLAUSE_DECL (c) = *tp;
10956 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
10957 OMP_FOR_CLAUSES (orig_for_stmt) = c;
10958 }
10959 }
10960 }
10961
10962 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (orig_for_stmt), pre_p, ort,
10963 OMP_TASKLOOP);
10964 }
10965
10966 if (orig_for_stmt != for_stmt)
10967 gimplify_omp_ctxp->combined_loop = true;
10968
10969 for_body = NULL;
10970 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
10971 == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
10972 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
10973 == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));
10974
10975 tree c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ORDERED);
10976 bool is_doacross = false;
10977 if (c && OMP_CLAUSE_ORDERED_EXPR (c))
10978 {
10979 is_doacross = true;
10980 gimplify_omp_ctxp->loop_iter_var.create (TREE_VEC_LENGTH
10981 (OMP_FOR_INIT (for_stmt))
10982 * 2);
10983 }
10984 int collapse = 1, tile = 0;
10985 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_COLLAPSE);
10986 if (c)
10987 collapse = tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (c));
10988 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_TILE);
10989 if (c)
10990 tile = list_length (OMP_CLAUSE_TILE_LIST (c));
10991 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
10992 {
10993 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
10994 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
10995 decl = TREE_OPERAND (t, 0);
10996 gcc_assert (DECL_P (decl));
10997 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
10998 || POINTER_TYPE_P (TREE_TYPE (decl)));
10999 if (is_doacross)
11000 {
11001 if (TREE_CODE (for_stmt) == OMP_FOR && OMP_FOR_ORIG_DECLS (for_stmt))
11002 {
11003 tree orig_decl = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
11004 if (TREE_CODE (orig_decl) == TREE_LIST)
11005 {
11006 orig_decl = TREE_PURPOSE (orig_decl);
11007 if (!orig_decl)
11008 orig_decl = decl;
11009 }
11010 gimplify_omp_ctxp->loop_iter_var.quick_push (orig_decl);
11011 }
11012 else
11013 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
11014 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
11015 }
11016
11017 /* Make sure the iteration variable is private. */
11018 tree c = NULL_TREE;
11019 tree c2 = NULL_TREE;
11020 if (orig_for_stmt != for_stmt)
11021 {
11022 /* Preserve this information until we gimplify the inner simd. */
11023 if (has_decl_expr
11024 && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
11025 TREE_PRIVATE (t) = 1;
11026 }
11027 else if (ort == ORT_SIMD)
11028 {
11029 splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
11030 (splay_tree_key) decl);
11031 omp_is_private (gimplify_omp_ctxp, decl,
11032 1 + (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
11033 != 1));
11034 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
11035 {
11036 omp_notice_variable (gimplify_omp_ctxp, decl, true);
11037 if (n->value & GOVD_LASTPRIVATE_CONDITIONAL)
11038 for (tree c3 = omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
11039 OMP_CLAUSE_LASTPRIVATE);
11040 c3; c3 = omp_find_clause (OMP_CLAUSE_CHAIN (c3),
11041 OMP_CLAUSE_LASTPRIVATE))
11042 if (OMP_CLAUSE_DECL (c3) == decl)
11043 {
11044 warning_at (OMP_CLAUSE_LOCATION (c3), 0,
11045 "conditional %<lastprivate%> on loop "
11046 "iterator %qD ignored", decl);
11047 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c3) = 0;
11048 n->value &= ~GOVD_LASTPRIVATE_CONDITIONAL;
11049 }
11050 }
11051 else if (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1 && !loop_p)
11052 {
11053 c = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
11054 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
11055 unsigned int flags = GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN;
11056 if ((has_decl_expr
11057 && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
11058 || TREE_PRIVATE (t))
11059 {
11060 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
11061 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
11062 }
11063 struct gimplify_omp_ctx *outer
11064 = gimplify_omp_ctxp->outer_context;
11065 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
11066 {
11067 if (outer->region_type == ORT_WORKSHARE
11068 && outer->combined_loop)
11069 {
11070 n = splay_tree_lookup (outer->variables,
11071 (splay_tree_key)decl);
11072 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
11073 {
11074 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
11075 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
11076 }
11077 else
11078 {
11079 struct gimplify_omp_ctx *octx = outer->outer_context;
11080 if (octx
11081 && octx->region_type == ORT_COMBINED_PARALLEL
11082 && octx->outer_context
11083 && (octx->outer_context->region_type
11084 == ORT_WORKSHARE)
11085 && octx->outer_context->combined_loop)
11086 {
11087 octx = octx->outer_context;
11088 n = splay_tree_lookup (octx->variables,
11089 (splay_tree_key)decl);
11090 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
11091 {
11092 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
11093 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
11094 }
11095 }
11096 }
11097 }
11098 }
11099
11100 OMP_CLAUSE_DECL (c) = decl;
11101 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
11102 OMP_FOR_CLAUSES (for_stmt) = c;
11103 omp_add_variable (gimplify_omp_ctxp, decl, flags);
11104 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
11105 {
11106 if (outer->region_type == ORT_WORKSHARE
11107 && outer->combined_loop)
11108 {
11109 if (outer->outer_context
11110 && (outer->outer_context->region_type
11111 == ORT_COMBINED_PARALLEL))
11112 outer = outer->outer_context;
11113 else if (omp_check_private (outer, decl, false))
11114 outer = NULL;
11115 }
11116 else if (((outer->region_type & ORT_TASKLOOP)
11117 == ORT_TASKLOOP)
11118 && outer->combined_loop
11119 && !omp_check_private (gimplify_omp_ctxp,
11120 decl, false))
11121 ;
11122 else if (outer->region_type != ORT_COMBINED_PARALLEL)
11123 {
11124 omp_notice_variable (outer, decl, true);
11125 outer = NULL;
11126 }
11127 if (outer)
11128 {
11129 n = splay_tree_lookup (outer->variables,
11130 (splay_tree_key)decl);
11131 if (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
11132 {
11133 omp_add_variable (outer, decl,
11134 GOVD_LASTPRIVATE | GOVD_SEEN);
11135 if (outer->region_type == ORT_COMBINED_PARALLEL
11136 && outer->outer_context
11137 && (outer->outer_context->region_type
11138 == ORT_WORKSHARE)
11139 && outer->outer_context->combined_loop)
11140 {
11141 outer = outer->outer_context;
11142 n = splay_tree_lookup (outer->variables,
11143 (splay_tree_key)decl);
11144 if (omp_check_private (outer, decl, false))
11145 outer = NULL;
11146 else if (n == NULL
11147 || ((n->value & GOVD_DATA_SHARE_CLASS)
11148 == 0))
11149 omp_add_variable (outer, decl,
11150 GOVD_LASTPRIVATE
11151 | GOVD_SEEN);
11152 else
11153 outer = NULL;
11154 }
11155 if (outer && outer->outer_context
11156 && ((outer->outer_context->region_type
11157 & ORT_COMBINED_TEAMS) == ORT_COMBINED_TEAMS
11158 || (((outer->region_type & ORT_TASKLOOP)
11159 == ORT_TASKLOOP)
11160 && (outer->outer_context->region_type
11161 == ORT_COMBINED_PARALLEL))))
11162 {
11163 outer = outer->outer_context;
11164 n = splay_tree_lookup (outer->variables,
11165 (splay_tree_key)decl);
11166 if (n == NULL
11167 || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
11168 omp_add_variable (outer, decl,
11169 GOVD_SHARED | GOVD_SEEN);
11170 else
11171 outer = NULL;
11172 }
11173 if (outer && outer->outer_context)
11174 omp_notice_variable (outer->outer_context, decl,
11175 true);
11176 }
11177 }
11178 }
11179 }
11180 else
11181 {
11182 bool lastprivate
11183 = (!has_decl_expr
11184 || !bitmap_bit_p (has_decl_expr, DECL_UID (decl)));
11185 if (TREE_PRIVATE (t))
11186 lastprivate = false;
11187 if (loop_p && OMP_FOR_ORIG_DECLS (for_stmt))
11188 {
11189 tree elt = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
11190 if (TREE_CODE (elt) == TREE_LIST && TREE_PURPOSE (elt))
11191 lastprivate = false;
11192 }
11193
11194 struct gimplify_omp_ctx *outer
11195 = gimplify_omp_ctxp->outer_context;
11196 if (outer && lastprivate)
11197 {
11198 if (outer->region_type == ORT_WORKSHARE
11199 && outer->combined_loop)
11200 {
11201 n = splay_tree_lookup (outer->variables,
11202 (splay_tree_key)decl);
11203 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
11204 {
11205 lastprivate = false;
11206 outer = NULL;
11207 }
11208 else if (outer->outer_context
11209 && (outer->outer_context->region_type
11210 == ORT_COMBINED_PARALLEL))
11211 outer = outer->outer_context;
11212 else if (omp_check_private (outer, decl, false))
11213 outer = NULL;
11214 }
11215 else if (((outer->region_type & ORT_TASKLOOP)
11216 == ORT_TASKLOOP)
11217 && outer->combined_loop
11218 && !omp_check_private (gimplify_omp_ctxp,
11219 decl, false))
11220 ;
11221 else if (outer->region_type != ORT_COMBINED_PARALLEL)
11222 {
11223 omp_notice_variable (outer, decl, true);
11224 outer = NULL;
11225 }
11226 if (outer)
11227 {
11228 n = splay_tree_lookup (outer->variables,
11229 (splay_tree_key)decl);
11230 if (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
11231 {
11232 omp_add_variable (outer, decl,
11233 GOVD_LASTPRIVATE | GOVD_SEEN);
11234 if (outer->region_type == ORT_COMBINED_PARALLEL
11235 && outer->outer_context
11236 && (outer->outer_context->region_type
11237 == ORT_WORKSHARE)
11238 && outer->outer_context->combined_loop)
11239 {
11240 outer = outer->outer_context;
11241 n = splay_tree_lookup (outer->variables,
11242 (splay_tree_key)decl);
11243 if (omp_check_private (outer, decl, false))
11244 outer = NULL;
11245 else if (n == NULL
11246 || ((n->value & GOVD_DATA_SHARE_CLASS)
11247 == 0))
11248 omp_add_variable (outer, decl,
11249 GOVD_LASTPRIVATE
11250 | GOVD_SEEN);
11251 else
11252 outer = NULL;
11253 }
11254 if (outer && outer->outer_context
11255 && ((outer->outer_context->region_type
11256 & ORT_COMBINED_TEAMS) == ORT_COMBINED_TEAMS
11257 || (((outer->region_type & ORT_TASKLOOP)
11258 == ORT_TASKLOOP)
11259 && (outer->outer_context->region_type
11260 == ORT_COMBINED_PARALLEL))))
11261 {
11262 outer = outer->outer_context;
11263 n = splay_tree_lookup (outer->variables,
11264 (splay_tree_key)decl);
11265 if (n == NULL
11266 || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
11267 omp_add_variable (outer, decl,
11268 GOVD_SHARED | GOVD_SEEN);
11269 else
11270 outer = NULL;
11271 }
11272 if (outer && outer->outer_context)
11273 omp_notice_variable (outer->outer_context, decl,
11274 true);
11275 }
11276 }
11277 }
11278
11279 c = build_omp_clause (input_location,
11280 lastprivate ? OMP_CLAUSE_LASTPRIVATE
11281 : OMP_CLAUSE_PRIVATE);
11282 OMP_CLAUSE_DECL (c) = decl;
11283 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
11284 OMP_FOR_CLAUSES (for_stmt) = c;
11285 omp_add_variable (gimplify_omp_ctxp, decl,
11286 (lastprivate ? GOVD_LASTPRIVATE : GOVD_PRIVATE)
11287 | GOVD_EXPLICIT | GOVD_SEEN);
11288 c = NULL_TREE;
11289 }
11290 }
11291 else if (omp_is_private (gimplify_omp_ctxp, decl, 0))
11292 {
11293 omp_notice_variable (gimplify_omp_ctxp, decl, true);
11294 splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
11295 (splay_tree_key) decl);
11296 if (n && (n->value & GOVD_LASTPRIVATE_CONDITIONAL))
11297 for (tree c3 = omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
11298 OMP_CLAUSE_LASTPRIVATE);
11299 c3; c3 = omp_find_clause (OMP_CLAUSE_CHAIN (c3),
11300 OMP_CLAUSE_LASTPRIVATE))
11301 if (OMP_CLAUSE_DECL (c3) == decl)
11302 {
11303 warning_at (OMP_CLAUSE_LOCATION (c3), 0,
11304 "conditional %<lastprivate%> on loop "
11305 "iterator %qD ignored", decl);
11306 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c3) = 0;
11307 n->value &= ~GOVD_LASTPRIVATE_CONDITIONAL;
11308 }
11309 }
11310 else
11311 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
11312
11313 /* If DECL is not a gimple register, create a temporary variable to act
11314 as an iteration counter. This is valid, since DECL cannot be
11315 modified in the body of the loop. Similarly for any iteration vars
11316 in simd with collapse > 1 where the iterator vars must be
11317 lastprivate. */
11318 if (orig_for_stmt != for_stmt)
11319 var = decl;
11320 else if (!is_gimple_reg (decl)
11321 || (ort == ORT_SIMD
11322 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1))
11323 {
11324 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
11325 /* Make sure omp_add_variable is not called on it prematurely.
11326 We call it ourselves a few lines later. */
11327 gimplify_omp_ctxp = NULL;
11328 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
11329 gimplify_omp_ctxp = ctx;
11330 TREE_OPERAND (t, 0) = var;
11331
11332 gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));
11333
11334 if (ort == ORT_SIMD
11335 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
11336 {
11337 c2 = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
11338 OMP_CLAUSE_LINEAR_NO_COPYIN (c2) = 1;
11339 OMP_CLAUSE_LINEAR_NO_COPYOUT (c2) = 1;
11340 OMP_CLAUSE_DECL (c2) = var;
11341 OMP_CLAUSE_CHAIN (c2) = OMP_FOR_CLAUSES (for_stmt);
11342 OMP_FOR_CLAUSES (for_stmt) = c2;
11343 omp_add_variable (gimplify_omp_ctxp, var,
11344 GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN);
11345 if (c == NULL_TREE)
11346 {
11347 c = c2;
11348 c2 = NULL_TREE;
11349 }
11350 }
11351 else
11352 omp_add_variable (gimplify_omp_ctxp, var,
11353 GOVD_PRIVATE | GOVD_SEEN);
11354 }
11355 else
11356 var = decl;
11357
11358 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
11359 is_gimple_val, fb_rvalue, false);
11360 ret = MIN (ret, tret);
11361 if (ret == GS_ERROR)
11362 return ret;
11363
11364 /* Handle OMP_FOR_COND. */
11365 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
11366 gcc_assert (COMPARISON_CLASS_P (t));
11367 gcc_assert (TREE_OPERAND (t, 0) == decl);
11368
11369 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
11370 is_gimple_val, fb_rvalue, false);
11371 ret = MIN (ret, tret);
11372
11373 /* Handle OMP_FOR_INCR. */
11374 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
11375 switch (TREE_CODE (t))
11376 {
11377 case PREINCREMENT_EXPR:
11378 case POSTINCREMENT_EXPR:
11379 {
11380 tree decl = TREE_OPERAND (t, 0);
11381 /* c_omp_for_incr_canonicalize_ptr() should have been
11382 called to massage things appropriately. */
11383 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
11384
11385 if (orig_for_stmt != for_stmt)
11386 break;
11387 t = build_int_cst (TREE_TYPE (decl), 1);
11388 if (c)
11389 OMP_CLAUSE_LINEAR_STEP (c) = t;
11390 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
11391 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
11392 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
11393 break;
11394 }
11395
11396 case PREDECREMENT_EXPR:
11397 case POSTDECREMENT_EXPR:
11398 /* c_omp_for_incr_canonicalize_ptr() should have been
11399 called to massage things appropriately. */
11400 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
11401 if (orig_for_stmt != for_stmt)
11402 break;
11403 t = build_int_cst (TREE_TYPE (decl), -1);
11404 if (c)
11405 OMP_CLAUSE_LINEAR_STEP (c) = t;
11406 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
11407 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
11408 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
11409 break;
11410
11411 case MODIFY_EXPR:
11412 gcc_assert (TREE_OPERAND (t, 0) == decl);
11413 TREE_OPERAND (t, 0) = var;
11414
11415 t = TREE_OPERAND (t, 1);
11416 switch (TREE_CODE (t))
11417 {
11418 case PLUS_EXPR:
11419 if (TREE_OPERAND (t, 1) == decl)
11420 {
11421 TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
11422 TREE_OPERAND (t, 0) = var;
11423 break;
11424 }
11425
11426 /* Fallthru. */
11427 case MINUS_EXPR:
11428 case POINTER_PLUS_EXPR:
11429 gcc_assert (TREE_OPERAND (t, 0) == decl);
11430 TREE_OPERAND (t, 0) = var;
11431 break;
11432 default:
11433 gcc_unreachable ();
11434 }
11435
11436 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
11437 is_gimple_val, fb_rvalue, false);
11438 ret = MIN (ret, tret);
11439 if (c)
11440 {
11441 tree step = TREE_OPERAND (t, 1);
11442 tree stept = TREE_TYPE (decl);
11443 if (POINTER_TYPE_P (stept))
11444 stept = sizetype;
11445 step = fold_convert (stept, step);
11446 if (TREE_CODE (t) == MINUS_EXPR)
11447 step = fold_build1 (NEGATE_EXPR, stept, step);
11448 OMP_CLAUSE_LINEAR_STEP (c) = step;
11449 if (step != TREE_OPERAND (t, 1))
11450 {
11451 tret = gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c),
11452 &for_pre_body, NULL,
11453 is_gimple_val, fb_rvalue, false);
11454 ret = MIN (ret, tret);
11455 }
11456 }
11457 break;
11458
11459 default:
11460 gcc_unreachable ();
11461 }
11462
11463 if (c2)
11464 {
11465 gcc_assert (c);
11466 OMP_CLAUSE_LINEAR_STEP (c2) = OMP_CLAUSE_LINEAR_STEP (c);
11467 }
11468
11469 if ((var != decl || collapse > 1 || tile) && orig_for_stmt == for_stmt)
11470 {
11471 for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
11472 if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
11473 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
11474 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
11475 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)
11476 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) == NULL))
11477 && OMP_CLAUSE_DECL (c) == decl)
11478 {
11479 if (is_doacross && (collapse == 1 || i >= collapse))
11480 t = var;
11481 else
11482 {
11483 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
11484 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
11485 gcc_assert (TREE_OPERAND (t, 0) == var);
11486 t = TREE_OPERAND (t, 1);
11487 gcc_assert (TREE_CODE (t) == PLUS_EXPR
11488 || TREE_CODE (t) == MINUS_EXPR
11489 || TREE_CODE (t) == POINTER_PLUS_EXPR);
11490 gcc_assert (TREE_OPERAND (t, 0) == var);
11491 t = build2 (TREE_CODE (t), TREE_TYPE (decl),
11492 is_doacross ? var : decl,
11493 TREE_OPERAND (t, 1));
11494 }
11495 gimple_seq *seq;
11496 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
11497 seq = &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c);
11498 else
11499 seq = &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c);
11500 push_gimplify_context ();
11501 gimplify_assign (decl, t, seq);
11502 gimple *bind = NULL;
11503 if (gimplify_ctxp->temps)
11504 {
11505 bind = gimple_build_bind (NULL_TREE, *seq, NULL_TREE);
11506 *seq = NULL;
11507 gimplify_seq_add_stmt (seq, bind);
11508 }
11509 pop_gimplify_context (bind);
11510 }
11511 }
11512 }
11513
11514 BITMAP_FREE (has_decl_expr);
11515
11516 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP
11517 || (loop_p && orig_for_stmt == for_stmt))
11518 {
11519 push_gimplify_context ();
11520 if (TREE_CODE (OMP_FOR_BODY (orig_for_stmt)) != BIND_EXPR)
11521 {
11522 OMP_FOR_BODY (orig_for_stmt)
11523 = build3 (BIND_EXPR, void_type_node, NULL,
11524 OMP_FOR_BODY (orig_for_stmt), NULL);
11525 TREE_SIDE_EFFECTS (OMP_FOR_BODY (orig_for_stmt)) = 1;
11526 }
11527 }
11528
11529 gimple *g = gimplify_and_return_first (OMP_FOR_BODY (orig_for_stmt),
11530 &for_body);
11531
11532 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP
11533 || (loop_p && orig_for_stmt == for_stmt))
11534 {
11535 if (gimple_code (g) == GIMPLE_BIND)
11536 pop_gimplify_context (g);
11537 else
11538 pop_gimplify_context (NULL);
11539 }
11540
11541 if (orig_for_stmt != for_stmt)
11542 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
11543 {
11544 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
11545 decl = TREE_OPERAND (t, 0);
11546 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
11547 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
11548 gimplify_omp_ctxp = ctx->outer_context;
11549 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
11550 gimplify_omp_ctxp = ctx;
11551 omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
11552 TREE_OPERAND (t, 0) = var;
11553 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
11554 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
11555 TREE_OPERAND (TREE_OPERAND (t, 1), 0) = var;
11556 }
11557
11558 gimplify_adjust_omp_clauses (pre_p, for_body,
11559 &OMP_FOR_CLAUSES (orig_for_stmt),
11560 TREE_CODE (orig_for_stmt));
11561
11562 int kind;
11563 switch (TREE_CODE (orig_for_stmt))
11564 {
11565 case OMP_FOR: kind = GF_OMP_FOR_KIND_FOR; break;
11566 case OMP_SIMD: kind = GF_OMP_FOR_KIND_SIMD; break;
11567 case OMP_DISTRIBUTE: kind = GF_OMP_FOR_KIND_DISTRIBUTE; break;
11568 case OMP_TASKLOOP: kind = GF_OMP_FOR_KIND_TASKLOOP; break;
11569 case OACC_LOOP: kind = GF_OMP_FOR_KIND_OACC_LOOP; break;
11570 default:
11571 gcc_unreachable ();
11572 }
11573 if (loop_p && kind == GF_OMP_FOR_KIND_SIMD)
11574 {
11575 gimplify_seq_add_seq (pre_p, for_pre_body);
11576 for_pre_body = NULL;
11577 }
11578 gfor = gimple_build_omp_for (for_body, kind, OMP_FOR_CLAUSES (orig_for_stmt),
11579 TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
11580 for_pre_body);
11581 if (orig_for_stmt != for_stmt)
11582 gimple_omp_for_set_combined_p (gfor, true);
11583 if (gimplify_omp_ctxp
11584 && (gimplify_omp_ctxp->combined_loop
11585 || (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
11586 && gimplify_omp_ctxp->outer_context
11587 && gimplify_omp_ctxp->outer_context->combined_loop)))
11588 {
11589 gimple_omp_for_set_combined_into_p (gfor, true);
11590 if (gimplify_omp_ctxp->combined_loop)
11591 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_SIMD);
11592 else
11593 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_FOR);
11594 }
11595
11596 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
11597 {
11598 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
11599 gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
11600 gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
11601 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
11602 gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
11603 gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
11604 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
11605 gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
11606 }
11607
11608 /* OMP_TASKLOOP is gimplified as two GIMPLE_OMP_FOR taskloop
11609 constructs with GIMPLE_OMP_TASK sandwiched in between them.
11610 The outer taskloop stands for computing the number of iterations,
11611 counts for collapsed loops and holding taskloop specific clauses.
11612 The task construct stands for the effect of data sharing on the
11613 explicit task it creates and the inner taskloop stands for expansion
11614 of the static loop inside of the explicit task construct. */
11615 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
11616 {
11617 tree *gfor_clauses_ptr = gimple_omp_for_clauses_ptr (gfor);
11618 tree task_clauses = NULL_TREE;
11619 tree c = *gfor_clauses_ptr;
11620 tree *gtask_clauses_ptr = &task_clauses;
11621 tree outer_for_clauses = NULL_TREE;
11622 tree *gforo_clauses_ptr = &outer_for_clauses;
11623 for (; c; c = OMP_CLAUSE_CHAIN (c))
11624 switch (OMP_CLAUSE_CODE (c))
11625 {
11626 /* These clauses are allowed on task, move them there. */
11627 case OMP_CLAUSE_SHARED:
11628 case OMP_CLAUSE_FIRSTPRIVATE:
11629 case OMP_CLAUSE_DEFAULT:
11630 case OMP_CLAUSE_IF:
11631 case OMP_CLAUSE_UNTIED:
11632 case OMP_CLAUSE_FINAL:
11633 case OMP_CLAUSE_MERGEABLE:
11634 case OMP_CLAUSE_PRIORITY:
11635 case OMP_CLAUSE_REDUCTION:
11636 case OMP_CLAUSE_IN_REDUCTION:
11637 *gtask_clauses_ptr = c;
11638 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
11639 break;
11640 case OMP_CLAUSE_PRIVATE:
11641 if (OMP_CLAUSE_PRIVATE_TASKLOOP_IV (c))
11642 {
11643 /* We want private on outer for and firstprivate
11644 on task. */
11645 *gtask_clauses_ptr
11646 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
11647 OMP_CLAUSE_FIRSTPRIVATE);
11648 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
11649 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL);
11650 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
11651 *gforo_clauses_ptr = c;
11652 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
11653 }
11654 else
11655 {
11656 *gtask_clauses_ptr = c;
11657 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
11658 }
11659 break;
11660 /* These clauses go into outer taskloop clauses. */
11661 case OMP_CLAUSE_GRAINSIZE:
11662 case OMP_CLAUSE_NUM_TASKS:
11663 case OMP_CLAUSE_NOGROUP:
11664 *gforo_clauses_ptr = c;
11665 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
11666 break;
11667 /* Taskloop clause we duplicate on both taskloops. */
11668 case OMP_CLAUSE_COLLAPSE:
11669 *gfor_clauses_ptr = c;
11670 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
11671 *gforo_clauses_ptr = copy_node (c);
11672 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
11673 break;
11674 /* For lastprivate, keep the clause on inner taskloop, and add
11675 a shared clause on task. If the same decl is also firstprivate,
11676 add also firstprivate clause on the inner taskloop. */
11677 case OMP_CLAUSE_LASTPRIVATE:
11678 if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c))
11679 {
11680 /* For taskloop C++ lastprivate IVs, we want:
11681 1) private on outer taskloop
11682 2) firstprivate and shared on task
11683 3) lastprivate on inner taskloop */
11684 *gtask_clauses_ptr
11685 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
11686 OMP_CLAUSE_FIRSTPRIVATE);
11687 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
11688 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL);
11689 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
11690 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c) = 1;
11691 *gforo_clauses_ptr = build_omp_clause (OMP_CLAUSE_LOCATION (c),
11692 OMP_CLAUSE_PRIVATE);
11693 OMP_CLAUSE_DECL (*gforo_clauses_ptr) = OMP_CLAUSE_DECL (c);
11694 OMP_CLAUSE_PRIVATE_TASKLOOP_IV (*gforo_clauses_ptr) = 1;
11695 TREE_TYPE (*gforo_clauses_ptr) = TREE_TYPE (c);
11696 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
11697 }
11698 *gfor_clauses_ptr = c;
11699 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
11700 *gtask_clauses_ptr
11701 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_SHARED);
11702 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
11703 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
11704 OMP_CLAUSE_SHARED_FIRSTPRIVATE (*gtask_clauses_ptr) = 1;
11705 gtask_clauses_ptr
11706 = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
11707 break;
11708 default:
11709 gcc_unreachable ();
11710 }
11711 *gfor_clauses_ptr = NULL_TREE;
11712 *gtask_clauses_ptr = NULL_TREE;
11713 *gforo_clauses_ptr = NULL_TREE;
11714 g = gimple_build_bind (NULL_TREE, gfor, NULL_TREE);
11715 g = gimple_build_omp_task (g, task_clauses, NULL_TREE, NULL_TREE,
11716 NULL_TREE, NULL_TREE, NULL_TREE);
11717 gimple_omp_task_set_taskloop_p (g, true);
11718 g = gimple_build_bind (NULL_TREE, g, NULL_TREE);
11719 gomp_for *gforo
11720 = gimple_build_omp_for (g, GF_OMP_FOR_KIND_TASKLOOP, outer_for_clauses,
11721 gimple_omp_for_collapse (gfor),
11722 gimple_omp_for_pre_body (gfor));
11723 gimple_omp_for_set_pre_body (gfor, NULL);
11724 gimple_omp_for_set_combined_p (gforo, true);
11725 gimple_omp_for_set_combined_into_p (gfor, true);
11726 for (i = 0; i < (int) gimple_omp_for_collapse (gfor); i++)
11727 {
11728 tree type = TREE_TYPE (gimple_omp_for_index (gfor, i));
11729 tree v = create_tmp_var (type);
11730 gimple_omp_for_set_index (gforo, i, v);
11731 t = unshare_expr (gimple_omp_for_initial (gfor, i));
11732 gimple_omp_for_set_initial (gforo, i, t);
11733 gimple_omp_for_set_cond (gforo, i,
11734 gimple_omp_for_cond (gfor, i));
11735 t = unshare_expr (gimple_omp_for_final (gfor, i));
11736 gimple_omp_for_set_final (gforo, i, t);
11737 t = unshare_expr (gimple_omp_for_incr (gfor, i));
11738 gcc_assert (TREE_OPERAND (t, 0) == gimple_omp_for_index (gfor, i));
11739 TREE_OPERAND (t, 0) = v;
11740 gimple_omp_for_set_incr (gforo, i, t);
11741 t = build_omp_clause (input_location, OMP_CLAUSE_PRIVATE);
11742 OMP_CLAUSE_DECL (t) = v;
11743 OMP_CLAUSE_CHAIN (t) = gimple_omp_for_clauses (gforo);
11744 gimple_omp_for_set_clauses (gforo, t);
11745 }
11746 gimplify_seq_add_stmt (pre_p, gforo);
11747 }
11748 else
11749 gimplify_seq_add_stmt (pre_p, gfor);
11750
11751 if (TREE_CODE (orig_for_stmt) == OMP_FOR)
11752 {
11753 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
11754 unsigned lastprivate_conditional = 0;
11755 while (ctx
11756 && (ctx->region_type == ORT_TARGET_DATA
11757 || ctx->region_type == ORT_TASKGROUP))
11758 ctx = ctx->outer_context;
11759 if (ctx && (ctx->region_type & ORT_PARALLEL) != 0)
11760 for (tree c = gimple_omp_for_clauses (gfor);
11761 c; c = OMP_CLAUSE_CHAIN (c))
11762 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
11763 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
11764 ++lastprivate_conditional;
11765 if (lastprivate_conditional)
11766 {
11767 struct omp_for_data fd;
11768 omp_extract_for_data (gfor, &fd, NULL);
11769 tree type = build_array_type_nelts (unsigned_type_for (fd.iter_type),
11770 lastprivate_conditional);
11771 tree var = create_tmp_var_raw (type);
11772 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
11773 OMP_CLAUSE_DECL (c) = var;
11774 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (gfor);
11775 gimple_omp_for_set_clauses (gfor, c);
11776 omp_add_variable (ctx, var, GOVD_CONDTEMP | GOVD_SEEN);
11777 }
11778 }
11779 else if (TREE_CODE (orig_for_stmt) == OMP_SIMD)
11780 {
11781 unsigned lastprivate_conditional = 0;
11782 for (tree c = gimple_omp_for_clauses (gfor); c; c = OMP_CLAUSE_CHAIN (c))
11783 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
11784 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
11785 ++lastprivate_conditional;
11786 if (lastprivate_conditional)
11787 {
11788 struct omp_for_data fd;
11789 omp_extract_for_data (gfor, &fd, NULL);
11790 tree type = unsigned_type_for (fd.iter_type);
11791 while (lastprivate_conditional--)
11792 {
11793 tree c = build_omp_clause (UNKNOWN_LOCATION,
11794 OMP_CLAUSE__CONDTEMP_);
11795 OMP_CLAUSE_DECL (c) = create_tmp_var (type);
11796 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (gfor);
11797 gimple_omp_for_set_clauses (gfor, c);
11798 }
11799 }
11800 }
11801
11802 if (ret != GS_ALL_DONE)
11803 return GS_ERROR;
11804 *expr_p = NULL_TREE;
11805 return GS_ALL_DONE;
11806 }
11807
11808 /* Helper for gimplify_omp_loop, called through walk_tree. */
11809
11810 static tree
11811 replace_reduction_placeholders (tree *tp, int *walk_subtrees, void *data)
11812 {
11813 if (DECL_P (*tp))
11814 {
11815 tree *d = (tree *) data;
11816 if (*tp == OMP_CLAUSE_REDUCTION_PLACEHOLDER (d[0]))
11817 {
11818 *tp = OMP_CLAUSE_REDUCTION_PLACEHOLDER (d[1]);
11819 *walk_subtrees = 0;
11820 }
11821 else if (*tp == OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (d[0]))
11822 {
11823 *tp = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (d[1]);
11824 *walk_subtrees = 0;
11825 }
11826 }
11827 return NULL_TREE;
11828 }
11829
11830 /* Gimplify the gross structure of an OMP_LOOP statement. */
11831
11832 static enum gimplify_status
11833 gimplify_omp_loop (tree *expr_p, gimple_seq *pre_p)
11834 {
11835 tree for_stmt = *expr_p;
11836 tree clauses = OMP_FOR_CLAUSES (for_stmt);
11837 struct gimplify_omp_ctx *octx = gimplify_omp_ctxp;
11838 enum omp_clause_bind_kind kind = OMP_CLAUSE_BIND_THREAD;
11839 int i;
11840
11841 /* If order is not present, the behavior is as if order(concurrent)
11842 appeared. */
11843 tree order = omp_find_clause (clauses, OMP_CLAUSE_ORDER);
11844 if (order == NULL_TREE)
11845 {
11846 order = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_ORDER);
11847 OMP_CLAUSE_CHAIN (order) = clauses;
11848 OMP_FOR_CLAUSES (for_stmt) = clauses = order;
11849 }
11850
11851 tree bind = omp_find_clause (clauses, OMP_CLAUSE_BIND);
11852 if (bind == NULL_TREE)
11853 {
11854 if (!flag_openmp) /* flag_openmp_simd */
11855 ;
11856 else if (octx && (octx->region_type & ORT_TEAMS) != 0)
11857 kind = OMP_CLAUSE_BIND_TEAMS;
11858 else if (octx && (octx->region_type & ORT_PARALLEL) != 0)
11859 kind = OMP_CLAUSE_BIND_PARALLEL;
11860 else
11861 {
11862 for (; octx; octx = octx->outer_context)
11863 {
11864 if ((octx->region_type & ORT_ACC) != 0
11865 || octx->region_type == ORT_NONE
11866 || octx->region_type == ORT_IMPLICIT_TARGET)
11867 continue;
11868 break;
11869 }
11870 if (octx == NULL && !in_omp_construct)
11871 error_at (EXPR_LOCATION (for_stmt),
11872 "%<bind%> clause not specified on a %<loop%> "
11873 "construct not nested inside another OpenMP construct");
11874 }
11875 bind = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_BIND);
11876 OMP_CLAUSE_CHAIN (bind) = clauses;
11877 OMP_CLAUSE_BIND_KIND (bind) = kind;
11878 OMP_FOR_CLAUSES (for_stmt) = bind;
11879 }
11880 else
11881 switch (OMP_CLAUSE_BIND_KIND (bind))
11882 {
11883 case OMP_CLAUSE_BIND_THREAD:
11884 break;
11885 case OMP_CLAUSE_BIND_PARALLEL:
11886 if (!flag_openmp) /* flag_openmp_simd */
11887 {
11888 OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
11889 break;
11890 }
11891 for (; octx; octx = octx->outer_context)
11892 if (octx->region_type == ORT_SIMD
11893 && omp_find_clause (octx->clauses, OMP_CLAUSE_BIND) == NULL_TREE)
11894 {
11895 error_at (EXPR_LOCATION (for_stmt),
11896 "%<bind(parallel)%> on a %<loop%> construct nested "
11897 "inside %<simd%> construct");
11898 OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
11899 break;
11900 }
11901 kind = OMP_CLAUSE_BIND_PARALLEL;
11902 break;
11903 case OMP_CLAUSE_BIND_TEAMS:
11904 if (!flag_openmp) /* flag_openmp_simd */
11905 {
11906 OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
11907 break;
11908 }
11909 if ((octx
11910 && octx->region_type != ORT_IMPLICIT_TARGET
11911 && octx->region_type != ORT_NONE
11912 && (octx->region_type & ORT_TEAMS) == 0)
11913 || in_omp_construct)
11914 {
11915 error_at (EXPR_LOCATION (for_stmt),
11916 "%<bind(teams)%> on a %<loop%> region not strictly "
11917 "nested inside of a %<teams%> region");
11918 OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
11919 break;
11920 }
11921 kind = OMP_CLAUSE_BIND_TEAMS;
11922 break;
11923 default:
11924 gcc_unreachable ();
11925 }
11926
11927 for (tree *pc = &OMP_FOR_CLAUSES (for_stmt); *pc; )
11928 switch (OMP_CLAUSE_CODE (*pc))
11929 {
11930 case OMP_CLAUSE_REDUCTION:
11931 if (OMP_CLAUSE_REDUCTION_INSCAN (*pc))
11932 {
11933 error_at (OMP_CLAUSE_LOCATION (*pc),
11934 "%<inscan%> %<reduction%> clause on "
11935 "%qs construct", "loop");
11936 OMP_CLAUSE_REDUCTION_INSCAN (*pc) = 0;
11937 }
11938 if (OMP_CLAUSE_REDUCTION_TASK (*pc))
11939 {
11940 error_at (OMP_CLAUSE_LOCATION (*pc),
11941 "invalid %<task%> reduction modifier on construct "
11942 "other than %<parallel%>, %<for%> or %<sections%>");
11943 OMP_CLAUSE_REDUCTION_TASK (*pc) = 0;
11944 }
11945 pc = &OMP_CLAUSE_CHAIN (*pc);
11946 break;
11947 case OMP_CLAUSE_LASTPRIVATE:
11948 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
11949 {
11950 tree t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
11951 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
11952 if (OMP_CLAUSE_DECL (*pc) == TREE_OPERAND (t, 0))
11953 break;
11954 if (OMP_FOR_ORIG_DECLS (for_stmt)
11955 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt),
11956 i)) == TREE_LIST
11957 && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt),
11958 i)))
11959 {
11960 tree orig = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
11961 if (OMP_CLAUSE_DECL (*pc) == TREE_PURPOSE (orig))
11962 break;
11963 }
11964 }
11965 if (i == TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)))
11966 {
11967 error_at (OMP_CLAUSE_LOCATION (*pc),
11968 "%<lastprivate%> clause on a %<loop%> construct refers "
11969 "to a variable %qD which is not the loop iterator",
11970 OMP_CLAUSE_DECL (*pc));
11971 *pc = OMP_CLAUSE_CHAIN (*pc);
11972 break;
11973 }
11974 pc = &OMP_CLAUSE_CHAIN (*pc);
11975 break;
11976 default:
11977 pc = &OMP_CLAUSE_CHAIN (*pc);
11978 break;
11979 }
11980
11981 TREE_SET_CODE (for_stmt, OMP_SIMD);
11982
11983 int last;
11984 switch (kind)
11985 {
11986 case OMP_CLAUSE_BIND_THREAD: last = 0; break;
11987 case OMP_CLAUSE_BIND_PARALLEL: last = 1; break;
11988 case OMP_CLAUSE_BIND_TEAMS: last = 2; break;
11989 }
11990 for (int pass = 1; pass <= last; pass++)
11991 {
11992 if (pass == 2)
11993 {
11994 tree bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
11995 append_to_statement_list (*expr_p, &BIND_EXPR_BODY (bind));
11996 *expr_p = make_node (OMP_PARALLEL);
11997 TREE_TYPE (*expr_p) = void_type_node;
11998 OMP_PARALLEL_BODY (*expr_p) = bind;
11999 OMP_PARALLEL_COMBINED (*expr_p) = 1;
12000 SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (for_stmt));
12001 tree *pc = &OMP_PARALLEL_CLAUSES (*expr_p);
12002 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
12003 if (OMP_FOR_ORIG_DECLS (for_stmt)
12004 && (TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i))
12005 == TREE_LIST))
12006 {
12007 tree elt = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
12008 if (TREE_PURPOSE (elt) && TREE_VALUE (elt))
12009 {
12010 *pc = build_omp_clause (UNKNOWN_LOCATION,
12011 OMP_CLAUSE_FIRSTPRIVATE);
12012 OMP_CLAUSE_DECL (*pc) = TREE_VALUE (elt);
12013 pc = &OMP_CLAUSE_CHAIN (*pc);
12014 }
12015 }
12016 }
12017 tree t = make_node (pass == 2 ? OMP_DISTRIBUTE : OMP_FOR);
12018 tree *pc = &OMP_FOR_CLAUSES (t);
12019 TREE_TYPE (t) = void_type_node;
12020 OMP_FOR_BODY (t) = *expr_p;
12021 SET_EXPR_LOCATION (t, EXPR_LOCATION (for_stmt));
12022 for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
12023 switch (OMP_CLAUSE_CODE (c))
12024 {
12025 case OMP_CLAUSE_BIND:
12026 case OMP_CLAUSE_ORDER:
12027 case OMP_CLAUSE_COLLAPSE:
12028 *pc = copy_node (c);
12029 pc = &OMP_CLAUSE_CHAIN (*pc);
12030 break;
12031 case OMP_CLAUSE_PRIVATE:
12032 case OMP_CLAUSE_FIRSTPRIVATE:
12033 /* Only needed on innermost. */
12034 break;
12035 case OMP_CLAUSE_LASTPRIVATE:
12036 if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c) && pass != last)
12037 {
12038 *pc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
12039 OMP_CLAUSE_FIRSTPRIVATE);
12040 OMP_CLAUSE_DECL (*pc) = OMP_CLAUSE_DECL (c);
12041 lang_hooks.decls.omp_finish_clause (*pc, NULL);
12042 pc = &OMP_CLAUSE_CHAIN (*pc);
12043 }
12044 *pc = copy_node (c);
12045 OMP_CLAUSE_LASTPRIVATE_STMT (*pc) = NULL_TREE;
12046 TREE_TYPE (*pc) = unshare_expr (TREE_TYPE (c));
12047 if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c))
12048 {
12049 if (pass != last)
12050 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (*pc) = 1;
12051 else
12052 lang_hooks.decls.omp_finish_clause (*pc, NULL);
12053 OMP_CLAUSE_LASTPRIVATE_LOOP_IV (*pc) = 0;
12054 }
12055 pc = &OMP_CLAUSE_CHAIN (*pc);
12056 break;
12057 case OMP_CLAUSE_REDUCTION:
12058 *pc = copy_node (c);
12059 OMP_CLAUSE_DECL (*pc) = unshare_expr (OMP_CLAUSE_DECL (c));
12060 TREE_TYPE (*pc) = unshare_expr (TREE_TYPE (c));
12061 OMP_CLAUSE_REDUCTION_INIT (*pc)
12062 = unshare_expr (OMP_CLAUSE_REDUCTION_INIT (c));
12063 OMP_CLAUSE_REDUCTION_MERGE (*pc)
12064 = unshare_expr (OMP_CLAUSE_REDUCTION_MERGE (c));
12065 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc))
12066 {
12067 OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc)
12068 = copy_node (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c));
12069 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc))
12070 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc)
12071 = copy_node (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c));
12072 tree nc = *pc;
12073 tree data[2] = { c, nc };
12074 walk_tree_without_duplicates (&OMP_CLAUSE_REDUCTION_INIT (nc),
12075 replace_reduction_placeholders,
12076 data);
12077 walk_tree_without_duplicates (&OMP_CLAUSE_REDUCTION_MERGE (nc),
12078 replace_reduction_placeholders,
12079 data);
12080 }
12081 pc = &OMP_CLAUSE_CHAIN (*pc);
12082 break;
12083 default:
12084 gcc_unreachable ();
12085 }
12086 *pc = NULL_TREE;
12087 *expr_p = t;
12088 }
12089 return gimplify_omp_for (expr_p, pre_p);
12090 }
12091
12092
12093 /* Helper function of optimize_target_teams, find OMP_TEAMS inside
12094 of OMP_TARGET's body. */
12095
12096 static tree
12097 find_omp_teams (tree *tp, int *walk_subtrees, void *)
12098 {
12099 *walk_subtrees = 0;
12100 switch (TREE_CODE (*tp))
12101 {
12102 case OMP_TEAMS:
12103 return *tp;
12104 case BIND_EXPR:
12105 case STATEMENT_LIST:
12106 *walk_subtrees = 1;
12107 break;
12108 default:
12109 break;
12110 }
12111 return NULL_TREE;
12112 }
12113
12114 /* Helper function of optimize_target_teams, determine if the expression
12115 can be computed safely before the target construct on the host. */
12116
static tree
computable_teams_clause (tree *tp, int *walk_subtrees, void *)
{
  splay_tree_node n;

  /* Types themselves need no evaluation; skip them and their operands.  */
  if (TYPE_P (*tp))
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }
  switch (TREE_CODE (*tp))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      *walk_subtrees = 0;
      /* Only plain integral decls without value-exprs, TLS, side effects
	 or volatility can be read on the host before the region.  */
      if (error_operand_p (*tp)
	  || !INTEGRAL_TYPE_P (TREE_TYPE (*tp))
	  || DECL_HAS_VALUE_EXPR_P (*tp)
	  || DECL_THREAD_LOCAL_P (*tp)
	  || TREE_SIDE_EFFECTS (*tp)
	  || TREE_THIS_VOLATILE (*tp))
	return *tp;
      /* Globals marked "omp declare target" (or ... link) live on the
	 device side, so their host value is not usable here.  */
      if (is_global_var (*tp)
	  && (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (*tp))
	      || lookup_attribute ("omp declare target link",
				   DECL_ATTRIBUTES (*tp))))
	return *tp;
      /* A function-local variable not yet seen in any BIND_EXPR has no
	 storage set up at this point, so it cannot be evaluated early.  */
      if (VAR_P (*tp)
	  && !DECL_SEEN_IN_BIND_EXPR_P (*tp)
	  && !is_global_var (*tp)
	  && decl_function_context (*tp) == current_function_decl)
	return *tp;
      /* Consult the data-sharing recorded for the target context:
	 firstprivate (explicit, or implicit via the scalar defaultmap)
	 and map(always,to:)-style mappings are OK; anything else means
	 the host value may not match what the region sees.  */
      n = splay_tree_lookup (gimplify_omp_ctxp->variables,
			     (splay_tree_key) *tp);
      if (n == NULL)
	{
	  if (gimplify_omp_ctxp->defaultmap[GDMK_SCALAR] & GOVD_FIRSTPRIVATE)
	    return NULL_TREE;
	  return *tp;
	}
      else if (n->value & GOVD_LOCAL)
	return *tp;
      else if (n->value & GOVD_FIRSTPRIVATE)
	return NULL_TREE;
      else if ((n->value & (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
	       == (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
	return NULL_TREE;
      return *tp;
    case INTEGER_CST:
      /* Integral constants are trivially computable.  */
      if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
	return *tp;
      return NULL_TREE;
    case TARGET_EXPR:
      /* A TARGET_EXPR is acceptable only as a bare slot reference;
	 recurse on the slot itself.  */
      if (TARGET_EXPR_INITIAL (*tp)
	  || TREE_CODE (TARGET_EXPR_SLOT (*tp)) != VAR_DECL)
	return *tp;
      return computable_teams_clause (&TARGET_EXPR_SLOT (*tp),
				      walk_subtrees, NULL);
    /* Allow some reasonable subset of integral arithmetics.  */
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
    case NEGATE_EXPR:
    case ABS_EXPR:
    case BIT_NOT_EXPR:
    case NON_LVALUE_EXPR:
    CASE_CONVERT:
      if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
	return *tp;
      return NULL_TREE;
    /* And disallow anything else, except for comparisons.  */
    default:
      if (COMPARISON_CLASS_P (*tp))
	return NULL_TREE;
      return *tp;
    }
}
12212
12213 /* Try to determine if the num_teams and/or thread_limit expressions
12214 can have their values determined already before entering the
12215 target construct.
12216 INTEGER_CSTs trivially are,
12217 integral decls that are firstprivate (explicitly or implicitly)
12218 or explicitly map(always, to:) or map(always, tofrom:) on the target
12219 region too, and expressions involving simple arithmetics on those
12220 too, function calls are not ok, dereferencing something neither etc.
12221 Add NUM_TEAMS and THREAD_LIMIT clauses to the OMP_CLAUSES of
12222 EXPR based on what we find:
12223 0 stands for clause not specified at all, use implementation default
12224 -1 stands for value that can't be determined easily before entering
12225 the target construct.
12226 If teams construct is not present at all, use 1 for num_teams
12227 and 0 for thread_limit (only one team is involved, and the thread
   limit is implementation defined).  */
12229
static void
optimize_target_teams (tree target, gimple_seq *pre_p)
{
  tree body = OMP_BODY (target);
  tree teams = walk_tree (&body, find_omp_teams, NULL, NULL);
  /* 0 = clause absent (implementation default); replaced below.  */
  tree num_teams = integer_zero_node;
  tree thread_limit = integer_zero_node;
  location_t num_teams_loc = EXPR_LOCATION (target);
  location_t thread_limit_loc = EXPR_LOCATION (target);
  tree c, *p, expr;
  struct gimplify_omp_ctx *target_ctx = gimplify_omp_ctxp;

  if (teams == NULL_TREE)
    /* No teams construct inside the target: exactly one team runs.  */
    num_teams = integer_one_node;
  else
    for (c = OMP_TEAMS_CLAUSES (teams); c; c = OMP_CLAUSE_CHAIN (c))
      {
	/* P points at whichever of the two values this clause sets.  */
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NUM_TEAMS)
	  {
	    p = &num_teams;
	    num_teams_loc = OMP_CLAUSE_LOCATION (c);
	  }
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREAD_LIMIT)
	  {
	    p = &thread_limit;
	    thread_limit_loc = OMP_CLAUSE_LOCATION (c);
	  }
	else
	  continue;
	expr = OMP_CLAUSE_OPERAND (c, 0);
	/* Constants need no further work.  */
	if (TREE_CODE (expr) == INTEGER_CST)
	  {
	    *p = expr;
	    continue;
	  }
	/* -1 = value not computable before entering the construct.  */
	if (walk_tree (&expr, computable_teams_clause, NULL, NULL))
	  {
	    *p = integer_minus_one_node;
	    continue;
	  }
	*p = expr;
	/* Gimplify the expression in the context enclosing the target,
	   since it is evaluated on the host before the region starts.  */
	gimplify_omp_ctxp = gimplify_omp_ctxp->outer_context;
	if (gimplify_expr (p, pre_p, NULL, is_gimple_val, fb_rvalue, false)
	    == GS_ERROR)
	  {
	    gimplify_omp_ctxp = target_ctx;
	    *p = integer_minus_one_node;
	    continue;
	  }
	gimplify_omp_ctxp = target_ctx;
	/* Write the gimplified value back onto the teams clause, unless
	   it is a decl or TARGET_EXPR that can stand as-is.  */
	if (!DECL_P (expr) && TREE_CODE (expr) != TARGET_EXPR)
	  OMP_CLAUSE_OPERAND (c, 0) = *p;
      }
  /* Attach the computed values as clauses on the target itself.  */
  c = build_omp_clause (thread_limit_loc, OMP_CLAUSE_THREAD_LIMIT);
  OMP_CLAUSE_THREAD_LIMIT_EXPR (c) = thread_limit;
  OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
  OMP_TARGET_CLAUSES (target) = c;
  c = build_omp_clause (num_teams_loc, OMP_CLAUSE_NUM_TEAMS);
  OMP_CLAUSE_NUM_TEAMS_EXPR (c) = num_teams;
  OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
  OMP_TARGET_CLAUSES (target) = c;
}
12292
12293 /* Gimplify the gross structure of several OMP constructs. */
12294
static void
gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
{
  tree expr = *expr_p;
  gimple *stmt;
  gimple_seq body = NULL;
  enum omp_region_type ort;

  /* Map the tree code of the construct to its OMP region type.  */
  switch (TREE_CODE (expr))
    {
    case OMP_SECTIONS:
    case OMP_SINGLE:
      ort = ORT_WORKSHARE;
      break;
    case OMP_TARGET:
      ort = OMP_TARGET_COMBINED (expr) ? ORT_COMBINED_TARGET : ORT_TARGET;
      break;
    case OACC_KERNELS:
      ort = ORT_ACC_KERNELS;
      break;
    case OACC_PARALLEL:
      ort = ORT_ACC_PARALLEL;
      break;
    case OACC_DATA:
      ort = ORT_ACC_DATA;
      break;
    case OMP_TARGET_DATA:
      ort = ORT_TARGET_DATA;
      break;
    case OMP_TEAMS:
      ort = OMP_TEAMS_COMBINED (expr) ? ORT_COMBINED_TEAMS : ORT_TEAMS;
      /* A teams region not nested in (an explicit) target runs on
	 the host.  */
      if (gimplify_omp_ctxp == NULL
	  || gimplify_omp_ctxp->region_type == ORT_IMPLICIT_TARGET)
	ort = (enum omp_region_type) (ort | ORT_HOST_TEAMS);
      break;
    case OACC_HOST_DATA:
      ort = ORT_ACC_HOST_DATA;
      break;
    default:
      gcc_unreachable ();
    }

  /* Inside OpenMP (non-ACC) constructs, reset the in_omp_construct flag
     for the body and restore it afterwards.  */
  bool save_in_omp_construct = in_omp_construct;
  if ((ort & ORT_ACC) == 0)
    in_omp_construct = false;
  gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ort,
			     TREE_CODE (expr));
  if (TREE_CODE (expr) == OMP_TARGET)
    optimize_target_teams (expr, pre_p);
  if ((ort & (ORT_TARGET | ORT_TARGET_DATA)) != 0
      || (ort & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
    {
      /* Target-ish regions and host teams get their own gimplification
	 context for the body.  */
      push_gimplify_context ();
      gimple *g = gimplify_and_return_first (OMP_BODY (expr), &body);
      if (gimple_code (g) == GIMPLE_BIND)
	pop_gimplify_context (g);
      else
	pop_gimplify_context (NULL);
      if ((ort & ORT_TARGET_DATA) != 0)
	{
	  /* Structured data regions need a matching "end data" runtime
	     call; wrap the body in try/finally so it runs even on
	     abnormal exit.  */
	  enum built_in_function end_ix;
	  switch (TREE_CODE (expr))
	    {
	    case OACC_DATA:
	    case OACC_HOST_DATA:
	      end_ix = BUILT_IN_GOACC_DATA_END;
	      break;
	    case OMP_TARGET_DATA:
	      end_ix = BUILT_IN_GOMP_TARGET_END_DATA;
	      break;
	    default:
	      gcc_unreachable ();
	    }
	  tree fn = builtin_decl_explicit (end_ix);
	  g = gimple_build_call (fn, 0);
	  gimple_seq cleanup = NULL;
	  gimple_seq_add_stmt (&cleanup, g);
	  g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
	  body = NULL;
	  gimple_seq_add_stmt (&body, g);
	}
    }
  else
    gimplify_and_add (OMP_BODY (expr), &body);
  gimplify_adjust_omp_clauses (pre_p, body, &OMP_CLAUSES (expr),
			       TREE_CODE (expr));
  in_omp_construct = save_in_omp_construct;

  /* Build the GIMPLE statement for the construct.  */
  switch (TREE_CODE (expr))
    {
    case OACC_DATA:
      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_DATA,
				      OMP_CLAUSES (expr));
      break;
    case OACC_KERNELS:
      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_KERNELS,
				      OMP_CLAUSES (expr));
      break;
    case OACC_HOST_DATA:
      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_HOST_DATA,
				      OMP_CLAUSES (expr));
      break;
    case OACC_PARALLEL:
      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_PARALLEL,
				      OMP_CLAUSES (expr));
      break;
    case OMP_SECTIONS:
      stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
      break;
    case OMP_SINGLE:
      stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
      break;
    case OMP_TARGET:
      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_REGION,
				      OMP_CLAUSES (expr));
      break;
    case OMP_TARGET_DATA:
      /* Put use_device_{ptr,addr} clauses last, as map clauses are supposed
	 to be evaluated before the use_device_{ptr,addr} clauses if they
	 refer to the same variables.  */
      {
	tree use_device_clauses;
	tree *pc, *uc = &use_device_clauses;
	/* Unlink the use_device_* clauses into a separate list, then
	   re-append that list after the remaining clauses.  */
	for (pc = &OMP_CLAUSES (expr); *pc; )
	  if (OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_USE_DEVICE_PTR
	      || OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_USE_DEVICE_ADDR)
	    {
	      *uc = *pc;
	      *pc = OMP_CLAUSE_CHAIN (*pc);
	      uc = &OMP_CLAUSE_CHAIN (*uc);
	    }
	  else
	    pc = &OMP_CLAUSE_CHAIN (*pc);
	*uc = NULL_TREE;
	*pc = use_device_clauses;
	stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_DATA,
					OMP_CLAUSES (expr));
      }
      break;
    case OMP_TEAMS:
      stmt = gimple_build_omp_teams (body, OMP_CLAUSES (expr));
      if ((ort & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
	gimple_omp_teams_set_host (as_a <gomp_teams *> (stmt), true);
      break;
    default:
      gcc_unreachable ();
    }

  gimplify_seq_add_stmt (pre_p, stmt);
  *expr_p = NULL_TREE;
}
12446
12447 /* Gimplify the gross structure of OpenACC enter/exit data, update, and OpenMP
12448 target update constructs. */
12449
static void
gimplify_omp_target_update (tree *expr_p, gimple_seq *pre_p)
{
  tree expr = *expr_p;
  int kind;
  gomp_target *stmt;
  enum omp_region_type ort = ORT_WORKSHARE;

  /* Select the GIMPLE_OMP_TARGET subcode and region type for the
     standalone directive.  */
  switch (TREE_CODE (expr))
    {
    case OACC_ENTER_DATA:
    case OACC_EXIT_DATA:
      kind = GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA;
      ort = ORT_ACC;
      break;
    case OACC_UPDATE:
      kind = GF_OMP_TARGET_KIND_OACC_UPDATE;
      ort = ORT_ACC;
      break;
    case OMP_TARGET_UPDATE:
      kind = GF_OMP_TARGET_KIND_UPDATE;
      break;
    case OMP_TARGET_ENTER_DATA:
      kind = GF_OMP_TARGET_KIND_ENTER_DATA;
      break;
    case OMP_TARGET_EXIT_DATA:
      kind = GF_OMP_TARGET_KIND_EXIT_DATA;
      break;
    default:
      gcc_unreachable ();
    }
  gimplify_scan_omp_clauses (&OMP_STANDALONE_CLAUSES (expr), pre_p,
			     ort, TREE_CODE (expr));
  gimplify_adjust_omp_clauses (pre_p, NULL, &OMP_STANDALONE_CLAUSES (expr),
			       TREE_CODE (expr));
  if (TREE_CODE (expr) == OACC_UPDATE
      && omp_find_clause (OMP_STANDALONE_CLAUSES (expr),
			  OMP_CLAUSE_IF_PRESENT))
    {
      /* The runtime uses GOMP_MAP_{TO,FROM} to denote the if_present
	 clause.  */
      for (tree c = OMP_STANDALONE_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP)
	  switch (OMP_CLAUSE_MAP_KIND (c))
	    {
	    case GOMP_MAP_FORCE_TO:
	      OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TO);
	      break;
	    case GOMP_MAP_FORCE_FROM:
	      OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FROM);
	      break;
	    default:
	      break;
	    }
    }
  else if (TREE_CODE (expr) == OACC_EXIT_DATA
	   && omp_find_clause (OMP_STANDALONE_CLAUSES (expr),
			       OMP_CLAUSE_FINALIZE))
    {
      /* Use GOMP_MAP_DELETE/GOMP_MAP_FORCE_FROM to denote that "finalize"
	 semantics apply to all mappings of this OpenACC directive.  */
      bool finalize_marked = false;
      for (tree c = OMP_STANDALONE_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP)
	  switch (OMP_CLAUSE_MAP_KIND (c))
	    {
	    case GOMP_MAP_FROM:
	      OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FORCE_FROM);
	      finalize_marked = true;
	      break;
	    case GOMP_MAP_RELEASE:
	      OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_DELETE);
	      finalize_marked = true;
	      break;
	    default:
	      /* Check consistency: libgomp relies on the very first data
		 mapping clause being marked, so make sure we did that before
		 any other mapping clauses.  */
	      gcc_assert (finalize_marked);
	      break;
	    }
    }
  /* A standalone directive has no body: build the target stmt with a
     NULL sequence.  */
  stmt = gimple_build_omp_target (NULL, kind, OMP_STANDALONE_CLAUSES (expr));

  gimplify_seq_add_stmt (pre_p, stmt);
  *expr_p = NULL_TREE;
}
12537
12538 /* A subroutine of gimplify_omp_atomic. The front end is supposed to have
12539 stabilized the lhs of the atomic operation as *ADDR. Return true if
12540 EXPR is this stabilized form. */
12541
static bool
goa_lhs_expr_p (tree expr, tree addr)
{
  /* Also include casts to other type variants.  The C front end is fond
     of adding these for e.g. volatile variables.  This is like
     STRIP_TYPE_NOPS but includes the main variant lookup.  */
  STRIP_USELESS_TYPE_CONVERSION (expr);

  if (TREE_CODE (expr) == INDIRECT_REF)
    {
      expr = TREE_OPERAND (expr, 0);
      /* Strip matching layers of conversions from both EXPR and ADDR
	 in lockstep, as long as the codes and types still agree.  */
      while (expr != addr
	     && (CONVERT_EXPR_P (expr)
		 || TREE_CODE (expr) == NON_LVALUE_EXPR)
	     && TREE_CODE (expr) == TREE_CODE (addr)
	     && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr)))
	{
	  expr = TREE_OPERAND (expr, 0);
	  addr = TREE_OPERAND (addr, 0);
	}
      if (expr == addr)
	return true;
      /* Accept &x dereferenced against &x even when the two ADDR_EXPR
	 nodes are distinct, as long as they name the same object.  */
      return (TREE_CODE (addr) == ADDR_EXPR
	      && TREE_CODE (expr) == ADDR_EXPR
	      && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
    }
  /* EXPR may also be the object whose address ADDR takes directly.  */
  if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
    return true;
  return false;
}
12572
12573 /* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR. If an
12574 expression does not involve the lhs, evaluate it into a temporary.
12575 Return 1 if the lhs appeared as a subexpression, 0 if it did not,
12576 or -1 if an error was encountered. */
12577
static int
goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
		    tree lhs_var)
{
  tree expr = *expr_p;
  int saw_lhs;

  /* The whole expression is the atomic lhs: substitute the variable.  */
  if (goa_lhs_expr_p (expr, lhs_addr))
    {
      *expr_p = lhs_var;
      return 1;
    }
  /* Already a gimple value, nothing to stabilize.  */
  if (is_gimple_val (expr))
    return 0;

  /* Recurse into the operands of the expression forms we understand,
     accumulating whether any subexpression mentioned the lhs.  */
  saw_lhs = 0;
  switch (TREE_CODE_CLASS (TREE_CODE (expr)))
    {
    case tcc_binary:
    case tcc_comparison:
      saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
				     lhs_var);
      /* FALLTHRU */
    case tcc_unary:
      saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
				     lhs_var);
      break;
    case tcc_expression:
      switch (TREE_CODE (expr))
	{
	case TRUTH_ANDIF_EXPR:
	case TRUTH_ORIF_EXPR:
	case TRUTH_AND_EXPR:
	case TRUTH_OR_EXPR:
	case TRUTH_XOR_EXPR:
	case BIT_INSERT_EXPR:
	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
					 lhs_addr, lhs_var);
	  /* FALLTHRU */
	case TRUTH_NOT_EXPR:
	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
					 lhs_addr, lhs_var);
	  break;
	case COMPOUND_EXPR:
	  /* Break out any preevaluations from cp_build_modify_expr.  */
	  for (; TREE_CODE (expr) == COMPOUND_EXPR;
	       expr = TREE_OPERAND (expr, 1))
	    gimplify_stmt (&TREE_OPERAND (expr, 0), pre_p);
	  *expr_p = expr;
	  return goa_stabilize_expr (expr_p, pre_p, lhs_addr, lhs_var);
	default:
	  break;
	}
      break;
    case tcc_reference:
      if (TREE_CODE (expr) == BIT_FIELD_REF)
	saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
				       lhs_addr, lhs_var);
      break;
    default:
      break;
    }

  /* An expression not involving the lhs is hoisted into a temporary
     evaluated before the atomic operation.  */
  if (saw_lhs == 0)
    {
      enum gimplify_status gs;
      gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
      if (gs != GS_ALL_DONE)
	saw_lhs = -1;
    }

  return saw_lhs;
}
12651
12652 /* Gimplify an OMP_ATOMIC statement. */
12653
static enum gimplify_status
gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
{
  tree addr = TREE_OPERAND (*expr_p, 0);
  /* Atomic reads have no rhs operand.  */
  tree rhs = TREE_CODE (*expr_p) == OMP_ATOMIC_READ
	     ? NULL : TREE_OPERAND (*expr_p, 1);
  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
  tree tmp_load;
  gomp_atomic_load *loadstmt;
  gomp_atomic_store *storestmt;

  /* TMP_LOAD receives the atomically loaded value; occurrences of the
     lhs inside RHS are replaced by it.  */
  tmp_load = create_tmp_reg (type);
  if (rhs && goa_stabilize_expr (&rhs, pre_p, addr, tmp_load) < 0)
    return GS_ERROR;

  if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
      != GS_ALL_DONE)
    return GS_ERROR;

  loadstmt = gimple_build_omp_atomic_load (tmp_load, addr,
					   OMP_ATOMIC_MEMORY_ORDER (*expr_p));
  gimplify_seq_add_stmt (pre_p, loadstmt);
  if (rhs)
    {
      /* BIT_INSERT_EXPR is not valid for non-integral bitfield
	 representatives.  Use BIT_FIELD_REF on the lhs instead.  */
      if (TREE_CODE (rhs) == BIT_INSERT_EXPR
	  && !INTEGRAL_TYPE_P (TREE_TYPE (tmp_load)))
	{
	  tree bitpos = TREE_OPERAND (rhs, 2);
	  tree op1 = TREE_OPERAND (rhs, 1);
	  tree bitsize;
	  tree tmp_store = tmp_load;
	  /* Capture-old must keep TMP_LOAD intact, so store into a
	     copy instead.  */
	  if (TREE_CODE (*expr_p) == OMP_ATOMIC_CAPTURE_OLD)
	    tmp_store = get_initialized_tmp_var (tmp_load, pre_p, NULL);
	  if (INTEGRAL_TYPE_P (TREE_TYPE (op1)))
	    bitsize = bitsize_int (TYPE_PRECISION (TREE_TYPE (op1)));
	  else
	    bitsize = TYPE_SIZE (TREE_TYPE (op1));
	  gcc_assert (TREE_OPERAND (rhs, 0) == tmp_load);
	  tree t = build2_loc (EXPR_LOCATION (rhs),
			       MODIFY_EXPR, void_type_node,
			       build3_loc (EXPR_LOCATION (rhs), BIT_FIELD_REF,
					   TREE_TYPE (op1), tmp_store, bitsize,
					   bitpos), op1);
	  gimplify_and_add (t, pre_p);
	  rhs = tmp_store;
	}
      if (gimplify_expr (&rhs, pre_p, NULL, is_gimple_val, fb_rvalue)
	  != GS_ALL_DONE)
	return GS_ERROR;
    }

  /* For a plain read, store back the just-loaded value.  */
  if (TREE_CODE (*expr_p) == OMP_ATOMIC_READ)
    rhs = tmp_load;
  storestmt
    = gimple_build_omp_atomic_store (rhs, OMP_ATOMIC_MEMORY_ORDER (*expr_p));
  gimplify_seq_add_stmt (pre_p, storestmt);
  /* Decide which value (if any) the whole expression produces, and mark
     the corresponding statement as needing a value.  */
  switch (TREE_CODE (*expr_p))
    {
    case OMP_ATOMIC_READ:
    case OMP_ATOMIC_CAPTURE_OLD:
      *expr_p = tmp_load;
      gimple_omp_atomic_set_need_value (loadstmt);
      break;
    case OMP_ATOMIC_CAPTURE_NEW:
      *expr_p = rhs;
      gimple_omp_atomic_set_need_value (storestmt);
      break;
    default:
      *expr_p = NULL;
      break;
    }

  return GS_ALL_DONE;
}
12730
12731 /* Gimplify a TRANSACTION_EXPR. This involves gimplification of the
12732 body, and adding some EH bits. */
12733
12734 static enum gimplify_status
12735 gimplify_transaction (tree *expr_p, gimple_seq *pre_p)
12736 {
12737 tree expr = *expr_p, temp, tbody = TRANSACTION_EXPR_BODY (expr);
12738 gimple *body_stmt;
12739 gtransaction *trans_stmt;
12740 gimple_seq body = NULL;
12741 int subcode = 0;
12742
12743 /* Wrap the transaction body in a BIND_EXPR so we have a context
12744 where to put decls for OMP. */
12745 if (TREE_CODE (tbody) != BIND_EXPR)
12746 {
12747 tree bind = build3 (BIND_EXPR, void_type_node, NULL, tbody, NULL);
12748 TREE_SIDE_EFFECTS (bind) = 1;
12749 SET_EXPR_LOCATION (bind, EXPR_LOCATION (tbody));
12750 TRANSACTION_EXPR_BODY (expr) = bind;
12751 }
12752
12753 push_gimplify_context ();
12754 temp = voidify_wrapper_expr (*expr_p, NULL);
12755
12756 body_stmt = gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr), &body);
12757 pop_gimplify_context (body_stmt);
12758
12759 trans_stmt = gimple_build_transaction (body);
12760 if (TRANSACTION_EXPR_OUTER (expr))
12761 subcode = GTMA_IS_OUTER;
12762 else if (TRANSACTION_EXPR_RELAXED (expr))
12763 subcode = GTMA_IS_RELAXED;
12764 gimple_transaction_set_subcode (trans_stmt, subcode);
12765
12766 gimplify_seq_add_stmt (pre_p, trans_stmt);
12767
12768 if (temp)
12769 {
12770 *expr_p = temp;
12771 return GS_OK;
12772 }
12773
12774 *expr_p = NULL_TREE;
12775 return GS_ALL_DONE;
12776 }
12777
12778 /* Gimplify an OMP_ORDERED construct. EXPR is the tree version. BODY
12779 is the OMP_BODY of the original EXPR (which has already been
12780 gimplified so it's not present in the EXPR).
12781
12782 Return the gimplified GIMPLE_OMP_ORDERED tuple. */
12783
static gimple *
gimplify_omp_ordered (tree expr, gimple_seq body)
{
  tree c, decls;
  int failures = 0;
  unsigned int i;
  tree source_c = NULL_TREE;
  tree sink_c = NULL_TREE;

  if (gimplify_omp_ctxp)
    {
      for (c = OMP_ORDERED_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
	/* depend(sink:)/depend(source) are only meaningful when the
	   enclosing loop recorded its iteration variables
	   (i.e. had an ordered(n) clause).  */
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
	    && gimplify_omp_ctxp->loop_iter_var.is_empty ()
	    && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK
		|| OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE))
	  {
	    error_at (OMP_CLAUSE_LOCATION (c),
		      "%<ordered%> construct with %<depend%> clause must be "
		      "closely nested inside a loop with %<ordered%> clause "
		      "with a parameter");
	    failures++;
	  }
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
		 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
	  {
	    bool fail = false;
	    /* loop_iter_var stores pairs; even slots hold the original
	       iteration variables, odd slots their replacements.  Check
	       each sink variable against the corresponding original and
	       substitute the replacement on success.  */
	    for (decls = OMP_CLAUSE_DECL (c), i = 0;
		 decls && TREE_CODE (decls) == TREE_LIST;
		 decls = TREE_CHAIN (decls), ++i)
	      if (i >= gimplify_omp_ctxp->loop_iter_var.length () / 2)
		continue;
	      else if (TREE_VALUE (decls)
		       != gimplify_omp_ctxp->loop_iter_var[2 * i])
		{
		  error_at (OMP_CLAUSE_LOCATION (c),
			    "variable %qE is not an iteration "
			    "of outermost loop %d, expected %qE",
			    TREE_VALUE (decls), i + 1,
			    gimplify_omp_ctxp->loop_iter_var[2 * i]);
		  fail = true;
		  failures++;
		}
	      else
		TREE_VALUE (decls)
		  = gimplify_omp_ctxp->loop_iter_var[2 * i + 1];
	    /* The sink vector must name exactly as many variables as
	       there are ordered loop dimensions.  */
	    if (!fail && i != gimplify_omp_ctxp->loop_iter_var.length () / 2)
	      {
		error_at (OMP_CLAUSE_LOCATION (c),
			  "number of variables in %<depend%> clause with "
			  "%<sink%> modifier does not match number of "
			  "iteration variables");
		failures++;
	      }
	    sink_c = c;
	  }
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
		 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE)
	  {
	    /* At most one depend(source) clause is permitted.  */
	    if (source_c)
	      {
		error_at (OMP_CLAUSE_LOCATION (c),
			  "more than one %<depend%> clause with %<source%> "
			  "modifier on an %<ordered%> construct");
		failures++;
	      }
	    else
	      source_c = c;
	  }
    }
  /* source and sink modifiers are mutually exclusive on one construct.  */
  if (source_c && sink_c)
    {
      error_at (OMP_CLAUSE_LOCATION (source_c),
		"%<depend%> clause with %<source%> modifier specified "
		"together with %<depend%> clauses with %<sink%> modifier "
		"on the same construct");
      failures++;
    }

  /* On any diagnostic, replace the construct with a no-op.  */
  if (failures)
    return gimple_build_nop ();
  return gimple_build_omp_ordered (body, OMP_ORDERED_CLAUSES (expr));
}
12867
12868 /* Convert the GENERIC expression tree *EXPR_P to GIMPLE. If the
12869 expression produces a value to be used as an operand inside a GIMPLE
12870 statement, the value will be stored back in *EXPR_P. This value will
12871 be a tree of class tcc_declaration, tcc_constant, tcc_reference or
12872 an SSA_NAME. The corresponding sequence of GIMPLE statements is
12873 emitted in PRE_P and POST_P.
12874
12875 Additionally, this process may overwrite parts of the input
12876 expression during gimplification. Ideally, it should be
12877 possible to do non-destructive gimplification.
12878
12879 EXPR_P points to the GENERIC expression to convert to GIMPLE. If
12880 the expression needs to evaluate to a value to be used as
12881 an operand in a GIMPLE statement, this value will be stored in
12882 *EXPR_P on exit. This happens when the caller specifies one
12883 of fb_lvalue or fb_rvalue fallback flags.
12884
12885 PRE_P will contain the sequence of GIMPLE statements corresponding
12886 to the evaluation of EXPR and all the side-effects that must
12887 be executed before the main expression. On exit, the last
12888 statement of PRE_P is the core statement being gimplified. For
12889 instance, when gimplifying 'if (++a)' the last statement in
12890 PRE_P will be 'if (t.1)' where t.1 is the result of
12891 pre-incrementing 'a'.
12892
12893 POST_P will contain the sequence of GIMPLE statements corresponding
12894 to the evaluation of all the side-effects that must be executed
12895 after the main expression. If this is NULL, the post
12896 side-effects are stored at the end of PRE_P.
12897
12898 The reason why the output is split in two is to handle post
12899 side-effects explicitly. In some cases, an expression may have
12900 inner and outer post side-effects which need to be emitted in
12901 an order different from the one given by the recursive
12902 traversal. For instance, for the expression (*p--)++ the post
12903 side-effects of '--' must actually occur *after* the post
12904 side-effects of '++'. However, gimplification will first visit
12905 the inner expression, so if a separate POST sequence was not
12906 used, the resulting sequence would be:
12907
12908 1 t.1 = *p
12909 2 p = p - 1
12910 3 t.2 = t.1 + 1
12911 4 *p = t.2
12912
12913 However, the post-decrement operation in line #2 must not be
12914 evaluated until after the store to *p at line #4, so the
12915 correct sequence should be:
12916
12917 1 t.1 = *p
12918 2 t.2 = t.1 + 1
12919 3 *p = t.2
12920 4 p = p - 1
12921
12922 So, by specifying a separate post queue, it is possible
12923 to emit the post side-effects in the correct order.
12924 If POST_P is NULL, an internal queue will be used. Before
12925 returning to the caller, the sequence POST_P is appended to
12926 the main output sequence PRE_P.
12927
12928 GIMPLE_TEST_F points to a function that takes a tree T and
12929 returns nonzero if T is in the GIMPLE form requested by the
12930 caller. The GIMPLE predicates are in gimple.c.
12931
12932 FALLBACK tells the function what sort of a temporary we want if
12933 gimplification cannot produce an expression that complies with
12934 GIMPLE_TEST_F.
12935
12936 fb_none means that no temporary should be generated
12937 fb_rvalue means that an rvalue is OK to generate
12938 fb_lvalue means that an lvalue is OK to generate
12939 fb_either means that either is OK, but an lvalue is preferable.
12940 fb_mayfail means that gimplification may fail (in which case
12941 GS_ERROR will be returned)
12942
12943 The return value is either GS_ERROR or GS_ALL_DONE, since this
12944 function iterates until EXPR is completely gimplified or an error
12945 occurs. */
12946
12947 enum gimplify_status
12948 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
12949 bool (*gimple_test_f) (tree), fallback_t fallback)
12950 {
12951 tree tmp;
12952 gimple_seq internal_pre = NULL;
12953 gimple_seq internal_post = NULL;
12954 tree save_expr;
12955 bool is_statement;
12956 location_t saved_location;
12957 enum gimplify_status ret;
12958 gimple_stmt_iterator pre_last_gsi, post_last_gsi;
12959 tree label;
12960
12961 save_expr = *expr_p;
12962 if (save_expr == NULL_TREE)
12963 return GS_ALL_DONE;
12964
12965 /* If we are gimplifying a top-level statement, PRE_P must be valid. */
12966 is_statement = gimple_test_f == is_gimple_stmt;
12967 if (is_statement)
12968 gcc_assert (pre_p);
12969
12970 /* Consistency checks. */
12971 if (gimple_test_f == is_gimple_reg)
12972 gcc_assert (fallback & (fb_rvalue | fb_lvalue));
12973 else if (gimple_test_f == is_gimple_val
12974 || gimple_test_f == is_gimple_call_addr
12975 || gimple_test_f == is_gimple_condexpr
12976 || gimple_test_f == is_gimple_mem_rhs
12977 || gimple_test_f == is_gimple_mem_rhs_or_call
12978 || gimple_test_f == is_gimple_reg_rhs
12979 || gimple_test_f == is_gimple_reg_rhs_or_call
12980 || gimple_test_f == is_gimple_asm_val
12981 || gimple_test_f == is_gimple_mem_ref_addr)
12982 gcc_assert (fallback & fb_rvalue);
12983 else if (gimple_test_f == is_gimple_min_lval
12984 || gimple_test_f == is_gimple_lvalue)
12985 gcc_assert (fallback & fb_lvalue);
12986 else if (gimple_test_f == is_gimple_addressable)
12987 gcc_assert (fallback & fb_either);
12988 else if (gimple_test_f == is_gimple_stmt)
12989 gcc_assert (fallback == fb_none);
12990 else
12991 {
12992 /* We should have recognized the GIMPLE_TEST_F predicate to
12993 know what kind of fallback to use in case a temporary is
12994 needed to hold the value or address of *EXPR_P. */
12995 gcc_unreachable ();
12996 }
12997
12998 /* We used to check the predicate here and return immediately if it
12999 succeeds. This is wrong; the design is for gimplification to be
13000 idempotent, and for the predicates to only test for valid forms, not
13001 whether they are fully simplified. */
13002 if (pre_p == NULL)
13003 pre_p = &internal_pre;
13004
13005 if (post_p == NULL)
13006 post_p = &internal_post;
13007
13008 /* Remember the last statements added to PRE_P and POST_P. Every
13009 new statement added by the gimplification helpers needs to be
13010 annotated with location information. To centralize the
13011 responsibility, we remember the last statement that had been
13012 added to both queues before gimplifying *EXPR_P. If
13013 gimplification produces new statements in PRE_P and POST_P, those
13014 statements will be annotated with the same location information
13015 as *EXPR_P. */
13016 pre_last_gsi = gsi_last (*pre_p);
13017 post_last_gsi = gsi_last (*post_p);
13018
13019 saved_location = input_location;
13020 if (save_expr != error_mark_node
13021 && EXPR_HAS_LOCATION (*expr_p))
13022 input_location = EXPR_LOCATION (*expr_p);
13023
13024 /* Loop over the specific gimplifiers until the toplevel node
13025 remains the same. */
13026 do
13027 {
13028 /* Strip away as many useless type conversions as possible
13029 at the toplevel. */
13030 STRIP_USELESS_TYPE_CONVERSION (*expr_p);
13031
13032 /* Remember the expr. */
13033 save_expr = *expr_p;
13034
13035 /* Die, die, die, my darling. */
13036 if (error_operand_p (save_expr))
13037 {
13038 ret = GS_ERROR;
13039 break;
13040 }
13041
13042 /* Do any language-specific gimplification. */
13043 ret = ((enum gimplify_status)
13044 lang_hooks.gimplify_expr (expr_p, pre_p, post_p));
13045 if (ret == GS_OK)
13046 {
13047 if (*expr_p == NULL_TREE)
13048 break;
13049 if (*expr_p != save_expr)
13050 continue;
13051 }
13052 else if (ret != GS_UNHANDLED)
13053 break;
13054
13055 /* Make sure that all the cases set 'ret' appropriately. */
13056 ret = GS_UNHANDLED;
13057 switch (TREE_CODE (*expr_p))
13058 {
13059 /* First deal with the special cases. */
13060
13061 case POSTINCREMENT_EXPR:
13062 case POSTDECREMENT_EXPR:
13063 case PREINCREMENT_EXPR:
13064 case PREDECREMENT_EXPR:
13065 ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
13066 fallback != fb_none,
13067 TREE_TYPE (*expr_p));
13068 break;
13069
13070 case VIEW_CONVERT_EXPR:
13071 if ((fallback & fb_rvalue)
13072 && is_gimple_reg_type (TREE_TYPE (*expr_p))
13073 && is_gimple_reg_type (TREE_TYPE (TREE_OPERAND (*expr_p, 0))))
13074 {
13075 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
13076 post_p, is_gimple_val, fb_rvalue);
13077 recalculate_side_effects (*expr_p);
13078 break;
13079 }
13080 /* Fallthru. */
13081
13082 case ARRAY_REF:
13083 case ARRAY_RANGE_REF:
13084 case REALPART_EXPR:
13085 case IMAGPART_EXPR:
13086 case COMPONENT_REF:
13087 ret = gimplify_compound_lval (expr_p, pre_p, post_p,
13088 fallback ? fallback : fb_rvalue);
13089 break;
13090
13091 case COND_EXPR:
13092 ret = gimplify_cond_expr (expr_p, pre_p, fallback);
13093
13094 /* C99 code may assign to an array in a structure value of a
13095 conditional expression, and this has undefined behavior
13096 only on execution, so create a temporary if an lvalue is
13097 required. */
13098 if (fallback == fb_lvalue)
13099 {
13100 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
13101 mark_addressable (*expr_p);
13102 ret = GS_OK;
13103 }
13104 break;
13105
13106 case CALL_EXPR:
13107 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
13108
13109 /* C99 code may assign to an array in a structure returned
13110 from a function, and this has undefined behavior only on
13111 execution, so create a temporary if an lvalue is
13112 required. */
13113 if (fallback == fb_lvalue)
13114 {
13115 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
13116 mark_addressable (*expr_p);
13117 ret = GS_OK;
13118 }
13119 break;
13120
13121 case TREE_LIST:
13122 gcc_unreachable ();
13123
13124 case COMPOUND_EXPR:
13125 ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
13126 break;
13127
13128 case COMPOUND_LITERAL_EXPR:
13129 ret = gimplify_compound_literal_expr (expr_p, pre_p,
13130 gimple_test_f, fallback);
13131 break;
13132
13133 case MODIFY_EXPR:
13134 case INIT_EXPR:
13135 ret = gimplify_modify_expr (expr_p, pre_p, post_p,
13136 fallback != fb_none);
13137 break;
13138
13139 case TRUTH_ANDIF_EXPR:
13140 case TRUTH_ORIF_EXPR:
13141 {
13142 /* Preserve the original type of the expression and the
13143 source location of the outer expression. */
13144 tree org_type = TREE_TYPE (*expr_p);
13145 *expr_p = gimple_boolify (*expr_p);
13146 *expr_p = build3_loc (input_location, COND_EXPR,
13147 org_type, *expr_p,
13148 fold_convert_loc
13149 (input_location,
13150 org_type, boolean_true_node),
13151 fold_convert_loc
13152 (input_location,
13153 org_type, boolean_false_node));
13154 ret = GS_OK;
13155 break;
13156 }
13157
13158 case TRUTH_NOT_EXPR:
13159 {
13160 tree type = TREE_TYPE (*expr_p);
13161 /* The parsers are careful to generate TRUTH_NOT_EXPR
13162 only with operands that are always zero or one.
13163 We do not fold here but handle the only interesting case
13164 manually, as fold may re-introduce the TRUTH_NOT_EXPR. */
13165 *expr_p = gimple_boolify (*expr_p);
13166 if (TYPE_PRECISION (TREE_TYPE (*expr_p)) == 1)
13167 *expr_p = build1_loc (input_location, BIT_NOT_EXPR,
13168 TREE_TYPE (*expr_p),
13169 TREE_OPERAND (*expr_p, 0));
13170 else
13171 *expr_p = build2_loc (input_location, BIT_XOR_EXPR,
13172 TREE_TYPE (*expr_p),
13173 TREE_OPERAND (*expr_p, 0),
13174 build_int_cst (TREE_TYPE (*expr_p), 1));
13175 if (!useless_type_conversion_p (type, TREE_TYPE (*expr_p)))
13176 *expr_p = fold_convert_loc (input_location, type, *expr_p);
13177 ret = GS_OK;
13178 break;
13179 }
13180
13181 case ADDR_EXPR:
13182 ret = gimplify_addr_expr (expr_p, pre_p, post_p);
13183 break;
13184
13185 case ANNOTATE_EXPR:
13186 {
13187 tree cond = TREE_OPERAND (*expr_p, 0);
13188 tree kind = TREE_OPERAND (*expr_p, 1);
13189 tree data = TREE_OPERAND (*expr_p, 2);
13190 tree type = TREE_TYPE (cond);
13191 if (!INTEGRAL_TYPE_P (type))
13192 {
13193 *expr_p = cond;
13194 ret = GS_OK;
13195 break;
13196 }
13197 tree tmp = create_tmp_var (type);
13198 gimplify_arg (&cond, pre_p, EXPR_LOCATION (*expr_p));
13199 gcall *call
13200 = gimple_build_call_internal (IFN_ANNOTATE, 3, cond, kind, data);
13201 gimple_call_set_lhs (call, tmp);
13202 gimplify_seq_add_stmt (pre_p, call);
13203 *expr_p = tmp;
13204 ret = GS_ALL_DONE;
13205 break;
13206 }
13207
13208 case VA_ARG_EXPR:
13209 ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
13210 break;
13211
13212 CASE_CONVERT:
13213 if (IS_EMPTY_STMT (*expr_p))
13214 {
13215 ret = GS_ALL_DONE;
13216 break;
13217 }
13218
13219 if (VOID_TYPE_P (TREE_TYPE (*expr_p))
13220 || fallback == fb_none)
13221 {
13222 /* Just strip a conversion to void (or in void context) and
13223 try again. */
13224 *expr_p = TREE_OPERAND (*expr_p, 0);
13225 ret = GS_OK;
13226 break;
13227 }
13228
13229 ret = gimplify_conversion (expr_p);
13230 if (ret == GS_ERROR)
13231 break;
13232 if (*expr_p != save_expr)
13233 break;
13234 /* FALLTHRU */
13235
13236 case FIX_TRUNC_EXPR:
13237 /* unary_expr: ... | '(' cast ')' val | ... */
13238 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
13239 is_gimple_val, fb_rvalue);
13240 recalculate_side_effects (*expr_p);
13241 break;
13242
13243 case INDIRECT_REF:
13244 {
13245 bool volatilep = TREE_THIS_VOLATILE (*expr_p);
13246 bool notrap = TREE_THIS_NOTRAP (*expr_p);
13247 tree saved_ptr_type = TREE_TYPE (TREE_OPERAND (*expr_p, 0));
13248
13249 *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
13250 if (*expr_p != save_expr)
13251 {
13252 ret = GS_OK;
13253 break;
13254 }
13255
13256 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
13257 is_gimple_reg, fb_rvalue);
13258 if (ret == GS_ERROR)
13259 break;
13260
13261 recalculate_side_effects (*expr_p);
13262 *expr_p = fold_build2_loc (input_location, MEM_REF,
13263 TREE_TYPE (*expr_p),
13264 TREE_OPERAND (*expr_p, 0),
13265 build_int_cst (saved_ptr_type, 0));
13266 TREE_THIS_VOLATILE (*expr_p) = volatilep;
13267 TREE_THIS_NOTRAP (*expr_p) = notrap;
13268 ret = GS_OK;
13269 break;
13270 }
13271
 13272 /* We arrive here through the various re-gimplification paths. */
13273 case MEM_REF:
13274 /* First try re-folding the whole thing. */
13275 tmp = fold_binary (MEM_REF, TREE_TYPE (*expr_p),
13276 TREE_OPERAND (*expr_p, 0),
13277 TREE_OPERAND (*expr_p, 1));
13278 if (tmp)
13279 {
13280 REF_REVERSE_STORAGE_ORDER (tmp)
13281 = REF_REVERSE_STORAGE_ORDER (*expr_p);
13282 *expr_p = tmp;
13283 recalculate_side_effects (*expr_p);
13284 ret = GS_OK;
13285 break;
13286 }
13287 /* Avoid re-gimplifying the address operand if it is already
13288 in suitable form. Re-gimplifying would mark the address
13289 operand addressable. Always gimplify when not in SSA form
13290 as we still may have to gimplify decls with value-exprs. */
13291 if (!gimplify_ctxp || !gimple_in_ssa_p (cfun)
13292 || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p, 0)))
13293 {
13294 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
13295 is_gimple_mem_ref_addr, fb_rvalue);
13296 if (ret == GS_ERROR)
13297 break;
13298 }
13299 recalculate_side_effects (*expr_p);
13300 ret = GS_ALL_DONE;
13301 break;
13302
13303 /* Constants need not be gimplified. */
13304 case INTEGER_CST:
13305 case REAL_CST:
13306 case FIXED_CST:
13307 case STRING_CST:
13308 case COMPLEX_CST:
13309 case VECTOR_CST:
13310 /* Drop the overflow flag on constants, we do not want
13311 that in the GIMPLE IL. */
13312 if (TREE_OVERFLOW_P (*expr_p))
13313 *expr_p = drop_tree_overflow (*expr_p);
13314 ret = GS_ALL_DONE;
13315 break;
13316
13317 case CONST_DECL:
13318 /* If we require an lvalue, such as for ADDR_EXPR, retain the
13319 CONST_DECL node. Otherwise the decl is replaceable by its
13320 value. */
13321 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
13322 if (fallback & fb_lvalue)
13323 ret = GS_ALL_DONE;
13324 else
13325 {
13326 *expr_p = DECL_INITIAL (*expr_p);
13327 ret = GS_OK;
13328 }
13329 break;
13330
13331 case DECL_EXPR:
13332 ret = gimplify_decl_expr (expr_p, pre_p);
13333 break;
13334
13335 case BIND_EXPR:
13336 ret = gimplify_bind_expr (expr_p, pre_p);
13337 break;
13338
13339 case LOOP_EXPR:
13340 ret = gimplify_loop_expr (expr_p, pre_p);
13341 break;
13342
13343 case SWITCH_EXPR:
13344 ret = gimplify_switch_expr (expr_p, pre_p);
13345 break;
13346
13347 case EXIT_EXPR:
13348 ret = gimplify_exit_expr (expr_p);
13349 break;
13350
13351 case GOTO_EXPR:
13352 /* If the target is not LABEL, then it is a computed jump
13353 and the target needs to be gimplified. */
13354 if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
13355 {
13356 ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
13357 NULL, is_gimple_val, fb_rvalue);
13358 if (ret == GS_ERROR)
13359 break;
13360 }
13361 gimplify_seq_add_stmt (pre_p,
13362 gimple_build_goto (GOTO_DESTINATION (*expr_p)));
13363 ret = GS_ALL_DONE;
13364 break;
13365
13366 case PREDICT_EXPR:
13367 gimplify_seq_add_stmt (pre_p,
13368 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
13369 PREDICT_EXPR_OUTCOME (*expr_p)));
13370 ret = GS_ALL_DONE;
13371 break;
13372
13373 case LABEL_EXPR:
13374 ret = gimplify_label_expr (expr_p, pre_p);
13375 label = LABEL_EXPR_LABEL (*expr_p);
13376 gcc_assert (decl_function_context (label) == current_function_decl);
13377
13378 /* If the label is used in a goto statement, or address of the label
13379 is taken, we need to unpoison all variables that were seen so far.
 13380 Doing so would prevent us from reporting false positives. */
13381 if (asan_poisoned_variables
13382 && asan_used_labels != NULL
13383 && asan_used_labels->contains (label))
13384 asan_poison_variables (asan_poisoned_variables, false, pre_p);
13385 break;
13386
13387 case CASE_LABEL_EXPR:
13388 ret = gimplify_case_label_expr (expr_p, pre_p);
13389
13390 if (gimplify_ctxp->live_switch_vars)
13391 asan_poison_variables (gimplify_ctxp->live_switch_vars, false,
13392 pre_p);
13393 break;
13394
13395 case RETURN_EXPR:
13396 ret = gimplify_return_expr (*expr_p, pre_p);
13397 break;
13398
13399 case CONSTRUCTOR:
13400 /* Don't reduce this in place; let gimplify_init_constructor work its
 13401 magic. But if we're just elaborating this for side effects, just
13402 gimplify any element that has side-effects. */
13403 if (fallback == fb_none)
13404 {
13405 unsigned HOST_WIDE_INT ix;
13406 tree val;
13407 tree temp = NULL_TREE;
13408 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p), ix, val)
13409 if (TREE_SIDE_EFFECTS (val))
13410 append_to_statement_list (val, &temp);
13411
13412 *expr_p = temp;
13413 ret = temp ? GS_OK : GS_ALL_DONE;
13414 }
13415 /* C99 code may assign to an array in a constructed
13416 structure or union, and this has undefined behavior only
13417 on execution, so create a temporary if an lvalue is
13418 required. */
13419 else if (fallback == fb_lvalue)
13420 {
13421 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
13422 mark_addressable (*expr_p);
13423 ret = GS_OK;
13424 }
13425 else
13426 ret = GS_ALL_DONE;
13427 break;
13428
13429 /* The following are special cases that are not handled by the
13430 original GIMPLE grammar. */
13431
13432 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
13433 eliminated. */
13434 case SAVE_EXPR:
13435 ret = gimplify_save_expr (expr_p, pre_p, post_p);
13436 break;
13437
13438 case BIT_FIELD_REF:
13439 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
13440 post_p, is_gimple_lvalue, fb_either);
13441 recalculate_side_effects (*expr_p);
13442 break;
13443
13444 case TARGET_MEM_REF:
13445 {
13446 enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;
13447
13448 if (TMR_BASE (*expr_p))
13449 r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
13450 post_p, is_gimple_mem_ref_addr, fb_either);
13451 if (TMR_INDEX (*expr_p))
13452 r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
13453 post_p, is_gimple_val, fb_rvalue);
13454 if (TMR_INDEX2 (*expr_p))
13455 r1 = gimplify_expr (&TMR_INDEX2 (*expr_p), pre_p,
13456 post_p, is_gimple_val, fb_rvalue);
13457 /* TMR_STEP and TMR_OFFSET are always integer constants. */
13458 ret = MIN (r0, r1);
13459 }
13460 break;
13461
13462 case NON_LVALUE_EXPR:
13463 /* This should have been stripped above. */
13464 gcc_unreachable ();
13465
13466 case ASM_EXPR:
13467 ret = gimplify_asm_expr (expr_p, pre_p, post_p);
13468 break;
13469
13470 case TRY_FINALLY_EXPR:
13471 case TRY_CATCH_EXPR:
13472 {
13473 gimple_seq eval, cleanup;
13474 gtry *try_;
13475
13476 /* Calls to destructors are generated automatically in FINALLY/CATCH
13477 block. They should have location as UNKNOWN_LOCATION. However,
13478 gimplify_call_expr will reset these call stmts to input_location
13479 if it finds stmt's location is unknown. To prevent resetting for
13480 destructors, we set the input_location to unknown.
13481 Note that this only affects the destructor calls in FINALLY/CATCH
13482 block, and will automatically reset to its original value by the
13483 end of gimplify_expr. */
13484 input_location = UNKNOWN_LOCATION;
13485 eval = cleanup = NULL;
13486 gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
13487 if (TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
13488 && TREE_CODE (TREE_OPERAND (*expr_p, 1)) == EH_ELSE_EXPR)
13489 {
13490 gimple_seq n = NULL, e = NULL;
13491 gimplify_and_add (TREE_OPERAND (TREE_OPERAND (*expr_p, 1),
13492 0), &n);
13493 gimplify_and_add (TREE_OPERAND (TREE_OPERAND (*expr_p, 1),
13494 1), &e);
13495 if (!gimple_seq_empty_p (n) && !gimple_seq_empty_p (e))
13496 {
13497 geh_else *stmt = gimple_build_eh_else (n, e);
13498 gimple_seq_add_stmt (&cleanup, stmt);
13499 }
13500 }
13501 else
13502 gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
13503 /* Don't create bogus GIMPLE_TRY with empty cleanup. */
13504 if (gimple_seq_empty_p (cleanup))
13505 {
13506 gimple_seq_add_seq (pre_p, eval);
13507 ret = GS_ALL_DONE;
13508 break;
13509 }
13510 try_ = gimple_build_try (eval, cleanup,
13511 TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
13512 ? GIMPLE_TRY_FINALLY
13513 : GIMPLE_TRY_CATCH);
13514 if (EXPR_HAS_LOCATION (save_expr))
13515 gimple_set_location (try_, EXPR_LOCATION (save_expr));
13516 else if (LOCATION_LOCUS (saved_location) != UNKNOWN_LOCATION)
13517 gimple_set_location (try_, saved_location);
13518 if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
13519 gimple_try_set_catch_is_cleanup (try_,
13520 TRY_CATCH_IS_CLEANUP (*expr_p));
13521 gimplify_seq_add_stmt (pre_p, try_);
13522 ret = GS_ALL_DONE;
13523 break;
13524 }
13525
13526 case CLEANUP_POINT_EXPR:
13527 ret = gimplify_cleanup_point_expr (expr_p, pre_p);
13528 break;
13529
13530 case TARGET_EXPR:
13531 ret = gimplify_target_expr (expr_p, pre_p, post_p);
13532 break;
13533
13534 case CATCH_EXPR:
13535 {
13536 gimple *c;
13537 gimple_seq handler = NULL;
13538 gimplify_and_add (CATCH_BODY (*expr_p), &handler);
13539 c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
13540 gimplify_seq_add_stmt (pre_p, c);
13541 ret = GS_ALL_DONE;
13542 break;
13543 }
13544
13545 case EH_FILTER_EXPR:
13546 {
13547 gimple *ehf;
13548 gimple_seq failure = NULL;
13549
13550 gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
13551 ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
13552 gimple_set_no_warning (ehf, TREE_NO_WARNING (*expr_p));
13553 gimplify_seq_add_stmt (pre_p, ehf);
13554 ret = GS_ALL_DONE;
13555 break;
13556 }
13557
13558 case OBJ_TYPE_REF:
13559 {
13560 enum gimplify_status r0, r1;
13561 r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
13562 post_p, is_gimple_val, fb_rvalue);
13563 r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
13564 post_p, is_gimple_val, fb_rvalue);
13565 TREE_SIDE_EFFECTS (*expr_p) = 0;
13566 ret = MIN (r0, r1);
13567 }
13568 break;
13569
13570 case LABEL_DECL:
13571 /* We get here when taking the address of a label. We mark
13572 the label as "forced"; meaning it can never be removed and
13573 it is a potential target for any computed goto. */
13574 FORCED_LABEL (*expr_p) = 1;
13575 ret = GS_ALL_DONE;
13576 break;
13577
13578 case STATEMENT_LIST:
13579 ret = gimplify_statement_list (expr_p, pre_p);
13580 break;
13581
13582 case WITH_SIZE_EXPR:
13583 {
13584 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
13585 post_p == &internal_post ? NULL : post_p,
13586 gimple_test_f, fallback);
13587 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
13588 is_gimple_val, fb_rvalue);
13589 ret = GS_ALL_DONE;
13590 }
13591 break;
13592
13593 case VAR_DECL:
13594 case PARM_DECL:
13595 ret = gimplify_var_or_parm_decl (expr_p);
13596 break;
13597
13598 case RESULT_DECL:
13599 /* When within an OMP context, notice uses of variables. */
13600 if (gimplify_omp_ctxp)
13601 omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
13602 ret = GS_ALL_DONE;
13603 break;
13604
13605 case DEBUG_EXPR_DECL:
13606 gcc_unreachable ();
13607
13608 case DEBUG_BEGIN_STMT:
13609 gimplify_seq_add_stmt (pre_p,
13610 gimple_build_debug_begin_stmt
13611 (TREE_BLOCK (*expr_p),
13612 EXPR_LOCATION (*expr_p)));
13613 ret = GS_ALL_DONE;
13614 *expr_p = NULL;
13615 break;
13616
13617 case SSA_NAME:
13618 /* Allow callbacks into the gimplifier during optimization. */
13619 ret = GS_ALL_DONE;
13620 break;
13621
13622 case OMP_PARALLEL:
13623 gimplify_omp_parallel (expr_p, pre_p);
13624 ret = GS_ALL_DONE;
13625 break;
13626
13627 case OMP_TASK:
13628 gimplify_omp_task (expr_p, pre_p);
13629 ret = GS_ALL_DONE;
13630 break;
13631
13632 case OMP_FOR:
13633 case OMP_SIMD:
13634 case OMP_DISTRIBUTE:
13635 case OMP_TASKLOOP:
13636 case OACC_LOOP:
13637 ret = gimplify_omp_for (expr_p, pre_p);
13638 break;
13639
13640 case OMP_LOOP:
13641 ret = gimplify_omp_loop (expr_p, pre_p);
13642 break;
13643
13644 case OACC_CACHE:
13645 gimplify_oacc_cache (expr_p, pre_p);
13646 ret = GS_ALL_DONE;
13647 break;
13648
13649 case OACC_DECLARE:
13650 gimplify_oacc_declare (expr_p, pre_p);
13651 ret = GS_ALL_DONE;
13652 break;
13653
13654 case OACC_HOST_DATA:
13655 case OACC_DATA:
13656 case OACC_KERNELS:
13657 case OACC_PARALLEL:
13658 case OMP_SECTIONS:
13659 case OMP_SINGLE:
13660 case OMP_TARGET:
13661 case OMP_TARGET_DATA:
13662 case OMP_TEAMS:
13663 gimplify_omp_workshare (expr_p, pre_p);
13664 ret = GS_ALL_DONE;
13665 break;
13666
13667 case OACC_ENTER_DATA:
13668 case OACC_EXIT_DATA:
13669 case OACC_UPDATE:
13670 case OMP_TARGET_UPDATE:
13671 case OMP_TARGET_ENTER_DATA:
13672 case OMP_TARGET_EXIT_DATA:
13673 gimplify_omp_target_update (expr_p, pre_p);
13674 ret = GS_ALL_DONE;
13675 break;
13676
13677 case OMP_SECTION:
13678 case OMP_MASTER:
13679 case OMP_ORDERED:
13680 case OMP_CRITICAL:
13681 case OMP_SCAN:
13682 {
13683 gimple_seq body = NULL;
13684 gimple *g;
13685 bool saved_in_omp_construct = in_omp_construct;
13686
13687 in_omp_construct = true;
13688 gimplify_and_add (OMP_BODY (*expr_p), &body);
13689 in_omp_construct = saved_in_omp_construct;
13690 switch (TREE_CODE (*expr_p))
13691 {
13692 case OMP_SECTION:
13693 g = gimple_build_omp_section (body);
13694 break;
13695 case OMP_MASTER:
13696 g = gimple_build_omp_master (body);
13697 break;
13698 case OMP_ORDERED:
13699 g = gimplify_omp_ordered (*expr_p, body);
13700 break;
13701 case OMP_CRITICAL:
13702 gimplify_scan_omp_clauses (&OMP_CRITICAL_CLAUSES (*expr_p),
13703 pre_p, ORT_WORKSHARE, OMP_CRITICAL);
13704 gimplify_adjust_omp_clauses (pre_p, body,
13705 &OMP_CRITICAL_CLAUSES (*expr_p),
13706 OMP_CRITICAL);
13707 g = gimple_build_omp_critical (body,
13708 OMP_CRITICAL_NAME (*expr_p),
13709 OMP_CRITICAL_CLAUSES (*expr_p));
13710 break;
13711 case OMP_SCAN:
13712 gimplify_scan_omp_clauses (&OMP_SCAN_CLAUSES (*expr_p),
13713 pre_p, ORT_WORKSHARE, OMP_SCAN);
13714 gimplify_adjust_omp_clauses (pre_p, body,
13715 &OMP_SCAN_CLAUSES (*expr_p),
13716 OMP_SCAN);
13717 g = gimple_build_omp_scan (body, OMP_SCAN_CLAUSES (*expr_p));
13718 break;
13719 default:
13720 gcc_unreachable ();
13721 }
13722 gimplify_seq_add_stmt (pre_p, g);
13723 ret = GS_ALL_DONE;
13724 break;
13725 }
13726
13727 case OMP_TASKGROUP:
13728 {
13729 gimple_seq body = NULL;
13730
13731 tree *pclauses = &OMP_TASKGROUP_CLAUSES (*expr_p);
13732 bool saved_in_omp_construct = in_omp_construct;
13733 gimplify_scan_omp_clauses (pclauses, pre_p, ORT_TASKGROUP,
13734 OMP_TASKGROUP);
13735 gimplify_adjust_omp_clauses (pre_p, NULL, pclauses, OMP_TASKGROUP);
13736
13737 in_omp_construct = true;
13738 gimplify_and_add (OMP_BODY (*expr_p), &body);
13739 in_omp_construct = saved_in_omp_construct;
13740 gimple_seq cleanup = NULL;
13741 tree fn = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_END);
13742 gimple *g = gimple_build_call (fn, 0);
13743 gimple_seq_add_stmt (&cleanup, g);
13744 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
13745 body = NULL;
13746 gimple_seq_add_stmt (&body, g);
13747 g = gimple_build_omp_taskgroup (body, *pclauses);
13748 gimplify_seq_add_stmt (pre_p, g);
13749 ret = GS_ALL_DONE;
13750 break;
13751 }
13752
13753 case OMP_ATOMIC:
13754 case OMP_ATOMIC_READ:
13755 case OMP_ATOMIC_CAPTURE_OLD:
13756 case OMP_ATOMIC_CAPTURE_NEW:
13757 ret = gimplify_omp_atomic (expr_p, pre_p);
13758 break;
13759
13760 case TRANSACTION_EXPR:
13761 ret = gimplify_transaction (expr_p, pre_p);
13762 break;
13763
13764 case TRUTH_AND_EXPR:
13765 case TRUTH_OR_EXPR:
13766 case TRUTH_XOR_EXPR:
13767 {
13768 tree orig_type = TREE_TYPE (*expr_p);
13769 tree new_type, xop0, xop1;
13770 *expr_p = gimple_boolify (*expr_p);
13771 new_type = TREE_TYPE (*expr_p);
13772 if (!useless_type_conversion_p (orig_type, new_type))
13773 {
13774 *expr_p = fold_convert_loc (input_location, orig_type, *expr_p);
13775 ret = GS_OK;
13776 break;
13777 }
13778
13779 /* Boolified binary truth expressions are semantically equivalent
13780 to bitwise binary expressions. Canonicalize them to the
13781 bitwise variant. */
13782 switch (TREE_CODE (*expr_p))
13783 {
13784 case TRUTH_AND_EXPR:
13785 TREE_SET_CODE (*expr_p, BIT_AND_EXPR);
13786 break;
13787 case TRUTH_OR_EXPR:
13788 TREE_SET_CODE (*expr_p, BIT_IOR_EXPR);
13789 break;
13790 case TRUTH_XOR_EXPR:
13791 TREE_SET_CODE (*expr_p, BIT_XOR_EXPR);
13792 break;
13793 default:
13794 break;
13795 }
13796 /* Now make sure that operands have compatible type to
13797 expression's new_type. */
13798 xop0 = TREE_OPERAND (*expr_p, 0);
13799 xop1 = TREE_OPERAND (*expr_p, 1);
13800 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop0)))
13801 TREE_OPERAND (*expr_p, 0) = fold_convert_loc (input_location,
13802 new_type,
13803 xop0);
13804 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop1)))
13805 TREE_OPERAND (*expr_p, 1) = fold_convert_loc (input_location,
13806 new_type,
13807 xop1);
13808 /* Continue classified as tcc_binary. */
13809 goto expr_2;
13810 }
13811
13812 case VEC_COND_EXPR:
13813 {
13814 enum gimplify_status r0, r1, r2;
13815
13816 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
13817 post_p, is_gimple_condexpr, fb_rvalue);
13818 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
13819 post_p, is_gimple_val, fb_rvalue);
13820 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
13821 post_p, is_gimple_val, fb_rvalue);
13822
13823 ret = MIN (MIN (r0, r1), r2);
13824 recalculate_side_effects (*expr_p);
13825 }
13826 break;
13827
13828 case VEC_PERM_EXPR:
13829 /* Classified as tcc_expression. */
13830 goto expr_3;
13831
13832 case BIT_INSERT_EXPR:
13833 /* Argument 3 is a constant. */
13834 goto expr_2;
13835
13836 case POINTER_PLUS_EXPR:
13837 {
13838 enum gimplify_status r0, r1;
13839 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
13840 post_p, is_gimple_val, fb_rvalue);
13841 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
13842 post_p, is_gimple_val, fb_rvalue);
13843 recalculate_side_effects (*expr_p);
13844 ret = MIN (r0, r1);
13845 break;
13846 }
13847
13848 default:
13849 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
13850 {
13851 case tcc_comparison:
13852 /* Handle comparison of objects of non scalar mode aggregates
13853 with a call to memcmp. It would be nice to only have to do
13854 this for variable-sized objects, but then we'd have to allow
13855 the same nest of reference nodes we allow for MODIFY_EXPR and
13856 that's too complex.
13857
13858 Compare scalar mode aggregates as scalar mode values. Using
13859 memcmp for them would be very inefficient at best, and is
13860 plain wrong if bitfields are involved. */
13861 {
13862 tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
13863
13864 /* Vector comparisons need no boolification. */
13865 if (TREE_CODE (type) == VECTOR_TYPE)
13866 goto expr_2;
13867 else if (!AGGREGATE_TYPE_P (type))
13868 {
13869 tree org_type = TREE_TYPE (*expr_p);
13870 *expr_p = gimple_boolify (*expr_p);
13871 if (!useless_type_conversion_p (org_type,
13872 TREE_TYPE (*expr_p)))
13873 {
13874 *expr_p = fold_convert_loc (input_location,
13875 org_type, *expr_p);
13876 ret = GS_OK;
13877 }
13878 else
13879 goto expr_2;
13880 }
13881 else if (TYPE_MODE (type) != BLKmode)
13882 ret = gimplify_scalar_mode_aggregate_compare (expr_p);
13883 else
13884 ret = gimplify_variable_sized_compare (expr_p);
13885
13886 break;
13887 }
13888
13889 /* If *EXPR_P does not need to be special-cased, handle it
13890 according to its class. */
13891 case tcc_unary:
13892 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
13893 post_p, is_gimple_val, fb_rvalue);
13894 break;
13895
13896 case tcc_binary:
13897 expr_2:
13898 {
13899 enum gimplify_status r0, r1;
13900
13901 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
13902 post_p, is_gimple_val, fb_rvalue);
13903 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
13904 post_p, is_gimple_val, fb_rvalue);
13905
13906 ret = MIN (r0, r1);
13907 break;
13908 }
13909
13910 expr_3:
13911 {
13912 enum gimplify_status r0, r1, r2;
13913
13914 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
13915 post_p, is_gimple_val, fb_rvalue);
13916 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
13917 post_p, is_gimple_val, fb_rvalue);
13918 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
13919 post_p, is_gimple_val, fb_rvalue);
13920
13921 ret = MIN (MIN (r0, r1), r2);
13922 break;
13923 }
13924
13925 case tcc_declaration:
13926 case tcc_constant:
13927 ret = GS_ALL_DONE;
13928 goto dont_recalculate;
13929
13930 default:
13931 gcc_unreachable ();
13932 }
13933
13934 recalculate_side_effects (*expr_p);
13935
13936 dont_recalculate:
13937 break;
13938 }
13939
13940 gcc_assert (*expr_p || ret != GS_OK);
13941 }
13942 while (ret == GS_OK);
13943
13944 /* If we encountered an error_mark somewhere nested inside, either
13945 stub out the statement or propagate the error back out. */
13946 if (ret == GS_ERROR)
13947 {
13948 if (is_statement)
13949 *expr_p = NULL;
13950 goto out;
13951 }
13952
13953 /* This was only valid as a return value from the langhook, which
13954 we handled. Make sure it doesn't escape from any other context. */
13955 gcc_assert (ret != GS_UNHANDLED);
13956
13957 if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
13958 {
13959 /* We aren't looking for a value, and we don't have a valid
13960 statement. If it doesn't have side-effects, throw it away.
13961 We can also get here with code such as "*&&L;", where L is
13962 a LABEL_DECL that is marked as FORCED_LABEL. */
13963 if (TREE_CODE (*expr_p) == LABEL_DECL
13964 || !TREE_SIDE_EFFECTS (*expr_p))
13965 *expr_p = NULL;
13966 else if (!TREE_THIS_VOLATILE (*expr_p))
13967 {
13968 /* This is probably a _REF that contains something nested that
13969 has side effects. Recurse through the operands to find it. */
13970 enum tree_code code = TREE_CODE (*expr_p);
13971
13972 switch (code)
13973 {
13974 case COMPONENT_REF:
13975 case REALPART_EXPR:
13976 case IMAGPART_EXPR:
13977 case VIEW_CONVERT_EXPR:
13978 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
13979 gimple_test_f, fallback);
13980 break;
13981
13982 case ARRAY_REF:
13983 case ARRAY_RANGE_REF:
13984 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
13985 gimple_test_f, fallback);
13986 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
13987 gimple_test_f, fallback);
13988 break;
13989
13990 default:
13991 /* Anything else with side-effects must be converted to
13992 a valid statement before we get here. */
13993 gcc_unreachable ();
13994 }
13995
13996 *expr_p = NULL;
13997 }
13998 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
13999 && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode)
14000 {
14001 /* Historically, the compiler has treated a bare reference
14002 to a non-BLKmode volatile lvalue as forcing a load. */
14003 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
14004
14005 /* Normally, we do not want to create a temporary for a
14006 TREE_ADDRESSABLE type because such a type should not be
14007 copied by bitwise-assignment. However, we make an
14008 exception here, as all we are doing here is ensuring that
14009 we read the bytes that make up the type. We use
14010 create_tmp_var_raw because create_tmp_var will abort when
14011 given a TREE_ADDRESSABLE type. */
14012 tree tmp = create_tmp_var_raw (type, "vol");
14013 gimple_add_tmp_var (tmp);
14014 gimplify_assign (tmp, *expr_p, pre_p);
14015 *expr_p = NULL;
14016 }
14017 else
14018 /* We can't do anything useful with a volatile reference to
14019 an incomplete type, so just throw it away. Likewise for
14020 a BLKmode type, since any implicit inner load should
14021 already have been turned into an explicit one by the
14022 gimplification process. */
14023 *expr_p = NULL;
14024 }
14025
14026 /* If we are gimplifying at the statement level, we're done. Tack
14027 everything together and return. */
14028 if (fallback == fb_none || is_statement)
14029 {
14030 /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
14031 it out for GC to reclaim it. */
14032 *expr_p = NULL_TREE;
14033
14034 if (!gimple_seq_empty_p (internal_pre)
14035 || !gimple_seq_empty_p (internal_post))
14036 {
14037 gimplify_seq_add_seq (&internal_pre, internal_post);
14038 gimplify_seq_add_seq (pre_p, internal_pre);
14039 }
14040
14041 /* The result of gimplifying *EXPR_P is going to be the last few
14042 statements in *PRE_P and *POST_P. Add location information
14043 to all the statements that were added by the gimplification
14044 helpers. */
14045 if (!gimple_seq_empty_p (*pre_p))
14046 annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);
14047
14048 if (!gimple_seq_empty_p (*post_p))
14049 annotate_all_with_location_after (*post_p, post_last_gsi,
14050 input_location);
14051
14052 goto out;
14053 }
14054
14055 #ifdef ENABLE_GIMPLE_CHECKING
14056 if (*expr_p)
14057 {
14058 enum tree_code code = TREE_CODE (*expr_p);
14059 /* These expressions should already be in gimple IR form. */
14060 gcc_assert (code != MODIFY_EXPR
14061 && code != ASM_EXPR
14062 && code != BIND_EXPR
14063 && code != CATCH_EXPR
14064 && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
14065 && code != EH_FILTER_EXPR
14066 && code != GOTO_EXPR
14067 && code != LABEL_EXPR
14068 && code != LOOP_EXPR
14069 && code != SWITCH_EXPR
14070 && code != TRY_FINALLY_EXPR
14071 && code != EH_ELSE_EXPR
14072 && code != OACC_PARALLEL
14073 && code != OACC_KERNELS
14074 && code != OACC_DATA
14075 && code != OACC_HOST_DATA
14076 && code != OACC_DECLARE
14077 && code != OACC_UPDATE
14078 && code != OACC_ENTER_DATA
14079 && code != OACC_EXIT_DATA
14080 && code != OACC_CACHE
14081 && code != OMP_CRITICAL
14082 && code != OMP_FOR
14083 && code != OACC_LOOP
14084 && code != OMP_MASTER
14085 && code != OMP_TASKGROUP
14086 && code != OMP_ORDERED
14087 && code != OMP_PARALLEL
14088 && code != OMP_SCAN
14089 && code != OMP_SECTIONS
14090 && code != OMP_SECTION
14091 && code != OMP_SINGLE);
14092 }
14093 #endif
14094
14095 /* Otherwise we're gimplifying a subexpression, so the resulting
14096 value is interesting. If it's a valid operand that matches
14097 GIMPLE_TEST_F, we're done. Unless we are handling some
14098 post-effects internally; if that's the case, we need to copy into
14099 a temporary before adding the post-effects to POST_P. */
14100 if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
14101 goto out;
14102
14103 /* Otherwise, we need to create a new temporary for the gimplified
14104 expression. */
14105
14106 /* We can't return an lvalue if we have an internal postqueue. The
14107 object the lvalue refers to would (probably) be modified by the
14108 postqueue; we need to copy the value out first, which means an
14109 rvalue. */
14110 if ((fallback & fb_lvalue)
14111 && gimple_seq_empty_p (internal_post)
14112 && is_gimple_addressable (*expr_p))
14113 {
14114 /* An lvalue will do. Take the address of the expression, store it
14115 in a temporary, and replace the expression with an INDIRECT_REF of
14116 that temporary. */
14117 tree ref_alias_type = reference_alias_ptr_type (*expr_p);
14118 unsigned int ref_align = get_object_alignment (*expr_p);
14119 tree ref_type = TREE_TYPE (*expr_p);
14120 tmp = build_fold_addr_expr_loc (input_location, *expr_p);
14121 gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
14122 if (TYPE_ALIGN (ref_type) != ref_align)
14123 ref_type = build_aligned_type (ref_type, ref_align);
14124 *expr_p = build2 (MEM_REF, ref_type,
14125 tmp, build_zero_cst (ref_alias_type));
14126 }
14127 else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
14128 {
14129 /* An rvalue will do. Assign the gimplified expression into a
14130 new temporary TMP and replace the original expression with
14131 TMP. First, make sure that the expression has a type so that
14132 it can be assigned into a temporary. */
14133 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
14134 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
14135 }
14136 else
14137 {
14138 #ifdef ENABLE_GIMPLE_CHECKING
14139 if (!(fallback & fb_mayfail))
14140 {
14141 fprintf (stderr, "gimplification failed:\n");
14142 print_generic_expr (stderr, *expr_p);
14143 debug_tree (*expr_p);
14144 internal_error ("gimplification failed");
14145 }
14146 #endif
14147 gcc_assert (fallback & fb_mayfail);
14148
14149 /* If this is an asm statement, and the user asked for the
14150 impossible, don't die. Fail and let gimplify_asm_expr
14151 issue an error. */
14152 ret = GS_ERROR;
14153 goto out;
14154 }
14155
14156 /* Make sure the temporary matches our predicate. */
14157 gcc_assert ((*gimple_test_f) (*expr_p));
14158
14159 if (!gimple_seq_empty_p (internal_post))
14160 {
14161 annotate_all_with_location (internal_post, input_location);
14162 gimplify_seq_add_seq (pre_p, internal_post);
14163 }
14164
14165 out:
14166 input_location = saved_location;
14167 return ret;
14168 }
14169
14170 /* Like gimplify_expr but make sure the gimplified result is not itself
14171 a SSA name (but a decl if it were). Temporaries required by
14172 evaluating *EXPR_P may be still SSA names. */
14173
14174 static enum gimplify_status
14175 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
14176 bool (*gimple_test_f) (tree), fallback_t fallback,
14177 bool allow_ssa)
14178 {
14179 bool was_ssa_name_p = TREE_CODE (*expr_p) == SSA_NAME;
14180 enum gimplify_status ret = gimplify_expr (expr_p, pre_p, post_p,
14181 gimple_test_f, fallback);
14182 if (! allow_ssa
14183 && TREE_CODE (*expr_p) == SSA_NAME)
14184 {
14185 tree name = *expr_p;
14186 if (was_ssa_name_p)
14187 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, NULL, false);
14188 else
14189 {
14190 /* Avoid the extra copy if possible. */
14191 *expr_p = create_tmp_reg (TREE_TYPE (name));
14192 gimple_set_lhs (SSA_NAME_DEF_STMT (name), *expr_p);
14193 release_ssa_name (name);
14194 }
14195 }
14196 return ret;
14197 }
14198
/* Look through TYPE for variable-sized objects and gimplify each such
   size that we find.  Add to LIST_P any statements generated.  */

void
gimplify_type_sizes (tree type, gimple_seq *list_p)
{
  tree field, t;

  /* Nothing to do for a missing or erroneous type.  */
  if (type == NULL || type == error_mark_node)
    return;

  /* We first do the main variant, then copy into any other variants.  */
  type = TYPE_MAIN_VARIANT (type);

  /* Avoid infinite recursion.  */
  if (TYPE_SIZES_GIMPLIFIED (type))
    return;

  /* Mark the type before recursing so that self-referential types
     terminate.  */
  TYPE_SIZES_GIMPLIFIED (type) = 1;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
      /* Scalar types can have variable bounds (e.g. Ada subtypes).  */
      gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
      gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);

      /* Propagate the gimplified bounds to every variant of the type.  */
      for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
	{
	  TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
	  TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
	}
      break;

    case ARRAY_TYPE:
      /* These types may not have declarations, so handle them here.  */
      gimplify_type_sizes (TREE_TYPE (type), list_p);
      gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
      /* Ensure VLA bounds aren't removed, for -O0 they should be variables
	 with assigned stack slots, for -O1+ -g they should be tracked
	 by VTA.  */
      if (!(TYPE_NAME (type)
	    && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
	    && DECL_IGNORED_P (TYPE_NAME (type)))
	  && TYPE_DOMAIN (type)
	  && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
	{
	  t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
	  if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
	    DECL_IGNORED_P (t) = 0;
	  t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
	  if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
	    DECL_IGNORED_P (t) = 0;
	}
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      /* TYPE_FIELDS may also contain non-FIELD_DECL entries (types,
	 constants, ...); only FIELD_DECLs carry sizes and positions.  */
      for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	if (TREE_CODE (field) == FIELD_DECL)
	  {
	    gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
	    gimplify_one_sizepos (&DECL_SIZE (field), list_p);
	    gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
	    gimplify_type_sizes (TREE_TYPE (field), list_p);
	  }
      break;

    case POINTER_TYPE:
    case REFERENCE_TYPE:
	/* We used to recurse on the pointed-to type here, which turned out to
	   be incorrect because its definition might refer to variables not
	   yet initialized at this point if a forward declaration is involved.

	   It was actually useful for anonymous pointed-to types to ensure
	   that the sizes evaluation dominates every possible later use of the
	   values.  Restricting to such types here would be safe since there
	   is no possible forward declaration around, but would introduce an
	   undesirable middle-end semantic to anonymity.  We then defer to
	   front-ends the responsibility of ensuring that the sizes are
	   evaluated both early and late enough, e.g. by attaching artificial
	   type declarations to the tree.  */
      break;

    default:
      break;
    }

  /* Regardless of kind, every type has an overall size and unit size.  */
  gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
  gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);

  /* Copy the gimplified sizes to the variants and mark them done so we
     don't process them again.  */
  for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
    {
      TYPE_SIZE (t) = TYPE_SIZE (type);
      TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
      TYPE_SIZES_GIMPLIFIED (t) = 1;
    }
}
14301
14302 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
14303 a size or position, has had all of its SAVE_EXPRs evaluated.
14304 We add any required statements to *STMT_P. */
14305
14306 void
14307 gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
14308 {
14309 tree expr = *expr_p;
14310
14311 /* We don't do anything if the value isn't there, is constant, or contains
14312 A PLACEHOLDER_EXPR. We also don't want to do anything if it's already
14313 a VAR_DECL. If it's a VAR_DECL from another function, the gimplifier
14314 will want to replace it with a new variable, but that will cause problems
14315 if this type is from outside the function. It's OK to have that here. */
14316 if (expr == NULL_TREE
14317 || is_gimple_constant (expr)
14318 || TREE_CODE (expr) == VAR_DECL
14319 || CONTAINS_PLACEHOLDER_P (expr))
14320 return;
14321
14322 *expr_p = unshare_expr (expr);
14323
14324 /* SSA names in decl/type fields are a bad idea - they'll get reclaimed
14325 if the def vanishes. */
14326 gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue, false);
14327
14328 /* If expr wasn't already is_gimple_sizepos or is_gimple_constant from the
14329 FE, ensure that it is a VAR_DECL, otherwise we might handle some decls
14330 as gimplify_vla_decl even when they would have all sizes INTEGER_CSTs. */
14331 if (is_gimple_constant (*expr_p))
14332 *expr_p = get_initialized_tmp_var (*expr_p, stmt_p, NULL, false);
14333 }
14334
/* Gimplify the body of statements of FNDECL and return a GIMPLE_BIND node
   containing the sequence of corresponding GIMPLE statements.  If DO_PARMS
   is true, also gimplify the parameters.  */

gbind *
gimplify_body (tree fndecl, bool do_parms)
{
  location_t saved_location = input_location;
  gimple_seq parm_stmts, parm_cleanup = NULL, seq;
  gimple *outer_stmt;
  gbind *outer_bind;

  timevar_push (TV_TREE_GIMPLIFY);

  init_tree_ssa (cfun);

  /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
     gimplification.  */
  default_rtl_profile ();

  /* Gimplification contexts must nest properly; none may be active yet.  */
  gcc_assert (gimplify_ctxp == NULL);
  push_gimplify_context (true);

  if (flag_openacc || flag_openmp)
    {
      gcc_assert (gimplify_omp_ctxp == NULL);
      /* Functions marked "omp declare target" get an implicit target
	 context for the whole body.  */
      if (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (fndecl)))
	gimplify_omp_ctxp = new_omp_context (ORT_IMPLICIT_TARGET);
    }

  /* Unshare most shared trees in the body and in that of any nested functions.
     It would seem we don't have to do this for nested functions because
     they are supposed to be output and then the outer function gimplified
     first, but the g++ front end doesn't always do it that way.  */
  unshare_body (fndecl);
  unvisit_body (fndecl);

  /* Make sure input_location isn't set to something weird.  */
  input_location = DECL_SOURCE_LOCATION (fndecl);

  /* Resolve callee-copies.  This has to be done before processing
     the body so that DECL_VALUE_EXPR gets processed correctly.  */
  parm_stmts = do_parms ? gimplify_parameters (&parm_cleanup) : NULL;

  /* Gimplify the function's body.  */
  seq = NULL;
  gimplify_stmt (&DECL_SAVED_TREE (fndecl), &seq);
  outer_stmt = gimple_seq_first_stmt (seq);
  if (!outer_stmt)
    {
      /* An empty body still needs one statement to anchor the bind.  */
      outer_stmt = gimple_build_nop ();
      gimplify_seq_add_stmt (&seq, outer_stmt);
    }

  /* The body must contain exactly one statement, a GIMPLE_BIND.  If this is
     not the case, wrap everything in a GIMPLE_BIND to make it so.  */
  if (gimple_code (outer_stmt) == GIMPLE_BIND
      && gimple_seq_first (seq) == gimple_seq_last (seq))
    outer_bind = as_a <gbind *> (outer_stmt);
  else
    outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);

  /* The GENERIC body is fully consumed; drop it so GC can reclaim it.  */
  DECL_SAVED_TREE (fndecl) = NULL_TREE;

  /* If we had callee-copies statements, insert them at the beginning
     of the function and clear DECL_VALUE_EXPR_P on the parameters.  */
  if (!gimple_seq_empty_p (parm_stmts))
    {
      tree parm;

      gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
      if (parm_cleanup)
	{
	  /* Wrap the body in a try/finally so the parameter cleanups
	     run on every exit path.  */
	  gtry *g = gimple_build_try (parm_stmts, parm_cleanup,
				      GIMPLE_TRY_FINALLY);
	  parm_stmts = NULL;
	  gimple_seq_add_stmt (&parm_stmts, g);
	}
      gimple_bind_set_body (outer_bind, parm_stmts);

      for (parm = DECL_ARGUMENTS (current_function_decl);
	   parm; parm = DECL_CHAIN (parm))
	if (DECL_HAS_VALUE_EXPR_P (parm))
	  {
	    DECL_HAS_VALUE_EXPR_P (parm) = 0;
	    DECL_IGNORED_P (parm) = 0;
	  }
    }

  /* Tear down any OMP context opened above (or left by gimplification).  */
  if ((flag_openacc || flag_openmp || flag_openmp_simd)
      && gimplify_omp_ctxp)
    {
      delete_omp_context (gimplify_omp_ctxp);
      gimplify_omp_ctxp = NULL;
    }

  pop_gimplify_context (outer_bind);
  gcc_assert (gimplify_ctxp == NULL);

  if (flag_checking && !seen_error ())
    verify_gimple_in_seq (gimple_bind_body (outer_bind));

  timevar_pop (TV_TREE_GIMPLIFY);
  input_location = saved_location;

  return outer_bind;
}
14442
14443 typedef char *char_p; /* For DEF_VEC_P. */
14444
14445 /* Return whether we should exclude FNDECL from instrumentation. */
14446
14447 static bool
14448 flag_instrument_functions_exclude_p (tree fndecl)
14449 {
14450 vec<char_p> *v;
14451
14452 v = (vec<char_p> *) flag_instrument_functions_exclude_functions;
14453 if (v && v->length () > 0)
14454 {
14455 const char *name;
14456 int i;
14457 char *s;
14458
14459 name = lang_hooks.decl_printable_name (fndecl, 1);
14460 FOR_EACH_VEC_ELT (*v, i, s)
14461 if (strstr (name, s) != NULL)
14462 return true;
14463 }
14464
14465 v = (vec<char_p> *) flag_instrument_functions_exclude_files;
14466 if (v && v->length () > 0)
14467 {
14468 const char *name;
14469 int i;
14470 char *s;
14471
14472 name = DECL_SOURCE_FILE (fndecl);
14473 FOR_EACH_VEC_ELT (*v, i, s)
14474 if (strstr (name, s) != NULL)
14475 return true;
14476 }
14477
14478 return false;
14479 }
14480
/* Entry point to the gimplification pass.  FNDECL is the FUNCTION_DECL
   node for the function we want to gimplify.

   Return the sequence of GIMPLE statements corresponding to the body
   of FNDECL.  */

void
gimplify_function_tree (tree fndecl)
{
  tree parm, ret;
  gimple_seq seq;
  gbind *bind;

  /* FNDECL must not have been gimplified already.  */
  gcc_assert (!gimple_body (fndecl));

  if (DECL_STRUCT_FUNCTION (fndecl))
    push_cfun (DECL_STRUCT_FUNCTION (fndecl));
  else
    push_struct_function (fndecl);

  /* Tentatively set PROP_gimple_lva here, and reset it in gimplify_va_arg_expr
     if necessary.  */
  cfun->curr_properties |= PROP_gimple_lva;

  for (parm = DECL_ARGUMENTS (fndecl); parm ; parm = DECL_CHAIN (parm))
    {
      /* Preliminarily mark non-addressed complex variables as eligible
	 for promotion to gimple registers.  We'll transform their uses
	 as we find them.  */
      if ((TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
	   || TREE_CODE (TREE_TYPE (parm)) == VECTOR_TYPE)
	  && !TREE_THIS_VOLATILE (parm)
	  && !needs_to_live_in_memory (parm))
	DECL_GIMPLE_REG_P (parm) = 1;
    }

  /* Likewise for the return value.  */
  ret = DECL_RESULT (fndecl);
  if ((TREE_CODE (TREE_TYPE (ret)) == COMPLEX_TYPE
       || TREE_CODE (TREE_TYPE (ret)) == VECTOR_TYPE)
      && !needs_to_live_in_memory (ret))
    DECL_GIMPLE_REG_P (ret) = 1;

  if (asan_sanitize_use_after_scope () && sanitize_flags_p (SANITIZE_ADDRESS))
    asan_poisoned_variables = new hash_set<tree> ();
  bind = gimplify_body (fndecl, true);
  if (asan_poisoned_variables)
    {
      delete asan_poisoned_variables;
      asan_poisoned_variables = NULL;
    }

  /* The tree body of the function is no longer needed, replace it
     with the new GIMPLE body.  */
  seq = NULL;
  gimple_seq_add_stmt (&seq, bind);
  gimple_set_body (fndecl, seq);

  /* If we're instrumenting function entry/exit, then prepend the call to
     the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
     catch the exit hook.  */
  /* ??? Add some way to ignore exceptions for this TFE.  */
  if (flag_instrument_function_entry_exit
      && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
      /* Do not instrument extern inline functions.  */
      && !(DECL_DECLARED_INLINE_P (fndecl)
	   && DECL_EXTERNAL (fndecl)
	   && DECL_DISREGARD_INLINE_LIMITS (fndecl))
      && !flag_instrument_functions_exclude_p (fndecl))
    {
      tree x;
      gbind *new_bind;
      gimple *tf;
      gimple_seq cleanup = NULL, body = NULL;
      tree tmp_var, this_fn_addr;
      gcall *call;

      /* The instrumentation hooks aren't going to call the instrumented
	 function and the address they receive is expected to be matchable
	 against symbol addresses.  Make sure we don't create a trampoline,
	 in case the current function is nested.  */
      this_fn_addr = build_fold_addr_expr (current_function_decl);
      TREE_NO_TRAMPOLINE (this_fn_addr) = 1;

      /* Build the exit-hook call sequence:
	 __cyg_profile_func_exit (this_fn_addr, __builtin_return_address (0))
	 and install it as the finally part of a try/finally.  */
      x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
      call = gimple_build_call (x, 1, integer_zero_node);
      tmp_var = create_tmp_var (ptr_type_node, "return_addr");
      gimple_call_set_lhs (call, tmp_var);
      gimplify_seq_add_stmt (&cleanup, call);
      x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_EXIT);
      call = gimple_build_call (x, 2, this_fn_addr, tmp_var);
      gimplify_seq_add_stmt (&cleanup, call);
      tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);

      /* Build the entry-hook call sequence and prepend it to the body.  */
      x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
      call = gimple_build_call (x, 1, integer_zero_node);
      tmp_var = create_tmp_var (ptr_type_node, "return_addr");
      gimple_call_set_lhs (call, tmp_var);
      gimplify_seq_add_stmt (&body, call);
      x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_ENTER);
      call = gimple_build_call (x, 2, this_fn_addr, tmp_var);
      gimplify_seq_add_stmt (&body, call);
      gimplify_seq_add_stmt (&body, tf);
      new_bind = gimple_build_bind (NULL, body, NULL);

      /* Replace the current function body with the body
	 wrapped in the try/finally TF.  */
      seq = NULL;
      gimple_seq_add_stmt (&seq, new_bind);
      gimple_set_body (fndecl, seq);
      bind = new_bind;
    }

  if (sanitize_flags_p (SANITIZE_THREAD))
    {
      /* For TSan, guarantee the function-exit hook runs on every path
	 by wrapping the whole body in a try/finally.  */
      gcall *call = gimple_build_call_internal (IFN_TSAN_FUNC_EXIT, 0);
      gimple *tf = gimple_build_try (seq, call, GIMPLE_TRY_FINALLY);
      gbind *new_bind = gimple_build_bind (NULL, tf, NULL);
      /* Replace the current function body with the body
	 wrapped in the try/finally TF.  */
      seq = NULL;
      gimple_seq_add_stmt (&seq, new_bind);
      gimple_set_body (fndecl, seq);
    }

  DECL_SAVED_TREE (fndecl) = NULL_TREE;
  cfun->curr_properties |= PROP_gimple_any;

  pop_cfun ();

  dump_function (TDI_gimple, fndecl);
}
14612
14613 /* Return a dummy expression of type TYPE in order to keep going after an
14614 error. */
14615
14616 static tree
14617 dummy_object (tree type)
14618 {
14619 tree t = build_int_cst (build_pointer_type (type), 0);
14620 return build2 (MEM_REF, type, t, t);
14621 }
14622
/* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
   builtin function, but a very special sort of operator.  */

enum gimplify_status
gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p,
		      gimple_seq *post_p ATTRIBUTE_UNUSED)
{
  tree promoted_type, have_va_type;
  tree valist = TREE_OPERAND (*expr_p, 0);
  tree type = TREE_TYPE (*expr_p);
  tree t, tag, aptag;
  location_t loc = EXPR_LOCATION (*expr_p);

  /* Verify that valist is of the proper type.  */
  have_va_type = TREE_TYPE (valist);
  if (have_va_type == error_mark_node)
    return GS_ERROR;
  have_va_type = targetm.canonical_va_list_type (have_va_type);
  if (have_va_type == NULL_TREE
      && POINTER_TYPE_P (TREE_TYPE (valist)))
    /* Handle 'Case 1: Not an array type' from c-common.c/build_va_arg.  */
    have_va_type
      = targetm.canonical_va_list_type (TREE_TYPE (TREE_TYPE (valist)));
  gcc_assert (have_va_type != NULL_TREE);

  /* Generate a diagnostic for requesting data of a type that cannot
     be passed through `...' due to type promotion at the call site.  */
  if ((promoted_type = lang_hooks.types.type_promotes_to (type))
      != type)
    {
      /* Emit the "promoted type" help at most once per compilation.  */
      static bool gave_help;
      bool warned;
      /* Use the expansion point to handle cases such as passing bool (defined
	 in a system header) through `...'.  */
      location_t xloc
	= expansion_point_location_if_in_system_header (loc);

      /* Unfortunately, this is merely undefined, rather than a constraint
	 violation, so we cannot make this an error.  If this call is never
	 executed, the program is still strictly conforming.  */
      auto_diagnostic_group d;
      warned = warning_at (xloc, 0,
			   "%qT is promoted to %qT when passed through %<...%>",
			   type, promoted_type);
      if (!gave_help && warned)
	{
	  gave_help = true;
	  inform (xloc, "(so you should pass %qT not %qT to %<va_arg%>)",
		  promoted_type, type);
	}

      /* We can, however, treat "undefined" any way we please.
	 Call abort to encourage the user to fix the program.  */
      if (warned)
	inform (xloc, "if this code is reached, the program will abort");
      /* Before the abort, allow the evaluation of the va_list
	 expression to exit or longjmp.  */
      gimplify_and_add (valist, pre_p);
      t = build_call_expr_loc (loc,
			       builtin_decl_implicit (BUILT_IN_TRAP), 0);
      gimplify_and_add (t, pre_p);

      /* This is dead code, but go ahead and finish so that the
	 mode of the result comes out right.  */
      *expr_p = dummy_object (type);
      return GS_ALL_DONE;
    }

  /* Lower to IFN_VA_ARG (valist, tag, aptag), where TAG carries the
     requested type and APTAG the va_list type, both as null pointer
     constants used only for their types.  */
  tag = build_int_cst (build_pointer_type (type), 0);
  aptag = build_int_cst (TREE_TYPE (valist), 0);

  *expr_p = build_call_expr_internal_loc (loc, IFN_VA_ARG, type, 3,
					  valist, tag, aptag);

  /* Clear the tentatively set PROP_gimple_lva, to indicate that IFN_VA_ARG
     needs to be expanded.  */
  cfun->curr_properties &= ~PROP_gimple_lva;

  return GS_OK;
}
14703
14704 /* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.
14705
14706 DST/SRC are the destination and source respectively. You can pass
14707 ungimplified trees in DST or SRC, in which case they will be
14708 converted to a gimple operand if necessary.
14709
14710 This function returns the newly created GIMPLE_ASSIGN tuple. */
14711
14712 gimple *
14713 gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
14714 {
14715 tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
14716 gimplify_and_add (t, seq_p);
14717 ggc_free (t);
14718 return gimple_seq_last_stmt (*seq_p);
14719 }
14720
14721 inline hashval_t
14722 gimplify_hasher::hash (const elt_t *p)
14723 {
14724 tree t = p->val;
14725 return iterative_hash_expr (t, 0);
14726 }
14727
14728 inline bool
14729 gimplify_hasher::equal (const elt_t *p1, const elt_t *p2)
14730 {
14731 tree t1 = p1->val;
14732 tree t2 = p2->val;
14733 enum tree_code code = TREE_CODE (t1);
14734
14735 if (TREE_CODE (t2) != code
14736 || TREE_TYPE (t1) != TREE_TYPE (t2))
14737 return false;
14738
14739 if (!operand_equal_p (t1, t2, 0))
14740 return false;
14741
14742 /* Only allow them to compare equal if they also hash equal; otherwise
14743 results are nondeterminate, and we fail bootstrap comparison. */
14744 gcc_checking_assert (hash (p1) == hash (p2));
14745
14746 return true;
14747 }