/* Tree lowering pass.  This pass converts the GENERIC functions-as-trees
   tree representation into the GIMPLE form.
   Copyright (C) 2002-2020 Free Software Foundation, Inc.
   Major work done by Sebastian Pop <s.pop@laposte.net>,
   Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "memmodel.h"
#include "tm_p.h"
#include "gimple.h"
#include "gimple-predict.h"
#include "tree-pass.h"	/* FIXME: only for PROP_gimple_any */
#include "ssa.h"
#include "cgraph.h"
#include "tree-pretty-print.h"
#include "diagnostic-core.h"
#include "alias.h"
#include "fold-const.h"
#include "calls.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "tree-cfg.h"
#include "tree-ssa.h"
#include "omp-general.h"
#include "omp-low.h"
#include "gimple-low.h"
#include "gomp-constants.h"
#include "splay-tree.h"
#include "gimple-walk.h"
#include "langhooks-def.h"	/* FIXME: for lhd_set_decl_assembler_name */
#include "builtins.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "dbgcnt.h"
#include "omp-offload.h"
#include "context.h"

/* Hash set of poisoned variables in a bind expr.  */
static hash_set<tree> *asan_poisoned_variables = NULL;

enum gimplify_omp_var_data
{
  GOVD_SEEN = 0x000001,
  GOVD_EXPLICIT = 0x000002,
  GOVD_SHARED = 0x000004,
  GOVD_PRIVATE = 0x000008,
  GOVD_FIRSTPRIVATE = 0x000010,
  GOVD_LASTPRIVATE = 0x000020,
  GOVD_REDUCTION = 0x000040,
  GOVD_LOCAL = 0x000080,
  GOVD_MAP = 0x000100,
  GOVD_DEBUG_PRIVATE = 0x000200,
  GOVD_PRIVATE_OUTER_REF = 0x000400,
  GOVD_LINEAR = 0x000800,
  GOVD_ALIGNED = 0x001000,

  /* Flag for GOVD_MAP: don't copy back.  */
  GOVD_MAP_TO_ONLY = 0x002000,

  /* Flag for GOVD_LINEAR or GOVD_LASTPRIVATE: no outer reference.  */
  GOVD_LINEAR_LASTPRIVATE_NO_OUTER = 0x004000,

  GOVD_MAP_0LEN_ARRAY = 0x008000,

  /* Flag for GOVD_MAP, if it is always, to or always, tofrom mapping.  */
  GOVD_MAP_ALWAYS_TO = 0x010000,

  /* Flag for shared vars that are or might be stored to in the region.  */
  GOVD_WRITTEN = 0x020000,

  /* Flag for GOVD_MAP, if it is a forced mapping.  */
  GOVD_MAP_FORCE = 0x040000,

  /* Flag for GOVD_MAP: must be present already.  */
  GOVD_MAP_FORCE_PRESENT = 0x080000,

  /* Flag for GOVD_MAP: only allocate.  */
  GOVD_MAP_ALLOC_ONLY = 0x100000,

  /* Flag for GOVD_MAP: only copy back.  */
  GOVD_MAP_FROM_ONLY = 0x200000,

  GOVD_NONTEMPORAL = 0x400000,

  /* Flag for GOVD_LASTPRIVATE: conditional modifier.  */
  GOVD_LASTPRIVATE_CONDITIONAL = 0x800000,

  GOVD_CONDTEMP = 0x1000000,

  /* Flag for GOVD_REDUCTION: inscan seen in {in,ex}clusive clause.  */
  GOVD_REDUCTION_INSCAN = 0x2000000,

  /* Flag for GOVD_MAP: (struct) vars that have pointer attachments for
     fields.  */
  GOVD_MAP_HAS_ATTACHMENTS = 0x4000000,

  GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
			   | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LINEAR
			   | GOVD_LOCAL)
};


enum omp_region_type
{
  ORT_WORKSHARE = 0x00,
  ORT_TASKGROUP = 0x01,
  ORT_SIMD = 0x04,

  ORT_PARALLEL = 0x08,
  ORT_COMBINED_PARALLEL = ORT_PARALLEL | 1,

  ORT_TASK = 0x10,
  ORT_UNTIED_TASK = ORT_TASK | 1,
  ORT_TASKLOOP = ORT_TASK | 2,
  ORT_UNTIED_TASKLOOP = ORT_UNTIED_TASK | 2,

  ORT_TEAMS = 0x20,
  ORT_COMBINED_TEAMS = ORT_TEAMS | 1,
  ORT_HOST_TEAMS = ORT_TEAMS | 2,
  ORT_COMBINED_HOST_TEAMS = ORT_COMBINED_TEAMS | 2,

  /* Data region.  */
  ORT_TARGET_DATA = 0x40,

  /* Data region with offloading.  */
  ORT_TARGET = 0x80,
  ORT_COMBINED_TARGET = ORT_TARGET | 1,
  ORT_IMPLICIT_TARGET = ORT_TARGET | 2,

  /* OpenACC variants.  */
  ORT_ACC = 0x100,  /* A generic OpenACC region.  */
  ORT_ACC_DATA = ORT_ACC | ORT_TARGET_DATA,  /* Data construct.  */
  ORT_ACC_PARALLEL = ORT_ACC | ORT_TARGET,  /* Parallel construct.  */
  ORT_ACC_KERNELS = ORT_ACC | ORT_TARGET | 2,  /* Kernels construct.  */
  ORT_ACC_SERIAL = ORT_ACC | ORT_TARGET | 4,  /* Serial construct.  */
  ORT_ACC_HOST_DATA = ORT_ACC | ORT_TARGET_DATA | 2,  /* Host data.  */

  /* Dummy OpenMP region, used to disable expansion of
     DECL_VALUE_EXPRs in taskloop pre body.  */
  ORT_NONE = 0x200
};

/* Gimplify hashtable helper.  */

struct gimplify_hasher : free_ptr_hash <elt_t>
{
  static inline hashval_t hash (const elt_t *);
  static inline bool equal (const elt_t *, const elt_t *);
};

struct gimplify_ctx
{
  struct gimplify_ctx *prev_context;

  vec<gbind *> bind_expr_stack;
  tree temps;
  gimple_seq conditional_cleanups;
  tree exit_label;
  tree return_temp;

  vec<tree> case_labels;
  hash_set<tree> *live_switch_vars;
  /* The formal temporary table.  Should this be persistent?  */
  hash_table<gimplify_hasher> *temp_htab;

  int conditions;
  unsigned into_ssa : 1;
  unsigned allow_rhs_cond_expr : 1;
  unsigned in_cleanup_point_expr : 1;
  unsigned keep_stack : 1;
  unsigned save_stack : 1;
  unsigned in_switch_expr : 1;
};

enum gimplify_defaultmap_kind
{
  GDMK_SCALAR,
  GDMK_AGGREGATE,
  GDMK_ALLOCATABLE,
  GDMK_POINTER
};

struct gimplify_omp_ctx
{
  struct gimplify_omp_ctx *outer_context;
  splay_tree variables;
  hash_set<tree> *privatized_types;
  tree clauses;
  /* Iteration variables in an OMP_FOR.  */
  vec<tree> loop_iter_var;
  location_t location;
  enum omp_clause_default_kind default_kind;
  enum omp_region_type region_type;
  enum tree_code code;
  bool combined_loop;
  bool distribute;
  bool target_firstprivatize_array_bases;
  bool add_safelen1;
  bool order_concurrent;
  int defaultmap[4];
};

static struct gimplify_ctx *gimplify_ctxp;
static struct gimplify_omp_ctx *gimplify_omp_ctxp;
static bool in_omp_construct;

/* Forward declaration.  */
static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
static hash_map<tree, tree> *oacc_declare_returns;
static enum gimplify_status gimplify_expr (tree *, gimple_seq *, gimple_seq *,
					   bool (*) (tree), fallback_t, bool);
/* Shorter alias for gimple_seq_add_stmt_without_update, for use in
   gimplify.c only.  */

static inline void
gimplify_seq_add_stmt (gimple_seq *seq_p, gimple *gs)
{
  gimple_seq_add_stmt_without_update (seq_p, gs);
}

/* Append sequence SRC to the end of sequence *DST_P.  If *DST_P is
   NULL, a new sequence is allocated.  This function is
   similar to gimple_seq_add_seq, but does not scan the operands.
   During gimplification, we need to manipulate statement sequences
   before the def/use vectors have been constructed.  */

static void
gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
{
  gimple_stmt_iterator si;

  if (src == NULL)
    return;

  si = gsi_last (*dst_p);
  gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
}


/* Pointer to a list of allocated gimplify_ctx structs to be used for pushing
   and popping gimplify contexts.  */

static struct gimplify_ctx *ctx_pool = NULL;

/* Return a gimplify context struct from the pool.  */

static inline struct gimplify_ctx *
ctx_alloc (void)
{
  struct gimplify_ctx * c = ctx_pool;

  if (c)
    ctx_pool = c->prev_context;
  else
    c = XNEW (struct gimplify_ctx);

  memset (c, '\0', sizeof (*c));
  return c;
}

/* Put gimplify context C back into the pool.  */

static inline void
ctx_free (struct gimplify_ctx *c)
{
  c->prev_context = ctx_pool;
  ctx_pool = c;
}

/* Free allocated ctx stack memory.  */

void
free_gimplify_stack (void)
{
  struct gimplify_ctx *c;

  while ((c = ctx_pool))
    {
      ctx_pool = c->prev_context;
      free (c);
    }
}

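/* For illustration, the pool is driven by the push/pop entry points
   below; a typical caller pairs them as

     push_gimplify_context ();
     ... gimplify a function body ...
     pop_gimplify_context (bind_stmt);

   and the context structs cycle through CTX_POOL instead of being
   freed, until free_gimplify_stack releases them for good at the end
   of the compilation.  */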

/* Set up a context for the gimplifier.  */

void
push_gimplify_context (bool in_ssa, bool rhs_cond_ok)
{
  struct gimplify_ctx *c = ctx_alloc ();

  c->prev_context = gimplify_ctxp;
  gimplify_ctxp = c;
  gimplify_ctxp->into_ssa = in_ssa;
  gimplify_ctxp->allow_rhs_cond_expr = rhs_cond_ok;
}

/* Tear down a context for the gimplifier.  If BODY is non-null, then
   put the temporaries into the outer BIND_EXPR.  Otherwise, put them
   in the local_decls.

   BODY is not a sequence, but the first tuple in a sequence.  */

void
pop_gimplify_context (gimple *body)
{
  struct gimplify_ctx *c = gimplify_ctxp;

  gcc_assert (c
	      && (!c->bind_expr_stack.exists ()
		  || c->bind_expr_stack.is_empty ()));
  c->bind_expr_stack.release ();
  gimplify_ctxp = c->prev_context;

  if (body)
    declare_vars (c->temps, body, false);
  else
    record_vars (c->temps);

  delete c->temp_htab;
  c->temp_htab = NULL;
  ctx_free (c);
}

/* Push a GIMPLE_BIND tuple onto the stack of bindings.  */

static void
gimple_push_bind_expr (gbind *bind_stmt)
{
  gimplify_ctxp->bind_expr_stack.reserve (8);
  gimplify_ctxp->bind_expr_stack.safe_push (bind_stmt);
}

/* Pop the first element off the stack of bindings.  */

static void
gimple_pop_bind_expr (void)
{
  gimplify_ctxp->bind_expr_stack.pop ();
}

/* Return the first element of the stack of bindings.  */

gbind *
gimple_current_bind_expr (void)
{
  return gimplify_ctxp->bind_expr_stack.last ();
}

/* Return the stack of bindings created during gimplification.  */

vec<gbind *>
gimple_bind_expr_stack (void)
{
  return gimplify_ctxp->bind_expr_stack;
}

/* Return true iff there is a COND_EXPR between us and the innermost
   CLEANUP_POINT_EXPR.  This info is used by gimple_push_cleanup.  */

static bool
gimple_conditional_context (void)
{
  return gimplify_ctxp->conditions > 0;
}

/* Note that we've entered a COND_EXPR.  */

static void
gimple_push_condition (void)
{
#ifdef ENABLE_GIMPLE_CHECKING
  if (gimplify_ctxp->conditions == 0)
    gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
#endif
  ++(gimplify_ctxp->conditions);
}

/* Note that we've left a COND_EXPR.  If we're back at unconditional scope
   now, add any conditional cleanups we've seen to the prequeue.  */

static void
gimple_pop_condition (gimple_seq *pre_p)
{
  int conds = --(gimplify_ctxp->conditions);

  gcc_assert (conds >= 0);
  if (conds == 0)
    {
      gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
      gimplify_ctxp->conditional_cleanups = NULL;
    }
}

/* A stable comparison routine for use with splay trees and DECLs.  */

static int
splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
{
  tree a = (tree) xa;
  tree b = (tree) xb;

  return DECL_UID (a) - DECL_UID (b);
}

/* Create a new omp construct that deals with variable remapping.  */

static struct gimplify_omp_ctx *
new_omp_context (enum omp_region_type region_type)
{
  struct gimplify_omp_ctx *c;

  c = XCNEW (struct gimplify_omp_ctx);
  c->outer_context = gimplify_omp_ctxp;
  c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
  c->privatized_types = new hash_set<tree>;
  c->location = input_location;
  c->region_type = region_type;
  if ((region_type & ORT_TASK) == 0)
    c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
  else
    c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;
  c->defaultmap[GDMK_SCALAR] = GOVD_MAP;
  c->defaultmap[GDMK_AGGREGATE] = GOVD_MAP;
  c->defaultmap[GDMK_ALLOCATABLE] = GOVD_MAP;
  c->defaultmap[GDMK_POINTER] = GOVD_MAP;

  return c;
}

/* Destroy an omp construct that deals with variable remapping.  */

static void
delete_omp_context (struct gimplify_omp_ctx *c)
{
  splay_tree_delete (c->variables);
  delete c->privatized_types;
  c->loop_iter_var.release ();
  XDELETE (c);
}

static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);

/* Both gimplify the statement T and append it to *SEQ_P.  This function
   behaves exactly as gimplify_stmt, but you don't have to pass T as a
   reference.  */

void
gimplify_and_add (tree t, gimple_seq *seq_p)
{
  gimplify_stmt (&t, seq_p);
}

/* Gimplify statement T into sequence *SEQ_P, and return the first
   tuple in the sequence of generated tuples for this statement.
   Return NULL if gimplifying T produced no tuples.  */

static gimple *
gimplify_and_return_first (tree t, gimple_seq *seq_p)
{
  gimple_stmt_iterator last = gsi_last (*seq_p);

  gimplify_and_add (t, seq_p);

  if (!gsi_end_p (last))
    {
      gsi_next (&last);
      return gsi_stmt (last);
    }
  else
    return gimple_seq_first_stmt (*seq_p);
}

/* Returns true iff T is a valid RHS for an assignment to an un-renamed
   LHS, or for a call argument.  */

static bool
is_gimple_mem_rhs (tree t)
{
  /* If we're dealing with a renamable type, either source or dest must be
     a renamed variable.  */
  if (is_gimple_reg_type (TREE_TYPE (t)))
    return is_gimple_val (t);
  else
    return is_gimple_val (t) || is_gimple_lvalue (t);
}

/* Return true if T is a CALL_EXPR or an expression that can be
   assigned to a temporary.  Note that this predicate should only be
   used during gimplification.  See the rationale for this in
   gimplify_modify_expr.  */

static bool
is_gimple_reg_rhs_or_call (tree t)
{
  return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
	  || TREE_CODE (t) == CALL_EXPR);
}

/* Return true if T is a valid memory RHS or a CALL_EXPR.  Note that
   this predicate should only be used during gimplification.  See the
   rationale for this in gimplify_modify_expr.  */

static bool
is_gimple_mem_rhs_or_call (tree t)
{
  /* If we're dealing with a renamable type, either source or dest must be
     a renamed variable.  */
  if (is_gimple_reg_type (TREE_TYPE (t)))
    return is_gimple_val (t);
  else
    return (is_gimple_val (t)
	    || is_gimple_lvalue (t)
	    || TREE_CLOBBER_P (t)
	    || TREE_CODE (t) == CALL_EXPR);
}

/* Create a temporary with a name derived from VAL.  Subroutine of
   lookup_tmp_var; nobody else should call this function.  */

static inline tree
create_tmp_from_val (tree val)
{
  /* Drop all qualifiers and address-space information from the value type.  */
  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (val));
  tree var = create_tmp_var (type, get_name (val));
  if (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
      || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE)
    DECL_GIMPLE_REG_P (var) = 1;
  return var;
}

/* Create a temporary to hold the value of VAL.  If IS_FORMAL, try to reuse
   an existing expression temporary.  */

static tree
lookup_tmp_var (tree val, bool is_formal)
{
  tree ret;

  /* If not optimizing, never really reuse a temporary.  local-alloc
     won't allocate any variable that is used in more than one basic
     block, which means it will go into memory, causing much extra
     work in reload and final and poorer code generation, outweighing
     the extra memory allocation here.  */
  if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
    ret = create_tmp_from_val (val);
  else
    {
      elt_t elt, *elt_p;
      elt_t **slot;

      elt.val = val;
      if (!gimplify_ctxp->temp_htab)
	gimplify_ctxp->temp_htab = new hash_table<gimplify_hasher> (1000);
      slot = gimplify_ctxp->temp_htab->find_slot (&elt, INSERT);
      if (*slot == NULL)
	{
	  elt_p = XNEW (elt_t);
	  elt_p->val = val;
	  elt_p->temp = ret = create_tmp_from_val (val);
	  *slot = elt_p;
	}
      else
	{
	  elt_p = *slot;
	  ret = elt_p->temp;
	}
    }

  return ret;
}
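
/* For illustration: with optimization enabled, two lookups of a formal
   temporary for structurally equal expressions (say, `a + b' twice,
   hashed through gimplify_hasher) hit the same slot above and yield
   the same temporary variable, instead of allocating a fresh one per
   request.  */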

/* Helper for get_formal_tmp_var and get_initialized_tmp_var.  */

static tree
internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
		      bool is_formal, bool allow_ssa)
{
  tree t, mod;

  /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
     can create an INIT_EXPR and convert it into a GIMPLE_CALL below.  */
  gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
		 fb_rvalue);

  if (allow_ssa
      && gimplify_ctxp->into_ssa
      && is_gimple_reg_type (TREE_TYPE (val)))
    {
      t = make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val)));
      if (! gimple_in_ssa_p (cfun))
	{
	  const char *name = get_name (val);
	  if (name)
	    SET_SSA_NAME_VAR_OR_IDENTIFIER (t, create_tmp_var_name (name));
	}
    }
  else
    t = lookup_tmp_var (val, is_formal);

  mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));

  SET_EXPR_LOCATION (mod, EXPR_LOC_OR_LOC (val, input_location));

  /* gimplify_modify_expr might want to reduce this further.  */
  gimplify_and_add (mod, pre_p);
  ggc_free (mod);

  return t;
}

/* Return a formal temporary variable initialized with VAL.  PRE_P is as
   in gimplify_expr.  Only use this function if:

   1) The value of the unfactored expression represented by VAL will not
      change between the initialization and use of the temporary, and
   2) The temporary will not be otherwise modified.

   For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
   and #2 means it is inappropriate for && temps.

   For other cases, use get_initialized_tmp_var instead.  */

tree
get_formal_tmp_var (tree val, gimple_seq *pre_p)
{
  return internal_get_tmp_var (val, pre_p, NULL, true, true);
}

/* Return a temporary variable initialized with VAL.  PRE_P and POST_P
   are as in gimplify_expr.  */

tree
get_initialized_tmp_var (tree val, gimple_seq *pre_p,
			 gimple_seq *post_p /* = NULL */,
			 bool allow_ssa /* = true */)
{
  return internal_get_tmp_var (val, pre_p, post_p, false, allow_ssa);
}
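
/* A minimal usage sketch: a gimplification routine that needs VAL
   evaluated exactly once writes

     tree tmp = get_initialized_tmp_var (val, pre_p);

   which appends `tmp = val' to *PRE_P and returns TMP, an SSA name or
   temporary VAR_DECL that can then stand in for VAL.  */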

/* Declare all the variables in VARS in SCOPE.  If DEBUG_INFO is true,
   generate debug info for them; otherwise don't.  */

void
declare_vars (tree vars, gimple *gs, bool debug_info)
{
  tree last = vars;
  if (last)
    {
      tree temps, block;

      gbind *scope = as_a <gbind *> (gs);

      temps = nreverse (last);

      block = gimple_bind_block (scope);
      gcc_assert (!block || TREE_CODE (block) == BLOCK);
      if (!block || !debug_info)
	{
	  DECL_CHAIN (last) = gimple_bind_vars (scope);
	  gimple_bind_set_vars (scope, temps);
	}
      else
	{
	  /* We need to attach the nodes both to the BIND_EXPR and to its
	     associated BLOCK for debugging purposes.  The key point here
	     is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
	     is a subchain of the BIND_EXPR_VARS of the BIND_EXPR.  */
	  if (BLOCK_VARS (block))
	    BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
	  else
	    {
	      gimple_bind_set_vars (scope,
				    chainon (gimple_bind_vars (scope), temps));
	      BLOCK_VARS (block) = temps;
	    }
	}
    }
}

/* For VAR a VAR_DECL of variable size, try to find a constant upper bound
   for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly.  Abort if
   no such upper bound can be obtained.  */

static void
force_constant_size (tree var)
{
  /* The only attempt we make is by querying the maximum size of objects
     of the variable's type.  */

  HOST_WIDE_INT max_size;

  gcc_assert (VAR_P (var));

  max_size = max_int_size_in_bytes (TREE_TYPE (var));

  gcc_assert (max_size >= 0);

  DECL_SIZE_UNIT (var)
    = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
  DECL_SIZE (var)
    = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
}

/* Push the temporary variable TMP into the current binding.  */

void
gimple_add_tmp_var_fn (struct function *fn, tree tmp)
{
  gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));

  /* Later processing assumes that the object size is constant, which might
     not be true at this point.  Force the use of a constant upper bound in
     this case.  */
  if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp)))
    force_constant_size (tmp);

  DECL_CONTEXT (tmp) = fn->decl;
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;

  record_vars_into (tmp, fn->decl);
}

/* Push the temporary variable TMP into the current binding.  */

void
gimple_add_tmp_var (tree tmp)
{
  gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));

  /* Later processing assumes that the object size is constant, which might
     not be true at this point.  Force the use of a constant upper bound in
     this case.  */
  if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp)))
    force_constant_size (tmp);

  DECL_CONTEXT (tmp) = current_function_decl;
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;

  if (gimplify_ctxp)
    {
      DECL_CHAIN (tmp) = gimplify_ctxp->temps;
      gimplify_ctxp->temps = tmp;

      /* Mark temporaries local within the nearest enclosing parallel.  */
      if (gimplify_omp_ctxp)
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  int flag = GOVD_LOCAL;
	  while (ctx
		 && (ctx->region_type == ORT_WORKSHARE
		     || ctx->region_type == ORT_TASKGROUP
		     || ctx->region_type == ORT_SIMD
		     || ctx->region_type == ORT_ACC))
	    {
	      if (ctx->region_type == ORT_SIMD
		  && TREE_ADDRESSABLE (tmp)
		  && !TREE_STATIC (tmp))
		{
		  if (TREE_CODE (DECL_SIZE_UNIT (tmp)) != INTEGER_CST)
		    ctx->add_safelen1 = true;
		  else
		    flag = GOVD_PRIVATE;
		  break;
		}
	      ctx = ctx->outer_context;
	    }
	  if (ctx)
	    omp_add_variable (ctx, tmp, flag | GOVD_SEEN);
	}
    }
  else if (cfun)
    record_vars (tmp);
  else
    {
      gimple_seq body_seq;

      /* This case is for nested functions.  We need to expose the locals
	 they create.  */
      body_seq = gimple_body (current_function_decl);
      declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
    }
}



/* This page contains routines to unshare tree nodes, i.e. to duplicate tree
   nodes that are referenced more than once in GENERIC functions.  This is
   necessary because gimplification (translation into GIMPLE) is performed
   by modifying tree nodes in-place, so gimplification of a shared node in a
   first context could generate an invalid GIMPLE form in a second context.

   This is achieved with a simple mark/copy/unmark algorithm that walks the
   GENERIC representation top-down, marks nodes with TREE_VISITED the first
   time it encounters them, duplicates them if they already have TREE_VISITED
   set, and finally removes the TREE_VISITED marks it has set.

   The algorithm works only at the function level, i.e. it generates a GENERIC
   representation of a function with no nodes shared within the function when
   passed a GENERIC function (except for nodes that are allowed to be shared).

   At the global level, it is also necessary to unshare tree nodes that are
   referenced in more than one function, for the same aforementioned reason.
   This requires some cooperation from the front-end.  There are 2 strategies:

   1. Manual unsharing.  The front-end needs to call unshare_expr on every
      expression that might end up being shared across functions.

   2. Deep unsharing.  This is an extension of regular unsharing.  Instead
      of calling unshare_expr on expressions that might be shared across
      functions, the front-end pre-marks them with TREE_VISITED.  This will
      ensure that they are unshared on the first reference within functions
      when the regular unsharing algorithm runs.  The counterpart is that
      this algorithm must look deeper than for manual unsharing, which is
      specified by LANG_HOOKS_DEEP_UNSHARING.

   If there are only few specific cases of node sharing across functions, it is
   probably easier for a front-end to unshare the expressions manually.  On the
   contrary, if the expressions generated at the global level are as widespread
   as expressions generated within functions, deep unsharing is very likely the
   way to go.  */
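
/* For illustration, suppose the same PLUS_EXPR node N for `n + 1' is
   referenced from two statements of a function.  The walk marks N with
   TREE_VISITED at its first occurrence; at the second occurrence the
   flag is already set, so mostly_copy_tree_r (below) replaces that
   reference with a fresh copy of N.  A final pass with unmark_visited_r
   clears the flags again.  */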

/* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
   These nodes model computations that must be done once.  If we were to
   unshare something like SAVE_EXPR(i++), the gimplification process would
   create wrong code.  However, if DATA is non-null, it must hold a pointer
   set that is used to unshare the subtrees of these nodes.  */

static tree
mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
     copy their subtrees if we can make sure to do it only once.  */
  if (code == SAVE_EXPR || code == TARGET_EXPR || code == BIND_EXPR)
    {
      if (data && !((hash_set<tree> *)data)->add (t))
	;
      else
	*walk_subtrees = 0;
    }

  /* Stop at types, decls, constants like copy_tree_r.  */
  else if (TREE_CODE_CLASS (code) == tcc_type
	   || TREE_CODE_CLASS (code) == tcc_declaration
	   || TREE_CODE_CLASS (code) == tcc_constant)
    *walk_subtrees = 0;

  /* Cope with the statement expression extension.  */
  else if (code == STATEMENT_LIST)
    ;

  /* Leave the bulk of the work to copy_tree_r itself.  */
  else
    copy_tree_r (tp, walk_subtrees, NULL);

  return NULL_TREE;
}

/* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
   If *TP has been visited already, then *TP is deeply copied by calling
   mostly_copy_tree_r.  DATA is passed to mostly_copy_tree_r unmodified.  */

static tree
copy_if_shared_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Skip types, decls, and constants.  But we do want to look at their
     types and the bounds of types.  Mark them as visited so we properly
     unmark their subtrees on the unmark pass.  If we've already seen them,
     don't look down further.  */
  if (TREE_CODE_CLASS (code) == tcc_type
      || TREE_CODE_CLASS (code) == tcc_declaration
      || TREE_CODE_CLASS (code) == tcc_constant)
    {
      if (TREE_VISITED (t))
	*walk_subtrees = 0;
      else
	TREE_VISITED (t) = 1;
    }

  /* If this node has been visited already, unshare it and don't look
     any deeper.  */
  else if (TREE_VISITED (t))
    {
      walk_tree (tp, mostly_copy_tree_r, data, NULL);
      *walk_subtrees = 0;
    }

  /* Otherwise, mark the node as visited and keep looking.  */
  else
    TREE_VISITED (t) = 1;

  return NULL_TREE;
}

/* Unshare most of the shared trees rooted at *TP.  DATA is passed to the
   copy_if_shared_r callback unmodified.  */

void
copy_if_shared (tree *tp, void *data)
{
  walk_tree (tp, copy_if_shared_r, data, NULL);
}

/* Unshare all the trees in the body of FNDECL, as well as in the bodies of
   any nested functions.  */

static void
unshare_body (tree fndecl)
{
  struct cgraph_node *cgn = cgraph_node::get (fndecl);
  /* If the language requires deep unsharing, we need a pointer set to make
     sure we don't repeatedly unshare subtrees of unshareable nodes.  */
  hash_set<tree> *visited
    = lang_hooks.deep_unsharing ? new hash_set<tree> : NULL;

  copy_if_shared (&DECL_SAVED_TREE (fndecl), visited);
  copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl)), visited);
  copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)), visited);

  delete visited;

  if (cgn)
    for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
      unshare_body (cgn->decl);
}

/* Callback for walk_tree to unmark the visited trees rooted at *TP.
   Subtrees are walked until the first unvisited node is encountered.  */

static tree
unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;

  /* If this node has been visited, unmark it and keep looking.  */
  if (TREE_VISITED (t))
    TREE_VISITED (t) = 0;

  /* Otherwise, don't look any deeper.  */
  else
    *walk_subtrees = 0;

  return NULL_TREE;
}

/* Unmark the visited trees rooted at *TP.  */

static inline void
unmark_visited (tree *tp)
{
  walk_tree (tp, unmark_visited_r, NULL, NULL);
}

/* Likewise, but mark all trees as not visited.  */

static void
unvisit_body (tree fndecl)
{
  struct cgraph_node *cgn = cgraph_node::get (fndecl);

  unmark_visited (&DECL_SAVED_TREE (fndecl));
  unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl)));
  unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)));

  if (cgn)
    for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
      unvisit_body (cgn->decl);
}

/* Unconditionally make an unshared copy of EXPR.  This is used when using
   stored expressions which span multiple functions, such as BINFO_VTABLE,
   as the normal unsharing process can't tell that they're shared.  */

tree
unshare_expr (tree expr)
{
  walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
  return expr;
}

/* Worker for unshare_expr_without_location.  */

static tree
prune_expr_location (tree *tp, int *walk_subtrees, void *)
{
  if (EXPR_P (*tp))
    SET_EXPR_LOCATION (*tp, UNKNOWN_LOCATION);
  else
    *walk_subtrees = 0;
  return NULL_TREE;
}

/* Similar to unshare_expr but also prune all expression locations
   from EXPR.  */

tree
unshare_expr_without_location (tree expr)
{
  walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
  if (EXPR_P (expr))
    walk_tree (&expr, prune_expr_location, NULL, NULL);
  return expr;
}

/* Return the EXPR_LOCATION of EXPR, if it (maybe recursively) has
   one, OR_ELSE otherwise.  The location of a STATEMENT_LIST
   comprising at least one DEBUG_BEGIN_STMT followed by exactly one
   EXPR is the location of the EXPR.  */

static location_t
rexpr_location (tree expr, location_t or_else = UNKNOWN_LOCATION)
{
  if (!expr)
    return or_else;

  if (EXPR_HAS_LOCATION (expr))
    return EXPR_LOCATION (expr);

  if (TREE_CODE (expr) != STATEMENT_LIST)
    return or_else;

  tree_stmt_iterator i = tsi_start (expr);

  bool found = false;
  while (!tsi_end_p (i) && TREE_CODE (tsi_stmt (i)) == DEBUG_BEGIN_STMT)
    {
      found = true;
      tsi_next (&i);
    }

  if (!found || !tsi_one_before_end_p (i))
    return or_else;

  return rexpr_location (tsi_stmt (i), or_else);
}
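
/* For illustration: given a STATEMENT_LIST of the form

     DEBUG_BEGIN_STMT
     x = 1;                 <-- location L

   rexpr_location skips the debug marker and returns L, while a list
   holding more than one real statement yields OR_ELSE.  */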

/* Return TRUE iff EXPR (maybe recursively) has a location; see
   rexpr_location for the potential recursion.  */

static inline bool
rexpr_has_location (tree expr)
{
  return rexpr_location (expr) != UNKNOWN_LOCATION;
}


/* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
   contain statements and have a value.  Assign its value to a temporary
   and give it void_type_node.  Return the temporary, or NULL_TREE if
   WRAPPER was already void.  */

tree
voidify_wrapper_expr (tree wrapper, tree temp)
{
  tree type = TREE_TYPE (wrapper);
  if (type && !VOID_TYPE_P (type))
    {
      tree *p;

      /* Set p to point to the body of the wrapper.  Loop until we find
	 something that isn't a wrapper.  */
      for (p = &wrapper; p && *p; )
	{
	  switch (TREE_CODE (*p))
	    {
	    case BIND_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      /* For a BIND_EXPR, the body is operand 1.  */
	      p = &BIND_EXPR_BODY (*p);
	      break;

	    case CLEANUP_POINT_EXPR:
	    case TRY_FINALLY_EXPR:
	    case TRY_CATCH_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TREE_OPERAND (*p, 0);
	      break;

	    case STATEMENT_LIST:
	      {
		tree_stmt_iterator i = tsi_last (*p);
		TREE_SIDE_EFFECTS (*p) = 1;
		TREE_TYPE (*p) = void_type_node;
		p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
	      }
	      break;

	    case COMPOUND_EXPR:
	      /* Advance to the last statement.  Set all container types to
		 void.  */
	      for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		}
	      break;

	    case TRANSACTION_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TRANSACTION_EXPR_BODY (*p);
	      break;

	    default:
	      /* Assume that any tree upon which voidify_wrapper_expr is
		 directly called is a wrapper, and that its body is op0.  */
	      if (p == &wrapper)
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		  p = &TREE_OPERAND (*p, 0);
		  break;
		}
	      goto out;
	    }
	}

    out:
      if (p == NULL || IS_EMPTY_STMT (*p))
	temp = NULL_TREE;
      else if (temp)
	{
	  /* The wrapper is on the RHS of an assignment that we're pushing
	     down.  */
	  gcc_assert (TREE_CODE (temp) == INIT_EXPR
		      || TREE_CODE (temp) == MODIFY_EXPR);
	  TREE_OPERAND (temp, 1) = *p;
	  *p = temp;
	}
      else
	{
	  temp = create_tmp_var (type, "retval");
	  *p = build2 (INIT_EXPR, type, temp, *p);
	}

      return temp;
    }

  return NULL_TREE;
}
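
/* For illustration: when gimplify_bind_expr calls this with TEMP equal
   to NULL on a statement expression such as

     ({ int i = f (); i + 1; })

   the loop above walks down to the final value `i + 1', replaces it
   with `retval = i + 1' via a new temporary, gives each wrapper on the
   way void type, and returns `retval'.  */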

/* Prepare calls to builtins to SAVE and RESTORE the stack as well as
   a temporary through which they communicate.  */

static void
build_stack_save_restore (gcall **save, gcall **restore)
{
  tree tmp_var;

  *save = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE), 0);
  tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
  gimple_call_set_lhs (*save, tmp_var);

  *restore
    = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE),
			 1, tmp_var);
}

/* Generate an IFN_ASAN_MARK call that poisons the shadow memory of
   variable DECL.  */

static tree
build_asan_poison_call_expr (tree decl)
{
  /* Do not poison variables that have size equal to zero.  */
  tree unit_size = DECL_SIZE_UNIT (decl);
  if (zerop (unit_size))
    return NULL_TREE;

  tree base = build_fold_addr_expr (decl);

  return build_call_expr_internal_loc (UNKNOWN_LOCATION, IFN_ASAN_MARK,
				       void_type_node, 3,
				       build_int_cst (integer_type_node,
						      ASAN_MARK_POISON),
				       base, unit_size);
}

/* Generate IFN_ASAN_MARK call that would poison or unpoison, depending
   on POISON flag, shadow memory of a DECL variable.  The call will be
   put on location identified by IT iterator, where BEFORE flag drives
   position where the stmt will be put.  */

static void
asan_poison_variable (tree decl, bool poison, gimple_stmt_iterator *it,
		      bool before)
{
  tree unit_size = DECL_SIZE_UNIT (decl);
  tree base = build_fold_addr_expr (decl);

  /* Do not poison variables that have size equal to zero.  */
  if (zerop (unit_size))
    return;

  /* It's necessary to have all stack variables aligned to ASAN granularity
     bytes.  */
  if (DECL_ALIGN_UNIT (decl) <= ASAN_SHADOW_GRANULARITY)
    SET_DECL_ALIGN (decl, BITS_PER_UNIT * ASAN_SHADOW_GRANULARITY);

  HOST_WIDE_INT flags = poison ? ASAN_MARK_POISON : ASAN_MARK_UNPOISON;

  gimple *g
    = gimple_build_call_internal (IFN_ASAN_MARK, 3,
				  build_int_cst (integer_type_node, flags),
				  base, unit_size);

  if (before)
    gsi_insert_before (it, g, GSI_NEW_STMT);
  else
    gsi_insert_after (it, g, GSI_NEW_STMT);
}
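
/* For illustration (a sketch; the exact operands depend on the
   variable), the internal calls built here show up in GIMPLE dumps as

     .ASAN_MARK (UNPOISON, &x, 4);
     ...
     .ASAN_MARK (POISON, &x, 4);

   unpoisoning `x' while it is live and poisoning it again when it
   goes out of scope.  */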

/* Generate IFN_ASAN_MARK internal call that depending on POISON flag
   either poisons or unpoisons a DECL.  Created statement is appended
   to SEQ_P gimple sequence.  */

static void
asan_poison_variable (tree decl, bool poison, gimple_seq *seq_p)
{
  gimple_stmt_iterator it = gsi_last (*seq_p);
  bool before = false;

  if (gsi_end_p (it))
    before = true;

  asan_poison_variable (decl, poison, &it, before);
}

/* Sort pair of VAR_DECLs A and B by DECL_UID.  */

static int
sort_by_decl_uid (const void *a, const void *b)
{
  const tree *t1 = (const tree *)a;
  const tree *t2 = (const tree *)b;

  int uid1 = DECL_UID (*t1);
  int uid2 = DECL_UID (*t2);

  if (uid1 < uid2)
    return -1;
  else if (uid1 > uid2)
    return 1;
  else
    return 0;
}

/* Generate IFN_ASAN_MARK internal call for all VARIABLES
   depending on POISON flag.  Created statement is appended
   to SEQ_P gimple sequence.  */

static void
asan_poison_variables (hash_set<tree> *variables, bool poison, gimple_seq *seq_p)
{
  unsigned c = variables->elements ();
  if (c == 0)
    return;

  auto_vec<tree> sorted_variables (c);

  for (hash_set<tree>::iterator it = variables->begin ();
       it != variables->end (); ++it)
    sorted_variables.safe_push (*it);

  sorted_variables.qsort (sort_by_decl_uid);

  unsigned i;
  tree var;
  FOR_EACH_VEC_ELT (sorted_variables, i, var)
    {
      asan_poison_variable (var, poison, seq_p);

      /* Add the use_after_scope_memory attribute to the variable so
	 that it is not rewritten into SSA form.  */
      if (!lookup_attribute (ASAN_USE_AFTER_SCOPE_ATTRIBUTE,
			     DECL_ATTRIBUTES (var)))
	DECL_ATTRIBUTES (var)
	  = tree_cons (get_identifier (ASAN_USE_AFTER_SCOPE_ATTRIBUTE),
		       integer_one_node,
		       DECL_ATTRIBUTES (var));
    }
}

/* Gimplify a BIND_EXPR.  Just voidify and recurse.  */

static enum gimplify_status
gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree bind_expr = *expr_p;
  bool old_keep_stack = gimplify_ctxp->keep_stack;
  bool old_save_stack = gimplify_ctxp->save_stack;
  tree t;
  gbind *bind_stmt;
  gimple_seq body, cleanup;
  gcall *stack_save;
  location_t start_locus = 0, end_locus = 0;
  tree ret_clauses = NULL;

  tree temp = voidify_wrapper_expr (bind_expr, NULL);

  /* Mark variables seen in this bind expr.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
    {
      if (VAR_P (t))
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;

	  /* Mark variable as local.  */
	  if (ctx && ctx->region_type != ORT_NONE && !DECL_EXTERNAL (t))
	    {
	      if (! DECL_SEEN_IN_BIND_EXPR_P (t)
		  || splay_tree_lookup (ctx->variables,
					(splay_tree_key) t) == NULL)
		{
		  int flag = GOVD_LOCAL;
		  if (ctx->region_type == ORT_SIMD
		      && TREE_ADDRESSABLE (t)
		      && !TREE_STATIC (t))
		    {
		      if (TREE_CODE (DECL_SIZE_UNIT (t)) != INTEGER_CST)
			ctx->add_safelen1 = true;
		      else
			flag = GOVD_PRIVATE;
		    }
		  omp_add_variable (ctx, t, flag | GOVD_SEEN);
		}
	      /* Static locals inside of target construct or offloaded
		 routines need to be "omp declare target".  */
	      if (TREE_STATIC (t))
		for (; ctx; ctx = ctx->outer_context)
		  if ((ctx->region_type & ORT_TARGET) != 0)
		    {
		      if (!lookup_attribute ("omp declare target",
					     DECL_ATTRIBUTES (t)))
			{
			  tree id = get_identifier ("omp declare target");
			  DECL_ATTRIBUTES (t)
			    = tree_cons (id, NULL_TREE, DECL_ATTRIBUTES (t));
			  varpool_node *node = varpool_node::get (t);
			  if (node)
			    {
			      node->offloadable = 1;
			      if (ENABLE_OFFLOADING && !DECL_EXTERNAL (t))
				{
				  g->have_offload = true;
				  if (!in_lto_p)
				    vec_safe_push (offload_vars, t);
				}
			    }
			}
		      break;
		    }
	    }

	  DECL_SEEN_IN_BIND_EXPR_P (t) = 1;

	  if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
	    cfun->has_local_explicit_reg_vars = true;
	}

      /* Preliminarily mark non-addressed complex variables as eligible
	 for promotion to gimple registers.  We'll transform their uses
	 as we find them.  */
      if ((TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
	   || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
	  && !TREE_THIS_VOLATILE (t)
	  && (VAR_P (t) && !DECL_HARD_REGISTER (t))
	  && !needs_to_live_in_memory (t))
	DECL_GIMPLE_REG_P (t) = 1;
    }

  bind_stmt = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
				 BIND_EXPR_BLOCK (bind_expr));
  gimple_push_bind_expr (bind_stmt);

  gimplify_ctxp->keep_stack = false;
  gimplify_ctxp->save_stack = false;

  /* Gimplify the body into the GIMPLE_BIND tuple's body.  */
  body = NULL;
  gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
  gimple_bind_set_body (bind_stmt, body);

  /* Source location wise, the cleanup code (stack_restore and clobbers)
     belongs to the end of the block, so propagate what we have.  The
     stack_save operation belongs to the beginning of block, which we can
     infer from the bind_expr directly if the block has no explicit
     assignment.  */
  if (BIND_EXPR_BLOCK (bind_expr))
    {
      end_locus = BLOCK_SOURCE_END_LOCATION (BIND_EXPR_BLOCK (bind_expr));
      start_locus = BLOCK_SOURCE_LOCATION (BIND_EXPR_BLOCK (bind_expr));
    }
  if (start_locus == 0)
    start_locus = EXPR_LOCATION (bind_expr);

  cleanup = NULL;
  stack_save = NULL;

  /* If the code both contains VLAs and calls alloca, then we cannot reclaim
     the stack space allocated to the VLAs.  */
  if (gimplify_ctxp->save_stack && !gimplify_ctxp->keep_stack)
    {
      gcall *stack_restore;

      /* Save stack on entry and restore it on exit.  Add a try_finally
	 block to achieve this.  */
      build_stack_save_restore (&stack_save, &stack_restore);

      gimple_set_location (stack_save, start_locus);
      gimple_set_location (stack_restore, end_locus);

      gimplify_seq_add_stmt (&cleanup, stack_restore);
    }

  /* Add clobbers for all variables that go out of scope.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
    {
      if (VAR_P (t)
	  && !is_global_var (t)
	  && DECL_CONTEXT (t) == current_function_decl)
	{
	  if (!DECL_HARD_REGISTER (t)
	      && !TREE_THIS_VOLATILE (t)
	      && !DECL_HAS_VALUE_EXPR_P (t)
	      /* Only care for variables that have to be in memory.  Others
		 will be rewritten into SSA names, hence moved to the
		 top-level.  */
	      && !is_gimple_reg (t)
	      && flag_stack_reuse != SR_NONE)
	    {
	      tree clobber = build_clobber (TREE_TYPE (t));
	      gimple *clobber_stmt;
	      clobber_stmt = gimple_build_assign (t, clobber);
	      gimple_set_location (clobber_stmt, end_locus);
	      gimplify_seq_add_stmt (&cleanup, clobber_stmt);
	    }

	  if (flag_openacc && oacc_declare_returns != NULL)
	    {
	      tree *c = oacc_declare_returns->get (t);
	      if (c != NULL)
		{
		  if (ret_clauses)
		    OMP_CLAUSE_CHAIN (*c) = ret_clauses;

		  ret_clauses = *c;

		  oacc_declare_returns->remove (t);

		  if (oacc_declare_returns->is_empty ())
		    {
		      delete oacc_declare_returns;
		      oacc_declare_returns = NULL;
		    }
		}
	    }
	}

      if (asan_poisoned_variables != NULL
	  && asan_poisoned_variables->contains (t))
	{
	  asan_poisoned_variables->remove (t);
	  asan_poison_variable (t, true, &cleanup);
	}

      if (gimplify_ctxp->live_switch_vars != NULL
	  && gimplify_ctxp->live_switch_vars->contains (t))
	gimplify_ctxp->live_switch_vars->remove (t);
    }

  if (ret_clauses)
    {
      gomp_target *stmt;
      gimple_stmt_iterator si = gsi_start (cleanup);

      stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
				      ret_clauses);
      gsi_insert_seq_before_without_update (&si, stmt, GSI_NEW_STMT);
    }

  if (cleanup)
    {
      gtry *gs;
      gimple_seq new_body;

      new_body = NULL;
      gs = gimple_build_try (gimple_bind_body (bind_stmt), cleanup,
			     GIMPLE_TRY_FINALLY);

      if (stack_save)
	gimplify_seq_add_stmt (&new_body, stack_save);
      gimplify_seq_add_stmt (&new_body, gs);
      gimple_bind_set_body (bind_stmt, new_body);
    }

  /* keep_stack propagates all the way up to the outermost BIND_EXPR.  */
  if (!gimplify_ctxp->keep_stack)
    gimplify_ctxp->keep_stack = old_keep_stack;
  gimplify_ctxp->save_stack = old_save_stack;

  gimple_pop_bind_expr ();

  gimplify_seq_add_stmt (pre_p, bind_stmt);

  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL_TREE;
  return GS_ALL_DONE;
}
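
/* For illustration (the GIMPLE below is a sketch, not exact output):
   a block containing a variable-length array,

     { char buf[n]; use (buf); }

   becomes a GIMPLE_BIND whose body is wrapped by the try/finally
   built above:

     saved_stack.1 = __builtin_stack_save ();
     try
       {
         ... allocate and use buf ...
       }
     finally
       {
         __builtin_stack_restore (saved_stack.1);
       }  */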

/* Maybe add early return predict statement to PRE_P sequence.  */

static void
maybe_add_early_return_predict_stmt (gimple_seq *pre_p)
{
  /* If we are inside a conditional context, add a PREDICT statement
     marking the early return as unlikely.  */
  if (gimple_conditional_context ())
    {
      gimple *predict = gimple_build_predict (PRED_TREE_EARLY_RETURN,
					      NOT_TAKEN);
      gimplify_seq_add_stmt (pre_p, predict);
    }
}

/* Gimplify a RETURN_EXPR.  If the expression to be returned is not a
   GIMPLE value, it is assigned to a new temporary and the statement is
   re-written to return the temporary.

   PRE_P points to the sequence where side effects that must happen before
   STMT should be stored.  */

static enum gimplify_status
gimplify_return_expr (tree stmt, gimple_seq *pre_p)
{
  greturn *ret;
  tree ret_expr = TREE_OPERAND (stmt, 0);
  tree result_decl, result;

  if (ret_expr == error_mark_node)
    return GS_ERROR;

  if (!ret_expr
      || TREE_CODE (ret_expr) == RESULT_DECL)
    {
      maybe_add_early_return_predict_stmt (pre_p);
      greturn *ret = gimple_build_return (ret_expr);
      gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
      gimplify_seq_add_stmt (pre_p, ret);
      return GS_ALL_DONE;
    }

  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
    result_decl = NULL_TREE;
  else if (TREE_CODE (ret_expr) == COMPOUND_EXPR)
    {
      /* Used in C++ for handling EH cleanup of the return value if a local
	 cleanup throws.  Assume the front-end knows what it's doing.  */
      result_decl = DECL_RESULT (current_function_decl);
      /* But crash if we end up trying to modify ret_expr below.  */
      ret_expr = NULL_TREE;
    }
  else
    {
      result_decl = TREE_OPERAND (ret_expr, 0);

      /* See through a return by reference.  */
      if (TREE_CODE (result_decl) == INDIRECT_REF)
	result_decl = TREE_OPERAND (result_decl, 0);

      gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
		   || TREE_CODE (ret_expr) == INIT_EXPR)
		  && TREE_CODE (result_decl) == RESULT_DECL);
    }

  /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
     Recall that aggregate_value_p is FALSE for any aggregate type that is
     returned in registers.  If we're returning values in registers, then
     we don't want to extend the lifetime of the RESULT_DECL, particularly
     across another call.  In addition, for those aggregates for which
     hard_function_value generates a PARALLEL, we'll die during normal
     expansion of structure assignments; there's special code in expand_return
     to handle this case that does not exist in expand_expr.  */
  if (!result_decl)
    result = NULL_TREE;
  else if (aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
    {
      if (!poly_int_tree_p (DECL_SIZE (result_decl)))
	{
	  if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl)))
	    gimplify_type_sizes (TREE_TYPE (result_decl), pre_p);
	  /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
	     should be effectively allocated by the caller, i.e. all calls to
	     this function must be subject to the Return Slot Optimization.  */
	  gimplify_one_sizepos (&DECL_SIZE (result_decl), pre_p);
	  gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl), pre_p);
	}
      result = result_decl;
    }
  else if (gimplify_ctxp->return_temp)
    result = gimplify_ctxp->return_temp;
  else
    {
      result = create_tmp_reg (TREE_TYPE (result_decl));

      /* ??? With complex control flow (usually involving abnormal edges),
	 we can wind up warning about an uninitialized value for this.  Due
	 to how this variable is constructed and initialized, this is never
	 true.  Give up and never warn.  */
      TREE_NO_WARNING (result) = 1;

      gimplify_ctxp->return_temp = result;
    }

  /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
     Then gimplify the whole thing.  */
  if (result != result_decl)
    TREE_OPERAND (ret_expr, 0) = result;

  gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);

  maybe_add_early_return_predict_stmt (pre_p);
  ret = gimple_build_return (result);
  gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
  gimplify_seq_add_stmt (pre_p, ret);

  return GS_ALL_DONE;
}
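
/* For illustration: in a function returning int,

     return a + b;

   gimplifies to

     D.1234 = a + b;
     return D.1234;

   where the temporary (name illustrative) is created once and reused
   by every return statement through gimplify_ctxp->return_temp.  */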

/* Gimplify a variable-length array DECL.  */

static void
gimplify_vla_decl (tree decl, gimple_seq *seq_p)
{
  /* This is a variable-sized decl.  Simplify its size and mark it
     for deferred expansion.  */
  tree t, addr, ptr_type;

  gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
  gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);

  /* Don't mess with a DECL_VALUE_EXPR set by the front-end.  */
  if (DECL_HAS_VALUE_EXPR_P (decl))
    return;

  /* All occurrences of this decl in final gimplified code will be
     replaced by indirection.  Setting DECL_VALUE_EXPR does two
     things: First, it lets the rest of the gimplifier know what
     replacement to use.  Second, it lets the debug info know
     where to find the value.  */
  ptr_type = build_pointer_type (TREE_TYPE (decl));
  addr = create_tmp_var (ptr_type, get_name (decl));
  DECL_IGNORED_P (addr) = 0;
  t = build_fold_indirect_ref (addr);
  TREE_THIS_NOTRAP (t) = 1;
  SET_DECL_VALUE_EXPR (decl, t);
  DECL_HAS_VALUE_EXPR_P (decl) = 1;

  t = build_alloca_call_expr (DECL_SIZE_UNIT (decl), DECL_ALIGN (decl),
			      max_int_size_in_bytes (TREE_TYPE (decl)));
  /* The call has been built for a variable-sized object.  */
  CALL_ALLOCA_FOR_VAR_P (t) = 1;
  t = fold_convert (ptr_type, t);
  t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);

  gimplify_and_add (t, seq_p);

  /* Record the dynamic allocation associated with DECL if requested.  */
  if (flag_callgraph_info & CALLGRAPH_INFO_DYNAMIC_ALLOC)
    record_dynamic_alloc (decl);
}
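
/* For illustration (temporary names are made up): for `int a[n]' the
   code above produces roughly

     a.1 = __builtin_alloca_with_align (D.2, 32);

   where D.2 holds the gimplified size `n * 4', and every later use of
   `a' is rewritten to `*a.1' through its DECL_VALUE_EXPR.  */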

/* A helper function to be called via walk_tree.  Mark all labels under *TP
   as being forced.  To be called for DECL_INITIAL of static variables.  */

static tree
force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  if (TYPE_P (*tp))
    *walk_subtrees = 0;
  if (TREE_CODE (*tp) == LABEL_DECL)
    {
      FORCED_LABEL (*tp) = 1;
      cfun->has_forced_label_in_static = 1;
    }

  return NULL_TREE;
}

/* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
   and initialization explicit.  */

static enum gimplify_status
gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
{
  tree stmt = *stmt_p;
  tree decl = DECL_EXPR_DECL (stmt);

  *stmt_p = NULL_TREE;

  if (TREE_TYPE (decl) == error_mark_node)
    return GS_ERROR;

  if ((TREE_CODE (decl) == TYPE_DECL
       || VAR_P (decl))
      && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
    {
      gimplify_type_sizes (TREE_TYPE (decl), seq_p);
      if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
	gimplify_type_sizes (TREE_TYPE (TREE_TYPE (decl)), seq_p);
    }

  /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
     in case its size expressions contain problematic nodes like CALL_EXPR.  */
  if (TREE_CODE (decl) == TYPE_DECL
      && DECL_ORIGINAL_TYPE (decl)
      && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl)))
    {
      gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl), seq_p);
      if (TREE_CODE (DECL_ORIGINAL_TYPE (decl)) == REFERENCE_TYPE)
	gimplify_type_sizes (TREE_TYPE (DECL_ORIGINAL_TYPE (decl)), seq_p);
    }

  if (VAR_P (decl) && !DECL_EXTERNAL (decl))
    {
      tree init = DECL_INITIAL (decl);
      bool is_vla = false;

      poly_uint64 size;
      if (!poly_int_tree_p (DECL_SIZE_UNIT (decl), &size)
	  || (!TREE_STATIC (decl)
	      && flag_stack_check == GENERIC_STACK_CHECK
	      && maybe_gt (size,
			   (unsigned HOST_WIDE_INT) STACK_CHECK_MAX_VAR_SIZE)))
	{
	  gimplify_vla_decl (decl, seq_p);
	  is_vla = true;
	}

      if (asan_poisoned_variables
	  && !is_vla
	  && TREE_ADDRESSABLE (decl)
	  && !TREE_STATIC (decl)
	  && !DECL_HAS_VALUE_EXPR_P (decl)
	  && DECL_ALIGN (decl) <= MAX_SUPPORTED_STACK_ALIGNMENT
	  && dbg_cnt (asan_use_after_scope)
	  && !gimplify_omp_ctxp)
	{
	  asan_poisoned_variables->add (decl);
	  asan_poison_variable (decl, false, seq_p);
	  if (!DECL_ARTIFICIAL (decl) && gimplify_ctxp->live_switch_vars)
	    gimplify_ctxp->live_switch_vars->add (decl);
	}

      /* Some front ends do not explicitly declare all anonymous
	 artificial variables.  We compensate here by declaring the
	 variables, though it would be better if the front ends would
	 explicitly declare them.  */
      if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
	  && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
	gimple_add_tmp_var (decl);

      if (init && init != error_mark_node)
	{
	  if (!TREE_STATIC (decl))
	    {
	      DECL_INITIAL (decl) = NULL_TREE;
	      init = build2 (INIT_EXPR, void_type_node, decl, init);
	      gimplify_and_add (init, seq_p);
	      ggc_free (init);
	    }
	  else
	    /* We must still examine initializers for static variables
	       as they may contain a label address.  */
	    walk_tree (&init, force_labels_r, NULL, NULL);
	}
    }

  return GS_ALL_DONE;
}

/* Gimplify a LOOP_EXPR.  Normally this just involves gimplifying the body
   and replacing the LOOP_EXPR with goto, but if the loop contains an
   EXIT_EXPR, we need to append a label for it to jump to.  */

static enum gimplify_status
gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree saved_label = gimplify_ctxp->exit_label;
  tree start_label = create_artificial_label (UNKNOWN_LOCATION);

  gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));

  gimplify_ctxp->exit_label = NULL_TREE;

  gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);

  gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));

  if (gimplify_ctxp->exit_label)
    gimplify_seq_add_stmt (pre_p,
			   gimple_build_label (gimplify_ctxp->exit_label));

  gimplify_ctxp->exit_label = saved_label;

  *expr_p = NULL;
  return GS_ALL_DONE;
}
1856
1857 /* Gimplify a statement list onto a sequence. These may be created either
1858 by an enlightened front-end or by shortcut_cond_expr. */
1859
1860 static enum gimplify_status
1861 gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
1862 {
1863 tree temp = voidify_wrapper_expr (*expr_p, NULL);
1864
1865 tree_stmt_iterator i = tsi_start (*expr_p);
1866
1867 while (!tsi_end_p (i))
1868 {
1869 gimplify_stmt (tsi_stmt_ptr (i), pre_p);
1870 tsi_delink (&i);
1871 }
1872
1873 if (temp)
1874 {
1875 *expr_p = temp;
1876 return GS_OK;
1877 }
1878
1879 return GS_ALL_DONE;
1880 }
1881
1882 /* Callback for walk_gimple_seq. */
1883
1884 static tree
1885 warn_switch_unreachable_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
1886 struct walk_stmt_info *wi)
1887 {
1888 gimple *stmt = gsi_stmt (*gsi_p);
1889
1890 *handled_ops_p = true;
1891 switch (gimple_code (stmt))
1892 {
1893 case GIMPLE_TRY:
1894 /* A compiler-generated cleanup or a user-written try block.
1895 If it's empty, don't dive into it--that would result in
1896 worse location info. */
1897 if (gimple_try_eval (stmt) == NULL)
1898 {
1899 wi->info = stmt;
1900 return integer_zero_node;
1901 }
1902 /* Fall through. */
1903 case GIMPLE_BIND:
1904 case GIMPLE_CATCH:
1905 case GIMPLE_EH_FILTER:
1906 case GIMPLE_TRANSACTION:
1907 /* Walk the sub-statements. */
1908 *handled_ops_p = false;
1909 break;
1910
1911 case GIMPLE_DEBUG:
1912 /* Ignore these. We may generate them before declarations that
1913 are never executed. If there's something to warn about,
1914 there will be non-debug stmts too, and we'll catch those. */
1915 break;
1916
1917 case GIMPLE_CALL:
1918 if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
1919 {
1920 *handled_ops_p = false;
1921 break;
1922 }
1923 /* Fall through. */
1924 default:
1925 /* Save the first "real" statement (not a decl/lexical scope/...). */
1926 wi->info = stmt;
1927 return integer_zero_node;
1928 }
1929 return NULL_TREE;
1930 }
1931
1932 /* Possibly warn about unreachable statements between switch's controlling
1933 expression and the first case. SEQ is the body of a switch expression. */
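
/* E.g. (illustrative) the increment below can never execute and is
   diagnosed by -Wswitch-unreachable:

     switch (i)
       {
         i++;
       case 0:
         break;
       }  */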
1934
1935 static void
1936 maybe_warn_switch_unreachable (gimple_seq seq)
1937 {
1938 if (!warn_switch_unreachable
1939 /* This warning doesn't play well with Fortran when optimizations
1940 are on. */
1941 || lang_GNU_Fortran ()
1942 || seq == NULL)
1943 return;
1944
1945 struct walk_stmt_info wi;
1946 memset (&wi, 0, sizeof (wi));
1947 walk_gimple_seq (seq, warn_switch_unreachable_r, NULL, &wi);
1948 gimple *stmt = (gimple *) wi.info;
1949
1950 if (stmt && gimple_code (stmt) != GIMPLE_LABEL)
1951 {
1952 if (gimple_code (stmt) == GIMPLE_GOTO
1953 && TREE_CODE (gimple_goto_dest (stmt)) == LABEL_DECL
1954 && DECL_ARTIFICIAL (gimple_goto_dest (stmt)))
1955 /* Don't warn for compiler-generated gotos. These occur
1956 in Duff's devices, for example. */;
1957 else
1958 warning_at (gimple_location (stmt), OPT_Wswitch_unreachable,
1959 "statement will never be executed");
1960 }
1961 }
1962
1963
1964 /* A label entry that pairs a label and a location. */
1965 struct label_entry
1966 {
1967 tree label;
1968 location_t loc;
1969 };
1970
1971 /* Find LABEL in vector of label entries VEC. */
1972
1973 static struct label_entry *
1974 find_label_entry (const auto_vec<struct label_entry> *vec, tree label)
1975 {
1976 unsigned int i;
1977 struct label_entry *l;
1978
1979 FOR_EACH_VEC_ELT (*vec, i, l)
1980 if (l->label == label)
1981 return l;
1982 return NULL;
1983 }
1984
1985 /* Return true if LABEL, a LABEL_DECL, represents a case label
1986 in a vector of labels CASES. */
1987
1988 static bool
1989 case_label_p (const vec<tree> *cases, tree label)
1990 {
1991 unsigned int i;
1992 tree l;
1993
1994 FOR_EACH_VEC_ELT (*cases, i, l)
1995 if (CASE_LABEL (l) == label)
1996 return true;
1997 return false;
1998 }
1999
2000 /* Find the last nondebug statement in a scope STMT. */
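
/* E.g. (illustrative) for a GIMPLE_TRY_FINALLY arising from something
   like

     { x = 1; } finally { y = 2; }

   this returns the statement y = 2 when the eval part may fall through,
   and the last statement of the eval part otherwise.  */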
2001
2002 static gimple *
2003 last_stmt_in_scope (gimple *stmt)
2004 {
2005 if (!stmt)
2006 return NULL;
2007
2008 switch (gimple_code (stmt))
2009 {
2010 case GIMPLE_BIND:
2011 {
2012 gbind *bind = as_a <gbind *> (stmt);
2013 stmt = gimple_seq_last_nondebug_stmt (gimple_bind_body (bind));
2014 return last_stmt_in_scope (stmt);
2015 }
2016
2017 case GIMPLE_TRY:
2018 {
2019 gtry *try_stmt = as_a <gtry *> (stmt);
2020 stmt = gimple_seq_last_nondebug_stmt (gimple_try_eval (try_stmt));
2021 gimple *last_eval = last_stmt_in_scope (stmt);
2022 if (gimple_stmt_may_fallthru (last_eval)
2023 && (last_eval == NULL
2024 || !gimple_call_internal_p (last_eval, IFN_FALLTHROUGH))
2025 && gimple_try_kind (try_stmt) == GIMPLE_TRY_FINALLY)
2026 {
2027 stmt = gimple_seq_last_nondebug_stmt (gimple_try_cleanup (try_stmt));
2028 return last_stmt_in_scope (stmt);
2029 }
2030 else
2031 return last_eval;
2032 }
2033
2034 case GIMPLE_DEBUG:
2035 gcc_unreachable ();
2036
2037 default:
2038 return stmt;
2039 }
2040 }
2041
2042 /* Collect interesting labels in LABELS and return the statement preceding
2043 another case label, or a user-defined label. Store in *PREVLOC a
2044 location useful for giving warnings (usually the location of the
2045 returned statement or of its surrounding scope). */
2046
2047 static gimple *
2048 collect_fallthrough_labels (gimple_stmt_iterator *gsi_p,
2049 auto_vec <struct label_entry> *labels,
2050 location_t *prevloc)
2051 {
2052 gimple *prev = NULL;
2053
2054 *prevloc = UNKNOWN_LOCATION;
2055 do
2056 {
2057 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND)
2058 {
2059 /* Recognize the special GIMPLE_BIND added by gimplify_switch_expr,
2060 which starts with a GIMPLE_SWITCH and ends with a break label.
2061 Handle that as a single statement that can fall through. */
2062 gbind *bind = as_a <gbind *> (gsi_stmt (*gsi_p));
2063 gimple *first = gimple_seq_first_stmt (gimple_bind_body (bind));
2064 gimple *last = gimple_seq_last_stmt (gimple_bind_body (bind));
2065 if (last
2066 && gimple_code (first) == GIMPLE_SWITCH
2067 && gimple_code (last) == GIMPLE_LABEL)
2068 {
2069 tree label = gimple_label_label (as_a <glabel *> (last));
2070 if (SWITCH_BREAK_LABEL_P (label))
2071 {
2072 prev = bind;
2073 gsi_next (gsi_p);
2074 continue;
2075 }
2076 }
2077 }
2078 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND
2079 || gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_TRY)
2080 {
2081 /* Nested scope. Only look at the last statement of
2082 the innermost scope. */
2083 location_t bind_loc = gimple_location (gsi_stmt (*gsi_p));
2084 gimple *last = last_stmt_in_scope (gsi_stmt (*gsi_p));
2085 if (last)
2086 {
2087 prev = last;
2088 /* It might be a label without a location. Use the
2089 location of the scope then. */
2090 if (!gimple_has_location (prev))
2091 *prevloc = bind_loc;
2092 }
2093 gsi_next (gsi_p);
2094 continue;
2095 }
2096
2097 /* Ifs are tricky. */
2098 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_COND)
2099 {
2100 gcond *cond_stmt = as_a <gcond *> (gsi_stmt (*gsi_p));
2101 tree false_lab = gimple_cond_false_label (cond_stmt);
2102 location_t if_loc = gimple_location (cond_stmt);
2103
2104 /* If we have e.g.
2105 if (i > 1) goto <D.2259>; else goto D;
2106 we can't do much with the else-branch. */
2107 if (!DECL_ARTIFICIAL (false_lab))
2108 break;
2109
2110 /* Go on until the false label, then one step back. */
2111 for (; !gsi_end_p (*gsi_p); gsi_next (gsi_p))
2112 {
2113 gimple *stmt = gsi_stmt (*gsi_p);
2114 if (gimple_code (stmt) == GIMPLE_LABEL
2115 && gimple_label_label (as_a <glabel *> (stmt)) == false_lab)
2116 break;
2117 }
2118
2119 /* Not found? Oops. */
2120 if (gsi_end_p (*gsi_p))
2121 break;
2122
2123 struct label_entry l = { false_lab, if_loc };
2124 labels->safe_push (l);
2125
2126 /* Go to the last statement of the then branch. */
2127 gsi_prev (gsi_p);
2128
2129 /* if (i != 0) goto <D.1759>; else goto <D.1760>;
2130 <D.1759>:
2131 <stmt>;
2132 goto <D.1761>;
2133 <D.1760>:
2134 */
2135 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_GOTO
2136 && !gimple_has_location (gsi_stmt (*gsi_p)))
2137 {
2138 /* Look at the statement before; it might be
2139 attribute fallthrough, in which case don't warn. */
2140 gsi_prev (gsi_p);
2141 bool fallthru_before_dest
2142 = gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_FALLTHROUGH);
2143 gsi_next (gsi_p);
2144 tree goto_dest = gimple_goto_dest (gsi_stmt (*gsi_p));
2145 if (!fallthru_before_dest)
2146 {
2147 struct label_entry l = { goto_dest, if_loc };
2148 labels->safe_push (l);
2149 }
2150 }
2151 /* And move back. */
2152 gsi_next (gsi_p);
2153 }
2154
2155 /* Remember the last statement. Skip labels that are of no interest
2156 to us. */
2157 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
2158 {
2159 tree label = gimple_label_label (as_a <glabel *> (gsi_stmt (*gsi_p)));
2160 if (find_label_entry (labels, label))
2161 prev = gsi_stmt (*gsi_p);
2162 }
2163 else if (gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_ASAN_MARK))
2164 ;
2165 else if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_PREDICT)
2166 ;
2167 else if (!is_gimple_debug (gsi_stmt (*gsi_p)))
2168 prev = gsi_stmt (*gsi_p);
2169 gsi_next (gsi_p);
2170 }
2171 while (!gsi_end_p (*gsi_p)
2172 /* Stop if we find a case or a user-defined label. */
2173 && (gimple_code (gsi_stmt (*gsi_p)) != GIMPLE_LABEL
2174 || !gimple_has_location (gsi_stmt (*gsi_p))));
2175
2176 if (prev && gimple_has_location (prev))
2177 *prevloc = gimple_location (prev);
2178 return prev;
2179 }
2180
2181 /* Return true if the switch fallthrough warning should occur. LABEL is
2182 the label that we're falling through to. */
2183
2184 static bool
2185 should_warn_for_implicit_fallthrough (gimple_stmt_iterator *gsi_p, tree label)
2186 {
2187 gimple_stmt_iterator gsi = *gsi_p;
2188
2189 /* Don't warn if the label is marked with a "falls through" comment. */
2190 if (FALLTHROUGH_LABEL_P (label))
2191 return false;
2192
2193 /* Don't warn for non-case labels followed by a statement:
2194 case 0:
2195 foo ();
2196 label:
2197 bar ();
2198 as these are likely intentional. */
2199 if (!case_label_p (&gimplify_ctxp->case_labels, label))
2200 {
2201 tree l;
2202 while (!gsi_end_p (gsi)
2203 && gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
2204 && (l = gimple_label_label (as_a <glabel *> (gsi_stmt (gsi))))
2205 && !case_label_p (&gimplify_ctxp->case_labels, l))
2206 gsi_next_nondebug (&gsi);
2207 if (gsi_end_p (gsi) || gimple_code (gsi_stmt (gsi)) != GIMPLE_LABEL)
2208 return false;
2209 }
2210
2211 /* Don't warn for terminated branches, i.e. when the subsequent case label
2212 immediately breaks. */
2213 gsi = *gsi_p;
2214
2215 /* Skip all immediately following labels. */
2216 while (!gsi_end_p (gsi)
2217 && (gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
2218 || gimple_code (gsi_stmt (gsi)) == GIMPLE_PREDICT))
2219 gsi_next_nondebug (&gsi);
2220
2221 /* { ... something; default:; } */
2222 if (gsi_end_p (gsi)
2223 /* { ... something; default: break; } or
2224 { ... something; default: goto L; } */
2225 || gimple_code (gsi_stmt (gsi)) == GIMPLE_GOTO
2226 /* { ... something; default: return; } */
2227 || gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
2228 return false;
2229
2230 return true;
2231 }
2232
2233 /* Callback for walk_gimple_seq. */
2234
2235 static tree
2236 warn_implicit_fallthrough_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
2237 struct walk_stmt_info *)
2238 {
2239 gimple *stmt = gsi_stmt (*gsi_p);
2240
2241 *handled_ops_p = true;
2242 switch (gimple_code (stmt))
2243 {
2244 case GIMPLE_TRY:
2245 case GIMPLE_BIND:
2246 case GIMPLE_CATCH:
2247 case GIMPLE_EH_FILTER:
2248 case GIMPLE_TRANSACTION:
2249 /* Walk the sub-statements. */
2250 *handled_ops_p = false;
2251 break;
2252
2253 /* Find a sequence of the form:
2254
2255 GIMPLE_LABEL
2256 [...]
2257 <may fallthru stmt>
2258 GIMPLE_LABEL
2259
2260 and possibly warn. */
2261 case GIMPLE_LABEL:
2262 {
2263 /* Found a label. Skip all immediately following labels. */
2264 while (!gsi_end_p (*gsi_p)
2265 && gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
2266 gsi_next_nondebug (gsi_p);
2267
2268 /* There might be no more statements. */
2269 if (gsi_end_p (*gsi_p))
2270 return integer_zero_node;
2271
2272 /* Vector of labels that fall through. */
2273 auto_vec <struct label_entry> labels;
2274 location_t prevloc;
2275 gimple *prev = collect_fallthrough_labels (gsi_p, &labels, &prevloc);
2276
2277 /* There might be no more statements. */
2278 if (gsi_end_p (*gsi_p))
2279 return integer_zero_node;
2280
2281 gimple *next = gsi_stmt (*gsi_p);
2282 tree label;
2283 /* If what follows is a label, then we may have a fallthrough. */
2284 if (gimple_code (next) == GIMPLE_LABEL
2285 && gimple_has_location (next)
2286 && (label = gimple_label_label (as_a <glabel *> (next)))
2287 && prev != NULL)
2288 {
2289 struct label_entry *l;
2290 bool warned_p = false;
2291 auto_diagnostic_group d;
2292 if (!should_warn_for_implicit_fallthrough (gsi_p, label))
2293 /* Quiet. */;
2294 else if (gimple_code (prev) == GIMPLE_LABEL
2295 && (label = gimple_label_label (as_a <glabel *> (prev)))
2296 && (l = find_label_entry (&labels, label)))
2297 warned_p = warning_at (l->loc, OPT_Wimplicit_fallthrough_,
2298 "this statement may fall through");
2299 else if (!gimple_call_internal_p (prev, IFN_FALLTHROUGH)
2300 /* Try to be clever and don't warn when the statement
2301 can't actually fall through. */
2302 && gimple_stmt_may_fallthru (prev)
2303 && prevloc != UNKNOWN_LOCATION)
2304 warned_p = warning_at (prevloc,
2305 OPT_Wimplicit_fallthrough_,
2306 "this statement may fall through");
2307 if (warned_p)
2308 inform (gimple_location (next), "here");
2309
2310 /* Mark this label as processed so as to prevent multiple
2311 warnings in nested switches. */
2312 FALLTHROUGH_LABEL_P (label) = true;
2313
2314 /* So that the next warn_implicit_fallthrough_r will start looking
2315 for a new sequence starting with this label. */
2316 gsi_prev (gsi_p);
2317 }
2318 }
2319 break;
2320 default:
2321 break;
2322 }
2323 return NULL_TREE;
2324 }
2325
2326 /* Warn when a switch case falls through. */
2327
2328 static void
2329 maybe_warn_implicit_fallthrough (gimple_seq seq)
2330 {
2331 if (!warn_implicit_fallthrough)
2332 return;
2333
2334 /* This warning is meant for C/C++/ObjC/ObjC++ only. */
2335 if (!(lang_GNU_C ()
2336 || lang_GNU_CXX ()
2337 || lang_GNU_OBJC ()))
2338 return;
2339
2340 struct walk_stmt_info wi;
2341 memset (&wi, 0, sizeof (wi));
2342 walk_gimple_seq (seq, warn_implicit_fallthrough_r, NULL, &wi);
2343 }
2344
2345 /* Callback for walk_gimple_seq. */
2346
2347 static tree
2348 expand_FALLTHROUGH_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
2349 struct walk_stmt_info *wi)
2350 {
2351 gimple *stmt = gsi_stmt (*gsi_p);
2352
2353 *handled_ops_p = true;
2354 switch (gimple_code (stmt))
2355 {
2356 case GIMPLE_TRY:
2357 case GIMPLE_BIND:
2358 case GIMPLE_CATCH:
2359 case GIMPLE_EH_FILTER:
2360 case GIMPLE_TRANSACTION:
2361 /* Walk the sub-statements. */
2362 *handled_ops_p = false;
2363 break;
2364 case GIMPLE_CALL:
2365 if (gimple_call_internal_p (stmt, IFN_FALLTHROUGH))
2366 {
2367 gsi_remove (gsi_p, true);
2368 if (gsi_end_p (*gsi_p))
2369 {
2370 *static_cast<location_t *>(wi->info) = gimple_location (stmt);
2371 return integer_zero_node;
2372 }
2373
2374 bool found = false;
2375 location_t loc = gimple_location (stmt);
2376
2377 gimple_stmt_iterator gsi2 = *gsi_p;
2378 stmt = gsi_stmt (gsi2);
2379 if (gimple_code (stmt) == GIMPLE_GOTO && !gimple_has_location (stmt))
2380 {
2381 /* Go on until the artificial label. */
2382 tree goto_dest = gimple_goto_dest (stmt);
2383 for (; !gsi_end_p (gsi2); gsi_next (&gsi2))
2384 {
2385 if (gimple_code (gsi_stmt (gsi2)) == GIMPLE_LABEL
2386 && gimple_label_label (as_a <glabel *> (gsi_stmt (gsi2)))
2387 == goto_dest)
2388 break;
2389 }
2390
2391 /* Not found? Stop. */
2392 if (gsi_end_p (gsi2))
2393 break;
2394
2395 /* Look one past it. */
2396 gsi_next (&gsi2);
2397 }
2398
2399 /* We're looking for a case label or default label here. */
2400 while (!gsi_end_p (gsi2))
2401 {
2402 stmt = gsi_stmt (gsi2);
2403 if (gimple_code (stmt) == GIMPLE_LABEL)
2404 {
2405 tree label = gimple_label_label (as_a <glabel *> (stmt));
2406 if (gimple_has_location (stmt) && DECL_ARTIFICIAL (label))
2407 {
2408 found = true;
2409 break;
2410 }
2411 }
2412 else if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
2413 ;
2414 else if (!is_gimple_debug (stmt))
2415 /* Anything else is not expected. */
2416 break;
2417 gsi_next (&gsi2);
2418 }
2419 if (!found)
2420 pedwarn (loc, 0, "attribute %<fallthrough%> not preceding "
2421 "a case label or default label");
2422 }
2423 break;
2424 default:
2425 break;
2426 }
2427 return NULL_TREE;
2428 }
2429
2430 /* Expand all FALLTHROUGH () calls in SEQ. */
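
/* E.g. (illustrative) in

     case 1:
       bar ();
       __attribute__((fallthrough));
     case 2:

   the IFN_FALLTHROUGH call that the front end emitted for the attribute
   is removed by expand_FALLTHROUGH_r above, after checking that a case
   or default label really does follow.  */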
2431
2432 static void
2433 expand_FALLTHROUGH (gimple_seq *seq_p)
2434 {
2435 struct walk_stmt_info wi;
2436 location_t loc;
2437 memset (&wi, 0, sizeof (wi));
2438 wi.info = (void *) &loc;
2439 walk_gimple_seq_mod (seq_p, expand_FALLTHROUGH_r, NULL, &wi);
2440 if (wi.callback_result == integer_zero_node)
2441 /* We've found [[fallthrough]]; at the end of a switch, which the C++
2442 standard says is ill-formed; see [dcl.attr.fallthrough]. */
2443 pedwarn (loc, 0, "attribute %<fallthrough%> not preceding "
2444 "a case label or default label");
2445 }
2446
2447 \f
2448 /* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
2449 branch to. */
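
/* Illustrative sketch (the artificial <D.N> names are made up): a
   GENERIC

     switch (x) { case 1: a; break; default: b; }

   becomes roughly

     switch (x) <default: <D.3>, case 1: <D.1>>
     <D.1>:
       a;
       goto <D.2>;
     <D.3>:
       b;
     <D.2>:  */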
2450
2451 static enum gimplify_status
2452 gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
2453 {
2454 tree switch_expr = *expr_p;
2455 gimple_seq switch_body_seq = NULL;
2456 enum gimplify_status ret;
2457 tree index_type = TREE_TYPE (switch_expr);
2458 if (index_type == NULL_TREE)
2459 index_type = TREE_TYPE (SWITCH_COND (switch_expr));
2460
2461 ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
2462 fb_rvalue);
2463 if (ret == GS_ERROR || ret == GS_UNHANDLED)
2464 return ret;
2465
2466 if (SWITCH_BODY (switch_expr))
2467 {
2468 vec<tree> labels;
2469 vec<tree> saved_labels;
2470 hash_set<tree> *saved_live_switch_vars = NULL;
2471 tree default_case = NULL_TREE;
2472 gswitch *switch_stmt;
2473
2474 /* Save old labels, get new ones from body, then restore the old
2475 labels. Save all the things from the switch body to append after. */
2476 saved_labels = gimplify_ctxp->case_labels;
2477 gimplify_ctxp->case_labels.create (8);
2478
2479 /* Do not create live_switch_vars unless SWITCH_BODY is a BIND_EXPR or a STATEMENT_LIST. */
2480 saved_live_switch_vars = gimplify_ctxp->live_switch_vars;
2481 tree_code body_type = TREE_CODE (SWITCH_BODY (switch_expr));
2482 if (body_type == BIND_EXPR || body_type == STATEMENT_LIST)
2483 gimplify_ctxp->live_switch_vars = new hash_set<tree> (4);
2484 else
2485 gimplify_ctxp->live_switch_vars = NULL;
2486
2487 bool old_in_switch_expr = gimplify_ctxp->in_switch_expr;
2488 gimplify_ctxp->in_switch_expr = true;
2489
2490 gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);
2491
2492 gimplify_ctxp->in_switch_expr = old_in_switch_expr;
2493 maybe_warn_switch_unreachable (switch_body_seq);
2494 maybe_warn_implicit_fallthrough (switch_body_seq);
2495 /* Only do this for the outermost GIMPLE_SWITCH. */
2496 if (!gimplify_ctxp->in_switch_expr)
2497 expand_FALLTHROUGH (&switch_body_seq);
2498
2499 labels = gimplify_ctxp->case_labels;
2500 gimplify_ctxp->case_labels = saved_labels;
2501
2502 if (gimplify_ctxp->live_switch_vars)
2503 {
2504 gcc_assert (gimplify_ctxp->live_switch_vars->is_empty ());
2505 delete gimplify_ctxp->live_switch_vars;
2506 }
2507 gimplify_ctxp->live_switch_vars = saved_live_switch_vars;
2508
2509 preprocess_case_label_vec_for_gimple (labels, index_type,
2510 &default_case);
2511
2512 bool add_bind = false;
2513 if (!default_case)
2514 {
2515 glabel *new_default;
2516
2517 default_case
2518 = build_case_label (NULL_TREE, NULL_TREE,
2519 create_artificial_label (UNKNOWN_LOCATION));
2520 if (old_in_switch_expr)
2521 {
2522 SWITCH_BREAK_LABEL_P (CASE_LABEL (default_case)) = 1;
2523 add_bind = true;
2524 }
2525 new_default = gimple_build_label (CASE_LABEL (default_case));
2526 gimplify_seq_add_stmt (&switch_body_seq, new_default);
2527 }
2528 else if (old_in_switch_expr)
2529 {
2530 gimple *last = gimple_seq_last_stmt (switch_body_seq);
2531 if (last && gimple_code (last) == GIMPLE_LABEL)
2532 {
2533 tree label = gimple_label_label (as_a <glabel *> (last));
2534 if (SWITCH_BREAK_LABEL_P (label))
2535 add_bind = true;
2536 }
2537 }
2538
2539 switch_stmt = gimple_build_switch (SWITCH_COND (switch_expr),
2540 default_case, labels);
2541 /* For the benefit of -Wimplicit-fallthrough, if switch_body_seq
2542 ends with a GIMPLE_LABEL holding SWITCH_BREAK_LABEL_P LABEL_DECL,
2543 wrap the GIMPLE_SWITCH up to that GIMPLE_LABEL into a GIMPLE_BIND,
2544 so that we can easily find the start and end of the switch
2545 statement. */
2546 if (add_bind)
2547 {
2548 gimple_seq bind_body = NULL;
2549 gimplify_seq_add_stmt (&bind_body, switch_stmt);
2550 gimple_seq_add_seq (&bind_body, switch_body_seq);
2551 gbind *bind = gimple_build_bind (NULL_TREE, bind_body, NULL_TREE);
2552 gimple_set_location (bind, EXPR_LOCATION (switch_expr));
2553 gimplify_seq_add_stmt (pre_p, bind);
2554 }
2555 else
2556 {
2557 gimplify_seq_add_stmt (pre_p, switch_stmt);
2558 gimplify_seq_add_seq (pre_p, switch_body_seq);
2559 }
2560 labels.release ();
2561 }
2562 else
2563 gcc_unreachable ();
2564
2565 return GS_ALL_DONE;
2566 }
2567
2568 /* Gimplify the LABEL_EXPR pointed to by EXPR_P. */
2569
2570 static enum gimplify_status
2571 gimplify_label_expr (tree *expr_p, gimple_seq *pre_p)
2572 {
2573 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
2574 == current_function_decl);
2575
2576 tree label = LABEL_EXPR_LABEL (*expr_p);
2577 glabel *label_stmt = gimple_build_label (label);
2578 gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
2579 gimplify_seq_add_stmt (pre_p, label_stmt);
2580
2581 if (lookup_attribute ("cold", DECL_ATTRIBUTES (label)))
2582 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_COLD_LABEL,
2583 NOT_TAKEN));
2584 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label)))
2585 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_HOT_LABEL,
2586 TAKEN));
2587
2588 return GS_ALL_DONE;
2589 }
2590
2591 /* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P. */
2592
2593 static enum gimplify_status
2594 gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
2595 {
2596 struct gimplify_ctx *ctxp;
2597 glabel *label_stmt;
2598
2599 /* Invalid programs can play Duff's Device type games with, for example,
2600 #pragma omp parallel. At least in the C front end, we don't
2601 detect such invalid branches until after gimplification, in the
2602 diagnose_omp_blocks pass. */
2603 for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
2604 if (ctxp->case_labels.exists ())
2605 break;
2606
2607 tree label = CASE_LABEL (*expr_p);
2608 label_stmt = gimple_build_label (label);
2609 gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
2610 ctxp->case_labels.safe_push (*expr_p);
2611 gimplify_seq_add_stmt (pre_p, label_stmt);
2612
2613 if (lookup_attribute ("cold", DECL_ATTRIBUTES (label)))
2614 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_COLD_LABEL,
2615 NOT_TAKEN));
2616 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label)))
2617 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_HOT_LABEL,
2618 TAKEN));
2619
2620 return GS_ALL_DONE;
2621 }
2622
2623 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
2624 if necessary. */
2625
2626 tree
2627 build_and_jump (tree *label_p)
2628 {
2629 if (label_p == NULL)
2630 /* If there's nowhere to jump, just fall through. */
2631 return NULL_TREE;
2632
2633 if (*label_p == NULL_TREE)
2634 {
2635 tree label = create_artificial_label (UNKNOWN_LOCATION);
2636 *label_p = label;
2637 }
2638
2639 return build1 (GOTO_EXPR, void_type_node, *label_p);
2640 }
2641
2642 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
2643 This also involves building a label to jump to and communicating it to
2644 gimplify_loop_expr through gimplify_ctxp->exit_label. */
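
/* I.e. (illustrative)

     EXIT_EXPR <cond>  becomes  if (cond) goto exit_label;

   where EXIT_LABEL is the label gimplify_loop_expr will emit after
   the loop.  */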
2645
2646 static enum gimplify_status
2647 gimplify_exit_expr (tree *expr_p)
2648 {
2649 tree cond = TREE_OPERAND (*expr_p, 0);
2650 tree expr;
2651
2652 expr = build_and_jump (&gimplify_ctxp->exit_label);
2653 expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
2654 *expr_p = expr;
2655
2656 return GS_OK;
2657 }
2658
2659 /* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is
2660 different from its canonical type, wrap the whole thing inside a
2661 NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
2662 type.
2663
2664 The canonical type of a COMPONENT_REF is the type of the field being
2665 referenced--unless the field is a bit-field which can be read directly
2666 in a smaller mode, in which case the canonical type is the
2667 sign-appropriate type corresponding to that mode. */
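
/* E.g. (illustrative) given

     struct S { signed int b : 3; } s;

   an rvalue use of s.b might be read in a mode narrower than int, and
   the COMPONENT_REF is then retyped here to the sign-appropriate type
   of that narrower mode.  */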
2668
2669 static void
2670 canonicalize_component_ref (tree *expr_p)
2671 {
2672 tree expr = *expr_p;
2673 tree type;
2674
2675 gcc_assert (TREE_CODE (expr) == COMPONENT_REF);
2676
2677 if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
2678 type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
2679 else
2680 type = TREE_TYPE (TREE_OPERAND (expr, 1));
2681
2682 /* One could argue that all the stuff below is not necessary for
2683 the non-bitfield case and declare it a FE error if type
2684 adjustment would be needed. */
2685 if (TREE_TYPE (expr) != type)
2686 {
2687 #ifdef ENABLE_TYPES_CHECKING
2688 tree old_type = TREE_TYPE (expr);
2689 #endif
2690 int type_quals;
2691
2692 /* We need to preserve qualifiers and propagate them from
2693 operand 0. */
2694 type_quals = TYPE_QUALS (type)
2695 | TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
2696 if (TYPE_QUALS (type) != type_quals)
2697 type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);
2698
2699 /* Set the type of the COMPONENT_REF to the underlying type. */
2700 TREE_TYPE (expr) = type;
2701
2702 #ifdef ENABLE_TYPES_CHECKING
2703 /* It is now a FE error if the conversion from the canonical
2704 type to the original expression type is not useless. */
2705 gcc_assert (useless_type_conversion_p (old_type, type));
2706 #endif
2707 }
2708 }
2709
2710 /* If a NOP conversion is changing a pointer to array of foo to a pointer
2711 to foo, embed that change in the ADDR_EXPR by converting
2712 T array[U];
2713 (T *)&array
2714 ==>
2715 &array[L]
2716 where L is the lower bound. For simplicity, only do this for constant
2717 lower bound.
2718 The constraint is that the type of &array[L] is trivially convertible
2719 to T *. */
2720
2721 static void
2722 canonicalize_addr_expr (tree *expr_p)
2723 {
2724 tree expr = *expr_p;
2725 tree addr_expr = TREE_OPERAND (expr, 0);
2726 tree datype, ddatype, pddatype;
2727
2728 /* We simplify only conversions from an ADDR_EXPR to a pointer type. */
2729 if (!POINTER_TYPE_P (TREE_TYPE (expr))
2730 || TREE_CODE (addr_expr) != ADDR_EXPR)
2731 return;
2732
2733 /* The addr_expr type should be a pointer to an array. */
2734 datype = TREE_TYPE (TREE_TYPE (addr_expr));
2735 if (TREE_CODE (datype) != ARRAY_TYPE)
2736 return;
2737
2738 /* The pointer to element type shall be trivially convertible to
2739 the expression pointer type. */
2740 ddatype = TREE_TYPE (datype);
2741 pddatype = build_pointer_type (ddatype);
2742 if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
2743 pddatype))
2744 return;
2745
2746 /* The lower bound and element sizes must be constant. */
2747 if (!TYPE_SIZE_UNIT (ddatype)
2748 || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
2749 || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
2750 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
2751 return;
2752
2753 /* All checks succeeded. Build a new node to merge the cast. */
2754 *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
2755 TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
2756 NULL_TREE, NULL_TREE);
2757 *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);
2758
2759 /* We can have stripped a required restrict qualifier above. */
2760 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
2761 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
2762 }
2763
2764 /* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions
2765 underneath as appropriate. */
2766
2767 static enum gimplify_status
2768 gimplify_conversion (tree *expr_p)
2769 {
2770 location_t loc = EXPR_LOCATION (*expr_p);
2771 gcc_assert (CONVERT_EXPR_P (*expr_p));
2772
2773 /* Then strip away all but the outermost conversion. */
2774 STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));
2775
2776 /* And remove the outermost conversion if it's useless. */
2777 if (tree_ssa_useless_type_conversion (*expr_p))
2778 *expr_p = TREE_OPERAND (*expr_p, 0);
2779
2780 /* If we still have a conversion at the toplevel,
2781 then canonicalize some constructs. */
2782 if (CONVERT_EXPR_P (*expr_p))
2783 {
2784 tree sub = TREE_OPERAND (*expr_p, 0);
2785
2786 /* If a NOP conversion is changing the type of a COMPONENT_REF
2787 expression, then canonicalize its type now in order to expose more
2788 redundant conversions. */
2789 if (TREE_CODE (sub) == COMPONENT_REF)
2790 canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));
2791
2792 /* If a NOP conversion is changing a pointer to array of foo
2793 to a pointer to foo, embed that change in the ADDR_EXPR. */
2794 else if (TREE_CODE (sub) == ADDR_EXPR)
2795 canonicalize_addr_expr (expr_p);
2796 }
2797
2798 /* If we have a conversion to a non-register type force the
2799 use of a VIEW_CONVERT_EXPR instead. */
2800 if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
2801 *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
2802 TREE_OPERAND (*expr_p, 0));
2803
2804 /* Canonicalize CONVERT_EXPR to NOP_EXPR. */
2805 if (TREE_CODE (*expr_p) == CONVERT_EXPR)
2806 TREE_SET_CODE (*expr_p, NOP_EXPR);
2807
2808 return GS_OK;
2809 }
2810
2811 /* Gimplify a VAR_DECL or PARM_DECL. Return GS_OK if we expanded a
2812 DECL_VALUE_EXPR, and it's worth re-examining things. */
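
/* E.g. (illustrative) after gimplify_vla_decl, a VLA  int a[n]  has
   DECL_VALUE_EXPR (a) == *a.1, where a.1 is a pointer filled in from
   __builtin_alloca_with_align; a use of A is replaced here by an
   unshared copy of that dereference.  */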
2813
2814 static enum gimplify_status
2815 gimplify_var_or_parm_decl (tree *expr_p)
2816 {
2817 tree decl = *expr_p;
2818
2819 /* ??? If this is a local variable, and it has not been seen in any
2820 outer BIND_EXPR, then it's probably the result of a duplicate
2821 declaration, for which we've already issued an error. It would
2822 be really nice if the front end wouldn't leak these at all.
2823 Currently the only known culprit is C++ destructors, as seen
2824 in g++.old-deja/g++.jason/binding.C. */
2825 if (VAR_P (decl)
2826 && !DECL_SEEN_IN_BIND_EXPR_P (decl)
2827 && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
2828 && decl_function_context (decl) == current_function_decl)
2829 {
2830 gcc_assert (seen_error ());
2831 return GS_ERROR;
2832 }
2833
2834 /* When within an OMP context, notice uses of variables. */
2835 if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
2836 return GS_ALL_DONE;
2837
2838 /* If the decl is an alias for another expression, substitute it now. */
2839 if (DECL_HAS_VALUE_EXPR_P (decl))
2840 {
2841 *expr_p = unshare_expr (DECL_VALUE_EXPR (decl));
2842 return GS_OK;
2843 }
2844
2845 return GS_ALL_DONE;
2846 }
2847
2848 /* Recalculate the value of the TREE_SIDE_EFFECTS flag for T. */
2849
2850 static void
2851 recalculate_side_effects (tree t)
2852 {
2853 enum tree_code code = TREE_CODE (t);
2854 int len = TREE_OPERAND_LENGTH (t);
2855 int i;
2856
2857 switch (TREE_CODE_CLASS (code))
2858 {
2859 case tcc_expression:
2860 switch (code)
2861 {
2862 case INIT_EXPR:
2863 case MODIFY_EXPR:
2864 case VA_ARG_EXPR:
2865 case PREDECREMENT_EXPR:
2866 case PREINCREMENT_EXPR:
2867 case POSTDECREMENT_EXPR:
2868 case POSTINCREMENT_EXPR:
2869 /* All of these have side-effects, no matter what their
2870 operands are. */
2871 return;
2872
2873 default:
2874 break;
2875 }
2876 /* Fall through. */
2877
2878 case tcc_comparison: /* a comparison expression */
2879 case tcc_unary: /* a unary arithmetic expression */
2880 case tcc_binary: /* a binary arithmetic expression */
2881 case tcc_reference: /* a reference */
2882 case tcc_vl_exp: /* a function call */
2883 TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
2884 for (i = 0; i < len; ++i)
2885 {
2886 tree op = TREE_OPERAND (t, i);
2887 if (op && TREE_SIDE_EFFECTS (op))
2888 TREE_SIDE_EFFECTS (t) = 1;
2889 }
2890 break;
2891
2892 case tcc_constant:
2893 /* No side-effects. */
2894 return;
2895
2896 default:
2897 gcc_unreachable ();
2898 }
2899 }
2900
2901 /* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
2902 node *EXPR_P.
2903
2904 compound_lval
2905 : min_lval '[' val ']'
2906 | min_lval '.' ID
2907 | compound_lval '[' val ']'
2908 | compound_lval '.' ID
2909
2910 This is not part of the original SIMPLE definition, which separates
2911 array and member references, but it seems reasonable to handle them
2912 together. Also, this way we don't run into problems with union
2913 aliasing; gcc requires that for accesses through a union to alias, the
2914 union reference must be explicit, which was not always the case when we
2915 were splitting up array and member refs.
2916
2917 PRE_P points to the sequence where side effects that must happen before
2918 *EXPR_P should be stored.
2919
2920 POST_P points to the sequence where side effects that must happen after
2921 *EXPR_P should be stored. */
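
/* E.g. (illustrative) for  s.a[i][j]  the expression stack built below
   is

     { s.a[i][j], s.a[i], s.a }

   with P left pointing at S itself; any variable offsets and sizes are
   gimplified first, then the base S, then the indices I and J from
   left to right.  */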
2922
2923 static enum gimplify_status
2924 gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
2925 fallback_t fallback)
2926 {
2927 tree *p;
2928 enum gimplify_status ret = GS_ALL_DONE, tret;
2929 int i;
2930 location_t loc = EXPR_LOCATION (*expr_p);
2931 tree expr = *expr_p;
2932
2933 /* Create a stack of the subexpressions so later we can walk them in
2934 order from inner to outer. */
2935 auto_vec<tree, 10> expr_stack;
2936
2937 /* We can handle anything that get_inner_reference can deal with. */
2938 for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
2939 {
2940 restart:
2941 /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs. */
2942 if (TREE_CODE (*p) == INDIRECT_REF)
2943 *p = fold_indirect_ref_loc (loc, *p);
2944
2945 if (handled_component_p (*p))
2946 ;
2947 /* Expand DECL_VALUE_EXPR now. In some cases that may expose
2948 additional COMPONENT_REFs. */
2949 else if ((VAR_P (*p) || TREE_CODE (*p) == PARM_DECL)
2950 && gimplify_var_or_parm_decl (p) == GS_OK)
2951 goto restart;
2952 else
2953 break;
2954
2955 expr_stack.safe_push (*p);
2956 }
2957
2958 gcc_assert (expr_stack.length ());
2959
2960 /* Now EXPR_STACK is a stack of pointers to all the refs we've
2961 walked through and P points to the innermost expression.
2962
2963 Java requires that we elaborate nodes in source order. That
2964 means we must gimplify the inner expression followed by each of
2965 the indices, in order. But we can't gimplify the inner
2966 expression until we deal with any variable bounds, sizes, or
2967 positions in order to deal with PLACEHOLDER_EXPRs.
2968
2969 So we do this in three steps. First we deal with the annotations
2970 for any variables in the components, then we gimplify the base,
2971 then we gimplify any indices, from left to right. */
2972 for (i = expr_stack.length () - 1; i >= 0; i--)
2973 {
2974 tree t = expr_stack[i];
2975
2976 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
2977 {
2978 /* Gimplify the low bound and element type size and put them into
2979 the ARRAY_REF. If these values are set, they have already been
2980 gimplified. */
2981 if (TREE_OPERAND (t, 2) == NULL_TREE)
2982 {
2983 tree low = unshare_expr (array_ref_low_bound (t));
2984 if (!is_gimple_min_invariant (low))
2985 {
2986 TREE_OPERAND (t, 2) = low;
2987 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
2988 post_p, is_gimple_reg,
2989 fb_rvalue);
2990 ret = MIN (ret, tret);
2991 }
2992 }
2993 else
2994 {
2995 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
2996 is_gimple_reg, fb_rvalue);
2997 ret = MIN (ret, tret);
2998 }
2999
3000 if (TREE_OPERAND (t, 3) == NULL_TREE)
3001 {
3002 tree elmt_size = array_ref_element_size (t);
3003 if (!is_gimple_min_invariant (elmt_size))
3004 {
3005 elmt_size = unshare_expr (elmt_size);
3006 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
3007 tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));
3008
3009 /* Divide the element size by the alignment of the element
3010 type (above). */
3011 elmt_size = size_binop_loc (loc, EXACT_DIV_EXPR,
3012 elmt_size, factor);
3013
3014 TREE_OPERAND (t, 3) = elmt_size;
3015 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
3016 post_p, is_gimple_reg,
3017 fb_rvalue);
3018 ret = MIN (ret, tret);
3019 }
3020 }
3021 else
3022 {
3023 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
3024 is_gimple_reg, fb_rvalue);
3025 ret = MIN (ret, tret);
3026 }
3027 }
3028 else if (TREE_CODE (t) == COMPONENT_REF)
3029 {
3030 /* Set the field offset into T and gimplify it. */
3031 if (TREE_OPERAND (t, 2) == NULL_TREE)
3032 {
3033 tree offset = component_ref_field_offset (t);
3034 if (!is_gimple_min_invariant (offset))
3035 {
3036 offset = unshare_expr (offset);
3037 tree field = TREE_OPERAND (t, 1);
3038 tree factor
3039 = size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);
3040
3041 /* Divide the offset by its alignment. */
3042 offset = size_binop_loc (loc, EXACT_DIV_EXPR,
3043 offset, factor);
3044
3045 TREE_OPERAND (t, 2) = offset;
3046 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
3047 post_p, is_gimple_reg,
3048 fb_rvalue);
3049 ret = MIN (ret, tret);
3050 }
3051 }
3052 else
3053 {
3054 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
3055 is_gimple_reg, fb_rvalue);
3056 ret = MIN (ret, tret);
3057 }
3058 }
3059 }
3060
3061 /* Step 2 is to gimplify the base expression. Make sure lvalue is set
3062 so as to match the min_lval predicate. Failure to do so may result
3063 in the creation of large aggregate temporaries. */
3064 tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
3065 fallback | fb_lvalue);
3066 ret = MIN (ret, tret);
3067
3068 /* And finally, the indices and operands of ARRAY_REF. During this
3069 loop we also remove any useless conversions. */
3070 for (; expr_stack.length () > 0; )
3071 {
3072 tree t = expr_stack.pop ();
3073
3074 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3075 {
3076 /* Gimplify the dimension. */
3077 if (!is_gimple_min_invariant (TREE_OPERAND (t, 1)))
3078 {
3079 tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
3080 is_gimple_val, fb_rvalue);
3081 ret = MIN (ret, tret);
3082 }
3083 }
3084
3085 STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));
3086
3087 /* The innermost expression P may have originally had
3088 TREE_SIDE_EFFECTS set which would have caused all the outer
3089 expressions in *EXPR_P leading to P to also have had
3090 TREE_SIDE_EFFECTS set. */
3091 recalculate_side_effects (t);
3092 }
3093
3094 /* If the outermost expression is a COMPONENT_REF, canonicalize its type. */
3095 if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
3096 {
3097 canonicalize_component_ref (expr_p);
3098 }
3099
3100 expr_stack.release ();
3101
3102 gcc_assert (*expr_p == expr || ret != GS_ALL_DONE);
3103
3104 return ret;
3105 }
3106
3107 /* Gimplify the self modifying expression pointed to by EXPR_P
3108 (++, --, +=, -=).
3109
3110 PRE_P points to the list where side effects that must happen before
3111 *EXPR_P should be stored.
3112
3113 POST_P points to the list where side effects that must happen after
3114 *EXPR_P should be stored.
3115
3116 WANT_VALUE is nonzero iff we want to use the value of this expression
3117 in another expression.
3118
3119 ARITH_TYPE is the type the computation should be performed in. */
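
/* E.g. (illustrative, the temporary name is made up) with WANT_VALUE
   set,  b = a++  gimplifies to roughly

     a.1 = a;
     a = a.1 + 1;
     b = a.1;

   i.e. the saved rvalue A.1 becomes the postfix result.  */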
3120
3121 enum gimplify_status
3122 gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3123 bool want_value, tree arith_type)
3124 {
3125 enum tree_code code;
3126 tree lhs, lvalue, rhs, t1;
3127 gimple_seq post = NULL, *orig_post_p = post_p;
3128 bool postfix;
3129 enum tree_code arith_code;
3130 enum gimplify_status ret;
3131 location_t loc = EXPR_LOCATION (*expr_p);
3132
3133 code = TREE_CODE (*expr_p);
3134
3135 gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
3136 || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);
3137
3138 /* Prefix or postfix? */
3139 if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
3140 /* Faster to treat as prefix if result is not used. */
3141 postfix = want_value;
3142 else
3143 postfix = false;
3144
3145 /* For postfix, make sure the inner expression's post side effects
3146 are executed after side effects from this expression. */
3147 if (postfix)
3148 post_p = &post;
3149
3150 /* Add or subtract? */
3151 if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
3152 arith_code = PLUS_EXPR;
3153 else
3154 arith_code = MINUS_EXPR;
3155
3156 /* Gimplify the LHS into a GIMPLE lvalue. */
3157 lvalue = TREE_OPERAND (*expr_p, 0);
3158 ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
3159 if (ret == GS_ERROR)
3160 return ret;
3161
3162 /* Extract the operands to the arithmetic operation. */
3163 lhs = lvalue;
3164 rhs = TREE_OPERAND (*expr_p, 1);
3165
3166 /* For a postfix operator, we evaluate the LHS to an rvalue and then use
3167 that both as the result value and in the postqueue operation. */
3168 if (postfix)
3169 {
3170 ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
3171 if (ret == GS_ERROR)
3172 return ret;
3173
3174 lhs = get_initialized_tmp_var (lhs, pre_p);
3175 }
3176
3177 /* For pointer increment and decrement, use POINTER_PLUS_EXPR. */
3178 if (POINTER_TYPE_P (TREE_TYPE (lhs)))
3179 {
3180 rhs = convert_to_ptrofftype_loc (loc, rhs);
3181 if (arith_code == MINUS_EXPR)
3182 rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
3183 t1 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (*expr_p), lhs, rhs);
3184 }
3185 else
3186 t1 = fold_convert (TREE_TYPE (*expr_p),
3187 fold_build2 (arith_code, arith_type,
3188 fold_convert (arith_type, lhs),
3189 fold_convert (arith_type, rhs)));
3190
3191 if (postfix)
3192 {
3193 gimplify_assign (lvalue, t1, pre_p);
3194 gimplify_seq_add_seq (orig_post_p, post);
3195 *expr_p = lhs;
3196 return GS_ALL_DONE;
3197 }
3198 else
3199 {
3200 *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
3201 return GS_OK;
3202 }
3203 }
3204
3205 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
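
/* E.g. (illustrative) an object X of variably modified type is wrapped
   as  WITH_SIZE_EXPR <x, size>,  where SIZE is the unshared,
   placeholder-substituted TYPE_SIZE_UNIT expression, so the size
   remains available after gimplification.  */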
3206
3207 static void
3208 maybe_with_size_expr (tree *expr_p)
3209 {
3210 tree expr = *expr_p;
3211 tree type = TREE_TYPE (expr);
3212 tree size;
3213
3214 /* If we've already wrapped this or the type is error_mark_node, we can't do
3215 anything. */
3216 if (TREE_CODE (expr) == WITH_SIZE_EXPR
3217 || type == error_mark_node)
3218 return;
3219
3220 /* If the size isn't known or is a constant, we have nothing to do. */
3221 size = TYPE_SIZE_UNIT (type);
3222 if (!size || poly_int_tree_p (size))
3223 return;
3224
3225 /* Otherwise, make a WITH_SIZE_EXPR. */
3226 size = unshare_expr (size);
3227 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
3228 *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
3229 }
3230
3231 /* Helper for gimplify_call_expr. Gimplify a single argument *ARG_P.
3232 Store any side-effects in PRE_P. CALL_LOCATION is the location of
3233 the CALL_EXPR. If ALLOW_SSA is set the actual parameter may be
3234 gimplified to an SSA name. */
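
/* E.g. (illustrative) for  g (f ())  where F returns a large struct,
   a TARGET_EXPR wrapping the call to F may be stripped below so that
   the result need not go through an extra temporary copy.  */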
3235
3236 enum gimplify_status
3237 gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location,
3238 bool allow_ssa)
3239 {
3240 bool (*test) (tree);
3241 fallback_t fb;
3242
3243 /* In general, we allow lvalues for function arguments to avoid
3244 extra overhead of copying large aggregates out of even larger
3245 aggregates into temporaries only to copy the temporaries to
3246 the argument list. Make optimizers happy by pulling out to
3247 temporaries those types that fit in registers. */
3248 if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
3249 test = is_gimple_val, fb = fb_rvalue;
3250 else
3251 {
3252 test = is_gimple_lvalue, fb = fb_either;
3253 /* Also strip a TARGET_EXPR that would force an extra copy. */
3254 if (TREE_CODE (*arg_p) == TARGET_EXPR)
3255 {
3256 tree init = TARGET_EXPR_INITIAL (*arg_p);
3257 if (init
3258 && !VOID_TYPE_P (TREE_TYPE (init)))
3259 *arg_p = init;
3260 }
3261 }
3262
3263 /* If this is a variable sized type, we must remember the size. */
3264 maybe_with_size_expr (arg_p);
3265
3266 /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c. */
3267 /* Make sure arguments have the same location as the function call
3268 itself. */
3269 protected_set_expr_location (*arg_p, call_location);
3270
3271 /* There is a sequence point before a function call. Side effects in
3272 the argument list must occur before the actual call. So, when
3273 gimplifying arguments, force gimplify_expr to use an internal
3274 post queue which is then appended to the end of PRE_P. */
3275 return gimplify_expr (arg_p, pre_p, NULL, test, fb, allow_ssa);
3276 }
3277
3278 /* Don't fold inside offloading or taskreg regions: it can break code by
3279 adding decl references that weren't in the source. We'll do it during
3280 the omplower pass instead. */
3281
3282 static bool
3283 maybe_fold_stmt (gimple_stmt_iterator *gsi)
3284 {
3285 struct gimplify_omp_ctx *ctx;
3286 for (ctx = gimplify_omp_ctxp; ctx; ctx = ctx->outer_context)
3287 if ((ctx->region_type & (ORT_TARGET | ORT_PARALLEL | ORT_TASK)) != 0)
3288 return false;
3289 else if ((ctx->region_type & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
3290 return false;
3291 /* Delay folding of builtins until the IL is in a consistent state
3292 so the diagnostic machinery can do a better job. */
3293 if (gimple_call_builtin_p (gsi_stmt (*gsi)))
3294 return false;
3295 return fold_stmt (gsi);
3296 }
3297
3298 /* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
3299 WANT_VALUE is true if the result of the call is desired. */
3300
3301 static enum gimplify_status
3302 gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
3303 {
3304 tree fndecl, parms, p, fnptrtype;
3305 enum gimplify_status ret;
3306 int i, nargs;
3307 gcall *call;
3308 bool builtin_va_start_p = false;
3309 location_t loc = EXPR_LOCATION (*expr_p);
3310
3311 gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);
3312
3313 /* For reliable diagnostics during inlining, it is necessary that
3314 every call_expr be annotated with file and line. */
3315 if (! EXPR_HAS_LOCATION (*expr_p))
3316 SET_EXPR_LOCATION (*expr_p, input_location);
3317
3318 /* Gimplify internal functions created in the FEs. */
3319 if (CALL_EXPR_FN (*expr_p) == NULL_TREE)
3320 {
3321 if (want_value)
3322 return GS_ALL_DONE;
3323
3324 nargs = call_expr_nargs (*expr_p);
3325 enum internal_fn ifn = CALL_EXPR_IFN (*expr_p);
3326 auto_vec<tree> vargs (nargs);
3327
3328 for (i = 0; i < nargs; i++)
3329 {
3330 gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
3331 EXPR_LOCATION (*expr_p));
3332 vargs.quick_push (CALL_EXPR_ARG (*expr_p, i));
3333 }
3334
3335 gcall *call = gimple_build_call_internal_vec (ifn, vargs);
3336 gimple_call_set_nothrow (call, TREE_NOTHROW (*expr_p));
3337 gimplify_seq_add_stmt (pre_p, call);
3338 return GS_ALL_DONE;
3339 }
3340
3341 /* This may be a call to a builtin function.
3342
3343 Builtin function calls may be transformed into different
3344 (and more efficient) builtin function calls under certain
3345 circumstances. Unfortunately, gimplification can muck things
3346 up enough that the builtin expanders are not aware that certain
3347 transformations are still valid.
3348
3349 So we attempt transformation/gimplification of the call before
3350 we gimplify the CALL_EXPR. At this time we do not manage to
3351 transform all calls in the same manner as the expanders do, but
3352 we do transform most of them. */
3353 fndecl = get_callee_fndecl (*expr_p);
3354 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
3355 switch (DECL_FUNCTION_CODE (fndecl))
3356 {
3357 CASE_BUILT_IN_ALLOCA:
3358 /* If the call has been built for a variable-sized object, then we
3359 want to restore the stack level when the enclosing BIND_EXPR is
3360 exited to reclaim the allocated space; otherwise, we precisely
3361 need to do the opposite and preserve the latest stack level. */
3362 if (CALL_ALLOCA_FOR_VAR_P (*expr_p))
3363 gimplify_ctxp->save_stack = true;
3364 else
3365 gimplify_ctxp->keep_stack = true;
3366 break;
3367
3368 case BUILT_IN_VA_START:
3369 {
3370 builtin_va_start_p = true;
3371 if (call_expr_nargs (*expr_p) < 2)
3372 {
3373 error ("too few arguments to function %<va_start%>");
3374 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
3375 return GS_OK;
3376 }
3377
3378 if (fold_builtin_next_arg (*expr_p, true))
3379 {
3380 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
3381 return GS_OK;
3382 }
3383 break;
3384 }
3385
3386 case BUILT_IN_EH_RETURN:
3387 cfun->calls_eh_return = true;
3388 break;
3389
3390 default:
3391 ;
3392 }
3393 if (fndecl && fndecl_built_in_p (fndecl))
3394 {
3395 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
3396 if (new_tree && new_tree != *expr_p)
3397 {
3398 /* There was a transformation of this call which computes the
3399 same value, but in a more efficient way. Return and try
3400 again. */
3401 *expr_p = new_tree;
3402 return GS_OK;
3403 }
3404 }
3405
3406 /* Remember the original function pointer type. */
3407 fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
3408
3409 if (flag_openmp
3410 && fndecl
3411 && cfun
3412 && (cfun->curr_properties & PROP_gimple_any) == 0)
3413 {
3414 tree variant = omp_resolve_declare_variant (fndecl);
3415 if (variant != fndecl)
3416 CALL_EXPR_FN (*expr_p) = build1 (ADDR_EXPR, fnptrtype, variant);
3417 }
3418
3419 /* There is a sequence point before the call, so any side effects in
3420 the calling expression must occur before the actual call. Force
3421 gimplify_expr to use an internal post queue. */
3422 ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
3423 is_gimple_call_addr, fb_rvalue);
3424
3425 nargs = call_expr_nargs (*expr_p);
3426
3427 /* Get argument types for verification. */
3428 fndecl = get_callee_fndecl (*expr_p);
3429 parms = NULL_TREE;
3430 if (fndecl)
3431 parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3432 else
3433 parms = TYPE_ARG_TYPES (TREE_TYPE (fnptrtype));
3434
3435 if (fndecl && DECL_ARGUMENTS (fndecl))
3436 p = DECL_ARGUMENTS (fndecl);
3437 else if (parms)
3438 p = parms;
3439 else
3440 p = NULL_TREE;
3441 for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
3442 ;
3443
3444 /* If the last argument is __builtin_va_arg_pack () and it is not
3445 passed as a named argument, decrease the number of CALL_EXPR
3446 arguments and set instead the CALL_EXPR_VA_ARG_PACK flag. */
3447 if (!p
3448 && i < nargs
3449 && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
3450 {
3451 tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
3452 tree last_arg_fndecl = get_callee_fndecl (last_arg);
3453
3454 if (last_arg_fndecl
3455 && fndecl_built_in_p (last_arg_fndecl, BUILT_IN_VA_ARG_PACK))
3456 {
3457 tree call = *expr_p;
3458
3459 --nargs;
3460 *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
3461 CALL_EXPR_FN (call),
3462 nargs, CALL_EXPR_ARGP (call));
3463
3464 /* Copy all CALL_EXPR flags, location and block, except
3465 CALL_EXPR_VA_ARG_PACK flag. */
3466 CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
3467 CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
3468 CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
3469 = CALL_EXPR_RETURN_SLOT_OPT (call);
3470 CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
3471 SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));
3472
3473 /* Set CALL_EXPR_VA_ARG_PACK. */
3474 CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
3475 }
3476 }
3477
3478 /* If the call returns twice then after building the CFG the call
3479 argument computations will no longer dominate the call because
3480 we add an abnormal incoming edge to the call. So do not use SSA
3481 vars there. */
3482 bool returns_twice = call_expr_flags (*expr_p) & ECF_RETURNS_TWICE;
3483
3484 /* Gimplify the function arguments. */
3485 if (nargs > 0)
3486 {
3487 for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
3488 PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
3489 PUSH_ARGS_REVERSED ? i-- : i++)
3490 {
3491 enum gimplify_status t;
3492
3493 /* Avoid gimplifying the second argument to va_start, which needs to
3494 be the plain PARM_DECL. */
3495 if ((i != 1) || !builtin_va_start_p)
3496 {
3497 t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
3498 EXPR_LOCATION (*expr_p), ! returns_twice);
3499
3500 if (t == GS_ERROR)
3501 ret = GS_ERROR;
3502 }
3503 }
3504 }
3505
3506 /* Gimplify the static chain. */
3507 if (CALL_EXPR_STATIC_CHAIN (*expr_p))
3508 {
3509 if (fndecl && !DECL_STATIC_CHAIN (fndecl))
3510 CALL_EXPR_STATIC_CHAIN (*expr_p) = NULL;
3511 else
3512 {
3513 enum gimplify_status t;
3514 t = gimplify_arg (&CALL_EXPR_STATIC_CHAIN (*expr_p), pre_p,
3515 EXPR_LOCATION (*expr_p), ! returns_twice);
3516 if (t == GS_ERROR)
3517 ret = GS_ERROR;
3518 }
3519 }
3520
3521 /* Verify the function result. */
3522 if (want_value && fndecl
3523 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype))))
3524 {
3525 error_at (loc, "using result of function returning %<void%>");
3526 ret = GS_ERROR;
3527 }
3528
3529 /* Try this again in case gimplification exposed something. */
3530 if (ret != GS_ERROR)
3531 {
3532 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
3533
3534 if (new_tree && new_tree != *expr_p)
3535 {
3536 /* There was a transformation of this call which computes the
3537 same value, but in a more efficient way. Return and try
3538 again. */
3539 *expr_p = new_tree;
3540 return GS_OK;
3541 }
3542 }
3543 else
3544 {
3545 *expr_p = error_mark_node;
3546 return GS_ERROR;
3547 }
3548
3549 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS
3550 on the CALL_EXPR. This allows us to eliminate redundant or useless
3551 calls to "const" functions. */
3552 if (TREE_CODE (*expr_p) == CALL_EXPR)
3553 {
3554 int flags = call_expr_flags (*expr_p);
3555 if (flags & (ECF_CONST | ECF_PURE)
3556 /* An infinite loop is considered a side effect. */
3557 && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
3558 TREE_SIDE_EFFECTS (*expr_p) = 0;
3559 }
3560
3561 /* If the value is not needed by the caller, emit a new GIMPLE_CALL
3562 and clear *EXPR_P. Otherwise, leave *EXPR_P in its gimplified
3563 form and delegate the creation of a GIMPLE_CALL to
3564 gimplify_modify_expr. This is always possible because when
3565 WANT_VALUE is true, the caller wants the result of this call into
3566 a temporary, which means that we will emit an INIT_EXPR in
3567 internal_get_tmp_var which will then be handled by
3568 gimplify_modify_expr. */
3569 if (!want_value)
3570 {
3571 /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
3572 have to do is replicate it as a GIMPLE_CALL tuple. */
3573 gimple_stmt_iterator gsi;
3574 call = gimple_build_call_from_tree (*expr_p, fnptrtype);
3575 notice_special_calls (call);
3576 gimplify_seq_add_stmt (pre_p, call);
3577 gsi = gsi_last (*pre_p);
3578 maybe_fold_stmt (&gsi);
3579 *expr_p = NULL_TREE;
3580 }
3581 else
3582 /* Remember the original function pointer type. */
3583 CALL_EXPR_FN (*expr_p) = build1 (NOP_EXPR, fnptrtype,
3584 CALL_EXPR_FN (*expr_p));
3585
3586 return ret;
3587 }
3588
3589 /* Handle shortcut semantics in the predicate operand of a COND_EXPR by
3590 rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
3591
3592 TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
3593 condition is true or false, respectively. If null, we should generate
3594 our own to skip over the evaluation of this specific expression.
3595
3596 LOCUS is the source location of the COND_EXPR.
3597
3598 This function is the tree equivalent of do_jump.
3599
3600 shortcut_cond_r should only be called by shortcut_cond_expr. */

static tree
shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
                 location_t locus)
{
  tree local_label = NULL_TREE;
  tree t, expr = NULL;

  /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
     retain the shortcut semantics.  Just insert the gotos here;
     shortcut_cond_expr will append the real blocks later.  */
  if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
    {
      location_t new_locus;

      /* Turn if (a && b) into

         if (a); else goto no;
         if (b) goto yes; else goto no;
         (no:) */

      if (false_label_p == NULL)
        false_label_p = &local_label;

      /* Keep the original source location on the first 'if'.  */
      t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus);
      append_to_statement_list (t, &expr);

      /* Set the source location of the && on the second 'if'.  */
      new_locus = rexpr_location (pred, locus);
      t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
                           new_locus);
      append_to_statement_list (t, &expr);
    }
  else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
    {
      location_t new_locus;

      /* Turn if (a || b) into

         if (a) goto yes;
         if (b) goto yes; else goto no;
         (yes:) */

      if (true_label_p == NULL)
        true_label_p = &local_label;

      /* Keep the original source location on the first 'if'.  */
      t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus);
      append_to_statement_list (t, &expr);

      /* Set the source location of the || on the second 'if'.  */
      new_locus = rexpr_location (pred, locus);
      t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
                           new_locus);
      append_to_statement_list (t, &expr);
    }
  else if (TREE_CODE (pred) == COND_EXPR
           && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 1)))
           && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 2))))
    {
      location_t new_locus;

      /* As long as we're messing with gotos, turn if (a ? b : c) into
         if (a)
           if (b) goto yes; else goto no;
         else
           if (c) goto yes; else goto no;

         Don't do this if one of the arms has void type, which can happen
         in C++ when the arm is throw.  */

      /* Keep the original source location on the first 'if'.  Set the source
         location of the ? on the second 'if'.  */
      new_locus = rexpr_location (pred, locus);
      expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
                     shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
                                      false_label_p, locus),
                     shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
                                      false_label_p, new_locus));
    }
  else
    {
      expr = build3 (COND_EXPR, void_type_node, pred,
                     build_and_jump (true_label_p),
                     build_and_jump (false_label_p));
      SET_EXPR_LOCATION (expr, locus);
    }

  if (local_label)
    {
      t = build1 (LABEL_EXPR, void_type_node, local_label);
      append_to_statement_list (t, &expr);
    }

  return expr;
}

/* If EXPR is a GOTO_EXPR, return it.  If it is a STATEMENT_LIST, skip
   any of its leading DEBUG_BEGIN_STMTS and recurse on the subsequent
   statement, if it is the last one.  Otherwise, return NULL.  */

static tree
find_goto (tree expr)
{
  if (!expr)
    return NULL_TREE;

  if (TREE_CODE (expr) == GOTO_EXPR)
    return expr;

  if (TREE_CODE (expr) != STATEMENT_LIST)
    return NULL_TREE;

  tree_stmt_iterator i = tsi_start (expr);

  while (!tsi_end_p (i) && TREE_CODE (tsi_stmt (i)) == DEBUG_BEGIN_STMT)
    tsi_next (&i);

  if (!tsi_one_before_end_p (i))
    return NULL_TREE;

  return find_goto (tsi_stmt (i));
}

/* Same as find_goto, except that it returns NULL if the destination
   is not a LABEL_DECL.  */

static inline tree
find_goto_label (tree expr)
{
  tree dest = find_goto (expr);
  if (dest && TREE_CODE (GOTO_DESTINATION (dest)) == LABEL_DECL)
    return dest;
  return NULL_TREE;
}

/* Given a conditional expression EXPR with short-circuit boolean
   predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
   predicate apart into the equivalent sequence of conditionals.  */
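
/* For example (illustrative only, label names invented), C input

     if (a && b)
       c ();
     else
       d ();

   is broken apart along the lines of

     if (a); else goto no;
     if (b); else goto no;
     c (); goto end;
     no: d ();
     end:

   with the labels created here, or hijacked from existing gotos in
   the arms, by the code below.  */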

static tree
shortcut_cond_expr (tree expr)
{
  tree pred = TREE_OPERAND (expr, 0);
  tree then_ = TREE_OPERAND (expr, 1);
  tree else_ = TREE_OPERAND (expr, 2);
  tree true_label, false_label, end_label, t;
  tree *true_label_p;
  tree *false_label_p;
  bool emit_end, emit_false, jump_over_else;
  bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
  bool else_se = else_ && TREE_SIDE_EFFECTS (else_);

  /* First do simple transformations.  */
  if (!else_se)
    {
      /* If there is no 'else', turn
           if (a && b) then c
         into
           if (a) if (b) then c.  */
      while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
        {
          /* Keep the original source location on the first 'if'.  */
          location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
          TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
          /* Set the source location of the && on the second 'if'.  */
          if (rexpr_has_location (pred))
            SET_EXPR_LOCATION (expr, rexpr_location (pred));
          then_ = shortcut_cond_expr (expr);
          then_se = then_ && TREE_SIDE_EFFECTS (then_);
          pred = TREE_OPERAND (pred, 0);
          expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
          SET_EXPR_LOCATION (expr, locus);
        }
    }

  if (!then_se)
    {
      /* If there is no 'then', turn
           if (a || b); else d
         into
           if (a); else if (b); else d.  */
      while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
        {
          /* Keep the original source location on the first 'if'.  */
          location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
          TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
          /* Set the source location of the || on the second 'if'.  */
          if (rexpr_has_location (pred))
            SET_EXPR_LOCATION (expr, rexpr_location (pred));
          else_ = shortcut_cond_expr (expr);
          else_se = else_ && TREE_SIDE_EFFECTS (else_);
          pred = TREE_OPERAND (pred, 0);
          expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
          SET_EXPR_LOCATION (expr, locus);
        }
    }

  /* If we're done, great.  */
  if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
      && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
    return expr;

  /* Otherwise we need to mess with gotos.  Change
       if (a) c; else d;
     to
       if (a); else goto no;
       c; goto end;
       no: d; end:
     and recursively gimplify the condition.  */

  true_label = false_label = end_label = NULL_TREE;

  /* If our arms just jump somewhere, hijack those labels so we don't
     generate jumps to jumps.  */

  if (tree then_goto = find_goto_label (then_))
    {
      true_label = GOTO_DESTINATION (then_goto);
      then_ = NULL;
      then_se = false;
    }

  if (tree else_goto = find_goto_label (else_))
    {
      false_label = GOTO_DESTINATION (else_goto);
      else_ = NULL;
      else_se = false;
    }

  /* If we aren't hijacking a label for the 'then' branch, it falls through.  */
  if (true_label)
    true_label_p = &true_label;
  else
    true_label_p = NULL;

  /* The 'else' branch also needs a label if it contains interesting code.  */
  if (false_label || else_se)
    false_label_p = &false_label;
  else
    false_label_p = NULL;

  /* If there was nothing else in our arms, just forward the label(s).  */
  if (!then_se && !else_se)
    return shortcut_cond_r (pred, true_label_p, false_label_p,
                            EXPR_LOC_OR_LOC (expr, input_location));

  /* If our last subexpression already has a terminal label, reuse it.  */
  if (else_se)
    t = expr_last (else_);
  else if (then_se)
    t = expr_last (then_);
  else
    t = NULL;
  if (t && TREE_CODE (t) == LABEL_EXPR)
    end_label = LABEL_EXPR_LABEL (t);

  /* If we don't care about jumping to the 'else' branch, jump to the end
     if the condition is false.  */
  if (!false_label_p)
    false_label_p = &end_label;

  /* We only want to emit these labels if we aren't hijacking them.  */
  emit_end = (end_label == NULL_TREE);
  emit_false = (false_label == NULL_TREE);

  /* We only emit the jump over the else clause if we have to--if the
     then clause may fall through.  Otherwise we can wind up with a
     useless jump and a useless label at the end of gimplified code,
     which will cause us to think that this conditional as a whole
     falls through even if it doesn't.  If we then inline a function
     which ends with such a condition, that can cause us to issue an
     inappropriate warning about control reaching the end of a
     non-void function.  */
  jump_over_else = block_may_fallthru (then_);

  pred = shortcut_cond_r (pred, true_label_p, false_label_p,
                          EXPR_LOC_OR_LOC (expr, input_location));

  expr = NULL;
  append_to_statement_list (pred, &expr);

  append_to_statement_list (then_, &expr);
  if (else_se)
    {
      if (jump_over_else)
        {
          tree last = expr_last (expr);
          t = build_and_jump (&end_label);
          if (rexpr_has_location (last))
            SET_EXPR_LOCATION (t, rexpr_location (last));
          append_to_statement_list (t, &expr);
        }
      if (emit_false)
        {
          t = build1 (LABEL_EXPR, void_type_node, false_label);
          append_to_statement_list (t, &expr);
        }
      append_to_statement_list (else_, &expr);
    }
  if (emit_end && end_label)
    {
      t = build1 (LABEL_EXPR, void_type_node, end_label);
      append_to_statement_list (t, &expr);
    }

  return expr;
}

/* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE.  */
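
/* Illustrative examples: a comparison such as 'a < b' merely has its
   type retargeted to boolean_type_node, truth expressions additionally
   have their operands boolified recursively, and a plain integer used
   as a truth value is wrapped in a conversion, conceptually (bool) n.  */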

tree
gimple_boolify (tree expr)
{
  tree type = TREE_TYPE (expr);
  location_t loc = EXPR_LOCATION (expr);

  if (TREE_CODE (expr) == NE_EXPR
      && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR
      && integer_zerop (TREE_OPERAND (expr, 1)))
    {
      tree call = TREE_OPERAND (expr, 0);
      tree fn = get_callee_fndecl (call);

      /* For __builtin_expect ((long) (x), y) recurse into x as well
         if x is truth_value_p.  */
      if (fn
          && fndecl_built_in_p (fn, BUILT_IN_EXPECT)
          && call_expr_nargs (call) == 2)
        {
          tree arg = CALL_EXPR_ARG (call, 0);
          if (arg)
            {
              if (TREE_CODE (arg) == NOP_EXPR
                  && TREE_TYPE (arg) == TREE_TYPE (call))
                arg = TREE_OPERAND (arg, 0);
              if (truth_value_p (TREE_CODE (arg)))
                {
                  arg = gimple_boolify (arg);
                  CALL_EXPR_ARG (call, 0)
                    = fold_convert_loc (loc, TREE_TYPE (call), arg);
                }
            }
        }
    }

  switch (TREE_CODE (expr))
    {
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      /* Also boolify the arguments of truth exprs.  */
      TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
      /* FALLTHRU */

    case TRUTH_NOT_EXPR:
      TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));

      /* These expressions always produce boolean results.  */
      if (TREE_CODE (type) != BOOLEAN_TYPE)
        TREE_TYPE (expr) = boolean_type_node;
      return expr;

    case ANNOTATE_EXPR:
      switch ((enum annot_expr_kind) TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)))
        {
        case annot_expr_ivdep_kind:
        case annot_expr_unroll_kind:
        case annot_expr_no_vector_kind:
        case annot_expr_vector_kind:
        case annot_expr_parallel_kind:
          TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
          if (TREE_CODE (type) != BOOLEAN_TYPE)
            TREE_TYPE (expr) = boolean_type_node;
          return expr;
        default:
          gcc_unreachable ();
        }

    default:
      if (COMPARISON_CLASS_P (expr))
        {
          /* These expressions always produce boolean results.  */
          if (TREE_CODE (type) != BOOLEAN_TYPE)
            TREE_TYPE (expr) = boolean_type_node;
          return expr;
        }
      /* Other expressions that get here must have boolean values, but
         might need to be converted to the appropriate mode.  */
      if (TREE_CODE (type) == BOOLEAN_TYPE)
        return expr;
      return fold_convert_loc (loc, boolean_type_node, expr);
    }
}

/* Given a conditional expression *EXPR_P without side effects, gimplify
   its operands.  New statements are inserted to PRE_P.  */
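
/* Illustrative example: with allow_rhs_cond_expr set and operands free
   of side effects, a use such as

     x = a && b ? p : q;

   keeps the COND_EXPR as an rvalue; the && is demoted to the
   non-short-circuit TRUTH_AND_EXPR and gimplified as a plain value, so
   no branches need to be emitted for it here.  */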

static enum gimplify_status
gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree expr = *expr_p, cond;
  enum gimplify_status ret, tret;
  enum tree_code code;

  cond = gimple_boolify (COND_EXPR_COND (expr));

  /* We need to handle && and || specially, as their gimplification
     creates pure cond_expr, thus leading to an infinite cycle otherwise.  */
  code = TREE_CODE (cond);
  if (code == TRUTH_ANDIF_EXPR)
    TREE_SET_CODE (cond, TRUTH_AND_EXPR);
  else if (code == TRUTH_ORIF_EXPR)
    TREE_SET_CODE (cond, TRUTH_OR_EXPR);
  ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_condexpr, fb_rvalue);
  COND_EXPR_COND (*expr_p) = cond;

  tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
                        is_gimple_val, fb_rvalue);
  ret = MIN (ret, tret);
  tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
                        is_gimple_val, fb_rvalue);

  return MIN (ret, tret);
}

/* Return true if evaluating EXPR could trap.
   EXPR is GENERIC, while tree_could_trap_p can be called
   only on GIMPLE.  */
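
/* E.g. a GENERIC arm such as 'x / y' or '*p' could trap at run time,
   so it must not be evaluated unconditionally; this recursive walk
   flags such operands anywhere in the tree, whereas tree_could_trap_p
   assumes its argument is already in GIMPLE form.  */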

bool
generic_expr_could_trap_p (tree expr)
{
  unsigned i, n;

  if (!expr || is_gimple_val (expr))
    return false;

  if (!EXPR_P (expr) || tree_could_trap_p (expr))
    return true;

  n = TREE_OPERAND_LENGTH (expr);
  for (i = 0; i < n; i++)
    if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
      return true;

  return false;
}

/* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
   into

     if (p)                      if (p)
       t1 = a;                     a;
     else             or        else
       t1 = b;                     b;
     t1;

   The second form is used when *EXPR_P is of type void.

   PRE_P points to the list where side effects that must happen before
   *EXPR_P should be stored.  */
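
/* Illustrative note: when the value is required as an lvalue of an
   addressable type, the code below instead builds the conditional over
   addresses, conceptually

     tmp = p ? &a : &b;
     ... *tmp ...

   so that no temporary of the addressable type itself is created.  */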

static enum gimplify_status
gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
{
  tree expr = *expr_p;
  tree type = TREE_TYPE (expr);
  location_t loc = EXPR_LOCATION (expr);
  tree tmp, arm1, arm2;
  enum gimplify_status ret;
  tree label_true, label_false, label_cont;
  bool have_then_clause_p, have_else_clause_p;
  gcond *cond_stmt;
  enum tree_code pred_code;
  gimple_seq seq = NULL;

  /* If this COND_EXPR has a value, copy the values into a temporary within
     the arms.  */
  if (!VOID_TYPE_P (type))
    {
      tree then_ = TREE_OPERAND (expr, 1), else_ = TREE_OPERAND (expr, 2);
      tree result;

      /* If either an rvalue is ok or we do not require an lvalue, create the
         temporary.  But we cannot do that if the type is addressable.  */
      if (((fallback & fb_rvalue) || !(fallback & fb_lvalue))
          && !TREE_ADDRESSABLE (type))
        {
          if (gimplify_ctxp->allow_rhs_cond_expr
              /* If either branch has side effects or could trap, it can't be
                 evaluated unconditionally.  */
              && !TREE_SIDE_EFFECTS (then_)
              && !generic_expr_could_trap_p (then_)
              && !TREE_SIDE_EFFECTS (else_)
              && !generic_expr_could_trap_p (else_))
            return gimplify_pure_cond_expr (expr_p, pre_p);

          tmp = create_tmp_var (type, "iftmp");
          result = tmp;
        }

      /* Otherwise, only create and copy references to the values.  */
      else
        {
          type = build_pointer_type (type);

          if (!VOID_TYPE_P (TREE_TYPE (then_)))
            then_ = build_fold_addr_expr_loc (loc, then_);

          if (!VOID_TYPE_P (TREE_TYPE (else_)))
            else_ = build_fold_addr_expr_loc (loc, else_);

          expr
            = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_);

          tmp = create_tmp_var (type, "iftmp");
          result = build_simple_mem_ref_loc (loc, tmp);
        }

      /* Build the new then clause, `tmp = then_;'.  But don't build the
         assignment if the value is void; in C++ it can be if it's a throw.  */
      if (!VOID_TYPE_P (TREE_TYPE (then_)))
        TREE_OPERAND (expr, 1) = build2 (INIT_EXPR, type, tmp, then_);

      /* Similarly, build the new else clause, `tmp = else_;'.  */
      if (!VOID_TYPE_P (TREE_TYPE (else_)))
        TREE_OPERAND (expr, 2) = build2 (INIT_EXPR, type, tmp, else_);

      TREE_TYPE (expr) = void_type_node;
      recalculate_side_effects (expr);

      /* Move the COND_EXPR to the prequeue.  */
      gimplify_stmt (&expr, pre_p);

      *expr_p = result;
      return GS_ALL_DONE;
    }

  /* Remove any COMPOUND_EXPR so the following cases will be caught.  */
  STRIP_TYPE_NOPS (TREE_OPERAND (expr, 0));
  if (TREE_CODE (TREE_OPERAND (expr, 0)) == COMPOUND_EXPR)
    gimplify_compound_expr (&TREE_OPERAND (expr, 0), pre_p, true);

  /* Make sure the condition has BOOLEAN_TYPE.  */
  TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));

  /* Break apart && and || conditions.  */
  if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
      || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
    {
      expr = shortcut_cond_expr (expr);

      if (expr != *expr_p)
        {
          *expr_p = expr;

          /* We can't rely on gimplify_expr to re-gimplify the expanded
             form properly, as cleanups might cause the target labels to be
             wrapped in a TRY_FINALLY_EXPR.  To prevent that, we need to
             set up a conditional context.  */
          gimple_push_condition ();
          gimplify_stmt (expr_p, &seq);
          gimple_pop_condition (pre_p);
          gimple_seq_add_seq (pre_p, seq);

          return GS_ALL_DONE;
        }
    }

  /* Now do the normal gimplification.  */

  /* Gimplify condition.  */
  ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL,
                       is_gimple_condexpr_for_cond, fb_rvalue);
  if (ret == GS_ERROR)
    return GS_ERROR;
  gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);

  gimple_push_condition ();

  have_then_clause_p = have_else_clause_p = false;
  label_true = find_goto_label (TREE_OPERAND (expr, 1));
  if (label_true
      && DECL_CONTEXT (GOTO_DESTINATION (label_true)) == current_function_decl
      /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
         have different locations, otherwise we end up with incorrect
         location information on the branches.  */
      && (optimize
          || !EXPR_HAS_LOCATION (expr)
          || !rexpr_has_location (label_true)
          || EXPR_LOCATION (expr) == rexpr_location (label_true)))
    {
      have_then_clause_p = true;
      label_true = GOTO_DESTINATION (label_true);
    }
  else
    label_true = create_artificial_label (UNKNOWN_LOCATION);
  label_false = find_goto_label (TREE_OPERAND (expr, 2));
  if (label_false
      && DECL_CONTEXT (GOTO_DESTINATION (label_false)) == current_function_decl
      /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
         have different locations, otherwise we end up with incorrect
         location information on the branches.  */
      && (optimize
          || !EXPR_HAS_LOCATION (expr)
          || !rexpr_has_location (label_false)
          || EXPR_LOCATION (expr) == rexpr_location (label_false)))
    {
      have_else_clause_p = true;
      label_false = GOTO_DESTINATION (label_false);
    }
  else
    label_false = create_artificial_label (UNKNOWN_LOCATION);

  gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
                                 &arm2);
  cond_stmt = gimple_build_cond (pred_code, arm1, arm2, label_true,
                                 label_false);
  gimple_set_no_warning (cond_stmt, TREE_NO_WARNING (COND_EXPR_COND (expr)));
  gimplify_seq_add_stmt (&seq, cond_stmt);
  gimple_stmt_iterator gsi = gsi_last (seq);
  maybe_fold_stmt (&gsi);

  label_cont = NULL_TREE;
  if (!have_then_clause_p)
    {
      /* For if (...) {} else { code; } put label_true after
         the else block.  */
      if (TREE_OPERAND (expr, 1) == NULL_TREE
          && !have_else_clause_p
          && TREE_OPERAND (expr, 2) != NULL_TREE)
        label_cont = label_true;
      else
        {
          gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
          have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
          /* For if (...) { code; } else {} or
             if (...) { code; } else goto label; or
             if (...) { code; return; } else { ... }
             label_cont isn't needed.  */
          if (!have_else_clause_p
              && TREE_OPERAND (expr, 2) != NULL_TREE
              && gimple_seq_may_fallthru (seq))
            {
              gimple *g;
              label_cont = create_artificial_label (UNKNOWN_LOCATION);

              g = gimple_build_goto (label_cont);

              /* GIMPLE_COND's are very low level; they have embedded
                 gotos.  This particular embedded goto should not be marked
                 with the location of the original COND_EXPR, as it would
                 correspond to the COND_EXPR's condition, not the ELSE or the
                 THEN arms.  To avoid marking it with the wrong location, flag
                 it as "no location".  */
              gimple_set_do_not_emit_location (g);

              gimplify_seq_add_stmt (&seq, g);
            }
        }
    }
  if (!have_else_clause_p)
    {
      gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
      have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
    }
  if (label_cont)
    gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));

  gimple_pop_condition (pre_p);
  gimple_seq_add_seq (pre_p, seq);

  if (ret == GS_ERROR)
    ; /* Do nothing.  */
  else if (have_then_clause_p || have_else_clause_p)
    ret = GS_ALL_DONE;
  else
    {
      /* Both arms are empty; replace the COND_EXPR with its predicate.  */
      expr = TREE_OPERAND (expr, 0);
      gimplify_stmt (&expr, pre_p);
    }

  *expr_p = NULL;
  return ret;
}

/* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
   to be marked addressable.

   We cannot rely on such an expression being directly markable if a temporary
   has been created by the gimplification.  In this case, we create another
   temporary and initialize it with a copy, which will become a store after we
   mark it addressable.  This can happen if the front-end passed us something
   that it could not mark addressable yet, like a Fortran pass-by-reference
   parameter (int) floatvar.  */

static void
prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p)
{
  while (handled_component_p (*expr_p))
    expr_p = &TREE_OPERAND (*expr_p, 0);
  if (is_gimple_reg (*expr_p))
    {
      /* Do not allow an SSA name as the temporary.  */
      tree var = get_initialized_tmp_var (*expr_p, seq_p, NULL, false);
      DECL_GIMPLE_REG_P (var) = 0;
      *expr_p = var;
    }
}

/* A subroutine of gimplify_modify_expr.  Replace a MODIFY_EXPR with
   a call to __builtin_memcpy.  */
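
/* For example (illustrative only), a block copy of a large aggregate

     struct big d, s;
     d = s;

   is replaced along these lines, with SIZE precomputed by the caller:

     __builtin_memcpy (&d, &s, sizeof (struct big));

   and when the value of the assignment is needed, the call's return
   value (the destination pointer) is dereferenced instead.  */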

static enum gimplify_status
gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
                                gimple_seq *seq_p)
{
  tree t, to, to_ptr, from, from_ptr;
  gcall *gs;
  location_t loc = EXPR_LOCATION (*expr_p);

  to = TREE_OPERAND (*expr_p, 0);
  from = TREE_OPERAND (*expr_p, 1);

  /* Mark the RHS addressable.  Beware that it may not be possible to do so
     directly if a temporary has been created by the gimplification.  */
  prepare_gimple_addressable (&from, seq_p);

  mark_addressable (from);
  from_ptr = build_fold_addr_expr_loc (loc, from);
  gimplify_arg (&from_ptr, seq_p, loc);

  mark_addressable (to);
  to_ptr = build_fold_addr_expr_loc (loc, to);
  gimplify_arg (&to_ptr, seq_p, loc);

  t = builtin_decl_implicit (BUILT_IN_MEMCPY);

  gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);

  if (want_value)
    {
      /* tmp = memcpy() */
      t = create_tmp_var (TREE_TYPE (to_ptr));
      gimple_call_set_lhs (gs, t);
      gimplify_seq_add_stmt (seq_p, gs);

      *expr_p = build_simple_mem_ref (t);
      return GS_ALL_DONE;
    }

  gimplify_seq_add_stmt (seq_p, gs);
  *expr_p = NULL;
  return GS_ALL_DONE;
}

/* A subroutine of gimplify_modify_expr.  Replace a MODIFY_EXPR with
   a call to __builtin_memset.  In this case we know that the RHS is
   a CONSTRUCTOR with an empty element list.  */
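
/* Illustration: zero-initialization from an empty constructor, e.g.

     struct big b = {};

   becomes, with SIZE supplied by the caller,

     __builtin_memset (&b, 0, sizeof (struct big));

   mirroring the memcpy case above.  */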

static enum gimplify_status
gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
                                gimple_seq *seq_p)
{
  tree t, from, to, to_ptr;
  gcall *gs;
  location_t loc = EXPR_LOCATION (*expr_p);

  /* Assert our assumptions, to abort instead of producing wrong code
     silently if they are not met.  Beware that the RHS CONSTRUCTOR might
     not be immediately exposed.  */
  from = TREE_OPERAND (*expr_p, 1);
  if (TREE_CODE (from) == WITH_SIZE_EXPR)
    from = TREE_OPERAND (from, 0);

  gcc_assert (TREE_CODE (from) == CONSTRUCTOR
              && vec_safe_is_empty (CONSTRUCTOR_ELTS (from)));

  /* Now proceed.  */
  to = TREE_OPERAND (*expr_p, 0);

  to_ptr = build_fold_addr_expr_loc (loc, to);
  gimplify_arg (&to_ptr, seq_p, loc);
  t = builtin_decl_implicit (BUILT_IN_MEMSET);

  gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);

  if (want_value)
    {
      /* tmp = memset() */
      t = create_tmp_var (TREE_TYPE (to_ptr));
      gimple_call_set_lhs (gs, t);
      gimplify_seq_add_stmt (seq_p, gs);

      *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
      return GS_ALL_DONE;
    }

  gimplify_seq_add_stmt (seq_p, gs);
  *expr_p = NULL;
  return GS_ALL_DONE;
}

/* A subroutine of gimplify_init_ctor_preeval.  Called via walk_tree,
   determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
   assignment.  Return non-null if we detect a potential overlap.  */

struct gimplify_init_ctor_preeval_data
{
  /* The base decl of the lhs object.  May be NULL, in which case we
     have to assume the lhs is indirect.  */
  tree lhs_base_decl;

  /* The alias set of the lhs object.  */
  alias_set_type lhs_alias_set;
};

static tree
gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
{
  struct gimplify_init_ctor_preeval_data *data
    = (struct gimplify_init_ctor_preeval_data *) xdata;
  tree t = *tp;

  /* If we find the base object, obviously we have overlap.  */
  if (data->lhs_base_decl == t)
    return t;

  /* If the constructor component is indirect, determine if we have a
     potential overlap with the lhs.  The only bits of information we
     have to go on at this point are addressability and alias sets.  */
  if ((INDIRECT_REF_P (t)
       || TREE_CODE (t) == MEM_REF)
      && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
      && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
    return t;

  /* If the constructor component is a call, determine if it can hide a
     potential overlap with the lhs through an INDIRECT_REF like above.
     ??? Ugh - this is completely broken.  In fact this whole analysis
     doesn't look conservative.  */
  if (TREE_CODE (t) == CALL_EXPR)
    {
      tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));

      for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
        if (POINTER_TYPE_P (TREE_VALUE (type))
            && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
            && alias_sets_conflict_p (data->lhs_alias_set,
                                      get_alias_set
                                        (TREE_TYPE (TREE_VALUE (type)))))
          return t;
    }

  if (IS_TYPE_OR_DECL_P (t))
    *walk_subtrees = 0;
  return NULL;
}

/* A subroutine of gimplify_init_constructor.  Pre-evaluate EXPR,
   force values that overlap with the lhs (as described by *DATA)
   into temporaries.  */

static void
gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
                            struct gimplify_init_ctor_preeval_data *data)
{
  enum gimplify_status one;

  /* If the value is constant, then there's nothing to pre-evaluate.  */
  if (TREE_CONSTANT (*expr_p))
    {
      /* Ensure it does not have side effects, it might contain a reference to
         the object we're initializing.  */
      gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
      return;
    }

  /* If the type has non-trivial constructors, we can't pre-evaluate.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
    return;

  /* Recurse for nested constructors.  */
  if (TREE_CODE (*expr_p) == CONSTRUCTOR)
    {
      unsigned HOST_WIDE_INT ix;
      constructor_elt *ce;
      vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (*expr_p);

      FOR_EACH_VEC_SAFE_ELT (v, ix, ce)
        gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);

      return;
    }

  /* If this is a variable sized type, we must remember the size.  */
  maybe_with_size_expr (expr_p);

  /* Gimplify the constructor element to something appropriate for the rhs
     of a MODIFY_EXPR.  Given that we know the LHS is an aggregate, we know
     the gimplifier will consider this a store to memory.  Doing this
     gimplification now means that we won't have to deal with complicated
     language-specific trees, nor trees like SAVE_EXPR that can induce
     exponential search behavior.  */
  one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
  if (one == GS_ERROR)
    {
      *expr_p = NULL;
      return;
    }

  /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
     with the lhs, since "a = { .x=a }" doesn't make sense.  This will
     always be true for all scalars, since is_gimple_mem_rhs insists on a
     temporary variable for them.  */
  if (DECL_P (*expr_p))
    return;

  /* If this is of variable size, we have no choice but to assume it doesn't
     overlap since we can't make a temporary for it.  */
  if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
    return;

  /* Otherwise, we must search for overlap ...  */
  if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
    return;

  /* ... and if found, force the value into a temporary.  */
  *expr_p = get_formal_tmp_var (*expr_p, pre_p);
}

/* A subroutine of gimplify_init_ctor_eval.  Create a loop for
   a RANGE_EXPR in a CONSTRUCTOR for an array.

     var = lower;
   loop_entry:
     object[var] = value;
     if (var == upper)
       goto loop_exit;
     var = var + 1;
     goto loop_entry;
   loop_exit:

   We increment var _after_ the loop exit check because we might otherwise
   fail if upper == TYPE_MAX_VALUE (type for upper).

   Note that we never have to deal with SAVE_EXPRs here, because this has
   already been taken care of for us, in gimplify_init_ctor_preeval().  */
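
/* A source-level illustration (GNU C range designators): an initializer
   such as

     int a[100] = { [0 ... 99] = v };

   can be lowered to the loop shape above rather than to 100 separate
   stores, with LOWER == 0 and UPPER == 99.  */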

static void gimplify_init_ctor_eval (tree, vec<constructor_elt, va_gc> *,
                                     gimple_seq *, bool);

static void
gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
                               tree value, tree array_elt_type,
                               gimple_seq *pre_p, bool cleared)
{
  tree loop_entry_label, loop_exit_label, fall_thru_label;
  tree var, var_type, cref, tmp;

  loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
  loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
  fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);

  /* Create and initialize the index variable.  */
  var_type = TREE_TYPE (upper);
  var = create_tmp_var (var_type);
  gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));

  /* Add the loop entry label.  */
  gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));

  /* Build the reference.  */
  cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
                 var, NULL_TREE, NULL_TREE);

  /* If we are a constructor, just call gimplify_init_ctor_eval to do
     the store.  Otherwise just assign value to the reference.  */

  if (TREE_CODE (value) == CONSTRUCTOR)
    /* NB we might have to call ourself recursively through
       gimplify_init_ctor_eval if the value is a constructor.  */
    gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
                             pre_p, cleared);
  else
    gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));

  /* We exit the loop when the index var is equal to the upper bound.  */
  gimplify_seq_add_stmt (pre_p,
                         gimple_build_cond (EQ_EXPR, var, upper,
                                            loop_exit_label, fall_thru_label));

  gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));

  /* Otherwise, increment the index var...  */
  tmp = build2 (PLUS_EXPR, var_type, var,
                fold_convert (var_type, integer_one_node));
  gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));

  /* ...and jump back to the loop entry.  */
  gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));

  /* Add the loop exit label.  */
  gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
}

/* Return true if FDECL is accessing a field that is zero sized.  */

static bool
zero_sized_field_decl (const_tree fdecl)
{
  if (TREE_CODE (fdecl) == FIELD_DECL && DECL_SIZE (fdecl)
      && integer_zerop (DECL_SIZE (fdecl)))
    return true;
  return false;
}

/* Return true if TYPE is zero sized.  */

static bool
zero_sized_type (const_tree type)
{
  if (AGGREGATE_TYPE_P (type) && TYPE_SIZE (type)
      && integer_zerop (TYPE_SIZE (type)))
    return true;
  return false;
}

/* A subroutine of gimplify_init_constructor.  Generate individual
   MODIFY_EXPRs for a CONSTRUCTOR.  OBJECT is the LHS against which the
   assignments should happen.  ELTS is the CONSTRUCTOR_ELTS of the
   CONSTRUCTOR.  CLEARED is true if the entire LHS object has been
   zeroed first.  */

static void
gimplify_init_ctor_eval (tree object, vec<constructor_elt, va_gc> *elts,
                         gimple_seq *pre_p, bool cleared)
{
  tree array_elt_type = NULL;
  unsigned HOST_WIDE_INT ix;
  tree purpose, value;

  if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
    array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));

  FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
    {
      tree cref;

      /* NULL values are created above for gimplification errors.  */
      if (value == NULL)
        continue;

      if (cleared && initializer_zerop (value))
        continue;

      /* ??? Here's to hoping the front end fills in all of the indices,
         so we don't have to figure out what's missing ourselves.  */
      gcc_assert (purpose);

      /* Skip zero-sized fields, unless value has side-effects.  This can
         happen with calls to functions returning a zero-sized type, which
         we shouldn't discard.  As a number of downstream passes don't
         expect sets of zero-sized fields, we rely on the gimplification of
         the MODIFY_EXPR we make below to drop the assignment statement.  */
      if (! TREE_SIDE_EFFECTS (value) && zero_sized_field_decl (purpose))
        continue;

      /* If we have a RANGE_EXPR, we have to build a loop to assign the
         whole range.  */
      if (TREE_CODE (purpose) == RANGE_EXPR)
        {
          tree lower = TREE_OPERAND (purpose, 0);
          tree upper = TREE_OPERAND (purpose, 1);

          /* If the lower bound is equal to upper, just treat it as if
             upper was the index.  */
          if (simple_cst_equal (lower, upper))
            purpose = upper;
          else
            {
              gimplify_init_ctor_eval_range (object, lower, upper, value,
                                             array_elt_type, pre_p, cleared);
              continue;
            }
        }

      if (array_elt_type)
        {
          /* Do not use bitsizetype for ARRAY_REF indices.  */
          if (TYPE_DOMAIN (TREE_TYPE (object)))
            purpose
              = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
                              purpose);
          cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
                         purpose, NULL_TREE, NULL_TREE);
        }
      else
        {
          gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
          cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
                         unshare_expr (object), purpose, NULL_TREE);
        }

      if (TREE_CODE (value) == CONSTRUCTOR
          && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
        gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
                                 pre_p, cleared);
      else
        {
          tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
          gimplify_and_add (init, pre_p);
          ggc_free (init);
        }
    }
}

/* Return the appropriate RHS predicate for this LHS.  */

gimple_predicate
rhs_predicate_for (tree lhs)
{
  if (is_gimple_reg (lhs))
    return is_gimple_reg_rhs_or_call;
  else
    return is_gimple_mem_rhs_or_call;
}

/* Return the initial guess for an appropriate RHS predicate for this LHS,
   before the LHS has been gimplified.  */

static gimple_predicate
initial_rhs_predicate_for (tree lhs)
{
  if (is_gimple_reg_type (TREE_TYPE (lhs)))
    return is_gimple_reg_rhs_or_call;
  else
    return is_gimple_mem_rhs_or_call;
}

/* Gimplify a C99 compound literal expression.  This just means adding
   the DECL_EXPR before the current statement and using its anonymous
   decl instead.  */
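
/* For instance (names illustrative), the C99 fragment

     int *p = (int []){ 1, 2 };

   is rewritten so that the anonymous array is declared first and the
   literal expression is replaced by its decl, roughly

     int D.1234[2] = { 1, 2 };
     int *p = D.1234;

   where D.1234 stands for the compiler-generated anonymous decl.  */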

static enum gimplify_status
gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p,
                                bool (*gimple_test_f) (tree),
                                fallback_t fallback)
{
  tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
  tree decl = DECL_EXPR_DECL (decl_s);
  tree init = DECL_INITIAL (decl);
  /* Mark the decl as addressable if the compound literal
     expression is addressable now, otherwise it is marked too late
     after we gimplify the initialization expression.  */
  if (TREE_ADDRESSABLE (*expr_p))
    TREE_ADDRESSABLE (decl) = 1;
  /* Otherwise, if we don't need an lvalue and have a literal directly
     substitute it.  Check if it matches the gimple predicate, as
     otherwise we'd generate a new temporary, and we can as well just
     use the decl we already have.  */
  else if (!TREE_ADDRESSABLE (decl)
           && !TREE_THIS_VOLATILE (decl)
           && init
           && (fallback & fb_lvalue) == 0
           && gimple_test_f (init))
    {
      *expr_p = init;
      return GS_OK;
    }

  /* Preliminarily mark non-addressed complex variables as eligible
     for promotion to gimple registers.  We'll transform their uses
     as we find them.  */
  if ((TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE
       || TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE)
      && !TREE_THIS_VOLATILE (decl)
      && !needs_to_live_in_memory (decl))
    DECL_GIMPLE_REG_P (decl) = 1;

  /* If the decl is not addressable, then it is being used in some
     expression or on the right hand side of a statement, and it can
     be put into a readonly data section.  */
  if (!TREE_ADDRESSABLE (decl) && (fallback & fb_lvalue) == 0)
    TREE_READONLY (decl) = 1;

  /* This decl isn't mentioned in the enclosing block, so add it to the
     list of temps.  FIXME it seems a bit of a kludge to say that
     anonymous artificial vars aren't pushed, but everything else is.  */
  if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
    gimple_add_tmp_var (decl);

  gimplify_and_add (decl_s, pre_p);
  *expr_p = decl;
  return GS_OK;
}

/* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
   return a new CONSTRUCTOR if something changed.  */
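
/* Sketch of the effect (illustrative): an aggregate initializer such as

     struct P q = { .pt = (struct point){ 1, 2 } };

   can be flattened so the nested literal's CONSTRUCTOR is used
   directly, as if it had been written

     struct P q = { .pt = { 1, 2 } };

   provided neither the literal nor its decl is addressable.  */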

static tree
optimize_compound_literals_in_ctor (tree orig_ctor)
{
  tree ctor = orig_ctor;
  vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (ctor);
  unsigned int idx, num = vec_safe_length (elts);

  for (idx = 0; idx < num; idx++)
    {
      tree value = (*elts)[idx].value;
      tree newval = value;
      if (TREE_CODE (value) == CONSTRUCTOR)
        newval = optimize_compound_literals_in_ctor (value);
      else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
        {
          tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
          tree decl = DECL_EXPR_DECL (decl_s);
          tree init = DECL_INITIAL (decl);

          if (!TREE_ADDRESSABLE (value)
              && !TREE_ADDRESSABLE (decl)
              && init
              && TREE_CODE (init) == CONSTRUCTOR)
            newval = optimize_compound_literals_in_ctor (init);
        }
      if (newval == value)
        continue;

      if (ctor == orig_ctor)
        {
          ctor = copy_node (orig_ctor);
          CONSTRUCTOR_ELTS (ctor) = vec_safe_copy (elts);
          elts = CONSTRUCTOR_ELTS (ctor);
        }
      (*elts)[idx].value = newval;
    }
  return ctor;
}

/* A subroutine of gimplify_modify_expr.  Break out elements of a
   CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.

   Note that we still need to clear any elements that don't have explicit
   initializers, so if not all elements are initialized we keep the
   original MODIFY_EXPR, we just remove all of the constructor elements.

   If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
   GS_ERROR if we would have to create a temporary when gimplifying
   this constructor.  Otherwise, return GS_OK.

   If NOTIFY_TEMP_CREATION is false, just do the gimplification.  */
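
/* For a partially initialized aggregate, e.g. (illustrative)

     int a[8] = { 1 };

   the lowering typically clears the whole object first and then stores
   the nonzero elements individually, roughly

     a = {};          (block clear)
     a[0] = 1;

   whereas a fully constant initializer may instead be promoted to
   static readonly data and block-copied, per the heuristics below.  */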

static enum gimplify_status
gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
                           bool want_value, bool notify_temp_creation)
{
  tree object, ctor, type;
  enum gimplify_status ret;
  vec<constructor_elt, va_gc> *elts;

  gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);

  if (!notify_temp_creation)
    {
      ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
                           is_gimple_lvalue, fb_lvalue);
      if (ret == GS_ERROR)
        return ret;
    }

  object = TREE_OPERAND (*expr_p, 0);
  ctor = TREE_OPERAND (*expr_p, 1)
    = optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
  type = TREE_TYPE (ctor);
  elts = CONSTRUCTOR_ELTS (ctor);
  ret = GS_ALL_DONE;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case ARRAY_TYPE:
      {
        struct gimplify_init_ctor_preeval_data preeval_data;
        HOST_WIDE_INT num_ctor_elements, num_nonzero_elements;
        HOST_WIDE_INT num_unique_nonzero_elements;
        bool cleared, complete_p, valid_const_initializer;
        /* Use readonly data for initializers of this or smaller size
           regardless of the num_nonzero_elements / num_unique_nonzero_elements
           ratio.  */
        const HOST_WIDE_INT min_unique_size = 64;
        /* If num_nonzero_elements / num_unique_nonzero_elements ratio
           is smaller than this, use readonly data.  */
        const int unique_nonzero_ratio = 8;

        /* Aggregate types must lower constructors to initialization of
           individual elements.  The exception is that a CONSTRUCTOR node
           with no elements indicates zero-initialization of the whole.  */
        if (vec_safe_is_empty (elts))
          {
            if (notify_temp_creation)
              return GS_OK;
            break;
          }

        /* Fetch information about the constructor to direct later processing.
           We might want to make static versions of it in various cases, and
           can only do so if it is known to be a valid constant initializer.  */
        valid_const_initializer
          = categorize_ctor_elements (ctor, &num_nonzero_elements,
                                      &num_unique_nonzero_elements,
                                      &num_ctor_elements, &complete_p);

        /* If a const aggregate variable is being initialized, then it
           should never be a loss to promote the variable to be static.  */
        if (valid_const_initializer
            && num_nonzero_elements > 1
            && TREE_READONLY (object)
            && VAR_P (object)
            && !DECL_REGISTER (object)
            && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object))
            /* For ctors that have many repeated nonzero elements
               represented through RANGE_EXPRs, prefer initializing
               those through runtime loops over copies of large amounts
               of data from readonly data section.  */
            && (num_unique_nonzero_elements
                > num_nonzero_elements / unique_nonzero_ratio
                || ((unsigned HOST_WIDE_INT) int_size_in_bytes (type)
                    <= (unsigned HOST_WIDE_INT) min_unique_size)))
          {
            if (notify_temp_creation)
              return GS_ERROR;
            DECL_INITIAL (object) = ctor;
            TREE_STATIC (object) = 1;
            if (!DECL_NAME (object))
              DECL_NAME (object) = create_tmp_var_name ("C");
            walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);

            /* ??? C++ doesn't automatically append a .<number> to the
               assembler name, and even when it does, it looks at FE private
               data structures to figure out what that number should be,
               which are not set for this variable.  I suppose this is
               important for local statics for inline functions, which aren't
               "local" in the object file sense.  So in order to get a unique
               TU-local symbol, we must invoke the lhd version now.  */
            lhd_set_decl_assembler_name (object);

            *expr_p = NULL_TREE;
            break;
          }

        /* If there are "lots" of initialized elements, even discounting
           those that are not address constants (and thus *must* be
           computed at runtime), then partition the constructor into
           constant and non-constant parts.  Block copy the constant
           parts in, then generate code for the non-constant parts.  */
        /* TODO.  There's code in cp/typeck.c to do this.  */

        if (int_size_in_bytes (TREE_TYPE (ctor)) < 0)
          /* store_constructor will ignore the clearing of variable-sized
             objects.  Initializers for such objects must explicitly set
             every field that needs to be set.  */
          cleared = false;
        else if (!complete_p)
          /* If the constructor isn't complete, clear the whole object
             beforehand, unless CONSTRUCTOR_NO_CLEARING is set on it.

             ??? This ought not to be needed.  For any element not present
             in the initializer, we should simply set them to zero.  Except
             we'd need to *find* the elements that are not present, and that
             requires trickery to avoid quadratic compile-time behavior in
             large cases or excessive memory use in small cases.  */
          cleared = !CONSTRUCTOR_NO_CLEARING (ctor);
        else if (num_ctor_elements - num_nonzero_elements
                 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
                 && num_nonzero_elements < num_ctor_elements / 4)
          /* If there are "lots" of zeros, it's more efficient to clear
             the memory and then set the nonzero elements.  */
          cleared = true;
        else
          cleared = false;

        /* If there are "lots" of initialized elements, and all of them
           are valid address constants, then the entire initializer can
           be dropped to memory, and then memcpy'd out.  Don't do this
           for sparse arrays, though, as it's more efficient to follow
           the standard CONSTRUCTOR behavior of memset followed by
           individual element initialization.  Also don't do this for small
           all-zero initializers (which aren't big enough to merit
           clearing), and don't try to make bitwise copies of
           TREE_ADDRESSABLE types.  */

        if (valid_const_initializer
            && !(cleared || num_nonzero_elements == 0)
            && !TREE_ADDRESSABLE (type))
          {
            HOST_WIDE_INT size = int_size_in_bytes (type);
            unsigned int align;

            /* ??? We can still get unbounded array types, at least
               from the C++ front end.  This seems wrong, but attempt
               to work around it for now.  */
            if (size < 0)
              {
                size = int_size_in_bytes (TREE_TYPE (object));
                if (size >= 0)
                  TREE_TYPE (ctor) = type = TREE_TYPE (object);
              }

            /* Find the maximum alignment we can assume for the object.  */
            /* ??? Make use of DECL_OFFSET_ALIGN.  */
            if (DECL_P (object))
              align = DECL_ALIGN (object);
            else
              align = TYPE_ALIGN (type);

            /* Do a block move either if the size is so small as to make
               each individual move a sub-unit move on average, or if it
               is so large as to make individual moves inefficient.  */
            if (size > 0
                && num_nonzero_elements > 1
                /* For ctors that have many repeated nonzero elements
                   represented through RANGE_EXPRs, prefer initializing
                   those through runtime loops over copies of large amounts
                   of data from readonly data section.  */
                && (num_unique_nonzero_elements
                    > num_nonzero_elements / unique_nonzero_ratio
                    || size <= min_unique_size)
                && (size < num_nonzero_elements
                    || !can_move_by_pieces (size, align)))
              {
                if (notify_temp_creation)
                  return GS_ERROR;

                walk_tree (&ctor, force_labels_r, NULL, NULL);
                ctor = tree_output_constant_def (ctor);
                if (!useless_type_conversion_p (type, TREE_TYPE (ctor)))
                  ctor = build1 (VIEW_CONVERT_EXPR, type, ctor);
                TREE_OPERAND (*expr_p, 1) = ctor;

                /* This is no longer an assignment of a CONSTRUCTOR, but
                   we still may have processing to do on the LHS.  So
                   pretend we didn't do anything here to let that happen.  */
                return GS_UNHANDLED;
              }
          }

        /* If the target is volatile, we have non-zero elements and more than
           one field to assign, initialize the target from a temporary.  */
        if (TREE_THIS_VOLATILE (object)
            && !TREE_ADDRESSABLE (type)
            && (num_nonzero_elements > 0 || !cleared)
            && vec_safe_length (elts) > 1)
          {
            tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type));
            TREE_OPERAND (*expr_p, 0) = temp;
            *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
                              *expr_p,
                              build2 (MODIFY_EXPR, void_type_node,
                                      object, temp));
            return GS_OK;
          }

        if (notify_temp_creation)
          return GS_OK;

        /* If there are nonzero elements and if needed, pre-evaluate to capture
           elements overlapping with the lhs into temporaries.  We must do this
           before clearing to fetch the values before they are zeroed-out.  */
        if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR)
          {
            preeval_data.lhs_base_decl = get_base_address (object);
            if (!DECL_P (preeval_data.lhs_base_decl))
              preeval_data.lhs_base_decl = NULL;
            preeval_data.lhs_alias_set = get_alias_set (object);

            gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
                                        pre_p, post_p, &preeval_data);
          }

        bool ctor_has_side_effects_p
          = TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p, 1));

        if (cleared)
          {
            /* Zap the CONSTRUCTOR element list, which simplifies this case.
               Note that we still have to gimplify, in order to handle the
               case of variable sized types.  Avoid shared tree structures.  */
            CONSTRUCTOR_ELTS (ctor) = NULL;
            TREE_SIDE_EFFECTS (ctor) = 0;
            object = unshare_expr (object);
            gimplify_stmt (expr_p, pre_p);
          }

        /* If we have not block cleared the object, or if there are nonzero
           elements in the constructor, or if the constructor has side effects,
           add assignments to the individual scalar fields of the object.  */
        if (!cleared
            || num_nonzero_elements > 0
            || ctor_has_side_effects_p)
          gimplify_init_ctor_eval (object, elts, pre_p, cleared);

        *expr_p = NULL_TREE;
      }
      break;

    case COMPLEX_TYPE:
      {
        tree r, i;

        if (notify_temp_creation)
          return GS_OK;

        /* Extract the real and imaginary parts out of the ctor.  */
        gcc_assert (elts->length () == 2);
        r = (*elts)[0].value;
        i = (*elts)[1].value;
        if (r == NULL || i == NULL)
          {
            tree zero = build_zero_cst (TREE_TYPE (type));
            if (r == NULL)
              r = zero;
            if (i == NULL)
              i = zero;
          }

        /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
           represent creation of a complex value.  */
        if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
          {
            ctor = build_complex (type, r, i);
            TREE_OPERAND (*expr_p, 1) = ctor;
          }
        else
          {
            ctor = build2 (COMPLEX_EXPR, type, r, i);
            TREE_OPERAND (*expr_p, 1) = ctor;
            ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
                                 pre_p,
                                 post_p,
                                 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
                                 fb_rvalue);
          }
      }
      break;

    case VECTOR_TYPE:
      {
        unsigned HOST_WIDE_INT ix;
        constructor_elt *ce;

        if (notify_temp_creation)
          return GS_OK;

        /* Go ahead and simplify constant constructors to VECTOR_CST.  */
        if (TREE_CONSTANT (ctor))
          {
            bool constant_p = true;
            tree value;

            /* Even when ctor is constant, it might contain non-*_CST
               elements, such as addresses or trapping values like
               1.0/0.0 - 1.0/0.0.  Such expressions don't belong
               in VECTOR_CST nodes.  */
            FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
              if (!CONSTANT_CLASS_P (value))
                {
                  constant_p = false;
                  break;
                }

            if (constant_p)
              {
                TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
                break;
              }

            TREE_CONSTANT (ctor) = 0;
          }

        /* Vector types use CONSTRUCTOR all the way through gimple
           compilation as a general initializer.  */
        FOR_EACH_VEC_SAFE_ELT (elts, ix, ce)
          {
            enum gimplify_status tret;
            tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
                                  fb_rvalue);
            if (tret == GS_ERROR)
              ret = GS_ERROR;
            else if (TREE_STATIC (ctor)
                     && !initializer_constant_valid_p (ce->value,
                                                       TREE_TYPE (ce->value)))
              TREE_STATIC (ctor) = 0;
          }
        if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
          TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
      }
      break;

    default:
      /* So how did we get a CONSTRUCTOR for a scalar type?  */
      gcc_unreachable ();
    }

  if (ret == GS_ERROR)
    return GS_ERROR;
  /* If we have gimplified both sides of the initializer but have
     not emitted an assignment, do so now.  */
  if (*expr_p)
    {
      tree lhs = TREE_OPERAND (*expr_p, 0);
      tree rhs = TREE_OPERAND (*expr_p, 1);
      if (want_value && object == lhs)
        lhs = unshare_expr (lhs);
      gassign *init = gimple_build_assign (lhs, rhs);
      gimplify_seq_add_stmt (pre_p, init);
    }
  if (want_value)
    {
      *expr_p = object;
      return GS_OK;
    }
  else
    {
      *expr_p = NULL;
      return GS_ALL_DONE;
    }
}

/* Given a pointer value OP0, return a simplified version of an
   indirection through OP0, or NULL_TREE if no simplification is
   possible.  This may only be applied to a rhs of an expression.
   Note that the resulting type may be different from the type pointed
   to in the sense that it is still compatible from the langhooks
   point of view.  */
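
/* E.g. (illustrative), this can fold an indirection such as '*&x' back
   to 'x', or a dereference of an ADDR_EXPR of an array to the
   corresponding element; the actual work is done by
   gimple_fold_indirect_ref.  */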

static tree
gimple_fold_indirect_ref_rhs (tree t)
{
  return gimple_fold_indirect_ref (t);
}

/* Subroutine of gimplify_modify_expr to do simplifications of
   MODIFY_EXPRs based on the code of the RHS.  We loop for as long as
   something changes.  */
5252
5253 static enum gimplify_status
5254 gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
5255 gimple_seq *pre_p, gimple_seq *post_p,
5256 bool want_value)
5257 {
5258 enum gimplify_status ret = GS_UNHANDLED;
5259 bool changed;
5260
5261 do
5262 {
5263 changed = false;
5264 switch (TREE_CODE (*from_p))
5265 {
5266 case VAR_DECL:
5267 /* If we're assigning from a read-only variable initialized with
5268 a constructor, do the direct assignment from the constructor,
5269 but only if neither source nor target are volatile since this
5270 latter assignment might end up being done on a per-field basis. */
5271 if (DECL_INITIAL (*from_p)
5272 && TREE_READONLY (*from_p)
5273 && !TREE_THIS_VOLATILE (*from_p)
5274 && !TREE_THIS_VOLATILE (*to_p)
5275 && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR)
5276 {
5277 tree old_from = *from_p;
5278 enum gimplify_status subret;
5279
5280 /* Move the constructor into the RHS. */
5281 *from_p = unshare_expr (DECL_INITIAL (*from_p));
5282
5283 /* Let's see if gimplify_init_constructor will need to put
5284 it in memory. */
5285 subret = gimplify_init_constructor (expr_p, NULL, NULL,
5286 false, true);
5287 if (subret == GS_ERROR)
5288 {
5289 /* If so, revert the change. */
5290 *from_p = old_from;
5291 }
5292 else
5293 {
5294 ret = GS_OK;
5295 changed = true;
5296 }
5297 }
5298 break;
5299 case INDIRECT_REF:
5300 {
5301 /* If we have code like
5302
5303 *(const A*)(A*)&x
5304
5305 where the type of "x" is a (possibly cv-qualified variant
5306 of "A"), treat the entire expression as identical to "x".
5307 This kind of code arises in C++ when an object is bound
5308 to a const reference, and if "x" is a TARGET_EXPR we want
5309 to take advantage of the optimization below. */
5310 bool volatile_p = TREE_THIS_VOLATILE (*from_p);
5311 tree t = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
5312 if (t)
5313 {
5314 if (TREE_THIS_VOLATILE (t) != volatile_p)
5315 {
5316 if (DECL_P (t))
5317 t = build_simple_mem_ref_loc (EXPR_LOCATION (*from_p),
5318 build_fold_addr_expr (t));
5319 if (REFERENCE_CLASS_P (t))
5320 TREE_THIS_VOLATILE (t) = volatile_p;
5321 }
5322 *from_p = t;
5323 ret = GS_OK;
5324 changed = true;
5325 }
5326 break;
5327 }
5328
5329 case TARGET_EXPR:
5330 {
5331 /* If we are initializing something from a TARGET_EXPR, strip the
5332 TARGET_EXPR and initialize it directly, if possible. This can't
5333 be done if the initializer is void, since that implies that the
5334 temporary is set in some non-trivial way.
5335
5336 ??? What about code that pulls out the temp and uses it
5337 elsewhere? I think that such code never uses the TARGET_EXPR as
5338 an initializer. If I'm wrong, we'll die because the temp won't
5339 have any RTL. In that case, I guess we'll need to replace
5340 references somehow. */
5341 tree init = TARGET_EXPR_INITIAL (*from_p);
5342
5343 if (init
5344 && (TREE_CODE (*expr_p) != MODIFY_EXPR
5345 || !TARGET_EXPR_NO_ELIDE (*from_p))
5346 && !VOID_TYPE_P (TREE_TYPE (init)))
5347 {
5348 *from_p = init;
5349 ret = GS_OK;
5350 changed = true;
5351 }
5352 }
5353 break;
5354
5355 case COMPOUND_EXPR:
5356 /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
5357 caught. */
5358 gimplify_compound_expr (from_p, pre_p, true);
5359 ret = GS_OK;
5360 changed = true;
5361 break;
5362
5363 case CONSTRUCTOR:
5364 /* If we already made some changes, let the front end have a
5365 crack at this before we break it down. */
5366 if (ret != GS_UNHANDLED)
5367 break;
5368 /* If we're initializing from a CONSTRUCTOR, break this into
5369 individual MODIFY_EXPRs. */
5370 return gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
5371 false);
5372
5373 case COND_EXPR:
5374 /* If we're assigning to a non-register type, push the assignment
5375 down into the branches. This is mandatory for ADDRESSABLE types,
5376 since we cannot generate temporaries for such, but it saves a
5377 copy in other cases as well. */
5378 if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
5379 {
5380 /* This code should mirror the code in gimplify_cond_expr. */
5381 enum tree_code code = TREE_CODE (*expr_p);
5382 tree cond = *from_p;
5383 tree result = *to_p;
5384
5385 ret = gimplify_expr (&result, pre_p, post_p,
5386 is_gimple_lvalue, fb_lvalue);
5387 if (ret != GS_ERROR)
5388 ret = GS_OK;
5389
5390 /* If we are going to write RESULT more than once, clear the
5391 TREE_READONLY flag, otherwise we might incorrectly promote
5392 the variable to static const and initialize it at compile
5393 time in one of the branches. */
5394 if (VAR_P (result)
5395 && TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node
5396 && TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
5397 TREE_READONLY (result) = 0;
5398 if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
5399 TREE_OPERAND (cond, 1)
5400 = build2 (code, void_type_node, result,
5401 TREE_OPERAND (cond, 1));
5402 if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
5403 TREE_OPERAND (cond, 2)
5404 = build2 (code, void_type_node, unshare_expr (result),
5405 TREE_OPERAND (cond, 2));
5406
5407 TREE_TYPE (cond) = void_type_node;
5408 recalculate_side_effects (cond);
5409
5410 if (want_value)
5411 {
5412 gimplify_and_add (cond, pre_p);
5413 *expr_p = unshare_expr (result);
5414 }
5415 else
5416 *expr_p = cond;
5417 return ret;
5418 }
5419 break;
5420
5421 case CALL_EXPR:
5422 /* For calls that return in memory, give *to_p as the CALL_EXPR's
5423 return slot so that we don't generate a temporary. */
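/* Sketch (illustrative only): for "s = foo ()" where foo returns its
   aggregate result in memory, setting the flag below lets expansion
   pass the address of "s" as the return slot instead of copying the
   result through a temporary. */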
5424 if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
5425 && aggregate_value_p (*from_p, *from_p))
5426 {
5427 bool use_target;
5428
5429 if (!(rhs_predicate_for (*to_p))(*from_p))
5430 /* If we need a temporary, *to_p isn't accurate. */
5431 use_target = false;
5432 /* It's OK to use the return slot directly unless it's an NRV. */
5433 else if (TREE_CODE (*to_p) == RESULT_DECL
5434 && DECL_NAME (*to_p) == NULL_TREE
5435 && needs_to_live_in_memory (*to_p))
5436 use_target = true;
5437 else if (is_gimple_reg_type (TREE_TYPE (*to_p))
5438 || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
5439 /* Don't force regs into memory. */
5440 use_target = false;
5441 else if (TREE_CODE (*expr_p) == INIT_EXPR)
5442 /* It's OK to use the target directly if it's being
5443 initialized. */
5444 use_target = true;
5445 else if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (*to_p)))
5446 != INTEGER_CST)
5447 /* Always use the target and thus RSO for variable-sized types.
5448 GIMPLE cannot deal with a variable-sized assignment
5449 embedded in a call statement. */
5450 use_target = true;
5451 else if (TREE_CODE (*to_p) != SSA_NAME
5452 && (!is_gimple_variable (*to_p)
5453 || needs_to_live_in_memory (*to_p)))
5454 /* Don't use the original target if it's already addressable;
5455 if its address escapes, and the called function uses the
5456 NRV optimization, a conforming program could see *to_p
5457 change before the called function returns; see c++/19317.
5458 When optimizing, the return_slot pass marks more functions
5459 as safe after we have escape info. */
5460 use_target = false;
5461 else
5462 use_target = true;
5463
5464 if (use_target)
5465 {
5466 CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
5467 mark_addressable (*to_p);
5468 }
5469 }
5470 break;
5471
5472 case WITH_SIZE_EXPR:
5473 /* Likewise for calls that return an aggregate of non-constant size,
5474 since we would not be able to generate a temporary at all. */
5475 if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR)
5476 {
5477 *from_p = TREE_OPERAND (*from_p, 0);
5478 /* We don't change ret in this case because the
5479 WITH_SIZE_EXPR might have been added in
5480 gimplify_modify_expr, so returning GS_OK would lead to an
5481 infinite loop. */
5482 changed = true;
5483 }
5484 break;
5485
5486 /* If we're initializing from a container, push the initialization
5487 inside it. */
5488 case CLEANUP_POINT_EXPR:
5489 case BIND_EXPR:
5490 case STATEMENT_LIST:
5491 {
5492 tree wrap = *from_p;
5493 tree t;
5494
5495 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
5496 fb_lvalue);
5497 if (ret != GS_ERROR)
5498 ret = GS_OK;
5499
5500 t = voidify_wrapper_expr (wrap, *expr_p);
5501 gcc_assert (t == *expr_p);
5502
5503 if (want_value)
5504 {
5505 gimplify_and_add (wrap, pre_p);
5506 *expr_p = unshare_expr (*to_p);
5507 }
5508 else
5509 *expr_p = wrap;
5510 return GS_OK;
5511 }
5512
5513 case COMPOUND_LITERAL_EXPR:
5514 {
5515 tree complit = TREE_OPERAND (*expr_p, 1);
5516 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
5517 tree decl = DECL_EXPR_DECL (decl_s);
5518 tree init = DECL_INITIAL (decl);
5519
5520 /* struct T x = (struct T) { 0, 1, 2 } can be optimized
5521 into struct T x = { 0, 1, 2 } if the address of the
5522 compound literal has never been taken. */
5523 if (!TREE_ADDRESSABLE (complit)
5524 && !TREE_ADDRESSABLE (decl)
5525 && init)
5526 {
5527 *expr_p = copy_node (*expr_p);
5528 TREE_OPERAND (*expr_p, 1) = init;
5529 return GS_OK;
5530 }
5531 }
5532
5533 default:
5534 break;
5535 }
5536 }
5537 while (changed);
5538
5539 return ret;
5540 }
5541
5542
5543 /* Return true if T looks like a valid GIMPLE statement. */
5544
5545 static bool
5546 is_gimple_stmt (tree t)
5547 {
5548 const enum tree_code code = TREE_CODE (t);
5549
5550 switch (code)
5551 {
5552 case NOP_EXPR:
5553 /* The only valid NOP_EXPR is the empty statement. */
5554 return IS_EMPTY_STMT (t);
5555
5556 case BIND_EXPR:
5557 case COND_EXPR:
5558 /* These are only valid if they're void. */
5559 return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));
5560
5561 case SWITCH_EXPR:
5562 case GOTO_EXPR:
5563 case RETURN_EXPR:
5564 case LABEL_EXPR:
5565 case CASE_LABEL_EXPR:
5566 case TRY_CATCH_EXPR:
5567 case TRY_FINALLY_EXPR:
5568 case EH_FILTER_EXPR:
5569 case CATCH_EXPR:
5570 case ASM_EXPR:
5571 case STATEMENT_LIST:
5572 case OACC_PARALLEL:
5573 case OACC_KERNELS:
5574 case OACC_SERIAL:
5575 case OACC_DATA:
5576 case OACC_HOST_DATA:
5577 case OACC_DECLARE:
5578 case OACC_UPDATE:
5579 case OACC_ENTER_DATA:
5580 case OACC_EXIT_DATA:
5581 case OACC_CACHE:
5582 case OMP_PARALLEL:
5583 case OMP_FOR:
5584 case OMP_SIMD:
5585 case OMP_DISTRIBUTE:
5586 case OMP_LOOP:
5587 case OACC_LOOP:
5588 case OMP_SCAN:
5589 case OMP_SECTIONS:
5590 case OMP_SECTION:
5591 case OMP_SINGLE:
5592 case OMP_MASTER:
5593 case OMP_TASKGROUP:
5594 case OMP_ORDERED:
5595 case OMP_CRITICAL:
5596 case OMP_TASK:
5597 case OMP_TARGET:
5598 case OMP_TARGET_DATA:
5599 case OMP_TARGET_UPDATE:
5600 case OMP_TARGET_ENTER_DATA:
5601 case OMP_TARGET_EXIT_DATA:
5602 case OMP_TASKLOOP:
5603 case OMP_TEAMS:
5604 /* These are always void. */
5605 return true;
5606
5607 case CALL_EXPR:
5608 case MODIFY_EXPR:
5609 case PREDICT_EXPR:
5610 /* These are valid regardless of their type. */
5611 return true;
5612
5613 default:
5614 return false;
5615 }
5616 }
5617
5618
5619 /* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
5620 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with
5621 DECL_GIMPLE_REG_P set.
5622
5623 IMPORTANT NOTE: This promotion is performed by introducing a load of the
5624 other, unmodified part of the complex object just before the total store.
5625 As a consequence, if the object is still uninitialized, an undefined value
5626 will be loaded into a register, which may result in a spurious exception
5627 if the register is floating-point and the value happens to be a signaling
5628 NaN for example. Then the fully-fledged complex operations lowering pass
5629 followed by a DCE pass are necessary in order to fix things up. */
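/* An illustrative sketch (temporary name assumed): for a complex
   variable "c" meeting the conditions above, the partial store

   __real c = x;

   is rewritten into the total store

   D.tmp = __imag c;
   c = COMPLEX_EXPR <x, D.tmp>;

   so that "c" can use real instead of virtual operands. */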
5630
5631 static enum gimplify_status
5632 gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
5633 bool want_value)
5634 {
5635 enum tree_code code, ocode;
5636 tree lhs, rhs, new_rhs, other, realpart, imagpart;
5637
5638 lhs = TREE_OPERAND (*expr_p, 0);
5639 rhs = TREE_OPERAND (*expr_p, 1);
5640 code = TREE_CODE (lhs);
5641 lhs = TREE_OPERAND (lhs, 0);
5642
5643 ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
5644 other = build1 (ocode, TREE_TYPE (rhs), lhs);
5645 TREE_NO_WARNING (other) = 1;
5646 other = get_formal_tmp_var (other, pre_p);
5647
5648 realpart = code == REALPART_EXPR ? rhs : other;
5649 imagpart = code == REALPART_EXPR ? other : rhs;
5650
5651 if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
5652 new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
5653 else
5654 new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
5655
5656 gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
5657 *expr_p = (want_value) ? rhs : NULL_TREE;
5658
5659 return GS_ALL_DONE;
5660 }
5661
5662 /* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
5663
5664 modify_expr
5665 : varname '=' rhs
5666 | '*' ID '=' rhs
5667
5668 PRE_P points to the list where side effects that must happen before
5669 *EXPR_P should be stored.
5670
5671 POST_P points to the list where side effects that must happen after
5672 *EXPR_P should be stored.
5673
5674 WANT_VALUE is nonzero iff we want to use the value of this expression
5675 in another expression. */
5676
5677 static enum gimplify_status
5678 gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
5679 bool want_value)
5680 {
5681 tree *from_p = &TREE_OPERAND (*expr_p, 1);
5682 tree *to_p = &TREE_OPERAND (*expr_p, 0);
5683 enum gimplify_status ret = GS_UNHANDLED;
5684 gimple *assign;
5685 location_t loc = EXPR_LOCATION (*expr_p);
5686 gimple_stmt_iterator gsi;
5687
5688 gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
5689 || TREE_CODE (*expr_p) == INIT_EXPR);
5690
5691 /* Trying to simplify a clobber using normal logic doesn't work,
5692 so handle it here. */
5693 if (TREE_CLOBBER_P (*from_p))
5694 {
5695 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
5696 if (ret == GS_ERROR)
5697 return ret;
5698 gcc_assert (!want_value);
5699 if (!VAR_P (*to_p) && TREE_CODE (*to_p) != MEM_REF)
5700 {
5701 tree addr = get_initialized_tmp_var (build_fold_addr_expr (*to_p),
5702 pre_p, post_p);
5703 *to_p = build_simple_mem_ref_loc (EXPR_LOCATION (*to_p), addr);
5704 }
5705 gimplify_seq_add_stmt (pre_p, gimple_build_assign (*to_p, *from_p));
5706 *expr_p = NULL;
5707 return GS_ALL_DONE;
5708 }
5709
5710 /* Insert pointer conversions required by the middle-end that are not
5711 required by the frontend. This fixes middle-end type checking
5712 for testcases such as gcc.dg/redecl-6.c. */
5713 if (POINTER_TYPE_P (TREE_TYPE (*to_p)))
5714 {
5715 STRIP_USELESS_TYPE_CONVERSION (*from_p);
5716 if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
5717 *from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
5718 }
5719
5720 /* See if any simplifications can be done based on what the RHS is. */
5721 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
5722 want_value);
5723 if (ret != GS_UNHANDLED)
5724 return ret;
5725
5726 /* For zero sized types only gimplify the left hand side and right hand
5727 side as statements and throw away the assignment. Do this after
5728 gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
5729 types properly. */
5730 if (zero_sized_type (TREE_TYPE (*from_p))
5731 && !want_value
5732 /* Don't do this for calls that return addressable types, expand_call
5733 relies on those having a lhs. */
5734 && !(TREE_ADDRESSABLE (TREE_TYPE (*from_p))
5735 && TREE_CODE (*from_p) == CALL_EXPR))
5736 {
5737 gimplify_stmt (from_p, pre_p);
5738 gimplify_stmt (to_p, pre_p);
5739 *expr_p = NULL_TREE;
5740 return GS_ALL_DONE;
5741 }
5742
5743 /* If the value being copied is of variable width, compute the length
5744 of the copy into a WITH_SIZE_EXPR. Note that we need to do this
5745 before gimplifying any of the operands so that we can resolve any
5746 PLACEHOLDER_EXPRs in the size. Also note that the RTL expander uses
5747 the size of the expression to be copied, not of the destination, so
5748 that is what we must do here. */
5749 maybe_with_size_expr (from_p);
5750
5751 /* As a special case, we have to temporarily allow for assignments
5752 with a CALL_EXPR on the RHS. Since in GIMPLE a function call is
5753 a toplevel statement, when gimplifying the GENERIC expression
5754 MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
5755 GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.
5756
5757 Instead, we need to create the tuple GIMPLE_CALL <a, foo>. To
5758 prevent gimplify_expr from trying to create a new temporary for
5759 foo's LHS, we tell it that it should only gimplify until it
5760 reaches the CALL_EXPR. On return from gimplify_expr, the newly
5761 created GIMPLE_CALL <foo> will be the last statement in *PRE_P
5762 and all we need to do here is set 'a' to be its LHS. */
5763
5764 /* Gimplify the RHS first for C++17 and bug 71104. */
5765 gimple_predicate initial_pred = initial_rhs_predicate_for (*to_p);
5766 ret = gimplify_expr (from_p, pre_p, post_p, initial_pred, fb_rvalue);
5767 if (ret == GS_ERROR)
5768 return ret;
5769
5770 /* Then gimplify the LHS. */
5771 /* If we gimplified the RHS to a CALL_EXPR and that call may return
5772 twice we have to make sure to gimplify into non-SSA as otherwise
5773 the abnormal edge added later will make those defs not dominate
5774 their uses.
5775 ??? Technically this applies only to the registers used in the
5776 resulting non-register *TO_P. */
5777 bool saved_into_ssa = gimplify_ctxp->into_ssa;
5778 if (saved_into_ssa
5779 && TREE_CODE (*from_p) == CALL_EXPR
5780 && call_expr_flags (*from_p) & ECF_RETURNS_TWICE)
5781 gimplify_ctxp->into_ssa = false;
5782 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
5783 gimplify_ctxp->into_ssa = saved_into_ssa;
5784 if (ret == GS_ERROR)
5785 return ret;
5786
5787 /* Now that the LHS is gimplified, re-gimplify the RHS if our initial
5788 guess for the predicate was wrong. */
5789 gimple_predicate final_pred = rhs_predicate_for (*to_p);
5790 if (final_pred != initial_pred)
5791 {
5792 ret = gimplify_expr (from_p, pre_p, post_p, final_pred, fb_rvalue);
5793 if (ret == GS_ERROR)
5794 return ret;
5795 }
5796
5797 /* In case of a va_arg internal fn wrapped in a WITH_SIZE_EXPR, add the type
5798 size as argument to the call. */
5799 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
5800 {
5801 tree call = TREE_OPERAND (*from_p, 0);
5802 tree vlasize = TREE_OPERAND (*from_p, 1);
5803
5804 if (TREE_CODE (call) == CALL_EXPR
5805 && CALL_EXPR_IFN (call) == IFN_VA_ARG)
5806 {
5807 int nargs = call_expr_nargs (call);
5808 tree type = TREE_TYPE (call);
5809 tree ap = CALL_EXPR_ARG (call, 0);
5810 tree tag = CALL_EXPR_ARG (call, 1);
5811 tree aptag = CALL_EXPR_ARG (call, 2);
5812 tree newcall = build_call_expr_internal_loc (EXPR_LOCATION (call),
5813 IFN_VA_ARG, type,
5814 nargs + 1, ap, tag,
5815 aptag, vlasize);
5816 TREE_OPERAND (*from_p, 0) = newcall;
5817 }
5818 }
5819
5820 /* Now see if the above changed *from_p to something we handle specially. */
5821 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
5822 want_value);
5823 if (ret != GS_UNHANDLED)
5824 return ret;
5825
5826 /* If we've got a variable-sized assignment between two lvalues (i.e. one
5827 that does not involve a call), then we can make things a bit more
5828 straightforward by converting the assignment to memcpy or memset. */
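/* E.g. (sketch): a variable-sized "a = b" between two lvalues becomes
   a call "memcpy (&a, &b, size)", while assigning a CONSTRUCTOR of
   variable size becomes a corresponding memset of the destination. */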
5829 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
5830 {
5831 tree from = TREE_OPERAND (*from_p, 0);
5832 tree size = TREE_OPERAND (*from_p, 1);
5833
5834 if (TREE_CODE (from) == CONSTRUCTOR)
5835 return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);
5836
5837 if (is_gimple_addressable (from))
5838 {
5839 *from_p = from;
5840 return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
5841 pre_p);
5842 }
5843 }
5844
5845 /* Transform partial stores to non-addressable complex variables into
5846 total stores. This allows us to use real instead of virtual operands
5847 for these variables, which improves optimization. */
5848 if ((TREE_CODE (*to_p) == REALPART_EXPR
5849 || TREE_CODE (*to_p) == IMAGPART_EXPR)
5850 && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
5851 return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);
5852
5853 /* Try to alleviate the effects of the gimplification creating artificial
5854 temporaries (see for example is_gimple_reg_rhs) on the debug info, but
5855 make sure not to create DECL_DEBUG_EXPR links across functions. */
5856 if (!gimplify_ctxp->into_ssa
5857 && VAR_P (*from_p)
5858 && DECL_IGNORED_P (*from_p)
5859 && DECL_P (*to_p)
5860 && !DECL_IGNORED_P (*to_p)
5861 && decl_function_context (*to_p) == current_function_decl
5862 && decl_function_context (*from_p) == current_function_decl)
5863 {
5864 if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
5865 DECL_NAME (*from_p)
5866 = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
5867 DECL_HAS_DEBUG_EXPR_P (*from_p) = 1;
5868 SET_DECL_DEBUG_EXPR (*from_p, *to_p);
5869 }
5870
5871 if (want_value && TREE_THIS_VOLATILE (*to_p))
5872 *from_p = get_initialized_tmp_var (*from_p, pre_p, post_p);
5873
5874 if (TREE_CODE (*from_p) == CALL_EXPR)
5875 {
5876 /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
5877 instead of a GIMPLE_ASSIGN. */
5878 gcall *call_stmt;
5879 if (CALL_EXPR_FN (*from_p) == NULL_TREE)
5880 {
5881 /* Gimplify internal functions created in the FEs. */
5882 int nargs = call_expr_nargs (*from_p), i;
5883 enum internal_fn ifn = CALL_EXPR_IFN (*from_p);
5884 auto_vec<tree> vargs (nargs);
5885
5886 for (i = 0; i < nargs; i++)
5887 {
5888 gimplify_arg (&CALL_EXPR_ARG (*from_p, i), pre_p,
5889 EXPR_LOCATION (*from_p));
5890 vargs.quick_push (CALL_EXPR_ARG (*from_p, i));
5891 }
5892 call_stmt = gimple_build_call_internal_vec (ifn, vargs);
5893 gimple_call_set_nothrow (call_stmt, TREE_NOTHROW (*from_p));
5894 gimple_set_location (call_stmt, EXPR_LOCATION (*expr_p));
5895 }
5896 else
5897 {
5898 tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*from_p));
5899 CALL_EXPR_FN (*from_p) = TREE_OPERAND (CALL_EXPR_FN (*from_p), 0);
5900 STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p));
5901 tree fndecl = get_callee_fndecl (*from_p);
5902 if (fndecl
5903 && fndecl_built_in_p (fndecl, BUILT_IN_EXPECT)
5904 && call_expr_nargs (*from_p) == 3)
5905 call_stmt = gimple_build_call_internal (IFN_BUILTIN_EXPECT, 3,
5906 CALL_EXPR_ARG (*from_p, 0),
5907 CALL_EXPR_ARG (*from_p, 1),
5908 CALL_EXPR_ARG (*from_p, 2));
5909 else
5910 {
5911 call_stmt = gimple_build_call_from_tree (*from_p, fnptrtype);
5912 }
5913 }
5914 notice_special_calls (call_stmt);
5915 if (!gimple_call_noreturn_p (call_stmt) || !should_remove_lhs_p (*to_p))
5916 gimple_call_set_lhs (call_stmt, *to_p);
5917 else if (TREE_CODE (*to_p) == SSA_NAME)
5918 /* The above is somewhat premature, avoid ICEing later for a
5919 SSA name w/o a definition. We may have uses in the GIMPLE IL.
5920 ??? This doesn't make it a default-def. */
5921 SSA_NAME_DEF_STMT (*to_p) = gimple_build_nop ();
5922
5923 assign = call_stmt;
5924 }
5925 else
5926 {
5927 assign = gimple_build_assign (*to_p, *from_p);
5928 gimple_set_location (assign, EXPR_LOCATION (*expr_p));
5929 if (COMPARISON_CLASS_P (*from_p))
5930 gimple_set_no_warning (assign, TREE_NO_WARNING (*from_p));
5931 }
5932
5933 if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
5934 {
5935 /* We should have got an SSA name from the start. */
5936 gcc_assert (TREE_CODE (*to_p) == SSA_NAME
5937 || ! gimple_in_ssa_p (cfun));
5938 }
5939
5940 gimplify_seq_add_stmt (pre_p, assign);
5941 gsi = gsi_last (*pre_p);
5942 maybe_fold_stmt (&gsi);
5943
5944 if (want_value)
5945 {
5946 *expr_p = TREE_THIS_VOLATILE (*to_p) ? *from_p : unshare_expr (*to_p);
5947 return GS_OK;
5948 }
5949 else
5950 *expr_p = NULL;
5951
5952 return GS_ALL_DONE;
5953 }
5954
5955 /* Gimplify a comparison between two variable-sized objects. Do this
5956 with a call to BUILT_IN_MEMCMP. */
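/* Sketch of the rewrite (the size expression comes from the type of
   the first operand): a variable-sized comparison "a == b" becomes

   memcmp (&a, &b, TYPE_SIZE_UNIT (TREE_TYPE (a))) == 0

   and "a != b" is handled the same way by reusing the original
   comparison code against zero. */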
5957
5958 static enum gimplify_status
5959 gimplify_variable_sized_compare (tree *expr_p)
5960 {
5961 location_t loc = EXPR_LOCATION (*expr_p);
5962 tree op0 = TREE_OPERAND (*expr_p, 0);
5963 tree op1 = TREE_OPERAND (*expr_p, 1);
5964 tree t, arg, dest, src, expr;
5965
5966 arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
5967 arg = unshare_expr (arg);
5968 arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
5969 src = build_fold_addr_expr_loc (loc, op1);
5970 dest = build_fold_addr_expr_loc (loc, op0);
5971 t = builtin_decl_implicit (BUILT_IN_MEMCMP);
5972 t = build_call_expr_loc (loc, t, 3, dest, src, arg);
5973
5974 expr
5975 = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
5976 SET_EXPR_LOCATION (expr, loc);
5977 *expr_p = expr;
5978
5979 return GS_OK;
5980 }
5981
5982 /* Gimplify a comparison between two aggregate objects of integral scalar
5983 mode as a comparison between the bitwise equivalent scalar values. */
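/* Sketch: for two aggregates "a" and "b" whose type has, say, SImode,
   the comparison "a == b" is rewritten as

   VIEW_CONVERT_EXPR<int>(a) == VIEW_CONVERT_EXPR<int>(b)

   where "int" stands for whatever scalar type the langhook returns
   for that mode. */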
5984
5985 static enum gimplify_status
5986 gimplify_scalar_mode_aggregate_compare (tree *expr_p)
5987 {
5988 location_t loc = EXPR_LOCATION (*expr_p);
5989 tree op0 = TREE_OPERAND (*expr_p, 0);
5990 tree op1 = TREE_OPERAND (*expr_p, 1);
5991
5992 tree type = TREE_TYPE (op0);
5993 tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
5994
5995 op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
5996 op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);
5997
5998 *expr_p
5999 = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
6000
6001 return GS_OK;
6002 }
6003
6004 /* Gimplify an expression sequence. This function gimplifies each
6005 expression and rewrites the original expression with the last
6006 expression of the sequence in GIMPLE form.
6007
6008 PRE_P points to the list where the side effects for all the
6009 expressions in the sequence will be emitted.
6010
6011 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
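/* Sketch: gimplifying "(a (), b (), c)" for value emits "a ();" and
   "b ();" to PRE_P and leaves "c" as the resulting expression. */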
6012
6013 static enum gimplify_status
6014 gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
6015 {
6016 tree t = *expr_p;
6017
6018 do
6019 {
6020 tree *sub_p = &TREE_OPERAND (t, 0);
6021
6022 if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
6023 gimplify_compound_expr (sub_p, pre_p, false);
6024 else
6025 gimplify_stmt (sub_p, pre_p);
6026
6027 t = TREE_OPERAND (t, 1);
6028 }
6029 while (TREE_CODE (t) == COMPOUND_EXPR);
6030
6031 *expr_p = t;
6032 if (want_value)
6033 return GS_OK;
6034 else
6035 {
6036 gimplify_stmt (expr_p, pre_p);
6037 return GS_ALL_DONE;
6038 }
6039 }
6040
6041 /* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
6042 gimplify. After gimplification, EXPR_P will point to a new temporary
6043 that holds the original value of the SAVE_EXPR node.
6044
6045 PRE_P points to the list where side effects that must happen before
6046 *EXPR_P should be stored. */
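/* Sketch (temporary name assumed): the first time "SAVE_EXPR <n * 4>"
   is gimplified, "D.tmp = n * 4;" is emitted and the node is marked
   resolved with D.tmp as its operand; any later gimplification of the
   same node simply reuses D.tmp. */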
6047
6048 static enum gimplify_status
6049 gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6050 {
6051 enum gimplify_status ret = GS_ALL_DONE;
6052 tree val;
6053
6054 gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
6055 val = TREE_OPERAND (*expr_p, 0);
6056
6057 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
6058 if (!SAVE_EXPR_RESOLVED_P (*expr_p))
6059 {
6060 /* The operand may be a void-valued expression. It is
6061 being executed only for its side-effects. */
6062 if (TREE_TYPE (val) == void_type_node)
6063 {
6064 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
6065 is_gimple_stmt, fb_none);
6066 val = NULL;
6067 }
6068 else
6069 /* The temporary may not be an SSA name as later abnormal and EH
6070 control flow may invalidate use/def domination. When in SSA
6071 form, assume there are no such issues and SAVE_EXPRs only
6072 appear via GENERIC foldings. */
6073 val = get_initialized_tmp_var (val, pre_p, post_p,
6074 gimple_in_ssa_p (cfun));
6075
6076 TREE_OPERAND (*expr_p, 0) = val;
6077 SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
6078 }
6079
6080 *expr_p = val;
6081
6082 return ret;
6083 }
6084
6085 /* Rewrite the ADDR_EXPR node pointed to by EXPR_P
6086
6087 unary_expr
6088 : ...
6089 | '&' varname
6090 ...
6091
6092 PRE_P points to the list where side effects that must happen before
6093 *EXPR_P should be stored.
6094
6095 POST_P points to the list where side effects that must happen after
6096 *EXPR_P should be stored. */
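/* Sketch: "&*p" simplifies to "p" (plus a conversion if the types
   differ), and "&VIEW_CONVERT_EXPR<T>(x)" becomes the address of "x"
   converted to the type of the original ADDR_EXPR. */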
6097
6098 static enum gimplify_status
6099 gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6100 {
6101 tree expr = *expr_p;
6102 tree op0 = TREE_OPERAND (expr, 0);
6103 enum gimplify_status ret;
6104 location_t loc = EXPR_LOCATION (*expr_p);
6105
6106 switch (TREE_CODE (op0))
6107 {
6108 case INDIRECT_REF:
6109 do_indirect_ref:
6110 /* Check if we are dealing with an expression of the form '&*ptr'.
6111 While the front end folds away '&*ptr' into 'ptr', these
6112 expressions may be generated internally by the compiler (e.g.,
6113 builtins like __builtin_va_end). */
6114 /* Caution: the silent array decomposition semantics we allow for
6115 ADDR_EXPR mean we can't always discard the pair. */
6116 /* Gimplification of the ADDR_EXPR operand may drop
6117 cv-qualification conversions, so make sure we add them if
6118 needed. */
6119 {
6120 tree op00 = TREE_OPERAND (op0, 0);
6121 tree t_expr = TREE_TYPE (expr);
6122 tree t_op00 = TREE_TYPE (op00);
6123
6124 if (!useless_type_conversion_p (t_expr, t_op00))
6125 op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
6126 *expr_p = op00;
6127 ret = GS_OK;
6128 }
6129 break;
6130
6131 case VIEW_CONVERT_EXPR:
6132 /* Take the address of our operand and then convert it to the type of
6133 this ADDR_EXPR.
6134
6135 ??? The interactions of VIEW_CONVERT_EXPR and aliasing are not at
6136 all clear. The impact of this transformation is even less clear. */
6137
6138 /* If the operand is a useless conversion, look through it. Doing so
6139 guarantees that the ADDR_EXPR and its operand will remain of the
6140 same type. */
6141 if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
6142 op0 = TREE_OPERAND (op0, 0);
6143
6144 *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
6145 build_fold_addr_expr_loc (loc,
6146 TREE_OPERAND (op0, 0)));
6147 ret = GS_OK;
6148 break;
6149
6150 case MEM_REF:
6151 if (integer_zerop (TREE_OPERAND (op0, 1)))
6152 goto do_indirect_ref;
6153
6154 /* fall through */
6155
6156 default:
6157 /* If we see a call to a declared builtin or see its address
6158 being taken (we can unify those cases here) then we can mark
6159 the builtin for implicit generation by GCC. */
6160 if (TREE_CODE (op0) == FUNCTION_DECL
6161 && fndecl_built_in_p (op0, BUILT_IN_NORMAL)
6162 && builtin_decl_declared_p (DECL_FUNCTION_CODE (op0)))
6163 set_builtin_decl_implicit_p (DECL_FUNCTION_CODE (op0), true);
6164
6165 /* We use fb_either here because the C frontend sometimes takes
6166 the address of a call that returns a struct; see
6167 gcc.dg/c99-array-lval-1.c. The gimplifier will correctly make
6168 the implied temporary explicit. */
6169
6170 /* Make the operand addressable. */
6171 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
6172 is_gimple_addressable, fb_either);
6173 if (ret == GS_ERROR)
6174 break;
6175
6176 /* Then mark it. Beware that it may not be possible to do so directly
6177 if a temporary has been created by the gimplification. */
6178 prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p);
6179
6180 op0 = TREE_OPERAND (expr, 0);
6181
6182 /* For various reasons, the gimplification of the expression
6183 may have made a new INDIRECT_REF. */
6184 if (TREE_CODE (op0) == INDIRECT_REF)
6185 goto do_indirect_ref;
6186
6187 mark_addressable (TREE_OPERAND (expr, 0));
6188
6189 /* The FEs may end up building ADDR_EXPRs early on a decl with
6190 an incomplete type. Re-build ADDR_EXPRs in canonical form
6191 here. */
6192 if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr))))
6193 *expr_p = build_fold_addr_expr (op0);
6194
6195 /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly. */
6196 recompute_tree_invariant_for_addr_expr (*expr_p);
6197
6198 /* If we re-built the ADDR_EXPR add a conversion to the original type
6199 if required. */
6200 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
6201 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
6202
6203 break;
6204 }
6205
6206 return ret;
6207 }
6208
6209 /* Gimplify the operands of an ASM_EXPR. Input operands should be a gimple
6210 value; output operands should be a gimple lvalue. */
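/* Sketch of the in/out splitting done below: an operand such as
   "+r" (x) is turned into an output "=r" (x) plus a matching input
   "0" (x) that refers back to the output by its operand number. */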
6211
6212 static enum gimplify_status
6213 gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6214 {
6215 tree expr;
6216 int noutputs;
6217 const char **oconstraints;
6218 int i;
6219 tree link;
6220 const char *constraint;
6221 bool allows_mem, allows_reg, is_inout;
6222 enum gimplify_status ret, tret;
6223 gasm *stmt;
6224 vec<tree, va_gc> *inputs;
6225 vec<tree, va_gc> *outputs;
6226 vec<tree, va_gc> *clobbers;
6227 vec<tree, va_gc> *labels;
6228 tree link_next;
6229
6230 expr = *expr_p;
6231 noutputs = list_length (ASM_OUTPUTS (expr));
6232 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
6233
6234 inputs = NULL;
6235 outputs = NULL;
6236 clobbers = NULL;
6237 labels = NULL;
6238
6239 ret = GS_ALL_DONE;
6240 link_next = NULL_TREE;
6241 for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
6242 {
6243 bool ok;
6244 size_t constraint_len;
6245
6246 link_next = TREE_CHAIN (link);
6247
6248 oconstraints[i]
6249 = constraint
6250 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
6251 constraint_len = strlen (constraint);
6252 if (constraint_len == 0)
6253 continue;
6254
6255 ok = parse_output_constraint (&constraint, i, 0, 0,
6256 &allows_mem, &allows_reg, &is_inout);
6257 if (!ok)
6258 {
6259 ret = GS_ERROR;
6260 is_inout = false;
6261 }
6262
6263 /* If we can't make copies, we can only accept memory.
6264 Similarly for VLAs. */
6265 tree outtype = TREE_TYPE (TREE_VALUE (link));
6266 if (outtype != error_mark_node
6267 && (TREE_ADDRESSABLE (outtype)
6268 || !COMPLETE_TYPE_P (outtype)
6269 || !tree_fits_poly_uint64_p (TYPE_SIZE_UNIT (outtype))))
6270 {
6271 if (allows_mem)
6272 allows_reg = 0;
6273 else
6274 {
6275 error ("impossible constraint in %<asm%>");
6276 error ("non-memory output %d must stay in memory", i);
6277 return GS_ERROR;
6278 }
6279 }
6280
6281 if (!allows_reg && allows_mem)
6282 mark_addressable (TREE_VALUE (link));
6283
6284 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6285 is_inout ? is_gimple_min_lval : is_gimple_lvalue,
6286 fb_lvalue | fb_mayfail);
6287 if (tret == GS_ERROR)
6288 {
6289 error ("invalid lvalue in %<asm%> output %d", i);
6290 ret = tret;
6291 }
6292
6293 /* If the constraint does not allow memory, make sure we gimplify
6294 the operand to a register if it is not one already but its base
6295 is. This happens for complex and vector components. */
6296 if (!allows_mem)
6297 {
6298 tree op = TREE_VALUE (link);
6299 if (! is_gimple_val (op)
6300 && is_gimple_reg_type (TREE_TYPE (op))
6301 && is_gimple_reg (get_base_address (op)))
6302 {
6303 tree tem = create_tmp_reg (TREE_TYPE (op));
6304 tree ass;
6305 if (is_inout)
6306 {
6307 ass = build2 (MODIFY_EXPR, TREE_TYPE (tem),
6308 tem, unshare_expr (op));
6309 gimplify_and_add (ass, pre_p);
6310 }
6311 ass = build2 (MODIFY_EXPR, TREE_TYPE (tem), op, tem);
6312 gimplify_and_add (ass, post_p);
6313
6314 TREE_VALUE (link) = tem;
6315 tret = GS_OK;
6316 }
6317 }
6318
6319 vec_safe_push (outputs, link);
6320 TREE_CHAIN (link) = NULL_TREE;
6321
6322 if (is_inout)
6323 {
6324 /* An input/output operand. To give the optimizers more
6325 flexibility, split it into separate input and output
6326 operands. */
6327 tree input;
6328 /* Buffer big enough to format a 32-bit UINT_MAX into. */
6329 char buf[11];
6330
6331 /* Turn the in/out constraint into an output constraint. */
6332 char *p = xstrdup (constraint);
6333 p[0] = '=';
6334 TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);
6335
6336 /* And add a matching input constraint. */
6337 if (allows_reg)
6338 {
6339 sprintf (buf, "%u", i);
6340
6341 /* If there are multiple alternatives in the constraint,
6342 handle each of them individually. Those that allow a register
6343 will be replaced with the operand number, the others will stay
6344 unchanged. */
6345 if (strchr (p, ',') != NULL)
6346 {
6347 size_t len = 0, buflen = strlen (buf);
6348 char *beg, *end, *str, *dst;
6349
6350 for (beg = p + 1;;)
6351 {
6352 end = strchr (beg, ',');
6353 if (end == NULL)
6354 end = strchr (beg, '\0');
6355 if ((size_t) (end - beg) < buflen)
6356 len += buflen + 1;
6357 else
6358 len += end - beg + 1;
6359 if (*end)
6360 beg = end + 1;
6361 else
6362 break;
6363 }
6364
6365 str = (char *) alloca (len);
6366 for (beg = p + 1, dst = str;;)
6367 {
6368 const char *tem;
6369 bool mem_p, reg_p, inout_p;
6370
6371 end = strchr (beg, ',');
6372 if (end)
6373 *end = '\0';
6374 beg[-1] = '=';
6375 tem = beg - 1;
6376 parse_output_constraint (&tem, i, 0, 0,
6377 &mem_p, &reg_p, &inout_p);
6378 if (dst != str)
6379 *dst++ = ',';
6380 if (reg_p)
6381 {
6382 memcpy (dst, buf, buflen);
6383 dst += buflen;
6384 }
6385 else
6386 {
6387 if (end)
6388 len = end - beg;
6389 else
6390 len = strlen (beg);
6391 memcpy (dst, beg, len);
6392 dst += len;
6393 }
6394 if (end)
6395 beg = end + 1;
6396 else
6397 break;
6398 }
6399 *dst = '\0';
6400 input = build_string (dst - str, str);
6401 }
6402 else
6403 input = build_string (strlen (buf), buf);
6404 }
6405 else
6406 input = build_string (constraint_len - 1, constraint + 1);
6407
6408 free (p);
6409
6410 input = build_tree_list (build_tree_list (NULL_TREE, input),
6411 unshare_expr (TREE_VALUE (link)));
6412 ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
6413 }
6414 }
6415
6416 link_next = NULL_TREE;
6417 for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
6418 {
6419 link_next = TREE_CHAIN (link);
6420 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
6421 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
6422 oconstraints, &allows_mem, &allows_reg);
6423
6424 /* If we can't make copies, we can only accept memory. */
6425 tree intype = TREE_TYPE (TREE_VALUE (link));
6426 if (intype != error_mark_node
6427 && (TREE_ADDRESSABLE (intype)
6428 || !COMPLETE_TYPE_P (intype)
6429 || !tree_fits_poly_uint64_p (TYPE_SIZE_UNIT (intype))))
6430 {
6431 if (allows_mem)
6432 allows_reg = 0;
6433 else
6434 {
6435 error ("impossible constraint in %<asm%>");
6436 error ("non-memory input %d must stay in memory", i);
6437 return GS_ERROR;
6438 }
6439 }
6440
6441 /* If the operand is a memory input, it should be an lvalue. */
6442 if (!allows_reg && allows_mem)
6443 {
6444 tree inputv = TREE_VALUE (link);
6445 STRIP_NOPS (inputv);
6446 if (TREE_CODE (inputv) == PREDECREMENT_EXPR
6447 || TREE_CODE (inputv) == PREINCREMENT_EXPR
6448 || TREE_CODE (inputv) == POSTDECREMENT_EXPR
6449 || TREE_CODE (inputv) == POSTINCREMENT_EXPR
6450 || TREE_CODE (inputv) == MODIFY_EXPR)
6451 TREE_VALUE (link) = error_mark_node;
6452 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6453 is_gimple_lvalue, fb_lvalue | fb_mayfail);
6454 if (tret != GS_ERROR)
6455 {
6456 /* Unlike output operands, memory inputs are not guaranteed
6457 to be lvalues by the FE, and while the expressions are
6458 marked addressable there, if the input is e.g. a statement
6459 expression, temporaries in it might not end up being
6460 addressable. They might already be used in the IL and thus
6461 it is too late to make them addressable now. */
6462 tree x = TREE_VALUE (link);
6463 while (handled_component_p (x))
6464 x = TREE_OPERAND (x, 0);
6465 if (TREE_CODE (x) == MEM_REF
6466 && TREE_CODE (TREE_OPERAND (x, 0)) == ADDR_EXPR)
6467 x = TREE_OPERAND (TREE_OPERAND (x, 0), 0);
6468 if ((VAR_P (x)
6469 || TREE_CODE (x) == PARM_DECL
6470 || TREE_CODE (x) == RESULT_DECL)
6471 && !TREE_ADDRESSABLE (x)
6472 && is_gimple_reg (x))
6473 {
6474 warning_at (EXPR_LOC_OR_LOC (TREE_VALUE (link),
6475 input_location), 0,
6476 "memory input %d is not directly addressable",
6477 i);
6478 prepare_gimple_addressable (&TREE_VALUE (link), pre_p);
6479 }
6480 }
6481 mark_addressable (TREE_VALUE (link));
6482 if (tret == GS_ERROR)
6483 {
6484 error_at (EXPR_LOC_OR_LOC (TREE_VALUE (link), input_location),
6485 "memory input %d is not directly addressable", i);
6486 ret = tret;
6487 }
6488 }
6489 else
6490 {
6491 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6492 is_gimple_asm_val, fb_rvalue);
6493 if (tret == GS_ERROR)
6494 ret = tret;
6495 }
6496
6497 TREE_CHAIN (link) = NULL_TREE;
6498 vec_safe_push (inputs, link);
6499 }
6500
6501 link_next = NULL_TREE;
6502 for (link = ASM_CLOBBERS (expr); link; ++i, link = link_next)
6503 {
6504 link_next = TREE_CHAIN (link);
6505 TREE_CHAIN (link) = NULL_TREE;
6506 vec_safe_push (clobbers, link);
6507 }
6508
6509 link_next = NULL_TREE;
6510 for (link = ASM_LABELS (expr); link; ++i, link = link_next)
6511 {
6512 link_next = TREE_CHAIN (link);
6513 TREE_CHAIN (link) = NULL_TREE;
6514 vec_safe_push (labels, link);
6515 }
6516
6517 /* Do not add ASMs with errors to the gimple IL stream. */
6518 if (ret != GS_ERROR)
6519 {
6520 stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
6521 inputs, outputs, clobbers, labels);
6522
6523 gimple_asm_set_volatile (stmt, ASM_VOLATILE_P (expr) || noutputs == 0);
6524 gimple_asm_set_input (stmt, ASM_INPUT_P (expr));
6525 gimple_asm_set_inline (stmt, ASM_INLINE_P (expr));
6526
6527 gimplify_seq_add_stmt (pre_p, stmt);
6528 }
6529
6530 return ret;
6531 }
6532
6533 /* Gimplify a CLEANUP_POINT_EXPR. Currently this works by adding
6534 GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
6535 gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
6536 return to this function.
6537
6538 FIXME should we complexify the prequeue handling instead? Or use flags
6539 for all the cleanups and let the optimizer tighten them up? The current
6540 code seems pretty fragile; it will break on a cleanup within any
6541 non-conditional nesting. But any such nesting would be broken, anyway;
6542 we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
6543 and continues out of it. We can do that at the RTL level, though, so
6544 having an optimizer to tighten up try/finally regions would be a Good
6545 Thing. */
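/* Sketch of the conversion performed below: a gimplified body

   stmt1; WCE <cleanup>; stmt2; stmt3;

   becomes

   stmt1; try { stmt2; stmt3; } finally { cleanup; }

   while a WCE that is the last statement of the body simply has its
   cleanup spliced in before it (unless the cleanup is EH-only). */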
6546
6547 static enum gimplify_status
6548 gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
6549 {
6550 gimple_stmt_iterator iter;
6551 gimple_seq body_sequence = NULL;
6552
6553 tree temp = voidify_wrapper_expr (*expr_p, NULL);
6554
6555 /* We only care about the number of conditions between the innermost
6556 CLEANUP_POINT_EXPR and the cleanup. So save and reset the count and
6557 any cleanups collected outside the CLEANUP_POINT_EXPR. */
6558 int old_conds = gimplify_ctxp->conditions;
6559 gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
6560 bool old_in_cleanup_point_expr = gimplify_ctxp->in_cleanup_point_expr;
6561 gimplify_ctxp->conditions = 0;
6562 gimplify_ctxp->conditional_cleanups = NULL;
6563 gimplify_ctxp->in_cleanup_point_expr = true;
6564
6565 gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);
6566
6567 gimplify_ctxp->conditions = old_conds;
6568 gimplify_ctxp->conditional_cleanups = old_cleanups;
6569 gimplify_ctxp->in_cleanup_point_expr = old_in_cleanup_point_expr;
6570
6571 for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
6572 {
6573 gimple *wce = gsi_stmt (iter);
6574
6575 if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
6576 {
6577 if (gsi_one_before_end_p (iter))
6578 {
6579 /* Note that gsi_insert_seq_before and gsi_remove do not
6580 scan operands, unlike some other sequence mutators. */
6581 if (!gimple_wce_cleanup_eh_only (wce))
6582 gsi_insert_seq_before_without_update (&iter,
6583 gimple_wce_cleanup (wce),
6584 GSI_SAME_STMT);
6585 gsi_remove (&iter, true);
6586 break;
6587 }
6588 else
6589 {
6590 gtry *gtry;
6591 gimple_seq seq;
6592 enum gimple_try_flags kind;
6593
6594 if (gimple_wce_cleanup_eh_only (wce))
6595 kind = GIMPLE_TRY_CATCH;
6596 else
6597 kind = GIMPLE_TRY_FINALLY;
6598 seq = gsi_split_seq_after (iter);
6599
6600 gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
6601 /* Do not use gsi_replace here, as it may scan operands.
6602 We want to do a simple structural modification only. */
6603 gsi_set_stmt (&iter, gtry);
6604 iter = gsi_start (gtry->eval);
6605 }
6606 }
6607 else
6608 gsi_next (&iter);
6609 }
6610
6611 gimplify_seq_add_seq (pre_p, body_sequence);
6612 if (temp)
6613 {
6614 *expr_p = temp;
6615 return GS_OK;
6616 }
6617 else
6618 {
6619 *expr_p = NULL;
6620 return GS_ALL_DONE;
6621 }
6622 }
6623
6624 /* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP
6625 is the cleanup action required. EH_ONLY is true if the cleanup should
6626 only be executed if an exception is thrown, not on normal exit.
6627 If FORCE_UNCOND is true perform the cleanup unconditionally; this is
6628 only valid for clobbers. */
6629
6630 static void
6631 gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p,
6632 bool force_uncond = false)
6633 {
6634 gimple *wce;
6635 gimple_seq cleanup_stmts = NULL;
6636
6637 /* Errors can result in improperly nested cleanups, which results in
6638 confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR. */
6639 if (seen_error ())
6640 return;
6641
6642 if (gimple_conditional_context ())
6643 {
6644 /* If we're in a conditional context, this is more complex. We only
6645 want to run the cleanup if we actually ran the initialization that
6646 necessitates it, but we want to run it after the end of the
6647 conditional context. So we wrap the try/finally around the
6648 condition and use a flag to determine whether or not to actually
6649 run the destructor. Thus
6650
6651 test ? f(A()) : 0
6652
6653 becomes (approximately)
6654
6655 flag = 0;
6656 try {
6657 if (test) { A::A(temp); flag = 1; val = f(temp); }
6658 else { val = 0; }
6659 } finally {
6660 if (flag) A::~A(temp);
6661 }
6662 val
6663 */
6664 if (force_uncond)
6665 {
6666 gimplify_stmt (&cleanup, &cleanup_stmts);
6667 wce = gimple_build_wce (cleanup_stmts);
6668 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
6669 }
6670 else
6671 {
6672 tree flag = create_tmp_var (boolean_type_node, "cleanup");
6673 gassign *ffalse = gimple_build_assign (flag, boolean_false_node);
6674 gassign *ftrue = gimple_build_assign (flag, boolean_true_node);
6675
6676 cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
6677 gimplify_stmt (&cleanup, &cleanup_stmts);
6678 wce = gimple_build_wce (cleanup_stmts);
6679
6680 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
6681 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
6682 gimplify_seq_add_stmt (pre_p, ftrue);
6683
6684 /* Because of this manipulation, and the EH edges that jump
6685 threading cannot redirect, the temporary (VAR) will appear
6686 to be used uninitialized. Don't warn. */
6687 TREE_NO_WARNING (var) = 1;
6688 }
6689 }
6690 else
6691 {
6692 gimplify_stmt (&cleanup, &cleanup_stmts);
6693 wce = gimple_build_wce (cleanup_stmts);
6694 gimple_wce_set_cleanup_eh_only (wce, eh_only);
6695 gimplify_seq_add_stmt (pre_p, wce);
6696 }
6697 }
6698
6699 /* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */
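/* Sketch (illustrative slot name): for "TARGET_EXPR <D.1, f ()>" at
   statement context, the temporary D.1 is added to the enclosing
   temps, "D.1 = f ();" is emitted to PRE_P, any cleanup for D.1 is
   pushed as a WITH_CLEANUP_EXPR, and *EXPR_P is rewritten to D.1. */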
6700
6701 static enum gimplify_status
6702 gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6703 {
6704 tree targ = *expr_p;
6705 tree temp = TARGET_EXPR_SLOT (targ);
6706 tree init = TARGET_EXPR_INITIAL (targ);
6707 enum gimplify_status ret;
6708
6709 bool unpoison_empty_seq = false;
6710 gimple_stmt_iterator unpoison_it;
6711
6712 if (init)
6713 {
6714 tree cleanup = NULL_TREE;
6715
6716 /* TARGET_EXPR temps aren't part of the enclosing block, so add them
6717 to the temps list. Also handle variable-length TARGET_EXPRs. */
6718 if (!poly_int_tree_p (DECL_SIZE (temp)))
6719 {
6720 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
6721 gimplify_type_sizes (TREE_TYPE (temp), pre_p);
6722 gimplify_vla_decl (temp, pre_p);
6723 }
6724 else
6725 {
6726 /* Save the location where we need to place unpoisoning. It's possible
6727 that the variable will later turn out to need to live in memory. */
6728 unpoison_it = gsi_last (*pre_p);
6729 unpoison_empty_seq = gsi_end_p (unpoison_it);
6730
6731 gimple_add_tmp_var (temp);
6732 }
6733
6734 /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
6735 expression is supposed to initialize the slot. */
6736 if (VOID_TYPE_P (TREE_TYPE (init)))
6737 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
6738 else
6739 {
6740 tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
6741 init = init_expr;
6742 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
6743 init = NULL;
6744 ggc_free (init_expr);
6745 }
6746 if (ret == GS_ERROR)
6747 {
6748 /* PR c++/28266 Make sure this is expanded only once. */
6749 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
6750 return GS_ERROR;
6751 }
6752 if (init)
6753 gimplify_and_add (init, pre_p);
6754
6755 /* If needed, push the cleanup for the temp. */
6756 if (TARGET_EXPR_CLEANUP (targ))
6757 {
6758 if (CLEANUP_EH_ONLY (targ))
6759 gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
6760 CLEANUP_EH_ONLY (targ), pre_p);
6761 else
6762 cleanup = TARGET_EXPR_CLEANUP (targ);
6763 }
6764
6765 /* Add a clobber for the temporary going out of scope, like
6766 gimplify_bind_expr. */
6767 if (gimplify_ctxp->in_cleanup_point_expr
6768 && needs_to_live_in_memory (temp))
6769 {
6770 if (flag_stack_reuse == SR_ALL)
6771 {
6772 tree clobber = build_clobber (TREE_TYPE (temp));
6773 clobber = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, clobber);
6774 gimple_push_cleanup (temp, clobber, false, pre_p, true);
6775 }
6776 if (asan_poisoned_variables
6777 && DECL_ALIGN (temp) <= MAX_SUPPORTED_STACK_ALIGNMENT
6778 && !TREE_STATIC (temp)
6779 && dbg_cnt (asan_use_after_scope)
6780 && !gimplify_omp_ctxp)
6781 {
6782 tree asan_cleanup = build_asan_poison_call_expr (temp);
6783 if (asan_cleanup)
6784 {
6785 if (unpoison_empty_seq)
6786 unpoison_it = gsi_start (*pre_p);
6787
6788 asan_poison_variable (temp, false, &unpoison_it,
6789 unpoison_empty_seq);
6790 gimple_push_cleanup (temp, asan_cleanup, false, pre_p);
6791 }
6792 }
6793 }
6794 if (cleanup)
6795 gimple_push_cleanup (temp, cleanup, false, pre_p);
6796
6797 /* Only expand this once. */
6798 TREE_OPERAND (targ, 3) = init;
6799 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
6800 }
6801 else
6802 /* We should have expanded this before. */
6803 gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));
6804
6805 *expr_p = temp;
6806 return GS_OK;
6807 }
6808
6809 /* Gimplification of expression trees. */
6810
6811 /* Gimplify an expression which appears at statement context. The
6812 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is
6813 NULL, a new sequence is allocated.
6814
6815 Return true if we actually added a statement to the queue. */
6816
6817 bool
6818 gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
6819 {
6820 gimple_seq_node last;
6821
6822 last = gimple_seq_last (*seq_p);
6823 gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
6824 return last != gimple_seq_last (*seq_p);
6825 }
6826
6827 /* Add FIRSTPRIVATE entries for DECL in the surrounding OpenMP parallels
6828 to CTX. If entries already exist, force them to be some flavor of private.
6829 If there is no enclosing parallel, do nothing. */
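/* Sketch: if a parallel region uses a VLA whose length lives in an
   artificial variable (call it D.len), this makes D.len firstprivate
   on the enclosing parallel, or map-to-only on a target region,
   unless it already has a data-sharing entry there. */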
6830
6831 void
6832 omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
6833 {
6834 splay_tree_node n;
6835
6836 if (decl == NULL || !DECL_P (decl) || ctx->region_type == ORT_NONE)
6837 return;
6838
6839 do
6840 {
6841 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6842 if (n != NULL)
6843 {
6844 if (n->value & GOVD_SHARED)
6845 n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
6846 else if (n->value & GOVD_MAP)
6847 n->value |= GOVD_MAP_TO_ONLY;
6848 else
6849 return;
6850 }
6851 else if ((ctx->region_type & ORT_TARGET) != 0)
6852 {
6853 if (ctx->defaultmap[GDMK_SCALAR] & GOVD_FIRSTPRIVATE)
6854 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
6855 else
6856 omp_add_variable (ctx, decl, GOVD_MAP | GOVD_MAP_TO_ONLY);
6857 }
6858 else if (ctx->region_type != ORT_WORKSHARE
6859 && ctx->region_type != ORT_TASKGROUP
6860 && ctx->region_type != ORT_SIMD
6861 && ctx->region_type != ORT_ACC
6862 && !(ctx->region_type & ORT_TARGET_DATA))
6863 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
6864
6865 ctx = ctx->outer_context;
6866 }
6867 while (ctx);
6868 }
6869
6870 /* Similarly for each of the type sizes of TYPE. */
6871
6872 static void
6873 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
6874 {
6875 if (type == NULL || type == error_mark_node)
6876 return;
6877 type = TYPE_MAIN_VARIANT (type);
6878
6879 if (ctx->privatized_types->add (type))
6880 return;
6881
6882 switch (TREE_CODE (type))
6883 {
6884 case INTEGER_TYPE:
6885 case ENUMERAL_TYPE:
6886 case BOOLEAN_TYPE:
6887 case REAL_TYPE:
6888 case FIXED_POINT_TYPE:
6889 omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
6890 omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
6891 break;
6892
6893 case ARRAY_TYPE:
6894 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
6895 omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
6896 break;
6897
6898 case RECORD_TYPE:
6899 case UNION_TYPE:
6900 case QUAL_UNION_TYPE:
6901 {
6902 tree field;
6903 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
6904 if (TREE_CODE (field) == FIELD_DECL)
6905 {
6906 omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
6907 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
6908 }
6909 }
6910 break;
6911
6912 case POINTER_TYPE:
6913 case REFERENCE_TYPE:
6914 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
6915 break;
6916
6917 default:
6918 break;
6919 }
6920
6921 omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
6922 omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
6923 lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
6924 }
6925
6926 /* Add an entry for DECL in the OMP context CTX with FLAGS. */
6927
6928 static void
6929 omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
6930 {
6931 splay_tree_node n;
6932 unsigned int nflags;
6933 tree t;
6934
6935 if (error_operand_p (decl) || ctx->region_type == ORT_NONE)
6936 return;
6937
6938 /* Never elide decls whose type has TREE_ADDRESSABLE set. This means
6939 there are constructors involved somewhere. The exception is a shared
6940 clause, where nothing is privatized. */
6941 if ((flags & GOVD_SHARED) == 0
6942 && (TREE_ADDRESSABLE (TREE_TYPE (decl))
6943 || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl))))
6944 flags |= GOVD_SEEN;
6945
6946 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6947 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
6948 {
6949 /* We shouldn't be re-adding the decl with the same data
6950 sharing class. */
6951 gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
6952 nflags = n->value | flags;
6953 /* The only combination of data sharing classes we should see is
6954 FIRSTPRIVATE and LASTPRIVATE. However, OpenACC permits
6955 reduction variables to be used in data sharing clauses. */
6956 gcc_assert ((ctx->region_type & ORT_ACC) != 0
6957 || ((nflags & GOVD_DATA_SHARE_CLASS)
6958 == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE))
6959 || (flags & GOVD_DATA_SHARE_CLASS) == 0);
6960 n->value = nflags;
6961 return;
6962 }
6963
6964 /* When adding a variable-sized variable, we have to handle all sorts
6965 of additional bits of data: the pointer replacement variable, and
6966 the parameters of the type. */
6967 if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
6968 {
6969 /* Add the pointer replacement variable as PRIVATE if the variable
6970 replacement is private, else FIRSTPRIVATE since we'll need the
6971 address of the original variable either for SHARED, or for the
6972 copy into or out of the context. */
6973 if (!(flags & GOVD_LOCAL) && ctx->region_type != ORT_TASKGROUP)
6974 {
6975 if (flags & GOVD_MAP)
6976 nflags = GOVD_MAP | GOVD_MAP_TO_ONLY | GOVD_EXPLICIT;
6977 else if (flags & GOVD_PRIVATE)
6978 nflags = GOVD_PRIVATE;
6979 else if (((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
6980 && (flags & GOVD_FIRSTPRIVATE))
6981 || (ctx->region_type == ORT_TARGET_DATA
6982 && (flags & GOVD_DATA_SHARE_CLASS) == 0))
6983 nflags = GOVD_PRIVATE | GOVD_EXPLICIT;
6984 else
6985 nflags = GOVD_FIRSTPRIVATE;
6986 nflags |= flags & GOVD_SEEN;
6987 t = DECL_VALUE_EXPR (decl);
6988 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
6989 t = TREE_OPERAND (t, 0);
6990 gcc_assert (DECL_P (t));
6991 omp_add_variable (ctx, t, nflags);
6992 }
6993
6994 /* Add all of the variable and type parameters (which should have
6995 been gimplified to a formal temporary) as FIRSTPRIVATE. */
6996 omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
6997 omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
6998 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
6999
7000 /* The variable-sized variable itself is never SHARED, only some form
7001 of PRIVATE. The sharing would take place via the pointer variable
7002 which we remapped above. */
7003 if (flags & GOVD_SHARED)
7004 flags = GOVD_SHARED | GOVD_DEBUG_PRIVATE
7005 | (flags & (GOVD_SEEN | GOVD_EXPLICIT));
7006
7007 /* We're going to make use of the TYPE_SIZE_UNIT at least in the
7008 alloca statement we generate for the variable, so make sure it
7009 is available. This isn't automatically needed for the SHARED
7010 case, since we won't be allocating local storage then.
7011 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
7012 in this case omp_notice_variable will be called later
7013 on when it is gimplified. */
7014 else if (! (flags & (GOVD_LOCAL | GOVD_MAP))
7015 && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl))))
7016 omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
7017 }
7018 else if ((flags & (GOVD_MAP | GOVD_LOCAL)) == 0
7019 && lang_hooks.decls.omp_privatize_by_reference (decl))
7020 {
7021 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
7022
7023 /* Similar to the direct variable sized case above, we'll need the
7024 size of references being privatized. */
7025 if ((flags & GOVD_SHARED) == 0)
7026 {
7027 t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
7028 if (DECL_P (t))
7029 omp_notice_variable (ctx, t, true);
7030 }
7031 }
7032
7033 if (n != NULL)
7034 n->value |= flags;
7035 else
7036 splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
7037
7038 /* For reduction clauses in OpenACC loop directives, by default create a
7039 copy clause on the enclosing parallel construct for carrying back the
7040 results. */
7041 if (ctx->region_type == ORT_ACC && (flags & GOVD_REDUCTION))
7042 {
7043 struct gimplify_omp_ctx *outer_ctx = ctx->outer_context;
7044 while (outer_ctx)
7045 {
7046 n = splay_tree_lookup (outer_ctx->variables, (splay_tree_key)decl);
7047 if (n != NULL)
7048 {
7049 /* Ignore local variables and explicitly declared clauses. */
7050 if (n->value & (GOVD_LOCAL | GOVD_EXPLICIT))
7051 break;
7052 else if (outer_ctx->region_type == ORT_ACC_KERNELS)
7053 {
7054 /* According to the OpenACC spec, such a reduction variable
7055 should already have a copy map on a kernels construct;
7056 verify that here. */
7057 gcc_assert (!(n->value & GOVD_FIRSTPRIVATE)
7058 && (n->value & GOVD_MAP));
7059 }
7060 else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
7061 {
7062 /* Remove firstprivate and make it a copy map. */
7063 n->value &= ~GOVD_FIRSTPRIVATE;
7064 n->value |= GOVD_MAP;
7065 }
7066 }
7067 else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
7068 {
7069 splay_tree_insert (outer_ctx->variables, (splay_tree_key)decl,
7070 GOVD_MAP | GOVD_SEEN);
7071 break;
7072 }
7073 outer_ctx = outer_ctx->outer_context;
7074 }
7075 }
7076 }
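
/* Illustrative note (not a comment from the sources): for the OpenACC
   reduction handling above, a loop such as

     #pragma acc parallel loop reduction (+:sum)

   gets, by default, a copy map for 'sum' created on the enclosing
   parallel construct, so the reduction result is carried back.  */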
7077
7078 /* Notice a threadprivate variable DECL used in OMP context CTX.
7079 This prints out diagnostics about threadprivate variable uses in
7080 untied tasks, target regions and order(concurrent) regions. If
7081 DECL2 is non-NULL, prevent this diagnostic on that variable. */
7082
7083 static bool
7084 omp_notice_threadprivate_variable (struct gimplify_omp_ctx *ctx, tree decl,
7085 tree decl2)
7086 {
7087 splay_tree_node n;
7088 struct gimplify_omp_ctx *octx;
7089
7090 for (octx = ctx; octx; octx = octx->outer_context)
7091 if ((octx->region_type & ORT_TARGET) != 0
7092 || octx->order_concurrent)
7093 {
7094 n = splay_tree_lookup (octx->variables, (splay_tree_key)decl);
7095 if (n == NULL)
7096 {
7097 if (octx->order_concurrent)
7098 {
7099 error ("threadprivate variable %qE used in a region with"
7100 " %<order(concurrent)%> clause", DECL_NAME (decl));
7101 error_at (octx->location, "enclosing region");
7102 }
7103 else
7104 {
7105 error ("threadprivate variable %qE used in target region",
7106 DECL_NAME (decl));
7107 error_at (octx->location, "enclosing target region");
7108 }
7109 splay_tree_insert (octx->variables, (splay_tree_key)decl, 0);
7110 }
7111 if (decl2)
7112 splay_tree_insert (octx->variables, (splay_tree_key)decl2, 0);
7113 }
7114
7115 if (ctx->region_type != ORT_UNTIED_TASK)
7116 return false;
7117 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7118 if (n == NULL)
7119 {
7120 error ("threadprivate variable %qE used in untied task",
7121 DECL_NAME (decl));
7122 error_at (ctx->location, "enclosing task");
7123 splay_tree_insert (ctx->variables, (splay_tree_key)decl, 0);
7124 }
7125 if (decl2)
7126 splay_tree_insert (ctx->variables, (splay_tree_key)decl2, 0);
7127 return false;
7128 }
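
/* Illustrative example (hypothetical user code) of the diagnostics
   emitted above:

     int tp;
     #pragma omp threadprivate (tp)

     void f (void)
     {
       #pragma omp task untied
       tp++;    // "threadprivate variable 'tp' used in untied task"
     }
*/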
7129
7130 /* Return true if global var DECL is device resident. */
7131
7132 static bool
7133 device_resident_p (tree decl)
7134 {
7135 tree attr = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (decl));
7136
7137 if (!attr)
7138 return false;
7139
7140 for (tree t = TREE_VALUE (attr); t; t = TREE_PURPOSE (t))
7141 {
7142 tree c = TREE_VALUE (t);
7143 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DEVICE_RESIDENT)
7144 return true;
7145 }
7146
7147 return false;
7148 }
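
/* For instance (an illustrative sketch), a declaration such as

     int g;
     #pragma acc declare device_resident (g)

   attaches the "oacc declare target" attribute with a
   GOMP_MAP_DEVICE_RESIDENT clause, which the loop above detects.  */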
7149
7150 /* Return true if DECL has an ACC DECLARE attribute. */
7151
7152 static bool
7153 is_oacc_declared (tree decl)
7154 {
7155 tree t = TREE_CODE (decl) == MEM_REF ? TREE_OPERAND (decl, 0) : decl;
7156 tree declared = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (t));
7157 return declared != NULL_TREE;
7158 }
7159
7160 /* Determine outer default flags for DECL mentioned in an OMP region
7161 but not declared in an enclosing clause.
7162
7163 ??? Some compiler-generated variables (like SAVE_EXPRs) could be
7164 remapped firstprivate instead of shared. To some extent this is
7165 addressed in omp_firstprivatize_type_sizes, but not
7166 effectively. */
7167
7168 static unsigned
7169 omp_default_clause (struct gimplify_omp_ctx *ctx, tree decl,
7170 bool in_code, unsigned flags)
7171 {
7172 enum omp_clause_default_kind default_kind = ctx->default_kind;
7173 enum omp_clause_default_kind kind;
7174
7175 kind = lang_hooks.decls.omp_predetermined_sharing (decl);
7176 if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
7177 default_kind = kind;
7178 else if (VAR_P (decl) && TREE_STATIC (decl) && DECL_IN_CONSTANT_POOL (decl))
7179 default_kind = OMP_CLAUSE_DEFAULT_SHARED;
7180
7181 switch (default_kind)
7182 {
7183 case OMP_CLAUSE_DEFAULT_NONE:
7184 {
7185 const char *rtype;
7186
7187 if (ctx->region_type & ORT_PARALLEL)
7188 rtype = "parallel";
7189 else if ((ctx->region_type & ORT_TASKLOOP) == ORT_TASKLOOP)
7190 rtype = "taskloop";
7191 else if (ctx->region_type & ORT_TASK)
7192 rtype = "task";
7193 else if (ctx->region_type & ORT_TEAMS)
7194 rtype = "teams";
7195 else
7196 gcc_unreachable ();
7197
7198 error ("%qE not specified in enclosing %qs",
7199 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rtype);
7200 error_at (ctx->location, "enclosing %qs", rtype);
7201 }
7202 /* FALLTHRU */
7203 case OMP_CLAUSE_DEFAULT_SHARED:
7204 flags |= GOVD_SHARED;
7205 break;
7206 case OMP_CLAUSE_DEFAULT_PRIVATE:
7207 flags |= GOVD_PRIVATE;
7208 break;
7209 case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
7210 flags |= GOVD_FIRSTPRIVATE;
7211 break;
7212 case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
7213 /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED. */
7214 gcc_assert ((ctx->region_type & ORT_TASK) != 0);
7215 if (struct gimplify_omp_ctx *octx = ctx->outer_context)
7216 {
7217 omp_notice_variable (octx, decl, in_code);
7218 for (; octx; octx = octx->outer_context)
7219 {
7220 splay_tree_node n2;
7221
7222 n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
7223 if ((octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)) != 0
7224 && (n2 == NULL || (n2->value & GOVD_DATA_SHARE_CLASS) == 0))
7225 continue;
7226 if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
7227 {
7228 flags |= GOVD_FIRSTPRIVATE;
7229 goto found_outer;
7230 }
7231 if ((octx->region_type & (ORT_PARALLEL | ORT_TEAMS)) != 0)
7232 {
7233 flags |= GOVD_SHARED;
7234 goto found_outer;
7235 }
7236 }
7237 }
7238
7239 if (TREE_CODE (decl) == PARM_DECL
7240 || (!is_global_var (decl)
7241 && DECL_CONTEXT (decl) == current_function_decl))
7242 flags |= GOVD_FIRSTPRIVATE;
7243 else
7244 flags |= GOVD_SHARED;
7245 found_outer:
7246 break;
7247
7248 default:
7249 gcc_unreachable ();
7250 }
7251
7252 return flags;
7253 }
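
/* Illustrative example (hypothetical user code) of the default(none)
   diagnostic above:

     int x = 0;
     #pragma omp parallel default(none)
     x++;    // "'x' not specified in enclosing 'parallel'"
*/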
7254
7255
7256 /* Determine outer default flags for DECL mentioned in an OACC region
7257 but not declared in an enclosing clause. */
7258
7259 static unsigned
7260 oacc_default_clause (struct gimplify_omp_ctx *ctx, tree decl, unsigned flags)
7261 {
7262 const char *rkind;
7263 bool on_device = false;
7264 bool is_private = false;
7265 bool declared = is_oacc_declared (decl);
7266 tree type = TREE_TYPE (decl);
7267
7268 if (lang_hooks.decls.omp_privatize_by_reference (decl))
7269 type = TREE_TYPE (type);
7270
7271 /* For Fortran COMMON blocks, only used variables in those blocks are
7272 transferred and remapped. The block itself will have a private clause to
7273 avoid transferring the data twice.
7274 The hook evaluates to false by default. For a variable in Fortran's COMMON
7275 or EQUIVALENCE block, it returns 'true' (as we have shared=false); only
7276 the variables in such a COMMON/EQUIVALENCE block shall be privatized,
7277 not the whole block. For C++ and Fortran, it can also be true under
7278 certain other conditions, if DECL_HAS_VALUE_EXPR. */
7279 if (RECORD_OR_UNION_TYPE_P (type))
7280 is_private = lang_hooks.decls.omp_disregard_value_expr (decl, false);
7281
7282 if ((ctx->region_type & (ORT_ACC_PARALLEL | ORT_ACC_KERNELS)) != 0
7283 && is_global_var (decl)
7284 && device_resident_p (decl)
7285 && !is_private)
7286 {
7287 on_device = true;
7288 flags |= GOVD_MAP_TO_ONLY;
7289 }
7290
7291 switch (ctx->region_type)
7292 {
7293 case ORT_ACC_KERNELS:
7294 rkind = "kernels";
7295
7296 if (is_private)
7297 flags |= GOVD_FIRSTPRIVATE;
7298 else if (AGGREGATE_TYPE_P (type))
7299 {
7300 /* Aggregates default to 'present_or_copy', or 'present'. */
7301 if (ctx->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
7302 flags |= GOVD_MAP;
7303 else
7304 flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
7305 }
7306 else
7307 /* Scalars default to 'copy'. */
7308 flags |= GOVD_MAP | GOVD_MAP_FORCE;
7309
7310 break;
7311
7312 case ORT_ACC_PARALLEL:
7313 case ORT_ACC_SERIAL:
7314 rkind = ctx->region_type == ORT_ACC_PARALLEL ? "parallel" : "serial";
7315
7316 if (is_private)
7317 flags |= GOVD_FIRSTPRIVATE;
7318 else if (on_device || declared)
7319 flags |= GOVD_MAP;
7320 else if (AGGREGATE_TYPE_P (type))
7321 {
7322 /* Aggregates default to 'present_or_copy', or 'present'. */
7323 if (ctx->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
7324 flags |= GOVD_MAP;
7325 else
7326 flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
7327 }
7328 else
7329 /* Scalars default to 'firstprivate'. */
7330 flags |= GOVD_FIRSTPRIVATE;
7331
7332 break;
7333
7334 default:
7335 gcc_unreachable ();
7336 }
7337
7338 if (DECL_ARTIFICIAL (decl))
7339 ; /* We can get compiler-generated decls, and should not complain
7340 about them. */
7341 else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_NONE)
7342 {
7343 error ("%qE not specified in enclosing OpenACC %qs construct",
7344 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rkind);
7345 inform (ctx->location, "enclosing OpenACC %qs construct", rkind);
7346 }
7347 else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_PRESENT)
7348 ; /* Handled above. */
7349 else
7350 gcc_checking_assert (ctx->default_kind == OMP_CLAUSE_DEFAULT_SHARED);
7351
7352 return flags;
7353 }
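
/* Summarizing the defaults above with an illustrative example:

     int s;
     int a[100];
     #pragma acc parallel
     {
       ... code reading and writing s and a ...
     }

   gives 's' an implicit 'firstprivate' and 'a' an implicit
   'present_or_copy' mapping.  */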
7354
7355 /* Record the fact that DECL was used within the OMP context CTX.
7356 IN_CODE is true when real code uses DECL, and false when we should
7357 merely emit default(none) errors. Return true if DECL is going to
7358 be remapped and thus DECL shouldn't be gimplified into its
7359 DECL_VALUE_EXPR (if any). */
7360
7361 static bool
7362 omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
7363 {
7364 splay_tree_node n;
7365 unsigned flags = in_code ? GOVD_SEEN : 0;
7366 bool ret = false, shared;
7367
7368 if (error_operand_p (decl))
7369 return false;
7370
7371 if (ctx->region_type == ORT_NONE)
7372 return lang_hooks.decls.omp_disregard_value_expr (decl, false);
7373
7374 if (is_global_var (decl))
7375 {
7376 /* Threadprivate variables are predetermined. */
7377 if (DECL_THREAD_LOCAL_P (decl))
7378 return omp_notice_threadprivate_variable (ctx, decl, NULL_TREE);
7379
7380 if (DECL_HAS_VALUE_EXPR_P (decl))
7381 {
7382 if (ctx->region_type & ORT_ACC)
7383 /* For OpenACC, defer expansion of the value to avoid transferring
7384 privatized common block data instead of the im-/explicitly
7385 transferred variables which are in common blocks. */
7386 ;
7387 else
7388 {
7389 tree value = get_base_address (DECL_VALUE_EXPR (decl));
7390
7391 if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
7392 return omp_notice_threadprivate_variable (ctx, decl, value);
7393 }
7394 }
7395
7396 if (gimplify_omp_ctxp->outer_context == NULL
7397 && VAR_P (decl)
7398 && oacc_get_fn_attrib (current_function_decl))
7399 {
7400 location_t loc = DECL_SOURCE_LOCATION (decl);
7401
7402 if (lookup_attribute ("omp declare target link",
7403 DECL_ATTRIBUTES (decl)))
7404 {
7405 error_at (loc,
7406 "%qE with %<link%> clause used in %<routine%> function",
7407 DECL_NAME (decl));
7408 return false;
7409 }
7410 else if (!lookup_attribute ("omp declare target",
7411 DECL_ATTRIBUTES (decl)))
7412 {
7413 error_at (loc,
7414 "%qE requires a %<declare%> directive for use "
7415 "in a %<routine%> function", DECL_NAME (decl));
7416 return false;
7417 }
7418 }
7419 }
7420
7421 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7422 if ((ctx->region_type & ORT_TARGET) != 0)
7423 {
7424 if (ctx->region_type & ORT_ACC)
7425 /* For OpenACC, as remarked above, defer expansion. */
7426 shared = false;
7427 else
7428 shared = true;
7429
7430 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
7431 if (n == NULL)
7432 {
7433 unsigned nflags = flags;
7434 if ((ctx->region_type & ORT_ACC) == 0)
7435 {
7436 bool is_declare_target = false;
7437 if (is_global_var (decl)
7438 && varpool_node::get_create (decl)->offloadable)
7439 {
7440 struct gimplify_omp_ctx *octx;
7441 for (octx = ctx->outer_context;
7442 octx; octx = octx->outer_context)
7443 {
7444 n = splay_tree_lookup (octx->variables,
7445 (splay_tree_key)decl);
7446 if (n
7447 && (n->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED
7448 && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
7449 break;
7450 }
7451 is_declare_target = octx == NULL;
7452 }
7453 if (!is_declare_target)
7454 {
7455 int gdmk;
7456 if (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
7457 || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
7458 && (TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
7459 == POINTER_TYPE)))
7460 gdmk = GDMK_POINTER;
7461 else if (lang_hooks.decls.omp_scalar_p (decl))
7462 gdmk = GDMK_SCALAR;
7463 else
7464 gdmk = GDMK_AGGREGATE;
7465 if (ctx->defaultmap[gdmk] == 0)
7466 {
7467 tree d = lang_hooks.decls.omp_report_decl (decl);
7468 error ("%qE not specified in enclosing %<target%>",
7469 DECL_NAME (d));
7470 error_at (ctx->location, "enclosing %<target%>");
7471 }
7472 else if (ctx->defaultmap[gdmk]
7473 & (GOVD_MAP_0LEN_ARRAY | GOVD_FIRSTPRIVATE))
7474 nflags |= ctx->defaultmap[gdmk];
7475 else
7476 {
7477 gcc_assert (ctx->defaultmap[gdmk] & GOVD_MAP);
7478 nflags |= ctx->defaultmap[gdmk] & ~GOVD_MAP;
7479 }
7480 }
7481 }
7482
7483 struct gimplify_omp_ctx *octx = ctx->outer_context;
7484 if ((ctx->region_type & ORT_ACC) && octx)
7485 {
7486 /* Look in outer OpenACC contexts to see if there's a
7487 data attribute for this variable. */
7488 omp_notice_variable (octx, decl, in_code);
7489
7490 for (; octx; octx = octx->outer_context)
7491 {
7492 if (!(octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)))
7493 break;
7494 splay_tree_node n2
7495 = splay_tree_lookup (octx->variables,
7496 (splay_tree_key) decl);
7497 if (n2)
7498 {
7499 if (octx->region_type == ORT_ACC_HOST_DATA)
7500 error ("variable %qE declared in enclosing "
7501 "%<host_data%> region", DECL_NAME (decl));
7502 nflags |= GOVD_MAP;
7503 if (octx->region_type == ORT_ACC_DATA
7504 && (n2->value & GOVD_MAP_0LEN_ARRAY))
7505 nflags |= GOVD_MAP_0LEN_ARRAY;
7506 goto found_outer;
7507 }
7508 }
7509 }
7510
7511 if ((nflags & ~(GOVD_MAP_TO_ONLY | GOVD_MAP_FROM_ONLY
7512 | GOVD_MAP_ALLOC_ONLY)) == flags)
7513 {
7514 tree type = TREE_TYPE (decl);
7515
7516 if (gimplify_omp_ctxp->target_firstprivatize_array_bases
7517 && lang_hooks.decls.omp_privatize_by_reference (decl))
7518 type = TREE_TYPE (type);
7519 if (!lang_hooks.types.omp_mappable_type (type))
7520 {
7521 error ("%qD referenced in target region does not have "
7522 "a mappable type", decl);
7523 nflags |= GOVD_MAP | GOVD_EXPLICIT;
7524 }
7525 else
7526 {
7527 if ((ctx->region_type & ORT_ACC) != 0)
7528 nflags = oacc_default_clause (ctx, decl, flags);
7529 else
7530 nflags |= GOVD_MAP;
7531 }
7532 }
7533 found_outer:
7534 omp_add_variable (ctx, decl, nflags);
7535 }
7536 else
7537 {
7538 /* If nothing changed, there's nothing left to do. */
7539 if ((n->value & flags) == flags)
7540 return ret;
7541 flags |= n->value;
7542 n->value = flags;
7543 }
7544 goto do_outer;
7545 }
7546
7547 if (n == NULL)
7548 {
7549 if (ctx->region_type == ORT_WORKSHARE
7550 || ctx->region_type == ORT_TASKGROUP
7551 || ctx->region_type == ORT_SIMD
7552 || ctx->region_type == ORT_ACC
7553 || (ctx->region_type & ORT_TARGET_DATA) != 0)
7554 goto do_outer;
7555
7556 flags = omp_default_clause (ctx, decl, in_code, flags);
7557
7558 if ((flags & GOVD_PRIVATE)
7559 && lang_hooks.decls.omp_private_outer_ref (decl))
7560 flags |= GOVD_PRIVATE_OUTER_REF;
7561
7562 omp_add_variable (ctx, decl, flags);
7563
7564 shared = (flags & GOVD_SHARED) != 0;
7565 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
7566 goto do_outer;
7567 }
7568
7569 if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
7570 && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
7571 && DECL_SIZE (decl))
7572 {
7573 if (TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
7574 {
7575 splay_tree_node n2;
7576 tree t = DECL_VALUE_EXPR (decl);
7577 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
7578 t = TREE_OPERAND (t, 0);
7579 gcc_assert (DECL_P (t));
7580 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
7581 n2->value |= GOVD_SEEN;
7582 }
7583 else if (lang_hooks.decls.omp_privatize_by_reference (decl)
7584 && TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)))
7585 && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))))
7586 != INTEGER_CST))
7587 {
7588 splay_tree_node n2;
7589 tree t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
7590 gcc_assert (DECL_P (t));
7591 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
7592 if (n2)
7593 omp_notice_variable (ctx, t, true);
7594 }
7595 }
7596
7597 if (ctx->region_type & ORT_ACC)
7598 /* For OpenACC, as remarked above, defer expansion. */
7599 shared = false;
7600 else
7601 shared = ((flags | n->value) & GOVD_SHARED) != 0;
7602 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
7603
7604 /* If nothing changed, there's nothing left to do. */
7605 if ((n->value & flags) == flags)
7606 return ret;
7607 flags |= n->value;
7608 n->value = flags;
7609
7610 do_outer:
7611 /* If the variable is private in the current context, then we don't
7612 need to propagate anything to an outer context. */
7613 if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
7614 return ret;
7615 if ((flags & (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7616 == (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7617 return ret;
7618 if ((flags & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
7619 | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7620 == (GOVD_LASTPRIVATE | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7621 return ret;
7622 if (ctx->outer_context
7623 && omp_notice_variable (ctx->outer_context, decl, in_code))
7624 return true;
7625 return ret;
7626 }
7627
7628 /* Verify that DECL is private within CTX. If there's specific information
7629 to the contrary in the innermost scope, generate an error. */
7630
7631 static bool
7632 omp_is_private (struct gimplify_omp_ctx *ctx, tree decl, int simd)
7633 {
7634 splay_tree_node n;
7635
7636 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7637 if (n != NULL)
7638 {
7639 if (n->value & GOVD_SHARED)
7640 {
7641 if (ctx == gimplify_omp_ctxp)
7642 {
7643 if (simd)
7644 error ("iteration variable %qE is predetermined linear",
7645 DECL_NAME (decl));
7646 else
7647 error ("iteration variable %qE should be private",
7648 DECL_NAME (decl));
7649 n->value = GOVD_PRIVATE;
7650 return true;
7651 }
7652 else
7653 return false;
7654 }
7655 else if ((n->value & GOVD_EXPLICIT) != 0
7656 && (ctx == gimplify_omp_ctxp
7657 || (ctx->region_type == ORT_COMBINED_PARALLEL
7658 && gimplify_omp_ctxp->outer_context == ctx)))
7659 {
7660 if ((n->value & GOVD_FIRSTPRIVATE) != 0)
7661 error ("iteration variable %qE should not be firstprivate",
7662 DECL_NAME (decl));
7663 else if ((n->value & GOVD_REDUCTION) != 0)
7664 error ("iteration variable %qE should not be reduction",
7665 DECL_NAME (decl));
7666 else if (simd != 1 && (n->value & GOVD_LINEAR) != 0)
7667 error ("iteration variable %qE should not be linear",
7668 DECL_NAME (decl));
7669 }
7670 return (ctx == gimplify_omp_ctxp
7671 || (ctx->region_type == ORT_COMBINED_PARALLEL
7672 && gimplify_omp_ctxp->outer_context == ctx));
7673 }
7674
7675 if (ctx->region_type != ORT_WORKSHARE
7676 && ctx->region_type != ORT_TASKGROUP
7677 && ctx->region_type != ORT_SIMD
7678 && ctx->region_type != ORT_ACC)
7679 return false;
7680 else if (ctx->outer_context)
7681 return omp_is_private (ctx->outer_context, decl, simd);
7682 return false;
7683 }
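
/* Illustrative example (hypothetical user code) of the diagnostics
   above:

     #pragma omp for firstprivate (i)
     for (i = 0; i < n; i++)
       ;
   // "iteration variable 'i' should not be firstprivate"
*/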
7684
7685 /* Return true if DECL is private within a parallel region
7686 that binds to the current construct's context or in parallel
7687 region's REDUCTION clause. */
7688
7689 static bool
7690 omp_check_private (struct gimplify_omp_ctx *ctx, tree decl, bool copyprivate)
7691 {
7692 splay_tree_node n;
7693
7694 do
7695 {
7696 ctx = ctx->outer_context;
7697 if (ctx == NULL)
7698 {
7699 if (is_global_var (decl))
7700 return false;
7701
7702 /* References might be private, but they might be shared too.
7703 When checking for copyprivate, assume they might be private;
7704 otherwise assume they might be shared. */
7705 if (copyprivate)
7706 return true;
7707
7708 if (lang_hooks.decls.omp_privatize_by_reference (decl))
7709 return false;
7710
7711 /* Treat C++ privatized non-static data members outside
7712 of the privatization the same. */
7713 if (omp_member_access_dummy_var (decl))
7714 return false;
7715
7716 return true;
7717 }
7718
7719 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
7720
7721 if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
7722 && (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0))
7723 continue;
7724
7725 if (n != NULL)
7726 {
7727 if ((n->value & GOVD_LOCAL) != 0
7728 && omp_member_access_dummy_var (decl))
7729 return false;
7730 return (n->value & GOVD_SHARED) == 0;
7731 }
7732 }
7733 while (ctx->region_type == ORT_WORKSHARE
7734 || ctx->region_type == ORT_TASKGROUP
7735 || ctx->region_type == ORT_SIMD
7736 || ctx->region_type == ORT_ACC);
7737 return false;
7738 }
7739
7740 /* Callback for walk_tree to find a DECL_EXPR for the given DECL. */
7741
7742 static tree
7743 find_decl_expr (tree *tp, int *walk_subtrees, void *data)
7744 {
7745 tree t = *tp;
7746
7747 /* If this is the DECL_EXPR for the DECL we are looking for, return it. */
7748 if (TREE_CODE (t) == DECL_EXPR && DECL_EXPR_DECL (t) == (tree) data)
7749 return t;
7750
7751 if (IS_TYPE_OR_DECL_P (t))
7752 *walk_subtrees = 0;
7753 return NULL_TREE;
7754 }
7755
7756 /* If *LIST_P contains any OpenMP depend clauses with iterators,
7757 lower all the depend clauses by populating the corresponding depend
7758 array. Returns 0 if there are no such depend clauses, 2 if all
7759 depend clauses should be removed, and 1 otherwise. */
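
/* For example (an illustrative sketch), an OpenMP 5.0 clause such as

     depend(iterator(i = 0 : n), in : a[i])

   is lowered here by storing the address of a[i], for each value of i,
   into the depend array built below.  */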
7760
7761 static int
7762 gimplify_omp_depend (tree *list_p, gimple_seq *pre_p)
7763 {
7764 tree c;
7765 gimple *g;
7766 size_t n[4] = { 0, 0, 0, 0 };
7767 bool unused[4];
7768 tree counts[4] = { NULL_TREE, NULL_TREE, NULL_TREE, NULL_TREE };
7769 tree last_iter = NULL_TREE, last_count = NULL_TREE;
7770 size_t i, j;
7771 location_t first_loc = UNKNOWN_LOCATION;
7772
7773 for (c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
7774 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
7775 {
7776 switch (OMP_CLAUSE_DEPEND_KIND (c))
7777 {
7778 case OMP_CLAUSE_DEPEND_IN:
7779 i = 2;
7780 break;
7781 case OMP_CLAUSE_DEPEND_OUT:
7782 case OMP_CLAUSE_DEPEND_INOUT:
7783 i = 0;
7784 break;
7785 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
7786 i = 1;
7787 break;
7788 case OMP_CLAUSE_DEPEND_DEPOBJ:
7789 i = 3;
7790 break;
7791 case OMP_CLAUSE_DEPEND_SOURCE:
7792 case OMP_CLAUSE_DEPEND_SINK:
7793 continue;
7794 default:
7795 gcc_unreachable ();
7796 }
7797 tree t = OMP_CLAUSE_DECL (c);
7798 if (first_loc == UNKNOWN_LOCATION)
7799 first_loc = OMP_CLAUSE_LOCATION (c);
7800 if (TREE_CODE (t) == TREE_LIST
7801 && TREE_PURPOSE (t)
7802 && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
7803 {
7804 if (TREE_PURPOSE (t) != last_iter)
7805 {
7806 tree tcnt = size_one_node;
7807 for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
7808 {
7809 if (gimplify_expr (&TREE_VEC_ELT (it, 1), pre_p, NULL,
7810 is_gimple_val, fb_rvalue) == GS_ERROR
7811 || gimplify_expr (&TREE_VEC_ELT (it, 2), pre_p, NULL,
7812 is_gimple_val, fb_rvalue) == GS_ERROR
7813 || gimplify_expr (&TREE_VEC_ELT (it, 3), pre_p, NULL,
7814 is_gimple_val, fb_rvalue) == GS_ERROR
7815 || (gimplify_expr (&TREE_VEC_ELT (it, 4), pre_p, NULL,
7816 is_gimple_val, fb_rvalue)
7817 == GS_ERROR))
7818 return 2;
7819 tree var = TREE_VEC_ELT (it, 0);
7820 tree begin = TREE_VEC_ELT (it, 1);
7821 tree end = TREE_VEC_ELT (it, 2);
7822 tree step = TREE_VEC_ELT (it, 3);
7823 tree orig_step = TREE_VEC_ELT (it, 4);
7824 tree type = TREE_TYPE (var);
7825 tree stype = TREE_TYPE (step);
7826 location_t loc = DECL_SOURCE_LOCATION (var);
7827 tree endmbegin;
7828 /* Compute count for this iterator as
7829 orig_step > 0
7830 ? (begin < end ? (end - begin + (step - 1)) / step : 0)
7831 : (begin > end ? (end - begin + (step + 1)) / step : 0)
7832 and compute product of those for the entire depend
7833 clause. */
7834 if (POINTER_TYPE_P (type))
7835 endmbegin = fold_build2_loc (loc, POINTER_DIFF_EXPR,
7836 stype, end, begin);
7837 else
7838 endmbegin = fold_build2_loc (loc, MINUS_EXPR, type,
7839 end, begin);
7840 tree stepm1 = fold_build2_loc (loc, MINUS_EXPR, stype,
7841 step,
7842 build_int_cst (stype, 1));
7843 tree stepp1 = fold_build2_loc (loc, PLUS_EXPR, stype, step,
7844 build_int_cst (stype, 1));
7845 tree pos = fold_build2_loc (loc, PLUS_EXPR, stype,
7846 unshare_expr (endmbegin),
7847 stepm1);
7848 pos = fold_build2_loc (loc, TRUNC_DIV_EXPR, stype,
7849 pos, step);
7850 tree neg = fold_build2_loc (loc, PLUS_EXPR, stype,
7851 endmbegin, stepp1);
7852 if (TYPE_UNSIGNED (stype))
7853 {
7854 neg = fold_build1_loc (loc, NEGATE_EXPR, stype, neg);
7855 step = fold_build1_loc (loc, NEGATE_EXPR, stype, step);
7856 }
7857 neg = fold_build2_loc (loc, TRUNC_DIV_EXPR, stype,
7858 neg, step);
7859 step = NULL_TREE;
7860 tree cond = fold_build2_loc (loc, LT_EXPR,
7861 boolean_type_node,
7862 begin, end);
7863 pos = fold_build3_loc (loc, COND_EXPR, stype, cond, pos,
7864 build_int_cst (stype, 0));
7865 cond = fold_build2_loc (loc, LT_EXPR, boolean_type_node,
7866 end, begin);
7867 neg = fold_build3_loc (loc, COND_EXPR, stype, cond, neg,
7868 build_int_cst (stype, 0));
7869 tree osteptype = TREE_TYPE (orig_step);
7870 cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
7871 orig_step,
7872 build_int_cst (osteptype, 0));
7873 tree cnt = fold_build3_loc (loc, COND_EXPR, stype,
7874 cond, pos, neg);
7875 cnt = fold_convert_loc (loc, sizetype, cnt);
7876 if (gimplify_expr (&cnt, pre_p, NULL, is_gimple_val,
7877 fb_rvalue) == GS_ERROR)
7878 return 2;
7879 tcnt = size_binop_loc (loc, MULT_EXPR, tcnt, cnt);
7880 }
7881 if (gimplify_expr (&tcnt, pre_p, NULL, is_gimple_val,
7882 fb_rvalue) == GS_ERROR)
7883 return 2;
7884 last_iter = TREE_PURPOSE (t);
7885 last_count = tcnt;
7886 }
7887 if (counts[i] == NULL_TREE)
7888 counts[i] = last_count;
7889 else
7890 counts[i] = size_binop_loc (OMP_CLAUSE_LOCATION (c),
7891 PLUS_EXPR, counts[i], last_count);
7892 }
7893 else
7894 n[i]++;
7895 }
7896 for (i = 0; i < 4; i++)
7897 if (counts[i])
7898 break;
7899 if (i == 4)
7900 return 0;
7901
7902 tree total = size_zero_node;
7903 for (i = 0; i < 4; i++)
7904 {
7905 unused[i] = counts[i] == NULL_TREE && n[i] == 0;
7906 if (counts[i] == NULL_TREE)
7907 counts[i] = size_zero_node;
7908 if (n[i])
7909 counts[i] = size_binop (PLUS_EXPR, counts[i], size_int (n[i]));
7910 if (gimplify_expr (&counts[i], pre_p, NULL, is_gimple_val,
7911 fb_rvalue) == GS_ERROR)
7912 return 2;
7913 total = size_binop (PLUS_EXPR, total, counts[i]);
7914 }
7915
7916 if (gimplify_expr (&total, pre_p, NULL, is_gimple_val, fb_rvalue)
7917 == GS_ERROR)
7918 return 2;
7919 bool is_old = unused[1] && unused[3];
7920 tree totalpx = size_binop (PLUS_EXPR, unshare_expr (total),
7921 size_int (is_old ? 1 : 4));
7922 tree type = build_array_type (ptr_type_node, build_index_type (totalpx));
7923 tree array = create_tmp_var_raw (type);
7924 TREE_ADDRESSABLE (array) = 1;
7925 if (!poly_int_tree_p (totalpx))
7926 {
7927 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (array)))
7928 gimplify_type_sizes (TREE_TYPE (array), pre_p);
7929 if (gimplify_omp_ctxp)
7930 {
7931 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
7932 while (ctx
7933 && (ctx->region_type == ORT_WORKSHARE
7934 || ctx->region_type == ORT_TASKGROUP
7935 || ctx->region_type == ORT_SIMD
7936 || ctx->region_type == ORT_ACC))
7937 ctx = ctx->outer_context;
7938 if (ctx)
7939 omp_add_variable (ctx, array, GOVD_LOCAL | GOVD_SEEN);
7940 }
7941 gimplify_vla_decl (array, pre_p);
7942 }
7943 else
7944 gimple_add_tmp_var (array);
7945 tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
7946 NULL_TREE);
7947 tree tem;
7948 if (!is_old)
7949 {
7950 tem = build2 (MODIFY_EXPR, void_type_node, r,
7951 build_int_cst (ptr_type_node, 0));
7952 gimplify_and_add (tem, pre_p);
7953 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
7954 NULL_TREE);
7955 }
7956 tem = build2 (MODIFY_EXPR, void_type_node, r,
7957 fold_convert (ptr_type_node, total));
7958 gimplify_and_add (tem, pre_p);
7959 for (i = 1; i < (is_old ? 2 : 4); i++)
7960 {
7961 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (i + !is_old),
7962 NULL_TREE, NULL_TREE);
7963 tem = build2 (MODIFY_EXPR, void_type_node, r, counts[i - 1]);
7964 gimplify_and_add (tem, pre_p);
7965 }
7966
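/* Observed layout of ARRAY at this point (a descriptive note, not a
   comment from the sources):
     new format:  [0] = 0, [1] = total, [2]..[4] = the per-kind counts,
                  addresses start at index 5;
     old format:  [0] = total, [1] = the out/inout count,
                  addresses start at index 2.
   CNTS[i] below tracks the index at which the next address of kind I
   will be stored.  */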
7967 tree cnts[4];
7968 for (j = 4; j; j--)
7969 if (!unused[j - 1])
7970 break;
7971 for (i = 0; i < 4; i++)
7972 {
7973 if (i && (i >= j || unused[i - 1]))
7974 {
7975 cnts[i] = cnts[i - 1];
7976 continue;
7977 }
7978 cnts[i] = create_tmp_var (sizetype);
7979 if (i == 0)
7980 g = gimple_build_assign (cnts[i], size_int (is_old ? 2 : 5));
7981 else
7982 {
7983 tree t;
7984 if (is_old)
7985 t = size_binop (PLUS_EXPR, counts[0], size_int (2));
7986 else
7987 t = size_binop (PLUS_EXPR, cnts[i - 1], counts[i - 1]);
7988 if (gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue)
7989 == GS_ERROR)
7990 return 2;
7991 g = gimple_build_assign (cnts[i], t);
7992 }
7993 gimple_seq_add_stmt (pre_p, g);
7994 }
7995
7996 last_iter = NULL_TREE;
7997 tree last_bind = NULL_TREE;
7998 tree *last_body = NULL;
7999 for (c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
8000 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
8001 {
8002 switch (OMP_CLAUSE_DEPEND_KIND (c))
8003 {
8004 case OMP_CLAUSE_DEPEND_IN:
8005 i = 2;
8006 break;
8007 case OMP_CLAUSE_DEPEND_OUT:
8008 case OMP_CLAUSE_DEPEND_INOUT:
8009 i = 0;
8010 break;
8011 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
8012 i = 1;
8013 break;
8014 case OMP_CLAUSE_DEPEND_DEPOBJ:
8015 i = 3;
8016 break;
8017 case OMP_CLAUSE_DEPEND_SOURCE:
8018 case OMP_CLAUSE_DEPEND_SINK:
8019 continue;
8020 default:
8021 gcc_unreachable ();
8022 }
8023 tree t = OMP_CLAUSE_DECL (c);
8024 if (TREE_CODE (t) == TREE_LIST
8025 && TREE_PURPOSE (t)
8026 && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
8027 {
8028 if (TREE_PURPOSE (t) != last_iter)
8029 {
8030 if (last_bind)
8031 gimplify_and_add (last_bind, pre_p);
8032 tree block = TREE_VEC_ELT (TREE_PURPOSE (t), 5);
8033 last_bind = build3 (BIND_EXPR, void_type_node,
8034 BLOCK_VARS (block), NULL, block);
8035 TREE_SIDE_EFFECTS (last_bind) = 1;
8036 SET_EXPR_LOCATION (last_bind, OMP_CLAUSE_LOCATION (c));
8037 tree *p = &BIND_EXPR_BODY (last_bind);
8038 for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
8039 {
8040 tree var = TREE_VEC_ELT (it, 0);
8041 tree begin = TREE_VEC_ELT (it, 1);
8042 tree end = TREE_VEC_ELT (it, 2);
8043 tree step = TREE_VEC_ELT (it, 3);
8044 tree orig_step = TREE_VEC_ELT (it, 4);
8045 tree type = TREE_TYPE (var);
8046 location_t loc = DECL_SOURCE_LOCATION (var);
8047 /* Emit:
8048 var = begin;
8049 goto cond_label;
8050 beg_label:
8051 ...
8052 var = var + step;
8053 cond_label:
8054 if (orig_step > 0) {
8055 if (var < end) goto beg_label;
8056 } else {
8057 if (var > end) goto beg_label;
8058 }
8059 for each iterator, with inner iterators added to
8060 the ... above. */
8061 tree beg_label = create_artificial_label (loc);
8062 tree cond_label = NULL_TREE;
8063 tem = build2_loc (loc, MODIFY_EXPR, void_type_node,
8064 var, begin);
8065 append_to_statement_list_force (tem, p);
8066 tem = build_and_jump (&cond_label);
8067 append_to_statement_list_force (tem, p);
8068 tem = build1 (LABEL_EXPR, void_type_node, beg_label);
8069 append_to_statement_list (tem, p);
8070 tree bind = build3 (BIND_EXPR, void_type_node, NULL_TREE,
8071 NULL_TREE, NULL_TREE);
8072 TREE_SIDE_EFFECTS (bind) = 1;
8073 SET_EXPR_LOCATION (bind, loc);
8074 append_to_statement_list_force (bind, p);
8075 if (POINTER_TYPE_P (type))
8076 tem = build2_loc (loc, POINTER_PLUS_EXPR, type,
8077 var, fold_convert_loc (loc, sizetype,
8078 step));
8079 else
8080 tem = build2_loc (loc, PLUS_EXPR, type, var, step);
8081 tem = build2_loc (loc, MODIFY_EXPR, void_type_node,
8082 var, tem);
8083 append_to_statement_list_force (tem, p);
8084 tem = build1 (LABEL_EXPR, void_type_node, cond_label);
8085 append_to_statement_list (tem, p);
8086 tree cond = fold_build2_loc (loc, LT_EXPR,
8087 boolean_type_node,
8088 var, end);
8089 tree pos
8090 = fold_build3_loc (loc, COND_EXPR, void_type_node,
8091 cond, build_and_jump (&beg_label),
8092 void_node);
8093 cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
8094 var, end);
8095 tree neg
8096 = fold_build3_loc (loc, COND_EXPR, void_type_node,
8097 cond, build_and_jump (&beg_label),
8098 void_node);
8099 tree osteptype = TREE_TYPE (orig_step);
8100 cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
8101 orig_step,
8102 build_int_cst (osteptype, 0));
8103 tem = fold_build3_loc (loc, COND_EXPR, void_type_node,
8104 cond, pos, neg);
8105 append_to_statement_list_force (tem, p);
8106 p = &BIND_EXPR_BODY (bind);
8107 }
8108 last_body = p;
8109 }
8110 last_iter = TREE_PURPOSE (t);
8111 if (TREE_CODE (TREE_VALUE (t)) == COMPOUND_EXPR)
8112 {
8113 append_to_statement_list (TREE_OPERAND (TREE_VALUE (t),
8114 0), last_body);
8115 TREE_VALUE (t) = TREE_OPERAND (TREE_VALUE (t), 1);
8116 }
8117 if (error_operand_p (TREE_VALUE (t)))
8118 return 2;
8119 TREE_VALUE (t) = build_fold_addr_expr (TREE_VALUE (t));
8120 r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
8121 NULL_TREE, NULL_TREE);
8122 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
8123 void_type_node, r, TREE_VALUE (t));
8124 append_to_statement_list_force (tem, last_body);
8125 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
8126 void_type_node, cnts[i],
8127 size_binop (PLUS_EXPR, cnts[i], size_int (1)));
8128 append_to_statement_list_force (tem, last_body);
8129 TREE_VALUE (t) = null_pointer_node;
8130 }
8131 else
8132 {
8133 if (last_bind)
8134 {
8135 gimplify_and_add (last_bind, pre_p);
8136 last_bind = NULL_TREE;
8137 }
8138 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
8139 {
8140 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
8141 NULL, is_gimple_val, fb_rvalue);
8142 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
8143 }
8144 if (error_operand_p (OMP_CLAUSE_DECL (c)))
8145 return 2;
8146 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
8147 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
8148 is_gimple_val, fb_rvalue) == GS_ERROR)
8149 return 2;
8150 r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
8151 NULL_TREE, NULL_TREE);
8152 tem = build2 (MODIFY_EXPR, void_type_node, r, OMP_CLAUSE_DECL (c));
8153 gimplify_and_add (tem, pre_p);
8154 g = gimple_build_assign (cnts[i], size_binop (PLUS_EXPR, cnts[i],
8155 size_int (1)));
8156 gimple_seq_add_stmt (pre_p, g);
8157 }
8158 }
8159 if (last_bind)
8160 gimplify_and_add (last_bind, pre_p);
8161 tree cond = boolean_false_node;
8162 if (is_old)
8163 {
8164 if (!unused[0])
8165 cond = build2_loc (first_loc, NE_EXPR, boolean_type_node, cnts[0],
8166 size_binop_loc (first_loc, PLUS_EXPR, counts[0],
8167 size_int (2)));
8168 if (!unused[2])
8169 cond = build2_loc (first_loc, TRUTH_OR_EXPR, boolean_type_node, cond,
8170 build2_loc (first_loc, NE_EXPR, boolean_type_node,
8171 cnts[2],
8172 size_binop_loc (first_loc, PLUS_EXPR,
8173 totalpx,
8174 size_int (1))));
8175 }
8176 else
8177 {
8178 tree prev = size_int (5);
8179 for (i = 0; i < 4; i++)
8180 {
8181 if (unused[i])
8182 continue;
8183 prev = size_binop_loc (first_loc, PLUS_EXPR, counts[i], prev);
8184 cond = build2_loc (first_loc, TRUTH_OR_EXPR, boolean_type_node, cond,
8185 build2_loc (first_loc, NE_EXPR, boolean_type_node,
8186 cnts[i], unshare_expr (prev)));
8187 }
8188 }
8189 tem = build3_loc (first_loc, COND_EXPR, void_type_node, cond,
8190 build_call_expr_loc (first_loc,
8191 builtin_decl_explicit (BUILT_IN_TRAP),
8192 0), void_node);
8193 gimplify_and_add (tem, pre_p);
8194 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
8195 OMP_CLAUSE_DEPEND_KIND (c) = OMP_CLAUSE_DEPEND_LAST;
8196 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
8197 OMP_CLAUSE_CHAIN (c) = *list_p;
8198 *list_p = c;
8199 return 1;
8200 }
8201
8202 /* Insert a GOMP_MAP_ALLOC or GOMP_MAP_RELEASE node following a
8203 GOMP_MAP_STRUCT mapping. C is an always_pointer mapping. STRUCT_NODE is
8204 the struct node to insert the new mapping after (when the struct node is
8205 initially created). PREV_NODE is the first of two or three mappings for a
8206 pointer, and is either:
8207 - the node before C, when a pair of mappings is used, e.g. for a C/C++
8208 array section.
8209 - not the node before C. This is true when we have a reference-to-pointer
8210 type (with a mapping for the reference and for the pointer), or for
8211 Fortran derived-type mappings with a GOMP_MAP_TO_PSET.
8212 If SCP is non-null, the new node is inserted before *SCP.
8213 If SCP is null, the new node is inserted before PREV_NODE.
8214 The return value is:
8215 - PREV_NODE, if SCP is non-null.
8216 - The newly-created ALLOC or RELEASE node, if SCP is null.
8217 - The second newly-created ALLOC or RELEASE node, if we are mapping a
8218 reference to a pointer. */
8219
8220 static tree
8221 insert_struct_comp_map (enum tree_code code, tree c, tree struct_node,
8222 tree prev_node, tree *scp)
8223 {
8224 enum gomp_map_kind mkind
8225 = (code == OMP_TARGET_EXIT_DATA || code == OACC_EXIT_DATA)
8226 ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
8227
8228 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_MAP);
8229 tree cl = scp ? prev_node : c2;
8230 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
8231 OMP_CLAUSE_DECL (c2) = unshare_expr (OMP_CLAUSE_DECL (c));
8232 OMP_CLAUSE_CHAIN (c2) = scp ? *scp : prev_node;
8233 if (OMP_CLAUSE_CHAIN (prev_node) != c
8234 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (prev_node)) == OMP_CLAUSE_MAP
8235 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (prev_node))
8236 == GOMP_MAP_TO_PSET))
8237 OMP_CLAUSE_SIZE (c2) = OMP_CLAUSE_SIZE (OMP_CLAUSE_CHAIN (prev_node));
8238 else
8239 OMP_CLAUSE_SIZE (c2) = TYPE_SIZE_UNIT (ptr_type_node);
8240 if (struct_node)
8241 OMP_CLAUSE_CHAIN (struct_node) = c2;
8242
8243 /* We might need to create an additional mapping if we have a reference to a
8244 pointer (in C++). Don't do this if the following node is something other
8245 than a GOMP_MAP_ALWAYS_POINTER or GOMP_MAP_ATTACH_DETACH, e.g. a GOMP_MAP_TO_PSET. */
8246 if (OMP_CLAUSE_CHAIN (prev_node) != c
8247 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (prev_node)) == OMP_CLAUSE_MAP
8248 && ((OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (prev_node))
8249 == GOMP_MAP_ALWAYS_POINTER)
8250 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (prev_node))
8251 == GOMP_MAP_ATTACH_DETACH)))
8252 {
8253 tree c4 = OMP_CLAUSE_CHAIN (prev_node);
8254 tree c3 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_MAP);
8255 OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
8256 OMP_CLAUSE_DECL (c3) = unshare_expr (OMP_CLAUSE_DECL (c4));
8257 OMP_CLAUSE_SIZE (c3) = TYPE_SIZE_UNIT (ptr_type_node);
8258 OMP_CLAUSE_CHAIN (c3) = prev_node;
8259 if (!scp)
8260 OMP_CLAUSE_CHAIN (c2) = c3;
8261 else
8262 cl = c3;
8263 }
8264
8265 if (scp)
8266 *scp = c2;
8267
8268 return cl;
8269 }
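
/* An illustrative sketch of the common case (SCP null, no
   reference-to-pointer mapping): given the incoming chain

     ... -> STRUCT_NODE -> PREV_NODE -> C -> ...

   the new ALLOC/RELEASE node C2 is spliced in as

     ... -> STRUCT_NODE -> C2 -> PREV_NODE -> C -> ...

   and C2 is returned.  */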
8270
8271 /* Strip ARRAY_REFS or an indirect ref off BASE, find the containing object,
8272 and set *BITPOSP to the bit offset and *POFFSETP to the byte offset of the access.
8273 If BASE_REF is non-NULL and the containing object is a reference, set
8274 *BASE_REF to that reference before dereferencing the object.
8275 If BASE_REF is NULL, check that the containing object is a COMPONENT_REF or
8276 has array type, else return NULL. */
8277
8278 static tree
8279 extract_base_bit_offset (tree base, tree *base_ref, poly_int64 *bitposp,
8280 poly_offset_int *poffsetp)
8281 {
8282 tree offset;
8283 poly_int64 bitsize, bitpos;
8284 machine_mode mode;
8285 int unsignedp, reversep, volatilep = 0;
8286 poly_offset_int poffset;
8287
8288 if (base_ref)
8289 {
8290 *base_ref = NULL_TREE;
8291
8292 while (TREE_CODE (base) == ARRAY_REF)
8293 base = TREE_OPERAND (base, 0);
8294
8295 if (TREE_CODE (base) == INDIRECT_REF)
8296 base = TREE_OPERAND (base, 0);
8297 }
8298 else
8299 {
8300 if (TREE_CODE (base) == ARRAY_REF)
8301 {
8302 while (TREE_CODE (base) == ARRAY_REF)
8303 base = TREE_OPERAND (base, 0);
8304 if (TREE_CODE (base) != COMPONENT_REF
8305 || TREE_CODE (TREE_TYPE (base)) != ARRAY_TYPE)
8306 return NULL_TREE;
8307 }
8308 else if (TREE_CODE (base) == INDIRECT_REF
8309 && TREE_CODE (TREE_OPERAND (base, 0)) == COMPONENT_REF
8310 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (base, 0)))
8311 == REFERENCE_TYPE))
8312 base = TREE_OPERAND (base, 0);
8313 }
8314
8315 base = get_inner_reference (base, &bitsize, &bitpos, &offset, &mode,
8316 &unsignedp, &reversep, &volatilep);
8317
8318 tree orig_base = base;
8319
8320 if ((TREE_CODE (base) == INDIRECT_REF
8321 || (TREE_CODE (base) == MEM_REF
8322 && integer_zerop (TREE_OPERAND (base, 1))))
8323 && DECL_P (TREE_OPERAND (base, 0))
8324 && TREE_CODE (TREE_TYPE (TREE_OPERAND (base, 0))) == REFERENCE_TYPE)
8325 base = TREE_OPERAND (base, 0);
8326
8327 gcc_assert (offset == NULL_TREE || poly_int_tree_p (offset));
8328
8329 if (offset)
8330 poffset = wi::to_poly_offset (offset);
8331 else
8332 poffset = 0;
8333
8334 if (maybe_ne (bitpos, 0))
8335 poffset += bits_to_bytes_round_down (bitpos);
8336
8337 *bitposp = bitpos;
8338 *poffsetp = poffset;
8339
8340 /* Set *BASE_REF if BASE was a dereferenced reference variable. */
8341 if (base_ref && orig_base != base)
8342 *base_ref = orig_base;
8343
8344 return base;
8345 }
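
/* For example (an illustrative sketch): for BASE 's.f', where 's' is a
   struct variable, this returns 's' and sets *BITPOSP to the bit offset
   of 'f' within 's' and *POFFSETP to the corresponding byte offset.  */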
8346
8347 /* Scan the OMP clauses in *LIST_P, installing mappings into a new
8348 omp context and noticing used variables in the enclosing contexts. */
8349
8350 static void
8351 gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
8352 enum omp_region_type region_type,
8353 enum tree_code code)
8354 {
8355 struct gimplify_omp_ctx *ctx, *outer_ctx;
8356 tree c;
8357 hash_map<tree, tree> *struct_map_to_clause = NULL;
8358 hash_set<tree> *struct_deref_set = NULL;
8359 tree *prev_list_p = NULL, *orig_list_p = list_p;
8360 int handled_depend_iterators = -1;
8361 int nowait = -1;
8362
8363 ctx = new_omp_context (region_type);
8364 ctx->code = code;
8365 outer_ctx = ctx->outer_context;
8366 if (code == OMP_TARGET)
8367 {
8368 if (!lang_GNU_Fortran ())
8369 ctx->defaultmap[GDMK_POINTER] = GOVD_MAP | GOVD_MAP_0LEN_ARRAY;
8370 ctx->defaultmap[GDMK_SCALAR] = GOVD_FIRSTPRIVATE;
8371 }
8372 if (!lang_GNU_Fortran ())
8373 switch (code)
8374 {
8375 case OMP_TARGET:
8376 case OMP_TARGET_DATA:
8377 case OMP_TARGET_ENTER_DATA:
8378 case OMP_TARGET_EXIT_DATA:
8379 case OACC_DECLARE:
8380 case OACC_HOST_DATA:
8381 case OACC_PARALLEL:
8382 case OACC_KERNELS:
8383 ctx->target_firstprivatize_array_bases = true; /* FALLTHRU */
8384 default:
8385 break;
8386 }
8387
8388 while ((c = *list_p) != NULL)
8389 {
8390 bool remove = false;
8391 bool notice_outer = true;
8392 const char *check_non_private = NULL;
8393 unsigned int flags;
8394 tree decl;
8395
8396 switch (OMP_CLAUSE_CODE (c))
8397 {
8398 case OMP_CLAUSE_PRIVATE:
8399 flags = GOVD_PRIVATE | GOVD_EXPLICIT;
8400 if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
8401 {
8402 flags |= GOVD_PRIVATE_OUTER_REF;
8403 OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
8404 }
8405 else
8406 notice_outer = false;
8407 goto do_add;
8408 case OMP_CLAUSE_SHARED:
8409 flags = GOVD_SHARED | GOVD_EXPLICIT;
8410 goto do_add;
8411 case OMP_CLAUSE_FIRSTPRIVATE:
8412 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
8413 check_non_private = "firstprivate";
8414 goto do_add;
8415 case OMP_CLAUSE_LASTPRIVATE:
8416 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
8417 switch (code)
8418 {
8419 case OMP_DISTRIBUTE:
8420 error_at (OMP_CLAUSE_LOCATION (c),
8421 "conditional %<lastprivate%> clause on "
8422 "%qs construct", "distribute");
8423 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
8424 break;
8425 case OMP_TASKLOOP:
8426 error_at (OMP_CLAUSE_LOCATION (c),
8427 "conditional %<lastprivate%> clause on "
8428 "%qs construct", "taskloop");
8429 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
8430 break;
8431 default:
8432 break;
8433 }
8434 flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
8435 if (code != OMP_LOOP)
8436 check_non_private = "lastprivate";
8437 decl = OMP_CLAUSE_DECL (c);
8438 if (error_operand_p (decl))
8439 goto do_add;
8440 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
8441 && !lang_hooks.decls.omp_scalar_p (decl))
8442 {
8443 error_at (OMP_CLAUSE_LOCATION (c),
8444 "non-scalar variable %qD in conditional "
8445 "%<lastprivate%> clause", decl);
8446 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
8447 }
8448 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
8449 flags |= GOVD_LASTPRIVATE_CONDITIONAL;
8450 if (outer_ctx
8451 && (outer_ctx->region_type == ORT_COMBINED_PARALLEL
8452 || ((outer_ctx->region_type & ORT_COMBINED_TEAMS)
8453 == ORT_COMBINED_TEAMS))
8454 && splay_tree_lookup (outer_ctx->variables,
8455 (splay_tree_key) decl) == NULL)
8456 {
8457 omp_add_variable (outer_ctx, decl, GOVD_SHARED | GOVD_SEEN);
8458 if (outer_ctx->outer_context)
8459 omp_notice_variable (outer_ctx->outer_context, decl, true);
8460 }
8461 else if (outer_ctx
8462 && (outer_ctx->region_type & ORT_TASK) != 0
8463 && outer_ctx->combined_loop
8464 && splay_tree_lookup (outer_ctx->variables,
8465 (splay_tree_key) decl) == NULL)
8466 {
8467 omp_add_variable (outer_ctx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
8468 if (outer_ctx->outer_context)
8469 omp_notice_variable (outer_ctx->outer_context, decl, true);
8470 }
8471 else if (outer_ctx
8472 && (outer_ctx->region_type == ORT_WORKSHARE
8473 || outer_ctx->region_type == ORT_ACC)
8474 && outer_ctx->combined_loop
8475 && splay_tree_lookup (outer_ctx->variables,
8476 (splay_tree_key) decl) == NULL
8477 && !omp_check_private (outer_ctx, decl, false))
8478 {
8479 omp_add_variable (outer_ctx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
8480 if (outer_ctx->outer_context
8481 && (outer_ctx->outer_context->region_type
8482 == ORT_COMBINED_PARALLEL)
8483 && splay_tree_lookup (outer_ctx->outer_context->variables,
8484 (splay_tree_key) decl) == NULL)
8485 {
8486 struct gimplify_omp_ctx *octx = outer_ctx->outer_context;
8487 omp_add_variable (octx, decl, GOVD_SHARED | GOVD_SEEN);
8488 if (octx->outer_context)
8489 {
8490 octx = octx->outer_context;
8491 if (octx->region_type == ORT_WORKSHARE
8492 && octx->combined_loop
8493 && splay_tree_lookup (octx->variables,
8494 (splay_tree_key) decl) == NULL
8495 && !omp_check_private (octx, decl, false))
8496 {
8497 omp_add_variable (octx, decl,
8498 GOVD_LASTPRIVATE | GOVD_SEEN);
8499 octx = octx->outer_context;
8500 if (octx
8501 && ((octx->region_type & ORT_COMBINED_TEAMS)
8502 == ORT_COMBINED_TEAMS)
8503 && (splay_tree_lookup (octx->variables,
8504 (splay_tree_key) decl)
8505 == NULL))
8506 {
8507 omp_add_variable (octx, decl,
8508 GOVD_SHARED | GOVD_SEEN);
8509 octx = octx->outer_context;
8510 }
8511 }
8512 if (octx)
8513 omp_notice_variable (octx, decl, true);
8514 }
8515 }
8516 else if (outer_ctx->outer_context)
8517 omp_notice_variable (outer_ctx->outer_context, decl, true);
8518 }
8519 goto do_add;
8520 case OMP_CLAUSE_REDUCTION:
8521 if (OMP_CLAUSE_REDUCTION_TASK (c))
8522 {
8523 if (region_type == ORT_WORKSHARE)
8524 {
8525 if (nowait == -1)
8526 nowait = omp_find_clause (*list_p,
8527 OMP_CLAUSE_NOWAIT) != NULL_TREE;
8528 if (nowait
8529 && (outer_ctx == NULL
8530 || outer_ctx->region_type != ORT_COMBINED_PARALLEL))
8531 {
8532 error_at (OMP_CLAUSE_LOCATION (c),
8533 "%<task%> reduction modifier on a construct "
8534 "with a %<nowait%> clause");
8535 OMP_CLAUSE_REDUCTION_TASK (c) = 0;
8536 }
8537 }
8538 else if ((region_type & ORT_PARALLEL) != ORT_PARALLEL)
8539 {
8540 error_at (OMP_CLAUSE_LOCATION (c),
8541 "invalid %<task%> reduction modifier on construct "
8542 "other than %<parallel%>, %<for%> or %<sections%>");
8543 OMP_CLAUSE_REDUCTION_TASK (c) = 0;
8544 }
8545 }
8546 if (OMP_CLAUSE_REDUCTION_INSCAN (c))
8547 switch (code)
8548 {
8549 case OMP_SECTIONS:
8550 error_at (OMP_CLAUSE_LOCATION (c),
8551 "%<inscan%> %<reduction%> clause on "
8552 "%qs construct", "sections");
8553 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
8554 break;
8555 case OMP_PARALLEL:
8556 error_at (OMP_CLAUSE_LOCATION (c),
8557 "%<inscan%> %<reduction%> clause on "
8558 "%qs construct", "parallel");
8559 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
8560 break;
8561 case OMP_TEAMS:
8562 error_at (OMP_CLAUSE_LOCATION (c),
8563 "%<inscan%> %<reduction%> clause on "
8564 "%qs construct", "teams");
8565 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
8566 break;
8567 case OMP_TASKLOOP:
8568 error_at (OMP_CLAUSE_LOCATION (c),
8569 "%<inscan%> %<reduction%> clause on "
8570 "%qs construct", "taskloop");
8571 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
8572 break;
8573 default:
8574 break;
8575 }
8576 /* FALLTHRU */
8577 case OMP_CLAUSE_IN_REDUCTION:
8578 case OMP_CLAUSE_TASK_REDUCTION:
8579 flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
8580 /* OpenACC permits reductions on private variables. */
8581 if (!(region_type & ORT_ACC)
8582 /* taskgroup is actually not a worksharing region. */
8583 && code != OMP_TASKGROUP)
8584 check_non_private = omp_clause_code_name[OMP_CLAUSE_CODE (c)];
8585 decl = OMP_CLAUSE_DECL (c);
8586 if (TREE_CODE (decl) == MEM_REF)
8587 {
8588 tree type = TREE_TYPE (decl);
8589 if (gimplify_expr (&TYPE_MAX_VALUE (TYPE_DOMAIN (type)), pre_p,
8590 NULL, is_gimple_val, fb_rvalue, false)
8591 == GS_ERROR)
8592 {
8593 remove = true;
8594 break;
8595 }
8596 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
8597 if (DECL_P (v))
8598 {
8599 omp_firstprivatize_variable (ctx, v);
8600 omp_notice_variable (ctx, v, true);
8601 }
8602 decl = TREE_OPERAND (decl, 0);
8603 if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
8604 {
8605 if (gimplify_expr (&TREE_OPERAND (decl, 1), pre_p,
8606 NULL, is_gimple_val, fb_rvalue, false)
8607 == GS_ERROR)
8608 {
8609 remove = true;
8610 break;
8611 }
8612 v = TREE_OPERAND (decl, 1);
8613 if (DECL_P (v))
8614 {
8615 omp_firstprivatize_variable (ctx, v);
8616 omp_notice_variable (ctx, v, true);
8617 }
8618 decl = TREE_OPERAND (decl, 0);
8619 }
8620 if (TREE_CODE (decl) == ADDR_EXPR
8621 || TREE_CODE (decl) == INDIRECT_REF)
8622 decl = TREE_OPERAND (decl, 0);
8623 }
8624 goto do_add_decl;
8625 case OMP_CLAUSE_LINEAR:
8626 if (gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c), pre_p, NULL,
8627 is_gimple_val, fb_rvalue) == GS_ERROR)
8628 {
8629 remove = true;
8630 break;
8631 }
8632 else
8633 {
8634 if (code == OMP_SIMD
8635 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
8636 {
8637 struct gimplify_omp_ctx *octx = outer_ctx;
8638 if (octx
8639 && octx->region_type == ORT_WORKSHARE
8640 && octx->combined_loop
8641 && !octx->distribute)
8642 {
8643 if (octx->outer_context
8644 && (octx->outer_context->region_type
8645 == ORT_COMBINED_PARALLEL))
8646 octx = octx->outer_context->outer_context;
8647 else
8648 octx = octx->outer_context;
8649 }
8650 if (octx
8651 && octx->region_type == ORT_WORKSHARE
8652 && octx->combined_loop
8653 && octx->distribute)
8654 {
8655 error_at (OMP_CLAUSE_LOCATION (c),
8656 "%<linear%> clause for variable other than "
8657 "loop iterator specified on construct "
8658 "combined with %<distribute%>");
8659 remove = true;
8660 break;
8661 }
8662 }
8663 /* For combined #pragma omp parallel for simd, need to put
8664 lastprivate and perhaps firstprivate too on the
8665 parallel. Similarly for #pragma omp for simd. */
8666 struct gimplify_omp_ctx *octx = outer_ctx;
8667 decl = NULL_TREE;
8668 do
8669 {
8670 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
8671 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
8672 break;
8673 decl = OMP_CLAUSE_DECL (c);
8674 if (error_operand_p (decl))
8675 {
8676 decl = NULL_TREE;
8677 break;
8678 }
8679 flags = GOVD_SEEN;
8680 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
8681 flags |= GOVD_FIRSTPRIVATE;
8682 if (!OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
8683 flags |= GOVD_LASTPRIVATE;
8684 if (octx
8685 && octx->region_type == ORT_WORKSHARE
8686 && octx->combined_loop)
8687 {
8688 if (octx->outer_context
8689 && (octx->outer_context->region_type
8690 == ORT_COMBINED_PARALLEL))
8691 octx = octx->outer_context;
8692 else if (omp_check_private (octx, decl, false))
8693 break;
8694 }
8695 else if (octx
8696 && (octx->region_type & ORT_TASK) != 0
8697 && octx->combined_loop)
8698 ;
8699 else if (octx
8700 && octx->region_type == ORT_COMBINED_PARALLEL
8701 && ctx->region_type == ORT_WORKSHARE
8702 && octx == outer_ctx)
8703 flags = GOVD_SEEN | GOVD_SHARED;
8704 else if (octx
8705 && ((octx->region_type & ORT_COMBINED_TEAMS)
8706 == ORT_COMBINED_TEAMS))
8707 flags = GOVD_SEEN | GOVD_SHARED;
8708 else if (octx
8709 && octx->region_type == ORT_COMBINED_TARGET)
8710 {
8711 flags &= ~GOVD_LASTPRIVATE;
8712 if (flags == GOVD_SEEN)
8713 break;
8714 }
8715 else
8716 break;
8717 splay_tree_node on
8718 = splay_tree_lookup (octx->variables,
8719 (splay_tree_key) decl);
8720 if (on && (on->value & GOVD_DATA_SHARE_CLASS) != 0)
8721 {
8722 octx = NULL;
8723 break;
8724 }
8725 omp_add_variable (octx, decl, flags);
8726 if (octx->outer_context == NULL)
8727 break;
8728 octx = octx->outer_context;
8729 }
8730 while (1);
8731 if (octx
8732 && decl
8733 && (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
8734 || !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
8735 omp_notice_variable (octx, decl, true);
8736 }
8737 flags = GOVD_LINEAR | GOVD_EXPLICIT;
8738 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
8739 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
8740 {
8741 notice_outer = false;
8742 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
8743 }
8744 goto do_add;
8745
8746 case OMP_CLAUSE_MAP:
8747 decl = OMP_CLAUSE_DECL (c);
8748 if (error_operand_p (decl))
8749 remove = true;
8750 switch (code)
8751 {
8752 case OMP_TARGET:
8753 break;
8754 case OACC_DATA:
8755 if (TREE_CODE (TREE_TYPE (decl)) != ARRAY_TYPE)
8756 break;
8757 /* FALLTHRU */
8758 case OMP_TARGET_DATA:
8759 case OMP_TARGET_ENTER_DATA:
8760 case OMP_TARGET_EXIT_DATA:
8761 case OACC_HOST_DATA:
8762 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
8763 || (OMP_CLAUSE_MAP_KIND (c)
8764 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
8765 /* For target {,enter ,exit }data only the array slice is
8766 mapped, but not the pointer to it. */
8767 remove = true;
8768 break;
8769 case OACC_ENTER_DATA:
8770 case OACC_EXIT_DATA:
8771 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
8772 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_TO_PSET
8773 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
8774 || (OMP_CLAUSE_MAP_KIND (c)
8775 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
8776 remove = true;
8777 break;
8778 default:
8779 break;
8780 }
8781 /* For Fortran, not only the pointer to the data is mapped but also
8782 the address of the pointer, the array descriptor etc.; for
8783 'exit data' - and in particular for 'delete:' - having an 'alloc:'
8784 does not make sense. Likewise, for 'update' only transferring the
8785 data itself is needed as the rest has been handled in previous
8786 directives. */
8787 if ((code == OMP_TARGET_EXIT_DATA || code == OMP_TARGET_UPDATE)
8788 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
8789 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_TO_PSET))
8790 remove = true;
8791
8792 if (remove)
8793 break;
8794 if (DECL_P (decl) && outer_ctx && (region_type & ORT_ACC))
8795 {
8796 struct gimplify_omp_ctx *octx;
8797 for (octx = outer_ctx; octx; octx = octx->outer_context)
8798 {
8799 if (octx->region_type != ORT_ACC_HOST_DATA)
8800 break;
8801 splay_tree_node n2
8802 = splay_tree_lookup (octx->variables,
8803 (splay_tree_key) decl);
8804 if (n2)
8805 error_at (OMP_CLAUSE_LOCATION (c), "variable %qE "
8806 "declared in enclosing %<host_data%> region",
8807 DECL_NAME (decl));
8808 }
8809 }
8810 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
8811 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
8812 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
8813 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
8814 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
8815 {
8816 remove = true;
8817 break;
8818 }
8819 else if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
8820 || (OMP_CLAUSE_MAP_KIND (c)
8821 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
8822 && TREE_CODE (OMP_CLAUSE_SIZE (c)) != INTEGER_CST)
8823 {
8824 OMP_CLAUSE_SIZE (c)
8825 = get_initialized_tmp_var (OMP_CLAUSE_SIZE (c), pre_p, NULL,
8826 false);
8827 omp_add_variable (ctx, OMP_CLAUSE_SIZE (c),
8828 GOVD_FIRSTPRIVATE | GOVD_SEEN);
8829 }
8830 if (!DECL_P (decl))
8831 {
8832 tree d = decl, *pd;
8833 if (TREE_CODE (d) == ARRAY_REF)
8834 {
8835 while (TREE_CODE (d) == ARRAY_REF)
8836 d = TREE_OPERAND (d, 0);
8837 if (TREE_CODE (d) == COMPONENT_REF
8838 && TREE_CODE (TREE_TYPE (d)) == ARRAY_TYPE)
8839 decl = d;
8840 }
8841 pd = &OMP_CLAUSE_DECL (c);
8842 if (d == decl
8843 && TREE_CODE (decl) == INDIRECT_REF
8844 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
8845 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
8846 == REFERENCE_TYPE))
8847 {
8848 pd = &TREE_OPERAND (decl, 0);
8849 decl = TREE_OPERAND (decl, 0);
8850 }
8851 bool indir_p = false;
8852 tree orig_decl = decl;
8853 tree decl_ref = NULL_TREE;
8854 if ((region_type & ORT_ACC) != 0
8855 && TREE_CODE (*pd) == COMPONENT_REF
8856 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH
8857 && code != OACC_UPDATE)
8858 {
8859 while (TREE_CODE (decl) == COMPONENT_REF)
8860 {
8861 decl = TREE_OPERAND (decl, 0);
8862 if ((TREE_CODE (decl) == MEM_REF
8863 && integer_zerop (TREE_OPERAND (decl, 1)))
8864 || INDIRECT_REF_P (decl))
8865 {
8866 indir_p = true;
8867 decl = TREE_OPERAND (decl, 0);
8868 }
8869 if (TREE_CODE (decl) == INDIRECT_REF
8870 && DECL_P (TREE_OPERAND (decl, 0))
8871 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
8872 == REFERENCE_TYPE))
8873 {
8874 decl_ref = decl;
8875 decl = TREE_OPERAND (decl, 0);
8876 }
8877 }
8878 }
8879 else if (TREE_CODE (decl) == COMPONENT_REF)
8880 {
8881 while (TREE_CODE (decl) == COMPONENT_REF)
8882 decl = TREE_OPERAND (decl, 0);
8883 if (TREE_CODE (decl) == INDIRECT_REF
8884 && DECL_P (TREE_OPERAND (decl, 0))
8885 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
8886 == REFERENCE_TYPE))
8887 decl = TREE_OPERAND (decl, 0);
8888 }
8889 if (decl != orig_decl && DECL_P (decl) && indir_p)
8890 {
8891 gomp_map_kind k = (code == OACC_EXIT_DATA) ? GOMP_MAP_DETACH
8892 : GOMP_MAP_ATTACH;
8893 /* We have a dereference of a struct member. Make this an
8894 attach/detach operation, and ensure the base pointer is
8895 mapped as a FIRSTPRIVATE_POINTER. */
8896 OMP_CLAUSE_SET_MAP_KIND (c, k);
8897 flags = GOVD_MAP | GOVD_SEEN | GOVD_EXPLICIT;
8898 tree next_clause = OMP_CLAUSE_CHAIN (c);
8899 if (k == GOMP_MAP_ATTACH
8900 && code != OACC_ENTER_DATA
8901 && (!next_clause
8902 || (OMP_CLAUSE_CODE (next_clause) != OMP_CLAUSE_MAP)
8903 || (OMP_CLAUSE_MAP_KIND (next_clause)
8904 != GOMP_MAP_POINTER)
8905 || OMP_CLAUSE_DECL (next_clause) != decl)
8906 && (!struct_deref_set
8907 || !struct_deref_set->contains (decl)))
8908 {
8909 if (!struct_deref_set)
8910 struct_deref_set = new hash_set<tree> ();
8911 /* As well as the attach, we also need a
8912 FIRSTPRIVATE_POINTER clause to properly map the
8913 pointer to the struct base. */
8914 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8915 OMP_CLAUSE_MAP);
8916 OMP_CLAUSE_SET_MAP_KIND (c2, GOMP_MAP_ALLOC);
8917 OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c2)
8918 = 1;
8919 tree charptr_zero
8920 = build_int_cst (build_pointer_type (char_type_node),
8921 0);
8922 OMP_CLAUSE_DECL (c2)
8923 = build2 (MEM_REF, char_type_node,
8924 decl_ref ? decl_ref : decl, charptr_zero);
8925 OMP_CLAUSE_SIZE (c2) = size_zero_node;
8926 tree c3 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8927 OMP_CLAUSE_MAP);
8928 OMP_CLAUSE_SET_MAP_KIND (c3,
8929 GOMP_MAP_FIRSTPRIVATE_POINTER);
8930 OMP_CLAUSE_DECL (c3) = decl;
8931 OMP_CLAUSE_SIZE (c3) = size_zero_node;
8932 tree mapgrp = *prev_list_p;
8933 *prev_list_p = c2;
8934 OMP_CLAUSE_CHAIN (c3) = mapgrp;
8935 OMP_CLAUSE_CHAIN (c2) = c3;
8936
8937 struct_deref_set->add (decl);
8938 }
8939 goto do_add_decl;
8940 }
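/* A hypothetical OpenACC source reaching the block above:

     struct S { int *p; };
     struct S *s;
     #pragma acc enter data copyin(s->p[0:n])

   The section map becomes a GOMP_MAP_ATTACH of s->p, and a zero-length
   GOMP_MAP_ALLOC of *s plus a GOMP_MAP_FIRSTPRIVATE_POINTER of s are
   prepended so that the base pointer itself is usable on the device.  */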
8941 /* An "attach/detach" operation on an update directive should
8942 behave as a GOMP_MAP_ALWAYS_POINTER. Beware that
8943 unlike attach or detach map kinds, GOMP_MAP_ALWAYS_POINTER
8944 depends on the previous mapping. */
8945 if (code == OACC_UPDATE
8946 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
8947 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_ALWAYS_POINTER);
8948 if (gimplify_expr (pd, pre_p, NULL, is_gimple_lvalue, fb_lvalue)
8949 == GS_ERROR)
8950 {
8951 remove = true;
8952 break;
8953 }
8954 if (DECL_P (decl)
8955 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_TO_PSET
8956 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ATTACH
8957 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_DETACH
8958 && code != OACC_UPDATE)
8959 {
8960 if (error_operand_p (decl))
8961 {
8962 remove = true;
8963 break;
8964 }
8965
8966 tree stype = TREE_TYPE (decl);
8967 if (TREE_CODE (stype) == REFERENCE_TYPE)
8968 stype = TREE_TYPE (stype);
8969 if (TYPE_SIZE_UNIT (stype) == NULL
8970 || TREE_CODE (TYPE_SIZE_UNIT (stype)) != INTEGER_CST)
8971 {
8972 error_at (OMP_CLAUSE_LOCATION (c),
8973 "mapping field %qE of variable length "
8974 "structure", OMP_CLAUSE_DECL (c));
8975 remove = true;
8976 break;
8977 }
8978
8979 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER
8980 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
8981 {
8982 /* Error recovery. */
8983 if (prev_list_p == NULL)
8984 {
8985 remove = true;
8986 break;
8987 }
8988 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
8989 {
8990 tree ch = OMP_CLAUSE_CHAIN (*prev_list_p);
8991 if (ch == NULL_TREE || OMP_CLAUSE_CHAIN (ch) != c)
8992 {
8993 remove = true;
8994 break;
8995 }
8996 }
8997 }
8998
8999 poly_offset_int offset1;
9000 poly_int64 bitpos1;
9001 tree base_ref;
9002
9003 tree base
9004 = extract_base_bit_offset (OMP_CLAUSE_DECL (c), &base_ref,
9005 &bitpos1, &offset1);
9006
9007 gcc_assert (base == decl);
9008
9009 splay_tree_node n
9010 = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
9011 bool ptr = (OMP_CLAUSE_MAP_KIND (c)
9012 == GOMP_MAP_ALWAYS_POINTER);
9013 bool attach_detach = (OMP_CLAUSE_MAP_KIND (c)
9014 == GOMP_MAP_ATTACH_DETACH);
9015 bool attach = OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
9016 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH;
9017 bool has_attachments = false;
9018 /* For OpenACC, pointers in structs should trigger an
9019 attach action. */
9020 if (attach_detach && (region_type & ORT_ACC) != 0)
9021 {
9022 /* Turn a GOMP_MAP_ATTACH_DETACH clause into a
9023 GOMP_MAP_ATTACH or GOMP_MAP_DETACH clause after we
9024 have detected a case that needs a GOMP_MAP_STRUCT
9025 mapping added. */
9026 gomp_map_kind k
9027 = (code == OACC_EXIT_DATA) ? GOMP_MAP_DETACH
9028 : GOMP_MAP_ATTACH;
9029 OMP_CLAUSE_SET_MAP_KIND (c, k);
9030 has_attachments = true;
9031 }
9032 if (n == NULL || (n->value & GOVD_MAP) == 0)
9033 {
9034 tree l = build_omp_clause (OMP_CLAUSE_LOCATION (c),
9035 OMP_CLAUSE_MAP);
9036 gomp_map_kind k = attach ? GOMP_MAP_FORCE_PRESENT
9037 : GOMP_MAP_STRUCT;
9038
9039 OMP_CLAUSE_SET_MAP_KIND (l, k);
9040 if (base_ref)
9041 OMP_CLAUSE_DECL (l) = unshare_expr (base_ref);
9042 else
9043 OMP_CLAUSE_DECL (l) = decl;
9044 OMP_CLAUSE_SIZE (l)
9045 = (!attach
9046 ? size_int (1)
9047 : DECL_P (OMP_CLAUSE_DECL (l))
9048 ? DECL_SIZE_UNIT (OMP_CLAUSE_DECL (l))
9049 : TYPE_SIZE_UNIT (TREE_TYPE (OMP_CLAUSE_DECL (l))));
9050 if (struct_map_to_clause == NULL)
9051 struct_map_to_clause = new hash_map<tree, tree>;
9052 struct_map_to_clause->put (decl, l);
9053 if (ptr || attach_detach)
9054 {
9055 insert_struct_comp_map (code, c, l, *prev_list_p,
9056 NULL);
9057 *prev_list_p = l;
9058 prev_list_p = NULL;
9059 }
9060 else
9061 {
9062 OMP_CLAUSE_CHAIN (l) = c;
9063 *list_p = l;
9064 list_p = &OMP_CLAUSE_CHAIN (l);
9065 }
9066 if (base_ref && code == OMP_TARGET)
9067 {
9068 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
9069 OMP_CLAUSE_MAP);
9070 enum gomp_map_kind mkind
9071 = GOMP_MAP_FIRSTPRIVATE_REFERENCE;
9072 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
9073 OMP_CLAUSE_DECL (c2) = decl;
9074 OMP_CLAUSE_SIZE (c2) = size_zero_node;
9075 OMP_CLAUSE_CHAIN (c2) = OMP_CLAUSE_CHAIN (l);
9076 OMP_CLAUSE_CHAIN (l) = c2;
9077 }
9078 flags = GOVD_MAP | GOVD_EXPLICIT;
9079 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c))
9080 || ptr
9081 || attach_detach)
9082 flags |= GOVD_SEEN;
9083 if (has_attachments)
9084 flags |= GOVD_MAP_HAS_ATTACHMENTS;
9085 goto do_add_decl;
9086 }
9087 else if (struct_map_to_clause)
9088 {
9089 tree *osc = struct_map_to_clause->get (decl);
9090 tree *sc = NULL, *scp = NULL;
9091 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c))
9092 || ptr
9093 || attach_detach)
9094 n->value |= GOVD_SEEN;
9095 sc = &OMP_CLAUSE_CHAIN (*osc);
9096 if (*sc != c
9097 && (OMP_CLAUSE_MAP_KIND (*sc)
9098 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
9099 sc = &OMP_CLAUSE_CHAIN (*sc);
/* Here "prev_list_p" points at the end of the alloc/release
   nodes inserted after the struct node, OSC.  */
9102 for (; *sc != c; sc = &OMP_CLAUSE_CHAIN (*sc))
9103 if ((ptr || attach_detach) && sc == prev_list_p)
9104 break;
9105 else if (TREE_CODE (OMP_CLAUSE_DECL (*sc))
9106 != COMPONENT_REF
9107 && (TREE_CODE (OMP_CLAUSE_DECL (*sc))
9108 != INDIRECT_REF)
9109 && (TREE_CODE (OMP_CLAUSE_DECL (*sc))
9110 != ARRAY_REF))
9111 break;
9112 else
9113 {
9114 tree sc_decl = OMP_CLAUSE_DECL (*sc);
9115 poly_offset_int offsetn;
9116 poly_int64 bitposn;
9117 tree base
9118 = extract_base_bit_offset (sc_decl, NULL,
9119 &bitposn, &offsetn);
9120 if (base != decl)
9121 break;
9122 if (scp)
9123 continue;
9124 tree d1 = OMP_CLAUSE_DECL (*sc);
9125 tree d2 = OMP_CLAUSE_DECL (c);
9126 while (TREE_CODE (d1) == ARRAY_REF)
9127 d1 = TREE_OPERAND (d1, 0);
9128 while (TREE_CODE (d2) == ARRAY_REF)
9129 d2 = TREE_OPERAND (d2, 0);
9130 if (TREE_CODE (d1) == INDIRECT_REF)
9131 d1 = TREE_OPERAND (d1, 0);
9132 if (TREE_CODE (d2) == INDIRECT_REF)
9133 d2 = TREE_OPERAND (d2, 0);
9134 while (TREE_CODE (d1) == COMPONENT_REF)
9135 if (TREE_CODE (d2) == COMPONENT_REF
9136 && TREE_OPERAND (d1, 1)
9137 == TREE_OPERAND (d2, 1))
9138 {
9139 d1 = TREE_OPERAND (d1, 0);
9140 d2 = TREE_OPERAND (d2, 0);
9141 }
9142 else
9143 break;
9144 if (d1 == d2)
9145 {
9146 error_at (OMP_CLAUSE_LOCATION (c),
9147 "%qE appears more than once in map "
9148 "clauses", OMP_CLAUSE_DECL (c));
9149 remove = true;
9150 break;
9151 }
9152 if (maybe_lt (offset1, offsetn)
9153 || (known_eq (offset1, offsetn)
9154 && maybe_lt (bitpos1, bitposn)))
9155 {
9156 if (ptr || attach_detach)
9157 scp = sc;
9158 else
9159 break;
9160 }
9161 }
9162 if (remove)
9163 break;
9164 if (!attach)
9165 OMP_CLAUSE_SIZE (*osc)
9166 = size_binop (PLUS_EXPR, OMP_CLAUSE_SIZE (*osc),
9167 size_one_node);
9168 if (ptr || attach_detach)
9169 {
9170 tree cl = insert_struct_comp_map (code, c, NULL,
9171 *prev_list_p, scp);
9172 if (sc == prev_list_p)
9173 {
9174 *sc = cl;
9175 prev_list_p = NULL;
9176 }
9177 else
9178 {
9179 *prev_list_p = OMP_CLAUSE_CHAIN (c);
9180 list_p = prev_list_p;
9181 prev_list_p = NULL;
9182 OMP_CLAUSE_CHAIN (c) = *sc;
9183 *sc = cl;
9184 continue;
9185 }
9186 }
9187 else if (*sc != c)
9188 {
9189 *list_p = OMP_CLAUSE_CHAIN (c);
9190 OMP_CLAUSE_CHAIN (c) = *sc;
9191 *sc = c;
9192 continue;
9193 }
9194 }
9195 }
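/* To summarize the block above: component maps that share a containing
   struct are chained after a single GOMP_MAP_STRUCT (or, for
   attach/detach, GOMP_MAP_FORCE_PRESENT) node and kept sorted by bit
   offset within the struct; OMP_CLAUSE_SIZE of that node counts the
   component entries.  */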
9196 if (!remove
9197 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_POINTER
9198 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ATTACH_DETACH
9199 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_TO_PSET
9200 && OMP_CLAUSE_CHAIN (c)
9201 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c)) == OMP_CLAUSE_MAP
9202 && ((OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
9203 == GOMP_MAP_ALWAYS_POINTER)
9204 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
9205 == GOMP_MAP_ATTACH_DETACH)
9206 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
9207 == GOMP_MAP_TO_PSET)))
9208 prev_list_p = list_p;
9209
9210 break;
9211 }
9212 flags = GOVD_MAP | GOVD_EXPLICIT;
9213 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TO
9214 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TOFROM)
9215 flags |= GOVD_MAP_ALWAYS_TO;
9216 goto do_add;
9217
9218 case OMP_CLAUSE_DEPEND:
9219 if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
9220 {
9221 tree deps = OMP_CLAUSE_DECL (c);
9222 while (deps && TREE_CODE (deps) == TREE_LIST)
9223 {
9224 if (TREE_CODE (TREE_PURPOSE (deps)) == TRUNC_DIV_EXPR
9225 && DECL_P (TREE_OPERAND (TREE_PURPOSE (deps), 1)))
9226 gimplify_expr (&TREE_OPERAND (TREE_PURPOSE (deps), 1),
9227 pre_p, NULL, is_gimple_val, fb_rvalue);
9228 deps = TREE_CHAIN (deps);
9229 }
9230 break;
9231 }
9232 else if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE)
9233 break;
9234 if (handled_depend_iterators == -1)
9235 handled_depend_iterators = gimplify_omp_depend (list_p, pre_p);
9236 if (handled_depend_iterators)
9237 {
9238 if (handled_depend_iterators == 2)
9239 remove = true;
9240 break;
9241 }
9242 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
9243 {
9244 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
9245 NULL, is_gimple_val, fb_rvalue);
9246 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
9247 }
9248 if (error_operand_p (OMP_CLAUSE_DECL (c)))
9249 {
9250 remove = true;
9251 break;
9252 }
9253 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
9254 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
9255 is_gimple_val, fb_rvalue) == GS_ERROR)
9256 {
9257 remove = true;
9258 break;
9259 }
9260 break;
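/* E.g. for "#pragma omp task depend(inout: x)" the clause decl X has
   been replaced by &X above, so the runtime can match task dependences
   on addresses.  */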
9261
9262 case OMP_CLAUSE_TO:
9263 case OMP_CLAUSE_FROM:
9264 case OMP_CLAUSE__CACHE_:
9265 decl = OMP_CLAUSE_DECL (c);
9266 if (error_operand_p (decl))
9267 {
9268 remove = true;
9269 break;
9270 }
9271 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
9272 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
9273 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
9274 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
9275 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
9276 {
9277 remove = true;
9278 break;
9279 }
9280 if (!DECL_P (decl))
9281 {
9282 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p,
9283 NULL, is_gimple_lvalue, fb_lvalue)
9284 == GS_ERROR)
9285 {
9286 remove = true;
9287 break;
9288 }
9289 break;
9290 }
9291 goto do_notice;
9292
9293 case OMP_CLAUSE_USE_DEVICE_PTR:
9294 case OMP_CLAUSE_USE_DEVICE_ADDR:
9295 flags = GOVD_EXPLICIT;
9296 goto do_add;
9297
9298 case OMP_CLAUSE_IS_DEVICE_PTR:
9299 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
9300 goto do_add;
9301
9302 do_add:
9303 decl = OMP_CLAUSE_DECL (c);
9304 do_add_decl:
9305 if (error_operand_p (decl))
9306 {
9307 remove = true;
9308 break;
9309 }
9310 if (DECL_NAME (decl) == NULL_TREE && (flags & GOVD_SHARED) == 0)
9311 {
9312 tree t = omp_member_access_dummy_var (decl);
9313 if (t)
9314 {
9315 tree v = DECL_VALUE_EXPR (decl);
9316 DECL_NAME (decl) = DECL_NAME (TREE_OPERAND (v, 1));
9317 if (outer_ctx)
9318 omp_notice_variable (outer_ctx, t, true);
9319 }
9320 }
9321 if (code == OACC_DATA
9322 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
9323 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
9324 flags |= GOVD_MAP_0LEN_ARRAY;
9325 omp_add_variable (ctx, decl, flags);
9326 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
9327 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
9328 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
9329 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9330 {
9331 omp_add_variable (ctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
9332 GOVD_LOCAL | GOVD_SEEN);
9333 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c)
9334 && walk_tree (&OMP_CLAUSE_REDUCTION_INIT (c),
9335 find_decl_expr,
9336 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
9337 NULL) == NULL_TREE)
9338 omp_add_variable (ctx,
9339 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
9340 GOVD_LOCAL | GOVD_SEEN);
9341 gimplify_omp_ctxp = ctx;
9342 push_gimplify_context ();
9343
9344 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
9345 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
9346
9347 gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
9348 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
9349 pop_gimplify_context
9350 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
9351 push_gimplify_context ();
9352 gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
9353 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
9354 pop_gimplify_context
9355 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
9356 OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
9357 OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;
9358
9359 gimplify_omp_ctxp = outer_ctx;
9360 }
9361 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
9362 && OMP_CLAUSE_LASTPRIVATE_STMT (c))
9363 {
9364 gimplify_omp_ctxp = ctx;
9365 push_gimplify_context ();
9366 if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
9367 {
9368 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
9369 NULL, NULL);
9370 TREE_SIDE_EFFECTS (bind) = 1;
9371 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
9372 OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
9373 }
9374 gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
9375 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
9376 pop_gimplify_context
9377 (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
9378 OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;
9379
9380 gimplify_omp_ctxp = outer_ctx;
9381 }
9382 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
9383 && OMP_CLAUSE_LINEAR_STMT (c))
9384 {
9385 gimplify_omp_ctxp = ctx;
9386 push_gimplify_context ();
9387 if (TREE_CODE (OMP_CLAUSE_LINEAR_STMT (c)) != BIND_EXPR)
9388 {
9389 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
9390 NULL, NULL);
9391 TREE_SIDE_EFFECTS (bind) = 1;
9392 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LINEAR_STMT (c);
9393 OMP_CLAUSE_LINEAR_STMT (c) = bind;
9394 }
9395 gimplify_and_add (OMP_CLAUSE_LINEAR_STMT (c),
9396 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
9397 pop_gimplify_context
9398 (gimple_seq_first_stmt (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c)));
9399 OMP_CLAUSE_LINEAR_STMT (c) = NULL_TREE;
9400
9401 gimplify_omp_ctxp = outer_ctx;
9402 }
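/* The three blocks above pre-gimplify statements attached to the
   clause (reduction init/merge expressions, the lastprivate or linear
   update statement) into GIMPLE sequences stored back on the clause,
   each inside its own gimplify context.  */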
9403 if (notice_outer)
9404 goto do_notice;
9405 break;
9406
9407 case OMP_CLAUSE_COPYIN:
9408 case OMP_CLAUSE_COPYPRIVATE:
9409 decl = OMP_CLAUSE_DECL (c);
9410 if (error_operand_p (decl))
9411 {
9412 remove = true;
9413 break;
9414 }
9415 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_COPYPRIVATE
9416 && !remove
9417 && !omp_check_private (ctx, decl, true))
9418 {
9419 remove = true;
9420 if (is_global_var (decl))
9421 {
9422 if (DECL_THREAD_LOCAL_P (decl))
9423 remove = false;
9424 else if (DECL_HAS_VALUE_EXPR_P (decl))
9425 {
9426 tree value = get_base_address (DECL_VALUE_EXPR (decl));
9427
9428 if (value
9429 && DECL_P (value)
9430 && DECL_THREAD_LOCAL_P (value))
9431 remove = false;
9432 }
9433 }
9434 if (remove)
9435 error_at (OMP_CLAUSE_LOCATION (c),
9436 "copyprivate variable %qE is not threadprivate"
9437 " or private in outer context", DECL_NAME (decl));
9438 }
9439 do_notice:
9440 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
9441 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
9442 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
9443 && outer_ctx
9444 && ((region_type & ORT_TASKLOOP) == ORT_TASKLOOP
9445 || (region_type == ORT_WORKSHARE
9446 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
9447 && (OMP_CLAUSE_REDUCTION_INSCAN (c)
9448 || code == OMP_LOOP)))
9449 && (outer_ctx->region_type == ORT_COMBINED_PARALLEL
9450 || (code == OMP_LOOP
9451 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
9452 && ((outer_ctx->region_type & ORT_COMBINED_TEAMS)
9453 == ORT_COMBINED_TEAMS))))
9454 {
9455 splay_tree_node on
9456 = splay_tree_lookup (outer_ctx->variables,
9457 (splay_tree_key)decl);
9458 if (on == NULL || (on->value & GOVD_DATA_SHARE_CLASS) == 0)
9459 {
9460 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
9461 && TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
9462 && (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
9463 || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
9464 && (TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
9465 == POINTER_TYPE))))
9466 omp_firstprivatize_variable (outer_ctx, decl);
9467 else
9468 {
9469 omp_add_variable (outer_ctx, decl,
9470 GOVD_SEEN | GOVD_SHARED);
9471 if (outer_ctx->outer_context)
9472 omp_notice_variable (outer_ctx->outer_context, decl,
9473 true);
9474 }
9475 }
9476 }
9477 if (outer_ctx)
9478 omp_notice_variable (outer_ctx, decl, true);
9479 if (check_non_private
9480 && region_type == ORT_WORKSHARE
9481 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
9482 || decl == OMP_CLAUSE_DECL (c)
9483 || (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
9484 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
9485 == ADDR_EXPR
9486 || (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
9487 == POINTER_PLUS_EXPR
9488 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND
9489 (OMP_CLAUSE_DECL (c), 0), 0))
9490 == ADDR_EXPR)))))
9491 && omp_check_private (ctx, decl, false))
9492 {
9493 error ("%s variable %qE is private in outer context",
9494 check_non_private, DECL_NAME (decl));
9495 remove = true;
9496 }
9497 break;
9498
9499 case OMP_CLAUSE_IF:
9500 if (OMP_CLAUSE_IF_MODIFIER (c) != ERROR_MARK
9501 && OMP_CLAUSE_IF_MODIFIER (c) != code)
9502 {
9503 const char *p[2];
9504 for (int i = 0; i < 2; i++)
9505 switch (i ? OMP_CLAUSE_IF_MODIFIER (c) : code)
9506 {
9507 case VOID_CST: p[i] = "cancel"; break;
9508 case OMP_PARALLEL: p[i] = "parallel"; break;
9509 case OMP_SIMD: p[i] = "simd"; break;
9510 case OMP_TASK: p[i] = "task"; break;
9511 case OMP_TASKLOOP: p[i] = "taskloop"; break;
9512 case OMP_TARGET_DATA: p[i] = "target data"; break;
9513 case OMP_TARGET: p[i] = "target"; break;
9514 case OMP_TARGET_UPDATE: p[i] = "target update"; break;
9515 case OMP_TARGET_ENTER_DATA:
9516 p[i] = "target enter data"; break;
9517 case OMP_TARGET_EXIT_DATA: p[i] = "target exit data"; break;
9518 default: gcc_unreachable ();
9519 }
9520 error_at (OMP_CLAUSE_LOCATION (c),
9521 "expected %qs %<if%> clause modifier rather than %qs",
9522 p[0], p[1]);
9523 remove = true;
9524 }
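/* E.g. an "if (task: expr)" modifier on "#pragma omp parallel" is
   diagnosed above: the directive-name modifier has to match the
   construct the clause ends up on.  */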
9525 /* Fall through. */
9526
9527 case OMP_CLAUSE_FINAL:
9528 OMP_CLAUSE_OPERAND (c, 0)
9529 = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
9530 /* Fall through. */
9531
9532 case OMP_CLAUSE_SCHEDULE:
9533 case OMP_CLAUSE_NUM_THREADS:
9534 case OMP_CLAUSE_NUM_TEAMS:
9535 case OMP_CLAUSE_THREAD_LIMIT:
9536 case OMP_CLAUSE_DIST_SCHEDULE:
9537 case OMP_CLAUSE_DEVICE:
9538 case OMP_CLAUSE_PRIORITY:
9539 case OMP_CLAUSE_GRAINSIZE:
9540 case OMP_CLAUSE_NUM_TASKS:
9541 case OMP_CLAUSE_HINT:
9542 case OMP_CLAUSE_ASYNC:
9543 case OMP_CLAUSE_WAIT:
9544 case OMP_CLAUSE_NUM_GANGS:
9545 case OMP_CLAUSE_NUM_WORKERS:
9546 case OMP_CLAUSE_VECTOR_LENGTH:
9547 case OMP_CLAUSE_WORKER:
9548 case OMP_CLAUSE_VECTOR:
9549 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
9550 is_gimple_val, fb_rvalue) == GS_ERROR)
9551 remove = true;
9552 break;
9553
9554 case OMP_CLAUSE_GANG:
9555 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
9556 is_gimple_val, fb_rvalue) == GS_ERROR)
9557 remove = true;
9558 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 1), pre_p, NULL,
9559 is_gimple_val, fb_rvalue) == GS_ERROR)
9560 remove = true;
9561 break;
9562
9563 case OMP_CLAUSE_NOWAIT:
9564 nowait = 1;
9565 break;
9566
9567 case OMP_CLAUSE_ORDERED:
9568 case OMP_CLAUSE_UNTIED:
9569 case OMP_CLAUSE_COLLAPSE:
9570 case OMP_CLAUSE_TILE:
9571 case OMP_CLAUSE_AUTO:
9572 case OMP_CLAUSE_SEQ:
9573 case OMP_CLAUSE_INDEPENDENT:
9574 case OMP_CLAUSE_MERGEABLE:
9575 case OMP_CLAUSE_PROC_BIND:
9576 case OMP_CLAUSE_SAFELEN:
9577 case OMP_CLAUSE_SIMDLEN:
9578 case OMP_CLAUSE_NOGROUP:
9579 case OMP_CLAUSE_THREADS:
9580 case OMP_CLAUSE_SIMD:
9581 case OMP_CLAUSE_BIND:
9582 case OMP_CLAUSE_IF_PRESENT:
9583 case OMP_CLAUSE_FINALIZE:
9584 break;
9585
9586 case OMP_CLAUSE_ORDER:
9587 ctx->order_concurrent = true;
9588 break;
9589
9590 case OMP_CLAUSE_DEFAULTMAP:
9591 enum gimplify_defaultmap_kind gdmkmin, gdmkmax;
9592 switch (OMP_CLAUSE_DEFAULTMAP_CATEGORY (c))
9593 {
9594 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED:
9595 gdmkmin = GDMK_SCALAR;
9596 gdmkmax = GDMK_POINTER;
9597 break;
9598 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_SCALAR:
9599 gdmkmin = gdmkmax = GDMK_SCALAR;
9600 break;
9601 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_AGGREGATE:
9602 gdmkmin = gdmkmax = GDMK_AGGREGATE;
9603 break;
9604 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_ALLOCATABLE:
9605 gdmkmin = gdmkmax = GDMK_ALLOCATABLE;
9606 break;
9607 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_POINTER:
9608 gdmkmin = gdmkmax = GDMK_POINTER;
9609 break;
9610 default:
9611 gcc_unreachable ();
9612 }
9613 for (int gdmk = gdmkmin; gdmk <= gdmkmax; gdmk++)
9614 switch (OMP_CLAUSE_DEFAULTMAP_BEHAVIOR (c))
9615 {
9616 case OMP_CLAUSE_DEFAULTMAP_ALLOC:
9617 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_ALLOC_ONLY;
9618 break;
9619 case OMP_CLAUSE_DEFAULTMAP_TO:
9620 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_TO_ONLY;
9621 break;
9622 case OMP_CLAUSE_DEFAULTMAP_FROM:
9623 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_FROM_ONLY;
9624 break;
9625 case OMP_CLAUSE_DEFAULTMAP_TOFROM:
9626 ctx->defaultmap[gdmk] = GOVD_MAP;
9627 break;
9628 case OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE:
9629 ctx->defaultmap[gdmk] = GOVD_FIRSTPRIVATE;
9630 break;
9631 case OMP_CLAUSE_DEFAULTMAP_NONE:
9632 ctx->defaultmap[gdmk] = 0;
9633 break;
9634 case OMP_CLAUSE_DEFAULTMAP_DEFAULT:
9635 switch (gdmk)
9636 {
9637 case GDMK_SCALAR:
9638 ctx->defaultmap[gdmk] = GOVD_FIRSTPRIVATE;
9639 break;
9640 case GDMK_AGGREGATE:
9641 case GDMK_ALLOCATABLE:
9642 ctx->defaultmap[gdmk] = GOVD_MAP;
9643 break;
9644 case GDMK_POINTER:
9645 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_0LEN_ARRAY;
9646 break;
9647 default:
9648 gcc_unreachable ();
9649 }
9650 break;
9651 default:
9652 gcc_unreachable ();
9653 }
9654 break;
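/* E.g. "defaultmap(firstprivate: scalar)" records GOVD_FIRSTPRIVATE in
   ctx->defaultmap[GDMK_SCALAR], which is consulted later when an
   unlisted variable of that category is referenced in the region.  */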
9655
9656 case OMP_CLAUSE_ALIGNED:
9657 decl = OMP_CLAUSE_DECL (c);
9658 if (error_operand_p (decl))
9659 {
9660 remove = true;
9661 break;
9662 }
9663 if (gimplify_expr (&OMP_CLAUSE_ALIGNED_ALIGNMENT (c), pre_p, NULL,
9664 is_gimple_val, fb_rvalue) == GS_ERROR)
9665 {
9666 remove = true;
9667 break;
9668 }
9669 if (!is_global_var (decl)
9670 && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
9671 omp_add_variable (ctx, decl, GOVD_ALIGNED);
9672 break;
9673
9674 case OMP_CLAUSE_NONTEMPORAL:
9675 decl = OMP_CLAUSE_DECL (c);
9676 if (error_operand_p (decl))
9677 {
9678 remove = true;
9679 break;
9680 }
9681 omp_add_variable (ctx, decl, GOVD_NONTEMPORAL);
9682 break;
9683
9684 case OMP_CLAUSE_DEFAULT:
9685 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
9686 break;
9687
9688 case OMP_CLAUSE_INCLUSIVE:
9689 case OMP_CLAUSE_EXCLUSIVE:
9690 decl = OMP_CLAUSE_DECL (c);
9691 {
9692 splay_tree_node n = splay_tree_lookup (outer_ctx->variables,
9693 (splay_tree_key) decl);
9694 if (n == NULL || (n->value & GOVD_REDUCTION) == 0)
9695 {
9696 error_at (OMP_CLAUSE_LOCATION (c),
9697 "%qD specified in %qs clause but not in %<inscan%> "
9698 "%<reduction%> clause on the containing construct",
9699 decl, omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
9700 remove = true;
9701 }
9702 else
9703 {
9704 n->value |= GOVD_REDUCTION_INSCAN;
9705 if (outer_ctx->region_type == ORT_SIMD
9706 && outer_ctx->outer_context
9707 && outer_ctx->outer_context->region_type == ORT_WORKSHARE)
9708 {
9709 n = splay_tree_lookup (outer_ctx->outer_context->variables,
9710 (splay_tree_key) decl);
9711 if (n && (n->value & GOVD_REDUCTION) != 0)
9712 n->value |= GOVD_REDUCTION_INSCAN;
9713 }
9714 }
9715 }
9716 break;
9717
9718 default:
9719 gcc_unreachable ();
9720 }
9721
9722 if (code == OACC_DATA
9723 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
9724 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
9725 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
9726 remove = true;
9727 if (remove)
9728 *list_p = OMP_CLAUSE_CHAIN (c);
9729 else
9730 list_p = &OMP_CLAUSE_CHAIN (c);
9731 }
9732
9733 ctx->clauses = *orig_list_p;
9734 gimplify_omp_ctxp = ctx;
9735 if (struct_map_to_clause)
9736 delete struct_map_to_clause;
9737 if (struct_deref_set)
9738 delete struct_deref_set;
9739 }
9740
/* Return true if DECL is a candidate for the shared-to-firstprivate
   optimization.  We only consider non-addressable scalars that are
   not too large and are not references.  */
9744
9745 static bool
9746 omp_shared_to_firstprivate_optimizable_decl_p (tree decl)
9747 {
9748 if (TREE_ADDRESSABLE (decl))
9749 return false;
9750 tree type = TREE_TYPE (decl);
9751 if (!is_gimple_reg_type (type)
9752 || TREE_CODE (type) == REFERENCE_TYPE
9753 || TREE_ADDRESSABLE (type))
9754 return false;
/* Don't optimize too large decls, as each thread/task will have
   its own copy; the limit below is four pointers' worth of bytes
   (e.g. 32 bytes on a common 64-bit target).  */
9757 HOST_WIDE_INT len = int_size_in_bytes (type);
9758 if (len == -1 || len > 4 * POINTER_SIZE / BITS_PER_UNIT)
9759 return false;
9760 if (lang_hooks.decls.omp_privatize_by_reference (decl))
9761 return false;
9762 return true;
9763 }
9764
/* Helper function of omp_find_stores_op and gimplify_adjust_omp_clauses*.
   For a DECL satisfying omp_shared_to_firstprivate_optimizable_decl_p,
   mark it GOVD_WRITTEN in the nearest enclosing context where it is
   GOVD_SHARED.  */
9768
9769 static void
9770 omp_mark_stores (struct gimplify_omp_ctx *ctx, tree decl)
9771 {
9772 for (; ctx; ctx = ctx->outer_context)
9773 {
9774 splay_tree_node n = splay_tree_lookup (ctx->variables,
9775 (splay_tree_key) decl);
9776 if (n == NULL)
9777 continue;
9778 else if (n->value & GOVD_SHARED)
9779 {
9780 n->value |= GOVD_WRITTEN;
9781 return;
9782 }
9783 else if (n->value & GOVD_DATA_SHARE_CLASS)
9784 return;
9785 }
9786 }
9787
/* Helper callback for walk_gimple_seq to discover possible stores
   to omp_shared_to_firstprivate_optimizable_decl_p decls, setting
   GOVD_WRITTEN on those that are GOVD_SHARED in some outer
   context.  */
9792
9793 static tree
9794 omp_find_stores_op (tree *tp, int *walk_subtrees, void *data)
9795 {
9796 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
9797
9798 *walk_subtrees = 0;
9799 if (!wi->is_lhs)
9800 return NULL_TREE;
9801
9802 tree op = *tp;
9803 do
9804 {
9805 if (handled_component_p (op))
9806 op = TREE_OPERAND (op, 0);
9807 else if ((TREE_CODE (op) == MEM_REF || TREE_CODE (op) == TARGET_MEM_REF)
9808 && TREE_CODE (TREE_OPERAND (op, 0)) == ADDR_EXPR)
9809 op = TREE_OPERAND (TREE_OPERAND (op, 0), 0);
9810 else
9811 break;
9812 }
9813 while (1);
9814 if (!DECL_P (op) || !omp_shared_to_firstprivate_optimizable_decl_p (op))
9815 return NULL_TREE;
9816
9817 omp_mark_stores (gimplify_omp_ctxp, op);
9818 return NULL_TREE;
9819 }
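/* E.g. if the body of "#pragma omp parallel shared(x)" contains
   "x = 1;", the callback above marks X GOVD_WRITTEN in the enclosing
   context, disabling the OMP_CLAUSE_SHARED_READONLY optimization
   for X.  */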
9820
/* Statement-level counterpart of omp_find_stores_op: walk a GIMPLE
   sequence to discover possible stores to
   omp_shared_to_firstprivate_optimizable_decl_p decls, setting
   GOVD_WRITTEN on those that are GOVD_SHARED in some outer context.  */
9825
9826 static tree
9827 omp_find_stores_stmt (gimple_stmt_iterator *gsi_p,
9828 bool *handled_ops_p,
9829 struct walk_stmt_info *wi)
9830 {
9831 gimple *stmt = gsi_stmt (*gsi_p);
9832 switch (gimple_code (stmt))
9833 {
/* Don't recurse into OpenMP constructs whose bodies
   gimplify_adjust_omp_clauses has already handled, except do
   walk gimple_omp_for_pre_body.  */
9837 case GIMPLE_OMP_FOR:
9838 *handled_ops_p = true;
9839 if (gimple_omp_for_pre_body (stmt))
9840 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
9841 omp_find_stores_stmt, omp_find_stores_op, wi);
9842 break;
9843 case GIMPLE_OMP_PARALLEL:
9844 case GIMPLE_OMP_TASK:
9845 case GIMPLE_OMP_SECTIONS:
9846 case GIMPLE_OMP_SINGLE:
9847 case GIMPLE_OMP_TARGET:
9848 case GIMPLE_OMP_TEAMS:
9849 case GIMPLE_OMP_CRITICAL:
9850 *handled_ops_p = true;
9851 break;
9852 default:
9853 break;
9854 }
9855 return NULL_TREE;
9856 }
9857
9858 struct gimplify_adjust_omp_clauses_data
9859 {
9860 tree *list_p;
9861 gimple_seq *pre_p;
9862 };
9863
/* splay_tree_foreach callback: for each variable recorded in the
   context, synthesize the implicit data-sharing or mapping clause
   (PRIVATE, SHARED, FIRSTPRIVATE, LASTPRIVATE or MAP) that its GOVD_*
   flags call for; explicitly-listed and unused variables are skipped.  */
9866
9867 static int
9868 gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
9869 {
9870 tree *list_p = ((struct gimplify_adjust_omp_clauses_data *) data)->list_p;
9871 gimple_seq *pre_p
9872 = ((struct gimplify_adjust_omp_clauses_data *) data)->pre_p;
9873 tree decl = (tree) n->key;
9874 unsigned flags = n->value;
9875 enum omp_clause_code code;
9876 tree clause;
9877 bool private_debug;
9878
9879 if (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
9880 && (flags & GOVD_LASTPRIVATE_CONDITIONAL) != 0)
9881 flags = GOVD_SHARED | GOVD_SEEN | GOVD_WRITTEN;
9882 if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
9883 return 0;
9884 if ((flags & GOVD_SEEN) == 0)
9885 return 0;
9886 if ((flags & GOVD_MAP_HAS_ATTACHMENTS) != 0)
9887 return 0;
9888 if (flags & GOVD_DEBUG_PRIVATE)
9889 {
9890 gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_SHARED);
9891 private_debug = true;
9892 }
9893 else if (flags & GOVD_MAP)
9894 private_debug = false;
9895 else
9896 private_debug
9897 = lang_hooks.decls.omp_private_debug_clause (decl,
9898 !!(flags & GOVD_SHARED));
9899 if (private_debug)
9900 code = OMP_CLAUSE_PRIVATE;
9901 else if (flags & GOVD_MAP)
9902 {
9903 code = OMP_CLAUSE_MAP;
9904 if ((gimplify_omp_ctxp->region_type & ORT_ACC) == 0
9905 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
9906 {
9907 error ("%<_Atomic%> %qD in implicit %<map%> clause", decl);
9908 return 0;
9909 }
9910 if (VAR_P (decl)
9911 && DECL_IN_CONSTANT_POOL (decl)
9912 && !lookup_attribute ("omp declare target",
9913 DECL_ATTRIBUTES (decl)))
9914 {
9915 tree id = get_identifier ("omp declare target");
9916 DECL_ATTRIBUTES (decl)
9917 = tree_cons (id, NULL_TREE, DECL_ATTRIBUTES (decl));
9918 varpool_node *node = varpool_node::get (decl);
9919 if (node)
9920 {
9921 node->offloadable = 1;
9922 if (ENABLE_OFFLOADING)
9923 g->have_offload = true;
9924 }
9925 }
9926 }
9927 else if (flags & GOVD_SHARED)
9928 {
9929 if (is_global_var (decl))
9930 {
9931 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
9932 while (ctx != NULL)
9933 {
9934 splay_tree_node on
9935 = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
9936 if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
9937 | GOVD_PRIVATE | GOVD_REDUCTION
9938 | GOVD_LINEAR | GOVD_MAP)) != 0)
9939 break;
9940 ctx = ctx->outer_context;
9941 }
9942 if (ctx == NULL)
9943 return 0;
9944 }
9945 code = OMP_CLAUSE_SHARED;
9946 }
9947 else if (flags & GOVD_PRIVATE)
9948 code = OMP_CLAUSE_PRIVATE;
9949 else if (flags & GOVD_FIRSTPRIVATE)
9950 {
9951 code = OMP_CLAUSE_FIRSTPRIVATE;
9952 if ((gimplify_omp_ctxp->region_type & ORT_TARGET)
9953 && (gimplify_omp_ctxp->region_type & ORT_ACC) == 0
9954 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
9955 {
9956 error ("%<_Atomic%> %qD in implicit %<firstprivate%> clause on "
9957 "%<target%> construct", decl);
9958 return 0;
9959 }
9960 }
9961 else if (flags & GOVD_LASTPRIVATE)
9962 code = OMP_CLAUSE_LASTPRIVATE;
9963 else if (flags & (GOVD_ALIGNED | GOVD_NONTEMPORAL))
9964 return 0;
9965 else if (flags & GOVD_CONDTEMP)
9966 {
9967 code = OMP_CLAUSE__CONDTEMP_;
9968 gimple_add_tmp_var (decl);
9969 }
9970 else
9971 gcc_unreachable ();
9972
9973 if (((flags & GOVD_LASTPRIVATE)
9974 || (code == OMP_CLAUSE_SHARED && (flags & GOVD_WRITTEN)))
9975 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
9976 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
9977
9978 tree chain = *list_p;
9979 clause = build_omp_clause (input_location, code);
9980 OMP_CLAUSE_DECL (clause) = decl;
9981 OMP_CLAUSE_CHAIN (clause) = chain;
9982 if (private_debug)
9983 OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
9984 else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
9985 OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
9986 else if (code == OMP_CLAUSE_SHARED
9987 && (flags & GOVD_WRITTEN) == 0
9988 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
9989 OMP_CLAUSE_SHARED_READONLY (clause) = 1;
9990 else if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_EXPLICIT) == 0)
9991 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (clause) = 1;
9992 else if (code == OMP_CLAUSE_MAP && (flags & GOVD_MAP_0LEN_ARRAY) != 0)
9993 {
9994 tree nc = build_omp_clause (input_location, OMP_CLAUSE_MAP);
9995 OMP_CLAUSE_DECL (nc) = decl;
9996 if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
9997 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
9998 OMP_CLAUSE_DECL (clause)
9999 = build_simple_mem_ref_loc (input_location, decl);
10000 OMP_CLAUSE_DECL (clause)
10001 = build2 (MEM_REF, char_type_node, OMP_CLAUSE_DECL (clause),
10002 build_int_cst (build_pointer_type (char_type_node), 0));
10003 OMP_CLAUSE_SIZE (clause) = size_zero_node;
10004 OMP_CLAUSE_SIZE (nc) = size_zero_node;
10005 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_ALLOC);
10006 OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (clause) = 1;
10007 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
10008 OMP_CLAUSE_CHAIN (nc) = chain;
10009 OMP_CLAUSE_CHAIN (clause) = nc;
10010 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
10011 gimplify_omp_ctxp = ctx->outer_context;
10012 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (clause), 0),
10013 pre_p, NULL, is_gimple_val, fb_rvalue);
10014 gimplify_omp_ctxp = ctx;
10015 }
10016 else if (code == OMP_CLAUSE_MAP)
10017 {
10018 int kind;
10019 /* Not all combinations of these GOVD_MAP flags are actually valid. */
10020 switch (flags & (GOVD_MAP_TO_ONLY
10021 | GOVD_MAP_FORCE
10022 | GOVD_MAP_FORCE_PRESENT
10023 | GOVD_MAP_ALLOC_ONLY
10024 | GOVD_MAP_FROM_ONLY))
10025 {
10026 case 0:
10027 kind = GOMP_MAP_TOFROM;
10028 break;
10029 case GOVD_MAP_FORCE:
10030 kind = GOMP_MAP_TOFROM | GOMP_MAP_FLAG_FORCE;
10031 break;
10032 case GOVD_MAP_TO_ONLY:
10033 kind = GOMP_MAP_TO;
10034 break;
10035 case GOVD_MAP_FROM_ONLY:
10036 kind = GOMP_MAP_FROM;
10037 break;
10038 case GOVD_MAP_ALLOC_ONLY:
10039 kind = GOMP_MAP_ALLOC;
10040 break;
10041 case GOVD_MAP_TO_ONLY | GOVD_MAP_FORCE:
10042 kind = GOMP_MAP_TO | GOMP_MAP_FLAG_FORCE;
10043 break;
10044 case GOVD_MAP_FORCE_PRESENT:
10045 kind = GOMP_MAP_FORCE_PRESENT;
10046 break;
10047 default:
10048 gcc_unreachable ();
10049 }
10050 OMP_CLAUSE_SET_MAP_KIND (clause, kind);
10051 if (DECL_SIZE (decl)
10052 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
10053 {
10054 tree decl2 = DECL_VALUE_EXPR (decl);
10055 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
10056 decl2 = TREE_OPERAND (decl2, 0);
10057 gcc_assert (DECL_P (decl2));
10058 tree mem = build_simple_mem_ref (decl2);
10059 OMP_CLAUSE_DECL (clause) = mem;
10060 OMP_CLAUSE_SIZE (clause) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
10061 if (gimplify_omp_ctxp->outer_context)
10062 {
10063 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
10064 omp_notice_variable (ctx, decl2, true);
10065 omp_notice_variable (ctx, OMP_CLAUSE_SIZE (clause), true);
10066 }
10067 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
10068 OMP_CLAUSE_MAP);
10069 OMP_CLAUSE_DECL (nc) = decl;
10070 OMP_CLAUSE_SIZE (nc) = size_zero_node;
10071 if (gimplify_omp_ctxp->target_firstprivatize_array_bases)
10072 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
10073 else
10074 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
10075 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
10076 OMP_CLAUSE_CHAIN (clause) = nc;
10077 }
10078 else if (gimplify_omp_ctxp->target_firstprivatize_array_bases
10079 && lang_hooks.decls.omp_privatize_by_reference (decl))
10080 {
10081 OMP_CLAUSE_DECL (clause) = build_simple_mem_ref (decl);
10082 OMP_CLAUSE_SIZE (clause)
10083 = unshare_expr (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))));
10084 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
10085 gimplify_omp_ctxp = ctx->outer_context;
10086 gimplify_expr (&OMP_CLAUSE_SIZE (clause),
10087 pre_p, NULL, is_gimple_val, fb_rvalue);
10088 gimplify_omp_ctxp = ctx;
10089 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
10090 OMP_CLAUSE_MAP);
10091 OMP_CLAUSE_DECL (nc) = decl;
10092 OMP_CLAUSE_SIZE (nc) = size_zero_node;
10093 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_REFERENCE);
10094 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
10095 OMP_CLAUSE_CHAIN (clause) = nc;
10096 }
10097 else
10098 OMP_CLAUSE_SIZE (clause) = DECL_SIZE_UNIT (decl);
10099 }
10100 if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_LASTPRIVATE) != 0)
10101 {
10102 tree nc = build_omp_clause (input_location, OMP_CLAUSE_LASTPRIVATE);
10103 OMP_CLAUSE_DECL (nc) = decl;
10104 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (nc) = 1;
10105 OMP_CLAUSE_CHAIN (nc) = chain;
10106 OMP_CLAUSE_CHAIN (clause) = nc;
10107 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
10108 gimplify_omp_ctxp = ctx->outer_context;
10109 lang_hooks.decls.omp_finish_clause (nc, pre_p);
10110 gimplify_omp_ctxp = ctx;
10111 }
10112 *list_p = clause;
10113 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
10114 gimplify_omp_ctxp = ctx->outer_context;
10115 lang_hooks.decls.omp_finish_clause (clause, pre_p);
10116 if (gimplify_omp_ctxp)
10117 for (; clause != chain; clause = OMP_CLAUSE_CHAIN (clause))
10118 if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_MAP
10119 && DECL_P (OMP_CLAUSE_SIZE (clause)))
10120 omp_notice_variable (gimplify_omp_ctxp, OMP_CLAUSE_SIZE (clause),
10121 true);
10122 gimplify_omp_ctxp = ctx;
10123 return 0;
10124 }
10125
10126 static void
10127 gimplify_adjust_omp_clauses (gimple_seq *pre_p, gimple_seq body, tree *list_p,
10128 enum tree_code code)
10129 {
10130 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
10131 tree *orig_list_p = list_p;
10132 tree c, decl;
10133 bool has_inscan_reductions = false;
10134
10135 if (body)
10136 {
10137 struct gimplify_omp_ctx *octx;
10138 for (octx = ctx; octx; octx = octx->outer_context)
10139 if ((octx->region_type & (ORT_PARALLEL | ORT_TASK | ORT_TEAMS)) != 0)
10140 break;
10141 if (octx)
10142 {
10143 struct walk_stmt_info wi;
10144 memset (&wi, 0, sizeof (wi));
10145 walk_gimple_seq (body, omp_find_stores_stmt,
10146 omp_find_stores_op, &wi);
10147 }
10148 }
10149
10150 if (ctx->add_safelen1)
10151 {
/* If there are VLAs in the body of the simd loop, prevent
   vectorization.  */
10154 gcc_assert (ctx->region_type == ORT_SIMD);
10155 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
10156 OMP_CLAUSE_SAFELEN_EXPR (c) = integer_one_node;
10157 OMP_CLAUSE_CHAIN (c) = *list_p;
10158 *list_p = c;
10159 list_p = &OMP_CLAUSE_CHAIN (c);
10160 }
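/* The clause synthesized above is equivalent to an explicit
   "safelen(1)", which caps the vectorization factor at one.  */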
10161
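/* Mirror lastprivate(conditional:) clauses from an enclosing combined
   parallel onto this worksharing region (adding a matching firstprivate
   clause when needed), so the conditional-lastprivate bookkeeping is
   done on the construct executing the iterations.  */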
10162 if (ctx->region_type == ORT_WORKSHARE
10163 && ctx->outer_context
10164 && ctx->outer_context->region_type == ORT_COMBINED_PARALLEL)
10165 {
10166 for (c = ctx->outer_context->clauses; c; c = OMP_CLAUSE_CHAIN (c))
10167 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
10168 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
10169 {
10170 decl = OMP_CLAUSE_DECL (c);
10171 splay_tree_node n
10172 = splay_tree_lookup (ctx->outer_context->variables,
10173 (splay_tree_key) decl);
10174 gcc_checking_assert (!splay_tree_lookup (ctx->variables,
10175 (splay_tree_key) decl));
10176 omp_add_variable (ctx, decl, n->value);
10177 tree c2 = copy_node (c);
10178 OMP_CLAUSE_CHAIN (c2) = *list_p;
10179 *list_p = c2;
10180 if ((n->value & GOVD_FIRSTPRIVATE) == 0)
10181 continue;
10182 c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
10183 OMP_CLAUSE_FIRSTPRIVATE);
10184 OMP_CLAUSE_DECL (c2) = decl;
10185 OMP_CLAUSE_CHAIN (c2) = *list_p;
10186 *list_p = c2;
10187 }
10188 }
10189 while ((c = *list_p) != NULL)
10190 {
10191 splay_tree_node n;
10192 bool remove = false;
10193
10194 switch (OMP_CLAUSE_CODE (c))
10195 {
10196 case OMP_CLAUSE_FIRSTPRIVATE:
10197 if ((ctx->region_type & ORT_TARGET)
10198 && (ctx->region_type & ORT_ACC) == 0
10199 && TYPE_ATOMIC (strip_array_types
10200 (TREE_TYPE (OMP_CLAUSE_DECL (c)))))
10201 {
10202 error_at (OMP_CLAUSE_LOCATION (c),
10203 "%<_Atomic%> %qD in %<firstprivate%> clause on "
10204 "%<target%> construct", OMP_CLAUSE_DECL (c));
10205 remove = true;
10206 break;
10207 }
10208 /* FALLTHRU */
10209 case OMP_CLAUSE_PRIVATE:
10210 case OMP_CLAUSE_SHARED:
10211 case OMP_CLAUSE_LINEAR:
10212 decl = OMP_CLAUSE_DECL (c);
10213 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
10214 remove = !(n->value & GOVD_SEEN);
10215 if ((n->value & GOVD_LASTPRIVATE_CONDITIONAL) != 0
10216 && code == OMP_PARALLEL
10217 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
10218 remove = true;
10219 if (! remove)
10220 {
10221 bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
10222 if ((n->value & GOVD_DEBUG_PRIVATE)
10223 || lang_hooks.decls.omp_private_debug_clause (decl, shared))
10224 {
10225 gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
10226 || ((n->value & GOVD_DATA_SHARE_CLASS)
10227 == GOVD_SHARED));
10228 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
10229 OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
10230 }
10231 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
10232 && (n->value & GOVD_WRITTEN) == 0
10233 && DECL_P (decl)
10234 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
10235 OMP_CLAUSE_SHARED_READONLY (c) = 1;
10236 else if (DECL_P (decl)
10237 && ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
10238 && (n->value & GOVD_WRITTEN) != 0)
10239 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
10240 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
10241 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
10242 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
10243 }
10244 break;
10245
10246 case OMP_CLAUSE_LASTPRIVATE:
10247 /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
10248 accurately reflect the presence of a FIRSTPRIVATE clause. */
10249 decl = OMP_CLAUSE_DECL (c);
10250 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
10251 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
10252 = (n->value & GOVD_FIRSTPRIVATE) != 0;
10253 if (code == OMP_DISTRIBUTE
10254 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
10255 {
10256 remove = true;
10257 error_at (OMP_CLAUSE_LOCATION (c),
10258 "same variable used in %<firstprivate%> and "
10259 "%<lastprivate%> clauses on %<distribute%> "
10260 "construct");
10261 }
10262 if (!remove
10263 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
10264 && DECL_P (decl)
10265 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
10266 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
10267 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) && code == OMP_PARALLEL)
10268 remove = true;
10269 break;
10270
10271 case OMP_CLAUSE_ALIGNED:
10272 decl = OMP_CLAUSE_DECL (c);
10273 if (!is_global_var (decl))
10274 {
10275 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
10276 remove = n == NULL || !(n->value & GOVD_SEEN);
10277 if (!remove && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
10278 {
10279 struct gimplify_omp_ctx *octx;
10280 if (n != NULL
10281 && (n->value & (GOVD_DATA_SHARE_CLASS
10282 & ~GOVD_FIRSTPRIVATE)))
10283 remove = true;
10284 else
10285 for (octx = ctx->outer_context; octx;
10286 octx = octx->outer_context)
10287 {
10288 n = splay_tree_lookup (octx->variables,
10289 (splay_tree_key) decl);
10290 if (n == NULL)
10291 continue;
10292 if (n->value & GOVD_LOCAL)
10293 break;
10294 /* We have to avoid assigning a shared variable
10295 to itself when trying to add
10296 __builtin_assume_aligned. */
10297 if (n->value & GOVD_SHARED)
10298 {
10299 remove = true;
10300 break;
10301 }
10302 }
10303 }
10304 }
10305 else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
10306 {
10307 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
10308 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
10309 remove = true;
10310 }
10311 break;
10312
10313 case OMP_CLAUSE_NONTEMPORAL:
10314 decl = OMP_CLAUSE_DECL (c);
10315 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
10316 remove = n == NULL || !(n->value & GOVD_SEEN);
10317 break;
10318
10319 case OMP_CLAUSE_MAP:
10320 if (code == OMP_TARGET_EXIT_DATA
10321 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER)
10322 {
10323 remove = true;
10324 break;
10325 }
10326 decl = OMP_CLAUSE_DECL (c);
10327 /* Data clauses associated with reductions must be
10328 compatible with present_or_copy. Warn and adjust the clause
10329 if that is not the case. */
10330 if (ctx->region_type == ORT_ACC_PARALLEL
10331 || ctx->region_type == ORT_ACC_SERIAL)
10332 {
10333 tree t = DECL_P (decl) ? decl : TREE_OPERAND (decl, 0);
10334 n = NULL;
10335
10336 if (DECL_P (t))
10337 n = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
10338
10339 if (n && (n->value & GOVD_REDUCTION))
10340 {
10341 enum gomp_map_kind kind = OMP_CLAUSE_MAP_KIND (c);
10342
10343 OMP_CLAUSE_MAP_IN_REDUCTION (c) = 1;
10344 if ((kind & GOMP_MAP_TOFROM) != GOMP_MAP_TOFROM
10345 && kind != GOMP_MAP_FORCE_PRESENT
10346 && kind != GOMP_MAP_POINTER)
10347 {
10348 warning_at (OMP_CLAUSE_LOCATION (c), 0,
10349 "incompatible data clause with reduction "
10350 "on %qE; promoting to %<present_or_copy%>",
10351 DECL_NAME (t));
10352 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
10353 }
10354 }
10355 }
10356 if (!DECL_P (decl))
10357 {
10358 if ((ctx->region_type & ORT_TARGET) != 0
10359 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
10360 {
10361 if (TREE_CODE (decl) == INDIRECT_REF
10362 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
10363 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
10364 == REFERENCE_TYPE))
10365 decl = TREE_OPERAND (decl, 0);
10366 if (TREE_CODE (decl) == COMPONENT_REF)
10367 {
10368 while (TREE_CODE (decl) == COMPONENT_REF)
10369 decl = TREE_OPERAND (decl, 0);
10370 if (DECL_P (decl))
10371 {
10372 n = splay_tree_lookup (ctx->variables,
10373 (splay_tree_key) decl);
10374 if (!(n->value & GOVD_SEEN))
10375 remove = true;
10376 }
10377 }
10378 }
10379 break;
10380 }
10381 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
10382 if ((ctx->region_type & ORT_TARGET) != 0
10383 && !(n->value & GOVD_SEEN)
10384 && GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) == 0
10385 && (!is_global_var (decl)
10386 || !lookup_attribute ("omp declare target link",
10387 DECL_ATTRIBUTES (decl))))
10388 {
10389 remove = true;
/* For struct element mapping, if the struct is never referenced in the
   target block and none of the mappings has an always modifier, remove
   all the struct element mappings, which immediately follow the
   GOMP_MAP_STRUCT map clause.  */
10394 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT)
10395 {
10396 HOST_WIDE_INT cnt = tree_to_shwi (OMP_CLAUSE_SIZE (c));
10397 while (cnt--)
10398 OMP_CLAUSE_CHAIN (c)
10399 = OMP_CLAUSE_CHAIN (OMP_CLAUSE_CHAIN (c));
10400 }
10401 }
10402 else if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT
10403 && code == OMP_TARGET_EXIT_DATA)
10404 remove = true;
10405 else if (DECL_SIZE (decl)
10406 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST
10407 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_POINTER
10408 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
10409 && (OMP_CLAUSE_MAP_KIND (c)
10410 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
10411 {
10412 /* For GOMP_MAP_FORCE_DEVICEPTR, we'll never enter here, because
10413 for these, TREE_CODE (DECL_SIZE (decl)) will always be
10414 INTEGER_CST. */
10415 gcc_assert (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FORCE_DEVICEPTR);
10416
10417 tree decl2 = DECL_VALUE_EXPR (decl);
10418 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
10419 decl2 = TREE_OPERAND (decl2, 0);
10420 gcc_assert (DECL_P (decl2));
10421 tree mem = build_simple_mem_ref (decl2);
10422 OMP_CLAUSE_DECL (c) = mem;
10423 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
10424 if (ctx->outer_context)
10425 {
10426 omp_notice_variable (ctx->outer_context, decl2, true);
10427 omp_notice_variable (ctx->outer_context,
10428 OMP_CLAUSE_SIZE (c), true);
10429 }
10430 if (((ctx->region_type & ORT_TARGET) != 0
10431 || !ctx->target_firstprivatize_array_bases)
10432 && ((n->value & GOVD_SEEN) == 0
10433 || (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE)) == 0))
10434 {
10435 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
10436 OMP_CLAUSE_MAP);
10437 OMP_CLAUSE_DECL (nc) = decl;
10438 OMP_CLAUSE_SIZE (nc) = size_zero_node;
10439 if (ctx->target_firstprivatize_array_bases)
10440 OMP_CLAUSE_SET_MAP_KIND (nc,
10441 GOMP_MAP_FIRSTPRIVATE_POINTER);
10442 else
10443 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
10444 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (c);
10445 OMP_CLAUSE_CHAIN (c) = nc;
10446 c = nc;
10447 }
10448 }
10449 else
10450 {
10451 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
10452 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
10453 gcc_assert ((n->value & GOVD_SEEN) == 0
10454 || ((n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
10455 == 0));
10456 }
10457 break;
10458
10459 case OMP_CLAUSE_TO:
10460 case OMP_CLAUSE_FROM:
10461 case OMP_CLAUSE__CACHE_:
10462 decl = OMP_CLAUSE_DECL (c);
10463 if (!DECL_P (decl))
10464 break;
10465 if (DECL_SIZE (decl)
10466 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
10467 {
10468 tree decl2 = DECL_VALUE_EXPR (decl);
10469 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
10470 decl2 = TREE_OPERAND (decl2, 0);
10471 gcc_assert (DECL_P (decl2));
10472 tree mem = build_simple_mem_ref (decl2);
10473 OMP_CLAUSE_DECL (c) = mem;
10474 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
10475 if (ctx->outer_context)
10476 {
10477 omp_notice_variable (ctx->outer_context, decl2, true);
10478 omp_notice_variable (ctx->outer_context,
10479 OMP_CLAUSE_SIZE (c), true);
10480 }
10481 }
10482 else if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
10483 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
10484 break;
10485
10486 case OMP_CLAUSE_REDUCTION:
10487 if (OMP_CLAUSE_REDUCTION_INSCAN (c))
10488 {
10489 decl = OMP_CLAUSE_DECL (c);
10490 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
10491 if ((n->value & GOVD_REDUCTION_INSCAN) == 0)
10492 {
10493 remove = true;
10494 error_at (OMP_CLAUSE_LOCATION (c),
10495 "%qD specified in %<inscan%> %<reduction%> clause "
10496 "but not in %<scan%> directive clause", decl);
10497 break;
10498 }
10499 has_inscan_reductions = true;
10500 }
10501 /* FALLTHRU */
10502 case OMP_CLAUSE_IN_REDUCTION:
10503 case OMP_CLAUSE_TASK_REDUCTION:
10504 decl = OMP_CLAUSE_DECL (c);
/* OpenACC reductions need a present_or_copy data clause.
   Add one if necessary.  Emit an error when the reduction
   variable is private.  */
10507 if (ctx->region_type == ORT_ACC_PARALLEL
10508 || ctx->region_type == ORT_ACC_SERIAL)
10509 {
10510 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
10511 if (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
10512 {
10513 remove = true;
10514 error_at (OMP_CLAUSE_LOCATION (c), "invalid private "
10515 "reduction on %qE", DECL_NAME (decl));
10516 }
10517 else if ((n->value & GOVD_MAP) == 0)
10518 {
10519 tree next = OMP_CLAUSE_CHAIN (c);
10520 tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_MAP);
10521 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_TOFROM);
10522 OMP_CLAUSE_DECL (nc) = decl;
10523 OMP_CLAUSE_CHAIN (c) = nc;
10524 lang_hooks.decls.omp_finish_clause (nc, pre_p);
10525 while (1)
10526 {
10527 OMP_CLAUSE_MAP_IN_REDUCTION (nc) = 1;
10528 if (OMP_CLAUSE_CHAIN (nc) == NULL)
10529 break;
10530 nc = OMP_CLAUSE_CHAIN (nc);
10531 }
10532 OMP_CLAUSE_CHAIN (nc) = next;
10533 n->value |= GOVD_MAP;
10534 }
10535 }
10536 if (DECL_P (decl)
10537 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
10538 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
10539 break;
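/* E.g. "#pragma acc parallel reduction(+:sum)" without a data clause
   for SUM gets an implicit "map(tofrom: sum)" (present_or_copy)
   appended above.  */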
10540 case OMP_CLAUSE_COPYIN:
10541 case OMP_CLAUSE_COPYPRIVATE:
10542 case OMP_CLAUSE_IF:
10543 case OMP_CLAUSE_NUM_THREADS:
10544 case OMP_CLAUSE_NUM_TEAMS:
10545 case OMP_CLAUSE_THREAD_LIMIT:
10546 case OMP_CLAUSE_DIST_SCHEDULE:
10547 case OMP_CLAUSE_DEVICE:
10548 case OMP_CLAUSE_SCHEDULE:
10549 case OMP_CLAUSE_NOWAIT:
10550 case OMP_CLAUSE_ORDERED:
10551 case OMP_CLAUSE_DEFAULT:
10552 case OMP_CLAUSE_UNTIED:
10553 case OMP_CLAUSE_COLLAPSE:
10554 case OMP_CLAUSE_FINAL:
10555 case OMP_CLAUSE_MERGEABLE:
10556 case OMP_CLAUSE_PROC_BIND:
10557 case OMP_CLAUSE_SAFELEN:
10558 case OMP_CLAUSE_SIMDLEN:
10559 case OMP_CLAUSE_DEPEND:
10560 case OMP_CLAUSE_PRIORITY:
10561 case OMP_CLAUSE_GRAINSIZE:
10562 case OMP_CLAUSE_NUM_TASKS:
10563 case OMP_CLAUSE_NOGROUP:
10564 case OMP_CLAUSE_THREADS:
10565 case OMP_CLAUSE_SIMD:
10566 case OMP_CLAUSE_HINT:
10567 case OMP_CLAUSE_DEFAULTMAP:
10568 case OMP_CLAUSE_ORDER:
10569 case OMP_CLAUSE_BIND:
10570 case OMP_CLAUSE_USE_DEVICE_PTR:
10571 case OMP_CLAUSE_USE_DEVICE_ADDR:
10572 case OMP_CLAUSE_IS_DEVICE_PTR:
10573 case OMP_CLAUSE_ASYNC:
10574 case OMP_CLAUSE_WAIT:
10575 case OMP_CLAUSE_INDEPENDENT:
10576 case OMP_CLAUSE_NUM_GANGS:
10577 case OMP_CLAUSE_NUM_WORKERS:
10578 case OMP_CLAUSE_VECTOR_LENGTH:
10579 case OMP_CLAUSE_GANG:
10580 case OMP_CLAUSE_WORKER:
10581 case OMP_CLAUSE_VECTOR:
10582 case OMP_CLAUSE_AUTO:
10583 case OMP_CLAUSE_SEQ:
10584 case OMP_CLAUSE_TILE:
10585 case OMP_CLAUSE_IF_PRESENT:
10586 case OMP_CLAUSE_FINALIZE:
10587 case OMP_CLAUSE_INCLUSIVE:
10588 case OMP_CLAUSE_EXCLUSIVE:
10589 break;
10590
10591 default:
10592 gcc_unreachable ();
10593 }
10594
10595 if (remove)
10596 *list_p = OMP_CLAUSE_CHAIN (c);
10597 else
10598 list_p = &OMP_CLAUSE_CHAIN (c);
10599 }
10600
10601 /* Add in any implicit data sharing. */
10602 struct gimplify_adjust_omp_clauses_data data;
10603 data.list_p = list_p;
10604 data.pre_p = pre_p;
10605 splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, &data);
10606
10607 if (has_inscan_reductions)
10608 for (c = *orig_list_p; c; c = OMP_CLAUSE_CHAIN (c))
10609 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
10610 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
10611 {
10612 error_at (OMP_CLAUSE_LOCATION (c),
10613 "%<inscan%> %<reduction%> clause used together with "
10614 "%<linear%> clause for a variable other than loop "
10615 "iterator");
10616 break;
10617 }
10618
10619 gimplify_omp_ctxp = ctx->outer_context;
10620 delete_omp_context (ctx);
10621 }
10622
10623 /* Return 0 if the CONSTRUCTS selectors don't match the OpenMP context,
10624    -1 if that is not known yet (simd is involved, so it won't be known
10625    until vectorization) and 1 if they do match.  If SCORES is non-NULL,
10626    it should point to an array of at least 2*NCONSTRUCTS+2 ints, which
10627    will be filled with the positions of the CONSTRUCTS (position -1 if
10628    one will never match) followed by the number of constructs in the
10629    OpenMP context construct trait.  If the score depends on whether the
10630    code will end up in a declare simd clone or not, the function returns
10631    2 and there will be two sets of scores: the first for the case that
10632    it is not in a declare simd clone, the second for the case that it is.  */
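/* A minimal usage sketch (hypothetical caller, for illustration only):

     enum tree_code constructs[2] = { OMP_PARALLEL, OMP_FOR };
     int r = omp_construct_selector_matches (constructs, 2, NULL);

   Here r is 1 if the enclosing OpenMP context matches a construct
   selector naming parallel and for, 0 if it cannot match, and -1 if
   the answer will not be known until vectorization.  */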
10633
10634 int
10635 omp_construct_selector_matches (enum tree_code *constructs, int nconstructs,
10636 int *scores)
10637 {
10638 int matched = 0, cnt = 0;
10639 bool simd_seen = false;
10640 bool target_seen = false;
10641 int declare_simd_cnt = -1;
10642 auto_vec<enum tree_code, 16> codes;
10643 for (struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp; ctx;)
10644 {
10645 if (((ctx->region_type & ORT_PARALLEL) && ctx->code == OMP_PARALLEL)
10646 || ((ctx->region_type & (ORT_TARGET | ORT_IMPLICIT_TARGET | ORT_ACC))
10647 == ORT_TARGET && ctx->code == OMP_TARGET)
10648 || ((ctx->region_type & ORT_TEAMS) && ctx->code == OMP_TEAMS)
10649 || (ctx->region_type == ORT_WORKSHARE && ctx->code == OMP_FOR)
10650 || (ctx->region_type == ORT_SIMD
10651 && ctx->code == OMP_SIMD
10652 && !omp_find_clause (ctx->clauses, OMP_CLAUSE_BIND)))
10653 {
10654 ++cnt;
10655 if (scores)
10656 codes.safe_push (ctx->code);
10657 else if (matched < nconstructs && ctx->code == constructs[matched])
10658 {
10659 if (ctx->code == OMP_SIMD)
10660 {
10661 if (matched)
10662 return 0;
10663 simd_seen = true;
10664 }
10665 ++matched;
10666 }
10667 if (ctx->code == OMP_TARGET)
10668 {
10669 if (scores == NULL)
10670 return matched < nconstructs ? 0 : simd_seen ? -1 : 1;
10671 target_seen = true;
10672 break;
10673 }
10674 }
10675 else if (ctx->region_type == ORT_WORKSHARE
10676 && ctx->code == OMP_LOOP
10677 && ctx->outer_context
10678 && ctx->outer_context->region_type == ORT_COMBINED_PARALLEL
10679 && ctx->outer_context->outer_context
10680 && ctx->outer_context->outer_context->code == OMP_LOOP
10681 && ctx->outer_context->outer_context->distribute)
10682 ctx = ctx->outer_context->outer_context;
10683 ctx = ctx->outer_context;
10684 }
10685 if (!target_seen
10686 && lookup_attribute ("omp declare simd",
10687 DECL_ATTRIBUTES (current_function_decl)))
10688 {
10689 /* Declare simd is a "maybe" case: the simd construct is supposed to be added
10690    only to the clones created by omp-simd-clone.c, not to the base function.  */
10691 declare_simd_cnt = cnt++;
10692 if (scores)
10693 codes.safe_push (OMP_SIMD);
10694 else if (cnt == 0
10695 && constructs[0] == OMP_SIMD)
10696 {
10697 gcc_assert (matched == 0);
10698 simd_seen = true;
10699 if (++matched == nconstructs)
10700 return -1;
10701 }
10702 }
10703 if (tree attr = lookup_attribute ("omp declare variant variant",
10704 DECL_ATTRIBUTES (current_function_decl)))
10705 {
10706 enum tree_code variant_constructs[5];
10707 int variant_nconstructs = 0;
10708 if (!target_seen)
10709 variant_nconstructs
10710 = omp_constructor_traits_to_codes (TREE_VALUE (attr),
10711 variant_constructs);
10712 for (int i = 0; i < variant_nconstructs; i++)
10713 {
10714 ++cnt;
10715 if (scores)
10716 codes.safe_push (variant_constructs[i]);
10717 else if (matched < nconstructs
10718 && variant_constructs[i] == constructs[matched])
10719 {
10720 if (variant_constructs[i] == OMP_SIMD)
10721 {
10722 if (matched)
10723 return 0;
10724 simd_seen = true;
10725 }
10726 ++matched;
10727 }
10728 }
10729 }
10730 if (!target_seen
10731 && lookup_attribute ("omp declare target block",
10732 DECL_ATTRIBUTES (current_function_decl)))
10733 {
10734 if (scores)
10735 codes.safe_push (OMP_TARGET);
10736 else if (matched < nconstructs && constructs[matched] == OMP_TARGET)
10737 ++matched;
10738 }
10739 if (scores)
10740 {
10741 for (int pass = 0; pass < (declare_simd_cnt == -1 ? 1 : 2); pass++)
10742 {
10743 int j = codes.length () - 1;
10744 for (int i = nconstructs - 1; i >= 0; i--)
10745 {
10746 while (j >= 0
10747 && (pass != 0 || declare_simd_cnt != j)
10748 && constructs[i] != codes[j])
10749 --j;
10750 if (pass == 0 && declare_simd_cnt != -1 && j > declare_simd_cnt)
10751 *scores++ = j - 1;
10752 else
10753 *scores++ = j;
10754 }
10755 *scores++ = ((pass == 0 && declare_simd_cnt != -1)
10756 ? codes.length () - 1 : codes.length ());
10757 }
10758 return declare_simd_cnt == -1 ? 1 : 2;
10759 }
10760 if (matched == nconstructs)
10761 return simd_seen ? -1 : 1;
10762 return 0;
10763 }
10764
10765 /* Gimplify OACC_CACHE. */
10766
10767 static void
10768 gimplify_oacc_cache (tree *expr_p, gimple_seq *pre_p)
10769 {
10770 tree expr = *expr_p;
10771
10772 gimplify_scan_omp_clauses (&OACC_CACHE_CLAUSES (expr), pre_p, ORT_ACC,
10773 OACC_CACHE);
10774 gimplify_adjust_omp_clauses (pre_p, NULL, &OACC_CACHE_CLAUSES (expr),
10775 OACC_CACHE);
10776
10777 /* TODO: Do something sensible with this information. */
10778
10779 *expr_p = NULL_TREE;
10780 }
10781
10782 /* Helper function of gimplify_oacc_declare.  Its purpose is, if required,
10783    to translate the 'kind' in CLAUSE into an 'entry' kind and an 'exit'
10784    kind.  The entry kind will replace the one in CLAUSE, while the exit
10785    kind will be used in a new omp_clause and returned to the caller.  */
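/* For example, GOMP_MAP_TOFROM is rewritten to GOMP_MAP_TO for the entry
   and a new clause with GOMP_MAP_FROM is built for the exit, whereas
   e.g. GOMP_MAP_TO needs no exit counterpart and NULL_TREE is returned.  */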
10786
10787 static tree
10788 gimplify_oacc_declare_1 (tree clause)
10789 {
10790 HOST_WIDE_INT kind, new_op;
10791 bool ret = false;
10792 tree c = NULL;
10793
10794 kind = OMP_CLAUSE_MAP_KIND (clause);
10795
10796 switch (kind)
10797 {
10798 case GOMP_MAP_ALLOC:
10799 new_op = GOMP_MAP_RELEASE;
10800 ret = true;
10801 break;
10802
10803 case GOMP_MAP_FROM:
10804 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_ALLOC);
10805 new_op = GOMP_MAP_FROM;
10806 ret = true;
10807 break;
10808
10809 case GOMP_MAP_TOFROM:
10810 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_TO);
10811 new_op = GOMP_MAP_FROM;
10812 ret = true;
10813 break;
10814
10815 case GOMP_MAP_DEVICE_RESIDENT:
10816 case GOMP_MAP_FORCE_DEVICEPTR:
10817 case GOMP_MAP_FORCE_PRESENT:
10818 case GOMP_MAP_LINK:
10819 case GOMP_MAP_POINTER:
10820 case GOMP_MAP_TO:
10821 break;
10822
10823 default:
10824 gcc_unreachable ();
10825 break;
10826 }
10827
10828 if (ret)
10829 {
10830 c = build_omp_clause (OMP_CLAUSE_LOCATION (clause), OMP_CLAUSE_MAP);
10831 OMP_CLAUSE_SET_MAP_KIND (c, new_op);
10832 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clause);
10833 }
10834
10835 return c;
10836 }
10837
10838 /* Gimplify OACC_DECLARE. */
10839
10840 static void
10841 gimplify_oacc_declare (tree *expr_p, gimple_seq *pre_p)
10842 {
10843 tree expr = *expr_p;
10844 gomp_target *stmt;
10845 tree clauses, t, decl;
10846
10847 clauses = OACC_DECLARE_CLAUSES (expr);
10848
10849 gimplify_scan_omp_clauses (&clauses, pre_p, ORT_TARGET_DATA, OACC_DECLARE);
10850 gimplify_adjust_omp_clauses (pre_p, NULL, &clauses, OACC_DECLARE);
10851
10852 for (t = clauses; t; t = OMP_CLAUSE_CHAIN (t))
10853 {
10854 decl = OMP_CLAUSE_DECL (t);
10855
10856 if (TREE_CODE (decl) == MEM_REF)
10857 decl = TREE_OPERAND (decl, 0);
10858
10859 if (VAR_P (decl) && !is_oacc_declared (decl))
10860 {
10861 tree attr = get_identifier ("oacc declare target");
10862 DECL_ATTRIBUTES (decl) = tree_cons (attr, NULL_TREE,
10863 DECL_ATTRIBUTES (decl));
10864 }
10865
10866 if (VAR_P (decl)
10867 && !is_global_var (decl)
10868 && DECL_CONTEXT (decl) == current_function_decl)
10869 {
10870 tree c = gimplify_oacc_declare_1 (t);
10871 if (c)
10872 {
10873 if (oacc_declare_returns == NULL)
10874 oacc_declare_returns = new hash_map<tree, tree>;
10875
10876 oacc_declare_returns->put (decl, c);
10877 }
10878 }
10879
10880 if (gimplify_omp_ctxp)
10881 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_SEEN);
10882 }
10883
10884 stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
10885 clauses);
10886
10887 gimplify_seq_add_stmt (pre_p, stmt);
10888
10889 *expr_p = NULL_TREE;
10890 }
10891
10892 /* Gimplify the contents of an OMP_PARALLEL statement. This involves
10893 gimplification of the body, as well as scanning the body for used
10894 variables. We need to do this scan now, because variable-sized
10895 decls will be decomposed during gimplification. */
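/* E.g. a '#pragma omp parallel' body is gimplified into a single
   GIMPLE_OMP_PARALLEL statement, with its clause list adjusted for any
   implicit data sharing discovered while scanning the body.  */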
10896
10897 static void
10898 gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
10899 {
10900 tree expr = *expr_p;
10901 gimple *g;
10902 gimple_seq body = NULL;
10903
10904 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
10905 OMP_PARALLEL_COMBINED (expr)
10906 ? ORT_COMBINED_PARALLEL
10907 : ORT_PARALLEL, OMP_PARALLEL);
10908
10909 push_gimplify_context ();
10910
10911 g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
10912 if (gimple_code (g) == GIMPLE_BIND)
10913 pop_gimplify_context (g);
10914 else
10915 pop_gimplify_context (NULL);
10916
10917 gimplify_adjust_omp_clauses (pre_p, body, &OMP_PARALLEL_CLAUSES (expr),
10918 OMP_PARALLEL);
10919
10920 g = gimple_build_omp_parallel (body,
10921 OMP_PARALLEL_CLAUSES (expr),
10922 NULL_TREE, NULL_TREE);
10923 if (OMP_PARALLEL_COMBINED (expr))
10924 gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
10925 gimplify_seq_add_stmt (pre_p, g);
10926 *expr_p = NULL_TREE;
10927 }
10928
10929 /* Gimplify the contents of an OMP_TASK statement. This involves
10930 gimplification of the body, as well as scanning the body for used
10931 variables. We need to do this scan now, because variable-sized
10932 decls will be decomposed during gimplification. */
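/* An OMP_TASK with a NULL body represents '#pragma omp taskwait' with
   depend clause(s); it is gimplified below into a GIMPLE_OMP_TASK with
   the taskwait_p flag set on it.  */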
10933
10934 static void
10935 gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
10936 {
10937 tree expr = *expr_p;
10938 gimple *g;
10939 gimple_seq body = NULL;
10940
10941 if (OMP_TASK_BODY (expr) == NULL_TREE)
10942 for (tree c = OMP_TASK_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
10943 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
10944 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_MUTEXINOUTSET)
10945 {
10946 error_at (OMP_CLAUSE_LOCATION (c),
10947 "%<mutexinoutset%> kind in %<depend%> clause on a "
10948 "%<taskwait%> construct");
10949 break;
10950 }
10951
10952 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p,
10953 omp_find_clause (OMP_TASK_CLAUSES (expr),
10954 OMP_CLAUSE_UNTIED)
10955 ? ORT_UNTIED_TASK : ORT_TASK, OMP_TASK);
10956
10957 if (OMP_TASK_BODY (expr))
10958 {
10959 push_gimplify_context ();
10960
10961 g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
10962 if (gimple_code (g) == GIMPLE_BIND)
10963 pop_gimplify_context (g);
10964 else
10965 pop_gimplify_context (NULL);
10966 }
10967
10968 gimplify_adjust_omp_clauses (pre_p, body, &OMP_TASK_CLAUSES (expr),
10969 OMP_TASK);
10970
10971 g = gimple_build_omp_task (body,
10972 OMP_TASK_CLAUSES (expr),
10973 NULL_TREE, NULL_TREE,
10974 NULL_TREE, NULL_TREE, NULL_TREE);
10975 if (OMP_TASK_BODY (expr) == NULL_TREE)
10976 gimple_omp_task_set_taskwait_p (g, true);
10977 gimplify_seq_add_stmt (pre_p, g);
10978 *expr_p = NULL_TREE;
10979 }
10980
10981 /* Helper function of gimplify_omp_for: find the OMP_FOR resp. OMP_SIMD
10982    with a non-NULL OMP_FOR_INIT.  Also fill in the pdata array:
10983    pdata[0] is non-NULL if there is anything non-trivial in between,
10984    pdata[1] is the address of the OMP_PARALLEL in between if any,
10985    pdata[2] is the address of the OMP_FOR in between if any, and
10986    pdata[3] is the address of the inner OMP_FOR/OMP_SIMD.  */
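/* E.g. for a combined construct such as

     #pragma omp distribute parallel for simd
     for (...) ...

   the walk records the OMP_PARALLEL (pdata[1]), then the OMP_FOR with
   NULL OMP_FOR_INIT (pdata[2]), and stops at the innermost OMP_SIMD,
   whose OMP_FOR_INIT is non-NULL (pdata[3]).  */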
10987
10988 static tree
10989 find_combined_omp_for (tree *tp, int *walk_subtrees, void *data)
10990 {
10991 tree **pdata = (tree **) data;
10992 *walk_subtrees = 0;
10993 switch (TREE_CODE (*tp))
10994 {
10995 case OMP_FOR:
10996 if (OMP_FOR_INIT (*tp) != NULL_TREE)
10997 {
10998 pdata[3] = tp;
10999 return *tp;
11000 }
11001 pdata[2] = tp;
11002 *walk_subtrees = 1;
11003 break;
11004 case OMP_SIMD:
11005 if (OMP_FOR_INIT (*tp) != NULL_TREE)
11006 {
11007 pdata[3] = tp;
11008 return *tp;
11009 }
11010 break;
11011 case BIND_EXPR:
11012 if (BIND_EXPR_VARS (*tp)
11013 || (BIND_EXPR_BLOCK (*tp)
11014 && BLOCK_VARS (BIND_EXPR_BLOCK (*tp))))
11015 pdata[0] = tp;
11016 *walk_subtrees = 1;
11017 break;
11018 case STATEMENT_LIST:
11019 if (!tsi_one_before_end_p (tsi_start (*tp)))
11020 pdata[0] = tp;
11021 *walk_subtrees = 1;
11022 break;
11023 case TRY_FINALLY_EXPR:
11024 pdata[0] = tp;
11025 *walk_subtrees = 1;
11026 break;
11027 case OMP_PARALLEL:
11028 pdata[1] = tp;
11029 *walk_subtrees = 1;
11030 break;
11031 default:
11032 break;
11033 }
11034 return NULL_TREE;
11035 }
11036
11037 /* Gimplify the gross structure of an OMP_FOR statement. */
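/* Besides plain OMP_FOR this also handles OMP_SIMD, OMP_DISTRIBUTE,
   OMP_TASKLOOP and OACC_LOOP (see the switch on TREE_CODE below), as
   well as combined constructs where OMP_FOR_INIT is NULL and the loop
   that actually iterates is nested somewhere in the body.  */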
11038
11039 static enum gimplify_status
11040 gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
11041 {
11042 tree for_stmt, orig_for_stmt, inner_for_stmt = NULL_TREE, decl, var, t;
11043 enum gimplify_status ret = GS_ALL_DONE;
11044 enum gimplify_status tret;
11045 gomp_for *gfor;
11046 gimple_seq for_body, for_pre_body;
11047 int i;
11048 bitmap has_decl_expr = NULL;
11049 enum omp_region_type ort = ORT_WORKSHARE;
11050
11051 orig_for_stmt = for_stmt = *expr_p;
11052
11053 bool loop_p = (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_BIND)
11054 != NULL_TREE);
11055 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
11056 {
11057 tree *data[4] = { NULL, NULL, NULL, NULL };
11058 gcc_assert (TREE_CODE (for_stmt) != OACC_LOOP);
11059 inner_for_stmt = walk_tree (&OMP_FOR_BODY (for_stmt),
11060 find_combined_omp_for, data, NULL);
11061 if (inner_for_stmt == NULL_TREE)
11062 {
11063 gcc_assert (seen_error ());
11064 *expr_p = NULL_TREE;
11065 return GS_ERROR;
11066 }
11067 if (data[2] && OMP_FOR_PRE_BODY (*data[2]))
11068 {
11069 append_to_statement_list_force (OMP_FOR_PRE_BODY (*data[2]),
11070 &OMP_FOR_PRE_BODY (for_stmt));
11071 OMP_FOR_PRE_BODY (*data[2]) = NULL_TREE;
11072 }
11073 if (OMP_FOR_PRE_BODY (inner_for_stmt))
11074 {
11075 append_to_statement_list_force (OMP_FOR_PRE_BODY (inner_for_stmt),
11076 &OMP_FOR_PRE_BODY (for_stmt));
11077 OMP_FOR_PRE_BODY (inner_for_stmt) = NULL_TREE;
11078 }
11079
11080 if (data[0])
11081 {
11082 /* We have some statements or variable declarations in between
11083 the composite construct directives. Move them around the
11084 inner_for_stmt. */
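  /* E.g., roughly,

       #pragma omp distribute
       {
         <intervening decls/statements>
         #pragma omp parallel for
         <loop>
       }

     is restructured so that the intervening code ends up around the
     directly nested directives wrapping the loop, and GS_OK is
     returned so that the result is gimplified again.  */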
11085 data[0] = expr_p;
11086 for (i = 0; i < 3; i++)
11087 if (data[i])
11088 {
11089 tree t = *data[i];
11090 if (i < 2 && data[i + 1] == &OMP_BODY (t))
11091 data[i + 1] = data[i];
11092 *data[i] = OMP_BODY (t);
11093 tree body = build3 (BIND_EXPR, void_type_node, NULL_TREE,
11094 NULL_TREE, make_node (BLOCK));
11095 OMP_BODY (t) = body;
11096 append_to_statement_list_force (inner_for_stmt,
11097 &BIND_EXPR_BODY (body));
11098 *data[3] = t;
11099 data[3] = tsi_stmt_ptr (tsi_start (BIND_EXPR_BODY (body)));
11100 gcc_assert (*data[3] == inner_for_stmt);
11101 }
11102 return GS_OK;
11103 }
11104
11105 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (inner_for_stmt)); i++)
11106 if (!loop_p
11107 && OMP_FOR_ORIG_DECLS (inner_for_stmt)
11108 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
11109 i)) == TREE_LIST
11110 && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
11111 i)))
11112 {
11113 tree orig = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt), i);
11114 /* Class iterators aren't allowed on OMP_SIMD, so the only
11115 case we need to solve is distribute parallel for. They are
11116 allowed on the loop construct, but that is already handled
11117 in gimplify_omp_loop. */
11118 gcc_assert (TREE_CODE (inner_for_stmt) == OMP_FOR
11119 && TREE_CODE (for_stmt) == OMP_DISTRIBUTE
11120 && data[1]);
11121 tree orig_decl = TREE_PURPOSE (orig);
11122 tree last = TREE_VALUE (orig);
11123 tree *pc;
11124 for (pc = &OMP_FOR_CLAUSES (inner_for_stmt);
11125 *pc; pc = &OMP_CLAUSE_CHAIN (*pc))
11126 if ((OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_PRIVATE
11127 || OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_LASTPRIVATE)
11128 && OMP_CLAUSE_DECL (*pc) == orig_decl)
11129 break;
11130 if (*pc == NULL_TREE)
11131 {
11132 tree *spc;
11133 for (spc = &OMP_PARALLEL_CLAUSES (*data[1]);
11134 *spc; spc = &OMP_CLAUSE_CHAIN (*spc))
11135 if (OMP_CLAUSE_CODE (*spc) == OMP_CLAUSE_PRIVATE
11136 && OMP_CLAUSE_DECL (*spc) == orig_decl)
11137 break;
11138 if (*spc)
11139 {
11140 tree c = *spc;
11141 *spc = OMP_CLAUSE_CHAIN (c);
11142 OMP_CLAUSE_CHAIN (c) = NULL_TREE;
11143 *pc = c;
11144 }
11145 }
11146 if (*pc == NULL_TREE)
11147 ;
11148 else if (OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_PRIVATE)
11149 {
11150 /* A private clause will appear only on inner_for_stmt.
11151    Change it into firstprivate, and add a private clause
11152    on for_stmt.  */
11153 tree c = copy_node (*pc);
11154 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
11155 OMP_FOR_CLAUSES (for_stmt) = c;
11156 OMP_CLAUSE_CODE (*pc) = OMP_CLAUSE_FIRSTPRIVATE;
11157 lang_hooks.decls.omp_finish_clause (*pc, pre_p);
11158 }
11159 else
11160 {
11161 /* A lastprivate clause will appear on both inner_for_stmt
11162    and for_stmt.  Add a firstprivate clause to
11163    inner_for_stmt.  */
11164 tree c = build_omp_clause (OMP_CLAUSE_LOCATION (*pc),
11165 OMP_CLAUSE_FIRSTPRIVATE);
11166 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (*pc);
11167 OMP_CLAUSE_CHAIN (c) = *pc;
11168 *pc = c;
11169 lang_hooks.decls.omp_finish_clause (*pc, pre_p);
11170 }
11171 tree c = build_omp_clause (UNKNOWN_LOCATION,
11172 OMP_CLAUSE_FIRSTPRIVATE);
11173 OMP_CLAUSE_DECL (c) = last;
11174 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
11175 OMP_PARALLEL_CLAUSES (*data[1]) = c;
11176 c = build_omp_clause (UNKNOWN_LOCATION,
11177 *pc ? OMP_CLAUSE_SHARED
11178 : OMP_CLAUSE_FIRSTPRIVATE);
11179 OMP_CLAUSE_DECL (c) = orig_decl;
11180 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
11181 OMP_PARALLEL_CLAUSES (*data[1]) = c;
11182 }
11183 /* Similarly, take care of C++ range-for temporaries; those should
11184    be firstprivate on the OMP_PARALLEL, if there is one.  */
11185 if (data[1])
11186 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (inner_for_stmt)); i++)
11187 if (OMP_FOR_ORIG_DECLS (inner_for_stmt)
11188 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
11189 i)) == TREE_LIST
11190 && TREE_CHAIN (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
11191 i)))
11192 {
11193 tree orig
11194 = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt), i);
11195 tree v = TREE_CHAIN (orig);
11196 tree c = build_omp_clause (UNKNOWN_LOCATION,
11197 OMP_CLAUSE_FIRSTPRIVATE);
11198 /* First add firstprivate clause for the __for_end artificial
11199 decl. */
11200 OMP_CLAUSE_DECL (c) = TREE_VEC_ELT (v, 1);
11201 if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c)))
11202 == REFERENCE_TYPE)
11203 OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c) = 1;
11204 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
11205 OMP_PARALLEL_CLAUSES (*data[1]) = c;
11206 if (TREE_VEC_ELT (v, 0))
11207 {
11208 /* And now the same for __for_range artificial decl if it
11209 exists. */
11210 c = build_omp_clause (UNKNOWN_LOCATION,
11211 OMP_CLAUSE_FIRSTPRIVATE);
11212 OMP_CLAUSE_DECL (c) = TREE_VEC_ELT (v, 0);
11213 if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c)))
11214 == REFERENCE_TYPE)
11215 OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c) = 1;
11216 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
11217 OMP_PARALLEL_CLAUSES (*data[1]) = c;
11218 }
11219 }
11220 }
11221
11222 switch (TREE_CODE (for_stmt))
11223 {
11224 case OMP_FOR:
11225 case OMP_DISTRIBUTE:
11226 break;
11227 case OACC_LOOP:
11228 ort = ORT_ACC;
11229 break;
11230 case OMP_TASKLOOP:
11231 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_UNTIED))
11232 ort = ORT_UNTIED_TASKLOOP;
11233 else
11234 ort = ORT_TASKLOOP;
11235 break;
11236 case OMP_SIMD:
11237 ort = ORT_SIMD;
11238 break;
11239 default:
11240 gcc_unreachable ();
11241 }
11242
11243 /* Set OMP_CLAUSE_LINEAR_NO_COPYIN flag on explicit linear
11244 clause for the IV. */
11245 if (ort == ORT_SIMD && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
11246 {
11247 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), 0);
11248 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
11249 decl = TREE_OPERAND (t, 0);
11250 for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
11251 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
11252 && OMP_CLAUSE_DECL (c) == decl)
11253 {
11254 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
11255 break;
11256 }
11257 }
11258
11259 if (TREE_CODE (for_stmt) != OMP_TASKLOOP)
11260 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p, ort,
11261 loop_p && TREE_CODE (for_stmt) != OMP_SIMD
11262 ? OMP_LOOP : TREE_CODE (for_stmt));
11263
11264 if (TREE_CODE (for_stmt) == OMP_DISTRIBUTE)
11265 gimplify_omp_ctxp->distribute = true;
11266
11267 /* Handle OMP_FOR_INIT. */
11268 for_pre_body = NULL;
11269 if ((ort == ORT_SIMD
11270 || (inner_for_stmt && TREE_CODE (inner_for_stmt) == OMP_SIMD))
11271 && OMP_FOR_PRE_BODY (for_stmt))
11272 {
11273 has_decl_expr = BITMAP_ALLOC (NULL);
11274 if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == DECL_EXPR
11275 && TREE_CODE (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt)))
11276 == VAR_DECL)
11277 {
11278 t = OMP_FOR_PRE_BODY (for_stmt);
11279 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
11280 }
11281 else if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == STATEMENT_LIST)
11282 {
11283 tree_stmt_iterator si;
11284 for (si = tsi_start (OMP_FOR_PRE_BODY (for_stmt)); !tsi_end_p (si);
11285 tsi_next (&si))
11286 {
11287 t = tsi_stmt (si);
11288 if (TREE_CODE (t) == DECL_EXPR
11289 && TREE_CODE (DECL_EXPR_DECL (t)) == VAR_DECL)
11290 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
11291 }
11292 }
11293 }
11294 if (OMP_FOR_PRE_BODY (for_stmt))
11295 {
11296 if (TREE_CODE (for_stmt) != OMP_TASKLOOP || gimplify_omp_ctxp)
11297 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
11298 else
11299 {
11300 struct gimplify_omp_ctx ctx;
11301 memset (&ctx, 0, sizeof (ctx));
11302 ctx.region_type = ORT_NONE;
11303 gimplify_omp_ctxp = &ctx;
11304 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
11305 gimplify_omp_ctxp = NULL;
11306 }
11307 }
11308 OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;
11309
11310 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
11311 for_stmt = inner_for_stmt;
11312
11313 /* For taskloop, we need to gimplify the start, end and step values
11314    before the taskloop, outside of the taskloop omp context.  */
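  /* E.g. for a loop like

       #pragma omp taskloop
       for (i = f1 (); i < f2 (); i += f3 ())

     the f1 (), f2 () and f3 () values (arbitrary functions, for
     illustration) are evaluated into temporaries here and those
     temporaries are made firstprivate on the taskloop.  */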
11315 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
11316 {
11317 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
11318 {
11319 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
11320 if (!is_gimple_constant (TREE_OPERAND (t, 1)))
11321 {
11322 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
11323 TREE_OPERAND (t, 1)
11324 = get_initialized_tmp_var (TREE_OPERAND (t, 1),
11325 gimple_seq_empty_p (for_pre_body)
11326 ? pre_p : &for_pre_body, NULL,
11327 false);
11328 /* A reference-to-pointer conversion is considered useless,
11329    but it is significant for the firstprivate clause.  Force
11330    it here.  */
11331 if (TREE_CODE (type) == POINTER_TYPE
11332 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 1)))
11333 == REFERENCE_TYPE))
11334 {
11335 tree v = create_tmp_var (TYPE_MAIN_VARIANT (type));
11336 tree m = build2 (INIT_EXPR, TREE_TYPE (v), v,
11337 TREE_OPERAND (t, 1));
11338 gimplify_and_add (m, gimple_seq_empty_p (for_pre_body)
11339 ? pre_p : &for_pre_body);
11340 TREE_OPERAND (t, 1) = v;
11341 }
11342 tree c = build_omp_clause (input_location,
11343 OMP_CLAUSE_FIRSTPRIVATE);
11344 OMP_CLAUSE_DECL (c) = TREE_OPERAND (t, 1);
11345 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
11346 OMP_FOR_CLAUSES (orig_for_stmt) = c;
11347 }
11348
11349 /* Handle OMP_FOR_COND. */
11350 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
11351 if (!is_gimple_constant (TREE_OPERAND (t, 1)))
11352 {
11353 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
11354 TREE_OPERAND (t, 1)
11355 = get_initialized_tmp_var (TREE_OPERAND (t, 1),
11356 gimple_seq_empty_p (for_pre_body)
11357 ? pre_p : &for_pre_body, NULL,
11358 false);
11359 /* A reference-to-pointer conversion is considered useless,
11360    but it is significant for the firstprivate clause.  Force
11361    it here.  */
11362 if (TREE_CODE (type) == POINTER_TYPE
11363 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 1)))
11364 == REFERENCE_TYPE))
11365 {
11366 tree v = create_tmp_var (TYPE_MAIN_VARIANT (type));
11367 tree m = build2 (INIT_EXPR, TREE_TYPE (v), v,
11368 TREE_OPERAND (t, 1));
11369 gimplify_and_add (m, gimple_seq_empty_p (for_pre_body)
11370 ? pre_p : &for_pre_body);
11371 TREE_OPERAND (t, 1) = v;
11372 }
11373 tree c = build_omp_clause (input_location,
11374 OMP_CLAUSE_FIRSTPRIVATE);
11375 OMP_CLAUSE_DECL (c) = TREE_OPERAND (t, 1);
11376 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
11377 OMP_FOR_CLAUSES (orig_for_stmt) = c;
11378 }
11379
11380 /* Handle OMP_FOR_INCR. */
11381 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
11382 if (TREE_CODE (t) == MODIFY_EXPR)
11383 {
11384 decl = TREE_OPERAND (t, 0);
11385 t = TREE_OPERAND (t, 1);
11386 tree *tp = &TREE_OPERAND (t, 1);
11387 if (TREE_CODE (t) == PLUS_EXPR && *tp == decl)
11388 tp = &TREE_OPERAND (t, 0);
11389
11390 if (!is_gimple_constant (*tp))
11391 {
11392 gimple_seq *seq = gimple_seq_empty_p (for_pre_body)
11393 ? pre_p : &for_pre_body;
11394 *tp = get_initialized_tmp_var (*tp, seq, NULL, false);
11395 tree c = build_omp_clause (input_location,
11396 OMP_CLAUSE_FIRSTPRIVATE);
11397 OMP_CLAUSE_DECL (c) = *tp;
11398 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
11399 OMP_FOR_CLAUSES (orig_for_stmt) = c;
11400 }
11401 }
11402 }
11403
11404 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (orig_for_stmt), pre_p, ort,
11405 OMP_TASKLOOP);
11406 }
11407
11408 if (orig_for_stmt != for_stmt)
11409 gimplify_omp_ctxp->combined_loop = true;
11410
11411 for_body = NULL;
11412 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
11413 == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
11414 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
11415 == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));
11416
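  /* An ordered clause with an argument (ordered(N)) makes this a doacross
     loop; remember the original and gimplified iteration variables, which
     the later processing of the ordered depend clauses relies on.  */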
11417 tree c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ORDERED);
11418 bool is_doacross = false;
11419 if (c && OMP_CLAUSE_ORDERED_EXPR (c))
11420 {
11421 is_doacross = true;
11422 gimplify_omp_ctxp->loop_iter_var.create (TREE_VEC_LENGTH
11423 (OMP_FOR_INIT (for_stmt))
11424 * 2);
11425 }
11426 int collapse = 1, tile = 0;
11427 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_COLLAPSE);
11428 if (c)
11429 collapse = tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (c));
11430 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_TILE);
11431 if (c)
11432 tile = list_length (OMP_CLAUSE_TILE_LIST (c));
11433 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
11434 {
11435 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
11436 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
11437 decl = TREE_OPERAND (t, 0);
11438 gcc_assert (DECL_P (decl));
11439 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
11440 || POINTER_TYPE_P (TREE_TYPE (decl)));
11441 if (is_doacross)
11442 {
11443 if (TREE_CODE (for_stmt) == OMP_FOR && OMP_FOR_ORIG_DECLS (for_stmt))
11444 {
11445 tree orig_decl = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
11446 if (TREE_CODE (orig_decl) == TREE_LIST)
11447 {
11448 orig_decl = TREE_PURPOSE (orig_decl);
11449 if (!orig_decl)
11450 orig_decl = decl;
11451 }
11452 gimplify_omp_ctxp->loop_iter_var.quick_push (orig_decl);
11453 }
11454 else
11455 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
11456 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
11457 }
11458
11459 /* Make sure the iteration variable is private. */
11460 tree c = NULL_TREE;
11461 tree c2 = NULL_TREE;
11462 if (orig_for_stmt != for_stmt)
11463 {
11464 /* Preserve this information until we gimplify the inner simd. */
11465 if (has_decl_expr
11466 && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
11467 TREE_PRIVATE (t) = 1;
11468 }
11469 else if (ort == ORT_SIMD)
11470 {
11471 splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
11472 (splay_tree_key) decl);
11473 omp_is_private (gimplify_omp_ctxp, decl,
11474 1 + (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
11475 != 1));
11476 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
11477 {
11478 omp_notice_variable (gimplify_omp_ctxp, decl, true);
11479 if (n->value & GOVD_LASTPRIVATE_CONDITIONAL)
11480 for (tree c3 = omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
11481 OMP_CLAUSE_LASTPRIVATE);
11482 c3; c3 = omp_find_clause (OMP_CLAUSE_CHAIN (c3),
11483 OMP_CLAUSE_LASTPRIVATE))
11484 if (OMP_CLAUSE_DECL (c3) == decl)
11485 {
11486 warning_at (OMP_CLAUSE_LOCATION (c3), 0,
11487 "conditional %<lastprivate%> on loop "
11488 "iterator %qD ignored", decl);
11489 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c3) = 0;
11490 n->value &= ~GOVD_LASTPRIVATE_CONDITIONAL;
11491 }
11492 }
11493 else if (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1 && !loop_p)
11494 {
11495 c = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
11496 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
11497 unsigned int flags = GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN;
11498 if ((has_decl_expr
11499 && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
11500 || TREE_PRIVATE (t))
11501 {
11502 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
11503 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
11504 }
11505 struct gimplify_omp_ctx *outer
11506 = gimplify_omp_ctxp->outer_context;
11507 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
11508 {
11509 if (outer->region_type == ORT_WORKSHARE
11510 && outer->combined_loop)
11511 {
11512 n = splay_tree_lookup (outer->variables,
11513 (splay_tree_key)decl);
11514 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
11515 {
11516 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
11517 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
11518 }
11519 else
11520 {
11521 struct gimplify_omp_ctx *octx = outer->outer_context;
11522 if (octx
11523 && octx->region_type == ORT_COMBINED_PARALLEL
11524 && octx->outer_context
11525 && (octx->outer_context->region_type
11526 == ORT_WORKSHARE)
11527 && octx->outer_context->combined_loop)
11528 {
11529 octx = octx->outer_context;
11530 n = splay_tree_lookup (octx->variables,
11531 (splay_tree_key)decl);
11532 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
11533 {
11534 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
11535 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
11536 }
11537 }
11538 }
11539 }
11540 }
11541
11542 OMP_CLAUSE_DECL (c) = decl;
11543 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
11544 OMP_FOR_CLAUSES (for_stmt) = c;
11545 omp_add_variable (gimplify_omp_ctxp, decl, flags);
11546 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
11547 {
11548 if (outer->region_type == ORT_WORKSHARE
11549 && outer->combined_loop)
11550 {
11551 if (outer->outer_context
11552 && (outer->outer_context->region_type
11553 == ORT_COMBINED_PARALLEL))
11554 outer = outer->outer_context;
11555 else if (omp_check_private (outer, decl, false))
11556 outer = NULL;
11557 }
11558 else if (((outer->region_type & ORT_TASKLOOP)
11559 == ORT_TASKLOOP)
11560 && outer->combined_loop
11561 && !omp_check_private (gimplify_omp_ctxp,
11562 decl, false))
11563 ;
11564 else if (outer->region_type != ORT_COMBINED_PARALLEL)
11565 {
11566 omp_notice_variable (outer, decl, true);
11567 outer = NULL;
11568 }
11569 if (outer)
11570 {
11571 n = splay_tree_lookup (outer->variables,
11572 (splay_tree_key)decl);
11573 if (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
11574 {
11575 omp_add_variable (outer, decl,
11576 GOVD_LASTPRIVATE | GOVD_SEEN);
11577 if (outer->region_type == ORT_COMBINED_PARALLEL
11578 && outer->outer_context
11579 && (outer->outer_context->region_type
11580 == ORT_WORKSHARE)
11581 && outer->outer_context->combined_loop)
11582 {
11583 outer = outer->outer_context;
11584 n = splay_tree_lookup (outer->variables,
11585 (splay_tree_key)decl);
11586 if (omp_check_private (outer, decl, false))
11587 outer = NULL;
11588 else if (n == NULL
11589 || ((n->value & GOVD_DATA_SHARE_CLASS)
11590 == 0))
11591 omp_add_variable (outer, decl,
11592 GOVD_LASTPRIVATE
11593 | GOVD_SEEN);
11594 else
11595 outer = NULL;
11596 }
11597 if (outer && outer->outer_context
11598 && ((outer->outer_context->region_type
11599 & ORT_COMBINED_TEAMS) == ORT_COMBINED_TEAMS
11600 || (((outer->region_type & ORT_TASKLOOP)
11601 == ORT_TASKLOOP)
11602 && (outer->outer_context->region_type
11603 == ORT_COMBINED_PARALLEL))))
11604 {
11605 outer = outer->outer_context;
11606 n = splay_tree_lookup (outer->variables,
11607 (splay_tree_key)decl);
11608 if (n == NULL
11609 || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
11610 omp_add_variable (outer, decl,
11611 GOVD_SHARED | GOVD_SEEN);
11612 else
11613 outer = NULL;
11614 }
11615 if (outer && outer->outer_context)
11616 omp_notice_variable (outer->outer_context, decl,
11617 true);
11618 }
11619 }
11620 }
11621 }
11622 else
11623 {
11624 bool lastprivate
11625 = (!has_decl_expr
11626 || !bitmap_bit_p (has_decl_expr, DECL_UID (decl)));
11627 if (TREE_PRIVATE (t))
11628 lastprivate = false;
11629 if (loop_p && OMP_FOR_ORIG_DECLS (for_stmt))
11630 {
11631 tree elt = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
11632 if (TREE_CODE (elt) == TREE_LIST && TREE_PURPOSE (elt))
11633 lastprivate = false;
11634 }
11635
11636 struct gimplify_omp_ctx *outer
11637 = gimplify_omp_ctxp->outer_context;
11638 if (outer && lastprivate)
11639 {
11640 if (outer->region_type == ORT_WORKSHARE
11641 && outer->combined_loop)
11642 {
11643 n = splay_tree_lookup (outer->variables,
11644 (splay_tree_key)decl);
11645 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
11646 {
11647 lastprivate = false;
11648 outer = NULL;
11649 }
11650 else if (outer->outer_context
11651 && (outer->outer_context->region_type
11652 == ORT_COMBINED_PARALLEL))
11653 outer = outer->outer_context;
11654 else if (omp_check_private (outer, decl, false))
11655 outer = NULL;
11656 }
11657 else if (((outer->region_type & ORT_TASKLOOP)
11658 == ORT_TASKLOOP)
11659 && outer->combined_loop
11660 && !omp_check_private (gimplify_omp_ctxp,
11661 decl, false))
11662 ;
11663 else if (outer->region_type != ORT_COMBINED_PARALLEL)
11664 {
11665 omp_notice_variable (outer, decl, true);
11666 outer = NULL;
11667 }
11668 if (outer)
11669 {
11670 n = splay_tree_lookup (outer->variables,
11671 (splay_tree_key)decl);
11672 if (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
11673 {
11674 omp_add_variable (outer, decl,
11675 GOVD_LASTPRIVATE | GOVD_SEEN);
11676 if (outer->region_type == ORT_COMBINED_PARALLEL
11677 && outer->outer_context
11678 && (outer->outer_context->region_type
11679 == ORT_WORKSHARE)
11680 && outer->outer_context->combined_loop)
11681 {
11682 outer = outer->outer_context;
11683 n = splay_tree_lookup (outer->variables,
11684 (splay_tree_key)decl);
11685 if (omp_check_private (outer, decl, false))
11686 outer = NULL;
11687 else if (n == NULL
11688 || ((n->value & GOVD_DATA_SHARE_CLASS)
11689 == 0))
11690 omp_add_variable (outer, decl,
11691 GOVD_LASTPRIVATE
11692 | GOVD_SEEN);
11693 else
11694 outer = NULL;
11695 }
11696 if (outer && outer->outer_context
11697 && ((outer->outer_context->region_type
11698 & ORT_COMBINED_TEAMS) == ORT_COMBINED_TEAMS
11699 || (((outer->region_type & ORT_TASKLOOP)
11700 == ORT_TASKLOOP)
11701 && (outer->outer_context->region_type
11702 == ORT_COMBINED_PARALLEL))))
11703 {
11704 outer = outer->outer_context;
11705 n = splay_tree_lookup (outer->variables,
11706 (splay_tree_key)decl);
11707 if (n == NULL
11708 || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
11709 omp_add_variable (outer, decl,
11710 GOVD_SHARED | GOVD_SEEN);
11711 else
11712 outer = NULL;
11713 }
11714 if (outer && outer->outer_context)
11715 omp_notice_variable (outer->outer_context, decl,
11716 true);
11717 }
11718 }
11719 }
11720
11721 c = build_omp_clause (input_location,
11722 lastprivate ? OMP_CLAUSE_LASTPRIVATE
11723 : OMP_CLAUSE_PRIVATE);
11724 OMP_CLAUSE_DECL (c) = decl;
11725 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
11726 OMP_FOR_CLAUSES (for_stmt) = c;
11727 omp_add_variable (gimplify_omp_ctxp, decl,
11728 (lastprivate ? GOVD_LASTPRIVATE : GOVD_PRIVATE)
11729 | GOVD_EXPLICIT | GOVD_SEEN);
11730 c = NULL_TREE;
11731 }
11732 }
11733 else if (omp_is_private (gimplify_omp_ctxp, decl, 0))
11734 {
11735 omp_notice_variable (gimplify_omp_ctxp, decl, true);
11736 splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
11737 (splay_tree_key) decl);
11738 if (n && (n->value & GOVD_LASTPRIVATE_CONDITIONAL))
11739 for (tree c3 = omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
11740 OMP_CLAUSE_LASTPRIVATE);
11741 c3; c3 = omp_find_clause (OMP_CLAUSE_CHAIN (c3),
11742 OMP_CLAUSE_LASTPRIVATE))
11743 if (OMP_CLAUSE_DECL (c3) == decl)
11744 {
11745 warning_at (OMP_CLAUSE_LOCATION (c3), 0,
11746 "conditional %<lastprivate%> on loop "
11747 "iterator %qD ignored", decl);
11748 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c3) = 0;
11749 n->value &= ~GOVD_LASTPRIVATE_CONDITIONAL;
11750 }
11751 }
11752 else
11753 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
11754
11755 /* If DECL is not a gimple register, create a temporary variable to act
11756    as an iteration counter.  This is valid, since DECL cannot be
11757    modified in the body of the loop.  Do the same for any iteration vars
11758    in a simd with collapse > 1, where the iterator vars must be
11759    lastprivate.  */
11760 if (orig_for_stmt != for_stmt)
11761 var = decl;
11762 else if (!is_gimple_reg (decl)
11763 || (ort == ORT_SIMD
11764 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1))
11765 {
11766 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
11767 /* Make sure omp_add_variable is not called on it prematurely.
11768 We call it ourselves a few lines later. */
11769 gimplify_omp_ctxp = NULL;
11770 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
11771 gimplify_omp_ctxp = ctx;
11772 TREE_OPERAND (t, 0) = var;
11773
11774 gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));
11775
11776 if (ort == ORT_SIMD
11777 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
11778 {
11779 c2 = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
11780 OMP_CLAUSE_LINEAR_NO_COPYIN (c2) = 1;
11781 OMP_CLAUSE_LINEAR_NO_COPYOUT (c2) = 1;
11782 OMP_CLAUSE_DECL (c2) = var;
11783 OMP_CLAUSE_CHAIN (c2) = OMP_FOR_CLAUSES (for_stmt);
11784 OMP_FOR_CLAUSES (for_stmt) = c2;
11785 omp_add_variable (gimplify_omp_ctxp, var,
11786 GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN);
11787 if (c == NULL_TREE)
11788 {
11789 c = c2;
11790 c2 = NULL_TREE;
11791 }
11792 }
11793 else
11794 omp_add_variable (gimplify_omp_ctxp, var,
11795 GOVD_PRIVATE | GOVD_SEEN);
11796 }
11797 else
11798 var = decl;
11799
11800 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
11801 is_gimple_val, fb_rvalue, false);
11802 ret = MIN (ret, tret);
11803 if (ret == GS_ERROR)
11804 return ret;
11805
11806 /* Handle OMP_FOR_COND. */
11807 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
11808 gcc_assert (COMPARISON_CLASS_P (t));
11809 gcc_assert (TREE_OPERAND (t, 0) == decl);
11810
11811 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
11812 is_gimple_val, fb_rvalue, false);
11813 ret = MIN (ret, tret);
11814
11815 /* Handle OMP_FOR_INCR. */
11816 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
11817 switch (TREE_CODE (t))
11818 {
11819 case PREINCREMENT_EXPR:
11820 case POSTINCREMENT_EXPR:
11821 {
11822 tree decl = TREE_OPERAND (t, 0);
11823 /* c_omp_for_incr_canonicalize_ptr() should have been
11824 called to massage things appropriately. */
11825 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
11826
11827 if (orig_for_stmt != for_stmt)
11828 break;
11829 t = build_int_cst (TREE_TYPE (decl), 1);
11830 if (c)
11831 OMP_CLAUSE_LINEAR_STEP (c) = t;
11832 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
11833 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
11834 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
11835 break;
11836 }
11837
11838 case PREDECREMENT_EXPR:
11839 case POSTDECREMENT_EXPR:
11840 /* c_omp_for_incr_canonicalize_ptr() should have been
11841 called to massage things appropriately. */
11842 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
11843 if (orig_for_stmt != for_stmt)
11844 break;
11845 t = build_int_cst (TREE_TYPE (decl), -1);
11846 if (c)
11847 OMP_CLAUSE_LINEAR_STEP (c) = t;
11848 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
11849 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
11850 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
11851 break;
11852
11853 case MODIFY_EXPR:
11854 gcc_assert (TREE_OPERAND (t, 0) == decl);
11855 TREE_OPERAND (t, 0) = var;
11856
11857 t = TREE_OPERAND (t, 1);
11858 switch (TREE_CODE (t))
11859 {
11860 case PLUS_EXPR:
11861 if (TREE_OPERAND (t, 1) == decl)
11862 {
11863 TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
11864 TREE_OPERAND (t, 0) = var;
11865 break;
11866 }
11867
11868 /* Fallthru. */
11869 case MINUS_EXPR:
11870 case POINTER_PLUS_EXPR:
11871 gcc_assert (TREE_OPERAND (t, 0) == decl);
11872 TREE_OPERAND (t, 0) = var;
11873 break;
11874 default:
11875 gcc_unreachable ();
11876 }
11877
11878 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
11879 is_gimple_val, fb_rvalue, false);
11880 ret = MIN (ret, tret);
11881 if (c)
11882 {
11883 tree step = TREE_OPERAND (t, 1);
11884 tree stept = TREE_TYPE (decl);
11885 if (POINTER_TYPE_P (stept))
11886 stept = sizetype;
11887 step = fold_convert (stept, step);
11888 if (TREE_CODE (t) == MINUS_EXPR)
11889 step = fold_build1 (NEGATE_EXPR, stept, step);
11890 OMP_CLAUSE_LINEAR_STEP (c) = step;
11891 if (step != TREE_OPERAND (t, 1))
11892 {
11893 tret = gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c),
11894 &for_pre_body, NULL,
11895 is_gimple_val, fb_rvalue, false);
11896 ret = MIN (ret, tret);
11897 }
11898 }
11899 break;
11900
11901 default:
11902 gcc_unreachable ();
11903 }
11904
11905 if (c2)
11906 {
11907 gcc_assert (c);
11908 OMP_CLAUSE_LINEAR_STEP (c2) = OMP_CLAUSE_LINEAR_STEP (c);
11909 }
11910
11911 if ((var != decl || collapse > 1 || tile) && orig_for_stmt == for_stmt)
11912 {
11913 for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
11914 if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
11915 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
11916 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
11917 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)
11918 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) == NULL))
11919 && OMP_CLAUSE_DECL (c) == decl)
11920 {
11921 if (is_doacross && (collapse == 1 || i >= collapse))
11922 t = var;
11923 else
11924 {
11925 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
11926 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
11927 gcc_assert (TREE_OPERAND (t, 0) == var);
11928 t = TREE_OPERAND (t, 1);
11929 gcc_assert (TREE_CODE (t) == PLUS_EXPR
11930 || TREE_CODE (t) == MINUS_EXPR
11931 || TREE_CODE (t) == POINTER_PLUS_EXPR);
11932 gcc_assert (TREE_OPERAND (t, 0) == var);
11933 t = build2 (TREE_CODE (t), TREE_TYPE (decl),
11934 is_doacross ? var : decl,
11935 TREE_OPERAND (t, 1));
11936 }
11937 gimple_seq *seq;
11938 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
11939 seq = &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c);
11940 else
11941 seq = &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c);
11942 push_gimplify_context ();
11943 gimplify_assign (decl, t, seq);
11944 gimple *bind = NULL;
11945 if (gimplify_ctxp->temps)
11946 {
11947 bind = gimple_build_bind (NULL_TREE, *seq, NULL_TREE);
11948 *seq = NULL;
11949 gimplify_seq_add_stmt (seq, bind);
11950 }
11951 pop_gimplify_context (bind);
11952 }
11953 }
11954 }
11955
11956 BITMAP_FREE (has_decl_expr);
11957
11958 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP
11959 || (loop_p && orig_for_stmt == for_stmt))
11960 {
11961 push_gimplify_context ();
11962 if (TREE_CODE (OMP_FOR_BODY (orig_for_stmt)) != BIND_EXPR)
11963 {
11964 OMP_FOR_BODY (orig_for_stmt)
11965 = build3 (BIND_EXPR, void_type_node, NULL,
11966 OMP_FOR_BODY (orig_for_stmt), NULL);
11967 TREE_SIDE_EFFECTS (OMP_FOR_BODY (orig_for_stmt)) = 1;
11968 }
11969 }
11970
11971 gimple *g = gimplify_and_return_first (OMP_FOR_BODY (orig_for_stmt),
11972 &for_body);
11973
11974 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP
11975 || (loop_p && orig_for_stmt == for_stmt))
11976 {
11977 if (gimple_code (g) == GIMPLE_BIND)
11978 pop_gimplify_context (g);
11979 else
11980 pop_gimplify_context (NULL);
11981 }
11982
11983 if (orig_for_stmt != for_stmt)
11984 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
11985 {
11986 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
11987 decl = TREE_OPERAND (t, 0);
11988 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
11989 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
11990 gimplify_omp_ctxp = ctx->outer_context;
11991 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
11992 gimplify_omp_ctxp = ctx;
11993 omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
11994 TREE_OPERAND (t, 0) = var;
11995 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
11996 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
11997 TREE_OPERAND (TREE_OPERAND (t, 1), 0) = var;
11998 }
11999
12000 gimplify_adjust_omp_clauses (pre_p, for_body,
12001 &OMP_FOR_CLAUSES (orig_for_stmt),
12002 TREE_CODE (orig_for_stmt));
12003
12004 int kind;
12005 switch (TREE_CODE (orig_for_stmt))
12006 {
12007 case OMP_FOR: kind = GF_OMP_FOR_KIND_FOR; break;
12008 case OMP_SIMD: kind = GF_OMP_FOR_KIND_SIMD; break;
12009 case OMP_DISTRIBUTE: kind = GF_OMP_FOR_KIND_DISTRIBUTE; break;
12010 case OMP_TASKLOOP: kind = GF_OMP_FOR_KIND_TASKLOOP; break;
12011 case OACC_LOOP: kind = GF_OMP_FOR_KIND_OACC_LOOP; break;
12012 default:
12013 gcc_unreachable ();
12014 }
12015 if (loop_p && kind == GF_OMP_FOR_KIND_SIMD)
12016 {
12017 gimplify_seq_add_seq (pre_p, for_pre_body);
12018 for_pre_body = NULL;
12019 }
12020 gfor = gimple_build_omp_for (for_body, kind, OMP_FOR_CLAUSES (orig_for_stmt),
12021 TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
12022 for_pre_body);
12023 if (orig_for_stmt != for_stmt)
12024 gimple_omp_for_set_combined_p (gfor, true);
12025 if (gimplify_omp_ctxp
12026 && (gimplify_omp_ctxp->combined_loop
12027 || (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
12028 && gimplify_omp_ctxp->outer_context
12029 && gimplify_omp_ctxp->outer_context->combined_loop)))
12030 {
12031 gimple_omp_for_set_combined_into_p (gfor, true);
12032 if (gimplify_omp_ctxp->combined_loop)
12033 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_SIMD);
12034 else
12035 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_FOR);
12036 }
12037
12038 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
12039 {
12040 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
12041 gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
12042 gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
12043 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
12044 gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
12045 gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
12046 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
12047 gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
12048 }
12049
12050 /* OMP_TASKLOOP is gimplified as two GIMPLE_OMP_FOR taskloop constructs
12051    with a GIMPLE_OMP_TASK sandwiched in between them.  The outer taskloop
12052    is responsible for computing the number of iterations and the counts
12053    for collapsed loops, and for holding the taskloop-specific clauses.
12054    The task construct stands for the effect of data sharing on the
12055    explicit task it creates, and the inner taskloop stands for the
12056    expansion of the static loop inside of the explicit task construct.  */
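  /* Schematically, the generated structure is:

       GIMPLE_OMP_FOR <taskloop>           <- outer, gforo
         GIMPLE_BIND
           GIMPLE_OMP_TASK
             GIMPLE_BIND
               GIMPLE_OMP_FOR <taskloop>   <- inner, gfor
                 <loop body>  */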
12057 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
12058 {
12059 tree *gfor_clauses_ptr = gimple_omp_for_clauses_ptr (gfor);
12060 tree task_clauses = NULL_TREE;
12061 tree c = *gfor_clauses_ptr;
12062 tree *gtask_clauses_ptr = &task_clauses;
12063 tree outer_for_clauses = NULL_TREE;
12064 tree *gforo_clauses_ptr = &outer_for_clauses;
12065 for (; c; c = OMP_CLAUSE_CHAIN (c))
12066 switch (OMP_CLAUSE_CODE (c))
12067 {
12068 /* These clauses are allowed on task, move them there. */
12069 case OMP_CLAUSE_SHARED:
12070 case OMP_CLAUSE_FIRSTPRIVATE:
12071 case OMP_CLAUSE_DEFAULT:
12072 case OMP_CLAUSE_IF:
12073 case OMP_CLAUSE_UNTIED:
12074 case OMP_CLAUSE_FINAL:
12075 case OMP_CLAUSE_MERGEABLE:
12076 case OMP_CLAUSE_PRIORITY:
12077 case OMP_CLAUSE_REDUCTION:
12078 case OMP_CLAUSE_IN_REDUCTION:
12079 *gtask_clauses_ptr = c;
12080 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
12081 break;
12082 case OMP_CLAUSE_PRIVATE:
12083 if (OMP_CLAUSE_PRIVATE_TASKLOOP_IV (c))
12084 {
12085 /* We want private on outer for and firstprivate
12086 on task. */
12087 *gtask_clauses_ptr
12088 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
12089 OMP_CLAUSE_FIRSTPRIVATE);
12090 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
12091 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL);
12092 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
12093 *gforo_clauses_ptr = c;
12094 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
12095 }
12096 else
12097 {
12098 *gtask_clauses_ptr = c;
12099 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
12100 }
12101 break;
12102 /* These clauses go into outer taskloop clauses. */
12103 case OMP_CLAUSE_GRAINSIZE:
12104 case OMP_CLAUSE_NUM_TASKS:
12105 case OMP_CLAUSE_NOGROUP:
12106 *gforo_clauses_ptr = c;
12107 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
12108 break;
12109 /* The collapse clause we duplicate on both taskloops.  */
12110 case OMP_CLAUSE_COLLAPSE:
12111 *gfor_clauses_ptr = c;
12112 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
12113 *gforo_clauses_ptr = copy_node (c);
12114 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
12115 break;
12116 /* For lastprivate, keep the clause on the inner taskloop, and add
12117    a shared clause on the task.  If the same decl is also firstprivate,
12118    also add a firstprivate clause on the inner taskloop.  */
12119 case OMP_CLAUSE_LASTPRIVATE:
12120 if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c))
12121 {
12122 /* For taskloop C++ lastprivate IVs, we want:
12123 1) private on outer taskloop
12124 2) firstprivate and shared on task
12125 3) lastprivate on inner taskloop */
12126 *gtask_clauses_ptr
12127 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
12128 OMP_CLAUSE_FIRSTPRIVATE);
12129 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
12130 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL);
12131 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
12132 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c) = 1;
12133 *gforo_clauses_ptr = build_omp_clause (OMP_CLAUSE_LOCATION (c),
12134 OMP_CLAUSE_PRIVATE);
12135 OMP_CLAUSE_DECL (*gforo_clauses_ptr) = OMP_CLAUSE_DECL (c);
12136 OMP_CLAUSE_PRIVATE_TASKLOOP_IV (*gforo_clauses_ptr) = 1;
12137 TREE_TYPE (*gforo_clauses_ptr) = TREE_TYPE (c);
12138 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
12139 }
12140 *gfor_clauses_ptr = c;
12141 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
12142 *gtask_clauses_ptr
12143 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_SHARED);
12144 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
12145 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
12146 OMP_CLAUSE_SHARED_FIRSTPRIVATE (*gtask_clauses_ptr) = 1;
12147 gtask_clauses_ptr
12148 = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
12149 break;
12150 default:
12151 gcc_unreachable ();
12152 }
12153 *gfor_clauses_ptr = NULL_TREE;
12154 *gtask_clauses_ptr = NULL_TREE;
12155 *gforo_clauses_ptr = NULL_TREE;
12156 g = gimple_build_bind (NULL_TREE, gfor, NULL_TREE);
12157 g = gimple_build_omp_task (g, task_clauses, NULL_TREE, NULL_TREE,
12158 NULL_TREE, NULL_TREE, NULL_TREE);
12159 gimple_omp_task_set_taskloop_p (g, true);
12160 g = gimple_build_bind (NULL_TREE, g, NULL_TREE);
12161 gomp_for *gforo
12162 = gimple_build_omp_for (g, GF_OMP_FOR_KIND_TASKLOOP, outer_for_clauses,
12163 gimple_omp_for_collapse (gfor),
12164 gimple_omp_for_pre_body (gfor));
12165 gimple_omp_for_set_pre_body (gfor, NULL);
12166 gimple_omp_for_set_combined_p (gforo, true);
12167 gimple_omp_for_set_combined_into_p (gfor, true);
12168 for (i = 0; i < (int) gimple_omp_for_collapse (gfor); i++)
12169 {
12170 tree type = TREE_TYPE (gimple_omp_for_index (gfor, i));
12171 tree v = create_tmp_var (type);
12172 gimple_omp_for_set_index (gforo, i, v);
12173 t = unshare_expr (gimple_omp_for_initial (gfor, i));
12174 gimple_omp_for_set_initial (gforo, i, t);
12175 gimple_omp_for_set_cond (gforo, i,
12176 gimple_omp_for_cond (gfor, i));
12177 t = unshare_expr (gimple_omp_for_final (gfor, i));
12178 gimple_omp_for_set_final (gforo, i, t);
12179 t = unshare_expr (gimple_omp_for_incr (gfor, i));
12180 gcc_assert (TREE_OPERAND (t, 0) == gimple_omp_for_index (gfor, i));
12181 TREE_OPERAND (t, 0) = v;
12182 gimple_omp_for_set_incr (gforo, i, t);
12183 t = build_omp_clause (input_location, OMP_CLAUSE_PRIVATE);
12184 OMP_CLAUSE_DECL (t) = v;
12185 OMP_CLAUSE_CHAIN (t) = gimple_omp_for_clauses (gforo);
12186 gimple_omp_for_set_clauses (gforo, t);
12187 }
12188 gimplify_seq_add_stmt (pre_p, gforo);
12189 }
12190 else
12191 gimplify_seq_add_stmt (pre_p, gfor);
12192
12193 if (TREE_CODE (orig_for_stmt) == OMP_FOR)
12194 {
12195 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
12196 unsigned lastprivate_conditional = 0;
12197 while (ctx
12198 && (ctx->region_type == ORT_TARGET_DATA
12199 || ctx->region_type == ORT_TASKGROUP))
12200 ctx = ctx->outer_context;
12201 if (ctx && (ctx->region_type & ORT_PARALLEL) != 0)
12202 for (tree c = gimple_omp_for_clauses (gfor);
12203 c; c = OMP_CLAUSE_CHAIN (c))
12204 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
12205 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
12206 ++lastprivate_conditional;
12207 if (lastprivate_conditional)
12208 {
12209 struct omp_for_data fd;
12210 omp_extract_for_data (gfor, &fd, NULL);
12211 tree type = build_array_type_nelts (unsigned_type_for (fd.iter_type),
12212 lastprivate_conditional);
12213 tree var = create_tmp_var_raw (type);
12214 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
12215 OMP_CLAUSE_DECL (c) = var;
12216 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (gfor);
12217 gimple_omp_for_set_clauses (gfor, c);
12218 omp_add_variable (ctx, var, GOVD_CONDTEMP | GOVD_SEEN);
12219 }
12220 }
12221 else if (TREE_CODE (orig_for_stmt) == OMP_SIMD)
12222 {
12223 unsigned lastprivate_conditional = 0;
12224 for (tree c = gimple_omp_for_clauses (gfor); c; c = OMP_CLAUSE_CHAIN (c))
12225 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
12226 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
12227 ++lastprivate_conditional;
12228 if (lastprivate_conditional)
12229 {
12230 struct omp_for_data fd;
12231 omp_extract_for_data (gfor, &fd, NULL);
12232 tree type = unsigned_type_for (fd.iter_type);
12233 while (lastprivate_conditional--)
12234 {
12235 tree c = build_omp_clause (UNKNOWN_LOCATION,
12236 OMP_CLAUSE__CONDTEMP_);
12237 OMP_CLAUSE_DECL (c) = create_tmp_var (type);
12238 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (gfor);
12239 gimple_omp_for_set_clauses (gfor, c);
12240 }
12241 }
12242 }
12243
12244 if (ret != GS_ALL_DONE)
12245 return GS_ERROR;
12246 *expr_p = NULL_TREE;
12247 return GS_ALL_DONE;
12248 }
12249
12250 /* Helper for gimplify_omp_loop, called through walk_tree. */
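/* It replaces uses of the OMP_CLAUSE_REDUCTION_PLACEHOLDER and
   OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER decls of the first clause in
   DATA with the corresponding placeholder decls of the second one.  */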
12251
12252 static tree
12253 replace_reduction_placeholders (tree *tp, int *walk_subtrees, void *data)
12254 {
12255 if (DECL_P (*tp))
12256 {
12257 tree *d = (tree *) data;
12258 if (*tp == OMP_CLAUSE_REDUCTION_PLACEHOLDER (d[0]))
12259 {
12260 *tp = OMP_CLAUSE_REDUCTION_PLACEHOLDER (d[1]);
12261 *walk_subtrees = 0;
12262 }
12263 else if (*tp == OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (d[0]))
12264 {
12265 *tp = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (d[1]);
12266 *walk_subtrees = 0;
12267 }
12268 }
12269 return NULL_TREE;
12270 }
12271
12272 /* Gimplify the gross structure of an OMP_LOOP statement. */
12273
12274 static enum gimplify_status
12275 gimplify_omp_loop (tree *expr_p, gimple_seq *pre_p)
12276 {
12277 tree for_stmt = *expr_p;
12278 tree clauses = OMP_FOR_CLAUSES (for_stmt);
12279 struct gimplify_omp_ctx *octx = gimplify_omp_ctxp;
12280 enum omp_clause_bind_kind kind = OMP_CLAUSE_BIND_THREAD;
12281 int i;
12282
12283 /* If order is not present, the behavior is as if order(concurrent)
12284 appeared. */
12285 tree order = omp_find_clause (clauses, OMP_CLAUSE_ORDER);
12286 if (order == NULL_TREE)
12287 {
12288 order = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_ORDER);
12289 OMP_CLAUSE_CHAIN (order) = clauses;
12290 OMP_FOR_CLAUSES (for_stmt) = clauses = order;
12291 }
12292
12293 tree bind = omp_find_clause (clauses, OMP_CLAUSE_BIND);
12294 if (bind == NULL_TREE)
12295 {
12296 if (!flag_openmp) /* flag_openmp_simd */
12297 ;
12298 else if (octx && (octx->region_type & ORT_TEAMS) != 0)
12299 kind = OMP_CLAUSE_BIND_TEAMS;
12300 else if (octx && (octx->region_type & ORT_PARALLEL) != 0)
12301 kind = OMP_CLAUSE_BIND_PARALLEL;
12302 else
12303 {
12304 for (; octx; octx = octx->outer_context)
12305 {
12306 if ((octx->region_type & ORT_ACC) != 0
12307 || octx->region_type == ORT_NONE
12308 || octx->region_type == ORT_IMPLICIT_TARGET)
12309 continue;
12310 break;
12311 }
12312 if (octx == NULL && !in_omp_construct)
12313 error_at (EXPR_LOCATION (for_stmt),
12314 "%<bind%> clause not specified on a %<loop%> "
12315 "construct not nested inside another OpenMP construct");
12316 }
12317 bind = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_BIND);
12318 OMP_CLAUSE_CHAIN (bind) = clauses;
12319 OMP_CLAUSE_BIND_KIND (bind) = kind;
12320 OMP_FOR_CLAUSES (for_stmt) = bind;
12321 }
12322 else
12323 switch (OMP_CLAUSE_BIND_KIND (bind))
12324 {
12325 case OMP_CLAUSE_BIND_THREAD:
12326 break;
12327 case OMP_CLAUSE_BIND_PARALLEL:
12328 if (!flag_openmp) /* flag_openmp_simd */
12329 {
12330 OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
12331 break;
12332 }
12333 for (; octx; octx = octx->outer_context)
12334 if (octx->region_type == ORT_SIMD
12335 && omp_find_clause (octx->clauses, OMP_CLAUSE_BIND) == NULL_TREE)
12336 {
12337 error_at (EXPR_LOCATION (for_stmt),
12338 "%<bind(parallel)%> on a %<loop%> construct nested "
12339 "inside %<simd%> construct");
12340 OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
12341 break;
12342 }
12343 kind = OMP_CLAUSE_BIND_PARALLEL;
12344 break;
12345 case OMP_CLAUSE_BIND_TEAMS:
12346 if (!flag_openmp) /* flag_openmp_simd */
12347 {
12348 OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
12349 break;
12350 }
12351 if ((octx
12352 && octx->region_type != ORT_IMPLICIT_TARGET
12353 && octx->region_type != ORT_NONE
12354 && (octx->region_type & ORT_TEAMS) == 0)
12355 || in_omp_construct)
12356 {
12357 error_at (EXPR_LOCATION (for_stmt),
12358 "%<bind(teams)%> on a %<loop%> region not strictly "
12359 "nested inside of a %<teams%> region");
12360 OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
12361 break;
12362 }
12363 kind = OMP_CLAUSE_BIND_TEAMS;
12364 break;
12365 default:
12366 gcc_unreachable ();
12367 }
12368
12369 for (tree *pc = &OMP_FOR_CLAUSES (for_stmt); *pc; )
12370 switch (OMP_CLAUSE_CODE (*pc))
12371 {
12372 case OMP_CLAUSE_REDUCTION:
12373 if (OMP_CLAUSE_REDUCTION_INSCAN (*pc))
12374 {
12375 error_at (OMP_CLAUSE_LOCATION (*pc),
12376 "%<inscan%> %<reduction%> clause on "
12377 "%qs construct", "loop");
12378 OMP_CLAUSE_REDUCTION_INSCAN (*pc) = 0;
12379 }
12380 if (OMP_CLAUSE_REDUCTION_TASK (*pc))
12381 {
12382 error_at (OMP_CLAUSE_LOCATION (*pc),
12383 "invalid %<task%> reduction modifier on construct "
12384 "other than %<parallel%>, %<for%> or %<sections%>");
12385 OMP_CLAUSE_REDUCTION_TASK (*pc) = 0;
12386 }
12387 pc = &OMP_CLAUSE_CHAIN (*pc);
12388 break;
12389 case OMP_CLAUSE_LASTPRIVATE:
12390 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
12391 {
12392 tree t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
12393 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
12394 if (OMP_CLAUSE_DECL (*pc) == TREE_OPERAND (t, 0))
12395 break;
12396 if (OMP_FOR_ORIG_DECLS (for_stmt)
12397 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt),
12398 i)) == TREE_LIST
12399 && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt),
12400 i)))
12401 {
12402 tree orig = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
12403 if (OMP_CLAUSE_DECL (*pc) == TREE_PURPOSE (orig))
12404 break;
12405 }
12406 }
12407 if (i == TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)))
12408 {
12409 error_at (OMP_CLAUSE_LOCATION (*pc),
12410 "%<lastprivate%> clause on a %<loop%> construct refers "
12411 "to a variable %qD which is not the loop iterator",
12412 OMP_CLAUSE_DECL (*pc));
12413 *pc = OMP_CLAUSE_CHAIN (*pc);
12414 break;
12415 }
12416 pc = &OMP_CLAUSE_CHAIN (*pc);
12417 break;
12418 default:
12419 pc = &OMP_CLAUSE_CHAIN (*pc);
12420 break;
12421 }
12422
12423 TREE_SET_CODE (for_stmt, OMP_SIMD);
12424
12425 int last;
12426 switch (kind)
12427 {
12428 case OMP_CLAUSE_BIND_THREAD: last = 0; break;
12429 case OMP_CLAUSE_BIND_PARALLEL: last = 1; break;
12430 case OMP_CLAUSE_BIND_TEAMS: last = 2; break;
12431 }
12432 for (int pass = 1; pass <= last; pass++)
12433 {
12434 if (pass == 2)
12435 {
12436 tree bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
12437 append_to_statement_list (*expr_p, &BIND_EXPR_BODY (bind));
12438 *expr_p = make_node (OMP_PARALLEL);
12439 TREE_TYPE (*expr_p) = void_type_node;
12440 OMP_PARALLEL_BODY (*expr_p) = bind;
12441 OMP_PARALLEL_COMBINED (*expr_p) = 1;
12442 SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (for_stmt));
12443 tree *pc = &OMP_PARALLEL_CLAUSES (*expr_p);
12444 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
12445 if (OMP_FOR_ORIG_DECLS (for_stmt)
12446 && (TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i))
12447 == TREE_LIST))
12448 {
12449 tree elt = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
12450 if (TREE_PURPOSE (elt) && TREE_VALUE (elt))
12451 {
12452 *pc = build_omp_clause (UNKNOWN_LOCATION,
12453 OMP_CLAUSE_FIRSTPRIVATE);
12454 OMP_CLAUSE_DECL (*pc) = TREE_VALUE (elt);
12455 pc = &OMP_CLAUSE_CHAIN (*pc);
12456 }
12457 }
12458 }
12459 tree t = make_node (pass == 2 ? OMP_DISTRIBUTE : OMP_FOR);
12460 tree *pc = &OMP_FOR_CLAUSES (t);
12461 TREE_TYPE (t) = void_type_node;
12462 OMP_FOR_BODY (t) = *expr_p;
12463 SET_EXPR_LOCATION (t, EXPR_LOCATION (for_stmt));
12464 for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
12465 switch (OMP_CLAUSE_CODE (c))
12466 {
12467 case OMP_CLAUSE_BIND:
12468 case OMP_CLAUSE_ORDER:
12469 case OMP_CLAUSE_COLLAPSE:
12470 *pc = copy_node (c);
12471 pc = &OMP_CLAUSE_CHAIN (*pc);
12472 break;
12473 case OMP_CLAUSE_PRIVATE:
12474 case OMP_CLAUSE_FIRSTPRIVATE:
12475 /* Only needed on the innermost construct. */
12476 break;
12477 case OMP_CLAUSE_LASTPRIVATE:
12478 if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c) && pass != last)
12479 {
12480 *pc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
12481 OMP_CLAUSE_FIRSTPRIVATE);
12482 OMP_CLAUSE_DECL (*pc) = OMP_CLAUSE_DECL (c);
12483 lang_hooks.decls.omp_finish_clause (*pc, NULL);
12484 pc = &OMP_CLAUSE_CHAIN (*pc);
12485 }
12486 *pc = copy_node (c);
12487 OMP_CLAUSE_LASTPRIVATE_STMT (*pc) = NULL_TREE;
12488 TREE_TYPE (*pc) = unshare_expr (TREE_TYPE (c));
12489 if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c))
12490 {
12491 if (pass != last)
12492 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (*pc) = 1;
12493 else
12494 lang_hooks.decls.omp_finish_clause (*pc, NULL);
12495 OMP_CLAUSE_LASTPRIVATE_LOOP_IV (*pc) = 0;
12496 }
12497 pc = &OMP_CLAUSE_CHAIN (*pc);
12498 break;
12499 case OMP_CLAUSE_REDUCTION:
12500 *pc = copy_node (c);
12501 OMP_CLAUSE_DECL (*pc) = unshare_expr (OMP_CLAUSE_DECL (c));
12502 TREE_TYPE (*pc) = unshare_expr (TREE_TYPE (c));
12503 OMP_CLAUSE_REDUCTION_INIT (*pc)
12504 = unshare_expr (OMP_CLAUSE_REDUCTION_INIT (c));
12505 OMP_CLAUSE_REDUCTION_MERGE (*pc)
12506 = unshare_expr (OMP_CLAUSE_REDUCTION_MERGE (c));
12507 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc))
12508 {
12509 OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc)
12510 = copy_node (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c));
12511 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc))
12512 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc)
12513 = copy_node (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c));
12514 tree nc = *pc;
12515 tree data[2] = { c, nc };
12516 walk_tree_without_duplicates (&OMP_CLAUSE_REDUCTION_INIT (nc),
12517 replace_reduction_placeholders,
12518 data);
12519 walk_tree_without_duplicates (&OMP_CLAUSE_REDUCTION_MERGE (nc),
12520 replace_reduction_placeholders,
12521 data);
12522 }
12523 pc = &OMP_CLAUSE_CHAIN (*pc);
12524 break;
12525 default:
12526 gcc_unreachable ();
12527 }
12528 *pc = NULL_TREE;
12529 *expr_p = t;
12530 }
12531 return gimplify_omp_for (expr_p, pre_p);
12532 }
12533
12534
12535 /* Helper function of optimize_target_teams: find an OMP_TEAMS
12536 inside OMP_TARGET's body. */
12537
12538 static tree
12539 find_omp_teams (tree *tp, int *walk_subtrees, void *)
12540 {
12541 *walk_subtrees = 0;
12542 switch (TREE_CODE (*tp))
12543 {
12544 case OMP_TEAMS:
12545 return *tp;
12546 case BIND_EXPR:
12547 case STATEMENT_LIST:
12548 *walk_subtrees = 1;
12549 break;
12550 default:
12551 break;
12552 }
12553 return NULL_TREE;
12554 }
12555
12556 /* Helper function of optimize_target_teams: determine whether the
12557 expression can be computed safely on the host before the target construct. */
12558
12559 static tree
12560 computable_teams_clause (tree *tp, int *walk_subtrees, void *)
12561 {
12562 splay_tree_node n;
12563
12564 if (TYPE_P (*tp))
12565 {
12566 *walk_subtrees = 0;
12567 return NULL_TREE;
12568 }
12569 switch (TREE_CODE (*tp))
12570 {
12571 case VAR_DECL:
12572 case PARM_DECL:
12573 case RESULT_DECL:
12574 *walk_subtrees = 0;
12575 if (error_operand_p (*tp)
12576 || !INTEGRAL_TYPE_P (TREE_TYPE (*tp))
12577 || DECL_HAS_VALUE_EXPR_P (*tp)
12578 || DECL_THREAD_LOCAL_P (*tp)
12579 || TREE_SIDE_EFFECTS (*tp)
12580 || TREE_THIS_VOLATILE (*tp))
12581 return *tp;
12582 if (is_global_var (*tp)
12583 && (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (*tp))
12584 || lookup_attribute ("omp declare target link",
12585 DECL_ATTRIBUTES (*tp))))
12586 return *tp;
12587 if (VAR_P (*tp)
12588 && !DECL_SEEN_IN_BIND_EXPR_P (*tp)
12589 && !is_global_var (*tp)
12590 && decl_function_context (*tp) == current_function_decl)
12591 return *tp;
12592 n = splay_tree_lookup (gimplify_omp_ctxp->variables,
12593 (splay_tree_key) *tp);
12594 if (n == NULL)
12595 {
12596 if (gimplify_omp_ctxp->defaultmap[GDMK_SCALAR] & GOVD_FIRSTPRIVATE)
12597 return NULL_TREE;
12598 return *tp;
12599 }
12600 else if (n->value & GOVD_LOCAL)
12601 return *tp;
12602 else if (n->value & GOVD_FIRSTPRIVATE)
12603 return NULL_TREE;
12604 else if ((n->value & (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
12605 == (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
12606 return NULL_TREE;
12607 return *tp;
12608 case INTEGER_CST:
12609 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
12610 return *tp;
12611 return NULL_TREE;
12612 case TARGET_EXPR:
12613 if (TARGET_EXPR_INITIAL (*tp)
12614 || TREE_CODE (TARGET_EXPR_SLOT (*tp)) != VAR_DECL)
12615 return *tp;
12616 return computable_teams_clause (&TARGET_EXPR_SLOT (*tp),
12617 walk_subtrees, NULL);
12618 /* Allow a reasonable subset of integral arithmetic. */
12619 case PLUS_EXPR:
12620 case MINUS_EXPR:
12621 case MULT_EXPR:
12622 case TRUNC_DIV_EXPR:
12623 case CEIL_DIV_EXPR:
12624 case FLOOR_DIV_EXPR:
12625 case ROUND_DIV_EXPR:
12626 case TRUNC_MOD_EXPR:
12627 case CEIL_MOD_EXPR:
12628 case FLOOR_MOD_EXPR:
12629 case ROUND_MOD_EXPR:
12630 case RDIV_EXPR:
12631 case EXACT_DIV_EXPR:
12632 case MIN_EXPR:
12633 case MAX_EXPR:
12634 case LSHIFT_EXPR:
12635 case RSHIFT_EXPR:
12636 case BIT_IOR_EXPR:
12637 case BIT_XOR_EXPR:
12638 case BIT_AND_EXPR:
12639 case NEGATE_EXPR:
12640 case ABS_EXPR:
12641 case BIT_NOT_EXPR:
12642 case NON_LVALUE_EXPR:
12643 CASE_CONVERT:
12644 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
12645 return *tp;
12646 return NULL_TREE;
12647 /* And disallow anything else, except for comparisons. */
12648 default:
12649 if (COMPARISON_CLASS_P (*tp))
12650 return NULL_TREE;
12651 return *tp;
12652 }
12653 }
12654
12655 /* Try to determine if the num_teams and/or thread_limit expressions
12656 can have their values determined already before entering the
12657 target construct.
12658 INTEGER_CSTs trivially can, as can integral decls that are
12659 firstprivate (explicitly or implicitly), or explicitly
12660 map(always, to:) or map(always, tofrom:) on the target region,
12661 and expressions involving simple arithmetic on those;
12662 function calls, dereferences etc. are not OK.
12663 Add NUM_TEAMS and THREAD_LIMIT clauses to the OMP_CLAUSES of
12664 EXPR based on what we find:
12665 0 stands for a clause not specified at all; use the implementation
12666 default. -1 stands for a value that can't be determined easily
12667 before entering the target construct.
12668 If no teams construct is present at all, use 1 for num_teams
12669 and 0 for thread_limit (only one team is involved, and the thread
12670 limit is implementation defined). */
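/* Illustrative sketch (assumed example): for

     #pragma omp target teams num_teams(4) thread_limit(n * 2)

   with an integral firstprivate n, both expressions are computable on
   the host: 4 is used as is, and n * 2 is gimplified into a value
   before the target region.  Had the clause been thread_limit(foo ()),
   -1 would be recorded instead.  */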
12671
12672 static void
12673 optimize_target_teams (tree target, gimple_seq *pre_p)
12674 {
12675 tree body = OMP_BODY (target);
12676 tree teams = walk_tree (&body, find_omp_teams, NULL, NULL);
12677 tree num_teams = integer_zero_node;
12678 tree thread_limit = integer_zero_node;
12679 location_t num_teams_loc = EXPR_LOCATION (target);
12680 location_t thread_limit_loc = EXPR_LOCATION (target);
12681 tree c, *p, expr;
12682 struct gimplify_omp_ctx *target_ctx = gimplify_omp_ctxp;
12683
12684 if (teams == NULL_TREE)
12685 num_teams = integer_one_node;
12686 else
12687 for (c = OMP_TEAMS_CLAUSES (teams); c; c = OMP_CLAUSE_CHAIN (c))
12688 {
12689 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NUM_TEAMS)
12690 {
12691 p = &num_teams;
12692 num_teams_loc = OMP_CLAUSE_LOCATION (c);
12693 }
12694 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREAD_LIMIT)
12695 {
12696 p = &thread_limit;
12697 thread_limit_loc = OMP_CLAUSE_LOCATION (c);
12698 }
12699 else
12700 continue;
12701 expr = OMP_CLAUSE_OPERAND (c, 0);
12702 if (TREE_CODE (expr) == INTEGER_CST)
12703 {
12704 *p = expr;
12705 continue;
12706 }
12707 if (walk_tree (&expr, computable_teams_clause, NULL, NULL))
12708 {
12709 *p = integer_minus_one_node;
12710 continue;
12711 }
12712 *p = expr;
12713 gimplify_omp_ctxp = gimplify_omp_ctxp->outer_context;
12714 if (gimplify_expr (p, pre_p, NULL, is_gimple_val, fb_rvalue, false)
12715 == GS_ERROR)
12716 {
12717 gimplify_omp_ctxp = target_ctx;
12718 *p = integer_minus_one_node;
12719 continue;
12720 }
12721 gimplify_omp_ctxp = target_ctx;
12722 if (!DECL_P (expr) && TREE_CODE (expr) != TARGET_EXPR)
12723 OMP_CLAUSE_OPERAND (c, 0) = *p;
12724 }
12725 c = build_omp_clause (thread_limit_loc, OMP_CLAUSE_THREAD_LIMIT);
12726 OMP_CLAUSE_THREAD_LIMIT_EXPR (c) = thread_limit;
12727 OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
12728 OMP_TARGET_CLAUSES (target) = c;
12729 c = build_omp_clause (num_teams_loc, OMP_CLAUSE_NUM_TEAMS);
12730 OMP_CLAUSE_NUM_TEAMS_EXPR (c) = num_teams;
12731 OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
12732 OMP_TARGET_CLAUSES (target) = c;
12733 }
12734
12735 /* Gimplify the gross structure of several OMP constructs. */
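/* Illustrative sketch (assumed example): for

     #pragma omp target data map(tofrom: a)
     { ... }

   the body is gimplified and, because of the ORT_TARGET_DATA
   semantics, wrapped as

     try { ... } finally { GOMP_target_end_data (); }

   so the data environment is unmapped on every exit path; the whole
   construct then becomes a GIMPLE_OMP_TARGET with kind
   GF_OMP_TARGET_KIND_DATA.  */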
12736
12737 static void
12738 gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
12739 {
12740 tree expr = *expr_p;
12741 gimple *stmt;
12742 gimple_seq body = NULL;
12743 enum omp_region_type ort;
12744
12745 switch (TREE_CODE (expr))
12746 {
12747 case OMP_SECTIONS:
12748 case OMP_SINGLE:
12749 ort = ORT_WORKSHARE;
12750 break;
12751 case OMP_TARGET:
12752 ort = OMP_TARGET_COMBINED (expr) ? ORT_COMBINED_TARGET : ORT_TARGET;
12753 break;
12754 case OACC_KERNELS:
12755 ort = ORT_ACC_KERNELS;
12756 break;
12757 case OACC_PARALLEL:
12758 ort = ORT_ACC_PARALLEL;
12759 break;
12760 case OACC_SERIAL:
12761 ort = ORT_ACC_SERIAL;
12762 break;
12763 case OACC_DATA:
12764 ort = ORT_ACC_DATA;
12765 break;
12766 case OMP_TARGET_DATA:
12767 ort = ORT_TARGET_DATA;
12768 break;
12769 case OMP_TEAMS:
12770 ort = OMP_TEAMS_COMBINED (expr) ? ORT_COMBINED_TEAMS : ORT_TEAMS;
12771 if (gimplify_omp_ctxp == NULL
12772 || gimplify_omp_ctxp->region_type == ORT_IMPLICIT_TARGET)
12773 ort = (enum omp_region_type) (ort | ORT_HOST_TEAMS);
12774 break;
12775 case OACC_HOST_DATA:
12776 ort = ORT_ACC_HOST_DATA;
12777 break;
12778 default:
12779 gcc_unreachable ();
12780 }
12781
12782 bool save_in_omp_construct = in_omp_construct;
12783 if ((ort & ORT_ACC) == 0)
12784 in_omp_construct = false;
12785 gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ort,
12786 TREE_CODE (expr));
12787 if (TREE_CODE (expr) == OMP_TARGET)
12788 optimize_target_teams (expr, pre_p);
12789 if ((ort & (ORT_TARGET | ORT_TARGET_DATA)) != 0
12790 || (ort & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
12791 {
12792 push_gimplify_context ();
12793 gimple *g = gimplify_and_return_first (OMP_BODY (expr), &body);
12794 if (gimple_code (g) == GIMPLE_BIND)
12795 pop_gimplify_context (g);
12796 else
12797 pop_gimplify_context (NULL);
12798 if ((ort & ORT_TARGET_DATA) != 0)
12799 {
12800 enum built_in_function end_ix;
12801 switch (TREE_CODE (expr))
12802 {
12803 case OACC_DATA:
12804 case OACC_HOST_DATA:
12805 end_ix = BUILT_IN_GOACC_DATA_END;
12806 break;
12807 case OMP_TARGET_DATA:
12808 end_ix = BUILT_IN_GOMP_TARGET_END_DATA;
12809 break;
12810 default:
12811 gcc_unreachable ();
12812 }
12813 tree fn = builtin_decl_explicit (end_ix);
12814 g = gimple_build_call (fn, 0);
12815 gimple_seq cleanup = NULL;
12816 gimple_seq_add_stmt (&cleanup, g);
12817 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
12818 body = NULL;
12819 gimple_seq_add_stmt (&body, g);
12820 }
12821 }
12822 else
12823 gimplify_and_add (OMP_BODY (expr), &body);
12824 gimplify_adjust_omp_clauses (pre_p, body, &OMP_CLAUSES (expr),
12825 TREE_CODE (expr));
12826 in_omp_construct = save_in_omp_construct;
12827
12828 switch (TREE_CODE (expr))
12829 {
12830 case OACC_DATA:
12831 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_DATA,
12832 OMP_CLAUSES (expr));
12833 break;
12834 case OACC_HOST_DATA:
12835 if (omp_find_clause (OMP_CLAUSES (expr), OMP_CLAUSE_IF_PRESENT))
12836 {
12837 for (tree c = OMP_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
12838 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
12839 OMP_CLAUSE_USE_DEVICE_PTR_IF_PRESENT (c) = 1;
12840 }
12841
12842 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_HOST_DATA,
12843 OMP_CLAUSES (expr));
12844 break;
12845 case OACC_KERNELS:
12846 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_KERNELS,
12847 OMP_CLAUSES (expr));
12848 break;
12849 case OACC_PARALLEL:
12850 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_PARALLEL,
12851 OMP_CLAUSES (expr));
12852 break;
12853 case OACC_SERIAL:
12854 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_SERIAL,
12855 OMP_CLAUSES (expr));
12856 break;
12857 case OMP_SECTIONS:
12858 stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
12859 break;
12860 case OMP_SINGLE:
12861 stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
12862 break;
12863 case OMP_TARGET:
12864 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_REGION,
12865 OMP_CLAUSES (expr));
12866 break;
12867 case OMP_TARGET_DATA:
12868 /* Put use_device_{ptr,addr} clauses last, as map clauses are supposed
12869 to be evaluated before the use_device_{ptr,addr} clauses if they
12870 refer to the same variables. */
12871 {
12872 tree use_device_clauses;
12873 tree *pc, *uc = &use_device_clauses;
12874 for (pc = &OMP_CLAUSES (expr); *pc; )
12875 if (OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_USE_DEVICE_PTR
12876 || OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_USE_DEVICE_ADDR)
12877 {
12878 *uc = *pc;
12879 *pc = OMP_CLAUSE_CHAIN (*pc);
12880 uc = &OMP_CLAUSE_CHAIN (*uc);
12881 }
12882 else
12883 pc = &OMP_CLAUSE_CHAIN (*pc);
12884 *uc = NULL_TREE;
12885 *pc = use_device_clauses;
12886 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_DATA,
12887 OMP_CLAUSES (expr));
12888 }
12889 break;
12890 case OMP_TEAMS:
12891 stmt = gimple_build_omp_teams (body, OMP_CLAUSES (expr));
12892 if ((ort & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
12893 gimple_omp_teams_set_host (as_a <gomp_teams *> (stmt), true);
12894 break;
12895 default:
12896 gcc_unreachable ();
12897 }
12898
12899 gimplify_seq_add_stmt (pre_p, stmt);
12900 *expr_p = NULL_TREE;
12901 }
12902
12903 /* Gimplify the gross structure of OpenACC enter/exit data, update, and OpenMP
12904 target update constructs. */
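/* For instance (assumed example), '#pragma acc exit data finalize
   copyout(a)' arrives here with a GOMP_MAP_FROM clause, which the
   code below rewrites to GOMP_MAP_FORCE_FROM so the runtime applies
   the "finalize" semantics.  */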
12905
12906 static void
12907 gimplify_omp_target_update (tree *expr_p, gimple_seq *pre_p)
12908 {
12909 tree expr = *expr_p;
12910 int kind;
12911 gomp_target *stmt;
12912 enum omp_region_type ort = ORT_WORKSHARE;
12913
12914 switch (TREE_CODE (expr))
12915 {
12916 case OACC_ENTER_DATA:
12917 case OACC_EXIT_DATA:
12918 kind = GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA;
12919 ort = ORT_ACC;
12920 break;
12921 case OACC_UPDATE:
12922 kind = GF_OMP_TARGET_KIND_OACC_UPDATE;
12923 ort = ORT_ACC;
12924 break;
12925 case OMP_TARGET_UPDATE:
12926 kind = GF_OMP_TARGET_KIND_UPDATE;
12927 break;
12928 case OMP_TARGET_ENTER_DATA:
12929 kind = GF_OMP_TARGET_KIND_ENTER_DATA;
12930 break;
12931 case OMP_TARGET_EXIT_DATA:
12932 kind = GF_OMP_TARGET_KIND_EXIT_DATA;
12933 break;
12934 default:
12935 gcc_unreachable ();
12936 }
12937 gimplify_scan_omp_clauses (&OMP_STANDALONE_CLAUSES (expr), pre_p,
12938 ort, TREE_CODE (expr));
12939 gimplify_adjust_omp_clauses (pre_p, NULL, &OMP_STANDALONE_CLAUSES (expr),
12940 TREE_CODE (expr));
12941 if (TREE_CODE (expr) == OACC_UPDATE
12942 && omp_find_clause (OMP_STANDALONE_CLAUSES (expr),
12943 OMP_CLAUSE_IF_PRESENT))
12944 {
12945 /* The runtime uses GOMP_MAP_{TO,FROM} to denote the if_present
12946 clause. */
12947 for (tree c = OMP_STANDALONE_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
12948 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP)
12949 switch (OMP_CLAUSE_MAP_KIND (c))
12950 {
12951 case GOMP_MAP_FORCE_TO:
12952 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TO);
12953 break;
12954 case GOMP_MAP_FORCE_FROM:
12955 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FROM);
12956 break;
12957 default:
12958 break;
12959 }
12960 }
12961 else if (TREE_CODE (expr) == OACC_EXIT_DATA
12962 && omp_find_clause (OMP_STANDALONE_CLAUSES (expr),
12963 OMP_CLAUSE_FINALIZE))
12964 {
12965 /* Use GOMP_MAP_DELETE/GOMP_MAP_FORCE_FROM to denote "finalize"
12966 semantics. */
12967 bool have_clause = false;
12968 for (tree c = OMP_STANDALONE_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
12969 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP)
12970 switch (OMP_CLAUSE_MAP_KIND (c))
12971 {
12972 case GOMP_MAP_FROM:
12973 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FORCE_FROM);
12974 have_clause = true;
12975 break;
12976 case GOMP_MAP_RELEASE:
12977 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_DELETE);
12978 have_clause = true;
12979 break;
12980 case GOMP_MAP_POINTER:
12981 case GOMP_MAP_TO_PSET:
12982 /* TODO PR92929: we may see these here, but they'll always follow
12983 one of the clauses above, and will be handled by libgomp as
12984 one group, so no handling required here. */
12985 gcc_assert (have_clause);
12986 break;
12987 case GOMP_MAP_DETACH:
12988 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FORCE_DETACH);
12989 have_clause = false;
12990 break;
12991 case GOMP_MAP_STRUCT:
12992 have_clause = false;
12993 break;
12994 default:
12995 gcc_unreachable ();
12996 }
12997 }
12998 stmt = gimple_build_omp_target (NULL, kind, OMP_STANDALONE_CLAUSES (expr));
12999
13000 gimplify_seq_add_stmt (pre_p, stmt);
13001 *expr_p = NULL_TREE;
13002 }
13003
13004 /* A subroutine of gimplify_omp_atomic. The front end is supposed to have
13005 stabilized the lhs of the atomic operation as *ADDR. Return true if
13006 EXPR is this stabilized form. */
13007
13008 static bool
13009 goa_lhs_expr_p (tree expr, tree addr)
13010 {
13011 /* Also include casts to other type variants. The C front end is fond
13012 of adding these for e.g. volatile variables. This is like
13013 STRIP_TYPE_NOPS but includes the main variant lookup. */
13014 STRIP_USELESS_TYPE_CONVERSION (expr);
13015
13016 if (TREE_CODE (expr) == INDIRECT_REF)
13017 {
13018 expr = TREE_OPERAND (expr, 0);
13019 while (expr != addr
13020 && (CONVERT_EXPR_P (expr)
13021 || TREE_CODE (expr) == NON_LVALUE_EXPR)
13022 && TREE_CODE (expr) == TREE_CODE (addr)
13023 && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr)))
13024 {
13025 expr = TREE_OPERAND (expr, 0);
13026 addr = TREE_OPERAND (addr, 0);
13027 }
13028 if (expr == addr)
13029 return true;
13030 return (TREE_CODE (addr) == ADDR_EXPR
13031 && TREE_CODE (expr) == ADDR_EXPR
13032 && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
13033 }
13034 if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
13035 return true;
13036 return false;
13037 }
13038
13039 /* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR. If an
13040 expression does not involve the lhs, evaluate it into a temporary.
13041 Return 1 if the lhs appeared as a subexpression, 0 if it did not,
13042 or -1 if an error was encountered. */
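/* For example (assumed): for '#pragma omp atomic' on 'x = x + foo ()',
   the subexpression 'foo ()' does not involve the lhs, so it is
   evaluated into a temporary in PRE_P and only that temporary takes
   part in the atomic operation itself.  */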
13043
13044 static int
13045 goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
13046 tree lhs_var)
13047 {
13048 tree expr = *expr_p;
13049 int saw_lhs;
13050
13051 if (goa_lhs_expr_p (expr, lhs_addr))
13052 {
13053 *expr_p = lhs_var;
13054 return 1;
13055 }
13056 if (is_gimple_val (expr))
13057 return 0;
13058
13059 saw_lhs = 0;
13060 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
13061 {
13062 case tcc_binary:
13063 case tcc_comparison:
13064 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
13065 lhs_var);
13066 /* FALLTHRU */
13067 case tcc_unary:
13068 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
13069 lhs_var);
13070 break;
13071 case tcc_expression:
13072 switch (TREE_CODE (expr))
13073 {
13074 case TRUTH_ANDIF_EXPR:
13075 case TRUTH_ORIF_EXPR:
13076 case TRUTH_AND_EXPR:
13077 case TRUTH_OR_EXPR:
13078 case TRUTH_XOR_EXPR:
13079 case BIT_INSERT_EXPR:
13080 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
13081 lhs_addr, lhs_var);
13082 /* FALLTHRU */
13083 case TRUTH_NOT_EXPR:
13084 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
13085 lhs_addr, lhs_var);
13086 break;
13087 case COMPOUND_EXPR:
13088 /* Break out any preevaluations from cp_build_modify_expr. */
13089 for (; TREE_CODE (expr) == COMPOUND_EXPR;
13090 expr = TREE_OPERAND (expr, 1))
13091 gimplify_stmt (&TREE_OPERAND (expr, 0), pre_p);
13092 *expr_p = expr;
13093 return goa_stabilize_expr (expr_p, pre_p, lhs_addr, lhs_var);
13094 default:
13095 break;
13096 }
13097 break;
13098 case tcc_reference:
13099 if (TREE_CODE (expr) == BIT_FIELD_REF)
13100 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
13101 lhs_addr, lhs_var);
13102 break;
13103 default:
13104 break;
13105 }
13106
13107 if (saw_lhs == 0)
13108 {
13109 enum gimplify_status gs;
13110 gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
13111 if (gs != GS_ALL_DONE)
13112 saw_lhs = -1;
13113 }
13114
13115 return saw_lhs;
13116 }
13117
13118 /* Gimplify an OMP_ATOMIC statement. */
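/* Illustrative sketch (assumed example; the temporary names are made
   up): for

     #pragma omp atomic capture
     v = x += 1;

   the front end presents OMP_ATOMIC_CAPTURE_NEW with ADDR == &x, and
   we emit roughly

     tmp_load = GIMPLE_OMP_ATOMIC_LOAD (&x);
     D.1 = tmp_load + 1;
     GIMPLE_OMP_ATOMIC_STORE (D.1);

   with *EXPR_P replaced by the stored value, since the capture wants
   the new value of x.  */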
13119
13120 static enum gimplify_status
13121 gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
13122 {
13123 tree addr = TREE_OPERAND (*expr_p, 0);
13124 tree rhs = TREE_CODE (*expr_p) == OMP_ATOMIC_READ
13125 ? NULL : TREE_OPERAND (*expr_p, 1);
13126 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
13127 tree tmp_load;
13128 gomp_atomic_load *loadstmt;
13129 gomp_atomic_store *storestmt;
13130
13131 tmp_load = create_tmp_reg (type);
13132 if (rhs && goa_stabilize_expr (&rhs, pre_p, addr, tmp_load) < 0)
13133 return GS_ERROR;
13134
13135 if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
13136 != GS_ALL_DONE)
13137 return GS_ERROR;
13138
13139 loadstmt = gimple_build_omp_atomic_load (tmp_load, addr,
13140 OMP_ATOMIC_MEMORY_ORDER (*expr_p));
13141 gimplify_seq_add_stmt (pre_p, loadstmt);
13142 if (rhs)
13143 {
13144 /* BIT_INSERT_EXPR is not valid for non-integral bitfield
13145 representatives. Use BIT_FIELD_REF on the lhs instead. */
13146 if (TREE_CODE (rhs) == BIT_INSERT_EXPR
13147 && !INTEGRAL_TYPE_P (TREE_TYPE (tmp_load)))
13148 {
13149 tree bitpos = TREE_OPERAND (rhs, 2);
13150 tree op1 = TREE_OPERAND (rhs, 1);
13151 tree bitsize;
13152 tree tmp_store = tmp_load;
13153 if (TREE_CODE (*expr_p) == OMP_ATOMIC_CAPTURE_OLD)
13154 tmp_store = get_initialized_tmp_var (tmp_load, pre_p);
13155 if (INTEGRAL_TYPE_P (TREE_TYPE (op1)))
13156 bitsize = bitsize_int (TYPE_PRECISION (TREE_TYPE (op1)));
13157 else
13158 bitsize = TYPE_SIZE (TREE_TYPE (op1));
13159 gcc_assert (TREE_OPERAND (rhs, 0) == tmp_load);
13160 tree t = build2_loc (EXPR_LOCATION (rhs),
13161 MODIFY_EXPR, void_type_node,
13162 build3_loc (EXPR_LOCATION (rhs), BIT_FIELD_REF,
13163 TREE_TYPE (op1), tmp_store, bitsize,
13164 bitpos), op1);
13165 gimplify_and_add (t, pre_p);
13166 rhs = tmp_store;
13167 }
13168 if (gimplify_expr (&rhs, pre_p, NULL, is_gimple_val, fb_rvalue)
13169 != GS_ALL_DONE)
13170 return GS_ERROR;
13171 }
13172
13173 if (TREE_CODE (*expr_p) == OMP_ATOMIC_READ)
13174 rhs = tmp_load;
13175 storestmt
13176 = gimple_build_omp_atomic_store (rhs, OMP_ATOMIC_MEMORY_ORDER (*expr_p));
13177 gimplify_seq_add_stmt (pre_p, storestmt);
13178 switch (TREE_CODE (*expr_p))
13179 {
13180 case OMP_ATOMIC_READ:
13181 case OMP_ATOMIC_CAPTURE_OLD:
13182 *expr_p = tmp_load;
13183 gimple_omp_atomic_set_need_value (loadstmt);
13184 break;
13185 case OMP_ATOMIC_CAPTURE_NEW:
13186 *expr_p = rhs;
13187 gimple_omp_atomic_set_need_value (storestmt);
13188 break;
13189 default:
13190 *expr_p = NULL;
13191 break;
13192 }
13193
13194 return GS_ALL_DONE;
13195 }
13196
13197 /* Gimplify a TRANSACTION_EXPR. This involves gimplification of the
13198 body, and adding some EH bits. */
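/* E.g. (assumed example) '__transaction_relaxed { x++; }' ends up as a
   GIMPLE_TRANSACTION tuple whose subcode carries GTMA_IS_RELAXED.  */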
13199
13200 static enum gimplify_status
13201 gimplify_transaction (tree *expr_p, gimple_seq *pre_p)
13202 {
13203 tree expr = *expr_p, temp, tbody = TRANSACTION_EXPR_BODY (expr);
13204 gimple *body_stmt;
13205 gtransaction *trans_stmt;
13206 gimple_seq body = NULL;
13207 int subcode = 0;
13208
13209 /* Wrap the transaction body in a BIND_EXPR so we have a context
13210 in which to put decls for OMP. */
13211 if (TREE_CODE (tbody) != BIND_EXPR)
13212 {
13213 tree bind = build3 (BIND_EXPR, void_type_node, NULL, tbody, NULL);
13214 TREE_SIDE_EFFECTS (bind) = 1;
13215 SET_EXPR_LOCATION (bind, EXPR_LOCATION (tbody));
13216 TRANSACTION_EXPR_BODY (expr) = bind;
13217 }
13218
13219 push_gimplify_context ();
13220 temp = voidify_wrapper_expr (*expr_p, NULL);
13221
13222 body_stmt = gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr), &body);
13223 pop_gimplify_context (body_stmt);
13224
13225 trans_stmt = gimple_build_transaction (body);
13226 if (TRANSACTION_EXPR_OUTER (expr))
13227 subcode = GTMA_IS_OUTER;
13228 else if (TRANSACTION_EXPR_RELAXED (expr))
13229 subcode = GTMA_IS_RELAXED;
13230 gimple_transaction_set_subcode (trans_stmt, subcode);
13231
13232 gimplify_seq_add_stmt (pre_p, trans_stmt);
13233
13234 if (temp)
13235 {
13236 *expr_p = temp;
13237 return GS_OK;
13238 }
13239
13240 *expr_p = NULL_TREE;
13241 return GS_ALL_DONE;
13242 }
13243
13244 /* Gimplify an OMP_ORDERED construct. EXPR is the tree version. BODY
13245 is the OMP_BODY of the original EXPR (which has already been
13246 gimplified so it's not present in the EXPR).
13247
13248 Return the gimplified GIMPLE_OMP_ORDERED tuple. */
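/* For illustration (assumed example): inside

     #pragma omp for ordered(1)
     for (i = 0; i < n; i++)
       {
         #pragma omp ordered depend(sink: i - 1)
         ...
         #pragma omp ordered depend(source)
       }

   the decls listed after 'sink' are matched below against the loop
   iterators recorded in gimplify_omp_ctxp->loop_iter_var; mismatched
   variables, a wrong number of them, or 'source' and 'sink' clauses on
   the same construct are all diagnosed.  */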
13249
13250 static gimple *
13251 gimplify_omp_ordered (tree expr, gimple_seq body)
13252 {
13253 tree c, decls;
13254 int failures = 0;
13255 unsigned int i;
13256 tree source_c = NULL_TREE;
13257 tree sink_c = NULL_TREE;
13258
13259 if (gimplify_omp_ctxp)
13260 {
13261 for (c = OMP_ORDERED_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
13262 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
13263 && gimplify_omp_ctxp->loop_iter_var.is_empty ()
13264 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK
13265 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE))
13266 {
13267 error_at (OMP_CLAUSE_LOCATION (c),
13268 "%<ordered%> construct with %<depend%> clause must be "
13269 "closely nested inside a loop with %<ordered%> clause "
13270 "with a parameter");
13271 failures++;
13272 }
13273 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
13274 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
13275 {
13276 bool fail = false;
13277 for (decls = OMP_CLAUSE_DECL (c), i = 0;
13278 decls && TREE_CODE (decls) == TREE_LIST;
13279 decls = TREE_CHAIN (decls), ++i)
13280 if (i >= gimplify_omp_ctxp->loop_iter_var.length () / 2)
13281 continue;
13282 else if (TREE_VALUE (decls)
13283 != gimplify_omp_ctxp->loop_iter_var[2 * i])
13284 {
13285 error_at (OMP_CLAUSE_LOCATION (c),
13286 "variable %qE is not an iteration "
13287 "of outermost loop %d, expected %qE",
13288 TREE_VALUE (decls), i + 1,
13289 gimplify_omp_ctxp->loop_iter_var[2 * i]);
13290 fail = true;
13291 failures++;
13292 }
13293 else
13294 TREE_VALUE (decls)
13295 = gimplify_omp_ctxp->loop_iter_var[2 * i + 1];
13296 if (!fail && i != gimplify_omp_ctxp->loop_iter_var.length () / 2)
13297 {
13298 error_at (OMP_CLAUSE_LOCATION (c),
13299 "number of variables in %<depend%> clause with "
13300 "%<sink%> modifier does not match number of "
13301 "iteration variables");
13302 failures++;
13303 }
13304 sink_c = c;
13305 }
13306 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
13307 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE)
13308 {
13309 if (source_c)
13310 {
13311 error_at (OMP_CLAUSE_LOCATION (c),
13312 "more than one %<depend%> clause with %<source%> "
13313 "modifier on an %<ordered%> construct");
13314 failures++;
13315 }
13316 else
13317 source_c = c;
13318 }
13319 }
13320 if (source_c && sink_c)
13321 {
13322 error_at (OMP_CLAUSE_LOCATION (source_c),
13323 "%<depend%> clause with %<source%> modifier specified "
13324 "together with %<depend%> clauses with %<sink%> modifier "
13325 "on the same construct");
13326 failures++;
13327 }
13328
13329 if (failures)
13330 return gimple_build_nop ();
13331 return gimple_build_omp_ordered (body, OMP_ORDERED_CLAUSES (expr));
13332 }
13333
13334 /* Convert the GENERIC expression tree *EXPR_P to GIMPLE. If the
13335 expression produces a value to be used as an operand inside a GIMPLE
13336 statement, the value will be stored back in *EXPR_P. This value will
13337 be a tree of class tcc_declaration, tcc_constant, tcc_reference or
13338 an SSA_NAME. The corresponding sequence of GIMPLE statements is
13339 emitted in PRE_P and POST_P.
13340
13341 Additionally, this process may overwrite parts of the input
13342 expression during gimplification. Ideally, it should be
13343 possible to do non-destructive gimplification.
13344
13345 EXPR_P points to the GENERIC expression to convert to GIMPLE. If
13346 the expression needs to evaluate to a value to be used as
13347 an operand in a GIMPLE statement, this value will be stored in
13348 *EXPR_P on exit. This happens when the caller specifies one
13349 of fb_lvalue or fb_rvalue fallback flags.
13350
13351 PRE_P will contain the sequence of GIMPLE statements corresponding
13352 to the evaluation of EXPR and all the side-effects that must
13353 be executed before the main expression. On exit, the last
13354 statement of PRE_P is the core statement being gimplified. For
13355 instance, when gimplifying 'if (++a)' the last statement in
13356 PRE_P will be 'if (t.1)' where t.1 is the result of
13357 pre-incrementing 'a'.
13358
13359 POST_P will contain the sequence of GIMPLE statements corresponding
13360 to the evaluation of all the side-effects that must be executed
13361 after the main expression. If this is NULL, the post
13362 side-effects are stored at the end of PRE_P.
13363
13364 The reason why the output is split in two is to handle post
13365 side-effects explicitly. In some cases, an expression may have
13366 inner and outer post side-effects which need to be emitted in
13367 an order different from the one given by the recursive
13368 traversal. For instance, for the expression (*p--)++ the post
13369 side-effects of '--' must actually occur *after* the post
13370 side-effects of '++'. However, gimplification will first visit
13371 the inner expression, so if a separate POST sequence was not
13372 used, the resulting sequence would be:
13373
13374 1 t.1 = *p
13375 2 p = p - 1
13376 3 t.2 = t.1 + 1
13377 4 *p = t.2
13378
13379 However, the post-decrement operation in line #2 must not be
13380 evaluated until after the store to *p at line #4, so the
13381 correct sequence should be:
13382
13383 1 t.1 = *p
13384 2 t.2 = t.1 + 1
13385 3 *p = t.2
13386 4 p = p - 1
13387
13388 So, by specifying a separate post queue, it is possible
13389 to emit the post side-effects in the correct order.
13390 If POST_P is NULL, an internal queue will be used. Before
13391 returning to the caller, the sequence POST_P is appended to
13392 the main output sequence PRE_P.
13393
13394 GIMPLE_TEST_F points to a function that takes a tree T and
13395 returns nonzero if T is in the GIMPLE form requested by the
13396 caller. The GIMPLE predicates are in gimple.c.
13397
13398 FALLBACK tells the function what sort of a temporary we want if
13399 gimplification cannot produce an expression that complies with
13400 GIMPLE_TEST_F.
13401
13402 fb_none means that no temporary should be generated
13403 fb_rvalue means that an rvalue is OK to generate
13404 fb_lvalue means that an lvalue is OK to generate
13405 fb_either means that either is OK, but an lvalue is preferable.
13406 fb_mayfail means that gimplification may fail (in which case
13407 GS_ERROR will be returned)
13408
13409 The return value is either GS_ERROR or GS_ALL_DONE, since this
13410 function iterates until EXPR is completely gimplified or an error
13411 occurs. */
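/* A typical internal use (illustrative): forcing an operand into a
   GIMPLE value, with any needed statements emitted into PRE_P:

     ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
			   is_gimple_val, fb_rvalue);

   which returns GS_ALL_DONE on success or GS_ERROR on failure.  */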
13412
13413 enum gimplify_status
13414 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
13415 bool (*gimple_test_f) (tree), fallback_t fallback)
13416 {
13417 tree tmp;
13418 gimple_seq internal_pre = NULL;
13419 gimple_seq internal_post = NULL;
13420 tree save_expr;
13421 bool is_statement;
13422 location_t saved_location;
13423 enum gimplify_status ret;
13424 gimple_stmt_iterator pre_last_gsi, post_last_gsi;
13425 tree label;
13426
13427 save_expr = *expr_p;
13428 if (save_expr == NULL_TREE)
13429 return GS_ALL_DONE;
13430
13431 /* If we are gimplifying a top-level statement, PRE_P must be valid. */
13432 is_statement = gimple_test_f == is_gimple_stmt;
13433 if (is_statement)
13434 gcc_assert (pre_p);
13435
13436 /* Consistency checks. */
13437 if (gimple_test_f == is_gimple_reg)
13438 gcc_assert (fallback & (fb_rvalue | fb_lvalue));
13439 else if (gimple_test_f == is_gimple_val
13440 || gimple_test_f == is_gimple_call_addr
13441 || gimple_test_f == is_gimple_condexpr
13442 || gimple_test_f == is_gimple_condexpr_for_cond
13443 || gimple_test_f == is_gimple_mem_rhs
13444 || gimple_test_f == is_gimple_mem_rhs_or_call
13445 || gimple_test_f == is_gimple_reg_rhs
13446 || gimple_test_f == is_gimple_reg_rhs_or_call
13447 || gimple_test_f == is_gimple_asm_val
13448 || gimple_test_f == is_gimple_mem_ref_addr)
13449 gcc_assert (fallback & fb_rvalue);
13450 else if (gimple_test_f == is_gimple_min_lval
13451 || gimple_test_f == is_gimple_lvalue)
13452 gcc_assert (fallback & fb_lvalue);
13453 else if (gimple_test_f == is_gimple_addressable)
13454 gcc_assert (fallback & fb_either);
13455 else if (gimple_test_f == is_gimple_stmt)
13456 gcc_assert (fallback == fb_none);
13457 else
13458 {
13459 /* We should have recognized the GIMPLE_TEST_F predicate to
13460 know what kind of fallback to use in case a temporary is
13461 needed to hold the value or address of *EXPR_P. */
13462 gcc_unreachable ();
13463 }
13464
13465 /* We used to check the predicate here and return immediately if it
13466 succeeds. This is wrong; the design is for gimplification to be
13467 idempotent, and for the predicates to only test for valid forms, not
13468 whether they are fully simplified. */
13469 if (pre_p == NULL)
13470 pre_p = &internal_pre;
13471
13472 if (post_p == NULL)
13473 post_p = &internal_post;
13474
13475 /* Remember the last statements added to PRE_P and POST_P. Every
13476 new statement added by the gimplification helpers needs to be
13477 annotated with location information. To centralize the
13478 responsibility, we remember the last statement that had been
13479 added to both queues before gimplifying *EXPR_P. If
13480 gimplification produces new statements in PRE_P and POST_P, those
13481 statements will be annotated with the same location information
13482 as *EXPR_P. */
13483 pre_last_gsi = gsi_last (*pre_p);
13484 post_last_gsi = gsi_last (*post_p);
13485
13486 saved_location = input_location;
13487 if (save_expr != error_mark_node
13488 && EXPR_HAS_LOCATION (*expr_p))
13489 input_location = EXPR_LOCATION (*expr_p);
13490
13491 /* Loop over the specific gimplifiers until the toplevel node
13492 remains the same. */
13493 do
13494 {
13495 /* Strip away as many useless type conversions as possible
13496 at the toplevel. */
13497 STRIP_USELESS_TYPE_CONVERSION (*expr_p);
13498
13499 /* Remember the expr. */
13500 save_expr = *expr_p;
13501
13502 /* Die, die, die, my darling. */
13503 if (error_operand_p (save_expr))
13504 {
13505 ret = GS_ERROR;
13506 break;
13507 }
13508
13509 /* Do any language-specific gimplification. */
13510 ret = ((enum gimplify_status)
13511 lang_hooks.gimplify_expr (expr_p, pre_p, post_p));
13512 if (ret == GS_OK)
13513 {
13514 if (*expr_p == NULL_TREE)
13515 break;
13516 if (*expr_p != save_expr)
13517 continue;
13518 }
13519 else if (ret != GS_UNHANDLED)
13520 break;
13521
13522 /* Make sure that all the cases set 'ret' appropriately. */
13523 ret = GS_UNHANDLED;
13524 switch (TREE_CODE (*expr_p))
13525 {
13526 /* First deal with the special cases. */
13527
13528 case POSTINCREMENT_EXPR:
13529 case POSTDECREMENT_EXPR:
13530 case PREINCREMENT_EXPR:
13531 case PREDECREMENT_EXPR:
13532 ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
13533 fallback != fb_none,
13534 TREE_TYPE (*expr_p));
13535 break;
13536
13537 case VIEW_CONVERT_EXPR:
13538 if ((fallback & fb_rvalue)
13539 && is_gimple_reg_type (TREE_TYPE (*expr_p))
13540 && is_gimple_reg_type (TREE_TYPE (TREE_OPERAND (*expr_p, 0))))
13541 {
13542 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
13543 post_p, is_gimple_val, fb_rvalue);
13544 recalculate_side_effects (*expr_p);
13545 break;
13546 }
13547 /* Fallthru. */
13548
13549 case ARRAY_REF:
13550 case ARRAY_RANGE_REF:
13551 case REALPART_EXPR:
13552 case IMAGPART_EXPR:
13553 case COMPONENT_REF:
13554 ret = gimplify_compound_lval (expr_p, pre_p, post_p,
13555 fallback ? fallback : fb_rvalue);
13556 break;
13557
13558 case COND_EXPR:
13559 ret = gimplify_cond_expr (expr_p, pre_p, fallback);
13560
13561 /* C99 code may assign to an array in a structure value of a
13562 conditional expression, and this has undefined behavior
13563 only on execution, so create a temporary if an lvalue is
13564 required. */
13565 if (fallback == fb_lvalue)
13566 {
13567 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
13568 mark_addressable (*expr_p);
13569 ret = GS_OK;
13570 }
13571 break;
13572
13573 case CALL_EXPR:
13574 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
13575
13576 /* C99 code may assign to an array in a structure returned
13577 from a function, and this has undefined behavior only on
13578 execution, so create a temporary if an lvalue is
13579 required. */
13580 if (fallback == fb_lvalue)
13581 {
13582 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
13583 mark_addressable (*expr_p);
13584 ret = GS_OK;
13585 }
13586 break;
13587
13588 case TREE_LIST:
13589 gcc_unreachable ();
13590
13591 case COMPOUND_EXPR:
13592 ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
13593 break;
13594
13595 case COMPOUND_LITERAL_EXPR:
13596 ret = gimplify_compound_literal_expr (expr_p, pre_p,
13597 gimple_test_f, fallback);
13598 break;
13599
13600 case MODIFY_EXPR:
13601 case INIT_EXPR:
13602 ret = gimplify_modify_expr (expr_p, pre_p, post_p,
13603 fallback != fb_none);
13604 break;
13605
13606 case TRUTH_ANDIF_EXPR:
13607 case TRUTH_ORIF_EXPR:
13608 {
13609 /* Preserve the original type of the expression and the
13610 source location of the outer expression. */
13611 tree org_type = TREE_TYPE (*expr_p);
13612 *expr_p = gimple_boolify (*expr_p);
13613 *expr_p = build3_loc (input_location, COND_EXPR,
13614 org_type, *expr_p,
13615 fold_convert_loc
13616 (input_location,
13617 org_type, boolean_true_node),
13618 fold_convert_loc
13619 (input_location,
13620 org_type, boolean_false_node));
13621 ret = GS_OK;
13622 break;
13623 }
13624
13625 case TRUTH_NOT_EXPR:
13626 {
13627 tree type = TREE_TYPE (*expr_p);
13628 /* The parsers are careful to generate TRUTH_NOT_EXPR
13629 only with operands that are always zero or one.
13630 We do not fold here but handle the only interesting case
13631 manually, as fold may re-introduce the TRUTH_NOT_EXPR. */
13632 *expr_p = gimple_boolify (*expr_p);
13633 if (TYPE_PRECISION (TREE_TYPE (*expr_p)) == 1)
13634 *expr_p = build1_loc (input_location, BIT_NOT_EXPR,
13635 TREE_TYPE (*expr_p),
13636 TREE_OPERAND (*expr_p, 0));
13637 else
13638 *expr_p = build2_loc (input_location, BIT_XOR_EXPR,
13639 TREE_TYPE (*expr_p),
13640 TREE_OPERAND (*expr_p, 0),
13641 build_int_cst (TREE_TYPE (*expr_p), 1));
13642 if (!useless_type_conversion_p (type, TREE_TYPE (*expr_p)))
13643 *expr_p = fold_convert_loc (input_location, type, *expr_p);
13644 ret = GS_OK;
13645 break;
13646 }
13647
13648 case ADDR_EXPR:
13649 ret = gimplify_addr_expr (expr_p, pre_p, post_p);
13650 break;
13651
13652 case ANNOTATE_EXPR:
13653 {
13654 tree cond = TREE_OPERAND (*expr_p, 0);
13655 tree kind = TREE_OPERAND (*expr_p, 1);
13656 tree data = TREE_OPERAND (*expr_p, 2);
13657 tree type = TREE_TYPE (cond);
13658 if (!INTEGRAL_TYPE_P (type))
13659 {
13660 *expr_p = cond;
13661 ret = GS_OK;
13662 break;
13663 }
13664 tree tmp = create_tmp_var (type);
13665 gimplify_arg (&cond, pre_p, EXPR_LOCATION (*expr_p));
13666 gcall *call
13667 = gimple_build_call_internal (IFN_ANNOTATE, 3, cond, kind, data);
13668 gimple_call_set_lhs (call, tmp);
13669 gimplify_seq_add_stmt (pre_p, call);
13670 *expr_p = tmp;
13671 ret = GS_ALL_DONE;
13672 break;
13673 }
13674
13675 case VA_ARG_EXPR:
13676 ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
13677 break;
13678
13679 CASE_CONVERT:
13680 if (IS_EMPTY_STMT (*expr_p))
13681 {
13682 ret = GS_ALL_DONE;
13683 break;
13684 }
13685
13686 if (VOID_TYPE_P (TREE_TYPE (*expr_p))
13687 || fallback == fb_none)
13688 {
13689 /* Just strip a conversion to void (or in void context) and
13690 try again. */
13691 *expr_p = TREE_OPERAND (*expr_p, 0);
13692 ret = GS_OK;
13693 break;
13694 }
13695
13696 ret = gimplify_conversion (expr_p);
13697 if (ret == GS_ERROR)
13698 break;
13699 if (*expr_p != save_expr)
13700 break;
13701 /* FALLTHRU */
13702
13703 case FIX_TRUNC_EXPR:
13704 /* unary_expr: ... | '(' cast ')' val | ... */
13705 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
13706 is_gimple_val, fb_rvalue);
13707 recalculate_side_effects (*expr_p);
13708 break;
13709
13710 case INDIRECT_REF:
13711 {
13712 bool volatilep = TREE_THIS_VOLATILE (*expr_p);
13713 bool notrap = TREE_THIS_NOTRAP (*expr_p);
13714 tree saved_ptr_type = TREE_TYPE (TREE_OPERAND (*expr_p, 0));
13715
13716 *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
13717 if (*expr_p != save_expr)
13718 {
13719 ret = GS_OK;
13720 break;
13721 }
13722
13723 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
13724 is_gimple_reg, fb_rvalue);
13725 if (ret == GS_ERROR)
13726 break;
13727
13728 recalculate_side_effects (*expr_p);
13729 *expr_p = fold_build2_loc (input_location, MEM_REF,
13730 TREE_TYPE (*expr_p),
13731 TREE_OPERAND (*expr_p, 0),
13732 build_int_cst (saved_ptr_type, 0));
13733 TREE_THIS_VOLATILE (*expr_p) = volatilep;
13734 TREE_THIS_NOTRAP (*expr_p) = notrap;
13735 ret = GS_OK;
13736 break;
13737 }
13738
13739 /* We arrive here through the various re-gimplification paths. */
13740 case MEM_REF:
13741 /* First try re-folding the whole thing. */
13742 tmp = fold_binary (MEM_REF, TREE_TYPE (*expr_p),
13743 TREE_OPERAND (*expr_p, 0),
13744 TREE_OPERAND (*expr_p, 1));
13745 if (tmp)
13746 {
13747 REF_REVERSE_STORAGE_ORDER (tmp)
13748 = REF_REVERSE_STORAGE_ORDER (*expr_p);
13749 *expr_p = tmp;
13750 recalculate_side_effects (*expr_p);
13751 ret = GS_OK;
13752 break;
13753 }
13754 /* Avoid re-gimplifying the address operand if it is already
13755 in suitable form. Re-gimplifying would mark the address
13756 operand addressable. Always gimplify when not in SSA form
13757 as we still may have to gimplify decls with value-exprs. */
13758 if (!gimplify_ctxp || !gimple_in_ssa_p (cfun)
13759 || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p, 0)))
13760 {
13761 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
13762 is_gimple_mem_ref_addr, fb_rvalue);
13763 if (ret == GS_ERROR)
13764 break;
13765 }
13766 recalculate_side_effects (*expr_p);
13767 ret = GS_ALL_DONE;
13768 break;
13769
13770 /* Constants need not be gimplified. */
13771 case INTEGER_CST:
13772 case REAL_CST:
13773 case FIXED_CST:
13774 case STRING_CST:
13775 case COMPLEX_CST:
13776 case VECTOR_CST:
13777 /* Drop the overflow flag on constants, we do not want
13778 that in the GIMPLE IL. */
13779 if (TREE_OVERFLOW_P (*expr_p))
13780 *expr_p = drop_tree_overflow (*expr_p);
13781 ret = GS_ALL_DONE;
13782 break;
13783
13784 case CONST_DECL:
13785 /* If we require an lvalue, such as for ADDR_EXPR, retain the
13786 CONST_DECL node. Otherwise the decl is replaceable by its
13787 value. */
13788 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
13789 if (fallback & fb_lvalue)
13790 ret = GS_ALL_DONE;
13791 else
13792 {
13793 *expr_p = DECL_INITIAL (*expr_p);
13794 ret = GS_OK;
13795 }
13796 break;
13797
13798 case DECL_EXPR:
13799 ret = gimplify_decl_expr (expr_p, pre_p);
13800 break;
13801
13802 case BIND_EXPR:
13803 ret = gimplify_bind_expr (expr_p, pre_p);
13804 break;
13805
13806 case LOOP_EXPR:
13807 ret = gimplify_loop_expr (expr_p, pre_p);
13808 break;
13809
13810 case SWITCH_EXPR:
13811 ret = gimplify_switch_expr (expr_p, pre_p);
13812 break;
13813
13814 case EXIT_EXPR:
13815 ret = gimplify_exit_expr (expr_p);
13816 break;
13817
13818 case GOTO_EXPR:
13819 /* If the target is not a LABEL_DECL, then it is a computed jump
13820 and the target needs to be gimplified. */
13821 if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
13822 {
13823 ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
13824 NULL, is_gimple_val, fb_rvalue);
13825 if (ret == GS_ERROR)
13826 break;
13827 }
13828 gimplify_seq_add_stmt (pre_p,
13829 gimple_build_goto (GOTO_DESTINATION (*expr_p)));
13830 ret = GS_ALL_DONE;
13831 break;
13832
13833 case PREDICT_EXPR:
13834 gimplify_seq_add_stmt (pre_p,
13835 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
13836 PREDICT_EXPR_OUTCOME (*expr_p)));
13837 ret = GS_ALL_DONE;
13838 break;
13839
13840 case LABEL_EXPR:
13841 ret = gimplify_label_expr (expr_p, pre_p);
13842 label = LABEL_EXPR_LABEL (*expr_p);
13843 gcc_assert (decl_function_context (label) == current_function_decl);
13844
13845 /* If the label is used in a goto statement, or the address of the
13846 label is taken, we need to unpoison all variables that were seen
13847 so far. Doing so prevents us from reporting false positives. */
13848 if (asan_poisoned_variables
13849 && asan_used_labels != NULL
13850 && asan_used_labels->contains (label))
13851 asan_poison_variables (asan_poisoned_variables, false, pre_p);
13852 break;
13853
13854 case CASE_LABEL_EXPR:
13855 ret = gimplify_case_label_expr (expr_p, pre_p);
13856
13857 if (gimplify_ctxp->live_switch_vars)
13858 asan_poison_variables (gimplify_ctxp->live_switch_vars, false,
13859 pre_p);
13860 break;
13861
13862 case RETURN_EXPR:
13863 ret = gimplify_return_expr (*expr_p, pre_p);
13864 break;
13865
13866 case CONSTRUCTOR:
13867 /* Don't reduce this in place; let gimplify_init_constructor work its
13868 magic. But if we're just elaborating this for side effects, just
13869 gimplify any element that has side effects. */
13870 if (fallback == fb_none)
13871 {
13872 unsigned HOST_WIDE_INT ix;
13873 tree val;
13874 tree temp = NULL_TREE;
13875 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p), ix, val)
13876 if (TREE_SIDE_EFFECTS (val))
13877 append_to_statement_list (val, &temp);
13878
13879 *expr_p = temp;
13880 ret = temp ? GS_OK : GS_ALL_DONE;
13881 }
13882 /* C99 code may assign to an array in a constructed
13883 structure or union, and this has undefined behavior only
13884 on execution, so create a temporary if an lvalue is
13885 required. */
13886 else if (fallback == fb_lvalue)
13887 {
13888 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
13889 mark_addressable (*expr_p);
13890 ret = GS_OK;
13891 }
13892 else
13893 ret = GS_ALL_DONE;
13894 break;
13895
13896 /* The following are special cases that are not handled by the
13897 original GIMPLE grammar. */
13898
13899 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
13900 eliminated. */
13901 case SAVE_EXPR:
13902 ret = gimplify_save_expr (expr_p, pre_p, post_p);
13903 break;
13904
13905 case BIT_FIELD_REF:
13906 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
13907 post_p, is_gimple_lvalue, fb_either);
13908 recalculate_side_effects (*expr_p);
13909 break;
13910
13911 case TARGET_MEM_REF:
13912 {
13913 enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;
13914
13915 if (TMR_BASE (*expr_p))
13916 r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
13917 post_p, is_gimple_mem_ref_addr, fb_either);
13918 if (TMR_INDEX (*expr_p))
13919 r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
13920 post_p, is_gimple_val, fb_rvalue);
13921 if (TMR_INDEX2 (*expr_p))
13922 r1 = gimplify_expr (&TMR_INDEX2 (*expr_p), pre_p,
13923 post_p, is_gimple_val, fb_rvalue);
13924 /* TMR_STEP and TMR_OFFSET are always integer constants. */
13925 ret = MIN (r0, r1);
13926 }
13927 break;
13928
13929 case NON_LVALUE_EXPR:
13930 /* This should have been stripped above. */
13931 gcc_unreachable ();
13932
13933 case ASM_EXPR:
13934 ret = gimplify_asm_expr (expr_p, pre_p, post_p);
13935 break;
13936
13937 case TRY_FINALLY_EXPR:
13938 case TRY_CATCH_EXPR:
13939 {
13940 gimple_seq eval, cleanup;
13941 gtry *try_;
13942
13943 /* Calls to destructors are generated automatically in FINALLY/CATCH
13944 blocks. They should have location UNKNOWN_LOCATION. However,
13945 gimplify_call_expr will reset such call stmts to input_location
13946 if it finds a stmt's location is unknown. To prevent that for
13947 destructors, we set input_location to UNKNOWN_LOCATION here.
13948 Note that this only affects the destructor calls in the
13949 FINALLY/CATCH block, and input_location is automatically restored
13950 to its original value by the end of gimplify_expr. */
13951 input_location = UNKNOWN_LOCATION;
13952 eval = cleanup = NULL;
13953 gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
13954 if (TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
13955 && TREE_CODE (TREE_OPERAND (*expr_p, 1)) == EH_ELSE_EXPR)
13956 {
13957 gimple_seq n = NULL, e = NULL;
13958 gimplify_and_add (TREE_OPERAND (TREE_OPERAND (*expr_p, 1),
13959 0), &n);
13960 gimplify_and_add (TREE_OPERAND (TREE_OPERAND (*expr_p, 1),
13961 1), &e);
13962 if (!gimple_seq_empty_p (n) && !gimple_seq_empty_p (e))
13963 {
13964 geh_else *stmt = gimple_build_eh_else (n, e);
13965 gimple_seq_add_stmt (&cleanup, stmt);
13966 }
13967 }
13968 else
13969 gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
13970 /* Don't create bogus GIMPLE_TRY with empty cleanup. */
13971 if (gimple_seq_empty_p (cleanup))
13972 {
13973 gimple_seq_add_seq (pre_p, eval);
13974 ret = GS_ALL_DONE;
13975 break;
13976 }
13977 try_ = gimple_build_try (eval, cleanup,
13978 TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
13979 ? GIMPLE_TRY_FINALLY
13980 : GIMPLE_TRY_CATCH);
13981 if (EXPR_HAS_LOCATION (save_expr))
13982 gimple_set_location (try_, EXPR_LOCATION (save_expr));
13983 else if (LOCATION_LOCUS (saved_location) != UNKNOWN_LOCATION)
13984 gimple_set_location (try_, saved_location);
13985 if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
13986 gimple_try_set_catch_is_cleanup (try_,
13987 TRY_CATCH_IS_CLEANUP (*expr_p));
13988 gimplify_seq_add_stmt (pre_p, try_);
13989 ret = GS_ALL_DONE;
13990 break;
13991 }
13992
13993 case CLEANUP_POINT_EXPR:
13994 ret = gimplify_cleanup_point_expr (expr_p, pre_p);
13995 break;
13996
13997 case TARGET_EXPR:
13998 ret = gimplify_target_expr (expr_p, pre_p, post_p);
13999 break;
14000
14001 case CATCH_EXPR:
14002 {
14003 gimple *c;
14004 gimple_seq handler = NULL;
14005 gimplify_and_add (CATCH_BODY (*expr_p), &handler);
14006 c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
14007 gimplify_seq_add_stmt (pre_p, c);
14008 ret = GS_ALL_DONE;
14009 break;
14010 }
14011
14012 case EH_FILTER_EXPR:
14013 {
14014 gimple *ehf;
14015 gimple_seq failure = NULL;
14016
14017 gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
14018 ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
14019 gimple_set_no_warning (ehf, TREE_NO_WARNING (*expr_p));
14020 gimplify_seq_add_stmt (pre_p, ehf);
14021 ret = GS_ALL_DONE;
14022 break;
14023 }
14024
14025 case OBJ_TYPE_REF:
14026 {
14027 enum gimplify_status r0, r1;
14028 r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
14029 post_p, is_gimple_val, fb_rvalue);
14030 r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
14031 post_p, is_gimple_val, fb_rvalue);
14032 TREE_SIDE_EFFECTS (*expr_p) = 0;
14033 ret = MIN (r0, r1);
14034 }
14035 break;
14036
14037 case LABEL_DECL:
14038 /* We get here when taking the address of a label. We mark
14039 the label as "forced", meaning it can never be removed and
14040 is a potential target for any computed goto. */
14041 FORCED_LABEL (*expr_p) = 1;
14042 ret = GS_ALL_DONE;
14043 break;
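/* Illustration (editor's sketch): this case fires for the GNU C
   labels-as-values extension, e.g.

     void f (void)
     {
       void *p;
     l:
       p = &&l;        // gimplifying the ADDR_EXPR visits the
                       // LABEL_DECL l and sets FORCED_LABEL
       goto *p;        // any computed goto may now target l
     }

   Without FORCED_LABEL, later passes could delete or merge the label
   even though a computed jump might still reach it.  */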
14044
14045 case STATEMENT_LIST:
14046 ret = gimplify_statement_list (expr_p, pre_p);
14047 break;
14048
14049 case WITH_SIZE_EXPR:
14050 {
14051 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
14052 post_p == &internal_post ? NULL : post_p,
14053 gimple_test_f, fallback);
14054 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
14055 is_gimple_val, fb_rvalue);
14056 ret = GS_ALL_DONE;
14057 }
14058 break;
14059
14060 case VAR_DECL:
14061 case PARM_DECL:
14062 ret = gimplify_var_or_parm_decl (expr_p);
14063 break;
14064
14065 case RESULT_DECL:
14066 /* When within an OMP context, notice uses of variables. */
14067 if (gimplify_omp_ctxp)
14068 omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
14069 ret = GS_ALL_DONE;
14070 break;
14071
14072 case DEBUG_EXPR_DECL:
14073 gcc_unreachable ();
14074
14075 case DEBUG_BEGIN_STMT:
14076 gimplify_seq_add_stmt (pre_p,
14077 gimple_build_debug_begin_stmt
14078 (TREE_BLOCK (*expr_p),
14079 EXPR_LOCATION (*expr_p)));
14080 ret = GS_ALL_DONE;
14081 *expr_p = NULL;
14082 break;
14083
14084 case SSA_NAME:
14085 /* Allow callbacks into the gimplifier during optimization. */
14086 ret = GS_ALL_DONE;
14087 break;
14088
14089 case OMP_PARALLEL:
14090 gimplify_omp_parallel (expr_p, pre_p);
14091 ret = GS_ALL_DONE;
14092 break;
14093
14094 case OMP_TASK:
14095 gimplify_omp_task (expr_p, pre_p);
14096 ret = GS_ALL_DONE;
14097 break;
14098
14099 case OMP_FOR:
14100 case OMP_SIMD:
14101 case OMP_DISTRIBUTE:
14102 case OMP_TASKLOOP:
14103 case OACC_LOOP:
14104 ret = gimplify_omp_for (expr_p, pre_p);
14105 break;
14106
14107 case OMP_LOOP:
14108 ret = gimplify_omp_loop (expr_p, pre_p);
14109 break;
14110
14111 case OACC_CACHE:
14112 gimplify_oacc_cache (expr_p, pre_p);
14113 ret = GS_ALL_DONE;
14114 break;
14115
14116 case OACC_DECLARE:
14117 gimplify_oacc_declare (expr_p, pre_p);
14118 ret = GS_ALL_DONE;
14119 break;
14120
14121 case OACC_HOST_DATA:
14122 case OACC_DATA:
14123 case OACC_KERNELS:
14124 case OACC_PARALLEL:
14125 case OACC_SERIAL:
14126 case OMP_SECTIONS:
14127 case OMP_SINGLE:
14128 case OMP_TARGET:
14129 case OMP_TARGET_DATA:
14130 case OMP_TEAMS:
14131 gimplify_omp_workshare (expr_p, pre_p);
14132 ret = GS_ALL_DONE;
14133 break;
14134
14135 case OACC_ENTER_DATA:
14136 case OACC_EXIT_DATA:
14137 case OACC_UPDATE:
14138 case OMP_TARGET_UPDATE:
14139 case OMP_TARGET_ENTER_DATA:
14140 case OMP_TARGET_EXIT_DATA:
14141 gimplify_omp_target_update (expr_p, pre_p);
14142 ret = GS_ALL_DONE;
14143 break;
14144
14145 case OMP_SECTION:
14146 case OMP_MASTER:
14147 case OMP_ORDERED:
14148 case OMP_CRITICAL:
14149 case OMP_SCAN:
14150 {
14151 gimple_seq body = NULL;
14152 gimple *g;
14153 bool saved_in_omp_construct = in_omp_construct;
14154
14155 in_omp_construct = true;
14156 gimplify_and_add (OMP_BODY (*expr_p), &body);
14157 in_omp_construct = saved_in_omp_construct;
14158 switch (TREE_CODE (*expr_p))
14159 {
14160 case OMP_SECTION:
14161 g = gimple_build_omp_section (body);
14162 break;
14163 case OMP_MASTER:
14164 g = gimple_build_omp_master (body);
14165 break;
14166 case OMP_ORDERED:
14167 g = gimplify_omp_ordered (*expr_p, body);
14168 break;
14169 case OMP_CRITICAL:
14170 gimplify_scan_omp_clauses (&OMP_CRITICAL_CLAUSES (*expr_p),
14171 pre_p, ORT_WORKSHARE, OMP_CRITICAL);
14172 gimplify_adjust_omp_clauses (pre_p, body,
14173 &OMP_CRITICAL_CLAUSES (*expr_p),
14174 OMP_CRITICAL);
14175 g = gimple_build_omp_critical (body,
14176 OMP_CRITICAL_NAME (*expr_p),
14177 OMP_CRITICAL_CLAUSES (*expr_p));
14178 break;
14179 case OMP_SCAN:
14180 gimplify_scan_omp_clauses (&OMP_SCAN_CLAUSES (*expr_p),
14181 pre_p, ORT_WORKSHARE, OMP_SCAN);
14182 gimplify_adjust_omp_clauses (pre_p, body,
14183 &OMP_SCAN_CLAUSES (*expr_p),
14184 OMP_SCAN);
14185 g = gimple_build_omp_scan (body, OMP_SCAN_CLAUSES (*expr_p));
14186 break;
14187 default:
14188 gcc_unreachable ();
14189 }
14190 gimplify_seq_add_stmt (pre_p, g);
14191 ret = GS_ALL_DONE;
14192 break;
14193 }
14194
14195 case OMP_TASKGROUP:
14196 {
14197 gimple_seq body = NULL;
14198
14199 tree *pclauses = &OMP_TASKGROUP_CLAUSES (*expr_p);
14200 bool saved_in_omp_construct = in_omp_construct;
14201 gimplify_scan_omp_clauses (pclauses, pre_p, ORT_TASKGROUP,
14202 OMP_TASKGROUP);
14203 gimplify_adjust_omp_clauses (pre_p, NULL, pclauses, OMP_TASKGROUP);
14204
14205 in_omp_construct = true;
14206 gimplify_and_add (OMP_BODY (*expr_p), &body);
14207 in_omp_construct = saved_in_omp_construct;
14208 gimple_seq cleanup = NULL;
14209 tree fn = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_END);
14210 gimple *g = gimple_build_call (fn, 0);
14211 gimple_seq_add_stmt (&cleanup, g);
14212 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
14213 body = NULL;
14214 gimple_seq_add_stmt (&body, g);
14215 g = gimple_build_omp_taskgroup (body, *pclauses);
14216 gimplify_seq_add_stmt (pre_p, g);
14217 ret = GS_ALL_DONE;
14218 break;
14219 }
14220
14221 case OMP_ATOMIC:
14222 case OMP_ATOMIC_READ:
14223 case OMP_ATOMIC_CAPTURE_OLD:
14224 case OMP_ATOMIC_CAPTURE_NEW:
14225 ret = gimplify_omp_atomic (expr_p, pre_p);
14226 break;
14227
14228 case TRANSACTION_EXPR:
14229 ret = gimplify_transaction (expr_p, pre_p);
14230 break;
14231
14232 case TRUTH_AND_EXPR:
14233 case TRUTH_OR_EXPR:
14234 case TRUTH_XOR_EXPR:
14235 {
14236 tree orig_type = TREE_TYPE (*expr_p);
14237 tree new_type, xop0, xop1;
14238 *expr_p = gimple_boolify (*expr_p);
14239 new_type = TREE_TYPE (*expr_p);
14240 if (!useless_type_conversion_p (orig_type, new_type))
14241 {
14242 *expr_p = fold_convert_loc (input_location, orig_type, *expr_p);
14243 ret = GS_OK;
14244 break;
14245 }
14246
14247 /* Boolified binary truth expressions are semantically equivalent
14248 to bitwise binary expressions. Canonicalize them to the
14249 bitwise variant. */
14250 switch (TREE_CODE (*expr_p))
14251 {
14252 case TRUTH_AND_EXPR:
14253 TREE_SET_CODE (*expr_p, BIT_AND_EXPR);
14254 break;
14255 case TRUTH_OR_EXPR:
14256 TREE_SET_CODE (*expr_p, BIT_IOR_EXPR);
14257 break;
14258 case TRUTH_XOR_EXPR:
14259 TREE_SET_CODE (*expr_p, BIT_XOR_EXPR);
14260 break;
14261 default:
14262 break;
14263 }
14264 /* Now make sure that the operands have types compatible with the
14265 expression's new_type. */
14266 xop0 = TREE_OPERAND (*expr_p, 0);
14267 xop1 = TREE_OPERAND (*expr_p, 1);
14268 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop0)))
14269 TREE_OPERAND (*expr_p, 0) = fold_convert_loc (input_location,
14270 new_type,
14271 xop0);
14272 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop1)))
14273 TREE_OPERAND (*expr_p, 1) = fold_convert_loc (input_location,
14274 new_type,
14275 xop1);
14276 /* Continue classified as tcc_binary. */
14277 goto expr_2;
14278 }
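/* Illustration (editor's sketch): TRUTH_AND_EXPR and friends are the
   non-short-circuit truth operations, produced e.g. by the Fortran
   front end for .AND. / .NEQV., or by fold when it proves a && b need
   not short-circuit.  Given boolean operands

     TRUTH_XOR_EXPR <a, b>

   gimple_boolify keeps the BOOLEAN_TYPE result and the switch above
   rewrites the node in place to

     BIT_XOR_EXPR <a, b>

   so later passes only ever see the bitwise form.  */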
14279
14280 case VEC_COND_EXPR:
14281 {
14282 enum gimplify_status r0, r1, r2;
14283
14284 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
14285 post_p, is_gimple_condexpr, fb_rvalue);
14286 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
14287 post_p, is_gimple_val, fb_rvalue);
14288 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
14289 post_p, is_gimple_val, fb_rvalue);
14290
14291 ret = MIN (MIN (r0, r1), r2);
14292 recalculate_side_effects (*expr_p);
14293 }
14294 break;
14295
14296 case VEC_PERM_EXPR:
14297 /* Classified as tcc_expression. */
14298 goto expr_3;
14299
14300 case BIT_INSERT_EXPR:
14301 /* Argument 3 is a constant. */
14302 goto expr_2;
14303
14304 case POINTER_PLUS_EXPR:
14305 {
14306 enum gimplify_status r0, r1;
14307 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
14308 post_p, is_gimple_val, fb_rvalue);
14309 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
14310 post_p, is_gimple_val, fb_rvalue);
14311 recalculate_side_effects (*expr_p);
14312 ret = MIN (r0, r1);
14313 break;
14314 }
14315
14316 default:
14317 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
14318 {
14319 case tcc_comparison:
14320 /* Handle comparisons of aggregates of non-scalar mode with a
14321 call to memcmp. It would be nice to have to do this only for
14322 variable-sized objects, but then we'd have to allow the same
14323 nest of reference nodes we allow for MODIFY_EXPR and that's
14324 too complex.
14325
14326 Compare scalar mode aggregates as scalar mode values. Using
14327 memcmp for them would be very inefficient at best, and is
14328 plain wrong if bitfields are involved. */
14329 {
14330 tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
14331
14332 /* Vector comparisons need no boolification. */
14333 if (TREE_CODE (type) == VECTOR_TYPE)
14334 goto expr_2;
14335 else if (!AGGREGATE_TYPE_P (type))
14336 {
14337 tree org_type = TREE_TYPE (*expr_p);
14338 *expr_p = gimple_boolify (*expr_p);
14339 if (!useless_type_conversion_p (org_type,
14340 TREE_TYPE (*expr_p)))
14341 {
14342 *expr_p = fold_convert_loc (input_location,
14343 org_type, *expr_p);
14344 ret = GS_OK;
14345 }
14346 else
14347 goto expr_2;
14348 }
14349 else if (TYPE_MODE (type) != BLKmode)
14350 ret = gimplify_scalar_mode_aggregate_compare (expr_p);
14351 else
14352 ret = gimplify_variable_sized_compare (expr_p);
14353
14354 break;
14355 }
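/* Illustration (editor's sketch): aggregate equality reaches this path
   e.g. from the Ada front end, where record comparison is a single
   tcc_comparison node.  For a BLKmode record of, say, 12 bytes,
   r1 = r2 is lowered by gimplify_variable_sized_compare to roughly

     D.1 = __builtin_memcmp (&r1, &r2, 12);
     D.2 = D.1 == 0;

   while a record small enough to have a scalar integer mode is
   reinterpreted and compared as a scalar value by
   gimplify_scalar_mode_aggregate_compare, which is both faster and
   correct in the presence of bitfields.  */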
14356
14357 /* If *EXPR_P does not need to be special-cased, handle it
14358 according to its class. */
14359 case tcc_unary:
14360 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
14361 post_p, is_gimple_val, fb_rvalue);
14362 break;
14363
14364 case tcc_binary:
14365 expr_2:
14366 {
14367 enum gimplify_status r0, r1;
14368
14369 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
14370 post_p, is_gimple_val, fb_rvalue);
14371 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
14372 post_p, is_gimple_val, fb_rvalue);
14373
14374 ret = MIN (r0, r1);
14375 break;
14376 }
14377
14378 expr_3:
14379 {
14380 enum gimplify_status r0, r1, r2;
14381
14382 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
14383 post_p, is_gimple_val, fb_rvalue);
14384 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
14385 post_p, is_gimple_val, fb_rvalue);
14386 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
14387 post_p, is_gimple_val, fb_rvalue);
14388
14389 ret = MIN (MIN (r0, r1), r2);
14390 break;
14391 }
14392
14393 case tcc_declaration:
14394 case tcc_constant:
14395 ret = GS_ALL_DONE;
14396 goto dont_recalculate;
14397
14398 default:
14399 gcc_unreachable ();
14400 }
14401
14402 recalculate_side_effects (*expr_p);
14403
14404 dont_recalculate:
14405 break;
14406 }
14407
14408 gcc_assert (*expr_p || ret != GS_OK);
14409 }
14410 while (ret == GS_OK);
14411
14412 /* If we encountered an error_mark somewhere nested inside, either
14413 stub out the statement or propagate the error back out. */
14414 if (ret == GS_ERROR)
14415 {
14416 if (is_statement)
14417 *expr_p = NULL;
14418 goto out;
14419 }
14420
14421 /* This was only valid as a return value from the langhook, which
14422 we handled. Make sure it doesn't escape from any other context. */
14423 gcc_assert (ret != GS_UNHANDLED);
14424
14425 if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
14426 {
14427 /* We aren't looking for a value, and we don't have a valid
14428 statement. If it doesn't have side-effects, throw it away.
14429 We can also get here with code such as "*&&L;", where L is
14430 a LABEL_DECL that is marked as FORCED_LABEL. */
14431 if (TREE_CODE (*expr_p) == LABEL_DECL
14432 || !TREE_SIDE_EFFECTS (*expr_p))
14433 *expr_p = NULL;
14434 else if (!TREE_THIS_VOLATILE (*expr_p))
14435 {
14436 /* This is probably a _REF that contains something nested that
14437 has side effects. Recurse through the operands to find it. */
14438 enum tree_code code = TREE_CODE (*expr_p);
14439
14440 switch (code)
14441 {
14442 case COMPONENT_REF:
14443 case REALPART_EXPR:
14444 case IMAGPART_EXPR:
14445 case VIEW_CONVERT_EXPR:
14446 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
14447 gimple_test_f, fallback);
14448 break;
14449
14450 case ARRAY_REF:
14451 case ARRAY_RANGE_REF:
14452 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
14453 gimple_test_f, fallback);
14454 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
14455 gimple_test_f, fallback);
14456 break;
14457
14458 default:
14459 /* Anything else with side-effects must be converted to
14460 a valid statement before we get here. */
14461 gcc_unreachable ();
14462 }
14463
14464 *expr_p = NULL;
14465 }
14466 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
14467 && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode)
14468 {
14469 /* Historically, the compiler has treated a bare reference
14470 to a non-BLKmode volatile lvalue as forcing a load. */
14471 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
14472
14473 /* Normally, we do not want to create a temporary for a
14474 TREE_ADDRESSABLE type because such a type should not be
14475 copied by bitwise-assignment. However, we make an exception
14476 here, as all we are doing is ensuring that we read the bytes
14477 that make up the type. We use create_tmp_var_raw because
14478 create_tmp_var will abort when given a TREE_ADDRESSABLE
14479 type. */
14480 tree tmp = create_tmp_var_raw (type, "vol");
14481 gimple_add_tmp_var (tmp);
14482 gimplify_assign (tmp, *expr_p, pre_p);
14483 *expr_p = NULL;
14484 }
14485 else
14486 /* We can't do anything useful with a volatile reference to
14487 an incomplete type, so just throw it away. Likewise for
14488 a BLKmode type, since any implicit inner load should
14489 already have been turned into an explicit one by the
14490 gimplification process. */
14491 *expr_p = NULL;
14492 }
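/* Illustration (editor's sketch): the volatile handling above is what
   makes a bare reference such as

     volatile int v;
     void g (void) { v; }   // statement uses no value

   still perform exactly one load: since v has a complete non-BLKmode
   type, an artificial temporary is created and the statement becomes

     vol.0 = v;

   whereas a volatile reference to an incomplete or BLKmode type is
   simply dropped.  */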
14493
14494 /* If we are gimplifying at the statement level, we're done. Tack
14495 everything together and return. */
14496 if (fallback == fb_none || is_statement)
14497 {
14498 /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
14499 it out for GC to reclaim it. */
14500 *expr_p = NULL_TREE;
14501
14502 if (!gimple_seq_empty_p (internal_pre)
14503 || !gimple_seq_empty_p (internal_post))
14504 {
14505 gimplify_seq_add_seq (&internal_pre, internal_post);
14506 gimplify_seq_add_seq (pre_p, internal_pre);
14507 }
14508
14509 /* The result of gimplifying *EXPR_P is going to be the last few
14510 statements in *PRE_P and *POST_P. Add location information
14511 to all the statements that were added by the gimplification
14512 helpers. */
14513 if (!gimple_seq_empty_p (*pre_p))
14514 annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);
14515
14516 if (!gimple_seq_empty_p (*post_p))
14517 annotate_all_with_location_after (*post_p, post_last_gsi,
14518 input_location);
14519
14520 goto out;
14521 }
14522
14523 #ifdef ENABLE_GIMPLE_CHECKING
14524 if (*expr_p)
14525 {
14526 enum tree_code code = TREE_CODE (*expr_p);
14527 /* These expressions should already be in gimple IR form. */
14528 gcc_assert (code != MODIFY_EXPR
14529 && code != ASM_EXPR
14530 && code != BIND_EXPR
14531 && code != CATCH_EXPR
14532 && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
14533 && code != EH_FILTER_EXPR
14534 && code != GOTO_EXPR
14535 && code != LABEL_EXPR
14536 && code != LOOP_EXPR
14537 && code != SWITCH_EXPR
14538 && code != TRY_FINALLY_EXPR
14539 && code != EH_ELSE_EXPR
14540 && code != OACC_PARALLEL
14541 && code != OACC_KERNELS
14542 && code != OACC_SERIAL
14543 && code != OACC_DATA
14544 && code != OACC_HOST_DATA
14545 && code != OACC_DECLARE
14546 && code != OACC_UPDATE
14547 && code != OACC_ENTER_DATA
14548 && code != OACC_EXIT_DATA
14549 && code != OACC_CACHE
14550 && code != OMP_CRITICAL
14551 && code != OMP_FOR
14552 && code != OACC_LOOP
14553 && code != OMP_MASTER
14554 && code != OMP_TASKGROUP
14555 && code != OMP_ORDERED
14556 && code != OMP_PARALLEL
14557 && code != OMP_SCAN
14558 && code != OMP_SECTIONS
14559 && code != OMP_SECTION
14560 && code != OMP_SINGLE);
14561 }
14562 #endif
14563
14564 /* Otherwise we're gimplifying a subexpression, so the resulting
14565 value is interesting. If it's a valid operand that matches
14566 GIMPLE_TEST_F, we're done. Unless we are handling some
14567 post-effects internally; if that's the case, we need to copy into
14568 a temporary before adding the post-effects to POST_P. */
14569 if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
14570 goto out;
14571
14572 /* Otherwise, we need to create a new temporary for the gimplified
14573 expression. */
14574
14575 /* We can't return an lvalue if we have an internal postqueue. The
14576 object the lvalue refers to would (probably) be modified by the
14577 postqueue; we need to copy the value out first, which means an
14578 rvalue. */
14579 if ((fallback & fb_lvalue)
14580 && gimple_seq_empty_p (internal_post)
14581 && is_gimple_addressable (*expr_p))
14582 {
14583 /* An lvalue will do. Take the address of the expression, store it
14584 in a temporary, and replace the expression with a MEM_REF of that
14585 temporary. */
14586 tree ref_alias_type = reference_alias_ptr_type (*expr_p);
14587 unsigned int ref_align = get_object_alignment (*expr_p);
14588 tree ref_type = TREE_TYPE (*expr_p);
14589 tmp = build_fold_addr_expr_loc (input_location, *expr_p);
14590 gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
14591 if (TYPE_ALIGN (ref_type) != ref_align)
14592 ref_type = build_aligned_type (ref_type, ref_align);
14593 *expr_p = build2 (MEM_REF, ref_type,
14594 tmp, build_zero_cst (ref_alias_type));
14595 }
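/* Illustration (editor's sketch): in the lvalue fallback above, an
   addressable reference such as s.f is rewritten as

     tmp = &s.f;
     ... MEM_REF <tmp, 0> ...

   where the MEM_REF's type is re-aligned with build_aligned_type to
   the object's actual alignment and its offset operand carries the
   alias pointer type from reference_alias_ptr_type, so neither
   alignment nor TBAA information is lost in the rewrite.  */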
14596 else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
14597 {
14598 /* An rvalue will do. Assign the gimplified expression into a
14599 new temporary TMP and replace the original expression with
14600 TMP. First, make sure that the expression has a type so that
14601 it can be assigned into a temporary. */
14602 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
14603 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
14604 }
14605 else
14606 {
14607 #ifdef ENABLE_GIMPLE_CHECKING
14608 if (!(fallback & fb_mayfail))
14609 {
14610 fprintf (stderr, "gimplification failed:\n");
14611 print_generic_expr (stderr, *expr_p);
14612 debug_tree (*expr_p);
14613 internal_error ("gimplification failed");
14614 }
14615 #endif
14616 gcc_assert (fallback & fb_mayfail);
14617
14618 /* If this is an asm statement, and the user asked for the
14619 impossible, don't die. Fail and let gimplify_asm_expr
14620 issue an error. */
14621 ret = GS_ERROR;
14622 goto out;
14623 }
14624
14625 /* Make sure the temporary matches our predicate. */
14626 gcc_assert ((*gimple_test_f) (*expr_p));
14627
14628 if (!gimple_seq_empty_p (internal_post))
14629 {
14630 annotate_all_with_location (internal_post, input_location);
14631 gimplify_seq_add_seq (pre_p, internal_post);
14632 }
14633
14634 out:
14635 input_location = saved_location;
14636 return ret;
14637 }
14638
14639 /* Like gimplify_expr, but make sure the gimplified result is not
14640 itself an SSA name (it is replaced by a decl if it would be).
14641 Temporaries required to evaluate *EXPR_P may still be SSA names. */
14642
14643 static enum gimplify_status
14644 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
14645 bool (*gimple_test_f) (tree), fallback_t fallback,
14646 bool allow_ssa)
14647 {
14648 bool was_ssa_name_p = TREE_CODE (*expr_p) == SSA_NAME;
14649 enum gimplify_status ret = gimplify_expr (expr_p, pre_p, post_p,
14650 gimple_test_f, fallback);
14651 if (! allow_ssa
14652 && TREE_CODE (*expr_p) == SSA_NAME)
14653 {
14654 tree name = *expr_p;
14655 if (was_ssa_name_p)
14656 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, NULL, false);
14657 else
14658 {
14659 /* Avoid the extra copy if possible. */
14660 *expr_p = create_tmp_reg (TREE_TYPE (name));
14661 if (!gimple_nop_p (SSA_NAME_DEF_STMT (name)))
14662 gimple_set_lhs (SSA_NAME_DEF_STMT (name), *expr_p);
14663 release_ssa_name (name);
14664 }
14665 }
14666 return ret;
14667 }
14668
14669 /* Look through TYPE for variable-sized objects and gimplify each such
14670 size that we find. Add to LIST_P any statements generated. */
14671
14672 void
14673 gimplify_type_sizes (tree type, gimple_seq *list_p)
14674 {
14675 tree field, t;
14676
14677 if (type == NULL || type == error_mark_node)
14678 return;
14679
14680 /* We first do the main variant, then copy into any other variants. */
14681 type = TYPE_MAIN_VARIANT (type);
14682
14683 /* Avoid infinite recursion. */
14684 if (TYPE_SIZES_GIMPLIFIED (type))
14685 return;
14686
14687 TYPE_SIZES_GIMPLIFIED (type) = 1;
14688
14689 switch (TREE_CODE (type))
14690 {
14691 case INTEGER_TYPE:
14692 case ENUMERAL_TYPE:
14693 case BOOLEAN_TYPE:
14694 case REAL_TYPE:
14695 case FIXED_POINT_TYPE:
14696 gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
14697 gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);
14698
14699 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
14700 {
14701 TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
14702 TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
14703 }
14704 break;
14705
14706 case ARRAY_TYPE:
14707 /* These types may not have declarations, so handle them here. */
14708 gimplify_type_sizes (TREE_TYPE (type), list_p);
14709 gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
14710 /* Ensure VLA bounds aren't removed: at -O0 they should be variables
14711 with assigned stack slots, and at -O1+ with -g they should be
14712 tracked by VTA. */
14713 if (!(TYPE_NAME (type)
14714 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
14715 && DECL_IGNORED_P (TYPE_NAME (type)))
14716 && TYPE_DOMAIN (type)
14717 && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
14718 {
14719 t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
14720 if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
14721 DECL_IGNORED_P (t) = 0;
14722 t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
14723 if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
14724 DECL_IGNORED_P (t) = 0;
14725 }
14726 break;
14727
14728 case RECORD_TYPE:
14729 case UNION_TYPE:
14730 case QUAL_UNION_TYPE:
14731 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
14732 if (TREE_CODE (field) == FIELD_DECL)
14733 {
14734 gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
14735 gimplify_one_sizepos (&DECL_SIZE (field), list_p);
14736 gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
14737 gimplify_type_sizes (TREE_TYPE (field), list_p);
14738 }
14739 break;
14740
14741 case POINTER_TYPE:
14742 case REFERENCE_TYPE:
14743 /* We used to recurse on the pointed-to type here, which turned out to
14744 be incorrect because its definition might refer to variables not
14745 yet initialized at this point if a forward declaration is involved.
14746
14747 It was actually useful for anonymous pointed-to types to ensure
14748 that the sizes evaluation dominates every possible later use of the
14749 values. Restricting to such types here would be safe since there
14750 is no possible forward declaration around, but would introduce an
14751 undesirable middle-end semantic to anonymity. We then defer to
14752 front-ends the responsibility of ensuring that the sizes are
14753 evaluated both early and late enough, e.g. by attaching artificial
14754 type declarations to the tree. */
14755 break;
14756
14757 default:
14758 break;
14759 }
14760
14761 gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
14762 gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);
14763
14764 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
14765 {
14766 TYPE_SIZE (t) = TYPE_SIZE (type);
14767 TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
14768 TYPE_SIZES_GIMPLIFIED (t) = 1;
14769 }
14770 }
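/* Illustration (editor's sketch): for a local VLA such as

     void f (int n)
     {
       double a[n][n + 1];
       ...
     }

   gimplify_type_sizes recurses through the ARRAY_TYPEs and gimplifies
   each variable TYPE_SIZE, TYPE_SIZE_UNIT and domain bound into
   temporaries, emitting statements roughly like

     n.0 = n;
     D.1 = n.0 + 1;
     D.2 = (sizetype) D.1 * 8;   // unit size of the inner row

   so that every later use of the type's size refers to these
   gimplified values, and the DECL_IGNORED_P clearing above keeps the
   bound variables visible to the debugger.  */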
14771
14772 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
14773 a size or position, has had all of its SAVE_EXPRs evaluated.
14774 We add any required statements to *STMT_P. */
14775
14776 void
14777 gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
14778 {
14779 tree expr = *expr_p;
14780
14781 /* We don't do anything if the value isn't there, is constant, or contains
14782 a PLACEHOLDER_EXPR. We also don't want to do anything if it's already
14783 a VAR_DECL: replacing a VAR_DECL from another function with a new
14784 variable would cause problems if this type comes from outside the
14785 function, so it's OK to leave such a VAR_DECL alone here. */
14786 if (expr == NULL_TREE
14787 || is_gimple_constant (expr)
14788 || TREE_CODE (expr) == VAR_DECL
14789 || CONTAINS_PLACEHOLDER_P (expr))
14790 return;
14791
14792 *expr_p = unshare_expr (expr);
14793
14794 /* SSA names in decl/type fields are a bad idea - they'll get reclaimed
14795 if the def vanishes. */
14796 gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue, false);
14797
14798 /* If EXPR wasn't already is_gimple_sizepos or is_gimple_constant from the
14799 FE, ensure that it is a VAR_DECL; otherwise we might handle some decls
14800 via gimplify_vla_decl even when all their sizes are INTEGER_CSTs. */
14801 if (is_gimple_constant (*expr_p))
14802 *expr_p = get_initialized_tmp_var (*expr_p, stmt_p, NULL, false);
14803 }
14804
14805 /* Gimplify the body of FNDECL and return a GIMPLE_BIND node containing
14806 the sequence of corresponding GIMPLE statements. If DO_PARMS
14807 is true, also gimplify the parameters. */
14808
14809 gbind *
14810 gimplify_body (tree fndecl, bool do_parms)
14811 {
14812 location_t saved_location = input_location;
14813 gimple_seq parm_stmts, parm_cleanup = NULL, seq;
14814 gimple *outer_stmt;
14815 gbind *outer_bind;
14816
14817 timevar_push (TV_TREE_GIMPLIFY);
14818
14819 init_tree_ssa (cfun);
14820
14821 /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
14822 gimplification. */
14823 default_rtl_profile ();
14824
14825 gcc_assert (gimplify_ctxp == NULL);
14826 push_gimplify_context (true);
14827
14828 if (flag_openacc || flag_openmp)
14829 {
14830 gcc_assert (gimplify_omp_ctxp == NULL);
14831 if (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (fndecl)))
14832 gimplify_omp_ctxp = new_omp_context (ORT_IMPLICIT_TARGET);
14833 }
14834
14835 /* Unshare most shared trees in the body and in that of any nested functions.
14836 It would seem we don't have to do this for nested functions because
14837 they are supposed to be output and then the outer function gimplified
14838 first, but the g++ front end doesn't always do it that way. */
14839 unshare_body (fndecl);
14840 unvisit_body (fndecl);
14841
14842 /* Make sure input_location isn't set to something weird. */
14843 input_location = DECL_SOURCE_LOCATION (fndecl);
14844
14845 /* Resolve callee-copies. This has to be done before processing
14846 the body so that DECL_VALUE_EXPR gets processed correctly. */
14847 parm_stmts = do_parms ? gimplify_parameters (&parm_cleanup) : NULL;
14848
14849 /* Gimplify the function's body. */
14850 seq = NULL;
14851 gimplify_stmt (&DECL_SAVED_TREE (fndecl), &seq);
14852 outer_stmt = gimple_seq_first_nondebug_stmt (seq);
14853 if (!outer_stmt)
14854 {
14855 outer_stmt = gimple_build_nop ();
14856 gimplify_seq_add_stmt (&seq, outer_stmt);
14857 }
14858
14859 /* The body must contain exactly one statement, a GIMPLE_BIND. If this is
14860 not the case, wrap everything in a GIMPLE_BIND to make it so. */
14861 if (gimple_code (outer_stmt) == GIMPLE_BIND
14862 && (gimple_seq_first_nondebug_stmt (seq)
14863 == gimple_seq_last_nondebug_stmt (seq)))
14864 {
14865 outer_bind = as_a <gbind *> (outer_stmt);
14866 if (gimple_seq_first_stmt (seq) != outer_stmt
14867 || gimple_seq_last_stmt (seq) != outer_stmt)
14868 {
14869 /* If there are debug stmts before or after outer_stmt, move them
14870 inside the outer_bind body. */
14871 gimple_stmt_iterator gsi = gsi_for_stmt (outer_stmt, &seq);
14872 gimple_seq second_seq = NULL;
14873 if (gimple_seq_first_stmt (seq) != outer_stmt
14874 && gimple_seq_last_stmt (seq) != outer_stmt)
14875 {
14876 second_seq = gsi_split_seq_after (gsi);
14877 gsi_remove (&gsi, false);
14878 }
14879 else if (gimple_seq_first_stmt (seq) != outer_stmt)
14880 gsi_remove (&gsi, false);
14881 else
14882 {
14883 gsi_remove (&gsi, false);
14884 second_seq = seq;
14885 seq = NULL;
14886 }
14887 gimple_seq_add_seq_without_update (&seq,
14888 gimple_bind_body (outer_bind));
14889 gimple_seq_add_seq_without_update (&seq, second_seq);
14890 gimple_bind_set_body (outer_bind, seq);
14891 }
14892 }
14893 else
14894 outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);
14895
14896 DECL_SAVED_TREE (fndecl) = NULL_TREE;
14897
14898 /* If we had callee-copies statements, insert them at the beginning
14899 of the function and clear DECL_VALUE_EXPR_P on the parameters. */
14900 if (!gimple_seq_empty_p (parm_stmts))
14901 {
14902 tree parm;
14903
14904 gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
14905 if (parm_cleanup)
14906 {
14907 gtry *g = gimple_build_try (parm_stmts, parm_cleanup,
14908 GIMPLE_TRY_FINALLY);
14909 parm_stmts = NULL;
14910 gimple_seq_add_stmt (&parm_stmts, g);
14911 }
14912 gimple_bind_set_body (outer_bind, parm_stmts);
14913
14914 for (parm = DECL_ARGUMENTS (current_function_decl);
14915 parm; parm = DECL_CHAIN (parm))
14916 if (DECL_HAS_VALUE_EXPR_P (parm))
14917 {
14918 DECL_HAS_VALUE_EXPR_P (parm) = 0;
14919 DECL_IGNORED_P (parm) = 0;
14920 }
14921 }
14922
14923 if ((flag_openacc || flag_openmp || flag_openmp_simd)
14924 && gimplify_omp_ctxp)
14925 {
14926 delete_omp_context (gimplify_omp_ctxp);
14927 gimplify_omp_ctxp = NULL;
14928 }
14929
14930 pop_gimplify_context (outer_bind);
14931 gcc_assert (gimplify_ctxp == NULL);
14932
14933 if (flag_checking && !seen_error ())
14934 verify_gimple_in_seq (gimple_bind_body (outer_bind));
14935
14936 timevar_pop (TV_TREE_GIMPLIFY);
14937 input_location = saved_location;
14938
14939 return outer_bind;
14940 }
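/* Illustration (editor's sketch): after gimplify_body, a function like

     int f (int i) { int j = i + 1; return j; }

   is represented by the single outer GIMPLE_BIND returned above,
   dumped roughly as

     f (int i)
     {
       int j;

       j = i + 1;
       return j;
     }
*/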
14941
14942 typedef char *char_p; /* For DEF_VEC_P. */
14943
14944 /* Return whether we should exclude FNDECL from instrumentation. */
14945
14946 static bool
14947 flag_instrument_functions_exclude_p (tree fndecl)
14948 {
14949 vec<char_p> *v;
14950
14951 v = (vec<char_p> *) flag_instrument_functions_exclude_functions;
14952 if (v && v->length () > 0)
14953 {
14954 const char *name;
14955 int i;
14956 char *s;
14957
14958 name = lang_hooks.decl_printable_name (fndecl, 1);
14959 FOR_EACH_VEC_ELT (*v, i, s)
14960 if (strstr (name, s) != NULL)
14961 return true;
14962 }
14963
14964 v = (vec<char_p> *) flag_instrument_functions_exclude_files;
14965 if (v && v->length () > 0)
14966 {
14967 const char *name;
14968 int i;
14969 char *s;
14970
14971 name = DECL_SOURCE_FILE (fndecl);
14972 FOR_EACH_VEC_ELT (*v, i, s)
14973 if (strstr (name, s) != NULL)
14974 return true;
14975 }
14976
14977 return false;
14978 }
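/* Illustration (editor's sketch): both exclusion lists come straight
   from the command line and are matched by substring search (strstr),
   e.g.

     gcc -finstrument-functions \
         -finstrument-functions-exclude-function-list=main,helper_ \
         -finstrument-functions-exclude-file-list=include/bits \
         foo.c

   skips instrumentation for any function whose printable name contains
   "main" or "helper_", and for any function defined in a file whose
   path contains "include/bits".  */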
14979
14980 /* Entry point to the gimplification pass. FNDECL is the FUNCTION_DECL
14981 node for the function we want to gimplify.
14982
14983 The resulting GIMPLE body is stored in FNDECL with gimple_set_body;
14984 nothing is returned. */
14985
14986 void
14987 gimplify_function_tree (tree fndecl)
14988 {
14989 tree parm, ret;
14990 gimple_seq seq;
14991 gbind *bind;
14992
14993 gcc_assert (!gimple_body (fndecl));
14994
14995 if (DECL_STRUCT_FUNCTION (fndecl))
14996 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
14997 else
14998 push_struct_function (fndecl);
14999
15000 /* Tentatively set PROP_gimple_lva here, and reset it in gimplify_va_arg_expr
15001 if necessary. */
15002 cfun->curr_properties |= PROP_gimple_lva;
15003
15004 for (parm = DECL_ARGUMENTS (fndecl); parm ; parm = DECL_CHAIN (parm))
15005 {
15006 /* Preliminarily mark non-addressed complex variables as eligible
15007 for promotion to gimple registers. We'll transform their uses
15008 as we find them. */
15009 if ((TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
15010 || TREE_CODE (TREE_TYPE (parm)) == VECTOR_TYPE)
15011 && !TREE_THIS_VOLATILE (parm)
15012 && !needs_to_live_in_memory (parm))
15013 DECL_GIMPLE_REG_P (parm) = 1;
15014 }
15015
15016 ret = DECL_RESULT (fndecl);
15017 if ((TREE_CODE (TREE_TYPE (ret)) == COMPLEX_TYPE
15018 || TREE_CODE (TREE_TYPE (ret)) == VECTOR_TYPE)
15019 && !needs_to_live_in_memory (ret))
15020 DECL_GIMPLE_REG_P (ret) = 1;
15021
15022 if (asan_sanitize_use_after_scope () && sanitize_flags_p (SANITIZE_ADDRESS))
15023 asan_poisoned_variables = new hash_set<tree> ();
15024 bind = gimplify_body (fndecl, true);
15025 if (asan_poisoned_variables)
15026 {
15027 delete asan_poisoned_variables;
15028 asan_poisoned_variables = NULL;
15029 }
15030
15031 /* The tree body of the function is no longer needed, replace it
15032 with the new GIMPLE body. */
15033 seq = NULL;
15034 gimple_seq_add_stmt (&seq, bind);
15035 gimple_set_body (fndecl, seq);
15036
15037 /* If we're instrumenting function entry/exit, then prepend the call to
15038 the entry hook and wrap the whole function in a TRY_FINALLY_EXPR so
15039 that the exit hook runs on every exit. */
15040 /* ??? Add some way to ignore exceptions for this TFE. */
15041 if (flag_instrument_function_entry_exit
15042 && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
15043 /* Do not instrument extern inline functions. */
15044 && !(DECL_DECLARED_INLINE_P (fndecl)
15045 && DECL_EXTERNAL (fndecl)
15046 && DECL_DISREGARD_INLINE_LIMITS (fndecl))
15047 && !flag_instrument_functions_exclude_p (fndecl))
15048 {
15049 tree x;
15050 gbind *new_bind;
15051 gimple *tf;
15052 gimple_seq cleanup = NULL, body = NULL;
15053 tree tmp_var, this_fn_addr;
15054 gcall *call;
15055
15056 /* The instrumentation hooks aren't going to call the instrumented
15057 function and the address they receive is expected to be matchable
15058 against symbol addresses. Make sure we don't create a trampoline,
15059 in case the current function is nested. */
15060 this_fn_addr = build_fold_addr_expr (current_function_decl);
15061 TREE_NO_TRAMPOLINE (this_fn_addr) = 1;
15062
15063 x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
15064 call = gimple_build_call (x, 1, integer_zero_node);
15065 tmp_var = create_tmp_var (ptr_type_node, "return_addr");
15066 gimple_call_set_lhs (call, tmp_var);
15067 gimplify_seq_add_stmt (&cleanup, call);
15068 x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_EXIT);
15069 call = gimple_build_call (x, 2, this_fn_addr, tmp_var);
15070 gimplify_seq_add_stmt (&cleanup, call);
15071 tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);
15072
15073 x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
15074 call = gimple_build_call (x, 1, integer_zero_node);
15075 tmp_var = create_tmp_var (ptr_type_node, "return_addr");
15076 gimple_call_set_lhs (call, tmp_var);
15077 gimplify_seq_add_stmt (&body, call);
15078 x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_ENTER);
15079 call = gimple_build_call (x, 2, this_fn_addr, tmp_var);
15080 gimplify_seq_add_stmt (&body, call);
15081 gimplify_seq_add_stmt (&body, tf);
15082 new_bind = gimple_build_bind (NULL, body, NULL);
15083
15084 /* Replace the current function body with the body
15085 wrapped in the try/finally TF. */
15086 seq = NULL;
15087 gimple_seq_add_stmt (&seq, new_bind);
15088 gimple_set_body (fndecl, seq);
15089 bind = new_bind;
15090 }
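/* Illustration (editor's sketch): with -finstrument-functions the new
   outer bind built above has roughly this shape:

     {
       D.1 = __builtin_return_address (0);
       __cyg_profile_func_enter (&f, D.1);
       try
         {
           <original outer bind of f>
         }
       finally
         {
           D.2 = __builtin_return_address (0);
           __cyg_profile_func_exit (&f, D.2);
         }
     }

   BUILT_IN_PROFILE_FUNC_ENTER/EXIT correspond to the documented
   __cyg_profile_func_enter/exit hooks, and TREE_NO_TRAMPOLINE on the
   function address keeps nested functions from being instrumented
   through a trampoline address that would not match any symbol.  */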
15091
15092 if (sanitize_flags_p (SANITIZE_THREAD))
15093 {
15094 gcall *call = gimple_build_call_internal (IFN_TSAN_FUNC_EXIT, 0);
15095 gimple *tf = gimple_build_try (seq, call, GIMPLE_TRY_FINALLY);
15096 gbind *new_bind = gimple_build_bind (NULL, tf, NULL);
15097 /* Replace the current function body with the body
15098 wrapped in the try/finally TF. */
15099 seq = NULL;
15100 gimple_seq_add_stmt (&seq, new_bind);
15101 gimple_set_body (fndecl, seq);
15102 }
15103
15104 DECL_SAVED_TREE (fndecl) = NULL_TREE;
15105 cfun->curr_properties |= PROP_gimple_any;
15106
15107 pop_cfun ();
15108
15109 dump_function (TDI_gimple, fndecl);
15110 }
15111
15112 /* Return a dummy expression of type TYPE in order to keep going after an
15113 error. */
15114
15115 static tree
15116 dummy_object (tree type)
15117 {
15118 tree t = build_int_cst (build_pointer_type (type), 0);
15119 return build2 (MEM_REF, type, t, t);
15120 }
15121
15122 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
15123 builtin function, but a very special sort of operator. */
15124
15125 enum gimplify_status
15126 gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p,
15127 gimple_seq *post_p ATTRIBUTE_UNUSED)
15128 {
15129 tree promoted_type, have_va_type;
15130 tree valist = TREE_OPERAND (*expr_p, 0);
15131 tree type = TREE_TYPE (*expr_p);
15132 tree t, tag, aptag;
15133 location_t loc = EXPR_LOCATION (*expr_p);
15134
15135 /* Verify that valist is of the proper type. */
15136 have_va_type = TREE_TYPE (valist);
15137 if (have_va_type == error_mark_node)
15138 return GS_ERROR;
15139 have_va_type = targetm.canonical_va_list_type (have_va_type);
15140 if (have_va_type == NULL_TREE
15141 && POINTER_TYPE_P (TREE_TYPE (valist)))
15142 /* Handle 'Case 1: Not an array type' from c-common.c/build_va_arg. */
15143 have_va_type
15144 = targetm.canonical_va_list_type (TREE_TYPE (TREE_TYPE (valist)));
15145 gcc_assert (have_va_type != NULL_TREE);
15146
15147 /* Generate a diagnostic for requesting data of a type that cannot
15148 be passed through `...' due to type promotion at the call site. */
15149 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
15150 != type)
15151 {
15152 static bool gave_help;
15153 bool warned;
15154 /* Use the expansion point to handle cases such as passing bool (defined
15155 in a system header) through `...'. */
15156 location_t xloc
15157 = expansion_point_location_if_in_system_header (loc);
15158
15159 /* Unfortunately, this is merely undefined, rather than a constraint
15160 violation, so we cannot make this an error. If this call is never
15161 executed, the program is still strictly conforming. */
15162 auto_diagnostic_group d;
15163 warned = warning_at (xloc, 0,
15164 "%qT is promoted to %qT when passed through %<...%>",
15165 type, promoted_type);
15166 if (!gave_help && warned)
15167 {
15168 gave_help = true;
15169 inform (xloc, "(so you should pass %qT not %qT to %<va_arg%>)",
15170 promoted_type, type);
15171 }
15172
15173 /* We can, however, treat "undefined" any way we please.
15174 Call __builtin_trap to encourage the user to fix the program. */
15175 if (warned)
15176 inform (xloc, "if this code is reached, the program will abort");
15177 /* Before the trap, allow the evaluation of the va_list
15178 expression to exit or longjmp. */
15179 gimplify_and_add (valist, pre_p);
15180 t = build_call_expr_loc (loc,
15181 builtin_decl_implicit (BUILT_IN_TRAP), 0);
15182 gimplify_and_add (t, pre_p);
15183
15184 /* This is dead code, but go ahead and finish so that the
15185 mode of the result comes out right. */
15186 *expr_p = dummy_object (type);
15187 return GS_ALL_DONE;
15188 }
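/* Illustration (editor's sketch): the promotion diagnostic above fires
   for code such as

     char c = va_arg (ap, char);

   because char is promoted to int when passed through "...": GCC warns
   that 'char' is promoted to 'int', suggests va_arg (ap, int), and
   replaces the access with a __builtin_trap () call plus a dummy
   object, since reaching the statement would be undefined behavior.  */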
15189
15190 tag = build_int_cst (build_pointer_type (type), 0);
15191 aptag = build_int_cst (TREE_TYPE (valist), 0);
15192
15193 *expr_p = build_call_expr_internal_loc (loc, IFN_VA_ARG, type, 3,
15194 valist, tag, aptag);
15195
15196 /* Clear the tentatively set PROP_gimple_lva, to indicate that IFN_VA_ARG
15197 needs to be expanded. */
15198 cfun->curr_properties &= ~PROP_gimple_lva;
15199
15200 return GS_OK;
15201 }
15202
15203 /* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.
15204
15205 DST/SRC are the destination and source respectively. You can pass
15206 ungimplified trees in DST or SRC, in which case they will be
15207 converted to a gimple operand if necessary.
15208
15209 This function returns the newly created GIMPLE_ASSIGN tuple. */
15210
15211 gimple *
15212 gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
15213 {
15214 tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
15215 gimplify_and_add (t, seq_p);
15216 ggc_free (t);
15217 return gimple_seq_last_stmt (*seq_p);
15218 }
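/* Illustration (editor's sketch, hypothetical caller): DST and SRC may
   be ungimplified trees, so a typical middle-end use is

     gimple_seq seq = NULL;
     tree tmp = create_tmp_var (ptr_type_node, "addr");
     gimple *g = gimplify_assign (tmp, build_fold_addr_expr (decl), &seq);

   where the MODIFY_EXPR is lowered into SEQ and the returned statement
   is the final GIMPLE_ASSIGN that sets tmp.  */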
15219
15220 inline hashval_t
15221 gimplify_hasher::hash (const elt_t *p)
15222 {
15223 tree t = p->val;
15224 return iterative_hash_expr (t, 0);
15225 }
15226
15227 inline bool
15228 gimplify_hasher::equal (const elt_t *p1, const elt_t *p2)
15229 {
15230 tree t1 = p1->val;
15231 tree t2 = p2->val;
15232 enum tree_code code = TREE_CODE (t1);
15233
15234 if (TREE_CODE (t2) != code
15235 || TREE_TYPE (t1) != TREE_TYPE (t2))
15236 return false;
15237
15238 if (!operand_equal_p (t1, t2, 0))
15239 return false;
15240
15241 /* Only allow them to compare equal if they also hash equal; otherwise
15242 the results are nondeterministic, and we fail bootstrap comparison. */
15243 gcc_checking_assert (hash (p1) == hash (p2));
15244
15245 return true;
15246 }