Fix ICE on unsupported FP comparison
gcc/gimplify.c
/* Tree lowering pass.  This pass converts the GENERIC functions-as-trees
   tree representation into the GIMPLE form.
   Copyright (C) 2002-2020 Free Software Foundation, Inc.
   Major work done by Sebastian Pop <s.pop@laposte.net>,
   Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "memmodel.h"
#include "tm_p.h"
#include "gimple.h"
#include "gimple-predict.h"
#include "tree-pass.h"		/* FIXME: only for PROP_gimple_any */
#include "ssa.h"
#include "cgraph.h"
#include "tree-pretty-print.h"
#include "diagnostic-core.h"
#include "alias.h"
#include "fold-const.h"
#include "calls.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "tree-cfg.h"
#include "tree-ssa.h"
#include "omp-general.h"
#include "omp-low.h"
#include "gimple-low.h"
#include "gomp-constants.h"
#include "splay-tree.h"
#include "gimple-walk.h"
#include "langhooks-def.h"	/* FIXME: for lhd_set_decl_assembler_name */
#include "builtins.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "dbgcnt.h"
#include "omp-offload.h"
#include "context.h"

/* Hash set of poisoned variables in a bind expr.  */
static hash_set<tree> *asan_poisoned_variables = NULL;

enum gimplify_omp_var_data
{
  GOVD_SEEN = 0x000001,
  GOVD_EXPLICIT = 0x000002,
  GOVD_SHARED = 0x000004,
  GOVD_PRIVATE = 0x000008,
  GOVD_FIRSTPRIVATE = 0x000010,
  GOVD_LASTPRIVATE = 0x000020,
  GOVD_REDUCTION = 0x000040,
  GOVD_LOCAL = 0x000080,
  GOVD_MAP = 0x000100,
  GOVD_DEBUG_PRIVATE = 0x000200,
  GOVD_PRIVATE_OUTER_REF = 0x000400,
  GOVD_LINEAR = 0x000800,
  GOVD_ALIGNED = 0x001000,

  /* Flag for GOVD_MAP: don't copy back.  */
  GOVD_MAP_TO_ONLY = 0x002000,

  /* Flag for GOVD_LINEAR or GOVD_LASTPRIVATE: no outer reference.  */
  GOVD_LINEAR_LASTPRIVATE_NO_OUTER = 0x004000,

  GOVD_MAP_0LEN_ARRAY = 0x008000,

  /* Flag for GOVD_MAP, if it is an always,to or always,tofrom mapping.  */
  GOVD_MAP_ALWAYS_TO = 0x010000,

  /* Flag for shared vars that are or might be stored to in the region.  */
  GOVD_WRITTEN = 0x020000,

  /* Flag for GOVD_MAP, if it is a forced mapping.  */
  GOVD_MAP_FORCE = 0x040000,

  /* Flag for GOVD_MAP: must be present already.  */
  GOVD_MAP_FORCE_PRESENT = 0x080000,

  /* Flag for GOVD_MAP: only allocate.  */
  GOVD_MAP_ALLOC_ONLY = 0x100000,

  /* Flag for GOVD_MAP: only copy back.  */
  GOVD_MAP_FROM_ONLY = 0x200000,

  GOVD_NONTEMPORAL = 0x400000,

  /* Flag for GOVD_LASTPRIVATE: conditional modifier.  */
  GOVD_LASTPRIVATE_CONDITIONAL = 0x800000,

  GOVD_CONDTEMP = 0x1000000,

  /* Flag for GOVD_REDUCTION: inscan seen in {in,ex}clusive clause.  */
  GOVD_REDUCTION_INSCAN = 0x2000000,

  /* Flag for GOVD_MAP: (struct) vars that have pointer attachments for
     fields.  */
  GOVD_MAP_HAS_ATTACHMENTS = 0x4000000,

  GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
			   | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LINEAR
			   | GOVD_LOCAL)
};
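
/* Illustrative note (a sketch, not part of the original sources): these
   flags are OR'ed together in the per-variable entries of a region's
   splay tree.  A variable named in an explicit firstprivate clause and
   then referenced in the region body would carry roughly

     GOVD_FIRSTPRIVATE | GOVD_EXPLICIT | GOVD_SEEN

   and GOVD_DATA_SHARE_CLASS masks out everything but the data-sharing
   kind, as in

     (n->value & GOVD_DATA_SHARE_CLASS) == GOVD_FIRSTPRIVATE  */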

enum omp_region_type
{
  ORT_WORKSHARE = 0x00,
  ORT_TASKGROUP = 0x01,
  ORT_SIMD = 0x04,

  ORT_PARALLEL = 0x08,
  ORT_COMBINED_PARALLEL = ORT_PARALLEL | 1,

  ORT_TASK = 0x10,
  ORT_UNTIED_TASK = ORT_TASK | 1,
  ORT_TASKLOOP = ORT_TASK | 2,
  ORT_UNTIED_TASKLOOP = ORT_UNTIED_TASK | 2,

  ORT_TEAMS = 0x20,
  ORT_COMBINED_TEAMS = ORT_TEAMS | 1,
  ORT_HOST_TEAMS = ORT_TEAMS | 2,
  ORT_COMBINED_HOST_TEAMS = ORT_COMBINED_TEAMS | 2,

  /* Data region.  */
  ORT_TARGET_DATA = 0x40,

  /* Data region with offloading.  */
  ORT_TARGET = 0x80,
  ORT_COMBINED_TARGET = ORT_TARGET | 1,
  ORT_IMPLICIT_TARGET = ORT_TARGET | 2,

  /* OpenACC variants.  */
  ORT_ACC = 0x100,				/* A generic OpenACC region.  */
  ORT_ACC_DATA = ORT_ACC | ORT_TARGET_DATA,	/* Data construct.  */
  ORT_ACC_PARALLEL = ORT_ACC | ORT_TARGET,	/* Parallel construct.  */
  ORT_ACC_KERNELS = ORT_ACC | ORT_TARGET | 2,	/* Kernels construct.  */
  ORT_ACC_SERIAL = ORT_ACC | ORT_TARGET | 4,	/* Serial construct.  */
  ORT_ACC_HOST_DATA = ORT_ACC | ORT_TARGET_DATA | 2,  /* Host data.  */

  /* Dummy OpenMP region, used to disable expansion of
     DECL_VALUE_EXPRs in taskloop pre body.  */
  ORT_NONE = 0x200
};
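
/* Illustrative note (not part of the original sources): the values are
   laid out so that families of regions can be tested with a single mask
   instead of enumerating each variant.  E.g.

     (ctx->region_type & ORT_TARGET) != 0

   matches ORT_TARGET, ORT_COMBINED_TARGET, ORT_IMPLICIT_TARGET and the
   OpenACC compute constructs, while

     (ctx->region_type & ORT_ACC) != 0

   distinguishes OpenACC regions from OpenMP ones.  */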

/* Gimplify hashtable helper.  */

struct gimplify_hasher : free_ptr_hash <elt_t>
{
  static inline hashval_t hash (const elt_t *);
  static inline bool equal (const elt_t *, const elt_t *);
};

struct gimplify_ctx
{
  struct gimplify_ctx *prev_context;

  vec<gbind *> bind_expr_stack;
  tree temps;
  gimple_seq conditional_cleanups;
  tree exit_label;
  tree return_temp;

  vec<tree> case_labels;
  hash_set<tree> *live_switch_vars;
  /* The formal temporary table.  Should this be persistent?  */
  hash_table<gimplify_hasher> *temp_htab;

  int conditions;
  unsigned into_ssa : 1;
  unsigned allow_rhs_cond_expr : 1;
  unsigned in_cleanup_point_expr : 1;
  unsigned keep_stack : 1;
  unsigned save_stack : 1;
  unsigned in_switch_expr : 1;
};

enum gimplify_defaultmap_kind
{
  GDMK_SCALAR,
  GDMK_AGGREGATE,
  GDMK_ALLOCATABLE,
  GDMK_POINTER
};

struct gimplify_omp_ctx
{
  struct gimplify_omp_ctx *outer_context;
  splay_tree variables;
  hash_set<tree> *privatized_types;
  tree clauses;
  /* Iteration variables in an OMP_FOR.  */
  vec<tree> loop_iter_var;
  location_t location;
  enum omp_clause_default_kind default_kind;
  enum omp_region_type region_type;
  enum tree_code code;
  bool combined_loop;
  bool distribute;
  bool target_firstprivatize_array_bases;
  bool add_safelen1;
  bool order_concurrent;
  int defaultmap[4];
};

static struct gimplify_ctx *gimplify_ctxp;
static struct gimplify_omp_ctx *gimplify_omp_ctxp;
static bool in_omp_construct;

/* Forward declaration.  */
static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
static hash_map<tree, tree> *oacc_declare_returns;
static enum gimplify_status gimplify_expr (tree *, gimple_seq *, gimple_seq *,
					   bool (*) (tree), fallback_t, bool);

/* A shorthand for gimple_seq_add_stmt_without_update, for use in
   gimplify.c only.  */

static inline void
gimplify_seq_add_stmt (gimple_seq *seq_p, gimple *gs)
{
  gimple_seq_add_stmt_without_update (seq_p, gs);
}

/* Append sequence SRC to the end of sequence *DST_P.  If *DST_P is
   NULL, a new sequence is allocated.  This function is
   similar to gimple_seq_add_seq, but does not scan the operands.
   During gimplification, we need to manipulate statement sequences
   before the def/use vectors have been constructed.  */

static void
gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
{
  gimple_stmt_iterator si;

  if (src == NULL)
    return;

  si = gsi_last (*dst_p);
  gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
}


/* Pointer to a list of allocated gimplify_ctx structs to be used for pushing
   and popping gimplify contexts.  */

static struct gimplify_ctx *ctx_pool = NULL;

/* Return a gimplify context struct from the pool.  */

static inline struct gimplify_ctx *
ctx_alloc (void)
{
  struct gimplify_ctx * c = ctx_pool;

  if (c)
    ctx_pool = c->prev_context;
  else
    c = XNEW (struct gimplify_ctx);

  memset (c, '\0', sizeof (*c));
  return c;
}

/* Put gimplify context C back into the pool.  */

static inline void
ctx_free (struct gimplify_ctx *c)
{
  c->prev_context = ctx_pool;
  ctx_pool = c;
}

/* Free allocated ctx stack memory.  */

void
free_gimplify_stack (void)
{
  struct gimplify_ctx *c;

  while ((c = ctx_pool))
    {
      ctx_pool = c->prev_context;
      free (c);
    }
}


/* Set up a context for the gimplifier.  */

void
push_gimplify_context (bool in_ssa, bool rhs_cond_ok)
{
  struct gimplify_ctx *c = ctx_alloc ();

  c->prev_context = gimplify_ctxp;
  gimplify_ctxp = c;
  gimplify_ctxp->into_ssa = in_ssa;
  gimplify_ctxp->allow_rhs_cond_expr = rhs_cond_ok;
}

/* Tear down a context for the gimplifier.  If BODY is non-null, then
   put the temporaries into the outer BIND_EXPR.  Otherwise, put them
   in the local_decls.

   BODY is not a sequence, but the first tuple in a sequence.  */

void
pop_gimplify_context (gimple *body)
{
  struct gimplify_ctx *c = gimplify_ctxp;

  gcc_assert (c
	      && (!c->bind_expr_stack.exists ()
		  || c->bind_expr_stack.is_empty ()));
  c->bind_expr_stack.release ();
  gimplify_ctxp = c->prev_context;

  if (body)
    declare_vars (c->temps, body, false);
  else
    record_vars (c->temps);

  delete c->temp_htab;
  c->temp_htab = NULL;
  ctx_free (c);
}
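
/* Usage sketch (a hypothetical caller, for illustration only): gimplifying
   a free-standing tree T typically brackets the work with a push/pop pair
   so that temporaries created along the way are collected:

     push_gimplify_context ();
     gimple_seq seq = NULL;
     gimplify_stmt (&t, &seq);
     pop_gimplify_context (NULL);

   Passing NULL sends the temporaries to local_decls; passing a
   GIMPLE_BIND hangs them off that bind statement instead.  */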

/* Push a GIMPLE_BIND tuple onto the stack of bindings.  */

static void
gimple_push_bind_expr (gbind *bind_stmt)
{
  gimplify_ctxp->bind_expr_stack.reserve (8);
  gimplify_ctxp->bind_expr_stack.safe_push (bind_stmt);
}

/* Pop the topmost element off the stack of bindings.  */

static void
gimple_pop_bind_expr (void)
{
  gimplify_ctxp->bind_expr_stack.pop ();
}

/* Return the topmost element of the stack of bindings.  */

gbind *
gimple_current_bind_expr (void)
{
  return gimplify_ctxp->bind_expr_stack.last ();
}

/* Return the stack of bindings created during gimplification.  */

vec<gbind *>
gimple_bind_expr_stack (void)
{
  return gimplify_ctxp->bind_expr_stack;
}

/* Return true iff there is a COND_EXPR between us and the innermost
   CLEANUP_POINT_EXPR.  This info is used by gimple_push_cleanup.  */

static bool
gimple_conditional_context (void)
{
  return gimplify_ctxp->conditions > 0;
}

/* Note that we've entered a COND_EXPR.  */

static void
gimple_push_condition (void)
{
#ifdef ENABLE_GIMPLE_CHECKING
  if (gimplify_ctxp->conditions == 0)
    gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
#endif
  ++(gimplify_ctxp->conditions);
}

/* Note that we've left a COND_EXPR.  If we're back at unconditional scope
   now, add any conditional cleanups we've seen to the prequeue.  */

static void
gimple_pop_condition (gimple_seq *pre_p)
{
  int conds = --(gimplify_ctxp->conditions);

  gcc_assert (conds >= 0);
  if (conds == 0)
    {
      gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
      gimplify_ctxp->conditional_cleanups = NULL;
    }
}

/* A stable comparison routine for use with splay trees and DECLs.  */

static int
splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
{
  tree a = (tree) xa;
  tree b = (tree) xb;

  return DECL_UID (a) - DECL_UID (b);
}

/* Create a new omp construct that deals with variable remapping.  */

static struct gimplify_omp_ctx *
new_omp_context (enum omp_region_type region_type)
{
  struct gimplify_omp_ctx *c;

  c = XCNEW (struct gimplify_omp_ctx);
  c->outer_context = gimplify_omp_ctxp;
  c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
  c->privatized_types = new hash_set<tree>;
  c->location = input_location;
  c->region_type = region_type;
  if ((region_type & ORT_TASK) == 0)
    c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
  else
    c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;
  c->defaultmap[GDMK_SCALAR] = GOVD_MAP;
  c->defaultmap[GDMK_AGGREGATE] = GOVD_MAP;
  c->defaultmap[GDMK_ALLOCATABLE] = GOVD_MAP;
  c->defaultmap[GDMK_POINTER] = GOVD_MAP;

  return c;
}

/* Destroy an omp construct that deals with variable remapping.  */

static void
delete_omp_context (struct gimplify_omp_ctx *c)
{
  splay_tree_delete (c->variables);
  delete c->privatized_types;
  c->loop_iter_var.release ();
  XDELETE (c);
}

static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);

/* Both gimplify the statement T and append it to *SEQ_P.  This function
   behaves exactly as gimplify_stmt, but you don't have to pass T as a
   reference.  */

void
gimplify_and_add (tree t, gimple_seq *seq_p)
{
  gimplify_stmt (&t, seq_p);
}

/* Gimplify statement T into sequence *SEQ_P, and return the first
   tuple in the sequence of generated tuples for this statement.
   Return NULL if gimplifying T produced no tuples.  */

static gimple *
gimplify_and_return_first (tree t, gimple_seq *seq_p)
{
  gimple_stmt_iterator last = gsi_last (*seq_p);

  gimplify_and_add (t, seq_p);

  if (!gsi_end_p (last))
    {
      gsi_next (&last);
      return gsi_stmt (last);
    }
  else
    return gimple_seq_first_stmt (*seq_p);
}

/* Returns true iff T is a valid RHS for an assignment to an un-renamed
   LHS, or for a call argument.  */

static bool
is_gimple_mem_rhs (tree t)
{
  /* If we're dealing with a renamable type, either source or dest must be
     a renamed variable.  */
  if (is_gimple_reg_type (TREE_TYPE (t)))
    return is_gimple_val (t);
  else
    return is_gimple_val (t) || is_gimple_lvalue (t);
}

/* Return true if T is a CALL_EXPR or an expression that can be
   assigned to a temporary.  Note that this predicate should only be
   used during gimplification.  See the rationale for this in
   gimplify_modify_expr.  */

static bool
is_gimple_reg_rhs_or_call (tree t)
{
  return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
	  || TREE_CODE (t) == CALL_EXPR);
}

/* Return true if T is a valid memory RHS or a CALL_EXPR.  Note that
   this predicate should only be used during gimplification.  See the
   rationale for this in gimplify_modify_expr.  */

static bool
is_gimple_mem_rhs_or_call (tree t)
{
  /* If we're dealing with a renamable type, either source or dest must be
     a renamed variable.  */
  if (is_gimple_reg_type (TREE_TYPE (t)))
    return is_gimple_val (t);
  else
    return (is_gimple_val (t)
	    || is_gimple_lvalue (t)
	    || TREE_CLOBBER_P (t)
	    || TREE_CODE (t) == CALL_EXPR);
}

/* Create a temporary with a name derived from VAL.  Subroutine of
   lookup_tmp_var; nobody else should call this function.  */

static inline tree
create_tmp_from_val (tree val)
{
  /* Drop all qualifiers and address-space information from the value type.  */
  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (val));
  tree var = create_tmp_var (type, get_name (val));
  if (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
      || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE)
    DECL_GIMPLE_REG_P (var) = 1;
  return var;
}

/* Create a temporary to hold the value of VAL.  If IS_FORMAL, try to reuse
   an existing expression temporary.  */

static tree
lookup_tmp_var (tree val, bool is_formal)
{
  tree ret;

  /* If not optimizing, never really reuse a temporary.  local-alloc
     won't allocate any variable that is used in more than one basic
     block, which means it will go into memory, causing much extra
     work in reload and final and poorer code generation, outweighing
     the extra memory allocation here.  */
  if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
    ret = create_tmp_from_val (val);
  else
    {
      elt_t elt, *elt_p;
      elt_t **slot;

      elt.val = val;
      if (!gimplify_ctxp->temp_htab)
	gimplify_ctxp->temp_htab = new hash_table<gimplify_hasher> (1000);
      slot = gimplify_ctxp->temp_htab->find_slot (&elt, INSERT);
      if (*slot == NULL)
	{
	  elt_p = XNEW (elt_t);
	  elt_p->val = val;
	  elt_p->temp = ret = create_tmp_from_val (val);
	  *slot = elt_p;
	}
      else
	{
	  elt_p = *slot;
	  ret = elt_p->temp;
	}
    }

  return ret;
}
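
/* For example (illustration only): with optimization enabled, requesting
   a formal temporary for two occurrences of the same expression A + B
   yields one shared temporary,

     t = a + b;
     ... t ... t ...

   whereas at -O0, or when the value has side effects, every request gets
   a fresh temporary.  */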

/* Helper for get_formal_tmp_var and get_initialized_tmp_var.  */

static tree
internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
		      bool is_formal, bool allow_ssa)
{
  tree t, mod;

  /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
     can create an INIT_EXPR and convert it into a GIMPLE_CALL below.  */
  gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
		 fb_rvalue);

  if (allow_ssa
      && gimplify_ctxp->into_ssa
      && is_gimple_reg_type (TREE_TYPE (val)))
    {
      t = make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val)));
      if (! gimple_in_ssa_p (cfun))
	{
	  const char *name = get_name (val);
	  if (name)
	    SET_SSA_NAME_VAR_OR_IDENTIFIER (t, create_tmp_var_name (name));
	}
    }
  else
    t = lookup_tmp_var (val, is_formal);

  mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));

  SET_EXPR_LOCATION (mod, EXPR_LOC_OR_LOC (val, input_location));

  /* gimplify_modify_expr might want to reduce this further.  */
  gimplify_and_add (mod, pre_p);
  ggc_free (mod);

  return t;
}

/* Return a formal temporary variable initialized with VAL.  PRE_P is as
   in gimplify_expr.  Only use this function if:

   1) The value of the unfactored expression represented by VAL will not
      change between the initialization and use of the temporary, and
   2) The temporary will not be otherwise modified.

   For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
   and #2 means it is inappropriate for && temps.

   For other cases, use get_initialized_tmp_var instead.  */

tree
get_formal_tmp_var (tree val, gimple_seq *pre_p)
{
  return internal_get_tmp_var (val, pre_p, NULL, true, true);
}

/* Return a temporary variable initialized with VAL.  PRE_P and POST_P
   are as in gimplify_expr.  */

tree
get_initialized_tmp_var (tree val, gimple_seq *pre_p,
			 gimple_seq *post_p /* = NULL */,
			 bool allow_ssa /* = true */)
{
  return internal_get_tmp_var (val, pre_p, post_p, false, allow_ssa);
}
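
/* For example (illustration only):

     tree t = get_initialized_tmp_var (val, pre_p);

   appends something like

     D.1234 = val;

   to *PRE_P (or an SSA name assignment when gimplifying into SSA) and
   returns the new temporary.  */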

/* Declare all the variables in VARS in SCOPE.  If DEBUG_INFO is true,
   generate debug info for them; otherwise don't.  */

void
declare_vars (tree vars, gimple *gs, bool debug_info)
{
  tree last = vars;
  if (last)
    {
      tree temps, block;

      gbind *scope = as_a <gbind *> (gs);

      temps = nreverse (last);

      block = gimple_bind_block (scope);
      gcc_assert (!block || TREE_CODE (block) == BLOCK);
      if (!block || !debug_info)
	{
	  DECL_CHAIN (last) = gimple_bind_vars (scope);
	  gimple_bind_set_vars (scope, temps);
	}
      else
	{
	  /* We need to attach the nodes both to the BIND_EXPR and to its
	     associated BLOCK for debugging purposes.  The key point here
	     is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
	     is a subchain of the BIND_EXPR_VARS of the BIND_EXPR.  */
	  if (BLOCK_VARS (block))
	    BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
	  else
	    {
	      gimple_bind_set_vars (scope,
				    chainon (gimple_bind_vars (scope), temps));
	      BLOCK_VARS (block) = temps;
	    }
	}
    }
}

/* For VAR a VAR_DECL of variable size, try to find a constant upper bound
   for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly.  Abort if
   no such upper bound can be obtained.  */

static void
force_constant_size (tree var)
{
  /* The only attempt we make is by querying the maximum size of objects
     of the variable's type.  */

  HOST_WIDE_INT max_size;

  gcc_assert (VAR_P (var));

  max_size = max_int_size_in_bytes (TREE_TYPE (var));

  gcc_assert (max_size >= 0);

  DECL_SIZE_UNIT (var)
    = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
  DECL_SIZE (var)
    = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
}

/* Push the temporary variable TMP into the current binding of function FN.  */

void
gimple_add_tmp_var_fn (struct function *fn, tree tmp)
{
  gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));

  /* Later processing assumes that the object size is constant, which might
     not be true at this point.  Force the use of a constant upper bound in
     this case.  */
  if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp)))
    force_constant_size (tmp);

  DECL_CONTEXT (tmp) = fn->decl;
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;

  record_vars_into (tmp, fn->decl);
}

/* Push the temporary variable TMP into the current binding.  */

void
gimple_add_tmp_var (tree tmp)
{
  gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));

  /* Later processing assumes that the object size is constant, which might
     not be true at this point.  Force the use of a constant upper bound in
     this case.  */
  if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp)))
    force_constant_size (tmp);

  DECL_CONTEXT (tmp) = current_function_decl;
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;

  if (gimplify_ctxp)
    {
      DECL_CHAIN (tmp) = gimplify_ctxp->temps;
      gimplify_ctxp->temps = tmp;

      /* Mark temporaries local within the nearest enclosing parallel.  */
      if (gimplify_omp_ctxp)
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  int flag = GOVD_LOCAL;
	  while (ctx
		 && (ctx->region_type == ORT_WORKSHARE
		     || ctx->region_type == ORT_TASKGROUP
		     || ctx->region_type == ORT_SIMD
		     || ctx->region_type == ORT_ACC))
	    {
	      if (ctx->region_type == ORT_SIMD
		  && TREE_ADDRESSABLE (tmp)
		  && !TREE_STATIC (tmp))
		{
		  if (TREE_CODE (DECL_SIZE_UNIT (tmp)) != INTEGER_CST)
		    ctx->add_safelen1 = true;
		  else
		    flag = GOVD_PRIVATE;
		  break;
		}
	      ctx = ctx->outer_context;
	    }
	  if (ctx)
	    omp_add_variable (ctx, tmp, flag | GOVD_SEEN);
	}
    }
  else if (cfun)
    record_vars (tmp);
  else
    {
      gimple_seq body_seq;

      /* This case is for nested functions.  We need to expose the locals
	 they create.  */
      body_seq = gimple_body (current_function_decl);
      declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
    }
}


/* This page contains routines to unshare tree nodes, i.e. to duplicate tree
   nodes that are referenced more than once in GENERIC functions.  This is
   necessary because gimplification (translation into GIMPLE) is performed
   by modifying tree nodes in-place, so gimplification of a shared node in a
   first context could generate an invalid GIMPLE form in a second context.

   This is achieved with a simple mark/copy/unmark algorithm that walks the
   GENERIC representation top-down, marks nodes with TREE_VISITED the first
   time it encounters them, duplicates them if they already have TREE_VISITED
   set, and finally removes the TREE_VISITED marks it has set.

   The algorithm works only at the function level, i.e. it generates a GENERIC
   representation of a function with no nodes shared within the function when
   passed a GENERIC function (except for nodes that are allowed to be shared).

   At the global level, it is also necessary to unshare tree nodes that are
   referenced in more than one function, for the same aforementioned reason.
   This requires some cooperation from the front-end.  There are 2 strategies:

   1. Manual unsharing.  The front-end needs to call unshare_expr on every
      expression that might end up being shared across functions.

   2. Deep unsharing.  This is an extension of regular unsharing.  Instead
      of calling unshare_expr on expressions that might be shared across
      functions, the front-end pre-marks them with TREE_VISITED.  This will
      ensure that they are unshared on the first reference within functions
      when the regular unsharing algorithm runs.  The counterpart is that
      this algorithm must look deeper than for manual unsharing, which is
      specified by LANG_HOOKS_DEEP_UNSHARING.

   If there are only a few specific cases of node sharing across functions,
   it is probably easier for a front-end to unshare the expressions manually.
   Conversely, if the expressions generated at the global level are as
   widespread as expressions generated within functions, deep unsharing is
   very likely the way to go.  */
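
/* A concrete example of the problem (illustration only): suppose a
   front-end builds one node and reuses it,

     tree sum = build2 (PLUS_EXPR, type, a, b);
     append_statement (use1 (sum));
     append_statement (use2 (sum));

   Gimplifying the first use rewrites SUM in place (e.g. replacing it with
   a temporary initialized in the first statement's pre-queue), so the
   second statement would silently see the half-gimplified node instead of
   re-evaluating A + B.  unshare_expr and the mark/copy/unmark walk below
   prevent this.  append_statement, use1 and use2 are hypothetical.  */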

/* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
   These nodes model computations that must be done once.  If we were to
   unshare something like SAVE_EXPR(i++), the gimplification process would
   create wrong code.  However, if DATA is non-null, it must hold a pointer
   set that is used to unshare the subtrees of these nodes.  */

static tree
mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
     copy their subtrees if we can make sure to do it only once.  */
  if (code == SAVE_EXPR || code == TARGET_EXPR || code == BIND_EXPR)
    {
      if (data && !((hash_set<tree> *)data)->add (t))
	;
      else
	*walk_subtrees = 0;
    }

  /* Stop at types, decls, constants like copy_tree_r.  */
  else if (TREE_CODE_CLASS (code) == tcc_type
	   || TREE_CODE_CLASS (code) == tcc_declaration
	   || TREE_CODE_CLASS (code) == tcc_constant)
    *walk_subtrees = 0;

  /* Cope with the statement expression extension.  */
  else if (code == STATEMENT_LIST)
    ;

  /* Leave the bulk of the work to copy_tree_r itself.  */
  else
    copy_tree_r (tp, walk_subtrees, NULL);

  return NULL_TREE;
}

/* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
   If *TP has been visited already, then *TP is deeply copied by calling
   mostly_copy_tree_r.  DATA is passed to mostly_copy_tree_r unmodified.  */

static tree
copy_if_shared_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Skip types, decls, and constants.  But we do want to look at their
     types and the bounds of types.  Mark them as visited so we properly
     unmark their subtrees on the unmark pass.  If we've already seen them,
     don't look down further.  */
  if (TREE_CODE_CLASS (code) == tcc_type
      || TREE_CODE_CLASS (code) == tcc_declaration
      || TREE_CODE_CLASS (code) == tcc_constant)
    {
      if (TREE_VISITED (t))
	*walk_subtrees = 0;
      else
	TREE_VISITED (t) = 1;
    }

  /* If this node has been visited already, unshare it and don't look
     any deeper.  */
  else if (TREE_VISITED (t))
    {
      walk_tree (tp, mostly_copy_tree_r, data, NULL);
      *walk_subtrees = 0;
    }

  /* Otherwise, mark the node as visited and keep looking.  */
  else
    TREE_VISITED (t) = 1;

  return NULL_TREE;
}

/* Unshare most of the shared trees rooted at *TP.  DATA is passed to the
   copy_if_shared_r callback unmodified.  */

void
copy_if_shared (tree *tp, void *data)
{
  walk_tree (tp, copy_if_shared_r, data, NULL);
}

/* Unshare all the trees in the body of FNDECL, as well as in the bodies of
   any nested functions.  */

static void
unshare_body (tree fndecl)
{
  struct cgraph_node *cgn = cgraph_node::get (fndecl);
  /* If the language requires deep unsharing, we need a pointer set to make
     sure we don't repeatedly unshare subtrees of unshareable nodes.  */
  hash_set<tree> *visited
    = lang_hooks.deep_unsharing ? new hash_set<tree> : NULL;

  copy_if_shared (&DECL_SAVED_TREE (fndecl), visited);
  copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl)), visited);
  copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)), visited);

  delete visited;

  if (cgn)
    for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
      unshare_body (cgn->decl);
}

/* Callback for walk_tree to unmark the visited trees rooted at *TP.
   Subtrees are walked until the first unvisited node is encountered.  */

static tree
unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;

  /* If this node has been visited, unmark it and keep looking.  */
  if (TREE_VISITED (t))
    TREE_VISITED (t) = 0;

  /* Otherwise, don't look any deeper.  */
  else
    *walk_subtrees = 0;

  return NULL_TREE;
}

/* Unmark the visited trees rooted at *TP.  */

static inline void
unmark_visited (tree *tp)
{
  walk_tree (tp, unmark_visited_r, NULL, NULL);
}

/* Likewise, but mark all trees as not visited.  */

static void
unvisit_body (tree fndecl)
{
  struct cgraph_node *cgn = cgraph_node::get (fndecl);

  unmark_visited (&DECL_SAVED_TREE (fndecl));
  unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl)));
  unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)));

  if (cgn)
    for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
      unvisit_body (cgn->decl);
}

/* Unconditionally make an unshared copy of EXPR.  This is used when using
   stored expressions which span multiple functions, such as BINFO_VTABLE,
   as the normal unsharing process can't tell that they're shared.  */

tree
unshare_expr (tree expr)
{
  walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
  return expr;
}

/* Worker for unshare_expr_without_location.  */

static tree
prune_expr_location (tree *tp, int *walk_subtrees, void *)
{
  if (EXPR_P (*tp))
    SET_EXPR_LOCATION (*tp, UNKNOWN_LOCATION);
  else
    *walk_subtrees = 0;
  return NULL_TREE;
}

/* Similar to unshare_expr but also prune all expression locations
   from EXPR.  */

tree
unshare_expr_without_location (tree expr)
{
  walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
  if (EXPR_P (expr))
    walk_tree (&expr, prune_expr_location, NULL, NULL);
  return expr;
}

/* Return the EXPR_LOCATION of EXPR, if it (maybe recursively) has
   one, OR_ELSE otherwise.  The location of a STATEMENT_LIST
   comprising at least one DEBUG_BEGIN_STMT followed by exactly one
   EXPR is the location of that EXPR.  */

static location_t
rexpr_location (tree expr, location_t or_else = UNKNOWN_LOCATION)
{
  if (!expr)
    return or_else;

  if (EXPR_HAS_LOCATION (expr))
    return EXPR_LOCATION (expr);

  if (TREE_CODE (expr) != STATEMENT_LIST)
    return or_else;

  tree_stmt_iterator i = tsi_start (expr);

  bool found = false;
  while (!tsi_end_p (i) && TREE_CODE (tsi_stmt (i)) == DEBUG_BEGIN_STMT)
    {
      found = true;
      tsi_next (&i);
    }

  if (!found || !tsi_one_before_end_p (i))
    return or_else;

  return rexpr_location (tsi_stmt (i), or_else);
}

/* Return TRUE iff EXPR (maybe recursively) has a location; see
   rexpr_location for the potential recursion.  */

static inline bool
rexpr_has_location (tree expr)
{
  return rexpr_location (expr) != UNKNOWN_LOCATION;
}


/* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
   contain statements and have a value.  Assign its value to a temporary
   and give it void_type_node.  Return the temporary, or NULL_TREE if
   WRAPPER was already void.  */

tree
voidify_wrapper_expr (tree wrapper, tree temp)
{
  tree type = TREE_TYPE (wrapper);
  if (type && !VOID_TYPE_P (type))
    {
      tree *p;

      /* Set p to point to the body of the wrapper.  Loop until we find
	 something that isn't a wrapper.  */
      for (p = &wrapper; p && *p; )
	{
	  switch (TREE_CODE (*p))
	    {
	    case BIND_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      /* For a BIND_EXPR, the body is operand 1.  */
	      p = &BIND_EXPR_BODY (*p);
	      break;

	    case CLEANUP_POINT_EXPR:
	    case TRY_FINALLY_EXPR:
	    case TRY_CATCH_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TREE_OPERAND (*p, 0);
	      break;

	    case STATEMENT_LIST:
	      {
		tree_stmt_iterator i = tsi_last (*p);
		TREE_SIDE_EFFECTS (*p) = 1;
		TREE_TYPE (*p) = void_type_node;
		p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
	      }
	      break;

	    case COMPOUND_EXPR:
	      /* Advance to the last statement.  Set all container types to
		 void.  */
	      for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		}
	      break;

	    case TRANSACTION_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TRANSACTION_EXPR_BODY (*p);
	      break;

	    default:
	      /* Assume that any tree upon which voidify_wrapper_expr is
		 directly called is a wrapper, and that its body is op0.  */
	      if (p == &wrapper)
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		  p = &TREE_OPERAND (*p, 0);
		  break;
		}
	      goto out;
	    }
	}

    out:
      if (p == NULL || IS_EMPTY_STMT (*p))
	temp = NULL_TREE;
      else if (temp)
	{
	  /* The wrapper is on the RHS of an assignment that we're pushing
	     down.  */
	  gcc_assert (TREE_CODE (temp) == INIT_EXPR
		      || TREE_CODE (temp) == MODIFY_EXPR);
	  TREE_OPERAND (temp, 1) = *p;
	  *p = temp;
	}
      else
	{
	  temp = create_tmp_var (type, "retval");
	  *p = build2 (INIT_EXPR, type, temp, *p);
	}

      return temp;
    }

  return NULL_TREE;
}
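
/* For example (illustration only): for a GNU statement expression used as
   an rvalue,

     x = ({ int i = f (); i + 1; });

   the wrappers are given void type and the value of the last statement is
   captured instead, roughly

     retval = i + 1;

   appended inside the (now void) wrapper, with the "retval" temporary
   handed back to the caller; when TEMP is an INIT_EXPR/MODIFY_EXPR being
   pushed down, that assignment is reused instead of a new temporary.  */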

/* Prepare calls to builtins to SAVE and RESTORE the stack as well as
   a temporary through which they communicate.  */

static void
build_stack_save_restore (gcall **save, gcall **restore)
{
  tree tmp_var;

  *save = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE), 0);
  tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
  gimple_call_set_lhs (*save, tmp_var);

  *restore
    = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE),
			 1, tmp_var);
}

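/* The emitted pair looks roughly like (illustration only):

     saved_stack.1 = __builtin_stack_save ();
     ...
     __builtin_stack_restore (saved_stack.1);

   gimplify_bind_expr wires these into a GIMPLE_TRY_FINALLY so that the
   restore runs on every exit from the block.  */
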
/* Generate an IFN_ASAN_MARK call that poisons the shadow memory of the
   variable DECL.  */

static tree
build_asan_poison_call_expr (tree decl)
{
  /* Do not poison variables that have size equal to zero.  */
  tree unit_size = DECL_SIZE_UNIT (decl);
  if (zerop (unit_size))
    return NULL_TREE;

  tree base = build_fold_addr_expr (decl);

  return build_call_expr_internal_loc (UNKNOWN_LOCATION, IFN_ASAN_MARK,
				       void_type_node, 3,
				       build_int_cst (integer_type_node,
						      ASAN_MARK_POISON),
				       base, unit_size);
}

/* Generate an IFN_ASAN_MARK call that poisons or unpoisons, depending on
   the POISON flag, the shadow memory of the variable DECL.  The call is
   inserted at the position identified by the iterator IT; the BEFORE flag
   selects whether the statement goes before or after that position.  */

static void
asan_poison_variable (tree decl, bool poison, gimple_stmt_iterator *it,
		      bool before)
{
  tree unit_size = DECL_SIZE_UNIT (decl);
  tree base = build_fold_addr_expr (decl);

  /* Do not poison variables that have size equal to zero.  */
  if (zerop (unit_size))
    return;

  /* All stack variables must be aligned to the ASAN shadow granularity.  */
  if (DECL_ALIGN_UNIT (decl) <= ASAN_SHADOW_GRANULARITY)
    SET_DECL_ALIGN (decl, BITS_PER_UNIT * ASAN_SHADOW_GRANULARITY);

  HOST_WIDE_INT flags = poison ? ASAN_MARK_POISON : ASAN_MARK_UNPOISON;

  gimple *g
    = gimple_build_call_internal (IFN_ASAN_MARK, 3,
				  build_int_cst (integer_type_node, flags),
				  base, unit_size);

  if (before)
    gsi_insert_before (it, g, GSI_NEW_STMT);
  else
    gsi_insert_after (it, g, GSI_NEW_STMT);
}

/* Generate an IFN_ASAN_MARK internal call that, depending on the POISON
   flag, either poisons or unpoisons DECL.  The created statement is
   appended to the SEQ_P gimple sequence.  */

static void
asan_poison_variable (tree decl, bool poison, gimple_seq *seq_p)
{
  gimple_stmt_iterator it = gsi_last (*seq_p);
  bool before = false;

  if (gsi_end_p (it))
    before = true;

  asan_poison_variable (decl, poison, &it, before);
}
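
/* The generated marker is an internal call of the form (illustration
   only):

     .ASAN_MARK (POISON, &decl, size);

   or UNPOISON on scope entry; later sanitizer passes expand it into the
   actual shadow-memory stores.  */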

/* Sort pair of VAR_DECLs A and B by DECL_UID.  */

static int
sort_by_decl_uid (const void *a, const void *b)
{
  const tree *t1 = (const tree *)a;
  const tree *t2 = (const tree *)b;

  int uid1 = DECL_UID (*t1);
  int uid2 = DECL_UID (*t2);

  if (uid1 < uid2)
    return -1;
  else if (uid1 > uid2)
    return 1;
  else
    return 0;
}

/* Generate an IFN_ASAN_MARK internal call for each variable in VARIABLES,
   poisoning or unpoisoning per the POISON flag.  The created statements
   are appended to the SEQ_P gimple sequence.  */

static void
asan_poison_variables (hash_set<tree> *variables, bool poison, gimple_seq *seq_p)
{
  unsigned c = variables->elements ();
  if (c == 0)
    return;

  auto_vec<tree> sorted_variables (c);

  for (hash_set<tree>::iterator it = variables->begin ();
       it != variables->end (); ++it)
    sorted_variables.safe_push (*it);

  sorted_variables.qsort (sort_by_decl_uid);

  unsigned i;
  tree var;
  FOR_EACH_VEC_ELT (sorted_variables, i, var)
    {
      asan_poison_variable (var, poison, seq_p);

      /* Add the use_after_scope_memory attribute to the variable in order
	 to prevent it from being rewritten into SSA form.  */
      if (!lookup_attribute (ASAN_USE_AFTER_SCOPE_ATTRIBUTE,
			     DECL_ATTRIBUTES (var)))
	DECL_ATTRIBUTES (var)
	  = tree_cons (get_identifier (ASAN_USE_AFTER_SCOPE_ATTRIBUTE),
		       integer_one_node,
		       DECL_ATTRIBUTES (var));
    }
}

/* Gimplify a BIND_EXPR.  Just voidify and recurse.  */

static enum gimplify_status
gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree bind_expr = *expr_p;
  bool old_keep_stack = gimplify_ctxp->keep_stack;
  bool old_save_stack = gimplify_ctxp->save_stack;
  tree t;
  gbind *bind_stmt;
  gimple_seq body, cleanup;
  gcall *stack_save;
  location_t start_locus = 0, end_locus = 0;
  tree ret_clauses = NULL;

  tree temp = voidify_wrapper_expr (bind_expr, NULL);

  /* Mark variables seen in this bind expr.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
    {
      if (VAR_P (t))
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;

	  /* Mark variable as local.  */
	  if (ctx && ctx->region_type != ORT_NONE && !DECL_EXTERNAL (t))
	    {
	      if (! DECL_SEEN_IN_BIND_EXPR_P (t)
		  || splay_tree_lookup (ctx->variables,
					(splay_tree_key) t) == NULL)
		{
		  int flag = GOVD_LOCAL;
		  if (ctx->region_type == ORT_SIMD
		      && TREE_ADDRESSABLE (t)
		      && !TREE_STATIC (t))
		    {
		      if (TREE_CODE (DECL_SIZE_UNIT (t)) != INTEGER_CST)
			ctx->add_safelen1 = true;
		      else
			flag = GOVD_PRIVATE;
		    }
		  omp_add_variable (ctx, t, flag | GOVD_SEEN);
		}
	      /* Static locals inside of target construct or offloaded
		 routines need to be "omp declare target".  */
	      if (TREE_STATIC (t))
		for (; ctx; ctx = ctx->outer_context)
		  if ((ctx->region_type & ORT_TARGET) != 0)
		    {
		      if (!lookup_attribute ("omp declare target",
					     DECL_ATTRIBUTES (t)))
			{
			  tree id = get_identifier ("omp declare target");
			  DECL_ATTRIBUTES (t)
			    = tree_cons (id, NULL_TREE, DECL_ATTRIBUTES (t));
			  varpool_node *node = varpool_node::get (t);
			  if (node)
			    {
			      node->offloadable = 1;
			      if (ENABLE_OFFLOADING && !DECL_EXTERNAL (t))
				{
				  g->have_offload = true;
				  if (!in_lto_p)
				    vec_safe_push (offload_vars, t);
				}
			    }
			}
		      break;
		    }
	    }

	  DECL_SEEN_IN_BIND_EXPR_P (t) = 1;

	  if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
	    cfun->has_local_explicit_reg_vars = true;
	}

      /* Preliminarily mark non-addressed complex variables as eligible
	 for promotion to gimple registers.  We'll transform their uses
	 as we find them.  */
      if ((TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
	   || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
	  && !TREE_THIS_VOLATILE (t)
	  && (VAR_P (t) && !DECL_HARD_REGISTER (t))
	  && !needs_to_live_in_memory (t))
	DECL_GIMPLE_REG_P (t) = 1;
    }

  bind_stmt = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
				 BIND_EXPR_BLOCK (bind_expr));
  gimple_push_bind_expr (bind_stmt);

  gimplify_ctxp->keep_stack = false;
  gimplify_ctxp->save_stack = false;

  /* Gimplify the body into the GIMPLE_BIND tuple's body.  */
  body = NULL;
  gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
  gimple_bind_set_body (bind_stmt, body);

  /* Source-location-wise, the cleanup code (stack_restore and clobbers)
     belongs to the end of the block, so propagate what we have.  The
     stack_save operation belongs to the beginning of the block, which we
     can infer from the bind_expr directly if the block was not explicitly
     assigned a source location.  */
  if (BIND_EXPR_BLOCK (bind_expr))
    {
      end_locus = BLOCK_SOURCE_END_LOCATION (BIND_EXPR_BLOCK (bind_expr));
      start_locus = BLOCK_SOURCE_LOCATION (BIND_EXPR_BLOCK (bind_expr));
    }
  if (start_locus == 0)
    start_locus = EXPR_LOCATION (bind_expr);

  cleanup = NULL;
  stack_save = NULL;

  /* If the code both contains VLAs and calls alloca, then we cannot reclaim
     the stack space allocated to the VLAs.  */
  if (gimplify_ctxp->save_stack && !gimplify_ctxp->keep_stack)
    {
      gcall *stack_restore;

      /* Save stack on entry and restore it on exit.  Add a try_finally
	 block to achieve this.  */
      build_stack_save_restore (&stack_save, &stack_restore);

      gimple_set_location (stack_save, start_locus);
      gimple_set_location (stack_restore, end_locus);

      gimplify_seq_add_stmt (&cleanup, stack_restore);
    }

  /* Add clobbers for all variables that go out of scope.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
    {
      if (VAR_P (t)
	  && !is_global_var (t)
	  && DECL_CONTEXT (t) == current_function_decl)
	{
	  if (!DECL_HARD_REGISTER (t)
	      && !TREE_THIS_VOLATILE (t)
	      && !DECL_HAS_VALUE_EXPR_P (t)
	      /* Only care for variables that have to be in memory.  Others
		 will be rewritten into SSA names, hence moved to the
		 top-level.  */
	      && !is_gimple_reg (t)
	      && flag_stack_reuse != SR_NONE)
	    {
	      tree clobber = build_clobber (TREE_TYPE (t));
	      gimple *clobber_stmt;
	      clobber_stmt = gimple_build_assign (t, clobber);
	      gimple_set_location (clobber_stmt, end_locus);
	      gimplify_seq_add_stmt (&cleanup, clobber_stmt);
	    }

	  if (flag_openacc && oacc_declare_returns != NULL)
	    {
	      tree *c = oacc_declare_returns->get (t);
	      if (c != NULL)
		{
		  if (ret_clauses)
		    OMP_CLAUSE_CHAIN (*c) = ret_clauses;

		  ret_clauses = *c;

		  oacc_declare_returns->remove (t);

		  if (oacc_declare_returns->is_empty ())
		    {
		      delete oacc_declare_returns;
		      oacc_declare_returns = NULL;
		    }
		}
	    }
	}

      if (asan_poisoned_variables != NULL
	  && asan_poisoned_variables->contains (t))
	{
	  asan_poisoned_variables->remove (t);
	  asan_poison_variable (t, true, &cleanup);
	}

      if (gimplify_ctxp->live_switch_vars != NULL
	  && gimplify_ctxp->live_switch_vars->contains (t))
	gimplify_ctxp->live_switch_vars->remove (t);
    }

  if (ret_clauses)
    {
      gomp_target *stmt;
      gimple_stmt_iterator si = gsi_start (cleanup);

      stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
				      ret_clauses);
      gsi_insert_seq_before_without_update (&si, stmt, GSI_NEW_STMT);
    }

  if (cleanup)
    {
      gtry *gs;
      gimple_seq new_body;

      new_body = NULL;
      gs = gimple_build_try (gimple_bind_body (bind_stmt), cleanup,
			     GIMPLE_TRY_FINALLY);

      if (stack_save)
	gimplify_seq_add_stmt (&new_body, stack_save);
      gimplify_seq_add_stmt (&new_body, gs);
      gimple_bind_set_body (bind_stmt, new_body);
    }

  /* keep_stack propagates all the way up to the outermost BIND_EXPR.  */
  if (!gimplify_ctxp->keep_stack)
    gimplify_ctxp->keep_stack = old_keep_stack;
  gimplify_ctxp->save_stack = old_save_stack;

  gimple_pop_bind_expr ();

  gimplify_seq_add_stmt (pre_p, bind_stmt);

  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL_TREE;
  return GS_ALL_DONE;
}
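
/* End-to-end sketch (illustration only): for a block

     { int buf[n]; use (buf); }

   the resulting GIMPLE_BIND body is roughly

     saved_stack.1 = __builtin_stack_save ();
     try
       {
	 ... gimplified body ...
       }
     finally
       {
	 __builtin_stack_restore (saved_stack.1);
       }

   with clobbers for out-of-scope locals added to the FINALLY sequence
   when -fstack-reuse allows it.  */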

/* Maybe add early return predict statement to PRE_P sequence.  */

static void
maybe_add_early_return_predict_stmt (gimple_seq *pre_p)
{
  /* If we are in a conditional context, add a PREDICT statement marking
     the early return as not taken.  */
  if (gimple_conditional_context ())
    {
      gimple *predict = gimple_build_predict (PRED_TREE_EARLY_RETURN,
					      NOT_TAKEN);
      gimplify_seq_add_stmt (pre_p, predict);
    }
}

/* Gimplify a RETURN_EXPR.  If the expression to be returned is not a
   GIMPLE value, it is assigned to a new temporary and the statement is
   re-written to return the temporary.

   PRE_P points to the sequence where side effects that must happen before
   STMT should be stored.  */

static enum gimplify_status
gimplify_return_expr (tree stmt, gimple_seq *pre_p)
{
  greturn *ret;
  tree ret_expr = TREE_OPERAND (stmt, 0);
  tree result_decl, result;

  if (ret_expr == error_mark_node)
    return GS_ERROR;

  if (!ret_expr
      || TREE_CODE (ret_expr) == RESULT_DECL)
    {
      maybe_add_early_return_predict_stmt (pre_p);
      greturn *ret = gimple_build_return (ret_expr);
      gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
      gimplify_seq_add_stmt (pre_p, ret);
      return GS_ALL_DONE;
    }

  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
    result_decl = NULL_TREE;
  else if (TREE_CODE (ret_expr) == COMPOUND_EXPR)
    {
      /* Used in C++ for handling EH cleanup of the return value if a local
	 cleanup throws.  Assume the front-end knows what it's doing.  */
      result_decl = DECL_RESULT (current_function_decl);
      /* But crash if we end up trying to modify ret_expr below.  */
      ret_expr = NULL_TREE;
    }
  else
    {
      result_decl = TREE_OPERAND (ret_expr, 0);

      /* See through a return by reference.  */
      if (TREE_CODE (result_decl) == INDIRECT_REF)
	result_decl = TREE_OPERAND (result_decl, 0);

      gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
		   || TREE_CODE (ret_expr) == INIT_EXPR)
		  && TREE_CODE (result_decl) == RESULT_DECL);
    }

  /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
     Recall that aggregate_value_p is FALSE for any aggregate type that is
     returned in registers.  If we're returning values in registers, then
     we don't want to extend the lifetime of the RESULT_DECL, particularly
     across another call.  In addition, for those aggregates for which
     hard_function_value generates a PARALLEL, we'll die during normal
     expansion of structure assignments; there's special code in expand_return
     to handle this case that does not exist in expand_expr.  */
  if (!result_decl)
    result = NULL_TREE;
  else if (aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
    {
      if (!poly_int_tree_p (DECL_SIZE (result_decl)))
	{
	  if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl)))
	    gimplify_type_sizes (TREE_TYPE (result_decl), pre_p);
	  /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
	     should be effectively allocated by the caller, i.e. all calls to
	     this function must be subject to the Return Slot Optimization.  */
	  gimplify_one_sizepos (&DECL_SIZE (result_decl), pre_p);
	  gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl), pre_p);
	}
      result = result_decl;
    }
  else if (gimplify_ctxp->return_temp)
    result = gimplify_ctxp->return_temp;
  else
    {
      result = create_tmp_reg (TREE_TYPE (result_decl));

      /* ??? With complex control flow (usually involving abnormal edges),
	 we can wind up warning about an uninitialized value for this.  Due
	 to how this variable is constructed and initialized, this is never
	 true.  Give up and never warn.  */
      TREE_NO_WARNING (result) = 1;

      gimplify_ctxp->return_temp = result;
    }

  /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
     Then gimplify the whole thing.  */
  if (result != result_decl)
    TREE_OPERAND (ret_expr, 0) = result;

  gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);

  maybe_add_early_return_predict_stmt (pre_p);
  ret = gimple_build_return (result);
  gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
  gimplify_seq_add_stmt (pre_p, ret);

  return GS_ALL_DONE;
}
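
/* For example (illustration only): in a function returning int,

     return a + b;

   gimplifies to

     D.1234 = a + b;
     return D.1234;

   where the same temporary (gimplify_ctxp->return_temp) is reused by
   every return statement in the function.  */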

/* Gimplify a variable-length array DECL.  */

static void
gimplify_vla_decl (tree decl, gimple_seq *seq_p)
{
  /* This is a variable-sized decl.  Simplify its size and mark it
     for deferred expansion.  */
  tree t, addr, ptr_type;

  gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
  gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);

  /* Don't mess with a DECL_VALUE_EXPR set by the front-end.  */
  if (DECL_HAS_VALUE_EXPR_P (decl))
    return;

  /* All occurrences of this decl in final gimplified code will be
     replaced by indirection.  Setting DECL_VALUE_EXPR does two
     things: First, it lets the rest of the gimplifier know what
     replacement to use.  Second, it lets the debug info know
     where to find the value.  */
  ptr_type = build_pointer_type (TREE_TYPE (decl));
  addr = create_tmp_var (ptr_type, get_name (decl));
  DECL_IGNORED_P (addr) = 0;
  t = build_fold_indirect_ref (addr);
  TREE_THIS_NOTRAP (t) = 1;
  SET_DECL_VALUE_EXPR (decl, t);
  DECL_HAS_VALUE_EXPR_P (decl) = 1;

  t = build_alloca_call_expr (DECL_SIZE_UNIT (decl), DECL_ALIGN (decl),
			      max_int_size_in_bytes (TREE_TYPE (decl)));
  /* The call has been built for a variable-sized object.  */
  CALL_ALLOCA_FOR_VAR_P (t) = 1;
  t = fold_convert (ptr_type, t);
  t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);

  gimplify_and_add (t, seq_p);

  /* Record the dynamic allocation associated with DECL if requested.  */
  if (flag_callgraph_info & CALLGRAPH_INFO_DYNAMIC_ALLOC)
    record_dynamic_alloc (decl);
}
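
/* For example (illustration only): a declaration

     int a[n];

   becomes roughly

     a.1 = __builtin_alloca_with_align (D.1234, align);

   where D.1234 holds the gimplified size, and DECL_VALUE_EXPR (a) is set
   to *a.1 so that later references to A are rewritten as indirections
   through the pointer temporary.  */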

/* A helper function to be called via walk_tree.  Mark all labels under *TP
   as being forced.  To be called for DECL_INITIAL of static variables.  */

static tree
force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  if (TYPE_P (*tp))
    *walk_subtrees = 0;
  if (TREE_CODE (*tp) == LABEL_DECL)
    {
      FORCED_LABEL (*tp) = 1;
      cfun->has_forced_label_in_static = 1;
    }

  return NULL_TREE;
}

/* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
   and initialization explicit.  */

static enum gimplify_status
gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
{
  tree stmt = *stmt_p;
  tree decl = DECL_EXPR_DECL (stmt);

  *stmt_p = NULL_TREE;

  if (TREE_TYPE (decl) == error_mark_node)
    return GS_ERROR;

  if ((TREE_CODE (decl) == TYPE_DECL
       || VAR_P (decl))
      && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
    {
      gimplify_type_sizes (TREE_TYPE (decl), seq_p);
      if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
	gimplify_type_sizes (TREE_TYPE (TREE_TYPE (decl)), seq_p);
    }

  /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
     in case its size expressions contain problematic nodes like CALL_EXPR.  */
  if (TREE_CODE (decl) == TYPE_DECL
      && DECL_ORIGINAL_TYPE (decl)
      && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl)))
    {
      gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl), seq_p);
      if (TREE_CODE (DECL_ORIGINAL_TYPE (decl)) == REFERENCE_TYPE)
	gimplify_type_sizes (TREE_TYPE (DECL_ORIGINAL_TYPE (decl)), seq_p);
    }

  if (VAR_P (decl) && !DECL_EXTERNAL (decl))
    {
      tree init = DECL_INITIAL (decl);
      bool is_vla = false;

      poly_uint64 size;
      if (!poly_int_tree_p (DECL_SIZE_UNIT (decl), &size)
	  || (!TREE_STATIC (decl)
	      && flag_stack_check == GENERIC_STACK_CHECK
	      && maybe_gt (size,
			   (unsigned HOST_WIDE_INT) STACK_CHECK_MAX_VAR_SIZE)))
	{
	  gimplify_vla_decl (decl, seq_p);
	  is_vla = true;
	}

      if (asan_poisoned_variables
	  && !is_vla
	  && TREE_ADDRESSABLE (decl)
	  && !TREE_STATIC (decl)
	  && !DECL_HAS_VALUE_EXPR_P (decl)
	  && DECL_ALIGN (decl) <= MAX_SUPPORTED_STACK_ALIGNMENT
	  && dbg_cnt (asan_use_after_scope)
	  && !gimplify_omp_ctxp)
	{
	  asan_poisoned_variables->add (decl);
	  asan_poison_variable (decl, false, seq_p);
	  if (!DECL_ARTIFICIAL (decl) && gimplify_ctxp->live_switch_vars)
	    gimplify_ctxp->live_switch_vars->add (decl);
	}

      /* Some front ends do not explicitly declare all anonymous
	 artificial variables.  We compensate here by declaring the
	 variables, though it would be better if the front ends would
	 explicitly declare them.  */
      if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
	  && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
	gimple_add_tmp_var (decl);

      if (init && init != error_mark_node)
	{
	  if (!TREE_STATIC (decl))
	    {
	      DECL_INITIAL (decl) = NULL_TREE;
	      init = build2 (INIT_EXPR, void_type_node, decl, init);
	      gimplify_and_add (init, seq_p);
	      ggc_free (init);
	    }
	  else
	    /* We must still examine initializers for static variables
	       as they may contain a label address.  */
	    walk_tree (&init, force_labels_r, NULL, NULL);
	}
    }

  return GS_ALL_DONE;
}

/* Gimplify a LOOP_EXPR.  Normally this just involves gimplifying the body
   and replacing the LOOP_EXPR with a goto, but if the loop contains an
   EXIT_EXPR, we need to append a label for it to jump to.  */
1832
1833 static enum gimplify_status
1834 gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
1835 {
1836 tree saved_label = gimplify_ctxp->exit_label;
1837 tree start_label = create_artificial_label (UNKNOWN_LOCATION);
1838
1839 gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));
1840
1841 gimplify_ctxp->exit_label = NULL_TREE;
1842
1843 gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);
1844
1845 gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));
1846
1847 if (gimplify_ctxp->exit_label)
1848 gimplify_seq_add_stmt (pre_p,
1849 gimple_build_label (gimplify_ctxp->exit_label));
1850
1851 gimplify_ctxp->exit_label = saved_label;
1852
1853 *expr_p = NULL;
1854 return GS_ALL_DONE;
1855 }
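
/* Illustrative sketch (labels invented): a LOOP_EXPR whose body contains
   EXIT_EXPR <cond> is lowered by the code above to roughly

     start_label:
       <body up to the EXIT_EXPR>
       if (cond) goto exit_label;
       <rest of body>
       goto start_label;
     exit_label:

   exit_label only materializes if some EXIT_EXPR in the body asked
   build_and_jump for gimplify_ctxp->exit_label.  */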
1856
1857 /* Gimplify a statement list onto a sequence. These may be created either
1858 by an enlightened front-end, or by shortcut_cond_expr. */
1859
1860 static enum gimplify_status
1861 gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
1862 {
1863 tree temp = voidify_wrapper_expr (*expr_p, NULL);
1864
1865 tree_stmt_iterator i = tsi_start (*expr_p);
1866
1867 while (!tsi_end_p (i))
1868 {
1869 gimplify_stmt (tsi_stmt_ptr (i), pre_p);
1870 tsi_delink (&i);
1871 }
1872
1873 if (temp)
1874 {
1875 *expr_p = temp;
1876 return GS_OK;
1877 }
1878
1879 return GS_ALL_DONE;
1880 }
1881
1882 /* Callback for walk_gimple_seq. */
1883
1884 static tree
1885 warn_switch_unreachable_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
1886 struct walk_stmt_info *wi)
1887 {
1888 gimple *stmt = gsi_stmt (*gsi_p);
1889
1890 *handled_ops_p = true;
1891 switch (gimple_code (stmt))
1892 {
1893 case GIMPLE_TRY:
1894 /* A compiler-generated cleanup or a user-written try block.
1895 If it's empty, don't dive into it--that would result in
1896 worse location info. */
1897 if (gimple_try_eval (stmt) == NULL)
1898 {
1899 wi->info = stmt;
1900 return integer_zero_node;
1901 }
1902 /* Fall through. */
1903 case GIMPLE_BIND:
1904 case GIMPLE_CATCH:
1905 case GIMPLE_EH_FILTER:
1906 case GIMPLE_TRANSACTION:
1907 /* Walk the sub-statements. */
1908 *handled_ops_p = false;
1909 break;
1910
1911 case GIMPLE_DEBUG:
1912 /* Ignore these. We may generate them before declarations that
1913 are never executed. If there's something to warn about,
1914 there will be non-debug stmts too, and we'll catch those. */
1915 break;
1916
1917 case GIMPLE_CALL:
1918 if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
1919 {
1920 *handled_ops_p = false;
1921 break;
1922 }
1923 /* Fall through. */
1924 default:
1925 /* Save the first "real" statement (not a decl/lexical scope/...). */
1926 wi->info = stmt;
1927 return integer_zero_node;
1928 }
1929 return NULL_TREE;
1930 }
1931
1932 /* Possibly warn about unreachable statements between a switch's controlling
1933 expression and the first case. SEQ is the body of a switch expression. */
1934
1935 static void
1936 maybe_warn_switch_unreachable (gimple_seq seq)
1937 {
1938 if (!warn_switch_unreachable
1939 /* This warning doesn't play well with Fortran when optimizations
1940 are on. */
1941 || lang_GNU_Fortran ()
1942 || seq == NULL)
1943 return;
1944
1945 struct walk_stmt_info wi;
1946 memset (&wi, 0, sizeof (wi));
1947 walk_gimple_seq (seq, warn_switch_unreachable_r, NULL, &wi);
1948 gimple *stmt = (gimple *) wi.info;
1949
1950 if (stmt && gimple_code (stmt) != GIMPLE_LABEL)
1951 {
1952 if (gimple_code (stmt) == GIMPLE_GOTO
1953 && TREE_CODE (gimple_goto_dest (stmt)) == LABEL_DECL
1954 && DECL_ARTIFICIAL (gimple_goto_dest (stmt)))
1955 /* Don't warn for compiler-generated gotos. These occur
1956 in Duff's devices, for example. */;
1957 else
1958 warning_at (gimple_location (stmt), OPT_Wswitch_unreachable,
1959 "statement will never be executed");
1960 }
1961 }
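
/* Example, for illustration only: with -Wswitch-unreachable, in

     int i;
     switch (x)
       {
         i = 4;   <-- can never be reached
       case 0:
         return i;
       }

   the walk above stops at the assignment and reports "statement will
   never be executed"; a plain declaration in that position would be
   skipped rather than warned about.  */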
1962
1963
1964 /* A label entry that pairs label and a location. */
1965 struct label_entry
1966 {
1967 tree label;
1968 location_t loc;
1969 };
1970
1971 /* Find LABEL in vector of label entries VEC. */
1972
1973 static struct label_entry *
1974 find_label_entry (const auto_vec<struct label_entry> *vec, tree label)
1975 {
1976 unsigned int i;
1977 struct label_entry *l;
1978
1979 FOR_EACH_VEC_ELT (*vec, i, l)
1980 if (l->label == label)
1981 return l;
1982 return NULL;
1983 }
1984
1985 /* Return true if LABEL, a LABEL_DECL, represents a case label
1986 in a vector of labels CASES. */
1987
1988 static bool
1989 case_label_p (const vec<tree> *cases, tree label)
1990 {
1991 unsigned int i;
1992 tree l;
1993
1994 FOR_EACH_VEC_ELT (*cases, i, l)
1995 if (CASE_LABEL (l) == label)
1996 return true;
1997 return false;
1998 }
1999
2000 /* Find the last nondebug statement in a scope STMT. */
2001
2002 static gimple *
2003 last_stmt_in_scope (gimple *stmt)
2004 {
2005 if (!stmt)
2006 return NULL;
2007
2008 switch (gimple_code (stmt))
2009 {
2010 case GIMPLE_BIND:
2011 {
2012 gbind *bind = as_a <gbind *> (stmt);
2013 stmt = gimple_seq_last_nondebug_stmt (gimple_bind_body (bind));
2014 return last_stmt_in_scope (stmt);
2015 }
2016
2017 case GIMPLE_TRY:
2018 {
2019 gtry *try_stmt = as_a <gtry *> (stmt);
2020 stmt = gimple_seq_last_nondebug_stmt (gimple_try_eval (try_stmt));
2021 gimple *last_eval = last_stmt_in_scope (stmt);
2022 if (gimple_stmt_may_fallthru (last_eval)
2023 && (last_eval == NULL
2024 || !gimple_call_internal_p (last_eval, IFN_FALLTHROUGH))
2025 && gimple_try_kind (try_stmt) == GIMPLE_TRY_FINALLY)
2026 {
2027 stmt = gimple_seq_last_nondebug_stmt (gimple_try_cleanup (try_stmt));
2028 return last_stmt_in_scope (stmt);
2029 }
2030 else
2031 return last_eval;
2032 }
2033
2034 case GIMPLE_DEBUG:
2035 gcc_unreachable ();
2036
2037 default:
2038 return stmt;
2039 }
2040 }
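
/* For example (illustrative): in

     { try { f (); } finally { g (); } }

   the call to f () may fall through into the cleanup, so the g () call
   is returned as the last statement in the scope; had the try body
   ended in a return, that return would be the result instead.  */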
2041
2042 /* Collect interesting labels in LABELS and return the statement preceding
2043 another case label, or a user-defined label. Store a location useful
2044 to give warnings at *PREVLOC (usually the location of the returned
2045 statement or of its surrounding scope). */
2046
2047 static gimple *
2048 collect_fallthrough_labels (gimple_stmt_iterator *gsi_p,
2049 auto_vec <struct label_entry> *labels,
2050 location_t *prevloc)
2051 {
2052 gimple *prev = NULL;
2053
2054 *prevloc = UNKNOWN_LOCATION;
2055 do
2056 {
2057 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND)
2058 {
2059 /* Recognize the special GIMPLE_BIND added by gimplify_switch_expr,
2060 which starts on a GIMPLE_SWITCH and ends with a break label.
2061 Handle that as a single statement that can fall through. */
2062 gbind *bind = as_a <gbind *> (gsi_stmt (*gsi_p));
2063 gimple *first = gimple_seq_first_stmt (gimple_bind_body (bind));
2064 gimple *last = gimple_seq_last_stmt (gimple_bind_body (bind));
2065 if (last
2066 && gimple_code (first) == GIMPLE_SWITCH
2067 && gimple_code (last) == GIMPLE_LABEL)
2068 {
2069 tree label = gimple_label_label (as_a <glabel *> (last));
2070 if (SWITCH_BREAK_LABEL_P (label))
2071 {
2072 prev = bind;
2073 gsi_next (gsi_p);
2074 continue;
2075 }
2076 }
2077 }
2078 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND
2079 || gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_TRY)
2080 {
2081 /* Nested scope. Only look at the last statement of
2082 the innermost scope. */
2083 location_t bind_loc = gimple_location (gsi_stmt (*gsi_p));
2084 gimple *last = last_stmt_in_scope (gsi_stmt (*gsi_p));
2085 if (last)
2086 {
2087 prev = last;
2088 /* It might be a label without a location. Use the
2089 location of the scope then. */
2090 if (!gimple_has_location (prev))
2091 *prevloc = bind_loc;
2092 }
2093 gsi_next (gsi_p);
2094 continue;
2095 }
2096
2097 /* Ifs are tricky. */
2098 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_COND)
2099 {
2100 gcond *cond_stmt = as_a <gcond *> (gsi_stmt (*gsi_p));
2101 tree false_lab = gimple_cond_false_label (cond_stmt);
2102 location_t if_loc = gimple_location (cond_stmt);
2103
2104 /* If we have e.g.
2105 if (i > 1) goto <D.2259>; else goto D;
2106 we can't do much with the else-branch. */
2107 if (!DECL_ARTIFICIAL (false_lab))
2108 break;
2109
2110 /* Go on until the false label, then one step back. */
2111 for (; !gsi_end_p (*gsi_p); gsi_next (gsi_p))
2112 {
2113 gimple *stmt = gsi_stmt (*gsi_p);
2114 if (gimple_code (stmt) == GIMPLE_LABEL
2115 && gimple_label_label (as_a <glabel *> (stmt)) == false_lab)
2116 break;
2117 }
2118
2119 /* Not found? Oops. */
2120 if (gsi_end_p (*gsi_p))
2121 break;
2122
2123 struct label_entry l = { false_lab, if_loc };
2124 labels->safe_push (l);
2125
2126 /* Go to the last statement of the then branch. */
2127 gsi_prev (gsi_p);
2128
2129 /* if (i != 0) goto <D.1759>; else goto <D.1760>;
2130 <D.1759>:
2131 <stmt>;
2132 goto <D.1761>;
2133 <D.1760>:
2134 */
2135 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_GOTO
2136 && !gimple_has_location (gsi_stmt (*gsi_p)))
2137 {
2138 /* Look at the statement before, it might be
2139 attribute fallthrough, in which case don't warn. */
2140 gsi_prev (gsi_p);
2141 bool fallthru_before_dest
2142 = gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_FALLTHROUGH);
2143 gsi_next (gsi_p);
2144 tree goto_dest = gimple_goto_dest (gsi_stmt (*gsi_p));
2145 if (!fallthru_before_dest)
2146 {
2147 struct label_entry l = { goto_dest, if_loc };
2148 labels->safe_push (l);
2149 }
2150 }
2151 /* And move back. */
2152 gsi_next (gsi_p);
2153 }
2154
2155 /* Remember the last statement. Skip labels that are of no interest
2156 to us. */
2157 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
2158 {
2159 tree label = gimple_label_label (as_a <glabel *> (gsi_stmt (*gsi_p)));
2160 if (find_label_entry (labels, label))
2161 prev = gsi_stmt (*gsi_p);
2162 }
2163 else if (gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_ASAN_MARK))
2164 ;
2165 else if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_PREDICT)
2166 ;
2167 else if (!is_gimple_debug (gsi_stmt (*gsi_p)))
2168 prev = gsi_stmt (*gsi_p);
2169 gsi_next (gsi_p);
2170 }
2171 while (!gsi_end_p (*gsi_p)
2172 /* Stop if we find a case or a user-defined label. */
2173 && (gimple_code (gsi_stmt (*gsi_p)) != GIMPLE_LABEL
2174 || !gimple_has_location (gsi_stmt (*gsi_p))));
2175
2176 if (prev && gimple_has_location (prev))
2177 *prevloc = gimple_location (prev);
2178 return prev;
2179 }
2180
2181 /* Return true if the switch fallthrough warning should occur.  LABEL is
2182 the label statement that we're falling through to. */
2183
2184 static bool
2185 should_warn_for_implicit_fallthrough (gimple_stmt_iterator *gsi_p, tree label)
2186 {
2187 gimple_stmt_iterator gsi = *gsi_p;
2188
2189 /* Don't warn if the label is marked with a "falls through" comment. */
2190 if (FALLTHROUGH_LABEL_P (label))
2191 return false;
2192
2193 /* Don't warn for non-case labels followed by a statement:
2194 case 0:
2195 foo ();
2196 label:
2197 bar ();
2198 as these are likely intentional. */
2199 if (!case_label_p (&gimplify_ctxp->case_labels, label))
2200 {
2201 tree l;
2202 while (!gsi_end_p (gsi)
2203 && gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
2204 && (l = gimple_label_label (as_a <glabel *> (gsi_stmt (gsi))))
2205 && !case_label_p (&gimplify_ctxp->case_labels, l))
2206 gsi_next_nondebug (&gsi);
2207 if (gsi_end_p (gsi) || gimple_code (gsi_stmt (gsi)) != GIMPLE_LABEL)
2208 return false;
2209 }
2210
2211   /* Don't warn for terminated branches, i.e. when the subsequent case label
2212 immediately breaks. */
2213 gsi = *gsi_p;
2214
2215 /* Skip all immediately following labels. */
2216 while (!gsi_end_p (gsi)
2217 && (gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
2218 || gimple_code (gsi_stmt (gsi)) == GIMPLE_PREDICT))
2219 gsi_next_nondebug (&gsi);
2220
2221 /* { ... something; default:; } */
2222 if (gsi_end_p (gsi)
2223 /* { ... something; default: break; } or
2224 { ... something; default: goto L; } */
2225 || gimple_code (gsi_stmt (gsi)) == GIMPLE_GOTO
2226 /* { ... something; default: return; } */
2227 || gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
2228 return false;
2229
2230 return true;
2231 }
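
/* Illustrative examples, not from the original source: with
   -Wimplicit-fallthrough,

     case 0:
       foo ();   <-- warns: may fall through to case 1
     case 1:
       bar ();
       break;

   is diagnosed, whereas by the checks above falling into an ordinary
   (non-case) label that is followed by a statement, or into a branch
   that immediately terminates such as "default: break;", is not.  */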
2232
2233 /* Callback for walk_gimple_seq. */
2234
2235 static tree
2236 warn_implicit_fallthrough_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
2237 struct walk_stmt_info *)
2238 {
2239 gimple *stmt = gsi_stmt (*gsi_p);
2240
2241 *handled_ops_p = true;
2242 switch (gimple_code (stmt))
2243 {
2244 case GIMPLE_TRY:
2245 case GIMPLE_BIND:
2246 case GIMPLE_CATCH:
2247 case GIMPLE_EH_FILTER:
2248 case GIMPLE_TRANSACTION:
2249 /* Walk the sub-statements. */
2250 *handled_ops_p = false;
2251 break;
2252
2253 /* Find a sequence of form:
2254
2255 GIMPLE_LABEL
2256 [...]
2257 <may fallthru stmt>
2258 GIMPLE_LABEL
2259
2260 and possibly warn. */
2261 case GIMPLE_LABEL:
2262 {
2263 /* Found a label. Skip all immediately following labels. */
2264 while (!gsi_end_p (*gsi_p)
2265 && gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
2266 gsi_next_nondebug (gsi_p);
2267
2268 /* There might be no more statements. */
2269 if (gsi_end_p (*gsi_p))
2270 return integer_zero_node;
2271
2272 /* Vector of labels that fall through. */
2273 auto_vec <struct label_entry> labels;
2274 location_t prevloc;
2275 gimple *prev = collect_fallthrough_labels (gsi_p, &labels, &prevloc);
2276
2277 /* There might be no more statements. */
2278 if (gsi_end_p (*gsi_p))
2279 return integer_zero_node;
2280
2281 gimple *next = gsi_stmt (*gsi_p);
2282 tree label;
2283 /* If what follows is a label, then we may have a fallthrough. */
2284 if (gimple_code (next) == GIMPLE_LABEL
2285 && gimple_has_location (next)
2286 && (label = gimple_label_label (as_a <glabel *> (next)))
2287 && prev != NULL)
2288 {
2289 struct label_entry *l;
2290 bool warned_p = false;
2291 auto_diagnostic_group d;
2292 if (!should_warn_for_implicit_fallthrough (gsi_p, label))
2293 /* Quiet. */;
2294 else if (gimple_code (prev) == GIMPLE_LABEL
2295 && (label = gimple_label_label (as_a <glabel *> (prev)))
2296 && (l = find_label_entry (&labels, label)))
2297 warned_p = warning_at (l->loc, OPT_Wimplicit_fallthrough_,
2298 "this statement may fall through");
2299 else if (!gimple_call_internal_p (prev, IFN_FALLTHROUGH)
2300 /* Try to be clever and don't warn when the statement
2301 can't actually fall through. */
2302 && gimple_stmt_may_fallthru (prev)
2303 && prevloc != UNKNOWN_LOCATION)
2304 warned_p = warning_at (prevloc,
2305 OPT_Wimplicit_fallthrough_,
2306 "this statement may fall through");
2307 if (warned_p)
2308 inform (gimple_location (next), "here");
2309
2310 /* Mark this label as processed so as to prevent multiple
2311 warnings in nested switches. */
2312 FALLTHROUGH_LABEL_P (label) = true;
2313
2314 /* So that next warn_implicit_fallthrough_r will start looking for
2315 a new sequence starting with this label. */
2316 gsi_prev (gsi_p);
2317 }
2318 }
2319 break;
2320 default:
2321 break;
2322 }
2323 return NULL_TREE;
2324 }
2325
2326 /* Warn when a switch case falls through. */
2327
2328 static void
2329 maybe_warn_implicit_fallthrough (gimple_seq seq)
2330 {
2331 if (!warn_implicit_fallthrough)
2332 return;
2333
2334 /* This warning is meant for C/C++/ObjC/ObjC++ only. */
2335 if (!(lang_GNU_C ()
2336 || lang_GNU_CXX ()
2337 || lang_GNU_OBJC ()))
2338 return;
2339
2340 struct walk_stmt_info wi;
2341 memset (&wi, 0, sizeof (wi));
2342 walk_gimple_seq (seq, warn_implicit_fallthrough_r, NULL, &wi);
2343 }
2344
2345 /* Callback for walk_gimple_seq. */
2346
2347 static tree
2348 expand_FALLTHROUGH_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
2349 struct walk_stmt_info *wi)
2350 {
2351 gimple *stmt = gsi_stmt (*gsi_p);
2352
2353 *handled_ops_p = true;
2354 switch (gimple_code (stmt))
2355 {
2356 case GIMPLE_TRY:
2357 case GIMPLE_BIND:
2358 case GIMPLE_CATCH:
2359 case GIMPLE_EH_FILTER:
2360 case GIMPLE_TRANSACTION:
2361 /* Walk the sub-statements. */
2362 *handled_ops_p = false;
2363 break;
2364 case GIMPLE_CALL:
2365 if (gimple_call_internal_p (stmt, IFN_FALLTHROUGH))
2366 {
2367 gsi_remove (gsi_p, true);
2368 if (gsi_end_p (*gsi_p))
2369 {
2370 *static_cast<location_t *>(wi->info) = gimple_location (stmt);
2371 return integer_zero_node;
2372 }
2373
2374 bool found = false;
2375 location_t loc = gimple_location (stmt);
2376
2377 gimple_stmt_iterator gsi2 = *gsi_p;
2378 stmt = gsi_stmt (gsi2);
2379 if (gimple_code (stmt) == GIMPLE_GOTO && !gimple_has_location (stmt))
2380 {
2381 /* Go on until the artificial label. */
2382 tree goto_dest = gimple_goto_dest (stmt);
2383 for (; !gsi_end_p (gsi2); gsi_next (&gsi2))
2384 {
2385 if (gimple_code (gsi_stmt (gsi2)) == GIMPLE_LABEL
2386 && gimple_label_label (as_a <glabel *> (gsi_stmt (gsi2)))
2387 == goto_dest)
2388 break;
2389 }
2390
2391 /* Not found? Stop. */
2392 if (gsi_end_p (gsi2))
2393 break;
2394
2395 /* Look one past it. */
2396 gsi_next (&gsi2);
2397 }
2398
2399 /* We're looking for a case label or default label here. */
2400 while (!gsi_end_p (gsi2))
2401 {
2402 stmt = gsi_stmt (gsi2);
2403 if (gimple_code (stmt) == GIMPLE_LABEL)
2404 {
2405 tree label = gimple_label_label (as_a <glabel *> (stmt));
2406 if (gimple_has_location (stmt) && DECL_ARTIFICIAL (label))
2407 {
2408 found = true;
2409 break;
2410 }
2411 }
2412 else if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
2413 ;
2414 else if (!is_gimple_debug (stmt))
2415 /* Anything else is not expected. */
2416 break;
2417 gsi_next (&gsi2);
2418 }
2419 if (!found)
2420 pedwarn (loc, 0, "attribute %<fallthrough%> not preceding "
2421 "a case label or default label");
2422 }
2423 break;
2424 default:
2425 break;
2426 }
2427 return NULL_TREE;
2428 }
2429
2430 /* Expand all FALLTHROUGH () calls in SEQ. */
2431
2432 static void
2433 expand_FALLTHROUGH (gimple_seq *seq_p)
2434 {
2435 struct walk_stmt_info wi;
2436 location_t loc;
2437 memset (&wi, 0, sizeof (wi));
2438 wi.info = (void *) &loc;
2439 walk_gimple_seq_mod (seq_p, expand_FALLTHROUGH_r, NULL, &wi);
2440 if (wi.callback_result == integer_zero_node)
2441 /* We've found [[fallthrough]]; at the end of a switch, which the C++
2442 standard says is ill-formed; see [dcl.attr.fallthrough]. */
2443 pedwarn (loc, 0, "attribute %<fallthrough%> not preceding "
2444 "a case label or default label");
2445 }
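
/* Example (illustrative): in C++,

     switch (n)
       {
       case 1:
         f ();
         [[fallthrough]];
       case 2:
         g ();
       }

   the attribute reaches this point as an IFN_FALLTHROUGH call, which is
   deleted; because a case label follows it, no diagnostic is issued.
   Placing the attribute after "g ();" would leave it at the end of the
   switch and trigger the pedwarn above.  */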
2446
2447 \f
2448 /* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
2449 branch to. */
2450
2451 static enum gimplify_status
2452 gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
2453 {
2454 tree switch_expr = *expr_p;
2455 gimple_seq switch_body_seq = NULL;
2456 enum gimplify_status ret;
2457 tree index_type = TREE_TYPE (switch_expr);
2458 if (index_type == NULL_TREE)
2459 index_type = TREE_TYPE (SWITCH_COND (switch_expr));
2460
2461 ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
2462 fb_rvalue);
2463 if (ret == GS_ERROR || ret == GS_UNHANDLED)
2464 return ret;
2465
2466 if (SWITCH_BODY (switch_expr))
2467 {
2468 vec<tree> labels;
2469 vec<tree> saved_labels;
2470 hash_set<tree> *saved_live_switch_vars = NULL;
2471 tree default_case = NULL_TREE;
2472 gswitch *switch_stmt;
2473
2474 /* Save old labels, get new ones from body, then restore the old
2475 labels. Save all the things from the switch body to append after. */
2476 saved_labels = gimplify_ctxp->case_labels;
2477 gimplify_ctxp->case_labels.create (8);
2478
2479 /* Do not create live_switch_vars if SWITCH_BODY is not a BIND_EXPR. */
2480 saved_live_switch_vars = gimplify_ctxp->live_switch_vars;
2481 tree_code body_type = TREE_CODE (SWITCH_BODY (switch_expr));
2482 if (body_type == BIND_EXPR || body_type == STATEMENT_LIST)
2483 gimplify_ctxp->live_switch_vars = new hash_set<tree> (4);
2484 else
2485 gimplify_ctxp->live_switch_vars = NULL;
2486
2487 bool old_in_switch_expr = gimplify_ctxp->in_switch_expr;
2488 gimplify_ctxp->in_switch_expr = true;
2489
2490 gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);
2491
2492 gimplify_ctxp->in_switch_expr = old_in_switch_expr;
2493 maybe_warn_switch_unreachable (switch_body_seq);
2494 maybe_warn_implicit_fallthrough (switch_body_seq);
2495 /* Only do this for the outermost GIMPLE_SWITCH. */
2496 if (!gimplify_ctxp->in_switch_expr)
2497 expand_FALLTHROUGH (&switch_body_seq);
2498
2499 labels = gimplify_ctxp->case_labels;
2500 gimplify_ctxp->case_labels = saved_labels;
2501
2502 if (gimplify_ctxp->live_switch_vars)
2503 {
2504 gcc_assert (gimplify_ctxp->live_switch_vars->is_empty ());
2505 delete gimplify_ctxp->live_switch_vars;
2506 }
2507 gimplify_ctxp->live_switch_vars = saved_live_switch_vars;
2508
2509 preprocess_case_label_vec_for_gimple (labels, index_type,
2510 &default_case);
2511
2512 bool add_bind = false;
2513 if (!default_case)
2514 {
2515 glabel *new_default;
2516
2517 default_case
2518 = build_case_label (NULL_TREE, NULL_TREE,
2519 create_artificial_label (UNKNOWN_LOCATION));
2520 if (old_in_switch_expr)
2521 {
2522 SWITCH_BREAK_LABEL_P (CASE_LABEL (default_case)) = 1;
2523 add_bind = true;
2524 }
2525 new_default = gimple_build_label (CASE_LABEL (default_case));
2526 gimplify_seq_add_stmt (&switch_body_seq, new_default);
2527 }
2528 else if (old_in_switch_expr)
2529 {
2530 gimple *last = gimple_seq_last_stmt (switch_body_seq);
2531 if (last && gimple_code (last) == GIMPLE_LABEL)
2532 {
2533 tree label = gimple_label_label (as_a <glabel *> (last));
2534 if (SWITCH_BREAK_LABEL_P (label))
2535 add_bind = true;
2536 }
2537 }
2538
2539 switch_stmt = gimple_build_switch (SWITCH_COND (switch_expr),
2540 default_case, labels);
2541 /* For the benefit of -Wimplicit-fallthrough, if switch_body_seq
2542 ends with a GIMPLE_LABEL holding SWITCH_BREAK_LABEL_P LABEL_DECL,
2543 wrap the GIMPLE_SWITCH up to that GIMPLE_LABEL into a GIMPLE_BIND,
2544 so that we can easily find the start and end of the switch
2545 statement. */
2546 if (add_bind)
2547 {
2548 gimple_seq bind_body = NULL;
2549 gimplify_seq_add_stmt (&bind_body, switch_stmt);
2550 gimple_seq_add_seq (&bind_body, switch_body_seq);
2551 gbind *bind = gimple_build_bind (NULL_TREE, bind_body, NULL_TREE);
2552 gimple_set_location (bind, EXPR_LOCATION (switch_expr));
2553 gimplify_seq_add_stmt (pre_p, bind);
2554 }
2555 else
2556 {
2557 gimplify_seq_add_stmt (pre_p, switch_stmt);
2558 gimplify_seq_add_seq (pre_p, switch_body_seq);
2559 }
2560 labels.release ();
2561 }
2562 else
2563 gcc_unreachable ();
2564
2565 return GS_ALL_DONE;
2566 }
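
/* Illustrative sketch (label names invented): gimplifying

     switch (x) { case 1: f (); break; default: g (); }

   yields roughly

     switch (x) <default: D.3, case 1: D.1>
     D.1:
     f ();
     goto D.4;   <-- the break
     D.3:
     g ();
     D.4:

   For a switch nested in another SWITCH_EXPR, the region from the
   GIMPLE_SWITCH to the break label may additionally be wrapped in a
   GIMPLE_BIND (see add_bind above) so the fallthrough machinery can
   treat it as one statement.  */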
2567
2568 /* Gimplify the LABEL_EXPR pointed to by EXPR_P. */
2569
2570 static enum gimplify_status
2571 gimplify_label_expr (tree *expr_p, gimple_seq *pre_p)
2572 {
2573 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
2574 == current_function_decl);
2575
2576 tree label = LABEL_EXPR_LABEL (*expr_p);
2577 glabel *label_stmt = gimple_build_label (label);
2578 gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
2579 gimplify_seq_add_stmt (pre_p, label_stmt);
2580
2581 if (lookup_attribute ("cold", DECL_ATTRIBUTES (label)))
2582 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_COLD_LABEL,
2583 NOT_TAKEN));
2584 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label)))
2585 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_HOT_LABEL,
2586 TAKEN));
2587
2588 return GS_ALL_DONE;
2589 }
2590
2591 /* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P. */
2592
2593 static enum gimplify_status
2594 gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
2595 {
2596 struct gimplify_ctx *ctxp;
2597 glabel *label_stmt;
2598
2599 /* Invalid programs can play Duff's Device type games with, for example,
2600 #pragma omp parallel. At least in the C front end, we don't
2601 detect such invalid branches until after gimplification, in the
2602 diagnose_omp_blocks pass. */
2603 for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
2604 if (ctxp->case_labels.exists ())
2605 break;
2606
2607 tree label = CASE_LABEL (*expr_p);
2608 label_stmt = gimple_build_label (label);
2609 gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
2610 ctxp->case_labels.safe_push (*expr_p);
2611 gimplify_seq_add_stmt (pre_p, label_stmt);
2612
2613 if (lookup_attribute ("cold", DECL_ATTRIBUTES (label)))
2614 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_COLD_LABEL,
2615 NOT_TAKEN));
2616 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label)))
2617 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_HOT_LABEL,
2618 TAKEN));
2619
2620 return GS_ALL_DONE;
2621 }
2622
2623 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
2624 if necessary. */
2625
2626 tree
2627 build_and_jump (tree *label_p)
2628 {
2629 if (label_p == NULL)
2630 /* If there's nowhere to jump, just fall through. */
2631 return NULL_TREE;
2632
2633 if (*label_p == NULL_TREE)
2634 {
2635 tree label = create_artificial_label (UNKNOWN_LOCATION);
2636 *label_p = label;
2637 }
2638
2639 return build1 (GOTO_EXPR, void_type_node, *label_p);
2640 }
2641
2642 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
2643 This also involves building a label to jump to and communicating it to
2644 gimplify_loop_expr through gimplify_ctxp->exit_label. */
2645
2646 static enum gimplify_status
2647 gimplify_exit_expr (tree *expr_p)
2648 {
2649 tree cond = TREE_OPERAND (*expr_p, 0);
2650 tree expr;
2651
2652 expr = build_and_jump (&gimplify_ctxp->exit_label);
2653 expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
2654 *expr_p = expr;
2655
2656 return GS_OK;
2657 }
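
/* For instance (illustrative): EXIT_EXPR <i >= n> is rewritten to the
   equivalent of

     if (i >= n) goto exit_label;

   where exit_label is shared through gimplify_ctxp->exit_label and
   emitted after the loop body by gimplify_loop_expr.  */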
2658
2659 /* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is
2660 different from its canonical type, wrap the whole thing inside a
2661 NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
2662 type.
2663
2664 The canonical type of a COMPONENT_REF is the type of the field being
2665 referenced--unless the field is a bit-field which can be read directly
2666 in a smaller mode, in which case the canonical type is the
2667 sign-appropriate type corresponding to that mode. */
2668
2669 static void
2670 canonicalize_component_ref (tree *expr_p)
2671 {
2672 tree expr = *expr_p;
2673 tree type;
2674
2675 gcc_assert (TREE_CODE (expr) == COMPONENT_REF);
2676
2677 if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
2678 type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
2679 else
2680 type = TREE_TYPE (TREE_OPERAND (expr, 1));
2681
2682 /* One could argue that all the stuff below is not necessary for
2683 the non-bitfield case, and that a needed type adjustment there
2684 should instead be declared a FE error. */
2685 if (TREE_TYPE (expr) != type)
2686 {
2687 #ifdef ENABLE_TYPES_CHECKING
2688 tree old_type = TREE_TYPE (expr);
2689 #endif
2690 int type_quals;
2691
2692 /* We need to preserve qualifiers and propagate them from
2693 operand 0. */
2694 type_quals = TYPE_QUALS (type)
2695 | TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
2696 if (TYPE_QUALS (type) != type_quals)
2697 type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);
2698
2699 /* Set the type of the COMPONENT_REF to the underlying type. */
2700 TREE_TYPE (expr) = type;
2701
2702 #ifdef ENABLE_TYPES_CHECKING
2703 /* It is now a FE error if the conversion from the canonical
2704 type to the original expression type is not useless. */
2705 gcc_assert (useless_type_conversion_p (old_type, type));
2706 #endif
2707 }
2708 }
2709
2710 /* If a NOP conversion is changing a pointer to array of foo to a pointer
2711 to foo, embed that change in the ADDR_EXPR by converting
2712 T array[U];
2713 (T *)&array
2714 ==>
2715 &array[L]
2716 where L is the lower bound. For simplicity, only do this for constant
2717 lower bound.
2718 The constraint is that the type of &array[L] is trivially convertible
2719 to T *. */
2720
2721 static void
2722 canonicalize_addr_expr (tree *expr_p)
2723 {
2724 tree expr = *expr_p;
2725 tree addr_expr = TREE_OPERAND (expr, 0);
2726 tree datype, ddatype, pddatype;
2727
2728 /* We simplify only conversions from an ADDR_EXPR to a pointer type. */
2729 if (!POINTER_TYPE_P (TREE_TYPE (expr))
2730 || TREE_CODE (addr_expr) != ADDR_EXPR)
2731 return;
2732
2733 /* The addr_expr type should be a pointer to an array. */
2734 datype = TREE_TYPE (TREE_TYPE (addr_expr));
2735 if (TREE_CODE (datype) != ARRAY_TYPE)
2736 return;
2737
2738 /* The pointer-to-element type must be trivially convertible to
2739 the expression pointer type. */
2740 ddatype = TREE_TYPE (datype);
2741 pddatype = build_pointer_type (ddatype);
2742 if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
2743 pddatype))
2744 return;
2745
2746 /* The lower bound and element sizes must be constant. */
2747 if (!TYPE_SIZE_UNIT (ddatype)
2748 || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
2749 || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
2750 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
2751 return;
2752
2753 /* All checks succeeded. Build a new node to merge the cast. */
2754 *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
2755 TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
2756 NULL_TREE, NULL_TREE);
2757 *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);
2758
2759 /* We can have stripped a required restrict qualifier above. */
2760 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
2761 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
2762 }
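
/* Concrete instance (illustrative): given

     int a[8];
     int *p = (int *) &a;

   every check above succeeds and the conversion is folded into the
   address computation:

     int *p = &a[0];

   exposing the element reference to later folding.  */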
2763
2764 /* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions
2765 underneath as appropriate. */
2766
2767 static enum gimplify_status
2768 gimplify_conversion (tree *expr_p)
2769 {
2770 location_t loc = EXPR_LOCATION (*expr_p);
2771 gcc_assert (CONVERT_EXPR_P (*expr_p));
2772
2773 /* First strip away all but the outermost conversion. */
2774 STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));
2775
2776 /* And remove the outermost conversion if it's useless. */
2777 if (tree_ssa_useless_type_conversion (*expr_p))
2778 *expr_p = TREE_OPERAND (*expr_p, 0);
2779
2780 /* If we still have a conversion at the toplevel,
2781 then canonicalize some constructs. */
2782 if (CONVERT_EXPR_P (*expr_p))
2783 {
2784 tree sub = TREE_OPERAND (*expr_p, 0);
2785
2786 /* If a NOP conversion is changing the type of a COMPONENT_REF
2787 expression, then canonicalize its type now in order to expose more
2788 redundant conversions. */
2789 if (TREE_CODE (sub) == COMPONENT_REF)
2790 canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));
2791
2792 /* If a NOP conversion is changing a pointer to array of foo
2793 to a pointer to foo, embed that change in the ADDR_EXPR. */
2794 else if (TREE_CODE (sub) == ADDR_EXPR)
2795 canonicalize_addr_expr (expr_p);
2796 }
2797
2798 /* If we have a conversion to a non-register type force the
2799 use of a VIEW_CONVERT_EXPR instead. */
2800 if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
2801 *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
2802 TREE_OPERAND (*expr_p, 0));
2803
2804 /* Canonicalize CONVERT_EXPR to NOP_EXPR. */
2805 if (TREE_CODE (*expr_p) == CONVERT_EXPR)
2806 TREE_SET_CODE (*expr_p, NOP_EXPR);
2807
2808 return GS_OK;
2809 }
2810
2811 /* Gimplify a VAR_DECL or PARM_DECL. Return GS_OK if we expanded a
2812 DECL_VALUE_EXPR, and it's worth re-examining things. */
2813
2814 static enum gimplify_status
2815 gimplify_var_or_parm_decl (tree *expr_p)
2816 {
2817 tree decl = *expr_p;
2818
2819 /* ??? If this is a local variable, and it has not been seen in any
2820 outer BIND_EXPR, then it's probably the result of a duplicate
2821 declaration, for which we've already issued an error. It would
2822 be really nice if the front end wouldn't leak these at all.
2823 Currently the only known culprit is C++ destructors, as seen
2824 in g++.old-deja/g++.jason/binding.C. */
2825 if (VAR_P (decl)
2826 && !DECL_SEEN_IN_BIND_EXPR_P (decl)
2827 && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
2828 && decl_function_context (decl) == current_function_decl)
2829 {
2830 gcc_assert (seen_error ());
2831 return GS_ERROR;
2832 }
2833
2834 /* When within an OMP context, notice uses of variables. */
2835 if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
2836 return GS_ALL_DONE;
2837
2838 /* If the decl is an alias for another expression, substitute it now. */
2839 if (DECL_HAS_VALUE_EXPR_P (decl))
2840 {
2841 *expr_p = unshare_expr (DECL_VALUE_EXPR (decl));
2842 return GS_OK;
2843 }
2844
2845 return GS_ALL_DONE;
2846 }
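
/* Example (illustrative): a VLA "char buf[n]" received a
   DECL_VALUE_EXPR of *buf.1 from gimplify_vla_decl, so a later use of
   BUF reaching this function is replaced by an unshared copy of *buf.1;
   the GS_OK return tells the caller to re-gimplify the substituted
   expression.  */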
2847
2848 /* Recalculate the value of the TREE_SIDE_EFFECTS flag for T. */
2849
2850 static void
2851 recalculate_side_effects (tree t)
2852 {
2853 enum tree_code code = TREE_CODE (t);
2854 int len = TREE_OPERAND_LENGTH (t);
2855 int i;
2856
2857 switch (TREE_CODE_CLASS (code))
2858 {
2859 case tcc_expression:
2860 switch (code)
2861 {
2862 case INIT_EXPR:
2863 case MODIFY_EXPR:
2864 case VA_ARG_EXPR:
2865 case PREDECREMENT_EXPR:
2866 case PREINCREMENT_EXPR:
2867 case POSTDECREMENT_EXPR:
2868 case POSTINCREMENT_EXPR:
2869 /* All of these have side-effects, no matter what their
2870 operands are. */
2871 return;
2872
2873 default:
2874 break;
2875 }
2876 /* Fall through. */
2877
2878 case tcc_comparison: /* a comparison expression */
2879 case tcc_unary: /* a unary arithmetic expression */
2880 case tcc_binary: /* a binary arithmetic expression */
2881 case tcc_reference: /* a reference */
2882 case tcc_vl_exp: /* a function call */
2883 TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
2884 for (i = 0; i < len; ++i)
2885 {
2886 tree op = TREE_OPERAND (t, i);
2887 if (op && TREE_SIDE_EFFECTS (op))
2888 TREE_SIDE_EFFECTS (t) = 1;
2889 }
2890 break;
2891
2892 case tcc_constant:
2893 /* No side-effects. */
2894 return;
2895
2896 default:
2897 gcc_unreachable ();
2898 }
2899 }
2900
2901 /* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
2902 node *EXPR_P.
2903
2904 compound_lval
2905 : min_lval '[' val ']'
2906 | min_lval '.' ID
2907 | compound_lval '[' val ']'
2908 | compound_lval '.' ID
2909
2910 This is not part of the original SIMPLE definition, which separates
2911 array and member references, but it seems reasonable to handle them
2912 together. Also, this way we don't run into problems with union
2913 aliasing; gcc requires that for accesses through a union to alias, the
2914 union reference must be explicit, which was not always the case when we
2915 were splitting up array and member refs.
2916
2917 PRE_P points to the sequence where side effects that must happen before
2918 *EXPR_P should be stored.
2919
2920 POST_P points to the sequence where side effects that must happen after
2921 *EXPR_P should be stored. */
2922
2923 static enum gimplify_status
2924 gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
2925 fallback_t fallback)
2926 {
2927 tree *p;
2928 enum gimplify_status ret = GS_ALL_DONE, tret;
2929 int i;
2930 location_t loc = EXPR_LOCATION (*expr_p);
2931 tree expr = *expr_p;
2932
2933 /* Create a stack of the subexpressions so later we can walk them in
2934 order from inner to outer. */
2935 auto_vec<tree, 10> expr_stack;
2936
2937 /* We can handle anything that get_inner_reference can deal with. */
2938 for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
2939 {
2940 restart:
2941 /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs. */
2942 if (TREE_CODE (*p) == INDIRECT_REF)
2943 *p = fold_indirect_ref_loc (loc, *p);
2944
2945 if (handled_component_p (*p))
2946 ;
2947 /* Expand DECL_VALUE_EXPR now. In some cases that may expose
2948 additional COMPONENT_REFs. */
2949 else if ((VAR_P (*p) || TREE_CODE (*p) == PARM_DECL)
2950 && gimplify_var_or_parm_decl (p) == GS_OK)
2951 goto restart;
2952 else
2953 break;
2954
2955 expr_stack.safe_push (*p);
2956 }
2957
2958 gcc_assert (expr_stack.length ());
2959
2960 /* Now EXPR_STACK is a stack of pointers to all the refs we've
2961 walked through and P points to the innermost expression.
2962
2963    Java requires that we elaborate nodes in source order.  That
2964 means we must gimplify the inner expression followed by each of
2965 the indices, in order. But we can't gimplify the inner
2966 expression until we deal with any variable bounds, sizes, or
2967 positions in order to deal with PLACEHOLDER_EXPRs.
2968
2969 So we do this in three steps. First we deal with the annotations
2970 for any variables in the components, then we gimplify the base,
2971 then we gimplify any indices, from left to right. */
2972 for (i = expr_stack.length () - 1; i >= 0; i--)
2973 {
2974 tree t = expr_stack[i];
2975
2976 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
2977 {
2978 /* Gimplify the low bound and element type size and put them into
2979 the ARRAY_REF. If these values are set, they have already been
2980 gimplified. */
2981 if (TREE_OPERAND (t, 2) == NULL_TREE)
2982 {
2983 tree low = unshare_expr (array_ref_low_bound (t));
2984 if (!is_gimple_min_invariant (low))
2985 {
2986 TREE_OPERAND (t, 2) = low;
2987 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
2988 post_p, is_gimple_reg,
2989 fb_rvalue);
2990 ret = MIN (ret, tret);
2991 }
2992 }
2993 else
2994 {
2995 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
2996 is_gimple_reg, fb_rvalue);
2997 ret = MIN (ret, tret);
2998 }
2999
3000 if (TREE_OPERAND (t, 3) == NULL_TREE)
3001 {
3002 tree elmt_size = array_ref_element_size (t);
3003 if (!is_gimple_min_invariant (elmt_size))
3004 {
3005 elmt_size = unshare_expr (elmt_size);
3006 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
3007 tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));
3008
3009 /* Divide the element size by the alignment of the element
3010 type (above). */
3011 elmt_size = size_binop_loc (loc, EXACT_DIV_EXPR,
3012 elmt_size, factor);
3013
3014 TREE_OPERAND (t, 3) = elmt_size;
3015 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
3016 post_p, is_gimple_reg,
3017 fb_rvalue);
3018 ret = MIN (ret, tret);
3019 }
3020 }
3021 else
3022 {
3023 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
3024 is_gimple_reg, fb_rvalue);
3025 ret = MIN (ret, tret);
3026 }
3027 }
3028 else if (TREE_CODE (t) == COMPONENT_REF)
3029 {
3030 /* Set the field offset into T and gimplify it. */
3031 if (TREE_OPERAND (t, 2) == NULL_TREE)
3032 {
3033 tree offset = component_ref_field_offset (t);
3034 if (!is_gimple_min_invariant (offset))
3035 {
3036 offset = unshare_expr (offset);
3037 tree field = TREE_OPERAND (t, 1);
3038 tree factor
3039 = size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);
3040
3041 /* Divide the offset by its alignment. */
3042 offset = size_binop_loc (loc, EXACT_DIV_EXPR,
3043 offset, factor);
3044
3045 TREE_OPERAND (t, 2) = offset;
3046 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
3047 post_p, is_gimple_reg,
3048 fb_rvalue);
3049 ret = MIN (ret, tret);
3050 }
3051 }
3052 else
3053 {
3054 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
3055 is_gimple_reg, fb_rvalue);
3056 ret = MIN (ret, tret);
3057 }
3058 }
3059 }
3060
3061 /* Step 2 is to gimplify the base expression. Make sure lvalue is set
3062 so as to match the min_lval predicate. Failure to do so may result
3063 in the creation of large aggregate temporaries. */
3064 tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
3065 fallback | fb_lvalue);
3066 ret = MIN (ret, tret);
3067
3068 /* And finally, the indices and operands of ARRAY_REF. During this
3069 loop we also remove any useless conversions. */
3070 for (; expr_stack.length () > 0; )
3071 {
3072 tree t = expr_stack.pop ();
3073
3074 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3075 {
3076 /* Gimplify the dimension. */
3077 if (!is_gimple_min_invariant (TREE_OPERAND (t, 1)))
3078 {
3079 tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
3080 is_gimple_val, fb_rvalue);
3081 ret = MIN (ret, tret);
3082 }
3083 }
3084
3085 STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));
3086
3087 /* The innermost expression P may have originally had
3088 TREE_SIDE_EFFECTS set which would have caused all the outer
3089 expressions in *EXPR_P leading to P to also have had
3090 TREE_SIDE_EFFECTS set. */
3091 recalculate_side_effects (t);
3092 }
3093
3094 /* If the outermost expression is a COMPONENT_REF, canonicalize its type. */
3095 if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
3096 {
3097 canonicalize_component_ref (expr_p);
3098 }
3099
3100 expr_stack.release ();
3101
3102 gcc_assert (*expr_p == expr || ret != GS_ALL_DONE);
3103
3104 return ret;
3105 }
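
/* Illustrative walk-through (temporary name invented): for a[i].f the
   three steps above run in order: first any variable low bounds,
   element sizes or field offsets are gimplified into operands 2 and 3
   of the refs; then the base "a" is gimplified as a min_lval; finally
   the index is reduced to a GIMPLE value, e.g.

     D.1 = i;
     ... = a[D.1].f;

   so PLACEHOLDER_EXPRs in variable-sized types are resolved before the
   base and indices are touched.  */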
3106
3107 /* Gimplify the self modifying expression pointed to by EXPR_P
3108 (++, --, +=, -=).
3109
3110 PRE_P points to the list where side effects that must happen before
3111 *EXPR_P should be stored.
3112
3113 POST_P points to the list where side effects that must happen after
3114 *EXPR_P should be stored.
3115
3116 WANT_VALUE is nonzero iff we want to use the value of this expression
3117 in another expression.
3118
3119 ARITH_TYPE is the type the computation should be performed in. */
3120
3121 enum gimplify_status
3122 gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3123 bool want_value, tree arith_type)
3124 {
3125 enum tree_code code;
3126 tree lhs, lvalue, rhs, t1;
3127 gimple_seq post = NULL, *orig_post_p = post_p;
3128 bool postfix;
3129 enum tree_code arith_code;
3130 enum gimplify_status ret;
3131 location_t loc = EXPR_LOCATION (*expr_p);
3132
3133 code = TREE_CODE (*expr_p);
3134
3135 gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
3136 || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);
3137
3138 /* Prefix or postfix? */
3139 if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
3140 /* Faster to treat as prefix if result is not used. */
3141 postfix = want_value;
3142 else
3143 postfix = false;
3144
3145 /* For postfix, make sure the inner expression's post side effects
3146 are executed after side effects from this expression. */
3147 if (postfix)
3148 post_p = &post;
3149
3150 /* Add or subtract? */
3151 if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
3152 arith_code = PLUS_EXPR;
3153 else
3154 arith_code = MINUS_EXPR;
3155
3156 /* Gimplify the LHS into a GIMPLE lvalue. */
3157 lvalue = TREE_OPERAND (*expr_p, 0);
3158 ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
3159 if (ret == GS_ERROR)
3160 return ret;
3161
3162 /* Extract the operands to the arithmetic operation. */
3163 lhs = lvalue;
3164 rhs = TREE_OPERAND (*expr_p, 1);
3165
3166 /* For a postfix operator, we evaluate the LHS to an rvalue and then use
3167      that both as the result value and in the post-queue operation. */
3168 if (postfix)
3169 {
3170 ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
3171 if (ret == GS_ERROR)
3172 return ret;
3173
3174 lhs = get_initialized_tmp_var (lhs, pre_p);
3175 }
3176
3177 /* For pointer increment and decrement, use POINTER_PLUS_EXPR. */
3178 if (POINTER_TYPE_P (TREE_TYPE (lhs)))
3179 {
3180 rhs = convert_to_ptrofftype_loc (loc, rhs);
3181 if (arith_code == MINUS_EXPR)
3182 rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
3183 t1 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (*expr_p), lhs, rhs);
3184 }
3185 else
3186 t1 = fold_convert (TREE_TYPE (*expr_p),
3187 fold_build2 (arith_code, arith_type,
3188 fold_convert (arith_type, lhs),
3189 fold_convert (arith_type, rhs)));
3190
3191 if (postfix)
3192 {
3193 gimplify_assign (lvalue, t1, pre_p);
3194 gimplify_seq_add_seq (orig_post_p, post);
3195 *expr_p = lhs;
3196 return GS_ALL_DONE;
3197 }
3198 else
3199 {
3200 *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
3201 return GS_OK;
3202 }
3203 }
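
/* Illustrative example (temporary name invented): for "use (i++);" with
   WANT_VALUE set, the code above produces

     i.0 = i;
     i = i.0 + 1;
     use (i.0);

   i.e. the saved rvalue serves both as the result and as the operand of
   the increment; an unused "i++", like any prefix form, degenerates to
   plain i = i + 1.  */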
3204
3205 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
3206
3207 static void
3208 maybe_with_size_expr (tree *expr_p)
3209 {
3210 tree expr = *expr_p;
3211 tree type = TREE_TYPE (expr);
3212 tree size;
3213
3214 /* If we've already wrapped this or the type is error_mark_node, we can't do
3215 anything. */
3216 if (TREE_CODE (expr) == WITH_SIZE_EXPR
3217 || type == error_mark_node)
3218 return;
3219
3220 /* If the size isn't known or is a constant, we have nothing to do. */
3221 size = TYPE_SIZE_UNIT (type);
3222 if (!size || poly_int_tree_p (size))
3223 return;
3224
3225 /* Otherwise, make a WITH_SIZE_EXPR. */
3226 size = unshare_expr (size);
3227 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
3228 *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
3229 }
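
/* Illustrative sketch (shape approximate): passing a variable-length
   array "char a[n]" as a function argument wraps it as something like

     WITH_SIZE_EXPR <a, (sizetype) SAVE_EXPR <n>>

   so the dynamic size of the object survives into the lowering of the
   call or assignment that consumes it.  */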
3230
3231 /* Helper for gimplify_call_expr.  Gimplify a single argument *ARG_P.
3232 Store any side-effects in PRE_P. CALL_LOCATION is the location of
3233 the CALL_EXPR. If ALLOW_SSA is set the actual parameter may be
3234 gimplified to an SSA name. */
3235
3236 enum gimplify_status
3237 gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location,
3238 bool allow_ssa)
3239 {
3240 bool (*test) (tree);
3241 fallback_t fb;
3242
3243 /* In general, we allow lvalues for function arguments to avoid
3244 extra overhead of copying large aggregates out of even larger
3245 aggregates into temporaries only to copy the temporaries to
3246 the argument list. Make optimizers happy by pulling out to
3247 temporaries those types that fit in registers. */
3248 if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
3249 test = is_gimple_val, fb = fb_rvalue;
3250 else
3251 {
3252 test = is_gimple_lvalue, fb = fb_either;
3253 /* Also strip a TARGET_EXPR that would force an extra copy. */
3254 if (TREE_CODE (*arg_p) == TARGET_EXPR)
3255 {
3256 tree init = TARGET_EXPR_INITIAL (*arg_p);
3257 if (init
3258 && !VOID_TYPE_P (TREE_TYPE (init)))
3259 *arg_p = init;
3260 }
3261 }
3262
3263 /* If this is a variable sized type, we must remember the size. */
3264 maybe_with_size_expr (arg_p);
3265
3266 /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c. */
3267 /* Make sure arguments have the same location as the function call
3268 itself. */
3269 protected_set_expr_location (*arg_p, call_location);
3270
3271 /* There is a sequence point before a function call. Side effects in
3272 the argument list must occur before the actual call. So, when
3273 gimplifying arguments, force gimplify_expr to use an internal
3274 post queue which is then appended to the end of PRE_P. */
3275 return gimplify_expr (arg_p, pre_p, NULL, test, fb, allow_ssa);
3276 }
3277
3278 /* Don't fold inside offloading or taskreg regions: it can break code by
3279 adding decl references that weren't in the source. We'll do it during
3280    the omplower pass instead. */
3281
3282 static bool
3283 maybe_fold_stmt (gimple_stmt_iterator *gsi)
3284 {
3285 struct gimplify_omp_ctx *ctx;
3286 for (ctx = gimplify_omp_ctxp; ctx; ctx = ctx->outer_context)
3287 if ((ctx->region_type & (ORT_TARGET | ORT_PARALLEL | ORT_TASK)) != 0)
3288 return false;
3289 else if ((ctx->region_type & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
3290 return false;
3291 /* Delay folding of builtins until the IL is in a consistent state
3292 so the diagnostic machinery can do a better job. */
3293 if (gimple_call_builtin_p (gsi_stmt (*gsi)))
3294 return false;
3295 return fold_stmt (gsi);
3296 }
3297
3298 /* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
3299 WANT_VALUE is true if the result of the call is desired. */
3300
3301 static enum gimplify_status
3302 gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
3303 {
3304 tree fndecl, parms, p, fnptrtype;
3305 enum gimplify_status ret;
3306 int i, nargs;
3307 gcall *call;
3308 bool builtin_va_start_p = false;
3309 location_t loc = EXPR_LOCATION (*expr_p);
3310
3311 gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);
3312
3313 /* For reliable diagnostics during inlining, it is necessary that
3314 every call_expr be annotated with file and line. */
3315 if (! EXPR_HAS_LOCATION (*expr_p))
3316 SET_EXPR_LOCATION (*expr_p, input_location);
3317
3318 /* Gimplify internal functions created in the FEs. */
3319 if (CALL_EXPR_FN (*expr_p) == NULL_TREE)
3320 {
3321 if (want_value)
3322 return GS_ALL_DONE;
3323
3324 nargs = call_expr_nargs (*expr_p);
3325 enum internal_fn ifn = CALL_EXPR_IFN (*expr_p);
3326 auto_vec<tree> vargs (nargs);
3327
3328 for (i = 0; i < nargs; i++)
3329 {
3330 gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
3331 EXPR_LOCATION (*expr_p));
3332 vargs.quick_push (CALL_EXPR_ARG (*expr_p, i));
3333 }
3334
3335 gcall *call = gimple_build_call_internal_vec (ifn, vargs);
3336 gimple_call_set_nothrow (call, TREE_NOTHROW (*expr_p));
3337 gimplify_seq_add_stmt (pre_p, call);
3338 return GS_ALL_DONE;
3339 }
3340
3341 /* This may be a call to a builtin function.
3342
3343 Builtin function calls may be transformed into different
3344 (and more efficient) builtin function calls under certain
3345 circumstances. Unfortunately, gimplification can muck things
3346 up enough that the builtin expanders are not aware that certain
3347 transformations are still valid.
3348
3349 So we attempt transformation/gimplification of the call before
3350 we gimplify the CALL_EXPR. At this time we do not manage to
3351 transform all calls in the same manner as the expanders do, but
3352 we do transform most of them. */
3353 fndecl = get_callee_fndecl (*expr_p);
3354 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
3355 switch (DECL_FUNCTION_CODE (fndecl))
3356 {
3357 CASE_BUILT_IN_ALLOCA:
3358 /* If the call has been built for a variable-sized object, then we
3359 want to restore the stack level when the enclosing BIND_EXPR is
3360 exited to reclaim the allocated space; otherwise, we need to do
3361 precisely the opposite and preserve the latest stack level. */
3362 if (CALL_ALLOCA_FOR_VAR_P (*expr_p))
3363 gimplify_ctxp->save_stack = true;
3364 else
3365 gimplify_ctxp->keep_stack = true;
3366 break;
3367
3368 case BUILT_IN_VA_START:
3369 {
3370 builtin_va_start_p = true;
3371 if (call_expr_nargs (*expr_p) < 2)
3372 {
3373 error ("too few arguments to function %<va_start%>");
3374 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
3375 return GS_OK;
3376 }
3377
3378 if (fold_builtin_next_arg (*expr_p, true))
3379 {
3380 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
3381 return GS_OK;
3382 }
3383 break;
3384 }
3385
3386 case BUILT_IN_EH_RETURN:
3387 cfun->calls_eh_return = true;
3388 break;
3389
3390 default:
3391 ;
3392 }
3393 if (fndecl && fndecl_built_in_p (fndecl))
3394 {
3395 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
3396 if (new_tree && new_tree != *expr_p)
3397 {
3398 /* There was a transformation of this call which computes the
3399 same value, but in a more efficient way. Return and try
3400 again. */
3401 *expr_p = new_tree;
3402 return GS_OK;
3403 }
3404 }
3405
3406 /* Remember the original function pointer type. */
3407 fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
3408
3409 if (flag_openmp
3410 && fndecl
3411 && cfun
3412 && (cfun->curr_properties & PROP_gimple_any) == 0)
3413 {
3414 tree variant = omp_resolve_declare_variant (fndecl);
3415 if (variant != fndecl)
3416 CALL_EXPR_FN (*expr_p) = build1 (ADDR_EXPR, fnptrtype, variant);
3417 }
3418
3419 /* There is a sequence point before the call, so any side effects in
3420 the calling expression must occur before the actual call. Force
3421 gimplify_expr to use an internal post queue. */
3422 ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
3423 is_gimple_call_addr, fb_rvalue);
3424
3425 nargs = call_expr_nargs (*expr_p);
3426
3427 /* Get argument types for verification. */
3428 fndecl = get_callee_fndecl (*expr_p);
3429 parms = NULL_TREE;
3430 if (fndecl)
3431 parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3432 else
3433 parms = TYPE_ARG_TYPES (TREE_TYPE (fnptrtype));
3434
3435 if (fndecl && DECL_ARGUMENTS (fndecl))
3436 p = DECL_ARGUMENTS (fndecl);
3437 else if (parms)
3438 p = parms;
3439 else
3440 p = NULL_TREE;
3441 for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
3442 ;
3443
3444 /* If the last argument is __builtin_va_arg_pack () and it is not
3445 passed as a named argument, decrease the number of CALL_EXPR
3446 arguments and instead set the CALL_EXPR_VA_ARG_PACK flag. */
3447 if (!p
3448 && i < nargs
3449 && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
3450 {
3451 tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
3452 tree last_arg_fndecl = get_callee_fndecl (last_arg);
3453
3454 if (last_arg_fndecl
3455 && fndecl_built_in_p (last_arg_fndecl, BUILT_IN_VA_ARG_PACK))
3456 {
3457 tree call = *expr_p;
3458
3459 --nargs;
3460 *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
3461 CALL_EXPR_FN (call),
3462 nargs, CALL_EXPR_ARGP (call));
3463
3464 /* Copy all CALL_EXPR flags, location and block, except
3465 CALL_EXPR_VA_ARG_PACK flag. */
3466 CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
3467 CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
3468 CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
3469 = CALL_EXPR_RETURN_SLOT_OPT (call);
3470 CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
3471 SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));
3472
3473 /* Set CALL_EXPR_VA_ARG_PACK. */
3474 CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
3475 }
3476 }
3477
3478 /* If the call returns twice then after building the CFG the call
3479 argument computations will no longer dominate the call because
3480 we add an abnormal incoming edge to the call. So do not use SSA
3481 vars there. */
3482 bool returns_twice = call_expr_flags (*expr_p) & ECF_RETURNS_TWICE;
3483
3484 /* Gimplify the function arguments. */
3485 if (nargs > 0)
3486 {
3487 for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
3488 PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
3489 PUSH_ARGS_REVERSED ? i-- : i++)
3490 {
3491 enum gimplify_status t;
3492
3493 /* Avoid gimplifying the second argument to va_start, which needs to
3494 be the plain PARM_DECL. */
3495 if ((i != 1) || !builtin_va_start_p)
3496 {
3497 t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
3498 EXPR_LOCATION (*expr_p), ! returns_twice);
3499
3500 if (t == GS_ERROR)
3501 ret = GS_ERROR;
3502 }
3503 }
3504 }
3505
3506 /* Gimplify the static chain. */
3507 if (CALL_EXPR_STATIC_CHAIN (*expr_p))
3508 {
3509 if (fndecl && !DECL_STATIC_CHAIN (fndecl))
3510 CALL_EXPR_STATIC_CHAIN (*expr_p) = NULL;
3511 else
3512 {
3513 enum gimplify_status t;
3514 t = gimplify_arg (&CALL_EXPR_STATIC_CHAIN (*expr_p), pre_p,
3515 EXPR_LOCATION (*expr_p), ! returns_twice);
3516 if (t == GS_ERROR)
3517 ret = GS_ERROR;
3518 }
3519 }
3520
3521 /* Verify the function result. */
3522 if (want_value && fndecl
3523 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype))))
3524 {
3525 error_at (loc, "using result of function returning %<void%>");
3526 ret = GS_ERROR;
3527 }
3528
3529 /* Try this again in case gimplification exposed something. */
3530 if (ret != GS_ERROR)
3531 {
3532 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
3533
3534 if (new_tree && new_tree != *expr_p)
3535 {
3536 /* There was a transformation of this call which computes the
3537 same value, but in a more efficient way. Return and try
3538 again. */
3539 *expr_p = new_tree;
3540 return GS_OK;
3541 }
3542 }
3543 else
3544 {
3545 *expr_p = error_mark_node;
3546 return GS_ERROR;
3547 }
3548
3549 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
3550 decl. This allows us to eliminate redundant or useless
3551 calls to "const" functions. */
3552 if (TREE_CODE (*expr_p) == CALL_EXPR)
3553 {
3554 int flags = call_expr_flags (*expr_p);
3555 if (flags & (ECF_CONST | ECF_PURE)
3556 /* An infinite loop is considered a side effect. */
3557 && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
3558 TREE_SIDE_EFFECTS (*expr_p) = 0;
3559 }
3560
3561 /* If the value is not needed by the caller, emit a new GIMPLE_CALL
3562 and clear *EXPR_P. Otherwise, leave *EXPR_P in its gimplified
3563 form and delegate the creation of a GIMPLE_CALL to
3564 gimplify_modify_expr. This is always possible because when
3565 WANT_VALUE is true, the caller wants the result of this call into
3566 a temporary, which means that we will emit an INIT_EXPR in
3567 internal_get_tmp_var which will then be handled by
3568 gimplify_modify_expr. */
3569 if (!want_value)
3570 {
3571 /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
3572 have to do is replicate it as a GIMPLE_CALL tuple. */
3573 gimple_stmt_iterator gsi;
3574 call = gimple_build_call_from_tree (*expr_p, fnptrtype);
3575 notice_special_calls (call);
3576 gimplify_seq_add_stmt (pre_p, call);
3577 gsi = gsi_last (*pre_p);
3578 maybe_fold_stmt (&gsi);
3579 *expr_p = NULL_TREE;
3580 }
3581 else
3582 /* Remember the original function type. */
3583 CALL_EXPR_FN (*expr_p) = build1 (NOP_EXPR, fnptrtype,
3584 CALL_EXPR_FN (*expr_p));
3585
3586 return ret;
3587 }
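
/* Illustrative example (temporaries invented): with WANT_VALUE false,

     f (g (), h ());

   gimplifies to roughly

     D.1 = g ();
     D.2 = h ();
     f (D.1, D.2);

   each register-typed argument is reduced to a GIMPLE value by
   gimplify_arg, and because the loop above honors PUSH_ARGS_REVERSED,
   the relative order of the two temporaries is target-dependent.  */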
3588
3589 /* Handle shortcut semantics in the predicate operand of a COND_EXPR by
3590 rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
3591
3592 TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
3593 condition is true or false, respectively. If null, we should generate
3594 our own to skip over the evaluation of this specific expression.
3595
3596 LOCUS is the source location of the COND_EXPR.
3597
3598 This function is the tree equivalent of do_jump.
3599
3600 shortcut_cond_r should only be called by shortcut_cond_expr. */
3601
3602 static tree
3603 shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
3604 location_t locus)
3605 {
3606 tree local_label = NULL_TREE;
3607 tree t, expr = NULL;
3608
3609 /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
3610 retain the shortcut semantics. Just insert the gotos here;
3611 shortcut_cond_expr will append the real blocks later. */
3612 if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
3613 {
3614 location_t new_locus;
3615
3616 /* Turn if (a && b) into
3617
3618 if (a); else goto no;
3619 if (b) goto yes; else goto no;
3620 (no:) */
3621
3622 if (false_label_p == NULL)
3623 false_label_p = &local_label;
3624
3625 /* Keep the original source location on the first 'if'. */
3626 t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus);
3627 append_to_statement_list (t, &expr);
3628
3629 /* Set the source location of the && on the second 'if'. */
3630 new_locus = rexpr_location (pred, locus);
3631 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
3632 new_locus);
3633 append_to_statement_list (t, &expr);
3634 }
3635 else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
3636 {
3637 location_t new_locus;
3638
3639 /* Turn if (a || b) into
3640
3641 if (a) goto yes;
3642 if (b) goto yes; else goto no;
3643 (yes:) */
3644
3645 if (true_label_p == NULL)
3646 true_label_p = &local_label;
3647
3648 /* Keep the original source location on the first 'if'. */
3649 t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus);
3650 append_to_statement_list (t, &expr);
3651
3652 /* Set the source location of the || on the second 'if'. */
3653 new_locus = rexpr_location (pred, locus);
3654 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
3655 new_locus);
3656 append_to_statement_list (t, &expr);
3657 }
3658 else if (TREE_CODE (pred) == COND_EXPR
3659 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 1)))
3660 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 2))))
3661 {
3662 location_t new_locus;
3663
3664 /* As long as we're messing with gotos, turn if (a ? b : c) into
3665 if (a)
3666 if (b) goto yes; else goto no;
3667 else
3668 if (c) goto yes; else goto no;
3669
3670 Don't do this if one of the arms has void type, which can happen
3671 in C++ when the arm is throw. */
3672
3673 /* Keep the original source location on the first 'if'. Set the source
3674 location of the ? on the second 'if'. */
3675 new_locus = rexpr_location (pred, locus);
3676 expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
3677 shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
3678 false_label_p, locus),
3679 shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
3680 false_label_p, new_locus));
3681 }
3682 else
3683 {
3684 expr = build3 (COND_EXPR, void_type_node, pred,
3685 build_and_jump (true_label_p),
3686 build_and_jump (false_label_p));
3687 SET_EXPR_LOCATION (expr, locus);
3688 }
3689
3690 if (local_label)
3691 {
3692 t = build1 (LABEL_EXPR, void_type_node, local_label);
3693 append_to_statement_list (t, &expr);
3694 }
3695
3696 return expr;
3697 }
3698
3699 /* If EXPR is a GOTO_EXPR, return it. If it is a STATEMENT_LIST, skip
3700 any of its leading DEBUG_BEGIN_STMTS and recurse on the subsequent
3701 statement, if it is the last one. Otherwise, return NULL. */
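
/* Illustrative sketch: for a hypothetical STATEMENT_LIST of the shape

     DEBUG_BEGIN_STMT
     goto l;

   this returns the GOTO_EXPR; if the statement following the debug
   markers is not the last one, or is not itself a goto, it returns
   NULL_TREE.  */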
3702
3703 static tree
3704 find_goto (tree expr)
3705 {
3706 if (!expr)
3707 return NULL_TREE;
3708
3709 if (TREE_CODE (expr) == GOTO_EXPR)
3710 return expr;
3711
3712 if (TREE_CODE (expr) != STATEMENT_LIST)
3713 return NULL_TREE;
3714
3715 tree_stmt_iterator i = tsi_start (expr);
3716
3717 while (!tsi_end_p (i) && TREE_CODE (tsi_stmt (i)) == DEBUG_BEGIN_STMT)
3718 tsi_next (&i);
3719
3720 if (!tsi_one_before_end_p (i))
3721 return NULL_TREE;
3722
3723 return find_goto (tsi_stmt (i));
3724 }
3725
3726 /* Same as find_goto, except that it returns NULL if the destination
3727 is not a LABEL_DECL. */
3728
3729 static inline tree
3730 find_goto_label (tree expr)
3731 {
3732 tree dest = find_goto (expr);
3733 if (dest && TREE_CODE (GOTO_DESTINATION (dest)) == LABEL_DECL)
3734 return dest;
3735 return NULL_TREE;
3736 }
3737
3738 /* Given a conditional expression EXPR with short-circuit boolean
3739 predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
3740 predicate apart into the equivalent sequence of conditionals. */
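
/* Illustrative sketch: the hypothetical input

     if (a && b) f (); else g ();

   is rewritten here into approximately

     if (a) ; else goto no;
     if (b) ; else goto no;
     f (); goto end;
     no: g ();
     end: ;

   before being gimplified further by the caller.  */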
3741
3742 static tree
3743 shortcut_cond_expr (tree expr)
3744 {
3745 tree pred = TREE_OPERAND (expr, 0);
3746 tree then_ = TREE_OPERAND (expr, 1);
3747 tree else_ = TREE_OPERAND (expr, 2);
3748 tree true_label, false_label, end_label, t;
3749 tree *true_label_p;
3750 tree *false_label_p;
3751 bool emit_end, emit_false, jump_over_else;
3752 bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
3753 bool else_se = else_ && TREE_SIDE_EFFECTS (else_);
3754
3755 /* First do simple transformations. */
3756 if (!else_se)
3757 {
3758 /* If there is no 'else', turn
3759 if (a && b) then c
3760 into
3761 if (a) if (b) then c. */
3762 while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
3763 {
3764 /* Keep the original source location on the first 'if'. */
3765 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
3766 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
3767 /* Set the source location of the && on the second 'if'. */
3768 if (rexpr_has_location (pred))
3769 SET_EXPR_LOCATION (expr, rexpr_location (pred));
3770 then_ = shortcut_cond_expr (expr);
3771 then_se = then_ && TREE_SIDE_EFFECTS (then_);
3772 pred = TREE_OPERAND (pred, 0);
3773 expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
3774 SET_EXPR_LOCATION (expr, locus);
3775 }
3776 }
3777
3778 if (!then_se)
3779 {
3780 /* If there is no 'then', turn
3781 if (a || b); else d
3782 into
3783 if (a); else if (b); else d. */
3784 while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
3785 {
3786 /* Keep the original source location on the first 'if'. */
3787 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
3788 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
3789 /* Set the source location of the || on the second 'if'. */
3790 if (rexpr_has_location (pred))
3791 SET_EXPR_LOCATION (expr, rexpr_location (pred));
3792 else_ = shortcut_cond_expr (expr);
3793 else_se = else_ && TREE_SIDE_EFFECTS (else_);
3794 pred = TREE_OPERAND (pred, 0);
3795 expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
3796 SET_EXPR_LOCATION (expr, locus);
3797 }
3798 }
3799
3800 /* If we're done, great. */
3801 if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
3802 && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
3803 return expr;
3804
3805 /* Otherwise we need to mess with gotos. Change
3806 if (a) c; else d;
3807 to
3808 if (a); else goto no;
3809 c; goto end;
3810 no: d; end:
3811 and recursively gimplify the condition. */
3812
3813 true_label = false_label = end_label = NULL_TREE;
3814
3815 /* If our arms just jump somewhere, hijack those labels so we don't
3816 generate jumps to jumps. */
3817
3818 if (tree then_goto = find_goto_label (then_))
3819 {
3820 true_label = GOTO_DESTINATION (then_goto);
3821 then_ = NULL;
3822 then_se = false;
3823 }
3824
3825 if (tree else_goto = find_goto_label (else_))
3826 {
3827 false_label = GOTO_DESTINATION (else_goto);
3828 else_ = NULL;
3829 else_se = false;
3830 }
3831
3832 /* If we aren't hijacking a label for the 'then' branch, it falls through. */
3833 if (true_label)
3834 true_label_p = &true_label;
3835 else
3836 true_label_p = NULL;
3837
3838 /* The 'else' branch also needs a label if it contains interesting code. */
3839 if (false_label || else_se)
3840 false_label_p = &false_label;
3841 else
3842 false_label_p = NULL;
3843
3844 /* If there was nothing else in our arms, just forward the label(s). */
3845 if (!then_se && !else_se)
3846 return shortcut_cond_r (pred, true_label_p, false_label_p,
3847 EXPR_LOC_OR_LOC (expr, input_location));
3848
3849 /* If our last subexpression already has a terminal label, reuse it. */
3850 if (else_se)
3851 t = expr_last (else_);
3852 else if (then_se)
3853 t = expr_last (then_);
3854 else
3855 t = NULL;
3856 if (t && TREE_CODE (t) == LABEL_EXPR)
3857 end_label = LABEL_EXPR_LABEL (t);
3858
3859 /* If we don't care about jumping to the 'else' branch, jump to the end
3860 if the condition is false. */
3861 if (!false_label_p)
3862 false_label_p = &end_label;
3863
3864 /* We only want to emit these labels if we aren't hijacking them. */
3865 emit_end = (end_label == NULL_TREE);
3866 emit_false = (false_label == NULL_TREE);
3867
3868 /* We only emit the jump over the else clause if we have to--if the
3869 then clause may fall through. Otherwise we can wind up with a
3870 useless jump and a useless label at the end of gimplified code,
3871 which will cause us to think that this conditional as a whole
3872 falls through even if it doesn't. If we then inline a function
3873 which ends with such a condition, that can cause us to issue an
3874 inappropriate warning about control reaching the end of a
3875 non-void function. */
3876 jump_over_else = block_may_fallthru (then_);
3877
3878 pred = shortcut_cond_r (pred, true_label_p, false_label_p,
3879 EXPR_LOC_OR_LOC (expr, input_location));
3880
3881 expr = NULL;
3882 append_to_statement_list (pred, &expr);
3883
3884 append_to_statement_list (then_, &expr);
3885 if (else_se)
3886 {
3887 if (jump_over_else)
3888 {
3889 tree last = expr_last (expr);
3890 t = build_and_jump (&end_label);
3891 if (rexpr_has_location (last))
3892 SET_EXPR_LOCATION (t, rexpr_location (last));
3893 append_to_statement_list (t, &expr);
3894 }
3895 if (emit_false)
3896 {
3897 t = build1 (LABEL_EXPR, void_type_node, false_label);
3898 append_to_statement_list (t, &expr);
3899 }
3900 append_to_statement_list (else_, &expr);
3901 }
3902 if (emit_end && end_label)
3903 {
3904 t = build1 (LABEL_EXPR, void_type_node, end_label);
3905 append_to_statement_list (t, &expr);
3906 }
3907
3908 return expr;
3909 }
3910
3911 /* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */
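
/* Illustrative sketch: for a hypothetical int operand 'i' this returns
   a conversion of 'i' to boolean_type_node, while 'x < y' or '!p' is
   simply retyped to BOOLEAN_TYPE, recursing into the operands of
   TRUTH_* expressions.  */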
3912
3913 tree
3914 gimple_boolify (tree expr)
3915 {
3916 tree type = TREE_TYPE (expr);
3917 location_t loc = EXPR_LOCATION (expr);
3918
3919 if (TREE_CODE (expr) == NE_EXPR
3920 && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR
3921 && integer_zerop (TREE_OPERAND (expr, 1)))
3922 {
3923 tree call = TREE_OPERAND (expr, 0);
3924 tree fn = get_callee_fndecl (call);
3925
3926 /* For __builtin_expect ((long) (x), y) recurse into x as well
3927 if x is truth_value_p. */
3928 if (fn
3929 && fndecl_built_in_p (fn, BUILT_IN_EXPECT)
3930 && call_expr_nargs (call) == 2)
3931 {
3932 tree arg = CALL_EXPR_ARG (call, 0);
3933 if (arg)
3934 {
3935 if (TREE_CODE (arg) == NOP_EXPR
3936 && TREE_TYPE (arg) == TREE_TYPE (call))
3937 arg = TREE_OPERAND (arg, 0);
3938 if (truth_value_p (TREE_CODE (arg)))
3939 {
3940 arg = gimple_boolify (arg);
3941 CALL_EXPR_ARG (call, 0)
3942 = fold_convert_loc (loc, TREE_TYPE (call), arg);
3943 }
3944 }
3945 }
3946 }
3947
3948 switch (TREE_CODE (expr))
3949 {
3950 case TRUTH_AND_EXPR:
3951 case TRUTH_OR_EXPR:
3952 case TRUTH_XOR_EXPR:
3953 case TRUTH_ANDIF_EXPR:
3954 case TRUTH_ORIF_EXPR:
3955 /* Also boolify the arguments of truth exprs. */
3956 TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
3957 /* FALLTHRU */
3958
3959 case TRUTH_NOT_EXPR:
3960 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
3961
3962 /* These expressions always produce boolean results. */
3963 if (TREE_CODE (type) != BOOLEAN_TYPE)
3964 TREE_TYPE (expr) = boolean_type_node;
3965 return expr;
3966
3967 case ANNOTATE_EXPR:
3968 switch ((enum annot_expr_kind) TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)))
3969 {
3970 case annot_expr_ivdep_kind:
3971 case annot_expr_unroll_kind:
3972 case annot_expr_no_vector_kind:
3973 case annot_expr_vector_kind:
3974 case annot_expr_parallel_kind:
3975 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
3976 if (TREE_CODE (type) != BOOLEAN_TYPE)
3977 TREE_TYPE (expr) = boolean_type_node;
3978 return expr;
3979 default:
3980 gcc_unreachable ();
3981 }
3982
3983 default:
3984 if (COMPARISON_CLASS_P (expr))
3985 {
3986 /* These expressions always produce boolean results. */
3987 if (TREE_CODE (type) != BOOLEAN_TYPE)
3988 TREE_TYPE (expr) = boolean_type_node;
3989 return expr;
3990 }
3991 /* Other expressions that get here must have boolean values, but
3992 might need to be converted to the appropriate mode. */
3993 if (TREE_CODE (type) == BOOLEAN_TYPE)
3994 return expr;
3995 return fold_convert_loc (loc, boolean_type_node, expr);
3996 }
3997 }
3998
3999 /* Given a conditional expression *EXPR_P without side effects, gimplify
4000 its operands. New statements are inserted to PRE_P. */
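
/* Illustrative sketch: for a hypothetical side-effect-free
   'x = p && q ? a : b', the condition is rewritten as a
   non-short-circuit TRUTH_AND_EXPR and the arms are reduced to gimple
   values, so the whole COND_EXPR can remain on the right-hand side of
   the assignment.  */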
4001
4002 static enum gimplify_status
4003 gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
4004 {
4005 tree expr = *expr_p, cond;
4006 enum gimplify_status ret, tret;
4007 enum tree_code code;
4008
4009 cond = gimple_boolify (COND_EXPR_COND (expr));
4010
4011 /* We need to handle && and || specially, as their gimplification
4012 creates pure cond_expr, thus leading to an infinite cycle otherwise. */
4013 code = TREE_CODE (cond);
4014 if (code == TRUTH_ANDIF_EXPR)
4015 TREE_SET_CODE (cond, TRUTH_AND_EXPR);
4016 else if (code == TRUTH_ORIF_EXPR)
4017 TREE_SET_CODE (cond, TRUTH_OR_EXPR);
4018 ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_condexpr, fb_rvalue);
4019 COND_EXPR_COND (*expr_p) = cond;
4020
4021 tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
4022 is_gimple_val, fb_rvalue);
4023 ret = MIN (ret, tret);
4024 tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
4025 is_gimple_val, fb_rvalue);
4026
4027 return MIN (ret, tret);
4028 }
4029
4030 /* Return true if evaluating EXPR could trap.
4031 EXPR is GENERIC, while tree_could_trap_p can be called
4032 only on GIMPLE. */
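
/* Illustrative sketch: 'a + 1' with gimple-value operands cannot trap,
   whereas the hypothetical GENERIC expression '(x / y) + 1' can,
   because the recursion finds the embedded division that a single
   tree_could_trap_p query on the PLUS_EXPR would miss.  */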
4033
4034 bool
4035 generic_expr_could_trap_p (tree expr)
4036 {
4037 unsigned i, n;
4038
4039 if (!expr || is_gimple_val (expr))
4040 return false;
4041
4042 if (!EXPR_P (expr) || tree_could_trap_p (expr))
4043 return true;
4044
4045 n = TREE_OPERAND_LENGTH (expr);
4046 for (i = 0; i < n; i++)
4047 if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
4048 return true;
4049
4050 return false;
4051 }
4052
4053 /* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
4054 into
4055
4056 if (p)                      if (p)
4057   t1 = a;                     a;
4058 else             or         else
4059   t1 = b;                     b;
4060 t1;
4061
4062 The second form is used when *EXPR_P is of type void.
4063
4064 PRE_P points to the list where side effects that must happen before
4065 *EXPR_P should be stored. */
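
/* Illustrative sketch: the hypothetical statement 'x = p ? f () : 0;'
   becomes roughly

     if (p != 0) goto L1; else goto L2;
     L1: iftmp = f (); goto L3;
     L2: iftmp = 0;
     L3: x = iftmp;

   with the branches and labels emitted into PRE_P.  */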
4066
4067 static enum gimplify_status
4068 gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
4069 {
4070 tree expr = *expr_p;
4071 tree type = TREE_TYPE (expr);
4072 location_t loc = EXPR_LOCATION (expr);
4073 tree tmp, arm1, arm2;
4074 enum gimplify_status ret;
4075 tree label_true, label_false, label_cont;
4076 bool have_then_clause_p, have_else_clause_p;
4077 gcond *cond_stmt;
4078 enum tree_code pred_code;
4079 gimple_seq seq = NULL;
4080
4081 /* If this COND_EXPR has a value, copy the values into a temporary within
4082 the arms. */
4083 if (!VOID_TYPE_P (type))
4084 {
4085 tree then_ = TREE_OPERAND (expr, 1), else_ = TREE_OPERAND (expr, 2);
4086 tree result;
4087
4088 /* If either an rvalue is ok or we do not require an lvalue, create the
4089 temporary. But we cannot do that if the type is addressable. */
4090 if (((fallback & fb_rvalue) || !(fallback & fb_lvalue))
4091 && !TREE_ADDRESSABLE (type))
4092 {
4093 if (gimplify_ctxp->allow_rhs_cond_expr
4094 /* If either branch has side effects or could trap, it can't be
4095 evaluated unconditionally. */
4096 && !TREE_SIDE_EFFECTS (then_)
4097 && !generic_expr_could_trap_p (then_)
4098 && !TREE_SIDE_EFFECTS (else_)
4099 && !generic_expr_could_trap_p (else_))
4100 return gimplify_pure_cond_expr (expr_p, pre_p);
4101
4102 tmp = create_tmp_var (type, "iftmp");
4103 result = tmp;
4104 }
4105
4106 /* Otherwise, only create and copy references to the values. */
4107 else
4108 {
4109 type = build_pointer_type (type);
4110
4111 if (!VOID_TYPE_P (TREE_TYPE (then_)))
4112 then_ = build_fold_addr_expr_loc (loc, then_);
4113
4114 if (!VOID_TYPE_P (TREE_TYPE (else_)))
4115 else_ = build_fold_addr_expr_loc (loc, else_);
4116
4117 expr
4118 = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_);
4119
4120 tmp = create_tmp_var (type, "iftmp");
4121 result = build_simple_mem_ref_loc (loc, tmp);
4122 }
4123
4124 /* Build the new then clause, `tmp = then_;'. But don't build the
4125 assignment if the value is void; in C++ it can be if it's a throw. */
4126 if (!VOID_TYPE_P (TREE_TYPE (then_)))
4127 TREE_OPERAND (expr, 1) = build2 (INIT_EXPR, type, tmp, then_);
4128
4129 /* Similarly, build the new else clause, `tmp = else_;'. */
4130 if (!VOID_TYPE_P (TREE_TYPE (else_)))
4131 TREE_OPERAND (expr, 2) = build2 (INIT_EXPR, type, tmp, else_);
4132
4133 TREE_TYPE (expr) = void_type_node;
4134 recalculate_side_effects (expr);
4135
4136 /* Move the COND_EXPR to the prequeue. */
4137 gimplify_stmt (&expr, pre_p);
4138
4139 *expr_p = result;
4140 return GS_ALL_DONE;
4141 }
4142
4143 /* Remove any COMPOUND_EXPR so the following cases will be caught. */
4144 STRIP_TYPE_NOPS (TREE_OPERAND (expr, 0));
4145 if (TREE_CODE (TREE_OPERAND (expr, 0)) == COMPOUND_EXPR)
4146 gimplify_compound_expr (&TREE_OPERAND (expr, 0), pre_p, true);
4147
4148 /* Make sure the condition has BOOLEAN_TYPE. */
4149 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
4150
4151 /* Break apart && and || conditions. */
4152 if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
4153 || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
4154 {
4155 expr = shortcut_cond_expr (expr);
4156
4157 if (expr != *expr_p)
4158 {
4159 *expr_p = expr;
4160
4161 /* We can't rely on gimplify_expr to re-gimplify the expanded
4162 form properly, as cleanups might cause the target labels to be
4163 wrapped in a TRY_FINALLY_EXPR. To prevent that, we need to
4164 set up a conditional context. */
4165 gimple_push_condition ();
4166 gimplify_stmt (expr_p, &seq);
4167 gimple_pop_condition (pre_p);
4168 gimple_seq_add_seq (pre_p, seq);
4169
4170 return GS_ALL_DONE;
4171 }
4172 }
4173
4174 /* Now do the normal gimplification. */
4175
4176 /* Gimplify condition. */
4177 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL,
4178 is_gimple_condexpr_for_cond, fb_rvalue);
4179 if (ret == GS_ERROR)
4180 return GS_ERROR;
4181 gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);
4182
4183 gimple_push_condition ();
4184
4185 have_then_clause_p = have_else_clause_p = false;
4186 label_true = find_goto_label (TREE_OPERAND (expr, 1));
4187 if (label_true
4188 && DECL_CONTEXT (GOTO_DESTINATION (label_true)) == current_function_decl
4189 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
4190 have different locations, otherwise we end up with incorrect
4191 location information on the branches. */
4192 && (optimize
4193 || !EXPR_HAS_LOCATION (expr)
4194 || !rexpr_has_location (label_true)
4195 || EXPR_LOCATION (expr) == rexpr_location (label_true)))
4196 {
4197 have_then_clause_p = true;
4198 label_true = GOTO_DESTINATION (label_true);
4199 }
4200 else
4201 label_true = create_artificial_label (UNKNOWN_LOCATION);
4202 label_false = find_goto_label (TREE_OPERAND (expr, 2));
4203 if (label_false
4204 && DECL_CONTEXT (GOTO_DESTINATION (label_false)) == current_function_decl
4205 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
4206 have different locations, otherwise we end up with incorrect
4207 location information on the branches. */
4208 && (optimize
4209 || !EXPR_HAS_LOCATION (expr)
4210 || !rexpr_has_location (label_false)
4211 || EXPR_LOCATION (expr) == rexpr_location (label_false)))
4212 {
4213 have_else_clause_p = true;
4214 label_false = GOTO_DESTINATION (label_false);
4215 }
4216 else
4217 label_false = create_artificial_label (UNKNOWN_LOCATION);
4218
4219 gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
4220 &arm2);
4221 cond_stmt = gimple_build_cond (pred_code, arm1, arm2, label_true,
4222 label_false);
4223 gimple_set_no_warning (cond_stmt, TREE_NO_WARNING (COND_EXPR_COND (expr)));
4224 gimplify_seq_add_stmt (&seq, cond_stmt);
4225 gimple_stmt_iterator gsi = gsi_last (seq);
4226 maybe_fold_stmt (&gsi);
4227
4228 label_cont = NULL_TREE;
4229 if (!have_then_clause_p)
4230 {
4231 /* For if (...) {} else { code; } put label_true after
4232 the else block. */
4233 if (TREE_OPERAND (expr, 1) == NULL_TREE
4234 && !have_else_clause_p
4235 && TREE_OPERAND (expr, 2) != NULL_TREE)
4236 label_cont = label_true;
4237 else
4238 {
4239 gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
4240 have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
4241 /* For if (...) { code; } else {} or
4242 if (...) { code; } else goto label; or
4243 if (...) { code; return; } else { ... }
4244 label_cont isn't needed. */
4245 if (!have_else_clause_p
4246 && TREE_OPERAND (expr, 2) != NULL_TREE
4247 && gimple_seq_may_fallthru (seq))
4248 {
4249 gimple *g;
4250 label_cont = create_artificial_label (UNKNOWN_LOCATION);
4251
4252 g = gimple_build_goto (label_cont);
4253
4254 /* GIMPLE_COND's are very low level; they have embedded
4255 gotos. This particular embedded goto should not be marked
4256 with the location of the original COND_EXPR, as it would
4257 correspond to the COND_EXPR's condition, not the ELSE or the
4258 THEN arms. To avoid marking it with the wrong location, flag
4259 it as "no location". */
4260 gimple_set_do_not_emit_location (g);
4261
4262 gimplify_seq_add_stmt (&seq, g);
4263 }
4264 }
4265 }
4266 if (!have_else_clause_p)
4267 {
4268 gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
4269 have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
4270 }
4271 if (label_cont)
4272 gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));
4273
4274 gimple_pop_condition (pre_p);
4275 gimple_seq_add_seq (pre_p, seq);
4276
4277 if (ret == GS_ERROR)
4278 ; /* Do nothing. */
4279 else if (have_then_clause_p || have_else_clause_p)
4280 ret = GS_ALL_DONE;
4281 else
4282 {
4283 /* Both arms are empty; replace the COND_EXPR with its predicate. */
4284 expr = TREE_OPERAND (expr, 0);
4285 gimplify_stmt (&expr, pre_p);
4286 }
4287
4288 *expr_p = NULL;
4289 return ret;
4290 }
4291
4292 /* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
4293 to be marked addressable.
4294
4295 We cannot rely on such an expression being directly markable if a temporary
4296 has been created by the gimplification. In this case, we create another
4297 temporary and initialize it with a copy, which will become a store after we
4298 mark it addressable. This can happen if the front-end passed us something
4299 that it could not mark addressable yet, like a Fortran pass-by-reference
4300 parameter (int) floatvar. */
4301
4302 static void
4303 prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p)
4304 {
4305 while (handled_component_p (*expr_p))
4306 expr_p = &TREE_OPERAND (*expr_p, 0);
4307 if (is_gimple_reg (*expr_p))
4308 {
4309 /* Do not allow an SSA name as the temporary. */
4310 tree var = get_initialized_tmp_var (*expr_p, seq_p, NULL, false);
4311 DECL_GIMPLE_REG_P (var) = 0;
4312 *expr_p = var;
4313 }
4314 }
4315
4316 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4317 a call to __builtin_memcpy. */
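
/* Illustrative sketch: a hypothetical assignment 'a = b' of a large
   aggregate becomes

     __builtin_memcpy (&a, &b, size);

   where SIZE is supplied by the caller rather than computed here.  */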
4318
4319 static enum gimplify_status
4320 gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
4321 gimple_seq *seq_p)
4322 {
4323 tree t, to, to_ptr, from, from_ptr;
4324 gcall *gs;
4325 location_t loc = EXPR_LOCATION (*expr_p);
4326
4327 to = TREE_OPERAND (*expr_p, 0);
4328 from = TREE_OPERAND (*expr_p, 1);
4329
4330 /* Mark the RHS addressable. Beware that it may not be possible to do so
4331 directly if a temporary has been created by the gimplification. */
4332 prepare_gimple_addressable (&from, seq_p);
4333
4334 mark_addressable (from);
4335 from_ptr = build_fold_addr_expr_loc (loc, from);
4336 gimplify_arg (&from_ptr, seq_p, loc);
4337
4338 mark_addressable (to);
4339 to_ptr = build_fold_addr_expr_loc (loc, to);
4340 gimplify_arg (&to_ptr, seq_p, loc);
4341
4342 t = builtin_decl_implicit (BUILT_IN_MEMCPY);
4343
4344 gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);
4345
4346 if (want_value)
4347 {
4348 /* tmp = memcpy() */
4349 t = create_tmp_var (TREE_TYPE (to_ptr));
4350 gimple_call_set_lhs (gs, t);
4351 gimplify_seq_add_stmt (seq_p, gs);
4352
4353 *expr_p = build_simple_mem_ref (t);
4354 return GS_ALL_DONE;
4355 }
4356
4357 gimplify_seq_add_stmt (seq_p, gs);
4358 *expr_p = NULL;
4359 return GS_ALL_DONE;
4360 }
4361
4362 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4363 a call to __builtin_memset. In this case we know that the RHS is
4364 a CONSTRUCTOR with an empty element list. */
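
/* Illustrative sketch: a hypothetical assignment 'a = (struct S){};'
   from an empty CONSTRUCTOR becomes

     __builtin_memset (&a, 0, size);

   where, as for the memcpy case, SIZE is supplied by the caller.  */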
4365
4366 static enum gimplify_status
4367 gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
4368 gimple_seq *seq_p)
4369 {
4370 tree t, from, to, to_ptr;
4371 gcall *gs;
4372 location_t loc = EXPR_LOCATION (*expr_p);
4373
4374 /* Assert our assumptions, to abort instead of producing wrong code
4375 silently if they are not met. Beware that the RHS CONSTRUCTOR might
4376 not be immediately exposed. */
4377 from = TREE_OPERAND (*expr_p, 1);
4378 if (TREE_CODE (from) == WITH_SIZE_EXPR)
4379 from = TREE_OPERAND (from, 0);
4380
4381 gcc_assert (TREE_CODE (from) == CONSTRUCTOR
4382 && vec_safe_is_empty (CONSTRUCTOR_ELTS (from)));
4383
4384 /* Now proceed. */
4385 to = TREE_OPERAND (*expr_p, 0);
4386
4387 to_ptr = build_fold_addr_expr_loc (loc, to);
4388 gimplify_arg (&to_ptr, seq_p, loc);
4389 t = builtin_decl_implicit (BUILT_IN_MEMSET);
4390
4391 gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);
4392
4393 if (want_value)
4394 {
4395 /* tmp = memset() */
4396 t = create_tmp_var (TREE_TYPE (to_ptr));
4397 gimple_call_set_lhs (gs, t);
4398 gimplify_seq_add_stmt (seq_p, gs);
4399
4400 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
4401 return GS_ALL_DONE;
4402 }
4403
4404 gimplify_seq_add_stmt (seq_p, gs);
4405 *expr_p = NULL;
4406 return GS_ALL_DONE;
4407 }
4408
4409 /* A subroutine of gimplify_init_ctor_preeval. Called via walk_tree,
4410 determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
4411 assignment. Return non-null if we detect a potential overlap. */
4412
4413 struct gimplify_init_ctor_preeval_data
4414 {
4415 /* The base decl of the lhs object. May be NULL, in which case we
4416 have to assume the lhs is indirect. */
4417 tree lhs_base_decl;
4418
4419 /* The alias set of the lhs object. */
4420 alias_set_type lhs_alias_set;
4421 };
4422
4423 static tree
4424 gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
4425 {
4426 struct gimplify_init_ctor_preeval_data *data
4427 = (struct gimplify_init_ctor_preeval_data *) xdata;
4428 tree t = *tp;
4429
4430 /* If we find the base object, obviously we have overlap. */
4431 if (data->lhs_base_decl == t)
4432 return t;
4433
4434 /* If the constructor component is indirect, determine if we have a
4435 potential overlap with the lhs. The only bits of information we
4436 have to go on at this point are addressability and alias sets. */
4437 if ((INDIRECT_REF_P (t)
4438 || TREE_CODE (t) == MEM_REF)
4439 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
4440 && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
4441 return t;
4442
4443 /* If the constructor component is a call, determine if it can hide a
4444 potential overlap with the lhs through an INDIRECT_REF like above.
4445 ??? Ugh - this is completely broken. In fact this whole analysis
4446 doesn't look conservative. */
4447 if (TREE_CODE (t) == CALL_EXPR)
4448 {
4449 tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));
4450
4451 for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
4452 if (POINTER_TYPE_P (TREE_VALUE (type))
4453 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
4454 && alias_sets_conflict_p (data->lhs_alias_set,
4455 get_alias_set
4456 (TREE_TYPE (TREE_VALUE (type)))))
4457 return t;
4458 }
4459
4460 if (IS_TYPE_OR_DECL_P (t))
4461 *walk_subtrees = 0;
4462 return NULL;
4463 }
4464
4465 /* A subroutine of gimplify_init_constructor. Pre-evaluate EXPR,
4466 force values that overlap with the lhs (as described by *DATA)
4467 into temporaries. */
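
/* Illustrative sketch: in a hypothetical 'a = (struct S){ .s = a.t, ... }',
   the aggregate-valued element 'a.t' refers to the object being stored
   to, so it is evaluated into a temporary in PRE_P before any element
   of 'a' is written.  */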
4468
4469 static void
4470 gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
4471 struct gimplify_init_ctor_preeval_data *data)
4472 {
4473 enum gimplify_status one;
4474
4475 /* If the value is constant, then there's nothing to pre-evaluate. */
4476 if (TREE_CONSTANT (*expr_p))
4477 {
4478 /* Ensure it does not have side effects, it might contain a reference to
4479 the object we're initializing. */
4480 gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
4481 return;
4482 }
4483
4484 /* If the type has non-trivial constructors, we can't pre-evaluate. */
4485 if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
4486 return;
4487
4488 /* Recurse for nested constructors. */
4489 if (TREE_CODE (*expr_p) == CONSTRUCTOR)
4490 {
4491 unsigned HOST_WIDE_INT ix;
4492 constructor_elt *ce;
4493 vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (*expr_p);
4494
4495 FOR_EACH_VEC_SAFE_ELT (v, ix, ce)
4496 gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);
4497
4498 return;
4499 }
4500
4501 /* If this is a variable sized type, we must remember the size. */
4502 maybe_with_size_expr (expr_p);
4503
4504 /* Gimplify the constructor element to something appropriate for the rhs
4505 of a MODIFY_EXPR. Given that we know the LHS is an aggregate, we know
4506 the gimplifier will consider this a store to memory. Doing this
4507 gimplification now means that we won't have to deal with complicated
4508 language-specific trees, nor trees like SAVE_EXPR that can induce
4509 exponential search behavior. */
4510 one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
4511 if (one == GS_ERROR)
4512 {
4513 *expr_p = NULL;
4514 return;
4515 }
4516
4517 /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
4518 with the lhs, since "a = { .x=a }" doesn't make sense. This will
4519 always be true for all scalars, since is_gimple_mem_rhs insists on a
4520 temporary variable for them. */
4521 if (DECL_P (*expr_p))
4522 return;
4523
4524 /* If this is of variable size, we have no choice but to assume it doesn't
4525 overlap since we can't make a temporary for it. */
4526 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
4527 return;
4528
4529 /* Otherwise, we must search for overlap ... */
4530 if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
4531 return;
4532
4533 /* ... and if found, force the value into a temporary. */
4534 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
4535 }
4536
4537 /* A subroutine of gimplify_init_ctor_eval. Create a loop for
4538 a RANGE_EXPR in a CONSTRUCTOR for an array.
4539
4540 var = lower;
4541 loop_entry:
4542 object[var] = value;
4543 if (var == upper)
4544 goto loop_exit;
4545 var = var + 1;
4546 goto loop_entry;
4547 loop_exit:
4548
4549 We increment var _after_ the loop exit check because we might otherwise
4550 fail if upper == TYPE_MAX_VALUE (type for upper).
4551
4552 Note that we never have to deal with SAVE_EXPRs here, because this has
4553 already been taken care of for us, in gimplify_init_ctor_preeval(). */
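
/* Illustrative sketch: the GNU C initializer
   'int a[4] = { [0 ... 3] = v };' reaches this function as a
   RANGE_EXPR and is expanded into the loop above, assigning 'v' to
   a[0] through a[3].  */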
4554
4555 static void gimplify_init_ctor_eval (tree, vec<constructor_elt, va_gc> *,
4556 gimple_seq *, bool);
4557
4558 static void
4559 gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
4560 tree value, tree array_elt_type,
4561 gimple_seq *pre_p, bool cleared)
4562 {
4563 tree loop_entry_label, loop_exit_label, fall_thru_label;
4564 tree var, var_type, cref, tmp;
4565
4566 loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
4567 loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
4568 fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);
4569
4570 /* Create and initialize the index variable. */
4571 var_type = TREE_TYPE (upper);
4572 var = create_tmp_var (var_type);
4573 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));
4574
4575 /* Add the loop entry label. */
4576 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));
4577
4578 /* Build the reference. */
4579 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
4580 var, NULL_TREE, NULL_TREE);
4581
4582 /* If we are a constructor, just call gimplify_init_ctor_eval to do
4583 the store. Otherwise just assign value to the reference. */
4584
4585 if (TREE_CODE (value) == CONSTRUCTOR)
4586 /* NB we might have to call ourselves recursively through
4587 gimplify_init_ctor_eval if the value is a constructor. */
4588 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
4589 pre_p, cleared);
4590 else
4591 gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));
4592
4593 /* We exit the loop when the index var is equal to the upper bound. */
4594 gimplify_seq_add_stmt (pre_p,
4595 gimple_build_cond (EQ_EXPR, var, upper,
4596 loop_exit_label, fall_thru_label));
4597
4598 gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));
4599
4600 /* Otherwise, increment the index var... */
4601 tmp = build2 (PLUS_EXPR, var_type, var,
4602 fold_convert (var_type, integer_one_node));
4603 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));
4604
4605 /* ...and jump back to the loop entry. */
4606 gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));
4607
4608 /* Add the loop exit label. */
4609 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
4610 }
4611
4612 /* Return true if FDECL is accessing a field that is zero sized. */
4613
4614 static bool
4615 zero_sized_field_decl (const_tree fdecl)
4616 {
4617 if (TREE_CODE (fdecl) == FIELD_DECL && DECL_SIZE (fdecl)
4618 && integer_zerop (DECL_SIZE (fdecl)))
4619 return true;
4620 return false;
4621 }
4622
4623 /* Return true if TYPE is zero sized. */
4624
4625 static bool
4626 zero_sized_type (const_tree type)
4627 {
4628 if (AGGREGATE_TYPE_P (type) && TYPE_SIZE (type)
4629 && integer_zerop (TYPE_SIZE (type)))
4630 return true;
4631 return false;
4632 }
4633
4634 /* A subroutine of gimplify_init_constructor. Generate individual
4635 MODIFY_EXPRs for a CONSTRUCTOR. OBJECT is the LHS against which the
4636 assignments should happen. ELTS is the CONSTRUCTOR_ELTS of the
4637 CONSTRUCTOR. CLEARED is true if the entire LHS object has been
4638 zeroed first. */
4639
4640 static void
4641 gimplify_init_ctor_eval (tree object, vec<constructor_elt, va_gc> *elts,
4642 gimple_seq *pre_p, bool cleared)
4643 {
4644 tree array_elt_type = NULL;
4645 unsigned HOST_WIDE_INT ix;
4646 tree purpose, value;
4647
4648 if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
4649 array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));
4650
4651 FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
4652 {
4653 tree cref;
4654
4655 /* NULL values are created above for gimplification errors. */
4656 if (value == NULL)
4657 continue;
4658
4659 if (cleared && initializer_zerop (value))
4660 continue;
4661
4662 /* ??? Here's to hoping the front end fills in all of the indices,
4663 so we don't have to figure out what's missing ourselves. */
4664 gcc_assert (purpose);
4665
4666 /* Skip zero-sized fields, unless value has side-effects. This can
4667 happen with calls to functions returning a zero-sized type, which
4668 we shouldn't discard. As a number of downstream passes don't
4669 expect sets of zero-sized fields, we rely on the gimplification of
4670 the MODIFY_EXPR we make below to drop the assignment statement. */
4671 if (! TREE_SIDE_EFFECTS (value) && zero_sized_field_decl (purpose))
4672 continue;
4673
4674 /* If we have a RANGE_EXPR, we have to build a loop to assign the
4675 whole range. */
4676 if (TREE_CODE (purpose) == RANGE_EXPR)
4677 {
4678 tree lower = TREE_OPERAND (purpose, 0);
4679 tree upper = TREE_OPERAND (purpose, 1);
4680
4681 /* If the lower bound is equal to upper, just treat it as if
4682 upper was the index. */
4683 if (simple_cst_equal (lower, upper))
4684 purpose = upper;
4685 else
4686 {
4687 gimplify_init_ctor_eval_range (object, lower, upper, value,
4688 array_elt_type, pre_p, cleared);
4689 continue;
4690 }
4691 }
4692
4693 if (array_elt_type)
4694 {
4695 /* Do not use bitsizetype for ARRAY_REF indices. */
4696 if (TYPE_DOMAIN (TREE_TYPE (object)))
4697 purpose
4698 = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
4699 purpose);
4700 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
4701 purpose, NULL_TREE, NULL_TREE);
4702 }
4703 else
4704 {
4705 gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
4706 cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
4707 unshare_expr (object), purpose, NULL_TREE);
4708 }
4709
4710 if (TREE_CODE (value) == CONSTRUCTOR
4711 && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
4712 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
4713 pre_p, cleared);
4714 else
4715 {
4716 tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
4717 gimplify_and_add (init, pre_p);
4718 ggc_free (init);
4719 }
4720 }
4721 }
4722
4723 /* Return the appropriate RHS predicate for this LHS. */
4724
4725 gimple_predicate
4726 rhs_predicate_for (tree lhs)
4727 {
4728 if (is_gimple_reg (lhs))
4729 return is_gimple_reg_rhs_or_call;
4730 else
4731 return is_gimple_mem_rhs_or_call;
4732 }
4733
4734 /* Return the initial guess for an appropriate RHS predicate for this LHS,
4735 before the LHS has been gimplified. */
4736
4737 static gimple_predicate
4738 initial_rhs_predicate_for (tree lhs)
4739 {
4740 if (is_gimple_reg_type (TREE_TYPE (lhs)))
4741 return is_gimple_reg_rhs_or_call;
4742 else
4743 return is_gimple_mem_rhs_or_call;
4744 }
4745
4746 /* Gimplify a C99 compound literal expression. This just means adding
4747 the DECL_EXPR before the current statement and using its anonymous
4748 decl instead. */
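
/* Illustrative sketch: for the hypothetical statement
   'p = &(struct S){ 1, 2 };', the compound literal's anonymous decl D
   is declared via its DECL_EXPR in PRE_P and the expression is
   rewritten to 'p = &D;'.  */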
4749
4750 static enum gimplify_status
4751 gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p,
4752 bool (*gimple_test_f) (tree),
4753 fallback_t fallback)
4754 {
4755 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
4756 tree decl = DECL_EXPR_DECL (decl_s);
4757 tree init = DECL_INITIAL (decl);
4758 /* Mark the decl as addressable if the compound literal
4759 expression is addressable now, otherwise it is marked too late
4760 after we gimplify the initialization expression. */
4761 if (TREE_ADDRESSABLE (*expr_p))
4762 TREE_ADDRESSABLE (decl) = 1;
4763 /* Otherwise, if we don't need an lvalue and have a literal directly
4764 substitute it. Check if it matches the gimple predicate, as
4765 otherwise we'd generate a new temporary, and we can as well just
4766 use the decl we already have. */
4767 else if (!TREE_ADDRESSABLE (decl)
4768 && !TREE_THIS_VOLATILE (decl)
4769 && init
4770 && (fallback & fb_lvalue) == 0
4771 && gimple_test_f (init))
4772 {
4773 *expr_p = init;
4774 return GS_OK;
4775 }
4776
4777 /* Preliminarily mark non-addressed complex variables as eligible
4778 for promotion to gimple registers. We'll transform their uses
4779 as we find them. */
4780 if ((TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE
4781 || TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE)
4782 && !TREE_THIS_VOLATILE (decl)
4783 && !needs_to_live_in_memory (decl))
4784 DECL_GIMPLE_REG_P (decl) = 1;
4785
4786 /* If the decl is not addressable, then it is being used in some
4787 expression or on the right hand side of a statement, and it can
4788 be put into a readonly data section. */
4789 if (!TREE_ADDRESSABLE (decl) && (fallback & fb_lvalue) == 0)
4790 TREE_READONLY (decl) = 1;
4791
4792 /* This decl isn't mentioned in the enclosing block, so add it to the
4793 list of temps. FIXME it seems a bit of a kludge to say that
4794 anonymous artificial vars aren't pushed, but everything else is. */
4795 if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
4796 gimple_add_tmp_var (decl);
4797
4798 gimplify_and_add (decl_s, pre_p);
4799 *expr_p = decl;
4800 return GS_OK;
4801 }
4802
4803 /* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
4804 return a new CONSTRUCTOR if something changed. */
4805
4806 static tree
4807 optimize_compound_literals_in_ctor (tree orig_ctor)
4808 {
4809 tree ctor = orig_ctor;
4810 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (ctor);
4811 unsigned int idx, num = vec_safe_length (elts);
4812
4813 for (idx = 0; idx < num; idx++)
4814 {
4815 tree value = (*elts)[idx].value;
4816 tree newval = value;
4817 if (TREE_CODE (value) == CONSTRUCTOR)
4818 newval = optimize_compound_literals_in_ctor (value);
4819 else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
4820 {
4821 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
4822 tree decl = DECL_EXPR_DECL (decl_s);
4823 tree init = DECL_INITIAL (decl);
4824
4825 if (!TREE_ADDRESSABLE (value)
4826 && !TREE_ADDRESSABLE (decl)
4827 && init
4828 && TREE_CODE (init) == CONSTRUCTOR)
4829 newval = optimize_compound_literals_in_ctor (init);
4830 }
4831 if (newval == value)
4832 continue;
4833
4834 if (ctor == orig_ctor)
4835 {
4836 ctor = copy_node (orig_ctor);
4837 CONSTRUCTOR_ELTS (ctor) = vec_safe_copy (elts);
4838 elts = CONSTRUCTOR_ELTS (ctor);
4839 }
4840 (*elts)[idx].value = newval;
4841 }
4842 return ctor;
4843 }
4844
4845 /* A subroutine of gimplify_modify_expr. Break out elements of a
4846 CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
4847
4848 Note that we still need to clear any elements that don't have explicit
4849 initializers, so if not all elements are initialized we keep the
4850 original MODIFY_EXPR, we just remove all of the constructor elements.
4851
4852 If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
4853 GS_ERROR if we would have to create a temporary when gimplifying
4854 this constructor. Otherwise, return GS_OK.
4855
4856 If NOTIFY_TEMP_CREATION is false, just do the gimplification. */
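
/* Illustrative sketch: a mostly-zero hypothetical initializer
   'struct S s = { .a = 1 };' may become a clearing of 's' followed by
   's.a = 1;', while a large constant initializer of a read-only
   variable may instead be promoted to a static object that is
   block-copied or used directly.  */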
4857
4858 static enum gimplify_status
4859 gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
4860 bool want_value, bool notify_temp_creation)
4861 {
4862 tree object, ctor, type;
4863 enum gimplify_status ret;
4864 vec<constructor_elt, va_gc> *elts;
4865
4866 gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);
4867
4868 if (!notify_temp_creation)
4869 {
4870 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
4871 is_gimple_lvalue, fb_lvalue);
4872 if (ret == GS_ERROR)
4873 return ret;
4874 }
4875
4876 object = TREE_OPERAND (*expr_p, 0);
4877 ctor = TREE_OPERAND (*expr_p, 1)
4878 = optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
4879 type = TREE_TYPE (ctor);
4880 elts = CONSTRUCTOR_ELTS (ctor);
4881 ret = GS_ALL_DONE;
4882
4883 switch (TREE_CODE (type))
4884 {
4885 case RECORD_TYPE:
4886 case UNION_TYPE:
4887 case QUAL_UNION_TYPE:
4888 case ARRAY_TYPE:
4889 {
4890 struct gimplify_init_ctor_preeval_data preeval_data;
4891 HOST_WIDE_INT num_ctor_elements, num_nonzero_elements;
4892 HOST_WIDE_INT num_unique_nonzero_elements;
4893 bool cleared, complete_p, valid_const_initializer;
4894 /* Use readonly data for initializers of this or smaller size
4895 regardless of the num_nonzero_elements / num_unique_nonzero_elements
4896 ratio. */
4897 const HOST_WIDE_INT min_unique_size = 64;
4898 /* If num_nonzero_elements / num_unique_nonzero_elements ratio
4899 is smaller than this, use readonly data. */
4900 const int unique_nonzero_ratio = 8;
4901
4902 /* Aggregate types must lower constructors to initialization of
4903 individual elements. The exception is that a CONSTRUCTOR node
4904 with no elements indicates zero-initialization of the whole. */
4905 if (vec_safe_is_empty (elts))
4906 {
4907 if (notify_temp_creation)
4908 return GS_OK;
4909 break;
4910 }
4911
4912 /* Fetch information about the constructor to direct later processing.
4913 We might want to make static versions of it in various cases, and
4914 can only do so if it is known to be a valid constant initializer. */
4915 valid_const_initializer
4916 = categorize_ctor_elements (ctor, &num_nonzero_elements,
4917 &num_unique_nonzero_elements,
4918 &num_ctor_elements, &complete_p);
4919
4920 /* If a const aggregate variable is being initialized, then it
4921 should never be a loss to promote the variable to be static. */
4922 if (valid_const_initializer
4923 && num_nonzero_elements > 1
4924 && TREE_READONLY (object)
4925 && VAR_P (object)
4926 && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object))
4927 /* For ctors that have many repeated nonzero elements
4928 represented through RANGE_EXPRs, prefer initializing
4929 those through runtime loops over copies of large amounts
4930 of data from the readonly data section. */
4931 && (num_unique_nonzero_elements
4932 > num_nonzero_elements / unique_nonzero_ratio
4933 || ((unsigned HOST_WIDE_INT) int_size_in_bytes (type)
4934 <= (unsigned HOST_WIDE_INT) min_unique_size)))
4935 {
4936 if (notify_temp_creation)
4937 return GS_ERROR;
4938 DECL_INITIAL (object) = ctor;
4939 TREE_STATIC (object) = 1;
4940 if (!DECL_NAME (object))
4941 DECL_NAME (object) = create_tmp_var_name ("C");
4942 walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);
4943
4944 /* ??? C++ doesn't automatically append a .<number> to the
4945 assembler name, and even when it does, it looks at FE private
4946 data structures to figure out what that number should be,
4947 which are not set for this variable. I suppose this is
4948 important for local statics for inline functions, which aren't
4949 "local" in the object file sense. So in order to get a unique
4950 TU-local symbol, we must invoke the lhd version now. */
4951 lhd_set_decl_assembler_name (object);
4952
4953 *expr_p = NULL_TREE;
4954 break;
4955 }
4956
4957 /* If there are "lots" of initialized elements, even discounting
4958 those that are not address constants (and thus *must* be
4959 computed at runtime), then partition the constructor into
4960 constant and non-constant parts. Block copy the constant
4961 parts in, then generate code for the non-constant parts. */
4962 /* TODO. There's code in cp/typeck.c to do this. */
4963
4964 if (int_size_in_bytes (TREE_TYPE (ctor)) < 0)
4965 /* store_constructor will ignore the clearing of variable-sized
4966 objects. Initializers for such objects must explicitly set
4967 every field that needs to be set. */
4968 cleared = false;
4969 else if (!complete_p)
4970 /* If the constructor isn't complete, clear the whole object
4971 beforehand, unless CONSTRUCTOR_NO_CLEARING is set on it.
4972
4973 ??? This ought not to be needed. For any element not present
4974 in the initializer, we should simply set them to zero. Except
4975 we'd need to *find* the elements that are not present, and that
4976 requires trickery to avoid quadratic compile-time behavior in
4977 large cases or excessive memory use in small cases. */
4978 cleared = !CONSTRUCTOR_NO_CLEARING (ctor);
4979 else if (num_ctor_elements - num_nonzero_elements
4980 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
4981 && num_nonzero_elements < num_ctor_elements / 4)
4982 /* If there are "lots" of zeros, it's more efficient to clear
4983 the memory and then set the nonzero elements. */
4984 cleared = true;
4985 else
4986 cleared = false;
4987
4988 /* If there are "lots" of initialized elements, and all of them
4989 are valid address constants, then the entire initializer can
4990 be dropped to memory, and then memcpy'd out. Don't do this
4991 for sparse arrays, though, as it's more efficient to follow
4992 the standard CONSTRUCTOR behavior of memset followed by
4993 individual element initialization. Also don't do this for small
4994 all-zero initializers (which aren't big enough to merit
4995 clearing), and don't try to make bitwise copies of
4996 TREE_ADDRESSABLE types. */
4997
4998 if (valid_const_initializer
4999 && !(cleared || num_nonzero_elements == 0)
5000 && !TREE_ADDRESSABLE (type))
5001 {
5002 HOST_WIDE_INT size = int_size_in_bytes (type);
5003 unsigned int align;
5004
5005 /* ??? We can still get unbounded array types, at least
5006 from the C++ front end. This seems wrong, but attempt
5007 to work around it for now. */
5008 if (size < 0)
5009 {
5010 size = int_size_in_bytes (TREE_TYPE (object));
5011 if (size >= 0)
5012 TREE_TYPE (ctor) = type = TREE_TYPE (object);
5013 }
5014
5015 /* Find the maximum alignment we can assume for the object. */
5016 /* ??? Make use of DECL_OFFSET_ALIGN. */
5017 if (DECL_P (object))
5018 align = DECL_ALIGN (object);
5019 else
5020 align = TYPE_ALIGN (type);
5021
5022 /* Do a block move either if the size is so small as to make
5023 each individual move a sub-unit move on average, or if it
5024 is so large as to make individual moves inefficient. */
5025 if (size > 0
5026 && num_nonzero_elements > 1
5027 /* For ctors that have many repeated nonzero elements
5028 represented through RANGE_EXPRs, prefer initializing
5029 those through runtime loops over copies of large amounts
5030 of data from the readonly data section. */
5031 && (num_unique_nonzero_elements
5032 > num_nonzero_elements / unique_nonzero_ratio
5033 || size <= min_unique_size)
5034 && (size < num_nonzero_elements
5035 || !can_move_by_pieces (size, align)))
5036 {
5037 if (notify_temp_creation)
5038 return GS_ERROR;
5039
5040 walk_tree (&ctor, force_labels_r, NULL, NULL);
5041 ctor = tree_output_constant_def (ctor);
5042 if (!useless_type_conversion_p (type, TREE_TYPE (ctor)))
5043 ctor = build1 (VIEW_CONVERT_EXPR, type, ctor);
5044 TREE_OPERAND (*expr_p, 1) = ctor;
5045
5046 /* This is no longer an assignment of a CONSTRUCTOR, but
5047 we still may have processing to do on the LHS. So
5048 pretend we didn't do anything here to let that happen. */
5049 return GS_UNHANDLED;
5050 }
5051 }
5052
5053 /* If the target is volatile and we have non-zero elements and more than
5054 one field to assign, initialize the target from a temporary. */
5055 if (TREE_THIS_VOLATILE (object)
5056 && !TREE_ADDRESSABLE (type)
5057 && (num_nonzero_elements > 0 || !cleared)
5058 && vec_safe_length (elts) > 1)
5059 {
5060 tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type));
5061 TREE_OPERAND (*expr_p, 0) = temp;
5062 *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
5063 *expr_p,
5064 build2 (MODIFY_EXPR, void_type_node,
5065 object, temp));
5066 return GS_OK;
5067 }
5068
5069 if (notify_temp_creation)
5070 return GS_OK;
5071
5072 /* If there are nonzero elements and if needed, pre-evaluate to capture
5073 elements overlapping with the lhs into temporaries. We must do this
5074 before clearing to fetch the values before they are zeroed-out. */
5075 if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR)
5076 {
5077 preeval_data.lhs_base_decl = get_base_address (object);
5078 if (!DECL_P (preeval_data.lhs_base_decl))
5079 preeval_data.lhs_base_decl = NULL;
5080 preeval_data.lhs_alias_set = get_alias_set (object);
5081
5082 gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
5083 pre_p, post_p, &preeval_data);
5084 }
5085
5086 bool ctor_has_side_effects_p
5087 = TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p, 1));
5088
5089 if (cleared)
5090 {
5091 /* Zap the CONSTRUCTOR element list, which simplifies this case.
5092 Note that we still have to gimplify, in order to handle the
5093 case of variable sized types. Avoid shared tree structures. */
5094 CONSTRUCTOR_ELTS (ctor) = NULL;
5095 TREE_SIDE_EFFECTS (ctor) = 0;
5096 object = unshare_expr (object);
5097 gimplify_stmt (expr_p, pre_p);
5098 }
5099
5100 /* If we have not block cleared the object, or if there are nonzero
5101 elements in the constructor, or if the constructor has side effects,
5102 add assignments to the individual scalar fields of the object. */
5103 if (!cleared
5104 || num_nonzero_elements > 0
5105 || ctor_has_side_effects_p)
5106 gimplify_init_ctor_eval (object, elts, pre_p, cleared);
5107
5108 *expr_p = NULL_TREE;
5109 }
5110 break;
5111
5112 case COMPLEX_TYPE:
5113 {
5114 tree r, i;
5115
5116 if (notify_temp_creation)
5117 return GS_OK;
5118
5119 /* Extract the real and imaginary parts out of the ctor. */
5120 gcc_assert (elts->length () == 2);
5121 r = (*elts)[0].value;
5122 i = (*elts)[1].value;
5123 if (r == NULL || i == NULL)
5124 {
5125 tree zero = build_zero_cst (TREE_TYPE (type));
5126 if (r == NULL)
5127 r = zero;
5128 if (i == NULL)
5129 i = zero;
5130 }
5131
5132 /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
5133 represent creation of a complex value. */
5134 if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
5135 {
5136 ctor = build_complex (type, r, i);
5137 TREE_OPERAND (*expr_p, 1) = ctor;
5138 }
5139 else
5140 {
5141 ctor = build2 (COMPLEX_EXPR, type, r, i);
5142 TREE_OPERAND (*expr_p, 1) = ctor;
5143 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
5144 pre_p,
5145 post_p,
5146 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
5147 fb_rvalue);
5148 }
5149 }
5150 break;
5151
5152 case VECTOR_TYPE:
5153 {
5154 unsigned HOST_WIDE_INT ix;
5155 constructor_elt *ce;
5156
5157 if (notify_temp_creation)
5158 return GS_OK;
5159
5160 /* Go ahead and simplify constant constructors to VECTOR_CST. */
5161 if (TREE_CONSTANT (ctor))
5162 {
5163 bool constant_p = true;
5164 tree value;
5165
5166 /* Even when ctor is constant, it might contain non-*_CST
5167 elements, such as addresses or trapping values like
5168 1.0/0.0 - 1.0/0.0. Such expressions don't belong
5169 in VECTOR_CST nodes. */
5170 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
5171 if (!CONSTANT_CLASS_P (value))
5172 {
5173 constant_p = false;
5174 break;
5175 }
5176
5177 if (constant_p)
5178 {
5179 TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
5180 break;
5181 }
5182
5183 TREE_CONSTANT (ctor) = 0;
5184 }
5185
5186 /* Vector types use CONSTRUCTOR all the way through gimple
5187 compilation as a general initializer. */
5188 FOR_EACH_VEC_SAFE_ELT (elts, ix, ce)
5189 {
5190 enum gimplify_status tret;
5191 tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
5192 fb_rvalue);
5193 if (tret == GS_ERROR)
5194 ret = GS_ERROR;
5195 else if (TREE_STATIC (ctor)
5196 && !initializer_constant_valid_p (ce->value,
5197 TREE_TYPE (ce->value)))
5198 TREE_STATIC (ctor) = 0;
5199 }
5200 if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
5201 TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
5202 }
5203 break;
5204
5205 default:
5206 /* So how did we get a CONSTRUCTOR for a scalar type? */
5207 gcc_unreachable ();
5208 }
5209
5210 if (ret == GS_ERROR)
5211 return GS_ERROR;
5212 /* If we have gimplified both sides of the initializer but have
5213 not emitted an assignment, do so now. */
5214 if (*expr_p)
5215 {
5216 tree lhs = TREE_OPERAND (*expr_p, 0);
5217 tree rhs = TREE_OPERAND (*expr_p, 1);
5218 if (want_value && object == lhs)
5219 lhs = unshare_expr (lhs);
5220 gassign *init = gimple_build_assign (lhs, rhs);
5221 gimplify_seq_add_stmt (pre_p, init);
5222 }
5223 if (want_value)
5224 {
5225 *expr_p = object;
5226 return GS_OK;
5227 }
5228 else
5229 {
5230 *expr_p = NULL;
5231 return GS_ALL_DONE;
5232 }
5233 }
5234
5235 /* Given a pointer value OP0, return a simplified version of an
5236 indirection through OP0, or NULL_TREE if no simplification is
5237 possible. This may only be applied to a rhs of an expression.
5238 Note that the resulting type may be different from the type pointed
5239 to in the sense that it is still compatible from the langhooks
5240 point of view. */
5241
5242 static tree
5243 gimple_fold_indirect_ref_rhs (tree t)
5244 {
5245 return gimple_fold_indirect_ref (t);
5246 }
5247
5248 /* Subroutine of gimplify_modify_expr to do simplifications of
5249 MODIFY_EXPRs based on the code of the RHS. We loop for as long as
5250 something changes. */
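
/* Illustrative sketch: a hypothetical RHS '*(const A *)&x' is folded
   back to plain 'x', and an aggregate-valued 'y = b ? c : d' is pushed
   into the branches as 'if (b) y = c; else y = d;', iterating until no
   case applies.  */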
5251
5252 static enum gimplify_status
5253 gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
5254 gimple_seq *pre_p, gimple_seq *post_p,
5255 bool want_value)
5256 {
5257 enum gimplify_status ret = GS_UNHANDLED;
5258 bool changed;
5259
5260 do
5261 {
5262 changed = false;
5263 switch (TREE_CODE (*from_p))
5264 {
5265 case VAR_DECL:
5266 /* If we're assigning from a read-only variable initialized with
5267 a constructor, do the direct assignment from the constructor,
5268 but only if neither source nor target are volatile since this
5269 latter assignment might end up being done on a per-field basis. */
5270 if (DECL_INITIAL (*from_p)
5271 && TREE_READONLY (*from_p)
5272 && !TREE_THIS_VOLATILE (*from_p)
5273 && !TREE_THIS_VOLATILE (*to_p)
5274 && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR)
5275 {
5276 tree old_from = *from_p;
5277 enum gimplify_status subret;
5278
5279 /* Move the constructor into the RHS. */
5280 *from_p = unshare_expr (DECL_INITIAL (*from_p));
5281
5282 /* Let's see if gimplify_init_constructor will need to put
5283 it in memory. */
5284 subret = gimplify_init_constructor (expr_p, NULL, NULL,
5285 false, true);
5286 if (subret == GS_ERROR)
5287 {
5288 /* If so, revert the change. */
5289 *from_p = old_from;
5290 }
5291 else
5292 {
5293 ret = GS_OK;
5294 changed = true;
5295 }
5296 }
5297 break;
5298 case INDIRECT_REF:
5299 {
5300 /* If we have code like
5301
5302 *(const A*)(A*)&x
5303
5304 where the type of "x" is a (possibly cv-qualified variant
5305 of "A"), treat the entire expression as identical to "x".
5306 This kind of code arises in C++ when an object is bound
5307 to a const reference, and if "x" is a TARGET_EXPR we want
5308 to take advantage of the optimization below. */
5309 bool volatile_p = TREE_THIS_VOLATILE (*from_p);
5310 tree t = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
5311 if (t)
5312 {
5313 if (TREE_THIS_VOLATILE (t) != volatile_p)
5314 {
5315 if (DECL_P (t))
5316 t = build_simple_mem_ref_loc (EXPR_LOCATION (*from_p),
5317 build_fold_addr_expr (t));
5318 if (REFERENCE_CLASS_P (t))
5319 TREE_THIS_VOLATILE (t) = volatile_p;
5320 }
5321 *from_p = t;
5322 ret = GS_OK;
5323 changed = true;
5324 }
5325 break;
5326 }
5327
5328 case TARGET_EXPR:
5329 {
5330 /* If we are initializing something from a TARGET_EXPR, strip the
5331 TARGET_EXPR and initialize it directly, if possible. This can't
5332 be done if the initializer is void, since that implies that the
5333 temporary is set in some non-trivial way.
5334
5335 ??? What about code that pulls out the temp and uses it
5336 elsewhere? I think that such code never uses the TARGET_EXPR as
5337 an initializer. If I'm wrong, we'll die because the temp won't
5338 have any RTL. In that case, I guess we'll need to replace
5339 references somehow. */
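/* A sketch (hypothetical slot name D.1234): the initialization

     s = TARGET_EXPR <D.1234, f (x)>

   is shortcut below to "s = f (x)", eliding the temporary slot,
   provided the initializer is not void and elision is allowed.  */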
5340 tree init = TARGET_EXPR_INITIAL (*from_p);
5341
5342 if (init
5343 && (TREE_CODE (*expr_p) != MODIFY_EXPR
5344 || !TARGET_EXPR_NO_ELIDE (*from_p))
5345 && !VOID_TYPE_P (TREE_TYPE (init)))
5346 {
5347 *from_p = init;
5348 ret = GS_OK;
5349 changed = true;
5350 }
5351 }
5352 break;
5353
5354 case COMPOUND_EXPR:
5355 /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
5356 caught. */
5357 gimplify_compound_expr (from_p, pre_p, true);
5358 ret = GS_OK;
5359 changed = true;
5360 break;
5361
5362 case CONSTRUCTOR:
5363 /* If we already made some changes, let the front end have a
5364 crack at this before we break it down. */
5365 if (ret != GS_UNHANDLED)
5366 break;
5367 /* If we're initializing from a CONSTRUCTOR, break this into
5368 individual MODIFY_EXPRs. */
5369 return gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
5370 false);
5371
5372 case COND_EXPR:
5373 /* If we're assigning to a non-register type, push the assignment
5374 down into the branches. This is mandatory for ADDRESSABLE types,
5375 since we cannot generate temporaries for such, but it saves a
5376 copy in other cases as well. */
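/* For example (a sketch, not from this file): for an aggregate
   "struct S s", the assignment

     s = cond ? a : b;

   is rewritten below into the void-typed conditional

     if (cond) s = a; else s = b;

   so no aggregate temporary is needed for the branches.  */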
5377 if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
5378 {
5379 /* This code should mirror the code in gimplify_cond_expr. */
5380 enum tree_code code = TREE_CODE (*expr_p);
5381 tree cond = *from_p;
5382 tree result = *to_p;
5383
5384 ret = gimplify_expr (&result, pre_p, post_p,
5385 is_gimple_lvalue, fb_lvalue);
5386 if (ret != GS_ERROR)
5387 ret = GS_OK;
5388
5389 /* If we are going to write RESULT more than once, clear its
5390 TREE_READONLY flag; otherwise we might incorrectly promote
5391 the variable to static const and initialize it at compile
5392 time in one of the branches. */
5393 if (VAR_P (result)
5394 && TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node
5395 && TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
5396 TREE_READONLY (result) = 0;
5397 if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
5398 TREE_OPERAND (cond, 1)
5399 = build2 (code, void_type_node, result,
5400 TREE_OPERAND (cond, 1));
5401 if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
5402 TREE_OPERAND (cond, 2)
5403 = build2 (code, void_type_node, unshare_expr (result),
5404 TREE_OPERAND (cond, 2));
5405
5406 TREE_TYPE (cond) = void_type_node;
5407 recalculate_side_effects (cond);
5408
5409 if (want_value)
5410 {
5411 gimplify_and_add (cond, pre_p);
5412 *expr_p = unshare_expr (result);
5413 }
5414 else
5415 *expr_p = cond;
5416 return ret;
5417 }
5418 break;
5419
5420 case CALL_EXPR:
5421 /* For calls that return in memory, give *to_p as the CALL_EXPR's
5422 return slot so that we don't generate a temporary. */
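/* Sketch (hypothetical declarations):

     struct S f (void);
     struct S s;
     s = f ();   // f may construct its result directly in s

   Setting the return-slot flag lets the call write into "s" instead
   of into a temporary that would then be block-copied into "s".  */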
5423 if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
5424 && aggregate_value_p (*from_p, *from_p))
5425 {
5426 bool use_target;
5427
5428 if (!(rhs_predicate_for (*to_p))(*from_p))
5429 /* If we need a temporary, *to_p isn't accurate. */
5430 use_target = false;
5431 /* It's OK to use the return slot directly unless it's an NRV. */
5432 else if (TREE_CODE (*to_p) == RESULT_DECL
5433 && DECL_NAME (*to_p) == NULL_TREE
5434 && needs_to_live_in_memory (*to_p))
5435 use_target = true;
5436 else if (is_gimple_reg_type (TREE_TYPE (*to_p))
5437 || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
5438 /* Don't force regs into memory. */
5439 use_target = false;
5440 else if (TREE_CODE (*expr_p) == INIT_EXPR)
5441 /* It's OK to use the target directly if it's being
5442 initialized. */
5443 use_target = true;
5444 else if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (*to_p)))
5445 != INTEGER_CST)
5446 /* Always use the target and thus RSO for variable-sized types.
5447 GIMPLE cannot deal with a variable-sized assignment
5448 embedded in a call statement. */
5449 use_target = true;
5450 else if (TREE_CODE (*to_p) != SSA_NAME
5451 && (!is_gimple_variable (*to_p)
5452 || needs_to_live_in_memory (*to_p)))
5453 /* Don't use the original target if it's already addressable;
5454 if its address escapes, and the called function uses the
5455 NRV optimization, a conforming program could see *to_p
5456 change before the called function returns; see c++/19317.
5457 When optimizing, the return_slot pass marks more functions
5458 as safe after we have escape info. */
5459 use_target = false;
5460 else
5461 use_target = true;
5462
5463 if (use_target)
5464 {
5465 CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
5466 mark_addressable (*to_p);
5467 }
5468 }
5469 break;
5470
5471 case WITH_SIZE_EXPR:
5472 /* Likewise for calls that return an aggregate of non-constant size,
5473 since we would not be able to generate a temporary at all. */
5474 if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR)
5475 {
5476 *from_p = TREE_OPERAND (*from_p, 0);
5477 /* We don't change ret in this case because the
5478 WITH_SIZE_EXPR might have been added in
5479 gimplify_modify_expr, so returning GS_OK would lead to an
5480 infinite loop. */
5481 changed = true;
5482 }
5483 break;
5484
5485 /* If we're initializing from a container, push the initialization
5486 inside it. */
5487 case CLEANUP_POINT_EXPR:
5488 case BIND_EXPR:
5489 case STATEMENT_LIST:
5490 {
5491 tree wrap = *from_p;
5492 tree t;
5493
5494 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
5495 fb_lvalue);
5496 if (ret != GS_ERROR)
5497 ret = GS_OK;
5498
5499 t = voidify_wrapper_expr (wrap, *expr_p);
5500 gcc_assert (t == *expr_p);
5501
5502 if (want_value)
5503 {
5504 gimplify_and_add (wrap, pre_p);
5505 *expr_p = unshare_expr (*to_p);
5506 }
5507 else
5508 *expr_p = wrap;
5509 return GS_OK;
5510 }
5511
5512 case COMPOUND_LITERAL_EXPR:
5513 {
5514 tree complit = TREE_OPERAND (*expr_p, 1);
5515 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
5516 tree decl = DECL_EXPR_DECL (decl_s);
5517 tree init = DECL_INITIAL (decl);
5518
5519 /* struct T x = (struct T) { 0, 1, 2 } can be optimized
5520 into struct T x = { 0, 1, 2 } if the address of the
5521 compound literal has never been taken. */
5522 if (!TREE_ADDRESSABLE (complit)
5523 && !TREE_ADDRESSABLE (decl)
5524 && init)
5525 {
5526 *expr_p = copy_node (*expr_p);
5527 TREE_OPERAND (*expr_p, 1) = init;
5528 return GS_OK;
5529 }
5530 }
5531
5532 default:
5533 break;
5534 }
5535 }
5536 while (changed);
5537
5538 return ret;
5539 }
5540
5541
5542 /* Return true if T looks like a valid GIMPLE statement. */
5543
5544 static bool
5545 is_gimple_stmt (tree t)
5546 {
5547 const enum tree_code code = TREE_CODE (t);
5548
5549 switch (code)
5550 {
5551 case NOP_EXPR:
5552 /* The only valid NOP_EXPR is the empty statement. */
5553 return IS_EMPTY_STMT (t);
5554
5555 case BIND_EXPR:
5556 case COND_EXPR:
5557 /* These are only valid if they're void. */
5558 return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));
5559
5560 case SWITCH_EXPR:
5561 case GOTO_EXPR:
5562 case RETURN_EXPR:
5563 case LABEL_EXPR:
5564 case CASE_LABEL_EXPR:
5565 case TRY_CATCH_EXPR:
5566 case TRY_FINALLY_EXPR:
5567 case EH_FILTER_EXPR:
5568 case CATCH_EXPR:
5569 case ASM_EXPR:
5570 case STATEMENT_LIST:
5571 case OACC_PARALLEL:
5572 case OACC_KERNELS:
5573 case OACC_SERIAL:
5574 case OACC_DATA:
5575 case OACC_HOST_DATA:
5576 case OACC_DECLARE:
5577 case OACC_UPDATE:
5578 case OACC_ENTER_DATA:
5579 case OACC_EXIT_DATA:
5580 case OACC_CACHE:
5581 case OMP_PARALLEL:
5582 case OMP_FOR:
5583 case OMP_SIMD:
5584 case OMP_DISTRIBUTE:
5585 case OMP_LOOP:
5586 case OACC_LOOP:
5587 case OMP_SCAN:
5588 case OMP_SECTIONS:
5589 case OMP_SECTION:
5590 case OMP_SINGLE:
5591 case OMP_MASTER:
5592 case OMP_TASKGROUP:
5593 case OMP_ORDERED:
5594 case OMP_CRITICAL:
5595 case OMP_TASK:
5596 case OMP_TARGET:
5597 case OMP_TARGET_DATA:
5598 case OMP_TARGET_UPDATE:
5599 case OMP_TARGET_ENTER_DATA:
5600 case OMP_TARGET_EXIT_DATA:
5601 case OMP_TASKLOOP:
5602 case OMP_TEAMS:
5603 /* These are always void. */
5604 return true;
5605
5606 case CALL_EXPR:
5607 case MODIFY_EXPR:
5608 case PREDICT_EXPR:
5609 /* These are valid regardless of their type. */
5610 return true;
5611
5612 default:
5613 return false;
5614 }
5615 }
5616
5617
5618 /* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
5619 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with
5620 DECL_GIMPLE_REG_P set.
5621
5622 IMPORTANT NOTE: This promotion is performed by introducing a load of the
5623 other, unmodified part of the complex object just before the total store.
5624 As a consequence, if the object is still uninitialized, an undefined value
5625 will be loaded into a register, which may result in a spurious exception
5626 if the register is floating-point and the value happens to be a signaling
5627 NaN for example. Then the fully-fledged complex operations lowering pass
5628 followed by a DCE pass are necessary in order to fix things up. */
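/* A sketch of the transformation (hypothetical variable "c"):

     __imag__ c = y;

   becomes a load of the untouched part followed by a total store,

     tmp = __real__ c;
     c = COMPLEX_EXPR <tmp, y>;

   which is what allows "c" to use real instead of virtual operands.  */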
5629
5630 static enum gimplify_status
5631 gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
5632 bool want_value)
5633 {
5634 enum tree_code code, ocode;
5635 tree lhs, rhs, new_rhs, other, realpart, imagpart;
5636
5637 lhs = TREE_OPERAND (*expr_p, 0);
5638 rhs = TREE_OPERAND (*expr_p, 1);
5639 code = TREE_CODE (lhs);
5640 lhs = TREE_OPERAND (lhs, 0);
5641
5642 ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
5643 other = build1 (ocode, TREE_TYPE (rhs), lhs);
5644 TREE_NO_WARNING (other) = 1;
5645 other = get_formal_tmp_var (other, pre_p);
5646
5647 realpart = code == REALPART_EXPR ? rhs : other;
5648 imagpart = code == REALPART_EXPR ? other : rhs;
5649
5650 if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
5651 new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
5652 else
5653 new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
5654
5655 gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
5656 *expr_p = (want_value) ? rhs : NULL_TREE;
5657
5658 return GS_ALL_DONE;
5659 }
5660
5661 /* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
5662
5663 modify_expr
5664 : varname '=' rhs
5665 | '*' ID '=' rhs
5666
5667 PRE_P points to the list where side effects that must happen before
5668 *EXPR_P should be stored.
5669
5670 POST_P points to the list where side effects that must happen after
5671 *EXPR_P should be stored.
5672
5673 WANT_VALUE is nonzero iff we want to use the value of this expression
5674 in another expression. */
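/* E.g. (a sketch): "a = b + c" arrives as MODIFY_EXPR <a, b + c> and,
   once both operands satisfy their predicates, is emitted into PRE_P
   as a single GIMPLE_ASSIGN tuple (printed as "a = b + c" in gimple
   dumps).  */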
5675
5676 static enum gimplify_status
5677 gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
5678 bool want_value)
5679 {
5680 tree *from_p = &TREE_OPERAND (*expr_p, 1);
5681 tree *to_p = &TREE_OPERAND (*expr_p, 0);
5682 enum gimplify_status ret = GS_UNHANDLED;
5683 gimple *assign;
5684 location_t loc = EXPR_LOCATION (*expr_p);
5685 gimple_stmt_iterator gsi;
5686
5687 gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
5688 || TREE_CODE (*expr_p) == INIT_EXPR);
5689
5690 /* Trying to simplify a clobber using normal logic doesn't work,
5691 so handle it here. */
5692 if (TREE_CLOBBER_P (*from_p))
5693 {
5694 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
5695 if (ret == GS_ERROR)
5696 return ret;
5697 gcc_assert (!want_value);
5698 if (!VAR_P (*to_p) && TREE_CODE (*to_p) != MEM_REF)
5699 {
5700 tree addr = get_initialized_tmp_var (build_fold_addr_expr (*to_p),
5701 pre_p, post_p);
5702 *to_p = build_simple_mem_ref_loc (EXPR_LOCATION (*to_p), addr);
5703 }
5704 gimplify_seq_add_stmt (pre_p, gimple_build_assign (*to_p, *from_p));
5705 *expr_p = NULL;
5706 return GS_ALL_DONE;
5707 }
5708
5709 /* Insert pointer conversions required by the middle-end that are not
5710 required by the frontend. This fixes middle-end type checking
5711 for, for example, gcc.dg/redecl-6.c. */
5712 if (POINTER_TYPE_P (TREE_TYPE (*to_p)))
5713 {
5714 STRIP_USELESS_TYPE_CONVERSION (*from_p);
5715 if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
5716 *from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
5717 }
5718
5719 /* See if any simplifications can be done based on what the RHS is. */
5720 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
5721 want_value);
5722 if (ret != GS_UNHANDLED)
5723 return ret;
5724
5725 /* For zero-sized types, only gimplify the left-hand and right-hand
5726 sides as statements and throw away the assignment. Do this after
5727 gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
5728 types properly. */
5729 if (zero_sized_type (TREE_TYPE (*from_p))
5730 && !want_value
5731 /* Don't do this for calls that return addressable types, expand_call
5732 relies on those having a lhs. */
5733 && !(TREE_ADDRESSABLE (TREE_TYPE (*from_p))
5734 && TREE_CODE (*from_p) == CALL_EXPR))
5735 {
5736 gimplify_stmt (from_p, pre_p);
5737 gimplify_stmt (to_p, pre_p);
5738 *expr_p = NULL_TREE;
5739 return GS_ALL_DONE;
5740 }
5741
5742 /* If the value being copied is of variable width, compute the length
5743 of the copy into a WITH_SIZE_EXPR. Note that we need to do this
5744 before gimplifying any of the operands so that we can resolve any
5745 PLACEHOLDER_EXPRs in the size. Also note that the RTL expander uses
5746 the size of the expression to be copied, not of the destination, so
5747 that is what we must do here. */
5748 maybe_with_size_expr (from_p);
5749
5750 /* As a special case, we have to temporarily allow for assignments
5751 with a CALL_EXPR on the RHS. Since in GIMPLE a function call is
5752 a toplevel statement, when gimplifying the GENERIC expression
5753 MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
5754 GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.
5755
5756 Instead, we need to create the tuple GIMPLE_CALL <a, foo>. To
5757 prevent gimplify_expr from trying to create a new temporary for
5758 foo's LHS, we tell it that it should only gimplify until it
5759 reaches the CALL_EXPR. On return from gimplify_expr, the newly
5760 created GIMPLE_CALL <foo> will be the last statement in *PRE_P
5761 and all we need to do here is set 'a' to be its LHS. */
5762
5763 /* Gimplify the RHS first for C++17 and bug 71104. */
5764 gimple_predicate initial_pred = initial_rhs_predicate_for (*to_p);
5765 ret = gimplify_expr (from_p, pre_p, post_p, initial_pred, fb_rvalue);
5766 if (ret == GS_ERROR)
5767 return ret;
5768
5769 /* Then gimplify the LHS. */
5770 /* If we gimplified the RHS to a CALL_EXPR and that call may return
5771 twice we have to make sure to gimplify into non-SSA as otherwise
5772 the abnormal edge added later will make those defs not dominate
5773 their uses.
5774 ??? Technically this applies only to the registers used in the
5775 resulting non-register *TO_P. */
5776 bool saved_into_ssa = gimplify_ctxp->into_ssa;
5777 if (saved_into_ssa
5778 && TREE_CODE (*from_p) == CALL_EXPR
5779 && call_expr_flags (*from_p) & ECF_RETURNS_TWICE)
5780 gimplify_ctxp->into_ssa = false;
5781 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
5782 gimplify_ctxp->into_ssa = saved_into_ssa;
5783 if (ret == GS_ERROR)
5784 return ret;
5785
5786 /* Now that the LHS is gimplified, re-gimplify the RHS if our initial
5787 guess for the predicate was wrong. */
5788 gimple_predicate final_pred = rhs_predicate_for (*to_p);
5789 if (final_pred != initial_pred)
5790 {
5791 ret = gimplify_expr (from_p, pre_p, post_p, final_pred, fb_rvalue);
5792 if (ret == GS_ERROR)
5793 return ret;
5794 }
5795
5796 /* In the case of a va_arg internal fn wrapped in a WITH_SIZE_EXPR, add the type
5797 size as argument to the call. */
5798 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
5799 {
5800 tree call = TREE_OPERAND (*from_p, 0);
5801 tree vlasize = TREE_OPERAND (*from_p, 1);
5802
5803 if (TREE_CODE (call) == CALL_EXPR
5804 && CALL_EXPR_IFN (call) == IFN_VA_ARG)
5805 {
5806 int nargs = call_expr_nargs (call);
5807 tree type = TREE_TYPE (call);
5808 tree ap = CALL_EXPR_ARG (call, 0);
5809 tree tag = CALL_EXPR_ARG (call, 1);
5810 tree aptag = CALL_EXPR_ARG (call, 2);
5811 tree newcall = build_call_expr_internal_loc (EXPR_LOCATION (call),
5812 IFN_VA_ARG, type,
5813 nargs + 1, ap, tag,
5814 aptag, vlasize);
5815 TREE_OPERAND (*from_p, 0) = newcall;
5816 }
5817 }
5818
5819 /* Now see if the above changed *from_p to something we handle specially. */
5820 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
5821 want_value);
5822 if (ret != GS_UNHANDLED)
5823 return ret;
5824
5825 /* If we've got a variable-sized assignment between two lvalues (i.e. one
5826 that does not involve a call), then we can make things a bit more
5827 straightforward by converting the assignment to memcpy or memset. */
5828 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
5829 {
5830 tree from = TREE_OPERAND (*from_p, 0);
5831 tree size = TREE_OPERAND (*from_p, 1);
5832
5833 if (TREE_CODE (from) == CONSTRUCTOR)
5834 return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);
5835
5836 if (is_gimple_addressable (from))
5837 {
5838 *from_p = from;
5839 return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
5840 pre_p);
5841 }
5842 }
5843
5844 /* Transform partial stores to non-addressable complex variables into
5845 total stores. This allows us to use real instead of virtual operands
5846 for these variables, which improves optimization. */
5847 if ((TREE_CODE (*to_p) == REALPART_EXPR
5848 || TREE_CODE (*to_p) == IMAGPART_EXPR)
5849 && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
5850 return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);
5851
5852 /* Try to alleviate the effects of the gimplification creating artificial
5853 temporaries (see for example is_gimple_reg_rhs) on the debug info, but
5854 make sure not to create DECL_DEBUG_EXPR links across functions. */
5855 if (!gimplify_ctxp->into_ssa
5856 && VAR_P (*from_p)
5857 && DECL_IGNORED_P (*from_p)
5858 && DECL_P (*to_p)
5859 && !DECL_IGNORED_P (*to_p)
5860 && decl_function_context (*to_p) == current_function_decl
5861 && decl_function_context (*from_p) == current_function_decl)
5862 {
5863 if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
5864 DECL_NAME (*from_p)
5865 = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
5866 DECL_HAS_DEBUG_EXPR_P (*from_p) = 1;
5867 SET_DECL_DEBUG_EXPR (*from_p, *to_p);
5868 }
5869
5870 if (want_value && TREE_THIS_VOLATILE (*to_p))
5871 *from_p = get_initialized_tmp_var (*from_p, pre_p, post_p);
5872
5873 if (TREE_CODE (*from_p) == CALL_EXPR)
5874 {
5875 /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
5876 instead of a GIMPLE_ASSIGN. */
5877 gcall *call_stmt;
5878 if (CALL_EXPR_FN (*from_p) == NULL_TREE)
5879 {
5880 /* Gimplify internal functions created in the FEs. */
5881 int nargs = call_expr_nargs (*from_p), i;
5882 enum internal_fn ifn = CALL_EXPR_IFN (*from_p);
5883 auto_vec<tree> vargs (nargs);
5884
5885 for (i = 0; i < nargs; i++)
5886 {
5887 gimplify_arg (&CALL_EXPR_ARG (*from_p, i), pre_p,
5888 EXPR_LOCATION (*from_p));
5889 vargs.quick_push (CALL_EXPR_ARG (*from_p, i));
5890 }
5891 call_stmt = gimple_build_call_internal_vec (ifn, vargs);
5892 gimple_call_set_nothrow (call_stmt, TREE_NOTHROW (*from_p));
5893 gimple_set_location (call_stmt, EXPR_LOCATION (*expr_p));
5894 }
5895 else
5896 {
5897 tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*from_p));
5898 CALL_EXPR_FN (*from_p) = TREE_OPERAND (CALL_EXPR_FN (*from_p), 0);
5899 STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p));
5900 tree fndecl = get_callee_fndecl (*from_p);
5901 if (fndecl
5902 && fndecl_built_in_p (fndecl, BUILT_IN_EXPECT)
5903 && call_expr_nargs (*from_p) == 3)
5904 call_stmt = gimple_build_call_internal (IFN_BUILTIN_EXPECT, 3,
5905 CALL_EXPR_ARG (*from_p, 0),
5906 CALL_EXPR_ARG (*from_p, 1),
5907 CALL_EXPR_ARG (*from_p, 2));
5908 else
5909 {
5910 call_stmt = gimple_build_call_from_tree (*from_p, fnptrtype);
5911 }
5912 }
5913 notice_special_calls (call_stmt);
5914 if (!gimple_call_noreturn_p (call_stmt) || !should_remove_lhs_p (*to_p))
5915 gimple_call_set_lhs (call_stmt, *to_p);
5916 else if (TREE_CODE (*to_p) == SSA_NAME)
5917 /* The above is somewhat premature; avoid ICEing later for an
5918 SSA name without a definition. We may have uses in the GIMPLE IL.
5919 ??? This doesn't make it a default-def. */
5920 SSA_NAME_DEF_STMT (*to_p) = gimple_build_nop ();
5921
5922 assign = call_stmt;
5923 }
5924 else
5925 {
5926 assign = gimple_build_assign (*to_p, *from_p);
5927 gimple_set_location (assign, EXPR_LOCATION (*expr_p));
5928 if (COMPARISON_CLASS_P (*from_p))
5929 gimple_set_no_warning (assign, TREE_NO_WARNING (*from_p));
5930 }
5931
5932 if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
5933 {
5934 /* We should have got an SSA name from the start. */
5935 gcc_assert (TREE_CODE (*to_p) == SSA_NAME
5936 || ! gimple_in_ssa_p (cfun));
5937 }
5938
5939 gimplify_seq_add_stmt (pre_p, assign);
5940 gsi = gsi_last (*pre_p);
5941 maybe_fold_stmt (&gsi);
5942
5943 if (want_value)
5944 {
5945 *expr_p = TREE_THIS_VOLATILE (*to_p) ? *from_p : unshare_expr (*to_p);
5946 return GS_OK;
5947 }
5948 else
5949 *expr_p = NULL;
5950
5951 return GS_ALL_DONE;
5952 }
5953
5954 /* Gimplify a comparison between two variable-sized objects. Do this
5955 with a call to BUILT_IN_MEMCMP. */
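/* E.g. (a sketch, with hypothetical variable-sized operands "a" and "b"):
   the comparison "a == b" is rewritten as

     __builtin_memcmp (&a, &b, <size of a>) == 0

   where the size is the possibly non-constant TYPE_SIZE_UNIT of the
   first operand.  */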
5956
5957 static enum gimplify_status
5958 gimplify_variable_sized_compare (tree *expr_p)
5959 {
5960 location_t loc = EXPR_LOCATION (*expr_p);
5961 tree op0 = TREE_OPERAND (*expr_p, 0);
5962 tree op1 = TREE_OPERAND (*expr_p, 1);
5963 tree t, arg, dest, src, expr;
5964
5965 arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
5966 arg = unshare_expr (arg);
5967 arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
5968 src = build_fold_addr_expr_loc (loc, op1);
5969 dest = build_fold_addr_expr_loc (loc, op0);
5970 t = builtin_decl_implicit (BUILT_IN_MEMCMP);
5971 t = build_call_expr_loc (loc, t, 3, dest, src, arg);
5972
5973 expr
5974 = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
5975 SET_EXPR_LOCATION (expr, loc);
5976 *expr_p = expr;
5977
5978 return GS_OK;
5979 }
5980
5981 /* Gimplify a comparison between two aggregate objects of integral scalar
5982 mode as a comparison between the bitwise equivalent scalar values. */
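/* Sketch: for front ends that support aggregate equality (e.g. Ada),
   comparing two 8-byte records whose type has an integral scalar mode
   becomes the scalar comparison

     VIEW_CONVERT_EXPR <uint64> (op0) == VIEW_CONVERT_EXPR <uint64> (op1)

   so no memcmp call or field-wise comparison is needed; "uint64" stands
   for whatever type_for_mode returns here.  */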
5983
5984 static enum gimplify_status
5985 gimplify_scalar_mode_aggregate_compare (tree *expr_p)
5986 {
5987 location_t loc = EXPR_LOCATION (*expr_p);
5988 tree op0 = TREE_OPERAND (*expr_p, 0);
5989 tree op1 = TREE_OPERAND (*expr_p, 1);
5990
5991 tree type = TREE_TYPE (op0);
5992 tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
5993
5994 op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
5995 op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);
5996
5997 *expr_p
5998 = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
5999
6000 return GS_OK;
6001 }
6002
6003 /* Gimplify an expression sequence. This function gimplifies each
6004 expression and rewrites the original expression with the last
6005 expression of the sequence in GIMPLE form.
6006
6007 PRE_P points to the list where the side effects for all the
6008 expressions in the sequence will be emitted.
6009
6010 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
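/* E.g. (a sketch): gimplifying the right-hand side of
   "x = (f (), g (), y)" emits f () and g () as statements into PRE_P
   and rewrites *EXPR_P to the last operand, leaving just "x = y" to
   process.  */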
6011
6012 static enum gimplify_status
6013 gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
6014 {
6015 tree t = *expr_p;
6016
6017 do
6018 {
6019 tree *sub_p = &TREE_OPERAND (t, 0);
6020
6021 if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
6022 gimplify_compound_expr (sub_p, pre_p, false);
6023 else
6024 gimplify_stmt (sub_p, pre_p);
6025
6026 t = TREE_OPERAND (t, 1);
6027 }
6028 while (TREE_CODE (t) == COMPOUND_EXPR);
6029
6030 *expr_p = t;
6031 if (want_value)
6032 return GS_OK;
6033 else
6034 {
6035 gimplify_stmt (expr_p, pre_p);
6036 return GS_ALL_DONE;
6037 }
6038 }
6039
6040 /* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
6041 gimplify. After gimplification, EXPR_P will point to a new temporary
6042 that holds the original value of the SAVE_EXPR node.
6043
6044 PRE_P points to the list where side effects that must happen before
6045 *EXPR_P should be stored. */
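/* E.g. (a sketch): an unresolved SAVE_EXPR <n * 4> is evaluated once
   into a temporary,

     D.1234 = n * 4;   // hypothetical temporary name

   after which every occurrence of the SAVE_EXPR refers to D.1234, so
   the computation and its side effects are not duplicated.  */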
6046
6047 static enum gimplify_status
6048 gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6049 {
6050 enum gimplify_status ret = GS_ALL_DONE;
6051 tree val;
6052
6053 gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
6054 val = TREE_OPERAND (*expr_p, 0);
6055
6056 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
6057 if (!SAVE_EXPR_RESOLVED_P (*expr_p))
6058 {
6059 /* The operand may be a void-valued expression. It is
6060 being executed only for its side-effects. */
6061 if (TREE_TYPE (val) == void_type_node)
6062 {
6063 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
6064 is_gimple_stmt, fb_none);
6065 val = NULL;
6066 }
6067 else
6068 /* The temporary may not be an SSA name as later abnormal and EH
6069 control flow may invalidate use/def domination. When in SSA
6070 form then assume there are no such issues and SAVE_EXPRs only
6071 appear via GENERIC foldings. */
6072 val = get_initialized_tmp_var (val, pre_p, post_p,
6073 gimple_in_ssa_p (cfun));
6074
6075 TREE_OPERAND (*expr_p, 0) = val;
6076 SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
6077 }
6078
6079 *expr_p = val;
6080
6081 return ret;
6082 }
6083
6084 /* Rewrite the ADDR_EXPR node pointed to by EXPR_P
6085
6086 unary_expr
6087 : ...
6088 | '&' varname
6089 ...
6090
6091 PRE_P points to the list where side effects that must happen before
6092 *EXPR_P should be stored.
6093
6094 POST_P points to the list where side effects that must happen after
6095 *EXPR_P should be stored. */
6096
6097 static enum gimplify_status
6098 gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6099 {
6100 tree expr = *expr_p;
6101 tree op0 = TREE_OPERAND (expr, 0);
6102 enum gimplify_status ret;
6103 location_t loc = EXPR_LOCATION (*expr_p);
6104
6105 switch (TREE_CODE (op0))
6106 {
6107 case INDIRECT_REF:
6108 do_indirect_ref:
6109 /* Check if we are dealing with an expression of the form '&*ptr'.
6110 While the front end folds away '&*ptr' into 'ptr', these
6111 expressions may be generated internally by the compiler (e.g.,
6112 builtins like __builtin_va_end). */
6113 /* Caution: the silent array decomposition semantics we allow for
6114 ADDR_EXPR mean we can't always discard the pair. */
6115 /* Gimplification of the ADDR_EXPR operand may drop
6116 cv-qualification conversions, so make sure we add them if
6117 needed. */
6118 {
6119 tree op00 = TREE_OPERAND (op0, 0);
6120 tree t_expr = TREE_TYPE (expr);
6121 tree t_op00 = TREE_TYPE (op00);
6122
6123 if (!useless_type_conversion_p (t_expr, t_op00))
6124 op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
6125 *expr_p = op00;
6126 ret = GS_OK;
6127 }
6128 break;
6129
6130 case VIEW_CONVERT_EXPR:
6131 /* Take the address of our operand and then convert it to the type of
6132 this ADDR_EXPR.
6133
6134 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
6135 all clear. The impact of this transformation is even less clear. */
6136
6137 /* If the operand is a useless conversion, look through it. Doing so
6138 guarantees that the ADDR_EXPR and its operand will remain of the
6139 same type. */
6140 if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
6141 op0 = TREE_OPERAND (op0, 0);
6142
6143 *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
6144 build_fold_addr_expr_loc (loc,
6145 TREE_OPERAND (op0, 0)));
6146 ret = GS_OK;
6147 break;
6148
6149 case MEM_REF:
6150 if (integer_zerop (TREE_OPERAND (op0, 1)))
6151 goto do_indirect_ref;
6152
6153 /* fall through */
6154
6155 default:
6156 /* If we see a call to a declared builtin or see its address
6157 being taken (we can unify those cases here) then we can mark
6158 the builtin for implicit generation by GCC. */
6159 if (TREE_CODE (op0) == FUNCTION_DECL
6160 && fndecl_built_in_p (op0, BUILT_IN_NORMAL)
6161 && builtin_decl_declared_p (DECL_FUNCTION_CODE (op0)))
6162 set_builtin_decl_implicit_p (DECL_FUNCTION_CODE (op0), true);
6163
6164 /* We use fb_either here because the C frontend sometimes takes
6165 the address of a call that returns a struct; see
6166 gcc.dg/c99-array-lval-1.c. The gimplifier will correctly make
6167 the implied temporary explicit. */
6168
6169 /* Make the operand addressable. */
6170 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
6171 is_gimple_addressable, fb_either);
6172 if (ret == GS_ERROR)
6173 break;
6174
6175 /* Then mark it. Beware that it may not be possible to do so directly
6176 if a temporary has been created by the gimplification. */
6177 prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p);
6178
6179 op0 = TREE_OPERAND (expr, 0);
6180
6181 /* For various reasons, the gimplification of the expression
6182 may have made a new INDIRECT_REF. */
6183 if (TREE_CODE (op0) == INDIRECT_REF)
6184 goto do_indirect_ref;
6185
6186 mark_addressable (TREE_OPERAND (expr, 0));
6187
6188 /* The FEs may end up building ADDR_EXPRs early on a decl with
6189 an incomplete type. Re-build ADDR_EXPRs in canonical form
6190 here. */
6191 if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr))))
6192 *expr_p = build_fold_addr_expr (op0);
6193
6194 /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly. */
6195 recompute_tree_invariant_for_addr_expr (*expr_p);
6196
6197 /* If we re-built the ADDR_EXPR add a conversion to the original type
6198 if required. */
6199 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
6200 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
6201
6202 break;
6203 }
6204
6205 return ret;
6206 }
6207
6208 /* Gimplify the operands of an ASM_EXPR. Input operands should be a gimple
6209 value; output operands should be a gimple lvalue. */
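/* For example (a sketch):

     asm volatile ("add %1, %0" : "+r" (x) : "r" (y));

   "x" must gimplify to an lvalue since it is an output, and "y" to a
   gimple value; the "+r" in/out constraint is split into separate
   output and input operands further below.  */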
6210
6211 static enum gimplify_status
6212 gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6213 {
6214 tree expr;
6215 int noutputs;
6216 const char **oconstraints;
6217 int i;
6218 tree link;
6219 const char *constraint;
6220 bool allows_mem, allows_reg, is_inout;
6221 enum gimplify_status ret, tret;
6222 gasm *stmt;
6223 vec<tree, va_gc> *inputs;
6224 vec<tree, va_gc> *outputs;
6225 vec<tree, va_gc> *clobbers;
6226 vec<tree, va_gc> *labels;
6227 tree link_next;
6228
6229 expr = *expr_p;
6230 noutputs = list_length (ASM_OUTPUTS (expr));
6231 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
6232
6233 inputs = NULL;
6234 outputs = NULL;
6235 clobbers = NULL;
6236 labels = NULL;
6237
6238 ret = GS_ALL_DONE;
6239 link_next = NULL_TREE;
6240 for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
6241 {
6242 bool ok;
6243 size_t constraint_len;
6244
6245 link_next = TREE_CHAIN (link);
6246
6247 oconstraints[i]
6248 = constraint
6249 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
6250 constraint_len = strlen (constraint);
6251 if (constraint_len == 0)
6252 continue;
6253
6254 ok = parse_output_constraint (&constraint, i, 0, 0,
6255 &allows_mem, &allows_reg, &is_inout);
6256 if (!ok)
6257 {
6258 ret = GS_ERROR;
6259 is_inout = false;
6260 }
6261
6262 /* If we can't make copies, we can only accept memory.
6263 Similarly for VLAs. */
6264 tree outtype = TREE_TYPE (TREE_VALUE (link));
6265 if (outtype != error_mark_node
6266 && (TREE_ADDRESSABLE (outtype)
6267 || !COMPLETE_TYPE_P (outtype)
6268 || !tree_fits_poly_uint64_p (TYPE_SIZE_UNIT (outtype))))
6269 {
6270 if (allows_mem)
6271 allows_reg = 0;
6272 else
6273 {
6274 error ("impossible constraint in %<asm%>");
6275 error ("non-memory output %d must stay in memory", i);
6276 return GS_ERROR;
6277 }
6278 }
6279
6280 if (!allows_reg && allows_mem)
6281 mark_addressable (TREE_VALUE (link));
6282
6283 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6284 is_inout ? is_gimple_min_lval : is_gimple_lvalue,
6285 fb_lvalue | fb_mayfail);
6286 if (tret == GS_ERROR)
6287 {
6288 error ("invalid lvalue in %<asm%> output %d", i);
6289 ret = tret;
6290 }
6291
6292 /* If the constraint does not allow memory, make sure we gimplify
6293 the operand to a register if it is not one already but its base is.
6294 This happens for complex and vector components. */
6295 if (!allows_mem)
6296 {
6297 tree op = TREE_VALUE (link);
6298 if (! is_gimple_val (op)
6299 && is_gimple_reg_type (TREE_TYPE (op))
6300 && is_gimple_reg (get_base_address (op)))
6301 {
6302 tree tem = create_tmp_reg (TREE_TYPE (op));
6303 tree ass;
6304 if (is_inout)
6305 {
6306 ass = build2 (MODIFY_EXPR, TREE_TYPE (tem),
6307 tem, unshare_expr (op));
6308 gimplify_and_add (ass, pre_p);
6309 }
6310 ass = build2 (MODIFY_EXPR, TREE_TYPE (tem), op, tem);
6311 gimplify_and_add (ass, post_p);
6312
6313 TREE_VALUE (link) = tem;
6314 tret = GS_OK;
6315 }
6316 }
6317
6318 vec_safe_push (outputs, link);
6319 TREE_CHAIN (link) = NULL_TREE;
6320
6321 if (is_inout)
6322 {
6323 /* An input/output operand. To give the optimizers more
6324 flexibility, split it into separate input and output
6325 operands. */
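/* E.g. (a sketch): the operand "+r" (x) at output position 0
   becomes the output "=r" (x) plus a matching input "0" (x)
   appended to ASM_INPUTS below.  */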
6326 tree input;
6327 /* Buffer big enough to format a 32-bit UINT_MAX into. */
6328 char buf[11];
6329
6330 /* Turn the in/out constraint into an output constraint. */
6331 char *p = xstrdup (constraint);
6332 p[0] = '=';
6333 TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);
6334
6335 /* And add a matching input constraint. */
6336 if (allows_reg)
6337 {
6338 sprintf (buf, "%u", i);
6339
6340 /* If there are multiple alternatives in the constraint,
6341 handle each of them individually. Those that allow a
6342 register are replaced with the operand number; the others
6343 stay unchanged. */
6344 if (strchr (p, ',') != NULL)
6345 {
6346 size_t len = 0, buflen = strlen (buf);
6347 char *beg, *end, *str, *dst;
6348
6349 for (beg = p + 1;;)
6350 {
6351 end = strchr (beg, ',');
6352 if (end == NULL)
6353 end = strchr (beg, '\0');
6354 if ((size_t) (end - beg) < buflen)
6355 len += buflen + 1;
6356 else
6357 len += end - beg + 1;
6358 if (*end)
6359 beg = end + 1;
6360 else
6361 break;
6362 }
6363
6364 str = (char *) alloca (len);
6365 for (beg = p + 1, dst = str;;)
6366 {
6367 const char *tem;
6368 bool mem_p, reg_p, inout_p;
6369
6370 end = strchr (beg, ',');
6371 if (end)
6372 *end = '\0';
6373 beg[-1] = '=';
6374 tem = beg - 1;
6375 parse_output_constraint (&tem, i, 0, 0,
6376 &mem_p, &reg_p, &inout_p);
6377 if (dst != str)
6378 *dst++ = ',';
6379 if (reg_p)
6380 {
6381 memcpy (dst, buf, buflen);
6382 dst += buflen;
6383 }
6384 else
6385 {
6386 if (end)
6387 len = end - beg;
6388 else
6389 len = strlen (beg);
6390 memcpy (dst, beg, len);
6391 dst += len;
6392 }
6393 if (end)
6394 beg = end + 1;
6395 else
6396 break;
6397 }
6398 *dst = '\0';
6399 input = build_string (dst - str, str);
6400 }
6401 else
6402 input = build_string (strlen (buf), buf);
6403 }
6404 else
6405 input = build_string (constraint_len - 1, constraint + 1);
6406
6407 free (p);
6408
6409 input = build_tree_list (build_tree_list (NULL_TREE, input),
6410 unshare_expr (TREE_VALUE (link)));
6411 ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
6412 }
6413 }
6414
6415 link_next = NULL_TREE;
6416 for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
6417 {
6418 link_next = TREE_CHAIN (link);
6419 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
6420 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
6421 oconstraints, &allows_mem, &allows_reg);
6422
6423 /* If we can't make copies, we can only accept memory. */
6424 tree intype = TREE_TYPE (TREE_VALUE (link));
6425 if (intype != error_mark_node
6426 && (TREE_ADDRESSABLE (intype)
6427 || !COMPLETE_TYPE_P (intype)
6428 || !tree_fits_poly_uint64_p (TYPE_SIZE_UNIT (intype))))
6429 {
6430 if (allows_mem)
6431 allows_reg = 0;
6432 else
6433 {
6434 error ("impossible constraint in %<asm%>");
6435 error ("non-memory input %d must stay in memory", i);
6436 return GS_ERROR;
6437 }
6438 }
6439
6440 /* If the operand is a memory input, it should be an lvalue. */
6441 if (!allows_reg && allows_mem)
6442 {
6443 tree inputv = TREE_VALUE (link);
6444 STRIP_NOPS (inputv);
6445 if (TREE_CODE (inputv) == PREDECREMENT_EXPR
6446 || TREE_CODE (inputv) == PREINCREMENT_EXPR
6447 || TREE_CODE (inputv) == POSTDECREMENT_EXPR
6448 || TREE_CODE (inputv) == POSTINCREMENT_EXPR
6449 || TREE_CODE (inputv) == MODIFY_EXPR)
6450 TREE_VALUE (link) = error_mark_node;
6451 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6452 is_gimple_lvalue, fb_lvalue | fb_mayfail);
6453 if (tret != GS_ERROR)
6454 {
6455 /* Unlike output operands, memory inputs are not guaranteed
6456 to be lvalues by the FE, and while the expressions are
6457 marked addressable there, in e.g. a statement expression
6458 the temporaries might not end up being addressable. They
6459 might already be used in the IL, though, and thus it is
6460 too late to make them addressable now. */
6461 tree x = TREE_VALUE (link);
6462 while (handled_component_p (x))
6463 x = TREE_OPERAND (x, 0);
6464 if (TREE_CODE (x) == MEM_REF
6465 && TREE_CODE (TREE_OPERAND (x, 0)) == ADDR_EXPR)
6466 x = TREE_OPERAND (TREE_OPERAND (x, 0), 0);
6467 if ((VAR_P (x)
6468 || TREE_CODE (x) == PARM_DECL
6469 || TREE_CODE (x) == RESULT_DECL)
6470 && !TREE_ADDRESSABLE (x)
6471 && is_gimple_reg (x))
6472 {
6473 warning_at (EXPR_LOC_OR_LOC (TREE_VALUE (link),
6474 input_location), 0,
6475 "memory input %d is not directly addressable",
6476 i);
6477 prepare_gimple_addressable (&TREE_VALUE (link), pre_p);
6478 }
6479 }
6480 mark_addressable (TREE_VALUE (link));
6481 if (tret == GS_ERROR)
6482 {
6483 error_at (EXPR_LOC_OR_LOC (TREE_VALUE (link), input_location),
6484 "memory input %d is not directly addressable", i);
6485 ret = tret;
6486 }
6487 }
6488 else
6489 {
6490 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6491 is_gimple_asm_val, fb_rvalue);
6492 if (tret == GS_ERROR)
6493 ret = tret;
6494 }
6495
6496 TREE_CHAIN (link) = NULL_TREE;
6497 vec_safe_push (inputs, link);
6498 }
6499
6500 link_next = NULL_TREE;
6501 for (link = ASM_CLOBBERS (expr); link; ++i, link = link_next)
6502 {
6503 link_next = TREE_CHAIN (link);
6504 TREE_CHAIN (link) = NULL_TREE;
6505 vec_safe_push (clobbers, link);
6506 }
6507
6508 link_next = NULL_TREE;
6509 for (link = ASM_LABELS (expr); link; ++i, link = link_next)
6510 {
6511 link_next = TREE_CHAIN (link);
6512 TREE_CHAIN (link) = NULL_TREE;
6513 vec_safe_push (labels, link);
6514 }
6515
6516 /* Do not add ASMs with errors to the gimple IL stream. */
6517 if (ret != GS_ERROR)
6518 {
6519 stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
6520 inputs, outputs, clobbers, labels);
6521
6522 gimple_asm_set_volatile (stmt, ASM_VOLATILE_P (expr) || noutputs == 0);
6523 gimple_asm_set_input (stmt, ASM_INPUT_P (expr));
6524 gimple_asm_set_inline (stmt, ASM_INLINE_P (expr));
6525
6526 gimplify_seq_add_stmt (pre_p, stmt);
6527 }
6528
6529 return ret;
6530 }
6531
6532 /* Gimplify a CLEANUP_POINT_EXPR. Currently this works by adding
6533 GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
6534 gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
6535 return to this function.
6536
6537 FIXME should we complexify the prequeue handling instead? Or use flags
6538 for all the cleanups and let the optimizer tighten them up? The current
6539 code seems pretty fragile; it will break on a cleanup within any
6540 non-conditional nesting. But any such nesting would be broken, anyway;
6541 we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
6542 and continues out of it. We can do that at the RTL level, though, so
6543 having an optimizer to tighten up try/finally regions would be a Good
6544 Thing. */
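/* A sketch of the rewrite done below: a cleanup marker in the body,

     stmt1;
     WITH_CLEANUP_EXPR <cleanup>;
     stmt2;

   becomes

     stmt1;
     try { stmt2; } finally { cleanup; }

   while a marker that is last in the sequence just emits its cleanup
   inline (unless it is EH-only).  */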
6545
6546 static enum gimplify_status
6547 gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
6548 {
6549 gimple_stmt_iterator iter;
6550 gimple_seq body_sequence = NULL;
6551
6552 tree temp = voidify_wrapper_expr (*expr_p, NULL);
6553
6554 /* We only care about the number of conditions between the innermost
6555 CLEANUP_POINT_EXPR and the cleanup. So save and reset the count and
6556 any cleanups collected outside the CLEANUP_POINT_EXPR. */
6557 int old_conds = gimplify_ctxp->conditions;
6558 gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
6559 bool old_in_cleanup_point_expr = gimplify_ctxp->in_cleanup_point_expr;
6560 gimplify_ctxp->conditions = 0;
6561 gimplify_ctxp->conditional_cleanups = NULL;
6562 gimplify_ctxp->in_cleanup_point_expr = true;
6563
6564 gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);
6565
6566 gimplify_ctxp->conditions = old_conds;
6567 gimplify_ctxp->conditional_cleanups = old_cleanups;
6568 gimplify_ctxp->in_cleanup_point_expr = old_in_cleanup_point_expr;
6569
6570 for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
6571 {
6572 gimple *wce = gsi_stmt (iter);
6573
6574 if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
6575 {
6576 if (gsi_one_before_end_p (iter))
6577 {
6578 /* Note that gsi_insert_seq_before and gsi_remove do not
6579 scan operands, unlike some other sequence mutators. */
6580 if (!gimple_wce_cleanup_eh_only (wce))
6581 gsi_insert_seq_before_without_update (&iter,
6582 gimple_wce_cleanup (wce),
6583 GSI_SAME_STMT);
6584 gsi_remove (&iter, true);
6585 break;
6586 }
6587 else
6588 {
6589 gtry *gtry;
6590 gimple_seq seq;
6591 enum gimple_try_flags kind;
6592
6593 if (gimple_wce_cleanup_eh_only (wce))
6594 kind = GIMPLE_TRY_CATCH;
6595 else
6596 kind = GIMPLE_TRY_FINALLY;
6597 seq = gsi_split_seq_after (iter);
6598
6599 gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
6600 /* Do not use gsi_replace here, as it may scan operands.
6601 We want to do a simple structural modification only. */
6602 gsi_set_stmt (&iter, gtry);
6603 iter = gsi_start (gtry->eval);
6604 }
6605 }
6606 else
6607 gsi_next (&iter);
6608 }
6609
6610 gimplify_seq_add_seq (pre_p, body_sequence);
6611 if (temp)
6612 {
6613 *expr_p = temp;
6614 return GS_OK;
6615 }
6616 else
6617 {
6618 *expr_p = NULL;
6619 return GS_ALL_DONE;
6620 }
6621 }
6622
6623 /* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP
6624 is the cleanup action required. EH_ONLY is true if the cleanup should
6625 only be executed if an exception is thrown, not on normal exit.
6626 If FORCE_UNCOND is true perform the cleanup unconditionally; this is
6627 only valid for clobbers. */
6628
6629 static void
6630 gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p,
6631 bool force_uncond = false)
6632 {
6633 gimple *wce;
6634 gimple_seq cleanup_stmts = NULL;
6635
6636 /* Errors can result in improperly nested cleanups, which causes
6637 confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR. */
6638 if (seen_error ())
6639 return;
6640
6641 if (gimple_conditional_context ())
6642 {
6643 /* If we're in a conditional context, this is more complex. We only
6644 want to run the cleanup if we actually ran the initialization that
6645 necessitates it, but we want to run it after the end of the
6646 conditional context. So we wrap the try/finally around the
6647 condition and use a flag to determine whether or not to actually
6648 run the destructor. Thus
6649
6650 test ? f(A()) : 0
6651
6652 becomes (approximately)
6653
6654 flag = 0;
6655 try {
6656 if (test) { A::A(temp); flag = 1; val = f(temp); }
6657 else { val = 0; }
6658 } finally {
6659 if (flag) A::~A(temp);
6660 }
6661 val
6662 */
6663 if (force_uncond)
6664 {
6665 gimplify_stmt (&cleanup, &cleanup_stmts);
6666 wce = gimple_build_wce (cleanup_stmts);
6667 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
6668 }
6669 else
6670 {
6671 tree flag = create_tmp_var (boolean_type_node, "cleanup");
6672 gassign *ffalse = gimple_build_assign (flag, boolean_false_node);
6673 gassign *ftrue = gimple_build_assign (flag, boolean_true_node);
6674
6675 cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
6676 gimplify_stmt (&cleanup, &cleanup_stmts);
6677 wce = gimple_build_wce (cleanup_stmts);
6678
6679 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
6680 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
6681 gimplify_seq_add_stmt (pre_p, ftrue);
6682
6683 /* Because of this manipulation, and the EH edges that jump
6684 threading cannot redirect, the temporary (VAR) will appear
6685 to be used uninitialized. Don't warn. */
6686 TREE_NO_WARNING (var) = 1;
6687 }
6688 }
6689 else
6690 {
6691 gimplify_stmt (&cleanup, &cleanup_stmts);
6692 wce = gimple_build_wce (cleanup_stmts);
6693 gimple_wce_set_cleanup_eh_only (wce, eh_only);
6694 gimplify_seq_add_stmt (pre_p, wce);
6695 }
6696 }
6697
6698 /* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */
6699
6700 static enum gimplify_status
6701 gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6702 {
6703 tree targ = *expr_p;
6704 tree temp = TARGET_EXPR_SLOT (targ);
6705 tree init = TARGET_EXPR_INITIAL (targ);
6706 enum gimplify_status ret;
6707
6708 bool unpoison_empty_seq = false;
6709 gimple_stmt_iterator unpoison_it;
6710
6711 if (init)
6712 {
6713 tree cleanup = NULL_TREE;
6714
6715 /* TARGET_EXPR temps aren't part of the enclosing block, so add the
6716 temp to the temps list. Also handle variable-length TARGET_EXPRs. */
6717 if (!poly_int_tree_p (DECL_SIZE (temp)))
6718 {
6719 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
6720 gimplify_type_sizes (TREE_TYPE (temp), pre_p);
6721 gimplify_vla_decl (temp, pre_p);
6722 }
6723 else
6724 {
6725 /* Save the location where we need to place unpoisoning; it's
6726 possible that the variable will later need to live in memory. */
6727 unpoison_it = gsi_last (*pre_p);
6728 unpoison_empty_seq = gsi_end_p (unpoison_it);
6729
6730 gimple_add_tmp_var (temp);
6731 }
6732
6733 /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
6734 expression is supposed to initialize the slot. */
6735 if (VOID_TYPE_P (TREE_TYPE (init)))
6736 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
6737 else
6738 {
6739 tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
6740 init = init_expr;
6741 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
6742 init = NULL;
6743 ggc_free (init_expr);
6744 }
6745 if (ret == GS_ERROR)
6746 {
6747 /* PR c++/28266 Make sure this is expanded only once. */
6748 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
6749 return GS_ERROR;
6750 }
6751 if (init)
6752 gimplify_and_add (init, pre_p);
6753
6754 /* If needed, push the cleanup for the temp. */
6755 if (TARGET_EXPR_CLEANUP (targ))
6756 {
6757 if (CLEANUP_EH_ONLY (targ))
6758 gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
6759 CLEANUP_EH_ONLY (targ), pre_p);
6760 else
6761 cleanup = TARGET_EXPR_CLEANUP (targ);
6762 }
6763
6764 /* Add a clobber for the temporary going out of scope, like
6765 gimplify_bind_expr. */
6766 if (gimplify_ctxp->in_cleanup_point_expr
6767 && needs_to_live_in_memory (temp))
6768 {
6769 if (flag_stack_reuse == SR_ALL)
6770 {
6771 tree clobber = build_clobber (TREE_TYPE (temp));
6772 clobber = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, clobber);
6773 gimple_push_cleanup (temp, clobber, false, pre_p, true);
6774 }
6775 if (asan_poisoned_variables
6776 && DECL_ALIGN (temp) <= MAX_SUPPORTED_STACK_ALIGNMENT
6777 && !TREE_STATIC (temp)
6778 && dbg_cnt (asan_use_after_scope)
6779 && !gimplify_omp_ctxp)
6780 {
6781 tree asan_cleanup = build_asan_poison_call_expr (temp);
6782 if (asan_cleanup)
6783 {
6784 if (unpoison_empty_seq)
6785 unpoison_it = gsi_start (*pre_p);
6786
6787 asan_poison_variable (temp, false, &unpoison_it,
6788 unpoison_empty_seq);
6789 gimple_push_cleanup (temp, asan_cleanup, false, pre_p);
6790 }
6791 }
6792 }
6793 if (cleanup)
6794 gimple_push_cleanup (temp, cleanup, false, pre_p);
6795
6796 /* Only expand this once. */
6797 TREE_OPERAND (targ, 3) = init;
6798 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
6799 }
6800 else
6801 /* We should have expanded this before. */
6802 gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));
6803
6804 *expr_p = temp;
6805 return GS_OK;
6806 }
6807
6808 /* Gimplification of expression trees. */
6809
6810 /* Gimplify an expression which appears at statement context. The
6811 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is
6812 NULL, a new sequence is allocated.
6813
6814 Return true if we actually added a statement to the queue. */
6815
6816 bool
6817 gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
6818 {
6819 gimple_seq_node last;
6820
6821 last = gimple_seq_last (*seq_p);
6822 gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
6823 return last != gimple_seq_last (*seq_p);
6824 }
6825
6826 /* Add FIRSTPRIVATE entries for DECL to CTX and the surrounding OpenMP
6827 parallels. If entries already exist, force them to be some flavor of
6828 private. If there is no enclosing parallel, do nothing. */
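/* E.g. (a sketch): for a VLA "int a[n]" referenced inside
   "#pragma omp parallel", the gimplified size temporary is entered
   here as GOVD_FIRSTPRIVATE in each enclosing parallel so the bound
   is copied into the region.  */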
6829
6830 void
6831 omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
6832 {
6833 splay_tree_node n;
6834
6835 if (decl == NULL || !DECL_P (decl) || ctx->region_type == ORT_NONE)
6836 return;
6837
6838 do
6839 {
6840 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6841 if (n != NULL)
6842 {
6843 if (n->value & GOVD_SHARED)
6844 n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
6845 else if (n->value & GOVD_MAP)
6846 n->value |= GOVD_MAP_TO_ONLY;
6847 else
6848 return;
6849 }
6850 else if ((ctx->region_type & ORT_TARGET) != 0)
6851 {
6852 if (ctx->defaultmap[GDMK_SCALAR] & GOVD_FIRSTPRIVATE)
6853 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
6854 else
6855 omp_add_variable (ctx, decl, GOVD_MAP | GOVD_MAP_TO_ONLY);
6856 }
6857 else if (ctx->region_type != ORT_WORKSHARE
6858 && ctx->region_type != ORT_TASKGROUP
6859 && ctx->region_type != ORT_SIMD
6860 && ctx->region_type != ORT_ACC
6861 && !(ctx->region_type & ORT_TARGET_DATA))
6862 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
6863
6864 ctx = ctx->outer_context;
6865 }
6866 while (ctx);
6867 }
6868
6869 /* Similarly for each of the type sizes of TYPE. */
6870
6871 static void
6872 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
6873 {
6874 if (type == NULL || type == error_mark_node)
6875 return;
6876 type = TYPE_MAIN_VARIANT (type);
6877
6878 if (ctx->privatized_types->add (type))
6879 return;
6880
6881 switch (TREE_CODE (type))
6882 {
6883 case INTEGER_TYPE:
6884 case ENUMERAL_TYPE:
6885 case BOOLEAN_TYPE:
6886 case REAL_TYPE:
6887 case FIXED_POINT_TYPE:
6888 omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
6889 omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
6890 break;
6891
6892 case ARRAY_TYPE:
6893 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
6894 omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
6895 break;
6896
6897 case RECORD_TYPE:
6898 case UNION_TYPE:
6899 case QUAL_UNION_TYPE:
6900 {
6901 tree field;
6902 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
6903 if (TREE_CODE (field) == FIELD_DECL)
6904 {
6905 omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
6906 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
6907 }
6908 }
6909 break;
6910
6911 case POINTER_TYPE:
6912 case REFERENCE_TYPE:
6913 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
6914 break;
6915
6916 default:
6917 break;
6918 }
6919
6920 omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
6921 omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
6922 lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
6923 }
6924
6925 /* Add an entry for DECL in the OMP context CTX with FLAGS. */
6926
6927 static void
6928 omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
6929 {
6930 splay_tree_node n;
6931 unsigned int nflags;
6932 tree t;
6933
6934 if (error_operand_p (decl) || ctx->region_type == ORT_NONE)
6935 return;
6936
6937 /* Never elide decls whose type has TREE_ADDRESSABLE set. This means
6938 there are constructors involved somewhere. The exception is a shared
6939 clause: nothing is privatized in that case. */
6940 if ((flags & GOVD_SHARED) == 0
6941 && (TREE_ADDRESSABLE (TREE_TYPE (decl))
6942 || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl))))
6943 flags |= GOVD_SEEN;
6944
6945 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6946 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
6947 {
6948 /* We shouldn't be re-adding the decl with the same data
6949 sharing class. */
6950 gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
6951 nflags = n->value | flags;
6952 /* The only combination of data sharing classes we should see is
6953 FIRSTPRIVATE and LASTPRIVATE. However, OpenACC permits
6954 reduction variables to be used in data sharing clauses. */
6955 gcc_assert ((ctx->region_type & ORT_ACC) != 0
6956 || ((nflags & GOVD_DATA_SHARE_CLASS)
6957 == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE))
6958 || (flags & GOVD_DATA_SHARE_CLASS) == 0);
6959 n->value = nflags;
6960 return;
6961 }
6962
6963 /* When adding a variable-sized variable, we have to handle all sorts
6964 of additional bits of data: the pointer replacement variable, and
6965 the parameters of the type. */
6966 if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
6967 {
6968 /* Add the pointer replacement variable as PRIVATE if the variable
6969 replacement is private, else FIRSTPRIVATE since we'll need the
6970 address of the original variable either for SHARED, or for the
6971 copy into or out of the context. */
6972 if (!(flags & GOVD_LOCAL) && ctx->region_type != ORT_TASKGROUP)
6973 {
6974 if (flags & GOVD_MAP)
6975 nflags = GOVD_MAP | GOVD_MAP_TO_ONLY | GOVD_EXPLICIT;
6976 else if (flags & GOVD_PRIVATE)
6977 nflags = GOVD_PRIVATE;
6978 else if (((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
6979 && (flags & GOVD_FIRSTPRIVATE))
6980 || (ctx->region_type == ORT_TARGET_DATA
6981 && (flags & GOVD_DATA_SHARE_CLASS) == 0))
6982 nflags = GOVD_PRIVATE | GOVD_EXPLICIT;
6983 else
6984 nflags = GOVD_FIRSTPRIVATE;
6985 nflags |= flags & GOVD_SEEN;
6986 t = DECL_VALUE_EXPR (decl);
6987 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
6988 t = TREE_OPERAND (t, 0);
6989 gcc_assert (DECL_P (t));
6990 omp_add_variable (ctx, t, nflags);
6991 }
6992
6993 /* Add all of the variable and type parameters (which should have
6994 been gimplified to a formal temporary) as FIRSTPRIVATE. */
6995 omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
6996 omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
6997 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
6998
6999 /* The variable-sized variable itself is never SHARED, only some form
7000 of PRIVATE. The sharing would take place via the pointer variable
7001 which we remapped above. */
7002 if (flags & GOVD_SHARED)
7003 flags = GOVD_SHARED | GOVD_DEBUG_PRIVATE
7004 | (flags & (GOVD_SEEN | GOVD_EXPLICIT));
7005
7006 /* We're going to make use of the TYPE_SIZE_UNIT at least in the
7007 alloca statement we generate for the variable, so make sure it
7008 is available. This isn't automatically needed for the SHARED
7009 case, since we won't be allocating local storage then.
7010 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
7011 in this case omp_notice_variable will be called later
7012 on when it is gimplified. */
7013 else if (! (flags & (GOVD_LOCAL | GOVD_MAP))
7014 && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl))))
7015 omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
7016 }
7017 else if ((flags & (GOVD_MAP | GOVD_LOCAL)) == 0
7018 && lang_hooks.decls.omp_privatize_by_reference (decl))
7019 {
7020 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
7021
7022 /* Similar to the direct variable sized case above, we'll need the
7023 size of references being privatized. */
7024 if ((flags & GOVD_SHARED) == 0)
7025 {
7026 t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
7027 if (DECL_P (t))
7028 omp_notice_variable (ctx, t, true);
7029 }
7030 }
7031
7032 if (n != NULL)
7033 n->value |= flags;
7034 else
7035 splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
7036
7037 /* For reduction clauses in OpenACC loop directives, by default create a
7038 copy clause on the enclosing parallel construct for carrying back the
7039 results. */
7040 if (ctx->region_type == ORT_ACC && (flags & GOVD_REDUCTION))
7041 {
7042 struct gimplify_omp_ctx *outer_ctx = ctx->outer_context;
7043 while (outer_ctx)
7044 {
7045 n = splay_tree_lookup (outer_ctx->variables, (splay_tree_key)decl);
7046 if (n != NULL)
7047 {
7048 /* Ignore local variables and explicitly declared clauses. */
7049 if (n->value & (GOVD_LOCAL | GOVD_EXPLICIT))
7050 break;
7051 else if (outer_ctx->region_type == ORT_ACC_KERNELS)
7052 {
7053 /* According to the OpenACC spec, such a reduction variable
7054 should already have a copy map on a kernels construct;
7055 verify that here. */
7056 gcc_assert (!(n->value & GOVD_FIRSTPRIVATE)
7057 && (n->value & GOVD_MAP));
7058 }
7059 else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
7060 {
7061 /* Remove firstprivate and make it a copy map. */
7062 n->value &= ~GOVD_FIRSTPRIVATE;
7063 n->value |= GOVD_MAP;
7064 }
7065 }
7066 else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
7067 {
7068 splay_tree_insert (outer_ctx->variables, (splay_tree_key)decl,
7069 GOVD_MAP | GOVD_SEEN);
7070 break;
7071 }
7072 outer_ctx = outer_ctx->outer_context;
7073 }
7074 }
7075 }
7076
7077 /* Notice a threadprivate variable DECL used in OMP context CTX.
7078 This just prints out diagnostics about threadprivate variable uses
7079 in untied tasks, target regions and 'order(concurrent)' regions.
7080 If DECL2 is non-NULL, prevent this diagnostic on that variable. */
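/* For example (hypothetical source, for illustration):

     int x;
     #pragma omp threadprivate (x)
     ...
     #pragma omp target
       x++;   // error: threadprivate variable 'x' used in target region

   Uses in untied tasks and 'order(concurrent)' regions are diagnosed
   the same way below.  */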
7081
7082 static bool
7083 omp_notice_threadprivate_variable (struct gimplify_omp_ctx *ctx, tree decl,
7084 tree decl2)
7085 {
7086 splay_tree_node n;
7087 struct gimplify_omp_ctx *octx;
7088
7089 for (octx = ctx; octx; octx = octx->outer_context)
7090 if ((octx->region_type & ORT_TARGET) != 0
7091 || octx->order_concurrent)
7092 {
7093 n = splay_tree_lookup (octx->variables, (splay_tree_key)decl);
7094 if (n == NULL)
7095 {
7096 if (octx->order_concurrent)
7097 {
7098 error ("threadprivate variable %qE used in a region with"
7099 " %<order(concurrent)%> clause", DECL_NAME (decl));
7100 error_at (octx->location, "enclosing region");
7101 }
7102 else
7103 {
7104 error ("threadprivate variable %qE used in target region",
7105 DECL_NAME (decl));
7106 error_at (octx->location, "enclosing target region");
7107 }
7108 splay_tree_insert (octx->variables, (splay_tree_key)decl, 0);
7109 }
7110 if (decl2)
7111 splay_tree_insert (octx->variables, (splay_tree_key)decl2, 0);
7112 }
7113
7114 if (ctx->region_type != ORT_UNTIED_TASK)
7115 return false;
7116 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7117 if (n == NULL)
7118 {
7119 error ("threadprivate variable %qE used in untied task",
7120 DECL_NAME (decl));
7121 error_at (ctx->location, "enclosing task");
7122 splay_tree_insert (ctx->variables, (splay_tree_key)decl, 0);
7123 }
7124 if (decl2)
7125 splay_tree_insert (ctx->variables, (splay_tree_key)decl2, 0);
7126 return false;
7127 }
7128
7129 /* Return true if global var DECL is device resident. */
7130
7131 static bool
7132 device_resident_p (tree decl)
7133 {
7134 tree attr = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (decl));
7135
7136 if (!attr)
7137 return false;
7138
7139 for (tree t = TREE_VALUE (attr); t; t = TREE_PURPOSE (t))
7140 {
7141 tree c = TREE_VALUE (t);
7142 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DEVICE_RESIDENT)
7143 return true;
7144 }
7145
7146 return false;
7147 }
7148
7149 /* Return true if DECL has an ACC DECLARE attribute. */
7150
7151 static bool
7152 is_oacc_declared (tree decl)
7153 {
7154 tree t = TREE_CODE (decl) == MEM_REF ? TREE_OPERAND (decl, 0) : decl;
7155 tree declared = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (t));
7156 return declared != NULL_TREE;
7157 }
7158
7159 /* Determine outer default flags for DECL mentioned in an OMP region
7160 but not declared in an enclosing clause.
7161
7162 ??? Some compiler-generated variables (like SAVE_EXPRs) could be
7163 remapped firstprivate instead of shared. To some extent this is
7164 addressed in omp_firstprivatize_type_sizes, but not
7165 effectively. */
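/* For example (hypothetical source, for illustration):

     int x = 0;
     #pragma omp parallel default(none)
       x++;   // error: 'x' not specified in enclosing 'parallel'

   takes the OMP_CLAUSE_DEFAULT_NONE path below.  */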
7166
7167 static unsigned
7168 omp_default_clause (struct gimplify_omp_ctx *ctx, tree decl,
7169 bool in_code, unsigned flags)
7170 {
7171 enum omp_clause_default_kind default_kind = ctx->default_kind;
7172 enum omp_clause_default_kind kind;
7173
7174 kind = lang_hooks.decls.omp_predetermined_sharing (decl);
7175 if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
7176 default_kind = kind;
7177 else if (VAR_P (decl) && TREE_STATIC (decl) && DECL_IN_CONSTANT_POOL (decl))
7178 default_kind = OMP_CLAUSE_DEFAULT_SHARED;
7179
7180 switch (default_kind)
7181 {
7182 case OMP_CLAUSE_DEFAULT_NONE:
7183 {
7184 const char *rtype;
7185
7186 if (ctx->region_type & ORT_PARALLEL)
7187 rtype = "parallel";
7188 else if ((ctx->region_type & ORT_TASKLOOP) == ORT_TASKLOOP)
7189 rtype = "taskloop";
7190 else if (ctx->region_type & ORT_TASK)
7191 rtype = "task";
7192 else if (ctx->region_type & ORT_TEAMS)
7193 rtype = "teams";
7194 else
7195 gcc_unreachable ();
7196
7197 error ("%qE not specified in enclosing %qs",
7198 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rtype);
7199 error_at (ctx->location, "enclosing %qs", rtype);
7200 }
7201 /* FALLTHRU */
7202 case OMP_CLAUSE_DEFAULT_SHARED:
7203 flags |= GOVD_SHARED;
7204 break;
7205 case OMP_CLAUSE_DEFAULT_PRIVATE:
7206 flags |= GOVD_PRIVATE;
7207 break;
7208 case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
7209 flags |= GOVD_FIRSTPRIVATE;
7210 break;
7211 case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
7212 /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED. */
7213 gcc_assert ((ctx->region_type & ORT_TASK) != 0);
7214 if (struct gimplify_omp_ctx *octx = ctx->outer_context)
7215 {
7216 omp_notice_variable (octx, decl, in_code);
7217 for (; octx; octx = octx->outer_context)
7218 {
7219 splay_tree_node n2;
7220
7221 n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
7222 if ((octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)) != 0
7223 && (n2 == NULL || (n2->value & GOVD_DATA_SHARE_CLASS) == 0))
7224 continue;
7225 if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
7226 {
7227 flags |= GOVD_FIRSTPRIVATE;
7228 goto found_outer;
7229 }
7230 if ((octx->region_type & (ORT_PARALLEL | ORT_TEAMS)) != 0)
7231 {
7232 flags |= GOVD_SHARED;
7233 goto found_outer;
7234 }
7235 }
7236 }
7237
7238 if (TREE_CODE (decl) == PARM_DECL
7239 || (!is_global_var (decl)
7240 && DECL_CONTEXT (decl) == current_function_decl))
7241 flags |= GOVD_FIRSTPRIVATE;
7242 else
7243 flags |= GOVD_SHARED;
7244 found_outer:
7245 break;
7246
7247 default:
7248 gcc_unreachable ();
7249 }
7250
7251 return flags;
7252 }
7253
7254
7255 /* Determine outer default flags for DECL mentioned in an OACC region
7256 but not declared in an enclosing clause. */
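/* Summary of the defaulting done below: in 'kernels' regions aggregates
   default to 'present_or_copy' (or 'present' under default(present)) and
   scalars to 'copy'; in 'parallel' and 'serial' regions aggregates default
   the same way, while scalars default to 'firstprivate'.  */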
7257
7258 static unsigned
7259 oacc_default_clause (struct gimplify_omp_ctx *ctx, tree decl, unsigned flags)
7260 {
7261 const char *rkind;
7262 bool on_device = false;
7263 bool is_private = false;
7264 bool declared = is_oacc_declared (decl);
7265 tree type = TREE_TYPE (decl);
7266
7267 if (lang_hooks.decls.omp_privatize_by_reference (decl))
7268 type = TREE_TYPE (type);
7269
7270 /* For Fortran COMMON blocks, only used variables in those blocks are
7271 transferred and remapped. The block itself will have a private clause to
7272 avoid transferring the data twice.
7273 The hook evaluates to false by default. For a variable in Fortran's COMMON
7274 or EQUIVALENCE block, it returns 'true' (as we have shared=false) - as only
7275 the variables in such a COMMON/EQUIVALENCE block shall be privatized, not
7276 the whole block. For C++ and Fortran, it can also be true under certain
7277 other conditions, if DECL_HAS_VALUE_EXPR. */
7278 if (RECORD_OR_UNION_TYPE_P (type))
7279 is_private = lang_hooks.decls.omp_disregard_value_expr (decl, false);
7280
7281 if ((ctx->region_type & (ORT_ACC_PARALLEL | ORT_ACC_KERNELS)) != 0
7282 && is_global_var (decl)
7283 && device_resident_p (decl)
7284 && !is_private)
7285 {
7286 on_device = true;
7287 flags |= GOVD_MAP_TO_ONLY;
7288 }
7289
7290 switch (ctx->region_type)
7291 {
7292 case ORT_ACC_KERNELS:
7293 rkind = "kernels";
7294
7295 if (is_private)
7296 flags |= GOVD_FIRSTPRIVATE;
7297 else if (AGGREGATE_TYPE_P (type))
7298 {
7299 /* Aggregates default to 'present_or_copy', or 'present'. */
7300 if (ctx->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
7301 flags |= GOVD_MAP;
7302 else
7303 flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
7304 }
7305 else
7306 /* Scalars default to 'copy'. */
7307 flags |= GOVD_MAP | GOVD_MAP_FORCE;
7308
7309 break;
7310
7311 case ORT_ACC_PARALLEL:
7312 case ORT_ACC_SERIAL:
7313 rkind = ctx->region_type == ORT_ACC_PARALLEL ? "parallel" : "serial";
7314
7315 if (is_private)
7316 flags |= GOVD_FIRSTPRIVATE;
7317 else if (on_device || declared)
7318 flags |= GOVD_MAP;
7319 else if (AGGREGATE_TYPE_P (type))
7320 {
7321 /* Aggregates default to 'present_or_copy', or 'present'. */
7322 if (ctx->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
7323 flags |= GOVD_MAP;
7324 else
7325 flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
7326 }
7327 else
7328 /* Scalars default to 'firstprivate'. */
7329 flags |= GOVD_FIRSTPRIVATE;
7330
7331 break;
7332
7333 default:
7334 gcc_unreachable ();
7335 }
7336
7337 if (DECL_ARTIFICIAL (decl))
7338 ; /* We can get compiler-generated decls, and should not complain
7339 about them. */
7340 else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_NONE)
7341 {
7342 error ("%qE not specified in enclosing OpenACC %qs construct",
7343 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rkind);
7344 inform (ctx->location, "enclosing OpenACC %qs construct", rkind);
7345 }
7346 else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_PRESENT)
7347 ; /* Handled above. */
7348 else
7349 gcc_checking_assert (ctx->default_kind == OMP_CLAUSE_DEFAULT_SHARED);
7350
7351 return flags;
7352 }
7353
7354 /* Record the fact that DECL was used within the OMP context CTX.
7355 IN_CODE is true when real code uses DECL, and false when we should
7356 merely emit default(none) errors. Return true if DECL is going to
7357 be remapped and thus DECL shouldn't be gimplified into its
7358 DECL_VALUE_EXPR (if any). */
7359
7360 static bool
7361 omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
7362 {
7363 splay_tree_node n;
7364 unsigned flags = in_code ? GOVD_SEEN : 0;
7365 bool ret = false, shared;
7366
7367 if (error_operand_p (decl))
7368 return false;
7369
7370 if (ctx->region_type == ORT_NONE)
7371 return lang_hooks.decls.omp_disregard_value_expr (decl, false);
7372
7373 if (is_global_var (decl))
7374 {
7375 /* Threadprivate variables are predetermined. */
7376 if (DECL_THREAD_LOCAL_P (decl))
7377 return omp_notice_threadprivate_variable (ctx, decl, NULL_TREE);
7378
7379 if (DECL_HAS_VALUE_EXPR_P (decl))
7380 {
7381 if (ctx->region_type & ORT_ACC)
7382 /* For OpenACC, defer expansion of the value to avoid transferring
7383 privatized common block data instead of the implicitly or explicitly
7384 transferred variables which are in common blocks. */
7385 ;
7386 else
7387 {
7388 tree value = get_base_address (DECL_VALUE_EXPR (decl));
7389
7390 if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
7391 return omp_notice_threadprivate_variable (ctx, decl, value);
7392 }
7393 }
7394
7395 if (gimplify_omp_ctxp->outer_context == NULL
7396 && VAR_P (decl)
7397 && oacc_get_fn_attrib (current_function_decl))
7398 {
7399 location_t loc = DECL_SOURCE_LOCATION (decl);
7400
7401 if (lookup_attribute ("omp declare target link",
7402 DECL_ATTRIBUTES (decl)))
7403 {
7404 error_at (loc,
7405 "%qE with %<link%> clause used in %<routine%> function",
7406 DECL_NAME (decl));
7407 return false;
7408 }
7409 else if (!lookup_attribute ("omp declare target",
7410 DECL_ATTRIBUTES (decl)))
7411 {
7412 error_at (loc,
7413 "%qE requires a %<declare%> directive for use "
7414 "in a %<routine%> function", DECL_NAME (decl));
7415 return false;
7416 }
7417 }
7418 }
7419
7420 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7421 if ((ctx->region_type & ORT_TARGET) != 0)
7422 {
7423 if (ctx->region_type & ORT_ACC)
7424 /* For OpenACC, as remarked above, defer expansion. */
7425 shared = false;
7426 else
7427 shared = true;
7428
7429 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
7430 if (n == NULL)
7431 {
7432 unsigned nflags = flags;
7433 if ((ctx->region_type & ORT_ACC) == 0)
7434 {
7435 bool is_declare_target = false;
7436 if (is_global_var (decl)
7437 && varpool_node::get_create (decl)->offloadable)
7438 {
7439 struct gimplify_omp_ctx *octx;
7440 for (octx = ctx->outer_context;
7441 octx; octx = octx->outer_context)
7442 {
7443 n = splay_tree_lookup (octx->variables,
7444 (splay_tree_key)decl);
7445 if (n
7446 && (n->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED
7447 && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
7448 break;
7449 }
7450 is_declare_target = octx == NULL;
7451 }
7452 if (!is_declare_target)
7453 {
7454 int gdmk;
7455 if (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
7456 || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
7457 && (TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
7458 == POINTER_TYPE)))
7459 gdmk = GDMK_POINTER;
7460 else if (lang_hooks.decls.omp_scalar_p (decl))
7461 gdmk = GDMK_SCALAR;
7462 else
7463 gdmk = GDMK_AGGREGATE;
7464 if (ctx->defaultmap[gdmk] == 0)
7465 {
7466 tree d = lang_hooks.decls.omp_report_decl (decl);
7467 error ("%qE not specified in enclosing %<target%>",
7468 DECL_NAME (d));
7469 error_at (ctx->location, "enclosing %<target%>");
7470 }
7471 else if (ctx->defaultmap[gdmk]
7472 & (GOVD_MAP_0LEN_ARRAY | GOVD_FIRSTPRIVATE))
7473 nflags |= ctx->defaultmap[gdmk];
7474 else
7475 {
7476 gcc_assert (ctx->defaultmap[gdmk] & GOVD_MAP);
7477 nflags |= ctx->defaultmap[gdmk] & ~GOVD_MAP;
7478 }
7479 }
7480 }
7481
7482 struct gimplify_omp_ctx *octx = ctx->outer_context;
7483 if ((ctx->region_type & ORT_ACC) && octx)
7484 {
7485 /* Look in outer OpenACC contexts, to see if there's a
7486 data attribute for this variable. */
7487 omp_notice_variable (octx, decl, in_code);
7488
7489 for (; octx; octx = octx->outer_context)
7490 {
7491 if (!(octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)))
7492 break;
7493 splay_tree_node n2
7494 = splay_tree_lookup (octx->variables,
7495 (splay_tree_key) decl);
7496 if (n2)
7497 {
7498 if (octx->region_type == ORT_ACC_HOST_DATA)
7499 error ("variable %qE declared in enclosing "
7500 "%<host_data%> region", DECL_NAME (decl));
7501 nflags |= GOVD_MAP;
7502 if (octx->region_type == ORT_ACC_DATA
7503 && (n2->value & GOVD_MAP_0LEN_ARRAY))
7504 nflags |= GOVD_MAP_0LEN_ARRAY;
7505 goto found_outer;
7506 }
7507 }
7508 }
7509
7510 if ((nflags & ~(GOVD_MAP_TO_ONLY | GOVD_MAP_FROM_ONLY
7511 | GOVD_MAP_ALLOC_ONLY)) == flags)
7512 {
7513 tree type = TREE_TYPE (decl);
7514
7515 if (gimplify_omp_ctxp->target_firstprivatize_array_bases
7516 && lang_hooks.decls.omp_privatize_by_reference (decl))
7517 type = TREE_TYPE (type);
7518 if (!lang_hooks.types.omp_mappable_type (type))
7519 {
7520 error ("%qD referenced in target region does not have "
7521 "a mappable type", decl);
7522 nflags |= GOVD_MAP | GOVD_EXPLICIT;
7523 }
7524 else
7525 {
7526 if ((ctx->region_type & ORT_ACC) != 0)
7527 nflags = oacc_default_clause (ctx, decl, flags);
7528 else
7529 nflags |= GOVD_MAP;
7530 }
7531 }
7532 found_outer:
7533 omp_add_variable (ctx, decl, nflags);
7534 }
7535 else
7536 {
7537 /* If nothing changed, there's nothing left to do. */
7538 if ((n->value & flags) == flags)
7539 return ret;
7540 flags |= n->value;
7541 n->value = flags;
7542 }
7543 goto do_outer;
7544 }
7545
7546 if (n == NULL)
7547 {
7548 if (ctx->region_type == ORT_WORKSHARE
7549 || ctx->region_type == ORT_TASKGROUP
7550 || ctx->region_type == ORT_SIMD
7551 || ctx->region_type == ORT_ACC
7552 || (ctx->region_type & ORT_TARGET_DATA) != 0)
7553 goto do_outer;
7554
7555 flags = omp_default_clause (ctx, decl, in_code, flags);
7556
7557 if ((flags & GOVD_PRIVATE)
7558 && lang_hooks.decls.omp_private_outer_ref (decl))
7559 flags |= GOVD_PRIVATE_OUTER_REF;
7560
7561 omp_add_variable (ctx, decl, flags);
7562
7563 shared = (flags & GOVD_SHARED) != 0;
7564 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
7565 goto do_outer;
7566 }
7567
7568 if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
7569 && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
7570 && DECL_SIZE (decl))
7571 {
7572 if (TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
7573 {
7574 splay_tree_node n2;
7575 tree t = DECL_VALUE_EXPR (decl);
7576 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
7577 t = TREE_OPERAND (t, 0);
7578 gcc_assert (DECL_P (t));
7579 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
7580 n2->value |= GOVD_SEEN;
7581 }
7582 else if (lang_hooks.decls.omp_privatize_by_reference (decl)
7583 && TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)))
7584 && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))))
7585 != INTEGER_CST))
7586 {
7587 splay_tree_node n2;
7588 tree t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
7589 gcc_assert (DECL_P (t));
7590 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
7591 if (n2)
7592 omp_notice_variable (ctx, t, true);
7593 }
7594 }
7595
7596 if (ctx->region_type & ORT_ACC)
7597 /* For OpenACC, as remarked above, defer expansion. */
7598 shared = false;
7599 else
7600 shared = ((flags | n->value) & GOVD_SHARED) != 0;
7601 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
7602
7603 /* If nothing changed, there's nothing left to do. */
7604 if ((n->value & flags) == flags)
7605 return ret;
7606 flags |= n->value;
7607 n->value = flags;
7608
7609 do_outer:
7610 /* If the variable is private in the current context, then we don't
7611 need to propagate anything to an outer context. */
7612 if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
7613 return ret;
7614 if ((flags & (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7615 == (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7616 return ret;
7617 if ((flags & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
7618 | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7619 == (GOVD_LASTPRIVATE | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7620 return ret;
7621 if (ctx->outer_context
7622 && omp_notice_variable (ctx->outer_context, decl, in_code))
7623 return true;
7624 return ret;
7625 }
7626
7627 /* Verify that DECL is private within CTX. If there's specific information
7628 to the contrary in the innermost scope, generate an error. */
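/* For example (hypothetical source, for illustration):

     int i;
     #pragma omp parallel for shared(i)
     for (i = 0; i < 16; i++)
       ;   // error: iteration variable 'i' should be private

   is rejected via the GOVD_SHARED check below.  */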
7629
7630 static bool
7631 omp_is_private (struct gimplify_omp_ctx *ctx, tree decl, int simd)
7632 {
7633 splay_tree_node n;
7634
7635 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7636 if (n != NULL)
7637 {
7638 if (n->value & GOVD_SHARED)
7639 {
7640 if (ctx == gimplify_omp_ctxp)
7641 {
7642 if (simd)
7643 error ("iteration variable %qE is predetermined linear",
7644 DECL_NAME (decl));
7645 else
7646 error ("iteration variable %qE should be private",
7647 DECL_NAME (decl));
7648 n->value = GOVD_PRIVATE;
7649 return true;
7650 }
7651 else
7652 return false;
7653 }
7654 else if ((n->value & GOVD_EXPLICIT) != 0
7655 && (ctx == gimplify_omp_ctxp
7656 || (ctx->region_type == ORT_COMBINED_PARALLEL
7657 && gimplify_omp_ctxp->outer_context == ctx)))
7658 {
7659 if ((n->value & GOVD_FIRSTPRIVATE) != 0)
7660 error ("iteration variable %qE should not be firstprivate",
7661 DECL_NAME (decl));
7662 else if ((n->value & GOVD_REDUCTION) != 0)
7663 error ("iteration variable %qE should not be reduction",
7664 DECL_NAME (decl));
7665 else if (simd != 1 && (n->value & GOVD_LINEAR) != 0)
7666 error ("iteration variable %qE should not be linear",
7667 DECL_NAME (decl));
7668 }
7669 return (ctx == gimplify_omp_ctxp
7670 || (ctx->region_type == ORT_COMBINED_PARALLEL
7671 && gimplify_omp_ctxp->outer_context == ctx));
7672 }
7673
7674 if (ctx->region_type != ORT_WORKSHARE
7675 && ctx->region_type != ORT_TASKGROUP
7676 && ctx->region_type != ORT_SIMD
7677 && ctx->region_type != ORT_ACC)
7678 return false;
7679 else if (ctx->outer_context)
7680 return omp_is_private (ctx->outer_context, decl, simd);
7681 return false;
7682 }
7683
7684 /* Return true if DECL is private within a parallel region
7685 that binds to the current construct's context or in parallel
7686 region's REDUCTION clause. */
7687
7688 static bool
7689 omp_check_private (struct gimplify_omp_ctx *ctx, tree decl, bool copyprivate)
7690 {
7691 splay_tree_node n;
7692
7693 do
7694 {
7695 ctx = ctx->outer_context;
7696 if (ctx == NULL)
7697 {
7698 if (is_global_var (decl))
7699 return false;
7700
7701 /* References might be private, but might be shared too. When
7702 checking for copyprivate, assume they might be private;
7703 otherwise assume they might be shared. */
7704 if (copyprivate)
7705 return true;
7706
7707 if (lang_hooks.decls.omp_privatize_by_reference (decl))
7708 return false;
7709
7710 /* Treat C++ privatized non-static data members outside
7711 of the privatization the same. */
7712 if (omp_member_access_dummy_var (decl))
7713 return false;
7714
7715 return true;
7716 }
7717
7718 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
7719
7720 if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
7721 && (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0))
7722 continue;
7723
7724 if (n != NULL)
7725 {
7726 if ((n->value & GOVD_LOCAL) != 0
7727 && omp_member_access_dummy_var (decl))
7728 return false;
7729 return (n->value & GOVD_SHARED) == 0;
7730 }
7731 }
7732 while (ctx->region_type == ORT_WORKSHARE
7733 || ctx->region_type == ORT_TASKGROUP
7734 || ctx->region_type == ORT_SIMD
7735 || ctx->region_type == ORT_ACC);
7736 return false;
7737 }
7738
7739 /* Callback for walk_tree to find a DECL_EXPR for the given DECL. */
7740
7741 static tree
7742 find_decl_expr (tree *tp, int *walk_subtrees, void *data)
7743 {
7744 tree t = *tp;
7745
7746 /* If this is a DECL_EXPR for the DECL passed in DATA, return it. */
7747 if (TREE_CODE (t) == DECL_EXPR && DECL_EXPR_DECL (t) == (tree) data)
7748 return t;
7749
7750 if (IS_TYPE_OR_DECL_P (t))
7751 *walk_subtrees = 0;
7752 return NULL_TREE;
7753 }
7754
7755 /* If *LIST_P contains any OpenMP depend clauses with iterators,
7756 lower all the depend clauses by populating the corresponding depend
7757 array. Returns 0 if there are no such depend clauses, 2 if all
7758 depend clauses should be removed, and 1 otherwise. */
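/* For example (hypothetical source, for illustration), a clause such as

     #pragma omp task depend(iterator(i = 0 : n), in : a[i])

   is lowered here into a runtime-populated array of dependence
   addresses that replaces the iterator form.  */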
7759
7760 static int
7761 gimplify_omp_depend (tree *list_p, gimple_seq *pre_p)
7762 {
7763 tree c;
7764 gimple *g;
7765 size_t n[4] = { 0, 0, 0, 0 };
7766 bool unused[4];
7767 tree counts[4] = { NULL_TREE, NULL_TREE, NULL_TREE, NULL_TREE };
7768 tree last_iter = NULL_TREE, last_count = NULL_TREE;
7769 size_t i, j;
7770 location_t first_loc = UNKNOWN_LOCATION;
7771
7772 for (c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
7773 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
7774 {
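/* Map each depend kind onto its slot in the n[]/counts[] arrays:
   0 = out/inout, 1 = mutexinoutset, 2 = in, 3 = depobj.  */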
7775 switch (OMP_CLAUSE_DEPEND_KIND (c))
7776 {
7777 case OMP_CLAUSE_DEPEND_IN:
7778 i = 2;
7779 break;
7780 case OMP_CLAUSE_DEPEND_OUT:
7781 case OMP_CLAUSE_DEPEND_INOUT:
7782 i = 0;
7783 break;
7784 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
7785 i = 1;
7786 break;
7787 case OMP_CLAUSE_DEPEND_DEPOBJ:
7788 i = 3;
7789 break;
7790 case OMP_CLAUSE_DEPEND_SOURCE:
7791 case OMP_CLAUSE_DEPEND_SINK:
7792 continue;
7793 default:
7794 gcc_unreachable ();
7795 }
7796 tree t = OMP_CLAUSE_DECL (c);
7797 if (first_loc == UNKNOWN_LOCATION)
7798 first_loc = OMP_CLAUSE_LOCATION (c);
7799 if (TREE_CODE (t) == TREE_LIST
7800 && TREE_PURPOSE (t)
7801 && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
7802 {
7803 if (TREE_PURPOSE (t) != last_iter)
7804 {
7805 tree tcnt = size_one_node;
7806 for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
7807 {
7808 if (gimplify_expr (&TREE_VEC_ELT (it, 1), pre_p, NULL,
7809 is_gimple_val, fb_rvalue) == GS_ERROR
7810 || gimplify_expr (&TREE_VEC_ELT (it, 2), pre_p, NULL,
7811 is_gimple_val, fb_rvalue) == GS_ERROR
7812 || gimplify_expr (&TREE_VEC_ELT (it, 3), pre_p, NULL,
7813 is_gimple_val, fb_rvalue) == GS_ERROR
7814 || (gimplify_expr (&TREE_VEC_ELT (it, 4), pre_p, NULL,
7815 is_gimple_val, fb_rvalue)
7816 == GS_ERROR))
7817 return 2;
7818 tree var = TREE_VEC_ELT (it, 0);
7819 tree begin = TREE_VEC_ELT (it, 1);
7820 tree end = TREE_VEC_ELT (it, 2);
7821 tree step = TREE_VEC_ELT (it, 3);
7822 tree orig_step = TREE_VEC_ELT (it, 4);
7823 tree type = TREE_TYPE (var);
7824 tree stype = TREE_TYPE (step);
7825 location_t loc = DECL_SOURCE_LOCATION (var);
7826 tree endmbegin;
7827 /* Compute count for this iterator as
7828 orig_step > 0
7829 ? (begin < end ? (end - begin + (step - 1)) / step : 0)
7830 : (begin > end ? (end - begin + (step + 1)) / step : 0)
7831 and compute product of those for the entire depend
7832 clause. */
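/* E.g. for begin 0, end 10, step 3 and positive orig_step this
   yields (10 - 0 + (3 - 1)) / 3 == 4 iterations (0, 3, 6, 9).  */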
7833 if (POINTER_TYPE_P (type))
7834 endmbegin = fold_build2_loc (loc, POINTER_DIFF_EXPR,
7835 stype, end, begin);
7836 else
7837 endmbegin = fold_build2_loc (loc, MINUS_EXPR, type,
7838 end, begin);
7839 tree stepm1 = fold_build2_loc (loc, MINUS_EXPR, stype,
7840 step,
7841 build_int_cst (stype, 1));
7842 tree stepp1 = fold_build2_loc (loc, PLUS_EXPR, stype, step,
7843 build_int_cst (stype, 1));
7844 tree pos = fold_build2_loc (loc, PLUS_EXPR, stype,
7845 unshare_expr (endmbegin),
7846 stepm1);
7847 pos = fold_build2_loc (loc, TRUNC_DIV_EXPR, stype,
7848 pos, step);
7849 tree neg = fold_build2_loc (loc, PLUS_EXPR, stype,
7850 endmbegin, stepp1);
7851 if (TYPE_UNSIGNED (stype))
7852 {
7853 neg = fold_build1_loc (loc, NEGATE_EXPR, stype, neg);
7854 step = fold_build1_loc (loc, NEGATE_EXPR, stype, step);
7855 }
7856 neg = fold_build2_loc (loc, TRUNC_DIV_EXPR, stype,
7857 neg, step);
7858 step = NULL_TREE;
7859 tree cond = fold_build2_loc (loc, LT_EXPR,
7860 boolean_type_node,
7861 begin, end);
7862 pos = fold_build3_loc (loc, COND_EXPR, stype, cond, pos,
7863 build_int_cst (stype, 0));
7864 cond = fold_build2_loc (loc, LT_EXPR, boolean_type_node,
7865 end, begin);
7866 neg = fold_build3_loc (loc, COND_EXPR, stype, cond, neg,
7867 build_int_cst (stype, 0));
7868 tree osteptype = TREE_TYPE (orig_step);
7869 cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
7870 orig_step,
7871 build_int_cst (osteptype, 0));
7872 tree cnt = fold_build3_loc (loc, COND_EXPR, stype,
7873 cond, pos, neg);
7874 cnt = fold_convert_loc (loc, sizetype, cnt);
7875 if (gimplify_expr (&cnt, pre_p, NULL, is_gimple_val,
7876 fb_rvalue) == GS_ERROR)
7877 return 2;
7878 tcnt = size_binop_loc (loc, MULT_EXPR, tcnt, cnt);
7879 }
7880 if (gimplify_expr (&tcnt, pre_p, NULL, is_gimple_val,
7881 fb_rvalue) == GS_ERROR)
7882 return 2;
7883 last_iter = TREE_PURPOSE (t);
7884 last_count = tcnt;
7885 }
7886 if (counts[i] == NULL_TREE)
7887 counts[i] = last_count;
7888 else
7889 counts[i] = size_binop_loc (OMP_CLAUSE_LOCATION (c),
7890 PLUS_EXPR, counts[i], last_count);
7891 }
7892 else
7893 n[i]++;
7894 }
7895 for (i = 0; i < 4; i++)
7896 if (counts[i])
7897 break;
7898 if (i == 4)
7899 return 0;
7900
7901 tree total = size_zero_node;
7902 for (i = 0; i < 4; i++)
7903 {
7904 unused[i] = counts[i] == NULL_TREE && n[i] == 0;
7905 if (counts[i] == NULL_TREE)
7906 counts[i] = size_zero_node;
7907 if (n[i])
7908 counts[i] = size_binop (PLUS_EXPR, counts[i], size_int (n[i]));
7909 if (gimplify_expr (&counts[i], pre_p, NULL, is_gimple_val,
7910 fb_rvalue) == GS_ERROR)
7911 return 2;
7912 total = size_binop (PLUS_EXPR, total, counts[i]);
7913 }
7914
7915 if (gimplify_expr (&total, pre_p, NULL, is_gimple_val, fb_rvalue)
7916 == GS_ERROR)
7917 return 2;
7918 bool is_old = unused[1] && unused[3];
7919 tree totalpx = size_binop (PLUS_EXPR, unshare_expr (total),
7920 size_int (is_old ? 1 : 4));
7921 tree type = build_array_type (ptr_type_node, build_index_type (totalpx));
7922 tree array = create_tmp_var_raw (type);
7923 TREE_ADDRESSABLE (array) = 1;
7924 if (!poly_int_tree_p (totalpx))
7925 {
7926 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (array)))
7927 gimplify_type_sizes (TREE_TYPE (array), pre_p);
7928 if (gimplify_omp_ctxp)
7929 {
7930 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
7931 while (ctx
7932 && (ctx->region_type == ORT_WORKSHARE
7933 || ctx->region_type == ORT_TASKGROUP
7934 || ctx->region_type == ORT_SIMD
7935 || ctx->region_type == ORT_ACC))
7936 ctx = ctx->outer_context;
7937 if (ctx)
7938 omp_add_variable (ctx, array, GOVD_LOCAL | GOVD_SEEN);
7939 }
7940 gimplify_vla_decl (array, pre_p);
7941 }
7942 else
7943 gimple_add_tmp_var (array);
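/* Fill in the array header. In the new-style layout this stores
   [0] = 0, [1] = the total count and [2..4] = the counts for
   out/inout, mutexinoutset and in (the depobj count is implied by
   the total), with the dependence addresses following from index 5.
   The old style stores the total in [0] and the out/inout count in
   [1], with addresses following from index 2.  */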
7944 tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
7945 NULL_TREE);
7946 tree tem;
7947 if (!is_old)
7948 {
7949 tem = build2 (MODIFY_EXPR, void_type_node, r,
7950 build_int_cst (ptr_type_node, 0));
7951 gimplify_and_add (tem, pre_p);
7952 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
7953 NULL_TREE);
7954 }
7955 tem = build2 (MODIFY_EXPR, void_type_node, r,
7956 fold_convert (ptr_type_node, total));
7957 gimplify_and_add (tem, pre_p);
7958 for (i = 1; i < (is_old ? 2 : 4); i++)
7959 {
7960 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (i + !is_old),
7961 NULL_TREE, NULL_TREE);
7962 tem = build2 (MODIFY_EXPR, void_type_node, r, counts[i - 1]);
7963 gimplify_and_add (tem, pre_p);
7964 }
7965
7966 tree cnts[4];
7967 for (j = 4; j; j--)
7968 if (!unused[j - 1])
7969 break;
7970 for (i = 0; i < 4; i++)
7971 {
7972 if (i && (i >= j || unused[i - 1]))
7973 {
7974 cnts[i] = cnts[i - 1];
7975 continue;
7976 }
7977 cnts[i] = create_tmp_var (sizetype);
7978 if (i == 0)
7979 g = gimple_build_assign (cnts[i], size_int (is_old ? 2 : 5));
7980 else
7981 {
7982 tree t;
7983 if (is_old)
7984 t = size_binop (PLUS_EXPR, counts[0], size_int (2));
7985 else
7986 t = size_binop (PLUS_EXPR, cnts[i - 1], counts[i - 1]);
7987 if (gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue)
7988 == GS_ERROR)
7989 return 2;
7990 g = gimple_build_assign (cnts[i], t);
7991 }
7992 gimple_seq_add_stmt (pre_p, g);
7993 }
7994
7995 last_iter = NULL_TREE;
7996 tree last_bind = NULL_TREE;
7997 tree *last_body = NULL;
7998 for (c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
7999 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
8000 {
8001 switch (OMP_CLAUSE_DEPEND_KIND (c))
8002 {
8003 case OMP_CLAUSE_DEPEND_IN:
8004 i = 2;
8005 break;
8006 case OMP_CLAUSE_DEPEND_OUT:
8007 case OMP_CLAUSE_DEPEND_INOUT:
8008 i = 0;
8009 break;
8010 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
8011 i = 1;
8012 break;
8013 case OMP_CLAUSE_DEPEND_DEPOBJ:
8014 i = 3;
8015 break;
8016 case OMP_CLAUSE_DEPEND_SOURCE:
8017 case OMP_CLAUSE_DEPEND_SINK:
8018 continue;
8019 default:
8020 gcc_unreachable ();
8021 }
8022 tree t = OMP_CLAUSE_DECL (c);
8023 if (TREE_CODE (t) == TREE_LIST
8024 && TREE_PURPOSE (t)
8025 && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
8026 {
8027 if (TREE_PURPOSE (t) != last_iter)
8028 {
8029 if (last_bind)
8030 gimplify_and_add (last_bind, pre_p);
8031 tree block = TREE_VEC_ELT (TREE_PURPOSE (t), 5);
8032 last_bind = build3 (BIND_EXPR, void_type_node,
8033 BLOCK_VARS (block), NULL, block);
8034 TREE_SIDE_EFFECTS (last_bind) = 1;
8035 SET_EXPR_LOCATION (last_bind, OMP_CLAUSE_LOCATION (c));
8036 tree *p = &BIND_EXPR_BODY (last_bind);
8037 for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
8038 {
8039 tree var = TREE_VEC_ELT (it, 0);
8040 tree begin = TREE_VEC_ELT (it, 1);
8041 tree end = TREE_VEC_ELT (it, 2);
8042 tree step = TREE_VEC_ELT (it, 3);
8043 tree orig_step = TREE_VEC_ELT (it, 4);
8044 tree type = TREE_TYPE (var);
8045 location_t loc = DECL_SOURCE_LOCATION (var);
8046 /* Emit:
8047 var = begin;
8048 goto cond_label;
8049 beg_label:
8050 ...
8051 var = var + step;
8052 cond_label:
8053 if (orig_step > 0) {
8054 if (var < end) goto beg_label;
8055 } else {
8056 if (var > end) goto beg_label;
8057 }
8058 for each iterator, with inner iterators added to
8059 the ... above. */
8060 tree beg_label = create_artificial_label (loc);
8061 tree cond_label = NULL_TREE;
8062 tem = build2_loc (loc, MODIFY_EXPR, void_type_node,
8063 var, begin);
8064 append_to_statement_list_force (tem, p);
8065 tem = build_and_jump (&cond_label);
8066 append_to_statement_list_force (tem, p);
8067 tem = build1 (LABEL_EXPR, void_type_node, beg_label);
8068 append_to_statement_list (tem, p);
8069 tree bind = build3 (BIND_EXPR, void_type_node, NULL_TREE,
8070 NULL_TREE, NULL_TREE);
8071 TREE_SIDE_EFFECTS (bind) = 1;
8072 SET_EXPR_LOCATION (bind, loc);
8073 append_to_statement_list_force (bind, p);
8074 if (POINTER_TYPE_P (type))
8075 tem = build2_loc (loc, POINTER_PLUS_EXPR, type,
8076 var, fold_convert_loc (loc, sizetype,
8077 step));
8078 else
8079 tem = build2_loc (loc, PLUS_EXPR, type, var, step);
8080 tem = build2_loc (loc, MODIFY_EXPR, void_type_node,
8081 var, tem);
8082 append_to_statement_list_force (tem, p);
8083 tem = build1 (LABEL_EXPR, void_type_node, cond_label);
8084 append_to_statement_list (tem, p);
8085 tree cond = fold_build2_loc (loc, LT_EXPR,
8086 boolean_type_node,
8087 var, end);
8088 tree pos
8089 = fold_build3_loc (loc, COND_EXPR, void_type_node,
8090 cond, build_and_jump (&beg_label),
8091 void_node);
8092 cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
8093 var, end);
8094 tree neg
8095 = fold_build3_loc (loc, COND_EXPR, void_type_node,
8096 cond, build_and_jump (&beg_label),
8097 void_node);
8098 tree osteptype = TREE_TYPE (orig_step);
8099 cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
8100 orig_step,
8101 build_int_cst (osteptype, 0));
8102 tem = fold_build3_loc (loc, COND_EXPR, void_type_node,
8103 cond, pos, neg);
8104 append_to_statement_list_force (tem, p);
8105 p = &BIND_EXPR_BODY (bind);
8106 }
8107 last_body = p;
8108 }
8109 last_iter = TREE_PURPOSE (t);
8110 if (TREE_CODE (TREE_VALUE (t)) == COMPOUND_EXPR)
8111 {
8112 append_to_statement_list (TREE_OPERAND (TREE_VALUE (t),
8113 0), last_body);
8114 TREE_VALUE (t) = TREE_OPERAND (TREE_VALUE (t), 1);
8115 }
8116 if (error_operand_p (TREE_VALUE (t)))
8117 return 2;
8118 TREE_VALUE (t) = build_fold_addr_expr (TREE_VALUE (t));
8119 r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
8120 NULL_TREE, NULL_TREE);
8121 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
8122 void_type_node, r, TREE_VALUE (t));
8123 append_to_statement_list_force (tem, last_body);
8124 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
8125 void_type_node, cnts[i],
8126 size_binop (PLUS_EXPR, cnts[i], size_int (1)));
8127 append_to_statement_list_force (tem, last_body);
8128 TREE_VALUE (t) = null_pointer_node;
8129 }
8130 else
8131 {
8132 if (last_bind)
8133 {
8134 gimplify_and_add (last_bind, pre_p);
8135 last_bind = NULL_TREE;
8136 }
8137 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
8138 {
8139 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
8140 NULL, is_gimple_val, fb_rvalue);
8141 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
8142 }
8143 if (error_operand_p (OMP_CLAUSE_DECL (c)))
8144 return 2;
8145 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
8146 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
8147 is_gimple_val, fb_rvalue) == GS_ERROR)
8148 return 2;
8149 r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
8150 NULL_TREE, NULL_TREE);
8151 tem = build2 (MODIFY_EXPR, void_type_node, r, OMP_CLAUSE_DECL (c));
8152 gimplify_and_add (tem, pre_p);
8153 g = gimple_build_assign (cnts[i], size_binop (PLUS_EXPR, cnts[i],
8154 size_int (1)));
8155 gimple_seq_add_stmt (pre_p, g);
8156 }
8157 }
8158 if (last_bind)
8159 gimplify_and_add (last_bind, pre_p);
8160 tree cond = boolean_false_node;
8161 if (is_old)
8162 {
8163 if (!unused[0])
8164 cond = build2_loc (first_loc, NE_EXPR, boolean_type_node, cnts[0],
8165 size_binop_loc (first_loc, PLUS_EXPR, counts[0],
8166 size_int (2)));
8167 if (!unused[2])
8168 cond = build2_loc (first_loc, TRUTH_OR_EXPR, boolean_type_node, cond,
8169 build2_loc (first_loc, NE_EXPR, boolean_type_node,
8170 cnts[2],
8171 size_binop_loc (first_loc, PLUS_EXPR,
8172 totalpx,
8173 size_int (1))));
8174 }
8175 else
8176 {
8177 tree prev = size_int (5);
8178 for (i = 0; i < 4; i++)
8179 {
8180 if (unused[i])
8181 continue;
8182 prev = size_binop_loc (first_loc, PLUS_EXPR, counts[i], prev);
8183 cond = build2_loc (first_loc, TRUTH_OR_EXPR, boolean_type_node, cond,
8184 build2_loc (first_loc, NE_EXPR, boolean_type_node,
8185 cnts[i], unshare_expr (prev)));
8186 }
8187 }
8188 tem = build3_loc (first_loc, COND_EXPR, void_type_node, cond,
8189 build_call_expr_loc (first_loc,
8190 builtin_decl_explicit (BUILT_IN_TRAP),
8191 0), void_node);
8192 gimplify_and_add (tem, pre_p);
8193 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
8194 OMP_CLAUSE_DEPEND_KIND (c) = OMP_CLAUSE_DEPEND_LAST;
8195 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
8196 OMP_CLAUSE_CHAIN (c) = *list_p;
8197 *list_p = c;
8198 return 1;
8199 }
8200
8201 /* Insert a GOMP_MAP_ALLOC or GOMP_MAP_RELEASE node following a
8202 GOMP_MAP_STRUCT mapping. C is an always_pointer mapping. STRUCT_NODE is
8203 the struct node to insert the new mapping after (when the struct node is
8204 initially created). PREV_NODE is the first of two or three mappings for a
8205 pointer, and is either:
8206 - the node before C, when a pair of mappings is used, e.g. for a C/C++
8207 array section.
8208 - not the node before C. This is true when we have a reference-to-pointer
8209 type (with a mapping for the reference and for the pointer), or for
8210 Fortran derived-type mappings with a GOMP_MAP_TO_PSET.
8211 If SCP is non-null, the new node is inserted before *SCP.
8212 If SCP is null, the new node is inserted before PREV_NODE.
8213 The return value is:
8214 - PREV_NODE, if SCP is non-null.
8215 - The newly-created ALLOC or RELEASE node, if SCP is null.
8216 - The second newly-created ALLOC or RELEASE node, if we are mapping a
8217 reference to a pointer. */
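/* Illustrative sketch (simplified, hypothetical names): a mapping such as
     map (tofrom: s.ptr[0:n])
   that has been split into a GOMP_MAP_STRUCT node for 's', the data
   mapping and a GOMP_MAP_ALWAYS_POINTER node for 's.ptr' gains here the
   GOMP_MAP_ALLOC (or GOMP_MAP_RELEASE on exit data) node covering the
   pointer member 's.ptr' itself within the struct mapping.  */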
8218
8219 static tree
8220 insert_struct_comp_map (enum tree_code code, tree c, tree struct_node,
8221 tree prev_node, tree *scp)
8222 {
8223 enum gomp_map_kind mkind
8224 = (code == OMP_TARGET_EXIT_DATA || code == OACC_EXIT_DATA)
8225 ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
8226
8227 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_MAP);
8228 tree cl = scp ? prev_node : c2;
8229 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
8230 OMP_CLAUSE_DECL (c2) = unshare_expr (OMP_CLAUSE_DECL (c));
8231 OMP_CLAUSE_CHAIN (c2) = scp ? *scp : prev_node;
8232 if (OMP_CLAUSE_CHAIN (prev_node) != c
8233 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (prev_node)) == OMP_CLAUSE_MAP
8234 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (prev_node))
8235 == GOMP_MAP_TO_PSET))
8236 OMP_CLAUSE_SIZE (c2) = OMP_CLAUSE_SIZE (OMP_CLAUSE_CHAIN (prev_node));
8237 else
8238 OMP_CLAUSE_SIZE (c2) = TYPE_SIZE_UNIT (ptr_type_node);
8239 if (struct_node)
8240 OMP_CLAUSE_CHAIN (struct_node) = c2;
8241
8242 /* We might need to create an additional mapping if we have a reference to a
8243 pointer (in C++). Don't do this if we have something other than a
8244 GOMP_MAP_ALWAYS_POINTER though, i.e. a GOMP_MAP_TO_PSET. */
8245 if (OMP_CLAUSE_CHAIN (prev_node) != c
8246 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (prev_node)) == OMP_CLAUSE_MAP
8247 && ((OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (prev_node))
8248 == GOMP_MAP_ALWAYS_POINTER)
8249 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (prev_node))
8250 == GOMP_MAP_ATTACH_DETACH)))
8251 {
8252 tree c4 = OMP_CLAUSE_CHAIN (prev_node);
8253 tree c3 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_MAP);
8254 OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
8255 OMP_CLAUSE_DECL (c3) = unshare_expr (OMP_CLAUSE_DECL (c4));
8256 OMP_CLAUSE_SIZE (c3) = TYPE_SIZE_UNIT (ptr_type_node);
8257 OMP_CLAUSE_CHAIN (c3) = prev_node;
8258 if (!scp)
8259 OMP_CLAUSE_CHAIN (c2) = c3;
8260 else
8261 cl = c3;
8262 }
8263
8264 if (scp)
8265 *scp = c2;
8266
8267 return cl;
8268 }
8269
8270 /* Strip ARRAY_REFS or an indirect ref off BASE, find the containing object,
8271 and set *BITPOSP to the bit offset and *POFFSETP to the byte offset of the access.
8272 If BASE_REF is non-NULL and the containing object is a reference, set
8273 *BASE_REF to that reference before dereferencing the object.
8274 If BASE_REF is NULL, check that the containing object is a COMPONENT_REF or
8275 has array type, else return NULL. */
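/* For example (hypothetical), for BASE 's.a[3]' where 'a' is an array
   member, the ARRAY_REF is stripped and the containing object 's' is
   returned, with *POFFSETP set to the byte offset of 'a' within 's'.  */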
8276
8277 static tree
8278 extract_base_bit_offset (tree base, tree *base_ref, poly_int64 *bitposp,
8279 poly_offset_int *poffsetp)
8280 {
8281 tree offset;
8282 poly_int64 bitsize, bitpos;
8283 machine_mode mode;
8284 int unsignedp, reversep, volatilep = 0;
8285 poly_offset_int poffset;
8286
8287 if (base_ref)
8288 {
8289 *base_ref = NULL_TREE;
8290
8291 while (TREE_CODE (base) == ARRAY_REF)
8292 base = TREE_OPERAND (base, 0);
8293
8294 if (TREE_CODE (base) == INDIRECT_REF)
8295 base = TREE_OPERAND (base, 0);
8296 }
8297 else
8298 {
8299 if (TREE_CODE (base) == ARRAY_REF)
8300 {
8301 while (TREE_CODE (base) == ARRAY_REF)
8302 base = TREE_OPERAND (base, 0);
8303 if (TREE_CODE (base) != COMPONENT_REF
8304 || TREE_CODE (TREE_TYPE (base)) != ARRAY_TYPE)
8305 return NULL_TREE;
8306 }
8307 else if (TREE_CODE (base) == INDIRECT_REF
8308 && TREE_CODE (TREE_OPERAND (base, 0)) == COMPONENT_REF
8309 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (base, 0)))
8310 == REFERENCE_TYPE))
8311 base = TREE_OPERAND (base, 0);
8312 }
8313
8314 base = get_inner_reference (base, &bitsize, &bitpos, &offset, &mode,
8315 &unsignedp, &reversep, &volatilep);
8316
8317 tree orig_base = base;
8318
8319 if ((TREE_CODE (base) == INDIRECT_REF
8320 || (TREE_CODE (base) == MEM_REF
8321 && integer_zerop (TREE_OPERAND (base, 1))))
8322 && DECL_P (TREE_OPERAND (base, 0))
8323 && TREE_CODE (TREE_TYPE (TREE_OPERAND (base, 0))) == REFERENCE_TYPE)
8324 base = TREE_OPERAND (base, 0);
8325
8326 gcc_assert (offset == NULL_TREE || poly_int_tree_p (offset));
8327
8328 if (offset)
8329 poffset = wi::to_poly_offset (offset);
8330 else
8331 poffset = 0;
8332
8333 if (maybe_ne (bitpos, 0))
8334 poffset += bits_to_bytes_round_down (bitpos);
8335
8336 *bitposp = bitpos;
8337 *poffsetp = poffset;
8338
8339 /* Set *BASE_REF if BASE was a dereferenced reference variable. */
8340 if (base_ref && orig_base != base)
8341 *base_ref = orig_base;
8342
8343 return base;
8344 }
8345
8346 /* Scan the OMP clauses in *LIST_P, installing mappings into a new
8347 omp context and, where needed, noticing them in enclosing contexts. */
8348
8349 static void
8350 gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
8351 enum omp_region_type region_type,
8352 enum tree_code code)
8353 {
8354 struct gimplify_omp_ctx *ctx, *outer_ctx;
8355 tree c;
8356 hash_map<tree, tree> *struct_map_to_clause = NULL;
8357 hash_set<tree> *struct_deref_set = NULL;
8358 tree *prev_list_p = NULL, *orig_list_p = list_p;
8359 int handled_depend_iterators = -1;
8360 int nowait = -1;
8361
8362 ctx = new_omp_context (region_type);
8363 ctx->code = code;
8364 outer_ctx = ctx->outer_context;
8365 if (code == OMP_TARGET)
8366 {
8367 if (!lang_GNU_Fortran ())
8368 ctx->defaultmap[GDMK_POINTER] = GOVD_MAP | GOVD_MAP_0LEN_ARRAY;
8369 ctx->defaultmap[GDMK_SCALAR] = GOVD_FIRSTPRIVATE;
8370 }
8371 if (!lang_GNU_Fortran ())
8372 switch (code)
8373 {
8374 case OMP_TARGET:
8375 case OMP_TARGET_DATA:
8376 case OMP_TARGET_ENTER_DATA:
8377 case OMP_TARGET_EXIT_DATA:
8378 case OACC_DECLARE:
8379 case OACC_HOST_DATA:
8380 case OACC_PARALLEL:
8381 case OACC_KERNELS:
8382 ctx->target_firstprivatize_array_bases = true;
8383 default:
8384 break;
8385 }
8386
8387 while ((c = *list_p) != NULL)
8388 {
8389 bool remove = false;
8390 bool notice_outer = true;
8391 const char *check_non_private = NULL;
8392 unsigned int flags;
8393 tree decl;
8394
8395 switch (OMP_CLAUSE_CODE (c))
8396 {
8397 case OMP_CLAUSE_PRIVATE:
8398 flags = GOVD_PRIVATE | GOVD_EXPLICIT;
8399 if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
8400 {
8401 flags |= GOVD_PRIVATE_OUTER_REF;
8402 OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
8403 }
8404 else
8405 notice_outer = false;
8406 goto do_add;
8407 case OMP_CLAUSE_SHARED:
8408 flags = GOVD_SHARED | GOVD_EXPLICIT;
8409 goto do_add;
8410 case OMP_CLAUSE_FIRSTPRIVATE:
8411 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
8412 check_non_private = "firstprivate";
8413 goto do_add;
8414 case OMP_CLAUSE_LASTPRIVATE:
8415 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
8416 switch (code)
8417 {
8418 case OMP_DISTRIBUTE:
8419 error_at (OMP_CLAUSE_LOCATION (c),
8420 "conditional %<lastprivate%> clause on "
8421 "%qs construct", "distribute");
8422 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
8423 break;
8424 case OMP_TASKLOOP:
8425 error_at (OMP_CLAUSE_LOCATION (c),
8426 "conditional %<lastprivate%> clause on "
8427 "%qs construct", "taskloop");
8428 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
8429 break;
8430 default:
8431 break;
8432 }
8433 flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
8434 if (code != OMP_LOOP)
8435 check_non_private = "lastprivate";
8436 decl = OMP_CLAUSE_DECL (c);
8437 if (error_operand_p (decl))
8438 goto do_add;
8439 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
8440 && !lang_hooks.decls.omp_scalar_p (decl))
8441 {
8442 error_at (OMP_CLAUSE_LOCATION (c),
8443 "non-scalar variable %qD in conditional "
8444 "%<lastprivate%> clause", decl);
8445 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
8446 }
8447 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
8448 flags |= GOVD_LASTPRIVATE_CONDITIONAL;
8449 if (outer_ctx
8450 && (outer_ctx->region_type == ORT_COMBINED_PARALLEL
8451 || ((outer_ctx->region_type & ORT_COMBINED_TEAMS)
8452 == ORT_COMBINED_TEAMS))
8453 && splay_tree_lookup (outer_ctx->variables,
8454 (splay_tree_key) decl) == NULL)
8455 {
8456 omp_add_variable (outer_ctx, decl, GOVD_SHARED | GOVD_SEEN);
8457 if (outer_ctx->outer_context)
8458 omp_notice_variable (outer_ctx->outer_context, decl, true);
8459 }
8460 else if (outer_ctx
8461 && (outer_ctx->region_type & ORT_TASK) != 0
8462 && outer_ctx->combined_loop
8463 && splay_tree_lookup (outer_ctx->variables,
8464 (splay_tree_key) decl) == NULL)
8465 {
8466 omp_add_variable (outer_ctx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
8467 if (outer_ctx->outer_context)
8468 omp_notice_variable (outer_ctx->outer_context, decl, true);
8469 }
8470 else if (outer_ctx
8471 && (outer_ctx->region_type == ORT_WORKSHARE
8472 || outer_ctx->region_type == ORT_ACC)
8473 && outer_ctx->combined_loop
8474 && splay_tree_lookup (outer_ctx->variables,
8475 (splay_tree_key) decl) == NULL
8476 && !omp_check_private (outer_ctx, decl, false))
8477 {
8478 omp_add_variable (outer_ctx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
8479 if (outer_ctx->outer_context
8480 && (outer_ctx->outer_context->region_type
8481 == ORT_COMBINED_PARALLEL)
8482 && splay_tree_lookup (outer_ctx->outer_context->variables,
8483 (splay_tree_key) decl) == NULL)
8484 {
8485 struct gimplify_omp_ctx *octx = outer_ctx->outer_context;
8486 omp_add_variable (octx, decl, GOVD_SHARED | GOVD_SEEN);
8487 if (octx->outer_context)
8488 {
8489 octx = octx->outer_context;
8490 if (octx->region_type == ORT_WORKSHARE
8491 && octx->combined_loop
8492 && splay_tree_lookup (octx->variables,
8493 (splay_tree_key) decl) == NULL
8494 && !omp_check_private (octx, decl, false))
8495 {
8496 omp_add_variable (octx, decl,
8497 GOVD_LASTPRIVATE | GOVD_SEEN);
8498 octx = octx->outer_context;
8499 if (octx
8500 && ((octx->region_type & ORT_COMBINED_TEAMS)
8501 == ORT_COMBINED_TEAMS)
8502 && (splay_tree_lookup (octx->variables,
8503 (splay_tree_key) decl)
8504 == NULL))
8505 {
8506 omp_add_variable (octx, decl,
8507 GOVD_SHARED | GOVD_SEEN);
8508 octx = octx->outer_context;
8509 }
8510 }
8511 if (octx)
8512 omp_notice_variable (octx, decl, true);
8513 }
8514 }
8515 else if (outer_ctx->outer_context)
8516 omp_notice_variable (outer_ctx->outer_context, decl, true);
8517 }
8518 goto do_add;
8519 case OMP_CLAUSE_REDUCTION:
8520 if (OMP_CLAUSE_REDUCTION_TASK (c))
8521 {
8522 if (region_type == ORT_WORKSHARE)
8523 {
8524 if (nowait == -1)
8525 nowait = omp_find_clause (*list_p,
8526 OMP_CLAUSE_NOWAIT) != NULL_TREE;
8527 if (nowait
8528 && (outer_ctx == NULL
8529 || outer_ctx->region_type != ORT_COMBINED_PARALLEL))
8530 {
8531 error_at (OMP_CLAUSE_LOCATION (c),
8532 "%<task%> reduction modifier on a construct "
8533 "with a %<nowait%> clause");
8534 OMP_CLAUSE_REDUCTION_TASK (c) = 0;
8535 }
8536 }
8537 else if ((region_type & ORT_PARALLEL) != ORT_PARALLEL)
8538 {
8539 error_at (OMP_CLAUSE_LOCATION (c),
8540 "invalid %<task%> reduction modifier on construct "
8541 "other than %<parallel%>, %<for%> or %<sections%>");
8542 OMP_CLAUSE_REDUCTION_TASK (c) = 0;
8543 }
8544 }
8545 if (OMP_CLAUSE_REDUCTION_INSCAN (c))
8546 switch (code)
8547 {
8548 case OMP_SECTIONS:
8549 error_at (OMP_CLAUSE_LOCATION (c),
8550 "%<inscan%> %<reduction%> clause on "
8551 "%qs construct", "sections");
8552 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
8553 break;
8554 case OMP_PARALLEL:
8555 error_at (OMP_CLAUSE_LOCATION (c),
8556 "%<inscan%> %<reduction%> clause on "
8557 "%qs construct", "parallel");
8558 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
8559 break;
8560 case OMP_TEAMS:
8561 error_at (OMP_CLAUSE_LOCATION (c),
8562 "%<inscan%> %<reduction%> clause on "
8563 "%qs construct", "teams");
8564 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
8565 break;
8566 case OMP_TASKLOOP:
8567 error_at (OMP_CLAUSE_LOCATION (c),
8568 "%<inscan%> %<reduction%> clause on "
8569 "%qs construct", "taskloop");
8570 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
8571 break;
8572 default:
8573 break;
8574 }
8575 /* FALLTHRU */
8576 case OMP_CLAUSE_IN_REDUCTION:
8577 case OMP_CLAUSE_TASK_REDUCTION:
8578 flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
8579 /* OpenACC permits reductions on private variables. */
8580 if (!(region_type & ORT_ACC)
8581 /* taskgroup is actually not a worksharing region. */
8582 && code != OMP_TASKGROUP)
8583 check_non_private = omp_clause_code_name[OMP_CLAUSE_CODE (c)];
8584 decl = OMP_CLAUSE_DECL (c);
8585 if (TREE_CODE (decl) == MEM_REF)
8586 {
8587 tree type = TREE_TYPE (decl);
8588 if (gimplify_expr (&TYPE_MAX_VALUE (TYPE_DOMAIN (type)), pre_p,
8589 NULL, is_gimple_val, fb_rvalue, false)
8590 == GS_ERROR)
8591 {
8592 remove = true;
8593 break;
8594 }
8595 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
8596 if (DECL_P (v))
8597 {
8598 omp_firstprivatize_variable (ctx, v);
8599 omp_notice_variable (ctx, v, true);
8600 }
8601 decl = TREE_OPERAND (decl, 0);
8602 if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
8603 {
8604 if (gimplify_expr (&TREE_OPERAND (decl, 1), pre_p,
8605 NULL, is_gimple_val, fb_rvalue, false)
8606 == GS_ERROR)
8607 {
8608 remove = true;
8609 break;
8610 }
8611 v = TREE_OPERAND (decl, 1);
8612 if (DECL_P (v))
8613 {
8614 omp_firstprivatize_variable (ctx, v);
8615 omp_notice_variable (ctx, v, true);
8616 }
8617 decl = TREE_OPERAND (decl, 0);
8618 }
8619 if (TREE_CODE (decl) == ADDR_EXPR
8620 || TREE_CODE (decl) == INDIRECT_REF)
8621 decl = TREE_OPERAND (decl, 0);
8622 }
8623 goto do_add_decl;
8624 case OMP_CLAUSE_LINEAR:
8625 if (gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c), pre_p, NULL,
8626 is_gimple_val, fb_rvalue) == GS_ERROR)
8627 {
8628 remove = true;
8629 break;
8630 }
8631 else
8632 {
8633 if (code == OMP_SIMD
8634 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
8635 {
8636 struct gimplify_omp_ctx *octx = outer_ctx;
8637 if (octx
8638 && octx->region_type == ORT_WORKSHARE
8639 && octx->combined_loop
8640 && !octx->distribute)
8641 {
8642 if (octx->outer_context
8643 && (octx->outer_context->region_type
8644 == ORT_COMBINED_PARALLEL))
8645 octx = octx->outer_context->outer_context;
8646 else
8647 octx = octx->outer_context;
8648 }
8649 if (octx
8650 && octx->region_type == ORT_WORKSHARE
8651 && octx->combined_loop
8652 && octx->distribute)
8653 {
8654 error_at (OMP_CLAUSE_LOCATION (c),
8655 "%<linear%> clause for variable other than "
8656 "loop iterator specified on construct "
8657 "combined with %<distribute%>");
8658 remove = true;
8659 break;
8660 }
8661 }
8662 /* For combined #pragma omp parallel for simd, need to put
8663 lastprivate and perhaps firstprivate too on the
8664 parallel. Similarly for #pragma omp for simd. */
8665 struct gimplify_omp_ctx *octx = outer_ctx;
8666 decl = NULL_TREE;
8667 do
8668 {
8669 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
8670 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
8671 break;
8672 decl = OMP_CLAUSE_DECL (c);
8673 if (error_operand_p (decl))
8674 {
8675 decl = NULL_TREE;
8676 break;
8677 }
8678 flags = GOVD_SEEN;
8679 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
8680 flags |= GOVD_FIRSTPRIVATE;
8681 if (!OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
8682 flags |= GOVD_LASTPRIVATE;
8683 if (octx
8684 && octx->region_type == ORT_WORKSHARE
8685 && octx->combined_loop)
8686 {
8687 if (octx->outer_context
8688 && (octx->outer_context->region_type
8689 == ORT_COMBINED_PARALLEL))
8690 octx = octx->outer_context;
8691 else if (omp_check_private (octx, decl, false))
8692 break;
8693 }
8694 else if (octx
8695 && (octx->region_type & ORT_TASK) != 0
8696 && octx->combined_loop)
8697 ;
8698 else if (octx
8699 && octx->region_type == ORT_COMBINED_PARALLEL
8700 && ctx->region_type == ORT_WORKSHARE
8701 && octx == outer_ctx)
8702 flags = GOVD_SEEN | GOVD_SHARED;
8703 else if (octx
8704 && ((octx->region_type & ORT_COMBINED_TEAMS)
8705 == ORT_COMBINED_TEAMS))
8706 flags = GOVD_SEEN | GOVD_SHARED;
8707 else if (octx
8708 && octx->region_type == ORT_COMBINED_TARGET)
8709 {
8710 flags &= ~GOVD_LASTPRIVATE;
8711 if (flags == GOVD_SEEN)
8712 break;
8713 }
8714 else
8715 break;
8716 splay_tree_node on
8717 = splay_tree_lookup (octx->variables,
8718 (splay_tree_key) decl);
8719 if (on && (on->value & GOVD_DATA_SHARE_CLASS) != 0)
8720 {
8721 octx = NULL;
8722 break;
8723 }
8724 omp_add_variable (octx, decl, flags);
8725 if (octx->outer_context == NULL)
8726 break;
8727 octx = octx->outer_context;
8728 }
8729 while (1);
8730 if (octx
8731 && decl
8732 && (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
8733 || !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
8734 omp_notice_variable (octx, decl, true);
8735 }
8736 flags = GOVD_LINEAR | GOVD_EXPLICIT;
8737 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
8738 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
8739 {
8740 notice_outer = false;
8741 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
8742 }
8743 goto do_add;
8744
8745 case OMP_CLAUSE_MAP:
8746 decl = OMP_CLAUSE_DECL (c);
8747 if (error_operand_p (decl))
8748 remove = true;
8749 switch (code)
8750 {
8751 case OMP_TARGET:
8752 break;
8753 case OACC_DATA:
8754 if (TREE_CODE (TREE_TYPE (decl)) != ARRAY_TYPE)
8755 break;
8756 /* FALLTHRU */
8757 case OMP_TARGET_DATA:
8758 case OMP_TARGET_ENTER_DATA:
8759 case OMP_TARGET_EXIT_DATA:
8760 case OACC_HOST_DATA:
8761 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
8762 || (OMP_CLAUSE_MAP_KIND (c)
8763 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
8764 /* For 'target {,enter,exit} data' only the array slice is
8765 mapped, but not the pointer to it. */
8766 remove = true;
8767 break;
8768 case OACC_ENTER_DATA:
8769 case OACC_EXIT_DATA:
8770 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
8771 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_TO_PSET
8772 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
8773 || (OMP_CLAUSE_MAP_KIND (c)
8774 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
8775 remove = true;
8776 break;
8777 default:
8778 break;
8779 }
8780 /* For Fortran, not only the pointer to the data is mapped but also
8781 the address of the pointer, the array descriptor etc.; for
8782 'exit data' - and in particular for 'delete:' - having an 'alloc:'
8783 does not make sense. Likewise, for 'update' only transferring the
8784 data itself is needed as the rest has been handled in previous
8785 directives. */
8786 if ((code == OMP_TARGET_EXIT_DATA || code == OMP_TARGET_UPDATE)
8787 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
8788 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_TO_PSET))
8789 remove = true;
8790
8791 if (remove)
8792 break;
8793 if (DECL_P (decl) && outer_ctx && (region_type & ORT_ACC))
8794 {
8795 struct gimplify_omp_ctx *octx;
8796 for (octx = outer_ctx; octx; octx = octx->outer_context)
8797 {
8798 if (octx->region_type != ORT_ACC_HOST_DATA)
8799 break;
8800 splay_tree_node n2
8801 = splay_tree_lookup (octx->variables,
8802 (splay_tree_key) decl);
8803 if (n2)
8804 error_at (OMP_CLAUSE_LOCATION (c), "variable %qE "
8805 "declared in enclosing %<host_data%> region",
8806 DECL_NAME (decl));
8807 }
8808 }
8809 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
8810 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
8811 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
8812 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
8813 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
8814 {
8815 remove = true;
8816 break;
8817 }
8818 else if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
8819 || (OMP_CLAUSE_MAP_KIND (c)
8820 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
8821 && TREE_CODE (OMP_CLAUSE_SIZE (c)) != INTEGER_CST)
8822 {
8823 OMP_CLAUSE_SIZE (c)
8824 = get_initialized_tmp_var (OMP_CLAUSE_SIZE (c), pre_p, NULL,
8825 false);
8826 omp_add_variable (ctx, OMP_CLAUSE_SIZE (c),
8827 GOVD_FIRSTPRIVATE | GOVD_SEEN);
8828 }
8829 if (!DECL_P (decl))
8830 {
8831 tree d = decl, *pd;
8832 if (TREE_CODE (d) == ARRAY_REF)
8833 {
8834 while (TREE_CODE (d) == ARRAY_REF)
8835 d = TREE_OPERAND (d, 0);
8836 if (TREE_CODE (d) == COMPONENT_REF
8837 && TREE_CODE (TREE_TYPE (d)) == ARRAY_TYPE)
8838 decl = d;
8839 }
8840 pd = &OMP_CLAUSE_DECL (c);
8841 if (d == decl
8842 && TREE_CODE (decl) == INDIRECT_REF
8843 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
8844 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
8845 == REFERENCE_TYPE))
8846 {
8847 pd = &TREE_OPERAND (decl, 0);
8848 decl = TREE_OPERAND (decl, 0);
8849 }
8850 bool indir_p = false;
8851 tree orig_decl = decl;
8852 tree decl_ref = NULL_TREE;
8853 if ((region_type & ORT_ACC) != 0
8854 && TREE_CODE (*pd) == COMPONENT_REF
8855 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH
8856 && code != OACC_UPDATE)
8857 {
8858 while (TREE_CODE (decl) == COMPONENT_REF)
8859 {
8860 decl = TREE_OPERAND (decl, 0);
8861 if ((TREE_CODE (decl) == MEM_REF
8862 && integer_zerop (TREE_OPERAND (decl, 1)))
8863 || INDIRECT_REF_P (decl))
8864 {
8865 indir_p = true;
8866 decl = TREE_OPERAND (decl, 0);
8867 }
8868 if (TREE_CODE (decl) == INDIRECT_REF
8869 && DECL_P (TREE_OPERAND (decl, 0))
8870 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
8871 == REFERENCE_TYPE))
8872 {
8873 decl_ref = decl;
8874 decl = TREE_OPERAND (decl, 0);
8875 }
8876 }
8877 }
8878 else if (TREE_CODE (decl) == COMPONENT_REF)
8879 {
8880 while (TREE_CODE (decl) == COMPONENT_REF)
8881 decl = TREE_OPERAND (decl, 0);
8882 if (TREE_CODE (decl) == INDIRECT_REF
8883 && DECL_P (TREE_OPERAND (decl, 0))
8884 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
8885 == REFERENCE_TYPE))
8886 decl = TREE_OPERAND (decl, 0);
8887 }
8888 if (decl != orig_decl && DECL_P (decl) && indir_p)
8889 {
8890 gomp_map_kind k = (code == OACC_EXIT_DATA) ? GOMP_MAP_DETACH
8891 : GOMP_MAP_ATTACH;
8892 /* We have a dereference of a struct member. Make this an
8893 attach/detach operation, and ensure the base pointer is
8894 mapped as a FIRSTPRIVATE_POINTER. */
8895 OMP_CLAUSE_SET_MAP_KIND (c, k);
8896 flags = GOVD_MAP | GOVD_SEEN | GOVD_EXPLICIT;
8897 tree next_clause = OMP_CLAUSE_CHAIN (c);
8898 if (k == GOMP_MAP_ATTACH
8899 && code != OACC_ENTER_DATA
8900 && (!next_clause
8901 || (OMP_CLAUSE_CODE (next_clause) != OMP_CLAUSE_MAP)
8902 || (OMP_CLAUSE_MAP_KIND (next_clause)
8903 != GOMP_MAP_POINTER)
8904 || OMP_CLAUSE_DECL (next_clause) != decl)
8905 && (!struct_deref_set
8906 || !struct_deref_set->contains (decl)))
8907 {
8908 if (!struct_deref_set)
8909 struct_deref_set = new hash_set<tree> ();
8910 /* As well as the attach, we also need a
8911 FIRSTPRIVATE_POINTER clause to properly map the
8912 pointer to the struct base. */
8913 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8914 OMP_CLAUSE_MAP);
8915 OMP_CLAUSE_SET_MAP_KIND (c2, GOMP_MAP_ALLOC);
8916 OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c2)
8917 = 1;
8918 tree charptr_zero
8919 = build_int_cst (build_pointer_type (char_type_node),
8920 0);
8921 OMP_CLAUSE_DECL (c2)
8922 = build2 (MEM_REF, char_type_node,
8923 decl_ref ? decl_ref : decl, charptr_zero);
8924 OMP_CLAUSE_SIZE (c2) = size_zero_node;
8925 tree c3 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8926 OMP_CLAUSE_MAP);
8927 OMP_CLAUSE_SET_MAP_KIND (c3,
8928 GOMP_MAP_FIRSTPRIVATE_POINTER);
8929 OMP_CLAUSE_DECL (c3) = decl;
8930 OMP_CLAUSE_SIZE (c3) = size_zero_node;
8931 tree mapgrp = *prev_list_p;
8932 *prev_list_p = c2;
8933 OMP_CLAUSE_CHAIN (c3) = mapgrp;
8934 OMP_CLAUSE_CHAIN (c2) = c3;
8935
8936 struct_deref_set->add (decl);
8937 }
8938 goto do_add_decl;
8939 }
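/* A hedged sketch of the rewrite above (clause shapes are
   illustrative): for OpenACC code such as
     #pragma acc enter data copyin (p->q[0:n])
   the dereferenced member access becomes a GOMP_MAP_ATTACH (or
   GOMP_MAP_DETACH on 'exit data'), and, when needed, a zero-length
   GOMP_MAP_ALLOC based at 'p' plus a GOMP_MAP_FIRSTPRIVATE_POINTER
   for 'p' itself are prepended so the runtime can locate the mapping
   of the pointed-to struct.  */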
8940 /* An "attach/detach" operation on an update directive should
8941 behave as a GOMP_MAP_ALWAYS_POINTER. Beware that
8942 unlike attach or detach map kinds, GOMP_MAP_ALWAYS_POINTER
8943 depends on the previous mapping. */
8944 if (code == OACC_UPDATE
8945 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
8946 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_ALWAYS_POINTER);
8947 if (gimplify_expr (pd, pre_p, NULL, is_gimple_lvalue, fb_lvalue)
8948 == GS_ERROR)
8949 {
8950 remove = true;
8951 break;
8952 }
8953 if (DECL_P (decl)
8954 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_TO_PSET
8955 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ATTACH
8956 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_DETACH
8957 && code != OACC_UPDATE)
8958 {
8959 if (error_operand_p (decl))
8960 {
8961 remove = true;
8962 break;
8963 }
8964
8965 tree stype = TREE_TYPE (decl);
8966 if (TREE_CODE (stype) == REFERENCE_TYPE)
8967 stype = TREE_TYPE (stype);
8968 if (TYPE_SIZE_UNIT (stype) == NULL
8969 || TREE_CODE (TYPE_SIZE_UNIT (stype)) != INTEGER_CST)
8970 {
8971 error_at (OMP_CLAUSE_LOCATION (c),
8972 "mapping field %qE of variable length "
8973 "structure", OMP_CLAUSE_DECL (c));
8974 remove = true;
8975 break;
8976 }
8977
8978 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER
8979 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
8980 {
8981 /* Error recovery. */
8982 if (prev_list_p == NULL)
8983 {
8984 remove = true;
8985 break;
8986 }
8987 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
8988 {
8989 tree ch = OMP_CLAUSE_CHAIN (*prev_list_p);
8990 if (ch == NULL_TREE || OMP_CLAUSE_CHAIN (ch) != c)
8991 {
8992 remove = true;
8993 break;
8994 }
8995 }
8996 }
8997
8998 poly_offset_int offset1;
8999 poly_int64 bitpos1;
9000 tree base_ref;
9001
9002 tree base
9003 = extract_base_bit_offset (OMP_CLAUSE_DECL (c), &base_ref,
9004 &bitpos1, &offset1);
9005
9006 gcc_assert (base == decl);
9007
9008 splay_tree_node n
9009 = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
9010 bool ptr = (OMP_CLAUSE_MAP_KIND (c)
9011 == GOMP_MAP_ALWAYS_POINTER);
9012 bool attach_detach = (OMP_CLAUSE_MAP_KIND (c)
9013 == GOMP_MAP_ATTACH_DETACH);
9014 bool attach = OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
9015 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH;
9016 bool has_attachments = false;
9017 /* For OpenACC, pointers in structs should trigger an
9018 attach action. */
9019 if (attach_detach && (region_type & ORT_ACC) != 0)
9020 {
9021 /* Turn a GOMP_MAP_ATTACH_DETACH clause into a
9022 GOMP_MAP_ATTACH or GOMP_MAP_DETACH clause after we
9023 have detected a case that needs a GOMP_MAP_STRUCT
9024 mapping added. */
9025 gomp_map_kind k
9026 = (code == OACC_EXIT_DATA) ? GOMP_MAP_DETACH
9027 : GOMP_MAP_ATTACH;
9028 OMP_CLAUSE_SET_MAP_KIND (c, k);
9029 has_attachments = true;
9030 }
9031 if (n == NULL || (n->value & GOVD_MAP) == 0)
9032 {
9033 tree l = build_omp_clause (OMP_CLAUSE_LOCATION (c),
9034 OMP_CLAUSE_MAP);
9035 gomp_map_kind k = attach ? GOMP_MAP_FORCE_PRESENT
9036 : GOMP_MAP_STRUCT;
9037
9038 OMP_CLAUSE_SET_MAP_KIND (l, k);
9039 if (base_ref)
9040 OMP_CLAUSE_DECL (l) = unshare_expr (base_ref);
9041 else
9042 OMP_CLAUSE_DECL (l) = decl;
9043 OMP_CLAUSE_SIZE (l)
9044 = (!attach
9045 ? size_int (1)
9046 : DECL_P (OMP_CLAUSE_DECL (l))
9047 ? DECL_SIZE_UNIT (OMP_CLAUSE_DECL (l))
9048 : TYPE_SIZE_UNIT (TREE_TYPE (OMP_CLAUSE_DECL (l))));
9049 if (struct_map_to_clause == NULL)
9050 struct_map_to_clause = new hash_map<tree, tree>;
9051 struct_map_to_clause->put (decl, l);
9052 if (ptr || attach_detach)
9053 {
9054 insert_struct_comp_map (code, c, l, *prev_list_p,
9055 NULL);
9056 *prev_list_p = l;
9057 prev_list_p = NULL;
9058 }
9059 else
9060 {
9061 OMP_CLAUSE_CHAIN (l) = c;
9062 *list_p = l;
9063 list_p = &OMP_CLAUSE_CHAIN (l);
9064 }
9065 if (base_ref && code == OMP_TARGET)
9066 {
9067 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
9068 OMP_CLAUSE_MAP);
9069 enum gomp_map_kind mkind
9070 = GOMP_MAP_FIRSTPRIVATE_REFERENCE;
9071 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
9072 OMP_CLAUSE_DECL (c2) = decl;
9073 OMP_CLAUSE_SIZE (c2) = size_zero_node;
9074 OMP_CLAUSE_CHAIN (c2) = OMP_CLAUSE_CHAIN (l);
9075 OMP_CLAUSE_CHAIN (l) = c2;
9076 }
9077 flags = GOVD_MAP | GOVD_EXPLICIT;
9078 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c))
9079 || ptr
9080 || attach_detach)
9081 flags |= GOVD_SEEN;
9082 if (has_attachments)
9083 flags |= GOVD_MAP_HAS_ATTACHMENTS;
9084 goto do_add_decl;
9085 }
9086 else if (struct_map_to_clause)
9087 {
9088 tree *osc = struct_map_to_clause->get (decl);
9089 tree *sc = NULL, *scp = NULL;
9090 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c))
9091 || ptr
9092 || attach_detach)
9093 n->value |= GOVD_SEEN;
9094 sc = &OMP_CLAUSE_CHAIN (*osc);
9095 if (*sc != c
9096 && (OMP_CLAUSE_MAP_KIND (*sc)
9097 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
9098 sc = &OMP_CLAUSE_CHAIN (*sc);
9099 /* Here "prev_list_p" is the end of the inserted
9100 alloc/release nodes after the struct node, OSC. */
9101 for (; *sc != c; sc = &OMP_CLAUSE_CHAIN (*sc))
9102 if ((ptr || attach_detach) && sc == prev_list_p)
9103 break;
9104 else if (TREE_CODE (OMP_CLAUSE_DECL (*sc))
9105 != COMPONENT_REF
9106 && (TREE_CODE (OMP_CLAUSE_DECL (*sc))
9107 != INDIRECT_REF)
9108 && (TREE_CODE (OMP_CLAUSE_DECL (*sc))
9109 != ARRAY_REF))
9110 break;
9111 else
9112 {
9113 tree sc_decl = OMP_CLAUSE_DECL (*sc);
9114 poly_offset_int offsetn;
9115 poly_int64 bitposn;
9116 tree base
9117 = extract_base_bit_offset (sc_decl, NULL,
9118 &bitposn, &offsetn);
9119 if (base != decl)
9120 break;
9121 if (scp)
9122 continue;
9123 tree d1 = OMP_CLAUSE_DECL (*sc);
9124 tree d2 = OMP_CLAUSE_DECL (c);
9125 while (TREE_CODE (d1) == ARRAY_REF)
9126 d1 = TREE_OPERAND (d1, 0);
9127 while (TREE_CODE (d2) == ARRAY_REF)
9128 d2 = TREE_OPERAND (d2, 0);
9129 if (TREE_CODE (d1) == INDIRECT_REF)
9130 d1 = TREE_OPERAND (d1, 0);
9131 if (TREE_CODE (d2) == INDIRECT_REF)
9132 d2 = TREE_OPERAND (d2, 0);
9133 while (TREE_CODE (d1) == COMPONENT_REF)
9134 if (TREE_CODE (d2) == COMPONENT_REF
9135 && TREE_OPERAND (d1, 1)
9136 == TREE_OPERAND (d2, 1))
9137 {
9138 d1 = TREE_OPERAND (d1, 0);
9139 d2 = TREE_OPERAND (d2, 0);
9140 }
9141 else
9142 break;
9143 if (d1 == d2)
9144 {
9145 error_at (OMP_CLAUSE_LOCATION (c),
9146 "%qE appears more than once in map "
9147 "clauses", OMP_CLAUSE_DECL (c));
9148 remove = true;
9149 break;
9150 }
9151 if (maybe_lt (offset1, offsetn)
9152 || (known_eq (offset1, offsetn)
9153 && maybe_lt (bitpos1, bitposn)))
9154 {
9155 if (ptr || attach_detach)
9156 scp = sc;
9157 else
9158 break;
9159 }
9160 }
9161 if (remove)
9162 break;
9163 if (!attach)
9164 OMP_CLAUSE_SIZE (*osc)
9165 = size_binop (PLUS_EXPR, OMP_CLAUSE_SIZE (*osc),
9166 size_one_node);
9167 if (ptr || attach_detach)
9168 {
9169 tree cl = insert_struct_comp_map (code, c, NULL,
9170 *prev_list_p, scp);
9171 if (sc == prev_list_p)
9172 {
9173 *sc = cl;
9174 prev_list_p = NULL;
9175 }
9176 else
9177 {
9178 *prev_list_p = OMP_CLAUSE_CHAIN (c);
9179 list_p = prev_list_p;
9180 prev_list_p = NULL;
9181 OMP_CLAUSE_CHAIN (c) = *sc;
9182 *sc = cl;
9183 continue;
9184 }
9185 }
9186 else if (*sc != c)
9187 {
9188 *list_p = OMP_CLAUSE_CHAIN (c);
9189 OMP_CLAUSE_CHAIN (c) = *sc;
9190 *sc = c;
9191 continue;
9192 }
9193 }
9194 }
9195 if (!remove
9196 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_POINTER
9197 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ATTACH_DETACH
9198 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_TO_PSET
9199 && OMP_CLAUSE_CHAIN (c)
9200 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c)) == OMP_CLAUSE_MAP
9201 && ((OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
9202 == GOMP_MAP_ALWAYS_POINTER)
9203 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
9204 == GOMP_MAP_ATTACH_DETACH)
9205 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
9206 == GOMP_MAP_TO_PSET)))
9207 prev_list_p = list_p;
9208
9209 break;
9210 }
9211 flags = GOVD_MAP | GOVD_EXPLICIT;
9212 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TO
9213 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TOFROM)
9214 flags |= GOVD_MAP_ALWAYS_TO;
9215 goto do_add;
9216
9217 case OMP_CLAUSE_DEPEND:
9218 if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
9219 {
9220 tree deps = OMP_CLAUSE_DECL (c);
9221 while (deps && TREE_CODE (deps) == TREE_LIST)
9222 {
9223 if (TREE_CODE (TREE_PURPOSE (deps)) == TRUNC_DIV_EXPR
9224 && DECL_P (TREE_OPERAND (TREE_PURPOSE (deps), 1)))
9225 gimplify_expr (&TREE_OPERAND (TREE_PURPOSE (deps), 1),
9226 pre_p, NULL, is_gimple_val, fb_rvalue);
9227 deps = TREE_CHAIN (deps);
9228 }
9229 break;
9230 }
9231 else if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE)
9232 break;
9233 if (handled_depend_iterators == -1)
9234 handled_depend_iterators = gimplify_omp_depend (list_p, pre_p);
9235 if (handled_depend_iterators)
9236 {
9237 if (handled_depend_iterators == 2)
9238 remove = true;
9239 break;
9240 }
9241 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
9242 {
9243 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
9244 NULL, is_gimple_val, fb_rvalue);
9245 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
9246 }
9247 if (error_operand_p (OMP_CLAUSE_DECL (c)))
9248 {
9249 remove = true;
9250 break;
9251 }
9252 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
9253 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
9254 is_gimple_val, fb_rvalue) == GS_ERROR)
9255 {
9256 remove = true;
9257 break;
9258 }
9259 break;
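/* Note (illustrative): for a clause such as 'depend(inout: a[i])',
   the OMP_CLAUSE_DECL has just been replaced by '&a[i]' and
   gimplified, so the dependence address is a simple gimple value by
   the time the construct is lowered.  */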
9260
9261 case OMP_CLAUSE_TO:
9262 case OMP_CLAUSE_FROM:
9263 case OMP_CLAUSE__CACHE_:
9264 decl = OMP_CLAUSE_DECL (c);
9265 if (error_operand_p (decl))
9266 {
9267 remove = true;
9268 break;
9269 }
9270 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
9271 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
9272 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
9273 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
9274 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
9275 {
9276 remove = true;
9277 break;
9278 }
9279 if (!DECL_P (decl))
9280 {
9281 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p,
9282 NULL, is_gimple_lvalue, fb_lvalue)
9283 == GS_ERROR)
9284 {
9285 remove = true;
9286 break;
9287 }
9288 break;
9289 }
9290 goto do_notice;
9291
9292 case OMP_CLAUSE_USE_DEVICE_PTR:
9293 case OMP_CLAUSE_USE_DEVICE_ADDR:
9294 flags = GOVD_EXPLICIT;
9295 goto do_add;
9296
9297 case OMP_CLAUSE_IS_DEVICE_PTR:
9298 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
9299 goto do_add;
9300
9301 do_add:
9302 decl = OMP_CLAUSE_DECL (c);
9303 do_add_decl:
9304 if (error_operand_p (decl))
9305 {
9306 remove = true;
9307 break;
9308 }
9309 if (DECL_NAME (decl) == NULL_TREE && (flags & GOVD_SHARED) == 0)
9310 {
9311 tree t = omp_member_access_dummy_var (decl);
9312 if (t)
9313 {
9314 tree v = DECL_VALUE_EXPR (decl);
9315 DECL_NAME (decl) = DECL_NAME (TREE_OPERAND (v, 1));
9316 if (outer_ctx)
9317 omp_notice_variable (outer_ctx, t, true);
9318 }
9319 }
9320 if (code == OACC_DATA
9321 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
9322 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
9323 flags |= GOVD_MAP_0LEN_ARRAY;
9324 omp_add_variable (ctx, decl, flags);
9325 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
9326 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
9327 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
9328 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9329 {
9330 omp_add_variable (ctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
9331 GOVD_LOCAL | GOVD_SEEN);
9332 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c)
9333 && walk_tree (&OMP_CLAUSE_REDUCTION_INIT (c),
9334 find_decl_expr,
9335 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
9336 NULL) == NULL_TREE)
9337 omp_add_variable (ctx,
9338 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
9339 GOVD_LOCAL | GOVD_SEEN);
9340 gimplify_omp_ctxp = ctx;
9341 push_gimplify_context ();
9342
9343 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
9344 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
9345
9346 gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
9347 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
9348 pop_gimplify_context
9349 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
9350 push_gimplify_context ();
9351 gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
9352 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
9353 pop_gimplify_context
9354 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
9355 OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
9356 OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;
9357
9358 gimplify_omp_ctxp = outer_ctx;
9359 }
9360 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
9361 && OMP_CLAUSE_LASTPRIVATE_STMT (c))
9362 {
9363 gimplify_omp_ctxp = ctx;
9364 push_gimplify_context ();
9365 if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
9366 {
9367 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
9368 NULL, NULL);
9369 TREE_SIDE_EFFECTS (bind) = 1;
9370 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
9371 OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
9372 }
9373 gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
9374 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
9375 pop_gimplify_context
9376 (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
9377 OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;
9378
9379 gimplify_omp_ctxp = outer_ctx;
9380 }
9381 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
9382 && OMP_CLAUSE_LINEAR_STMT (c))
9383 {
9384 gimplify_omp_ctxp = ctx;
9385 push_gimplify_context ();
9386 if (TREE_CODE (OMP_CLAUSE_LINEAR_STMT (c)) != BIND_EXPR)
9387 {
9388 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
9389 NULL, NULL);
9390 TREE_SIDE_EFFECTS (bind) = 1;
9391 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LINEAR_STMT (c);
9392 OMP_CLAUSE_LINEAR_STMT (c) = bind;
9393 }
9394 gimplify_and_add (OMP_CLAUSE_LINEAR_STMT (c),
9395 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
9396 pop_gimplify_context
9397 (gimple_seq_first_stmt (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c)));
9398 OMP_CLAUSE_LINEAR_STMT (c) = NULL_TREE;
9399
9400 gimplify_omp_ctxp = outer_ctx;
9401 }
9402 if (notice_outer)
9403 goto do_notice;
9404 break;
9405
9406 case OMP_CLAUSE_COPYIN:
9407 case OMP_CLAUSE_COPYPRIVATE:
9408 decl = OMP_CLAUSE_DECL (c);
9409 if (error_operand_p (decl))
9410 {
9411 remove = true;
9412 break;
9413 }
9414 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_COPYPRIVATE
9415 && !remove
9416 && !omp_check_private (ctx, decl, true))
9417 {
9418 remove = true;
9419 if (is_global_var (decl))
9420 {
9421 if (DECL_THREAD_LOCAL_P (decl))
9422 remove = false;
9423 else if (DECL_HAS_VALUE_EXPR_P (decl))
9424 {
9425 tree value = get_base_address (DECL_VALUE_EXPR (decl));
9426
9427 if (value
9428 && DECL_P (value)
9429 && DECL_THREAD_LOCAL_P (value))
9430 remove = false;
9431 }
9432 }
9433 if (remove)
9434 error_at (OMP_CLAUSE_LOCATION (c),
9435 "copyprivate variable %qE is not threadprivate"
9436 " or private in outer context", DECL_NAME (decl));
9437 }
9438 do_notice:
9439 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
9440 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
9441 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
9442 && outer_ctx
9443 && ((region_type & ORT_TASKLOOP) == ORT_TASKLOOP
9444 || (region_type == ORT_WORKSHARE
9445 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
9446 && (OMP_CLAUSE_REDUCTION_INSCAN (c)
9447 || code == OMP_LOOP)))
9448 && (outer_ctx->region_type == ORT_COMBINED_PARALLEL
9449 || (code == OMP_LOOP
9450 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
9451 && ((outer_ctx->region_type & ORT_COMBINED_TEAMS)
9452 == ORT_COMBINED_TEAMS))))
9453 {
9454 splay_tree_node on
9455 = splay_tree_lookup (outer_ctx->variables,
9456 (splay_tree_key)decl);
9457 if (on == NULL || (on->value & GOVD_DATA_SHARE_CLASS) == 0)
9458 {
9459 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
9460 && TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
9461 && (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
9462 || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
9463 && (TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
9464 == POINTER_TYPE))))
9465 omp_firstprivatize_variable (outer_ctx, decl);
9466 else
9467 omp_add_variable (outer_ctx, decl,
9468 GOVD_SEEN | GOVD_SHARED);
9469 omp_notice_variable (outer_ctx, decl, true);
9470 }
9471 }
9472 if (outer_ctx)
9473 omp_notice_variable (outer_ctx, decl, true);
9474 if (check_non_private
9475 && region_type == ORT_WORKSHARE
9476 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
9477 || decl == OMP_CLAUSE_DECL (c)
9478 || (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
9479 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
9480 == ADDR_EXPR
9481 || (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
9482 == POINTER_PLUS_EXPR
9483 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND
9484 (OMP_CLAUSE_DECL (c), 0), 0))
9485 == ADDR_EXPR)))))
9486 && omp_check_private (ctx, decl, false))
9487 {
9488 error ("%s variable %qE is private in outer context",
9489 check_non_private, DECL_NAME (decl));
9490 remove = true;
9491 }
9492 break;
9493
9494 case OMP_CLAUSE_IF:
9495 if (OMP_CLAUSE_IF_MODIFIER (c) != ERROR_MARK
9496 && OMP_CLAUSE_IF_MODIFIER (c) != code)
9497 {
9498 const char *p[2];
9499 for (int i = 0; i < 2; i++)
9500 switch (i ? OMP_CLAUSE_IF_MODIFIER (c) : code)
9501 {
9502 case VOID_CST: p[i] = "cancel"; break;
9503 case OMP_PARALLEL: p[i] = "parallel"; break;
9504 case OMP_SIMD: p[i] = "simd"; break;
9505 case OMP_TASK: p[i] = "task"; break;
9506 case OMP_TASKLOOP: p[i] = "taskloop"; break;
9507 case OMP_TARGET_DATA: p[i] = "target data"; break;
9508 case OMP_TARGET: p[i] = "target"; break;
9509 case OMP_TARGET_UPDATE: p[i] = "target update"; break;
9510 case OMP_TARGET_ENTER_DATA:
9511 p[i] = "target enter data"; break;
9512 case OMP_TARGET_EXIT_DATA: p[i] = "target exit data"; break;
9513 default: gcc_unreachable ();
9514 }
9515 error_at (OMP_CLAUSE_LOCATION (c),
9516 "expected %qs %<if%> clause modifier rather than %qs",
9517 p[0], p[1]);
9518 remove = true;
9519 }
9520 /* Fall through. */
9521
9522 case OMP_CLAUSE_FINAL:
9523 OMP_CLAUSE_OPERAND (c, 0)
9524 = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
9525 /* Fall through. */
9526
9527 case OMP_CLAUSE_SCHEDULE:
9528 case OMP_CLAUSE_NUM_THREADS:
9529 case OMP_CLAUSE_NUM_TEAMS:
9530 case OMP_CLAUSE_THREAD_LIMIT:
9531 case OMP_CLAUSE_DIST_SCHEDULE:
9532 case OMP_CLAUSE_DEVICE:
9533 case OMP_CLAUSE_PRIORITY:
9534 case OMP_CLAUSE_GRAINSIZE:
9535 case OMP_CLAUSE_NUM_TASKS:
9536 case OMP_CLAUSE_HINT:
9537 case OMP_CLAUSE_ASYNC:
9538 case OMP_CLAUSE_WAIT:
9539 case OMP_CLAUSE_NUM_GANGS:
9540 case OMP_CLAUSE_NUM_WORKERS:
9541 case OMP_CLAUSE_VECTOR_LENGTH:
9542 case OMP_CLAUSE_WORKER:
9543 case OMP_CLAUSE_VECTOR:
9544 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
9545 is_gimple_val, fb_rvalue) == GS_ERROR)
9546 remove = true;
9547 break;
9548
9549 case OMP_CLAUSE_GANG:
9550 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
9551 is_gimple_val, fb_rvalue) == GS_ERROR)
9552 remove = true;
9553 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 1), pre_p, NULL,
9554 is_gimple_val, fb_rvalue) == GS_ERROR)
9555 remove = true;
9556 break;
9557
9558 case OMP_CLAUSE_NOWAIT:
9559 nowait = 1;
9560 break;
9561
9562 case OMP_CLAUSE_ORDERED:
9563 case OMP_CLAUSE_UNTIED:
9564 case OMP_CLAUSE_COLLAPSE:
9565 case OMP_CLAUSE_TILE:
9566 case OMP_CLAUSE_AUTO:
9567 case OMP_CLAUSE_SEQ:
9568 case OMP_CLAUSE_INDEPENDENT:
9569 case OMP_CLAUSE_MERGEABLE:
9570 case OMP_CLAUSE_PROC_BIND:
9571 case OMP_CLAUSE_SAFELEN:
9572 case OMP_CLAUSE_SIMDLEN:
9573 case OMP_CLAUSE_NOGROUP:
9574 case OMP_CLAUSE_THREADS:
9575 case OMP_CLAUSE_SIMD:
9576 case OMP_CLAUSE_BIND:
9577 case OMP_CLAUSE_IF_PRESENT:
9578 case OMP_CLAUSE_FINALIZE:
9579 break;
9580
9581 case OMP_CLAUSE_ORDER:
9582 ctx->order_concurrent = true;
9583 break;
9584
9585 case OMP_CLAUSE_DEFAULTMAP:
9586 enum gimplify_defaultmap_kind gdmkmin, gdmkmax;
9587 switch (OMP_CLAUSE_DEFAULTMAP_CATEGORY (c))
9588 {
9589 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED:
9590 gdmkmin = GDMK_SCALAR;
9591 gdmkmax = GDMK_POINTER;
9592 break;
9593 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_SCALAR:
9594 gdmkmin = gdmkmax = GDMK_SCALAR;
9595 break;
9596 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_AGGREGATE:
9597 gdmkmin = gdmkmax = GDMK_AGGREGATE;
9598 break;
9599 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_ALLOCATABLE:
9600 gdmkmin = gdmkmax = GDMK_ALLOCATABLE;
9601 break;
9602 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_POINTER:
9603 gdmkmin = gdmkmax = GDMK_POINTER;
9604 break;
9605 default:
9606 gcc_unreachable ();
9607 }
9608 for (int gdmk = gdmkmin; gdmk <= gdmkmax; gdmk++)
9609 switch (OMP_CLAUSE_DEFAULTMAP_BEHAVIOR (c))
9610 {
9611 case OMP_CLAUSE_DEFAULTMAP_ALLOC:
9612 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_ALLOC_ONLY;
9613 break;
9614 case OMP_CLAUSE_DEFAULTMAP_TO:
9615 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_TO_ONLY;
9616 break;
9617 case OMP_CLAUSE_DEFAULTMAP_FROM:
9618 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_FROM_ONLY;
9619 break;
9620 case OMP_CLAUSE_DEFAULTMAP_TOFROM:
9621 ctx->defaultmap[gdmk] = GOVD_MAP;
9622 break;
9623 case OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE:
9624 ctx->defaultmap[gdmk] = GOVD_FIRSTPRIVATE;
9625 break;
9626 case OMP_CLAUSE_DEFAULTMAP_NONE:
9627 ctx->defaultmap[gdmk] = 0;
9628 break;
9629 case OMP_CLAUSE_DEFAULTMAP_DEFAULT:
9630 switch (gdmk)
9631 {
9632 case GDMK_SCALAR:
9633 ctx->defaultmap[gdmk] = GOVD_FIRSTPRIVATE;
9634 break;
9635 case GDMK_AGGREGATE:
9636 case GDMK_ALLOCATABLE:
9637 ctx->defaultmap[gdmk] = GOVD_MAP;
9638 break;
9639 case GDMK_POINTER:
9640 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_0LEN_ARRAY;
9641 break;
9642 default:
9643 gcc_unreachable ();
9644 }
9645 break;
9646 default:
9647 gcc_unreachable ();
9648 }
9649 break;
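/* For instance (illustrative): 'defaultmap(firstprivate: scalar)'
   sets ctx->defaultmap[GDMK_SCALAR] to GOVD_FIRSTPRIVATE, whereas a
   plain 'defaultmap(default)' re-establishes the category-dependent
   defaults computed in the inner switch above.  */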
9650
9651 case OMP_CLAUSE_ALIGNED:
9652 decl = OMP_CLAUSE_DECL (c);
9653 if (error_operand_p (decl))
9654 {
9655 remove = true;
9656 break;
9657 }
9658 if (gimplify_expr (&OMP_CLAUSE_ALIGNED_ALIGNMENT (c), pre_p, NULL,
9659 is_gimple_val, fb_rvalue) == GS_ERROR)
9660 {
9661 remove = true;
9662 break;
9663 }
9664 if (!is_global_var (decl)
9665 && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
9666 omp_add_variable (ctx, decl, GOVD_ALIGNED);
9667 break;
9668
9669 case OMP_CLAUSE_NONTEMPORAL:
9670 decl = OMP_CLAUSE_DECL (c);
9671 if (error_operand_p (decl))
9672 {
9673 remove = true;
9674 break;
9675 }
9676 omp_add_variable (ctx, decl, GOVD_NONTEMPORAL);
9677 break;
9678
9679 case OMP_CLAUSE_DEFAULT:
9680 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
9681 break;
9682
9683 case OMP_CLAUSE_INCLUSIVE:
9684 case OMP_CLAUSE_EXCLUSIVE:
9685 decl = OMP_CLAUSE_DECL (c);
9686 {
9687 splay_tree_node n = splay_tree_lookup (outer_ctx->variables,
9688 (splay_tree_key) decl);
9689 if (n == NULL || (n->value & GOVD_REDUCTION) == 0)
9690 {
9691 error_at (OMP_CLAUSE_LOCATION (c),
9692 "%qD specified in %qs clause but not in %<inscan%> "
9693 "%<reduction%> clause on the containing construct",
9694 decl, omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
9695 remove = true;
9696 }
9697 else
9698 {
9699 n->value |= GOVD_REDUCTION_INSCAN;
9700 if (outer_ctx->region_type == ORT_SIMD
9701 && outer_ctx->outer_context
9702 && outer_ctx->outer_context->region_type == ORT_WORKSHARE)
9703 {
9704 n = splay_tree_lookup (outer_ctx->outer_context->variables,
9705 (splay_tree_key) decl);
9706 if (n && (n->value & GOVD_REDUCTION) != 0)
9707 n->value |= GOVD_REDUCTION_INSCAN;
9708 }
9709 }
9710 }
9711 break;
9712
9713 default:
9714 gcc_unreachable ();
9715 }
9716
9717 if (code == OACC_DATA
9718 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
9719 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
9720 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
9721 remove = true;
9722 if (remove)
9723 *list_p = OMP_CLAUSE_CHAIN (c);
9724 else
9725 list_p = &OMP_CLAUSE_CHAIN (c);
9726 }
9727
9728 ctx->clauses = *orig_list_p;
9729 gimplify_omp_ctxp = ctx;
9730 if (struct_map_to_clause)
9731 delete struct_map_to_clause;
9732 if (struct_deref_set)
9733 delete struct_deref_set;
9734 }
9735
9736 /* Return true if DECL is a candidate for shared to firstprivate
9737 optimization. We consider only non-addressable scalars that are
9738 not too large and are not references. */
9739
9740 static bool
9741 omp_shared_to_firstprivate_optimizable_decl_p (tree decl)
9742 {
9743 if (TREE_ADDRESSABLE (decl))
9744 return false;
9745 tree type = TREE_TYPE (decl);
9746 if (!is_gimple_reg_type (type)
9747 || TREE_CODE (type) == REFERENCE_TYPE
9748 || TREE_ADDRESSABLE (type))
9749 return false;
9750 /* Don't optimize too large decls, as each thread/task will have
9751 its own. */
9752 HOST_WIDE_INT len = int_size_in_bytes (type);
9753 if (len == -1 || len > 4 * POINTER_SIZE / BITS_PER_UNIT)
9754 return false;
9755 if (lang_hooks.decls.omp_privatize_by_reference (decl))
9756 return false;
9757 return true;
9758 }
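/* Illustrative examples (assumed C/C++ declarations, not from the
   pass): a small scalar such as 'int x' whose address is never taken
   is a candidate; an array 'int a[128]', a reference 'int &r', or any
   addressable variable is not.  */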
9759
9760 /* Helper function of omp_find_stores_op and gimplify_adjust_omp_clauses*.
9761 For a DECL satisfying omp_shared_to_firstprivate_optimizable_decl_p,
9762 mark it as GOVD_WRITTEN in the nearest enclosing context where it is GOVD_SHARED. */
9763
9764 static void
9765 omp_mark_stores (struct gimplify_omp_ctx *ctx, tree decl)
9766 {
9767 for (; ctx; ctx = ctx->outer_context)
9768 {
9769 splay_tree_node n = splay_tree_lookup (ctx->variables,
9770 (splay_tree_key) decl);
9771 if (n == NULL)
9772 continue;
9773 else if (n->value & GOVD_SHARED)
9774 {
9775 n->value |= GOVD_WRITTEN;
9776 return;
9777 }
9778 else if (n->value & GOVD_DATA_SHARE_CLASS)
9779 return;
9780 }
9781 }
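/* Illustrative example: for
     #pragma omp parallel shared(x)
       x = 1;
   the detected store marks 'x' as GOVD_WRITTEN in the parallel
   context, which prevents OMP_CLAUSE_SHARED_READONLY from being set
   and thereby blocks the shared-to-firstprivate optimization.  */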
9782
9783 /* Helper callback for walk_gimple_seq to discover possible stores
9784 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
9785 GOVD_WRITTEN on those that are GOVD_SHARED in some outer
9786 context.  */
9787
9788 static tree
9789 omp_find_stores_op (tree *tp, int *walk_subtrees, void *data)
9790 {
9791 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
9792
9793 *walk_subtrees = 0;
9794 if (!wi->is_lhs)
9795 return NULL_TREE;
9796
9797 tree op = *tp;
9798 do
9799 {
9800 if (handled_component_p (op))
9801 op = TREE_OPERAND (op, 0);
9802 else if ((TREE_CODE (op) == MEM_REF || TREE_CODE (op) == TARGET_MEM_REF)
9803 && TREE_CODE (TREE_OPERAND (op, 0)) == ADDR_EXPR)
9804 op = TREE_OPERAND (TREE_OPERAND (op, 0), 0);
9805 else
9806 break;
9807 }
9808 while (1);
9809 if (!DECL_P (op) || !omp_shared_to_firstprivate_optimizable_decl_p (op))
9810 return NULL_TREE;
9811
9812 omp_mark_stores (gimplify_omp_ctxp, op);
9813 return NULL_TREE;
9814 }
9815
9816 /* Helper callback for walk_gimple_seq to discover possible stores
9817 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
9818 GOVD_WRITTEN on those that are GOVD_SHARED in some outer
9819 context. */
9820
9821 static tree
9822 omp_find_stores_stmt (gimple_stmt_iterator *gsi_p,
9823 bool *handled_ops_p,
9824 struct walk_stmt_info *wi)
9825 {
9826 gimple *stmt = gsi_stmt (*gsi_p);
9827 switch (gimple_code (stmt))
9828 {
9829 /* Don't recurse on OpenMP constructs for which
9830 gimplify_adjust_omp_clauses already handled the bodies,
9831 but do walk gimple_omp_for_pre_body. */
9832 case GIMPLE_OMP_FOR:
9833 *handled_ops_p = true;
9834 if (gimple_omp_for_pre_body (stmt))
9835 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
9836 omp_find_stores_stmt, omp_find_stores_op, wi);
9837 break;
9838 case GIMPLE_OMP_PARALLEL:
9839 case GIMPLE_OMP_TASK:
9840 case GIMPLE_OMP_SECTIONS:
9841 case GIMPLE_OMP_SINGLE:
9842 case GIMPLE_OMP_TARGET:
9843 case GIMPLE_OMP_TEAMS:
9844 case GIMPLE_OMP_CRITICAL:
9845 *handled_ops_p = true;
9846 break;
9847 default:
9848 break;
9849 }
9850 return NULL_TREE;
9851 }
9852
9853 struct gimplify_adjust_omp_clauses_data
9854 {
9855 tree *list_p;
9856 gimple_seq *pre_p;
9857 };
9858
9859 /* For all variables that were not actually used within the context,
9860 remove PRIVATE, SHARED, and FIRSTPRIVATE clauses. */
9861
9862 static int
9863 gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
9864 {
9865 tree *list_p = ((struct gimplify_adjust_omp_clauses_data *) data)->list_p;
9866 gimple_seq *pre_p
9867 = ((struct gimplify_adjust_omp_clauses_data *) data)->pre_p;
9868 tree decl = (tree) n->key;
9869 unsigned flags = n->value;
9870 enum omp_clause_code code;
9871 tree clause;
9872 bool private_debug;
9873
9874 if (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
9875 && (flags & GOVD_LASTPRIVATE_CONDITIONAL) != 0)
9876 flags = GOVD_SHARED | GOVD_SEEN | GOVD_WRITTEN;
9877 if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
9878 return 0;
9879 if ((flags & GOVD_SEEN) == 0)
9880 return 0;
9881 if ((flags & GOVD_MAP_HAS_ATTACHMENTS) != 0)
9882 return 0;
9883 if (flags & GOVD_DEBUG_PRIVATE)
9884 {
9885 gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_SHARED);
9886 private_debug = true;
9887 }
9888 else if (flags & GOVD_MAP)
9889 private_debug = false;
9890 else
9891 private_debug
9892 = lang_hooks.decls.omp_private_debug_clause (decl,
9893 !!(flags & GOVD_SHARED));
9894 if (private_debug)
9895 code = OMP_CLAUSE_PRIVATE;
9896 else if (flags & GOVD_MAP)
9897 {
9898 code = OMP_CLAUSE_MAP;
9899 if ((gimplify_omp_ctxp->region_type & ORT_ACC) == 0
9900 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
9901 {
9902 error ("%<_Atomic%> %qD in implicit %<map%> clause", decl);
9903 return 0;
9904 }
9905 }
9906 else if (flags & GOVD_SHARED)
9907 {
9908 if (is_global_var (decl))
9909 {
9910 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
9911 while (ctx != NULL)
9912 {
9913 splay_tree_node on
9914 = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
9915 if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
9916 | GOVD_PRIVATE | GOVD_REDUCTION
9917 | GOVD_LINEAR | GOVD_MAP)) != 0)
9918 break;
9919 ctx = ctx->outer_context;
9920 }
9921 if (ctx == NULL)
9922 return 0;
9923 }
9924 code = OMP_CLAUSE_SHARED;
9925 }
9926 else if (flags & GOVD_PRIVATE)
9927 code = OMP_CLAUSE_PRIVATE;
9928 else if (flags & GOVD_FIRSTPRIVATE)
9929 {
9930 code = OMP_CLAUSE_FIRSTPRIVATE;
9931 if ((gimplify_omp_ctxp->region_type & ORT_TARGET)
9932 && (gimplify_omp_ctxp->region_type & ORT_ACC) == 0
9933 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
9934 {
9935 error ("%<_Atomic%> %qD in implicit %<firstprivate%> clause on "
9936 "%<target%> construct", decl);
9937 return 0;
9938 }
9939 }
9940 else if (flags & GOVD_LASTPRIVATE)
9941 code = OMP_CLAUSE_LASTPRIVATE;
9942 else if (flags & (GOVD_ALIGNED | GOVD_NONTEMPORAL))
9943 return 0;
9944 else if (flags & GOVD_CONDTEMP)
9945 {
9946 code = OMP_CLAUSE__CONDTEMP_;
9947 gimple_add_tmp_var (decl);
9948 }
9949 else
9950 gcc_unreachable ();
9951
9952 if (((flags & GOVD_LASTPRIVATE)
9953 || (code == OMP_CLAUSE_SHARED && (flags & GOVD_WRITTEN)))
9954 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
9955 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
9956
9957 tree chain = *list_p;
9958 clause = build_omp_clause (input_location, code);
9959 OMP_CLAUSE_DECL (clause) = decl;
9960 OMP_CLAUSE_CHAIN (clause) = chain;
9961 if (private_debug)
9962 OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
9963 else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
9964 OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
9965 else if (code == OMP_CLAUSE_SHARED
9966 && (flags & GOVD_WRITTEN) == 0
9967 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
9968 OMP_CLAUSE_SHARED_READONLY (clause) = 1;
9969 else if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_EXPLICIT) == 0)
9970 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (clause) = 1;
9971 else if (code == OMP_CLAUSE_MAP && (flags & GOVD_MAP_0LEN_ARRAY) != 0)
9972 {
9973 tree nc = build_omp_clause (input_location, OMP_CLAUSE_MAP);
9974 OMP_CLAUSE_DECL (nc) = decl;
9975 if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
9976 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
9977 OMP_CLAUSE_DECL (clause)
9978 = build_simple_mem_ref_loc (input_location, decl);
9979 OMP_CLAUSE_DECL (clause)
9980 = build2 (MEM_REF, char_type_node, OMP_CLAUSE_DECL (clause),
9981 build_int_cst (build_pointer_type (char_type_node), 0));
9982 OMP_CLAUSE_SIZE (clause) = size_zero_node;
9983 OMP_CLAUSE_SIZE (nc) = size_zero_node;
9984 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_ALLOC);
9985 OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (clause) = 1;
9986 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
9987 OMP_CLAUSE_CHAIN (nc) = chain;
9988 OMP_CLAUSE_CHAIN (clause) = nc;
9989 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
9990 gimplify_omp_ctxp = ctx->outer_context;
9991 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (clause), 0),
9992 pre_p, NULL, is_gimple_val, fb_rvalue);
9993 gimplify_omp_ctxp = ctx;
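/* The clause pair built above gives the implicit pointer map
   zero-length array section semantics, roughly (spelling is
   illustrative, not actual source syntax):
     map(alloc: *p [len: 0]) together with a firstprivate pointer 'p',
   so the runtime translates 'p' if its target happens to be mapped
   and leaves it unchanged otherwise.  */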
9994 }
9995 else if (code == OMP_CLAUSE_MAP)
9996 {
9997 int kind;
9998 /* Not all combinations of these GOVD_MAP flags are actually valid. */
9999 switch (flags & (GOVD_MAP_TO_ONLY
10000 | GOVD_MAP_FORCE
10001 | GOVD_MAP_FORCE_PRESENT
10002 | GOVD_MAP_ALLOC_ONLY
10003 | GOVD_MAP_FROM_ONLY))
10004 {
10005 case 0:
10006 kind = GOMP_MAP_TOFROM;
10007 break;
10008 case GOVD_MAP_FORCE:
10009 kind = GOMP_MAP_TOFROM | GOMP_MAP_FLAG_FORCE;
10010 break;
10011 case GOVD_MAP_TO_ONLY:
10012 kind = GOMP_MAP_TO;
10013 break;
10014 case GOVD_MAP_FROM_ONLY:
10015 kind = GOMP_MAP_FROM;
10016 break;
10017 case GOVD_MAP_ALLOC_ONLY:
10018 kind = GOMP_MAP_ALLOC;
10019 break;
10020 case GOVD_MAP_TO_ONLY | GOVD_MAP_FORCE:
10021 kind = GOMP_MAP_TO | GOMP_MAP_FLAG_FORCE;
10022 break;
10023 case GOVD_MAP_FORCE_PRESENT:
10024 kind = GOMP_MAP_FORCE_PRESENT;
10025 break;
10026 default:
10027 gcc_unreachable ();
10028 }
10029 OMP_CLAUSE_SET_MAP_KIND (clause, kind);
10030 if (DECL_SIZE (decl)
10031 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
10032 {
10033 tree decl2 = DECL_VALUE_EXPR (decl);
10034 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
10035 decl2 = TREE_OPERAND (decl2, 0);
10036 gcc_assert (DECL_P (decl2));
10037 tree mem = build_simple_mem_ref (decl2);
10038 OMP_CLAUSE_DECL (clause) = mem;
10039 OMP_CLAUSE_SIZE (clause) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
10040 if (gimplify_omp_ctxp->outer_context)
10041 {
10042 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
10043 omp_notice_variable (ctx, decl2, true);
10044 omp_notice_variable (ctx, OMP_CLAUSE_SIZE (clause), true);
10045 }
10046 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
10047 OMP_CLAUSE_MAP);
10048 OMP_CLAUSE_DECL (nc) = decl;
10049 OMP_CLAUSE_SIZE (nc) = size_zero_node;
10050 if (gimplify_omp_ctxp->target_firstprivatize_array_bases)
10051 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
10052 else
10053 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
10054 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
10055 OMP_CLAUSE_CHAIN (clause) = nc;
10056 }
10057 else if (gimplify_omp_ctxp->target_firstprivatize_array_bases
10058 && lang_hooks.decls.omp_privatize_by_reference (decl))
10059 {
10060 OMP_CLAUSE_DECL (clause) = build_simple_mem_ref (decl);
10061 OMP_CLAUSE_SIZE (clause)
10062 = unshare_expr (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))));
10063 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
10064 gimplify_omp_ctxp = ctx->outer_context;
10065 gimplify_expr (&OMP_CLAUSE_SIZE (clause),
10066 pre_p, NULL, is_gimple_val, fb_rvalue);
10067 gimplify_omp_ctxp = ctx;
10068 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
10069 OMP_CLAUSE_MAP);
10070 OMP_CLAUSE_DECL (nc) = decl;
10071 OMP_CLAUSE_SIZE (nc) = size_zero_node;
10072 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_REFERENCE);
10073 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
10074 OMP_CLAUSE_CHAIN (clause) = nc;
10075 }
10076 else
10077 OMP_CLAUSE_SIZE (clause) = DECL_SIZE_UNIT (decl);
10078 }
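/* A sketch of the variable-size case above (names illustrative): for
     int n = ...; int vla[n];
   used inside a target region, 'vla' has a DECL_VALUE_EXPR of the
   form '*vla.N', so the map is rewritten to map '*vla.N' with the
   run-time size, chained with a GOMP_MAP_FIRSTPRIVATE_POINTER (or
   GOMP_MAP_POINTER) clause for the base pointer itself.  */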
10079 if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_LASTPRIVATE) != 0)
10080 {
10081 tree nc = build_omp_clause (input_location, OMP_CLAUSE_LASTPRIVATE);
10082 OMP_CLAUSE_DECL (nc) = decl;
10083 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (nc) = 1;
10084 OMP_CLAUSE_CHAIN (nc) = chain;
10085 OMP_CLAUSE_CHAIN (clause) = nc;
10086 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
10087 gimplify_omp_ctxp = ctx->outer_context;
10088 lang_hooks.decls.omp_finish_clause (nc, pre_p);
10089 gimplify_omp_ctxp = ctx;
10090 }
10091 *list_p = clause;
10092 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
10093 gimplify_omp_ctxp = ctx->outer_context;
10094 lang_hooks.decls.omp_finish_clause (clause, pre_p);
10095 if (gimplify_omp_ctxp)
10096 for (; clause != chain; clause = OMP_CLAUSE_CHAIN (clause))
10097 if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_MAP
10098 && DECL_P (OMP_CLAUSE_SIZE (clause)))
10099 omp_notice_variable (gimplify_omp_ctxp, OMP_CLAUSE_SIZE (clause),
10100 true);
10101 gimplify_omp_ctxp = ctx;
10102 return 0;
10103 }
10104
10105 static void
10106 gimplify_adjust_omp_clauses (gimple_seq *pre_p, gimple_seq body, tree *list_p,
10107 enum tree_code code)
10108 {
10109 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
10110 tree *orig_list_p = list_p;
10111 tree c, decl;
10112 bool has_inscan_reductions = false;
10113
10114 if (body)
10115 {
10116 struct gimplify_omp_ctx *octx;
10117 for (octx = ctx; octx; octx = octx->outer_context)
10118 if ((octx->region_type & (ORT_PARALLEL | ORT_TASK | ORT_TEAMS)) != 0)
10119 break;
10120 if (octx)
10121 {
10122 struct walk_stmt_info wi;
10123 memset (&wi, 0, sizeof (wi));
10124 walk_gimple_seq (body, omp_find_stores_stmt,
10125 omp_find_stores_op, &wi);
10126 }
10127 }
10128
10129 if (ctx->add_safelen1)
10130 {
10131 /* If there are VLAs in the body of the simd loop, prevent
10132 vectorization. */
10133 gcc_assert (ctx->region_type == ORT_SIMD);
10134 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
10135 OMP_CLAUSE_SAFELEN_EXPR (c) = integer_one_node;
10136 OMP_CLAUSE_CHAIN (c) = *list_p;
10137 *list_p = c;
10138 list_p = &OMP_CLAUSE_CHAIN (c);
10139 }
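/* Illustrative effect: a loop such as
     #pragma omp simd
     for (i = 0; i < n; i++) { char buf[n]; ... }
   receives an implicit 'safelen(1)' clause here, which effectively
   disables vectorization of the simd loop.  */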
10140
10141 if (ctx->region_type == ORT_WORKSHARE
10142 && ctx->outer_context
10143 && ctx->outer_context->region_type == ORT_COMBINED_PARALLEL)
10144 {
10145 for (c = ctx->outer_context->clauses; c; c = OMP_CLAUSE_CHAIN (c))
10146 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
10147 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
10148 {
10149 decl = OMP_CLAUSE_DECL (c);
10150 splay_tree_node n
10151 = splay_tree_lookup (ctx->outer_context->variables,
10152 (splay_tree_key) decl);
10153 gcc_checking_assert (!splay_tree_lookup (ctx->variables,
10154 (splay_tree_key) decl));
10155 omp_add_variable (ctx, decl, n->value);
10156 tree c2 = copy_node (c);
10157 OMP_CLAUSE_CHAIN (c2) = *list_p;
10158 *list_p = c2;
10159 if ((n->value & GOVD_FIRSTPRIVATE) == 0)
10160 continue;
10161 c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
10162 OMP_CLAUSE_FIRSTPRIVATE);
10163 OMP_CLAUSE_DECL (c2) = decl;
10164 OMP_CLAUSE_CHAIN (c2) = *list_p;
10165 *list_p = c2;
10166 }
10167 }
10168 while ((c = *list_p) != NULL)
10169 {
10170 splay_tree_node n;
10171 bool remove = false;
10172
10173 switch (OMP_CLAUSE_CODE (c))
10174 {
10175 case OMP_CLAUSE_FIRSTPRIVATE:
10176 if ((ctx->region_type & ORT_TARGET)
10177 && (ctx->region_type & ORT_ACC) == 0
10178 && TYPE_ATOMIC (strip_array_types
10179 (TREE_TYPE (OMP_CLAUSE_DECL (c)))))
10180 {
10181 error_at (OMP_CLAUSE_LOCATION (c),
10182 "%<_Atomic%> %qD in %<firstprivate%> clause on "
10183 "%<target%> construct", OMP_CLAUSE_DECL (c));
10184 remove = true;
10185 break;
10186 }
10187 /* FALLTHRU */
10188 case OMP_CLAUSE_PRIVATE:
10189 case OMP_CLAUSE_SHARED:
10190 case OMP_CLAUSE_LINEAR:
10191 decl = OMP_CLAUSE_DECL (c);
10192 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
10193 remove = !(n->value & GOVD_SEEN);
10194 if ((n->value & GOVD_LASTPRIVATE_CONDITIONAL) != 0
10195 && code == OMP_PARALLEL
10196 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
10197 remove = true;
10198 if (! remove)
10199 {
10200 bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
10201 if ((n->value & GOVD_DEBUG_PRIVATE)
10202 || lang_hooks.decls.omp_private_debug_clause (decl, shared))
10203 {
10204 gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
10205 || ((n->value & GOVD_DATA_SHARE_CLASS)
10206 == GOVD_SHARED));
10207 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
10208 OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
10209 }
10210 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
10211 && (n->value & GOVD_WRITTEN) == 0
10212 && DECL_P (decl)
10213 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
10214 OMP_CLAUSE_SHARED_READONLY (c) = 1;
10215 else if (DECL_P (decl)
10216 && ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
10217 && (n->value & GOVD_WRITTEN) != 0)
10218 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
10219 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
10220 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
10221 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
10222 }
10223 break;
10224
10225 case OMP_CLAUSE_LASTPRIVATE:
10226 /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
10227 accurately reflect the presence of a FIRSTPRIVATE clause. */
10228 decl = OMP_CLAUSE_DECL (c);
10229 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
10230 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
10231 = (n->value & GOVD_FIRSTPRIVATE) != 0;
10232 if (code == OMP_DISTRIBUTE
10233 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
10234 {
10235 remove = true;
10236 error_at (OMP_CLAUSE_LOCATION (c),
10237 "same variable used in %<firstprivate%> and "
10238 "%<lastprivate%> clauses on %<distribute%> "
10239 "construct");
10240 }
10241 if (!remove
10242 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
10243 && DECL_P (decl)
10244 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
10245 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
10246 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) && code == OMP_PARALLEL)
10247 remove = true;
10248 break;
10249
10250 case OMP_CLAUSE_ALIGNED:
10251 decl = OMP_CLAUSE_DECL (c);
10252 if (!is_global_var (decl))
10253 {
10254 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
10255 remove = n == NULL || !(n->value & GOVD_SEEN);
10256 if (!remove && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
10257 {
10258 struct gimplify_omp_ctx *octx;
10259 if (n != NULL
10260 && (n->value & (GOVD_DATA_SHARE_CLASS
10261 & ~GOVD_FIRSTPRIVATE)))
10262 remove = true;
10263 else
10264 for (octx = ctx->outer_context; octx;
10265 octx = octx->outer_context)
10266 {
10267 n = splay_tree_lookup (octx->variables,
10268 (splay_tree_key) decl);
10269 if (n == NULL)
10270 continue;
10271 if (n->value & GOVD_LOCAL)
10272 break;
10273 /* We have to avoid assigning a shared variable
10274 to itself when trying to add
10275 __builtin_assume_aligned. */
10276 if (n->value & GOVD_SHARED)
10277 {
10278 remove = true;
10279 break;
10280 }
10281 }
10282 }
10283 }
10284 else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
10285 {
10286 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
10287 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
10288 remove = true;
10289 }
10290 break;
10291
10292 case OMP_CLAUSE_NONTEMPORAL:
10293 decl = OMP_CLAUSE_DECL (c);
10294 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
10295 remove = n == NULL || !(n->value & GOVD_SEEN);
10296 break;
10297
10298 case OMP_CLAUSE_MAP:
10299 if (code == OMP_TARGET_EXIT_DATA
10300 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER)
10301 {
10302 remove = true;
10303 break;
10304 }
10305 decl = OMP_CLAUSE_DECL (c);
10306 /* Data clauses associated with reductions must be
10307 compatible with present_or_copy. Warn and adjust the clause
10308 if that is not the case. */
10309 if (ctx->region_type == ORT_ACC_PARALLEL
10310 || ctx->region_type == ORT_ACC_SERIAL)
10311 {
10312 tree t = DECL_P (decl) ? decl : TREE_OPERAND (decl, 0);
10313 n = NULL;
10314
10315 if (DECL_P (t))
10316 n = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
10317
10318 if (n && (n->value & GOVD_REDUCTION))
10319 {
10320 enum gomp_map_kind kind = OMP_CLAUSE_MAP_KIND (c);
10321
10322 OMP_CLAUSE_MAP_IN_REDUCTION (c) = 1;
10323 if ((kind & GOMP_MAP_TOFROM) != GOMP_MAP_TOFROM
10324 && kind != GOMP_MAP_FORCE_PRESENT
10325 && kind != GOMP_MAP_POINTER)
10326 {
10327 warning_at (OMP_CLAUSE_LOCATION (c), 0,
10328 "incompatible data clause with reduction "
10329 "on %qE; promoting to %<present_or_copy%>",
10330 DECL_NAME (t));
10331 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
10332 }
10333 }
10334 }
10335 if (!DECL_P (decl))
10336 {
10337 if ((ctx->region_type & ORT_TARGET) != 0
10338 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
10339 {
10340 if (TREE_CODE (decl) == INDIRECT_REF
10341 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
10342 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
10343 == REFERENCE_TYPE))
10344 decl = TREE_OPERAND (decl, 0);
10345 if (TREE_CODE (decl) == COMPONENT_REF)
10346 {
10347 while (TREE_CODE (decl) == COMPONENT_REF)
10348 decl = TREE_OPERAND (decl, 0);
10349 if (DECL_P (decl))
10350 {
10351 n = splay_tree_lookup (ctx->variables,
10352 (splay_tree_key) decl);
10353 if (!(n->value & GOVD_SEEN))
10354 remove = true;
10355 }
10356 }
10357 }
10358 break;
10359 }
10360 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
10361 if ((ctx->region_type & ORT_TARGET) != 0
10362 && !(n->value & GOVD_SEEN)
10363 && GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) == 0
10364 && (!is_global_var (decl)
10365 || !lookup_attribute ("omp declare target link",
10366 DECL_ATTRIBUTES (decl))))
10367 {
10368 remove = true;
10369 /* For struct element mappings, if the struct is never referenced
10370 in the target block and none of the mappings has an always
10371 modifier, remove all the struct element mappings, which
10372 immediately follow the GOMP_MAP_STRUCT map clause. */
10373 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT)
10374 {
10375 HOST_WIDE_INT cnt = tree_to_shwi (OMP_CLAUSE_SIZE (c));
10376 while (cnt--)
10377 OMP_CLAUSE_CHAIN (c)
10378 = OMP_CLAUSE_CHAIN (OMP_CLAUSE_CHAIN (c));
10379 }
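/* E.g. (illustrative): for
     #pragma omp target map(s.a) map(s.b)
   where 's' is never referenced in the target body and no mapping
   carries an 'always' modifier, the GOMP_MAP_STRUCT node for 's' and
   the OMP_CLAUSE_SIZE (c) element mappings chained after it are all
   removed.  */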
10380 }
10381 else if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT
10382 && code == OMP_TARGET_EXIT_DATA)
10383 remove = true;
10384 else if (DECL_SIZE (decl)
10385 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST
10386 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_POINTER
10387 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
10388 && (OMP_CLAUSE_MAP_KIND (c)
10389 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
10390 {
10391 /* For GOMP_MAP_FORCE_DEVICEPTR, we'll never enter here, because
10392 for these, TREE_CODE (DECL_SIZE (decl)) will always be
10393 INTEGER_CST. */
10394 gcc_assert (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FORCE_DEVICEPTR);
10395
10396 tree decl2 = DECL_VALUE_EXPR (decl);
10397 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
10398 decl2 = TREE_OPERAND (decl2, 0);
10399 gcc_assert (DECL_P (decl2));
10400 tree mem = build_simple_mem_ref (decl2);
10401 OMP_CLAUSE_DECL (c) = mem;
10402 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
10403 if (ctx->outer_context)
10404 {
10405 omp_notice_variable (ctx->outer_context, decl2, true);
10406 omp_notice_variable (ctx->outer_context,
10407 OMP_CLAUSE_SIZE (c), true);
10408 }
10409 if (((ctx->region_type & ORT_TARGET) != 0
10410 || !ctx->target_firstprivatize_array_bases)
10411 && ((n->value & GOVD_SEEN) == 0
10412 || (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE)) == 0))
10413 {
10414 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
10415 OMP_CLAUSE_MAP);
10416 OMP_CLAUSE_DECL (nc) = decl;
10417 OMP_CLAUSE_SIZE (nc) = size_zero_node;
10418 if (ctx->target_firstprivatize_array_bases)
10419 OMP_CLAUSE_SET_MAP_KIND (nc,
10420 GOMP_MAP_FIRSTPRIVATE_POINTER);
10421 else
10422 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
10423 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (c);
10424 OMP_CLAUSE_CHAIN (c) = nc;
10425 c = nc;
10426 }
10427 }
10428 else
10429 {
10430 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
10431 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
10432 gcc_assert ((n->value & GOVD_SEEN) == 0
10433 || ((n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
10434 == 0));
10435 }
10436 break;
10437
10438 case OMP_CLAUSE_TO:
10439 case OMP_CLAUSE_FROM:
10440 case OMP_CLAUSE__CACHE_:
10441 decl = OMP_CLAUSE_DECL (c);
10442 if (!DECL_P (decl))
10443 break;
10444 if (DECL_SIZE (decl)
10445 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
10446 {
10447 tree decl2 = DECL_VALUE_EXPR (decl);
10448 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
10449 decl2 = TREE_OPERAND (decl2, 0);
10450 gcc_assert (DECL_P (decl2));
10451 tree mem = build_simple_mem_ref (decl2);
10452 OMP_CLAUSE_DECL (c) = mem;
10453 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
10454 if (ctx->outer_context)
10455 {
10456 omp_notice_variable (ctx->outer_context, decl2, true);
10457 omp_notice_variable (ctx->outer_context,
10458 OMP_CLAUSE_SIZE (c), true);
10459 }
10460 }
10461 else if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
10462 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
10463 break;
10464
10465 case OMP_CLAUSE_REDUCTION:
10466 if (OMP_CLAUSE_REDUCTION_INSCAN (c))
10467 {
10468 decl = OMP_CLAUSE_DECL (c);
10469 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
10470 if ((n->value & GOVD_REDUCTION_INSCAN) == 0)
10471 {
10472 remove = true;
10473 error_at (OMP_CLAUSE_LOCATION (c),
10474 "%qD specified in %<inscan%> %<reduction%> clause "
10475 "but not in %<scan%> directive clause", decl);
10476 break;
10477 }
10478 has_inscan_reductions = true;
10479 }
10480 /* FALLTHRU */
10481 case OMP_CLAUSE_IN_REDUCTION:
10482 case OMP_CLAUSE_TASK_REDUCTION:
10483 decl = OMP_CLAUSE_DECL (c);
10484 /* OpenACC reductions need a present_or_copy data clause.
10485 Add one if necessary. Emit an error when the reduction is private. */
10486 if (ctx->region_type == ORT_ACC_PARALLEL
10487 || ctx->region_type == ORT_ACC_SERIAL)
10488 {
10489 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
10490 if (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
10491 {
10492 remove = true;
10493 error_at (OMP_CLAUSE_LOCATION (c), "invalid private "
10494 "reduction on %qE", DECL_NAME (decl));
10495 }
10496 else if ((n->value & GOVD_MAP) == 0)
10497 {
10498 tree next = OMP_CLAUSE_CHAIN (c);
10499 tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_MAP);
10500 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_TOFROM);
10501 OMP_CLAUSE_DECL (nc) = decl;
10502 OMP_CLAUSE_CHAIN (c) = nc;
10503 lang_hooks.decls.omp_finish_clause (nc, pre_p);
10504 while (1)
10505 {
10506 OMP_CLAUSE_MAP_IN_REDUCTION (nc) = 1;
10507 if (OMP_CLAUSE_CHAIN (nc) == NULL)
10508 break;
10509 nc = OMP_CLAUSE_CHAIN (nc);
10510 }
10511 OMP_CLAUSE_CHAIN (nc) = next;
10512 n->value |= GOVD_MAP;
10513 }
10514 }
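/* Illustrative example: for
     #pragma acc parallel reduction(+:sum)
   with no matching data clause, a GOMP_MAP_TOFROM clause for 'sum'
   is appended above so the reduction result is copied back to the
   host; a private or firstprivate 'sum' is diagnosed instead.  */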
10515 if (DECL_P (decl)
10516 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
10517 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
10518 break;
10519 case OMP_CLAUSE_COPYIN:
10520 case OMP_CLAUSE_COPYPRIVATE:
10521 case OMP_CLAUSE_IF:
10522 case OMP_CLAUSE_NUM_THREADS:
10523 case OMP_CLAUSE_NUM_TEAMS:
10524 case OMP_CLAUSE_THREAD_LIMIT:
10525 case OMP_CLAUSE_DIST_SCHEDULE:
10526 case OMP_CLAUSE_DEVICE:
10527 case OMP_CLAUSE_SCHEDULE:
10528 case OMP_CLAUSE_NOWAIT:
10529 case OMP_CLAUSE_ORDERED:
10530 case OMP_CLAUSE_DEFAULT:
10531 case OMP_CLAUSE_UNTIED:
10532 case OMP_CLAUSE_COLLAPSE:
10533 case OMP_CLAUSE_FINAL:
10534 case OMP_CLAUSE_MERGEABLE:
10535 case OMP_CLAUSE_PROC_BIND:
10536 case OMP_CLAUSE_SAFELEN:
10537 case OMP_CLAUSE_SIMDLEN:
10538 case OMP_CLAUSE_DEPEND:
10539 case OMP_CLAUSE_PRIORITY:
10540 case OMP_CLAUSE_GRAINSIZE:
10541 case OMP_CLAUSE_NUM_TASKS:
10542 case OMP_CLAUSE_NOGROUP:
10543 case OMP_CLAUSE_THREADS:
10544 case OMP_CLAUSE_SIMD:
10545 case OMP_CLAUSE_HINT:
10546 case OMP_CLAUSE_DEFAULTMAP:
10547 case OMP_CLAUSE_ORDER:
10548 case OMP_CLAUSE_BIND:
10549 case OMP_CLAUSE_USE_DEVICE_PTR:
10550 case OMP_CLAUSE_USE_DEVICE_ADDR:
10551 case OMP_CLAUSE_IS_DEVICE_PTR:
10552 case OMP_CLAUSE_ASYNC:
10553 case OMP_CLAUSE_WAIT:
10554 case OMP_CLAUSE_INDEPENDENT:
10555 case OMP_CLAUSE_NUM_GANGS:
10556 case OMP_CLAUSE_NUM_WORKERS:
10557 case OMP_CLAUSE_VECTOR_LENGTH:
10558 case OMP_CLAUSE_GANG:
10559 case OMP_CLAUSE_WORKER:
10560 case OMP_CLAUSE_VECTOR:
10561 case OMP_CLAUSE_AUTO:
10562 case OMP_CLAUSE_SEQ:
10563 case OMP_CLAUSE_TILE:
10564 case OMP_CLAUSE_IF_PRESENT:
10565 case OMP_CLAUSE_FINALIZE:
10566 case OMP_CLAUSE_INCLUSIVE:
10567 case OMP_CLAUSE_EXCLUSIVE:
10568 break;
10569
10570 default:
10571 gcc_unreachable ();
10572 }
10573
10574 if (remove)
10575 *list_p = OMP_CLAUSE_CHAIN (c);
10576 else
10577 list_p = &OMP_CLAUSE_CHAIN (c);
10578 }
10579
10580 /* Add in any implicit data sharing. */
10581 struct gimplify_adjust_omp_clauses_data data;
10582 data.list_p = list_p;
10583 data.pre_p = pre_p;
10584 splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, &data);
10585
10586 if (has_inscan_reductions)
10587 for (c = *orig_list_p; c; c = OMP_CLAUSE_CHAIN (c))
10588 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
10589 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
10590 {
10591 error_at (OMP_CLAUSE_LOCATION (c),
10592 "%<inscan%> %<reduction%> clause used together with "
10593 "%<linear%> clause for a variable other than loop "
10594 "iterator");
10595 break;
10596 }
10597
10598 gimplify_omp_ctxp = ctx->outer_context;
10599 delete_omp_context (ctx);
10600 }
10601
10602 /* Return 0 if the CONSTRUCTS selectors don't match the OpenMP context,
10603 -1 if not known yet (simd is involved and won't be known until
10604 vectorization) and 1 if they do match.  If SCORES is non-NULL, it should
10605 point to an array of at least 2*NCONSTRUCTS+2 ints, and will be filled
10606 with the positions of the CONSTRUCTS (position -1 if one will never
10607 match) followed by the number of constructs in the OpenMP context
10608 construct trait.  If the score depends on whether the code will be in a
10609 declare simd clone or not, the function returns 2 and there will be
10610 two sets of scores, the first one for the case that it is not in a
10611 declare simd clone, the other for the case that it is.  */
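/* For instance (an illustrative sketch, not normative): when this is
called while gimplifying the body of

#pragma omp target teams

the context construct trait is {target, teams}; CONSTRUCTS equal to
{ OMP_TEAMS } yields 1, while { OMP_PARALLEL } yields 0, and with a
non-NULL SCORES the positions of the matches within the trait are
recorded instead of returning at the enclosing target.  */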
10612
10613 int
10614 omp_construct_selector_matches (enum tree_code *constructs, int nconstructs,
10615 int *scores)
10616 {
10617 int matched = 0, cnt = 0;
10618 bool simd_seen = false;
10619 bool target_seen = false;
10620 int declare_simd_cnt = -1;
10621 auto_vec<enum tree_code, 16> codes;
10622 for (struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp; ctx;)
10623 {
10624 if (((ctx->region_type & ORT_PARALLEL) && ctx->code == OMP_PARALLEL)
10625 || ((ctx->region_type & (ORT_TARGET | ORT_IMPLICIT_TARGET | ORT_ACC))
10626 == ORT_TARGET && ctx->code == OMP_TARGET)
10627 || ((ctx->region_type & ORT_TEAMS) && ctx->code == OMP_TEAMS)
10628 || (ctx->region_type == ORT_WORKSHARE && ctx->code == OMP_FOR)
10629 || (ctx->region_type == ORT_SIMD
10630 && ctx->code == OMP_SIMD
10631 && !omp_find_clause (ctx->clauses, OMP_CLAUSE_BIND)))
10632 {
10633 ++cnt;
10634 if (scores)
10635 codes.safe_push (ctx->code);
10636 else if (matched < nconstructs && ctx->code == constructs[matched])
10637 {
10638 if (ctx->code == OMP_SIMD)
10639 {
10640 if (matched)
10641 return 0;
10642 simd_seen = true;
10643 }
10644 ++matched;
10645 }
10646 if (ctx->code == OMP_TARGET)
10647 {
10648 if (scores == NULL)
10649 return matched < nconstructs ? 0 : simd_seen ? -1 : 1;
10650 target_seen = true;
10651 break;
10652 }
10653 }
10654 else if (ctx->region_type == ORT_WORKSHARE
10655 && ctx->code == OMP_LOOP
10656 && ctx->outer_context
10657 && ctx->outer_context->region_type == ORT_COMBINED_PARALLEL
10658 && ctx->outer_context->outer_context
10659 && ctx->outer_context->outer_context->code == OMP_LOOP
10660 && ctx->outer_context->outer_context->distribute)
10661 ctx = ctx->outer_context->outer_context;
10662 ctx = ctx->outer_context;
10663 }
10664 if (!target_seen
10665 && lookup_attribute ("omp declare simd",
10666 DECL_ATTRIBUTES (current_function_decl)))
10667 {
10668 /* Declare simd is a maybe case: it is supposed to be added only to the
10669 clones created by omp-simd-clone.c and not to the base function.  */
10670 declare_simd_cnt = cnt++;
10671 if (scores)
10672 codes.safe_push (OMP_SIMD);
10673 else if (cnt == 0
10674 && constructs[0] == OMP_SIMD)
10675 {
10676 gcc_assert (matched == 0);
10677 simd_seen = true;
10678 if (++matched == nconstructs)
10679 return -1;
10680 }
10681 }
10682 if (tree attr = lookup_attribute ("omp declare variant variant",
10683 DECL_ATTRIBUTES (current_function_decl)))
10684 {
10685 enum tree_code variant_constructs[5];
10686 int variant_nconstructs = 0;
10687 if (!target_seen)
10688 variant_nconstructs
10689 = omp_constructor_traits_to_codes (TREE_VALUE (attr),
10690 variant_constructs);
10691 for (int i = 0; i < variant_nconstructs; i++)
10692 {
10693 ++cnt;
10694 if (scores)
10695 codes.safe_push (variant_constructs[i]);
10696 else if (matched < nconstructs
10697 && variant_constructs[i] == constructs[matched])
10698 {
10699 if (variant_constructs[i] == OMP_SIMD)
10700 {
10701 if (matched)
10702 return 0;
10703 simd_seen = true;
10704 }
10705 ++matched;
10706 }
10707 }
10708 }
10709 if (!target_seen
10710 && lookup_attribute ("omp declare target block",
10711 DECL_ATTRIBUTES (current_function_decl)))
10712 {
10713 if (scores)
10714 codes.safe_push (OMP_TARGET);
10715 else if (matched < nconstructs && constructs[matched] == OMP_TARGET)
10716 ++matched;
10717 }
10718 if (scores)
10719 {
10720 for (int pass = 0; pass < (declare_simd_cnt == -1 ? 1 : 2); pass++)
10721 {
10722 int j = codes.length () - 1;
10723 for (int i = nconstructs - 1; i >= 0; i--)
10724 {
10725 while (j >= 0
10726 && (pass != 0 || declare_simd_cnt != j)
10727 && constructs[i] != codes[j])
10728 --j;
10729 if (pass == 0 && declare_simd_cnt != -1 && j > declare_simd_cnt)
10730 *scores++ = j - 1;
10731 else
10732 *scores++ = j;
10733 }
10734 *scores++ = ((pass == 0 && declare_simd_cnt != -1)
10735 ? codes.length () - 1 : codes.length ());
10736 }
10737 return declare_simd_cnt == -1 ? 1 : 2;
10738 }
10739 if (matched == nconstructs)
10740 return simd_seen ? -1 : 1;
10741 return 0;
10742 }
10743
10744 /* Gimplify OACC_CACHE. */
10745
10746 static void
10747 gimplify_oacc_cache (tree *expr_p, gimple_seq *pre_p)
10748 {
10749 tree expr = *expr_p;
10750
10751 gimplify_scan_omp_clauses (&OACC_CACHE_CLAUSES (expr), pre_p, ORT_ACC,
10752 OACC_CACHE);
10753 gimplify_adjust_omp_clauses (pre_p, NULL, &OACC_CACHE_CLAUSES (expr),
10754 OACC_CACHE);
10755
10756 /* TODO: Do something sensible with this information. */
10757
10758 *expr_p = NULL_TREE;
10759 }
10760
10761 /* Helper function of gimplify_oacc_declare.  Its purpose is to translate,
10762 if required, the 'kind' in CLAUSE into an 'entry' kind and an 'exit'
10763 kind.  The entry kind will replace the one in CLAUSE, while the exit
10764 kind will be used in a new omp_clause and returned to the caller.  */
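/* Illustrative example: a GOMP_MAP_TOFROM clause (e.g. from an OpenACC
'declare copy (x)') has its entry kind rewritten to GOMP_MAP_TO and a
new GOMP_MAP_FROM exit clause returned, while GOMP_MAP_ALLOC keeps its
entry kind and gets a GOMP_MAP_RELEASE exit clause.  */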
10765
10766 static tree
10767 gimplify_oacc_declare_1 (tree clause)
10768 {
10769 HOST_WIDE_INT kind, new_op;
10770 bool ret = false;
10771 tree c = NULL;
10772
10773 kind = OMP_CLAUSE_MAP_KIND (clause);
10774
10775 switch (kind)
10776 {
10777 case GOMP_MAP_ALLOC:
10778 new_op = GOMP_MAP_RELEASE;
10779 ret = true;
10780 break;
10781
10782 case GOMP_MAP_FROM:
10783 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_ALLOC);
10784 new_op = GOMP_MAP_FROM;
10785 ret = true;
10786 break;
10787
10788 case GOMP_MAP_TOFROM:
10789 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_TO);
10790 new_op = GOMP_MAP_FROM;
10791 ret = true;
10792 break;
10793
10794 case GOMP_MAP_DEVICE_RESIDENT:
10795 case GOMP_MAP_FORCE_DEVICEPTR:
10796 case GOMP_MAP_FORCE_PRESENT:
10797 case GOMP_MAP_LINK:
10798 case GOMP_MAP_POINTER:
10799 case GOMP_MAP_TO:
10800 break;
10801
10802 default:
10803 gcc_unreachable ();
10804 break;
10805 }
10806
10807 if (ret)
10808 {
10809 c = build_omp_clause (OMP_CLAUSE_LOCATION (clause), OMP_CLAUSE_MAP);
10810 OMP_CLAUSE_SET_MAP_KIND (c, new_op);
10811 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clause);
10812 }
10813
10814 return c;
10815 }
10816
10817 /* Gimplify OACC_DECLARE. */
10818
10819 static void
10820 gimplify_oacc_declare (tree *expr_p, gimple_seq *pre_p)
10821 {
10822 tree expr = *expr_p;
10823 gomp_target *stmt;
10824 tree clauses, t, decl;
10825
10826 clauses = OACC_DECLARE_CLAUSES (expr);
10827
10828 gimplify_scan_omp_clauses (&clauses, pre_p, ORT_TARGET_DATA, OACC_DECLARE);
10829 gimplify_adjust_omp_clauses (pre_p, NULL, &clauses, OACC_DECLARE);
10830
10831 for (t = clauses; t; t = OMP_CLAUSE_CHAIN (t))
10832 {
10833 decl = OMP_CLAUSE_DECL (t);
10834
10835 if (TREE_CODE (decl) == MEM_REF)
10836 decl = TREE_OPERAND (decl, 0);
10837
10838 if (VAR_P (decl) && !is_oacc_declared (decl))
10839 {
10840 tree attr = get_identifier ("oacc declare target");
10841 DECL_ATTRIBUTES (decl) = tree_cons (attr, NULL_TREE,
10842 DECL_ATTRIBUTES (decl));
10843 }
10844
10845 if (VAR_P (decl)
10846 && !is_global_var (decl)
10847 && DECL_CONTEXT (decl) == current_function_decl)
10848 {
10849 tree c = gimplify_oacc_declare_1 (t);
10850 if (c)
10851 {
10852 if (oacc_declare_returns == NULL)
10853 oacc_declare_returns = new hash_map<tree, tree>;
10854
10855 oacc_declare_returns->put (decl, c);
10856 }
10857 }
10858
10859 if (gimplify_omp_ctxp)
10860 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_SEEN);
10861 }
10862
10863 stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
10864 clauses);
10865
10866 gimplify_seq_add_stmt (pre_p, stmt);
10867
10868 *expr_p = NULL_TREE;
10869 }
10870
10871 /* Gimplify the contents of an OMP_PARALLEL statement. This involves
10872 gimplification of the body, as well as scanning the body for used
10873 variables. We need to do this scan now, because variable-sized
10874 decls will be decomposed during gimplification. */
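/* A sketch of why the early scan matters (illustrative source):

void foo (int n)
{
#pragma omp parallel
{ int vla[n]; use (vla); }
}

the variable-sized 'vla' is decomposed into a pointer plus a separate
allocation during gimplification, so the data-sharing scan has to see
the original decl before that happens.  */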
10875
10876 static void
10877 gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
10878 {
10879 tree expr = *expr_p;
10880 gimple *g;
10881 gimple_seq body = NULL;
10882
10883 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
10884 OMP_PARALLEL_COMBINED (expr)
10885 ? ORT_COMBINED_PARALLEL
10886 : ORT_PARALLEL, OMP_PARALLEL);
10887
10888 push_gimplify_context ();
10889
10890 g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
10891 if (gimple_code (g) == GIMPLE_BIND)
10892 pop_gimplify_context (g);
10893 else
10894 pop_gimplify_context (NULL);
10895
10896 gimplify_adjust_omp_clauses (pre_p, body, &OMP_PARALLEL_CLAUSES (expr),
10897 OMP_PARALLEL);
10898
10899 g = gimple_build_omp_parallel (body,
10900 OMP_PARALLEL_CLAUSES (expr),
10901 NULL_TREE, NULL_TREE);
10902 if (OMP_PARALLEL_COMBINED (expr))
10903 gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
10904 gimplify_seq_add_stmt (pre_p, g);
10905 *expr_p = NULL_TREE;
10906 }
10907
10908 /* Gimplify the contents of an OMP_TASK statement. This involves
10909 gimplification of the body, as well as scanning the body for used
10910 variables. We need to do this scan now, because variable-sized
10911 decls will be decomposed during gimplification. */
10912
10913 static void
10914 gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
10915 {
10916 tree expr = *expr_p;
10917 gimple *g;
10918 gimple_seq body = NULL;
10919
10920 if (OMP_TASK_BODY (expr) == NULL_TREE)
10921 for (tree c = OMP_TASK_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
10922 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
10923 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_MUTEXINOUTSET)
10924 {
10925 error_at (OMP_CLAUSE_LOCATION (c),
10926 "%<mutexinoutset%> kind in %<depend%> clause on a "
10927 "%<taskwait%> construct");
10928 break;
10929 }
10930
10931 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p,
10932 omp_find_clause (OMP_TASK_CLAUSES (expr),
10933 OMP_CLAUSE_UNTIED)
10934 ? ORT_UNTIED_TASK : ORT_TASK, OMP_TASK);
10935
10936 if (OMP_TASK_BODY (expr))
10937 {
10938 push_gimplify_context ();
10939
10940 g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
10941 if (gimple_code (g) == GIMPLE_BIND)
10942 pop_gimplify_context (g);
10943 else
10944 pop_gimplify_context (NULL);
10945 }
10946
10947 gimplify_adjust_omp_clauses (pre_p, body, &OMP_TASK_CLAUSES (expr),
10948 OMP_TASK);
10949
10950 g = gimple_build_omp_task (body,
10951 OMP_TASK_CLAUSES (expr),
10952 NULL_TREE, NULL_TREE,
10953 NULL_TREE, NULL_TREE, NULL_TREE);
10954 if (OMP_TASK_BODY (expr) == NULL_TREE)
10955 gimple_omp_task_set_taskwait_p (g, true);
10956 gimplify_seq_add_stmt (pre_p, g);
10957 *expr_p = NULL_TREE;
10958 }
10959
10960 /* Helper function of gimplify_omp_for: find the OMP_FOR or OMP_SIMD
10961 with non-NULL OMP_FOR_INIT.  Also fill in the PDATA array:
10962 pdata[0] is non-NULL if there is anything non-trivial in between,
10963 pdata[1] is the address of the OMP_PARALLEL in between if any,
10964 pdata[2] is the address of the OMP_FOR in between if any, and
10965 pdata[3] is the address of the inner OMP_FOR/OMP_SIMD.  */
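/* Illustrative example: for the combined construct

#pragma omp distribute parallel for simd

gimplify_omp_for is entered with an OMP_DISTRIBUTE whose OMP_FOR_INIT
is NULL; the walk then fills pdata[1] with the address of the
OMP_PARALLEL, pdata[2] with the address of the OMP_FOR (also with NULL
OMP_FOR_INIT) and pdata[3] with the address of the innermost OMP_SIMD,
which carries the actual loop.  */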
10966
10967 static tree
10968 find_combined_omp_for (tree *tp, int *walk_subtrees, void *data)
10969 {
10970 tree **pdata = (tree **) data;
10971 *walk_subtrees = 0;
10972 switch (TREE_CODE (*tp))
10973 {
10974 case OMP_FOR:
10975 if (OMP_FOR_INIT (*tp) != NULL_TREE)
10976 {
10977 pdata[3] = tp;
10978 return *tp;
10979 }
10980 pdata[2] = tp;
10981 *walk_subtrees = 1;
10982 break;
10983 case OMP_SIMD:
10984 if (OMP_FOR_INIT (*tp) != NULL_TREE)
10985 {
10986 pdata[3] = tp;
10987 return *tp;
10988 }
10989 break;
10990 case BIND_EXPR:
10991 if (BIND_EXPR_VARS (*tp)
10992 || (BIND_EXPR_BLOCK (*tp)
10993 && BLOCK_VARS (BIND_EXPR_BLOCK (*tp))))
10994 pdata[0] = tp;
10995 *walk_subtrees = 1;
10996 break;
10997 case STATEMENT_LIST:
10998 if (!tsi_one_before_end_p (tsi_start (*tp)))
10999 pdata[0] = tp;
11000 *walk_subtrees = 1;
11001 break;
11002 case TRY_FINALLY_EXPR:
11003 pdata[0] = tp;
11004 *walk_subtrees = 1;
11005 break;
11006 case OMP_PARALLEL:
11007 pdata[1] = tp;
11008 *walk_subtrees = 1;
11009 break;
11010 default:
11011 break;
11012 }
11013 return NULL_TREE;
11014 }
11015
11016 /* Gimplify the gross structure of an OMP_FOR statement. */
11017
11018 static enum gimplify_status
11019 gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
11020 {
11021 tree for_stmt, orig_for_stmt, inner_for_stmt = NULL_TREE, decl, var, t;
11022 enum gimplify_status ret = GS_ALL_DONE;
11023 enum gimplify_status tret;
11024 gomp_for *gfor;
11025 gimple_seq for_body, for_pre_body;
11026 int i;
11027 bitmap has_decl_expr = NULL;
11028 enum omp_region_type ort = ORT_WORKSHARE;
11029
11030 orig_for_stmt = for_stmt = *expr_p;
11031
11032 bool loop_p = (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_BIND)
11033 != NULL_TREE);
11034 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
11035 {
11036 tree *data[4] = { NULL, NULL, NULL, NULL };
11037 gcc_assert (TREE_CODE (for_stmt) != OACC_LOOP);
11038 inner_for_stmt = walk_tree (&OMP_FOR_BODY (for_stmt),
11039 find_combined_omp_for, data, NULL);
11040 if (inner_for_stmt == NULL_TREE)
11041 {
11042 gcc_assert (seen_error ());
11043 *expr_p = NULL_TREE;
11044 return GS_ERROR;
11045 }
11046 if (data[2] && OMP_FOR_PRE_BODY (*data[2]))
11047 {
11048 append_to_statement_list_force (OMP_FOR_PRE_BODY (*data[2]),
11049 &OMP_FOR_PRE_BODY (for_stmt));
11050 OMP_FOR_PRE_BODY (*data[2]) = NULL_TREE;
11051 }
11052 if (OMP_FOR_PRE_BODY (inner_for_stmt))
11053 {
11054 append_to_statement_list_force (OMP_FOR_PRE_BODY (inner_for_stmt),
11055 &OMP_FOR_PRE_BODY (for_stmt));
11056 OMP_FOR_PRE_BODY (inner_for_stmt) = NULL_TREE;
11057 }
11058
11059 if (data[0])
11060 {
11061 /* We have some statements or variable declarations in between
11062 the composite construct directives.  Move them so that they end
11063 up wrapped around the inner_for_stmt.  */
11064 data[0] = expr_p;
11065 for (i = 0; i < 3; i++)
11066 if (data[i])
11067 {
11068 tree t = *data[i];
11069 if (i < 2 && data[i + 1] == &OMP_BODY (t))
11070 data[i + 1] = data[i];
11071 *data[i] = OMP_BODY (t);
11072 tree body = build3 (BIND_EXPR, void_type_node, NULL_TREE,
11073 NULL_TREE, make_node (BLOCK));
11074 OMP_BODY (t) = body;
11075 append_to_statement_list_force (inner_for_stmt,
11076 &BIND_EXPR_BODY (body));
11077 *data[3] = t;
11078 data[3] = tsi_stmt_ptr (tsi_start (BIND_EXPR_BODY (body)));
11079 gcc_assert (*data[3] == inner_for_stmt);
11080 }
11081 return GS_OK;
11082 }
11083
11084 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (inner_for_stmt)); i++)
11085 if (!loop_p
11086 && OMP_FOR_ORIG_DECLS (inner_for_stmt)
11087 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
11088 i)) == TREE_LIST
11089 && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
11090 i)))
11091 {
11092 tree orig = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt), i);
11093 /* Class iterators aren't allowed on OMP_SIMD, so the only
11094 case we need to solve is distribute parallel for. They are
11095 allowed on the loop construct, but that is already handled
11096 in gimplify_omp_loop. */
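/* Illustrative (hypothetical source):

#pragma omp distribute parallel for lastprivate (it)
for (it = v.begin (); it != v.end (); ++it)

where 'it' has class type; TREE_PURPOSE of the TREE_LIST is the
original class iterator and TREE_VALUE the artificial decl the code
below calls 'last'.  */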
11097 gcc_assert (TREE_CODE (inner_for_stmt) == OMP_FOR
11098 && TREE_CODE (for_stmt) == OMP_DISTRIBUTE
11099 && data[1]);
11100 tree orig_decl = TREE_PURPOSE (orig);
11101 tree last = TREE_VALUE (orig);
11102 tree *pc;
11103 for (pc = &OMP_FOR_CLAUSES (inner_for_stmt);
11104 *pc; pc = &OMP_CLAUSE_CHAIN (*pc))
11105 if ((OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_PRIVATE
11106 || OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_LASTPRIVATE)
11107 && OMP_CLAUSE_DECL (*pc) == orig_decl)
11108 break;
11109 if (*pc == NULL_TREE)
11110 {
11111 tree *spc;
11112 for (spc = &OMP_PARALLEL_CLAUSES (*data[1]);
11113 *spc; spc = &OMP_CLAUSE_CHAIN (*spc))
11114 if (OMP_CLAUSE_CODE (*spc) == OMP_CLAUSE_PRIVATE
11115 && OMP_CLAUSE_DECL (*spc) == orig_decl)
11116 break;
11117 if (*spc)
11118 {
11119 tree c = *spc;
11120 *spc = OMP_CLAUSE_CHAIN (c);
11121 OMP_CLAUSE_CHAIN (c) = NULL_TREE;
11122 *pc = c;
11123 }
11124 }
11125 if (*pc == NULL_TREE)
11126 ;
11127 else if (OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_PRIVATE)
11128 {
11129 /* The private clause will appear only on inner_for_stmt.
11130 Change it into firstprivate, and add a private clause
11131 on for_stmt.  */
11132 tree c = copy_node (*pc);
11133 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
11134 OMP_FOR_CLAUSES (for_stmt) = c;
11135 OMP_CLAUSE_CODE (*pc) = OMP_CLAUSE_FIRSTPRIVATE;
11136 lang_hooks.decls.omp_finish_clause (*pc, pre_p);
11137 }
11138 else
11139 {
11140 /* The lastprivate clause will appear on both inner_for_stmt
11141 and for_stmt.  Add a firstprivate clause to
11142 inner_for_stmt.  */
11143 tree c = build_omp_clause (OMP_CLAUSE_LOCATION (*pc),
11144 OMP_CLAUSE_FIRSTPRIVATE);
11145 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (*pc);
11146 OMP_CLAUSE_CHAIN (c) = *pc;
11147 *pc = c;
11148 lang_hooks.decls.omp_finish_clause (*pc, pre_p);
11149 }
11150 tree c = build_omp_clause (UNKNOWN_LOCATION,
11151 OMP_CLAUSE_FIRSTPRIVATE);
11152 OMP_CLAUSE_DECL (c) = last;
11153 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
11154 OMP_PARALLEL_CLAUSES (*data[1]) = c;
11155 c = build_omp_clause (UNKNOWN_LOCATION,
11156 *pc ? OMP_CLAUSE_SHARED
11157 : OMP_CLAUSE_FIRSTPRIVATE);
11158 OMP_CLAUSE_DECL (c) = orig_decl;
11159 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
11160 OMP_PARALLEL_CLAUSES (*data[1]) = c;
11161 }
11162 /* Similarly, take care of C++ range-for temporaries; those should
11163 be firstprivate on the OMP_PARALLEL if any.  */
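/* Hypothetical example: in

#pragma omp distribute parallel for
for (auto &x : vec)

the C++ front end's artificial __for_range and __for_end decls hold
the bounds of the loop, so they must be firstprivate on the
OMP_PARALLEL for the inner constructs to see them.  */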
11164 if (data[1])
11165 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (inner_for_stmt)); i++)
11166 if (OMP_FOR_ORIG_DECLS (inner_for_stmt)
11167 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
11168 i)) == TREE_LIST
11169 && TREE_CHAIN (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
11170 i)))
11171 {
11172 tree orig
11173 = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt), i);
11174 tree v = TREE_CHAIN (orig);
11175 tree c = build_omp_clause (UNKNOWN_LOCATION,
11176 OMP_CLAUSE_FIRSTPRIVATE);
11177 /* First add firstprivate clause for the __for_end artificial
11178 decl. */
11179 OMP_CLAUSE_DECL (c) = TREE_VEC_ELT (v, 1);
11180 if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c)))
11181 == REFERENCE_TYPE)
11182 OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c) = 1;
11183 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
11184 OMP_PARALLEL_CLAUSES (*data[1]) = c;
11185 if (TREE_VEC_ELT (v, 0))
11186 {
11187 /* And now the same for __for_range artificial decl if it
11188 exists. */
11189 c = build_omp_clause (UNKNOWN_LOCATION,
11190 OMP_CLAUSE_FIRSTPRIVATE);
11191 OMP_CLAUSE_DECL (c) = TREE_VEC_ELT (v, 0);
11192 if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c)))
11193 == REFERENCE_TYPE)
11194 OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c) = 1;
11195 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
11196 OMP_PARALLEL_CLAUSES (*data[1]) = c;
11197 }
11198 }
11199 }
11200
11201 switch (TREE_CODE (for_stmt))
11202 {
11203 case OMP_FOR:
11204 case OMP_DISTRIBUTE:
11205 break;
11206 case OACC_LOOP:
11207 ort = ORT_ACC;
11208 break;
11209 case OMP_TASKLOOP:
11210 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_UNTIED))
11211 ort = ORT_UNTIED_TASKLOOP;
11212 else
11213 ort = ORT_TASKLOOP;
11214 break;
11215 case OMP_SIMD:
11216 ort = ORT_SIMD;
11217 break;
11218 default:
11219 gcc_unreachable ();
11220 }
11221
11222 /* Set OMP_CLAUSE_LINEAR_NO_COPYIN flag on explicit linear
11223 clause for the IV. */
11224 if (ort == ORT_SIMD && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
11225 {
11226 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), 0);
11227 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
11228 decl = TREE_OPERAND (t, 0);
11229 for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
11230 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
11231 && OMP_CLAUSE_DECL (c) == decl)
11232 {
11233 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
11234 break;
11235 }
11236 }
11237
11238 if (TREE_CODE (for_stmt) != OMP_TASKLOOP)
11239 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p, ort,
11240 loop_p && TREE_CODE (for_stmt) != OMP_SIMD
11241 ? OMP_LOOP : TREE_CODE (for_stmt));
11242
11243 if (TREE_CODE (for_stmt) == OMP_DISTRIBUTE)
11244 gimplify_omp_ctxp->distribute = true;
11245
11246 /* Handle OMP_FOR_INIT. */
11247 for_pre_body = NULL;
11248 if ((ort == ORT_SIMD
11249 || (inner_for_stmt && TREE_CODE (inner_for_stmt) == OMP_SIMD))
11250 && OMP_FOR_PRE_BODY (for_stmt))
11251 {
11252 has_decl_expr = BITMAP_ALLOC (NULL);
11253 if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == DECL_EXPR
11254 && TREE_CODE (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt)))
11255 == VAR_DECL)
11256 {
11257 t = OMP_FOR_PRE_BODY (for_stmt);
11258 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
11259 }
11260 else if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == STATEMENT_LIST)
11261 {
11262 tree_stmt_iterator si;
11263 for (si = tsi_start (OMP_FOR_PRE_BODY (for_stmt)); !tsi_end_p (si);
11264 tsi_next (&si))
11265 {
11266 t = tsi_stmt (si);
11267 if (TREE_CODE (t) == DECL_EXPR
11268 && TREE_CODE (DECL_EXPR_DECL (t)) == VAR_DECL)
11269 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
11270 }
11271 }
11272 }
11273 if (OMP_FOR_PRE_BODY (for_stmt))
11274 {
11275 if (TREE_CODE (for_stmt) != OMP_TASKLOOP || gimplify_omp_ctxp)
11276 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
11277 else
11278 {
11279 struct gimplify_omp_ctx ctx;
11280 memset (&ctx, 0, sizeof (ctx));
11281 ctx.region_type = ORT_NONE;
11282 gimplify_omp_ctxp = &ctx;
11283 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
11284 gimplify_omp_ctxp = NULL;
11285 }
11286 }
11287 OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;
11288
11289 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
11290 for_stmt = inner_for_stmt;
11291
11292 /* For taskloop, we need to gimplify the start, end and step expressions
11293 before the taskloop, outside of the taskloop omp context.  */
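/* Illustrative sketch (hypothetical calls f1/f2/f3): for

#pragma omp taskloop
for (i = f1 (); i < f2 (); i += f3 ())

the f1/f2/f3 results are evaluated into temporaries here and those
temporaries are then made firstprivate on the taskloop, as the loop
bounds and step must be computed before the construct.  */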
11294 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
11295 {
11296 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
11297 {
11298 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
11299 if (!is_gimple_constant (TREE_OPERAND (t, 1)))
11300 {
11301 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
11302 TREE_OPERAND (t, 1)
11303 = get_initialized_tmp_var (TREE_OPERAND (t, 1),
11304 gimple_seq_empty_p (for_pre_body)
11305 ? pre_p : &for_pre_body, NULL,
11306 false);
11307 /* A reference-to-pointer conversion is considered useless,
11308 but is significant for the firstprivate clause.  Force it
11309 here.  */
11310 if (TREE_CODE (type) == POINTER_TYPE
11311 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 1)))
11312 == REFERENCE_TYPE))
11313 {
11314 tree v = create_tmp_var (TYPE_MAIN_VARIANT (type));
11315 tree m = build2 (INIT_EXPR, TREE_TYPE (v), v,
11316 TREE_OPERAND (t, 1));
11317 gimplify_and_add (m, gimple_seq_empty_p (for_pre_body)
11318 ? pre_p : &for_pre_body);
11319 TREE_OPERAND (t, 1) = v;
11320 }
11321 tree c = build_omp_clause (input_location,
11322 OMP_CLAUSE_FIRSTPRIVATE);
11323 OMP_CLAUSE_DECL (c) = TREE_OPERAND (t, 1);
11324 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
11325 OMP_FOR_CLAUSES (orig_for_stmt) = c;
11326 }
11327
11328 /* Handle OMP_FOR_COND. */
11329 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
11330 if (!is_gimple_constant (TREE_OPERAND (t, 1)))
11331 {
11332 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
11333 TREE_OPERAND (t, 1)
11334 = get_initialized_tmp_var (TREE_OPERAND (t, 1),
11335 gimple_seq_empty_p (for_pre_body)
11336 ? pre_p : &for_pre_body, NULL,
11337 false);
11338 /* A reference-to-pointer conversion is considered useless,
11339 but is significant for the firstprivate clause.  Force it
11340 here.  */
11341 if (TREE_CODE (type) == POINTER_TYPE
11342 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 1)))
11343 == REFERENCE_TYPE))
11344 {
11345 tree v = create_tmp_var (TYPE_MAIN_VARIANT (type));
11346 tree m = build2 (INIT_EXPR, TREE_TYPE (v), v,
11347 TREE_OPERAND (t, 1));
11348 gimplify_and_add (m, gimple_seq_empty_p (for_pre_body)
11349 ? pre_p : &for_pre_body);
11350 TREE_OPERAND (t, 1) = v;
11351 }
11352 tree c = build_omp_clause (input_location,
11353 OMP_CLAUSE_FIRSTPRIVATE);
11354 OMP_CLAUSE_DECL (c) = TREE_OPERAND (t, 1);
11355 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
11356 OMP_FOR_CLAUSES (orig_for_stmt) = c;
11357 }
11358
11359 /* Handle OMP_FOR_INCR. */
11360 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
11361 if (TREE_CODE (t) == MODIFY_EXPR)
11362 {
11363 decl = TREE_OPERAND (t, 0);
11364 t = TREE_OPERAND (t, 1);
11365 tree *tp = &TREE_OPERAND (t, 1);
11366 if (TREE_CODE (t) == PLUS_EXPR && *tp == decl)
11367 tp = &TREE_OPERAND (t, 0);
11368
11369 if (!is_gimple_constant (*tp))
11370 {
11371 gimple_seq *seq = gimple_seq_empty_p (for_pre_body)
11372 ? pre_p : &for_pre_body;
11373 *tp = get_initialized_tmp_var (*tp, seq, NULL, false);
11374 tree c = build_omp_clause (input_location,
11375 OMP_CLAUSE_FIRSTPRIVATE);
11376 OMP_CLAUSE_DECL (c) = *tp;
11377 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
11378 OMP_FOR_CLAUSES (orig_for_stmt) = c;
11379 }
11380 }
11381 }
11382
11383 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (orig_for_stmt), pre_p, ort,
11384 OMP_TASKLOOP);
11385 }
11386
11387 if (orig_for_stmt != for_stmt)
11388 gimplify_omp_ctxp->combined_loop = true;
11389
11390 for_body = NULL;
11391 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
11392 == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
11393 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
11394 == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));
11395
11396 tree c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ORDERED);
11397 bool is_doacross = false;
11398 if (c && OMP_CLAUSE_ORDERED_EXPR (c))
11399 {
11400 is_doacross = true;
11401 gimplify_omp_ctxp->loop_iter_var.create (TREE_VEC_LENGTH
11402 (OMP_FOR_INIT (for_stmt))
11403 * 2);
11404 }
11405 int collapse = 1, tile = 0;
11406 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_COLLAPSE);
11407 if (c)
11408 collapse = tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (c));
11409 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_TILE);
11410 if (c)
11411 tile = list_length (OMP_CLAUSE_TILE_LIST (c));
11412 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
11413 {
11414 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
11415 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
11416 decl = TREE_OPERAND (t, 0);
11417 gcc_assert (DECL_P (decl));
11418 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
11419 || POINTER_TYPE_P (TREE_TYPE (decl)));
11420 if (is_doacross)
11421 {
11422 if (TREE_CODE (for_stmt) == OMP_FOR && OMP_FOR_ORIG_DECLS (for_stmt))
11423 {
11424 tree orig_decl = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
11425 if (TREE_CODE (orig_decl) == TREE_LIST)
11426 {
11427 orig_decl = TREE_PURPOSE (orig_decl);
11428 if (!orig_decl)
11429 orig_decl = decl;
11430 }
11431 gimplify_omp_ctxp->loop_iter_var.quick_push (orig_decl);
11432 }
11433 else
11434 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
11435 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
11436 }
11437
11438 /* Make sure the iteration variable is private. */
11439 tree c = NULL_TREE;
11440 tree c2 = NULL_TREE;
11441 if (orig_for_stmt != for_stmt)
11442 {
11443 /* Preserve this information until we gimplify the inner simd. */
11444 if (has_decl_expr
11445 && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
11446 TREE_PRIVATE (t) = 1;
11447 }
11448 else if (ort == ORT_SIMD)
11449 {
11450 splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
11451 (splay_tree_key) decl);
11452 omp_is_private (gimplify_omp_ctxp, decl,
11453 1 + (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
11454 != 1));
11455 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
11456 {
11457 omp_notice_variable (gimplify_omp_ctxp, decl, true);
11458 if (n->value & GOVD_LASTPRIVATE_CONDITIONAL)
11459 for (tree c3 = omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
11460 OMP_CLAUSE_LASTPRIVATE);
11461 c3; c3 = omp_find_clause (OMP_CLAUSE_CHAIN (c3),
11462 OMP_CLAUSE_LASTPRIVATE))
11463 if (OMP_CLAUSE_DECL (c3) == decl)
11464 {
11465 warning_at (OMP_CLAUSE_LOCATION (c3), 0,
11466 "conditional %<lastprivate%> on loop "
11467 "iterator %qD ignored", decl);
11468 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c3) = 0;
11469 n->value &= ~GOVD_LASTPRIVATE_CONDITIONAL;
11470 }
11471 }
11472 else if (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1 && !loop_p)
11473 {
11474 c = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
11475 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
11476 unsigned int flags = GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN;
11477 if ((has_decl_expr
11478 && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
11479 || TREE_PRIVATE (t))
11480 {
11481 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
11482 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
11483 }
11484 struct gimplify_omp_ctx *outer
11485 = gimplify_omp_ctxp->outer_context;
11486 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
11487 {
11488 if (outer->region_type == ORT_WORKSHARE
11489 && outer->combined_loop)
11490 {
11491 n = splay_tree_lookup (outer->variables,
11492 (splay_tree_key)decl);
11493 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
11494 {
11495 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
11496 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
11497 }
11498 else
11499 {
11500 struct gimplify_omp_ctx *octx = outer->outer_context;
11501 if (octx
11502 && octx->region_type == ORT_COMBINED_PARALLEL
11503 && octx->outer_context
11504 && (octx->outer_context->region_type
11505 == ORT_WORKSHARE)
11506 && octx->outer_context->combined_loop)
11507 {
11508 octx = octx->outer_context;
11509 n = splay_tree_lookup (octx->variables,
11510 (splay_tree_key)decl);
11511 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
11512 {
11513 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
11514 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
11515 }
11516 }
11517 }
11518 }
11519 }
11520
11521 OMP_CLAUSE_DECL (c) = decl;
11522 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
11523 OMP_FOR_CLAUSES (for_stmt) = c;
11524 omp_add_variable (gimplify_omp_ctxp, decl, flags);
11525 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
11526 {
11527 if (outer->region_type == ORT_WORKSHARE
11528 && outer->combined_loop)
11529 {
11530 if (outer->outer_context
11531 && (outer->outer_context->region_type
11532 == ORT_COMBINED_PARALLEL))
11533 outer = outer->outer_context;
11534 else if (omp_check_private (outer, decl, false))
11535 outer = NULL;
11536 }
11537 else if (((outer->region_type & ORT_TASKLOOP)
11538 == ORT_TASKLOOP)
11539 && outer->combined_loop
11540 && !omp_check_private (gimplify_omp_ctxp,
11541 decl, false))
11542 ;
11543 else if (outer->region_type != ORT_COMBINED_PARALLEL)
11544 {
11545 omp_notice_variable (outer, decl, true);
11546 outer = NULL;
11547 }
11548 if (outer)
11549 {
11550 n = splay_tree_lookup (outer->variables,
11551 (splay_tree_key)decl);
11552 if (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
11553 {
11554 omp_add_variable (outer, decl,
11555 GOVD_LASTPRIVATE | GOVD_SEEN);
11556 if (outer->region_type == ORT_COMBINED_PARALLEL
11557 && outer->outer_context
11558 && (outer->outer_context->region_type
11559 == ORT_WORKSHARE)
11560 && outer->outer_context->combined_loop)
11561 {
11562 outer = outer->outer_context;
11563 n = splay_tree_lookup (outer->variables,
11564 (splay_tree_key)decl);
11565 if (omp_check_private (outer, decl, false))
11566 outer = NULL;
11567 else if (n == NULL
11568 || ((n->value & GOVD_DATA_SHARE_CLASS)
11569 == 0))
11570 omp_add_variable (outer, decl,
11571 GOVD_LASTPRIVATE
11572 | GOVD_SEEN);
11573 else
11574 outer = NULL;
11575 }
11576 if (outer && outer->outer_context
11577 && ((outer->outer_context->region_type
11578 & ORT_COMBINED_TEAMS) == ORT_COMBINED_TEAMS
11579 || (((outer->region_type & ORT_TASKLOOP)
11580 == ORT_TASKLOOP)
11581 && (outer->outer_context->region_type
11582 == ORT_COMBINED_PARALLEL))))
11583 {
11584 outer = outer->outer_context;
11585 n = splay_tree_lookup (outer->variables,
11586 (splay_tree_key)decl);
11587 if (n == NULL
11588 || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
11589 omp_add_variable (outer, decl,
11590 GOVD_SHARED | GOVD_SEEN);
11591 else
11592 outer = NULL;
11593 }
11594 if (outer && outer->outer_context)
11595 omp_notice_variable (outer->outer_context, decl,
11596 true);
11597 }
11598 }
11599 }
11600 }
11601 else
11602 {
11603 bool lastprivate
11604 = (!has_decl_expr
11605 || !bitmap_bit_p (has_decl_expr, DECL_UID (decl)));
11606 if (TREE_PRIVATE (t))
11607 lastprivate = false;
11608 if (loop_p && OMP_FOR_ORIG_DECLS (for_stmt))
11609 {
11610 tree elt = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
11611 if (TREE_CODE (elt) == TREE_LIST && TREE_PURPOSE (elt))
11612 lastprivate = false;
11613 }
11614
11615 struct gimplify_omp_ctx *outer
11616 = gimplify_omp_ctxp->outer_context;
11617 if (outer && lastprivate)
11618 {
11619 if (outer->region_type == ORT_WORKSHARE
11620 && outer->combined_loop)
11621 {
11622 n = splay_tree_lookup (outer->variables,
11623 (splay_tree_key)decl);
11624 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
11625 {
11626 lastprivate = false;
11627 outer = NULL;
11628 }
11629 else if (outer->outer_context
11630 && (outer->outer_context->region_type
11631 == ORT_COMBINED_PARALLEL))
11632 outer = outer->outer_context;
11633 else if (omp_check_private (outer, decl, false))
11634 outer = NULL;
11635 }
11636 else if (((outer->region_type & ORT_TASKLOOP)
11637 == ORT_TASKLOOP)
11638 && outer->combined_loop
11639 && !omp_check_private (gimplify_omp_ctxp,
11640 decl, false))
11641 ;
11642 else if (outer->region_type != ORT_COMBINED_PARALLEL)
11643 {
11644 omp_notice_variable (outer, decl, true);
11645 outer = NULL;
11646 }
11647 if (outer)
11648 {
11649 n = splay_tree_lookup (outer->variables,
11650 (splay_tree_key)decl);
11651 if (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
11652 {
11653 omp_add_variable (outer, decl,
11654 GOVD_LASTPRIVATE | GOVD_SEEN);
11655 if (outer->region_type == ORT_COMBINED_PARALLEL
11656 && outer->outer_context
11657 && (outer->outer_context->region_type
11658 == ORT_WORKSHARE)
11659 && outer->outer_context->combined_loop)
11660 {
11661 outer = outer->outer_context;
11662 n = splay_tree_lookup (outer->variables,
11663 (splay_tree_key)decl);
11664 if (omp_check_private (outer, decl, false))
11665 outer = NULL;
11666 else if (n == NULL
11667 || ((n->value & GOVD_DATA_SHARE_CLASS)
11668 == 0))
11669 omp_add_variable (outer, decl,
11670 GOVD_LASTPRIVATE
11671 | GOVD_SEEN);
11672 else
11673 outer = NULL;
11674 }
11675 if (outer && outer->outer_context
11676 && ((outer->outer_context->region_type
11677 & ORT_COMBINED_TEAMS) == ORT_COMBINED_TEAMS
11678 || (((outer->region_type & ORT_TASKLOOP)
11679 == ORT_TASKLOOP)
11680 && (outer->outer_context->region_type
11681 == ORT_COMBINED_PARALLEL))))
11682 {
11683 outer = outer->outer_context;
11684 n = splay_tree_lookup (outer->variables,
11685 (splay_tree_key)decl);
11686 if (n == NULL
11687 || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
11688 omp_add_variable (outer, decl,
11689 GOVD_SHARED | GOVD_SEEN);
11690 else
11691 outer = NULL;
11692 }
11693 if (outer && outer->outer_context)
11694 omp_notice_variable (outer->outer_context, decl,
11695 true);
11696 }
11697 }
11698 }
11699
11700 c = build_omp_clause (input_location,
11701 lastprivate ? OMP_CLAUSE_LASTPRIVATE
11702 : OMP_CLAUSE_PRIVATE);
11703 OMP_CLAUSE_DECL (c) = decl;
11704 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
11705 OMP_FOR_CLAUSES (for_stmt) = c;
11706 omp_add_variable (gimplify_omp_ctxp, decl,
11707 (lastprivate ? GOVD_LASTPRIVATE : GOVD_PRIVATE)
11708 | GOVD_EXPLICIT | GOVD_SEEN);
11709 c = NULL_TREE;
11710 }
11711 }
11712 else if (omp_is_private (gimplify_omp_ctxp, decl, 0))
11713 {
11714 omp_notice_variable (gimplify_omp_ctxp, decl, true);
11715 splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
11716 (splay_tree_key) decl);
11717 if (n && (n->value & GOVD_LASTPRIVATE_CONDITIONAL))
11718 for (tree c3 = omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
11719 OMP_CLAUSE_LASTPRIVATE);
11720 c3; c3 = omp_find_clause (OMP_CLAUSE_CHAIN (c3),
11721 OMP_CLAUSE_LASTPRIVATE))
11722 if (OMP_CLAUSE_DECL (c3) == decl)
11723 {
11724 warning_at (OMP_CLAUSE_LOCATION (c3), 0,
11725 "conditional %<lastprivate%> on loop "
11726 "iterator %qD ignored", decl);
11727 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c3) = 0;
11728 n->value &= ~GOVD_LASTPRIVATE_CONDITIONAL;
11729 }
11730 }
11731 else
11732 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
11733
11734 /* If DECL is not a gimple register, create a temporary variable to act
11735 as an iteration counter.  This is valid, since DECL cannot be
11736 modified in the body of the loop.  Similarly for any iteration vars
11737 in a simd with collapse > 1, where the iterator vars must be
11738 lastprivate.  */
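/* Sketch (illustrative): in

#pragma omp simd collapse(2)
for (i = 0; i < n; i++)
for (j = 0; j < m; j++)

both i and j are replaced by fresh private temporaries as the GIMPLE
iteration variables, with assignments back to the user variables
emitted into the loop body.  */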
11739 if (orig_for_stmt != for_stmt)
11740 var = decl;
11741 else if (!is_gimple_reg (decl)
11742 || (ort == ORT_SIMD
11743 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1))
11744 {
11745 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
11746 /* Make sure omp_add_variable is not called on it prematurely.
11747 We call it ourselves a few lines later. */
11748 gimplify_omp_ctxp = NULL;
11749 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
11750 gimplify_omp_ctxp = ctx;
11751 TREE_OPERAND (t, 0) = var;
11752
11753 gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));
11754
11755 if (ort == ORT_SIMD
11756 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
11757 {
11758 c2 = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
11759 OMP_CLAUSE_LINEAR_NO_COPYIN (c2) = 1;
11760 OMP_CLAUSE_LINEAR_NO_COPYOUT (c2) = 1;
11761 OMP_CLAUSE_DECL (c2) = var;
11762 OMP_CLAUSE_CHAIN (c2) = OMP_FOR_CLAUSES (for_stmt);
11763 OMP_FOR_CLAUSES (for_stmt) = c2;
11764 omp_add_variable (gimplify_omp_ctxp, var,
11765 GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN);
11766 if (c == NULL_TREE)
11767 {
11768 c = c2;
11769 c2 = NULL_TREE;
11770 }
11771 }
11772 else
11773 omp_add_variable (gimplify_omp_ctxp, var,
11774 GOVD_PRIVATE | GOVD_SEEN);
11775 }
11776 else
11777 var = decl;
11778
11779 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
11780 is_gimple_val, fb_rvalue, false);
11781 ret = MIN (ret, tret);
11782 if (ret == GS_ERROR)
11783 return ret;
11784
11785 /* Handle OMP_FOR_COND. */
11786 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
11787 gcc_assert (COMPARISON_CLASS_P (t));
11788 gcc_assert (TREE_OPERAND (t, 0) == decl);
11789
11790 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
11791 is_gimple_val, fb_rvalue, false);
11792 ret = MIN (ret, tret);
11793
11794 /* Handle OMP_FOR_INCR. */
11795 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
11796 switch (TREE_CODE (t))
11797 {
11798 case PREINCREMENT_EXPR:
11799 case POSTINCREMENT_EXPR:
11800 {
11801 tree decl = TREE_OPERAND (t, 0);
11802 /* c_omp_for_incr_canonicalize_ptr() should have been
11803 called to massage things appropriately. */
11804 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
11805
11806 if (orig_for_stmt != for_stmt)
11807 break;
11808 t = build_int_cst (TREE_TYPE (decl), 1);
11809 if (c)
11810 OMP_CLAUSE_LINEAR_STEP (c) = t;
11811 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
11812 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
11813 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
11814 break;
11815 }
11816
11817 case PREDECREMENT_EXPR:
11818 case POSTDECREMENT_EXPR:
11819 /* c_omp_for_incr_canonicalize_ptr() should have been
11820 called to massage things appropriately. */
11821 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
11822 if (orig_for_stmt != for_stmt)
11823 break;
11824 t = build_int_cst (TREE_TYPE (decl), -1);
11825 if (c)
11826 OMP_CLAUSE_LINEAR_STEP (c) = t;
11827 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
11828 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
11829 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
11830 break;
11831
11832 case MODIFY_EXPR:
11833 gcc_assert (TREE_OPERAND (t, 0) == decl);
11834 TREE_OPERAND (t, 0) = var;
11835
11836 t = TREE_OPERAND (t, 1);
11837 switch (TREE_CODE (t))
11838 {
11839 case PLUS_EXPR:
11840 if (TREE_OPERAND (t, 1) == decl)
11841 {
11842 TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
11843 TREE_OPERAND (t, 0) = var;
11844 break;
11845 }
11846
11847 /* Fallthru. */
11848 case MINUS_EXPR:
11849 case POINTER_PLUS_EXPR:
11850 gcc_assert (TREE_OPERAND (t, 0) == decl);
11851 TREE_OPERAND (t, 0) = var;
11852 break;
11853 default:
11854 gcc_unreachable ();
11855 }
11856
11857 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
11858 is_gimple_val, fb_rvalue, false);
11859 ret = MIN (ret, tret);
11860 if (c)
11861 {
11862 tree step = TREE_OPERAND (t, 1);
11863 tree stept = TREE_TYPE (decl);
11864 if (POINTER_TYPE_P (stept))
11865 stept = sizetype;
11866 step = fold_convert (stept, step);
11867 if (TREE_CODE (t) == MINUS_EXPR)
11868 step = fold_build1 (NEGATE_EXPR, stept, step);
11869 OMP_CLAUSE_LINEAR_STEP (c) = step;
11870 if (step != TREE_OPERAND (t, 1))
11871 {
11872 tret = gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c),
11873 &for_pre_body, NULL,
11874 is_gimple_val, fb_rvalue, false);
11875 ret = MIN (ret, tret);
11876 }
11877 }
11878 break;
11879
11880 default:
11881 gcc_unreachable ();
11882 }
11883
11884 if (c2)
11885 {
11886 gcc_assert (c);
11887 OMP_CLAUSE_LINEAR_STEP (c2) = OMP_CLAUSE_LINEAR_STEP (c);
11888 }
11889
11890 if ((var != decl || collapse > 1 || tile) && orig_for_stmt == for_stmt)
11891 {
11892 for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
11893 if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
11894 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
11895 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
11896 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)
11897 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) == NULL))
11898 && OMP_CLAUSE_DECL (c) == decl)
11899 {
11900 if (is_doacross && (collapse == 1 || i >= collapse))
11901 t = var;
11902 else
11903 {
11904 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
11905 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
11906 gcc_assert (TREE_OPERAND (t, 0) == var);
11907 t = TREE_OPERAND (t, 1);
11908 gcc_assert (TREE_CODE (t) == PLUS_EXPR
11909 || TREE_CODE (t) == MINUS_EXPR
11910 || TREE_CODE (t) == POINTER_PLUS_EXPR);
11911 gcc_assert (TREE_OPERAND (t, 0) == var);
11912 t = build2 (TREE_CODE (t), TREE_TYPE (decl),
11913 is_doacross ? var : decl,
11914 TREE_OPERAND (t, 1));
11915 }
11916 gimple_seq *seq;
11917 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
11918 seq = &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c);
11919 else
11920 seq = &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c);
11921 push_gimplify_context ();
11922 gimplify_assign (decl, t, seq);
11923 gimple *bind = NULL;
11924 if (gimplify_ctxp->temps)
11925 {
11926 bind = gimple_build_bind (NULL_TREE, *seq, NULL_TREE);
11927 *seq = NULL;
11928 gimplify_seq_add_stmt (seq, bind);
11929 }
11930 pop_gimplify_context (bind);
11931 }
11932 }
11933 }
11934
11935 BITMAP_FREE (has_decl_expr);
11936
11937 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP
11938 || (loop_p && orig_for_stmt == for_stmt))
11939 {
11940 push_gimplify_context ();
11941 if (TREE_CODE (OMP_FOR_BODY (orig_for_stmt)) != BIND_EXPR)
11942 {
11943 OMP_FOR_BODY (orig_for_stmt)
11944 = build3 (BIND_EXPR, void_type_node, NULL,
11945 OMP_FOR_BODY (orig_for_stmt), NULL);
11946 TREE_SIDE_EFFECTS (OMP_FOR_BODY (orig_for_stmt)) = 1;
11947 }
11948 }
11949
11950 gimple *g = gimplify_and_return_first (OMP_FOR_BODY (orig_for_stmt),
11951 &for_body);
11952
11953 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP
11954 || (loop_p && orig_for_stmt == for_stmt))
11955 {
11956 if (gimple_code (g) == GIMPLE_BIND)
11957 pop_gimplify_context (g);
11958 else
11959 pop_gimplify_context (NULL);
11960 }
11961
11962 if (orig_for_stmt != for_stmt)
11963 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
11964 {
11965 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
11966 decl = TREE_OPERAND (t, 0);
11967 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
11968 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
11969 gimplify_omp_ctxp = ctx->outer_context;
11970 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
11971 gimplify_omp_ctxp = ctx;
11972 omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
11973 TREE_OPERAND (t, 0) = var;
11974 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
11975 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
11976 TREE_OPERAND (TREE_OPERAND (t, 1), 0) = var;
11977 }
11978
11979 gimplify_adjust_omp_clauses (pre_p, for_body,
11980 &OMP_FOR_CLAUSES (orig_for_stmt),
11981 TREE_CODE (orig_for_stmt));
11982
11983 int kind;
11984 switch (TREE_CODE (orig_for_stmt))
11985 {
11986 case OMP_FOR: kind = GF_OMP_FOR_KIND_FOR; break;
11987 case OMP_SIMD: kind = GF_OMP_FOR_KIND_SIMD; break;
11988 case OMP_DISTRIBUTE: kind = GF_OMP_FOR_KIND_DISTRIBUTE; break;
11989 case OMP_TASKLOOP: kind = GF_OMP_FOR_KIND_TASKLOOP; break;
11990 case OACC_LOOP: kind = GF_OMP_FOR_KIND_OACC_LOOP; break;
11991 default:
11992 gcc_unreachable ();
11993 }
11994 if (loop_p && kind == GF_OMP_FOR_KIND_SIMD)
11995 {
11996 gimplify_seq_add_seq (pre_p, for_pre_body);
11997 for_pre_body = NULL;
11998 }
11999 gfor = gimple_build_omp_for (for_body, kind, OMP_FOR_CLAUSES (orig_for_stmt),
12000 TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
12001 for_pre_body);
12002 if (orig_for_stmt != for_stmt)
12003 gimple_omp_for_set_combined_p (gfor, true);
12004 if (gimplify_omp_ctxp
12005 && (gimplify_omp_ctxp->combined_loop
12006 || (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
12007 && gimplify_omp_ctxp->outer_context
12008 && gimplify_omp_ctxp->outer_context->combined_loop)))
12009 {
12010 gimple_omp_for_set_combined_into_p (gfor, true);
12011 if (gimplify_omp_ctxp->combined_loop)
12012 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_SIMD);
12013 else
12014 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_FOR);
12015 }
12016
12017 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
12018 {
12019 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
12020 gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
12021 gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
12022 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
12023 gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
12024 gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
12025 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
12026 gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
12027 }
12028
12029 /* OMP_TASKLOOP is gimplified as two GIMPLE_OMP_FOR taskloop
12030 constructs with a GIMPLE_OMP_TASK sandwiched in between them.
12031 The outer taskloop computes the number of iterations and counts
12032 for collapsed loops and holds the taskloop-specific clauses.
12033 The task construct represents the effect of data sharing on the
12034 explicit task it creates, and the inner taskloop represents the
12035 expansion of the static loop inside the explicit task construct.  */
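/* Schematically (illustrative):

     GIMPLE_OMP_FOR (kind taskloop)   <- outer: iteration counts,
       GIMPLE_BIND                       grainsize/num_tasks/nogroup
         GIMPLE_OMP_TASK              <- data-sharing clauses
           GIMPLE_BIND
             GIMPLE_OMP_FOR (kind taskloop)  <- the static loop
               loop body  */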
12036 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
12037 {
12038 tree *gfor_clauses_ptr = gimple_omp_for_clauses_ptr (gfor);
12039 tree task_clauses = NULL_TREE;
12040 tree c = *gfor_clauses_ptr;
12041 tree *gtask_clauses_ptr = &task_clauses;
12042 tree outer_for_clauses = NULL_TREE;
12043 tree *gforo_clauses_ptr = &outer_for_clauses;
12044 for (; c; c = OMP_CLAUSE_CHAIN (c))
12045 switch (OMP_CLAUSE_CODE (c))
12046 {
12047 /* These clauses are allowed on task; move them there.  */
12048 case OMP_CLAUSE_SHARED:
12049 case OMP_CLAUSE_FIRSTPRIVATE:
12050 case OMP_CLAUSE_DEFAULT:
12051 case OMP_CLAUSE_IF:
12052 case OMP_CLAUSE_UNTIED:
12053 case OMP_CLAUSE_FINAL:
12054 case OMP_CLAUSE_MERGEABLE:
12055 case OMP_CLAUSE_PRIORITY:
12056 case OMP_CLAUSE_REDUCTION:
12057 case OMP_CLAUSE_IN_REDUCTION:
12058 *gtask_clauses_ptr = c;
12059 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
12060 break;
12061 case OMP_CLAUSE_PRIVATE:
12062 if (OMP_CLAUSE_PRIVATE_TASKLOOP_IV (c))
12063 {
12064 /* We want private on outer for and firstprivate
12065 on task. */
12066 *gtask_clauses_ptr
12067 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
12068 OMP_CLAUSE_FIRSTPRIVATE);
12069 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
12070 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL);
12071 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
12072 *gforo_clauses_ptr = c;
12073 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
12074 }
12075 else
12076 {
12077 *gtask_clauses_ptr = c;
12078 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
12079 }
12080 break;
12081 /* These clauses go into outer taskloop clauses. */
12082 case OMP_CLAUSE_GRAINSIZE:
12083 case OMP_CLAUSE_NUM_TASKS:
12084 case OMP_CLAUSE_NOGROUP:
12085 *gforo_clauses_ptr = c;
12086 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
12087 break;
12088 /* A taskloop clause that we duplicate on both taskloops.  */
12089 case OMP_CLAUSE_COLLAPSE:
12090 *gfor_clauses_ptr = c;
12091 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
12092 *gforo_clauses_ptr = copy_node (c);
12093 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
12094 break;
12095 /* For lastprivate, keep the clause on the inner taskloop, and add
12096 a shared clause on the task.  If the same decl is also firstprivate,
12097 also add a firstprivate clause on the inner taskloop.  */
12098 case OMP_CLAUSE_LASTPRIVATE:
12099 if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c))
12100 {
12101 /* For taskloop C++ lastprivate IVs, we want:
12102 1) private on outer taskloop
12103 2) firstprivate and shared on task
12104 3) lastprivate on inner taskloop */
12105 *gtask_clauses_ptr
12106 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
12107 OMP_CLAUSE_FIRSTPRIVATE);
12108 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
12109 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL);
12110 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
12111 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c) = 1;
12112 *gforo_clauses_ptr = build_omp_clause (OMP_CLAUSE_LOCATION (c),
12113 OMP_CLAUSE_PRIVATE);
12114 OMP_CLAUSE_DECL (*gforo_clauses_ptr) = OMP_CLAUSE_DECL (c);
12115 OMP_CLAUSE_PRIVATE_TASKLOOP_IV (*gforo_clauses_ptr) = 1;
12116 TREE_TYPE (*gforo_clauses_ptr) = TREE_TYPE (c);
12117 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
12118 }
12119 *gfor_clauses_ptr = c;
12120 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
12121 *gtask_clauses_ptr
12122 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_SHARED);
12123 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
12124 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
12125 OMP_CLAUSE_SHARED_FIRSTPRIVATE (*gtask_clauses_ptr) = 1;
12126 gtask_clauses_ptr
12127 = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
12128 break;
12129 default:
12130 gcc_unreachable ();
12131 }
12132 *gfor_clauses_ptr = NULL_TREE;
12133 *gtask_clauses_ptr = NULL_TREE;
12134 *gforo_clauses_ptr = NULL_TREE;
12135 g = gimple_build_bind (NULL_TREE, gfor, NULL_TREE);
12136 g = gimple_build_omp_task (g, task_clauses, NULL_TREE, NULL_TREE,
12137 NULL_TREE, NULL_TREE, NULL_TREE);
12138 gimple_omp_task_set_taskloop_p (g, true);
12139 g = gimple_build_bind (NULL_TREE, g, NULL_TREE);
12140 gomp_for *gforo
12141 = gimple_build_omp_for (g, GF_OMP_FOR_KIND_TASKLOOP, outer_for_clauses,
12142 gimple_omp_for_collapse (gfor),
12143 gimple_omp_for_pre_body (gfor));
12144 gimple_omp_for_set_pre_body (gfor, NULL);
12145 gimple_omp_for_set_combined_p (gforo, true);
12146 gimple_omp_for_set_combined_into_p (gfor, true);
12147 for (i = 0; i < (int) gimple_omp_for_collapse (gfor); i++)
12148 {
12149 tree type = TREE_TYPE (gimple_omp_for_index (gfor, i));
12150 tree v = create_tmp_var (type);
12151 gimple_omp_for_set_index (gforo, i, v);
12152 t = unshare_expr (gimple_omp_for_initial (gfor, i));
12153 gimple_omp_for_set_initial (gforo, i, t);
12154 gimple_omp_for_set_cond (gforo, i,
12155 gimple_omp_for_cond (gfor, i));
12156 t = unshare_expr (gimple_omp_for_final (gfor, i));
12157 gimple_omp_for_set_final (gforo, i, t);
12158 t = unshare_expr (gimple_omp_for_incr (gfor, i));
12159 gcc_assert (TREE_OPERAND (t, 0) == gimple_omp_for_index (gfor, i));
12160 TREE_OPERAND (t, 0) = v;
12161 gimple_omp_for_set_incr (gforo, i, t);
12162 t = build_omp_clause (input_location, OMP_CLAUSE_PRIVATE);
12163 OMP_CLAUSE_DECL (t) = v;
12164 OMP_CLAUSE_CHAIN (t) = gimple_omp_for_clauses (gforo);
12165 gimple_omp_for_set_clauses (gforo, t);
12166 }
12167 gimplify_seq_add_stmt (pre_p, gforo);
12168 }
12169 else
12170 gimplify_seq_add_stmt (pre_p, gfor);
12171
12172 if (TREE_CODE (orig_for_stmt) == OMP_FOR)
12173 {
12174 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
12175 unsigned lastprivate_conditional = 0;
12176 while (ctx
12177 && (ctx->region_type == ORT_TARGET_DATA
12178 || ctx->region_type == ORT_TASKGROUP))
12179 ctx = ctx->outer_context;
12180 if (ctx && (ctx->region_type & ORT_PARALLEL) != 0)
12181 for (tree c = gimple_omp_for_clauses (gfor);
12182 c; c = OMP_CLAUSE_CHAIN (c))
12183 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
12184 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
12185 ++lastprivate_conditional;
12186 if (lastprivate_conditional)
12187 {
12188 struct omp_for_data fd;
12189 omp_extract_for_data (gfor, &fd, NULL);
12190 tree type = build_array_type_nelts (unsigned_type_for (fd.iter_type),
12191 lastprivate_conditional);
12192 tree var = create_tmp_var_raw (type);
12193 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
12194 OMP_CLAUSE_DECL (c) = var;
12195 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (gfor);
12196 gimple_omp_for_set_clauses (gfor, c);
12197 omp_add_variable (ctx, var, GOVD_CONDTEMP | GOVD_SEEN);
12198 }
12199 }
12200 else if (TREE_CODE (orig_for_stmt) == OMP_SIMD)
12201 {
12202 unsigned lastprivate_conditional = 0;
12203 for (tree c = gimple_omp_for_clauses (gfor); c; c = OMP_CLAUSE_CHAIN (c))
12204 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
12205 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
12206 ++lastprivate_conditional;
12207 if (lastprivate_conditional)
12208 {
12209 struct omp_for_data fd;
12210 omp_extract_for_data (gfor, &fd, NULL);
12211 tree type = unsigned_type_for (fd.iter_type);
12212 while (lastprivate_conditional--)
12213 {
12214 tree c = build_omp_clause (UNKNOWN_LOCATION,
12215 OMP_CLAUSE__CONDTEMP_);
12216 OMP_CLAUSE_DECL (c) = create_tmp_var (type);
12217 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (gfor);
12218 gimple_omp_for_set_clauses (gfor, c);
12219 }
12220 }
12221 }
12222
12223 if (ret != GS_ALL_DONE)
12224 return GS_ERROR;
12225 *expr_p = NULL_TREE;
12226 return GS_ALL_DONE;
12227 }
12228
12229 /* Helper for gimplify_omp_loop, called through walk_tree. */
12230
12231 static tree
12232 replace_reduction_placeholders (tree *tp, int *walk_subtrees, void *data)
12233 {
12234 if (DECL_P (*tp))
12235 {
12236 tree *d = (tree *) data;
12237 if (*tp == OMP_CLAUSE_REDUCTION_PLACEHOLDER (d[0]))
12238 {
12239 *tp = OMP_CLAUSE_REDUCTION_PLACEHOLDER (d[1]);
12240 *walk_subtrees = 0;
12241 }
12242 else if (*tp == OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (d[0]))
12243 {
12244 *tp = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (d[1]);
12245 *walk_subtrees = 0;
12246 }
12247 }
12248 return NULL_TREE;
12249 }
12250
12251 /* Gimplify the gross structure of an OMP_LOOP statement. */
12252
12253 static enum gimplify_status
12254 gimplify_omp_loop (tree *expr_p, gimple_seq *pre_p)
12255 {
12256 tree for_stmt = *expr_p;
12257 tree clauses = OMP_FOR_CLAUSES (for_stmt);
12258 struct gimplify_omp_ctx *octx = gimplify_omp_ctxp;
12259 enum omp_clause_bind_kind kind = OMP_CLAUSE_BIND_THREAD;
12260 int i;
12261
12262 /* If order is not present, the behavior is as if order(concurrent)
12263 appeared. */
12264 tree order = omp_find_clause (clauses, OMP_CLAUSE_ORDER);
12265 if (order == NULL_TREE)
12266 {
12267 order = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_ORDER);
12268 OMP_CLAUSE_CHAIN (order) = clauses;
12269 OMP_FOR_CLAUSES (for_stmt) = clauses = order;
12270 }
12271
12272 tree bind = omp_find_clause (clauses, OMP_CLAUSE_BIND);
12273 if (bind == NULL_TREE)
12274 {
12275 if (!flag_openmp) /* flag_openmp_simd */
12276 ;
12277 else if (octx && (octx->region_type & ORT_TEAMS) != 0)
12278 kind = OMP_CLAUSE_BIND_TEAMS;
12279 else if (octx && (octx->region_type & ORT_PARALLEL) != 0)
12280 kind = OMP_CLAUSE_BIND_PARALLEL;
12281 else
12282 {
12283 for (; octx; octx = octx->outer_context)
12284 {
12285 if ((octx->region_type & ORT_ACC) != 0
12286 || octx->region_type == ORT_NONE
12287 || octx->region_type == ORT_IMPLICIT_TARGET)
12288 continue;
12289 break;
12290 }
12291 if (octx == NULL && !in_omp_construct)
12292 error_at (EXPR_LOCATION (for_stmt),
12293 "%<bind%> clause not specified on a %<loop%> "
12294 "construct not nested inside another OpenMP construct");
12295 }
12296 bind = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_BIND);
12297 OMP_CLAUSE_CHAIN (bind) = clauses;
12298 OMP_CLAUSE_BIND_KIND (bind) = kind;
12299 OMP_FOR_CLAUSES (for_stmt) = bind;
12300 }
12301 else
12302 switch (OMP_CLAUSE_BIND_KIND (bind))
12303 {
12304 case OMP_CLAUSE_BIND_THREAD:
12305 break;
12306 case OMP_CLAUSE_BIND_PARALLEL:
12307 if (!flag_openmp) /* flag_openmp_simd */
12308 {
12309 OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
12310 break;
12311 }
12312 for (; octx; octx = octx->outer_context)
12313 if (octx->region_type == ORT_SIMD
12314 && omp_find_clause (octx->clauses, OMP_CLAUSE_BIND) == NULL_TREE)
12315 {
12316 error_at (EXPR_LOCATION (for_stmt),
12317 "%<bind(parallel)%> on a %<loop%> construct nested "
12318 "inside %<simd%> construct");
12319 OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
12320 break;
12321 }
12322 kind = OMP_CLAUSE_BIND_PARALLEL;
12323 break;
12324 case OMP_CLAUSE_BIND_TEAMS:
12325 if (!flag_openmp) /* flag_openmp_simd */
12326 {
12327 OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
12328 break;
12329 }
12330 if ((octx
12331 && octx->region_type != ORT_IMPLICIT_TARGET
12332 && octx->region_type != ORT_NONE
12333 && (octx->region_type & ORT_TEAMS) == 0)
12334 || in_omp_construct)
12335 {
12336 error_at (EXPR_LOCATION (for_stmt),
12337 "%<bind(teams)%> on a %<loop%> region not strictly "
12338 "nested inside of a %<teams%> region");
12339 OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
12340 break;
12341 }
12342 kind = OMP_CLAUSE_BIND_TEAMS;
12343 break;
12344 default:
12345 gcc_unreachable ();
12346 }
12347
12348 for (tree *pc = &OMP_FOR_CLAUSES (for_stmt); *pc; )
12349 switch (OMP_CLAUSE_CODE (*pc))
12350 {
12351 case OMP_CLAUSE_REDUCTION:
12352 if (OMP_CLAUSE_REDUCTION_INSCAN (*pc))
12353 {
12354 error_at (OMP_CLAUSE_LOCATION (*pc),
12355 "%<inscan%> %<reduction%> clause on "
12356 "%qs construct", "loop");
12357 OMP_CLAUSE_REDUCTION_INSCAN (*pc) = 0;
12358 }
12359 if (OMP_CLAUSE_REDUCTION_TASK (*pc))
12360 {
12361 error_at (OMP_CLAUSE_LOCATION (*pc),
12362 "invalid %<task%> reduction modifier on construct "
12363 "other than %<parallel%>, %<for%> or %<sections%>");
12364 OMP_CLAUSE_REDUCTION_TASK (*pc) = 0;
12365 }
12366 pc = &OMP_CLAUSE_CHAIN (*pc);
12367 break;
12368 case OMP_CLAUSE_LASTPRIVATE:
12369 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
12370 {
12371 tree t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
12372 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
12373 if (OMP_CLAUSE_DECL (*pc) == TREE_OPERAND (t, 0))
12374 break;
12375 if (OMP_FOR_ORIG_DECLS (for_stmt)
12376 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt),
12377 i)) == TREE_LIST
12378 && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt),
12379 i)))
12380 {
12381 tree orig = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
12382 if (OMP_CLAUSE_DECL (*pc) == TREE_PURPOSE (orig))
12383 break;
12384 }
12385 }
12386 if (i == TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)))
12387 {
12388 error_at (OMP_CLAUSE_LOCATION (*pc),
12389 "%<lastprivate%> clause on a %<loop%> construct refers "
12390 "to a variable %qD which is not the loop iterator",
12391 OMP_CLAUSE_DECL (*pc));
12392 *pc = OMP_CLAUSE_CHAIN (*pc);
12393 break;
12394 }
12395 pc = &OMP_CLAUSE_CHAIN (*pc);
12396 break;
12397 default:
12398 pc = &OMP_CLAUSE_CHAIN (*pc);
12399 break;
12400 }
12401
12402 TREE_SET_CODE (for_stmt, OMP_SIMD);
12403
12404 int last;
12405 switch (kind)
12406 {
12407 case OMP_CLAUSE_BIND_THREAD: last = 0; break;
12408 case OMP_CLAUSE_BIND_PARALLEL: last = 1; break;
12409 case OMP_CLAUSE_BIND_TEAMS: last = 2; break;
12410 }
12411 for (int pass = 1; pass <= last; pass++)
12412 {
12413 if (pass == 2)
12414 {
12415 tree bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
12416 append_to_statement_list (*expr_p, &BIND_EXPR_BODY (bind));
12417 *expr_p = make_node (OMP_PARALLEL);
12418 TREE_TYPE (*expr_p) = void_type_node;
12419 OMP_PARALLEL_BODY (*expr_p) = bind;
12420 OMP_PARALLEL_COMBINED (*expr_p) = 1;
12421 SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (for_stmt));
12422 tree *pc = &OMP_PARALLEL_CLAUSES (*expr_p);
12423 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
12424 if (OMP_FOR_ORIG_DECLS (for_stmt)
12425 && (TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i))
12426 == TREE_LIST))
12427 {
12428 tree elt = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
12429 if (TREE_PURPOSE (elt) && TREE_VALUE (elt))
12430 {
12431 *pc = build_omp_clause (UNKNOWN_LOCATION,
12432 OMP_CLAUSE_FIRSTPRIVATE);
12433 OMP_CLAUSE_DECL (*pc) = TREE_VALUE (elt);
12434 pc = &OMP_CLAUSE_CHAIN (*pc);
12435 }
12436 }
12437 }
12438 tree t = make_node (pass == 2 ? OMP_DISTRIBUTE : OMP_FOR);
12439 tree *pc = &OMP_FOR_CLAUSES (t);
12440 TREE_TYPE (t) = void_type_node;
12441 OMP_FOR_BODY (t) = *expr_p;
12442 SET_EXPR_LOCATION (t, EXPR_LOCATION (for_stmt));
12443 for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
12444 switch (OMP_CLAUSE_CODE (c))
12445 {
12446 case OMP_CLAUSE_BIND:
12447 case OMP_CLAUSE_ORDER:
12448 case OMP_CLAUSE_COLLAPSE:
12449 *pc = copy_node (c);
12450 pc = &OMP_CLAUSE_CHAIN (*pc);
12451 break;
12452 case OMP_CLAUSE_PRIVATE:
12453 case OMP_CLAUSE_FIRSTPRIVATE:
12454 	      /* Only needed on the innermost construct.  */
12455 break;
12456 case OMP_CLAUSE_LASTPRIVATE:
12457 if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c) && pass != last)
12458 {
12459 *pc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
12460 OMP_CLAUSE_FIRSTPRIVATE);
12461 OMP_CLAUSE_DECL (*pc) = OMP_CLAUSE_DECL (c);
12462 lang_hooks.decls.omp_finish_clause (*pc, NULL);
12463 pc = &OMP_CLAUSE_CHAIN (*pc);
12464 }
12465 *pc = copy_node (c);
12466 OMP_CLAUSE_LASTPRIVATE_STMT (*pc) = NULL_TREE;
12467 TREE_TYPE (*pc) = unshare_expr (TREE_TYPE (c));
12468 if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c))
12469 {
12470 if (pass != last)
12471 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (*pc) = 1;
12472 else
12473 lang_hooks.decls.omp_finish_clause (*pc, NULL);
12474 OMP_CLAUSE_LASTPRIVATE_LOOP_IV (*pc) = 0;
12475 }
12476 pc = &OMP_CLAUSE_CHAIN (*pc);
12477 break;
12478 case OMP_CLAUSE_REDUCTION:
12479 *pc = copy_node (c);
12480 OMP_CLAUSE_DECL (*pc) = unshare_expr (OMP_CLAUSE_DECL (c));
12481 TREE_TYPE (*pc) = unshare_expr (TREE_TYPE (c));
12482 OMP_CLAUSE_REDUCTION_INIT (*pc)
12483 = unshare_expr (OMP_CLAUSE_REDUCTION_INIT (c));
12484 OMP_CLAUSE_REDUCTION_MERGE (*pc)
12485 = unshare_expr (OMP_CLAUSE_REDUCTION_MERGE (c));
12486 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc))
12487 {
12488 OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc)
12489 = copy_node (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c));
12490 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc))
12491 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc)
12492 = copy_node (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c));
12493 tree nc = *pc;
12494 tree data[2] = { c, nc };
12495 walk_tree_without_duplicates (&OMP_CLAUSE_REDUCTION_INIT (nc),
12496 replace_reduction_placeholders,
12497 data);
12498 walk_tree_without_duplicates (&OMP_CLAUSE_REDUCTION_MERGE (nc),
12499 replace_reduction_placeholders,
12500 data);
12501 }
12502 pc = &OMP_CLAUSE_CHAIN (*pc);
12503 break;
12504 default:
12505 gcc_unreachable ();
12506 }
12507 *pc = NULL_TREE;
12508 *expr_p = t;
12509 }
12510 return gimplify_omp_for (expr_p, pre_p);
12511 }
12512
12513
12514 /* Helper function of optimize_target_teams, find OMP_TEAMS inside
12515 of OMP_TARGET's body. */
12516
12517 static tree
12518 find_omp_teams (tree *tp, int *walk_subtrees, void *)
12519 {
12520 *walk_subtrees = 0;
12521 switch (TREE_CODE (*tp))
12522 {
12523 case OMP_TEAMS:
12524 return *tp;
12525 case BIND_EXPR:
12526 case STATEMENT_LIST:
12527 *walk_subtrees = 1;
12528 break;
12529 default:
12530 break;
12531 }
12532 return NULL_TREE;
12533 }
12534
12535 /* Helper function of optimize_target_teams, determine if the expression
12536 can be computed safely before the target construct on the host. */
12537
12538 static tree
12539 computable_teams_clause (tree *tp, int *walk_subtrees, void *)
12540 {
12541 splay_tree_node n;
12542
12543 if (TYPE_P (*tp))
12544 {
12545 *walk_subtrees = 0;
12546 return NULL_TREE;
12547 }
12548 switch (TREE_CODE (*tp))
12549 {
12550 case VAR_DECL:
12551 case PARM_DECL:
12552 case RESULT_DECL:
12553 *walk_subtrees = 0;
12554 if (error_operand_p (*tp)
12555 || !INTEGRAL_TYPE_P (TREE_TYPE (*tp))
12556 || DECL_HAS_VALUE_EXPR_P (*tp)
12557 || DECL_THREAD_LOCAL_P (*tp)
12558 || TREE_SIDE_EFFECTS (*tp)
12559 || TREE_THIS_VOLATILE (*tp))
12560 return *tp;
12561 if (is_global_var (*tp)
12562 && (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (*tp))
12563 || lookup_attribute ("omp declare target link",
12564 DECL_ATTRIBUTES (*tp))))
12565 return *tp;
12566 if (VAR_P (*tp)
12567 && !DECL_SEEN_IN_BIND_EXPR_P (*tp)
12568 && !is_global_var (*tp)
12569 && decl_function_context (*tp) == current_function_decl)
12570 return *tp;
12571 n = splay_tree_lookup (gimplify_omp_ctxp->variables,
12572 (splay_tree_key) *tp);
12573 if (n == NULL)
12574 {
12575 if (gimplify_omp_ctxp->defaultmap[GDMK_SCALAR] & GOVD_FIRSTPRIVATE)
12576 return NULL_TREE;
12577 return *tp;
12578 }
12579 else if (n->value & GOVD_LOCAL)
12580 return *tp;
12581 else if (n->value & GOVD_FIRSTPRIVATE)
12582 return NULL_TREE;
12583 else if ((n->value & (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
12584 == (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
12585 return NULL_TREE;
12586 return *tp;
12587 case INTEGER_CST:
12588 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
12589 return *tp;
12590 return NULL_TREE;
12591 case TARGET_EXPR:
12592 if (TARGET_EXPR_INITIAL (*tp)
12593 || TREE_CODE (TARGET_EXPR_SLOT (*tp)) != VAR_DECL)
12594 return *tp;
12595 return computable_teams_clause (&TARGET_EXPR_SLOT (*tp),
12596 walk_subtrees, NULL);
12597 	    /* Allow some reasonable subset of integral arithmetic.  */
12598 case PLUS_EXPR:
12599 case MINUS_EXPR:
12600 case MULT_EXPR:
12601 case TRUNC_DIV_EXPR:
12602 case CEIL_DIV_EXPR:
12603 case FLOOR_DIV_EXPR:
12604 case ROUND_DIV_EXPR:
12605 case TRUNC_MOD_EXPR:
12606 case CEIL_MOD_EXPR:
12607 case FLOOR_MOD_EXPR:
12608 case ROUND_MOD_EXPR:
12609 case RDIV_EXPR:
12610 case EXACT_DIV_EXPR:
12611 case MIN_EXPR:
12612 case MAX_EXPR:
12613 case LSHIFT_EXPR:
12614 case RSHIFT_EXPR:
12615 case BIT_IOR_EXPR:
12616 case BIT_XOR_EXPR:
12617 case BIT_AND_EXPR:
12618 case NEGATE_EXPR:
12619 case ABS_EXPR:
12620 case BIT_NOT_EXPR:
12621 case NON_LVALUE_EXPR:
12622 CASE_CONVERT:
12623 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
12624 return *tp;
12625 return NULL_TREE;
12626 /* And disallow anything else, except for comparisons. */
12627 default:
12628 if (COMPARISON_CLASS_P (*tp))
12629 return NULL_TREE;
12630 return *tp;
12631 }
12632 }
12633
12634 /* Try to determine if the num_teams and/or thread_limit expressions
12635    can have their values determined already before entering the
12636    target construct.
12637    INTEGER_CSTs trivially can; so can integral decls that are
12638    firstprivate (explicitly or implicitly) or explicitly
12639    map(always, to:) or map(always, tofrom:) on the target region,
12640    and expressions involving simple arithmetic on those.  Function
12641    calls are not OK, nor is dereferencing something, etc.
12642    Add NUM_TEAMS and THREAD_LIMIT clauses to the OMP_CLAUSES of
12643    EXPR based on what we find:
12644    0 stands for a clause not specified at all; use the implementation
12645    default.  -1 stands for a value that can't be determined easily
12646    before entering the target construct.
12647    If no teams construct is present at all, use 1 for num_teams
12648    and 0 for thread_limit (only one team is involved, and the thread
12649    limit is implementation defined).  */
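
/* For instance (a sketch, assuming n is firstprivate on the target
   region), for

     #pragma omp target
     #pragma omp teams num_teams (n + 2)

   the num_teams expression can be evaluated on the host before entering
   the construct, so a NUM_TEAMS clause holding the pre-computed n + 2 is
   added to the OMP_TARGET, whereas e.g. num_teams (foo ()) would get the
   -1 "not easily computable" marker instead.  */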
12650
12651 static void
12652 optimize_target_teams (tree target, gimple_seq *pre_p)
12653 {
12654 tree body = OMP_BODY (target);
12655 tree teams = walk_tree (&body, find_omp_teams, NULL, NULL);
12656 tree num_teams = integer_zero_node;
12657 tree thread_limit = integer_zero_node;
12658 location_t num_teams_loc = EXPR_LOCATION (target);
12659 location_t thread_limit_loc = EXPR_LOCATION (target);
12660 tree c, *p, expr;
12661 struct gimplify_omp_ctx *target_ctx = gimplify_omp_ctxp;
12662
12663 if (teams == NULL_TREE)
12664 num_teams = integer_one_node;
12665 else
12666 for (c = OMP_TEAMS_CLAUSES (teams); c; c = OMP_CLAUSE_CHAIN (c))
12667 {
12668 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NUM_TEAMS)
12669 {
12670 p = &num_teams;
12671 num_teams_loc = OMP_CLAUSE_LOCATION (c);
12672 }
12673 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREAD_LIMIT)
12674 {
12675 p = &thread_limit;
12676 thread_limit_loc = OMP_CLAUSE_LOCATION (c);
12677 }
12678 else
12679 continue;
12680 expr = OMP_CLAUSE_OPERAND (c, 0);
12681 if (TREE_CODE (expr) == INTEGER_CST)
12682 {
12683 *p = expr;
12684 continue;
12685 }
12686 if (walk_tree (&expr, computable_teams_clause, NULL, NULL))
12687 {
12688 *p = integer_minus_one_node;
12689 continue;
12690 }
12691 *p = expr;
12692 gimplify_omp_ctxp = gimplify_omp_ctxp->outer_context;
12693 if (gimplify_expr (p, pre_p, NULL, is_gimple_val, fb_rvalue, false)
12694 == GS_ERROR)
12695 {
12696 gimplify_omp_ctxp = target_ctx;
12697 *p = integer_minus_one_node;
12698 continue;
12699 }
12700 gimplify_omp_ctxp = target_ctx;
12701 if (!DECL_P (expr) && TREE_CODE (expr) != TARGET_EXPR)
12702 OMP_CLAUSE_OPERAND (c, 0) = *p;
12703 }
12704 c = build_omp_clause (thread_limit_loc, OMP_CLAUSE_THREAD_LIMIT);
12705 OMP_CLAUSE_THREAD_LIMIT_EXPR (c) = thread_limit;
12706 OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
12707 OMP_TARGET_CLAUSES (target) = c;
12708 c = build_omp_clause (num_teams_loc, OMP_CLAUSE_NUM_TEAMS);
12709 OMP_CLAUSE_NUM_TEAMS_EXPR (c) = num_teams;
12710 OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
12711 OMP_TARGET_CLAUSES (target) = c;
12712 }
12713
12714 /* Gimplify the gross structure of several OMP constructs. */
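
/* E.g. (a sketch): for "#pragma omp target data map(tofrom: x)" the body
   is wrapped below in a GIMPLE_TRY_FINALLY whose cleanup calls
   GOMP_target_end_data, so the mapping is undone even if the body exits
   abnormally.  */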
12715
12716 static void
12717 gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
12718 {
12719 tree expr = *expr_p;
12720 gimple *stmt;
12721 gimple_seq body = NULL;
12722 enum omp_region_type ort;
12723
12724 switch (TREE_CODE (expr))
12725 {
12726 case OMP_SECTIONS:
12727 case OMP_SINGLE:
12728 ort = ORT_WORKSHARE;
12729 break;
12730 case OMP_TARGET:
12731 ort = OMP_TARGET_COMBINED (expr) ? ORT_COMBINED_TARGET : ORT_TARGET;
12732 break;
12733 case OACC_KERNELS:
12734 ort = ORT_ACC_KERNELS;
12735 break;
12736 case OACC_PARALLEL:
12737 ort = ORT_ACC_PARALLEL;
12738 break;
12739 case OACC_SERIAL:
12740 ort = ORT_ACC_SERIAL;
12741 break;
12742 case OACC_DATA:
12743 ort = ORT_ACC_DATA;
12744 break;
12745 case OMP_TARGET_DATA:
12746 ort = ORT_TARGET_DATA;
12747 break;
12748 case OMP_TEAMS:
12749 ort = OMP_TEAMS_COMBINED (expr) ? ORT_COMBINED_TEAMS : ORT_TEAMS;
12750 if (gimplify_omp_ctxp == NULL
12751 || gimplify_omp_ctxp->region_type == ORT_IMPLICIT_TARGET)
12752 ort = (enum omp_region_type) (ort | ORT_HOST_TEAMS);
12753 break;
12754 case OACC_HOST_DATA:
12755 ort = ORT_ACC_HOST_DATA;
12756 break;
12757 default:
12758 gcc_unreachable ();
12759 }
12760
12761 bool save_in_omp_construct = in_omp_construct;
12762 if ((ort & ORT_ACC) == 0)
12763 in_omp_construct = false;
12764 gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ort,
12765 TREE_CODE (expr));
12766 if (TREE_CODE (expr) == OMP_TARGET)
12767 optimize_target_teams (expr, pre_p);
12768 if ((ort & (ORT_TARGET | ORT_TARGET_DATA)) != 0
12769 || (ort & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
12770 {
12771 push_gimplify_context ();
12772 gimple *g = gimplify_and_return_first (OMP_BODY (expr), &body);
12773 if (gimple_code (g) == GIMPLE_BIND)
12774 pop_gimplify_context (g);
12775 else
12776 pop_gimplify_context (NULL);
12777 if ((ort & ORT_TARGET_DATA) != 0)
12778 {
12779 enum built_in_function end_ix;
12780 switch (TREE_CODE (expr))
12781 {
12782 case OACC_DATA:
12783 case OACC_HOST_DATA:
12784 end_ix = BUILT_IN_GOACC_DATA_END;
12785 break;
12786 case OMP_TARGET_DATA:
12787 end_ix = BUILT_IN_GOMP_TARGET_END_DATA;
12788 break;
12789 default:
12790 gcc_unreachable ();
12791 }
12792 tree fn = builtin_decl_explicit (end_ix);
12793 g = gimple_build_call (fn, 0);
12794 gimple_seq cleanup = NULL;
12795 gimple_seq_add_stmt (&cleanup, g);
12796 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
12797 body = NULL;
12798 gimple_seq_add_stmt (&body, g);
12799 }
12800 }
12801 else
12802 gimplify_and_add (OMP_BODY (expr), &body);
12803 gimplify_adjust_omp_clauses (pre_p, body, &OMP_CLAUSES (expr),
12804 TREE_CODE (expr));
12805 in_omp_construct = save_in_omp_construct;
12806
12807 switch (TREE_CODE (expr))
12808 {
12809 case OACC_DATA:
12810 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_DATA,
12811 OMP_CLAUSES (expr));
12812 break;
12813 case OACC_HOST_DATA:
12814 if (omp_find_clause (OMP_CLAUSES (expr), OMP_CLAUSE_IF_PRESENT))
12815 {
12816 for (tree c = OMP_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
12817 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
12818 OMP_CLAUSE_USE_DEVICE_PTR_IF_PRESENT (c) = 1;
12819 }
12820
12821 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_HOST_DATA,
12822 OMP_CLAUSES (expr));
12823 break;
12824 case OACC_KERNELS:
12825 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_KERNELS,
12826 OMP_CLAUSES (expr));
12827 break;
12828 case OACC_PARALLEL:
12829 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_PARALLEL,
12830 OMP_CLAUSES (expr));
12831 break;
12832 case OACC_SERIAL:
12833 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_SERIAL,
12834 OMP_CLAUSES (expr));
12835 break;
12836 case OMP_SECTIONS:
12837 stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
12838 break;
12839 case OMP_SINGLE:
12840 stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
12841 break;
12842 case OMP_TARGET:
12843 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_REGION,
12844 OMP_CLAUSES (expr));
12845 break;
12846 case OMP_TARGET_DATA:
12847 /* Put use_device_{ptr,addr} clauses last, as map clauses are supposed
12848 to be evaluated before the use_device_{ptr,addr} clauses if they
12849 refer to the same variables. */
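      /* E.g. (a sketch): for
	   #pragma omp target data map(tofrom: p[0:n]) use_device_ptr(p)
	 the map of p[0:n] must be processed first, so that
	 use_device_ptr(p) sees the device address of the block that has
	 just been mapped.  */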
12850 {
12851 tree use_device_clauses;
12852 tree *pc, *uc = &use_device_clauses;
12853 for (pc = &OMP_CLAUSES (expr); *pc; )
12854 if (OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_USE_DEVICE_PTR
12855 || OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_USE_DEVICE_ADDR)
12856 {
12857 *uc = *pc;
12858 *pc = OMP_CLAUSE_CHAIN (*pc);
12859 uc = &OMP_CLAUSE_CHAIN (*uc);
12860 }
12861 else
12862 pc = &OMP_CLAUSE_CHAIN (*pc);
12863 *uc = NULL_TREE;
12864 *pc = use_device_clauses;
12865 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_DATA,
12866 OMP_CLAUSES (expr));
12867 }
12868 break;
12869 case OMP_TEAMS:
12870 stmt = gimple_build_omp_teams (body, OMP_CLAUSES (expr));
12871 if ((ort & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
12872 gimple_omp_teams_set_host (as_a <gomp_teams *> (stmt), true);
12873 break;
12874 default:
12875 gcc_unreachable ();
12876 }
12877
12878 gimplify_seq_add_stmt (pre_p, stmt);
12879 *expr_p = NULL_TREE;
12880 }
12881
12882 /* Gimplify the gross structure of OpenACC enter/exit data, update, and OpenMP
12883 target update constructs. */
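
/* E.g. (a sketch): "#pragma acc exit data copyout(x) finalize" becomes a
   standalone GIMPLE_OMP_TARGET with kind OACC_ENTER_EXIT_DATA, and its
   GOMP_MAP_FROM map is strengthened to GOMP_MAP_FORCE_FROM below so the
   runtime honors the "finalize" semantics.  */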
12884
12885 static void
12886 gimplify_omp_target_update (tree *expr_p, gimple_seq *pre_p)
12887 {
12888 tree expr = *expr_p;
12889 int kind;
12890 gomp_target *stmt;
12891 enum omp_region_type ort = ORT_WORKSHARE;
12892
12893 switch (TREE_CODE (expr))
12894 {
12895 case OACC_ENTER_DATA:
12896 case OACC_EXIT_DATA:
12897 kind = GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA;
12898 ort = ORT_ACC;
12899 break;
12900 case OACC_UPDATE:
12901 kind = GF_OMP_TARGET_KIND_OACC_UPDATE;
12902 ort = ORT_ACC;
12903 break;
12904 case OMP_TARGET_UPDATE:
12905 kind = GF_OMP_TARGET_KIND_UPDATE;
12906 break;
12907 case OMP_TARGET_ENTER_DATA:
12908 kind = GF_OMP_TARGET_KIND_ENTER_DATA;
12909 break;
12910 case OMP_TARGET_EXIT_DATA:
12911 kind = GF_OMP_TARGET_KIND_EXIT_DATA;
12912 break;
12913 default:
12914 gcc_unreachable ();
12915 }
12916 gimplify_scan_omp_clauses (&OMP_STANDALONE_CLAUSES (expr), pre_p,
12917 ort, TREE_CODE (expr));
12918 gimplify_adjust_omp_clauses (pre_p, NULL, &OMP_STANDALONE_CLAUSES (expr),
12919 TREE_CODE (expr));
12920 if (TREE_CODE (expr) == OACC_UPDATE
12921 && omp_find_clause (OMP_STANDALONE_CLAUSES (expr),
12922 OMP_CLAUSE_IF_PRESENT))
12923 {
12924 /* The runtime uses GOMP_MAP_{TO,FROM} to denote the if_present
12925 clause. */
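	      /* E.g. (a sketch): "#pragma acc update self(x) if_present"
		 arrives here with a GOMP_MAP_FORCE_FROM map, which is
		 relaxed to GOMP_MAP_FROM below so the runtime can skip x
		 when it is not present on the device.  */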
12926 for (tree c = OMP_STANDALONE_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
12927 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP)
12928 switch (OMP_CLAUSE_MAP_KIND (c))
12929 {
12930 case GOMP_MAP_FORCE_TO:
12931 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TO);
12932 break;
12933 case GOMP_MAP_FORCE_FROM:
12934 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FROM);
12935 break;
12936 default:
12937 break;
12938 }
12939 }
12940 else if (TREE_CODE (expr) == OACC_EXIT_DATA
12941 && omp_find_clause (OMP_STANDALONE_CLAUSES (expr),
12942 OMP_CLAUSE_FINALIZE))
12943 {
12944 /* Use GOMP_MAP_DELETE/GOMP_MAP_FORCE_FROM to denote "finalize"
12945 semantics. */
12946 bool have_clause = false;
12947 for (tree c = OMP_STANDALONE_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
12948 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP)
12949 switch (OMP_CLAUSE_MAP_KIND (c))
12950 {
12951 case GOMP_MAP_FROM:
12952 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FORCE_FROM);
12953 have_clause = true;
12954 break;
12955 case GOMP_MAP_RELEASE:
12956 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_DELETE);
12957 have_clause = true;
12958 break;
12959 case GOMP_MAP_POINTER:
12960 case GOMP_MAP_TO_PSET:
12961 /* TODO PR92929: we may see these here, but they'll always follow
12962 one of the clauses above, and will be handled by libgomp as
12963 one group, so no handling required here. */
12964 gcc_assert (have_clause);
12965 break;
12966 case GOMP_MAP_DETACH:
12967 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FORCE_DETACH);
12968 have_clause = false;
12969 break;
12970 case GOMP_MAP_STRUCT:
12971 have_clause = false;
12972 break;
12973 default:
12974 gcc_unreachable ();
12975 }
12976 }
12977 stmt = gimple_build_omp_target (NULL, kind, OMP_STANDALONE_CLAUSES (expr));
12978
12979 gimplify_seq_add_stmt (pre_p, stmt);
12980 *expr_p = NULL_TREE;
12981 }
12982
12983 /* A subroutine of gimplify_omp_atomic. The front end is supposed to have
12984 stabilized the lhs of the atomic operation as *ADDR. Return true if
12985 EXPR is this stabilized form. */
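
/* E.g. (a sketch): for "#pragma omp atomic" applied to "x += 1" the
   front end stabilizes the lhs as ADDR == &x, and both "x" and "*&x"
   (possibly wrapped in useless type conversions, say for a volatile x)
   are recognized here as that stabilized lhs.  */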
12986
12987 static bool
12988 goa_lhs_expr_p (tree expr, tree addr)
12989 {
12990 /* Also include casts to other type variants. The C front end is fond
12991 of adding these for e.g. volatile variables. This is like
12992 STRIP_TYPE_NOPS but includes the main variant lookup. */
12993 STRIP_USELESS_TYPE_CONVERSION (expr);
12994
12995 if (TREE_CODE (expr) == INDIRECT_REF)
12996 {
12997 expr = TREE_OPERAND (expr, 0);
12998 while (expr != addr
12999 && (CONVERT_EXPR_P (expr)
13000 || TREE_CODE (expr) == NON_LVALUE_EXPR)
13001 && TREE_CODE (expr) == TREE_CODE (addr)
13002 && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr)))
13003 {
13004 expr = TREE_OPERAND (expr, 0);
13005 addr = TREE_OPERAND (addr, 0);
13006 }
13007 if (expr == addr)
13008 return true;
13009 return (TREE_CODE (addr) == ADDR_EXPR
13010 && TREE_CODE (expr) == ADDR_EXPR
13011 && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
13012 }
13013 if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
13014 return true;
13015 return false;
13016 }
13017
13018 /* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR. If an
13019 expression does not involve the lhs, evaluate it into a temporary.
13020 Return 1 if the lhs appeared as a subexpression, 0 if it did not,
13021 or -1 if an error was encountered. */
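
/* E.g. (a sketch): stabilizing the rhs "x + foo (y)" of an atomic
   update with LHS_ADDR == &x and LHS_VAR == tmp_load rewrites it as
   "tmp_load + D.1234", where "D.1234 = foo (y);" has been emitted to
   PRE_P and D.1234 is just an illustrative temporary name; the return
   value is 1 because the lhs was seen.  */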
13022
13023 static int
13024 goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
13025 tree lhs_var)
13026 {
13027 tree expr = *expr_p;
13028 int saw_lhs;
13029
13030 if (goa_lhs_expr_p (expr, lhs_addr))
13031 {
13032 *expr_p = lhs_var;
13033 return 1;
13034 }
13035 if (is_gimple_val (expr))
13036 return 0;
13037
13038 saw_lhs = 0;
13039 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
13040 {
13041 case tcc_binary:
13042 case tcc_comparison:
13043 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
13044 lhs_var);
13045 /* FALLTHRU */
13046 case tcc_unary:
13047 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
13048 lhs_var);
13049 break;
13050 case tcc_expression:
13051 switch (TREE_CODE (expr))
13052 {
13053 case TRUTH_ANDIF_EXPR:
13054 case TRUTH_ORIF_EXPR:
13055 case TRUTH_AND_EXPR:
13056 case TRUTH_OR_EXPR:
13057 case TRUTH_XOR_EXPR:
13058 case BIT_INSERT_EXPR:
13059 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
13060 lhs_addr, lhs_var);
13061 /* FALLTHRU */
13062 case TRUTH_NOT_EXPR:
13063 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
13064 lhs_addr, lhs_var);
13065 break;
13066 case COMPOUND_EXPR:
13067 /* Break out any preevaluations from cp_build_modify_expr. */
13068 for (; TREE_CODE (expr) == COMPOUND_EXPR;
13069 expr = TREE_OPERAND (expr, 1))
13070 gimplify_stmt (&TREE_OPERAND (expr, 0), pre_p);
13071 *expr_p = expr;
13072 return goa_stabilize_expr (expr_p, pre_p, lhs_addr, lhs_var);
13073 default:
13074 break;
13075 }
13076 break;
13077 case tcc_reference:
13078 if (TREE_CODE (expr) == BIT_FIELD_REF)
13079 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
13080 lhs_addr, lhs_var);
13081 break;
13082 default:
13083 break;
13084 }
13085
13086 if (saw_lhs == 0)
13087 {
13088 enum gimplify_status gs;
13089 gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
13090 if (gs != GS_ALL_DONE)
13091 saw_lhs = -1;
13092 }
13093
13094 return saw_lhs;
13095 }
13096
13097 /* Gimplify an OMP_ATOMIC statement. */
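
/* Roughly (a sketch of the resulting IL, modulo temporary names and
   dump syntax), for

     #pragma omp atomic
     x += 1;

   this emits

     D.1 = atomic load from &x
     D.2 = D.1 + 1
     atomic store of D.2

   i.e. a GIMPLE_OMP_ATOMIC_LOAD of *ADDR into a temporary, the
   gimplified rhs with the lhs replaced by that temporary, and a
   GIMPLE_OMP_ATOMIC_STORE of the result.  */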
13098
13099 static enum gimplify_status
13100 gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
13101 {
13102 tree addr = TREE_OPERAND (*expr_p, 0);
13103 tree rhs = TREE_CODE (*expr_p) == OMP_ATOMIC_READ
13104 ? NULL : TREE_OPERAND (*expr_p, 1);
13105 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
13106 tree tmp_load;
13107 gomp_atomic_load *loadstmt;
13108 gomp_atomic_store *storestmt;
13109
13110 tmp_load = create_tmp_reg (type);
13111 if (rhs && goa_stabilize_expr (&rhs, pre_p, addr, tmp_load) < 0)
13112 return GS_ERROR;
13113
13114 if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
13115 != GS_ALL_DONE)
13116 return GS_ERROR;
13117
13118 loadstmt = gimple_build_omp_atomic_load (tmp_load, addr,
13119 OMP_ATOMIC_MEMORY_ORDER (*expr_p));
13120 gimplify_seq_add_stmt (pre_p, loadstmt);
13121 if (rhs)
13122 {
13123 /* BIT_INSERT_EXPR is not valid for non-integral bitfield
13124 representatives. Use BIT_FIELD_REF on the lhs instead. */
13125 if (TREE_CODE (rhs) == BIT_INSERT_EXPR
13126 && !INTEGRAL_TYPE_P (TREE_TYPE (tmp_load)))
13127 {
13128 tree bitpos = TREE_OPERAND (rhs, 2);
13129 tree op1 = TREE_OPERAND (rhs, 1);
13130 tree bitsize;
13131 tree tmp_store = tmp_load;
13132 if (TREE_CODE (*expr_p) == OMP_ATOMIC_CAPTURE_OLD)
13133 tmp_store = get_initialized_tmp_var (tmp_load, pre_p);
13134 if (INTEGRAL_TYPE_P (TREE_TYPE (op1)))
13135 bitsize = bitsize_int (TYPE_PRECISION (TREE_TYPE (op1)));
13136 else
13137 bitsize = TYPE_SIZE (TREE_TYPE (op1));
13138 gcc_assert (TREE_OPERAND (rhs, 0) == tmp_load);
13139 tree t = build2_loc (EXPR_LOCATION (rhs),
13140 MODIFY_EXPR, void_type_node,
13141 build3_loc (EXPR_LOCATION (rhs), BIT_FIELD_REF,
13142 TREE_TYPE (op1), tmp_store, bitsize,
13143 bitpos), op1);
13144 gimplify_and_add (t, pre_p);
13145 rhs = tmp_store;
13146 }
13147 if (gimplify_expr (&rhs, pre_p, NULL, is_gimple_val, fb_rvalue)
13148 != GS_ALL_DONE)
13149 return GS_ERROR;
13150 }
13151
13152 if (TREE_CODE (*expr_p) == OMP_ATOMIC_READ)
13153 rhs = tmp_load;
13154 storestmt
13155 = gimple_build_omp_atomic_store (rhs, OMP_ATOMIC_MEMORY_ORDER (*expr_p));
13156 gimplify_seq_add_stmt (pre_p, storestmt);
13157 switch (TREE_CODE (*expr_p))
13158 {
13159 case OMP_ATOMIC_READ:
13160 case OMP_ATOMIC_CAPTURE_OLD:
13161 *expr_p = tmp_load;
13162 gimple_omp_atomic_set_need_value (loadstmt);
13163 break;
13164 case OMP_ATOMIC_CAPTURE_NEW:
13165 *expr_p = rhs;
13166 gimple_omp_atomic_set_need_value (storestmt);
13167 break;
13168 default:
13169 *expr_p = NULL;
13170 break;
13171 }
13172
13173 return GS_ALL_DONE;
13174 }
13175
13176 /* Gimplify a TRANSACTION_EXPR. This involves gimplification of the
13177 body, and adding some EH bits. */
13178
13179 static enum gimplify_status
13180 gimplify_transaction (tree *expr_p, gimple_seq *pre_p)
13181 {
13182 tree expr = *expr_p, temp, tbody = TRANSACTION_EXPR_BODY (expr);
13183 gimple *body_stmt;
13184 gtransaction *trans_stmt;
13185 gimple_seq body = NULL;
13186 int subcode = 0;
13187
13188 	  /* Wrap the transaction body in a BIND_EXPR so we have a context
13189 	     in which to put decls for OMP.  */
13190 if (TREE_CODE (tbody) != BIND_EXPR)
13191 {
13192 tree bind = build3 (BIND_EXPR, void_type_node, NULL, tbody, NULL);
13193 TREE_SIDE_EFFECTS (bind) = 1;
13194 SET_EXPR_LOCATION (bind, EXPR_LOCATION (tbody));
13195 TRANSACTION_EXPR_BODY (expr) = bind;
13196 }
13197
13198 push_gimplify_context ();
13199 temp = voidify_wrapper_expr (*expr_p, NULL);
13200
13201 body_stmt = gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr), &body);
13202 pop_gimplify_context (body_stmt);
13203
13204 trans_stmt = gimple_build_transaction (body);
13205 if (TRANSACTION_EXPR_OUTER (expr))
13206 subcode = GTMA_IS_OUTER;
13207 else if (TRANSACTION_EXPR_RELAXED (expr))
13208 subcode = GTMA_IS_RELAXED;
13209 gimple_transaction_set_subcode (trans_stmt, subcode);
13210
13211 gimplify_seq_add_stmt (pre_p, trans_stmt);
13212
13213 if (temp)
13214 {
13215 *expr_p = temp;
13216 return GS_OK;
13217 }
13218
13219 *expr_p = NULL_TREE;
13220 return GS_ALL_DONE;
13221 }
13222
13223 /* Gimplify an OMP_ORDERED construct. EXPR is the tree version. BODY
13224 is the OMP_BODY of the original EXPR (which has already been
13225 gimplified so it's not present in the EXPR).
13226
13227 Return the gimplified GIMPLE_OMP_ORDERED tuple. */
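
/* For the doacross case this also validates the depend clauses, e.g.
   (a sketch)

     #pragma omp for ordered(2)
     for (i = 0; i < N; i++)
       for (j = 0; j < M; j++)
	 {
	   #pragma omp ordered depend(sink: i - 1, j)
	   ...
	   #pragma omp ordered depend(source)
	 }

   is accepted, while naming a variable other than i or j in the sink
   vector, or listing a different number of variables than the
   ordered(2) parameter specifies, is diagnosed below.  */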
13228
13229 static gimple *
13230 gimplify_omp_ordered (tree expr, gimple_seq body)
13231 {
13232 tree c, decls;
13233 int failures = 0;
13234 unsigned int i;
13235 tree source_c = NULL_TREE;
13236 tree sink_c = NULL_TREE;
13237
13238 if (gimplify_omp_ctxp)
13239 {
13240 for (c = OMP_ORDERED_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
13241 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
13242 && gimplify_omp_ctxp->loop_iter_var.is_empty ()
13243 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK
13244 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE))
13245 {
13246 error_at (OMP_CLAUSE_LOCATION (c),
13247 "%<ordered%> construct with %<depend%> clause must be "
13248 "closely nested inside a loop with %<ordered%> clause "
13249 "with a parameter");
13250 failures++;
13251 }
13252 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
13253 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
13254 {
13255 bool fail = false;
13256 for (decls = OMP_CLAUSE_DECL (c), i = 0;
13257 decls && TREE_CODE (decls) == TREE_LIST;
13258 decls = TREE_CHAIN (decls), ++i)
13259 if (i >= gimplify_omp_ctxp->loop_iter_var.length () / 2)
13260 continue;
13261 else if (TREE_VALUE (decls)
13262 != gimplify_omp_ctxp->loop_iter_var[2 * i])
13263 {
13264 error_at (OMP_CLAUSE_LOCATION (c),
13265 "variable %qE is not an iteration "
13266 "of outermost loop %d, expected %qE",
13267 TREE_VALUE (decls), i + 1,
13268 gimplify_omp_ctxp->loop_iter_var[2 * i]);
13269 fail = true;
13270 failures++;
13271 }
13272 else
13273 TREE_VALUE (decls)
13274 = gimplify_omp_ctxp->loop_iter_var[2 * i + 1];
13275 if (!fail && i != gimplify_omp_ctxp->loop_iter_var.length () / 2)
13276 {
13277 error_at (OMP_CLAUSE_LOCATION (c),
13278 "number of variables in %<depend%> clause with "
13279 "%<sink%> modifier does not match number of "
13280 "iteration variables");
13281 failures++;
13282 }
13283 sink_c = c;
13284 }
13285 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
13286 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE)
13287 {
13288 if (source_c)
13289 {
13290 error_at (OMP_CLAUSE_LOCATION (c),
13291 "more than one %<depend%> clause with %<source%> "
13292 "modifier on an %<ordered%> construct");
13293 failures++;
13294 }
13295 else
13296 source_c = c;
13297 }
13298 }
13299 if (source_c && sink_c)
13300 {
13301 error_at (OMP_CLAUSE_LOCATION (source_c),
13302 "%<depend%> clause with %<source%> modifier specified "
13303 "together with %<depend%> clauses with %<sink%> modifier "
13304 "on the same construct");
13305 failures++;
13306 }
13307
13308 if (failures)
13309 return gimple_build_nop ();
13310 return gimple_build_omp_ordered (body, OMP_ORDERED_CLAUSES (expr));
13311 }
13312
13313 /* Convert the GENERIC expression tree *EXPR_P to GIMPLE. If the
13314 expression produces a value to be used as an operand inside a GIMPLE
13315 statement, the value will be stored back in *EXPR_P. This value will
13316 be a tree of class tcc_declaration, tcc_constant, tcc_reference or
13317 an SSA_NAME. The corresponding sequence of GIMPLE statements is
13318 emitted in PRE_P and POST_P.
13319
13320 Additionally, this process may overwrite parts of the input
13321 expression during gimplification. Ideally, it should be
13322 possible to do non-destructive gimplification.
13323
13324 EXPR_P points to the GENERIC expression to convert to GIMPLE. If
13325 the expression needs to evaluate to a value to be used as
13326 an operand in a GIMPLE statement, this value will be stored in
13327 *EXPR_P on exit. This happens when the caller specifies one
13328 of fb_lvalue or fb_rvalue fallback flags.
13329
13330 PRE_P will contain the sequence of GIMPLE statements corresponding
13331 to the evaluation of EXPR and all the side-effects that must
13332 be executed before the main expression. On exit, the last
13333 statement of PRE_P is the core statement being gimplified. For
13334 instance, when gimplifying 'if (++a)' the last statement in
13335 PRE_P will be 'if (t.1)' where t.1 is the result of
13336 pre-incrementing 'a'.
13337
13338 POST_P will contain the sequence of GIMPLE statements corresponding
13339 to the evaluation of all the side-effects that must be executed
13340 after the main expression. If this is NULL, the post
13341 side-effects are stored at the end of PRE_P.
13342
13343 The reason why the output is split in two is to handle post
13344 side-effects explicitly. In some cases, an expression may have
13345 inner and outer post side-effects which need to be emitted in
13346 an order different from the one given by the recursive
13347 traversal. For instance, for the expression (*p--)++ the post
13348 side-effects of '--' must actually occur *after* the post
13349 side-effects of '++'. However, gimplification will first visit
13350 the inner expression, so if a separate POST sequence was not
13351 used, the resulting sequence would be:
13352
13353 1 t.1 = *p
13354 2 p = p - 1
13355 3 t.2 = t.1 + 1
13356 4 *p = t.2
13357
13358 However, the post-decrement operation in line #2 must not be
13359 evaluated until after the store to *p at line #4, so the
13360 correct sequence should be:
13361
13362 1 t.1 = *p
13363 2 t.2 = t.1 + 1
13364 3 *p = t.2
13365 4 p = p - 1
13366
13367 So, by specifying a separate post queue, it is possible
13368 to emit the post side-effects in the correct order.
13369 If POST_P is NULL, an internal queue will be used. Before
13370 returning to the caller, the sequence POST_P is appended to
13371 the main output sequence PRE_P.
13372
13373 GIMPLE_TEST_F points to a function that takes a tree T and
13374 returns nonzero if T is in the GIMPLE form requested by the
13375 caller. The GIMPLE predicates are in gimple.c.
13376
13377 FALLBACK tells the function what sort of a temporary we want if
13378 gimplification cannot produce an expression that complies with
13379 GIMPLE_TEST_F.
13380
13381 fb_none means that no temporary should be generated
13382 fb_rvalue means that an rvalue is OK to generate
13383 fb_lvalue means that an lvalue is OK to generate
13384 fb_either means that either is OK, but an lvalue is preferable.
13385 fb_mayfail means that gimplification may fail (in which case
13386 GS_ERROR will be returned)
13387
13388 The return value is either GS_ERROR or GS_ALL_DONE, since this
13389 function iterates until EXPR is completely gimplified or an error
13390 occurs. */
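
/* A typical use from elsewhere in this file (a sketch):

     ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
			  is_gimple_val, fb_rvalue);

   which forces operand 0 into a GIMPLE value, creating a temporary in
   PRE_P if it is not one already.  */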
13391
13392 enum gimplify_status
13393 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
13394 bool (*gimple_test_f) (tree), fallback_t fallback)
13395 {
13396 tree tmp;
13397 gimple_seq internal_pre = NULL;
13398 gimple_seq internal_post = NULL;
13399 tree save_expr;
13400 bool is_statement;
13401 location_t saved_location;
13402 enum gimplify_status ret;
13403 gimple_stmt_iterator pre_last_gsi, post_last_gsi;
13404 tree label;
13405
13406 save_expr = *expr_p;
13407 if (save_expr == NULL_TREE)
13408 return GS_ALL_DONE;
13409
13410 /* If we are gimplifying a top-level statement, PRE_P must be valid. */
13411 is_statement = gimple_test_f == is_gimple_stmt;
13412 if (is_statement)
13413 gcc_assert (pre_p);
13414
13415 /* Consistency checks. */
13416 if (gimple_test_f == is_gimple_reg)
13417 gcc_assert (fallback & (fb_rvalue | fb_lvalue));
13418 else if (gimple_test_f == is_gimple_val
13419 || gimple_test_f == is_gimple_call_addr
13420 || gimple_test_f == is_gimple_condexpr
13421 || gimple_test_f == is_gimple_condexpr_for_cond
13422 || gimple_test_f == is_gimple_mem_rhs
13423 || gimple_test_f == is_gimple_mem_rhs_or_call
13424 || gimple_test_f == is_gimple_reg_rhs
13425 || gimple_test_f == is_gimple_reg_rhs_or_call
13426 || gimple_test_f == is_gimple_asm_val
13427 || gimple_test_f == is_gimple_mem_ref_addr)
13428 gcc_assert (fallback & fb_rvalue);
13429 else if (gimple_test_f == is_gimple_min_lval
13430 || gimple_test_f == is_gimple_lvalue)
13431 gcc_assert (fallback & fb_lvalue);
13432 else if (gimple_test_f == is_gimple_addressable)
13433 gcc_assert (fallback & fb_either);
13434 else if (gimple_test_f == is_gimple_stmt)
13435 gcc_assert (fallback == fb_none);
13436 else
13437 {
13438 /* We should have recognized the GIMPLE_TEST_F predicate to
13439 know what kind of fallback to use in case a temporary is
13440 needed to hold the value or address of *EXPR_P. */
13441 gcc_unreachable ();
13442 }
13443
13444 /* We used to check the predicate here and return immediately if it
13445 succeeds. This is wrong; the design is for gimplification to be
13446 idempotent, and for the predicates to only test for valid forms, not
13447 whether they are fully simplified. */
13448 if (pre_p == NULL)
13449 pre_p = &internal_pre;
13450
13451 if (post_p == NULL)
13452 post_p = &internal_post;
13453
13454 /* Remember the last statements added to PRE_P and POST_P. Every
13455 new statement added by the gimplification helpers needs to be
13456 annotated with location information. To centralize the
13457 responsibility, we remember the last statement that had been
13458 added to both queues before gimplifying *EXPR_P. If
13459 gimplification produces new statements in PRE_P and POST_P, those
13460 statements will be annotated with the same location information
13461 as *EXPR_P. */
13462 pre_last_gsi = gsi_last (*pre_p);
13463 post_last_gsi = gsi_last (*post_p);
13464
13465 saved_location = input_location;
13466 if (save_expr != error_mark_node
13467 && EXPR_HAS_LOCATION (*expr_p))
13468 input_location = EXPR_LOCATION (*expr_p);
13469
13470 /* Loop over the specific gimplifiers until the toplevel node
13471 remains the same. */
13472 do
13473 {
13474 /* Strip away as many useless type conversions as possible
13475 at the toplevel. */
13476 STRIP_USELESS_TYPE_CONVERSION (*expr_p);
13477
13478 /* Remember the expr. */
13479 save_expr = *expr_p;
13480
13481 /* Die, die, die, my darling. */
13482 if (error_operand_p (save_expr))
13483 {
13484 ret = GS_ERROR;
13485 break;
13486 }
13487
13488 /* Do any language-specific gimplification. */
13489 ret = ((enum gimplify_status)
13490 lang_hooks.gimplify_expr (expr_p, pre_p, post_p));
13491 if (ret == GS_OK)
13492 {
13493 if (*expr_p == NULL_TREE)
13494 break;
13495 if (*expr_p != save_expr)
13496 continue;
13497 }
13498 else if (ret != GS_UNHANDLED)
13499 break;
13500
13501 /* Make sure that all the cases set 'ret' appropriately. */
13502 ret = GS_UNHANDLED;
13503 switch (TREE_CODE (*expr_p))
13504 {
13505 /* First deal with the special cases. */
13506
13507 case POSTINCREMENT_EXPR:
13508 case POSTDECREMENT_EXPR:
13509 case PREINCREMENT_EXPR:
13510 case PREDECREMENT_EXPR:
13511 ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
13512 fallback != fb_none,
13513 TREE_TYPE (*expr_p));
13514 break;
13515
13516 case VIEW_CONVERT_EXPR:
13517 if ((fallback & fb_rvalue)
13518 && is_gimple_reg_type (TREE_TYPE (*expr_p))
13519 && is_gimple_reg_type (TREE_TYPE (TREE_OPERAND (*expr_p, 0))))
13520 {
13521 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
13522 post_p, is_gimple_val, fb_rvalue);
13523 recalculate_side_effects (*expr_p);
13524 break;
13525 }
13526 /* Fallthru. */
13527
13528 case ARRAY_REF:
13529 case ARRAY_RANGE_REF:
13530 case REALPART_EXPR:
13531 case IMAGPART_EXPR:
13532 case COMPONENT_REF:
13533 ret = gimplify_compound_lval (expr_p, pre_p, post_p,
13534 fallback ? fallback : fb_rvalue);
13535 break;
13536
13537 case COND_EXPR:
13538 ret = gimplify_cond_expr (expr_p, pre_p, fallback);
13539
13540 /* C99 code may assign to an array in a structure value of a
13541 conditional expression, and this has undefined behavior
13542 only on execution, so create a temporary if an lvalue is
13543 required. */
13544 if (fallback == fb_lvalue)
13545 {
13546 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
13547 mark_addressable (*expr_p);
13548 ret = GS_OK;
13549 }
13550 break;
13551
13552 case CALL_EXPR:
13553 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
13554
13555 /* C99 code may assign to an array in a structure returned
13556 from a function, and this has undefined behavior only on
13557 execution, so create a temporary if an lvalue is
13558 required. */
13559 if (fallback == fb_lvalue)
13560 {
13561 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
13562 mark_addressable (*expr_p);
13563 ret = GS_OK;
13564 }
13565 break;
13566
13567 case TREE_LIST:
13568 gcc_unreachable ();
13569
13570 case COMPOUND_EXPR:
13571 ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
13572 break;
13573
13574 case COMPOUND_LITERAL_EXPR:
13575 ret = gimplify_compound_literal_expr (expr_p, pre_p,
13576 gimple_test_f, fallback);
13577 break;
13578
13579 case MODIFY_EXPR:
13580 case INIT_EXPR:
13581 ret = gimplify_modify_expr (expr_p, pre_p, post_p,
13582 fallback != fb_none);
13583 break;
13584
13585 case TRUTH_ANDIF_EXPR:
13586 case TRUTH_ORIF_EXPR:
13587 {
13588 /* Preserve the original type of the expression and the
13589 source location of the outer expression. */
13590 tree org_type = TREE_TYPE (*expr_p);
13591 *expr_p = gimple_boolify (*expr_p);
13592 *expr_p = build3_loc (input_location, COND_EXPR,
13593 org_type, *expr_p,
13594 fold_convert_loc
13595 (input_location,
13596 org_type, boolean_true_node),
13597 fold_convert_loc
13598 (input_location,
13599 org_type, boolean_false_node));
13600 ret = GS_OK;
13601 break;
13602 }
13603
13604 case TRUTH_NOT_EXPR:
13605 {
13606 tree type = TREE_TYPE (*expr_p);
13607 /* The parsers are careful to generate TRUTH_NOT_EXPR
13608 only with operands that are always zero or one.
13609 We do not fold here but handle the only interesting case
13610 manually, as fold may re-introduce the TRUTH_NOT_EXPR. */
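	    /* I.e. (a sketch): after boolification, !x becomes ~x when the
	       boolified type has precision 1, and x ^ 1 otherwise.  */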
13611 *expr_p = gimple_boolify (*expr_p);
13612 if (TYPE_PRECISION (TREE_TYPE (*expr_p)) == 1)
13613 *expr_p = build1_loc (input_location, BIT_NOT_EXPR,
13614 TREE_TYPE (*expr_p),
13615 TREE_OPERAND (*expr_p, 0));
13616 else
13617 *expr_p = build2_loc (input_location, BIT_XOR_EXPR,
13618 TREE_TYPE (*expr_p),
13619 TREE_OPERAND (*expr_p, 0),
13620 build_int_cst (TREE_TYPE (*expr_p), 1));
13621 if (!useless_type_conversion_p (type, TREE_TYPE (*expr_p)))
13622 *expr_p = fold_convert_loc (input_location, type, *expr_p);
13623 ret = GS_OK;
13624 break;
13625 }
13626
13627 case ADDR_EXPR:
13628 ret = gimplify_addr_expr (expr_p, pre_p, post_p);
13629 break;
13630
13631 case ANNOTATE_EXPR:
13632 {
13633 tree cond = TREE_OPERAND (*expr_p, 0);
13634 tree kind = TREE_OPERAND (*expr_p, 1);
13635 tree data = TREE_OPERAND (*expr_p, 2);
13636 tree type = TREE_TYPE (cond);
13637 if (!INTEGRAL_TYPE_P (type))
13638 {
13639 *expr_p = cond;
13640 ret = GS_OK;
13641 break;
13642 }
13643 tree tmp = create_tmp_var (type);
13644 gimplify_arg (&cond, pre_p, EXPR_LOCATION (*expr_p));
13645 gcall *call
13646 = gimple_build_call_internal (IFN_ANNOTATE, 3, cond, kind, data);
13647 gimple_call_set_lhs (call, tmp);
13648 gimplify_seq_add_stmt (pre_p, call);
13649 *expr_p = tmp;
13650 ret = GS_ALL_DONE;
13651 break;
13652 }
13653
13654 case VA_ARG_EXPR:
13655 ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
13656 break;
13657
13658 CASE_CONVERT:
13659 if (IS_EMPTY_STMT (*expr_p))
13660 {
13661 ret = GS_ALL_DONE;
13662 break;
13663 }
13664
13665 if (VOID_TYPE_P (TREE_TYPE (*expr_p))
13666 || fallback == fb_none)
13667 {
13668 /* Just strip a conversion to void (or in void context) and
13669 try again. */
13670 *expr_p = TREE_OPERAND (*expr_p, 0);
13671 ret = GS_OK;
13672 break;
13673 }
13674
13675 ret = gimplify_conversion (expr_p);
13676 if (ret == GS_ERROR)
13677 break;
13678 if (*expr_p != save_expr)
13679 break;
13680 /* FALLTHRU */
13681
13682 case FIX_TRUNC_EXPR:
13683 /* unary_expr: ... | '(' cast ')' val | ... */
13684 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
13685 is_gimple_val, fb_rvalue);
13686 recalculate_side_effects (*expr_p);
13687 break;
13688
13689 case INDIRECT_REF:
13690 {
13691 bool volatilep = TREE_THIS_VOLATILE (*expr_p);
13692 bool notrap = TREE_THIS_NOTRAP (*expr_p);
13693 tree saved_ptr_type = TREE_TYPE (TREE_OPERAND (*expr_p, 0));
13694
13695 *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
13696 if (*expr_p != save_expr)
13697 {
13698 ret = GS_OK;
13699 break;
13700 }
13701
13702 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
13703 is_gimple_reg, fb_rvalue);
13704 if (ret == GS_ERROR)
13705 break;
13706
13707 recalculate_side_effects (*expr_p);
13708 *expr_p = fold_build2_loc (input_location, MEM_REF,
13709 TREE_TYPE (*expr_p),
13710 TREE_OPERAND (*expr_p, 0),
13711 build_int_cst (saved_ptr_type, 0));
13712 TREE_THIS_VOLATILE (*expr_p) = volatilep;
13713 TREE_THIS_NOTRAP (*expr_p) = notrap;
13714 ret = GS_OK;
13715 break;
13716 }
13717
13718 	    /* We arrive here through the various re-gimplification paths.  */
13719 case MEM_REF:
13720 /* First try re-folding the whole thing. */
13721 tmp = fold_binary (MEM_REF, TREE_TYPE (*expr_p),
13722 TREE_OPERAND (*expr_p, 0),
13723 TREE_OPERAND (*expr_p, 1));
13724 if (tmp)
13725 {
13726 REF_REVERSE_STORAGE_ORDER (tmp)
13727 = REF_REVERSE_STORAGE_ORDER (*expr_p);
13728 *expr_p = tmp;
13729 recalculate_side_effects (*expr_p);
13730 ret = GS_OK;
13731 break;
13732 }
13733 /* Avoid re-gimplifying the address operand if it is already
13734 in suitable form. Re-gimplifying would mark the address
13735 operand addressable. Always gimplify when not in SSA form
13736 as we still may have to gimplify decls with value-exprs. */
13737 if (!gimplify_ctxp || !gimple_in_ssa_p (cfun)
13738 || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p, 0)))
13739 {
13740 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
13741 is_gimple_mem_ref_addr, fb_rvalue);
13742 if (ret == GS_ERROR)
13743 break;
13744 }
13745 recalculate_side_effects (*expr_p);
13746 ret = GS_ALL_DONE;
13747 break;
13748
13749 /* Constants need not be gimplified. */
13750 case INTEGER_CST:
13751 case REAL_CST:
13752 case FIXED_CST:
13753 case STRING_CST:
13754 case COMPLEX_CST:
13755 case VECTOR_CST:
13756 /* Drop the overflow flag on constants, we do not want
13757 that in the GIMPLE IL. */
13758 if (TREE_OVERFLOW_P (*expr_p))
13759 *expr_p = drop_tree_overflow (*expr_p);
13760 ret = GS_ALL_DONE;
13761 break;
13762
13763 case CONST_DECL:
13764 /* If we require an lvalue, such as for ADDR_EXPR, retain the
13765 CONST_DECL node. Otherwise the decl is replaceable by its
13766 value. */
13767 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
13768 if (fallback & fb_lvalue)
13769 ret = GS_ALL_DONE;
13770 else
13771 {
13772 *expr_p = DECL_INITIAL (*expr_p);
13773 ret = GS_OK;
13774 }
13775 break;
13776
13777 case DECL_EXPR:
13778 ret = gimplify_decl_expr (expr_p, pre_p);
13779 break;
13780
13781 case BIND_EXPR:
13782 ret = gimplify_bind_expr (expr_p, pre_p);
13783 break;
13784
13785 case LOOP_EXPR:
13786 ret = gimplify_loop_expr (expr_p, pre_p);
13787 break;
13788
13789 case SWITCH_EXPR:
13790 ret = gimplify_switch_expr (expr_p, pre_p);
13791 break;
13792
13793 case EXIT_EXPR:
13794 ret = gimplify_exit_expr (expr_p);
13795 break;
13796
13797 case GOTO_EXPR:
13798 	    /* If the target is not a LABEL_DECL, then it is a computed jump
13799 	       and the target needs to be gimplified.  */
13800 if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
13801 {
13802 ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
13803 NULL, is_gimple_val, fb_rvalue);
13804 if (ret == GS_ERROR)
13805 break;
13806 }
13807 gimplify_seq_add_stmt (pre_p,
13808 gimple_build_goto (GOTO_DESTINATION (*expr_p)));
13809 ret = GS_ALL_DONE;
13810 break;
13811
13812 case PREDICT_EXPR:
13813 gimplify_seq_add_stmt (pre_p,
13814 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
13815 PREDICT_EXPR_OUTCOME (*expr_p)));
13816 ret = GS_ALL_DONE;
13817 break;
13818
13819 case LABEL_EXPR:
13820 ret = gimplify_label_expr (expr_p, pre_p);
13821 label = LABEL_EXPR_LABEL (*expr_p);
13822 gcc_assert (decl_function_context (label) == current_function_decl);
13823
13824 	  /* If the label is used in a goto statement, or the address of the
13825 	     label is taken, we need to unpoison all variables that were seen
13826 	     so far.  Doing so prevents us from reporting false positives.  */
13827 if (asan_poisoned_variables
13828 && asan_used_labels != NULL
13829 && asan_used_labels->contains (label))
13830 asan_poison_variables (asan_poisoned_variables, false, pre_p);
13831 break;
13832
13833 case CASE_LABEL_EXPR:
13834 ret = gimplify_case_label_expr (expr_p, pre_p);
13835
13836 if (gimplify_ctxp->live_switch_vars)
13837 asan_poison_variables (gimplify_ctxp->live_switch_vars, false,
13838 pre_p);
13839 break;
13840
13841 case RETURN_EXPR:
13842 ret = gimplify_return_expr (*expr_p, pre_p);
13843 break;
13844
13845 case CONSTRUCTOR:
13846 	  /* Don't reduce this in place; let gimplify_init_constructor work its
13847 	     magic.  But if we're just elaborating this for side effects, just
13848 	     gimplify any element that has side-effects.  */
13849 if (fallback == fb_none)
13850 {
13851 unsigned HOST_WIDE_INT ix;
13852 tree val;
13853 tree temp = NULL_TREE;
13854 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p), ix, val)
13855 if (TREE_SIDE_EFFECTS (val))
13856 append_to_statement_list (val, &temp);
13857
13858 *expr_p = temp;
13859 ret = temp ? GS_OK : GS_ALL_DONE;
13860 }
13861 /* C99 code may assign to an array in a constructed
13862 structure or union, and this has undefined behavior only
13863 on execution, so create a temporary if an lvalue is
13864 required. */
13865 else if (fallback == fb_lvalue)
13866 {
13867 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
13868 mark_addressable (*expr_p);
13869 ret = GS_OK;
13870 }
13871 else
13872 ret = GS_ALL_DONE;
13873 break;
13874
13875 /* The following are special cases that are not handled by the
13876 original GIMPLE grammar. */
13877
13878 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
13879 eliminated. */
13880 case SAVE_EXPR:
13881 ret = gimplify_save_expr (expr_p, pre_p, post_p);
13882 break;
13883
13884 case BIT_FIELD_REF:
13885 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
13886 post_p, is_gimple_lvalue, fb_either);
13887 recalculate_side_effects (*expr_p);
13888 break;
13889
13890 case TARGET_MEM_REF:
13891 {
13892 enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;
13893
13894 if (TMR_BASE (*expr_p))
13895 r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
13896 post_p, is_gimple_mem_ref_addr, fb_either);
13897 if (TMR_INDEX (*expr_p))
13898 r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
13899 post_p, is_gimple_val, fb_rvalue);
13900 if (TMR_INDEX2 (*expr_p))
13901 r1 = gimplify_expr (&TMR_INDEX2 (*expr_p), pre_p,
13902 post_p, is_gimple_val, fb_rvalue);
13903 /* TMR_STEP and TMR_OFFSET are always integer constants. */
13904 ret = MIN (r0, r1);
13905 }
13906 break;
13907
13908 case NON_LVALUE_EXPR:
13909 /* This should have been stripped above. */
13910 gcc_unreachable ();
13911
13912 case ASM_EXPR:
13913 ret = gimplify_asm_expr (expr_p, pre_p, post_p);
13914 break;
13915
13916 case TRY_FINALLY_EXPR:
13917 case TRY_CATCH_EXPR:
13918 {
13919 gimple_seq eval, cleanup;
13920 gtry *try_;
13921
13922 	      /* Calls to destructors are generated automatically in the
13923 	         FINALLY/CATCH block.  They should have UNKNOWN_LOCATION as
13924 	         their location.  However, gimplify_call_expr resets such call
13925 	         stmts to input_location if it finds the stmt's location is
13926 	         unknown.  To prevent that for destructors, set input_location
13927 	         to UNKNOWN_LOCATION here.  This only affects the destructor
13928 	         calls in the FINALLY/CATCH block; input_location is restored
13929 	         to its original value by the end of gimplify_expr.  */
13930 input_location = UNKNOWN_LOCATION;
13931 eval = cleanup = NULL;
13932 gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
13933 if (TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
13934 && TREE_CODE (TREE_OPERAND (*expr_p, 1)) == EH_ELSE_EXPR)
13935 {
13936 gimple_seq n = NULL, e = NULL;
13937 gimplify_and_add (TREE_OPERAND (TREE_OPERAND (*expr_p, 1),
13938 0), &n);
13939 gimplify_and_add (TREE_OPERAND (TREE_OPERAND (*expr_p, 1),
13940 1), &e);
13941 if (!gimple_seq_empty_p (n) && !gimple_seq_empty_p (e))
13942 {
13943 geh_else *stmt = gimple_build_eh_else (n, e);
13944 gimple_seq_add_stmt (&cleanup, stmt);
13945 }
13946 }
13947 else
13948 gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
13949 /* Don't create bogus GIMPLE_TRY with empty cleanup. */
13950 if (gimple_seq_empty_p (cleanup))
13951 {
13952 gimple_seq_add_seq (pre_p, eval);
13953 ret = GS_ALL_DONE;
13954 break;
13955 }
13956 try_ = gimple_build_try (eval, cleanup,
13957 TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
13958 ? GIMPLE_TRY_FINALLY
13959 : GIMPLE_TRY_CATCH);
13960 if (EXPR_HAS_LOCATION (save_expr))
13961 gimple_set_location (try_, EXPR_LOCATION (save_expr));
13962 else if (LOCATION_LOCUS (saved_location) != UNKNOWN_LOCATION)
13963 gimple_set_location (try_, saved_location);
13964 if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
13965 gimple_try_set_catch_is_cleanup (try_,
13966 TRY_CATCH_IS_CLEANUP (*expr_p));
13967 gimplify_seq_add_stmt (pre_p, try_);
13968 ret = GS_ALL_DONE;
13969 break;
13970 }
13971
13972 case CLEANUP_POINT_EXPR:
13973 ret = gimplify_cleanup_point_expr (expr_p, pre_p);
13974 break;
13975
13976 case TARGET_EXPR:
13977 ret = gimplify_target_expr (expr_p, pre_p, post_p);
13978 break;
13979
13980 case CATCH_EXPR:
13981 {
13982 gimple *c;
13983 gimple_seq handler = NULL;
13984 gimplify_and_add (CATCH_BODY (*expr_p), &handler);
13985 c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
13986 gimplify_seq_add_stmt (pre_p, c);
13987 ret = GS_ALL_DONE;
13988 break;
13989 }
13990
13991 case EH_FILTER_EXPR:
13992 {
13993 gimple *ehf;
13994 gimple_seq failure = NULL;
13995
13996 gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
13997 ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
13998 gimple_set_no_warning (ehf, TREE_NO_WARNING (*expr_p));
13999 gimplify_seq_add_stmt (pre_p, ehf);
14000 ret = GS_ALL_DONE;
14001 break;
14002 }
14003
14004 case OBJ_TYPE_REF:
14005 {
14006 enum gimplify_status r0, r1;
14007 r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
14008 post_p, is_gimple_val, fb_rvalue);
14009 r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
14010 post_p, is_gimple_val, fb_rvalue);
14011 TREE_SIDE_EFFECTS (*expr_p) = 0;
14012 ret = MIN (r0, r1);
14013 }
14014 break;
14015
14016 case LABEL_DECL:
14017 	  /* We get here when taking the address of a label.  We mark
14018 	     the label as "forced", meaning it can never be removed and
14019 	     is a potential target for any computed goto.  */
14020 FORCED_LABEL (*expr_p) = 1;
14021 ret = GS_ALL_DONE;
14022 break;
14023
14024 case STATEMENT_LIST:
14025 ret = gimplify_statement_list (expr_p, pre_p);
14026 break;
14027
14028 case WITH_SIZE_EXPR:
14029 {
14030 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
14031 post_p == &internal_post ? NULL : post_p,
14032 gimple_test_f, fallback);
14033 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
14034 is_gimple_val, fb_rvalue);
14035 ret = GS_ALL_DONE;
14036 }
14037 break;
14038
14039 case VAR_DECL:
14040 case PARM_DECL:
14041 ret = gimplify_var_or_parm_decl (expr_p);
14042 break;
14043
14044 case RESULT_DECL:
14045 /* When within an OMP context, notice uses of variables. */
14046 if (gimplify_omp_ctxp)
14047 omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
14048 ret = GS_ALL_DONE;
14049 break;
14050
14051 case DEBUG_EXPR_DECL:
14052 gcc_unreachable ();
14053
14054 case DEBUG_BEGIN_STMT:
14055 gimplify_seq_add_stmt (pre_p,
14056 gimple_build_debug_begin_stmt
14057 (TREE_BLOCK (*expr_p),
14058 EXPR_LOCATION (*expr_p)));
14059 ret = GS_ALL_DONE;
14060 *expr_p = NULL;
14061 break;
14062
14063 case SSA_NAME:
14064 /* Allow callbacks into the gimplifier during optimization. */
14065 ret = GS_ALL_DONE;
14066 break;
14067
14068 case OMP_PARALLEL:
14069 gimplify_omp_parallel (expr_p, pre_p);
14070 ret = GS_ALL_DONE;
14071 break;
14072
14073 case OMP_TASK:
14074 gimplify_omp_task (expr_p, pre_p);
14075 ret = GS_ALL_DONE;
14076 break;
14077
14078 case OMP_FOR:
14079 case OMP_SIMD:
14080 case OMP_DISTRIBUTE:
14081 case OMP_TASKLOOP:
14082 case OACC_LOOP:
14083 ret = gimplify_omp_for (expr_p, pre_p);
14084 break;
14085
14086 case OMP_LOOP:
14087 ret = gimplify_omp_loop (expr_p, pre_p);
14088 break;
14089
14090 case OACC_CACHE:
14091 gimplify_oacc_cache (expr_p, pre_p);
14092 ret = GS_ALL_DONE;
14093 break;
14094
14095 case OACC_DECLARE:
14096 gimplify_oacc_declare (expr_p, pre_p);
14097 ret = GS_ALL_DONE;
14098 break;
14099
14100 case OACC_HOST_DATA:
14101 case OACC_DATA:
14102 case OACC_KERNELS:
14103 case OACC_PARALLEL:
14104 case OACC_SERIAL:
14105 case OMP_SECTIONS:
14106 case OMP_SINGLE:
14107 case OMP_TARGET:
14108 case OMP_TARGET_DATA:
14109 case OMP_TEAMS:
14110 gimplify_omp_workshare (expr_p, pre_p);
14111 ret = GS_ALL_DONE;
14112 break;
14113
14114 case OACC_ENTER_DATA:
14115 case OACC_EXIT_DATA:
14116 case OACC_UPDATE:
14117 case OMP_TARGET_UPDATE:
14118 case OMP_TARGET_ENTER_DATA:
14119 case OMP_TARGET_EXIT_DATA:
14120 gimplify_omp_target_update (expr_p, pre_p);
14121 ret = GS_ALL_DONE;
14122 break;
14123
14124 case OMP_SECTION:
14125 case OMP_MASTER:
14126 case OMP_ORDERED:
14127 case OMP_CRITICAL:
14128 case OMP_SCAN:
14129 {
14130 gimple_seq body = NULL;
14131 gimple *g;
14132 bool saved_in_omp_construct = in_omp_construct;
14133
14134 in_omp_construct = true;
14135 gimplify_and_add (OMP_BODY (*expr_p), &body);
14136 in_omp_construct = saved_in_omp_construct;
14137 switch (TREE_CODE (*expr_p))
14138 {
14139 case OMP_SECTION:
14140 g = gimple_build_omp_section (body);
14141 break;
14142 case OMP_MASTER:
14143 g = gimple_build_omp_master (body);
14144 break;
14145 case OMP_ORDERED:
14146 g = gimplify_omp_ordered (*expr_p, body);
14147 break;
14148 case OMP_CRITICAL:
14149 gimplify_scan_omp_clauses (&OMP_CRITICAL_CLAUSES (*expr_p),
14150 pre_p, ORT_WORKSHARE, OMP_CRITICAL);
14151 gimplify_adjust_omp_clauses (pre_p, body,
14152 &OMP_CRITICAL_CLAUSES (*expr_p),
14153 OMP_CRITICAL);
14154 g = gimple_build_omp_critical (body,
14155 OMP_CRITICAL_NAME (*expr_p),
14156 OMP_CRITICAL_CLAUSES (*expr_p));
14157 break;
14158 case OMP_SCAN:
14159 gimplify_scan_omp_clauses (&OMP_SCAN_CLAUSES (*expr_p),
14160 pre_p, ORT_WORKSHARE, OMP_SCAN);
14161 gimplify_adjust_omp_clauses (pre_p, body,
14162 &OMP_SCAN_CLAUSES (*expr_p),
14163 OMP_SCAN);
14164 g = gimple_build_omp_scan (body, OMP_SCAN_CLAUSES (*expr_p));
14165 break;
14166 default:
14167 gcc_unreachable ();
14168 }
14169 gimplify_seq_add_stmt (pre_p, g);
14170 ret = GS_ALL_DONE;
14171 break;
14172 }
14173
14174 case OMP_TASKGROUP:
14175 {
14176 gimple_seq body = NULL;
14177
14178 tree *pclauses = &OMP_TASKGROUP_CLAUSES (*expr_p);
14179 bool saved_in_omp_construct = in_omp_construct;
14180 gimplify_scan_omp_clauses (pclauses, pre_p, ORT_TASKGROUP,
14181 OMP_TASKGROUP);
14182 gimplify_adjust_omp_clauses (pre_p, NULL, pclauses, OMP_TASKGROUP);
14183
14184 in_omp_construct = true;
14185 gimplify_and_add (OMP_BODY (*expr_p), &body);
14186 in_omp_construct = saved_in_omp_construct;
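/* Wrap the body as "try { body } finally { GOMP_taskgroup_end (); }"
   so the taskgroup is ended even when the body exits abnormally.  */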
14187 gimple_seq cleanup = NULL;
14188 tree fn = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_END);
14189 gimple *g = gimple_build_call (fn, 0);
14190 gimple_seq_add_stmt (&cleanup, g);
14191 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
14192 body = NULL;
14193 gimple_seq_add_stmt (&body, g);
14194 g = gimple_build_omp_taskgroup (body, *pclauses);
14195 gimplify_seq_add_stmt (pre_p, g);
14196 ret = GS_ALL_DONE;
14197 break;
14198 }
14199
14200 case OMP_ATOMIC:
14201 case OMP_ATOMIC_READ:
14202 case OMP_ATOMIC_CAPTURE_OLD:
14203 case OMP_ATOMIC_CAPTURE_NEW:
14204 ret = gimplify_omp_atomic (expr_p, pre_p);
14205 break;
14206
14207 case TRANSACTION_EXPR:
14208 ret = gimplify_transaction (expr_p, pre_p);
14209 break;
14210
14211 case TRUTH_AND_EXPR:
14212 case TRUTH_OR_EXPR:
14213 case TRUTH_XOR_EXPR:
14214 {
14215 tree orig_type = TREE_TYPE (*expr_p);
14216 tree new_type, xop0, xop1;
14217 *expr_p = gimple_boolify (*expr_p);
14218 new_type = TREE_TYPE (*expr_p);
14219 if (!useless_type_conversion_p (orig_type, new_type))
14220 {
14221 *expr_p = fold_convert_loc (input_location, orig_type, *expr_p);
14222 ret = GS_OK;
14223 break;
14224 }
14225
14226 /* Boolified binary truth expressions are semantically equivalent
14227 to bitwise binary expressions. Canonicalize them to the
14228 bitwise variant. */
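/* E.g. (schematically): a boolified TRUTH_XOR_EXPR <a, b> of type
   _Bool simply becomes BIT_XOR_EXPR <a, b>; the conversions below then
   keep the operand types compatible with the rewritten code.  */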
14229 switch (TREE_CODE (*expr_p))
14230 {
14231 case TRUTH_AND_EXPR:
14232 TREE_SET_CODE (*expr_p, BIT_AND_EXPR);
14233 break;
14234 case TRUTH_OR_EXPR:
14235 TREE_SET_CODE (*expr_p, BIT_IOR_EXPR);
14236 break;
14237 case TRUTH_XOR_EXPR:
14238 TREE_SET_CODE (*expr_p, BIT_XOR_EXPR);
14239 break;
14240 default:
14241 break;
14242 }
14243 /* Now make sure that operands have compatible type to
14244 expression's new_type. */
14245 xop0 = TREE_OPERAND (*expr_p, 0);
14246 xop1 = TREE_OPERAND (*expr_p, 1);
14247 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop0)))
14248 TREE_OPERAND (*expr_p, 0) = fold_convert_loc (input_location,
14249 new_type,
14250 xop0);
14251 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop1)))
14252 TREE_OPERAND (*expr_p, 1) = fold_convert_loc (input_location,
14253 new_type,
14254 xop1);
14255 /* Continue classified as tcc_binary. */
14256 goto expr_2;
14257 }
14258
14259 case VEC_COND_EXPR:
14260 {
14261 enum gimplify_status r0, r1, r2;
14262
14263 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
14264 post_p, is_gimple_condexpr, fb_rvalue);
14265 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
14266 post_p, is_gimple_val, fb_rvalue);
14267 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
14268 post_p, is_gimple_val, fb_rvalue);
14269
14270 ret = MIN (MIN (r0, r1), r2);
14271 recalculate_side_effects (*expr_p);
14272 }
14273 break;
14274
14275 case VEC_PERM_EXPR:
14276 /* Classified as tcc_expression. */
14277 goto expr_3;
14278
14279 case BIT_INSERT_EXPR:
14280 /* Argument 3 is a constant. */
14281 goto expr_2;
14282
14283 case POINTER_PLUS_EXPR:
14284 {
14285 enum gimplify_status r0, r1;
14286 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
14287 post_p, is_gimple_val, fb_rvalue);
14288 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
14289 post_p, is_gimple_val, fb_rvalue);
14290 recalculate_side_effects (*expr_p);
14291 ret = MIN (r0, r1);
14292 break;
14293 }
14294
14295 default:
14296 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
14297 {
14298 case tcc_comparison:
14299 /* Handle comparison of objects of non-scalar-mode aggregate types
14300 with a call to memcmp. It would be nice to only have to do
14301 this for variable-sized objects, but then we'd have to allow
14302 the same nest of reference nodes we allow for MODIFY_EXPR and
14303 that's too complex.
14304
14305 Compare scalar mode aggregates as scalar mode values. Using
14306 memcmp for them would be very inefficient at best, and is
14307 plain wrong if bitfields are involved. */
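/* E.g. (schematically): for BLKmode structs, "s1 == s2" becomes
   "memcmp (&s1, &s2, sizeof (s1)) == 0", while an aggregate whose mode
   is an integral scalar mode is VIEW_CONVERTed to an integer of that
   mode and compared directly.  */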
14308 {
14309 tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
14310
14311 /* Vector comparisons need no boolification. */
14312 if (TREE_CODE (type) == VECTOR_TYPE)
14313 goto expr_2;
14314 else if (!AGGREGATE_TYPE_P (type))
14315 {
14316 tree org_type = TREE_TYPE (*expr_p);
14317 *expr_p = gimple_boolify (*expr_p);
14318 if (!useless_type_conversion_p (org_type,
14319 TREE_TYPE (*expr_p)))
14320 {
14321 *expr_p = fold_convert_loc (input_location,
14322 org_type, *expr_p);
14323 ret = GS_OK;
14324 }
14325 else
14326 goto expr_2;
14327 }
14328 else if (TYPE_MODE (type) != BLKmode)
14329 ret = gimplify_scalar_mode_aggregate_compare (expr_p);
14330 else
14331 ret = gimplify_variable_sized_compare (expr_p);
14332
14333 break;
14334 }
14335
14336 /* If *EXPR_P does not need to be special-cased, handle it
14337 according to its class. */
14338 case tcc_unary:
14339 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
14340 post_p, is_gimple_val, fb_rvalue);
14341 break;
14342
14343 case tcc_binary:
14344 expr_2:
14345 {
14346 enum gimplify_status r0, r1;
14347
14348 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
14349 post_p, is_gimple_val, fb_rvalue);
14350 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
14351 post_p, is_gimple_val, fb_rvalue);
14352
14353 ret = MIN (r0, r1);
14354 break;
14355 }
14356
14357 expr_3:
14358 {
14359 enum gimplify_status r0, r1, r2;
14360
14361 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
14362 post_p, is_gimple_val, fb_rvalue);
14363 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
14364 post_p, is_gimple_val, fb_rvalue);
14365 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
14366 post_p, is_gimple_val, fb_rvalue);
14367
14368 ret = MIN (MIN (r0, r1), r2);
14369 break;
14370 }
14371
14372 case tcc_declaration:
14373 case tcc_constant:
14374 ret = GS_ALL_DONE;
14375 goto dont_recalculate;
14376
14377 default:
14378 gcc_unreachable ();
14379 }
14380
14381 recalculate_side_effects (*expr_p);
14382
14383 dont_recalculate:
14384 break;
14385 }
14386
14387 gcc_assert (*expr_p || ret != GS_OK);
14388 }
14389 while (ret == GS_OK);
14390
14391 /* If we encountered an error_mark somewhere nested inside, either
14392 stub out the statement or propagate the error back out. */
14393 if (ret == GS_ERROR)
14394 {
14395 if (is_statement)
14396 *expr_p = NULL;
14397 goto out;
14398 }
14399
14400 /* This was only valid as a return value from the langhook, which
14401 we handled. Make sure it doesn't escape from any other context. */
14402 gcc_assert (ret != GS_UNHANDLED);
14403
14404 if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
14405 {
14406 /* We aren't looking for a value, and we don't have a valid
14407 statement. If it doesn't have side-effects, throw it away.
14408 We can also get here with code such as "*&&L;", where L is
14409 a LABEL_DECL that is marked as FORCED_LABEL. */
14410 if (TREE_CODE (*expr_p) == LABEL_DECL
14411 || !TREE_SIDE_EFFECTS (*expr_p))
14412 *expr_p = NULL;
14413 else if (!TREE_THIS_VOLATILE (*expr_p))
14414 {
14415 /* This is probably a _REF that contains something nested that
14416 has side effects. Recurse through the operands to find it. */
14417 enum tree_code code = TREE_CODE (*expr_p);
14418
14419 switch (code)
14420 {
14421 case COMPONENT_REF:
14422 case REALPART_EXPR:
14423 case IMAGPART_EXPR:
14424 case VIEW_CONVERT_EXPR:
14425 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
14426 gimple_test_f, fallback);
14427 break;
14428
14429 case ARRAY_REF:
14430 case ARRAY_RANGE_REF:
14431 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
14432 gimple_test_f, fallback);
14433 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
14434 gimple_test_f, fallback);
14435 break;
14436
14437 default:
14438 /* Anything else with side-effects must be converted to
14439 a valid statement before we get here. */
14440 gcc_unreachable ();
14441 }
14442
14443 *expr_p = NULL;
14444 }
14445 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
14446 && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode)
14447 {
14448 /* Historically, the compiler has treated a bare reference
14449 to a non-BLKmode volatile lvalue as forcing a load. */
14450 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
14451
14452 /* Normally, we do not want to create a temporary for a
14453 TREE_ADDRESSABLE type because such a type should not be
14454 copied by bitwise-assignment. However, we make an
14455 exception here, as all we are doing here is ensuring that
14456 we read the bytes that make up the type. We use
14457 create_tmp_var_raw because create_tmp_var will abort when
14458 given a TREE_ADDRESSABLE type. */
14459 tree tmp = create_tmp_var_raw (type, "vol");
14460 gimple_add_tmp_var (tmp);
14461 gimplify_assign (tmp, *expr_p, pre_p);
14462 *expr_p = NULL;
14463 }
14464 else
14465 /* We can't do anything useful with a volatile reference to
14466 an incomplete type, so just throw it away. Likewise for
14467 a BLKmode type, since any implicit inner load should
14468 already have been turned into an explicit one by the
14469 gimplification process. */
14470 *expr_p = NULL;
14471 }
14472
14473 /* If we are gimplifying at the statement level, we're done. Tack
14474 everything together and return. */
14475 if (fallback == fb_none || is_statement)
14476 {
14477 /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
14478 it out for GC to reclaim it. */
14479 *expr_p = NULL_TREE;
14480
14481 if (!gimple_seq_empty_p (internal_pre)
14482 || !gimple_seq_empty_p (internal_post))
14483 {
14484 gimplify_seq_add_seq (&internal_pre, internal_post);
14485 gimplify_seq_add_seq (pre_p, internal_pre);
14486 }
14487
14488 /* The result of gimplifying *EXPR_P is going to be the last few
14489 statements in *PRE_P and *POST_P. Add location information
14490 to all the statements that were added by the gimplification
14491 helpers. */
14492 if (!gimple_seq_empty_p (*pre_p))
14493 annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);
14494
14495 if (!gimple_seq_empty_p (*post_p))
14496 annotate_all_with_location_after (*post_p, post_last_gsi,
14497 input_location);
14498
14499 goto out;
14500 }
14501
14502 #ifdef ENABLE_GIMPLE_CHECKING
14503 if (*expr_p)
14504 {
14505 enum tree_code code = TREE_CODE (*expr_p);
14506 /* These expressions should already be in gimple IR form. */
14507 gcc_assert (code != MODIFY_EXPR
14508 && code != ASM_EXPR
14509 && code != BIND_EXPR
14510 && code != CATCH_EXPR
14511 && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
14512 && code != EH_FILTER_EXPR
14513 && code != GOTO_EXPR
14514 && code != LABEL_EXPR
14515 && code != LOOP_EXPR
14516 && code != SWITCH_EXPR
14517 && code != TRY_FINALLY_EXPR
14518 && code != EH_ELSE_EXPR
14519 && code != OACC_PARALLEL
14520 && code != OACC_KERNELS
14521 && code != OACC_SERIAL
14522 && code != OACC_DATA
14523 && code != OACC_HOST_DATA
14524 && code != OACC_DECLARE
14525 && code != OACC_UPDATE
14526 && code != OACC_ENTER_DATA
14527 && code != OACC_EXIT_DATA
14528 && code != OACC_CACHE
14529 && code != OMP_CRITICAL
14530 && code != OMP_FOR
14531 && code != OACC_LOOP
14532 && code != OMP_MASTER
14533 && code != OMP_TASKGROUP
14534 && code != OMP_ORDERED
14535 && code != OMP_PARALLEL
14536 && code != OMP_SCAN
14537 && code != OMP_SECTIONS
14538 && code != OMP_SECTION
14539 && code != OMP_SINGLE);
14540 }
14541 #endif
14542
14543 /* Otherwise we're gimplifying a subexpression, so the resulting
14544 value is interesting. If it's a valid operand that matches
14545 GIMPLE_TEST_F, we're done, unless we are handling some
14546 post-effects internally; in that case, we need to copy into
14547 a temporary before adding the post-effects to POST_P.  */
14548 if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
14549 goto out;
14550
14551 /* Otherwise, we need to create a new temporary for the gimplified
14552 expression. */
14553
14554 /* We can't return an lvalue if we have an internal postqueue. The
14555 object the lvalue refers to would (probably) be modified by the
14556 postqueue; we need to copy the value out first, which means an
14557 rvalue. */
14558 if ((fallback & fb_lvalue)
14559 && gimple_seq_empty_p (internal_post)
14560 && is_gimple_addressable (*expr_p))
14561 {
14562 /* An lvalue will do. Take the address of the expression, store it
14563 in a temporary, and replace the expression with a MEM_REF of
14564 that temporary.  */
14565 tree ref_alias_type = reference_alias_ptr_type (*expr_p);
14566 unsigned int ref_align = get_object_alignment (*expr_p);
14567 tree ref_type = TREE_TYPE (*expr_p);
14568 tmp = build_fold_addr_expr_loc (input_location, *expr_p);
14569 gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
14570 if (TYPE_ALIGN (ref_type) != ref_align)
14571 ref_type = build_aligned_type (ref_type, ref_align);
14572 *expr_p = build2 (MEM_REF, ref_type,
14573 tmp, build_zero_cst (ref_alias_type));
14574 }
14575 else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
14576 {
14577 /* An rvalue will do. Assign the gimplified expression into a
14578 new temporary TMP and replace the original expression with
14579 TMP. First, make sure that the expression has a type so that
14580 it can be assigned into a temporary. */
14581 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
14582 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
14583 }
14584 else
14585 {
14586 #ifdef ENABLE_GIMPLE_CHECKING
14587 if (!(fallback & fb_mayfail))
14588 {
14589 fprintf (stderr, "gimplification failed:\n");
14590 print_generic_expr (stderr, *expr_p);
14591 debug_tree (*expr_p);
14592 internal_error ("gimplification failed");
14593 }
14594 #endif
14595 gcc_assert (fallback & fb_mayfail);
14596
14597 /* If this is an asm statement, and the user asked for the
14598 impossible, don't die. Fail and let gimplify_asm_expr
14599 issue an error. */
14600 ret = GS_ERROR;
14601 goto out;
14602 }
14603
14604 /* Make sure the temporary matches our predicate. */
14605 gcc_assert ((*gimple_test_f) (*expr_p));
14606
14607 if (!gimple_seq_empty_p (internal_post))
14608 {
14609 annotate_all_with_location (internal_post, input_location);
14610 gimplify_seq_add_seq (pre_p, internal_post);
14611 }
14612
14613 out:
14614 input_location = saved_location;
14615 return ret;
14616 }
14617
14618 /* Like gimplify_expr but make sure the gimplified result is not itself
14619 an SSA name (it is replaced by a temporary decl if it would be).
14620 Temporaries required by evaluating *EXPR_P may still be SSA names.  */
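/* A sketch of the effect: with ALLOW_SSA false, gimplifying "x + y"
   may first yield _1 = x + y; the SSA name _1 is then replaced by a
   fresh temporary (say D.1234), either by retargeting the defining
   statement's LHS or by emitting an extra copy.  */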
14621
14622 static enum gimplify_status
14623 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
14624 bool (*gimple_test_f) (tree), fallback_t fallback,
14625 bool allow_ssa)
14626 {
14627 bool was_ssa_name_p = TREE_CODE (*expr_p) == SSA_NAME;
14628 enum gimplify_status ret = gimplify_expr (expr_p, pre_p, post_p,
14629 gimple_test_f, fallback);
14630 if (! allow_ssa
14631 && TREE_CODE (*expr_p) == SSA_NAME)
14632 {
14633 tree name = *expr_p;
14634 if (was_ssa_name_p)
14635 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, NULL, false);
14636 else
14637 {
14638 /* Avoid the extra copy if possible. */
14639 *expr_p = create_tmp_reg (TREE_TYPE (name));
14640 if (!gimple_nop_p (SSA_NAME_DEF_STMT (name)))
14641 gimple_set_lhs (SSA_NAME_DEF_STMT (name), *expr_p);
14642 release_ssa_name (name);
14643 }
14644 }
14645 return ret;
14646 }
14647
14648 /* Look through TYPE for variable-sized objects and gimplify each such
14649 size that we find. Add to LIST_P any statements generated. */
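/* E.g. (a rough sketch): for the C99 VLA type "int[n]" on a target
   with 32-bit int, TYPE_SIZE is something like (bitsizetype) n * 32
   and TYPE_SIZE_UNIT is (sizetype) n * 4; gimplify_one_sizepos
   evaluates each such expression into a temporary so that all later
   uses of the type share the same precomputed size.  */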
14650
14651 void
14652 gimplify_type_sizes (tree type, gimple_seq *list_p)
14653 {
14654 tree field, t;
14655
14656 if (type == NULL || type == error_mark_node)
14657 return;
14658
14659 /* We first do the main variant, then copy into any other variants. */
14660 type = TYPE_MAIN_VARIANT (type);
14661
14662 /* Avoid infinite recursion. */
14663 if (TYPE_SIZES_GIMPLIFIED (type))
14664 return;
14665
14666 TYPE_SIZES_GIMPLIFIED (type) = 1;
14667
14668 switch (TREE_CODE (type))
14669 {
14670 case INTEGER_TYPE:
14671 case ENUMERAL_TYPE:
14672 case BOOLEAN_TYPE:
14673 case REAL_TYPE:
14674 case FIXED_POINT_TYPE:
14675 gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
14676 gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);
14677
14678 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
14679 {
14680 TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
14681 TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
14682 }
14683 break;
14684
14685 case ARRAY_TYPE:
14686 /* These types may not have declarations, so handle them here. */
14687 gimplify_type_sizes (TREE_TYPE (type), list_p);
14688 gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
14689 /* Ensure VLA bounds aren't removed: at -O0 they should be variables
14690 with assigned stack slots; at -O1+ with -g they should be tracked
14691 by VTA.  */
14692 if (!(TYPE_NAME (type)
14693 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
14694 && DECL_IGNORED_P (TYPE_NAME (type)))
14695 && TYPE_DOMAIN (type)
14696 && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
14697 {
14698 t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
14699 if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
14700 DECL_IGNORED_P (t) = 0;
14701 t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
14702 if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
14703 DECL_IGNORED_P (t) = 0;
14704 }
14705 break;
14706
14707 case RECORD_TYPE:
14708 case UNION_TYPE:
14709 case QUAL_UNION_TYPE:
14710 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
14711 if (TREE_CODE (field) == FIELD_DECL)
14712 {
14713 gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
14714 gimplify_one_sizepos (&DECL_SIZE (field), list_p);
14715 gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
14716 gimplify_type_sizes (TREE_TYPE (field), list_p);
14717 }
14718 break;
14719
14720 case POINTER_TYPE:
14721 case REFERENCE_TYPE:
14722 /* We used to recurse on the pointed-to type here, which turned out to
14723 be incorrect because its definition might refer to variables not
14724 yet initialized at this point if a forward declaration is involved.
14725
14726 It was actually useful for anonymous pointed-to types to ensure
14727 that the sizes evaluation dominates every possible later use of the
14728 values. Restricting to such types here would be safe since there
14729 is no possible forward declaration around, but would introduce an
14730 undesirable middle-end semantic to anonymity. We then defer to
14731 front-ends the responsibility of ensuring that the sizes are
14732 evaluated both early and late enough, e.g. by attaching artificial
14733 type declarations to the tree. */
14734 break;
14735
14736 default:
14737 break;
14738 }
14739
14740 gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
14741 gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);
14742
14743 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
14744 {
14745 TYPE_SIZE (t) = TYPE_SIZE (type);
14746 TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
14747 TYPE_SIZES_GIMPLIFIED (t) = 1;
14748 }
14749 }
14750
14751 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
14752 a size or position, has had all of its SAVE_EXPRs evaluated.
14753 We add any required statements to *STMT_P. */
14754
14755 void
14756 gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
14757 {
14758 tree expr = *expr_p;
14759
14760 /* We don't do anything if the value isn't there, is constant, or contains
14761 a PLACEHOLDER_EXPR.  We also don't want to do anything if it's already
14762 a VAR_DECL.  If it's a VAR_DECL from another function, the gimplifier
14763 would want to replace it with a new variable, but that would cause problems
14764 if this type came from outside the function, so leave such a VAR_DECL alone.  */
14765 if (expr == NULL_TREE
14766 || is_gimple_constant (expr)
14767 || TREE_CODE (expr) == VAR_DECL
14768 || CONTAINS_PLACEHOLDER_P (expr))
14769 return;
14770
14771 *expr_p = unshare_expr (expr);
14772
14773 /* SSA names in decl/type fields are a bad idea - they'll get reclaimed
14774 if the def vanishes. */
14775 gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue, false);
14776
14777 /* If expr wasn't already is_gimple_sizepos or is_gimple_constant from the
14778 FE, ensure that it is a VAR_DECL; otherwise we might handle some decls
14779 through gimplify_vla_decl even when all their sizes would be INTEGER_CSTs.  */
14780 if (is_gimple_constant (*expr_p))
14781 *expr_p = get_initialized_tmp_var (*expr_p, stmt_p, NULL, false);
14782 }
14783
14784 /* Gimplify the body of statements of FNDECL and return a GIMPLE_BIND node
14785 containing the sequence of corresponding GIMPLE statements. If DO_PARMS
14786 is true, also gimplify the parameters. */
14787
14788 gbind *
14789 gimplify_body (tree fndecl, bool do_parms)
14790 {
14791 location_t saved_location = input_location;
14792 gimple_seq parm_stmts, parm_cleanup = NULL, seq;
14793 gimple *outer_stmt;
14794 gbind *outer_bind;
14795
14796 timevar_push (TV_TREE_GIMPLIFY);
14797
14798 init_tree_ssa (cfun);
14799
14800 /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
14801 gimplification. */
14802 default_rtl_profile ();
14803
14804 gcc_assert (gimplify_ctxp == NULL);
14805 push_gimplify_context (true);
14806
14807 if (flag_openacc || flag_openmp)
14808 {
14809 gcc_assert (gimplify_omp_ctxp == NULL);
14810 if (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (fndecl)))
14811 gimplify_omp_ctxp = new_omp_context (ORT_IMPLICIT_TARGET);
14812 }
14813
14814 /* Unshare most shared trees in the body and in that of any nested functions.
14815 It would seem we don't have to do this for nested functions because
14816 they are supposed to be output and then the outer function gimplified
14817 first, but the g++ front end doesn't always do it that way. */
14818 unshare_body (fndecl);
14819 unvisit_body (fndecl);
14820
14821 /* Make sure input_location isn't set to something weird. */
14822 input_location = DECL_SOURCE_LOCATION (fndecl);
14823
14824 /* Resolve callee-copies. This has to be done before processing
14825 the body so that DECL_VALUE_EXPR gets processed correctly. */
14826 parm_stmts = do_parms ? gimplify_parameters (&parm_cleanup) : NULL;
14827
14828 /* Gimplify the function's body. */
14829 seq = NULL;
14830 gimplify_stmt (&DECL_SAVED_TREE (fndecl), &seq);
14831 outer_stmt = gimple_seq_first_stmt (seq);
14832 if (!outer_stmt)
14833 {
14834 outer_stmt = gimple_build_nop ();
14835 gimplify_seq_add_stmt (&seq, outer_stmt);
14836 }
14837
14838 /* The body must contain exactly one statement, a GIMPLE_BIND. If this is
14839 not the case, wrap everything in a GIMPLE_BIND to make it so. */
14840 if (gimple_code (outer_stmt) == GIMPLE_BIND
14841 && gimple_seq_first (seq) == gimple_seq_last (seq))
14842 outer_bind = as_a <gbind *> (outer_stmt);
14843 else
14844 outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);
14845
14846 DECL_SAVED_TREE (fndecl) = NULL_TREE;
14847
14848 /* If we had callee-copies statements, insert them at the beginning
14849 of the function and clear DECL_VALUE_EXPR_P on the parameters. */
14850 if (!gimple_seq_empty_p (parm_stmts))
14851 {
14852 tree parm;
14853
14854 gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
14855 if (parm_cleanup)
14856 {
14857 gtry *g = gimple_build_try (parm_stmts, parm_cleanup,
14858 GIMPLE_TRY_FINALLY);
14859 parm_stmts = NULL;
14860 gimple_seq_add_stmt (&parm_stmts, g);
14861 }
14862 gimple_bind_set_body (outer_bind, parm_stmts);
14863
14864 for (parm = DECL_ARGUMENTS (current_function_decl);
14865 parm; parm = DECL_CHAIN (parm))
14866 if (DECL_HAS_VALUE_EXPR_P (parm))
14867 {
14868 DECL_HAS_VALUE_EXPR_P (parm) = 0;
14869 DECL_IGNORED_P (parm) = 0;
14870 }
14871 }
14872
14873 if ((flag_openacc || flag_openmp || flag_openmp_simd)
14874 && gimplify_omp_ctxp)
14875 {
14876 delete_omp_context (gimplify_omp_ctxp);
14877 gimplify_omp_ctxp = NULL;
14878 }
14879
14880 pop_gimplify_context (outer_bind);
14881 gcc_assert (gimplify_ctxp == NULL);
14882
14883 if (flag_checking && !seen_error ())
14884 verify_gimple_in_seq (gimple_bind_body (outer_bind));
14885
14886 timevar_pop (TV_TREE_GIMPLIFY);
14887 input_location = saved_location;
14888
14889 return outer_bind;
14890 }
14891
14892 typedef char *char_p; /* For DEF_VEC_P. */
14893
14894 /* Return whether we should exclude FNDECL from instrumentation. */
14895
14896 static bool
14897 flag_instrument_functions_exclude_p (tree fndecl)
14898 {
14899 vec<char_p> *v;
14900
14901 v = (vec<char_p> *) flag_instrument_functions_exclude_functions;
14902 if (v && v->length () > 0)
14903 {
14904 const char *name;
14905 int i;
14906 char *s;
14907
14908 name = lang_hooks.decl_printable_name (fndecl, 1);
14909 FOR_EACH_VEC_ELT (*v, i, s)
14910 if (strstr (name, s) != NULL)
14911 return true;
14912 }
14913
14914 v = (vec<char_p> *) flag_instrument_functions_exclude_files;
14915 if (v && v->length () > 0)
14916 {
14917 const char *name;
14918 int i;
14919 char *s;
14920
14921 name = DECL_SOURCE_FILE (fndecl);
14922 FOR_EACH_VEC_ELT (*v, i, s)
14923 if (strstr (name, s) != NULL)
14924 return true;
14925 }
14926
14927 return false;
14928 }
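/* E.g. with -finstrument-functions-exclude-function-list=foo,bar, any
   function whose printable name contains "foo" or "bar" as a substring
   is excluded; -finstrument-functions-exclude-file-list matches against
   DECL_SOURCE_FILE in the same way.  */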
14929
14930 /* Entry point to the gimplification pass. FNDECL is the FUNCTION_DECL
14931 node for the function we want to gimplify.
14932
14933 Return the sequence of GIMPLE statements corresponding to the body
14934 of FNDECL. */
14935
14936 void
14937 gimplify_function_tree (tree fndecl)
14938 {
14939 tree parm, ret;
14940 gimple_seq seq;
14941 gbind *bind;
14942
14943 gcc_assert (!gimple_body (fndecl));
14944
14945 if (DECL_STRUCT_FUNCTION (fndecl))
14946 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
14947 else
14948 push_struct_function (fndecl);
14949
14950 /* Tentatively set PROP_gimple_lva here, and reset it in gimplify_va_arg_expr
14951 if necessary. */
14952 cfun->curr_properties |= PROP_gimple_lva;
14953
14954 for (parm = DECL_ARGUMENTS (fndecl); parm ; parm = DECL_CHAIN (parm))
14955 {
14956 /* Preliminarily mark non-addressed complex and vector variables as
14957 eligible for promotion to gimple registers.  We'll transform
14958 their uses as we find them.  */
14959 if ((TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
14960 || TREE_CODE (TREE_TYPE (parm)) == VECTOR_TYPE)
14961 && !TREE_THIS_VOLATILE (parm)
14962 && !needs_to_live_in_memory (parm))
14963 DECL_GIMPLE_REG_P (parm) = 1;
14964 }
14965
14966 ret = DECL_RESULT (fndecl);
14967 if ((TREE_CODE (TREE_TYPE (ret)) == COMPLEX_TYPE
14968 || TREE_CODE (TREE_TYPE (ret)) == VECTOR_TYPE)
14969 && !needs_to_live_in_memory (ret))
14970 DECL_GIMPLE_REG_P (ret) = 1;
14971
14972 if (asan_sanitize_use_after_scope () && sanitize_flags_p (SANITIZE_ADDRESS))
14973 asan_poisoned_variables = new hash_set<tree> ();
14974 bind = gimplify_body (fndecl, true);
14975 if (asan_poisoned_variables)
14976 {
14977 delete asan_poisoned_variables;
14978 asan_poisoned_variables = NULL;
14979 }
14980
14981 /* The tree body of the function is no longer needed, replace it
14982 with the new GIMPLE body. */
14983 seq = NULL;
14984 gimple_seq_add_stmt (&seq, bind);
14985 gimple_set_body (fndecl, seq);
14986
14987 /* If we're instrumenting function entry/exit, then prepend the call to
14988 the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
14989 catch the exit hook. */
14990 /* ??? Add some way to ignore exceptions for this TFE. */
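/* Roughly, the instrumented body then has this shape:
     t1 = __builtin_return_address (0);
     __cyg_profile_func_enter (this_fn_addr, t1);
     try {
       <original body>
     } finally {
       t2 = __builtin_return_address (0);
       __cyg_profile_func_exit (this_fn_addr, t2);
     }  */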
14991 if (flag_instrument_function_entry_exit
14992 && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
14993 /* Do not instrument extern inline functions. */
14994 && !(DECL_DECLARED_INLINE_P (fndecl)
14995 && DECL_EXTERNAL (fndecl)
14996 && DECL_DISREGARD_INLINE_LIMITS (fndecl))
14997 && !flag_instrument_functions_exclude_p (fndecl))
14998 {
14999 tree x;
15000 gbind *new_bind;
15001 gimple *tf;
15002 gimple_seq cleanup = NULL, body = NULL;
15003 tree tmp_var, this_fn_addr;
15004 gcall *call;
15005
15006 /* The instrumentation hooks aren't going to call the instrumented
15007 function and the address they receive is expected to be matchable
15008 against symbol addresses. Make sure we don't create a trampoline,
15009 in case the current function is nested. */
15010 this_fn_addr = build_fold_addr_expr (current_function_decl);
15011 TREE_NO_TRAMPOLINE (this_fn_addr) = 1;
15012
15013 x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
15014 call = gimple_build_call (x, 1, integer_zero_node);
15015 tmp_var = create_tmp_var (ptr_type_node, "return_addr");
15016 gimple_call_set_lhs (call, tmp_var);
15017 gimplify_seq_add_stmt (&cleanup, call);
15018 x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_EXIT);
15019 call = gimple_build_call (x, 2, this_fn_addr, tmp_var);
15020 gimplify_seq_add_stmt (&cleanup, call);
15021 tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);
15022
15023 x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
15024 call = gimple_build_call (x, 1, integer_zero_node);
15025 tmp_var = create_tmp_var (ptr_type_node, "return_addr");
15026 gimple_call_set_lhs (call, tmp_var);
15027 gimplify_seq_add_stmt (&body, call);
15028 x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_ENTER);
15029 call = gimple_build_call (x, 2, this_fn_addr, tmp_var);
15030 gimplify_seq_add_stmt (&body, call);
15031 gimplify_seq_add_stmt (&body, tf);
15032 new_bind = gimple_build_bind (NULL, body, NULL);
15033
15034 /* Replace the current function body with the body
15035 wrapped in the try/finally TF. */
15036 seq = NULL;
15037 gimple_seq_add_stmt (&seq, new_bind);
15038 gimple_set_body (fndecl, seq);
15039 bind = new_bind;
15040 }
15041
15042 if (sanitize_flags_p (SANITIZE_THREAD))
15043 {
15044 gcall *call = gimple_build_call_internal (IFN_TSAN_FUNC_EXIT, 0);
15045 gimple *tf = gimple_build_try (seq, call, GIMPLE_TRY_FINALLY);
15046 gbind *new_bind = gimple_build_bind (NULL, tf, NULL);
15047 /* Replace the current function body with the body
15048 wrapped in the try/finally TF. */
15049 seq = NULL;
15050 gimple_seq_add_stmt (&seq, new_bind);
15051 gimple_set_body (fndecl, seq);
15052 }
15053
15054 DECL_SAVED_TREE (fndecl) = NULL_TREE;
15055 cfun->curr_properties |= PROP_gimple_any;
15056
15057 pop_cfun ();
15058
15059 dump_function (TDI_gimple, fndecl);
15060 }
15061
15062 /* Return a dummy expression of type TYPE in order to keep going after an
15063 error. */
15064
15065 static tree
15066 dummy_object (tree type)
15067 {
15068 tree t = build_int_cst (build_pointer_type (type), 0);
15069 return build2 (MEM_REF, type, t, t);
15070 }
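/* E.g. dummy_object (integer_type_node) yields, schematically, the
   rvalue *(int *) 0 -- never meant to be executed, only to give later
   code an operand with the right type and mode.  */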
15071
15072 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
15073 builtin function, but a very special sort of operator. */
15074
15075 enum gimplify_status
15076 gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p,
15077 gimple_seq *post_p ATTRIBUTE_UNUSED)
15078 {
15079 tree promoted_type, have_va_type;
15080 tree valist = TREE_OPERAND (*expr_p, 0);
15081 tree type = TREE_TYPE (*expr_p);
15082 tree t, tag, aptag;
15083 location_t loc = EXPR_LOCATION (*expr_p);
15084
15085 /* Verify that valist is of the proper type. */
15086 have_va_type = TREE_TYPE (valist);
15087 if (have_va_type == error_mark_node)
15088 return GS_ERROR;
15089 have_va_type = targetm.canonical_va_list_type (have_va_type);
15090 if (have_va_type == NULL_TREE
15091 && POINTER_TYPE_P (TREE_TYPE (valist)))
15092 /* Handle 'Case 1: Not an array type' from c-common.c/build_va_arg. */
15093 have_va_type
15094 = targetm.canonical_va_list_type (TREE_TYPE (TREE_TYPE (valist)));
15095 gcc_assert (have_va_type != NULL_TREE);
15096
15097 /* Generate a diagnostic for requesting data of a type that cannot
15098 be passed through `...' due to type promotion at the call site. */
15099 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
15100 != type)
15101 {
15102 static bool gave_help;
15103 bool warned;
15104 /* Use the expansion point to handle cases such as passing bool (defined
15105 in a system header) through `...'. */
15106 location_t xloc
15107 = expansion_point_location_if_in_system_header (loc);
15108
15109 /* Unfortunately, this is merely undefined, rather than a constraint
15110 violation, so we cannot make this an error. If this call is never
15111 executed, the program is still strictly conforming. */
15112 auto_diagnostic_group d;
15113 warned = warning_at (xloc, 0,
15114 "%qT is promoted to %qT when passed through %<...%>",
15115 type, promoted_type);
15116 if (!gave_help && warned)
15117 {
15118 gave_help = true;
15119 inform (xloc, "(so you should pass %qT not %qT to %<va_arg%>)",
15120 promoted_type, type);
15121 }
15122
15123 /* We can, however, treat "undefined" any way we please.
15124 Emit a trap to encourage the user to fix the program.  */
15125 if (warned)
15126 inform (xloc, "if this code is reached, the program will abort");
15127 /* Before the trap, allow the evaluation of the va_list
15128 expression to exit or longjmp.  */
15129 gimplify_and_add (valist, pre_p);
15130 t = build_call_expr_loc (loc,
15131 builtin_decl_implicit (BUILT_IN_TRAP), 0);
15132 gimplify_and_add (t, pre_p);
15133
15134 /* This is dead code, but go ahead and finish so that the
15135 mode of the result comes out right. */
15136 *expr_p = dummy_object (type);
15137 return GS_ALL_DONE;
15138 }
15139
15140 tag = build_int_cst (build_pointer_type (type), 0);
15141 aptag = build_int_cst (TREE_TYPE (valist), 0);
15142
15143 *expr_p = build_call_expr_internal_loc (loc, IFN_VA_ARG, type, 3,
15144 valist, tag, aptag);
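/* Schematically, "va_arg (ap, T)" is now the internal call
   VA_ARG (valist, (T *) 0, (aptag-type) 0), whose two null arguments
   only carry type information; the stdarg lowering pass later expands
   it into real va_list accesses.  */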
15145
15146 /* Clear the tentatively set PROP_gimple_lva, to indicate that IFN_VA_ARG
15147 needs to be expanded. */
15148 cfun->curr_properties &= ~PROP_gimple_lva;
15149
15150 return GS_OK;
15151 }
15152
15153 /* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.
15154
15155 DST/SRC are the destination and source respectively. You can pass
15156 ungimplified trees in DST or SRC, in which case they will be
15157 converted to a gimple operand if necessary.
15158
15159 This function returns the newly created GIMPLE_ASSIGN tuple. */
15160
15161 gimple *
15162 gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
15163 {
15164 tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
15165 gimplify_and_add (t, seq_p);
15166 ggc_free (t);
15167 return gimple_seq_last_stmt (*seq_p);
15168 }
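/* Typical use (a sketch):
     gimple_seq seq = NULL;
     gimple *g = gimplify_assign (dst, src, &seq);
   appends to SEQ whatever statements are needed to evaluate SRC and
   store it in DST; G is the resulting GIMPLE_ASSIGN.  */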
15169
15170 inline hashval_t
15171 gimplify_hasher::hash (const elt_t *p)
15172 {
15173 tree t = p->val;
15174 return iterative_hash_expr (t, 0);
15175 }
15176
15177 inline bool
15178 gimplify_hasher::equal (const elt_t *p1, const elt_t *p2)
15179 {
15180 tree t1 = p1->val;
15181 tree t2 = p2->val;
15182 enum tree_code code = TREE_CODE (t1);
15183
15184 if (TREE_CODE (t2) != code
15185 || TREE_TYPE (t1) != TREE_TYPE (t2))
15186 return false;
15187
15188 if (!operand_equal_p (t1, t2, 0))
15189 return false;
15190
15191 /* Only allow them to compare equal if they also hash equal; otherwise
15192 results are nondeterministic, and we fail bootstrap comparison.  */
15193 gcc_checking_assert (hash (p1) == hash (p2));
15194
15195 return true;
15196 }