/* Tree lowering pass.  This pass converts the GENERIC functions-as-trees
   tree representation into the GIMPLE form.
   Copyright (C) 2002-2016 Free Software Foundation, Inc.
   Major work done by Sebastian Pop <s.pop@laposte.net>,
   Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "gimple-predict.h"
#include "tree-pass.h"  /* FIXME: only for PROP_gimple_any */
#include "ssa.h"
#include "cgraph.h"
#include "tree-pretty-print.h"
#include "diagnostic-core.h"
#include "alias.h"
#include "fold-const.h"
#include "calls.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "tree-cfg.h"
#include "tree-ssa.h"
#include "omp-low.h"
#include "gimple-low.h"
#include "cilk.h"
#include "gomp-constants.h"
#include "tree-dump.h"
#include "gimple-walk.h"
#include "langhooks-def.h"  /* FIXME: for lhd_set_decl_assembler_name */
#include "builtins.h"

enum gimplify_omp_var_data
{
  GOVD_SEEN = 1,
  GOVD_EXPLICIT = 2,
  GOVD_SHARED = 4,
  GOVD_PRIVATE = 8,
  GOVD_FIRSTPRIVATE = 16,
  GOVD_LASTPRIVATE = 32,
  GOVD_REDUCTION = 64,
  GOVD_LOCAL = 128,
  GOVD_MAP = 256,
  GOVD_DEBUG_PRIVATE = 512,
  GOVD_PRIVATE_OUTER_REF = 1024,
  GOVD_LINEAR = 2048,
  GOVD_ALIGNED = 4096,

  /* Flag for GOVD_MAP: don't copy back.  */
  GOVD_MAP_TO_ONLY = 8192,

  /* Flag for GOVD_LINEAR or GOVD_LASTPRIVATE: no outer reference.  */
  GOVD_LINEAR_LASTPRIVATE_NO_OUTER = 16384,

  GOVD_MAP_0LEN_ARRAY = 32768,

  /* Flag for GOVD_MAP: an "always, to" or "always, tofrom" mapping.  */
  GOVD_MAP_ALWAYS_TO = 65536,

  /* Flag for shared vars that are or might be stored to in the region.  */
  GOVD_WRITTEN = 131072,

  /* Flag for GOVD_MAP, if it is a forced mapping.  */
  GOVD_MAP_FORCE = 262144,

  GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
                           | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LINEAR
                           | GOVD_LOCAL)
};
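
/* Editorial sketch (not part of the original source): the GOVD_* values
   form a bitmask, so a variable's entry combines one data-sharing class
   bit with modifier flags.  A minimal, hypothetical illustration:  */
#if 0
/* A mapped variable that must not be copied back out of the region.  */
unsigned int example_flags = GOVD_MAP | GOVD_MAP_TO_ONLY | GOVD_SEEN;
/* Does EXAMPLE_FLAGS carry an explicit data-sharing class?  */
bool example_has_class = (example_flags & GOVD_DATA_SHARE_CLASS) != 0;
#endif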


enum omp_region_type
{
  ORT_WORKSHARE = 0x00,
  ORT_SIMD = 0x01,

  ORT_PARALLEL = 0x02,
  ORT_COMBINED_PARALLEL = 0x03,

  ORT_TASK = 0x04,
  ORT_UNTIED_TASK = 0x05,

  ORT_TEAMS = 0x08,
  ORT_COMBINED_TEAMS = 0x09,

  /* Data region.  */
  ORT_TARGET_DATA = 0x10,

  /* Data region with offloading.  */
  ORT_TARGET = 0x20,
  ORT_COMBINED_TARGET = 0x21,

  /* OpenACC variants.  */
  ORT_ACC = 0x40,  /* A generic OpenACC region.  */
  ORT_ACC_DATA = ORT_ACC | ORT_TARGET_DATA,  /* Data construct.  */
  ORT_ACC_PARALLEL = ORT_ACC | ORT_TARGET,  /* Parallel construct.  */
  ORT_ACC_KERNELS = ORT_ACC | ORT_TARGET | 0x80,  /* Kernels construct.  */
  ORT_ACC_HOST_DATA = ORT_ACC | ORT_TARGET_DATA | 0x80,  /* Host data.  */

  /* Dummy OpenMP region, used to disable expansion of
     DECL_VALUE_EXPRs in taskloop pre body.  */
  ORT_NONE = 0x100
};
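
/* Editorial sketch (not part of the original source): ORT_* values are
   likewise bit-encoded, so families of regions can be tested with a mask
   rather than by listing enumerators.  The function name is hypothetical.  */
#if 0
static bool
example_is_acc_region (enum omp_region_type ort)
{
  /* Every OpenACC enumerator above carries the ORT_ACC bit.  */
  return (ort & ORT_ACC) != 0;
}
#endif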

/* Gimplify hashtable helper.  */

struct gimplify_hasher : free_ptr_hash <elt_t>
{
  static inline hashval_t hash (const elt_t *);
  static inline bool equal (const elt_t *, const elt_t *);
};

struct gimplify_ctx
{
  struct gimplify_ctx *prev_context;

  vec<gbind *> bind_expr_stack;
  tree temps;
  gimple_seq conditional_cleanups;
  tree exit_label;
  tree return_temp;

  vec<tree> case_labels;
  /* The formal temporary table.  Should this be persistent?  */
  hash_table<gimplify_hasher> *temp_htab;

  int conditions;
  unsigned into_ssa : 1;
  unsigned allow_rhs_cond_expr : 1;
  unsigned in_cleanup_point_expr : 1;
  unsigned keep_stack : 1;
  unsigned save_stack : 1;
};

struct gimplify_omp_ctx
{
  struct gimplify_omp_ctx *outer_context;
  splay_tree variables;
  hash_set<tree> *privatized_types;
  /* Iteration variables in an OMP_FOR.  */
  vec<tree> loop_iter_var;
  location_t location;
  enum omp_clause_default_kind default_kind;
  enum omp_region_type region_type;
  bool combined_loop;
  bool distribute;
  bool target_map_scalars_firstprivate;
  bool target_map_pointers_as_0len_arrays;
  bool target_firstprivatize_array_bases;
};

static struct gimplify_ctx *gimplify_ctxp;
static struct gimplify_omp_ctx *gimplify_omp_ctxp;

/* Forward declaration.  */
static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
static hash_map<tree, tree> *oacc_declare_returns;

/* Shorter alias for gimple_seq_add_stmt_without_update, for use in
   gimplify.c only.  */

static inline void
gimplify_seq_add_stmt (gimple_seq *seq_p, gimple *gs)
{
  gimple_seq_add_stmt_without_update (seq_p, gs);
}

/* Append sequence SRC to the end of sequence *DST_P.  If *DST_P is
   NULL, a new sequence is allocated.  This function is
   similar to gimple_seq_add_seq, but does not scan the operands.
   During gimplification, we need to manipulate statement sequences
   before the def/use vectors have been constructed.  */

static void
gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
{
  gimple_stmt_iterator si;

  if (src == NULL)
    return;

  si = gsi_last (*dst_p);
  gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
}
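
/* Editorial sketch (not part of the original source): typical use of the
   two helpers above while lowering a GENERIC statement into an output
   sequence.  The function name is hypothetical.  */
#if 0
static void
example_lower_stmt (tree stmt, gimple_seq *pre_p)
{
  gimple_seq inner = NULL;

  /* Gimplify STMT into a private sequence first...  */
  gimplify_stmt (&stmt, &inner);
  /* ...then splice it onto *PRE_P without rescanning operands, since
     def/use information does not exist yet at this stage.  */
  gimplify_seq_add_seq (pre_p, inner);
}
#endif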


/* Pointer to a list of allocated gimplify_ctx structs to be used for pushing
   and popping gimplify contexts.  */

static struct gimplify_ctx *ctx_pool = NULL;

/* Return a gimplify context struct from the pool.  */

static inline struct gimplify_ctx *
ctx_alloc (void)
{
  struct gimplify_ctx * c = ctx_pool;

  if (c)
    ctx_pool = c->prev_context;
  else
    c = XNEW (struct gimplify_ctx);

  memset (c, '\0', sizeof (*c));
  return c;
}

/* Put gimplify context C back into the pool.  */

static inline void
ctx_free (struct gimplify_ctx *c)
{
  c->prev_context = ctx_pool;
  ctx_pool = c;
}

/* Free allocated ctx stack memory.  */

void
free_gimplify_stack (void)
{
  struct gimplify_ctx *c;

  while ((c = ctx_pool))
    {
      ctx_pool = c->prev_context;
      free (c);
    }
}


/* Set up a context for the gimplifier.  */

void
push_gimplify_context (bool in_ssa, bool rhs_cond_ok)
{
  struct gimplify_ctx *c = ctx_alloc ();

  c->prev_context = gimplify_ctxp;
  gimplify_ctxp = c;
  gimplify_ctxp->into_ssa = in_ssa;
  gimplify_ctxp->allow_rhs_cond_expr = rhs_cond_ok;
}

/* Tear down a context for the gimplifier.  If BODY is non-null, then
   put the temporaries into the outer BIND_EXPR.  Otherwise, put them
   in the local_decls.

   BODY is not a sequence, but the first tuple in a sequence.  */

void
pop_gimplify_context (gimple *body)
{
  struct gimplify_ctx *c = gimplify_ctxp;

  gcc_assert (c
              && (!c->bind_expr_stack.exists ()
                  || c->bind_expr_stack.is_empty ()));
  c->bind_expr_stack.release ();
  gimplify_ctxp = c->prev_context;

  if (body)
    declare_vars (c->temps, body, false);
  else
    record_vars (c->temps);

  delete c->temp_htab;
  c->temp_htab = NULL;
  ctx_free (c);
}
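
/* Editorial sketch (not part of the original source): how a caller
   brackets a gimplification job with the push/pop pair above.  BODY
   stands for the outermost GIMPLE_BIND produced for the function; the
   function name is hypothetical.  */
#if 0
static void
example_gimplify_standalone (tree expr, gimple *body)
{
  gimple_seq seq = NULL;

  push_gimplify_context (/*in_ssa=*/false, /*rhs_cond_ok=*/false);
  gimplify_and_add (expr, &seq);
  /* Temporaries created in between are declared in BODY.  */
  pop_gimplify_context (body);
}
#endif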

/* Push a GIMPLE_BIND tuple onto the stack of bindings.  */

static void
gimple_push_bind_expr (gbind *bind_stmt)
{
  gimplify_ctxp->bind_expr_stack.reserve (8);
  gimplify_ctxp->bind_expr_stack.safe_push (bind_stmt);
}

/* Pop the topmost element off the stack of bindings.  */

static void
gimple_pop_bind_expr (void)
{
  gimplify_ctxp->bind_expr_stack.pop ();
}

/* Return the topmost (innermost) element of the stack of bindings.  */

gbind *
gimple_current_bind_expr (void)
{
  return gimplify_ctxp->bind_expr_stack.last ();
}

/* Return the stack of bindings created during gimplification.  */

vec<gbind *>
gimple_bind_expr_stack (void)
{
  return gimplify_ctxp->bind_expr_stack;
}

/* Return true iff there is a COND_EXPR between us and the innermost
   CLEANUP_POINT_EXPR.  This info is used by gimple_push_cleanup.  */

static bool
gimple_conditional_context (void)
{
  return gimplify_ctxp->conditions > 0;
}

/* Note that we've entered a COND_EXPR.  */

static void
gimple_push_condition (void)
{
#ifdef ENABLE_GIMPLE_CHECKING
  if (gimplify_ctxp->conditions == 0)
    gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
#endif
  ++(gimplify_ctxp->conditions);
}

/* Note that we've left a COND_EXPR.  If we're back at unconditional scope
   now, add any conditional cleanups we've seen to the prequeue.  */

static void
gimple_pop_condition (gimple_seq *pre_p)
{
  int conds = --(gimplify_ctxp->conditions);

  gcc_assert (conds >= 0);
  if (conds == 0)
    {
      gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
      gimplify_ctxp->conditional_cleanups = NULL;
    }
}

/* A stable comparison routine for use with splay trees and DECLs.  */

static int
splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
{
  tree a = (tree) xa;
  tree b = (tree) xb;

  return DECL_UID (a) - DECL_UID (b);
}

/* Create a new omp construct that deals with variable remapping.  */

static struct gimplify_omp_ctx *
new_omp_context (enum omp_region_type region_type)
{
  struct gimplify_omp_ctx *c;

  c = XCNEW (struct gimplify_omp_ctx);
  c->outer_context = gimplify_omp_ctxp;
  c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
  c->privatized_types = new hash_set<tree>;
  c->location = input_location;
  c->region_type = region_type;
  if ((region_type & ORT_TASK) == 0)
    c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
  else
    c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;

  return c;
}

/* Destroy an omp construct that deals with variable remapping.  */

static void
delete_omp_context (struct gimplify_omp_ctx *c)
{
  splay_tree_delete (c->variables);
  delete c->privatized_types;
  c->loop_iter_var.release ();
  XDELETE (c);
}

static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);

/* Both gimplify the statement T and append it to *SEQ_P.  This function
   behaves exactly as gimplify_stmt, but you don't have to pass T as a
   reference.  */

void
gimplify_and_add (tree t, gimple_seq *seq_p)
{
  gimplify_stmt (&t, seq_p);
}

/* Gimplify statement T into sequence *SEQ_P, and return the first
   tuple in the sequence of generated tuples for this statement.
   Return NULL if gimplifying T produced no tuples.  */

static gimple *
gimplify_and_return_first (tree t, gimple_seq *seq_p)
{
  gimple_stmt_iterator last = gsi_last (*seq_p);

  gimplify_and_add (t, seq_p);

  if (!gsi_end_p (last))
    {
      gsi_next (&last);
      return gsi_stmt (last);
    }
  else
    return gimple_seq_first_stmt (*seq_p);
}

/* Returns true iff T is a valid RHS for an assignment to an un-renamed
   LHS, or for a call argument.  */

static bool
is_gimple_mem_rhs (tree t)
{
  /* If we're dealing with a renamable type, either source or dest must be
     a renamed variable.  */
  if (is_gimple_reg_type (TREE_TYPE (t)))
    return is_gimple_val (t);
  else
    return is_gimple_val (t) || is_gimple_lvalue (t);
}

/* Return true if T is a CALL_EXPR or an expression that can be
   assigned to a temporary.  Note that this predicate should only be
   used during gimplification.  See the rationale for this in
   gimplify_modify_expr.  */

static bool
is_gimple_reg_rhs_or_call (tree t)
{
  return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
          || TREE_CODE (t) == CALL_EXPR);
}

/* Return true if T is a valid memory RHS or a CALL_EXPR.  Note that
   this predicate should only be used during gimplification.  See the
   rationale for this in gimplify_modify_expr.  */

static bool
is_gimple_mem_rhs_or_call (tree t)
{
  /* If we're dealing with a renamable type, either source or dest must be
     a renamed variable.  */
  if (is_gimple_reg_type (TREE_TYPE (t)))
    return is_gimple_val (t);
  else
    return (is_gimple_val (t) || is_gimple_lvalue (t)
            || TREE_CODE (t) == CALL_EXPR);
}

/* Create a temporary with a name derived from VAL.  Subroutine of
   lookup_tmp_var; nobody else should call this function.  */

static inline tree
create_tmp_from_val (tree val)
{
  /* Drop all qualifiers and address-space information from the value type.  */
  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (val));
  tree var = create_tmp_var (type, get_name (val));
  if (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
      || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE)
    DECL_GIMPLE_REG_P (var) = 1;
  return var;
}

/* Create a temporary to hold the value of VAL.  If IS_FORMAL, try to reuse
   an existing expression temporary.  */

static tree
lookup_tmp_var (tree val, bool is_formal)
{
  tree ret;

  /* If not optimizing, never really reuse a temporary.  local-alloc
     won't allocate any variable that is used in more than one basic
     block, which means it will go into memory, causing much extra
     work in reload and final and poorer code generation, outweighing
     the extra memory allocation here.  */
  if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
    ret = create_tmp_from_val (val);
  else
    {
      elt_t elt, *elt_p;
      elt_t **slot;

      elt.val = val;
      if (!gimplify_ctxp->temp_htab)
        gimplify_ctxp->temp_htab = new hash_table<gimplify_hasher> (1000);
      slot = gimplify_ctxp->temp_htab->find_slot (&elt, INSERT);
      if (*slot == NULL)
        {
          elt_p = XNEW (elt_t);
          elt_p->val = val;
          elt_p->temp = ret = create_tmp_from_val (val);
          *slot = elt_p;
        }
      else
        {
          elt_p = *slot;
          ret = elt_p->temp;
        }
    }

  return ret;
}
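
/* Editorial sketch (not part of the original source): when optimizing,
   two formal-temp lookups for the same side-effect-free value map to the
   same temporary through gimplify_ctxp->temp_htab.  The function name is
   hypothetical.  */
#if 0
static void
example_formal_temp_reuse (tree val)
{
  tree t1 = lookup_tmp_var (val, /*is_formal=*/true);
  tree t2 = lookup_tmp_var (val, /*is_formal=*/true);
  /* Reuse happens only in the optimizing, side-effect-free case.  */
  gcc_assert (!optimize || TREE_SIDE_EFFECTS (val) || t1 == t2);
}
#endif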

/* Helper for get_formal_tmp_var and get_initialized_tmp_var.  */

static tree
internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
                      bool is_formal)
{
  tree t, mod;

  /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
     can create an INIT_EXPR and convert it into a GIMPLE_CALL below.  */
  gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
                 fb_rvalue);

  if (gimplify_ctxp->into_ssa
      && is_gimple_reg_type (TREE_TYPE (val)))
    t = make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val)));
  else
    t = lookup_tmp_var (val, is_formal);

  mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));

  SET_EXPR_LOCATION (mod, EXPR_LOC_OR_LOC (val, input_location));

  /* gimplify_modify_expr might want to reduce this further.  */
  gimplify_and_add (mod, pre_p);
  ggc_free (mod);

  return t;
}

/* Return a formal temporary variable initialized with VAL.  PRE_P is as
   in gimplify_expr.  Only use this function if:

   1) The value of the unfactored expression represented by VAL will not
      change between the initialization and use of the temporary, and
   2) The temporary will not be otherwise modified.

   For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
   and #2 means it is inappropriate for && temps.

   For other cases, use get_initialized_tmp_var instead.  */

tree
get_formal_tmp_var (tree val, gimple_seq *pre_p)
{
  return internal_get_tmp_var (val, pre_p, NULL, true);
}

/* Return a temporary variable initialized with VAL.  PRE_P and POST_P
   are as in gimplify_expr.  */

tree
get_initialized_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p)
{
  return internal_get_tmp_var (val, pre_p, post_p, false);
}
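
/* Editorial sketch (not part of the original source): choosing between
   the two entry points above for an rvalue EXPR.  The function name and
   the VALUE_IS_STABLE parameter are hypothetical.  */
#if 0
static tree
example_make_temp (tree expr, gimple_seq *pre_p, bool value_is_stable)
{
  if (value_is_stable)
    /* May be shared with other uses of the same expression.  */
    return get_formal_tmp_var (expr, pre_p);
  else
    /* A fresh temporary that may later be written to.  */
    return get_initialized_tmp_var (expr, pre_p, NULL);
}
#endif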

/* Declare all the variables in VARS in SCOPE.  If DEBUG_INFO is true,
   generate debug info for them; otherwise don't.  */

void
declare_vars (tree vars, gimple *gs, bool debug_info)
{
  tree last = vars;
  if (last)
    {
      tree temps, block;

      gbind *scope = as_a <gbind *> (gs);

      temps = nreverse (last);

      block = gimple_bind_block (scope);
      gcc_assert (!block || TREE_CODE (block) == BLOCK);
      if (!block || !debug_info)
        {
          DECL_CHAIN (last) = gimple_bind_vars (scope);
          gimple_bind_set_vars (scope, temps);
        }
      else
        {
          /* We need to attach the nodes both to the BIND_EXPR and to its
             associated BLOCK for debugging purposes.  The key point here
             is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
             is a subchain of the BIND_EXPR_VARS of the BIND_EXPR.  */
          if (BLOCK_VARS (block))
            BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
          else
            {
              gimple_bind_set_vars (scope,
                                    chainon (gimple_bind_vars (scope), temps));
              BLOCK_VARS (block) = temps;
            }
        }
    }
}

/* For VAR a VAR_DECL of variable size, try to find a constant upper bound
   for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly.  Abort if
   no such upper bound can be obtained.  */

static void
force_constant_size (tree var)
{
  /* The only attempt we make is by querying the maximum size of objects
     of the variable's type.  */

  HOST_WIDE_INT max_size;

  gcc_assert (TREE_CODE (var) == VAR_DECL);

  max_size = max_int_size_in_bytes (TREE_TYPE (var));

  gcc_assert (max_size >= 0);

  DECL_SIZE_UNIT (var)
    = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
  DECL_SIZE (var)
    = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
}

/* Push the temporary variable TMP into the current binding.  */

void
gimple_add_tmp_var_fn (struct function *fn, tree tmp)
{
  gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));

  /* Later processing assumes that the object size is constant, which might
     not be true at this point.  Force the use of a constant upper bound in
     this case.  */
  if (!tree_fits_uhwi_p (DECL_SIZE_UNIT (tmp)))
    force_constant_size (tmp);

  DECL_CONTEXT (tmp) = fn->decl;
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;

  record_vars_into (tmp, fn->decl);
}

/* Push the temporary variable TMP into the current binding.  */

void
gimple_add_tmp_var (tree tmp)
{
  gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));

  /* Later processing assumes that the object size is constant, which might
     not be true at this point.  Force the use of a constant upper bound in
     this case.  */
  if (!tree_fits_uhwi_p (DECL_SIZE_UNIT (tmp)))
    force_constant_size (tmp);

  DECL_CONTEXT (tmp) = current_function_decl;
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;

  if (gimplify_ctxp)
    {
      DECL_CHAIN (tmp) = gimplify_ctxp->temps;
      gimplify_ctxp->temps = tmp;

      /* Mark temporaries local within the nearest enclosing parallel.  */
      if (gimplify_omp_ctxp)
        {
          struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
          while (ctx
                 && (ctx->region_type == ORT_WORKSHARE
                     || ctx->region_type == ORT_SIMD
                     || ctx->region_type == ORT_ACC))
            ctx = ctx->outer_context;
          if (ctx)
            omp_add_variable (ctx, tmp, GOVD_LOCAL | GOVD_SEEN);
        }
    }
  else if (cfun)
    record_vars (tmp);
  else
    {
      gimple_seq body_seq;

      /* This case is for nested functions.  We need to expose the locals
         they create.  */
      body_seq = gimple_body (current_function_decl);
      declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
    }
}


\f
/* This page contains routines to unshare tree nodes, i.e. to duplicate tree
   nodes that are referenced more than once in GENERIC functions.  This is
   necessary because gimplification (translation into GIMPLE) is performed
   by modifying tree nodes in-place, so gimplification of a shared node in a
   first context could generate an invalid GIMPLE form in a second context.

   This is achieved with a simple mark/copy/unmark algorithm that walks the
   GENERIC representation top-down, marks nodes with TREE_VISITED the first
   time it encounters them, duplicates them if they already have TREE_VISITED
   set, and finally removes the TREE_VISITED marks it has set.

   The algorithm works only at the function level, i.e. it generates a GENERIC
   representation of a function with no nodes shared within the function when
   passed a GENERIC function (except for nodes that are allowed to be shared).

   At the global level, it is also necessary to unshare tree nodes that are
   referenced in more than one function, for the same aforementioned reason.
   This requires some cooperation from the front-end.  There are 2 strategies:

   1. Manual unsharing.  The front-end needs to call unshare_expr on every
      expression that might end up being shared across functions.

   2. Deep unsharing.  This is an extension of regular unsharing.  Instead
      of calling unshare_expr on expressions that might be shared across
      functions, the front-end pre-marks them with TREE_VISITED.  This will
      ensure that they are unshared on the first reference within functions
      when the regular unsharing algorithm runs.  The counterpart is that
      this algorithm must look deeper than for manual unsharing, which is
      specified by LANG_HOOKS_DEEP_UNSHARING.

   If there are only few specific cases of node sharing across functions, it is
   probably easier for a front-end to unshare the expressions manually.  On the
   contrary, if the expressions generated at the global level are as widespread
   as expressions generated within functions, deep unsharing is very likely the
   way to go.  */
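
/* Editorial sketch (not part of the original source): unshare_expr,
   defined below, copies unconditionally, which makes a tree that reuses
   a node safe to gimplify in place.  The function name is hypothetical.  */
#if 0
static tree
example_unshare (tree op)
{
  /* Both operands of SUM point at the same node OP; gimplifying one
     reference in place could corrupt the other.  */
  tree sum = build2 (PLUS_EXPR, TREE_TYPE (op), op, op);

  /* Afterwards every node reachable from the result is a fresh copy,
     except for nodes that are allowed to be shared.  */
  return unshare_expr (sum);
}
#endif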

/* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
   These nodes model computations that must be done once.  If we were to
   unshare something like SAVE_EXPR(i++), the gimplification process would
   create wrong code.  However, if DATA is non-null, it must hold a pointer
   set that is used to unshare the subtrees of these nodes.  */

static tree
mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
     copy their subtrees if we can make sure to do it only once.  */
  if (code == SAVE_EXPR || code == TARGET_EXPR || code == BIND_EXPR)
    {
      if (data && !((hash_set<tree> *)data)->add (t))
        ;
      else
        *walk_subtrees = 0;
    }

  /* Stop at types, decls, constants like copy_tree_r.  */
  else if (TREE_CODE_CLASS (code) == tcc_type
           || TREE_CODE_CLASS (code) == tcc_declaration
           || TREE_CODE_CLASS (code) == tcc_constant
           /* We can't do anything sensible with a BLOCK used as an
              expression, but we also can't just die when we see it
              because of non-expression uses.  So we avert our eyes
              and cross our fingers.  Silly Java.  */
           || code == BLOCK)
    *walk_subtrees = 0;

  /* Cope with the statement expression extension.  */
  else if (code == STATEMENT_LIST)
    ;

  /* Leave the bulk of the work to copy_tree_r itself.  */
  else
    copy_tree_r (tp, walk_subtrees, NULL);

  return NULL_TREE;
}

/* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
   If *TP has been visited already, then *TP is deeply copied by calling
   mostly_copy_tree_r.  DATA is passed to mostly_copy_tree_r unmodified.  */

static tree
copy_if_shared_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Skip types, decls, and constants.  But we do want to look at their
     types and the bounds of types.  Mark them as visited so we properly
     unmark their subtrees on the unmark pass.  If we've already seen them,
     don't look down further.  */
  if (TREE_CODE_CLASS (code) == tcc_type
      || TREE_CODE_CLASS (code) == tcc_declaration
      || TREE_CODE_CLASS (code) == tcc_constant)
    {
      if (TREE_VISITED (t))
        *walk_subtrees = 0;
      else
        TREE_VISITED (t) = 1;
    }

  /* If this node has been visited already, unshare it and don't look
     any deeper.  */
  else if (TREE_VISITED (t))
    {
      walk_tree (tp, mostly_copy_tree_r, data, NULL);
      *walk_subtrees = 0;
    }

  /* Otherwise, mark the node as visited and keep looking.  */
  else
    TREE_VISITED (t) = 1;

  return NULL_TREE;
}

/* Unshare most of the shared trees rooted at *TP.  DATA is passed to the
   copy_if_shared_r callback unmodified.  */

static inline void
copy_if_shared (tree *tp, void *data)
{
  walk_tree (tp, copy_if_shared_r, data, NULL);
}

/* Unshare all the trees in the body of FNDECL, as well as in the bodies of
   any nested functions.  */

static void
unshare_body (tree fndecl)
{
  struct cgraph_node *cgn = cgraph_node::get (fndecl);
  /* If the language requires deep unsharing, we need a pointer set to make
     sure we don't repeatedly unshare subtrees of unshareable nodes.  */
  hash_set<tree> *visited
    = lang_hooks.deep_unsharing ? new hash_set<tree> : NULL;

  copy_if_shared (&DECL_SAVED_TREE (fndecl), visited);
  copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl)), visited);
  copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)), visited);

  delete visited;

  if (cgn)
    for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
      unshare_body (cgn->decl);
}

/* Callback for walk_tree to unmark the visited trees rooted at *TP.
   Subtrees are walked until the first unvisited node is encountered.  */

static tree
unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;

  /* If this node has been visited, unmark it and keep looking.  */
  if (TREE_VISITED (t))
    TREE_VISITED (t) = 0;

  /* Otherwise, don't look any deeper.  */
  else
    *walk_subtrees = 0;

  return NULL_TREE;
}

/* Unmark the visited trees rooted at *TP.  */

static inline void
unmark_visited (tree *tp)
{
  walk_tree (tp, unmark_visited_r, NULL, NULL);
}

/* Likewise, but mark all trees as not visited.  */

static void
unvisit_body (tree fndecl)
{
  struct cgraph_node *cgn = cgraph_node::get (fndecl);

  unmark_visited (&DECL_SAVED_TREE (fndecl));
  unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl)));
  unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)));

  if (cgn)
    for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
      unvisit_body (cgn->decl);
}

/* Unconditionally make an unshared copy of EXPR.  This is used when using
   stored expressions which span multiple functions, such as BINFO_VTABLE,
   as the normal unsharing process can't tell that they're shared.  */

tree
unshare_expr (tree expr)
{
  walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
  return expr;
}

/* Worker for unshare_expr_without_location.  */

static tree
prune_expr_location (tree *tp, int *walk_subtrees, void *)
{
  if (EXPR_P (*tp))
    SET_EXPR_LOCATION (*tp, UNKNOWN_LOCATION);
  else
    *walk_subtrees = 0;
  return NULL_TREE;
}

/* Similar to unshare_expr but also prune all expression locations
   from EXPR.  */

tree
unshare_expr_without_location (tree expr)
{
  walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
  if (EXPR_P (expr))
    walk_tree (&expr, prune_expr_location, NULL, NULL);
  return expr;
}
\f
/* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
   contain statements and have a value.  Assign its value to a temporary
   and give it void_type_node.  Return the temporary, or NULL_TREE if
   WRAPPER was already void.  */

tree
voidify_wrapper_expr (tree wrapper, tree temp)
{
  tree type = TREE_TYPE (wrapper);
  if (type && !VOID_TYPE_P (type))
    {
      tree *p;

      /* Set p to point to the body of the wrapper.  Loop until we find
         something that isn't a wrapper.  */
      for (p = &wrapper; p && *p; )
        {
          switch (TREE_CODE (*p))
            {
            case BIND_EXPR:
              TREE_SIDE_EFFECTS (*p) = 1;
              TREE_TYPE (*p) = void_type_node;
              /* For a BIND_EXPR, the body is operand 1.  */
              p = &BIND_EXPR_BODY (*p);
              break;

            case CLEANUP_POINT_EXPR:
            case TRY_FINALLY_EXPR:
            case TRY_CATCH_EXPR:
              TREE_SIDE_EFFECTS (*p) = 1;
              TREE_TYPE (*p) = void_type_node;
              p = &TREE_OPERAND (*p, 0);
              break;

            case STATEMENT_LIST:
              {
                tree_stmt_iterator i = tsi_last (*p);
                TREE_SIDE_EFFECTS (*p) = 1;
                TREE_TYPE (*p) = void_type_node;
                p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
              }
              break;

            case COMPOUND_EXPR:
              /* Advance to the last statement.  Set all container types to
                 void.  */
              for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
                {
                  TREE_SIDE_EFFECTS (*p) = 1;
                  TREE_TYPE (*p) = void_type_node;
                }
              break;

            case TRANSACTION_EXPR:
              TREE_SIDE_EFFECTS (*p) = 1;
              TREE_TYPE (*p) = void_type_node;
              p = &TRANSACTION_EXPR_BODY (*p);
              break;

            default:
              /* Assume that any tree upon which voidify_wrapper_expr is
                 directly called is a wrapper, and that its body is op0.  */
              if (p == &wrapper)
                {
                  TREE_SIDE_EFFECTS (*p) = 1;
                  TREE_TYPE (*p) = void_type_node;
                  p = &TREE_OPERAND (*p, 0);
                  break;
                }
              goto out;
            }
        }

    out:
      if (p == NULL || IS_EMPTY_STMT (*p))
        temp = NULL_TREE;
      else if (temp)
        {
          /* The wrapper is on the RHS of an assignment that we're pushing
             down.  */
          gcc_assert (TREE_CODE (temp) == INIT_EXPR
                      || TREE_CODE (temp) == MODIFY_EXPR);
          TREE_OPERAND (temp, 1) = *p;
          *p = temp;
        }
      else
        {
          temp = create_tmp_var (type, "retval");
          *p = build2 (INIT_EXPR, type, temp, *p);
        }

      return temp;
    }

  return NULL_TREE;
}
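
/* Editorial example (not part of the original source), shown in GENERIC
   form: given a BIND_EXPR used for its value,

     x = BIND_EXPR <int> { int t; ...; t; }

   voidify_wrapper_expr pushes the pending assignment down onto the last
   value-producing statement and gives every wrapper void type:

     BIND_EXPR <void> { int t; ...; x = t; }

   When no assignment is supplied, a "retval" temporary is created and
   returned to the caller instead.  */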

/* Prepare calls to builtins to SAVE and RESTORE the stack as well as
   a temporary through which they communicate.  */

static void
build_stack_save_restore (gcall **save, gcall **restore)
{
  tree tmp_var;

  *save = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE), 0);
  tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
  gimple_call_set_lhs (*save, tmp_var);

  *restore
    = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE),
                         1, tmp_var);
}

/* Gimplify a BIND_EXPR.  Just voidify and recurse.  */

static enum gimplify_status
gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree bind_expr = *expr_p;
  bool old_keep_stack = gimplify_ctxp->keep_stack;
  bool old_save_stack = gimplify_ctxp->save_stack;
  tree t;
  gbind *bind_stmt;
  gimple_seq body, cleanup;
  gcall *stack_save;
  location_t start_locus = 0, end_locus = 0;
  tree ret_clauses = NULL;

  tree temp = voidify_wrapper_expr (bind_expr, NULL);

  /* Mark variables seen in this bind expr.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
    {
      if (TREE_CODE (t) == VAR_DECL)
        {
          struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;

          /* Mark variable as local.  */
          if (ctx && ctx->region_type != ORT_NONE && !DECL_EXTERNAL (t)
              && (! DECL_SEEN_IN_BIND_EXPR_P (t)
                  || splay_tree_lookup (ctx->variables,
                                        (splay_tree_key) t) == NULL))
            {
              if (ctx->region_type == ORT_SIMD
                  && TREE_ADDRESSABLE (t)
                  && !TREE_STATIC (t))
                omp_add_variable (ctx, t, GOVD_PRIVATE | GOVD_SEEN);
              else
                omp_add_variable (ctx, t, GOVD_LOCAL | GOVD_SEEN);
            }

          DECL_SEEN_IN_BIND_EXPR_P (t) = 1;

          if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
            cfun->has_local_explicit_reg_vars = true;
        }

      /* Preliminarily mark non-addressed complex variables as eligible
         for promotion to gimple registers.  We'll transform their uses
         as we find them.  */
      if ((TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
           || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
          && !TREE_THIS_VOLATILE (t)
          && (TREE_CODE (t) == VAR_DECL && !DECL_HARD_REGISTER (t))
          && !needs_to_live_in_memory (t))
        DECL_GIMPLE_REG_P (t) = 1;
    }

  bind_stmt = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
                                 BIND_EXPR_BLOCK (bind_expr));
  gimple_push_bind_expr (bind_stmt);

  gimplify_ctxp->keep_stack = false;
  gimplify_ctxp->save_stack = false;

  /* Gimplify the body into the GIMPLE_BIND tuple's body.  */
  body = NULL;
  gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
  gimple_bind_set_body (bind_stmt, body);

  /* Source location wise, the cleanup code (stack_restore and clobbers)
     belongs to the end of the block, so propagate what we have.  The
     stack_save operation belongs to the beginning of block, which we can
     infer from the bind_expr directly if the block has no explicit
     assignment.  */
  if (BIND_EXPR_BLOCK (bind_expr))
    {
      end_locus = BLOCK_SOURCE_END_LOCATION (BIND_EXPR_BLOCK (bind_expr));
      start_locus = BLOCK_SOURCE_LOCATION (BIND_EXPR_BLOCK (bind_expr));
    }
  if (start_locus == 0)
    start_locus = EXPR_LOCATION (bind_expr);

  cleanup = NULL;
  stack_save = NULL;

  /* If the code both contains VLAs and calls alloca, then we cannot reclaim
     the stack space allocated to the VLAs.  */
  if (gimplify_ctxp->save_stack && !gimplify_ctxp->keep_stack)
    {
      gcall *stack_restore;

      /* Save stack on entry and restore it on exit.  Add a try_finally
         block to achieve this.  */
      build_stack_save_restore (&stack_save, &stack_restore);

      gimple_set_location (stack_save, start_locus);
      gimple_set_location (stack_restore, end_locus);

      gimplify_seq_add_stmt (&cleanup, stack_restore);
    }

  /* Add clobbers for all variables that go out of scope.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
    {
      if (TREE_CODE (t) == VAR_DECL
          && !is_global_var (t)
          && DECL_CONTEXT (t) == current_function_decl
          && !DECL_HARD_REGISTER (t)
          && !TREE_THIS_VOLATILE (t)
          && !DECL_HAS_VALUE_EXPR_P (t)
          /* Only care for variables that have to be in memory.  Others
             will be rewritten into SSA names, hence moved to the top-level.  */
          && !is_gimple_reg (t)
          && flag_stack_reuse != SR_NONE)
        {
          tree clobber = build_constructor (TREE_TYPE (t), NULL);
          gimple *clobber_stmt;
          TREE_THIS_VOLATILE (clobber) = 1;
          clobber_stmt = gimple_build_assign (t, clobber);
          gimple_set_location (clobber_stmt, end_locus);
          gimplify_seq_add_stmt (&cleanup, clobber_stmt);

          if (flag_openacc && oacc_declare_returns != NULL)
            {
              tree *c = oacc_declare_returns->get (t);
              if (c != NULL)
                {
                  if (ret_clauses)
                    OMP_CLAUSE_CHAIN (*c) = ret_clauses;

                  ret_clauses = *c;

                  oacc_declare_returns->remove (t);

                  if (oacc_declare_returns->elements () == 0)
                    {
                      delete oacc_declare_returns;
                      oacc_declare_returns = NULL;
                    }
                }
            }
        }
    }

  if (ret_clauses)
    {
      gomp_target *stmt;
      gimple_stmt_iterator si = gsi_start (cleanup);

      stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
                                      ret_clauses);
      gsi_insert_seq_before_without_update (&si, stmt, GSI_NEW_STMT);
    }

  if (cleanup)
    {
      gtry *gs;
      gimple_seq new_body;

      new_body = NULL;
      gs = gimple_build_try (gimple_bind_body (bind_stmt), cleanup,
                             GIMPLE_TRY_FINALLY);

      if (stack_save)
        gimplify_seq_add_stmt (&new_body, stack_save);
      gimplify_seq_add_stmt (&new_body, gs);
      gimple_bind_set_body (bind_stmt, new_body);
    }

  /* keep_stack propagates all the way up to the outermost BIND_EXPR.  */
  if (!gimplify_ctxp->keep_stack)
    gimplify_ctxp->keep_stack = old_keep_stack;
  gimplify_ctxp->save_stack = old_save_stack;

  gimple_pop_bind_expr ();

  gimplify_seq_add_stmt (pre_p, bind_stmt);

  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL_TREE;
  return GS_ALL_DONE;
}

/* Gimplify a RETURN_EXPR.  If the expression to be returned is not a
   GIMPLE value, it is assigned to a new temporary and the statement is
   re-written to return the temporary.

   PRE_P points to the sequence where side effects that must happen before
   STMT should be stored.  */

static enum gimplify_status
gimplify_return_expr (tree stmt, gimple_seq *pre_p)
{
  greturn *ret;
  tree ret_expr = TREE_OPERAND (stmt, 0);
  tree result_decl, result;

  if (ret_expr == error_mark_node)
    return GS_ERROR;

  /* Implicit _Cilk_sync must be inserted right before any return statement
     if there is a _Cilk_spawn in the function.  If the user has provided a
     _Cilk_sync, the optimizer should remove this duplicate one.  */
  if (fn_contains_cilk_spawn_p (cfun))
    {
      tree impl_sync = build0 (CILK_SYNC_STMT, void_type_node);
      gimplify_and_add (impl_sync, pre_p);
    }

  if (!ret_expr
      || TREE_CODE (ret_expr) == RESULT_DECL
      || ret_expr == error_mark_node)
    {
      greturn *ret = gimple_build_return (ret_expr);
      gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
      gimplify_seq_add_stmt (pre_p, ret);
      return GS_ALL_DONE;
    }

  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
    result_decl = NULL_TREE;
  else
    {
      result_decl = TREE_OPERAND (ret_expr, 0);

      /* See through a return by reference.  */
      if (TREE_CODE (result_decl) == INDIRECT_REF)
        result_decl = TREE_OPERAND (result_decl, 0);

      gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
                   || TREE_CODE (ret_expr) == INIT_EXPR)
                  && TREE_CODE (result_decl) == RESULT_DECL);
    }

  /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
     Recall that aggregate_value_p is FALSE for any aggregate type that is
     returned in registers.  If we're returning values in registers, then
     we don't want to extend the lifetime of the RESULT_DECL, particularly
     across another call.  In addition, for those aggregates for which
     hard_function_value generates a PARALLEL, we'll die during normal
     expansion of structure assignments; there's special code in expand_return
     to handle this case that does not exist in expand_expr.  */
  if (!result_decl)
    result = NULL_TREE;
  else if (aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
    {
      if (TREE_CODE (DECL_SIZE (result_decl)) != INTEGER_CST)
        {
          if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl)))
            gimplify_type_sizes (TREE_TYPE (result_decl), pre_p);
          /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
             should be effectively allocated by the caller, i.e. all calls to
             this function must be subject to the Return Slot Optimization.  */
          gimplify_one_sizepos (&DECL_SIZE (result_decl), pre_p);
          gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl), pre_p);
        }
      result = result_decl;
    }
  else if (gimplify_ctxp->return_temp)
    result = gimplify_ctxp->return_temp;
  else
    {
      result = create_tmp_reg (TREE_TYPE (result_decl));

      /* ??? With complex control flow (usually involving abnormal edges),
         we can wind up warning about an uninitialized value for this.  Due
         to how this variable is constructed and initialized, this is never
         true.  Give up and never warn.  */
      TREE_NO_WARNING (result) = 1;

      gimplify_ctxp->return_temp = result;
    }

  /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
     Then gimplify the whole thing.  */
  if (result != result_decl)
    TREE_OPERAND (ret_expr, 0) = result;

  gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);

  ret = gimple_build_return (result);
  gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
  gimplify_seq_add_stmt (pre_p, ret);

  return GS_ALL_DONE;
}
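
/* Editorial example (not part of the original source): for a function
   returning int by value,

     return a + b;

   is lowered to something like

     D.1234 = a + b;
     return D.1234;

   where D.1234 is an illustrative name for the per-function temporary
   cached in gimplify_ctxp->return_temp, so all returns share it.  */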

/* Gimplify a variable-length array DECL.  */

static void
gimplify_vla_decl (tree decl, gimple_seq *seq_p)
{
  /* This is a variable-sized decl.  Simplify its size and mark it
     for deferred expansion.  */
  tree t, addr, ptr_type;

  gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
  gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);

  /* Don't mess with a DECL_VALUE_EXPR set by the front-end.  */
  if (DECL_HAS_VALUE_EXPR_P (decl))
    return;

  /* All occurrences of this decl in final gimplified code will be
     replaced by indirection.  Setting DECL_VALUE_EXPR does two
     things: First, it lets the rest of the gimplifier know what
     replacement to use.  Second, it lets the debug info know
     where to find the value.  */
  ptr_type = build_pointer_type (TREE_TYPE (decl));
  addr = create_tmp_var (ptr_type, get_name (decl));
  DECL_IGNORED_P (addr) = 0;
  t = build_fold_indirect_ref (addr);
  TREE_THIS_NOTRAP (t) = 1;
  SET_DECL_VALUE_EXPR (decl, t);
  DECL_HAS_VALUE_EXPR_P (decl) = 1;

  t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
  t = build_call_expr (t, 2, DECL_SIZE_UNIT (decl),
                       size_int (DECL_ALIGN (decl)));
  /* The call has been built for a variable-sized object.  */
  CALL_ALLOCA_FOR_VAR_P (t) = 1;
  t = fold_convert (ptr_type, t);
  t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);

  gimplify_and_add (t, seq_p);
}
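
/* Editorial example (not part of the original source): for a C99 VLA

     int a[n];

   the size expressions are gimplified into SEQ_P, A's DECL_VALUE_EXPR is
   set to *a.ptr, and the storage is obtained roughly as

     a.ptr = (int (*)[n]) __builtin_alloca_with_align (D.size, align);

   where "a.ptr" and "D.size" are illustrative names for the pointer
   temporary and the gimplified size, and "align" is DECL_ALIGN (a).  */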

/* A helper function to be called via walk_tree.  Mark all labels under *TP
   as being forced.  To be called for DECL_INITIAL of static variables.  */

static tree
force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  if (TYPE_P (*tp))
    *walk_subtrees = 0;
  if (TREE_CODE (*tp) == LABEL_DECL)
    FORCED_LABEL (*tp) = 1;

  return NULL_TREE;
}

/* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
   and initialization explicit.  */

static enum gimplify_status
gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
{
  tree stmt = *stmt_p;
  tree decl = DECL_EXPR_DECL (stmt);

  *stmt_p = NULL_TREE;

  if (TREE_TYPE (decl) == error_mark_node)
    return GS_ERROR;

  if ((TREE_CODE (decl) == TYPE_DECL
       || TREE_CODE (decl) == VAR_DECL)
      && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
    gimplify_type_sizes (TREE_TYPE (decl), seq_p);

  /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
     in case its size expressions contain problematic nodes like CALL_EXPR.  */
  if (TREE_CODE (decl) == TYPE_DECL
      && DECL_ORIGINAL_TYPE (decl)
      && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl)))
    gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl), seq_p);

  if (TREE_CODE (decl) == VAR_DECL && !DECL_EXTERNAL (decl))
    {
      tree init = DECL_INITIAL (decl);

      if (TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
          || (!TREE_STATIC (decl)
              && flag_stack_check == GENERIC_STACK_CHECK
              && compare_tree_int (DECL_SIZE_UNIT (decl),
                                   STACK_CHECK_MAX_VAR_SIZE) > 0))
        gimplify_vla_decl (decl, seq_p);

      /* Some front ends do not explicitly declare all anonymous
         artificial variables.  We compensate here by declaring the
         variables, though it would be better if the front ends would
         explicitly declare them.  */
      if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
          && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
        gimple_add_tmp_var (decl);

      if (init && init != error_mark_node)
        {
          if (!TREE_STATIC (decl))
            {
              DECL_INITIAL (decl) = NULL_TREE;
              init = build2 (INIT_EXPR, void_type_node, decl, init);
              gimplify_and_add (init, seq_p);
              ggc_free (init);
            }
          else
            /* We must still examine initializers for static variables
               as they may contain a label address.  */
            walk_tree (&init, force_labels_r, NULL, NULL);
        }
    }

  return GS_ALL_DONE;
}

/* Gimplify a LOOP_EXPR.  Normally this just involves gimplifying the body
   and replacing the LOOP_EXPR with goto, but if the loop contains an
   EXIT_EXPR, we need to append a label for it to jump to.  */

static enum gimplify_status
gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree saved_label = gimplify_ctxp->exit_label;
  tree start_label = create_artificial_label (UNKNOWN_LOCATION);

  gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));

  gimplify_ctxp->exit_label = NULL_TREE;

  gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);

  gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));

  if (gimplify_ctxp->exit_label)
    gimplify_seq_add_stmt (pre_p,
                           gimple_build_label (gimplify_ctxp->exit_label));

  gimplify_ctxp->exit_label = saved_label;

  *expr_p = NULL;
  return GS_ALL_DONE;
}
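
/* Editorial example (not part of the original source): LOOP_EXPR lowering
   in pseudo-GIMPLE,

     LOOP_EXPR <body>

   becomes

     start:
       body          (any EXIT_EXPR inside jumps to the exit label)
       goto start;
     exit:           (emitted only if some EXIT_EXPR created it)  */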

/* Gimplify a statement list onto a sequence.  These may be created either
   by an enlightened front-end, or by shortcut_cond_expr.  */

static enum gimplify_status
gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
{
  tree temp = voidify_wrapper_expr (*expr_p, NULL);

  tree_stmt_iterator i = tsi_start (*expr_p);

  while (!tsi_end_p (i))
    {
      gimplify_stmt (tsi_stmt_ptr (i), pre_p);
      tsi_delink (&i);
    }

  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  return GS_ALL_DONE;
}

\f
/* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
   branch to.  */

static enum gimplify_status
gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree switch_expr = *expr_p;
  gimple_seq switch_body_seq = NULL;
  enum gimplify_status ret;
  tree index_type = TREE_TYPE (switch_expr);
  if (index_type == NULL_TREE)
    index_type = TREE_TYPE (SWITCH_COND (switch_expr));

  ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
                       fb_rvalue);
  if (ret == GS_ERROR || ret == GS_UNHANDLED)
    return ret;

  if (SWITCH_BODY (switch_expr))
    {
      vec<tree> labels;
      vec<tree> saved_labels;
      tree default_case = NULL_TREE;
      gswitch *switch_stmt;

      /* If someone can be bothered to fill in the labels, they can
         be bothered to null out the body too.  */
      gcc_assert (!SWITCH_LABELS (switch_expr));

      /* Save old labels, get new ones from body, then restore the old
         labels.  Save all the things from the switch body to append after.  */
      saved_labels = gimplify_ctxp->case_labels;
      gimplify_ctxp->case_labels.create (8);

      gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);
      labels = gimplify_ctxp->case_labels;
      gimplify_ctxp->case_labels = saved_labels;

      preprocess_case_label_vec_for_gimple (labels, index_type,
                                            &default_case);

      if (!default_case)
        {
          glabel *new_default;

          default_case
            = build_case_label (NULL_TREE, NULL_TREE,
                                create_artificial_label (UNKNOWN_LOCATION));
          new_default = gimple_build_label (CASE_LABEL (default_case));
          gimplify_seq_add_stmt (&switch_body_seq, new_default);
        }

      switch_stmt = gimple_build_switch (SWITCH_COND (switch_expr),
                                         default_case, labels);
      gimplify_seq_add_stmt (pre_p, switch_stmt);
      gimplify_seq_add_seq (pre_p, switch_body_seq);
      labels.release ();
    }
  else
    gcc_assert (SWITCH_LABELS (switch_expr));

  return GS_ALL_DONE;
}

/* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P.  */

static enum gimplify_status
gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
{
  struct gimplify_ctx *ctxp;
  glabel *label_stmt;

  /* Invalid programs can play Duff's Device type games with, for example,
     #pragma omp parallel.  At least in the C front end, we don't
     detect such invalid branches until after gimplification, in the
     diagnose_omp_blocks pass.  */
  for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
    if (ctxp->case_labels.exists ())
      break;

  label_stmt = gimple_build_label (CASE_LABEL (*expr_p));
  ctxp->case_labels.safe_push (*expr_p);
  gimplify_seq_add_stmt (pre_p, label_stmt);

  return GS_ALL_DONE;
}

/* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
   if necessary.  */

tree
build_and_jump (tree *label_p)
{
  if (label_p == NULL)
    /* If there's nowhere to jump, just fall through.  */
    return NULL_TREE;

  if (*label_p == NULL_TREE)
    {
      tree label = create_artificial_label (UNKNOWN_LOCATION);
      *label_p = label;
    }

  return build1 (GOTO_EXPR, void_type_node, *label_p);
}

/* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
   This also involves building a label to jump to and communicating it to
   gimplify_loop_expr through gimplify_ctxp->exit_label.  */

static enum gimplify_status
gimplify_exit_expr (tree *expr_p)
{
  tree cond = TREE_OPERAND (*expr_p, 0);
  tree expr;

  expr = build_and_jump (&gimplify_ctxp->exit_label);
  expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
  *expr_p = expr;

  return GS_OK;
}
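
/* Editorial example (not part of the original source): EXIT_EXPR
   lowering,

     EXIT_EXPR <cond>

   becomes

     if (cond) goto exit_label;

   where exit_label is created on demand by build_and_jump and later
   emitted by the enclosing gimplify_loop_expr.  */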
1662
1663 /* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is
1664 different from its canonical type, wrap the whole thing inside a
1665 NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
1666 type.
1667
1668 The canonical type of a COMPONENT_REF is the type of the field being
1669 referenced--unless the field is a bit-field which can be read directly
1670 in a smaller mode, in which case the canonical type is the
1671 sign-appropriate type corresponding to that mode. */
1672
1673 static void
1674 canonicalize_component_ref (tree *expr_p)
1675 {
1676 tree expr = *expr_p;
1677 tree type;
1678
1679 gcc_assert (TREE_CODE (expr) == COMPONENT_REF);
1680
1681 if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
1682 type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
1683 else
1684 type = TREE_TYPE (TREE_OPERAND (expr, 1));
1685
1686 /* One could argue that all the stuff below is not necessary for
1687 the non-bitfield case and declare it a FE error if type
1688 adjustment would be needed. */
1689 if (TREE_TYPE (expr) != type)
1690 {
1691 #ifdef ENABLE_TYPES_CHECKING
1692 tree old_type = TREE_TYPE (expr);
1693 #endif
1694 int type_quals;
1695
1696 /* We need to preserve qualifiers and propagate them from
1697 operand 0. */
1698 type_quals = TYPE_QUALS (type)
1699 | TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
1700 if (TYPE_QUALS (type) != type_quals)
1701 type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);
1702
1703 /* Set the type of the COMPONENT_REF to the underlying type. */
1704 TREE_TYPE (expr) = type;
1705
1706 #ifdef ENABLE_TYPES_CHECKING
1707 /* It is now a FE error, if the conversion from the canonical
1708 type to the original expression type is not useless. */
1709 gcc_assert (useless_type_conversion_p (old_type, type));
1710 #endif
1711 }
1712 }
1713
1714 /* If a NOP conversion is changing a pointer to array of foo to a pointer
1715 to foo, embed that change in the ADDR_EXPR by converting
1716 T array[U];
1717 (T *)&array
1718 ==>
1719 &array[L]
1720 where L is the lower bound. For simplicity, only do this for constant
1721 lower bound.
1722 The constraint is that the type of &array[L] is trivially convertible
1723 to T *. */
1724
1725 static void
1726 canonicalize_addr_expr (tree *expr_p)
1727 {
1728 tree expr = *expr_p;
1729 tree addr_expr = TREE_OPERAND (expr, 0);
1730 tree datype, ddatype, pddatype;
1731
1732 /* We simplify only conversions from an ADDR_EXPR to a pointer type. */
1733 if (!POINTER_TYPE_P (TREE_TYPE (expr))
1734 || TREE_CODE (addr_expr) != ADDR_EXPR)
1735 return;
1736
1737 /* The addr_expr type should be a pointer to an array. */
1738 datype = TREE_TYPE (TREE_TYPE (addr_expr));
1739 if (TREE_CODE (datype) != ARRAY_TYPE)
1740 return;
1741
1742 /* The pointer to element type shall be trivially convertible to
1743 the expression pointer type. */
1744 ddatype = TREE_TYPE (datype);
1745 pddatype = build_pointer_type (ddatype);
1746 if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
1747 pddatype))
1748 return;
1749
1750 /* The lower bound and element sizes must be constant. */
1751 if (!TYPE_SIZE_UNIT (ddatype)
1752 || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
1753 || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
1754 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
1755 return;
1756
1757 /* All checks succeeded. Build a new node to merge the cast. */
1758 *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
1759 TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
1760 NULL_TREE, NULL_TREE);
1761 *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);
1762
1763 /* We can have stripped a required restrict qualifier above. */
1764 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
1765 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
1766 }
1767
1768 /* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions
1769 underneath as appropriate. */
1770
1771 static enum gimplify_status
1772 gimplify_conversion (tree *expr_p)
1773 {
1774 location_t loc = EXPR_LOCATION (*expr_p);
1775 gcc_assert (CONVERT_EXPR_P (*expr_p));
1776
1777 /* Then strip away all but the outermost conversion. */
1778 STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));
1779
1780 /* And remove the outermost conversion if it's useless. */
1781 if (tree_ssa_useless_type_conversion (*expr_p))
1782 *expr_p = TREE_OPERAND (*expr_p, 0);
1783
1784 /* If we still have a conversion at the toplevel,
1785 then canonicalize some constructs. */
1786 if (CONVERT_EXPR_P (*expr_p))
1787 {
1788 tree sub = TREE_OPERAND (*expr_p, 0);
1789
1790 /* If a NOP conversion is changing the type of a COMPONENT_REF
1791 expression, then canonicalize its type now in order to expose more
1792 redundant conversions. */
1793 if (TREE_CODE (sub) == COMPONENT_REF)
1794 canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));
1795
1796 /* If a NOP conversion is changing a pointer to array of foo
1797 to a pointer to foo, embed that change in the ADDR_EXPR. */
1798 else if (TREE_CODE (sub) == ADDR_EXPR)
1799 canonicalize_addr_expr (expr_p);
1800 }
1801
1802 /* If we have a conversion to a non-register type force the
1803 use of a VIEW_CONVERT_EXPR instead. */
1804 if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
1805 *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
1806 TREE_OPERAND (*expr_p, 0));
1807
1808 /* Canonicalize CONVERT_EXPR to NOP_EXPR. */
1809 if (TREE_CODE (*expr_p) == CONVERT_EXPR)
1810 TREE_SET_CODE (*expr_p, NOP_EXPR);
1811
1812 return GS_OK;
1813 }
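
/* Sketch of the non-register case handled above: a front end (e.g.
   an Ada unchecked conversion; this example is hypothetical) may hand
   us a conversion of an aggregate X to an aggregate type T.  Such a
   conversion cannot stay a NOP_EXPR, so it is rewritten as
   VIEW_CONVERT_EXPR<T>(X), reinterpreting the storage in place.  */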
1814
1815 /* Nonlocal VLAs seen in the current function. */
1816 static hash_set<tree> *nonlocal_vlas;
1817
1818 /* The VAR_DECLs created for nonlocal VLAs for debug info purposes. */
1819 static tree nonlocal_vla_vars;
1820
1821 /* Gimplify a VAR_DECL or PARM_DECL. Return GS_OK if we expanded a
1822 DECL_VALUE_EXPR, and it's worth re-examining things. */
1823
1824 static enum gimplify_status
1825 gimplify_var_or_parm_decl (tree *expr_p)
1826 {
1827 tree decl = *expr_p;
1828
1829 /* ??? If this is a local variable, and it has not been seen in any
1830 outer BIND_EXPR, then it's probably the result of a duplicate
1831 declaration, for which we've already issued an error. It would
1832 be really nice if the front end wouldn't leak these at all.
1833 Currently the only known culprit is C++ destructors, as seen
1834 in g++.old-deja/g++.jason/binding.C. */
1835 if (TREE_CODE (decl) == VAR_DECL
1836 && !DECL_SEEN_IN_BIND_EXPR_P (decl)
1837 && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
1838 && decl_function_context (decl) == current_function_decl)
1839 {
1840 gcc_assert (seen_error ());
1841 return GS_ERROR;
1842 }
1843
1844 /* When within an OMP context, notice uses of variables. */
1845 if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
1846 return GS_ALL_DONE;
1847
1848 /* If the decl is an alias for another expression, substitute it now. */
1849 if (DECL_HAS_VALUE_EXPR_P (decl))
1850 {
1851 tree value_expr = DECL_VALUE_EXPR (decl);
1852
1853 /* For referenced nonlocal VLAs add a decl for debugging purposes
1854 to the current function. */
1855 if (TREE_CODE (decl) == VAR_DECL
1856 && TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
1857 && nonlocal_vlas != NULL
1858 && TREE_CODE (value_expr) == INDIRECT_REF
1859 && TREE_CODE (TREE_OPERAND (value_expr, 0)) == VAR_DECL
1860 && decl_function_context (decl) != current_function_decl)
1861 {
1862 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
1863 while (ctx
1864 && (ctx->region_type == ORT_WORKSHARE
1865 || ctx->region_type == ORT_SIMD
1866 || ctx->region_type == ORT_ACC))
1867 ctx = ctx->outer_context;
1868 if (!ctx && !nonlocal_vlas->add (decl))
1869 {
1870 tree copy = copy_node (decl);
1871
1872 lang_hooks.dup_lang_specific_decl (copy);
1873 SET_DECL_RTL (copy, 0);
1874 TREE_USED (copy) = 1;
1875 DECL_CHAIN (copy) = nonlocal_vla_vars;
1876 nonlocal_vla_vars = copy;
1877 SET_DECL_VALUE_EXPR (copy, unshare_expr (value_expr));
1878 DECL_HAS_VALUE_EXPR_P (copy) = 1;
1879 }
1880 }
1881
1882 *expr_p = unshare_expr (value_expr);
1883 return GS_OK;
1884 }
1885
1886 return GS_ALL_DONE;
1887 }
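
/* Illustration of the DECL_VALUE_EXPR substitution (names
   hypothetical): for a VLA in

     void f (int n) { char a[n]; ... a[i] ...; }

   the front end typically gives A a value expression of the form *A.1,
   where A.1 points to the allocated storage, so uses of A are replaced
   by that indirection and re-gimplified.  */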
1888
1889 /* Recalculate the value of the TREE_SIDE_EFFECTS flag for T. */
1890
1891 static void
1892 recalculate_side_effects (tree t)
1893 {
1894 enum tree_code code = TREE_CODE (t);
1895 int len = TREE_OPERAND_LENGTH (t);
1896 int i;
1897
1898 switch (TREE_CODE_CLASS (code))
1899 {
1900 case tcc_expression:
1901 switch (code)
1902 {
1903 case INIT_EXPR:
1904 case MODIFY_EXPR:
1905 case VA_ARG_EXPR:
1906 case PREDECREMENT_EXPR:
1907 case PREINCREMENT_EXPR:
1908 case POSTDECREMENT_EXPR:
1909 case POSTINCREMENT_EXPR:
1910 /* All of these have side-effects, no matter what their
1911 operands are. */
1912 return;
1913
1914 default:
1915 break;
1916 }
1917 /* Fall through. */
1918
1919 case tcc_comparison: /* a comparison expression */
1920 case tcc_unary: /* a unary arithmetic expression */
1921 case tcc_binary: /* a binary arithmetic expression */
1922 case tcc_reference: /* a reference */
1923 case tcc_vl_exp: /* a function call */
1924 TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
1925 for (i = 0; i < len; ++i)
1926 {
1927 tree op = TREE_OPERAND (t, i);
1928 if (op && TREE_SIDE_EFFECTS (op))
1929 TREE_SIDE_EFFECTS (t) = 1;
1930 }
1931 break;
1932
1933 case tcc_constant:
1934 /* No side-effects. */
1935 return;
1936
1937 default:
1938 gcc_unreachable ();
1939 }
1940 }
1941
1942 /* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
1943 node *EXPR_P.
1944
1945 compound_lval
1946 : min_lval '[' val ']'
1947 | min_lval '.' ID
1948 | compound_lval '[' val ']'
1949 | compound_lval '.' ID
1950
1951 This is not part of the original SIMPLE definition, which separates
1952 array and member references, but it seems reasonable to handle them
1953 together. Also, this way we don't run into problems with union
1954 aliasing; gcc requires that for accesses through a union to alias, the
1955 union reference must be explicit, which was not always the case when we
1956 were splitting up array and member refs.
1957
1958 PRE_P points to the sequence where side effects that must happen before
1959 *EXPR_P should be stored.
1960
1961 POST_P points to the sequence where side effects that must happen after
1962 *EXPR_P should be stored. */
1963
1964 static enum gimplify_status
1965 gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
1966 fallback_t fallback)
1967 {
1968 tree *p;
1969 enum gimplify_status ret = GS_ALL_DONE, tret;
1970 int i;
1971 location_t loc = EXPR_LOCATION (*expr_p);
1972 tree expr = *expr_p;
1973
1974 /* Create a stack of the subexpressions so later we can walk them in
1975 order from inner to outer. */
1976 auto_vec<tree, 10> expr_stack;
1977
1978 /* We can handle anything that get_inner_reference can deal with. */
1979 for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
1980 {
1981 restart:
1982 /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs. */
1983 if (TREE_CODE (*p) == INDIRECT_REF)
1984 *p = fold_indirect_ref_loc (loc, *p);
1985
1986 if (handled_component_p (*p))
1987 ;
1988 /* Expand DECL_VALUE_EXPR now. In some cases that may expose
1989 additional COMPONENT_REFs. */
1990 else if ((TREE_CODE (*p) == VAR_DECL || TREE_CODE (*p) == PARM_DECL)
1991 && gimplify_var_or_parm_decl (p) == GS_OK)
1992 goto restart;
1993 else
1994 break;
1995
1996 expr_stack.safe_push (*p);
1997 }
1998
1999 gcc_assert (expr_stack.length ());
2000
2001 /* Now EXPR_STACK is a stack of pointers to all the refs we've
2002 walked through and P points to the innermost expression.
2003
2004 Java requires that we elaborate nodes in source order. That
2005 means we must gimplify the inner expression followed by each of
2006 the indices, in order. But we can't gimplify the inner
2007 expression until we deal with any variable bounds, sizes, or
2008 positions in order to deal with PLACEHOLDER_EXPRs.
2009
2010 So we do this in three steps. First we deal with the annotations
2011 for any variables in the components, then we gimplify the base,
2012 then we gimplify any indices, from left to right. */
2013 for (i = expr_stack.length () - 1; i >= 0; i--)
2014 {
2015 tree t = expr_stack[i];
2016
2017 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
2018 {
2019 /* Gimplify the low bound and element type size and put them into
2020 the ARRAY_REF. If these values are set, they have already been
2021 gimplified. */
2022 if (TREE_OPERAND (t, 2) == NULL_TREE)
2023 {
2024 tree low = unshare_expr (array_ref_low_bound (t));
2025 if (!is_gimple_min_invariant (low))
2026 {
2027 TREE_OPERAND (t, 2) = low;
2028 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
2029 post_p, is_gimple_reg,
2030 fb_rvalue);
2031 ret = MIN (ret, tret);
2032 }
2033 }
2034 else
2035 {
2036 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
2037 is_gimple_reg, fb_rvalue);
2038 ret = MIN (ret, tret);
2039 }
2040
2041 if (TREE_OPERAND (t, 3) == NULL_TREE)
2042 {
2043 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
2044 tree elmt_size = unshare_expr (array_ref_element_size (t));
2045 tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));
2046
2047 /* Divide the element size by the alignment of the element
2048 type (above). */
2049 elmt_size
2050 = size_binop_loc (loc, EXACT_DIV_EXPR, elmt_size, factor);
2051
2052 if (!is_gimple_min_invariant (elmt_size))
2053 {
2054 TREE_OPERAND (t, 3) = elmt_size;
2055 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
2056 post_p, is_gimple_reg,
2057 fb_rvalue);
2058 ret = MIN (ret, tret);
2059 }
2060 }
2061 else
2062 {
2063 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
2064 is_gimple_reg, fb_rvalue);
2065 ret = MIN (ret, tret);
2066 }
2067 }
2068 else if (TREE_CODE (t) == COMPONENT_REF)
2069 {
2070 /* Set the field offset into T and gimplify it. */
2071 if (TREE_OPERAND (t, 2) == NULL_TREE)
2072 {
2073 tree offset = unshare_expr (component_ref_field_offset (t));
2074 tree field = TREE_OPERAND (t, 1);
2075 tree factor
2076 = size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);
2077
2078 /* Divide the offset by its alignment. */
2079 offset = size_binop_loc (loc, EXACT_DIV_EXPR, offset, factor);
2080
2081 if (!is_gimple_min_invariant (offset))
2082 {
2083 TREE_OPERAND (t, 2) = offset;
2084 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
2085 post_p, is_gimple_reg,
2086 fb_rvalue);
2087 ret = MIN (ret, tret);
2088 }
2089 }
2090 else
2091 {
2092 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
2093 is_gimple_reg, fb_rvalue);
2094 ret = MIN (ret, tret);
2095 }
2096 }
2097 }
2098
2099 /* Step 2 is to gimplify the base expression. Make sure fb_lvalue is set
2100 so as to match the min_lval predicate. Failure to do so may result
2101 in the creation of large aggregate temporaries. */
2102 tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
2103 fallback | fb_lvalue);
2104 ret = MIN (ret, tret);
2105
2106 /* And finally, the indices and operands of ARRAY_REF. During this
2107 loop we also remove any useless conversions. */
2108 for (; expr_stack.length () > 0; )
2109 {
2110 tree t = expr_stack.pop ();
2111
2112 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
2113 {
2114 /* Gimplify the dimension. */
2115 if (!is_gimple_min_invariant (TREE_OPERAND (t, 1)))
2116 {
2117 tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
2118 is_gimple_val, fb_rvalue);
2119 ret = MIN (ret, tret);
2120 }
2121 }
2122
2123 STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));
2124
2125 /* The innermost expression P may have originally had
2126 TREE_SIDE_EFFECTS set, which would have caused all the outer
2127 expressions in *EXPR_P leading to P to also have had
2128 TREE_SIDE_EFFECTS set. */
2129 recalculate_side_effects (t);
2130 }
2131
2132 /* If the outermost expression is a COMPONENT_REF, canonicalize its type. */
2133 if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
2134 {
2135 canonicalize_component_ref (expr_p);
2136 }
2137
2138 expr_stack.release ();
2139
2140 gcc_assert (*expr_p == expr || ret != GS_ALL_DONE);
2141
2142 return ret;
2143 }
2144
2145 /* Gimplify the self modifying expression pointed to by EXPR_P
2146 (++, --, +=, -=).
2147
2148 PRE_P points to the list where side effects that must happen before
2149 *EXPR_P should be stored.
2150
2151 POST_P points to the list where side effects that must happen after
2152 *EXPR_P should be stored.
2153
2154 WANT_VALUE is nonzero iff we want to use the value of this expression
2155 in another expression.
2156
2157 ARITH_TYPE is the type the computation should be performed in. */
2158
2159 enum gimplify_status
2160 gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
2161 bool want_value, tree arith_type)
2162 {
2163 enum tree_code code;
2164 tree lhs, lvalue, rhs, t1;
2165 gimple_seq post = NULL, *orig_post_p = post_p;
2166 bool postfix;
2167 enum tree_code arith_code;
2168 enum gimplify_status ret;
2169 location_t loc = EXPR_LOCATION (*expr_p);
2170
2171 code = TREE_CODE (*expr_p);
2172
2173 gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
2174 || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);
2175
2176 /* Prefix or postfix? */
2177 if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
2178 /* Faster to treat as prefix if result is not used. */
2179 postfix = want_value;
2180 else
2181 postfix = false;
2182
2183 /* For postfix, make sure the inner expression's post side effects
2184 are executed after side effects from this expression. */
2185 if (postfix)
2186 post_p = &post;
2187
2188 /* Add or subtract? */
2189 if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
2190 arith_code = PLUS_EXPR;
2191 else
2192 arith_code = MINUS_EXPR;
2193
2194 /* Gimplify the LHS into a GIMPLE lvalue. */
2195 lvalue = TREE_OPERAND (*expr_p, 0);
2196 ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
2197 if (ret == GS_ERROR)
2198 return ret;
2199
2200 /* Extract the operands to the arithmetic operation. */
2201 lhs = lvalue;
2202 rhs = TREE_OPERAND (*expr_p, 1);
2203
2204 /* For a postfix operator, we evaluate the LHS to an rvalue and then use
2205 that both as the result value and in the postqueue operation. */
2206 if (postfix)
2207 {
2208 ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
2209 if (ret == GS_ERROR)
2210 return ret;
2211
2212 lhs = get_initialized_tmp_var (lhs, pre_p, NULL);
2213 }
2214
2215 /* For pointer increment and decrement, use POINTER_PLUS_EXPR. */
2216 if (POINTER_TYPE_P (TREE_TYPE (lhs)))
2217 {
2218 rhs = convert_to_ptrofftype_loc (loc, rhs);
2219 if (arith_code == MINUS_EXPR)
2220 rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
2221 t1 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (*expr_p), lhs, rhs);
2222 }
2223 else
2224 t1 = fold_convert (TREE_TYPE (*expr_p),
2225 fold_build2 (arith_code, arith_type,
2226 fold_convert (arith_type, lhs),
2227 fold_convert (arith_type, rhs)));
2228
2229 if (postfix)
2230 {
2231 gimplify_assign (lvalue, t1, pre_p);
2232 gimplify_seq_add_seq (orig_post_p, post);
2233 *expr_p = lhs;
2234 return GS_ALL_DONE;
2235 }
2236 else
2237 {
2238 *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
2239 return GS_OK;
2240 }
2241 }
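
/* Worked example (names hypothetical): with WANT_VALUE set, 'b = a++'
   gimplifies to roughly

     a.0 = a;        <-- rvalue of the LHS, saved in a temporary
     a = a.0 + 1;    <-- the update, emitted to the pre-queue
     b = a.0;        <-- *EXPR_P becomes the saved value

   whereas the prefix '++a' simply becomes the MODIFY_EXPR 'a = a + 1'.  */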
2242
2243 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
2244
2245 static void
2246 maybe_with_size_expr (tree *expr_p)
2247 {
2248 tree expr = *expr_p;
2249 tree type = TREE_TYPE (expr);
2250 tree size;
2251
2252 /* If we've already wrapped this or the type is error_mark_node, we can't do
2253 anything. */
2254 if (TREE_CODE (expr) == WITH_SIZE_EXPR
2255 || type == error_mark_node)
2256 return;
2257
2258 /* If the size isn't known or is a constant, we have nothing to do. */
2259 size = TYPE_SIZE_UNIT (type);
2260 if (!size || TREE_CODE (size) == INTEGER_CST)
2261 return;
2262
2263 /* Otherwise, make a WITH_SIZE_EXPR. */
2264 size = unshare_expr (size);
2265 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
2266 *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
2267 }
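
/* Sketch (hypothetical declarations): for an object whose type is a
   VLA such as 'char a[n]', TYPE_SIZE_UNIT is the non-constant N, so
   *EXPR_P becomes WITH_SIZE_EXPR <a, n>, keeping the dynamic size
   attached for later consumers such as the call and assignment
   expanders.  */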
2268
2269 /* Helper for gimplify_call_expr. Gimplify a single argument *ARG_P.
2270 Store any side-effects in PRE_P. CALL_LOCATION is the location of
2271 the CALL_EXPR. */
2272
2273 enum gimplify_status
2274 gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location)
2275 {
2276 bool (*test) (tree);
2277 fallback_t fb;
2278
2279 /* In general, we allow lvalues for function arguments to avoid
2280 extra overhead of copying large aggregates out of even larger
2281 aggregates into temporaries only to copy the temporaries to
2282 the argument list. Make optimizers happy by pulling out to
2283 temporaries those types that fit in registers. */
2284 if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
2285 test = is_gimple_val, fb = fb_rvalue;
2286 else
2287 {
2288 test = is_gimple_lvalue, fb = fb_either;
2289 /* Also strip a TARGET_EXPR that would force an extra copy. */
2290 if (TREE_CODE (*arg_p) == TARGET_EXPR)
2291 {
2292 tree init = TARGET_EXPR_INITIAL (*arg_p);
2293 if (init
2294 && !VOID_TYPE_P (TREE_TYPE (init)))
2295 *arg_p = init;
2296 }
2297 }
2298
2299 /* If this is a variable sized type, we must remember the size. */
2300 maybe_with_size_expr (arg_p);
2301
2302 /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c. */
2303 /* Make sure arguments have the same location as the function call
2304 itself. */
2305 protected_set_expr_location (*arg_p, call_location);
2306
2307 /* There is a sequence point before a function call. Side effects in
2308 the argument list must occur before the actual call. So, when
2309 gimplifying arguments, force gimplify_expr to use an internal
2310 post queue which is then appended to the end of PRE_P. */
2311 return gimplify_expr (arg_p, pre_p, NULL, test, fb);
2312 }
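
/* Illustration (hypothetical call): in 'g (s.big, x + 1)' the
   aggregate member S.BIG is left as an lvalue and passed in place,
   while the scalar 'x + 1' is pulled out through the pre-queue:

     D.1 = x + 1;
     g (s.big, D.1);  */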
2313
2314 /* Don't fold inside offloading or taskreg regions: it can break code by
2315 adding decl references that weren't in the source. We'll do it during
2316 omplower pass instead. */
2317
2318 static bool
2319 maybe_fold_stmt (gimple_stmt_iterator *gsi)
2320 {
2321 struct gimplify_omp_ctx *ctx;
2322 for (ctx = gimplify_omp_ctxp; ctx; ctx = ctx->outer_context)
2323 if ((ctx->region_type & (ORT_TARGET | ORT_PARALLEL | ORT_TASK)) != 0)
2324 return false;
2325 return fold_stmt (gsi);
2326 }
2327
2328 /* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
2329 WANT_VALUE is true if the result of the call is desired. */
2330
2331 static enum gimplify_status
2332 gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
2333 {
2334 tree fndecl, parms, p, fnptrtype;
2335 enum gimplify_status ret;
2336 int i, nargs;
2337 gcall *call;
2338 bool builtin_va_start_p = false;
2339 location_t loc = EXPR_LOCATION (*expr_p);
2340
2341 gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);
2342
2343 /* For reliable diagnostics during inlining, it is necessary that
2344 every call_expr be annotated with file and line. */
2345 if (! EXPR_HAS_LOCATION (*expr_p))
2346 SET_EXPR_LOCATION (*expr_p, input_location);
2347
2348 /* Gimplify internal functions created in the FEs. */
2349 if (CALL_EXPR_FN (*expr_p) == NULL_TREE)
2350 {
2351 if (want_value)
2352 return GS_ALL_DONE;
2353
2354 nargs = call_expr_nargs (*expr_p);
2355 enum internal_fn ifn = CALL_EXPR_IFN (*expr_p);
2356 auto_vec<tree> vargs (nargs);
2357
2358 for (i = 0; i < nargs; i++)
2359 {
2360 gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
2361 EXPR_LOCATION (*expr_p));
2362 vargs.quick_push (CALL_EXPR_ARG (*expr_p, i));
2363 }
2364 gimple *call = gimple_build_call_internal_vec (ifn, vargs);
2365 gimplify_seq_add_stmt (pre_p, call);
2366 return GS_ALL_DONE;
2367 }
2368
2369 /* This may be a call to a builtin function.
2370
2371 Builtin function calls may be transformed into different
2372 (and more efficient) builtin function calls under certain
2373 circumstances. Unfortunately, gimplification can muck things
2374 up enough that the builtin expanders are not aware that certain
2375 transformations are still valid.
2376
2377 So we attempt transformation/gimplification of the call before
2378 we gimplify the CALL_EXPR. At this time we do not manage to
2379 transform all calls in the same manner as the expanders do, but
2380 we do transform most of them. */
2381 fndecl = get_callee_fndecl (*expr_p);
2382 if (fndecl
2383 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
2384 switch (DECL_FUNCTION_CODE (fndecl))
2385 {
2386 case BUILT_IN_ALLOCA:
2387 case BUILT_IN_ALLOCA_WITH_ALIGN:
2388 /* If the call has been built for a variable-sized object, then we
2389 want to restore the stack level when the enclosing BIND_EXPR is
2390 exited to reclaim the allocated space; otherwise, we precisely
2391 need to do the opposite and preserve the latest stack level. */
2392 if (CALL_ALLOCA_FOR_VAR_P (*expr_p))
2393 gimplify_ctxp->save_stack = true;
2394 else
2395 gimplify_ctxp->keep_stack = true;
2396 break;
2397
2398 case BUILT_IN_VA_START:
2399 {
2400 builtin_va_start_p = true;
2401 if (call_expr_nargs (*expr_p) < 2)
2402 {
2403 error ("too few arguments to function %<va_start%>");
2404 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
2405 return GS_OK;
2406 }
2407
2408 if (fold_builtin_next_arg (*expr_p, true))
2409 {
2410 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
2411 return GS_OK;
2412 }
2413 break;
2414 }
2415 case BUILT_IN_LINE:
2416 {
2417 *expr_p = build_int_cst (TREE_TYPE (*expr_p),
2418 LOCATION_LINE (EXPR_LOCATION (*expr_p)));
2419 return GS_OK;
2420 }
2421 case BUILT_IN_FILE:
2422 {
2423 const char *locfile = LOCATION_FILE (EXPR_LOCATION (*expr_p));
2424 *expr_p = build_string_literal (strlen (locfile) + 1, locfile);
2425 return GS_OK;
2426 }
2427 case BUILT_IN_FUNCTION:
2428 {
2429 const char *function;
2430 function = IDENTIFIER_POINTER (DECL_NAME (current_function_decl));
2431 *expr_p = build_string_literal (strlen (function) + 1, function);
2432 return GS_OK;
2433 }
2434 default:
2435 ;
2436 }
2437 if (fndecl && DECL_BUILT_IN (fndecl))
2438 {
2439 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
2440 if (new_tree && new_tree != *expr_p)
2441 {
2442 /* There was a transformation of this call which computes the
2443 same value, but in a more efficient way. Return and try
2444 again. */
2445 *expr_p = new_tree;
2446 return GS_OK;
2447 }
2448 }
2449
2450 /* Remember the original function pointer type. */
2451 fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
2452
2453 /* There is a sequence point before the call, so any side effects in
2454 the calling expression must occur before the actual call. Force
2455 gimplify_expr to use an internal post queue. */
2456 ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
2457 is_gimple_call_addr, fb_rvalue);
2458
2459 nargs = call_expr_nargs (*expr_p);
2460
2461 /* Get argument types for verification. */
2462 fndecl = get_callee_fndecl (*expr_p);
2463 parms = NULL_TREE;
2464 if (fndecl)
2465 parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
2466 else
2467 parms = TYPE_ARG_TYPES (TREE_TYPE (fnptrtype));
2468
2469 if (fndecl && DECL_ARGUMENTS (fndecl))
2470 p = DECL_ARGUMENTS (fndecl);
2471 else if (parms)
2472 p = parms;
2473 else
2474 p = NULL_TREE;
2475 for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
2476 ;
2477
2478 /* If the last argument is __builtin_va_arg_pack () and it is not
2479 passed as a named argument, decrease the number of CALL_EXPR
2480 arguments and set instead the CALL_EXPR_VA_ARG_PACK flag. */
2481 if (!p
2482 && i < nargs
2483 && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
2484 {
2485 tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
2486 tree last_arg_fndecl = get_callee_fndecl (last_arg);
2487
2488 if (last_arg_fndecl
2489 && TREE_CODE (last_arg_fndecl) == FUNCTION_DECL
2490 && DECL_BUILT_IN_CLASS (last_arg_fndecl) == BUILT_IN_NORMAL
2491 && DECL_FUNCTION_CODE (last_arg_fndecl) == BUILT_IN_VA_ARG_PACK)
2492 {
2493 tree call = *expr_p;
2494
2495 --nargs;
2496 *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
2497 CALL_EXPR_FN (call),
2498 nargs, CALL_EXPR_ARGP (call));
2499
2500 /* Copy all CALL_EXPR flags, location and block, except
2501 CALL_EXPR_VA_ARG_PACK flag. */
2502 CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
2503 CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
2504 CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
2505 = CALL_EXPR_RETURN_SLOT_OPT (call);
2506 CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
2507 SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));
2508
2509 /* Set CALL_EXPR_VA_ARG_PACK. */
2510 CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
2511 }
2512 }
2513
2514 /* Gimplify the function arguments. */
2515 if (nargs > 0)
2516 {
2517 for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
2518 PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
2519 PUSH_ARGS_REVERSED ? i-- : i++)
2520 {
2521 enum gimplify_status t;
2522
2523 /* Avoid gimplifying the second argument to va_start, which needs to
2524 be the plain PARM_DECL. */
2525 if ((i != 1) || !builtin_va_start_p)
2526 {
2527 t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
2528 EXPR_LOCATION (*expr_p));
2529
2530 if (t == GS_ERROR)
2531 ret = GS_ERROR;
2532 }
2533 }
2534 }
2535
2536 /* Gimplify the static chain. */
2537 if (CALL_EXPR_STATIC_CHAIN (*expr_p))
2538 {
2539 if (fndecl && !DECL_STATIC_CHAIN (fndecl))
2540 CALL_EXPR_STATIC_CHAIN (*expr_p) = NULL;
2541 else
2542 {
2543 enum gimplify_status t;
2544 t = gimplify_arg (&CALL_EXPR_STATIC_CHAIN (*expr_p), pre_p,
2545 EXPR_LOCATION (*expr_p));
2546 if (t == GS_ERROR)
2547 ret = GS_ERROR;
2548 }
2549 }
2550
2551 /* Verify the function result. */
2552 if (want_value && fndecl
2553 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype))))
2554 {
2555 error_at (loc, "using result of function returning %<void%>");
2556 ret = GS_ERROR;
2557 }
2558
2559 /* Try this again in case gimplification exposed something. */
2560 if (ret != GS_ERROR)
2561 {
2562 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
2563
2564 if (new_tree && new_tree != *expr_p)
2565 {
2566 /* There was a transformation of this call which computes the
2567 same value, but in a more efficient way. Return and try
2568 again. */
2569 *expr_p = new_tree;
2570 return GS_OK;
2571 }
2572 }
2573 else
2574 {
2575 *expr_p = error_mark_node;
2576 return GS_ERROR;
2577 }
2578
2579 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on the
2580 CALL_EXPR node itself. This allows us to eliminate redundant or useless
2581 calls to "const" functions. */
2582 if (TREE_CODE (*expr_p) == CALL_EXPR)
2583 {
2584 int flags = call_expr_flags (*expr_p);
2585 if (flags & (ECF_CONST | ECF_PURE)
2586 /* An infinite loop is considered a side effect. */
2587 && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
2588 TREE_SIDE_EFFECTS (*expr_p) = 0;
2589 }
2590
2591 /* If the value is not needed by the caller, emit a new GIMPLE_CALL
2592 and clear *EXPR_P. Otherwise, leave *EXPR_P in its gimplified
2593 form and delegate the creation of a GIMPLE_CALL to
2594 gimplify_modify_expr. This is always possible because when
2595 WANT_VALUE is true, the caller wants the result of this call into
2596 a temporary, which means that we will emit an INIT_EXPR in
2597 internal_get_tmp_var which will then be handled by
2598 gimplify_modify_expr. */
2599 if (!want_value)
2600 {
2601 /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
2602 have to do is replicate it as a GIMPLE_CALL tuple. */
2603 gimple_stmt_iterator gsi;
2604 call = gimple_build_call_from_tree (*expr_p);
2605 gimple_call_set_fntype (call, TREE_TYPE (fnptrtype));
2606 notice_special_calls (call);
2607 gimplify_seq_add_stmt (pre_p, call);
2608 gsi = gsi_last (*pre_p);
2609 maybe_fold_stmt (&gsi);
2610 *expr_p = NULL_TREE;
2611 }
2612 else
2613 /* Remember the original function type. */
2614 CALL_EXPR_FN (*expr_p) = build1 (NOP_EXPR, fnptrtype,
2615 CALL_EXPR_FN (*expr_p));
2616
2617 return ret;
2618 }
2619
2620 /* Handle shortcut semantics in the predicate operand of a COND_EXPR by
2621 rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
2622
2623 TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
2624 condition is true or false, respectively. If null, we should generate
2625 our own to skip over the evaluation of this specific expression.
2626
2627 LOCUS is the source location of the COND_EXPR.
2628
2629 This function is the tree equivalent of do_jump.
2630
2631 shortcut_cond_r should only be called by shortcut_cond_expr. */
2632
2633 static tree
2634 shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
2635 location_t locus)
2636 {
2637 tree local_label = NULL_TREE;
2638 tree t, expr = NULL;
2639
2640 /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
2641 retain the shortcut semantics. Just insert the gotos here;
2642 shortcut_cond_expr will append the real blocks later. */
2643 if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
2644 {
2645 location_t new_locus;
2646
2647 /* Turn if (a && b) into
2648
2649 if (a); else goto no;
2650 if (b) goto yes; else goto no;
2651 (no:) */
2652
2653 if (false_label_p == NULL)
2654 false_label_p = &local_label;
2655
2656 /* Keep the original source location on the first 'if'. */
2657 t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus);
2658 append_to_statement_list (t, &expr);
2659
2660 /* Set the source location of the && on the second 'if'. */
2661 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
2662 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
2663 new_locus);
2664 append_to_statement_list (t, &expr);
2665 }
2666 else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
2667 {
2668 location_t new_locus;
2669
2670 /* Turn if (a || b) into
2671
2672 if (a) goto yes;
2673 if (b) goto yes; else goto no;
2674 (yes:) */
2675
2676 if (true_label_p == NULL)
2677 true_label_p = &local_label;
2678
2679 /* Keep the original source location on the first 'if'. */
2680 t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus);
2681 append_to_statement_list (t, &expr);
2682
2683 /* Set the source location of the || on the second 'if'. */
2684 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
2685 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
2686 new_locus);
2687 append_to_statement_list (t, &expr);
2688 }
2689 else if (TREE_CODE (pred) == COND_EXPR
2690 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 1)))
2691 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 2))))
2692 {
2693 location_t new_locus;
2694
2695 /* As long as we're messing with gotos, turn if (a ? b : c) into
2696 if (a)
2697 if (b) goto yes; else goto no;
2698 else
2699 if (c) goto yes; else goto no;
2700
2701 Don't do this if one of the arms has void type, which can happen
2702 in C++ when the arm is throw. */
2703
2704 /* Keep the original source location on the first 'if'. Set the source
2705 location of the ? on the second 'if'. */
2706 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
2707 expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
2708 shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
2709 false_label_p, locus),
2710 shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
2711 false_label_p, new_locus));
2712 }
2713 else
2714 {
2715 expr = build3 (COND_EXPR, void_type_node, pred,
2716 build_and_jump (true_label_p),
2717 build_and_jump (false_label_p));
2718 SET_EXPR_LOCATION (expr, locus);
2719 }
2720
2721 if (local_label)
2722 {
2723 t = build1 (LABEL_EXPR, void_type_node, local_label);
2724 append_to_statement_list (t, &expr);
2725 }
2726
2727 return expr;
2728 }
2729
2730 /* Given a conditional expression EXPR with short-circuit boolean
2731 predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
2732 predicate apart into the equivalent sequence of conditionals. */
2733
2734 static tree
2735 shortcut_cond_expr (tree expr)
2736 {
2737 tree pred = TREE_OPERAND (expr, 0);
2738 tree then_ = TREE_OPERAND (expr, 1);
2739 tree else_ = TREE_OPERAND (expr, 2);
2740 tree true_label, false_label, end_label, t;
2741 tree *true_label_p;
2742 tree *false_label_p;
2743 bool emit_end, emit_false, jump_over_else;
2744 bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
2745 bool else_se = else_ && TREE_SIDE_EFFECTS (else_);
2746
2747 /* First do simple transformations. */
2748 if (!else_se)
2749 {
2750 /* If there is no 'else', turn
2751 if (a && b) then c
2752 into
2753 if (a) if (b) then c. */
2754 while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
2755 {
2756 /* Keep the original source location on the first 'if'. */
2757 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
2758 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
2759 /* Set the source location of the && on the second 'if'. */
2760 if (EXPR_HAS_LOCATION (pred))
2761 SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
2762 then_ = shortcut_cond_expr (expr);
2763 then_se = then_ && TREE_SIDE_EFFECTS (then_);
2764 pred = TREE_OPERAND (pred, 0);
2765 expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
2766 SET_EXPR_LOCATION (expr, locus);
2767 }
2768 }
2769
2770 if (!then_se)
2771 {
2772 /* If there is no 'then', turn
2773 if (a || b); else d
2774 into
2775 if (a); else if (b); else d. */
2776 while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
2777 {
2778 /* Keep the original source location on the first 'if'. */
2779 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
2780 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
2781 /* Set the source location of the || on the second 'if'. */
2782 if (EXPR_HAS_LOCATION (pred))
2783 SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
2784 else_ = shortcut_cond_expr (expr);
2785 else_se = else_ && TREE_SIDE_EFFECTS (else_);
2786 pred = TREE_OPERAND (pred, 0);
2787 expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
2788 SET_EXPR_LOCATION (expr, locus);
2789 }
2790 }
2791
2792 /* If we're done, great. */
2793 if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
2794 && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
2795 return expr;
2796
2797 /* Otherwise we need to mess with gotos. Change
2798 if (a) c; else d;
2799 to
2800 if (a); else goto no;
2801 c; goto end;
2802 no: d; end:
2803 and recursively gimplify the condition. */
2804
2805 true_label = false_label = end_label = NULL_TREE;
2806
2807 /* If our arms just jump somewhere, hijack those labels so we don't
2808 generate jumps to jumps. */
2809
2810 if (then_
2811 && TREE_CODE (then_) == GOTO_EXPR
2812 && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL)
2813 {
2814 true_label = GOTO_DESTINATION (then_);
2815 then_ = NULL;
2816 then_se = false;
2817 }
2818
2819 if (else_
2820 && TREE_CODE (else_) == GOTO_EXPR
2821 && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL)
2822 {
2823 false_label = GOTO_DESTINATION (else_);
2824 else_ = NULL;
2825 else_se = false;
2826 }
2827
2828 /* If we aren't hijacking a label for the 'then' branch, it falls through. */
2829 if (true_label)
2830 true_label_p = &true_label;
2831 else
2832 true_label_p = NULL;
2833
2834 /* The 'else' branch also needs a label if it contains interesting code. */
2835 if (false_label || else_se)
2836 false_label_p = &false_label;
2837 else
2838 false_label_p = NULL;
2839
2840 /* If there was nothing else in our arms, just forward the label(s). */
2841 if (!then_se && !else_se)
2842 return shortcut_cond_r (pred, true_label_p, false_label_p,
2843 EXPR_LOC_OR_LOC (expr, input_location));
2844
2845 /* If our last subexpression already has a terminal label, reuse it. */
2846 if (else_se)
2847 t = expr_last (else_);
2848 else if (then_se)
2849 t = expr_last (then_);
2850 else
2851 t = NULL;
2852 if (t && TREE_CODE (t) == LABEL_EXPR)
2853 end_label = LABEL_EXPR_LABEL (t);
2854
2855 /* If we don't care about jumping to the 'else' branch, jump to the end
2856 if the condition is false. */
2857 if (!false_label_p)
2858 false_label_p = &end_label;
2859
2860 /* We only want to emit these labels if we aren't hijacking them. */
2861 emit_end = (end_label == NULL_TREE);
2862 emit_false = (false_label == NULL_TREE);
2863
2864 /* We only emit the jump over the else clause if we have to--if the
2865 then clause may fall through. Otherwise we can wind up with a
2866 useless jump and a useless label at the end of gimplified code,
2867 which will cause us to think that this conditional as a whole
2868 falls through even if it doesn't. If we then inline a function
2869 which ends with such a condition, that can cause us to issue an
2870 inappropriate warning about control reaching the end of a
2871 non-void function. */
2872 jump_over_else = block_may_fallthru (then_);
2873
2874 pred = shortcut_cond_r (pred, true_label_p, false_label_p,
2875 EXPR_LOC_OR_LOC (expr, input_location));
2876
2877 expr = NULL;
2878 append_to_statement_list (pred, &expr);
2879
2880 append_to_statement_list (then_, &expr);
2881 if (else_se)
2882 {
2883 if (jump_over_else)
2884 {
2885 tree last = expr_last (expr);
2886 t = build_and_jump (&end_label);
2887 if (EXPR_HAS_LOCATION (last))
2888 SET_EXPR_LOCATION (t, EXPR_LOCATION (last));
2889 append_to_statement_list (t, &expr);
2890 }
2891 if (emit_false)
2892 {
2893 t = build1 (LABEL_EXPR, void_type_node, false_label);
2894 append_to_statement_list (t, &expr);
2895 }
2896 append_to_statement_list (else_, &expr);
2897 }
2898 if (emit_end && end_label)
2899 {
2900 t = build1 (LABEL_EXPR, void_type_node, end_label);
2901 append_to_statement_list (t, &expr);
2902 }
2903
2904 return expr;
2905 }
2906
2907 /* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */
2908
2909 tree
2910 gimple_boolify (tree expr)
2911 {
2912 tree type = TREE_TYPE (expr);
2913 location_t loc = EXPR_LOCATION (expr);
2914
2915 if (TREE_CODE (expr) == NE_EXPR
2916 && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR
2917 && integer_zerop (TREE_OPERAND (expr, 1)))
2918 {
2919 tree call = TREE_OPERAND (expr, 0);
2920 tree fn = get_callee_fndecl (call);
2921
2922 /* For __builtin_expect ((long) (x), y) recurse into x as well
2923 if x is truth_value_p. */
2924 if (fn
2925 && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL
2926 && DECL_FUNCTION_CODE (fn) == BUILT_IN_EXPECT
2927 && call_expr_nargs (call) == 2)
2928 {
2929 tree arg = CALL_EXPR_ARG (call, 0);
2930 if (arg)
2931 {
2932 if (TREE_CODE (arg) == NOP_EXPR
2933 && TREE_TYPE (arg) == TREE_TYPE (call))
2934 arg = TREE_OPERAND (arg, 0);
2935 if (truth_value_p (TREE_CODE (arg)))
2936 {
2937 arg = gimple_boolify (arg);
2938 CALL_EXPR_ARG (call, 0)
2939 = fold_convert_loc (loc, TREE_TYPE (call), arg);
2940 }
2941 }
2942 }
2943 }
2944
2945 switch (TREE_CODE (expr))
2946 {
2947 case TRUTH_AND_EXPR:
2948 case TRUTH_OR_EXPR:
2949 case TRUTH_XOR_EXPR:
2950 case TRUTH_ANDIF_EXPR:
2951 case TRUTH_ORIF_EXPR:
2952 /* Also boolify the arguments of truth exprs. */
2953 TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
2954 /* FALLTHRU */
2955
2956 case TRUTH_NOT_EXPR:
2957 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
2958
2959 /* These expressions always produce boolean results. */
2960 if (TREE_CODE (type) != BOOLEAN_TYPE)
2961 TREE_TYPE (expr) = boolean_type_node;
2962 return expr;
2963
2964 case ANNOTATE_EXPR:
2965 switch ((enum annot_expr_kind) TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)))
2966 {
2967 case annot_expr_ivdep_kind:
2968 case annot_expr_no_vector_kind:
2969 case annot_expr_vector_kind:
2970 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
2971 if (TREE_CODE (type) != BOOLEAN_TYPE)
2972 TREE_TYPE (expr) = boolean_type_node;
2973 return expr;
2974 default:
2975 gcc_unreachable ();
2976 }
2977
2978 default:
2979 if (COMPARISON_CLASS_P (expr))
2980 {
2981 /* These expressions always produce boolean results. */
2982 if (TREE_CODE (type) != BOOLEAN_TYPE)
2983 TREE_TYPE (expr) = boolean_type_node;
2984 return expr;
2985 }
2986 /* Other expressions that get here must have boolean values, but
2987 might need to be converted to the appropriate mode. */
2988 if (TREE_CODE (type) == BOOLEAN_TYPE)
2989 return expr;
2990 return fold_convert_loc (loc, boolean_type_node, expr);
2991 }
2992 }
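
/* For instance (hypothetical operands): in '!(x < y) || z' the
   TRUTH_ORIF_EXPR and TRUTH_NOT_EXPR keep their shape but have their
   types, and recursively their operands' types, retargeted to
   boolean_type_node, while a bare integer like Z in boolean context
   is wrapped by fold_convert into an explicit truth value.  */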
2993
2994 /* Given a conditional expression *EXPR_P without side effects, gimplify
2995 its operands. New statements are inserted into PRE_P. */
2996
2997 static enum gimplify_status
2998 gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
2999 {
3000 tree expr = *expr_p, cond;
3001 enum gimplify_status ret, tret;
3002 enum tree_code code;
3003
3004 cond = gimple_boolify (COND_EXPR_COND (expr));
3005
3006 /* We need to handle && and || specially, as their gimplification
3007 creates pure COND_EXPRs, which would otherwise lead to an infinite cycle. */
3008 code = TREE_CODE (cond);
3009 if (code == TRUTH_ANDIF_EXPR)
3010 TREE_SET_CODE (cond, TRUTH_AND_EXPR);
3011 else if (code == TRUTH_ORIF_EXPR)
3012 TREE_SET_CODE (cond, TRUTH_OR_EXPR);
3013 ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_condexpr, fb_rvalue);
3014 COND_EXPR_COND (*expr_p) = cond;
3015
3016 tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
3017 is_gimple_val, fb_rvalue);
3018 ret = MIN (ret, tret);
3019 tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
3020 is_gimple_val, fb_rvalue);
3021
3022 return MIN (ret, tret);
3023 }
3024
3025 /* Return true if evaluating EXPR could trap.
3026 EXPR is GENERIC, while tree_could_trap_p can be called
3027 only on GIMPLE. */
3028
3029 static bool
3030 generic_expr_could_trap_p (tree expr)
3031 {
3032 unsigned i, n;
3033
3034 if (!expr || is_gimple_val (expr))
3035 return false;
3036
3037 if (!EXPR_P (expr) || tree_could_trap_p (expr))
3038 return true;
3039
3040 n = TREE_OPERAND_LENGTH (expr);
3041 for (i = 0; i < n; i++)
3042 if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
3043 return true;
3044
3045 return false;
3046 }
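
/* E.g. a division 'x / y' with a possibly-zero Y, or a dereference
   '*p' (hypothetical operands), could trap, so gimplify_cond_expr
   below refuses to evaluate arms containing them unconditionally.  */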
3047
3048 /* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
3049 into
3050
3051 if (p)                  if (p)
3052   t1 = a;                 a;
3053 else          or        else
3054   t1 = b;                 b;
3055 t1;
3056
3057 The second form is used when *EXPR_P is of type void.
3058
3059 PRE_P points to the list where side effects that must happen before
3060 *EXPR_P should be stored. */
3061
3062 static enum gimplify_status
3063 gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
3064 {
3065 tree expr = *expr_p;
3066 tree type = TREE_TYPE (expr);
3067 location_t loc = EXPR_LOCATION (expr);
3068 tree tmp, arm1, arm2;
3069 enum gimplify_status ret;
3070 tree label_true, label_false, label_cont;
3071 bool have_then_clause_p, have_else_clause_p;
3072 gcond *cond_stmt;
3073 enum tree_code pred_code;
3074 gimple_seq seq = NULL;
3075
3076 /* If this COND_EXPR has a value, copy the values into a temporary within
3077 the arms. */
3078 if (!VOID_TYPE_P (type))
3079 {
3080 tree then_ = TREE_OPERAND (expr, 1), else_ = TREE_OPERAND (expr, 2);
3081 tree result;
3082
3083 /* If either an rvalue is ok or we do not require an lvalue, create the
3084 temporary. But we cannot do that if the type is addressable. */
3085 if (((fallback & fb_rvalue) || !(fallback & fb_lvalue))
3086 && !TREE_ADDRESSABLE (type))
3087 {
3088 if (gimplify_ctxp->allow_rhs_cond_expr
3089 /* If either branch has side effects or could trap, it can't be
3090 evaluated unconditionally. */
3091 && !TREE_SIDE_EFFECTS (then_)
3092 && !generic_expr_could_trap_p (then_)
3093 && !TREE_SIDE_EFFECTS (else_)
3094 && !generic_expr_could_trap_p (else_))
3095 return gimplify_pure_cond_expr (expr_p, pre_p);
3096
3097 tmp = create_tmp_var (type, "iftmp");
3098 result = tmp;
3099 }
3100
3101 /* Otherwise, only create and copy references to the values. */
3102 else
3103 {
3104 type = build_pointer_type (type);
3105
3106 if (!VOID_TYPE_P (TREE_TYPE (then_)))
3107 then_ = build_fold_addr_expr_loc (loc, then_);
3108
3109 if (!VOID_TYPE_P (TREE_TYPE (else_)))
3110 else_ = build_fold_addr_expr_loc (loc, else_);
3111
3112 expr
3113 = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_);
3114
3115 tmp = create_tmp_var (type, "iftmp");
3116 result = build_simple_mem_ref_loc (loc, tmp);
3117 }
3118
3119 /* Build the new then clause, `tmp = then_;'. But don't build the
3120 assignment if the value is void; in C++ it can be if it's a throw. */
3121 if (!VOID_TYPE_P (TREE_TYPE (then_)))
3122 TREE_OPERAND (expr, 1) = build2 (MODIFY_EXPR, type, tmp, then_);
3123
3124 /* Similarly, build the new else clause, `tmp = else_;'. */
3125 if (!VOID_TYPE_P (TREE_TYPE (else_)))
3126 TREE_OPERAND (expr, 2) = build2 (MODIFY_EXPR, type, tmp, else_);
3127
3128 TREE_TYPE (expr) = void_type_node;
3129 recalculate_side_effects (expr);
3130
3131 /* Move the COND_EXPR to the prequeue. */
3132 gimplify_stmt (&expr, pre_p);
3133
3134 *expr_p = result;
3135 return GS_ALL_DONE;
3136 }
3137
3138 /* Remove any COMPOUND_EXPR so the following cases will be caught. */
3139 STRIP_TYPE_NOPS (TREE_OPERAND (expr, 0));
3140 if (TREE_CODE (TREE_OPERAND (expr, 0)) == COMPOUND_EXPR)
3141 gimplify_compound_expr (&TREE_OPERAND (expr, 0), pre_p, true);
3142
3143 /* Make sure the condition has BOOLEAN_TYPE. */
3144 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
3145
3146 /* Break apart && and || conditions. */
3147 if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
3148 || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
3149 {
3150 expr = shortcut_cond_expr (expr);
3151
3152 if (expr != *expr_p)
3153 {
3154 *expr_p = expr;
3155
3156 /* We can't rely on gimplify_expr to re-gimplify the expanded
3157 form properly, as cleanups might cause the target labels to be
3158 wrapped in a TRY_FINALLY_EXPR. To prevent that, we need to
3159 set up a conditional context. */
3160 gimple_push_condition ();
3161 gimplify_stmt (expr_p, &seq);
3162 gimple_pop_condition (pre_p);
3163 gimple_seq_add_seq (pre_p, seq);
3164
3165 return GS_ALL_DONE;
3166 }
3167 }
3168
3169 /* Now do the normal gimplification. */
3170
3171 /* Gimplify condition. */
3172 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL, is_gimple_condexpr,
3173 fb_rvalue);
3174 if (ret == GS_ERROR)
3175 return GS_ERROR;
3176 gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);
3177
3178 gimple_push_condition ();
3179
3180 have_then_clause_p = have_else_clause_p = false;
3181 if (TREE_OPERAND (expr, 1) != NULL
3182 && TREE_CODE (TREE_OPERAND (expr, 1)) == GOTO_EXPR
3183 && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 1))) == LABEL_DECL
3184 && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 1)))
3185 == current_function_decl)
3186 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
3187 have different locations, otherwise we end up with incorrect
3188 location information on the branches. */
3189 && (optimize
3190 || !EXPR_HAS_LOCATION (expr)
3191 || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 1))
3192 || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 1))))
3193 {
3194 label_true = GOTO_DESTINATION (TREE_OPERAND (expr, 1));
3195 have_then_clause_p = true;
3196 }
3197 else
3198 label_true = create_artificial_label (UNKNOWN_LOCATION);
3199 if (TREE_OPERAND (expr, 2) != NULL
3200 && TREE_CODE (TREE_OPERAND (expr, 2)) == GOTO_EXPR
3201 && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 2))) == LABEL_DECL
3202 && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 2)))
3203 == current_function_decl)
3204 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
3205 have different locations, otherwise we end up with incorrect
3206 location information on the branches. */
3207 && (optimize
3208 || !EXPR_HAS_LOCATION (expr)
3209 || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 2))
3210 || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 2))))
3211 {
3212 label_false = GOTO_DESTINATION (TREE_OPERAND (expr, 2));
3213 have_else_clause_p = true;
3214 }
3215 else
3216 label_false = create_artificial_label (UNKNOWN_LOCATION);
3217
3218 gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
3219 &arm2);
3220 cond_stmt = gimple_build_cond (pred_code, arm1, arm2, label_true,
3221 label_false);
3222 gimple_set_no_warning (cond_stmt, TREE_NO_WARNING (COND_EXPR_COND (expr)));
3223 gimplify_seq_add_stmt (&seq, cond_stmt);
3224 gimple_stmt_iterator gsi = gsi_last (seq);
3225 maybe_fold_stmt (&gsi);
3226
3227 label_cont = NULL_TREE;
3228 if (!have_then_clause_p)
3229 {
3230 /* For if (...) {} else { code; } put label_true after
3231 the else block. */
3232 if (TREE_OPERAND (expr, 1) == NULL_TREE
3233 && !have_else_clause_p
3234 && TREE_OPERAND (expr, 2) != NULL_TREE)
3235 label_cont = label_true;
3236 else
3237 {
3238 gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
3239 have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
3240 /* For if (...) { code; } else {} or
3241 if (...) { code; } else goto label; or
3242 if (...) { code; return; } else { ... }
3243 label_cont isn't needed. */
3244 if (!have_else_clause_p
3245 && TREE_OPERAND (expr, 2) != NULL_TREE
3246 && gimple_seq_may_fallthru (seq))
3247 {
3248 gimple *g;
3249 label_cont = create_artificial_label (UNKNOWN_LOCATION);
3250
3251 g = gimple_build_goto (label_cont);
3252
3253 /* GIMPLE_COND's are very low level; they have embedded
3254 gotos. This particular embedded goto should not be marked
3255 with the location of the original COND_EXPR, as it would
3256 correspond to the COND_EXPR's condition, not the ELSE or the
3257 THEN arms. To avoid marking it with the wrong location, flag
3258 it as "no location". */
3259 gimple_set_do_not_emit_location (g);
3260
3261 gimplify_seq_add_stmt (&seq, g);
3262 }
3263 }
3264 }
3265 if (!have_else_clause_p)
3266 {
3267 gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
3268 have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
3269 }
3270 if (label_cont)
3271 gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));
3272
3273 gimple_pop_condition (pre_p);
3274 gimple_seq_add_seq (pre_p, seq);
3275
3276 if (ret == GS_ERROR)
3277 ; /* Do nothing. */
3278 else if (have_then_clause_p || have_else_clause_p)
3279 ret = GS_ALL_DONE;
3280 else
3281 {
3282 /* Both arms are empty; replace the COND_EXPR with its predicate. */
3283 expr = TREE_OPERAND (expr, 0);
3284 gimplify_stmt (&expr, pre_p);
3285 }
3286
3287 *expr_p = NULL;
3288 return ret;
3289 }
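
/* Worked example (names hypothetical): 'r = p ? f () : 0;' with an
   int-valued COND_EXPR gimplifies into roughly

     if (p) goto L_t; else goto L_f;
     L_t: iftmp.0 = f (); goto L_c;
     L_f: iftmp.0 = 0;
     L_c: r = iftmp.0;

   with IFTMP.0 the temporary created above and the labels
   artificial.  */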
3290
3291 /* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
3292 to be marked addressable.
3293
3294 We cannot rely on such an expression being directly markable if a temporary
3295 has been created by the gimplification. In this case, we create another
3296 temporary and initialize it with a copy, which will become a store after we
3297 mark it addressable. This can happen if the front-end passed us something
3298 that it could not mark addressable yet, like a Fortran pass-by-reference
3299 parameter (int) floatvar. */
3300
3301 static void
3302 prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p)
3303 {
3304 while (handled_component_p (*expr_p))
3305 expr_p = &TREE_OPERAND (*expr_p, 0);
3306 if (is_gimple_reg (*expr_p))
3307 {
3308 tree var = get_initialized_tmp_var (*expr_p, seq_p, NULL);
3309 DECL_GIMPLE_REG_P (var) = 0;
3310 *expr_p = var;
3311 }
3312 }
3313
3314 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
3315 a call to __builtin_memcpy. */
3316
3317 static enum gimplify_status
3318 gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
3319 gimple_seq *seq_p)
3320 {
3321 tree t, to, to_ptr, from, from_ptr;
3322 gcall *gs;
3323 location_t loc = EXPR_LOCATION (*expr_p);
3324
3325 to = TREE_OPERAND (*expr_p, 0);
3326 from = TREE_OPERAND (*expr_p, 1);
3327
3328 /* Mark the RHS addressable. Beware that it may not be possible to do so
3329 directly if a temporary has been created by the gimplification. */
3330 prepare_gimple_addressable (&from, seq_p);
3331
3332 mark_addressable (from);
3333 from_ptr = build_fold_addr_expr_loc (loc, from);
3334 gimplify_arg (&from_ptr, seq_p, loc);
3335
3336 mark_addressable (to);
3337 to_ptr = build_fold_addr_expr_loc (loc, to);
3338 gimplify_arg (&to_ptr, seq_p, loc);
3339
3340 t = builtin_decl_implicit (BUILT_IN_MEMCPY);
3341
3342 gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);
3343
3344 if (want_value)
3345 {
3346 /* tmp = memcpy() */
3347 t = create_tmp_var (TREE_TYPE (to_ptr));
3348 gimple_call_set_lhs (gs, t);
3349 gimplify_seq_add_stmt (seq_p, gs);
3350
3351 *expr_p = build_simple_mem_ref (t);
3352 return GS_ALL_DONE;
3353 }
3354
3355 gimplify_seq_add_stmt (seq_p, gs);
3356 *expr_p = NULL;
3357 return GS_ALL_DONE;
3358 }
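
/* Sketch of the result (names hypothetical): a block copy 's1 = s2;'
   of a large or variable-sized aggregate becomes

     __builtin_memcpy (&s1, &s2, SIZE);

   with SIZE taken from a WITH_SIZE_EXPR or the type size, both
   operands having been marked addressable above.  */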
3359
3360 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
3361 a call to __builtin_memset. In this case we know that the RHS is
3362 a CONSTRUCTOR with an empty element list. */
3363
3364 static enum gimplify_status
3365 gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
3366 gimple_seq *seq_p)
3367 {
3368 tree t, from, to, to_ptr;
3369 gcall *gs;
3370 location_t loc = EXPR_LOCATION (*expr_p);
3371
3372 /* Assert our assumptions, to abort instead of producing wrong code
3373 silently if they are not met. Beware that the RHS CONSTRUCTOR might
3374 not be immediately exposed. */
3375 from = TREE_OPERAND (*expr_p, 1);
3376 if (TREE_CODE (from) == WITH_SIZE_EXPR)
3377 from = TREE_OPERAND (from, 0);
3378
3379 gcc_assert (TREE_CODE (from) == CONSTRUCTOR
3380 && vec_safe_is_empty (CONSTRUCTOR_ELTS (from)));
3381
3382 /* Now proceed. */
3383 to = TREE_OPERAND (*expr_p, 0);
3384
3385 to_ptr = build_fold_addr_expr_loc (loc, to);
3386 gimplify_arg (&to_ptr, seq_p, loc);
3387 t = builtin_decl_implicit (BUILT_IN_MEMSET);
3388
3389 gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);
3390
3391 if (want_value)
3392 {
3393 /* tmp = memset() */
3394 t = create_tmp_var (TREE_TYPE (to_ptr));
3395 gimple_call_set_lhs (gs, t);
3396 gimplify_seq_add_stmt (seq_p, gs);
3397
3398 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
3399 return GS_ALL_DONE;
3400 }
3401
3402 gimplify_seq_add_stmt (seq_p, gs);
3403 *expr_p = NULL;
3404 return GS_ALL_DONE;
3405 }
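
/* Sketch (hypothetical): zeroing an aggregate through an empty
   CONSTRUCTOR, e.g. 's = (struct S) { };', becomes

     __builtin_memset (&s, 0, sizeof (struct S));

   the caller is responsible for having checked that a zero-fill is
   equivalent to the constructor.  */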
3406
3407 /* A subroutine of gimplify_init_ctor_preeval. Called via walk_tree,
3408 determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
3409 assignment. Return non-null if we detect a potential overlap. */
3410
3411 struct gimplify_init_ctor_preeval_data
3412 {
3413 /* The base decl of the lhs object. May be NULL, in which case we
3414 have to assume the lhs is indirect. */
3415 tree lhs_base_decl;
3416
3417 /* The alias set of the lhs object. */
3418 alias_set_type lhs_alias_set;
3419 };
3420
3421 static tree
3422 gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
3423 {
3424 struct gimplify_init_ctor_preeval_data *data
3425 = (struct gimplify_init_ctor_preeval_data *) xdata;
3426 tree t = *tp;
3427
3428 /* If we find the base object, obviously we have overlap. */
3429 if (data->lhs_base_decl == t)
3430 return t;
3431
3432 /* If the constructor component is indirect, determine if we have a
3433 potential overlap with the lhs. The only bits of information we
3434 have to go on at this point are addressability and alias sets. */
3435 if ((INDIRECT_REF_P (t)
3436 || TREE_CODE (t) == MEM_REF)
3437 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
3438 && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
3439 return t;
3440
3441 /* If the constructor component is a call, determine if it can hide a
3442 potential overlap with the lhs through an INDIRECT_REF like above.
3443 ??? Ugh - this is completely broken. In fact this whole analysis
3444 doesn't look conservative. */
3445 if (TREE_CODE (t) == CALL_EXPR)
3446 {
3447 tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));
3448
3449 for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
3450 if (POINTER_TYPE_P (TREE_VALUE (type))
3451 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
3452 && alias_sets_conflict_p (data->lhs_alias_set,
3453 get_alias_set
3454 (TREE_TYPE (TREE_VALUE (type)))))
3455 return t;
3456 }
3457
3458 if (IS_TYPE_OR_DECL_P (t))
3459 *walk_subtrees = 0;
3460 return NULL;
3461 }
3462
3463 /* A subroutine of gimplify_init_constructor. Pre-evaluate EXPR,
3464 force values that overlap with the lhs (as described by *DATA)
3465 into temporaries. */
3466
3467 static void
3468 gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3469 struct gimplify_init_ctor_preeval_data *data)
3470 {
3471 enum gimplify_status one;
3472
3473 /* If the value is constant, then there's nothing to pre-evaluate. */
3474 if (TREE_CONSTANT (*expr_p))
3475 {
3476 /* Ensure it does not have side effects, it might contain a reference to
3477 the object we're initializing. */
3478 gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
3479 return;
3480 }
3481
3482 /* If the type has non-trivial constructors, we can't pre-evaluate. */
3483 if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
3484 return;
3485
3486 /* Recurse for nested constructors. */
3487 if (TREE_CODE (*expr_p) == CONSTRUCTOR)
3488 {
3489 unsigned HOST_WIDE_INT ix;
3490 constructor_elt *ce;
3491 vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (*expr_p);
3492
3493 FOR_EACH_VEC_SAFE_ELT (v, ix, ce)
3494 gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);
3495
3496 return;
3497 }
3498
3499 /* If this is a variable sized type, we must remember the size. */
3500 maybe_with_size_expr (expr_p);
3501
3502 /* Gimplify the constructor element to something appropriate for the rhs
3503 of a MODIFY_EXPR. Given that we know the LHS is an aggregate, we know
3504 the gimplifier will consider this a store to memory. Doing this
3505 gimplification now means that we won't have to deal with complicated
3506 language-specific trees, nor trees like SAVE_EXPR that can induce
3507 exponential search behavior. */
3508 one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
3509 if (one == GS_ERROR)
3510 {
3511 *expr_p = NULL;
3512 return;
3513 }
3514
3515 /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
3516 with the lhs, since "a = { .x=a }" doesn't make sense. This will
3517 always be true for all scalars, since is_gimple_mem_rhs insists on a
3518 temporary variable for them. */
3519 if (DECL_P (*expr_p))
3520 return;
3521
3522 /* If this is of variable size, we have no choice but to assume it doesn't
3523 overlap since we can't make a temporary for it. */
3524 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
3525 return;
3526
3527 /* Otherwise, we must search for overlap ... */
3528 if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
3529 return;
3530
3531 /* ... and if found, force the value into a temporary. */
3532 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
3533 }
3534
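/* Illustrative sketch (editorial example, not from the original
   source): given

     struct pair { int x, y; } p = { 1, 2 };
     p = (struct pair) { p.y, p.x };

   both elements overlap the lhs P, so the preevaluation above first
   forces them into temporaries before any stores happen, roughly

     D.1 = p.y;
     D.2 = p.x;
     p.x = D.1;
     p.y = D.2;

   The temporary names are hypothetical; only the ordering matters.  */
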
3535 /* A subroutine of gimplify_init_ctor_eval. Create a loop for
3536 a RANGE_EXPR in a CONSTRUCTOR for an array.
3537
3538 var = lower;
3539 loop_entry:
3540 object[var] = value;
3541 if (var == upper)
3542 goto loop_exit;
3543 var = var + 1;
3544 goto loop_entry;
3545 loop_exit:
3546
3547 We increment var _after_ the loop exit check because we might otherwise
3548 fail if upper == TYPE_MAX_VALUE (TREE_TYPE (upper)).
3549
3550 Note that we never have to deal with SAVE_EXPRs here, because this has
3551 already been taken care of for us, in gimplify_init_ctor_preeval(). */
3552
3553 static void gimplify_init_ctor_eval (tree, vec<constructor_elt, va_gc> *,
3554 gimple_seq *, bool);
3555
3556 static void
3557 gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
3558 tree value, tree array_elt_type,
3559 gimple_seq *pre_p, bool cleared)
3560 {
3561 tree loop_entry_label, loop_exit_label, fall_thru_label;
3562 tree var, var_type, cref, tmp;
3563
3564 loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
3565 loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
3566 fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);
3567
3568 /* Create and initialize the index variable. */
3569 var_type = TREE_TYPE (upper);
3570 var = create_tmp_var (var_type);
3571 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));
3572
3573 /* Add the loop entry label. */
3574 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));
3575
3576 /* Build the reference. */
3577 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
3578 var, NULL_TREE, NULL_TREE);
3579
3580 /* If the value is itself a constructor, call gimplify_init_ctor_eval
3581 to do the store. Otherwise just assign the value to the reference. */
3582
3583 if (TREE_CODE (value) == CONSTRUCTOR)
3584 /* NB we might have to call ourselves recursively through
3585 gimplify_init_ctor_eval if the value is a constructor. */
3586 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
3587 pre_p, cleared);
3588 else
3589 gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));
3590
3591 /* We exit the loop when the index var is equal to the upper bound. */
3592 gimplify_seq_add_stmt (pre_p,
3593 gimple_build_cond (EQ_EXPR, var, upper,
3594 loop_exit_label, fall_thru_label));
3595
3596 gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));
3597
3598 /* Otherwise, increment the index var... */
3599 tmp = build2 (PLUS_EXPR, var_type, var,
3600 fold_convert (var_type, integer_one_node));
3601 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));
3602
3603 /* ...and jump back to the loop entry. */
3604 gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));
3605
3606 /* Add the loop exit label. */
3607 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
3608 }
3609
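/* Illustrative sketch (editorial example, assuming GNU range
   designators): a constructor such as

     int a[8] = { [2 ... 5] = 7 };

   reaches the function above with lower == 2, upper == 5 and
   value == 7, and is lowered to the loop shown in the comment before
   it, storing a[var] = 7 for var = 2 .. 5.  */
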
3610 /* Return true if FDECL is a FIELD_DECL that is zero sized. */
3611
3612 static bool
3613 zero_sized_field_decl (const_tree fdecl)
3614 {
3615 if (TREE_CODE (fdecl) == FIELD_DECL && DECL_SIZE (fdecl)
3616 && integer_zerop (DECL_SIZE (fdecl)))
3617 return true;
3618 return false;
3619 }
3620
3621 /* Return true if TYPE is zero sized. */
3622
3623 static bool
3624 zero_sized_type (const_tree type)
3625 {
3626 if (AGGREGATE_TYPE_P (type) && TYPE_SIZE (type)
3627 && integer_zerop (TYPE_SIZE (type)))
3628 return true;
3629 return false;
3630 }
3631
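/* Illustrative sketch (editorial example, assuming GCC's zero-length
   array extension): given

     struct tail { int n; char data[0]; };

   the field DATA satisfies zero_sized_field_decl, and a type such as

     struct { char data[0]; }

   satisfies zero_sized_type.  */
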
3632 /* A subroutine of gimplify_init_constructor. Generate individual
3633 MODIFY_EXPRs for a CONSTRUCTOR. OBJECT is the LHS against which the
3634 assignments should happen. ELTS is the CONSTRUCTOR_ELTS of the
3635 CONSTRUCTOR. CLEARED is true if the entire LHS object has been
3636 zeroed first. */
3637
3638 static void
3639 gimplify_init_ctor_eval (tree object, vec<constructor_elt, va_gc> *elts,
3640 gimple_seq *pre_p, bool cleared)
3641 {
3642 tree array_elt_type = NULL;
3643 unsigned HOST_WIDE_INT ix;
3644 tree purpose, value;
3645
3646 if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
3647 array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));
3648
3649 FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
3650 {
3651 tree cref;
3652
3653 /* NULL values are created above for gimplification errors. */
3654 if (value == NULL)
3655 continue;
3656
3657 if (cleared && initializer_zerop (value))
3658 continue;
3659
3660 /* ??? Here's to hoping the front end fills in all of the indices,
3661 so we don't have to figure out what's missing ourselves. */
3662 gcc_assert (purpose);
3663
3664 /* Skip zero-sized fields, unless value has side-effects. This can
3665 happen with calls to functions returning a zero-sized type, which
3666 we shouldn't discard. As a number of downstream passes don't
3667 expect sets of zero-sized fields, we rely on the gimplification of
3668 the MODIFY_EXPR we make below to drop the assignment statement. */
3669 if (! TREE_SIDE_EFFECTS (value) && zero_sized_field_decl (purpose))
3670 continue;
3671
3672 /* If we have a RANGE_EXPR, we have to build a loop to assign the
3673 whole range. */
3674 if (TREE_CODE (purpose) == RANGE_EXPR)
3675 {
3676 tree lower = TREE_OPERAND (purpose, 0);
3677 tree upper = TREE_OPERAND (purpose, 1);
3678
3679 /* If the lower bound is equal to upper, just treat it as if
3680 upper was the index. */
3681 if (simple_cst_equal (lower, upper))
3682 purpose = upper;
3683 else
3684 {
3685 gimplify_init_ctor_eval_range (object, lower, upper, value,
3686 array_elt_type, pre_p, cleared);
3687 continue;
3688 }
3689 }
3690
3691 if (array_elt_type)
3692 {
3693 /* Do not use bitsizetype for ARRAY_REF indices. */
3694 if (TYPE_DOMAIN (TREE_TYPE (object)))
3695 purpose
3696 = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
3697 purpose);
3698 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
3699 purpose, NULL_TREE, NULL_TREE);
3700 }
3701 else
3702 {
3703 gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
3704 cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
3705 unshare_expr (object), purpose, NULL_TREE);
3706 }
3707
3708 if (TREE_CODE (value) == CONSTRUCTOR
3709 && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
3710 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
3711 pre_p, cleared);
3712 else
3713 {
3714 tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
3715 gimplify_and_add (init, pre_p);
3716 ggc_free (init);
3717 }
3718 }
3719 }
3720
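/* Illustrative sketch (editorial example, not from the original
   source): for

     struct s { int a; int b[2]; } x = { 1, { 2, 3 } };

   gimplify_init_ctor_eval emits, with OBJECT == x and CLEARED false,

     x.a = 1;
     x.b[0] = 2;
     x.b[1] = 3;

   the nested constructor being handled by the recursive call.  */
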
3721 /* Return the appropriate RHS predicate for this LHS. */
3722
3723 gimple_predicate
3724 rhs_predicate_for (tree lhs)
3725 {
3726 if (is_gimple_reg (lhs))
3727 return is_gimple_reg_rhs_or_call;
3728 else
3729 return is_gimple_mem_rhs_or_call;
3730 }
3731
3732 /* Gimplify a C99 compound literal expression. This just means adding
3733 the DECL_EXPR before the current statement and using its anonymous
3734 decl instead. */
3735
3736 static enum gimplify_status
3737 gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p,
3738 bool (*gimple_test_f) (tree),
3739 fallback_t fallback)
3740 {
3741 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
3742 tree decl = DECL_EXPR_DECL (decl_s);
3743 tree init = DECL_INITIAL (decl);
3744 /* Mark the decl as addressable if the compound literal
3745 expression is addressable now, otherwise it is marked too late
3746 after we gimplify the initialization expression. */
3747 if (TREE_ADDRESSABLE (*expr_p))
3748 TREE_ADDRESSABLE (decl) = 1;
3749 /* Otherwise, if we don't need an lvalue and have a literal, directly
3750 substitute it. Check if it matches the gimple predicate, as
3751 otherwise we'd generate a new temporary, and we can as well just
3752 use the decl we already have. */
3753 else if (!TREE_ADDRESSABLE (decl)
3754 && init
3755 && (fallback & fb_lvalue) == 0
3756 && gimple_test_f (init))
3757 {
3758 *expr_p = init;
3759 return GS_OK;
3760 }
3761
3762 /* Preliminarily mark non-addressed complex variables as eligible
3763 for promotion to gimple registers. We'll transform their uses
3764 as we find them. */
3765 if ((TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE
3766 || TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE)
3767 && !TREE_THIS_VOLATILE (decl)
3768 && !needs_to_live_in_memory (decl))
3769 DECL_GIMPLE_REG_P (decl) = 1;
3770
3771 /* If the decl is not addressable, then it is being used in some
3772 expression or on the right hand side of a statement, and it can
3773 be put into a readonly data section. */
3774 if (!TREE_ADDRESSABLE (decl) && (fallback & fb_lvalue) == 0)
3775 TREE_READONLY (decl) = 1;
3776
3777 /* This decl isn't mentioned in the enclosing block, so add it to the
3778 list of temps. FIXME it seems a bit of a kludge to say that
3779 anonymous artificial vars aren't pushed, but everything else is. */
3780 if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
3781 gimple_add_tmp_var (decl);
3782
3783 gimplify_and_add (decl_s, pre_p);
3784 *expr_p = decl;
3785 return GS_OK;
3786 }
3787
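/* Illustrative sketch (editorial example; the decl name D is
   hypothetical, since the real decl is anonymous): a C99 compound
   literal such as

     int *p = (int []) { 1, 2, 3 };

   is rewritten so the anonymous decl is declared and initialized
   first and then used in place of the literal, roughly

     int D[3] = { 1, 2, 3 };
     int *p = D;  */
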
3788 /* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
3789 return a new CONSTRUCTOR if something changed. */
3790
3791 static tree
3792 optimize_compound_literals_in_ctor (tree orig_ctor)
3793 {
3794 tree ctor = orig_ctor;
3795 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (ctor);
3796 unsigned int idx, num = vec_safe_length (elts);
3797
3798 for (idx = 0; idx < num; idx++)
3799 {
3800 tree value = (*elts)[idx].value;
3801 tree newval = value;
3802 if (TREE_CODE (value) == CONSTRUCTOR)
3803 newval = optimize_compound_literals_in_ctor (value);
3804 else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
3805 {
3806 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
3807 tree decl = DECL_EXPR_DECL (decl_s);
3808 tree init = DECL_INITIAL (decl);
3809
3810 if (!TREE_ADDRESSABLE (value)
3811 && !TREE_ADDRESSABLE (decl)
3812 && init
3813 && TREE_CODE (init) == CONSTRUCTOR)
3814 newval = optimize_compound_literals_in_ctor (init);
3815 }
3816 if (newval == value)
3817 continue;
3818
3819 if (ctor == orig_ctor)
3820 {
3821 ctor = copy_node (orig_ctor);
3822 CONSTRUCTOR_ELTS (ctor) = vec_safe_copy (elts);
3823 elts = CONSTRUCTOR_ELTS (ctor);
3824 }
3825 (*elts)[idx].value = newval;
3826 }
3827 return ctor;
3828 }
3829
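/* Illustrative sketch (editorial example, not from the original
   source): an embedded compound literal as in

     struct t x = { .inner = (struct s) { 1, 2 } };

   is flattened by the function above into the equivalent

     struct t x = { .inner = { 1, 2 } };

   provided neither the literal nor its decl has had its address
   taken.  */
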
3830 /* A subroutine of gimplify_modify_expr. Break out elements of a
3831 CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
3832
3833 Note that we still need to clear any elements that don't have explicit
3834 initializers, so if not all elements are initialized we keep the
3835 original MODIFY_EXPR, we just remove all of the constructor elements.
3836
3837 If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
3838 GS_ERROR if we would have to create a temporary when gimplifying
3839 this constructor. Otherwise, return GS_OK.
3840
3841 If NOTIFY_TEMP_CREATION is false, just do the gimplification. */
3842
3843 static enum gimplify_status
3844 gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3845 bool want_value, bool notify_temp_creation)
3846 {
3847 tree object, ctor, type;
3848 enum gimplify_status ret;
3849 vec<constructor_elt, va_gc> *elts;
3850
3851 gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);
3852
3853 if (!notify_temp_creation)
3854 {
3855 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
3856 is_gimple_lvalue, fb_lvalue);
3857 if (ret == GS_ERROR)
3858 return ret;
3859 }
3860
3861 object = TREE_OPERAND (*expr_p, 0);
3862 ctor = TREE_OPERAND (*expr_p, 1) =
3863 optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
3864 type = TREE_TYPE (ctor);
3865 elts = CONSTRUCTOR_ELTS (ctor);
3866 ret = GS_ALL_DONE;
3867
3868 switch (TREE_CODE (type))
3869 {
3870 case RECORD_TYPE:
3871 case UNION_TYPE:
3872 case QUAL_UNION_TYPE:
3873 case ARRAY_TYPE:
3874 {
3875 struct gimplify_init_ctor_preeval_data preeval_data;
3876 HOST_WIDE_INT num_ctor_elements, num_nonzero_elements;
3877 bool cleared, complete_p, valid_const_initializer;
3878
3879 /* Aggregate types must lower constructors to initialization of
3880 individual elements. The exception is that a CONSTRUCTOR node
3881 with no elements indicates zero-initialization of the whole. */
3882 if (vec_safe_is_empty (elts))
3883 {
3884 if (notify_temp_creation)
3885 return GS_OK;
3886 break;
3887 }
3888
3889 /* Fetch information about the constructor to direct later processing.
3890 We might want to make static versions of it in various cases, and
3891 can only do so if it is known to be a valid constant initializer.
3892 valid_const_initializer
3893 = categorize_ctor_elements (ctor, &num_nonzero_elements,
3894 &num_ctor_elements, &complete_p);
3895
3896 /* If a const aggregate variable is being initialized, then it
3897 should never be a loss to promote the variable to static. */
3898 if (valid_const_initializer
3899 && num_nonzero_elements > 1
3900 && TREE_READONLY (object)
3901 && TREE_CODE (object) == VAR_DECL
3902 && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object)))
3903 {
3904 if (notify_temp_creation)
3905 return GS_ERROR;
3906 DECL_INITIAL (object) = ctor;
3907 TREE_STATIC (object) = 1;
3908 if (!DECL_NAME (object))
3909 DECL_NAME (object) = create_tmp_var_name ("C");
3910 walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);
3911
3912 /* ??? C++ doesn't automatically append a .<number> to the
3913 assembler name, and even when it does, it looks at FE private
3914 data structures to figure out what that number should be,
3915 which are not set for this variable. I suppose this is
3916 important for local statics for inline functions, which aren't
3917 "local" in the object file sense. So in order to get a unique
3918 TU-local symbol, we must invoke the lhd version now. */
3919 lhd_set_decl_assembler_name (object);
3920
3921 *expr_p = NULL_TREE;
3922 break;
3923 }
3924
3925 /* If there are "lots" of initialized elements, even discounting
3926 those that are not address constants (and thus *must* be
3927 computed at runtime), then partition the constructor into
3928 constant and non-constant parts. Block copy the constant
3929 parts in, then generate code for the non-constant parts. */
3930 /* TODO. There's code in cp/typeck.c to do this. */
3931
3932 if (int_size_in_bytes (TREE_TYPE (ctor)) < 0)
3933 /* store_constructor will ignore the clearing of variable-sized
3934 objects. Initializers for such objects must explicitly set
3935 every field that needs to be set. */
3936 cleared = false;
3937 else if (!complete_p && !CONSTRUCTOR_NO_CLEARING (ctor))
3938 /* If the constructor isn't complete, clear the whole object
3939 beforehand, unless CONSTRUCTOR_NO_CLEARING is set on it.
3940
3941 ??? This ought not to be needed. For any element not present
3942 in the initializer, we should simply set it to zero. Except
3943 we'd need to *find* the elements that are not present, and that
3944 requires trickery to avoid quadratic compile-time behavior in
3945 large cases or excessive memory use in small cases. */
3946 cleared = true;
3947 else if (num_ctor_elements - num_nonzero_elements
3948 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
3949 && num_nonzero_elements < num_ctor_elements / 4)
3950 /* If there are "lots" of zeros, it's more efficient to clear
3951 the memory and then set the nonzero elements. */
3952 cleared = true;
3953 else
3954 cleared = false;
3955
3956 /* If there are "lots" of initialized elements, and all of them
3957 are valid address constants, then the entire initializer can
3958 be dropped to memory, and then memcpy'd out. Don't do this
3959 for sparse arrays, though, as it's more efficient to follow
3960 the standard CONSTRUCTOR behavior of memset followed by
3961 individual element initialization. Also don't do this for small
3962 all-zero initializers (which aren't big enough to merit
3963 clearing), and don't try to make bitwise copies of
3964 TREE_ADDRESSABLE types.
3965
3966 We cannot apply this transformation when compiling a chkp static
3967 initializer because creating the initializer image in memory
3968 would require static initialization of its bounds. That would
3969 result in another gimplification of a similar initializer and we
3970 could fall into an infinite loop. */
3971 if (valid_const_initializer
3972 && !(cleared || num_nonzero_elements == 0)
3973 && !TREE_ADDRESSABLE (type)
3974 && (!current_function_decl
3975 || !lookup_attribute ("chkp ctor",
3976 DECL_ATTRIBUTES (current_function_decl))))
3977 {
3978 HOST_WIDE_INT size = int_size_in_bytes (type);
3979 unsigned int align;
3980
3981 /* ??? We can still get unbounded array types, at least
3982 from the C++ front end. This seems wrong, but attempt
3983 to work around it for now. */
3984 if (size < 0)
3985 {
3986 size = int_size_in_bytes (TREE_TYPE (object));
3987 if (size >= 0)
3988 TREE_TYPE (ctor) = type = TREE_TYPE (object);
3989 }
3990
3991 /* Find the maximum alignment we can assume for the object. */
3992 /* ??? Make use of DECL_OFFSET_ALIGN. */
3993 if (DECL_P (object))
3994 align = DECL_ALIGN (object);
3995 else
3996 align = TYPE_ALIGN (type);
3997
3998 /* Do a block move either if the size is so small as to make
3999 each individual move a sub-unit move on average, or if it
4000 is so large as to make individual moves inefficient. */
4001 if (size > 0
4002 && num_nonzero_elements > 1
4003 && (size < num_nonzero_elements
4004 || !can_move_by_pieces (size, align)))
4005 {
4006 if (notify_temp_creation)
4007 return GS_ERROR;
4008
4009 walk_tree (&ctor, force_labels_r, NULL, NULL);
4010 ctor = tree_output_constant_def (ctor);
4011 if (!useless_type_conversion_p (type, TREE_TYPE (ctor)))
4012 ctor = build1 (VIEW_CONVERT_EXPR, type, ctor);
4013 TREE_OPERAND (*expr_p, 1) = ctor;
4014
4015 /* This is no longer an assignment of a CONSTRUCTOR, but
4016 we still may have processing to do on the LHS. So
4017 pretend we didn't do anything here to let that happen. */
4018 return GS_UNHANDLED;
4019 }
4020 }
4021
4022 /* If the target is volatile, we have non-zero elements, and more than
4023 one field to assign, then initialize the target from a temporary. */
4024 if (TREE_THIS_VOLATILE (object)
4025 && !TREE_ADDRESSABLE (type)
4026 && num_nonzero_elements > 0
4027 && vec_safe_length (elts) > 1)
4028 {
4029 tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type));
4030 TREE_OPERAND (*expr_p, 0) = temp;
4031 *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
4032 *expr_p,
4033 build2 (MODIFY_EXPR, void_type_node,
4034 object, temp));
4035 return GS_OK;
4036 }
4037
4038 if (notify_temp_creation)
4039 return GS_OK;
4040
4041 /* If there are nonzero elements and if needed, pre-evaluate to capture
4042 elements overlapping with the lhs into temporaries. We must do this
4043 before clearing to fetch the values before they are zeroed-out. */
4044 if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR)
4045 {
4046 preeval_data.lhs_base_decl = get_base_address (object);
4047 if (!DECL_P (preeval_data.lhs_base_decl))
4048 preeval_data.lhs_base_decl = NULL;
4049 preeval_data.lhs_alias_set = get_alias_set (object);
4050
4051 gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
4052 pre_p, post_p, &preeval_data);
4053 }
4054
4055 bool ctor_has_side_effects_p
4056 = TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p, 1));
4057
4058 if (cleared)
4059 {
4060 /* Zap the CONSTRUCTOR element list, which simplifies this case.
4061 Note that we still have to gimplify, in order to handle the
4062 case of variable sized types. Avoid shared tree structures. */
4063 CONSTRUCTOR_ELTS (ctor) = NULL;
4064 TREE_SIDE_EFFECTS (ctor) = 0;
4065 object = unshare_expr (object);
4066 gimplify_stmt (expr_p, pre_p);
4067 }
4068
4069 /* If we have not block cleared the object, or if there are nonzero
4070 elements in the constructor, or if the constructor has side effects,
4071 add assignments to the individual scalar fields of the object. */
4072 if (!cleared
4073 || num_nonzero_elements > 0
4074 || ctor_has_side_effects_p)
4075 gimplify_init_ctor_eval (object, elts, pre_p, cleared);
4076
4077 *expr_p = NULL_TREE;
4078 }
4079 break;
4080
4081 case COMPLEX_TYPE:
4082 {
4083 tree r, i;
4084
4085 if (notify_temp_creation)
4086 return GS_OK;
4087
4088 /* Extract the real and imaginary parts out of the ctor. */
4089 gcc_assert (elts->length () == 2);
4090 r = (*elts)[0].value;
4091 i = (*elts)[1].value;
4092 if (r == NULL || i == NULL)
4093 {
4094 tree zero = build_zero_cst (TREE_TYPE (type));
4095 if (r == NULL)
4096 r = zero;
4097 if (i == NULL)
4098 i = zero;
4099 }
4100
4101 /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
4102 represent creation of a complex value. */
4103 if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
4104 {
4105 ctor = build_complex (type, r, i);
4106 TREE_OPERAND (*expr_p, 1) = ctor;
4107 }
4108 else
4109 {
4110 ctor = build2 (COMPLEX_EXPR, type, r, i);
4111 TREE_OPERAND (*expr_p, 1) = ctor;
4112 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
4113 pre_p,
4114 post_p,
4115 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
4116 fb_rvalue);
4117 }
4118 }
4119 break;
4120
4121 case VECTOR_TYPE:
4122 {
4123 unsigned HOST_WIDE_INT ix;
4124 constructor_elt *ce;
4125
4126 if (notify_temp_creation)
4127 return GS_OK;
4128
4129 /* Go ahead and simplify constant constructors to VECTOR_CST. */
4130 if (TREE_CONSTANT (ctor))
4131 {
4132 bool constant_p = true;
4133 tree value;
4134
4135 /* Even when ctor is constant, it might contain non-*_CST
4136 elements, such as addresses or trapping values like
4137 1.0/0.0 - 1.0/0.0. Such expressions don't belong
4138 in VECTOR_CST nodes. */
4139 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
4140 if (!CONSTANT_CLASS_P (value))
4141 {
4142 constant_p = false;
4143 break;
4144 }
4145
4146 if (constant_p)
4147 {
4148 TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
4149 break;
4150 }
4151
4152 TREE_CONSTANT (ctor) = 0;
4153 }
4154
4155 /* Vector types use CONSTRUCTOR all the way through gimple
4156 compilation as a general initializer. */
4157 FOR_EACH_VEC_SAFE_ELT (elts, ix, ce)
4158 {
4159 enum gimplify_status tret;
4160 tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
4161 fb_rvalue);
4162 if (tret == GS_ERROR)
4163 ret = GS_ERROR;
4164 }
4165 if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
4166 TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
4167 }
4168 break;
4169
4170 default:
4171 /* So how did we get a CONSTRUCTOR for a scalar type? */
4172 gcc_unreachable ();
4173 }
4174
4175 if (ret == GS_ERROR)
4176 return GS_ERROR;
4177 else if (want_value)
4178 {
4179 *expr_p = object;
4180 return GS_OK;
4181 }
4182 else
4183 {
4184 /* If we have gimplified both sides of the initializer but have
4185 not emitted an assignment, do so now. */
4186 if (*expr_p)
4187 {
4188 tree lhs = TREE_OPERAND (*expr_p, 0);
4189 tree rhs = TREE_OPERAND (*expr_p, 1);
4190 gassign *init = gimple_build_assign (lhs, rhs);
4191 gimplify_seq_add_stmt (pre_p, init);
4192 *expr_p = NULL;
4193 }
4194
4195 return GS_ALL_DONE;
4196 }
4197 }
4198
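/* Illustrative sketch (editorial example, not from the original
   source) of the clearing decision above: for

     int a[100] = { [3] = 7 };

   the constructor covers only one of the 100 elements, so the whole
   object is cleared first and only the nonzero element is assigned,
   roughly

     a = {};
     a[3] = 7;

   whereas a mostly-nonzero constant aggregate would instead be
   promoted to a static initializer or block-copied from one.  */
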
4199 /* Given a pointer value OP0, return a simplified version of an
4200 indirection through OP0, or NULL_TREE if no simplification is
4201 possible. This may only be applied to a rhs of an expression.
4202 Note that the resulting type may be different from the type pointed
4203 to in the sense that it is still compatible from the langhooks
4204 point of view. */
4205
4206 static tree
4207 gimple_fold_indirect_ref_rhs (tree t)
4208 {
4209 return gimple_fold_indirect_ref (t);
4210 }
4211
4212 /* Subroutine of gimplify_modify_expr to do simplifications of
4213 MODIFY_EXPRs based on the code of the RHS. We loop for as long as
4214 something changes. */
4215
4216 static enum gimplify_status
4217 gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
4218 gimple_seq *pre_p, gimple_seq *post_p,
4219 bool want_value)
4220 {
4221 enum gimplify_status ret = GS_UNHANDLED;
4222 bool changed;
4223
4224 do
4225 {
4226 changed = false;
4227 switch (TREE_CODE (*from_p))
4228 {
4229 case VAR_DECL:
4230 /* If we're assigning from a read-only variable initialized with
4231 a constructor, do the direct assignment from the constructor,
4232 but only if neither source nor target is volatile, since this
4233 latter assignment might end up being done on a per-field basis. */
4234 if (DECL_INITIAL (*from_p)
4235 && TREE_READONLY (*from_p)
4236 && !TREE_THIS_VOLATILE (*from_p)
4237 && !TREE_THIS_VOLATILE (*to_p)
4238 && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR)
4239 {
4240 tree old_from = *from_p;
4241 enum gimplify_status subret;
4242
4243 /* Move the constructor into the RHS. */
4244 *from_p = unshare_expr (DECL_INITIAL (*from_p));
4245
4246 /* Let's see if gimplify_init_constructor will need to put
4247 it in memory. */
4248 subret = gimplify_init_constructor (expr_p, NULL, NULL,
4249 false, true);
4250 if (subret == GS_ERROR)
4251 {
4252 /* If so, revert the change. */
4253 *from_p = old_from;
4254 }
4255 else
4256 {
4257 ret = GS_OK;
4258 changed = true;
4259 }
4260 }
4261 break;
4262 case INDIRECT_REF:
4263 {
4264 /* If we have code like
4265
4266 *(const A*)(A*)&x
4267
4268 where the type of "x" is a (possibly cv-qualified) variant
4269 of "A", treat the entire expression as identical to "x".
4270 This kind of code arises in C++ when an object is bound
4271 to a const reference, and if "x" is a TARGET_EXPR we want
4272 to take advantage of the optimization below. */
4273 bool volatile_p = TREE_THIS_VOLATILE (*from_p);
4274 tree t = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
4275 if (t)
4276 {
4277 if (TREE_THIS_VOLATILE (t) != volatile_p)
4278 {
4279 if (DECL_P (t))
4280 t = build_simple_mem_ref_loc (EXPR_LOCATION (*from_p),
4281 build_fold_addr_expr (t));
4282 if (REFERENCE_CLASS_P (t))
4283 TREE_THIS_VOLATILE (t) = volatile_p;
4284 }
4285 *from_p = t;
4286 ret = GS_OK;
4287 changed = true;
4288 }
4289 break;
4290 }
4291
4292 case TARGET_EXPR:
4293 {
4294 /* If we are initializing something from a TARGET_EXPR, strip the
4295 TARGET_EXPR and initialize it directly, if possible. This can't
4296 be done if the initializer is void, since that implies that the
4297 temporary is set in some non-trivial way.
4298
4299 ??? What about code that pulls out the temp and uses it
4300 elsewhere? I think that such code never uses the TARGET_EXPR as
4301 an initializer. If I'm wrong, we'll die because the temp won't
4302 have any RTL. In that case, I guess we'll need to replace
4303 references somehow. */
4304 tree init = TARGET_EXPR_INITIAL (*from_p);
4305
4306 if (init
4307 && !VOID_TYPE_P (TREE_TYPE (init)))
4308 {
4309 *from_p = init;
4310 ret = GS_OK;
4311 changed = true;
4312 }
4313 }
4314 break;
4315
4316 case COMPOUND_EXPR:
4317 /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
4318 caught. */
4319 gimplify_compound_expr (from_p, pre_p, true);
4320 ret = GS_OK;
4321 changed = true;
4322 break;
4323
4324 case CONSTRUCTOR:
4325 /* If we already made some changes, let the front end have a
4326 crack at this before we break it down. */
4327 if (ret != GS_UNHANDLED)
4328 break;
4329 /* If we're initializing from a CONSTRUCTOR, break this into
4330 individual MODIFY_EXPRs. */
4331 return gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
4332 false);
4333
4334 case COND_EXPR:
4335 /* If we're assigning to a non-register type, push the assignment
4336 down into the branches. This is mandatory for ADDRESSABLE types,
4337 since we cannot generate temporaries for such, but it saves a
4338 copy in other cases as well. */
4339 if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
4340 {
4341 /* This code should mirror the code in gimplify_cond_expr. */
4342 enum tree_code code = TREE_CODE (*expr_p);
4343 tree cond = *from_p;
4344 tree result = *to_p;
4345
4346 ret = gimplify_expr (&result, pre_p, post_p,
4347 is_gimple_lvalue, fb_lvalue);
4348 if (ret != GS_ERROR)
4349 ret = GS_OK;
4350
4351 if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
4352 TREE_OPERAND (cond, 1)
4353 = build2 (code, void_type_node, result,
4354 TREE_OPERAND (cond, 1));
4355 if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
4356 TREE_OPERAND (cond, 2)
4357 = build2 (code, void_type_node, unshare_expr (result),
4358 TREE_OPERAND (cond, 2));
4359
4360 TREE_TYPE (cond) = void_type_node;
4361 recalculate_side_effects (cond);
4362
4363 if (want_value)
4364 {
4365 gimplify_and_add (cond, pre_p);
4366 *expr_p = unshare_expr (result);
4367 }
4368 else
4369 *expr_p = cond;
4370 return ret;
4371 }
4372 break;
4373
4374 case CALL_EXPR:
4375 /* For calls that return in memory, give *to_p as the CALL_EXPR's
4376 return slot so that we don't generate a temporary. */
4377 if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
4378 && aggregate_value_p (*from_p, *from_p))
4379 {
4380 bool use_target;
4381
4382 if (!(rhs_predicate_for (*to_p))(*from_p))
4383 /* If we need a temporary, *to_p isn't accurate. */
4384 use_target = false;
4385 /* It's OK to use the return slot directly unless it's an NRV. */
4386 else if (TREE_CODE (*to_p) == RESULT_DECL
4387 && DECL_NAME (*to_p) == NULL_TREE
4388 && needs_to_live_in_memory (*to_p))
4389 use_target = true;
4390 else if (is_gimple_reg_type (TREE_TYPE (*to_p))
4391 || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
4392 /* Don't force regs into memory. */
4393 use_target = false;
4394 else if (TREE_CODE (*expr_p) == INIT_EXPR)
4395 /* It's OK to use the target directly if it's being
4396 initialized. */
4397 use_target = true;
4398 else if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (*to_p)))
4399 != INTEGER_CST)
4400 /* Always use the target, and thus the return slot optimization,
4401 for variable-sized types. GIMPLE cannot deal with a
4402 variable-sized assignment embedded in a call statement. */
4403 use_target = true;
4404 else if (TREE_CODE (*to_p) != SSA_NAME
4405 && (!is_gimple_variable (*to_p)
4406 || needs_to_live_in_memory (*to_p)))
4407 /* Don't use the original target if it's already addressable;
4408 if its address escapes, and the called function uses the
4409 NRV optimization, a conforming program could see *to_p
4410 change before the called function returns; see c++/19317.
4411 When optimizing, the return_slot pass marks more functions
4412 as safe after we have escape info. */
4413 use_target = false;
4414 else
4415 use_target = true;
4416
4417 if (use_target)
4418 {
4419 CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
4420 mark_addressable (*to_p);
4421 }
4422 }
4423 break;
4424
4425 case WITH_SIZE_EXPR:
4426 /* Likewise for calls that return an aggregate of non-constant size,
4427 since we would not be able to generate a temporary at all. */
4428 if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR)
4429 {
4430 *from_p = TREE_OPERAND (*from_p, 0);
4431 /* We don't change ret in this case because the
4432 WITH_SIZE_EXPR might have been added in
4433 gimplify_modify_expr, so returning GS_OK would lead to an
4434 infinite loop. */
4435 changed = true;
4436 }
4437 break;
4438
4439 /* If we're initializing from a container, push the initialization
4440 inside it. */
4441 case CLEANUP_POINT_EXPR:
4442 case BIND_EXPR:
4443 case STATEMENT_LIST:
4444 {
4445 tree wrap = *from_p;
4446 tree t;
4447
4448 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
4449 fb_lvalue);
4450 if (ret != GS_ERROR)
4451 ret = GS_OK;
4452
4453 t = voidify_wrapper_expr (wrap, *expr_p);
4454 gcc_assert (t == *expr_p);
4455
4456 if (want_value)
4457 {
4458 gimplify_and_add (wrap, pre_p);
4459 *expr_p = unshare_expr (*to_p);
4460 }
4461 else
4462 *expr_p = wrap;
4463 return GS_OK;
4464 }
4465
4466 case COMPOUND_LITERAL_EXPR:
4467 {
4468 tree complit = TREE_OPERAND (*expr_p, 1);
4469 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
4470 tree decl = DECL_EXPR_DECL (decl_s);
4471 tree init = DECL_INITIAL (decl);
4472
4473 /* struct T x = (struct T) { 0, 1, 2 } can be optimized
4474 into struct T x = { 0, 1, 2 } if the address of the
4475 compound literal has never been taken. */
4476 if (!TREE_ADDRESSABLE (complit)
4477 && !TREE_ADDRESSABLE (decl)
4478 && init)
4479 {
4480 *expr_p = copy_node (*expr_p);
4481 TREE_OPERAND (*expr_p, 1) = init;
4482 return GS_OK;
4483 }
4484 }
4485
4486 default:
4487 break;
4488 }
4489 }
4490 while (changed);
4491
4492 return ret;
4493 }
4494
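/* Illustrative sketch (editorial example, not from the original
   source) of the COND_EXPR case above: an assignment of an aggregate
   conditional such as

     x = c ? a : b;

   is pushed down into the branches, becoming the equivalent of

     if (c) x = a; else x = b;

   so no aggregate temporary is needed for the conditional's value.  */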
4495
4496 /* Return true if T looks like a valid GIMPLE statement. */
4497
4498 static bool
4499 is_gimple_stmt (tree t)
4500 {
4501 const enum tree_code code = TREE_CODE (t);
4502
4503 switch (code)
4504 {
4505 case NOP_EXPR:
4506 /* The only valid NOP_EXPR is the empty statement. */
4507 return IS_EMPTY_STMT (t);
4508
4509 case BIND_EXPR:
4510 case COND_EXPR:
4511 /* These are only valid if they're void. */
4512 return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));
4513
4514 case SWITCH_EXPR:
4515 case GOTO_EXPR:
4516 case RETURN_EXPR:
4517 case LABEL_EXPR:
4518 case CASE_LABEL_EXPR:
4519 case TRY_CATCH_EXPR:
4520 case TRY_FINALLY_EXPR:
4521 case EH_FILTER_EXPR:
4522 case CATCH_EXPR:
4523 case ASM_EXPR:
4524 case STATEMENT_LIST:
4525 case OACC_PARALLEL:
4526 case OACC_KERNELS:
4527 case OACC_DATA:
4528 case OACC_HOST_DATA:
4529 case OACC_DECLARE:
4530 case OACC_UPDATE:
4531 case OACC_ENTER_DATA:
4532 case OACC_EXIT_DATA:
4533 case OACC_CACHE:
4534 case OMP_PARALLEL:
4535 case OMP_FOR:
4536 case OMP_SIMD:
4537 case CILK_SIMD:
4538 case OMP_DISTRIBUTE:
4539 case OACC_LOOP:
4540 case OMP_SECTIONS:
4541 case OMP_SECTION:
4542 case OMP_SINGLE:
4543 case OMP_MASTER:
4544 case OMP_TASKGROUP:
4545 case OMP_ORDERED:
4546 case OMP_CRITICAL:
4547 case OMP_TASK:
4548 case OMP_TARGET:
4549 case OMP_TARGET_DATA:
4550 case OMP_TARGET_UPDATE:
4551 case OMP_TARGET_ENTER_DATA:
4552 case OMP_TARGET_EXIT_DATA:
4553 case OMP_TASKLOOP:
4554 case OMP_TEAMS:
4555 /* These are always void. */
4556 return true;
4557
4558 case CALL_EXPR:
4559 case MODIFY_EXPR:
4560 case PREDICT_EXPR:
4561 /* These are valid regardless of their type. */
4562 return true;
4563
4564 default:
4565 return false;
4566 }
4567 }
4568
4569
4570 /* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
4571 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with
4572 DECL_GIMPLE_REG_P set.
4573
4574 IMPORTANT NOTE: This promotion is performed by introducing a load of the
4575 other, unmodified part of the complex object just before the total store.
4576 As a consequence, if the object is still uninitialized, an undefined value
4577 will be loaded into a register, which may result in a spurious exception
4578 if the register is floating-point and the value happens to be a signaling
4579 NaN for example. Then the fully-fledged complex operations lowering pass
4580 followed by a DCE pass are necessary in order to fix things up. */
4581
4582 static enum gimplify_status
4583 gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
4584 bool want_value)
4585 {
4586 enum tree_code code, ocode;
4587 tree lhs, rhs, new_rhs, other, realpart, imagpart;
4588
4589 lhs = TREE_OPERAND (*expr_p, 0);
4590 rhs = TREE_OPERAND (*expr_p, 1);
4591 code = TREE_CODE (lhs);
4592 lhs = TREE_OPERAND (lhs, 0);
4593
4594 ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
4595 other = build1 (ocode, TREE_TYPE (rhs), lhs);
4596 TREE_NO_WARNING (other) = 1;
4597 other = get_formal_tmp_var (other, pre_p);
4598
4599 realpart = code == REALPART_EXPR ? rhs : other;
4600 imagpart = code == REALPART_EXPR ? other : rhs;
4601
4602 if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
4603 new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
4604 else
4605 new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
4606
4607 gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
4608 *expr_p = (want_value) ? rhs : NULL_TREE;
4609
4610 return GS_ALL_DONE;
4611 }
4612
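/* Illustrative sketch (editorial example, not from the original
   source): for a complex variable c with DECL_GIMPLE_REG_P set, the
   partial store

     __real__ c = 5.0;

   is promoted to the total store

     c = COMPLEX_EXPR <5.0, IMAGPART_EXPR <c>>;

   with the unmodified imaginary part first loaded into a temporary
   as described in the note above.  */
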
4613 /* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
4614
4615 modify_expr
4616 : varname '=' rhs
4617 | '*' ID '=' rhs
4618
4619 PRE_P points to the list where side effects that must happen before
4620 *EXPR_P should be stored.
4621
4622 POST_P points to the list where side effects that must happen after
4623 *EXPR_P should be stored.
4624
4625 WANT_VALUE is nonzero iff we want to use the value of this expression
4626 in another expression. */
4627
4628 static enum gimplify_status
4629 gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
4630 bool want_value)
4631 {
4632 tree *from_p = &TREE_OPERAND (*expr_p, 1);
4633 tree *to_p = &TREE_OPERAND (*expr_p, 0);
4634 enum gimplify_status ret = GS_UNHANDLED;
4635 gimple *assign;
4636 location_t loc = EXPR_LOCATION (*expr_p);
4637 gimple_stmt_iterator gsi;
4638
4639 gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
4640 || TREE_CODE (*expr_p) == INIT_EXPR);
4641
4642 /* Trying to simplify a clobber using normal logic doesn't work,
4643 so handle it here. */
4644 if (TREE_CLOBBER_P (*from_p))
4645 {
4646 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
4647 if (ret == GS_ERROR)
4648 return ret;
4649 gcc_assert (!want_value
4650 && (TREE_CODE (*to_p) == VAR_DECL
4651 || TREE_CODE (*to_p) == MEM_REF));
4652 gimplify_seq_add_stmt (pre_p, gimple_build_assign (*to_p, *from_p));
4653 *expr_p = NULL;
4654 return GS_ALL_DONE;
4655 }
4656
4657 /* Insert pointer conversions required by the middle-end that are not
4658 required by the frontend. This fixes middle-end type checking
4659 for, for example, gcc.dg/redecl-6.c. */
4660 if (POINTER_TYPE_P (TREE_TYPE (*to_p)))
4661 {
4662 STRIP_USELESS_TYPE_CONVERSION (*from_p);
4663 if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
4664 *from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
4665 }
4666
4667 /* See if any simplifications can be done based on what the RHS is. */
4668 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
4669 want_value);
4670 if (ret != GS_UNHANDLED)
4671 return ret;
4672
4673 /* For zero-sized types, only gimplify the left hand side and right hand
4674 side as statements and throw away the assignment. Do this after
4675 gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
4676 types properly. */
4677 if (zero_sized_type (TREE_TYPE (*from_p)) && !want_value)
4678 {
4679 gimplify_stmt (from_p, pre_p);
4680 gimplify_stmt (to_p, pre_p);
4681 *expr_p = NULL_TREE;
4682 return GS_ALL_DONE;
4683 }
4684
4685 /* If the value being copied is of variable width, compute the length
4686 of the copy into a WITH_SIZE_EXPR. Note that we need to do this
4687 before gimplifying any of the operands so that we can resolve any
4688 PLACEHOLDER_EXPRs in the size. Also note that the RTL expander uses
4689 the size of the expression to be copied, not of the destination, so
4690 that is what we must do here. */
4691 maybe_with_size_expr (from_p);
4692
4693 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
4694 if (ret == GS_ERROR)
4695 return ret;
4696
4697 /* As a special case, we have to temporarily allow for assignments
4698 with a CALL_EXPR on the RHS. Since in GIMPLE a function call is
4699 a toplevel statement, when gimplifying the GENERIC expression
4700 MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
4701 GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.
4702
4703 Instead, we need to create the tuple GIMPLE_CALL <a, foo>. To
4704 prevent gimplify_expr from trying to create a new temporary for
4705 foo's LHS, we tell it that it should only gimplify until it
4706 reaches the CALL_EXPR. On return from gimplify_expr, the newly
4707 created GIMPLE_CALL <foo> will be the last statement in *PRE_P
4708 and all we need to do here is set 'a' to be its LHS. */
4709 ret = gimplify_expr (from_p, pre_p, post_p, rhs_predicate_for (*to_p),
4710 fb_rvalue);
4711 if (ret == GS_ERROR)
4712 return ret;
4713
4714 /* In the case of a va_arg internal fn wrapped in a WITH_SIZE_EXPR, add
4715 the type size as an argument to the call. */
4716 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
4717 {
4718 tree call = TREE_OPERAND (*from_p, 0);
4719 tree vlasize = TREE_OPERAND (*from_p, 1);
4720
4721 if (TREE_CODE (call) == CALL_EXPR
4722 && CALL_EXPR_IFN (call) == IFN_VA_ARG)
4723 {
4724 int nargs = call_expr_nargs (call);
4725 tree type = TREE_TYPE (call);
4726 tree ap = CALL_EXPR_ARG (call, 0);
4727 tree tag = CALL_EXPR_ARG (call, 1);
4728 tree aptag = CALL_EXPR_ARG (call, 2);
4729 tree newcall = build_call_expr_internal_loc (EXPR_LOCATION (call),
4730 IFN_VA_ARG, type,
4731 nargs + 1, ap, tag,
4732 aptag, vlasize);
4733 TREE_OPERAND (*from_p, 0) = newcall;
4734 }
4735 }
4736
4737 /* Now see if the above changed *from_p to something we handle specially. */
4738 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
4739 want_value);
4740 if (ret != GS_UNHANDLED)
4741 return ret;
4742
4743 /* If we've got a variable sized assignment between two lvalues (i.e. one
4744 that does not involve a call), then we can make things a bit more
4745 straightforward by converting the assignment to memcpy or memset. */
4746 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
4747 {
4748 tree from = TREE_OPERAND (*from_p, 0);
4749 tree size = TREE_OPERAND (*from_p, 1);
4750
4751 if (TREE_CODE (from) == CONSTRUCTOR)
4752 return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);
4753
4754 if (is_gimple_addressable (from))
4755 {
4756 *from_p = from;
4757 return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
4758 pre_p);
4759 }
4760 }
4761
4762 /* Transform partial stores to non-addressable complex variables into
4763 total stores. This allows us to use real instead of virtual operands
4764 for these variables, which improves optimization. */
4765 if ((TREE_CODE (*to_p) == REALPART_EXPR
4766 || TREE_CODE (*to_p) == IMAGPART_EXPR)
4767 && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
4768 return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);
4769
4770 /* Try to alleviate the effects of the gimplification creating artificial
4771 temporaries (see for example is_gimple_reg_rhs) on the debug info, but
4772 make sure not to create DECL_DEBUG_EXPR links across functions. */
4773 if (!gimplify_ctxp->into_ssa
4774 && TREE_CODE (*from_p) == VAR_DECL
4775 && DECL_IGNORED_P (*from_p)
4776 && DECL_P (*to_p)
4777 && !DECL_IGNORED_P (*to_p)
4778 && decl_function_context (*to_p) == current_function_decl)
4779 {
4780 if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
4781 DECL_NAME (*from_p)
4782 = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
4783 DECL_HAS_DEBUG_EXPR_P (*from_p) = 1;
4784 SET_DECL_DEBUG_EXPR (*from_p, *to_p);
4785 }
4786
4787 if (want_value && TREE_THIS_VOLATILE (*to_p))
4788 *from_p = get_initialized_tmp_var (*from_p, pre_p, post_p);
4789
4790 if (TREE_CODE (*from_p) == CALL_EXPR)
4791 {
4792 /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
4793 instead of a GIMPLE_ASSIGN. */
4794 gcall *call_stmt;
4795 if (CALL_EXPR_FN (*from_p) == NULL_TREE)
4796 {
4797 /* Gimplify internal functions created in the FEs. */
4798 int nargs = call_expr_nargs (*from_p), i;
4799 enum internal_fn ifn = CALL_EXPR_IFN (*from_p);
4800 auto_vec<tree> vargs (nargs);
4801
4802 for (i = 0; i < nargs; i++)
4803 {
4804 gimplify_arg (&CALL_EXPR_ARG (*from_p, i), pre_p,
4805 EXPR_LOCATION (*from_p));
4806 vargs.quick_push (CALL_EXPR_ARG (*from_p, i));
4807 }
4808 call_stmt = gimple_build_call_internal_vec (ifn, vargs);
4809 gimple_set_location (call_stmt, EXPR_LOCATION (*expr_p));
4810 }
4811 else
4812 {
4813 tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*from_p));
4814 CALL_EXPR_FN (*from_p) = TREE_OPERAND (CALL_EXPR_FN (*from_p), 0);
4815 STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p));
4816 tree fndecl = get_callee_fndecl (*from_p);
4817 if (fndecl
4818 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
4819 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
4820 && call_expr_nargs (*from_p) == 3)
4821 call_stmt = gimple_build_call_internal (IFN_BUILTIN_EXPECT, 3,
4822 CALL_EXPR_ARG (*from_p, 0),
4823 CALL_EXPR_ARG (*from_p, 1),
4824 CALL_EXPR_ARG (*from_p, 2));
4825 else
4826 {
4827 call_stmt = gimple_build_call_from_tree (*from_p);
4828 gimple_call_set_fntype (call_stmt, TREE_TYPE (fnptrtype));
4829 }
4830 }
4831 notice_special_calls (call_stmt);
4832 if (!gimple_call_noreturn_p (call_stmt)
4833 || TREE_ADDRESSABLE (TREE_TYPE (*to_p)))
4834 gimple_call_set_lhs (call_stmt, *to_p);
4835 assign = call_stmt;
4836 }
4837 else
4838 {
4839 assign = gimple_build_assign (*to_p, *from_p);
4840 gimple_set_location (assign, EXPR_LOCATION (*expr_p));
4841 }
4842
4843 if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
4844 {
4845 /* We should have got an SSA name from the start. */
4846 gcc_assert (TREE_CODE (*to_p) == SSA_NAME);
4847 }
4848
4849 gimplify_seq_add_stmt (pre_p, assign);
4850 gsi = gsi_last (*pre_p);
4851 maybe_fold_stmt (&gsi);
4852
4853 if (want_value)
4854 {
4855 *expr_p = TREE_THIS_VOLATILE (*to_p) ? *from_p : unshare_expr (*to_p);
4856 return GS_OK;
4857 }
4858 else
4859 *expr_p = NULL;
4860
4861 return GS_ALL_DONE;
4862 }
4863
4864 /* Gimplify a comparison between two variable-sized objects. Do this
4865 with a call to BUILT_IN_MEMCMP. */
4866
4867 static enum gimplify_status
4868 gimplify_variable_sized_compare (tree *expr_p)
4869 {
4870 location_t loc = EXPR_LOCATION (*expr_p);
4871 tree op0 = TREE_OPERAND (*expr_p, 0);
4872 tree op1 = TREE_OPERAND (*expr_p, 1);
4873 tree t, arg, dest, src, expr;
4874
4875 arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
4876 arg = unshare_expr (arg);
4877 arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
4878 src = build_fold_addr_expr_loc (loc, op1);
4879 dest = build_fold_addr_expr_loc (loc, op0);
4880 t = builtin_decl_implicit (BUILT_IN_MEMCMP);
4881 t = build_call_expr_loc (loc, t, 3, dest, src, arg);
4882
4883 expr
4884 = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
4885 SET_EXPR_LOCATION (expr, loc);
4886 *expr_p = expr;
4887
4888 return GS_OK;
4889 }
4890
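/* Illustrative sketch (editorial example; a, b and n are
   hypothetical): a comparison of two variable-sized objects, say two
   VLA-typed lvalues a and b whose type has size n bytes,

     a == b

   becomes

     __builtin_memcmp (&a, &b, n) == 0

   using the size of the first operand as computed above.  */
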
4891 /* Gimplify a comparison between two aggregate objects of integral scalar
4892 mode as a comparison between the bitwise equivalent scalar values. */
4893
4894 static enum gimplify_status
4895 gimplify_scalar_mode_aggregate_compare (tree *expr_p)
4896 {
4897 location_t loc = EXPR_LOCATION (*expr_p);
4898 tree op0 = TREE_OPERAND (*expr_p, 0);
4899 tree op1 = TREE_OPERAND (*expr_p, 1);
4900
4901 tree type = TREE_TYPE (op0);
4902 tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
4903
4904 op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
4905 op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);
4906
4907 *expr_p
4908 = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
4909
4910 return GS_OK;
4911 }
4912
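/* Illustrative sketch (editorial example; C itself has no aggregate
   equality, so this arises from languages such as Ada): comparing two
   objects of a 4-byte aggregate type like

     struct s { short a, b; };

   is rewritten via VIEW_CONVERT_EXPR into a comparison of the two
   32-bit bitwise images of the operands, assuming the type has an
   integral scalar mode.  */
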
4913 /* Gimplify an expression sequence. This function gimplifies each
4914 expression and rewrites the original expression with the last
4915 expression of the sequence in GIMPLE form.
4916
4917 PRE_P points to the list where the side effects for all the
4918 expressions in the sequence will be emitted.
4919
4920 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
4921
4922 static enum gimplify_status
4923 gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
4924 {
4925 tree t = *expr_p;
4926
4927 do
4928 {
4929 tree *sub_p = &TREE_OPERAND (t, 0);
4930
4931 if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
4932 gimplify_compound_expr (sub_p, pre_p, false);
4933 else
4934 gimplify_stmt (sub_p, pre_p);
4935
4936 t = TREE_OPERAND (t, 1);
4937 }
4938 while (TREE_CODE (t) == COMPOUND_EXPR);
4939
4940 *expr_p = t;
4941 if (want_value)
4942 return GS_OK;
4943 else
4944 {
4945 gimplify_stmt (expr_p, pre_p);
4946 return GS_ALL_DONE;
4947 }
4948 }
4949
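/* Illustrative sketch (editorial example, not from the original
   source): gimplifying the sequence

     x = (a++, b + 1);

   emits the side effect a = a + 1 into PRE_P and rewrites the
   COMPOUND_EXPR to its final value b + 1, which then becomes the rhs
   of the assignment.  */
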
4950 /* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
4951 gimplify. After gimplification, EXPR_P will point to a new temporary
4952 that holds the original value of the SAVE_EXPR node.
4953
4954 PRE_P points to the list where side effects that must happen before
4955 *EXPR_P should be stored. */
4956
4957 static enum gimplify_status
4958 gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4959 {
4960 enum gimplify_status ret = GS_ALL_DONE;
4961 tree val;
4962
4963 gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
4964 val = TREE_OPERAND (*expr_p, 0);
4965
4966 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
4967 if (!SAVE_EXPR_RESOLVED_P (*expr_p))
4968 {
4969 /* The operand may be a void-valued expression, such as the SAVE_EXPRs
4970 generated by the Java frontend for class initialization. It is
4971 being executed only for its side-effects. */
4972 if (TREE_TYPE (val) == void_type_node)
4973 {
4974 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
4975 is_gimple_stmt, fb_none);
4976 val = NULL;
4977 }
4978 else
4979 val = get_initialized_tmp_var (val, pre_p, post_p);
4980
4981 TREE_OPERAND (*expr_p, 0) = val;
4982 SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
4983 }
4984
4985 *expr_p = val;
4986
4987 return ret;
4988 }
4989
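/* Illustrative sketch (editorial example; the temporary name D.1 is
   hypothetical): the first gimplification of SAVE_EXPR <n * 4>
   evaluates it once into a temporary,

     D.1 = n * 4;

   marks the node resolved, and every later occurrence of the same
   SAVE_EXPR node is then replaced by D.1.  */
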
4990 /* Rewrite the ADDR_EXPR node pointed to by EXPR_P
4991
4992 unary_expr
4993 : ...
4994 | '&' varname
4995 ...
4996
4997 PRE_P points to the list where side effects that must happen before
4998 *EXPR_P should be stored.
4999
5000 POST_P points to the list where side effects that must happen after
5001 *EXPR_P should be stored. */
5002
5003 static enum gimplify_status
5004 gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5005 {
5006 tree expr = *expr_p;
5007 tree op0 = TREE_OPERAND (expr, 0);
5008 enum gimplify_status ret;
5009 location_t loc = EXPR_LOCATION (*expr_p);
5010
5011 switch (TREE_CODE (op0))
5012 {
5013 case INDIRECT_REF:
5014 do_indirect_ref:
5015 /* Check if we are dealing with an expression of the form '&*ptr'.
5016 While the front end folds away '&*ptr' into 'ptr', these
5017 expressions may be generated internally by the compiler (e.g.,
5018 builtins like __builtin_va_end). */
5019 /* Caution: the silent array decomposition semantics we allow for
5020 ADDR_EXPR mean we can't always discard the pair. */
5021 /* Gimplification of the ADDR_EXPR operand may drop
5022 cv-qualification conversions, so make sure we add them if
5023 needed. */
5024 {
5025 tree op00 = TREE_OPERAND (op0, 0);
5026 tree t_expr = TREE_TYPE (expr);
5027 tree t_op00 = TREE_TYPE (op00);
5028
5029 if (!useless_type_conversion_p (t_expr, t_op00))
5030 op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
5031 *expr_p = op00;
5032 ret = GS_OK;
5033 }
5034 break;
5035
5036 case VIEW_CONVERT_EXPR:
5037 /* Take the address of our operand and then convert it to the type of
5038 this ADDR_EXPR.
5039
5040 ??? The interactions of VIEW_CONVERT_EXPR and aliasing are not at
5041 all clear. The impact of this transformation is even less clear. */
5042
5043 /* If the operand is a useless conversion, look through it. Doing so
5044 guarantees that the ADDR_EXPR and its operand will remain of the
5045 same type. */
5046 if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
5047 op0 = TREE_OPERAND (op0, 0);
5048
5049 *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
5050 build_fold_addr_expr_loc (loc,
5051 TREE_OPERAND (op0, 0)));
5052 ret = GS_OK;
5053 break;
5054
5055 case MEM_REF:
5056 if (integer_zerop (TREE_OPERAND (op0, 1)))
5057 goto do_indirect_ref;
5058
5059 /* ... fall through ... */
5060
5061 default:
5062 /* If we see a call to a declared builtin or see its address
5063 being taken (we can unify those cases here) then we can mark
5064 the builtin for implicit generation by GCC. */
5065 if (TREE_CODE (op0) == FUNCTION_DECL
5066 && DECL_BUILT_IN_CLASS (op0) == BUILT_IN_NORMAL
5067 && builtin_decl_declared_p (DECL_FUNCTION_CODE (op0)))
5068 set_builtin_decl_implicit_p (DECL_FUNCTION_CODE (op0), true);
5069
5070 /* We use fb_either here because the C frontend sometimes takes
5071 the address of a call that returns a struct; see
5072 gcc.dg/c99-array-lval-1.c. The gimplifier will correctly make
5073 the implied temporary explicit. */
5074
5075 /* Make the operand addressable. */
5076 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
5077 is_gimple_addressable, fb_either);
5078 if (ret == GS_ERROR)
5079 break;
5080
5081 /* Then mark it. Beware that it may not be possible to do so directly
5082 if a temporary has been created by the gimplification. */
5083 prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p);
5084
5085 op0 = TREE_OPERAND (expr, 0);
5086
5087 /* For various reasons, the gimplification of the expression
5088 may have made a new INDIRECT_REF. */
5089 if (TREE_CODE (op0) == INDIRECT_REF)
5090 goto do_indirect_ref;
5091
5092 mark_addressable (TREE_OPERAND (expr, 0));
5093
5094 /* The FEs may end up building ADDR_EXPRs early on a decl with
5095 an incomplete type. Re-build ADDR_EXPRs in canonical form
5096 here. */
5097 if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr))))
5098 *expr_p = build_fold_addr_expr (op0);
5099
5100 /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly. */
5101 recompute_tree_invariant_for_addr_expr (*expr_p);
5102
5103 /* If we re-built the ADDR_EXPR add a conversion to the original type
5104 if required. */
5105 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
5106 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
5107
5108 break;
5109 }
5110
5111 return ret;
5112 }
5113
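/* Illustrative sketch (editorial example, not from the original
   source) of the INDIRECT_REF case above: an internally generated
   expression such as

     &*ptr

   is simplified to plain

     ptr

   with a conversion added when the pointer types differ only in a
   useless way.  */
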
5114 /* Gimplify the operands of an ASM_EXPR. Input operands should be a gimple
5115 value; output operands should be a gimple lvalue. */
5116
5117 static enum gimplify_status
5118 gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5119 {
5120 tree expr;
5121 int noutputs;
5122 const char **oconstraints;
5123 int i;
5124 tree link;
5125 const char *constraint;
5126 bool allows_mem, allows_reg, is_inout;
5127 enum gimplify_status ret, tret;
5128 gasm *stmt;
5129 vec<tree, va_gc> *inputs;
5130 vec<tree, va_gc> *outputs;
5131 vec<tree, va_gc> *clobbers;
5132 vec<tree, va_gc> *labels;
5133 tree link_next;
5134
5135 expr = *expr_p;
5136 noutputs = list_length (ASM_OUTPUTS (expr));
5137 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
5138
5139 inputs = NULL;
5140 outputs = NULL;
5141 clobbers = NULL;
5142 labels = NULL;
5143
5144 ret = GS_ALL_DONE;
5145 link_next = NULL_TREE;
5146 for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
5147 {
5148 bool ok;
5149 size_t constraint_len;
5150
5151 link_next = TREE_CHAIN (link);
5152
5153 oconstraints[i]
5154 = constraint
5155 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
5156 constraint_len = strlen (constraint);
5157 if (constraint_len == 0)
5158 continue;
5159
5160 ok = parse_output_constraint (&constraint, i, 0, 0,
5161 &allows_mem, &allows_reg, &is_inout);
5162 if (!ok)
5163 {
5164 ret = GS_ERROR;
5165 is_inout = false;
5166 }
5167
5168 if (!allows_reg && allows_mem)
5169 mark_addressable (TREE_VALUE (link));
5170
5171 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
5172 is_inout ? is_gimple_min_lval : is_gimple_lvalue,
5173 fb_lvalue | fb_mayfail);
5174 if (tret == GS_ERROR)
5175 {
5176 error ("invalid lvalue in asm output %d", i);
5177 ret = tret;
5178 }
5179
5180 vec_safe_push (outputs, link);
5181 TREE_CHAIN (link) = NULL_TREE;
5182
5183 if (is_inout)
5184 {
5185 /* An input/output operand. To give the optimizers more
5186 flexibility, split it into separate input and output
5187 operands. */
5188 tree input;
5189 char buf[10];
5190
5191 /* Turn the in/out constraint into an output constraint. */
5192 char *p = xstrdup (constraint);
5193 p[0] = '=';
5194 TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);
5195
5196 /* And add a matching input constraint. */
5197 if (allows_reg)
5198 {
5199 sprintf (buf, "%d", i);
5200
5201 /* If there are multiple alternatives in the constraint,
5202 handle each of them individually. Those that allow a register
5203 will be replaced with the operand number; the others will stay
5204 unchanged.
5205 if (strchr (p, ',') != NULL)
5206 {
5207 size_t len = 0, buflen = strlen (buf);
5208 char *beg, *end, *str, *dst;
5209
5210 for (beg = p + 1;;)
5211 {
5212 end = strchr (beg, ',');
5213 if (end == NULL)
5214 end = strchr (beg, '\0');
5215 if ((size_t) (end - beg) < buflen)
5216 len += buflen + 1;
5217 else
5218 len += end - beg + 1;
5219 if (*end)
5220 beg = end + 1;
5221 else
5222 break;
5223 }
5224
5225 str = (char *) alloca (len);
5226 for (beg = p + 1, dst = str;;)
5227 {
5228 const char *tem;
5229 bool mem_p, reg_p, inout_p;
5230
5231 end = strchr (beg, ',');
5232 if (end)
5233 *end = '\0';
5234 beg[-1] = '=';
5235 tem = beg - 1;
5236 parse_output_constraint (&tem, i, 0, 0,
5237 &mem_p, &reg_p, &inout_p);
5238 if (dst != str)
5239 *dst++ = ',';
5240 if (reg_p)
5241 {
5242 memcpy (dst, buf, buflen);
5243 dst += buflen;
5244 }
5245 else
5246 {
5247 if (end)
5248 len = end - beg;
5249 else
5250 len = strlen (beg);
5251 memcpy (dst, beg, len);
5252 dst += len;
5253 }
5254 if (end)
5255 beg = end + 1;
5256 else
5257 break;
5258 }
5259 *dst = '\0';
5260 input = build_string (dst - str, str);
5261 }
5262 else
5263 input = build_string (strlen (buf), buf);
5264 }
5265 else
5266 input = build_string (constraint_len - 1, constraint + 1);
5267
5268 free (p);
5269
5270 input = build_tree_list (build_tree_list (NULL_TREE, input),
5271 unshare_expr (TREE_VALUE (link)));
5272 ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
5273 }
5274 }
5275
5276 link_next = NULL_TREE;
5277 for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
5278 {
5279 link_next = TREE_CHAIN (link);
5280 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
5281 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
5282 oconstraints, &allows_mem, &allows_reg);
5283
5284 /* If we can't make copies, we can only accept memory. */
5285 if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (link))))
5286 {
5287 if (allows_mem)
5288 allows_reg = 0;
5289 else
5290 {
5291 error ("impossible constraint in %<asm%>");
5292 error ("non-memory input %d must stay in memory", i);
5293 return GS_ERROR;
5294 }
5295 }
5296
5297 /* If the operand is a memory input, it should be an lvalue. */
5298 if (!allows_reg && allows_mem)
5299 {
5300 tree inputv = TREE_VALUE (link);
5301 STRIP_NOPS (inputv);
5302 if (TREE_CODE (inputv) == PREDECREMENT_EXPR
5303 || TREE_CODE (inputv) == PREINCREMENT_EXPR
5304 || TREE_CODE (inputv) == POSTDECREMENT_EXPR
5305 || TREE_CODE (inputv) == POSTINCREMENT_EXPR
5306 || TREE_CODE (inputv) == MODIFY_EXPR)
5307 TREE_VALUE (link) = error_mark_node;
5308 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
5309 is_gimple_lvalue, fb_lvalue | fb_mayfail);
5310 if (tret != GS_ERROR)
5311 {
5312 /* Unlike output operands, memory inputs are not guaranteed
5313 to be lvalues by the FE, and while the expressions are
5314 marked addressable there, if the input is e.g. a statement
5315 expression, temporaries in it might not end up being
5316 addressable. They might already be used in the IL and thus
5317 it is too late to make them addressable now. */
5318 tree x = TREE_VALUE (link);
5319 while (handled_component_p (x))
5320 x = TREE_OPERAND (x, 0);
5321 if (TREE_CODE (x) == MEM_REF
5322 && TREE_CODE (TREE_OPERAND (x, 0)) == ADDR_EXPR)
5323 x = TREE_OPERAND (TREE_OPERAND (x, 0), 0);
5324 if ((TREE_CODE (x) == VAR_DECL
5325 || TREE_CODE (x) == PARM_DECL
5326 || TREE_CODE (x) == RESULT_DECL)
5327 && !TREE_ADDRESSABLE (x)
5328 && is_gimple_reg (x))
5329 {
5330 warning_at (EXPR_LOC_OR_LOC (TREE_VALUE (link),
5331 input_location), 0,
5332 "memory input %d is not directly addressable",
5333 i);
5334 prepare_gimple_addressable (&TREE_VALUE (link), pre_p);
5335 }
5336 }
5337 mark_addressable (TREE_VALUE (link));
5338 if (tret == GS_ERROR)
5339 {
5340 error_at (EXPR_LOC_OR_LOC (TREE_VALUE (link), input_location),
5341 "memory input %d is not directly addressable", i);
5342 ret = tret;
5343 }
5344 }
5345 else
5346 {
5347 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
5348 is_gimple_asm_val, fb_rvalue);
5349 if (tret == GS_ERROR)
5350 ret = tret;
5351 }
5352
5353 TREE_CHAIN (link) = NULL_TREE;
5354 vec_safe_push (inputs, link);
5355 }
5356
5357 link_next = NULL_TREE;
5358 for (link = ASM_CLOBBERS (expr); link; ++i, link = link_next)
5359 {
5360 link_next = TREE_CHAIN (link);
5361 TREE_CHAIN (link) = NULL_TREE;
5362 vec_safe_push (clobbers, link);
5363 }
5364
5365 link_next = NULL_TREE;
5366 for (link = ASM_LABELS (expr); link; ++i, link = link_next)
5367 {
5368 link_next = TREE_CHAIN (link);
5369 TREE_CHAIN (link) = NULL_TREE;
5370 vec_safe_push (labels, link);
5371 }
5372
5373 /* Do not add ASMs with errors to the gimple IL stream. */
5374 if (ret != GS_ERROR)
5375 {
5376 stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
5377 inputs, outputs, clobbers, labels);
5378
5379 gimple_asm_set_volatile (stmt, ASM_VOLATILE_P (expr) || noutputs == 0);
5380 gimple_asm_set_input (stmt, ASM_INPUT_P (expr));
5381
5382 gimplify_seq_add_stmt (pre_p, stmt);
5383 }
5384
5385 return ret;
5386 }
5387
5388 /* Gimplify a CLEANUP_POINT_EXPR. Currently this works by adding
5389 GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
5390 gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
5391 return to this function.
5392
5393 FIXME should we complexify the prequeue handling instead? Or use flags
5394 for all the cleanups and let the optimizer tighten them up? The current
5395 code seems pretty fragile; it will break on a cleanup within any
5396 non-conditional nesting. But any such nesting would be broken, anyway;
5397 we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
5398 and continues out of it. We can do that at the RTL level, though, so
5399 having an optimizer to tighten up try/finally regions would be a Good
5400 Thing. */
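/* A sketch of the rewrite performed below (illustrative only): a
   gimplified body such as

     stmt1;  WCE <cleanupA>;  stmt2;  WCE <cleanupB>;

   becomes

     stmt1;
     try
       {
         stmt2;
         cleanupB;    (a WCE last in the sequence is emitted inline)
       }
     finally
       {
         cleanupA;
       }

   EH-only cleanups use GIMPLE_TRY_CATCH instead of GIMPLE_TRY_FINALLY,
   and an EH-only cleanup last in the sequence is simply dropped.  */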
5401
5402 static enum gimplify_status
5403 gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
5404 {
5405 gimple_stmt_iterator iter;
5406 gimple_seq body_sequence = NULL;
5407
5408 tree temp = voidify_wrapper_expr (*expr_p, NULL);
5409
5410 /* We only care about the number of conditions between the innermost
5411 CLEANUP_POINT_EXPR and the cleanup. So save and reset the count and
5412 any cleanups collected outside the CLEANUP_POINT_EXPR. */
5413 int old_conds = gimplify_ctxp->conditions;
5414 gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
5415 bool old_in_cleanup_point_expr = gimplify_ctxp->in_cleanup_point_expr;
5416 gimplify_ctxp->conditions = 0;
5417 gimplify_ctxp->conditional_cleanups = NULL;
5418 gimplify_ctxp->in_cleanup_point_expr = true;
5419
5420 gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);
5421
5422 gimplify_ctxp->conditions = old_conds;
5423 gimplify_ctxp->conditional_cleanups = old_cleanups;
5424 gimplify_ctxp->in_cleanup_point_expr = old_in_cleanup_point_expr;
5425
5426 for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
5427 {
5428 gimple *wce = gsi_stmt (iter);
5429
5430 if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
5431 {
5432 if (gsi_one_before_end_p (iter))
5433 {
5434 /* Note that gsi_insert_seq_before and gsi_remove do not
5435 scan operands, unlike some other sequence mutators. */
5436 if (!gimple_wce_cleanup_eh_only (wce))
5437 gsi_insert_seq_before_without_update (&iter,
5438 gimple_wce_cleanup (wce),
5439 GSI_SAME_STMT);
5440 gsi_remove (&iter, true);
5441 break;
5442 }
5443 else
5444 {
5445 gtry *gtry;
5446 gimple_seq seq;
5447 enum gimple_try_flags kind;
5448
5449 if (gimple_wce_cleanup_eh_only (wce))
5450 kind = GIMPLE_TRY_CATCH;
5451 else
5452 kind = GIMPLE_TRY_FINALLY;
5453 seq = gsi_split_seq_after (iter);
5454
5455 gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
5456 /* Do not use gsi_replace here, as it may scan operands.
5457 We want to do a simple structural modification only. */
5458 gsi_set_stmt (&iter, gtry);
5459 iter = gsi_start (gtry->eval);
5460 }
5461 }
5462 else
5463 gsi_next (&iter);
5464 }
5465
5466 gimplify_seq_add_seq (pre_p, body_sequence);
5467 if (temp)
5468 {
5469 *expr_p = temp;
5470 return GS_OK;
5471 }
5472 else
5473 {
5474 *expr_p = NULL;
5475 return GS_ALL_DONE;
5476 }
5477 }
5478
5479 /* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP
5480 is the cleanup action required. EH_ONLY is true if the cleanup should
5481 only be executed if an exception is thrown, not on normal exit. */
5482
5483 static void
5484 gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p)
5485 {
5486 gimple *wce;
5487 gimple_seq cleanup_stmts = NULL;
5488
5489 /* Errors can result in improperly nested cleanups, which result in
5490 confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR. */
5491 if (seen_error ())
5492 return;
5493
5494 if (gimple_conditional_context ())
5495 {
5496 /* If we're in a conditional context, this is more complex. We only
5497 want to run the cleanup if we actually ran the initialization that
5498 necessitates it, but we want to run it after the end of the
5499 conditional context. So we wrap the try/finally around the
5500 condition and use a flag to determine whether or not to actually
5501 run the destructor. Thus
5502
5503 test ? f(A()) : 0
5504
5505 becomes (approximately)
5506
5507 flag = 0;
5508 try {
5509 if (test) { A::A(temp); flag = 1; val = f(temp); }
5510 else { val = 0; }
5511 } finally {
5512 if (flag) A::~A(temp);
5513 }
5514 val
5515 */
5516 tree flag = create_tmp_var (boolean_type_node, "cleanup");
5517 gassign *ffalse = gimple_build_assign (flag, boolean_false_node);
5518 gassign *ftrue = gimple_build_assign (flag, boolean_true_node);
5519
5520 cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
5521 gimplify_stmt (&cleanup, &cleanup_stmts);
5522 wce = gimple_build_wce (cleanup_stmts);
5523
5524 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
5525 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
5526 gimplify_seq_add_stmt (pre_p, ftrue);
5527
5528 /* Because of this manipulation, and the EH edges that jump
5529 threading cannot redirect, the temporary (VAR) will appear
5530 to be used uninitialized. Don't warn. */
5531 TREE_NO_WARNING (var) = 1;
5532 }
5533 else
5534 {
5535 gimplify_stmt (&cleanup, &cleanup_stmts);
5536 wce = gimple_build_wce (cleanup_stmts);
5537 gimple_wce_set_cleanup_eh_only (wce, eh_only);
5538 gimplify_seq_add_stmt (pre_p, wce);
5539 }
5540 }
5541
5542 /* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */
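/* An illustrative sketch (the slot name D.1234 is made up): the C++ call

     f (S ());

   arrives here as a TARGET_EXPR with slot D.1234, an initializer calling
   S::S (&D.1234) and a cleanup calling S::~S (&D.1234).  Gimplification
   emits the constructor call into *PRE_P, replaces *EXPR_P with the slot
   D.1234, and pushes the destructor via gimple_push_cleanup so that the
   enclosing CLEANUP_POINT_EXPR can wrap it in a try/finally.  */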
5543
5544 static enum gimplify_status
5545 gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5546 {
5547 tree targ = *expr_p;
5548 tree temp = TARGET_EXPR_SLOT (targ);
5549 tree init = TARGET_EXPR_INITIAL (targ);
5550 enum gimplify_status ret;
5551
5552 if (init)
5553 {
5554 tree cleanup = NULL_TREE;
5555
5556 /* TARGET_EXPR temps aren't part of the enclosing block, so add the
5557 temp to the temps list. Also handle variable-length TARGET_EXPRs. */
5558 if (TREE_CODE (DECL_SIZE (temp)) != INTEGER_CST)
5559 {
5560 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
5561 gimplify_type_sizes (TREE_TYPE (temp), pre_p);
5562 gimplify_vla_decl (temp, pre_p);
5563 }
5564 else
5565 gimple_add_tmp_var (temp);
5566
5567 /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
5568 expression is supposed to initialize the slot. */
5569 if (VOID_TYPE_P (TREE_TYPE (init)))
5570 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
5571 else
5572 {
5573 tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
5574 init = init_expr;
5575 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
5576 init = NULL;
5577 ggc_free (init_expr);
5578 }
5579 if (ret == GS_ERROR)
5580 {
5581 /* PR c++/28266 Make sure this is expanded only once. */
5582 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
5583 return GS_ERROR;
5584 }
5585 if (init)
5586 gimplify_and_add (init, pre_p);
5587
5588 /* If needed, push the cleanup for the temp. */
5589 if (TARGET_EXPR_CLEANUP (targ))
5590 {
5591 if (CLEANUP_EH_ONLY (targ))
5592 gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
5593 CLEANUP_EH_ONLY (targ), pre_p);
5594 else
5595 cleanup = TARGET_EXPR_CLEANUP (targ);
5596 }
5597
5598 /* Add a clobber for the temporary going out of scope, like
5599 gimplify_bind_expr. */
5600 if (gimplify_ctxp->in_cleanup_point_expr
5601 && needs_to_live_in_memory (temp)
5602 && flag_stack_reuse == SR_ALL)
5603 {
5604 tree clobber = build_constructor (TREE_TYPE (temp),
5605 NULL);
5606 TREE_THIS_VOLATILE (clobber) = true;
5607 clobber = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, clobber);
5608 if (cleanup)
5609 cleanup = build2 (COMPOUND_EXPR, void_type_node, cleanup,
5610 clobber);
5611 else
5612 cleanup = clobber;
5613 }
5614
5615 if (cleanup)
5616 gimple_push_cleanup (temp, cleanup, false, pre_p);
5617
5618 /* Only expand this once. */
5619 TREE_OPERAND (targ, 3) = init;
5620 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
5621 }
5622 else
5623 /* We should have expanded this before. */
5624 gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));
5625
5626 *expr_p = temp;
5627 return GS_OK;
5628 }
5629
5630 /* Gimplification of expression trees. */
5631
5632 /* Gimplify an expression which appears at statement context. The
5633 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is
5634 NULL, a new sequence is allocated.
5635
5636 Return true if we actually added a statement to the queue. */
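/* Typical use (illustrative):

     gimple_seq seq = NULL;
     if (gimplify_stmt (&stmt, &seq))
       ... at least one statement was added to SEQ ...  */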
5637
5638 bool
5639 gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
5640 {
5641 gimple_seq_node last;
5642
5643 last = gimple_seq_last (*seq_p);
5644 gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
5645 return last != gimple_seq_last (*seq_p);
5646 }
5647
5648 /* Add FIRSTPRIVATE entries for DECL in the surrounding OpenMP parallels
5649 to CTX. If entries already exist, force them to be some flavor of private.
5650 If there is no enclosing parallel, do nothing. */
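/* E.g. (illustrative) for the variable-length array in

     void foo (int n)
     {
       int a[n];
     #pragma omp parallel shared (a)
       use (a);
     }

   the gimplified temporary holding A's size ends up firstprivate on the
   enclosing parallel, so that the size is available inside the region.  */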
5651
5652 void
5653 omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
5654 {
5655 splay_tree_node n;
5656
5657 if (decl == NULL || !DECL_P (decl) || ctx->region_type == ORT_NONE)
5658 return;
5659
5660 do
5661 {
5662 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5663 if (n != NULL)
5664 {
5665 if (n->value & GOVD_SHARED)
5666 n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
5667 else if (n->value & GOVD_MAP)
5668 n->value |= GOVD_MAP_TO_ONLY;
5669 else
5670 return;
5671 }
5672 else if ((ctx->region_type & ORT_TARGET) != 0)
5673 {
5674 if (ctx->target_map_scalars_firstprivate)
5675 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
5676 else
5677 omp_add_variable (ctx, decl, GOVD_MAP | GOVD_MAP_TO_ONLY);
5678 }
5679 else if (ctx->region_type != ORT_WORKSHARE
5680 && ctx->region_type != ORT_SIMD
5681 && ctx->region_type != ORT_ACC
5682 && !(ctx->region_type & ORT_TARGET_DATA))
5683 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
5684
5685 ctx = ctx->outer_context;
5686 }
5687 while (ctx);
5688 }
5689
5690 /* Similarly for each of the type sizes of TYPE. */
5691
5692 static void
5693 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
5694 {
5695 if (type == NULL || type == error_mark_node)
5696 return;
5697 type = TYPE_MAIN_VARIANT (type);
5698
5699 if (ctx->privatized_types->add (type))
5700 return;
5701
5702 switch (TREE_CODE (type))
5703 {
5704 case INTEGER_TYPE:
5705 case ENUMERAL_TYPE:
5706 case BOOLEAN_TYPE:
5707 case REAL_TYPE:
5708 case FIXED_POINT_TYPE:
5709 omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
5710 omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
5711 break;
5712
5713 case ARRAY_TYPE:
5714 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
5715 omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
5716 break;
5717
5718 case RECORD_TYPE:
5719 case UNION_TYPE:
5720 case QUAL_UNION_TYPE:
5721 {
5722 tree field;
5723 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
5724 if (TREE_CODE (field) == FIELD_DECL)
5725 {
5726 omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
5727 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
5728 }
5729 }
5730 break;
5731
5732 case POINTER_TYPE:
5733 case REFERENCE_TYPE:
5734 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
5735 break;
5736
5737 default:
5738 break;
5739 }
5740
5741 omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
5742 omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
5743 lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
5744 }
5745
5746 /* Add an entry for DECL in the OMP context CTX with FLAGS. */
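/* E.g. (illustrative) an explicit "private (x)" clause arrives here with
   FLAGS = GOVD_PRIVATE | GOVD_EXPLICIT, whereas a variable merely
   referenced in the region body comes in from omp_notice_variable with
   GOVD_SEEN plus whatever the default clause dictates.  */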
5747
5748 static void
5749 omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
5750 {
5751 splay_tree_node n;
5752 unsigned int nflags;
5753 tree t;
5754
5755 if (error_operand_p (decl) || ctx->region_type == ORT_NONE)
5756 return;
5757
5758 /* Never elide decls whose type has TREE_ADDRESSABLE set. This means
5759 there are constructors involved somewhere. */
5760 if (TREE_ADDRESSABLE (TREE_TYPE (decl))
5761 || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl)))
5762 flags |= GOVD_SEEN;
5763
5764 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5765 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
5766 {
5767 /* We shouldn't be re-adding the decl with the same data
5768 sharing class. */
5769 gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
5770 nflags = n->value | flags;
5771 /* The only combination of data sharing classes we should see is
5772 FIRSTPRIVATE and LASTPRIVATE. However, OpenACC permits
5773 reduction variables to be used in data sharing clauses. */
5774 gcc_assert ((ctx->region_type & ORT_ACC) != 0
5775 || ((nflags & GOVD_DATA_SHARE_CLASS)
5776 == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE))
5777 || (flags & GOVD_DATA_SHARE_CLASS) == 0);
5778 n->value = nflags;
5779 return;
5780 }
5781
5782 /* When adding a variable-sized variable, we have to handle all sorts
5783 of additional bits of data: the pointer replacement variable, and
5784 the parameters of the type. */
5785 if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
5786 {
5787 /* Add the pointer replacement variable as PRIVATE if the variable
5788 replacement is private, else FIRSTPRIVATE since we'll need the
5789 address of the original variable either for SHARED, or for the
5790 copy into or out of the context. */
5791 if (!(flags & GOVD_LOCAL))
5792 {
5793 if (flags & GOVD_MAP)
5794 nflags = GOVD_MAP | GOVD_MAP_TO_ONLY | GOVD_EXPLICIT;
5795 else if (flags & GOVD_PRIVATE)
5796 nflags = GOVD_PRIVATE;
5797 else if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
5798 && (flags & GOVD_FIRSTPRIVATE))
5799 nflags = GOVD_PRIVATE | GOVD_EXPLICIT;
5800 else
5801 nflags = GOVD_FIRSTPRIVATE;
5802 nflags |= flags & GOVD_SEEN;
5803 t = DECL_VALUE_EXPR (decl);
5804 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
5805 t = TREE_OPERAND (t, 0);
5806 gcc_assert (DECL_P (t));
5807 omp_add_variable (ctx, t, nflags);
5808 }
5809
5810 /* Add all of the variable and type parameters (which should have
5811 been gimplified to a formal temporary) as FIRSTPRIVATE. */
5812 omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
5813 omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
5814 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
5815
5816 /* The variable-sized variable itself is never SHARED, only some form
5817 of PRIVATE. The sharing would take place via the pointer variable
5818 which we remapped above. */
5819 if (flags & GOVD_SHARED)
5820 flags = GOVD_PRIVATE | GOVD_DEBUG_PRIVATE
5821 | (flags & (GOVD_SEEN | GOVD_EXPLICIT));
5822
5823 /* We're going to make use of the TYPE_SIZE_UNIT at least in the
5824 alloca statement we generate for the variable, so make sure it
5825 is available. This isn't automatically needed for the SHARED
5826 case, since we won't be allocating local storage then.
5827 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
5828 in which case omp_notice_variable will be called later
5829 on when it is gimplified. */
5830 else if (! (flags & (GOVD_LOCAL | GOVD_MAP))
5831 && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl))))
5832 omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
5833 }
5834 else if ((flags & (GOVD_MAP | GOVD_LOCAL)) == 0
5835 && lang_hooks.decls.omp_privatize_by_reference (decl))
5836 {
5837 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
5838
5839 /* Similar to the direct variable sized case above, we'll need the
5840 size of references being privatized. */
5841 if ((flags & GOVD_SHARED) == 0)
5842 {
5843 t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
5844 if (DECL_P (t))
5845 omp_notice_variable (ctx, t, true);
5846 }
5847 }
5848
5849 if (n != NULL)
5850 n->value |= flags;
5851 else
5852 splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
5853 }
5854
5855 /* Notice a threadprivate variable DECL used in OMP context CTX.
5856 This just prints out diagnostics about threadprivate variable uses
5857 in target regions and untied tasks. If DECL2 is non-NULL, suppress
5858 this diagnostic on that variable as well. */
5859
5860 static bool
5861 omp_notice_threadprivate_variable (struct gimplify_omp_ctx *ctx, tree decl,
5862 tree decl2)
5863 {
5864 splay_tree_node n;
5865 struct gimplify_omp_ctx *octx;
5866
5867 for (octx = ctx; octx; octx = octx->outer_context)
5868 if ((octx->region_type & ORT_TARGET) != 0)
5869 {
5870 n = splay_tree_lookup (octx->variables, (splay_tree_key)decl);
5871 if (n == NULL)
5872 {
5873 error ("threadprivate variable %qE used in target region",
5874 DECL_NAME (decl));
5875 error_at (octx->location, "enclosing target region");
5876 splay_tree_insert (octx->variables, (splay_tree_key)decl, 0);
5877 }
5878 if (decl2)
5879 splay_tree_insert (octx->variables, (splay_tree_key)decl2, 0);
5880 }
5881
5882 if (ctx->region_type != ORT_UNTIED_TASK)
5883 return false;
5884 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5885 if (n == NULL)
5886 {
5887 error ("threadprivate variable %qE used in untied task",
5888 DECL_NAME (decl));
5889 error_at (ctx->location, "enclosing task");
5890 splay_tree_insert (ctx->variables, (splay_tree_key)decl, 0);
5891 }
5892 if (decl2)
5893 splay_tree_insert (ctx->variables, (splay_tree_key)decl2, 0);
5894 return false;
5895 }
5896
5897 /* Return true if global var DECL is device resident. */
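/* E.g. (illustrative) a global declared with

     #pragma acc declare device_resident (g)

   carries an "oacc declare target" attribute containing a
   GOMP_MAP_DEVICE_RESIDENT clause, so this predicate returns true
   for G.  */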
5898
5899 static bool
5900 device_resident_p (tree decl)
5901 {
5902 tree attr = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (decl));
5903
5904 if (!attr)
5905 return false;
5906
5907 for (tree t = TREE_VALUE (attr); t; t = TREE_PURPOSE (t))
5908 {
5909 tree c = TREE_VALUE (t);
5910 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DEVICE_RESIDENT)
5911 return true;
5912 }
5913
5914 return false;
5915 }
5916
5917 /* Determine outer default flags for DECL mentioned in an OMP region
5918 but not declared in an enclosing clause.
5919
5920 ??? Some compiler-generated variables (like SAVE_EXPRs) could be
5921 remapped firstprivate instead of shared. To some extent this is
5922 addressed in omp_firstprivatize_type_sizes, but not
5923 effectively. */
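/* E.g. (illustrative)

     int x;
     #pragma omp parallel default (none)
     x++;

   reaches the OMP_CLAUSE_DEFAULT_NONE case below and emits
   "'x' not specified in enclosing parallel".  */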
5924
5925 static unsigned
5926 omp_default_clause (struct gimplify_omp_ctx *ctx, tree decl,
5927 bool in_code, unsigned flags)
5928 {
5929 enum omp_clause_default_kind default_kind = ctx->default_kind;
5930 enum omp_clause_default_kind kind;
5931
5932 kind = lang_hooks.decls.omp_predetermined_sharing (decl);
5933 if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
5934 default_kind = kind;
5935
5936 switch (default_kind)
5937 {
5938 case OMP_CLAUSE_DEFAULT_NONE:
5939 {
5940 const char *rtype;
5941
5942 if (ctx->region_type & ORT_PARALLEL)
5943 rtype = "parallel";
5944 else if (ctx->region_type & ORT_TASK)
5945 rtype = "task";
5946 else if (ctx->region_type & ORT_TEAMS)
5947 rtype = "teams";
5948 else
5949 gcc_unreachable ();
5950
5951 error ("%qE not specified in enclosing %s",
5952 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rtype);
5953 error_at (ctx->location, "enclosing %s", rtype);
5954 }
5955 /* FALLTHRU */
5956 case OMP_CLAUSE_DEFAULT_SHARED:
5957 flags |= GOVD_SHARED;
5958 break;
5959 case OMP_CLAUSE_DEFAULT_PRIVATE:
5960 flags |= GOVD_PRIVATE;
5961 break;
5962 case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
5963 flags |= GOVD_FIRSTPRIVATE;
5964 break;
5965 case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
5966 /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED. */
5967 gcc_assert ((ctx->region_type & ORT_TASK) != 0);
5968 if (struct gimplify_omp_ctx *octx = ctx->outer_context)
5969 {
5970 omp_notice_variable (octx, decl, in_code);
5971 for (; octx; octx = octx->outer_context)
5972 {
5973 splay_tree_node n2;
5974
5975 n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
5976 if ((octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)) != 0
5977 && (n2 == NULL || (n2->value & GOVD_DATA_SHARE_CLASS) == 0))
5978 continue;
5979 if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
5980 {
5981 flags |= GOVD_FIRSTPRIVATE;
5982 goto found_outer;
5983 }
5984 if ((octx->region_type & (ORT_PARALLEL | ORT_TEAMS)) != 0)
5985 {
5986 flags |= GOVD_SHARED;
5987 goto found_outer;
5988 }
5989 }
5990 }
5991
5992 if (TREE_CODE (decl) == PARM_DECL
5993 || (!is_global_var (decl)
5994 && DECL_CONTEXT (decl) == current_function_decl))
5995 flags |= GOVD_FIRSTPRIVATE;
5996 else
5997 flags |= GOVD_SHARED;
5998 found_outer:
5999 break;
6000
6001 default:
6002 gcc_unreachable ();
6003 }
6004
6005 return flags;
6006 }
6007
6008
6009 /* Determine outer default flags for DECL mentioned in an OACC region
6010 but not declared in an enclosing clause. */
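/* E.g. (illustrative) a scalar referenced inside "#pragma acc kernels"
   gets GOVD_MAP | GOVD_MAP_FORCE ('copy'), whereas the same scalar
   inside "#pragma acc parallel" defaults to GOVD_FIRSTPRIVATE.  */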
6011
6012 static unsigned
6013 oacc_default_clause (struct gimplify_omp_ctx *ctx, tree decl, unsigned flags)
6014 {
6015 const char *rkind;
6016 bool on_device = false;
6017 tree type = TREE_TYPE (decl);
6018
6019 if (lang_hooks.decls.omp_privatize_by_reference (decl))
6020 type = TREE_TYPE (type);
6021
6022 if ((ctx->region_type & (ORT_ACC_PARALLEL | ORT_ACC_KERNELS)) != 0
6023 && is_global_var (decl)
6024 && device_resident_p (decl))
6025 {
6026 on_device = true;
6027 flags |= GOVD_MAP_TO_ONLY;
6028 }
6029
6030 switch (ctx->region_type)
6031 {
6032 default:
6033 gcc_unreachable ();
6034
6035 case ORT_ACC_KERNELS:
6036 /* Scalars are default 'copy' under kernels; non-scalars are default
6037 'present_or_copy'. */
6038 flags |= GOVD_MAP;
6039 if (!AGGREGATE_TYPE_P (type))
6040 flags |= GOVD_MAP_FORCE;
6041
6042 rkind = "kernels";
6043 break;
6044
6045 case ORT_ACC_PARALLEL:
6046 {
6047 if (on_device || AGGREGATE_TYPE_P (type))
6048 /* Aggregates default to 'present_or_copy'. */
6049 flags |= GOVD_MAP;
6050 else
6051 /* Scalars default to 'firstprivate'. */
6052 flags |= GOVD_FIRSTPRIVATE;
6053 rkind = "parallel";
6054 }
6055 break;
6056 }
6057
6058 if (DECL_ARTIFICIAL (decl))
6059 ; /* We can get compiler-generated decls, and should not complain
6060 about them. */
6061 else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_NONE)
6062 {
6063 error ("%qE not specified in enclosing OpenACC %qs construct",
6064 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rkind);
6065 inform (ctx->location, "enclosing OpenACC %qs construct", rkind);
6066 }
6067 else
6068 gcc_checking_assert (ctx->default_kind == OMP_CLAUSE_DEFAULT_SHARED);
6069
6070 return flags;
6071 }
6072
6073 /* Record the fact that DECL was used within the OMP context CTX.
6074 IN_CODE is true when real code uses DECL, and false when we should
6075 merely emit default(none) errors. Return true if DECL is going to
6076 be remapped and thus DECL shouldn't be gimplified into its
6077 DECL_VALUE_EXPR (if any). */
6078
6079 static bool
6080 omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
6081 {
6082 splay_tree_node n;
6083 unsigned flags = in_code ? GOVD_SEEN : 0;
6084 bool ret = false, shared;
6085
6086 if (error_operand_p (decl))
6087 return false;
6088
6089 if (ctx->region_type == ORT_NONE)
6090 return lang_hooks.decls.omp_disregard_value_expr (decl, false);
6091
6092 if (is_global_var (decl))
6093 {
6094 /* Threadprivate variables are predetermined. */
6095 if (DECL_THREAD_LOCAL_P (decl))
6096 return omp_notice_threadprivate_variable (ctx, decl, NULL_TREE);
6097
6098 if (DECL_HAS_VALUE_EXPR_P (decl))
6099 {
6100 tree value = get_base_address (DECL_VALUE_EXPR (decl));
6101
6102 if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
6103 return omp_notice_threadprivate_variable (ctx, decl, value);
6104 }
6105
6106 if (gimplify_omp_ctxp->outer_context == NULL
6107 && VAR_P (decl)
6108 && get_oacc_fn_attrib (current_function_decl))
6109 {
6110 location_t loc = DECL_SOURCE_LOCATION (decl);
6111
6112 if (lookup_attribute ("omp declare target link",
6113 DECL_ATTRIBUTES (decl)))
6114 {
6115 error_at (loc,
6116 "%qE with %<link%> clause used in %<routine%> function",
6117 DECL_NAME (decl));
6118 return false;
6119 }
6120 else if (!lookup_attribute ("omp declare target",
6121 DECL_ATTRIBUTES (decl)))
6122 {
6123 error_at (loc,
6124 "%qE requires a %<declare%> directive for use "
6125 "in a %<routine%> function", DECL_NAME (decl));
6126 return false;
6127 }
6128 }
6129 }
6130
6131 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6132 if ((ctx->region_type & ORT_TARGET) != 0)
6133 {
6134 ret = lang_hooks.decls.omp_disregard_value_expr (decl, true);
6135 if (n == NULL)
6136 {
6137 unsigned nflags = flags;
6138 if (ctx->target_map_pointers_as_0len_arrays
6139 || ctx->target_map_scalars_firstprivate)
6140 {
6141 bool is_declare_target = false;
6142 bool is_scalar = false;
6143 if (is_global_var (decl)
6144 && varpool_node::get_create (decl)->offloadable)
6145 {
6146 struct gimplify_omp_ctx *octx;
6147 for (octx = ctx->outer_context;
6148 octx; octx = octx->outer_context)
6149 {
6150 n = splay_tree_lookup (octx->variables,
6151 (splay_tree_key)decl);
6152 if (n
6153 && (n->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED
6154 && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
6155 break;
6156 }
6157 is_declare_target = octx == NULL;
6158 }
6159 if (!is_declare_target && ctx->target_map_scalars_firstprivate)
6160 {
6161 tree type = TREE_TYPE (decl);
6162 if (TREE_CODE (type) == REFERENCE_TYPE)
6163 type = TREE_TYPE (type);
6164 if (TREE_CODE (type) == COMPLEX_TYPE)
6165 type = TREE_TYPE (type);
6166 if (INTEGRAL_TYPE_P (type)
6167 || SCALAR_FLOAT_TYPE_P (type)
6168 || TREE_CODE (type) == POINTER_TYPE)
6169 is_scalar = true;
6170 }
6171 if (is_declare_target)
6172 ;
6173 else if (ctx->target_map_pointers_as_0len_arrays
6174 && (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
6175 || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
6176 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
6177 == POINTER_TYPE)))
6178 nflags |= GOVD_MAP | GOVD_MAP_0LEN_ARRAY;
6179 else if (is_scalar)
6180 nflags |= GOVD_FIRSTPRIVATE;
6181 }
6182
6183 struct gimplify_omp_ctx *octx = ctx->outer_context;
6184 if ((ctx->region_type & ORT_ACC) && octx)
6185 {
6186 /* Look in outer OpenACC contexts to see if there's a
6187 data attribute for this variable. */
6188 omp_notice_variable (octx, decl, in_code);
6189
6190 for (; octx; octx = octx->outer_context)
6191 {
6192 if (!(octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)))
6193 break;
6194 splay_tree_node n2
6195 = splay_tree_lookup (octx->variables,
6196 (splay_tree_key) decl);
6197 if (n2)
6198 {
6199 if (octx->region_type == ORT_ACC_HOST_DATA)
6200 error ("variable %qE declared in enclosing "
6201 "%<host_data%> region", DECL_NAME (decl));
6202 nflags |= GOVD_MAP;
6203 goto found_outer;
6204 }
6205 }
6206 }
6207
6208 {
6209 tree type = TREE_TYPE (decl);
6210
6211 if (nflags == flags
6212 && gimplify_omp_ctxp->target_firstprivatize_array_bases
6213 && lang_hooks.decls.omp_privatize_by_reference (decl))
6214 type = TREE_TYPE (type);
6215 if (nflags == flags
6216 && !lang_hooks.types.omp_mappable_type (type))
6217 {
6218 error ("%qD referenced in target region does not have "
6219 "a mappable type", decl);
6220 nflags |= GOVD_MAP | GOVD_EXPLICIT;
6221 }
6222 else if (nflags == flags)
6223 {
6224 if ((ctx->region_type & ORT_ACC) != 0)
6225 nflags = oacc_default_clause (ctx, decl, flags);
6226 else
6227 nflags |= GOVD_MAP;
6228 }
6229 }
6230 found_outer:
6231 omp_add_variable (ctx, decl, nflags);
6232 }
6233 else
6234 {
6235 /* If nothing changed, there's nothing left to do. */
6236 if ((n->value & flags) == flags)
6237 return ret;
6238 flags |= n->value;
6239 n->value = flags;
6240 }
6241 goto do_outer;
6242 }
6243
6244 if (n == NULL)
6245 {
6246 if (ctx->region_type == ORT_WORKSHARE
6247 || ctx->region_type == ORT_SIMD
6248 || ctx->region_type == ORT_ACC
6249 || (ctx->region_type & ORT_TARGET_DATA) != 0)
6250 goto do_outer;
6251
6252 flags = omp_default_clause (ctx, decl, in_code, flags);
6253
6254 if ((flags & GOVD_PRIVATE)
6255 && lang_hooks.decls.omp_private_outer_ref (decl))
6256 flags |= GOVD_PRIVATE_OUTER_REF;
6257
6258 omp_add_variable (ctx, decl, flags);
6259
6260 shared = (flags & GOVD_SHARED) != 0;
6261 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
6262 goto do_outer;
6263 }
6264
6265 if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
6266 && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
6267 && DECL_SIZE (decl)
6268 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
6269 {
6270 splay_tree_node n2;
6271 tree t = DECL_VALUE_EXPR (decl);
6272 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
6273 t = TREE_OPERAND (t, 0);
6274 gcc_assert (DECL_P (t));
6275 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
6276 n2->value |= GOVD_SEEN;
6277 }
6278
6279 shared = ((flags | n->value) & GOVD_SHARED) != 0;
6280 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
6281
6282 /* If nothing changed, there's nothing left to do. */
6283 if ((n->value & flags) == flags)
6284 return ret;
6285 flags |= n->value;
6286 n->value = flags;
6287
6288 do_outer:
6289 /* If the variable is private in the current context, then we don't
6290 need to propagate anything to an outer context. */
6291 if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
6292 return ret;
6293 if ((flags & (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
6294 == (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
6295 return ret;
6296 if ((flags & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
6297 | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
6298 == (GOVD_LASTPRIVATE | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
6299 return ret;
6300 if (ctx->outer_context
6301 && omp_notice_variable (ctx->outer_context, decl, in_code))
6302 return true;
6303 return ret;
6304 }
6305
6306 /* Verify that DECL is private within CTX. If there's specific information
6307 to the contrary in the innermost scope, generate an error. */
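/* E.g. (illustrative)

     #pragma omp parallel for shared (i)
     for (i = 0; i < n; i++) ...

   is diagnosed below with "iteration variable 'i' should be private".  */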
6308
6309 static bool
6310 omp_is_private (struct gimplify_omp_ctx *ctx, tree decl, int simd)
6311 {
6312 splay_tree_node n;
6313
6314 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6315 if (n != NULL)
6316 {
6317 if (n->value & GOVD_SHARED)
6318 {
6319 if (ctx == gimplify_omp_ctxp)
6320 {
6321 if (simd)
6322 error ("iteration variable %qE is predetermined linear",
6323 DECL_NAME (decl));
6324 else
6325 error ("iteration variable %qE should be private",
6326 DECL_NAME (decl));
6327 n->value = GOVD_PRIVATE;
6328 return true;
6329 }
6330 else
6331 return false;
6332 }
6333 else if ((n->value & GOVD_EXPLICIT) != 0
6334 && (ctx == gimplify_omp_ctxp
6335 || (ctx->region_type == ORT_COMBINED_PARALLEL
6336 && gimplify_omp_ctxp->outer_context == ctx)))
6337 {
6338 if ((n->value & GOVD_FIRSTPRIVATE) != 0)
6339 error ("iteration variable %qE should not be firstprivate",
6340 DECL_NAME (decl));
6341 else if ((n->value & GOVD_REDUCTION) != 0)
6342 error ("iteration variable %qE should not be reduction",
6343 DECL_NAME (decl));
6344 else if (simd == 0 && (n->value & GOVD_LINEAR) != 0)
6345 error ("iteration variable %qE should not be linear",
6346 DECL_NAME (decl));
6347 else if (simd == 1 && (n->value & GOVD_LASTPRIVATE) != 0)
6348 error ("iteration variable %qE should not be lastprivate",
6349 DECL_NAME (decl));
6350 else if (simd && (n->value & GOVD_PRIVATE) != 0)
6351 error ("iteration variable %qE should not be private",
6352 DECL_NAME (decl));
6353 else if (simd == 2 && (n->value & GOVD_LINEAR) != 0)
6354 error ("iteration variable %qE is predetermined linear",
6355 DECL_NAME (decl));
6356 }
6357 return (ctx == gimplify_omp_ctxp
6358 || (ctx->region_type == ORT_COMBINED_PARALLEL
6359 && gimplify_omp_ctxp->outer_context == ctx));
6360 }
6361
6362 if (ctx->region_type != ORT_WORKSHARE
6363 && ctx->region_type != ORT_SIMD
6364 && ctx->region_type != ORT_ACC)
6365 return false;
6366 else if (ctx->outer_context)
6367 return omp_is_private (ctx->outer_context, decl, simd);
6368 return false;
6369 }
6370
6371 /* Return true if DECL is private within a parallel region
6372 that binds to the current construct's context or in parallel
6373 region's REDUCTION clause. */
6374
6375 static bool
6376 omp_check_private (struct gimplify_omp_ctx *ctx, tree decl, bool copyprivate)
6377 {
6378 splay_tree_node n;
6379
6380 do
6381 {
6382 ctx = ctx->outer_context;
6383 if (ctx == NULL)
6384 {
6385 if (is_global_var (decl))
6386 return false;
6387
6388 /* References might be private, but might be shared too.
6389 When checking for copyprivate, assume they might be
6390 private; otherwise assume they might be shared. */
6391 if (copyprivate)
6392 return true;
6393
6394 if (lang_hooks.decls.omp_privatize_by_reference (decl))
6395 return false;
6396
6397 /* Treat C++ privatized non-static data members outside
6398 of the privatization the same. */
6399 if (omp_member_access_dummy_var (decl))
6400 return false;
6401
6402 return true;
6403 }
6404
6405 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6406
6407 if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
6408 && (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0))
6409 continue;
6410
6411 if (n != NULL)
6412 {
6413 if ((n->value & GOVD_LOCAL) != 0
6414 && omp_member_access_dummy_var (decl))
6415 return false;
6416 return (n->value & GOVD_SHARED) == 0;
6417 }
6418 }
6419 while (ctx->region_type == ORT_WORKSHARE
6420 || ctx->region_type == ORT_SIMD
6421 || ctx->region_type == ORT_ACC);
6422 return false;
6423 }
6424
6425 /* Return true if the CTX is combined with distribute and thus
6426 lastprivate can't be supported. */
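/* E.g. (illustrative) for a combined Fortran construct such as

     !$omp teams distribute parallel do lastprivate (x)

   this returns true, and the clause scanning below then marks X with
   GOVD_LINEAR_LASTPRIVATE_NO_OUTER rather than propagating the
   lastprivate outward.  */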
6427
6428 static bool
6429 omp_no_lastprivate (struct gimplify_omp_ctx *ctx)
6430 {
6431 do
6432 {
6433 if (ctx->outer_context == NULL)
6434 return false;
6435 ctx = ctx->outer_context;
6436 switch (ctx->region_type)
6437 {
6438 case ORT_WORKSHARE:
6439 if (!ctx->combined_loop)
6440 return false;
6441 if (ctx->distribute)
6442 return lang_GNU_Fortran ();
6443 break;
6444 case ORT_COMBINED_PARALLEL:
6445 break;
6446 case ORT_COMBINED_TEAMS:
6447 return lang_GNU_Fortran ();
6448 default:
6449 return false;
6450 }
6451 }
6452 while (1);
6453 }
6454
6455 /* Callback for walk_tree to find a DECL_EXPR for the given DECL. */
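/* Typical use (illustrative):

     if (walk_tree (&body, find_decl_expr, decl, NULL))
       ... BODY contains a DECL_EXPR declaring DECL ...  */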
6456
6457 static tree
6458 find_decl_expr (tree *tp, int *walk_subtrees, void *data)
6459 {
6460 tree t = *tp;
6461
6462 /* Return the DECL_EXPR if it declares the DECL we are looking for. */
6463 if (TREE_CODE (t) == DECL_EXPR && DECL_EXPR_DECL (t) == (tree) data)
6464 return t;
6465
6466 if (IS_TYPE_OR_DECL_P (t))
6467 *walk_subtrees = 0;
6468 return NULL_TREE;
6469 }
6470
6471 /* Scan the OMP clauses in *LIST_P, installing mappings into a new
6472 omp context and into enclosing ones. */
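/* E.g. (illustrative) for

     #pragma omp parallel firstprivate (a) reduction (+:s)

   this records A with GOVD_FIRSTPRIVATE | GOVD_EXPLICIT and S with
   GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT in the new context.  */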
6473
6474 static void
6475 gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
6476 enum omp_region_type region_type,
6477 enum tree_code code)
6478 {
6479 struct gimplify_omp_ctx *ctx, *outer_ctx;
6480 tree c;
6481 hash_map<tree, tree> *struct_map_to_clause = NULL;
6482 tree *prev_list_p = NULL;
6483
6484 ctx = new_omp_context (region_type);
6485 outer_ctx = ctx->outer_context;
6486 if (code == OMP_TARGET && !lang_GNU_Fortran ())
6487 {
6488 ctx->target_map_pointers_as_0len_arrays = true;
6489 /* FIXME: For Fortran we want to set this too, when
6490 the Fortran FE is updated to OpenMP 4.5. */
6491 ctx->target_map_scalars_firstprivate = true;
6492 }
6493 if (!lang_GNU_Fortran ())
6494 switch (code)
6495 {
6496 case OMP_TARGET:
6497 case OMP_TARGET_DATA:
6498 case OMP_TARGET_ENTER_DATA:
6499 case OMP_TARGET_EXIT_DATA:
6500 case OACC_HOST_DATA:
6501 ctx->target_firstprivatize_array_bases = true;
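/* FALLTHRU */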
6502 default:
6503 break;
6504 }
6505
6506 while ((c = *list_p) != NULL)
6507 {
6508 bool remove = false;
6509 bool notice_outer = true;
6510 const char *check_non_private = NULL;
6511 unsigned int flags;
6512 tree decl;
6513
6514 switch (OMP_CLAUSE_CODE (c))
6515 {
6516 case OMP_CLAUSE_PRIVATE:
6517 flags = GOVD_PRIVATE | GOVD_EXPLICIT;
6518 if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
6519 {
6520 flags |= GOVD_PRIVATE_OUTER_REF;
6521 OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
6522 }
6523 else
6524 notice_outer = false;
6525 goto do_add;
6526 case OMP_CLAUSE_SHARED:
6527 flags = GOVD_SHARED | GOVD_EXPLICIT;
6528 goto do_add;
6529 case OMP_CLAUSE_FIRSTPRIVATE:
6530 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
6531 check_non_private = "firstprivate";
6532 goto do_add;
6533 case OMP_CLAUSE_LASTPRIVATE:
6534 flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
6535 check_non_private = "lastprivate";
6536 decl = OMP_CLAUSE_DECL (c);
6537 if (omp_no_lastprivate (ctx))
6538 {
6539 notice_outer = false;
6540 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
6541 }
6542 else if (error_operand_p (decl))
6543 goto do_add;
6544 else if (outer_ctx
6545 && (outer_ctx->region_type == ORT_COMBINED_PARALLEL
6546 || outer_ctx->region_type == ORT_COMBINED_TEAMS)
6547 && splay_tree_lookup (outer_ctx->variables,
6548 (splay_tree_key) decl) == NULL)
6549 {
6550 omp_add_variable (outer_ctx, decl, GOVD_SHARED | GOVD_SEEN);
6551 if (outer_ctx->outer_context)
6552 omp_notice_variable (outer_ctx->outer_context, decl, true);
6553 }
6554 else if (outer_ctx
6555 && (outer_ctx->region_type & ORT_TASK) != 0
6556 && outer_ctx->combined_loop
6557 && splay_tree_lookup (outer_ctx->variables,
6558 (splay_tree_key) decl) == NULL)
6559 {
6560 omp_add_variable (outer_ctx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
6561 if (outer_ctx->outer_context)
6562 omp_notice_variable (outer_ctx->outer_context, decl, true);
6563 }
6564 else if (outer_ctx
6565 && (outer_ctx->region_type == ORT_WORKSHARE
6566 || outer_ctx->region_type == ORT_ACC)
6567 && outer_ctx->combined_loop
6568 && splay_tree_lookup (outer_ctx->variables,
6569 (splay_tree_key) decl) == NULL
6570 && !omp_check_private (outer_ctx, decl, false))
6571 {
6572 omp_add_variable (outer_ctx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
6573 if (outer_ctx->outer_context
6574 && (outer_ctx->outer_context->region_type
6575 == ORT_COMBINED_PARALLEL)
6576 && splay_tree_lookup (outer_ctx->outer_context->variables,
6577 (splay_tree_key) decl) == NULL)
6578 {
6579 struct gimplify_omp_ctx *octx = outer_ctx->outer_context;
6580 omp_add_variable (octx, decl, GOVD_SHARED | GOVD_SEEN);
6581 if (octx->outer_context)
6582 omp_notice_variable (octx->outer_context, decl, true);
6583 }
6584 else if (outer_ctx->outer_context)
6585 omp_notice_variable (outer_ctx->outer_context, decl, true);
6586 }
6587 goto do_add;
6588 case OMP_CLAUSE_REDUCTION:
6589 flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
6590 /* OpenACC permits reductions on private variables. */
6591 if (!(region_type & ORT_ACC))
6592 check_non_private = "reduction";
6593 decl = OMP_CLAUSE_DECL (c);
6594 if (TREE_CODE (decl) == MEM_REF)
6595 {
6596 tree type = TREE_TYPE (decl);
6597 if (gimplify_expr (&TYPE_MAX_VALUE (TYPE_DOMAIN (type)), pre_p,
6598 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
6599 {
6600 remove = true;
6601 break;
6602 }
6603 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
6604 if (DECL_P (v))
6605 {
6606 omp_firstprivatize_variable (ctx, v);
6607 omp_notice_variable (ctx, v, true);
6608 }
6609 decl = TREE_OPERAND (decl, 0);
6610 if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
6611 {
6612 if (gimplify_expr (&TREE_OPERAND (decl, 1), pre_p,
6613 NULL, is_gimple_val, fb_rvalue)
6614 == GS_ERROR)
6615 {
6616 remove = true;
6617 break;
6618 }
6619 v = TREE_OPERAND (decl, 1);
6620 if (DECL_P (v))
6621 {
6622 omp_firstprivatize_variable (ctx, v);
6623 omp_notice_variable (ctx, v, true);
6624 }
6625 decl = TREE_OPERAND (decl, 0);
6626 }
6627 if (TREE_CODE (decl) == ADDR_EXPR
6628 || TREE_CODE (decl) == INDIRECT_REF)
6629 decl = TREE_OPERAND (decl, 0);
6630 }
6631 goto do_add_decl;
6632 case OMP_CLAUSE_LINEAR:
6633 if (gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c), pre_p, NULL,
6634 is_gimple_val, fb_rvalue) == GS_ERROR)
6635 {
6636 remove = true;
6637 break;
6638 }
6639 else
6640 {
6641 if (code == OMP_SIMD
6642 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
6643 {
6644 struct gimplify_omp_ctx *octx = outer_ctx;
6645 if (octx
6646 && octx->region_type == ORT_WORKSHARE
6647 && octx->combined_loop
6648 && !octx->distribute)
6649 {
6650 if (octx->outer_context
6651 && (octx->outer_context->region_type
6652 == ORT_COMBINED_PARALLEL))
6653 octx = octx->outer_context->outer_context;
6654 else
6655 octx = octx->outer_context;
6656 }
6657 if (octx
6658 && octx->region_type == ORT_WORKSHARE
6659 && octx->combined_loop
6660 && octx->distribute
6661 && !lang_GNU_Fortran ())
6662 {
6663 error_at (OMP_CLAUSE_LOCATION (c),
6664 "%<linear%> clause for variable other than "
6665 "loop iterator specified on construct "
6666 "combined with %<distribute%>");
6667 remove = true;
6668 break;
6669 }
6670 }
6671 /* For a combined #pragma omp parallel for simd, we need to put
6672 lastprivate and perhaps firstprivate too on the
6673 parallel. Similarly for #pragma omp for simd. */
6674 struct gimplify_omp_ctx *octx = outer_ctx;
6675 decl = NULL_TREE;
6676 if (omp_no_lastprivate (ctx))
6677 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
6678 do
6679 {
6680 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
6681 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
6682 break;
6683 decl = OMP_CLAUSE_DECL (c);
6684 if (error_operand_p (decl))
6685 {
6686 decl = NULL_TREE;
6687 break;
6688 }
6689 flags = GOVD_SEEN;
6690 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
6691 flags |= GOVD_FIRSTPRIVATE;
6692 if (!OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
6693 flags |= GOVD_LASTPRIVATE;
6694 if (octx
6695 && octx->region_type == ORT_WORKSHARE
6696 && octx->combined_loop)
6697 {
6698 if (octx->outer_context
6699 && (octx->outer_context->region_type
6700 == ORT_COMBINED_PARALLEL))
6701 octx = octx->outer_context;
6702 else if (omp_check_private (octx, decl, false))
6703 break;
6704 }
6705 else if (octx
6706 && (octx->region_type & ORT_TASK) != 0
6707 && octx->combined_loop)
6708 ;
6709 else if (octx
6710 && octx->region_type == ORT_COMBINED_PARALLEL
6711 && ctx->region_type == ORT_WORKSHARE
6712 && octx == outer_ctx)
6713 flags = GOVD_SEEN | GOVD_SHARED;
6714 else if (octx
6715 && octx->region_type == ORT_COMBINED_TEAMS)
6716 flags = GOVD_SEEN | GOVD_SHARED;
6717 else if (octx
6718 && octx->region_type == ORT_COMBINED_TARGET)
6719 {
6720 flags &= ~GOVD_LASTPRIVATE;
6721 if (flags == GOVD_SEEN)
6722 break;
6723 }
6724 else
6725 break;
6726 splay_tree_node on
6727 = splay_tree_lookup (octx->variables,
6728 (splay_tree_key) decl);
6729 if (on && (on->value & GOVD_DATA_SHARE_CLASS) != 0)
6730 {
6731 octx = NULL;
6732 break;
6733 }
6734 omp_add_variable (octx, decl, flags);
6735 if (octx->outer_context == NULL)
6736 break;
6737 octx = octx->outer_context;
6738 }
6739 while (1);
6740 if (octx
6741 && decl
6742 && (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
6743 || !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
6744 omp_notice_variable (octx, decl, true);
6745 }
6746 flags = GOVD_LINEAR | GOVD_EXPLICIT;
6747 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
6748 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
6749 {
6750 notice_outer = false;
6751 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
6752 }
6753 goto do_add;
6754
6755 case OMP_CLAUSE_MAP:
6756 decl = OMP_CLAUSE_DECL (c);
6757 if (error_operand_p (decl))
6758 remove = true;
6759 switch (code)
6760 {
6761 case OMP_TARGET:
6762 break;
6763 case OMP_TARGET_DATA:
6764 case OMP_TARGET_ENTER_DATA:
6765 case OMP_TARGET_EXIT_DATA:
6766 case OACC_HOST_DATA:
6767 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
6768 || (OMP_CLAUSE_MAP_KIND (c)
6769 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
6770 /* For target {,enter ,exit }data only the array slice is
6771 mapped, but not the pointer to it. */
6772 remove = true;
6773 break;
6774 default:
6775 break;
6776 }
6777 if (remove)
6778 break;
6779 if (DECL_P (decl) && outer_ctx && (region_type & ORT_ACC))
6780 {
6781 struct gimplify_omp_ctx *octx;
6782 for (octx = outer_ctx; octx; octx = octx->outer_context)
6783 {
6784 if (octx->region_type != ORT_ACC_HOST_DATA)
6785 break;
6786 splay_tree_node n2
6787 = splay_tree_lookup (octx->variables,
6788 (splay_tree_key) decl);
6789 if (n2)
6790 error_at (OMP_CLAUSE_LOCATION (c), "variable %qE "
6791 "declared in enclosing %<host_data%> region",
6792 DECL_NAME (decl));
6793 }
6794 }
6795 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
6796 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
6797 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
6798 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
6799 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
6800 {
6801 remove = true;
6802 break;
6803 }
6804 else if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
6805 || (OMP_CLAUSE_MAP_KIND (c)
6806 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
6807 && TREE_CODE (OMP_CLAUSE_SIZE (c)) != INTEGER_CST)
6808 {
6809 OMP_CLAUSE_SIZE (c)
6810 = get_initialized_tmp_var (OMP_CLAUSE_SIZE (c), pre_p, NULL);
6811 omp_add_variable (ctx, OMP_CLAUSE_SIZE (c),
6812 GOVD_FIRSTPRIVATE | GOVD_SEEN);
6813 }
6814 if (!DECL_P (decl))
6815 {
6816 tree d = decl, *pd;
6817 if (TREE_CODE (d) == ARRAY_REF)
6818 {
6819 while (TREE_CODE (d) == ARRAY_REF)
6820 d = TREE_OPERAND (d, 0);
6821 if (TREE_CODE (d) == COMPONENT_REF
6822 && TREE_CODE (TREE_TYPE (d)) == ARRAY_TYPE)
6823 decl = d;
6824 }
6825 pd = &OMP_CLAUSE_DECL (c);
6826 if (d == decl
6827 && TREE_CODE (decl) == INDIRECT_REF
6828 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
6829 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
6830 == REFERENCE_TYPE))
6831 {
6832 pd = &TREE_OPERAND (decl, 0);
6833 decl = TREE_OPERAND (decl, 0);
6834 }
6835 if (TREE_CODE (decl) == COMPONENT_REF)
6836 {
6837 while (TREE_CODE (decl) == COMPONENT_REF)
6838 decl = TREE_OPERAND (decl, 0);
6839 }
6840 if (gimplify_expr (pd, pre_p, NULL, is_gimple_lvalue, fb_lvalue)
6841 == GS_ERROR)
6842 {
6843 remove = true;
6844 break;
6845 }
6846 if (DECL_P (decl))
6847 {
6848 if (error_operand_p (decl))
6849 {
6850 remove = true;
6851 break;
6852 }
6853
6854 if (TYPE_SIZE_UNIT (TREE_TYPE (decl)) == NULL
6855 || (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (decl)))
6856 != INTEGER_CST))
6857 {
6858 error_at (OMP_CLAUSE_LOCATION (c),
6859 "mapping field %qE of variable length "
6860 "structure", OMP_CLAUSE_DECL (c));
6861 remove = true;
6862 break;
6863 }
6864
6865 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER)
6866 {
6867 /* Error recovery. */
6868 if (prev_list_p == NULL)
6869 {
6870 remove = true;
6871 break;
6872 }
6873 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
6874 {
6875 tree ch = OMP_CLAUSE_CHAIN (*prev_list_p);
6876 if (ch == NULL_TREE || OMP_CLAUSE_CHAIN (ch) != c)
6877 {
6878 remove = true;
6879 break;
6880 }
6881 }
6882 }
6883
6884 tree offset;
6885 HOST_WIDE_INT bitsize, bitpos;
6886 machine_mode mode;
6887 int unsignedp, reversep, volatilep = 0;
6888 tree base = OMP_CLAUSE_DECL (c);
6889 while (TREE_CODE (base) == ARRAY_REF)
6890 base = TREE_OPERAND (base, 0);
6891 if (TREE_CODE (base) == INDIRECT_REF)
6892 base = TREE_OPERAND (base, 0);
6893 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
6894 &mode, &unsignedp, &reversep,
6895 &volatilep, false);
6896 gcc_assert (base == decl
6897 && (offset == NULL_TREE
6898 || TREE_CODE (offset) == INTEGER_CST));
6899
6900 splay_tree_node n
6901 = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6902 bool ptr = (OMP_CLAUSE_MAP_KIND (c)
6903 == GOMP_MAP_ALWAYS_POINTER);
6904 if (n == NULL || (n->value & GOVD_MAP) == 0)
6905 {
6906 tree l = build_omp_clause (OMP_CLAUSE_LOCATION (c),
6907 OMP_CLAUSE_MAP);
6908 OMP_CLAUSE_SET_MAP_KIND (l, GOMP_MAP_STRUCT);
6909 OMP_CLAUSE_DECL (l) = decl;
6910 OMP_CLAUSE_SIZE (l) = size_int (1);
6911 if (struct_map_to_clause == NULL)
6912 struct_map_to_clause = new hash_map<tree, tree>;
6913 struct_map_to_clause->put (decl, l);
6914 if (ptr)
6915 {
6916 enum gomp_map_kind mkind
6917 = code == OMP_TARGET_EXIT_DATA
6918 ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
6919 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
6920 OMP_CLAUSE_MAP);
6921 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
6922 OMP_CLAUSE_DECL (c2)
6923 = unshare_expr (OMP_CLAUSE_DECL (c));
6924 OMP_CLAUSE_CHAIN (c2) = *prev_list_p;
6925 OMP_CLAUSE_SIZE (c2)
6926 = TYPE_SIZE_UNIT (ptr_type_node);
6927 OMP_CLAUSE_CHAIN (l) = c2;
6928 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
6929 {
6930 tree c4 = OMP_CLAUSE_CHAIN (*prev_list_p);
6931 tree c3
6932 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
6933 OMP_CLAUSE_MAP);
6934 OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
6935 OMP_CLAUSE_DECL (c3)
6936 = unshare_expr (OMP_CLAUSE_DECL (c4));
6937 OMP_CLAUSE_SIZE (c3)
6938 = TYPE_SIZE_UNIT (ptr_type_node);
6939 OMP_CLAUSE_CHAIN (c3) = *prev_list_p;
6940 OMP_CLAUSE_CHAIN (c2) = c3;
6941 }
6942 *prev_list_p = l;
6943 prev_list_p = NULL;
6944 }
6945 else
6946 {
6947 OMP_CLAUSE_CHAIN (l) = c;
6948 *list_p = l;
6949 list_p = &OMP_CLAUSE_CHAIN (l);
6950 }
6951 flags = GOVD_MAP | GOVD_EXPLICIT;
6952 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) || ptr)
6953 flags |= GOVD_SEEN;
6954 goto do_add_decl;
6955 }
6956 else
6957 {
6958 tree *osc = struct_map_to_clause->get (decl);
6959 tree *sc = NULL, *scp = NULL;
6960 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) || ptr)
6961 n->value |= GOVD_SEEN;
6962 offset_int o1, o2;
6963 if (offset)
6964 o1 = wi::to_offset (offset);
6965 else
6966 o1 = 0;
6967 if (bitpos)
6968 o1 = o1 + bitpos / BITS_PER_UNIT;
6969 for (sc = &OMP_CLAUSE_CHAIN (*osc);
6970 *sc != c; sc = &OMP_CLAUSE_CHAIN (*sc))
6971 if (ptr && sc == prev_list_p)
6972 break;
6973 else if (TREE_CODE (OMP_CLAUSE_DECL (*sc))
6974 != COMPONENT_REF
6975 && (TREE_CODE (OMP_CLAUSE_DECL (*sc))
6976 != INDIRECT_REF)
6977 && (TREE_CODE (OMP_CLAUSE_DECL (*sc))
6978 != ARRAY_REF))
6979 break;
6980 else
6981 {
6982 tree offset2;
6983 HOST_WIDE_INT bitsize2, bitpos2;
6984 base = OMP_CLAUSE_DECL (*sc);
6985 if (TREE_CODE (base) == ARRAY_REF)
6986 {
6987 while (TREE_CODE (base) == ARRAY_REF)
6988 base = TREE_OPERAND (base, 0);
6989 if (TREE_CODE (base) != COMPONENT_REF
6990 || (TREE_CODE (TREE_TYPE (base))
6991 != ARRAY_TYPE))
6992 break;
6993 }
6994 else if (TREE_CODE (base) == INDIRECT_REF
6995 && (TREE_CODE (TREE_OPERAND (base, 0))
6996 == COMPONENT_REF)
6997 && (TREE_CODE (TREE_TYPE
6998 (TREE_OPERAND (base, 0)))
6999 == REFERENCE_TYPE))
7000 base = TREE_OPERAND (base, 0);
7001 base = get_inner_reference (base, &bitsize2,
7002 &bitpos2, &offset2,
7003 &mode, &unsignedp,
7004 &reversep, &volatilep,
7005 false);
7006 if (base != decl)
7007 break;
7008 if (scp)
7009 continue;
7010 gcc_assert (offset == NULL_TREE
7011 || TREE_CODE (offset) == INTEGER_CST);
7012 tree d1 = OMP_CLAUSE_DECL (*sc);
7013 tree d2 = OMP_CLAUSE_DECL (c);
7014 while (TREE_CODE (d1) == ARRAY_REF)
7015 d1 = TREE_OPERAND (d1, 0);
7016 while (TREE_CODE (d2) == ARRAY_REF)
7017 d2 = TREE_OPERAND (d2, 0);
7018 if (TREE_CODE (d1) == INDIRECT_REF)
7019 d1 = TREE_OPERAND (d1, 0);
7020 if (TREE_CODE (d2) == INDIRECT_REF)
7021 d2 = TREE_OPERAND (d2, 0);
7022 while (TREE_CODE (d1) == COMPONENT_REF)
7023 if (TREE_CODE (d2) == COMPONENT_REF
7024 && TREE_OPERAND (d1, 1)
7025 == TREE_OPERAND (d2, 1))
7026 {
7027 d1 = TREE_OPERAND (d1, 0);
7028 d2 = TREE_OPERAND (d2, 0);
7029 }
7030 else
7031 break;
7032 if (d1 == d2)
7033 {
7034 error_at (OMP_CLAUSE_LOCATION (c),
7035 "%qE appears more than once in map "
7036 "clauses", OMP_CLAUSE_DECL (c));
7037 remove = true;
7038 break;
7039 }
7040 if (offset2)
7041 o2 = wi::to_offset (offset2);
7042 else
7043 o2 = 0;
7044 if (bitpos2)
7045 o2 = o2 + bitpos2 / BITS_PER_UNIT;
7046 if (wi::ltu_p (o1, o2)
7047 || (wi::eq_p (o1, o2) && bitpos < bitpos2))
7048 {
7049 if (ptr)
7050 scp = sc;
7051 else
7052 break;
7053 }
7054 }
7055 if (remove)
7056 break;
7057 OMP_CLAUSE_SIZE (*osc)
7058 = size_binop (PLUS_EXPR, OMP_CLAUSE_SIZE (*osc),
7059 size_one_node);
7060 if (ptr)
7061 {
7062 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
7063 OMP_CLAUSE_MAP);
7064 tree cl = NULL_TREE;
7065 enum gomp_map_kind mkind
7066 = code == OMP_TARGET_EXIT_DATA
7067 ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
7068 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
7069 OMP_CLAUSE_DECL (c2)
7070 = unshare_expr (OMP_CLAUSE_DECL (c));
7071 OMP_CLAUSE_CHAIN (c2) = scp ? *scp : *prev_list_p;
7072 OMP_CLAUSE_SIZE (c2)
7073 = TYPE_SIZE_UNIT (ptr_type_node);
7074 cl = scp ? *prev_list_p : c2;
7075 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
7076 {
7077 tree c4 = OMP_CLAUSE_CHAIN (*prev_list_p);
7078 tree c3
7079 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
7080 OMP_CLAUSE_MAP);
7081 OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
7082 OMP_CLAUSE_DECL (c3)
7083 = unshare_expr (OMP_CLAUSE_DECL (c4));
7084 OMP_CLAUSE_SIZE (c3)
7085 = TYPE_SIZE_UNIT (ptr_type_node);
7086 OMP_CLAUSE_CHAIN (c3) = *prev_list_p;
7087 if (!scp)
7088 OMP_CLAUSE_CHAIN (c2) = c3;
7089 else
7090 cl = c3;
7091 }
7092 if (scp)
7093 *scp = c2;
7094 if (sc == prev_list_p)
7095 {
7096 *sc = cl;
7097 prev_list_p = NULL;
7098 }
7099 else
7100 {
7101 *prev_list_p = OMP_CLAUSE_CHAIN (c);
7102 list_p = prev_list_p;
7103 prev_list_p = NULL;
7104 OMP_CLAUSE_CHAIN (c) = *sc;
7105 *sc = cl;
7106 continue;
7107 }
7108 }
7109 else if (*sc != c)
7110 {
7111 *list_p = OMP_CLAUSE_CHAIN (c);
7112 OMP_CLAUSE_CHAIN (c) = *sc;
7113 *sc = c;
7114 continue;
7115 }
7116 }
7117 }
7118 if (!remove
7119 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_POINTER
7120 && OMP_CLAUSE_CHAIN (c)
7121 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c)) == OMP_CLAUSE_MAP
7122 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
7123 == GOMP_MAP_ALWAYS_POINTER))
7124 prev_list_p = list_p;
7125 break;
7126 }
7127 flags = GOVD_MAP | GOVD_EXPLICIT;
7128 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TO
7129 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TOFROM)
7130 flags |= GOVD_MAP_ALWAYS_TO;
7131 goto do_add;
7132
7133 case OMP_CLAUSE_DEPEND:
7134 if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK
7135 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE)
7136 {
7137 /* Nothing to do. OMP_CLAUSE_DECL will be lowered in
7138 omp-low.c. */
7139 break;
7140 }
7141 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
7142 {
7143 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
7144 NULL, is_gimple_val, fb_rvalue);
7145 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
7146 }
7147 if (error_operand_p (OMP_CLAUSE_DECL (c)))
7148 {
7149 remove = true;
7150 break;
7151 }
7152 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
7153 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
7154 is_gimple_val, fb_rvalue) == GS_ERROR)
7155 {
7156 remove = true;
7157 break;
7158 }
7159 break;
7160
7161 case OMP_CLAUSE_TO:
7162 case OMP_CLAUSE_FROM:
7163 case OMP_CLAUSE__CACHE_:
7164 decl = OMP_CLAUSE_DECL (c);
7165 if (error_operand_p (decl))
7166 {
7167 remove = true;
7168 break;
7169 }
7170 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
7171 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
7172 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
7173 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
7174 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
7175 {
7176 remove = true;
7177 break;
7178 }
7179 if (!DECL_P (decl))
7180 {
7181 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p,
7182 NULL, is_gimple_lvalue, fb_lvalue)
7183 == GS_ERROR)
7184 {
7185 remove = true;
7186 break;
7187 }
7188 break;
7189 }
7190 goto do_notice;
7191
7192 case OMP_CLAUSE_USE_DEVICE_PTR:
7193 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
7194 goto do_add;
7195 case OMP_CLAUSE_IS_DEVICE_PTR:
7196 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
7197 goto do_add;
7198
7199 do_add:
7200 decl = OMP_CLAUSE_DECL (c);
7201 do_add_decl:
7202 if (error_operand_p (decl))
7203 {
7204 remove = true;
7205 break;
7206 }
7207 if (DECL_NAME (decl) == NULL_TREE && (flags & GOVD_SHARED) == 0)
7208 {
7209 tree t = omp_member_access_dummy_var (decl);
7210 if (t)
7211 {
7212 tree v = DECL_VALUE_EXPR (decl);
7213 DECL_NAME (decl) = DECL_NAME (TREE_OPERAND (v, 1));
7214 if (outer_ctx)
7215 omp_notice_variable (outer_ctx, t, true);
7216 }
7217 }
7218 omp_add_variable (ctx, decl, flags);
7219 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
7220 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
7221 {
7222 omp_add_variable (ctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
7223 GOVD_LOCAL | GOVD_SEEN);
7224 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c)
7225 && walk_tree (&OMP_CLAUSE_REDUCTION_INIT (c),
7226 find_decl_expr,
7227 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
7228 NULL) == NULL_TREE)
7229 omp_add_variable (ctx,
7230 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
7231 GOVD_LOCAL | GOVD_SEEN);
7232 gimplify_omp_ctxp = ctx;
7233 push_gimplify_context ();
7234
7235 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
7236 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
7237
7238 gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
7239 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
7240 pop_gimplify_context
7241 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
7242 push_gimplify_context ();
7243 gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
7244 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
7245 pop_gimplify_context
7246 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
7247 OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
7248 OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;
7249
7250 gimplify_omp_ctxp = outer_ctx;
7251 }
7252 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7253 && OMP_CLAUSE_LASTPRIVATE_STMT (c))
7254 {
7255 gimplify_omp_ctxp = ctx;
7256 push_gimplify_context ();
7257 if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
7258 {
7259 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
7260 NULL, NULL);
7261 TREE_SIDE_EFFECTS (bind) = 1;
7262 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
7263 OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
7264 }
7265 gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
7266 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
7267 pop_gimplify_context
7268 (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
7269 OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;
7270
7271 gimplify_omp_ctxp = outer_ctx;
7272 }
7273 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
7274 && OMP_CLAUSE_LINEAR_STMT (c))
7275 {
7276 gimplify_omp_ctxp = ctx;
7277 push_gimplify_context ();
7278 if (TREE_CODE (OMP_CLAUSE_LINEAR_STMT (c)) != BIND_EXPR)
7279 {
7280 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
7281 NULL, NULL);
7282 TREE_SIDE_EFFECTS (bind) = 1;
7283 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LINEAR_STMT (c);
7284 OMP_CLAUSE_LINEAR_STMT (c) = bind;
7285 }
7286 gimplify_and_add (OMP_CLAUSE_LINEAR_STMT (c),
7287 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
7288 pop_gimplify_context
7289 (gimple_seq_first_stmt (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c)));
7290 OMP_CLAUSE_LINEAR_STMT (c) = NULL_TREE;
7291
7292 gimplify_omp_ctxp = outer_ctx;
7293 }
7294 if (notice_outer)
7295 goto do_notice;
7296 break;
7297
7298 case OMP_CLAUSE_COPYIN:
7299 case OMP_CLAUSE_COPYPRIVATE:
7300 decl = OMP_CLAUSE_DECL (c);
7301 if (error_operand_p (decl))
7302 {
7303 remove = true;
7304 break;
7305 }
7306 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_COPYPRIVATE
7307 && !remove
7308 && !omp_check_private (ctx, decl, true))
7309 {
7310 remove = true;
7311 if (is_global_var (decl))
7312 {
7313 if (DECL_THREAD_LOCAL_P (decl))
7314 remove = false;
7315 else if (DECL_HAS_VALUE_EXPR_P (decl))
7316 {
7317 tree value = get_base_address (DECL_VALUE_EXPR (decl));
7318
7319 if (value
7320 && DECL_P (value)
7321 && DECL_THREAD_LOCAL_P (value))
7322 remove = false;
7323 }
7324 }
7325 if (remove)
7326 error_at (OMP_CLAUSE_LOCATION (c),
7327 "copyprivate variable %qE is not threadprivate"
7328 " or private in outer context", DECL_NAME (decl));
7329 }
7330 do_notice:
7331 if (outer_ctx)
7332 omp_notice_variable (outer_ctx, decl, true);
7333 if (check_non_private
7334 && region_type == ORT_WORKSHARE
7335 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
7336 || decl == OMP_CLAUSE_DECL (c)
7337 || (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
7338 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
7339 == ADDR_EXPR
7340 || (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
7341 == POINTER_PLUS_EXPR
7342 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND
7343 (OMP_CLAUSE_DECL (c), 0), 0))
7344 == ADDR_EXPR)))))
7345 && omp_check_private (ctx, decl, false))
7346 {
7347 error ("%s variable %qE is private in outer context",
7348 check_non_private, DECL_NAME (decl));
7349 remove = true;
7350 }
7351 break;
7352
7353 case OMP_CLAUSE_IF:
7354 if (OMP_CLAUSE_IF_MODIFIER (c) != ERROR_MARK
7355 && OMP_CLAUSE_IF_MODIFIER (c) != code)
7356 {
7357 const char *p[2];
7358 for (int i = 0; i < 2; i++)
7359 switch (i ? OMP_CLAUSE_IF_MODIFIER (c) : code)
7360 {
7361 case OMP_PARALLEL: p[i] = "parallel"; break;
7362 case OMP_TASK: p[i] = "task"; break;
7363 case OMP_TASKLOOP: p[i] = "taskloop"; break;
7364 case OMP_TARGET_DATA: p[i] = "target data"; break;
7365 case OMP_TARGET: p[i] = "target"; break;
7366 case OMP_TARGET_UPDATE: p[i] = "target update"; break;
7367 case OMP_TARGET_ENTER_DATA:
7368 p[i] = "target enter data"; break;
7369 case OMP_TARGET_EXIT_DATA: p[i] = "target exit data"; break;
7370 default: gcc_unreachable ();
7371 }
7372 error_at (OMP_CLAUSE_LOCATION (c),
7373 "expected %qs %<if%> clause modifier rather than %qs",
7374 p[0], p[1]);
7375 remove = true;
7376 }
7377 /* Fall through. */
7378
7379 case OMP_CLAUSE_FINAL:
7380 OMP_CLAUSE_OPERAND (c, 0)
7381 = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
7382 /* Fall through. */
7383
7384 case OMP_CLAUSE_SCHEDULE:
7385 case OMP_CLAUSE_NUM_THREADS:
7386 case OMP_CLAUSE_NUM_TEAMS:
7387 case OMP_CLAUSE_THREAD_LIMIT:
7388 case OMP_CLAUSE_DIST_SCHEDULE:
7389 case OMP_CLAUSE_DEVICE:
7390 case OMP_CLAUSE_PRIORITY:
7391 case OMP_CLAUSE_GRAINSIZE:
7392 case OMP_CLAUSE_NUM_TASKS:
7393 case OMP_CLAUSE_HINT:
7394 case OMP_CLAUSE__CILK_FOR_COUNT_:
7395 case OMP_CLAUSE_ASYNC:
7396 case OMP_CLAUSE_WAIT:
7397 case OMP_CLAUSE_NUM_GANGS:
7398 case OMP_CLAUSE_NUM_WORKERS:
7399 case OMP_CLAUSE_VECTOR_LENGTH:
7400 case OMP_CLAUSE_WORKER:
7401 case OMP_CLAUSE_VECTOR:
7402 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
7403 is_gimple_val, fb_rvalue) == GS_ERROR)
7404 remove = true;
7405 break;
7406
7407 case OMP_CLAUSE_GANG:
7408 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
7409 is_gimple_val, fb_rvalue) == GS_ERROR)
7410 remove = true;
7411 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 1), pre_p, NULL,
7412 is_gimple_val, fb_rvalue) == GS_ERROR)
7413 remove = true;
7414 break;
7415
7416 case OMP_CLAUSE_TILE:
7417 for (tree list = OMP_CLAUSE_TILE_LIST (c); !remove && list;
7418 list = TREE_CHAIN (list))
7419 {
7420 if (gimplify_expr (&TREE_VALUE (list), pre_p, NULL,
7421 is_gimple_val, fb_rvalue) == GS_ERROR)
7422 remove = true;
7423 }
7424 break;
7425
7426 case OMP_CLAUSE_DEVICE_RESIDENT:
7427 remove = true;
7428 break;
7429
7430 case OMP_CLAUSE_NOWAIT:
7431 case OMP_CLAUSE_ORDERED:
7432 case OMP_CLAUSE_UNTIED:
7433 case OMP_CLAUSE_COLLAPSE:
7434 case OMP_CLAUSE_AUTO:
7435 case OMP_CLAUSE_SEQ:
7436 case OMP_CLAUSE_INDEPENDENT:
7437 case OMP_CLAUSE_MERGEABLE:
7438 case OMP_CLAUSE_PROC_BIND:
7439 case OMP_CLAUSE_SAFELEN:
7440 case OMP_CLAUSE_SIMDLEN:
7441 case OMP_CLAUSE_NOGROUP:
7442 case OMP_CLAUSE_THREADS:
7443 case OMP_CLAUSE_SIMD:
7444 break;
7445
7446 case OMP_CLAUSE_DEFAULTMAP:
7447 ctx->target_map_scalars_firstprivate = false;
7448 break;
7449
7450 case OMP_CLAUSE_ALIGNED:
7451 decl = OMP_CLAUSE_DECL (c);
7452 if (error_operand_p (decl))
7453 {
7454 remove = true;
7455 break;
7456 }
7457 if (gimplify_expr (&OMP_CLAUSE_ALIGNED_ALIGNMENT (c), pre_p, NULL,
7458 is_gimple_val, fb_rvalue) == GS_ERROR)
7459 {
7460 remove = true;
7461 break;
7462 }
7463 if (!is_global_var (decl)
7464 && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
7465 omp_add_variable (ctx, decl, GOVD_ALIGNED);
7466 break;
7467
7468 case OMP_CLAUSE_DEFAULT:
7469 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
7470 break;
7471
7472 default:
7473 gcc_unreachable ();
7474 }
7475
7476 if (remove)
7477 *list_p = OMP_CLAUSE_CHAIN (c);
7478 else
7479 list_p = &OMP_CLAUSE_CHAIN (c);
7480 }
7481
7482 gimplify_omp_ctxp = ctx;
7483 if (struct_map_to_clause)
7484 delete struct_map_to_clause;
7485 }
7486
7487 /* Return true if DECL is a candidate for shared to firstprivate
7488 optimization.  We consider only non-addressable scalars that are not
7489 too big and are not references. */
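/* Illustrative examples (a reader's sketch, not part of the original
   sources): a plain

     int x;

   is a candidate, whereas an array, a decl whose address is taken, or
   a decl privatized by reference (e.g. a Fortran dummy argument) is
   rejected by the checks below.  */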
7490
7491 static bool
7492 omp_shared_to_firstprivate_optimizable_decl_p (tree decl)
7493 {
7494 if (TREE_ADDRESSABLE (decl))
7495 return false;
7496 tree type = TREE_TYPE (decl);
7497 if (!is_gimple_reg_type (type)
7498 || TREE_CODE (type) == REFERENCE_TYPE
7499 || TREE_ADDRESSABLE (type))
7500 return false;
7501 /* Don't optimize too large decls, as each thread/task will have
7502 its own. */
7503 HOST_WIDE_INT len = int_size_in_bytes (type);
7504 if (len == -1 || len > 4 * POINTER_SIZE / BITS_PER_UNIT)
7505 return false;
7506 if (lang_hooks.decls.omp_privatize_by_reference (decl))
7507 return false;
7508 return true;
7509 }
7510
7511 /* Helper function of omp_find_stores_op and gimplify_adjust_omp_clauses*.
7512 If DECL satisfies omp_shared_to_firstprivate_optimizable_decl_p,
7513 mark it as GOVD_WRITTEN in the closest outer context where it is shared. */
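/* Illustrative example (a reader's note): given

     #pragma omp parallel shared (x)
       x = 1;

   the store to x sets GOVD_WRITTEN on the parallel context, which in
   turn keeps OMP_CLAUSE_SHARED_READONLY from being set on its SHARED
   clause.  */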
7514
7515 static void
7516 omp_mark_stores (struct gimplify_omp_ctx *ctx, tree decl)
7517 {
7518 for (; ctx; ctx = ctx->outer_context)
7519 {
7520 splay_tree_node n = splay_tree_lookup (ctx->variables,
7521 (splay_tree_key) decl);
7522 if (n == NULL)
7523 continue;
7524 else if (n->value & GOVD_SHARED)
7525 {
7526 n->value |= GOVD_WRITTEN;
7527 return;
7528 }
7529 else if (n->value & GOVD_DATA_SHARE_CLASS)
7530 return;
7531 }
7532 }
7533
7534 /* Helper callback for walk_gimple_seq to discover possible stores
7535 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
7536 GOVD_WRITTEN for those that are GOVD_SHARED in some outer
7537 context. */
7538
7539 static tree
7540 omp_find_stores_op (tree *tp, int *walk_subtrees, void *data)
7541 {
7542 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
7543
7544 *walk_subtrees = 0;
7545 if (!wi->is_lhs)
7546 return NULL_TREE;
7547
7548 tree op = *tp;
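/* Strip handled components and (TARGET_)MEM_REFs based on an
   ADDR_EXPR to get at the base decl of the store.  */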
7549 do
7550 {
7551 if (handled_component_p (op))
7552 op = TREE_OPERAND (op, 0);
7553 else if ((TREE_CODE (op) == MEM_REF || TREE_CODE (op) == TARGET_MEM_REF)
7554 && TREE_CODE (TREE_OPERAND (op, 0)) == ADDR_EXPR)
7555 op = TREE_OPERAND (TREE_OPERAND (op, 0), 0);
7556 else
7557 break;
7558 }
7559 while (1);
7560 if (!DECL_P (op) || !omp_shared_to_firstprivate_optimizable_decl_p (op))
7561 return NULL_TREE;
7562
7563 omp_mark_stores (gimplify_omp_ctxp, op);
7564 return NULL_TREE;
7565 }
7566
7567 /* Statement-level companion of omp_find_stores_op: helper callback
7568 for walk_gimple_seq to discover possible stores to
7569 omp_shared_to_firstprivate_optimizable_decl_p decls and set
7570 GOVD_WRITTEN for those that are GOVD_SHARED in some outer context. */
7571
7572 static tree
7573 omp_find_stores_stmt (gimple_stmt_iterator *gsi_p,
7574 bool *handled_ops_p,
7575 struct walk_stmt_info *wi)
7576 {
7577 gimple *stmt = gsi_stmt (*gsi_p);
7578 switch (gimple_code (stmt))
7579 {
7580 /* Don't recurse on OpenMP constructs whose bodies
7581 gimplify_adjust_omp_clauses has already handled, but do
7582 walk gimple_omp_for_pre_body. */
7583 case GIMPLE_OMP_FOR:
7584 *handled_ops_p = true;
7585 if (gimple_omp_for_pre_body (stmt))
7586 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
7587 omp_find_stores_stmt, omp_find_stores_op, wi);
7588 break;
7589 case GIMPLE_OMP_PARALLEL:
7590 case GIMPLE_OMP_TASK:
7591 case GIMPLE_OMP_SECTIONS:
7592 case GIMPLE_OMP_SINGLE:
7593 case GIMPLE_OMP_TARGET:
7594 case GIMPLE_OMP_TEAMS:
7595 case GIMPLE_OMP_CRITICAL:
7596 *handled_ops_p = true;
7597 break;
7598 default:
7599 break;
7600 }
7601 return NULL_TREE;
7602 }
7603
7604 struct gimplify_adjust_omp_clauses_data
7605 {
7606 tree *list_p;
7607 gimple_seq *pre_p;
7608 };
7609
7610 /* Splay-tree callback: add to *LIST_P the implicit data-sharing or
7611 map clause that the GOVD_* flags recorded for each variable call for. */
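/* A sketch of the common case (reader's note): a scalar referenced
   inside '#pragma omp parallel' with no explicit clause is recorded as
   GOVD_SHARED | GOVD_SEEN and receives an implicit OMP_CLAUSE_SHARED
   here.  */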
7612
7613 static int
7614 gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
7615 {
7616 tree *list_p = ((struct gimplify_adjust_omp_clauses_data *) data)->list_p;
7617 gimple_seq *pre_p
7618 = ((struct gimplify_adjust_omp_clauses_data *) data)->pre_p;
7619 tree decl = (tree) n->key;
7620 unsigned flags = n->value;
7621 enum omp_clause_code code;
7622 tree clause;
7623 bool private_debug;
7624
7625 if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
7626 return 0;
7627 if ((flags & GOVD_SEEN) == 0)
7628 return 0;
7629 if (flags & GOVD_DEBUG_PRIVATE)
7630 {
7631 gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_PRIVATE);
7632 private_debug = true;
7633 }
7634 else if (flags & GOVD_MAP)
7635 private_debug = false;
7636 else
7637 private_debug
7638 = lang_hooks.decls.omp_private_debug_clause (decl,
7639 !!(flags & GOVD_SHARED));
7640 if (private_debug)
7641 code = OMP_CLAUSE_PRIVATE;
7642 else if (flags & GOVD_MAP)
7643 code = OMP_CLAUSE_MAP;
7644 else if (flags & GOVD_SHARED)
7645 {
7646 if (is_global_var (decl))
7647 {
7648 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
7649 while (ctx != NULL)
7650 {
7651 splay_tree_node on
7652 = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
7653 if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
7654 | GOVD_PRIVATE | GOVD_REDUCTION
7655 | GOVD_LINEAR | GOVD_MAP)) != 0)
7656 break;
7657 ctx = ctx->outer_context;
7658 }
7659 if (ctx == NULL)
7660 return 0;
7661 }
7662 code = OMP_CLAUSE_SHARED;
7663 }
7664 else if (flags & GOVD_PRIVATE)
7665 code = OMP_CLAUSE_PRIVATE;
7666 else if (flags & GOVD_FIRSTPRIVATE)
7667 code = OMP_CLAUSE_FIRSTPRIVATE;
7668 else if (flags & GOVD_LASTPRIVATE)
7669 code = OMP_CLAUSE_LASTPRIVATE;
7670 else if (flags & GOVD_ALIGNED)
7671 return 0;
7672 else
7673 gcc_unreachable ();
7674
7675 if (((flags & GOVD_LASTPRIVATE)
7676 || (code == OMP_CLAUSE_SHARED && (flags & GOVD_WRITTEN)))
7677 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
7678 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
7679
7680 clause = build_omp_clause (input_location, code);
7681 OMP_CLAUSE_DECL (clause) = decl;
7682 OMP_CLAUSE_CHAIN (clause) = *list_p;
7683 if (private_debug)
7684 OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
7685 else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
7686 OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
7687 else if (code == OMP_CLAUSE_SHARED
7688 && (flags & GOVD_WRITTEN) == 0
7689 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
7690 OMP_CLAUSE_SHARED_READONLY (clause) = 1;
7691 else if (code == OMP_CLAUSE_MAP && (flags & GOVD_MAP_0LEN_ARRAY) != 0)
7692 {
7693 tree nc = build_omp_clause (input_location, OMP_CLAUSE_MAP);
7694 OMP_CLAUSE_DECL (nc) = decl;
7695 if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
7696 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
7697 OMP_CLAUSE_DECL (clause)
7698 = build_simple_mem_ref_loc (input_location, decl);
7699 OMP_CLAUSE_DECL (clause)
7700 = build2 (MEM_REF, char_type_node, OMP_CLAUSE_DECL (clause),
7701 build_int_cst (build_pointer_type (char_type_node), 0));
7702 OMP_CLAUSE_SIZE (clause) = size_zero_node;
7703 OMP_CLAUSE_SIZE (nc) = size_zero_node;
7704 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_ALLOC);
7705 OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (clause) = 1;
7706 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
7707 OMP_CLAUSE_CHAIN (nc) = *list_p;
7708 OMP_CLAUSE_CHAIN (clause) = nc;
7709 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
7710 gimplify_omp_ctxp = ctx->outer_context;
7711 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (clause), 0),
7712 pre_p, NULL, is_gimple_val, fb_rvalue);
7713 gimplify_omp_ctxp = ctx;
7714 }
7715 else if (code == OMP_CLAUSE_MAP)
7716 {
7717 int kind = (flags & GOVD_MAP_TO_ONLY
7718 ? GOMP_MAP_TO
7719 : GOMP_MAP_TOFROM);
7720 if (flags & GOVD_MAP_FORCE)
7721 kind |= GOMP_MAP_FLAG_FORCE;
7722 OMP_CLAUSE_SET_MAP_KIND (clause, kind);
7723 if (DECL_SIZE (decl)
7724 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
7725 {
7726 tree decl2 = DECL_VALUE_EXPR (decl);
7727 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
7728 decl2 = TREE_OPERAND (decl2, 0);
7729 gcc_assert (DECL_P (decl2));
7730 tree mem = build_simple_mem_ref (decl2);
7731 OMP_CLAUSE_DECL (clause) = mem;
7732 OMP_CLAUSE_SIZE (clause) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
7733 if (gimplify_omp_ctxp->outer_context)
7734 {
7735 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
7736 omp_notice_variable (ctx, decl2, true);
7737 omp_notice_variable (ctx, OMP_CLAUSE_SIZE (clause), true);
7738 }
7739 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
7740 OMP_CLAUSE_MAP);
7741 OMP_CLAUSE_DECL (nc) = decl;
7742 OMP_CLAUSE_SIZE (nc) = size_zero_node;
7743 if (gimplify_omp_ctxp->target_firstprivatize_array_bases)
7744 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
7745 else
7746 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
7747 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
7748 OMP_CLAUSE_CHAIN (clause) = nc;
7749 }
7750 else if (gimplify_omp_ctxp->target_firstprivatize_array_bases
7751 && lang_hooks.decls.omp_privatize_by_reference (decl))
7752 {
7753 OMP_CLAUSE_DECL (clause) = build_simple_mem_ref (decl);
7754 OMP_CLAUSE_SIZE (clause)
7755 = unshare_expr (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))));
7756 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
7757 gimplify_omp_ctxp = ctx->outer_context;
7758 gimplify_expr (&OMP_CLAUSE_SIZE (clause),
7759 pre_p, NULL, is_gimple_val, fb_rvalue);
7760 gimplify_omp_ctxp = ctx;
7761 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
7762 OMP_CLAUSE_MAP);
7763 OMP_CLAUSE_DECL (nc) = decl;
7764 OMP_CLAUSE_SIZE (nc) = size_zero_node;
7765 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_REFERENCE);
7766 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
7767 OMP_CLAUSE_CHAIN (clause) = nc;
7768 }
7769 else
7770 OMP_CLAUSE_SIZE (clause) = DECL_SIZE_UNIT (decl);
7771 }
7772 if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_LASTPRIVATE) != 0)
7773 {
7774 tree nc = build_omp_clause (input_location, OMP_CLAUSE_LASTPRIVATE);
7775 OMP_CLAUSE_DECL (nc) = decl;
7776 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (nc) = 1;
7777 OMP_CLAUSE_CHAIN (nc) = *list_p;
7778 OMP_CLAUSE_CHAIN (clause) = nc;
7779 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
7780 gimplify_omp_ctxp = ctx->outer_context;
7781 lang_hooks.decls.omp_finish_clause (nc, pre_p);
7782 gimplify_omp_ctxp = ctx;
7783 }
7784 *list_p = clause;
7785 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
7786 gimplify_omp_ctxp = ctx->outer_context;
7787 lang_hooks.decls.omp_finish_clause (clause, pre_p);
7788 gimplify_omp_ctxp = ctx;
7789 return 0;
7790 }
7791
7792 static void
7793 gimplify_adjust_omp_clauses (gimple_seq *pre_p, gimple_seq body, tree *list_p,
7794 enum tree_code code)
7795 {
7796 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
7797 tree c, decl;
7798
7799 if (body)
7800 {
7801 struct gimplify_omp_ctx *octx;
7802 for (octx = ctx; octx; octx = octx->outer_context)
7803 if ((octx->region_type & (ORT_PARALLEL | ORT_TASK | ORT_TEAMS)) != 0)
7804 break;
7805 if (octx)
7806 {
7807 struct walk_stmt_info wi;
7808 memset (&wi, 0, sizeof (wi));
7809 walk_gimple_seq (body, omp_find_stores_stmt,
7810 omp_find_stores_op, &wi);
7811 }
7812 }
7813 while ((c = *list_p) != NULL)
7814 {
7815 splay_tree_node n;
7816 bool remove = false;
7817
7818 switch (OMP_CLAUSE_CODE (c))
7819 {
7820 case OMP_CLAUSE_PRIVATE:
7821 case OMP_CLAUSE_SHARED:
7822 case OMP_CLAUSE_FIRSTPRIVATE:
7823 case OMP_CLAUSE_LINEAR:
7824 decl = OMP_CLAUSE_DECL (c);
7825 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
7826 remove = !(n->value & GOVD_SEEN);
7827 if (! remove)
7828 {
7829 bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
7830 if ((n->value & GOVD_DEBUG_PRIVATE)
7831 || lang_hooks.decls.omp_private_debug_clause (decl, shared))
7832 {
7833 gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
7834 || ((n->value & GOVD_DATA_SHARE_CLASS)
7835 == GOVD_PRIVATE));
7836 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
7837 OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
7838 }
7839 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
7840 && (n->value & GOVD_WRITTEN) == 0
7841 && DECL_P (decl)
7842 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
7843 OMP_CLAUSE_SHARED_READONLY (c) = 1;
7844 else if (DECL_P (decl)
7845 && ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
7846 && (n->value & GOVD_WRITTEN) != 0)
7847 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
7848 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
7849 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
7850 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
7851 }
7852 break;
7853
7854 case OMP_CLAUSE_LASTPRIVATE:
7855 /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
7856 accurately reflect the presence of a FIRSTPRIVATE clause. */
7857 decl = OMP_CLAUSE_DECL (c);
7858 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
7859 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
7860 = (n->value & GOVD_FIRSTPRIVATE) != 0;
7861 if (omp_no_lastprivate (ctx))
7862 {
7863 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
7864 remove = true;
7865 else
7866 OMP_CLAUSE_CODE (c) = OMP_CLAUSE_PRIVATE;
7867 }
7868 else if (code == OMP_DISTRIBUTE
7869 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
7870 {
7871 remove = true;
7872 error_at (OMP_CLAUSE_LOCATION (c),
7873 "same variable used in %<firstprivate%> and "
7874 "%<lastprivate%> clauses on %<distribute%> "
7875 "construct");
7876 }
7877 if (!remove
7878 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7879 && DECL_P (decl)
7880 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
7881 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
7882 break;
7883
7884 case OMP_CLAUSE_ALIGNED:
7885 decl = OMP_CLAUSE_DECL (c);
7886 if (!is_global_var (decl))
7887 {
7888 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
7889 remove = n == NULL || !(n->value & GOVD_SEEN);
7890 if (!remove && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
7891 {
7892 struct gimplify_omp_ctx *octx;
7893 if (n != NULL
7894 && (n->value & (GOVD_DATA_SHARE_CLASS
7895 & ~GOVD_FIRSTPRIVATE)))
7896 remove = true;
7897 else
7898 for (octx = ctx->outer_context; octx;
7899 octx = octx->outer_context)
7900 {
7901 n = splay_tree_lookup (octx->variables,
7902 (splay_tree_key) decl);
7903 if (n == NULL)
7904 continue;
7905 if (n->value & GOVD_LOCAL)
7906 break;
7907 /* We have to avoid assigning a shared variable
7908 to itself when trying to add
7909 __builtin_assume_aligned. */
7910 if (n->value & GOVD_SHARED)
7911 {
7912 remove = true;
7913 break;
7914 }
7915 }
7916 }
7917 }
7918 else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
7919 {
7920 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
7921 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
7922 remove = true;
7923 }
7924 break;
7925
7926 case OMP_CLAUSE_MAP:
7927 if (code == OMP_TARGET_EXIT_DATA
7928 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER)
7929 {
7930 remove = true;
7931 break;
7932 }
7933 decl = OMP_CLAUSE_DECL (c);
7934 if (!DECL_P (decl))
7935 {
7936 if ((ctx->region_type & ORT_TARGET) != 0
7937 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
7938 {
7939 if (TREE_CODE (decl) == INDIRECT_REF
7940 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
7941 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
7942 == REFERENCE_TYPE))
7943 decl = TREE_OPERAND (decl, 0);
7944 if (TREE_CODE (decl) == COMPONENT_REF)
7945 {
7946 while (TREE_CODE (decl) == COMPONENT_REF)
7947 decl = TREE_OPERAND (decl, 0);
7948 if (DECL_P (decl))
7949 {
7950 n = splay_tree_lookup (ctx->variables,
7951 (splay_tree_key) decl);
7952 if (!(n->value & GOVD_SEEN))
7953 remove = true;
7954 }
7955 }
7956 }
7957 break;
7958 }
7959 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
7960 if ((ctx->region_type & ORT_TARGET) != 0
7961 && !(n->value & GOVD_SEEN)
7962 && GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) == 0
7963 && !lookup_attribute ("omp declare target link",
7964 DECL_ATTRIBUTES (decl)))
7965 {
7966 remove = true;
7967 /* For struct element mappings, if the struct is never referenced
7968 in the target block and none of the mappings has an always modifier,
7969 remove all the struct element mappings, which immediately
7970 follow the GOMP_MAP_STRUCT map clause. */
7971 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT)
7972 {
7973 HOST_WIDE_INT cnt = tree_to_shwi (OMP_CLAUSE_SIZE (c));
7974 while (cnt--)
7975 OMP_CLAUSE_CHAIN (c)
7976 = OMP_CLAUSE_CHAIN (OMP_CLAUSE_CHAIN (c));
7977 }
7978 }
7979 else if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT
7980 && code == OMP_TARGET_EXIT_DATA)
7981 remove = true;
7982 else if (DECL_SIZE (decl)
7983 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST
7984 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_POINTER
7985 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
7986 && (OMP_CLAUSE_MAP_KIND (c)
7987 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
7988 {
7989 /* For GOMP_MAP_FORCE_DEVICEPTR, we'll never enter here, because
7990 for these, TREE_CODE (DECL_SIZE (decl)) will always be
7991 INTEGER_CST. */
7992 gcc_assert (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FORCE_DEVICEPTR);
7993
7994 tree decl2 = DECL_VALUE_EXPR (decl);
7995 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
7996 decl2 = TREE_OPERAND (decl2, 0);
7997 gcc_assert (DECL_P (decl2));
7998 tree mem = build_simple_mem_ref (decl2);
7999 OMP_CLAUSE_DECL (c) = mem;
8000 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
8001 if (ctx->outer_context)
8002 {
8003 omp_notice_variable (ctx->outer_context, decl2, true);
8004 omp_notice_variable (ctx->outer_context,
8005 OMP_CLAUSE_SIZE (c), true);
8006 }
8007 if (((ctx->region_type & ORT_TARGET) != 0
8008 || !ctx->target_firstprivatize_array_bases)
8009 && ((n->value & GOVD_SEEN) == 0
8010 || (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE)) == 0))
8011 {
8012 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8013 OMP_CLAUSE_MAP);
8014 OMP_CLAUSE_DECL (nc) = decl;
8015 OMP_CLAUSE_SIZE (nc) = size_zero_node;
8016 if (ctx->target_firstprivatize_array_bases)
8017 OMP_CLAUSE_SET_MAP_KIND (nc,
8018 GOMP_MAP_FIRSTPRIVATE_POINTER);
8019 else
8020 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
8021 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (c);
8022 OMP_CLAUSE_CHAIN (c) = nc;
8023 c = nc;
8024 }
8025 }
8026 else
8027 {
8028 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
8029 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
8030 gcc_assert ((n->value & GOVD_SEEN) == 0
8031 || ((n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
8032 == 0));
8033 }
8034 break;
8035
8036 case OMP_CLAUSE_TO:
8037 case OMP_CLAUSE_FROM:
8038 case OMP_CLAUSE__CACHE_:
8039 decl = OMP_CLAUSE_DECL (c);
8040 if (!DECL_P (decl))
8041 break;
8042 if (DECL_SIZE (decl)
8043 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
8044 {
8045 tree decl2 = DECL_VALUE_EXPR (decl);
8046 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
8047 decl2 = TREE_OPERAND (decl2, 0);
8048 gcc_assert (DECL_P (decl2));
8049 tree mem = build_simple_mem_ref (decl2);
8050 OMP_CLAUSE_DECL (c) = mem;
8051 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
8052 if (ctx->outer_context)
8053 {
8054 omp_notice_variable (ctx->outer_context, decl2, true);
8055 omp_notice_variable (ctx->outer_context,
8056 OMP_CLAUSE_SIZE (c), true);
8057 }
8058 }
8059 else if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
8060 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
8061 break;
8062
8063 case OMP_CLAUSE_REDUCTION:
8064 decl = OMP_CLAUSE_DECL (c);
8065 if (DECL_P (decl)
8066 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
8067 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
8068 break;
8069 case OMP_CLAUSE_COPYIN:
8070 case OMP_CLAUSE_COPYPRIVATE:
8071 case OMP_CLAUSE_IF:
8072 case OMP_CLAUSE_NUM_THREADS:
8073 case OMP_CLAUSE_NUM_TEAMS:
8074 case OMP_CLAUSE_THREAD_LIMIT:
8075 case OMP_CLAUSE_DIST_SCHEDULE:
8076 case OMP_CLAUSE_DEVICE:
8077 case OMP_CLAUSE_SCHEDULE:
8078 case OMP_CLAUSE_NOWAIT:
8079 case OMP_CLAUSE_ORDERED:
8080 case OMP_CLAUSE_DEFAULT:
8081 case OMP_CLAUSE_UNTIED:
8082 case OMP_CLAUSE_COLLAPSE:
8083 case OMP_CLAUSE_FINAL:
8084 case OMP_CLAUSE_MERGEABLE:
8085 case OMP_CLAUSE_PROC_BIND:
8086 case OMP_CLAUSE_SAFELEN:
8087 case OMP_CLAUSE_SIMDLEN:
8088 case OMP_CLAUSE_DEPEND:
8089 case OMP_CLAUSE_PRIORITY:
8090 case OMP_CLAUSE_GRAINSIZE:
8091 case OMP_CLAUSE_NUM_TASKS:
8092 case OMP_CLAUSE_NOGROUP:
8093 case OMP_CLAUSE_THREADS:
8094 case OMP_CLAUSE_SIMD:
8095 case OMP_CLAUSE_HINT:
8096 case OMP_CLAUSE_DEFAULTMAP:
8097 case OMP_CLAUSE_USE_DEVICE_PTR:
8098 case OMP_CLAUSE_IS_DEVICE_PTR:
8099 case OMP_CLAUSE__CILK_FOR_COUNT_:
8100 case OMP_CLAUSE_ASYNC:
8101 case OMP_CLAUSE_WAIT:
8102 case OMP_CLAUSE_DEVICE_RESIDENT:
8103 case OMP_CLAUSE_INDEPENDENT:
8104 case OMP_CLAUSE_NUM_GANGS:
8105 case OMP_CLAUSE_NUM_WORKERS:
8106 case OMP_CLAUSE_VECTOR_LENGTH:
8107 case OMP_CLAUSE_GANG:
8108 case OMP_CLAUSE_WORKER:
8109 case OMP_CLAUSE_VECTOR:
8110 case OMP_CLAUSE_AUTO:
8111 case OMP_CLAUSE_SEQ:
8112 case OMP_CLAUSE_TILE:
8113 break;
8114
8115 default:
8116 gcc_unreachable ();
8117 }
8118
8119 if (remove)
8120 *list_p = OMP_CLAUSE_CHAIN (c);
8121 else
8122 list_p = &OMP_CLAUSE_CHAIN (c);
8123 }
8124
8125 /* Add in any implicit data sharing. */
8126 struct gimplify_adjust_omp_clauses_data data;
8127 data.list_p = list_p;
8128 data.pre_p = pre_p;
8129 splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, &data);
8130
8131 gimplify_omp_ctxp = ctx->outer_context;
8132 delete_omp_context (ctx);
8133 }
8134
8135 /* Gimplify OACC_CACHE. */
8136
8137 static void
8138 gimplify_oacc_cache (tree *expr_p, gimple_seq *pre_p)
8139 {
8140 tree expr = *expr_p;
8141
8142 gimplify_scan_omp_clauses (&OACC_CACHE_CLAUSES (expr), pre_p, ORT_ACC,
8143 OACC_CACHE);
8144 gimplify_adjust_omp_clauses (pre_p, NULL, &OACC_CACHE_CLAUSES (expr),
8145 OACC_CACHE);
8146
8147 /* TODO: Do something sensible with this information. */
8148
8149 *expr_p = NULL_TREE;
8150 }
8151
8152 /* Helper function of gimplify_oacc_declare. If required, translate the
8153 map kind in CLAUSE into separate 'entry' and 'exit' kinds. The entry
8154 kind replaces the one in CLAUSE, while the exit kind is used in a new
8155 omp_clause that is returned to the caller. */
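/* For instance, a clause arriving with kind GOMP_MAP_FORCE_TOFROM is
   rewritten to GOMP_MAP_FORCE_TO for the entry, and the returned
   clause carries GOMP_MAP_FORCE_FROM for the exit; kinds such as
   GOMP_MAP_TO or GOMP_MAP_POINTER need no exit counterpart and NULL
   is returned (a summary of the switch below).  */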
8156
8157 static tree
8158 gimplify_oacc_declare_1 (tree clause)
8159 {
8160 HOST_WIDE_INT kind, new_op;
8161 bool ret = false;
8162 tree c = NULL;
8163
8164 kind = OMP_CLAUSE_MAP_KIND (clause);
8165
8166 switch (kind)
8167 {
8168 case GOMP_MAP_ALLOC:
8169 case GOMP_MAP_FORCE_ALLOC:
8170 case GOMP_MAP_FORCE_TO:
8171 new_op = GOMP_MAP_FORCE_DEALLOC;
8172 ret = true;
8173 break;
8174
8175 case GOMP_MAP_FORCE_FROM:
8176 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_ALLOC);
8177 new_op = GOMP_MAP_FORCE_FROM;
8178 ret = true;
8179 break;
8180
8181 case GOMP_MAP_FORCE_TOFROM:
8182 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_TO);
8183 new_op = GOMP_MAP_FORCE_FROM;
8184 ret = true;
8185 break;
8186
8187 case GOMP_MAP_FROM:
8188 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_ALLOC);
8189 new_op = GOMP_MAP_FROM;
8190 ret = true;
8191 break;
8192
8193 case GOMP_MAP_TOFROM:
8194 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_TO);
8195 new_op = GOMP_MAP_FROM;
8196 ret = true;
8197 break;
8198
8199 case GOMP_MAP_DEVICE_RESIDENT:
8200 case GOMP_MAP_FORCE_DEVICEPTR:
8201 case GOMP_MAP_FORCE_PRESENT:
8202 case GOMP_MAP_LINK:
8203 case GOMP_MAP_POINTER:
8204 case GOMP_MAP_TO:
8205 break;
8206
8207 default:
8208 gcc_unreachable ();
8209 break;
8210 }
8211
8212 if (ret)
8213 {
8214 c = build_omp_clause (OMP_CLAUSE_LOCATION (clause), OMP_CLAUSE_MAP);
8215 OMP_CLAUSE_SET_MAP_KIND (c, new_op);
8216 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clause);
8217 }
8218
8219 return c;
8220 }
8221
8222 /* Gimplify OACC_DECLARE. */
8223
8224 static void
8225 gimplify_oacc_declare (tree *expr_p, gimple_seq *pre_p)
8226 {
8227 tree expr = *expr_p;
8228 gomp_target *stmt;
8229 tree clauses, t;
8230
8231 clauses = OACC_DECLARE_CLAUSES (expr);
8232
8233 gimplify_scan_omp_clauses (&clauses, pre_p, ORT_TARGET_DATA, OACC_DECLARE);
8234
8235 for (t = clauses; t; t = OMP_CLAUSE_CHAIN (t))
8236 {
8237 tree decl = OMP_CLAUSE_DECL (t);
8238
8239 if (TREE_CODE (decl) == MEM_REF)
8240 continue;
8241
8242 if (TREE_CODE (decl) == VAR_DECL
8243 && !is_global_var (decl)
8244 && DECL_CONTEXT (decl) == current_function_decl)
8245 {
8246 tree c = gimplify_oacc_declare_1 (t);
8247 if (c)
8248 {
8249 if (oacc_declare_returns == NULL)
8250 oacc_declare_returns = new hash_map<tree, tree>;
8251
8252 oacc_declare_returns->put (decl, c);
8253 }
8254 }
8255
8256 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_SEEN);
8257 }
8258
8259 stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
8260 clauses);
8261
8262 gimplify_seq_add_stmt (pre_p, stmt);
8263
8264 *expr_p = NULL_TREE;
8265 }
8266
8267 /* Gimplify the contents of an OMP_PARALLEL statement. This involves
8268 gimplification of the body, as well as scanning the body for used
8269 variables. We need to do this scan now, because variable-sized
8270 decls will be decomposed during gimplification. */
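/* A sketch of the common case (reader's note):

     #pragma omp parallel shared (n)
       n++;

   The body is gimplified into a GIMPLE sequence, the clause list is
   adjusted for the variables actually used, and the result is wrapped
   in a GIMPLE_OMP_PARALLEL appended to PRE_P.  */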
8271
8272 static void
8273 gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
8274 {
8275 tree expr = *expr_p;
8276 gimple *g;
8277 gimple_seq body = NULL;
8278
8279 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
8280 OMP_PARALLEL_COMBINED (expr)
8281 ? ORT_COMBINED_PARALLEL
8282 : ORT_PARALLEL, OMP_PARALLEL);
8283
8284 push_gimplify_context ();
8285
8286 g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
8287 if (gimple_code (g) == GIMPLE_BIND)
8288 pop_gimplify_context (g);
8289 else
8290 pop_gimplify_context (NULL);
8291
8292 gimplify_adjust_omp_clauses (pre_p, body, &OMP_PARALLEL_CLAUSES (expr),
8293 OMP_PARALLEL);
8294
8295 g = gimple_build_omp_parallel (body,
8296 OMP_PARALLEL_CLAUSES (expr),
8297 NULL_TREE, NULL_TREE);
8298 if (OMP_PARALLEL_COMBINED (expr))
8299 gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
8300 gimplify_seq_add_stmt (pre_p, g);
8301 *expr_p = NULL_TREE;
8302 }
8303
8304 /* Gimplify the contents of an OMP_TASK statement. This involves
8305 gimplification of the body, as well as scanning the body for used
8306 variables. We need to do this scan now, because variable-sized
8307 decls will be decomposed during gimplification. */
8308
8309 static void
8310 gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
8311 {
8312 tree expr = *expr_p;
8313 gimple *g;
8314 gimple_seq body = NULL;
8315
8316 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p,
8317 find_omp_clause (OMP_TASK_CLAUSES (expr),
8318 OMP_CLAUSE_UNTIED)
8319 ? ORT_UNTIED_TASK : ORT_TASK, OMP_TASK);
8320
8321 push_gimplify_context ();
8322
8323 g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
8324 if (gimple_code (g) == GIMPLE_BIND)
8325 pop_gimplify_context (g);
8326 else
8327 pop_gimplify_context (NULL);
8328
8329 gimplify_adjust_omp_clauses (pre_p, body, &OMP_TASK_CLAUSES (expr),
8330 OMP_TASK);
8331
8332 g = gimple_build_omp_task (body,
8333 OMP_TASK_CLAUSES (expr),
8334 NULL_TREE, NULL_TREE,
8335 NULL_TREE, NULL_TREE, NULL_TREE);
8336 gimplify_seq_add_stmt (pre_p, g);
8337 *expr_p = NULL_TREE;
8338 }
8339
8340 /* Helper function of gimplify_omp_for: find the OMP_FOR or OMP_SIMD
8341 with non-NULL OMP_FOR_INIT. */
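/* E.g. for a combined 'distribute parallel for', the outer
   OMP_DISTRIBUTE has a NULL OMP_FOR_INIT; this walker digs through the
   intervening BIND_EXPR/STATEMENT_LIST/OMP_PARALLEL to the inner loop
   that carries the actual init/cond/incr (an illustrative sketch).  */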
8342
8343 static tree
8344 find_combined_omp_for (tree *tp, int *walk_subtrees, void *)
8345 {
8346 *walk_subtrees = 0;
8347 switch (TREE_CODE (*tp))
8348 {
8349 case OMP_FOR:
8350 *walk_subtrees = 1;
8351 /* FALLTHRU */
8352 case OMP_SIMD:
8353 if (OMP_FOR_INIT (*tp) != NULL_TREE)
8354 return *tp;
8355 break;
8356 case BIND_EXPR:
8357 case STATEMENT_LIST:
8358 case OMP_PARALLEL:
8359 *walk_subtrees = 1;
8360 break;
8361 default:
8362 break;
8363 }
8364 return NULL_TREE;
8365 }
8366
8367 /* Gimplify the gross structure of an OMP_FOR statement. */
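/* Reader's sketch of the canonical form the asserts below rely on; for
   each dimension I of the (possibly collapsed) loop nest:

     TREE_VEC_ELT (OMP_FOR_INIT (t), I)  is  IV = START        (MODIFY_EXPR)
     TREE_VEC_ELT (OMP_FOR_COND (t), I)  is  IV <cmp> END      (comparison)
     TREE_VEC_ELT (OMP_FOR_INCR (t), I)  is  IV = IV +/- STEP  (MODIFY_EXPR)  */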
8368
8369 static enum gimplify_status
8370 gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
8371 {
8372 tree for_stmt, orig_for_stmt, inner_for_stmt = NULL_TREE, decl, var, t;
8373 enum gimplify_status ret = GS_ALL_DONE;
8374 enum gimplify_status tret;
8375 gomp_for *gfor;
8376 gimple_seq for_body, for_pre_body;
8377 int i;
8378 bitmap has_decl_expr = NULL;
8379 enum omp_region_type ort = ORT_WORKSHARE;
8380
8381 orig_for_stmt = for_stmt = *expr_p;
8382
8383 switch (TREE_CODE (for_stmt))
8384 {
8385 case OMP_FOR:
8386 case CILK_FOR:
8387 case OMP_DISTRIBUTE:
8388 break;
8389 case OACC_LOOP:
8390 ort = ORT_ACC;
8391 break;
8392 case OMP_TASKLOOP:
8393 if (find_omp_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_UNTIED))
8394 ort = ORT_UNTIED_TASK;
8395 else
8396 ort = ORT_TASK;
8397 break;
8398 case OMP_SIMD:
8399 case CILK_SIMD:
8400 ort = ORT_SIMD;
8401 break;
8402 default:
8403 gcc_unreachable ();
8404 }
8405
8406 /* Set OMP_CLAUSE_LINEAR_NO_COPYIN flag on explicit linear
8407 clause for the IV. */
8408 if (ort == ORT_SIMD && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
8409 {
8410 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), 0);
8411 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
8412 decl = TREE_OPERAND (t, 0);
8413 for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
8414 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
8415 && OMP_CLAUSE_DECL (c) == decl)
8416 {
8417 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
8418 break;
8419 }
8420 }
8421
8422 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
8423 {
8424 gcc_assert (TREE_CODE (for_stmt) != OACC_LOOP);
8425 inner_for_stmt = walk_tree (&OMP_FOR_BODY (for_stmt),
8426 find_combined_omp_for, NULL, NULL);
8427 if (inner_for_stmt == NULL_TREE)
8428 {
8429 gcc_assert (seen_error ());
8430 *expr_p = NULL_TREE;
8431 return GS_ERROR;
8432 }
8433 }
8434
8435 if (TREE_CODE (for_stmt) != OMP_TASKLOOP)
8436 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p, ort,
8437 TREE_CODE (for_stmt));
8438
8439 if (TREE_CODE (for_stmt) == OMP_DISTRIBUTE)
8440 gimplify_omp_ctxp->distribute = true;
8441
8442 /* Handle OMP_FOR_INIT. */
8443 for_pre_body = NULL;
8444 if (ort == ORT_SIMD && OMP_FOR_PRE_BODY (for_stmt))
8445 {
8446 has_decl_expr = BITMAP_ALLOC (NULL);
8447 if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == DECL_EXPR
8448 && TREE_CODE (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt)))
8449 == VAR_DECL)
8450 {
8451 t = OMP_FOR_PRE_BODY (for_stmt);
8452 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
8453 }
8454 else if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == STATEMENT_LIST)
8455 {
8456 tree_stmt_iterator si;
8457 for (si = tsi_start (OMP_FOR_PRE_BODY (for_stmt)); !tsi_end_p (si);
8458 tsi_next (&si))
8459 {
8460 t = tsi_stmt (si);
8461 if (TREE_CODE (t) == DECL_EXPR
8462 && TREE_CODE (DECL_EXPR_DECL (t)) == VAR_DECL)
8463 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
8464 }
8465 }
8466 }
8467 if (OMP_FOR_PRE_BODY (for_stmt))
8468 {
8469 if (TREE_CODE (for_stmt) != OMP_TASKLOOP || gimplify_omp_ctxp)
8470 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
8471 else
8472 {
8473 struct gimplify_omp_ctx ctx;
8474 memset (&ctx, 0, sizeof (ctx));
8475 ctx.region_type = ORT_NONE;
8476 gimplify_omp_ctxp = &ctx;
8477 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
8478 gimplify_omp_ctxp = NULL;
8479 }
8480 }
8481 OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;
8482
8483 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
8484 for_stmt = inner_for_stmt;
8485
8486 /* For a taskloop, the start, end and step expressions need to be
8487 gimplified before the taskloop, outside of its omp context. */
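/* Illustrative sketch (hypothetical start/end/step calls):

     #pragma omp taskloop
     for (i = start (); i < end (); i += step ())

   Each non-constant bound is evaluated into a temporary here, before
   the taskloop, and the temporary is added to the construct as a
   FIRSTPRIVATE clause.  */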
8488 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
8489 {
8490 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
8491 {
8492 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
8493 if (!is_gimple_constant (TREE_OPERAND (t, 1)))
8494 {
8495 TREE_OPERAND (t, 1)
8496 = get_initialized_tmp_var (TREE_OPERAND (t, 1),
8497 pre_p, NULL);
8498 tree c = build_omp_clause (input_location,
8499 OMP_CLAUSE_FIRSTPRIVATE);
8500 OMP_CLAUSE_DECL (c) = TREE_OPERAND (t, 1);
8501 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
8502 OMP_FOR_CLAUSES (orig_for_stmt) = c;
8503 }
8504
8505 /* Handle OMP_FOR_COND. */
8506 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
8507 if (!is_gimple_constant (TREE_OPERAND (t, 1)))
8508 {
8509 TREE_OPERAND (t, 1)
8510 = get_initialized_tmp_var (TREE_OPERAND (t, 1),
8511 gimple_seq_empty_p (for_pre_body)
8512 ? pre_p : &for_pre_body, NULL);
8513 tree c = build_omp_clause (input_location,
8514 OMP_CLAUSE_FIRSTPRIVATE);
8515 OMP_CLAUSE_DECL (c) = TREE_OPERAND (t, 1);
8516 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
8517 OMP_FOR_CLAUSES (orig_for_stmt) = c;
8518 }
8519
8520 /* Handle OMP_FOR_INCR. */
8521 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
8522 if (TREE_CODE (t) == MODIFY_EXPR)
8523 {
8524 decl = TREE_OPERAND (t, 0);
8525 t = TREE_OPERAND (t, 1);
8526 tree *tp = &TREE_OPERAND (t, 1);
8527 if (TREE_CODE (t) == PLUS_EXPR && *tp == decl)
8528 tp = &TREE_OPERAND (t, 0);
8529
8530 if (!is_gimple_constant (*tp))
8531 {
8532 gimple_seq *seq = gimple_seq_empty_p (for_pre_body)
8533 ? pre_p : &for_pre_body;
8534 *tp = get_initialized_tmp_var (*tp, seq, NULL);
8535 tree c = build_omp_clause (input_location,
8536 OMP_CLAUSE_FIRSTPRIVATE);
8537 OMP_CLAUSE_DECL (c) = *tp;
8538 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
8539 OMP_FOR_CLAUSES (orig_for_stmt) = c;
8540 }
8541 }
8542 }
8543
8544 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (orig_for_stmt), pre_p, ort,
8545 OMP_TASKLOOP);
8546 }
8547
8548 if (orig_for_stmt != for_stmt)
8549 gimplify_omp_ctxp->combined_loop = true;
8550
8551 for_body = NULL;
8552 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
8553 == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
8554 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
8555 == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));
8556
8557 tree c = find_omp_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ORDERED);
8558 bool is_doacross = false;
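/* An 'ordered' clause with an argument, e.g. '#pragma omp for
   ordered(2)', marks a doacross loop; record original and private
   iteration variables for each dimension below.  */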
8559 if (c && OMP_CLAUSE_ORDERED_EXPR (c))
8560 {
8561 is_doacross = true;
8562 gimplify_omp_ctxp->loop_iter_var.create (TREE_VEC_LENGTH
8563 (OMP_FOR_INIT (for_stmt))
8564 * 2);
8565 }
8566 int collapse = 1;
8567 c = find_omp_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_COLLAPSE);
8568 if (c)
8569 collapse = tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (c));
8570 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
8571 {
8572 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
8573 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
8574 decl = TREE_OPERAND (t, 0);
8575 gcc_assert (DECL_P (decl));
8576 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
8577 || POINTER_TYPE_P (TREE_TYPE (decl)));
8578 if (is_doacross)
8579 {
8580 if (TREE_CODE (for_stmt) == OMP_FOR && OMP_FOR_ORIG_DECLS (for_stmt))
8581 gimplify_omp_ctxp->loop_iter_var.quick_push
8582 (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i));
8583 else
8584 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
8585 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
8586 }
8587
8588 /* Make sure the iteration variable is private. */
8589 tree c = NULL_TREE;
8590 tree c2 = NULL_TREE;
8591 if (orig_for_stmt != for_stmt)
8592 /* Do this only on innermost construct for combined ones. */;
8593 else if (ort == ORT_SIMD)
8594 {
8595 splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
8596 (splay_tree_key) decl);
8597 omp_is_private (gimplify_omp_ctxp, decl,
8598 1 + (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
8599 != 1));
8600 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
8601 omp_notice_variable (gimplify_omp_ctxp, decl, true);
8602 else if (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
8603 {
8604 c = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
8605 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
8606 unsigned int flags = GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN;
8607 if ((has_decl_expr
8608 && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
8609 || omp_no_lastprivate (gimplify_omp_ctxp))
8610 {
8611 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
8612 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
8613 }
8614 struct gimplify_omp_ctx *outer
8615 = gimplify_omp_ctxp->outer_context;
8616 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
8617 {
8618 if (outer->region_type == ORT_WORKSHARE
8619 && outer->combined_loop)
8620 {
8621 n = splay_tree_lookup (outer->variables,
8622 (splay_tree_key)decl);
8623 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
8624 {
8625 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
8626 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
8627 }
8628 else
8629 {
8630 struct gimplify_omp_ctx *octx = outer->outer_context;
8631 if (octx
8632 && octx->region_type == ORT_COMBINED_PARALLEL
8633 && octx->outer_context
8634 && (octx->outer_context->region_type
8635 == ORT_WORKSHARE)
8636 && octx->outer_context->combined_loop)
8637 {
8638 octx = octx->outer_context;
8639 n = splay_tree_lookup (octx->variables,
8640 (splay_tree_key)decl);
8641 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
8642 {
8643 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
8644 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
8645 }
8646 }
8647 }
8648 }
8649 }
8650
8651 OMP_CLAUSE_DECL (c) = decl;
8652 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
8653 OMP_FOR_CLAUSES (for_stmt) = c;
8654 omp_add_variable (gimplify_omp_ctxp, decl, flags);
8655 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
8656 {
8657 if (outer->region_type == ORT_WORKSHARE
8658 && outer->combined_loop)
8659 {
8660 if (outer->outer_context
8661 && (outer->outer_context->region_type
8662 == ORT_COMBINED_PARALLEL))
8663 outer = outer->outer_context;
8664 else if (omp_check_private (outer, decl, false))
8665 outer = NULL;
8666 }
8667 else if (((outer->region_type & ORT_TASK) != 0)
8668 && outer->combined_loop
8669 && !omp_check_private (gimplify_omp_ctxp,
8670 decl, false))
8671 ;
8672 else if (outer->region_type != ORT_COMBINED_PARALLEL)
8673 outer = NULL;
8674 if (outer)
8675 {
8676 n = splay_tree_lookup (outer->variables,
8677 (splay_tree_key)decl);
8678 if (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
8679 {
8680 omp_add_variable (outer, decl,
8681 GOVD_LASTPRIVATE | GOVD_SEEN);
8682 if (outer->region_type == ORT_COMBINED_PARALLEL
8683 && outer->outer_context
8684 && (outer->outer_context->region_type
8685 == ORT_WORKSHARE)
8686 && outer->outer_context->combined_loop)
8687 {
8688 outer = outer->outer_context;
8689 n = splay_tree_lookup (outer->variables,
8690 (splay_tree_key)decl);
8691 if (omp_check_private (outer, decl, false))
8692 outer = NULL;
8693 else if (n == NULL
8694 || ((n->value & GOVD_DATA_SHARE_CLASS)
8695 == 0))
8696 omp_add_variable (outer, decl,
8697 GOVD_LASTPRIVATE
8698 | GOVD_SEEN);
8699 else
8700 outer = NULL;
8701 }
8702 if (outer && outer->outer_context
8703 && (outer->outer_context->region_type
8704 == ORT_COMBINED_TEAMS))
8705 {
8706 outer = outer->outer_context;
8707 n = splay_tree_lookup (outer->variables,
8708 (splay_tree_key)decl);
8709 if (n == NULL
8710 || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
8711 omp_add_variable (outer, decl,
8712 GOVD_SHARED | GOVD_SEEN);
8713 else
8714 outer = NULL;
8715 }
8716 if (outer && outer->outer_context)
8717 omp_notice_variable (outer->outer_context, decl,
8718 true);
8719 }
8720 }
8721 }
8722 }
8723 else
8724 {
8725 bool lastprivate
8726 = (!has_decl_expr
8727 || !bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
8728 && !omp_no_lastprivate (gimplify_omp_ctxp);
8729 struct gimplify_omp_ctx *outer
8730 = gimplify_omp_ctxp->outer_context;
8731 if (outer && lastprivate)
8732 {
8733 if (outer->region_type == ORT_WORKSHARE
8734 && outer->combined_loop)
8735 {
8736 n = splay_tree_lookup (outer->variables,
8737 (splay_tree_key)decl);
8738 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
8739 {
8740 lastprivate = false;
8741 outer = NULL;
8742 }
8743 else if (outer->outer_context
8744 && (outer->outer_context->region_type
8745 == ORT_COMBINED_PARALLEL))
8746 outer = outer->outer_context;
8747 else if (omp_check_private (outer, decl, false))
8748 outer = NULL;
8749 }
8750 else if (((outer->region_type & ORT_TASK) != 0)
8751 && outer->combined_loop
8752 && !omp_check_private (gimplify_omp_ctxp,
8753 decl, false))
8754 ;
8755 else if (outer->region_type != ORT_COMBINED_PARALLEL)
8756 outer = NULL;
8757 if (outer)
8758 {
8759 n = splay_tree_lookup (outer->variables,
8760 (splay_tree_key)decl);
8761 if (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
8762 {
8763 omp_add_variable (outer, decl,
8764 GOVD_LASTPRIVATE | GOVD_SEEN);
8765 if (outer->region_type == ORT_COMBINED_PARALLEL
8766 && outer->outer_context
8767 && (outer->outer_context->region_type
8768 == ORT_WORKSHARE)
8769 && outer->outer_context->combined_loop)
8770 {
8771 outer = outer->outer_context;
8772 n = splay_tree_lookup (outer->variables,
8773 (splay_tree_key)decl);
8774 if (omp_check_private (outer, decl, false))
8775 outer = NULL;
8776 else if (n == NULL
8777 || ((n->value & GOVD_DATA_SHARE_CLASS)
8778 == 0))
8779 omp_add_variable (outer, decl,
8780 GOVD_LASTPRIVATE
8781 | GOVD_SEEN);
8782 else
8783 outer = NULL;
8784 }
8785 if (outer && outer->outer_context
8786 && (outer->outer_context->region_type
8787 == ORT_COMBINED_TEAMS))
8788 {
8789 outer = outer->outer_context;
8790 n = splay_tree_lookup (outer->variables,
8791 (splay_tree_key)decl);
8792 if (n == NULL
8793 || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
8794 omp_add_variable (outer, decl,
8795 GOVD_SHARED | GOVD_SEEN);
8796 else
8797 outer = NULL;
8798 }
8799 if (outer && outer->outer_context)
8800 omp_notice_variable (outer->outer_context, decl,
8801 true);
8802 }
8803 }
8804 }
8805
8806 c = build_omp_clause (input_location,
8807 lastprivate ? OMP_CLAUSE_LASTPRIVATE
8808 : OMP_CLAUSE_PRIVATE);
8809 OMP_CLAUSE_DECL (c) = decl;
8810 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
8811 OMP_FOR_CLAUSES (for_stmt) = c;
8812 omp_add_variable (gimplify_omp_ctxp, decl,
8813 (lastprivate ? GOVD_LASTPRIVATE : GOVD_PRIVATE)
8814 | GOVD_EXPLICIT | GOVD_SEEN);
8815 c = NULL_TREE;
8816 }
8817 }
8818 else if (omp_is_private (gimplify_omp_ctxp, decl, 0))
8819 omp_notice_variable (gimplify_omp_ctxp, decl, true);
8820 else
8821 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
8822
8823 /* If DECL is not a gimple register, create a temporary variable to act
8824 as an iteration counter. This is valid, since DECL cannot be
8825 modified in the body of the loop. Do the same for the iteration
8826 variables of a simd with collapse > 1, where they must be
8827 lastprivate. */
8828 if (orig_for_stmt != for_stmt)
8829 var = decl;
8830 else if (!is_gimple_reg (decl)
8831 || (ort == ORT_SIMD
8832 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1))
8833 {
8834 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
8835 TREE_OPERAND (t, 0) = var;
8836
8837 gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));
8838
8839 if (ort == ORT_SIMD
8840 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
8841 {
8842 c2 = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
8843 OMP_CLAUSE_LINEAR_NO_COPYIN (c2) = 1;
8844 OMP_CLAUSE_LINEAR_NO_COPYOUT (c2) = 1;
8845 OMP_CLAUSE_DECL (c2) = var;
8846 OMP_CLAUSE_CHAIN (c2) = OMP_FOR_CLAUSES (for_stmt);
8847 OMP_FOR_CLAUSES (for_stmt) = c2;
8848 omp_add_variable (gimplify_omp_ctxp, var,
8849 GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN);
8850 if (c == NULL_TREE)
8851 {
8852 c = c2;
8853 c2 = NULL_TREE;
8854 }
8855 }
8856 else
8857 omp_add_variable (gimplify_omp_ctxp, var,
8858 GOVD_PRIVATE | GOVD_SEEN);
8859 }
8860 else
8861 var = decl;
8862
8863 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
8864 is_gimple_val, fb_rvalue);
8865 ret = MIN (ret, tret);
8866 if (ret == GS_ERROR)
8867 return ret;
8868
8869 /* Handle OMP_FOR_COND. */
8870 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
8871 gcc_assert (COMPARISON_CLASS_P (t));
8872 gcc_assert (TREE_OPERAND (t, 0) == decl);
8873
8874 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
8875 is_gimple_val, fb_rvalue);
8876 ret = MIN (ret, tret);
8877
8878 /* Handle OMP_FOR_INCR. */
8879 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
8880 switch (TREE_CODE (t))
8881 {
8882 case PREINCREMENT_EXPR:
8883 case POSTINCREMENT_EXPR:
8884 {
8885 tree decl = TREE_OPERAND (t, 0);
8886 /* c_omp_for_incr_canonicalize_ptr() should have been
8887 called to massage things appropriately. */
8888 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
8889
8890 if (orig_for_stmt != for_stmt)
8891 break;
8892 t = build_int_cst (TREE_TYPE (decl), 1);
8893 if (c)
8894 OMP_CLAUSE_LINEAR_STEP (c) = t;
8895 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
8896 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
8897 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
8898 break;
8899 }
8900
8901 case PREDECREMENT_EXPR:
8902 case POSTDECREMENT_EXPR:
8903 /* c_omp_for_incr_canonicalize_ptr() should have been
8904 called to massage things appropriately. */
8905 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
8906 if (orig_for_stmt != for_stmt)
8907 break;
8908 t = build_int_cst (TREE_TYPE (decl), -1);
8909 if (c)
8910 OMP_CLAUSE_LINEAR_STEP (c) = t;
8911 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
8912 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
8913 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
8914 break;
8915
8916 case MODIFY_EXPR:
8917 gcc_assert (TREE_OPERAND (t, 0) == decl);
8918 TREE_OPERAND (t, 0) = var;
8919
8920 t = TREE_OPERAND (t, 1);
8921 switch (TREE_CODE (t))
8922 {
8923 case PLUS_EXPR:
8924 if (TREE_OPERAND (t, 1) == decl)
8925 {
8926 TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
8927 TREE_OPERAND (t, 0) = var;
8928 break;
8929 }
8930
8931 /* Fallthru. */
8932 case MINUS_EXPR:
8933 case POINTER_PLUS_EXPR:
8934 gcc_assert (TREE_OPERAND (t, 0) == decl);
8935 TREE_OPERAND (t, 0) = var;
8936 break;
8937 default:
8938 gcc_unreachable ();
8939 }
8940
8941 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
8942 is_gimple_val, fb_rvalue);
8943 ret = MIN (ret, tret);
8944 if (c)
8945 {
8946 tree step = TREE_OPERAND (t, 1);
8947 tree stept = TREE_TYPE (decl);
8948 if (POINTER_TYPE_P (stept))
8949 stept = sizetype;
8950 step = fold_convert (stept, step);
8951 if (TREE_CODE (t) == MINUS_EXPR)
8952 step = fold_build1 (NEGATE_EXPR, stept, step);
8953 OMP_CLAUSE_LINEAR_STEP (c) = step;
8954 if (step != TREE_OPERAND (t, 1))
8955 {
8956 tret = gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c),
8957 &for_pre_body, NULL,
8958 is_gimple_val, fb_rvalue);
8959 ret = MIN (ret, tret);
8960 }
8961 }
8962 break;
8963
8964 default:
8965 gcc_unreachable ();
8966 }
8967
8968 if (c2)
8969 {
8970 gcc_assert (c);
8971 OMP_CLAUSE_LINEAR_STEP (c2) = OMP_CLAUSE_LINEAR_STEP (c);
8972 }
8973
8974 if ((var != decl || collapse > 1) && orig_for_stmt == for_stmt)
8975 {
8976 for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
8977 if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
8978 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
8979 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
8980 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)
8981 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) == NULL))
8982 && OMP_CLAUSE_DECL (c) == decl)
8983 {
8984 if (is_doacross && (collapse == 1 || i >= collapse))
8985 t = var;
8986 else
8987 {
8988 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
8989 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
8990 gcc_assert (TREE_OPERAND (t, 0) == var);
8991 t = TREE_OPERAND (t, 1);
8992 gcc_assert (TREE_CODE (t) == PLUS_EXPR
8993 || TREE_CODE (t) == MINUS_EXPR
8994 || TREE_CODE (t) == POINTER_PLUS_EXPR);
8995 gcc_assert (TREE_OPERAND (t, 0) == var);
8996 t = build2 (TREE_CODE (t), TREE_TYPE (decl),
8997 is_doacross ? var : decl,
8998 TREE_OPERAND (t, 1));
8999 }
9000 gimple_seq *seq;
9001 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
9002 seq = &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c);
9003 else
9004 seq = &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c);
9005 gimplify_assign (decl, t, seq);
9006 }
9007 }
9008 }
9009
9010 BITMAP_FREE (has_decl_expr);
9011
9012 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
9013 {
9014 push_gimplify_context ();
9015 if (TREE_CODE (OMP_FOR_BODY (orig_for_stmt)) != BIND_EXPR)
9016 {
9017 OMP_FOR_BODY (orig_for_stmt)
9018 = build3 (BIND_EXPR, void_type_node, NULL,
9019 OMP_FOR_BODY (orig_for_stmt), NULL);
9020 TREE_SIDE_EFFECTS (OMP_FOR_BODY (orig_for_stmt)) = 1;
9021 }
9022 }
9023
9024 gimple *g = gimplify_and_return_first (OMP_FOR_BODY (orig_for_stmt),
9025 &for_body);
9026
9027 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
9028 {
9029 if (gimple_code (g) == GIMPLE_BIND)
9030 pop_gimplify_context (g);
9031 else
9032 pop_gimplify_context (NULL);
9033 }
9034
9035 if (orig_for_stmt != for_stmt)
9036 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
9037 {
9038 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
9039 decl = TREE_OPERAND (t, 0);
9040 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
9041 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
9042 gimplify_omp_ctxp = ctx->outer_context;
9043 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
9044 gimplify_omp_ctxp = ctx;
9045 omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
9046 TREE_OPERAND (t, 0) = var;
9047 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
9048 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
9049 TREE_OPERAND (TREE_OPERAND (t, 1), 0) = var;
9050 }
9051
9052 gimplify_adjust_omp_clauses (pre_p, for_body,
9053 &OMP_FOR_CLAUSES (orig_for_stmt),
9054 TREE_CODE (orig_for_stmt));
9055
9056 int kind;
9057 switch (TREE_CODE (orig_for_stmt))
9058 {
9059 case OMP_FOR: kind = GF_OMP_FOR_KIND_FOR; break;
9060 case OMP_SIMD: kind = GF_OMP_FOR_KIND_SIMD; break;
9061 case CILK_SIMD: kind = GF_OMP_FOR_KIND_CILKSIMD; break;
9062 case CILK_FOR: kind = GF_OMP_FOR_KIND_CILKFOR; break;
9063 case OMP_DISTRIBUTE: kind = GF_OMP_FOR_KIND_DISTRIBUTE; break;
9064 case OMP_TASKLOOP: kind = GF_OMP_FOR_KIND_TASKLOOP; break;
9065 case OACC_LOOP: kind = GF_OMP_FOR_KIND_OACC_LOOP; break;
9066 default:
9067 gcc_unreachable ();
9068 }
9069 gfor = gimple_build_omp_for (for_body, kind, OMP_FOR_CLAUSES (orig_for_stmt),
9070 TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
9071 for_pre_body);
9072 if (orig_for_stmt != for_stmt)
9073 gimple_omp_for_set_combined_p (gfor, true);
9074 if (gimplify_omp_ctxp
9075 && (gimplify_omp_ctxp->combined_loop
9076 || (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
9077 && gimplify_omp_ctxp->outer_context
9078 && gimplify_omp_ctxp->outer_context->combined_loop)))
9079 {
9080 gimple_omp_for_set_combined_into_p (gfor, true);
9081 if (gimplify_omp_ctxp->combined_loop)
9082 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_SIMD);
9083 else
9084 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_FOR);
9085 }
9086
9087 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
9088 {
9089 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
9090 gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
9091 gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
9092 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
9093 gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
9094 gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
9095 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
9096 gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
9097 }
9098
9099 /* OMP_TASKLOOP is gimplified as two GIMPLE_OMP_FOR taskloop
9100 constructs with a GIMPLE_OMP_TASK sandwiched in between them.
9101 The outer taskloop computes the number of iterations and the
9102 counts for collapsed loops, and holds the taskloop-specific
9103 clauses. The task construct stands for the effect of data sharing
9104 on the explicit task it creates, and the inner taskloop stands for
9105 the expansion of the static loop inside the explicit task construct. */
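/* For illustration only (a schematic sketch of the shape, not the
   exact generated IL):

     #pragma omp taskloop grainsize(g) lastprivate(i)
     for (i = a; i < b; i++)
       body;

   is lowered roughly as

     GIMPLE_OMP_FOR <taskloop, grainsize and other outer clauses>
       GIMPLE_OMP_TASK <shared/firstprivate/if/final/... clauses>
         GIMPLE_OMP_FOR <taskloop, collapse/lastprivate/... clauses>
           body  */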
9106 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
9107 {
9108 tree *gfor_clauses_ptr = gimple_omp_for_clauses_ptr (gfor);
9109 tree task_clauses = NULL_TREE;
9110 tree c = *gfor_clauses_ptr;
9111 tree *gtask_clauses_ptr = &task_clauses;
9112 tree outer_for_clauses = NULL_TREE;
9113 tree *gforo_clauses_ptr = &outer_for_clauses;
9114 for (; c; c = OMP_CLAUSE_CHAIN (c))
9115 switch (OMP_CLAUSE_CODE (c))
9116 {
9117 /* These clauses are allowed on task, move them there. */
9118 case OMP_CLAUSE_SHARED:
9119 case OMP_CLAUSE_FIRSTPRIVATE:
9120 case OMP_CLAUSE_DEFAULT:
9121 case OMP_CLAUSE_IF:
9122 case OMP_CLAUSE_UNTIED:
9123 case OMP_CLAUSE_FINAL:
9124 case OMP_CLAUSE_MERGEABLE:
9125 case OMP_CLAUSE_PRIORITY:
9126 *gtask_clauses_ptr = c;
9127 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
9128 break;
9129 case OMP_CLAUSE_PRIVATE:
9130 if (OMP_CLAUSE_PRIVATE_TASKLOOP_IV (c))
9131 {
9132 /* We want private on outer for and firstprivate
9133 on task. */
9134 *gtask_clauses_ptr
9135 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
9136 OMP_CLAUSE_FIRSTPRIVATE);
9137 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
9138 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL);
9139 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
9140 *gforo_clauses_ptr = c;
9141 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
9142 }
9143 else
9144 {
9145 *gtask_clauses_ptr = c;
9146 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
9147 }
9148 break;
9149 /* These clauses go into outer taskloop clauses. */
9150 case OMP_CLAUSE_GRAINSIZE:
9151 case OMP_CLAUSE_NUM_TASKS:
9152 case OMP_CLAUSE_NOGROUP:
9153 *gforo_clauses_ptr = c;
9154 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
9155 break;
9156 /* Taskloop clause we duplicate on both taskloops. */
9157 case OMP_CLAUSE_COLLAPSE:
9158 *gfor_clauses_ptr = c;
9159 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
9160 *gforo_clauses_ptr = copy_node (c);
9161 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
9162 break;
9163 /* For lastprivate, keep the clause on the inner taskloop, and add
9164 a shared clause on the task. If the same decl is also firstprivate,
9165 also add a firstprivate clause on the inner taskloop. */
9166 case OMP_CLAUSE_LASTPRIVATE:
9167 if (OMP_CLAUSE_LASTPRIVATE_TASKLOOP_IV (c))
9168 {
9169 /* For taskloop C++ lastprivate IVs, we want:
9170 1) private on outer taskloop
9171 2) firstprivate and shared on task
9172 3) lastprivate on inner taskloop */
9173 *gtask_clauses_ptr
9174 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
9175 OMP_CLAUSE_FIRSTPRIVATE);
9176 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
9177 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL);
9178 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
9179 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c) = 1;
9180 *gforo_clauses_ptr = build_omp_clause (OMP_CLAUSE_LOCATION (c),
9181 OMP_CLAUSE_PRIVATE);
9182 OMP_CLAUSE_DECL (*gforo_clauses_ptr) = OMP_CLAUSE_DECL (c);
9183 OMP_CLAUSE_PRIVATE_TASKLOOP_IV (*gforo_clauses_ptr) = 1;
9184 TREE_TYPE (*gforo_clauses_ptr) = TREE_TYPE (c);
9185 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
9186 }
9187 *gfor_clauses_ptr = c;
9188 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
9189 *gtask_clauses_ptr
9190 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_SHARED);
9191 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
9192 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
9193 OMP_CLAUSE_SHARED_FIRSTPRIVATE (*gtask_clauses_ptr) = 1;
9194 gtask_clauses_ptr
9195 = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
9196 break;
9197 default:
9198 gcc_unreachable ();
9199 }
9200 *gfor_clauses_ptr = NULL_TREE;
9201 *gtask_clauses_ptr = NULL_TREE;
9202 *gforo_clauses_ptr = NULL_TREE;
9203 g = gimple_build_bind (NULL_TREE, gfor, NULL_TREE);
9204 g = gimple_build_omp_task (g, task_clauses, NULL_TREE, NULL_TREE,
9205 NULL_TREE, NULL_TREE, NULL_TREE);
9206 gimple_omp_task_set_taskloop_p (g, true);
9207 g = gimple_build_bind (NULL_TREE, g, NULL_TREE);
9208 gomp_for *gforo
9209 = gimple_build_omp_for (g, GF_OMP_FOR_KIND_TASKLOOP, outer_for_clauses,
9210 gimple_omp_for_collapse (gfor),
9211 gimple_omp_for_pre_body (gfor));
9212 gimple_omp_for_set_pre_body (gfor, NULL);
9213 gimple_omp_for_set_combined_p (gforo, true);
9214 gimple_omp_for_set_combined_into_p (gfor, true);
9215 for (i = 0; i < (int) gimple_omp_for_collapse (gfor); i++)
9216 {
9217 t = unshare_expr (gimple_omp_for_index (gfor, i));
9218 gimple_omp_for_set_index (gforo, i, t);
9219 t = unshare_expr (gimple_omp_for_initial (gfor, i));
9220 gimple_omp_for_set_initial (gforo, i, t);
9221 gimple_omp_for_set_cond (gforo, i,
9222 gimple_omp_for_cond (gfor, i));
9223 t = unshare_expr (gimple_omp_for_final (gfor, i));
9224 gimple_omp_for_set_final (gforo, i, t);
9225 t = unshare_expr (gimple_omp_for_incr (gfor, i));
9226 gimple_omp_for_set_incr (gforo, i, t);
9227 }
9228 gimplify_seq_add_stmt (pre_p, gforo);
9229 }
9230 else
9231 gimplify_seq_add_stmt (pre_p, gfor);
9232 if (ret != GS_ALL_DONE)
9233 return GS_ERROR;
9234 *expr_p = NULL_TREE;
9235 return GS_ALL_DONE;
9236 }
9237
9238 /* Helper function of optimize_target_teams, find OMP_TEAMS inside
9239 of OMP_TARGET's body. */
9240
9241 static tree
9242 find_omp_teams (tree *tp, int *walk_subtrees, void *)
9243 {
9244 *walk_subtrees = 0;
9245 switch (TREE_CODE (*tp))
9246 {
9247 case OMP_TEAMS:
9248 return *tp;
9249 case BIND_EXPR:
9250 case STATEMENT_LIST:
9251 *walk_subtrees = 1;
9252 break;
9253 default:
9254 break;
9255 }
9256 return NULL_TREE;
9257 }
9258
9259 /* Helper function of optimize_target_teams, determine if the expression
9260 can be computed safely before the target construct on the host. */
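/* For example (a schematic sketch, not an exhaustive rule): given an
   integral decl n that is firstprivate on the target region, an
   expression such as n * 4 + 1 can be evaluated on the host before
   entering the construct, whereas a call like foo (n) or a
   dereference like *p cannot (foo and p being placeholders here). */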
9261
9262 static tree
9263 computable_teams_clause (tree *tp, int *walk_subtrees, void *)
9264 {
9265 splay_tree_node n;
9266
9267 if (TYPE_P (*tp))
9268 {
9269 *walk_subtrees = 0;
9270 return NULL_TREE;
9271 }
9272 switch (TREE_CODE (*tp))
9273 {
9274 case VAR_DECL:
9275 case PARM_DECL:
9276 case RESULT_DECL:
9277 *walk_subtrees = 0;
9278 if (error_operand_p (*tp)
9279 || !INTEGRAL_TYPE_P (TREE_TYPE (*tp))
9280 || DECL_HAS_VALUE_EXPR_P (*tp)
9281 || DECL_THREAD_LOCAL_P (*tp)
9282 || TREE_SIDE_EFFECTS (*tp)
9283 || TREE_THIS_VOLATILE (*tp))
9284 return *tp;
9285 if (is_global_var (*tp)
9286 && (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (*tp))
9287 || lookup_attribute ("omp declare target link",
9288 DECL_ATTRIBUTES (*tp))))
9289 return *tp;
9290 n = splay_tree_lookup (gimplify_omp_ctxp->variables,
9291 (splay_tree_key) *tp);
9292 if (n == NULL)
9293 {
9294 if (gimplify_omp_ctxp->target_map_scalars_firstprivate)
9295 return NULL_TREE;
9296 return *tp;
9297 }
9298 else if (n->value & GOVD_LOCAL)
9299 return *tp;
9300 else if (n->value & GOVD_FIRSTPRIVATE)
9301 return NULL_TREE;
9302 else if ((n->value & (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
9303 == (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
9304 return NULL_TREE;
9305 return *tp;
9306 case INTEGER_CST:
9307 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
9308 return *tp;
9309 return NULL_TREE;
9310 case TARGET_EXPR:
9311 if (TARGET_EXPR_INITIAL (*tp)
9312 || TREE_CODE (TARGET_EXPR_SLOT (*tp)) != VAR_DECL)
9313 return *tp;
9314 return computable_teams_clause (&TARGET_EXPR_SLOT (*tp),
9315 walk_subtrees, NULL);
9316 /* Allow some reasonable subset of integral arithmetic. */
9317 case PLUS_EXPR:
9318 case MINUS_EXPR:
9319 case MULT_EXPR:
9320 case TRUNC_DIV_EXPR:
9321 case CEIL_DIV_EXPR:
9322 case FLOOR_DIV_EXPR:
9323 case ROUND_DIV_EXPR:
9324 case TRUNC_MOD_EXPR:
9325 case CEIL_MOD_EXPR:
9326 case FLOOR_MOD_EXPR:
9327 case ROUND_MOD_EXPR:
9328 case RDIV_EXPR:
9329 case EXACT_DIV_EXPR:
9330 case MIN_EXPR:
9331 case MAX_EXPR:
9332 case LSHIFT_EXPR:
9333 case RSHIFT_EXPR:
9334 case BIT_IOR_EXPR:
9335 case BIT_XOR_EXPR:
9336 case BIT_AND_EXPR:
9337 case NEGATE_EXPR:
9338 case ABS_EXPR:
9339 case BIT_NOT_EXPR:
9340 case NON_LVALUE_EXPR:
9341 CASE_CONVERT:
9342 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
9343 return *tp;
9344 return NULL_TREE;
9345 /* And disallow anything else, except for comparisons. */
9346 default:
9347 if (COMPARISON_CLASS_P (*tp))
9348 return NULL_TREE;
9349 return *tp;
9350 }
9351 }
9352
9353 /* Try to determine if the num_teams and/or thread_limit expressions
9354 can have their values determined already before entering the
9355 target construct.
9356 INTEGER_CSTs trivially can; so can integral decls that are
9357 firstprivate (explicitly or implicitly) on the target region or
9358 explicitly map(always, to:) or map(always, tofrom:) there, and so
9359 can expressions involving simple arithmetic on those.
9360 Function calls are not OK, nor is dereferencing anything, etc.
9361 Add NUM_TEAMS and THREAD_LIMIT clauses to the OMP_CLAUSES of
9362 EXPR based on what we find:
9363 0 stands for clause not specified at all, use implementation default;
9364 -1 stands for a value that can't be determined easily before entering
9365 the target construct.
9366 If a teams construct is not present at all, use 1 for num_teams
9367 and 0 for thread_limit (only one team is involved, and the thread
9368 limit is implementation defined). */
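/* A schematic example (illustrative only): for

     #pragma omp target
     #pragma omp teams num_teams(4) thread_limit(n)

   the OMP_TARGET gets NUM_TEAMS (4) added, plus THREAD_LIMIT (n) if n
   is computable on the host per the rules above, or THREAD_LIMIT (-1)
   if it is not; with no teams construct in the body at all,
   NUM_TEAMS (1) and THREAD_LIMIT (0) are added instead. */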
9369
9370 static void
9371 optimize_target_teams (tree target, gimple_seq *pre_p)
9372 {
9373 tree body = OMP_BODY (target);
9374 tree teams = walk_tree (&body, find_omp_teams, NULL, NULL);
9375 tree num_teams = integer_zero_node;
9376 tree thread_limit = integer_zero_node;
9377 location_t num_teams_loc = EXPR_LOCATION (target);
9378 location_t thread_limit_loc = EXPR_LOCATION (target);
9379 tree c, *p, expr;
9380 struct gimplify_omp_ctx *target_ctx = gimplify_omp_ctxp;
9381
9382 if (teams == NULL_TREE)
9383 num_teams = integer_one_node;
9384 else
9385 for (c = OMP_TEAMS_CLAUSES (teams); c; c = OMP_CLAUSE_CHAIN (c))
9386 {
9387 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NUM_TEAMS)
9388 {
9389 p = &num_teams;
9390 num_teams_loc = OMP_CLAUSE_LOCATION (c);
9391 }
9392 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREAD_LIMIT)
9393 {
9394 p = &thread_limit;
9395 thread_limit_loc = OMP_CLAUSE_LOCATION (c);
9396 }
9397 else
9398 continue;
9399 expr = OMP_CLAUSE_OPERAND (c, 0);
9400 if (TREE_CODE (expr) == INTEGER_CST)
9401 {
9402 *p = expr;
9403 continue;
9404 }
9405 if (walk_tree (&expr, computable_teams_clause, NULL, NULL))
9406 {
9407 *p = integer_minus_one_node;
9408 continue;
9409 }
9410 *p = expr;
9411 gimplify_omp_ctxp = gimplify_omp_ctxp->outer_context;
9412 if (gimplify_expr (p, pre_p, NULL, is_gimple_val, fb_rvalue)
9413 == GS_ERROR)
9414 {
9415 gimplify_omp_ctxp = target_ctx;
9416 *p = integer_minus_one_node;
9417 continue;
9418 }
9419 gimplify_omp_ctxp = target_ctx;
9420 if (!DECL_P (expr) && TREE_CODE (expr) != TARGET_EXPR)
9421 OMP_CLAUSE_OPERAND (c, 0) = *p;
9422 }
9423 c = build_omp_clause (thread_limit_loc, OMP_CLAUSE_THREAD_LIMIT);
9424 OMP_CLAUSE_THREAD_LIMIT_EXPR (c) = thread_limit;
9425 OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
9426 OMP_TARGET_CLAUSES (target) = c;
9427 c = build_omp_clause (num_teams_loc, OMP_CLAUSE_NUM_TEAMS);
9428 OMP_CLAUSE_NUM_TEAMS_EXPR (c) = num_teams;
9429 OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
9430 OMP_TARGET_CLAUSES (target) = c;
9431 }
9432
9433 /* Gimplify the gross structure of several OMP constructs. */
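/* E.g. (schematic): for OMP_TARGET_DATA the gimplified body is wrapped
   below in a try/finally so the device data region is unmapped even on
   abnormal exit, roughly

     GIMPLE_OMP_TARGET <kind data, clauses>
       try { body } finally { GOMP_target_end_data (); }  */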
9434
9435 static void
9436 gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
9437 {
9438 tree expr = *expr_p;
9439 gimple *stmt;
9440 gimple_seq body = NULL;
9441 enum omp_region_type ort;
9442
9443 switch (TREE_CODE (expr))
9444 {
9445 case OMP_SECTIONS:
9446 case OMP_SINGLE:
9447 ort = ORT_WORKSHARE;
9448 break;
9449 case OMP_TARGET:
9450 ort = OMP_TARGET_COMBINED (expr) ? ORT_COMBINED_TARGET : ORT_TARGET;
9451 break;
9452 case OACC_KERNELS:
9453 ort = ORT_ACC_KERNELS;
9454 break;
9455 case OACC_PARALLEL:
9456 ort = ORT_ACC_PARALLEL;
9457 break;
9458 case OACC_DATA:
9459 ort = ORT_ACC_DATA;
9460 break;
9461 case OMP_TARGET_DATA:
9462 ort = ORT_TARGET_DATA;
9463 break;
9464 case OMP_TEAMS:
9465 ort = OMP_TEAMS_COMBINED (expr) ? ORT_COMBINED_TEAMS : ORT_TEAMS;
9466 break;
9467 case OACC_HOST_DATA:
9468 ort = ORT_ACC_HOST_DATA;
9469 break;
9470 default:
9471 gcc_unreachable ();
9472 }
9473 gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ort,
9474 TREE_CODE (expr));
9475 if (TREE_CODE (expr) == OMP_TARGET)
9476 optimize_target_teams (expr, pre_p);
9477 if ((ort & (ORT_TARGET | ORT_TARGET_DATA)) != 0)
9478 {
9479 push_gimplify_context ();
9480 gimple *g = gimplify_and_return_first (OMP_BODY (expr), &body);
9481 if (gimple_code (g) == GIMPLE_BIND)
9482 pop_gimplify_context (g);
9483 else
9484 pop_gimplify_context (NULL);
9485 if ((ort & ORT_TARGET_DATA) != 0)
9486 {
9487 enum built_in_function end_ix;
9488 switch (TREE_CODE (expr))
9489 {
9490 case OACC_DATA:
9491 case OACC_HOST_DATA:
9492 end_ix = BUILT_IN_GOACC_DATA_END;
9493 break;
9494 case OMP_TARGET_DATA:
9495 end_ix = BUILT_IN_GOMP_TARGET_END_DATA;
9496 break;
9497 default:
9498 gcc_unreachable ();
9499 }
9500 tree fn = builtin_decl_explicit (end_ix);
9501 g = gimple_build_call (fn, 0);
9502 gimple_seq cleanup = NULL;
9503 gimple_seq_add_stmt (&cleanup, g);
9504 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
9505 body = NULL;
9506 gimple_seq_add_stmt (&body, g);
9507 }
9508 }
9509 else
9510 gimplify_and_add (OMP_BODY (expr), &body);
9511 gimplify_adjust_omp_clauses (pre_p, body, &OMP_CLAUSES (expr),
9512 TREE_CODE (expr));
9513
9514 switch (TREE_CODE (expr))
9515 {
9516 case OACC_DATA:
9517 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_DATA,
9518 OMP_CLAUSES (expr));
9519 break;
9520 case OACC_KERNELS:
9521 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_KERNELS,
9522 OMP_CLAUSES (expr));
9523 break;
9524 case OACC_HOST_DATA:
9525 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_HOST_DATA,
9526 OMP_CLAUSES (expr));
9527 break;
9528 case OACC_PARALLEL:
9529 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_PARALLEL,
9530 OMP_CLAUSES (expr));
9531 break;
9532 case OMP_SECTIONS:
9533 stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
9534 break;
9535 case OMP_SINGLE:
9536 stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
9537 break;
9538 case OMP_TARGET:
9539 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_REGION,
9540 OMP_CLAUSES (expr));
9541 break;
9542 case OMP_TARGET_DATA:
9543 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_DATA,
9544 OMP_CLAUSES (expr));
9545 break;
9546 case OMP_TEAMS:
9547 stmt = gimple_build_omp_teams (body, OMP_CLAUSES (expr));
9548 break;
9549 default:
9550 gcc_unreachable ();
9551 }
9552
9553 gimplify_seq_add_stmt (pre_p, stmt);
9554 *expr_p = NULL_TREE;
9555 }
9556
9557 /* Gimplify the gross structure of OpenACC enter/exit data and update
9558 constructs, and of OpenMP target update and target enter/exit data. */
9559
9560 static void
9561 gimplify_omp_target_update (tree *expr_p, gimple_seq *pre_p)
9562 {
9563 tree expr = *expr_p;
9564 int kind;
9565 gomp_target *stmt;
9566 enum omp_region_type ort = ORT_WORKSHARE;
9567
9568 switch (TREE_CODE (expr))
9569 {
9570 case OACC_ENTER_DATA:
9571 case OACC_EXIT_DATA:
9572 kind = GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA;
9573 ort = ORT_ACC;
9574 break;
9575 case OACC_UPDATE:
9576 kind = GF_OMP_TARGET_KIND_OACC_UPDATE;
9577 ort = ORT_ACC;
9578 break;
9579 case OMP_TARGET_UPDATE:
9580 kind = GF_OMP_TARGET_KIND_UPDATE;
9581 break;
9582 case OMP_TARGET_ENTER_DATA:
9583 kind = GF_OMP_TARGET_KIND_ENTER_DATA;
9584 break;
9585 case OMP_TARGET_EXIT_DATA:
9586 kind = GF_OMP_TARGET_KIND_EXIT_DATA;
9587 break;
9588 default:
9589 gcc_unreachable ();
9590 }
9591 gimplify_scan_omp_clauses (&OMP_STANDALONE_CLAUSES (expr), pre_p,
9592 ort, TREE_CODE (expr));
9593 gimplify_adjust_omp_clauses (pre_p, NULL, &OMP_STANDALONE_CLAUSES (expr),
9594 TREE_CODE (expr));
9595 stmt = gimple_build_omp_target (NULL, kind, OMP_STANDALONE_CLAUSES (expr));
9596
9597 gimplify_seq_add_stmt (pre_p, stmt);
9598 *expr_p = NULL_TREE;
9599 }
9600
9601 /* A subroutine of gimplify_omp_atomic. The front end is supposed to have
9602 stabilized the lhs of the atomic operation as *ADDR. Return true if
9603 EXPR is this stabilized form. */
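/* E.g. (schematic): for #pragma omp atomic applied to x += 1, the
   front end passes ADDR == &x, and an EXPR of *(&x) (possibly wrapped
   in useless type conversions) is recognized as the stabilized lhs,
   while the constant 1 is not. */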
9604
9605 static bool
9606 goa_lhs_expr_p (tree expr, tree addr)
9607 {
9608 /* Also include casts to other type variants. The C front end is fond
9609 of adding these for e.g. volatile variables. This is like
9610 STRIP_TYPE_NOPS but includes the main variant lookup. */
9611 STRIP_USELESS_TYPE_CONVERSION (expr);
9612
9613 if (TREE_CODE (expr) == INDIRECT_REF)
9614 {
9615 expr = TREE_OPERAND (expr, 0);
9616 while (expr != addr
9617 && (CONVERT_EXPR_P (expr)
9618 || TREE_CODE (expr) == NON_LVALUE_EXPR)
9619 && TREE_CODE (expr) == TREE_CODE (addr)
9620 && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr)))
9621 {
9622 expr = TREE_OPERAND (expr, 0);
9623 addr = TREE_OPERAND (addr, 0);
9624 }
9625 if (expr == addr)
9626 return true;
9627 return (TREE_CODE (addr) == ADDR_EXPR
9628 && TREE_CODE (expr) == ADDR_EXPR
9629 && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
9630 }
9631 if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
9632 return true;
9633 return false;
9634 }
9635
9636 /* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR. If an
9637 expression does not involve the lhs, evaluate it into a temporary.
9638 Return 1 if the lhs appeared as a subexpression, 0 if it did not,
9639 or -1 if an error was encountered. */
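/* E.g. (schematic): stabilizing x + foo () against LHS_ADDR == &x
   replaces the occurrence of *(&x) with LHS_VAR and evaluates foo ()
   into a temporary in PRE_P, leaving LHS_VAR + tmp and returning 1
   (foo being a placeholder for any non-lhs subexpression). */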
9640
9641 static int
9642 goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
9643 tree lhs_var)
9644 {
9645 tree expr = *expr_p;
9646 int saw_lhs;
9647
9648 if (goa_lhs_expr_p (expr, lhs_addr))
9649 {
9650 *expr_p = lhs_var;
9651 return 1;
9652 }
9653 if (is_gimple_val (expr))
9654 return 0;
9655
9656 saw_lhs = 0;
9657 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
9658 {
9659 case tcc_binary:
9660 case tcc_comparison:
9661 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
9662 lhs_var);
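/* FALLTHRU */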
9663 case tcc_unary:
9664 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
9665 lhs_var);
9666 break;
9667 case tcc_expression:
9668 switch (TREE_CODE (expr))
9669 {
9670 case TRUTH_ANDIF_EXPR:
9671 case TRUTH_ORIF_EXPR:
9672 case TRUTH_AND_EXPR:
9673 case TRUTH_OR_EXPR:
9674 case TRUTH_XOR_EXPR:
9675 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
9676 lhs_addr, lhs_var);
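/* FALLTHRU */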
9677 case TRUTH_NOT_EXPR:
9678 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
9679 lhs_addr, lhs_var);
9680 break;
9681 case COMPOUND_EXPR:
9682 /* Break out any preevaluations from cp_build_modify_expr. */
9683 for (; TREE_CODE (expr) == COMPOUND_EXPR;
9684 expr = TREE_OPERAND (expr, 1))
9685 gimplify_stmt (&TREE_OPERAND (expr, 0), pre_p);
9686 *expr_p = expr;
9687 return goa_stabilize_expr (expr_p, pre_p, lhs_addr, lhs_var);
9688 default:
9689 break;
9690 }
9691 break;
9692 default:
9693 break;
9694 }
9695
9696 if (saw_lhs == 0)
9697 {
9698 enum gimplify_status gs;
9699 gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
9700 if (gs != GS_ALL_DONE)
9701 saw_lhs = -1;
9702 }
9703
9704 return saw_lhs;
9705 }
9706
9707 /* Gimplify an OMP_ATOMIC statement. */
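/* Schematically (not the exact IL), #pragma omp atomic applied to
   x += 1, with the lhs stabilized as ADDR == &x, becomes

     tmp = GIMPLE_OMP_ATOMIC_LOAD <*&x>
     GIMPLE_OMP_ATOMIC_STORE <tmp + 1>

   and the capture variants additionally make the old value (tmp) or
   the new value (the stored rhs) available as the result. */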
9708
9709 static enum gimplify_status
9710 gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
9711 {
9712 tree addr = TREE_OPERAND (*expr_p, 0);
9713 tree rhs = TREE_CODE (*expr_p) == OMP_ATOMIC_READ
9714 ? NULL : TREE_OPERAND (*expr_p, 1);
9715 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
9716 tree tmp_load;
9717 gomp_atomic_load *loadstmt;
9718 gomp_atomic_store *storestmt;
9719
9720 tmp_load = create_tmp_reg (type);
9721 if (rhs && goa_stabilize_expr (&rhs, pre_p, addr, tmp_load) < 0)
9722 return GS_ERROR;
9723
9724 if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
9725 != GS_ALL_DONE)
9726 return GS_ERROR;
9727
9728 loadstmt = gimple_build_omp_atomic_load (tmp_load, addr);
9729 gimplify_seq_add_stmt (pre_p, loadstmt);
9730 if (rhs && gimplify_expr (&rhs, pre_p, NULL, is_gimple_val, fb_rvalue)
9731 != GS_ALL_DONE)
9732 return GS_ERROR;
9733
9734 if (TREE_CODE (*expr_p) == OMP_ATOMIC_READ)
9735 rhs = tmp_load;
9736 storestmt = gimple_build_omp_atomic_store (rhs);
9737 gimplify_seq_add_stmt (pre_p, storestmt);
9738 if (OMP_ATOMIC_SEQ_CST (*expr_p))
9739 {
9740 gimple_omp_atomic_set_seq_cst (loadstmt);
9741 gimple_omp_atomic_set_seq_cst (storestmt);
9742 }
9743 switch (TREE_CODE (*expr_p))
9744 {
9745 case OMP_ATOMIC_READ:
9746 case OMP_ATOMIC_CAPTURE_OLD:
9747 *expr_p = tmp_load;
9748 gimple_omp_atomic_set_need_value (loadstmt);
9749 break;
9750 case OMP_ATOMIC_CAPTURE_NEW:
9751 *expr_p = rhs;
9752 gimple_omp_atomic_set_need_value (storestmt);
9753 break;
9754 default:
9755 *expr_p = NULL;
9756 break;
9757 }
9758
9759 return GS_ALL_DONE;
9760 }
9761
9762 /* Gimplify a TRANSACTION_EXPR. This involves gimplification of the
9763 body, and adding some EH bits. */
9764
9765 static enum gimplify_status
9766 gimplify_transaction (tree *expr_p, gimple_seq *pre_p)
9767 {
9768 tree expr = *expr_p, temp, tbody = TRANSACTION_EXPR_BODY (expr);
9769 gimple *body_stmt;
9770 gtransaction *trans_stmt;
9771 gimple_seq body = NULL;
9772 int subcode = 0;
9773
9774 /* Wrap the transaction body in a BIND_EXPR so we have a context
9775 in which to put decls for OMP. */
9776 if (TREE_CODE (tbody) != BIND_EXPR)
9777 {
9778 tree bind = build3 (BIND_EXPR, void_type_node, NULL, tbody, NULL);
9779 TREE_SIDE_EFFECTS (bind) = 1;
9780 SET_EXPR_LOCATION (bind, EXPR_LOCATION (tbody));
9781 TRANSACTION_EXPR_BODY (expr) = bind;
9782 }
9783
9784 push_gimplify_context ();
9785 temp = voidify_wrapper_expr (*expr_p, NULL);
9786
9787 body_stmt = gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr), &body);
9788 pop_gimplify_context (body_stmt);
9789
9790 trans_stmt = gimple_build_transaction (body);
9791 if (TRANSACTION_EXPR_OUTER (expr))
9792 subcode = GTMA_IS_OUTER;
9793 else if (TRANSACTION_EXPR_RELAXED (expr))
9794 subcode = GTMA_IS_RELAXED;
9795 gimple_transaction_set_subcode (trans_stmt, subcode);
9796
9797 gimplify_seq_add_stmt (pre_p, trans_stmt);
9798
9799 if (temp)
9800 {
9801 *expr_p = temp;
9802 return GS_OK;
9803 }
9804
9805 *expr_p = NULL_TREE;
9806 return GS_ALL_DONE;
9807 }
9808
9809 /* Gimplify an OMP_ORDERED construct. EXPR is the tree version. BODY
9810 is the OMP_BODY of the original EXPR (which has already been
9811 gimplified so it's not present in the EXPR).
9812
9813 Return the gimplified GIMPLE_OMP_ORDERED tuple. */
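/* For instance (schematic): within a loop nest annotated with
   #pragma omp for ordered(2), a construct such as

     #pragma omp ordered depend(sink: i - 1, j)

   must name exactly the two iteration variables i and j in loop
   order; the checks below diagnose mismatched variables, a wrong
   variable count, and a depend(source) clause combined with
   depend(sink:) on the same construct. */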
9814
9815 static gimple *
9816 gimplify_omp_ordered (tree expr, gimple_seq body)
9817 {
9818 tree c, decls;
9819 int failures = 0;
9820 unsigned int i;
9821 tree source_c = NULL_TREE;
9822 tree sink_c = NULL_TREE;
9823
9824 if (gimplify_omp_ctxp)
9825 for (c = OMP_ORDERED_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
9826 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
9827 && gimplify_omp_ctxp->loop_iter_var.is_empty ()
9828 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK
9829 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE))
9830 {
9831 error_at (OMP_CLAUSE_LOCATION (c),
9832 "%<ordered%> construct with %<depend%> clause must be "
9833 "closely nested inside a loop with %<ordered%> clause "
9834 "with a parameter");
9835 failures++;
9836 }
9837 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
9838 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
9839 {
9840 bool fail = false;
9841 for (decls = OMP_CLAUSE_DECL (c), i = 0;
9842 decls && TREE_CODE (decls) == TREE_LIST;
9843 decls = TREE_CHAIN (decls), ++i)
9844 if (i >= gimplify_omp_ctxp->loop_iter_var.length () / 2)
9845 continue;
9846 else if (TREE_VALUE (decls)
9847 != gimplify_omp_ctxp->loop_iter_var[2 * i])
9848 {
9849 error_at (OMP_CLAUSE_LOCATION (c),
9850 "variable %qE is not an iteration "
9851 "of outermost loop %d, expected %qE",
9852 TREE_VALUE (decls), i + 1,
9853 gimplify_omp_ctxp->loop_iter_var[2 * i]);
9854 fail = true;
9855 failures++;
9856 }
9857 else
9858 TREE_VALUE (decls)
9859 = gimplify_omp_ctxp->loop_iter_var[2 * i + 1];
9860 if (!fail && i != gimplify_omp_ctxp->loop_iter_var.length () / 2)
9861 {
9862 error_at (OMP_CLAUSE_LOCATION (c),
9863 "number of variables in %<depend(sink)%> "
9864 "clause does not match number of "
9865 "iteration variables");
9866 failures++;
9867 }
9868 sink_c = c;
9869 }
9870 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
9871 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE)
9872 {
9873 if (source_c)
9874 {
9875 error_at (OMP_CLAUSE_LOCATION (c),
9876 "more than one %<depend(source)%> clause on an "
9877 "%<ordered%> construct");
9878 failures++;
9879 }
9880 else
9881 source_c = c;
9882 }
9883 if (source_c && sink_c)
9884 {
9885 error_at (OMP_CLAUSE_LOCATION (source_c),
9886 "%<depend(source)%> clause specified together with "
9887 "%<depend(sink:)%> clauses on the same construct");
9888 failures++;
9889 }
9890
9891 if (failures)
9892 return gimple_build_nop ();
9893 return gimple_build_omp_ordered (body, OMP_ORDERED_CLAUSES (expr));
9894 }
9895
9896 /* Convert the GENERIC expression tree *EXPR_P to GIMPLE. If the
9897 expression produces a value to be used as an operand inside a GIMPLE
9898 statement, the value will be stored back in *EXPR_P. This value will
9899 be a tree of class tcc_declaration, tcc_constant, tcc_reference or
9900 an SSA_NAME. The corresponding sequence of GIMPLE statements is
9901 emitted in PRE_P and POST_P.
9902
9903 Additionally, this process may overwrite parts of the input
9904 expression during gimplification. Ideally, it should be
9905 possible to do non-destructive gimplification.
9906
9907 EXPR_P points to the GENERIC expression to convert to GIMPLE. If
9908 the expression needs to evaluate to a value to be used as
9909 an operand in a GIMPLE statement, this value will be stored in
9910 *EXPR_P on exit. This happens when the caller specifies one
9911 of fb_lvalue or fb_rvalue fallback flags.
9912
9913 PRE_P will contain the sequence of GIMPLE statements corresponding
9914 to the evaluation of EXPR and all the side-effects that must
9915 be executed before the main expression. On exit, the last
9916 statement of PRE_P is the core statement being gimplified. For
9917 instance, when gimplifying 'if (++a)' the last statement in
9918 PRE_P will be 'if (t.1)' where t.1 is the result of
9919 pre-incrementing 'a'.
9920
9921 POST_P will contain the sequence of GIMPLE statements corresponding
9922 to the evaluation of all the side-effects that must be executed
9923 after the main expression. If this is NULL, the post
9924 side-effects are stored at the end of PRE_P.
9925
9926 The reason why the output is split in two is to handle post
9927 side-effects explicitly. In some cases, an expression may have
9928 inner and outer post side-effects which need to be emitted in
9929 an order different from the one given by the recursive
9930 traversal. For instance, for the expression (*p--)++ the post
9931 side-effects of '--' must actually occur *after* the post
9932 side-effects of '++'. However, gimplification will first visit
9933 the inner expression, so if a separate POST sequence was not
9934 used, the resulting sequence would be:
9935
9936 1 t.1 = *p
9937 2 p = p - 1
9938 3 t.2 = t.1 + 1
9939 4 *p = t.2
9940
9941 However, the post-decrement operation in line #2 must not be
9942 evaluated until after the store to *p at line #4, so the
9943 correct sequence should be:
9944
9945 1 t.1 = *p
9946 2 t.2 = t.1 + 1
9947 3 *p = t.2
9948 4 p = p - 1
9949
9950 So, by specifying a separate post queue, it is possible
9951 to emit the post side-effects in the correct order.
9952 If POST_P is NULL, an internal queue will be used. Before
9953 returning to the caller, the sequence POST_P is appended to
9954 the main output sequence PRE_P.
9955
9956 GIMPLE_TEST_F points to a function that takes a tree T and
9957 returns nonzero if T is in the GIMPLE form requested by the
9958 caller. The GIMPLE predicates are in gimple.c.
9959
9960 FALLBACK tells the function what sort of a temporary we want if
9961 gimplification cannot produce an expression that complies with
9962 GIMPLE_TEST_F.
9963
9964 fb_none means that no temporary should be generated
9965 fb_rvalue means that an rvalue is OK to generate
9966 fb_lvalue means that an lvalue is OK to generate
9967 fb_either means that either is OK, but an lvalue is preferable.
9968 fb_mayfail means that gimplification may fail (in which case
9969 GS_ERROR will be returned)
9970
9971 The return value is either GS_ERROR or GS_ALL_DONE, since this
9972 function iterates until EXPR is completely gimplified or an error
9973 occurs. */
9974
9975 enum gimplify_status
9976 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
9977 bool (*gimple_test_f) (tree), fallback_t fallback)
9978 {
9979 tree tmp;
9980 gimple_seq internal_pre = NULL;
9981 gimple_seq internal_post = NULL;
9982 tree save_expr;
9983 bool is_statement;
9984 location_t saved_location;
9985 enum gimplify_status ret;
9986 gimple_stmt_iterator pre_last_gsi, post_last_gsi;
9987
9988 save_expr = *expr_p;
9989 if (save_expr == NULL_TREE)
9990 return GS_ALL_DONE;
9991
9992 /* If we are gimplifying a top-level statement, PRE_P must be valid. */
9993 is_statement = gimple_test_f == is_gimple_stmt;
9994 if (is_statement)
9995 gcc_assert (pre_p);
9996
9997 /* Consistency checks. */
9998 if (gimple_test_f == is_gimple_reg)
9999 gcc_assert (fallback & (fb_rvalue | fb_lvalue));
10000 else if (gimple_test_f == is_gimple_val
10001 || gimple_test_f == is_gimple_call_addr
10002 || gimple_test_f == is_gimple_condexpr
10003 || gimple_test_f == is_gimple_mem_rhs
10004 || gimple_test_f == is_gimple_mem_rhs_or_call
10005 || gimple_test_f == is_gimple_reg_rhs
10006 || gimple_test_f == is_gimple_reg_rhs_or_call
10007 || gimple_test_f == is_gimple_asm_val
10008 || gimple_test_f == is_gimple_mem_ref_addr)
10009 gcc_assert (fallback & fb_rvalue);
10010 else if (gimple_test_f == is_gimple_min_lval
10011 || gimple_test_f == is_gimple_lvalue)
10012 gcc_assert (fallback & fb_lvalue);
10013 else if (gimple_test_f == is_gimple_addressable)
10014 gcc_assert (fallback & fb_either);
10015 else if (gimple_test_f == is_gimple_stmt)
10016 gcc_assert (fallback == fb_none);
10017 else
10018 {
10019 /* We should have recognized the GIMPLE_TEST_F predicate to
10020 know what kind of fallback to use in case a temporary is
10021 needed to hold the value or address of *EXPR_P. */
10022 gcc_unreachable ();
10023 }
10024
10025 /* We used to check the predicate here and return immediately if it
10026 succeeds. This is wrong; the design is for gimplification to be
10027 idempotent, and for the predicates to only test for valid forms, not
10028 whether they are fully simplified. */
10029 if (pre_p == NULL)
10030 pre_p = &internal_pre;
10031
10032 if (post_p == NULL)
10033 post_p = &internal_post;
10034
10035 /* Remember the last statements added to PRE_P and POST_P. Every
10036 new statement added by the gimplification helpers needs to be
10037 annotated with location information. To centralize the
10038 responsibility, we remember the last statement that had been
10039 added to both queues before gimplifying *EXPR_P. If
10040 gimplification produces new statements in PRE_P and POST_P, those
10041 statements will be annotated with the same location information
10042 as *EXPR_P. */
10043 pre_last_gsi = gsi_last (*pre_p);
10044 post_last_gsi = gsi_last (*post_p);
10045
10046 saved_location = input_location;
10047 if (save_expr != error_mark_node
10048 && EXPR_HAS_LOCATION (*expr_p))
10049 input_location = EXPR_LOCATION (*expr_p);
10050
10051 /* Loop over the specific gimplifiers until the toplevel node
10052 remains the same. */
10053 do
10054 {
10055 /* Strip away as many useless type conversions as possible
10056 at the toplevel. */
10057 STRIP_USELESS_TYPE_CONVERSION (*expr_p);
10058
10059 /* Remember the expr. */
10060 save_expr = *expr_p;
10061
10062 /* Die, die, die, my darling. */
10063 if (save_expr == error_mark_node
10064 || (TREE_TYPE (save_expr)
10065 && TREE_TYPE (save_expr) == error_mark_node))
10066 {
10067 ret = GS_ERROR;
10068 break;
10069 }
10070
10071 /* Do any language-specific gimplification. */
10072 ret = ((enum gimplify_status)
10073 lang_hooks.gimplify_expr (expr_p, pre_p, post_p));
10074 if (ret == GS_OK)
10075 {
10076 if (*expr_p == NULL_TREE)
10077 break;
10078 if (*expr_p != save_expr)
10079 continue;
10080 }
10081 else if (ret != GS_UNHANDLED)
10082 break;
10083
10084 /* Make sure that all the cases set 'ret' appropriately. */
10085 ret = GS_UNHANDLED;
10086 switch (TREE_CODE (*expr_p))
10087 {
10088 /* First deal with the special cases. */
10089
10090 case POSTINCREMENT_EXPR:
10091 case POSTDECREMENT_EXPR:
10092 case PREINCREMENT_EXPR:
10093 case PREDECREMENT_EXPR:
10094 ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
10095 fallback != fb_none,
10096 TREE_TYPE (*expr_p));
10097 break;
10098
10099 case VIEW_CONVERT_EXPR:
10100 if (is_gimple_reg_type (TREE_TYPE (*expr_p))
10101 && is_gimple_reg_type (TREE_TYPE (TREE_OPERAND (*expr_p, 0))))
10102 {
10103 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
10104 post_p, is_gimple_val, fb_rvalue);
10105 recalculate_side_effects (*expr_p);
10106 break;
10107 }
10108 /* Fallthru. */
10109
10110 case ARRAY_REF:
10111 case ARRAY_RANGE_REF:
10112 case REALPART_EXPR:
10113 case IMAGPART_EXPR:
10114 case COMPONENT_REF:
10115 ret = gimplify_compound_lval (expr_p, pre_p, post_p,
10116 fallback ? fallback : fb_rvalue);
10117 break;
10118
10119 case COND_EXPR:
10120 ret = gimplify_cond_expr (expr_p, pre_p, fallback);
10121
10122 /* C99 code may assign to an array in a structure value of a
10123 conditional expression, and this has undefined behavior
10124 only on execution, so create a temporary if an lvalue is
10125 required. */
10126 if (fallback == fb_lvalue)
10127 {
10128 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
10129 mark_addressable (*expr_p);
10130 ret = GS_OK;
10131 }
10132 break;
10133
10134 case CALL_EXPR:
10135 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
10136
10137 /* C99 code may assign to an array in a structure returned
10138 from a function, and this has undefined behavior only on
10139 execution, so create a temporary if an lvalue is
10140 required. */
10141 if (fallback == fb_lvalue)
10142 {
10143 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
10144 mark_addressable (*expr_p);
10145 ret = GS_OK;
10146 }
10147 break;
10148
10149 case TREE_LIST:
10150 gcc_unreachable ();
10151
10152 case COMPOUND_EXPR:
10153 ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
10154 break;
10155
10156 case COMPOUND_LITERAL_EXPR:
10157 ret = gimplify_compound_literal_expr (expr_p, pre_p,
10158 gimple_test_f, fallback);
10159 break;
10160
10161 case MODIFY_EXPR:
10162 case INIT_EXPR:
10163 ret = gimplify_modify_expr (expr_p, pre_p, post_p,
10164 fallback != fb_none);
10165 break;
10166
10167 case TRUTH_ANDIF_EXPR:
10168 case TRUTH_ORIF_EXPR:
10169 {
10170 /* Preserve the original type of the expression and the
10171 source location of the outer expression. */
10172 tree org_type = TREE_TYPE (*expr_p);
10173 *expr_p = gimple_boolify (*expr_p);
10174 *expr_p = build3_loc (input_location, COND_EXPR,
10175 org_type, *expr_p,
10176 fold_convert_loc
10177 (input_location,
10178 org_type, boolean_true_node),
10179 fold_convert_loc
10180 (input_location,
10181 org_type, boolean_false_node));
10182 ret = GS_OK;
10183 break;
10184 }
10185
10186 case TRUTH_NOT_EXPR:
10187 {
10188 tree type = TREE_TYPE (*expr_p);
10189 /* The parsers are careful to generate TRUTH_NOT_EXPR
10190 only with operands that are always zero or one.
10191 We do not fold here but handle the only interesting case
10192 manually, as fold may re-introduce the TRUTH_NOT_EXPR. */
10193 *expr_p = gimple_boolify (*expr_p);
10194 if (TYPE_PRECISION (TREE_TYPE (*expr_p)) == 1)
10195 *expr_p = build1_loc (input_location, BIT_NOT_EXPR,
10196 TREE_TYPE (*expr_p),
10197 TREE_OPERAND (*expr_p, 0));
10198 else
10199 *expr_p = build2_loc (input_location, BIT_XOR_EXPR,
10200 TREE_TYPE (*expr_p),
10201 TREE_OPERAND (*expr_p, 0),
10202 build_int_cst (TREE_TYPE (*expr_p), 1));
10203 if (!useless_type_conversion_p (type, TREE_TYPE (*expr_p)))
10204 *expr_p = fold_convert_loc (input_location, type, *expr_p);
10205 ret = GS_OK;
10206 break;
10207 }
10208
10209 case ADDR_EXPR:
10210 ret = gimplify_addr_expr (expr_p, pre_p, post_p);
10211 break;
10212
10213 case ANNOTATE_EXPR:
10214 {
10215 tree cond = TREE_OPERAND (*expr_p, 0);
10216 tree kind = TREE_OPERAND (*expr_p, 1);
10217 tree type = TREE_TYPE (cond);
10218 if (!INTEGRAL_TYPE_P (type))
10219 {
10220 *expr_p = cond;
10221 ret = GS_OK;
10222 break;
10223 }
10224 tree tmp = create_tmp_var (type);
10225 gimplify_arg (&cond, pre_p, EXPR_LOCATION (*expr_p));
10226 gcall *call
10227 = gimple_build_call_internal (IFN_ANNOTATE, 2, cond, kind);
10228 gimple_call_set_lhs (call, tmp);
10229 gimplify_seq_add_stmt (pre_p, call);
10230 *expr_p = tmp;
10231 ret = GS_ALL_DONE;
10232 break;
10233 }
10234
10235 case VA_ARG_EXPR:
10236 ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
10237 break;
10238
10239 CASE_CONVERT:
10240 if (IS_EMPTY_STMT (*expr_p))
10241 {
10242 ret = GS_ALL_DONE;
10243 break;
10244 }
10245
10246 if (VOID_TYPE_P (TREE_TYPE (*expr_p))
10247 || fallback == fb_none)
10248 {
10249 /* Just strip a conversion to void (or in void context) and
10250 try again. */
10251 *expr_p = TREE_OPERAND (*expr_p, 0);
10252 ret = GS_OK;
10253 break;
10254 }
10255
10256 ret = gimplify_conversion (expr_p);
10257 if (ret == GS_ERROR)
10258 break;
10259 if (*expr_p != save_expr)
10260 break;
10261 /* FALLTHRU */
10262
10263 case FIX_TRUNC_EXPR:
10264 /* unary_expr: ... | '(' cast ')' val | ... */
10265 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
10266 is_gimple_val, fb_rvalue);
10267 recalculate_side_effects (*expr_p);
10268 break;
10269
10270 case INDIRECT_REF:
10271 {
10272 bool volatilep = TREE_THIS_VOLATILE (*expr_p);
10273 bool notrap = TREE_THIS_NOTRAP (*expr_p);
10274 tree saved_ptr_type = TREE_TYPE (TREE_OPERAND (*expr_p, 0));
10275
10276 *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
10277 if (*expr_p != save_expr)
10278 {
10279 ret = GS_OK;
10280 break;
10281 }
10282
10283 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
10284 is_gimple_reg, fb_rvalue);
10285 if (ret == GS_ERROR)
10286 break;
10287
10288 recalculate_side_effects (*expr_p);
10289 *expr_p = fold_build2_loc (input_location, MEM_REF,
10290 TREE_TYPE (*expr_p),
10291 TREE_OPERAND (*expr_p, 0),
10292 build_int_cst (saved_ptr_type, 0));
10293 TREE_THIS_VOLATILE (*expr_p) = volatilep;
10294 TREE_THIS_NOTRAP (*expr_p) = notrap;
10295 ret = GS_OK;
10296 break;
10297 }
10298
10299 /* We arrive here through the various re-gimplification paths. */
10300 case MEM_REF:
10301 /* First try re-folding the whole thing. */
10302 tmp = fold_binary (MEM_REF, TREE_TYPE (*expr_p),
10303 TREE_OPERAND (*expr_p, 0),
10304 TREE_OPERAND (*expr_p, 1));
10305 if (tmp)
10306 {
10307 REF_REVERSE_STORAGE_ORDER (tmp)
10308 = REF_REVERSE_STORAGE_ORDER (*expr_p);
10309 *expr_p = tmp;
10310 recalculate_side_effects (*expr_p);
10311 ret = GS_OK;
10312 break;
10313 }
10314 /* Avoid re-gimplifying the address operand if it is already
10315 in suitable form. Re-gimplifying would mark the address
10316 operand addressable. Always gimplify when not in SSA form
10317 as we still may have to gimplify decls with value-exprs. */
10318 if (!gimplify_ctxp || !gimplify_ctxp->into_ssa
10319 || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p, 0)))
10320 {
10321 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
10322 is_gimple_mem_ref_addr, fb_rvalue);
10323 if (ret == GS_ERROR)
10324 break;
10325 }
10326 recalculate_side_effects (*expr_p);
10327 ret = GS_ALL_DONE;
10328 break;
10329
10330 /* Constants need not be gimplified. */
10331 case INTEGER_CST:
10332 case REAL_CST:
10333 case FIXED_CST:
10334 case STRING_CST:
10335 case COMPLEX_CST:
10336 case VECTOR_CST:
10337 /* Drop the overflow flag on constants, we do not want
10338 that in the GIMPLE IL. */
10339 if (TREE_OVERFLOW_P (*expr_p))
10340 *expr_p = drop_tree_overflow (*expr_p);
10341 ret = GS_ALL_DONE;
10342 break;
10343
10344 case CONST_DECL:
10345 /* If we require an lvalue, such as for ADDR_EXPR, retain the
10346 CONST_DECL node. Otherwise the decl is replaceable by its
10347 value. */
10348 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
10349 if (fallback & fb_lvalue)
10350 ret = GS_ALL_DONE;
10351 else
10352 {
10353 *expr_p = DECL_INITIAL (*expr_p);
10354 ret = GS_OK;
10355 }
10356 break;
10357
10358 case DECL_EXPR:
10359 ret = gimplify_decl_expr (expr_p, pre_p);
10360 break;
10361
10362 case BIND_EXPR:
10363 ret = gimplify_bind_expr (expr_p, pre_p);
10364 break;
10365
10366 case LOOP_EXPR:
10367 ret = gimplify_loop_expr (expr_p, pre_p);
10368 break;
10369
10370 case SWITCH_EXPR:
10371 ret = gimplify_switch_expr (expr_p, pre_p);
10372 break;
10373
10374 case EXIT_EXPR:
10375 ret = gimplify_exit_expr (expr_p);
10376 break;
10377
10378 case GOTO_EXPR:
10379 /* If the target is not a LABEL_DECL, then it is a computed jump
10380 and the target needs to be gimplified. */
10381 if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
10382 {
10383 ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
10384 NULL, is_gimple_val, fb_rvalue);
10385 if (ret == GS_ERROR)
10386 break;
10387 }
10388 gimplify_seq_add_stmt (pre_p,
10389 gimple_build_goto (GOTO_DESTINATION (*expr_p)));
10390 ret = GS_ALL_DONE;
10391 break;
10392
10393 case PREDICT_EXPR:
10394 gimplify_seq_add_stmt (pre_p,
10395 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
10396 PREDICT_EXPR_OUTCOME (*expr_p)));
10397 ret = GS_ALL_DONE;
10398 break;
10399
10400 case LABEL_EXPR:
10401 ret = GS_ALL_DONE;
10402 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
10403 == current_function_decl);
10404 gimplify_seq_add_stmt (pre_p,
10405 gimple_build_label (LABEL_EXPR_LABEL (*expr_p)));
10406 break;
10407
10408 case CASE_LABEL_EXPR:
10409 ret = gimplify_case_label_expr (expr_p, pre_p);
10410 break;
10411
10412 case RETURN_EXPR:
10413 ret = gimplify_return_expr (*expr_p, pre_p);
10414 break;
10415
10416 case CONSTRUCTOR:
10417 /* Don't reduce this in place; let gimplify_init_constructor work its
10418 magic. But if we're just elaborating this for side effects, just
10419 gimplify any element that has side-effects. */
10420 if (fallback == fb_none)
10421 {
10422 unsigned HOST_WIDE_INT ix;
10423 tree val;
10424 tree temp = NULL_TREE;
10425 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p), ix, val)
10426 if (TREE_SIDE_EFFECTS (val))
10427 append_to_statement_list (val, &temp);
10428
10429 *expr_p = temp;
10430 ret = temp ? GS_OK : GS_ALL_DONE;
10431 }
10432 /* C99 code may assign to an array in a constructed
10433 structure or union, and this has undefined behavior only
10434 on execution, so create a temporary if an lvalue is
10435 required. */
10436 else if (fallback == fb_lvalue)
10437 {
10438 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
10439 mark_addressable (*expr_p);
10440 ret = GS_OK;
10441 }
10442 else
10443 ret = GS_ALL_DONE;
10444 break;
10445
10446 /* The following are special cases that are not handled by the
10447 original GIMPLE grammar. */
10448
10449 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
10450 eliminated. */
10451 case SAVE_EXPR:
10452 ret = gimplify_save_expr (expr_p, pre_p, post_p);
10453 break;
10454
10455 case BIT_FIELD_REF:
10456 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
10457 post_p, is_gimple_lvalue, fb_either);
10458 recalculate_side_effects (*expr_p);
10459 break;
10460
10461 case TARGET_MEM_REF:
10462 {
10463 enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;
10464
10465 if (TMR_BASE (*expr_p))
10466 r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
10467 post_p, is_gimple_mem_ref_addr, fb_either);
10468 if (TMR_INDEX (*expr_p))
10469 r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
10470 post_p, is_gimple_val, fb_rvalue);
10471 if (TMR_INDEX2 (*expr_p))
10472 r1 = gimplify_expr (&TMR_INDEX2 (*expr_p), pre_p,
10473 post_p, is_gimple_val, fb_rvalue);
10474 /* TMR_STEP and TMR_OFFSET are always integer constants. */
10475 ret = MIN (r0, r1);
10476 }
10477 break;
10478
10479 case NON_LVALUE_EXPR:
10480 /* This should have been stripped above. */
10481 gcc_unreachable ();
10482
10483 case ASM_EXPR:
10484 ret = gimplify_asm_expr (expr_p, pre_p, post_p);
10485 break;
10486
10487 case TRY_FINALLY_EXPR:
10488 case TRY_CATCH_EXPR:
10489 {
10490 gimple_seq eval, cleanup;
10491 gtry *try_;
10492
10493 /* Calls to destructors are generated automatically in FINALLY/CATCH
10494 blocks. They should have UNKNOWN_LOCATION as their location. However,
10495 gimplify_call_expr will reset these call stmts to input_location
10496 if it finds a stmt's location is unknown. To prevent this resetting
10497 for destructors, we set input_location to UNKNOWN_LOCATION here.
10498 Note that this only affects the destructor calls in the FINALLY/CATCH
10499 block; input_location is automatically restored to its original value
10500 by the end of gimplify_expr. */
10501 input_location = UNKNOWN_LOCATION;
10502 eval = cleanup = NULL;
10503 gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
10504 gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
10505 /* Don't create bogus GIMPLE_TRY with empty cleanup. */
10506 if (gimple_seq_empty_p (cleanup))
10507 {
10508 gimple_seq_add_seq (pre_p, eval);
10509 ret = GS_ALL_DONE;
10510 break;
10511 }
10512 try_ = gimple_build_try (eval, cleanup,
10513 TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
10514 ? GIMPLE_TRY_FINALLY
10515 : GIMPLE_TRY_CATCH);
10516 if (EXPR_HAS_LOCATION (save_expr))
10517 gimple_set_location (try_, EXPR_LOCATION (save_expr));
10518 else if (LOCATION_LOCUS (saved_location) != UNKNOWN_LOCATION)
10519 gimple_set_location (try_, saved_location);
10520 if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
10521 gimple_try_set_catch_is_cleanup (try_,
10522 TRY_CATCH_IS_CLEANUP (*expr_p));
10523 gimplify_seq_add_stmt (pre_p, try_);
10524 ret = GS_ALL_DONE;
10525 break;
10526 }
10527
10528 case CLEANUP_POINT_EXPR:
10529 ret = gimplify_cleanup_point_expr (expr_p, pre_p);
10530 break;
10531
10532 case TARGET_EXPR:
10533 ret = gimplify_target_expr (expr_p, pre_p, post_p);
10534 break;
10535
10536 case CATCH_EXPR:
10537 {
10538 gimple *c;
10539 gimple_seq handler = NULL;
10540 gimplify_and_add (CATCH_BODY (*expr_p), &handler);
10541 c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
10542 gimplify_seq_add_stmt (pre_p, c);
10543 ret = GS_ALL_DONE;
10544 break;
10545 }
10546
10547 case EH_FILTER_EXPR:
10548 {
10549 gimple *ehf;
10550 gimple_seq failure = NULL;
10551
10552 gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
10553 ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
10554 gimple_set_no_warning (ehf, TREE_NO_WARNING (*expr_p));
10555 gimplify_seq_add_stmt (pre_p, ehf);
10556 ret = GS_ALL_DONE;
10557 break;
10558 }
10559
10560 case OBJ_TYPE_REF:
10561 {
10562 enum gimplify_status r0, r1;
10563 r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
10564 post_p, is_gimple_val, fb_rvalue);
10565 r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
10566 post_p, is_gimple_val, fb_rvalue);
10567 TREE_SIDE_EFFECTS (*expr_p) = 0;
10568 ret = MIN (r0, r1);
10569 }
10570 break;
10571
10572 case LABEL_DECL:
10573 /* We get here when taking the address of a label. We mark
10574 the label as "forced", meaning it can never be removed and
10575 is a potential target of any computed goto. */
10576 FORCED_LABEL (*expr_p) = 1;
10577 ret = GS_ALL_DONE;
10578 break;
10579
10580 case STATEMENT_LIST:
10581 ret = gimplify_statement_list (expr_p, pre_p);
10582 break;
10583
10584 case WITH_SIZE_EXPR:
10585 {
10586 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
10587 post_p == &internal_post ? NULL : post_p,
10588 gimple_test_f, fallback);
10589 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
10590 is_gimple_val, fb_rvalue);
10591 ret = GS_ALL_DONE;
10592 }
10593 break;
10594
10595 case VAR_DECL:
10596 case PARM_DECL:
10597 ret = gimplify_var_or_parm_decl (expr_p);
10598 break;
10599
10600 case RESULT_DECL:
10601 /* When within an OMP context, notice uses of variables. */
10602 if (gimplify_omp_ctxp)
10603 omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
10604 ret = GS_ALL_DONE;
10605 break;
10606
10607 case SSA_NAME:
10608 /* Allow callbacks into the gimplifier during optimization. */
10609 ret = GS_ALL_DONE;
10610 break;
10611
10612 case OMP_PARALLEL:
10613 gimplify_omp_parallel (expr_p, pre_p);
10614 ret = GS_ALL_DONE;
10615 break;
10616
10617 case OMP_TASK:
10618 gimplify_omp_task (expr_p, pre_p);
10619 ret = GS_ALL_DONE;
10620 break;
10621
10622 case OMP_FOR:
10623 case OMP_SIMD:
10624 case CILK_SIMD:
10625 case CILK_FOR:
10626 case OMP_DISTRIBUTE:
10627 case OMP_TASKLOOP:
10628 case OACC_LOOP:
10629 ret = gimplify_omp_for (expr_p, pre_p);
10630 break;
10631
10632 case OACC_CACHE:
10633 gimplify_oacc_cache (expr_p, pre_p);
10634 ret = GS_ALL_DONE;
10635 break;
10636
10637 case OACC_DECLARE:
10638 gimplify_oacc_declare (expr_p, pre_p);
10639 ret = GS_ALL_DONE;
10640 break;
10641
10642 case OACC_HOST_DATA:
10643 case OACC_DATA:
10644 case OACC_KERNELS:
10645 case OACC_PARALLEL:
10646 case OMP_SECTIONS:
10647 case OMP_SINGLE:
10648 case OMP_TARGET:
10649 case OMP_TARGET_DATA:
10650 case OMP_TEAMS:
10651 gimplify_omp_workshare (expr_p, pre_p);
10652 ret = GS_ALL_DONE;
10653 break;
10654
10655 case OACC_ENTER_DATA:
10656 case OACC_EXIT_DATA:
10657 case OACC_UPDATE:
10658 case OMP_TARGET_UPDATE:
10659 case OMP_TARGET_ENTER_DATA:
10660 case OMP_TARGET_EXIT_DATA:
10661 gimplify_omp_target_update (expr_p, pre_p);
10662 ret = GS_ALL_DONE;
10663 break;
10664
10665 case OMP_SECTION:
10666 case OMP_MASTER:
10667 case OMP_TASKGROUP:
10668 case OMP_ORDERED:
10669 case OMP_CRITICAL:
10670 {
10671 gimple_seq body = NULL;
10672 gimple *g;
10673
10674 gimplify_and_add (OMP_BODY (*expr_p), &body);
10675 switch (TREE_CODE (*expr_p))
10676 {
10677 case OMP_SECTION:
10678 g = gimple_build_omp_section (body);
10679 break;
10680 case OMP_MASTER:
10681 g = gimple_build_omp_master (body);
10682 break;
10683 case OMP_TASKGROUP:
10684 {
10685 gimple_seq cleanup = NULL;
10686 tree fn
10687 = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_END);
10688 g = gimple_build_call (fn, 0);
10689 gimple_seq_add_stmt (&cleanup, g);
10690 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
10691 body = NULL;
10692 gimple_seq_add_stmt (&body, g);
10693 g = gimple_build_omp_taskgroup (body);
10694 }
10695 break;
10696 case OMP_ORDERED:
10697 g = gimplify_omp_ordered (*expr_p, body);
10698 break;
10699 case OMP_CRITICAL:
10700 gimplify_scan_omp_clauses (&OMP_CRITICAL_CLAUSES (*expr_p),
10701 pre_p, ORT_WORKSHARE, OMP_CRITICAL);
10702 gimplify_adjust_omp_clauses (pre_p, body,
10703 &OMP_CRITICAL_CLAUSES (*expr_p),
10704 OMP_CRITICAL);
10705 g = gimple_build_omp_critical (body,
10706 OMP_CRITICAL_NAME (*expr_p),
10707 OMP_CRITICAL_CLAUSES (*expr_p));
10708 break;
10709 default:
10710 gcc_unreachable ();
10711 }
10712 gimplify_seq_add_stmt (pre_p, g);
10713 ret = GS_ALL_DONE;
10714 break;
10715 }
10716
10717 case OMP_ATOMIC:
10718 case OMP_ATOMIC_READ:
10719 case OMP_ATOMIC_CAPTURE_OLD:
10720 case OMP_ATOMIC_CAPTURE_NEW:
10721 ret = gimplify_omp_atomic (expr_p, pre_p);
10722 break;
10723
10724 case TRANSACTION_EXPR:
10725 ret = gimplify_transaction (expr_p, pre_p);
10726 break;
10727
10728 case TRUTH_AND_EXPR:
10729 case TRUTH_OR_EXPR:
10730 case TRUTH_XOR_EXPR:
10731 {
10732 tree orig_type = TREE_TYPE (*expr_p);
10733 tree new_type, xop0, xop1;
10734 *expr_p = gimple_boolify (*expr_p);
10735 new_type = TREE_TYPE (*expr_p);
10736 if (!useless_type_conversion_p (orig_type, new_type))
10737 {
10738 *expr_p = fold_convert_loc (input_location, orig_type, *expr_p);
10739 ret = GS_OK;
10740 break;
10741 }
10742
10743 /* Boolified binary truth expressions are semantically equivalent
10744 to bitwise binary expressions. Canonicalize them to the
10745 bitwise variant. */
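/* E.g. (an illustrative sketch): after boolification both operands
   of "a && b" (TRUTH_AND_EXPR, which, unlike TRUTH_ANDIF_EXPR,
   evaluates both operands) are known to be 0 or 1, so the expression
   computes the same value as "a & b" and is rewritten to
   BIT_AND_EXPR below. */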
10746 switch (TREE_CODE (*expr_p))
10747 {
10748 case TRUTH_AND_EXPR:
10749 TREE_SET_CODE (*expr_p, BIT_AND_EXPR);
10750 break;
10751 case TRUTH_OR_EXPR:
10752 TREE_SET_CODE (*expr_p, BIT_IOR_EXPR);
10753 break;
10754 case TRUTH_XOR_EXPR:
10755 TREE_SET_CODE (*expr_p, BIT_XOR_EXPR);
10756 break;
10757 default:
10758 break;
10759 }
10760 /* Now make sure the operands have types compatible with the
10761 expression's new_type. */
10762 xop0 = TREE_OPERAND (*expr_p, 0);
10763 xop1 = TREE_OPERAND (*expr_p, 1);
10764 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop0)))
10765 TREE_OPERAND (*expr_p, 0) = fold_convert_loc (input_location,
10766 new_type,
10767 xop0);
10768 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop1)))
10769 TREE_OPERAND (*expr_p, 1) = fold_convert_loc (input_location,
10770 new_type,
10771 xop1);
10772 /* Continue classified as tcc_binary. */
10773 goto expr_2;
10774 }
10775
10776 case FMA_EXPR:
10777 case VEC_COND_EXPR:
10778 case VEC_PERM_EXPR:
10779 /* Classified as tcc_expression. */
10780 goto expr_3;
10781
10782 case POINTER_PLUS_EXPR:
10783 {
10784 enum gimplify_status r0, r1;
10785 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
10786 post_p, is_gimple_val, fb_rvalue);
10787 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
10788 post_p, is_gimple_val, fb_rvalue);
10789 recalculate_side_effects (*expr_p);
10790 ret = MIN (r0, r1);
10791 break;
10792 }
10793
10794 case CILK_SYNC_STMT:
10795 {
10796 if (!fn_contains_cilk_spawn_p (cfun))
10797 {
10798 error_at (EXPR_LOCATION (*expr_p),
10799 "expected %<_Cilk_spawn%> before %<_Cilk_sync%>");
10800 ret = GS_ERROR;
10801 }
10802 else
10803 {
10804 gimplify_cilk_sync (expr_p, pre_p);
10805 ret = GS_ALL_DONE;
10806 }
10807 break;
10808 }
10809
10810 default:
10811 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
10812 {
10813 case tcc_comparison:
10814 /* Handle comparison of non-scalar-mode aggregate objects with
10815 a call to memcmp. It would be nice to do this only for
10816 variable-sized objects, but then we'd have to allow the same
10817 nest of reference nodes we allow for MODIFY_EXPR, and that's
10818 too complex.
10819
10820 Compare scalar-mode aggregates as scalar-mode values. Using
10821 memcmp for them would be very inefficient at best, and is
10822 plain wrong if bitfields are involved. */
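/* An illustrative sketch: "s1 == s2" on a BLKmode struct type is
   lowered to roughly

     memcmp (&s1, &s2, sizeof (s1)) == 0

   while a small aggregate whose mode is scalar (say SImode) is
   compared directly as one scalar value. */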
10823 {
10824 tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
10825
10826 /* Vector comparisons need no boolification. */
10827 if (TREE_CODE (type) == VECTOR_TYPE)
10828 goto expr_2;
10829 else if (!AGGREGATE_TYPE_P (type))
10830 {
10831 tree org_type = TREE_TYPE (*expr_p);
10832 *expr_p = gimple_boolify (*expr_p);
10833 if (!useless_type_conversion_p (org_type,
10834 TREE_TYPE (*expr_p)))
10835 {
10836 *expr_p = fold_convert_loc (input_location,
10837 org_type, *expr_p);
10838 ret = GS_OK;
10839 }
10840 else
10841 goto expr_2;
10842 }
10843 else if (TYPE_MODE (type) != BLKmode)
10844 ret = gimplify_scalar_mode_aggregate_compare (expr_p);
10845 else
10846 ret = gimplify_variable_sized_compare (expr_p);
10847
10848 break;
10849 }
10850
10851 /* If *EXPR_P does not need to be special-cased, handle it
10852 according to its class. */
10853 case tcc_unary:
10854 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
10855 post_p, is_gimple_val, fb_rvalue);
10856 break;
10857
10858 case tcc_binary:
10859 expr_2:
10860 {
10861 enum gimplify_status r0, r1;
10862
10863 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
10864 post_p, is_gimple_val, fb_rvalue);
10865 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
10866 post_p, is_gimple_val, fb_rvalue);
10867
10868 ret = MIN (r0, r1);
10869 break;
10870 }
10871
10872 expr_3:
10873 {
10874 enum gimplify_status r0, r1, r2;
10875
10876 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
10877 post_p, is_gimple_val, fb_rvalue);
10878 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
10879 post_p, is_gimple_val, fb_rvalue);
10880 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
10881 post_p, is_gimple_val, fb_rvalue);
10882
10883 ret = MIN (MIN (r0, r1), r2);
10884 break;
10885 }
10886
10887 case tcc_declaration:
10888 case tcc_constant:
10889 ret = GS_ALL_DONE;
10890 goto dont_recalculate;
10891
10892 default:
10893 gcc_unreachable ();
10894 }
10895
10896 recalculate_side_effects (*expr_p);
10897
10898 dont_recalculate:
10899 break;
10900 }
10901
10902 gcc_assert (*expr_p || ret != GS_OK);
10903 }
10904 while (ret == GS_OK);
10905
10906 /* If we encountered an error_mark somewhere nested inside, either
10907 stub out the statement or propagate the error back out. */
10908 if (ret == GS_ERROR)
10909 {
10910 if (is_statement)
10911 *expr_p = NULL;
10912 goto out;
10913 }
10914
10915 /* This was only valid as a return value from the langhook, which
10916 we handled. Make sure it doesn't escape from any other context. */
10917 gcc_assert (ret != GS_UNHANDLED);
10918
10919 if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
10920 {
10921 /* We aren't looking for a value, and we don't have a valid
10922 statement. If it doesn't have side-effects, throw it away. */
10923 if (!TREE_SIDE_EFFECTS (*expr_p))
10924 *expr_p = NULL;
10925 else if (!TREE_THIS_VOLATILE (*expr_p))
10926 {
10927 /* This is probably a _REF that contains something nested that
10928 has side effects. Recurse through the operands to find it. */
10929 enum tree_code code = TREE_CODE (*expr_p);
10930
10931 switch (code)
10932 {
10933 case COMPONENT_REF:
10934 case REALPART_EXPR:
10935 case IMAGPART_EXPR:
10936 case VIEW_CONVERT_EXPR:
10937 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
10938 gimple_test_f, fallback);
10939 break;
10940
10941 case ARRAY_REF:
10942 case ARRAY_RANGE_REF:
10943 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
10944 gimple_test_f, fallback);
10945 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
10946 gimple_test_f, fallback);
10947 break;
10948
10949 default:
10950 /* Anything else with side-effects must be converted to
10951 a valid statement before we get here. */
10952 gcc_unreachable ();
10953 }
10954
10955 *expr_p = NULL;
10956 }
10957 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
10958 && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode)
10959 {
10960 /* Historically, the compiler has treated a bare reference
10961 to a non-BLKmode volatile lvalue as forcing a load. */
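/* E.g. (an illustrative sketch): given "volatile int v;", the
   expression statement "v;" must still perform one load, which is
   materialized below as an assignment to a "vol" temporary. */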
10962 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
10963
10964 /* Normally, we do not want to create a temporary for a
10965 TREE_ADDRESSABLE type because such a type should not be
10966 copied by bitwise assignment. However, we make an
10967 exception here, as all we are doing is ensuring that
10968 we read the bytes that make up the type. We use
10969 create_tmp_var_raw because create_tmp_var will abort when
10970 given a TREE_ADDRESSABLE type. */
10971 tree tmp = create_tmp_var_raw (type, "vol");
10972 gimple_add_tmp_var (tmp);
10973 gimplify_assign (tmp, *expr_p, pre_p);
10974 *expr_p = NULL;
10975 }
10976 else
10977 /* We can't do anything useful with a volatile reference to
10978 an incomplete type, so just throw it away. Likewise for
10979 a BLKmode type, since any implicit inner load should
10980 already have been turned into an explicit one by the
10981 gimplification process. */
10982 *expr_p = NULL;
10983 }
10984
10985 /* If we are gimplifying at the statement level, we're done. Tack
10986 everything together and return. */
10987 if (fallback == fb_none || is_statement)
10988 {
10989 /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
10990 it out for GC to reclaim it. */
10991 *expr_p = NULL_TREE;
10992
10993 if (!gimple_seq_empty_p (internal_pre)
10994 || !gimple_seq_empty_p (internal_post))
10995 {
10996 gimplify_seq_add_seq (&internal_pre, internal_post);
10997 gimplify_seq_add_seq (pre_p, internal_pre);
10998 }
10999
11000 /* The result of gimplifying *EXPR_P is going to be the last few
11001 statements in *PRE_P and *POST_P. Add location information
11002 to all the statements that were added by the gimplification
11003 helpers. */
11004 if (!gimple_seq_empty_p (*pre_p))
11005 annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);
11006
11007 if (!gimple_seq_empty_p (*post_p))
11008 annotate_all_with_location_after (*post_p, post_last_gsi,
11009 input_location);
11010
11011 goto out;
11012 }
11013
11014 #ifdef ENABLE_GIMPLE_CHECKING
11015 if (*expr_p)
11016 {
11017 enum tree_code code = TREE_CODE (*expr_p);
11018 /* These expressions should already be in gimple IR form. */
11019 gcc_assert (code != MODIFY_EXPR
11020 && code != ASM_EXPR
11021 && code != BIND_EXPR
11022 && code != CATCH_EXPR
11023 && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
11024 && code != EH_FILTER_EXPR
11025 && code != GOTO_EXPR
11026 && code != LABEL_EXPR
11027 && code != LOOP_EXPR
11028 && code != SWITCH_EXPR
11029 && code != TRY_FINALLY_EXPR
11030 && code != OACC_PARALLEL
11031 && code != OACC_KERNELS
11032 && code != OACC_DATA
11033 && code != OACC_HOST_DATA
11034 && code != OACC_DECLARE
11035 && code != OACC_UPDATE
11036 && code != OACC_ENTER_DATA
11037 && code != OACC_EXIT_DATA
11038 && code != OACC_CACHE
11039 && code != OMP_CRITICAL
11040 && code != OMP_FOR
11041 && code != OACC_LOOP
11042 && code != OMP_MASTER
11043 && code != OMP_TASKGROUP
11044 && code != OMP_ORDERED
11045 && code != OMP_PARALLEL
11046 && code != OMP_SECTIONS
11047 && code != OMP_SECTION
11048 && code != OMP_SINGLE);
11049 }
11050 #endif
11051
11052 /* Otherwise we're gimplifying a subexpression, so the resulting
11053 value is interesting. If it's a valid operand that matches
11054 GIMPLE_TEST_F, we're done. Unless we are handling some
11055 post-effects internally; if that's the case, we need to copy into
11056 a temporary before adding the post-effects to POST_P. */
11057 if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
11058 goto out;
11059
11060 /* Otherwise, we need to create a new temporary for the gimplified
11061 expression. */
11062
11063 /* We can't return an lvalue if we have an internal postqueue. The
11064 object the lvalue refers to would (probably) be modified by the
11065 postqueue; we need to copy the value out first, which means an
11066 rvalue. */
11067 if ((fallback & fb_lvalue)
11068 && gimple_seq_empty_p (internal_post)
11069 && is_gimple_addressable (*expr_p))
11070 {
11071 /* An lvalue will do. Take the address of the expression, store it
11072 in a temporary, and replace the expression with an INDIRECT_REF of
11073 that temporary. */
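/* Sketch of the transformation: an addressable *EXPR_P such as
   "a.b" becomes

     tmp = &a.b;
     *EXPR_P = *tmp;

   so the lvalue survives through a pointer temporary. */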
11074 tmp = build_fold_addr_expr_loc (input_location, *expr_p);
11075 gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
11076 *expr_p = build_simple_mem_ref (tmp);
11077 }
11078 else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
11079 {
11080 /* An rvalue will do. Assign the gimplified expression into a
11081 new temporary TMP and replace the original expression with
11082 TMP. First, make sure that the expression has a type so that
11083 it can be assigned into a temporary. */
11084 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
11085 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
11086 }
11087 else
11088 {
11089 #ifdef ENABLE_GIMPLE_CHECKING
11090 if (!(fallback & fb_mayfail))
11091 {
11092 fprintf (stderr, "gimplification failed:\n");
11093 print_generic_expr (stderr, *expr_p, 0);
11094 debug_tree (*expr_p);
11095 internal_error ("gimplification failed");
11096 }
11097 #endif
11098 gcc_assert (fallback & fb_mayfail);
11099
11100 /* If this is an asm statement, and the user asked for the
11101 impossible, don't die. Fail and let gimplify_asm_expr
11102 issue an error. */
11103 ret = GS_ERROR;
11104 goto out;
11105 }
11106
11107 /* Make sure the temporary matches our predicate. */
11108 gcc_assert ((*gimple_test_f) (*expr_p));
11109
11110 if (!gimple_seq_empty_p (internal_post))
11111 {
11112 annotate_all_with_location (internal_post, input_location);
11113 gimplify_seq_add_seq (pre_p, internal_post);
11114 }
11115
11116 out:
11117 input_location = saved_location;
11118 return ret;
11119 }
11120
11121 /* Look through TYPE for variable-sized objects and gimplify each such
11122 size that we find. Add to LIST_P any statements generated. */
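/* An illustrative sketch: for a C99 VLA such as

     void f (int n) { int a[n]; ... }

   the array type's size trees (TYPE_SIZE, TYPE_SIZE_UNIT and the
   domain bound, roughly "n - 1") are expressions; the statements
   that evaluate them into temporaries are appended to LIST_P. */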
11123
11124 void
11125 gimplify_type_sizes (tree type, gimple_seq *list_p)
11126 {
11127 tree field, t;
11128
11129 if (type == NULL || type == error_mark_node)
11130 return;
11131
11132 /* We first do the main variant, then copy into any other variants. */
11133 type = TYPE_MAIN_VARIANT (type);
11134
11135 /* Avoid infinite recursion. */
11136 if (TYPE_SIZES_GIMPLIFIED (type))
11137 return;
11138
11139 TYPE_SIZES_GIMPLIFIED (type) = 1;
11140
11141 switch (TREE_CODE (type))
11142 {
11143 case INTEGER_TYPE:
11144 case ENUMERAL_TYPE:
11145 case BOOLEAN_TYPE:
11146 case REAL_TYPE:
11147 case FIXED_POINT_TYPE:
11148 gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
11149 gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);
11150
11151 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
11152 {
11153 TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
11154 TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
11155 }
11156 break;
11157
11158 case ARRAY_TYPE:
11159 /* These types may not have declarations, so handle them here. */
11160 gimplify_type_sizes (TREE_TYPE (type), list_p);
11161 gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
11162 /* Ensure VLA bounds aren't removed: at -O0 they should be variables
11163 with assigned stack slots, and at -O1 and above with -g they should
11164 be tracked by VTA. */
11165 if (!(TYPE_NAME (type)
11166 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
11167 && DECL_IGNORED_P (TYPE_NAME (type)))
11168 && TYPE_DOMAIN (type)
11169 && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
11170 {
11171 t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
11172 if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
11173 DECL_IGNORED_P (t) = 0;
11174 t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
11175 if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
11176 DECL_IGNORED_P (t) = 0;
11177 }
11178 break;
11179
11180 case RECORD_TYPE:
11181 case UNION_TYPE:
11182 case QUAL_UNION_TYPE:
11183 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
11184 if (TREE_CODE (field) == FIELD_DECL)
11185 {
11186 gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
11187 gimplify_one_sizepos (&DECL_SIZE (field), list_p);
11188 gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
11189 gimplify_type_sizes (TREE_TYPE (field), list_p);
11190 }
11191 break;
11192
11193 case POINTER_TYPE:
11194 case REFERENCE_TYPE:
11195 /* We used to recurse on the pointed-to type here, which turned out to
11196 be incorrect because its definition might refer to variables not
11197 yet initialized at this point if a forward declaration is involved.
11198
11199 It was actually useful for anonymous pointed-to types to ensure
11200 that the sizes evaluation dominates every possible later use of the
11201 values. Restricting to such types here would be safe since there
11202 is no possible forward declaration around, but would introduce an
11203 undesirable middle-end semantic to anonymity. We then defer to
11204 front-ends the responsibility of ensuring that the sizes are
11205 evaluated both early and late enough, e.g. by attaching artificial
11206 type declarations to the tree. */
11207 break;
11208
11209 default:
11210 break;
11211 }
11212
11213 gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
11214 gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);
11215
11216 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
11217 {
11218 TYPE_SIZE (t) = TYPE_SIZE (type);
11219 TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
11220 TYPE_SIZES_GIMPLIFIED (t) = 1;
11221 }
11222 }
11223
11224 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
11225 a size or position, has had all of its SAVE_EXPRs evaluated.
11226 We add any required statements to *STMT_P. */
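/* E.g. (a sketch): a size such as "SAVE_EXPR <n * 4>" is unshared and
   gimplified into a temporary, with the computing statement appended
   to *STMT_P, so later uses of the size see a simple value. */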
11227
11228 void
11229 gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
11230 {
11231 tree expr = *expr_p;
11232
11233 /* We don't do anything if the value isn't there, is constant, or contains
11234 a PLACEHOLDER_EXPR. We also don't want to do anything if it's already
11235 a VAR_DECL: for a VAR_DECL from another function, the gimplifier would
11236 want to replace it with a new variable, but that would cause problems
11237 if this type is from outside the function, so it's OK to leave it. */
11238 if (is_gimple_sizepos (expr))
11239 return;
11240
11241 *expr_p = unshare_expr (expr);
11242
11243 gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue);
11244 }
11245
11246 /* Gimplify the body of FNDECL and return a GIMPLE_BIND node containing
11247 the sequence of corresponding GIMPLE statements. If DO_PARMS
11248 is true, also gimplify the parameters. */
11249
11250 gbind *
11251 gimplify_body (tree fndecl, bool do_parms)
11252 {
11253 location_t saved_location = input_location;
11254 gimple_seq parm_stmts, seq;
11255 gimple *outer_stmt;
11256 gbind *outer_bind;
11257 struct cgraph_node *cgn;
11258
11259 timevar_push (TV_TREE_GIMPLIFY);
11260
11261 /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
11262 gimplification. */
11263 default_rtl_profile ();
11264
11265 gcc_assert (gimplify_ctxp == NULL);
11266 push_gimplify_context ();
11267
11268 if (flag_openacc || flag_openmp)
11269 {
11270 gcc_assert (gimplify_omp_ctxp == NULL);
11271 if (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (fndecl)))
11272 gimplify_omp_ctxp = new_omp_context (ORT_TARGET);
11273 }
11274
11275 /* Unshare most shared trees in the body and in that of any nested functions.
11276 It would seem we don't have to do this for nested functions because
11277 they are supposed to be output and then the outer function gimplified
11278 first, but the g++ front end doesn't always do it that way. */
11279 unshare_body (fndecl);
11280 unvisit_body (fndecl);
11281
11282 cgn = cgraph_node::get (fndecl);
11283 if (cgn && cgn->origin)
11284 nonlocal_vlas = new hash_set<tree>;
11285
11286 /* Make sure input_location isn't set to something weird. */
11287 input_location = DECL_SOURCE_LOCATION (fndecl);
11288
11289 /* Resolve callee-copies. This has to be done before processing
11290 the body so that DECL_VALUE_EXPR gets processed correctly. */
11291 parm_stmts = do_parms ? gimplify_parameters () : NULL;
11292
11293 /* Gimplify the function's body. */
11294 seq = NULL;
11295 gimplify_stmt (&DECL_SAVED_TREE (fndecl), &seq);
11296 outer_stmt = gimple_seq_first_stmt (seq);
11297 if (!outer_stmt)
11298 {
11299 outer_stmt = gimple_build_nop ();
11300 gimplify_seq_add_stmt (&seq, outer_stmt);
11301 }
11302
11303 /* The body must contain exactly one statement, a GIMPLE_BIND. If this is
11304 not the case, wrap everything in a GIMPLE_BIND to make it so. */
11305 if (gimple_code (outer_stmt) == GIMPLE_BIND
11306 && gimple_seq_first (seq) == gimple_seq_last (seq))
11307 outer_bind = as_a <gbind *> (outer_stmt);
11308 else
11309 outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);
11310
11311 DECL_SAVED_TREE (fndecl) = NULL_TREE;
11312
11313 /* If we had callee-copies statements, insert them at the beginning
11314 of the function and clear DECL_VALUE_EXPR_P on the parameters. */
11315 if (!gimple_seq_empty_p (parm_stmts))
11316 {
11317 tree parm;
11318
11319 gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
11320 gimple_bind_set_body (outer_bind, parm_stmts);
11321
11322 for (parm = DECL_ARGUMENTS (current_function_decl);
11323 parm; parm = DECL_CHAIN (parm))
11324 if (DECL_HAS_VALUE_EXPR_P (parm))
11325 {
11326 DECL_HAS_VALUE_EXPR_P (parm) = 0;
11327 DECL_IGNORED_P (parm) = 0;
11328 }
11329 }
11330
11331 if (nonlocal_vlas)
11332 {
11333 if (nonlocal_vla_vars)
11334 {
11335 /* tree-nested.c may later call declare_vars (..., true);
11336 which relies on BLOCK_VARS chain to be the tail of the
11337 gimple_bind_vars chain. Ensure we don't violate that
11338 assumption. */
11339 if (gimple_bind_block (outer_bind)
11340 == DECL_INITIAL (current_function_decl))
11341 declare_vars (nonlocal_vla_vars, outer_bind, true);
11342 else
11343 BLOCK_VARS (DECL_INITIAL (current_function_decl))
11344 = chainon (BLOCK_VARS (DECL_INITIAL (current_function_decl)),
11345 nonlocal_vla_vars);
11346 nonlocal_vla_vars = NULL_TREE;
11347 }
11348 delete nonlocal_vlas;
11349 nonlocal_vlas = NULL;
11350 }
11351
11352 if ((flag_openacc || flag_openmp || flag_openmp_simd)
11353 && gimplify_omp_ctxp)
11354 {
11355 delete_omp_context (gimplify_omp_ctxp);
11356 gimplify_omp_ctxp = NULL;
11357 }
11358
11359 pop_gimplify_context (outer_bind);
11360 gcc_assert (gimplify_ctxp == NULL);
11361
11362 if (flag_checking && !seen_error ())
11363 verify_gimple_in_seq (gimple_bind_body (outer_bind));
11364
11365 timevar_pop (TV_TREE_GIMPLIFY);
11366 input_location = saved_location;
11367
11368 return outer_bind;
11369 }
11370
11371 typedef char *char_p; /* For DEF_VEC_P. */
11372
11373 /* Return whether we should exclude FNDECL from instrumentation. */
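/* Matching is by substring (strstr) against the printable function
   name and the source file name, as given by
   -finstrument-functions-exclude-function-list= and
   -finstrument-functions-exclude-file-list= respectively.
   E.g. (illustrative), excluding "foo" also excludes "foobar". */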
11374
11375 static bool
11376 flag_instrument_functions_exclude_p (tree fndecl)
11377 {
11378 vec<char_p> *v;
11379
11380 v = (vec<char_p> *) flag_instrument_functions_exclude_functions;
11381 if (v && v->length () > 0)
11382 {
11383 const char *name;
11384 int i;
11385 char *s;
11386
11387 name = lang_hooks.decl_printable_name (fndecl, 0);
11388 FOR_EACH_VEC_ELT (*v, i, s)
11389 if (strstr (name, s) != NULL)
11390 return true;
11391 }
11392
11393 v = (vec<char_p> *) flag_instrument_functions_exclude_files;
11394 if (v && v->length () > 0)
11395 {
11396 const char *name;
11397 int i;
11398 char *s;
11399
11400 name = DECL_SOURCE_FILE (fndecl);
11401 FOR_EACH_VEC_ELT (*v, i, s)
11402 if (strstr (name, s) != NULL)
11403 return true;
11404 }
11405
11406 return false;
11407 }
11408
11409 /* Entry point to the gimplification pass. FNDECL is the FUNCTION_DECL
11410 node for the function we want to gimplify.
11411
11412 Return the sequence of GIMPLE statements corresponding to the body
11413 of FNDECL. */
11414
11415 void
11416 gimplify_function_tree (tree fndecl)
11417 {
11418 tree parm, ret;
11419 gimple_seq seq;
11420 gbind *bind;
11421
11422 gcc_assert (!gimple_body (fndecl));
11423
11424 if (DECL_STRUCT_FUNCTION (fndecl))
11425 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
11426 else
11427 push_struct_function (fndecl);
11428
11429 /* Tentatively set PROP_gimple_lva here, and reset it in gimplify_va_arg_expr
11430 if necessary. */
11431 cfun->curr_properties |= PROP_gimple_lva;
11432
11433 for (parm = DECL_ARGUMENTS (fndecl); parm ; parm = DECL_CHAIN (parm))
11434 {
11435 /* Preliminarily mark non-addressed complex variables as eligible
11436 for promotion to gimple registers. We'll transform their uses
11437 as we find them. */
11438 if ((TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
11439 || TREE_CODE (TREE_TYPE (parm)) == VECTOR_TYPE)
11440 && !TREE_THIS_VOLATILE (parm)
11441 && !needs_to_live_in_memory (parm))
11442 DECL_GIMPLE_REG_P (parm) = 1;
11443 }
11444
11445 ret = DECL_RESULT (fndecl);
11446 if ((TREE_CODE (TREE_TYPE (ret)) == COMPLEX_TYPE
11447 || TREE_CODE (TREE_TYPE (ret)) == VECTOR_TYPE)
11448 && !needs_to_live_in_memory (ret))
11449 DECL_GIMPLE_REG_P (ret) = 1;
11450
11451 bind = gimplify_body (fndecl, true);
11452
11453 /* The tree body of the function is no longer needed, replace it
11454 with the new GIMPLE body. */
11455 seq = NULL;
11456 gimple_seq_add_stmt (&seq, bind);
11457 gimple_set_body (fndecl, seq);
11458
11459 /* If we're instrumenting function entry/exit, then prepend the call to
11460 the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
11461 catch the exit hook. */
11462 /* ??? Add some way to ignore exceptions for this TFE. */
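/* Sketch of the generated shape (the hooks are the documented
   __cyg_profile_func_enter/exit pair):

     return_addr = __builtin_return_address (0);
     __cyg_profile_func_enter (&this_fn, return_addr);
     try { <original body> }
     finally
       {
         return_addr = __builtin_return_address (0);
         __cyg_profile_func_exit (&this_fn, return_addr);
       } */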
11463 if (flag_instrument_function_entry_exit
11464 && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
11465 && !flag_instrument_functions_exclude_p (fndecl))
11466 {
11467 tree x;
11468 gbind *new_bind;
11469 gimple *tf;
11470 gimple_seq cleanup = NULL, body = NULL;
11471 tree tmp_var;
11472 gcall *call;
11473
11474 x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
11475 call = gimple_build_call (x, 1, integer_zero_node);
11476 tmp_var = create_tmp_var (ptr_type_node, "return_addr");
11477 gimple_call_set_lhs (call, tmp_var);
11478 gimplify_seq_add_stmt (&cleanup, call);
11479 x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_EXIT);
11480 call = gimple_build_call (x, 2,
11481 build_fold_addr_expr (current_function_decl),
11482 tmp_var);
11483 gimplify_seq_add_stmt (&cleanup, call);
11484 tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);
11485
11486 x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
11487 call = gimple_build_call (x, 1, integer_zero_node);
11488 tmp_var = create_tmp_var (ptr_type_node, "return_addr");
11489 gimple_call_set_lhs (call, tmp_var);
11490 gimplify_seq_add_stmt (&body, call);
11491 x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_ENTER);
11492 call = gimple_build_call (x, 2,
11493 build_fold_addr_expr (current_function_decl),
11494 tmp_var);
11495 gimplify_seq_add_stmt (&body, call);
11496 gimplify_seq_add_stmt (&body, tf);
11497 new_bind = gimple_build_bind (NULL, body, gimple_bind_block (bind));
11498 /* Clear the block for BIND, since it is no longer directly inside
11499 the function, but within a try block. */
11500 gimple_bind_set_block (bind, NULL);
11501
11502 /* Replace the current function body with the body
11503 wrapped in the try/finally TF. */
11504 seq = NULL;
11505 gimple_seq_add_stmt (&seq, new_bind);
11506 gimple_set_body (fndecl, seq);
11507 bind = new_bind;
11508 }
11509
11510 if ((flag_sanitize & SANITIZE_THREAD) != 0
11511 && !lookup_attribute ("no_sanitize_thread", DECL_ATTRIBUTES (fndecl)))
11512 {
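/* Analogous wrapping, as a sketch:

     try { <body> } finally { IFN_TSAN_FUNC_EXIT (); }

   where the internal call is later expanded by the tsan pass into
   the ThreadSanitizer runtime's function-exit hook. */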
11513 gcall *call = gimple_build_call_internal (IFN_TSAN_FUNC_EXIT, 0);
11514 gimple *tf = gimple_build_try (seq, call, GIMPLE_TRY_FINALLY);
11515 gbind *new_bind = gimple_build_bind (NULL, tf, gimple_bind_block (bind));
11516 /* Clear the block for BIND, since it is no longer directly inside
11517 the function, but within a try block. */
11518 gimple_bind_set_block (bind, NULL);
11519 /* Replace the current function body with the body
11520 wrapped in the try/finally TF. */
11521 seq = NULL;
11522 gimple_seq_add_stmt (&seq, new_bind);
11523 gimple_set_body (fndecl, seq);
11524 }
11525
11526 DECL_SAVED_TREE (fndecl) = NULL_TREE;
11527 cfun->curr_properties |= PROP_gimple_any;
11528
11529 pop_cfun ();
11530
11531 dump_function (TDI_generic, fndecl);
11532 }
11533
11534 /* Return a dummy expression of type TYPE in order to keep going after an
11535 error. */
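/* The result below is effectively "*(TYPE *) 0": a MEM_REF of a null
   pointer constant, giving an expression with the right type and
   mode that is never meant to be executed. */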
11536
11537 static tree
11538 dummy_object (tree type)
11539 {
11540 tree t = build_int_cst (build_pointer_type (type), 0);
11541 return build2 (MEM_REF, type, t, t);
11542 }
11543
11544 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
11545 builtin function, but a very special sort of operator. */
11546
11547 enum gimplify_status
11548 gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p,
11549 gimple_seq *post_p ATTRIBUTE_UNUSED)
11550 {
11551 tree promoted_type, have_va_type;
11552 tree valist = TREE_OPERAND (*expr_p, 0);
11553 tree type = TREE_TYPE (*expr_p);
11554 tree t, tag, aptag;
11555 location_t loc = EXPR_LOCATION (*expr_p);
11556
11557 /* Verify that valist is of the proper type. */
11558 have_va_type = TREE_TYPE (valist);
11559 if (have_va_type == error_mark_node)
11560 return GS_ERROR;
11561 have_va_type = targetm.canonical_va_list_type (have_va_type);
11562
11563 if (have_va_type == NULL_TREE)
11564 {
11565 error_at (loc, "first argument to %<va_arg%> not of type %<va_list%>");
11566 return GS_ERROR;
11567 }
11568
11569 /* Generate a diagnostic for requesting data of a type that cannot
11570 be passed through `...' due to type promotion at the call site. */
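/* A classic instance (illustrative): "va_arg (ap, char)" is
   undefined, because a char argument is promoted to int when passed
   through "...", so the callee must fetch it with va_arg (ap, int)
   and convert. */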
11571 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
11572 != type)
11573 {
11574 static bool gave_help;
11575 bool warned;
11576
11577 /* Unfortunately, this is merely undefined, rather than a constraint
11578 violation, so we cannot make this an error. If this call is never
11579 executed, the program is still strictly conforming. */
11580 warned = warning_at (loc, 0,
11581 "%qT is promoted to %qT when passed through %<...%>",
11582 type, promoted_type);
11583 if (!gave_help && warned)
11584 {
11585 gave_help = true;
11586 inform (loc, "(so you should pass %qT not %qT to %<va_arg%>)",
11587 promoted_type, type);
11588 }
11589
11590 /* We can, however, treat "undefined" any way we please.
11591 Emit a trap to encourage the user to fix the program. */
11592 if (warned)
11593 inform (loc, "if this code is reached, the program will abort");
11594 /* Before the abort, allow the evaluation of the va_list
11595 expression to exit or longjmp. */
11596 gimplify_and_add (valist, pre_p);
11597 t = build_call_expr_loc (loc,
11598 builtin_decl_implicit (BUILT_IN_TRAP), 0);
11599 gimplify_and_add (t, pre_p);
11600
11601 /* This is dead code, but go ahead and finish so that the
11602 mode of the result comes out right. */
11603 *expr_p = dummy_object (type);
11604 return GS_ALL_DONE;
11605 }
11606
11607 tag = build_int_cst (build_pointer_type (type), 0);
11608 aptag = build_int_cst (TREE_TYPE (valist), 0);
11609
11610 *expr_p = build_call_expr_internal_loc (loc, IFN_VA_ARG, type, 3,
11611 valist, tag, aptag);
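/* A sketch of the result: "i = va_arg (ap, int)" now reads roughly

     i = VA_ARG (&ap, 0B, 0B);

   in the dumps (the exact rendering varies); the null "tag"
   arguments carry the requested type and the va_list type. */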
11612
11613 /* Clear the tentatively set PROP_gimple_lva, to indicate that IFN_VA_ARG
11614 needs to be expanded. */
11615 cfun->curr_properties &= ~PROP_gimple_lva;
11616
11617 return GS_OK;
11618 }
11619
11620 /* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.
11621
11622 DST/SRC are the destination and source, respectively. You can pass
11623 ungimplified trees in DST or SRC, in which case they will be
11624 converted to GIMPLE operands if necessary.
11625
11626 This function returns the newly created GIMPLE_ASSIGN tuple. */
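/* Typical use, as a sketch: appending "x = y + z" to SEQ via

     gimplify_assign (x, build2 (PLUS_EXPR, type, y, z), &seq);

   gimplifies the right-hand side as needed and returns the final
   GIMPLE_ASSIGN. */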
11627
11628 gimple *
11629 gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
11630 {
11631 tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
11632 gimplify_and_add (t, seq_p);
11633 ggc_free (t);
11634 return gimple_seq_last_stmt (*seq_p);
11635 }
11636
11637 inline hashval_t
11638 gimplify_hasher::hash (const elt_t *p)
11639 {
11640 tree t = p->val;
11641 return iterative_hash_expr (t, 0);
11642 }
11643
11644 inline bool
11645 gimplify_hasher::equal (const elt_t *p1, const elt_t *p2)
11646 {
11647 tree t1 = p1->val;
11648 tree t2 = p2->val;
11649 enum tree_code code = TREE_CODE (t1);
11650
11651 if (TREE_CODE (t2) != code
11652 || TREE_TYPE (t1) != TREE_TYPE (t2))
11653 return false;
11654
11655 if (!operand_equal_p (t1, t2, 0))
11656 return false;
11657
11658 /* Only allow them to compare equal if they also hash equal; otherwise
11659 results are nondeterministic, and we fail bootstrap comparison. */
11660 gcc_checking_assert (hash (p1) == hash (p2));
11661
11662 return true;
11663 }