/* Tree lowering pass.  This pass converts the GENERIC functions-as-trees
   tree representation into the GIMPLE form.
   Copyright (C) 2002-2016 Free Software Foundation, Inc.
   Major work done by Sebastian Pop <s.pop@laposte.net>,
   Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "gimple-predict.h"
#include "tree-pass.h"		/* FIXME: only for PROP_gimple_any */
#include "ssa.h"
#include "cgraph.h"
#include "tree-pretty-print.h"
#include "diagnostic-core.h"
#include "alias.h"
#include "fold-const.h"
#include "calls.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "tree-cfg.h"
#include "tree-ssa.h"
#include "omp-low.h"
#include "gimple-low.h"
#include "cilk.h"
#include "gomp-constants.h"
#include "tree-dump.h"
#include "gimple-walk.h"
#include "langhooks-def.h"	/* FIXME: for lhd_set_decl_assembler_name */
#include "builtins.h"

enum gimplify_omp_var_data
{
  GOVD_SEEN = 1,
  GOVD_EXPLICIT = 2,
  GOVD_SHARED = 4,
  GOVD_PRIVATE = 8,
  GOVD_FIRSTPRIVATE = 16,
  GOVD_LASTPRIVATE = 32,
  GOVD_REDUCTION = 64,
  GOVD_LOCAL = 128,
  GOVD_MAP = 256,
  GOVD_DEBUG_PRIVATE = 512,
  GOVD_PRIVATE_OUTER_REF = 1024,
  GOVD_LINEAR = 2048,
  GOVD_ALIGNED = 4096,

  /* Flag for GOVD_MAP: don't copy back.  */
  GOVD_MAP_TO_ONLY = 8192,

  /* Flag for GOVD_LINEAR or GOVD_LASTPRIVATE: no outer reference.  */
  GOVD_LINEAR_LASTPRIVATE_NO_OUTER = 16384,

  GOVD_MAP_0LEN_ARRAY = 32768,

  /* Flag for GOVD_MAP, if it is an always,to or always,tofrom mapping.  */
  GOVD_MAP_ALWAYS_TO = 65536,

  /* Flag for shared vars that are or might be stored to in the region.  */
  GOVD_WRITTEN = 131072,

  /* Flag for GOVD_MAP, if it is a forced mapping.  */
  GOVD_MAP_FORCE = 262144,

  GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
			   | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LINEAR
			   | GOVD_LOCAL)
};
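
/* For illustration (a hand-picked example, not from the original sources):
   a variable named in an explicit "firstprivate (x)" clause would be
   recorded with GOVD_FIRSTPRIVATE | GOVD_EXPLICIT, and GOVD_SEEN would be
   OR'ed in once a reference to it is actually gimplified inside the
   region.  */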


enum omp_region_type
{
  ORT_WORKSHARE = 0x00,
  ORT_SIMD = 0x01,

  ORT_PARALLEL = 0x02,
  ORT_COMBINED_PARALLEL = 0x03,

  ORT_TASK = 0x04,
  ORT_UNTIED_TASK = 0x05,

  ORT_TEAMS = 0x08,
  ORT_COMBINED_TEAMS = 0x09,

  /* Data region.  */
  ORT_TARGET_DATA = 0x10,

  /* Data region with offloading.  */
  ORT_TARGET = 0x20,
  ORT_COMBINED_TARGET = 0x21,

  /* OpenACC variants.  */
  ORT_ACC = 0x40,  /* A generic OpenACC region.  */
  ORT_ACC_DATA = ORT_ACC | ORT_TARGET_DATA,  /* Data construct.  */
  ORT_ACC_PARALLEL = ORT_ACC | ORT_TARGET,  /* Parallel construct.  */
  ORT_ACC_KERNELS = ORT_ACC | ORT_TARGET | 0x80,  /* Kernels construct.  */
  ORT_ACC_HOST_DATA = ORT_ACC | ORT_TARGET_DATA | 0x80,  /* Host data.  */

  /* Dummy OpenMP region, used to disable expansion of
     DECL_VALUE_EXPRs in taskloop pre body.  */
  ORT_NONE = 0x100
};
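
/* For illustration (a hand-picked example, not from the original sources):
   the low bit encodes "combined" variants, so gimplifying a combined
   construct such as "#pragma omp parallel for" opens a context whose
   region_type is ORT_COMBINED_PARALLEL (ORT_PARALLEL | 1), while a
   stand-alone "#pragma omp parallel" uses plain ORT_PARALLEL.  */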

/* Gimplify hashtable helper.  */

struct gimplify_hasher : free_ptr_hash <elt_t>
{
  static inline hashval_t hash (const elt_t *);
  static inline bool equal (const elt_t *, const elt_t *);
};

struct gimplify_ctx
{
  struct gimplify_ctx *prev_context;

  vec<gbind *> bind_expr_stack;
  tree temps;
  gimple_seq conditional_cleanups;
  tree exit_label;
  tree return_temp;

  vec<tree> case_labels;
  /* The formal temporary table.  Should this be persistent?  */
  hash_table<gimplify_hasher> *temp_htab;

  int conditions;
  unsigned into_ssa : 1;
  unsigned allow_rhs_cond_expr : 1;
  unsigned in_cleanup_point_expr : 1;
  unsigned keep_stack : 1;
  unsigned save_stack : 1;
};

struct gimplify_omp_ctx
{
  struct gimplify_omp_ctx *outer_context;
  splay_tree variables;
  hash_set<tree> *privatized_types;
  /* Iteration variables in an OMP_FOR.  */
  vec<tree> loop_iter_var;
  location_t location;
  enum omp_clause_default_kind default_kind;
  enum omp_region_type region_type;
  bool combined_loop;
  bool distribute;
  bool target_map_scalars_firstprivate;
  bool target_map_pointers_as_0len_arrays;
  bool target_firstprivatize_array_bases;
};

static struct gimplify_ctx *gimplify_ctxp;
static struct gimplify_omp_ctx *gimplify_omp_ctxp;

/* Forward declaration.  */
static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
static hash_map<tree, tree> *oacc_declare_returns;

/* Shorter alias name for gimple_seq_add_stmt_without_update, for use in
   gimplify.c only.  */

static inline void
gimplify_seq_add_stmt (gimple_seq *seq_p, gimple *gs)
{
  gimple_seq_add_stmt_without_update (seq_p, gs);
}

/* Append sequence SRC to the end of sequence *DST_P.  If *DST_P is
   NULL, a new sequence is allocated.  This function is
   similar to gimple_seq_add_seq, but does not scan the operands.
   During gimplification, we need to manipulate statement sequences
   before the def/use vectors have been constructed.  */

static void
gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
{
  gimple_stmt_iterator si;

  if (src == NULL)
    return;

  si = gsi_last (*dst_p);
  gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
}


/* Pointer to a list of allocated gimplify_ctx structs to be used for pushing
   and popping gimplify contexts.  */

static struct gimplify_ctx *ctx_pool = NULL;

/* Return a gimplify context struct from the pool.  */

static inline struct gimplify_ctx *
ctx_alloc (void)
{
  struct gimplify_ctx * c = ctx_pool;

  if (c)
    ctx_pool = c->prev_context;
  else
    c = XNEW (struct gimplify_ctx);

  memset (c, '\0', sizeof (*c));
  return c;
}

/* Put gimplify context C back into the pool.  */

static inline void
ctx_free (struct gimplify_ctx *c)
{
  c->prev_context = ctx_pool;
  ctx_pool = c;
}

/* Free allocated ctx stack memory.  */

void
free_gimplify_stack (void)
{
  struct gimplify_ctx *c;

  while ((c = ctx_pool))
    {
      ctx_pool = c->prev_context;
      free (c);
    }
}


/* Set up a context for the gimplifier.  */

void
push_gimplify_context (bool in_ssa, bool rhs_cond_ok)
{
  struct gimplify_ctx *c = ctx_alloc ();

  c->prev_context = gimplify_ctxp;
  gimplify_ctxp = c;
  gimplify_ctxp->into_ssa = in_ssa;
  gimplify_ctxp->allow_rhs_cond_expr = rhs_cond_ok;
}

/* Tear down a context for the gimplifier.  If BODY is non-null, then
   put the temporaries into the outer BIND_EXPR.  Otherwise, put them
   in the local_decls.

   BODY is not a sequence, but the first tuple in a sequence.  */

void
pop_gimplify_context (gimple *body)
{
  struct gimplify_ctx *c = gimplify_ctxp;

  gcc_assert (c
	      && (!c->bind_expr_stack.exists ()
		  || c->bind_expr_stack.is_empty ()));
  c->bind_expr_stack.release ();
  gimplify_ctxp = c->prev_context;

  if (body)
    declare_vars (c->temps, body, false);
  else
    record_vars (c->temps);

  delete c->temp_htab;
  c->temp_htab = NULL;
  ctx_free (c);
}
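
/* For illustration (a hand-written sketch, not from the original sources):
   callers typically bracket gimplification of a detached tree like so,

     push_gimplify_context ();
     gimplify_and_add (expr, &seq);
     pop_gimplify_context (NULL);

   passing the new GIMPLE_BIND to pop_gimplify_context instead of NULL
   when the temporaries should be declared in that bind.  */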

/* Push a GIMPLE_BIND tuple onto the stack of bindings.  */

static void
gimple_push_bind_expr (gbind *bind_stmt)
{
  gimplify_ctxp->bind_expr_stack.reserve (8);
  gimplify_ctxp->bind_expr_stack.safe_push (bind_stmt);
}

/* Pop the topmost element off the stack of bindings.  */

static void
gimple_pop_bind_expr (void)
{
  gimplify_ctxp->bind_expr_stack.pop ();
}

/* Return the topmost element of the stack of bindings, i.e. the
   innermost binding currently being gimplified.  */

gbind *
gimple_current_bind_expr (void)
{
  return gimplify_ctxp->bind_expr_stack.last ();
}

/* Return the stack of bindings created during gimplification.  */

vec<gbind *>
gimple_bind_expr_stack (void)
{
  return gimplify_ctxp->bind_expr_stack;
}

/* Return true iff there is a COND_EXPR between us and the innermost
   CLEANUP_POINT_EXPR.  This info is used by gimple_push_cleanup.  */

static bool
gimple_conditional_context (void)
{
  return gimplify_ctxp->conditions > 0;
}

/* Note that we've entered a COND_EXPR.  */

static void
gimple_push_condition (void)
{
#ifdef ENABLE_GIMPLE_CHECKING
  if (gimplify_ctxp->conditions == 0)
    gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
#endif
  ++(gimplify_ctxp->conditions);
}

/* Note that we've left a COND_EXPR.  If we're back at unconditional scope
   now, add any conditional cleanups we've seen to the prequeue.  */

static void
gimple_pop_condition (gimple_seq *pre_p)
{
  int conds = --(gimplify_ctxp->conditions);

  gcc_assert (conds >= 0);
  if (conds == 0)
    {
      gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
      gimplify_ctxp->conditional_cleanups = NULL;
    }
}

/* A stable comparison routine for use with splay trees and DECLs.  */

static int
splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
{
  tree a = (tree) xa;
  tree b = (tree) xb;

  return DECL_UID (a) - DECL_UID (b);
}

/* Create a new omp construct that deals with variable remapping.  */

static struct gimplify_omp_ctx *
new_omp_context (enum omp_region_type region_type)
{
  struct gimplify_omp_ctx *c;

  c = XCNEW (struct gimplify_omp_ctx);
  c->outer_context = gimplify_omp_ctxp;
  c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
  c->privatized_types = new hash_set<tree>;
  c->location = input_location;
  c->region_type = region_type;
  if ((region_type & ORT_TASK) == 0)
    c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
  else
    c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;

  return c;
}

/* Destroy an omp construct that deals with variable remapping.  */

static void
delete_omp_context (struct gimplify_omp_ctx *c)
{
  splay_tree_delete (c->variables);
  delete c->privatized_types;
  c->loop_iter_var.release ();
  XDELETE (c);
}

static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);

/* Both gimplify the statement T and append it to *SEQ_P.  This function
   behaves exactly as gimplify_stmt, but you don't have to pass T as a
   reference.  */

void
gimplify_and_add (tree t, gimple_seq *seq_p)
{
  gimplify_stmt (&t, seq_p);
}

/* Gimplify statement T into sequence *SEQ_P, and return the first
   tuple in the sequence of generated tuples for this statement.
   Return NULL if gimplifying T produced no tuples.  */

static gimple *
gimplify_and_return_first (tree t, gimple_seq *seq_p)
{
  gimple_stmt_iterator last = gsi_last (*seq_p);

  gimplify_and_add (t, seq_p);

  if (!gsi_end_p (last))
    {
      gsi_next (&last);
      return gsi_stmt (last);
    }
  else
    return gimple_seq_first_stmt (*seq_p);
}

/* Returns true iff T is a valid RHS for an assignment to an un-renamed
   LHS, or for a call argument.  */

static bool
is_gimple_mem_rhs (tree t)
{
  /* If we're dealing with a renamable type, either source or dest must be
     a renamed variable.  */
  if (is_gimple_reg_type (TREE_TYPE (t)))
    return is_gimple_val (t);
  else
    return is_gimple_val (t) || is_gimple_lvalue (t);
}
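
/* For illustration (hand-written, not from the original sources): for a
   register type such as "int", only a GIMPLE value (a constant or a
   register variable) is a valid memory RHS, so a load like "a.b" must
   first go through a temporary; for an aggregate such as "struct S", an
   lvalue like "a.b" is itself acceptable, since the assignment becomes a
   block copy.  */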

/* Return true if T is a CALL_EXPR or an expression that can be
   assigned to a temporary.  Note that this predicate should only be
   used during gimplification.  See the rationale for this in
   gimplify_modify_expr.  */

static bool
is_gimple_reg_rhs_or_call (tree t)
{
  return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
	  || TREE_CODE (t) == CALL_EXPR);
}

/* Return true if T is a valid memory RHS or a CALL_EXPR.  Note that
   this predicate should only be used during gimplification.  See the
   rationale for this in gimplify_modify_expr.  */

static bool
is_gimple_mem_rhs_or_call (tree t)
{
  /* If we're dealing with a renamable type, either source or dest must be
     a renamed variable.  */
  if (is_gimple_reg_type (TREE_TYPE (t)))
    return is_gimple_val (t);
  else
    return (is_gimple_val (t) || is_gimple_lvalue (t)
	    || TREE_CODE (t) == CALL_EXPR);
}

/* Create a temporary with a name derived from VAL.  Subroutine of
   lookup_tmp_var; nobody else should call this function.  */

static inline tree
create_tmp_from_val (tree val)
{
  /* Drop all qualifiers and address-space information from the value type.  */
  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (val));
  tree var = create_tmp_var (type, get_name (val));
  if (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
      || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE)
    DECL_GIMPLE_REG_P (var) = 1;
  return var;
}

/* Create a temporary to hold the value of VAL.  If IS_FORMAL, try to reuse
   an existing expression temporary.  */

static tree
lookup_tmp_var (tree val, bool is_formal)
{
  tree ret;

  /* If not optimizing, never really reuse a temporary.  local-alloc
     won't allocate any variable that is used in more than one basic
     block, which means it will go into memory, causing much extra
     work in reload and final and poorer code generation, outweighing
     the extra memory allocation here.  */
  if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
    ret = create_tmp_from_val (val);
  else
    {
      elt_t elt, *elt_p;
      elt_t **slot;

      elt.val = val;
      if (!gimplify_ctxp->temp_htab)
	gimplify_ctxp->temp_htab = new hash_table<gimplify_hasher> (1000);
      slot = gimplify_ctxp->temp_htab->find_slot (&elt, INSERT);
      if (*slot == NULL)
	{
	  elt_p = XNEW (elt_t);
	  elt_p->val = val;
	  elt_p->temp = ret = create_tmp_from_val (val);
	  *slot = elt_p;
	}
      else
	{
	  elt_p = *slot;
	  ret = elt_p->temp;
	}
    }

  return ret;
}
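
/* For illustration (a hand-written sketch; the temporary name is made up):
   when optimizing, two formal-temporary requests for the same tree node
   "a + b" hit the same temp_htab slot and both return one temporary, say
   D.1234, instead of creating D.1234 and D.1235.  */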

/* Helper for get_formal_tmp_var and get_initialized_tmp_var.  */

static tree
internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
		      bool is_formal)
{
  tree t, mod;

  /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
     can create an INIT_EXPR and convert it into a GIMPLE_CALL below.  */
  gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
		 fb_rvalue);

  if (gimplify_ctxp->into_ssa
      && is_gimple_reg_type (TREE_TYPE (val)))
    t = make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val)));
  else
    t = lookup_tmp_var (val, is_formal);

  mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));

  SET_EXPR_LOCATION (mod, EXPR_LOC_OR_LOC (val, input_location));

  /* gimplify_modify_expr might want to reduce this further.  */
  gimplify_and_add (mod, pre_p);
  ggc_free (mod);

  return t;
}

/* Return a formal temporary variable initialized with VAL.  PRE_P is as
   in gimplify_expr.  Only use this function if:

   1) The value of the unfactored expression represented by VAL will not
      change between the initialization and use of the temporary, and
   2) The temporary will not be otherwise modified.

   For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
   and #2 means it is inappropriate for && temps.

   For other cases, use get_initialized_tmp_var instead.  */

tree
get_formal_tmp_var (tree val, gimple_seq *pre_p)
{
  return internal_get_tmp_var (val, pre_p, NULL, true);
}

/* Return a temporary variable initialized with VAL.  PRE_P and POST_P
   are as in gimplify_expr.  */

tree
get_initialized_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p)
{
  return internal_get_tmp_var (val, pre_p, post_p, false);
}

/* Declare all the variables in VARS in SCOPE.  If DEBUG_INFO is true,
   generate debug info for them; otherwise don't.  */

void
declare_vars (tree vars, gimple *gs, bool debug_info)
{
  tree last = vars;
  if (last)
    {
      tree temps, block;

      gbind *scope = as_a <gbind *> (gs);

      temps = nreverse (last);

      block = gimple_bind_block (scope);
      gcc_assert (!block || TREE_CODE (block) == BLOCK);
      if (!block || !debug_info)
	{
	  DECL_CHAIN (last) = gimple_bind_vars (scope);
	  gimple_bind_set_vars (scope, temps);
	}
      else
	{
	  /* We need to attach the nodes both to the BIND_EXPR and to its
	     associated BLOCK for debugging purposes.  The key point here
	     is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
	     is a subchain of the BIND_EXPR_VARS of the BIND_EXPR.  */
	  if (BLOCK_VARS (block))
	    BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
	  else
	    {
	      gimple_bind_set_vars (scope,
				    chainon (gimple_bind_vars (scope), temps));
	      BLOCK_VARS (block) = temps;
	    }
	}
    }
}

/* For VAR a VAR_DECL of variable size, try to find a constant upper bound
   for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly.  Abort if
   no such upper bound can be obtained.  */

static void
force_constant_size (tree var)
{
  /* The only attempt we make is by querying the maximum size of objects
     of the variable's type.  */

  HOST_WIDE_INT max_size;

  gcc_assert (TREE_CODE (var) == VAR_DECL);

  max_size = max_int_size_in_bytes (TREE_TYPE (var));

  gcc_assert (max_size >= 0);

  DECL_SIZE_UNIT (var)
    = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
  DECL_SIZE (var)
    = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
}

/* Push the temporary variable TMP into the current binding.  */

void
gimple_add_tmp_var_fn (struct function *fn, tree tmp)
{
  gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));

  /* Later processing assumes that the object size is constant, which might
     not be true at this point.  Force the use of a constant upper bound in
     this case.  */
  if (!tree_fits_uhwi_p (DECL_SIZE_UNIT (tmp)))
    force_constant_size (tmp);

  DECL_CONTEXT (tmp) = fn->decl;
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;

  record_vars_into (tmp, fn->decl);
}

/* Push the temporary variable TMP into the current binding.  */

void
gimple_add_tmp_var (tree tmp)
{
  gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));

  /* Later processing assumes that the object size is constant, which might
     not be true at this point.  Force the use of a constant upper bound in
     this case.  */
  if (!tree_fits_uhwi_p (DECL_SIZE_UNIT (tmp)))
    force_constant_size (tmp);

  DECL_CONTEXT (tmp) = current_function_decl;
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;

  if (gimplify_ctxp)
    {
      DECL_CHAIN (tmp) = gimplify_ctxp->temps;
      gimplify_ctxp->temps = tmp;

      /* Mark temporaries local within the nearest enclosing parallel.  */
      if (gimplify_omp_ctxp)
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  while (ctx
		 && (ctx->region_type == ORT_WORKSHARE
		     || ctx->region_type == ORT_SIMD
		     || ctx->region_type == ORT_ACC))
	    ctx = ctx->outer_context;
	  if (ctx)
	    omp_add_variable (ctx, tmp, GOVD_LOCAL | GOVD_SEEN);
	}
    }
  else if (cfun)
    record_vars (tmp);
  else
    {
      gimple_seq body_seq;

      /* This case is for nested functions.  We need to expose the locals
	 they create.  */
      body_seq = gimple_body (current_function_decl);
      declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
    }
}


\f
/* This page contains routines to unshare tree nodes, i.e. to duplicate tree
   nodes that are referenced more than once in GENERIC functions.  This is
   necessary because gimplification (translation into GIMPLE) is performed
   by modifying tree nodes in-place, so gimplification of a shared node in a
   first context could generate an invalid GIMPLE form in a second context.

   This is achieved with a simple mark/copy/unmark algorithm that walks the
   GENERIC representation top-down, marks nodes with TREE_VISITED the first
   time it encounters them, duplicates them if they already have TREE_VISITED
   set, and finally removes the TREE_VISITED marks it has set.

   The algorithm works only at the function level, i.e. it generates a GENERIC
   representation of a function with no nodes shared within the function when
   passed a GENERIC function (except for nodes that are allowed to be shared).

   At the global level, it is also necessary to unshare tree nodes that are
   referenced in more than one function, for the same aforementioned reason.
   This requires some cooperation from the front-end.  There are 2 strategies:

   1. Manual unsharing.  The front-end needs to call unshare_expr on every
      expression that might end up being shared across functions.

   2. Deep unsharing.  This is an extension of regular unsharing.  Instead
      of calling unshare_expr on expressions that might be shared across
      functions, the front-end pre-marks them with TREE_VISITED.  This will
      ensure that they are unshared on the first reference within functions
      when the regular unsharing algorithm runs.  The counterpart is that
      this algorithm must look deeper than for manual unsharing, which is
      specified by LANG_HOOKS_DEEP_UNSHARING.

   If there are only a few specific cases of node sharing across functions, it
   is probably easier for a front-end to unshare the expressions manually.  On
   the contrary, if the expressions generated at the global level are as
   widespread as expressions generated within functions, deep unsharing is
   very likely the way to go.  */
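
/* For illustration (hand-written, not from the original sources): if a
   front-end builds one PLUS_EXPR node for "i + 1" and references it from
   two statements, gimplifying the first reference in-place would also
   rewrite the second one.  The walk below marks the node TREE_VISITED at
   the first reference and hands the second reference a fresh copy.  */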

/* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
   These nodes model computations that must be done once.  If we were to
   unshare something like SAVE_EXPR(i++), the gimplification process would
   create wrong code.  However, if DATA is non-null, it must hold a pointer
   set that is used to unshare the subtrees of these nodes.  */

static tree
mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
     copy their subtrees if we can make sure to do it only once.  */
  if (code == SAVE_EXPR || code == TARGET_EXPR || code == BIND_EXPR)
    {
      if (data && !((hash_set<tree> *)data)->add (t))
	;
      else
	*walk_subtrees = 0;
    }

  /* Stop at types, decls, constants like copy_tree_r.  */
  else if (TREE_CODE_CLASS (code) == tcc_type
	   || TREE_CODE_CLASS (code) == tcc_declaration
	   || TREE_CODE_CLASS (code) == tcc_constant
	   /* We can't do anything sensible with a BLOCK used as an
	      expression, but we also can't just die when we see it
	      because of non-expression uses.  So we avert our eyes
	      and cross our fingers.  Silly Java.  */
	   || code == BLOCK)
    *walk_subtrees = 0;

  /* Cope with the statement expression extension.  */
  else if (code == STATEMENT_LIST)
    ;

  /* Leave the bulk of the work to copy_tree_r itself.  */
  else
    copy_tree_r (tp, walk_subtrees, NULL);

  return NULL_TREE;
}

/* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
   If *TP has been visited already, then *TP is deeply copied by calling
   mostly_copy_tree_r.  DATA is passed to mostly_copy_tree_r unmodified.  */

static tree
copy_if_shared_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Skip types, decls, and constants.  But we do want to look at their
     types and the bounds of types.  Mark them as visited so we properly
     unmark their subtrees on the unmark pass.  If we've already seen them,
     don't look down further.  */
  if (TREE_CODE_CLASS (code) == tcc_type
      || TREE_CODE_CLASS (code) == tcc_declaration
      || TREE_CODE_CLASS (code) == tcc_constant)
    {
      if (TREE_VISITED (t))
	*walk_subtrees = 0;
      else
	TREE_VISITED (t) = 1;
    }

  /* If this node has been visited already, unshare it and don't look
     any deeper.  */
  else if (TREE_VISITED (t))
    {
      walk_tree (tp, mostly_copy_tree_r, data, NULL);
      *walk_subtrees = 0;
    }

  /* Otherwise, mark the node as visited and keep looking.  */
  else
    TREE_VISITED (t) = 1;

  return NULL_TREE;
}

/* Unshare most of the shared trees rooted at *TP.  DATA is passed to the
   copy_if_shared_r callback unmodified.  */

static inline void
copy_if_shared (tree *tp, void *data)
{
  walk_tree (tp, copy_if_shared_r, data, NULL);
}

/* Unshare all the trees in the body of FNDECL, as well as in the bodies of
   any nested functions.  */

static void
unshare_body (tree fndecl)
{
  struct cgraph_node *cgn = cgraph_node::get (fndecl);
  /* If the language requires deep unsharing, we need a pointer set to make
     sure we don't repeatedly unshare subtrees of unshareable nodes.  */
  hash_set<tree> *visited
    = lang_hooks.deep_unsharing ? new hash_set<tree> : NULL;

  copy_if_shared (&DECL_SAVED_TREE (fndecl), visited);
  copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl)), visited);
  copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)), visited);

  delete visited;

  if (cgn)
    for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
      unshare_body (cgn->decl);
}

/* Callback for walk_tree to unmark the visited trees rooted at *TP.
   Subtrees are walked until the first unvisited node is encountered.  */

static tree
unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;

  /* If this node has been visited, unmark it and keep looking.  */
  if (TREE_VISITED (t))
    TREE_VISITED (t) = 0;

  /* Otherwise, don't look any deeper.  */
  else
    *walk_subtrees = 0;

  return NULL_TREE;
}

/* Unmark the visited trees rooted at *TP.  */

static inline void
unmark_visited (tree *tp)
{
  walk_tree (tp, unmark_visited_r, NULL, NULL);
}

/* Likewise, but mark all trees as not visited.  */

static void
unvisit_body (tree fndecl)
{
  struct cgraph_node *cgn = cgraph_node::get (fndecl);

  unmark_visited (&DECL_SAVED_TREE (fndecl));
  unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl)));
  unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)));

  if (cgn)
    for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
      unvisit_body (cgn->decl);
}

/* Unconditionally make an unshared copy of EXPR.  This is used when using
   stored expressions which span multiple functions, such as BINFO_VTABLE,
   as the normal unsharing process can't tell that they're shared.  */

tree
unshare_expr (tree expr)
{
  walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
  return expr;
}

/* Worker for unshare_expr_without_location.  */

static tree
prune_expr_location (tree *tp, int *walk_subtrees, void *)
{
  if (EXPR_P (*tp))
    SET_EXPR_LOCATION (*tp, UNKNOWN_LOCATION);
  else
    *walk_subtrees = 0;
  return NULL_TREE;
}

/* Similar to unshare_expr but also prune all expression locations
   from EXPR.  */

tree
unshare_expr_without_location (tree expr)
{
  walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
  if (EXPR_P (expr))
    walk_tree (&expr, prune_expr_location, NULL, NULL);
  return expr;
}
\f
/* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
   contain statements and have a value.  Assign its value to a temporary
   and give it void_type_node.  Return the temporary, or NULL_TREE if
   WRAPPER was already void.  */

tree
voidify_wrapper_expr (tree wrapper, tree temp)
{
  tree type = TREE_TYPE (wrapper);
  if (type && !VOID_TYPE_P (type))
    {
      tree *p;

      /* Set p to point to the body of the wrapper.  Loop until we find
	 something that isn't a wrapper.  */
      for (p = &wrapper; p && *p; )
	{
	  switch (TREE_CODE (*p))
	    {
	    case BIND_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      /* For a BIND_EXPR, the body is operand 1.  */
	      p = &BIND_EXPR_BODY (*p);
	      break;

	    case CLEANUP_POINT_EXPR:
	    case TRY_FINALLY_EXPR:
	    case TRY_CATCH_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TREE_OPERAND (*p, 0);
	      break;

	    case STATEMENT_LIST:
	      {
		tree_stmt_iterator i = tsi_last (*p);
		TREE_SIDE_EFFECTS (*p) = 1;
		TREE_TYPE (*p) = void_type_node;
		p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
	      }
	      break;

	    case COMPOUND_EXPR:
	      /* Advance to the last statement.  Set all container types to
		 void.  */
	      for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		}
	      break;

	    case TRANSACTION_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TRANSACTION_EXPR_BODY (*p);
	      break;

	    default:
	      /* Assume that any tree upon which voidify_wrapper_expr is
		 directly called is a wrapper, and that its body is op0.  */
	      if (p == &wrapper)
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		  p = &TREE_OPERAND (*p, 0);
		  break;
		}
	      goto out;
	    }
	}

    out:
      if (p == NULL || IS_EMPTY_STMT (*p))
	temp = NULL_TREE;
      else if (temp)
	{
	  /* The wrapper is on the RHS of an assignment that we're pushing
	     down.  */
	  gcc_assert (TREE_CODE (temp) == INIT_EXPR
		      || TREE_CODE (temp) == MODIFY_EXPR);
	  TREE_OPERAND (temp, 1) = *p;
	  *p = temp;
	}
      else
	{
	  temp = create_tmp_var (type, "retval");
	  *p = build2 (INIT_EXPR, type, temp, *p);
	}

      return temp;
    }

  return NULL_TREE;
}
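
/* For illustration (hand-written, not from the original sources): given a
   statement expression used as an rvalue,

     x = ({ int i = f (); i + 1; });

   the wrapper's innermost value "i + 1" is rewritten into an assignment
   to a fresh "retval" temporary, the wrapper itself is given void type,
   and the temporary is returned for the caller to use in place of the
   wrapper's value.  */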

/* Prepare calls to builtins to SAVE and RESTORE the stack as well as
   a temporary through which they communicate.  */

static void
build_stack_save_restore (gcall **save, gcall **restore)
{
  tree tmp_var;

  *save = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE), 0);
  tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
  gimple_call_set_lhs (*save, tmp_var);

  *restore
    = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE),
			 1, tmp_var);
}
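
/* For illustration (hand-written GIMPLE; the ".1" suffix is made up), the
   pair built above ends up bracketing a block as

     saved_stack.1 = __builtin_stack_save ();
     ... block body, e.g. VLA allocations ...
     __builtin_stack_restore (saved_stack.1);  */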

/* Gimplify a BIND_EXPR.  Just voidify and recurse.  */

static enum gimplify_status
gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree bind_expr = *expr_p;
  bool old_keep_stack = gimplify_ctxp->keep_stack;
  bool old_save_stack = gimplify_ctxp->save_stack;
  tree t;
  gbind *bind_stmt;
  gimple_seq body, cleanup;
  gcall *stack_save;
  location_t start_locus = 0, end_locus = 0;
  tree ret_clauses = NULL;

  tree temp = voidify_wrapper_expr (bind_expr, NULL);

  /* Mark variables seen in this bind expr.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
    {
      if (TREE_CODE (t) == VAR_DECL)
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;

	  /* Mark variable as local.  */
	  if (ctx && ctx->region_type != ORT_NONE && !DECL_EXTERNAL (t)
	      && (! DECL_SEEN_IN_BIND_EXPR_P (t)
		  || splay_tree_lookup (ctx->variables,
					(splay_tree_key) t) == NULL))
	    {
	      if (ctx->region_type == ORT_SIMD
		  && TREE_ADDRESSABLE (t)
		  && !TREE_STATIC (t))
		omp_add_variable (ctx, t, GOVD_PRIVATE | GOVD_SEEN);
	      else
		omp_add_variable (ctx, t, GOVD_LOCAL | GOVD_SEEN);
	    }

	  DECL_SEEN_IN_BIND_EXPR_P (t) = 1;

	  if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
	    cfun->has_local_explicit_reg_vars = true;
	}

      /* Preliminarily mark non-addressed complex variables as eligible
	 for promotion to gimple registers.  We'll transform their uses
	 as we find them.  */
      if ((TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
	   || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
	  && !TREE_THIS_VOLATILE (t)
	  && (TREE_CODE (t) == VAR_DECL && !DECL_HARD_REGISTER (t))
	  && !needs_to_live_in_memory (t))
	DECL_GIMPLE_REG_P (t) = 1;
    }

  bind_stmt = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
				 BIND_EXPR_BLOCK (bind_expr));
  gimple_push_bind_expr (bind_stmt);

  gimplify_ctxp->keep_stack = false;
  gimplify_ctxp->save_stack = false;

  /* Gimplify the body into the GIMPLE_BIND tuple's body.  */
  body = NULL;
  gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
  gimple_bind_set_body (bind_stmt, body);

  /* Source location wise, the cleanup code (stack_restore and clobbers)
     belongs to the end of the block, so propagate what we have.  The
     stack_save operation belongs to the beginning of block, which we can
     infer from the bind_expr directly if the block has no explicit
     assignment.  */
  if (BIND_EXPR_BLOCK (bind_expr))
    {
      end_locus = BLOCK_SOURCE_END_LOCATION (BIND_EXPR_BLOCK (bind_expr));
      start_locus = BLOCK_SOURCE_LOCATION (BIND_EXPR_BLOCK (bind_expr));
    }
  if (start_locus == 0)
    start_locus = EXPR_LOCATION (bind_expr);

  cleanup = NULL;
  stack_save = NULL;

  /* If the code both contains VLAs and calls alloca, then we cannot reclaim
     the stack space allocated to the VLAs.  */
  if (gimplify_ctxp->save_stack && !gimplify_ctxp->keep_stack)
    {
      gcall *stack_restore;

      /* Save stack on entry and restore it on exit.  Add a try_finally
	 block to achieve this.  */
      build_stack_save_restore (&stack_save, &stack_restore);

      gimple_set_location (stack_save, start_locus);
      gimple_set_location (stack_restore, end_locus);

      gimplify_seq_add_stmt (&cleanup, stack_restore);
    }

  /* Add clobbers for all variables that go out of scope.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
    {
      if (TREE_CODE (t) == VAR_DECL
	  && !is_global_var (t)
	  && DECL_CONTEXT (t) == current_function_decl
	  && !DECL_HARD_REGISTER (t)
	  && !TREE_THIS_VOLATILE (t)
	  && !DECL_HAS_VALUE_EXPR_P (t)
	  /* Only care for variables that have to be in memory.  Others
	     will be rewritten into SSA names, hence moved to the top-level.  */
	  && !is_gimple_reg (t)
	  && flag_stack_reuse != SR_NONE)
	{
	  tree clobber = build_constructor (TREE_TYPE (t), NULL);
	  gimple *clobber_stmt;
	  TREE_THIS_VOLATILE (clobber) = 1;
	  clobber_stmt = gimple_build_assign (t, clobber);
	  gimple_set_location (clobber_stmt, end_locus);
	  gimplify_seq_add_stmt (&cleanup, clobber_stmt);

	  if (flag_openacc && oacc_declare_returns != NULL)
	    {
	      tree *c = oacc_declare_returns->get (t);
	      if (c != NULL)
		{
		  if (ret_clauses)
		    OMP_CLAUSE_CHAIN (*c) = ret_clauses;

		  ret_clauses = *c;

		  oacc_declare_returns->remove (t);

		  if (oacc_declare_returns->elements () == 0)
		    {
		      delete oacc_declare_returns;
		      oacc_declare_returns = NULL;
		    }
		}
	    }
	}
    }

  if (ret_clauses)
    {
      gomp_target *stmt;
      gimple_stmt_iterator si = gsi_start (cleanup);

      stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
				      ret_clauses);
      gsi_insert_seq_before_without_update (&si, stmt, GSI_NEW_STMT);
    }

  if (cleanup)
    {
      gtry *gs;
      gimple_seq new_body;

      new_body = NULL;
      gs = gimple_build_try (gimple_bind_body (bind_stmt), cleanup,
			     GIMPLE_TRY_FINALLY);

      if (stack_save)
	gimplify_seq_add_stmt (&new_body, stack_save);
      gimplify_seq_add_stmt (&new_body, gs);
      gimple_bind_set_body (bind_stmt, new_body);
    }

  /* keep_stack propagates all the way up to the outermost BIND_EXPR.  */
  if (!gimplify_ctxp->keep_stack)
    gimplify_ctxp->keep_stack = old_keep_stack;
  gimplify_ctxp->save_stack = old_save_stack;

  gimple_pop_bind_expr ();

  gimplify_seq_add_stmt (pre_p, bind_stmt);

  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL_TREE;
  return GS_ALL_DONE;
}

/* Gimplify a RETURN_EXPR.  If the expression to be returned is not a
   GIMPLE value, it is assigned to a new temporary and the statement is
   re-written to return the temporary.

   PRE_P points to the sequence where side effects that must happen before
   STMT should be stored.  */

static enum gimplify_status
gimplify_return_expr (tree stmt, gimple_seq *pre_p)
{
  greturn *ret;
  tree ret_expr = TREE_OPERAND (stmt, 0);
  tree result_decl, result;

  if (ret_expr == error_mark_node)
    return GS_ERROR;

  /* Implicit _Cilk_sync must be inserted right before any return statement
     if there is a _Cilk_spawn in the function.  If the user has provided a
     _Cilk_sync, the optimizer should remove this duplicate one.  */
  if (fn_contains_cilk_spawn_p (cfun))
    {
      tree impl_sync = build0 (CILK_SYNC_STMT, void_type_node);
      gimplify_and_add (impl_sync, pre_p);
    }

  if (!ret_expr
      || TREE_CODE (ret_expr) == RESULT_DECL
      || ret_expr == error_mark_node)
    {
      greturn *ret = gimple_build_return (ret_expr);
      gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
      gimplify_seq_add_stmt (pre_p, ret);
      return GS_ALL_DONE;
    }

  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
    result_decl = NULL_TREE;
  else
    {
      result_decl = TREE_OPERAND (ret_expr, 0);

      /* See through a return by reference.  */
      if (TREE_CODE (result_decl) == INDIRECT_REF)
	result_decl = TREE_OPERAND (result_decl, 0);

      gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
		   || TREE_CODE (ret_expr) == INIT_EXPR)
		  && TREE_CODE (result_decl) == RESULT_DECL);
    }

  /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
     Recall that aggregate_value_p is FALSE for any aggregate type that is
     returned in registers.  If we're returning values in registers, then
     we don't want to extend the lifetime of the RESULT_DECL, particularly
     across another call.  In addition, for those aggregates for which
     hard_function_value generates a PARALLEL, we'll die during normal
     expansion of structure assignments; there's special code in expand_return
     to handle this case that does not exist in expand_expr.  */
  if (!result_decl)
    result = NULL_TREE;
  else if (aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
    {
      if (TREE_CODE (DECL_SIZE (result_decl)) != INTEGER_CST)
	{
	  if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl)))
	    gimplify_type_sizes (TREE_TYPE (result_decl), pre_p);
	  /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
	     should be effectively allocated by the caller, i.e. all calls to
	     this function must be subject to the Return Slot Optimization.  */
	  gimplify_one_sizepos (&DECL_SIZE (result_decl), pre_p);
	  gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl), pre_p);
	}
      result = result_decl;
    }
  else if (gimplify_ctxp->return_temp)
    result = gimplify_ctxp->return_temp;
  else
    {
      result = create_tmp_reg (TREE_TYPE (result_decl));

      /* ??? With complex control flow (usually involving abnormal edges),
	 we can wind up warning about an uninitialized value for this.  Due
	 to how this variable is constructed and initialized, this is never
	 true.  Give up and never warn.  */
      TREE_NO_WARNING (result) = 1;

      gimplify_ctxp->return_temp = result;
    }

  /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
     Then gimplify the whole thing.  */
  if (result != result_decl)
    TREE_OPERAND (ret_expr, 0) = result;

  gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);

  ret = gimple_build_return (result);
  gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
  gimplify_seq_add_stmt (pre_p, ret);

  return GS_ALL_DONE;
}
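
/* For illustration (hand-written GIMPLE; the temporary name is made up):
   for a function returning int, "return a + b;" gimplifies roughly to

     D.1234 = a + b;
     return D.1234;

   with D.1234 cached in gimplify_ctxp->return_temp so that every return
   statement in the function reuses the same temporary.  */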

/* Gimplify a variable-length array DECL.  */

static void
gimplify_vla_decl (tree decl, gimple_seq *seq_p)
{
  /* This is a variable-sized decl.  Simplify its size and mark it
     for deferred expansion.  */
  tree t, addr, ptr_type;

  gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
  gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);

  /* Don't mess with a DECL_VALUE_EXPR set by the front-end.  */
  if (DECL_HAS_VALUE_EXPR_P (decl))
    return;

  /* All occurrences of this decl in final gimplified code will be
     replaced by indirection.  Setting DECL_VALUE_EXPR does two
     things: First, it lets the rest of the gimplifier know what
     replacement to use.  Second, it lets the debug info know
     where to find the value.  */
  ptr_type = build_pointer_type (TREE_TYPE (decl));
  addr = create_tmp_var (ptr_type, get_name (decl));
  DECL_IGNORED_P (addr) = 0;
  t = build_fold_indirect_ref (addr);
  TREE_THIS_NOTRAP (t) = 1;
  SET_DECL_VALUE_EXPR (decl, t);
  DECL_HAS_VALUE_EXPR_P (decl) = 1;

  t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
  t = build_call_expr (t, 2, DECL_SIZE_UNIT (decl),
		       size_int (DECL_ALIGN (decl)));
  /* The call has been built for a variable-sized object.  */
  CALL_ALLOCA_FOR_VAR_P (t) = 1;
  t = fold_convert (ptr_type, t);
  t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);

  gimplify_and_add (t, seq_p);
}
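
/* For illustration (a hand-written sketch; the pointer name is made up):
   for "int a[n];" this emits, roughly,

     a.1 = __builtin_alloca_with_align (D.size, align);

   and sets DECL_VALUE_EXPR (a) = *a.1, so every later use of "a" is
   gimplified as an indirection through the pointer temporary.  */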

/* A helper function to be called via walk_tree.  Mark all labels under *TP
   as being forced.  To be called for DECL_INITIAL of static variables.  */

static tree
force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  if (TYPE_P (*tp))
    *walk_subtrees = 0;
  if (TREE_CODE (*tp) == LABEL_DECL)
    FORCED_LABEL (*tp) = 1;

  return NULL_TREE;
}

/* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
   and initialization explicit.  */

static enum gimplify_status
gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
{
  tree stmt = *stmt_p;
  tree decl = DECL_EXPR_DECL (stmt);

  *stmt_p = NULL_TREE;

  if (TREE_TYPE (decl) == error_mark_node)
    return GS_ERROR;

  if ((TREE_CODE (decl) == TYPE_DECL
       || TREE_CODE (decl) == VAR_DECL)
      && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
    gimplify_type_sizes (TREE_TYPE (decl), seq_p);

  /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
     in case its size expressions contain problematic nodes like CALL_EXPR.  */
  if (TREE_CODE (decl) == TYPE_DECL
      && DECL_ORIGINAL_TYPE (decl)
      && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl)))
    gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl), seq_p);

  if (TREE_CODE (decl) == VAR_DECL && !DECL_EXTERNAL (decl))
    {
      tree init = DECL_INITIAL (decl);

      if (TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
	  || (!TREE_STATIC (decl)
	      && flag_stack_check == GENERIC_STACK_CHECK
	      && compare_tree_int (DECL_SIZE_UNIT (decl),
				   STACK_CHECK_MAX_VAR_SIZE) > 0))
	gimplify_vla_decl (decl, seq_p);

      /* Some front ends do not explicitly declare all anonymous
	 artificial variables.  We compensate here by declaring the
	 variables, though it would be better if the front ends would
	 explicitly declare them.  */
      if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
	  && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
	gimple_add_tmp_var (decl);

      if (init && init != error_mark_node)
	{
	  if (!TREE_STATIC (decl))
	    {
	      DECL_INITIAL (decl) = NULL_TREE;
	      init = build2 (INIT_EXPR, void_type_node, decl, init);
	      gimplify_and_add (init, seq_p);
	      ggc_free (init);
	    }
	  else
	    /* We must still examine initializers for static variables
	       as they may contain a label address.  */
	    walk_tree (&init, force_labels_r, NULL, NULL);
	}
    }

  return GS_ALL_DONE;
}

/* Gimplify a LOOP_EXPR.  Normally this just involves gimplifying the body
   and replacing the LOOP_EXPR with goto, but if the loop contains an
   EXIT_EXPR, we need to append a label for it to jump to.  */

static enum gimplify_status
gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree saved_label = gimplify_ctxp->exit_label;
  tree start_label = create_artificial_label (UNKNOWN_LOCATION);

  gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));

  gimplify_ctxp->exit_label = NULL_TREE;

  gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);

  gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));

  if (gimplify_ctxp->exit_label)
    gimplify_seq_add_stmt (pre_p,
			   gimple_build_label (gimplify_ctxp->exit_label));

  gimplify_ctxp->exit_label = saved_label;

  *expr_p = NULL;
  return GS_ALL_DONE;
}
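
/* For illustration (a hand-written sketch; label names are made up): a
   LOOP_EXPR whose body contains an EXIT_EXPR lowers to

     start:
       ... body, with the EXIT_EXPR becoming "if (cond) goto exit;" ...
       goto start;
     exit:

   and the exit label is only emitted when an EXIT_EXPR asked for it.  */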

/* Gimplify a statement list onto a sequence.  These may be created either
   by an enlightened front-end, or by shortcut_cond_expr.  */

static enum gimplify_status
gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
{
  tree temp = voidify_wrapper_expr (*expr_p, NULL);

  tree_stmt_iterator i = tsi_start (*expr_p);

  while (!tsi_end_p (i))
    {
      gimplify_stmt (tsi_stmt_ptr (i), pre_p);
      tsi_delink (&i);
    }

  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  return GS_ALL_DONE;
}

\f
/* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
   branch to.  */

static enum gimplify_status
gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree switch_expr = *expr_p;
  gimple_seq switch_body_seq = NULL;
  enum gimplify_status ret;
  tree index_type = TREE_TYPE (switch_expr);
  if (index_type == NULL_TREE)
    index_type = TREE_TYPE (SWITCH_COND (switch_expr));

  ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
		       fb_rvalue);
  if (ret == GS_ERROR || ret == GS_UNHANDLED)
    return ret;

  if (SWITCH_BODY (switch_expr))
    {
      vec<tree> labels;
      vec<tree> saved_labels;
      tree default_case = NULL_TREE;
      gswitch *switch_stmt;

      /* If someone can be bothered to fill in the labels, they can
	 be bothered to null out the body too.  */
      gcc_assert (!SWITCH_LABELS (switch_expr));

      /* Save old labels, get new ones from body, then restore the old
	 labels.  Save all the things from the switch body to append after.  */
      saved_labels = gimplify_ctxp->case_labels;
      gimplify_ctxp->case_labels.create (8);

      gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);
      labels = gimplify_ctxp->case_labels;
      gimplify_ctxp->case_labels = saved_labels;

      preprocess_case_label_vec_for_gimple (labels, index_type,
					    &default_case);

      if (!default_case)
	{
	  glabel *new_default;

	  default_case
	    = build_case_label (NULL_TREE, NULL_TREE,
				create_artificial_label (UNKNOWN_LOCATION));
	  new_default = gimple_build_label (CASE_LABEL (default_case));
	  gimplify_seq_add_stmt (&switch_body_seq, new_default);
	}

      switch_stmt = gimple_build_switch (SWITCH_COND (switch_expr),
					 default_case, labels);
      gimplify_seq_add_stmt (pre_p, switch_stmt);
      gimplify_seq_add_seq (pre_p, switch_body_seq);
      labels.release ();
    }
  else
    gcc_assert (SWITCH_LABELS (switch_expr));

  return GS_ALL_DONE;
}

/* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P.  */

static enum gimplify_status
gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
{
  struct gimplify_ctx *ctxp;
  glabel *label_stmt;

  /* Invalid programs can play Duff's Device type games with, for example,
     #pragma omp parallel.  At least in the C front end, we don't
     detect such invalid branches until after gimplification, in the
     diagnose_omp_blocks pass.  */
  for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
    if (ctxp->case_labels.exists ())
      break;

  label_stmt = gimple_build_label (CASE_LABEL (*expr_p));
  ctxp->case_labels.safe_push (*expr_p);
  gimplify_seq_add_stmt (pre_p, label_stmt);

  return GS_ALL_DONE;
}

/* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
   if necessary.  */

tree
build_and_jump (tree *label_p)
{
  if (label_p == NULL)
    /* If there's nowhere to jump, just fall through.  */
    return NULL_TREE;

  if (*label_p == NULL_TREE)
    {
      tree label = create_artificial_label (UNKNOWN_LOCATION);
      *label_p = label;
    }

  return build1 (GOTO_EXPR, void_type_node, *label_p);
}

/* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
   This also involves building a label to jump to and communicating it to
   gimplify_loop_expr through gimplify_ctxp->exit_label.  */

static enum gimplify_status
gimplify_exit_expr (tree *expr_p)
{
  tree cond = TREE_OPERAND (*expr_p, 0);
  tree expr;

  expr = build_and_jump (&gimplify_ctxp->exit_label);
  expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
  *expr_p = expr;

  return GS_OK;
}
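
/* For illustration (hand-written, not from the original sources): an
   EXIT_EXPR <cond> inside a LOOP_EXPR becomes the equivalent of

     if (cond) goto exit_label;

   with an empty else arm, where exit_label is the shared label later
   emitted by gimplify_loop_expr.  */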

/* *EXPR_P is a COMPONENT_REF being used as an rvalue.  If its type is
   different from its canonical type, wrap the whole thing inside a
   NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
   type.

   The canonical type of a COMPONENT_REF is the type of the field being
   referenced--unless the field is a bit-field which can be read directly
   in a smaller mode, in which case the canonical type is the
   sign-appropriate type corresponding to that mode.  */

static void
canonicalize_component_ref (tree *expr_p)
{
  tree expr = *expr_p;
  tree type;

  gcc_assert (TREE_CODE (expr) == COMPONENT_REF);

  if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
    type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
  else
    type = TREE_TYPE (TREE_OPERAND (expr, 1));

  /* One could argue that all the stuff below is not necessary for
     the non-bitfield case and declare it a FE error if type
     adjustment would be needed.  */
  if (TREE_TYPE (expr) != type)
    {
#ifdef ENABLE_TYPES_CHECKING
      tree old_type = TREE_TYPE (expr);
#endif
      int type_quals;

      /* We need to preserve qualifiers and propagate them from
	 operand 0.  */
      type_quals = TYPE_QUALS (type)
	| TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
      if (TYPE_QUALS (type) != type_quals)
	type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);

      /* Set the type of the COMPONENT_REF to the underlying type.  */
      TREE_TYPE (expr) = type;

#ifdef ENABLE_TYPES_CHECKING
      /* It is now a FE error, if the conversion from the canonical
	 type to the original expression type is not useless.  */
      gcc_assert (useless_type_conversion_p (old_type, type));
#endif
    }
}

/* If a NOP conversion is changing a pointer to array of foo to a pointer
   to foo, embed that change in the ADDR_EXPR by converting
      T array[U];
      (T *)&array
   ==>
      &array[L]
   where L is the lower bound.  For simplicity, only do this for constant
   lower bound.
   The constraint is that the type of &array[L] is trivially convertible
   to T *.  */

static void
canonicalize_addr_expr (tree *expr_p)
{
  tree expr = *expr_p;
  tree addr_expr = TREE_OPERAND (expr, 0);
  tree datype, ddatype, pddatype;

  /* We simplify only conversions from an ADDR_EXPR to a pointer type.  */
  if (!POINTER_TYPE_P (TREE_TYPE (expr))
      || TREE_CODE (addr_expr) != ADDR_EXPR)
    return;

  /* The addr_expr type should be a pointer to an array.  */
  datype = TREE_TYPE (TREE_TYPE (addr_expr));
  if (TREE_CODE (datype) != ARRAY_TYPE)
    return;

  /* The pointer to element type shall be trivially convertible to
     the expression pointer type.  */
  ddatype = TREE_TYPE (datype);
  pddatype = build_pointer_type (ddatype);
  if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
				  pddatype))
    return;

  /* The lower bound and element sizes must be constant.  */
  if (!TYPE_SIZE_UNIT (ddatype)
      || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
      || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
      || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
    return;

  /* All checks succeeded.  Build a new node to merge the cast.  */
  *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
		    TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
		    NULL_TREE, NULL_TREE);
  *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);

  /* We can have stripped a required restrict qualifier above.  */
  if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
    *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
}

/* *EXPR_P is a NOP_EXPR or CONVERT_EXPR.  Remove it and/or other conversions
   underneath as appropriate.  */

static enum gimplify_status
gimplify_conversion (tree *expr_p)
{
  location_t loc = EXPR_LOCATION (*expr_p);
  gcc_assert (CONVERT_EXPR_P (*expr_p));

  /* Then strip away all but the outermost conversion.  */
  STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));

  /* And remove the outermost conversion if it's useless.  */
  if (tree_ssa_useless_type_conversion (*expr_p))
    *expr_p = TREE_OPERAND (*expr_p, 0);

  /* If we still have a conversion at the toplevel,
     then canonicalize some constructs.  */
  if (CONVERT_EXPR_P (*expr_p))
    {
      tree sub = TREE_OPERAND (*expr_p, 0);

      /* If a NOP conversion is changing the type of a COMPONENT_REF
	 expression, then canonicalize its type now in order to expose more
	 redundant conversions.  */
      if (TREE_CODE (sub) == COMPONENT_REF)
	canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));

      /* If a NOP conversion is changing a pointer to array of foo
	 to a pointer to foo, embed that change in the ADDR_EXPR.  */
      else if (TREE_CODE (sub) == ADDR_EXPR)
	canonicalize_addr_expr (expr_p);
    }

  /* If we have a conversion to a non-register type force the
     use of a VIEW_CONVERT_EXPR instead.  */
  if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
    *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
			       TREE_OPERAND (*expr_p, 0));

  /* Canonicalize CONVERT_EXPR to NOP_EXPR.  */
  if (TREE_CODE (*expr_p) == CONVERT_EXPR)
    TREE_SET_CODE (*expr_p, NOP_EXPR);

  return GS_OK;
}

/* Nonlocal VLAs seen in the current function.  */
static hash_set<tree> *nonlocal_vlas;

/* The VAR_DECLs created for nonlocal VLAs for debug info purposes.  */
static tree nonlocal_vla_vars;

/* Gimplify a VAR_DECL or PARM_DECL.  Return GS_OK if we expanded a
   DECL_VALUE_EXPR, and it's worth re-examining things.  */

static enum gimplify_status
gimplify_var_or_parm_decl (tree *expr_p)
{
  tree decl = *expr_p;

  /* ??? If this is a local variable, and it has not been seen in any
     outer BIND_EXPR, then it's probably the result of a duplicate
     declaration, for which we've already issued an error.  It would
     be really nice if the front end wouldn't leak these at all.
     Currently the only known culprit is C++ destructors, as seen
     in g++.old-deja/g++.jason/binding.C.  */
  if (TREE_CODE (decl) == VAR_DECL
      && !DECL_SEEN_IN_BIND_EXPR_P (decl)
      && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
      && decl_function_context (decl) == current_function_decl)
    {
      gcc_assert (seen_error ());
      return GS_ERROR;
    }

  /* When within an OMP context, notice uses of variables.  */
  if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
    return GS_ALL_DONE;

  /* If the decl is an alias for another expression, substitute it now.  */
  if (DECL_HAS_VALUE_EXPR_P (decl))
    {
      tree value_expr = DECL_VALUE_EXPR (decl);

      /* For referenced nonlocal VLAs add a decl for debugging purposes
	 to the current function.  */
      if (TREE_CODE (decl) == VAR_DECL
	  && TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
	  && nonlocal_vlas != NULL
	  && TREE_CODE (value_expr) == INDIRECT_REF
	  && TREE_CODE (TREE_OPERAND (value_expr, 0)) == VAR_DECL
	  && decl_function_context (decl) != current_function_decl)
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  while (ctx
		 && (ctx->region_type == ORT_WORKSHARE
		     || ctx->region_type == ORT_SIMD
		     || ctx->region_type == ORT_ACC))
	    ctx = ctx->outer_context;
	  if (!ctx && !nonlocal_vlas->add (decl))
	    {
	      tree copy = copy_node (decl);

	      lang_hooks.dup_lang_specific_decl (copy);
	      SET_DECL_RTL (copy, 0);
	      TREE_USED (copy) = 1;
	      DECL_CHAIN (copy) = nonlocal_vla_vars;
	      nonlocal_vla_vars = copy;
	      SET_DECL_VALUE_EXPR (copy, unshare_expr (value_expr));
	      DECL_HAS_VALUE_EXPR_P (copy) = 1;
	    }
	}

      *expr_p = unshare_expr (value_expr);
      return GS_OK;
    }

  return GS_ALL_DONE;
}
1888
1889 /* Recalculate the value of the TREE_SIDE_EFFECTS flag for T. */
1890
1891 static void
1892 recalculate_side_effects (tree t)
1893 {
1894 enum tree_code code = TREE_CODE (t);
1895 int len = TREE_OPERAND_LENGTH (t);
1896 int i;
1897
1898 switch (TREE_CODE_CLASS (code))
1899 {
1900 case tcc_expression:
1901 switch (code)
1902 {
1903 case INIT_EXPR:
1904 case MODIFY_EXPR:
1905 case VA_ARG_EXPR:
1906 case PREDECREMENT_EXPR:
1907 case PREINCREMENT_EXPR:
1908 case POSTDECREMENT_EXPR:
1909 case POSTINCREMENT_EXPR:
1910 /* All of these have side-effects, no matter what their
1911 operands are. */
1912 return;
1913
1914 default:
1915 break;
1916 }
1917 /* Fall through. */
1918
1919 case tcc_comparison: /* a comparison expression */
1920 case tcc_unary: /* a unary arithmetic expression */
1921 case tcc_binary: /* a binary arithmetic expression */
1922 case tcc_reference: /* a reference */
1923 case tcc_vl_exp: /* a function call */
1924 TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
1925 for (i = 0; i < len; ++i)
1926 {
1927 tree op = TREE_OPERAND (t, i);
1928 if (op && TREE_SIDE_EFFECTS (op))
1929 TREE_SIDE_EFFECTS (t) = 1;
1930 }
1931 break;
1932
1933 case tcc_constant:
1934 /* No side-effects. */
1935 return;
1936
1937 default:
1938 gcc_unreachable ();
1939 }
1940 }
1941
1942 /* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
1943 node *EXPR_P.
1944
1945 compound_lval
1946 : min_lval '[' val ']'
1947 | min_lval '.' ID
1948 | compound_lval '[' val ']'
1949 | compound_lval '.' ID
1950
1951 This is not part of the original SIMPLE definition, which separates
1952 array and member references, but it seems reasonable to handle them
1953 together. Also, this way we don't run into problems with union
1954 aliasing; gcc requires that for accesses through a union to alias, the
1955 union reference must be explicit, which was not always the case when we
1956 were splitting up array and member refs.
1957
1958 PRE_P points to the sequence where side effects that must happen before
1959 *EXPR_P should be stored.
1960
1961 POST_P points to the sequence where side effects that must happen after
1962 *EXPR_P should be stored. */
1963
1964 static enum gimplify_status
1965 gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
1966 fallback_t fallback)
1967 {
1968 tree *p;
1969 enum gimplify_status ret = GS_ALL_DONE, tret;
1970 int i;
1971 location_t loc = EXPR_LOCATION (*expr_p);
1972 tree expr = *expr_p;
1973
1974 /* Create a stack of the subexpressions so later we can walk them in
1975 order from inner to outer. */
1976 auto_vec<tree, 10> expr_stack;
1977
1978 /* We can handle anything that get_inner_reference can deal with. */
1979 for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
1980 {
1981 restart:
1982 /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs. */
1983 if (TREE_CODE (*p) == INDIRECT_REF)
1984 *p = fold_indirect_ref_loc (loc, *p);
1985
1986 if (handled_component_p (*p))
1987 ;
1988 /* Expand DECL_VALUE_EXPR now. In some cases that may expose
1989 additional COMPONENT_REFs. */
1990 else if ((TREE_CODE (*p) == VAR_DECL || TREE_CODE (*p) == PARM_DECL)
1991 && gimplify_var_or_parm_decl (p) == GS_OK)
1992 goto restart;
1993 else
1994 break;
1995
1996 expr_stack.safe_push (*p);
1997 }
1998
1999 gcc_assert (expr_stack.length ());
2000
2001 /* Now EXPR_STACK is a stack of pointers to all the refs we've
2002 walked through and P points to the innermost expression.
2003
2004 Java requires that we elaborate nodes in source order. That
2005 means we must gimplify the inner expression followed by each of
2006 the indices, in order. But we can't gimplify the inner
2007 expression until we deal with any variable bounds, sizes, or
2008 positions in order to deal with PLACEHOLDER_EXPRs.
2009
2010 So we do this in three steps. First we deal with the annotations
2011 for any variables in the components, then we gimplify the base,
2012 then we gimplify any indices, from left to right. */
2013 for (i = expr_stack.length () - 1; i >= 0; i--)
2014 {
2015 tree t = expr_stack[i];
2016
2017 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
2018 {
2019 /* Gimplify the low bound and element type size and put them into
2020 the ARRAY_REF. If these values are set, they have already been
2021 gimplified. */
2022 if (TREE_OPERAND (t, 2) == NULL_TREE)
2023 {
2024 tree low = unshare_expr (array_ref_low_bound (t));
2025 if (!is_gimple_min_invariant (low))
2026 {
2027 TREE_OPERAND (t, 2) = low;
2028 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
2029 post_p, is_gimple_reg,
2030 fb_rvalue);
2031 ret = MIN (ret, tret);
2032 }
2033 }
2034 else
2035 {
2036 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
2037 is_gimple_reg, fb_rvalue);
2038 ret = MIN (ret, tret);
2039 }
2040
2041 if (TREE_OPERAND (t, 3) == NULL_TREE)
2042 {
2043 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
2044 tree elmt_size = unshare_expr (array_ref_element_size (t));
2045 tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));
2046
2047 /* Divide the element size by the alignment of the element
2048 type (above). */
2049 elmt_size
2050 = size_binop_loc (loc, EXACT_DIV_EXPR, elmt_size, factor);
2051
2052 if (!is_gimple_min_invariant (elmt_size))
2053 {
2054 TREE_OPERAND (t, 3) = elmt_size;
2055 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
2056 post_p, is_gimple_reg,
2057 fb_rvalue);
2058 ret = MIN (ret, tret);
2059 }
2060 }
2061 else
2062 {
2063 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
2064 is_gimple_reg, fb_rvalue);
2065 ret = MIN (ret, tret);
2066 }
2067 }
2068 else if (TREE_CODE (t) == COMPONENT_REF)
2069 {
2070 /* Set the field offset into T and gimplify it. */
2071 if (TREE_OPERAND (t, 2) == NULL_TREE)
2072 {
2073 tree offset = unshare_expr (component_ref_field_offset (t));
2074 tree field = TREE_OPERAND (t, 1);
2075 tree factor
2076 = size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);
2077
2078 /* Divide the offset by its alignment. */
2079 offset = size_binop_loc (loc, EXACT_DIV_EXPR, offset, factor);
2080
2081 if (!is_gimple_min_invariant (offset))
2082 {
2083 TREE_OPERAND (t, 2) = offset;
2084 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
2085 post_p, is_gimple_reg,
2086 fb_rvalue);
2087 ret = MIN (ret, tret);
2088 }
2089 }
2090 else
2091 {
2092 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
2093 is_gimple_reg, fb_rvalue);
2094 ret = MIN (ret, tret);
2095 }
2096 }
2097 }
2098
2099 /* Step 2 is to gimplify the base expression. Make sure lvalue is set
2100 so as to match the min_lval predicate. Failure to do so may result
2101 in the creation of large aggregate temporaries. */
2102 tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
2103 fallback | fb_lvalue);
2104 ret = MIN (ret, tret);
2105
2106 /* And finally, the indices and operands of ARRAY_REF. During this
2107 loop we also remove any useless conversions. */
2108 for (; expr_stack.length () > 0; )
2109 {
2110 tree t = expr_stack.pop ();
2111
2112 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
2113 {
2114 /* Gimplify the dimension. */
2115 if (!is_gimple_min_invariant (TREE_OPERAND (t, 1)))
2116 {
2117 tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
2118 is_gimple_val, fb_rvalue);
2119 ret = MIN (ret, tret);
2120 }
2121 }
2122
2123 STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));
2124
2125 /* The innermost expression P may have originally had
2126 TREE_SIDE_EFFECTS set which would have caused all the outer
2127 expressions in *EXPR_P leading to P to also have had
2128 TREE_SIDE_EFFECTS set. */
2129 recalculate_side_effects (t);
2130 }
2131
2132 /* If the outermost expression is a COMPONENT_REF, canonicalize its type. */
2133 if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
2134 {
2135 canonicalize_component_ref (expr_p);
2136 }
2137
2138 expr_stack.release ();
2139
2140 gcc_assert (*expr_p == expr || ret != GS_ALL_DONE);
2141
2142 return ret;
2143 }
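/* Worked example for the three steps above (hypothetical source):
   for the reference

     s.a[i].b

   EXPR_STACK holds the COMPONENT_REF for ".b", the ARRAY_REF for
   "[i]" and the COMPONENT_REF for ".a", and P points at the decl
   "s".  Step 1 gimplifies any variable bounds, sizes and offsets,
   step 2 gimplifies "s" as an lvalue, and step 3 gimplifies the
   index "i" (left to right when there are several indices).  */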
2144
2145 /* Gimplify the self modifying expression pointed to by EXPR_P
2146 (++, --, +=, -=).
2147
2148 PRE_P points to the list where side effects that must happen before
2149 *EXPR_P should be stored.
2150
2151 POST_P points to the list where side effects that must happen after
2152 *EXPR_P should be stored.
2153
2154 WANT_VALUE is nonzero iff we want to use the value of this expression
2155 in another expression.
2156
2157 ARITH_TYPE is the type the computation should be performed in. */
2158
2159 enum gimplify_status
2160 gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
2161 bool want_value, tree arith_type)
2162 {
2163 enum tree_code code;
2164 tree lhs, lvalue, rhs, t1;
2165 gimple_seq post = NULL, *orig_post_p = post_p;
2166 bool postfix;
2167 enum tree_code arith_code;
2168 enum gimplify_status ret;
2169 location_t loc = EXPR_LOCATION (*expr_p);
2170
2171 code = TREE_CODE (*expr_p);
2172
2173 gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
2174 || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);
2175
2176 /* Prefix or postfix? */
2177 if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
2178 /* Faster to treat as prefix if result is not used. */
2179 postfix = want_value;
2180 else
2181 postfix = false;
2182
2183 /* For postfix, make sure the inner expression's post side effects
2184 are executed after side effects from this expression. */
2185 if (postfix)
2186 post_p = &post;
2187
2188 /* Add or subtract? */
2189 if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
2190 arith_code = PLUS_EXPR;
2191 else
2192 arith_code = MINUS_EXPR;
2193
2194 /* Gimplify the LHS into a GIMPLE lvalue. */
2195 lvalue = TREE_OPERAND (*expr_p, 0);
2196 ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
2197 if (ret == GS_ERROR)
2198 return ret;
2199
2200 /* Extract the operands to the arithmetic operation. */
2201 lhs = lvalue;
2202 rhs = TREE_OPERAND (*expr_p, 1);
2203
2204 /* For a postfix operator, we evaluate the LHS to an rvalue and then use
2205 that as the result value and in the postqueue operation. */
2206 if (postfix)
2207 {
2208 ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
2209 if (ret == GS_ERROR)
2210 return ret;
2211
2212 lhs = get_initialized_tmp_var (lhs, pre_p, NULL);
2213 }
2214
2215 /* For pointer increment and decrement, use POINTER_PLUS_EXPR. */
2216 if (POINTER_TYPE_P (TREE_TYPE (lhs)))
2217 {
2218 rhs = convert_to_ptrofftype_loc (loc, rhs);
2219 if (arith_code == MINUS_EXPR)
2220 rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
2221 t1 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (*expr_p), lhs, rhs);
2222 }
2223 else
2224 t1 = fold_convert (TREE_TYPE (*expr_p),
2225 fold_build2 (arith_code, arith_type,
2226 fold_convert (arith_type, lhs),
2227 fold_convert (arith_type, rhs)));
2228
2229 if (postfix)
2230 {
2231 gimplify_assign (lvalue, t1, pre_p);
2232 gimplify_seq_add_seq (orig_post_p, post);
2233 *expr_p = lhs;
2234 return GS_ALL_DONE;
2235 }
2236 else
2237 {
2238 *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
2239 return GS_OK;
2240 }
2241 }
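/* Rough illustration (hypothetical source): with WANT_VALUE set,

     y = x++;

   becomes

     t = x;
     x = t + 1;
     y = t;

   whereas "y = ++x;" is rewritten into the MODIFY_EXPR form and
   ends up as "x = x + 1; y = x;".  */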
2242
2243 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
2244
2245 static void
2246 maybe_with_size_expr (tree *expr_p)
2247 {
2248 tree expr = *expr_p;
2249 tree type = TREE_TYPE (expr);
2250 tree size;
2251
2252 /* If we've already wrapped this or the type is error_mark_node, we can't do
2253 anything. */
2254 if (TREE_CODE (expr) == WITH_SIZE_EXPR
2255 || type == error_mark_node)
2256 return;
2257
2258 /* If the size isn't known or is a constant, we have nothing to do. */
2259 size = TYPE_SIZE_UNIT (type);
2260 if (!size || TREE_CODE (size) == INTEGER_CST)
2261 return;
2262
2263 /* Otherwise, make a WITH_SIZE_EXPR. */
2264 size = unshare_expr (size);
2265 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
2266 *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
2267 }
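/* Illustrative sketch (hypothetical source): for a variable-length
   array

     char buf[n];

   TYPE_SIZE_UNIT of the type "char[n]" is not an INTEGER_CST, so a
   use of "buf" as an rvalue is wrapped roughly as
   WITH_SIZE_EXPR <buf, n>, letting the size accompany the value,
   e.g. for the memcpy expansion in gimplify_modify_expr_to_memcpy
   below.  */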
2268
2269 /* Helper for gimplify_call_expr. Gimplify a single argument *ARG_P.
2270 Store any side-effects in PRE_P. CALL_LOCATION is the location of
2271 the CALL_EXPR. */
2272
2273 enum gimplify_status
2274 gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location)
2275 {
2276 bool (*test) (tree);
2277 fallback_t fb;
2278
2279 /* In general, we allow lvalues for function arguments to avoid
2280 extra overhead of copying large aggregates out of even larger
2281 aggregates into temporaries only to copy the temporaries to
2282 the argument list. Make optimizers happy by pulling out to
2283 temporaries those types that fit in registers. */
2284 if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
2285 test = is_gimple_val, fb = fb_rvalue;
2286 else
2287 {
2288 test = is_gimple_lvalue, fb = fb_either;
2289 /* Also strip a TARGET_EXPR that would force an extra copy. */
2290 if (TREE_CODE (*arg_p) == TARGET_EXPR)
2291 {
2292 tree init = TARGET_EXPR_INITIAL (*arg_p);
2293 if (init
2294 && !VOID_TYPE_P (TREE_TYPE (init)))
2295 *arg_p = init;
2296 }
2297 }
2298
2299 /* If this is a variable sized type, we must remember the size. */
2300 maybe_with_size_expr (arg_p);
2301
2302 /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c. */
2303 /* Make sure arguments have the same location as the function call
2304 itself. */
2305 protected_set_expr_location (*arg_p, call_location);
2306
2307 /* There is a sequence point before a function call. Side effects in
2308 the argument list must occur before the actual call. So, when
2309 gimplifying arguments, force gimplify_expr to use an internal
2310 post queue which is then appended to the end of PRE_P. */
2311 return gimplify_expr (arg_p, pre_p, NULL, test, fb);
2312 }
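/* Illustrative example (hypothetical source): in a call

     g (big_aggregate, i + 1);

   the aggregate argument may be passed as an lvalue with no extra
   copy, while the scalar "i + 1", whose type fits in a register, is
   evaluated into a temporary first.  */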
2313
2314 /* Don't fold inside offloading or taskreg regions: it can break code by
2315 adding decl references that weren't in the source. We'll do it during
2316 omplower pass instead. */
2317
2318 static bool
2319 maybe_fold_stmt (gimple_stmt_iterator *gsi)
2320 {
2321 struct gimplify_omp_ctx *ctx;
2322 for (ctx = gimplify_omp_ctxp; ctx; ctx = ctx->outer_context)
2323 if ((ctx->region_type & (ORT_TARGET | ORT_PARALLEL | ORT_TASK)) != 0)
2324 return false;
2325 return fold_stmt (gsi);
2326 }
2327
2328 /* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
2329 WANT_VALUE is true if the result of the call is desired. */
2330
2331 static enum gimplify_status
2332 gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
2333 {
2334 tree fndecl, parms, p, fnptrtype;
2335 enum gimplify_status ret;
2336 int i, nargs;
2337 gcall *call;
2338 bool builtin_va_start_p = false;
2339 location_t loc = EXPR_LOCATION (*expr_p);
2340
2341 gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);
2342
2343 /* For reliable diagnostics during inlining, it is necessary that
2344 every call_expr be annotated with file and line. */
2345 if (! EXPR_HAS_LOCATION (*expr_p))
2346 SET_EXPR_LOCATION (*expr_p, input_location);
2347
2348 /* Gimplify internal functions created in the FEs. */
2349 if (CALL_EXPR_FN (*expr_p) == NULL_TREE)
2350 {
2351 if (want_value)
2352 return GS_ALL_DONE;
2353
2354 nargs = call_expr_nargs (*expr_p);
2355 enum internal_fn ifn = CALL_EXPR_IFN (*expr_p);
2356 auto_vec<tree> vargs (nargs);
2357
2358 for (i = 0; i < nargs; i++)
2359 {
2360 gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
2361 EXPR_LOCATION (*expr_p));
2362 vargs.quick_push (CALL_EXPR_ARG (*expr_p, i));
2363 }
2364 gimple *call = gimple_build_call_internal_vec (ifn, vargs);
2365 gimplify_seq_add_stmt (pre_p, call);
2366 return GS_ALL_DONE;
2367 }
2368
2369 /* This may be a call to a builtin function.
2370
2371 Builtin function calls may be transformed into different
2372 (and more efficient) builtin function calls under certain
2373 circumstances. Unfortunately, gimplification can muck things
2374 up enough that the builtin expanders are not aware that certain
2375 transformations are still valid.
2376
2377 So we attempt transformation/gimplification of the call before
2378 we gimplify the CALL_EXPR. At this time we do not manage to
2379 transform all calls in the same manner as the expanders do, but
2380 we do transform most of them. */
2381 fndecl = get_callee_fndecl (*expr_p);
2382 if (fndecl
2383 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
2384 switch (DECL_FUNCTION_CODE (fndecl))
2385 {
2386 case BUILT_IN_ALLOCA:
2387 case BUILT_IN_ALLOCA_WITH_ALIGN:
2388 /* If the call has been built for a variable-sized object, then we
2389 want to restore the stack level when the enclosing BIND_EXPR is
2390 exited to reclaim the allocated space; otherwise, we precisely
2391 need to do the opposite and preserve the latest stack level. */
2392 if (CALL_ALLOCA_FOR_VAR_P (*expr_p))
2393 gimplify_ctxp->save_stack = true;
2394 else
2395 gimplify_ctxp->keep_stack = true;
2396 break;
2397
2398 case BUILT_IN_VA_START:
2399 {
2400 builtin_va_start_p = TRUE;
2401 if (call_expr_nargs (*expr_p) < 2)
2402 {
2403 error ("too few arguments to function %<va_start%>");
2404 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
2405 return GS_OK;
2406 }
2407
2408 if (fold_builtin_next_arg (*expr_p, true))
2409 {
2410 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
2411 return GS_OK;
2412 }
2413 break;
2414 }
2415 case BUILT_IN_LINE:
2416 {
2417 *expr_p = build_int_cst (TREE_TYPE (*expr_p),
2418 LOCATION_LINE (EXPR_LOCATION (*expr_p)));
2419 return GS_OK;
2420 }
2421 case BUILT_IN_FILE:
2422 {
2423 const char *locfile = LOCATION_FILE (EXPR_LOCATION (*expr_p));
2424 *expr_p = build_string_literal (strlen (locfile) + 1, locfile);
2425 return GS_OK;
2426 }
2427 case BUILT_IN_FUNCTION:
2428 {
2429 const char *function;
2430 function = IDENTIFIER_POINTER (DECL_NAME (current_function_decl));
2431 *expr_p = build_string_literal (strlen (function) + 1, function);
2432 return GS_OK;
2433 }
2434 default:
2435 ;
2436 }
2437 if (fndecl && DECL_BUILT_IN (fndecl))
2438 {
2439 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
2440 if (new_tree && new_tree != *expr_p)
2441 {
2442 /* There was a transformation of this call which computes the
2443 same value, but in a more efficient way. Return and try
2444 again. */
2445 *expr_p = new_tree;
2446 return GS_OK;
2447 }
2448 }
2449
2450 /* Remember the original function pointer type. */
2451 fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
2452
2453 /* There is a sequence point before the call, so any side effects in
2454 the calling expression must occur before the actual call. Force
2455 gimplify_expr to use an internal post queue. */
2456 ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
2457 is_gimple_call_addr, fb_rvalue);
2458
2459 nargs = call_expr_nargs (*expr_p);
2460
2461 /* Get argument types for verification. */
2462 fndecl = get_callee_fndecl (*expr_p);
2463 parms = NULL_TREE;
2464 if (fndecl)
2465 parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
2466 else
2467 parms = TYPE_ARG_TYPES (TREE_TYPE (fnptrtype));
2468
2469 if (fndecl && DECL_ARGUMENTS (fndecl))
2470 p = DECL_ARGUMENTS (fndecl);
2471 else if (parms)
2472 p = parms;
2473 else
2474 p = NULL_TREE;
2475 for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
2476 ;
2477
2478 /* If the last argument is __builtin_va_arg_pack () and it is not
2479 passed as a named argument, decrease the number of CALL_EXPR
2480 arguments and set instead the CALL_EXPR_VA_ARG_PACK flag. */
2481 if (!p
2482 && i < nargs
2483 && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
2484 {
2485 tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
2486 tree last_arg_fndecl = get_callee_fndecl (last_arg);
2487
2488 if (last_arg_fndecl
2489 && TREE_CODE (last_arg_fndecl) == FUNCTION_DECL
2490 && DECL_BUILT_IN_CLASS (last_arg_fndecl) == BUILT_IN_NORMAL
2491 && DECL_FUNCTION_CODE (last_arg_fndecl) == BUILT_IN_VA_ARG_PACK)
2492 {
2493 tree call = *expr_p;
2494
2495 --nargs;
2496 *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
2497 CALL_EXPR_FN (call),
2498 nargs, CALL_EXPR_ARGP (call));
2499
2500 /* Copy all CALL_EXPR flags, location and block, except
2501 CALL_EXPR_VA_ARG_PACK flag. */
2502 CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
2503 CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
2504 CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
2505 = CALL_EXPR_RETURN_SLOT_OPT (call);
2506 CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
2507 SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));
2508
2509 /* Set CALL_EXPR_VA_ARG_PACK. */
2510 CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
2511 }
2512 }
2513
2514 /* Gimplify the function arguments. */
2515 if (nargs > 0)
2516 {
2517 for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
2518 PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
2519 PUSH_ARGS_REVERSED ? i-- : i++)
2520 {
2521 enum gimplify_status t;
2522
2523 /* Avoid gimplifying the second argument to va_start, which needs to
2524 be the plain PARM_DECL. */
2525 if ((i != 1) || !builtin_va_start_p)
2526 {
2527 t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
2528 EXPR_LOCATION (*expr_p));
2529
2530 if (t == GS_ERROR)
2531 ret = GS_ERROR;
2532 }
2533 }
2534 }
2535
2536 /* Gimplify the static chain. */
2537 if (CALL_EXPR_STATIC_CHAIN (*expr_p))
2538 {
2539 if (fndecl && !DECL_STATIC_CHAIN (fndecl))
2540 CALL_EXPR_STATIC_CHAIN (*expr_p) = NULL;
2541 else
2542 {
2543 enum gimplify_status t;
2544 t = gimplify_arg (&CALL_EXPR_STATIC_CHAIN (*expr_p), pre_p,
2545 EXPR_LOCATION (*expr_p));
2546 if (t == GS_ERROR)
2547 ret = GS_ERROR;
2548 }
2549 }
2550
2551 /* Verify the function result. */
2552 if (want_value && fndecl
2553 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype))))
2554 {
2555 error_at (loc, "using result of function returning %<void%>");
2556 ret = GS_ERROR;
2557 }
2558
2559 /* Try this again in case gimplification exposed something. */
2560 if (ret != GS_ERROR)
2561 {
2562 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
2563
2564 if (new_tree && new_tree != *expr_p)
2565 {
2566 /* There was a transformation of this call which computes the
2567 same value, but in a more efficient way. Return and try
2568 again. */
2569 *expr_p = new_tree;
2570 return GS_OK;
2571 }
2572 }
2573 else
2574 {
2575 *expr_p = error_mark_node;
2576 return GS_ERROR;
2577 }
2578
2579 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
2580 decl. This allows us to eliminate redundant or useless
2581 calls to "const" functions. */
2582 if (TREE_CODE (*expr_p) == CALL_EXPR)
2583 {
2584 int flags = call_expr_flags (*expr_p);
2585 if (flags & (ECF_CONST | ECF_PURE)
2586 /* An infinite loop is considered a side effect. */
2587 && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
2588 TREE_SIDE_EFFECTS (*expr_p) = 0;
2589 }
2590
2591 /* If the value is not needed by the caller, emit a new GIMPLE_CALL
2592 and clear *EXPR_P. Otherwise, leave *EXPR_P in its gimplified
2593 form and delegate the creation of a GIMPLE_CALL to
2594 gimplify_modify_expr. This is always possible because when
2595 WANT_VALUE is true, the caller wants the result of this call into
2596 a temporary, which means that we will emit an INIT_EXPR in
2597 internal_get_tmp_var which will then be handled by
2598 gimplify_modify_expr. */
2599 if (!want_value)
2600 {
2601 /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
2602 have to do is replicate it as a GIMPLE_CALL tuple. */
2603 gimple_stmt_iterator gsi;
2604 call = gimple_build_call_from_tree (*expr_p);
2605 gimple_call_set_fntype (call, TREE_TYPE (fnptrtype));
2606 notice_special_calls (call);
2607 gimplify_seq_add_stmt (pre_p, call);
2608 gsi = gsi_last (*pre_p);
2609 maybe_fold_stmt (&gsi);
2610 *expr_p = NULL_TREE;
2611 }
2612 else
2613 /* Remember the original function type. */
2614 CALL_EXPR_FN (*expr_p) = build1 (NOP_EXPR, fnptrtype,
2615 CALL_EXPR_FN (*expr_p));
2616
2617 return ret;
2618 }
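/* Illustrative example (hypothetical source): because of the
   sequence point before a call, gimplifying

     h (f (), g ());

   flushes each argument's side effects to PRE_P first, roughly

     t1 = f ();
     t2 = g ();
     h (t1, t2);
 */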
2619
2620 /* Handle shortcut semantics in the predicate operand of a COND_EXPR by
2621 rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
2622
2623 TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
2624 condition is true or false, respectively. If null, we should generate
2625 our own to skip over the evaluation of this specific expression.
2626
2627 LOCUS is the source location of the COND_EXPR.
2628
2629 This function is the tree equivalent of do_jump.
2630
2631 shortcut_cond_r should only be called by shortcut_cond_expr. */
2632
2633 static tree
2634 shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
2635 location_t locus)
2636 {
2637 tree local_label = NULL_TREE;
2638 tree t, expr = NULL;
2639
2640 /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
2641 retain the shortcut semantics. Just insert the gotos here;
2642 shortcut_cond_expr will append the real blocks later. */
2643 if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
2644 {
2645 location_t new_locus;
2646
2647 /* Turn if (a && b) into
2648
2649 if (a); else goto no;
2650 if (b) goto yes; else goto no;
2651 (no:) */
2652
2653 if (false_label_p == NULL)
2654 false_label_p = &local_label;
2655
2656 /* Keep the original source location on the first 'if'. */
2657 t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus);
2658 append_to_statement_list (t, &expr);
2659
2660 /* Set the source location of the && on the second 'if'. */
2661 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
2662 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
2663 new_locus);
2664 append_to_statement_list (t, &expr);
2665 }
2666 else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
2667 {
2668 location_t new_locus;
2669
2670 /* Turn if (a || b) into
2671
2672 if (a) goto yes;
2673 if (b) goto yes; else goto no;
2674 (yes:) */
2675
2676 if (true_label_p == NULL)
2677 true_label_p = &local_label;
2678
2679 /* Keep the original source location on the first 'if'. */
2680 t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus);
2681 append_to_statement_list (t, &expr);
2682
2683 /* Set the source location of the || on the second 'if'. */
2684 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
2685 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
2686 new_locus);
2687 append_to_statement_list (t, &expr);
2688 }
2689 else if (TREE_CODE (pred) == COND_EXPR
2690 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 1)))
2691 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 2))))
2692 {
2693 location_t new_locus;
2694
2695 /* As long as we're messing with gotos, turn if (a ? b : c) into
2696 if (a)
2697 if (b) goto yes; else goto no;
2698 else
2699 if (c) goto yes; else goto no;
2700
2701 Don't do this if one of the arms has void type, which can happen
2702 in C++ when the arm is throw. */
2703
2704 /* Keep the original source location on the first 'if'. Set the source
2705 location of the ? on the second 'if'. */
2706 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
2707 expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
2708 shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
2709 false_label_p, locus),
2710 shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
2711 false_label_p, new_locus));
2712 }
2713 else
2714 {
2715 expr = build3 (COND_EXPR, void_type_node, pred,
2716 build_and_jump (true_label_p),
2717 build_and_jump (false_label_p));
2718 SET_EXPR_LOCATION (expr, locus);
2719 }
2720
2721 if (local_label)
2722 {
2723 t = build1 (LABEL_EXPR, void_type_node, local_label);
2724 append_to_statement_list (t, &expr);
2725 }
2726
2727 return expr;
2728 }
2729
2730 /* Given a conditional expression EXPR with short-circuit boolean
2731 predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
2732 predicate apart into the equivalent sequence of conditionals. */
2733
2734 static tree
2735 shortcut_cond_expr (tree expr)
2736 {
2737 tree pred = TREE_OPERAND (expr, 0);
2738 tree then_ = TREE_OPERAND (expr, 1);
2739 tree else_ = TREE_OPERAND (expr, 2);
2740 tree true_label, false_label, end_label, t;
2741 tree *true_label_p;
2742 tree *false_label_p;
2743 bool emit_end, emit_false, jump_over_else;
2744 bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
2745 bool else_se = else_ && TREE_SIDE_EFFECTS (else_);
2746
2747 /* First do simple transformations. */
2748 if (!else_se)
2749 {
2750 /* If there is no 'else', turn
2751 if (a && b) then c
2752 into
2753 if (a) if (b) then c. */
2754 while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
2755 {
2756 /* Keep the original source location on the first 'if'. */
2757 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
2758 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
2759 /* Set the source location of the && on the second 'if'. */
2760 if (EXPR_HAS_LOCATION (pred))
2761 SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
2762 then_ = shortcut_cond_expr (expr);
2763 then_se = then_ && TREE_SIDE_EFFECTS (then_);
2764 pred = TREE_OPERAND (pred, 0);
2765 expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
2766 SET_EXPR_LOCATION (expr, locus);
2767 }
2768 }
2769
2770 if (!then_se)
2771 {
2772 /* If there is no 'then', turn
2773 if (a || b); else d
2774 into
2775 if (a); else if (b); else d. */
2776 while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
2777 {
2778 /* Keep the original source location on the first 'if'. */
2779 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
2780 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
2781 /* Set the source location of the || on the second 'if'. */
2782 if (EXPR_HAS_LOCATION (pred))
2783 SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
2784 else_ = shortcut_cond_expr (expr);
2785 else_se = else_ && TREE_SIDE_EFFECTS (else_);
2786 pred = TREE_OPERAND (pred, 0);
2787 expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
2788 SET_EXPR_LOCATION (expr, locus);
2789 }
2790 }
2791
2792 /* If we're done, great. */
2793 if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
2794 && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
2795 return expr;
2796
2797 /* Otherwise we need to mess with gotos. Change
2798 if (a) c; else d;
2799 to
2800 if (a); else goto no;
2801 c; goto end;
2802 no: d; end:
2803 and recursively gimplify the condition. */
2804
2805 true_label = false_label = end_label = NULL_TREE;
2806
2807 /* If our arms just jump somewhere, hijack those labels so we don't
2808 generate jumps to jumps. */
2809
2810 if (then_
2811 && TREE_CODE (then_) == GOTO_EXPR
2812 && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL)
2813 {
2814 true_label = GOTO_DESTINATION (then_);
2815 then_ = NULL;
2816 then_se = false;
2817 }
2818
2819 if (else_
2820 && TREE_CODE (else_) == GOTO_EXPR
2821 && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL)
2822 {
2823 false_label = GOTO_DESTINATION (else_);
2824 else_ = NULL;
2825 else_se = false;
2826 }
2827
2828 /* If we aren't hijacking a label for the 'then' branch, it falls through. */
2829 if (true_label)
2830 true_label_p = &true_label;
2831 else
2832 true_label_p = NULL;
2833
2834 /* The 'else' branch also needs a label if it contains interesting code. */
2835 if (false_label || else_se)
2836 false_label_p = &false_label;
2837 else
2838 false_label_p = NULL;
2839
2840 /* If there was nothing else in our arms, just forward the label(s). */
2841 if (!then_se && !else_se)
2842 return shortcut_cond_r (pred, true_label_p, false_label_p,
2843 EXPR_LOC_OR_LOC (expr, input_location));
2844
2845 /* If our last subexpression already has a terminal label, reuse it. */
2846 if (else_se)
2847 t = expr_last (else_);
2848 else if (then_se)
2849 t = expr_last (then_);
2850 else
2851 t = NULL;
2852 if (t && TREE_CODE (t) == LABEL_EXPR)
2853 end_label = LABEL_EXPR_LABEL (t);
2854
2855 /* If we don't care about jumping to the 'else' branch, jump to the end
2856 if the condition is false. */
2857 if (!false_label_p)
2858 false_label_p = &end_label;
2859
2860 /* We only want to emit these labels if we aren't hijacking them. */
2861 emit_end = (end_label == NULL_TREE);
2862 emit_false = (false_label == NULL_TREE);
2863
2864 /* We only emit the jump over the else clause if we have to--if the
2865 then clause may fall through. Otherwise we can wind up with a
2866 useless jump and a useless label at the end of gimplified code,
2867 which will cause us to think that this conditional as a whole
2868 falls through even if it doesn't. If we then inline a function
2869 which ends with such a condition, that can cause us to issue an
2870 inappropriate warning about control reaching the end of a
2871 non-void function. */
2872 jump_over_else = block_may_fallthru (then_);
2873
2874 pred = shortcut_cond_r (pred, true_label_p, false_label_p,
2875 EXPR_LOC_OR_LOC (expr, input_location));
2876
2877 expr = NULL;
2878 append_to_statement_list (pred, &expr);
2879
2880 append_to_statement_list (then_, &expr);
2881 if (else_se)
2882 {
2883 if (jump_over_else)
2884 {
2885 tree last = expr_last (expr);
2886 t = build_and_jump (&end_label);
2887 if (EXPR_HAS_LOCATION (last))
2888 SET_EXPR_LOCATION (t, EXPR_LOCATION (last));
2889 append_to_statement_list (t, &expr);
2890 }
2891 if (emit_false)
2892 {
2893 t = build1 (LABEL_EXPR, void_type_node, false_label);
2894 append_to_statement_list (t, &expr);
2895 }
2896 append_to_statement_list (else_, &expr);
2897 }
2898 if (emit_end && end_label)
2899 {
2900 t = build1 (LABEL_EXPR, void_type_node, end_label);
2901 append_to_statement_list (t, &expr);
2902 }
2903
2904 return expr;
2905 }
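/* End-to-end illustration (hypothetical source): combining the
   rewrites above, and with no labels to hijack,

     if (a && b) c; else d;

   becomes roughly

     if (a) ; else goto no;
     if (b) ; else goto no;
     c;
     goto end;
     no: d;
     end: ;
 */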
2906
2907 /* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */
2908
2909 tree
2910 gimple_boolify (tree expr)
2911 {
2912 tree type = TREE_TYPE (expr);
2913 location_t loc = EXPR_LOCATION (expr);
2914
2915 if (TREE_CODE (expr) == NE_EXPR
2916 && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR
2917 && integer_zerop (TREE_OPERAND (expr, 1)))
2918 {
2919 tree call = TREE_OPERAND (expr, 0);
2920 tree fn = get_callee_fndecl (call);
2921
2922 /* For __builtin_expect ((long) (x), y) recurse into x as well
2923 if x is truth_value_p. */
2924 if (fn
2925 && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL
2926 && DECL_FUNCTION_CODE (fn) == BUILT_IN_EXPECT
2927 && call_expr_nargs (call) == 2)
2928 {
2929 tree arg = CALL_EXPR_ARG (call, 0);
2930 if (arg)
2931 {
2932 if (TREE_CODE (arg) == NOP_EXPR
2933 && TREE_TYPE (arg) == TREE_TYPE (call))
2934 arg = TREE_OPERAND (arg, 0);
2935 if (truth_value_p (TREE_CODE (arg)))
2936 {
2937 arg = gimple_boolify (arg);
2938 CALL_EXPR_ARG (call, 0)
2939 = fold_convert_loc (loc, TREE_TYPE (call), arg);
2940 }
2941 }
2942 }
2943 }
2944
2945 switch (TREE_CODE (expr))
2946 {
2947 case TRUTH_AND_EXPR:
2948 case TRUTH_OR_EXPR:
2949 case TRUTH_XOR_EXPR:
2950 case TRUTH_ANDIF_EXPR:
2951 case TRUTH_ORIF_EXPR:
2952 /* Also boolify the arguments of truth exprs. */
2953 TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
2954 /* FALLTHRU */
2955
2956 case TRUTH_NOT_EXPR:
2957 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
2958
2959 /* These expressions always produce boolean results. */
2960 if (TREE_CODE (type) != BOOLEAN_TYPE)
2961 TREE_TYPE (expr) = boolean_type_node;
2962 return expr;
2963
2964 case ANNOTATE_EXPR:
2965 switch ((enum annot_expr_kind) TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)))
2966 {
2967 case annot_expr_ivdep_kind:
2968 case annot_expr_no_vector_kind:
2969 case annot_expr_vector_kind:
2970 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
2971 if (TREE_CODE (type) != BOOLEAN_TYPE)
2972 TREE_TYPE (expr) = boolean_type_node;
2973 return expr;
2974 default:
2975 gcc_unreachable ();
2976 }
2977
2978 default:
2979 if (COMPARISON_CLASS_P (expr))
2980 {
2981 /* These expressions always produce boolean results. */
2982 if (TREE_CODE (type) != BOOLEAN_TYPE)
2983 TREE_TYPE (expr) = boolean_type_node;
2984 return expr;
2985 }
2986 /* Other expressions that get here must have boolean values, but
2987 might need to be converted to the appropriate mode. */
2988 if (TREE_CODE (type) == BOOLEAN_TYPE)
2989 return expr;
2990 return fold_convert_loc (loc, boolean_type_node, expr);
2991 }
2992 }
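/* Illustrative example (hypothetical source): for ints used in a
   boolean context,

     int i, j;
     ... i && j ...

   the operands of the truth expression are boolified recursively and
   the result type becomes boolean_type_node, while a bare "i" falls
   through to the default case and is converted via fold_convert_loc,
   roughly (_Bool) i.  */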
2993
2994 /* Given a conditional expression *EXPR_P without side effects, gimplify
2995 its operands. New statements are inserted to PRE_P. */
2996
2997 static enum gimplify_status
2998 gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
2999 {
3000 tree expr = *expr_p, cond;
3001 enum gimplify_status ret, tret;
3002 enum tree_code code;
3003
3004 cond = gimple_boolify (COND_EXPR_COND (expr));
3005
3006 /* We need to handle && and || specially, as their gimplification
3007 creates a pure COND_EXPR, which would otherwise lead to an infinite cycle. */
3008 code = TREE_CODE (cond);
3009 if (code == TRUTH_ANDIF_EXPR)
3010 TREE_SET_CODE (cond, TRUTH_AND_EXPR);
3011 else if (code == TRUTH_ORIF_EXPR)
3012 TREE_SET_CODE (cond, TRUTH_OR_EXPR);
3013 ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_condexpr, fb_rvalue);
3014 COND_EXPR_COND (*expr_p) = cond;
3015
3016 tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
3017 is_gimple_val, fb_rvalue);
3018 ret = MIN (ret, tret);
3019 tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
3020 is_gimple_val, fb_rvalue);
3021
3022 return MIN (ret, tret);
3023 }
3024
3025 /* Return true if evaluating EXPR could trap.
3026 EXPR is GENERIC, while tree_could_trap_p can be called
3027 only on GIMPLE. */
3028
3029 static bool
3030 generic_expr_could_trap_p (tree expr)
3031 {
3032 unsigned i, n;
3033
3034 if (!expr || is_gimple_val (expr))
3035 return false;
3036
3037 if (!EXPR_P (expr) || tree_could_trap_p (expr))
3038 return true;
3039
3040 n = TREE_OPERAND_LENGTH (expr);
3041 for (i = 0; i < n; i++)
3042 if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
3043 return true;
3044
3045 return false;
3046 }
3047
3048 /* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
3049 into
3050
3051 if (p)                  if (p)
3052   t1 = a;                 a;
3053 else            or      else
3054   t1 = b;                 b;
3055 t1;
3056
3057 The second form is used when *EXPR_P is of type void.
3058
3059 PRE_P points to the list where side effects that must happen before
3060 *EXPR_P should be stored. */
3061
3062 static enum gimplify_status
3063 gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
3064 {
3065 tree expr = *expr_p;
3066 tree type = TREE_TYPE (expr);
3067 location_t loc = EXPR_LOCATION (expr);
3068 tree tmp, arm1, arm2;
3069 enum gimplify_status ret;
3070 tree label_true, label_false, label_cont;
3071 bool have_then_clause_p, have_else_clause_p;
3072 gcond *cond_stmt;
3073 enum tree_code pred_code;
3074 gimple_seq seq = NULL;
3075
3076 /* If this COND_EXPR has a value, copy the values into a temporary within
3077 the arms. */
3078 if (!VOID_TYPE_P (type))
3079 {
3080 tree then_ = TREE_OPERAND (expr, 1), else_ = TREE_OPERAND (expr, 2);
3081 tree result;
3082
3083 /* If either an rvalue is ok or we do not require an lvalue, create the
3084 temporary. But we cannot do that if the type is addressable. */
3085 if (((fallback & fb_rvalue) || !(fallback & fb_lvalue))
3086 && !TREE_ADDRESSABLE (type))
3087 {
3088 if (gimplify_ctxp->allow_rhs_cond_expr
3089 /* If either branch has side effects or could trap, it can't be
3090 evaluated unconditionally. */
3091 && !TREE_SIDE_EFFECTS (then_)
3092 && !generic_expr_could_trap_p (then_)
3093 && !TREE_SIDE_EFFECTS (else_)
3094 && !generic_expr_could_trap_p (else_))
3095 return gimplify_pure_cond_expr (expr_p, pre_p);
3096
3097 tmp = create_tmp_var (type, "iftmp");
3098 result = tmp;
3099 }
3100
3101 /* Otherwise, only create and copy references to the values. */
3102 else
3103 {
3104 type = build_pointer_type (type);
3105
3106 if (!VOID_TYPE_P (TREE_TYPE (then_)))
3107 then_ = build_fold_addr_expr_loc (loc, then_);
3108
3109 if (!VOID_TYPE_P (TREE_TYPE (else_)))
3110 else_ = build_fold_addr_expr_loc (loc, else_);
3111
3112 expr
3113 = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_);
3114
3115 tmp = create_tmp_var (type, "iftmp");
3116 result = build_simple_mem_ref_loc (loc, tmp);
3117 }
3118
3119 /* Build the new then clause, `tmp = then_;'. But don't build the
3120 assignment if the value is void; in C++ an arm can be void when it is a throw. */
3121 if (!VOID_TYPE_P (TREE_TYPE (then_)))
3122 TREE_OPERAND (expr, 1) = build2 (MODIFY_EXPR, type, tmp, then_);
3123
3124 /* Similarly, build the new else clause, `tmp = else_;'. */
3125 if (!VOID_TYPE_P (TREE_TYPE (else_)))
3126 TREE_OPERAND (expr, 2) = build2 (MODIFY_EXPR, type, tmp, else_);
3127
3128 TREE_TYPE (expr) = void_type_node;
3129 recalculate_side_effects (expr);
3130
3131 /* Move the COND_EXPR to the prequeue. */
3132 gimplify_stmt (&expr, pre_p);
3133
3134 *expr_p = result;
3135 return GS_ALL_DONE;
3136 }
3137
3138 /* Remove any COMPOUND_EXPR so the following cases will be caught. */
3139 STRIP_TYPE_NOPS (TREE_OPERAND (expr, 0));
3140 if (TREE_CODE (TREE_OPERAND (expr, 0)) == COMPOUND_EXPR)
3141 gimplify_compound_expr (&TREE_OPERAND (expr, 0), pre_p, true);
3142
3143 /* Make sure the condition has BOOLEAN_TYPE. */
3144 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
3145
3146 /* Break apart && and || conditions. */
3147 if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
3148 || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
3149 {
3150 expr = shortcut_cond_expr (expr);
3151
3152 if (expr != *expr_p)
3153 {
3154 *expr_p = expr;
3155
3156 /* We can't rely on gimplify_expr to re-gimplify the expanded
3157 form properly, as cleanups might cause the target labels to be
3158 wrapped in a TRY_FINALLY_EXPR. To prevent that, we need to
3159 set up a conditional context. */
3160 gimple_push_condition ();
3161 gimplify_stmt (expr_p, &seq);
3162 gimple_pop_condition (pre_p);
3163 gimple_seq_add_seq (pre_p, seq);
3164
3165 return GS_ALL_DONE;
3166 }
3167 }
3168
3169 /* Now do the normal gimplification. */
3170
3171 /* Gimplify condition. */
3172 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL, is_gimple_condexpr,
3173 fb_rvalue);
3174 if (ret == GS_ERROR)
3175 return GS_ERROR;
3176 gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);
3177
3178 gimple_push_condition ();
3179
3180 have_then_clause_p = have_else_clause_p = false;
3181 if (TREE_OPERAND (expr, 1) != NULL
3182 && TREE_CODE (TREE_OPERAND (expr, 1)) == GOTO_EXPR
3183 && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 1))) == LABEL_DECL
3184 && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 1)))
3185 == current_function_decl)
3186 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
3187 have different locations, otherwise we end up with incorrect
3188 location information on the branches. */
3189 && (optimize
3190 || !EXPR_HAS_LOCATION (expr)
3191 || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 1))
3192 || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 1))))
3193 {
3194 label_true = GOTO_DESTINATION (TREE_OPERAND (expr, 1));
3195 have_then_clause_p = true;
3196 }
3197 else
3198 label_true = create_artificial_label (UNKNOWN_LOCATION);
3199 if (TREE_OPERAND (expr, 2) != NULL
3200 && TREE_CODE (TREE_OPERAND (expr, 2)) == GOTO_EXPR
3201 && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 2))) == LABEL_DECL
3202 && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 2)))
3203 == current_function_decl)
3204 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
3205 have different locations, otherwise we end up with incorrect
3206 location information on the branches. */
3207 && (optimize
3208 || !EXPR_HAS_LOCATION (expr)
3209 || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 2))
3210 || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 2))))
3211 {
3212 label_false = GOTO_DESTINATION (TREE_OPERAND (expr, 2));
3213 have_else_clause_p = true;
3214 }
3215 else
3216 label_false = create_artificial_label (UNKNOWN_LOCATION);
3217
3218 gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
3219 &arm2);
3220 cond_stmt = gimple_build_cond (pred_code, arm1, arm2, label_true,
3221 label_false);
3222 gimplify_seq_add_stmt (&seq, cond_stmt);
3223 gimple_stmt_iterator gsi = gsi_last (seq);
3224 maybe_fold_stmt (&gsi);
3225
3226 label_cont = NULL_TREE;
3227 if (!have_then_clause_p)
3228 {
3229 /* For if (...) {} else { code; } put label_true after
3230 the else block. */
3231 if (TREE_OPERAND (expr, 1) == NULL_TREE
3232 && !have_else_clause_p
3233 && TREE_OPERAND (expr, 2) != NULL_TREE)
3234 label_cont = label_true;
3235 else
3236 {
3237 gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
3238 have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
3239 /* For if (...) { code; } else {} or
3240 if (...) { code; } else goto label; or
3241 if (...) { code; return; } else { ... }
3242 label_cont isn't needed. */
3243 if (!have_else_clause_p
3244 && TREE_OPERAND (expr, 2) != NULL_TREE
3245 && gimple_seq_may_fallthru (seq))
3246 {
3247 gimple *g;
3248 label_cont = create_artificial_label (UNKNOWN_LOCATION);
3249
3250 g = gimple_build_goto (label_cont);
3251
3252 /* GIMPLE_COND's are very low level; they have embedded
3253 gotos. This particular embedded goto should not be marked
3254 with the location of the original COND_EXPR, as it would
3255 correspond to the COND_EXPR's condition, not the ELSE or the
3256 THEN arms. To avoid marking it with the wrong location, flag
3257 it as "no location". */
3258 gimple_set_do_not_emit_location (g);
3259
3260 gimplify_seq_add_stmt (&seq, g);
3261 }
3262 }
3263 }
3264 if (!have_else_clause_p)
3265 {
3266 gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
3267 have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
3268 }
3269 if (label_cont)
3270 gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));
3271
3272 gimple_pop_condition (pre_p);
3273 gimple_seq_add_seq (pre_p, seq);
3274
3275 if (ret == GS_ERROR)
3276 ; /* Do nothing. */
3277 else if (have_then_clause_p || have_else_clause_p)
3278 ret = GS_ALL_DONE;
3279 else
3280 {
3281 /* Both arms are empty; replace the COND_EXPR with its predicate. */
3282 expr = TREE_OPERAND (expr, 0);
3283 gimplify_stmt (&expr, pre_p);
3284 }
3285
3286 *expr_p = NULL;
3287 return ret;
3288 }
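/* Worked example (hypothetical source): the value-producing form

     x = p ? a : b;

   goes through the temporary case above and lowers to roughly

     if (p) goto L1; else goto L2;
     L1: iftmp = a; goto L3;
     L2: iftmp = b;
     L3: x = iftmp;
 */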
3289
3290 /* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
3291 to be marked addressable.
3292
3293 We cannot rely on such an expression being directly markable if a temporary
3294 has been created by the gimplification. In this case, we create another
3295 temporary and initialize it with a copy, which will become a store after we
3296 mark it addressable. This can happen if the front-end passed us something
3297 that it could not mark addressable yet, like a Fortran pass-by-reference
3298 parameter (int) floatvar. */
3299
3300 static void
3301 prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p)
3302 {
3303 while (handled_component_p (*expr_p))
3304 expr_p = &TREE_OPERAND (*expr_p, 0);
3305 if (is_gimple_reg (*expr_p))
3306 {
3307 tree var = get_initialized_tmp_var (*expr_p, seq_p, NULL);
3308 DECL_GIMPLE_REG_P (var) = 0;
3309 *expr_p = var;
3310 }
3311 }
3312
3313 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
3314 a call to __builtin_memcpy. */
3315
3316 static enum gimplify_status
3317 gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
3318 gimple_seq *seq_p)
3319 {
3320 tree t, to, to_ptr, from, from_ptr;
3321 gcall *gs;
3322 location_t loc = EXPR_LOCATION (*expr_p);
3323
3324 to = TREE_OPERAND (*expr_p, 0);
3325 from = TREE_OPERAND (*expr_p, 1);
3326
3327 /* Mark the RHS addressable. Beware that it may not be possible to do so
3328 directly if a temporary has been created by the gimplification. */
3329 prepare_gimple_addressable (&from, seq_p);
3330
3331 mark_addressable (from);
3332 from_ptr = build_fold_addr_expr_loc (loc, from);
3333 gimplify_arg (&from_ptr, seq_p, loc);
3334
3335 mark_addressable (to);
3336 to_ptr = build_fold_addr_expr_loc (loc, to);
3337 gimplify_arg (&to_ptr, seq_p, loc);
3338
3339 t = builtin_decl_implicit (BUILT_IN_MEMCPY);
3340
3341 gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);
3342
3343 if (want_value)
3344 {
3345 /* tmp = memcpy() */
3346 t = create_tmp_var (TREE_TYPE (to_ptr));
3347 gimple_call_set_lhs (gs, t);
3348 gimplify_seq_add_stmt (seq_p, gs);
3349
3350 *expr_p = build_simple_mem_ref (t);
3351 return GS_ALL_DONE;
3352 }
3353
3354 gimplify_seq_add_stmt (seq_p, gs);
3355 *expr_p = NULL;
3356 return GS_ALL_DONE;
3357 }
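/* Illustrative sketch (hypothetical source): an aggregate copy whose
   size is only known at run time, e.g. between two objects of type
   "char[n]", is emitted as roughly

     __builtin_memcpy (&to, &from, size);

   and if the value is wanted, the call's returned pointer T is
   reused and *EXPR_P becomes the dereference *T.  */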
3358
3359 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
3360 a call to __builtin_memset. In this case we know that the RHS is
3361 a CONSTRUCTOR with an empty element list. */
3362
3363 static enum gimplify_status
3364 gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
3365 gimple_seq *seq_p)
3366 {
3367 tree t, from, to, to_ptr;
3368 gcall *gs;
3369 location_t loc = EXPR_LOCATION (*expr_p);
3370
3371 /* Assert our assumptions, to abort instead of producing wrong code
3372 silently if they are not met. Beware that the RHS CONSTRUCTOR might
3373 not be immediately exposed. */
3374 from = TREE_OPERAND (*expr_p, 1);
3375 if (TREE_CODE (from) == WITH_SIZE_EXPR)
3376 from = TREE_OPERAND (from, 0);
3377
3378 gcc_assert (TREE_CODE (from) == CONSTRUCTOR
3379 && vec_safe_is_empty (CONSTRUCTOR_ELTS (from)));
3380
3381 /* Now proceed. */
3382 to = TREE_OPERAND (*expr_p, 0);
3383
3384 to_ptr = build_fold_addr_expr_loc (loc, to);
3385 gimplify_arg (&to_ptr, seq_p, loc);
3386 t = builtin_decl_implicit (BUILT_IN_MEMSET);
3387
3388 gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);
3389
3390 if (want_value)
3391 {
3392 /* tmp = memset() */
3393 t = create_tmp_var (TREE_TYPE (to_ptr));
3394 gimple_call_set_lhs (gs, t);
3395 gimplify_seq_add_stmt (seq_p, gs);
3396
3397 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
3398 return GS_ALL_DONE;
3399 }
3400
3401 gimplify_seq_add_stmt (seq_p, gs);
3402 *expr_p = NULL;
3403 return GS_ALL_DONE;
3404 }
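/* Illustrative sketch (hypothetical source): clearing an object with
   an empty CONSTRUCTOR, as in

     struct s x;
     x = (struct s) {};

   can be emitted as roughly

     __builtin_memset (&x, 0, size);

   with SIZE supplied by the caller.  */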
3405
3406 /* A subroutine of gimplify_init_ctor_preeval. Called via walk_tree,
3407 determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
3408 assignment. Return non-null if we detect a potential overlap. */
3409
3410 struct gimplify_init_ctor_preeval_data
3411 {
3412 /* The base decl of the lhs object. May be NULL, in which case we
3413 have to assume the lhs is indirect. */
3414 tree lhs_base_decl;
3415
3416 /* The alias set of the lhs object. */
3417 alias_set_type lhs_alias_set;
3418 };
3419
3420 static tree
3421 gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
3422 {
3423 struct gimplify_init_ctor_preeval_data *data
3424 = (struct gimplify_init_ctor_preeval_data *) xdata;
3425 tree t = *tp;
3426
3427 /* If we find the base object, obviously we have overlap. */
3428 if (data->lhs_base_decl == t)
3429 return t;
3430
3431 /* If the constructor component is indirect, determine if we have a
3432 potential overlap with the lhs. The only bits of information we
3433 have to go on at this point are addressability and alias sets. */
3434 if ((INDIRECT_REF_P (t)
3435 || TREE_CODE (t) == MEM_REF)
3436 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
3437 && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
3438 return t;
3439
3440 /* If the constructor component is a call, determine if it can hide a
3441 potential overlap with the lhs through an INDIRECT_REF like above.
3442 ??? Ugh - this is completely broken. In fact this whole analysis
3443 doesn't look conservative. */
3444 if (TREE_CODE (t) == CALL_EXPR)
3445 {
3446 tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));
3447
3448 for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
3449 if (POINTER_TYPE_P (TREE_VALUE (type))
3450 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
3451 && alias_sets_conflict_p (data->lhs_alias_set,
3452 get_alias_set
3453 (TREE_TYPE (TREE_VALUE (type)))))
3454 return t;
3455 }
3456
3457 if (IS_TYPE_OR_DECL_P (t))
3458 *walk_subtrees = 0;
3459 return NULL;
3460 }
3461
3462 /* A subroutine of gimplify_init_constructor. Pre-evaluate EXPR,
3463 force values that overlap with the lhs (as described by *DATA)
3464 into temporaries. */
3465
3466 static void
3467 gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3468 struct gimplify_init_ctor_preeval_data *data)
3469 {
3470 enum gimplify_status one;
3471
3472 /* If the value is constant, then there's nothing to pre-evaluate. */
3473 if (TREE_CONSTANT (*expr_p))
3474 {
3475 /* Ensure it does not have side effects, it might contain a reference to
3476 the object we're initializing. */
3477 gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
3478 return;
3479 }
3480
3481 /* If the type has non-trivial constructors, we can't pre-evaluate. */
3482 if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
3483 return;
3484
3485 /* Recurse for nested constructors. */
3486 if (TREE_CODE (*expr_p) == CONSTRUCTOR)
3487 {
3488 unsigned HOST_WIDE_INT ix;
3489 constructor_elt *ce;
3490 vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (*expr_p);
3491
3492 FOR_EACH_VEC_SAFE_ELT (v, ix, ce)
3493 gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);
3494
3495 return;
3496 }
3497
3498 /* If this is a variable sized type, we must remember the size. */
3499 maybe_with_size_expr (expr_p);
3500
3501 /* Gimplify the constructor element to something appropriate for the rhs
3502 of a MODIFY_EXPR. Given that we know the LHS is an aggregate, we know
3503 the gimplifier will consider this a store to memory. Doing this
3504 gimplification now means that we won't have to deal with complicated
3505 language-specific trees, nor trees like SAVE_EXPR that can induce
3506 exponential search behavior. */
3507 one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
3508 if (one == GS_ERROR)
3509 {
3510 *expr_p = NULL;
3511 return;
3512 }
3513
3514 /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
3515 with the lhs, since "a = { .x=a }" doesn't make sense. This will
3516 always be true for all scalars, since is_gimple_mem_rhs insists on a
3517 temporary variable for them. */
3518 if (DECL_P (*expr_p))
3519 return;
3520
3521 /* If this is of variable size, we have no choice but to assume it doesn't
3522 overlap since we can't make a temporary for it. */
3523 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
3524 return;
3525
3526 /* Otherwise, we must search for overlap ... */
3527 if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
3528 return;
3529
3530 /* ... and if found, force the value into a temporary. */
3531 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
3532 }
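
/* Illustration (not from the original sources): pre-evaluation matters
   when a constructor element reads the object being initialized. Given
   a C fragment like

     struct S { int x, y; } a = { 1, 2 };
     a = (struct S) { a.y, a.x };

   lowering naively to "a.x = a.y; a.y = a.x;" would let the second
   store observe the already-clobbered a.x. The walk above detects the
   overlap with the lhs base decl and forces the values into temporaries
   first, roughly

     t1 = a.y; t2 = a.x; a.x = t1; a.y = t2;  */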
3533
3534 /* A subroutine of gimplify_init_ctor_eval. Create a loop for
3535 a RANGE_EXPR in a CONSTRUCTOR for an array.
3536
3537 var = lower;
3538 loop_entry:
3539 object[var] = value;
3540 if (var == upper)
3541 goto loop_exit;
3542 var = var + 1;
3543 goto loop_entry;
3544 loop_exit:
3545
3546 We increment var _after_ the loop exit check because we might otherwise
3547 fail if upper == TYPE_MAX_VALUE (type for upper).
3548
3549 Note that we never have to deal with SAVE_EXPRs here, because this has
3550 already been taken care of for us, in gimplify_init_ctor_preeval(). */
3551
3552 static void gimplify_init_ctor_eval (tree, vec<constructor_elt, va_gc> *,
3553 gimple_seq *, bool);
3554
3555 static void
3556 gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
3557 tree value, tree array_elt_type,
3558 gimple_seq *pre_p, bool cleared)
3559 {
3560 tree loop_entry_label, loop_exit_label, fall_thru_label;
3561 tree var, var_type, cref, tmp;
3562
3563 loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
3564 loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
3565 fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);
3566
3567 /* Create and initialize the index variable. */
3568 var_type = TREE_TYPE (upper);
3569 var = create_tmp_var (var_type);
3570 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));
3571
3572 /* Add the loop entry label. */
3573 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));
3574
3575 /* Build the reference. */
3576 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
3577 var, NULL_TREE, NULL_TREE);
3578
3579 /* If the value is a CONSTRUCTOR, call gimplify_init_ctor_eval to do
3580 the store. Otherwise just assign the value to the reference. */
3581
3582 if (TREE_CODE (value) == CONSTRUCTOR)
3583 /* NB we might have to call ourselves recursively through
3584 gimplify_init_ctor_eval if the value is a constructor. */
3585 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
3586 pre_p, cleared);
3587 else
3588 gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));
3589
3590 /* We exit the loop when the index var is equal to the upper bound. */
3591 gimplify_seq_add_stmt (pre_p,
3592 gimple_build_cond (EQ_EXPR, var, upper,
3593 loop_exit_label, fall_thru_label));
3594
3595 gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));
3596
3597 /* Otherwise, increment the index var... */
3598 tmp = build2 (PLUS_EXPR, var_type, var,
3599 fold_convert (var_type, integer_one_node));
3600 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));
3601
3602 /* ...and jump back to the loop entry. */
3603 gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));
3604
3605 /* Add the loop exit label. */
3606 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
3607 }
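
/* Illustrative example: a GNU C designated-range initializer such as

     int a[6] = { [0 ... 5] = v };

   can reach this point as a RANGE_EXPR purpose with lower bound 0 and
   upper bound 5, and is expanded into exactly the loop shape documented
   before the function:

     var = 0;
   loop_entry:
     a[var] = v;
     if (var == 5) goto loop_exit;
     var = var + 1;
     goto loop_entry;
   loop_exit:;  */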
3608
3609 /* Return true if FDECL is a FIELD_DECL with zero size. */
3610
3611 static bool
3612 zero_sized_field_decl (const_tree fdecl)
3613 {
3614 if (TREE_CODE (fdecl) == FIELD_DECL && DECL_SIZE (fdecl)
3615 && integer_zerop (DECL_SIZE (fdecl)))
3616 return true;
3617 return false;
3618 }
3619
3620 /* Return true if TYPE is zero sized. */
3621
3622 static bool
3623 zero_sized_type (const_tree type)
3624 {
3625 if (AGGREGATE_TYPE_P (type) && TYPE_SIZE (type)
3626 && integer_zerop (TYPE_SIZE (type)))
3627 return true;
3628 return false;
3629 }
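
/* Illustrative examples for the two predicates above (GNU C, where
   empty aggregates really have zero size):

     struct E { };                // zero_sized_type (E) is true
     struct F { int tail[0]; };   // FIELD_DECL tail is zero sized

   Stores to such fields transfer no data, which is why callers may skip
   them when the value has no side effects.  */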
3630
3631 /* A subroutine of gimplify_init_constructor. Generate individual
3632 MODIFY_EXPRs for a CONSTRUCTOR. OBJECT is the LHS against which the
3633 assignments should happen. ELTS is the CONSTRUCTOR_ELTS of the
3634 CONSTRUCTOR. CLEARED is true if the entire LHS object has been
3635 zeroed first. */
3636
3637 static void
3638 gimplify_init_ctor_eval (tree object, vec<constructor_elt, va_gc> *elts,
3639 gimple_seq *pre_p, bool cleared)
3640 {
3641 tree array_elt_type = NULL;
3642 unsigned HOST_WIDE_INT ix;
3643 tree purpose, value;
3644
3645 if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
3646 array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));
3647
3648 FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
3649 {
3650 tree cref;
3651
3652 /* NULL values are created above for gimplification errors. */
3653 if (value == NULL)
3654 continue;
3655
3656 if (cleared && initializer_zerop (value))
3657 continue;
3658
3659 /* ??? Here's to hoping the front end fills in all of the indices,
3660 so we don't have to figure out what's missing ourselves. */
3661 gcc_assert (purpose);
3662
3663 /* Skip zero-sized fields, unless value has side-effects. This can
3664 happen with calls to functions returning a zero-sized type, which
3665 we shouldn't discard. As a number of downstream passes don't
3666 expect sets of zero-sized fields, we rely on the gimplification of
3667 the MODIFY_EXPR we make below to drop the assignment statement. */
3668 if (! TREE_SIDE_EFFECTS (value) && zero_sized_field_decl (purpose))
3669 continue;
3670
3671 /* If we have a RANGE_EXPR, we have to build a loop to assign the
3672 whole range. */
3673 if (TREE_CODE (purpose) == RANGE_EXPR)
3674 {
3675 tree lower = TREE_OPERAND (purpose, 0);
3676 tree upper = TREE_OPERAND (purpose, 1);
3677
3678 /* If the lower bound is equal to upper, just treat it as if
3679 upper was the index. */
3680 if (simple_cst_equal (lower, upper))
3681 purpose = upper;
3682 else
3683 {
3684 gimplify_init_ctor_eval_range (object, lower, upper, value,
3685 array_elt_type, pre_p, cleared);
3686 continue;
3687 }
3688 }
3689
3690 if (array_elt_type)
3691 {
3692 /* Do not use bitsizetype for ARRAY_REF indices. */
3693 if (TYPE_DOMAIN (TREE_TYPE (object)))
3694 purpose
3695 = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
3696 purpose);
3697 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
3698 purpose, NULL_TREE, NULL_TREE);
3699 }
3700 else
3701 {
3702 gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
3703 cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
3704 unshare_expr (object), purpose, NULL_TREE);
3705 }
3706
3707 if (TREE_CODE (value) == CONSTRUCTOR
3708 && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
3709 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
3710 pre_p, cleared);
3711 else
3712 {
3713 tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
3714 gimplify_and_add (init, pre_p);
3715 ggc_free (init);
3716 }
3717 }
3718 }
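
/* Illustration: given

     struct P { int x, y; } p = { .x = 1, .y = 2 };

   the loop above emits one INIT_EXPR per constructor element, so the
   aggregate initializer decomposes into the individual stores

     p.x = 1;
     p.y = 2;

   Nested CONSTRUCTOR values recurse into this function, and RANGE_EXPR
   purposes go through gimplify_init_ctor_eval_range.  */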
3719
3720 /* Return the appropriate RHS predicate for this LHS. */
3721
3722 gimple_predicate
3723 rhs_predicate_for (tree lhs)
3724 {
3725 if (is_gimple_reg (lhs))
3726 return is_gimple_reg_rhs_or_call;
3727 else
3728 return is_gimple_mem_rhs_or_call;
3729 }
3730
3731 /* Gimplify a C99 compound literal expression. This just means adding
3732 the DECL_EXPR before the current statement and using its anonymous
3733 decl instead. */
3734
3735 static enum gimplify_status
3736 gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p,
3737 bool (*gimple_test_f) (tree),
3738 fallback_t fallback)
3739 {
3740 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
3741 tree decl = DECL_EXPR_DECL (decl_s);
3742 tree init = DECL_INITIAL (decl);
3743 /* Mark the decl as addressable if the compound literal
3744 expression is addressable now, otherwise it is marked too late
3745 after we gimplify the initialization expression. */
3746 if (TREE_ADDRESSABLE (*expr_p))
3747 TREE_ADDRESSABLE (decl) = 1;
3748 /* Otherwise, if we don't need an lvalue and have a literal, directly
3749 substitute it. Check that it matches the gimple predicate, as
3750 otherwise we'd generate a new temporary, and we might as well just
3751 use the decl we already have. */
3752 else if (!TREE_ADDRESSABLE (decl)
3753 && init
3754 && (fallback & fb_lvalue) == 0
3755 && gimple_test_f (init))
3756 {
3757 *expr_p = init;
3758 return GS_OK;
3759 }
3760
3761 /* Preliminarily mark non-addressed complex variables as eligible
3762 for promotion to gimple registers. We'll transform their uses
3763 as we find them. */
3764 if ((TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE
3765 || TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE)
3766 && !TREE_THIS_VOLATILE (decl)
3767 && !needs_to_live_in_memory (decl))
3768 DECL_GIMPLE_REG_P (decl) = 1;
3769
3770 /* If the decl is not addressable, then it is being used in some
3771 expression or on the right hand side of a statement, and it can
3772 be put into a readonly data section. */
3773 if (!TREE_ADDRESSABLE (decl) && (fallback & fb_lvalue) == 0)
3774 TREE_READONLY (decl) = 1;
3775
3776 /* This decl isn't mentioned in the enclosing block, so add it to the
3777 list of temps. FIXME it seems a bit of a kludge to say that
3778 anonymous artificial vars aren't pushed, but everything else is. */
3779 if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
3780 gimple_add_tmp_var (decl);
3781
3782 gimplify_and_add (decl_s, pre_p);
3783 *expr_p = decl;
3784 return GS_OK;
3785 }
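
/* Illustrative example (the temporary name D.1234 is invented here):
   for the C99 compound literal in

     int *p = (int []) { 1, 2, 3 };

   the function above emits the anonymous decl's DECL_EXPR into *PRE_P
   and substitutes the decl for the literal, conceptually producing

     int D.1234[3] = { 1, 2, 3 };
     int *p = &D.1234[0];  */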
3786
3787 /* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
3788 return a new CONSTRUCTOR if something changed. */
3789
3790 static tree
3791 optimize_compound_literals_in_ctor (tree orig_ctor)
3792 {
3793 tree ctor = orig_ctor;
3794 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (ctor);
3795 unsigned int idx, num = vec_safe_length (elts);
3796
3797 for (idx = 0; idx < num; idx++)
3798 {
3799 tree value = (*elts)[idx].value;
3800 tree newval = value;
3801 if (TREE_CODE (value) == CONSTRUCTOR)
3802 newval = optimize_compound_literals_in_ctor (value);
3803 else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
3804 {
3805 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
3806 tree decl = DECL_EXPR_DECL (decl_s);
3807 tree init = DECL_INITIAL (decl);
3808
3809 if (!TREE_ADDRESSABLE (value)
3810 && !TREE_ADDRESSABLE (decl)
3811 && init
3812 && TREE_CODE (init) == CONSTRUCTOR)
3813 newval = optimize_compound_literals_in_ctor (init);
3814 }
3815 if (newval == value)
3816 continue;
3817
3818 if (ctor == orig_ctor)
3819 {
3820 ctor = copy_node (orig_ctor);
3821 CONSTRUCTOR_ELTS (ctor) = vec_safe_copy (elts);
3822 elts = CONSTRUCTOR_ELTS (ctor);
3823 }
3824 (*elts)[idx].value = newval;
3825 }
3826 return ctor;
3827 }
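
/* For example (illustrative), a compound literal nested in an
   initializer such as

     struct A { int i; };
     struct B { struct A a; } b = { (struct A) { 42 } };

   can be flattened by the routine above into the equivalent

     struct B b = { { 42 } };

   provided neither the literal nor its anonymous decl has its address
   taken, saving a temporary for the inner literal.  */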
3828
3829 /* A subroutine of gimplify_modify_expr. Break out elements of a
3830 CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
3831
3832 Note that we still need to clear any elements that don't have explicit
3833 initializers, so if not all elements are initialized we keep the
3834 original MODIFY_EXPR, we just remove all of the constructor elements.
3835
3836 If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
3837 GS_ERROR if we would have to create a temporary when gimplifying
3838 this constructor. Otherwise, return GS_OK.
3839
3840 If NOTIFY_TEMP_CREATION is false, just do the gimplification. */
3841
3842 static enum gimplify_status
3843 gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3844 bool want_value, bool notify_temp_creation)
3845 {
3846 tree object, ctor, type;
3847 enum gimplify_status ret;
3848 vec<constructor_elt, va_gc> *elts;
3849
3850 gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);
3851
3852 if (!notify_temp_creation)
3853 {
3854 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
3855 is_gimple_lvalue, fb_lvalue);
3856 if (ret == GS_ERROR)
3857 return ret;
3858 }
3859
3860 object = TREE_OPERAND (*expr_p, 0);
3861 ctor = TREE_OPERAND (*expr_p, 1) =
3862 optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
3863 type = TREE_TYPE (ctor);
3864 elts = CONSTRUCTOR_ELTS (ctor);
3865 ret = GS_ALL_DONE;
3866
3867 switch (TREE_CODE (type))
3868 {
3869 case RECORD_TYPE:
3870 case UNION_TYPE:
3871 case QUAL_UNION_TYPE:
3872 case ARRAY_TYPE:
3873 {
3874 struct gimplify_init_ctor_preeval_data preeval_data;
3875 HOST_WIDE_INT num_ctor_elements, num_nonzero_elements;
3876 bool cleared, complete_p, valid_const_initializer;
3877
3878 /* Aggregate types must lower constructors to initialization of
3879 individual elements. The exception is that a CONSTRUCTOR node
3880 with no elements indicates zero-initialization of the whole. */
3881 if (vec_safe_is_empty (elts))
3882 {
3883 if (notify_temp_creation)
3884 return GS_OK;
3885 break;
3886 }
3887
3888 /* Fetch information about the constructor to direct later processing.
3889 We might want to make static versions of it in various cases, and
3890 can only do so if it is known to be a valid constant initializer. */
3891 valid_const_initializer
3892 = categorize_ctor_elements (ctor, &num_nonzero_elements,
3893 &num_ctor_elements, &complete_p);
3894
3895 /* If a const aggregate variable is being initialized, then it
3896 should never be a loss to promote the variable to be static. */
3897 if (valid_const_initializer
3898 && num_nonzero_elements > 1
3899 && TREE_READONLY (object)
3900 && TREE_CODE (object) == VAR_DECL
3901 && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object)))
3902 {
3903 if (notify_temp_creation)
3904 return GS_ERROR;
3905 DECL_INITIAL (object) = ctor;
3906 TREE_STATIC (object) = 1;
3907 if (!DECL_NAME (object))
3908 DECL_NAME (object) = create_tmp_var_name ("C");
3909 walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);
3910
3911 /* ??? C++ doesn't automatically append a .<number> to the
3912 assembler name, and even when it does, it looks at FE private
3913 data structures to figure out what that number should be,
3914 which are not set for this variable. I suppose this is
3915 important for local statics for inline functions, which aren't
3916 "local" in the object file sense. So in order to get a unique
3917 TU-local symbol, we must invoke the lhd version now. */
3918 lhd_set_decl_assembler_name (object);
3919
3920 *expr_p = NULL_TREE;
3921 break;
3922 }
3923
3924 /* If there are "lots" of initialized elements, even discounting
3925 those that are not address constants (and thus *must* be
3926 computed at runtime), then partition the constructor into
3927 constant and non-constant parts. Block copy the constant
3928 parts in, then generate code for the non-constant parts. */
3929 /* TODO. There's code in cp/typeck.c to do this. */
3930
3931 if (int_size_in_bytes (TREE_TYPE (ctor)) < 0)
3932 /* store_constructor will ignore the clearing of variable-sized
3933 objects. Initializers for such objects must explicitly set
3934 every field that needs to be set. */
3935 cleared = false;
3936 else if (!complete_p && !CONSTRUCTOR_NO_CLEARING (ctor))
3937 /* If the constructor isn't complete, clear the whole object
3938 beforehand, unless CONSTRUCTOR_NO_CLEARING is set on it.
3939
3940 ??? This ought not to be needed. For any elements not present
3941 in the initializer, we should simply set them to zero. Except
3942 we'd need to *find* the elements that are not present, and that
3943 requires trickery to avoid quadratic compile-time behavior in
3944 large cases or excessive memory use in small cases. */
3945 cleared = true;
3946 else if (num_ctor_elements - num_nonzero_elements
3947 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
3948 && num_nonzero_elements < num_ctor_elements / 4)
3949 /* If there are "lots" of zeros, it's more efficient to clear
3950 the memory and then set the nonzero elements. */
3951 cleared = true;
3952 else
3953 cleared = false;
3954
3955 /* If there are "lots" of initialized elements, and all of them
3956 are valid address constants, then the entire initializer can
3957 be dropped to memory, and then memcpy'd out. Don't do this
3958 for sparse arrays, though, as it's more efficient to follow
3959 the standard CONSTRUCTOR behavior of memset followed by
3960 individual element initialization. Also don't do this for small
3961 all-zero initializers (which aren't big enough to merit
3962 clearing), and don't try to make bitwise copies of
3963 TREE_ADDRESSABLE types.
3964
3965 We cannot apply this transformation when compiling a chkp static
3966 initializer, because creating the initializer image in memory
3967 would require static initialization of its bounds. That would
3968 trigger another gimplification of a similar initializer, and we
3969 could fall into an infinite loop. */
3970 if (valid_const_initializer
3971 && !(cleared || num_nonzero_elements == 0)
3972 && !TREE_ADDRESSABLE (type)
3973 && (!current_function_decl
3974 || !lookup_attribute ("chkp ctor",
3975 DECL_ATTRIBUTES (current_function_decl))))
3976 {
3977 HOST_WIDE_INT size = int_size_in_bytes (type);
3978 unsigned int align;
3979
3980 /* ??? We can still get unbounded array types, at least
3981 from the C++ front end. This seems wrong, but attempt
3982 to work around it for now. */
3983 if (size < 0)
3984 {
3985 size = int_size_in_bytes (TREE_TYPE (object));
3986 if (size >= 0)
3987 TREE_TYPE (ctor) = type = TREE_TYPE (object);
3988 }
3989
3990 /* Find the maximum alignment we can assume for the object. */
3991 /* ??? Make use of DECL_OFFSET_ALIGN. */
3992 if (DECL_P (object))
3993 align = DECL_ALIGN (object);
3994 else
3995 align = TYPE_ALIGN (type);
3996
3997 /* Do a block move either if the size is so small as to make
3998 each individual move a sub-unit move on average, or if it
3999 is so large as to make individual moves inefficient. */
4000 if (size > 0
4001 && num_nonzero_elements > 1
4002 && (size < num_nonzero_elements
4003 || !can_move_by_pieces (size, align)))
4004 {
4005 if (notify_temp_creation)
4006 return GS_ERROR;
4007
4008 walk_tree (&ctor, force_labels_r, NULL, NULL);
4009 ctor = tree_output_constant_def (ctor);
4010 if (!useless_type_conversion_p (type, TREE_TYPE (ctor)))
4011 ctor = build1 (VIEW_CONVERT_EXPR, type, ctor);
4012 TREE_OPERAND (*expr_p, 1) = ctor;
4013
4014 /* This is no longer an assignment of a CONSTRUCTOR, but
4015 we still may have processing to do on the LHS. So
4016 pretend we didn't do anything here to let that happen. */
4017 return GS_UNHANDLED;
4018 }
4019 }
4020
4021 /* If the target is volatile and we have non-zero elements and more
4022 than one field to assign, initialize the target from a temporary. */
4023 if (TREE_THIS_VOLATILE (object)
4024 && !TREE_ADDRESSABLE (type)
4025 && num_nonzero_elements > 0
4026 && vec_safe_length (elts) > 1)
4027 {
4028 tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type));
4029 TREE_OPERAND (*expr_p, 0) = temp;
4030 *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
4031 *expr_p,
4032 build2 (MODIFY_EXPR, void_type_node,
4033 object, temp));
4034 return GS_OK;
4035 }
4036
4037 if (notify_temp_creation)
4038 return GS_OK;
4039
4040 /* If there are nonzero elements and if needed, pre-evaluate to capture
4041 elements overlapping with the lhs into temporaries. We must do this
4042 before clearing to fetch the values before they are zeroed-out. */
4043 if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR)
4044 {
4045 preeval_data.lhs_base_decl = get_base_address (object);
4046 if (!DECL_P (preeval_data.lhs_base_decl))
4047 preeval_data.lhs_base_decl = NULL;
4048 preeval_data.lhs_alias_set = get_alias_set (object);
4049
4050 gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
4051 pre_p, post_p, &preeval_data);
4052 }
4053
4054 bool ctor_has_side_effects_p
4055 = TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p, 1));
4056
4057 if (cleared)
4058 {
4059 /* Zap the CONSTRUCTOR element list, which simplifies this case.
4060 Note that we still have to gimplify, in order to handle the
4061 case of variable sized types. Avoid shared tree structures. */
4062 CONSTRUCTOR_ELTS (ctor) = NULL;
4063 TREE_SIDE_EFFECTS (ctor) = 0;
4064 object = unshare_expr (object);
4065 gimplify_stmt (expr_p, pre_p);
4066 }
4067
4068 /* If we have not block cleared the object, or if there are nonzero
4069 elements in the constructor, or if the constructor has side effects,
4070 add assignments to the individual scalar fields of the object. */
4071 if (!cleared
4072 || num_nonzero_elements > 0
4073 || ctor_has_side_effects_p)
4074 gimplify_init_ctor_eval (object, elts, pre_p, cleared);
4075
4076 *expr_p = NULL_TREE;
4077 }
4078 break;
4079
4080 case COMPLEX_TYPE:
4081 {
4082 tree r, i;
4083
4084 if (notify_temp_creation)
4085 return GS_OK;
4086
4087 /* Extract the real and imaginary parts out of the ctor. */
4088 gcc_assert (elts->length () == 2);
4089 r = (*elts)[0].value;
4090 i = (*elts)[1].value;
4091 if (r == NULL || i == NULL)
4092 {
4093 tree zero = build_zero_cst (TREE_TYPE (type));
4094 if (r == NULL)
4095 r = zero;
4096 if (i == NULL)
4097 i = zero;
4098 }
4099
4100 /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
4101 represent creation of a complex value. */
4102 if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
4103 {
4104 ctor = build_complex (type, r, i);
4105 TREE_OPERAND (*expr_p, 1) = ctor;
4106 }
4107 else
4108 {
4109 ctor = build2 (COMPLEX_EXPR, type, r, i);
4110 TREE_OPERAND (*expr_p, 1) = ctor;
4111 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
4112 pre_p,
4113 post_p,
4114 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
4115 fb_rvalue);
4116 }
4117 }
4118 break;
4119
4120 case VECTOR_TYPE:
4121 {
4122 unsigned HOST_WIDE_INT ix;
4123 constructor_elt *ce;
4124
4125 if (notify_temp_creation)
4126 return GS_OK;
4127
4128 /* Go ahead and simplify constant constructors to VECTOR_CST. */
4129 if (TREE_CONSTANT (ctor))
4130 {
4131 bool constant_p = true;
4132 tree value;
4133
4134 /* Even when ctor is constant, it might contain non-*_CST
4135 elements, such as addresses or trapping values like
4136 1.0/0.0 - 1.0/0.0. Such expressions don't belong
4137 in VECTOR_CST nodes. */
4138 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
4139 if (!CONSTANT_CLASS_P (value))
4140 {
4141 constant_p = false;
4142 break;
4143 }
4144
4145 if (constant_p)
4146 {
4147 TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
4148 break;
4149 }
4150
4151 TREE_CONSTANT (ctor) = 0;
4152 }
4153
4154 /* Vector types use CONSTRUCTOR all the way through gimple
4155 compilation as a general initializer. */
4156 FOR_EACH_VEC_SAFE_ELT (elts, ix, ce)
4157 {
4158 enum gimplify_status tret;
4159 tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
4160 fb_rvalue);
4161 if (tret == GS_ERROR)
4162 ret = GS_ERROR;
4163 }
4164 if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
4165 TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
4166 }
4167 break;
4168
4169 default:
4170 /* So how did we get a CONSTRUCTOR for a scalar type? */
4171 gcc_unreachable ();
4172 }
4173
4174 if (ret == GS_ERROR)
4175 return GS_ERROR;
4176 else if (want_value)
4177 {
4178 *expr_p = object;
4179 return GS_OK;
4180 }
4181 else
4182 {
4183 /* If we have gimplified both sides of the initializer but have
4184 not emitted an assignment, do so now. */
4185 if (*expr_p)
4186 {
4187 tree lhs = TREE_OPERAND (*expr_p, 0);
4188 tree rhs = TREE_OPERAND (*expr_p, 1);
4189 gassign *init = gimple_build_assign (lhs, rhs);
4190 gimplify_seq_add_stmt (pre_p, init);
4191 *expr_p = NULL;
4192 }
4193
4194 return GS_ALL_DONE;
4195 }
4196 }
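
/* A few illustrative outcomes of the strategy selection above (the
   exact cut-offs depend on CLEAR_RATIO and can_move_by_pieces, so these
   are sketches rather than guarantees):

     const struct S s = { ...all constants... };
       -> s may be promoted to a static with DECL_INITIAL set and no
          runtime stores at all.
     int a[100] = { [3] = 1 };
       -> mostly zeros: block-clear a, then the single store a[3] = 1.
     an initializer dense with address constants
       -> dropped to the constant pool via tree_output_constant_def and
          block copied into the object.  */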
4197
4198 /* Given a pointer value T, return a simplified version of an
4199 indirection through T, or NULL_TREE if no simplification is
4200 possible. This may only be applied to the rhs of an expression.
4201 Note that the resulting type may differ from the pointed-to type,
4202 in the sense that it is still compatible from the langhooks
4203 point of view. */
4204
4205 static tree
4206 gimple_fold_indirect_ref_rhs (tree t)
4207 {
4208 return gimple_fold_indirect_ref (t);
4209 }
4210
4211 /* Subroutine of gimplify_modify_expr to do simplifications of
4212 MODIFY_EXPRs based on the code of the RHS. We loop for as long as
4213 something changes. */
4214
4215 static enum gimplify_status
4216 gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
4217 gimple_seq *pre_p, gimple_seq *post_p,
4218 bool want_value)
4219 {
4220 enum gimplify_status ret = GS_UNHANDLED;
4221 bool changed;
4222
4223 do
4224 {
4225 changed = false;
4226 switch (TREE_CODE (*from_p))
4227 {
4228 case VAR_DECL:
4229 /* If we're assigning from a read-only variable initialized with
4230 a constructor, do the direct assignment from the constructor,
4231 but only if neither source nor target are volatile since this
4232 latter assignment might end up being done on a per-field basis. */
4233 if (DECL_INITIAL (*from_p)
4234 && TREE_READONLY (*from_p)
4235 && !TREE_THIS_VOLATILE (*from_p)
4236 && !TREE_THIS_VOLATILE (*to_p)
4237 && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR)
4238 {
4239 tree old_from = *from_p;
4240 enum gimplify_status subret;
4241
4242 /* Move the constructor into the RHS. */
4243 *from_p = unshare_expr (DECL_INITIAL (*from_p));
4244
4245 /* Let's see if gimplify_init_constructor will need to put
4246 it in memory. */
4247 subret = gimplify_init_constructor (expr_p, NULL, NULL,
4248 false, true);
4249 if (subret == GS_ERROR)
4250 {
4251 /* If so, revert the change. */
4252 *from_p = old_from;
4253 }
4254 else
4255 {
4256 ret = GS_OK;
4257 changed = true;
4258 }
4259 }
4260 break;
4261 case INDIRECT_REF:
4262 {
4263 /* If we have code like
4264
4265 *(const A*)(A*)&x
4266
4267 where the type of "x" is a (possibly cv-qualified variant
4268 of "A"), treat the entire expression as identical to "x".
4269 This kind of code arises in C++ when an object is bound
4270 to a const reference, and if "x" is a TARGET_EXPR we want
4271 to take advantage of the optimization below. */
4272 bool volatile_p = TREE_THIS_VOLATILE (*from_p);
4273 tree t = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
4274 if (t)
4275 {
4276 if (TREE_THIS_VOLATILE (t) != volatile_p)
4277 {
4278 if (DECL_P (t))
4279 t = build_simple_mem_ref_loc (EXPR_LOCATION (*from_p),
4280 build_fold_addr_expr (t));
4281 if (REFERENCE_CLASS_P (t))
4282 TREE_THIS_VOLATILE (t) = volatile_p;
4283 }
4284 *from_p = t;
4285 ret = GS_OK;
4286 changed = true;
4287 }
4288 break;
4289 }
4290
4291 case TARGET_EXPR:
4292 {
4293 /* If we are initializing something from a TARGET_EXPR, strip the
4294 TARGET_EXPR and initialize it directly, if possible. This can't
4295 be done if the initializer is void, since that implies that the
4296 temporary is set in some non-trivial way.
4297
4298 ??? What about code that pulls out the temp and uses it
4299 elsewhere? I think that such code never uses the TARGET_EXPR as
4300 an initializer. If I'm wrong, we'll die because the temp won't
4301 have any RTL. In that case, I guess we'll need to replace
4302 references somehow. */
4303 tree init = TARGET_EXPR_INITIAL (*from_p);
4304
4305 if (init
4306 && !VOID_TYPE_P (TREE_TYPE (init)))
4307 {
4308 *from_p = init;
4309 ret = GS_OK;
4310 changed = true;
4311 }
4312 }
4313 break;
4314
4315 case COMPOUND_EXPR:
4316 /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
4317 caught. */
4318 gimplify_compound_expr (from_p, pre_p, true);
4319 ret = GS_OK;
4320 changed = true;
4321 break;
4322
4323 case CONSTRUCTOR:
4324 /* If we already made some changes, let the front end have a
4325 crack at this before we break it down. */
4326 if (ret != GS_UNHANDLED)
4327 break;
4328 /* If we're initializing from a CONSTRUCTOR, break this into
4329 individual MODIFY_EXPRs. */
4330 return gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
4331 false);
4332
4333 case COND_EXPR:
4334 /* If we're assigning to a non-register type, push the assignment
4335 down into the branches. This is mandatory for ADDRESSABLE types,
4336 since we cannot generate temporaries for such, but it saves a
4337 copy in other cases as well. */
4338 if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
4339 {
4340 /* This code should mirror the code in gimplify_cond_expr. */
4341 enum tree_code code = TREE_CODE (*expr_p);
4342 tree cond = *from_p;
4343 tree result = *to_p;
4344
4345 ret = gimplify_expr (&result, pre_p, post_p,
4346 is_gimple_lvalue, fb_lvalue);
4347 if (ret != GS_ERROR)
4348 ret = GS_OK;
4349
4350 if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
4351 TREE_OPERAND (cond, 1)
4352 = build2 (code, void_type_node, result,
4353 TREE_OPERAND (cond, 1));
4354 if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
4355 TREE_OPERAND (cond, 2)
4356 = build2 (code, void_type_node, unshare_expr (result),
4357 TREE_OPERAND (cond, 2));
4358
4359 TREE_TYPE (cond) = void_type_node;
4360 recalculate_side_effects (cond);
4361
4362 if (want_value)
4363 {
4364 gimplify_and_add (cond, pre_p);
4365 *expr_p = unshare_expr (result);
4366 }
4367 else
4368 *expr_p = cond;
4369 return ret;
4370 }
4371 break;
4372
4373 case CALL_EXPR:
4374 /* For calls that return in memory, give *to_p as the CALL_EXPR's
4375 return slot so that we don't generate a temporary. */
4376 if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
4377 && aggregate_value_p (*from_p, *from_p))
4378 {
4379 bool use_target;
4380
4381 if (!(rhs_predicate_for (*to_p))(*from_p))
4382 /* If we need a temporary, *to_p isn't accurate. */
4383 use_target = false;
4384 /* It's OK to use the return slot directly unless it's an NRV. */
4385 else if (TREE_CODE (*to_p) == RESULT_DECL
4386 && DECL_NAME (*to_p) == NULL_TREE
4387 && needs_to_live_in_memory (*to_p))
4388 use_target = true;
4389 else if (is_gimple_reg_type (TREE_TYPE (*to_p))
4390 || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
4391 /* Don't force regs into memory. */
4392 use_target = false;
4393 else if (TREE_CODE (*expr_p) == INIT_EXPR)
4394 /* It's OK to use the target directly if it's being
4395 initialized. */
4396 use_target = true;
4397 else if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (*to_p)))
4398 != INTEGER_CST)
4399 /* Always use the target and thus RSO for variable-sized types.
4400 GIMPLE cannot deal with a variable-sized assignment
4401 embedded in a call statement. */
4402 use_target = true;
4403 else if (TREE_CODE (*to_p) != SSA_NAME
4404 && (!is_gimple_variable (*to_p)
4405 || needs_to_live_in_memory (*to_p)))
4406 /* Don't use the original target if it's already addressable;
4407 if its address escapes, and the called function uses the
4408 NRV optimization, a conforming program could see *to_p
4409 change before the called function returns; see c++/19317.
4410 When optimizing, the return_slot pass marks more functions
4411 as safe after we have escape info. */
4412 use_target = false;
4413 else
4414 use_target = true;
4415
4416 if (use_target)
4417 {
4418 CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
4419 mark_addressable (*to_p);
4420 }
4421 }
4422 break;
4423
4424 case WITH_SIZE_EXPR:
4425 /* Likewise for calls that return an aggregate of non-constant size,
4426 since we would not be able to generate a temporary at all. */
4427 if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR)
4428 {
4429 *from_p = TREE_OPERAND (*from_p, 0);
4430 /* We don't change ret in this case because the
4431 WITH_SIZE_EXPR might have been added in
4432 gimplify_modify_expr, so returning GS_OK would lead to an
4433 infinite loop. */
4434 changed = true;
4435 }
4436 break;
4437
4438 /* If we're initializing from a container, push the initialization
4439 inside it. */
4440 case CLEANUP_POINT_EXPR:
4441 case BIND_EXPR:
4442 case STATEMENT_LIST:
4443 {
4444 tree wrap = *from_p;
4445 tree t;
4446
4447 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
4448 fb_lvalue);
4449 if (ret != GS_ERROR)
4450 ret = GS_OK;
4451
4452 t = voidify_wrapper_expr (wrap, *expr_p);
4453 gcc_assert (t == *expr_p);
4454
4455 if (want_value)
4456 {
4457 gimplify_and_add (wrap, pre_p);
4458 *expr_p = unshare_expr (*to_p);
4459 }
4460 else
4461 *expr_p = wrap;
4462 return GS_OK;
4463 }
4464
4465 case COMPOUND_LITERAL_EXPR:
4466 {
4467 tree complit = TREE_OPERAND (*expr_p, 1);
4468 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
4469 tree decl = DECL_EXPR_DECL (decl_s);
4470 tree init = DECL_INITIAL (decl);
4471
4472 /* struct T x = (struct T) { 0, 1, 2 } can be optimized
4473 into struct T x = { 0, 1, 2 } if the address of the
4474 compound literal has never been taken. */
4475 if (!TREE_ADDRESSABLE (complit)
4476 && !TREE_ADDRESSABLE (decl)
4477 && init)
4478 {
4479 *expr_p = copy_node (*expr_p);
4480 TREE_OPERAND (*expr_p, 1) = init;
4481 return GS_OK;
4482 }
4483 }
4484
4485 default:
4486 break;
4487 }
4488 }
4489 while (changed);
4490
4491 return ret;
4492 }
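
/* Two illustrative rewrites performed by the loop above:

     TARGET_EXPR: a = TARGET_EXPR <t, build_it ()>
       -> a = build_it ();   // initialize a directly, skipping temp t

     COND_EXPR with a non-register type: a = c ? x : y
       -> if (c) a = x; else a = y;   // assignment pushed into branches

   The do/while keeps iterating as long as some case simplified the RHS,
   so chains of these forms collapse in one call.  */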
4493
4494
4495 /* Return true if T looks like a valid GIMPLE statement. */
4496
4497 static bool
4498 is_gimple_stmt (tree t)
4499 {
4500 const enum tree_code code = TREE_CODE (t);
4501
4502 switch (code)
4503 {
4504 case NOP_EXPR:
4505 /* The only valid NOP_EXPR is the empty statement. */
4506 return IS_EMPTY_STMT (t);
4507
4508 case BIND_EXPR:
4509 case COND_EXPR:
4510 /* These are only valid if they're void. */
4511 return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));
4512
4513 case SWITCH_EXPR:
4514 case GOTO_EXPR:
4515 case RETURN_EXPR:
4516 case LABEL_EXPR:
4517 case CASE_LABEL_EXPR:
4518 case TRY_CATCH_EXPR:
4519 case TRY_FINALLY_EXPR:
4520 case EH_FILTER_EXPR:
4521 case CATCH_EXPR:
4522 case ASM_EXPR:
4523 case STATEMENT_LIST:
4524 case OACC_PARALLEL:
4525 case OACC_KERNELS:
4526 case OACC_DATA:
4527 case OACC_HOST_DATA:
4528 case OACC_DECLARE:
4529 case OACC_UPDATE:
4530 case OACC_ENTER_DATA:
4531 case OACC_EXIT_DATA:
4532 case OACC_CACHE:
4533 case OMP_PARALLEL:
4534 case OMP_FOR:
4535 case OMP_SIMD:
4536 case CILK_SIMD:
4537 case OMP_DISTRIBUTE:
4538 case OACC_LOOP:
4539 case OMP_SECTIONS:
4540 case OMP_SECTION:
4541 case OMP_SINGLE:
4542 case OMP_MASTER:
4543 case OMP_TASKGROUP:
4544 case OMP_ORDERED:
4545 case OMP_CRITICAL:
4546 case OMP_TASK:
4547 case OMP_TARGET:
4548 case OMP_TARGET_DATA:
4549 case OMP_TARGET_UPDATE:
4550 case OMP_TARGET_ENTER_DATA:
4551 case OMP_TARGET_EXIT_DATA:
4552 case OMP_TASKLOOP:
4553 case OMP_TEAMS:
4554 /* These are always void. */
4555 return true;
4556
4557 case CALL_EXPR:
4558 case MODIFY_EXPR:
4559 case PREDICT_EXPR:
4560 /* These are valid regardless of their type. */
4561 return true;
4562
4563 default:
4564 return false;
4565 }
4566 }
4567
4568
4569 /* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
4570 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with
4571 DECL_GIMPLE_REG_P set.
4572
4573 IMPORTANT NOTE: This promotion is performed by introducing a load of the
4574 other, unmodified part of the complex object just before the total store.
4575 As a consequence, if the object is still uninitialized, an undefined value
4576 will be loaded into a register, which may result in a spurious exception
4577 if the register is floating-point and the value happens to be a signaling
4578 NaN for example. Then the fully-fledged complex operations lowering pass
4579 followed by a DCE pass are necessary in order to fix things up. */
4580
4581 static enum gimplify_status
4582 gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
4583 bool want_value)
4584 {
4585 enum tree_code code, ocode;
4586 tree lhs, rhs, new_rhs, other, realpart, imagpart;
4587
4588 lhs = TREE_OPERAND (*expr_p, 0);
4589 rhs = TREE_OPERAND (*expr_p, 1);
4590 code = TREE_CODE (lhs);
4591 lhs = TREE_OPERAND (lhs, 0);
4592
4593 ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
4594 other = build1 (ocode, TREE_TYPE (rhs), lhs);
4595 TREE_NO_WARNING (other) = 1;
4596 other = get_formal_tmp_var (other, pre_p);
4597
4598 realpart = code == REALPART_EXPR ? rhs : other;
4599 imagpart = code == REALPART_EXPR ? other : rhs;
4600
4601 if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
4602 new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
4603 else
4604 new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
4605
4606 gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
4607 *expr_p = (want_value) ? rhs : NULL_TREE;
4608
4609 return GS_ALL_DONE;
4610 }
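
/* Illustrative effect of the promotion above: for a non-addressable
   _Complex float c with DECL_GIMPLE_REG_P set,

     __real__ c = 1.0f;

   becomes a load of the untouched part followed by a total store,

     t = __imag__ c;
     c = COMPLEX_EXPR <1.0f, t>;

   letting c use real instead of virtual operands, as noted in the
   comment before the function.  */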
4611
4612 /* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
4613
4614 modify_expr
4615 : varname '=' rhs
4616 | '*' ID '=' rhs
4617
4618 PRE_P points to the list where side effects that must happen before
4619 *EXPR_P should be stored.
4620
4621 POST_P points to the list where side effects that must happen after
4622 *EXPR_P should be stored.
4623
4624 WANT_VALUE is nonzero iff we want to use the value of this expression
4625 in another expression. */
4626
4627 static enum gimplify_status
4628 gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
4629 bool want_value)
4630 {
4631 tree *from_p = &TREE_OPERAND (*expr_p, 1);
4632 tree *to_p = &TREE_OPERAND (*expr_p, 0);
4633 enum gimplify_status ret = GS_UNHANDLED;
4634 gimple *assign;
4635 location_t loc = EXPR_LOCATION (*expr_p);
4636 gimple_stmt_iterator gsi;
4637
4638 gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
4639 || TREE_CODE (*expr_p) == INIT_EXPR);
4640
4641 /* Trying to simplify a clobber using normal logic doesn't work,
4642 so handle it here. */
4643 if (TREE_CLOBBER_P (*from_p))
4644 {
4645 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
4646 if (ret == GS_ERROR)
4647 return ret;
4648 gcc_assert (!want_value
4649 && (TREE_CODE (*to_p) == VAR_DECL
4650 || TREE_CODE (*to_p) == MEM_REF));
4651 gimplify_seq_add_stmt (pre_p, gimple_build_assign (*to_p, *from_p));
4652 *expr_p = NULL;
4653 return GS_ALL_DONE;
4654 }
4655
4656 /* Insert pointer conversions required by the middle-end that are not
4657 required by the frontend. This fixes middle-end type checking
4658 for, e.g., gcc.dg/redecl-6.c. */
4659 if (POINTER_TYPE_P (TREE_TYPE (*to_p)))
4660 {
4661 STRIP_USELESS_TYPE_CONVERSION (*from_p);
4662 if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
4663 *from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
4664 }
4665
4666 /* See if any simplifications can be done based on what the RHS is. */
4667 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
4668 want_value);
4669 if (ret != GS_UNHANDLED)
4670 return ret;
4671
4672 /* For zero-sized types, only gimplify the left hand side and right hand
4673 side as statements and throw away the assignment. Do this after
4674 gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
4675 types properly. */
4676 if (zero_sized_type (TREE_TYPE (*from_p)) && !want_value)
4677 {
4678 gimplify_stmt (from_p, pre_p);
4679 gimplify_stmt (to_p, pre_p);
4680 *expr_p = NULL_TREE;
4681 return GS_ALL_DONE;
4682 }
4683
4684 /* If the value being copied is of variable width, compute the length
4685 of the copy into a WITH_SIZE_EXPR. Note that we need to do this
4686 before gimplifying any of the operands so that we can resolve any
4687 PLACEHOLDER_EXPRs in the size. Also note that the RTL expander uses
4688 the size of the expression to be copied, not of the destination, so
4689 that is what we must do here. */
4690 maybe_with_size_expr (from_p);
4691
4692 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
4693 if (ret == GS_ERROR)
4694 return ret;
4695
4696 /* As a special case, we have to temporarily allow for assignments
4697 with a CALL_EXPR on the RHS. Since in GIMPLE a function call is
4698 a toplevel statement, when gimplifying the GENERIC expression
4699 MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
4700 GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.
4701
4702 Instead, we need to create the tuple GIMPLE_CALL <a, foo>. To
4703 prevent gimplify_expr from trying to create a new temporary for
4704 foo's LHS, we tell it that it should only gimplify until it
4705 reaches the CALL_EXPR. On return from gimplify_expr, the newly
4706 created GIMPLE_CALL <foo> will be the last statement in *PRE_P
4707 and all we need to do here is set 'a' to be its LHS. */
4708 ret = gimplify_expr (from_p, pre_p, post_p, rhs_predicate_for (*to_p),
4709 fb_rvalue);
4710 if (ret == GS_ERROR)
4711 return ret;
4712
4713 /* In case of a va_arg internal fn wrapped in a WITH_SIZE_EXPR, add the
4714 type size as an argument to the call. */
4715 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
4716 {
4717 tree call = TREE_OPERAND (*from_p, 0);
4718 tree vlasize = TREE_OPERAND (*from_p, 1);
4719
4720 if (TREE_CODE (call) == CALL_EXPR
4721 && CALL_EXPR_IFN (call) == IFN_VA_ARG)
4722 {
4723 int nargs = call_expr_nargs (call);
4724 tree type = TREE_TYPE (call);
4725 tree ap = CALL_EXPR_ARG (call, 0);
4726 tree tag = CALL_EXPR_ARG (call, 1);
4727 tree aptag = CALL_EXPR_ARG (call, 2);
4728 tree newcall = build_call_expr_internal_loc (EXPR_LOCATION (call),
4729 IFN_VA_ARG, type,
4730 nargs + 1, ap, tag,
4731 aptag, vlasize);
4732 TREE_OPERAND (*from_p, 0) = newcall;
4733 }
4734 }
4735
4736 /* Now see if the above changed *from_p to something we handle specially. */
4737 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
4738 want_value);
4739 if (ret != GS_UNHANDLED)
4740 return ret;
4741
4742 /* If we've got a variable-sized assignment between two lvalues (i.e. one
4743 that does not involve a call), then we can make things a bit more
4744 straightforward by converting the assignment to memcpy or memset. */
4745 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
4746 {
4747 tree from = TREE_OPERAND (*from_p, 0);
4748 tree size = TREE_OPERAND (*from_p, 1);
4749
4750 if (TREE_CODE (from) == CONSTRUCTOR)
4751 return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);
4752
4753 if (is_gimple_addressable (from))
4754 {
4755 *from_p = from;
4756 return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
4757 pre_p);
4758 }
4759 }
4760
4761 /* Transform partial stores to non-addressable complex variables into
4762 total stores. This allows us to use real instead of virtual operands
4763 for these variables, which improves optimization. */
4764 if ((TREE_CODE (*to_p) == REALPART_EXPR
4765 || TREE_CODE (*to_p) == IMAGPART_EXPR)
4766 && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
4767 return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);
4768
4769 /* Try to alleviate the effects of the gimplification creating artificial
4770 temporaries (see for example is_gimple_reg_rhs) on the debug info, but
4771 make sure not to create DECL_DEBUG_EXPR links across functions. */
4772 if (!gimplify_ctxp->into_ssa
4773 && TREE_CODE (*from_p) == VAR_DECL
4774 && DECL_IGNORED_P (*from_p)
4775 && DECL_P (*to_p)
4776 && !DECL_IGNORED_P (*to_p)
4777 && decl_function_context (*to_p) == current_function_decl)
4778 {
4779 if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
4780 DECL_NAME (*from_p)
4781 = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
4782 DECL_HAS_DEBUG_EXPR_P (*from_p) = 1;
4783 SET_DECL_DEBUG_EXPR (*from_p, *to_p);
4784 }
4785
4786 if (want_value && TREE_THIS_VOLATILE (*to_p))
4787 *from_p = get_initialized_tmp_var (*from_p, pre_p, post_p);
4788
4789 if (TREE_CODE (*from_p) == CALL_EXPR)
4790 {
4791 /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
4792 instead of a GIMPLE_ASSIGN. */
4793 gcall *call_stmt;
4794 if (CALL_EXPR_FN (*from_p) == NULL_TREE)
4795 {
4796 /* Gimplify internal functions created in the FEs. */
4797 int nargs = call_expr_nargs (*from_p), i;
4798 enum internal_fn ifn = CALL_EXPR_IFN (*from_p);
4799 auto_vec<tree> vargs (nargs);
4800
4801 for (i = 0; i < nargs; i++)
4802 {
4803 gimplify_arg (&CALL_EXPR_ARG (*from_p, i), pre_p,
4804 EXPR_LOCATION (*from_p));
4805 vargs.quick_push (CALL_EXPR_ARG (*from_p, i));
4806 }
4807 call_stmt = gimple_build_call_internal_vec (ifn, vargs);
4808 gimple_set_location (call_stmt, EXPR_LOCATION (*expr_p));
4809 }
4810 else
4811 {
4812 tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*from_p));
4813 CALL_EXPR_FN (*from_p) = TREE_OPERAND (CALL_EXPR_FN (*from_p), 0);
4814 STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p));
4815 tree fndecl = get_callee_fndecl (*from_p);
4816 if (fndecl
4817 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
4818 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
4819 && call_expr_nargs (*from_p) == 3)
4820 call_stmt = gimple_build_call_internal (IFN_BUILTIN_EXPECT, 3,
4821 CALL_EXPR_ARG (*from_p, 0),
4822 CALL_EXPR_ARG (*from_p, 1),
4823 CALL_EXPR_ARG (*from_p, 2));
4824 else
4825 {
4826 call_stmt = gimple_build_call_from_tree (*from_p);
4827 gimple_call_set_fntype (call_stmt, TREE_TYPE (fnptrtype));
4828 }
4829 }
4830 notice_special_calls (call_stmt);
4831 if (!gimple_call_noreturn_p (call_stmt))
4832 gimple_call_set_lhs (call_stmt, *to_p);
4833 assign = call_stmt;
4834 }
4835 else
4836 {
4837 assign = gimple_build_assign (*to_p, *from_p);
4838 gimple_set_location (assign, EXPR_LOCATION (*expr_p));
4839 }
4840
4841 if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
4842 {
4843 /* We should have got an SSA name from the start. */
4844 gcc_assert (TREE_CODE (*to_p) == SSA_NAME);
4845 }
4846
4847 gimplify_seq_add_stmt (pre_p, assign);
4848 gsi = gsi_last (*pre_p);
4849 maybe_fold_stmt (&gsi);
4850
4851 if (want_value)
4852 {
4853 *expr_p = TREE_THIS_VOLATILE (*to_p) ? *from_p : unshare_expr (*to_p);
4854 return GS_OK;
4855 }
4856 else
4857 *expr_p = NULL;
4858
4859 return GS_ALL_DONE;
4860 }
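
/* Illustration of the CALL_EXPR special case above: since a function
   call is a toplevel statement in GIMPLE, the GENERIC tree

     MODIFY_EXPR <a, CALL_EXPR <foo>>

   is emitted as a single call statement whose LHS is set,

     a = foo ();

   i.e. a GIMPLE_CALL with gimple_call_lhs set to 'a', rather than a
   GIMPLE_ASSIGN containing a call on its RHS.  */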
4861
4862 /* Gimplify a comparison between two variable-sized objects. Do this
4863 with a call to BUILT_IN_MEMCMP. */
4864
4865 static enum gimplify_status
4866 gimplify_variable_sized_compare (tree *expr_p)
4867 {
4868 location_t loc = EXPR_LOCATION (*expr_p);
4869 tree op0 = TREE_OPERAND (*expr_p, 0);
4870 tree op1 = TREE_OPERAND (*expr_p, 1);
4871 tree t, arg, dest, src, expr;
4872
4873 arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
4874 arg = unshare_expr (arg);
4875 arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
4876 src = build_fold_addr_expr_loc (loc, op1);
4877 dest = build_fold_addr_expr_loc (loc, op0);
4878 t = builtin_decl_implicit (BUILT_IN_MEMCMP);
4879 t = build_call_expr_loc (loc, t, 3, dest, src, arg);
4880
4881 expr
4882 = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
4883 SET_EXPR_LOCATION (expr, loc);
4884 *expr_p = expr;
4885
4886 return GS_OK;
4887 }
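
/* Illustrative example: a front end (Ada, for instance) can hand the
   gimplifier an equality test between whole objects of variable size.
   In C-like pseudocode,

     char x[n], y[n];
     ... (x == y) ...

   is rewritten by the function above into

     __builtin_memcmp (&x, &y, n) == 0

   with the size taken from TYPE_SIZE_UNIT of the operand type (after
   substituting any PLACEHOLDER_EXPRs).  */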
4888
4889 /* Gimplify a comparison between two aggregate objects of integral scalar
4890 mode as a comparison between the bitwise equivalent scalar values. */
4891
4892 static enum gimplify_status
4893 gimplify_scalar_mode_aggregate_compare (tree *expr_p)
4894 {
4895 location_t loc = EXPR_LOCATION (*expr_p);
4896 tree op0 = TREE_OPERAND (*expr_p, 0);
4897 tree op1 = TREE_OPERAND (*expr_p, 1);
4898
4899 tree type = TREE_TYPE (op0);
4900 tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
4901
4902 op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
4903 op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);
4904
4905 *expr_p
4906 = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
4907
4908 return GS_OK;
4909 }
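
/* Illustrative example: a 4-byte struct whose TYPE_MODE is an integral
   scalar mode, say

     struct P { short a, b; } x, y;

   can have a whole-object equality test lowered by the function above
   to a comparison of the bit-equivalent scalars,

     VIEW_CONVERT_EXPR<int>(x) == VIEW_CONVERT_EXPR<int>(y)

   where the scalar type really comes from type_for_mode; "int" is only
   an assumption of this sketch.  */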
4910
4911 /* Gimplify an expression sequence. This function gimplifies each
4912 expression and rewrites the original expression with the last
4913 expression of the sequence in GIMPLE form.
4914
4915 PRE_P points to the list where the side effects for all the
4916 expressions in the sequence will be emitted.
4917
4918 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
4919
4920 static enum gimplify_status
4921 gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
4922 {
4923 tree t = *expr_p;
4924
4925 do
4926 {
4927 tree *sub_p = &TREE_OPERAND (t, 0);
4928
4929 if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
4930 gimplify_compound_expr (sub_p, pre_p, false);
4931 else
4932 gimplify_stmt (sub_p, pre_p);
4933
4934 t = TREE_OPERAND (t, 1);
4935 }
4936 while (TREE_CODE (t) == COMPOUND_EXPR);
4937
4938 *expr_p = t;
4939 if (want_value)
4940 return GS_OK;
4941 else
4942 {
4943 gimplify_stmt (expr_p, pre_p);
4944 return GS_ALL_DONE;
4945 }
4946 }
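
/* Illustrative example: gimplifying the comma expression in

     a = (b++, c);

   walks the COMPOUND_EXPR chain above, emitting b = b + 1 into *PRE_P
   as a statement, and leaves only the last operand, so the assignment
   reduces to a = c.  */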
4947
4948 /* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
4949 gimplify. After gimplification, EXPR_P will point to a new temporary
4950 that holds the original value of the SAVE_EXPR node.
4951
4952 PRE_P points to the list where side effects that must happen before
4953 *EXPR_P should be stored. */
4954
4955 static enum gimplify_status
4956 gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4957 {
4958 enum gimplify_status ret = GS_ALL_DONE;
4959 tree val;
4960
4961 gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
4962 val = TREE_OPERAND (*expr_p, 0);
4963
4964 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
4965 if (!SAVE_EXPR_RESOLVED_P (*expr_p))
4966 {
4967 /* The operand may be a void-valued expression, such as the SAVE_EXPRs
4968 generated by the Java frontend for class initialization. It is
4969 being executed only for its side effects. */
4970 if (TREE_TYPE (val) == void_type_node)
4971 {
4972 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
4973 is_gimple_stmt, fb_none);
4974 val = NULL;
4975 }
4976 else
4977 val = get_initialized_tmp_var (val, pre_p, post_p);
4978
4979 TREE_OPERAND (*expr_p, 0) = val;
4980 SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
4981 }
4982
4983 *expr_p = val;
4984
4985 return ret;
4986 }
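
/* Illustrative example: a shared SAVE_EXPR node guarantees single
   evaluation, so a GENERIC tree amounting to

     x = SAVE_EXPR <f ()> + SAVE_EXPR <f ()>   (one shared node)

   is resolved above into a temporary created once and reused:

     t = f ();
     x = t + t;  */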
4987
4988 /* Rewrite the ADDR_EXPR node pointed to by EXPR_P
4989
4990 unary_expr
4991 : ...
4992 | '&' varname
4993 ...
4994
4995 PRE_P points to the list where side effects that must happen before
4996 *EXPR_P should be stored.
4997
4998 POST_P points to the list where side effects that must happen after
4999 *EXPR_P should be stored. */
5000
5001 static enum gimplify_status
5002 gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5003 {
5004 tree expr = *expr_p;
5005 tree op0 = TREE_OPERAND (expr, 0);
5006 enum gimplify_status ret;
5007 location_t loc = EXPR_LOCATION (*expr_p);
5008
5009 switch (TREE_CODE (op0))
5010 {
5011 case INDIRECT_REF:
5012 do_indirect_ref:
5013 /* Check if we are dealing with an expression of the form '&*ptr'.
5014 While the front end folds away '&*ptr' into 'ptr', these
5015 expressions may be generated internally by the compiler (e.g.,
5016 builtins like __builtin_va_end). */
5017 /* Caution: the silent array decomposition semantics we allow for
5018 ADDR_EXPR mean we can't always discard the pair. */
5019 /* Gimplification of the ADDR_EXPR operand may drop
5020 cv-qualification conversions, so make sure we add them if
5021 needed. */
5022 {
5023 tree op00 = TREE_OPERAND (op0, 0);
5024 tree t_expr = TREE_TYPE (expr);
5025 tree t_op00 = TREE_TYPE (op00);
5026
5027 if (!useless_type_conversion_p (t_expr, t_op00))
5028 op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
5029 *expr_p = op00;
5030 ret = GS_OK;
5031 }
5032 break;
5033
5034 case VIEW_CONVERT_EXPR:
5035 /* Take the address of our operand and then convert it to the type of
5036 this ADDR_EXPR.
5037
5038 ??? The interactions of VIEW_CONVERT_EXPR and aliasing are not at
5039 all clear. The impact of this transformation is even less clear. */
5040
5041 /* If the operand is a useless conversion, look through it. Doing so
5042 guarantees that the ADDR_EXPR and its operand will remain of the
5043 same type. */
5044 if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
5045 op0 = TREE_OPERAND (op0, 0);
5046
5047 *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
5048 build_fold_addr_expr_loc (loc,
5049 TREE_OPERAND (op0, 0)));
5050 ret = GS_OK;
5051 break;
5052
5053 case MEM_REF:
5054 if (integer_zerop (TREE_OPERAND (op0, 1)))
5055 goto do_indirect_ref;
5056
5057 /* ... fall through ... */
5058
5059 default:
5060 /* If we see a call to a declared builtin or see its address
5061 being taken (we can unify those cases here) then we can mark
5062 the builtin for implicit generation by GCC. */
5063 if (TREE_CODE (op0) == FUNCTION_DECL
5064 && DECL_BUILT_IN_CLASS (op0) == BUILT_IN_NORMAL
5065 && builtin_decl_declared_p (DECL_FUNCTION_CODE (op0)))
5066 set_builtin_decl_implicit_p (DECL_FUNCTION_CODE (op0), true);
5067
5068 /* We use fb_either here because the C frontend sometimes takes
5069 the address of a call that returns a struct; see
5070 gcc.dg/c99-array-lval-1.c. The gimplifier will correctly make
5071 the implied temporary explicit. */
5072
5073 /* Make the operand addressable. */
5074 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
5075 is_gimple_addressable, fb_either);
5076 if (ret == GS_ERROR)
5077 break;
5078
5079 /* Then mark it. Beware that it may not be possible to do so directly
5080 if a temporary has been created by the gimplification. */
5081 prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p);
5082
5083 op0 = TREE_OPERAND (expr, 0);
5084
5085 /* For various reasons, the gimplification of the expression
5086 may have made a new INDIRECT_REF. */
5087 if (TREE_CODE (op0) == INDIRECT_REF)
5088 goto do_indirect_ref;
5089
5090 mark_addressable (TREE_OPERAND (expr, 0));
5091
5092 /* The FEs may end up building ADDR_EXPRs early on a decl with
5093 an incomplete type. Re-build ADDR_EXPRs in canonical form
5094 here. */
5095 if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr))))
5096 *expr_p = build_fold_addr_expr (op0);
5097
5098 /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly. */
5099 recompute_tree_invariant_for_addr_expr (*expr_p);
5100
5101 /* If we re-built the ADDR_EXPR add a conversion to the original type
5102 if required. */
5103 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
5104 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
5105
5106 break;
5107 }
5108
5109 return ret;
5110 }
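
/* Two illustrative rewrites from the switch above:

     &*p            -> p   (INDIRECT_REF case, with a qualifier
                            conversion added if the types differ)
     &MEM_REF[p, 0] -> p   (MEM_REF with zero offset, via the same
                            do_indirect_ref path)

   Anything else has its operand made addressable, and the ADDR_EXPR is
   rebuilt in canonical form if the FE built it on an incomplete
   type.  */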
5111
5112 /* Gimplify the operands of an ASM_EXPR. Input operands should be a gimple
5113 value; output operands should be a gimple lvalue. */
5114
5115 static enum gimplify_status
5116 gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5117 {
5118 tree expr;
5119 int noutputs;
5120 const char **oconstraints;
5121 int i;
5122 tree link;
5123 const char *constraint;
5124 bool allows_mem, allows_reg, is_inout;
5125 enum gimplify_status ret, tret;
5126 gasm *stmt;
5127 vec<tree, va_gc> *inputs;
5128 vec<tree, va_gc> *outputs;
5129 vec<tree, va_gc> *clobbers;
5130 vec<tree, va_gc> *labels;
5131 tree link_next;
5132
5133 expr = *expr_p;
5134 noutputs = list_length (ASM_OUTPUTS (expr));
5135 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
5136
5137 inputs = NULL;
5138 outputs = NULL;
5139 clobbers = NULL;
5140 labels = NULL;
5141
5142 ret = GS_ALL_DONE;
5143 link_next = NULL_TREE;
5144 for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
5145 {
5146 bool ok;
5147 size_t constraint_len;
5148
5149 link_next = TREE_CHAIN (link);
5150
5151 oconstraints[i]
5152 = constraint
5153 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
5154 constraint_len = strlen (constraint);
5155 if (constraint_len == 0)
5156 continue;
5157
5158 ok = parse_output_constraint (&constraint, i, 0, 0,
5159 &allows_mem, &allows_reg, &is_inout);
5160 if (!ok)
5161 {
5162 ret = GS_ERROR;
5163 is_inout = false;
5164 }
5165
5166 if (!allows_reg && allows_mem)
5167 mark_addressable (TREE_VALUE (link));
5168
5169 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
5170 is_inout ? is_gimple_min_lval : is_gimple_lvalue,
5171 fb_lvalue | fb_mayfail);
5172 if (tret == GS_ERROR)
5173 {
5174 error ("invalid lvalue in asm output %d", i);
5175 ret = tret;
5176 }
5177
5178 vec_safe_push (outputs, link);
5179 TREE_CHAIN (link) = NULL_TREE;
5180
5181 if (is_inout)
5182 {
5183 /* An input/output operand. To give the optimizers more
5184 flexibility, split it into separate input and output
5185 operands. */
5186 tree input;
5187 char buf[10];
5188
5189 /* Turn the in/out constraint into an output constraint. */
5190 char *p = xstrdup (constraint);
5191 p[0] = '=';
5192 TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);
5193
5194 /* And add a matching input constraint. */
5195 if (allows_reg)
5196 {
5197 sprintf (buf, "%d", i);
5198
5199 /* If there are multiple alternatives in the constraint,
5200 handle each of them individually. Those that allow a register
5201 will be replaced with the operand number; the others will stay
5202 unchanged. */
5203 if (strchr (p, ',') != NULL)
5204 {
5205 size_t len = 0, buflen = strlen (buf);
5206 char *beg, *end, *str, *dst;
5207
5208 for (beg = p + 1;;)
5209 {
5210 end = strchr (beg, ',');
5211 if (end == NULL)
5212 end = strchr (beg, '\0');
5213 if ((size_t) (end - beg) < buflen)
5214 len += buflen + 1;
5215 else
5216 len += end - beg + 1;
5217 if (*end)
5218 beg = end + 1;
5219 else
5220 break;
5221 }
5222
5223 str = (char *) alloca (len);
5224 for (beg = p + 1, dst = str;;)
5225 {
5226 const char *tem;
5227 bool mem_p, reg_p, inout_p;
5228
5229 end = strchr (beg, ',');
5230 if (end)
5231 *end = '\0';
5232 beg[-1] = '=';
5233 tem = beg - 1;
5234 parse_output_constraint (&tem, i, 0, 0,
5235 &mem_p, &reg_p, &inout_p);
5236 if (dst != str)
5237 *dst++ = ',';
5238 if (reg_p)
5239 {
5240 memcpy (dst, buf, buflen);
5241 dst += buflen;
5242 }
5243 else
5244 {
5245 if (end)
5246 len = end - beg;
5247 else
5248 len = strlen (beg);
5249 memcpy (dst, beg, len);
5250 dst += len;
5251 }
5252 if (end)
5253 beg = end + 1;
5254 else
5255 break;
5256 }
5257 *dst = '\0';
5258 input = build_string (dst - str, str);
5259 }
5260 else
5261 input = build_string (strlen (buf), buf);
5262 }
5263 else
5264 input = build_string (constraint_len - 1, constraint + 1);
5265
5266 free (p);
5267
5268 input = build_tree_list (build_tree_list (NULL_TREE, input),
5269 unshare_expr (TREE_VALUE (link)));
5270 ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
5271 }
5272 }
5273
5274 link_next = NULL_TREE;
5275 for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
5276 {
5277 link_next = TREE_CHAIN (link);
5278 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
5279 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
5280 oconstraints, &allows_mem, &allows_reg);
5281
5282 /* If we can't make copies, we can only accept memory. */
5283 if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (link))))
5284 {
5285 if (allows_mem)
5286 allows_reg = 0;
5287 else
5288 {
5289 error ("impossible constraint in %<asm%>");
5290 error ("non-memory input %d must stay in memory", i);
5291 return GS_ERROR;
5292 }
5293 }
5294
5295 /* If the operand is a memory input, it should be an lvalue. */
5296 if (!allows_reg && allows_mem)
5297 {
5298 tree inputv = TREE_VALUE (link);
5299 STRIP_NOPS (inputv);
5300 if (TREE_CODE (inputv) == PREDECREMENT_EXPR
5301 || TREE_CODE (inputv) == PREINCREMENT_EXPR
5302 || TREE_CODE (inputv) == POSTDECREMENT_EXPR
5303 || TREE_CODE (inputv) == POSTINCREMENT_EXPR
5304 || TREE_CODE (inputv) == MODIFY_EXPR)
5305 TREE_VALUE (link) = error_mark_node;
5306 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
5307 is_gimple_lvalue, fb_lvalue | fb_mayfail);
5308 mark_addressable (TREE_VALUE (link));
5309 if (tret == GS_ERROR)
5310 {
5311 if (EXPR_HAS_LOCATION (TREE_VALUE (link)))
5312 input_location = EXPR_LOCATION (TREE_VALUE (link));
5313 error ("memory input %d is not directly addressable", i);
5314 ret = tret;
5315 }
5316 }
5317 else
5318 {
5319 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
5320 is_gimple_asm_val, fb_rvalue);
5321 if (tret == GS_ERROR)
5322 ret = tret;
5323 }
5324
5325 TREE_CHAIN (link) = NULL_TREE;
5326 vec_safe_push (inputs, link);
5327 }
5328
5329 link_next = NULL_TREE;
5330 for (link = ASM_CLOBBERS (expr); link; ++i, link = link_next)
5331 {
5332 link_next = TREE_CHAIN (link);
5333 TREE_CHAIN (link) = NULL_TREE;
5334 vec_safe_push (clobbers, link);
5335 }
5336
5337 link_next = NULL_TREE;
5338 for (link = ASM_LABELS (expr); link; ++i, link = link_next)
5339 {
5340 link_next = TREE_CHAIN (link);
5341 TREE_CHAIN (link) = NULL_TREE;
5342 vec_safe_push (labels, link);
5343 }
5344
5345 /* Do not add ASMs with errors to the gimple IL stream. */
5346 if (ret != GS_ERROR)
5347 {
5348 stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
5349 inputs, outputs, clobbers, labels);
5350
5351 gimple_asm_set_volatile (stmt, ASM_VOLATILE_P (expr) || noutputs == 0);
5352 gimple_asm_set_input (stmt, ASM_INPUT_P (expr));
5353
5354 gimplify_seq_add_stmt (pre_p, stmt);
5355 }
5356
5357 return ret;
5358 }
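
/* Illustrative sketch (an assumed example, not part of the original
   source): the in/out handling above rewrites a "+" constraint into a
   plain output plus a matching numbered input.  Roughly,

     int x;
     asm ("incl %0" : "+r" (x));

   is gimplified as if it had been written

     asm ("incl %0" : "=r" (x) : "0" (x));

   where the "0" input constraint ties the input to output operand 0,
   giving the optimizers separate input and output operands to work
   with.  */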
5359
5360 /* Gimplify a CLEANUP_POINT_EXPR. Currently this works by adding
5361 GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
5362 gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
5363 return to this function.
5364
5365 FIXME should we complexify the prequeue handling instead? Or use flags
5366 for all the cleanups and let the optimizer tighten them up? The current
5367 code seems pretty fragile; it will break on a cleanup within any
5368 non-conditional nesting. But any such nesting would be broken, anyway;
5369 we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
5370 and continues out of it. We can do that at the RTL level, though, so
5371 having an optimizer to tighten up try/finally regions would be a Good
5372 Thing. */
5373
5374 static enum gimplify_status
5375 gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
5376 {
5377 gimple_stmt_iterator iter;
5378 gimple_seq body_sequence = NULL;
5379
5380 tree temp = voidify_wrapper_expr (*expr_p, NULL);
5381
5382 /* We only care about the number of conditions between the innermost
5383 CLEANUP_POINT_EXPR and the cleanup. So save and reset the count and
5384 any cleanups collected outside the CLEANUP_POINT_EXPR. */
5385 int old_conds = gimplify_ctxp->conditions;
5386 gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
5387 bool old_in_cleanup_point_expr = gimplify_ctxp->in_cleanup_point_expr;
5388 gimplify_ctxp->conditions = 0;
5389 gimplify_ctxp->conditional_cleanups = NULL;
5390 gimplify_ctxp->in_cleanup_point_expr = true;
5391
5392 gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);
5393
5394 gimplify_ctxp->conditions = old_conds;
5395 gimplify_ctxp->conditional_cleanups = old_cleanups;
5396 gimplify_ctxp->in_cleanup_point_expr = old_in_cleanup_point_expr;
5397
5398 for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
5399 {
5400 gimple *wce = gsi_stmt (iter);
5401
5402 if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
5403 {
5404 if (gsi_one_before_end_p (iter))
5405 {
5406 /* Note that gsi_insert_seq_before and gsi_remove do not
5407 scan operands, unlike some other sequence mutators. */
5408 if (!gimple_wce_cleanup_eh_only (wce))
5409 gsi_insert_seq_before_without_update (&iter,
5410 gimple_wce_cleanup (wce),
5411 GSI_SAME_STMT);
5412 gsi_remove (&iter, true);
5413 break;
5414 }
5415 else
5416 {
5417 gtry *gtry;
5418 gimple_seq seq;
5419 enum gimple_try_flags kind;
5420
5421 if (gimple_wce_cleanup_eh_only (wce))
5422 kind = GIMPLE_TRY_CATCH;
5423 else
5424 kind = GIMPLE_TRY_FINALLY;
5425 seq = gsi_split_seq_after (iter);
5426
5427 gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
5428 /* Do not use gsi_replace here, as it may scan operands.
5429 We want to do a simple structural modification only. */
5430 gsi_set_stmt (&iter, gtry);
5431 iter = gsi_start (gtry->eval);
5432 }
5433 }
5434 else
5435 gsi_next (&iter);
5436 }
5437
5438 gimplify_seq_add_seq (pre_p, body_sequence);
5439 if (temp)
5440 {
5441 *expr_p = temp;
5442 return GS_OK;
5443 }
5444 else
5445 {
5446 *expr_p = NULL;
5447 return GS_ALL_DONE;
5448 }
5449 }
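
/* Illustrative sketch (an assumed example, not from the original
   source) of the rewriting performed above: a cleanup marker in the
   middle of the gimplified body,

     stmt1;
     GIMPLE_WITH_CLEANUP_EXPR <cleanup>;
     stmt2;

   splits the sequence into

     stmt1;
     try { stmt2; } finally { cleanup; }

   (or a try/catch when the cleanup is EH-only).  A marker that is
   last in the sequence simply has its cleanup statements spliced in
   place.  */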
5450
5451 /* Insert a cleanup marker for gimplify_cleanup_point_expr. VAR is the
5452 temporary being cleaned up, and CLEANUP is the cleanup action required.
5453 EH_ONLY is true if the cleanup runs only on exception, not on normal exit. */
5454
5455 static void
5456 gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p)
5457 {
5458 gimple *wce;
5459 gimple_seq cleanup_stmts = NULL;
5460
5461 /* Errors can result in improperly nested cleanups, which causes
5462 confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR. */
5463 if (seen_error ())
5464 return;
5465
5466 if (gimple_conditional_context ())
5467 {
5468 /* If we're in a conditional context, this is more complex. We only
5469 want to run the cleanup if we actually ran the initialization that
5470 necessitates it, but we want to run it after the end of the
5471 conditional context. So we wrap the try/finally around the
5472 condition and use a flag to determine whether or not to actually
5473 run the destructor. Thus
5474
5475 test ? f(A()) : 0
5476
5477 becomes (approximately)
5478
5479 flag = 0;
5480 try {
5481 if (test) { A::A(temp); flag = 1; val = f(temp); }
5482 else { val = 0; }
5483 } finally {
5484 if (flag) A::~A(temp);
5485 }
5486 val
5487 */
5488 tree flag = create_tmp_var (boolean_type_node, "cleanup");
5489 gassign *ffalse = gimple_build_assign (flag, boolean_false_node);
5490 gassign *ftrue = gimple_build_assign (flag, boolean_true_node);
5491
5492 cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
5493 gimplify_stmt (&cleanup, &cleanup_stmts);
5494 wce = gimple_build_wce (cleanup_stmts);
5495
5496 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
5497 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
5498 gimplify_seq_add_stmt (pre_p, ftrue);
5499
5500 /* Because of this manipulation, and the EH edges that jump
5501 threading cannot redirect, the temporary (VAR) will appear
5502 to be used uninitialized. Don't warn. */
5503 TREE_NO_WARNING (var) = 1;
5504 }
5505 else
5506 {
5507 gimplify_stmt (&cleanup, &cleanup_stmts);
5508 wce = gimple_build_wce (cleanup_stmts);
5509 gimple_wce_set_cleanup_eh_only (wce, eh_only);
5510 gimplify_seq_add_stmt (pre_p, wce);
5511 }
5512 }
5513
5514 /* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */
5515
5516 static enum gimplify_status
5517 gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5518 {
5519 tree targ = *expr_p;
5520 tree temp = TARGET_EXPR_SLOT (targ);
5521 tree init = TARGET_EXPR_INITIAL (targ);
5522 enum gimplify_status ret;
5523
5524 if (init)
5525 {
5526 tree cleanup = NULL_TREE;
5527
5528 /* The TARGET_EXPR temp isn't part of the enclosing block, so add it
5529 to the temps list. Also handle variable-length TARGET_EXPRs. */
5530 if (TREE_CODE (DECL_SIZE (temp)) != INTEGER_CST)
5531 {
5532 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
5533 gimplify_type_sizes (TREE_TYPE (temp), pre_p);
5534 gimplify_vla_decl (temp, pre_p);
5535 }
5536 else
5537 gimple_add_tmp_var (temp);
5538
5539 /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
5540 expression is supposed to initialize the slot. */
5541 if (VOID_TYPE_P (TREE_TYPE (init)))
5542 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
5543 else
5544 {
5545 tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
5546 init = init_expr;
5547 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
5548 init = NULL;
5549 ggc_free (init_expr);
5550 }
5551 if (ret == GS_ERROR)
5552 {
5553 /* PR c++/28266 Make sure this is expanded only once. */
5554 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
5555 return GS_ERROR;
5556 }
5557 if (init)
5558 gimplify_and_add (init, pre_p);
5559
5560 /* If needed, push the cleanup for the temp. */
5561 if (TARGET_EXPR_CLEANUP (targ))
5562 {
5563 if (CLEANUP_EH_ONLY (targ))
5564 gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
5565 CLEANUP_EH_ONLY (targ), pre_p);
5566 else
5567 cleanup = TARGET_EXPR_CLEANUP (targ);
5568 }
5569
5570 /* Add a clobber for the temporary going out of scope, like
5571 gimplify_bind_expr. */
5572 if (gimplify_ctxp->in_cleanup_point_expr
5573 && needs_to_live_in_memory (temp)
5574 && flag_stack_reuse == SR_ALL)
5575 {
5576 tree clobber = build_constructor (TREE_TYPE (temp),
5577 NULL);
5578 TREE_THIS_VOLATILE (clobber) = true;
5579 clobber = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, clobber);
5580 if (cleanup)
5581 cleanup = build2 (COMPOUND_EXPR, void_type_node, cleanup,
5582 clobber);
5583 else
5584 cleanup = clobber;
5585 }
5586
5587 if (cleanup)
5588 gimple_push_cleanup (temp, cleanup, false, pre_p);
5589
5590 /* Only expand this once. */
5591 TREE_OPERAND (targ, 3) = init;
5592 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
5593 }
5594 else
5595 /* We should have expanded this before. */
5596 gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));
5597
5598 *expr_p = temp;
5599 return GS_OK;
5600 }
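
/* Illustrative sketch (an assumed C++-style example, not from the
   original source): for a TARGET_EXPR such as

     TARGET_EXPR <D.1234, A::A (&D.1234), A::~A (&D.1234)>

   the code above registers the slot D.1234 as a temporary, emits the
   initialization

     A::A (&D.1234);

   into *PRE_P, and pushes the destructor call (optionally followed
   by a clobber of D.1234 when -fstack-reuse=all is in effect) as a
   cleanup, to be turned into a try/finally by
   gimplify_cleanup_point_expr.  */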
5601
5602 /* Gimplification of expression trees. */
5603
5604 /* Gimplify an expression which appears at statement context. The
5605 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is
5606 NULL, a new sequence is allocated.
5607
5608 Return true if we actually added a statement to the queue. */
5609
5610 bool
5611 gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
5612 {
5613 gimple_seq_node last;
5614
5615 last = gimple_seq_last (*seq_p);
5616 gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
5617 return last != gimple_seq_last (*seq_p);
5618 }
5619
5620 /* Add FIRSTPRIVATE entries for DECL to CTX and to the surrounding OpenMP
5621 parallels. If entries already exist, force them to be some flavor of
5622 private. If there is no enclosing parallel, do nothing. */
5623
5624 void
5625 omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
5626 {
5627 splay_tree_node n;
5628
5629 if (decl == NULL || !DECL_P (decl) || ctx->region_type == ORT_NONE)
5630 return;
5631
5632 do
5633 {
5634 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5635 if (n != NULL)
5636 {
5637 if (n->value & GOVD_SHARED)
5638 n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
5639 else if (n->value & GOVD_MAP)
5640 n->value |= GOVD_MAP_TO_ONLY;
5641 else
5642 return;
5643 }
5644 else if ((ctx->region_type & ORT_TARGET) != 0)
5645 {
5646 if (ctx->target_map_scalars_firstprivate)
5647 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
5648 else
5649 omp_add_variable (ctx, decl, GOVD_MAP | GOVD_MAP_TO_ONLY);
5650 }
5651 else if (ctx->region_type != ORT_WORKSHARE
5652 && ctx->region_type != ORT_SIMD
5653 && ctx->region_type != ORT_ACC
5654 && !(ctx->region_type & ORT_TARGET_DATA))
5655 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
5656
5657 ctx = ctx->outer_context;
5658 }
5659 while (ctx);
5660 }
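
/* Illustrative sketch (an assumed example, not from the original
   source): given a variable-length array whose bound feeds its type
   size,

     void f (int n)
     {
       int a[n];
     #pragma omp parallel shared (a)
       use (a);
     }

   the temporaries holding the gimplified size of a's type are passed
   through omp_firstprivatize_variable, so each enclosing parallel
   receives them as firstprivate and every thread can compute the
   array's size locally.  */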
5661
5662 /* Similarly for each of the type sizes of TYPE. */
5663
5664 static void
5665 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
5666 {
5667 if (type == NULL || type == error_mark_node)
5668 return;
5669 type = TYPE_MAIN_VARIANT (type);
5670
5671 if (ctx->privatized_types->add (type))
5672 return;
5673
5674 switch (TREE_CODE (type))
5675 {
5676 case INTEGER_TYPE:
5677 case ENUMERAL_TYPE:
5678 case BOOLEAN_TYPE:
5679 case REAL_TYPE:
5680 case FIXED_POINT_TYPE:
5681 omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
5682 omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
5683 break;
5684
5685 case ARRAY_TYPE:
5686 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
5687 omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
5688 break;
5689
5690 case RECORD_TYPE:
5691 case UNION_TYPE:
5692 case QUAL_UNION_TYPE:
5693 {
5694 tree field;
5695 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
5696 if (TREE_CODE (field) == FIELD_DECL)
5697 {
5698 omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
5699 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
5700 }
5701 }
5702 break;
5703
5704 case POINTER_TYPE:
5705 case REFERENCE_TYPE:
5706 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
5707 break;
5708
5709 default:
5710 break;
5711 }
5712
5713 omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
5714 omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
5715 lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
5716 }
5717
5718 /* Add an entry for DECL in the OMP context CTX with FLAGS. */
5719
5720 static void
5721 omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
5722 {
5723 splay_tree_node n;
5724 unsigned int nflags;
5725 tree t;
5726
5727 if (error_operand_p (decl) || ctx->region_type == ORT_NONE)
5728 return;
5729
5730 /* Never elide decls whose type has TREE_ADDRESSABLE set. This means
5731 there are constructors involved somewhere. */
5732 if (TREE_ADDRESSABLE (TREE_TYPE (decl))
5733 || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl)))
5734 flags |= GOVD_SEEN;
5735
5736 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5737 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
5738 {
5739 /* We shouldn't be re-adding the decl with the same data
5740 sharing class. */
5741 gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
5742 nflags = n->value | flags;
5743 /* The only combination of data sharing classes we should see is
5744 FIRSTPRIVATE and LASTPRIVATE. However, OpenACC permits
5745 reduction variables to be used in data sharing clauses. */
5746 gcc_assert ((ctx->region_type & ORT_ACC) != 0
5747 || ((nflags & GOVD_DATA_SHARE_CLASS)
5748 == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE))
5749 || (flags & GOVD_DATA_SHARE_CLASS) == 0);
5750 n->value = nflags;
5751 return;
5752 }
5753
5754 /* When adding a variable-sized variable, we have to handle all sorts
5755 of additional bits of data: the pointer replacement variable, and
5756 the parameters of the type. */
5757 if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
5758 {
5759 /* Add the pointer replacement variable as PRIVATE if the variable
5760 replacement is private, else FIRSTPRIVATE since we'll need the
5761 address of the original variable either for SHARED, or for the
5762 copy into or out of the context. */
5763 if (!(flags & GOVD_LOCAL))
5764 {
5765 if (flags & GOVD_MAP)
5766 nflags = GOVD_MAP | GOVD_MAP_TO_ONLY | GOVD_EXPLICIT;
5767 else if (flags & GOVD_PRIVATE)
5768 nflags = GOVD_PRIVATE;
5769 else if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
5770 && (flags & GOVD_FIRSTPRIVATE))
5771 nflags = GOVD_PRIVATE | GOVD_EXPLICIT;
5772 else
5773 nflags = GOVD_FIRSTPRIVATE;
5774 nflags |= flags & GOVD_SEEN;
5775 t = DECL_VALUE_EXPR (decl);
5776 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
5777 t = TREE_OPERAND (t, 0);
5778 gcc_assert (DECL_P (t));
5779 omp_add_variable (ctx, t, nflags);
5780 }
5781
5782 /* Add all of the variable and type parameters (which should have
5783 been gimplified to a formal temporary) as FIRSTPRIVATE. */
5784 omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
5785 omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
5786 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
5787
5788 /* The variable-sized variable itself is never SHARED, only some form
5789 of PRIVATE. The sharing would take place via the pointer variable
5790 which we remapped above. */
5791 if (flags & GOVD_SHARED)
5792 flags = GOVD_PRIVATE | GOVD_DEBUG_PRIVATE
5793 | (flags & (GOVD_SEEN | GOVD_EXPLICIT));
5794
5795 /* We're going to make use of the TYPE_SIZE_UNIT at least in the
5796 alloca statement we generate for the variable, so make sure it
5797 is available. This isn't automatically needed for the SHARED
5798 case, since we won't be allocating local storage then.
5799 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
5800 in this case omp_notice_variable will be called later
5801 on when it is gimplified. */
5802 else if (! (flags & (GOVD_LOCAL | GOVD_MAP))
5803 && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl))))
5804 omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
5805 }
5806 else if ((flags & (GOVD_MAP | GOVD_LOCAL)) == 0
5807 && lang_hooks.decls.omp_privatize_by_reference (decl))
5808 {
5809 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
5810
5811 /* Similar to the direct variable sized case above, we'll need the
5812 size of references being privatized. */
5813 if ((flags & GOVD_SHARED) == 0)
5814 {
5815 t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
5816 if (DECL_P (t))
5817 omp_notice_variable (ctx, t, true);
5818 }
5819 }
5820
5821 if (n != NULL)
5822 n->value |= flags;
5823 else
5824 splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
5825 }
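
/* Illustrative sketch (an assumed example, not from the original
   source): a variable-sized DECL carries a DECL_VALUE_EXPR of the
   form *ptr for some pointer replacement variable (set up by
   gimplify_vla_decl), so privatizing such a VLA in the code above
   also adds that pointer variable, as PRIVATE or FIRSTPRIVATE as
   appropriate, and firstprivatizes DECL_SIZE, DECL_SIZE_UNIT and the
   type sizes so that the per-thread alloca can be emitted inside the
   region.  */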
5826
5827 /* Notice a threadprivate variable DECL used in OMP context CTX.
5828 This just prints out diagnostics about threadprivate variable uses
5829 in target regions and untied tasks. If DECL2 is non-NULL, prevent
5830 this warning on that variable. */
5831
5832 static bool
5833 omp_notice_threadprivate_variable (struct gimplify_omp_ctx *ctx, tree decl,
5834 tree decl2)
5835 {
5836 splay_tree_node n;
5837 struct gimplify_omp_ctx *octx;
5838
5839 for (octx = ctx; octx; octx = octx->outer_context)
5840 if ((octx->region_type & ORT_TARGET) != 0)
5841 {
5842 n = splay_tree_lookup (octx->variables, (splay_tree_key)decl);
5843 if (n == NULL)
5844 {
5845 error ("threadprivate variable %qE used in target region",
5846 DECL_NAME (decl));
5847 error_at (octx->location, "enclosing target region");
5848 splay_tree_insert (octx->variables, (splay_tree_key)decl, 0);
5849 }
5850 if (decl2)
5851 splay_tree_insert (octx->variables, (splay_tree_key)decl2, 0);
5852 }
5853
5854 if (ctx->region_type != ORT_UNTIED_TASK)
5855 return false;
5856 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5857 if (n == NULL)
5858 {
5859 error ("threadprivate variable %qE used in untied task",
5860 DECL_NAME (decl));
5861 error_at (ctx->location, "enclosing task");
5862 splay_tree_insert (ctx->variables, (splay_tree_key)decl, 0);
5863 }
5864 if (decl2)
5865 splay_tree_insert (ctx->variables, (splay_tree_key)decl2, 0);
5866 return false;
5867 }
5868
5869 /* Return true if global var DECL is device resident. */
5870
5871 static bool
5872 device_resident_p (tree decl)
5873 {
5874 tree attr = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (decl));
5875
5876 if (!attr)
5877 return false;
5878
5879 for (tree t = TREE_VALUE (attr); t; t = TREE_PURPOSE (t))
5880 {
5881 tree c = TREE_VALUE (t);
5882 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DEVICE_RESIDENT)
5883 return true;
5884 }
5885
5886 return false;
5887 }
5888
5889 /* Determine outer default flags for DECL mentioned in an OMP region
5890 but not declared in an enclosing clause.
5891
5892 ??? Some compiler-generated variables (like SAVE_EXPRs) could be
5893 remapped firstprivate instead of shared. To some extent this is
5894 addressed in omp_firstprivatize_type_sizes, but not
5895 effectively. */
5896
5897 static unsigned
5898 omp_default_clause (struct gimplify_omp_ctx *ctx, tree decl,
5899 bool in_code, unsigned flags)
5900 {
5901 enum omp_clause_default_kind default_kind = ctx->default_kind;
5902 enum omp_clause_default_kind kind;
5903
5904 kind = lang_hooks.decls.omp_predetermined_sharing (decl);
5905 if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
5906 default_kind = kind;
5907
5908 switch (default_kind)
5909 {
5910 case OMP_CLAUSE_DEFAULT_NONE:
5911 {
5912 const char *rtype;
5913
5914 if (ctx->region_type & ORT_PARALLEL)
5915 rtype = "parallel";
5916 else if (ctx->region_type & ORT_TASK)
5917 rtype = "task";
5918 else if (ctx->region_type & ORT_TEAMS)
5919 rtype = "teams";
5920 else
5921 gcc_unreachable ();
5922
5923 error ("%qE not specified in enclosing %s",
5924 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rtype);
5925 error_at (ctx->location, "enclosing %s", rtype);
5926 }
5927 /* FALLTHRU */
5928 case OMP_CLAUSE_DEFAULT_SHARED:
5929 flags |= GOVD_SHARED;
5930 break;
5931 case OMP_CLAUSE_DEFAULT_PRIVATE:
5932 flags |= GOVD_PRIVATE;
5933 break;
5934 case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
5935 flags |= GOVD_FIRSTPRIVATE;
5936 break;
5937 case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
5938 /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED. */
5939 gcc_assert ((ctx->region_type & ORT_TASK) != 0);
5940 if (struct gimplify_omp_ctx *octx = ctx->outer_context)
5941 {
5942 omp_notice_variable (octx, decl, in_code);
5943 for (; octx; octx = octx->outer_context)
5944 {
5945 splay_tree_node n2;
5946
5947 n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
5948 if ((octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)) != 0
5949 && (n2 == NULL || (n2->value & GOVD_DATA_SHARE_CLASS) == 0))
5950 continue;
5951 if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
5952 {
5953 flags |= GOVD_FIRSTPRIVATE;
5954 goto found_outer;
5955 }
5956 if ((octx->region_type & (ORT_PARALLEL | ORT_TEAMS)) != 0)
5957 {
5958 flags |= GOVD_SHARED;
5959 goto found_outer;
5960 }
5961 }
5962 }
5963
5964 if (TREE_CODE (decl) == PARM_DECL
5965 || (!is_global_var (decl)
5966 && DECL_CONTEXT (decl) == current_function_decl))
5967 flags |= GOVD_FIRSTPRIVATE;
5968 else
5969 flags |= GOVD_SHARED;
5970 found_outer:
5971 break;
5972
5973 default:
5974 gcc_unreachable ();
5975 }
5976
5977 return flags;
5978 }
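
/* Illustrative sketch (an assumed example, not from the original
   source) of the defaults computed above:

     int x = 0;
   #pragma omp parallel default(none)
     x++;    // error: 'x' not specified in enclosing parallel

   With default(shared), or no default clause on a parallel, x would
   be implicitly shared; in the OMP_CLAUSE_DEFAULT_UNSPECIFIED case,
   which only arises for tasks here, the variable becomes firstprivate
   or shared depending on its sharing in the enclosing contexts.  */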
5979
5980
5981 /* Determine outer default flags for DECL mentioned in an OACC region
5982 but not declared in an enclosing clause. */
5983
5984 static unsigned
5985 oacc_default_clause (struct gimplify_omp_ctx *ctx, tree decl, unsigned flags)
5986 {
5987 const char *rkind;
5988 bool on_device = false;
5989
5990 if ((ctx->region_type & (ORT_ACC_PARALLEL | ORT_ACC_KERNELS)) != 0
5991 && is_global_var (decl)
5992 && device_resident_p (decl))
5993 {
5994 on_device = true;
5995 flags |= GOVD_MAP_TO_ONLY;
5996 }
5997
5998 switch (ctx->region_type)
5999 {
6000 default:
6001 gcc_unreachable ();
6002
6003 case ORT_ACC_KERNELS:
6004 /* Scalars are default 'copy' under kernels, non-scalars are default
6005 'present_or_copy'. */
6006 flags |= GOVD_MAP;
6007 if (!AGGREGATE_TYPE_P (TREE_TYPE (decl)))
6008 flags |= GOVD_MAP_FORCE;
6009
6010 rkind = "kernels";
6011 break;
6012
6013 case ORT_ACC_PARALLEL:
6014 {
6015 tree type = TREE_TYPE (decl);
6016
6017 if (TREE_CODE (type) == REFERENCE_TYPE
6018 || POINTER_TYPE_P (type))
6019 type = TREE_TYPE (type);
6020
6021 if (on_device || AGGREGATE_TYPE_P (type))
6022 /* Aggregates default to 'present_or_copy'. */
6023 flags |= GOVD_MAP;
6024 else
6025 /* Scalars default to 'firstprivate'. */
6026 flags |= GOVD_FIRSTPRIVATE;
6027 rkind = "parallel";
6028 }
6029 break;
6030 }
6031
6032 if (DECL_ARTIFICIAL (decl))
6033 ; /* We can get compiler-generated decls, and should not complain
6034 about them. */
6035 else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_NONE)
6036 {
6037 error ("%qE not specified in enclosing OpenACC %qs construct",
6038 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rkind);
6039 inform (ctx->location, "enclosing OpenACC %qs construct", rkind);
6040 }
6041 else
6042 gcc_checking_assert (ctx->default_kind == OMP_CLAUSE_DEFAULT_SHARED);
6043
6044 return flags;
6045 }
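
/* Illustrative sketch (an assumed example, not from the original
   source) of the OpenACC defaults computed above:

     float s;  float a[N];
   #pragma acc parallel
     // scalar s -> firstprivate, aggregate a -> present_or_copy map
   #pragma acc kernels
     // scalar s -> forced copy map, aggregate a -> present_or_copy map

   With default(none), any non-artificial decl that is not listed in a
   clause is diagnosed instead.  */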
6046
6047 /* Record the fact that DECL was used within the OMP context CTX.
6048 IN_CODE is true when real code uses DECL, and false when we should
6049 merely emit default(none) errors. Return true if DECL is going to
6050 be remapped and thus DECL shouldn't be gimplified into its
6051 DECL_VALUE_EXPR (if any). */
6052
6053 static bool
6054 omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
6055 {
6056 splay_tree_node n;
6057 unsigned flags = in_code ? GOVD_SEEN : 0;
6058 bool ret = false, shared;
6059
6060 if (error_operand_p (decl))
6061 return false;
6062
6063 if (ctx->region_type == ORT_NONE)
6064 return lang_hooks.decls.omp_disregard_value_expr (decl, false);
6065
6066 /* Threadprivate variables are predetermined. */
6067 if (is_global_var (decl))
6068 {
6069 if (DECL_THREAD_LOCAL_P (decl))
6070 return omp_notice_threadprivate_variable (ctx, decl, NULL_TREE);
6071
6072 if (DECL_HAS_VALUE_EXPR_P (decl))
6073 {
6074 tree value = get_base_address (DECL_VALUE_EXPR (decl));
6075
6076 if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
6077 return omp_notice_threadprivate_variable (ctx, decl, value);
6078 }
6079 }
6080
6081 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6082 if ((ctx->region_type & ORT_TARGET) != 0)
6083 {
6084 ret = lang_hooks.decls.omp_disregard_value_expr (decl, true);
6085 if (n == NULL)
6086 {
6087 unsigned nflags = flags;
6088 if (ctx->target_map_pointers_as_0len_arrays
6089 || ctx->target_map_scalars_firstprivate)
6090 {
6091 bool is_declare_target = false;
6092 bool is_scalar = false;
6093 if (is_global_var (decl)
6094 && varpool_node::get_create (decl)->offloadable)
6095 {
6096 struct gimplify_omp_ctx *octx;
6097 for (octx = ctx->outer_context;
6098 octx; octx = octx->outer_context)
6099 {
6100 n = splay_tree_lookup (octx->variables,
6101 (splay_tree_key)decl);
6102 if (n
6103 && (n->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED
6104 && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
6105 break;
6106 }
6107 is_declare_target = octx == NULL;
6108 }
6109 if (!is_declare_target && ctx->target_map_scalars_firstprivate)
6110 {
6111 tree type = TREE_TYPE (decl);
6112 if (TREE_CODE (type) == REFERENCE_TYPE)
6113 type = TREE_TYPE (type);
6114 if (TREE_CODE (type) == COMPLEX_TYPE)
6115 type = TREE_TYPE (type);
6116 if (INTEGRAL_TYPE_P (type)
6117 || SCALAR_FLOAT_TYPE_P (type)
6118 || TREE_CODE (type) == POINTER_TYPE)
6119 is_scalar = true;
6120 }
6121 if (is_declare_target)
6122 ;
6123 else if (ctx->target_map_pointers_as_0len_arrays
6124 && (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
6125 || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
6126 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
6127 == POINTER_TYPE)))
6128 nflags |= GOVD_MAP | GOVD_MAP_0LEN_ARRAY;
6129 else if (is_scalar)
6130 nflags |= GOVD_FIRSTPRIVATE;
6131 }
6132
6133 struct gimplify_omp_ctx *octx = ctx->outer_context;
6134 if ((ctx->region_type & ORT_ACC) && octx)
6135 {
6136 /* Look in outer OpenACC contexts, to see if there's a
6137 data attribute for this variable. */
6138 omp_notice_variable (octx, decl, in_code);
6139
6140 for (; octx; octx = octx->outer_context)
6141 {
6142 if (!(octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)))
6143 break;
6144 splay_tree_node n2
6145 = splay_tree_lookup (octx->variables,
6146 (splay_tree_key) decl);
6147 if (n2)
6148 {
6149 if (octx->region_type == ORT_ACC_HOST_DATA)
6150 error ("variable %qE declared in enclosing "
6151 "%<host_data%> region", DECL_NAME (decl));
6152 nflags |= GOVD_MAP;
6153 goto found_outer;
6154 }
6155 }
6156 }
6157
6158 {
6159 tree type = TREE_TYPE (decl);
6160
6161 if (nflags == flags
6162 && gimplify_omp_ctxp->target_firstprivatize_array_bases
6163 && lang_hooks.decls.omp_privatize_by_reference (decl))
6164 type = TREE_TYPE (type);
6165 if (nflags == flags
6166 && !lang_hooks.types.omp_mappable_type (type))
6167 {
6168 error ("%qD referenced in target region does not have "
6169 "a mappable type", decl);
6170 nflags |= GOVD_MAP | GOVD_EXPLICIT;
6171 }
6172 else if (nflags == flags)
6173 {
6174 if ((ctx->region_type & ORT_ACC) != 0)
6175 nflags = oacc_default_clause (ctx, decl, flags);
6176 else
6177 nflags |= GOVD_MAP;
6178 }
6179 }
6180 found_outer:
6181 omp_add_variable (ctx, decl, nflags);
6182 }
6183 else
6184 {
6185 /* If nothing changed, there's nothing left to do. */
6186 if ((n->value & flags) == flags)
6187 return ret;
6188 flags |= n->value;
6189 n->value = flags;
6190 }
6191 goto do_outer;
6192 }
6193
6194 if (n == NULL)
6195 {
6196 if (ctx->region_type == ORT_WORKSHARE
6197 || ctx->region_type == ORT_SIMD
6198 || ctx->region_type == ORT_ACC
6199 || (ctx->region_type & ORT_TARGET_DATA) != 0)
6200 goto do_outer;
6201
6202 flags = omp_default_clause (ctx, decl, in_code, flags);
6203
6204 if ((flags & GOVD_PRIVATE)
6205 && lang_hooks.decls.omp_private_outer_ref (decl))
6206 flags |= GOVD_PRIVATE_OUTER_REF;
6207
6208 omp_add_variable (ctx, decl, flags);
6209
6210 shared = (flags & GOVD_SHARED) != 0;
6211 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
6212 goto do_outer;
6213 }
6214
6215 if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
6216 && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
6217 && DECL_SIZE (decl)
6218 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
6219 {
6220 splay_tree_node n2;
6221 tree t = DECL_VALUE_EXPR (decl);
6222 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
6223 t = TREE_OPERAND (t, 0);
6224 gcc_assert (DECL_P (t));
6225 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
6226 n2->value |= GOVD_SEEN;
6227 }
6228
6229 shared = ((flags | n->value) & GOVD_SHARED) != 0;
6230 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
6231
6232 /* If nothing changed, there's nothing left to do. */
6233 if ((n->value & flags) == flags)
6234 return ret;
6235 flags |= n->value;
6236 n->value = flags;
6237
6238 do_outer:
6239 /* If the variable is private in the current context, then we don't
6240 need to propagate anything to an outer context. */
6241 if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
6242 return ret;
6243 if ((flags & (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
6244 == (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
6245 return ret;
6246 if ((flags & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
6247 | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
6248 == (GOVD_LASTPRIVATE | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
6249 return ret;
6250 if (ctx->outer_context
6251 && omp_notice_variable (ctx->outer_context, decl, in_code))
6252 return true;
6253 return ret;
6254 }
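
/* Illustrative sketch (an assumed example, not from the original
   source) of the implicit target mappings decided above for C/C++,
   where target_map_scalars_firstprivate is set:

     int s;  int *p;  struct S st;
   #pragma omp target
     { ... s ... p ... st ... }

   Here the scalar s becomes implicitly firstprivate, the pointer p is
   mapped as a zero-length array section, and the aggregate st gets a
   plain implicit map.  */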
6255
6256 /* Verify that DECL is private within CTX. If there's specific information
6257 to the contrary in the innermost scope, generate an error. */
6258
6259 static bool
6260 omp_is_private (struct gimplify_omp_ctx *ctx, tree decl, int simd)
6261 {
6262 splay_tree_node n;
6263
6264 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6265 if (n != NULL)
6266 {
6267 if (n->value & GOVD_SHARED)
6268 {
6269 if (ctx == gimplify_omp_ctxp)
6270 {
6271 if (simd)
6272 error ("iteration variable %qE is predetermined linear",
6273 DECL_NAME (decl));
6274 else
6275 error ("iteration variable %qE should be private",
6276 DECL_NAME (decl));
6277 n->value = GOVD_PRIVATE;
6278 return true;
6279 }
6280 else
6281 return false;
6282 }
6283 else if ((n->value & GOVD_EXPLICIT) != 0
6284 && (ctx == gimplify_omp_ctxp
6285 || (ctx->region_type == ORT_COMBINED_PARALLEL
6286 && gimplify_omp_ctxp->outer_context == ctx)))
6287 {
6288 if ((n->value & GOVD_FIRSTPRIVATE) != 0)
6289 error ("iteration variable %qE should not be firstprivate",
6290 DECL_NAME (decl));
6291 else if ((n->value & GOVD_REDUCTION) != 0)
6292 error ("iteration variable %qE should not be reduction",
6293 DECL_NAME (decl));
6294 else if (simd == 0 && (n->value & GOVD_LINEAR) != 0)
6295 error ("iteration variable %qE should not be linear",
6296 DECL_NAME (decl));
6297 else if (simd == 1 && (n->value & GOVD_LASTPRIVATE) != 0)
6298 error ("iteration variable %qE should not be lastprivate",
6299 DECL_NAME (decl));
6300 else if (simd && (n->value & GOVD_PRIVATE) != 0)
6301 error ("iteration variable %qE should not be private",
6302 DECL_NAME (decl));
6303 else if (simd == 2 && (n->value & GOVD_LINEAR) != 0)
6304 error ("iteration variable %qE is predetermined linear",
6305 DECL_NAME (decl));
6306 }
6307 return (ctx == gimplify_omp_ctxp
6308 || (ctx->region_type == ORT_COMBINED_PARALLEL
6309 && gimplify_omp_ctxp->outer_context == ctx));
6310 }
6311
6312 if (ctx->region_type != ORT_WORKSHARE
6313 && ctx->region_type != ORT_SIMD
6314 && ctx->region_type != ORT_ACC)
6315 return false;
6316 else if (ctx->outer_context)
6317 return omp_is_private (ctx->outer_context, decl, simd);
6318 return false;
6319 }
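
/* Illustrative sketch (an assumed example, not from the original
   source) of the iteration-variable checks above:

   #pragma omp parallel for firstprivate (i)
     for (i = 0; i < n; i++)   // error: iteration variable 'i' should
       body (i);               //        not be firstprivate

   An explicit private clause (or, on simd constructs, linear) is the
   acceptable spelling; the other data-sharing classes on the iterator
   are diagnosed as above.  */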
6320
6321 /* Return true if DECL is private within a parallel region that
6322 binds to the current construct's context, or appears in that
6323 parallel region's REDUCTION clause. */
6324
6325 static bool
6326 omp_check_private (struct gimplify_omp_ctx *ctx, tree decl, bool copyprivate)
6327 {
6328 splay_tree_node n;
6329
6330 do
6331 {
6332 ctx = ctx->outer_context;
6333 if (ctx == NULL)
6334 {
6335 if (is_global_var (decl))
6336 return false;
6337
6338 /* References might be private, but might be shared too; when
6339 checking for copyprivate, assume they might be private,
6340 otherwise assume they might be shared. */
6341 if (copyprivate)
6342 return true;
6343
6344 if (lang_hooks.decls.omp_privatize_by_reference (decl))
6345 return false;
6346
6347 /* Treat C++ privatized non-static data members outside
6348 of the privatization the same. */
6349 if (omp_member_access_dummy_var (decl))
6350 return false;
6351
6352 return true;
6353 }
6354
6355 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6356
6357 if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
6358 && (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0))
6359 continue;
6360
6361 if (n != NULL)
6362 {
6363 if ((n->value & GOVD_LOCAL) != 0
6364 && omp_member_access_dummy_var (decl))
6365 return false;
6366 return (n->value & GOVD_SHARED) == 0;
6367 }
6368 }
6369 while (ctx->region_type == ORT_WORKSHARE
6370 || ctx->region_type == ORT_SIMD
6371 || ctx->region_type == ORT_ACC);
6372 return false;
6373 }
6374
6375 /* Return true if the CTX is combined with distribute and thus
6376 lastprivate can't be supported. */
6377
6378 static bool
6379 omp_no_lastprivate (struct gimplify_omp_ctx *ctx)
6380 {
6381 do
6382 {
6383 if (ctx->outer_context == NULL)
6384 return false;
6385 ctx = ctx->outer_context;
6386 switch (ctx->region_type)
6387 {
6388 case ORT_WORKSHARE:
6389 if (!ctx->combined_loop)
6390 return false;
6391 if (ctx->distribute)
6392 return lang_GNU_Fortran ();
6393 break;
6394 case ORT_COMBINED_PARALLEL:
6395 break;
6396 case ORT_COMBINED_TEAMS:
6397 return lang_GNU_Fortran ();
6398 default:
6399 return false;
6400 }
6401 }
6402 while (1);
6403 }
6404
6405 /* Callback for walk_tree to find a DECL_EXPR for the given DECL. */
6406
6407 static tree
6408 find_decl_expr (tree *tp, int *walk_subtrees, void *data)
6409 {
6410 tree t = *tp;
6411
6412 /* If this is the DECL_EXPR for the DECL passed in DATA, return it. */
6413 if (TREE_CODE (t) == DECL_EXPR && DECL_EXPR_DECL (t) == (tree) data)
6414 return t;
6415
6416 if (IS_TYPE_OR_DECL_P (t))
6417 *walk_subtrees = 0;
6418 return NULL_TREE;
6419 }
6420
6421 /* Scan the OMP clauses in *LIST_P, installing mappings into the new
6422 and any enclosing omp contexts. */
6423
6424 static void
6425 gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
6426 enum omp_region_type region_type,
6427 enum tree_code code)
6428 {
6429 struct gimplify_omp_ctx *ctx, *outer_ctx;
6430 tree c;
6431 hash_map<tree, tree> *struct_map_to_clause = NULL;
6432 tree *prev_list_p = NULL;
6433
6434 ctx = new_omp_context (region_type);
6435 outer_ctx = ctx->outer_context;
6436 if (code == OMP_TARGET && !lang_GNU_Fortran ())
6437 {
6438 ctx->target_map_pointers_as_0len_arrays = true;
6439 /* FIXME: For Fortran we want to set this too, when
6440 the Fortran FE is updated to OpenMP 4.5. */
6441 ctx->target_map_scalars_firstprivate = true;
6442 }
6443 if (!lang_GNU_Fortran ())
6444 switch (code)
6445 {
6446 case OMP_TARGET:
6447 case OMP_TARGET_DATA:
6448 case OMP_TARGET_ENTER_DATA:
6449 case OMP_TARGET_EXIT_DATA:
6450 case OACC_HOST_DATA:
6451 ctx->target_firstprivatize_array_bases = true;
6452 default:
6453 break;
6454 }
6455
6456 while ((c = *list_p) != NULL)
6457 {
6458 bool remove = false;
6459 bool notice_outer = true;
6460 const char *check_non_private = NULL;
6461 unsigned int flags;
6462 tree decl;
6463
6464 switch (OMP_CLAUSE_CODE (c))
6465 {
6466 case OMP_CLAUSE_PRIVATE:
6467 flags = GOVD_PRIVATE | GOVD_EXPLICIT;
6468 if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
6469 {
6470 flags |= GOVD_PRIVATE_OUTER_REF;
6471 OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
6472 }
6473 else
6474 notice_outer = false;
6475 goto do_add;
6476 case OMP_CLAUSE_SHARED:
6477 flags = GOVD_SHARED | GOVD_EXPLICIT;
6478 goto do_add;
6479 case OMP_CLAUSE_FIRSTPRIVATE:
6480 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
6481 check_non_private = "firstprivate";
6482 goto do_add;
6483 case OMP_CLAUSE_LASTPRIVATE:
6484 flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
6485 check_non_private = "lastprivate";
6486 decl = OMP_CLAUSE_DECL (c);
6487 if (omp_no_lastprivate (ctx))
6488 {
6489 notice_outer = false;
6490 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
6491 }
6492 else if (error_operand_p (decl))
6493 goto do_add;
6494 else if (outer_ctx
6495 && (outer_ctx->region_type == ORT_COMBINED_PARALLEL
6496 || outer_ctx->region_type == ORT_COMBINED_TEAMS)
6497 && splay_tree_lookup (outer_ctx->variables,
6498 (splay_tree_key) decl) == NULL)
6499 {
6500 omp_add_variable (outer_ctx, decl, GOVD_SHARED | GOVD_SEEN);
6501 if (outer_ctx->outer_context)
6502 omp_notice_variable (outer_ctx->outer_context, decl, true);
6503 }
6504 else if (outer_ctx
6505 && (outer_ctx->region_type & ORT_TASK) != 0
6506 && outer_ctx->combined_loop
6507 && splay_tree_lookup (outer_ctx->variables,
6508 (splay_tree_key) decl) == NULL)
6509 {
6510 omp_add_variable (outer_ctx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
6511 if (outer_ctx->outer_context)
6512 omp_notice_variable (outer_ctx->outer_context, decl, true);
6513 }
6514 else if (outer_ctx
6515 && (outer_ctx->region_type == ORT_WORKSHARE
6516 || outer_ctx->region_type == ORT_ACC)
6517 && outer_ctx->combined_loop
6518 && splay_tree_lookup (outer_ctx->variables,
6519 (splay_tree_key) decl) == NULL
6520 && !omp_check_private (outer_ctx, decl, false))
6521 {
6522 omp_add_variable (outer_ctx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
6523 if (outer_ctx->outer_context
6524 && (outer_ctx->outer_context->region_type
6525 == ORT_COMBINED_PARALLEL)
6526 && splay_tree_lookup (outer_ctx->outer_context->variables,
6527 (splay_tree_key) decl) == NULL)
6528 {
6529 struct gimplify_omp_ctx *octx = outer_ctx->outer_context;
6530 omp_add_variable (octx, decl, GOVD_SHARED | GOVD_SEEN);
6531 if (octx->outer_context)
6532 omp_notice_variable (octx->outer_context, decl, true);
6533 }
6534 else if (outer_ctx->outer_context)
6535 omp_notice_variable (outer_ctx->outer_context, decl, true);
6536 }
6537 goto do_add;
6538 case OMP_CLAUSE_REDUCTION:
6539 flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
6540 /* OpenACC permits reductions on private variables. */
6541 if (!(region_type & ORT_ACC))
6542 check_non_private = "reduction";
6543 decl = OMP_CLAUSE_DECL (c);
6544 if (TREE_CODE (decl) == MEM_REF)
6545 {
6546 tree type = TREE_TYPE (decl);
6547 if (gimplify_expr (&TYPE_MAX_VALUE (TYPE_DOMAIN (type)), pre_p,
6548 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
6549 {
6550 remove = true;
6551 break;
6552 }
6553 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
6554 if (DECL_P (v))
6555 {
6556 omp_firstprivatize_variable (ctx, v);
6557 omp_notice_variable (ctx, v, true);
6558 }
6559 decl = TREE_OPERAND (decl, 0);
6560 if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
6561 {
6562 if (gimplify_expr (&TREE_OPERAND (decl, 1), pre_p,
6563 NULL, is_gimple_val, fb_rvalue)
6564 == GS_ERROR)
6565 {
6566 remove = true;
6567 break;
6568 }
6569 v = TREE_OPERAND (decl, 1);
6570 if (DECL_P (v))
6571 {
6572 omp_firstprivatize_variable (ctx, v);
6573 omp_notice_variable (ctx, v, true);
6574 }
6575 decl = TREE_OPERAND (decl, 0);
6576 }
6577 if (TREE_CODE (decl) == ADDR_EXPR
6578 || TREE_CODE (decl) == INDIRECT_REF)
6579 decl = TREE_OPERAND (decl, 0);
6580 }
6581 goto do_add_decl;
6582 case OMP_CLAUSE_LINEAR:
6583 if (gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c), pre_p, NULL,
6584 is_gimple_val, fb_rvalue) == GS_ERROR)
6585 {
6586 remove = true;
6587 break;
6588 }
6589 else
6590 {
6591 if (code == OMP_SIMD
6592 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
6593 {
6594 struct gimplify_omp_ctx *octx = outer_ctx;
6595 if (octx
6596 && octx->region_type == ORT_WORKSHARE
6597 && octx->combined_loop
6598 && !octx->distribute)
6599 {
6600 if (octx->outer_context
6601 && (octx->outer_context->region_type
6602 == ORT_COMBINED_PARALLEL))
6603 octx = octx->outer_context->outer_context;
6604 else
6605 octx = octx->outer_context;
6606 }
6607 if (octx
6608 && octx->region_type == ORT_WORKSHARE
6609 && octx->combined_loop
6610 && octx->distribute
6611 && !lang_GNU_Fortran ())
6612 {
6613 error_at (OMP_CLAUSE_LOCATION (c),
6614 "%<linear%> clause for variable other than "
6615 "loop iterator specified on construct "
6616 "combined with %<distribute%>");
6617 remove = true;
6618 break;
6619 }
6620 }
6621 /* For a combined #pragma omp parallel for simd, we need to put
6622 lastprivate and perhaps firstprivate too on the
6623 parallel. Similarly for #pragma omp for simd. */
6624 struct gimplify_omp_ctx *octx = outer_ctx;
6625 decl = NULL_TREE;
6626 if (omp_no_lastprivate (ctx))
6627 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
6628 do
6629 {
6630 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
6631 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
6632 break;
6633 decl = OMP_CLAUSE_DECL (c);
6634 if (error_operand_p (decl))
6635 {
6636 decl = NULL_TREE;
6637 break;
6638 }
6639 flags = GOVD_SEEN;
6640 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
6641 flags |= GOVD_FIRSTPRIVATE;
6642 if (!OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
6643 flags |= GOVD_LASTPRIVATE;
6644 if (octx
6645 && octx->region_type == ORT_WORKSHARE
6646 && octx->combined_loop)
6647 {
6648 if (octx->outer_context
6649 && (octx->outer_context->region_type
6650 == ORT_COMBINED_PARALLEL))
6651 octx = octx->outer_context;
6652 else if (omp_check_private (octx, decl, false))
6653 break;
6654 }
6655 else if (octx
6656 && (octx->region_type & ORT_TASK) != 0
6657 && octx->combined_loop)
6658 ;
6659 else if (octx
6660 && octx->region_type == ORT_COMBINED_PARALLEL
6661 && ctx->region_type == ORT_WORKSHARE
6662 && octx == outer_ctx)
6663 flags = GOVD_SEEN | GOVD_SHARED;
6664 else if (octx
6665 && octx->region_type == ORT_COMBINED_TEAMS)
6666 flags = GOVD_SEEN | GOVD_SHARED;
6667 else if (octx
6668 && octx->region_type == ORT_COMBINED_TARGET)
6669 {
6670 flags &= ~GOVD_LASTPRIVATE;
6671 if (flags == GOVD_SEEN)
6672 break;
6673 }
6674 else
6675 break;
6676 splay_tree_node on
6677 = splay_tree_lookup (octx->variables,
6678 (splay_tree_key) decl);
6679 if (on && (on->value & GOVD_DATA_SHARE_CLASS) != 0)
6680 {
6681 octx = NULL;
6682 break;
6683 }
6684 omp_add_variable (octx, decl, flags);
6685 if (octx->outer_context == NULL)
6686 break;
6687 octx = octx->outer_context;
6688 }
6689 while (1);
6690 if (octx
6691 && decl
6692 && (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
6693 || !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
6694 omp_notice_variable (octx, decl, true);
6695 }
6696 flags = GOVD_LINEAR | GOVD_EXPLICIT;
6697 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
6698 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
6699 {
6700 notice_outer = false;
6701 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
6702 }
6703 goto do_add;
6704
6705 case OMP_CLAUSE_MAP:
6706 decl = OMP_CLAUSE_DECL (c);
6707 if (error_operand_p (decl))
6708 remove = true;
6709 switch (code)
6710 {
6711 case OMP_TARGET:
6712 break;
6713 case OMP_TARGET_DATA:
6714 case OMP_TARGET_ENTER_DATA:
6715 case OMP_TARGET_EXIT_DATA:
6716 case OACC_HOST_DATA:
6717 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
6718 || (OMP_CLAUSE_MAP_KIND (c)
6719 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
6720 /* For target {,enter ,exit }data only the array slice is
6721 mapped, but not the pointer to it. */
6722 remove = true;
6723 break;
6724 default:
6725 break;
6726 }
6727 if (remove)
6728 break;
6729 if (DECL_P (decl) && outer_ctx && (region_type & ORT_ACC))
6730 {
6731 struct gimplify_omp_ctx *octx;
6732 for (octx = outer_ctx; octx; octx = octx->outer_context)
6733 {
6734 if (octx->region_type != ORT_ACC_HOST_DATA)
6735 break;
6736 splay_tree_node n2
6737 = splay_tree_lookup (octx->variables,
6738 (splay_tree_key) decl);
6739 if (n2)
6740 error_at (OMP_CLAUSE_LOCATION (c), "variable %qE "
6741 "declared in enclosing %<host_data%> region",
6742 DECL_NAME (decl));
6743 }
6744 }
6745 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
6746 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
6747 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
6748 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
6749 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
6750 {
6751 remove = true;
6752 break;
6753 }
6754 else if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
6755 || (OMP_CLAUSE_MAP_KIND (c)
6756 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
6757 && TREE_CODE (OMP_CLAUSE_SIZE (c)) != INTEGER_CST)
6758 {
6759 OMP_CLAUSE_SIZE (c)
6760 = get_initialized_tmp_var (OMP_CLAUSE_SIZE (c), pre_p, NULL);
6761 omp_add_variable (ctx, OMP_CLAUSE_SIZE (c),
6762 GOVD_FIRSTPRIVATE | GOVD_SEEN);
6763 }
6764 if (!DECL_P (decl))
6765 {
6766 tree d = decl, *pd;
6767 if (TREE_CODE (d) == ARRAY_REF)
6768 {
6769 while (TREE_CODE (d) == ARRAY_REF)
6770 d = TREE_OPERAND (d, 0);
6771 if (TREE_CODE (d) == COMPONENT_REF
6772 && TREE_CODE (TREE_TYPE (d)) == ARRAY_TYPE)
6773 decl = d;
6774 }
6775 pd = &OMP_CLAUSE_DECL (c);
6776 if (d == decl
6777 && TREE_CODE (decl) == INDIRECT_REF
6778 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
6779 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
6780 == REFERENCE_TYPE))
6781 {
6782 pd = &TREE_OPERAND (decl, 0);
6783 decl = TREE_OPERAND (decl, 0);
6784 }
6785 if (TREE_CODE (decl) == COMPONENT_REF)
6786 {
6787 while (TREE_CODE (decl) == COMPONENT_REF)
6788 decl = TREE_OPERAND (decl, 0);
6789 }
6790 if (gimplify_expr (pd, pre_p, NULL, is_gimple_lvalue, fb_lvalue)
6791 == GS_ERROR)
6792 {
6793 remove = true;
6794 break;
6795 }
6796 if (DECL_P (decl))
6797 {
6798 if (error_operand_p (decl))
6799 {
6800 remove = true;
6801 break;
6802 }
6803
6804 if (TYPE_SIZE_UNIT (TREE_TYPE (decl)) == NULL
6805 || (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (decl)))
6806 != INTEGER_CST))
6807 {
6808 error_at (OMP_CLAUSE_LOCATION (c),
6809 "mapping field %qE of variable length "
6810 "structure", OMP_CLAUSE_DECL (c));
6811 remove = true;
6812 break;
6813 }
6814
6815 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER)
6816 {
6817 /* Error recovery. */
6818 if (prev_list_p == NULL)
6819 {
6820 remove = true;
6821 break;
6822 }
6823 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
6824 {
6825 tree ch = OMP_CLAUSE_CHAIN (*prev_list_p);
6826 if (ch == NULL_TREE || OMP_CLAUSE_CHAIN (ch) != c)
6827 {
6828 remove = true;
6829 break;
6830 }
6831 }
6832 }
6833
6834 tree offset;
6835 HOST_WIDE_INT bitsize, bitpos;
6836 machine_mode mode;
6837 int unsignedp, reversep, volatilep = 0;
6838 tree base = OMP_CLAUSE_DECL (c);
6839 while (TREE_CODE (base) == ARRAY_REF)
6840 base = TREE_OPERAND (base, 0);
6841 if (TREE_CODE (base) == INDIRECT_REF)
6842 base = TREE_OPERAND (base, 0);
6843 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
6844 &mode, &unsignedp, &reversep,
6845 &volatilep, false);
6846 gcc_assert (base == decl
6847 && (offset == NULL_TREE
6848 || TREE_CODE (offset) == INTEGER_CST));
6849
6850 splay_tree_node n
6851 = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6852 bool ptr = (OMP_CLAUSE_MAP_KIND (c)
6853 == GOMP_MAP_ALWAYS_POINTER);
6854 if (n == NULL || (n->value & GOVD_MAP) == 0)
6855 {
6856 tree l = build_omp_clause (OMP_CLAUSE_LOCATION (c),
6857 OMP_CLAUSE_MAP);
6858 OMP_CLAUSE_SET_MAP_KIND (l, GOMP_MAP_STRUCT);
6859 OMP_CLAUSE_DECL (l) = decl;
6860 OMP_CLAUSE_SIZE (l) = size_int (1);
6861 if (struct_map_to_clause == NULL)
6862 struct_map_to_clause = new hash_map<tree, tree>;
6863 struct_map_to_clause->put (decl, l);
6864 if (ptr)
6865 {
6866 enum gomp_map_kind mkind
6867 = code == OMP_TARGET_EXIT_DATA
6868 ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
6869 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
6870 OMP_CLAUSE_MAP);
6871 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
6872 OMP_CLAUSE_DECL (c2)
6873 = unshare_expr (OMP_CLAUSE_DECL (c));
6874 OMP_CLAUSE_CHAIN (c2) = *prev_list_p;
6875 OMP_CLAUSE_SIZE (c2)
6876 = TYPE_SIZE_UNIT (ptr_type_node);
6877 OMP_CLAUSE_CHAIN (l) = c2;
6878 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
6879 {
6880 tree c4 = OMP_CLAUSE_CHAIN (*prev_list_p);
6881 tree c3
6882 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
6883 OMP_CLAUSE_MAP);
6884 OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
6885 OMP_CLAUSE_DECL (c3)
6886 = unshare_expr (OMP_CLAUSE_DECL (c4));
6887 OMP_CLAUSE_SIZE (c3)
6888 = TYPE_SIZE_UNIT (ptr_type_node);
6889 OMP_CLAUSE_CHAIN (c3) = *prev_list_p;
6890 OMP_CLAUSE_CHAIN (c2) = c3;
6891 }
6892 *prev_list_p = l;
6893 prev_list_p = NULL;
6894 }
6895 else
6896 {
6897 OMP_CLAUSE_CHAIN (l) = c;
6898 *list_p = l;
6899 list_p = &OMP_CLAUSE_CHAIN (l);
6900 }
6901 flags = GOVD_MAP | GOVD_EXPLICIT;
6902 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) || ptr)
6903 flags |= GOVD_SEEN;
6904 goto do_add_decl;
6905 }
6906 else
6907 {
6908 tree *osc = struct_map_to_clause->get (decl);
6909 tree *sc = NULL, *scp = NULL;
6910 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) || ptr)
6911 n->value |= GOVD_SEEN;
6912 offset_int o1, o2;
6913 if (offset)
6914 o1 = wi::to_offset (offset);
6915 else
6916 o1 = 0;
6917 if (bitpos)
6918 o1 = o1 + bitpos / BITS_PER_UNIT;
6919 for (sc = &OMP_CLAUSE_CHAIN (*osc);
6920 *sc != c; sc = &OMP_CLAUSE_CHAIN (*sc))
6921 if (ptr && sc == prev_list_p)
6922 break;
6923 else if (TREE_CODE (OMP_CLAUSE_DECL (*sc))
6924 != COMPONENT_REF
6925 && (TREE_CODE (OMP_CLAUSE_DECL (*sc))
6926 != INDIRECT_REF)
6927 && (TREE_CODE (OMP_CLAUSE_DECL (*sc))
6928 != ARRAY_REF))
6929 break;
6930 else
6931 {
6932 tree offset2;
6933 HOST_WIDE_INT bitsize2, bitpos2;
6934 base = OMP_CLAUSE_DECL (*sc);
6935 if (TREE_CODE (base) == ARRAY_REF)
6936 {
6937 while (TREE_CODE (base) == ARRAY_REF)
6938 base = TREE_OPERAND (base, 0);
6939 if (TREE_CODE (base) != COMPONENT_REF
6940 || (TREE_CODE (TREE_TYPE (base))
6941 != ARRAY_TYPE))
6942 break;
6943 }
6944 else if (TREE_CODE (base) == INDIRECT_REF
6945 && (TREE_CODE (TREE_OPERAND (base, 0))
6946 == COMPONENT_REF)
6947 && (TREE_CODE (TREE_TYPE
6948 (TREE_OPERAND (base, 0)))
6949 == REFERENCE_TYPE))
6950 base = TREE_OPERAND (base, 0);
6951 base = get_inner_reference (base, &bitsize2,
6952 &bitpos2, &offset2,
6953 &mode, &unsignedp,
6954 &reversep, &volatilep,
6955 false);
6956 if (base != decl)
6957 break;
6958 if (scp)
6959 continue;
6960 gcc_assert (offset == NULL_TREE
6961 || TREE_CODE (offset) == INTEGER_CST);
6962 tree d1 = OMP_CLAUSE_DECL (*sc);
6963 tree d2 = OMP_CLAUSE_DECL (c);
6964 while (TREE_CODE (d1) == ARRAY_REF)
6965 d1 = TREE_OPERAND (d1, 0);
6966 while (TREE_CODE (d2) == ARRAY_REF)
6967 d2 = TREE_OPERAND (d2, 0);
6968 if (TREE_CODE (d1) == INDIRECT_REF)
6969 d1 = TREE_OPERAND (d1, 0);
6970 if (TREE_CODE (d2) == INDIRECT_REF)
6971 d2 = TREE_OPERAND (d2, 0);
6972 while (TREE_CODE (d1) == COMPONENT_REF)
6973 if (TREE_CODE (d2) == COMPONENT_REF
6974 && TREE_OPERAND (d1, 1)
6975 == TREE_OPERAND (d2, 1))
6976 {
6977 d1 = TREE_OPERAND (d1, 0);
6978 d2 = TREE_OPERAND (d2, 0);
6979 }
6980 else
6981 break;
6982 if (d1 == d2)
6983 {
6984 error_at (OMP_CLAUSE_LOCATION (c),
6985 "%qE appears more than once in map "
6986 "clauses", OMP_CLAUSE_DECL (c));
6987 remove = true;
6988 break;
6989 }
6990 if (offset2)
6991 o2 = wi::to_offset (offset2);
6992 else
6993 o2 = 0;
6994 if (bitpos2)
6995 o2 = o2 + bitpos2 / BITS_PER_UNIT;
6996 if (wi::ltu_p (o1, o2)
6997 || (wi::eq_p (o1, o2) && bitpos < bitpos2))
6998 {
6999 if (ptr)
7000 scp = sc;
7001 else
7002 break;
7003 }
7004 }
7005 if (remove)
7006 break;
7007 OMP_CLAUSE_SIZE (*osc)
7008 = size_binop (PLUS_EXPR, OMP_CLAUSE_SIZE (*osc),
7009 size_one_node);
7010 if (ptr)
7011 {
7012 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
7013 OMP_CLAUSE_MAP);
7014 tree cl = NULL_TREE;
7015 enum gomp_map_kind mkind
7016 = code == OMP_TARGET_EXIT_DATA
7017 ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
7018 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
7019 OMP_CLAUSE_DECL (c2)
7020 = unshare_expr (OMP_CLAUSE_DECL (c));
7021 OMP_CLAUSE_CHAIN (c2) = scp ? *scp : *prev_list_p;
7022 OMP_CLAUSE_SIZE (c2)
7023 = TYPE_SIZE_UNIT (ptr_type_node);
7024 cl = scp ? *prev_list_p : c2;
7025 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
7026 {
7027 tree c4 = OMP_CLAUSE_CHAIN (*prev_list_p);
7028 tree c3
7029 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
7030 OMP_CLAUSE_MAP);
7031 OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
7032 OMP_CLAUSE_DECL (c3)
7033 = unshare_expr (OMP_CLAUSE_DECL (c4));
7034 OMP_CLAUSE_SIZE (c3)
7035 = TYPE_SIZE_UNIT (ptr_type_node);
7036 OMP_CLAUSE_CHAIN (c3) = *prev_list_p;
7037 if (!scp)
7038 OMP_CLAUSE_CHAIN (c2) = c3;
7039 else
7040 cl = c3;
7041 }
7042 if (scp)
7043 *scp = c2;
7044 if (sc == prev_list_p)
7045 {
7046 *sc = cl;
7047 prev_list_p = NULL;
7048 }
7049 else
7050 {
7051 *prev_list_p = OMP_CLAUSE_CHAIN (c);
7052 list_p = prev_list_p;
7053 prev_list_p = NULL;
7054 OMP_CLAUSE_CHAIN (c) = *sc;
7055 *sc = cl;
7056 continue;
7057 }
7058 }
7059 else if (*sc != c)
7060 {
7061 *list_p = OMP_CLAUSE_CHAIN (c);
7062 OMP_CLAUSE_CHAIN (c) = *sc;
7063 *sc = c;
7064 continue;
7065 }
7066 }
7067 }
7068 if (!remove
7069 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_POINTER
7070 && OMP_CLAUSE_CHAIN (c)
7071 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c)) == OMP_CLAUSE_MAP
7072 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
7073 == GOMP_MAP_ALWAYS_POINTER))
7074 prev_list_p = list_p;
7075 break;
7076 }
7077 flags = GOVD_MAP | GOVD_EXPLICIT;
7078 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TO
7079 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TOFROM)
7080 flags |= GOVD_MAP_ALWAYS_TO;
7081 goto do_add;
7082
7083 case OMP_CLAUSE_DEPEND:
7084 if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK
7085 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE)
7086 {
7087 /* Nothing to do. OMP_CLAUSE_DECL will be lowered in
7088 omp-low.c. */
7089 break;
7090 }
7091 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
7092 {
7093 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
7094 NULL, is_gimple_val, fb_rvalue);
7095 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
7096 }
7097 if (error_operand_p (OMP_CLAUSE_DECL (c)))
7098 {
7099 remove = true;
7100 break;
7101 }
7102 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
7103 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
7104 is_gimple_val, fb_rvalue) == GS_ERROR)
7105 {
7106 remove = true;
7107 break;
7108 }
7109 break;
7110
7111 case OMP_CLAUSE_TO:
7112 case OMP_CLAUSE_FROM:
7113 case OMP_CLAUSE__CACHE_:
7114 decl = OMP_CLAUSE_DECL (c);
7115 if (error_operand_p (decl))
7116 {
7117 remove = true;
7118 break;
7119 }
7120 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
7121 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
7122 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
7123 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
7124 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
7125 {
7126 remove = true;
7127 break;
7128 }
7129 if (!DECL_P (decl))
7130 {
7131 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p,
7132 NULL, is_gimple_lvalue, fb_lvalue)
7133 == GS_ERROR)
7134 {
7135 remove = true;
7136 break;
7137 }
7138 break;
7139 }
7140 goto do_notice;
7141
7142 case OMP_CLAUSE_USE_DEVICE_PTR:
7143 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
7144 goto do_add;
7145 case OMP_CLAUSE_IS_DEVICE_PTR:
7146 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
7147 goto do_add;
7148
7149 do_add:
7150 decl = OMP_CLAUSE_DECL (c);
7151 do_add_decl:
7152 if (error_operand_p (decl))
7153 {
7154 remove = true;
7155 break;
7156 }
7157 if (DECL_NAME (decl) == NULL_TREE && (flags & GOVD_SHARED) == 0)
7158 {
7159 tree t = omp_member_access_dummy_var (decl);
7160 if (t)
7161 {
7162 tree v = DECL_VALUE_EXPR (decl);
7163 DECL_NAME (decl) = DECL_NAME (TREE_OPERAND (v, 1));
7164 if (outer_ctx)
7165 omp_notice_variable (outer_ctx, t, true);
7166 }
7167 }
7168 omp_add_variable (ctx, decl, flags);
7169 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
7170 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
7171 {
7172 omp_add_variable (ctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
7173 GOVD_LOCAL | GOVD_SEEN);
7174 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c)
7175 && walk_tree (&OMP_CLAUSE_REDUCTION_INIT (c),
7176 find_decl_expr,
7177 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
7178 NULL) == NULL_TREE)
7179 omp_add_variable (ctx,
7180 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
7181 GOVD_LOCAL | GOVD_SEEN);
7182 gimplify_omp_ctxp = ctx;
7183 push_gimplify_context ();
7184
7185 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
7186 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
7187
7188 gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
7189 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
7190 pop_gimplify_context
7191 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
7192 push_gimplify_context ();
7193 gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
7194 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
7195 pop_gimplify_context
7196 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
7197 OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
7198 OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;
7199
7200 gimplify_omp_ctxp = outer_ctx;
7201 }
7202 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7203 && OMP_CLAUSE_LASTPRIVATE_STMT (c))
7204 {
7205 gimplify_omp_ctxp = ctx;
7206 push_gimplify_context ();
7207 if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
7208 {
7209 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
7210 NULL, NULL);
7211 TREE_SIDE_EFFECTS (bind) = 1;
7212 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
7213 OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
7214 }
7215 gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
7216 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
7217 pop_gimplify_context
7218 (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
7219 OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;
7220
7221 gimplify_omp_ctxp = outer_ctx;
7222 }
7223 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
7224 && OMP_CLAUSE_LINEAR_STMT (c))
7225 {
7226 gimplify_omp_ctxp = ctx;
7227 push_gimplify_context ();
7228 if (TREE_CODE (OMP_CLAUSE_LINEAR_STMT (c)) != BIND_EXPR)
7229 {
7230 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
7231 NULL, NULL);
7232 TREE_SIDE_EFFECTS (bind) = 1;
7233 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LINEAR_STMT (c);
7234 OMP_CLAUSE_LINEAR_STMT (c) = bind;
7235 }
7236 gimplify_and_add (OMP_CLAUSE_LINEAR_STMT (c),
7237 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
7238 pop_gimplify_context
7239 (gimple_seq_first_stmt (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c)));
7240 OMP_CLAUSE_LINEAR_STMT (c) = NULL_TREE;
7241
7242 gimplify_omp_ctxp = outer_ctx;
7243 }
7244 if (notice_outer)
7245 goto do_notice;
7246 break;
7247
7248 case OMP_CLAUSE_COPYIN:
7249 case OMP_CLAUSE_COPYPRIVATE:
7250 decl = OMP_CLAUSE_DECL (c);
7251 if (error_operand_p (decl))
7252 {
7253 remove = true;
7254 break;
7255 }
7256 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_COPYPRIVATE
7257 && !remove
7258 && !omp_check_private (ctx, decl, true))
7259 {
7260 remove = true;
7261 if (is_global_var (decl))
7262 {
7263 if (DECL_THREAD_LOCAL_P (decl))
7264 remove = false;
7265 else if (DECL_HAS_VALUE_EXPR_P (decl))
7266 {
7267 tree value = get_base_address (DECL_VALUE_EXPR (decl));
7268
7269 if (value
7270 && DECL_P (value)
7271 && DECL_THREAD_LOCAL_P (value))
7272 remove = false;
7273 }
7274 }
7275 if (remove)
7276 error_at (OMP_CLAUSE_LOCATION (c),
7277 "copyprivate variable %qE is not threadprivate"
7278 " or private in outer context", DECL_NAME (decl));
7279 }
7280 do_notice:
7281 if (outer_ctx)
7282 omp_notice_variable (outer_ctx, decl, true);
7283 if (check_non_private
7284 && region_type == ORT_WORKSHARE
7285 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
7286 || decl == OMP_CLAUSE_DECL (c)
7287 || (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
7288 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
7289 == ADDR_EXPR
7290 || (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
7291 == POINTER_PLUS_EXPR
7292 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND
7293 (OMP_CLAUSE_DECL (c), 0), 0))
7294 == ADDR_EXPR)))))
7295 && omp_check_private (ctx, decl, false))
7296 {
7297 error ("%s variable %qE is private in outer context",
7298 check_non_private, DECL_NAME (decl));
7299 remove = true;
7300 }
7301 break;
7302
7303 case OMP_CLAUSE_IF:
7304 if (OMP_CLAUSE_IF_MODIFIER (c) != ERROR_MARK
7305 && OMP_CLAUSE_IF_MODIFIER (c) != code)
7306 {
7307 const char *p[2];
7308 for (int i = 0; i < 2; i++)
7309 switch (i ? OMP_CLAUSE_IF_MODIFIER (c) : code)
7310 {
7311 case OMP_PARALLEL: p[i] = "parallel"; break;
7312 case OMP_TASK: p[i] = "task"; break;
7313 case OMP_TASKLOOP: p[i] = "taskloop"; break;
7314 case OMP_TARGET_DATA: p[i] = "target data"; break;
7315 case OMP_TARGET: p[i] = "target"; break;
7316 case OMP_TARGET_UPDATE: p[i] = "target update"; break;
7317 case OMP_TARGET_ENTER_DATA:
7318 p[i] = "target enter data"; break;
7319 case OMP_TARGET_EXIT_DATA: p[i] = "target exit data"; break;
7320 default: gcc_unreachable ();
7321 }
7322 error_at (OMP_CLAUSE_LOCATION (c),
7323 "expected %qs %<if%> clause modifier rather than %qs",
7324 p[0], p[1]);
7325 remove = true;
7326 }
7327 /* Fall through. */
7328
7329 case OMP_CLAUSE_FINAL:
7330 OMP_CLAUSE_OPERAND (c, 0)
7331 = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
7332 /* Fall through. */
7333
7334 case OMP_CLAUSE_SCHEDULE:
7335 case OMP_CLAUSE_NUM_THREADS:
7336 case OMP_CLAUSE_NUM_TEAMS:
7337 case OMP_CLAUSE_THREAD_LIMIT:
7338 case OMP_CLAUSE_DIST_SCHEDULE:
7339 case OMP_CLAUSE_DEVICE:
7340 case OMP_CLAUSE_PRIORITY:
7341 case OMP_CLAUSE_GRAINSIZE:
7342 case OMP_CLAUSE_NUM_TASKS:
7343 case OMP_CLAUSE_HINT:
7344 case OMP_CLAUSE__CILK_FOR_COUNT_:
7345 case OMP_CLAUSE_ASYNC:
7346 case OMP_CLAUSE_WAIT:
7347 case OMP_CLAUSE_NUM_GANGS:
7348 case OMP_CLAUSE_NUM_WORKERS:
7349 case OMP_CLAUSE_VECTOR_LENGTH:
7350 case OMP_CLAUSE_WORKER:
7351 case OMP_CLAUSE_VECTOR:
7352 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
7353 is_gimple_val, fb_rvalue) == GS_ERROR)
7354 remove = true;
7355 break;
7356
7357 case OMP_CLAUSE_GANG:
7358 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
7359 is_gimple_val, fb_rvalue) == GS_ERROR)
7360 remove = true;
7361 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 1), pre_p, NULL,
7362 is_gimple_val, fb_rvalue) == GS_ERROR)
7363 remove = true;
7364 break;
7365
7366 case OMP_CLAUSE_TILE:
7367 for (tree list = OMP_CLAUSE_TILE_LIST (c); !remove && list;
7368 list = TREE_CHAIN (list))
7369 {
7370 if (gimplify_expr (&TREE_VALUE (list), pre_p, NULL,
7371 is_gimple_val, fb_rvalue) == GS_ERROR)
7372 remove = true;
7373 }
7374 break;
7375
7376 case OMP_CLAUSE_DEVICE_RESIDENT:
7377 remove = true;
7378 break;
7379
7380 case OMP_CLAUSE_NOWAIT:
7381 case OMP_CLAUSE_ORDERED:
7382 case OMP_CLAUSE_UNTIED:
7383 case OMP_CLAUSE_COLLAPSE:
7384 case OMP_CLAUSE_AUTO:
7385 case OMP_CLAUSE_SEQ:
7386 case OMP_CLAUSE_INDEPENDENT:
7387 case OMP_CLAUSE_MERGEABLE:
7388 case OMP_CLAUSE_PROC_BIND:
7389 case OMP_CLAUSE_SAFELEN:
7390 case OMP_CLAUSE_SIMDLEN:
7391 case OMP_CLAUSE_NOGROUP:
7392 case OMP_CLAUSE_THREADS:
7393 case OMP_CLAUSE_SIMD:
7394 break;
7395
7396 case OMP_CLAUSE_DEFAULTMAP:
7397 ctx->target_map_scalars_firstprivate = false;
7398 break;
7399
7400 case OMP_CLAUSE_ALIGNED:
7401 decl = OMP_CLAUSE_DECL (c);
7402 if (error_operand_p (decl))
7403 {
7404 remove = true;
7405 break;
7406 }
7407 if (gimplify_expr (&OMP_CLAUSE_ALIGNED_ALIGNMENT (c), pre_p, NULL,
7408 is_gimple_val, fb_rvalue) == GS_ERROR)
7409 {
7410 remove = true;
7411 break;
7412 }
7413 if (!is_global_var (decl)
7414 && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
7415 omp_add_variable (ctx, decl, GOVD_ALIGNED);
7416 break;
7417
7418 case OMP_CLAUSE_DEFAULT:
7419 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
7420 break;
7421
7422 default:
7423 gcc_unreachable ();
7424 }
7425
7426 if (remove)
7427 *list_p = OMP_CLAUSE_CHAIN (c);
7428 else
7429 list_p = &OMP_CLAUSE_CHAIN (c);
7430 }
7431
7432 gimplify_omp_ctxp = ctx;
7433 if (struct_map_to_clause)
7434 delete struct_map_to_clause;
7435 }
7436
7437 /* Return true if DECL is a candidate for the shared-to-firstprivate
7438    optimization.  We only consider non-addressable scalars that are not
7439    too big and are not references.  */
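/* Illustrative example (hypothetical user code, not part of GCC):

     int x = 42;
     #pragma omp parallel shared (x)
     foo (x);

   x is only read inside the region.  Being a small non-addressable
   scalar it qualifies; a reference, a decl whose address is taken, or an
   aggregate does not.  */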
7440
7441 static bool
7442 omp_shared_to_firstprivate_optimizable_decl_p (tree decl)
7443 {
7444 if (TREE_ADDRESSABLE (decl))
7445 return false;
7446 tree type = TREE_TYPE (decl);
7447 if (!is_gimple_reg_type (type)
7448 || TREE_CODE (type) == REFERENCE_TYPE
7449 || TREE_ADDRESSABLE (type))
7450 return false;
7451 /* Don't optimize too large decls, as each thread/task will have
7452    its own copy.  */
7453 HOST_WIDE_INT len = int_size_in_bytes (type);
7454 if (len == -1 || len > 4 * POINTER_SIZE / BITS_PER_UNIT)
7455 return false;
7456 if (lang_hooks.decls.omp_privatize_by_reference (decl))
7457 return false;
7458 return true;
7459 }
7460
7461 /* Helper function of omp_find_stores_op and gimplify_adjust_omp_clauses*.
7462    Mark DECL (an omp_shared_to_firstprivate_optimizable_decl_p decl) as
7463    GOVD_WRITTEN in the innermost enclosing context where it is GOVD_SHARED.  */
7464
7465 static void
7466 omp_mark_stores (struct gimplify_omp_ctx *ctx, tree decl)
7467 {
7468 for (; ctx; ctx = ctx->outer_context)
7469 {
7470 splay_tree_node n = splay_tree_lookup (ctx->variables,
7471 (splay_tree_key) decl);
7472 if (n == NULL)
7473 continue;
7474 else if (n->value & GOVD_SHARED)
7475 {
7476 n->value |= GOVD_WRITTEN;
7477 return;
7478 }
7479 else if (n->value & GOVD_DATA_SHARE_CLASS)
7480 return;
7481 }
7482 }
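/* For instance (a sketch): if DECL is GOVD_FIRSTPRIVATE on an inner task
   but GOVD_SHARED on an enclosing parallel, the outward walk above stops
   at the task's entry without marking the parallel, because the store
   only touches the task's private copy.  */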
7483
7484 /* Helper callback for walk_gimple_seq to discover possible stores
7485    to omp_shared_to_firstprivate_optimizable_decl_p decls, and to set
7486    GOVD_WRITTEN on each such decl that is GOVD_SHARED in some outer
7487    context.  */
7488
7489 static tree
7490 omp_find_stores_op (tree *tp, int *walk_subtrees, void *data)
7491 {
7492 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
7493
7494 *walk_subtrees = 0;
7495 if (!wi->is_lhs)
7496 return NULL_TREE;
7497
7498 tree op = *tp;
7499 do
7500 {
7501 if (handled_component_p (op))
7502 op = TREE_OPERAND (op, 0);
7503 else if ((TREE_CODE (op) == MEM_REF || TREE_CODE (op) == TARGET_MEM_REF)
7504 && TREE_CODE (TREE_OPERAND (op, 0)) == ADDR_EXPR)
7505 op = TREE_OPERAND (TREE_OPERAND (op, 0), 0);
7506 else
7507 break;
7508 }
7509 while (1);
7510 if (!DECL_P (op) || !omp_shared_to_firstprivate_optimizable_decl_p (op))
7511 return NULL_TREE;
7512
7513 omp_mark_stores (gimplify_omp_ctxp, op);
7514 return NULL_TREE;
7515 }
7516
7517 /* Statement-walker counterpart of omp_find_stores_op: walk a gimple
7518    sequence looking for the same stores, skipping the bodies of nested
7519    OpenMP constructs whose clauses gimplify_adjust_omp_clauses has
7520    already processed.  */
7521
7522 static tree
7523 omp_find_stores_stmt (gimple_stmt_iterator *gsi_p,
7524 bool *handled_ops_p,
7525 struct walk_stmt_info *wi)
7526 {
7527 gimple *stmt = gsi_stmt (*gsi_p);
7528 switch (gimple_code (stmt))
7529 {
7530 /* Don't recurse into OpenMP constructs whose bodies
7531    gimplify_adjust_omp_clauses has already handled; for
7532    GIMPLE_OMP_FOR, still walk gimple_omp_for_pre_body.  */
7533 case GIMPLE_OMP_FOR:
7534 *handled_ops_p = true;
7535 if (gimple_omp_for_pre_body (stmt))
7536 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
7537 omp_find_stores_stmt, omp_find_stores_op, wi);
7538 break;
7539 case GIMPLE_OMP_PARALLEL:
7540 case GIMPLE_OMP_TASK:
7541 case GIMPLE_OMP_SECTIONS:
7542 case GIMPLE_OMP_SINGLE:
7543 case GIMPLE_OMP_TARGET:
7544 case GIMPLE_OMP_TEAMS:
7545 case GIMPLE_OMP_CRITICAL:
7546 *handled_ops_p = true;
7547 break;
7548 default:
7549 break;
7550 }
7551 return NULL_TREE;
7552 }
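/* Together, omp_find_stores_stmt and omp_find_stores_op implement a
   conservative may-be-written analysis: any decl on a store's left-hand
   side is reported via omp_mark_stores.  E.g. in a sketch like
   "s.a[i] = 0" the operand walk strips the ARRAY_REF and COMPONENT_REF
   down to the base decl "s" before checking it.  */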
7553
7554 struct gimplify_adjust_omp_clauses_data
7555 {
7556 tree *list_p;
7557 gimple_seq *pre_p;
7558 };
7559
7560 /* Build an implicit data-sharing or map clause for each used variable
7561    recorded in the context that lacks an explicit clause.  */
7562
7563 static int
7564 gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
7565 {
7566 tree *list_p = ((struct gimplify_adjust_omp_clauses_data *) data)->list_p;
7567 gimple_seq *pre_p
7568 = ((struct gimplify_adjust_omp_clauses_data *) data)->pre_p;
7569 tree decl = (tree) n->key;
7570 unsigned flags = n->value;
7571 enum omp_clause_code code;
7572 tree clause;
7573 bool private_debug;
7574
7575 if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
7576 return 0;
7577 if ((flags & GOVD_SEEN) == 0)
7578 return 0;
7579 if (flags & GOVD_DEBUG_PRIVATE)
7580 {
7581 gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_PRIVATE);
7582 private_debug = true;
7583 }
7584 else if (flags & GOVD_MAP)
7585 private_debug = false;
7586 else
7587 private_debug
7588 = lang_hooks.decls.omp_private_debug_clause (decl,
7589 !!(flags & GOVD_SHARED));
7590 if (private_debug)
7591 code = OMP_CLAUSE_PRIVATE;
7592 else if (flags & GOVD_MAP)
7593 code = OMP_CLAUSE_MAP;
7594 else if (flags & GOVD_SHARED)
7595 {
7596 if (is_global_var (decl))
7597 {
7598 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
7599 while (ctx != NULL)
7600 {
7601 splay_tree_node on
7602 = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
7603 if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
7604 | GOVD_PRIVATE | GOVD_REDUCTION
7605 | GOVD_LINEAR | GOVD_MAP)) != 0)
7606 break;
7607 ctx = ctx->outer_context;
7608 }
7609 if (ctx == NULL)
7610 return 0;
7611 }
7612 code = OMP_CLAUSE_SHARED;
7613 }
7614 else if (flags & GOVD_PRIVATE)
7615 code = OMP_CLAUSE_PRIVATE;
7616 else if (flags & GOVD_FIRSTPRIVATE)
7617 code = OMP_CLAUSE_FIRSTPRIVATE;
7618 else if (flags & GOVD_LASTPRIVATE)
7619 code = OMP_CLAUSE_LASTPRIVATE;
7620 else if (flags & GOVD_ALIGNED)
7621 return 0;
7622 else
7623 gcc_unreachable ();
7624
7625 if (((flags & GOVD_LASTPRIVATE)
7626 || (code == OMP_CLAUSE_SHARED && (flags & GOVD_WRITTEN)))
7627 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
7628 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
7629
7630 clause = build_omp_clause (input_location, code);
7631 OMP_CLAUSE_DECL (clause) = decl;
7632 OMP_CLAUSE_CHAIN (clause) = *list_p;
7633 if (private_debug)
7634 OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
7635 else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
7636 OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
7637 else if (code == OMP_CLAUSE_SHARED
7638 && (flags & GOVD_WRITTEN) == 0
7639 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
7640 OMP_CLAUSE_SHARED_READONLY (clause) = 1;
7641 else if (code == OMP_CLAUSE_MAP && (flags & GOVD_MAP_0LEN_ARRAY) != 0)
7642 {
7643 tree nc = build_omp_clause (input_location, OMP_CLAUSE_MAP);
7644 OMP_CLAUSE_DECL (nc) = decl;
7645 if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
7646 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
7647 OMP_CLAUSE_DECL (clause)
7648 = build_simple_mem_ref_loc (input_location, decl);
7649 OMP_CLAUSE_DECL (clause)
7650 = build2 (MEM_REF, char_type_node, OMP_CLAUSE_DECL (clause),
7651 build_int_cst (build_pointer_type (char_type_node), 0));
7652 OMP_CLAUSE_SIZE (clause) = size_zero_node;
7653 OMP_CLAUSE_SIZE (nc) = size_zero_node;
7654 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_ALLOC);
7655 OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (clause) = 1;
7656 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
7657 OMP_CLAUSE_CHAIN (nc) = *list_p;
7658 OMP_CLAUSE_CHAIN (clause) = nc;
7659 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
7660 gimplify_omp_ctxp = ctx->outer_context;
7661 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (clause), 0),
7662 pre_p, NULL, is_gimple_val, fb_rvalue);
7663 gimplify_omp_ctxp = ctx;
7664 }
7665 else if (code == OMP_CLAUSE_MAP)
7666 {
7667 int kind = (flags & GOVD_MAP_TO_ONLY
7668 ? GOMP_MAP_TO
7669 : GOMP_MAP_TOFROM);
7670 if (flags & GOVD_MAP_FORCE)
7671 kind |= GOMP_MAP_FLAG_FORCE;
7672 OMP_CLAUSE_SET_MAP_KIND (clause, kind);
7673 if (DECL_SIZE (decl)
7674 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
7675 {
7676 tree decl2 = DECL_VALUE_EXPR (decl);
7677 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
7678 decl2 = TREE_OPERAND (decl2, 0);
7679 gcc_assert (DECL_P (decl2));
7680 tree mem = build_simple_mem_ref (decl2);
7681 OMP_CLAUSE_DECL (clause) = mem;
7682 OMP_CLAUSE_SIZE (clause) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
7683 if (gimplify_omp_ctxp->outer_context)
7684 {
7685 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
7686 omp_notice_variable (ctx, decl2, true);
7687 omp_notice_variable (ctx, OMP_CLAUSE_SIZE (clause), true);
7688 }
7689 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
7690 OMP_CLAUSE_MAP);
7691 OMP_CLAUSE_DECL (nc) = decl;
7692 OMP_CLAUSE_SIZE (nc) = size_zero_node;
7693 if (gimplify_omp_ctxp->target_firstprivatize_array_bases)
7694 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
7695 else
7696 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
7697 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
7698 OMP_CLAUSE_CHAIN (clause) = nc;
7699 }
7700 else if (gimplify_omp_ctxp->target_firstprivatize_array_bases
7701 && lang_hooks.decls.omp_privatize_by_reference (decl))
7702 {
7703 OMP_CLAUSE_DECL (clause) = build_simple_mem_ref (decl);
7704 OMP_CLAUSE_SIZE (clause)
7705 = unshare_expr (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))));
7706 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
7707 gimplify_omp_ctxp = ctx->outer_context;
7708 gimplify_expr (&OMP_CLAUSE_SIZE (clause),
7709 pre_p, NULL, is_gimple_val, fb_rvalue);
7710 gimplify_omp_ctxp = ctx;
7711 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
7712 OMP_CLAUSE_MAP);
7713 OMP_CLAUSE_DECL (nc) = decl;
7714 OMP_CLAUSE_SIZE (nc) = size_zero_node;
7715 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_REFERENCE);
7716 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
7717 OMP_CLAUSE_CHAIN (clause) = nc;
7718 }
7719 else
7720 OMP_CLAUSE_SIZE (clause) = DECL_SIZE_UNIT (decl);
7721 }
7722 if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_LASTPRIVATE) != 0)
7723 {
7724 tree nc = build_omp_clause (input_location, OMP_CLAUSE_LASTPRIVATE);
7725 OMP_CLAUSE_DECL (nc) = decl;
7726 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (nc) = 1;
7727 OMP_CLAUSE_CHAIN (nc) = *list_p;
7728 OMP_CLAUSE_CHAIN (clause) = nc;
7729 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
7730 gimplify_omp_ctxp = ctx->outer_context;
7731 lang_hooks.decls.omp_finish_clause (nc, pre_p);
7732 gimplify_omp_ctxp = ctx;
7733 }
7734 *list_p = clause;
7735 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
7736 gimplify_omp_ctxp = ctx->outer_context;
7737 lang_hooks.decls.omp_finish_clause (clause, pre_p);
7738 gimplify_omp_ctxp = ctx;
7739 return 0;
7740 }
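/* Illustrative example of what the callback above produces (hypothetical
   user code):

     int n = 1;
     #pragma omp parallel
     use (n);

   n is recorded as GOVD_SHARED | GOVD_SEEN while the body is processed,
   so an implicit shared clause for n is emitted here; if n is never
   written in the region, OMP_CLAUSE_SHARED_READONLY is set on that
   clause as well.  */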
7741
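/* Final clause-list pass for an OpenMP/OpenACC region: walk BODY for
   stores to shared decls (omp_find_stores_*), remove PRIVATE, SHARED and
   FIRSTPRIVATE clauses for variables not actually used within the region,
   rewrite the remaining explicit clauses in *LIST_P, and finally emit the
   implicit clauses via gimplify_adjust_omp_clauses_1.  */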
7742 static void
7743 gimplify_adjust_omp_clauses (gimple_seq *pre_p, gimple_seq body, tree *list_p,
7744 enum tree_code code)
7745 {
7746 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
7747 tree c, decl;
7748
7749 if (body)
7750 {
7751 struct gimplify_omp_ctx *octx;
7752 for (octx = ctx; octx; octx = octx->outer_context)
7753 if ((octx->region_type & (ORT_PARALLEL | ORT_TASK | ORT_TEAMS)) != 0)
7754 break;
7755 if (octx)
7756 {
7757 struct walk_stmt_info wi;
7758 memset (&wi, 0, sizeof (wi));
7759 walk_gimple_seq (body, omp_find_stores_stmt,
7760 omp_find_stores_op, &wi);
7761 }
7762 }
7763 while ((c = *list_p) != NULL)
7764 {
7765 splay_tree_node n;
7766 bool remove = false;
7767
7768 switch (OMP_CLAUSE_CODE (c))
7769 {
7770 case OMP_CLAUSE_PRIVATE:
7771 case OMP_CLAUSE_SHARED:
7772 case OMP_CLAUSE_FIRSTPRIVATE:
7773 case OMP_CLAUSE_LINEAR:
7774 decl = OMP_CLAUSE_DECL (c);
7775 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
7776 remove = !(n->value & GOVD_SEEN);
7777 if (! remove)
7778 {
7779 bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
7780 if ((n->value & GOVD_DEBUG_PRIVATE)
7781 || lang_hooks.decls.omp_private_debug_clause (decl, shared))
7782 {
7783 gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
7784 || ((n->value & GOVD_DATA_SHARE_CLASS)
7785 == GOVD_PRIVATE));
7786 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
7787 OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
7788 }
7789 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
7790 && (n->value & GOVD_WRITTEN) == 0
7791 && DECL_P (decl)
7792 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
7793 OMP_CLAUSE_SHARED_READONLY (c) = 1;
7794 else if (DECL_P (decl)
7795 && ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
7796 && (n->value & GOVD_WRITTEN) != 0)
7797 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
7798 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
7799 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
7800 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
7801 }
7802 break;
7803
7804 case OMP_CLAUSE_LASTPRIVATE:
7805 /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
7806 accurately reflect the presence of a FIRSTPRIVATE clause. */
7807 decl = OMP_CLAUSE_DECL (c);
7808 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
7809 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
7810 = (n->value & GOVD_FIRSTPRIVATE) != 0;
7811 if (omp_no_lastprivate (ctx))
7812 {
7813 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
7814 remove = true;
7815 else
7816 OMP_CLAUSE_CODE (c) = OMP_CLAUSE_PRIVATE;
7817 }
7818 else if (code == OMP_DISTRIBUTE
7819 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
7820 {
7821 remove = true;
7822 error_at (OMP_CLAUSE_LOCATION (c),
7823 "same variable used in %<firstprivate%> and "
7824 "%<lastprivate%> clauses on %<distribute%> "
7825 "construct");
7826 }
7827 if (!remove
7828 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7829 && DECL_P (decl)
7830 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
7831 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
7832 break;
7833
7834 case OMP_CLAUSE_ALIGNED:
7835 decl = OMP_CLAUSE_DECL (c);
7836 if (!is_global_var (decl))
7837 {
7838 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
7839 remove = n == NULL || !(n->value & GOVD_SEEN);
7840 if (!remove && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
7841 {
7842 struct gimplify_omp_ctx *octx;
7843 if (n != NULL
7844 && (n->value & (GOVD_DATA_SHARE_CLASS
7845 & ~GOVD_FIRSTPRIVATE)))
7846 remove = true;
7847 else
7848 for (octx = ctx->outer_context; octx;
7849 octx = octx->outer_context)
7850 {
7851 n = splay_tree_lookup (octx->variables,
7852 (splay_tree_key) decl);
7853 if (n == NULL)
7854 continue;
7855 if (n->value & GOVD_LOCAL)
7856 break;
7857 /* We have to avoid assigning a shared variable
7858 to itself when trying to add
7859 __builtin_assume_aligned. */
7860 if (n->value & GOVD_SHARED)
7861 {
7862 remove = true;
7863 break;
7864 }
7865 }
7866 }
7867 }
7868 else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
7869 {
7870 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
7871 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
7872 remove = true;
7873 }
7874 break;
7875
7876 case OMP_CLAUSE_MAP:
7877 if (code == OMP_TARGET_EXIT_DATA
7878 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER)
7879 {
7880 remove = true;
7881 break;
7882 }
7883 decl = OMP_CLAUSE_DECL (c);
7884 if (!DECL_P (decl))
7885 {
7886 if ((ctx->region_type & ORT_TARGET) != 0
7887 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
7888 {
7889 if (TREE_CODE (decl) == INDIRECT_REF
7890 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
7891 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
7892 == REFERENCE_TYPE))
7893 decl = TREE_OPERAND (decl, 0);
7894 if (TREE_CODE (decl) == COMPONENT_REF)
7895 {
7896 while (TREE_CODE (decl) == COMPONENT_REF)
7897 decl = TREE_OPERAND (decl, 0);
7898 if (DECL_P (decl))
7899 {
7900 n = splay_tree_lookup (ctx->variables,
7901 (splay_tree_key) decl);
7902 if (!(n->value & GOVD_SEEN))
7903 remove = true;
7904 }
7905 }
7906 }
7907 break;
7908 }
7909 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
7910 if ((ctx->region_type & ORT_TARGET) != 0
7911 && !(n->value & GOVD_SEEN)
7912 && GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) == 0
7913 && !lookup_attribute ("omp declare target link",
7914 DECL_ATTRIBUTES (decl)))
7915 {
7916 remove = true;
7917 /* For struct element mapping, if the struct is never referenced
7918    in the target block and none of the mappings has an always
7919    modifier, remove all the struct element mappings, which
7920    immediately follow the GOMP_MAP_STRUCT map clause.  */
7921 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT)
7922 {
7923 HOST_WIDE_INT cnt = tree_to_shwi (OMP_CLAUSE_SIZE (c));
7924 while (cnt--)
7925 OMP_CLAUSE_CHAIN (c)
7926 = OMP_CLAUSE_CHAIN (OMP_CLAUSE_CHAIN (c));
7927 }
7928 }
7929 else if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT
7930 && code == OMP_TARGET_EXIT_DATA)
7931 remove = true;
7932 else if (DECL_SIZE (decl)
7933 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST
7934 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_POINTER
7935 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
7936 && (OMP_CLAUSE_MAP_KIND (c)
7937 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
7938 {
7939 /* For GOMP_MAP_FORCE_DEVICEPTR, we'll never enter here, because
7940 for these, TREE_CODE (DECL_SIZE (decl)) will always be
7941 INTEGER_CST. */
7942 gcc_assert (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FORCE_DEVICEPTR);
7943
7944 tree decl2 = DECL_VALUE_EXPR (decl);
7945 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
7946 decl2 = TREE_OPERAND (decl2, 0);
7947 gcc_assert (DECL_P (decl2));
7948 tree mem = build_simple_mem_ref (decl2);
7949 OMP_CLAUSE_DECL (c) = mem;
7950 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
7951 if (ctx->outer_context)
7952 {
7953 omp_notice_variable (ctx->outer_context, decl2, true);
7954 omp_notice_variable (ctx->outer_context,
7955 OMP_CLAUSE_SIZE (c), true);
7956 }
7957 if (((ctx->region_type & ORT_TARGET) != 0
7958 || !ctx->target_firstprivatize_array_bases)
7959 && ((n->value & GOVD_SEEN) == 0
7960 || (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE)) == 0))
7961 {
7962 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
7963 OMP_CLAUSE_MAP);
7964 OMP_CLAUSE_DECL (nc) = decl;
7965 OMP_CLAUSE_SIZE (nc) = size_zero_node;
7966 if (ctx->target_firstprivatize_array_bases)
7967 OMP_CLAUSE_SET_MAP_KIND (nc,
7968 GOMP_MAP_FIRSTPRIVATE_POINTER);
7969 else
7970 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
7971 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (c);
7972 OMP_CLAUSE_CHAIN (c) = nc;
7973 c = nc;
7974 }
7975 }
7976 else
7977 {
7978 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
7979 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
7980 gcc_assert ((n->value & GOVD_SEEN) == 0
7981 || ((n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
7982 == 0));
7983 }
7984 break;
7985
7986 case OMP_CLAUSE_TO:
7987 case OMP_CLAUSE_FROM:
7988 case OMP_CLAUSE__CACHE_:
7989 decl = OMP_CLAUSE_DECL (c);
7990 if (!DECL_P (decl))
7991 break;
7992 if (DECL_SIZE (decl)
7993 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
7994 {
7995 tree decl2 = DECL_VALUE_EXPR (decl);
7996 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
7997 decl2 = TREE_OPERAND (decl2, 0);
7998 gcc_assert (DECL_P (decl2));
7999 tree mem = build_simple_mem_ref (decl2);
8000 OMP_CLAUSE_DECL (c) = mem;
8001 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
8002 if (ctx->outer_context)
8003 {
8004 omp_notice_variable (ctx->outer_context, decl2, true);
8005 omp_notice_variable (ctx->outer_context,
8006 OMP_CLAUSE_SIZE (c), true);
8007 }
8008 }
8009 else if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
8010 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
8011 break;
8012
8013 case OMP_CLAUSE_REDUCTION:
8014 decl = OMP_CLAUSE_DECL (c);
8015 if (DECL_P (decl)
8016 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
8017 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
8018 break;
8019 case OMP_CLAUSE_COPYIN:
8020 case OMP_CLAUSE_COPYPRIVATE:
8021 case OMP_CLAUSE_IF:
8022 case OMP_CLAUSE_NUM_THREADS:
8023 case OMP_CLAUSE_NUM_TEAMS:
8024 case OMP_CLAUSE_THREAD_LIMIT:
8025 case OMP_CLAUSE_DIST_SCHEDULE:
8026 case OMP_CLAUSE_DEVICE:
8027 case OMP_CLAUSE_SCHEDULE:
8028 case OMP_CLAUSE_NOWAIT:
8029 case OMP_CLAUSE_ORDERED:
8030 case OMP_CLAUSE_DEFAULT:
8031 case OMP_CLAUSE_UNTIED:
8032 case OMP_CLAUSE_COLLAPSE:
8033 case OMP_CLAUSE_FINAL:
8034 case OMP_CLAUSE_MERGEABLE:
8035 case OMP_CLAUSE_PROC_BIND:
8036 case OMP_CLAUSE_SAFELEN:
8037 case OMP_CLAUSE_SIMDLEN:
8038 case OMP_CLAUSE_DEPEND:
8039 case OMP_CLAUSE_PRIORITY:
8040 case OMP_CLAUSE_GRAINSIZE:
8041 case OMP_CLAUSE_NUM_TASKS:
8042 case OMP_CLAUSE_NOGROUP:
8043 case OMP_CLAUSE_THREADS:
8044 case OMP_CLAUSE_SIMD:
8045 case OMP_CLAUSE_HINT:
8046 case OMP_CLAUSE_DEFAULTMAP:
8047 case OMP_CLAUSE_USE_DEVICE_PTR:
8048 case OMP_CLAUSE_IS_DEVICE_PTR:
8049 case OMP_CLAUSE__CILK_FOR_COUNT_:
8050 case OMP_CLAUSE_ASYNC:
8051 case OMP_CLAUSE_WAIT:
8052 case OMP_CLAUSE_DEVICE_RESIDENT:
8053 case OMP_CLAUSE_INDEPENDENT:
8054 case OMP_CLAUSE_NUM_GANGS:
8055 case OMP_CLAUSE_NUM_WORKERS:
8056 case OMP_CLAUSE_VECTOR_LENGTH:
8057 case OMP_CLAUSE_GANG:
8058 case OMP_CLAUSE_WORKER:
8059 case OMP_CLAUSE_VECTOR:
8060 case OMP_CLAUSE_AUTO:
8061 case OMP_CLAUSE_SEQ:
8062 case OMP_CLAUSE_TILE:
8063 break;
8064
8065 default:
8066 gcc_unreachable ();
8067 }
8068
8069 if (remove)
8070 *list_p = OMP_CLAUSE_CHAIN (c);
8071 else
8072 list_p = &OMP_CLAUSE_CHAIN (c);
8073 }
8074
8075 /* Add in any implicit data sharing. */
8076 struct gimplify_adjust_omp_clauses_data data;
8077 data.list_p = list_p;
8078 data.pre_p = pre_p;
8079 splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, &data);
8080
8081 gimplify_omp_ctxp = ctx->outer_context;
8082 delete_omp_context (ctx);
8083 }
8084
8085 /* Gimplify OACC_CACHE. */
8086
8087 static void
8088 gimplify_oacc_cache (tree *expr_p, gimple_seq *pre_p)
8089 {
8090 tree expr = *expr_p;
8091
8092 gimplify_scan_omp_clauses (&OACC_CACHE_CLAUSES (expr), pre_p, ORT_ACC,
8093 OACC_CACHE);
8094 gimplify_adjust_omp_clauses (pre_p, NULL, &OACC_CACHE_CLAUSES (expr),
8095 OACC_CACHE);
8096
8097 /* TODO: Do something sensible with this information. */
8098
8099 *expr_p = NULL_TREE;
8100 }
8101
8102 /* Helper function of gimplify_oacc_declare.  If required, split the map
8103    kind in CLAUSE into an 'entry' kind and an 'exit' kind.  The entry
8104    kind replaces the one in CLAUSE, while the exit kind is put into a new
8105    map clause that is returned to the caller (NULL_TREE if none).  */
8106
8107 static tree
8108 gimplify_oacc_declare_1 (tree clause)
8109 {
8110 HOST_WIDE_INT kind, new_op;
8111 bool ret = false;
8112 tree c = NULL;
8113
8114 kind = OMP_CLAUSE_MAP_KIND (clause);
8115
8116 switch (kind)
8117 {
8118 case GOMP_MAP_ALLOC:
8119 case GOMP_MAP_FORCE_ALLOC:
8120 case GOMP_MAP_FORCE_TO:
8121 new_op = GOMP_MAP_FORCE_DEALLOC;
8122 ret = true;
8123 break;
8124
8125 case GOMP_MAP_FORCE_FROM:
8126 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_ALLOC);
8127 new_op = GOMP_MAP_FORCE_FROM;
8128 ret = true;
8129 break;
8130
8131 case GOMP_MAP_FORCE_TOFROM:
8132 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_TO);
8133 new_op = GOMP_MAP_FORCE_FROM;
8134 ret = true;
8135 break;
8136
8137 case GOMP_MAP_FROM:
8138 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_ALLOC);
8139 new_op = GOMP_MAP_FROM;
8140 ret = true;
8141 break;
8142
8143 case GOMP_MAP_TOFROM:
8144 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_TO);
8145 new_op = GOMP_MAP_FROM;
8146 ret = true;
8147 break;
8148
8149 case GOMP_MAP_DEVICE_RESIDENT:
8150 case GOMP_MAP_FORCE_DEVICEPTR:
8151 case GOMP_MAP_FORCE_PRESENT:
8152 case GOMP_MAP_LINK:
8153 case GOMP_MAP_POINTER:
8154 case GOMP_MAP_TO:
8155 break;
8156
8157 default:
8158 gcc_unreachable ();
8159 break;
8160 }
8161
8162 if (ret)
8163 {
8164 c = build_omp_clause (OMP_CLAUSE_LOCATION (clause), OMP_CLAUSE_MAP);
8165 OMP_CLAUSE_SET_MAP_KIND (c, new_op);
8166 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clause);
8167 }
8168
8169 return c;
8170 }
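/* For example (a sketch, assuming the front end lowers OpenACC
   "declare copy (x)" to GOMP_MAP_FORCE_TOFROM): the clause is rewritten
   to GOMP_MAP_FORCE_TO for the entry, and the clause returned above
   carries GOMP_MAP_FORCE_FROM for the matching exit.  */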
8171
8172 /* Gimplify OACC_DECLARE. */
8173
8174 static void
8175 gimplify_oacc_declare (tree *expr_p, gimple_seq *pre_p)
8176 {
8177 tree expr = *expr_p;
8178 gomp_target *stmt;
8179 tree clauses, t;
8180
8181 clauses = OACC_DECLARE_CLAUSES (expr);
8182
8183 gimplify_scan_omp_clauses (&clauses, pre_p, ORT_TARGET_DATA, OACC_DECLARE);
8184
8185 for (t = clauses; t; t = OMP_CLAUSE_CHAIN (t))
8186 {
8187 tree decl = OMP_CLAUSE_DECL (t);
8188
8189 if (TREE_CODE (decl) == MEM_REF)
8190 continue;
8191
8192 if (TREE_CODE (decl) == VAR_DECL
8193 && !is_global_var (decl)
8194 && DECL_CONTEXT (decl) == current_function_decl)
8195 {
8196 tree c = gimplify_oacc_declare_1 (t);
8197 if (c)
8198 {
8199 if (oacc_declare_returns == NULL)
8200 oacc_declare_returns = new hash_map<tree, tree>;
8201
8202 oacc_declare_returns->put (decl, c);
8203 }
8204 }
8205
8206 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_SEEN);
8207 }
8208
8209 stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
8210 clauses);
8211
8212 gimplify_seq_add_stmt (pre_p, stmt);
8213
8214 *expr_p = NULL_TREE;
8215 }
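/* Note on the result above: the clauses end up on a
   GF_OMP_TARGET_KIND_OACC_DECLARE target statement added to *PRE_P, while
   any 'exit' clauses computed by gimplify_oacc_declare_1 are stashed in
   oacc_declare_returns, keyed by decl, for use later in gimplification.  */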
8216
8217 /* Gimplify the contents of an OMP_PARALLEL statement. This involves
8218 gimplification of the body, as well as scanning the body for used
8219 variables. We need to do this scan now, because variable-sized
8220 decls will be decomposed during gimplification. */
8221
8222 static void
8223 gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
8224 {
8225 tree expr = *expr_p;
8226 gimple *g;
8227 gimple_seq body = NULL;
8228
8229 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
8230 OMP_PARALLEL_COMBINED (expr)
8231 ? ORT_COMBINED_PARALLEL
8232 : ORT_PARALLEL, OMP_PARALLEL);
8233
8234 push_gimplify_context ();
8235
8236 g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
8237 if (gimple_code (g) == GIMPLE_BIND)
8238 pop_gimplify_context (g);
8239 else
8240 pop_gimplify_context (NULL);
8241
8242 gimplify_adjust_omp_clauses (pre_p, body, &OMP_PARALLEL_CLAUSES (expr),
8243 OMP_PARALLEL);
8244
8245 g = gimple_build_omp_parallel (body,
8246 OMP_PARALLEL_CLAUSES (expr),
8247 NULL_TREE, NULL_TREE);
8248 if (OMP_PARALLEL_COMBINED (expr))
8249 gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
8250 gimplify_seq_add_stmt (pre_p, g);
8251 *expr_p = NULL_TREE;
8252 }
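/* Illustrative example (hypothetical) of why the used-variable scan must
   precede gimplification of the body:

     void f (int n)
     {
       int vla[n];
       #pragma omp parallel
       vla[0] = n;
     }

   Gimplification rewrites the variable-sized "vla" into an access through
   a pointer with a DECL_VALUE_EXPR, so the scan has to record the
   original decl first.  */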
8253
8254 /* Gimplify the contents of an OMP_TASK statement. This involves
8255 gimplification of the body, as well as scanning the body for used
8256 variables. We need to do this scan now, because variable-sized
8257 decls will be decomposed during gimplification. */
8258
8259 static void
8260 gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
8261 {
8262 tree expr = *expr_p;
8263 gimple *g;
8264 gimple_seq body = NULL;
8265
8266 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p,
8267 find_omp_clause (OMP_TASK_CLAUSES (expr),
8268 OMP_CLAUSE_UNTIED)
8269 ? ORT_UNTIED_TASK : ORT_TASK, OMP_TASK);
8270
8271 push_gimplify_context ();
8272
8273 g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
8274 if (gimple_code (g) == GIMPLE_BIND)
8275 pop_gimplify_context (g);
8276 else
8277 pop_gimplify_context (NULL);
8278
8279 gimplify_adjust_omp_clauses (pre_p, body, &OMP_TASK_CLAUSES (expr),
8280 OMP_TASK);
8281
8282 g = gimple_build_omp_task (body,
8283 OMP_TASK_CLAUSES (expr),
8284 NULL_TREE, NULL_TREE,
8285 NULL_TREE, NULL_TREE, NULL_TREE);
8286 gimplify_seq_add_stmt (pre_p, g);
8287 *expr_p = NULL_TREE;
8288 }
8289
8290 /* Helper function of gimplify_omp_for: find the innermost OMP_FOR or
8291    OMP_SIMD with non-NULL OMP_FOR_INIT inside a combined construct.  */
8292
8293 static tree
8294 find_combined_omp_for (tree *tp, int *walk_subtrees, void *)
8295 {
8296 *walk_subtrees = 0;
8297 switch (TREE_CODE (*tp))
8298 {
8299 case OMP_FOR:
8300 *walk_subtrees = 1;
8301 /* FALLTHRU */
8302 case OMP_SIMD:
8303 if (OMP_FOR_INIT (*tp) != NULL_TREE)
8304 return *tp;
8305 break;
8306 case BIND_EXPR:
8307 case STATEMENT_LIST:
8308 case OMP_PARALLEL:
8309 *walk_subtrees = 1;
8310 break;
8311 default:
8312 break;
8313 }
8314 return NULL_TREE;
8315 }
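/* E.g. for a combined construct such as "#pragma omp for simd" (a
   sketch), gimplify_omp_for is given the outer OMP_FOR, whose
   OMP_FOR_INIT is NULL; this walker then locates the inner OMP_SIMD that
   carries the actual init/cond/incr vectors.  */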
8316
8317 /* Gimplify the gross structure of an OMP_FOR statement. */
8318
8319 static enum gimplify_status
8320 gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
8321 {
8322 tree for_stmt, orig_for_stmt, inner_for_stmt = NULL_TREE, decl, var, t;
8323 enum gimplify_status ret = GS_ALL_DONE;
8324 enum gimplify_status tret;
8325 gomp_for *gfor;
8326 gimple_seq for_body, for_pre_body;
8327 int i;
8328 bitmap has_decl_expr = NULL;
8329 enum omp_region_type ort = ORT_WORKSHARE;
8330
8331 orig_for_stmt = for_stmt = *expr_p;
8332
8333 switch (TREE_CODE (for_stmt))
8334 {
8335 case OMP_FOR:
8336 case CILK_FOR:
8337 case OMP_DISTRIBUTE:
8338 break;
8339 case OACC_LOOP:
8340 ort = ORT_ACC;
8341 break;
8342 case OMP_TASKLOOP:
8343 if (find_omp_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_UNTIED))
8344 ort = ORT_UNTIED_TASK;
8345 else
8346 ort = ORT_TASK;
8347 break;
8348 case OMP_SIMD:
8349 case CILK_SIMD:
8350 ort = ORT_SIMD;
8351 break;
8352 default:
8353 gcc_unreachable ();
8354 }
8355
8356 /* Set OMP_CLAUSE_LINEAR_NO_COPYIN flag on explicit linear
8357 clause for the IV. */
8358 if (ort == ORT_SIMD && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
8359 {
8360 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), 0);
8361 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
8362 decl = TREE_OPERAND (t, 0);
8363 for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
8364 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
8365 && OMP_CLAUSE_DECL (c) == decl)
8366 {
8367 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
8368 break;
8369 }
8370 }
8371
8372 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
8373 {
8374 gcc_assert (TREE_CODE (for_stmt) != OACC_LOOP);
8375 inner_for_stmt = walk_tree (&OMP_FOR_BODY (for_stmt),
8376 find_combined_omp_for, NULL, NULL);
8377 if (inner_for_stmt == NULL_TREE)
8378 {
8379 gcc_assert (seen_error ());
8380 *expr_p = NULL_TREE;
8381 return GS_ERROR;
8382 }
8383 }
8384
8385 if (TREE_CODE (for_stmt) != OMP_TASKLOOP)
8386 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p, ort,
8387 TREE_CODE (for_stmt));
8388
8389 if (TREE_CODE (for_stmt) == OMP_DISTRIBUTE)
8390 gimplify_omp_ctxp->distribute = true;
8391
8392 /* Handle OMP_FOR_INIT. */
8393 for_pre_body = NULL;
8394 if (ort == ORT_SIMD && OMP_FOR_PRE_BODY (for_stmt))
8395 {
8396 has_decl_expr = BITMAP_ALLOC (NULL);
8397 if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == DECL_EXPR
8398 && TREE_CODE (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt)))
8399 == VAR_DECL)
8400 {
8401 t = OMP_FOR_PRE_BODY (for_stmt);
8402 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
8403 }
8404 else if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == STATEMENT_LIST)
8405 {
8406 tree_stmt_iterator si;
8407 for (si = tsi_start (OMP_FOR_PRE_BODY (for_stmt)); !tsi_end_p (si);
8408 tsi_next (&si))
8409 {
8410 t = tsi_stmt (si);
8411 if (TREE_CODE (t) == DECL_EXPR
8412 && TREE_CODE (DECL_EXPR_DECL (t)) == VAR_DECL)
8413 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
8414 }
8415 }
8416 }
8417 if (OMP_FOR_PRE_BODY (for_stmt))
8418 {
8419 if (TREE_CODE (for_stmt) != OMP_TASKLOOP || gimplify_omp_ctxp)
8420 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
8421 else
8422 {
8423 struct gimplify_omp_ctx ctx;
8424 memset (&ctx, 0, sizeof (ctx));
8425 ctx.region_type = ORT_NONE;
8426 gimplify_omp_ctxp = &ctx;
8427 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
8428 gimplify_omp_ctxp = NULL;
8429 }
8430 }
8431 OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;
8432
8433 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
8434 for_stmt = inner_for_stmt;
8435
8436 /* For a taskloop, the start, end and step expressions need to be
8437    gimplified before the taskloop, outside of its omp context.  */
8438 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
8439 {
8440 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
8441 {
8442 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
8443 if (!is_gimple_constant (TREE_OPERAND (t, 1)))
8444 {
8445 TREE_OPERAND (t, 1)
8446 = get_initialized_tmp_var (TREE_OPERAND (t, 1),
8447 pre_p, NULL);
8448 tree c = build_omp_clause (input_location,
8449 OMP_CLAUSE_FIRSTPRIVATE);
8450 OMP_CLAUSE_DECL (c) = TREE_OPERAND (t, 1);
8451 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
8452 OMP_FOR_CLAUSES (orig_for_stmt) = c;
8453 }
8454
8455 /* Handle OMP_FOR_COND. */
8456 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
8457 if (!is_gimple_constant (TREE_OPERAND (t, 1)))
8458 {
8459 TREE_OPERAND (t, 1)
8460 = get_initialized_tmp_var (TREE_OPERAND (t, 1),
8461 gimple_seq_empty_p (for_pre_body)
8462 ? pre_p : &for_pre_body, NULL);
8463 tree c = build_omp_clause (input_location,
8464 OMP_CLAUSE_FIRSTPRIVATE);
8465 OMP_CLAUSE_DECL (c) = TREE_OPERAND (t, 1);
8466 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
8467 OMP_FOR_CLAUSES (orig_for_stmt) = c;
8468 }
8469
8470 /* Handle OMP_FOR_INCR. */
8471 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
8472 if (TREE_CODE (t) == MODIFY_EXPR)
8473 {
8474 decl = TREE_OPERAND (t, 0);
8475 t = TREE_OPERAND (t, 1);
8476 tree *tp = &TREE_OPERAND (t, 1);
8477 if (TREE_CODE (t) == PLUS_EXPR && *tp == decl)
8478 tp = &TREE_OPERAND (t, 0);
8479
8480 if (!is_gimple_constant (*tp))
8481 {
8482 gimple_seq *seq = gimple_seq_empty_p (for_pre_body)
8483 ? pre_p : &for_pre_body;
8484 *tp = get_initialized_tmp_var (*tp, seq, NULL);
8485 tree c = build_omp_clause (input_location,
8486 OMP_CLAUSE_FIRSTPRIVATE);
8487 OMP_CLAUSE_DECL (c) = *tp;
8488 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
8489 OMP_FOR_CLAUSES (orig_for_stmt) = c;
8490 }
8491 }
8492 }
8493
8494 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (orig_for_stmt), pre_p, ort,
8495 OMP_TASKLOOP);
8496 }
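/* A sketch of the effect: for a taskloop whose condition is "i < n", the
   value of n is saved into a temporary (say D.1234, a hypothetical name)
   before the taskloop and a firstprivate (D.1234) clause is added, so the
   deferred tasks see the bounds as evaluated at construct entry.  */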
8497
8498 if (orig_for_stmt != for_stmt)
8499 gimplify_omp_ctxp->combined_loop = true;
8500
8501 for_body = NULL;
8502 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
8503 == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
8504 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
8505 == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));
8506
8507 tree c = find_omp_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ORDERED);
8508 bool is_doacross = false;
8509 if (c && OMP_CLAUSE_ORDERED_EXPR (c))
8510 {
8511 is_doacross = true;
8512 gimplify_omp_ctxp->loop_iter_var.create (TREE_VEC_LENGTH
8513 (OMP_FOR_INIT (for_stmt))
8514 * 2);
8515 }
8516 int collapse = 1;
8517 c = find_omp_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_COLLAPSE);
8518 if (c)
8519 collapse = tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (c));
8520 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
8521 {
8522 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
8523 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
8524 decl = TREE_OPERAND (t, 0);
8525 gcc_assert (DECL_P (decl));
8526 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
8527 || POINTER_TYPE_P (TREE_TYPE (decl)));
8528 if (is_doacross)
8529 {
8530 if (TREE_CODE (for_stmt) == OMP_FOR && OMP_FOR_ORIG_DECLS (for_stmt))
8531 gimplify_omp_ctxp->loop_iter_var.quick_push
8532 (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i));
8533 else
8534 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
8535 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
8536 }
8537
8538 /* Make sure the iteration variable is private. */
8539 tree c = NULL_TREE;
8540 tree c2 = NULL_TREE;
8541 if (orig_for_stmt != for_stmt)
8542 /* Do this only on innermost construct for combined ones. */;
8543 else if (ort == ORT_SIMD)
8544 {
8545 splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
8546 (splay_tree_key) decl);
8547 omp_is_private (gimplify_omp_ctxp, decl,
8548 1 + (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
8549 != 1));
8550 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
8551 omp_notice_variable (gimplify_omp_ctxp, decl, true);
8552 else if (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
8553 {
8554 c = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
8555 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
8556 unsigned int flags = GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN;
8557 if ((has_decl_expr
8558 && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
8559 || omp_no_lastprivate (gimplify_omp_ctxp))
8560 {
8561 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
8562 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
8563 }
8564 struct gimplify_omp_ctx *outer
8565 = gimplify_omp_ctxp->outer_context;
8566 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
8567 {
8568 if (outer->region_type == ORT_WORKSHARE
8569 && outer->combined_loop)
8570 {
8571 n = splay_tree_lookup (outer->variables,
8572 (splay_tree_key)decl);
8573 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
8574 {
8575 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
8576 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
8577 }
8578 else
8579 {
8580 struct gimplify_omp_ctx *octx = outer->outer_context;
8581 if (octx
8582 && octx->region_type == ORT_COMBINED_PARALLEL
8583 && octx->outer_context
8584 && (octx->outer_context->region_type
8585 == ORT_WORKSHARE)
8586 && octx->outer_context->combined_loop)
8587 {
8588 octx = octx->outer_context;
8589 n = splay_tree_lookup (octx->variables,
8590 (splay_tree_key)decl);
8591 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
8592 {
8593 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
8594 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
8595 }
8596 }
8597 }
8598 }
8599 }
8600
8601 OMP_CLAUSE_DECL (c) = decl;
8602 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
8603 OMP_FOR_CLAUSES (for_stmt) = c;
8604 omp_add_variable (gimplify_omp_ctxp, decl, flags);
8605 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
8606 {
8607 if (outer->region_type == ORT_WORKSHARE
8608 && outer->combined_loop)
8609 {
8610 if (outer->outer_context
8611 && (outer->outer_context->region_type
8612 == ORT_COMBINED_PARALLEL))
8613 outer = outer->outer_context;
8614 else if (omp_check_private (outer, decl, false))
8615 outer = NULL;
8616 }
8617 else if (((outer->region_type & ORT_TASK) != 0)
8618 && outer->combined_loop
8619 && !omp_check_private (gimplify_omp_ctxp,
8620 decl, false))
8621 ;
8622 else if (outer->region_type != ORT_COMBINED_PARALLEL)
8623 outer = NULL;
8624 if (outer)
8625 {
8626 n = splay_tree_lookup (outer->variables,
8627 (splay_tree_key)decl);
8628 if (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
8629 {
8630 omp_add_variable (outer, decl,
8631 GOVD_LASTPRIVATE | GOVD_SEEN);
8632 if (outer->region_type == ORT_COMBINED_PARALLEL
8633 && outer->outer_context
8634 && (outer->outer_context->region_type
8635 == ORT_WORKSHARE)
8636 && outer->outer_context->combined_loop)
8637 {
8638 outer = outer->outer_context;
8639 n = splay_tree_lookup (outer->variables,
8640 (splay_tree_key)decl);
8641 if (omp_check_private (outer, decl, false))
8642 outer = NULL;
8643 else if (n == NULL
8644 || ((n->value & GOVD_DATA_SHARE_CLASS)
8645 == 0))
8646 omp_add_variable (outer, decl,
8647 GOVD_LASTPRIVATE
8648 | GOVD_SEEN);
8649 else
8650 outer = NULL;
8651 }
8652 if (outer && outer->outer_context
8653 && (outer->outer_context->region_type
8654 == ORT_COMBINED_TEAMS))
8655 {
8656 outer = outer->outer_context;
8657 n = splay_tree_lookup (outer->variables,
8658 (splay_tree_key)decl);
8659 if (n == NULL
8660 || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
8661 omp_add_variable (outer, decl,
8662 GOVD_SHARED | GOVD_SEEN);
8663 else
8664 outer = NULL;
8665 }
8666 if (outer && outer->outer_context)
8667 omp_notice_variable (outer->outer_context, decl,
8668 true);
8669 }
8670 }
8671 }
8672 }
8673 else
8674 {
8675 bool lastprivate
8676 = (!has_decl_expr
8677 || !bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
8678 && !omp_no_lastprivate (gimplify_omp_ctxp);
8679 struct gimplify_omp_ctx *outer
8680 = gimplify_omp_ctxp->outer_context;
8681 if (outer && lastprivate)
8682 {
8683 if (outer->region_type == ORT_WORKSHARE
8684 && outer->combined_loop)
8685 {
8686 n = splay_tree_lookup (outer->variables,
8687 (splay_tree_key)decl);
8688 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
8689 {
8690 lastprivate = false;
8691 outer = NULL;
8692 }
8693 else if (outer->outer_context
8694 && (outer->outer_context->region_type
8695 == ORT_COMBINED_PARALLEL))
8696 outer = outer->outer_context;
8697 else if (omp_check_private (outer, decl, false))
8698 outer = NULL;
8699 }
8700 else if (((outer->region_type & ORT_TASK) != 0)
8701 && outer->combined_loop
8702 && !omp_check_private (gimplify_omp_ctxp,
8703 decl, false))
8704 ;
8705 else if (outer->region_type != ORT_COMBINED_PARALLEL)
8706 outer = NULL;
8707 if (outer)
8708 {
8709 n = splay_tree_lookup (outer->variables,
8710 (splay_tree_key)decl);
8711 if (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
8712 {
8713 omp_add_variable (outer, decl,
8714 GOVD_LASTPRIVATE | GOVD_SEEN);
8715 if (outer->region_type == ORT_COMBINED_PARALLEL
8716 && outer->outer_context
8717 && (outer->outer_context->region_type
8718 == ORT_WORKSHARE)
8719 && outer->outer_context->combined_loop)
8720 {
8721 outer = outer->outer_context;
8722 n = splay_tree_lookup (outer->variables,
8723 (splay_tree_key)decl);
8724 if (omp_check_private (outer, decl, false))
8725 outer = NULL;
8726 else if (n == NULL
8727 || ((n->value & GOVD_DATA_SHARE_CLASS)
8728 == 0))
8729 omp_add_variable (outer, decl,
8730 GOVD_LASTPRIVATE
8731 | GOVD_SEEN);
8732 else
8733 outer = NULL;
8734 }
8735 if (outer && outer->outer_context
8736 && (outer->outer_context->region_type
8737 == ORT_COMBINED_TEAMS))
8738 {
8739 outer = outer->outer_context;
8740 n = splay_tree_lookup (outer->variables,
8741 (splay_tree_key)decl);
8742 if (n == NULL
8743 || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
8744 omp_add_variable (outer, decl,
8745 GOVD_SHARED | GOVD_SEEN);
8746 else
8747 outer = NULL;
8748 }
8749 if (outer && outer->outer_context)
8750 omp_notice_variable (outer->outer_context, decl,
8751 true);
8752 }
8753 }
8754 }
8755
8756 c = build_omp_clause (input_location,
8757 lastprivate ? OMP_CLAUSE_LASTPRIVATE
8758 : OMP_CLAUSE_PRIVATE);
8759 OMP_CLAUSE_DECL (c) = decl;
8760 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
8761 OMP_FOR_CLAUSES (for_stmt) = c;
8762 omp_add_variable (gimplify_omp_ctxp, decl,
8763 (lastprivate ? GOVD_LASTPRIVATE : GOVD_PRIVATE)
8764 | GOVD_EXPLICIT | GOVD_SEEN);
8765 c = NULL_TREE;
8766 }
8767 }
8768 else if (omp_is_private (gimplify_omp_ctxp, decl, 0))
8769 omp_notice_variable (gimplify_omp_ctxp, decl, true);
8770 else
8771 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
8772
8773 /* If DECL is not a gimple register, create a temporary variable to act
8774    as the iteration counter.  This is valid, since DECL cannot be
8775    modified in the body of the loop.  The same is done for any iteration
8776    variables of a simd with collapse > 1, where the iterator variables
8777    must be lastprivate.  */
8778 if (orig_for_stmt != for_stmt)
8779 var = decl;
8780 else if (!is_gimple_reg (decl)
8781 || (ort == ORT_SIMD
8782 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1))
8783 {
8784 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
8785 TREE_OPERAND (t, 0) = var;
8786
8787 gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));
8788
8789 if (ort == ORT_SIMD
8790 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
8791 {
8792 c2 = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
8793 OMP_CLAUSE_LINEAR_NO_COPYIN (c2) = 1;
8794 OMP_CLAUSE_LINEAR_NO_COPYOUT (c2) = 1;
8795 OMP_CLAUSE_DECL (c2) = var;
8796 OMP_CLAUSE_CHAIN (c2) = OMP_FOR_CLAUSES (for_stmt);
8797 OMP_FOR_CLAUSES (for_stmt) = c2;
8798 omp_add_variable (gimplify_omp_ctxp, var,
8799 GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN);
8800 if (c == NULL_TREE)
8801 {
8802 c = c2;
8803 c2 = NULL_TREE;
8804 }
8805 }
8806 else
8807 omp_add_variable (gimplify_omp_ctxp, var,
8808 GOVD_PRIVATE | GOVD_SEEN);
8809 }
8810 else
8811 var = decl;
8812
8813 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
8814 is_gimple_val, fb_rvalue);
8815 ret = MIN (ret, tret);
8816 if (ret == GS_ERROR)
8817 return ret;
8818
8819 /* Handle OMP_FOR_COND. */
8820 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
8821 gcc_assert (COMPARISON_CLASS_P (t));
8822 gcc_assert (TREE_OPERAND (t, 0) == decl);
8823
8824 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
8825 is_gimple_val, fb_rvalue);
8826 ret = MIN (ret, tret);
8827
8828 /* Handle OMP_FOR_INCR. */
8829 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
8830 switch (TREE_CODE (t))
8831 {
8832 case PREINCREMENT_EXPR:
8833 case POSTINCREMENT_EXPR:
8834 {
8835 tree decl = TREE_OPERAND (t, 0);
8836 /* c_omp_for_incr_canonicalize_ptr() should have been
8837 called to massage things appropriately. */
8838 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
8839
8840 if (orig_for_stmt != for_stmt)
8841 break;
8842 t = build_int_cst (TREE_TYPE (decl), 1);
8843 if (c)
8844 OMP_CLAUSE_LINEAR_STEP (c) = t;
8845 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
8846 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
8847 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
8848 break;
8849 }
8850
8851 case PREDECREMENT_EXPR:
8852 case POSTDECREMENT_EXPR:
8853 /* c_omp_for_incr_canonicalize_ptr() should have been
8854 called to massage things appropriately. */
8855 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
8856 if (orig_for_stmt != for_stmt)
8857 break;
8858 t = build_int_cst (TREE_TYPE (decl), -1);
8859 if (c)
8860 OMP_CLAUSE_LINEAR_STEP (c) = t;
8861 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
8862 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
8863 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
8864 break;
8865
8866 case MODIFY_EXPR:
8867 gcc_assert (TREE_OPERAND (t, 0) == decl);
8868 TREE_OPERAND (t, 0) = var;
8869
8870 t = TREE_OPERAND (t, 1);
8871 switch (TREE_CODE (t))
8872 {
8873 case PLUS_EXPR:
8874 if (TREE_OPERAND (t, 1) == decl)
8875 {
8876 TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
8877 TREE_OPERAND (t, 0) = var;
8878 break;
8879 }
8880
8881 /* FALLTHRU */
8882 case MINUS_EXPR:
8883 case POINTER_PLUS_EXPR:
8884 gcc_assert (TREE_OPERAND (t, 0) == decl);
8885 TREE_OPERAND (t, 0) = var;
8886 break;
8887 default:
8888 gcc_unreachable ();
8889 }
8890
8891 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
8892 is_gimple_val, fb_rvalue);
8893 ret = MIN (ret, tret);
8894 if (c)
8895 {
8896 tree step = TREE_OPERAND (t, 1);
8897 tree stept = TREE_TYPE (decl);
8898 if (POINTER_TYPE_P (stept))
8899 stept = sizetype;
8900 step = fold_convert (stept, step);
8901 if (TREE_CODE (t) == MINUS_EXPR)
8902 step = fold_build1 (NEGATE_EXPR, stept, step);
8903 OMP_CLAUSE_LINEAR_STEP (c) = step;
8904 if (step != TREE_OPERAND (t, 1))
8905 {
8906 tret = gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c),
8907 &for_pre_body, NULL,
8908 is_gimple_val, fb_rvalue);
8909 ret = MIN (ret, tret);
8910 }
8911 }
8912 break;
8913
8914 default:
8915 gcc_unreachable ();
8916 }
8917
8918 if (c2)
8919 {
8920 gcc_assert (c);
8921 OMP_CLAUSE_LINEAR_STEP (c2) = OMP_CLAUSE_LINEAR_STEP (c);
8922 }
8923
8924 if ((var != decl || collapse > 1) && orig_for_stmt == for_stmt)
8925 {
8926 for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
8927 if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
8928 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
8929 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
8930 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)
8931 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) == NULL))
8932 && OMP_CLAUSE_DECL (c) == decl)
8933 {
8934 if (is_doacross && (collapse == 1 || i >= collapse))
8935 t = var;
8936 else
8937 {
8938 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
8939 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
8940 gcc_assert (TREE_OPERAND (t, 0) == var);
8941 t = TREE_OPERAND (t, 1);
8942 gcc_assert (TREE_CODE (t) == PLUS_EXPR
8943 || TREE_CODE (t) == MINUS_EXPR
8944 || TREE_CODE (t) == POINTER_PLUS_EXPR);
8945 gcc_assert (TREE_OPERAND (t, 0) == var);
8946 t = build2 (TREE_CODE (t), TREE_TYPE (decl),
8947 is_doacross ? var : decl,
8948 TREE_OPERAND (t, 1));
8949 }
8950 gimple_seq *seq;
8951 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
8952 seq = &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c);
8953 else
8954 seq = &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c);
8955 gimplify_assign (decl, t, seq);
8956 }
8957 }
8958 }
8959
8960 BITMAP_FREE (has_decl_expr);
8961
8962 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
8963 {
8964 push_gimplify_context ();
8965 if (TREE_CODE (OMP_FOR_BODY (orig_for_stmt)) != BIND_EXPR)
8966 {
8967 OMP_FOR_BODY (orig_for_stmt)
8968 = build3 (BIND_EXPR, void_type_node, NULL,
8969 OMP_FOR_BODY (orig_for_stmt), NULL);
8970 TREE_SIDE_EFFECTS (OMP_FOR_BODY (orig_for_stmt)) = 1;
8971 }
8972 }
8973
8974 gimple *g = gimplify_and_return_first (OMP_FOR_BODY (orig_for_stmt),
8975 &for_body);
8976
8977 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
8978 {
8979 if (gimple_code (g) == GIMPLE_BIND)
8980 pop_gimplify_context (g);
8981 else
8982 pop_gimplify_context (NULL);
8983 }
8984
8985 if (orig_for_stmt != for_stmt)
8986 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
8987 {
8988 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
8989 decl = TREE_OPERAND (t, 0);
8990 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
8991 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
8992 gimplify_omp_ctxp = ctx->outer_context;
8993 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
8994 gimplify_omp_ctxp = ctx;
8995 omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
8996 TREE_OPERAND (t, 0) = var;
8997 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
8998 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
8999 TREE_OPERAND (TREE_OPERAND (t, 1), 0) = var;
9000 }
9001
9002 gimplify_adjust_omp_clauses (pre_p, for_body,
9003 &OMP_FOR_CLAUSES (orig_for_stmt),
9004 TREE_CODE (orig_for_stmt));
9005
9006 int kind;
9007 switch (TREE_CODE (orig_for_stmt))
9008 {
9009 case OMP_FOR: kind = GF_OMP_FOR_KIND_FOR; break;
9010 case OMP_SIMD: kind = GF_OMP_FOR_KIND_SIMD; break;
9011 case CILK_SIMD: kind = GF_OMP_FOR_KIND_CILKSIMD; break;
9012 case CILK_FOR: kind = GF_OMP_FOR_KIND_CILKFOR; break;
9013 case OMP_DISTRIBUTE: kind = GF_OMP_FOR_KIND_DISTRIBUTE; break;
9014 case OMP_TASKLOOP: kind = GF_OMP_FOR_KIND_TASKLOOP; break;
9015 case OACC_LOOP: kind = GF_OMP_FOR_KIND_OACC_LOOP; break;
9016 default:
9017 gcc_unreachable ();
9018 }
9019 gfor = gimple_build_omp_for (for_body, kind, OMP_FOR_CLAUSES (orig_for_stmt),
9020 TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
9021 for_pre_body);
9022 if (orig_for_stmt != for_stmt)
9023 gimple_omp_for_set_combined_p (gfor, true);
9024 if (gimplify_omp_ctxp
9025 && (gimplify_omp_ctxp->combined_loop
9026 || (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
9027 && gimplify_omp_ctxp->outer_context
9028 && gimplify_omp_ctxp->outer_context->combined_loop)))
9029 {
9030 gimple_omp_for_set_combined_into_p (gfor, true);
9031 if (gimplify_omp_ctxp->combined_loop)
9032 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_SIMD);
9033 else
9034 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_FOR);
9035 }
9036
9037 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
9038 {
9039 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
9040 gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
9041 gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
9042 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
9043 gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
9044 gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
9045 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
9046 gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
9047 }
9048
9049 /* OMP_TASKLOOP is gimplified as two GIMPLE_OMP_FOR taskloop
9050 constructs with a GIMPLE_OMP_TASK sandwiched in between them.
9051 The outer taskloop computes the number of iterations (and the
9052 counts for collapsed loops) and holds the taskloop-specific clauses.
9053 The task construct stands for the effect of data sharing on the
9054 explicit task it creates, and the inner taskloop stands for the
9055 expansion of the static loop inside of the explicit task construct. */
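/* As an illustration (a sketch only, not actual compiler output), a loop
   such as

     #pragma omp taskloop firstprivate (a) lastprivate (b) grainsize (4)

   is split roughly into

     #pragma omp taskloop grainsize (4)             [outer GIMPLE_OMP_FOR]
     #pragma omp task firstprivate (a) shared (b)
     #pragma omp taskloop lastprivate (b)           [inner GIMPLE_OMP_FOR]
       <loop body>

   matching the clause redistribution performed below. */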
9056 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
9057 {
9058 tree *gfor_clauses_ptr = gimple_omp_for_clauses_ptr (gfor);
9059 tree task_clauses = NULL_TREE;
9060 tree c = *gfor_clauses_ptr;
9061 tree *gtask_clauses_ptr = &task_clauses;
9062 tree outer_for_clauses = NULL_TREE;
9063 tree *gforo_clauses_ptr = &outer_for_clauses;
9064 for (; c; c = OMP_CLAUSE_CHAIN (c))
9065 switch (OMP_CLAUSE_CODE (c))
9066 {
9067 /* These clauses are allowed on task, move them there. */
9068 case OMP_CLAUSE_SHARED:
9069 case OMP_CLAUSE_FIRSTPRIVATE:
9070 case OMP_CLAUSE_DEFAULT:
9071 case OMP_CLAUSE_IF:
9072 case OMP_CLAUSE_UNTIED:
9073 case OMP_CLAUSE_FINAL:
9074 case OMP_CLAUSE_MERGEABLE:
9075 case OMP_CLAUSE_PRIORITY:
9076 *gtask_clauses_ptr = c;
9077 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
9078 break;
9079 case OMP_CLAUSE_PRIVATE:
9080 if (OMP_CLAUSE_PRIVATE_TASKLOOP_IV (c))
9081 {
9082 /* We want private on outer for and firstprivate
9083 on task. */
9084 *gtask_clauses_ptr
9085 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
9086 OMP_CLAUSE_FIRSTPRIVATE);
9087 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
9088 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL);
9089 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
9090 *gforo_clauses_ptr = c;
9091 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
9092 }
9093 else
9094 {
9095 *gtask_clauses_ptr = c;
9096 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
9097 }
9098 break;
9099 /* These clauses go into outer taskloop clauses. */
9100 case OMP_CLAUSE_GRAINSIZE:
9101 case OMP_CLAUSE_NUM_TASKS:
9102 case OMP_CLAUSE_NOGROUP:
9103 *gforo_clauses_ptr = c;
9104 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
9105 break;
9106 /* Taskloop clause we duplicate on both taskloops. */
9107 case OMP_CLAUSE_COLLAPSE:
9108 *gfor_clauses_ptr = c;
9109 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
9110 *gforo_clauses_ptr = copy_node (c);
9111 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
9112 break;
9113 /* For lastprivate, keep the clause on the inner taskloop, and add
9114 a shared clause on the task. If the same decl is also firstprivate,
9115 also add a firstprivate clause on the inner taskloop. */
9116 case OMP_CLAUSE_LASTPRIVATE:
9117 if (OMP_CLAUSE_LASTPRIVATE_TASKLOOP_IV (c))
9118 {
9119 /* For taskloop C++ lastprivate IVs, we want:
9120 1) private on outer taskloop
9121 2) firstprivate and shared on task
9122 3) lastprivate on inner taskloop */
9123 *gtask_clauses_ptr
9124 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
9125 OMP_CLAUSE_FIRSTPRIVATE);
9126 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
9127 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL);
9128 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
9129 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c) = 1;
9130 *gforo_clauses_ptr = build_omp_clause (OMP_CLAUSE_LOCATION (c),
9131 OMP_CLAUSE_PRIVATE);
9132 OMP_CLAUSE_DECL (*gforo_clauses_ptr) = OMP_CLAUSE_DECL (c);
9133 OMP_CLAUSE_PRIVATE_TASKLOOP_IV (*gforo_clauses_ptr) = 1;
9134 TREE_TYPE (*gforo_clauses_ptr) = TREE_TYPE (c);
9135 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
9136 }
9137 *gfor_clauses_ptr = c;
9138 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
9139 *gtask_clauses_ptr
9140 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_SHARED);
9141 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
9142 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
9143 OMP_CLAUSE_SHARED_FIRSTPRIVATE (*gtask_clauses_ptr) = 1;
9144 gtask_clauses_ptr
9145 = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
9146 break;
9147 default:
9148 gcc_unreachable ();
9149 }
9150 *gfor_clauses_ptr = NULL_TREE;
9151 *gtask_clauses_ptr = NULL_TREE;
9152 *gforo_clauses_ptr = NULL_TREE;
9153 g = gimple_build_bind (NULL_TREE, gfor, NULL_TREE);
9154 g = gimple_build_omp_task (g, task_clauses, NULL_TREE, NULL_TREE,
9155 NULL_TREE, NULL_TREE, NULL_TREE);
9156 gimple_omp_task_set_taskloop_p (g, true);
9157 g = gimple_build_bind (NULL_TREE, g, NULL_TREE);
9158 gomp_for *gforo
9159 = gimple_build_omp_for (g, GF_OMP_FOR_KIND_TASKLOOP, outer_for_clauses,
9160 gimple_omp_for_collapse (gfor),
9161 gimple_omp_for_pre_body (gfor));
9162 gimple_omp_for_set_pre_body (gfor, NULL);
9163 gimple_omp_for_set_combined_p (gforo, true);
9164 gimple_omp_for_set_combined_into_p (gfor, true);
9165 for (i = 0; i < (int) gimple_omp_for_collapse (gfor); i++)
9166 {
9167 t = unshare_expr (gimple_omp_for_index (gfor, i));
9168 gimple_omp_for_set_index (gforo, i, t);
9169 t = unshare_expr (gimple_omp_for_initial (gfor, i));
9170 gimple_omp_for_set_initial (gforo, i, t);
9171 gimple_omp_for_set_cond (gforo, i,
9172 gimple_omp_for_cond (gfor, i));
9173 t = unshare_expr (gimple_omp_for_final (gfor, i));
9174 gimple_omp_for_set_final (gforo, i, t);
9175 t = unshare_expr (gimple_omp_for_incr (gfor, i));
9176 gimple_omp_for_set_incr (gforo, i, t);
9177 }
9178 gimplify_seq_add_stmt (pre_p, gforo);
9179 }
9180 else
9181 gimplify_seq_add_stmt (pre_p, gfor);
9182 if (ret != GS_ALL_DONE)
9183 return GS_ERROR;
9184 *expr_p = NULL_TREE;
9185 return GS_ALL_DONE;
9186 }
9187
9188 /* Helper function of optimize_target_teams, find OMP_TEAMS inside
9189 of OMP_TARGET's body. */
9190
9191 static tree
9192 find_omp_teams (tree *tp, int *walk_subtrees, void *)
9193 {
9194 *walk_subtrees = 0;
9195 switch (TREE_CODE (*tp))
9196 {
9197 case OMP_TEAMS:
9198 return *tp;
9199 case BIND_EXPR:
9200 case STATEMENT_LIST:
9201 *walk_subtrees = 1;
9202 break;
9203 default:
9204 break;
9205 }
9206 return NULL_TREE;
9207 }
9208
9209 /* Helper function of optimize_target_teams, determine if the expression
9210 can be computed safely on the host before entering the target construct. */
9211
9212 static tree
9213 computable_teams_clause (tree *tp, int *walk_subtrees, void *)
9214 {
9215 splay_tree_node n;
9216
9217 if (TYPE_P (*tp))
9218 {
9219 *walk_subtrees = 0;
9220 return NULL_TREE;
9221 }
9222 switch (TREE_CODE (*tp))
9223 {
9224 case VAR_DECL:
9225 case PARM_DECL:
9226 case RESULT_DECL:
9227 *walk_subtrees = 0;
9228 if (error_operand_p (*tp)
9229 || !INTEGRAL_TYPE_P (TREE_TYPE (*tp))
9230 || DECL_HAS_VALUE_EXPR_P (*tp)
9231 || DECL_THREAD_LOCAL_P (*tp)
9232 || TREE_SIDE_EFFECTS (*tp)
9233 || TREE_THIS_VOLATILE (*tp))
9234 return *tp;
9235 if (is_global_var (*tp)
9236 && (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (*tp))
9237 || lookup_attribute ("omp declare target link",
9238 DECL_ATTRIBUTES (*tp))))
9239 return *tp;
9240 n = splay_tree_lookup (gimplify_omp_ctxp->variables,
9241 (splay_tree_key) *tp);
9242 if (n == NULL)
9243 {
9244 if (gimplify_omp_ctxp->target_map_scalars_firstprivate)
9245 return NULL_TREE;
9246 return *tp;
9247 }
9248 else if (n->value & GOVD_LOCAL)
9249 return *tp;
9250 else if (n->value & GOVD_FIRSTPRIVATE)
9251 return NULL_TREE;
9252 else if ((n->value & (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
9253 == (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
9254 return NULL_TREE;
9255 return *tp;
9256 case INTEGER_CST:
9257 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
9258 return *tp;
9259 return NULL_TREE;
9260 case TARGET_EXPR:
9261 if (TARGET_EXPR_INITIAL (*tp)
9262 || TREE_CODE (TARGET_EXPR_SLOT (*tp)) != VAR_DECL)
9263 return *tp;
9264 return computable_teams_clause (&TARGET_EXPR_SLOT (*tp),
9265 walk_subtrees, NULL);
9266 /* Allow some reasonable subset of integral arithmetic. */
9267 case PLUS_EXPR:
9268 case MINUS_EXPR:
9269 case MULT_EXPR:
9270 case TRUNC_DIV_EXPR:
9271 case CEIL_DIV_EXPR:
9272 case FLOOR_DIV_EXPR:
9273 case ROUND_DIV_EXPR:
9274 case TRUNC_MOD_EXPR:
9275 case CEIL_MOD_EXPR:
9276 case FLOOR_MOD_EXPR:
9277 case ROUND_MOD_EXPR:
9278 case RDIV_EXPR:
9279 case EXACT_DIV_EXPR:
9280 case MIN_EXPR:
9281 case MAX_EXPR:
9282 case LSHIFT_EXPR:
9283 case RSHIFT_EXPR:
9284 case BIT_IOR_EXPR:
9285 case BIT_XOR_EXPR:
9286 case BIT_AND_EXPR:
9287 case NEGATE_EXPR:
9288 case ABS_EXPR:
9289 case BIT_NOT_EXPR:
9290 case NON_LVALUE_EXPR:
9291 CASE_CONVERT:
9292 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
9293 return *tp;
9294 return NULL_TREE;
9295 /* And disallow anything else, except for comparisons. */
9296 default:
9297 if (COMPARISON_CLASS_P (*tp))
9298 return NULL_TREE;
9299 return *tp;
9300 }
9301 }
9302
9303 /* Try to determine if the num_teams and/or thread_limit expressions
9304 can have their values determined already before entering the
9305 target construct.
9306 INTEGER_CSTs trivially can, as can
9307 integral decls that are firstprivate (explicitly or implicitly)
9308 or explicitly map(always, to:) or map(always, tofrom:) on the target
9309 region, and expressions involving simple arithmetic on those;
9310 function calls are not OK, nor is dereferencing something, etc.
9311 Add NUM_TEAMS and THREAD_LIMIT clauses to the OMP_CLAUSES of
9312 EXPR based on what we find:
9313 0 stands for clause not specified at all, use implementation default
9314 -1 stands for value that can't be determined easily before entering
9315 the target construct.
9316 If the teams construct is not present at all, use 1 for num_teams
9317 and 0 for thread_limit (only one team is involved, and the thread
9318 limit is implementation defined). */
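/* For illustration (a sketch of the intended effect, not actual output),
   given

     #pragma omp target
     #pragma omp teams num_teams (n + 1) thread_limit (16)

   where n is an integral variable that is firstprivate on the target
   region, both expressions are computable on the host, so NUM_TEAMS and
   THREAD_LIMIT clauses with those values can be attached to the
   OMP_TARGET itself; num_teams (foo ()) would instead yield -1. */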
9319
9320 static void
9321 optimize_target_teams (tree target, gimple_seq *pre_p)
9322 {
9323 tree body = OMP_BODY (target);
9324 tree teams = walk_tree (&body, find_omp_teams, NULL, NULL);
9325 tree num_teams = integer_zero_node;
9326 tree thread_limit = integer_zero_node;
9327 location_t num_teams_loc = EXPR_LOCATION (target);
9328 location_t thread_limit_loc = EXPR_LOCATION (target);
9329 tree c, *p, expr;
9330 struct gimplify_omp_ctx *target_ctx = gimplify_omp_ctxp;
9331
9332 if (teams == NULL_TREE)
9333 num_teams = integer_one_node;
9334 else
9335 for (c = OMP_TEAMS_CLAUSES (teams); c; c = OMP_CLAUSE_CHAIN (c))
9336 {
9337 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NUM_TEAMS)
9338 {
9339 p = &num_teams;
9340 num_teams_loc = OMP_CLAUSE_LOCATION (c);
9341 }
9342 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREAD_LIMIT)
9343 {
9344 p = &thread_limit;
9345 thread_limit_loc = OMP_CLAUSE_LOCATION (c);
9346 }
9347 else
9348 continue;
9349 expr = OMP_CLAUSE_OPERAND (c, 0);
9350 if (TREE_CODE (expr) == INTEGER_CST)
9351 {
9352 *p = expr;
9353 continue;
9354 }
9355 if (walk_tree (&expr, computable_teams_clause, NULL, NULL))
9356 {
9357 *p = integer_minus_one_node;
9358 continue;
9359 }
9360 *p = expr;
9361 gimplify_omp_ctxp = gimplify_omp_ctxp->outer_context;
9362 if (gimplify_expr (p, pre_p, NULL, is_gimple_val, fb_rvalue)
9363 == GS_ERROR)
9364 {
9365 gimplify_omp_ctxp = target_ctx;
9366 *p = integer_minus_one_node;
9367 continue;
9368 }
9369 gimplify_omp_ctxp = target_ctx;
9370 if (!DECL_P (expr) && TREE_CODE (expr) != TARGET_EXPR)
9371 OMP_CLAUSE_OPERAND (c, 0) = *p;
9372 }
9373 c = build_omp_clause (thread_limit_loc, OMP_CLAUSE_THREAD_LIMIT);
9374 OMP_CLAUSE_THREAD_LIMIT_EXPR (c) = thread_limit;
9375 OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
9376 OMP_TARGET_CLAUSES (target) = c;
9377 c = build_omp_clause (num_teams_loc, OMP_CLAUSE_NUM_TEAMS);
9378 OMP_CLAUSE_NUM_TEAMS_EXPR (c) = num_teams;
9379 OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
9380 OMP_TARGET_CLAUSES (target) = c;
9381 }
9382
9383 /* Gimplify the gross structure of several OMP constructs. */
9384
9385 static void
9386 gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
9387 {
9388 tree expr = *expr_p;
9389 gimple *stmt;
9390 gimple_seq body = NULL;
9391 enum omp_region_type ort;
9392
9393 switch (TREE_CODE (expr))
9394 {
9395 case OMP_SECTIONS:
9396 case OMP_SINGLE:
9397 ort = ORT_WORKSHARE;
9398 break;
9399 case OMP_TARGET:
9400 ort = OMP_TARGET_COMBINED (expr) ? ORT_COMBINED_TARGET : ORT_TARGET;
9401 break;
9402 case OACC_KERNELS:
9403 ort = ORT_ACC_KERNELS;
9404 break;
9405 case OACC_PARALLEL:
9406 ort = ORT_ACC_PARALLEL;
9407 break;
9408 case OACC_DATA:
9409 ort = ORT_ACC_DATA;
9410 break;
9411 case OMP_TARGET_DATA:
9412 ort = ORT_TARGET_DATA;
9413 break;
9414 case OMP_TEAMS:
9415 ort = OMP_TEAMS_COMBINED (expr) ? ORT_COMBINED_TEAMS : ORT_TEAMS;
9416 break;
9417 case OACC_HOST_DATA:
9418 ort = ORT_ACC_HOST_DATA;
9419 break;
9420 default:
9421 gcc_unreachable ();
9422 }
9423 gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ort,
9424 TREE_CODE (expr));
9425 if (TREE_CODE (expr) == OMP_TARGET)
9426 optimize_target_teams (expr, pre_p);
9427 if ((ort & (ORT_TARGET | ORT_TARGET_DATA)) != 0)
9428 {
9429 push_gimplify_context ();
9430 gimple *g = gimplify_and_return_first (OMP_BODY (expr), &body);
9431 if (gimple_code (g) == GIMPLE_BIND)
9432 pop_gimplify_context (g);
9433 else
9434 pop_gimplify_context (NULL);
9435 if ((ort & ORT_TARGET_DATA) != 0)
9436 {
9437 enum built_in_function end_ix;
9438 switch (TREE_CODE (expr))
9439 {
9440 case OACC_DATA:
9441 case OACC_HOST_DATA:
9442 end_ix = BUILT_IN_GOACC_DATA_END;
9443 break;
9444 case OMP_TARGET_DATA:
9445 end_ix = BUILT_IN_GOMP_TARGET_END_DATA;
9446 break;
9447 default:
9448 gcc_unreachable ();
9449 }
9450 tree fn = builtin_decl_explicit (end_ix);
9451 g = gimple_build_call (fn, 0);
9452 gimple_seq cleanup = NULL;
9453 gimple_seq_add_stmt (&cleanup, g);
9454 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
9455 body = NULL;
9456 gimple_seq_add_stmt (&body, g);
9457 }
9458 }
9459 else
9460 gimplify_and_add (OMP_BODY (expr), &body);
9461 gimplify_adjust_omp_clauses (pre_p, body, &OMP_CLAUSES (expr),
9462 TREE_CODE (expr));
9463
9464 switch (TREE_CODE (expr))
9465 {
9466 case OACC_DATA:
9467 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_DATA,
9468 OMP_CLAUSES (expr));
9469 break;
9470 case OACC_KERNELS:
9471 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_KERNELS,
9472 OMP_CLAUSES (expr));
9473 break;
9474 case OACC_HOST_DATA:
9475 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_HOST_DATA,
9476 OMP_CLAUSES (expr));
9477 break;
9478 case OACC_PARALLEL:
9479 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_PARALLEL,
9480 OMP_CLAUSES (expr));
9481 break;
9482 case OMP_SECTIONS:
9483 stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
9484 break;
9485 case OMP_SINGLE:
9486 stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
9487 break;
9488 case OMP_TARGET:
9489 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_REGION,
9490 OMP_CLAUSES (expr));
9491 break;
9492 case OMP_TARGET_DATA:
9493 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_DATA,
9494 OMP_CLAUSES (expr));
9495 break;
9496 case OMP_TEAMS:
9497 stmt = gimple_build_omp_teams (body, OMP_CLAUSES (expr));
9498 break;
9499 default:
9500 gcc_unreachable ();
9501 }
9502
9503 gimplify_seq_add_stmt (pre_p, stmt);
9504 *expr_p = NULL_TREE;
9505 }
9506
9507 /* Gimplify the gross structure of OpenACC enter/exit data, update, and OpenMP
9508 target update constructs. */
9509
9510 static void
9511 gimplify_omp_target_update (tree *expr_p, gimple_seq *pre_p)
9512 {
9513 tree expr = *expr_p;
9514 int kind;
9515 gomp_target *stmt;
9516 enum omp_region_type ort = ORT_WORKSHARE;
9517
9518 switch (TREE_CODE (expr))
9519 {
9520 case OACC_ENTER_DATA:
9521 case OACC_EXIT_DATA:
9522 kind = GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA;
9523 ort = ORT_ACC;
9524 break;
9525 case OACC_UPDATE:
9526 kind = GF_OMP_TARGET_KIND_OACC_UPDATE;
9527 ort = ORT_ACC;
9528 break;
9529 case OMP_TARGET_UPDATE:
9530 kind = GF_OMP_TARGET_KIND_UPDATE;
9531 break;
9532 case OMP_TARGET_ENTER_DATA:
9533 kind = GF_OMP_TARGET_KIND_ENTER_DATA;
9534 break;
9535 case OMP_TARGET_EXIT_DATA:
9536 kind = GF_OMP_TARGET_KIND_EXIT_DATA;
9537 break;
9538 default:
9539 gcc_unreachable ();
9540 }
9541 gimplify_scan_omp_clauses (&OMP_STANDALONE_CLAUSES (expr), pre_p,
9542 ort, TREE_CODE (expr));
9543 gimplify_adjust_omp_clauses (pre_p, NULL, &OMP_STANDALONE_CLAUSES (expr),
9544 TREE_CODE (expr));
9545 stmt = gimple_build_omp_target (NULL, kind, OMP_STANDALONE_CLAUSES (expr));
9546
9547 gimplify_seq_add_stmt (pre_p, stmt);
9548 *expr_p = NULL_TREE;
9549 }
9550
9551 /* A subroutine of gimplify_omp_atomic. The front end is supposed to have
9552 stabilized the lhs of the atomic operation as *ADDR. Return true if
9553 EXPR is this stabilized form. */
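/* E.g. for "#pragma omp atomic" applied to "x = x + 1", the front end
   stabilizes the lhs as ADDR == &x, and occurrences of x in the rhs
   arrive here as *&x, possibly wrapped in useless type conversions.
   (An illustrative sketch, not an exhaustive description.) */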
9554
9555 static bool
9556 goa_lhs_expr_p (tree expr, tree addr)
9557 {
9558 /* Also include casts to other type variants. The C front end is fond
9559 of adding these for e.g. volatile variables. This is like
9560 STRIP_TYPE_NOPS but includes the main variant lookup. */
9561 STRIP_USELESS_TYPE_CONVERSION (expr);
9562
9563 if (TREE_CODE (expr) == INDIRECT_REF)
9564 {
9565 expr = TREE_OPERAND (expr, 0);
9566 while (expr != addr
9567 && (CONVERT_EXPR_P (expr)
9568 || TREE_CODE (expr) == NON_LVALUE_EXPR)
9569 && TREE_CODE (expr) == TREE_CODE (addr)
9570 && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr)))
9571 {
9572 expr = TREE_OPERAND (expr, 0);
9573 addr = TREE_OPERAND (addr, 0);
9574 }
9575 if (expr == addr)
9576 return true;
9577 return (TREE_CODE (addr) == ADDR_EXPR
9578 && TREE_CODE (expr) == ADDR_EXPR
9579 && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
9580 }
9581 if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
9582 return true;
9583 return false;
9584 }
9585
9586 /* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR. If an
9587 expression does not involve the lhs, evaluate it into a temporary.
9588 Return 1 if the lhs appeared as a subexpression, 0 if it did not,
9589 or -1 if an error was encountered. */
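/* E.g. when stabilizing "x + foo ()" for OMP_ATOMIC, the call foo ()
   does not involve the lhs, so it is evaluated into a temporary in
   PRE_P, while the occurrence of x is replaced by LHS_VAR and 1 is
   returned. (Illustrative sketch.) */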
9590
9591 static int
9592 goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
9593 tree lhs_var)
9594 {
9595 tree expr = *expr_p;
9596 int saw_lhs;
9597
9598 if (goa_lhs_expr_p (expr, lhs_addr))
9599 {
9600 *expr_p = lhs_var;
9601 return 1;
9602 }
9603 if (is_gimple_val (expr))
9604 return 0;
9605
9606 saw_lhs = 0;
9607 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
9608 {
9609 case tcc_binary:
9610 case tcc_comparison:
9611 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
9612 lhs_var);
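/* FALLTHRU: binary and comparison codes must stabilize operand 0 as
   well, which the tcc_unary case below does. */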
9613 case tcc_unary:
9614 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
9615 lhs_var);
9616 break;
9617 case tcc_expression:
9618 switch (TREE_CODE (expr))
9619 {
9620 case TRUTH_ANDIF_EXPR:
9621 case TRUTH_ORIF_EXPR:
9622 case TRUTH_AND_EXPR:
9623 case TRUTH_OR_EXPR:
9624 case TRUTH_XOR_EXPR:
9625 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
9626 lhs_addr, lhs_var);
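/* FALLTHRU: the binary TRUTH_* codes above also need operand 0
   stabilized. */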
9627 case TRUTH_NOT_EXPR:
9628 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
9629 lhs_addr, lhs_var);
9630 break;
9631 case COMPOUND_EXPR:
9632 /* Break out any preevaluations from cp_build_modify_expr. */
9633 for (; TREE_CODE (expr) == COMPOUND_EXPR;
9634 expr = TREE_OPERAND (expr, 1))
9635 gimplify_stmt (&TREE_OPERAND (expr, 0), pre_p);
9636 *expr_p = expr;
9637 return goa_stabilize_expr (expr_p, pre_p, lhs_addr, lhs_var);
9638 default:
9639 break;
9640 }
9641 break;
9642 default:
9643 break;
9644 }
9645
9646 if (saw_lhs == 0)
9647 {
9648 enum gimplify_status gs;
9649 gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
9650 if (gs != GS_ALL_DONE)
9651 saw_lhs = -1;
9652 }
9653
9654 return saw_lhs;
9655 }
9656
9657 /* Gimplify an OMP_ATOMIC statement. */
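/* As a sketch of the emitted shape (not literal output), for
   "#pragma omp atomic" on "x = x + 1" this produces roughly

     tmp_load = GIMPLE_OMP_ATOMIC_LOAD (&x);
     tmp = tmp_load + 1;
     GIMPLE_OMP_ATOMIC_STORE (tmp);

   with the rhs gimplified into GIMPLE form in between. */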
9658
9659 static enum gimplify_status
9660 gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
9661 {
9662 tree addr = TREE_OPERAND (*expr_p, 0);
9663 tree rhs = TREE_CODE (*expr_p) == OMP_ATOMIC_READ
9664 ? NULL : TREE_OPERAND (*expr_p, 1);
9665 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
9666 tree tmp_load;
9667 gomp_atomic_load *loadstmt;
9668 gomp_atomic_store *storestmt;
9669
9670 tmp_load = create_tmp_reg (type);
9671 if (rhs && goa_stabilize_expr (&rhs, pre_p, addr, tmp_load) < 0)
9672 return GS_ERROR;
9673
9674 if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
9675 != GS_ALL_DONE)
9676 return GS_ERROR;
9677
9678 loadstmt = gimple_build_omp_atomic_load (tmp_load, addr);
9679 gimplify_seq_add_stmt (pre_p, loadstmt);
9680 if (rhs && gimplify_expr (&rhs, pre_p, NULL, is_gimple_val, fb_rvalue)
9681 != GS_ALL_DONE)
9682 return GS_ERROR;
9683
9684 if (TREE_CODE (*expr_p) == OMP_ATOMIC_READ)
9685 rhs = tmp_load;
9686 storestmt = gimple_build_omp_atomic_store (rhs);
9687 gimplify_seq_add_stmt (pre_p, storestmt);
9688 if (OMP_ATOMIC_SEQ_CST (*expr_p))
9689 {
9690 gimple_omp_atomic_set_seq_cst (loadstmt);
9691 gimple_omp_atomic_set_seq_cst (storestmt);
9692 }
9693 switch (TREE_CODE (*expr_p))
9694 {
9695 case OMP_ATOMIC_READ:
9696 case OMP_ATOMIC_CAPTURE_OLD:
9697 *expr_p = tmp_load;
9698 gimple_omp_atomic_set_need_value (loadstmt);
9699 break;
9700 case OMP_ATOMIC_CAPTURE_NEW:
9701 *expr_p = rhs;
9702 gimple_omp_atomic_set_need_value (storestmt);
9703 break;
9704 default:
9705 *expr_p = NULL;
9706 break;
9707 }
9708
9709 return GS_ALL_DONE;
9710 }
9711
9712 /* Gimplify a TRANSACTION_EXPR. This involves gimplification of the
9713 body, and adding some EH bits. */
9714
9715 static enum gimplify_status
9716 gimplify_transaction (tree *expr_p, gimple_seq *pre_p)
9717 {
9718 tree expr = *expr_p, temp, tbody = TRANSACTION_EXPR_BODY (expr);
9719 gimple *body_stmt;
9720 gtransaction *trans_stmt;
9721 gimple_seq body = NULL;
9722 int subcode = 0;
9723
9724 /* Wrap the transaction body in a BIND_EXPR so we have a context
9725 where to put decls for OMP. */
9726 if (TREE_CODE (tbody) != BIND_EXPR)
9727 {
9728 tree bind = build3 (BIND_EXPR, void_type_node, NULL, tbody, NULL);
9729 TREE_SIDE_EFFECTS (bind) = 1;
9730 SET_EXPR_LOCATION (bind, EXPR_LOCATION (tbody));
9731 TRANSACTION_EXPR_BODY (expr) = bind;
9732 }
9733
9734 push_gimplify_context ();
9735 temp = voidify_wrapper_expr (*expr_p, NULL);
9736
9737 body_stmt = gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr), &body);
9738 pop_gimplify_context (body_stmt);
9739
9740 trans_stmt = gimple_build_transaction (body);
9741 if (TRANSACTION_EXPR_OUTER (expr))
9742 subcode = GTMA_IS_OUTER;
9743 else if (TRANSACTION_EXPR_RELAXED (expr))
9744 subcode = GTMA_IS_RELAXED;
9745 gimple_transaction_set_subcode (trans_stmt, subcode);
9746
9747 gimplify_seq_add_stmt (pre_p, trans_stmt);
9748
9749 if (temp)
9750 {
9751 *expr_p = temp;
9752 return GS_OK;
9753 }
9754
9755 *expr_p = NULL_TREE;
9756 return GS_ALL_DONE;
9757 }
9758
9759 /* Gimplify an OMP_ORDERED construct. EXPR is the tree version. BODY
9760 is the OMP_BODY of the original EXPR (which has already been
9761 gimplified so it's not present in the EXPR).
9762
9763 Return the gimplified GIMPLE_OMP_ORDERED tuple. */
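/* E.g. inside "#pragma omp for ordered(2)" with iteration variables
   i and j, a "depend(sink: i - 1, j)" clause is verified here to name
   exactly i and j in that order; the listed decls are then replaced by
   the loop counters recorded in loop_iter_var. (Illustrative sketch.) */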
9764
9765 static gimple *
9766 gimplify_omp_ordered (tree expr, gimple_seq body)
9767 {
9768 tree c, decls;
9769 int failures = 0;
9770 unsigned int i;
9771 tree source_c = NULL_TREE;
9772 tree sink_c = NULL_TREE;
9773
9774 if (gimplify_omp_ctxp)
9775 for (c = OMP_ORDERED_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
9776 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
9777 && gimplify_omp_ctxp->loop_iter_var.is_empty ()
9778 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK
9779 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE))
9780 {
9781 error_at (OMP_CLAUSE_LOCATION (c),
9782 "%<ordered%> construct with %<depend%> clause must be "
9783 "closely nested inside a loop with %<ordered%> clause "
9784 "with a parameter");
9785 failures++;
9786 }
9787 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
9788 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
9789 {
9790 bool fail = false;
9791 for (decls = OMP_CLAUSE_DECL (c), i = 0;
9792 decls && TREE_CODE (decls) == TREE_LIST;
9793 decls = TREE_CHAIN (decls), ++i)
9794 if (i >= gimplify_omp_ctxp->loop_iter_var.length () / 2)
9795 continue;
9796 else if (TREE_VALUE (decls)
9797 != gimplify_omp_ctxp->loop_iter_var[2 * i])
9798 {
9799 error_at (OMP_CLAUSE_LOCATION (c),
9800 "variable %qE is not an iteration "
9801 "of outermost loop %d, expected %qE",
9802 TREE_VALUE (decls), i + 1,
9803 gimplify_omp_ctxp->loop_iter_var[2 * i]);
9804 fail = true;
9805 failures++;
9806 }
9807 else
9808 TREE_VALUE (decls)
9809 = gimplify_omp_ctxp->loop_iter_var[2 * i + 1];
9810 if (!fail && i != gimplify_omp_ctxp->loop_iter_var.length () / 2)
9811 {
9812 error_at (OMP_CLAUSE_LOCATION (c),
9813 "number of variables in %<depend(sink)%> "
9814 "clause does not match number of "
9815 "iteration variables");
9816 failures++;
9817 }
9818 sink_c = c;
9819 }
9820 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
9821 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE)
9822 {
9823 if (source_c)
9824 {
9825 error_at (OMP_CLAUSE_LOCATION (c),
9826 "more than one %<depend(source)%> clause on an "
9827 "%<ordered%> construct");
9828 failures++;
9829 }
9830 else
9831 source_c = c;
9832 }
9833 if (source_c && sink_c)
9834 {
9835 error_at (OMP_CLAUSE_LOCATION (source_c),
9836 "%<depend(source)%> clause specified together with "
9837 "%<depend(sink:)%> clauses on the same construct");
9838 failures++;
9839 }
9840
9841 if (failures)
9842 return gimple_build_nop ();
9843 return gimple_build_omp_ordered (body, OMP_ORDERED_CLAUSES (expr));
9844 }
9845
9846 /* Convert the GENERIC expression tree *EXPR_P to GIMPLE. If the
9847 expression produces a value to be used as an operand inside a GIMPLE
9848 statement, the value will be stored back in *EXPR_P. This value will
9849 be a tree of class tcc_declaration, tcc_constant, tcc_reference or
9850 an SSA_NAME. The corresponding sequence of GIMPLE statements is
9851 emitted in PRE_P and POST_P.
9852
9853 Additionally, this process may overwrite parts of the input
9854 expression during gimplification. Ideally, it should be
9855 possible to do non-destructive gimplification.
9856
9857 EXPR_P points to the GENERIC expression to convert to GIMPLE. If
9858 the expression needs to evaluate to a value to be used as
9859 an operand in a GIMPLE statement, this value will be stored in
9860 *EXPR_P on exit. This happens when the caller specifies one
9861 of fb_lvalue or fb_rvalue fallback flags.
9862
9863 PRE_P will contain the sequence of GIMPLE statements corresponding
9864 to the evaluation of EXPR and all the side-effects that must
9865 be executed before the main expression. On exit, the last
9866 statement of PRE_P is the core statement being gimplified. For
9867 instance, when gimplifying 'if (++a)' the last statement in
9868 PRE_P will be 'if (t.1)' where t.1 is the result of
9869 pre-incrementing 'a'.
9870
9871 POST_P will contain the sequence of GIMPLE statements corresponding
9872 to the evaluation of all the side-effects that must be executed
9873 after the main expression. If this is NULL, the post
9874 side-effects are stored at the end of PRE_P.
9875
9876 The reason why the output is split in two is to handle post
9877 side-effects explicitly. In some cases, an expression may have
9878 inner and outer post side-effects which need to be emitted in
9879 an order different from the one given by the recursive
9880 traversal. For instance, for the expression (*p--)++ the post
9881 side-effects of '--' must actually occur *after* the post
9882 side-effects of '++'. However, gimplification will first visit
9883 the inner expression, so if a separate POST sequence was not
9884 used, the resulting sequence would be:
9885
9886 1 t.1 = *p
9887 2 p = p - 1
9888 3 t.2 = t.1 + 1
9889 4 *p = t.2
9890
9891 However, the post-decrement operation in line #2 must not be
9892 evaluated until after the store to *p at line #4, so the
9893 correct sequence should be:
9894
9895 1 t.1 = *p
9896 2 t.2 = t.1 + 1
9897 3 *p = t.2
9898 4 p = p - 1
9899
9900 So, by specifying a separate post queue, it is possible
9901 to emit the post side-effects in the correct order.
9902 If POST_P is NULL, an internal queue will be used. Before
9903 returning to the caller, the sequence POST_P is appended to
9904 the main output sequence PRE_P.
9905
9906 GIMPLE_TEST_F points to a function that takes a tree T and
9907 returns nonzero if T is in the GIMPLE form requested by the
9908 caller. The GIMPLE predicates are in gimple.c.
9909
9910 FALLBACK tells the function what sort of a temporary we want if
9911 gimplification cannot produce an expression that complies with
9912 GIMPLE_TEST_F.
9913
9914 fb_none means that no temporary should be generated
9915 fb_rvalue means that an rvalue is OK to generate
9916 fb_lvalue means that an lvalue is OK to generate
9917 fb_either means that either is OK, but an lvalue is preferable.
9918 fb_mayfail means that gimplification may fail (in which case
9919 GS_ERROR will be returned)
9920
9921 The return value is either GS_ERROR or GS_ALL_DONE, since this
9922 function iterates until EXPR is completely gimplified or an error
9923 occurs. */
9924
9925 enum gimplify_status
9926 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
9927 bool (*gimple_test_f) (tree), fallback_t fallback)
9928 {
9929 tree tmp;
9930 gimple_seq internal_pre = NULL;
9931 gimple_seq internal_post = NULL;
9932 tree save_expr;
9933 bool is_statement;
9934 location_t saved_location;
9935 enum gimplify_status ret;
9936 gimple_stmt_iterator pre_last_gsi, post_last_gsi;
9937
9938 save_expr = *expr_p;
9939 if (save_expr == NULL_TREE)
9940 return GS_ALL_DONE;
9941
9942 /* If we are gimplifying a top-level statement, PRE_P must be valid. */
9943 is_statement = gimple_test_f == is_gimple_stmt;
9944 if (is_statement)
9945 gcc_assert (pre_p);
9946
9947 /* Consistency checks. */
9948 if (gimple_test_f == is_gimple_reg)
9949 gcc_assert (fallback & (fb_rvalue | fb_lvalue));
9950 else if (gimple_test_f == is_gimple_val
9951 || gimple_test_f == is_gimple_call_addr
9952 || gimple_test_f == is_gimple_condexpr
9953 || gimple_test_f == is_gimple_mem_rhs
9954 || gimple_test_f == is_gimple_mem_rhs_or_call
9955 || gimple_test_f == is_gimple_reg_rhs
9956 || gimple_test_f == is_gimple_reg_rhs_or_call
9957 || gimple_test_f == is_gimple_asm_val
9958 || gimple_test_f == is_gimple_mem_ref_addr)
9959 gcc_assert (fallback & fb_rvalue);
9960 else if (gimple_test_f == is_gimple_min_lval
9961 || gimple_test_f == is_gimple_lvalue)
9962 gcc_assert (fallback & fb_lvalue);
9963 else if (gimple_test_f == is_gimple_addressable)
9964 gcc_assert (fallback & fb_either);
9965 else if (gimple_test_f == is_gimple_stmt)
9966 gcc_assert (fallback == fb_none);
9967 else
9968 {
9969 /* We should have recognized the GIMPLE_TEST_F predicate to
9970 know what kind of fallback to use in case a temporary is
9971 needed to hold the value or address of *EXPR_P. */
9972 gcc_unreachable ();
9973 }
9974
9975 /* We used to check the predicate here and return immediately if it
9976 succeeds. This is wrong; the design is for gimplification to be
9977 idempotent, and for the predicates to only test for valid forms, not
9978 whether they are fully simplified. */
9979 if (pre_p == NULL)
9980 pre_p = &internal_pre;
9981
9982 if (post_p == NULL)
9983 post_p = &internal_post;
9984
9985 /* Remember the last statements added to PRE_P and POST_P. Every
9986 new statement added by the gimplification helpers needs to be
9987 annotated with location information. To centralize the
9988 responsibility, we remember the last statement that had been
9989 added to both queues before gimplifying *EXPR_P. If
9990 gimplification produces new statements in PRE_P and POST_P, those
9991 statements will be annotated with the same location information
9992 as *EXPR_P. */
9993 pre_last_gsi = gsi_last (*pre_p);
9994 post_last_gsi = gsi_last (*post_p);
9995
9996 saved_location = input_location;
9997 if (save_expr != error_mark_node
9998 && EXPR_HAS_LOCATION (*expr_p))
9999 input_location = EXPR_LOCATION (*expr_p);
10000
10001 /* Loop over the specific gimplifiers until the toplevel node
10002 remains the same. */
10003 do
10004 {
10005 /* Strip away as many useless type conversions as possible
10006 at the toplevel. */
10007 STRIP_USELESS_TYPE_CONVERSION (*expr_p);
10008
10009 /* Remember the expr. */
10010 save_expr = *expr_p;
10011
10012 /* Die, die, die, my darling. */
10013 if (save_expr == error_mark_node
10014 || (TREE_TYPE (save_expr)
10015 && TREE_TYPE (save_expr) == error_mark_node))
10016 {
10017 ret = GS_ERROR;
10018 break;
10019 }
10020
10021 /* Do any language-specific gimplification. */
10022 ret = ((enum gimplify_status)
10023 lang_hooks.gimplify_expr (expr_p, pre_p, post_p));
10024 if (ret == GS_OK)
10025 {
10026 if (*expr_p == NULL_TREE)
10027 break;
10028 if (*expr_p != save_expr)
10029 continue;
10030 }
10031 else if (ret != GS_UNHANDLED)
10032 break;
10033
10034 /* Make sure that all the cases set 'ret' appropriately. */
10035 ret = GS_UNHANDLED;
10036 switch (TREE_CODE (*expr_p))
10037 {
10038 /* First deal with the special cases. */
10039
10040 case POSTINCREMENT_EXPR:
10041 case POSTDECREMENT_EXPR:
10042 case PREINCREMENT_EXPR:
10043 case PREDECREMENT_EXPR:
10044 ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
10045 fallback != fb_none,
10046 TREE_TYPE (*expr_p));
10047 break;
10048
10049 case VIEW_CONVERT_EXPR:
10050 if (is_gimple_reg_type (TREE_TYPE (*expr_p))
10051 && is_gimple_reg_type (TREE_TYPE (TREE_OPERAND (*expr_p, 0))))
10052 {
10053 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
10054 post_p, is_gimple_val, fb_rvalue);
10055 recalculate_side_effects (*expr_p);
10056 break;
10057 }
10058 /* Fallthru. */
10059
10060 case ARRAY_REF:
10061 case ARRAY_RANGE_REF:
10062 case REALPART_EXPR:
10063 case IMAGPART_EXPR:
10064 case COMPONENT_REF:
10065 ret = gimplify_compound_lval (expr_p, pre_p, post_p,
10066 fallback ? fallback : fb_rvalue);
10067 break;
10068
10069 case COND_EXPR:
10070 ret = gimplify_cond_expr (expr_p, pre_p, fallback);
10071
10072 /* C99 code may assign to an array in a structure value of a
10073 conditional expression, and this has undefined behavior
10074 only on execution, so create a temporary if an lvalue is
10075 required. */
10076 if (fallback == fb_lvalue)
10077 {
10078 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
10079 mark_addressable (*expr_p);
10080 ret = GS_OK;
10081 }
10082 break;
10083
10084 case CALL_EXPR:
10085 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
10086
10087 /* C99 code may assign to an array in a structure returned
10088 from a function, and this has undefined behavior only on
10089 execution, so create a temporary if an lvalue is
10090 required. */
10091 if (fallback == fb_lvalue)
10092 {
10093 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
10094 mark_addressable (*expr_p);
10095 ret = GS_OK;
10096 }
10097 break;
10098
10099 case TREE_LIST:
10100 gcc_unreachable ();
10101
10102 case COMPOUND_EXPR:
10103 ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
10104 break;
10105
10106 case COMPOUND_LITERAL_EXPR:
10107 ret = gimplify_compound_literal_expr (expr_p, pre_p,
10108 gimple_test_f, fallback);
10109 break;
10110
10111 case MODIFY_EXPR:
10112 case INIT_EXPR:
10113 ret = gimplify_modify_expr (expr_p, pre_p, post_p,
10114 fallback != fb_none);
10115 break;
10116
10117 case TRUTH_ANDIF_EXPR:
10118 case TRUTH_ORIF_EXPR:
10119 {
10120 /* Preserve the original type of the expression and the
10121 source location of the outer expression. */
10122 tree org_type = TREE_TYPE (*expr_p);
10123 *expr_p = gimple_boolify (*expr_p);
10124 *expr_p = build3_loc (input_location, COND_EXPR,
10125 org_type, *expr_p,
10126 fold_convert_loc
10127 (input_location,
10128 org_type, boolean_true_node),
10129 fold_convert_loc
10130 (input_location,
10131 org_type, boolean_false_node));
10132 ret = GS_OK;
10133 break;
10134 }
10135
10136 case TRUTH_NOT_EXPR:
10137 {
10138 tree type = TREE_TYPE (*expr_p);
10139 /* The parsers are careful to generate TRUTH_NOT_EXPR
10140 only with operands that are always zero or one.
10141 We do not fold here but handle the only interesting case
10142 manually, as fold may re-introduce the TRUTH_NOT_EXPR. */
10143 *expr_p = gimple_boolify (*expr_p);
10144 if (TYPE_PRECISION (TREE_TYPE (*expr_p)) == 1)
10145 *expr_p = build1_loc (input_location, BIT_NOT_EXPR,
10146 TREE_TYPE (*expr_p),
10147 TREE_OPERAND (*expr_p, 0));
10148 else
10149 *expr_p = build2_loc (input_location, BIT_XOR_EXPR,
10150 TREE_TYPE (*expr_p),
10151 TREE_OPERAND (*expr_p, 0),
10152 build_int_cst (TREE_TYPE (*expr_p), 1));
10153 if (!useless_type_conversion_p (type, TREE_TYPE (*expr_p)))
10154 *expr_p = fold_convert_loc (input_location, type, *expr_p);
10155 ret = GS_OK;
10156 break;
10157 }
10158
10159 case ADDR_EXPR:
10160 ret = gimplify_addr_expr (expr_p, pre_p, post_p);
10161 break;
10162
10163 case ANNOTATE_EXPR:
10164 {
10165 tree cond = TREE_OPERAND (*expr_p, 0);
10166 tree kind = TREE_OPERAND (*expr_p, 1);
10167 tree type = TREE_TYPE (cond);
10168 if (!INTEGRAL_TYPE_P (type))
10169 {
10170 *expr_p = cond;
10171 ret = GS_OK;
10172 break;
10173 }
10174 tree tmp = create_tmp_var (type);
10175 gimplify_arg (&cond, pre_p, EXPR_LOCATION (*expr_p));
10176 gcall *call
10177 = gimple_build_call_internal (IFN_ANNOTATE, 2, cond, kind);
10178 gimple_call_set_lhs (call, tmp);
10179 gimplify_seq_add_stmt (pre_p, call);
10180 *expr_p = tmp;
10181 ret = GS_ALL_DONE;
10182 break;
10183 }
10184
10185 case VA_ARG_EXPR:
10186 ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
10187 break;
10188
10189 CASE_CONVERT:
10190 if (IS_EMPTY_STMT (*expr_p))
10191 {
10192 ret = GS_ALL_DONE;
10193 break;
10194 }
10195
10196 if (VOID_TYPE_P (TREE_TYPE (*expr_p))
10197 || fallback == fb_none)
10198 {
10199 /* Just strip a conversion to void (or in void context) and
10200 try again. */
10201 *expr_p = TREE_OPERAND (*expr_p, 0);
10202 ret = GS_OK;
10203 break;
10204 }
10205
10206 ret = gimplify_conversion (expr_p);
10207 if (ret == GS_ERROR)
10208 break;
10209 if (*expr_p != save_expr)
10210 break;
10211 /* FALLTHRU */
10212
10213 case FIX_TRUNC_EXPR:
10214 /* unary_expr: ... | '(' cast ')' val | ... */
10215 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
10216 is_gimple_val, fb_rvalue);
10217 recalculate_side_effects (*expr_p);
10218 break;
10219
10220 case INDIRECT_REF:
10221 {
10222 bool volatilep = TREE_THIS_VOLATILE (*expr_p);
10223 bool notrap = TREE_THIS_NOTRAP (*expr_p);
10224 tree saved_ptr_type = TREE_TYPE (TREE_OPERAND (*expr_p, 0));
10225
10226 *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
10227 if (*expr_p != save_expr)
10228 {
10229 ret = GS_OK;
10230 break;
10231 }
10232
10233 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
10234 is_gimple_reg, fb_rvalue);
10235 if (ret == GS_ERROR)
10236 break;
10237
10238 recalculate_side_effects (*expr_p);
10239 *expr_p = fold_build2_loc (input_location, MEM_REF,
10240 TREE_TYPE (*expr_p),
10241 TREE_OPERAND (*expr_p, 0),
10242 build_int_cst (saved_ptr_type, 0));
10243 TREE_THIS_VOLATILE (*expr_p) = volatilep;
10244 TREE_THIS_NOTRAP (*expr_p) = notrap;
10245 ret = GS_OK;
10246 break;
10247 }
10248
10249 /* We arrive here through the various re-gimplification paths. */
10250 case MEM_REF:
10251 /* First try re-folding the whole thing. */
10252 tmp = fold_binary (MEM_REF, TREE_TYPE (*expr_p),
10253 TREE_OPERAND (*expr_p, 0),
10254 TREE_OPERAND (*expr_p, 1));
10255 if (tmp)
10256 {
10257 REF_REVERSE_STORAGE_ORDER (tmp)
10258 = REF_REVERSE_STORAGE_ORDER (*expr_p);
10259 *expr_p = tmp;
10260 recalculate_side_effects (*expr_p);
10261 ret = GS_OK;
10262 break;
10263 }
10264 /* Avoid re-gimplifying the address operand if it is already
10265 in suitable form. Re-gimplifying would mark the address
10266 operand addressable. Always gimplify when not in SSA form
10267 as we still may have to gimplify decls with value-exprs. */
10268 if (!gimplify_ctxp || !gimplify_ctxp->into_ssa
10269 || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p, 0)))
10270 {
10271 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
10272 is_gimple_mem_ref_addr, fb_rvalue);
10273 if (ret == GS_ERROR)
10274 break;
10275 }
10276 recalculate_side_effects (*expr_p);
10277 ret = GS_ALL_DONE;
10278 break;
10279
10280 /* Constants need not be gimplified. */
10281 case INTEGER_CST:
10282 case REAL_CST:
10283 case FIXED_CST:
10284 case STRING_CST:
10285 case COMPLEX_CST:
10286 case VECTOR_CST:
10287 /* Drop the overflow flag on constants, we do not want
10288 that in the GIMPLE IL. */
10289 if (TREE_OVERFLOW_P (*expr_p))
10290 *expr_p = drop_tree_overflow (*expr_p);
10291 ret = GS_ALL_DONE;
10292 break;
10293
10294 case CONST_DECL:
10295 /* If we require an lvalue, such as for ADDR_EXPR, retain the
10296 CONST_DECL node. Otherwise the decl is replaceable by its
10297 value. */
10298 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
10299 if (fallback & fb_lvalue)
10300 ret = GS_ALL_DONE;
10301 else
10302 {
10303 *expr_p = DECL_INITIAL (*expr_p);
10304 ret = GS_OK;
10305 }
10306 break;
10307
10308 case DECL_EXPR:
10309 ret = gimplify_decl_expr (expr_p, pre_p);
10310 break;
10311
10312 case BIND_EXPR:
10313 ret = gimplify_bind_expr (expr_p, pre_p);
10314 break;
10315
10316 case LOOP_EXPR:
10317 ret = gimplify_loop_expr (expr_p, pre_p);
10318 break;
10319
10320 case SWITCH_EXPR:
10321 ret = gimplify_switch_expr (expr_p, pre_p);
10322 break;
10323
10324 case EXIT_EXPR:
10325 ret = gimplify_exit_expr (expr_p);
10326 break;
10327
10328 case GOTO_EXPR:
10329 /* If the target is not a LABEL_DECL, then it is a computed jump
10330 and the target needs to be gimplified. */
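/* E.g. the GNU C computed goto "goto *dispatch[i];" takes this
   path (an illustrative example). */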
10331 if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
10332 {
10333 ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
10334 NULL, is_gimple_val, fb_rvalue);
10335 if (ret == GS_ERROR)
10336 break;
10337 }
10338 gimplify_seq_add_stmt (pre_p,
10339 gimple_build_goto (GOTO_DESTINATION (*expr_p)));
10340 ret = GS_ALL_DONE;
10341 break;
10342
10343 case PREDICT_EXPR:
10344 gimplify_seq_add_stmt (pre_p,
10345 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
10346 PREDICT_EXPR_OUTCOME (*expr_p)));
10347 ret = GS_ALL_DONE;
10348 break;
10349
10350 case LABEL_EXPR:
10351 ret = GS_ALL_DONE;
10352 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
10353 == current_function_decl);
10354 gimplify_seq_add_stmt (pre_p,
10355 gimple_build_label (LABEL_EXPR_LABEL (*expr_p)));
10356 break;
10357
10358 case CASE_LABEL_EXPR:
10359 ret = gimplify_case_label_expr (expr_p, pre_p);
10360 break;
10361
10362 case RETURN_EXPR:
10363 ret = gimplify_return_expr (*expr_p, pre_p);
10364 break;
10365
10366 case CONSTRUCTOR:
10367 /* Don't reduce this in place; let gimplify_init_constructor work its
10368 magic. But if we're just elaborating this for side effects, just
10369 gimplify any element that has side-effects. */
10370 if (fallback == fb_none)
10371 {
10372 unsigned HOST_WIDE_INT ix;
10373 tree val;
10374 tree temp = NULL_TREE;
10375 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p), ix, val)
10376 if (TREE_SIDE_EFFECTS (val))
10377 append_to_statement_list (val, &temp);
10378
10379 *expr_p = temp;
10380 ret = temp ? GS_OK : GS_ALL_DONE;
10381 }
10382 /* C99 code may assign to an array in a constructed
10383 structure or union, and this has undefined behavior only
10384 on execution, so create a temporary if an lvalue is
10385 required. */
10386 else if (fallback == fb_lvalue)
10387 {
10388 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
10389 mark_addressable (*expr_p);
10390 ret = GS_OK;
10391 }
10392 else
10393 ret = GS_ALL_DONE;
10394 break;
10395
10396 /* The following are special cases that are not handled by the
10397 original GIMPLE grammar. */
10398
10399 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
10400 eliminated. */
10401 case SAVE_EXPR:
10402 ret = gimplify_save_expr (expr_p, pre_p, post_p);
10403 break;
10404
10405 case BIT_FIELD_REF:
10406 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
10407 post_p, is_gimple_lvalue, fb_either);
10408 recalculate_side_effects (*expr_p);
10409 break;
10410
10411 case TARGET_MEM_REF:
10412 {
10413 enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;
10414
10415 if (TMR_BASE (*expr_p))
10416 r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
10417 post_p, is_gimple_mem_ref_addr, fb_either);
10418 if (TMR_INDEX (*expr_p))
10419 r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
10420 post_p, is_gimple_val, fb_rvalue);
10421 if (TMR_INDEX2 (*expr_p))
10422 r1 = gimplify_expr (&TMR_INDEX2 (*expr_p), pre_p,
10423 post_p, is_gimple_val, fb_rvalue);
10424 /* TMR_STEP and TMR_OFFSET are always integer constants. */
10425 ret = MIN (r0, r1);
10426 }
10427 break;
10428
10429 case NON_LVALUE_EXPR:
10430 /* This should have been stripped above. */
10431 gcc_unreachable ();
10432
10433 case ASM_EXPR:
10434 ret = gimplify_asm_expr (expr_p, pre_p, post_p);
10435 break;
10436
10437 case TRY_FINALLY_EXPR:
10438 case TRY_CATCH_EXPR:
10439 {
10440 gimple_seq eval, cleanup;
10441 gtry *try_;
10442
10443 /* Calls to destructors are generated automatically in FINALLY/CATCH
10444 blocks. They should have UNKNOWN_LOCATION as their location. However,
10445 gimplify_call_expr will reset such call stmts to input_location
10446 if it finds a stmt's location is unknown. To prevent that resetting
10447 for destructors, we set input_location to UNKNOWN_LOCATION here.
10448 Note that this only affects the destructor calls in the FINALLY/CATCH
10449 block; input_location is automatically restored to its original value
10450 by the end of gimplify_expr. */
10451 input_location = UNKNOWN_LOCATION;
10452 eval = cleanup = NULL;
10453 gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
10454 gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
10455 /* Don't create bogus GIMPLE_TRY with empty cleanup. */
10456 if (gimple_seq_empty_p (cleanup))
10457 {
10458 gimple_seq_add_seq (pre_p, eval);
10459 ret = GS_ALL_DONE;
10460 break;
10461 }
10462 try_ = gimple_build_try (eval, cleanup,
10463 TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
10464 ? GIMPLE_TRY_FINALLY
10465 : GIMPLE_TRY_CATCH);
10466 if (EXPR_HAS_LOCATION (save_expr))
10467 gimple_set_location (try_, EXPR_LOCATION (save_expr));
10468 else if (LOCATION_LOCUS (saved_location) != UNKNOWN_LOCATION)
10469 gimple_set_location (try_, saved_location);
10470 if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
10471 gimple_try_set_catch_is_cleanup (try_,
10472 TRY_CATCH_IS_CLEANUP (*expr_p));
10473 gimplify_seq_add_stmt (pre_p, try_);
10474 ret = GS_ALL_DONE;
10475 break;
10476 }
10477
10478 case CLEANUP_POINT_EXPR:
10479 ret = gimplify_cleanup_point_expr (expr_p, pre_p);
10480 break;
10481
10482 case TARGET_EXPR:
10483 ret = gimplify_target_expr (expr_p, pre_p, post_p);
10484 break;
10485
10486 case CATCH_EXPR:
10487 {
10488 gimple *c;
10489 gimple_seq handler = NULL;
10490 gimplify_and_add (CATCH_BODY (*expr_p), &handler);
10491 c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
10492 gimplify_seq_add_stmt (pre_p, c);
10493 ret = GS_ALL_DONE;
10494 break;
10495 }
10496
10497 case EH_FILTER_EXPR:
10498 {
10499 gimple *ehf;
10500 gimple_seq failure = NULL;
10501
10502 gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
10503 ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
10504 gimple_set_no_warning (ehf, TREE_NO_WARNING (*expr_p));
10505 gimplify_seq_add_stmt (pre_p, ehf);
10506 ret = GS_ALL_DONE;
10507 break;
10508 }
10509
10510 case OBJ_TYPE_REF:
10511 {
10512 enum gimplify_status r0, r1;
10513 r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
10514 post_p, is_gimple_val, fb_rvalue);
10515 r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
10516 post_p, is_gimple_val, fb_rvalue);
10517 TREE_SIDE_EFFECTS (*expr_p) = 0;
10518 ret = MIN (r0, r1);
10519 }
10520 break;
10521
10522 case LABEL_DECL:
10523 /* We get here when taking the address of a label. We mark
10524 the label as "forced", meaning it can never be removed and
10525 it is a potential target for any computed goto. */
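/* E.g. the GNU C label-address extension "void *p = &&lab;" reaches
   here; lab must then survive for a possible later "goto *p;".
   (Illustrative example.) */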
10526 FORCED_LABEL (*expr_p) = 1;
10527 ret = GS_ALL_DONE;
10528 break;
10529
10530 case STATEMENT_LIST:
10531 ret = gimplify_statement_list (expr_p, pre_p);
10532 break;
10533
10534 case WITH_SIZE_EXPR:
10535 {
10536 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
10537 post_p == &internal_post ? NULL : post_p,
10538 gimple_test_f, fallback);
10539 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
10540 is_gimple_val, fb_rvalue);
10541 ret = GS_ALL_DONE;
10542 }
10543 break;
10544
10545 case VAR_DECL:
10546 case PARM_DECL:
10547 ret = gimplify_var_or_parm_decl (expr_p);
10548 break;
10549
10550 case RESULT_DECL:
10551 /* When within an OMP context, notice uses of variables. */
10552 if (gimplify_omp_ctxp)
10553 omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
10554 ret = GS_ALL_DONE;
10555 break;
10556
10557 case SSA_NAME:
10558 /* Allow callbacks into the gimplifier during optimization. */
10559 ret = GS_ALL_DONE;
10560 break;
10561
10562 case OMP_PARALLEL:
10563 gimplify_omp_parallel (expr_p, pre_p);
10564 ret = GS_ALL_DONE;
10565 break;
10566
10567 case OMP_TASK:
10568 gimplify_omp_task (expr_p, pre_p);
10569 ret = GS_ALL_DONE;
10570 break;
10571
10572 case OMP_FOR:
10573 case OMP_SIMD:
10574 case CILK_SIMD:
10575 case CILK_FOR:
10576 case OMP_DISTRIBUTE:
10577 case OMP_TASKLOOP:
10578 case OACC_LOOP:
10579 ret = gimplify_omp_for (expr_p, pre_p);
10580 break;
10581
10582 case OACC_CACHE:
10583 gimplify_oacc_cache (expr_p, pre_p);
10584 ret = GS_ALL_DONE;
10585 break;
10586
10587 case OACC_DECLARE:
10588 gimplify_oacc_declare (expr_p, pre_p);
10589 ret = GS_ALL_DONE;
10590 break;
10591
10592 case OACC_HOST_DATA:
10593 case OACC_DATA:
10594 case OACC_KERNELS:
10595 case OACC_PARALLEL:
10596 case OMP_SECTIONS:
10597 case OMP_SINGLE:
10598 case OMP_TARGET:
10599 case OMP_TARGET_DATA:
10600 case OMP_TEAMS:
10601 gimplify_omp_workshare (expr_p, pre_p);
10602 ret = GS_ALL_DONE;
10603 break;
10604
10605 case OACC_ENTER_DATA:
10606 case OACC_EXIT_DATA:
10607 case OACC_UPDATE:
10608 case OMP_TARGET_UPDATE:
10609 case OMP_TARGET_ENTER_DATA:
10610 case OMP_TARGET_EXIT_DATA:
10611 gimplify_omp_target_update (expr_p, pre_p);
10612 ret = GS_ALL_DONE;
10613 break;
10614
10615 case OMP_SECTION:
10616 case OMP_MASTER:
10617 case OMP_TASKGROUP:
10618 case OMP_ORDERED:
10619 case OMP_CRITICAL:
10620 {
10621 gimple_seq body = NULL;
10622 gimple *g;
10623
10624 gimplify_and_add (OMP_BODY (*expr_p), &body);
10625 switch (TREE_CODE (*expr_p))
10626 {
10627 case OMP_SECTION:
10628 g = gimple_build_omp_section (body);
10629 break;
10630 case OMP_MASTER:
10631 g = gimple_build_omp_master (body);
10632 break;
10633 case OMP_TASKGROUP:
10634 {
10635 gimple_seq cleanup = NULL;
10636 tree fn
10637 = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_END);
10638 g = gimple_build_call (fn, 0);
10639 gimple_seq_add_stmt (&cleanup, g);
10640 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
10641 body = NULL;
10642 gimple_seq_add_stmt (&body, g);
10643 g = gimple_build_omp_taskgroup (body);
10644 }
10645 break;
10646 case OMP_ORDERED:
10647 g = gimplify_omp_ordered (*expr_p, body);
10648 break;
10649 case OMP_CRITICAL:
10650 gimplify_scan_omp_clauses (&OMP_CRITICAL_CLAUSES (*expr_p),
10651 pre_p, ORT_WORKSHARE, OMP_CRITICAL);
10652 gimplify_adjust_omp_clauses (pre_p, body,
10653 &OMP_CRITICAL_CLAUSES (*expr_p),
10654 OMP_CRITICAL);
10655 g = gimple_build_omp_critical (body,
10656 OMP_CRITICAL_NAME (*expr_p),
10657 OMP_CRITICAL_CLAUSES (*expr_p));
10658 break;
10659 default:
10660 gcc_unreachable ();
10661 }
10662 gimplify_seq_add_stmt (pre_p, g);
10663 ret = GS_ALL_DONE;
10664 break;
10665 }
10666
10667 case OMP_ATOMIC:
10668 case OMP_ATOMIC_READ:
10669 case OMP_ATOMIC_CAPTURE_OLD:
10670 case OMP_ATOMIC_CAPTURE_NEW:
10671 ret = gimplify_omp_atomic (expr_p, pre_p);
10672 break;
10673
10674 case TRANSACTION_EXPR:
10675 ret = gimplify_transaction (expr_p, pre_p);
10676 break;
10677
10678 case TRUTH_AND_EXPR:
10679 case TRUTH_OR_EXPR:
10680 case TRUTH_XOR_EXPR:
10681 {
10682 tree orig_type = TREE_TYPE (*expr_p);
10683 tree new_type, xop0, xop1;
10684 *expr_p = gimple_boolify (*expr_p);
10685 new_type = TREE_TYPE (*expr_p);
10686 if (!useless_type_conversion_p (orig_type, new_type))
10687 {
10688 *expr_p = fold_convert_loc (input_location, orig_type, *expr_p);
10689 ret = GS_OK;
10690 break;
10691 }
10692
10693 /* Boolified binary truth expressions are semantically equivalent
10694 to bitwise binary expressions. Canonicalize them to the
10695 bitwise variant. */
10696 switch (TREE_CODE (*expr_p))
10697 {
10698 case TRUTH_AND_EXPR:
10699 TREE_SET_CODE (*expr_p, BIT_AND_EXPR);
10700 break;
10701 case TRUTH_OR_EXPR:
10702 TREE_SET_CODE (*expr_p, BIT_IOR_EXPR);
10703 break;
10704 case TRUTH_XOR_EXPR:
10705 TREE_SET_CODE (*expr_p, BIT_XOR_EXPR);
10706 break;
10707 default:
10708 break;
10709 }
10710 /* Now make sure that operands have compatible type to
10711 expression's new_type. */
10712 xop0 = TREE_OPERAND (*expr_p, 0);
10713 xop1 = TREE_OPERAND (*expr_p, 1);
10714 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop0)))
10715 TREE_OPERAND (*expr_p, 0) = fold_convert_loc (input_location,
10716 new_type,
10717 xop0);
10718 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop1)))
10719 TREE_OPERAND (*expr_p, 1) = fold_convert_loc (input_location,
10720 new_type,
10721 xop1);
10722 /* Continue classified as tcc_binary. */
10723 goto expr_2;
10724 }
10725
10726 case FMA_EXPR:
10727 case VEC_COND_EXPR:
10728 case VEC_PERM_EXPR:
10729 /* Classified as tcc_expression. */
10730 goto expr_3;
10731
10732 case POINTER_PLUS_EXPR:
10733 {
10734 enum gimplify_status r0, r1;
10735 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
10736 post_p, is_gimple_val, fb_rvalue);
10737 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
10738 post_p, is_gimple_val, fb_rvalue);
10739 recalculate_side_effects (*expr_p);
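	/* gimplify_status values are ordered with GS_ERROR lowest, so
	   MIN combines R0 and R1 into the more pessimistic of the two
	   results.  */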
10740 ret = MIN (r0, r1);
10741 break;
10742 }
10743
10744 case CILK_SYNC_STMT:
10745 {
10746 if (!fn_contains_cilk_spawn_p (cfun))
10747 {
10748 error_at (EXPR_LOCATION (*expr_p),
10749 "expected %<_Cilk_spawn%> before %<_Cilk_sync%>");
10750 ret = GS_ERROR;
10751 }
10752 else
10753 {
10754 gimplify_cilk_sync (expr_p, pre_p);
10755 ret = GS_ALL_DONE;
10756 }
10757 break;
10758 }
10759
10760 default:
10761 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
10762 {
10763 case tcc_comparison:
10764 	  /* Handle comparison of non-scalar-mode aggregate objects with
10765 	     a call to memcmp.  It would be nice to only have to do
10766 this for variable-sized objects, but then we'd have to allow
10767 the same nest of reference nodes we allow for MODIFY_EXPR and
10768 that's too complex.
10769
10770 Compare scalar mode aggregates as scalar mode values. Using
10771 memcmp for them would be very inefficient at best, and is
10772 plain wrong if bitfields are involved. */
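	  /* As an illustration (names S1/S2 invented): an equality test
	     of two BLKmode structures becomes roughly
	       memcmp (&s1, &s2, sizeof (s1)) == 0
	     while an aggregate whose TYPE_MODE is a scalar integer mode
	     is view-converted to that integer type and compared
	     directly.  */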
10773 {
10774 tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
10775
10776 /* Vector comparisons need no boolification. */
10777 if (TREE_CODE (type) == VECTOR_TYPE)
10778 goto expr_2;
10779 else if (!AGGREGATE_TYPE_P (type))
10780 {
10781 tree org_type = TREE_TYPE (*expr_p);
10782 *expr_p = gimple_boolify (*expr_p);
10783 if (!useless_type_conversion_p (org_type,
10784 TREE_TYPE (*expr_p)))
10785 {
10786 *expr_p = fold_convert_loc (input_location,
10787 org_type, *expr_p);
10788 ret = GS_OK;
10789 }
10790 else
10791 goto expr_2;
10792 }
10793 else if (TYPE_MODE (type) != BLKmode)
10794 ret = gimplify_scalar_mode_aggregate_compare (expr_p);
10795 else
10796 ret = gimplify_variable_sized_compare (expr_p);
10797
10798 break;
10799 }
10800
10801 /* If *EXPR_P does not need to be special-cased, handle it
10802 according to its class. */
10803 case tcc_unary:
10804 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
10805 post_p, is_gimple_val, fb_rvalue);
10806 break;
10807
10808 case tcc_binary:
10809 expr_2:
10810 {
10811 enum gimplify_status r0, r1;
10812
10813 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
10814 post_p, is_gimple_val, fb_rvalue);
10815 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
10816 post_p, is_gimple_val, fb_rvalue);
10817
10818 ret = MIN (r0, r1);
10819 break;
10820 }
10821
10822 expr_3:
10823 {
10824 enum gimplify_status r0, r1, r2;
10825
10826 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
10827 post_p, is_gimple_val, fb_rvalue);
10828 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
10829 post_p, is_gimple_val, fb_rvalue);
10830 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
10831 post_p, is_gimple_val, fb_rvalue);
10832
10833 ret = MIN (MIN (r0, r1), r2);
10834 break;
10835 }
10836
10837 case tcc_declaration:
10838 case tcc_constant:
10839 ret = GS_ALL_DONE;
10840 goto dont_recalculate;
10841
10842 default:
10843 gcc_unreachable ();
10844 }
10845
10846 recalculate_side_effects (*expr_p);
10847
10848 dont_recalculate:
10849 break;
10850 }
10851
10852 gcc_assert (*expr_p || ret != GS_OK);
10853 }
10854 while (ret == GS_OK);
10855
10856 /* If we encountered an error_mark somewhere nested inside, either
10857 stub out the statement or propagate the error back out. */
10858 if (ret == GS_ERROR)
10859 {
10860 if (is_statement)
10861 *expr_p = NULL;
10862 goto out;
10863 }
10864
10865 /* This was only valid as a return value from the langhook, which
10866 we handled. Make sure it doesn't escape from any other context. */
10867 gcc_assert (ret != GS_UNHANDLED);
10868
10869 if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
10870 {
10871 /* We aren't looking for a value, and we don't have a valid
10872 statement. If it doesn't have side-effects, throw it away. */
10873 if (!TREE_SIDE_EFFECTS (*expr_p))
10874 *expr_p = NULL;
10875 else if (!TREE_THIS_VOLATILE (*expr_p))
10876 {
10877 /* This is probably a _REF that contains something nested that
10878 has side effects. Recurse through the operands to find it. */
10879 enum tree_code code = TREE_CODE (*expr_p);
10880
10881 switch (code)
10882 {
10883 case COMPONENT_REF:
10884 case REALPART_EXPR:
10885 case IMAGPART_EXPR:
10886 case VIEW_CONVERT_EXPR:
10887 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
10888 gimple_test_f, fallback);
10889 break;
10890
10891 case ARRAY_REF:
10892 case ARRAY_RANGE_REF:
10893 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
10894 gimple_test_f, fallback);
10895 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
10896 gimple_test_f, fallback);
10897 break;
10898
10899 default:
10900 /* Anything else with side-effects must be converted to
10901 a valid statement before we get here. */
10902 gcc_unreachable ();
10903 }
10904
10905 *expr_p = NULL;
10906 }
10907 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
10908 && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode)
10909 {
10910 /* Historically, the compiler has treated a bare reference
10911 to a non-BLKmode volatile lvalue as forcing a load. */
10912 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
10913
10914 /* Normally, we do not want to create a temporary for a
10915 TREE_ADDRESSABLE type because such a type should not be
10916 	     copied by bitwise-assignment.  However, we make an
10917 	     exception here, as all we are doing is ensuring that
10918 we read the bytes that make up the type. We use
10919 create_tmp_var_raw because create_tmp_var will abort when
10920 given a TREE_ADDRESSABLE type. */
10921 tree tmp = create_tmp_var_raw (type, "vol");
10922 gimple_add_tmp_var (tmp);
10923 gimplify_assign (tmp, *expr_p, pre_p);
10924 *expr_p = NULL;
10925 }
10926 else
10927 /* We can't do anything useful with a volatile reference to
10928 an incomplete type, so just throw it away. Likewise for
10929 a BLKmode type, since any implicit inner load should
10930 already have been turned into an explicit one by the
10931 gimplification process. */
10932 *expr_p = NULL;
10933 }
10934
10935 /* If we are gimplifying at the statement level, we're done. Tack
10936 everything together and return. */
10937 if (fallback == fb_none || is_statement)
10938 {
10939 /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
10940 it out for GC to reclaim it. */
10941 *expr_p = NULL_TREE;
10942
10943 if (!gimple_seq_empty_p (internal_pre)
10944 || !gimple_seq_empty_p (internal_post))
10945 {
10946 gimplify_seq_add_seq (&internal_pre, internal_post);
10947 gimplify_seq_add_seq (pre_p, internal_pre);
10948 }
10949
10950 /* The result of gimplifying *EXPR_P is going to be the last few
10951 statements in *PRE_P and *POST_P. Add location information
10952 to all the statements that were added by the gimplification
10953 helpers. */
10954 if (!gimple_seq_empty_p (*pre_p))
10955 annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);
10956
10957 if (!gimple_seq_empty_p (*post_p))
10958 annotate_all_with_location_after (*post_p, post_last_gsi,
10959 input_location);
10960
10961 goto out;
10962 }
10963
10964 #ifdef ENABLE_GIMPLE_CHECKING
10965 if (*expr_p)
10966 {
10967 enum tree_code code = TREE_CODE (*expr_p);
10968 /* These expressions should already be in gimple IR form. */
10969 gcc_assert (code != MODIFY_EXPR
10970 && code != ASM_EXPR
10971 && code != BIND_EXPR
10972 && code != CATCH_EXPR
10973 && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
10974 && code != EH_FILTER_EXPR
10975 && code != GOTO_EXPR
10976 && code != LABEL_EXPR
10977 && code != LOOP_EXPR
10978 && code != SWITCH_EXPR
10979 && code != TRY_FINALLY_EXPR
10980 && code != OACC_PARALLEL
10981 && code != OACC_KERNELS
10982 && code != OACC_DATA
10983 && code != OACC_HOST_DATA
10984 && code != OACC_DECLARE
10985 && code != OACC_UPDATE
10986 && code != OACC_ENTER_DATA
10987 && code != OACC_EXIT_DATA
10988 && code != OACC_CACHE
10989 && code != OMP_CRITICAL
10990 && code != OMP_FOR
10991 && code != OACC_LOOP
10992 && code != OMP_MASTER
10993 && code != OMP_TASKGROUP
10994 && code != OMP_ORDERED
10995 && code != OMP_PARALLEL
10996 && code != OMP_SECTIONS
10997 && code != OMP_SECTION
10998 && code != OMP_SINGLE);
10999 }
11000 #endif
11001
11002 /* Otherwise we're gimplifying a subexpression, so the resulting
11003 value is interesting. If it's a valid operand that matches
11004 GIMPLE_TEST_F, we're done. Unless we are handling some
11005 post-effects internally; if that's the case, we need to copy into
11006 a temporary before adding the post-effects to POST_P. */
11007 if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
11008 goto out;
11009
11010 /* Otherwise, we need to create a new temporary for the gimplified
11011 expression. */
11012
11013 /* We can't return an lvalue if we have an internal postqueue. The
11014 object the lvalue refers to would (probably) be modified by the
11015 postqueue; we need to copy the value out first, which means an
11016 rvalue. */
11017 if ((fallback & fb_lvalue)
11018 && gimple_seq_empty_p (internal_post)
11019 && is_gimple_addressable (*expr_p))
11020 {
11021 /* An lvalue will do. Take the address of the expression, store it
11022 in a temporary, and replace the expression with an INDIRECT_REF of
11023 that temporary. */
11024 tmp = build_fold_addr_expr_loc (input_location, *expr_p);
11025 gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
11026 *expr_p = build_simple_mem_ref (tmp);
11027 }
11028 else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
11029 {
11030 /* An rvalue will do. Assign the gimplified expression into a
11031 new temporary TMP and replace the original expression with
11032 TMP. First, make sure that the expression has a type so that
11033 it can be assigned into a temporary. */
11034 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
11035 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
11036 }
11037 else
11038 {
11039 #ifdef ENABLE_GIMPLE_CHECKING
11040 if (!(fallback & fb_mayfail))
11041 {
11042 fprintf (stderr, "gimplification failed:\n");
11043 print_generic_expr (stderr, *expr_p, 0);
11044 debug_tree (*expr_p);
11045 internal_error ("gimplification failed");
11046 }
11047 #endif
11048 gcc_assert (fallback & fb_mayfail);
11049
11050 /* If this is an asm statement, and the user asked for the
11051 impossible, don't die. Fail and let gimplify_asm_expr
11052 issue an error. */
11053 ret = GS_ERROR;
11054 goto out;
11055 }
11056
11057 /* Make sure the temporary matches our predicate. */
11058 gcc_assert ((*gimple_test_f) (*expr_p));
11059
11060 if (!gimple_seq_empty_p (internal_post))
11061 {
11062 annotate_all_with_location (internal_post, input_location);
11063 gimplify_seq_add_seq (pre_p, internal_post);
11064 }
11065
11066 out:
11067 input_location = saved_location;
11068 return ret;
11069 }
11070
11071 /* Look through TYPE for variable-sized objects and gimplify each such
11072 size that we find. Add to LIST_P any statements generated. */
11073
11074 void
11075 gimplify_type_sizes (tree type, gimple_seq *list_p)
11076 {
11077 tree field, t;
11078
11079 if (type == NULL || type == error_mark_node)
11080 return;
11081
11082 /* We first do the main variant, then copy into any other variants. */
11083 type = TYPE_MAIN_VARIANT (type);
11084
11085 /* Avoid infinite recursion. */
11086 if (TYPE_SIZES_GIMPLIFIED (type))
11087 return;
11088
11089 TYPE_SIZES_GIMPLIFIED (type) = 1;
11090
11091 switch (TREE_CODE (type))
11092 {
11093 case INTEGER_TYPE:
11094 case ENUMERAL_TYPE:
11095 case BOOLEAN_TYPE:
11096 case REAL_TYPE:
11097 case FIXED_POINT_TYPE:
11098 gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
11099 gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);
11100
11101 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
11102 {
11103 TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
11104 TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
11105 }
11106 break;
11107
11108 case ARRAY_TYPE:
11109 /* These types may not have declarations, so handle them here. */
11110 gimplify_type_sizes (TREE_TYPE (type), list_p);
11111 gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
11112 /* Ensure VLA bounds aren't removed, for -O0 they should be variables
11113 with assigned stack slots, for -O1+ -g they should be tracked
11114 by VTA. */
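	 /* For example, given the C99 declaration
	      int a[n];
	    the domain's TYPE_MAX_VALUE is typically an artificial
	    VAR_DECL holding n - 1 by this point; clearing
	    DECL_IGNORED_P on it keeps the bound available to the
	    debugger.  */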
11115 if (!(TYPE_NAME (type)
11116 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
11117 && DECL_IGNORED_P (TYPE_NAME (type)))
11118 && TYPE_DOMAIN (type)
11119 && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
11120 {
11121 t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
11122 if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
11123 DECL_IGNORED_P (t) = 0;
11124 t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
11125 if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
11126 DECL_IGNORED_P (t) = 0;
11127 }
11128 break;
11129
11130 case RECORD_TYPE:
11131 case UNION_TYPE:
11132 case QUAL_UNION_TYPE:
11133 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
11134 if (TREE_CODE (field) == FIELD_DECL)
11135 {
11136 gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
11137 gimplify_one_sizepos (&DECL_SIZE (field), list_p);
11138 gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
11139 gimplify_type_sizes (TREE_TYPE (field), list_p);
11140 }
11141 break;
11142
11143 case POINTER_TYPE:
11144 case REFERENCE_TYPE:
11145 /* We used to recurse on the pointed-to type here, which turned out to
11146 be incorrect because its definition might refer to variables not
11147 yet initialized at this point if a forward declaration is involved.
11148
11149 It was actually useful for anonymous pointed-to types to ensure
11150 that the sizes evaluation dominates every possible later use of the
11151 values. Restricting to such types here would be safe since there
11152 is no possible forward declaration around, but would introduce an
11153 undesirable middle-end semantic to anonymity. We then defer to
11154 front-ends the responsibility of ensuring that the sizes are
11155 evaluated both early and late enough, e.g. by attaching artificial
11156 type declarations to the tree. */
11157 break;
11158
11159 default:
11160 break;
11161 }
11162
11163 gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
11164 gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);
11165
11166 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
11167 {
11168 TYPE_SIZE (t) = TYPE_SIZE (type);
11169 TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
11170 TYPE_SIZES_GIMPLIFIED (t) = 1;
11171 }
11172 }
11173
11174 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
11175 a size or position, has had all of its SAVE_EXPRs evaluated.
11176 We add any required statements to *STMT_P. */
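/* For instance (temporary name illustrative), a variably-sized type
   whose TYPE_SIZE is
     SAVE_EXPR <n * 32>
   has that expression evaluated here into something like
     D.1234 = n * 32;
   and *EXPR_P is replaced by the temporary.  */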
11177
11178 void
11179 gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
11180 {
11181 tree expr = *expr_p;
11182
11183   /* We don't do anything if the value isn't there, is constant, or contains
11184      a PLACEHOLDER_EXPR.  We also don't want to do anything if it's already
11185      a VAR_DECL.  If it's a VAR_DECL from another function, the gimplifier
11186      would want to replace it with a new variable, but that would cause problems
11187      if this type is from outside the function; such a VAR_DECL is OK here.  */
11188 if (is_gimple_sizepos (expr))
11189 return;
11190
11191 *expr_p = unshare_expr (expr);
11192
11193 gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue);
11194 }
11195
11196 /* Gimplify the body of statements of FNDECL and return a GIMPLE_BIND node
11197 containing the sequence of corresponding GIMPLE statements. If DO_PARMS
11198 is true, also gimplify the parameters. */
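/* In outline, the result is a single

     GIMPLE_BIND <block, <gimplified body of FNDECL>>

   with any callee-copy parameter setup statements prepended to the
   bind's body.  */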
11199
11200 gbind *
11201 gimplify_body (tree fndecl, bool do_parms)
11202 {
11203 location_t saved_location = input_location;
11204 gimple_seq parm_stmts, seq;
11205 gimple *outer_stmt;
11206 gbind *outer_bind;
11207 struct cgraph_node *cgn;
11208
11209 timevar_push (TV_TREE_GIMPLIFY);
11210
11211 /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
11212 gimplification. */
11213 default_rtl_profile ();
11214
11215 gcc_assert (gimplify_ctxp == NULL);
11216 push_gimplify_context ();
11217
11218 if (flag_openacc || flag_openmp)
11219 {
11220 gcc_assert (gimplify_omp_ctxp == NULL);
11221 if (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (fndecl)))
11222 gimplify_omp_ctxp = new_omp_context (ORT_TARGET);
11223 }
11224
11225 /* Unshare most shared trees in the body and in that of any nested functions.
11226 It would seem we don't have to do this for nested functions because
11227 they are supposed to be output and then the outer function gimplified
11228 first, but the g++ front end doesn't always do it that way. */
11229 unshare_body (fndecl);
11230 unvisit_body (fndecl);
11231
11232 cgn = cgraph_node::get (fndecl);
11233 if (cgn && cgn->origin)
11234 nonlocal_vlas = new hash_set<tree>;
11235
11236 /* Make sure input_location isn't set to something weird. */
11237 input_location = DECL_SOURCE_LOCATION (fndecl);
11238
11239 /* Resolve callee-copies. This has to be done before processing
11240 the body so that DECL_VALUE_EXPR gets processed correctly. */
11241 parm_stmts = do_parms ? gimplify_parameters () : NULL;
11242
11243 /* Gimplify the function's body. */
11244 seq = NULL;
11245 gimplify_stmt (&DECL_SAVED_TREE (fndecl), &seq);
11246 outer_stmt = gimple_seq_first_stmt (seq);
11247 if (!outer_stmt)
11248 {
11249 outer_stmt = gimple_build_nop ();
11250 gimplify_seq_add_stmt (&seq, outer_stmt);
11251 }
11252
11253 /* The body must contain exactly one statement, a GIMPLE_BIND. If this is
11254 not the case, wrap everything in a GIMPLE_BIND to make it so. */
11255 if (gimple_code (outer_stmt) == GIMPLE_BIND
11256 && gimple_seq_first (seq) == gimple_seq_last (seq))
11257 outer_bind = as_a <gbind *> (outer_stmt);
11258 else
11259 outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);
11260
11261 DECL_SAVED_TREE (fndecl) = NULL_TREE;
11262
11263 /* If we had callee-copies statements, insert them at the beginning
11264 of the function and clear DECL_VALUE_EXPR_P on the parameters. */
11265 if (!gimple_seq_empty_p (parm_stmts))
11266 {
11267 tree parm;
11268
11269 gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
11270 gimple_bind_set_body (outer_bind, parm_stmts);
11271
11272 for (parm = DECL_ARGUMENTS (current_function_decl);
11273 parm; parm = DECL_CHAIN (parm))
11274 if (DECL_HAS_VALUE_EXPR_P (parm))
11275 {
11276 DECL_HAS_VALUE_EXPR_P (parm) = 0;
11277 DECL_IGNORED_P (parm) = 0;
11278 }
11279 }
11280
11281 if (nonlocal_vlas)
11282 {
11283 if (nonlocal_vla_vars)
11284 {
11285 /* tree-nested.c may later on call declare_vars (..., true);
11286 which relies on BLOCK_VARS chain to be the tail of the
11287 gimple_bind_vars chain. Ensure we don't violate that
11288 assumption. */
11289 if (gimple_bind_block (outer_bind)
11290 == DECL_INITIAL (current_function_decl))
11291 declare_vars (nonlocal_vla_vars, outer_bind, true);
11292 else
11293 BLOCK_VARS (DECL_INITIAL (current_function_decl))
11294 = chainon (BLOCK_VARS (DECL_INITIAL (current_function_decl)),
11295 nonlocal_vla_vars);
11296 nonlocal_vla_vars = NULL_TREE;
11297 }
11298 delete nonlocal_vlas;
11299 nonlocal_vlas = NULL;
11300 }
11301
11302 if ((flag_openacc || flag_openmp || flag_openmp_simd)
11303 && gimplify_omp_ctxp)
11304 {
11305 delete_omp_context (gimplify_omp_ctxp);
11306 gimplify_omp_ctxp = NULL;
11307 }
11308
11309 pop_gimplify_context (outer_bind);
11310 gcc_assert (gimplify_ctxp == NULL);
11311
11312 if (flag_checking && !seen_error ())
11313 verify_gimple_in_seq (gimple_bind_body (outer_bind));
11314
11315 timevar_pop (TV_TREE_GIMPLIFY);
11316 input_location = saved_location;
11317
11318 return outer_bind;
11319 }
11320
11321 typedef char *char_p; /* For DEF_VEC_P. */
11322
11323 /* Return whether we should exclude FNDECL from instrumentation. */
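/* Matching is done with strstr against the names given to
   -finstrument-functions-exclude-function-list and
   -finstrument-functions-exclude-file-list, so e.g. excluding "foo"
   also excludes "foobar"; it is a substring test, not an exact
   match.  */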
11324
11325 static bool
11326 flag_instrument_functions_exclude_p (tree fndecl)
11327 {
11328 vec<char_p> *v;
11329
11330 v = (vec<char_p> *) flag_instrument_functions_exclude_functions;
11331 if (v && v->length () > 0)
11332 {
11333 const char *name;
11334 int i;
11335 char *s;
11336
11337 name = lang_hooks.decl_printable_name (fndecl, 0);
11338 FOR_EACH_VEC_ELT (*v, i, s)
11339 if (strstr (name, s) != NULL)
11340 return true;
11341 }
11342
11343 v = (vec<char_p> *) flag_instrument_functions_exclude_files;
11344 if (v && v->length () > 0)
11345 {
11346 const char *name;
11347 int i;
11348 char *s;
11349
11350 name = DECL_SOURCE_FILE (fndecl);
11351 FOR_EACH_VEC_ELT (*v, i, s)
11352 if (strstr (name, s) != NULL)
11353 return true;
11354 }
11355
11356 return false;
11357 }
11358
11359 /* Entry point to the gimplification pass. FNDECL is the FUNCTION_DECL
11360 node for the function we want to gimplify.
11361
11362 Return the sequence of GIMPLE statements corresponding to the body
11363 of FNDECL. */
11364
11365 void
11366 gimplify_function_tree (tree fndecl)
11367 {
11368 tree parm, ret;
11369 gimple_seq seq;
11370 gbind *bind;
11371
11372 gcc_assert (!gimple_body (fndecl));
11373
11374 if (DECL_STRUCT_FUNCTION (fndecl))
11375 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
11376 else
11377 push_struct_function (fndecl);
11378
11379 /* Tentatively set PROP_gimple_lva here, and reset it in gimplify_va_arg_expr
11380 if necessary. */
11381 cfun->curr_properties |= PROP_gimple_lva;
11382
11383 for (parm = DECL_ARGUMENTS (fndecl); parm ; parm = DECL_CHAIN (parm))
11384 {
11385 /* Preliminarily mark non-addressed complex variables as eligible
11386 for promotion to gimple registers. We'll transform their uses
11387 as we find them. */
11388 if ((TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
11389 || TREE_CODE (TREE_TYPE (parm)) == VECTOR_TYPE)
11390 && !TREE_THIS_VOLATILE (parm)
11391 && !needs_to_live_in_memory (parm))
11392 DECL_GIMPLE_REG_P (parm) = 1;
11393 }
11394
11395 ret = DECL_RESULT (fndecl);
11396 if ((TREE_CODE (TREE_TYPE (ret)) == COMPLEX_TYPE
11397 || TREE_CODE (TREE_TYPE (ret)) == VECTOR_TYPE)
11398 && !needs_to_live_in_memory (ret))
11399 DECL_GIMPLE_REG_P (ret) = 1;
11400
11401 bind = gimplify_body (fndecl, true);
11402
11403 /* The tree body of the function is no longer needed, replace it
11404 with the new GIMPLE body. */
11405 seq = NULL;
11406 gimple_seq_add_stmt (&seq, bind);
11407 gimple_set_body (fndecl, seq);
11408
11409 /* If we're instrumenting function entry/exit, then prepend the call to
11410 the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
11411 catch the exit hook. */
11412 /* ??? Add some way to ignore exceptions for this TFE. */
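  /* In outline (temporary names illustrative), the instrumented
     function then has the shape

       t1 = __builtin_return_address (0);
       __cyg_profile_func_enter (this_fn, t1);
       try
	 {
	   <original body>
	 }
       finally
	 {
	   t2 = __builtin_return_address (0);
	   __cyg_profile_func_exit (this_fn, t2);
	 }

     where this_fn is the address of the current function.  */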
11413 if (flag_instrument_function_entry_exit
11414 && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
11415 && !flag_instrument_functions_exclude_p (fndecl))
11416 {
11417 tree x;
11418 gbind *new_bind;
11419 gimple *tf;
11420 gimple_seq cleanup = NULL, body = NULL;
11421 tree tmp_var;
11422 gcall *call;
11423
11424 x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
11425 call = gimple_build_call (x, 1, integer_zero_node);
11426 tmp_var = create_tmp_var (ptr_type_node, "return_addr");
11427 gimple_call_set_lhs (call, tmp_var);
11428 gimplify_seq_add_stmt (&cleanup, call);
11429 x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_EXIT);
11430 call = gimple_build_call (x, 2,
11431 build_fold_addr_expr (current_function_decl),
11432 tmp_var);
11433 gimplify_seq_add_stmt (&cleanup, call);
11434 tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);
11435
11436 x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
11437 call = gimple_build_call (x, 1, integer_zero_node);
11438 tmp_var = create_tmp_var (ptr_type_node, "return_addr");
11439 gimple_call_set_lhs (call, tmp_var);
11440 gimplify_seq_add_stmt (&body, call);
11441 x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_ENTER);
11442 call = gimple_build_call (x, 2,
11443 build_fold_addr_expr (current_function_decl),
11444 tmp_var);
11445 gimplify_seq_add_stmt (&body, call);
11446 gimplify_seq_add_stmt (&body, tf);
11447 new_bind = gimple_build_bind (NULL, body, gimple_bind_block (bind));
11448 /* Clear the block for BIND, since it is no longer directly inside
11449 the function, but within a try block. */
11450 gimple_bind_set_block (bind, NULL);
11451
11452 /* Replace the current function body with the body
11453 wrapped in the try/finally TF. */
11454 seq = NULL;
11455 gimple_seq_add_stmt (&seq, new_bind);
11456 gimple_set_body (fndecl, seq);
11457 bind = new_bind;
11458 }
11459
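  /* Similarly, for -fsanitize=thread the (possibly already
     instrumented) body is wrapped as roughly
       try { <body> } finally { IFN_TSAN_FUNC_EXIT (); }
     so the race detector sees the function exit on every path.  */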
11460 if ((flag_sanitize & SANITIZE_THREAD) != 0
11461 && !lookup_attribute ("no_sanitize_thread", DECL_ATTRIBUTES (fndecl)))
11462 {
11463 gcall *call = gimple_build_call_internal (IFN_TSAN_FUNC_EXIT, 0);
11464 gimple *tf = gimple_build_try (seq, call, GIMPLE_TRY_FINALLY);
11465 gbind *new_bind = gimple_build_bind (NULL, tf, gimple_bind_block (bind));
11466 /* Clear the block for BIND, since it is no longer directly inside
11467 the function, but within a try block. */
11468 gimple_bind_set_block (bind, NULL);
11469 /* Replace the current function body with the body
11470 wrapped in the try/finally TF. */
11471 seq = NULL;
11472 gimple_seq_add_stmt (&seq, new_bind);
11473 gimple_set_body (fndecl, seq);
11474 }
11475
11476 DECL_SAVED_TREE (fndecl) = NULL_TREE;
11477 cfun->curr_properties |= PROP_gimple_any;
11478
11479 pop_cfun ();
11480
11481 dump_function (TDI_generic, fndecl);
11482 }
11483
11484 /* Return a dummy expression of type TYPE in order to keep going after an
11485 error. */
11486
11487 static tree
11488 dummy_object (tree type)
11489 {
11490 tree t = build_int_cst (build_pointer_type (type), 0);
11491 return build2 (MEM_REF, type, t, t);
11492 }
11493
11494 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
11495 builtin function, but a very special sort of operator. */
11496
11497 enum gimplify_status
11498 gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p,
11499 gimple_seq *post_p ATTRIBUTE_UNUSED)
11500 {
11501 tree promoted_type, have_va_type;
11502 tree valist = TREE_OPERAND (*expr_p, 0);
11503 tree type = TREE_TYPE (*expr_p);
11504 tree t, tag, aptag;
11505 location_t loc = EXPR_LOCATION (*expr_p);
11506
11507 /* Verify that valist is of the proper type. */
11508 have_va_type = TREE_TYPE (valist);
11509 if (have_va_type == error_mark_node)
11510 return GS_ERROR;
11511 have_va_type = targetm.canonical_va_list_type (have_va_type);
11512
11513 if (have_va_type == NULL_TREE)
11514 {
11515 error_at (loc, "first argument to %<va_arg%> not of type %<va_list%>");
11516 return GS_ERROR;
11517 }
11518
11519 /* Generate a diagnostic for requesting data of a type that cannot
11520 be passed through `...' due to type promotion at the call site. */
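  /* A classic example is va_arg (ap, char): the corresponding argument
     was promoted to int at the call site, so extracting it as char has
     undefined behavior.  */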
11521 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
11522 != type)
11523 {
11524 static bool gave_help;
11525 bool warned;
11526
11527 /* Unfortunately, this is merely undefined, rather than a constraint
11528 violation, so we cannot make this an error. If this call is never
11529 executed, the program is still strictly conforming. */
11530 warned = warning_at (loc, 0,
11531 "%qT is promoted to %qT when passed through %<...%>",
11532 type, promoted_type);
11533 if (!gave_help && warned)
11534 {
11535 gave_help = true;
11536 inform (loc, "(so you should pass %qT not %qT to %<va_arg%>)",
11537 promoted_type, type);
11538 }
11539
11540 /* We can, however, treat "undefined" any way we please.
11541 Call abort to encourage the user to fix the program. */
11542 if (warned)
11543 inform (loc, "if this code is reached, the program will abort");
11544 /* Before the abort, allow the evaluation of the va_list
11545 expression to exit or longjmp. */
11546 gimplify_and_add (valist, pre_p);
11547 t = build_call_expr_loc (loc,
11548 builtin_decl_implicit (BUILT_IN_TRAP), 0);
11549 gimplify_and_add (t, pre_p);
11550
11551 /* This is dead code, but go ahead and finish so that the
11552 mode of the result comes out right. */
11553 *expr_p = dummy_object (type);
11554 return GS_ALL_DONE;
11555 }
11556
11557 tag = build_int_cst (build_pointer_type (type), 0);
11558 aptag = build_int_cst (TREE_TYPE (valist), 0);
11559
11560 *expr_p = build_call_expr_internal_loc (loc, IFN_VA_ARG, type, 3,
11561 valist, tag, aptag);
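  /* I.e. the GENERIC va_arg is rewritten into an internal call of the
     form

       VA_ARG (valist, (type *) 0, (va_list-type) 0)

     whose expansion into target-specific code is deferred to a later
     pass; clearing PROP_gimple_lva below records that this expansion
     is still needed.  */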
11562
11563 /* Clear the tentatively set PROP_gimple_lva, to indicate that IFN_VA_ARG
11564 needs to be expanded. */
11565 cfun->curr_properties &= ~PROP_gimple_lva;
11566
11567 return GS_OK;
11568 }
11569
11570 /* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.
11571
11572 DST/SRC are the destination and source respectively. You can pass
11573 ungimplified trees in DST or SRC, in which case they will be
11574 converted to a gimple operand if necessary.
11575
11576 This function returns the newly created GIMPLE_ASSIGN tuple. */
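/* For example (names illustrative),
     gimplify_assign (lhs, rhs, &seq);
   appends to SEQ any statements needed to gimplify RHS, followed by
   the assignment itself, and returns the last statement added,
   normally that assignment.  */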
11577
11578 gimple *
11579 gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
11580 {
11581 tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
11582 gimplify_and_add (t, seq_p);
11583 ggc_free (t);
11584 return gimple_seq_last_stmt (*seq_p);
11585 }
11586
11587 inline hashval_t
11588 gimplify_hasher::hash (const elt_t *p)
11589 {
11590 tree t = p->val;
11591 return iterative_hash_expr (t, 0);
11592 }
11593
11594 inline bool
11595 gimplify_hasher::equal (const elt_t *p1, const elt_t *p2)
11596 {
11597 tree t1 = p1->val;
11598 tree t2 = p2->val;
11599 enum tree_code code = TREE_CODE (t1);
11600
11601 if (TREE_CODE (t2) != code
11602 || TREE_TYPE (t1) != TREE_TYPE (t2))
11603 return false;
11604
11605 if (!operand_equal_p (t1, t2, 0))
11606 return false;
11607
11608 /* Only allow them to compare equal if they also hash equal; otherwise
11609 	     results are nondeterministic, and we fail bootstrap comparison.  */
11610 gcc_checking_assert (hash (p1) == hash (p2));
11611
11612 return true;
11613 }