exp_attr.adb, [...]: Minor reformatting.
[gcc.git] / gcc / gimplify.c
1 /* Tree lowering pass. This pass converts the GENERIC functions-as-trees
2 tree representation into the GIMPLE form.
3 Copyright (C) 2002-2015 Free Software Foundation, Inc.
4 Major work done by Sebastian Pop <s.pop@laposte.net>,
5 Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "backend.h"
27 #include "target.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "gimple.h"
31 #include "gimple-predict.h"
32 #include "tree-pass.h" /* FIXME: only for PROP_gimple_any */
33 #include "ssa.h"
34 #include "cgraph.h"
35 #include "tree-pretty-print.h"
36 #include "diagnostic-core.h"
37 #include "alias.h"
38 #include "fold-const.h"
39 #include "calls.h"
40 #include "varasm.h"
41 #include "stmt.h"
42 #include "expr.h"
43 #include "gimple-fold.h"
44 #include "tree-eh.h"
45 #include "gimplify.h"
46 #include "gimple-iterator.h"
47 #include "stor-layout.h"
48 #include "print-tree.h"
49 #include "tree-iterator.h"
50 #include "tree-inline.h"
51 #include "langhooks.h"
52 #include "tree-cfg.h"
53 #include "tree-ssa.h"
54 #include "omp-low.h"
55 #include "gimple-low.h"
56 #include "cilk.h"
57 #include "gomp-constants.h"
58 #include "tree-dump.h"
59
60 #include "langhooks-def.h" /* FIXME: for lhd_set_decl_assembler_name */
61 #include "builtins.h"
62
/* Per-variable data-sharing flags recorded while gimplifying OpenMP/OpenACC
   constructs.  The low bits encode mutually exclusive sharing classes
   (masked by GOVD_DATA_SHARE_CLASS); the remaining bits are modifiers.  */

enum gimplify_omp_var_data
{
  GOVD_SEEN = 1,
  GOVD_EXPLICIT = 2,
  GOVD_SHARED = 4,
  GOVD_PRIVATE = 8,
  GOVD_FIRSTPRIVATE = 16,
  GOVD_LASTPRIVATE = 32,
  GOVD_REDUCTION = 64,
  GOVD_LOCAL = 128,
  GOVD_MAP = 256,
  GOVD_DEBUG_PRIVATE = 512,
  GOVD_PRIVATE_OUTER_REF = 1024,
  GOVD_LINEAR = 2048,
  GOVD_ALIGNED = 4096,

  /* Flag for GOVD_MAP: don't copy back.  */
  GOVD_MAP_TO_ONLY = 8192,

  /* Flag for GOVD_LINEAR or GOVD_LASTPRIVATE: no outer reference.  */
  GOVD_LINEAR_LASTPRIVATE_NO_OUTER = 16384,

  GOVD_MAP_0LEN_ARRAY = 32768,

  /* Flag for GOVD_MAP, if it is always, to or always, tofrom mapping.  */
  GOVD_MAP_ALWAYS_TO = 65536,

  /* Mask of the bits that constitute a data-sharing class.  */
  GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
			   | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LINEAR
			   | GOVD_LOCAL)
};
94
95
/* Kind of OpenMP/OpenACC region being gimplified.  Values are bit-encoded
   so that related kinds can be tested with masks (e.g. a combined
   construct is the base construct's value with the low bit set, and all
   OpenACC kinds include the ORT_ACC bit).  */

enum omp_region_type
{
  ORT_WORKSHARE = 0x00,
  ORT_SIMD = 0x01,

  ORT_PARALLEL = 0x02,
  ORT_COMBINED_PARALLEL = 0x03,

  ORT_TASK = 0x04,
  ORT_UNTIED_TASK = 0x05,

  ORT_TEAMS = 0x08,
  ORT_COMBINED_TEAMS = 0x09,

  /* Data region.  */
  ORT_TARGET_DATA = 0x10,

  /* Data region with offloading.  */
  ORT_TARGET = 0x20,
  ORT_COMBINED_TARGET = 0x21,

  /* OpenACC variants.  */
  ORT_ACC = 0x40,  /* A generic OpenACC region.  */
  ORT_ACC_DATA = ORT_ACC | ORT_TARGET_DATA,  /* Data construct.  */
  ORT_ACC_PARALLEL = ORT_ACC | ORT_TARGET,  /* Parallel construct.  */
  ORT_ACC_KERNELS = ORT_ACC | ORT_TARGET | 0x80,  /* Kernels construct.  */

  /* Dummy OpenMP region, used to disable expansion of
     DECL_VALUE_EXPRs in taskloop pre body.  */
  ORT_NONE = 0x100
};
127
/* Gimplify hashtable helper.  Hashes elt_t entries (val/temp pairs used
   by the formal-temporary table; see lookup_tmp_var).  */

struct gimplify_hasher : free_ptr_hash <elt_t>
{
  static inline hashval_t hash (const elt_t *);
  static inline bool equal (const elt_t *, const elt_t *);
};
135
/* State for one gimplification context; contexts form a stack linked
   through prev_context (see push_gimplify_context).  */

struct gimplify_ctx
{
  /* Enclosing context, or NULL at the outermost level.  */
  struct gimplify_ctx *prev_context;

  /* Stack of GIMPLE_BINDs currently open; innermost is last.  */
  vec<gbind *> bind_expr_stack;
  /* Chain of temporaries created while gimplifying (DECL_CHAIN links).  */
  tree temps;
  /* Cleanups accumulated while inside a conditional context; flushed to
     the prequeue when the outermost COND_EXPR is left.  */
  gimple_seq conditional_cleanups;
  tree exit_label;
  tree return_temp;

  vec<tree> case_labels;
  /* The formal temporary table.  Should this be persistent?  */
  hash_table<gimplify_hasher> *temp_htab;

  /* Depth of COND_EXPR nesting; see gimple_push_condition.  */
  int conditions;
  bool save_stack;
  bool into_ssa;
  bool allow_rhs_cond_expr;
  bool in_cleanup_point_expr;
};
156
/* Per-region state used while gimplifying the body of an OMP construct.
   Contexts nest via outer_context.  */

struct gimplify_omp_ctx
{
  struct gimplify_omp_ctx *outer_context;
  /* DECLs seen in this region mapped to gimplify_omp_var_data flags,
     keyed/ordered by DECL_UID.  */
  splay_tree variables;
  hash_set<tree> *privatized_types;
  /* Iteration variables in an OMP_FOR.  */
  vec<tree> loop_iter_var;
  /* input_location captured when the context was created.  */
  location_t location;
  enum omp_clause_default_kind default_kind;
  enum omp_region_type region_type;
  bool combined_loop;
  bool distribute;
  bool target_map_scalars_firstprivate;
  bool target_map_pointers_as_0len_arrays;
  bool target_firstprivatize_array_bases;
};
173
/* The innermost gimplification context and the innermost OMP region
   context currently in effect.  */
static struct gimplify_ctx *gimplify_ctxp;
static struct gimplify_omp_ctx *gimplify_omp_ctxp;

/* Forward declaration.  */
static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
179
/* Shorter alias name for gimple_seq_add_stmt_without_update, for use in
   gimplify.c only.  Appends GS to *SEQ_P without scanning its operands,
   which is what we want during gimplification (def/use vectors have not
   been constructed yet).  */

static inline void
gimplify_seq_add_stmt (gimple_seq *seq_p, gimple *gs)
{
  gimple_seq_add_stmt_without_update (seq_p, gs);
}
188
189 /* Append sequence SRC to the end of sequence *DST_P. If *DST_P is
190 NULL, a new sequence is allocated. This function is
191 similar to gimple_seq_add_seq, but does not scan the operands.
192 During gimplification, we need to manipulate statement sequences
193 before the def/use vectors have been constructed. */
194
195 static void
196 gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
197 {
198 gimple_stmt_iterator si;
199
200 if (src == NULL)
201 return;
202
203 si = gsi_last (*dst_p);
204 gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
205 }
206
207
/* Pointer to a list of allocated gimplify_ctx structs to be used for pushing
   and popping gimplify contexts.  Freed contexts are chained here through
   their prev_context field and reused by ctx_alloc; see free_gimplify_stack
   for final deallocation.  */

static struct gimplify_ctx *ctx_pool = NULL;
212
213 /* Return a gimplify context struct from the pool. */
214
215 static inline struct gimplify_ctx *
216 ctx_alloc (void)
217 {
218 struct gimplify_ctx * c = ctx_pool;
219
220 if (c)
221 ctx_pool = c->prev_context;
222 else
223 c = XNEW (struct gimplify_ctx);
224
225 memset (c, '\0', sizeof (*c));
226 return c;
227 }
228
/* Put gimplify context C back into the pool.  The memory is not released;
   it is chained on ctx_pool for reuse by ctx_alloc (free_gimplify_stack
   performs the actual deallocation).  */

static inline void
ctx_free (struct gimplify_ctx *c)
{
  c->prev_context = ctx_pool;
  ctx_pool = c;
}
237
238 /* Free allocated ctx stack memory. */
239
240 void
241 free_gimplify_stack (void)
242 {
243 struct gimplify_ctx *c;
244
245 while ((c = ctx_pool))
246 {
247 ctx_pool = c->prev_context;
248 free (c);
249 }
250 }
251
252
253 /* Set up a context for the gimplifier. */
254
255 void
256 push_gimplify_context (bool in_ssa, bool rhs_cond_ok)
257 {
258 struct gimplify_ctx *c = ctx_alloc ();
259
260 c->prev_context = gimplify_ctxp;
261 gimplify_ctxp = c;
262 gimplify_ctxp->into_ssa = in_ssa;
263 gimplify_ctxp->allow_rhs_cond_expr = rhs_cond_ok;
264 }
265
/* Tear down a context for the gimplifier.  If BODY is non-null, then
   put the temporaries into the outer BIND_EXPR.  Otherwise, put them
   in the local_decls.

   BODY is not a sequence, but the first tuple in a sequence.  */

void
pop_gimplify_context (gimple *body)
{
  struct gimplify_ctx *c = gimplify_ctxp;

  /* All GIMPLE_BINDs must have been closed by now.  */
  gcc_assert (c
	      && (!c->bind_expr_stack.exists ()
		  || c->bind_expr_stack.is_empty ()));
  c->bind_expr_stack.release ();
  gimplify_ctxp = c->prev_context;

  if (body)
    declare_vars (c->temps, body, false);
  else
    record_vars (c->temps);

  delete c->temp_htab;
  c->temp_htab = NULL;
  /* Return C to the pool for reuse rather than freeing it.  */
  ctx_free (c);
}
292
/* Push a GIMPLE_BIND tuple onto the stack of bindings.  */

static void
gimple_push_bind_expr (gbind *bind_stmt)
{
  /* Reserve a small capacity up front to avoid repeated reallocation in
     the common shallow-nesting case.  */
  gimplify_ctxp->bind_expr_stack.reserve (8);
  gimplify_ctxp->bind_expr_stack.safe_push (bind_stmt);
}
301
/* Pop the innermost GIMPLE_BIND off the stack of bindings.  */

static void
gimple_pop_bind_expr (void)
{
  gimplify_ctxp->bind_expr_stack.pop ();
}
309
/* Return the innermost (most recently pushed) GIMPLE_BIND on the stack
   of bindings.  */

gbind *
gimple_current_bind_expr (void)
{
  return gimplify_ctxp->bind_expr_stack.last ();
}
317
/* Return the stack of GIMPLE_BINDs created during gimplification of the
   current context.  */

vec<gbind *>
gimple_bind_expr_stack (void)
{
  return gimplify_ctxp->bind_expr_stack;
}
325
/* Return true iff there is a COND_EXPR between us and the innermost
   CLEANUP_POINT_EXPR.  This info is used by gimple_push_cleanup.
   A positive condition count means at least one conditional context
   is open.  */

static bool
gimple_conditional_context (void)
{
  return gimplify_ctxp->conditions > 0;
}
334
/* Note that we've entered a COND_EXPR.  */

static void
gimple_push_condition (void)
{
#ifdef ENABLE_GIMPLE_CHECKING
  /* On entry to the outermost conditional context there must be no
     pending conditional cleanups left over from a previous one.  */
  if (gimplify_ctxp->conditions == 0)
    gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
#endif
  ++(gimplify_ctxp->conditions);
}
346
347 /* Note that we've left a COND_EXPR. If we're back at unconditional scope
348 now, add any conditional cleanups we've seen to the prequeue. */
349
350 static void
351 gimple_pop_condition (gimple_seq *pre_p)
352 {
353 int conds = --(gimplify_ctxp->conditions);
354
355 gcc_assert (conds >= 0);
356 if (conds == 0)
357 {
358 gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
359 gimplify_ctxp->conditional_cleanups = NULL;
360 }
361 }
362
363 /* A stable comparison routine for use with splay trees and DECLs. */
364
365 static int
366 splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
367 {
368 tree a = (tree) xa;
369 tree b = (tree) xb;
370
371 return DECL_UID (a) - DECL_UID (b);
372 }
373
/* Create a new omp construct context that deals with variable remapping
   for region kind REGION_TYPE, nested inside the current OMP context.  */

static struct gimplify_omp_ctx *
new_omp_context (enum omp_region_type region_type)
{
  struct gimplify_omp_ctx *c;

  c = XCNEW (struct gimplify_omp_ctx);
  c->outer_context = gimplify_omp_ctxp;
  /* Key the variable map by DECL_UID for stable ordering across runs.  */
  c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
  c->privatized_types = new hash_set<tree>;
  c->location = input_location;
  c->region_type = region_type;
  /* Task regions get an unspecified data-sharing default (resolved
     later); all other regions default to shared.  */
  if ((region_type & ORT_TASK) == 0)
    c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
  else
    c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;

  return c;
}
394
/* Destroy an omp construct context C, releasing its variable map,
   privatized-type set, iteration-variable vector, and C itself.  */

static void
delete_omp_context (struct gimplify_omp_ctx *c)
{
  splay_tree_delete (c->variables);
  delete c->privatized_types;
  c->loop_iter_var.release ();
  XDELETE (c);
}
405
406 static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
407 static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);
408
/* Both gimplify the statement T and append it to *SEQ_P.  This function
   behaves exactly as gimplify_stmt, but you don't have to pass T as a
   reference.  Consequently any rewriting of T performed by
   gimplify_stmt is not visible to the caller.  */

void
gimplify_and_add (tree t, gimple_seq *seq_p)
{
  gimplify_stmt (&t, seq_p);
}
418
/* Gimplify statement T into sequence *SEQ_P, and return the first
   tuple in the sequence of generated tuples for this statement.
   Return NULL if gimplifying T produced no tuples.  */

static gimple *
gimplify_and_return_first (tree t, gimple_seq *seq_p)
{
  /* Remember where *SEQ_P ended before gimplifying T, so we can locate
     the first statement T contributed.  */
  gimple_stmt_iterator last = gsi_last (*seq_p);

  gimplify_and_add (t, seq_p);

  if (!gsi_end_p (last))
    {
      /* *SEQ_P was non-empty: the first new tuple is the one after the
	 old tail.  */
      gsi_next (&last);
      return gsi_stmt (last);
    }
  else
    /* *SEQ_P was empty, so anything generated starts the sequence.  */
    return gimple_seq_first_stmt (*seq_p);
}
438
439 /* Returns true iff T is a valid RHS for an assignment to an un-renamed
440 LHS, or for a call argument. */
441
442 static bool
443 is_gimple_mem_rhs (tree t)
444 {
445 /* If we're dealing with a renamable type, either source or dest must be
446 a renamed variable. */
447 if (is_gimple_reg_type (TREE_TYPE (t)))
448 return is_gimple_val (t);
449 else
450 return is_gimple_val (t) || is_gimple_lvalue (t);
451 }
452
453 /* Return true if T is a CALL_EXPR or an expression that can be
454 assigned to a temporary. Note that this predicate should only be
455 used during gimplification. See the rationale for this in
456 gimplify_modify_expr. */
457
458 static bool
459 is_gimple_reg_rhs_or_call (tree t)
460 {
461 return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
462 || TREE_CODE (t) == CALL_EXPR);
463 }
464
465 /* Return true if T is a valid memory RHS or a CALL_EXPR. Note that
466 this predicate should only be used during gimplification. See the
467 rationale for this in gimplify_modify_expr. */
468
469 static bool
470 is_gimple_mem_rhs_or_call (tree t)
471 {
472 /* If we're dealing with a renamable type, either source or dest must be
473 a renamed variable. */
474 if (is_gimple_reg_type (TREE_TYPE (t)))
475 return is_gimple_val (t);
476 else
477 return (is_gimple_val (t) || is_gimple_lvalue (t)
478 || TREE_CODE (t) == CALL_EXPR);
479 }
480
/* Create a temporary with a name derived from VAL.  Subroutine of
   lookup_tmp_var; nobody else should call this function.  */

static inline tree
create_tmp_from_val (tree val)
{
  /* Drop all qualifiers and address-space information from the value type.  */
  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (val));
  tree var = create_tmp_var (type, get_name (val));
  /* Complex and vector temporaries are eligible for promotion to
     gimple registers.  */
  if (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
      || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE)
    DECL_GIMPLE_REG_P (var) = 1;
  return var;
}
495
/* Create a temporary to hold the value of VAL.  If IS_FORMAL, try to reuse
   an existing expression temporary by looking VAL up in the context's
   formal temporary table.  */

static tree
lookup_tmp_var (tree val, bool is_formal)
{
  tree ret;

  /* If not optimizing, never really reuse a temporary.  local-alloc
     won't allocate any variable that is used in more than one basic
     block, which means it will go into memory, causing much extra
     work in reload and final and poorer code generation, outweighing
     the extra memory allocation here.  */
  if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
    ret = create_tmp_from_val (val);
  else
    {
      elt_t elt, *elt_p;
      elt_t **slot;

      elt.val = val;
      /* Create the table lazily on first use.  */
      if (!gimplify_ctxp->temp_htab)
	gimplify_ctxp->temp_htab = new hash_table<gimplify_hasher> (1000);
      slot = gimplify_ctxp->temp_htab->find_slot (&elt, INSERT);
      if (*slot == NULL)
	{
	  /* First time we've seen this value: create a new temporary
	     and record the pairing for later reuse.  */
	  elt_p = XNEW (elt_t);
	  elt_p->val = val;
	  elt_p->temp = ret = create_tmp_from_val (val);
	  *slot = elt_p;
	}
      else
	{
	  /* Reuse the temporary previously created for this value.  */
	  elt_p = *slot;
	  ret = elt_p->temp;
	}
    }

  return ret;
}
536
/* Helper for get_formal_tmp_var and get_initialized_tmp_var.  Gimplify
   VAL into *PRE_P/*POST_P, obtain a temporary (an SSA name when in SSA
   form and the type permits, otherwise a VAR_DECL, reused when
   IS_FORMAL), emit the initializing assignment into *PRE_P, and return
   the temporary.  */

static tree
internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
		      bool is_formal)
{
  tree t, mod;

  /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
     can create an INIT_EXPR and convert it into a GIMPLE_CALL below.  */
  gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
		 fb_rvalue);

  if (gimplify_ctxp->into_ssa
      && is_gimple_reg_type (TREE_TYPE (val)))
    t = make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val)));
  else
    t = lookup_tmp_var (val, is_formal);

  mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));

  SET_EXPR_LOCATION (mod, EXPR_LOC_OR_LOC (val, input_location));

  /* gimplify_modify_expr might want to reduce this further.  */
  gimplify_and_add (mod, pre_p);
  ggc_free (mod);

  return t;
}
566
/* Return a formal temporary variable initialized with VAL.  PRE_P is as
   in gimplify_expr.  Only use this function if:

   1) The value of the unfactored expression represented by VAL will not
   change between the initialization and use of the temporary, and
   2) The temporary will not be otherwise modified.

   For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
   and #2 means it is inappropriate for && temps.

   For other cases, use get_initialized_tmp_var instead.  */

tree
get_formal_tmp_var (tree val, gimple_seq *pre_p)
{
  /* is_formal == true enables reuse of a matching existing temporary.  */
  return internal_get_tmp_var (val, pre_p, NULL, true);
}
584
/* Return a (fresh, never-reused) temporary variable initialized with
   VAL.  PRE_P and POST_P are as in gimplify_expr.  */

tree
get_initialized_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p)
{
  return internal_get_tmp_var (val, pre_p, post_p, false);
}
593
/* Declare all the variables in VARS in SCOPE.  If DEBUG_INFO is true,
   generate debug info for them; otherwise don't.  GS must be a
   GIMPLE_BIND.  */

void
declare_vars (tree vars, gimple *gs, bool debug_info)
{
  tree last = vars;
  if (last)
    {
      tree temps, block;

      gbind *scope = as_a <gbind *> (gs);

      /* After nreverse, LAST is the tail of TEMPS, so chaining through
	 it appends the existing variables after the new ones.  */
      temps = nreverse (last);

      block = gimple_bind_block (scope);
      gcc_assert (!block || TREE_CODE (block) == BLOCK);
      if (!block || !debug_info)
	{
	  /* No debug info wanted (or no BLOCK): just chain the new
	     variables onto the GIMPLE_BIND.  */
	  DECL_CHAIN (last) = gimple_bind_vars (scope);
	  gimple_bind_set_vars (scope, temps);
	}
      else
	{
	  /* We need to attach the nodes both to the BIND_EXPR and to its
	     associated BLOCK for debugging purposes.  The key point here
	     is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
	     is a subchain of the BIND_EXPR_VARS of the BIND_EXPR.  */
	  if (BLOCK_VARS (block))
	    BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
	  else
	    {
	      gimple_bind_set_vars (scope,
				    chainon (gimple_bind_vars (scope), temps));
	      BLOCK_VARS (block) = temps;
	    }
	}
    }
}
633
/* For VAR a VAR_DECL of variable size, try to find a constant upper bound
   for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly.  Abort if
   no such upper bound can be obtained.  */

static void
force_constant_size (tree var)
{
  /* The only attempt we make is by querying the maximum size of objects
     of the variable's type.  */

  HOST_WIDE_INT max_size;

  gcc_assert (TREE_CODE (var) == VAR_DECL);

  max_size = max_int_size_in_bytes (TREE_TYPE (var));

  /* A negative result means no constant bound exists; that is fatal.  */
  gcc_assert (max_size >= 0);

  DECL_SIZE_UNIT (var)
    = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
  DECL_SIZE (var)
    = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
}
657
/* Add the temporary variable TMP to the local declarations of function
   FN, forcing a constant size bound on TMP if needed.  */

void
gimple_add_tmp_var_fn (struct function *fn, tree tmp)
{
  gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));

  /* Later processing assumes that the object size is constant, which might
     not be true at this point.  Force the use of a constant upper bound in
     this case.  */
  if (!tree_fits_uhwi_p (DECL_SIZE_UNIT (tmp)))
    force_constant_size (tmp);

  DECL_CONTEXT (tmp) = fn->decl;
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;

  record_vars_into (tmp, fn->decl);
}
676
/* Push the temporary variable TMP into the current binding.  Depending
   on context, TMP is recorded with the gimplifier context, with CFUN,
   or directly into the body of the nested function being processed.  */

void
gimple_add_tmp_var (tree tmp)
{
  gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));

  /* Later processing assumes that the object size is constant, which might
     not be true at this point.  Force the use of a constant upper bound in
     this case.  */
  if (!tree_fits_uhwi_p (DECL_SIZE_UNIT (tmp)))
    force_constant_size (tmp);

  DECL_CONTEXT (tmp) = current_function_decl;
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;

  if (gimplify_ctxp)
    {
      DECL_CHAIN (tmp) = gimplify_ctxp->temps;
      gimplify_ctxp->temps = tmp;

      /* Mark temporaries local within the nearest enclosing parallel.  */
      if (gimplify_omp_ctxp)
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  /* Skip region kinds that do not form a data-sharing boundary.  */
	  while (ctx
		 && (ctx->region_type == ORT_WORKSHARE
		     || ctx->region_type == ORT_SIMD
		     || ctx->region_type == ORT_ACC))
	    ctx = ctx->outer_context;
	  if (ctx)
	    omp_add_variable (ctx, tmp, GOVD_LOCAL | GOVD_SEEN);
	}
    }
  else if (cfun)
    record_vars (tmp);
  else
    {
      gimple_seq body_seq;

      /* This case is for nested functions.  We need to expose the locals
	 they create.  */
      body_seq = gimple_body (current_function_decl);
      declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
    }
}
723
724
725 \f
726 /* This page contains routines to unshare tree nodes, i.e. to duplicate tree
727 nodes that are referenced more than once in GENERIC functions. This is
728 necessary because gimplification (translation into GIMPLE) is performed
729 by modifying tree nodes in-place, so gimplication of a shared node in a
730 first context could generate an invalid GIMPLE form in a second context.
731
732 This is achieved with a simple mark/copy/unmark algorithm that walks the
733 GENERIC representation top-down, marks nodes with TREE_VISITED the first
734 time it encounters them, duplicates them if they already have TREE_VISITED
735 set, and finally removes the TREE_VISITED marks it has set.
736
737 The algorithm works only at the function level, i.e. it generates a GENERIC
738 representation of a function with no nodes shared within the function when
739 passed a GENERIC function (except for nodes that are allowed to be shared).
740
741 At the global level, it is also necessary to unshare tree nodes that are
742 referenced in more than one function, for the same aforementioned reason.
743 This requires some cooperation from the front-end. There are 2 strategies:
744
745 1. Manual unsharing. The front-end needs to call unshare_expr on every
746 expression that might end up being shared across functions.
747
748 2. Deep unsharing. This is an extension of regular unsharing. Instead
749 of calling unshare_expr on expressions that might be shared across
750 functions, the front-end pre-marks them with TREE_VISITED. This will
751 ensure that they are unshared on the first reference within functions
752 when the regular unsharing algorithm runs. The counterpart is that
753 this algorithm must look deeper than for manual unsharing, which is
754 specified by LANG_HOOKS_DEEP_UNSHARING.
755
756 If there are only few specific cases of node sharing across functions, it is
757 probably easier for a front-end to unshare the expressions manually. On the
758 contrary, if the expressions generated at the global level are as widespread
759 as expressions generated within functions, deep unsharing is very likely the
760 way to go. */
761
/* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
   These nodes model computations that must be done once.  If we were to
   unshare something like SAVE_EXPR(i++), the gimplification process would
   create wrong code.  However, if DATA is non-null, it must hold a pointer
   set that is used to unshare the subtrees of these nodes.  */

static tree
mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
     copy their subtrees if we can make sure to do it only once.  */
  if (code == SAVE_EXPR || code == TARGET_EXPR || code == BIND_EXPR)
    {
      /* First time this node is seen: keep walking so its subtrees get
	 unshared.  On later visits (or with no set), stop here.  */
      if (data && !((hash_set<tree> *)data)->add (t))
	;
      else
	*walk_subtrees = 0;
    }

  /* Stop at types, decls, constants like copy_tree_r.  */
  else if (TREE_CODE_CLASS (code) == tcc_type
	   || TREE_CODE_CLASS (code) == tcc_declaration
	   || TREE_CODE_CLASS (code) == tcc_constant
	   /* We can't do anything sensible with a BLOCK used as an
	      expression, but we also can't just die when we see it
	      because of non-expression uses.  So we avert our eyes
	      and cross our fingers.  Silly Java.  */
	   || code == BLOCK)
    *walk_subtrees = 0;

  /* Cope with the statement expression extension.  */
  else if (code == STATEMENT_LIST)
    ;

  /* Leave the bulk of the work to copy_tree_r itself.  */
  else
    copy_tree_r (tp, walk_subtrees, NULL);

  return NULL_TREE;
}
805
/* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
   If *TP has been visited already, then *TP is deeply copied by calling
   mostly_copy_tree_r.  DATA is passed to mostly_copy_tree_r unmodified.
   The net effect: the first reference to a node keeps the original,
   later references get copies.  */

static tree
copy_if_shared_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Skip types, decls, and constants.  But we do want to look at their
     types and the bounds of types.  Mark them as visited so we properly
     unmark their subtrees on the unmark pass.  If we've already seen them,
     don't look down further.  */
  if (TREE_CODE_CLASS (code) == tcc_type
      || TREE_CODE_CLASS (code) == tcc_declaration
      || TREE_CODE_CLASS (code) == tcc_constant)
    {
      if (TREE_VISITED (t))
	*walk_subtrees = 0;
      else
	TREE_VISITED (t) = 1;
    }

  /* If this node has been visited already, unshare it and don't look
     any deeper.  */
  else if (TREE_VISITED (t))
    {
      walk_tree (tp, mostly_copy_tree_r, data, NULL);
      *walk_subtrees = 0;
    }

  /* Otherwise, mark the node as visited and keep looking.  */
  else
    TREE_VISITED (t) = 1;

  return NULL_TREE;
}
844
/* Unshare most of the shared trees rooted at *TP.  DATA is passed to the
   copy_if_shared_r callback unmodified.  This is the mark/copy half of
   the unsharing algorithm; unmark_visited performs the cleanup half.  */

static inline void
copy_if_shared (tree *tp, void *data)
{
  walk_tree (tp, copy_if_shared_r, data, NULL);
}
853
/* Unshare all the trees in the body of FNDECL, as well as in the bodies of
   any nested functions.  */

static void
unshare_body (tree fndecl)
{
  struct cgraph_node *cgn = cgraph_node::get (fndecl);
  /* If the language requires deep unsharing, we need a pointer set to make
     sure we don't repeatedly unshare subtrees of unshareable nodes.  */
  hash_set<tree> *visited
    = lang_hooks.deep_unsharing ? new hash_set<tree> : NULL;

  /* Unshare the body proper plus the trees giving the size of the
     function's result.  */
  copy_if_shared (&DECL_SAVED_TREE (fndecl), visited);
  copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl)), visited);
  copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)), visited);

  delete visited;

  /* Recurse into nested functions, if any.  */
  if (cgn)
    for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
      unshare_body (cgn->decl);
}
876
877 /* Callback for walk_tree to unmark the visited trees rooted at *TP.
878 Subtrees are walked until the first unvisited node is encountered. */
879
880 static tree
881 unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
882 {
883 tree t = *tp;
884
885 /* If this node has been visited, unmark it and keep looking. */
886 if (TREE_VISITED (t))
887 TREE_VISITED (t) = 0;
888
889 /* Otherwise, don't look any deeper. */
890 else
891 *walk_subtrees = 0;
892
893 return NULL_TREE;
894 }
895
/* Unmark the visited trees rooted at *TP.  Cleanup half of the
   unsharing algorithm (see copy_if_shared).  */

static inline void
unmark_visited (tree *tp)
{
  walk_tree (tp, unmark_visited_r, NULL, NULL);
}
903
/* Clear the TREE_VISITED marks in the body of FNDECL (and in the size
   trees of its result), as well as in any nested functions.  Companion
   cleanup pass to unshare_body.  */

static void
unvisit_body (tree fndecl)
{
  struct cgraph_node *cgn = cgraph_node::get (fndecl);

  unmark_visited (&DECL_SAVED_TREE (fndecl));
  unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl)));
  unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)));

  if (cgn)
    for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
      unvisit_body (cgn->decl);
}
919
/* Unconditionally make an unshared copy of EXPR.  This is used when using
   stored expressions which span multiple functions, such as BINFO_VTABLE,
   as the normal unsharing process can't tell that they're shared.  */

tree
unshare_expr (tree expr)
{
  walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
  return expr;
}
930
931 /* Worker for unshare_expr_without_location. */
932
933 static tree
934 prune_expr_location (tree *tp, int *walk_subtrees, void *)
935 {
936 if (EXPR_P (*tp))
937 SET_EXPR_LOCATION (*tp, UNKNOWN_LOCATION);
938 else
939 *walk_subtrees = 0;
940 return NULL_TREE;
941 }
942
/* Similar to unshare_expr but also prune all expression locations
   from EXPR.  */

tree
unshare_expr_without_location (tree expr)
{
  walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
  /* Only expression nodes carry locations, so the pruning walk is only
     needed (and only descends) when EXPR itself is an expression.  */
  if (EXPR_P (expr))
    walk_tree (&expr, prune_expr_location, NULL, NULL);
  return expr;
}
954 \f
/* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
   contain statements and have a value.  Assign its value to a temporary
   and give it void_type_node.  Return the temporary, or NULL_TREE if
   WRAPPER was already void.  TEMP, if non-null, must be an INIT_EXPR or
   MODIFY_EXPR into which the innermost value is pushed.  */

tree
voidify_wrapper_expr (tree wrapper, tree temp)
{
  tree type = TREE_TYPE (wrapper);
  if (type && !VOID_TYPE_P (type))
    {
      tree *p;

      /* Set p to point to the body of the wrapper.  Loop until we find
	 something that isn't a wrapper.  Every container visited on the
	 way down is marked as having side effects and voided.  */
      for (p = &wrapper; p && *p; )
	{
	  switch (TREE_CODE (*p))
	    {
	    case BIND_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      /* For a BIND_EXPR, the body is operand 1.  */
	      p = &BIND_EXPR_BODY (*p);
	      break;

	    case CLEANUP_POINT_EXPR:
	    case TRY_FINALLY_EXPR:
	    case TRY_CATCH_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TREE_OPERAND (*p, 0);
	      break;

	    case STATEMENT_LIST:
	      {
		/* Descend into the last statement of the list, if any.  */
		tree_stmt_iterator i = tsi_last (*p);
		TREE_SIDE_EFFECTS (*p) = 1;
		TREE_TYPE (*p) = void_type_node;
		p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
	      }
	      break;

	    case COMPOUND_EXPR:
	      /* Advance to the last statement.  Set all container types to
		 void.  */
	      for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		}
	      break;

	    case TRANSACTION_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TRANSACTION_EXPR_BODY (*p);
	      break;

	    default:
	      /* Assume that any tree upon which voidify_wrapper_expr is
		 directly called is a wrapper, and that its body is op0.  */
	      if (p == &wrapper)
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		  p = &TREE_OPERAND (*p, 0);
		  break;
		}
	      goto out;
	    }
	}

    out:
      if (p == NULL || IS_EMPTY_STMT (*p))
	temp = NULL_TREE;
      else if (temp)
	{
	  /* The wrapper is on the RHS of an assignment that we're pushing
	     down.  */
	  gcc_assert (TREE_CODE (temp) == INIT_EXPR
		      || TREE_CODE (temp) == MODIFY_EXPR);
	  TREE_OPERAND (temp, 1) = *p;
	  *p = temp;
	}
      else
	{
	  /* No assignment was supplied: create a "retval" temporary and
	     initialize it from the innermost value expression.  */
	  temp = create_tmp_var (type, "retval");
	  *p = build2 (INIT_EXPR, type, temp, *p);
	}

      return temp;
    }

  return NULL_TREE;
}
1051
/* Prepare calls to builtins to SAVE and RESTORE the stack as well as
   a temporary through which they communicate.  On return, *SAVE holds
   a call to __builtin_stack_save whose result is stored in a fresh
   "saved_stack" temporary, and *RESTORE holds a call to
   __builtin_stack_restore taking that temporary as argument.  */

static void
build_stack_save_restore (gcall **save, gcall **restore)
{
  tree tmp_var;

  *save = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE), 0);
  tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
  gimple_call_set_lhs (*save, tmp_var);

  *restore
    = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE),
			 1, tmp_var);
}
1068
1069 /* Gimplify a BIND_EXPR. Just voidify and recurse. */
1070
static enum gimplify_status
gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree bind_expr = *expr_p;
  /* Remember the enclosing scope's save_stack flag; it is reset for the
     duration of this bind and restored on exit.  */
  bool old_save_stack = gimplify_ctxp->save_stack;
  tree t;
  gbind *bind_stmt;
  gimple_seq body, cleanup;
  gcall *stack_save;
  location_t start_locus = 0, end_locus = 0;

  /* If the BIND_EXPR is used in value context, voidify_wrapper_expr
     pushes the value computation inward and returns a temporary that
     holds the result; otherwise it returns NULL.  */
  tree temp = voidify_wrapper_expr (bind_expr, NULL);

  /* Mark variables seen in this bind expr.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
    {
      if (TREE_CODE (t) == VAR_DECL)
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;

	  /* Mark variable as local.  */
	  if (ctx && ctx->region_type != ORT_NONE && !DECL_EXTERNAL (t)
	      && (! DECL_SEEN_IN_BIND_EXPR_P (t)
		  || splay_tree_lookup (ctx->variables,
					(splay_tree_key) t) == NULL))
	    {
	      /* In a simd region, addressable automatics become
		 private; everything else is merely local.  */
	      if (ctx->region_type == ORT_SIMD
		  && TREE_ADDRESSABLE (t)
		  && !TREE_STATIC (t))
		omp_add_variable (ctx, t, GOVD_PRIVATE | GOVD_SEEN);
	      else
		omp_add_variable (ctx, t, GOVD_LOCAL | GOVD_SEEN);
	    }

	  DECL_SEEN_IN_BIND_EXPR_P (t) = 1;

	  /* Explicit local register variables affect later RA decisions;
	     record their presence on the function.  */
	  if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
	    cfun->has_local_explicit_reg_vars = true;
	}

      /* Preliminarily mark non-addressed complex variables as eligible
	 for promotion to gimple registers.  We'll transform their uses
	 as we find them.  */
      if ((TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
	   || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
	  && !TREE_THIS_VOLATILE (t)
	  && (TREE_CODE (t) == VAR_DECL && !DECL_HARD_REGISTER (t))
	  && !needs_to_live_in_memory (t))
	DECL_GIMPLE_REG_P (t) = 1;
    }

  bind_stmt = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
				 BIND_EXPR_BLOCK (bind_expr));
  gimple_push_bind_expr (bind_stmt);

  /* Cleared here so that gimplifying the body (e.g. a VLA decl, see
     gimplify_vla_decl) can request a stack save/restore for this bind.  */
  gimplify_ctxp->save_stack = false;

  /* Gimplify the body into the GIMPLE_BIND tuple's body.  */
  body = NULL;
  gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
  gimple_bind_set_body (bind_stmt, body);

  /* Source location wise, the cleanup code (stack_restore and clobbers)
     belongs to the end of the block, so propagate what we have.  The
     stack_save operation belongs to the beginning of block, which we can
     infer from the bind_expr directly if the block has no explicit
     assignment.  */
  if (BIND_EXPR_BLOCK (bind_expr))
    {
      end_locus = BLOCK_SOURCE_END_LOCATION (BIND_EXPR_BLOCK (bind_expr));
      start_locus = BLOCK_SOURCE_LOCATION (BIND_EXPR_BLOCK (bind_expr));
    }
  if (start_locus == 0)
    start_locus = EXPR_LOCATION (bind_expr);

  cleanup = NULL;
  stack_save = NULL;
  if (gimplify_ctxp->save_stack)
    {
      gcall *stack_restore;

      /* Save stack on entry and restore it on exit.  Add a try_finally
	 block to achieve this.  */
      build_stack_save_restore (&stack_save, &stack_restore);

      gimple_set_location (stack_save, start_locus);
      gimple_set_location (stack_restore, end_locus);

      gimplify_seq_add_stmt (&cleanup, stack_restore);
    }

  /* Add clobbers for all variables that go out of scope.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
    {
      if (TREE_CODE (t) == VAR_DECL
	  && !is_global_var (t)
	  && DECL_CONTEXT (t) == current_function_decl
	  && !DECL_HARD_REGISTER (t)
	  && !TREE_THIS_VOLATILE (t)
	  && !DECL_HAS_VALUE_EXPR_P (t)
	  /* Only care for variables that have to be in memory.  Others
	     will be rewritten into SSA names, hence moved to the top-level.  */
	  && !is_gimple_reg (t)
	  && flag_stack_reuse != SR_NONE)
	{
	  /* A clobber is an empty CONSTRUCTOR on the variable's type
	     with TREE_THIS_VOLATILE set, assigned to the variable.  */
	  tree clobber = build_constructor (TREE_TYPE (t), NULL);
	  gimple *clobber_stmt;
	  TREE_THIS_VOLATILE (clobber) = 1;
	  clobber_stmt = gimple_build_assign (t, clobber);
	  gimple_set_location (clobber_stmt, end_locus);
	  gimplify_seq_add_stmt (&cleanup, clobber_stmt);
	}
    }

  if (cleanup)
    {
      gtry *gs;
      gimple_seq new_body;

      /* Wrap the body in a GIMPLE_TRY_FINALLY so the cleanup (restore
	 and clobbers) runs on every exit path, including exceptions.  */
      new_body = NULL;
      gs = gimple_build_try (gimple_bind_body (bind_stmt), cleanup,
			     GIMPLE_TRY_FINALLY);

      /* The stack save runs before the try, outside its protection.  */
      if (stack_save)
	gimplify_seq_add_stmt (&new_body, stack_save);
      gimplify_seq_add_stmt (&new_body, gs);
      gimple_bind_set_body (bind_stmt, new_body);
    }

  /* Restore the enclosing scope's save_stack flag.  */
  gimplify_ctxp->save_stack = old_save_stack;
  gimple_pop_bind_expr ();

  gimplify_seq_add_stmt (pre_p, bind_stmt);

  /* If voidification produced a value temporary, the caller must
     re-examine *expr_p (GS_OK); otherwise we are done.  */
  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL_TREE;
  return GS_ALL_DONE;
}
1214
1215 /* Gimplify a RETURN_EXPR. If the expression to be returned is not a
1216 GIMPLE value, it is assigned to a new temporary and the statement is
1217 re-written to return the temporary.
1218
1219 PRE_P points to the sequence where side effects that must happen before
1220 STMT should be stored. */
1221
1222 static enum gimplify_status
1223 gimplify_return_expr (tree stmt, gimple_seq *pre_p)
1224 {
1225 greturn *ret;
1226 tree ret_expr = TREE_OPERAND (stmt, 0);
1227 tree result_decl, result;
1228
1229 if (ret_expr == error_mark_node)
1230 return GS_ERROR;
1231
1232 /* Implicit _Cilk_sync must be inserted right before any return statement
1233 if there is a _Cilk_spawn in the function. If the user has provided a
1234 _Cilk_sync, the optimizer should remove this duplicate one. */
1235 if (fn_contains_cilk_spawn_p (cfun))
1236 {
1237 tree impl_sync = build0 (CILK_SYNC_STMT, void_type_node);
1238 gimplify_and_add (impl_sync, pre_p);
1239 }
1240
1241 if (!ret_expr
1242 || TREE_CODE (ret_expr) == RESULT_DECL
1243 || ret_expr == error_mark_node)
1244 {
1245 greturn *ret = gimple_build_return (ret_expr);
1246 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1247 gimplify_seq_add_stmt (pre_p, ret);
1248 return GS_ALL_DONE;
1249 }
1250
1251 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
1252 result_decl = NULL_TREE;
1253 else
1254 {
1255 result_decl = TREE_OPERAND (ret_expr, 0);
1256
1257 /* See through a return by reference. */
1258 if (TREE_CODE (result_decl) == INDIRECT_REF)
1259 result_decl = TREE_OPERAND (result_decl, 0);
1260
1261 gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
1262 || TREE_CODE (ret_expr) == INIT_EXPR)
1263 && TREE_CODE (result_decl) == RESULT_DECL);
1264 }
1265
1266 /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
1267 Recall that aggregate_value_p is FALSE for any aggregate type that is
1268 returned in registers. If we're returning values in registers, then
1269 we don't want to extend the lifetime of the RESULT_DECL, particularly
1270 across another call. In addition, for those aggregates for which
1271 hard_function_value generates a PARALLEL, we'll die during normal
1272 expansion of structure assignments; there's special code in expand_return
1273 to handle this case that does not exist in expand_expr. */
1274 if (!result_decl)
1275 result = NULL_TREE;
1276 else if (aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
1277 {
1278 if (TREE_CODE (DECL_SIZE (result_decl)) != INTEGER_CST)
1279 {
1280 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl)))
1281 gimplify_type_sizes (TREE_TYPE (result_decl), pre_p);
1282 /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
1283 should be effectively allocated by the caller, i.e. all calls to
1284 this function must be subject to the Return Slot Optimization. */
1285 gimplify_one_sizepos (&DECL_SIZE (result_decl), pre_p);
1286 gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl), pre_p);
1287 }
1288 result = result_decl;
1289 }
1290 else if (gimplify_ctxp->return_temp)
1291 result = gimplify_ctxp->return_temp;
1292 else
1293 {
1294 result = create_tmp_reg (TREE_TYPE (result_decl));
1295
1296 /* ??? With complex control flow (usually involving abnormal edges),
1297 we can wind up warning about an uninitialized value for this. Due
1298 to how this variable is constructed and initialized, this is never
1299 true. Give up and never warn. */
1300 TREE_NO_WARNING (result) = 1;
1301
1302 gimplify_ctxp->return_temp = result;
1303 }
1304
1305 /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
1306 Then gimplify the whole thing. */
1307 if (result != result_decl)
1308 TREE_OPERAND (ret_expr, 0) = result;
1309
1310 gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);
1311
1312 ret = gimple_build_return (result);
1313 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1314 gimplify_seq_add_stmt (pre_p, ret);
1315
1316 return GS_ALL_DONE;
1317 }
1318
1319 /* Gimplify a variable-length array DECL. */
1320
static void
gimplify_vla_decl (tree decl, gimple_seq *seq_p)
{
  /* This is a variable-sized decl.  Simplify its size and mark it
     for deferred expansion.  */
  tree t, addr, ptr_type;

  /* The size expressions may themselves need gimplification; emit any
     resulting statements into SEQ_P.  */
  gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
  gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);

  /* Don't mess with a DECL_VALUE_EXPR set by the front-end.  */
  if (DECL_HAS_VALUE_EXPR_P (decl))
    return;

  /* All occurrences of this decl in final gimplified code will be
     replaced by indirection.  Setting DECL_VALUE_EXPR does two
     things: First, it lets the rest of the gimplifier know what
     replacement to use.  Second, it lets the debug info know
     where to find the value.  */
  ptr_type = build_pointer_type (TREE_TYPE (decl));
  addr = create_tmp_var (ptr_type, get_name (decl));
  /* Keep the pointer temporary visible to the debugger.  */
  DECL_IGNORED_P (addr) = 0;
  t = build_fold_indirect_ref (addr);
  /* The dereference of the just-allocated storage cannot trap.  */
  TREE_THIS_NOTRAP (t) = 1;
  SET_DECL_VALUE_EXPR (decl, t);
  DECL_HAS_VALUE_EXPR_P (decl) = 1;

  /* Emit: addr = __builtin_alloca_with_align (size, align);  */
  t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
  t = build_call_expr (t, 2, DECL_SIZE_UNIT (decl),
		       size_int (DECL_ALIGN (decl)));
  /* The call has been built for a variable-sized object.  */
  CALL_ALLOCA_FOR_VAR_P (t) = 1;
  t = fold_convert (ptr_type, t);
  t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);

  gimplify_and_add (t, seq_p);

  /* Indicate that we need to restore the stack level when the
     enclosing BIND_EXPR is exited.  */
  gimplify_ctxp->save_stack = true;
}
1362
1363 /* A helper function to be called via walk_tree. Mark all labels under *TP
1364 as being forced. To be called for DECL_INITIAL of static variables. */
1365
1366 static tree
1367 force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1368 {
1369 if (TYPE_P (*tp))
1370 *walk_subtrees = 0;
1371 if (TREE_CODE (*tp) == LABEL_DECL)
1372 FORCED_LABEL (*tp) = 1;
1373
1374 return NULL_TREE;
1375 }
1376
1377 /* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
1378 and initialization explicit. */
1379
static enum gimplify_status
gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
{
  tree stmt = *stmt_p;
  tree decl = DECL_EXPR_DECL (stmt);

  /* The DECL_EXPR itself is consumed; everything it implies is emitted
     into SEQ_P.  */
  *stmt_p = NULL_TREE;

  if (TREE_TYPE (decl) == error_mark_node)
    return GS_ERROR;

  /* Gimplify any variable size expressions in the decl's type once.  */
  if ((TREE_CODE (decl) == TYPE_DECL
       || TREE_CODE (decl) == VAR_DECL)
      && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
    gimplify_type_sizes (TREE_TYPE (decl), seq_p);

  /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
     in case its size expressions contain problematic nodes like CALL_EXPR.  */
  if (TREE_CODE (decl) == TYPE_DECL
      && DECL_ORIGINAL_TYPE (decl)
      && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl)))
    gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl), seq_p);

  if (TREE_CODE (decl) == VAR_DECL && !DECL_EXTERNAL (decl))
    {
      tree init = DECL_INITIAL (decl);

      /* Treat as a VLA any decl whose size is not a compile-time
	 constant, and also large automatics under -fstack-check=generic
	 so they get allocated via alloca and checked.  */
      if (TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
	  || (!TREE_STATIC (decl)
	      && flag_stack_check == GENERIC_STACK_CHECK
	      && compare_tree_int (DECL_SIZE_UNIT (decl),
				   STACK_CHECK_MAX_VAR_SIZE) > 0))
	gimplify_vla_decl (decl, seq_p);

      /* Some front ends do not explicitly declare all anonymous
	 artificial variables.  We compensate here by declaring the
	 variables, though it would be better if the front ends would
	 explicitly declare them.  */
      if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
	  && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
	gimple_add_tmp_var (decl);

      if (init && init != error_mark_node)
	{
	  if (!TREE_STATIC (decl))
	    {
	      /* Turn the initializer into an explicit INIT_EXPR
		 statement; the tree node for INIT is dead afterwards
		 and can be returned to the GC.  */
	      DECL_INITIAL (decl) = NULL_TREE;
	      init = build2 (INIT_EXPR, void_type_node, decl, init);
	      gimplify_and_add (init, seq_p);
	      ggc_free (init);
	    }
	  else
	    /* We must still examine initializers for static variables
	       as they may contain a label address.  */
	    walk_tree (&init, force_labels_r, NULL, NULL);
	}
    }

  return GS_ALL_DONE;
}
1440
1441 /* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body
1442 and replacing the LOOP_EXPR with goto, but if the loop contains an
1443 EXIT_EXPR, we need to append a label for it to jump to. */
1444
1445 static enum gimplify_status
1446 gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
1447 {
1448 tree saved_label = gimplify_ctxp->exit_label;
1449 tree start_label = create_artificial_label (UNKNOWN_LOCATION);
1450
1451 gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));
1452
1453 gimplify_ctxp->exit_label = NULL_TREE;
1454
1455 gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);
1456
1457 gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));
1458
1459 if (gimplify_ctxp->exit_label)
1460 gimplify_seq_add_stmt (pre_p,
1461 gimple_build_label (gimplify_ctxp->exit_label));
1462
1463 gimplify_ctxp->exit_label = saved_label;
1464
1465 *expr_p = NULL;
1466 return GS_ALL_DONE;
1467 }
1468
1469 /* Gimplify a statement list onto a sequence. These may be created either
1470 by an enlightened front-end, or by shortcut_cond_expr. */
1471
1472 static enum gimplify_status
1473 gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
1474 {
1475 tree temp = voidify_wrapper_expr (*expr_p, NULL);
1476
1477 tree_stmt_iterator i = tsi_start (*expr_p);
1478
1479 while (!tsi_end_p (i))
1480 {
1481 gimplify_stmt (tsi_stmt_ptr (i), pre_p);
1482 tsi_delink (&i);
1483 }
1484
1485 if (temp)
1486 {
1487 *expr_p = temp;
1488 return GS_OK;
1489 }
1490
1491 return GS_ALL_DONE;
1492 }
1493
1494 \f
1495 /* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
1496 branch to. */
1497
static enum gimplify_status
gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree switch_expr = *expr_p;
  gimple_seq switch_body_seq = NULL;
  enum gimplify_status ret;
  /* The type used to interpret the case labels; fall back to the type
     of the condition when the SWITCH_EXPR itself has none.  */
  tree index_type = TREE_TYPE (switch_expr);
  if (index_type == NULL_TREE)
    index_type = TREE_TYPE (SWITCH_COND (switch_expr));

  /* The switch condition must become a GIMPLE value first.  */
  ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
		       fb_rvalue);
  if (ret == GS_ERROR || ret == GS_UNHANDLED)
    return ret;

  if (SWITCH_BODY (switch_expr))
    {
      vec<tree> labels;
      vec<tree> saved_labels;
      tree default_case = NULL_TREE;
      gswitch *switch_stmt;

      /* If someone can be bothered to fill in the labels, they can
	 be bothered to null out the body too.  */
      gcc_assert (!SWITCH_LABELS (switch_expr));

      /* Save old labels, get new ones from body, then restore the old
	 labels.  Save all the things from the switch body to append after.  */
      saved_labels = gimplify_ctxp->case_labels;
      gimplify_ctxp->case_labels.create (8);

      /* gimplify_case_label_expr pushes each CASE_LABEL_EXPR it meets
	 onto gimplify_ctxp->case_labels while the body is gimplified.  */
      gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);
      labels = gimplify_ctxp->case_labels;
      gimplify_ctxp->case_labels = saved_labels;

      preprocess_case_label_vec_for_gimple (labels, index_type,
					    &default_case);

      /* GIMPLE_SWITCH requires a default; synthesize one that falls
	 through to the end of the switch body if the source had none.  */
      if (!default_case)
	{
	  glabel *new_default;

	  default_case
	    = build_case_label (NULL_TREE, NULL_TREE,
				create_artificial_label (UNKNOWN_LOCATION));
	  new_default = gimple_build_label (CASE_LABEL (default_case));
	  gimplify_seq_add_stmt (&switch_body_seq, new_default);
	}

      switch_stmt = gimple_build_switch (SWITCH_COND (switch_expr),
					 default_case, labels);
      gimplify_seq_add_stmt (pre_p, switch_stmt);
      gimplify_seq_add_seq (pre_p, switch_body_seq);
      labels.release ();
    }
  else
    gcc_assert (SWITCH_LABELS (switch_expr));

  return GS_ALL_DONE;
}
1558
1559 /* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P. */
1560
1561 static enum gimplify_status
1562 gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
1563 {
1564 struct gimplify_ctx *ctxp;
1565 glabel *label_stmt;
1566
1567 /* Invalid programs can play Duff's Device type games with, for example,
1568 #pragma omp parallel. At least in the C front end, we don't
1569 detect such invalid branches until after gimplification, in the
1570 diagnose_omp_blocks pass. */
1571 for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
1572 if (ctxp->case_labels.exists ())
1573 break;
1574
1575 label_stmt = gimple_build_label (CASE_LABEL (*expr_p));
1576 ctxp->case_labels.safe_push (*expr_p);
1577 gimplify_seq_add_stmt (pre_p, label_stmt);
1578
1579 return GS_ALL_DONE;
1580 }
1581
1582 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
1583 if necessary. */
1584
1585 tree
1586 build_and_jump (tree *label_p)
1587 {
1588 if (label_p == NULL)
1589 /* If there's nowhere to jump, just fall through. */
1590 return NULL_TREE;
1591
1592 if (*label_p == NULL_TREE)
1593 {
1594 tree label = create_artificial_label (UNKNOWN_LOCATION);
1595 *label_p = label;
1596 }
1597
1598 return build1 (GOTO_EXPR, void_type_node, *label_p);
1599 }
1600
1601 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
1602 This also involves building a label to jump to and communicating it to
1603 gimplify_loop_expr through gimplify_ctxp->exit_label. */
1604
1605 static enum gimplify_status
1606 gimplify_exit_expr (tree *expr_p)
1607 {
1608 tree cond = TREE_OPERAND (*expr_p, 0);
1609 tree expr;
1610
1611 expr = build_and_jump (&gimplify_ctxp->exit_label);
1612 expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
1613 *expr_p = expr;
1614
1615 return GS_OK;
1616 }
1617
1618 /* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is
1619 different from its canonical type, wrap the whole thing inside a
1620 NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
1621 type.
1622
1623 The canonical type of a COMPONENT_REF is the type of the field being
1624 referenced--unless the field is a bit-field which can be read directly
1625 in a smaller mode, in which case the canonical type is the
1626 sign-appropriate type corresponding to that mode. */
1627
static void
canonicalize_component_ref (tree *expr_p)
{
  tree expr = *expr_p;
  tree type;

  gcc_assert (TREE_CODE (expr) == COMPONENT_REF);

  /* For integral references, get_unwidened finds the narrowest mode the
     (possibly bit-)field can be read in; otherwise use the field type.  */
  if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
    type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
  else
    type = TREE_TYPE (TREE_OPERAND (expr, 1));

  /* One could argue that all the stuff below is not necessary for
     the non-bitfield case and declare it a FE error if type
     adjustment would be needed.  */
  if (TREE_TYPE (expr) != type)
    {
#ifdef ENABLE_TYPES_CHECKING
      tree old_type = TREE_TYPE (expr);
#endif
      int type_quals;

      /* We need to preserve qualifiers and propagate them from
	 operand 0.  */
      type_quals = TYPE_QUALS (type)
	| TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
      if (TYPE_QUALS (type) != type_quals)
	type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);

      /* Set the type of the COMPONENT_REF to the underlying type.  */
      TREE_TYPE (expr) = type;

#ifdef ENABLE_TYPES_CHECKING
      /* It is now a FE error, if the conversion from the canonical
	 type to the original expression type is not useless.  */
      gcc_assert (useless_type_conversion_p (old_type, type));
#endif
    }
}
1668
1669 /* If a NOP conversion is changing a pointer to array of foo to a pointer
1670 to foo, embed that change in the ADDR_EXPR by converting
1671 T array[U];
1672 (T *)&array
1673 ==>
1674 &array[L]
1675 where L is the lower bound. For simplicity, only do this for constant
1676 lower bound.
1677 The constraint is that the type of &array[L] is trivially convertible
1678 to T *. */
1679
static void
canonicalize_addr_expr (tree *expr_p)
{
  tree expr = *expr_p;
  tree addr_expr = TREE_OPERAND (expr, 0);
  tree datype, ddatype, pddatype;

  /* We simplify only conversions from an ADDR_EXPR to a pointer type.  */
  if (!POINTER_TYPE_P (TREE_TYPE (expr))
      || TREE_CODE (addr_expr) != ADDR_EXPR)
    return;

  /* The addr_expr type should be a pointer to an array.  */
  datype = TREE_TYPE (TREE_TYPE (addr_expr));
  if (TREE_CODE (datype) != ARRAY_TYPE)
    return;

  /* The pointer to element type shall be trivially convertible to
     the expression pointer type.  */
  ddatype = TREE_TYPE (datype);
  pddatype = build_pointer_type (ddatype);
  if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
				  pddatype))
    return;

  /* The lower bound and element sizes must be constant.  */
  if (!TYPE_SIZE_UNIT (ddatype)
      || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
      || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
      || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
    return;

  /* All checks succeeded.  Build a new node to merge the cast:
     (T *)&array  becomes  &array[low_bound].  */
  *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
		    TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
		    NULL_TREE, NULL_TREE);
  *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);

  /* We can have stripped a required restrict qualifier above.  */
  if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
    *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
}
1722
1723 /* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions
1724 underneath as appropriate. */
1725
static enum gimplify_status
gimplify_conversion (tree *expr_p)
{
  location_t loc = EXPR_LOCATION (*expr_p);
  gcc_assert (CONVERT_EXPR_P (*expr_p));

  /* Then strip away all but the outermost conversion.  */
  STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));

  /* And remove the outermost conversion if it's useless.  */
  if (tree_ssa_useless_type_conversion (*expr_p))
    *expr_p = TREE_OPERAND (*expr_p, 0);

  /* If we still have a conversion at the toplevel,
     then canonicalize some constructs.  */
  if (CONVERT_EXPR_P (*expr_p))
    {
      tree sub = TREE_OPERAND (*expr_p, 0);

      /* If a NOP conversion is changing the type of a COMPONENT_REF
	 expression, then canonicalize its type now in order to expose more
	 redundant conversions.  */
      if (TREE_CODE (sub) == COMPONENT_REF)
	canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));

      /* If a NOP conversion is changing a pointer to array of foo
	 to a pointer to foo, embed that change in the ADDR_EXPR.  */
      else if (TREE_CODE (sub) == ADDR_EXPR)
	canonicalize_addr_expr (expr_p);
    }

  /* If we have a conversion to a non-register type force the
     use of a VIEW_CONVERT_EXPR instead.  */
  if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
    *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
			       TREE_OPERAND (*expr_p, 0));

  /* Canonicalize CONVERT_EXPR to NOP_EXPR so later passes only have
     one conversion code to recognize.  */
  if (TREE_CODE (*expr_p) == CONVERT_EXPR)
    TREE_SET_CODE (*expr_p, NOP_EXPR);

  /* Whatever remains still needs gimplification by the caller.  */
  return GS_OK;
}
1769
1770 /* Nonlocal VLAs seen in the current function. */
1771 static hash_set<tree> *nonlocal_vlas;
1772
1773 /* The VAR_DECLs created for nonlocal VLAs for debug info purposes. */
1774 static tree nonlocal_vla_vars;
1775
1776 /* Gimplify a VAR_DECL or PARM_DECL. Return GS_OK if we expanded a
1777 DECL_VALUE_EXPR, and it's worth re-examining things. */
1778
static enum gimplify_status
gimplify_var_or_parm_decl (tree *expr_p)
{
  tree decl = *expr_p;

  /* ??? If this is a local variable, and it has not been seen in any
     outer BIND_EXPR, then it's probably the result of a duplicate
     declaration, for which we've already issued an error.  It would
     be really nice if the front end wouldn't leak these at all.
     Currently the only known culprit is C++ destructors, as seen
     in g++.old-deja/g++.jason/binding.C.  */
  if (TREE_CODE (decl) == VAR_DECL
      && !DECL_SEEN_IN_BIND_EXPR_P (decl)
      && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
      && decl_function_context (decl) == current_function_decl)
    {
      gcc_assert (seen_error ());
      return GS_ERROR;
    }

  /* When within an OMP context, notice uses of variables.  */
  if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
    return GS_ALL_DONE;

  /* If the decl is an alias for another expression, substitute it now.  */
  if (DECL_HAS_VALUE_EXPR_P (decl))
    {
      tree value_expr = DECL_VALUE_EXPR (decl);

      /* For referenced nonlocal VLAs add a decl for debugging purposes
	 to the current function.  */
      if (TREE_CODE (decl) == VAR_DECL
	  && TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
	  && nonlocal_vlas != NULL
	  && TREE_CODE (value_expr) == INDIRECT_REF
	  && TREE_CODE (TREE_OPERAND (value_expr, 0)) == VAR_DECL
	  && decl_function_context (decl) != current_function_decl)
	{
	  /* Skip out of workshare/simd/acc contexts to see whether we
	     are in an OMP region that owns the variable instead.  */
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  while (ctx
		 && (ctx->region_type == ORT_WORKSHARE
		     || ctx->region_type == ORT_SIMD
		     || ctx->region_type == ORT_ACC))
	    ctx = ctx->outer_context;
	  /* hash_set::add returns true if DECL was already present, so
	     the debug copy is made only once per decl.  */
	  if (!ctx && !nonlocal_vlas->add (decl))
	    {
	      tree copy = copy_node (decl);

	      lang_hooks.dup_lang_specific_decl (copy);
	      SET_DECL_RTL (copy, 0);
	      TREE_USED (copy) = 1;
	      /* Chain the copy onto the list of debug-only VLA decls
		 collected for this function.  */
	      DECL_CHAIN (copy) = nonlocal_vla_vars;
	      nonlocal_vla_vars = copy;
	      SET_DECL_VALUE_EXPR (copy, unshare_expr (value_expr));
	      DECL_HAS_VALUE_EXPR_P (copy) = 1;
	    }
	}

      /* Replace the decl with (an unshared copy of) its value expr and
	 ask the caller to re-gimplify the result.  */
      *expr_p = unshare_expr (value_expr);
      return GS_OK;
    }

  return GS_ALL_DONE;
}
1843
1844 /* Recalculate the value of the TREE_SIDE_EFFECTS flag for T. */
1845
static void
recalculate_side_effects (tree t)
{
  enum tree_code code = TREE_CODE (t);
  int len = TREE_OPERAND_LENGTH (t);
  int i;

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_expression:
      switch (code)
	{
	case INIT_EXPR:
	case MODIFY_EXPR:
	case VA_ARG_EXPR:
	case PREDECREMENT_EXPR:
	case PREINCREMENT_EXPR:
	case POSTDECREMENT_EXPR:
	case POSTINCREMENT_EXPR:
	  /* All of these have side-effects, no matter what their
	     operands are.  */
	  return;

	default:
	  break;
	}
      /* Fall through.  */

    case tcc_comparison:  /* a comparison expression */
    case tcc_unary:       /* a unary arithmetic expression */
    case tcc_binary:      /* a binary arithmetic expression */
    case tcc_reference:   /* a reference */
    case tcc_vl_exp:        /* a function call */
      /* Start from volatility of T itself, then OR in the side effects
	 of each operand.  */
      TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
      for (i = 0; i < len; ++i)
	{
	  tree op = TREE_OPERAND (t, i);
	  if (op && TREE_SIDE_EFFECTS (op))
	    TREE_SIDE_EFFECTS (t) = 1;
	}
      break;

    case tcc_constant:
      /* No side-effects.  */
      return;

    default:
      /* Other tree classes are not expected here.  */
      gcc_unreachable ();
   }
}
1896
1897 /* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
1898 node *EXPR_P.
1899
1900 compound_lval
1901 : min_lval '[' val ']'
1902 | min_lval '.' ID
1903 | compound_lval '[' val ']'
1904 | compound_lval '.' ID
1905
1906 This is not part of the original SIMPLE definition, which separates
1907 array and member references, but it seems reasonable to handle them
1908 together. Also, this way we don't run into problems with union
1909 aliasing; gcc requires that for accesses through a union to alias, the
1910 union reference must be explicit, which was not always the case when we
1911 were splitting up array and member refs.
1912
1913 PRE_P points to the sequence where side effects that must happen before
1914 *EXPR_P should be stored.
1915
1916 POST_P points to the sequence where side effects that must happen after
1917 *EXPR_P should be stored. */
1918
1919 static enum gimplify_status
1920 gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
1921 fallback_t fallback)
1922 {
1923 tree *p;
1924 enum gimplify_status ret = GS_ALL_DONE, tret;
1925 int i;
1926 location_t loc = EXPR_LOCATION (*expr_p);
1927 tree expr = *expr_p;
1928
1929 /* Create a stack of the subexpressions so later we can walk them in
1930 order from inner to outer. */
1931 auto_vec<tree, 10> expr_stack;
1932
1933 /* We can handle anything that get_inner_reference can deal with. */
1934 for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
1935 {
1936 restart:
1937 /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs. */
1938 if (TREE_CODE (*p) == INDIRECT_REF)
1939 *p = fold_indirect_ref_loc (loc, *p);
1940
1941 if (handled_component_p (*p))
1942 ;
1943 /* Expand DECL_VALUE_EXPR now. In some cases that may expose
1944 additional COMPONENT_REFs. */
1945 else if ((TREE_CODE (*p) == VAR_DECL || TREE_CODE (*p) == PARM_DECL)
1946 && gimplify_var_or_parm_decl (p) == GS_OK)
1947 goto restart;
1948 else
1949 break;
1950
1951 expr_stack.safe_push (*p);
1952 }
1953
1954 gcc_assert (expr_stack.length ());
1955
1956 /* Now EXPR_STACK is a stack of pointers to all the refs we've
1957 walked through and P points to the innermost expression.
1958
1959 Java requires that we elaborated nodes in source order. That
1960 means we must gimplify the inner expression followed by each of
1961 the indices, in order. But we can't gimplify the inner
1962 expression until we deal with any variable bounds, sizes, or
1963 positions in order to deal with PLACEHOLDER_EXPRs.
1964
1965 So we do this in three steps. First we deal with the annotations
1966 for any variables in the components, then we gimplify the base,
1967 then we gimplify any indices, from left to right. */
1968 for (i = expr_stack.length () - 1; i >= 0; i--)
1969 {
1970 tree t = expr_stack[i];
1971
1972 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
1973 {
1974 /* Gimplify the low bound and element type size and put them into
1975 the ARRAY_REF. If these values are set, they have already been
1976 gimplified. */
1977 if (TREE_OPERAND (t, 2) == NULL_TREE)
1978 {
1979 tree low = unshare_expr (array_ref_low_bound (t));
1980 if (!is_gimple_min_invariant (low))
1981 {
1982 TREE_OPERAND (t, 2) = low;
1983 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
1984 post_p, is_gimple_reg,
1985 fb_rvalue);
1986 ret = MIN (ret, tret);
1987 }
1988 }
1989 else
1990 {
1991 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
1992 is_gimple_reg, fb_rvalue);
1993 ret = MIN (ret, tret);
1994 }
1995
1996 if (TREE_OPERAND (t, 3) == NULL_TREE)
1997 {
1998 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
1999 tree elmt_size = unshare_expr (array_ref_element_size (t));
2000 tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));
2001
2002 /* Divide the element size by the alignment of the element
2003 type (above). */
2004 elmt_size
2005 = size_binop_loc (loc, EXACT_DIV_EXPR, elmt_size, factor);
2006
2007 if (!is_gimple_min_invariant (elmt_size))
2008 {
2009 TREE_OPERAND (t, 3) = elmt_size;
2010 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
2011 post_p, is_gimple_reg,
2012 fb_rvalue);
2013 ret = MIN (ret, tret);
2014 }
2015 }
2016 else
2017 {
2018 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
2019 is_gimple_reg, fb_rvalue);
2020 ret = MIN (ret, tret);
2021 }
2022 }
2023 else if (TREE_CODE (t) == COMPONENT_REF)
2024 {
2025 /* Set the field offset into T and gimplify it. */
2026 if (TREE_OPERAND (t, 2) == NULL_TREE)
2027 {
2028 tree offset = unshare_expr (component_ref_field_offset (t));
2029 tree field = TREE_OPERAND (t, 1);
2030 tree factor
2031 = size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);
2032
2033 /* Divide the offset by its alignment. */
2034 offset = size_binop_loc (loc, EXACT_DIV_EXPR, offset, factor);
2035
2036 if (!is_gimple_min_invariant (offset))
2037 {
2038 TREE_OPERAND (t, 2) = offset;
2039 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
2040 post_p, is_gimple_reg,
2041 fb_rvalue);
2042 ret = MIN (ret, tret);
2043 }
2044 }
2045 else
2046 {
2047 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
2048 is_gimple_reg, fb_rvalue);
2049 ret = MIN (ret, tret);
2050 }
2051 }
2052 }
2053
2054 /* Step 2 is to gimplify the base expression. Make sure lvalue is set
2055 so as to match the min_lval predicate. Failure to do so may result
2056 in the creation of large aggregate temporaries. */
2057 tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
2058 fallback | fb_lvalue);
2059 ret = MIN (ret, tret);
2060
2061 /* And finally, the indices and operands of ARRAY_REF. During this
2062 loop we also remove any useless conversions. */
2063 for (; expr_stack.length () > 0; )
2064 {
2065 tree t = expr_stack.pop ();
2066
2067 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
2068 {
2069 /* Gimplify the dimension. */
2070 if (!is_gimple_min_invariant (TREE_OPERAND (t, 1)))
2071 {
2072 tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
2073 is_gimple_val, fb_rvalue);
2074 ret = MIN (ret, tret);
2075 }
2076 }
2077
2078 STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));
2079
2080 /* The innermost expression P may have originally had
2081 TREE_SIDE_EFFECTS set which would have caused all the outer
2082 expressions in *EXPR_P leading to P to also have had
2083 TREE_SIDE_EFFECTS set. */
2084 recalculate_side_effects (t);
2085 }
2086
2087 /* If the outermost expression is a COMPONENT_REF, canonicalize its type. */
2088 if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
2089 {
2090 canonicalize_component_ref (expr_p);
2091 }
2092
2093 expr_stack.release ();
2094
2095 gcc_assert (*expr_p == expr || ret != GS_ALL_DONE);
2096
2097 return ret;
2098 }
2099
/* Gimplify the self modifying expression pointed to by EXPR_P
   (++ and --; only increment/decrement codes reach here, as the
   assertion in the function checks).
2102
2103 PRE_P points to the list where side effects that must happen before
2104 *EXPR_P should be stored.
2105
2106 POST_P points to the list where side effects that must happen after
2107 *EXPR_P should be stored.
2108
2109 WANT_VALUE is nonzero iff we want to use the value of this expression
2110 in another expression.
2111
2112 ARITH_TYPE is the type the computation should be performed in. */
2113
enum gimplify_status
gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
                        bool want_value, tree arith_type)
{
  enum tree_code code;
  tree lhs, lvalue, rhs, t1;
  /* POST collects post-side-effects locally for the postfix case;
     ORIG_POST_P remembers the caller's queue so we can append POST to
     it after the update has been emitted.  */
  gimple_seq post = NULL, *orig_post_p = post_p;
  bool postfix;
  enum tree_code arith_code;
  enum gimplify_status ret;
  location_t loc = EXPR_LOCATION (*expr_p);

  code = TREE_CODE (*expr_p);

  /* Only the four increment/decrement codes are handled here.  */
  gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
              || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);

  /* Prefix or postfix?  */
  if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
    /* Faster to treat as prefix if result is not used.  */
    postfix = want_value;
  else
    postfix = false;

  /* For postfix, make sure the inner expression's post side effects
     are executed after side effects from this expression.  */
  if (postfix)
    post_p = &post;

  /* Add or subtract?  */
  if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
    arith_code = PLUS_EXPR;
  else
    arith_code = MINUS_EXPR;

  /* Gimplify the LHS into a GIMPLE lvalue.  */
  lvalue = TREE_OPERAND (*expr_p, 0);
  ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
  if (ret == GS_ERROR)
    return ret;

  /* Extract the operands to the arithmetic operation.  */
  lhs = lvalue;
  rhs = TREE_OPERAND (*expr_p, 1);

  /* For postfix operator, we evaluate the LHS to an rvalue and then use
     that as the result value and in the postqueue operation.  */
  if (postfix)
    {
      ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
      if (ret == GS_ERROR)
        return ret;

      /* Save the original value in a temporary; it becomes the
         expression's result below.  */
      lhs = get_initialized_tmp_var (lhs, pre_p, NULL);
    }

  /* For POINTERs increment, use POINTER_PLUS_EXPR.  */
  if (POINTER_TYPE_P (TREE_TYPE (lhs)))
    {
      rhs = convert_to_ptrofftype_loc (loc, rhs);
      /* POINTER_PLUS_EXPR has no MINUS counterpart, so negate the
         offset for decrement.  */
      if (arith_code == MINUS_EXPR)
        rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
      t1 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (*expr_p), lhs, rhs);
    }
  else
    /* Do the arithmetic in ARITH_TYPE, then convert back to the
       expression's own type.  */
    t1 = fold_convert (TREE_TYPE (*expr_p),
                       fold_build2 (arith_code, arith_type,
                                    fold_convert (arith_type, lhs),
                                    fold_convert (arith_type, rhs)));

  if (postfix)
    {
      /* Emit the store, flush the queued post side effects into the
         caller's queue, and return the saved original value.  */
      gimplify_assign (lvalue, t1, pre_p);
      gimplify_seq_add_seq (orig_post_p, post);
      *expr_p = lhs;
      return GS_ALL_DONE;
    }
  else
    {
      /* Prefix: the result is the updated lvalue itself.  */
      *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
      return GS_OK;
    }
}
2197
2198 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
2199
2200 static void
2201 maybe_with_size_expr (tree *expr_p)
2202 {
2203 tree expr = *expr_p;
2204 tree type = TREE_TYPE (expr);
2205 tree size;
2206
2207 /* If we've already wrapped this or the type is error_mark_node, we can't do
2208 anything. */
2209 if (TREE_CODE (expr) == WITH_SIZE_EXPR
2210 || type == error_mark_node)
2211 return;
2212
2213 /* If the size isn't known or is a constant, we have nothing to do. */
2214 size = TYPE_SIZE_UNIT (type);
2215 if (!size || TREE_CODE (size) == INTEGER_CST)
2216 return;
2217
2218 /* Otherwise, make a WITH_SIZE_EXPR. */
2219 size = unshare_expr (size);
2220 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
2221 *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
2222 }
2223
/* Helper for gimplify_call_expr.  Gimplify a single argument *ARG_P.
   Store any side-effects in PRE_P.  CALL_LOCATION is the location of
2226 the CALL_EXPR. */
2227
2228 enum gimplify_status
2229 gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location)
2230 {
2231 bool (*test) (tree);
2232 fallback_t fb;
2233
2234 /* In general, we allow lvalues for function arguments to avoid
2235 extra overhead of copying large aggregates out of even larger
2236 aggregates into temporaries only to copy the temporaries to
2237 the argument list. Make optimizers happy by pulling out to
2238 temporaries those types that fit in registers. */
2239 if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
2240 test = is_gimple_val, fb = fb_rvalue;
2241 else
2242 {
2243 test = is_gimple_lvalue, fb = fb_either;
2244 /* Also strip a TARGET_EXPR that would force an extra copy. */
2245 if (TREE_CODE (*arg_p) == TARGET_EXPR)
2246 {
2247 tree init = TARGET_EXPR_INITIAL (*arg_p);
2248 if (init
2249 && !VOID_TYPE_P (TREE_TYPE (init)))
2250 *arg_p = init;
2251 }
2252 }
2253
2254 /* If this is a variable sized type, we must remember the size. */
2255 maybe_with_size_expr (arg_p);
2256
2257 /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c. */
2258 /* Make sure arguments have the same location as the function call
2259 itself. */
2260 protected_set_expr_location (*arg_p, call_location);
2261
2262 /* There is a sequence point before a function call. Side effects in
2263 the argument list must occur before the actual call. So, when
2264 gimplifying arguments, force gimplify_expr to use an internal
2265 post queue which is then appended to the end of PRE_P. */
2266 return gimplify_expr (arg_p, pre_p, NULL, test, fb);
2267 }
2268
2269 /* Don't fold inside offloading or taskreg regions: it can break code by
2270 adding decl references that weren't in the source. We'll do it during
2271 omplower pass instead. */
2272
2273 static bool
2274 maybe_fold_stmt (gimple_stmt_iterator *gsi)
2275 {
2276 struct gimplify_omp_ctx *ctx;
2277 for (ctx = gimplify_omp_ctxp; ctx; ctx = ctx->outer_context)
2278 if ((ctx->region_type & (ORT_TARGET | ORT_PARALLEL | ORT_TASK)) != 0)
2279 return false;
2280 return fold_stmt (gsi);
2281 }
2282
2283 /* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
2284 WANT_VALUE is true if the result of the call is desired. */
2285
static enum gimplify_status
gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
{
  tree fndecl, parms, p, fnptrtype;
  enum gimplify_status ret;
  int i, nargs;
  gcall *call;
  bool builtin_va_start_p = false;
  location_t loc = EXPR_LOCATION (*expr_p);

  gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);

  /* For reliable diagnostics during inlining, it is necessary that
     every call_expr be annotated with file and line.  */
  if (! EXPR_HAS_LOCATION (*expr_p))
    SET_EXPR_LOCATION (*expr_p, input_location);

  /* Gimplify internal functions created in the FEs.  */
  if (CALL_EXPR_FN (*expr_p) == NULL_TREE)
    {
      /* NOTE(review): if a value is wanted we leave the tree alone —
         presumably the FEs only build internal calls whose value form
         needs no further lowering; confirm against callers.  */
      if (want_value)
        return GS_ALL_DONE;

      nargs = call_expr_nargs (*expr_p);
      enum internal_fn ifn = CALL_EXPR_IFN (*expr_p);
      auto_vec<tree> vargs (nargs);

      for (i = 0; i < nargs; i++)
        {
          gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
                        EXPR_LOCATION (*expr_p));
          vargs.quick_push (CALL_EXPR_ARG (*expr_p, i));
        }
      gimple *call = gimple_build_call_internal_vec (ifn, vargs);
      gimplify_seq_add_stmt (pre_p, call);
      return GS_ALL_DONE;
    }

  /* This may be a call to a builtin function.

     Builtin function calls may be transformed into different
     (and more efficient) builtin function calls under certain
     circumstances.  Unfortunately, gimplification can muck things
     up enough that the builtin expanders are not aware that certain
     transformations are still valid.

     So we attempt transformation/gimplification of the call before
     we gimplify the CALL_EXPR.  At this time we do not manage to
     transform all calls in the same manner as the expanders do, but
     we do transform most of them.  */
  fndecl = get_callee_fndecl (*expr_p);
  if (fndecl
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      case BUILT_IN_VA_START:
        {
          builtin_va_start_p = TRUE;
          if (call_expr_nargs (*expr_p) < 2)
            {
              error ("too few arguments to function %<va_start%>");
              *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
              return GS_OK;
            }

          if (fold_builtin_next_arg (*expr_p, true))
            {
              *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
              return GS_OK;
            }
          break;
        }
      case BUILT_IN_LINE:
        {
          /* Resolve __builtin_LINE to a constant at gimplification.  */
          *expr_p = build_int_cst (TREE_TYPE (*expr_p),
                                   LOCATION_LINE (EXPR_LOCATION (*expr_p)));
          return GS_OK;
        }
      case BUILT_IN_FILE:
        {
          /* Likewise __builtin_FILE to a string literal.  */
          const char *locfile = LOCATION_FILE (EXPR_LOCATION (*expr_p));
          *expr_p = build_string_literal (strlen (locfile) + 1, locfile);
          return GS_OK;
        }
      case BUILT_IN_FUNCTION:
        {
          /* Likewise __builtin_FUNCTION to the current function name.  */
          const char *function;
          function = IDENTIFIER_POINTER (DECL_NAME (current_function_decl));
          *expr_p = build_string_literal (strlen (function) + 1, function);
          return GS_OK;
        }
      default:
        ;
      }
  if (fndecl && DECL_BUILT_IN (fndecl))
    {
      tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
      if (new_tree && new_tree != *expr_p)
        {
          /* There was a transformation of this call which computes the
             same value, but in a more efficient way.  Return and try
             again.  */
          *expr_p = new_tree;
          return GS_OK;
        }
    }

  /* Remember the original function pointer type.  */
  fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));

  /* There is a sequence point before the call, so any side effects in
     the calling expression must occur before the actual call.  Force
     gimplify_expr to use an internal post queue.  */
  ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
                       is_gimple_call_addr, fb_rvalue);

  nargs = call_expr_nargs (*expr_p);

  /* Get argument types for verification.  */
  fndecl = get_callee_fndecl (*expr_p);
  parms = NULL_TREE;
  if (fndecl)
    parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  else
    parms = TYPE_ARG_TYPES (TREE_TYPE (fnptrtype));

  if (fndecl && DECL_ARGUMENTS (fndecl))
    p = DECL_ARGUMENTS (fndecl);
  else if (parms)
    p = parms;
  else
    p = NULL_TREE;
  /* Advance P past the named parameters; afterwards P is NULL exactly
     when the call has at least as many actuals as named formals.  */
  for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
    ;

  /* If the last argument is __builtin_va_arg_pack () and it is not
     passed as a named argument, decrease the number of CALL_EXPR
     arguments and set instead the CALL_EXPR_VA_ARG_PACK flag.  */
  if (!p
      && i < nargs
      && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
    {
      tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
      tree last_arg_fndecl = get_callee_fndecl (last_arg);

      if (last_arg_fndecl
          && TREE_CODE (last_arg_fndecl) == FUNCTION_DECL
          && DECL_BUILT_IN_CLASS (last_arg_fndecl) == BUILT_IN_NORMAL
          && DECL_FUNCTION_CODE (last_arg_fndecl) == BUILT_IN_VA_ARG_PACK)
        {
          tree call = *expr_p;

          --nargs;
          /* Rebuild the call without the trailing va_arg_pack ().  */
          *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
                                          CALL_EXPR_FN (call),
                                          nargs, CALL_EXPR_ARGP (call));

          /* Copy all CALL_EXPR flags, location and block, except
             CALL_EXPR_VA_ARG_PACK flag.  */
          CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
          CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
          CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
            = CALL_EXPR_RETURN_SLOT_OPT (call);
          CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
          SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));

          /* Set CALL_EXPR_VA_ARG_PACK.  */
          CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
        }
    }

  /* Gimplify the function arguments.  */
  if (nargs > 0)
    {
      /* Iterate in target argument-push order.  */
      for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
           PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
           PUSH_ARGS_REVERSED ? i-- : i++)
        {
          enum gimplify_status t;

          /* Avoid gimplifying the second argument to va_start, which needs to
             be the plain PARM_DECL.  */
          if ((i != 1) || !builtin_va_start_p)
            {
              t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
                                EXPR_LOCATION (*expr_p));

              if (t == GS_ERROR)
                ret = GS_ERROR;
            }
        }
    }

  /* Gimplify the static chain.  */
  if (CALL_EXPR_STATIC_CHAIN (*expr_p))
    {
      /* Drop a static chain the callee provably does not use.  */
      if (fndecl && !DECL_STATIC_CHAIN (fndecl))
        CALL_EXPR_STATIC_CHAIN (*expr_p) = NULL;
      else
        {
          enum gimplify_status t;
          t = gimplify_arg (&CALL_EXPR_STATIC_CHAIN (*expr_p), pre_p,
                            EXPR_LOCATION (*expr_p));
          if (t == GS_ERROR)
            ret = GS_ERROR;
        }
    }

  /* Verify the function result.  */
  if (want_value && fndecl
      && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype))))
    {
      error_at (loc, "using result of function returning %<void%>");
      ret = GS_ERROR;
    }

  /* Try this again in case gimplification exposed something.  */
  if (ret != GS_ERROR)
    {
      tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);

      if (new_tree && new_tree != *expr_p)
        {
          /* There was a transformation of this call which computes the
             same value, but in a more efficient way.  Return and try
             again.  */
          *expr_p = new_tree;
          return GS_OK;
        }
    }
  else
    {
      *expr_p = error_mark_node;
      return GS_ERROR;
    }

  /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
     decl.  This allows us to eliminate redundant or useless
     calls to "const" functions.  */
  if (TREE_CODE (*expr_p) == CALL_EXPR)
    {
      int flags = call_expr_flags (*expr_p);
      if (flags & (ECF_CONST | ECF_PURE)
          /* An infinite loop is considered a side effect.  */
          && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
        TREE_SIDE_EFFECTS (*expr_p) = 0;
    }

  /* If the value is not needed by the caller, emit a new GIMPLE_CALL
     and clear *EXPR_P.  Otherwise, leave *EXPR_P in its gimplified
     form and delegate the creation of a GIMPLE_CALL to
     gimplify_modify_expr.  This is always possible because when
     WANT_VALUE is true, the caller wants the result of this call into
     a temporary, which means that we will emit an INIT_EXPR in
     internal_get_tmp_var which will then be handled by
     gimplify_modify_expr.  */
  if (!want_value)
    {
      /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
         have to do is replicate it as a GIMPLE_CALL tuple.  */
      gimple_stmt_iterator gsi;
      call = gimple_build_call_from_tree (*expr_p);
      gimple_call_set_fntype (call, TREE_TYPE (fnptrtype));
      notice_special_calls (call);
      gimplify_seq_add_stmt (pre_p, call);
      gsi = gsi_last (*pre_p);
      maybe_fold_stmt (&gsi);
      *expr_p = NULL_TREE;
    }
  else
    /* Remember the original function type.  */
    CALL_EXPR_FN (*expr_p) = build1 (NOP_EXPR, fnptrtype,
                                     CALL_EXPR_FN (*expr_p));

  return ret;
}
2562
2563 /* Handle shortcut semantics in the predicate operand of a COND_EXPR by
2564 rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
2565
2566 TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
2567 condition is true or false, respectively. If null, we should generate
2568 our own to skip over the evaluation of this specific expression.
2569
2570 LOCUS is the source location of the COND_EXPR.
2571
2572 This function is the tree equivalent of do_jump.
2573
2574 shortcut_cond_r should only be called by shortcut_cond_expr. */
2575
static tree
shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
                 location_t locus)
{
  /* LOCAL_LABEL is created lazily when a missing TRUE/FALSE label is
     needed; it is emitted at the end of the generated sequence.  */
  tree local_label = NULL_TREE;
  tree t, expr = NULL;

  /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
     retain the shortcut semantics.  Just insert the gotos here;
     shortcut_cond_expr will append the real blocks later.  */
  if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
    {
      location_t new_locus;

      /* Turn if (a && b) into

         if (a); else goto no;
         if (b) goto yes; else goto no;
         (no:) */

      if (false_label_p == NULL)
        false_label_p = &local_label;

      /* Keep the original source location on the first 'if'.  */
      t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus);
      append_to_statement_list (t, &expr);

      /* Set the source location of the && on the second 'if'.  */
      new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
      t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
                           new_locus);
      append_to_statement_list (t, &expr);
    }
  else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
    {
      location_t new_locus;

      /* Turn if (a || b) into

         if (a) goto yes;
         if (b) goto yes; else goto no;
         (yes:) */

      if (true_label_p == NULL)
        true_label_p = &local_label;

      /* Keep the original source location on the first 'if'.  */
      t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus);
      append_to_statement_list (t, &expr);

      /* Set the source location of the || on the second 'if'.  */
      new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
      t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
                           new_locus);
      append_to_statement_list (t, &expr);
    }
  else if (TREE_CODE (pred) == COND_EXPR
           && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 1)))
           && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 2))))
    {
      location_t new_locus;

      /* As long as we're messing with gotos, turn if (a ? b : c) into
         if (a)
           if (b) goto yes; else goto no;
         else
           if (c) goto yes; else goto no;

         Don't do this if one of the arms has void type, which can happen
         in C++ when the arm is throw.  */

      /* Keep the original source location on the first 'if'.  Set the source
         location of the ? on the second 'if'.  */
      new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
      expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
                     shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
                                      false_label_p, locus),
                     shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
                                      false_label_p, new_locus));
    }
  else
    {
      /* Base case: a simple predicate becomes a single conditional
         jump to the true/false labels.  */
      expr = build3 (COND_EXPR, void_type_node, pred,
                     build_and_jump (true_label_p),
                     build_and_jump (false_label_p));
      SET_EXPR_LOCATION (expr, locus);
    }

  /* Emit the fall-through label created above, if any.  */
  if (local_label)
    {
      t = build1 (LABEL_EXPR, void_type_node, local_label);
      append_to_statement_list (t, &expr);
    }

  return expr;
}
2672
2673 /* Given a conditional expression EXPR with short-circuit boolean
2674 predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
2675 predicate apart into the equivalent sequence of conditionals. */
2676
static tree
shortcut_cond_expr (tree expr)
{
  tree pred = TREE_OPERAND (expr, 0);
  tree then_ = TREE_OPERAND (expr, 1);
  tree else_ = TREE_OPERAND (expr, 2);
  tree true_label, false_label, end_label, t;
  tree *true_label_p;
  tree *false_label_p;
  bool emit_end, emit_false, jump_over_else;
  /* Whether each arm contains code with side effects; arms without
     side effects can be dropped or folded into labels below.  */
  bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
  bool else_se = else_ && TREE_SIDE_EFFECTS (else_);

  /* First do simple transformations.  */
  if (!else_se)
    {
      /* If there is no 'else', turn
           if (a && b) then c
         into
           if (a) if (b) then c.  */
      while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
        {
          /* Keep the original source location on the first 'if'.  */
          location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
          TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
          /* Set the source location of the && on the second 'if'.  */
          if (EXPR_HAS_LOCATION (pred))
            SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
          then_ = shortcut_cond_expr (expr);
          then_se = then_ && TREE_SIDE_EFFECTS (then_);
          pred = TREE_OPERAND (pred, 0);
          expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
          SET_EXPR_LOCATION (expr, locus);
        }
    }

  if (!then_se)
    {
      /* If there is no 'then', turn
           if (a || b); else d
         into
           if (a); else if (b); else d.  */
      while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
        {
          /* Keep the original source location on the first 'if'.  */
          location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
          TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
          /* Set the source location of the || on the second 'if'.  */
          if (EXPR_HAS_LOCATION (pred))
            SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
          else_ = shortcut_cond_expr (expr);
          else_se = else_ && TREE_SIDE_EFFECTS (else_);
          pred = TREE_OPERAND (pred, 0);
          expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
          SET_EXPR_LOCATION (expr, locus);
        }
    }

  /* If we're done, great.  */
  if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
      && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
    return expr;

  /* Otherwise we need to mess with gotos.  Change
       if (a) c; else d;
     to
       if (a); else goto no;
       c; goto end;
       no: d; end:
     and recursively gimplify the condition.  */

  true_label = false_label = end_label = NULL_TREE;

  /* If our arms just jump somewhere, hijack those labels so we don't
     generate jumps to jumps.  */

  if (then_
      && TREE_CODE (then_) == GOTO_EXPR
      && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL)
    {
      true_label = GOTO_DESTINATION (then_);
      then_ = NULL;
      then_se = false;
    }

  if (else_
      && TREE_CODE (else_) == GOTO_EXPR
      && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL)
    {
      false_label = GOTO_DESTINATION (else_);
      else_ = NULL;
      else_se = false;
    }

  /* If we aren't hijacking a label for the 'then' branch, it falls through.  */
  if (true_label)
    true_label_p = &true_label;
  else
    true_label_p = NULL;

  /* The 'else' branch also needs a label if it contains interesting code.  */
  if (false_label || else_se)
    false_label_p = &false_label;
  else
    false_label_p = NULL;

  /* If there was nothing else in our arms, just forward the label(s).  */
  if (!then_se && !else_se)
    return shortcut_cond_r (pred, true_label_p, false_label_p,
                            EXPR_LOC_OR_LOC (expr, input_location));

  /* If our last subexpression already has a terminal label, reuse it.  */
  if (else_se)
    t = expr_last (else_);
  else if (then_se)
    t = expr_last (then_);
  else
    t = NULL;
  if (t && TREE_CODE (t) == LABEL_EXPR)
    end_label = LABEL_EXPR_LABEL (t);

  /* If we don't care about jumping to the 'else' branch, jump to the end
     if the condition is false.  */
  if (!false_label_p)
    false_label_p = &end_label;

  /* We only want to emit these labels if we aren't hijacking them.  */
  emit_end = (end_label == NULL_TREE);
  emit_false = (false_label == NULL_TREE);

  /* We only emit the jump over the else clause if we have to--if the
     then clause may fall through.  Otherwise we can wind up with a
     useless jump and a useless label at the end of gimplified code,
     which will cause us to think that this conditional as a whole
     falls through even if it doesn't.  If we then inline a function
     which ends with such a condition, that can cause us to issue an
     inappropriate warning about control reaching the end of a
     non-void function.  */
  jump_over_else = block_may_fallthru (then_);

  pred = shortcut_cond_r (pred, true_label_p, false_label_p,
                          EXPR_LOC_OR_LOC (expr, input_location));

  /* Assemble the result: condition, then-arm, (goto end, no: else-arm),
     end label.  */
  expr = NULL;
  append_to_statement_list (pred, &expr);

  append_to_statement_list (then_, &expr);
  if (else_se)
    {
      if (jump_over_else)
        {
          tree last = expr_last (expr);
          t = build_and_jump (&end_label);
          /* Carry the location of the last then-statement onto the jump.  */
          if (EXPR_HAS_LOCATION (last))
            SET_EXPR_LOCATION (t, EXPR_LOCATION (last));
          append_to_statement_list (t, &expr);
        }
      if (emit_false)
        {
          t = build1 (LABEL_EXPR, void_type_node, false_label);
          append_to_statement_list (t, &expr);
        }
      append_to_statement_list (else_, &expr);
    }
  if (emit_end && end_label)
    {
      t = build1 (LABEL_EXPR, void_type_node, end_label);
      append_to_statement_list (t, &expr);
    }

  return expr;
}
2849
2850 /* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */
2851
tree
gimple_boolify (tree expr)
{
  tree type = TREE_TYPE (expr);
  location_t loc = EXPR_LOCATION (expr);

  if (TREE_CODE (expr) == NE_EXPR
      && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR
      && integer_zerop (TREE_OPERAND (expr, 1)))
    {
      tree call = TREE_OPERAND (expr, 0);
      tree fn = get_callee_fndecl (call);

      /* For __builtin_expect ((long) (x), y) recurse into x as well
         if x is truth_value_p.  */
      if (fn
          && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL
          && DECL_FUNCTION_CODE (fn) == BUILT_IN_EXPECT
          && call_expr_nargs (call) == 2)
        {
          tree arg = CALL_EXPR_ARG (call, 0);
          if (arg)
            {
              /* Look through a conversion to the call's own type.  */
              if (TREE_CODE (arg) == NOP_EXPR
                  && TREE_TYPE (arg) == TREE_TYPE (call))
                arg = TREE_OPERAND (arg, 0);
              if (truth_value_p (TREE_CODE (arg)))
                {
                  arg = gimple_boolify (arg);
                  CALL_EXPR_ARG (call, 0)
                    = fold_convert_loc (loc, TREE_TYPE (call), arg);
                }
            }
        }
    }

  switch (TREE_CODE (expr))
    {
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      /* Also boolify the arguments of truth exprs.  */
      TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
      /* FALLTHRU */

    case TRUTH_NOT_EXPR:
      TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));

      /* These expressions always produce boolean results.  */
      if (TREE_CODE (type) != BOOLEAN_TYPE)
        TREE_TYPE (expr) = boolean_type_node;
      return expr;

    case ANNOTATE_EXPR:
      /* Boolify the annotated expression and retype the annotation.  */
      switch ((enum annot_expr_kind) TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)))
        {
        case annot_expr_ivdep_kind:
        case annot_expr_no_vector_kind:
        case annot_expr_vector_kind:
          TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
          if (TREE_CODE (type) != BOOLEAN_TYPE)
            TREE_TYPE (expr) = boolean_type_node;
          return expr;
        default:
          gcc_unreachable ();
        }

    default:
      if (COMPARISON_CLASS_P (expr))
        {
          /* These expressions always produce boolean results.  */
          if (TREE_CODE (type) != BOOLEAN_TYPE)
            TREE_TYPE (expr) = boolean_type_node;
          return expr;
        }
      /* Other expressions that get here must have boolean values, but
         might need to be converted to the appropriate mode.  */
      if (TREE_CODE (type) == BOOLEAN_TYPE)
        return expr;
      return fold_convert_loc (loc, boolean_type_node, expr);
    }
}
2936
2937 /* Given a conditional expression *EXPR_P without side effects, gimplify
2938 its operands. New statements are inserted to PRE_P. */
2939
2940 static enum gimplify_status
2941 gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
2942 {
2943 tree expr = *expr_p, cond;
2944 enum gimplify_status ret, tret;
2945 enum tree_code code;
2946
2947 cond = gimple_boolify (COND_EXPR_COND (expr));
2948
2949 /* We need to handle && and || specially, as their gimplification
2950 creates pure cond_expr, thus leading to an infinite cycle otherwise. */
2951 code = TREE_CODE (cond);
2952 if (code == TRUTH_ANDIF_EXPR)
2953 TREE_SET_CODE (cond, TRUTH_AND_EXPR);
2954 else if (code == TRUTH_ORIF_EXPR)
2955 TREE_SET_CODE (cond, TRUTH_OR_EXPR);
2956 ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_condexpr, fb_rvalue);
2957 COND_EXPR_COND (*expr_p) = cond;
2958
2959 tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
2960 is_gimple_val, fb_rvalue);
2961 ret = MIN (ret, tret);
2962 tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
2963 is_gimple_val, fb_rvalue);
2964
2965 return MIN (ret, tret);
2966 }
2967
2968 /* Return true if evaluating EXPR could trap.
2969 EXPR is GENERIC, while tree_could_trap_p can be called
2970 only on GIMPLE. */
2971
2972 static bool
2973 generic_expr_could_trap_p (tree expr)
2974 {
2975 unsigned i, n;
2976
2977 if (!expr || is_gimple_val (expr))
2978 return false;
2979
2980 if (!EXPR_P (expr) || tree_could_trap_p (expr))
2981 return true;
2982
2983 n = TREE_OPERAND_LENGTH (expr);
2984 for (i = 0; i < n; i++)
2985 if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
2986 return true;
2987
2988 return false;
2989 }
2990
/* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
   into

   if (p)			if (p)
     t1 = a;			  a;
   else		or		else
     t1 = b;			  b;
   t1;

   The second form is used when *EXPR_P is of type void.

   PRE_P points to the list where side effects that must happen before
   *EXPR_P should be stored.

   FALLBACK tells us which kinds of result value (fb_rvalue/fb_lvalue)
   the caller can accept.  Returns GS_ERROR on failure, otherwise
   GS_ALL_DONE or the status of gimplifying the condition.  */

static enum gimplify_status
gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
{
  tree expr = *expr_p;
  tree type = TREE_TYPE (expr);
  location_t loc = EXPR_LOCATION (expr);
  tree tmp, arm1, arm2;
  enum gimplify_status ret;
  tree label_true, label_false, label_cont;
  bool have_then_clause_p, have_else_clause_p;
  gcond *cond_stmt;
  enum tree_code pred_code;
  gimple_seq seq = NULL;

  /* If this COND_EXPR has a value, copy the values into a temporary within
     the arms.  */
  if (!VOID_TYPE_P (type))
    {
      tree then_ = TREE_OPERAND (expr, 1), else_ = TREE_OPERAND (expr, 2);
      tree result;

      /* If either an rvalue is ok or we do not require an lvalue, create the
	 temporary.  But we cannot do that if the type is addressable.  */
      if (((fallback & fb_rvalue) || !(fallback & fb_lvalue))
	  && !TREE_ADDRESSABLE (type))
	{
	  if (gimplify_ctxp->allow_rhs_cond_expr
	      /* If either branch has side effects or could trap, it can't be
		 evaluated unconditionally.  */
	      && !TREE_SIDE_EFFECTS (then_)
	      && !generic_expr_could_trap_p (then_)
	      && !TREE_SIDE_EFFECTS (else_)
	      && !generic_expr_could_trap_p (else_))
	    return gimplify_pure_cond_expr (expr_p, pre_p);

	  tmp = create_tmp_var (type, "iftmp");
	  result = tmp;
	}

      /* Otherwise, only create and copy references to the values.  */
      else
	{
	  /* An lvalue is required: take the address of each arm and
	     dereference the temporary holding that address.  */
	  type = build_pointer_type (type);

	  if (!VOID_TYPE_P (TREE_TYPE (then_)))
	    then_ = build_fold_addr_expr_loc (loc, then_);

	  if (!VOID_TYPE_P (TREE_TYPE (else_)))
	    else_ = build_fold_addr_expr_loc (loc, else_);

	  expr
	    = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_);

	  tmp = create_tmp_var (type, "iftmp");
	  result = build_simple_mem_ref_loc (loc, tmp);
	}

      /* Build the new then clause, `tmp = then_;'.  But don't build the
	 assignment if the value is void; in C++ it can be if it's a throw.  */
      if (!VOID_TYPE_P (TREE_TYPE (then_)))
	TREE_OPERAND (expr, 1) = build2 (MODIFY_EXPR, type, tmp, then_);

      /* Similarly, build the new else clause, `tmp = else_;'.  */
      if (!VOID_TYPE_P (TREE_TYPE (else_)))
	TREE_OPERAND (expr, 2) = build2 (MODIFY_EXPR, type, tmp, else_);

      TREE_TYPE (expr) = void_type_node;
      recalculate_side_effects (expr);

      /* Move the COND_EXPR to the prequeue.  */
      gimplify_stmt (&expr, pre_p);

      *expr_p = result;
      return GS_ALL_DONE;
    }

  /* Remove any COMPOUND_EXPR so the following cases will be caught.  */
  STRIP_TYPE_NOPS (TREE_OPERAND (expr, 0));
  if (TREE_CODE (TREE_OPERAND (expr, 0)) == COMPOUND_EXPR)
    gimplify_compound_expr (&TREE_OPERAND (expr, 0), pre_p, true);

  /* Make sure the condition has BOOLEAN_TYPE.  */
  TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));

  /* Break apart && and || conditions.  */
  if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
      || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
    {
      expr = shortcut_cond_expr (expr);

      if (expr != *expr_p)
	{
	  *expr_p = expr;

	  /* We can't rely on gimplify_expr to re-gimplify the expanded
	     form properly, as cleanups might cause the target labels to be
	     wrapped in a TRY_FINALLY_EXPR.  To prevent that, we need to
	     set up a conditional context.  */
	  gimple_push_condition ();
	  gimplify_stmt (expr_p, &seq);
	  gimple_pop_condition (pre_p);
	  gimple_seq_add_seq (pre_p, seq);

	  return GS_ALL_DONE;
	}
    }

  /* Now do the normal gimplification.  */

  /* Gimplify condition.  */
  ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL, is_gimple_condexpr,
		       fb_rvalue);
  if (ret == GS_ERROR)
    return GS_ERROR;
  gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);

  gimple_push_condition ();

  have_then_clause_p = have_else_clause_p = false;
  /* If the THEN arm is just a goto to a label local to this function,
     reuse that label directly as the true target of the GIMPLE_COND
     instead of creating an artificial one.  */
  if (TREE_OPERAND (expr, 1) != NULL
      && TREE_CODE (TREE_OPERAND (expr, 1)) == GOTO_EXPR
      && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 1))) == LABEL_DECL
      && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 1)))
	  == current_function_decl)
      /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
	 have different locations, otherwise we end up with incorrect
	 location information on the branches.  */
      && (optimize
	  || !EXPR_HAS_LOCATION (expr)
	  || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 1))
	  || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 1))))
    {
      label_true = GOTO_DESTINATION (TREE_OPERAND (expr, 1));
      have_then_clause_p = true;
    }
  else
    label_true = create_artificial_label (UNKNOWN_LOCATION);
  /* Likewise reuse a goto destination in the ELSE arm as the false
     target, under the same constraints.  */
  if (TREE_OPERAND (expr, 2) != NULL
      && TREE_CODE (TREE_OPERAND (expr, 2)) == GOTO_EXPR
      && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 2))) == LABEL_DECL
      && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 2)))
	  == current_function_decl)
      /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
	 have different locations, otherwise we end up with incorrect
	 location information on the branches.  */
      && (optimize
	  || !EXPR_HAS_LOCATION (expr)
	  || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 2))
	  || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 2))))
    {
      label_false = GOTO_DESTINATION (TREE_OPERAND (expr, 2));
      have_else_clause_p = true;
    }
  else
    label_false = create_artificial_label (UNKNOWN_LOCATION);

  gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
				 &arm2);
  cond_stmt = gimple_build_cond (pred_code, arm1, arm2, label_true,
				 label_false);
  gimplify_seq_add_stmt (&seq, cond_stmt);
  gimple_stmt_iterator gsi = gsi_last (seq);
  maybe_fold_stmt (&gsi);

  label_cont = NULL_TREE;
  if (!have_then_clause_p)
    {
      /* For if (...) {} else { code; } put label_true after
	 the else block.  */
      if (TREE_OPERAND (expr, 1) == NULL_TREE
	  && !have_else_clause_p
	  && TREE_OPERAND (expr, 2) != NULL_TREE)
	label_cont = label_true;
      else
	{
	  gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
	  have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
	  /* For if (...) { code; } else {} or
	     if (...) { code; } else goto label; or
	     if (...) { code; return; } else { ... }
	     label_cont isn't needed.  */
	  if (!have_else_clause_p
	      && TREE_OPERAND (expr, 2) != NULL_TREE
	      && gimple_seq_may_fallthru (seq))
	    {
	      gimple *g;
	      label_cont = create_artificial_label (UNKNOWN_LOCATION);

	      g = gimple_build_goto (label_cont);

	      /* GIMPLE_COND's are very low level; they have embedded
		 gotos.  This particular embedded goto should not be marked
		 with the location of the original COND_EXPR, as it would
		 correspond to the COND_EXPR's condition, not the ELSE or the
		 THEN arms.  To avoid marking it with the wrong location, flag
		 it as "no location".  */
	      gimple_set_do_not_emit_location (g);

	      gimplify_seq_add_stmt (&seq, g);
	    }
	}
    }
  if (!have_else_clause_p)
    {
      gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
      have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
    }
  if (label_cont)
    gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));

  gimple_pop_condition (pre_p);
  gimple_seq_add_seq (pre_p, seq);

  if (ret == GS_ERROR)
    ; /* Do nothing.  */
  else if (have_then_clause_p || have_else_clause_p)
    ret = GS_ALL_DONE;
  else
    {
      /* Both arms are empty; replace the COND_EXPR with its predicate.  */
      expr = TREE_OPERAND (expr, 0);
      gimplify_stmt (&expr, pre_p);
    }

  *expr_p = NULL;
  return ret;
}
3232
3233 /* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
3234 to be marked addressable.
3235
3236 We cannot rely on such an expression being directly markable if a temporary
3237 has been created by the gimplification. In this case, we create another
3238 temporary and initialize it with a copy, which will become a store after we
3239 mark it addressable. This can happen if the front-end passed us something
3240 that it could not mark addressable yet, like a Fortran pass-by-reference
3241 parameter (int) floatvar. */
3242
3243 static void
3244 prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p)
3245 {
3246 while (handled_component_p (*expr_p))
3247 expr_p = &TREE_OPERAND (*expr_p, 0);
3248 if (is_gimple_reg (*expr_p))
3249 {
3250 tree var = get_initialized_tmp_var (*expr_p, seq_p, NULL);
3251 DECL_GIMPLE_REG_P (var) = 0;
3252 *expr_p = var;
3253 }
3254 }
3255
/* A subroutine of gimplify_modify_expr.  Replace a MODIFY_EXPR with
   a call to __builtin_memcpy.

   *EXPR_P is the MODIFY_EXPR; SIZE is the number of bytes to copy.
   WANT_VALUE is true when the value of the assignment is used, in
   which case *EXPR_P is replaced by a dereference of the returned
   destination pointer; otherwise *EXPR_P is cleared.  Generated
   statements are appended to SEQ_P.  Always returns GS_ALL_DONE.  */

static enum gimplify_status
gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
				gimple_seq *seq_p)
{
  tree t, to, to_ptr, from, from_ptr;
  gcall *gs;
  location_t loc = EXPR_LOCATION (*expr_p);

  to = TREE_OPERAND (*expr_p, 0);
  from = TREE_OPERAND (*expr_p, 1);

  /* Mark the RHS addressable.  Beware that it may not be possible to do so
     directly if a temporary has been created by the gimplification.  */
  prepare_gimple_addressable (&from, seq_p);

  mark_addressable (from);
  from_ptr = build_fold_addr_expr_loc (loc, from);
  gimplify_arg (&from_ptr, seq_p, loc);

  mark_addressable (to);
  to_ptr = build_fold_addr_expr_loc (loc, to);
  gimplify_arg (&to_ptr, seq_p, loc);

  t = builtin_decl_implicit (BUILT_IN_MEMCPY);

  gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);

  if (want_value)
    {
      /* tmp = memcpy() */
      t = create_tmp_var (TREE_TYPE (to_ptr));
      gimple_call_set_lhs (gs, t);
      gimplify_seq_add_stmt (seq_p, gs);

      /* memcpy returns the destination pointer, so the value of the
	 assignment is a dereference of that pointer.  */
      *expr_p = build_simple_mem_ref (t);
      return GS_ALL_DONE;
    }

  gimplify_seq_add_stmt (seq_p, gs);
  *expr_p = NULL;
  return GS_ALL_DONE;
}
3301
/* A subroutine of gimplify_modify_expr.  Replace a MODIFY_EXPR with
   a call to __builtin_memset.  In this case we know that the RHS is
   a CONSTRUCTOR with an empty element list.

   *EXPR_P is the MODIFY_EXPR; SIZE is the number of bytes to clear.
   WANT_VALUE is true when the value of the assignment is used, in
   which case *EXPR_P is replaced by a dereference of the returned
   destination pointer; otherwise *EXPR_P is cleared.  Generated
   statements are appended to SEQ_P.  Always returns GS_ALL_DONE.  */

static enum gimplify_status
gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
				gimple_seq *seq_p)
{
  tree t, from, to, to_ptr;
  gcall *gs;
  location_t loc = EXPR_LOCATION (*expr_p);

  /* Assert our assumptions, to abort instead of producing wrong code
     silently if they are not met.  Beware that the RHS CONSTRUCTOR might
     not be immediately exposed.  */
  from = TREE_OPERAND (*expr_p, 1);
  if (TREE_CODE (from) == WITH_SIZE_EXPR)
    from = TREE_OPERAND (from, 0);

  gcc_assert (TREE_CODE (from) == CONSTRUCTOR
	      && vec_safe_is_empty (CONSTRUCTOR_ELTS (from)));

  /* Now proceed.  */
  to = TREE_OPERAND (*expr_p, 0);

  to_ptr = build_fold_addr_expr_loc (loc, to);
  gimplify_arg (&to_ptr, seq_p, loc);
  t = builtin_decl_implicit (BUILT_IN_MEMSET);

  gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);

  if (want_value)
    {
      /* tmp = memset() */
      t = create_tmp_var (TREE_TYPE (to_ptr));
      gimple_call_set_lhs (gs, t);
      gimplify_seq_add_stmt (seq_p, gs);

      /* memset returns the destination pointer, so the value of the
	 assignment is a dereference of that pointer.  */
      *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
      return GS_ALL_DONE;
    }

  gimplify_seq_add_stmt (seq_p, gs);
  *expr_p = NULL;
  return GS_ALL_DONE;
}
3348
/* A subroutine of gimplify_init_ctor_preeval.  Called via walk_tree,
   determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
   assignment.  Return non-null if we detect a potential overlap.  */

/* Data passed through walk_tree to gimplify_init_ctor_preeval_1.  */
struct gimplify_init_ctor_preeval_data
{
  /* The base decl of the lhs object.  May be NULL, in which case we
     have to assume the lhs is indirect.  */
  tree lhs_base_decl;

  /* The alias set of the lhs object.  */
  alias_set_type lhs_alias_set;
};
3362
3363 static tree
3364 gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
3365 {
3366 struct gimplify_init_ctor_preeval_data *data
3367 = (struct gimplify_init_ctor_preeval_data *) xdata;
3368 tree t = *tp;
3369
3370 /* If we find the base object, obviously we have overlap. */
3371 if (data->lhs_base_decl == t)
3372 return t;
3373
3374 /* If the constructor component is indirect, determine if we have a
3375 potential overlap with the lhs. The only bits of information we
3376 have to go on at this point are addressability and alias sets. */
3377 if ((INDIRECT_REF_P (t)
3378 || TREE_CODE (t) == MEM_REF)
3379 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
3380 && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
3381 return t;
3382
3383 /* If the constructor component is a call, determine if it can hide a
3384 potential overlap with the lhs through an INDIRECT_REF like above.
3385 ??? Ugh - this is completely broken. In fact this whole analysis
3386 doesn't look conservative. */
3387 if (TREE_CODE (t) == CALL_EXPR)
3388 {
3389 tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));
3390
3391 for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
3392 if (POINTER_TYPE_P (TREE_VALUE (type))
3393 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
3394 && alias_sets_conflict_p (data->lhs_alias_set,
3395 get_alias_set
3396 (TREE_TYPE (TREE_VALUE (type)))))
3397 return t;
3398 }
3399
3400 if (IS_TYPE_OR_DECL_P (t))
3401 *walk_subtrees = 0;
3402 return NULL;
3403 }
3404
/* A subroutine of gimplify_init_constructor.  Pre-evaluate EXPR,
   force values that overlap with the lhs (as described by *DATA)
   into temporaries.

   EXPR_P points to one constructor element value; PRE_P and POST_P
   receive any statements generated by gimplifying it.  */

static void
gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			    struct gimplify_init_ctor_preeval_data *data)
{
  enum gimplify_status one;

  /* If the value is constant, then there's nothing to pre-evaluate.  */
  if (TREE_CONSTANT (*expr_p))
    {
      /* Ensure it does not have side effects, it might contain a reference to
	 the object we're initializing.  */
      gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
      return;
    }

  /* If the type has non-trivial constructors, we can't pre-evaluate.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
    return;

  /* Recurse for nested constructors.  */
  if (TREE_CODE (*expr_p) == CONSTRUCTOR)
    {
      unsigned HOST_WIDE_INT ix;
      constructor_elt *ce;
      vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (*expr_p);

      FOR_EACH_VEC_SAFE_ELT (v, ix, ce)
	gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);

      return;
    }

  /* If this is a variable sized type, we must remember the size.  */
  maybe_with_size_expr (expr_p);

  /* Gimplify the constructor element to something appropriate for the rhs
     of a MODIFY_EXPR.  Given that we know the LHS is an aggregate, we know
     the gimplifier will consider this a store to memory.  Doing this
     gimplification now means that we won't have to deal with complicated
     language-specific trees, nor trees like SAVE_EXPR that can induce
     exponential search behavior.  */
  one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
  if (one == GS_ERROR)
    {
      *expr_p = NULL;
      return;
    }

  /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
     with the lhs, since "a = { .x=a }" doesn't make sense.  This will
     always be true for all scalars, since is_gimple_mem_rhs insists on a
     temporary variable for them.  */
  if (DECL_P (*expr_p))
    return;

  /* If this is of variable size, we have no choice but to assume it doesn't
     overlap since we can't make a temporary for it.  */
  if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
    return;

  /* Otherwise, we must search for overlap ...  walk_tree returns non-NULL
     iff gimplify_init_ctor_preeval_1 found a potential overlap.  */
  if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
    return;

  /* ... and if found, force the value into a temporary.  */
  *expr_p = get_formal_tmp_var (*expr_p, pre_p);
}
3476
/* A subroutine of gimplify_init_ctor_eval.  Create a loop for
   a RANGE_EXPR in a CONSTRUCTOR for an array.

      var = lower;
    loop_entry:
      object[var] = value;
      if (var == upper)
	goto loop_exit;
      var = var + 1;
      goto loop_entry;
    loop_exit:

   We increment var _after_ the loop exit check because we might otherwise
   fail if upper == TYPE_MAX_VALUE (type for upper).

   OBJECT is the lhs array, LOWER and UPPER the inclusive bounds of the
   RANGE_EXPR, VALUE the initializer for each element in the range, and
   ARRAY_ELT_TYPE the element type.  Statements are emitted onto PRE_P;
   CLEARED is true if the whole object was zeroed beforehand.

   Note that we never have to deal with SAVE_EXPRs here, because this has
   already been taken care of for us, in gimplify_init_ctor_preeval().  */

static void gimplify_init_ctor_eval (tree, vec<constructor_elt, va_gc> *,
				     gimple_seq *, bool);

static void
gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
			       tree value, tree array_elt_type,
			       gimple_seq *pre_p, bool cleared)
{
  tree loop_entry_label, loop_exit_label, fall_thru_label;
  tree var, var_type, cref, tmp;

  loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
  loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
  fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);

  /* Create and initialize the index variable.  */
  var_type = TREE_TYPE (upper);
  var = create_tmp_var (var_type);
  gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));

  /* Add the loop entry label.  */
  gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));

  /* Build the reference.  */
  cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
		 var, NULL_TREE, NULL_TREE);

  /* If we are a constructor, just call gimplify_init_ctor_eval to do
     the store.  Otherwise just assign value to the reference.  */

  if (TREE_CODE (value) == CONSTRUCTOR)
    /* NB we might have to call ourself recursively through
       gimplify_init_ctor_eval if the value is a constructor.  */
    gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
			     pre_p, cleared);
  else
    gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));

  /* We exit the loop when the index var is equal to the upper bound.  */
  gimplify_seq_add_stmt (pre_p,
			 gimple_build_cond (EQ_EXPR, var, upper,
					    loop_exit_label, fall_thru_label));

  gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));

  /* Otherwise, increment the index var...  */
  tmp = build2 (PLUS_EXPR, var_type, var,
		fold_convert (var_type, integer_one_node));
  gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));

  /* ...and jump back to the loop entry.  */
  gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));

  /* Add the loop exit label.  */
  gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
}
3551
3552 /* Return true if FDECL is accessing a field that is zero sized. */
3553
3554 static bool
3555 zero_sized_field_decl (const_tree fdecl)
3556 {
3557 if (TREE_CODE (fdecl) == FIELD_DECL && DECL_SIZE (fdecl)
3558 && integer_zerop (DECL_SIZE (fdecl)))
3559 return true;
3560 return false;
3561 }
3562
3563 /* Return true if TYPE is zero sized. */
3564
3565 static bool
3566 zero_sized_type (const_tree type)
3567 {
3568 if (AGGREGATE_TYPE_P (type) && TYPE_SIZE (type)
3569 && integer_zerop (TYPE_SIZE (type)))
3570 return true;
3571 return false;
3572 }
3573
/* A subroutine of gimplify_init_constructor.  Generate individual
   MODIFY_EXPRs for a CONSTRUCTOR.  OBJECT is the LHS against which the
   assignments should happen.  ELTS is the CONSTRUCTOR_ELTS of the
   CONSTRUCTOR.  CLEARED is true if the entire LHS object has been
   zeroed first.  Generated statements are appended to PRE_P.  */

static void
gimplify_init_ctor_eval (tree object, vec<constructor_elt, va_gc> *elts,
			 gimple_seq *pre_p, bool cleared)
{
  tree array_elt_type = NULL;
  unsigned HOST_WIDE_INT ix;
  tree purpose, value;

  if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
    array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));

  FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
    {
      tree cref;

      /* NULL values are created above for gimplification errors.  */
      if (value == NULL)
	continue;

      /* If the object was already cleared, a zero initializer is a no-op.  */
      if (cleared && initializer_zerop (value))
	continue;

      /* ??? Here's to hoping the front end fills in all of the indices,
	 so we don't have to figure out what's missing ourselves.  */
      gcc_assert (purpose);

      /* Skip zero-sized fields, unless value has side-effects.  This can
	 happen with calls to functions returning a zero-sized type, which
	 we shouldn't discard.  As a number of downstream passes don't
	 expect sets of zero-sized fields, we rely on the gimplification of
	 the MODIFY_EXPR we make below to drop the assignment statement.  */
      if (! TREE_SIDE_EFFECTS (value) && zero_sized_field_decl (purpose))
	continue;

      /* If we have a RANGE_EXPR, we have to build a loop to assign the
	 whole range.  */
      if (TREE_CODE (purpose) == RANGE_EXPR)
	{
	  tree lower = TREE_OPERAND (purpose, 0);
	  tree upper = TREE_OPERAND (purpose, 1);

	  /* If the lower bound is equal to upper, just treat it as if
	     upper was the index.  */
	  if (simple_cst_equal (lower, upper))
	    purpose = upper;
	  else
	    {
	      gimplify_init_ctor_eval_range (object, lower, upper, value,
					     array_elt_type, pre_p, cleared);
	      continue;
	    }
	}

      if (array_elt_type)
	{
	  /* Do not use bitsizetype for ARRAY_REF indices.  */
	  if (TYPE_DOMAIN (TREE_TYPE (object)))
	    purpose
	      = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
			      purpose);
	  cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
			 purpose, NULL_TREE, NULL_TREE);
	}
      else
	{
	  gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
	  cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
			 unshare_expr (object), purpose, NULL_TREE);
	}

      /* Nested aggregate constructors recurse; everything else becomes
	 a plain INIT_EXPR of the reference.  */
      if (TREE_CODE (value) == CONSTRUCTOR
	  && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
	gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
				 pre_p, cleared);
      else
	{
	  tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
	  gimplify_and_add (init, pre_p);
	  ggc_free (init);
	}
    }
}
3662
3663 /* Return the appropriate RHS predicate for this LHS. */
3664
3665 gimple_predicate
3666 rhs_predicate_for (tree lhs)
3667 {
3668 if (is_gimple_reg (lhs))
3669 return is_gimple_reg_rhs_or_call;
3670 else
3671 return is_gimple_mem_rhs_or_call;
3672 }
3673
/* Gimplify a C99 compound literal expression.  This just means adding
   the DECL_EXPR before the current statement and using its anonymous
   decl instead.

   GIMPLE_TEST_F is the predicate the gimplified result must satisfy;
   FALLBACK is the set of value kinds (fb_lvalue/fb_rvalue) the caller
   accepts.  Statements go onto PRE_P.  Returns GS_OK.  */

static enum gimplify_status
gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p,
				bool (*gimple_test_f) (tree),
				fallback_t fallback)
{
  tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
  tree decl = DECL_EXPR_DECL (decl_s);
  tree init = DECL_INITIAL (decl);
  /* Mark the decl as addressable if the compound literal
     expression is addressable now, otherwise it is marked too late
     after we gimplify the initialization expression.  */
  if (TREE_ADDRESSABLE (*expr_p))
    TREE_ADDRESSABLE (decl) = 1;
  /* Otherwise, if we don't need an lvalue and have a literal directly
     substitute it.  Check if it matches the gimple predicate, as
     otherwise we'd generate a new temporary, and we can as well just
     use the decl we already have.  */
  else if (!TREE_ADDRESSABLE (decl)
	   && init
	   && (fallback & fb_lvalue) == 0
	   && gimple_test_f (init))
    {
      *expr_p = init;
      return GS_OK;
    }

  /* Preliminarily mark non-addressed complex variables as eligible
     for promotion to gimple registers.  We'll transform their uses
     as we find them.  */
  if ((TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE
       || TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE)
      && !TREE_THIS_VOLATILE (decl)
      && !needs_to_live_in_memory (decl))
    DECL_GIMPLE_REG_P (decl) = 1;

  /* If the decl is not addressable, then it is being used in some
     expression or on the right hand side of a statement, and it can
     be put into a readonly data section.  */
  if (!TREE_ADDRESSABLE (decl) && (fallback & fb_lvalue) == 0)
    TREE_READONLY (decl) = 1;

  /* This decl isn't mentioned in the enclosing block, so add it to the
     list of temps.  FIXME it seems a bit of a kludge to say that
     anonymous artificial vars aren't pushed, but everything else is.  */
  if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
    gimple_add_tmp_var (decl);

  gimplify_and_add (decl_s, pre_p);
  *expr_p = decl;
  return GS_OK;
}
3729
3730 /* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
3731 return a new CONSTRUCTOR if something changed. */
3732
3733 static tree
3734 optimize_compound_literals_in_ctor (tree orig_ctor)
3735 {
3736 tree ctor = orig_ctor;
3737 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (ctor);
3738 unsigned int idx, num = vec_safe_length (elts);
3739
3740 for (idx = 0; idx < num; idx++)
3741 {
3742 tree value = (*elts)[idx].value;
3743 tree newval = value;
3744 if (TREE_CODE (value) == CONSTRUCTOR)
3745 newval = optimize_compound_literals_in_ctor (value);
3746 else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
3747 {
3748 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
3749 tree decl = DECL_EXPR_DECL (decl_s);
3750 tree init = DECL_INITIAL (decl);
3751
3752 if (!TREE_ADDRESSABLE (value)
3753 && !TREE_ADDRESSABLE (decl)
3754 && init
3755 && TREE_CODE (init) == CONSTRUCTOR)
3756 newval = optimize_compound_literals_in_ctor (init);
3757 }
3758 if (newval == value)
3759 continue;
3760
3761 if (ctor == orig_ctor)
3762 {
3763 ctor = copy_node (orig_ctor);
3764 CONSTRUCTOR_ELTS (ctor) = vec_safe_copy (elts);
3765 elts = CONSTRUCTOR_ELTS (ctor);
3766 }
3767 (*elts)[idx].value = newval;
3768 }
3769 return ctor;
3770 }
3771
3772 /* A subroutine of gimplify_modify_expr. Break out elements of a
3773 CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
3774
3775 Note that we still need to clear any elements that don't have explicit
3776 initializers, so if not all elements are initialized we keep the
3777 original MODIFY_EXPR, we just remove all of the constructor elements.
3778
3779 If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
3780 GS_ERROR if we would have to create a temporary when gimplifying
3781 this constructor. Otherwise, return GS_OK.
3782
3783 If NOTIFY_TEMP_CREATION is false, just do the gimplification. */
3784
3785 static enum gimplify_status
3786 gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3787 bool want_value, bool notify_temp_creation)
3788 {
3789 tree object, ctor, type;
3790 enum gimplify_status ret;
3791 vec<constructor_elt, va_gc> *elts;
3792
3793 gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);
3794
3795 if (!notify_temp_creation)
3796 {
3797 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
3798 is_gimple_lvalue, fb_lvalue);
3799 if (ret == GS_ERROR)
3800 return ret;
3801 }
3802
3803 object = TREE_OPERAND (*expr_p, 0);
3804 ctor = TREE_OPERAND (*expr_p, 1) =
3805 optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
3806 type = TREE_TYPE (ctor);
3807 elts = CONSTRUCTOR_ELTS (ctor);
3808 ret = GS_ALL_DONE;
3809
3810 switch (TREE_CODE (type))
3811 {
3812 case RECORD_TYPE:
3813 case UNION_TYPE:
3814 case QUAL_UNION_TYPE:
3815 case ARRAY_TYPE:
3816 {
3817 struct gimplify_init_ctor_preeval_data preeval_data;
3818 HOST_WIDE_INT num_ctor_elements, num_nonzero_elements;
3819 bool cleared, complete_p, valid_const_initializer;
3820
3821 /* Aggregate types must lower constructors to initialization of
3822 individual elements. The exception is that a CONSTRUCTOR node
3823 with no elements indicates zero-initialization of the whole. */
3824 if (vec_safe_is_empty (elts))
3825 {
3826 if (notify_temp_creation)
3827 return GS_OK;
3828 break;
3829 }
3830
3831 /* Fetch information about the constructor to direct later processing.
3832 We might want to make static versions of it in various cases, and
3833 can only do so if it known to be a valid constant initializer. */
3834 valid_const_initializer
3835 = categorize_ctor_elements (ctor, &num_nonzero_elements,
3836 &num_ctor_elements, &complete_p);
3837
3838 /* If a const aggregate variable is being initialized, then it
3839 should never be a lose to promote the variable to be static. */
3840 if (valid_const_initializer
3841 && num_nonzero_elements > 1
3842 && TREE_READONLY (object)
3843 && TREE_CODE (object) == VAR_DECL
3844 && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object)))
3845 {
3846 if (notify_temp_creation)
3847 return GS_ERROR;
3848 DECL_INITIAL (object) = ctor;
3849 TREE_STATIC (object) = 1;
3850 if (!DECL_NAME (object))
3851 DECL_NAME (object) = create_tmp_var_name ("C");
3852 walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);
3853
3854 /* ??? C++ doesn't automatically append a .<number> to the
3855 assembler name, and even when it does, it looks at FE private
3856 data structures to figure out what that number should be,
3857 which are not set for this variable. I suppose this is
3858 important for local statics for inline functions, which aren't
3859 "local" in the object file sense. So in order to get a unique
3860 TU-local symbol, we must invoke the lhd version now. */
3861 lhd_set_decl_assembler_name (object);
3862
3863 *expr_p = NULL_TREE;
3864 break;
3865 }
3866
3867 /* If there are "lots" of initialized elements, even discounting
3868 those that are not address constants (and thus *must* be
3869 computed at runtime), then partition the constructor into
3870 constant and non-constant parts. Block copy the constant
3871 parts in, then generate code for the non-constant parts. */
3872 /* TODO. There's code in cp/typeck.c to do this. */
3873
3874 if (int_size_in_bytes (TREE_TYPE (ctor)) < 0)
3875 /* store_constructor will ignore the clearing of variable-sized
3876 objects. Initializers for such objects must explicitly set
3877 every field that needs to be set. */
3878 cleared = false;
3879 else if (!complete_p && !CONSTRUCTOR_NO_CLEARING (ctor))
3880 /* If the constructor isn't complete, clear the whole object
3881 beforehand, unless CONSTRUCTOR_NO_CLEARING is set on it.
3882
3883 ??? This ought not to be needed. For any element not present
3884 in the initializer, we should simply set them to zero. Except
3885 we'd need to *find* the elements that are not present, and that
3886 requires trickery to avoid quadratic compile-time behavior in
3887 large cases or excessive memory use in small cases. */
3888 cleared = true;
3889 else if (num_ctor_elements - num_nonzero_elements
3890 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
3891 && num_nonzero_elements < num_ctor_elements / 4)
3892 /* If there are "lots" of zeros, it's more efficient to clear
3893 the memory and then set the nonzero elements. */
3894 cleared = true;
3895 else
3896 cleared = false;
3897
3898 /* If there are "lots" of initialized elements, and all of them
3899 are valid address constants, then the entire initializer can
3900 be dropped to memory, and then memcpy'd out. Don't do this
3901 for sparse arrays, though, as it's more efficient to follow
3902 the standard CONSTRUCTOR behavior of memset followed by
3903 individual element initialization. Also don't do this for small
3904 all-zero initializers (which aren't big enough to merit
3905 clearing), and don't try to make bitwise copies of
3906 TREE_ADDRESSABLE types.
3907
3908 We cannot apply such transformation when compiling chkp static
3909 initializer because creation of initializer image in the memory
3910 will require static initialization of bounds for it. It should
3911 result in another gimplification of similar initializer and we
3912 may fall into infinite loop. */
3913 if (valid_const_initializer
3914 && !(cleared || num_nonzero_elements == 0)
3915 && !TREE_ADDRESSABLE (type)
3916 && (!current_function_decl
3917 || !lookup_attribute ("chkp ctor",
3918 DECL_ATTRIBUTES (current_function_decl))))
3919 {
3920 HOST_WIDE_INT size = int_size_in_bytes (type);
3921 unsigned int align;
3922
3923 /* ??? We can still get unbounded array types, at least
3924 from the C++ front end. This seems wrong, but attempt
3925 to work around it for now. */
3926 if (size < 0)
3927 {
3928 size = int_size_in_bytes (TREE_TYPE (object));
3929 if (size >= 0)
3930 TREE_TYPE (ctor) = type = TREE_TYPE (object);
3931 }
3932
3933 /* Find the maximum alignment we can assume for the object. */
3934 /* ??? Make use of DECL_OFFSET_ALIGN. */
3935 if (DECL_P (object))
3936 align = DECL_ALIGN (object);
3937 else
3938 align = TYPE_ALIGN (type);
3939
3940 /* Do a block move either if the size is so small as to make
3941 each individual move a sub-unit move on average, or if it
3942 is so large as to make individual moves inefficient. */
3943 if (size > 0
3944 && num_nonzero_elements > 1
3945 && (size < num_nonzero_elements
3946 || !can_move_by_pieces (size, align)))
3947 {
3948 if (notify_temp_creation)
3949 return GS_ERROR;
3950
3951 walk_tree (&ctor, force_labels_r, NULL, NULL);
3952 ctor = tree_output_constant_def (ctor);
3953 if (!useless_type_conversion_p (type, TREE_TYPE (ctor)))
3954 ctor = build1 (VIEW_CONVERT_EXPR, type, ctor);
3955 TREE_OPERAND (*expr_p, 1) = ctor;
3956
3957 /* This is no longer an assignment of a CONSTRUCTOR, but
3958 we still may have processing to do on the LHS. So
3959 pretend we didn't do anything here to let that happen. */
3960 return GS_UNHANDLED;
3961 }
3962 }
3963
3964 /* If the target is volatile, we have non-zero elements and more than
3965 one field to assign, initialize the target from a temporary. */
3966 if (TREE_THIS_VOLATILE (object)
3967 && !TREE_ADDRESSABLE (type)
3968 && num_nonzero_elements > 0
3969 && vec_safe_length (elts) > 1)
3970 {
3971 tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type));
3972 TREE_OPERAND (*expr_p, 0) = temp;
3973 *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
3974 *expr_p,
3975 build2 (MODIFY_EXPR, void_type_node,
3976 object, temp));
3977 return GS_OK;
3978 }
3979
3980 if (notify_temp_creation)
3981 return GS_OK;
3982
3983 /* If there are nonzero elements and if needed, pre-evaluate to capture
3984 elements overlapping with the lhs into temporaries. We must do this
3985 before clearing to fetch the values before they are zeroed-out. */
3986 if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR)
3987 {
3988 preeval_data.lhs_base_decl = get_base_address (object);
3989 if (!DECL_P (preeval_data.lhs_base_decl))
3990 preeval_data.lhs_base_decl = NULL;
3991 preeval_data.lhs_alias_set = get_alias_set (object);
3992
3993 gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
3994 pre_p, post_p, &preeval_data);
3995 }
3996
3997 bool ctor_has_side_effects_p
3998 = TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p, 1));
3999
4000 if (cleared)
4001 {
4002 /* Zap the CONSTRUCTOR element list, which simplifies this case.
4003 Note that we still have to gimplify, in order to handle the
4004 case of variable sized types. Avoid shared tree structures. */
4005 CONSTRUCTOR_ELTS (ctor) = NULL;
4006 TREE_SIDE_EFFECTS (ctor) = 0;
4007 object = unshare_expr (object);
4008 gimplify_stmt (expr_p, pre_p);
4009 }
4010
4011 /* If we have not block cleared the object, or if there are nonzero
4012 elements in the constructor, or if the constructor has side effects,
4013 add assignments to the individual scalar fields of the object. */
4014 if (!cleared
4015 || num_nonzero_elements > 0
4016 || ctor_has_side_effects_p)
4017 gimplify_init_ctor_eval (object, elts, pre_p, cleared);
4018
4019 *expr_p = NULL_TREE;
4020 }
4021 break;
4022
4023 case COMPLEX_TYPE:
4024 {
4025 tree r, i;
4026
4027 if (notify_temp_creation)
4028 return GS_OK;
4029
4030 /* Extract the real and imaginary parts out of the ctor. */
4031 gcc_assert (elts->length () == 2);
4032 r = (*elts)[0].value;
4033 i = (*elts)[1].value;
4034 if (r == NULL || i == NULL)
4035 {
4036 tree zero = build_zero_cst (TREE_TYPE (type));
4037 if (r == NULL)
4038 r = zero;
4039 if (i == NULL)
4040 i = zero;
4041 }
4042
4043 /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
4044 represent creation of a complex value. */
4045 if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
4046 {
4047 ctor = build_complex (type, r, i);
4048 TREE_OPERAND (*expr_p, 1) = ctor;
4049 }
4050 else
4051 {
4052 ctor = build2 (COMPLEX_EXPR, type, r, i);
4053 TREE_OPERAND (*expr_p, 1) = ctor;
4054 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
4055 pre_p,
4056 post_p,
4057 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
4058 fb_rvalue);
4059 }
4060 }
4061 break;
4062
4063 case VECTOR_TYPE:
4064 {
4065 unsigned HOST_WIDE_INT ix;
4066 constructor_elt *ce;
4067
4068 if (notify_temp_creation)
4069 return GS_OK;
4070
4071 /* Go ahead and simplify constant constructors to VECTOR_CST. */
4072 if (TREE_CONSTANT (ctor))
4073 {
4074 bool constant_p = true;
4075 tree value;
4076
4077 /* Even when ctor is constant, it might contain non-*_CST
4078 elements, such as addresses or trapping values like
4079 1.0/0.0 - 1.0/0.0. Such expressions don't belong
4080 in VECTOR_CST nodes. */
4081 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
4082 if (!CONSTANT_CLASS_P (value))
4083 {
4084 constant_p = false;
4085 break;
4086 }
4087
4088 if (constant_p)
4089 {
4090 TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
4091 break;
4092 }
4093
4094 TREE_CONSTANT (ctor) = 0;
4095 }
4096
4097 /* Vector types use CONSTRUCTOR all the way through gimple
4098 compilation as a general initializer. */
4099 FOR_EACH_VEC_SAFE_ELT (elts, ix, ce)
4100 {
4101 enum gimplify_status tret;
4102 tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
4103 fb_rvalue);
4104 if (tret == GS_ERROR)
4105 ret = GS_ERROR;
4106 }
4107 if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
4108 TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
4109 }
4110 break;
4111
4112 default:
4113 /* So how did we get a CONSTRUCTOR for a scalar type? */
4114 gcc_unreachable ();
4115 }
4116
4117 if (ret == GS_ERROR)
4118 return GS_ERROR;
4119 else if (want_value)
4120 {
4121 *expr_p = object;
4122 return GS_OK;
4123 }
4124 else
4125 {
4126 /* If we have gimplified both sides of the initializer but have
4127 not emitted an assignment, do so now. */
4128 if (*expr_p)
4129 {
4130 tree lhs = TREE_OPERAND (*expr_p, 0);
4131 tree rhs = TREE_OPERAND (*expr_p, 1);
4132 gassign *init = gimple_build_assign (lhs, rhs);
4133 gimplify_seq_add_stmt (pre_p, init);
4134 *expr_p = NULL;
4135 }
4136
4137 return GS_ALL_DONE;
4138 }
4139 }
4140
4141 /* Given a pointer value OP0, return a simplified version of an
4142 indirection through OP0, or NULL_TREE if no simplification is
4143 possible. This may only be applied to a rhs of an expression.
4144 Note that the resulting type may be different from the type pointed
4145 to in the sense that it is still compatible from the langhooks
4146 point of view. */
4147
4148 static tree
4149 gimple_fold_indirect_ref_rhs (tree t)
4150 {
4151 return gimple_fold_indirect_ref (t);
4152 }
4153
/* Subroutine of gimplify_modify_expr to do simplifications of
   MODIFY_EXPRs based on the code of the RHS.  We loop for as long as
   something changes.

   EXPR_P points to the whole MODIFY_EXPR/INIT_EXPR; FROM_P and TO_P
   point to its RHS and LHS operands respectively.  PRE_P and POST_P
   receive side-effect statements.  WANT_VALUE is true if the value of
   the assignment is used in an enclosing expression.

   Returns GS_UNHANDLED if no simplification was made; otherwise the
   status of the simplification performed.  */

static enum gimplify_status
gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
			  gimple_seq *pre_p, gimple_seq *post_p,
			  bool want_value)
{
  enum gimplify_status ret = GS_UNHANDLED;
  bool changed;

  /* Iterate to a fixed point: each simplification may expose another.  */
  do
    {
      changed = false;
      switch (TREE_CODE (*from_p))
	{
	case VAR_DECL:
	  /* If we're assigning from a read-only variable initialized with
	     a constructor, do the direct assignment from the constructor,
	     but only if neither source nor target are volatile since this
	     latter assignment might end up being done on a per-field basis.  */
	  if (DECL_INITIAL (*from_p)
	      && TREE_READONLY (*from_p)
	      && !TREE_THIS_VOLATILE (*from_p)
	      && !TREE_THIS_VOLATILE (*to_p)
	      && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR)
	    {
	      tree old_from = *from_p;
	      enum gimplify_status subret;

	      /* Move the constructor into the RHS.  */
	      *from_p = unshare_expr (DECL_INITIAL (*from_p));

	      /* Let's see if gimplify_init_constructor will need to put
		 it in memory.  */
	      subret = gimplify_init_constructor (expr_p, NULL, NULL,
						  false, true);
	      if (subret == GS_ERROR)
		{
		  /* If so, revert the change.  */
		  *from_p = old_from;
		}
	      else
		{
		  ret = GS_OK;
		  changed = true;
		}
	    }
	  break;
	case INDIRECT_REF:
	  {
	    /* If we have code like

	     *(const A*)(A*)&x

	     where the type of "x" is a (possibly cv-qualified variant
	     of "A"), treat the entire expression as identical to "x".
	     This kind of code arises in C++ when an object is bound
	     to a const reference, and if "x" is a TARGET_EXPR we want
	     to take advantage of the optimization below.  */
	    bool volatile_p = TREE_THIS_VOLATILE (*from_p);
	    tree t = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
	    if (t)
	      {
		/* Preserve the volatility of the original reference on
		   the simplified replacement.  */
		if (TREE_THIS_VOLATILE (t) != volatile_p)
		  {
		    if (DECL_P (t))
		      t = build_simple_mem_ref_loc (EXPR_LOCATION (*from_p),
						    build_fold_addr_expr (t));
		    if (REFERENCE_CLASS_P (t))
		      TREE_THIS_VOLATILE (t) = volatile_p;
		  }
		*from_p = t;
		ret = GS_OK;
		changed = true;
	      }
	    break;
	  }

	case TARGET_EXPR:
	  {
	    /* If we are initializing something from a TARGET_EXPR, strip the
	       TARGET_EXPR and initialize it directly, if possible.  This can't
	       be done if the initializer is void, since that implies that the
	       temporary is set in some non-trivial way.

	       ??? What about code that pulls out the temp and uses it
	       elsewhere? I think that such code never uses the TARGET_EXPR as
	       an initializer.  If I'm wrong, we'll die because the temp won't
	       have any RTL.  In that case, I guess we'll need to replace
	       references somehow.  */
	    tree init = TARGET_EXPR_INITIAL (*from_p);

	    if (init
		&& !VOID_TYPE_P (TREE_TYPE (init)))
	      {
		*from_p = init;
		ret = GS_OK;
		changed = true;
	      }
	  }
	  break;

	case COMPOUND_EXPR:
	  /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
	     caught.  */
	  gimplify_compound_expr (from_p, pre_p, true);
	  ret = GS_OK;
	  changed = true;
	  break;

	case CONSTRUCTOR:
	  /* If we already made some changes, let the front end have a
	     crack at this before we break it down.  */
	  if (ret != GS_UNHANDLED)
	    break;
	  /* If we're initializing from a CONSTRUCTOR, break this into
	     individual MODIFY_EXPRs.  */
	  return gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
					    false);

	case COND_EXPR:
	  /* If we're assigning to a non-register type, push the assignment
	     down into the branches.  This is mandatory for ADDRESSABLE types,
	     since we cannot generate temporaries for such, but it saves a
	     copy in other cases as well.  */
	  if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
	    {
	      /* This code should mirror the code in gimplify_cond_expr. */
	      enum tree_code code = TREE_CODE (*expr_p);
	      tree cond = *from_p;
	      tree result = *to_p;

	      ret = gimplify_expr (&result, pre_p, post_p,
				   is_gimple_lvalue, fb_lvalue);
	      if (ret != GS_ERROR)
		ret = GS_OK;

	      /* Wrap each non-void arm in an assignment to RESULT.  */
	      if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
		TREE_OPERAND (cond, 1)
		  = build2 (code, void_type_node, result,
			    TREE_OPERAND (cond, 1));
	      if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
		TREE_OPERAND (cond, 2)
		  = build2 (code, void_type_node, unshare_expr (result),
			    TREE_OPERAND (cond, 2));

	      TREE_TYPE (cond) = void_type_node;
	      recalculate_side_effects (cond);

	      if (want_value)
		{
		  gimplify_and_add (cond, pre_p);
		  *expr_p = unshare_expr (result);
		}
	      else
		*expr_p = cond;
	      return ret;
	    }
	  break;

	case CALL_EXPR:
	  /* For calls that return in memory, give *to_p as the CALL_EXPR's
	     return slot so that we don't generate a temporary.  */
	  if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
	      && aggregate_value_p (*from_p, *from_p))
	    {
	      bool use_target;

	      if (!(rhs_predicate_for (*to_p))(*from_p))
		/* If we need a temporary, *to_p isn't accurate.  */
		use_target = false;
	      /* It's OK to use the return slot directly unless it's an NRV. */
	      else if (TREE_CODE (*to_p) == RESULT_DECL
		       && DECL_NAME (*to_p) == NULL_TREE
		       && needs_to_live_in_memory (*to_p))
		use_target = true;
	      else if (is_gimple_reg_type (TREE_TYPE (*to_p))
		       || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
		/* Don't force regs into memory.  */
		use_target = false;
	      else if (TREE_CODE (*expr_p) == INIT_EXPR)
		/* It's OK to use the target directly if it's being
		   initialized. */
		use_target = true;
	      else if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (*to_p)))
		       != INTEGER_CST)
		/* Always use the target and thus RSO for variable-sized types.
		   GIMPLE cannot deal with a variable-sized assignment
		   embedded in a call statement.  */
		use_target = true;
	      else if (TREE_CODE (*to_p) != SSA_NAME
		      && (!is_gimple_variable (*to_p)
			  || needs_to_live_in_memory (*to_p)))
		/* Don't use the original target if it's already addressable;
		   if its address escapes, and the called function uses the
		   NRV optimization, a conforming program could see *to_p
		   change before the called function returns; see c++/19317.
		   When optimizing, the return_slot pass marks more functions
		   as safe after we have escape info.  */
		use_target = false;
	      else
		use_target = true;

	      if (use_target)
		{
		  CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
		  mark_addressable (*to_p);
		}
	    }
	  break;

	case WITH_SIZE_EXPR:
	  /* Likewise for calls that return an aggregate of non-constant size,
	     since we would not be able to generate a temporary at all.  */
	  if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR)
	    {
	      *from_p = TREE_OPERAND (*from_p, 0);
	      /* We don't change ret in this case because the
		 WITH_SIZE_EXPR might have been added in
		 gimplify_modify_expr, so returning GS_OK would lead to an
		 infinite loop.  */
	      changed = true;
	    }
	  break;

	  /* If we're initializing from a container, push the initialization
	     inside it.  */
	case CLEANUP_POINT_EXPR:
	case BIND_EXPR:
	case STATEMENT_LIST:
	  {
	    tree wrap = *from_p;
	    tree t;

	    ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
				 fb_lvalue);
	    if (ret != GS_ERROR)
	      ret = GS_OK;

	    /* voidify_wrapper_expr moves the assignment inside the
	       container and voids its type; it rewrites *EXPR_P in place.  */
	    t = voidify_wrapper_expr (wrap, *expr_p);
	    gcc_assert (t == *expr_p);

	    if (want_value)
	      {
		gimplify_and_add (wrap, pre_p);
		*expr_p = unshare_expr (*to_p);
	      }
	    else
	      *expr_p = wrap;
	    return GS_OK;
	  }

	case COMPOUND_LITERAL_EXPR:
	  {
	    tree complit = TREE_OPERAND (*expr_p, 1);
	    tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
	    tree decl = DECL_EXPR_DECL (decl_s);
	    tree init = DECL_INITIAL (decl);

	    /* struct T x = (struct T) { 0, 1, 2 } can be optimized
	       into struct T x = { 0, 1, 2 } if the address of the
	       compound literal has never been taken.  */
	    if (!TREE_ADDRESSABLE (complit)
		&& !TREE_ADDRESSABLE (decl)
		&& init)
	      {
		*expr_p = copy_node (*expr_p);
		TREE_OPERAND (*expr_p, 1) = init;
		return GS_OK;
	      }
	  }
	  /* FALLTHRU -- if the optimization above did not apply, nothing
	     more to do for a compound literal.  */

	default:
	  break;
	}
    }
  while (changed);

  return ret;
}
4436
4437
4438 /* Return true if T looks like a valid GIMPLE statement. */
4439
4440 static bool
4441 is_gimple_stmt (tree t)
4442 {
4443 const enum tree_code code = TREE_CODE (t);
4444
4445 switch (code)
4446 {
4447 case NOP_EXPR:
4448 /* The only valid NOP_EXPR is the empty statement. */
4449 return IS_EMPTY_STMT (t);
4450
4451 case BIND_EXPR:
4452 case COND_EXPR:
4453 /* These are only valid if they're void. */
4454 return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));
4455
4456 case SWITCH_EXPR:
4457 case GOTO_EXPR:
4458 case RETURN_EXPR:
4459 case LABEL_EXPR:
4460 case CASE_LABEL_EXPR:
4461 case TRY_CATCH_EXPR:
4462 case TRY_FINALLY_EXPR:
4463 case EH_FILTER_EXPR:
4464 case CATCH_EXPR:
4465 case ASM_EXPR:
4466 case STATEMENT_LIST:
4467 case OACC_PARALLEL:
4468 case OACC_KERNELS:
4469 case OACC_DATA:
4470 case OACC_HOST_DATA:
4471 case OACC_DECLARE:
4472 case OACC_UPDATE:
4473 case OACC_ENTER_DATA:
4474 case OACC_EXIT_DATA:
4475 case OACC_CACHE:
4476 case OMP_PARALLEL:
4477 case OMP_FOR:
4478 case OMP_SIMD:
4479 case CILK_SIMD:
4480 case OMP_DISTRIBUTE:
4481 case OACC_LOOP:
4482 case OMP_SECTIONS:
4483 case OMP_SECTION:
4484 case OMP_SINGLE:
4485 case OMP_MASTER:
4486 case OMP_TASKGROUP:
4487 case OMP_ORDERED:
4488 case OMP_CRITICAL:
4489 case OMP_TASK:
4490 case OMP_TARGET:
4491 case OMP_TARGET_DATA:
4492 case OMP_TARGET_UPDATE:
4493 case OMP_TARGET_ENTER_DATA:
4494 case OMP_TARGET_EXIT_DATA:
4495 case OMP_TASKLOOP:
4496 case OMP_TEAMS:
4497 /* These are always void. */
4498 return true;
4499
4500 case CALL_EXPR:
4501 case MODIFY_EXPR:
4502 case PREDICT_EXPR:
4503 /* These are valid regardless of their type. */
4504 return true;
4505
4506 default:
4507 return false;
4508 }
4509 }
4510
4511
4512 /* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
4513 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with
4514 DECL_GIMPLE_REG_P set.
4515
4516 IMPORTANT NOTE: This promotion is performed by introducing a load of the
4517 other, unmodified part of the complex object just before the total store.
4518 As a consequence, if the object is still uninitialized, an undefined value
4519 will be loaded into a register, which may result in a spurious exception
4520 if the register is floating-point and the value happens to be a signaling
4521 NaN for example. Then the fully-fledged complex operations lowering pass
4522 followed by a DCE pass are necessary in order to fix things up. */
4523
4524 static enum gimplify_status
4525 gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
4526 bool want_value)
4527 {
4528 enum tree_code code, ocode;
4529 tree lhs, rhs, new_rhs, other, realpart, imagpart;
4530
4531 lhs = TREE_OPERAND (*expr_p, 0);
4532 rhs = TREE_OPERAND (*expr_p, 1);
4533 code = TREE_CODE (lhs);
4534 lhs = TREE_OPERAND (lhs, 0);
4535
4536 ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
4537 other = build1 (ocode, TREE_TYPE (rhs), lhs);
4538 TREE_NO_WARNING (other) = 1;
4539 other = get_formal_tmp_var (other, pre_p);
4540
4541 realpart = code == REALPART_EXPR ? rhs : other;
4542 imagpart = code == REALPART_EXPR ? other : rhs;
4543
4544 if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
4545 new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
4546 else
4547 new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
4548
4549 gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
4550 *expr_p = (want_value) ? rhs : NULL_TREE;
4551
4552 return GS_ALL_DONE;
4553 }
4554
/* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.

      modify_expr
	      : varname '=' rhs
	      | '*' ID '=' rhs

    PRE_P points to the list where side effects that must happen before
	*EXPR_P should be stored.

    POST_P points to the list where side effects that must happen after
	*EXPR_P should be stored.

    WANT_VALUE is nonzero iff we want to use the value of this expression
	in another expression.  */

static enum gimplify_status
gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
		      bool want_value)
{
  tree *from_p = &TREE_OPERAND (*expr_p, 1);
  tree *to_p = &TREE_OPERAND (*expr_p, 0);
  enum gimplify_status ret = GS_UNHANDLED;
  gimple *assign;
  location_t loc = EXPR_LOCATION (*expr_p);
  gimple_stmt_iterator gsi;

  gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
	      || TREE_CODE (*expr_p) == INIT_EXPR);

  /* Trying to simplify a clobber using normal logic doesn't work,
     so handle it here.  */
  if (TREE_CLOBBER_P (*from_p))
    {
      ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
      if (ret == GS_ERROR)
	return ret;
      gcc_assert (!want_value
		  && (TREE_CODE (*to_p) == VAR_DECL
		      || TREE_CODE (*to_p) == MEM_REF));
      gimplify_seq_add_stmt (pre_p, gimple_build_assign (*to_p, *from_p));
      *expr_p = NULL;
      return GS_ALL_DONE;
    }

  /* Insert pointer conversions required by the middle-end that are not
     required by the frontend.  This fixes middle-end type checking for
     for example gcc.dg/redecl-6.c.  */
  if (POINTER_TYPE_P (TREE_TYPE (*to_p)))
    {
      STRIP_USELESS_TYPE_CONVERSION (*from_p);
      if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
	*from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
    }

  /* See if any simplifications can be done based on what the RHS is.  */
  ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
				  want_value);
  if (ret != GS_UNHANDLED)
    return ret;

  /* For zero sized types only gimplify the left hand side and right hand
     side as statements and throw away the assignment.  Do this after
     gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
     types properly.  */
  if (zero_sized_type (TREE_TYPE (*from_p)) && !want_value)
    {
      gimplify_stmt (from_p, pre_p);
      gimplify_stmt (to_p, pre_p);
      *expr_p = NULL_TREE;
      return GS_ALL_DONE;
    }

  /* If the value being copied is of variable width, compute the length
     of the copy into a WITH_SIZE_EXPR.   Note that we need to do this
     before gimplifying any of the operands so that we can resolve any
     PLACEHOLDER_EXPRs in the size.  Also note that the RTL expander uses
     the size of the expression to be copied, not of the destination, so
     that is what we must do here.  */
  maybe_with_size_expr (from_p);

  /* Gimplify the LHS first.  */
  ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
  if (ret == GS_ERROR)
    return ret;

  /* As a special case, we have to temporarily allow for assignments
     with a CALL_EXPR on the RHS.  Since in GIMPLE a function call is
     a toplevel statement, when gimplifying the GENERIC expression
     MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
     GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.

     Instead, we need to create the tuple GIMPLE_CALL <a, foo>.  To
     prevent gimplify_expr from trying to create a new temporary for
     foo's LHS, we tell it that it should only gimplify until it
     reaches the CALL_EXPR.  On return from gimplify_expr, the newly
     created GIMPLE_CALL <foo> will be the last statement in *PRE_P
     and all we need to do here is set 'a' to be its LHS.  */
  ret = gimplify_expr (from_p, pre_p, post_p, rhs_predicate_for (*to_p),
		       fb_rvalue);
  if (ret == GS_ERROR)
    return ret;

  /* In case of va_arg internal fn wrappped in a WITH_SIZE_EXPR, add the type
     size as argument to the call.  */
  if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
    {
      tree call = TREE_OPERAND (*from_p, 0);
      tree vlasize = TREE_OPERAND (*from_p, 1);

      if (TREE_CODE (call) == CALL_EXPR
	  && CALL_EXPR_IFN (call) == IFN_VA_ARG)
	{
	  /* Rebuild the IFN_VA_ARG call with the size appended as a
	     third argument.  */
	  int nargs = call_expr_nargs (call);
	  tree type = TREE_TYPE (call);
	  tree ap = CALL_EXPR_ARG (call, 0);
	  tree tag = CALL_EXPR_ARG (call, 1);
	  tree newcall = build_call_expr_internal_loc (EXPR_LOCATION (call),
						       IFN_VA_ARG, type,
						       nargs + 1, ap, tag,
						       vlasize);
	  tree *call_p = &(TREE_OPERAND (*from_p, 0));
	  *call_p = newcall;
	}
    }

  /* Now see if the above changed *from_p to something we handle specially.  */
  ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
				  want_value);
  if (ret != GS_UNHANDLED)
    return ret;

  /* If we've got a variable sized assignment between two lvalues (i.e. does
     not involve a call), then we can make things a bit more straightforward
     by converting the assignment to memcpy or memset.  */
  if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
    {
      tree from = TREE_OPERAND (*from_p, 0);
      tree size = TREE_OPERAND (*from_p, 1);

      if (TREE_CODE (from) == CONSTRUCTOR)
	return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);

      if (is_gimple_addressable (from))
	{
	  *from_p = from;
	  return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
						 pre_p);
	}
    }

  /* Transform partial stores to non-addressable complex variables into
     total stores.  This allows us to use real instead of virtual operands
     for these variables, which improves optimization.  */
  if ((TREE_CODE (*to_p) == REALPART_EXPR
       || TREE_CODE (*to_p) == IMAGPART_EXPR)
      && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
    return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);

  /* Try to alleviate the effects of the gimplification creating artificial
     temporaries (see for example is_gimple_reg_rhs) on the debug info, but
     make sure not to create DECL_DEBUG_EXPR links across functions.  */
  if (!gimplify_ctxp->into_ssa
      && TREE_CODE (*from_p) == VAR_DECL
      && DECL_IGNORED_P (*from_p)
      && DECL_P (*to_p)
      && !DECL_IGNORED_P (*to_p)
      && decl_function_context (*to_p) == current_function_decl)
    {
      if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
	DECL_NAME (*from_p)
	  = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
      DECL_HAS_DEBUG_EXPR_P (*from_p) = 1;
      SET_DECL_DEBUG_EXPR (*from_p, *to_p);
    }

  /* If the value is wanted and the LHS is volatile, evaluate the RHS into
     a temporary first so the returned value is not re-read from the
     volatile location (see the *expr_p choice at the end).  */
  if (want_value && TREE_THIS_VOLATILE (*to_p))
    *from_p = get_initialized_tmp_var (*from_p, pre_p, post_p);

  if (TREE_CODE (*from_p) == CALL_EXPR)
    {
      /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
	 instead of a GIMPLE_ASSIGN.  */
      gcall *call_stmt;
      if (CALL_EXPR_FN (*from_p) == NULL_TREE)
	{
	  /* Gimplify internal functions created in the FEs.  */
	  int nargs = call_expr_nargs (*from_p), i;
	  enum internal_fn ifn = CALL_EXPR_IFN (*from_p);
	  auto_vec<tree> vargs (nargs);

	  for (i = 0; i < nargs; i++)
	    {
	      gimplify_arg (&CALL_EXPR_ARG (*from_p, i), pre_p,
			    EXPR_LOCATION (*from_p));
	      vargs.quick_push (CALL_EXPR_ARG (*from_p, i));
	    }
	  call_stmt = gimple_build_call_internal_vec (ifn, vargs);
	  gimple_set_location (call_stmt, EXPR_LOCATION (*expr_p));
	}
      else
	{
	  tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*from_p));
	  CALL_EXPR_FN (*from_p) = TREE_OPERAND (CALL_EXPR_FN (*from_p), 0);
	  STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p));
	  tree fndecl = get_callee_fndecl (*from_p);
	  /* Lower a 3-argument __builtin_expect to the internal function
	     form so the extra hint argument survives into GIMPLE.  */
	  if (fndecl
	      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
	      && call_expr_nargs (*from_p) == 3)
	    call_stmt = gimple_build_call_internal (IFN_BUILTIN_EXPECT, 3,
						    CALL_EXPR_ARG (*from_p, 0),
						    CALL_EXPR_ARG (*from_p, 1),
						    CALL_EXPR_ARG (*from_p, 2));
	  else
	    {
	      call_stmt = gimple_build_call_from_tree (*from_p);
	      gimple_call_set_fntype (call_stmt, TREE_TYPE (fnptrtype));
	    }
	}
      notice_special_calls (call_stmt);
      /* A noreturn call cannot use its result, so only set the LHS for
	 calls that may return.  */
      if (!gimple_call_noreturn_p (call_stmt))
	gimple_call_set_lhs (call_stmt, *to_p);
      assign = call_stmt;
    }
  else
    {
      assign = gimple_build_assign (*to_p, *from_p);
      gimple_set_location (assign, EXPR_LOCATION (*expr_p));
    }

  if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
    {
      /* We should have got an SSA name from the start.  */
      gcc_assert (TREE_CODE (*to_p) == SSA_NAME);
    }

  /* Emit the statement and give it a chance to be folded in place.  */
  gimplify_seq_add_stmt (pre_p, assign);
  gsi = gsi_last (*pre_p);
  maybe_fold_stmt (&gsi);

  if (want_value)
    {
      *expr_p = TREE_THIS_VOLATILE (*to_p) ? *from_p : unshare_expr (*to_p);
      return GS_OK;
    }
  else
    *expr_p = NULL;

  return GS_ALL_DONE;
}
4804
4805 /* Gimplify a comparison between two variable-sized objects. Do this
4806 with a call to BUILT_IN_MEMCMP. */
4807
4808 static enum gimplify_status
4809 gimplify_variable_sized_compare (tree *expr_p)
4810 {
4811 location_t loc = EXPR_LOCATION (*expr_p);
4812 tree op0 = TREE_OPERAND (*expr_p, 0);
4813 tree op1 = TREE_OPERAND (*expr_p, 1);
4814 tree t, arg, dest, src, expr;
4815
4816 arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
4817 arg = unshare_expr (arg);
4818 arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
4819 src = build_fold_addr_expr_loc (loc, op1);
4820 dest = build_fold_addr_expr_loc (loc, op0);
4821 t = builtin_decl_implicit (BUILT_IN_MEMCMP);
4822 t = build_call_expr_loc (loc, t, 3, dest, src, arg);
4823
4824 expr
4825 = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
4826 SET_EXPR_LOCATION (expr, loc);
4827 *expr_p = expr;
4828
4829 return GS_OK;
4830 }
4831
4832 /* Gimplify a comparison between two aggregate objects of integral scalar
4833 mode as a comparison between the bitwise equivalent scalar values. */
4834
4835 static enum gimplify_status
4836 gimplify_scalar_mode_aggregate_compare (tree *expr_p)
4837 {
4838 location_t loc = EXPR_LOCATION (*expr_p);
4839 tree op0 = TREE_OPERAND (*expr_p, 0);
4840 tree op1 = TREE_OPERAND (*expr_p, 1);
4841
4842 tree type = TREE_TYPE (op0);
4843 tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
4844
4845 op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
4846 op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);
4847
4848 *expr_p
4849 = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
4850
4851 return GS_OK;
4852 }
4853
4854 /* Gimplify an expression sequence. This function gimplifies each
4855 expression and rewrites the original expression with the last
4856 expression of the sequence in GIMPLE form.
4857
4858 PRE_P points to the list where the side effects for all the
4859 expressions in the sequence will be emitted.
4860
4861 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
4862
4863 static enum gimplify_status
4864 gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
4865 {
4866 tree t = *expr_p;
4867
4868 do
4869 {
4870 tree *sub_p = &TREE_OPERAND (t, 0);
4871
4872 if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
4873 gimplify_compound_expr (sub_p, pre_p, false);
4874 else
4875 gimplify_stmt (sub_p, pre_p);
4876
4877 t = TREE_OPERAND (t, 1);
4878 }
4879 while (TREE_CODE (t) == COMPOUND_EXPR);
4880
4881 *expr_p = t;
4882 if (want_value)
4883 return GS_OK;
4884 else
4885 {
4886 gimplify_stmt (expr_p, pre_p);
4887 return GS_ALL_DONE;
4888 }
4889 }
4890
4891 /* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
4892 gimplify. After gimplification, EXPR_P will point to a new temporary
4893 that holds the original value of the SAVE_EXPR node.
4894
4895 PRE_P points to the list where side effects that must happen before
4896 *EXPR_P should be stored. */
4897
4898 static enum gimplify_status
4899 gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4900 {
4901 enum gimplify_status ret = GS_ALL_DONE;
4902 tree val;
4903
4904 gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
4905 val = TREE_OPERAND (*expr_p, 0);
4906
4907 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
4908 if (!SAVE_EXPR_RESOLVED_P (*expr_p))
4909 {
4910 /* The operand may be a void-valued expression such as SAVE_EXPRs
4911 generated by the Java frontend for class initialization. It is
4912 being executed only for its side-effects. */
4913 if (TREE_TYPE (val) == void_type_node)
4914 {
4915 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
4916 is_gimple_stmt, fb_none);
4917 val = NULL;
4918 }
4919 else
4920 val = get_initialized_tmp_var (val, pre_p, post_p);
4921
4922 TREE_OPERAND (*expr_p, 0) = val;
4923 SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
4924 }
4925
4926 *expr_p = val;
4927
4928 return ret;
4929 }
4930
/* Rewrite the ADDR_EXPR node pointed to by EXPR_P

      unary_expr
	      : ...
	      | '&' varname
	      ...

    PRE_P points to the list where side effects that must happen before
	*EXPR_P should be stored.

    POST_P points to the list where side effects that must happen after
	*EXPR_P should be stored.  */

static enum gimplify_status
gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree expr = *expr_p;
  tree op0 = TREE_OPERAND (expr, 0);
  enum gimplify_status ret;
  location_t loc = EXPR_LOCATION (*expr_p);

  /* Dispatch on the kind of object whose address is being taken.  */
  switch (TREE_CODE (op0))
    {
    case INDIRECT_REF:
    do_indirect_ref:
      /* Check if we are dealing with an expression of the form '&*ptr'.
	 While the front end folds away '&*ptr' into 'ptr', these
	 expressions may be generated internally by the compiler (e.g.,
	 builtins like __builtin_va_end).  */
      /* Caution: the silent array decomposition semantics we allow for
	 ADDR_EXPR means we can't always discard the pair.  */
      /* Gimplification of the ADDR_EXPR operand may drop
	 cv-qualification conversions, so make sure we add them if
	 needed.  */
      {
	tree op00 = TREE_OPERAND (op0, 0);
	tree t_expr = TREE_TYPE (expr);
	tree t_op00 = TREE_TYPE (op00);

	/* '&*ptr' collapses to 'ptr', possibly with a conversion back
	   to the original (qualified) pointer type.  */
	if (!useless_type_conversion_p (t_expr, t_op00))
	  op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
	*expr_p = op00;
	ret = GS_OK;
      }
      break;

    case VIEW_CONVERT_EXPR:
      /* Take the address of our operand and then convert it to the type of
	 this ADDR_EXPR.

	 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
	 all clear.  The impact of this transformation is even less clear.  */

      /* If the operand is a useless conversion, look through it.  Doing so
	 guarantees that the ADDR_EXPR and its operand will remain of the
	 same type.  */
      if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
	op0 = TREE_OPERAND (op0, 0);

      /* Rebuild as a pointer conversion of '&inner-object'.  */
      *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
				  build_fold_addr_expr_loc (loc,
							TREE_OPERAND (op0, 0)));
      ret = GS_OK;
      break;

    case MEM_REF:
      /* '&MEM[ptr, 0]' is equivalent to '&*ptr'; handle it the same way.  */
      if (integer_zerop (TREE_OPERAND (op0, 1)))
	goto do_indirect_ref;

      /* ... fall through ... */

    default:
      /* If we see a call to a declared builtin or see its address
	 being taken (we can unify those cases here) then we can mark
	 the builtin for implicit generation by GCC.  */
      if (TREE_CODE (op0) == FUNCTION_DECL
	  && DECL_BUILT_IN_CLASS (op0) == BUILT_IN_NORMAL
	  && builtin_decl_declared_p (DECL_FUNCTION_CODE (op0)))
	set_builtin_decl_implicit_p (DECL_FUNCTION_CODE (op0), true);

      /* We use fb_either here because the C frontend sometimes takes
	 the address of a call that returns a struct; see
	 gcc.dg/c99-array-lval-1.c.  The gimplifier will correctly make
	 the implied temporary explicit.  */

      /* Make the operand addressable.  */
      ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
			   is_gimple_addressable, fb_either);
      if (ret == GS_ERROR)
	break;

      /* Then mark it.  Beware that it may not be possible to do so directly
	 if a temporary has been created by the gimplification.  */
      prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p);

      op0 = TREE_OPERAND (expr, 0);

      /* For various reasons, the gimplification of the expression
	 may have made a new INDIRECT_REF.  */
      if (TREE_CODE (op0) == INDIRECT_REF)
	goto do_indirect_ref;

      mark_addressable (TREE_OPERAND (expr, 0));

      /* The FEs may end up building ADDR_EXPRs early on a decl with
	 an incomplete type.  Re-build ADDR_EXPRs in canonical form
	 here.  */
      if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr))))
	*expr_p = build_fold_addr_expr (op0);

      /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly.  */
      recompute_tree_invariant_for_addr_expr (*expr_p);

      /* If we re-built the ADDR_EXPR add a conversion to the original type
	 if required.  */
      if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
	*expr_p = fold_convert (TREE_TYPE (expr), *expr_p);

      break;
    }

  return ret;
}
5054
/* Gimplify the operands of an ASM_EXPR.  Input operands should be a gimple
   value; output operands should be a gimple lvalue.  On success a
   GIMPLE_ASM statement is appended to *PRE_P; nothing is emitted if any
   operand fails to gimplify.  Returns GS_ALL_DONE or GS_ERROR.  */

static enum gimplify_status
gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree expr;
  int noutputs;
  const char **oconstraints;
  int i;
  tree link;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;
  enum gimplify_status ret, tret;
  gasm *stmt;
  vec<tree, va_gc> *inputs;
  vec<tree, va_gc> *outputs;
  vec<tree, va_gc> *clobbers;
  vec<tree, va_gc> *labels;
  tree link_next;

  expr = *expr_p;
  noutputs = list_length (ASM_OUTPUTS (expr));
  /* Output constraints are remembered so that matching-operand input
     constraints ("0", "1", ...) can be validated against them below.  */
  oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));

  inputs = NULL;
  outputs = NULL;
  clobbers = NULL;
  labels = NULL;

  ret = GS_ALL_DONE;
  link_next = NULL_TREE;
  /* First pass: gimplify each output operand as an lvalue.  Note that
     I keeps counting across the subsequent input loop, matching the
     operand numbering used in extended asm.  */
  for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
    {
      bool ok;
      size_t constraint_len;

      link_next = TREE_CHAIN (link);

      oconstraints[i]
	= constraint
	= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      constraint_len = strlen (constraint);
      if (constraint_len == 0)
	continue;

      ok = parse_output_constraint (&constraint, i, 0, 0,
				    &allows_mem, &allows_reg, &is_inout);
      if (!ok)
	{
	  ret = GS_ERROR;
	  is_inout = false;
	}

      /* A memory-only operand must live in memory, so force it
	 addressable before gimplification.  */
      if (!allows_reg && allows_mem)
	mark_addressable (TREE_VALUE (link));

      tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
			    is_inout ? is_gimple_min_lval : is_gimple_lvalue,
			    fb_lvalue | fb_mayfail);
      if (tret == GS_ERROR)
	{
	  error ("invalid lvalue in asm output %d", i);
	  ret = tret;
	}

      vec_safe_push (outputs, link);
      TREE_CHAIN (link) = NULL_TREE;

      if (is_inout)
	{
	  /* An input/output operand.  To give the optimizers more
	     flexibility, split it into separate input and output
 	     operands.  */
	  tree input;
	  char buf[10];

	  /* Turn the in/out constraint into an output constraint.  */
	  char *p = xstrdup (constraint);
	  p[0] = '=';
	  TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);

	  /* And add a matching input constraint.  */
	  if (allows_reg)
	    {
	      sprintf (buf, "%d", i);

	      /* If there are multiple alternatives in the constraint,
		 handle each of them individually.  Those that allow register
		 will be replaced with operand number, the others will stay
		 unchanged.  */
	      if (strchr (p, ',') != NULL)
		{
		  size_t len = 0, buflen = strlen (buf);
		  char *beg, *end, *str, *dst;

		  /* First pass over the alternatives: compute an upper
		     bound on the rewritten constraint's length.  */
		  for (beg = p + 1;;)
		    {
		      end = strchr (beg, ',');
		      if (end == NULL)
			end = strchr (beg, '\0');
		      if ((size_t) (end - beg) < buflen)
			len += buflen + 1;
		      else
			len += end - beg + 1;
		      if (*end)
			beg = end + 1;
		      else
			break;
		    }

		  /* Second pass: copy each alternative, substituting the
		     operand number for the ones that allow a register.  */
		  str = (char *) alloca (len);
		  for (beg = p + 1, dst = str;;)
		    {
		      const char *tem;
		      bool mem_p, reg_p, inout_p;

		      end = strchr (beg, ',');
		      if (end)
			*end = '\0';
		      /* Temporarily prepend '=' so the alternative parses
			 as an output constraint.  */
		      beg[-1] = '=';
		      tem = beg - 1;
		      parse_output_constraint (&tem, i, 0, 0,
					       &mem_p, &reg_p, &inout_p);
		      if (dst != str)
			*dst++ = ',';
		      if (reg_p)
			{
			  memcpy (dst, buf, buflen);
			  dst += buflen;
			}
		      else
			{
			  if (end)
			    len = end - beg;
			  else
			    len = strlen (beg);
			  memcpy (dst, beg, len);
			  dst += len;
			}
		      if (end)
			beg = end + 1;
		      else
			break;
		    }
		  *dst = '\0';
		  input = build_string (dst - str, str);
		}
	      else
		input = build_string (strlen (buf), buf);
	    }
	  else
	    /* Memory-only in/out operand: the input constraint is the
	       original one without its leading '+'.  */
	    input = build_string (constraint_len - 1, constraint + 1);

	  free (p);

	  /* The new input aliases the output lvalue, so unshare it.  */
	  input = build_tree_list (build_tree_list (NULL_TREE, input),
				   unshare_expr (TREE_VALUE (link)));
	  ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
	}
    }

  link_next = NULL_TREE;
  /* Second pass: gimplify the input operands (including any inputs the
     in/out splitting above appended to ASM_INPUTS).  */
  for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
    {
      link_next = TREE_CHAIN (link);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0,
			      oconstraints, &allows_mem, &allows_reg);

      /* If we can't make copies, we can only accept memory.  */
      if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (link))))
	{
	  if (allows_mem)
	    allows_reg = 0;
	  else
	    {
	      error ("impossible constraint in %<asm%>");
	      error ("non-memory input %d must stay in memory", i);
	      return GS_ERROR;
	    }
	}

      /* If the operand is a memory input, it should be an lvalue.  */
      if (!allows_reg && allows_mem)
	{
	  tree inputv = TREE_VALUE (link);
	  STRIP_NOPS (inputv);
	  /* Side-effecting rvalues cannot serve as memory inputs;
	     replace them so the error below triggers.  */
	  if (TREE_CODE (inputv) == PREDECREMENT_EXPR
	      || TREE_CODE (inputv) == PREINCREMENT_EXPR
	      || TREE_CODE (inputv) == POSTDECREMENT_EXPR
	      || TREE_CODE (inputv) == POSTINCREMENT_EXPR
	      || TREE_CODE (inputv) == MODIFY_EXPR)
	    TREE_VALUE (link) = error_mark_node;
	  tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
				is_gimple_lvalue, fb_lvalue | fb_mayfail);
	  mark_addressable (TREE_VALUE (link));
	  if (tret == GS_ERROR)
	    {
	      if (EXPR_HAS_LOCATION (TREE_VALUE (link)))
		input_location = EXPR_LOCATION (TREE_VALUE (link));
	      error ("memory input %d is not directly addressable", i);
	      ret = tret;
	    }
	}
      else
	{
	  tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
				is_gimple_asm_val, fb_rvalue);
	  if (tret == GS_ERROR)
	    ret = tret;
	}

      TREE_CHAIN (link) = NULL_TREE;
      vec_safe_push (inputs, link);
    }

  /* Clobbers and labels need no gimplification; just collect them.  */
  link_next = NULL_TREE;
  for (link = ASM_CLOBBERS (expr); link; ++i, link = link_next)
    {
      link_next = TREE_CHAIN (link);
      TREE_CHAIN (link) = NULL_TREE;
      vec_safe_push (clobbers, link);
    }

  link_next = NULL_TREE;
  for (link = ASM_LABELS (expr); link; ++i, link = link_next)
    {
      link_next = TREE_CHAIN (link);
      TREE_CHAIN (link) = NULL_TREE;
      vec_safe_push (labels, link);
    }

  /* Do not add ASMs with errors to the gimple IL stream.  */
  if (ret != GS_ERROR)
    {
      stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
				   inputs, outputs, clobbers, labels);

      /* An asm with no outputs is implicitly volatile.  */
      gimple_asm_set_volatile (stmt, ASM_VOLATILE_P (expr) || noutputs == 0);
      gimple_asm_set_input (stmt, ASM_INPUT_P (expr));

      gimplify_seq_add_stmt (pre_p, stmt);
    }

  return ret;
}
5302
/* Gimplify a CLEANUP_POINT_EXPR.  Currently this works by adding
   GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
   gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
   return to this function.

   FIXME should we complexify the prequeue handling instead?  Or use flags
   for all the cleanups and let the optimizer tighten them up?  The current
   code seems pretty fragile; it will break on a cleanup within any
   non-conditional nesting.  But any such nesting would be broken, anyway;
   we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
   and continues out of it.  We can do that at the RTL level, though, so
   having an optimizer to tighten up try/finally regions would be a Good
   Thing.  */

static enum gimplify_status
gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
{
  gimple_stmt_iterator iter;
  gimple_seq body_sequence = NULL;

  /* If the expression has a value, replace uses with a temporary so the
     result survives the restructuring below.  */
  tree temp = voidify_wrapper_expr (*expr_p, NULL);

  /* We only care about the number of conditions between the innermost
     CLEANUP_POINT_EXPR and the cleanup.  So save and reset the count and
     any cleanups collected outside the CLEANUP_POINT_EXPR.  */
  int old_conds = gimplify_ctxp->conditions;
  gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
  bool old_in_cleanup_point_expr = gimplify_ctxp->in_cleanup_point_expr;
  gimplify_ctxp->conditions = 0;
  gimplify_ctxp->conditional_cleanups = NULL;
  gimplify_ctxp->in_cleanup_point_expr = true;

  /* Gimplify the body into a private sequence; gimple_push_cleanup will
     have inserted GIMPLE_WITH_CLEANUP_EXPR markers into it.  */
  gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);

  gimplify_ctxp->conditions = old_conds;
  gimplify_ctxp->conditional_cleanups = old_cleanups;
  gimplify_ctxp->in_cleanup_point_expr = old_in_cleanup_point_expr;

  /* Scan the sequence and convert each WCE marker into a GIMPLE_TRY
     whose "try" body is everything following the marker.  */
  for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
    {
      gimple *wce = gsi_stmt (iter);

      if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
	{
	  if (gsi_one_before_end_p (iter))
	    {
	      /* Nothing follows the cleanup, so a try/finally would be
		 pointless: splice the cleanup in directly (unless it is
		 EH-only, in which case it can simply be dropped).
		 Note that gsi_insert_seq_before and gsi_remove do not
		 scan operands, unlike some other sequence mutators.  */
	      if (!gimple_wce_cleanup_eh_only (wce))
		gsi_insert_seq_before_without_update (&iter,
						      gimple_wce_cleanup (wce),
						      GSI_SAME_STMT);
	      gsi_remove (&iter, true);
	      break;
	    }
	  else
	    {
	      gtry *gtry;
	      gimple_seq seq;
	      enum gimple_try_flags kind;

	      if (gimple_wce_cleanup_eh_only (wce))
		kind = GIMPLE_TRY_CATCH;
	      else
		kind = GIMPLE_TRY_FINALLY;
	      seq = gsi_split_seq_after (iter);

	      gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
	      /* Do not use gsi_replace here, as it may scan operands.
		 We want to do a simple structural modification only.  */
	      gsi_set_stmt (&iter, gtry);
	      /* Continue scanning inside the new try body, since it may
		 contain further WCE markers.  */
	      iter = gsi_start (gtry->eval);
	    }
	}
      else
	gsi_next (&iter);
    }

  gimplify_seq_add_seq (pre_p, body_sequence);
  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }
  else
    {
      *expr_p = NULL;
      return GS_ALL_DONE;
    }
}
5393
/* Insert a cleanup marker for gimplify_cleanup_point_expr.  CLEANUP
   is the cleanup action required.  EH_ONLY is true if the cleanup should
   only be executed if an exception is thrown, not on normal exit.
   VAR is the temporary whose initialization necessitates the cleanup;
   it is only used to suppress an uninitialized-use warning in the
   conditional case.  */

static void
gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p)
{
  gimple *wce;
  gimple_seq cleanup_stmts = NULL;

  /* Errors can result in improperly nested cleanups.  Which results in
     confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR.  */
  if (seen_error ())
    return;

  if (gimple_conditional_context ())
    {
      /* If we're in a conditional context, this is more complex.  We only
	 want to run the cleanup if we actually ran the initialization that
	 necessitates it, but we want to run it after the end of the
	 conditional context.  So we wrap the try/finally around the
	 condition and use a flag to determine whether or not to actually
	 run the destructor.  Thus

	   test ? f(A()) : 0

	 becomes (approximately)

	   flag = 0;
	   try {
	     if (test) { A::A(temp); flag = 1; val = f(temp); }
	     else { val = 0; }
	   } finally {
	     if (flag) A::~A(temp);
	   }
	   val
      */
      tree flag = create_tmp_var (boolean_type_node, "cleanup");
      gassign *ffalse = gimple_build_assign (flag, boolean_false_node);
      gassign *ftrue = gimple_build_assign (flag, boolean_true_node);

      /* Guard the cleanup with the flag.  */
      cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
      gimplify_stmt (&cleanup, &cleanup_stmts);
      wce = gimple_build_wce (cleanup_stmts);

      /* flag = 0 and the WCE go to the enclosing cleanup point (outside
	 the condition); flag = 1 is emitted here, on the conditional
	 path that actually runs the initialization.  */
      gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
      gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
      gimplify_seq_add_stmt (pre_p, ftrue);

      /* Because of this manipulation, and the EH edges that jump
	 threading cannot redirect, the temporary (VAR) will appear
	 to be used uninitialized.  Don't warn.  */
      TREE_NO_WARNING (var) = 1;
    }
  else
    {
      /* Unconditional case: just emit the WCE marker here.  */
      gimplify_stmt (&cleanup, &cleanup_stmts);
      wce = gimple_build_wce (cleanup_stmts);
      gimple_wce_set_cleanup_eh_only (wce, eh_only);
      gimplify_seq_add_stmt (pre_p, wce);
    }
}
5456
/* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR.
   The temporary slot is registered, the initializer is evaluated into it,
   and any cleanup (destructor and/or stack-reuse clobber) is pushed.
   *EXPR_P is replaced with the slot itself.  */

static enum gimplify_status
gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree targ = *expr_p;
  tree temp = TARGET_EXPR_SLOT (targ);
  tree init = TARGET_EXPR_INITIAL (targ);
  enum gimplify_status ret;

  if (init)
    {
      tree cleanup = NULL_TREE;

      /* TARGET_EXPR temps aren't part of the enclosing block, so add it
	 to the temps list.  Handle also variable length TARGET_EXPRs.  */
      if (TREE_CODE (DECL_SIZE (temp)) != INTEGER_CST)
	{
	  if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
	    gimplify_type_sizes (TREE_TYPE (temp), pre_p);
	  gimplify_vla_decl (temp, pre_p);
	}
      else
	gimple_add_tmp_var (temp);

      /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
	 expression is supposed to initialize the slot.  */
      if (VOID_TYPE_P (TREE_TYPE (init)))
	ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
      else
	{
	  /* Otherwise build 'temp = init' and gimplify that; the INIT_EXPR
	     node itself is dead afterwards and can be returned to the GC.  */
	  tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
	  init = init_expr;
	  ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
	  init = NULL;
	  ggc_free (init_expr);
	}
      if (ret == GS_ERROR)
	{
	  /* PR c++/28266 Make sure this is expanded only once. */
	  TARGET_EXPR_INITIAL (targ) = NULL_TREE;
	  return GS_ERROR;
	}
      if (init)
	gimplify_and_add (init, pre_p);

      /* If needed, push the cleanup for the temp.  */
      if (TARGET_EXPR_CLEANUP (targ))
	{
	  if (CLEANUP_EH_ONLY (targ))
	    /* EH-only cleanups are pushed immediately; they must not be
	       combined with the normal-path clobber below.  */
	    gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
				 CLEANUP_EH_ONLY (targ), pre_p);
	  else
	    cleanup = TARGET_EXPR_CLEANUP (targ);
	}

      /* Add a clobber for the temporary going out of scope, like
	 gimplify_bind_expr.  */
      if (gimplify_ctxp->in_cleanup_point_expr
	  && needs_to_live_in_memory (temp)
	  && flag_stack_reuse == SR_ALL)
	{
	  /* An empty volatile CONSTRUCTOR assignment is GIMPLE's clobber
	     marker: it tells later passes the slot is dead.  */
	  tree clobber = build_constructor (TREE_TYPE (temp),
					    NULL);
	  TREE_THIS_VOLATILE (clobber) = true;
	  clobber = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, clobber);
	  /* The clobber runs after the destructor, if any.  */
	  if (cleanup)
	    cleanup = build2 (COMPOUND_EXPR, void_type_node, cleanup,
			      clobber);
	  else
	    cleanup = clobber;
	}

      if (cleanup)
	gimple_push_cleanup (temp, cleanup, false, pre_p);

      /* Only expand this once.  */
      TREE_OPERAND (targ, 3) = init;
      TARGET_EXPR_INITIAL (targ) = NULL_TREE;
    }
  else
    /* We should have expanded this before.  */
    gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));

  *expr_p = temp;
  return GS_OK;
}
5544
5545 /* Gimplification of expression trees. */
5546
5547 /* Gimplify an expression which appears at statement context. The
5548 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is
5549 NULL, a new sequence is allocated.
5550
5551 Return true if we actually added a statement to the queue. */
5552
5553 bool
5554 gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
5555 {
5556 gimple_seq_node last;
5557
5558 last = gimple_seq_last (*seq_p);
5559 gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
5560 return last != gimple_seq_last (*seq_p);
5561 }
5562
/* Add FIRSTPRIVATE entries for DECL in the OpenMP the surrounding parallels
   to CTX.  If entries already exist, force them to be some flavor of private.
   If there is no enclosing parallel, do nothing.  */

void
omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
{
  splay_tree_node n;

  if (decl == NULL || !DECL_P (decl) || ctx->region_type == ORT_NONE)
    return;

  /* Walk outwards through the enclosing OMP contexts.  */
  do
    {
      n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
      if (n != NULL)
	{
	  /* An existing entry: demote SHARED to FIRSTPRIVATE, restrict a
	     MAP to "to" only; any other class is already private enough,
	     and outer contexts need not be touched.  */
	  if (n->value & GOVD_SHARED)
	    n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
	  else if (n->value & GOVD_MAP)
	    n->value |= GOVD_MAP_TO_ONLY;
	  else
	    return;
	}
      else if ((ctx->region_type & ORT_TARGET) != 0)
	{
	  /* In a target region, scalars may be firstprivatized directly
	     (OpenMP 4.5 semantics); otherwise map the variable "to".  */
	  if (ctx->target_map_scalars_firstprivate)
	    omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
	  else
	    omp_add_variable (ctx, decl, GOVD_MAP | GOVD_MAP_TO_ONLY);
	}
      else if (ctx->region_type != ORT_WORKSHARE
	       && ctx->region_type != ORT_SIMD
	       && ctx->region_type != ORT_ACC
	       && !(ctx->region_type & ORT_TARGET_DATA))
	omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);

      ctx = ctx->outer_context;
    }
  while (ctx);
}
5604
/* Similarly for each of the type sizes of TYPE.  Recurses through
   component and element types so that every size/offset expression a
   variable-sized type depends on becomes firstprivate in CTX.  */

static void
omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
{
  if (type == NULL || type == error_mark_node)
    return;
  type = TYPE_MAIN_VARIANT (type);

  /* Each type is processed at most once per context (the set also
     guards against cycles through pointer types).  */
  if (ctx->privatized_types->add (type))
    return;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
      /* Range bounds may be variable for e.g. Fortran/Ada types.  */
      omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
      omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
      break;

    case ARRAY_TYPE:
      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
      omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
	tree field;
	for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	  if (TREE_CODE (field) == FIELD_DECL)
	    {
	      omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
	      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
	    }
      }
      break;

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
      break;

    default:
      break;
    }

  /* Finally handle the overall size expressions of TYPE itself, plus
     any language-specific extras.  */
  omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
  omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
  lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
}
5660
/* Add an entry for DECL in the OMP context CTX with FLAGS (a mask of
   GOVD_* bits).  Handles the extra bookkeeping required for
   variable-sized decls and privatized-by-reference decls.  */

static void
omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
{
  splay_tree_node n;
  unsigned int nflags;
  tree t;

  if (error_operand_p (decl) || ctx->region_type == ORT_NONE)
    return;

  /* Never elide decls whose type has TREE_ADDRESSABLE set.  This means
     there are constructors involved somewhere.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (decl))
      || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl)))
    flags |= GOVD_SEEN;

  n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
  if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
    {
      /* We shouldn't be re-adding the decl with the same data
	 sharing class.  */
      gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
      nflags = n->value | flags;
      /* The only combination of data sharing classes we should see is
	 FIRSTPRIVATE and LASTPRIVATE.  However, OpenACC permits
	 reduction variables to be used in data sharing clauses.  */
      gcc_assert ((ctx->region_type & ORT_ACC) != 0
		  || ((nflags & GOVD_DATA_SHARE_CLASS)
		      == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE))
		  || (flags & GOVD_DATA_SHARE_CLASS) == 0);
      n->value = nflags;
      return;
    }

  /* When adding a variable-sized variable, we have to handle all sorts
     of additional bits of data: the pointer replacement variable, and
     the parameters of the type.  */
  if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
    {
      /* Add the pointer replacement variable as PRIVATE if the variable
	 replacement is private, else FIRSTPRIVATE since we'll need the
	 address of the original variable either for SHARED, or for the
	 copy into or out of the context.  */
      if (!(flags & GOVD_LOCAL))
	{
	  if (flags & GOVD_MAP)
	    nflags = GOVD_MAP | GOVD_MAP_TO_ONLY | GOVD_EXPLICIT;
	  else if (flags & GOVD_PRIVATE)
	    nflags = GOVD_PRIVATE;
	  else if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
		   && (flags & GOVD_FIRSTPRIVATE))
	    nflags = GOVD_PRIVATE | GOVD_EXPLICIT;
	  else
	    nflags = GOVD_FIRSTPRIVATE;
	  nflags |= flags & GOVD_SEEN;
	  /* The VLA decl's DECL_VALUE_EXPR is *ptr; register PTR itself
	     with the class computed above.  */
	  t = DECL_VALUE_EXPR (decl);
	  gcc_assert (TREE_CODE (t) == INDIRECT_REF);
	  t = TREE_OPERAND (t, 0);
	  gcc_assert (DECL_P (t));
	  omp_add_variable (ctx, t, nflags);
	}

      /* Add all of the variable and type parameters (which should have
	 been gimplified to a formal temporary) as FIRSTPRIVATE.  */
      omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
      omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));

      /* The variable-sized variable itself is never SHARED, only some form
	 of PRIVATE.  The sharing would take place via the pointer variable
	 which we remapped above.  */
      if (flags & GOVD_SHARED)
	flags = GOVD_PRIVATE | GOVD_DEBUG_PRIVATE
		| (flags & (GOVD_SEEN | GOVD_EXPLICIT));

      /* We're going to make use of the TYPE_SIZE_UNIT at least in the
	 alloca statement we generate for the variable, so make sure it
	 is available.  This isn't automatically needed for the SHARED
	 case, since we won't be allocating local storage then.
	 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
	 in this case omp_notice_variable will be called later
	 on when it is gimplified.  */
      else if (! (flags & (GOVD_LOCAL | GOVD_MAP))
	       && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl))))
	omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
    }
  else if ((flags & (GOVD_MAP | GOVD_LOCAL)) == 0
	   && lang_hooks.decls.omp_privatize_by_reference (decl))
    {
      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));

      /* Similar to the direct variable sized case above, we'll need the
	 size of references being privatized.  */
      if ((flags & GOVD_SHARED) == 0)
	{
	  t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
	  if (DECL_P (t))
	    omp_notice_variable (ctx, t, true);
	}
    }

  /* Record (or update) the final flags for DECL in this context.  */
  if (n != NULL)
    n->value |= flags;
  else
    splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
}
5769
/* Notice a threadprivate variable DECL used in OMP context CTX.
   This just prints out diagnostics about threadprivate variable uses
   in untied tasks and target regions.  If DECL2 is non-NULL, prevent
   this warning on that variable.  Always returns false: threadprivate
   variables are never remapped.  */

static bool
omp_notice_threadprivate_variable (struct gimplify_omp_ctx *ctx, tree decl,
				   tree decl2)
{
  splay_tree_node n;
  struct gimplify_omp_ctx *octx;

  /* Threadprivate variables may not appear in any enclosing target
     region; report each such region once, then insert a dummy entry
     so the error is not repeated.  */
  for (octx = ctx; octx; octx = octx->outer_context)
    if ((octx->region_type & ORT_TARGET) != 0)
      {
	n = splay_tree_lookup (octx->variables, (splay_tree_key)decl);
	if (n == NULL)
	  {
	    error ("threadprivate variable %qE used in target region",
		   DECL_NAME (decl));
	    error_at (octx->location, "enclosing target region");
	    splay_tree_insert (octx->variables, (splay_tree_key)decl, 0);
	  }
	if (decl2)
	  splay_tree_insert (octx->variables, (splay_tree_key)decl2, 0);
      }

  /* Beyond target regions, only untied tasks are problematic: the
     executing thread may change across scheduling points.  */
  if (ctx->region_type != ORT_UNTIED_TASK)
    return false;
  n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
  if (n == NULL)
    {
      error ("threadprivate variable %qE used in untied task",
	     DECL_NAME (decl));
      error_at (ctx->location, "enclosing task");
      splay_tree_insert (ctx->variables, (splay_tree_key)decl, 0);
    }
  if (decl2)
    splay_tree_insert (ctx->variables, (splay_tree_key)decl2, 0);
  return false;
}
5811
/* Determine outer default flags for DECL mentioned in an OMP region
   but not declared in an enclosing clause.  FLAGS are the GOVD_* bits
   collected so far; the chosen data-sharing class is OR'ed in and the
   result returned.  IN_CODE is true when DECL is used by real code
   (propagated to omp_notice_variable for outer contexts).

   ??? Some compiler-generated variables (like SAVE_EXPRs) could be
   remapped firstprivate instead of shared.  To some extent this is
   addressed in omp_firstprivatize_type_sizes, but not
   effectively.  */

static unsigned
omp_default_clause (struct gimplify_omp_ctx *ctx, tree decl,
		    bool in_code, unsigned flags)
{
  enum omp_clause_default_kind default_kind = ctx->default_kind;
  enum omp_clause_default_kind kind;

  /* A language-predetermined sharing (e.g. const variables) overrides
     the region's default(...) clause.  */
  kind = lang_hooks.decls.omp_predetermined_sharing (decl);
  if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
    default_kind = kind;

  switch (default_kind)
    {
    case OMP_CLAUSE_DEFAULT_NONE:
      {
	const char *rtype;

	if (ctx->region_type & ORT_PARALLEL)
	  rtype = "parallel";
	else if (ctx->region_type & ORT_TASK)
	  rtype = "task";
	else if (ctx->region_type & ORT_TEAMS)
	  rtype = "teams";
	else
	  gcc_unreachable ();

	/* default(none) requires an explicit clause for every variable;
	   report the omission, then fall through and treat it as shared
	   so compilation can continue.  */
	error ("%qE not specified in enclosing %s",
	       DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rtype);
	error_at (ctx->location, "enclosing %s", rtype);
      }
      /* FALLTHRU */
    case OMP_CLAUSE_DEFAULT_SHARED:
      flags |= GOVD_SHARED;
      break;
    case OMP_CLAUSE_DEFAULT_PRIVATE:
      flags |= GOVD_PRIVATE;
      break;
    case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
      flags |= GOVD_FIRSTPRIVATE;
      break;
    case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
      /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED.  */
      gcc_assert ((ctx->region_type & ORT_TASK) != 0);
      /* For a task with no default clause, inspect the enclosing
	 contexts: shared if an outer parallel/teams shares it,
	 firstprivate otherwise (OpenMP implicit-task rules).  */
      if (struct gimplify_omp_ctx *octx = ctx->outer_context)
	{
	  omp_notice_variable (octx, decl, in_code);
	  for (; octx; octx = octx->outer_context)
	    {
	      splay_tree_node n2;

	      n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
	      /* Target regions without an explicit sharing class for DECL
		 don't determine the answer; keep looking outwards.  */
	      if ((octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)) != 0
		  && (n2 == NULL || (n2->value & GOVD_DATA_SHARE_CLASS) == 0))
		continue;
	      if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
		{
		  flags |= GOVD_FIRSTPRIVATE;
		  goto found_outer;
		}
	      if ((octx->region_type & (ORT_PARALLEL | ORT_TEAMS)) != 0)
		{
		  flags |= GOVD_SHARED;
		  goto found_outer;
		}
	    }
	}

      /* No outer context decided: locals and parameters of the current
	 function default to firstprivate, globals to shared.  */
      if (TREE_CODE (decl) == PARM_DECL
	  || (!is_global_var (decl)
	      && DECL_CONTEXT (decl) == current_function_decl))
	flags |= GOVD_FIRSTPRIVATE;
      else
	flags |= GOVD_SHARED;
    found_outer:
      break;

    default:
      gcc_unreachable ();
    }

  return flags;
}
5902
5903 /* Record the fact that DECL was used within the OMP context CTX.
5904 IN_CODE is true when real code uses DECL, and false when we should
5905 merely emit default(none) errors. Return true if DECL is going to
5906 be remapped and thus DECL shouldn't be gimplified into its
5907 DECL_VALUE_EXPR (if any). */
5908
static bool
omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
{
  splay_tree_node n;
  unsigned flags = in_code ? GOVD_SEEN : 0;
  bool ret = false, shared;

  if (error_operand_p (decl))
    return false;

  /* Outside of any OMP region only the language hook decides whether
     DECL's DECL_VALUE_EXPR should be disregarded.  */
  if (ctx->region_type == ORT_NONE)
    return lang_hooks.decls.omp_disregard_value_expr (decl, false);

  /* Threadprivate variables are predetermined.  */
  if (is_global_var (decl))
    {
      if (DECL_THREAD_LOCAL_P (decl))
	return omp_notice_threadprivate_variable (ctx, decl, NULL_TREE);

      if (DECL_HAS_VALUE_EXPR_P (decl))
	{
	  tree value = get_base_address (DECL_VALUE_EXPR (decl));

	  /* A global whose value expression is based on a thread-local
	     decl is handled like a threadprivate variable as well.  */
	  if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
	    return omp_notice_threadprivate_variable (ctx, decl, value);
	}
    }

  n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
  if ((ctx->region_type & ORT_TARGET) != 0)
    {
      /* DECL is referenced inside a target region; if it has no entry
	 yet, decide how it should be mapped or firstprivatized.  */
      ret = lang_hooks.decls.omp_disregard_value_expr (decl, true);
      if (n == NULL)
	{
	  unsigned nflags = flags;
	  if (ctx->target_map_pointers_as_0len_arrays
	      || ctx->target_map_scalars_firstprivate)
	    {
	      bool is_declare_target = false;
	      bool is_scalar = false;
	      /* An offloadable global with no overriding data-sharing
		 clause in any enclosing context is "declare target";
		 such decls get neither a map nor firstprivate below.  */
	      if (is_global_var (decl)
		  && varpool_node::get_create (decl)->offloadable)
		{
		  struct gimplify_omp_ctx *octx;
		  for (octx = ctx->outer_context;
		       octx; octx = octx->outer_context)
		    {
		      n = splay_tree_lookup (octx->variables,
					     (splay_tree_key)decl);
		      if (n
			  && (n->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED
			  && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
			break;
		    }
		  is_declare_target = octx == NULL;
		}
	      if (!is_declare_target && ctx->target_map_scalars_firstprivate)
		{
		  /* Strip REFERENCE_TYPE and COMPLEX_TYPE wrappers;
		     integral, floating point and pointer types count
		     as scalars here.  */
		  tree type = TREE_TYPE (decl);
		  if (TREE_CODE (type) == REFERENCE_TYPE)
		    type = TREE_TYPE (type);
		  if (TREE_CODE (type) == COMPLEX_TYPE)
		    type = TREE_TYPE (type);
		  if (INTEGRAL_TYPE_P (type)
		      || SCALAR_FLOAT_TYPE_P (type)
		      || TREE_CODE (type) == POINTER_TYPE)
		    is_scalar = true;
		}
	      if (is_declare_target)
		;
	      else if (ctx->target_map_pointers_as_0len_arrays
		       && (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
			   || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
			       && TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
				  == POINTER_TYPE)))
		nflags |= GOVD_MAP | GOVD_MAP_0LEN_ARRAY;
	      else if (is_scalar)
		nflags |= GOVD_FIRSTPRIVATE;
	    }

	  struct gimplify_omp_ctx *octx = ctx->outer_context;
	  if ((ctx->region_type & ORT_ACC) && octx)
	    {
	      /* Look in outer OpenACC contexts, to see if there's a
		 data attribute for this variable.  */
	      omp_notice_variable (octx, decl, in_code);

	      for (; octx; octx = octx->outer_context)
		{
		  /* Stop at the first non-target context.  */
		  if (!(octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)))
		    break;
		  splay_tree_node n2
		    = splay_tree_lookup (octx->variables,
					 (splay_tree_key) decl);
		  if (n2)
		    {
		      nflags |= GOVD_MAP;
		      goto found_outer;
		    }
		}
	    }

	  {
	    tree type = TREE_TYPE (decl);

	    /* If nothing above selected a treatment (nflags still equals
	       the incoming flags), fall back to an implicit map, after
	       diagnosing unmappable types.  */
	    if (nflags == flags
		&& gimplify_omp_ctxp->target_firstprivatize_array_bases
		&& lang_hooks.decls.omp_privatize_by_reference (decl))
	      type = TREE_TYPE (type);
	    if (nflags == flags
		&& !lang_hooks.types.omp_mappable_type (type))
	      {
		error ("%qD referenced in target region does not have "
		       "a mappable type", decl);
		nflags |= GOVD_MAP | GOVD_EXPLICIT;
	      }
	    else if (nflags == flags)
	      nflags |= GOVD_MAP;
	  }
	found_outer:
	  omp_add_variable (ctx, decl, nflags);
	}
      else
	{
	  /* If nothing changed, there's nothing left to do.  */
	  if ((n->value & flags) == flags)
	    return ret;
	  n->value |= flags;
	}
      goto do_outer;
    }

  if (n == NULL)
    {
      /* Regions without default data-sharing semantics just forward
	 the notice to the enclosing context.  */
      if (ctx->region_type == ORT_WORKSHARE
	  || ctx->region_type == ORT_SIMD
	  || ctx->region_type == ORT_ACC
	  || (ctx->region_type & ORT_TARGET_DATA) != 0)
	goto do_outer;

      flags = omp_default_clause (ctx, decl, in_code, flags);

      if ((flags & GOVD_PRIVATE)
	  && lang_hooks.decls.omp_private_outer_ref (decl))
	flags |= GOVD_PRIVATE_OUTER_REF;

      omp_add_variable (ctx, decl, flags);

      shared = (flags & GOVD_SHARED) != 0;
      ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
      goto do_outer;
    }

  /* For a variable-sized decl seen for the first time, also mark the
     artificial pointer decl underlying its DECL_VALUE_EXPR as seen.  */
  if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
      && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
      && DECL_SIZE (decl)
      && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
    {
      splay_tree_node n2;
      tree t = DECL_VALUE_EXPR (decl);
      gcc_assert (TREE_CODE (t) == INDIRECT_REF);
      t = TREE_OPERAND (t, 0);
      gcc_assert (DECL_P (t));
      n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
      n2->value |= GOVD_SEEN;
    }

  shared = ((flags | n->value) & GOVD_SHARED) != 0;
  ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);

  /* If nothing changed, there's nothing left to do.  */
  if ((n->value & flags) == flags)
    return ret;
  flags |= n->value;
  n->value = flags;

 do_outer:
  /* If the variable is private in the current context, then we don't
     need to propagate anything to an outer context.  */
  if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
    return ret;
  /* Linear/lastprivate clauses flagged as having no outer reference
     likewise stop propagation here.  */
  if ((flags & (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
      == (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
    return ret;
  if ((flags & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
		| GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
      == (GOVD_LASTPRIVATE | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
    return ret;
  /* Propagate the notice to the enclosing context.  */
  if (ctx->outer_context
      && omp_notice_variable (ctx->outer_context, decl, in_code))
    return true;
  return ret;
}
6102
6103 /* Verify that DECL is private within CTX. If there's specific information
6104 to the contrary in the innermost scope, generate an error. */
6105
6106 static bool
6107 omp_is_private (struct gimplify_omp_ctx *ctx, tree decl, int simd)
6108 {
6109 splay_tree_node n;
6110
6111 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6112 if (n != NULL)
6113 {
6114 if (n->value & GOVD_SHARED)
6115 {
6116 if (ctx == gimplify_omp_ctxp)
6117 {
6118 if (simd)
6119 error ("iteration variable %qE is predetermined linear",
6120 DECL_NAME (decl));
6121 else
6122 error ("iteration variable %qE should be private",
6123 DECL_NAME (decl));
6124 n->value = GOVD_PRIVATE;
6125 return true;
6126 }
6127 else
6128 return false;
6129 }
6130 else if ((n->value & GOVD_EXPLICIT) != 0
6131 && (ctx == gimplify_omp_ctxp
6132 || (ctx->region_type == ORT_COMBINED_PARALLEL
6133 && gimplify_omp_ctxp->outer_context == ctx)))
6134 {
6135 if ((n->value & GOVD_FIRSTPRIVATE) != 0)
6136 error ("iteration variable %qE should not be firstprivate",
6137 DECL_NAME (decl));
6138 else if ((n->value & GOVD_REDUCTION) != 0)
6139 error ("iteration variable %qE should not be reduction",
6140 DECL_NAME (decl));
6141 else if (simd == 0 && (n->value & GOVD_LINEAR) != 0)
6142 error ("iteration variable %qE should not be linear",
6143 DECL_NAME (decl));
6144 else if (simd == 1 && (n->value & GOVD_LASTPRIVATE) != 0)
6145 error ("iteration variable %qE should not be lastprivate",
6146 DECL_NAME (decl));
6147 else if (simd && (n->value & GOVD_PRIVATE) != 0)
6148 error ("iteration variable %qE should not be private",
6149 DECL_NAME (decl));
6150 else if (simd == 2 && (n->value & GOVD_LINEAR) != 0)
6151 error ("iteration variable %qE is predetermined linear",
6152 DECL_NAME (decl));
6153 }
6154 return (ctx == gimplify_omp_ctxp
6155 || (ctx->region_type == ORT_COMBINED_PARALLEL
6156 && gimplify_omp_ctxp->outer_context == ctx));
6157 }
6158
6159 if (ctx->region_type != ORT_WORKSHARE
6160 && ctx->region_type != ORT_SIMD
6161 && ctx->region_type != ORT_ACC)
6162 return false;
6163 else if (ctx->outer_context)
6164 return omp_is_private (ctx->outer_context, decl, simd);
6165 return false;
6166 }
6167
6168 /* Return true if DECL is private within a parallel region
6169 that binds to the current construct's context or in parallel
6170 region's REDUCTION clause. */
6171
6172 static bool
6173 omp_check_private (struct gimplify_omp_ctx *ctx, tree decl, bool copyprivate)
6174 {
6175 splay_tree_node n;
6176
6177 do
6178 {
6179 ctx = ctx->outer_context;
6180 if (ctx == NULL)
6181 {
6182 if (is_global_var (decl))
6183 return false;
6184
6185 /* References might be private, but might be shared too,
6186 when checking for copyprivate, assume they might be
6187 private, otherwise assume they might be shared. */
6188 if (copyprivate)
6189 return true;
6190
6191 if (lang_hooks.decls.omp_privatize_by_reference (decl))
6192 return false;
6193
6194 /* Treat C++ privatized non-static data members outside
6195 of the privatization the same. */
6196 if (omp_member_access_dummy_var (decl))
6197 return false;
6198
6199 return true;
6200 }
6201
6202 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6203
6204 if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
6205 && (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0))
6206 continue;
6207
6208 if (n != NULL)
6209 {
6210 if ((n->value & GOVD_LOCAL) != 0
6211 && omp_member_access_dummy_var (decl))
6212 return false;
6213 return (n->value & GOVD_SHARED) == 0;
6214 }
6215 }
6216 while (ctx->region_type == ORT_WORKSHARE
6217 || ctx->region_type == ORT_SIMD
6218 || ctx->region_type == ORT_ACC);
6219 return false;
6220 }
6221
6222 /* Return true if the CTX is combined with distribute and thus
6223 lastprivate can't be supported. */
6224
6225 static bool
6226 omp_no_lastprivate (struct gimplify_omp_ctx *ctx)
6227 {
6228 do
6229 {
6230 if (ctx->outer_context == NULL)
6231 return false;
6232 ctx = ctx->outer_context;
6233 switch (ctx->region_type)
6234 {
6235 case ORT_WORKSHARE:
6236 if (!ctx->combined_loop)
6237 return false;
6238 if (ctx->distribute)
6239 return lang_GNU_Fortran ();
6240 break;
6241 case ORT_COMBINED_PARALLEL:
6242 break;
6243 case ORT_COMBINED_TEAMS:
6244 return lang_GNU_Fortran ();
6245 default:
6246 return false;
6247 }
6248 }
6249 while (1);
6250 }
6251
6252 /* Callback for walk_tree to find a DECL_EXPR for the given DECL. */
6253
6254 static tree
6255 find_decl_expr (tree *tp, int *walk_subtrees, void *data)
6256 {
6257 tree t = *tp;
6258
6259 /* If this node has been visited, unmark it and keep looking. */
6260 if (TREE_CODE (t) == DECL_EXPR && DECL_EXPR_DECL (t) == (tree) data)
6261 return t;
6262
6263 if (IS_TYPE_OR_DECL_P (t))
6264 *walk_subtrees = 0;
6265 return NULL_TREE;
6266 }
6267
/* Scan the OMP clauses in *LIST_P, installing mappings into a new
   omp context and, where appropriate, into enclosing omp contexts.  */
6270
6271 static void
6272 gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
6273 enum omp_region_type region_type,
6274 enum tree_code code)
6275 {
6276 struct gimplify_omp_ctx *ctx, *outer_ctx;
6277 tree c;
6278 hash_map<tree, tree> *struct_map_to_clause = NULL;
6279 tree *prev_list_p = NULL;
6280
6281 ctx = new_omp_context (region_type);
6282 outer_ctx = ctx->outer_context;
6283 if (code == OMP_TARGET && !lang_GNU_Fortran ())
6284 {
6285 ctx->target_map_pointers_as_0len_arrays = true;
6286 /* FIXME: For Fortran we want to set this too, when
6287 the Fortran FE is updated to OpenMP 4.5. */
6288 ctx->target_map_scalars_firstprivate = true;
6289 }
6290 if (!lang_GNU_Fortran ())
6291 switch (code)
6292 {
6293 case OMP_TARGET:
6294 case OMP_TARGET_DATA:
6295 case OMP_TARGET_ENTER_DATA:
6296 case OMP_TARGET_EXIT_DATA:
6297 ctx->target_firstprivatize_array_bases = true;
6298 default:
6299 break;
6300 }
6301
6302 while ((c = *list_p) != NULL)
6303 {
6304 bool remove = false;
6305 bool notice_outer = true;
6306 const char *check_non_private = NULL;
6307 unsigned int flags;
6308 tree decl;
6309
6310 switch (OMP_CLAUSE_CODE (c))
6311 {
6312 case OMP_CLAUSE_PRIVATE:
6313 flags = GOVD_PRIVATE | GOVD_EXPLICIT;
6314 if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
6315 {
6316 flags |= GOVD_PRIVATE_OUTER_REF;
6317 OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
6318 }
6319 else
6320 notice_outer = false;
6321 goto do_add;
6322 case OMP_CLAUSE_SHARED:
6323 flags = GOVD_SHARED | GOVD_EXPLICIT;
6324 goto do_add;
6325 case OMP_CLAUSE_FIRSTPRIVATE:
6326 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
6327 check_non_private = "firstprivate";
6328 goto do_add;
6329 case OMP_CLAUSE_LASTPRIVATE:
6330 flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
6331 check_non_private = "lastprivate";
6332 decl = OMP_CLAUSE_DECL (c);
6333 if (omp_no_lastprivate (ctx))
6334 {
6335 notice_outer = false;
6336 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
6337 }
6338 else if (error_operand_p (decl))
6339 goto do_add;
6340 else if (outer_ctx
6341 && (outer_ctx->region_type == ORT_COMBINED_PARALLEL
6342 || outer_ctx->region_type == ORT_COMBINED_TEAMS)
6343 && splay_tree_lookup (outer_ctx->variables,
6344 (splay_tree_key) decl) == NULL)
6345 {
6346 omp_add_variable (outer_ctx, decl, GOVD_SHARED | GOVD_SEEN);
6347 if (outer_ctx->outer_context)
6348 omp_notice_variable (outer_ctx->outer_context, decl, true);
6349 }
6350 else if (outer_ctx
6351 && (outer_ctx->region_type & ORT_TASK) != 0
6352 && outer_ctx->combined_loop
6353 && splay_tree_lookup (outer_ctx->variables,
6354 (splay_tree_key) decl) == NULL)
6355 {
6356 omp_add_variable (outer_ctx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
6357 if (outer_ctx->outer_context)
6358 omp_notice_variable (outer_ctx->outer_context, decl, true);
6359 }
6360 else if (outer_ctx
6361 && (outer_ctx->region_type == ORT_WORKSHARE
6362 || outer_ctx->region_type == ORT_ACC)
6363 && outer_ctx->combined_loop
6364 && splay_tree_lookup (outer_ctx->variables,
6365 (splay_tree_key) decl) == NULL
6366 && !omp_check_private (outer_ctx, decl, false))
6367 {
6368 omp_add_variable (outer_ctx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
6369 if (outer_ctx->outer_context
6370 && (outer_ctx->outer_context->region_type
6371 == ORT_COMBINED_PARALLEL)
6372 && splay_tree_lookup (outer_ctx->outer_context->variables,
6373 (splay_tree_key) decl) == NULL)
6374 {
6375 struct gimplify_omp_ctx *octx = outer_ctx->outer_context;
6376 omp_add_variable (octx, decl, GOVD_SHARED | GOVD_SEEN);
6377 if (octx->outer_context)
6378 omp_notice_variable (octx->outer_context, decl, true);
6379 }
6380 else if (outer_ctx->outer_context)
6381 omp_notice_variable (outer_ctx->outer_context, decl, true);
6382 }
6383 goto do_add;
6384 case OMP_CLAUSE_REDUCTION:
6385 flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
6386 /* OpenACC permits reductions on private variables. */
6387 if (!(region_type & ORT_ACC))
6388 check_non_private = "reduction";
6389 decl = OMP_CLAUSE_DECL (c);
6390 if (TREE_CODE (decl) == MEM_REF)
6391 {
6392 tree type = TREE_TYPE (decl);
6393 if (gimplify_expr (&TYPE_MAX_VALUE (TYPE_DOMAIN (type)), pre_p,
6394 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
6395 {
6396 remove = true;
6397 break;
6398 }
6399 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
6400 if (DECL_P (v))
6401 {
6402 omp_firstprivatize_variable (ctx, v);
6403 omp_notice_variable (ctx, v, true);
6404 }
6405 decl = TREE_OPERAND (decl, 0);
6406 if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
6407 {
6408 if (gimplify_expr (&TREE_OPERAND (decl, 1), pre_p,
6409 NULL, is_gimple_val, fb_rvalue)
6410 == GS_ERROR)
6411 {
6412 remove = true;
6413 break;
6414 }
6415 v = TREE_OPERAND (decl, 1);
6416 if (DECL_P (v))
6417 {
6418 omp_firstprivatize_variable (ctx, v);
6419 omp_notice_variable (ctx, v, true);
6420 }
6421 decl = TREE_OPERAND (decl, 0);
6422 }
6423 if (TREE_CODE (decl) == ADDR_EXPR
6424 || TREE_CODE (decl) == INDIRECT_REF)
6425 decl = TREE_OPERAND (decl, 0);
6426 }
6427 goto do_add_decl;
6428 case OMP_CLAUSE_LINEAR:
6429 if (gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c), pre_p, NULL,
6430 is_gimple_val, fb_rvalue) == GS_ERROR)
6431 {
6432 remove = true;
6433 break;
6434 }
6435 else
6436 {
6437 if (code == OMP_SIMD
6438 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
6439 {
6440 struct gimplify_omp_ctx *octx = outer_ctx;
6441 if (octx
6442 && octx->region_type == ORT_WORKSHARE
6443 && octx->combined_loop
6444 && !octx->distribute)
6445 {
6446 if (octx->outer_context
6447 && (octx->outer_context->region_type
6448 == ORT_COMBINED_PARALLEL))
6449 octx = octx->outer_context->outer_context;
6450 else
6451 octx = octx->outer_context;
6452 }
6453 if (octx
6454 && octx->region_type == ORT_WORKSHARE
6455 && octx->combined_loop
6456 && octx->distribute
6457 && !lang_GNU_Fortran ())
6458 {
6459 error_at (OMP_CLAUSE_LOCATION (c),
6460 "%<linear%> clause for variable other than "
6461 "loop iterator specified on construct "
6462 "combined with %<distribute%>");
6463 remove = true;
6464 break;
6465 }
6466 }
6467 /* For combined #pragma omp parallel for simd, need to put
6468 lastprivate and perhaps firstprivate too on the
6469 parallel. Similarly for #pragma omp for simd. */
6470 struct gimplify_omp_ctx *octx = outer_ctx;
6471 decl = NULL_TREE;
6472 if (omp_no_lastprivate (ctx))
6473 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
6474 do
6475 {
6476 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
6477 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
6478 break;
6479 decl = OMP_CLAUSE_DECL (c);
6480 if (error_operand_p (decl))
6481 {
6482 decl = NULL_TREE;
6483 break;
6484 }
6485 flags = GOVD_SEEN;
6486 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
6487 flags |= GOVD_FIRSTPRIVATE;
6488 if (!OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
6489 flags |= GOVD_LASTPRIVATE;
6490 if (octx
6491 && octx->region_type == ORT_WORKSHARE
6492 && octx->combined_loop)
6493 {
6494 if (octx->outer_context
6495 && (octx->outer_context->region_type
6496 == ORT_COMBINED_PARALLEL))
6497 octx = octx->outer_context;
6498 else if (omp_check_private (octx, decl, false))
6499 break;
6500 }
6501 else if (octx
6502 && (octx->region_type & ORT_TASK) != 0
6503 && octx->combined_loop)
6504 ;
6505 else if (octx
6506 && octx->region_type == ORT_COMBINED_PARALLEL
6507 && ctx->region_type == ORT_WORKSHARE
6508 && octx == outer_ctx)
6509 flags = GOVD_SEEN | GOVD_SHARED;
6510 else if (octx
6511 && octx->region_type == ORT_COMBINED_TEAMS)
6512 flags = GOVD_SEEN | GOVD_SHARED;
6513 else if (octx
6514 && octx->region_type == ORT_COMBINED_TARGET)
6515 {
6516 flags &= ~GOVD_LASTPRIVATE;
6517 if (flags == GOVD_SEEN)
6518 break;
6519 }
6520 else
6521 break;
6522 splay_tree_node on
6523 = splay_tree_lookup (octx->variables,
6524 (splay_tree_key) decl);
6525 if (on && (on->value & GOVD_DATA_SHARE_CLASS) != 0)
6526 {
6527 octx = NULL;
6528 break;
6529 }
6530 omp_add_variable (octx, decl, flags);
6531 if (octx->outer_context == NULL)
6532 break;
6533 octx = octx->outer_context;
6534 }
6535 while (1);
6536 if (octx
6537 && decl
6538 && (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
6539 || !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
6540 omp_notice_variable (octx, decl, true);
6541 }
6542 flags = GOVD_LINEAR | GOVD_EXPLICIT;
6543 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
6544 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
6545 {
6546 notice_outer = false;
6547 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
6548 }
6549 goto do_add;
6550
6551 case OMP_CLAUSE_MAP:
6552 decl = OMP_CLAUSE_DECL (c);
6553 if (error_operand_p (decl))
6554 remove = true;
6555 switch (code)
6556 {
6557 case OMP_TARGET:
6558 break;
6559 case OMP_TARGET_DATA:
6560 case OMP_TARGET_ENTER_DATA:
6561 case OMP_TARGET_EXIT_DATA:
6562 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
6563 || (OMP_CLAUSE_MAP_KIND (c)
6564 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
6565 /* For target {,enter ,exit }data only the array slice is
6566 mapped, but not the pointer to it. */
6567 remove = true;
6568 break;
6569 default:
6570 break;
6571 }
6572 if (remove)
6573 break;
6574 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
6575 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
6576 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
6577 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
6578 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
6579 {
6580 remove = true;
6581 break;
6582 }
6583 else if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
6584 || (OMP_CLAUSE_MAP_KIND (c)
6585 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
6586 && TREE_CODE (OMP_CLAUSE_SIZE (c)) != INTEGER_CST)
6587 {
6588 OMP_CLAUSE_SIZE (c)
6589 = get_initialized_tmp_var (OMP_CLAUSE_SIZE (c), pre_p, NULL);
6590 omp_add_variable (ctx, OMP_CLAUSE_SIZE (c),
6591 GOVD_FIRSTPRIVATE | GOVD_SEEN);
6592 }
6593 if (!DECL_P (decl))
6594 {
6595 tree d = decl, *pd;
6596 if (TREE_CODE (d) == ARRAY_REF)
6597 {
6598 while (TREE_CODE (d) == ARRAY_REF)
6599 d = TREE_OPERAND (d, 0);
6600 if (TREE_CODE (d) == COMPONENT_REF
6601 && TREE_CODE (TREE_TYPE (d)) == ARRAY_TYPE)
6602 decl = d;
6603 }
6604 pd = &OMP_CLAUSE_DECL (c);
6605 if (d == decl
6606 && TREE_CODE (decl) == INDIRECT_REF
6607 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
6608 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
6609 == REFERENCE_TYPE))
6610 {
6611 pd = &TREE_OPERAND (decl, 0);
6612 decl = TREE_OPERAND (decl, 0);
6613 }
6614 if (TREE_CODE (decl) == COMPONENT_REF)
6615 {
6616 while (TREE_CODE (decl) == COMPONENT_REF)
6617 decl = TREE_OPERAND (decl, 0);
6618 }
6619 if (gimplify_expr (pd, pre_p, NULL, is_gimple_lvalue, fb_lvalue)
6620 == GS_ERROR)
6621 {
6622 remove = true;
6623 break;
6624 }
6625 if (DECL_P (decl))
6626 {
6627 if (error_operand_p (decl))
6628 {
6629 remove = true;
6630 break;
6631 }
6632
6633 if (TYPE_SIZE_UNIT (TREE_TYPE (decl)) == NULL
6634 || (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (decl)))
6635 != INTEGER_CST))
6636 {
6637 error_at (OMP_CLAUSE_LOCATION (c),
6638 "mapping field %qE of variable length "
6639 "structure", OMP_CLAUSE_DECL (c));
6640 remove = true;
6641 break;
6642 }
6643
6644 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER)
6645 {
6646 /* Error recovery. */
6647 if (prev_list_p == NULL)
6648 {
6649 remove = true;
6650 break;
6651 }
6652 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
6653 {
6654 tree ch = OMP_CLAUSE_CHAIN (*prev_list_p);
6655 if (ch == NULL_TREE || OMP_CLAUSE_CHAIN (ch) != c)
6656 {
6657 remove = true;
6658 break;
6659 }
6660 }
6661 }
6662
6663 tree offset;
6664 HOST_WIDE_INT bitsize, bitpos;
6665 machine_mode mode;
6666 int unsignedp, reversep, volatilep = 0;
6667 tree base = OMP_CLAUSE_DECL (c);
6668 while (TREE_CODE (base) == ARRAY_REF)
6669 base = TREE_OPERAND (base, 0);
6670 if (TREE_CODE (base) == INDIRECT_REF)
6671 base = TREE_OPERAND (base, 0);
6672 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
6673 &mode, &unsignedp, &reversep,
6674 &volatilep, false);
6675 gcc_assert (base == decl
6676 && (offset == NULL_TREE
6677 || TREE_CODE (offset) == INTEGER_CST));
6678
6679 splay_tree_node n
6680 = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6681 bool ptr = (OMP_CLAUSE_MAP_KIND (c)
6682 == GOMP_MAP_ALWAYS_POINTER);
6683 if (n == NULL || (n->value & GOVD_MAP) == 0)
6684 {
6685 tree l = build_omp_clause (OMP_CLAUSE_LOCATION (c),
6686 OMP_CLAUSE_MAP);
6687 OMP_CLAUSE_SET_MAP_KIND (l, GOMP_MAP_STRUCT);
6688 OMP_CLAUSE_DECL (l) = decl;
6689 OMP_CLAUSE_SIZE (l) = size_int (1);
6690 if (struct_map_to_clause == NULL)
6691 struct_map_to_clause = new hash_map<tree, tree>;
6692 struct_map_to_clause->put (decl, l);
6693 if (ptr)
6694 {
6695 enum gomp_map_kind mkind
6696 = code == OMP_TARGET_EXIT_DATA
6697 ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
6698 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
6699 OMP_CLAUSE_MAP);
6700 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
6701 OMP_CLAUSE_DECL (c2)
6702 = unshare_expr (OMP_CLAUSE_DECL (c));
6703 OMP_CLAUSE_CHAIN (c2) = *prev_list_p;
6704 OMP_CLAUSE_SIZE (c2)
6705 = TYPE_SIZE_UNIT (ptr_type_node);
6706 OMP_CLAUSE_CHAIN (l) = c2;
6707 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
6708 {
6709 tree c4 = OMP_CLAUSE_CHAIN (*prev_list_p);
6710 tree c3
6711 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
6712 OMP_CLAUSE_MAP);
6713 OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
6714 OMP_CLAUSE_DECL (c3)
6715 = unshare_expr (OMP_CLAUSE_DECL (c4));
6716 OMP_CLAUSE_SIZE (c3)
6717 = TYPE_SIZE_UNIT (ptr_type_node);
6718 OMP_CLAUSE_CHAIN (c3) = *prev_list_p;
6719 OMP_CLAUSE_CHAIN (c2) = c3;
6720 }
6721 *prev_list_p = l;
6722 prev_list_p = NULL;
6723 }
6724 else
6725 {
6726 OMP_CLAUSE_CHAIN (l) = c;
6727 *list_p = l;
6728 list_p = &OMP_CLAUSE_CHAIN (l);
6729 }
6730 flags = GOVD_MAP | GOVD_EXPLICIT;
6731 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) || ptr)
6732 flags |= GOVD_SEEN;
6733 goto do_add_decl;
6734 }
6735 else
6736 {
6737 tree *osc = struct_map_to_clause->get (decl);
6738 tree *sc = NULL, *scp = NULL;
6739 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) || ptr)
6740 n->value |= GOVD_SEEN;
6741 offset_int o1, o2;
6742 if (offset)
6743 o1 = wi::to_offset (offset);
6744 else
6745 o1 = 0;
6746 if (bitpos)
6747 o1 = o1 + bitpos / BITS_PER_UNIT;
6748 for (sc = &OMP_CLAUSE_CHAIN (*osc);
6749 *sc != c; sc = &OMP_CLAUSE_CHAIN (*sc))
6750 if (ptr && sc == prev_list_p)
6751 break;
6752 else if (TREE_CODE (OMP_CLAUSE_DECL (*sc))
6753 != COMPONENT_REF
6754 && (TREE_CODE (OMP_CLAUSE_DECL (*sc))
6755 != INDIRECT_REF)
6756 && (TREE_CODE (OMP_CLAUSE_DECL (*sc))
6757 != ARRAY_REF))
6758 break;
6759 else
6760 {
6761 tree offset2;
6762 HOST_WIDE_INT bitsize2, bitpos2;
6763 base = OMP_CLAUSE_DECL (*sc);
6764 if (TREE_CODE (base) == ARRAY_REF)
6765 {
6766 while (TREE_CODE (base) == ARRAY_REF)
6767 base = TREE_OPERAND (base, 0);
6768 if (TREE_CODE (base) != COMPONENT_REF
6769 || (TREE_CODE (TREE_TYPE (base))
6770 != ARRAY_TYPE))
6771 break;
6772 }
6773 else if (TREE_CODE (base) == INDIRECT_REF
6774 && (TREE_CODE (TREE_OPERAND (base, 0))
6775 == COMPONENT_REF)
6776 && (TREE_CODE (TREE_TYPE
6777 (TREE_OPERAND (base, 0)))
6778 == REFERENCE_TYPE))
6779 base = TREE_OPERAND (base, 0);
6780 base = get_inner_reference (base, &bitsize2,
6781 &bitpos2, &offset2,
6782 &mode, &unsignedp,
6783 &reversep, &volatilep,
6784 false);
6785 if (base != decl)
6786 break;
6787 if (scp)
6788 continue;
6789 gcc_assert (offset == NULL_TREE
6790 || TREE_CODE (offset) == INTEGER_CST);
6791 tree d1 = OMP_CLAUSE_DECL (*sc);
6792 tree d2 = OMP_CLAUSE_DECL (c);
6793 while (TREE_CODE (d1) == ARRAY_REF)
6794 d1 = TREE_OPERAND (d1, 0);
6795 while (TREE_CODE (d2) == ARRAY_REF)
6796 d2 = TREE_OPERAND (d2, 0);
6797 if (TREE_CODE (d1) == INDIRECT_REF)
6798 d1 = TREE_OPERAND (d1, 0);
6799 if (TREE_CODE (d2) == INDIRECT_REF)
6800 d2 = TREE_OPERAND (d2, 0);
6801 while (TREE_CODE (d1) == COMPONENT_REF)
6802 if (TREE_CODE (d2) == COMPONENT_REF
6803 && TREE_OPERAND (d1, 1)
6804 == TREE_OPERAND (d2, 1))
6805 {
6806 d1 = TREE_OPERAND (d1, 0);
6807 d2 = TREE_OPERAND (d2, 0);
6808 }
6809 else
6810 break;
6811 if (d1 == d2)
6812 {
6813 error_at (OMP_CLAUSE_LOCATION (c),
6814 "%qE appears more than once in map "
6815 "clauses", OMP_CLAUSE_DECL (c));
6816 remove = true;
6817 break;
6818 }
6819 if (offset2)
6820 o2 = wi::to_offset (offset2);
6821 else
6822 o2 = 0;
6823 if (bitpos2)
6824 o2 = o2 + bitpos2 / BITS_PER_UNIT;
6825 if (wi::ltu_p (o1, o2)
6826 || (wi::eq_p (o1, o2) && bitpos < bitpos2))
6827 {
6828 if (ptr)
6829 scp = sc;
6830 else
6831 break;
6832 }
6833 }
6834 if (remove)
6835 break;
6836 OMP_CLAUSE_SIZE (*osc)
6837 = size_binop (PLUS_EXPR, OMP_CLAUSE_SIZE (*osc),
6838 size_one_node);
6839 if (ptr)
6840 {
6841 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
6842 OMP_CLAUSE_MAP);
6843 tree cl = NULL_TREE;
6844 enum gomp_map_kind mkind
6845 = code == OMP_TARGET_EXIT_DATA
6846 ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
6847 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
6848 OMP_CLAUSE_DECL (c2)
6849 = unshare_expr (OMP_CLAUSE_DECL (c));
6850 OMP_CLAUSE_CHAIN (c2) = scp ? *scp : *prev_list_p;
6851 OMP_CLAUSE_SIZE (c2)
6852 = TYPE_SIZE_UNIT (ptr_type_node);
6853 cl = scp ? *prev_list_p : c2;
6854 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
6855 {
6856 tree c4 = OMP_CLAUSE_CHAIN (*prev_list_p);
6857 tree c3
6858 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
6859 OMP_CLAUSE_MAP);
6860 OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
6861 OMP_CLAUSE_DECL (c3)
6862 = unshare_expr (OMP_CLAUSE_DECL (c4));
6863 OMP_CLAUSE_SIZE (c3)
6864 = TYPE_SIZE_UNIT (ptr_type_node);
6865 OMP_CLAUSE_CHAIN (c3) = *prev_list_p;
6866 if (!scp)
6867 OMP_CLAUSE_CHAIN (c2) = c3;
6868 else
6869 cl = c3;
6870 }
6871 if (scp)
6872 *scp = c2;
6873 if (sc == prev_list_p)
6874 {
6875 *sc = cl;
6876 prev_list_p = NULL;
6877 }
6878 else
6879 {
6880 *prev_list_p = OMP_CLAUSE_CHAIN (c);
6881 list_p = prev_list_p;
6882 prev_list_p = NULL;
6883 OMP_CLAUSE_CHAIN (c) = *sc;
6884 *sc = cl;
6885 continue;
6886 }
6887 }
6888 else if (*sc != c)
6889 {
6890 *list_p = OMP_CLAUSE_CHAIN (c);
6891 OMP_CLAUSE_CHAIN (c) = *sc;
6892 *sc = c;
6893 continue;
6894 }
6895 }
6896 }
6897 if (!remove
6898 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_POINTER
6899 && OMP_CLAUSE_CHAIN (c)
6900 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c)) == OMP_CLAUSE_MAP
6901 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
6902 == GOMP_MAP_ALWAYS_POINTER))
6903 prev_list_p = list_p;
6904 break;
6905 }
6906 flags = GOVD_MAP | GOVD_EXPLICIT;
6907 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TO
6908 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TOFROM)
6909 flags |= GOVD_MAP_ALWAYS_TO;
6910 goto do_add;
6911
6912 case OMP_CLAUSE_DEPEND:
6913 if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK
6914 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE)
6915 {
6916 /* Nothing to do. OMP_CLAUSE_DECL will be lowered in
6917 omp-low.c. */
6918 break;
6919 }
6920 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
6921 {
6922 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
6923 NULL, is_gimple_val, fb_rvalue);
6924 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
6925 }
6926 if (error_operand_p (OMP_CLAUSE_DECL (c)))
6927 {
6928 remove = true;
6929 break;
6930 }
6931 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
6932 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
6933 is_gimple_val, fb_rvalue) == GS_ERROR)
6934 {
6935 remove = true;
6936 break;
6937 }
6938 break;
6939
6940 case OMP_CLAUSE_TO:
6941 case OMP_CLAUSE_FROM:
6942 case OMP_CLAUSE__CACHE_:
6943 decl = OMP_CLAUSE_DECL (c);
6944 if (error_operand_p (decl))
6945 {
6946 remove = true;
6947 break;
6948 }
6949 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
6950 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
6951 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
6952 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
6953 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
6954 {
6955 remove = true;
6956 break;
6957 }
6958 if (!DECL_P (decl))
6959 {
6960 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p,
6961 NULL, is_gimple_lvalue, fb_lvalue)
6962 == GS_ERROR)
6963 {
6964 remove = true;
6965 break;
6966 }
6967 break;
6968 }
6969 goto do_notice;
6970
6971 case OMP_CLAUSE_USE_DEVICE_PTR:
6972 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
6973 goto do_add;
6974 case OMP_CLAUSE_IS_DEVICE_PTR:
6975 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
6976 goto do_add;
6977
6978 do_add:
6979 decl = OMP_CLAUSE_DECL (c);
6980 do_add_decl:
6981 if (error_operand_p (decl))
6982 {
6983 remove = true;
6984 break;
6985 }
6986 if (DECL_NAME (decl) == NULL_TREE && (flags & GOVD_SHARED) == 0)
6987 {
6988 tree t = omp_member_access_dummy_var (decl);
6989 if (t)
6990 {
6991 tree v = DECL_VALUE_EXPR (decl);
6992 DECL_NAME (decl) = DECL_NAME (TREE_OPERAND (v, 1));
6993 if (outer_ctx)
6994 omp_notice_variable (outer_ctx, t, true);
6995 }
6996 }
6997 omp_add_variable (ctx, decl, flags);
6998 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6999 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
7000 {
7001 omp_add_variable (ctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
7002 GOVD_LOCAL | GOVD_SEEN);
7003 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c)
7004 && walk_tree (&OMP_CLAUSE_REDUCTION_INIT (c),
7005 find_decl_expr,
7006 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
7007 NULL) == NULL_TREE)
7008 omp_add_variable (ctx,
7009 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
7010 GOVD_LOCAL | GOVD_SEEN);
7011 gimplify_omp_ctxp = ctx;
7012 push_gimplify_context ();
7013
7014 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
7015 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
7016
7017 gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
7018 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
7019 pop_gimplify_context
7020 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
7021 push_gimplify_context ();
7022 gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
7023 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
7024 pop_gimplify_context
7025 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
7026 OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
7027 OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;
7028
7029 gimplify_omp_ctxp = outer_ctx;
7030 }
7031 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7032 && OMP_CLAUSE_LASTPRIVATE_STMT (c))
7033 {
7034 gimplify_omp_ctxp = ctx;
7035 push_gimplify_context ();
7036 if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
7037 {
7038 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
7039 NULL, NULL);
7040 TREE_SIDE_EFFECTS (bind) = 1;
7041 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
7042 OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
7043 }
7044 gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
7045 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
7046 pop_gimplify_context
7047 (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
7048 OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;
7049
7050 gimplify_omp_ctxp = outer_ctx;
7051 }
7052 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
7053 && OMP_CLAUSE_LINEAR_STMT (c))
7054 {
7055 gimplify_omp_ctxp = ctx;
7056 push_gimplify_context ();
7057 if (TREE_CODE (OMP_CLAUSE_LINEAR_STMT (c)) != BIND_EXPR)
7058 {
7059 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
7060 NULL, NULL);
7061 TREE_SIDE_EFFECTS (bind) = 1;
7062 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LINEAR_STMT (c);
7063 OMP_CLAUSE_LINEAR_STMT (c) = bind;
7064 }
7065 gimplify_and_add (OMP_CLAUSE_LINEAR_STMT (c),
7066 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
7067 pop_gimplify_context
7068 (gimple_seq_first_stmt (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c)));
7069 OMP_CLAUSE_LINEAR_STMT (c) = NULL_TREE;
7070
7071 gimplify_omp_ctxp = outer_ctx;
7072 }
7073 if (notice_outer)
7074 goto do_notice;
7075 break;
7076
7077 case OMP_CLAUSE_COPYIN:
7078 case OMP_CLAUSE_COPYPRIVATE:
7079 decl = OMP_CLAUSE_DECL (c);
7080 if (error_operand_p (decl))
7081 {
7082 remove = true;
7083 break;
7084 }
7085 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_COPYPRIVATE
7086 && !remove
7087 && !omp_check_private (ctx, decl, true))
7088 {
7089 remove = true;
7090 if (is_global_var (decl))
7091 {
7092 if (DECL_THREAD_LOCAL_P (decl))
7093 remove = false;
7094 else if (DECL_HAS_VALUE_EXPR_P (decl))
7095 {
7096 tree value = get_base_address (DECL_VALUE_EXPR (decl));
7097
7098 if (value
7099 && DECL_P (value)
7100 && DECL_THREAD_LOCAL_P (value))
7101 remove = false;
7102 }
7103 }
7104 if (remove)
7105 error_at (OMP_CLAUSE_LOCATION (c),
7106 "copyprivate variable %qE is not threadprivate"
7107 " or private in outer context", DECL_NAME (decl));
7108 }
7109 do_notice:
7110 if (outer_ctx)
7111 omp_notice_variable (outer_ctx, decl, true);
7112 if (check_non_private
7113 && region_type == ORT_WORKSHARE
7114 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
7115 || decl == OMP_CLAUSE_DECL (c)
7116 || (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
7117 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
7118 == ADDR_EXPR
7119 || (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
7120 == POINTER_PLUS_EXPR
7121 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND
7122 (OMP_CLAUSE_DECL (c), 0), 0))
7123 == ADDR_EXPR)))))
7124 && omp_check_private (ctx, decl, false))
7125 {
7126 error ("%s variable %qE is private in outer context",
7127 check_non_private, DECL_NAME (decl));
7128 remove = true;
7129 }
7130 break;
7131
7132 case OMP_CLAUSE_IF:
7133 if (OMP_CLAUSE_IF_MODIFIER (c) != ERROR_MARK
7134 && OMP_CLAUSE_IF_MODIFIER (c) != code)
7135 {
7136 const char *p[2];
7137 for (int i = 0; i < 2; i++)
7138 switch (i ? OMP_CLAUSE_IF_MODIFIER (c) : code)
7139 {
7140 case OMP_PARALLEL: p[i] = "parallel"; break;
7141 case OMP_TASK: p[i] = "task"; break;
7142 case OMP_TASKLOOP: p[i] = "taskloop"; break;
7143 case OMP_TARGET_DATA: p[i] = "target data"; break;
7144 case OMP_TARGET: p[i] = "target"; break;
7145 case OMP_TARGET_UPDATE: p[i] = "target update"; break;
7146 case OMP_TARGET_ENTER_DATA:
7147 p[i] = "target enter data"; break;
7148 case OMP_TARGET_EXIT_DATA: p[i] = "target exit data"; break;
7149 default: gcc_unreachable ();
7150 }
7151 error_at (OMP_CLAUSE_LOCATION (c),
7152 "expected %qs %<if%> clause modifier rather than %qs",
7153 p[0], p[1]);
7154 remove = true;
7155 }
7156 /* Fall through. */
7157
7158 case OMP_CLAUSE_FINAL:
7159 OMP_CLAUSE_OPERAND (c, 0)
7160 = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
7161 /* Fall through. */
7162
7163 case OMP_CLAUSE_SCHEDULE:
7164 case OMP_CLAUSE_NUM_THREADS:
7165 case OMP_CLAUSE_NUM_TEAMS:
7166 case OMP_CLAUSE_THREAD_LIMIT:
7167 case OMP_CLAUSE_DIST_SCHEDULE:
7168 case OMP_CLAUSE_DEVICE:
7169 case OMP_CLAUSE_PRIORITY:
7170 case OMP_CLAUSE_GRAINSIZE:
7171 case OMP_CLAUSE_NUM_TASKS:
7172 case OMP_CLAUSE_HINT:
7173 case OMP_CLAUSE__CILK_FOR_COUNT_:
7174 case OMP_CLAUSE_ASYNC:
7175 case OMP_CLAUSE_WAIT:
7176 case OMP_CLAUSE_NUM_GANGS:
7177 case OMP_CLAUSE_NUM_WORKERS:
7178 case OMP_CLAUSE_VECTOR_LENGTH:
7179 case OMP_CLAUSE_WORKER:
7180 case OMP_CLAUSE_VECTOR:
7181 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
7182 is_gimple_val, fb_rvalue) == GS_ERROR)
7183 remove = true;
7184 break;
7185
7186 case OMP_CLAUSE_GANG:
7187 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
7188 is_gimple_val, fb_rvalue) == GS_ERROR)
7189 remove = true;
7190 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 1), pre_p, NULL,
7191 is_gimple_val, fb_rvalue) == GS_ERROR)
7192 remove = true;
7193 break;
7194
7195 case OMP_CLAUSE_TILE:
7196 for (tree list = OMP_CLAUSE_TILE_LIST (c); !remove && list;
7197 list = TREE_CHAIN (list))
7198 {
7199 if (gimplify_expr (&TREE_VALUE (list), pre_p, NULL,
7200 is_gimple_val, fb_rvalue) == GS_ERROR)
7201 remove = true;
7202 }
7203 break;
7204
7205 case OMP_CLAUSE_DEVICE_RESIDENT:
7206 case OMP_CLAUSE_USE_DEVICE:
7207 remove = true;
7208 break;
7209
7210 case OMP_CLAUSE_NOWAIT:
7211 case OMP_CLAUSE_ORDERED:
7212 case OMP_CLAUSE_UNTIED:
7213 case OMP_CLAUSE_COLLAPSE:
7214 case OMP_CLAUSE_AUTO:
7215 case OMP_CLAUSE_SEQ:
7216 case OMP_CLAUSE_INDEPENDENT:
7217 case OMP_CLAUSE_MERGEABLE:
7218 case OMP_CLAUSE_PROC_BIND:
7219 case OMP_CLAUSE_SAFELEN:
7220 case OMP_CLAUSE_SIMDLEN:
7221 case OMP_CLAUSE_NOGROUP:
7222 case OMP_CLAUSE_THREADS:
7223 case OMP_CLAUSE_SIMD:
7224 break;
7225
7226 case OMP_CLAUSE_DEFAULTMAP:
7227 ctx->target_map_scalars_firstprivate = false;
7228 break;
7229
7230 case OMP_CLAUSE_ALIGNED:
7231 decl = OMP_CLAUSE_DECL (c);
7232 if (error_operand_p (decl))
7233 {
7234 remove = true;
7235 break;
7236 }
7237 if (gimplify_expr (&OMP_CLAUSE_ALIGNED_ALIGNMENT (c), pre_p, NULL,
7238 is_gimple_val, fb_rvalue) == GS_ERROR)
7239 {
7240 remove = true;
7241 break;
7242 }
7243 if (!is_global_var (decl)
7244 && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
7245 omp_add_variable (ctx, decl, GOVD_ALIGNED);
7246 break;
7247
7248 case OMP_CLAUSE_DEFAULT:
7249 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
7250 break;
7251
7252 default:
7253 gcc_unreachable ();
7254 }
7255
7256 if (remove)
7257 *list_p = OMP_CLAUSE_CHAIN (c);
7258 else
7259 list_p = &OMP_CLAUSE_CHAIN (c);
7260 }
7261
7262 gimplify_omp_ctxp = ctx;
7263 if (struct_map_to_clause)
7264 delete struct_map_to_clause;
7265 }
7266
/* Bundle of state threaded through splay_tree_foreach to the
   gimplify_adjust_omp_clauses_1 callback below.  */

struct gimplify_adjust_omp_clauses_data
{
  tree *list_p;		/* Head of the clause chain being extended.  */
  gimple_seq *pre_p;	/* Sequence receiving any setup statements.  */
};
7272
/* Splay-tree callback, run once per variable recorded in the current
   OpenMP context.  For each variable that was actually referenced in the
   region (GOVD_SEEN) but has no explicit user-written clause
   (no GOVD_EXPLICIT), synthesize the implicit data-sharing or map clause
   implied by its GOVD_* flags and prepend it to the clause list carried
   in DATA.  Always returns 0 so the splay-tree walk continues.  */

static int
gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
{
  tree *list_p = ((struct gimplify_adjust_omp_clauses_data *) data)->list_p;
  gimple_seq *pre_p
    = ((struct gimplify_adjust_omp_clauses_data *) data)->pre_p;
  tree decl = (tree) n->key;
  unsigned flags = n->value;
  enum omp_clause_code code;
  tree clause;
  bool private_debug;

  /* Explicit clauses were already handled by gimplify_scan_omp_clauses;
     context-local variables need no clause at all.  */
  if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
    return 0;
  /* Variables never referenced inside the region get no implicit
     clause either.  */
  if ((flags & GOVD_SEEN) == 0)
    return 0;
  if (flags & GOVD_DEBUG_PRIVATE)
    {
      gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_PRIVATE);
      private_debug = true;
    }
  else if (flags & GOVD_MAP)
    private_debug = false;
  else
    private_debug
      = lang_hooks.decls.omp_private_debug_clause (decl,
						   !!(flags & GOVD_SHARED));
  /* Translate the GOVD_* share class into the clause code to emit.  */
  if (private_debug)
    code = OMP_CLAUSE_PRIVATE;
  else if (flags & GOVD_MAP)
    code = OMP_CLAUSE_MAP;
  else if (flags & GOVD_SHARED)
    {
      if (is_global_var (decl))
	{
	  /* A shared global only needs a clause if some enclosing
	     context gives it a non-shared data-sharing class; otherwise
	     it is shared by default everywhere and the clause would be
	     redundant.  */
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
	  while (ctx != NULL)
	    {
	      splay_tree_node on
		= splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	      if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
				      | GOVD_PRIVATE | GOVD_REDUCTION
				      | GOVD_LINEAR | GOVD_MAP)) != 0)
		break;
	      ctx = ctx->outer_context;
	    }
	  if (ctx == NULL)
	    return 0;
	}
      code = OMP_CLAUSE_SHARED;
    }
  else if (flags & GOVD_PRIVATE)
    code = OMP_CLAUSE_PRIVATE;
  else if (flags & GOVD_FIRSTPRIVATE)
    code = OMP_CLAUSE_FIRSTPRIVATE;
  else if (flags & GOVD_LASTPRIVATE)
    code = OMP_CLAUSE_LASTPRIVATE;
  else if (flags & GOVD_ALIGNED)
    return 0;
  else
    gcc_unreachable ();

  clause = build_omp_clause (input_location, code);
  OMP_CLAUSE_DECL (clause) = decl;
  OMP_CLAUSE_CHAIN (clause) = *list_p;
  if (private_debug)
    OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
  else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
    OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
  else if (code == OMP_CLAUSE_MAP && (flags & GOVD_MAP_0LEN_ARRAY) != 0)
    {
      /* Possibly zero-length array section: emit a zero-sized ALLOC map
	 of *decl plus a trailing FIRSTPRIVATE_POINTER map for the base
	 pointer itself.  */
      tree nc = build_omp_clause (input_location, OMP_CLAUSE_MAP);
      OMP_CLAUSE_DECL (nc) = decl;
      if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
	OMP_CLAUSE_DECL (clause)
	  = build_simple_mem_ref_loc (input_location, decl);
      OMP_CLAUSE_DECL (clause)
	= build2 (MEM_REF, char_type_node, OMP_CLAUSE_DECL (clause),
		  build_int_cst (build_pointer_type (char_type_node), 0));
      OMP_CLAUSE_SIZE (clause) = size_zero_node;
      OMP_CLAUSE_SIZE (nc) = size_zero_node;
      OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_ALLOC);
      OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (clause) = 1;
      OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
      OMP_CLAUSE_CHAIN (nc) = *list_p;
      OMP_CLAUSE_CHAIN (clause) = nc;
      /* Gimplify the address in the enclosing context, not this one.  */
      struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
      gimplify_omp_ctxp = ctx->outer_context;
      gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (clause), 0),
		     pre_p, NULL, is_gimple_val, fb_rvalue);
      gimplify_omp_ctxp = ctx;
    }
  else if (code == OMP_CLAUSE_MAP)
    {
      OMP_CLAUSE_SET_MAP_KIND (clause,
			       flags & GOVD_MAP_TO_ONLY
			       ? GOMP_MAP_TO
			       : GOMP_MAP_TOFROM);
      if (DECL_SIZE (decl)
	  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
	{
	  /* Variable-sized decl: map the pointed-to storage via its
	     DECL_VALUE_EXPR (*ptr) and chain a pointer map for the
	     base pointer.  */
	  tree decl2 = DECL_VALUE_EXPR (decl);
	  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
	  decl2 = TREE_OPERAND (decl2, 0);
	  gcc_assert (DECL_P (decl2));
	  tree mem = build_simple_mem_ref (decl2);
	  OMP_CLAUSE_DECL (clause) = mem;
	  OMP_CLAUSE_SIZE (clause) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
	  if (gimplify_omp_ctxp->outer_context)
	    {
	      struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
	      omp_notice_variable (ctx, decl2, true);
	      omp_notice_variable (ctx, OMP_CLAUSE_SIZE (clause), true);
	    }
	  tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
				      OMP_CLAUSE_MAP);
	  OMP_CLAUSE_DECL (nc) = decl;
	  OMP_CLAUSE_SIZE (nc) = size_zero_node;
	  if (gimplify_omp_ctxp->target_firstprivatize_array_bases)
	    OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
	  else
	    OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
	  OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
	  OMP_CLAUSE_CHAIN (clause) = nc;
	}
      else if (gimplify_omp_ctxp->target_firstprivatize_array_bases
	       && lang_hooks.decls.omp_privatize_by_reference (decl))
	{
	  /* Reference-typed decl on a firstprivatizing target: map the
	     referenced object and add a FIRSTPRIVATE_REFERENCE map for
	     the reference itself.  */
	  OMP_CLAUSE_DECL (clause) = build_simple_mem_ref (decl);
	  OMP_CLAUSE_SIZE (clause)
	    = unshare_expr (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))));
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  gimplify_omp_ctxp = ctx->outer_context;
	  gimplify_expr (&OMP_CLAUSE_SIZE (clause),
			 pre_p, NULL, is_gimple_val, fb_rvalue);
	  gimplify_omp_ctxp = ctx;
	  tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
				      OMP_CLAUSE_MAP);
	  OMP_CLAUSE_DECL (nc) = decl;
	  OMP_CLAUSE_SIZE (nc) = size_zero_node;
	  OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_REFERENCE);
	  OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
	  OMP_CLAUSE_CHAIN (clause) = nc;
	}
      else
	OMP_CLAUSE_SIZE (clause) = DECL_SIZE_UNIT (decl);
    }
  if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_LASTPRIVATE) != 0)
    {
      /* Firstprivate that is also lastprivate: add the companion
	 lastprivate clause with its firstprivate bit set.  */
      tree nc = build_omp_clause (input_location, OMP_CLAUSE_LASTPRIVATE);
      OMP_CLAUSE_DECL (nc) = decl;
      OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (nc) = 1;
      OMP_CLAUSE_CHAIN (nc) = *list_p;
      OMP_CLAUSE_CHAIN (clause) = nc;
      struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
      gimplify_omp_ctxp = ctx->outer_context;
      lang_hooks.decls.omp_finish_clause (nc, pre_p);
      gimplify_omp_ctxp = ctx;
    }
  *list_p = clause;
  /* Let the front end finalize the new clause, again with the outer
     context current.  */
  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
  gimplify_omp_ctxp = ctx->outer_context;
  lang_hooks.decls.omp_finish_clause (clause, pre_p);
  gimplify_omp_ctxp = ctx;
  return 0;
}
7443
/* Post-gimplification pass over the clause list *LIST_P of the construct
   with tree code CODE, run after the region body has been gimplified.
   Removes clauses for variables that were never actually referenced
   (GOVD_SEEN unset), fixes up lastprivate/firstprivate interactions,
   rewrites maps of variable-sized decls, then appends the implicit
   clauses collected in the context via gimplify_adjust_omp_clauses_1.
   Finally pops and deletes the current gimplify_omp_ctxp context.
   Setup statements go to *PRE_P.  */

static void
gimplify_adjust_omp_clauses (gimple_seq *pre_p, tree *list_p,
			     enum tree_code code)
{
  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
  tree c, decl;

  while ((c = *list_p) != NULL)
    {
      splay_tree_node n;
      bool remove = false;

      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_SHARED:
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_LINEAR:
	  /* Drop the clause if the variable was never used in the
	     region; otherwise possibly turn it into a debug-only
	     PRIVATE clause per the front end's judgment.  */
	  decl = OMP_CLAUSE_DECL (c);
	  n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	  remove = !(n->value & GOVD_SEEN);
	  if (! remove)
	    {
	      bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
	      if ((n->value & GOVD_DEBUG_PRIVATE)
		  || lang_hooks.decls.omp_private_debug_clause (decl, shared))
		{
		  gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
			      || ((n->value & GOVD_DATA_SHARE_CLASS)
				  == GOVD_PRIVATE));
		  OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
		  OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
		}
	    }
	  break;

	case OMP_CLAUSE_LASTPRIVATE:
	  /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
	     accurately reflect the presence of a FIRSTPRIVATE clause.  */
	  decl = OMP_CLAUSE_DECL (c);
	  n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	  OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
	    = (n->value & GOVD_FIRSTPRIVATE) != 0;
	  if (omp_no_lastprivate (ctx))
	    {
	      if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
		remove = true;
	      else
		OMP_CLAUSE_CODE (c) = OMP_CLAUSE_PRIVATE;
	    }
	  else if (code == OMP_DISTRIBUTE
		   && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	    {
	      /* distribute may not combine the two on one variable.  */
	      remove = true;
	      error_at (OMP_CLAUSE_LOCATION (c),
			"same variable used in %<firstprivate%> and "
			"%<lastprivate%> clauses on %<distribute%> "
			"construct");
	    }
	  break;

	case OMP_CLAUSE_ALIGNED:
	  decl = OMP_CLAUSE_DECL (c);
	  if (!is_global_var (decl))
	    {
	      n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	      remove = n == NULL || !(n->value & GOVD_SEEN);
	      if (!remove && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
		{
		  struct gimplify_omp_ctx *octx;
		  if (n != NULL
		      && (n->value & (GOVD_DATA_SHARE_CLASS
				      & ~GOVD_FIRSTPRIVATE)))
		    remove = true;
		  else
		    for (octx = ctx->outer_context; octx;
			 octx = octx->outer_context)
		      {
			n = splay_tree_lookup (octx->variables,
					       (splay_tree_key) decl);
			if (n == NULL)
			  continue;
			if (n->value & GOVD_LOCAL)
			  break;
			/* We have to avoid assigning a shared variable
			   to itself when trying to add
			   __builtin_assume_aligned.  */
			if (n->value & GOVD_SHARED)
			  {
			    remove = true;
			    break;
			  }
		      }
		}
	    }
	  else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	    {
	      n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	      if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
		remove = true;
	    }
	  break;

	case OMP_CLAUSE_MAP:
	  if (code == OMP_TARGET_EXIT_DATA
	      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER)
	    {
	      remove = true;
	      break;
	    }
	  decl = OMP_CLAUSE_DECL (c);
	  if (!DECL_P (decl))
	    {
	      /* Non-decl maps (e.g. component refs) are mostly kept;
		 only unreferenced FIRSTPRIVATE_POINTER component maps
		 inside target regions are dropped.  */
	      if ((ctx->region_type & ORT_TARGET) != 0
		  && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
		{
		  if (TREE_CODE (decl) == INDIRECT_REF
		      && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
		      && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
			  == REFERENCE_TYPE))
		    decl = TREE_OPERAND (decl, 0);
		  if (TREE_CODE (decl) == COMPONENT_REF)
		    {
		      while (TREE_CODE (decl) == COMPONENT_REF)
			decl = TREE_OPERAND (decl, 0);
		      if (DECL_P (decl))
			{
			  n = splay_tree_lookup (ctx->variables,
						 (splay_tree_key) decl);
			  if (!(n->value & GOVD_SEEN))
			    remove = true;
			}
		    }
		}
	      break;
	    }
	  n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	  if ((ctx->region_type & ORT_TARGET) != 0
	      && !(n->value & GOVD_SEEN)
	      && GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) == 0)
	    {
	      remove = true;
	      /* For struct element mapping, if struct is never referenced
		 in target block and none of the mapping has always modifier,
		 remove all the struct element mappings, which immediately
		 follow the GOMP_MAP_STRUCT map clause.  */
	      if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT)
		{
		  HOST_WIDE_INT cnt = tree_to_shwi (OMP_CLAUSE_SIZE (c));
		  while (cnt--)
		    OMP_CLAUSE_CHAIN (c)
		      = OMP_CLAUSE_CHAIN (OMP_CLAUSE_CHAIN (c));
		}
	    }
	  else if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT
		   && code == OMP_TARGET_EXIT_DATA)
	    remove = true;
	  else if (DECL_SIZE (decl)
		   && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST
		   && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_POINTER
		   && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
		   && (OMP_CLAUSE_MAP_KIND (c)
		       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
	    {
	      /* For GOMP_MAP_FORCE_DEVICEPTR, we'll never enter here, because
		 for these, TREE_CODE (DECL_SIZE (decl)) will always be
		 INTEGER_CST.  */
	      gcc_assert (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FORCE_DEVICEPTR);

	      /* Variable-sized decl: remap to *base_pointer and chain a
		 pointer map, as in gimplify_adjust_omp_clauses_1.  */
	      tree decl2 = DECL_VALUE_EXPR (decl);
	      gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
	      decl2 = TREE_OPERAND (decl2, 0);
	      gcc_assert (DECL_P (decl2));
	      tree mem = build_simple_mem_ref (decl2);
	      OMP_CLAUSE_DECL (c) = mem;
	      OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
	      if (ctx->outer_context)
		{
		  omp_notice_variable (ctx->outer_context, decl2, true);
		  omp_notice_variable (ctx->outer_context,
				       OMP_CLAUSE_SIZE (c), true);
		}
	      if (((ctx->region_type & ORT_TARGET) != 0
		   || !ctx->target_firstprivatize_array_bases)
		  && ((n->value & GOVD_SEEN) == 0
		      || (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE)) == 0))
		{
		  tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
					      OMP_CLAUSE_MAP);
		  OMP_CLAUSE_DECL (nc) = decl;
		  OMP_CLAUSE_SIZE (nc) = size_zero_node;
		  if (ctx->target_firstprivatize_array_bases)
		    OMP_CLAUSE_SET_MAP_KIND (nc,
					     GOMP_MAP_FIRSTPRIVATE_POINTER);
		  else
		    OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
		  OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (c);
		  OMP_CLAUSE_CHAIN (c) = nc;
		  /* Skip over the clause we just inserted.  */
		  c = nc;
		}
	    }
	  else
	    {
	      if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
		OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
	      gcc_assert ((n->value & GOVD_SEEN) == 0
			  || ((n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
			      == 0));
	    }
	  break;

	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE__CACHE_:
	  decl = OMP_CLAUSE_DECL (c);
	  if (!DECL_P (decl))
	    break;
	  if (DECL_SIZE (decl)
	      && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
	    {
	      /* Same variable-sized-decl remapping as for MAP above,
		 but with no companion pointer clause.  */
	      tree decl2 = DECL_VALUE_EXPR (decl);
	      gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
	      decl2 = TREE_OPERAND (decl2, 0);
	      gcc_assert (DECL_P (decl2));
	      tree mem = build_simple_mem_ref (decl2);
	      OMP_CLAUSE_DECL (c) = mem;
	      OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
	      if (ctx->outer_context)
		{
		  omp_notice_variable (ctx->outer_context, decl2, true);
		  omp_notice_variable (ctx->outer_context,
				       OMP_CLAUSE_SIZE (c), true);
		}
	    }
	  else if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
	    OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
	  break;

	/* The remaining clauses need no post-body adjustment.  */
	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_HINT:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_USE_DEVICE_PTR:
	case OMP_CLAUSE_IS_DEVICE_PTR:
	case OMP_CLAUSE__CILK_FOR_COUNT_:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_DEVICE_RESIDENT:
	case OMP_CLAUSE_USE_DEVICE:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_TILE:
	  break;

	default:
	  gcc_unreachable ();
	}

      /* Unlink the clause or advance past it.  */
      if (remove)
	*list_p = OMP_CLAUSE_CHAIN (c);
      else
	list_p = &OMP_CLAUSE_CHAIN (c);
    }

  /* Add in any implicit data sharing.  */
  struct gimplify_adjust_omp_clauses_data data;
  data.list_p = list_p;
  data.pre_p = pre_p;
  splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, &data);

  gimplify_omp_ctxp = ctx->outer_context;
  delete_omp_context (ctx);
}
7749
7750 /* Gimplify OACC_CACHE. */
7751
7752 static void
7753 gimplify_oacc_cache (tree *expr_p, gimple_seq *pre_p)
7754 {
7755 tree expr = *expr_p;
7756
7757 gimplify_scan_omp_clauses (&OACC_CACHE_CLAUSES (expr), pre_p, ORT_ACC,
7758 OACC_CACHE);
7759 gimplify_adjust_omp_clauses (pre_p, &OACC_CACHE_CLAUSES (expr), OACC_CACHE);
7760
7761 /* TODO: Do something sensible with this information. */
7762
7763 *expr_p = NULL_TREE;
7764 }
7765
7766 /* Gimplify the contents of an OMP_PARALLEL statement. This involves
7767 gimplification of the body, as well as scanning the body for used
7768 variables. We need to do this scan now, because variable-sized
7769 decls will be decomposed during gimplification. */
7770
7771 static void
7772 gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
7773 {
7774 tree expr = *expr_p;
7775 gimple *g;
7776 gimple_seq body = NULL;
7777
7778 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
7779 OMP_PARALLEL_COMBINED (expr)
7780 ? ORT_COMBINED_PARALLEL
7781 : ORT_PARALLEL, OMP_PARALLEL);
7782
7783 push_gimplify_context ();
7784
7785 g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
7786 if (gimple_code (g) == GIMPLE_BIND)
7787 pop_gimplify_context (g);
7788 else
7789 pop_gimplify_context (NULL);
7790
7791 gimplify_adjust_omp_clauses (pre_p, &OMP_PARALLEL_CLAUSES (expr),
7792 OMP_PARALLEL);
7793
7794 g = gimple_build_omp_parallel (body,
7795 OMP_PARALLEL_CLAUSES (expr),
7796 NULL_TREE, NULL_TREE);
7797 if (OMP_PARALLEL_COMBINED (expr))
7798 gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
7799 gimplify_seq_add_stmt (pre_p, g);
7800 *expr_p = NULL_TREE;
7801 }
7802
7803 /* Gimplify the contents of an OMP_TASK statement. This involves
7804 gimplification of the body, as well as scanning the body for used
7805 variables. We need to do this scan now, because variable-sized
7806 decls will be decomposed during gimplification. */
7807
7808 static void
7809 gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
7810 {
7811 tree expr = *expr_p;
7812 gimple *g;
7813 gimple_seq body = NULL;
7814
7815 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p,
7816 find_omp_clause (OMP_TASK_CLAUSES (expr),
7817 OMP_CLAUSE_UNTIED)
7818 ? ORT_UNTIED_TASK : ORT_TASK, OMP_TASK);
7819
7820 push_gimplify_context ();
7821
7822 g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
7823 if (gimple_code (g) == GIMPLE_BIND)
7824 pop_gimplify_context (g);
7825 else
7826 pop_gimplify_context (NULL);
7827
7828 gimplify_adjust_omp_clauses (pre_p, &OMP_TASK_CLAUSES (expr), OMP_TASK);
7829
7830 g = gimple_build_omp_task (body,
7831 OMP_TASK_CLAUSES (expr),
7832 NULL_TREE, NULL_TREE,
7833 NULL_TREE, NULL_TREE, NULL_TREE);
7834 gimplify_seq_add_stmt (pre_p, g);
7835 *expr_p = NULL_TREE;
7836 }
7837
7838 /* Helper function of gimplify_omp_for, find OMP_FOR resp. OMP_SIMD
7839 with non-NULL OMP_FOR_INIT. */
7840
7841 static tree
7842 find_combined_omp_for (tree *tp, int *walk_subtrees, void *)
7843 {
7844 *walk_subtrees = 0;
7845 switch (TREE_CODE (*tp))
7846 {
7847 case OMP_FOR:
7848 *walk_subtrees = 1;
7849 /* FALLTHRU */
7850 case OMP_SIMD:
7851 if (OMP_FOR_INIT (*tp) != NULL_TREE)
7852 return *tp;
7853 break;
7854 case BIND_EXPR:
7855 case STATEMENT_LIST:
7856 case OMP_PARALLEL:
7857 *walk_subtrees = 1;
7858 break;
7859 default:
7860 break;
7861 }
7862 return NULL_TREE;
7863 }
7864
7865 /* Gimplify the gross structure of an OMP_FOR statement. */
7866
7867 static enum gimplify_status
7868 gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
7869 {
7870 tree for_stmt, orig_for_stmt, inner_for_stmt = NULL_TREE, decl, var, t;
7871 enum gimplify_status ret = GS_ALL_DONE;
7872 enum gimplify_status tret;
7873 gomp_for *gfor;
7874 gimple_seq for_body, for_pre_body;
7875 int i;
7876 bitmap has_decl_expr = NULL;
7877 enum omp_region_type ort = ORT_WORKSHARE;
7878
7879 orig_for_stmt = for_stmt = *expr_p;
7880
7881 switch (TREE_CODE (for_stmt))
7882 {
7883 case OMP_FOR:
7884 case CILK_FOR:
7885 case OMP_DISTRIBUTE:
7886 break;
7887 case OACC_LOOP:
7888 ort = ORT_ACC;
7889 break;
7890 case OMP_TASKLOOP:
7891 if (find_omp_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_UNTIED))
7892 ort = ORT_UNTIED_TASK;
7893 else
7894 ort = ORT_TASK;
7895 break;
7896 case OMP_SIMD:
7897 case CILK_SIMD:
7898 ort = ORT_SIMD;
7899 break;
7900 default:
7901 gcc_unreachable ();
7902 }
7903
7904 /* Set OMP_CLAUSE_LINEAR_NO_COPYIN flag on explicit linear
7905 clause for the IV. */
7906 if (ort == ORT_SIMD && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
7907 {
7908 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), 0);
7909 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
7910 decl = TREE_OPERAND (t, 0);
7911 for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
7912 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
7913 && OMP_CLAUSE_DECL (c) == decl)
7914 {
7915 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
7916 break;
7917 }
7918 }
7919
7920 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
7921 {
7922 gcc_assert (TREE_CODE (for_stmt) != OACC_LOOP);
7923 inner_for_stmt = walk_tree (&OMP_FOR_BODY (for_stmt),
7924 find_combined_omp_for, NULL, NULL);
7925 if (inner_for_stmt == NULL_TREE)
7926 {
7927 gcc_assert (seen_error ());
7928 *expr_p = NULL_TREE;
7929 return GS_ERROR;
7930 }
7931 }
7932
7933 if (TREE_CODE (for_stmt) != OMP_TASKLOOP)
7934 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p, ort,
7935 TREE_CODE (for_stmt));
7936
7937 if (TREE_CODE (for_stmt) == OMP_DISTRIBUTE)
7938 gimplify_omp_ctxp->distribute = true;
7939
7940 /* Handle OMP_FOR_INIT. */
7941 for_pre_body = NULL;
7942 if (ort == ORT_SIMD && OMP_FOR_PRE_BODY (for_stmt))
7943 {
7944 has_decl_expr = BITMAP_ALLOC (NULL);
7945 if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == DECL_EXPR
7946 && TREE_CODE (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt)))
7947 == VAR_DECL)
7948 {
7949 t = OMP_FOR_PRE_BODY (for_stmt);
7950 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
7951 }
7952 else if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == STATEMENT_LIST)
7953 {
7954 tree_stmt_iterator si;
7955 for (si = tsi_start (OMP_FOR_PRE_BODY (for_stmt)); !tsi_end_p (si);
7956 tsi_next (&si))
7957 {
7958 t = tsi_stmt (si);
7959 if (TREE_CODE (t) == DECL_EXPR
7960 && TREE_CODE (DECL_EXPR_DECL (t)) == VAR_DECL)
7961 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
7962 }
7963 }
7964 }
7965 if (OMP_FOR_PRE_BODY (for_stmt))
7966 {
7967 if (TREE_CODE (for_stmt) != OMP_TASKLOOP || gimplify_omp_ctxp)
7968 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
7969 else
7970 {
7971 struct gimplify_omp_ctx ctx;
7972 memset (&ctx, 0, sizeof (ctx));
7973 ctx.region_type = ORT_NONE;
7974 gimplify_omp_ctxp = &ctx;
7975 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
7976 gimplify_omp_ctxp = NULL;
7977 }
7978 }
7979 OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;
7980
7981 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
7982 for_stmt = inner_for_stmt;
7983
7984 /* For taskloop, need to gimplify the start, end and step before the
7985 taskloop, outside of the taskloop omp context. */
7986 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
7987 {
7988 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
7989 {
7990 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
7991 if (!is_gimple_constant (TREE_OPERAND (t, 1)))
7992 {
7993 TREE_OPERAND (t, 1)
7994 = get_initialized_tmp_var (TREE_OPERAND (t, 1),
7995 pre_p, NULL);
7996 tree c = build_omp_clause (input_location,
7997 OMP_CLAUSE_FIRSTPRIVATE);
7998 OMP_CLAUSE_DECL (c) = TREE_OPERAND (t, 1);
7999 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
8000 OMP_FOR_CLAUSES (orig_for_stmt) = c;
8001 }
8002
8003 /* Handle OMP_FOR_COND. */
8004 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
8005 if (!is_gimple_constant (TREE_OPERAND (t, 1)))
8006 {
8007 TREE_OPERAND (t, 1)
8008 = get_initialized_tmp_var (TREE_OPERAND (t, 1),
8009 gimple_seq_empty_p (for_pre_body)
8010 ? pre_p : &for_pre_body, NULL);
8011 tree c = build_omp_clause (input_location,
8012 OMP_CLAUSE_FIRSTPRIVATE);
8013 OMP_CLAUSE_DECL (c) = TREE_OPERAND (t, 1);
8014 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
8015 OMP_FOR_CLAUSES (orig_for_stmt) = c;
8016 }
8017
8018 /* Handle OMP_FOR_INCR. */
8019 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
8020 if (TREE_CODE (t) == MODIFY_EXPR)
8021 {
8022 decl = TREE_OPERAND (t, 0);
8023 t = TREE_OPERAND (t, 1);
8024 tree *tp = &TREE_OPERAND (t, 1);
8025 if (TREE_CODE (t) == PLUS_EXPR && *tp == decl)
8026 tp = &TREE_OPERAND (t, 0);
8027
8028 if (!is_gimple_constant (*tp))
8029 {
8030 gimple_seq *seq = gimple_seq_empty_p (for_pre_body)
8031 ? pre_p : &for_pre_body;
8032 *tp = get_initialized_tmp_var (*tp, seq, NULL);
8033 tree c = build_omp_clause (input_location,
8034 OMP_CLAUSE_FIRSTPRIVATE);
8035 OMP_CLAUSE_DECL (c) = *tp;
8036 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
8037 OMP_FOR_CLAUSES (orig_for_stmt) = c;
8038 }
8039 }
8040 }
8041
8042 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (orig_for_stmt), pre_p, ort,
8043 OMP_TASKLOOP);
8044 }
8045
8046 if (orig_for_stmt != for_stmt)
8047 gimplify_omp_ctxp->combined_loop = true;
8048
8049 for_body = NULL;
8050 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
8051 == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
8052 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
8053 == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));
8054
8055 tree c = find_omp_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ORDERED);
8056 bool is_doacross = false;
8057 if (c && OMP_CLAUSE_ORDERED_EXPR (c))
8058 {
8059 is_doacross = true;
8060 gimplify_omp_ctxp->loop_iter_var.create (TREE_VEC_LENGTH
8061 (OMP_FOR_INIT (for_stmt))
8062 * 2);
8063 }
8064 int collapse = 1;
8065 c = find_omp_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_COLLAPSE);
8066 if (c)
8067 collapse = tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (c));
8068 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
8069 {
8070 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
8071 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
8072 decl = TREE_OPERAND (t, 0);
8073 gcc_assert (DECL_P (decl));
8074 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
8075 || POINTER_TYPE_P (TREE_TYPE (decl)));
8076 if (is_doacross)
8077 {
8078 if (TREE_CODE (for_stmt) == OMP_FOR && OMP_FOR_ORIG_DECLS (for_stmt))
8079 gimplify_omp_ctxp->loop_iter_var.quick_push
8080 (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i));
8081 else
8082 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
8083 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
8084 }
8085
8086 /* Make sure the iteration variable is private. */
8087 tree c = NULL_TREE;
8088 tree c2 = NULL_TREE;
8089 if (orig_for_stmt != for_stmt)
8090 /* Do this only on innermost construct for combined ones. */;
8091 else if (ort == ORT_SIMD)
8092 {
8093 splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
8094 (splay_tree_key) decl);
8095 omp_is_private (gimplify_omp_ctxp, decl,
8096 1 + (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
8097 != 1));
8098 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
8099 omp_notice_variable (gimplify_omp_ctxp, decl, true);
8100 else if (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
8101 {
8102 c = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
8103 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
8104 unsigned int flags = GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN;
8105 if ((has_decl_expr
8106 && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
8107 || omp_no_lastprivate (gimplify_omp_ctxp))
8108 {
8109 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
8110 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
8111 }
8112 struct gimplify_omp_ctx *outer
8113 = gimplify_omp_ctxp->outer_context;
8114 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
8115 {
8116 if (outer->region_type == ORT_WORKSHARE
8117 && outer->combined_loop)
8118 {
8119 n = splay_tree_lookup (outer->variables,
8120 (splay_tree_key)decl);
8121 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
8122 {
8123 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
8124 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
8125 }
8126 else
8127 {
8128 struct gimplify_omp_ctx *octx = outer->outer_context;
8129 if (octx
8130 && octx->region_type == ORT_COMBINED_PARALLEL
8131 && octx->outer_context
8132 && (octx->outer_context->region_type
8133 == ORT_WORKSHARE)
8134 && octx->outer_context->combined_loop)
8135 {
8136 octx = octx->outer_context;
8137 n = splay_tree_lookup (octx->variables,
8138 (splay_tree_key)decl);
8139 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
8140 {
8141 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
8142 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
8143 }
8144 }
8145 }
8146 }
8147 }
8148
8149 OMP_CLAUSE_DECL (c) = decl;
8150 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
8151 OMP_FOR_CLAUSES (for_stmt) = c;
8152 omp_add_variable (gimplify_omp_ctxp, decl, flags);
8153 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
8154 {
8155 if (outer->region_type == ORT_WORKSHARE
8156 && outer->combined_loop)
8157 {
8158 if (outer->outer_context
8159 && (outer->outer_context->region_type
8160 == ORT_COMBINED_PARALLEL))
8161 outer = outer->outer_context;
8162 else if (omp_check_private (outer, decl, false))
8163 outer = NULL;
8164 }
8165 else if (((outer->region_type & ORT_TASK) != 0)
8166 && outer->combined_loop
8167 && !omp_check_private (gimplify_omp_ctxp,
8168 decl, false))
8169 ;
8170 else if (outer->region_type != ORT_COMBINED_PARALLEL)
8171 outer = NULL;
8172 if (outer)
8173 {
8174 n = splay_tree_lookup (outer->variables,
8175 (splay_tree_key)decl);
8176 if (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
8177 {
8178 omp_add_variable (outer, decl,
8179 GOVD_LASTPRIVATE | GOVD_SEEN);
8180 if (outer->region_type == ORT_COMBINED_PARALLEL
8181 && outer->outer_context
8182 && (outer->outer_context->region_type
8183 == ORT_WORKSHARE)
8184 && outer->outer_context->combined_loop)
8185 {
8186 outer = outer->outer_context;
8187 n = splay_tree_lookup (outer->variables,
8188 (splay_tree_key)decl);
8189 if (omp_check_private (outer, decl, false))
8190 outer = NULL;
8191 else if (n == NULL
8192 || ((n->value & GOVD_DATA_SHARE_CLASS)
8193 == 0))
8194 omp_add_variable (outer, decl,
8195 GOVD_LASTPRIVATE
8196 | GOVD_SEEN);
8197 else
8198 outer = NULL;
8199 }
8200 if (outer && outer->outer_context
8201 && (outer->outer_context->region_type
8202 == ORT_COMBINED_TEAMS))
8203 {
8204 outer = outer->outer_context;
8205 n = splay_tree_lookup (outer->variables,
8206 (splay_tree_key)decl);
8207 if (n == NULL
8208 || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
8209 omp_add_variable (outer, decl,
8210 GOVD_SHARED | GOVD_SEEN);
8211 else
8212 outer = NULL;
8213 }
8214 if (outer && outer->outer_context)
8215 omp_notice_variable (outer->outer_context, decl,
8216 true);
8217 }
8218 }
8219 }
8220 }
8221 else
8222 {
8223 bool lastprivate
8224 = (!has_decl_expr
8225 || !bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
8226 && !omp_no_lastprivate (gimplify_omp_ctxp);
8227 struct gimplify_omp_ctx *outer
8228 = gimplify_omp_ctxp->outer_context;
8229 if (outer && lastprivate)
8230 {
8231 if (outer->region_type == ORT_WORKSHARE
8232 && outer->combined_loop)
8233 {
8234 n = splay_tree_lookup (outer->variables,
8235 (splay_tree_key)decl);
8236 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
8237 {
8238 lastprivate = false;
8239 outer = NULL;
8240 }
8241 else if (outer->outer_context
8242 && (outer->outer_context->region_type
8243 == ORT_COMBINED_PARALLEL))
8244 outer = outer->outer_context;
8245 else if (omp_check_private (outer, decl, false))
8246 outer = NULL;
8247 }
8248 else if (((outer->region_type & ORT_TASK) != 0)
8249 && outer->combined_loop
8250 && !omp_check_private (gimplify_omp_ctxp,
8251 decl, false))
8252 ;
8253 else if (outer->region_type != ORT_COMBINED_PARALLEL)
8254 outer = NULL;
8255 if (outer)
8256 {
8257 n = splay_tree_lookup (outer->variables,
8258 (splay_tree_key)decl);
8259 if (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
8260 {
8261 omp_add_variable (outer, decl,
8262 GOVD_LASTPRIVATE | GOVD_SEEN);
8263 if (outer->region_type == ORT_COMBINED_PARALLEL
8264 && outer->outer_context
8265 && (outer->outer_context->region_type
8266 == ORT_WORKSHARE)
8267 && outer->outer_context->combined_loop)
8268 {
8269 outer = outer->outer_context;
8270 n = splay_tree_lookup (outer->variables,
8271 (splay_tree_key)decl);
8272 if (omp_check_private (outer, decl, false))
8273 outer = NULL;
8274 else if (n == NULL
8275 || ((n->value & GOVD_DATA_SHARE_CLASS)
8276 == 0))
8277 omp_add_variable (outer, decl,
8278 GOVD_LASTPRIVATE
8279 | GOVD_SEEN);
8280 else
8281 outer = NULL;
8282 }
8283 if (outer && outer->outer_context
8284 && (outer->outer_context->region_type
8285 == ORT_COMBINED_TEAMS))
8286 {
8287 outer = outer->outer_context;
8288 n = splay_tree_lookup (outer->variables,
8289 (splay_tree_key)decl);
8290 if (n == NULL
8291 || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
8292 omp_add_variable (outer, decl,
8293 GOVD_SHARED | GOVD_SEEN);
8294 else
8295 outer = NULL;
8296 }
8297 if (outer && outer->outer_context)
8298 omp_notice_variable (outer->outer_context, decl,
8299 true);
8300 }
8301 }
8302 }
8303
8304 c = build_omp_clause (input_location,
8305 lastprivate ? OMP_CLAUSE_LASTPRIVATE
8306 : OMP_CLAUSE_PRIVATE);
8307 OMP_CLAUSE_DECL (c) = decl;
8308 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
8309 OMP_FOR_CLAUSES (for_stmt) = c;
8310 omp_add_variable (gimplify_omp_ctxp, decl,
8311 (lastprivate ? GOVD_LASTPRIVATE : GOVD_PRIVATE)
8312 | GOVD_EXPLICIT | GOVD_SEEN);
8313 c = NULL_TREE;
8314 }
8315 }
8316 else if (omp_is_private (gimplify_omp_ctxp, decl, 0))
8317 omp_notice_variable (gimplify_omp_ctxp, decl, true);
8318 else
8319 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
8320
8321 /* If DECL is not a gimple register, create a temporary variable to act
8322 as an iteration counter. This is valid, since DECL cannot be
8323 modified in the body of the loop. Similarly for any iteration vars
8324 in simd with collapse > 1 where the iterator vars must be
8325 lastprivate. */
8326 if (orig_for_stmt != for_stmt)
8327 var = decl;
8328 else if (!is_gimple_reg (decl)
8329 || (ort == ORT_SIMD
8330 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1))
8331 {
8332 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
8333 TREE_OPERAND (t, 0) = var;
8334
8335 gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));
8336
8337 if (ort == ORT_SIMD
8338 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
8339 {
8340 c2 = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
8341 OMP_CLAUSE_LINEAR_NO_COPYIN (c2) = 1;
8342 OMP_CLAUSE_LINEAR_NO_COPYOUT (c2) = 1;
8343 OMP_CLAUSE_DECL (c2) = var;
8344 OMP_CLAUSE_CHAIN (c2) = OMP_FOR_CLAUSES (for_stmt);
8345 OMP_FOR_CLAUSES (for_stmt) = c2;
8346 omp_add_variable (gimplify_omp_ctxp, var,
8347 GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN);
8348 if (c == NULL_TREE)
8349 {
8350 c = c2;
8351 c2 = NULL_TREE;
8352 }
8353 }
8354 else
8355 omp_add_variable (gimplify_omp_ctxp, var,
8356 GOVD_PRIVATE | GOVD_SEEN);
8357 }
8358 else
8359 var = decl;
8360
8361 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
8362 is_gimple_val, fb_rvalue);
8363 ret = MIN (ret, tret);
8364 if (ret == GS_ERROR)
8365 return ret;
8366
8367 /* Handle OMP_FOR_COND. */
8368 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
8369 gcc_assert (COMPARISON_CLASS_P (t));
8370 gcc_assert (TREE_OPERAND (t, 0) == decl);
8371
8372 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
8373 is_gimple_val, fb_rvalue);
8374 ret = MIN (ret, tret);
8375
8376 /* Handle OMP_FOR_INCR. */
8377 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
8378 switch (TREE_CODE (t))
8379 {
8380 case PREINCREMENT_EXPR:
8381 case POSTINCREMENT_EXPR:
8382 {
8383 tree decl = TREE_OPERAND (t, 0);
8384 /* c_omp_for_incr_canonicalize_ptr() should have been
8385 called to massage things appropriately. */
8386 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
8387
8388 if (orig_for_stmt != for_stmt)
8389 break;
8390 t = build_int_cst (TREE_TYPE (decl), 1);
8391 if (c)
8392 OMP_CLAUSE_LINEAR_STEP (c) = t;
8393 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
8394 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
8395 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
8396 break;
8397 }
8398
8399 case PREDECREMENT_EXPR:
8400 case POSTDECREMENT_EXPR:
8401 /* c_omp_for_incr_canonicalize_ptr() should have been
8402 called to massage things appropriately. */
8403 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
8404 if (orig_for_stmt != for_stmt)
8405 break;
8406 t = build_int_cst (TREE_TYPE (decl), -1);
8407 if (c)
8408 OMP_CLAUSE_LINEAR_STEP (c) = t;
8409 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
8410 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
8411 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
8412 break;
8413
8414 case MODIFY_EXPR:
8415 gcc_assert (TREE_OPERAND (t, 0) == decl);
8416 TREE_OPERAND (t, 0) = var;
8417
8418 t = TREE_OPERAND (t, 1);
8419 switch (TREE_CODE (t))
8420 {
8421 case PLUS_EXPR:
8422 if (TREE_OPERAND (t, 1) == decl)
8423 {
8424 TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
8425 TREE_OPERAND (t, 0) = var;
8426 break;
8427 }
8428
8429 /* Fallthru. */
8430 case MINUS_EXPR:
8431 case POINTER_PLUS_EXPR:
8432 gcc_assert (TREE_OPERAND (t, 0) == decl);
8433 TREE_OPERAND (t, 0) = var;
8434 break;
8435 default:
8436 gcc_unreachable ();
8437 }
8438
8439 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
8440 is_gimple_val, fb_rvalue);
8441 ret = MIN (ret, tret);
8442 if (c)
8443 {
8444 tree step = TREE_OPERAND (t, 1);
8445 tree stept = TREE_TYPE (decl);
8446 if (POINTER_TYPE_P (stept))
8447 stept = sizetype;
8448 step = fold_convert (stept, step);
8449 if (TREE_CODE (t) == MINUS_EXPR)
8450 step = fold_build1 (NEGATE_EXPR, stept, step);
8451 OMP_CLAUSE_LINEAR_STEP (c) = step;
8452 if (step != TREE_OPERAND (t, 1))
8453 {
8454 tret = gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c),
8455 &for_pre_body, NULL,
8456 is_gimple_val, fb_rvalue);
8457 ret = MIN (ret, tret);
8458 }
8459 }
8460 break;
8461
8462 default:
8463 gcc_unreachable ();
8464 }
8465
8466 if (c2)
8467 {
8468 gcc_assert (c);
8469 OMP_CLAUSE_LINEAR_STEP (c2) = OMP_CLAUSE_LINEAR_STEP (c);
8470 }
8471
8472 if ((var != decl || collapse > 1) && orig_for_stmt == for_stmt)
8473 {
8474 for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
8475 if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
8476 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
8477 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
8478 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)
8479 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) == NULL))
8480 && OMP_CLAUSE_DECL (c) == decl)
8481 {
8482 if (is_doacross && (collapse == 1 || i >= collapse))
8483 t = var;
8484 else
8485 {
8486 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
8487 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
8488 gcc_assert (TREE_OPERAND (t, 0) == var);
8489 t = TREE_OPERAND (t, 1);
8490 gcc_assert (TREE_CODE (t) == PLUS_EXPR
8491 || TREE_CODE (t) == MINUS_EXPR
8492 || TREE_CODE (t) == POINTER_PLUS_EXPR);
8493 gcc_assert (TREE_OPERAND (t, 0) == var);
8494 t = build2 (TREE_CODE (t), TREE_TYPE (decl),
8495 is_doacross ? var : decl,
8496 TREE_OPERAND (t, 1));
8497 }
8498 gimple_seq *seq;
8499 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
8500 seq = &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c);
8501 else
8502 seq = &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c);
8503 gimplify_assign (decl, t, seq);
8504 }
8505 }
8506 }
8507
8508 BITMAP_FREE (has_decl_expr);
8509
8510 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
8511 {
8512 push_gimplify_context ();
8513 if (TREE_CODE (OMP_FOR_BODY (orig_for_stmt)) != BIND_EXPR)
8514 {
8515 OMP_FOR_BODY (orig_for_stmt)
8516 = build3 (BIND_EXPR, void_type_node, NULL,
8517 OMP_FOR_BODY (orig_for_stmt), NULL);
8518 TREE_SIDE_EFFECTS (OMP_FOR_BODY (orig_for_stmt)) = 1;
8519 }
8520 }
8521
8522 gimple *g = gimplify_and_return_first (OMP_FOR_BODY (orig_for_stmt),
8523 &for_body);
8524
8525 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
8526 {
8527 if (gimple_code (g) == GIMPLE_BIND)
8528 pop_gimplify_context (g);
8529 else
8530 pop_gimplify_context (NULL);
8531 }
8532
8533 if (orig_for_stmt != for_stmt)
8534 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
8535 {
8536 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
8537 decl = TREE_OPERAND (t, 0);
8538 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
8539 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
8540 gimplify_omp_ctxp = ctx->outer_context;
8541 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
8542 gimplify_omp_ctxp = ctx;
8543 omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
8544 TREE_OPERAND (t, 0) = var;
8545 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
8546 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
8547 TREE_OPERAND (TREE_OPERAND (t, 1), 0) = var;
8548 }
8549
8550 gimplify_adjust_omp_clauses (pre_p, &OMP_FOR_CLAUSES (orig_for_stmt),
8551 TREE_CODE (orig_for_stmt));
8552
8553 int kind;
8554 switch (TREE_CODE (orig_for_stmt))
8555 {
8556 case OMP_FOR: kind = GF_OMP_FOR_KIND_FOR; break;
8557 case OMP_SIMD: kind = GF_OMP_FOR_KIND_SIMD; break;
8558 case CILK_SIMD: kind = GF_OMP_FOR_KIND_CILKSIMD; break;
8559 case CILK_FOR: kind = GF_OMP_FOR_KIND_CILKFOR; break;
8560 case OMP_DISTRIBUTE: kind = GF_OMP_FOR_KIND_DISTRIBUTE; break;
8561 case OMP_TASKLOOP: kind = GF_OMP_FOR_KIND_TASKLOOP; break;
8562 case OACC_LOOP: kind = GF_OMP_FOR_KIND_OACC_LOOP; break;
8563 default:
8564 gcc_unreachable ();
8565 }
8566 gfor = gimple_build_omp_for (for_body, kind, OMP_FOR_CLAUSES (orig_for_stmt),
8567 TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
8568 for_pre_body);
8569 if (orig_for_stmt != for_stmt)
8570 gimple_omp_for_set_combined_p (gfor, true);
8571 if (gimplify_omp_ctxp
8572 && (gimplify_omp_ctxp->combined_loop
8573 || (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
8574 && gimplify_omp_ctxp->outer_context
8575 && gimplify_omp_ctxp->outer_context->combined_loop)))
8576 {
8577 gimple_omp_for_set_combined_into_p (gfor, true);
8578 if (gimplify_omp_ctxp->combined_loop)
8579 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_SIMD);
8580 else
8581 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_FOR);
8582 }
8583
8584 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
8585 {
8586 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
8587 gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
8588 gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
8589 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
8590 gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
8591 gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
8592 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
8593 gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
8594 }
8595
8596 /* OMP_TASKLOOP is gimplified as two GIMPLE_OMP_FOR taskloop
8597 constructs with GIMPLE_OMP_TASK sandwiched in between them.
8598 The outer taskloop stands for computing the number of iterations,
8599 counts for collapsed loops and holding taskloop specific clauses.
8600 The task construct stands for the effect of data sharing on the
8601 explicit task it creates and the inner taskloop stands for expansion
8602 of the static loop inside of the explicit task construct. */
8603 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
8604 {
8605 tree *gfor_clauses_ptr = gimple_omp_for_clauses_ptr (gfor);
8606 tree task_clauses = NULL_TREE;
8607 tree c = *gfor_clauses_ptr;
8608 tree *gtask_clauses_ptr = &task_clauses;
8609 tree outer_for_clauses = NULL_TREE;
8610 tree *gforo_clauses_ptr = &outer_for_clauses;
8611 for (; c; c = OMP_CLAUSE_CHAIN (c))
8612 switch (OMP_CLAUSE_CODE (c))
8613 {
8614 /* These clauses are allowed on task, move them there. */
8615 case OMP_CLAUSE_SHARED:
8616 case OMP_CLAUSE_FIRSTPRIVATE:
8617 case OMP_CLAUSE_DEFAULT:
8618 case OMP_CLAUSE_IF:
8619 case OMP_CLAUSE_UNTIED:
8620 case OMP_CLAUSE_FINAL:
8621 case OMP_CLAUSE_MERGEABLE:
8622 case OMP_CLAUSE_PRIORITY:
8623 *gtask_clauses_ptr = c;
8624 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
8625 break;
8626 case OMP_CLAUSE_PRIVATE:
8627 if (OMP_CLAUSE_PRIVATE_TASKLOOP_IV (c))
8628 {
8629 /* We want private on outer for and firstprivate
8630 on task. */
8631 *gtask_clauses_ptr
8632 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8633 OMP_CLAUSE_FIRSTPRIVATE);
8634 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
8635 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL);
8636 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
8637 *gforo_clauses_ptr = c;
8638 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
8639 }
8640 else
8641 {
8642 *gtask_clauses_ptr = c;
8643 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
8644 }
8645 break;
8646 /* These clauses go into outer taskloop clauses. */
8647 case OMP_CLAUSE_GRAINSIZE:
8648 case OMP_CLAUSE_NUM_TASKS:
8649 case OMP_CLAUSE_NOGROUP:
8650 *gforo_clauses_ptr = c;
8651 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
8652 break;
8653 /* Taskloop clause we duplicate on both taskloops. */
8654 case OMP_CLAUSE_COLLAPSE:
8655 *gfor_clauses_ptr = c;
8656 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
8657 *gforo_clauses_ptr = copy_node (c);
8658 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
8659 break;
8660 /* For lastprivate, keep the clause on inner taskloop, and add
8661 a shared clause on task. If the same decl is also firstprivate,
8662 add also firstprivate clause on the inner taskloop. */
8663 case OMP_CLAUSE_LASTPRIVATE:
8664 if (OMP_CLAUSE_LASTPRIVATE_TASKLOOP_IV (c))
8665 {
8666 /* For taskloop C++ lastprivate IVs, we want:
8667 1) private on outer taskloop
8668 2) firstprivate and shared on task
8669 3) lastprivate on inner taskloop */
8670 *gtask_clauses_ptr
8671 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8672 OMP_CLAUSE_FIRSTPRIVATE);
8673 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
8674 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL);
8675 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
8676 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c) = 1;
8677 *gforo_clauses_ptr = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8678 OMP_CLAUSE_PRIVATE);
8679 OMP_CLAUSE_DECL (*gforo_clauses_ptr) = OMP_CLAUSE_DECL (c);
8680 OMP_CLAUSE_PRIVATE_TASKLOOP_IV (*gforo_clauses_ptr) = 1;
8681 TREE_TYPE (*gforo_clauses_ptr) = TREE_TYPE (c);
8682 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
8683 }
8684 *gfor_clauses_ptr = c;
8685 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
8686 *gtask_clauses_ptr
8687 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_SHARED);
8688 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
8689 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
8690 OMP_CLAUSE_SHARED_FIRSTPRIVATE (*gtask_clauses_ptr) = 1;
8691 gtask_clauses_ptr
8692 = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
8693 break;
8694 default:
8695 gcc_unreachable ();
8696 }
8697 *gfor_clauses_ptr = NULL_TREE;
8698 *gtask_clauses_ptr = NULL_TREE;
8699 *gforo_clauses_ptr = NULL_TREE;
8700 g = gimple_build_bind (NULL_TREE, gfor, NULL_TREE);
8701 g = gimple_build_omp_task (g, task_clauses, NULL_TREE, NULL_TREE,
8702 NULL_TREE, NULL_TREE, NULL_TREE);
8703 gimple_omp_task_set_taskloop_p (g, true);
8704 g = gimple_build_bind (NULL_TREE, g, NULL_TREE);
8705 gomp_for *gforo
8706 = gimple_build_omp_for (g, GF_OMP_FOR_KIND_TASKLOOP, outer_for_clauses,
8707 gimple_omp_for_collapse (gfor),
8708 gimple_omp_for_pre_body (gfor));
8709 gimple_omp_for_set_pre_body (gfor, NULL);
8710 gimple_omp_for_set_combined_p (gforo, true);
8711 gimple_omp_for_set_combined_into_p (gfor, true);
8712 for (i = 0; i < (int) gimple_omp_for_collapse (gfor); i++)
8713 {
8714 t = unshare_expr (gimple_omp_for_index (gfor, i));
8715 gimple_omp_for_set_index (gforo, i, t);
8716 t = unshare_expr (gimple_omp_for_initial (gfor, i));
8717 gimple_omp_for_set_initial (gforo, i, t);
8718 gimple_omp_for_set_cond (gforo, i,
8719 gimple_omp_for_cond (gfor, i));
8720 t = unshare_expr (gimple_omp_for_final (gfor, i));
8721 gimple_omp_for_set_final (gforo, i, t);
8722 t = unshare_expr (gimple_omp_for_incr (gfor, i));
8723 gimple_omp_for_set_incr (gforo, i, t);
8724 }
8725 gimplify_seq_add_stmt (pre_p, gforo);
8726 }
8727 else
8728 gimplify_seq_add_stmt (pre_p, gfor);
8729 if (ret != GS_ALL_DONE)
8730 return GS_ERROR;
8731 *expr_p = NULL_TREE;
8732 return GS_ALL_DONE;
8733 }
8734
8735 /* Helper function of optimize_target_teams, find OMP_TEAMS inside
8736 of OMP_TARGET's body. */
8737
8738 static tree
8739 find_omp_teams (tree *tp, int *walk_subtrees, void *)
8740 {
8741 *walk_subtrees = 0;
8742 switch (TREE_CODE (*tp))
8743 {
8744 case OMP_TEAMS:
8745 return *tp;
8746 case BIND_EXPR:
8747 case STATEMENT_LIST:
8748 *walk_subtrees = 1;
8749 break;
8750 default:
8751 break;
8752 }
8753 return NULL_TREE;
8754 }
8755
8756 /* Helper function of optimize_target_teams, determine if the expression
8757 can be computed safely before the target construct on the host. */
8758
static tree
computable_teams_clause (tree *tp, int *walk_subtrees, void *)
{
  /* walk_tree callback: returning non-NULL aborts the walk and signals
     to the caller (optimize_target_teams) that the expression is NOT
     safely computable on the host before the target construct.
     Returning NULL_TREE means this subtree raised no objection.  */
  splay_tree_node n;

  if (TYPE_P (*tp))
    {
      /* Types carry no runtime computation; don't look inside them.  */
      *walk_subtrees = 0;
      return NULL_TREE;
    }
  switch (TREE_CODE (*tp))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      *walk_subtrees = 0;
      /* Reject decls whose host value can differ from (or be unavailable
	 relative to) the value seen inside the target region: erroneous
	 operands, non-integral decls, value-exprs, TLS, or anything with
	 side effects or volatility.  */
      if (error_operand_p (*tp)
	  || !INTEGRAL_TYPE_P (TREE_TYPE (*tp))
	  || DECL_HAS_VALUE_EXPR_P (*tp)
	  || DECL_THREAD_LOCAL_P (*tp)
	  || TREE_SIDE_EFFECTS (*tp)
	  || TREE_THIS_VOLATILE (*tp))
	return *tp;
      /* Globals marked "omp declare target" (or "... link") live on the
	 device as well; the host copy can't be trusted to match.  */
      if (is_global_var (*tp)
	  && (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (*tp))
	      || lookup_attribute ("omp declare target link",
				   DECL_ATTRIBUTES (*tp))))
	return *tp;
      /* Consult the data-sharing info recorded for the target context.  */
      n = splay_tree_lookup (gimplify_omp_ctxp->variables,
			     (splay_tree_key) *tp);
      if (n == NULL)
	{
	  /* Not mentioned explicitly: OK only if scalars are implicitly
	     firstprivatized on this target (so the host value is the one
	     the region will see).  */
	  if (gimplify_omp_ctxp->target_map_scalars_firstprivate)
	    return NULL_TREE;
	  return *tp;
	}
      else if (n->value & GOVD_LOCAL)
	/* Local to the target region; has no host value at all.  */
	return *tp;
      else if (n->value & GOVD_FIRSTPRIVATE)
	/* Firstprivate: region copy is initialized from the host value.  */
	return NULL_TREE;
      else if ((n->value & (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
	       == (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
	/* map(always, to:) / map(always, tofrom:) also guarantees the
	   device copy starts from the host value.  */
	return NULL_TREE;
      return *tp;
    case INTEGER_CST:
      /* Integral constants are trivially computable; reject other
	 constant kinds (e.g. pointer-typed ones).  */
      if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
	return *tp;
      return NULL_TREE;
    case TARGET_EXPR:
      /* Only a bare TARGET_EXPR slot is acceptable; recheck the slot
	 decl itself with the VAR_DECL rules above.  */
      if (TARGET_EXPR_INITIAL (*tp)
	  || TREE_CODE (TARGET_EXPR_SLOT (*tp)) != VAR_DECL)
	return *tp;
      return computable_teams_clause (&TARGET_EXPR_SLOT (*tp),
				      walk_subtrees, NULL);
    /* Allow some reasonable subset of integral arithmetics.  */
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
    case NEGATE_EXPR:
    case ABS_EXPR:
    case BIT_NOT_EXPR:
    case NON_LVALUE_EXPR:
    CASE_CONVERT:
      /* Arithmetic is fine as long as it stays integral; the operands
	 are vetted by the walk visiting them separately.  */
      if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
	return *tp;
      return NULL_TREE;
    /* And disallow anything else, except for comparisons.  */
    default:
      if (COMPARISON_CLASS_P (*tp))
	return NULL_TREE;
      return *tp;
    }
}
8849
8850 /* Try to determine if the num_teams and/or thread_limit expressions
8851 can have their values determined already before entering the
8852 target construct.
8853 INTEGER_CSTs trivially are,
8854 integral decls that are firstprivate (explicitly or implicitly)
8855 or explicitly map(always, to:) or map(always, tofrom:) on the target
8856 region too, and expressions involving simple arithmetics on those
8857 too, function calls are not ok, dereferencing something neither etc.
8858 Add NUM_TEAMS and THREAD_LIMIT clauses to the OMP_CLAUSES of
8859 EXPR based on what we find:
8860 0 stands for clause not specified at all, use implementation default
8861 -1 stands for value that can't be determined easily before entering
8862 the target construct.
8863 If teams construct is not present at all, use 1 for num_teams
8864 and 0 for thread_limit (only one team is involved, and the thread
8865 limit is implementation defined. */
8866
static void
optimize_target_teams (tree target, gimple_seq *pre_p)
{
  tree body = OMP_BODY (target);
  /* Locate the teams construct (if any) nested in TARGET's body.  */
  tree teams = walk_tree (&body, find_omp_teams, NULL, NULL);
  /* 0 == clause not specified, use implementation default (see the
     function comment above for the encoding).  */
  tree num_teams = integer_zero_node;
  tree thread_limit = integer_zero_node;
  location_t num_teams_loc = EXPR_LOCATION (target);
  location_t thread_limit_loc = EXPR_LOCATION (target);
  tree c, *p, expr;
  /* Saved so the context can be restored after temporarily gimplifying
     in the enclosing (pre-target) context below.  */
  struct gimplify_omp_ctx *target_ctx = gimplify_omp_ctxp;

  if (teams == NULL_TREE)
    /* No teams construct: exactly one team runs.  */
    num_teams = integer_one_node;
  else
    for (c = OMP_TEAMS_CLAUSES (teams); c; c = OMP_CLAUSE_CHAIN (c))
      {
	/* Only num_teams and thread_limit clauses are of interest;
	   P points at whichever value we are computing.  */
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NUM_TEAMS)
	  {
	    p = &num_teams;
	    num_teams_loc = OMP_CLAUSE_LOCATION (c);
	  }
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREAD_LIMIT)
	  {
	    p = &thread_limit;
	    thread_limit_loc = OMP_CLAUSE_LOCATION (c);
	  }
	else
	  continue;
	expr = OMP_CLAUSE_OPERAND (c, 0);
	if (TREE_CODE (expr) == INTEGER_CST)
	  {
	    /* Constants are usable as-is.  */
	    *p = expr;
	    continue;
	  }
	if (walk_tree (&expr, computable_teams_clause, NULL, NULL))
	  {
	    /* Not computable before entering the target construct;
	       -1 tells the runtime the value is unknown here.  */
	    *p = integer_minus_one_node;
	    continue;
	  }
	*p = expr;
	/* Gimplify the expression in the context enclosing the target,
	   so the resulting statements run on the host before it.  */
	gimplify_omp_ctxp = gimplify_omp_ctxp->outer_context;
	if (gimplify_expr (p, pre_p, NULL, is_gimple_val, fb_rvalue)
	    == GS_ERROR)
	  {
	    gimplify_omp_ctxp = target_ctx;
	    *p = integer_minus_one_node;
	    continue;
	  }
	gimplify_omp_ctxp = target_ctx;
	/* NOTE(review): for non-decl, non-TARGET_EXPR operands the teams
	   clause is also updated to the gimplified value — presumably to
	   avoid evaluating the expression a second time; confirm against
	   omp-low handling of OMP_TEAMS clauses.  */
	if (!DECL_P (expr) && TREE_CODE (expr) != TARGET_EXPR)
	  OMP_CLAUSE_OPERAND (c, 0) = *p;
      }
  /* Prepend the computed thread_limit and num_teams clauses to the
     target's clause chain (num_teams ends up first).  */
  c = build_omp_clause (thread_limit_loc, OMP_CLAUSE_THREAD_LIMIT);
  OMP_CLAUSE_THREAD_LIMIT_EXPR (c) = thread_limit;
  OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
  OMP_TARGET_CLAUSES (target) = c;
  c = build_omp_clause (num_teams_loc, OMP_CLAUSE_NUM_TEAMS);
  OMP_CLAUSE_NUM_TEAMS_EXPR (c) = num_teams;
  OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
  OMP_TARGET_CLAUSES (target) = c;
}
8929
/* Gimplify the gross structure of several OMP constructs: the bodies and
   clauses of OMP_SECTIONS, OMP_SINGLE, OMP_TARGET, OMP_TARGET_DATA,
   OMP_TEAMS and the OpenACC kernels, parallel and data constructs,
   emitting the corresponding GIMPLE statement into *PRE_P and clearing
   *EXPR_P.  */

static void
gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
{
  tree expr = *expr_p;
  gimple *stmt;
  gimple_seq body = NULL;
  enum omp_region_type ort;

  /* Map the tree code to a region type; this drives how clauses and the
     body are gimplified below.  */
  switch (TREE_CODE (expr))
    {
    case OMP_SECTIONS:
    case OMP_SINGLE:
      ort = ORT_WORKSHARE;
      break;
    case OMP_TARGET:
      ort = OMP_TARGET_COMBINED (expr) ? ORT_COMBINED_TARGET : ORT_TARGET;
      break;
    case OACC_KERNELS:
      ort = ORT_ACC_KERNELS;
      break;
    case OACC_PARALLEL:
      ort = ORT_ACC_PARALLEL;
      break;
    case OACC_DATA:
      ort = ORT_ACC_DATA;
      break;
    case OMP_TARGET_DATA:
      ort = ORT_TARGET_DATA;
      break;
    case OMP_TEAMS:
      ort = OMP_TEAMS_COMBINED (expr) ? ORT_COMBINED_TEAMS : ORT_TEAMS;
      break;
    default:
      gcc_unreachable ();
    }
  gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ort,
			     TREE_CODE (expr));
  if (TREE_CODE (expr) == OMP_TARGET)
    optimize_target_teams (expr, pre_p);
  if ((ort & (ORT_TARGET | ORT_TARGET_DATA)) != 0)
    {
      /* Target regions get their own gimplification context so that
	 temporaries created while gimplifying the body are placed
	 inside the region's bind.  */
      push_gimplify_context ();
      gimple *g = gimplify_and_return_first (OMP_BODY (expr), &body);
      if (gimple_code (g) == GIMPLE_BIND)
	pop_gimplify_context (g);
      else
	pop_gimplify_context (NULL);
      if ((ort & ORT_TARGET_DATA) != 0)
	{
	  /* Data regions must run their "end data" builtin on every
	     exit path, so wrap the body in a GIMPLE_TRY_FINALLY whose
	     cleanup is a call to that builtin.  */
	  enum built_in_function end_ix;
	  switch (TREE_CODE (expr))
	    {
	    case OACC_DATA:
	      end_ix = BUILT_IN_GOACC_DATA_END;
	      break;
	    case OMP_TARGET_DATA:
	      end_ix = BUILT_IN_GOMP_TARGET_END_DATA;
	      break;
	    default:
	      gcc_unreachable ();
	    }
	  tree fn = builtin_decl_explicit (end_ix);
	  g = gimple_build_call (fn, 0);
	  gimple_seq cleanup = NULL;
	  gimple_seq_add_stmt (&cleanup, g);
	  g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
	  body = NULL;
	  gimple_seq_add_stmt (&body, g);
	}
    }
  else
    gimplify_and_add (OMP_BODY (expr), &body);
  gimplify_adjust_omp_clauses (pre_p, &OMP_CLAUSES (expr), TREE_CODE (expr));

  /* Build the GIMPLE statement matching the construct.  */
  switch (TREE_CODE (expr))
    {
    case OACC_DATA:
      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_DATA,
				      OMP_CLAUSES (expr));
      break;
    case OACC_KERNELS:
      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_KERNELS,
				      OMP_CLAUSES (expr));
      break;
    case OACC_PARALLEL:
      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_PARALLEL,
				      OMP_CLAUSES (expr));
      break;
    case OMP_SECTIONS:
      stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
      break;
    case OMP_SINGLE:
      stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
      break;
    case OMP_TARGET:
      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_REGION,
				      OMP_CLAUSES (expr));
      break;
    case OMP_TARGET_DATA:
      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_DATA,
				      OMP_CLAUSES (expr));
      break;
    case OMP_TEAMS:
      stmt = gimple_build_omp_teams (body, OMP_CLAUSES (expr));
      break;
    default:
      gcc_unreachable ();
    }

  gimplify_seq_add_stmt (pre_p, stmt);
  *expr_p = NULL_TREE;
}
9044
9045 /* Gimplify the gross structure of OpenACC enter/exit data, update, and OpenMP
9046 target update constructs. */
9047
9048 static void
9049 gimplify_omp_target_update (tree *expr_p, gimple_seq *pre_p)
9050 {
9051 tree expr = *expr_p;
9052 int kind;
9053 gomp_target *stmt;
9054 enum omp_region_type ort = ORT_WORKSHARE;
9055
9056 switch (TREE_CODE (expr))
9057 {
9058 case OACC_ENTER_DATA:
9059 case OACC_EXIT_DATA:
9060 kind = GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA;
9061 ort = ORT_ACC;
9062 break;
9063 case OACC_UPDATE:
9064 kind = GF_OMP_TARGET_KIND_OACC_UPDATE;
9065 ort = ORT_ACC;
9066 break;
9067 case OMP_TARGET_UPDATE:
9068 kind = GF_OMP_TARGET_KIND_UPDATE;
9069 break;
9070 case OMP_TARGET_ENTER_DATA:
9071 kind = GF_OMP_TARGET_KIND_ENTER_DATA;
9072 break;
9073 case OMP_TARGET_EXIT_DATA:
9074 kind = GF_OMP_TARGET_KIND_EXIT_DATA;
9075 break;
9076 default:
9077 gcc_unreachable ();
9078 }
9079 gimplify_scan_omp_clauses (&OMP_STANDALONE_CLAUSES (expr), pre_p,
9080 ort, TREE_CODE (expr));
9081 gimplify_adjust_omp_clauses (pre_p, &OMP_STANDALONE_CLAUSES (expr),
9082 TREE_CODE (expr));
9083 stmt = gimple_build_omp_target (NULL, kind, OMP_STANDALONE_CLAUSES (expr));
9084
9085 gimplify_seq_add_stmt (pre_p, stmt);
9086 *expr_p = NULL_TREE;
9087 }
9088
9089 /* A subroutine of gimplify_omp_atomic. The front end is supposed to have
9090 stabilized the lhs of the atomic operation as *ADDR. Return true if
9091 EXPR is this stabilized form. */
9092
9093 static bool
9094 goa_lhs_expr_p (tree expr, tree addr)
9095 {
9096 /* Also include casts to other type variants. The C front end is fond
9097 of adding these for e.g. volatile variables. This is like
9098 STRIP_TYPE_NOPS but includes the main variant lookup. */
9099 STRIP_USELESS_TYPE_CONVERSION (expr);
9100
9101 if (TREE_CODE (expr) == INDIRECT_REF)
9102 {
9103 expr = TREE_OPERAND (expr, 0);
9104 while (expr != addr
9105 && (CONVERT_EXPR_P (expr)
9106 || TREE_CODE (expr) == NON_LVALUE_EXPR)
9107 && TREE_CODE (expr) == TREE_CODE (addr)
9108 && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr)))
9109 {
9110 expr = TREE_OPERAND (expr, 0);
9111 addr = TREE_OPERAND (addr, 0);
9112 }
9113 if (expr == addr)
9114 return true;
9115 return (TREE_CODE (addr) == ADDR_EXPR
9116 && TREE_CODE (expr) == ADDR_EXPR
9117 && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
9118 }
9119 if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
9120 return true;
9121 return false;
9122 }
9123
/* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR.  If an
   expression does not involve the lhs, evaluate it into a temporary.
   Return 1 if the lhs appeared as a subexpression, 0 if it did not,
   or -1 if an error was encountered.  */

static int
goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
		    tree lhs_var)
{
  tree expr = *expr_p;
  int saw_lhs;

  /* If the whole expression is the lhs itself, just substitute the
     temporary holding the loaded value.  */
  if (goa_lhs_expr_p (expr, lhs_addr))
    {
      *expr_p = lhs_var;
      return 1;
    }
  /* Anything that is already a gimple value needs no stabilization.  */
  if (is_gimple_val (expr))
    return 0;

  saw_lhs = 0;
  switch (TREE_CODE_CLASS (TREE_CODE (expr)))
    {
    case tcc_binary:
    case tcc_comparison:
      saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
				     lhs_var);
      /* FALLTHRU: operand 0 is handled by the unary case below.  */
    case tcc_unary:
      saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
				     lhs_var);
      break;
    case tcc_expression:
      switch (TREE_CODE (expr))
	{
	case TRUTH_ANDIF_EXPR:
	case TRUTH_ORIF_EXPR:
	case TRUTH_AND_EXPR:
	case TRUTH_OR_EXPR:
	case TRUTH_XOR_EXPR:
	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
					 lhs_addr, lhs_var);
	  /* FALLTHRU: operand 0 is shared with the unary TRUTH_NOT case.  */
	case TRUTH_NOT_EXPR:
	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
					 lhs_addr, lhs_var);
	  break;
	case COMPOUND_EXPR:
	  /* Break out any preevaluations from cp_build_modify_expr.  */
	  for (; TREE_CODE (expr) == COMPOUND_EXPR;
	       expr = TREE_OPERAND (expr, 1))
	    gimplify_stmt (&TREE_OPERAND (expr, 0), pre_p);
	  *expr_p = expr;
	  return goa_stabilize_expr (expr_p, pre_p, lhs_addr, lhs_var);
	default:
	  break;
	}
      break;
    default:
      break;
    }

  /* If the lhs did not occur anywhere in EXPR, evaluate the whole
     expression into a temporary so that only the lhs access remains
     inside the atomic region.  */
  if (saw_lhs == 0)
    {
      enum gimplify_status gs;
      gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
      if (gs != GS_ALL_DONE)
	saw_lhs = -1;
    }

  return saw_lhs;
}
9194
/* Gimplify an OMP_ATOMIC statement.  Emits a GIMPLE_OMP_ATOMIC_LOAD /
   GIMPLE_OMP_ATOMIC_STORE pair into *PRE_P; for the capture and read
   forms, the captured value is left in *EXPR_P.  Returns GS_ALL_DONE
   on success, GS_ERROR otherwise.  */

static enum gimplify_status
gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
{
  tree addr = TREE_OPERAND (*expr_p, 0);
  /* OMP_ATOMIC_READ carries no rhs operand.  */
  tree rhs = TREE_CODE (*expr_p) == OMP_ATOMIC_READ
	     ? NULL : TREE_OPERAND (*expr_p, 1);
  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
  tree tmp_load;
  gomp_atomic_load *loadstmt;
  gomp_atomic_store *storestmt;

  /* TMP_LOAD receives the value loaded from *ADDR; any appearances of
     the lhs inside RHS are replaced by it by goa_stabilize_expr.  */
  tmp_load = create_tmp_reg (type);
  if (rhs && goa_stabilize_expr (&rhs, pre_p, addr, tmp_load) < 0)
    return GS_ERROR;

  if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
      != GS_ALL_DONE)
    return GS_ERROR;

  loadstmt = gimple_build_omp_atomic_load (tmp_load, addr);
  gimplify_seq_add_stmt (pre_p, loadstmt);
  if (rhs && gimplify_expr (&rhs, pre_p, NULL, is_gimple_val, fb_rvalue)
      != GS_ALL_DONE)
    return GS_ERROR;

  /* For a plain atomic read, the store writes back the loaded value.  */
  if (TREE_CODE (*expr_p) == OMP_ATOMIC_READ)
    rhs = tmp_load;
  storestmt = gimple_build_omp_atomic_store (rhs);
  gimplify_seq_add_stmt (pre_p, storestmt);
  if (OMP_ATOMIC_SEQ_CST (*expr_p))
    {
      /* Propagate the seq_cst flag to both halves of the pair.  */
      gimple_omp_atomic_set_seq_cst (loadstmt);
      gimple_omp_atomic_set_seq_cst (storestmt);
    }
  switch (TREE_CODE (*expr_p))
    {
    case OMP_ATOMIC_READ:
    case OMP_ATOMIC_CAPTURE_OLD:
      /* The captured value is the one before the store.  */
      *expr_p = tmp_load;
      gimple_omp_atomic_set_need_value (loadstmt);
      break;
    case OMP_ATOMIC_CAPTURE_NEW:
      /* The captured value is the one being stored.  */
      *expr_p = rhs;
      gimple_omp_atomic_set_need_value (storestmt);
      break;
    default:
      *expr_p = NULL;
      break;
    }

  return GS_ALL_DONE;
}
9249
9250 /* Gimplify a TRANSACTION_EXPR. This involves gimplification of the
9251 body, and adding some EH bits. */
9252
9253 static enum gimplify_status
9254 gimplify_transaction (tree *expr_p, gimple_seq *pre_p)
9255 {
9256 tree expr = *expr_p, temp, tbody = TRANSACTION_EXPR_BODY (expr);
9257 gimple *body_stmt;
9258 gtransaction *trans_stmt;
9259 gimple_seq body = NULL;
9260 int subcode = 0;
9261
9262 /* Wrap the transaction body in a BIND_EXPR so we have a context
9263 where to put decls for OMP. */
9264 if (TREE_CODE (tbody) != BIND_EXPR)
9265 {
9266 tree bind = build3 (BIND_EXPR, void_type_node, NULL, tbody, NULL);
9267 TREE_SIDE_EFFECTS (bind) = 1;
9268 SET_EXPR_LOCATION (bind, EXPR_LOCATION (tbody));
9269 TRANSACTION_EXPR_BODY (expr) = bind;
9270 }
9271
9272 push_gimplify_context ();
9273 temp = voidify_wrapper_expr (*expr_p, NULL);
9274
9275 body_stmt = gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr), &body);
9276 pop_gimplify_context (body_stmt);
9277
9278 trans_stmt = gimple_build_transaction (body, NULL);
9279 if (TRANSACTION_EXPR_OUTER (expr))
9280 subcode = GTMA_IS_OUTER;
9281 else if (TRANSACTION_EXPR_RELAXED (expr))
9282 subcode = GTMA_IS_RELAXED;
9283 gimple_transaction_set_subcode (trans_stmt, subcode);
9284
9285 gimplify_seq_add_stmt (pre_p, trans_stmt);
9286
9287 if (temp)
9288 {
9289 *expr_p = temp;
9290 return GS_OK;
9291 }
9292
9293 *expr_p = NULL_TREE;
9294 return GS_ALL_DONE;
9295 }
9296
/* Gimplify an OMP_ORDERED construct.  EXPR is the tree version.  BODY
   is the OMP_BODY of the original EXPR (which has already been
   gimplified so it's not present in the EXPR).

   Return the gimplified GIMPLE_OMP_ORDERED tuple, or a GIMPLE_NOP if
   any diagnostics were emitted.  */

static gimple *
gimplify_omp_ordered (tree expr, gimple_seq body)
{
  tree c, decls;
  int failures = 0;
  unsigned int i;
  tree source_c = NULL_TREE;
  tree sink_c = NULL_TREE;

  /* Validate any depend(sink:)/depend(source) clauses against the loop
     iteration variables recorded in the gimplify context
     (loop_iter_var holds original/gimplified decl pairs).  */
  if (gimplify_omp_ctxp)
    for (c = OMP_ORDERED_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
	  && gimplify_omp_ctxp->loop_iter_var.is_empty ()
	  && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK
	      || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE))
	{
	  /* depend clauses require an enclosing ordered(n) loop.  */
	  error_at (OMP_CLAUSE_LOCATION (c),
		    "%<ordered%> construct with %<depend%> clause must be "
		    "closely nested inside a loop with %<ordered%> clause "
		    "with a parameter");
	  failures++;
	}
      else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
	       && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
	{
	  bool fail = false;
	  /* Each sink variable must name the corresponding loop
	     iterator; on success it is replaced by the gimplified
	     decl (loop_iter_var[2*i+1]).  */
	  for (decls = OMP_CLAUSE_DECL (c), i = 0;
	       decls && TREE_CODE (decls) == TREE_LIST;
	       decls = TREE_CHAIN (decls), ++i)
	    if (i >= gimplify_omp_ctxp->loop_iter_var.length () / 2)
	      continue;
	    else if (TREE_VALUE (decls)
		     != gimplify_omp_ctxp->loop_iter_var[2 * i])
	      {
		error_at (OMP_CLAUSE_LOCATION (c),
			  "variable %qE is not an iteration "
			  "of outermost loop %d, expected %qE",
			  TREE_VALUE (decls), i + 1,
			  gimplify_omp_ctxp->loop_iter_var[2 * i]);
		fail = true;
		failures++;
	      }
	    else
	      TREE_VALUE (decls)
		= gimplify_omp_ctxp->loop_iter_var[2 * i + 1];
	  /* I counts the sink variables seen; it must equal the number
	     of recorded iteration variables.  */
	  if (!fail && i != gimplify_omp_ctxp->loop_iter_var.length () / 2)
	    {
	      error_at (OMP_CLAUSE_LOCATION (c),
			"number of variables in %<depend(sink)%> "
			"clause does not match number of "
			"iteration variables");
	      failures++;
	    }
	  sink_c = c;
	}
      else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
	       && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE)
	{
	  /* At most one depend(source) clause is allowed.  */
	  if (source_c)
	    {
	      error_at (OMP_CLAUSE_LOCATION (c),
			"more than one %<depend(source)%> clause on an "
			"%<ordered%> construct");
	      failures++;
	    }
	  else
	    source_c = c;
	}
  /* depend(source) and depend(sink:) are mutually exclusive on one
     construct.  */
  if (source_c && sink_c)
    {
      error_at (OMP_CLAUSE_LOCATION (source_c),
		"%<depend(source)%> clause specified together with "
		"%<depend(sink:)%> clauses on the same construct");
      failures++;
    }

  if (failures)
    return gimple_build_nop ();
  return gimple_build_omp_ordered (body, OMP_ORDERED_CLAUSES (expr));
}
9383
9384 /* Convert the GENERIC expression tree *EXPR_P to GIMPLE. If the
9385 expression produces a value to be used as an operand inside a GIMPLE
9386 statement, the value will be stored back in *EXPR_P. This value will
9387 be a tree of class tcc_declaration, tcc_constant, tcc_reference or
9388 an SSA_NAME. The corresponding sequence of GIMPLE statements is
9389 emitted in PRE_P and POST_P.
9390
9391 Additionally, this process may overwrite parts of the input
9392 expression during gimplification. Ideally, it should be
9393 possible to do non-destructive gimplification.
9394
9395 EXPR_P points to the GENERIC expression to convert to GIMPLE. If
9396 the expression needs to evaluate to a value to be used as
9397 an operand in a GIMPLE statement, this value will be stored in
9398 *EXPR_P on exit. This happens when the caller specifies one
9399 of fb_lvalue or fb_rvalue fallback flags.
9400
9401 PRE_P will contain the sequence of GIMPLE statements corresponding
9402 to the evaluation of EXPR and all the side-effects that must
9403 be executed before the main expression. On exit, the last
9404 statement of PRE_P is the core statement being gimplified. For
9405 instance, when gimplifying 'if (++a)' the last statement in
9406 PRE_P will be 'if (t.1)' where t.1 is the result of
9407 pre-incrementing 'a'.
9408
9409 POST_P will contain the sequence of GIMPLE statements corresponding
9410 to the evaluation of all the side-effects that must be executed
9411 after the main expression. If this is NULL, the post
9412 side-effects are stored at the end of PRE_P.
9413
9414 The reason why the output is split in two is to handle post
9415 side-effects explicitly. In some cases, an expression may have
9416 inner and outer post side-effects which need to be emitted in
9417 an order different from the one given by the recursive
9418 traversal. For instance, for the expression (*p--)++ the post
9419 side-effects of '--' must actually occur *after* the post
9420 side-effects of '++'. However, gimplification will first visit
9421 the inner expression, so if a separate POST sequence was not
9422 used, the resulting sequence would be:
9423
9424 1 t.1 = *p
9425 2 p = p - 1
9426 3 t.2 = t.1 + 1
9427 4 *p = t.2
9428
9429 However, the post-decrement operation in line #2 must not be
9430 evaluated until after the store to *p at line #4, so the
9431 correct sequence should be:
9432
9433 1 t.1 = *p
9434 2 t.2 = t.1 + 1
9435 3 *p = t.2
9436 4 p = p - 1
9437
9438 So, by specifying a separate post queue, it is possible
9439 to emit the post side-effects in the correct order.
9440 If POST_P is NULL, an internal queue will be used. Before
9441 returning to the caller, the sequence POST_P is appended to
9442 the main output sequence PRE_P.
9443
9444 GIMPLE_TEST_F points to a function that takes a tree T and
9445 returns nonzero if T is in the GIMPLE form requested by the
9446 caller. The GIMPLE predicates are in gimple.c.
9447
9448 FALLBACK tells the function what sort of a temporary we want if
9449 gimplification cannot produce an expression that complies with
9450 GIMPLE_TEST_F.
9451
9452 fb_none means that no temporary should be generated
9453 fb_rvalue means that an rvalue is OK to generate
9454 fb_lvalue means that an lvalue is OK to generate
9455 fb_either means that either is OK, but an lvalue is preferable.
9456 fb_mayfail means that gimplification may fail (in which case
9457 GS_ERROR will be returned)
9458
9459 The return value is either GS_ERROR or GS_ALL_DONE, since this
9460 function iterates until EXPR is completely gimplified or an error
9461 occurs. */
9462
9463 enum gimplify_status
9464 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
9465 bool (*gimple_test_f) (tree), fallback_t fallback)
9466 {
9467 tree tmp;
9468 gimple_seq internal_pre = NULL;
9469 gimple_seq internal_post = NULL;
9470 tree save_expr;
9471 bool is_statement;
9472 location_t saved_location;
9473 enum gimplify_status ret;
9474 gimple_stmt_iterator pre_last_gsi, post_last_gsi;
9475
9476 save_expr = *expr_p;
9477 if (save_expr == NULL_TREE)
9478 return GS_ALL_DONE;
9479
9480 /* If we are gimplifying a top-level statement, PRE_P must be valid. */
9481 is_statement = gimple_test_f == is_gimple_stmt;
9482 if (is_statement)
9483 gcc_assert (pre_p);
9484
9485 /* Consistency checks. */
9486 if (gimple_test_f == is_gimple_reg)
9487 gcc_assert (fallback & (fb_rvalue | fb_lvalue));
9488 else if (gimple_test_f == is_gimple_val
9489 || gimple_test_f == is_gimple_call_addr
9490 || gimple_test_f == is_gimple_condexpr
9491 || gimple_test_f == is_gimple_mem_rhs
9492 || gimple_test_f == is_gimple_mem_rhs_or_call
9493 || gimple_test_f == is_gimple_reg_rhs
9494 || gimple_test_f == is_gimple_reg_rhs_or_call
9495 || gimple_test_f == is_gimple_asm_val
9496 || gimple_test_f == is_gimple_mem_ref_addr)
9497 gcc_assert (fallback & fb_rvalue);
9498 else if (gimple_test_f == is_gimple_min_lval
9499 || gimple_test_f == is_gimple_lvalue)
9500 gcc_assert (fallback & fb_lvalue);
9501 else if (gimple_test_f == is_gimple_addressable)
9502 gcc_assert (fallback & fb_either);
9503 else if (gimple_test_f == is_gimple_stmt)
9504 gcc_assert (fallback == fb_none);
9505 else
9506 {
9507 /* We should have recognized the GIMPLE_TEST_F predicate to
9508 know what kind of fallback to use in case a temporary is
9509 needed to hold the value or address of *EXPR_P. */
9510 gcc_unreachable ();
9511 }
9512
9513 /* We used to check the predicate here and return immediately if it
9514 succeeds. This is wrong; the design is for gimplification to be
9515 idempotent, and for the predicates to only test for valid forms, not
9516 whether they are fully simplified. */
9517 if (pre_p == NULL)
9518 pre_p = &internal_pre;
9519
9520 if (post_p == NULL)
9521 post_p = &internal_post;
9522
9523 /* Remember the last statements added to PRE_P and POST_P. Every
9524 new statement added by the gimplification helpers needs to be
9525 annotated with location information. To centralize the
9526 responsibility, we remember the last statement that had been
9527 added to both queues before gimplifying *EXPR_P. If
9528 gimplification produces new statements in PRE_P and POST_P, those
9529 statements will be annotated with the same location information
9530 as *EXPR_P. */
9531 pre_last_gsi = gsi_last (*pre_p);
9532 post_last_gsi = gsi_last (*post_p);
9533
9534 saved_location = input_location;
9535 if (save_expr != error_mark_node
9536 && EXPR_HAS_LOCATION (*expr_p))
9537 input_location = EXPR_LOCATION (*expr_p);
9538
9539 /* Loop over the specific gimplifiers until the toplevel node
9540 remains the same. */
9541 do
9542 {
9543 /* Strip away as many useless type conversions as possible
9544 at the toplevel. */
9545 STRIP_USELESS_TYPE_CONVERSION (*expr_p);
9546
9547 /* Remember the expr. */
9548 save_expr = *expr_p;
9549
9550 /* Die, die, die, my darling. */
9551 if (save_expr == error_mark_node
9552 || (TREE_TYPE (save_expr)
9553 && TREE_TYPE (save_expr) == error_mark_node))
9554 {
9555 ret = GS_ERROR;
9556 break;
9557 }
9558
9559 /* Do any language-specific gimplification. */
9560 ret = ((enum gimplify_status)
9561 lang_hooks.gimplify_expr (expr_p, pre_p, post_p));
9562 if (ret == GS_OK)
9563 {
9564 if (*expr_p == NULL_TREE)
9565 break;
9566 if (*expr_p != save_expr)
9567 continue;
9568 }
9569 else if (ret != GS_UNHANDLED)
9570 break;
9571
9572 /* Make sure that all the cases set 'ret' appropriately. */
9573 ret = GS_UNHANDLED;
9574 switch (TREE_CODE (*expr_p))
9575 {
9576 /* First deal with the special cases. */
9577
9578 case POSTINCREMENT_EXPR:
9579 case POSTDECREMENT_EXPR:
9580 case PREINCREMENT_EXPR:
9581 case PREDECREMENT_EXPR:
9582 ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
9583 fallback != fb_none,
9584 TREE_TYPE (*expr_p));
9585 break;
9586
9587 case VIEW_CONVERT_EXPR:
9588 if (is_gimple_reg_type (TREE_TYPE (*expr_p))
9589 && is_gimple_reg_type (TREE_TYPE (TREE_OPERAND (*expr_p, 0))))
9590 {
9591 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
9592 post_p, is_gimple_val, fb_rvalue);
9593 recalculate_side_effects (*expr_p);
9594 break;
9595 }
9596 /* Fallthru. */
9597
9598 case ARRAY_REF:
9599 case ARRAY_RANGE_REF:
9600 case REALPART_EXPR:
9601 case IMAGPART_EXPR:
9602 case COMPONENT_REF:
9603 ret = gimplify_compound_lval (expr_p, pre_p, post_p,
9604 fallback ? fallback : fb_rvalue);
9605 break;
9606
9607 case COND_EXPR:
9608 ret = gimplify_cond_expr (expr_p, pre_p, fallback);
9609
9610 /* C99 code may assign to an array in a structure value of a
9611 conditional expression, and this has undefined behavior
9612 only on execution, so create a temporary if an lvalue is
9613 required. */
9614 if (fallback == fb_lvalue)
9615 {
9616 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
9617 mark_addressable (*expr_p);
9618 ret = GS_OK;
9619 }
9620 break;
9621
9622 case CALL_EXPR:
9623 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
9624
9625 /* C99 code may assign to an array in a structure returned
9626 from a function, and this has undefined behavior only on
9627 execution, so create a temporary if an lvalue is
9628 required. */
9629 if (fallback == fb_lvalue)
9630 {
9631 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
9632 mark_addressable (*expr_p);
9633 ret = GS_OK;
9634 }
9635 break;
9636
9637 case TREE_LIST:
9638 gcc_unreachable ();
9639
9640 case COMPOUND_EXPR:
9641 ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
9642 break;
9643
9644 case COMPOUND_LITERAL_EXPR:
9645 ret = gimplify_compound_literal_expr (expr_p, pre_p,
9646 gimple_test_f, fallback);
9647 break;
9648
9649 case MODIFY_EXPR:
9650 case INIT_EXPR:
9651 ret = gimplify_modify_expr (expr_p, pre_p, post_p,
9652 fallback != fb_none);
9653 break;
9654
9655 case TRUTH_ANDIF_EXPR:
9656 case TRUTH_ORIF_EXPR:
9657 {
9658 /* Preserve the original type of the expression and the
9659 source location of the outer expression. */
9660 tree org_type = TREE_TYPE (*expr_p);
9661 *expr_p = gimple_boolify (*expr_p);
9662 *expr_p = build3_loc (input_location, COND_EXPR,
9663 org_type, *expr_p,
9664 fold_convert_loc
9665 (input_location,
9666 org_type, boolean_true_node),
9667 fold_convert_loc
9668 (input_location,
9669 org_type, boolean_false_node));
9670 ret = GS_OK;
9671 break;
9672 }
9673
9674 case TRUTH_NOT_EXPR:
9675 {
9676 tree type = TREE_TYPE (*expr_p);
9677 /* The parsers are careful to generate TRUTH_NOT_EXPR
9678 only with operands that are always zero or one.
9679 We do not fold here but handle the only interesting case
9680 manually, as fold may re-introduce the TRUTH_NOT_EXPR. */
9681 *expr_p = gimple_boolify (*expr_p);
9682 if (TYPE_PRECISION (TREE_TYPE (*expr_p)) == 1)
9683 *expr_p = build1_loc (input_location, BIT_NOT_EXPR,
9684 TREE_TYPE (*expr_p),
9685 TREE_OPERAND (*expr_p, 0));
9686 else
9687 *expr_p = build2_loc (input_location, BIT_XOR_EXPR,
9688 TREE_TYPE (*expr_p),
9689 TREE_OPERAND (*expr_p, 0),
9690 build_int_cst (TREE_TYPE (*expr_p), 1));
9691 if (!useless_type_conversion_p (type, TREE_TYPE (*expr_p)))
9692 *expr_p = fold_convert_loc (input_location, type, *expr_p);
9693 ret = GS_OK;
9694 break;
9695 }
9696
9697 case ADDR_EXPR:
9698 ret = gimplify_addr_expr (expr_p, pre_p, post_p);
9699 break;
9700
9701 case ANNOTATE_EXPR:
9702 {
9703 tree cond = TREE_OPERAND (*expr_p, 0);
9704 tree kind = TREE_OPERAND (*expr_p, 1);
9705 tree type = TREE_TYPE (cond);
9706 if (!INTEGRAL_TYPE_P (type))
9707 {
9708 *expr_p = cond;
9709 ret = GS_OK;
9710 break;
9711 }
9712 tree tmp = create_tmp_var (type);
9713 gimplify_arg (&cond, pre_p, EXPR_LOCATION (*expr_p));
9714 gcall *call
9715 = gimple_build_call_internal (IFN_ANNOTATE, 2, cond, kind);
9716 gimple_call_set_lhs (call, tmp);
9717 gimplify_seq_add_stmt (pre_p, call);
9718 *expr_p = tmp;
9719 ret = GS_ALL_DONE;
9720 break;
9721 }
9722
9723 case VA_ARG_EXPR:
9724 ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
9725 break;
9726
9727 CASE_CONVERT:
9728 if (IS_EMPTY_STMT (*expr_p))
9729 {
9730 ret = GS_ALL_DONE;
9731 break;
9732 }
9733
9734 if (VOID_TYPE_P (TREE_TYPE (*expr_p))
9735 || fallback == fb_none)
9736 {
9737 /* Just strip a conversion to void (or in void context) and
9738 try again. */
9739 *expr_p = TREE_OPERAND (*expr_p, 0);
9740 ret = GS_OK;
9741 break;
9742 }
9743
9744 ret = gimplify_conversion (expr_p);
9745 if (ret == GS_ERROR)
9746 break;
9747 if (*expr_p != save_expr)
9748 break;
9749 /* FALLTHRU */
9750
9751 case FIX_TRUNC_EXPR:
9752 /* unary_expr: ... | '(' cast ')' val | ... */
9753 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
9754 is_gimple_val, fb_rvalue);
9755 recalculate_side_effects (*expr_p);
9756 break;
9757
9758 case INDIRECT_REF:
9759 {
9760 bool volatilep = TREE_THIS_VOLATILE (*expr_p);
9761 bool notrap = TREE_THIS_NOTRAP (*expr_p);
9762 tree saved_ptr_type = TREE_TYPE (TREE_OPERAND (*expr_p, 0));
9763
9764 *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
9765 if (*expr_p != save_expr)
9766 {
9767 ret = GS_OK;
9768 break;
9769 }
9770
9771 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
9772 is_gimple_reg, fb_rvalue);
9773 if (ret == GS_ERROR)
9774 break;
9775
9776 recalculate_side_effects (*expr_p);
9777 *expr_p = fold_build2_loc (input_location, MEM_REF,
9778 TREE_TYPE (*expr_p),
9779 TREE_OPERAND (*expr_p, 0),
9780 build_int_cst (saved_ptr_type, 0));
9781 TREE_THIS_VOLATILE (*expr_p) = volatilep;
9782 TREE_THIS_NOTRAP (*expr_p) = notrap;
9783 ret = GS_OK;
9784 break;
9785 }
9786
9787 /* We arrive here through the various re-gimplifcation paths. */
9788 case MEM_REF:
9789 /* First try re-folding the whole thing. */
9790 tmp = fold_binary (MEM_REF, TREE_TYPE (*expr_p),
9791 TREE_OPERAND (*expr_p, 0),
9792 TREE_OPERAND (*expr_p, 1));
9793 if (tmp)
9794 {
9795 REF_REVERSE_STORAGE_ORDER (tmp)
9796 = REF_REVERSE_STORAGE_ORDER (*expr_p);
9797 *expr_p = tmp;
9798 recalculate_side_effects (*expr_p);
9799 ret = GS_OK;
9800 break;
9801 }
9802 /* Avoid re-gimplifying the address operand if it is already
9803 in suitable form. Re-gimplifying would mark the address
9804 operand addressable. Always gimplify when not in SSA form
9805 as we still may have to gimplify decls with value-exprs. */
9806 if (!gimplify_ctxp || !gimplify_ctxp->into_ssa
9807 || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p, 0)))
9808 {
9809 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
9810 is_gimple_mem_ref_addr, fb_rvalue);
9811 if (ret == GS_ERROR)
9812 break;
9813 }
9814 recalculate_side_effects (*expr_p);
9815 ret = GS_ALL_DONE;
9816 break;
9817
9818 /* Constants need not be gimplified. */
9819 case INTEGER_CST:
9820 case REAL_CST:
9821 case FIXED_CST:
9822 case STRING_CST:
9823 case COMPLEX_CST:
9824 case VECTOR_CST:
9825 /* Drop the overflow flag on constants, we do not want
9826 that in the GIMPLE IL. */
9827 if (TREE_OVERFLOW_P (*expr_p))
9828 *expr_p = drop_tree_overflow (*expr_p);
9829 ret = GS_ALL_DONE;
9830 break;
9831
9832 case CONST_DECL:
9833 /* If we require an lvalue, such as for ADDR_EXPR, retain the
9834 CONST_DECL node. Otherwise the decl is replaceable by its
9835 value. */
9836 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
9837 if (fallback & fb_lvalue)
9838 ret = GS_ALL_DONE;
9839 else
9840 {
9841 *expr_p = DECL_INITIAL (*expr_p);
9842 ret = GS_OK;
9843 }
9844 break;
9845
9846 case DECL_EXPR:
9847 ret = gimplify_decl_expr (expr_p, pre_p);
9848 break;
9849
9850 case BIND_EXPR:
9851 ret = gimplify_bind_expr (expr_p, pre_p);
9852 break;
9853
9854 case LOOP_EXPR:
9855 ret = gimplify_loop_expr (expr_p, pre_p);
9856 break;
9857
9858 case SWITCH_EXPR:
9859 ret = gimplify_switch_expr (expr_p, pre_p);
9860 break;
9861
9862 case EXIT_EXPR:
9863 ret = gimplify_exit_expr (expr_p);
9864 break;
9865
9866 case GOTO_EXPR:
9867 /* If the target is not LABEL, then it is a computed jump
9868 and the target needs to be gimplified. */
9869 if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
9870 {
9871 ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
9872 NULL, is_gimple_val, fb_rvalue);
9873 if (ret == GS_ERROR)
9874 break;
9875 }
9876 gimplify_seq_add_stmt (pre_p,
9877 gimple_build_goto (GOTO_DESTINATION (*expr_p)));
9878 ret = GS_ALL_DONE;
9879 break;
9880
9881 case PREDICT_EXPR:
9882 gimplify_seq_add_stmt (pre_p,
9883 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
9884 PREDICT_EXPR_OUTCOME (*expr_p)));
9885 ret = GS_ALL_DONE;
9886 break;
9887
9888 case LABEL_EXPR:
9889 ret = GS_ALL_DONE;
9890 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
9891 == current_function_decl);
9892 gimplify_seq_add_stmt (pre_p,
9893 gimple_build_label (LABEL_EXPR_LABEL (*expr_p)));
9894 break;
9895
9896 case CASE_LABEL_EXPR:
9897 ret = gimplify_case_label_expr (expr_p, pre_p);
9898 break;
9899
9900 case RETURN_EXPR:
9901 ret = gimplify_return_expr (*expr_p, pre_p);
9902 break;
9903
9904 case CONSTRUCTOR:
9905 /* Don't reduce this in place; let gimplify_init_constructor work its
9906 magic. Buf if we're just elaborating this for side effects, just
9907 gimplify any element that has side-effects. */
9908 if (fallback == fb_none)
9909 {
9910 unsigned HOST_WIDE_INT ix;
9911 tree val;
9912 tree temp = NULL_TREE;
9913 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p), ix, val)
9914 if (TREE_SIDE_EFFECTS (val))
9915 append_to_statement_list (val, &temp);
9916
9917 *expr_p = temp;
9918 ret = temp ? GS_OK : GS_ALL_DONE;
9919 }
9920 /* C99 code may assign to an array in a constructed
9921 structure or union, and this has undefined behavior only
9922 on execution, so create a temporary if an lvalue is
9923 required. */
9924 else if (fallback == fb_lvalue)
9925 {
9926 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
9927 mark_addressable (*expr_p);
9928 ret = GS_OK;
9929 }
9930 else
9931 ret = GS_ALL_DONE;
9932 break;
9933
9934 /* The following are special cases that are not handled by the
9935 original GIMPLE grammar. */
9936
9937 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
9938 eliminated. */
9939 case SAVE_EXPR:
9940 ret = gimplify_save_expr (expr_p, pre_p, post_p);
9941 break;
9942
9943 case BIT_FIELD_REF:
9944 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
9945 post_p, is_gimple_lvalue, fb_either);
9946 recalculate_side_effects (*expr_p);
9947 break;
9948
9949 case TARGET_MEM_REF:
9950 {
9951 enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;
9952
9953 if (TMR_BASE (*expr_p))
9954 r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
9955 post_p, is_gimple_mem_ref_addr, fb_either);
9956 if (TMR_INDEX (*expr_p))
9957 r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
9958 post_p, is_gimple_val, fb_rvalue);
9959 if (TMR_INDEX2 (*expr_p))
9960 r1 = gimplify_expr (&TMR_INDEX2 (*expr_p), pre_p,
9961 post_p, is_gimple_val, fb_rvalue);
9962 /* TMR_STEP and TMR_OFFSET are always integer constants. */
9963 ret = MIN (r0, r1);
9964 }
9965 break;
9966
9967 case NON_LVALUE_EXPR:
9968 /* This should have been stripped above. */
9969 gcc_unreachable ();
9970
9971 case ASM_EXPR:
9972 ret = gimplify_asm_expr (expr_p, pre_p, post_p);
9973 break;
9974
9975 case TRY_FINALLY_EXPR:
9976 case TRY_CATCH_EXPR:
9977 {
9978 gimple_seq eval, cleanup;
9979 gtry *try_;
9980
9981 /* Calls to destructors are generated automatically in FINALLY/CATCH
9982 block. They should have location as UNKNOWN_LOCATION. However,
9983 gimplify_call_expr will reset these call stmts to input_location
9984 if it finds stmt's location is unknown. To prevent resetting for
9985 destructors, we set the input_location to unknown.
9986 Note that this only affects the destructor calls in FINALLY/CATCH
9987 block, and will automatically reset to its original value by the
9988 end of gimplify_expr. */
9989 input_location = UNKNOWN_LOCATION;
9990 eval = cleanup = NULL;
9991 gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
9992 gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
9993 /* Don't create bogus GIMPLE_TRY with empty cleanup. */
9994 if (gimple_seq_empty_p (cleanup))
9995 {
9996 gimple_seq_add_seq (pre_p, eval);
9997 ret = GS_ALL_DONE;
9998 break;
9999 }
10000 try_ = gimple_build_try (eval, cleanup,
10001 TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
10002 ? GIMPLE_TRY_FINALLY
10003 : GIMPLE_TRY_CATCH);
10004 if (EXPR_HAS_LOCATION (save_expr))
10005 gimple_set_location (try_, EXPR_LOCATION (save_expr));
10006 else if (LOCATION_LOCUS (saved_location) != UNKNOWN_LOCATION)
10007 gimple_set_location (try_, saved_location);
10008 if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
10009 gimple_try_set_catch_is_cleanup (try_,
10010 TRY_CATCH_IS_CLEANUP (*expr_p));
10011 gimplify_seq_add_stmt (pre_p, try_);
10012 ret = GS_ALL_DONE;
10013 break;
10014 }
10015
10016 case CLEANUP_POINT_EXPR:
10017 ret = gimplify_cleanup_point_expr (expr_p, pre_p);
10018 break;
10019
10020 case TARGET_EXPR:
10021 ret = gimplify_target_expr (expr_p, pre_p, post_p);
10022 break;
10023
10024 case CATCH_EXPR:
10025 {
10026 gimple *c;
10027 gimple_seq handler = NULL;
10028 gimplify_and_add (CATCH_BODY (*expr_p), &handler);
10029 c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
10030 gimplify_seq_add_stmt (pre_p, c);
10031 ret = GS_ALL_DONE;
10032 break;
10033 }
10034
10035 case EH_FILTER_EXPR:
10036 {
10037 gimple *ehf;
10038 gimple_seq failure = NULL;
10039
10040 gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
10041 ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
10042 gimple_set_no_warning (ehf, TREE_NO_WARNING (*expr_p));
10043 gimplify_seq_add_stmt (pre_p, ehf);
10044 ret = GS_ALL_DONE;
10045 break;
10046 }
10047
10048 case OBJ_TYPE_REF:
10049 {
10050 enum gimplify_status r0, r1;
10051 r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
10052 post_p, is_gimple_val, fb_rvalue);
10053 r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
10054 post_p, is_gimple_val, fb_rvalue);
10055 TREE_SIDE_EFFECTS (*expr_p) = 0;
10056 ret = MIN (r0, r1);
10057 }
10058 break;
10059
10060 case LABEL_DECL:
10061 /* We get here when taking the address of a label. We mark
10062 the label as "forced"; meaning it can never be removed and
10063 it is a potential target for any computed goto. */
10064 FORCED_LABEL (*expr_p) = 1;
10065 ret = GS_ALL_DONE;
10066 break;
10067
10068 case STATEMENT_LIST:
10069 ret = gimplify_statement_list (expr_p, pre_p);
10070 break;
10071
10072 case WITH_SIZE_EXPR:
10073 {
10074 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
10075 post_p == &internal_post ? NULL : post_p,
10076 gimple_test_f, fallback);
10077 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
10078 is_gimple_val, fb_rvalue);
10079 ret = GS_ALL_DONE;
10080 }
10081 break;
10082
10083 case VAR_DECL:
10084 case PARM_DECL:
10085 ret = gimplify_var_or_parm_decl (expr_p);
10086 break;
10087
10088 case RESULT_DECL:
10089 /* When within an OMP context, notice uses of variables. */
10090 if (gimplify_omp_ctxp)
10091 omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
10092 ret = GS_ALL_DONE;
10093 break;
10094
10095 case SSA_NAME:
10096 /* Allow callbacks into the gimplifier during optimization. */
10097 ret = GS_ALL_DONE;
10098 break;
10099
10100 case OMP_PARALLEL:
10101 gimplify_omp_parallel (expr_p, pre_p);
10102 ret = GS_ALL_DONE;
10103 break;
10104
10105 case OMP_TASK:
10106 gimplify_omp_task (expr_p, pre_p);
10107 ret = GS_ALL_DONE;
10108 break;
10109
10110 case OMP_FOR:
10111 case OMP_SIMD:
10112 case CILK_SIMD:
10113 case CILK_FOR:
10114 case OMP_DISTRIBUTE:
10115 case OMP_TASKLOOP:
10116 case OACC_LOOP:
10117 ret = gimplify_omp_for (expr_p, pre_p);
10118 break;
10119
10120 case OACC_CACHE:
10121 gimplify_oacc_cache (expr_p, pre_p);
10122 ret = GS_ALL_DONE;
10123 break;
10124
10125 case OACC_HOST_DATA:
10126 case OACC_DECLARE:
10127 sorry ("directive not yet implemented");
10128 ret = GS_ALL_DONE;
10129 break;
10130
10131 case OACC_DATA:
10132 case OACC_KERNELS:
10133 case OACC_PARALLEL:
10134 case OMP_SECTIONS:
10135 case OMP_SINGLE:
10136 case OMP_TARGET:
10137 case OMP_TARGET_DATA:
10138 case OMP_TEAMS:
10139 gimplify_omp_workshare (expr_p, pre_p);
10140 ret = GS_ALL_DONE;
10141 break;
10142
10143 case OACC_ENTER_DATA:
10144 case OACC_EXIT_DATA:
10145 case OACC_UPDATE:
10146 case OMP_TARGET_UPDATE:
10147 case OMP_TARGET_ENTER_DATA:
10148 case OMP_TARGET_EXIT_DATA:
10149 gimplify_omp_target_update (expr_p, pre_p);
10150 ret = GS_ALL_DONE;
10151 break;
10152
10153 case OMP_SECTION:
10154 case OMP_MASTER:
10155 case OMP_TASKGROUP:
10156 case OMP_ORDERED:
10157 case OMP_CRITICAL:
10158 {
10159 gimple_seq body = NULL;
10160 gimple *g;
10161
10162 gimplify_and_add (OMP_BODY (*expr_p), &body);
10163 switch (TREE_CODE (*expr_p))
10164 {
10165 case OMP_SECTION:
10166 g = gimple_build_omp_section (body);
10167 break;
10168 case OMP_MASTER:
10169 g = gimple_build_omp_master (body);
10170 break;
10171 case OMP_TASKGROUP:
10172 {
10173 gimple_seq cleanup = NULL;
10174 tree fn
10175 = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_END);
10176 g = gimple_build_call (fn, 0);
10177 gimple_seq_add_stmt (&cleanup, g);
10178 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
10179 body = NULL;
10180 gimple_seq_add_stmt (&body, g);
10181 g = gimple_build_omp_taskgroup (body);
10182 }
10183 break;
10184 case OMP_ORDERED:
10185 g = gimplify_omp_ordered (*expr_p, body);
10186 break;
10187 case OMP_CRITICAL:
10188 gimplify_scan_omp_clauses (&OMP_CRITICAL_CLAUSES (*expr_p),
10189 pre_p, ORT_WORKSHARE, OMP_CRITICAL);
10190 gimplify_adjust_omp_clauses (pre_p,
10191 &OMP_CRITICAL_CLAUSES (*expr_p),
10192 OMP_CRITICAL);
10193 g = gimple_build_omp_critical (body,
10194 OMP_CRITICAL_NAME (*expr_p),
10195 OMP_CRITICAL_CLAUSES (*expr_p));
10196 break;
10197 default:
10198 gcc_unreachable ();
10199 }
10200 gimplify_seq_add_stmt (pre_p, g);
10201 ret = GS_ALL_DONE;
10202 break;
10203 }
10204
10205 case OMP_ATOMIC:
10206 case OMP_ATOMIC_READ:
10207 case OMP_ATOMIC_CAPTURE_OLD:
10208 case OMP_ATOMIC_CAPTURE_NEW:
10209 ret = gimplify_omp_atomic (expr_p, pre_p);
10210 break;
10211
10212 case TRANSACTION_EXPR:
10213 ret = gimplify_transaction (expr_p, pre_p);
10214 break;
10215
10216 case TRUTH_AND_EXPR:
10217 case TRUTH_OR_EXPR:
10218 case TRUTH_XOR_EXPR:
10219 {
10220 tree orig_type = TREE_TYPE (*expr_p);
10221 tree new_type, xop0, xop1;
10222 *expr_p = gimple_boolify (*expr_p);
10223 new_type = TREE_TYPE (*expr_p);
10224 if (!useless_type_conversion_p (orig_type, new_type))
10225 {
10226 *expr_p = fold_convert_loc (input_location, orig_type, *expr_p);
10227 ret = GS_OK;
10228 break;
10229 }
10230
10231 /* Boolified binary truth expressions are semantically equivalent
10232 to bitwise binary expressions. Canonicalize them to the
10233 bitwise variant. */
10234 switch (TREE_CODE (*expr_p))
10235 {
10236 case TRUTH_AND_EXPR:
10237 TREE_SET_CODE (*expr_p, BIT_AND_EXPR);
10238 break;
10239 case TRUTH_OR_EXPR:
10240 TREE_SET_CODE (*expr_p, BIT_IOR_EXPR);
10241 break;
10242 case TRUTH_XOR_EXPR:
10243 TREE_SET_CODE (*expr_p, BIT_XOR_EXPR);
10244 break;
10245 default:
10246 break;
10247 }
10248 /* Now make sure that operands have compatible type to
10249 expression's new_type. */
10250 xop0 = TREE_OPERAND (*expr_p, 0);
10251 xop1 = TREE_OPERAND (*expr_p, 1);
10252 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop0)))
10253 TREE_OPERAND (*expr_p, 0) = fold_convert_loc (input_location,
10254 new_type,
10255 xop0);
10256 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop1)))
10257 TREE_OPERAND (*expr_p, 1) = fold_convert_loc (input_location,
10258 new_type,
10259 xop1);
10260 /* Continue classified as tcc_binary. */
10261 goto expr_2;
10262 }
10263
10264 case FMA_EXPR:
10265 case VEC_COND_EXPR:
10266 case VEC_PERM_EXPR:
10267 /* Classified as tcc_expression. */
10268 goto expr_3;
10269
10270 case POINTER_PLUS_EXPR:
10271 {
10272 enum gimplify_status r0, r1;
10273 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
10274 post_p, is_gimple_val, fb_rvalue);
10275 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
10276 post_p, is_gimple_val, fb_rvalue);
10277 recalculate_side_effects (*expr_p);
10278 ret = MIN (r0, r1);
10279 break;
10280 }
10281
10282 case CILK_SYNC_STMT:
10283 {
10284 if (!fn_contains_cilk_spawn_p (cfun))
10285 {
10286 error_at (EXPR_LOCATION (*expr_p),
10287 "expected %<_Cilk_spawn%> before %<_Cilk_sync%>");
10288 ret = GS_ERROR;
10289 }
10290 else
10291 {
10292 gimplify_cilk_sync (expr_p, pre_p);
10293 ret = GS_ALL_DONE;
10294 }
10295 break;
10296 }
10297
10298 default:
10299 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
10300 {
10301 case tcc_comparison:
10302 /* Handle comparison of objects of non scalar mode aggregates
10303 with a call to memcmp. It would be nice to only have to do
10304 this for variable-sized objects, but then we'd have to allow
10305 the same nest of reference nodes we allow for MODIFY_EXPR and
10306 that's too complex.
10307
10308 Compare scalar mode aggregates as scalar mode values. Using
10309 memcmp for them would be very inefficient at best, and is
10310 plain wrong if bitfields are involved. */
10311 {
10312 tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
10313
10314 /* Vector comparisons need no boolification. */
10315 if (TREE_CODE (type) == VECTOR_TYPE)
10316 goto expr_2;
10317 else if (!AGGREGATE_TYPE_P (type))
10318 {
10319 tree org_type = TREE_TYPE (*expr_p);
10320 *expr_p = gimple_boolify (*expr_p);
10321 if (!useless_type_conversion_p (org_type,
10322 TREE_TYPE (*expr_p)))
10323 {
10324 *expr_p = fold_convert_loc (input_location,
10325 org_type, *expr_p);
10326 ret = GS_OK;
10327 }
10328 else
10329 goto expr_2;
10330 }
10331 else if (TYPE_MODE (type) != BLKmode)
10332 ret = gimplify_scalar_mode_aggregate_compare (expr_p);
10333 else
10334 ret = gimplify_variable_sized_compare (expr_p);
10335
10336 break;
10337 }
10338
10339 /* If *EXPR_P does not need to be special-cased, handle it
10340 according to its class. */
10341 case tcc_unary:
10342 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
10343 post_p, is_gimple_val, fb_rvalue);
10344 break;
10345
10346 case tcc_binary:
10347 expr_2:
10348 {
10349 enum gimplify_status r0, r1;
10350
10351 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
10352 post_p, is_gimple_val, fb_rvalue);
10353 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
10354 post_p, is_gimple_val, fb_rvalue);
10355
10356 ret = MIN (r0, r1);
10357 break;
10358 }
10359
10360 expr_3:
10361 {
10362 enum gimplify_status r0, r1, r2;
10363
10364 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
10365 post_p, is_gimple_val, fb_rvalue);
10366 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
10367 post_p, is_gimple_val, fb_rvalue);
10368 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
10369 post_p, is_gimple_val, fb_rvalue);
10370
10371 ret = MIN (MIN (r0, r1), r2);
10372 break;
10373 }
10374
10375 case tcc_declaration:
10376 case tcc_constant:
10377 ret = GS_ALL_DONE;
10378 goto dont_recalculate;
10379
10380 default:
10381 gcc_unreachable ();
10382 }
10383
10384 recalculate_side_effects (*expr_p);
10385
10386 dont_recalculate:
10387 break;
10388 }
10389
10390 gcc_assert (*expr_p || ret != GS_OK);
10391 }
10392 while (ret == GS_OK);
10393
10394 /* If we encountered an error_mark somewhere nested inside, either
10395 stub out the statement or propagate the error back out. */
10396 if (ret == GS_ERROR)
10397 {
10398 if (is_statement)
10399 *expr_p = NULL;
10400 goto out;
10401 }
10402
10403 /* This was only valid as a return value from the langhook, which
10404 we handled. Make sure it doesn't escape from any other context. */
10405 gcc_assert (ret != GS_UNHANDLED);
10406
10407 if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
10408 {
10409 /* We aren't looking for a value, and we don't have a valid
10410 statement. If it doesn't have side-effects, throw it away. */
10411 if (!TREE_SIDE_EFFECTS (*expr_p))
10412 *expr_p = NULL;
10413 else if (!TREE_THIS_VOLATILE (*expr_p))
10414 {
10415 /* This is probably a _REF that contains something nested that
10416 has side effects. Recurse through the operands to find it. */
10417 enum tree_code code = TREE_CODE (*expr_p);
10418
10419 switch (code)
10420 {
10421 case COMPONENT_REF:
10422 case REALPART_EXPR:
10423 case IMAGPART_EXPR:
10424 case VIEW_CONVERT_EXPR:
10425 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
10426 gimple_test_f, fallback);
10427 break;
10428
10429 case ARRAY_REF:
10430 case ARRAY_RANGE_REF:
10431 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
10432 gimple_test_f, fallback);
10433 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
10434 gimple_test_f, fallback);
10435 break;
10436
10437 default:
10438 /* Anything else with side-effects must be converted to
10439 a valid statement before we get here. */
10440 gcc_unreachable ();
10441 }
10442
10443 *expr_p = NULL;
10444 }
10445 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
10446 && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode)
10447 {
10448 /* Historically, the compiler has treated a bare reference
10449 to a non-BLKmode volatile lvalue as forcing a load. */
10450 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
10451
10452 /* Normally, we do not want to create a temporary for a
10453 TREE_ADDRESSABLE type because such a type should not be
10454 copied by bitwise-assignment. However, we make an
10455 exception here, as all we are doing here is ensuring that
10456 we read the bytes that make up the type. We use
10457 create_tmp_var_raw because create_tmp_var will abort when
10458 given a TREE_ADDRESSABLE type. */
10459 tree tmp = create_tmp_var_raw (type, "vol");
10460 gimple_add_tmp_var (tmp);
10461 gimplify_assign (tmp, *expr_p, pre_p);
10462 *expr_p = NULL;
10463 }
10464 else
10465 /* We can't do anything useful with a volatile reference to
10466 an incomplete type, so just throw it away. Likewise for
10467 a BLKmode type, since any implicit inner load should
10468 already have been turned into an explicit one by the
10469 gimplification process. */
10470 *expr_p = NULL;
10471 }
10472
10473 /* If we are gimplifying at the statement level, we're done. Tack
10474 everything together and return. */
10475 if (fallback == fb_none || is_statement)
10476 {
10477 /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
10478 it out for GC to reclaim it. */
10479 *expr_p = NULL_TREE;
10480
10481 if (!gimple_seq_empty_p (internal_pre)
10482 || !gimple_seq_empty_p (internal_post))
10483 {
10484 gimplify_seq_add_seq (&internal_pre, internal_post);
10485 gimplify_seq_add_seq (pre_p, internal_pre);
10486 }
10487
10488 /* The result of gimplifying *EXPR_P is going to be the last few
10489 statements in *PRE_P and *POST_P. Add location information
10490 to all the statements that were added by the gimplification
10491 helpers. */
10492 if (!gimple_seq_empty_p (*pre_p))
10493 annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);
10494
10495 if (!gimple_seq_empty_p (*post_p))
10496 annotate_all_with_location_after (*post_p, post_last_gsi,
10497 input_location);
10498
10499 goto out;
10500 }
10501
10502 #ifdef ENABLE_GIMPLE_CHECKING
10503 if (*expr_p)
10504 {
10505 enum tree_code code = TREE_CODE (*expr_p);
10506 /* These expressions should already be in gimple IR form. */
10507 gcc_assert (code != MODIFY_EXPR
10508 && code != ASM_EXPR
10509 && code != BIND_EXPR
10510 && code != CATCH_EXPR
10511 && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
10512 && code != EH_FILTER_EXPR
10513 && code != GOTO_EXPR
10514 && code != LABEL_EXPR
10515 && code != LOOP_EXPR
10516 && code != SWITCH_EXPR
10517 && code != TRY_FINALLY_EXPR
10518 && code != OACC_PARALLEL
10519 && code != OACC_KERNELS
10520 && code != OACC_DATA
10521 && code != OACC_HOST_DATA
10522 && code != OACC_DECLARE
10523 && code != OACC_UPDATE
10524 && code != OACC_ENTER_DATA
10525 && code != OACC_EXIT_DATA
10526 && code != OACC_CACHE
10527 && code != OMP_CRITICAL
10528 && code != OMP_FOR
10529 && code != OACC_LOOP
10530 && code != OMP_MASTER
10531 && code != OMP_TASKGROUP
10532 && code != OMP_ORDERED
10533 && code != OMP_PARALLEL
10534 && code != OMP_SECTIONS
10535 && code != OMP_SECTION
10536 && code != OMP_SINGLE);
10537 }
10538 #endif
10539
10540 /* Otherwise we're gimplifying a subexpression, so the resulting
10541 value is interesting. If it's a valid operand that matches
10542 GIMPLE_TEST_F, we're done. Unless we are handling some
10543 post-effects internally; if that's the case, we need to copy into
10544 a temporary before adding the post-effects to POST_P. */
10545 if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
10546 goto out;
10547
10548 /* Otherwise, we need to create a new temporary for the gimplified
10549 expression. */
10550
10551 /* We can't return an lvalue if we have an internal postqueue. The
10552 object the lvalue refers to would (probably) be modified by the
10553 postqueue; we need to copy the value out first, which means an
10554 rvalue. */
10555 if ((fallback & fb_lvalue)
10556 && gimple_seq_empty_p (internal_post)
10557 && is_gimple_addressable (*expr_p))
10558 {
10559 /* An lvalue will do. Take the address of the expression, store it
10560 in a temporary, and replace the expression with an INDIRECT_REF of
10561 that temporary. */
10562 tmp = build_fold_addr_expr_loc (input_location, *expr_p);
10563 gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
10564 *expr_p = build_simple_mem_ref (tmp);
10565 }
10566 else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
10567 {
10568 /* An rvalue will do. Assign the gimplified expression into a
10569 new temporary TMP and replace the original expression with
10570 TMP. First, make sure that the expression has a type so that
10571 it can be assigned into a temporary. */
10572 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
10573 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
10574 }
10575 else
10576 {
10577 #ifdef ENABLE_GIMPLE_CHECKING
10578 if (!(fallback & fb_mayfail))
10579 {
10580 fprintf (stderr, "gimplification failed:\n");
10581 print_generic_expr (stderr, *expr_p, 0);
10582 debug_tree (*expr_p);
10583 internal_error ("gimplification failed");
10584 }
10585 #endif
10586 gcc_assert (fallback & fb_mayfail);
10587
10588 /* If this is an asm statement, and the user asked for the
10589 impossible, don't die. Fail and let gimplify_asm_expr
10590 issue an error. */
10591 ret = GS_ERROR;
10592 goto out;
10593 }
10594
10595 /* Make sure the temporary matches our predicate. */
10596 gcc_assert ((*gimple_test_f) (*expr_p));
10597
10598 if (!gimple_seq_empty_p (internal_post))
10599 {
10600 annotate_all_with_location (internal_post, input_location);
10601 gimplify_seq_add_seq (pre_p, internal_post);
10602 }
10603
10604 out:
10605 input_location = saved_location;
10606 return ret;
10607 }
10608
10609 /* Look through TYPE for variable-sized objects and gimplify each such
10610 size that we find. Add to LIST_P any statements generated. */
10611
void
gimplify_type_sizes (tree type, gimple_seq *list_p)
{
  tree field, t;

  /* Nothing to do for a missing or erroneous type.  */
  if (type == NULL || type == error_mark_node)
    return;

  /* We first do the main variant, then copy into any other variants.  */
  type = TYPE_MAIN_VARIANT (type);

  /* Avoid infinite recursion.  */
  if (TYPE_SIZES_GIMPLIFIED (type))
    return;

  /* Mark the type before recursing, so self-referential and mutually
     recursive types terminate at the check above.  */
  TYPE_SIZES_GIMPLIFIED (type) = 1;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
      /* Scalar types may have variable bounds; gimplify both.  */
      gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
      gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);

      /* Propagate the gimplified bounds to every other variant.  */
      for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
	{
	  TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
	  TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
	}
      break;

    case ARRAY_TYPE:
      /* These types may not have declarations, so handle them here.  */
      gimplify_type_sizes (TREE_TYPE (type), list_p);
      gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
      /* Ensure VLA bounds aren't removed, for -O0 they should be variables
	 with assigned stack slots, for -O1+ -g they should be tracked
	 by VTA.  */
      if (!(TYPE_NAME (type)
	    && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
	    && DECL_IGNORED_P (TYPE_NAME (type)))
	  && TYPE_DOMAIN (type)
	  && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
	{
	  /* Un-ignore the artificial VAR_DECLs holding the bounds so the
	     debugger can still see them.  */
	  t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
	  if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
	    DECL_IGNORED_P (t) = 0;
	  t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
	  if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
	    DECL_IGNORED_P (t) = 0;
	}
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      /* Gimplify the position and sizes of each field, then recurse
	 into the field types themselves.  */
      for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	if (TREE_CODE (field) == FIELD_DECL)
	  {
	    gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
	    gimplify_one_sizepos (&DECL_SIZE (field), list_p);
	    gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
	    gimplify_type_sizes (TREE_TYPE (field), list_p);
	  }
      break;

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* We used to recurse on the pointed-to type here, which turned out to
	 be incorrect because its definition might refer to variables not
	 yet initialized at this point if a forward declaration is involved.

	 It was actually useful for anonymous pointed-to types to ensure
	 that the sizes evaluation dominates every possible later use of the
	 values.  Restricting to such types here would be safe since there
	 is no possible forward declaration around, but would introduce an
	 undesirable middle-end semantic to anonymity.  We then defer to
	 front-ends the responsibility of ensuring that the sizes are
	 evaluated both early and late enough, e.g. by attaching artificial
	 type declarations to the tree.  */
      break;

    default:
      break;
    }

  /* Gimplify the overall size and unit size of the type itself.  */
  gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
  gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);

  /* Copy the gimplified sizes into every other variant and mark those
     variants done as well.  */
  for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
    {
      TYPE_SIZE (t) = TYPE_SIZE (type);
      TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
      TYPE_SIZES_GIMPLIFIED (t) = 1;
    }
}
10711
10712 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
10713 a size or position, has had all of its SAVE_EXPRs evaluated.
10714 We add any required statements to *STMT_P. */
10715
10716 void
10717 gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
10718 {
10719 tree expr = *expr_p;
10720
10721 /* We don't do anything if the value isn't there, is constant, or contains
10722 A PLACEHOLDER_EXPR. We also don't want to do anything if it's already
10723 a VAR_DECL. If it's a VAR_DECL from another function, the gimplifier
10724 will want to replace it with a new variable, but that will cause problems
10725 if this type is from outside the function. It's OK to have that here. */
10726 if (is_gimple_sizepos (expr))
10727 return;
10728
10729 *expr_p = unshare_expr (expr);
10730
10731 gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue);
10732 }
10733
10734 /* Gimplify the body of statements of FNDECL and return a GIMPLE_BIND node
10735 containing the sequence of corresponding GIMPLE statements. If DO_PARMS
10736 is true, also gimplify the parameters. */
10737
gbind *
gimplify_body (tree fndecl, bool do_parms)
{
  location_t saved_location = input_location;
  gimple_seq parm_stmts, seq;
  gimple *outer_stmt;
  gbind *outer_bind;
  struct cgraph_node *cgn;

  /* Account all time spent here to the gimplification timevar.  */
  timevar_push (TV_TREE_GIMPLIFY);

  /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
     gimplification.  */
  default_rtl_profile ();

  /* No gimplification context may be live across calls here; create a
     fresh one for this body.  */
  gcc_assert (gimplify_ctxp == NULL);
  push_gimplify_context ();

  if (flag_openacc || flag_openmp)
    {
      gcc_assert (gimplify_omp_ctxp == NULL);
      /* "omp declare target" functions get an implicit target context.  */
      if (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (fndecl)))
	gimplify_omp_ctxp = new_omp_context (ORT_TARGET);
    }

  /* Unshare most shared trees in the body and in that of any nested functions.
     It would seem we don't have to do this for nested functions because
     they are supposed to be output and then the outer function gimplified
     first, but the g++ front end doesn't always do it that way.  */
  unshare_body (fndecl);
  unvisit_body (fndecl);

  /* NOTE(review): cgn->origin is non-null when FNDECL is a nested
     function; in that case set up a table recording VLAs referenced
     from enclosing scopes — consumers of nonlocal_vlas are outside
     this view.  */
  cgn = cgraph_node::get (fndecl);
  if (cgn && cgn->origin)
    nonlocal_vlas = new hash_set<tree>;

  /* Make sure input_location isn't set to something weird.  */
  input_location = DECL_SOURCE_LOCATION (fndecl);

  /* Resolve callee-copies.  This has to be done before processing
     the body so that DECL_VALUE_EXPR gets processed correctly.  */
  parm_stmts = do_parms ? gimplify_parameters () : NULL;

  /* Gimplify the function's body.  */
  seq = NULL;
  gimplify_stmt (&DECL_SAVED_TREE (fndecl), &seq);
  outer_stmt = gimple_seq_first_stmt (seq);
  if (!outer_stmt)
    {
      /* An empty body still needs one statement to hang the bind on.  */
      outer_stmt = gimple_build_nop ();
      gimplify_seq_add_stmt (&seq, outer_stmt);
    }

  /* The body must contain exactly one statement, a GIMPLE_BIND.  If this is
     not the case, wrap everything in a GIMPLE_BIND to make it so.  */
  if (gimple_code (outer_stmt) == GIMPLE_BIND
      && gimple_seq_first (seq) == gimple_seq_last (seq))
    outer_bind = as_a <gbind *> (outer_stmt);
  else
    outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);

  /* The GENERIC body is now fully converted; drop it.  */
  DECL_SAVED_TREE (fndecl) = NULL_TREE;

  /* If we had callee-copies statements, insert them at the beginning
     of the function and clear DECL_VALUE_EXPR_P on the parameters.  */
  if (!gimple_seq_empty_p (parm_stmts))
    {
      tree parm;

      gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
      gimple_bind_set_body (outer_bind, parm_stmts);

      for (parm = DECL_ARGUMENTS (current_function_decl);
	   parm; parm = DECL_CHAIN (parm))
	if (DECL_HAS_VALUE_EXPR_P (parm))
	  {
	    DECL_HAS_VALUE_EXPR_P (parm) = 0;
	    DECL_IGNORED_P (parm) = 0;
	  }
    }

  if (nonlocal_vlas)
    {
      if (nonlocal_vla_vars)
	{
	  /* tree-nested.c may later on call declare_vars (..., true);
	     which relies on BLOCK_VARS chain to be the tail of the
	     gimple_bind_vars chain.  Ensure we don't violate that
	     assumption.  */
	  if (gimple_bind_block (outer_bind)
	      == DECL_INITIAL (current_function_decl))
	    declare_vars (nonlocal_vla_vars, outer_bind, true);
	  else
	    BLOCK_VARS (DECL_INITIAL (current_function_decl))
	      = chainon (BLOCK_VARS (DECL_INITIAL (current_function_decl)),
			 nonlocal_vla_vars);
	  nonlocal_vla_vars = NULL_TREE;
	}
      delete nonlocal_vlas;
      nonlocal_vlas = NULL;
    }

  /* Tear down any OpenMP/OpenACC context created above or during
     gimplification.  */
  if ((flag_openacc || flag_openmp || flag_openmp_simd)
      && gimplify_omp_ctxp)
    {
      delete_omp_context (gimplify_omp_ctxp);
      gimplify_omp_ctxp = NULL;
    }

  pop_gimplify_context (outer_bind);
  gcc_assert (gimplify_ctxp == NULL);

  /* With checking enabled, validate the GIMPLE we just produced.  */
  if (flag_checking && !seen_error ())
    verify_gimple_in_seq (gimple_bind_body (outer_bind));

  timevar_pop (TV_TREE_GIMPLIFY);
  input_location = saved_location;

  return outer_bind;
}
10858
10859 typedef char *char_p; /* For DEF_VEC_P. */
10860
10861 /* Return whether we should exclude FNDECL from instrumentation. */
10862
10863 static bool
10864 flag_instrument_functions_exclude_p (tree fndecl)
10865 {
10866 vec<char_p> *v;
10867
10868 v = (vec<char_p> *) flag_instrument_functions_exclude_functions;
10869 if (v && v->length () > 0)
10870 {
10871 const char *name;
10872 int i;
10873 char *s;
10874
10875 name = lang_hooks.decl_printable_name (fndecl, 0);
10876 FOR_EACH_VEC_ELT (*v, i, s)
10877 if (strstr (name, s) != NULL)
10878 return true;
10879 }
10880
10881 v = (vec<char_p> *) flag_instrument_functions_exclude_files;
10882 if (v && v->length () > 0)
10883 {
10884 const char *name;
10885 int i;
10886 char *s;
10887
10888 name = DECL_SOURCE_FILE (fndecl);
10889 FOR_EACH_VEC_ELT (*v, i, s)
10890 if (strstr (name, s) != NULL)
10891 return true;
10892 }
10893
10894 return false;
10895 }
10896
10897 /* Entry point to the gimplification pass. FNDECL is the FUNCTION_DECL
10898 node for the function we want to gimplify.
10899
10900 Return the sequence of GIMPLE statements corresponding to the body
10901 of FNDECL. */
10902
void
gimplify_function_tree (tree fndecl)
{
  tree parm, ret;
  gimple_seq seq;
  gbind *bind;

  /* The function must not have been gimplified already.  */
  gcc_assert (!gimple_body (fndecl));

  /* Enter FNDECL's function context, creating one if necessary.  */
  if (DECL_STRUCT_FUNCTION (fndecl))
    push_cfun (DECL_STRUCT_FUNCTION (fndecl));
  else
    push_struct_function (fndecl);

  /* Tentatively set PROP_gimple_lva here, and reset it in gimplify_va_arg_expr
     if necessary.  */
  cfun->curr_properties |= PROP_gimple_lva;

  for (parm = DECL_ARGUMENTS (fndecl); parm ; parm = DECL_CHAIN (parm))
    {
      /* Preliminarily mark non-addressed complex variables as eligible
	 for promotion to gimple registers.  We'll transform their uses
	 as we find them.  */
      if ((TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
	   || TREE_CODE (TREE_TYPE (parm)) == VECTOR_TYPE)
	  && !TREE_THIS_VOLATILE (parm)
	  && !needs_to_live_in_memory (parm))
	DECL_GIMPLE_REG_P (parm) = 1;
    }

  /* Likewise for the complex/vector result, if any.  */
  ret = DECL_RESULT (fndecl);
  if ((TREE_CODE (TREE_TYPE (ret)) == COMPLEX_TYPE
       || TREE_CODE (TREE_TYPE (ret)) == VECTOR_TYPE)
      && !needs_to_live_in_memory (ret))
    DECL_GIMPLE_REG_P (ret) = 1;

  bind = gimplify_body (fndecl, true);

  /* The tree body of the function is no longer needed, replace it
     with the new GIMPLE body.  */
  seq = NULL;
  gimple_seq_add_stmt (&seq, bind);
  gimple_set_body (fndecl, seq);

  /* If we're instrumenting function entry/exit, then prepend the call to
     the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
     catch the exit hook.  */
  /* ??? Add some way to ignore exceptions for this TFE.  */
  if (flag_instrument_function_entry_exit
      && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
      && !flag_instrument_functions_exclude_p (fndecl))
    {
      tree x;
      gbind *new_bind;
      gimple *tf;
      gimple_seq cleanup = NULL, body = NULL;
      tree tmp_var;
      gcall *call;

      /* Build the finally clause: a call to the profiling exit hook
	 passing this function's address and __builtin_return_address (0)
	 captured in a temporary.  */
      x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
      call = gimple_build_call (x, 1, integer_zero_node);
      tmp_var = create_tmp_var (ptr_type_node, "return_addr");
      gimple_call_set_lhs (call, tmp_var);
      gimplify_seq_add_stmt (&cleanup, call);
      x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_EXIT);
      call = gimple_build_call (x, 2,
				build_fold_addr_expr (current_function_decl),
				tmp_var);
      gimplify_seq_add_stmt (&cleanup, call);
      tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);

      /* Build the entry sequence: the same return-address capture
	 followed by a call to the profiling enter hook, then the
	 try/finally built above.  */
      x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
      call = gimple_build_call (x, 1, integer_zero_node);
      tmp_var = create_tmp_var (ptr_type_node, "return_addr");
      gimple_call_set_lhs (call, tmp_var);
      gimplify_seq_add_stmt (&body, call);
      x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_ENTER);
      call = gimple_build_call (x, 2,
				build_fold_addr_expr (current_function_decl),
				tmp_var);
      gimplify_seq_add_stmt (&body, call);
      gimplify_seq_add_stmt (&body, tf);
      new_bind = gimple_build_bind (NULL, body, gimple_bind_block (bind));
      /* Clear the block for BIND, since it is no longer directly inside
	 the function, but within a try block.  */
      gimple_bind_set_block (bind, NULL);

      /* Replace the current function body with the body
	 wrapped in the try/finally TF.  */
      seq = NULL;
      gimple_seq_add_stmt (&seq, new_bind);
      gimple_set_body (fndecl, seq);
      bind = new_bind;
    }

  /* For -fsanitize=thread, wrap the body so IFN_TSAN_FUNC_EXIT runs on
     every exit, unless the function opts out via attribute.  */
  if ((flag_sanitize & SANITIZE_THREAD) != 0
      && !lookup_attribute ("no_sanitize_thread", DECL_ATTRIBUTES (fndecl)))
    {
      gcall *call = gimple_build_call_internal (IFN_TSAN_FUNC_EXIT, 0);
      gimple *tf = gimple_build_try (seq, call, GIMPLE_TRY_FINALLY);
      gbind *new_bind = gimple_build_bind (NULL, tf, gimple_bind_block (bind));
      /* Clear the block for BIND, since it is no longer directly inside
	 the function, but within a try block.  */
      gimple_bind_set_block (bind, NULL);
      /* Replace the current function body with the body
	 wrapped in the try/finally TF.  */
      seq = NULL;
      gimple_seq_add_stmt (&seq, new_bind);
      gimple_set_body (fndecl, seq);
    }

  /* The GENERIC form is gone; record that the function is now GIMPLE.  */
  DECL_SAVED_TREE (fndecl) = NULL_TREE;
  cfun->curr_properties |= PROP_gimple_any;

  pop_cfun ();

  dump_function (TDI_generic, fndecl);
}
11021
11022 /* Return a dummy expression of type TYPE in order to keep going after an
11023 error. */
11024
11025 static tree
11026 dummy_object (tree type)
11027 {
11028 tree t = build_int_cst (build_pointer_type (type), 0);
11029 return build2 (MEM_REF, type, t, t);
11030 }
11031
/* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
   builtin function, but a very special sort of operator.

   *EXPR_P is the VA_ARG_EXPR to gimplify; on success it is replaced
   either by an IFN_VA_ARG internal call (the normal case, later expanded
   by the target) or by a dummy object (the diagnosed-undefined case).
   PRE_P receives any statements that must run before the result is used;
   POST_P is unused.  Returns GS_OK, GS_ALL_DONE, or GS_ERROR.  */

enum gimplify_status
gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p,
		      gimple_seq *post_p ATTRIBUTE_UNUSED)
{
  tree promoted_type, have_va_type;
  tree valist = TREE_OPERAND (*expr_p, 0);
  tree type = TREE_TYPE (*expr_p);
  tree t, tag;
  location_t loc = EXPR_LOCATION (*expr_p);

  /* Verify that valist is of the proper type.  The target decides what
     counts as a canonical va_list; NULL_TREE means "not a va_list".  */
  have_va_type = TREE_TYPE (valist);
  if (have_va_type == error_mark_node)
    return GS_ERROR;
  have_va_type = targetm.canonical_va_list_type (have_va_type);

  if (have_va_type == NULL_TREE)
    {
      error_at (loc, "first argument to %<va_arg%> not of type %<va_list%>");
      return GS_ERROR;
    }

  /* Generate a diagnostic for requesting data of a type that cannot
     be passed through `...' due to type promotion at the call site.  */
  if ((promoted_type = lang_hooks.types.type_promotes_to (type))
      != type)
    {
      static bool gave_help;
      bool warned;

      /* Unfortunately, this is merely undefined, rather than a constraint
	 violation, so we cannot make this an error.  If this call is never
	 executed, the program is still strictly conforming.  */
      warned = warning_at (loc, 0,
			   "%qT is promoted to %qT when passed through %<...%>",
			   type, promoted_type);
      /* The follow-up hint is emitted at most once per compilation
	 (static GAVE_HELP), and only if the warning itself fired.  */
      if (!gave_help && warned)
	{
	  gave_help = true;
	  inform (loc, "(so you should pass %qT not %qT to %<va_arg%>)",
		  promoted_type, type);
	}

      /* We can, however, treat "undefined" any way we please.
	 Call abort to encourage the user to fix the program.  */
      if (warned)
	inform (loc, "if this code is reached, the program will abort");
      /* Before the abort, allow the evaluation of the va_list
	 expression to exit or longjmp.  */
      gimplify_and_add (valist, pre_p);
      t = build_call_expr_loc (loc,
			       builtin_decl_implicit (BUILT_IN_TRAP), 0);
      gimplify_and_add (t, pre_p);

      /* This is dead code, but go ahead and finish so that the
	 mode of the result comes out right.  */
      *expr_p = dummy_object (type);
      return GS_ALL_DONE;
    }

  /* TAG is a null pointer of pointer-to-TYPE type; it exists only to
     carry the requested type to the IFN_VA_ARG expander.  */
  tag = build_int_cst (build_pointer_type (type), 0);
  *expr_p = build_call_expr_internal_loc (loc, IFN_VA_ARG, type, 2, valist, tag);

  /* Clear the tentatively set PROP_gimple_lva, to indicate that IFN_VA_ARG
     needs to be expanded.  */
  cfun->curr_properties &= ~PROP_gimple_lva;

  return GS_OK;
}
11104
11105 /* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.
11106
11107 DST/SRC are the destination and source respectively. You can pass
11108 ungimplified trees in DST or SRC, in which case they will be
11109 converted to a gimple operand if necessary.
11110
11111 This function returns the newly created GIMPLE_ASSIGN tuple. */
11112
11113 gimple *
11114 gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
11115 {
11116 tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
11117 gimplify_and_add (t, seq_p);
11118 ggc_free (t);
11119 return gimple_seq_last_stmt (*seq_p);
11120 }
11121
11122 inline hashval_t
11123 gimplify_hasher::hash (const elt_t *p)
11124 {
11125 tree t = p->val;
11126 return iterative_hash_expr (t, 0);
11127 }
11128
11129 inline bool
11130 gimplify_hasher::equal (const elt_t *p1, const elt_t *p2)
11131 {
11132 tree t1 = p1->val;
11133 tree t2 = p2->val;
11134 enum tree_code code = TREE_CODE (t1);
11135
11136 if (TREE_CODE (t2) != code
11137 || TREE_TYPE (t1) != TREE_TYPE (t2))
11138 return false;
11139
11140 if (!operand_equal_p (t1, t2, 0))
11141 return false;
11142
11143 /* Only allow them to compare equal if they also hash equal; otherwise
11144 results are nondeterminate, and we fail bootstrap comparison. */
11145 gcc_checking_assert (hash (p1) == hash (p2));
11146
11147 return true;
11148 }