re PR c++/31246 (-Wunreachable-code warnings for compiler-generated code)
[gcc.git] / gcc / gimplify.c
1 /* Tree lowering pass. This pass converts the GENERIC functions-as-trees
2 tree representation into the GIMPLE form.
3 Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
4 Free Software Foundation, Inc.
5 Major work done by Sebastian Pop <s.pop@laposte.net>,
6 Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.
7
8 This file is part of GCC.
9
10 GCC is free software; you can redistribute it and/or modify it under
11 the terms of the GNU General Public License as published by the Free
12 Software Foundation; either version 3, or (at your option) any later
13 version.
14
15 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
16 WARRANTY; without even the implied warranty of MERCHANTABILITY or
17 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 for more details.
19
20 You should have received a copy of the GNU General Public License
21 along with GCC; see the file COPYING3. If not see
22 <http://www.gnu.org/licenses/>. */
23
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "tm.h"
28 #include "tree.h"
29 #include "rtl.h"
30 #include "varray.h"
31 #include "gimple.h"
32 #include "tree-iterator.h"
33 #include "tree-inline.h"
34 #include "diagnostic.h"
35 #include "langhooks.h"
36 #include "langhooks-def.h"
37 #include "tree-flow.h"
38 #include "cgraph.h"
39 #include "timevar.h"
40 #include "except.h"
41 #include "hashtab.h"
42 #include "flags.h"
43 #include "real.h"
44 #include "function.h"
45 #include "output.h"
46 #include "expr.h"
47 #include "ggc.h"
48 #include "toplev.h"
49 #include "target.h"
50 #include "optabs.h"
51 #include "pointer-set.h"
52 #include "splay-tree.h"
53 #include "vec.h"
54 #include "gimple.h"
55
56
/* Per-variable flags kept in the splay tree of an OpenMP gimplification
   context.  The sharing-class bits are grouped by GOVD_DATA_SHARE_CLASS
   below; the flag names mirror the OpenMP data-sharing clauses
   (shared/private/firstprivate/lastprivate/reduction).  */

enum gimplify_omp_var_data
{
  GOVD_SEEN = 1,		/* Variable has been referenced in the region.  */
  GOVD_EXPLICIT = 2,		/* Sharing was given by an explicit clause.  */
  GOVD_SHARED = 4,
  GOVD_PRIVATE = 8,
  GOVD_FIRSTPRIVATE = 16,
  GOVD_LASTPRIVATE = 32,
  GOVD_REDUCTION = 64,
  GOVD_LOCAL = 128,		/* Declared locally inside the region.  */
  GOVD_DEBUG_PRIVATE = 256,
  GOVD_PRIVATE_OUTER_REF = 512,
  /* Mask selecting the data-sharing class bits from the flags above.  */
  GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
			   | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LOCAL)
};
72
73
/* Kind of OpenMP region an omp context describes.  Note that
   ORT_COMBINED_PARALLEL == ORT_PARALLEL | 1, so tests for "parallel"
   can mask rather than compare exactly — verify at use sites.  */

enum omp_region_type
{
  ORT_WORKSHARE = 0,
  ORT_TASK = 1,
  ORT_PARALLEL = 2,
  ORT_COMBINED_PARALLEL = 3
};
81
/* Gimplification context for one OpenMP region.  Contexts nest via
   OUTER_CONTEXT (see new_omp_context).  */

struct gimplify_omp_ctx
{
  struct gimplify_omp_ctx *outer_context;	/* Enclosing region, or NULL.  */
  splay_tree variables;				/* DECL -> GOVD_* flags, keyed by
						   DECL_UID (see
						   splay_tree_compare_decl_uid).  */
  struct pointer_set_t *privatized_types;	/* Types privatized in region.  */
  location_t location;				/* input_location at creation.  */
  enum omp_clause_default_kind default_kind;	/* Behavior of default() clause.  */
  enum omp_region_type region_type;
};
91
/* Innermost gimplification context and innermost OpenMP context.  Each
   forms a stack through its prev_context/outer_context pointer.  */
static struct gimplify_ctx *gimplify_ctxp;
static struct gimplify_omp_ctx *gimplify_omp_ctxp;
94
95
/* Formal (expression) temporary table handling: Multiple occurrences of
   the same scalar expression are evaluated into the same temporary.
   Entries live in gimplify_ctxp->temp_htab (see lookup_tmp_var).  */

typedef struct gimple_temp_hash_elt
{
  tree val;   /* Key: the expression being evaluated.  */
  tree temp;  /* Value: the temporary holding its value.  */
} elt_t;
104
105 /* Forward declarations. */
106 static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
107
108 /* Mark X addressable. Unlike the langhook we expect X to be in gimple
109 form and we don't do any syntax checking. */
110 void
111 mark_addressable (tree x)
112 {
113 while (handled_component_p (x))
114 x = TREE_OPERAND (x, 0);
115 if (TREE_CODE (x) != VAR_DECL
116 && TREE_CODE (x) != PARM_DECL
117 && TREE_CODE (x) != RESULT_DECL)
118 return ;
119 TREE_ADDRESSABLE (x) = 1;
120 }
121
122 /* Return a hash value for a formal temporary table entry. */
123
124 static hashval_t
125 gimple_tree_hash (const void *p)
126 {
127 tree t = ((const elt_t *) p)->val;
128 return iterative_hash_expr (t, 0);
129 }
130
131 /* Compare two formal temporary table entries. */
132
133 static int
134 gimple_tree_eq (const void *p1, const void *p2)
135 {
136 tree t1 = ((const elt_t *) p1)->val;
137 tree t2 = ((const elt_t *) p2)->val;
138 enum tree_code code = TREE_CODE (t1);
139
140 if (TREE_CODE (t2) != code
141 || TREE_TYPE (t1) != TREE_TYPE (t2))
142 return 0;
143
144 if (!operand_equal_p (t1, t2, 0))
145 return 0;
146
147 /* Only allow them to compare equal if they also hash equal; otherwise
148 results are nondeterminate, and we fail bootstrap comparison. */
149 gcc_assert (gimple_tree_hash (p1) == gimple_tree_hash (p2));
150
151 return 1;
152 }
153
154 /* Link gimple statement GS to the end of the sequence *SEQ_P. If
155 *SEQ_P is NULL, a new sequence is allocated. This function is
156 similar to gimple_seq_add_stmt, but does not scan the operands.
157 During gimplification, we need to manipulate statement sequences
158 before the def/use vectors have been constructed. */
159
160 static void
161 gimplify_seq_add_stmt (gimple_seq *seq_p, gimple gs)
162 {
163 gimple_stmt_iterator si;
164
165 if (gs == NULL)
166 return;
167
168 if (*seq_p == NULL)
169 *seq_p = gimple_seq_alloc ();
170
171 si = gsi_last (*seq_p);
172
173 gsi_insert_after_without_update (&si, gs, GSI_NEW_STMT);
174 }
175
176 /* Append sequence SRC to the end of sequence *DST_P. If *DST_P is
177 NULL, a new sequence is allocated. This function is
178 similar to gimple_seq_add_seq, but does not scan the operands.
179 During gimplification, we need to manipulate statement sequences
180 before the def/use vectors have been constructed. */
181
182 static void
183 gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
184 {
185 gimple_stmt_iterator si;
186
187 if (src == NULL)
188 return;
189
190 if (*dst_p == NULL)
191 *dst_p = gimple_seq_alloc ();
192
193 si = gsi_last (*dst_p);
194 gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
195 }
196
197 /* Set up a context for the gimplifier. */
198
199 void
200 push_gimplify_context (struct gimplify_ctx *c)
201 {
202 memset (c, '\0', sizeof (*c));
203 c->prev_context = gimplify_ctxp;
204 gimplify_ctxp = c;
205 }
206
207 /* Tear down a context for the gimplifier. If BODY is non-null, then
208 put the temporaries into the outer BIND_EXPR. Otherwise, put them
209 in the local_decls.
210
211 BODY is not a sequence, but the first tuple in a sequence. */
212
213 void
214 pop_gimplify_context (gimple body)
215 {
216 struct gimplify_ctx *c = gimplify_ctxp;
217
218 gcc_assert (c && (c->bind_expr_stack == NULL
219 || VEC_empty (gimple, c->bind_expr_stack)));
220 VEC_free (gimple, heap, c->bind_expr_stack);
221 gimplify_ctxp = c->prev_context;
222
223 if (body)
224 declare_vars (c->temps, body, false);
225 else
226 record_vars (c->temps);
227
228 if (c->temp_htab)
229 htab_delete (c->temp_htab);
230 }
231
232 static void
233 gimple_push_bind_expr (gimple gimple_bind)
234 {
235 if (gimplify_ctxp->bind_expr_stack == NULL)
236 gimplify_ctxp->bind_expr_stack = VEC_alloc (gimple, heap, 8);
237 VEC_safe_push (gimple, heap, gimplify_ctxp->bind_expr_stack, gimple_bind);
238 }
239
/* Pop the innermost GIMPLE_BIND off the bind stack of the current
   gimplification context.  */

static void
gimple_pop_bind_expr (void)
{
  VEC_pop (gimple, gimplify_ctxp->bind_expr_stack);
}
245
/* Return the innermost GIMPLE_BIND of the current gimplification
   context (the top of the bind stack).  */

gimple
gimple_current_bind_expr (void)
{
  return VEC_last (gimple, gimplify_ctxp->bind_expr_stack);
}
251
/* Return the stack of GIMPLE_BINDs created during gimplification.
   May be NULL if no bind has been pushed yet.  */

VEC(gimple, heap) *
gimple_bind_expr_stack (void)
{
  return gimplify_ctxp->bind_expr_stack;
}
259
/* Returns true iff there is a COND_EXPR between us and the innermost
   CLEANUP_POINT_EXPR.  This info is used by gimple_push_cleanup.
   The count is maintained by gimple_push/pop_condition.  */

static bool
gimple_conditional_context (void)
{
  return gimplify_ctxp->conditions > 0;
}
268
/* Note that we've entered a COND_EXPR.  */

static void
gimple_push_condition (void)
{
#ifdef ENABLE_GIMPLE_CHECKING
  /* On entering the outermost condition, no conditional cleanups may
     be pending from a previous conditional region.  */
  if (gimplify_ctxp->conditions == 0)
    gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
#endif
  ++(gimplify_ctxp->conditions);
}
280
281 /* Note that we've left a COND_EXPR. If we're back at unconditional scope
282 now, add any conditional cleanups we've seen to the prequeue. */
283
284 static void
285 gimple_pop_condition (gimple_seq *pre_p)
286 {
287 int conds = --(gimplify_ctxp->conditions);
288
289 gcc_assert (conds >= 0);
290 if (conds == 0)
291 {
292 gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
293 gimplify_ctxp->conditional_cleanups = NULL;
294 }
295 }
296
297 /* A stable comparison routine for use with splay trees and DECLs. */
298
299 static int
300 splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
301 {
302 tree a = (tree) xa;
303 tree b = (tree) xb;
304
305 return DECL_UID (a) - DECL_UID (b);
306 }
307
308 /* Create a new omp construct that deals with variable remapping. */
309
310 static struct gimplify_omp_ctx *
311 new_omp_context (enum omp_region_type region_type)
312 {
313 struct gimplify_omp_ctx *c;
314
315 c = XCNEW (struct gimplify_omp_ctx);
316 c->outer_context = gimplify_omp_ctxp;
317 c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
318 c->privatized_types = pointer_set_create ();
319 c->location = input_location;
320 c->region_type = region_type;
321 if (region_type != ORT_TASK)
322 c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
323 else
324 c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;
325
326 return c;
327 }
328
329 /* Destroy an omp construct that deals with variable remapping. */
330
331 static void
332 delete_omp_context (struct gimplify_omp_ctx *c)
333 {
334 splay_tree_delete (c->variables);
335 pointer_set_destroy (c->privatized_types);
336 XDELETE (c);
337 }
338
339 static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
340 static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);
341
342 /* A subroutine of append_to_statement_list{,_force}. T is not NULL. */
343
344 static void
345 append_to_statement_list_1 (tree t, tree *list_p)
346 {
347 tree list = *list_p;
348 tree_stmt_iterator i;
349
350 if (!list)
351 {
352 if (t && TREE_CODE (t) == STATEMENT_LIST)
353 {
354 *list_p = t;
355 return;
356 }
357 *list_p = list = alloc_stmt_list ();
358 }
359
360 i = tsi_last (list);
361 tsi_link_after (&i, t, TSI_CONTINUE_LINKING);
362 }
363
364 /* Add T to the end of the list container pointed to by LIST_P.
365 If T is an expression with no effects, it is ignored. */
366
367 void
368 append_to_statement_list (tree t, tree *list_p)
369 {
370 if (t && TREE_SIDE_EFFECTS (t))
371 append_to_statement_list_1 (t, list_p);
372 }
373
374 /* Similar, but the statement is always added, regardless of side effects. */
375
376 void
377 append_to_statement_list_force (tree t, tree *list_p)
378 {
379 if (t != NULL_TREE)
380 append_to_statement_list_1 (t, list_p);
381 }
382
/* Both gimplify the statement T and append it to *SEQ_P.  This function
   behaves exactly as gimplify_stmt, but you don't have to pass T as a
   reference.  Note that gimplify_stmt's status result is discarded.  */

void
gimplify_and_add (tree t, gimple_seq *seq_p)
{
  gimplify_stmt (&t, seq_p);
}
392
/* Gimplify statement T into sequence *SEQ_P, and return the first
   tuple in the sequence of generated tuples for this statement.
   Return NULL if gimplifying T produced no tuples.  */

static gimple
gimplify_and_return_first (tree t, gimple_seq *seq_p)
{
  /* Remember the tail of the sequence before gimplifying; anything
     appended after this point came from T.  */
  gimple_stmt_iterator last = gsi_last (*seq_p);

  gimplify_and_add (t, seq_p);

  if (!gsi_end_p (last))
    {
      /* The sequence was non-empty: the first new statement is the one
	 right after the old tail.  */
      gsi_next (&last);
      return gsi_stmt (last);
    }
  else
    /* The sequence was empty, so everything in it is new.  */
    return gimple_seq_first_stmt (*seq_p);
}
412
/* Strip off a legitimate source ending from the input string NAME of
   length LEN.  Rather than having to know the names used by all of
   our front ends, we strip off an ending of a period followed by
   up to five characters.  (Java uses ".class".)  */

static inline void
remove_suffix (char *name, int len)
{
  int pos;

  /* Scan backwards from the penultimate character, looking at most
     six positions back and never at position 0.  */
  for (pos = len - 2; pos > len - 8 && pos > 0; pos--)
    if (name[pos] == '.')
      {
	name[pos] = '\0';
	return;
      }
}
432
433 /* Create a new temporary name with PREFIX. Returns an identifier. */
434
435 static GTY(()) unsigned int tmp_var_id_num;
436
437 tree
438 create_tmp_var_name (const char *prefix)
439 {
440 char *tmp_name;
441
442 if (prefix)
443 {
444 char *preftmp = ASTRDUP (prefix);
445
446 remove_suffix (preftmp, strlen (preftmp));
447 prefix = preftmp;
448 }
449
450 ASM_FORMAT_PRIVATE_NAME (tmp_name, prefix ? prefix : "T", tmp_var_id_num++);
451 return get_identifier (tmp_name);
452 }
453
454
455 /* Create a new temporary variable declaration of type TYPE.
456 Does NOT push it into the current binding. */
457
458 tree
459 create_tmp_var_raw (tree type, const char *prefix)
460 {
461 tree tmp_var;
462 tree new_type;
463
464 /* Make the type of the variable writable. */
465 new_type = build_type_variant (type, 0, 0);
466 TYPE_ATTRIBUTES (new_type) = TYPE_ATTRIBUTES (type);
467
468 tmp_var = build_decl (input_location,
469 VAR_DECL, prefix ? create_tmp_var_name (prefix) : NULL,
470 type);
471
472 /* The variable was declared by the compiler. */
473 DECL_ARTIFICIAL (tmp_var) = 1;
474 /* And we don't want debug info for it. */
475 DECL_IGNORED_P (tmp_var) = 1;
476
477 /* Make the variable writable. */
478 TREE_READONLY (tmp_var) = 0;
479
480 DECL_EXTERNAL (tmp_var) = 0;
481 TREE_STATIC (tmp_var) = 0;
482 TREE_USED (tmp_var) = 1;
483
484 return tmp_var;
485 }
486
487 /* Create a new temporary variable declaration of type TYPE. DOES push the
488 variable into the current binding. Further, assume that this is called
489 only from gimplification or optimization, at which point the creation of
490 certain types are bugs. */
491
492 tree
493 create_tmp_var (tree type, const char *prefix)
494 {
495 tree tmp_var;
496
497 /* We don't allow types that are addressable (meaning we can't make copies),
498 or incomplete. We also used to reject every variable size objects here,
499 but now support those for which a constant upper bound can be obtained.
500 The processing for variable sizes is performed in gimple_add_tmp_var,
501 point at which it really matters and possibly reached via paths not going
502 through this function, e.g. after direct calls to create_tmp_var_raw. */
503 gcc_assert (!TREE_ADDRESSABLE (type) && COMPLETE_TYPE_P (type));
504
505 tmp_var = create_tmp_var_raw (type, prefix);
506 gimple_add_tmp_var (tmp_var);
507 return tmp_var;
508 }
509
/* Create a temporary with a name derived from VAL.  Subroutine of
   lookup_tmp_var; nobody else should call this function.  The new
   temporary has VAL's type and is named after VAL when possible.  */

static inline tree
create_tmp_from_val (tree val)
{
  return create_tmp_var (TREE_TYPE (val), get_name (val));
}
518
519 /* Create a temporary to hold the value of VAL. If IS_FORMAL, try to reuse
520 an existing expression temporary. */
521
522 static tree
523 lookup_tmp_var (tree val, bool is_formal)
524 {
525 tree ret;
526
527 /* If not optimizing, never really reuse a temporary. local-alloc
528 won't allocate any variable that is used in more than one basic
529 block, which means it will go into memory, causing much extra
530 work in reload and final and poorer code generation, outweighing
531 the extra memory allocation here. */
532 if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
533 ret = create_tmp_from_val (val);
534 else
535 {
536 elt_t elt, *elt_p;
537 void **slot;
538
539 elt.val = val;
540 if (gimplify_ctxp->temp_htab == NULL)
541 gimplify_ctxp->temp_htab
542 = htab_create (1000, gimple_tree_hash, gimple_tree_eq, free);
543 slot = htab_find_slot (gimplify_ctxp->temp_htab, (void *)&elt, INSERT);
544 if (*slot == NULL)
545 {
546 elt_p = XNEW (elt_t);
547 elt_p->val = val;
548 elt_p->temp = ret = create_tmp_from_val (val);
549 *slot = (void *) elt_p;
550 }
551 else
552 {
553 elt_p = (elt_t *) *slot;
554 ret = elt_p->temp;
555 }
556 }
557
558 return ret;
559 }
560
561
562 /* Return true if T is a CALL_EXPR or an expression that can be
563 assignmed to a temporary. Note that this predicate should only be
564 used during gimplification. See the rationale for this in
565 gimplify_modify_expr. */
566
567 static bool
568 is_gimple_reg_rhs_or_call (tree t)
569 {
570 return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
571 || TREE_CODE (t) == CALL_EXPR);
572 }
573
574 /* Return true if T is a valid memory RHS or a CALL_EXPR. Note that
575 this predicate should only be used during gimplification. See the
576 rationale for this in gimplify_modify_expr. */
577
578 static bool
579 is_gimple_mem_rhs_or_call (tree t)
580 {
581 /* If we're dealing with a renamable type, either source or dest must be
582 a renamed variable. */
583 if (is_gimple_reg_type (TREE_TYPE (t)))
584 return is_gimple_val (t);
585 else
586 return (is_gimple_val (t) || is_gimple_lvalue (t)
587 || TREE_CODE (t) == CALL_EXPR);
588 }
589
/* Helper for get_formal_tmp_var and get_initialized_tmp_var.  Gimplify
   VAL into an rvalue, assign it to a (possibly reused, if IS_FORMAL)
   temporary, append the initialization to *PRE_P, and return the
   temporary (or its SSA name when gimplifying into SSA form).  */

static tree
internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
                      bool is_formal)
{
  tree t, mod;

  /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
     can create an INIT_EXPR and convert it into a GIMPLE_CALL below.  */
  gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
                 fb_rvalue);

  t = lookup_tmp_var (val, is_formal);

  /* Formal complex/vector temporaries may be promoted to gimple
     registers.  */
  if (is_formal
      && (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
	  || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE))
    DECL_GIMPLE_REG_P (t) = 1;

  mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));

  /* Give the initialization VAL's location, falling back to the
     current input location.  */
  if (EXPR_HAS_LOCATION (val))
    SET_EXPR_LOCATION (mod, EXPR_LOCATION (val));
  else
    SET_EXPR_LOCATION (mod, input_location);

  /* gimplify_modify_expr might want to reduce this further.  */
  gimplify_and_add (mod, pre_p);
  ggc_free (mod);

  /* If we're gimplifying into ssa, gimplify_modify_expr will have
     given our temporary an SSA name.  Find and return it.  */
  if (gimplify_ctxp->into_ssa)
    {
      gimple last = gimple_seq_last_stmt (*pre_p);
      t = gimple_get_lhs (last);
    }

  return t;
}
631
/* Returns a formal temporary variable initialized with VAL.  PRE_P is as
   in gimplify_expr.  Only use this function if:

   1) The value of the unfactored expression represented by VAL will not
   change between the initialization and use of the temporary, and
   2) The temporary will not be otherwise modified.

   For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
   and #2 means it is inappropriate for && temps.

   For other cases, use get_initialized_tmp_var instead.  */

tree
get_formal_tmp_var (tree val, gimple_seq *pre_p)
{
  /* No POST_P: a reusable formal temp cannot depend on postqueue side
     effects.  */
  return internal_get_tmp_var (val, pre_p, NULL, true);
}
649
/* Returns a temporary variable initialized with VAL.  PRE_P and POST_P
   are as in gimplify_expr.  Unlike get_formal_tmp_var, the temporary is
   never reused for other occurrences of the expression.  */

tree
get_initialized_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p)
{
  return internal_get_tmp_var (val, pre_p, post_p, false);
}
658
/* Declares all the variables in VARS in SCOPE.  If DEBUG_INFO is
   true, generate debug info for them; otherwise don't.  */

void
declare_vars (tree vars, gimple scope, bool debug_info)
{
  tree last = vars;
  if (last)
    {
      tree temps, block;

      gcc_assert (gimple_code (scope) == GIMPLE_BIND);

      /* Reverse in place: TEMPS is the new head; LAST (the old head)
	 is now the tail of the reversed chain, so chaining onto LAST
	 below appends the existing bind vars after the new ones.  */
      temps = nreverse (last);

      block = gimple_bind_block (scope);
      gcc_assert (!block || TREE_CODE (block) == BLOCK);
      if (!block || !debug_info)
	{
	  TREE_CHAIN (last) = gimple_bind_vars (scope);
	  gimple_bind_set_vars (scope, temps);
	}
      else
	{
	  /* We need to attach the nodes both to the BIND_EXPR and to its
	     associated BLOCK for debugging purposes.  The key point here
	     is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
	     is a subchain of the BIND_EXPR_VARS of the BIND_EXPR.  */
	  if (BLOCK_VARS (block))
	    BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
	  else
	    {
	      gimple_bind_set_vars (scope,
	      			    chainon (gimple_bind_vars (scope), temps));
	      BLOCK_VARS (block) = temps;
	    }
	}
    }
}
698
699 /* For VAR a VAR_DECL of variable size, try to find a constant upper bound
700 for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly. Abort if
701 no such upper bound can be obtained. */
702
703 static void
704 force_constant_size (tree var)
705 {
706 /* The only attempt we make is by querying the maximum size of objects
707 of the variable's type. */
708
709 HOST_WIDE_INT max_size;
710
711 gcc_assert (TREE_CODE (var) == VAR_DECL);
712
713 max_size = max_int_size_in_bytes (TREE_TYPE (var));
714
715 gcc_assert (max_size >= 0);
716
717 DECL_SIZE_UNIT (var)
718 = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
719 DECL_SIZE (var)
720 = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
721 }
722
/* Record the temporary TMP as a local of the appropriate scope: the
   current gimplification context if one is active (also marking it
   local to the nearest enclosing OpenMP parallel/task), else the
   current function, else the body of a nested function being built.  */

void
gimple_add_tmp_var (tree tmp)
{
  /* TMP must be a fresh decl: not chained anywhere, never yet seen in
     a bind expression.  */
  gcc_assert (!TREE_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));

  /* Later processing assumes that the object size is constant, which might
     not be true at this point.  Force the use of a constant upper bound in
     this case.  */
  if (!host_integerp (DECL_SIZE_UNIT (tmp), 1))
    force_constant_size (tmp);

  DECL_CONTEXT (tmp) = current_function_decl;
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;

  if (gimplify_ctxp)
    {
      /* Prepend to the context's temporaries; pop_gimplify_context will
	 declare them.  */
      TREE_CHAIN (tmp) = gimplify_ctxp->temps;
      gimplify_ctxp->temps = tmp;

      /* Mark temporaries local within the nearest enclosing parallel.  */
      if (gimplify_omp_ctxp)
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  while (ctx && ctx->region_type == ORT_WORKSHARE)
	    ctx = ctx->outer_context;
	  if (ctx)
	    omp_add_variable (ctx, tmp, GOVD_LOCAL | GOVD_SEEN);
	}
    }
  else if (cfun)
    record_vars (tmp);
  else
    {
      gimple_seq body_seq;

      /* This case is for nested functions.  We need to expose the locals
	 they create.  */
      body_seq = gimple_body (current_function_decl);
      declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
    }
}
764
765 /* Determines whether to assign a location to the statement GS. */
766
767 static bool
768 should_carry_location_p (gimple gs)
769 {
770 /* Don't emit a line note for a label. We particularly don't want to
771 emit one for the break label, since it doesn't actually correspond
772 to the beginning of the loop/switch. */
773 if (gimple_code (gs) == GIMPLE_LABEL)
774 return false;
775
776 return true;
777 }
778
779 /* Same, but for a tree. */
780
781 static bool
782 tree_should_carry_location_p (const_tree stmt)
783 {
784 /* Don't emit a line note for a label. We particularly don't want to
785 emit one for the break label, since it doesn't actually correspond
786 to the beginning of the loop/switch. */
787 if (TREE_CODE (stmt) == LABEL_EXPR)
788 return false;
789
790 /* Do not annotate empty statements, since it confuses gcov. */
791 if (!TREE_SIDE_EFFECTS (stmt))
792 return false;
793
794 return true;
795 }
796
/* Return true if a location should not be emitted for this statement
   by annotate_one_with_location.  The flag is stored in pass-local
   flag GF_PLF_1 (see gimple_set_do_not_emit_location).  */

static inline bool
gimple_do_not_emit_location_p (gimple g)
{
  return gimple_plf (g, GF_PLF_1);
}
805
/* Mark statement G so a location will not be emitted by
   annotate_one_with_location.  */

static inline void
gimple_set_do_not_emit_location (gimple g)
{
  /* The PLF flags are initialized to 0 when a new tuple is created,
     so no need to initialize it anywhere.  */
  gimple_set_plf (g, GF_PLF_1, true);
}
816
817 /* Set the location for gimple statement GS to LOCATION. */
818
819 static void
820 annotate_one_with_location (gimple gs, location_t location)
821 {
822 if (!gimple_has_location (gs)
823 && !gimple_do_not_emit_location_p (gs)
824 && should_carry_location_p (gs))
825 gimple_set_location (gs, location);
826 }
827
828 /* Same, but for tree T. */
829
830 static void
831 tree_annotate_one_with_location (tree t, location_t location)
832 {
833 if (CAN_HAVE_LOCATION_P (t)
834 && ! EXPR_HAS_LOCATION (t) && tree_should_carry_location_p (t))
835 SET_EXPR_LOCATION (t, location);
836 }
837
838
839 /* Set LOCATION for all the statements after iterator GSI in sequence
840 SEQ. If GSI is pointing to the end of the sequence, start with the
841 first statement in SEQ. */
842
843 static void
844 annotate_all_with_location_after (gimple_seq seq, gimple_stmt_iterator gsi,
845 location_t location)
846 {
847 if (gsi_end_p (gsi))
848 gsi = gsi_start (seq);
849 else
850 gsi_next (&gsi);
851
852 for (; !gsi_end_p (gsi); gsi_next (&gsi))
853 annotate_one_with_location (gsi_stmt (gsi), location);
854 }
855
856
857 /* Set the location for all the statements in a sequence STMT_P to LOCATION. */
858
859 void
860 annotate_all_with_location (gimple_seq stmt_p, location_t location)
861 {
862 gimple_stmt_iterator i;
863
864 if (gimple_seq_empty_p (stmt_p))
865 return;
866
867 for (i = gsi_start (stmt_p); !gsi_end_p (i); gsi_next (&i))
868 {
869 gimple gs = gsi_stmt (i);
870 annotate_one_with_location (gs, location);
871 }
872 }
873
874 /* Same, but for statement or statement list in *STMT_P. */
875
876 void
877 tree_annotate_all_with_location (tree *stmt_p, location_t location)
878 {
879 tree_stmt_iterator i;
880
881 if (!*stmt_p)
882 return;
883
884 for (i = tsi_start (*stmt_p); !tsi_end_p (i); tsi_next (&i))
885 {
886 tree t = tsi_stmt (i);
887
888 /* Assuming we've already been gimplified, we shouldn't
889 see nested chaining constructs anymore. */
890 gcc_assert (TREE_CODE (t) != STATEMENT_LIST
891 && TREE_CODE (t) != COMPOUND_EXPR);
892
893 tree_annotate_one_with_location (t, location);
894 }
895 }
896
897
898 /* Similar to copy_tree_r() but do not copy SAVE_EXPR or TARGET_EXPR nodes.
899 These nodes model computations that should only be done once. If we
900 were to unshare something like SAVE_EXPR(i++), the gimplification
901 process would create wrong code. */
902
903 static tree
904 mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
905 {
906 enum tree_code code = TREE_CODE (*tp);
907 /* Don't unshare types, decls, constants and SAVE_EXPR nodes. */
908 if (TREE_CODE_CLASS (code) == tcc_type
909 || TREE_CODE_CLASS (code) == tcc_declaration
910 || TREE_CODE_CLASS (code) == tcc_constant
911 || code == SAVE_EXPR || code == TARGET_EXPR
912 /* We can't do anything sensible with a BLOCK used as an expression,
913 but we also can't just die when we see it because of non-expression
914 uses. So just avert our eyes and cross our fingers. Silly Java. */
915 || code == BLOCK)
916 *walk_subtrees = 0;
917 else
918 {
919 gcc_assert (code != BIND_EXPR);
920 copy_tree_r (tp, walk_subtrees, data);
921 }
922
923 return NULL_TREE;
924 }
925
926 /* Callback for walk_tree to unshare most of the shared trees rooted at
927 *TP. If *TP has been visited already (i.e., TREE_VISITED (*TP) == 1),
928 then *TP is deep copied by calling copy_tree_r.
929
930 This unshares the same trees as copy_tree_r with the exception of
931 SAVE_EXPR nodes. These nodes model computations that should only be
932 done once. If we were to unshare something like SAVE_EXPR(i++), the
933 gimplification process would create wrong code. */
934
935 static tree
936 copy_if_shared_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
937 void *data ATTRIBUTE_UNUSED)
938 {
939 tree t = *tp;
940 enum tree_code code = TREE_CODE (t);
941
942 /* Skip types, decls, and constants. But we do want to look at their
943 types and the bounds of types. Mark them as visited so we properly
944 unmark their subtrees on the unmark pass. If we've already seen them,
945 don't look down further. */
946 if (TREE_CODE_CLASS (code) == tcc_type
947 || TREE_CODE_CLASS (code) == tcc_declaration
948 || TREE_CODE_CLASS (code) == tcc_constant)
949 {
950 if (TREE_VISITED (t))
951 *walk_subtrees = 0;
952 else
953 TREE_VISITED (t) = 1;
954 }
955
956 /* If this node has been visited already, unshare it and don't look
957 any deeper. */
958 else if (TREE_VISITED (t))
959 {
960 walk_tree (tp, mostly_copy_tree_r, NULL, NULL);
961 *walk_subtrees = 0;
962 }
963
964 /* Otherwise, mark the tree as visited and keep looking. */
965 else
966 TREE_VISITED (t) = 1;
967
968 return NULL_TREE;
969 }
970
971 static tree
972 unmark_visited_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
973 void *data ATTRIBUTE_UNUSED)
974 {
975 if (TREE_VISITED (*tp))
976 TREE_VISITED (*tp) = 0;
977 else
978 *walk_subtrees = 0;
979
980 return NULL_TREE;
981 }
982
983 /* Unshare all the trees in BODY_P, a pointer into the body of FNDECL, and the
984 bodies of any nested functions if we are unsharing the entire body of
985 FNDECL. */
986
987 static void
988 unshare_body (tree *body_p, tree fndecl)
989 {
990 struct cgraph_node *cgn = cgraph_node (fndecl);
991
992 walk_tree (body_p, copy_if_shared_r, NULL, NULL);
993 if (body_p == &DECL_SAVED_TREE (fndecl))
994 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
995 unshare_body (&DECL_SAVED_TREE (cgn->decl), cgn->decl);
996 }
997
998 /* Likewise, but mark all trees as not visited. */
999
1000 static void
1001 unvisit_body (tree *body_p, tree fndecl)
1002 {
1003 struct cgraph_node *cgn = cgraph_node (fndecl);
1004
1005 walk_tree (body_p, unmark_visited_r, NULL, NULL);
1006 if (body_p == &DECL_SAVED_TREE (fndecl))
1007 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
1008 unvisit_body (&DECL_SAVED_TREE (cgn->decl), cgn->decl);
1009 }
1010
/* Unconditionally make an unshared copy of EXPR.  This is used when using
   stored expressions which span multiple functions, such as BINFO_VTABLE,
   as the normal unsharing process can't tell that they're shared.
   SAVE_EXPR/TARGET_EXPR nodes are still not copied (see
   mostly_copy_tree_r).  */

tree
unshare_expr (tree expr)
{
  walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
  return expr;
}
1021 \f
/* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
   contain statements and have a value.  Assign its value to a temporary
   and give it void_type_node.  Returns the temporary, or NULL_TREE if
   WRAPPER was already void.  */

tree
voidify_wrapper_expr (tree wrapper, tree temp)
{
  tree type = TREE_TYPE (wrapper);
  if (type && !VOID_TYPE_P (type))
    {
      tree *p;

      /* Set p to point to the body of the wrapper.  Loop until we find
	 something that isn't a wrapper.  Every wrapper passed through is
	 voidified on the way down; P ends up pointing at the innermost
	 value-producing expression.  */
      for (p = &wrapper; p && *p; )
	{
	  switch (TREE_CODE (*p))
	    {
	    case BIND_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      /* For a BIND_EXPR, the body is operand 1.  */
	      p = &BIND_EXPR_BODY (*p);
	      break;

	    case CLEANUP_POINT_EXPR:
	    case TRY_FINALLY_EXPR:
	    case TRY_CATCH_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TREE_OPERAND (*p, 0);
	      break;

	    case STATEMENT_LIST:
	      {
		/* The value of a statement list is its last statement.  */
		tree_stmt_iterator i = tsi_last (*p);
		TREE_SIDE_EFFECTS (*p) = 1;
		TREE_TYPE (*p) = void_type_node;
		p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
	      }
	      break;

	    case COMPOUND_EXPR:
	      /* Advance to the last statement.  Set all container types to void.  */
	      for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		}
	      break;

	    default:
	      goto out;
	    }
	}

    out:
      if (p == NULL || IS_EMPTY_STMT (*p))
	temp = NULL_TREE;
      else if (temp)
	{
	  /* The wrapper is on the RHS of an assignment that we're pushing
	     down.  Splice the value expression in as the assignment's new
	     RHS, and plant the assignment where the value was.  */
	  gcc_assert (TREE_CODE (temp) == INIT_EXPR
		      || TREE_CODE (temp) == MODIFY_EXPR);
	  TREE_OPERAND (temp, 1) = *p;
	  *p = temp;
	}
      else
	{
	  /* No assignment supplied: capture the value in a fresh
	     temporary.  */
	  temp = create_tmp_var (type, "retval");
	  *p = build2 (INIT_EXPR, type, temp, *p);
	}

      return temp;
    }

  return NULL_TREE;
}
1102
1103 /* Prepare calls to builtins to SAVE and RESTORE the stack as well as
1104 a temporary through which they communicate. */
1105
1106 static void
1107 build_stack_save_restore (gimple *save, gimple *restore)
1108 {
1109 tree tmp_var;
1110
1111 *save = gimple_build_call (implicit_built_in_decls[BUILT_IN_STACK_SAVE], 0);
1112 tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
1113 gimple_call_set_lhs (*save, tmp_var);
1114
1115 *restore = gimple_build_call (implicit_built_in_decls[BUILT_IN_STACK_RESTORE],
1116 1, tmp_var);
1117 }
1118
1119 /* Gimplify a BIND_EXPR. Just voidify and recurse. */
1120
static enum gimplify_status
gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree bind_expr = *expr_p;
  bool old_save_stack = gimplify_ctxp->save_stack;
  tree t;
  gimple gimple_bind;
  gimple_seq body;

  /* If the BIND_EXPR produces a value, push the enclosing assignment
     down onto its innermost expression (or create a temporary).  */
  tree temp = voidify_wrapper_expr (bind_expr, NULL);

  /* Mark variables seen in this bind expr.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = TREE_CHAIN (t))
    {
      if (TREE_CODE (t) == VAR_DECL)
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;

	  /* Mark variable as local.  */
	  if (ctx && !is_global_var (t)
	      && (! DECL_SEEN_IN_BIND_EXPR_P (t)
		  || splay_tree_lookup (ctx->variables,
					(splay_tree_key) t) == NULL))
	    omp_add_variable (gimplify_omp_ctxp, t, GOVD_LOCAL | GOVD_SEEN);

	  DECL_SEEN_IN_BIND_EXPR_P (t) = 1;

	  /* Record that the function contains an explicit local
	     register variable.  */
	  if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
	    cfun->has_local_explicit_reg_vars = true;
	}

      /* Preliminarily mark non-addressed complex variables as eligible
	 for promotion to gimple registers.  We'll transform their uses
	 as we find them.
	 We exclude complex types if not optimizing because they can be
	 subject to partial stores in GNU C by means of the __real__ and
	 __imag__ operators and we cannot promote them to total stores
	 (see gimplify_modify_expr_complex_part).  */
      if (optimize
	  && (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
	      || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
	  && !TREE_THIS_VOLATILE (t)
	  && (TREE_CODE (t) == VAR_DECL && !DECL_HARD_REGISTER (t))
	  && !needs_to_live_in_memory (t))
	DECL_GIMPLE_REG_P (t) = 1;
    }

  gimple_bind = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
                                   BIND_EXPR_BLOCK (bind_expr));
  gimple_push_bind_expr (gimple_bind);

  /* Start from a clean slate so we can tell whether anything in THIS
     binding contour (e.g. a VLA) requests a stack save/restore.  */
  gimplify_ctxp->save_stack = false;

  /* Gimplify the body into the GIMPLE_BIND tuple's body.  */
  body = NULL;
  gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
  gimple_bind_set_body (gimple_bind, body);

  if (gimplify_ctxp->save_stack)
    {
      gimple stack_save, stack_restore, gs;
      gimple_seq cleanup, new_body;

      /* Save stack on entry and restore it on exit.  Add a try_finally
	 block to achieve this.  Note that mudflap depends on the
	 format of the emitted code: see mx_register_decls().  */
      build_stack_save_restore (&stack_save, &stack_restore);

      cleanup = new_body = NULL;
      gimplify_seq_add_stmt (&cleanup, stack_restore);
      gs = gimple_build_try (gimple_bind_body (gimple_bind), cleanup,
			     GIMPLE_TRY_FINALLY);

      gimplify_seq_add_stmt (&new_body, stack_save);
      gimplify_seq_add_stmt (&new_body, gs);
      gimple_bind_set_body (gimple_bind, new_body);
    }

  /* Restore the enclosing contour's save_stack flag.  */
  gimplify_ctxp->save_stack = old_save_stack;
  gimple_pop_bind_expr ();

  gimplify_seq_add_stmt (pre_p, gimple_bind);

  /* If voidification created a value-bearing temporary, hand it back
     to the caller for re-examination.  */
  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL_TREE;
  return GS_ALL_DONE;
}
1213
1214 /* Gimplify a RETURN_EXPR. If the expression to be returned is not a
1215 GIMPLE value, it is assigned to a new temporary and the statement is
1216 re-written to return the temporary.
1217
1218 PRE_P points to the sequence where side effects that must happen before
1219 STMT should be stored. */
1220
1221 static enum gimplify_status
1222 gimplify_return_expr (tree stmt, gimple_seq *pre_p)
1223 {
1224 gimple ret;
1225 tree ret_expr = TREE_OPERAND (stmt, 0);
1226 tree result_decl, result;
1227
1228 if (ret_expr == error_mark_node)
1229 return GS_ERROR;
1230
1231 if (!ret_expr
1232 || TREE_CODE (ret_expr) == RESULT_DECL
1233 || ret_expr == error_mark_node)
1234 {
1235 gimple ret = gimple_build_return (ret_expr);
1236 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1237 gimplify_seq_add_stmt (pre_p, ret);
1238 return GS_ALL_DONE;
1239 }
1240
1241 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
1242 result_decl = NULL_TREE;
1243 else
1244 {
1245 result_decl = TREE_OPERAND (ret_expr, 0);
1246
1247 /* See through a return by reference. */
1248 if (TREE_CODE (result_decl) == INDIRECT_REF)
1249 result_decl = TREE_OPERAND (result_decl, 0);
1250
1251 gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
1252 || TREE_CODE (ret_expr) == INIT_EXPR)
1253 && TREE_CODE (result_decl) == RESULT_DECL);
1254 }
1255
1256 /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
1257 Recall that aggregate_value_p is FALSE for any aggregate type that is
1258 returned in registers. If we're returning values in registers, then
1259 we don't want to extend the lifetime of the RESULT_DECL, particularly
1260 across another call. In addition, for those aggregates for which
1261 hard_function_value generates a PARALLEL, we'll die during normal
1262 expansion of structure assignments; there's special code in expand_return
1263 to handle this case that does not exist in expand_expr. */
1264 if (!result_decl
1265 || aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
1266 result = result_decl;
1267 else if (gimplify_ctxp->return_temp)
1268 result = gimplify_ctxp->return_temp;
1269 else
1270 {
1271 result = create_tmp_var (TREE_TYPE (result_decl), NULL);
1272 if (TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
1273 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
1274 DECL_GIMPLE_REG_P (result) = 1;
1275
1276 /* ??? With complex control flow (usually involving abnormal edges),
1277 we can wind up warning about an uninitialized value for this. Due
1278 to how this variable is constructed and initialized, this is never
1279 true. Give up and never warn. */
1280 TREE_NO_WARNING (result) = 1;
1281
1282 gimplify_ctxp->return_temp = result;
1283 }
1284
1285 /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
1286 Then gimplify the whole thing. */
1287 if (result != result_decl)
1288 TREE_OPERAND (ret_expr, 0) = result;
1289
1290 gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);
1291
1292 ret = gimple_build_return (result);
1293 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1294 gimplify_seq_add_stmt (pre_p, ret);
1295
1296 return GS_ALL_DONE;
1297 }
1298
1299 static void
1300 gimplify_vla_decl (tree decl, gimple_seq *seq_p)
1301 {
1302 /* This is a variable-sized decl. Simplify its size and mark it
1303 for deferred expansion. Note that mudflap depends on the format
1304 of the emitted code: see mx_register_decls(). */
1305 tree t, addr, ptr_type;
1306
1307 gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
1308 gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);
1309
1310 /* All occurrences of this decl in final gimplified code will be
1311 replaced by indirection. Setting DECL_VALUE_EXPR does two
1312 things: First, it lets the rest of the gimplifier know what
1313 replacement to use. Second, it lets the debug info know
1314 where to find the value. */
1315 ptr_type = build_pointer_type (TREE_TYPE (decl));
1316 addr = create_tmp_var (ptr_type, get_name (decl));
1317 DECL_IGNORED_P (addr) = 0;
1318 t = build_fold_indirect_ref (addr);
1319 SET_DECL_VALUE_EXPR (decl, t);
1320 DECL_HAS_VALUE_EXPR_P (decl) = 1;
1321
1322 t = built_in_decls[BUILT_IN_ALLOCA];
1323 t = build_call_expr (t, 1, DECL_SIZE_UNIT (decl));
1324 t = fold_convert (ptr_type, t);
1325 t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
1326
1327 gimplify_and_add (t, seq_p);
1328
1329 /* Indicate that we need to restore the stack level when the
1330 enclosing BIND_EXPR is exited. */
1331 gimplify_ctxp->save_stack = true;
1332 }
1333
1334
1335 /* Gimplifies a DECL_EXPR node *STMT_P by making any necessary allocation
1336 and initialization explicit. */
1337
static enum gimplify_status
gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
{
  tree stmt = *stmt_p;
  tree decl = DECL_EXPR_DECL (stmt);

  /* The DECL_EXPR itself is consumed here; allocation/initialization
     statements are appended to SEQ_P instead.  */
  *stmt_p = NULL_TREE;

  if (TREE_TYPE (decl) == error_mark_node)
    return GS_ERROR;

  /* Gimplify any variable sizes/positions in the declared type once.  */
  if ((TREE_CODE (decl) == TYPE_DECL
       || TREE_CODE (decl) == VAR_DECL)
      && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
    gimplify_type_sizes (TREE_TYPE (decl), seq_p);

  if (TREE_CODE (decl) == VAR_DECL && !DECL_EXTERNAL (decl))
    {
      tree init = DECL_INITIAL (decl);

      /* Give the variable alloca-backed storage when its size is not
	 constant, or when generic stack checking is enabled and the
	 automatic variable exceeds STACK_CHECK_MAX_VAR_SIZE.  */
      if (TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
	  || (!TREE_STATIC (decl)
	      && flag_stack_check == GENERIC_STACK_CHECK
	      && compare_tree_int (DECL_SIZE_UNIT (decl),
				   STACK_CHECK_MAX_VAR_SIZE) > 0))
	gimplify_vla_decl (decl, seq_p);

      if (init && init != error_mark_node)
	{
	  if (!TREE_STATIC (decl))
	    {
	      /* Turn the initializer into an explicit INIT_EXPR
		 statement and gimplify that; the INIT_EXPR node itself
		 can then be returned to the GC pool.  */
	      DECL_INITIAL (decl) = NULL_TREE;
	      init = build2 (INIT_EXPR, void_type_node, decl, init);
	      gimplify_and_add (init, seq_p);
	      ggc_free (init);
	    }
	  else
	    /* We must still examine initializers for static variables
	       as they may contain a label address.  */
	    walk_tree (&init, force_labels_r, NULL, NULL);
	}

      /* Some front ends do not explicitly declare all anonymous
	 artificial variables.  We compensate here by declaring the
	 variables, though it would be better if the front ends would
	 explicitly declare them.  */
      if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
	  && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
	gimple_add_tmp_var (decl);
    }

  return GS_ALL_DONE;
}
1391
1392 /* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body
1393 and replacing the LOOP_EXPR with goto, but if the loop contains an
1394 EXIT_EXPR, we need to append a label for it to jump to. */
1395
1396 static enum gimplify_status
1397 gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
1398 {
1399 tree saved_label = gimplify_ctxp->exit_label;
1400 tree start_label = create_artificial_label (UNKNOWN_LOCATION);
1401
1402 gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));
1403
1404 gimplify_ctxp->exit_label = NULL_TREE;
1405
1406 gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);
1407
1408 gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));
1409
1410 if (gimplify_ctxp->exit_label)
1411 gimplify_seq_add_stmt (pre_p, gimple_build_label (gimplify_ctxp->exit_label));
1412
1413 gimplify_ctxp->exit_label = saved_label;
1414
1415 *expr_p = NULL;
1416 return GS_ALL_DONE;
1417 }
1418
1419 /* Gimplifies a statement list onto a sequence. These may be created either
1420 by an enlightened front-end, or by shortcut_cond_expr. */
1421
1422 static enum gimplify_status
1423 gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
1424 {
1425 tree temp = voidify_wrapper_expr (*expr_p, NULL);
1426
1427 tree_stmt_iterator i = tsi_start (*expr_p);
1428
1429 while (!tsi_end_p (i))
1430 {
1431 gimplify_stmt (tsi_stmt_ptr (i), pre_p);
1432 tsi_delink (&i);
1433 }
1434
1435 if (temp)
1436 {
1437 *expr_p = temp;
1438 return GS_OK;
1439 }
1440
1441 return GS_ALL_DONE;
1442 }
1443
1444 /* Compare two case labels. Because the front end should already have
1445 made sure that case ranges do not overlap, it is enough to only compare
1446 the CASE_LOW values of each case label. */
1447
1448 static int
1449 compare_case_labels (const void *p1, const void *p2)
1450 {
1451 const_tree const case1 = *(const_tree const*)p1;
1452 const_tree const case2 = *(const_tree const*)p2;
1453
1454 /* The 'default' case label always goes first. */
1455 if (!CASE_LOW (case1))
1456 return -1;
1457 else if (!CASE_LOW (case2))
1458 return 1;
1459 else
1460 return tree_int_cst_compare (CASE_LOW (case1), CASE_LOW (case2));
1461 }
1462
1463
1464 /* Sort the case labels in LABEL_VEC in place in ascending order. */
1465
1466 void
1467 sort_case_labels (VEC(tree,heap)* label_vec)
1468 {
1469 size_t len = VEC_length (tree, label_vec);
1470 qsort (VEC_address (tree, label_vec), len, sizeof (tree),
1471 compare_case_labels);
1472 }
1473
1474
1475 /* Gimplify a SWITCH_EXPR, and collect a TREE_VEC of the labels it can
1476 branch to. */
1477
static enum gimplify_status
gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree switch_expr = *expr_p;
  gimple_seq switch_body_seq = NULL;
  enum gimplify_status ret;

  /* The controlling expression must be a GIMPLE value.  */
  ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
                       fb_rvalue);
  if (ret == GS_ERROR || ret == GS_UNHANDLED)
    return ret;

  if (SWITCH_BODY (switch_expr))
    {
      VEC (tree,heap) *labels;
      VEC (tree,heap) *saved_labels;
      tree default_case = NULL_TREE;
      size_t i, len;
      gimple gimple_switch;

      /* If someone can be bothered to fill in the labels, they can
	 be bothered to null out the body too.  */
      gcc_assert (!SWITCH_LABELS (switch_expr));

      /* save old labels, get new ones from body, then restore the old
	 labels.  Save all the things from the switch body to append after.  */
      saved_labels = gimplify_ctxp->case_labels;
      gimplify_ctxp->case_labels = VEC_alloc (tree, heap, 8);

      gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);
      labels = gimplify_ctxp->case_labels;
      gimplify_ctxp->case_labels = saved_labels;

      /* Weed out empty case ranges and pull the default label (the
	 one with no CASE_LOW) out of the vector.  */
      i = 0;
      while (i < VEC_length (tree, labels))
	{
	  tree elt = VEC_index (tree, labels, i);
	  tree low = CASE_LOW (elt);
	  bool remove_element = FALSE;

	  if (low)
	    {
	      /* Discard empty ranges.  */
	      tree high = CASE_HIGH (elt);
	      if (high && tree_int_cst_lt (high, low))
		remove_element = TRUE;
	    }
	  else
	    {
	      /* The default case must be the last label in the list.  */
	      gcc_assert (!default_case);
	      default_case = elt;
	      remove_element = TRUE;
	    }

	  if (remove_element)
	    VEC_ordered_remove (tree, labels, i);
	  else
	    i++;
	}
      len = i;

      if (!VEC_empty (tree, labels))
	sort_case_labels (labels);

      if (!default_case)
	{
	  tree type = TREE_TYPE (switch_expr);

	  /* If the switch has no default label, add one, so that we jump
	     around the switch body.  If the labels already cover the whole
	     range of type, add the default label pointing to one of the
	     existing labels.  */
	  if (type == void_type_node)
	    type = TREE_TYPE (SWITCH_COND (switch_expr));
	  if (len
	      && INTEGRAL_TYPE_P (type)
	      && TYPE_MIN_VALUE (type)
	      && TYPE_MAX_VALUE (type)
	      && tree_int_cst_equal (CASE_LOW (VEC_index (tree, labels, 0)),
				     TYPE_MIN_VALUE (type)))
	    {
	      tree low, high = CASE_HIGH (VEC_index (tree, labels, len - 1));
	      if (!high)
		high = CASE_LOW (VEC_index (tree, labels, len - 1));
	      if (tree_int_cst_equal (high, TYPE_MAX_VALUE (type)))
		{
		  /* Verify the sorted labels are contiguous: each
		     label's low bound must equal the previous label's
		     high bound plus one.  The addition is done in
		     double-word arithmetic via the LOW/HIGH halves of
		     the INTEGER_CSTs, with carry propagated when the
		     low word wraps to zero.  */
		  for (i = 1; i < len; i++)
		    {
		      high = CASE_LOW (VEC_index (tree, labels, i));
		      low = CASE_HIGH (VEC_index (tree, labels, i - 1));
		      if (!low)
			low = CASE_LOW (VEC_index (tree, labels, i - 1));
		      if ((TREE_INT_CST_LOW (low) + 1
			   != TREE_INT_CST_LOW (high))
			  || (TREE_INT_CST_HIGH (low)
			      + (TREE_INT_CST_LOW (high) == 0)
			      != TREE_INT_CST_HIGH (high)))
			break;
		    }
		  /* All gaps closed: the labels exhaust the type, so
		     the default can safely alias the first label.  */
		  if (i == len)
		    default_case = build3 (CASE_LABEL_EXPR, void_type_node,
					   NULL_TREE, NULL_TREE,
					   CASE_LABEL (VEC_index (tree,
								  labels, 0)));
		}
	    }

	  if (!default_case)
	    {
	      gimple new_default;

	      /* Otherwise synthesize a default label placed after the
		 switch body, so an unmatched value skips the body.  */
	      default_case
		= build3 (CASE_LABEL_EXPR, void_type_node,
			  NULL_TREE, NULL_TREE,
			  create_artificial_label (UNKNOWN_LOCATION));
	      new_default = gimple_build_label (CASE_LABEL (default_case));
	      gimplify_seq_add_stmt (&switch_body_seq, new_default);
	    }
	}

      gimple_switch = gimple_build_switch_vec (SWITCH_COND (switch_expr),
					       default_case, labels);
      gimplify_seq_add_stmt (pre_p, gimple_switch);
      gimplify_seq_add_seq (pre_p, switch_body_seq);
      VEC_free(tree, heap, labels);
    }
  else
    gcc_assert (SWITCH_LABELS (switch_expr));

  return GS_ALL_DONE;
}
1610
1611
1612 static enum gimplify_status
1613 gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
1614 {
1615 struct gimplify_ctx *ctxp;
1616 gimple gimple_label;
1617
1618 /* Invalid OpenMP programs can play Duff's Device type games with
1619 #pragma omp parallel. At least in the C front end, we don't
1620 detect such invalid branches until after gimplification. */
1621 for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
1622 if (ctxp->case_labels)
1623 break;
1624
1625 gimple_label = gimple_build_label (CASE_LABEL (*expr_p));
1626 VEC_safe_push (tree, heap, ctxp->case_labels, *expr_p);
1627 gimplify_seq_add_stmt (pre_p, gimple_label);
1628
1629 return GS_ALL_DONE;
1630 }
1631
1632 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
1633 if necessary. */
1634
1635 tree
1636 build_and_jump (tree *label_p)
1637 {
1638 if (label_p == NULL)
1639 /* If there's nowhere to jump, just fall through. */
1640 return NULL_TREE;
1641
1642 if (*label_p == NULL_TREE)
1643 {
1644 tree label = create_artificial_label (UNKNOWN_LOCATION);
1645 *label_p = label;
1646 }
1647
1648 return build1 (GOTO_EXPR, void_type_node, *label_p);
1649 }
1650
1651 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
1652 This also involves building a label to jump to and communicating it to
1653 gimplify_loop_expr through gimplify_ctxp->exit_label. */
1654
1655 static enum gimplify_status
1656 gimplify_exit_expr (tree *expr_p)
1657 {
1658 tree cond = TREE_OPERAND (*expr_p, 0);
1659 tree expr;
1660
1661 expr = build_and_jump (&gimplify_ctxp->exit_label);
1662 expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
1663 *expr_p = expr;
1664
1665 return GS_OK;
1666 }
1667
1668 /* A helper function to be called via walk_tree. Mark all labels under *TP
1669 as being forced. To be called for DECL_INITIAL of static variables. */
1670
1671 tree
1672 force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1673 {
1674 if (TYPE_P (*tp))
1675 *walk_subtrees = 0;
1676 if (TREE_CODE (*tp) == LABEL_DECL)
1677 FORCED_LABEL (*tp) = 1;
1678
1679 return NULL_TREE;
1680 }
1681
1682 /* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is
1683 different from its canonical type, wrap the whole thing inside a
1684 NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
1685 type.
1686
1687 The canonical type of a COMPONENT_REF is the type of the field being
1688 referenced--unless the field is a bit-field which can be read directly
1689 in a smaller mode, in which case the canonical type is the
1690 sign-appropriate type corresponding to that mode. */
1691
static void
canonicalize_component_ref (tree *expr_p)
{
  tree expr = *expr_p;
  tree type;

  gcc_assert (TREE_CODE (expr) == COMPONENT_REF);

  /* For integral references, get_unwidened picks the sign-appropriate
     narrow type a bit-field can be read in; otherwise the canonical
     type is simply the type of the referenced FIELD_DECL.  */
  if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
    type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
  else
    type = TREE_TYPE (TREE_OPERAND (expr, 1));

  /* One could argue that all the stuff below is not necessary for
     the non-bitfield case and declare it a FE error if type
     adjustment would be needed.  */
  if (TREE_TYPE (expr) != type)
    {
#ifdef ENABLE_TYPES_CHECKING
      tree old_type = TREE_TYPE (expr);
#endif
      int type_quals;

      /* We need to preserve qualifiers and propagate them from
	 operand 0.  */
      type_quals = TYPE_QUALS (type)
	| TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
      if (TYPE_QUALS (type) != type_quals)
	type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);

      /* Set the type of the COMPONENT_REF to the underlying type.  */
      TREE_TYPE (expr) = type;

#ifdef ENABLE_TYPES_CHECKING
      /* It is now a FE error, if the conversion from the canonical
	 type to the original expression type is not useless.  */
      gcc_assert (useless_type_conversion_p (old_type, type));
#endif
    }
}
1732
1733 /* If a NOP conversion is changing a pointer to array of foo to a pointer
1734 to foo, embed that change in the ADDR_EXPR by converting
1735 T array[U];
1736 (T *)&array
1737 ==>
1738 &array[L]
1739 where L is the lower bound. For simplicity, only do this for constant
1740 lower bound.
1741 The constraint is that the type of &array[L] is trivially convertible
1742 to T *. */
1743
1744 static void
1745 canonicalize_addr_expr (tree *expr_p)
1746 {
1747 tree expr = *expr_p;
1748 tree addr_expr = TREE_OPERAND (expr, 0);
1749 tree datype, ddatype, pddatype;
1750
1751 /* We simplify only conversions from an ADDR_EXPR to a pointer type. */
1752 if (!POINTER_TYPE_P (TREE_TYPE (expr))
1753 || TREE_CODE (addr_expr) != ADDR_EXPR)
1754 return;
1755
1756 /* The addr_expr type should be a pointer to an array. */
1757 datype = TREE_TYPE (TREE_TYPE (addr_expr));
1758 if (TREE_CODE (datype) != ARRAY_TYPE)
1759 return;
1760
1761 /* The pointer to element type shall be trivially convertible to
1762 the expression pointer type. */
1763 ddatype = TREE_TYPE (datype);
1764 pddatype = build_pointer_type (ddatype);
1765 if (!useless_type_conversion_p (pddatype, ddatype))
1766 return;
1767
1768 /* The lower bound and element sizes must be constant. */
1769 if (!TYPE_SIZE_UNIT (ddatype)
1770 || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
1771 || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
1772 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
1773 return;
1774
1775 /* All checks succeeded. Build a new node to merge the cast. */
1776 *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
1777 TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
1778 NULL_TREE, NULL_TREE);
1779 *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);
1780 }
1781
1782 /* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions
1783 underneath as appropriate. */
1784
static enum gimplify_status
gimplify_conversion (tree *expr_p)
{
  tree tem;
  gcc_assert (CONVERT_EXPR_P (*expr_p));

  /* Then strip away all but the outermost conversion.  */
  STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));

  /* And remove the outermost conversion if it's useless.  */
  if (tree_ssa_useless_type_conversion (*expr_p))
    *expr_p = TREE_OPERAND (*expr_p, 0);

  /* Attempt to avoid NOP_EXPR by producing reference to a subtype.
     For example this fold (subclass *)&A into &A->subclass avoiding
     a need for statement.  */
  if (CONVERT_EXPR_P (*expr_p)
      && POINTER_TYPE_P (TREE_TYPE (*expr_p))
      && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (*expr_p, 0)))
      && (tem = maybe_fold_offset_to_address
	  (EXPR_LOCATION (*expr_p), TREE_OPERAND (*expr_p, 0),
	   integer_zero_node, TREE_TYPE (*expr_p))) != NULL_TREE)
    *expr_p = tem;

  /* If we still have a conversion at the toplevel,
     then canonicalize some constructs.  */
  if (CONVERT_EXPR_P (*expr_p))
    {
      tree sub = TREE_OPERAND (*expr_p, 0);

      /* If a NOP conversion is changing the type of a COMPONENT_REF
	 expression, then canonicalize its type now in order to expose more
	 redundant conversions.  */
      if (TREE_CODE (sub) == COMPONENT_REF)
	canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));

      /* If a NOP conversion is changing a pointer to array of foo
	 to a pointer to foo, embed that change in the ADDR_EXPR.  */
      else if (TREE_CODE (sub) == ADDR_EXPR)
	canonicalize_addr_expr (expr_p);
    }

  /* If we have a conversion to a non-register type force the
     use of a VIEW_CONVERT_EXPR instead.  */
  if (!is_gimple_reg_type (TREE_TYPE (*expr_p)))
    *expr_p = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
			   TREE_OPERAND (*expr_p, 0));

  /* Always GS_OK: the result may have been rewritten and should be
     re-examined by the caller.  */
  return GS_OK;
}
1835
1836 /* Nonlocal VLAs seen in the current function. */
1837 static struct pointer_set_t *nonlocal_vlas;
1838
1839 /* Gimplify a VAR_DECL or PARM_DECL. Returns GS_OK if we expanded a
1840 DECL_VALUE_EXPR, and it's worth re-examining things. */
1841
static enum gimplify_status
gimplify_var_or_parm_decl (tree *expr_p)
{
  tree decl = *expr_p;

  /* ??? If this is a local variable, and it has not been seen in any
     outer BIND_EXPR, then it's probably the result of a duplicate
     declaration, for which we've already issued an error.  It would
     be really nice if the front end wouldn't leak these at all.
     Currently the only known culprit is C++ destructors, as seen
     in g++.old-deja/g++.jason/binding.C.  */
  if (TREE_CODE (decl) == VAR_DECL
      && !DECL_SEEN_IN_BIND_EXPR_P (decl)
      && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
      && decl_function_context (decl) == current_function_decl)
    {
      gcc_assert (errorcount || sorrycount);
      return GS_ERROR;
    }

  /* When within an OpenMP context, notice uses of variables.  */
  if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
    return GS_ALL_DONE;

  /* If the decl is an alias for another expression, substitute it now.  */
  if (DECL_HAS_VALUE_EXPR_P (decl))
    {
      tree value_expr = DECL_VALUE_EXPR (decl);

      /* For referenced nonlocal VLAs add a decl for debugging purposes
	 to the current function.  */
      if (TREE_CODE (decl) == VAR_DECL
	  && TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
	  && nonlocal_vlas != NULL
	  && TREE_CODE (value_expr) == INDIRECT_REF
	  && TREE_CODE (TREE_OPERAND (value_expr, 0)) == VAR_DECL
	  && decl_function_context (decl) != current_function_decl)
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  /* Look through ORT_WORKSHARE contexts; any other enclosing
	     OpenMP region suppresses the debug copy.  */
	  while (ctx && ctx->region_type == ORT_WORKSHARE)
	    ctx = ctx->outer_context;
	  /* Only make one copy per decl (pointer_set_insert returns
	     nonzero if DECL was already present).  */
	  if (!ctx && !pointer_set_insert (nonlocal_vlas, decl))
	    {
	      tree copy = copy_node (decl), block;

	      /* The copy exists purely for debug info: chain it into
		 the outermost block of the current function and give
		 it the same (unshared) value expression.  */
	      lang_hooks.dup_lang_specific_decl (copy);
	      SET_DECL_RTL (copy, NULL_RTX);
	      TREE_USED (copy) = 1;
	      block = DECL_INITIAL (current_function_decl);
	      TREE_CHAIN (copy) = BLOCK_VARS (block);
	      BLOCK_VARS (block) = copy;
	      SET_DECL_VALUE_EXPR (copy, unshare_expr (value_expr));
	      DECL_HAS_VALUE_EXPR_P (copy) = 1;
	    }
	}

      /* Replace the use with an unshared copy of the value expression
	 and let the caller re-gimplify the result.  */
      *expr_p = unshare_expr (value_expr);
      return GS_OK;
    }

  return GS_ALL_DONE;
}
1904
1905
1906 /* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
1907 node *EXPR_P.
1908
1909 compound_lval
1910 : min_lval '[' val ']'
1911 | min_lval '.' ID
1912 | compound_lval '[' val ']'
1913 | compound_lval '.' ID
1914
1915 This is not part of the original SIMPLE definition, which separates
1916 array and member references, but it seems reasonable to handle them
1917 together. Also, this way we don't run into problems with union
1918 aliasing; gcc requires that for accesses through a union to alias, the
1919 union reference must be explicit, which was not always the case when we
1920 were splitting up array and member refs.
1921
1922 PRE_P points to the sequence where side effects that must happen before
1923 *EXPR_P should be stored.
1924
1925 POST_P points to the sequence where side effects that must happen after
1926 *EXPR_P should be stored. */
1927
static enum gimplify_status
gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			fallback_t fallback)
{
  tree *p;
  VEC(tree,heap) *stack;
  enum gimplify_status ret = GS_OK, tret;
  int i;

  /* Create a stack of the subexpressions so later we can walk them in
     order from inner to outer.  */
  stack = VEC_alloc (tree, heap, 10);

  /* We can handle anything that get_inner_reference can deal with.  */
  for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
    {
    restart:
      /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs.  */
      if (TREE_CODE (*p) == INDIRECT_REF)
	*p = fold_indirect_ref (*p);

      if (handled_component_p (*p))
	;
      /* Expand DECL_VALUE_EXPR now.  In some cases that may expose
	 additional COMPONENT_REFs.  */
      else if ((TREE_CODE (*p) == VAR_DECL || TREE_CODE (*p) == PARM_DECL)
	       && gimplify_var_or_parm_decl (p) == GS_OK)
	goto restart;
      else
	break;

      VEC_safe_push (tree, heap, stack, *p);
    }

  /* At least one handled component must have been found, or the caller
     should not have routed this expression here.  */
  gcc_assert (VEC_length (tree, stack));

  /* Now STACK is a stack of pointers to all the refs we've walked through
     and P points to the innermost expression.

     Java requires that we elaborated nodes in source order.  That
     means we must gimplify the inner expression followed by each of
     the indices, in order.  But we can't gimplify the inner
     expression until we deal with any variable bounds, sizes, or
     positions in order to deal with PLACEHOLDER_EXPRs.

     So we do this in three steps.  First we deal with the annotations
     for any variables in the components, then we gimplify the base,
     then we gimplify any indices, from left to right.  */
  for (i = VEC_length (tree, stack) - 1; i >= 0; i--)
    {
      tree t = VEC_index (tree, stack, i);

      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
	{
	  /* Gimplify the low bound and element type size and put them into
	     the ARRAY_REF.  If these values are set, they have already been
	     gimplified.  */
	  if (TREE_OPERAND (t, 2) == NULL_TREE)
	    {
	      tree low = unshare_expr (array_ref_low_bound (t));
	      /* Invariant bounds stay implicit (operand 2 left NULL).  */
	      if (!is_gimple_min_invariant (low))
		{
		  TREE_OPERAND (t, 2) = low;
		  tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
					post_p, is_gimple_reg,
					fb_rvalue);
		  ret = MIN (ret, tret);
		}
	    }

	  if (!TREE_OPERAND (t, 3))
	    {
	      tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
	      tree elmt_size = unshare_expr (array_ref_element_size (t));
	      tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));

	      /* Divide the element size by the alignment of the element
		 type (above).  */
	      elmt_size = size_binop (EXACT_DIV_EXPR, elmt_size, factor);

	      if (!is_gimple_min_invariant (elmt_size))
		{
		  TREE_OPERAND (t, 3) = elmt_size;
		  tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
					post_p, is_gimple_reg,
					fb_rvalue);
		  ret = MIN (ret, tret);
		}
	    }
	}
      else if (TREE_CODE (t) == COMPONENT_REF)
	{
	  /* Set the field offset into T and gimplify it.  */
	  if (!TREE_OPERAND (t, 2))
	    {
	      tree offset = unshare_expr (component_ref_field_offset (t));
	      tree field = TREE_OPERAND (t, 1);
	      tree factor
		= size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);

	      /* Divide the offset by its alignment.  */
	      offset = size_binop (EXACT_DIV_EXPR, offset, factor);

	      if (!is_gimple_min_invariant (offset))
		{
		  TREE_OPERAND (t, 2) = offset;
		  tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
					post_p, is_gimple_reg,
					fb_rvalue);
		  ret = MIN (ret, tret);
		}
	    }
	}
    }

  /* Step 2 is to gimplify the base expression.  Make sure lvalue is set
     so as to match the min_lval predicate.  Failure to do so may result
     in the creation of large aggregate temporaries.  */
  tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
			fallback | fb_lvalue);
  ret = MIN (ret, tret);

  /* And finally, the indices and operands to BIT_FIELD_REF.  During this
     loop we also remove any useless conversions.  */
  for (; VEC_length (tree, stack) > 0; )
    {
      tree t = VEC_pop (tree, stack);

      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
	{
	  /* Gimplify the dimension.  */
	  if (!is_gimple_min_invariant (TREE_OPERAND (t, 1)))
	    {
	      tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
				    is_gimple_val, fb_rvalue);
	      ret = MIN (ret, tret);
	    }
	}
      else if (TREE_CODE (t) == BIT_FIELD_REF)
	{
	  /* Both the bit size (operand 1) and the bit position
	     (operand 2) must be GIMPLE values.  */
	  tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
				is_gimple_val, fb_rvalue);
	  ret = MIN (ret, tret);
	  tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
				is_gimple_val, fb_rvalue);
	  ret = MIN (ret, tret);
	}

      STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));

      /* The innermost expression P may have originally had
	 TREE_SIDE_EFFECTS set which would have caused all the outer
	 expressions in *EXPR_P leading to P to also have had
	 TREE_SIDE_EFFECTS set.  */
      recalculate_side_effects (t);
    }

  /* If the outermost expression is a COMPONENT_REF, canonicalize its type.  */
  if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
    {
      canonicalize_component_ref (expr_p);
      ret = MIN (ret, GS_OK);
    }

  VEC_free (tree, heap, stack);

  return ret;
}
2096
/* Gimplify the self modifying expression pointed to by EXPR_P
   (++ and --, both prefix and postfix; compound assignments such as
   += and -= are handled elsewhere).
2099
2100 PRE_P points to the list where side effects that must happen before
2101 *EXPR_P should be stored.
2102
2103 POST_P points to the list where side effects that must happen after
2104 *EXPR_P should be stored.
2105
2106 WANT_VALUE is nonzero iff we want to use the value of this expression
2107 in another expression. */
2108
static enum gimplify_status
gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			bool want_value)
{
  enum tree_code code;
  tree lhs, lvalue, rhs, t1;
  /* POST collects the inner expression's own post-queue side effects
     when handling a postfix operator; ORIG_POST_P remembers the
     caller's queue so the update can be appended to it afterwards.  */
  gimple_seq post = NULL, *orig_post_p = post_p;
  bool postfix;
  enum tree_code arith_code;
  enum gimplify_status ret;

  code = TREE_CODE (*expr_p);

  /* Only the four increment/decrement codes reach this function;
     compound assignments (+=, -=) are lowered elsewhere.  */
  gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
	      || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);

  /* Prefix or postfix?  */
  if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
    /* Faster to treat as prefix if result is not used.  */
    postfix = want_value;
  else
    postfix = false;

  /* For postfix, make sure the inner expression's post side effects
     are executed after side effects from this expression.  */
  if (postfix)
    post_p = &post;

  /* Add or subtract?  */
  if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
    arith_code = PLUS_EXPR;
  else
    arith_code = MINUS_EXPR;

  /* Gimplify the LHS into a GIMPLE lvalue.  */
  lvalue = TREE_OPERAND (*expr_p, 0);
  ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
  if (ret == GS_ERROR)
    return ret;

  /* Extract the operands to the arithmetic operation.  */
  lhs = lvalue;
  rhs = TREE_OPERAND (*expr_p, 1);

  /* For postfix operator, we evaluate the LHS to an rvalue and then use
     that as the result value and in the postqueue operation.  We also
     make sure to make lvalue a minimal lval, see
     gcc.c-torture/execute/20040313-1.c for an example where this matters.  */
  if (postfix)
    {
      if (!is_gimple_min_lval (lvalue))
	{
	  /* Route the lvalue through an address so that the deferred
	     store in the post queue refers to the very same object
	     that was read below.  */
	  mark_addressable (lvalue);
	  lvalue = build_fold_addr_expr (lvalue);
	  gimplify_expr (&lvalue, pre_p, post_p, is_gimple_val, fb_rvalue);
	  lvalue = build_fold_indirect_ref (lvalue);
	}
      /* The pre-update value becomes the result of the expression.  */
      ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
      if (ret == GS_ERROR)
	return ret;
    }

  /* For POINTERs increment, use POINTER_PLUS_EXPR.  */
  if (POINTER_TYPE_P (TREE_TYPE (lhs)))
    {
      rhs = fold_convert (sizetype, rhs);
      /* POINTER_PLUS_EXPR only adds, so subtracting is expressed by
	 negating the offset first.  */
      if (arith_code == MINUS_EXPR)
	rhs = fold_build1 (NEGATE_EXPR, TREE_TYPE (rhs), rhs);
      arith_code = POINTER_PLUS_EXPR;
    }

  t1 = build2 (arith_code, TREE_TYPE (*expr_p), lhs, rhs);

  if (postfix)
    {
      /* Emit the update into the caller's post queue, followed by the
	 inner expression's saved post effects, and yield the old value.  */
      gimplify_assign (lvalue, t1, orig_post_p);
      gimplify_seq_add_seq (orig_post_p, post);
      *expr_p = lhs;
      return GS_ALL_DONE;
    }
  else
    {
      /* Prefix: rewrite as a plain assignment and let the caller
	 gimplify that.  */
      *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
      return GS_OK;
    }
}
2195
2196
2197 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
2198
2199 static void
2200 maybe_with_size_expr (tree *expr_p)
2201 {
2202 tree expr = *expr_p;
2203 tree type = TREE_TYPE (expr);
2204 tree size;
2205
2206 /* If we've already wrapped this or the type is error_mark_node, we can't do
2207 anything. */
2208 if (TREE_CODE (expr) == WITH_SIZE_EXPR
2209 || type == error_mark_node)
2210 return;
2211
2212 /* If the size isn't known or is a constant, we have nothing to do. */
2213 size = TYPE_SIZE_UNIT (type);
2214 if (!size || TREE_CODE (size) == INTEGER_CST)
2215 return;
2216
2217 /* Otherwise, make a WITH_SIZE_EXPR. */
2218 size = unshare_expr (size);
2219 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
2220 *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
2221 }
2222
2223
2224 /* Helper for gimplify_call_expr. Gimplify a single argument *ARG_P
2225 Store any side-effects in PRE_P. CALL_LOCATION is the location of
2226 the CALL_EXPR. */
2227
2228 static enum gimplify_status
2229 gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location)
2230 {
2231 bool (*test) (tree);
2232 fallback_t fb;
2233
2234 /* In general, we allow lvalues for function arguments to avoid
2235 extra overhead of copying large aggregates out of even larger
2236 aggregates into temporaries only to copy the temporaries to
2237 the argument list. Make optimizers happy by pulling out to
2238 temporaries those types that fit in registers. */
2239 if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
2240 test = is_gimple_val, fb = fb_rvalue;
2241 else
2242 test = is_gimple_lvalue, fb = fb_either;
2243
2244 /* If this is a variable sized type, we must remember the size. */
2245 maybe_with_size_expr (arg_p);
2246
2247 /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c. */
2248 /* Make sure arguments have the same location as the function call
2249 itself. */
2250 protected_set_expr_location (*arg_p, call_location);
2251
2252 /* There is a sequence point before a function call. Side effects in
2253 the argument list must occur before the actual call. So, when
2254 gimplifying arguments, force gimplify_expr to use an internal
2255 post queue which is then appended to the end of PRE_P. */
2256 return gimplify_expr (arg_p, pre_p, NULL, test, fb);
2257 }
2258
2259
2260 /* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
2261 WANT_VALUE is true if the result of the call is desired. */
2262
static enum gimplify_status
gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
{
  tree fndecl, parms, p;
  enum gimplify_status ret;
  int i, nargs;
  gimple call;
  /* Set when the callee is __builtin_va_start; its second argument
     must remain the plain PARM_DECL and is skipped when gimplifying
     the arguments below.  */
  bool builtin_va_start_p = FALSE;

  gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);

  /* For reliable diagnostics during inlining, it is necessary that
     every call_expr be annotated with file and line.  */
  if (! EXPR_HAS_LOCATION (*expr_p))
    SET_EXPR_LOCATION (*expr_p, input_location);

  /* This may be a call to a builtin function.

     Builtin function calls may be transformed into different
     (and more efficient) builtin function calls under certain
     circumstances.  Unfortunately, gimplification can muck things
     up enough that the builtin expanders are not aware that certain
     transformations are still valid.

     So we attempt transformation/gimplification of the call before
     we gimplify the CALL_EXPR.  At this time we do not manage to
     transform all calls in the same manner as the expanders do, but
     we do transform most of them.  */
  fndecl = get_callee_fndecl (*expr_p);
  if (fndecl && DECL_BUILT_IN (fndecl))
    {
      tree new_tree = fold_call_expr (*expr_p, !want_value);

      if (new_tree && new_tree != *expr_p)
	{
	  /* There was a transformation of this call which computes the
	     same value, but in a more efficient way.  Return and try
	     again.  */
	  *expr_p = new_tree;
	  return GS_OK;
	}

      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	  && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_VA_START)
        {
	  builtin_va_start_p = TRUE;
	  if (call_expr_nargs (*expr_p) < 2)
	    {
	      error ("too few arguments to function %<va_start%>");
	      /* Replace the broken call with an empty statement so
		 gimplification can continue.  */
	      *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
	      return GS_OK;
	    }

	  if (fold_builtin_next_arg (*expr_p, true))
	    {
	      /* The second argument was invalid; drop the call.  */
	      *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
	      return GS_OK;
	    }
	}
    }

  /* There is a sequence point before the call, so any side effects in
     the calling expression must occur before the actual call.  Force
     gimplify_expr to use an internal post queue.  */
  ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
		       is_gimple_call_addr, fb_rvalue);

  nargs = call_expr_nargs (*expr_p);

  /* Get argument types for verification.  */
  fndecl = get_callee_fndecl (*expr_p);
  parms = NULL_TREE;
  if (fndecl)
    parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  else if (POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_FN (*expr_p))))
    parms = TYPE_ARG_TYPES (TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (*expr_p))));

  if (fndecl && DECL_ARGUMENTS (fndecl))
    p = DECL_ARGUMENTS (fndecl);
  else if (parms)
    p = parms;
  else
    p = NULL_TREE;
  /* Walk P past the first NARGS parameters; afterwards P is non-NULL
     iff the last actual argument lands on a named parameter.  */
  for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
    ;

  /* If the last argument is __builtin_va_arg_pack () and it is not
     passed as a named argument, decrease the number of CALL_EXPR
     arguments and set instead the CALL_EXPR_VA_ARG_PACK flag.  */
  if (!p
      && i < nargs
      && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
    {
      tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
      tree last_arg_fndecl = get_callee_fndecl (last_arg);

      if (last_arg_fndecl
	  && TREE_CODE (last_arg_fndecl) == FUNCTION_DECL
	  && DECL_BUILT_IN_CLASS (last_arg_fndecl) == BUILT_IN_NORMAL
	  && DECL_FUNCTION_CODE (last_arg_fndecl) == BUILT_IN_VA_ARG_PACK)
	{
	  tree call = *expr_p;

	  /* Rebuild the CALL_EXPR without the trailing
	     __builtin_va_arg_pack () argument.  */
	  --nargs;
	  *expr_p = build_call_array (TREE_TYPE (call), CALL_EXPR_FN (call),
				      nargs, CALL_EXPR_ARGP (call));

	  /* Copy all CALL_EXPR flags, location and block, except
	     CALL_EXPR_VA_ARG_PACK flag.  */
	  CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
	  CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
	  CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
	    = CALL_EXPR_RETURN_SLOT_OPT (call);
	  CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
	  CALL_CANNOT_INLINE_P (*expr_p) = CALL_CANNOT_INLINE_P (call);
	  SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));
	  TREE_BLOCK (*expr_p) = TREE_BLOCK (call);

	  /* Set CALL_EXPR_VA_ARG_PACK.  */
	  CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
	}
    }

  /* Finally, gimplify the function arguments.  */
  if (nargs > 0)
    {
      for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
           PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
           PUSH_ARGS_REVERSED ? i-- : i++)
        {
          enum gimplify_status t;

          /* Avoid gimplifying the second argument to va_start, which needs to
             be the plain PARM_DECL.  */
          if ((i != 1) || !builtin_va_start_p)
            {
              t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
				EXPR_LOCATION (*expr_p));

              if (t == GS_ERROR)
                ret = GS_ERROR;
            }
        }
    }

  /* Try this again in case gimplification exposed something.  */
  if (ret != GS_ERROR)
    {
      tree new_tree = fold_call_expr (*expr_p, !want_value);

      if (new_tree && new_tree != *expr_p)
	{
	  /* There was a transformation of this call which computes the
	     same value, but in a more efficient way.  Return and try
	     again.  */
	  *expr_p = new_tree;
	  return GS_OK;
	}
    }
  else
    {
      *expr_p = error_mark_node;
      return GS_ERROR;
    }

  /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
     decl.  This allows us to eliminate redundant or useless
     calls to "const" functions.  */
  if (TREE_CODE (*expr_p) == CALL_EXPR)
    {
      int flags = call_expr_flags (*expr_p);
      if (flags & (ECF_CONST | ECF_PURE)
	  /* An infinite loop is considered a side effect.  */
	  && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
	TREE_SIDE_EFFECTS (*expr_p) = 0;
    }

  /* If the value is not needed by the caller, emit a new GIMPLE_CALL
     and clear *EXPR_P.  Otherwise, leave *EXPR_P in its gimplified
     form and delegate the creation of a GIMPLE_CALL to
     gimplify_modify_expr.  This is always possible because when
     WANT_VALUE is true, the caller wants the result of this call into
     a temporary, which means that we will emit an INIT_EXPR in
     internal_get_tmp_var which will then be handled by
     gimplify_modify_expr.  */
  if (!want_value)
    {
      /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
	 have to do is replicate it as a GIMPLE_CALL tuple.  */
      call = gimple_build_call_from_tree (*expr_p);
      gimplify_seq_add_stmt (pre_p, call);
      *expr_p = NULL_TREE;
    }

  return ret;
}
2459
2460 /* Handle shortcut semantics in the predicate operand of a COND_EXPR by
2461 rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
2462
2463 TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
2464 condition is true or false, respectively. If null, we should generate
2465 our own to skip over the evaluation of this specific expression.
2466
2467 LOCUS is the source location of the COND_EXPR.
2468
2469 This function is the tree equivalent of do_jump.
2470
2471 shortcut_cond_r should only be called by shortcut_cond_expr. */
2472
static tree
shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
		 location_t locus)
{
  /* Label created for this expression's own use when the caller did
     not supply one for the relevant branch.  */
  tree local_label = NULL_TREE;
  tree t, expr = NULL;

  /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
     retain the shortcut semantics.  Just insert the gotos here;
     shortcut_cond_expr will append the real blocks later.  */
  if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
    {
      location_t new_locus;

      /* Turn if (a && b) into

	 if (a); else goto no;
	 if (b) goto yes; else goto no;
	 (no:) */

      if (false_label_p == NULL)
	false_label_p = &local_label;

      /* Keep the original source location on the first 'if'.  */
      t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus);
      append_to_statement_list (t, &expr);

      /* Set the source location of the && on the second 'if'.  */
      new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
      t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
			   new_locus);
      append_to_statement_list (t, &expr);
    }
  else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
    {
      location_t new_locus;

      /* Turn if (a || b) into

	 if (a) goto yes;
	 if (b) goto yes; else goto no;
	 (yes:) */

      if (true_label_p == NULL)
	true_label_p = &local_label;

      /* Keep the original source location on the first 'if'.  */
      t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus);
      append_to_statement_list (t, &expr);

      /* Set the source location of the || on the second 'if'.  */
      new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
      t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
			   new_locus);
      append_to_statement_list (t, &expr);
    }
  else if (TREE_CODE (pred) == COND_EXPR)
    {
      location_t new_locus;

      /* As long as we're messing with gotos, turn if (a ? b : c) into
	 if (a)
	   if (b) goto yes; else goto no;
	 else
	   if (c) goto yes; else goto no;  */

      /* Keep the original source location on the first 'if'.  Set the source
	 location of the ? on the second 'if'.  */
      new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
      expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
		     shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
				      false_label_p, locus),
		     shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
				      false_label_p, new_locus));
    }
  else
    {
      /* Base case: PRED has no shortcut operators at its root, so emit
	 a plain two-way conditional jump on it.  */
      expr = build3 (COND_EXPR, void_type_node, pred,
		     build_and_jump (true_label_p),
		     build_and_jump (false_label_p));
      SET_EXPR_LOCATION (expr, locus);
    }

  if (local_label)
    {
      /* A local "skip this subexpression" label was created above;
	 it belongs at the end of the generated sequence.  */
      t = build1 (LABEL_EXPR, void_type_node, local_label);
      append_to_statement_list (t, &expr);
    }

  return expr;
}
2564
2565 /* Given a conditional expression EXPR with short-circuit boolean
2566 predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
   predicate apart into the equivalent sequence of conditionals.  */
2568
static tree
shortcut_cond_expr (tree expr)
{
  tree pred = TREE_OPERAND (expr, 0);
  tree then_ = TREE_OPERAND (expr, 1);
  tree else_ = TREE_OPERAND (expr, 2);
  tree true_label, false_label, end_label, t;
  tree *true_label_p;
  tree *false_label_p;
  bool emit_end, emit_false, jump_over_else;
  /* Whether each arm contains anything that must actually execute.  */
  bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
  bool else_se = else_ && TREE_SIDE_EFFECTS (else_);

  /* First do simple transformations.  */
  if (!else_se)
    {
      /* If there is no 'else', turn
	   if (a && b) then c
	 into
	   if (a) if (b) then c.  */
      while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
	{
	  /* Keep the original source location on the first 'if'.  */
	  location_t locus = EXPR_HAS_LOCATION (expr)
			     ? EXPR_LOCATION (expr) : input_location;
	  TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
	  /* Set the source location of the && on the second 'if'.  */
	  if (EXPR_HAS_LOCATION (pred))
	    SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
	  then_ = shortcut_cond_expr (expr);
	  then_se = then_ && TREE_SIDE_EFFECTS (then_);
	  pred = TREE_OPERAND (pred, 0);
	  expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
	  SET_EXPR_LOCATION (expr, locus);
	}
    }

  if (!then_se)
    {
      /* If there is no 'then', turn
	   if (a || b); else d
	 into
	   if (a); else if (b); else d.  */
      while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
	{
	  /* Keep the original source location on the first 'if'.  */
	  location_t locus = EXPR_HAS_LOCATION (expr)
			     ? EXPR_LOCATION (expr) : input_location;
	  TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
	  /* Set the source location of the || on the second 'if'.  */
	  if (EXPR_HAS_LOCATION (pred))
	    SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
	  else_ = shortcut_cond_expr (expr);
	  else_se = else_ && TREE_SIDE_EFFECTS (else_);
	  pred = TREE_OPERAND (pred, 0);
	  expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
	  SET_EXPR_LOCATION (expr, locus);
	}
    }

  /* If we're done, great.  */
  if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
      && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
    return expr;

  /* Otherwise we need to mess with gotos.  Change
       if (a) c; else d;
     to
       if (a); else goto no;
       c; goto end;
       no: d; end:
     and recursively gimplify the condition.  */

  true_label = false_label = end_label = NULL_TREE;

  /* If our arms just jump somewhere, hijack those labels so we don't
     generate jumps to jumps.  */

  if (then_
      && TREE_CODE (then_) == GOTO_EXPR
      && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL)
    {
      true_label = GOTO_DESTINATION (then_);
      then_ = NULL;
      then_se = false;
    }

  if (else_
      && TREE_CODE (else_) == GOTO_EXPR
      && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL)
    {
      false_label = GOTO_DESTINATION (else_);
      else_ = NULL;
      else_se = false;
    }

  /* If we aren't hijacking a label for the 'then' branch, it falls through.  */
  if (true_label)
    true_label_p = &true_label;
  else
    true_label_p = NULL;

  /* The 'else' branch also needs a label if it contains interesting code.  */
  if (false_label || else_se)
    false_label_p = &false_label;
  else
    false_label_p = NULL;

  /* If there was nothing else in our arms, just forward the label(s).  */
  if (!then_se && !else_se)
    return shortcut_cond_r (pred, true_label_p, false_label_p,
			    EXPR_HAS_LOCATION (expr)
			    ? EXPR_LOCATION (expr) : input_location);

  /* If our last subexpression already has a terminal label, reuse it.  */
  if (else_se)
    t = expr_last (else_);
  else if (then_se)
    t = expr_last (then_);
  else
    t = NULL;
  if (t && TREE_CODE (t) == LABEL_EXPR)
    end_label = LABEL_EXPR_LABEL (t);

  /* If we don't care about jumping to the 'else' branch, jump to the end
     if the condition is false.  */
  if (!false_label_p)
    false_label_p = &end_label;

  /* We only want to emit these labels if we aren't hijacking them.  */
  emit_end = (end_label == NULL_TREE);
  emit_false = (false_label == NULL_TREE);

  /* We only emit the jump over the else clause if we have to--if the
     then clause may fall through.  Otherwise we can wind up with a
     useless jump and a useless label at the end of gimplified code,
     which will cause us to think that this conditional as a whole
     falls through even if it doesn't.  If we then inline a function
     which ends with such a condition, that can cause us to issue an
     inappropriate warning about control reaching the end of a
     non-void function.  */
  jump_over_else = block_may_fallthru (then_);

  /* Emit the conditional jumps first, then the arms in order.  */
  pred = shortcut_cond_r (pred, true_label_p, false_label_p,
			  EXPR_HAS_LOCATION (expr)
			  ? EXPR_LOCATION (expr) : input_location);

  expr = NULL;
  append_to_statement_list (pred, &expr);

  append_to_statement_list (then_, &expr);
  if (else_se)
    {
      if (jump_over_else)
	{
	  tree last = expr_last (expr);
	  t = build_and_jump (&end_label);
	  if (EXPR_HAS_LOCATION (last))
	    SET_EXPR_LOCATION (t, EXPR_LOCATION (last));
	  append_to_statement_list (t, &expr);
	}
      if (emit_false)
	{
	  t = build1 (LABEL_EXPR, void_type_node, false_label);
	  append_to_statement_list (t, &expr);
	}
      append_to_statement_list (else_, &expr);
    }
  if (emit_end && end_label)
    {
      t = build1 (LABEL_EXPR, void_type_node, end_label);
      append_to_statement_list (t, &expr);
    }

  return expr;
}
2745
2746 /* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */
2747
2748 tree
2749 gimple_boolify (tree expr)
2750 {
2751 tree type = TREE_TYPE (expr);
2752
2753 if (TREE_CODE (type) == BOOLEAN_TYPE)
2754 return expr;
2755
2756 switch (TREE_CODE (expr))
2757 {
2758 case TRUTH_AND_EXPR:
2759 case TRUTH_OR_EXPR:
2760 case TRUTH_XOR_EXPR:
2761 case TRUTH_ANDIF_EXPR:
2762 case TRUTH_ORIF_EXPR:
2763 /* Also boolify the arguments of truth exprs. */
2764 TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
2765 /* FALLTHRU */
2766
2767 case TRUTH_NOT_EXPR:
2768 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
2769 /* FALLTHRU */
2770
2771 case EQ_EXPR: case NE_EXPR:
2772 case LE_EXPR: case GE_EXPR: case LT_EXPR: case GT_EXPR:
2773 /* These expressions always produce boolean results. */
2774 TREE_TYPE (expr) = boolean_type_node;
2775 return expr;
2776
2777 default:
2778 /* Other expressions that get here must have boolean values, but
2779 might need to be converted to the appropriate mode. */
2780 return fold_convert (boolean_type_node, expr);
2781 }
2782 }
2783
2784 /* Given a conditional expression *EXPR_P without side effects, gimplify
2785 its operands. New statements are inserted to PRE_P. */
2786
2787 static enum gimplify_status
2788 gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
2789 {
2790 tree expr = *expr_p, cond;
2791 enum gimplify_status ret, tret;
2792 enum tree_code code;
2793
2794 cond = gimple_boolify (COND_EXPR_COND (expr));
2795
2796 /* We need to handle && and || specially, as their gimplification
2797 creates pure cond_expr, thus leading to an infinite cycle otherwise. */
2798 code = TREE_CODE (cond);
2799 if (code == TRUTH_ANDIF_EXPR)
2800 TREE_SET_CODE (cond, TRUTH_AND_EXPR);
2801 else if (code == TRUTH_ORIF_EXPR)
2802 TREE_SET_CODE (cond, TRUTH_OR_EXPR);
2803 ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_condexpr, fb_rvalue);
2804 COND_EXPR_COND (*expr_p) = cond;
2805
2806 tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
2807 is_gimple_val, fb_rvalue);
2808 ret = MIN (ret, tret);
2809 tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
2810 is_gimple_val, fb_rvalue);
2811
2812 return MIN (ret, tret);
2813 }
2814
2815 /* Returns true if evaluating EXPR could trap.
2816 EXPR is GENERIC, while tree_could_trap_p can be called
2817 only on GIMPLE. */
2818
2819 static bool
2820 generic_expr_could_trap_p (tree expr)
2821 {
2822 unsigned i, n;
2823
2824 if (!expr || is_gimple_val (expr))
2825 return false;
2826
2827 if (!EXPR_P (expr) || tree_could_trap_p (expr))
2828 return true;
2829
2830 n = TREE_OPERAND_LENGTH (expr);
2831 for (i = 0; i < n; i++)
2832 if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
2833 return true;
2834
2835 return false;
2836 }
2837
2838 /* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
2839 into
2840
2841 if (p) if (p)
2842 t1 = a; a;
2843 else or else
2844 t1 = b; b;
2845 t1;
2846
2847 The second form is used when *EXPR_P is of type void.
2848
2849 PRE_P points to the list where side effects that must happen before
2850 *EXPR_P should be stored. */
2851
static enum gimplify_status
gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
{
  tree expr = *expr_p;
  tree tmp, type, arm1, arm2;
  enum gimplify_status ret;
  tree label_true, label_false, label_cont;
  /* Whether the THEN/ELSE arm was a bare goto whose label we reused
     directly in the GIMPLE_COND, making a separate arm unnecessary.  */
  bool have_then_clause_p, have_else_clause_p;
  gimple gimple_cond;
  enum tree_code pred_code;
  gimple_seq seq = NULL;

  type = TREE_TYPE (expr);

  /* If this COND_EXPR has a value, copy the values into a temporary within
     the arms.  */
  if (! VOID_TYPE_P (type))
    {
      tree result;

      /* If an rvalue is ok or we do not require an lvalue, avoid creating
	 an addressable temporary.  */
      if (((fallback & fb_rvalue)
	   || !(fallback & fb_lvalue))
	  && !TREE_ADDRESSABLE (type))
	{
	  if (gimplify_ctxp->allow_rhs_cond_expr
	      /* If either branch has side effects or could trap, it can't be
		 evaluated unconditionally.  */
	      && !TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p, 1))
	      && !generic_expr_could_trap_p (TREE_OPERAND (*expr_p, 1))
	      && !TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p, 2))
	      && !generic_expr_could_trap_p (TREE_OPERAND (*expr_p, 2)))
	    return gimplify_pure_cond_expr (expr_p, pre_p);

	  result = tmp = create_tmp_var (TREE_TYPE (expr), "iftmp");
	  ret = GS_ALL_DONE;
	}
      else
	{
	  /* An lvalue result is required: take the address of each arm
	     and make the temporary a pointer, so that dereferencing it
	     yields the selected object itself.  */
	  tree type = build_pointer_type (TREE_TYPE (expr));

	  if (TREE_TYPE (TREE_OPERAND (expr, 1)) != void_type_node)
	    TREE_OPERAND (expr, 1) =
	      build_fold_addr_expr (TREE_OPERAND (expr, 1));

	  if (TREE_TYPE (TREE_OPERAND (expr, 2)) != void_type_node)
	    TREE_OPERAND (expr, 2) =
	      build_fold_addr_expr (TREE_OPERAND (expr, 2));

	  tmp = create_tmp_var (type, "iftmp");

	  expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (expr, 0),
			 TREE_OPERAND (expr, 1), TREE_OPERAND (expr, 2));

	  result = build_fold_indirect_ref (tmp);
	}

      /* Build the then clause, 't1 = a;'.  But don't build an assignment
	 if this branch is void; in C++ it can be, if it's a throw.  */
      if (TREE_TYPE (TREE_OPERAND (expr, 1)) != void_type_node)
	TREE_OPERAND (expr, 1)
	  = build2 (MODIFY_EXPR, TREE_TYPE (tmp), tmp, TREE_OPERAND (expr, 1));

      /* Build the else clause, 't1 = b;'.  */
      if (TREE_TYPE (TREE_OPERAND (expr, 2)) != void_type_node)
	TREE_OPERAND (expr, 2)
	  = build2 (MODIFY_EXPR, TREE_TYPE (tmp), tmp, TREE_OPERAND (expr, 2));

      TREE_TYPE (expr) = void_type_node;
      recalculate_side_effects (expr);

      /* Move the COND_EXPR to the prequeue.  */
      gimplify_stmt (&expr, pre_p);

      *expr_p = result;
      return GS_ALL_DONE;
    }

  /* Make sure the condition has BOOLEAN_TYPE.  */
  TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));

  /* Break apart && and || conditions.  */
  if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
      || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
    {
      expr = shortcut_cond_expr (expr);

      if (expr != *expr_p)
	{
	  *expr_p = expr;

	  /* We can't rely on gimplify_expr to re-gimplify the expanded
	     form properly, as cleanups might cause the target labels to be
	     wrapped in a TRY_FINALLY_EXPR.  To prevent that, we need to
	     set up a conditional context.  */
	  gimple_push_condition ();
	  gimplify_stmt (expr_p, &seq);
	  gimple_pop_condition (pre_p);
	  gimple_seq_add_seq (pre_p, seq);

	  return GS_ALL_DONE;
	}
    }

  /* Now do the normal gimplification.  */

  /* Gimplify condition.  */
  ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL, is_gimple_condexpr,
		       fb_rvalue);
  if (ret == GS_ERROR)
    return GS_ERROR;
  gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);

  gimple_push_condition ();

  /* If an arm is just 'goto label' to a label in this function, jump
     straight to that label from the GIMPLE_COND instead of emitting the
     arm at all.  */
  have_then_clause_p = have_else_clause_p = false;
  if (TREE_OPERAND (expr, 1) != NULL
      && TREE_CODE (TREE_OPERAND (expr, 1)) == GOTO_EXPR
      && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 1))) == LABEL_DECL
      && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 1)))
	  == current_function_decl)
      /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
	 have different locations, otherwise we end up with incorrect
	 location information on the branches.  */
      && (optimize
	  || !EXPR_HAS_LOCATION (expr)
	  || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 1))
	  || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 1))))
    {
      label_true = GOTO_DESTINATION (TREE_OPERAND (expr, 1));
      have_then_clause_p = true;
    }
  else
    label_true = create_artificial_label (UNKNOWN_LOCATION);
  if (TREE_OPERAND (expr, 2) != NULL
      && TREE_CODE (TREE_OPERAND (expr, 2)) == GOTO_EXPR
      && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 2))) == LABEL_DECL
      && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 2)))
	  == current_function_decl)
      /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
	 have different locations, otherwise we end up with incorrect
	 location information on the branches.  */
      && (optimize
	  || !EXPR_HAS_LOCATION (expr)
	  || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 2))
	  || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 2))))
    {
      label_false = GOTO_DESTINATION (TREE_OPERAND (expr, 2));
      have_else_clause_p = true;
    }
  else
    label_false = create_artificial_label (UNKNOWN_LOCATION);

  gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
				 &arm2);

  gimple_cond = gimple_build_cond (pred_code, arm1, arm2, label_true,
                                   label_false);

  gimplify_seq_add_stmt (&seq, gimple_cond);
  label_cont = NULL_TREE;
  if (!have_then_clause_p)
    {
      /* For if (...) {} else { code; } put label_true after
	 the else block.  */
      if (TREE_OPERAND (expr, 1) == NULL_TREE
	  && !have_else_clause_p
	  && TREE_OPERAND (expr, 2) != NULL_TREE)
	label_cont = label_true;
      else
	{
	  gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
	  have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
	  /* For if (...) { code; } else {} or
	     if (...) { code; } else goto label; or
	     if (...) { code; return; } else { ... }
	     label_cont isn't needed.  */
	  if (!have_else_clause_p
	      && TREE_OPERAND (expr, 2) != NULL_TREE
	      && gimple_seq_may_fallthru (seq))
	    {
	      gimple g;
	      label_cont = create_artificial_label (UNKNOWN_LOCATION);

	      g = gimple_build_goto (label_cont);

	      /* GIMPLE_COND's are very low level; they have embedded
		 gotos.  This particular embedded goto should not be marked
		 with the location of the original COND_EXPR, as it would
		 correspond to the COND_EXPR's condition, not the ELSE or the
		 THEN arms.  To avoid marking it with the wrong location, flag
		 it as "no location".  */
	      gimple_set_do_not_emit_location (g);

	      gimplify_seq_add_stmt (&seq, g);
	    }
	}
    }
  if (!have_else_clause_p)
    {
      gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
      have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
    }
  if (label_cont)
    gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));

  gimple_pop_condition (pre_p);
  gimple_seq_add_seq (pre_p, seq);

  if (ret == GS_ERROR)
    ; /* Do nothing.  */
  else if (have_then_clause_p || have_else_clause_p)
    ret = GS_ALL_DONE;
  else
    {
      /* Both arms are empty; replace the COND_EXPR with its predicate.  */
      expr = TREE_OPERAND (expr, 0);
      gimplify_stmt (&expr, pre_p);
    }

  *expr_p = NULL;
  return ret;
}
3076
3077 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
3078 a call to __builtin_memcpy. */
3079
3080 static enum gimplify_status
3081 gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
3082 gimple_seq *seq_p)
3083 {
3084 tree t, to, to_ptr, from, from_ptr;
3085 gimple gs;
3086
3087 to = TREE_OPERAND (*expr_p, 0);
3088 from = TREE_OPERAND (*expr_p, 1);
3089
3090 mark_addressable (from);
3091 from_ptr = build_fold_addr_expr (from);
3092 gimplify_arg (&from_ptr, seq_p, EXPR_LOCATION (*expr_p));
3093
3094 mark_addressable (to);
3095 to_ptr = build_fold_addr_expr (to);
3096 gimplify_arg (&to_ptr, seq_p, EXPR_LOCATION (*expr_p));
3097
3098 t = implicit_built_in_decls[BUILT_IN_MEMCPY];
3099
3100 gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);
3101
3102 if (want_value)
3103 {
3104 /* tmp = memcpy() */
3105 t = create_tmp_var (TREE_TYPE (to_ptr), NULL);
3106 gimple_call_set_lhs (gs, t);
3107 gimplify_seq_add_stmt (seq_p, gs);
3108
3109 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
3110 return GS_ALL_DONE;
3111 }
3112
3113 gimplify_seq_add_stmt (seq_p, gs);
3114 *expr_p = NULL;
3115 return GS_ALL_DONE;
3116 }
3117
3118 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
3119 a call to __builtin_memset. In this case we know that the RHS is
3120 a CONSTRUCTOR with an empty element list. */
3121
3122 static enum gimplify_status
3123 gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
3124 gimple_seq *seq_p)
3125 {
3126 tree t, from, to, to_ptr;
3127 gimple gs;
3128
3129 /* Assert our assumptions, to abort instead of producing wrong code
3130 silently if they are not met. Beware that the RHS CONSTRUCTOR might
3131 not be immediately exposed. */
3132 from = TREE_OPERAND (*expr_p, 1);
3133 if (TREE_CODE (from) == WITH_SIZE_EXPR)
3134 from = TREE_OPERAND (from, 0);
3135
3136 gcc_assert (TREE_CODE (from) == CONSTRUCTOR
3137 && VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (from)));
3138
3139 /* Now proceed. */
3140 to = TREE_OPERAND (*expr_p, 0);
3141
3142 to_ptr = build_fold_addr_expr (to);
3143 gimplify_arg (&to_ptr, seq_p, EXPR_LOCATION (*expr_p));
3144 t = implicit_built_in_decls[BUILT_IN_MEMSET];
3145
3146 gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);
3147
3148 if (want_value)
3149 {
3150 /* tmp = memset() */
3151 t = create_tmp_var (TREE_TYPE (to_ptr), NULL);
3152 gimple_call_set_lhs (gs, t);
3153 gimplify_seq_add_stmt (seq_p, gs);
3154
3155 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
3156 return GS_ALL_DONE;
3157 }
3158
3159 gimplify_seq_add_stmt (seq_p, gs);
3160 *expr_p = NULL;
3161 return GS_ALL_DONE;
3162 }
3163
/* A subroutine of gimplify_init_ctor_preeval.  Called via walk_tree,
   determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
   assignment.  Returns non-null if we detect a potential overlap.  */

/* Context passed through walk_tree to gimplify_init_ctor_preeval_1;
   filled in by gimplify_init_constructor before pre-evaluation.  */
struct gimplify_init_ctor_preeval_data
{
  /* The base decl of the lhs object.  May be NULL, in which case we
     have to assume the lhs is indirect.  */
  tree lhs_base_decl;

  /* The alias set of the lhs object.  Used to rule out overlap with
     indirect references and pointer call arguments.  */
  alias_set_type lhs_alias_set;
};
3177
3178 static tree
3179 gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
3180 {
3181 struct gimplify_init_ctor_preeval_data *data
3182 = (struct gimplify_init_ctor_preeval_data *) xdata;
3183 tree t = *tp;
3184
3185 /* If we find the base object, obviously we have overlap. */
3186 if (data->lhs_base_decl == t)
3187 return t;
3188
3189 /* If the constructor component is indirect, determine if we have a
3190 potential overlap with the lhs. The only bits of information we
3191 have to go on at this point are addressability and alias sets. */
3192 if (TREE_CODE (t) == INDIRECT_REF
3193 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
3194 && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
3195 return t;
3196
3197 /* If the constructor component is a call, determine if it can hide a
3198 potential overlap with the lhs through an INDIRECT_REF like above. */
3199 if (TREE_CODE (t) == CALL_EXPR)
3200 {
3201 tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));
3202
3203 for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
3204 if (POINTER_TYPE_P (TREE_VALUE (type))
3205 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
3206 && alias_sets_conflict_p (data->lhs_alias_set,
3207 get_alias_set
3208 (TREE_TYPE (TREE_VALUE (type)))))
3209 return t;
3210 }
3211
3212 if (IS_TYPE_OR_DECL_P (t))
3213 *walk_subtrees = 0;
3214 return NULL;
3215 }
3216
/* A subroutine of gimplify_init_constructor.  Pre-evaluate EXPR,
   force values that overlap with the lhs (as described by *DATA)
   into temporaries.

   EXPR_P points at one constructor element value; it is rewritten in
   place (to a temporary, to NULL on gimplification error, or left
   alone).  Emitted statements go to PRE_P/POST_P.  */

static void
gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			    struct gimplify_init_ctor_preeval_data *data)
{
  enum gimplify_status one;

  /* If the value is constant, then there's nothing to pre-evaluate.  */
  if (TREE_CONSTANT (*expr_p))
    {
      /* Ensure it does not have side effects, it might contain a reference to
	 the object we're initializing.  */
      gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
      return;
    }

  /* If the type has non-trivial constructors, we can't pre-evaluate.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
    return;

  /* Recurse for nested constructors, pre-evaluating each element.  */
  if (TREE_CODE (*expr_p) == CONSTRUCTOR)
    {
      unsigned HOST_WIDE_INT ix;
      constructor_elt *ce;
      VEC(constructor_elt,gc) *v = CONSTRUCTOR_ELTS (*expr_p);

      for (ix = 0; VEC_iterate (constructor_elt, v, ix, ce); ix++)
	gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);

      return;
    }

  /* If this is a variable sized type, we must remember the size.  */
  maybe_with_size_expr (expr_p);

  /* Gimplify the constructor element to something appropriate for the rhs
     of a MODIFY_EXPR.  Given that we know the LHS is an aggregate, we know
     the gimplifier will consider this a store to memory.  Doing this
     gimplification now means that we won't have to deal with complicated
     language-specific trees, nor trees like SAVE_EXPR that can induce
     exponential search behavior.  */
  one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
  if (one == GS_ERROR)
    {
      /* NULL elements are skipped by gimplify_init_ctor_eval.  */
      *expr_p = NULL;
      return;
    }

  /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
     with the lhs, since "a = { .x=a }" doesn't make sense.  This will
     always be true for all scalars, since is_gimple_mem_rhs insists on a
     temporary variable for them.  */
  if (DECL_P (*expr_p))
    return;

  /* If this is of variable size, we have no choice but to assume it doesn't
     overlap since we can't make a temporary for it.  */
  if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
    return;

  /* Otherwise, we must search for overlap ...  */
  if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
    return;

  /* ... and if found, force the value into a temporary.  */
  *expr_p = get_formal_tmp_var (*expr_p, pre_p);
}
3288
3289 /* A subroutine of gimplify_init_ctor_eval. Create a loop for
3290 a RANGE_EXPR in a CONSTRUCTOR for an array.
3291
3292 var = lower;
3293 loop_entry:
3294 object[var] = value;
3295 if (var == upper)
3296 goto loop_exit;
3297 var = var + 1;
3298 goto loop_entry;
3299 loop_exit:
3300
3301 We increment var _after_ the loop exit check because we might otherwise
3302 fail if upper == TYPE_MAX_VALUE (type for upper).
3303
3304 Note that we never have to deal with SAVE_EXPRs here, because this has
3305 already been taken care of for us, in gimplify_init_ctor_preeval(). */
3306
3307 static void gimplify_init_ctor_eval (tree, VEC(constructor_elt,gc) *,
3308 gimple_seq *, bool);
3309
3310 static void
3311 gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
3312 tree value, tree array_elt_type,
3313 gimple_seq *pre_p, bool cleared)
3314 {
3315 tree loop_entry_label, loop_exit_label, fall_thru_label;
3316 tree var, var_type, cref, tmp;
3317
3318 loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
3319 loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
3320 fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);
3321
3322 /* Create and initialize the index variable. */
3323 var_type = TREE_TYPE (upper);
3324 var = create_tmp_var (var_type, NULL);
3325 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));
3326
3327 /* Add the loop entry label. */
3328 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));
3329
3330 /* Build the reference. */
3331 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
3332 var, NULL_TREE, NULL_TREE);
3333
3334 /* If we are a constructor, just call gimplify_init_ctor_eval to do
3335 the store. Otherwise just assign value to the reference. */
3336
3337 if (TREE_CODE (value) == CONSTRUCTOR)
3338 /* NB we might have to call ourself recursively through
3339 gimplify_init_ctor_eval if the value is a constructor. */
3340 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
3341 pre_p, cleared);
3342 else
3343 gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));
3344
3345 /* We exit the loop when the index var is equal to the upper bound. */
3346 gimplify_seq_add_stmt (pre_p,
3347 gimple_build_cond (EQ_EXPR, var, upper,
3348 loop_exit_label, fall_thru_label));
3349
3350 gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));
3351
3352 /* Otherwise, increment the index var... */
3353 tmp = build2 (PLUS_EXPR, var_type, var,
3354 fold_convert (var_type, integer_one_node));
3355 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));
3356
3357 /* ...and jump back to the loop entry. */
3358 gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));
3359
3360 /* Add the loop exit label. */
3361 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
3362 }
3363
3364 /* Return true if FDECL is accessing a field that is zero sized. */
3365
3366 static bool
3367 zero_sized_field_decl (const_tree fdecl)
3368 {
3369 if (TREE_CODE (fdecl) == FIELD_DECL && DECL_SIZE (fdecl)
3370 && integer_zerop (DECL_SIZE (fdecl)))
3371 return true;
3372 return false;
3373 }
3374
3375 /* Return true if TYPE is zero sized. */
3376
3377 static bool
3378 zero_sized_type (const_tree type)
3379 {
3380 if (AGGREGATE_TYPE_P (type) && TYPE_SIZE (type)
3381 && integer_zerop (TYPE_SIZE (type)))
3382 return true;
3383 return false;
3384 }
3385
/* A subroutine of gimplify_init_constructor.  Generate individual
   MODIFY_EXPRs for a CONSTRUCTOR.  OBJECT is the LHS against which the
   assignments should happen.  ELTS is the CONSTRUCTOR_ELTS of the
   CONSTRUCTOR.  CLEARED is true if the entire LHS object has been
   zeroed first.  Generated statements are appended to PRE_P.  */

static void
gimplify_init_ctor_eval (tree object, VEC(constructor_elt,gc) *elts,
			 gimple_seq *pre_p, bool cleared)
{
  tree array_elt_type = NULL;
  unsigned HOST_WIDE_INT ix;
  tree purpose, value;

  /* Non-NULL array_elt_type doubles as the "OBJECT is an array" flag.  */
  if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
    array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));

  FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
    {
      tree cref;

      /* NULL values are created above for gimplification errors.  */
      if (value == NULL)
	continue;

      /* A zero element is redundant when the object was block-cleared.  */
      if (cleared && initializer_zerop (value))
	continue;

      /* ??? Here's to hoping the front end fills in all of the indices,
	 so we don't have to figure out what's missing ourselves.  */
      gcc_assert (purpose);

      /* Skip zero-sized fields, unless value has side-effects.  This can
	 happen with calls to functions returning a zero-sized type, which
	 we shouldn't discard.  As a number of downstream passes don't
	 expect sets of zero-sized fields, we rely on the gimplification of
	 the MODIFY_EXPR we make below to drop the assignment statement.  */
      if (! TREE_SIDE_EFFECTS (value) && zero_sized_field_decl (purpose))
	continue;

      /* If we have a RANGE_EXPR, we have to build a loop to assign the
	 whole range.  */
      if (TREE_CODE (purpose) == RANGE_EXPR)
	{
	  tree lower = TREE_OPERAND (purpose, 0);
	  tree upper = TREE_OPERAND (purpose, 1);

	  /* If the lower bound is equal to upper, just treat it as if
	     upper was the index.  */
	  if (simple_cst_equal (lower, upper))
	    purpose = upper;
	  else
	    {
	      gimplify_init_ctor_eval_range (object, lower, upper, value,
					     array_elt_type, pre_p, cleared);
	      continue;
	    }
	}

      /* Build the reference to this element: an ARRAY_REF for arrays,
	 a COMPONENT_REF for record/union fields.  */
      if (array_elt_type)
	{
	  /* Do not use bitsizetype for ARRAY_REF indices.  */
	  if (TYPE_DOMAIN (TREE_TYPE (object)))
	    purpose = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
				    purpose);
	  cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
			 purpose, NULL_TREE, NULL_TREE);
	}
      else
	{
	  gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
	  cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
			 unshare_expr (object), purpose, NULL_TREE);
	}

      /* Recurse for nested constructors (but not vector constructors,
	 which stay as single values); otherwise emit the assignment.  */
      if (TREE_CODE (value) == CONSTRUCTOR
	  && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
	gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
				 pre_p, cleared);
      else
	{
	  tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
	  gimplify_and_add (init, pre_p);
	  ggc_free (init);
	}
    }
}
3473
3474
3475 /* Returns the appropriate RHS predicate for this LHS. */
3476
3477 gimple_predicate
3478 rhs_predicate_for (tree lhs)
3479 {
3480 if (is_gimple_reg (lhs))
3481 return is_gimple_reg_rhs_or_call;
3482 else
3483 return is_gimple_mem_rhs_or_call;
3484 }
3485
3486 /* Gimplify a C99 compound literal expression. This just means adding
3487 the DECL_EXPR before the current statement and using its anonymous
3488 decl instead. */
3489
3490 static enum gimplify_status
3491 gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p)
3492 {
3493 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
3494 tree decl = DECL_EXPR_DECL (decl_s);
3495 /* Mark the decl as addressable if the compound literal
3496 expression is addressable now, otherwise it is marked too late
3497 after we gimplify the initialization expression. */
3498 if (TREE_ADDRESSABLE (*expr_p))
3499 TREE_ADDRESSABLE (decl) = 1;
3500
3501 /* Preliminarily mark non-addressed complex variables as eligible
3502 for promotion to gimple registers. We'll transform their uses
3503 as we find them. */
3504 if ((TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE
3505 || TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE)
3506 && !TREE_THIS_VOLATILE (decl)
3507 && !needs_to_live_in_memory (decl))
3508 DECL_GIMPLE_REG_P (decl) = 1;
3509
3510 /* This decl isn't mentioned in the enclosing block, so add it to the
3511 list of temps. FIXME it seems a bit of a kludge to say that
3512 anonymous artificial vars aren't pushed, but everything else is. */
3513 if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
3514 gimple_add_tmp_var (decl);
3515
3516 gimplify_and_add (decl_s, pre_p);
3517 *expr_p = decl;
3518 return GS_OK;
3519 }
3520
/* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
   return a new CONSTRUCTOR if something changed.  Each compound
   literal element is replaced by its initializer when that is safe
   (the literal and its decl are not addressable); the original
   constructor is copied lazily, only once a replacement is made.  */

static tree
optimize_compound_literals_in_ctor (tree orig_ctor)
{
  tree ctor = orig_ctor;
  VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (ctor);
  unsigned int idx, num = VEC_length (constructor_elt, elts);

  for (idx = 0; idx < num; idx++)
    {
      tree value = VEC_index (constructor_elt, elts, idx)->value;
      tree newval = value;
      /* Recurse into nested constructors.  */
      if (TREE_CODE (value) == CONSTRUCTOR)
	newval = optimize_compound_literals_in_ctor (value);
      else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
	{
	  tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
	  tree decl = DECL_EXPR_DECL (decl_s);
	  tree init = DECL_INITIAL (decl);

	  /* Only substitute the initializer when nothing can take the
	     literal's (or its decl's) address, and an initializer
	     actually exists.  */
	  if (!TREE_ADDRESSABLE (value)
	      && !TREE_ADDRESSABLE (decl)
	      && init)
	    newval = optimize_compound_literals_in_ctor (init);
	}
      if (newval == value)
	continue;

      /* Copy-on-write: duplicate the constructor the first time an
	 element changes, so ORIG_CTOR is left untouched.  */
      if (ctor == orig_ctor)
	{
	  ctor = copy_node (orig_ctor);
	  CONSTRUCTOR_ELTS (ctor) = VEC_copy (constructor_elt, gc, elts);
	  elts = CONSTRUCTOR_ELTS (ctor);
	}
      VEC_index (constructor_elt, elts, idx)->value = newval;
    }
  return ctor;
}
3561


/* A subroutine of gimplify_modify_expr.  Break out elements of a
   CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.

   Note that we still need to clear any elements that don't have explicit
   initializers, so if not all elements are initialized we keep the
   original MODIFY_EXPR, we just remove all of the constructor elements.

   If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
   GS_ERROR if we would have to create a temporary when gimplifying
   this constructor.  Otherwise, return GS_OK.

   If NOTIFY_TEMP_CREATION is false, just do the gimplification.  */

static enum gimplify_status
gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			   bool want_value, bool notify_temp_creation)
{
  tree object, ctor, type;
  enum gimplify_status ret;
  VEC(constructor_elt,gc) *elts;

  gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);

  /* Gimplify the LHS first (skipped in query-only mode).  */
  if (!notify_temp_creation)
    {
      ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
			   is_gimple_lvalue, fb_lvalue);
      if (ret == GS_ERROR)
	return ret;
    }

  object = TREE_OPERAND (*expr_p, 0);
  /* Fold away compound literals inside the constructor up front.  */
  ctor = TREE_OPERAND (*expr_p, 1) =
    optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
  type = TREE_TYPE (ctor);
  elts = CONSTRUCTOR_ELTS (ctor);
  ret = GS_ALL_DONE;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case ARRAY_TYPE:
      {
	struct gimplify_init_ctor_preeval_data preeval_data;
	HOST_WIDE_INT num_type_elements, num_ctor_elements;
	HOST_WIDE_INT num_nonzero_elements;
	bool cleared, valid_const_initializer;

	/* Aggregate types must lower constructors to initialization of
	   individual elements.  The exception is that a CONSTRUCTOR node
	   with no elements indicates zero-initialization of the whole.  */
	if (VEC_empty (constructor_elt, elts))
	  {
	    if (notify_temp_creation)
	      return GS_OK;
	    break;
	  }

	/* Fetch information about the constructor to direct later processing.
	   We might want to make static versions of it in various cases, and
	   can only do so if it known to be a valid constant initializer.  */
	valid_const_initializer
	  = categorize_ctor_elements (ctor, &num_nonzero_elements,
				      &num_ctor_elements, &cleared);

	/* If a const aggregate variable is being initialized, then it
	   should never be a lose to promote the variable to be static.  */
	if (valid_const_initializer
	    && num_nonzero_elements > 1
	    && TREE_READONLY (object)
	    && TREE_CODE (object) == VAR_DECL
	    && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object)))
	  {
	    /* Promotion creates a static initializer, i.e. a "temporary".  */
	    if (notify_temp_creation)
	      return GS_ERROR;
	    DECL_INITIAL (object) = ctor;
	    TREE_STATIC (object) = 1;
	    if (!DECL_NAME (object))
	      DECL_NAME (object) = create_tmp_var_name ("C");
	    walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);

	    /* ??? C++ doesn't automatically append a .<number> to the
	       assembler name, and even when it does, it looks a FE private
	       data structures to figure out what that number should be,
	       which are not set for this variable.  I suppose this is
	       important for local statics for inline functions, which aren't
	       "local" in the object file sense.  So in order to get a unique
	       TU-local symbol, we must invoke the lhd version now.  */
	    lhd_set_decl_assembler_name (object);

	    *expr_p = NULL_TREE;
	    break;
	  }

	/* If there are "lots" of initialized elements, even discounting
	   those that are not address constants (and thus *must* be
	   computed at runtime), then partition the constructor into
	   constant and non-constant parts.  Block copy the constant
	   parts in, then generate code for the non-constant parts.  */
	/* TODO.  There's code in cp/typeck.c to do this.  */

	num_type_elements = count_type_elements (type, true);

	/* If count_type_elements could not determine number of type elements
	   for a constant-sized object, assume clearing is needed.
	   Don't do this for variable-sized objects, as store_constructor
	   will ignore the clearing of variable-sized objects.  */
	if (num_type_elements < 0 && int_size_in_bytes (type) >= 0)
	  cleared = true;
	/* If there are "lots" of zeros, then block clear the object first.  */
	else if (num_type_elements - num_nonzero_elements
		 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
		 && num_nonzero_elements < num_type_elements/4)
	  cleared = true;
	/* ??? This bit ought not be needed.  For any element not present
	   in the initializer, we should simply set them to zero.  Except
	   we'd need to *find* the elements that are not present, and that
	   requires trickery to avoid quadratic compile-time behavior in
	   large cases or excessive memory use in small cases.  */
	else if (num_ctor_elements < num_type_elements)
	  cleared = true;

	/* If there are "lots" of initialized elements, and all of them
	   are valid address constants, then the entire initializer can
	   be dropped to memory, and then memcpy'd out.  Don't do this
	   for sparse arrays, though, as it's more efficient to follow
	   the standard CONSTRUCTOR behavior of memset followed by
	   individual element initialization.  Also don't do this for small
	   all-zero initializers (which aren't big enough to merit
	   clearing), and don't try to make bitwise copies of
	   TREE_ADDRESSABLE types.  */
	if (valid_const_initializer
	    && !(cleared || num_nonzero_elements == 0)
	    && !TREE_ADDRESSABLE (type))
	  {
	    HOST_WIDE_INT size = int_size_in_bytes (type);
	    unsigned int align;

	    /* ??? We can still get unbounded array types, at least
	       from the C++ front end.  This seems wrong, but attempt
	       to work around it for now.  */
	    if (size < 0)
	      {
		size = int_size_in_bytes (TREE_TYPE (object));
		if (size >= 0)
		  TREE_TYPE (ctor) = type = TREE_TYPE (object);
	      }

	    /* Find the maximum alignment we can assume for the object.  */
	    /* ??? Make use of DECL_OFFSET_ALIGN.  */
	    if (DECL_P (object))
	      align = DECL_ALIGN (object);
	    else
	      align = TYPE_ALIGN (type);

	    if (size > 0
		&& num_nonzero_elements > 1
		&& !can_move_by_pieces (size, align))
	      {
		tree new_tree;

		/* The static readonly variable below is a temporary.  */
		if (notify_temp_creation)
		  return GS_ERROR;

		new_tree = create_tmp_var_raw (type, "C");

		gimple_add_tmp_var (new_tree);
		TREE_STATIC (new_tree) = 1;
		TREE_READONLY (new_tree) = 1;
		DECL_INITIAL (new_tree) = ctor;
		if (align > DECL_ALIGN (new_tree))
		  {
		    DECL_ALIGN (new_tree) = align;
		    DECL_USER_ALIGN (new_tree) = 1;
		  }
		walk_tree (&DECL_INITIAL (new_tree), force_labels_r, NULL, NULL);

		TREE_OPERAND (*expr_p, 1) = new_tree;

		/* This is no longer an assignment of a CONSTRUCTOR, but
		   we still may have processing to do on the LHS.  So
		   pretend we didn't do anything here to let that happen.  */
		return GS_UNHANDLED;
	      }
	  }

	if (notify_temp_creation)
	  return GS_OK;

	/* If there are nonzero elements, pre-evaluate to capture elements
	   overlapping with the lhs into temporaries.  We must do this before
	   clearing to fetch the values before they are zeroed-out.  */
	if (num_nonzero_elements > 0)
	  {
	    preeval_data.lhs_base_decl = get_base_address (object);
	    if (!DECL_P (preeval_data.lhs_base_decl))
	      preeval_data.lhs_base_decl = NULL;
	    preeval_data.lhs_alias_set = get_alias_set (object);

	    gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
					pre_p, post_p, &preeval_data);
	  }

	if (cleared)
	  {
	    /* Zap the CONSTRUCTOR element list, which simplifies this case.
	       Note that we still have to gimplify, in order to handle the
	       case of variable sized types.  Avoid shared tree structures.  */
	    CONSTRUCTOR_ELTS (ctor) = NULL;
	    TREE_SIDE_EFFECTS (ctor) = 0;
	    object = unshare_expr (object);
	    gimplify_stmt (expr_p, pre_p);
	  }

	/* If we have not block cleared the object, or if there are nonzero
	   elements in the constructor, add assignments to the individual
	   scalar fields of the object.  */
	if (!cleared || num_nonzero_elements > 0)
	  gimplify_init_ctor_eval (object, elts, pre_p, cleared);

	*expr_p = NULL_TREE;
      }
      break;

    case COMPLEX_TYPE:
      {
	tree r, i;

	if (notify_temp_creation)
	  return GS_OK;

	/* Extract the real and imaginary parts out of the ctor.  */
	gcc_assert (VEC_length (constructor_elt, elts) == 2);
	r = VEC_index (constructor_elt, elts, 0)->value;
	i = VEC_index (constructor_elt, elts, 1)->value;
	if (r == NULL || i == NULL)
	  {
	    tree zero = fold_convert (TREE_TYPE (type), integer_zero_node);
	    if (r == NULL)
	      r = zero;
	    if (i == NULL)
	      i = zero;
	  }

	/* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
	   represent creation of a complex value.  */
	if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
	  {
	    ctor = build_complex (type, r, i);
	    TREE_OPERAND (*expr_p, 1) = ctor;
	  }
	else
	  {
	    ctor = build2 (COMPLEX_EXPR, type, r, i);
	    TREE_OPERAND (*expr_p, 1) = ctor;
	    ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
				 pre_p,
				 post_p,
				 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
				 fb_rvalue);
	  }
      }
      break;

    case VECTOR_TYPE:
      {
	unsigned HOST_WIDE_INT ix;
	constructor_elt *ce;

	if (notify_temp_creation)
	  return GS_OK;

	/* Go ahead and simplify constant constructors to VECTOR_CST.  */
	if (TREE_CONSTANT (ctor))
	  {
	    bool constant_p = true;
	    tree value;

	    /* Even when ctor is constant, it might contain non-*_CST
	       elements, such as addresses or trapping values like
	       1.0/0.0 - 1.0/0.0.  Such expressions don't belong
	       in VECTOR_CST nodes.  */
	    FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
	      if (!CONSTANT_CLASS_P (value))
		{
		  constant_p = false;
		  break;
		}

	    if (constant_p)
	      {
		TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
		break;
	      }

	    /* Don't reduce an initializer constant even if we can't
	       make a VECTOR_CST.  It won't do anything for us, and it'll
	       prevent us from representing it as a single constant.  */
	    if (initializer_constant_valid_p (ctor, type))
	      break;

	    TREE_CONSTANT (ctor) = 0;
	  }

	/* Vector types use CONSTRUCTOR all the way through gimple
	   compilation as a general initializer.  */
	for (ix = 0; VEC_iterate (constructor_elt, elts, ix, ce); ix++)
	  {
	    enum gimplify_status tret;
	    tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
				  fb_rvalue);
	    if (tret == GS_ERROR)
	      ret = GS_ERROR;
	  }
	if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
	  TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
      }
      break;

    default:
      /* So how did we get a CONSTRUCTOR for a scalar type?  */
      gcc_unreachable ();
    }

  if (ret == GS_ERROR)
    return GS_ERROR;
  else if (want_value)
    {
      *expr_p = object;
      return GS_OK;
    }
  else
    {
      /* If we have gimplified both sides of the initializer but have
	 not emitted an assignment, do so now.  */
      if (*expr_p)
	{
	  tree lhs = TREE_OPERAND (*expr_p, 0);
	  tree rhs = TREE_OPERAND (*expr_p, 1);
	  gimple init = gimple_build_assign (lhs, rhs);
	  gimplify_seq_add_stmt (pre_p, init);
	  *expr_p = NULL;
	}

      return GS_ALL_DONE;
    }
}
3913
/* Given a pointer value OP0, return a simplified version of an
   indirection through OP0, or NULL_TREE if no simplification is
   possible.  Note that the resulting type may be different from
   the type pointed to in the sense that it is still compatible
   from the langhooks point of view.  */

tree
gimple_fold_indirect_ref (tree t)
{
  /* TYPE is the type the indirection would have.  */
  tree type = TREE_TYPE (TREE_TYPE (t));
  tree sub = t;
  tree subtype;

  STRIP_USELESS_TYPE_CONVERSION (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p */
      if (useless_type_conversion_p (type, optype))
	return op;

      /* *(foo *)&fooarray => fooarray[0] */
      if (TREE_CODE (optype) == ARRAY_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  /* Index with the array's lower bound (default 0).  */
	  tree type_domain = TYPE_DOMAIN (optype);
	  tree min_val = size_zero_node;
	  if (type_domain && TYPE_MIN_VALUE (type_domain))
	    min_val = TYPE_MIN_VALUE (type_domain);
	  return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
	}
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      tree osub = sub;
      /* Recursively fold the inner indirection; fall back to an
	 explicit INDIRECT_REF if it does not simplify.  */
      sub = gimple_fold_indirect_ref (sub);
      if (! sub)
	sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}
3970
3971 /* Given a pointer value OP0, return a simplified version of an
3972 indirection through OP0, or NULL_TREE if no simplification is
3973 possible. This may only be applied to a rhs of an expression.
3974 Note that the resulting type may be different from the type pointed
3975 to in the sense that it is still compatible from the langhooks
3976 point of view. */
3977
3978 static tree
3979 gimple_fold_indirect_ref_rhs (tree t)
3980 {
3981 return gimple_fold_indirect_ref (t);
3982 }
3983
/* Subroutine of gimplify_modify_expr to do simplifications of
   MODIFY_EXPRs based on the code of the RHS.  We loop for as long as
   something changes.

   *EXPR_P is the whole MODIFY_EXPR/INIT_EXPR; FROM_P/TO_P point at its
   RHS and LHS operands.  PRE_P/POST_P collect side-effect statements.
   WANT_VALUE is true when the value of the assignment is used.

   Returns GS_UNHANDLED when no special-case applied (caller proceeds
   with the generic path), otherwise the status of the simplification.  */

static enum gimplify_status
gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
			  gimple_seq *pre_p, gimple_seq *post_p,
			  bool want_value)
{
  enum gimplify_status ret = GS_OK;

  /* Each case may rewrite *FROM_P into a new form; keep retrying until a
     case returns directly or sets GS_UNHANDLED.  */
  while (ret != GS_UNHANDLED)
    switch (TREE_CODE (*from_p))
      {
      case VAR_DECL:
	/* If we're assigning from a read-only variable initialized with
	   a constructor, do the direct assignment from the constructor,
	   but only if neither source nor target are volatile since this
	   latter assignment might end up being done on a per-field basis.  */
	if (DECL_INITIAL (*from_p)
	    && TREE_READONLY (*from_p)
	    && !TREE_THIS_VOLATILE (*from_p)
	    && !TREE_THIS_VOLATILE (*to_p)
	    && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR)
	  {
	    tree old_from = *from_p;

	    /* Move the constructor into the RHS.  */
	    *from_p = unshare_expr (DECL_INITIAL (*from_p));

	    /* Let's see if gimplify_init_constructor will need to put
	       it in memory.  If so, revert the change.  */
	    ret = gimplify_init_constructor (expr_p, NULL, NULL, false, true);
	    if (ret == GS_ERROR)
	      {
		*from_p = old_from;
		/* Fall through.  */
	      }
	    else
	      {
		ret = GS_OK;
		break;
	      }
	  }
	ret = GS_UNHANDLED;
	break;
      case INDIRECT_REF:
	{
	  /* If we have code like

	     *(const A*)(A*)&x

	     where the type of "x" is a (possibly cv-qualified variant
	     of "A"), treat the entire expression as identical to "x".
	     This kind of code arises in C++ when an object is bound
	     to a const reference, and if "x" is a TARGET_EXPR we want
	     to take advantage of the optimization below.  */
	  tree t = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
	  if (t)
	    {
	      *from_p = t;
	      ret = GS_OK;
	    }
	  else
	    ret = GS_UNHANDLED;
	  break;
	}

      case TARGET_EXPR:
	{
	  /* If we are initializing something from a TARGET_EXPR, strip the
	     TARGET_EXPR and initialize it directly, if possible.  This can't
	     be done if the initializer is void, since that implies that the
	     temporary is set in some non-trivial way.

	     ??? What about code that pulls out the temp and uses it
	     elsewhere? I think that such code never uses the TARGET_EXPR as
	     an initializer.  If I'm wrong, we'll die because the temp won't
	     have any RTL.  In that case, I guess we'll need to replace
	     references somehow.  */
	  tree init = TARGET_EXPR_INITIAL (*from_p);

	  if (init
	      && !VOID_TYPE_P (TREE_TYPE (init)))
	    {
	      *from_p = init;
	      ret = GS_OK;
	    }
	  else
	    ret = GS_UNHANDLED;
	}
	break;

      case COMPOUND_EXPR:
	/* Remove any COMPOUND_EXPR in the RHS so the following cases will be
	   caught.  */
	gimplify_compound_expr (from_p, pre_p, true);
	ret = GS_OK;
	break;

      case CONSTRUCTOR:
	/* If we're initializing from a CONSTRUCTOR, break this into
	   individual MODIFY_EXPRs.  */
	return gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
					  false);

      case COND_EXPR:
	/* If we're assigning to a non-register type, push the assignment
	   down into the branches.  This is mandatory for ADDRESSABLE types,
	   since we cannot generate temporaries for such, but it saves a
	   copy in other cases as well.  */
	if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
	  {
	    /* This code should mirror the code in gimplify_cond_expr. */
	    enum tree_code code = TREE_CODE (*expr_p);
	    tree cond = *from_p;
	    tree result = *to_p;

	    ret = gimplify_expr (&result, pre_p, post_p,
				 is_gimple_lvalue, fb_lvalue);
	    if (ret != GS_ERROR)
	      ret = GS_OK;

	    /* Rewrite each non-void arm into "result = arm" (or an
	       INIT_EXPR, matching the original CODE).  */
	    if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
	      TREE_OPERAND (cond, 1)
		= build2 (code, void_type_node, result,
			  TREE_OPERAND (cond, 1));
	    if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
	      TREE_OPERAND (cond, 2)
		= build2 (code, void_type_node, unshare_expr (result),
			  TREE_OPERAND (cond, 2));

	    TREE_TYPE (cond) = void_type_node;
	    recalculate_side_effects (cond);

	    if (want_value)
	      {
		gimplify_and_add (cond, pre_p);
		*expr_p = unshare_expr (result);
	      }
	    else
	      *expr_p = cond;
	    return ret;
	  }
	else
	  ret = GS_UNHANDLED;
	break;

      case CALL_EXPR:
	/* For calls that return in memory, give *to_p as the CALL_EXPR's
	   return slot so that we don't generate a temporary.  */
	if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
	    && aggregate_value_p (*from_p, *from_p))
	  {
	    bool use_target;

	    if (!(rhs_predicate_for (*to_p))(*from_p))
	      /* If we need a temporary, *to_p isn't accurate.  */
	      use_target = false;
	    else if (TREE_CODE (*to_p) == RESULT_DECL
		     && DECL_NAME (*to_p) == NULL_TREE
		     && needs_to_live_in_memory (*to_p))
	      /* It's OK to use the return slot directly unless it's an NRV. */
	      use_target = true;
	    else if (is_gimple_reg_type (TREE_TYPE (*to_p))
		     || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
	      /* Don't force regs into memory.  */
	      use_target = false;
	    else if (TREE_CODE (*expr_p) == INIT_EXPR)
	      /* It's OK to use the target directly if it's being
		 initialized. */
	      use_target = true;
	    else if (!is_gimple_non_addressable (*to_p))
	      /* Don't use the original target if it's already addressable;
		 if its address escapes, and the called function uses the
		 NRV optimization, a conforming program could see *to_p
		 change before the called function returns; see c++/19317.
		 When optimizing, the return_slot pass marks more functions
		 as safe after we have escape info.  */
	      use_target = false;
	    else
	      use_target = true;

	    if (use_target)
	      {
		CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
		mark_addressable (*to_p);
	      }
	  }

	/* The call itself is still handled by the generic path.  */
	ret = GS_UNHANDLED;
	break;

	/* If we're initializing from a container, push the initialization
	   inside it.  */
      case CLEANUP_POINT_EXPR:
      case BIND_EXPR:
      case STATEMENT_LIST:
	{
	  tree wrap = *from_p;
	  tree t;

	  ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
			       fb_lvalue);
	  if (ret != GS_ERROR)
	    ret = GS_OK;

	  t = voidify_wrapper_expr (wrap, *expr_p);
	  gcc_assert (t == *expr_p);

	  if (want_value)
	    {
	      gimplify_and_add (wrap, pre_p);
	      *expr_p = unshare_expr (*to_p);
	    }
	  else
	    *expr_p = wrap;
	  return GS_OK;
	}

      case COMPOUND_LITERAL_EXPR:
	{
	  tree complit = TREE_OPERAND (*expr_p, 1);
	  tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
	  tree decl = DECL_EXPR_DECL (decl_s);
	  tree init = DECL_INITIAL (decl);

	  /* struct T x = (struct T) { 0, 1, 2 } can be optimized
	     into struct T x = { 0, 1, 2 } if the address of the
	     compound literal has never been taken.  */
	  if (!TREE_ADDRESSABLE (complit)
	      && !TREE_ADDRESSABLE (decl)
	      && init)
	    {
	      *expr_p = copy_node (*expr_p);
	      TREE_OPERAND (*expr_p, 1) = init;
	      return GS_OK;
	    }
	}
	/* FALLTHRU -- a compound literal that cannot be optimized is
	   treated like any other unhandled RHS code.  */

      default:
	ret = GS_UNHANDLED;
	break;
      }

  return ret;
}
4231
4232
4233 /* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
4234 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with
4235 DECL_GIMPLE_REG_P set.
4236
4237 IMPORTANT NOTE: This promotion is performed by introducing a load of the
4238 other, unmodified part of the complex object just before the total store.
4239 As a consequence, if the object is still uninitialized, an undefined value
4240 will be loaded into a register, which may result in a spurious exception
4241 if the register is floating-point and the value happens to be a signaling
4242 NaN for example. Then the fully-fledged complex operations lowering pass
4243 followed by a DCE pass are necessary in order to fix things up. */
4244
4245 static enum gimplify_status
4246 gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
4247 bool want_value)
4248 {
4249 enum tree_code code, ocode;
4250 tree lhs, rhs, new_rhs, other, realpart, imagpart;
4251
4252 lhs = TREE_OPERAND (*expr_p, 0);
4253 rhs = TREE_OPERAND (*expr_p, 1);
4254 code = TREE_CODE (lhs);
4255 lhs = TREE_OPERAND (lhs, 0);
4256
4257 ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
4258 other = build1 (ocode, TREE_TYPE (rhs), lhs);
4259 other = get_formal_tmp_var (other, pre_p);
4260
4261 realpart = code == REALPART_EXPR ? rhs : other;
4262 imagpart = code == REALPART_EXPR ? other : rhs;
4263
4264 if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
4265 new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
4266 else
4267 new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
4268
4269 gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
4270 *expr_p = (want_value) ? rhs : NULL_TREE;
4271
4272 return GS_ALL_DONE;
4273 }
4274
4275
/* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.

      modify_expr
	      : varname '=' rhs
	      | '*' ID '=' rhs

   PRE_P points to the list where side effects that must happen before
      *EXPR_P should be stored.

   POST_P points to the list where side effects that must happen after
      *EXPR_P should be stored.

   WANT_VALUE is nonzero iff we want to use the value of this expression
      in another expression.  */

static enum gimplify_status
gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
		      bool want_value)
{
  tree *from_p = &TREE_OPERAND (*expr_p, 1);
  tree *to_p = &TREE_OPERAND (*expr_p, 0);
  enum gimplify_status ret = GS_UNHANDLED;
  gimple assign;

  gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
	      || TREE_CODE (*expr_p) == INIT_EXPR);

  /* Insert pointer conversions required by the middle-end that are not
     required by the frontend.  This fixes middle-end type checking for
     for example gcc.dg/redecl-6.c.  */
  if (POINTER_TYPE_P (TREE_TYPE (*to_p))
      && lang_hooks.types_compatible_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
    {
      STRIP_USELESS_TYPE_CONVERSION (*from_p);
      if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
	*from_p = fold_convert (TREE_TYPE (*to_p), *from_p);
    }

  /* See if any simplifications can be done based on what the RHS is.  */
  ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
				  want_value);
  if (ret != GS_UNHANDLED)
    return ret;

  /* For zero sized types only gimplify the left hand side and right hand
     side as statements and throw away the assignment.  Do this after
     gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
     types properly.  */
  if (zero_sized_type (TREE_TYPE (*from_p)) && !want_value)
    {
      gimplify_stmt (from_p, pre_p);
      gimplify_stmt (to_p, pre_p);
      *expr_p = NULL_TREE;
      return GS_ALL_DONE;
    }

  /* If the value being copied is of variable width, compute the length
     of the copy into a WITH_SIZE_EXPR.   Note that we need to do this
     before gimplifying any of the operands so that we can resolve any
     PLACEHOLDER_EXPRs in the size.  Also note that the RTL expander uses
     the size of the expression to be copied, not of the destination, so
     that is what we must do here.  */
  maybe_with_size_expr (from_p);

  ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
  if (ret == GS_ERROR)
    return ret;

  /* As a special case, we have to temporarily allow for assignments
     with a CALL_EXPR on the RHS.  Since in GIMPLE a function call is
     a toplevel statement, when gimplifying the GENERIC expression
     MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
     GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.

     Instead, we need to create the tuple GIMPLE_CALL <a, foo>.  To
     prevent gimplify_expr from trying to create a new temporary for
     foo's LHS, we tell it that it should only gimplify until it
     reaches the CALL_EXPR.  On return from gimplify_expr, the newly
     created GIMPLE_CALL <foo> will be the last statement in *PRE_P
     and all we need to do here is set 'a' to be its LHS.  */
  ret = gimplify_expr (from_p, pre_p, post_p, rhs_predicate_for (*to_p),
		       fb_rvalue);
  if (ret == GS_ERROR)
    return ret;

  /* Now see if the above changed *from_p to something we handle specially.  */
  ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
				  want_value);
  if (ret != GS_UNHANDLED)
    return ret;

  /* If we've got a variable sized assignment between two lvalues (i.e. does
     not involve a call), then we can make things a bit more straightforward
     by converting the assignment to memcpy or memset.  */
  if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
    {
      tree from = TREE_OPERAND (*from_p, 0);
      tree size = TREE_OPERAND (*from_p, 1);

      if (TREE_CODE (from) == CONSTRUCTOR)
	return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);

      if (is_gimple_addressable (from))
	{
	  *from_p = from;
	  return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
						 pre_p);
	}
    }

  /* Transform partial stores to non-addressable complex variables into
     total stores.  This allows us to use real instead of virtual operands
     for these variables, which improves optimization.  */
  if ((TREE_CODE (*to_p) == REALPART_EXPR
       || TREE_CODE (*to_p) == IMAGPART_EXPR)
      && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
    return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);

  /* Try to alleviate the effects of the gimplification creating artificial
     temporaries (see for example is_gimple_reg_rhs) on the debug info.  */
  if (!gimplify_ctxp->into_ssa
      && DECL_P (*from_p)
      && DECL_IGNORED_P (*from_p)
      && DECL_P (*to_p)
      && !DECL_IGNORED_P (*to_p))
    {
      if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
	DECL_NAME (*from_p)
	  = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
      DECL_DEBUG_EXPR_IS_FROM (*from_p) = 1;
      SET_DECL_DEBUG_EXPR (*from_p, *to_p);
    }

  if (TREE_CODE (*from_p) == CALL_EXPR)
    {
      /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
	 instead of a GIMPLE_ASSIGN.  */
      assign = gimple_build_call_from_tree (*from_p);
      gimple_call_set_lhs (assign, *to_p);
    }
  else
    {
      /* NOTE(review): the explicit location is only set on plain
	 assignments here, not on the GIMPLE_CALL branch above —
	 presumably gimple_build_call_from_tree handles that; confirm.  */
      assign = gimple_build_assign (*to_p, *from_p);
      gimple_set_location (assign, EXPR_LOCATION (*expr_p));
    }

  gimplify_seq_add_stmt (pre_p, assign);

  if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
    {
      /* If we've somehow already got an SSA_NAME on the LHS, then
	 we've probably modified it twice.  Not good.  */
      gcc_assert (TREE_CODE (*to_p) != SSA_NAME);
      *to_p = make_ssa_name (*to_p, assign);
      gimple_set_lhs (assign, *to_p);
    }

  if (want_value)
    {
      *expr_p = unshare_expr (*to_p);
      return GS_OK;
    }
  else
    *expr_p = NULL;

  return GS_ALL_DONE;
}
4443
4444 /* Gimplify a comparison between two variable-sized objects. Do this
4445 with a call to BUILT_IN_MEMCMP. */
4446
4447 static enum gimplify_status
4448 gimplify_variable_sized_compare (tree *expr_p)
4449 {
4450 tree op0 = TREE_OPERAND (*expr_p, 0);
4451 tree op1 = TREE_OPERAND (*expr_p, 1);
4452 tree t, arg, dest, src;
4453
4454 arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
4455 arg = unshare_expr (arg);
4456 arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
4457 src = build_fold_addr_expr (op1);
4458 dest = build_fold_addr_expr (op0);
4459 t = implicit_built_in_decls[BUILT_IN_MEMCMP];
4460 t = build_call_expr (t, 3, dest, src, arg);
4461 *expr_p
4462 = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
4463
4464 return GS_OK;
4465 }
4466
4467 /* Gimplify a comparison between two aggregate objects of integral scalar
4468 mode as a comparison between the bitwise equivalent scalar values. */
4469
4470 static enum gimplify_status
4471 gimplify_scalar_mode_aggregate_compare (tree *expr_p)
4472 {
4473 tree op0 = TREE_OPERAND (*expr_p, 0);
4474 tree op1 = TREE_OPERAND (*expr_p, 1);
4475
4476 tree type = TREE_TYPE (op0);
4477 tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
4478
4479 op0 = fold_build1 (VIEW_CONVERT_EXPR, scalar_type, op0);
4480 op1 = fold_build1 (VIEW_CONVERT_EXPR, scalar_type, op1);
4481
4482 *expr_p
4483 = fold_build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
4484
4485 return GS_OK;
4486 }
4487
4488 /* Gimplify TRUTH_ANDIF_EXPR and TRUTH_ORIF_EXPR expressions. EXPR_P
4489 points to the expression to gimplify.
4490
4491 Expressions of the form 'a && b' are gimplified to:
4492
4493 a && b ? true : false
4494
4495 LOCUS is the source location to be put on the generated COND_EXPR.
4496 gimplify_cond_expr will do the rest. */
4497
4498 static enum gimplify_status
4499 gimplify_boolean_expr (tree *expr_p, location_t locus)
4500 {
4501 /* Preserve the original type of the expression. */
4502 tree type = TREE_TYPE (*expr_p);
4503
4504 *expr_p = build3 (COND_EXPR, type, *expr_p,
4505 fold_convert (type, boolean_true_node),
4506 fold_convert (type, boolean_false_node));
4507
4508 SET_EXPR_LOCATION (*expr_p, locus);
4509
4510 return GS_OK;
4511 }
4512
4513 /* Gimplifies an expression sequence. This function gimplifies each
4514 expression and re-writes the original expression with the last
4515 expression of the sequence in GIMPLE form.
4516
4517 PRE_P points to the list where the side effects for all the
4518 expressions in the sequence will be emitted.
4519
4520 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
4521
4522 static enum gimplify_status
4523 gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
4524 {
4525 tree t = *expr_p;
4526
4527 do
4528 {
4529 tree *sub_p = &TREE_OPERAND (t, 0);
4530
4531 if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
4532 gimplify_compound_expr (sub_p, pre_p, false);
4533 else
4534 gimplify_stmt (sub_p, pre_p);
4535
4536 t = TREE_OPERAND (t, 1);
4537 }
4538 while (TREE_CODE (t) == COMPOUND_EXPR);
4539
4540 *expr_p = t;
4541 if (want_value)
4542 return GS_OK;
4543 else
4544 {
4545 gimplify_stmt (expr_p, pre_p);
4546 return GS_ALL_DONE;
4547 }
4548 }
4549
4550
4551 /* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
4552 gimplify. After gimplification, EXPR_P will point to a new temporary
4553 that holds the original value of the SAVE_EXPR node.
4554
4555 PRE_P points to the list where side effects that must happen before
4556 *EXPR_P should be stored. */
4557
4558 static enum gimplify_status
4559 gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4560 {
4561 enum gimplify_status ret = GS_ALL_DONE;
4562 tree val;
4563
4564 gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
4565 val = TREE_OPERAND (*expr_p, 0);
4566
4567 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
4568 if (!SAVE_EXPR_RESOLVED_P (*expr_p))
4569 {
4570 /* The operand may be a void-valued expression such as SAVE_EXPRs
4571 generated by the Java frontend for class initialization. It is
4572 being executed only for its side-effects. */
4573 if (TREE_TYPE (val) == void_type_node)
4574 {
4575 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
4576 is_gimple_stmt, fb_none);
4577 val = NULL;
4578 }
4579 else
4580 val = get_initialized_tmp_var (val, pre_p, post_p);
4581
4582 TREE_OPERAND (*expr_p, 0) = val;
4583 SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
4584 }
4585
4586 *expr_p = val;
4587
4588 return ret;
4589 }
4590
/* Re-write the ADDR_EXPR node pointed to by EXPR_P

      unary_expr
	      : ...
	      | '&' varname
	      ...

    PRE_P points to the list where side effects that must happen before
	*EXPR_P should be stored.

    POST_P points to the list where side effects that must happen after
	*EXPR_P should be stored.

    Returns the gimplification status; the generic case mutates the
    operand in place and marks it addressable.  */

static enum gimplify_status
gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree expr = *expr_p;
  tree op0 = TREE_OPERAND (expr, 0);
  enum gimplify_status ret;

  switch (TREE_CODE (op0))
    {
    case INDIRECT_REF:
    case MISALIGNED_INDIRECT_REF:
    /* Also reached via goto from the default case when gimplification
       of the operand produced a fresh INDIRECT_REF.  */
    do_indirect_ref:
      /* Check if we are dealing with an expression of the form '&*ptr'.
	 While the front end folds away '&*ptr' into 'ptr', these
	 expressions may be generated internally by the compiler (e.g.,
	 builtins like __builtin_va_end).  */
      /* Caution: the silent array decomposition semantics we allow for
	 ADDR_EXPR means we can't always discard the pair.  */
      /* Gimplification of the ADDR_EXPR operand may drop
	 cv-qualification conversions, so make sure we add them if
	 needed.  */
      {
	tree op00 = TREE_OPERAND (op0, 0);
	tree t_expr = TREE_TYPE (expr);
	tree t_op00 = TREE_TYPE (op00);

	if (!useless_type_conversion_p (t_expr, t_op00))
	  op00 = fold_convert (TREE_TYPE (expr), op00);
	*expr_p = op00;
	ret = GS_OK;
      }
      break;

    case VIEW_CONVERT_EXPR:
      /* Take the address of our operand and then convert it to the type of
	 this ADDR_EXPR.

	 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
	 all clear.  The impact of this transformation is even less clear.  */

      /* If the operand is a useless conversion, look through it.  Doing so
	 guarantees that the ADDR_EXPR and its operand will remain of the
	 same type.  */
      if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
	op0 = TREE_OPERAND (op0, 0);

      *expr_p = fold_convert (TREE_TYPE (expr),
			      build_fold_addr_expr (TREE_OPERAND (op0, 0)));
      ret = GS_OK;
      break;

    default:
      /* We use fb_either here because the C frontend sometimes takes
	 the address of a call that returns a struct; see
	 gcc.dg/c99-array-lval-1.c.  The gimplifier will correctly make
	 the implied temporary explicit.  */

      /* Mark the RHS addressable.  */
      ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
			   is_gimple_addressable, fb_either);
      if (ret == GS_ERROR)
	break;

      /* We cannot rely on making the RHS addressable if it is
	 a temporary created by gimplification.  In this case create a
	 new temporary that is initialized by a copy (which will
	 become a store after we mark it addressable).
	 This mostly happens if the frontend passed us something that
	 it could not mark addressable yet, like a fortran
	 pass-by-reference parameter (int) floatvar.  */
      if (is_gimple_reg (TREE_OPERAND (expr, 0)))
	TREE_OPERAND (expr, 0)
	  = get_initialized_tmp_var (TREE_OPERAND (expr, 0), pre_p, post_p);

      op0 = TREE_OPERAND (expr, 0);

      /* For various reasons, the gimplification of the expression
	 may have made a new INDIRECT_REF.  */
      if (TREE_CODE (op0) == INDIRECT_REF)
	goto do_indirect_ref;

      /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly.  */
      recompute_tree_invariant_for_addr_expr (expr);

      mark_addressable (TREE_OPERAND (expr, 0));
      break;
    }

  return ret;
}
4694
/* Gimplify the operands of an ASM_EXPR.  Input operands should be a gimple
   value; output operands should be a gimple lvalue.

   Builds a GIMPLE_ASM from the GENERIC ASM_EXPR in *EXPR_P, appending it
   to PRE_P.  In/out ("+") operands are split into a separate output and a
   matching input operand.  Returns GS_ALL_DONE on success, GS_ERROR if a
   constraint or operand was invalid (diagnostics already emitted).  */

static enum gimplify_status
gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree expr;
  int noutputs;
  const char **oconstraints;
  int i;
  tree link;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;
  enum gimplify_status ret, tret;
  gimple stmt;
  VEC(tree, gc) *inputs;
  VEC(tree, gc) *outputs;
  VEC(tree, gc) *clobbers;
  tree link_next;

  expr = *expr_p;
  noutputs = list_length (ASM_OUTPUTS (expr));
  /* Stack-allocated table of output constraint strings, consulted later
     when parsing input constraints that refer to outputs by number.  */
  oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));

  inputs = outputs = clobbers = NULL;

  ret = GS_ALL_DONE;
  /* Note that I is NOT reset between the three loops below: it keeps
     counting across outputs, inputs and clobbers so diagnostics and
     matching constraints use overall operand numbers.  */
  link_next = NULL_TREE;
  for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
    {
      bool ok;
      size_t constraint_len;

      link_next = TREE_CHAIN (link);

      oconstraints[i]
	= constraint
	= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      constraint_len = strlen (constraint);
      if (constraint_len == 0)
	continue;

      ok = parse_output_constraint (&constraint, i, 0, 0,
				    &allows_mem, &allows_reg, &is_inout);
      if (!ok)
	{
	  ret = GS_ERROR;
	  is_inout = false;
	}

      if (!allows_reg && allows_mem)
	mark_addressable (TREE_VALUE (link));

      tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
			    is_inout ? is_gimple_min_lval : is_gimple_lvalue,
			    fb_lvalue | fb_mayfail);
      if (tret == GS_ERROR)
	{
	  error ("invalid lvalue in asm output %d", i);
	  ret = tret;
	}

      VEC_safe_push (tree, gc, outputs, link);
      TREE_CHAIN (link) = NULL_TREE;

      if (is_inout)
	{
	  /* An input/output operand.  To give the optimizers more
	     flexibility, split it into separate input and output
	     operands.  */
	  tree input;
	  char buf[10];

	  /* Turn the in/out constraint into an output constraint.  */
	  char *p = xstrdup (constraint);
	  p[0] = '=';
	  TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);

	  /* And add a matching input constraint.  */
	  if (allows_reg)
	    {
	      /* BUF holds the decimal operand number of this output, to be
		 used as the matching-constraint of the new input.  */
	      sprintf (buf, "%d", i);

	      /* If there are multiple alternatives in the constraint,
		 handle each of them individually.  Those that allow register
		 will be replaced with operand number, the others will stay
		 unchanged.  */
	      if (strchr (p, ',') != NULL)
		{
		  /* First pass: compute an upper bound on the length of
		     the rewritten constraint string.  */
		  size_t len = 0, buflen = strlen (buf);
		  char *beg, *end, *str, *dst;

		  for (beg = p + 1;;)
		    {
		      end = strchr (beg, ',');
		      if (end == NULL)
			end = strchr (beg, '\0');
		      if ((size_t) (end - beg) < buflen)
			len += buflen + 1;
		      else
			len += end - beg + 1;
		      if (*end)
			beg = end + 1;
		      else
			break;
		    }

		  /* Second pass: rewrite each alternative, replacing those
		     that allow a register with the operand number.  The
		     alternative is temporarily turned into an output
		     constraint (beg[-1] = '=') so it can be parsed with
		     parse_output_constraint.  */
		  str = (char *) alloca (len);
		  for (beg = p + 1, dst = str;;)
		    {
		      const char *tem;
		      bool mem_p, reg_p, inout_p;

		      end = strchr (beg, ',');
		      if (end)
			*end = '\0';
		      beg[-1] = '=';
		      tem = beg - 1;
		      parse_output_constraint (&tem, i, 0, 0,
					       &mem_p, &reg_p, &inout_p);
		      if (dst != str)
			*dst++ = ',';
		      if (reg_p)
			{
			  memcpy (dst, buf, buflen);
			  dst += buflen;
			}
		      else
			{
			  if (end)
			    len = end - beg;
			  else
			    len = strlen (beg);
			  memcpy (dst, beg, len);
			  dst += len;
			}
		      if (end)
			beg = end + 1;
		      else
			break;
		    }
		  *dst = '\0';
		  input = build_string (dst - str, str);
		}
	      else
		input = build_string (strlen (buf), buf);
	    }
	  else
	    /* No register allowed: reuse the constraint with the leading
	       '+' stripped.  */
	    input = build_string (constraint_len - 1, constraint + 1);

	  free (p);

	  /* Chain the new matching input onto the asm's input list so the
	     input loop below picks it up.  */
	  input = build_tree_list (build_tree_list (NULL_TREE, input),
				   unshare_expr (TREE_VALUE (link)));
	  ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
	}
    }

  link_next = NULL_TREE;
  for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
    {
      link_next = TREE_CHAIN (link);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0,
			      oconstraints, &allows_mem, &allows_reg);

      /* If we can't make copies, we can only accept memory.  */
      if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (link))))
	{
	  if (allows_mem)
	    allows_reg = 0;
	  else
	    {
	      error ("impossible constraint in %<asm%>");
	      error ("non-memory input %d must stay in memory", i);
	      return GS_ERROR;
	    }
	}

      /* If the operand is a memory input, it should be an lvalue.  */
      if (!allows_reg && allows_mem)
	{
	  tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
				is_gimple_lvalue, fb_lvalue | fb_mayfail);
	  mark_addressable (TREE_VALUE (link));
	  if (tret == GS_ERROR)
	    {
	      if (EXPR_HAS_LOCATION (TREE_VALUE (link)))
		input_location = EXPR_LOCATION (TREE_VALUE (link));
	      error ("memory input %d is not directly addressable", i);
	      ret = tret;
	    }
	}
      else
	{
	  tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
				is_gimple_asm_val, fb_rvalue);
	  if (tret == GS_ERROR)
	    ret = tret;
	}

      TREE_CHAIN (link) = NULL_TREE;
      VEC_safe_push (tree, gc, inputs, link);
    }

  /* Clobbers need no gimplification; collect them as-is.  */
  for (link = ASM_CLOBBERS (expr); link; ++i, link = TREE_CHAIN (link))
    VEC_safe_push (tree, gc, clobbers, link);

  /* Build the GIMPLE_ASM tuple and emit it.  */
  stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
			       inputs, outputs, clobbers);

  gimple_asm_set_volatile (stmt, ASM_VOLATILE_P (expr));
  gimple_asm_set_input (stmt, ASM_INPUT_P (expr));

  gimplify_seq_add_stmt (pre_p, stmt);

  return ret;
}
4913
/* Gimplify a CLEANUP_POINT_EXPR.  Currently this works by adding
   GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
   gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
   return to this function.

   FIXME should we complexify the prequeue handling instead?  Or use flags
   for all the cleanups and let the optimizer tighten them up?  The current
   code seems pretty fragile; it will break on a cleanup within any
   non-conditional nesting.  But any such nesting would be broken, anyway;
   we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
   and continues out of it.  We can do that at the RTL level, though, so
   having an optimizer to tighten up try/finally regions would be a Good
   Thing.  */

static enum gimplify_status
gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
{
  gimple_stmt_iterator iter;
  gimple_seq body_sequence = NULL;

  /* TEMP is non-NULL when the wrapped expression had a value that a
     caller wants; in that case it names the temporary holding it.  */
  tree temp = voidify_wrapper_expr (*expr_p, NULL);

  /* We only care about the number of conditions between the innermost
     CLEANUP_POINT_EXPR and the cleanup.  So save and reset the count and
     any cleanups collected outside the CLEANUP_POINT_EXPR.  */
  int old_conds = gimplify_ctxp->conditions;
  gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
  gimplify_ctxp->conditions = 0;
  gimplify_ctxp->conditional_cleanups = NULL;

  gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);

  gimplify_ctxp->conditions = old_conds;
  gimplify_ctxp->conditional_cleanups = old_cleanups;

  /* Walk the gimplified body, turning each GIMPLE_WITH_CLEANUP_EXPR
     marker into a GIMPLE_TRY that protects the statements following it.  */
  for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
    {
      gimple wce = gsi_stmt (iter);

      if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
	{
	  if (gsi_one_before_end_p (iter))
	    {
	      /* The marker is the last statement: there is nothing left
		 to protect, so just emit the cleanup inline.  */
	      /* Note that gsi_insert_seq_before and gsi_remove do not
		 scan operands, unlike some other sequence mutators.  */
	      gsi_insert_seq_before_without_update (&iter,
						    gimple_wce_cleanup (wce),
						    GSI_SAME_STMT);
	      gsi_remove (&iter, true);
	      break;
	    }
	  else
	    {
	      gimple gtry;
	      gimple_seq seq;
	      enum gimple_try_flags kind;

	      if (gimple_wce_cleanup_eh_only (wce))
		kind = GIMPLE_TRY_CATCH;
	      else
		kind = GIMPLE_TRY_FINALLY;
	      /* The statements after the marker become the protected body.  */
	      seq = gsi_split_seq_after (iter);

	      gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
	      /* Do not use gsi_replace here, as it may scan operands.
		 We want to do a simple structural modification only.  */
	      *gsi_stmt_ptr (&iter) = gtry;
	      /* Continue scanning inside the newly protected body, which
		 may itself contain further cleanup markers.  */
	      iter = gsi_start (seq);
	    }
	}
      else
	gsi_next (&iter);
    }

  gimplify_seq_add_seq (pre_p, body_sequence);
  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }
  else
    {
      *expr_p = NULL;
      return GS_ALL_DONE;
    }
}
5000
5001 /* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP
5002 is the cleanup action required. EH_ONLY is true if the cleanup should
5003 only be executed if an exception is thrown, not on normal exit. */
5004
5005 static void
5006 gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p)
5007 {
5008 gimple wce;
5009 gimple_seq cleanup_stmts = NULL;
5010
5011 /* Errors can result in improperly nested cleanups. Which results in
5012 confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR. */
5013 if (errorcount || sorrycount)
5014 return;
5015
5016 if (gimple_conditional_context ())
5017 {
5018 /* If we're in a conditional context, this is more complex. We only
5019 want to run the cleanup if we actually ran the initialization that
5020 necessitates it, but we want to run it after the end of the
5021 conditional context. So we wrap the try/finally around the
5022 condition and use a flag to determine whether or not to actually
5023 run the destructor. Thus
5024
5025 test ? f(A()) : 0
5026
5027 becomes (approximately)
5028
5029 flag = 0;
5030 try {
5031 if (test) { A::A(temp); flag = 1; val = f(temp); }
5032 else { val = 0; }
5033 } finally {
5034 if (flag) A::~A(temp);
5035 }
5036 val
5037 */
5038 tree flag = create_tmp_var (boolean_type_node, "cleanup");
5039 gimple ffalse = gimple_build_assign (flag, boolean_false_node);
5040 gimple ftrue = gimple_build_assign (flag, boolean_true_node);
5041
/* Guard the cleanup with the flag, then gimplify the guarded form.  */
5042 cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
5043 gimplify_stmt (&cleanup, &cleanup_stmts);
5044 wce = gimple_build_wce (cleanup_stmts);
5045
/* "flag = 0" and the WCE go to the enclosing (unconditional) context;
   "flag = 1" is emitted here, at the point of initialization.  */
5046 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
5047 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
5048 gimplify_seq_add_stmt (pre_p, ftrue);
5049
5050 /* Because of this manipulation, and the EH edges that jump
5051 threading cannot redirect, the temporary (VAR) will appear
5052 to be used uninitialized. Don't warn. */
5053 TREE_NO_WARNING (var) = 1;
5054 }
5055 else
5056 {
/* Unconditional context: a plain cleanup marker suffices.  Note the
   EH_ONLY bit is only honored on this path; the conditional path
   always uses the flag-guarded form.  */
5057 gimplify_stmt (&cleanup, &cleanup_stmts);
5058 wce = gimple_build_wce (cleanup_stmts);
5059 gimple_wce_set_cleanup_eh_only (wce, eh_only);
5060 gimplify_seq_add_stmt (pre_p, wce);
5061 }
5062 }
5063
5064 /* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */
5065
5066 static enum gimplify_status
5067 gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5068 {
5069 tree targ = *expr_p;
5070 tree temp = TARGET_EXPR_SLOT (targ);
5071 tree init = TARGET_EXPR_INITIAL (targ);
5072 enum gimplify_status ret;
5073
/* INIT is non-NULL only the first time the TARGET_EXPR is gimplified;
   it is cleared below so re-gimplification just reuses the slot.  */
5074 if (init)
5075 {
5076 /* TARGET_EXPR temps aren't part of the enclosing block, so add it
5077 to the temps list. Handle also variable length TARGET_EXPRs. */
5078 if (TREE_CODE (DECL_SIZE (temp)) != INTEGER_CST)
5079 {
5080 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
5081 gimplify_type_sizes (TREE_TYPE (temp), pre_p);
5082 gimplify_vla_decl (temp, pre_p);
5083 }
5084 else
5085 gimple_add_tmp_var (temp);
5086
5087 /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
5088 expression is supposed to initialize the slot. */
5089 if (VOID_TYPE_P (TREE_TYPE (init)))
5090 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
5091 else
5092 {
/* Otherwise build "temp = init" and gimplify that; the INIT_EXPR
   node itself is fully consumed by gimplification, so it can be
   returned to the GC allocator immediately.  */
5093 tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
5094 init = init_expr;
5095 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
5096 init = NULL;
5097 ggc_free (init_expr);
5098 }
5099 if (ret == GS_ERROR)
5100 {
5101 /* PR c++/28266 Make sure this is expanded only once. */
5102 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
5103 return GS_ERROR;
5104 }
5105 if (init)
5106 gimplify_and_add (init, pre_p);
5107
5108 /* If needed, push the cleanup for the temp. */
5109 if (TARGET_EXPR_CLEANUP (targ))
5110 gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
5111 CLEANUP_EH_ONLY (targ), pre_p);
5112
5113 /* Only expand this once. */
5114 TREE_OPERAND (targ, 3) = init;
5115 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
5116 }
5117 else
5118 /* We should have expanded this before. */
5119 gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));
5120
/* The value of the whole expression is the slot itself.  */
5121 *expr_p = temp;
5122 return GS_OK;
5123 }
5124
5125 /* Gimplification of expression trees. */
5126
5127 /* Gimplify an expression which appears at statement context. The
5128 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is
5129 NULL, a new sequence is allocated.
5130
5131 Return true if we actually added a statement to the queue. */
5132
5133 bool
5134 gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
5135 {
5136 gimple_seq_node last;
5137
5138 if (!*seq_p)
5139 *seq_p = gimple_seq_alloc ();
5140
/* Detect whether gimplification appended anything by comparing the
   sequence tail before and after.  */
5141 last = gimple_seq_last (*seq_p);
5142 gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
5143 return last != gimple_seq_last (*seq_p);
5144 }
5145
5146
5147 /* Add FIRSTPRIVATE entries for DECL in the OpenMP the surrounding parallels
5148 to CTX. If entries already exist, force them to be some flavor of private.
5149 If there is no enclosing parallel, do nothing. */
5150
5151 void
5152 omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
5153 {
5154 splay_tree_node n;
5155
5156 if (decl == NULL || !DECL_P (decl))
5157 return;
5158
/* Walk outward through the enclosing OMP contexts.  */
5159 do
5160 {
5161 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5162 if (n != NULL)
5163 {
/* Demote an existing SHARED entry to FIRSTPRIVATE (keeping the
   SEEN bit); any other existing class is already some flavor of
   private, so stop here.  */
5164 if (n->value & GOVD_SHARED)
5165 n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
5166 else
5167 return;
5168 }
5169 else if (ctx->region_type != ORT_WORKSHARE)
5170 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
5171
5172 ctx = ctx->outer_context;
5173 }
5174 while (ctx);
5175 }
5176
5177 /* Similarly for each of the type sizes of TYPE. */
5178
5179 static void
5180 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
5181 {
5182 if (type == NULL || type == error_mark_node)
5183 return;
5184 type = TYPE_MAIN_VARIANT (type);
5185
/* Each type is processed at most once per context; privatized_types
   doubles as the visited set, which also breaks recursion cycles.  */
5186 if (pointer_set_insert (ctx->privatized_types, type))
5187 return;
5188
5189 switch (TREE_CODE (type))
5190 {
5191 case INTEGER_TYPE:
5192 case ENUMERAL_TYPE:
5193 case BOOLEAN_TYPE:
5194 case REAL_TYPE:
5195 case FIXED_POINT_TYPE:
5196 omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
5197 omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
5198 break;
5199
5200 case ARRAY_TYPE:
/* Recurse into both the element type and the index domain, since
   either may reference variable sizes.  */
5201 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
5202 omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
5203 break;
5204
5205 case RECORD_TYPE:
5206 case UNION_TYPE:
5207 case QUAL_UNION_TYPE:
5208 {
5209 tree field;
5210 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
5211 if (TREE_CODE (field) == FIELD_DECL)
5212 {
5213 omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
5214 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
5215 }
5216 }
5217 break;
5218
5219 case POINTER_TYPE:
5220 case REFERENCE_TYPE:
5221 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
5222 break;
5223
5224 default:
5225 break;
5226 }
5227
/* Regardless of kind, the type's own size expressions may be variable.  */
5228 omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
5229 omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
/* Give the front end a chance to handle language-specific size data.  */
5230 lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
5231 }
5232
5233 /* Add an entry for DECL in the OpenMP context CTX with FLAGS. */
5234
5235 static void
5236 omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
5237 {
5238 splay_tree_node n;
5239 unsigned int nflags;
5240 tree t;
5241
5242 if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node)
5243 return;
5244
5245 /* Never elide decls whose type has TREE_ADDRESSABLE set. This means
5246 there are constructors involved somewhere. */
5247 if (TREE_ADDRESSABLE (TREE_TYPE (decl))
5248 || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl)))
5249 flags |= GOVD_SEEN;
5250
5251 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5252 if (n != NULL)
5253 {
5254 /* We shouldn't be re-adding the decl with the same data
5255 sharing class. */
5256 gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
5257 /* The only combination of data sharing classes we should see is
5258 FIRSTPRIVATE and LASTPRIVATE. */
5259 nflags = n->value | flags;
5260 gcc_assert ((nflags & GOVD_DATA_SHARE_CLASS)
5261 == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE));
5262 n->value = nflags;
5263 return;
5264 }
5265
5266 /* When adding a variable-sized variable, we have to handle all sorts
5267 of additional bits of data: the pointer replacement variable, and
5268 the parameters of the type. */
5269 if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
5270 {
5271 /* Add the pointer replacement variable as PRIVATE if the variable
5272 replacement is private, else FIRSTPRIVATE since we'll need the
5273 address of the original variable either for SHARED, or for the
5274 copy into or out of the context. */
5275 if (!(flags & GOVD_LOCAL))
5276 {
5277 nflags = flags & GOVD_PRIVATE ? GOVD_PRIVATE : GOVD_FIRSTPRIVATE;
5278 nflags |= flags & GOVD_SEEN;
/* A VLA decl's DECL_VALUE_EXPR is expected to be *ptr; recurse
   on the underlying pointer decl.  */
5279 t = DECL_VALUE_EXPR (decl);
5280 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
5281 t = TREE_OPERAND (t, 0);
5282 gcc_assert (DECL_P (t));
5283 omp_add_variable (ctx, t, nflags);
5284 }
5285
5286 /* Add all of the variable and type parameters (which should have
5287 been gimplified to a formal temporary) as FIRSTPRIVATE. */
5288 omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
5289 omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
5290 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
5291
5292 /* The variable-sized variable itself is never SHARED, only some form
5293 of PRIVATE. The sharing would take place via the pointer variable
5294 which we remapped above. */
5295 if (flags & GOVD_SHARED)
5296 flags = GOVD_PRIVATE | GOVD_DEBUG_PRIVATE
5297 | (flags & (GOVD_SEEN | GOVD_EXPLICIT));
5298
5299 /* We're going to make use of the TYPE_SIZE_UNIT at least in the
5300 alloca statement we generate for the variable, so make sure it
5301 is available. This isn't automatically needed for the SHARED
5302 case, since we won't be allocating local storage then.
5303 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
5304 in this case omp_notice_variable will be called later
5305 on when it is gimplified. */
5306 else if (! (flags & GOVD_LOCAL))
5307 omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
5308 }
5309 else if (lang_hooks.decls.omp_privatize_by_reference (decl))
5310 {
5311 gcc_assert ((flags & GOVD_LOCAL) == 0);
5312 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
5313
5314 /* Similar to the direct variable sized case above, we'll need the
5315 size of references being privatized. */
5316 if ((flags & GOVD_SHARED) == 0)
5317 {
5318 t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
5319 if (TREE_CODE (t) != INTEGER_CST)
5320 omp_notice_variable (ctx, t, true);
5321 }
5322 }
5323
/* Finally record the (possibly adjusted) flags for DECL in this context.  */
5324 splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
5325 }
5326
5327 /* Record the fact that DECL was used within the OpenMP context CTX.
5328 IN_CODE is true when real code uses DECL, and false when we should
5329 merely emit default(none) errors. Return true if DECL is going to
5330 be remapped and thus DECL shouldn't be gimplified into its
5331 DECL_VALUE_EXPR (if any). */
5332
5333 static bool
5334 omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
5335 {
5336 splay_tree_node n;
5337 unsigned flags = in_code ? GOVD_SEEN : 0;
5338 bool ret = false, shared;
5339
5340 if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node)
5341 return false;
5342
5343 /* Threadprivate variables are predetermined. */
5344 if (is_global_var (decl))
5345 {
5346 if (DECL_THREAD_LOCAL_P (decl))
5347 return false;
5348
/* Also treat a global whose DECL_VALUE_EXPR is based on a
   thread-local decl as threadprivate (e.g. emutls replacements —
   presumably; confirm against the TLS lowering).  */
5349 if (DECL_HAS_VALUE_EXPR_P (decl))
5350 {
5351 tree value = get_base_address (DECL_VALUE_EXPR (decl));
5352
5353 if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
5354 return false;
5355 }
5356 }
5357
5358 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5359 if (n == NULL)
5360 {
/* First sighting of DECL in this context: compute its implicit
   data-sharing class from the default clause and predetermined
   sharing rules, then record it.  */
5361 enum omp_clause_default_kind default_kind, kind;
5362 struct gimplify_omp_ctx *octx;
5363
5364 if (ctx->region_type == ORT_WORKSHARE)
5365 goto do_outer;
5366
5367 /* ??? Some compiler-generated variables (like SAVE_EXPRs) could be
5368 remapped firstprivate instead of shared. To some extent this is
5369 addressed in omp_firstprivatize_type_sizes, but not effectively. */
5370 default_kind = ctx->default_kind;
5371 kind = lang_hooks.decls.omp_predetermined_sharing (decl);
5372 if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
5373 default_kind = kind;
5374
5375 switch (default_kind)
5376 {
5377 case OMP_CLAUSE_DEFAULT_NONE:
/* default(none): diagnose, then fall through and pretend SHARED
   so compilation can continue.  */
5378 error ("%qE not specified in enclosing parallel",
5379 DECL_NAME (decl));
5380 error_at (ctx->location, "enclosing parallel");
5381 /* FALLTHRU */
5382 case OMP_CLAUSE_DEFAULT_SHARED:
5383 flags |= GOVD_SHARED;
5384 break;
5385 case OMP_CLAUSE_DEFAULT_PRIVATE:
5386 flags |= GOVD_PRIVATE;
5387 break;
5388 case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
5389 flags |= GOVD_FIRSTPRIVATE;
5390 break;
5391 case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
5392 /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED. */
5393 gcc_assert (ctx->region_type == ORT_TASK);
5394 if (ctx->outer_context)
5395 omp_notice_variable (ctx->outer_context, decl, in_code);
/* Scan outward up to the innermost parallel: if DECL is some
   flavor of private there, the task sees it firstprivate.  */
5396 for (octx = ctx->outer_context; octx; octx = octx->outer_context)
5397 {
5398 splay_tree_node n2;
5399
5400 n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
5401 if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
5402 {
5403 flags |= GOVD_FIRSTPRIVATE;
5404 break;
5405 }
5406 if ((octx->region_type & ORT_PARALLEL) != 0)
5407 break;
5408 }
5409 if (flags & GOVD_FIRSTPRIVATE)
5410 break;
5411 if (octx == NULL
5412 && (TREE_CODE (decl) == PARM_DECL
5413 || (!is_global_var (decl)
5414 && DECL_CONTEXT (decl) == current_function_decl)))
5415 {
5416 flags |= GOVD_FIRSTPRIVATE;
5417 break;
5418 }
5419 flags |= GOVD_SHARED;
5420 break;
5421 default:
5422 gcc_unreachable ();
5423 }
5424
5425 if ((flags & GOVD_PRIVATE)
5426 && lang_hooks.decls.omp_private_outer_ref (decl))
5427 flags |= GOVD_PRIVATE_OUTER_REF;
5428
5429 omp_add_variable (ctx, decl, flags);
5430
5431 shared = (flags & GOVD_SHARED) != 0;
5432 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
5433 goto do_outer;
5434 }
5435
/* DECL already known here.  If this is the first real use of a
   variable-sized DECL, mark its pointer replacement variable SEEN too.  */
5436 if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
5437 && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
5438 && DECL_SIZE (decl)
5439 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
5440 {
5441 splay_tree_node n2;
5442 tree t = DECL_VALUE_EXPR (decl);
5443 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
5444 t = TREE_OPERAND (t, 0);
5445 gcc_assert (DECL_P (t));
5446 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
5447 n2->value |= GOVD_SEEN;
5448 }
5449
5450 shared = ((flags | n->value) & GOVD_SHARED) != 0;
5451 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
5452
5453 /* If nothing changed, there's nothing left to do. */
5454 if ((n->value & flags) == flags)
5455 return ret;
5456 flags |= n->value;
5457 n->value = flags;
5458
5459 do_outer:
5460 /* If the variable is private in the current context, then we don't
5461 need to propagate anything to an outer context. */
5462 if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
5463 return ret;
5464 if (ctx->outer_context
5465 && omp_notice_variable (ctx->outer_context, decl, in_code))
5466 return true;
5467 return ret;
5468 }
5469
5470 /* Verify that DECL is private within CTX. If there's specific information
5471 to the contrary in the innermost scope, generate an error. */
5472
5473 static bool
5474 omp_is_private (struct gimplify_omp_ctx *ctx, tree decl)
5475 {
5476 splay_tree_node n;
5477
5478 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5479 if (n != NULL)
5480 {
5481 if (n->value & GOVD_SHARED)
5482 {
5483 if (ctx == gimplify_omp_ctxp)
5484 {
/* Error recovery: force the iteration variable private so the
   rest of the loop can be processed consistently.  */
5485 error ("iteration variable %qE should be private",
5486 DECL_NAME (decl));
5487 n->value = GOVD_PRIVATE;
5488 return true;
5489 }
5490 else
5491 return false;
5492 }
5493 else if ((n->value & GOVD_EXPLICIT) != 0
5494 && (ctx == gimplify_omp_ctxp
5495 || (ctx->region_type == ORT_COMBINED_PARALLEL
5496 && gimplify_omp_ctxp->outer_context == ctx)))
5497 {
/* Explicit clauses on the loop's own construct (or the combined
   parallel directly enclosing it) must not be firstprivate or
   reduction for the iteration variable.  */
5498 if ((n->value & GOVD_FIRSTPRIVATE) != 0)
5499 error ("iteration variable %qE should not be firstprivate",
5500 DECL_NAME (decl));
5501 else if ((n->value & GOVD_REDUCTION) != 0)
5502 error ("iteration variable %qE should not be reduction",
5503 DECL_NAME (decl));
5504 }
/* DECL counts as private only when recorded on the innermost context
   (or its directly combined parallel).  */
5505 return (ctx == gimplify_omp_ctxp
5506 || (ctx->region_type == ORT_COMBINED_PARALLEL
5507 && gimplify_omp_ctxp->outer_context == ctx));
5508 }
5509
/* Not recorded here: only look through enclosing worksharing regions.  */
5510 if (ctx->region_type != ORT_WORKSHARE)
5511 return false;
5512 else if (ctx->outer_context)
5513 return omp_is_private (ctx->outer_context, decl);
5514 return false;
5515 }
5516
5517 /* Return true if DECL is private within a parallel region
5518 that binds to the current construct's context or in parallel
5519 region's REDUCTION clause. */
5520
5521 static bool
5522 omp_check_private (struct gimplify_omp_ctx *ctx, tree decl)
5523 {
5524 splay_tree_node n;
5525
/* Walk outward through enclosing worksharing contexts until DECL is
   found or a non-worksharing (e.g. parallel) boundary is crossed.  */
5526 do
5527 {
5528 ctx = ctx->outer_context;
5529 if (ctx == NULL)
/* No enclosing OMP context: a function local is effectively
   private; globals and by-reference privatized decls may not be.  */
5530 return !(is_global_var (decl)
5531 /* References might be private, but might be shared too. */
5532 || lang_hooks.decls.omp_privatize_by_reference (decl));
5533
5534 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
5535 if (n != NULL)
5536 return (n->value & GOVD_SHARED) == 0;
5537 }
5538 while (ctx->region_type == ORT_WORKSHARE);
5539 return false;
5540 }
5541
5542 /* Scan the OpenMP clauses in *LIST_P, installing mappings into a new
5543 and previous omp contexts. */
5544
5545 static void
5546 gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
5547 enum omp_region_type region_type)
5548 {
5549 struct gimplify_omp_ctx *ctx, *outer_ctx;
5550 struct gimplify_ctx gctx;
5551 tree c;
5552
/* Push a fresh OMP context; it becomes gimplify_omp_ctxp at the end so
   the construct's body is gimplified inside it.  */
5553 ctx = new_omp_context (region_type);
5554 outer_ctx = ctx->outer_context;
5555
5556 while ((c = *list_p) != NULL)
5557 {
5558 bool remove = false;
5559 bool notice_outer = true;
5560 const char *check_non_private = NULL;
5561 unsigned int flags;
5562 tree decl;
5563
5564 switch (OMP_CLAUSE_CODE (c))
5565 {
5566 case OMP_CLAUSE_PRIVATE:
5567 flags = GOVD_PRIVATE | GOVD_EXPLICIT;
5568 if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
5569 {
5570 flags |= GOVD_PRIVATE_OUTER_REF;
5571 OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
5572 }
5573 else
5574 notice_outer = false;
5575 goto do_add;
5576 case OMP_CLAUSE_SHARED:
5577 flags = GOVD_SHARED | GOVD_EXPLICIT;
5578 goto do_add;
5579 case OMP_CLAUSE_FIRSTPRIVATE:
5580 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
5581 check_non_private = "firstprivate";
5582 goto do_add;
5583 case OMP_CLAUSE_LASTPRIVATE:
5584 flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
5585 check_non_private = "lastprivate";
5586 goto do_add;
5587 case OMP_CLAUSE_REDUCTION:
5588 flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
5589 check_non_private = "reduction";
5590 goto do_add;
5591
5592 do_add:
5593 decl = OMP_CLAUSE_DECL (c);
5594 if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node)
5595 {
5596 remove = true;
5597 break;
5598 }
5599 omp_add_variable (ctx, decl, flags);
5600 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5601 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5602 {
/* User-defined reduction: gimplify the INIT and MERGE trees now,
   each inside its own gimplify context, and stash the resulting
   GIMPLE sequences on the clause.  */
5603 omp_add_variable (ctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
5604 GOVD_LOCAL | GOVD_SEEN);
5605 gimplify_omp_ctxp = ctx;
5606 push_gimplify_context (&gctx);
5607
5608 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = gimple_seq_alloc ();
5609 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = gimple_seq_alloc ();
5610
5611 gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
5612 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
5613 pop_gimplify_context
5614 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
5615 push_gimplify_context (&gctx);
5616 gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
5617 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
5618 pop_gimplify_context
5619 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
5620 OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
5621 OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;
5622
5623 gimplify_omp_ctxp = outer_ctx;
5624 }
5625 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5626 && OMP_CLAUSE_LASTPRIVATE_STMT (c))
5627 {
5628 gimplify_omp_ctxp = ctx;
5629 push_gimplify_context (&gctx);
/* Wrap the lastprivate statement in a BIND_EXPR so the popped
   gimplify context has a GIMPLE_BIND to attach temporaries to.  */
5630 if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
5631 {
5632 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
5633 NULL, NULL);
5634 TREE_SIDE_EFFECTS (bind) = 1;
5635 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
5636 OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
5637 }
5638 gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
5639 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
5640 pop_gimplify_context
5641 (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
5642 OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;
5643
5644 gimplify_omp_ctxp = outer_ctx;
5645 }
5646 if (notice_outer)
5647 goto do_notice;
5648 break;
5649
5650 case OMP_CLAUSE_COPYIN:
5651 case OMP_CLAUSE_COPYPRIVATE:
5652 decl = OMP_CLAUSE_DECL (c);
5653 if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node)
5654 {
5655 remove = true;
5656 break;
5657 }
5658 do_notice:
5659 if (outer_ctx)
5660 omp_notice_variable (outer_ctx, decl, true);
/* On worksharing constructs, firstprivate/lastprivate/reduction
   clauses may not name a variable that is private in the binding
   parallel region.  */
5661 if (check_non_private
5662 && region_type == ORT_WORKSHARE
5663 && omp_check_private (ctx, decl))
5664 {
5665 error ("%s variable %qE is private in outer context",
5666 check_non_private, DECL_NAME (decl));
5667 remove = true;
5668 }
5669 break;
5670
5671 case OMP_CLAUSE_IF:
5672 OMP_CLAUSE_OPERAND (c, 0)
5673 = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
5674 /* Fall through. */
5675
5676 case OMP_CLAUSE_SCHEDULE:
5677 case OMP_CLAUSE_NUM_THREADS:
5678 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
5679 is_gimple_val, fb_rvalue) == GS_ERROR)
5680 remove = true;
5681 break;
5682
5683 case OMP_CLAUSE_NOWAIT:
5684 case OMP_CLAUSE_ORDERED:
5685 case OMP_CLAUSE_UNTIED:
5686 case OMP_CLAUSE_COLLAPSE:
5687 break;
5688
5689 case OMP_CLAUSE_DEFAULT:
5690 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
5691 break;
5692
5693 default:
5694 gcc_unreachable ();
5695 }
5696
/* Either unlink the bad clause or advance to the next one.  */
5697 if (remove)
5698 *list_p = OMP_CLAUSE_CHAIN (c);
5699 else
5700 list_p = &OMP_CLAUSE_CHAIN (c);
5701 }
5702
5703 gimplify_omp_ctxp = ctx;
5704 }
5705
5706 /* For all variables that were not actually used within the context,
5707 remove PRIVATE, SHARED, and FIRSTPRIVATE clauses. */
5708
5709 static int
5710 gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
5711 {
/* Splay-tree callback: N maps a decl to its GOVD_* flags, DATA points
   at the head of the clause list being built.  Returns 0 to continue
   the traversal.  */
5712 tree *list_p = (tree *) data;
5713 tree decl = (tree) n->key;
5714 unsigned flags = n->value;
5715 enum omp_clause_code code;
5716 tree clause;
5717 bool private_debug;
5718
/* Explicit clauses already exist; LOCAL and unused entries need none.  */
5719 if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
5720 return 0;
5721 if ((flags & GOVD_SEEN) == 0)
5722 return 0;
5723 if (flags & GOVD_DEBUG_PRIVATE)
5724 {
5725 gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_PRIVATE);
5726 private_debug = true;
5727 }
5728 else
5729 private_debug
5730 = lang_hooks.decls.omp_private_debug_clause (decl,
5731 !!(flags & GOVD_SHARED));
5732 if (private_debug)
5733 code = OMP_CLAUSE_PRIVATE;
5734 else if (flags & GOVD_SHARED)
5735 {
5736 if (is_global_var (decl))
5737 {
/* A global is only worth an explicit SHARED clause when some
   enclosing context gives it a non-shared class; otherwise it is
   implicitly shared anyway.  */
5738 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
5739 while (ctx != NULL)
5740 {
5741 splay_tree_node on
5742 = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
5743 if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
5744 | GOVD_PRIVATE | GOVD_REDUCTION)) != 0)
5745 break;
5746 ctx = ctx->outer_context;
5747 }
5748 if (ctx == NULL)
5749 return 0;
5750 }
5751 code = OMP_CLAUSE_SHARED;
5752 }
5753 else if (flags & GOVD_PRIVATE)
5754 code = OMP_CLAUSE_PRIVATE;
5755 else if (flags & GOVD_FIRSTPRIVATE)
5756 code = OMP_CLAUSE_FIRSTPRIVATE;
5757 else
5758 gcc_unreachable ();
5759
/* Prepend the synthesized clause and let the front end finalize it.  */
5760 clause = build_omp_clause (input_location, code);
5761 OMP_CLAUSE_DECL (clause) = decl;
5762 OMP_CLAUSE_CHAIN (clause) = *list_p;
5763 if (private_debug)
5764 OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
5765 else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
5766 OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
5767 *list_p = clause;
5768 lang_hooks.decls.omp_finish_clause (clause);
5769
5770 return 0;
5771 }
5772
/* Post-process the clause list in *LIST_P after the construct's body has
   been gimplified: drop explicit clauses for unused variables, add
   implicit data-sharing clauses, and pop the current OMP context.  */

5773 static void
5774 gimplify_adjust_omp_clauses (tree *list_p)
5775 {
5776 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
5777 tree c, decl;
5778
5779 while ((c = *list_p) != NULL)
5780 {
5781 splay_tree_node n;
5782 bool remove = false;
5783
5784 switch (OMP_CLAUSE_CODE (c))
5785 {
5786 case OMP_CLAUSE_PRIVATE:
5787 case OMP_CLAUSE_SHARED:
5788 case OMP_CLAUSE_FIRSTPRIVATE:
/* Drop the clause if the variable was never actually used in the
   region; otherwise possibly demote it to a debug-only PRIVATE.  */
5789 decl = OMP_CLAUSE_DECL (c);
5790 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
5791 remove = !(n->value & GOVD_SEEN);
5792 if (! remove)
5793 {
5794 bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
5795 if ((n->value & GOVD_DEBUG_PRIVATE)
5796 || lang_hooks.decls.omp_private_debug_clause (decl, shared))
5797 {
5798 gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
5799 || ((n->value & GOVD_DATA_SHARE_CLASS)
5800 == GOVD_PRIVATE));
5801 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
5802 OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
5803 }
5804 }
5805 break;
5806
5807 case OMP_CLAUSE_LASTPRIVATE:
5808 /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
5809 accurately reflect the presence of a FIRSTPRIVATE clause. */
5810 decl = OMP_CLAUSE_DECL (c);
5811 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
5812 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
5813 = (n->value & GOVD_FIRSTPRIVATE) != 0;
5814 break;
5815
5816 case OMP_CLAUSE_REDUCTION:
5817 case OMP_CLAUSE_COPYIN:
5818 case OMP_CLAUSE_COPYPRIVATE:
5819 case OMP_CLAUSE_IF:
5820 case OMP_CLAUSE_NUM_THREADS:
5821 case OMP_CLAUSE_SCHEDULE:
5822 case OMP_CLAUSE_NOWAIT:
5823 case OMP_CLAUSE_ORDERED:
5824 case OMP_CLAUSE_DEFAULT:
5825 case OMP_CLAUSE_UNTIED:
5826 case OMP_CLAUSE_COLLAPSE:
5827 break;
5828
5829 default:
5830 gcc_unreachable ();
5831 }
5832
5833 if (remove)
5834 *list_p = OMP_CLAUSE_CHAIN (c);
5835 else
5836 list_p = &OMP_CLAUSE_CHAIN (c);
5837 }
5838
5839 /* Add in any implicit data sharing. */
5840 splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, list_p);
5841
/* Pop the OMP context pushed by gimplify_scan_omp_clauses.  */
5842 gimplify_omp_ctxp = ctx->outer_context;
5843 delete_omp_context (ctx);
5844 }
5845
5846 /* Gimplify the contents of an OMP_PARALLEL statement. This involves
5847 gimplification of the body, as well as scanning the body for used
5848 variables. We need to do this scan now, because variable-sized
5849 decls will be decomposed during gimplification. */
5850
5851 static void
5852 gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
5853 {
5854 tree expr = *expr_p;
5855 gimple g;
5856 gimple_seq body = NULL;
5857 struct gimplify_ctx gctx;
5858
5859 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
5860 OMP_PARALLEL_COMBINED (expr)
5861 ? ORT_COMBINED_PARALLEL
5862 : ORT_PARALLEL);
5863
5864 push_gimplify_context (&gctx);
5865
/* Gimplify the body; if it produced a GIMPLE_BIND, hand it to
   pop_gimplify_context so new temporaries get attached to it.  */
5866 g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
5867 if (gimple_code (g) == GIMPLE_BIND)
5868 pop_gimplify_context (g);
5869 else
5870 pop_gimplify_context (NULL);
5871
5872 gimplify_adjust_omp_clauses (&OMP_PARALLEL_CLAUSES (expr));
5873
5874 g = gimple_build_omp_parallel (body,
5875 OMP_PARALLEL_CLAUSES (expr),
5876 NULL_TREE, NULL_TREE);
5877 if (OMP_PARALLEL_COMBINED (expr))
5878 gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
5879 gimplify_seq_add_stmt (pre_p, g);
/* The tree expression has been fully consumed.  */
5880 *expr_p = NULL_TREE;
5881 }
5882
5883 /* Gimplify the contents of an OMP_TASK statement. This involves
5884 gimplification of the body, as well as scanning the body for used
5885 variables. We need to do this scan now, because variable-sized
5886 decls will be decomposed during gimplification. */
5887
5888 static void
5889 gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
5890 {
5891 tree expr = *expr_p;
5892 gimple g;
5893 gimple_seq body = NULL;
5894 struct gimplify_ctx gctx;
5895
5896 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p, ORT_TASK);
5897
5898 push_gimplify_context (&gctx);
5899
/* Same shape as gimplify_omp_parallel: gimplify the body, attaching
   temporaries to its GIMPLE_BIND when one exists.  */
5900 g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
5901 if (gimple_code (g) == GIMPLE_BIND)
5902 pop_gimplify_context (g);
5903 else
5904 pop_gimplify_context (NULL);
5905
5906 gimplify_adjust_omp_clauses (&OMP_TASK_CLAUSES (expr));
5907
5908 g = gimple_build_omp_task (body,
5909 OMP_TASK_CLAUSES (expr),
5910 NULL_TREE, NULL_TREE,
5911 NULL_TREE, NULL_TREE, NULL_TREE);
5912 gimplify_seq_add_stmt (pre_p, g);
/* The tree expression has been fully consumed.  */
5913 *expr_p = NULL_TREE;
5914 }
5915
/* Gimplify the gross structure of an OMP_FOR statement.

   Scans and gimplifies the clauses, pre-body, iteration-variable
   initializers, bounds and increments; canonicalizes each increment into
   VAR = VAR +/- STEP form; then packages everything into a GIMPLE_OMP_FOR
   statement appended to *PRE_P.  Returns GS_ALL_DONE on success or
   GS_ERROR if any subexpression failed to gimplify.  */

static enum gimplify_status
gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
{
  tree for_stmt, decl, var, t;
  enum gimplify_status ret = GS_ALL_DONE;
  enum gimplify_status tret;
  gimple gfor;
  gimple_seq for_body, for_pre_body;
  int i;

  for_stmt = *expr_p;

  gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p,
			     ORT_WORKSHARE);

  /* Handle OMP_FOR_INIT.  The pre-body is gimplified into its own
     sequence and detached from the tree.  */
  for_pre_body = NULL;
  gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
  OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;

  for_body = gimple_seq_alloc ();
  /* The INIT, COND and INCR vectors must describe the same number of
     (possibly collapsed) loops.  */
  gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
	      == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
  gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
	      == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));
  for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
    {
      t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
      gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
      decl = TREE_OPERAND (t, 0);
      gcc_assert (DECL_P (decl));
      gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
		  || POINTER_TYPE_P (TREE_TYPE (decl)));

      /* Make sure the iteration variable is private.  */
      if (omp_is_private (gimplify_omp_ctxp, decl))
	omp_notice_variable (gimplify_omp_ctxp, decl, true);
      else
	omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);

      /* If DECL is not a gimple register, create a temporary variable to act
	 as an iteration counter.  This is valid, since DECL cannot be
	 modified in the body of the loop.  */
      if (!is_gimple_reg (decl))
	{
	  var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
	  TREE_OPERAND (t, 0) = var;

	  /* Copy the counter back into DECL at the top of the body so
	     user code in the loop still sees its own variable.  */
	  gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));

	  omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
	}
      else
	var = decl;

      /* Gimplify the initial value into the pre-body.  */
      tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
			    is_gimple_val, fb_rvalue);
      ret = MIN (ret, tret);
      if (ret == GS_ERROR)
	return ret;

      /* Handle OMP_FOR_COND.  */
      t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
      gcc_assert (COMPARISON_CLASS_P (t));
      gcc_assert (TREE_OPERAND (t, 0) == decl);

      tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
			    is_gimple_val, fb_rvalue);
      ret = MIN (ret, tret);

      /* Handle OMP_FOR_INCR.  Every form is canonicalized into a
	 MODIFY_EXPR whose rhs is VAR +/- STEP with VAR on the left.  */
      t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
      switch (TREE_CODE (t))
	{
	case PREINCREMENT_EXPR:
	case POSTINCREMENT_EXPR:
	  t = build_int_cst (TREE_TYPE (decl), 1);
	  t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
	  t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
	  TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
	  break;

	case PREDECREMENT_EXPR:
	case POSTDECREMENT_EXPR:
	  t = build_int_cst (TREE_TYPE (decl), -1);
	  t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
	  t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
	  TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
	  break;

	case MODIFY_EXPR:
	  gcc_assert (TREE_OPERAND (t, 0) == decl);
	  TREE_OPERAND (t, 0) = var;

	  t = TREE_OPERAND (t, 1);
	  switch (TREE_CODE (t))
	    {
	    case PLUS_EXPR:
	      if (TREE_OPERAND (t, 1) == decl)
		{
		  /* Commute VAR = STEP + VAR into VAR = VAR + STEP.  */
		  TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
		  TREE_OPERAND (t, 0) = var;
		  break;
		}

	      /* Fallthru.  */
	    case MINUS_EXPR:
	    case POINTER_PLUS_EXPR:
	      gcc_assert (TREE_OPERAND (t, 0) == decl);
	      TREE_OPERAND (t, 0) = var;
	      break;
	    default:
	      gcc_unreachable ();
	    }

	  /* Gimplify the step expression into the pre-body.  */
	  tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
				is_gimple_val, fb_rvalue);
	  ret = MIN (ret, tret);
	  break;

	default:
	  gcc_unreachable ();
	}

      if (var != decl || TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1)
	{
	  /* When the iteration variable is lastprivate and its clause has
	     no sequence yet, append DECL = DECL OP STEP to that sequence
	     so DECL receives the post-loop value of the counter.  */
	  tree c;
	  for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
	    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
		&& OMP_CLAUSE_DECL (c) == decl
		&& OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
	      {
		t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
		gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
		gcc_assert (TREE_OPERAND (t, 0) == var);
		t = TREE_OPERAND (t, 1);
		gcc_assert (TREE_CODE (t) == PLUS_EXPR
			    || TREE_CODE (t) == MINUS_EXPR
			    || TREE_CODE (t) == POINTER_PLUS_EXPR);
		gcc_assert (TREE_OPERAND (t, 0) == var);
		t = build2 (TREE_CODE (t), TREE_TYPE (decl), decl,
			    TREE_OPERAND (t, 1));
		gimplify_assign (decl, t,
				 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
	      }
	}
    }

  gimplify_and_add (OMP_FOR_BODY (for_stmt), &for_body);

  gimplify_adjust_omp_clauses (&OMP_FOR_CLAUSES (for_stmt));

  gfor = gimple_build_omp_for (for_body, OMP_FOR_CLAUSES (for_stmt),
			       TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
			       for_pre_body);

  /* Transfer the (now canonicalized) index, initial value, condition,
     final value and increment of each loop into the new statement.  */
  for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
    {
      t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
      gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
      gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
      t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
      gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
      gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
      t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
      gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
    }

  gimplify_seq_add_stmt (pre_p, gfor);
  return ret == GS_ALL_DONE ? GS_ALL_DONE : GS_ERROR;
}
6089
6090 /* Gimplify the gross structure of other OpenMP worksharing constructs.
6091 In particular, OMP_SECTIONS and OMP_SINGLE. */
6092
6093 static void
6094 gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
6095 {
6096 tree expr = *expr_p;
6097 gimple stmt;
6098 gimple_seq body = NULL;
6099
6100 gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ORT_WORKSHARE);
6101 gimplify_and_add (OMP_BODY (expr), &body);
6102 gimplify_adjust_omp_clauses (&OMP_CLAUSES (expr));
6103
6104 if (TREE_CODE (expr) == OMP_SECTIONS)
6105 stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
6106 else if (TREE_CODE (expr) == OMP_SINGLE)
6107 stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
6108 else
6109 gcc_unreachable ();
6110
6111 gimplify_seq_add_stmt (pre_p, stmt);
6112 }
6113
/* A subroutine of gimplify_omp_atomic.  The front end is supposed to have
   stabilized the lhs of the atomic operation as *ADDR.  Return true if
   EXPR is this stabilized form.  */

static bool
goa_lhs_expr_p (tree expr, tree addr)
{
  /* Also include casts to other type variants.  The C front end is fond
     of adding these for e.g. volatile variables.  This is like
     STRIP_TYPE_NOPS but includes the main variant lookup.  */
  while ((CONVERT_EXPR_P (expr)
          || TREE_CODE (expr) == NON_LVALUE_EXPR)
         && TREE_OPERAND (expr, 0) != error_mark_node
         && (TYPE_MAIN_VARIANT (TREE_TYPE (expr))
             == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (expr, 0)))))
    expr = TREE_OPERAND (expr, 0);

  if (TREE_CODE (expr) == INDIRECT_REF)
    {
      /* EXPR is *P for some pointer P.  Strip identical conversion
	 layers off P and ADDR in lock-step (same tree code, same main
	 type variant), stopping as soon as they become the same node.  */
      expr = TREE_OPERAND (expr, 0);
      while (expr != addr
	     && (CONVERT_EXPR_P (expr)
		 || TREE_CODE (expr) == NON_LVALUE_EXPR)
	     && TREE_CODE (expr) == TREE_CODE (addr)
	     && TYPE_MAIN_VARIANT (TREE_TYPE (expr))
		== TYPE_MAIN_VARIANT (TREE_TYPE (addr)))
	{
	  expr = TREE_OPERAND (expr, 0);
	  addr = TREE_OPERAND (addr, 0);
	}
      if (expr == addr)
	return true;
      /* Also accept two distinct ADDR_EXPRs of the very same object.  */
      return (TREE_CODE (addr) == ADDR_EXPR
	      && TREE_CODE (expr) == ADDR_EXPR
	      && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
    }
  /* A bare object matches when ADDR is literally its address.  */
  if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
    return true;
  return false;
}
6154
/* Walk *EXPR_P and replace
   appearances of *LHS_ADDR with LHS_VAR.  If an expression does not involve
   the lhs, evaluate it into a temporary.  Return 1 if the lhs appeared as
   a subexpression, 0 if it did not, or -1 if an error was encountered.  */

static int
goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
		    tree lhs_var)
{
  tree expr = *expr_p;
  int saw_lhs;

  /* The whole expression is the stabilized lhs: replace it outright.  */
  if (goa_lhs_expr_p (expr, lhs_addr))
    {
      *expr_p = lhs_var;
      return 1;
    }
  /* Anything already in gimple-value form needs no stabilization.  */
  if (is_gimple_val (expr))
    return 0;

  saw_lhs = 0;
  switch (TREE_CODE_CLASS (TREE_CODE (expr)))
    {
    case tcc_binary:
    case tcc_comparison:
      saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
				     lhs_var);
      /* FALLTHRU -- operand 0 is handled by the unary case below.  */
    case tcc_unary:
      saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
				     lhs_var);
      break;
    case tcc_expression:
      switch (TREE_CODE (expr))
	{
	case TRUTH_ANDIF_EXPR:
	case TRUTH_ORIF_EXPR:
	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
					 lhs_addr, lhs_var);
	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
					 lhs_addr, lhs_var);
	  break;
	default:
	  break;
	}
      break;
    default:
      break;
    }

  /* If the lhs never appeared in this subtree, the subtree does not
     depend on the atomic location; evaluate it into a temporary ahead
     of the atomic region.  */
  if (saw_lhs == 0)
    {
      enum gimplify_status gs;
      gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
      if (gs != GS_ALL_DONE)
	saw_lhs = -1;
    }

  return saw_lhs;
}
6214
6215
/* Gimplify an OMP_ATOMIC statement.

   Operand 0 is the stabilized address of the atomic location, operand 1
   the value expression to store.  Lowers the statement into a
   GIMPLE_OMP_ATOMIC_LOAD / GIMPLE_OMP_ATOMIC_STORE pair appended to
   *PRE_P.  Returns GS_ALL_DONE, or GS_ERROR on gimplification
   failure.  */

static enum gimplify_status
gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
{
  tree addr = TREE_OPERAND (*expr_p, 0);
  tree rhs = TREE_OPERAND (*expr_p, 1);
  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
  tree tmp_load;

  /* TMP_LOAD receives the atomically loaded old value; every occurrence
     of *ADDR inside RHS is rewritten to use it.  */
  tmp_load = create_tmp_var (type, NULL);
  if (goa_stabilize_expr (&rhs, pre_p, addr, tmp_load) < 0)
    return GS_ERROR;

  if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
      != GS_ALL_DONE)
    return GS_ERROR;

  /* Emit the load, then the (gimplified) RHS computation, then the
     store -- the order here defines the atomic region.  */
  gimplify_seq_add_stmt (pre_p, gimple_build_omp_atomic_load (tmp_load, addr));
  if (gimplify_expr (&rhs, pre_p, NULL, is_gimple_val, fb_rvalue)
      != GS_ALL_DONE)
    return GS_ERROR;
  gimplify_seq_add_stmt (pre_p, gimple_build_omp_atomic_store (rhs));
  /* The statement has been fully consumed into PRE_P.  */
  *expr_p = NULL;

  return GS_ALL_DONE;
}
6243
6244
6245 /* Converts the GENERIC expression tree *EXPR_P to GIMPLE. If the
6246 expression produces a value to be used as an operand inside a GIMPLE
6247 statement, the value will be stored back in *EXPR_P. This value will
6248 be a tree of class tcc_declaration, tcc_constant, tcc_reference or
6249 an SSA_NAME. The corresponding sequence of GIMPLE statements is
6250 emitted in PRE_P and POST_P.
6251
6252 Additionally, this process may overwrite parts of the input
6253 expression during gimplification. Ideally, it should be
6254 possible to do non-destructive gimplification.
6255
6256 EXPR_P points to the GENERIC expression to convert to GIMPLE. If
6257 the expression needs to evaluate to a value to be used as
6258 an operand in a GIMPLE statement, this value will be stored in
6259 *EXPR_P on exit. This happens when the caller specifies one
6260 of fb_lvalue or fb_rvalue fallback flags.
6261
6262 PRE_P will contain the sequence of GIMPLE statements corresponding
6263 to the evaluation of EXPR and all the side-effects that must
6264 be executed before the main expression. On exit, the last
6265 statement of PRE_P is the core statement being gimplified. For
6266 instance, when gimplifying 'if (++a)' the last statement in
6267 PRE_P will be 'if (t.1)' where t.1 is the result of
6268 pre-incrementing 'a'.
6269
6270 POST_P will contain the sequence of GIMPLE statements corresponding
6271 to the evaluation of all the side-effects that must be executed
6272 after the main expression. If this is NULL, the post
6273 side-effects are stored at the end of PRE_P.
6274
6275 The reason why the output is split in two is to handle post
6276 side-effects explicitly. In some cases, an expression may have
6277 inner and outer post side-effects which need to be emitted in
6278 an order different from the one given by the recursive
6279 traversal. For instance, for the expression (*p--)++ the post
6280 side-effects of '--' must actually occur *after* the post
6281 side-effects of '++'. However, gimplification will first visit
6282 the inner expression, so if a separate POST sequence was not
6283 used, the resulting sequence would be:
6284
6285 1 t.1 = *p
6286 2 p = p - 1
6287 3 t.2 = t.1 + 1
6288 4 *p = t.2
6289
6290 However, the post-decrement operation in line #2 must not be
6291 evaluated until after the store to *p at line #4, so the
6292 correct sequence should be:
6293
6294 1 t.1 = *p
6295 2 t.2 = t.1 + 1
6296 3 *p = t.2
6297 4 p = p - 1
6298
6299 So, by specifying a separate post queue, it is possible
6300 to emit the post side-effects in the correct order.
6301 If POST_P is NULL, an internal queue will be used. Before
6302 returning to the caller, the sequence POST_P is appended to
6303 the main output sequence PRE_P.
6304
6305 GIMPLE_TEST_F points to a function that takes a tree T and
6306 returns nonzero if T is in the GIMPLE form requested by the
6307 caller. The GIMPLE predicates are in tree-gimple.c.
6308
6309 FALLBACK tells the function what sort of a temporary we want if
6310 gimplification cannot produce an expression that complies with
6311 GIMPLE_TEST_F.
6312
6313 fb_none means that no temporary should be generated
6314 fb_rvalue means that an rvalue is OK to generate
6315 fb_lvalue means that an lvalue is OK to generate
6316 fb_either means that either is OK, but an lvalue is preferable.
6317 fb_mayfail means that gimplification may fail (in which case
6318 GS_ERROR will be returned)
6319
6320 The return value is either GS_ERROR or GS_ALL_DONE, since this
6321 function iterates until EXPR is completely gimplified or an error
6322 occurs. */
6323
6324 enum gimplify_status
6325 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
6326 bool (*gimple_test_f) (tree), fallback_t fallback)
6327 {
6328 tree tmp;
6329 gimple_seq internal_pre = NULL;
6330 gimple_seq internal_post = NULL;
6331 tree save_expr;
6332 bool is_statement;
6333 location_t saved_location;
6334 enum gimplify_status ret;
6335 gimple_stmt_iterator pre_last_gsi, post_last_gsi;
6336
6337 save_expr = *expr_p;
6338 if (save_expr == NULL_TREE)
6339 return GS_ALL_DONE;
6340
6341 /* If we are gimplifying a top-level statement, PRE_P must be valid. */
6342 is_statement = gimple_test_f == is_gimple_stmt;
6343 if (is_statement)
6344 gcc_assert (pre_p);
6345
6346 /* Consistency checks. */
6347 if (gimple_test_f == is_gimple_reg)
6348 gcc_assert (fallback & (fb_rvalue | fb_lvalue));
6349 else if (gimple_test_f == is_gimple_val
6350 || gimple_test_f == is_gimple_call_addr
6351 || gimple_test_f == is_gimple_condexpr
6352 || gimple_test_f == is_gimple_mem_rhs
6353 || gimple_test_f == is_gimple_mem_rhs_or_call
6354 || gimple_test_f == is_gimple_reg_rhs
6355 || gimple_test_f == is_gimple_reg_rhs_or_call
6356 || gimple_test_f == is_gimple_asm_val)
6357 gcc_assert (fallback & fb_rvalue);
6358 else if (gimple_test_f == is_gimple_min_lval
6359 || gimple_test_f == is_gimple_lvalue)
6360 gcc_assert (fallback & fb_lvalue);
6361 else if (gimple_test_f == is_gimple_addressable)
6362 gcc_assert (fallback & fb_either);
6363 else if (gimple_test_f == is_gimple_stmt)
6364 gcc_assert (fallback == fb_none);
6365 else
6366 {
6367 /* We should have recognized the GIMPLE_TEST_F predicate to
6368 know what kind of fallback to use in case a temporary is
6369 needed to hold the value or address of *EXPR_P. */
6370 gcc_unreachable ();
6371 }
6372
6373 /* We used to check the predicate here and return immediately if it
6374 succeeds. This is wrong; the design is for gimplification to be
6375 idempotent, and for the predicates to only test for valid forms, not
6376 whether they are fully simplified. */
6377 if (pre_p == NULL)
6378 pre_p = &internal_pre;
6379
6380 if (post_p == NULL)
6381 post_p = &internal_post;
6382
6383 /* Remember the last statements added to PRE_P and POST_P. Every
6384 new statement added by the gimplification helpers needs to be
6385 annotated with location information. To centralize the
6386 responsibility, we remember the last statement that had been
6387 added to both queues before gimplifying *EXPR_P. If
6388 gimplification produces new statements in PRE_P and POST_P, those
6389 statements will be annotated with the same location information
6390 as *EXPR_P. */
6391 pre_last_gsi = gsi_last (*pre_p);
6392 post_last_gsi = gsi_last (*post_p);
6393
6394 saved_location = input_location;
6395 if (save_expr != error_mark_node
6396 && EXPR_HAS_LOCATION (*expr_p))
6397 input_location = EXPR_LOCATION (*expr_p);
6398
6399 /* Loop over the specific gimplifiers until the toplevel node
6400 remains the same. */
6401 do
6402 {
6403 /* Strip away as many useless type conversions as possible
6404 at the toplevel. */
6405 STRIP_USELESS_TYPE_CONVERSION (*expr_p);
6406
6407 /* Remember the expr. */
6408 save_expr = *expr_p;
6409
6410 /* Die, die, die, my darling. */
6411 if (save_expr == error_mark_node
6412 || (TREE_TYPE (save_expr)
6413 && TREE_TYPE (save_expr) == error_mark_node))
6414 {
6415 ret = GS_ERROR;
6416 break;
6417 }
6418
6419 /* Do any language-specific gimplification. */
6420 ret = ((enum gimplify_status)
6421 lang_hooks.gimplify_expr (expr_p, pre_p, post_p));
6422 if (ret == GS_OK)
6423 {
6424 if (*expr_p == NULL_TREE)
6425 break;
6426 if (*expr_p != save_expr)
6427 continue;
6428 }
6429 else if (ret != GS_UNHANDLED)
6430 break;
6431
6432 ret = GS_OK;
6433 switch (TREE_CODE (*expr_p))
6434 {
6435 /* First deal with the special cases. */
6436
6437 case POSTINCREMENT_EXPR:
6438 case POSTDECREMENT_EXPR:
6439 case PREINCREMENT_EXPR:
6440 case PREDECREMENT_EXPR:
6441 ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
6442 fallback != fb_none);
6443 break;
6444
6445 case ARRAY_REF:
6446 case ARRAY_RANGE_REF:
6447 case REALPART_EXPR:
6448 case IMAGPART_EXPR:
6449 case COMPONENT_REF:
6450 case VIEW_CONVERT_EXPR:
6451 ret = gimplify_compound_lval (expr_p, pre_p, post_p,
6452 fallback ? fallback : fb_rvalue);
6453 break;
6454
6455 case COND_EXPR:
6456 ret = gimplify_cond_expr (expr_p, pre_p, fallback);
6457
6458 /* C99 code may assign to an array in a structure value of a
6459 conditional expression, and this has undefined behavior
6460 only on execution, so create a temporary if an lvalue is
6461 required. */
6462 if (fallback == fb_lvalue)
6463 {
6464 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
6465 mark_addressable (*expr_p);
6466 }
6467 break;
6468
6469 case CALL_EXPR:
6470 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
6471
6472 /* C99 code may assign to an array in a structure returned
6473 from a function, and this has undefined behavior only on
6474 execution, so create a temporary if an lvalue is
6475 required. */
6476 if (fallback == fb_lvalue)
6477 {
6478 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
6479 mark_addressable (*expr_p);
6480 }
6481 break;
6482
6483 case TREE_LIST:
6484 gcc_unreachable ();
6485
6486 case COMPOUND_EXPR:
6487 ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
6488 break;
6489
6490 case COMPOUND_LITERAL_EXPR:
6491 ret = gimplify_compound_literal_expr (expr_p, pre_p);
6492 break;
6493
6494 case MODIFY_EXPR:
6495 case INIT_EXPR:
6496 ret = gimplify_modify_expr (expr_p, pre_p, post_p,
6497 fallback != fb_none);
6498 break;
6499
6500 case TRUTH_ANDIF_EXPR:
6501 case TRUTH_ORIF_EXPR:
6502 /* Pass the source location of the outer expression. */
6503 ret = gimplify_boolean_expr (expr_p, saved_location);
6504 break;
6505
6506 case TRUTH_NOT_EXPR:
6507 if (TREE_CODE (TREE_TYPE (*expr_p)) != BOOLEAN_TYPE)
6508 {
6509 tree type = TREE_TYPE (*expr_p);
6510 *expr_p = fold_convert (type, gimple_boolify (*expr_p));
6511 ret = GS_OK;
6512 break;
6513 }
6514
6515 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
6516 is_gimple_val, fb_rvalue);
6517 recalculate_side_effects (*expr_p);
6518 break;
6519
6520 case ADDR_EXPR:
6521 ret = gimplify_addr_expr (expr_p, pre_p, post_p);
6522 break;
6523
6524 case VA_ARG_EXPR:
6525 ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
6526 break;
6527
6528 CASE_CONVERT:
6529 if (IS_EMPTY_STMT (*expr_p))
6530 {
6531 ret = GS_ALL_DONE;
6532 break;
6533 }
6534
6535 if (VOID_TYPE_P (TREE_TYPE (*expr_p))
6536 || fallback == fb_none)
6537 {
6538 /* Just strip a conversion to void (or in void context) and
6539 try again. */
6540 *expr_p = TREE_OPERAND (*expr_p, 0);
6541 break;
6542 }
6543
6544 ret = gimplify_conversion (expr_p);
6545 if (ret == GS_ERROR)
6546 break;
6547 if (*expr_p != save_expr)
6548 break;
6549 /* FALLTHRU */
6550
6551 case FIX_TRUNC_EXPR:
6552 /* unary_expr: ... | '(' cast ')' val | ... */
6553 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
6554 is_gimple_val, fb_rvalue);
6555 recalculate_side_effects (*expr_p);
6556 break;
6557
6558 case INDIRECT_REF:
6559 *expr_p = fold_indirect_ref (*expr_p);
6560 if (*expr_p != save_expr)
6561 break;
6562 /* else fall through. */
6563 case ALIGN_INDIRECT_REF:
6564 case MISALIGNED_INDIRECT_REF:
6565 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
6566 is_gimple_reg, fb_rvalue);
6567 recalculate_side_effects (*expr_p);
6568 break;
6569
6570 /* Constants need not be gimplified. */
6571 case INTEGER_CST:
6572 case REAL_CST:
6573 case FIXED_CST:
6574 case STRING_CST:
6575 case COMPLEX_CST:
6576 case VECTOR_CST:
6577 ret = GS_ALL_DONE;
6578 break;
6579
6580 case CONST_DECL:
6581 /* If we require an lvalue, such as for ADDR_EXPR, retain the
6582 CONST_DECL node. Otherwise the decl is replaceable by its
6583 value. */
6584 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
6585 if (fallback & fb_lvalue)
6586 ret = GS_ALL_DONE;
6587 else
6588 *expr_p = DECL_INITIAL (*expr_p);
6589 break;
6590
6591 case DECL_EXPR:
6592 ret = gimplify_decl_expr (expr_p, pre_p);
6593 break;
6594
6595 case EXC_PTR_EXPR:
6596 /* FIXME make this a decl. */
6597 ret = GS_ALL_DONE;
6598 break;
6599
6600 case BIND_EXPR:
6601 ret = gimplify_bind_expr (expr_p, pre_p);
6602 break;
6603
6604 case LOOP_EXPR:
6605 ret = gimplify_loop_expr (expr_p, pre_p);
6606 break;
6607
6608 case SWITCH_EXPR:
6609 ret = gimplify_switch_expr (expr_p, pre_p);
6610 break;
6611
6612 case EXIT_EXPR:
6613 ret = gimplify_exit_expr (expr_p);
6614 break;
6615
6616 case GOTO_EXPR:
6617 /* If the target is not LABEL, then it is a computed jump
6618 and the target needs to be gimplified. */
6619 if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
6620 {
6621 ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
6622 NULL, is_gimple_val, fb_rvalue);
6623 if (ret == GS_ERROR)
6624 break;
6625 }
6626 gimplify_seq_add_stmt (pre_p,
6627 gimple_build_goto (GOTO_DESTINATION (*expr_p)));
6628 break;
6629
6630 case PREDICT_EXPR:
6631 gimplify_seq_add_stmt (pre_p,
6632 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
6633 PREDICT_EXPR_OUTCOME (*expr_p)));
6634 ret = GS_ALL_DONE;
6635 break;
6636
6637 case LABEL_EXPR:
6638 ret = GS_ALL_DONE;
6639 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
6640 == current_function_decl);
6641 gimplify_seq_add_stmt (pre_p,
6642 gimple_build_label (LABEL_EXPR_LABEL (*expr_p)));
6643 break;
6644
6645 case CASE_LABEL_EXPR:
6646 ret = gimplify_case_label_expr (expr_p, pre_p);
6647 break;
6648
6649 case RETURN_EXPR:
6650 ret = gimplify_return_expr (*expr_p, pre_p);
6651 break;
6652
6653 case CONSTRUCTOR:
6654 /* Don't reduce this in place; let gimplify_init_constructor work its
6655 magic. Buf if we're just elaborating this for side effects, just
6656 gimplify any element that has side-effects. */
6657 if (fallback == fb_none)
6658 {
6659 unsigned HOST_WIDE_INT ix;
6660 constructor_elt *ce;
6661 tree temp = NULL_TREE;
6662 for (ix = 0;
6663 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (*expr_p),
6664 ix, ce);
6665 ix++)
6666 if (TREE_SIDE_EFFECTS (ce->value))
6667 append_to_statement_list (ce->value, &temp);
6668
6669 *expr_p = temp;
6670 ret = GS_OK;
6671 }
6672 /* C99 code may assign to an array in a constructed
6673 structure or union, and this has undefined behavior only
6674 on execution, so create a temporary if an lvalue is
6675 required. */
6676 else if (fallback == fb_lvalue)
6677 {
6678 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
6679 mark_addressable (*expr_p);
6680 }
6681 else
6682 ret = GS_ALL_DONE;
6683 break;
6684
6685 /* The following are special cases that are not handled by the
6686 original GIMPLE grammar. */
6687
6688 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
6689 eliminated. */
6690 case SAVE_EXPR:
6691 ret = gimplify_save_expr (expr_p, pre_p, post_p);
6692 break;
6693
6694 case BIT_FIELD_REF:
6695 {
6696 enum gimplify_status r0, r1, r2;
6697
6698 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
6699 post_p, is_gimple_lvalue, fb_either);
6700 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
6701 post_p, is_gimple_val, fb_rvalue);
6702 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
6703 post_p, is_gimple_val, fb_rvalue);
6704 recalculate_side_effects (*expr_p);
6705
6706 ret = MIN (r0, MIN (r1, r2));
6707 }
6708 break;
6709
6710 case TARGET_MEM_REF:
6711 {
6712 enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;
6713
6714 if (TMR_SYMBOL (*expr_p))
6715 r0 = gimplify_expr (&TMR_SYMBOL (*expr_p), pre_p,
6716 post_p, is_gimple_lvalue, fb_either);
6717 else if (TMR_BASE (*expr_p))
6718 r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
6719 post_p, is_gimple_val, fb_either);
6720 if (TMR_INDEX (*expr_p))
6721 r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
6722 post_p, is_gimple_val, fb_rvalue);
6723 /* TMR_STEP and TMR_OFFSET are always integer constants. */
6724 ret = MIN (r0, r1);
6725 }
6726 break;
6727
6728 case NON_LVALUE_EXPR:
6729 /* This should have been stripped above. */
6730 gcc_unreachable ();
6731
6732 case ASM_EXPR:
6733 ret = gimplify_asm_expr (expr_p, pre_p, post_p);
6734 break;
6735
6736 case TRY_FINALLY_EXPR:
6737 case TRY_CATCH_EXPR:
6738 {
6739 gimple_seq eval, cleanup;
6740 gimple try_;
6741
6742 eval = cleanup = NULL;
6743 gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
6744 gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
6745 /* Don't create bogus GIMPLE_TRY with empty cleanup. */
6746 if (gimple_seq_empty_p (cleanup))
6747 {
6748 gimple_seq_add_seq (pre_p, eval);
6749 ret = GS_ALL_DONE;
6750 break;
6751 }
6752 try_ = gimple_build_try (eval, cleanup,
6753 TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
6754 ? GIMPLE_TRY_FINALLY
6755 : GIMPLE_TRY_CATCH);
6756 if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
6757 gimple_try_set_catch_is_cleanup (try_,
6758 TRY_CATCH_IS_CLEANUP (*expr_p));
6759 gimplify_seq_add_stmt (pre_p, try_);
6760 ret = GS_ALL_DONE;
6761 break;
6762 }
6763
6764 case CLEANUP_POINT_EXPR:
6765 ret = gimplify_cleanup_point_expr (expr_p, pre_p);
6766 break;
6767
6768 case TARGET_EXPR:
6769 ret = gimplify_target_expr (expr_p, pre_p, post_p);
6770 break;
6771
6772 case CATCH_EXPR:
6773 {
6774 gimple c;
6775 gimple_seq handler = NULL;
6776 gimplify_and_add (CATCH_BODY (*expr_p), &handler);
6777 c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
6778 gimplify_seq_add_stmt (pre_p, c);
6779 ret = GS_ALL_DONE;
6780 break;
6781 }
6782
6783 case EH_FILTER_EXPR:
6784 {
6785 gimple ehf;
6786 gimple_seq failure = NULL;
6787
6788 gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
6789 ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
6790 gimple_set_no_warning (ehf, TREE_NO_WARNING (*expr_p));
6791 gimple_eh_filter_set_must_not_throw
6792 (ehf, EH_FILTER_MUST_NOT_THROW (*expr_p));
6793 gimplify_seq_add_stmt (pre_p, ehf);
6794 ret = GS_ALL_DONE;
6795 break;
6796 }
6797
6798 case OBJ_TYPE_REF:
6799 {
6800 enum gimplify_status r0, r1;
6801 r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
6802 post_p, is_gimple_val, fb_rvalue);
6803 r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
6804 post_p, is_gimple_val, fb_rvalue);
6805 TREE_SIDE_EFFECTS (*expr_p) = 0;
6806 ret = MIN (r0, r1);
6807 }
6808 break;
6809
6810 case LABEL_DECL:
6811 /* We get here when taking the address of a label. We mark
6812 the label as "forced"; meaning it can never be removed and
6813 it is a potential target for any computed goto. */
6814 FORCED_LABEL (*expr_p) = 1;
6815 ret = GS_ALL_DONE;
6816 break;
6817
6818 case STATEMENT_LIST:
6819 ret = gimplify_statement_list (expr_p, pre_p);
6820 break;
6821
6822 case WITH_SIZE_EXPR:
6823 {
6824 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
6825 post_p == &internal_post ? NULL : post_p,
6826 gimple_test_f, fallback);
6827 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
6828 is_gimple_val, fb_rvalue);
6829 }
6830 break;
6831
6832 case VAR_DECL:
6833 case PARM_DECL:
6834 ret = gimplify_var_or_parm_decl (expr_p);
6835 break;
6836
6837 case RESULT_DECL:
6838 /* When within an OpenMP context, notice uses of variables. */
6839 if (gimplify_omp_ctxp)
6840 omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
6841 ret = GS_ALL_DONE;
6842 break;
6843
6844 case SSA_NAME:
6845 /* Allow callbacks into the gimplifier during optimization. */
6846 ret = GS_ALL_DONE;
6847 break;
6848
6849 case OMP_PARALLEL:
6850 gimplify_omp_parallel (expr_p, pre_p);
6851 ret = GS_ALL_DONE;
6852 break;
6853
6854 case OMP_TASK:
6855 gimplify_omp_task (expr_p, pre_p);
6856 ret = GS_ALL_DONE;
6857 break;
6858
6859 case OMP_FOR:
6860 ret = gimplify_omp_for (expr_p, pre_p);
6861 break;
6862
6863 case OMP_SECTIONS:
6864 case OMP_SINGLE:
6865 gimplify_omp_workshare (expr_p, pre_p);
6866 ret = GS_ALL_DONE;
6867 break;
6868
6869 case OMP_SECTION:
6870 case OMP_MASTER:
6871 case OMP_ORDERED:
6872 case OMP_CRITICAL:
6873 {
6874 gimple_seq body = NULL;
6875 gimple g;
6876
6877 gimplify_and_add (OMP_BODY (*expr_p), &body);
6878 switch (TREE_CODE (*expr_p))
6879 {
6880 case OMP_SECTION:
6881 g = gimple_build_omp_section (body);
6882 break;
6883 case OMP_MASTER:
6884 g = gimple_build_omp_master (body);
6885 break;
6886 case OMP_ORDERED:
6887 g = gimple_build_omp_ordered (body);
6888 break;
6889 case OMP_CRITICAL:
6890 g = gimple_build_omp_critical (body,
6891 OMP_CRITICAL_NAME (*expr_p));
6892 break;
6893 default:
6894 gcc_unreachable ();
6895 }
6896 gimplify_seq_add_stmt (pre_p, g);
6897 ret = GS_ALL_DONE;
6898 break;
6899 }
6900
6901 case OMP_ATOMIC:
6902 ret = gimplify_omp_atomic (expr_p, pre_p);
6903 break;
6904
6905 case POINTER_PLUS_EXPR:
6906 /* Convert ((type *)A)+offset into &A->field_of_type_and_offset.
6907 The second is gimple immediate saving a need for extra statement.
6908 */
6909 if (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == INTEGER_CST
6910 && (tmp = maybe_fold_offset_to_address
6911 (EXPR_LOCATION (*expr_p),
6912 TREE_OPERAND (*expr_p, 0), TREE_OPERAND (*expr_p, 1),
6913 TREE_TYPE (*expr_p))))
6914 {
6915 *expr_p = tmp;
6916 break;
6917 }
6918 /* Convert (void *)&a + 4 into (void *)&a[1]. */
6919 if (TREE_CODE (TREE_OPERAND (*expr_p, 0)) == NOP_EXPR
6920 && TREE_CODE (TREE_OPERAND (*expr_p, 1)) == INTEGER_CST
6921 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*expr_p,
6922 0),0)))
6923 && (tmp = maybe_fold_offset_to_address
6924 (EXPR_LOCATION (*expr_p),
6925 TREE_OPERAND (TREE_OPERAND (*expr_p, 0), 0),
6926 TREE_OPERAND (*expr_p, 1),
6927 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*expr_p, 0),
6928 0)))))
6929 {
6930 *expr_p = fold_convert (TREE_TYPE (*expr_p), tmp);
6931 break;
6932 }
6933 /* FALLTHRU */
6934
6935 default:
6936 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
6937 {
6938 case tcc_comparison:
6939 /* Handle comparison of objects of non scalar mode aggregates
6940 with a call to memcmp. It would be nice to only have to do
6941 this for variable-sized objects, but then we'd have to allow
6942 the same nest of reference nodes we allow for MODIFY_EXPR and
6943 that's too complex.
6944
6945 Compare scalar mode aggregates as scalar mode values. Using
6946 memcmp for them would be very inefficient at best, and is
6947 plain wrong if bitfields are involved. */
6948 {
6949 tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
6950
6951 if (!AGGREGATE_TYPE_P (type))
6952 goto expr_2;
6953 else if (TYPE_MODE (type) != BLKmode)
6954 ret = gimplify_scalar_mode_aggregate_compare (expr_p);
6955 else
6956 ret = gimplify_variable_sized_compare (expr_p);
6957
6958 break;
6959 }
6960
6961 /* If *EXPR_P does not need to be special-cased, handle it
6962 according to its class. */
6963 case tcc_unary:
6964 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
6965 post_p, is_gimple_val, fb_rvalue);
6966 break;
6967
6968 case tcc_binary:
6969 expr_2:
6970 {
6971 enum gimplify_status r0, r1;
6972
6973 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
6974 post_p, is_gimple_val, fb_rvalue);
6975 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
6976 post_p, is_gimple_val, fb_rvalue);
6977
6978 ret = MIN (r0, r1);
6979 break;
6980 }
6981
6982 case tcc_declaration:
6983 case tcc_constant:
6984 ret = GS_ALL_DONE;
6985 goto dont_recalculate;
6986
6987 default:
6988 gcc_assert (TREE_CODE (*expr_p) == TRUTH_AND_EXPR
6989 || TREE_CODE (*expr_p) == TRUTH_OR_EXPR
6990 || TREE_CODE (*expr_p) == TRUTH_XOR_EXPR);
6991 goto expr_2;
6992 }
6993
6994 recalculate_side_effects (*expr_p);
6995
6996 dont_recalculate:
6997 break;
6998 }
6999
7000 /* If we replaced *expr_p, gimplify again. */
7001 if (ret == GS_OK && (*expr_p == NULL || *expr_p == save_expr))
7002 ret = GS_ALL_DONE;
7003 }
7004 while (ret == GS_OK);
7005
7006 /* If we encountered an error_mark somewhere nested inside, either
7007 stub out the statement or propagate the error back out. */
7008 if (ret == GS_ERROR)
7009 {
7010 if (is_statement)
7011 *expr_p = NULL;
7012 goto out;
7013 }
7014
7015 /* This was only valid as a return value from the langhook, which
7016 we handled. Make sure it doesn't escape from any other context. */
7017 gcc_assert (ret != GS_UNHANDLED);
7018
7019 if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
7020 {
7021 /* We aren't looking for a value, and we don't have a valid
7022 statement. If it doesn't have side-effects, throw it away. */
7023 if (!TREE_SIDE_EFFECTS (*expr_p))
7024 *expr_p = NULL;
7025 else if (!TREE_THIS_VOLATILE (*expr_p))
7026 {
7027 /* This is probably a _REF that contains something nested that
7028 has side effects. Recurse through the operands to find it. */
7029 enum tree_code code = TREE_CODE (*expr_p);
7030
7031 switch (code)
7032 {
7033 case COMPONENT_REF:
7034 case REALPART_EXPR:
7035 case IMAGPART_EXPR:
7036 case VIEW_CONVERT_EXPR:
7037 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
7038 gimple_test_f, fallback);
7039 break;
7040
7041 case ARRAY_REF:
7042 case ARRAY_RANGE_REF:
7043 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
7044 gimple_test_f, fallback);
7045 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
7046 gimple_test_f, fallback);
7047 break;
7048
7049 default:
7050 /* Anything else with side-effects must be converted to
7051 a valid statement before we get here. */
7052 gcc_unreachable ();
7053 }
7054
7055 *expr_p = NULL;
7056 }
7057 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
7058 && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode)
7059 {
7060 /* Historically, the compiler has treated a bare reference
7061 to a non-BLKmode volatile lvalue as forcing a load. */
7062 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
7063
7064 /* Normally, we do not want to create a temporary for a
7065 TREE_ADDRESSABLE type because such a type should not be
7066 copied by bitwise-assignment. However, we make an
7067 exception here, as all we are doing here is ensuring that
7068 we read the bytes that make up the type. We use
7069 create_tmp_var_raw because create_tmp_var will abort when
7070 given a TREE_ADDRESSABLE type. */
7071 tree tmp = create_tmp_var_raw (type, "vol");
7072 gimple_add_tmp_var (tmp);
7073 gimplify_assign (tmp, *expr_p, pre_p);
7074 *expr_p = NULL;
7075 }
7076 else
7077 /* We can't do anything useful with a volatile reference to
7078 an incomplete type, so just throw it away. Likewise for
7079 a BLKmode type, since any implicit inner load should
7080 already have been turned into an explicit one by the
7081 gimplification process. */
7082 *expr_p = NULL;
7083 }
7084
7085 /* If we are gimplifying at the statement level, we're done. Tack
7086 everything together and return. */
7087 if (fallback == fb_none || is_statement)
7088 {
7089 /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
7090 it out for GC to reclaim it. */
7091 *expr_p = NULL_TREE;
7092
7093 if (!gimple_seq_empty_p (internal_pre)
7094 || !gimple_seq_empty_p (internal_post))
7095 {
7096 gimplify_seq_add_seq (&internal_pre, internal_post);
7097 gimplify_seq_add_seq (pre_p, internal_pre);
7098 }
7099
7100 /* The result of gimplifying *EXPR_P is going to be the last few
7101 statements in *PRE_P and *POST_P. Add location information
7102 to all the statements that were added by the gimplification
7103 helpers. */
7104 if (!gimple_seq_empty_p (*pre_p))
7105 annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);
7106
7107 if (!gimple_seq_empty_p (*post_p))
7108 annotate_all_with_location_after (*post_p, post_last_gsi,
7109 input_location);
7110
7111 goto out;
7112 }
7113
7114 #ifdef ENABLE_GIMPLE_CHECKING
7115 if (*expr_p)
7116 {
7117 enum tree_code code = TREE_CODE (*expr_p);
7118 /* These expressions should already be in gimple IR form. */
7119 gcc_assert (code != MODIFY_EXPR
7120 && code != ASM_EXPR
7121 && code != BIND_EXPR
7122 && code != CATCH_EXPR
7123 && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
7124 && code != EH_FILTER_EXPR
7125 && code != GOTO_EXPR
7126 && code != LABEL_EXPR
7127 && code != LOOP_EXPR
7128 && code != RESX_EXPR
7129 && code != SWITCH_EXPR
7130 && code != TRY_FINALLY_EXPR
7131 && code != OMP_CRITICAL
7132 && code != OMP_FOR
7133 && code != OMP_MASTER
7134 && code != OMP_ORDERED
7135 && code != OMP_PARALLEL
7136 && code != OMP_SECTIONS
7137 && code != OMP_SECTION
7138 && code != OMP_SINGLE);
7139 }
7140 #endif
7141
7142 /* Otherwise we're gimplifying a subexpression, so the resulting
7143 value is interesting. If it's a valid operand that matches
7144 GIMPLE_TEST_F, we're done. Unless we are handling some
7145 post-effects internally; if that's the case, we need to copy into
7146 a temporary before adding the post-effects to POST_P. */
7147 if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
7148 goto out;
7149
7150 /* Otherwise, we need to create a new temporary for the gimplified
7151 expression. */
7152
7153 /* We can't return an lvalue if we have an internal postqueue. The
7154 object the lvalue refers to would (probably) be modified by the
7155 postqueue; we need to copy the value out first, which means an
7156 rvalue. */
7157 if ((fallback & fb_lvalue)
7158 && gimple_seq_empty_p (internal_post)
7159 && is_gimple_addressable (*expr_p))
7160 {
7161 /* An lvalue will do. Take the address of the expression, store it
7162 in a temporary, and replace the expression with an INDIRECT_REF of
7163 that temporary. */
7164 tmp = build_fold_addr_expr (*expr_p);
7165 gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
7166 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (tmp)), tmp);
7167 }
7168 else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
7169 {
7170 /* An rvalue will do. Assign the gimplified expression into a
7171 new temporary TMP and replace the original expression with
7172 TMP. First, make sure that the expression has a type so that
7173 it can be assigned into a temporary. */
7174 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
7175
7176 if (!gimple_seq_empty_p (internal_post) || (fallback & fb_lvalue))
7177 /* The postqueue might change the value of the expression between
7178 the initialization and use of the temporary, so we can't use a
7179 formal temp. FIXME do we care? */
7180 {
7181 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
7182 if (TREE_CODE (TREE_TYPE (*expr_p)) == COMPLEX_TYPE
7183 || TREE_CODE (TREE_TYPE (*expr_p)) == VECTOR_TYPE)
7184 DECL_GIMPLE_REG_P (*expr_p) = 1;
7185 }
7186 else
7187 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
7188 }
7189 else
7190 {
7191 #ifdef ENABLE_GIMPLE_CHECKING
7192 if (!(fallback & fb_mayfail))
7193 {
7194 fprintf (stderr, "gimplification failed:\n");
7195 print_generic_expr (stderr, *expr_p, 0);
7196 debug_tree (*expr_p);
7197 internal_error ("gimplification failed");
7198 }
7199 #endif
7200 gcc_assert (fallback & fb_mayfail);
7201
7202 /* If this is an asm statement, and the user asked for the
7203 impossible, don't die. Fail and let gimplify_asm_expr
7204 issue an error. */
7205 ret = GS_ERROR;
7206 goto out;
7207 }
7208
7209 /* Make sure the temporary matches our predicate. */
7210 gcc_assert ((*gimple_test_f) (*expr_p));
7211
7212 if (!gimple_seq_empty_p (internal_post))
7213 {
7214 annotate_all_with_location (internal_post, input_location);
7215 gimplify_seq_add_seq (pre_p, internal_post);
7216 }
7217
7218 out:
7219 input_location = saved_location;
7220 return ret;
7221 }
7222
/* Look through TYPE for variable-sized objects and gimplify each such
   size that we find.  Add to LIST_P any statements generated.

   Only the main variant of TYPE is processed; gimplified sizes and
   bounds are then copied onto every other variant so that all variants
   share the same size trees.  */

void
gimplify_type_sizes (tree type, gimple_seq *list_p)
{
  tree field, t;

  if (type == NULL || type == error_mark_node)
    return;

  /* We first do the main variant, then copy into any other variants.  */
  type = TYPE_MAIN_VARIANT (type);

  /* Avoid infinite recursion.  The flag is set *before* walking inner
     types below, so self-referential types terminate.  */
  if (TYPE_SIZES_GIMPLIFIED (type))
    return;

  TYPE_SIZES_GIMPLIFIED (type) = 1;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
      /* Scalar types may have variable bounds (e.g. front ends with
	 constrained subtypes); gimplify both ends of the range.  */
      gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
      gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);

      /* Propagate the gimplified bounds to all other variants.  */
      for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
	{
	  TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
	  TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
	}
      break;

    case ARRAY_TYPE:
      /* These types may not have declarations, so handle them here.  */
      gimplify_type_sizes (TREE_TYPE (type), list_p);
      gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
      /* When not optimizing, ensure VLA bounds aren't removed: make the
	 artificial bound variables visible to the debugger by clearing
	 DECL_IGNORED_P on them.  */
      if (!optimize
	  && TYPE_DOMAIN (type)
	  && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
	{
	  t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
	  if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
	    DECL_IGNORED_P (t) = 0;
	  t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
	  if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
	    DECL_IGNORED_P (t) = 0;
	}
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      /* Gimplify the offset/size of every field, then recurse into the
	 field types themselves.  */
      for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
	if (TREE_CODE (field) == FIELD_DECL)
	  {
	    gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
	    gimplify_one_sizepos (&DECL_SIZE (field), list_p);
	    gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
	    gimplify_type_sizes (TREE_TYPE (field), list_p);
	  }
      break;

    case POINTER_TYPE:
    case REFERENCE_TYPE:
	/* We used to recurse on the pointed-to type here, which turned out to
	   be incorrect because its definition might refer to variables not
	   yet initialized at this point if a forward declaration is involved.

	   It was actually useful for anonymous pointed-to types to ensure
	   that the sizes evaluation dominates every possible later use of the
	   values.  Restricting to such types here would be safe since there
	   is no possible forward declaration around, but would introduce an
	   undesirable middle-end semantic to anonymity.  We then defer to
	   front-ends the responsibility of ensuring that the sizes are
	   evaluated both early and late enough, e.g. by attaching artificial
	   type declarations to the tree.  */
      break;

    default:
      break;
    }

  /* Gimplify the type's own size, then stamp size and the "already
     gimplified" flag onto every other variant.  */
  gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
  gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);

  for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
    {
      TYPE_SIZE (t) = TYPE_SIZE (type);
      TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
      TYPE_SIZES_GIMPLIFIED (t) = 1;
    }
}
7321
/* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
   a size or position, has had all of its SAVE_EXPRs evaluated.
   We add any required statements to *STMT_P.  */

void
gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
{
  tree type, expr = *expr_p;

  /* We don't do anything if the value isn't there, is constant, or contains
     A PLACEHOLDER_EXPR.  We also don't want to do anything if it's already
     a VAR_DECL.  If it's a VAR_DECL from another function, the gimplifier
     will want to replace it with a new variable, but that will cause problems
     if this type is from outside the function.  It's OK to have that here.  */
  if (expr == NULL_TREE || TREE_CONSTANT (expr)
      || TREE_CODE (expr) == VAR_DECL
      || CONTAINS_PLACEHOLDER_P (expr))
    return;

  /* Gimplify an unshared copy so the original tree EXPR stays usable
     elsewhere (e.g. by other variants sharing the size tree).  */
  type = TREE_TYPE (expr);
  *expr_p = unshare_expr (expr);

  gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue);
  expr = *expr_p;

  /* Verify that we've an exact type match with the original expression.
     In particular, we do not wish to drop a "sizetype" in favour of a
     type of similar dimensions.  We don't want to pollute the generic
     type-stripping code with this knowledge because it doesn't matter
     for the bulk of GENERIC/GIMPLE.  It only matters that TYPE_SIZE_UNIT
     and friends retain their "sizetype-ness".  */
  if (TREE_TYPE (expr) != type
      && TREE_CODE (type) == INTEGER_TYPE
      && TYPE_IS_SIZETYPE (type))
    {
      tree tmp;
      gimple stmt;

      /* Force the result back into the original sizetype via a fresh
	 temporary assigned a NOP_EXPR conversion of the value.  */
      *expr_p = create_tmp_var (type, NULL);
      tmp = build1 (NOP_EXPR, type, expr);
      stmt = gimplify_assign (*expr_p, tmp, stmt_p);
      /* Carry the source location of the size expression onto the new
	 assignment when one is available.  */
      if (EXPR_HAS_LOCATION (expr))
	gimple_set_location (stmt, EXPR_LOCATION (expr));
      else
	gimple_set_location (stmt, input_location);
    }
}
7369
7370
/* Gimplify the body of statements pointed to by BODY_P and return a
   GIMPLE_BIND containing the sequence of GIMPLE statements
   corresponding to BODY_P.  FNDECL is the function decl containing
   *BODY_P.  If DO_PARMS is true, also gimplify the parameter setup
   (callee-copies) and prepend it to the body.

   On return *BODY_P has been cleared; the single returned GIMPLE_BIND
   is the entire body.  */

gimple
gimplify_body (tree *body_p, tree fndecl, bool do_parms)
{
  location_t saved_location = input_location;
  gimple_seq parm_stmts, seq;
  gimple outer_bind;
  struct gimplify_ctx gctx;

  timevar_push (TV_TREE_GIMPLIFY);

  /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
     gimplification.  */
  default_rtl_profile ();

  gcc_assert (gimplify_ctxp == NULL);
  push_gimplify_context (&gctx);

  /* Unshare most shared trees in the body and in that of any nested functions.
     It would seem we don't have to do this for nested functions because
     they are supposed to be output and then the outer function gimplified
     first, but the g++ front end doesn't always do it that way.  */
  unshare_body (body_p, fndecl);
  unvisit_body (body_p, fndecl);

  /* A non-NULL cgraph origin means FNDECL is a nested function; set up
     the set used to track VLA types referenced non-locally.  */
  if (cgraph_node (fndecl)->origin)
    nonlocal_vlas = pointer_set_create ();

  /* Make sure input_location isn't set to something weird.  */
  input_location = DECL_SOURCE_LOCATION (fndecl);

  /* Resolve callee-copies.  This has to be done before processing
     the body so that DECL_VALUE_EXPR gets processed correctly.  */
  parm_stmts = (do_parms) ? gimplify_parameters () : NULL;

  /* Gimplify the function's body.  */
  seq = NULL;
  gimplify_stmt (body_p, &seq);
  outer_bind = gimple_seq_first_stmt (seq);
  if (!outer_bind)
    {
      /* An entirely empty body still needs one statement to anchor the
	 sequence.  */
      outer_bind = gimple_build_nop ();
      gimplify_seq_add_stmt (&seq, outer_bind);
    }

  /* The body must contain exactly one statement, a GIMPLE_BIND.  If this is
     not the case, wrap everything in a GIMPLE_BIND to make it so.  */
  if (gimple_code (outer_bind) == GIMPLE_BIND
      && gimple_seq_first (seq) == gimple_seq_last (seq))
    ;
  else
    outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);

  *body_p = NULL_TREE;

  /* If we had callee-copies statements, insert them at the beginning
     of the function.  */
  if (!gimple_seq_empty_p (parm_stmts))
    {
      gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
      gimple_bind_set_body (outer_bind, parm_stmts);
    }

  if (nonlocal_vlas)
    {
      pointer_set_destroy (nonlocal_vlas);
      nonlocal_vlas = NULL;
    }

  pop_gimplify_context (outer_bind);
  gcc_assert (gimplify_ctxp == NULL);

#ifdef ENABLE_TYPES_CHECKING
  if (!errorcount && !sorrycount)
    verify_types_in_gimple_seq (gimple_bind_body (outer_bind));
#endif

  timevar_pop (TV_TREE_GIMPLIFY);
  input_location = saved_location;

  return outer_bind;
}
7457
7458 /* Entry point to the gimplification pass. FNDECL is the FUNCTION_DECL
7459 node for the function we want to gimplify.
7460
7461 Returns the sequence of GIMPLE statements corresponding to the body
7462 of FNDECL. */
7463
7464 void
7465 gimplify_function_tree (tree fndecl)
7466 {
7467 tree oldfn, parm, ret;
7468 gimple_seq seq;
7469 gimple bind;
7470
7471 oldfn = current_function_decl;
7472 current_function_decl = fndecl;
7473 if (DECL_STRUCT_FUNCTION (fndecl))
7474 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
7475 else
7476 push_struct_function (fndecl);
7477
7478 for (parm = DECL_ARGUMENTS (fndecl); parm ; parm = TREE_CHAIN (parm))
7479 {
7480 /* Preliminarily mark non-addressed complex variables as eligible
7481 for promotion to gimple registers. We'll transform their uses
7482 as we find them. */
7483 if ((TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
7484 || TREE_CODE (TREE_TYPE (parm)) == VECTOR_TYPE)
7485 && !TREE_THIS_VOLATILE (parm)
7486 && !needs_to_live_in_memory (parm))
7487 DECL_GIMPLE_REG_P (parm) = 1;
7488 }
7489
7490 ret = DECL_RESULT (fndecl);
7491 if ((TREE_CODE (TREE_TYPE (ret)) == COMPLEX_TYPE
7492 || TREE_CODE (TREE_TYPE (ret)) == VECTOR_TYPE)
7493 && !needs_to_live_in_memory (ret))
7494 DECL_GIMPLE_REG_P (ret) = 1;
7495
7496 bind = gimplify_body (&DECL_SAVED_TREE (fndecl), fndecl, true);
7497
7498 /* The tree body of the function is no longer needed, replace it
7499 with the new GIMPLE body. */
7500 seq = gimple_seq_alloc ();
7501 gimple_seq_add_stmt (&seq, bind);
7502 gimple_set_body (fndecl, seq);
7503
7504 /* If we're instrumenting function entry/exit, then prepend the call to
7505 the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
7506 catch the exit hook. */
7507 /* ??? Add some way to ignore exceptions for this TFE. */
7508 if (flag_instrument_function_entry_exit
7509 && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
7510 && !flag_instrument_functions_exclude_p (fndecl))
7511 {
7512 tree x;
7513 gimple new_bind;
7514 gimple tf;
7515 gimple_seq cleanup = NULL, body = NULL;
7516
7517 x = implicit_built_in_decls[BUILT_IN_PROFILE_FUNC_EXIT];
7518 gimplify_seq_add_stmt (&cleanup, gimple_build_call (x, 0));
7519 tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);
7520
7521 x = implicit_built_in_decls[BUILT_IN_PROFILE_FUNC_ENTER];
7522 gimplify_seq_add_stmt (&body, gimple_build_call (x, 0));
7523 gimplify_seq_add_stmt (&body, tf);
7524 new_bind = gimple_build_bind (NULL, body, gimple_bind_block (bind));
7525 /* Clear the block for BIND, since it is no longer directly inside
7526 the function, but within a try block. */
7527 gimple_bind_set_block (bind, NULL);
7528
7529 /* Replace the current function body with the body
7530 wrapped in the try/finally TF. */
7531 seq = gimple_seq_alloc ();
7532 gimple_seq_add_stmt (&seq, new_bind);
7533 gimple_set_body (fndecl, seq);
7534 }
7535
7536 DECL_SAVED_TREE (fndecl) = NULL_TREE;
7537
7538 current_function_decl = oldfn;
7539 pop_cfun ();
7540 }
7541
7542
/* Some transformations like inlining may invalidate the GIMPLE form
   for operands.  This function traverses all the operands in STMT and
   gimplifies anything that is not a valid gimple operand.  Any new
   GIMPLE statements are inserted before *GSI_P.  */

void
gimple_regimplify_operands (gimple stmt, gimple_stmt_iterator *gsi_p)
{
  size_t i, num_ops;
  tree orig_lhs = NULL_TREE, lhs, t;
  gimple_seq pre = NULL;
  gimple post_stmt = NULL;
  struct gimplify_ctx gctx;

  push_gimplify_context (&gctx);
  gimplify_ctxp->into_ssa = gimple_in_ssa_p (cfun);

  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      /* Both comparison operands must be gimple values.  */
      gimplify_expr (gimple_cond_lhs_ptr (stmt), &pre, NULL,
		     is_gimple_val, fb_rvalue);
      gimplify_expr (gimple_cond_rhs_ptr (stmt), &pre, NULL,
		     is_gimple_val, fb_rvalue);
      break;
    case GIMPLE_SWITCH:
      gimplify_expr (gimple_switch_index_ptr (stmt), &pre, NULL,
		     is_gimple_val, fb_rvalue);
      break;
    case GIMPLE_OMP_ATOMIC_LOAD:
      gimplify_expr (gimple_omp_atomic_load_rhs_ptr (stmt), &pre, NULL,
		     is_gimple_val, fb_rvalue);
      break;
    case GIMPLE_ASM:
      {
	size_t i, noutputs = gimple_asm_noutputs (stmt);
	const char *constraint, **oconstraints;
	bool allows_mem, allows_reg, is_inout;

	oconstraints
	  = (const char **) alloca ((noutputs) * sizeof (const char *));
	/* Outputs: in-out operands need a min_lval, plain outputs any
	   lvalue.  Remember the constraints for input parsing below.  */
	for (i = 0; i < noutputs; i++)
	  {
	    tree op = gimple_asm_output_op (stmt, i);
	    constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
	    oconstraints[i] = constraint;
	    parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
				     &allows_reg, &is_inout);
	    gimplify_expr (&TREE_VALUE (op), &pre, NULL,
			   is_inout ? is_gimple_min_lval : is_gimple_lvalue,
			   fb_lvalue | fb_mayfail);
	  }
	/* Inputs: memory-only operands are gimplified as lvalues,
	   everything else as asm rvalues.  */
	for (i = 0; i < gimple_asm_ninputs (stmt); i++)
	  {
	    tree op = gimple_asm_input_op (stmt, i);
	    constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
	    parse_input_constraint (&constraint, 0, 0, noutputs, 0,
				    oconstraints, &allows_mem, &allows_reg);
	    if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (op))) && allows_mem)
	      allows_reg = 0;
	    if (!allows_reg && allows_mem)
	      gimplify_expr (&TREE_VALUE (op), &pre, NULL,
			     is_gimple_lvalue, fb_lvalue | fb_mayfail);
	    else
	      gimplify_expr (&TREE_VALUE (op), &pre, NULL,
			     is_gimple_asm_val, fb_rvalue);
	  }
      }
      break;
    default:
      /* NOTE: We start gimplifying operands from last to first to
	 make sure that side-effects on the RHS of calls, assignments
	 and ASMs are executed before the LHS.  The ordering is not
	 important for other statements.  */
      num_ops = gimple_num_ops (stmt);
      orig_lhs = gimple_get_lhs (stmt);
      for (i = num_ops; i > 0; i--)
	{
	  tree op = gimple_op (stmt, i - 1);
	  if (op == NULL_TREE)
	    continue;
	  /* Operand 1 of a call or assignment is its LHS.  */
	  if (i == 1 && (is_gimple_call (stmt) || is_gimple_assign (stmt)))
	    gimplify_expr (&op, &pre, NULL, is_gimple_lvalue, fb_lvalue);
	  /* A single-rhs assignment's RHS predicate depends on its LHS.  */
	  else if (i == 2
		   && is_gimple_assign (stmt)
		   && num_ops == 2
		   && get_gimple_rhs_class (gimple_expr_code (stmt))
		      == GIMPLE_SINGLE_RHS)
	    gimplify_expr (&op, &pre, NULL,
			   rhs_predicate_for (gimple_assign_lhs (stmt)),
			   fb_rvalue);
	  /* Operand 2 of a call is the callee address.  */
	  else if (i == 2 && is_gimple_call (stmt))
	    {
	      if (TREE_CODE (op) == FUNCTION_DECL)
		continue;
	      gimplify_expr (&op, &pre, NULL, is_gimple_call_addr, fb_rvalue);
	    }
	  else
	    gimplify_expr (&op, &pre, NULL, is_gimple_val, fb_rvalue);
	  gimple_set_op (stmt, i - 1, op);
	}

      lhs = gimple_get_lhs (stmt);
      /* If the LHS changed it in a way that requires a simple RHS,
	 create temporary.  */
      if (lhs && !is_gimple_reg (lhs))
	{
	  bool need_temp = false;

	  if (is_gimple_assign (stmt)
	      && num_ops == 2
	      && get_gimple_rhs_class (gimple_expr_code (stmt))
		 == GIMPLE_SINGLE_RHS)
	    gimplify_expr (gimple_assign_rhs1_ptr (stmt), &pre, NULL,
			   rhs_predicate_for (gimple_assign_lhs (stmt)),
			   fb_rvalue);
	  /* NOTE(review): this branch appears unreachable -- the
	     enclosing condition already requires !is_gimple_reg (lhs).
	     Kept as-is pending confirmation against upstream history.  */
	  else if (is_gimple_reg (lhs))
	    {
	      if (is_gimple_reg_type (TREE_TYPE (lhs)))
		{
		  if (is_gimple_call (stmt))
		    {
		      /* Reuse I to hold the call flags here; the operand
			 loop above is done with it.  */
		      i = gimple_call_flags (stmt);
		      if ((i & ECF_LOOPING_CONST_OR_PURE)
			  || !(i & (ECF_CONST | ECF_PURE)))
			need_temp = true;
		    }
		  if (stmt_can_throw_internal (stmt))
		    need_temp = true;
		}
	    }
	  else
	    {
	      if (is_gimple_reg_type (TREE_TYPE (lhs)))
		need_temp = true;
	      else if (TYPE_MODE (TREE_TYPE (lhs)) != BLKmode)
		{
		  if (is_gimple_call (stmt))
		    {
		      tree fndecl = gimple_call_fndecl (stmt);

		      /* No temporary needed when the call already returns
			 its value in memory (aggregate return or a
			 by-reference DECL_RESULT).  */
		      if (!aggregate_value_p (TREE_TYPE (lhs), fndecl)
			  && !(fndecl && DECL_RESULT (fndecl)
			       && DECL_BY_REFERENCE (DECL_RESULT (fndecl))))
			need_temp = true;
		    }
		  else
		    need_temp = true;
		}
	    }
	  if (need_temp)
	    {
	      /* Store into a fresh temporary and copy it to the real LHS
		 afterwards (POST_STMT, inserted after the statement).  */
	      tree temp = create_tmp_var (TREE_TYPE (lhs), NULL);

	      if (TREE_CODE (TREE_TYPE (lhs)) == COMPLEX_TYPE
		  || TREE_CODE (TREE_TYPE (lhs)) == VECTOR_TYPE)
		DECL_GIMPLE_REG_P (temp) = 1;
	      if (TREE_CODE (orig_lhs) == SSA_NAME)
		orig_lhs = SSA_NAME_VAR (orig_lhs);

	      if (gimple_in_ssa_p (cfun))
		temp = make_ssa_name (temp, NULL);
	      gimple_set_lhs (stmt, temp);
	      post_stmt = gimple_build_assign (lhs, temp);
	      if (TREE_CODE (lhs) == SSA_NAME)
		SSA_NAME_DEF_STMT (lhs) = post_stmt;
	    }
	}
      break;
    }

  /* Register any temporaries created by the gimplifier.  */
  if (gimple_referenced_vars (cfun))
    for (t = gimplify_ctxp->temps; t ; t = TREE_CHAIN (t))
      add_referenced_var (t);

  if (!gimple_seq_empty_p (pre))
    {
      if (gimple_in_ssa_p (cfun))
	{
	  /* Note: this iterator shadows the outer size_t I, which is
	     no longer needed at this point.  */
	  gimple_stmt_iterator i;

	  for (i = gsi_start (pre); !gsi_end_p (i); gsi_next (&i))
	    mark_symbols_for_renaming (gsi_stmt (i));
	}
      gsi_insert_seq_before (gsi_p, pre, GSI_SAME_STMT);
    }
  if (post_stmt)
    gsi_insert_after (gsi_p, post_stmt, GSI_NEW_STMT);

  pop_gimplify_context (NULL);
}
7734
7735
7736 /* Expands EXPR to list of gimple statements STMTS. If SIMPLE is true,
7737 force the result to be either ssa_name or an invariant, otherwise
7738 just force it to be a rhs expression. If VAR is not NULL, make the
7739 base variable of the final destination be VAR if suitable. */
7740
7741 tree
7742 force_gimple_operand (tree expr, gimple_seq *stmts, bool simple, tree var)
7743 {
7744 tree t;
7745 enum gimplify_status ret;
7746 gimple_predicate gimple_test_f;
7747 struct gimplify_ctx gctx;
7748
7749 *stmts = NULL;
7750
7751 if (is_gimple_val (expr))
7752 return expr;
7753
7754 gimple_test_f = simple ? is_gimple_val : is_gimple_reg_rhs;
7755
7756 push_gimplify_context (&gctx);
7757 gimplify_ctxp->into_ssa = gimple_in_ssa_p (cfun);
7758 gimplify_ctxp->allow_rhs_cond_expr = true;
7759
7760 if (var)
7761 expr = build2 (MODIFY_EXPR, TREE_TYPE (var), var, expr);
7762
7763 if (TREE_CODE (expr) != MODIFY_EXPR
7764 && TREE_TYPE (expr) == void_type_node)
7765 {
7766 gimplify_and_add (expr, stmts);
7767 expr = NULL_TREE;
7768 }
7769 else
7770 {
7771 ret = gimplify_expr (&expr, stmts, NULL, gimple_test_f, fb_rvalue);
7772 gcc_assert (ret != GS_ERROR);
7773 }
7774
7775 if (gimple_referenced_vars (cfun))
7776 for (t = gimplify_ctxp->temps; t ; t = TREE_CHAIN (t))
7777 add_referenced_var (t);
7778
7779 pop_gimplify_context (NULL);
7780
7781 return expr;
7782 }
7783
7784 /* Invokes force_gimple_operand for EXPR with parameters SIMPLE_P and VAR. If
7785 some statements are produced, emits them at GSI. If BEFORE is true.
7786 the statements are appended before GSI, otherwise they are appended after
7787 it. M specifies the way GSI moves after insertion (GSI_SAME_STMT or
7788 GSI_CONTINUE_LINKING are the usual values). */
7789
7790 tree
7791 force_gimple_operand_gsi (gimple_stmt_iterator *gsi, tree expr,
7792 bool simple_p, tree var, bool before,
7793 enum gsi_iterator_update m)
7794 {
7795 gimple_seq stmts;
7796
7797 expr = force_gimple_operand (expr, &stmts, simple_p, var);
7798
7799 if (!gimple_seq_empty_p (stmts))
7800 {
7801 if (gimple_in_ssa_p (cfun))
7802 {
7803 gimple_stmt_iterator i;
7804
7805 for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
7806 mark_symbols_for_renaming (gsi_stmt (i));
7807 }
7808
7809 if (before)
7810 gsi_insert_seq_before (gsi, stmts, m);
7811 else
7812 gsi_insert_seq_after (gsi, stmts, m);
7813 }
7814
7815 return expr;
7816 }
7817
7818 #include "gt-gimplify.h"