re PR middle-end/36326 (gimplification of aggregate copies introduces extra aggregate...
[gcc.git] / gcc / gimplify.c
1 /* Tree lowering pass. This pass converts the GENERIC functions-as-trees
2 tree representation into the GIMPLE form.
3 Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008
4 Free Software Foundation, Inc.
5 Major work done by Sebastian Pop <s.pop@laposte.net>,
6 Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.
7
8 This file is part of GCC.
9
10 GCC is free software; you can redistribute it and/or modify it under
11 the terms of the GNU General Public License as published by the Free
12 Software Foundation; either version 3, or (at your option) any later
13 version.
14
15 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
16 WARRANTY; without even the implied warranty of MERCHANTABILITY or
17 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 for more details.
19
20 You should have received a copy of the GNU General Public License
21 along with GCC; see the file COPYING3. If not see
22 <http://www.gnu.org/licenses/>. */
23
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "tm.h"
28 #include "tree.h"
29 #include "rtl.h"
30 #include "varray.h"
31 #include "gimple.h"
32 #include "tree-iterator.h"
33 #include "tree-inline.h"
34 #include "diagnostic.h"
35 #include "langhooks.h"
36 #include "langhooks-def.h"
37 #include "tree-flow.h"
38 #include "cgraph.h"
39 #include "timevar.h"
40 #include "except.h"
41 #include "hashtab.h"
42 #include "flags.h"
43 #include "real.h"
44 #include "function.h"
45 #include "output.h"
46 #include "expr.h"
47 #include "ggc.h"
48 #include "toplev.h"
49 #include "target.h"
50 #include "optabs.h"
51 #include "pointer-set.h"
52 #include "splay-tree.h"
53 #include "vec.h"
54 #include "gimple.h"
55
56
57 enum gimplify_omp_var_data
58 {
59 GOVD_SEEN = 1,
60 GOVD_EXPLICIT = 2,
61 GOVD_SHARED = 4,
62 GOVD_PRIVATE = 8,
63 GOVD_FIRSTPRIVATE = 16,
64 GOVD_LASTPRIVATE = 32,
65 GOVD_REDUCTION = 64,
66 GOVD_LOCAL = 128,
67 GOVD_DEBUG_PRIVATE = 256,
68 GOVD_PRIVATE_OUTER_REF = 512,
69 GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
70 | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LOCAL)
71 };
72
73
74 enum omp_region_type
75 {
76 ORT_WORKSHARE = 0,
77 ORT_TASK = 1,
78 ORT_PARALLEL = 2,
79 ORT_COMBINED_PARALLEL = 3
80 };
81
82 struct gimplify_omp_ctx
83 {
84 struct gimplify_omp_ctx *outer_context;
85 splay_tree variables;
86 struct pointer_set_t *privatized_types;
87 location_t location;
88 enum omp_clause_default_kind default_kind;
89 enum omp_region_type region_type;
90 };
91
92 static struct gimplify_ctx *gimplify_ctxp;
93 static struct gimplify_omp_ctx *gimplify_omp_ctxp;
94
95
96 /* Formal (expression) temporary table handling: Multiple occurrences of
97 the same scalar expression are evaluated into the same temporary. */
98
99 typedef struct gimple_temp_hash_elt
100 {
101 tree val; /* Key */
102 tree temp; /* Value */
103 } elt_t;
104
105 /* Forward declarations. */
106 static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
107
108 /* Mark X addressable. Unlike the langhook we expect X to be in gimple
109 form and we don't do any syntax checking. */
110 static void
111 mark_addressable (tree x)
112 {
113 while (handled_component_p (x))
114 x = TREE_OPERAND (x, 0);
115 if (TREE_CODE (x) != VAR_DECL && TREE_CODE (x) != PARM_DECL)
116 return;
117 TREE_ADDRESSABLE (x) = 1;
118 }
119
120 /* Return a hash value for a formal temporary table entry. */
121
122 static hashval_t
123 gimple_tree_hash (const void *p)
124 {
125 tree t = ((const elt_t *) p)->val;
126 return iterative_hash_expr (t, 0);
127 }
128
129 /* Compare two formal temporary table entries. */
130
131 static int
132 gimple_tree_eq (const void *p1, const void *p2)
133 {
134 tree t1 = ((const elt_t *) p1)->val;
135 tree t2 = ((const elt_t *) p2)->val;
136 enum tree_code code = TREE_CODE (t1);
137
138 if (TREE_CODE (t2) != code
139 || TREE_TYPE (t1) != TREE_TYPE (t2))
140 return 0;
141
142 if (!operand_equal_p (t1, t2, 0))
143 return 0;
144
145 /* Only allow them to compare equal if they also hash equal; otherwise
146 results are nondeterministic, and we fail bootstrap comparison. */
147 gcc_assert (gimple_tree_hash (p1) == gimple_tree_hash (p2));
148
149 return 1;
150 }
151
152 /* Link gimple statement GS to the end of the sequence *SEQ_P. If
153 *SEQ_P is NULL, a new sequence is allocated. This function is
154 similar to gimple_seq_add_stmt, but does not scan the operands.
155 During gimplification, we need to manipulate statement sequences
156 before the def/use vectors have been constructed. */
157
158 static void
159 gimplify_seq_add_stmt (gimple_seq *seq_p, gimple gs)
160 {
161 gimple_stmt_iterator si;
162
163 if (gs == NULL)
164 return;
165
166 if (*seq_p == NULL)
167 *seq_p = gimple_seq_alloc ();
168
169 si = gsi_last (*seq_p);
170
171 gsi_insert_after_without_update (&si, gs, GSI_NEW_STMT);
172 }
173
174 /* Append sequence SRC to the end of sequence *DST_P. If *DST_P is
175 NULL, a new sequence is allocated. This function is
176 similar to gimple_seq_add_seq, but does not scan the operands.
177 During gimplification, we need to manipulate statement sequences
178 before the def/use vectors have been constructed. */
179
180 static void
181 gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
182 {
183 gimple_stmt_iterator si;
184
185 if (src == NULL)
186 return;
187
188 if (*dst_p == NULL)
189 *dst_p = gimple_seq_alloc ();
190
191 si = gsi_last (*dst_p);
192 gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
193 }
194
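/* Illustrative sketch, not part of the original file: how the two helpers
   above are typically used during gimplification to build a sequence before
   any def/use information exists.  The label decls here are hypothetical.

     gimple_seq seq = NULL, cleanup = NULL;
     gimplify_seq_add_stmt (&seq, gimple_build_label (entry_label));
     gimplify_seq_add_stmt (&seq, gimple_build_goto (exit_label));
     gimplify_seq_add_seq (&seq, cleanup);

   The first call allocates *SEQ_P lazily; appending a NULL statement or an
   empty sequence is a no-op.  */
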
195 /* Set up a context for the gimplifier. */
196
197 void
198 push_gimplify_context (struct gimplify_ctx *c)
199 {
200 memset (c, '\0', sizeof (*c));
201 c->prev_context = gimplify_ctxp;
202 gimplify_ctxp = c;
203 }
204
205 /* Tear down a context for the gimplifier. If BODY is non-null, then
206 put the temporaries into the outer BIND_EXPR. Otherwise, put them
207 in the local_decls.
208
209 BODY is not a sequence, but the first tuple in a sequence. */
210
211 void
212 pop_gimplify_context (gimple body)
213 {
214 struct gimplify_ctx *c = gimplify_ctxp;
215 tree t;
216
217 gcc_assert (c && (c->bind_expr_stack == NULL
218 || VEC_empty (gimple, c->bind_expr_stack)));
219 VEC_free (gimple, heap, c->bind_expr_stack);
220 gimplify_ctxp = c->prev_context;
221
222 for (t = c->temps; t ; t = TREE_CHAIN (t))
223 DECL_GIMPLE_FORMAL_TEMP_P (t) = 0;
224
225 if (body)
226 declare_vars (c->temps, body, false);
227 else
228 record_vars (c->temps);
229
230 if (c->temp_htab)
231 htab_delete (c->temp_htab);
232 }
233
234 static void
235 gimple_push_bind_expr (gimple gimple_bind)
236 {
237 if (gimplify_ctxp->bind_expr_stack == NULL)
238 gimplify_ctxp->bind_expr_stack = VEC_alloc (gimple, heap, 8);
239 VEC_safe_push (gimple, heap, gimplify_ctxp->bind_expr_stack, gimple_bind);
240 }
241
242 static void
243 gimple_pop_bind_expr (void)
244 {
245 VEC_pop (gimple, gimplify_ctxp->bind_expr_stack);
246 }
247
248 gimple
249 gimple_current_bind_expr (void)
250 {
251 return VEC_last (gimple, gimplify_ctxp->bind_expr_stack);
252 }
253
254 /* Return the stack of GIMPLE_BINDs created during gimplification. */
255
256 VEC(gimple, heap) *
257 gimple_bind_expr_stack (void)
258 {
259 return gimplify_ctxp->bind_expr_stack;
260 }
261
262 /* Returns true iff there is a COND_EXPR between us and the innermost
263 CLEANUP_POINT_EXPR. This info is used by gimple_push_cleanup. */
264
265 static bool
266 gimple_conditional_context (void)
267 {
268 return gimplify_ctxp->conditions > 0;
269 }
270
271 /* Note that we've entered a COND_EXPR. */
272
273 static void
274 gimple_push_condition (void)
275 {
276 #ifdef ENABLE_GIMPLE_CHECKING
277 if (gimplify_ctxp->conditions == 0)
278 gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
279 #endif
280 ++(gimplify_ctxp->conditions);
281 }
282
283 /* Note that we've left a COND_EXPR. If we're back at unconditional scope
284 now, add any conditional cleanups we've seen to the prequeue. */
285
286 static void
287 gimple_pop_condition (gimple_seq *pre_p)
288 {
289 int conds = --(gimplify_ctxp->conditions);
290
291 gcc_assert (conds >= 0);
292 if (conds == 0)
293 {
294 gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
295 gimplify_ctxp->conditional_cleanups = NULL;
296 }
297 }
298
299 /* A stable comparison routine for use with splay trees and DECLs. */
300
301 static int
302 splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
303 {
304 tree a = (tree) xa;
305 tree b = (tree) xb;
306
307 return DECL_UID (a) - DECL_UID (b);
308 }
309
310 /* Create a new omp construct that deals with variable remapping. */
311
312 static struct gimplify_omp_ctx *
313 new_omp_context (enum omp_region_type region_type)
314 {
315 struct gimplify_omp_ctx *c;
316
317 c = XCNEW (struct gimplify_omp_ctx);
318 c->outer_context = gimplify_omp_ctxp;
319 c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
320 c->privatized_types = pointer_set_create ();
321 c->location = input_location;
322 c->region_type = region_type;
323 if (region_type != ORT_TASK)
324 c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
325 else
326 c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;
327
328 return c;
329 }
330
331 /* Destroy an omp construct that deals with variable remapping. */
332
333 static void
334 delete_omp_context (struct gimplify_omp_ctx *c)
335 {
336 splay_tree_delete (c->variables);
337 pointer_set_destroy (c->privatized_types);
338 XDELETE (c);
339 }
340
341 static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
342 static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);
343
344 /* A subroutine of append_to_statement_list{,_force}. T is not NULL. */
345
346 static void
347 append_to_statement_list_1 (tree t, tree *list_p)
348 {
349 tree list = *list_p;
350 tree_stmt_iterator i;
351
352 if (!list)
353 {
354 if (t && TREE_CODE (t) == STATEMENT_LIST)
355 {
356 *list_p = t;
357 return;
358 }
359 *list_p = list = alloc_stmt_list ();
360 }
361
362 i = tsi_last (list);
363 tsi_link_after (&i, t, TSI_CONTINUE_LINKING);
364 }
365
366 /* Add T to the end of the list container pointed to by LIST_P.
367 If T is an expression with no effects, it is ignored. */
368
369 void
370 append_to_statement_list (tree t, tree *list_p)
371 {
372 if (t && TREE_SIDE_EFFECTS (t))
373 append_to_statement_list_1 (t, list_p);
374 }
375
376 /* Similar, but the statement is always added, regardless of side effects. */
377
378 void
379 append_to_statement_list_force (tree t, tree *list_p)
380 {
381 if (t != NULL_TREE)
382 append_to_statement_list_1 (t, list_p);
383 }
384
385 /* Both gimplify the statement T and append it to *SEQ_P. This function
386 behaves exactly as gimplify_stmt, but you don't have to pass T as a
387 reference. */
388
389 void
390 gimplify_and_add (tree t, gimple_seq *seq_p)
391 {
392 gimplify_stmt (&t, seq_p);
393 }
394
395 /* Gimplify statement T into sequence *SEQ_P, and return the first
396 tuple in the sequence of generated tuples for this statement.
397 Return NULL if gimplifying T produced no tuples. */
398
399 static gimple
400 gimplify_and_return_first (tree t, gimple_seq *seq_p)
401 {
402 gimple_stmt_iterator last = gsi_last (*seq_p);
403
404 gimplify_and_add (t, seq_p);
405
406 if (!gsi_end_p (last))
407 {
408 gsi_next (&last);
409 return gsi_stmt (last);
410 }
411 else
412 return gimple_seq_first_stmt (*seq_p);
413 }
414
415 /* Strip off a legitimate source ending from the input string NAME of
416 length LEN. Rather than having to know the names used by all of
417 our front ends, we strip off an ending of a period followed by
418 up to five characters. (Java uses ".class".) */
419
420 static inline void
421 remove_suffix (char *name, int len)
422 {
423 int i;
424
425 for (i = 2; i < 8 && len > i; i++)
426 {
427 if (name[len - i] == '.')
428 {
429 name[len - i] = '\0';
430 break;
431 }
432 }
433 }
434
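/* Worked example for remove_suffix above (illustrative, not from the
   original source): if BUF holds "Foo.class", remove_suffix (buf, 9) finds
   the '.' at position 9 - 6 and truncates BUF to "Foo"; if BUF holds
   "x.y.c", remove_suffix (buf, 5) strips only the final ".c", leaving
   "x.y", because the scan probes positions near the end of the string
   first.  */
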
435 /* Subroutine for find_single_pointer_decl. */
436
437 static tree
438 find_single_pointer_decl_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
439 void *data)
440 {
441 tree *pdecl = (tree *) data;
442
443 /* We are only looking for pointers at the same level as the
444 original tree; we must not look through any indirections.
445 Returning anything other than NULL_TREE will cause the caller to
446 not find a base. */
447 if (REFERENCE_CLASS_P (*tp))
448 return *tp;
449
450 if (DECL_P (*tp) && POINTER_TYPE_P (TREE_TYPE (*tp)))
451 {
452 if (*pdecl)
453 {
454 /* We already found a pointer decl; return anything other
455 than NULL_TREE to unwind from walk_tree signalling that
456 we have a duplicate. */
457 return *tp;
458 }
459 *pdecl = *tp;
460 }
461
462 return NULL_TREE;
463 }
464
465 /* Find the single DECL of pointer type in the tree T, used directly
466 rather than via an indirection, and return it. If there are zero
467 or more than one such DECLs, return NULL. */
468
469 static tree
470 find_single_pointer_decl (tree t)
471 {
472 tree decl = NULL_TREE;
473
474 if (walk_tree (&t, find_single_pointer_decl_1, &decl, NULL))
475 {
476 /* find_single_pointer_decl_1 returns a nonzero value, causing
477 walk_tree to return a nonzero value, to indicate that it
478 found more than one pointer DECL or that it found an
479 indirection. */
480 return NULL_TREE;
481 }
482
483 return decl;
484 }
485
486 /* Create a new temporary name with PREFIX. Returns an identifier. */
487
488 static GTY(()) unsigned int tmp_var_id_num;
489
490 tree
491 create_tmp_var_name (const char *prefix)
492 {
493 char *tmp_name;
494
495 if (prefix)
496 {
497 char *preftmp = ASTRDUP (prefix);
498
499 remove_suffix (preftmp, strlen (preftmp));
500 prefix = preftmp;
501 }
502
503 ASM_FORMAT_PRIVATE_NAME (tmp_name, prefix ? prefix : "T", tmp_var_id_num++);
504 return get_identifier (tmp_name);
505 }
506
507
508 /* Create a new temporary variable declaration of type TYPE.
509 Does NOT push it into the current binding. */
510
511 tree
512 create_tmp_var_raw (tree type, const char *prefix)
513 {
514 tree tmp_var;
515 tree new_type;
516
517 /* Make the type of the variable writable. */
518 new_type = build_type_variant (type, 0, 0);
519 TYPE_ATTRIBUTES (new_type) = TYPE_ATTRIBUTES (type);
520
521 tmp_var = build_decl (VAR_DECL, prefix ? create_tmp_var_name (prefix) : NULL,
522 type);
523
524 /* The variable was declared by the compiler. */
525 DECL_ARTIFICIAL (tmp_var) = 1;
526 /* And we don't want debug info for it. */
527 DECL_IGNORED_P (tmp_var) = 1;
528
529 /* Make the variable writable. */
530 TREE_READONLY (tmp_var) = 0;
531
532 DECL_EXTERNAL (tmp_var) = 0;
533 TREE_STATIC (tmp_var) = 0;
534 TREE_USED (tmp_var) = 1;
535
536 return tmp_var;
537 }
538
539 /* Create a new temporary variable declaration of type TYPE. DOES push the
540 variable into the current binding. Further, assume that this is called
541 only from gimplification or optimization, at which point the creation of
542 certain types is a bug. */
543
544 tree
545 create_tmp_var (tree type, const char *prefix)
546 {
547 tree tmp_var;
548
549 /* We don't allow types that are addressable (meaning we can't make copies),
550 or incomplete. We also used to reject all variable-size objects here,
551 but now support those for which a constant upper bound can be obtained.
552 The processing for variable sizes is performed in gimple_add_tmp_var,
553 the point at which it really matters; that code may also be reached
554 via paths not going through this function, e.g. after direct calls to create_tmp_var_raw. */
555 gcc_assert (!TREE_ADDRESSABLE (type) && COMPLETE_TYPE_P (type));
556
557 tmp_var = create_tmp_var_raw (type, prefix);
558 gimple_add_tmp_var (tmp_var);
559 return tmp_var;
560 }
561
562 /* Create a temporary with a name derived from VAL. Subroutine of
563 lookup_tmp_var; nobody else should call this function. */
564
565 static inline tree
566 create_tmp_from_val (tree val)
567 {
568 return create_tmp_var (TREE_TYPE (val), get_name (val));
569 }
570
571 /* Create a temporary to hold the value of VAL. If IS_FORMAL, try to reuse
572 an existing expression temporary. */
573
574 static tree
575 lookup_tmp_var (tree val, bool is_formal)
576 {
577 tree ret;
578
579 /* If not optimizing, never really reuse a temporary. local-alloc
580 won't allocate any variable that is used in more than one basic
581 block, which means it will go into memory, causing much extra
582 work in reload and final and poorer code generation, outweighing
583 the extra memory allocation here. */
584 if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
585 ret = create_tmp_from_val (val);
586 else
587 {
588 elt_t elt, *elt_p;
589 void **slot;
590
591 elt.val = val;
592 if (gimplify_ctxp->temp_htab == NULL)
593 gimplify_ctxp->temp_htab
594 = htab_create (1000, gimple_tree_hash, gimple_tree_eq, free);
595 slot = htab_find_slot (gimplify_ctxp->temp_htab, (void *)&elt, INSERT);
596 if (*slot == NULL)
597 {
598 elt_p = XNEW (elt_t);
599 elt_p->val = val;
600 elt_p->temp = ret = create_tmp_from_val (val);
601 *slot = (void *) elt_p;
602 }
603 else
604 {
605 elt_p = (elt_t *) *slot;
606 ret = elt_p->temp;
607 }
608 }
609
610 if (is_formal)
611 DECL_GIMPLE_FORMAL_TEMP_P (ret) = 1;
612
613 return ret;
614 }
615
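/* Illustrative example for lookup_tmp_var above, not part of the original
   source: when optimizing and IS_FORMAL is true, two structurally identical
   expressions hash to the same table entry, so

     lookup_tmp_var (a_plus_b_1, true);
     lookup_tmp_var (a_plus_b_2, true);

   both return the same formal temporary DECL (say D.1234, a hypothetical
   name) provided the two trees have the same code and type and compare
   equal under operand_equal_p.  Without -O, or if VAL has side effects,
   a fresh temporary is created each time.  */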
616
617 /* Return true if T is a CALL_EXPR or an expression that can be
618 assigned to a temporary. Note that this predicate should only be
619 used during gimplification. See the rationale for this in
620 gimplify_modify_expr. */
621
622 static bool
623 is_gimple_formal_tmp_or_call_rhs (tree t)
624 {
625 return TREE_CODE (t) == CALL_EXPR || is_gimple_formal_tmp_rhs (t);
626 }
627
628 /* Returns true iff T is a valid RHS for an assignment to a renamed
629 user -- or front-end generated artificial -- variable. */
630
631 static bool
632 is_gimple_reg_or_call_rhs (tree t)
633 {
634 /* If the RHS of the MODIFY_EXPR may throw or make a nonlocal goto
635 and the LHS is a user variable, then we need to introduce a formal
636 temporary. This way the optimizers can determine that the user
637 variable is only modified if evaluation of the RHS does not throw.
638
639 Don't force a temp of a non-renamable type; the copy could be
640 arbitrarily expensive. Instead we will generate a VDEF for
641 the assignment. */
642
643 if (is_gimple_reg_type (TREE_TYPE (t))
644 && ((TREE_CODE (t) == CALL_EXPR && TREE_SIDE_EFFECTS (t))
645 || tree_could_throw_p (t)))
646 return false;
647
648 return is_gimple_formal_tmp_or_call_rhs (t);
649 }
650
651 /* Return true if T is a valid memory RHS or a CALL_EXPR. Note that
652 this predicate should only be used during gimplification. See the
653 rationale for this in gimplify_modify_expr. */
654
655 static bool
656 is_gimple_mem_or_call_rhs (tree t)
657 {
658 /* If we're dealing with a renamable type, either source or dest must be
659 a renamed variable. */
660 if (is_gimple_reg_type (TREE_TYPE (t)))
661 return is_gimple_val (t);
662 else
663 return is_gimple_formal_tmp_or_call_rhs (t);
664 }
665
666
667 /* Returns a formal temporary variable initialized with VAL. PRE_P is as
668 in gimplify_expr. Only use this function if:
669
670 1) The value of the unfactored expression represented by VAL will not
671 change between the initialization and use of the temporary, and
672 2) The temporary will not be otherwise modified.
673
674 For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
675 and #2 means it is inappropriate for && temps.
676
677 For other cases, use get_initialized_tmp_var instead. */
678
679 static tree
680 internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
681 bool is_formal)
682 {
683 tree t, mod;
684
685 /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
686 can create an INIT_EXPR and convert it into a GIMPLE_CALL below. */
687 gimplify_expr (&val, pre_p, post_p, is_gimple_formal_tmp_or_call_rhs,
688 fb_rvalue);
689
690 t = lookup_tmp_var (val, is_formal);
691
692 if (is_formal)
693 {
694 tree u = find_single_pointer_decl (val);
695
696 if (u && TREE_CODE (u) == VAR_DECL && DECL_BASED_ON_RESTRICT_P (u))
697 u = DECL_GET_RESTRICT_BASE (u);
698 if (u && TYPE_RESTRICT (TREE_TYPE (u)))
699 {
700 if (DECL_BASED_ON_RESTRICT_P (t))
701 gcc_assert (u == DECL_GET_RESTRICT_BASE (t));
702 else
703 {
704 DECL_BASED_ON_RESTRICT_P (t) = 1;
705 SET_DECL_RESTRICT_BASE (t, u);
706 }
707 }
708 }
709
710 if (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
711 || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
712 DECL_GIMPLE_REG_P (t) = 1;
713
714 mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));
715
716 if (EXPR_HAS_LOCATION (val))
717 SET_EXPR_LOCUS (mod, EXPR_LOCUS (val));
718 else
719 SET_EXPR_LOCATION (mod, input_location);
720
721 /* gimplify_modify_expr might want to reduce this further. */
722 gimplify_and_add (mod, pre_p);
723 ggc_free (mod);
724
725 /* If we're gimplifying into ssa, gimplify_modify_expr will have
726 given our temporary an SSA name. Find and return it. */
727 if (gimplify_ctxp->into_ssa)
728 {
729 gimple last = gimple_seq_last_stmt (*pre_p);
730 t = gimple_get_lhs (last);
731 }
732
733 return t;
734 }
735
736 /* Returns a formal temporary variable initialized with VAL. PRE_P
737 points to a sequence where side-effects needed to compute VAL should be
738 stored. */
739
740 tree
741 get_formal_tmp_var (tree val, gimple_seq *pre_p)
742 {
743 return internal_get_tmp_var (val, pre_p, NULL, true);
744 }
745
746 /* Returns a temporary variable initialized with VAL. PRE_P and POST_P
747 are as in gimplify_expr. */
748
749 tree
750 get_initialized_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p)
751 {
752 return internal_get_tmp_var (val, pre_p, post_p, false);
753 }
754
755 /* Declares all the variables in VARS in SCOPE. If DEBUG_INFO is
756 true, generate debug info for them; otherwise don't. */
757
758 void
759 declare_vars (tree vars, gimple scope, bool debug_info)
760 {
761 tree last = vars;
762 if (last)
763 {
764 tree temps, block;
765
766 gcc_assert (gimple_code (scope) == GIMPLE_BIND);
767
768 temps = nreverse (last);
769
770 block = gimple_bind_block (scope);
771 gcc_assert (!block || TREE_CODE (block) == BLOCK);
772 if (!block || !debug_info)
773 {
774 TREE_CHAIN (last) = gimple_bind_vars (scope);
775 gimple_bind_set_vars (scope, temps);
776 }
777 else
778 {
779 /* We need to attach the nodes both to the BIND_EXPR and to its
780 associated BLOCK for debugging purposes. The key point here
781 is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
782 is a subchain of the BIND_EXPR_VARS of the BIND_EXPR. */
783 if (BLOCK_VARS (block))
784 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
785 else
786 {
787 gimple_bind_set_vars (scope,
788 chainon (gimple_bind_vars (scope), temps));
789 BLOCK_VARS (block) = temps;
790 }
791 }
792 }
793 }
794
795 /* For VAR a VAR_DECL of variable size, try to find a constant upper bound
796 for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly. Abort if
797 no such upper bound can be obtained. */
798
799 static void
800 force_constant_size (tree var)
801 {
802 /* The only attempt we make is by querying the maximum size of objects
803 of the variable's type. */
804
805 HOST_WIDE_INT max_size;
806
807 gcc_assert (TREE_CODE (var) == VAR_DECL);
808
809 max_size = max_int_size_in_bytes (TREE_TYPE (var));
810
811 gcc_assert (max_size >= 0);
812
813 DECL_SIZE_UNIT (var)
814 = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
815 DECL_SIZE (var)
816 = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
817 }
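
/* Illustrative example for force_constant_size above, not from the
   original source: for a variable-length object whose type has a maximum
   size of 64 bytes according to max_int_size_in_bytes, DECL_SIZE_UNIT
   becomes the constant 64 and DECL_SIZE becomes 64 * BITS_PER_UNIT,
   i.e. 512 on a target with 8-bit units.  */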
818
819 void
820 gimple_add_tmp_var (tree tmp)
821 {
822 gcc_assert (!TREE_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));
823
824 /* Later processing assumes that the object size is constant, which might
825 not be true at this point. Force the use of a constant upper bound in
826 this case. */
827 if (!host_integerp (DECL_SIZE_UNIT (tmp), 1))
828 force_constant_size (tmp);
829
830 DECL_CONTEXT (tmp) = current_function_decl;
831 DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;
832
833 if (gimplify_ctxp)
834 {
835 TREE_CHAIN (tmp) = gimplify_ctxp->temps;
836 gimplify_ctxp->temps = tmp;
837
838 /* Mark temporaries local within the nearest enclosing parallel. */
839 if (gimplify_omp_ctxp)
840 {
841 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
842 while (ctx && ctx->region_type == ORT_WORKSHARE)
843 ctx = ctx->outer_context;
844 if (ctx)
845 omp_add_variable (ctx, tmp, GOVD_LOCAL | GOVD_SEEN);
846 }
847 }
848 else if (cfun)
849 record_vars (tmp);
850 else
851 {
852 gimple_seq body_seq;
853
854 /* This case is for nested functions. We need to expose the locals
855 they create. */
856 body_seq = gimple_body (current_function_decl);
857 declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
858 }
859 }
860
861 /* Determines whether to assign a location to the statement GS. */
862
863 static bool
864 should_carry_location_p (gimple gs)
865 {
866 /* Don't emit a line note for a label. We particularly don't want to
867 emit one for the break label, since it doesn't actually correspond
868 to the beginning of the loop/switch. */
869 if (gimple_code (gs) == GIMPLE_LABEL)
870 return false;
871
872 return true;
873 }
874
875 /* Same, but for a tree. */
876
877 static bool
878 tree_should_carry_location_p (const_tree stmt)
879 {
880 /* Don't emit a line note for a label. We particularly don't want to
881 emit one for the break label, since it doesn't actually correspond
882 to the beginning of the loop/switch. */
883 if (TREE_CODE (stmt) == LABEL_EXPR)
884 return false;
885
886 /* Do not annotate empty statements, since it confuses gcov. */
887 if (!TREE_SIDE_EFFECTS (stmt))
888 return false;
889
890 return true;
891 }
892
893 /* Return true if a location should not be emitted for this statement
894 by annotate_one_with_location. */
895
896 static inline bool
897 gimple_do_not_emit_location_p (gimple g)
898 {
899 return gimple_plf (g, GF_PLF_1);
900 }
901
902 /* Mark statement G so a location will not be emitted by
903 annotate_one_with_location. */
904
905 static inline void
906 gimple_set_do_not_emit_location (gimple g)
907 {
908 /* The PLF flags are initialized to 0 when a new tuple is created,
909 so no need to initialize it anywhere. */
910 gimple_set_plf (g, GF_PLF_1, true);
911 }
912
913 /* Set the location for gimple statement GS to LOCATION. */
914
915 static void
916 annotate_one_with_location (gimple gs, location_t location)
917 {
918 if (!gimple_has_location (gs)
919 && !gimple_do_not_emit_location_p (gs)
920 && should_carry_location_p (gs))
921 gimple_set_location (gs, location);
922 }
923
924 /* Same, but for tree T. */
925
926 static void
927 tree_annotate_one_with_location (tree t, location_t location)
928 {
929 if (CAN_HAVE_LOCATION_P (t)
930 && ! EXPR_HAS_LOCATION (t) && tree_should_carry_location_p (t))
931 SET_EXPR_LOCATION (t, location);
932 }
933
934
935 /* Set LOCATION for all the statements after iterator GSI in sequence
936 SEQ. If GSI is pointing to the end of the sequence, start with the
937 first statement in SEQ. */
938
939 static void
940 annotate_all_with_location_after (gimple_seq seq, gimple_stmt_iterator gsi,
941 location_t location)
942 {
943 if (gsi_end_p (gsi))
944 gsi = gsi_start (seq);
945 else
946 gsi_next (&gsi);
947
948 for (; !gsi_end_p (gsi); gsi_next (&gsi))
949 annotate_one_with_location (gsi_stmt (gsi), location);
950 }
951
952
953 /* Set the location for all the statements in a sequence STMT_P to LOCATION. */
954
955 void
956 annotate_all_with_location (gimple_seq stmt_p, location_t location)
957 {
958 gimple_stmt_iterator i;
959
960 if (gimple_seq_empty_p (stmt_p))
961 return;
962
963 for (i = gsi_start (stmt_p); !gsi_end_p (i); gsi_next (&i))
964 {
965 gimple gs = gsi_stmt (i);
966 annotate_one_with_location (gs, location);
967 }
968 }
969
970 /* Same, but for statement or statement list in *STMT_P. */
971
972 void
973 tree_annotate_all_with_location (tree *stmt_p, location_t location)
974 {
975 tree_stmt_iterator i;
976
977 if (!*stmt_p)
978 return;
979
980 for (i = tsi_start (*stmt_p); !tsi_end_p (i); tsi_next (&i))
981 {
982 tree t = tsi_stmt (i);
983
984 /* Assuming we've already been gimplified, we shouldn't
985 see nested chaining constructs anymore. */
986 gcc_assert (TREE_CODE (t) != STATEMENT_LIST
987 && TREE_CODE (t) != COMPOUND_EXPR);
988
989 tree_annotate_one_with_location (t, location);
990 }
991 }
992
993
994 /* Similar to copy_tree_r() but do not copy SAVE_EXPR or TARGET_EXPR nodes.
995 These nodes model computations that should only be done once. If we
996 were to unshare something like SAVE_EXPR(i++), the gimplification
997 process would create wrong code. */
998
999 static tree
1000 mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
1001 {
1002 enum tree_code code = TREE_CODE (*tp);
1003 /* Don't unshare types, decls, constants and SAVE_EXPR nodes. */
1004 if (TREE_CODE_CLASS (code) == tcc_type
1005 || TREE_CODE_CLASS (code) == tcc_declaration
1006 || TREE_CODE_CLASS (code) == tcc_constant
1007 || code == SAVE_EXPR || code == TARGET_EXPR
1008 /* We can't do anything sensible with a BLOCK used as an expression,
1009 but we also can't just die when we see it because of non-expression
1010 uses. So just avert our eyes and cross our fingers. Silly Java. */
1011 || code == BLOCK)
1012 *walk_subtrees = 0;
1013 else
1014 {
1015 gcc_assert (code != BIND_EXPR);
1016 copy_tree_r (tp, walk_subtrees, data);
1017 }
1018
1019 return NULL_TREE;
1020 }
1021
1022 /* Callback for walk_tree to unshare most of the shared trees rooted at
1023 *TP. If *TP has been visited already (i.e., TREE_VISITED (*TP) == 1),
1024 then *TP is deep copied by calling copy_tree_r.
1025
1026 This unshares the same trees as copy_tree_r with the exception of
1027 SAVE_EXPR nodes. These nodes model computations that should only be
1028 done once. If we were to unshare something like SAVE_EXPR(i++), the
1029 gimplification process would create wrong code. */
1030
1031 static tree
1032 copy_if_shared_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
1033 void *data ATTRIBUTE_UNUSED)
1034 {
1035 tree t = *tp;
1036 enum tree_code code = TREE_CODE (t);
1037
1038 /* Skip types, decls, and constants. But we do want to look at their
1039 types and the bounds of types. Mark them as visited so we properly
1040 unmark their subtrees on the unmark pass. If we've already seen them,
1041 don't look down further. */
1042 if (TREE_CODE_CLASS (code) == tcc_type
1043 || TREE_CODE_CLASS (code) == tcc_declaration
1044 || TREE_CODE_CLASS (code) == tcc_constant)
1045 {
1046 if (TREE_VISITED (t))
1047 *walk_subtrees = 0;
1048 else
1049 TREE_VISITED (t) = 1;
1050 }
1051
1052 /* If this node has been visited already, unshare it and don't look
1053 any deeper. */
1054 else if (TREE_VISITED (t))
1055 {
1056 walk_tree (tp, mostly_copy_tree_r, NULL, NULL);
1057 *walk_subtrees = 0;
1058 }
1059
1060 /* Otherwise, mark the tree as visited and keep looking. */
1061 else
1062 TREE_VISITED (t) = 1;
1063
1064 return NULL_TREE;
1065 }
1066
1067 static tree
1068 unmark_visited_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
1069 void *data ATTRIBUTE_UNUSED)
1070 {
1071 if (TREE_VISITED (*tp))
1072 TREE_VISITED (*tp) = 0;
1073 else
1074 *walk_subtrees = 0;
1075
1076 return NULL_TREE;
1077 }
1078
1079 /* Unshare all the trees in BODY_P, a pointer into the body of FNDECL, and the
1080 bodies of any nested functions if we are unsharing the entire body of
1081 FNDECL. */
1082
1083 static void
1084 unshare_body (tree *body_p, tree fndecl)
1085 {
1086 struct cgraph_node *cgn = cgraph_node (fndecl);
1087
1088 walk_tree (body_p, copy_if_shared_r, NULL, NULL);
1089 if (body_p == &DECL_SAVED_TREE (fndecl))
1090 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
1091 unshare_body (&DECL_SAVED_TREE (cgn->decl), cgn->decl);
1092 }
1093
1094 /* Likewise, but mark all trees as not visited. */
1095
1096 static void
1097 unvisit_body (tree *body_p, tree fndecl)
1098 {
1099 struct cgraph_node *cgn = cgraph_node (fndecl);
1100
1101 walk_tree (body_p, unmark_visited_r, NULL, NULL);
1102 if (body_p == &DECL_SAVED_TREE (fndecl))
1103 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
1104 unvisit_body (&DECL_SAVED_TREE (cgn->decl), cgn->decl);
1105 }
1106
1107 /* Unconditionally make an unshared copy of EXPR. This is used when using
1108 stored expressions which span multiple functions, such as BINFO_VTABLE,
1109 as the normal unsharing process can't tell that they're shared. */
1110
1111 tree
1112 unshare_expr (tree expr)
1113 {
1114 walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
1115 return expr;
1116 }
1117 \f
1118 /* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
1119 contain statements and have a value. Assign its value to a temporary
1120 and give it void_type_node. Returns the temporary, or NULL_TREE if
1121 WRAPPER was already void. */
1122
1123 tree
1124 voidify_wrapper_expr (tree wrapper, tree temp)
1125 {
1126 tree type = TREE_TYPE (wrapper);
1127 if (type && !VOID_TYPE_P (type))
1128 {
1129 tree *p;
1130
1131 /* Set p to point to the body of the wrapper. Loop until we find
1132 something that isn't a wrapper. */
1133 for (p = &wrapper; p && *p; )
1134 {
1135 switch (TREE_CODE (*p))
1136 {
1137 case BIND_EXPR:
1138 TREE_SIDE_EFFECTS (*p) = 1;
1139 TREE_TYPE (*p) = void_type_node;
1140 /* For a BIND_EXPR, the body is operand 1. */
1141 p = &BIND_EXPR_BODY (*p);
1142 break;
1143
1144 case CLEANUP_POINT_EXPR:
1145 case TRY_FINALLY_EXPR:
1146 case TRY_CATCH_EXPR:
1147 TREE_SIDE_EFFECTS (*p) = 1;
1148 TREE_TYPE (*p) = void_type_node;
1149 p = &TREE_OPERAND (*p, 0);
1150 break;
1151
1152 case STATEMENT_LIST:
1153 {
1154 tree_stmt_iterator i = tsi_last (*p);
1155 TREE_SIDE_EFFECTS (*p) = 1;
1156 TREE_TYPE (*p) = void_type_node;
1157 p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
1158 }
1159 break;
1160
1161 case COMPOUND_EXPR:
1162 /* Advance to the last statement. Set all container types to void. */
1163 for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
1164 {
1165 TREE_SIDE_EFFECTS (*p) = 1;
1166 TREE_TYPE (*p) = void_type_node;
1167 }
1168 break;
1169
1170 default:
1171 goto out;
1172 }
1173 }
1174
1175 out:
1176 if (p == NULL || IS_EMPTY_STMT (*p))
1177 temp = NULL_TREE;
1178 else if (temp)
1179 {
1180 /* The wrapper is on the RHS of an assignment that we're pushing
1181 down. */
1182 gcc_assert (TREE_CODE (temp) == INIT_EXPR
1183 || TREE_CODE (temp) == MODIFY_EXPR);
1184 TREE_OPERAND (temp, 1) = *p;
1185 *p = temp;
1186 }
1187 else
1188 {
1189 temp = create_tmp_var (type, "retval");
1190 *p = build2 (INIT_EXPR, type, temp, *p);
1191 }
1192
1193 return temp;
1194 }
1195
1196 return NULL_TREE;
1197 }
1198
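/* Illustrative example for voidify_wrapper_expr above, not part of the
   original source: given a statement-expression used for its value,

     ({ int i = f (); i + 1; })

   and TEMP == NULL_TREE, the loop descends to the last value-producing
   expression, rewrites it as an initialization of a temporary (a
   hypothetical "retval.0" created by create_tmp_var), gives every
   enclosing wrapper void type, and returns the temporary so the caller
   can use it where the wrapper's value was needed.  When TEMP is an
   INIT_EXPR or MODIFY_EXPR, that assignment is pushed down to the same
   position instead.  */
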
1199 /* Prepare calls to builtins to SAVE and RESTORE the stack as well as
1200 a temporary through which they communicate. */
1201
1202 static void
1203 build_stack_save_restore (gimple *save, gimple *restore)
1204 {
1205 tree tmp_var;
1206
1207 *save = gimple_build_call (implicit_built_in_decls[BUILT_IN_STACK_SAVE], 0);
1208 tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
1209 gimple_call_set_lhs (*save, tmp_var);
1210
1211 *restore = gimple_build_call (implicit_built_in_decls[BUILT_IN_STACK_RESTORE],
1212 1, tmp_var);
1213 }
1214
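/* Illustrative sketch for build_stack_save_restore above, not part of the
   original source: the two calls it builds correspond to

     saved_stack.1 = __builtin_stack_save ();
     __builtin_stack_restore (saved_stack.1);

   (the temporary name is hypothetical).  gimplify_bind_expr wraps the bind
   body in a GIMPLE_TRY_FINALLY whose cleanup is the restore call, so the
   stack pointer is reset however the body is left.  */
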
1215 /* Gimplify a BIND_EXPR. Just voidify and recurse. */
1216
1217 static enum gimplify_status
1218 gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
1219 {
1220 tree bind_expr = *expr_p;
1221 bool old_save_stack = gimplify_ctxp->save_stack;
1222 tree t;
1223 gimple gimple_bind;
1224 gimple_seq body;
1225
1226 tree temp = voidify_wrapper_expr (bind_expr, NULL);
1227
1228 /* Mark variables seen in this bind expr. */
1229 for (t = BIND_EXPR_VARS (bind_expr); t ; t = TREE_CHAIN (t))
1230 {
1231 if (TREE_CODE (t) == VAR_DECL)
1232 {
1233 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
1234
1235 /* Mark variable as local. */
1236 if (ctx && !is_global_var (t)
1237 && (! DECL_SEEN_IN_BIND_EXPR_P (t)
1238 || splay_tree_lookup (ctx->variables,
1239 (splay_tree_key) t) == NULL))
1240 omp_add_variable (gimplify_omp_ctxp, t, GOVD_LOCAL | GOVD_SEEN);
1241
1242 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
1243 }
1244
1245 /* Preliminarily mark non-addressed complex variables as eligible
1246 for promotion to gimple registers. We'll transform their uses
1247 as we find them. */
1248 if ((TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
1249 || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
1250 && !TREE_THIS_VOLATILE (t)
1251 && (TREE_CODE (t) == VAR_DECL && !DECL_HARD_REGISTER (t))
1252 && !needs_to_live_in_memory (t))
1253 DECL_GIMPLE_REG_P (t) = 1;
1254 }
1255
1256 gimple_bind = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
1257 BIND_EXPR_BLOCK (bind_expr));
1258 gimple_push_bind_expr (gimple_bind);
1259
1260 gimplify_ctxp->save_stack = false;
1261
1262 /* Gimplify the body into the GIMPLE_BIND tuple's body. */
1263 body = NULL;
1264 gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
1265 gimple_bind_set_body (gimple_bind, body);
1266
1267 if (gimplify_ctxp->save_stack)
1268 {
1269 gimple stack_save, stack_restore, gs;
1270 gimple_seq cleanup, new_body;
1271
1272 /* Save stack on entry and restore it on exit. Add a try_finally
1273 block to achieve this. Note that mudflap depends on the
1274 format of the emitted code: see mx_register_decls(). */
1275 build_stack_save_restore (&stack_save, &stack_restore);
1276
1277 cleanup = new_body = NULL;
1278 gimplify_seq_add_stmt (&cleanup, stack_restore);
1279 gs = gimple_build_try (gimple_bind_body (gimple_bind), cleanup,
1280 GIMPLE_TRY_FINALLY);
1281
1282 gimplify_seq_add_stmt (&new_body, stack_save);
1283 gimplify_seq_add_stmt (&new_body, gs);
1284 gimple_bind_set_body (gimple_bind, new_body);
1285 }
1286
1287 gimplify_ctxp->save_stack = old_save_stack;
1288 gimple_pop_bind_expr ();
1289
1290 gimplify_seq_add_stmt (pre_p, gimple_bind);
1291
1292 if (temp)
1293 {
1294 *expr_p = temp;
1295 return GS_OK;
1296 }
1297
1298 *expr_p = NULL_TREE;
1299 return GS_ALL_DONE;
1300 }
1301
1302 /* Gimplify a RETURN_EXPR. If the expression to be returned is not a
1303 GIMPLE value, it is assigned to a new temporary and the statement is
1304 re-written to return the temporary.
1305
1306 PRE_P points to the sequence where side effects that must happen before
1307 STMT should be stored. */
1308
1309 static enum gimplify_status
1310 gimplify_return_expr (tree stmt, gimple_seq *pre_p)
1311 {
1312 gimple ret;
1313 tree ret_expr = TREE_OPERAND (stmt, 0);
1314 tree result_decl, result;
1315
1316 if (ret_expr == error_mark_node)
1317 return GS_ERROR;
1318
1319 if (!ret_expr
1320 || TREE_CODE (ret_expr) == RESULT_DECL
1321 || ret_expr == error_mark_node)
1322 {
1323 gimple ret = gimple_build_return (ret_expr);
1324 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1325 gimplify_seq_add_stmt (pre_p, ret);
1326 return GS_ALL_DONE;
1327 }
1328
1329 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
1330 result_decl = NULL_TREE;
1331 else
1332 {
1333 result_decl = TREE_OPERAND (ret_expr, 0);
1334
1335 /* See through a return by reference. */
1336 if (TREE_CODE (result_decl) == INDIRECT_REF)
1337 result_decl = TREE_OPERAND (result_decl, 0);
1338
1339 gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
1340 || TREE_CODE (ret_expr) == INIT_EXPR)
1341 && TREE_CODE (result_decl) == RESULT_DECL);
1342 }
1343
1344 /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
1345 Recall that aggregate_value_p is FALSE for any aggregate type that is
1346 returned in registers. If we're returning values in registers, then
1347 we don't want to extend the lifetime of the RESULT_DECL, particularly
1348 across another call. In addition, for those aggregates for which
1349 hard_function_value generates a PARALLEL, we'll die during normal
1350 expansion of structure assignments; there's special code in expand_return
1351 to handle this case that does not exist in expand_expr. */
1352 if (!result_decl
1353 || aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
1354 result = result_decl;
1355 else if (gimplify_ctxp->return_temp)
1356 result = gimplify_ctxp->return_temp;
1357 else
1358 {
1359 result = create_tmp_var (TREE_TYPE (result_decl), NULL);
1360 if (TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
1361 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
1362 DECL_GIMPLE_REG_P (result) = 1;
1363
1364 /* ??? With complex control flow (usually involving abnormal edges),
1365 we can wind up warning about an uninitialized value for this. Due
1366 to how this variable is constructed and initialized, this is never
1367 true. Give up and never warn. */
1368 TREE_NO_WARNING (result) = 1;
1369
1370 gimplify_ctxp->return_temp = result;
1371 }
1372
1373 /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
1374 Then gimplify the whole thing. */
1375 if (result != result_decl)
1376 TREE_OPERAND (ret_expr, 0) = result;
1377
1378 gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);
1379
1380 ret = gimple_build_return (result);
1381 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1382 gimplify_seq_add_stmt (pre_p, ret);
1383
1384 return GS_ALL_DONE;
1385 }
1386
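/* Illustrative example for gimplify_return_expr above, not part of the
   original source: in a function returning int,

     return a + b;

   typically arrives as RETURN_EXPR <MODIFY_EXPR <RESULT_DECL, a + b>>.
   Since int is returned in registers, the RESULT_DECL is replaced by a
   shared temporary cached in gimplify_ctxp->return_temp, giving roughly

     D.2000 = a + b;
     return D.2000;

   where D.2000 is a hypothetical name.  An aggregate returned in memory
   keeps the bare RESULT_DECL instead.  */
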
1387 static void
1388 gimplify_vla_decl (tree decl, gimple_seq *seq_p)
1389 {
1390 /* This is a variable-sized decl. Simplify its size and mark it
1391 for deferred expansion. Note that mudflap depends on the format
1392 of the emitted code: see mx_register_decls(). */
1393 tree t, addr, ptr_type;
1394
1395 gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
1396 gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);
1397
1398 /* All occurrences of this decl in final gimplified code will be
1399 replaced by indirection. Setting DECL_VALUE_EXPR does two
1400 things: First, it lets the rest of the gimplifier know what
1401 replacement to use. Second, it lets the debug info know
1402 where to find the value. */
1403 ptr_type = build_pointer_type (TREE_TYPE (decl));
1404 addr = create_tmp_var (ptr_type, get_name (decl));
1405 DECL_IGNORED_P (addr) = 0;
1406 t = build_fold_indirect_ref (addr);
1407 SET_DECL_VALUE_EXPR (decl, t);
1408 DECL_HAS_VALUE_EXPR_P (decl) = 1;
1409
1410 t = built_in_decls[BUILT_IN_ALLOCA];
1411 t = build_call_expr (t, 1, DECL_SIZE_UNIT (decl));
1412 t = fold_convert (ptr_type, t);
1413 t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
1414
1415 gimplify_and_add (t, seq_p);
1416
1417 /* Indicate that we need to restore the stack level when the
1418 enclosing BIND_EXPR is exited. */
1419 gimplify_ctxp->save_stack = true;
1420 }
1421
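/* Illustrative example for gimplify_vla_decl above, not part of the
   original source: for "char buf[n];" the decl is given a DECL_VALUE_EXPR
   of *buf.2 (a hypothetical pointer temporary), and the sequence gains
   roughly

     D.2001 = (sizetype) n;
     buf.2 = __builtin_alloca (D.2001);

   Later references to buf gimplify to *buf.2, and save_stack is set so
   the enclosing GIMPLE_BIND restores the stack level on exit.  */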
1422
1423 /* Gimplifies a DECL_EXPR node *STMT_P by making any necessary allocation
1424 and initialization explicit. */
1425
1426 static enum gimplify_status
1427 gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
1428 {
1429 tree stmt = *stmt_p;
1430 tree decl = DECL_EXPR_DECL (stmt);
1431
1432 *stmt_p = NULL_TREE;
1433
1434 if (TREE_TYPE (decl) == error_mark_node)
1435 return GS_ERROR;
1436
1437 if ((TREE_CODE (decl) == TYPE_DECL
1438 || TREE_CODE (decl) == VAR_DECL)
1439 && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
1440 gimplify_type_sizes (TREE_TYPE (decl), seq_p);
1441
1442 if (TREE_CODE (decl) == VAR_DECL && !DECL_EXTERNAL (decl))
1443 {
1444 tree init = DECL_INITIAL (decl);
1445
1446 if (TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
1447 || (!TREE_STATIC (decl)
1448 && flag_stack_check == GENERIC_STACK_CHECK
1449 && compare_tree_int (DECL_SIZE_UNIT (decl),
1450 STACK_CHECK_MAX_VAR_SIZE) > 0))
1451 gimplify_vla_decl (decl, seq_p);
1452
1453 if (init && init != error_mark_node)
1454 {
1455 if (!TREE_STATIC (decl))
1456 {
1457 DECL_INITIAL (decl) = NULL_TREE;
1458 init = build2 (INIT_EXPR, void_type_node, decl, init);
1459 gimplify_and_add (init, seq_p);
1460 ggc_free (init);
1461 }
1462 else
1463 /* We must still examine initializers for static variables
1464 as they may contain a label address. */
1465 walk_tree (&init, force_labels_r, NULL, NULL);
1466 }
1467
1468 /* Some front ends do not explicitly declare all anonymous
1469 artificial variables. We compensate here by declaring the
1470 variables, though it would be better if the front ends would
1471 explicitly declare them. */
1472 if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
1473 && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
1474 gimple_add_tmp_var (decl);
1475 }
1476
1477 return GS_ALL_DONE;
1478 }
1479
1480 /* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body
1481 and replacing the LOOP_EXPR with goto, but if the loop contains an
1482 EXIT_EXPR, we need to append a label for it to jump to. */
1483
1484 static enum gimplify_status
1485 gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
1486 {
1487 tree saved_label = gimplify_ctxp->exit_label;
1488 tree start_label = create_artificial_label ();
1489
1490 gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));
1491
1492 gimplify_ctxp->exit_label = NULL_TREE;
1493
1494 gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);
1495
1496 gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));
1497
1498 if (gimplify_ctxp->exit_label)
1499 gimplify_seq_add_stmt (pre_p, gimple_build_label (gimplify_ctxp->exit_label));
1500
1501 gimplify_ctxp->exit_label = saved_label;
1502
1503 *expr_p = NULL;
1504 return GS_ALL_DONE;
1505 }
1506
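/* Illustrative example for gimplify_loop_expr above, not part of the
   original source: a LOOP_EXPR whose body is

     { if (i >= n) exit; i = i + 1; }      (the exit is an EXIT_EXPR)

   gimplifies to roughly

     start.3:
       if (i >= n) goto exit.4;
       i = i + 1;
       goto start.3;
     exit.4:

   The label names are hypothetical; the exit label is created lazily by
   build_and_jump through gimplify_ctxp->exit_label and is emitted only
   if an EXIT_EXPR was actually seen.  */
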
1507 /* Gimplifies a statement list onto a sequence. These may be created either
1508 by an enlightened front-end, or by shortcut_cond_expr. */
1509
1510 static enum gimplify_status
1511 gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
1512 {
1513 tree temp = voidify_wrapper_expr (*expr_p, NULL);
1514
1515 tree_stmt_iterator i = tsi_start (*expr_p);
1516
1517 while (!tsi_end_p (i))
1518 {
1519 gimplify_stmt (tsi_stmt_ptr (i), pre_p);
1520 tsi_delink (&i);
1521 }
1522
1523 if (temp)
1524 {
1525 *expr_p = temp;
1526 return GS_OK;
1527 }
1528
1529 return GS_ALL_DONE;
1530 }
1531
1532 /* Compare two case labels. Because the front end should already have
1533 made sure that case ranges do not overlap, it is enough to only compare
1534 the CASE_LOW values of each case label. */
1535
1536 static int
1537 compare_case_labels (const void *p1, const void *p2)
1538 {
1539 const_tree const case1 = *(const_tree const*)p1;
1540 const_tree const case2 = *(const_tree const*)p2;
1541
1542 /* The 'default' case label always goes first. */
1543 if (!CASE_LOW (case1))
1544 return -1;
1545 else if (!CASE_LOW (case2))
1546 return 1;
1547 else
1548 return tree_int_cst_compare (CASE_LOW (case1), CASE_LOW (case2));
1549 }
1550
1551
1552 /* Sort the case labels in LABEL_VEC in place in ascending order. */
1553
1554 void
1555 sort_case_labels (VEC(tree,heap)* label_vec)
1556 {
1557 size_t len = VEC_length (tree, label_vec);
1558 qsort (VEC_address (tree, label_vec), len, sizeof (tree),
1559 compare_case_labels);
1560 }
1561
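/* Illustrative example for compare_case_labels and sort_case_labels
   above, not part of the original source: a label vector collected as

     case 7:, case 2:, default:, case 5:

   sorts to

     default:, case 2:, case 5:, case 7:

   because a label with no CASE_LOW (the default) always compares first
   and the remaining labels are ordered by tree_int_cst_compare on their
   CASE_LOW values.  */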
1562
1563 /* Gimplify a SWITCH_EXPR, and collect a TREE_VEC of the labels it can
1564 branch to. */
1565
1566 static enum gimplify_status
1567 gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
1568 {
1569 tree switch_expr = *expr_p;
1570 gimple_seq switch_body_seq = NULL;
1571 enum gimplify_status ret;
1572
1573 ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
1574 fb_rvalue);
1575 if (ret == GS_ERROR || ret == GS_UNHANDLED)
1576 return ret;
1577
1578 if (SWITCH_BODY (switch_expr))
1579 {
1580 VEC (tree,heap) *labels;
1581 VEC (tree,heap) *saved_labels;
1582 tree default_case = NULL_TREE;
1583 size_t i, len;
1584 gimple gimple_switch;
1585
1586 /* If someone can be bothered to fill in the labels, they can
1587 be bothered to null out the body too. */
1588 gcc_assert (!SWITCH_LABELS (switch_expr));
1589
1590 /* save old labels, get new ones from body, then restore the old
1591 labels. Save all the things from the switch body to append after. */
1592 saved_labels = gimplify_ctxp->case_labels;
1593 gimplify_ctxp->case_labels = VEC_alloc (tree, heap, 8);
1594
1595 gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);
1596 labels = gimplify_ctxp->case_labels;
1597 gimplify_ctxp->case_labels = saved_labels;
1598
1599 i = 0;
1600 while (i < VEC_length (tree, labels))
1601 {
1602 tree elt = VEC_index (tree, labels, i);
1603 tree low = CASE_LOW (elt);
1604 bool remove_element = FALSE;
1605
1606 if (low)
1607 {
1608 /* Discard empty ranges. */
1609 tree high = CASE_HIGH (elt);
1610 if (high && tree_int_cst_lt (high, low))
1611 remove_element = TRUE;
1612 }
1613 else
1614 {
1615 /* The default case must be the last label in the list. */
1616 gcc_assert (!default_case);
1617 default_case = elt;
1618 remove_element = TRUE;
1619 }
1620
1621 if (remove_element)
1622 VEC_ordered_remove (tree, labels, i);
1623 else
1624 i++;
1625 }
1626 len = i;
1627
1628 if (!default_case)
1629 {
1630 gimple new_default;
1631
1632 /* If the switch has no default label, add one, so that we jump
1633 around the switch body. */
1634 default_case = build3 (CASE_LABEL_EXPR, void_type_node, NULL_TREE,
1635 NULL_TREE, create_artificial_label ());
1636 new_default = gimple_build_label (CASE_LABEL (default_case));
1637 gimplify_seq_add_stmt (&switch_body_seq, new_default);
1638 }
1639
1640 if (!VEC_empty (tree, labels))
1641 sort_case_labels (labels);
1642
1643 gimple_switch = gimple_build_switch_vec (SWITCH_COND (switch_expr),
1644 default_case, labels);
1645 gimplify_seq_add_stmt (pre_p, gimple_switch);
1646 gimplify_seq_add_seq (pre_p, switch_body_seq);
1647 VEC_free(tree, heap, labels);
1648 }
1649 else
1650 gcc_assert (SWITCH_LABELS (switch_expr));
1651
1652 return GS_ALL_DONE;
1653 }
1654
1655
1656 static enum gimplify_status
1657 gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
1658 {
1659 struct gimplify_ctx *ctxp;
1660 gimple gimple_label;
1661
1662 /* Invalid OpenMP programs can play Duff's Device type games with
1663 #pragma omp parallel. At least in the C front end, we don't
1664 detect such invalid branches until after gimplification. */
1665 for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
1666 if (ctxp->case_labels)
1667 break;
1668
1669 gimple_label = gimple_build_label (CASE_LABEL (*expr_p));
1670 VEC_safe_push (tree, heap, ctxp->case_labels, *expr_p);
1671 gimplify_seq_add_stmt (pre_p, gimple_label);
1672
1673 return GS_ALL_DONE;
1674 }
1675
1676 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
1677 if necessary. */
1678
1679 tree
1680 build_and_jump (tree *label_p)
1681 {
1682 if (label_p == NULL)
1683 /* If there's nowhere to jump, just fall through. */
1684 return NULL_TREE;
1685
1686 if (*label_p == NULL_TREE)
1687 {
1688 tree label = create_artificial_label ();
1689 *label_p = label;
1690 }
1691
1692 return build1 (GOTO_EXPR, void_type_node, *label_p);
1693 }
1694
1695 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
1696 This also involves building a label to jump to and communicating it to
1697 gimplify_loop_expr through gimplify_ctxp->exit_label. */
1698
1699 static enum gimplify_status
1700 gimplify_exit_expr (tree *expr_p)
1701 {
1702 tree cond = TREE_OPERAND (*expr_p, 0);
1703 tree expr;
1704
1705 expr = build_and_jump (&gimplify_ctxp->exit_label);
1706 expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
1707 *expr_p = expr;
1708
1709 return GS_OK;
1710 }
1711
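/* Illustrative example for gimplify_exit_expr above, not part of the
   original source:

     EXIT_EXPR <i >= n>

   is rewritten in place to

     COND_EXPR <i >= n, GOTO_EXPR <exit.4>, NULL_TREE>

   where the label (hypothetical name) is the shared one stored in
   gimplify_ctxp->exit_label, so the enclosing gimplify_loop_expr can emit
   it after the loop body.  */
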
1712 /* A helper function to be called via walk_tree. Mark all labels under *TP
1713 as being forced. To be called for DECL_INITIAL of static variables. */
1714
1715 tree
1716 force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1717 {
1718 if (TYPE_P (*tp))
1719 *walk_subtrees = 0;
1720 if (TREE_CODE (*tp) == LABEL_DECL)
1721 FORCED_LABEL (*tp) = 1;
1722
1723 return NULL_TREE;
1724 }
1725
1726 /* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is
1727 different from its canonical type, wrap the whole thing inside a
1728 NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
1729 type.
1730
1731 The canonical type of a COMPONENT_REF is the type of the field being
1732 referenced--unless the field is a bit-field which can be read directly
1733 in a smaller mode, in which case the canonical type is the
1734 sign-appropriate type corresponding to that mode. */
1735
1736 static void
1737 canonicalize_component_ref (tree *expr_p)
1738 {
1739 tree expr = *expr_p;
1740 tree type;
1741
1742 gcc_assert (TREE_CODE (expr) == COMPONENT_REF);
1743
1744 if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
1745 type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
1746 else
1747 type = TREE_TYPE (TREE_OPERAND (expr, 1));
1748
1749 /* One could argue that all the stuff below is not necessary for
1750 the non-bitfield case and declare it a FE error if type
1751 adjustment would be needed. */
1752 if (TREE_TYPE (expr) != type)
1753 {
1754 #ifdef ENABLE_TYPES_CHECKING
1755 tree old_type = TREE_TYPE (expr);
1756 #endif
1757 int type_quals;
1758
1759 /* We need to preserve qualifiers and propagate them from
1760 operand 0. */
1761 type_quals = TYPE_QUALS (type)
1762 | TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
1763 if (TYPE_QUALS (type) != type_quals)
1764 type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);
1765
1766 /* Set the type of the COMPONENT_REF to the underlying type. */
1767 TREE_TYPE (expr) = type;
1768
1769 #ifdef ENABLE_TYPES_CHECKING
1770 /* It is now a FE error if the conversion from the canonical
1771 type to the original expression type is not useless. */
1772 gcc_assert (useless_type_conversion_p (old_type, type));
1773 #endif
1774 }
1775 }
1776
1777 /* If a NOP conversion is changing a pointer to array of foo to a pointer
1778 to foo, embed that change in the ADDR_EXPR by converting
1779 T array[U];
1780 (T *)&array
1781 ==>
1782 &array[L]
1783 where L is the lower bound. For simplicity, only do this for constant
1784 lower bound.
1785 The constraint is that the type of &array[L] is trivially convertible
1786 to T *. */
1787
1788 static void
1789 canonicalize_addr_expr (tree *expr_p)
1790 {
1791 tree expr = *expr_p;
1792 tree addr_expr = TREE_OPERAND (expr, 0);
1793 tree datype, ddatype, pddatype;
1794
1795 /* We simplify only conversions from an ADDR_EXPR to a pointer type. */
1796 if (!POINTER_TYPE_P (TREE_TYPE (expr))
1797 || TREE_CODE (addr_expr) != ADDR_EXPR)
1798 return;
1799
1800 /* The addr_expr type should be a pointer to an array. */
1801 datype = TREE_TYPE (TREE_TYPE (addr_expr));
1802 if (TREE_CODE (datype) != ARRAY_TYPE)
1803 return;
1804
1805 /* The pointer to element type shall be trivially convertible to
1806 the expression pointer type. */
1807 ddatype = TREE_TYPE (datype);
1808 pddatype = build_pointer_type (ddatype);
1809 if (!useless_type_conversion_p (TREE_TYPE (expr), pddatype))
1810 return;
1811
1812 /* The lower bound and element sizes must be constant. */
1813 if (!TYPE_SIZE_UNIT (ddatype)
1814 || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
1815 || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
1816 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
1817 return;
1818
1819 /* All checks succeeded. Build a new node to merge the cast. */
1820 *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
1821 TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
1822 NULL_TREE, NULL_TREE);
1823 *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);
1824 }
1825
1826 /* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions
1827 underneath as appropriate. */
1828
1829 static enum gimplify_status
1830 gimplify_conversion (tree *expr_p)
1831 {
1832 tree tem;
1833 gcc_assert (CONVERT_EXPR_P (*expr_p));
1834
1835 /* Then strip away all but the outermost conversion. */
1836 STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));
1837
1838 /* And remove the outermost conversion if it's useless. */
1839 if (tree_ssa_useless_type_conversion (*expr_p))
1840 *expr_p = TREE_OPERAND (*expr_p, 0);
1841
1842 /* Attempt to avoid a NOP_EXPR by producing a reference to a subtype.
1843 For example, this folds (subclass *)&A into &A->subclass, avoiding
1844 the need for a separate statement. */
1845 if (CONVERT_EXPR_P (*expr_p)
1846 && POINTER_TYPE_P (TREE_TYPE (*expr_p))
1847 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (*expr_p, 0)))
1848 && (tem = maybe_fold_offset_to_address
1849 (TREE_OPERAND (*expr_p, 0),
1850 integer_zero_node, TREE_TYPE (*expr_p))) != NULL_TREE)
1851 *expr_p = tem;
1852
1853 /* If we still have a conversion at the toplevel,
1854 then canonicalize some constructs. */
1855 if (CONVERT_EXPR_P (*expr_p))
1856 {
1857 tree sub = TREE_OPERAND (*expr_p, 0);
1858
1859 /* If a NOP conversion is changing the type of a COMPONENT_REF
1860 expression, then canonicalize its type now in order to expose more
1861 redundant conversions. */
1862 if (TREE_CODE (sub) == COMPONENT_REF)
1863 canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));
1864
1865 /* If a NOP conversion is changing a pointer to array of foo
1866 to a pointer to foo, embed that change in the ADDR_EXPR. */
1867 else if (TREE_CODE (sub) == ADDR_EXPR)
1868 canonicalize_addr_expr (expr_p);
1869 }
1870
1871 /* If we have a conversion to a non-register type force the
1872 use of a VIEW_CONVERT_EXPR instead. */
1873 if (!is_gimple_reg_type (TREE_TYPE (*expr_p)))
1874 *expr_p = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
1875 TREE_OPERAND (*expr_p, 0));
1876
1877 return GS_OK;
1878 }
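
/* Illustrative sketch (added for exposition): with a layout such as
     struct B { int i; };
     struct D { struct B b; int j; };
     struct D d;
   a cast like (struct B *) &d can be folded by
   maybe_fold_offset_to_address into &d.b, so the conversion disappears
   instead of surviving as a separate NOP_EXPR statement.  The types B
   and D are hypothetical.  */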
1879
1880 /* Gimplify a VAR_DECL or PARM_DECL. Returns GS_OK if we expanded a
1881 DECL_VALUE_EXPR, and it's worth re-examining things. */
1882
1883 static enum gimplify_status
1884 gimplify_var_or_parm_decl (tree *expr_p)
1885 {
1886 tree decl = *expr_p;
1887
1888 /* ??? If this is a local variable, and it has not been seen in any
1889 outer BIND_EXPR, then it's probably the result of a duplicate
1890 declaration, for which we've already issued an error. It would
1891 be really nice if the front end wouldn't leak these at all.
1892 Currently the only known culprit is C++ destructors, as seen
1893 in g++.old-deja/g++.jason/binding.C. */
1894 if (TREE_CODE (decl) == VAR_DECL
1895 && !DECL_SEEN_IN_BIND_EXPR_P (decl)
1896 && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
1897 && decl_function_context (decl) == current_function_decl)
1898 {
1899 gcc_assert (errorcount || sorrycount);
1900 return GS_ERROR;
1901 }
1902
1903 /* When within an OpenMP context, notice uses of variables. */
1904 if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
1905 return GS_ALL_DONE;
1906
1907 /* If the decl is an alias for another expression, substitute it now. */
1908 if (DECL_HAS_VALUE_EXPR_P (decl))
1909 {
1910 *expr_p = unshare_expr (DECL_VALUE_EXPR (decl));
1911 return GS_OK;
1912 }
1913
1914 return GS_ALL_DONE;
1915 }
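
/* Illustrative sketch (an assumption about how front ends use
   DECL_VALUE_EXPR, e.g. for variable-length arrays): in
     void f (int n) { int a[n]; a[0] = 1; }
   the declaration of A may carry a DECL_VALUE_EXPR of the form *A.1,
   where A.1 is a pointer temporary; the substitution above rewrites
   every use of A into *A.1 so the rest of gimplification only deals
   with the pointer.  The name A.1 is hypothetical.  */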
1916
1917
1918 /* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
1919 node *EXPR_P.
1920
1921 compound_lval
1922 : min_lval '[' val ']'
1923 | min_lval '.' ID
1924 | compound_lval '[' val ']'
1925 | compound_lval '.' ID
1926
1927 This is not part of the original SIMPLE definition, which separates
1928 array and member references, but it seems reasonable to handle them
1929 together. Also, this way we don't run into problems with union
1930 aliasing; gcc requires that for accesses through a union to alias, the
1931 union reference must be explicit, which was not always the case when we
1932 were splitting up array and member refs.
1933
1934 PRE_P points to the sequence where side effects that must happen before
1935 *EXPR_P should be stored.
1936
1937 POST_P points to the sequence where side effects that must happen after
1938 *EXPR_P should be stored. */
1939
1940 static enum gimplify_status
1941 gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
1942 fallback_t fallback)
1943 {
1944 tree *p;
1945 VEC(tree,heap) *stack;
1946 enum gimplify_status ret = GS_OK, tret;
1947 int i;
1948
1949 /* Create a stack of the subexpressions so later we can walk them in
1950 order from inner to outer. */
1951 stack = VEC_alloc (tree, heap, 10);
1952
1953 /* We can handle anything that get_inner_reference can deal with. */
1954 for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
1955 {
1956 restart:
1957 /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs. */
1958 if (TREE_CODE (*p) == INDIRECT_REF)
1959 *p = fold_indirect_ref (*p);
1960
1961 if (handled_component_p (*p))
1962 ;
1963 /* Expand DECL_VALUE_EXPR now. In some cases that may expose
1964 additional COMPONENT_REFs. */
1965 else if ((TREE_CODE (*p) == VAR_DECL || TREE_CODE (*p) == PARM_DECL)
1966 && gimplify_var_or_parm_decl (p) == GS_OK)
1967 goto restart;
1968 else
1969 break;
1970
1971 VEC_safe_push (tree, heap, stack, *p);
1972 }
1973
1974 gcc_assert (VEC_length (tree, stack));
1975
1976 /* Now STACK is a stack of pointers to all the refs we've walked through
1977 and P points to the innermost expression.
1978
1979 Java requires that we elaborate nodes in source order. That
1980 means we must gimplify the inner expression followed by each of
1981 the indices, in order. But we can't gimplify the inner
1982 expression until we deal with any variable bounds, sizes, or
1983 positions in order to deal with PLACEHOLDER_EXPRs.
1984
1985 So we do this in three steps. First we deal with the annotations
1986 for any variables in the components, then we gimplify the base,
1987 then we gimplify any indices, from left to right. */
1988 for (i = VEC_length (tree, stack) - 1; i >= 0; i--)
1989 {
1990 tree t = VEC_index (tree, stack, i);
1991
1992 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
1993 {
1994 /* Gimplify the low bound and element type size and put them into
1995 the ARRAY_REF. If these values are set, they have already been
1996 gimplified. */
1997 if (TREE_OPERAND (t, 2) == NULL_TREE)
1998 {
1999 tree low = unshare_expr (array_ref_low_bound (t));
2000 if (!is_gimple_min_invariant (low))
2001 {
2002 TREE_OPERAND (t, 2) = low;
2003 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
2004 post_p, is_gimple_formal_tmp_reg,
2005 fb_rvalue);
2006 ret = MIN (ret, tret);
2007 }
2008 }
2009
2010 if (!TREE_OPERAND (t, 3))
2011 {
2012 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
2013 tree elmt_size = unshare_expr (array_ref_element_size (t));
2014 tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));
2015
2016 /* Divide the element size by the alignment of the element
2017 type (above). */
2018 elmt_size = size_binop (EXACT_DIV_EXPR, elmt_size, factor);
2019
2020 if (!is_gimple_min_invariant (elmt_size))
2021 {
2022 TREE_OPERAND (t, 3) = elmt_size;
2023 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
2024 post_p, is_gimple_formal_tmp_reg,
2025 fb_rvalue);
2026 ret = MIN (ret, tret);
2027 }
2028 }
2029 }
2030 else if (TREE_CODE (t) == COMPONENT_REF)
2031 {
2032 /* Set the field offset into T and gimplify it. */
2033 if (!TREE_OPERAND (t, 2))
2034 {
2035 tree offset = unshare_expr (component_ref_field_offset (t));
2036 tree field = TREE_OPERAND (t, 1);
2037 tree factor
2038 = size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);
2039
2040 /* Divide the offset by its alignment. */
2041 offset = size_binop (EXACT_DIV_EXPR, offset, factor);
2042
2043 if (!is_gimple_min_invariant (offset))
2044 {
2045 TREE_OPERAND (t, 2) = offset;
2046 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
2047 post_p, is_gimple_formal_tmp_reg,
2048 fb_rvalue);
2049 ret = MIN (ret, tret);
2050 }
2051 }
2052 }
2053 }
2054
2055 /* Step 2 is to gimplify the base expression. Make sure lvalue is set
2056 so as to match the min_lval predicate. Failure to do so may result
2057 in the creation of large aggregate temporaries. */
2058 tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
2059 fallback | fb_lvalue);
2060 ret = MIN (ret, tret);
2061
2062 /* And finally, the indices and operands to BIT_FIELD_REF. During this
2063 loop we also remove any useless conversions. */
2064 for (; VEC_length (tree, stack) > 0; )
2065 {
2066 tree t = VEC_pop (tree, stack);
2067
2068 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
2069 {
2070 /* Gimplify the dimension.
2071 Temporary fix for gcc.c-torture/execute/20040313-1.c.
2072 Gimplify non-constant array indices into a temporary
2073 variable.
2074 FIXME - The real fix is to gimplify post-modify
2075 expressions into a minimal gimple lvalue. However, that
2076 exposes bugs in alias analysis. The alias analyzer does
2077 not handle &PTR->FIELD very well. Will fix after the
2078 branch is merged into mainline (dnovillo 2004-05-03). */
2079 if (!is_gimple_min_invariant (TREE_OPERAND (t, 1)))
2080 {
2081 tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
2082 is_gimple_formal_tmp_reg, fb_rvalue);
2083 ret = MIN (ret, tret);
2084 }
2085 }
2086 else if (TREE_CODE (t) == BIT_FIELD_REF)
2087 {
2088 tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
2089 is_gimple_val, fb_rvalue);
2090 ret = MIN (ret, tret);
2091 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
2092 is_gimple_val, fb_rvalue);
2093 ret = MIN (ret, tret);
2094 }
2095
2096 STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));
2097
2098 /* The innermost expression P may have originally had
2099 TREE_SIDE_EFFECTS set which would have caused all the outer
2100 expressions in *EXPR_P leading to P to also have had
2101 TREE_SIDE_EFFECTS set. */
2102 recalculate_side_effects (t);
2103 }
2104
2105 /* If the outermost expression is a COMPONENT_REF, canonicalize its type. */
2106 if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
2107 {
2108 canonicalize_component_ref (expr_p);
2109 ret = MIN (ret, GS_OK);
2110 }
2111
2112 VEC_free (tree, heap, stack);
2113
2114 return ret;
2115 }
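
/* Illustrative sketch (added for exposition): for a reference such as
     p->v[i + 1]
   with P a pointer to a struct containing an array V, the chain of
   refs is collected on the stack, the base P->V is gimplified as an
   lvalue, and the non-constant index is pulled out, giving roughly
     D.1 = i + 1;
     ... p->v[D.1] ...
   The temporary name D.1 is hypothetical.  */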
2116
2117 /* Gimplify the self modifying expression pointed to by EXPR_P
2118 (++, --, +=, -=).
2119
2120 PRE_P points to the list where side effects that must happen before
2121 *EXPR_P should be stored.
2122
2123 POST_P points to the list where side effects that must happen after
2124 *EXPR_P should be stored.
2125
2126 WANT_VALUE is nonzero iff we want to use the value of this expression
2127 in another expression. */
2128
2129 static enum gimplify_status
2130 gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
2131 bool want_value)
2132 {
2133 enum tree_code code;
2134 tree lhs, lvalue, rhs, t1;
2135 gimple_seq post = NULL, *orig_post_p = post_p;
2136 bool postfix;
2137 enum tree_code arith_code;
2138 enum gimplify_status ret;
2139
2140 code = TREE_CODE (*expr_p);
2141
2142 gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
2143 || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);
2144
2145 /* Prefix or postfix? */
2146 if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
2147 /* Faster to treat as prefix if result is not used. */
2148 postfix = want_value;
2149 else
2150 postfix = false;
2151
2152 /* For postfix, make sure the inner expression's post side effects
2153 are executed after side effects from this expression. */
2154 if (postfix)
2155 post_p = &post;
2156
2157 /* Add or subtract? */
2158 if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
2159 arith_code = PLUS_EXPR;
2160 else
2161 arith_code = MINUS_EXPR;
2162
2163 /* Gimplify the LHS into a GIMPLE lvalue. */
2164 lvalue = TREE_OPERAND (*expr_p, 0);
2165 ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
2166 if (ret == GS_ERROR)
2167 return ret;
2168
2169 /* Extract the operands to the arithmetic operation. */
2170 lhs = lvalue;
2171 rhs = TREE_OPERAND (*expr_p, 1);
2172
2173 /* For a postfix operator, we evaluate the LHS to an rvalue and then use
2174 that both as the result value and in the postqueue operation. */
2175 if (postfix)
2176 {
2177 ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
2178 if (ret == GS_ERROR)
2179 return ret;
2180 }
2181
2182 /* For pointer increment and decrement, use POINTER_PLUS_EXPR. */
2183 if (POINTER_TYPE_P (TREE_TYPE (lhs)))
2184 {
2185 rhs = fold_convert (sizetype, rhs);
2186 if (arith_code == MINUS_EXPR)
2187 rhs = fold_build1 (NEGATE_EXPR, TREE_TYPE (rhs), rhs);
2188 arith_code = POINTER_PLUS_EXPR;
2189 }
2190
2191 t1 = build2 (arith_code, TREE_TYPE (*expr_p), lhs, rhs);
2192
2193 if (postfix)
2194 {
2195 gimplify_assign (lvalue, t1, orig_post_p);
2196 gimplify_seq_add_seq (orig_post_p, post);
2197 *expr_p = lhs;
2198 return GS_ALL_DONE;
2199 }
2200 else
2201 {
2202 *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
2203 return GS_OK;
2204 }
2205 }
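
/* Illustrative sketch (added for exposition): when the value is used,
   a postfix increment
     x = i++;
   reads the old value into a temporary and queues the update on the
   post queue, so the emitted statements are roughly
     D.1 = i;
     x = D.1;
     i = D.1 + 1;
   while the prefix forms are simply rewritten as i = i + 1 followed by
   a use of i.  The temporary name D.1 is hypothetical.  */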
2206
2207
2208 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
2209
2210 static void
2211 maybe_with_size_expr (tree *expr_p)
2212 {
2213 tree expr = *expr_p;
2214 tree type = TREE_TYPE (expr);
2215 tree size;
2216
2217 /* If we've already wrapped this or the type is error_mark_node, we can't do
2218 anything. */
2219 if (TREE_CODE (expr) == WITH_SIZE_EXPR
2220 || type == error_mark_node)
2221 return;
2222
2223 /* If the size isn't known or is a constant, we have nothing to do. */
2224 size = TYPE_SIZE_UNIT (type);
2225 if (!size || TREE_CODE (size) == INTEGER_CST)
2226 return;
2227
2228 /* Otherwise, make a WITH_SIZE_EXPR. */
2229 size = unshare_expr (size);
2230 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
2231 *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
2232 }
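
/* Illustrative sketch (added for exposition): if *P has a
   variable-sized type whose TYPE_SIZE_UNIT is some non-constant N,
   then an expression like *P is rewritten as
     WITH_SIZE_EXPR <*P, N>
   so that later code, e.g. gimplify_modify_expr_to_memcpy below, knows
   how many bytes the object occupies.  */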
2233
2234
2235 /* Helper for gimplify_call_expr. Gimplify a single argument *ARG_P.
2236 Store any side-effects in PRE_P. CALL_LOCATION is the location of
2237 the CALL_EXPR. */
2238
2239 static enum gimplify_status
2240 gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location)
2241 {
2242 bool (*test) (tree);
2243 fallback_t fb;
2244
2245 /* In general, we allow lvalues for function arguments to avoid
2246 extra overhead of copying large aggregates out of even larger
2247 aggregates into temporaries only to copy the temporaries to
2248 the argument list. Make optimizers happy by pulling out to
2249 temporaries those types that fit in registers. */
2250 if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
2251 test = is_gimple_val, fb = fb_rvalue;
2252 else
2253 test = is_gimple_lvalue, fb = fb_either;
2254
2255 /* If this is a variable sized type, we must remember the size. */
2256 maybe_with_size_expr (arg_p);
2257
2258 /* Make sure arguments have the same location as the function call
2259 itself. */
2260 protected_set_expr_location (*arg_p, call_location);
2261
2262 /* There is a sequence point before a function call. Side effects in
2263 the argument list must occur before the actual call. So, when
2264 gimplifying arguments, force gimplify_expr to use an internal
2265 post queue which is then appended to the end of PRE_P. */
2266 return gimplify_expr (arg_p, pre_p, NULL, test, fb);
2267 }
2268
2269
2270 /* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
2271 WANT_VALUE is true if the result of the call is desired. */
2272
2273 static enum gimplify_status
2274 gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
2275 {
2276 tree fndecl, parms, p;
2277 enum gimplify_status ret;
2278 int i, nargs;
2279 gimple call;
2280 bool builtin_va_start_p = FALSE;
2281
2282 gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);
2283
2284 /* For reliable diagnostics during inlining, it is necessary that
2285 every call_expr be annotated with file and line. */
2286 if (! EXPR_HAS_LOCATION (*expr_p))
2287 SET_EXPR_LOCATION (*expr_p, input_location);
2288
2289 /* This may be a call to a builtin function.
2290
2291 Builtin function calls may be transformed into different
2292 (and more efficient) builtin function calls under certain
2293 circumstances. Unfortunately, gimplification can muck things
2294 up enough that the builtin expanders are not aware that certain
2295 transformations are still valid.
2296
2297 So we attempt transformation/gimplification of the call before
2298 we gimplify the CALL_EXPR. At this time we do not manage to
2299 transform all calls in the same manner as the expanders do, but
2300 we do transform most of them. */
2301 fndecl = get_callee_fndecl (*expr_p);
2302 if (fndecl && DECL_BUILT_IN (fndecl))
2303 {
2304 tree new_tree = fold_call_expr (*expr_p, !want_value);
2305
2306 if (new_tree && new_tree != *expr_p)
2307 {
2308 /* There was a transformation of this call which computes the
2309 same value, but in a more efficient way. Return and try
2310 again. */
2311 *expr_p = new_tree;
2312 return GS_OK;
2313 }
2314
2315 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
2316 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_VA_START)
2317 {
2318 builtin_va_start_p = TRUE;
2319 if (call_expr_nargs (*expr_p) < 2)
2320 {
2321 error ("too few arguments to function %<va_start%>");
2322 *expr_p = build_empty_stmt ();
2323 return GS_OK;
2324 }
2325
2326 if (fold_builtin_next_arg (*expr_p, true))
2327 {
2328 *expr_p = build_empty_stmt ();
2329 return GS_OK;
2330 }
2331 }
2332 }
2333
2334 /* There is a sequence point before the call, so any side effects in
2335 the calling expression must occur before the actual call. Force
2336 gimplify_expr to use an internal post queue. */
2337 ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
2338 is_gimple_call_addr, fb_rvalue);
2339
2340 nargs = call_expr_nargs (*expr_p);
2341
2342 /* Get argument types for verification. */
2343 fndecl = get_callee_fndecl (*expr_p);
2344 parms = NULL_TREE;
2345 if (fndecl)
2346 parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
2347 else if (POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_FN (*expr_p))))
2348 parms = TYPE_ARG_TYPES (TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (*expr_p))));
2349
2350 if (fndecl && DECL_ARGUMENTS (fndecl))
2351 p = DECL_ARGUMENTS (fndecl);
2352 else if (parms)
2353 p = parms;
2354 else
2355 {
2356 if (nargs != 0)
2357 CALL_CANNOT_INLINE_P (*expr_p) = 1;
2358 p = NULL_TREE;
2359 }
2360 for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
2361 ;
2362
2363 /* If the last argument is __builtin_va_arg_pack () and it is not
2364 passed as a named argument, decrease the number of CALL_EXPR
2365 arguments and set instead the CALL_EXPR_VA_ARG_PACK flag. */
2366 if (!p
2367 && i < nargs
2368 && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
2369 {
2370 tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
2371 tree last_arg_fndecl = get_callee_fndecl (last_arg);
2372
2373 if (last_arg_fndecl
2374 && TREE_CODE (last_arg_fndecl) == FUNCTION_DECL
2375 && DECL_BUILT_IN_CLASS (last_arg_fndecl) == BUILT_IN_NORMAL
2376 && DECL_FUNCTION_CODE (last_arg_fndecl) == BUILT_IN_VA_ARG_PACK)
2377 {
2378 tree call = *expr_p;
2379
2380 --nargs;
2381 *expr_p = build_call_array (TREE_TYPE (call), CALL_EXPR_FN (call),
2382 nargs, CALL_EXPR_ARGP (call));
2383
2384 /* Copy all CALL_EXPR flags, location and block, except
2385 CALL_EXPR_VA_ARG_PACK flag. */
2386 CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
2387 CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
2388 CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
2389 = CALL_EXPR_RETURN_SLOT_OPT (call);
2390 CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
2391 CALL_CANNOT_INLINE_P (*expr_p) = CALL_CANNOT_INLINE_P (call);
2392 SET_EXPR_LOCUS (*expr_p, EXPR_LOCUS (call));
2393 TREE_BLOCK (*expr_p) = TREE_BLOCK (call);
2394
2395 /* Set CALL_EXPR_VA_ARG_PACK. */
2396 CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
2397 }
2398 }
2399
2400 /* Finally, gimplify the function arguments. */
2401 if (nargs > 0)
2402 {
2403 for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
2404 PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
2405 PUSH_ARGS_REVERSED ? i-- : i++)
2406 {
2407 enum gimplify_status t;
2408
2409 /* Avoid gimplifying the second argument to va_start, which needs to
2410 be the plain PARM_DECL. */
2411 if ((i != 1) || !builtin_va_start_p)
2412 {
2413 t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
2414 EXPR_LOCATION (*expr_p));
2415
2416 if (t == GS_ERROR)
2417 ret = GS_ERROR;
2418 }
2419 }
2420 }
2421
2422 /* Try this again in case gimplification exposed something. */
2423 if (ret != GS_ERROR)
2424 {
2425 tree new_tree = fold_call_expr (*expr_p, !want_value);
2426
2427 if (new_tree && new_tree != *expr_p)
2428 {
2429 /* There was a transformation of this call which computes the
2430 same value, but in a more efficient way. Return and try
2431 again. */
2432 *expr_p = new_tree;
2433 return GS_OK;
2434 }
2435 }
2436 else
2437 {
2438 *expr_p = error_mark_node;
2439 return GS_ERROR;
2440 }
2441
2442 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on the
2443 call expression. This allows us to eliminate redundant or useless
2444 calls to "const" functions. */
2445 if (TREE_CODE (*expr_p) == CALL_EXPR)
2446 {
2447 int flags = call_expr_flags (*expr_p);
2448 if (flags & (ECF_CONST | ECF_PURE)
2449 /* An infinite loop is considered a side effect. */
2450 && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
2451 TREE_SIDE_EFFECTS (*expr_p) = 0;
2452 }
2453
2454 /* If the value is not needed by the caller, emit a new GIMPLE_CALL
2455 and clear *EXPR_P. Otherwise, leave *EXPR_P in its gimplified
2456 form and delegate the creation of a GIMPLE_CALL to
2457 gimplify_modify_expr. This is always possible because when
2458 WANT_VALUE is true, the caller wants the result of this call into
2459 a temporary, which means that we will emit an INIT_EXPR in
2460 internal_get_tmp_var which will then be handled by
2461 gimplify_modify_expr. */
2462 if (!want_value)
2463 {
2464 /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
2465 have to do is replicate it as a GIMPLE_CALL tuple. */
2466 call = gimple_build_call_from_tree (*expr_p);
2467 gimplify_seq_add_stmt (pre_p, call);
2468 *expr_p = NULL_TREE;
2469 }
2470
2471 return ret;
2472 }
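
/* Illustrative sketch (added for exposition): a call whose arguments
   carry side effects, such as
     f (i++, g ());
   has those side effects flushed into PRE_P ahead of the call, giving
   approximately
     D.1 = i;
     i = D.1 + 1;
     D.2 = g ();
     f (D.1, D.2);
   (the relative order in which the arguments are gimplified depends on
   PUSH_ARGS_REVERSED).  The temporary names are hypothetical.  */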
2473
2474 /* Handle shortcut semantics in the predicate operand of a COND_EXPR by
2475 rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
2476
2477 TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
2478 condition is true or false, respectively. If null, we should generate
2479 our own to skip over the evaluation of this specific expression.
2480
2481 This function is the tree equivalent of do_jump.
2482
2483 shortcut_cond_r should only be called by shortcut_cond_expr. */
2484
2485 static tree
2486 shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p)
2487 {
2488 tree local_label = NULL_TREE;
2489 tree t, expr = NULL;
2490
2491 /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
2492 retain the shortcut semantics. Just insert the gotos here;
2493 shortcut_cond_expr will append the real blocks later. */
2494 if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
2495 {
2496 /* Turn if (a && b) into
2497
2498 if (a); else goto no;
2499 if (b) goto yes; else goto no;
2500 (no:) */
2501
2502 if (false_label_p == NULL)
2503 false_label_p = &local_label;
2504
2505 t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p);
2506 append_to_statement_list (t, &expr);
2507
2508 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
2509 false_label_p);
2510 append_to_statement_list (t, &expr);
2511 }
2512 else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
2513 {
2514 /* Turn if (a || b) into
2515
2516 if (a) goto yes;
2517 if (b) goto yes; else goto no;
2518 (yes:) */
2519
2520 if (true_label_p == NULL)
2521 true_label_p = &local_label;
2522
2523 t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL);
2524 append_to_statement_list (t, &expr);
2525
2526 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
2527 false_label_p);
2528 append_to_statement_list (t, &expr);
2529 }
2530 else if (TREE_CODE (pred) == COND_EXPR)
2531 {
2532 /* As long as we're messing with gotos, turn if (a ? b : c) into
2533 if (a)
2534 if (b) goto yes; else goto no;
2535 else
2536 if (c) goto yes; else goto no; */
2537 expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
2538 shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
2539 false_label_p),
2540 shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
2541 false_label_p));
2542 }
2543 else
2544 {
2545 expr = build3 (COND_EXPR, void_type_node, pred,
2546 build_and_jump (true_label_p),
2547 build_and_jump (false_label_p));
2548 }
2549
2550 if (local_label)
2551 {
2552 t = build1 (LABEL_EXPR, void_type_node, local_label);
2553 append_to_statement_list (t, &expr);
2554 }
2555
2556 return expr;
2557 }
2558
2559 /* Given a conditional expression EXPR with short-circuit boolean
2560 predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
2561 predicate appart into the equivalent sequence of conditionals. */
2562
2563 static tree
2564 shortcut_cond_expr (tree expr)
2565 {
2566 tree pred = TREE_OPERAND (expr, 0);
2567 tree then_ = TREE_OPERAND (expr, 1);
2568 tree else_ = TREE_OPERAND (expr, 2);
2569 tree true_label, false_label, end_label, t;
2570 tree *true_label_p;
2571 tree *false_label_p;
2572 bool emit_end, emit_false, jump_over_else;
2573 bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
2574 bool else_se = else_ && TREE_SIDE_EFFECTS (else_);
2575
2576 /* First do simple transformations. */
2577 if (!else_se)
2578 {
2579 /* If there is no 'else', turn if (a && b) into if (a) if (b). */
2580 while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
2581 {
2582 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
2583 then_ = shortcut_cond_expr (expr);
2584 then_se = then_ && TREE_SIDE_EFFECTS (then_);
2585 pred = TREE_OPERAND (pred, 0);
2586 expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
2587 }
2588 }
2589
2590 if (!then_se)
2591 {
2592 /* If there is no 'then', turn
2593 if (a || b); else d
2594 into
2595 if (a); else if (b); else d. */
2596 while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
2597 {
2598 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
2599 else_ = shortcut_cond_expr (expr);
2600 else_se = else_ && TREE_SIDE_EFFECTS (else_);
2601 pred = TREE_OPERAND (pred, 0);
2602 expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
2603 }
2604 }
2605
2606 /* If we're done, great. */
2607 if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
2608 && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
2609 return expr;
2610
2611 /* Otherwise we need to mess with gotos. Change
2612 if (a) c; else d;
2613 to
2614 if (a); else goto no;
2615 c; goto end;
2616 no: d; end:
2617 and recursively gimplify the condition. */
2618
2619 true_label = false_label = end_label = NULL_TREE;
2620
2621 /* If our arms just jump somewhere, hijack those labels so we don't
2622 generate jumps to jumps. */
2623
2624 if (then_
2625 && TREE_CODE (then_) == GOTO_EXPR
2626 && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL)
2627 {
2628 true_label = GOTO_DESTINATION (then_);
2629 then_ = NULL;
2630 then_se = false;
2631 }
2632
2633 if (else_
2634 && TREE_CODE (else_) == GOTO_EXPR
2635 && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL)
2636 {
2637 false_label = GOTO_DESTINATION (else_);
2638 else_ = NULL;
2639 else_se = false;
2640 }
2641
2642 /* If we aren't hijacking a label for the 'then' branch, it falls through. */
2643 if (true_label)
2644 true_label_p = &true_label;
2645 else
2646 true_label_p = NULL;
2647
2648 /* The 'else' branch also needs a label if it contains interesting code. */
2649 if (false_label || else_se)
2650 false_label_p = &false_label;
2651 else
2652 false_label_p = NULL;
2653
2654 /* If there was nothing else in our arms, just forward the label(s). */
2655 if (!then_se && !else_se)
2656 return shortcut_cond_r (pred, true_label_p, false_label_p);
2657
2658 /* If our last subexpression already has a terminal label, reuse it. */
2659 if (else_se)
2660 expr = expr_last (else_);
2661 else if (then_se)
2662 expr = expr_last (then_);
2663 else
2664 expr = NULL;
2665 if (expr && TREE_CODE (expr) == LABEL_EXPR)
2666 end_label = LABEL_EXPR_LABEL (expr);
2667
2668 /* If we don't care about jumping to the 'else' branch, jump to the end
2669 if the condition is false. */
2670 if (!false_label_p)
2671 false_label_p = &end_label;
2672
2673 /* We only want to emit these labels if we aren't hijacking them. */
2674 emit_end = (end_label == NULL_TREE);
2675 emit_false = (false_label == NULL_TREE);
2676
2677 /* We only emit the jump over the else clause if we have to--if the
2678 then clause may fall through. Otherwise we can wind up with a
2679 useless jump and a useless label at the end of gimplified code,
2680 which will cause us to think that this conditional as a whole
2681 falls through even if it doesn't. If we then inline a function
2682 which ends with such a condition, that can cause us to issue an
2683 inappropriate warning about control reaching the end of a
2684 non-void function. */
2685 jump_over_else = block_may_fallthru (then_);
2686
2687 pred = shortcut_cond_r (pred, true_label_p, false_label_p);
2688
2689 expr = NULL;
2690 append_to_statement_list (pred, &expr);
2691
2692 append_to_statement_list (then_, &expr);
2693 if (else_se)
2694 {
2695 if (jump_over_else)
2696 {
2697 t = build_and_jump (&end_label);
2698 append_to_statement_list (t, &expr);
2699 }
2700 if (emit_false)
2701 {
2702 t = build1 (LABEL_EXPR, void_type_node, false_label);
2703 append_to_statement_list (t, &expr);
2704 }
2705 append_to_statement_list (else_, &expr);
2706 }
2707 if (emit_end && end_label)
2708 {
2709 t = build1 (LABEL_EXPR, void_type_node, end_label);
2710 append_to_statement_list (t, &expr);
2711 }
2712
2713 return expr;
2714 }
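
/* Illustrative sketch (added for exposition): with side effects in
   both arms,
     if (a && b) c (); else d ();
   is rewritten into approximately
     if (a) ; else goto no;
     if (b) ; else goto no;
     c ();
     goto end;
     no: d ();
     end: ;
   following the scheme described above.  The label names are
   hypothetical.  */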
2715
2716 /* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */
2717
2718 tree
2719 gimple_boolify (tree expr)
2720 {
2721 tree type = TREE_TYPE (expr);
2722
2723 if (TREE_CODE (type) == BOOLEAN_TYPE)
2724 return expr;
2725
2726 switch (TREE_CODE (expr))
2727 {
2728 case TRUTH_AND_EXPR:
2729 case TRUTH_OR_EXPR:
2730 case TRUTH_XOR_EXPR:
2731 case TRUTH_ANDIF_EXPR:
2732 case TRUTH_ORIF_EXPR:
2733 /* Also boolify the arguments of truth exprs. */
2734 TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
2735 /* FALLTHRU */
2736
2737 case TRUTH_NOT_EXPR:
2738 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
2739 /* FALLTHRU */
2740
2741 case EQ_EXPR: case NE_EXPR:
2742 case LE_EXPR: case GE_EXPR: case LT_EXPR: case GT_EXPR:
2743 /* These expressions always produce boolean results. */
2744 TREE_TYPE (expr) = boolean_type_node;
2745 return expr;
2746
2747 default:
2748 /* Other expressions that get here must have boolean values, but
2749 might need to be converted to the appropriate mode. */
2750 return fold_convert (boolean_type_node, expr);
2751 }
2752 }
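
/* Illustrative sketch (added for exposition): a plain integer used as
   a truth value, e.g. the condition in
     if (x & 4) ...
   is converted to boolean_type_node (conceptually a test against
   zero), whereas comparisons and TRUTH_* operators merely get their
   TREE_TYPE switched to BOOLEAN_TYPE in place.  */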
2753
2754 /* Given a conditional expression *EXPR_P without side effects, gimplify
2755 its operands. New statements are inserted to PRE_P. */
2756
2757 static enum gimplify_status
2758 gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
2759 {
2760 tree expr = *expr_p, cond;
2761 enum gimplify_status ret, tret;
2762 enum tree_code code;
2763
2764 cond = gimple_boolify (COND_EXPR_COND (expr));
2765
2766 /* We need to handle && and || specially, as their gimplification
2767 creates a pure COND_EXPR, which would otherwise lead to an infinite cycle. */
2768 code = TREE_CODE (cond);
2769 if (code == TRUTH_ANDIF_EXPR)
2770 TREE_SET_CODE (cond, TRUTH_AND_EXPR);
2771 else if (code == TRUTH_ORIF_EXPR)
2772 TREE_SET_CODE (cond, TRUTH_OR_EXPR);
2773 ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_condexpr, fb_rvalue);
2774 COND_EXPR_COND (*expr_p) = cond;
2775
2776 tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
2777 is_gimple_val, fb_rvalue);
2778 ret = MIN (ret, tret);
2779 tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
2780 is_gimple_val, fb_rvalue);
2781
2782 return MIN (ret, tret);
2783 }
2784
2785 /* Returns true if evaluating EXPR could trap.
2786 EXPR is GENERIC, while tree_could_trap_p can be called
2787 only on GIMPLE. */
2788
2789 static bool
2790 generic_expr_could_trap_p (tree expr)
2791 {
2792 unsigned i, n;
2793
2794 if (!expr || is_gimple_val (expr))
2795 return false;
2796
2797 if (!EXPR_P (expr) || tree_could_trap_p (expr))
2798 return true;
2799
2800 n = TREE_OPERAND_LENGTH (expr);
2801 for (i = 0; i < n; i++)
2802 if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
2803 return true;
2804
2805 return false;
2806 }
2807
2808 /* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
2809 into
2810
2811 if (p) if (p)
2812 t1 = a; a;
2813 else or else
2814 t1 = b; b;
2815 t1;
2816
2817 The second form is used when *EXPR_P is of type void.
2818
2819 PRE_P points to the list where side effects that must happen before
2820 *EXPR_P should be stored. */
2821
2822 static enum gimplify_status
2823 gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
2824 {
2825 tree expr = *expr_p;
2826 tree tmp, type, arm1, arm2;
2827 enum gimplify_status ret;
2828 tree label_true, label_false, label_cont;
2829 bool have_then_clause_p, have_else_clause_p;
2830 gimple gimple_cond;
2831 enum tree_code pred_code;
2832 gimple_seq seq = NULL;
2833
2834 type = TREE_TYPE (expr);
2835
2836 /* If this COND_EXPR has a value, copy the values into a temporary within
2837 the arms. */
2838 if (! VOID_TYPE_P (type))
2839 {
2840 tree result;
2841
2842 /* If an rvalue is ok or we do not require an lvalue, avoid creating
2843 an addressable temporary. */
2844 if (((fallback & fb_rvalue)
2845 || !(fallback & fb_lvalue))
2846 && !TREE_ADDRESSABLE (type))
2847 {
2848 if (gimplify_ctxp->allow_rhs_cond_expr
2849 /* If either branch has side effects or could trap, it can't be
2850 evaluated unconditionally. */
2851 && !TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p, 1))
2852 && !generic_expr_could_trap_p (TREE_OPERAND (*expr_p, 1))
2853 && !TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p, 2))
2854 && !generic_expr_could_trap_p (TREE_OPERAND (*expr_p, 2)))
2855 return gimplify_pure_cond_expr (expr_p, pre_p);
2856
2857 result = tmp = create_tmp_var (TREE_TYPE (expr), "iftmp");
2858 ret = GS_ALL_DONE;
2859 }
2860 else
2861 {
2862 tree type = build_pointer_type (TREE_TYPE (expr));
2863
2864 if (TREE_TYPE (TREE_OPERAND (expr, 1)) != void_type_node)
2865 TREE_OPERAND (expr, 1) =
2866 build_fold_addr_expr (TREE_OPERAND (expr, 1));
2867
2868 if (TREE_TYPE (TREE_OPERAND (expr, 2)) != void_type_node)
2869 TREE_OPERAND (expr, 2) =
2870 build_fold_addr_expr (TREE_OPERAND (expr, 2));
2871
2872 tmp = create_tmp_var (type, "iftmp");
2873
2874 expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (expr, 0),
2875 TREE_OPERAND (expr, 1), TREE_OPERAND (expr, 2));
2876
2877 result = build_fold_indirect_ref (tmp);
2878 }
2879
2880 /* Build the then clause, 't1 = a;'. But don't build an assignment
2881 if this branch is void; in C++ it can be, if it's a throw. */
2882 if (TREE_TYPE (TREE_OPERAND (expr, 1)) != void_type_node)
2883 TREE_OPERAND (expr, 1)
2884 = build2 (MODIFY_EXPR, TREE_TYPE (tmp), tmp, TREE_OPERAND (expr, 1));
2885
2886 /* Build the else clause, 't1 = b;'. */
2887 if (TREE_TYPE (TREE_OPERAND (expr, 2)) != void_type_node)
2888 TREE_OPERAND (expr, 2)
2889 = build2 (MODIFY_EXPR, TREE_TYPE (tmp), tmp, TREE_OPERAND (expr, 2));
2890
2891 TREE_TYPE (expr) = void_type_node;
2892 recalculate_side_effects (expr);
2893
2894 /* Move the COND_EXPR to the prequeue. */
2895 gimplify_stmt (&expr, pre_p);
2896
2897 *expr_p = result;
2898 return GS_ALL_DONE;
2899 }
2900
2901 /* Make sure the condition has BOOLEAN_TYPE. */
2902 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
2903
2904 /* Break apart && and || conditions. */
2905 if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
2906 || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
2907 {
2908 expr = shortcut_cond_expr (expr);
2909
2910 if (expr != *expr_p)
2911 {
2912 *expr_p = expr;
2913
2914 /* We can't rely on gimplify_expr to re-gimplify the expanded
2915 form properly, as cleanups might cause the target labels to be
2916 wrapped in a TRY_FINALLY_EXPR. To prevent that, we need to
2917 set up a conditional context. */
2918 gimple_push_condition ();
2919 gimplify_stmt (expr_p, &seq);
2920 gimple_pop_condition (pre_p);
2921 gimple_seq_add_seq (pre_p, seq);
2922
2923 return GS_ALL_DONE;
2924 }
2925 }
2926
2927 /* Now do the normal gimplification. */
2928
2929 /* Gimplify condition. */
2930 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL, is_gimple_condexpr,
2931 fb_rvalue);
2932 if (ret == GS_ERROR)
2933 return GS_ERROR;
2934 gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);
2935
2936 gimple_push_condition ();
2937
2938 have_then_clause_p = have_else_clause_p = false;
2939 if (TREE_OPERAND (expr, 1) != NULL
2940 && TREE_CODE (TREE_OPERAND (expr, 1)) == GOTO_EXPR
2941 && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 1))) == LABEL_DECL
2942 && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 1)))
2943 == current_function_decl)
2944 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
2945 have different locations, otherwise we end up with incorrect
2946 location information on the branches. */
2947 && (optimize
2948 || !EXPR_HAS_LOCATION (expr)
2949 || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 1))
2950 || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 1))))
2951 {
2952 label_true = GOTO_DESTINATION (TREE_OPERAND (expr, 1));
2953 have_then_clause_p = true;
2954 }
2955 else
2956 label_true = create_artificial_label ();
2957 if (TREE_OPERAND (expr, 2) != NULL
2958 && TREE_CODE (TREE_OPERAND (expr, 2)) == GOTO_EXPR
2959 && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 2))) == LABEL_DECL
2960 && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 2)))
2961 == current_function_decl)
2962 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
2963 have different locations, otherwise we end up with incorrect
2964 location information on the branches. */
2965 && (optimize
2966 || !EXPR_HAS_LOCATION (expr)
2967 || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 2))
2968 || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 2))))
2969 {
2970 label_false = GOTO_DESTINATION (TREE_OPERAND (expr, 2));
2971 have_else_clause_p = true;
2972 }
2973 else
2974 label_false = create_artificial_label ();
2975
2976 gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
2977 &arm2);
2978
2979 gimple_cond = gimple_build_cond (pred_code, arm1, arm2, label_true,
2980 label_false);
2981
2982 gimplify_seq_add_stmt (&seq, gimple_cond);
2983 label_cont = NULL_TREE;
2984 if (!have_then_clause_p)
2985 {
2986 /* For if (...) {} else { code; } put label_true after
2987 the else block. */
2988 if (TREE_OPERAND (expr, 1) == NULL_TREE
2989 && !have_else_clause_p
2990 && TREE_OPERAND (expr, 2) != NULL_TREE)
2991 label_cont = label_true;
2992 else
2993 {
2994 gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
2995 have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
2996 /* For if (...) { code; } else {} or
2997 if (...) { code; } else goto label; or
2998 if (...) { code; return; } else { ... }
2999 label_cont isn't needed. */
3000 if (!have_else_clause_p
3001 && TREE_OPERAND (expr, 2) != NULL_TREE
3002 && gimple_seq_may_fallthru (seq))
3003 {
3004 gimple g;
3005 label_cont = create_artificial_label ();
3006
3007 g = gimple_build_goto (label_cont);
3008
3009 /* GIMPLE_COND's are very low level; they have embedded
3010 gotos. This particular embedded goto should not be marked
3011 with the location of the original COND_EXPR, as it would
3012 correspond to the COND_EXPR's condition, not the ELSE or the
3013 THEN arms. To avoid marking it with the wrong location, flag
3014 it as "no location". */
3015 gimple_set_do_not_emit_location (g);
3016
3017 gimplify_seq_add_stmt (&seq, g);
3018 }
3019 }
3020 }
3021 if (!have_else_clause_p)
3022 {
3023 gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
3024 have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
3025 }
3026 if (label_cont)
3027 gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));
3028
3029 gimple_pop_condition (pre_p);
3030 gimple_seq_add_seq (pre_p, seq);
3031
3032 if (ret == GS_ERROR)
3033 ; /* Do nothing. */
3034 else if (have_then_clause_p || have_else_clause_p)
3035 ret = GS_ALL_DONE;
3036 else
3037 {
3038 /* Both arms are empty; replace the COND_EXPR with its predicate. */
3039 expr = TREE_OPERAND (expr, 0);
3040 gimplify_stmt (&expr, pre_p);
3041 }
3042
3043 *expr_p = NULL;
3044 return ret;
3045 }
3046
3047 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
3048 a call to __builtin_memcpy. */
3049
3050 static enum gimplify_status
3051 gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
3052 gimple_seq *seq_p)
3053 {
3054 tree t, to, to_ptr, from, from_ptr;
3055 gimple gs;
3056
3057 to = TREE_OPERAND (*expr_p, 0);
3058 from = TREE_OPERAND (*expr_p, 1);
3059
3060 from_ptr = build_fold_addr_expr (from);
3061 gimplify_arg (&from_ptr, seq_p, EXPR_LOCATION (*expr_p));
3062
3063 to_ptr = build_fold_addr_expr (to);
3064 gimplify_arg (&to_ptr, seq_p, EXPR_LOCATION (*expr_p));
3065
3066 t = implicit_built_in_decls[BUILT_IN_MEMCPY];
3067
3068 gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);
3069
3070 if (want_value)
3071 {
3072 /* tmp = memcpy() */
3073 t = create_tmp_var (TREE_TYPE (to_ptr), NULL);
3074 gimple_call_set_lhs (gs, t);
3075 gimplify_seq_add_stmt (seq_p, gs);
3076
3077 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
3078 return GS_ALL_DONE;
3079 }
3080
3081 gimplify_seq_add_stmt (seq_p, gs);
3082 *expr_p = NULL;
3083 return GS_ALL_DONE;
3084 }
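
/* Illustrative sketch (an assumption about the usual caller, which
   hands us assignments whose size is only known at run time): an
   assignment *P = *Q whose size N was recorded by a WITH_SIZE_EXPR is
   emitted as
     __builtin_memcpy (P, Q, N);
   i.e. the destination address, the source address and the computed
   size, with a temporary for the return value only when the value of
   the assignment is itself used.  */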
3085
3086 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
3087 a call to __builtin_memset. In this case we know that the RHS is
3088 a CONSTRUCTOR with an empty element list. */
3089
3090 static enum gimplify_status
3091 gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
3092 gimple_seq *seq_p)
3093 {
3094 tree t, from, to, to_ptr;
3095 gimple gs;
3096
3097 /* Assert our assumptions, to abort instead of producing wrong code
3098 silently if they are not met. Beware that the RHS CONSTRUCTOR might
3099 not be immediately exposed. */
3100 from = TREE_OPERAND (*expr_p, 1);
3101 if (TREE_CODE (from) == WITH_SIZE_EXPR)
3102 from = TREE_OPERAND (from, 0);
3103
3104 gcc_assert (TREE_CODE (from) == CONSTRUCTOR
3105 && VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (from)));
3106
3107 /* Now proceed. */
3108 to = TREE_OPERAND (*expr_p, 0);
3109
3110 to_ptr = build_fold_addr_expr (to);
3111 gimplify_arg (&to_ptr, seq_p, EXPR_LOCATION (*expr_p));
3112 t = implicit_built_in_decls[BUILT_IN_MEMSET];
3113
3114 gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);
3115
3116 if (want_value)
3117 {
3118 /* tmp = memset() */
3119 t = create_tmp_var (TREE_TYPE (to_ptr), NULL);
3120 gimple_call_set_lhs (gs, t);
3121 gimplify_seq_add_stmt (seq_p, gs);
3122
3123 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
3124 return GS_ALL_DONE;
3125 }
3126
3127 gimplify_seq_add_stmt (seq_p, gs);
3128 *expr_p = NULL;
3129 return GS_ALL_DONE;
3130 }
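
/* Illustrative sketch (added for exposition): clearing a
   variable-sized object with an empty initializer, e.g. *P = {} where
   the size N comes from a WITH_SIZE_EXPR, can be routed here and
   becomes
     __builtin_memset (P, 0, N);
   with the same handling of the return value as in the memcpy case
   above.  */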
3131
3132 /* A subroutine of gimplify_init_ctor_preeval. Called via walk_tree,
3133 determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
3134 assignment. Returns non-null if we detect a potential overlap. */
3135
3136 struct gimplify_init_ctor_preeval_data
3137 {
3138 /* The base decl of the lhs object. May be NULL, in which case we
3139 have to assume the lhs is indirect. */
3140 tree lhs_base_decl;
3141
3142 /* The alias set of the lhs object. */
3143 alias_set_type lhs_alias_set;
3144 };
3145
3146 static tree
3147 gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
3148 {
3149 struct gimplify_init_ctor_preeval_data *data
3150 = (struct gimplify_init_ctor_preeval_data *) xdata;
3151 tree t = *tp;
3152
3153 /* If we find the base object, obviously we have overlap. */
3154 if (data->lhs_base_decl == t)
3155 return t;
3156
3157 /* If the constructor component is indirect, determine if we have a
3158 potential overlap with the lhs. The only bits of information we
3159 have to go on at this point are addressability and alias sets. */
3160 if (TREE_CODE (t) == INDIRECT_REF
3161 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
3162 && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
3163 return t;
3164
3165 /* If the constructor component is a call, determine if it can hide a
3166 potential overlap with the lhs through an INDIRECT_REF like above. */
3167 if (TREE_CODE (t) == CALL_EXPR)
3168 {
3169 tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));
3170
3171 for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
3172 if (POINTER_TYPE_P (TREE_VALUE (type))
3173 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
3174 && alias_sets_conflict_p (data->lhs_alias_set,
3175 get_alias_set
3176 (TREE_TYPE (TREE_VALUE (type)))))
3177 return t;
3178 }
3179
3180 if (IS_TYPE_OR_DECL_P (t))
3181 *walk_subtrees = 0;
3182 return NULL;
3183 }
3184
3185 /* A subroutine of gimplify_init_constructor. Pre-evaluate EXPR,
3186 force values that overlap with the lhs (as described by *DATA)
3187 into temporaries. */
3188
3189 static void
3190 gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3191 struct gimplify_init_ctor_preeval_data *data)
3192 {
3193 enum gimplify_status one;
3194
3195 /* If the value is constant, then there's nothing to pre-evaluate. */
3196 if (TREE_CONSTANT (*expr_p))
3197 {
3198 /* Ensure it does not have side effects, it might contain a reference to
3199 the object we're initializing. */
3200 gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
3201 return;
3202 }
3203
3204 /* If the type has non-trivial constructors, we can't pre-evaluate. */
3205 if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
3206 return;
3207
3208 /* Recurse for nested constructors. */
3209 if (TREE_CODE (*expr_p) == CONSTRUCTOR)
3210 {
3211 unsigned HOST_WIDE_INT ix;
3212 constructor_elt *ce;
3213 VEC(constructor_elt,gc) *v = CONSTRUCTOR_ELTS (*expr_p);
3214
3215 for (ix = 0; VEC_iterate (constructor_elt, v, ix, ce); ix++)
3216 gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);
3217
3218 return;
3219 }
3220
3221 /* If this is a variable sized type, we must remember the size. */
3222 maybe_with_size_expr (expr_p);
3223
3224 /* Gimplify the constructor element to something appropriate for the rhs
3225 of a MODIFY_EXPR. Given that we know the LHS is an aggregate, we know
3226 the gimplifier will consider this a store to memory. Doing this
3227 gimplification now means that we won't have to deal with complicated
3228 language-specific trees, nor trees like SAVE_EXPR that can induce
3229 exponential search behavior. */
3230 one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
3231 if (one == GS_ERROR)
3232 {
3233 *expr_p = NULL;
3234 return;
3235 }
3236
3237 /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
3238 with the lhs, since "a = { .x=a }" doesn't make sense. This will
3239 always be true for all scalars, since is_gimple_mem_rhs insists on a
3240 temporary variable for them. */
3241 if (DECL_P (*expr_p))
3242 return;
3243
3244 /* If this is of variable size, we have no choice but to assume it doesn't
3245 overlap since we can't make a temporary for it. */
3246 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
3247 return;
3248
3249 /* Otherwise, we must search for overlap ... */
3250 if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
3251 return;
3252
3253 /* ... and if found, force the value into a temporary. */
3254 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
3255 }
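
/* Illustrative sketch (added for exposition): with
     struct S { int x, y; } s;
     struct S *p = &s;
     s = (struct S) { p->y + 1, 2 };
   the walk above finds the INDIRECT_REF through P, which may overlap
   the lhs S, so the value p->y + 1 is evaluated into a temporary
   before any element of S is stored or cleared.  The names are
   hypothetical.  */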
3256
3257 /* A subroutine of gimplify_init_ctor_eval. Create a loop for
3258 a RANGE_EXPR in a CONSTRUCTOR for an array.
3259
3260 var = lower;
3261 loop_entry:
3262 object[var] = value;
3263 if (var == upper)
3264 goto loop_exit;
3265 var = var + 1;
3266 goto loop_entry;
3267 loop_exit:
3268
3269 We increment var _after_ the loop exit check because we might otherwise
3270 fail if upper == TYPE_MAX_VALUE of upper's type.
3271
3272 Note that we never have to deal with SAVE_EXPRs here, because this has
3273 already been taken care of for us, in gimplify_init_ctor_preeval(). */
3274
3275 static void gimplify_init_ctor_eval (tree, VEC(constructor_elt,gc) *,
3276 gimple_seq *, bool);
3277
3278 static void
3279 gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
3280 tree value, tree array_elt_type,
3281 gimple_seq *pre_p, bool cleared)
3282 {
3283 tree loop_entry_label, loop_exit_label, fall_thru_label;
3284 tree var, var_type, cref, tmp;
3285
3286 loop_entry_label = create_artificial_label ();
3287 loop_exit_label = create_artificial_label ();
3288 fall_thru_label = create_artificial_label ();
3289
3290 /* Create and initialize the index variable. */
3291 var_type = TREE_TYPE (upper);
3292 var = create_tmp_var (var_type, NULL);
3293 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));
3294
3295 /* Add the loop entry label. */
3296 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));
3297
3298 /* Build the reference. */
3299 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
3300 var, NULL_TREE, NULL_TREE);
3301
3302 /* If the value is itself a CONSTRUCTOR, call gimplify_init_ctor_eval to
3303 do the store. Otherwise just assign the value to the reference. */
3304
3305 if (TREE_CODE (value) == CONSTRUCTOR)
3306 /* NB we might have to call ourselves recursively through
3307 gimplify_init_ctor_eval if the value is a constructor. */
3308 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
3309 pre_p, cleared);
3310 else
3311 gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));
3312
3313 /* We exit the loop when the index var is equal to the upper bound. */
3314 gimplify_seq_add_stmt (pre_p,
3315 gimple_build_cond (EQ_EXPR, var, upper,
3316 loop_exit_label, fall_thru_label));
3317
3318 gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));
3319
3320 /* Otherwise, increment the index var... */
3321 tmp = build2 (PLUS_EXPR, var_type, var,
3322 fold_convert (var_type, integer_one_node));
3323 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));
3324
3325 /* ...and jump back to the loop entry. */
3326 gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));
3327
3328 /* Add the loop exit label. */
3329 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
3330 }
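
/* Illustrative sketch (added for exposition): a GNU range designator
   with a non-constant value, e.g.
     int a[100] = { [10 ... 89] = v };
   reaches this function with lower == 10 and upper == 89 and is
   expanded into the loop shown above, storing v into a[var] for each
   var from 10 to 89.  The names a and v are hypothetical.  */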
3331
3332 /* Return true if FDECL is accessing a field that is zero sized. */
3333
3334 static bool
3335 zero_sized_field_decl (const_tree fdecl)
3336 {
3337 if (TREE_CODE (fdecl) == FIELD_DECL && DECL_SIZE (fdecl)
3338 && integer_zerop (DECL_SIZE (fdecl)))
3339 return true;
3340 return false;
3341 }
3342
3343 /* Return true if TYPE is zero sized. */
3344
3345 static bool
3346 zero_sized_type (const_tree type)
3347 {
3348 if (AGGREGATE_TYPE_P (type) && TYPE_SIZE (type)
3349 && integer_zerop (TYPE_SIZE (type)))
3350 return true;
3351 return false;
3352 }
3353
3354 /* A subroutine of gimplify_init_constructor. Generate individual
3355 MODIFY_EXPRs for a CONSTRUCTOR. OBJECT is the LHS against which the
3356 assignments should happen. ELTS is the CONSTRUCTOR_ELTS of the
3357 CONSTRUCTOR. CLEARED is true if the entire LHS object has been
3358 zeroed first. */
3359
3360 static void
3361 gimplify_init_ctor_eval (tree object, VEC(constructor_elt,gc) *elts,
3362 gimple_seq *pre_p, bool cleared)
3363 {
3364 tree array_elt_type = NULL;
3365 unsigned HOST_WIDE_INT ix;
3366 tree purpose, value;
3367
3368 if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
3369 array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));
3370
3371 FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
3372 {
3373 tree cref;
3374
3375 /* NULL values are created above for gimplification errors. */
3376 if (value == NULL)
3377 continue;
3378
3379 if (cleared && initializer_zerop (value))
3380 continue;
3381
3382 /* ??? Here's to hoping the front end fills in all of the indices,
3383 so we don't have to figure out what's missing ourselves. */
3384 gcc_assert (purpose);
3385
3386 /* Skip zero-sized fields, unless value has side-effects. This can
3387 happen with calls to functions returning a zero-sized type, which
3388 we shouldn't discard. As a number of downstream passes don't
3389 expect sets of zero-sized fields, we rely on the gimplification of
3390 the MODIFY_EXPR we make below to drop the assignment statement. */
3391 if (! TREE_SIDE_EFFECTS (value) && zero_sized_field_decl (purpose))
3392 continue;
3393
3394 /* If we have a RANGE_EXPR, we have to build a loop to assign the
3395 whole range. */
3396 if (TREE_CODE (purpose) == RANGE_EXPR)
3397 {
3398 tree lower = TREE_OPERAND (purpose, 0);
3399 tree upper = TREE_OPERAND (purpose, 1);
3400
3401 /* If the lower bound is equal to upper, just treat it as if
3402 upper was the index. */
3403 if (simple_cst_equal (lower, upper))
3404 purpose = upper;
3405 else
3406 {
3407 gimplify_init_ctor_eval_range (object, lower, upper, value,
3408 array_elt_type, pre_p, cleared);
3409 continue;
3410 }
3411 }
3412
3413 if (array_elt_type)
3414 {
3415 /* Do not use bitsizetype for ARRAY_REF indices. */
3416 if (TYPE_DOMAIN (TREE_TYPE (object)))
3417 purpose = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
3418 purpose);
3419 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
3420 purpose, NULL_TREE, NULL_TREE);
3421 }
3422 else
3423 {
3424 gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
3425 cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
3426 unshare_expr (object), purpose, NULL_TREE);
3427 }
3428
3429 if (TREE_CODE (value) == CONSTRUCTOR
3430 && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
3431 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
3432 pre_p, cleared);
3433 else
3434 {
3435 tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
3436 gimplify_and_add (init, pre_p);
3437 ggc_free (init);
3438 }
3439 }
3440 }
3441
3442
3443 /* Returns the appropriate RHS predicate for this LHS. */
3444
3445 gimple_predicate
3446 rhs_predicate_for (tree lhs)
3447 {
3448 if (is_gimple_formal_tmp_var (lhs))
3449 return is_gimple_formal_tmp_or_call_rhs;
3450 else if (is_gimple_reg (lhs))
3451 return is_gimple_reg_or_call_rhs;
3452 else
3453 return is_gimple_mem_or_call_rhs;
3454 }
3455
3456
3457 /* A subroutine of gimplify_modify_expr. Break out elements of a
3458 CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
3459
3460 Note that we still need to clear any elements that don't have explicit
3461 initializers, so if not all elements are initialized we keep the
3462 original MODIFY_EXPR, we just remove all of the constructor elements.
3463
3464 If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
3465 GS_ERROR if we would have to create a temporary when gimplifying
3466 this constructor. Otherwise, return GS_OK.
3467
3468 If NOTIFY_TEMP_CREATION is false, just do the gimplification. */
3469
3470 static enum gimplify_status
3471 gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3472 bool want_value, bool notify_temp_creation)
3473 {
3474 tree object;
3475 tree ctor = TREE_OPERAND (*expr_p, 1);
3476 tree type = TREE_TYPE (ctor);
3477 enum gimplify_status ret;
3478 VEC(constructor_elt,gc) *elts;
3479
3480 if (TREE_CODE (ctor) != CONSTRUCTOR)
3481 return GS_UNHANDLED;
3482
3483 if (!notify_temp_creation)
3484 {
3485 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
3486 is_gimple_lvalue, fb_lvalue);
3487 if (ret == GS_ERROR)
3488 return ret;
3489 }
3490
3491 object = TREE_OPERAND (*expr_p, 0);
3492 elts = CONSTRUCTOR_ELTS (ctor);
3493 ret = GS_ALL_DONE;
3494
3495 switch (TREE_CODE (type))
3496 {
3497 case RECORD_TYPE:
3498 case UNION_TYPE:
3499 case QUAL_UNION_TYPE:
3500 case ARRAY_TYPE:
3501 {
3502 struct gimplify_init_ctor_preeval_data preeval_data;
3503 HOST_WIDE_INT num_type_elements, num_ctor_elements;
3504 HOST_WIDE_INT num_nonzero_elements;
3505 bool cleared, valid_const_initializer;
3506
3507 /* Aggregate types must lower constructors to initialization of
3508 individual elements. The exception is that a CONSTRUCTOR node
3509 with no elements indicates zero-initialization of the whole. */
3510 if (VEC_empty (constructor_elt, elts))
3511 {
3512 if (notify_temp_creation)
3513 return GS_OK;
3514 break;
3515 }
3516
3517 /* Fetch information about the constructor to direct later processing.
3518 We might want to make static versions of it in various cases, and
3519 can only do so if it is known to be a valid constant initializer.
3520 valid_const_initializer
3521 = categorize_ctor_elements (ctor, &num_nonzero_elements,
3522 &num_ctor_elements, &cleared);
3523
3524 /* If a const aggregate variable is being initialized, then it
3525 should never be a loss to promote the variable to be static. */
3526 if (valid_const_initializer
3527 && num_nonzero_elements > 1
3528 && TREE_READONLY (object)
3529 && TREE_CODE (object) == VAR_DECL)
3530 {
3531 if (notify_temp_creation)
3532 return GS_ERROR;
3533 DECL_INITIAL (object) = ctor;
3534 TREE_STATIC (object) = 1;
3535 if (!DECL_NAME (object))
3536 DECL_NAME (object) = create_tmp_var_name ("C");
3537 walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);
3538
3539 /* ??? C++ doesn't automatically append a .<number> to the
3540 assembler name, and even when it does, it looks at FE-private
3541 data structures to figure out what that number should be,
3542 which are not set for this variable. I suppose this is
3543 important for local statics for inline functions, which aren't
3544 "local" in the object file sense. So in order to get a unique
3545 TU-local symbol, we must invoke the lhd version now. */
3546 lhd_set_decl_assembler_name (object);
3547
3548 *expr_p = NULL_TREE;
3549 break;
3550 }
3551
3552 /* If there are "lots" of initialized elements, even discounting
3553 those that are not address constants (and thus *must* be
3554 computed at runtime), then partition the constructor into
3555 constant and non-constant parts. Block copy the constant
3556 parts in, then generate code for the non-constant parts. */
3557 /* TODO. There's code in cp/typeck.c to do this. */
3558
3559 num_type_elements = count_type_elements (type, true);
3560
3561 /* If count_type_elements could not determine the number of type elements
3562 for a constant-sized object, assume clearing is needed.
3563 Don't do this for variable-sized objects, as store_constructor
3564 will ignore the clearing of variable-sized objects. */
3565 if (num_type_elements < 0 && int_size_in_bytes (type) >= 0)
3566 cleared = true;
3567 /* If there are "lots" of zeros, then block clear the object first. */
3568 else if (num_type_elements - num_nonzero_elements
3569 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
3570 && num_nonzero_elements < num_type_elements/4)
3571 cleared = true;
3572 /* ??? This bit ought not be needed. For any element not present
3573 in the initializer, we should simply set it to zero. Except
3574 we'd need to *find* the elements that are not present, and that
3575 requires trickery to avoid quadratic compile-time behavior in
3576 large cases or excessive memory use in small cases. */
3577 else if (num_ctor_elements < num_type_elements)
3578 cleared = true;
3579
3580 /* If there are "lots" of initialized elements, and all of them
3581 are valid address constants, then the entire initializer can
3582 be dropped to memory, and then memcpy'd out. Don't do this
3583 for sparse arrays, though, as it's more efficient to follow
3584 the standard CONSTRUCTOR behavior of memset followed by
3585 individual element initialization. */
3586 if (valid_const_initializer && !cleared)
3587 {
3588 HOST_WIDE_INT size = int_size_in_bytes (type);
3589 unsigned int align;
3590
3591 /* ??? We can still get unbounded array types, at least
3592 from the C++ front end. This seems wrong, but attempt
3593 to work around it for now. */
3594 if (size < 0)
3595 {
3596 size = int_size_in_bytes (TREE_TYPE (object));
3597 if (size >= 0)
3598 TREE_TYPE (ctor) = type = TREE_TYPE (object);
3599 }
3600
3601 /* Find the maximum alignment we can assume for the object. */
3602 /* ??? Make use of DECL_OFFSET_ALIGN. */
3603 if (DECL_P (object))
3604 align = DECL_ALIGN (object);
3605 else
3606 align = TYPE_ALIGN (type);
3607
3608 if (size > 0 && !can_move_by_pieces (size, align))
3609 {
3610 tree new_tree;
3611
3612 if (notify_temp_creation)
3613 return GS_ERROR;
3614
3615 new_tree = create_tmp_var_raw (type, "C");
3616
3617 gimple_add_tmp_var (new_tree);
3618 TREE_STATIC (new_tree) = 1;
3619 TREE_READONLY (new_tree) = 1;
3620 DECL_INITIAL (new_tree) = ctor;
3621 if (align > DECL_ALIGN (new_tree))
3622 {
3623 DECL_ALIGN (new_tree) = align;
3624 DECL_USER_ALIGN (new_tree) = 1;
3625 }
3626 walk_tree (&DECL_INITIAL (new_tree), force_labels_r, NULL, NULL);
3627
3628 TREE_OPERAND (*expr_p, 1) = new_tree;
3629
3630 /* This is no longer an assignment of a CONSTRUCTOR, but
3631 we still may have processing to do on the LHS. So
3632 pretend we didn't do anything here to let that happen. */
3633 return GS_UNHANDLED;
3634 }
3635 }
3636
3637 if (notify_temp_creation)
3638 return GS_OK;
3639
3640 /* If there are nonzero elements, pre-evaluate to capture elements
3641 overlapping with the lhs into temporaries. We must do this before
3642 clearing to fetch the values before they are zeroed-out. */
3643 if (num_nonzero_elements > 0)
3644 {
3645 preeval_data.lhs_base_decl = get_base_address (object);
3646 if (!DECL_P (preeval_data.lhs_base_decl))
3647 preeval_data.lhs_base_decl = NULL;
3648 preeval_data.lhs_alias_set = get_alias_set (object);
3649
3650 gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
3651 pre_p, post_p, &preeval_data);
3652 }
3653
3654 if (cleared)
3655 {
3656 /* Zap the CONSTRUCTOR element list, which simplifies this case.
3657 Note that we still have to gimplify, in order to handle the
3658 case of variable sized types. Avoid shared tree structures. */
3659 CONSTRUCTOR_ELTS (ctor) = NULL;
3660 TREE_SIDE_EFFECTS (ctor) = 0;
3661 object = unshare_expr (object);
3662 gimplify_stmt (expr_p, pre_p);
3663 }
3664
3665 /* If we have not block cleared the object, or if there are nonzero
3666 elements in the constructor, add assignments to the individual
3667 scalar fields of the object. */
3668 if (!cleared || num_nonzero_elements > 0)
3669 gimplify_init_ctor_eval (object, elts, pre_p, cleared);
3670
3671 *expr_p = NULL_TREE;
3672 }
3673 break;
3674
3675 case COMPLEX_TYPE:
3676 {
3677 tree r, i;
3678
3679 if (notify_temp_creation)
3680 return GS_OK;
3681
3682 /* Extract the real and imaginary parts out of the ctor. */
3683 gcc_assert (VEC_length (constructor_elt, elts) == 2);
3684 r = VEC_index (constructor_elt, elts, 0)->value;
3685 i = VEC_index (constructor_elt, elts, 1)->value;
3686 if (r == NULL || i == NULL)
3687 {
3688 tree zero = fold_convert (TREE_TYPE (type), integer_zero_node);
3689 if (r == NULL)
3690 r = zero;
3691 if (i == NULL)
3692 i = zero;
3693 }
3694
3695 /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
3696 represent creation of a complex value. */
3697 if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
3698 {
3699 ctor = build_complex (type, r, i);
3700 TREE_OPERAND (*expr_p, 1) = ctor;
3701 }
3702 else
3703 {
3704 ctor = build2 (COMPLEX_EXPR, type, r, i);
3705 TREE_OPERAND (*expr_p, 1) = ctor;
3706 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
3707 pre_p,
3708 post_p,
3709 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
3710 fb_rvalue);
3711 }
3712 }
3713 break;
3714
3715 case VECTOR_TYPE:
3716 {
3717 unsigned HOST_WIDE_INT ix;
3718 constructor_elt *ce;
3719
3720 if (notify_temp_creation)
3721 return GS_OK;
3722
3723 /* Go ahead and simplify constant constructors to VECTOR_CST. */
3724 if (TREE_CONSTANT (ctor))
3725 {
3726 bool constant_p = true;
3727 tree value;
3728
3729 /* Even when ctor is constant, it might contain non-*_CST
3730 elements, such as addresses or trapping values like
3731 1.0/0.0 - 1.0/0.0. Such expressions don't belong
3732 in VECTOR_CST nodes. */
3733 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
3734 if (!CONSTANT_CLASS_P (value))
3735 {
3736 constant_p = false;
3737 break;
3738 }
3739
3740 if (constant_p)
3741 {
3742 TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
3743 break;
3744 }
3745
3746 /* Don't reduce an initializer constant even if we can't
3747 make a VECTOR_CST. It won't do anything for us, and it'll
3748 prevent us from representing it as a single constant. */
3749 if (initializer_constant_valid_p (ctor, type))
3750 break;
3751
3752 TREE_CONSTANT (ctor) = 0;
3753 }
3754
3755 /* Vector types use CONSTRUCTOR all the way through gimple
3756 compilation as a general initializer. */
3757 for (ix = 0; VEC_iterate (constructor_elt, elts, ix, ce); ix++)
3758 {
3759 enum gimplify_status tret;
3760 tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
3761 fb_rvalue);
3762 if (tret == GS_ERROR)
3763 ret = GS_ERROR;
3764 }
3765 if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
3766 TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
3767 }
3768 break;
3769
3770 default:
3771 /* So how did we get a CONSTRUCTOR for a scalar type? */
3772 gcc_unreachable ();
3773 }
3774
3775 if (ret == GS_ERROR)
3776 return GS_ERROR;
3777 else if (want_value)
3778 {
3779 *expr_p = object;
3780 return GS_OK;
3781 }
3782 else
3783 {
3784 /* If we have gimplified both sides of the initializer but have
3785 not emitted an assignment, do so now. */
3786 if (*expr_p)
3787 {
3788 tree lhs = TREE_OPERAND (*expr_p, 0);
3789 tree rhs = TREE_OPERAND (*expr_p, 1);
3790 gimple init = gimple_build_assign (lhs, rhs);
3791 gimplify_seq_add_stmt (pre_p, init);
3792 *expr_p = NULL;
3793 }
3794
3795 return GS_ALL_DONE;
3796 }
3797 }
3798
3799 /* Given a pointer value T, return a simplified version of an
3800 indirection through T, or NULL_TREE if no simplification is
3801 possible. Note that the resulting type may differ from the
3802 pointed-to type, but only to the extent that it is still
3803 compatible from the langhooks point of view. */
3804
3805 tree
3806 gimple_fold_indirect_ref (tree t)
3807 {
3808 tree type = TREE_TYPE (TREE_TYPE (t));
3809 tree sub = t;
3810 tree subtype;
3811
3812 STRIP_USELESS_TYPE_CONVERSION (sub);
3813 subtype = TREE_TYPE (sub);
3814 if (!POINTER_TYPE_P (subtype))
3815 return NULL_TREE;
3816
3817 if (TREE_CODE (sub) == ADDR_EXPR)
3818 {
3819 tree op = TREE_OPERAND (sub, 0);
3820 tree optype = TREE_TYPE (op);
3821 /* *&p => p */
3822 if (useless_type_conversion_p (type, optype))
3823 return op;
3824
3825 /* *(foo *)&fooarray => fooarray[0] */
3826 if (TREE_CODE (optype) == ARRAY_TYPE
3827 && useless_type_conversion_p (type, TREE_TYPE (optype)))
3828 {
3829 tree type_domain = TYPE_DOMAIN (optype);
3830 tree min_val = size_zero_node;
3831 if (type_domain && TYPE_MIN_VALUE (type_domain))
3832 min_val = TYPE_MIN_VALUE (type_domain);
3833 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
3834 }
3835 }
3836
3837 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
3838 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
3839 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
3840 {
3841 tree type_domain;
3842 tree min_val = size_zero_node;
3843 tree osub = sub;
3844 sub = gimple_fold_indirect_ref (sub);
3845 if (! sub)
3846 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
3847 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
3848 if (type_domain && TYPE_MIN_VALUE (type_domain))
3849 min_val = TYPE_MIN_VALUE (type_domain);
3850 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
3851 }
3852
3853 return NULL_TREE;
3854 }
3855
3856 /* Given a pointer value T, return a simplified version of an
3857 indirection through T, or NULL_TREE if no simplification is
3858 possible. This may only be applied to the RHS of an expression.
3859 Note that the resulting type may differ from the pointed-to
3860 type, but only to the extent that it is still compatible from
3861 the langhooks point of view. */
3862
3863 static tree
3864 gimple_fold_indirect_ref_rhs (tree t)
3865 {
3866 return gimple_fold_indirect_ref (t);
3867 }
3868
3869 /* Subroutine of gimplify_modify_expr to do simplifications of
3870 MODIFY_EXPRs based on the code of the RHS. We loop for as long as
3871 something changes. */
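/* For instance (a sketch; D.1 and f are illustrative names), an
   assignment whose RHS is a TARGET_EXPR has the temporary bypassed:

     x = TARGET_EXPR <D.1, f (y)>   =>   x = f (y)

   and a COND_EXPR RHS of non-register type has the assignment pushed
   down into both arms instead of going through a temporary.  */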
3872
3873 static enum gimplify_status
3874 gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
3875 gimple_seq *pre_p, gimple_seq *post_p,
3876 bool want_value)
3877 {
3878 enum gimplify_status ret = GS_OK;
3879
3880 while (ret != GS_UNHANDLED)
3881 switch (TREE_CODE (*from_p))
3882 {
3883 case VAR_DECL:
3884 /* If we're assigning from a constant constructor, move the
3885 constructor expression to the RHS of the MODIFY_EXPR. */
3886 if (DECL_INITIAL (*from_p)
3887 && TREE_READONLY (*from_p)
3888 && !TREE_THIS_VOLATILE (*from_p)
3889 && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR)
3890 {
3891 tree old_from = *from_p;
3892
3893 /* Move the constructor into the RHS. */
3894 *from_p = unshare_expr (DECL_INITIAL (*from_p));
3895
3896 /* Let's see if gimplify_init_constructor will need to put
3897 it in memory. If so, revert the change. */
3898 ret = gimplify_init_constructor (expr_p, NULL, NULL, false, true);
3899 if (ret == GS_ERROR)
3900 {
3901 *from_p = old_from;
3902 /* Fall through. */
3903 }
3904 else
3905 {
3906 ret = GS_OK;
3907 break;
3908 }
3909 }
3910 ret = GS_UNHANDLED;
3911 break;
3912 case INDIRECT_REF:
3913 {
3914 /* If we have code like
3915
3916 *(const A*)(A*)&x
3917
3918 where the type of "x" is a (possibly cv-qualified variant
3919 of "A"), treat the entire expression as identical to "x".
3920 This kind of code arises in C++ when an object is bound
3921 to a const reference, and if "x" is a TARGET_EXPR we want
3922 to take advantage of the optimization below. */
3923 tree t = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
3924 if (t)
3925 {
3926 *from_p = t;
3927 ret = GS_OK;
3928 }
3929 else
3930 ret = GS_UNHANDLED;
3931 break;
3932 }
3933
3934 case TARGET_EXPR:
3935 {
3936 /* If we are initializing something from a TARGET_EXPR, strip the
3937 TARGET_EXPR and initialize it directly, if possible. This can't
3938 be done if the initializer is void, since that implies that the
3939 temporary is set in some non-trivial way.
3940
3941 ??? What about code that pulls out the temp and uses it
3942 elsewhere? I think that such code never uses the TARGET_EXPR as
3943 an initializer. If I'm wrong, we'll die because the temp won't
3944 have any RTL. In that case, I guess we'll need to replace
3945 references somehow. */
3946 tree init = TARGET_EXPR_INITIAL (*from_p);
3947
3948 if (init
3949 && !VOID_TYPE_P (TREE_TYPE (init)))
3950 {
3951 *from_p = init;
3952 ret = GS_OK;
3953 }
3954 else
3955 ret = GS_UNHANDLED;
3956 }
3957 break;
3958
3959 case COMPOUND_EXPR:
3960 /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
3961 caught. */
3962 gimplify_compound_expr (from_p, pre_p, true);
3963 ret = GS_OK;
3964 break;
3965
3966 case CONSTRUCTOR:
3967 /* If we're initializing from a CONSTRUCTOR, break this into
3968 individual MODIFY_EXPRs. */
3969 return gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
3970 false);
3971
3972 case COND_EXPR:
3973 /* If we're assigning to a non-register type, push the assignment
3974 down into the branches. This is mandatory for ADDRESSABLE types,
3975 since we cannot generate temporaries for such, but it saves a
3976 copy in other cases as well. */
3977 if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
3978 {
3979 /* This code should mirror the code in gimplify_cond_expr. */
3980 enum tree_code code = TREE_CODE (*expr_p);
3981 tree cond = *from_p;
3982 tree result = *to_p;
3983
3984 ret = gimplify_expr (&result, pre_p, post_p,
3985 is_gimple_lvalue, fb_lvalue);
3986 if (ret != GS_ERROR)
3987 ret = GS_OK;
3988
3989 if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
3990 TREE_OPERAND (cond, 1)
3991 = build2 (code, void_type_node, result,
3992 TREE_OPERAND (cond, 1));
3993 if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
3994 TREE_OPERAND (cond, 2)
3995 = build2 (code, void_type_node, unshare_expr (result),
3996 TREE_OPERAND (cond, 2));
3997
3998 TREE_TYPE (cond) = void_type_node;
3999 recalculate_side_effects (cond);
4000
4001 if (want_value)
4002 {
4003 gimplify_and_add (cond, pre_p);
4004 *expr_p = unshare_expr (result);
4005 }
4006 else
4007 *expr_p = cond;
4008 return ret;
4009 }
4010 else
4011 ret = GS_UNHANDLED;
4012 break;
4013
4014 case CALL_EXPR:
4015 /* For calls that return in memory, give *to_p as the CALL_EXPR's
4016 return slot so that we don't generate a temporary. */
4017 if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
4018 && aggregate_value_p (*from_p, *from_p))
4019 {
4020 bool use_target;
4021
4022 if (!(rhs_predicate_for (*to_p))(*from_p))
4023 /* If we need a temporary, *to_p isn't accurate. */
4024 use_target = false;
4025 else if (TREE_CODE (*to_p) == RESULT_DECL
4026 && DECL_NAME (*to_p) == NULL_TREE
4027 && needs_to_live_in_memory (*to_p))
4028 /* It's OK to use the return slot directly unless it's an NRV. */
4029 use_target = true;
4030 else if (is_gimple_reg_type (TREE_TYPE (*to_p))
4031 || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
4032 /* Don't force regs into memory. */
4033 use_target = false;
4034 else if (TREE_CODE (*to_p) == VAR_DECL
4035 && DECL_GIMPLE_FORMAL_TEMP_P (*to_p))
4036 /* Don't use the original target if it's a formal temp; we
4037 don't want to take its address. */
4038 use_target = false;
4039 else if (TREE_CODE (*expr_p) == INIT_EXPR)
4040 /* It's OK to use the target directly if it's being
4041 initialized. */
4042 use_target = true;
4043 else if (!is_gimple_non_addressable (*to_p))
4044 /* Don't use the original target if it's already addressable;
4045 if its address escapes, and the called function uses the
4046 NRV optimization, a conforming program could see *to_p
4047 change before the called function returns; see c++/19317.
4048 When optimizing, the return_slot pass marks more functions
4049 as safe after we have escape info. */
4050 use_target = false;
4051 else
4052 use_target = true;
4053
4054 if (use_target)
4055 {
4056 CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
4057 mark_addressable (*to_p);
4058 }
4059 }
4060
4061 ret = GS_UNHANDLED;
4062 break;
4063
4064 /* If we're initializing from a container, push the initialization
4065 inside it. */
4066 case CLEANUP_POINT_EXPR:
4067 case BIND_EXPR:
4068 case STATEMENT_LIST:
4069 {
4070 tree wrap = *from_p;
4071 tree t;
4072
4073 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
4074 fb_lvalue);
4075 if (ret != GS_ERROR)
4076 ret = GS_OK;
4077
4078 t = voidify_wrapper_expr (wrap, *expr_p);
4079 gcc_assert (t == *expr_p);
4080
4081 if (want_value)
4082 {
4083 gimplify_and_add (wrap, pre_p);
4084 *expr_p = unshare_expr (*to_p);
4085 }
4086 else
4087 *expr_p = wrap;
4088 return GS_OK;
4089 }
4090
4091 default:
4092 ret = GS_UNHANDLED;
4093 break;
4094 }
4095
4096 return ret;
4097 }
4098
4099
4100 /* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
4101 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with
4102 DECL_GIMPLE_REG_P set.
4103
4104 IMPORTANT NOTE: This promotion is performed by introducing a load of the
4105 other, unmodified part of the complex object just before the total store.
4106 As a consequence, if the object is still uninitialized, an undefined value
4107 will be loaded into a register, which may result in a spurious exception
4108 if the register is floating-point and the value happens to be a signaling
4109 NaN for example. Then the fully-fledged complex operations lowering pass
4110 followed by a DCE pass are necessary in order to fix things up. */
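/* A minimal sketch of the promotion (D.1 is an illustrative temporary):

     __real__ c = x;

   becomes, for a DECL_GIMPLE_REG_P complex variable c,

     D.1 = __imag__ c;
     c = COMPLEX_EXPR <x, D.1>;  */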
4111
4112 static enum gimplify_status
4113 gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
4114 bool want_value)
4115 {
4116 enum tree_code code, ocode;
4117 tree lhs, rhs, new_rhs, other, realpart, imagpart;
4118
4119 lhs = TREE_OPERAND (*expr_p, 0);
4120 rhs = TREE_OPERAND (*expr_p, 1);
4121 code = TREE_CODE (lhs);
4122 lhs = TREE_OPERAND (lhs, 0);
4123
4124 ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
4125 other = build1 (ocode, TREE_TYPE (rhs), lhs);
4126 other = get_formal_tmp_var (other, pre_p);
4127
4128 realpart = code == REALPART_EXPR ? rhs : other;
4129 imagpart = code == REALPART_EXPR ? other : rhs;
4130
4131 if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
4132 new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
4133 else
4134 new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
4135
4136 gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
4137 *expr_p = (want_value) ? rhs : NULL_TREE;
4138
4139 return GS_ALL_DONE;
4140 }
4141
4142
4143 /* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
4144
4145 modify_expr
4146 : varname '=' rhs
4147 | '*' ID '=' rhs
4148
4149 PRE_P points to the list where side effects that must happen before
4150 *EXPR_P should be stored.
4151
4152 POST_P points to the list where side effects that must happen after
4153 *EXPR_P should be stored.
4154
4155 WANT_VALUE is nonzero iff we want to use the value of this expression
4156 in another expression. */
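/* For example (roughly, following the notation used further below):

     a = b + c     =>   GIMPLE_ASSIGN <a, b + c>
     a = foo (b)   =>   a single GIMPLE_CALL <a, foo, b>

   i.e. a call on the RHS is emitted as one call statement with 'a' as
   its LHS rather than as an assignment with a call operand.  */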
4157
4158 static enum gimplify_status
4159 gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
4160 bool want_value)
4161 {
4162 tree *from_p = &TREE_OPERAND (*expr_p, 1);
4163 tree *to_p = &TREE_OPERAND (*expr_p, 0);
4164 enum gimplify_status ret = GS_UNHANDLED;
4165 gimple assign;
4166
4167 gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
4168 || TREE_CODE (*expr_p) == INIT_EXPR);
4169
4170 /* Insert pointer conversions required by the middle-end that are not
4171 required by the frontend. This fixes middle-end type checking
4172 for, e.g., gcc.dg/redecl-6.c. */
4173 if (POINTER_TYPE_P (TREE_TYPE (*to_p))
4174 && lang_hooks.types_compatible_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
4175 {
4176 STRIP_USELESS_TYPE_CONVERSION (*from_p);
4177 if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
4178 *from_p = fold_convert (TREE_TYPE (*to_p), *from_p);
4179 }
4180
4181 /* See if any simplifications can be done based on what the RHS is. */
4182 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
4183 want_value);
4184 if (ret != GS_UNHANDLED)
4185 return ret;
4186
4187 /* For zero sized types only gimplify the left hand side and right hand
4188 side as statements and throw away the assignment. Do this after
4189 gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
4190 types properly. */
4191 if (zero_sized_type (TREE_TYPE (*from_p)) && !want_value)
4192 {
4193 gimplify_stmt (from_p, pre_p);
4194 gimplify_stmt (to_p, pre_p);
4195 *expr_p = NULL_TREE;
4196 return GS_ALL_DONE;
4197 }
4198
4199 /* If the value being copied is of variable width, compute the length
4200 of the copy into a WITH_SIZE_EXPR. Note that we need to do this
4201 before gimplifying any of the operands so that we can resolve any
4202 PLACEHOLDER_EXPRs in the size. Also note that the RTL expander uses
4203 the size of the expression to be copied, not of the destination, so
4204 that is what we must do here. */
4205 maybe_with_size_expr (from_p);
4206
4207 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
4208 if (ret == GS_ERROR)
4209 return ret;
4210
4211 /* As a special case, we have to temporarily allow for assignments
4212 with a CALL_EXPR on the RHS. Since in GIMPLE a function call is
4213 a toplevel statement, when gimplifying the GENERIC expression
4214 MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
4215 GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.
4216
4217 Instead, we need to create the tuple GIMPLE_CALL <a, foo>. To
4218 prevent gimplify_expr from trying to create a new temporary for
4219 foo's LHS, we tell it that it should only gimplify until it
4220 reaches the CALL_EXPR. On return from gimplify_expr, the newly
4221 created GIMPLE_CALL <foo> will be the last statement in *PRE_P
4222 and all we need to do here is set 'a' to be its LHS. */
4223 ret = gimplify_expr (from_p, pre_p, post_p, rhs_predicate_for (*to_p),
4224 fb_rvalue);
4225 if (ret == GS_ERROR)
4226 return ret;
4227
4228 /* Now see if the above changed *from_p to something we handle specially. */
4229 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
4230 want_value);
4231 if (ret != GS_UNHANDLED)
4232 return ret;
4233
4234 /* If we've got a variable-sized assignment between two lvalues (i.e. one
4235 that does not involve a call), then we can make things a bit more
4236 straightforward by converting the assignment to memcpy or memset. */
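/* E.g. (a sketch): for variable-sized A and B, 'A = B' is turned into
   __builtin_memcpy (&A, &B, SIZE), and 'A = {}' into
   __builtin_memset (&A, 0, SIZE).  */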
4237 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
4238 {
4239 tree from = TREE_OPERAND (*from_p, 0);
4240 tree size = TREE_OPERAND (*from_p, 1);
4241
4242 if (TREE_CODE (from) == CONSTRUCTOR)
4243 return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);
4244
4245 if (is_gimple_addressable (from))
4246 {
4247 *from_p = from;
4248 return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
4249 pre_p);
4250 }
4251 }
4252
4253 /* Transform partial stores to non-addressable complex variables into
4254 total stores. This allows us to use real instead of virtual operands
4255 for these variables, which improves optimization. */
4256 if ((TREE_CODE (*to_p) == REALPART_EXPR
4257 || TREE_CODE (*to_p) == IMAGPART_EXPR)
4258 && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
4259 return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);
4260
4261 /* Try to alleviate the effects of the gimplification creating artificial
4262 temporaries (see for example is_gimple_reg_rhs) on the debug info. */
4263 if (!gimplify_ctxp->into_ssa
4264 && DECL_P (*from_p)
4265 && DECL_IGNORED_P (*from_p)
4266 && DECL_P (*to_p)
4267 && !DECL_IGNORED_P (*to_p))
4268 {
4269 if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
4270 DECL_NAME (*from_p)
4271 = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
4272 DECL_DEBUG_EXPR_IS_FROM (*from_p) = 1;
4273 SET_DECL_DEBUG_EXPR (*from_p, *to_p);
4274 }
4275
4276 if (TREE_CODE (*from_p) == CALL_EXPR)
4277 {
4278 /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
4279 instead of a GIMPLE_ASSIGN. */
4280 assign = gimple_build_call_from_tree (*from_p);
4281 gimple_call_set_lhs (assign, *to_p);
4282 }
4283 else
4284 assign = gimple_build_assign (*to_p, *from_p);
4285
4286 gimplify_seq_add_stmt (pre_p, assign);
4287
4288 if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
4289 {
4290 /* If we've somehow already got an SSA_NAME on the LHS, then
4291 we've probably modified it twice. Not good. */
4292 gcc_assert (TREE_CODE (*to_p) != SSA_NAME);
4293 *to_p = make_ssa_name (*to_p, assign);
4294 gimple_set_lhs (assign, *to_p);
4295 }
4296
4297 if (want_value)
4298 {
4299 *expr_p = unshare_expr (*to_p);
4300 return GS_OK;
4301 }
4302 else
4303 *expr_p = NULL;
4304
4305 return GS_ALL_DONE;
4306 }
4307
4308 /* Gimplify a comparison between two variable-sized objects. Do this
4309 with a call to BUILT_IN_MEMCMP. */
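/* A sketch of the rewrite (A and B are illustrative operands):

     A == B   =>   __builtin_memcmp (&A, &B, <size of A>) == 0

   with the original comparison code preserved against zero.  */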
4310
4311 static enum gimplify_status
4312 gimplify_variable_sized_compare (tree *expr_p)
4313 {
4314 tree op0 = TREE_OPERAND (*expr_p, 0);
4315 tree op1 = TREE_OPERAND (*expr_p, 1);
4316 tree t, arg, dest, src;
4317
4318 arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
4319 arg = unshare_expr (arg);
4320 arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
4321 src = build_fold_addr_expr (op1);
4322 dest = build_fold_addr_expr (op0);
4323 t = implicit_built_in_decls[BUILT_IN_MEMCMP];
4324 t = build_call_expr (t, 3, dest, src, arg);
4325 *expr_p
4326 = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
4327
4328 return GS_OK;
4329 }
4330
4331 /* Gimplify a comparison between two aggregate objects of integral scalar
4332 mode as a comparison between the bitwise equivalent scalar values. */
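/* For example (a sketch), for two structs S1 and S2 whose type has
   integral mode SImode on a typical target:

     S1 == S2
       =>  VIEW_CONVERT_EXPR<unsigned int>(S1)
           == VIEW_CONVERT_EXPR<unsigned int>(S2)  */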
4333
4334 static enum gimplify_status
4335 gimplify_scalar_mode_aggregate_compare (tree *expr_p)
4336 {
4337 tree op0 = TREE_OPERAND (*expr_p, 0);
4338 tree op1 = TREE_OPERAND (*expr_p, 1);
4339
4340 tree type = TREE_TYPE (op0);
4341 tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
4342
4343 op0 = fold_build1 (VIEW_CONVERT_EXPR, scalar_type, op0);
4344 op1 = fold_build1 (VIEW_CONVERT_EXPR, scalar_type, op1);
4345
4346 *expr_p
4347 = fold_build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
4348
4349 return GS_OK;
4350 }
4351
4352 /* Gimplify TRUTH_ANDIF_EXPR and TRUTH_ORIF_EXPR expressions. EXPR_P
4353 points to the expression to gimplify.
4354
4355 Expressions of the form 'a && b' are gimplified to:
4356
4357 a && b ? true : false
4358
4359 gimplify_cond_expr will do the rest.
4360
4361 PRE_P points to the list where side effects that must happen before
4362 *EXPR_P should be stored. */
4363
4364 static enum gimplify_status
4365 gimplify_boolean_expr (tree *expr_p)
4366 {
4367 /* Preserve the original type of the expression. */
4368 tree type = TREE_TYPE (*expr_p);
4369
4370 *expr_p = build3 (COND_EXPR, type, *expr_p,
4371 fold_convert (type, boolean_true_node),
4372 fold_convert (type, boolean_false_node));
4373
4374 return GS_OK;
4375 }
4376
4377 /* Gimplifies an expression sequence. This function gimplifies each
4378 expression and re-writes the original expression with the last
4379 expression of the sequence in GIMPLE form.
4380
4381 PRE_P points to the list where the side effects for all the
4382 expressions in the sequence will be emitted.
4383
4384 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
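/* For example (roughly), gimplifying

     (a = f (), b = g (), c)

   emits 'a = f ();' and 'b = g ();' into PRE_P and rewrites *EXPR_P
   to just 'c'.  */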
4385
4386 static enum gimplify_status
4387 gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
4388 {
4389 tree t = *expr_p;
4390
4391 do
4392 {
4393 tree *sub_p = &TREE_OPERAND (t, 0);
4394
4395 if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
4396 gimplify_compound_expr (sub_p, pre_p, false);
4397 else
4398 gimplify_stmt (sub_p, pre_p);
4399
4400 t = TREE_OPERAND (t, 1);
4401 }
4402 while (TREE_CODE (t) == COMPOUND_EXPR);
4403
4404 *expr_p = t;
4405 if (want_value)
4406 return GS_OK;
4407 else
4408 {
4409 gimplify_stmt (expr_p, pre_p);
4410 return GS_ALL_DONE;
4411 }
4412 }
4413
4414
4415 /* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
4416 gimplify. After gimplification, EXPR_P will point to a new temporary
4417 that holds the original value of the SAVE_EXPR node.
4418
4419 PRE_P points to the list where side effects that must happen before
4420 *EXPR_P should be stored. */
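/* For instance (illustrative), the first time SAVE_EXPR <a + b> is
   gimplified it emits

     tmp = a + b;

   into PRE_P and rewrites *EXPR_P to tmp; later gimplifications of the
   same (now resolved) SAVE_EXPR simply reuse tmp.  */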
4421
4422 static enum gimplify_status
4423 gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4424 {
4425 enum gimplify_status ret = GS_ALL_DONE;
4426 tree val;
4427
4428 gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
4429 val = TREE_OPERAND (*expr_p, 0);
4430
4431 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
4432 if (!SAVE_EXPR_RESOLVED_P (*expr_p))
4433 {
4434 /* The operand may be a void-valued expression, as in the SAVE_EXPRs
4435 generated by the Java frontend for class initialization; it is
4436 executed only for its side effects. */
4437 if (TREE_TYPE (val) == void_type_node)
4438 {
4439 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
4440 is_gimple_stmt, fb_none);
4441 val = NULL;
4442 }
4443 else
4444 val = get_initialized_tmp_var (val, pre_p, post_p);
4445
4446 TREE_OPERAND (*expr_p, 0) = val;
4447 SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
4448 }
4449
4450 *expr_p = val;
4451
4452 return ret;
4453 }
4454
4455 /* Re-write the ADDR_EXPR node pointed to by EXPR_P
4456
4457 unary_expr
4458 : ...
4459 | '&' varname
4460 ...
4461
4462 PRE_P points to the list where side effects that must happen before
4463 *EXPR_P should be stored.
4464
4465 POST_P points to the list where side effects that must happen after
4466 *EXPR_P should be stored. */
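/* Roughly (a sketch):

     &*p                       =>  p  (with a qualification cast if needed)
     &VIEW_CONVERT_EXPR<T>(x)  =>  (T *) &x

   and in the general case the operand is gimplified to something
   addressable and then marked TREE_ADDRESSABLE.  */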
4467
4468 static enum gimplify_status
4469 gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4470 {
4471 tree expr = *expr_p;
4472 tree op0 = TREE_OPERAND (expr, 0);
4473 enum gimplify_status ret;
4474
4475 switch (TREE_CODE (op0))
4476 {
4477 case INDIRECT_REF:
4478 case MISALIGNED_INDIRECT_REF:
4479 do_indirect_ref:
4480 /* Check if we are dealing with an expression of the form '&*ptr'.
4481 While the front end folds away '&*ptr' into 'ptr', these
4482 expressions may be generated internally by the compiler (e.g.,
4483 builtins like __builtin_va_end). */
4484 /* Caution: the silent array decomposition semantics we allow for
4485 ADDR_EXPR means we can't always discard the pair. */
4486 /* Gimplification of the ADDR_EXPR operand may drop
4487 cv-qualification conversions, so make sure we add them if
4488 needed. */
4489 {
4490 tree op00 = TREE_OPERAND (op0, 0);
4491 tree t_expr = TREE_TYPE (expr);
4492 tree t_op00 = TREE_TYPE (op00);
4493
4494 if (!useless_type_conversion_p (t_expr, t_op00))
4495 op00 = fold_convert (TREE_TYPE (expr), op00);
4496 *expr_p = op00;
4497 ret = GS_OK;
4498 }
4499 break;
4500
4501 case VIEW_CONVERT_EXPR:
4502 /* Take the address of our operand and then convert it to the type of
4503 this ADDR_EXPR.
4504
4505 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
4506 all clear. The impact of this transformation is even less clear. */
4507
4508 /* If the operand is a useless conversion, look through it. Doing so
4509 guarantees that the ADDR_EXPR and its operand will remain of the
4510 same type. */
4511 if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
4512 op0 = TREE_OPERAND (op0, 0);
4513
4514 *expr_p = fold_convert (TREE_TYPE (expr),
4515 build_fold_addr_expr (TREE_OPERAND (op0, 0)));
4516 ret = GS_OK;
4517 break;
4518
4519 default:
4520 /* We use fb_either here because the C frontend sometimes takes
4521 the address of a call that returns a struct; see
4522 gcc.dg/c99-array-lval-1.c. The gimplifier will correctly make
4523 the implied temporary explicit. */
4524
4525 /* Mark the RHS addressable. */
4526 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
4527 is_gimple_addressable, fb_either);
4528 if (ret == GS_ERROR)
4529 break;
4530
4531 /* We cannot rely on making the RHS addressable if it is
4532 a temporary created by gimplification. In this case create a
4533 new temporary that is initialized by a copy (which will
4534 become a store after we mark it addressable).
4535 This mostly happens if the frontend passed us something that
4536 it could not mark addressable yet, like a Fortran
4537 pass-by-reference actual argument such as (int) floatvar. */
4538 if (is_gimple_formal_tmp_var (TREE_OPERAND (expr, 0)))
4539 TREE_OPERAND (expr, 0)
4540 = get_initialized_tmp_var (TREE_OPERAND (expr, 0), pre_p, post_p);
4541
4542 op0 = TREE_OPERAND (expr, 0);
4543
4544 /* For various reasons, the gimplification of the expression
4545 may have made a new INDIRECT_REF. */
4546 if (TREE_CODE (op0) == INDIRECT_REF)
4547 goto do_indirect_ref;
4548
4549 /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly. */
4550 recompute_tree_invariant_for_addr_expr (expr);
4551
4552 mark_addressable (TREE_OPERAND (expr, 0));
4553 break;
4554 }
4555
4556 return ret;
4557 }
4558
4559 /* Gimplify the operands of an ASM_EXPR. Input operands should be a gimple
4560 value; output operands should be a gimple lvalue. */
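/* For instance (illustrative), an in/out operand written as

     "+r" (x)

   is split below into an output "=r" (x) plus a matching input "0" (x)
   that refers to the output by its operand number.  */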
4561
4562 static enum gimplify_status
4563 gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4564 {
4565 tree expr;
4566 int noutputs;
4567 const char **oconstraints;
4568 int i;
4569 tree link;
4570 const char *constraint;
4571 bool allows_mem, allows_reg, is_inout;
4572 enum gimplify_status ret, tret;
4573 gimple stmt;
4574 VEC(tree, gc) *inputs;
4575 VEC(tree, gc) *outputs;
4576 VEC(tree, gc) *clobbers;
4577 tree link_next;
4578
4579 expr = *expr_p;
4580 noutputs = list_length (ASM_OUTPUTS (expr));
4581 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
4582
4583 inputs = outputs = clobbers = NULL;
4584
4585 ret = GS_ALL_DONE;
4586 link_next = NULL_TREE;
4587 for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
4588 {
4589 bool ok;
4590 size_t constraint_len;
4591
4592 link_next = TREE_CHAIN (link);
4593
4594 oconstraints[i]
4595 = constraint
4596 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4597 constraint_len = strlen (constraint);
4598 if (constraint_len == 0)
4599 continue;
4600
4601 ok = parse_output_constraint (&constraint, i, 0, 0,
4602 &allows_mem, &allows_reg, &is_inout);
4603 if (!ok)
4604 {
4605 ret = GS_ERROR;
4606 is_inout = false;
4607 }
4608
4609 if (!allows_reg && allows_mem)
4610 mark_addressable (TREE_VALUE (link));
4611
4612 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
4613 is_inout ? is_gimple_min_lval : is_gimple_lvalue,
4614 fb_lvalue | fb_mayfail);
4615 if (tret == GS_ERROR)
4616 {
4617 error ("invalid lvalue in asm output %d", i);
4618 ret = tret;
4619 }
4620
4621 VEC_safe_push (tree, gc, outputs, link);
4622 TREE_CHAIN (link) = NULL_TREE;
4623
4624 if (is_inout)
4625 {
4626 /* An input/output operand. To give the optimizers more
4627 flexibility, split it into separate input and output
4628 operands. */
4629 tree input;
4630 char buf[10];
4631
4632 /* Turn the in/out constraint into an output constraint. */
4633 char *p = xstrdup (constraint);
4634 p[0] = '=';
4635 TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);
4636
4637 /* And add a matching input constraint. */
4638 if (allows_reg)
4639 {
4640 sprintf (buf, "%d", i);
4641
4642 /* If there are multiple alternatives in the constraint,
4643 handle each of them individually. Those that allow a register
4644 will be replaced with the operand number; the others will stay
4645 unchanged. */
4646 if (strchr (p, ',') != NULL)
4647 {
4648 size_t len = 0, buflen = strlen (buf);
4649 char *beg, *end, *str, *dst;
4650
4651 for (beg = p + 1;;)
4652 {
4653 end = strchr (beg, ',');
4654 if (end == NULL)
4655 end = strchr (beg, '\0');
4656 if ((size_t) (end - beg) < buflen)
4657 len += buflen + 1;
4658 else
4659 len += end - beg + 1;
4660 if (*end)
4661 beg = end + 1;
4662 else
4663 break;
4664 }
4665
4666 str = (char *) alloca (len);
4667 for (beg = p + 1, dst = str;;)
4668 {
4669 const char *tem;
4670 bool mem_p, reg_p, inout_p;
4671
4672 end = strchr (beg, ',');
4673 if (end)
4674 *end = '\0';
4675 beg[-1] = '=';
4676 tem = beg - 1;
4677 parse_output_constraint (&tem, i, 0, 0,
4678 &mem_p, &reg_p, &inout_p);
4679 if (dst != str)
4680 *dst++ = ',';
4681 if (reg_p)
4682 {
4683 memcpy (dst, buf, buflen);
4684 dst += buflen;
4685 }
4686 else
4687 {
4688 if (end)
4689 len = end - beg;
4690 else
4691 len = strlen (beg);
4692 memcpy (dst, beg, len);
4693 dst += len;
4694 }
4695 if (end)
4696 beg = end + 1;
4697 else
4698 break;
4699 }
4700 *dst = '\0';
4701 input = build_string (dst - str, str);
4702 }
4703 else
4704 input = build_string (strlen (buf), buf);
4705 }
4706 else
4707 input = build_string (constraint_len - 1, constraint + 1);
4708
4709 free (p);
4710
4711 input = build_tree_list (build_tree_list (NULL_TREE, input),
4712 unshare_expr (TREE_VALUE (link)));
4713 ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
4714 }
4715 }
4716
4717 link_next = NULL_TREE;
4718 for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
4719 {
4720 link_next = TREE_CHAIN (link);
4721 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4722 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
4723 oconstraints, &allows_mem, &allows_reg);
4724
4725 /* If we can't make copies, we can only accept memory. */
4726 if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (link))))
4727 {
4728 if (allows_mem)
4729 allows_reg = 0;
4730 else
4731 {
4732 error ("impossible constraint in %<asm%>");
4733 error ("non-memory input %d must stay in memory", i);
4734 return GS_ERROR;
4735 }
4736 }
4737
4738 /* If the operand is a memory input, it should be an lvalue. */
4739 if (!allows_reg && allows_mem)
4740 {
4741 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
4742 is_gimple_lvalue, fb_lvalue | fb_mayfail);
4743 mark_addressable (TREE_VALUE (link));
4744 if (tret == GS_ERROR)
4745 {
4746 if (EXPR_HAS_LOCATION (TREE_VALUE (link)))
4747 input_location = EXPR_LOCATION (TREE_VALUE (link));
4748 error ("memory input %d is not directly addressable", i);
4749 ret = tret;
4750 }
4751 }
4752 else
4753 {
4754 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
4755 is_gimple_asm_val, fb_rvalue);
4756 if (tret == GS_ERROR)
4757 ret = tret;
4758 }
4759
4760 TREE_CHAIN (link) = NULL_TREE;
4761 VEC_safe_push (tree, gc, inputs, link);
4762 }
4763
4764 for (link = ASM_CLOBBERS (expr); link; ++i, link = TREE_CHAIN (link))
4765 VEC_safe_push (tree, gc, clobbers, link);
4766
4767 stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
4768 inputs, outputs, clobbers);
4769
4770 gimple_asm_set_volatile (stmt, ASM_VOLATILE_P (expr));
4771 gimple_asm_set_input (stmt, ASM_INPUT_P (expr));
4772
4773 gimplify_seq_add_stmt (pre_p, stmt);
4774
4775 return ret;
4776 }
4777
4778 /* Gimplify a CLEANUP_POINT_EXPR. Currently this works by adding
4779 GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
4780 gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
4781 return to this function.
4782
4783 FIXME should we complexify the prequeue handling instead? Or use flags
4784 for all the cleanups and let the optimizer tighten them up? The current
4785 code seems pretty fragile; it will break on a cleanup within any
4786 non-conditional nesting. But any such nesting would be broken, anyway;
4787 we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
4788 and continues out of it. We can do that at the RTL level, though, so
4789 having an optimizer to tighten up try/finally regions would be a Good
4790 Thing. */
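/* Schematically (a sketch), a gimplified body of

     stmt1;
     WCE <cleanup>;
     stmt2;
     stmt3;

   is rewritten below as

     stmt1;
     try { stmt2; stmt3; } finally { cleanup; }

   while a cleanup that ends the sequence is simply emitted in place.  */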
4791
4792 static enum gimplify_status
4793 gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
4794 {
4795 gimple_stmt_iterator iter;
4796 gimple_seq body_sequence = NULL;
4797
4798 tree temp = voidify_wrapper_expr (*expr_p, NULL);
4799
4800 /* We only care about the number of conditions between the innermost
4801 CLEANUP_POINT_EXPR and the cleanup. So save and reset the count and
4802 any cleanups collected outside the CLEANUP_POINT_EXPR. */
4803 int old_conds = gimplify_ctxp->conditions;
4804 gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
4805 gimplify_ctxp->conditions = 0;
4806 gimplify_ctxp->conditional_cleanups = NULL;
4807
4808 gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);
4809
4810 gimplify_ctxp->conditions = old_conds;
4811 gimplify_ctxp->conditional_cleanups = old_cleanups;
4812
4813 for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
4814 {
4815 gimple wce = gsi_stmt (iter);
4816
4817 if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
4818 {
4819 if (gsi_one_before_end_p (iter))
4820 {
4821 /* Note that gsi_insert_seq_before and gsi_remove do not
4822 scan operands, unlike some other sequence mutators. */
4823 gsi_insert_seq_before_without_update (&iter,
4824 gimple_wce_cleanup (wce),
4825 GSI_SAME_STMT);
4826 gsi_remove (&iter, true);
4827 break;
4828 }
4829 else
4830 {
4831 gimple gtry;
4832 gimple_seq seq;
4833 enum gimple_try_flags kind;
4834
4835 if (gimple_wce_cleanup_eh_only (wce))
4836 kind = GIMPLE_TRY_CATCH;
4837 else
4838 kind = GIMPLE_TRY_FINALLY;
4839 seq = gsi_split_seq_after (iter);
4840
4841 gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
4842 /* Do not use gsi_replace here, as it may scan operands.
4843 We want to do a simple structural modification only. */
4844 *gsi_stmt_ptr (&iter) = gtry;
4845 iter = gsi_start (seq);
4846 }
4847 }
4848 else
4849 gsi_next (&iter);
4850 }
4851
4852 gimplify_seq_add_seq (pre_p, body_sequence);
4853 if (temp)
4854 {
4855 *expr_p = temp;
4856 return GS_OK;
4857 }
4858 else
4859 {
4860 *expr_p = NULL;
4861 return GS_ALL_DONE;
4862 }
4863 }
4864
4865 /* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP
4866 is the cleanup action required. EH_ONLY is true if the cleanup should
4867 only be executed if an exception is thrown, not on normal exit. */
4868
4869 static void
4870 gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p)
4871 {
4872 gimple wce;
4873 gimple_seq cleanup_stmts = NULL;
4874
4875 /* Errors can result in improperly nested cleanups, which cause
4876 confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR. */
4877 if (errorcount || sorrycount)
4878 return;
4879
4880 if (gimple_conditional_context ())
4881 {
4882 /* If we're in a conditional context, this is more complex. We only
4883 want to run the cleanup if we actually ran the initialization that
4884 necessitates it, but we want to run it after the end of the
4885 conditional context. So we wrap the try/finally around the
4886 condition and use a flag to determine whether or not to actually
4887 run the destructor. Thus
4888
4889 test ? f(A()) : 0
4890
4891 becomes (approximately)
4892
4893 flag = 0;
4894 try {
4895 if (test) { A::A(temp); flag = 1; val = f(temp); }
4896 else { val = 0; }
4897 } finally {
4898 if (flag) A::~A(temp);
4899 }
4900 val
4901 */
4902 tree flag = create_tmp_var (boolean_type_node, "cleanup");
4903 gimple ffalse = gimple_build_assign (flag, boolean_false_node);
4904 gimple ftrue = gimple_build_assign (flag, boolean_true_node);
4905
4906 cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
4907 gimplify_stmt (&cleanup, &cleanup_stmts);
4908 wce = gimple_build_wce (cleanup_stmts);
4909
4910 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
4911 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
4912 gimplify_seq_add_stmt (pre_p, ftrue);
4913
4914 /* Because of this manipulation, and the EH edges that jump
4915 threading cannot redirect, the temporary (VAR) will appear
4916 to be used uninitialized. Don't warn. */
4917 TREE_NO_WARNING (var) = 1;
4918 }
4919 else
4920 {
4921 gimplify_stmt (&cleanup, &cleanup_stmts);
4922 wce = gimple_build_wce (cleanup_stmts);
4923 gimple_wce_set_cleanup_eh_only (wce, eh_only);
4924 gimplify_seq_add_stmt (pre_p, wce);
4925 }
4926 }
4927
4928 /* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */
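/* For example (roughly; D.1 is the TARGET_EXPR slot), TARGET_EXPR
   <D.1, f (x)> gimplifies to

     D.1 = f (x);

   with *EXPR_P replaced by D.1 and any TARGET_EXPR_CLEANUP queued via
   gimple_push_cleanup.  */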
4929
4930 static enum gimplify_status
4931 gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4932 {
4933 tree targ = *expr_p;
4934 tree temp = TARGET_EXPR_SLOT (targ);
4935 tree init = TARGET_EXPR_INITIAL (targ);
4936 enum gimplify_status ret;
4937
4938 if (init)
4939 {
4940 /* TARGET_EXPR temps aren't part of the enclosing block, so add the
4941 temp to the temps list. Also handle variable-length TARGET_EXPRs. */
4942 if (TREE_CODE (DECL_SIZE (temp)) != INTEGER_CST)
4943 {
4944 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
4945 gimplify_type_sizes (TREE_TYPE (temp), pre_p);
4946 gimplify_vla_decl (temp, pre_p);
4947 }
4948 else
4949 gimple_add_tmp_var (temp);
4950
4951 /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
4952 expression is supposed to initialize the slot. */
4953 if (VOID_TYPE_P (TREE_TYPE (init)))
4954 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
4955 else
4956 {
4957 tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
4958 init = init_expr;
4959 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
4960 init = NULL;
4961 ggc_free (init_expr);
4962 }
4963 if (ret == GS_ERROR)
4964 {
4965 /* PR c++/28266 Make sure this is expanded only once. */
4966 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
4967 return GS_ERROR;
4968 }
4969 if (init)
4970 gimplify_and_add (init, pre_p);
4971
4972 /* If needed, push the cleanup for the temp. */
4973 if (TARGET_EXPR_CLEANUP (targ))
4974 gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
4975 CLEANUP_EH_ONLY (targ), pre_p);
4976
4977 /* Only expand this once. */
4978 TREE_OPERAND (targ, 3) = init;
4979 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
4980 }
4981 else
4982 /* We should have expanded this before. */
4983 gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));
4984
4985 *expr_p = temp;
4986 return GS_OK;
4987 }
4988
4989 /* Gimplification of expression trees. */
4990
4991 /* Gimplify an expression which appears at statement context. The
4992 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is
4993 NULL, a new sequence is allocated.
4994
4995 Return true if we actually added a statement to the queue. */
4996
4997 bool
4998 gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
4999 {
5000 gimple_seq_node last;
5001
5002 if (!*seq_p)
5003 *seq_p = gimple_seq_alloc ();
5004
5005 last = gimple_seq_last (*seq_p);
5006 gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
5007 return last != gimple_seq_last (*seq_p);
5008 }
5009
5010
5011 /* Starting from CTX, add FIRSTPRIVATE entries for DECL in the surrounding
5012 OpenMP parallels. If entries already exist, force them to be some flavor
5013 of private. If there is no enclosing parallel, do nothing. */
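/* For example (a sketch), when a variable-length array 'int a[n]' is
   privatized inside '#pragma omp parallel', the gimplified size
   temporaries referenced by its type end up with GOVD_FIRSTPRIVATE
   entries in the enclosing parallel contexts, so the size is copied
   into the region.  */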
5014
5015 void
5016 omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
5017 {
5018 splay_tree_node n;
5019
5020 if (decl == NULL || !DECL_P (decl))
5021 return;
5022
5023 do
5024 {
5025 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5026 if (n != NULL)
5027 {
5028 if (n->value & GOVD_SHARED)
5029 n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
5030 else
5031 return;
5032 }
5033 else if (ctx->region_type != ORT_WORKSHARE)
5034 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
5035
5036 ctx = ctx->outer_context;
5037 }
5038 while (ctx);
5039 }
5040
5041 /* Similarly for each of the type sizes of TYPE. */
5042
5043 static void
5044 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
5045 {
5046 if (type == NULL || type == error_mark_node)
5047 return;
5048 type = TYPE_MAIN_VARIANT (type);
5049
5050 if (pointer_set_insert (ctx->privatized_types, type))
5051 return;
5052
5053 switch (TREE_CODE (type))
5054 {
5055 case INTEGER_TYPE:
5056 case ENUMERAL_TYPE:
5057 case BOOLEAN_TYPE:
5058 case REAL_TYPE:
5059 case FIXED_POINT_TYPE:
5060 omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
5061 omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
5062 break;
5063
5064 case ARRAY_TYPE:
5065 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
5066 omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
5067 break;
5068
5069 case RECORD_TYPE:
5070 case UNION_TYPE:
5071 case QUAL_UNION_TYPE:
5072 {
5073 tree field;
5074 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
5075 if (TREE_CODE (field) == FIELD_DECL)
5076 {
5077 omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
5078 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
5079 }
5080 }
5081 break;
5082
5083 case POINTER_TYPE:
5084 case REFERENCE_TYPE:
5085 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
5086 break;
5087
5088 default:
5089 break;
5090 }
5091
5092 omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
5093 omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
5094 lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
5095 }
5096
5097 /* Add an entry for DECL in the OpenMP context CTX with FLAGS. */
5098
5099 static void
5100 omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
5101 {
5102 splay_tree_node n;
5103 unsigned int nflags;
5104 tree t;
5105
5106 if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node)
5107 return;
5108
5109 /* Never elide decls whose type has TREE_ADDRESSABLE set. This means
5110 there are constructors involved somewhere. */
5111 if (TREE_ADDRESSABLE (TREE_TYPE (decl))
5112 || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl)))
5113 flags |= GOVD_SEEN;
5114
5115 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5116 if (n != NULL)
5117 {
5118 /* We shouldn't be re-adding the decl with the same data
5119 sharing class. */
5120 gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
5121 /* The only combination of data sharing classes we should see is
5122 FIRSTPRIVATE and LASTPRIVATE. */
5123 nflags = n->value | flags;
5124 gcc_assert ((nflags & GOVD_DATA_SHARE_CLASS)
5125 == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE));
5126 n->value = nflags;
5127 return;
5128 }
5129
5130 /* When adding a variable-sized variable, we have to handle all sorts
5131 of additional bits of data: the pointer replacement variable, and
5132 the parameters of the type. */
5133 if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
5134 {
5135 /* Add the pointer replacement variable as PRIVATE if the variable
5136 replacement is private, else FIRSTPRIVATE since we'll need the
5137 address of the original variable either for SHARED, or for the
5138 copy into or out of the context. */
5139 if (!(flags & GOVD_LOCAL))
5140 {
5141 nflags = flags & GOVD_PRIVATE ? GOVD_PRIVATE : GOVD_FIRSTPRIVATE;
5142 nflags |= flags & GOVD_SEEN;
5143 t = DECL_VALUE_EXPR (decl);
5144 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
5145 t = TREE_OPERAND (t, 0);
5146 gcc_assert (DECL_P (t));
5147 omp_add_variable (ctx, t, nflags);
5148 }
5149
5150 /* Add all of the variable and type parameters (which should have
5151 been gimplified to a formal temporary) as FIRSTPRIVATE. */
5152 omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
5153 omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
5154 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
5155
5156 /* The variable-sized variable itself is never SHARED, only some form
5157 of PRIVATE. The sharing would take place via the pointer variable
5158 which we remapped above. */
5159 if (flags & GOVD_SHARED)
5160 flags = GOVD_PRIVATE | GOVD_DEBUG_PRIVATE
5161 | (flags & (GOVD_SEEN | GOVD_EXPLICIT));
5162
5163 /* We're going to make use of the TYPE_SIZE_UNIT at least in the
5164 alloca statement we generate for the variable, so make sure it
5165 is available. This isn't automatically needed for the SHARED
5166 case, since we won't be allocating local storage then.
5167 For local variables TYPE_SIZE_UNIT might not be gimplified yet;
5168 in that case omp_notice_variable will be called later on,
5169 when it is gimplified. */
5170 else if (! (flags & GOVD_LOCAL))
5171 omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
5172 }
5173 else if (lang_hooks.decls.omp_privatize_by_reference (decl))
5174 {
5175 gcc_assert ((flags & GOVD_LOCAL) == 0);
5176 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
5177
5178 /* As in the direct variable-sized case above, we'll need the
5179 size of the references being privatized. */
5180 if ((flags & GOVD_SHARED) == 0)
5181 {
5182 t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
5183 if (TREE_CODE (t) != INTEGER_CST)
5184 omp_notice_variable (ctx, t, true);
5185 }
5186 }
5187
5188 splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
5189 }
5190
5191 /* Record the fact that DECL was used within the OpenMP context CTX.
5192 IN_CODE is true when real code uses DECL, and false when we should
5193 merely emit default(none) errors. Return true if DECL is going to
5194 be remapped and thus DECL shouldn't be gimplified into its
5195 DECL_VALUE_EXPR (if any). */
5196
5197 static bool
5198 omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
5199 {
5200 splay_tree_node n;
5201 unsigned flags = in_code ? GOVD_SEEN : 0;
5202 bool ret = false, shared;
5203
5204 if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node)
5205 return false;
5206
5207 /* Threadprivate variables are predetermined. */
5208 if (is_global_var (decl))
5209 {
5210 if (DECL_THREAD_LOCAL_P (decl))
5211 return false;
5212
5213 if (DECL_HAS_VALUE_EXPR_P (decl))
5214 {
5215 tree value = get_base_address (DECL_VALUE_EXPR (decl));
5216
5217 if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
5218 return false;
5219 }
5220 }
5221
5222 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5223 if (n == NULL)
5224 {
5225 enum omp_clause_default_kind default_kind, kind;
5226 struct gimplify_omp_ctx *octx;
5227
5228 if (ctx->region_type == ORT_WORKSHARE)
5229 goto do_outer;
5230
5231 /* ??? Some compiler-generated variables (like SAVE_EXPRs) could be
5232 remapped firstprivate instead of shared. To some extent this is
5233 addressed in omp_firstprivatize_type_sizes, but not effectively. */
5234 default_kind = ctx->default_kind;
5235 kind = lang_hooks.decls.omp_predetermined_sharing (decl);
5236 if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
5237 default_kind = kind;
5238
5239 switch (default_kind)
5240 {
5241 case OMP_CLAUSE_DEFAULT_NONE:
5242 error ("%qs not specified in enclosing parallel",
5243 IDENTIFIER_POINTER (DECL_NAME (decl)));
5244 error ("%Henclosing parallel", &ctx->location);
5245 /* FALLTHRU */
5246 case OMP_CLAUSE_DEFAULT_SHARED:
5247 flags |= GOVD_SHARED;
5248 break;
5249 case OMP_CLAUSE_DEFAULT_PRIVATE:
5250 flags |= GOVD_PRIVATE;
5251 break;
5252 case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
5253 flags |= GOVD_FIRSTPRIVATE;
5254 break;
5255 case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
5256 /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED. */
5257 gcc_assert (ctx->region_type == ORT_TASK);
5258 if (ctx->outer_context)
5259 omp_notice_variable (ctx->outer_context, decl, in_code);
5260 for (octx = ctx->outer_context; octx; octx = octx->outer_context)
5261 {
5262 splay_tree_node n2;
5263
5264 n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
5265 if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
5266 {
5267 flags |= GOVD_FIRSTPRIVATE;
5268 break;
5269 }
5270 if ((octx->region_type & ORT_PARALLEL) != 0)
5271 break;
5272 }
5273 if (flags & GOVD_FIRSTPRIVATE)
5274 break;
5275 if (octx == NULL
5276 && (TREE_CODE (decl) == PARM_DECL
5277 || (!is_global_var (decl)
5278 && DECL_CONTEXT (decl) == current_function_decl)))
5279 {
5280 flags |= GOVD_FIRSTPRIVATE;
5281 break;
5282 }
5283 flags |= GOVD_SHARED;
5284 break;
5285 default:
5286 gcc_unreachable ();
5287 }
5288
5289 if ((flags & GOVD_PRIVATE)
5290 && lang_hooks.decls.omp_private_outer_ref (decl))
5291 flags |= GOVD_PRIVATE_OUTER_REF;
5292
5293 omp_add_variable (ctx, decl, flags);
5294
5295 shared = (flags & GOVD_SHARED) != 0;
5296 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
5297 goto do_outer;
5298 }
5299
5300 shared = ((flags | n->value) & GOVD_SHARED) != 0;
5301 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
5302
5303 /* If nothing changed, there's nothing left to do. */
5304 if ((n->value & flags) == flags)
5305 return ret;
5306 flags |= n->value;
5307 n->value = flags;
5308
5309 do_outer:
5310 /* If the variable is private in the current context, then we don't
5311 need to propagate anything to an outer context. */
5312 if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
5313 return ret;
5314 if (ctx->outer_context
5315 && omp_notice_variable (ctx->outer_context, decl, in_code))
5316 return true;
5317 return ret;
5318 }
5319
5320 /* Verify that DECL is private within CTX. If there's specific information
5321 to the contrary in the innermost scope, generate an error. */
5322
5323 static bool
5324 omp_is_private (struct gimplify_omp_ctx *ctx, tree decl)
5325 {
5326 splay_tree_node n;
5327
5328 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5329 if (n != NULL)
5330 {
5331 if (n->value & GOVD_SHARED)
5332 {
5333 if (ctx == gimplify_omp_ctxp)
5334 {
5335 error ("iteration variable %qs should be private",
5336 IDENTIFIER_POINTER (DECL_NAME (decl)));
5337 n->value = GOVD_PRIVATE;
5338 return true;
5339 }
5340 else
5341 return false;
5342 }
5343 else if ((n->value & GOVD_EXPLICIT) != 0
5344 && (ctx == gimplify_omp_ctxp
5345 || (ctx->region_type == ORT_COMBINED_PARALLEL
5346 && gimplify_omp_ctxp->outer_context == ctx)))
5347 {
5348 if ((n->value & GOVD_FIRSTPRIVATE) != 0)
5349 error ("iteration variable %qs should not be firstprivate",
5350 IDENTIFIER_POINTER (DECL_NAME (decl)));
5351 else if ((n->value & GOVD_REDUCTION) != 0)
5352 error ("iteration variable %qs should not be reduction",
5353 IDENTIFIER_POINTER (DECL_NAME (decl)));
5354 }
5355 return (ctx == gimplify_omp_ctxp
5356 || (ctx->region_type == ORT_COMBINED_PARALLEL
5357 && gimplify_omp_ctxp->outer_context == ctx));
5358 }
5359
5360 if (ctx->region_type != ORT_WORKSHARE)
5361 return false;
5362 else if (ctx->outer_context)
5363 return omp_is_private (ctx->outer_context, decl);
5364 return false;
5365 }
5366
5367 /* Return true if DECL is private within a parallel region that binds
5368 to the current construct's context, or appears in that parallel
5369 region's REDUCTION clause. */
5370
5371 static bool
5372 omp_check_private (struct gimplify_omp_ctx *ctx, tree decl)
5373 {
5374 splay_tree_node n;
5375
5376 do
5377 {
5378 ctx = ctx->outer_context;
5379 if (ctx == NULL)
5380 return !(is_global_var (decl)
5381 /* References might be private, but might be shared too. */
5382 || lang_hooks.decls.omp_privatize_by_reference (decl));
5383
5384 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
5385 if (n != NULL)
5386 return (n->value & GOVD_SHARED) == 0;
5387 }
5388 while (ctx->region_type == ORT_WORKSHARE);
5389 return false;
5390 }
5391
5392 /* Scan the OpenMP clauses in *LIST_P, installing mappings into a new
5393 omp context and, as needed, into the previous (outer) omp contexts. */
5394
5395 static void
5396 gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
5397 enum omp_region_type region_type)
5398 {
5399 struct gimplify_omp_ctx *ctx, *outer_ctx;
5400 struct gimplify_ctx gctx;
5401 tree c;
5402
5403 ctx = new_omp_context (region_type);
5404 outer_ctx = ctx->outer_context;
5405
5406 while ((c = *list_p) != NULL)
5407 {
5408 bool remove = false;
5409 bool notice_outer = true;
5410 const char *check_non_private = NULL;
5411 unsigned int flags;
5412 tree decl;
5413
5414 switch (OMP_CLAUSE_CODE (c))
5415 {
5416 case OMP_CLAUSE_PRIVATE:
5417 flags = GOVD_PRIVATE | GOVD_EXPLICIT;
5418 if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
5419 {
5420 flags |= GOVD_PRIVATE_OUTER_REF;
5421 OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
5422 }
5423 else
5424 notice_outer = false;
5425 goto do_add;
5426 case OMP_CLAUSE_SHARED:
5427 flags = GOVD_SHARED | GOVD_EXPLICIT;
5428 goto do_add;
5429 case OMP_CLAUSE_FIRSTPRIVATE:
5430 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
5431 check_non_private = "firstprivate";
5432 goto do_add;
5433 case OMP_CLAUSE_LASTPRIVATE:
5434 flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
5435 check_non_private = "lastprivate";
5436 goto do_add;
5437 case OMP_CLAUSE_REDUCTION:
5438 flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
5439 check_non_private = "reduction";
5440 goto do_add;
5441
5442 do_add:
5443 decl = OMP_CLAUSE_DECL (c);
5444 if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node)
5445 {
5446 remove = true;
5447 break;
5448 }
5449 omp_add_variable (ctx, decl, flags);
5450 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5451 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5452 {
5453 omp_add_variable (ctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
5454 GOVD_LOCAL | GOVD_SEEN);
5455 gimplify_omp_ctxp = ctx;
5456 push_gimplify_context (&gctx);
5457
5458 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = gimple_seq_alloc ();
5459 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = gimple_seq_alloc ();
5460
5461 gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
5462 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
5463 pop_gimplify_context
5464 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
5465 push_gimplify_context (&gctx);
5466 gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
5467 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
5468 pop_gimplify_context
5469 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
5470 OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
5471 OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;
5472
5473 gimplify_omp_ctxp = outer_ctx;
5474 }
5475 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5476 && OMP_CLAUSE_LASTPRIVATE_STMT (c))
5477 {
5478 gimplify_omp_ctxp = ctx;
5479 push_gimplify_context (&gctx);
5480 if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
5481 {
5482 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
5483 NULL, NULL);
5484 TREE_SIDE_EFFECTS (bind) = 1;
5485 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
5486 OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
5487 }
5488 gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
5489 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
5490 pop_gimplify_context
5491 (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
5492 OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;
5493
5494 gimplify_omp_ctxp = outer_ctx;
5495 }
5496 if (notice_outer)
5497 goto do_notice;
5498 break;
5499
5500 case OMP_CLAUSE_COPYIN:
5501 case OMP_CLAUSE_COPYPRIVATE:
5502 decl = OMP_CLAUSE_DECL (c);
5503 if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node)
5504 {
5505 remove = true;
5506 break;
5507 }
5508 do_notice:
5509 if (outer_ctx)
5510 omp_notice_variable (outer_ctx, decl, true);
5511 if (check_non_private
5512 && region_type == ORT_WORKSHARE
5513 && omp_check_private (ctx, decl))
5514 {
5515 error ("%s variable %qs is private in outer context",
5516 check_non_private, IDENTIFIER_POINTER (DECL_NAME (decl)));
5517 remove = true;
5518 }
5519 break;
5520
5521 case OMP_CLAUSE_IF:
5522 OMP_CLAUSE_OPERAND (c, 0)
5523 = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
5524 /* Fall through. */
5525
5526 case OMP_CLAUSE_SCHEDULE:
5527 case OMP_CLAUSE_NUM_THREADS:
5528 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
5529 is_gimple_val, fb_rvalue) == GS_ERROR)
5530 remove = true;
5531 break;
5532
5533 case OMP_CLAUSE_NOWAIT:
5534 case OMP_CLAUSE_ORDERED:
5535 case OMP_CLAUSE_UNTIED:
5536 case OMP_CLAUSE_COLLAPSE:
5537 break;
5538
5539 case OMP_CLAUSE_DEFAULT:
5540 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
5541 break;
5542
5543 default:
5544 gcc_unreachable ();
5545 }
5546
5547 if (remove)
5548 *list_p = OMP_CLAUSE_CHAIN (c);
5549 else
5550 list_p = &OMP_CLAUSE_CHAIN (c);
5551 }
5552
5553 gimplify_omp_ctxp = ctx;
5554 }
5555
5556 /* A splay_tree_foreach callback: add an implicit data-sharing clause
5557 for each variable that was seen in the context but has no explicit clause. */
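/* For instance (illustrative), a variable X referenced inside a parallel
   region with default(shared) and no explicit clause is recorded by
   omp_notice_variable as GOVD_SHARED | GOVD_SEEN, and this callback then
   prepends an implicit shared(X) clause to the clause list in DATA.  */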
5558
5559 static int
5560 gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
5561 {
5562 tree *list_p = (tree *) data;
5563 tree decl = (tree) n->key;
5564 unsigned flags = n->value;
5565 enum omp_clause_code code;
5566 tree clause;
5567 bool private_debug;
5568
5569 if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
5570 return 0;
5571 if ((flags & GOVD_SEEN) == 0)
5572 return 0;
5573 if (flags & GOVD_DEBUG_PRIVATE)
5574 {
5575 gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_PRIVATE);
5576 private_debug = true;
5577 }
5578 else
5579 private_debug
5580 = lang_hooks.decls.omp_private_debug_clause (decl,
5581 !!(flags & GOVD_SHARED));
5582 if (private_debug)
5583 code = OMP_CLAUSE_PRIVATE;
5584 else if (flags & GOVD_SHARED)
5585 {
5586 if (is_global_var (decl))
5587 {
5588 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
5589 while (ctx != NULL)
5590 {
5591 splay_tree_node on
5592 = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
5593 if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
5594 | GOVD_PRIVATE | GOVD_REDUCTION)) != 0)
5595 break;
5596 ctx = ctx->outer_context;
5597 }
5598 if (ctx == NULL)
5599 return 0;
5600 }
5601 code = OMP_CLAUSE_SHARED;
5602 }
5603 else if (flags & GOVD_PRIVATE)
5604 code = OMP_CLAUSE_PRIVATE;
5605 else if (flags & GOVD_FIRSTPRIVATE)
5606 code = OMP_CLAUSE_FIRSTPRIVATE;
5607 else
5608 gcc_unreachable ();
5609
5610 clause = build_omp_clause (code);
5611 OMP_CLAUSE_DECL (clause) = decl;
5612 OMP_CLAUSE_CHAIN (clause) = *list_p;
5613 if (private_debug)
5614 OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
5615 else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
5616 OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
5617 *list_p = clause;
5618 lang_hooks.decls.omp_finish_clause (clause);
5619
5620 return 0;
5621 }
5622
5623 static void
5624 gimplify_adjust_omp_clauses (tree *list_p)
5625 {
5626 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
5627 tree c, decl;
5628
5629 while ((c = *list_p) != NULL)
5630 {
5631 splay_tree_node n;
5632 bool remove = false;
5633
5634 switch (OMP_CLAUSE_CODE (c))
5635 {
5636 case OMP_CLAUSE_PRIVATE:
5637 case OMP_CLAUSE_SHARED:
5638 case OMP_CLAUSE_FIRSTPRIVATE:
5639 decl = OMP_CLAUSE_DECL (c);
5640 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
5641 remove = !(n->value & GOVD_SEEN);
5642 if (! remove)
5643 {
5644 bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
5645 if ((n->value & GOVD_DEBUG_PRIVATE)
5646 || lang_hooks.decls.omp_private_debug_clause (decl, shared))
5647 {
5648 gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
5649 || ((n->value & GOVD_DATA_SHARE_CLASS)
5650 == GOVD_PRIVATE));
5651 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
5652 OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
5653 }
5654 }
5655 break;
5656
5657 case OMP_CLAUSE_LASTPRIVATE:
5658 /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
5659 accurately reflect the presence of a FIRSTPRIVATE clause. */
5660 decl = OMP_CLAUSE_DECL (c);
5661 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
5662 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
5663 = (n->value & GOVD_FIRSTPRIVATE) != 0;
5664 break;
5665
5666 case OMP_CLAUSE_REDUCTION:
5667 case OMP_CLAUSE_COPYIN:
5668 case OMP_CLAUSE_COPYPRIVATE:
5669 case OMP_CLAUSE_IF:
5670 case OMP_CLAUSE_NUM_THREADS:
5671 case OMP_CLAUSE_SCHEDULE:
5672 case OMP_CLAUSE_NOWAIT:
5673 case OMP_CLAUSE_ORDERED:
5674 case OMP_CLAUSE_DEFAULT:
5675 case OMP_CLAUSE_UNTIED:
5676 case OMP_CLAUSE_COLLAPSE:
5677 break;
5678
5679 default:
5680 gcc_unreachable ();
5681 }
5682
5683 if (remove)
5684 *list_p = OMP_CLAUSE_CHAIN (c);
5685 else
5686 list_p = &OMP_CLAUSE_CHAIN (c);
5687 }
5688
5689 /* Add in any implicit data sharing. */
5690 splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, list_p);
5691
5692 gimplify_omp_ctxp = ctx->outer_context;
5693 delete_omp_context (ctx);
5694 }
5695
5696 /* Gimplify the contents of an OMP_PARALLEL statement. This involves
5697 gimplification of the body, as well as scanning the body for used
5698 variables. We need to do this scan now, because variable-sized
5699 decls will be decomposed during gimplification. */
5700
5701 static void
5702 gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
5703 {
5704 tree expr = *expr_p;
5705 gimple g;
5706 gimple_seq body = NULL;
5707 struct gimplify_ctx gctx;
5708
5709 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
5710 OMP_PARALLEL_COMBINED (expr)
5711 ? ORT_COMBINED_PARALLEL
5712 : ORT_PARALLEL);
5713
5714 push_gimplify_context (&gctx);
5715
5716 g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
5717 if (gimple_code (g) == GIMPLE_BIND)
5718 pop_gimplify_context (g);
5719 else
5720 pop_gimplify_context (NULL);
5721
5722 gimplify_adjust_omp_clauses (&OMP_PARALLEL_CLAUSES (expr));
5723
5724 g = gimple_build_omp_parallel (body,
5725 OMP_PARALLEL_CLAUSES (expr),
5726 NULL_TREE, NULL_TREE);
5727 if (OMP_PARALLEL_COMBINED (expr))
5728 gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
5729 gimplify_seq_add_stmt (pre_p, g);
5730 *expr_p = NULL_TREE;
5731 }
5732
5733 /* Gimplify the contents of an OMP_TASK statement. This involves
5734 gimplification of the body, as well as scanning the body for used
5735 variables. We need to do this scan now, because variable-sized
5736 decls will be decomposed during gimplification. */
5737
5738 static void
5739 gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
5740 {
5741 tree expr = *expr_p;
5742 gimple g;
5743 gimple_seq body = NULL;
5744 struct gimplify_ctx gctx;
5745
5746 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p, ORT_TASK);
5747
5748 push_gimplify_context (&gctx);
5749
5750 g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
5751 if (gimple_code (g) == GIMPLE_BIND)
5752 pop_gimplify_context (g);
5753 else
5754 pop_gimplify_context (NULL);
5755
5756 gimplify_adjust_omp_clauses (&OMP_TASK_CLAUSES (expr));
5757
5758 g = gimple_build_omp_task (body,
5759 OMP_TASK_CLAUSES (expr),
5760 NULL_TREE, NULL_TREE,
5761 NULL_TREE, NULL_TREE, NULL_TREE);
5762 gimplify_seq_add_stmt (pre_p, g);
5763 *expr_p = NULL_TREE;
5764 }
5765
5766 /* Gimplify the gross structure of an OMP_FOR statement. */
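/* As a rough, illustrative example (names are for exposition only), for

     #pragma omp for
     for (i = lb; i < ub; i++)
       body;

   the iteration variable I is added to the current omp context as
   GOVD_PRIVATE unless the clauses already make it private, the increment
   is canonicalized below into the MODIFY_EXPR form i = i + 1, and the LB
   and UB expressions are gimplified into FOR_PRE_BODY so that the
   resulting GIMPLE_OMP_FOR operands are gimple values.  */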
5767
5768 static enum gimplify_status
5769 gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
5770 {
5771 tree for_stmt, decl, var, t;
5772 enum gimplify_status ret = GS_OK;
5773 gimple gfor;
5774 gimple_seq for_body, for_pre_body;
5775 int i;
5776
5777 for_stmt = *expr_p;
5778
5779 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p,
5780 ORT_WORKSHARE);
5781
5782 /* Handle OMP_FOR_INIT. */
5783 for_pre_body = NULL;
5784 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
5785 OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;
5786
5787 for_body = gimple_seq_alloc ();
5788 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
5789 == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
5790 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
5791 == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));
5792 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
5793 {
5794 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
5795 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
5796 decl = TREE_OPERAND (t, 0);
5797 gcc_assert (DECL_P (decl));
5798 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
5799 || POINTER_TYPE_P (TREE_TYPE (decl)));
5800
5801 /* Make sure the iteration variable is private. */
5802 if (omp_is_private (gimplify_omp_ctxp, decl))
5803 omp_notice_variable (gimplify_omp_ctxp, decl, true);
5804 else
5805 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
5806
5807 /* If DECL is not a gimple register, create a temporary variable to act
5808 as an iteration counter. This is valid, since DECL cannot be
5809 modified in the body of the loop. */
5810 if (!is_gimple_reg (decl))
5811 {
5812 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
5813 TREE_OPERAND (t, 0) = var;
5814
5815 gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));
5816
5817 omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
5818 }
5819 else
5820 var = decl;
5821
5822 ret |= gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
5823 is_gimple_val, fb_rvalue);
5824 if (ret == GS_ERROR)
5825 return ret;
5826
5827 /* Handle OMP_FOR_COND. */
5828 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
5829 gcc_assert (COMPARISON_CLASS_P (t));
5830 gcc_assert (TREE_OPERAND (t, 0) == decl);
5831
5832 ret |= gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
5833 is_gimple_val, fb_rvalue);
5834
5835 /* Handle OMP_FOR_INCR. */
5836 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
5837 switch (TREE_CODE (t))
5838 {
5839 case PREINCREMENT_EXPR:
5840 case POSTINCREMENT_EXPR:
5841 t = build_int_cst (TREE_TYPE (decl), 1);
5842 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
5843 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
5844 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
5845 break;
5846
5847 case PREDECREMENT_EXPR:
5848 case POSTDECREMENT_EXPR:
5849 t = build_int_cst (TREE_TYPE (decl), -1);
5850 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
5851 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
5852 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
5853 break;
5854
5855 case MODIFY_EXPR:
5856 gcc_assert (TREE_OPERAND (t, 0) == decl);
5857 TREE_OPERAND (t, 0) = var;
5858
5859 t = TREE_OPERAND (t, 1);
5860 switch (TREE_CODE (t))
5861 {
5862 case PLUS_EXPR:
5863 if (TREE_OPERAND (t, 1) == decl)
5864 {
5865 TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
5866 TREE_OPERAND (t, 0) = var;
5867 break;
5868 }
5869
5870 /* Fallthru. */
5871 case MINUS_EXPR:
5872 case POINTER_PLUS_EXPR:
5873 gcc_assert (TREE_OPERAND (t, 0) == decl);
5874 TREE_OPERAND (t, 0) = var;
5875 break;
5876 default:
5877 gcc_unreachable ();
5878 }
5879
5880 ret |= gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
5881 is_gimple_val, fb_rvalue);
5882 break;
5883
5884 default:
5885 gcc_unreachable ();
5886 }
5887
5888 if (var != decl || TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1)
5889 {
5890 tree c;
5891 for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
5892 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5893 && OMP_CLAUSE_DECL (c) == decl
5894 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
5895 {
5896 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
5897 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
5898 gcc_assert (TREE_OPERAND (t, 0) == var);
5899 t = TREE_OPERAND (t, 1);
5900 gcc_assert (TREE_CODE (t) == PLUS_EXPR
5901 || TREE_CODE (t) == MINUS_EXPR
5902 || TREE_CODE (t) == POINTER_PLUS_EXPR);
5903 gcc_assert (TREE_OPERAND (t, 0) == var);
5904 t = build2 (TREE_CODE (t), TREE_TYPE (decl), decl,
5905 TREE_OPERAND (t, 1));
5906 gimplify_assign (decl, t,
5907 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
5908 }
5909 }
5910 }
5911
5912 gimplify_and_add (OMP_FOR_BODY (for_stmt), &for_body);
5913
5914 gimplify_adjust_omp_clauses (&OMP_FOR_CLAUSES (for_stmt));
5915
5916 gfor = gimple_build_omp_for (for_body, OMP_FOR_CLAUSES (for_stmt),
5917 TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
5918 for_pre_body);
5919
5920 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
5921 {
5922 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
5923 gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
5924 gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
5925 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
5926 gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
5927 gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
5928 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
5929 gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
5930 }
5931
5932 gimplify_seq_add_stmt (pre_p, gfor);
5933 return ret == GS_ALL_DONE ? GS_ALL_DONE : GS_ERROR;
5934 }
5935
5936 /* Gimplify the gross structure of other OpenMP worksharing constructs.
5937 In particular, OMP_SECTIONS and OMP_SINGLE. */
5938
5939 static void
5940 gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
5941 {
5942 tree expr = *expr_p;
5943 gimple stmt;
5944 gimple_seq body = NULL;
5945
5946 gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ORT_WORKSHARE);
5947 gimplify_and_add (OMP_BODY (expr), &body);
5948 gimplify_adjust_omp_clauses (&OMP_CLAUSES (expr));
5949
5950 if (TREE_CODE (expr) == OMP_SECTIONS)
5951 stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
5952 else if (TREE_CODE (expr) == OMP_SINGLE)
5953 stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
5954 else
5955 gcc_unreachable ();
5956
5957 gimplify_seq_add_stmt (pre_p, stmt);
5958 }
5959
5960 /* A subroutine of gimplify_omp_atomic. The front end is supposed to have
5961 stabilized the lhs of the atomic operation as *ADDR. Return true if
5962 EXPR is this stabilized form. */
5963
5964 static bool
5965 goa_lhs_expr_p (tree expr, tree addr)
5966 {
5967 /* Also include casts to other type variants. The C front end is fond
5968 of adding these for e.g. volatile variables. This is like
5969 STRIP_TYPE_NOPS but includes the main variant lookup. */
5970 while ((CONVERT_EXPR_P (expr)
5971 || TREE_CODE (expr) == NON_LVALUE_EXPR)
5972 && TREE_OPERAND (expr, 0) != error_mark_node
5973 && (TYPE_MAIN_VARIANT (TREE_TYPE (expr))
5974 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (expr, 0)))))
5975 expr = TREE_OPERAND (expr, 0);
5976
5977 if (TREE_CODE (expr) == INDIRECT_REF)
5978 {
5979 expr = TREE_OPERAND (expr, 0);
5980 while (expr != addr
5981 && (CONVERT_EXPR_P (expr)
5982 || TREE_CODE (expr) == NON_LVALUE_EXPR)
5983 && TREE_CODE (expr) == TREE_CODE (addr)
5984 && TYPE_MAIN_VARIANT (TREE_TYPE (expr))
5985 == TYPE_MAIN_VARIANT (TREE_TYPE (addr)))
5986 {
5987 expr = TREE_OPERAND (expr, 0);
5988 addr = TREE_OPERAND (addr, 0);
5989 }
5990 if (expr == addr)
5991 return true;
5992 return (TREE_CODE (addr) == ADDR_EXPR
5993 && TREE_CODE (expr) == ADDR_EXPR
5994 && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
5995 }
5996 if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
5997 return true;
5998 return false;
5999 }
6000
6001 /* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR.
6002 If an expression does not involve the lhs, evaluate it into a
6003 temporary. Return 1 if the lhs appeared as a subexpression, 0 if it
6004 did not, or -1 if an error was encountered. */
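/* A small illustrative case (temporary names are made up): with
   LHS_ADDR == &X, stabilizing the expression X + Y * Z replaces the X
   operand with LHS_VAR, emits something like

     T.1 = Y * Z;

   into PRE_P for the subexpression that does not involve the lhs, leaves
   LHS_VAR + T.1 in *EXPR_P, and returns 1.  */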
6005
6006 static int
6007 goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
6008 tree lhs_var)
6009 {
6010 tree expr = *expr_p;
6011 int saw_lhs;
6012
6013 if (goa_lhs_expr_p (expr, lhs_addr))
6014 {
6015 *expr_p = lhs_var;
6016 return 1;
6017 }
6018 if (is_gimple_val (expr))
6019 return 0;
6020
6021 saw_lhs = 0;
6022 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
6023 {
6024 case tcc_binary:
6025 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
6026 lhs_var);
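/* FALLTHRU */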
6027 case tcc_unary:
6028 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
6029 lhs_var);
6030 break;
6031 default:
6032 break;
6033 }
6034
6035 if (saw_lhs == 0)
6036 {
6037 enum gimplify_status gs;
6038 gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
6039 if (gs != GS_ALL_DONE)
6040 saw_lhs = -1;
6041 }
6042
6043 return saw_lhs;
6044 }
6045
6046
6047 /* Gimplify an OMP_ATOMIC statement. */
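/* For example (illustrative), for

     #pragma omp atomic
     x += y;

   the front end presents an OMP_ATOMIC whose operands are roughly the
   address &x and the right-hand side x + y.  Below, occurrences of the
   lhs within the rhs are replaced by a temporary, and the statement is
   lowered to a GIMPLE_OMP_ATOMIC_LOAD of that temporary from the address
   followed by a GIMPLE_OMP_ATOMIC_STORE of the recomputed value.  */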
6048
6049 static enum gimplify_status
6050 gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
6051 {
6052 tree addr = TREE_OPERAND (*expr_p, 0);
6053 tree rhs = TREE_OPERAND (*expr_p, 1);
6054 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
6055 tree tmp_load;
6056
6057 tmp_load = create_tmp_var (type, NULL);
6058 if (goa_stabilize_expr (&rhs, pre_p, addr, tmp_load) < 0)
6059 return GS_ERROR;
6060
6061 if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
6062 != GS_ALL_DONE)
6063 return GS_ERROR;
6064
6065 gimplify_seq_add_stmt (pre_p, gimple_build_omp_atomic_load (tmp_load, addr));
6066 if (gimplify_expr (&rhs, pre_p, NULL, is_gimple_val, fb_rvalue)
6067 != GS_ALL_DONE)
6068 return GS_ERROR;
6069 gimplify_seq_add_stmt (pre_p, gimple_build_omp_atomic_store (rhs));
6070 *expr_p = NULL;
6071
6072 return GS_ALL_DONE;
6073 }
6074
6075
6076 /* Converts the GENERIC expression tree *EXPR_P to GIMPLE. If the
6077 expression produces a value to be used as an operand inside a GIMPLE
6078 statement, the value will be stored back in *EXPR_P. This value will
6079 be a tree of class tcc_declaration, tcc_constant, tcc_reference or
6080 an SSA_NAME. The corresponding sequence of GIMPLE statements is
6081 emitted in PRE_P and POST_P.
6082
6083 Additionally, this process may overwrite parts of the input
6084 expression during gimplification. Ideally, it should be
6085 possible to do non-destructive gimplification.
6086
6087 EXPR_P points to the GENERIC expression to convert to GIMPLE. If
6088 the expression needs to evaluate to a value to be used as
6089 an operand in a GIMPLE statement, this value will be stored in
6090 *EXPR_P on exit. This happens when the caller specifies one
6091 of fb_lvalue or fb_rvalue fallback flags.
6092
6093 PRE_P will contain the sequence of GIMPLE statements corresponding
6094 to the evaluation of EXPR and all the side-effects that must
6095 be executed before the main expression. On exit, the last
6096 statement of PRE_P is the core statement being gimplified. For
6097 instance, when gimplifying 'if (++a)' the last statement in
6098 PRE_P will be 'if (t.1)' where t.1 is the result of
6099 pre-incrementing 'a'.
6100
6101 POST_P will contain the sequence of GIMPLE statements corresponding
6102 to the evaluation of all the side-effects that must be executed
6103 after the main expression. If this is NULL, the post
6104 side-effects are stored at the end of PRE_P.
6105
6106 The reason why the output is split in two is to handle post
6107 side-effects explicitly. In some cases, an expression may have
6108 inner and outer post side-effects which need to be emitted in
6109 an order different from the one given by the recursive
6110 traversal. For instance, for the expression (*p--)++ the post
6111 side-effects of '--' must actually occur *after* the post
6112 side-effects of '++'. However, gimplification will first visit
6113 the inner expression, so if a separate POST sequence was not
6114 used, the resulting sequence would be:
6115
6116 1 t.1 = *p
6117 2 p = p - 1
6118 3 t.2 = t.1 + 1
6119 4 *p = t.2
6120
6121 However, the post-decrement operation in line #2 must not be
6122 evaluated until after the store to *p at line #4, so the
6123 correct sequence should be:
6124
6125 1 t.1 = *p
6126 2 t.2 = t.1 + 1
6127 3 *p = t.2
6128 4 p = p - 1
6129
6130 So, by specifying a separate post queue, it is possible
6131 to emit the post side-effects in the correct order.
6132 If POST_P is NULL, an internal queue will be used. Before
6133 returning to the caller, the sequence POST_P is appended to
6134 the main output sequence PRE_P.
6135
6136 GIMPLE_TEST_F points to a function that takes a tree T and
6137 returns nonzero if T is in the GIMPLE form requested by the
6138 caller. The GIMPLE predicates are in gimple.c.
6139
6140 FALLBACK tells the function what sort of a temporary we want if
6141 gimplification cannot produce an expression that complies with
6142 GIMPLE_TEST_F.
6143
6144 fb_none means that no temporary should be generated
6145 fb_rvalue means that an rvalue is OK to generate
6146 fb_lvalue means that an lvalue is OK to generate
6147 fb_either means that either is OK, but an lvalue is preferable.
6148 fb_mayfail means that gimplification may fail (in which case
6149 GS_ERROR will be returned)
6150
6151 The return value is either GS_ERROR or GS_ALL_DONE, since this
6152 function iterates until EXPR is completely gimplified or an error
6153 occurs. */
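/* A typical use (illustrative) is gimplifying an operand in place:

     ret = gimplify_expr (&op, pre_p, post_p, is_gimple_val, fb_rvalue);

   on success, OP satisfies is_gimple_val and any statements needed to
   compute it have been appended to PRE_P (and POST_P, if given).  */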
6154
6155 enum gimplify_status
6156 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
6157 bool (*gimple_test_f) (tree), fallback_t fallback)
6158 {
6159 tree tmp;
6160 gimple_seq internal_pre = NULL;
6161 gimple_seq internal_post = NULL;
6162 tree save_expr;
6163 bool is_statement;
6164 location_t saved_location;
6165 enum gimplify_status ret;
6166 gimple_stmt_iterator pre_last_gsi, post_last_gsi;
6167
6168 save_expr = *expr_p;
6169 if (save_expr == NULL_TREE)
6170 return GS_ALL_DONE;
6171
6172 /* If we are gimplifying a top-level statement, PRE_P must be valid. */
6173 is_statement = gimple_test_f == is_gimple_stmt;
6174 if (is_statement)
6175 gcc_assert (pre_p);
6176
6177 /* Consistency checks. */
6178 if (gimple_test_f == is_gimple_reg)
6179 gcc_assert (fallback & (fb_rvalue | fb_lvalue));
6180 else if (gimple_test_f == is_gimple_val
6181 || gimple_test_f == is_gimple_formal_tmp_rhs
6182 || gimple_test_f == is_gimple_formal_tmp_or_call_rhs
6183 || gimple_test_f == is_gimple_formal_tmp_reg
6184 || gimple_test_f == is_gimple_formal_tmp_var
6185 || gimple_test_f == is_gimple_call_addr
6186 || gimple_test_f == is_gimple_condexpr
6187 || gimple_test_f == is_gimple_mem_rhs
6188 || gimple_test_f == is_gimple_mem_or_call_rhs
6189 || gimple_test_f == is_gimple_reg_rhs
6190 || gimple_test_f == is_gimple_reg_or_call_rhs
6191 || gimple_test_f == is_gimple_asm_val)
6192 gcc_assert (fallback & fb_rvalue);
6193 else if (gimple_test_f == is_gimple_min_lval
6194 || gimple_test_f == is_gimple_lvalue)
6195 gcc_assert (fallback & fb_lvalue);
6196 else if (gimple_test_f == is_gimple_addressable)
6197 gcc_assert (fallback & fb_either);
6198 else if (gimple_test_f == is_gimple_stmt)
6199 gcc_assert (fallback == fb_none);
6200 else
6201 {
6202 /* We should have recognized the GIMPLE_TEST_F predicate to
6203 know what kind of fallback to use in case a temporary is
6204 needed to hold the value or address of *EXPR_P. */
6205 gcc_unreachable ();
6206 }
6207
6208 /* We used to check the predicate here and return immediately if it
6209 succeeds. This is wrong; the design is for gimplification to be
6210 idempotent, and for the predicates to only test for valid forms, not
6211 whether they are fully simplified. */
6212 if (pre_p == NULL)
6213 pre_p = &internal_pre;
6214
6215 if (post_p == NULL)
6216 post_p = &internal_post;
6217
6218 /* Remember the last statements added to PRE_P and POST_P. Every
6219 new statement added by the gimplification helpers needs to be
6220 annotated with location information. To centralize the
6221 responsibility, we remember the last statement that had been
6222 added to both queues before gimplifying *EXPR_P. If
6223 gimplification produces new statements in PRE_P and POST_P, those
6224 statements will be annotated with the same location information
6225 as *EXPR_P. */
6226 pre_last_gsi = gsi_last (*pre_p);
6227 post_last_gsi = gsi_last (*post_p);
6228
6229 saved_location = input_location;
6230 if (save_expr != error_mark_node
6231 && EXPR_HAS_LOCATION (*expr_p))
6232 input_location = EXPR_LOCATION (*expr_p);
6233
6234 /* Loop over the specific gimplifiers until the toplevel node
6235 remains the same. */
6236 do
6237 {
6238 /* Strip away as many useless type conversions as possible
6239 at the toplevel. */
6240 STRIP_USELESS_TYPE_CONVERSION (*expr_p);
6241
6242 /* Remember the expr. */
6243 save_expr = *expr_p;
6244
6245 /* Die, die, die, my darling. */
6246 if (save_expr == error_mark_node
6247 || (TREE_TYPE (save_expr)
6248 && TREE_TYPE (save_expr) == error_mark_node))
6249 {
6250 ret = GS_ERROR;
6251 break;
6252 }
6253
6254 /* Do any language-specific gimplification. */
6255 ret = lang_hooks.gimplify_expr (expr_p, pre_p, post_p);
6256 if (ret == GS_OK)
6257 {
6258 if (*expr_p == NULL_TREE)
6259 break;
6260 if (*expr_p != save_expr)
6261 continue;
6262 }
6263 else if (ret != GS_UNHANDLED)
6264 break;
6265
6266 ret = GS_OK;
6267 switch (TREE_CODE (*expr_p))
6268 {
6269 /* First deal with the special cases. */
6270
6271 case POSTINCREMENT_EXPR:
6272 case POSTDECREMENT_EXPR:
6273 case PREINCREMENT_EXPR:
6274 case PREDECREMENT_EXPR:
6275 ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
6276 fallback != fb_none);
6277 break;
6278
6279 case ARRAY_REF:
6280 case ARRAY_RANGE_REF:
6281 case REALPART_EXPR:
6282 case IMAGPART_EXPR:
6283 case COMPONENT_REF:
6284 case VIEW_CONVERT_EXPR:
6285 ret = gimplify_compound_lval (expr_p, pre_p, post_p,
6286 fallback ? fallback : fb_rvalue);
6287 break;
6288
6289 case COND_EXPR:
6290 ret = gimplify_cond_expr (expr_p, pre_p, fallback);
6291
6292 /* C99 code may assign to an array in a structure value of a
6293 conditional expression, and this has undefined behavior
6294 only on execution, so create a temporary if an lvalue is
6295 required. */
6296 if (fallback == fb_lvalue)
6297 {
6298 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
6299 mark_addressable (*expr_p);
6300 }
6301 break;
6302
6303 case CALL_EXPR:
6304 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
6305
6306 /* C99 code may assign to an array in a structure returned
6307 from a function, and this has undefined behavior only on
6308 execution, so create a temporary if an lvalue is
6309 required. */
6310 if (fallback == fb_lvalue)
6311 {
6312 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
6313 mark_addressable (*expr_p);
6314 }
6315 break;
6316
6317 case TREE_LIST:
6318 gcc_unreachable ();
6319
6320 case COMPOUND_EXPR:
6321 ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
6322 break;
6323
6324 case MODIFY_EXPR:
6325 case INIT_EXPR:
6326 ret = gimplify_modify_expr (expr_p, pre_p, post_p,
6327 fallback != fb_none);
6328 break;
6329
6330 case TRUTH_ANDIF_EXPR:
6331 case TRUTH_ORIF_EXPR:
6332 ret = gimplify_boolean_expr (expr_p);
6333 break;
6334
6335 case TRUTH_NOT_EXPR:
6336 if (TREE_CODE (TREE_TYPE (*expr_p)) != BOOLEAN_TYPE)
6337 {
6338 tree type = TREE_TYPE (*expr_p);
6339 *expr_p = fold_convert (type, gimple_boolify (*expr_p));
6340 ret = GS_OK;
6341 break;
6342 }
6343
6344 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
6345 is_gimple_val, fb_rvalue);
6346 recalculate_side_effects (*expr_p);
6347 break;
6348
6349 case ADDR_EXPR:
6350 ret = gimplify_addr_expr (expr_p, pre_p, post_p);
6351 break;
6352
6353 case VA_ARG_EXPR:
6354 ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
6355 break;
6356
6357 CASE_CONVERT:
6358 if (IS_EMPTY_STMT (*expr_p))
6359 {
6360 ret = GS_ALL_DONE;
6361 break;
6362 }
6363
6364 if (VOID_TYPE_P (TREE_TYPE (*expr_p))
6365 || fallback == fb_none)
6366 {
6367 /* Just strip a conversion to void (or in void context) and
6368 try again. */
6369 *expr_p = TREE_OPERAND (*expr_p, 0);
6370 break;
6371 }
6372
6373 ret = gimplify_conversion (expr_p);
6374 if (ret == GS_ERROR)
6375 break;
6376 if (*expr_p != save_expr)
6377 break;
6378 /* FALLTHRU */
6379
6380 case FIX_TRUNC_EXPR:
6381 /* unary_expr: ... | '(' cast ')' val | ... */
6382 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
6383 is_gimple_val, fb_rvalue);
6384 recalculate_side_effects (*expr_p);
6385 break;
6386
6387 case INDIRECT_REF:
6388 *expr_p = fold_indirect_ref (*expr_p);
6389 if (*expr_p != save_expr)
6390 break;
6391 /* else fall through. */
6392 case ALIGN_INDIRECT_REF:
6393 case MISALIGNED_INDIRECT_REF:
6394 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
6395 is_gimple_reg, fb_rvalue);
6396 recalculate_side_effects (*expr_p);
6397 break;
6398
6399 /* Constants need not be gimplified. */
6400 case INTEGER_CST:
6401 case REAL_CST:
6402 case FIXED_CST:
6403 case STRING_CST:
6404 case COMPLEX_CST:
6405 case VECTOR_CST:
6406 ret = GS_ALL_DONE;
6407 break;
6408
6409 case CONST_DECL:
6410 /* If we require an lvalue, such as for ADDR_EXPR, retain the
6411 CONST_DECL node. Otherwise the decl is replaceable by its
6412 value. */
6413 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
6414 if (fallback & fb_lvalue)
6415 ret = GS_ALL_DONE;
6416 else
6417 *expr_p = DECL_INITIAL (*expr_p);
6418 break;
6419
6420 case DECL_EXPR:
6421 ret = gimplify_decl_expr (expr_p, pre_p);
6422 break;
6423
6424 case EXC_PTR_EXPR:
6425 /* FIXME make this a decl. */
6426 ret = GS_ALL_DONE;
6427 break;
6428
6429 case BIND_EXPR:
6430 ret = gimplify_bind_expr (expr_p, pre_p);
6431 break;
6432
6433 case LOOP_EXPR:
6434 ret = gimplify_loop_expr (expr_p, pre_p);
6435 break;
6436
6437 case SWITCH_EXPR:
6438 ret = gimplify_switch_expr (expr_p, pre_p);
6439 break;
6440
6441 case EXIT_EXPR:
6442 ret = gimplify_exit_expr (expr_p);
6443 break;
6444
6445 case GOTO_EXPR:
6446 /* If the target is not a LABEL_DECL, then it is a computed jump
6447 and the target needs to be gimplified. */
6448 if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
6449 {
6450 ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
6451 NULL, is_gimple_val, fb_rvalue);
6452 if (ret == GS_ERROR)
6453 break;
6454 }
6455 gimplify_seq_add_stmt (pre_p,
6456 gimple_build_goto (GOTO_DESTINATION (*expr_p)));
6457 break;
6458
6459 case PREDICT_EXPR:
6460 gimplify_seq_add_stmt (pre_p,
6461 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
6462 PREDICT_EXPR_OUTCOME (*expr_p)));
6463 ret = GS_ALL_DONE;
6464 break;
6465
6466 case LABEL_EXPR:
6467 ret = GS_ALL_DONE;
6468 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
6469 == current_function_decl);
6470 gimplify_seq_add_stmt (pre_p,
6471 gimple_build_label (LABEL_EXPR_LABEL (*expr_p)));
6472 break;
6473
6474 case CASE_LABEL_EXPR:
6475 ret = gimplify_case_label_expr (expr_p, pre_p);
6476 break;
6477
6478 case RETURN_EXPR:
6479 ret = gimplify_return_expr (*expr_p, pre_p);
6480 break;
6481
6482 case CONSTRUCTOR:
6483 /* Don't reduce this in place; let gimplify_init_constructor work its
6484 magic. But if we're just elaborating this for side effects, just
6485 gimplify any element that has side-effects. */
6486 if (fallback == fb_none)
6487 {
6488 unsigned HOST_WIDE_INT ix;
6489 constructor_elt *ce;
6490 tree temp = NULL_TREE;
6491 for (ix = 0;
6492 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (*expr_p),
6493 ix, ce);
6494 ix++)
6495 if (TREE_SIDE_EFFECTS (ce->value))
6496 append_to_statement_list (ce->value, &temp);
6497
6498 *expr_p = temp;
6499 ret = GS_OK;
6500 }
6501 /* C99 code may assign to an array in a constructed
6502 structure or union, and this has undefined behavior only
6503 on execution, so create a temporary if an lvalue is
6504 required. */
6505 else if (fallback == fb_lvalue)
6506 {
6507 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
6508 mark_addressable (*expr_p);
6509 }
6510 else
6511 ret = GS_ALL_DONE;
6512 break;
6513
6514 /* The following are special cases that are not handled by the
6515 original GIMPLE grammar. */
6516
6517 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
6518 eliminated. */
6519 case SAVE_EXPR:
6520 ret = gimplify_save_expr (expr_p, pre_p, post_p);
6521 break;
6522
6523 case BIT_FIELD_REF:
6524 {
6525 enum gimplify_status r0, r1, r2;
6526
6527 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
6528 post_p, is_gimple_lvalue, fb_either);
6529 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
6530 post_p, is_gimple_val, fb_rvalue);
6531 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
6532 post_p, is_gimple_val, fb_rvalue);
6533 recalculate_side_effects (*expr_p);
6534
6535 ret = MIN (r0, MIN (r1, r2));
6536 }
6537 break;
6538
6539 case NON_LVALUE_EXPR:
6540 /* This should have been stripped above. */
6541 gcc_unreachable ();
6542
6543 case ASM_EXPR:
6544 ret = gimplify_asm_expr (expr_p, pre_p, post_p);
6545 break;
6546
6547 case TRY_FINALLY_EXPR:
6548 case TRY_CATCH_EXPR:
6549 {
6550 gimple_seq eval, cleanup;
6551 gimple try_;
6552
6553 eval = cleanup = NULL;
6554 gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
6555 gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
6556 /* Don't create bogus GIMPLE_TRY with empty cleanup. */
6557 if (gimple_seq_empty_p (cleanup))
6558 {
6559 gimple_seq_add_seq (pre_p, eval);
6560 ret = GS_ALL_DONE;
6561 break;
6562 }
6563 try_ = gimple_build_try (eval, cleanup,
6564 TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
6565 ? GIMPLE_TRY_FINALLY
6566 : GIMPLE_TRY_CATCH);
6567 if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
6568 gimple_try_set_catch_is_cleanup (try_,
6569 TRY_CATCH_IS_CLEANUP (*expr_p));
6570 gimplify_seq_add_stmt (pre_p, try_);
6571 ret = GS_ALL_DONE;
6572 break;
6573 }
6574
6575 case CLEANUP_POINT_EXPR:
6576 ret = gimplify_cleanup_point_expr (expr_p, pre_p);
6577 break;
6578
6579 case TARGET_EXPR:
6580 ret = gimplify_target_expr (expr_p, pre_p, post_p);
6581 break;
6582
6583 case CATCH_EXPR:
6584 {
6585 gimple c;
6586 gimple_seq handler = NULL;
6587 gimplify_and_add (CATCH_BODY (*expr_p), &handler);
6588 c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
6589 gimplify_seq_add_stmt (pre_p, c);
6590 ret = GS_ALL_DONE;
6591 break;
6592 }
6593
6594 case EH_FILTER_EXPR:
6595 {
6596 gimple ehf;
6597 gimple_seq failure = NULL;
6598
6599 gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
6600 ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
6601 gimple_eh_filter_set_must_not_throw
6602 (ehf, EH_FILTER_MUST_NOT_THROW (*expr_p));
6603 gimplify_seq_add_stmt (pre_p, ehf);
6604 ret = GS_ALL_DONE;
6605 break;
6606 }
6607
6608 case CHANGE_DYNAMIC_TYPE_EXPR:
6609 {
6610 gimple cdt;
6611
6612 ret = gimplify_expr (&CHANGE_DYNAMIC_TYPE_LOCATION (*expr_p),
6613 pre_p, post_p, is_gimple_reg, fb_lvalue);
6614 cdt = gimple_build_cdt (CHANGE_DYNAMIC_TYPE_NEW_TYPE (*expr_p),
6615 CHANGE_DYNAMIC_TYPE_LOCATION (*expr_p));
6616 gimplify_seq_add_stmt (pre_p, cdt);
6617 ret = GS_ALL_DONE;
6618 }
6619 break;
6620
6621 case OBJ_TYPE_REF:
6622 {
6623 enum gimplify_status r0, r1;
6624 r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
6625 post_p, is_gimple_val, fb_rvalue);
6626 r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
6627 post_p, is_gimple_val, fb_rvalue);
6628 TREE_SIDE_EFFECTS (*expr_p) = 0;
6629 ret = MIN (r0, r1);
6630 }
6631 break;
6632
6633 case LABEL_DECL:
6634 /* We get here when taking the address of a label. We mark
6635 the label as "forced", meaning it can never be removed and
6636 it is a potential target for any computed goto. */
6637 FORCED_LABEL (*expr_p) = 1;
6638 ret = GS_ALL_DONE;
6639 break;
6640
6641 case STATEMENT_LIST:
6642 ret = gimplify_statement_list (expr_p, pre_p);
6643 break;
6644
6645 case WITH_SIZE_EXPR:
6646 {
6647 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
6648 post_p == &internal_post ? NULL : post_p,
6649 gimple_test_f, fallback);
6650 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
6651 is_gimple_val, fb_rvalue);
6652 }
6653 break;
6654
6655 case VAR_DECL:
6656 case PARM_DECL:
6657 ret = gimplify_var_or_parm_decl (expr_p);
6658 break;
6659
6660 case RESULT_DECL:
6661 /* When within an OpenMP context, notice uses of variables. */
6662 if (gimplify_omp_ctxp)
6663 omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
6664 ret = GS_ALL_DONE;
6665 break;
6666
6667 case SSA_NAME:
6668 /* Allow callbacks into the gimplifier during optimization. */
6669 ret = GS_ALL_DONE;
6670 break;
6671
6672 case OMP_PARALLEL:
6673 gimplify_omp_parallel (expr_p, pre_p);
6674 ret = GS_ALL_DONE;
6675 break;
6676
6677 case OMP_TASK:
6678 gimplify_omp_task (expr_p, pre_p);
6679 ret = GS_ALL_DONE;
6680 break;
6681
6682 case OMP_FOR:
6683 ret = gimplify_omp_for (expr_p, pre_p);
6684 break;
6685
6686 case OMP_SECTIONS:
6687 case OMP_SINGLE:
6688 gimplify_omp_workshare (expr_p, pre_p);
6689 ret = GS_ALL_DONE;
6690 break;
6691
6692 case OMP_SECTION:
6693 case OMP_MASTER:
6694 case OMP_ORDERED:
6695 case OMP_CRITICAL:
6696 {
6697 gimple_seq body = NULL;
6698 gimple g;
6699
6700 gimplify_and_add (OMP_BODY (*expr_p), &body);
6701 switch (TREE_CODE (*expr_p))
6702 {
6703 case OMP_SECTION:
6704 g = gimple_build_omp_section (body);
6705 break;
6706 case OMP_MASTER:
6707 g = gimple_build_omp_master (body);
6708 break;
6709 case OMP_ORDERED:
6710 g = gimple_build_omp_ordered (body);
6711 break;
6712 case OMP_CRITICAL:
6713 g = gimple_build_omp_critical (body,
6714 OMP_CRITICAL_NAME (*expr_p));
6715 break;
6716 default:
6717 gcc_unreachable ();
6718 }
6719 gimplify_seq_add_stmt (pre_p, g);
6720 ret = GS_ALL_DONE;
6721 break;
6722 }
6723
6724 case OMP_ATOMIC:
6725 ret = gimplify_omp_atomic (expr_p, pre_p);
6726 break;
6727
6728 case POINTER_PLUS_EXPR:
6729 /* Convert ((type *)A)+offset into &A->field_of_type_and_offset.
6730 The latter is a GIMPLE immediate operand, saving the need for an
6731 extra statement. */
6732 if (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == INTEGER_CST
6733 && (tmp = maybe_fold_offset_to_address
6734 (TREE_OPERAND (*expr_p, 0), TREE_OPERAND (*expr_p, 1),
6735 TREE_TYPE (*expr_p))))
6736 {
6737 *expr_p = tmp;
6738 break;
6739 }
6740 /* Convert (void *)&a + 4 into (void *)&a[1]. */
6741 if (TREE_CODE (TREE_OPERAND (*expr_p, 0)) == NOP_EXPR
6742 && TREE_CODE (TREE_OPERAND (*expr_p, 1)) == INTEGER_CST
6743 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*expr_p,
6744 0),0)))
6745 && (tmp = maybe_fold_offset_to_address
6746 (TREE_OPERAND (TREE_OPERAND (*expr_p, 0), 0),
6747 TREE_OPERAND (*expr_p, 1),
6748 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*expr_p, 0),
6749 0)))))
6750 {
6751 *expr_p = fold_convert (TREE_TYPE (*expr_p), tmp);
6752 break;
6753 }
6754 /* FALLTHRU */
6755
6756 default:
6757 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
6758 {
6759 case tcc_comparison:
6760 /* Handle comparison of objects of non scalar mode aggregates
6761 with a call to memcmp. It would be nice to only have to do
6762 this for variable-sized objects, but then we'd have to allow
6763 the same nest of reference nodes we allow for MODIFY_EXPR and
6764 that's too complex.
6765
6766 Compare scalar mode aggregates as scalar mode values. Using
6767 memcmp for them would be very inefficient at best, and is
6768 plain wrong if bitfields are involved. */
6769 {
6770 tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
6771
6772 if (!AGGREGATE_TYPE_P (type))
6773 goto expr_2;
6774 else if (TYPE_MODE (type) != BLKmode)
6775 ret = gimplify_scalar_mode_aggregate_compare (expr_p);
6776 else
6777 ret = gimplify_variable_sized_compare (expr_p);
6778
6779 break;
6780 }
6781
6782 /* If *EXPR_P does not need to be special-cased, handle it
6783 according to its class. */
6784 case tcc_unary:
6785 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
6786 post_p, is_gimple_val, fb_rvalue);
6787 break;
6788
6789 case tcc_binary:
6790 expr_2:
6791 {
6792 enum gimplify_status r0, r1;
6793
6794 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
6795 post_p, is_gimple_val, fb_rvalue);
6796 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
6797 post_p, is_gimple_val, fb_rvalue);
6798
6799 ret = MIN (r0, r1);
6800 break;
6801 }
6802
6803 case tcc_declaration:
6804 case tcc_constant:
6805 ret = GS_ALL_DONE;
6806 goto dont_recalculate;
6807
6808 default:
6809 gcc_assert (TREE_CODE (*expr_p) == TRUTH_AND_EXPR
6810 || TREE_CODE (*expr_p) == TRUTH_OR_EXPR
6811 || TREE_CODE (*expr_p) == TRUTH_XOR_EXPR);
6812 goto expr_2;
6813 }
6814
6815 recalculate_side_effects (*expr_p);
6816
6817 dont_recalculate:
6818 break;
6819 }
6820
6821 /* If we replaced *expr_p, gimplify again. */
6822 if (ret == GS_OK && (*expr_p == NULL || *expr_p == save_expr))
6823 ret = GS_ALL_DONE;
6824 }
6825 while (ret == GS_OK);
6826
6827 /* If we encountered an error_mark somewhere nested inside, either
6828 stub out the statement or propagate the error back out. */
6829 if (ret == GS_ERROR)
6830 {
6831 if (is_statement)
6832 *expr_p = NULL;
6833 goto out;
6834 }
6835
6836 /* This was only valid as a return value from the langhook, which
6837 we handled. Make sure it doesn't escape from any other context. */
6838 gcc_assert (ret != GS_UNHANDLED);
6839
6840 if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
6841 {
6842 /* We aren't looking for a value, and we don't have a valid
6843 statement. If it doesn't have side-effects, throw it away. */
6844 if (!TREE_SIDE_EFFECTS (*expr_p))
6845 *expr_p = NULL;
6846 else if (!TREE_THIS_VOLATILE (*expr_p))
6847 {
6848 /* This is probably a _REF that contains something nested that
6849 has side effects. Recurse through the operands to find it. */
6850 enum tree_code code = TREE_CODE (*expr_p);
6851
6852 switch (code)
6853 {
6854 case COMPONENT_REF:
6855 case REALPART_EXPR:
6856 case IMAGPART_EXPR:
6857 case VIEW_CONVERT_EXPR:
6858 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
6859 gimple_test_f, fallback);
6860 break;
6861
6862 case ARRAY_REF:
6863 case ARRAY_RANGE_REF:
6864 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
6865 gimple_test_f, fallback);
6866 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
6867 gimple_test_f, fallback);
6868 break;
6869
6870 default:
6871 /* Anything else with side-effects must be converted to
6872 a valid statement before we get here. */
6873 gcc_unreachable ();
6874 }
6875
6876 *expr_p = NULL;
6877 }
6878 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
6879 && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode)
6880 {
6881 /* Historically, the compiler has treated a bare reference
6882 to a non-BLKmode volatile lvalue as forcing a load. */
6883 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
6884
6885 /* Normally, we do not want to create a temporary for a
6886 TREE_ADDRESSABLE type because such a type should not be
6887 copied by bitwise-assignment. However, we make an
6888 exception here, as all we are doing here is ensuring that
6889 we read the bytes that make up the type. We use
6890 create_tmp_var_raw because create_tmp_var will abort when
6891 given a TREE_ADDRESSABLE type. */
6892 tree tmp = create_tmp_var_raw (type, "vol");
6893 gimple_add_tmp_var (tmp);
6894 gimplify_assign (tmp, *expr_p, pre_p);
6895 *expr_p = NULL;
6896 }
6897 else
6898 /* We can't do anything useful with a volatile reference to
6899 an incomplete type, so just throw it away. Likewise for
6900 a BLKmode type, since any implicit inner load should
6901 already have been turned into an explicit one by the
6902 gimplification process. */
6903 *expr_p = NULL;
6904 }
6905
6906 /* If we are gimplifying at the statement level, we're done. Tack
6907 everything together and return. */
6908 if (fallback == fb_none || is_statement)
6909 {
6910 /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
6911 it out for GC to reclaim it. */
6912 *expr_p = NULL_TREE;
6913
6914 if (!gimple_seq_empty_p (internal_pre)
6915 || !gimple_seq_empty_p (internal_post))
6916 {
6917 gimplify_seq_add_seq (&internal_pre, internal_post);
6918 gimplify_seq_add_seq (pre_p, internal_pre);
6919 }
6920
6921 /* The result of gimplifying *EXPR_P is going to be the last few
6922 statements in *PRE_P and *POST_P. Add location information
6923 to all the statements that were added by the gimplification
6924 helpers. */
6925 if (!gimple_seq_empty_p (*pre_p))
6926 annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);
6927
6928 if (!gimple_seq_empty_p (*post_p))
6929 annotate_all_with_location_after (*post_p, post_last_gsi,
6930 input_location);
6931
6932 goto out;
6933 }
6934
6935 #ifdef ENABLE_GIMPLE_CHECKING
6936 if (*expr_p)
6937 {
6938 enum tree_code code = TREE_CODE (*expr_p);
6939 /* These expressions should already be in gimple IR form. */
6940 gcc_assert (code != MODIFY_EXPR
6941 && code != ASM_EXPR
6942 && code != BIND_EXPR
6943 && code != CATCH_EXPR
6944 && code != COND_EXPR
6945 && code != EH_FILTER_EXPR
6946 && code != GOTO_EXPR
6947 && code != LABEL_EXPR
6948 && code != LOOP_EXPR
6949 && code != RESX_EXPR
6950 && code != SWITCH_EXPR
6951 && code != TRY_FINALLY_EXPR
6952 && code != OMP_CRITICAL
6953 && code != OMP_FOR
6954 && code != OMP_MASTER
6955 && code != OMP_ORDERED
6956 && code != OMP_PARALLEL
6957 && code != OMP_SECTIONS
6958 && code != OMP_SECTION
6959 && code != OMP_SINGLE);
6960 }
6961 #endif
6962
6963 /* Otherwise we're gimplifying a subexpression, so the resulting
6964 value is interesting. If it's a valid operand that matches
6965 GIMPLE_TEST_F, we're done. Unless we are handling some
6966 post-effects internally; if that's the case, we need to copy into
6967 a temporary before adding the post-effects to POST_P. */
6968 if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
6969 goto out;
6970
6971 /* Otherwise, we need to create a new temporary for the gimplified
6972 expression. */
6973
6974 /* We can't return an lvalue if we have an internal postqueue. The
6975 object the lvalue refers to would (probably) be modified by the
6976 postqueue; we need to copy the value out first, which means an
6977 rvalue. */
6978 if ((fallback & fb_lvalue)
6979 && gimple_seq_empty_p (internal_post)
6980 && is_gimple_addressable (*expr_p))
6981 {
6982 /* An lvalue will do. Take the address of the expression, store it
6983 in a temporary, and replace the expression with an INDIRECT_REF of
6984 that temporary. */
6985 tmp = build_fold_addr_expr (*expr_p);
6986 gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
6987 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (tmp)), tmp);
6988 }
6989 else if ((fallback & fb_rvalue) && is_gimple_formal_tmp_or_call_rhs (*expr_p))
6990 {
6991 /* An rvalue will do. Assign the gimplified expression into a
6992 new temporary TMP and replace the original expression with
6993 TMP. First, make sure that the expression has a type so that
6994 it can be assigned into a temporary. */
6995 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
6996
6997 if (!gimple_seq_empty_p (internal_post) || (fallback & fb_lvalue))
6998 /* The postqueue might change the value of the expression between
6999 the initialization and use of the temporary, so we can't use a
7000 formal temp. FIXME do we care? */
7001 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
7002 else
7003 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
7004
7005 if (TREE_CODE (*expr_p) != SSA_NAME)
7006 DECL_GIMPLE_FORMAL_TEMP_P (*expr_p) = 1;
7007 }
7008 else
7009 {
7010 #ifdef ENABLE_GIMPLE_CHECKING
7011 if (!(fallback & fb_mayfail))
7012 {
7013 fprintf (stderr, "gimplification failed:\n");
7014 print_generic_expr (stderr, *expr_p, 0);
7015 debug_tree (*expr_p);
7016 internal_error ("gimplification failed");
7017 }
7018 #endif
7019 gcc_assert (fallback & fb_mayfail);
7020
7021 /* If this is an asm statement, and the user asked for the
7022 impossible, don't die. Fail and let gimplify_asm_expr
7023 issue an error. */
7024 ret = GS_ERROR;
7025 goto out;
7026 }
7027
7028 /* Make sure the temporary matches our predicate. */
7029 gcc_assert ((*gimple_test_f) (*expr_p));
7030
7031 if (!gimple_seq_empty_p (internal_post))
7032 {
7033 annotate_all_with_location (internal_post, input_location);
7034 gimplify_seq_add_seq (pre_p, internal_post);
7035 }
7036
7037 out:
7038 input_location = saved_location;
7039 return ret;
7040 }
7041
7042 /* Look through TYPE for variable-sized objects and gimplify each such
7043 size that we find. Add to LIST_P any statements generated. */
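/* For example (roughly), for a C99 variable length array declared as
   int a[n], both the array type's TYPE_SIZE and the TYPE_MAX_VALUE of
   its index domain are expressions involving N; gimplify_one_sizepos
   below evaluates each of them to a gimple value, adding to LIST_P any
   statements required.  */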
7044
7045 void
7046 gimplify_type_sizes (tree type, gimple_seq *list_p)
7047 {
7048 tree field, t;
7049
7050 if (type == NULL || type == error_mark_node)
7051 return;
7052
7053 /* We first do the main variant, then copy into any other variants. */
7054 type = TYPE_MAIN_VARIANT (type);
7055
7056 /* Avoid infinite recursion. */
7057 if (TYPE_SIZES_GIMPLIFIED (type))
7058 return;
7059
7060 TYPE_SIZES_GIMPLIFIED (type) = 1;
7061
7062 switch (TREE_CODE (type))
7063 {
7064 case INTEGER_TYPE:
7065 case ENUMERAL_TYPE:
7066 case BOOLEAN_TYPE:
7067 case REAL_TYPE:
7068 case FIXED_POINT_TYPE:
7069 gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
7070 gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);
7071
7072 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
7073 {
7074 TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
7075 TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
7076 }
7077 break;
7078
7079 case ARRAY_TYPE:
7080 /* These types may not have declarations, so handle them here. */
7081 gimplify_type_sizes (TREE_TYPE (type), list_p);
7082 gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
7083 /* When not optimizing, ensure VLA bounds aren't removed. */
7084 if (!optimize
7085 && TYPE_DOMAIN (type)
7086 && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
7087 {
7088 t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
7089 if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
7090 DECL_IGNORED_P (t) = 0;
7091 t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
7092 if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
7093 DECL_IGNORED_P (t) = 0;
7094 }
7095 break;
7096
7097 case RECORD_TYPE:
7098 case UNION_TYPE:
7099 case QUAL_UNION_TYPE:
7100 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
7101 if (TREE_CODE (field) == FIELD_DECL)
7102 {
7103 gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
7104 gimplify_type_sizes (TREE_TYPE (field), list_p);
7105 }
7106 break;
7107
7108 case POINTER_TYPE:
7109 case REFERENCE_TYPE:
7110 /* We used to recurse on the pointed-to type here, which turned out to
7111 be incorrect because its definition might refer to variables not
7112 yet initialized at this point if a forward declaration is involved.
7113
7114 It was actually useful for anonymous pointed-to types to ensure
7115 that the sizes evaluation dominates every possible later use of the
7116 values. Restricting to such types here would be safe since there
7117 is no possible forward declaration around, but would introduce an
7118 undesirable middle-end semantic to anonymity. We then defer to
7119 front-ends the responsibility of ensuring that the sizes are
7120 evaluated both early and late enough, e.g. by attaching artificial
7121 type declarations to the tree. */
7122 break;
7123
7124 default:
7125 break;
7126 }
7127
7128 gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
7129 gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);
7130
7131 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
7132 {
7133 TYPE_SIZE (t) = TYPE_SIZE (type);
7134 TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
7135 TYPE_SIZES_GIMPLIFIED (t) = 1;
7136 }
7137 }
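
/* Illustrative sketch, not part of the original gimplify.c: for a C VLA such
   as

     void f (int n) { char buf[n + 1]; ... }

   the ARRAY_TYPE of BUF has a variable TYPE_DOMAIN and TYPE_SIZE.  Running
   gimplify_type_sizes over it appends statements to LIST_P that evaluate
   those expressions once, roughly (temporary names are hypothetical)

     D.2000 = n;
     D.2001 = (sizetype) D.2000;
     D.2002 = D.2001 + 1;

   so that the array bound and size later refer only to simple GIMPLE
   values.  */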
7138
7139 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
7140 a size or position, has had all of its SAVE_EXPRs evaluated.
7141 We add any required statements to *STMT_P. */
7142
7143 void
7144 gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
7145 {
7146 tree type, expr = *expr_p;
7147
7148 /* We don't do anything if the value isn't there, is constant, or contains
7149 a PLACEHOLDER_EXPR. We also don't want to do anything if it's already
7150 a VAR_DECL. If it's a VAR_DECL from another function, the gimplifier
7151 will want to replace it with a new variable, but that will cause problems
7152 if this type is from outside the function. It's OK to have that here. */
7153 if (expr == NULL_TREE || TREE_CONSTANT (expr)
7154 || TREE_CODE (expr) == VAR_DECL
7155 || CONTAINS_PLACEHOLDER_P (expr))
7156 return;
7157
7158 type = TREE_TYPE (expr);
7159 *expr_p = unshare_expr (expr);
7160
7161 gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue);
7162 expr = *expr_p;
7163
7164 /* Verify that we have an exact type match with the original expression.
7165 In particular, we do not wish to drop a "sizetype" in favour of a
7166 type of similar dimensions. We don't want to pollute the generic
7167 type-stripping code with this knowledge because it doesn't matter
7168 for the bulk of GENERIC/GIMPLE. It only matters that TYPE_SIZE_UNIT
7169 and friends retain their "sizetype-ness". */
7170 if (TREE_TYPE (expr) != type
7171 && TREE_CODE (type) == INTEGER_TYPE
7172 && TYPE_IS_SIZETYPE (type))
7173 {
7174 tree tmp;
7175 gimple stmt;
7176
7177 *expr_p = create_tmp_var (type, NULL);
7178 tmp = build1 (NOP_EXPR, type, expr);
7179 stmt = gimplify_assign (*expr_p, tmp, stmt_p);
7180 if (EXPR_HAS_LOCATION (expr))
7181 gimple_set_location (stmt, *EXPR_LOCUS (expr));
7182 else
7183 gimple_set_location (stmt, input_location);
7184 }
7185 }
7186
7187
7188 /* Gimplify the body of statements pointed to by BODY_P and return a
7189 GIMPLE_BIND containing the sequence of GIMPLE statements
7190 corresponding to BODY_P. FNDECL is the function decl containing
7191 *BODY_P. */
7192
7193 gimple
7194 gimplify_body (tree *body_p, tree fndecl, bool do_parms)
7195 {
7196 location_t saved_location = input_location;
7197 gimple_seq parm_stmts, seq;
7198 gimple outer_bind;
7199 struct gimplify_ctx gctx;
7200
7201 timevar_push (TV_TREE_GIMPLIFY);
7202
7203 /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
7204 gimplification. */
7205 default_rtl_profile ();
7206
7207 gcc_assert (gimplify_ctxp == NULL);
7208 push_gimplify_context (&gctx);
7209
7210 /* Unshare most shared trees in the body and in that of any nested functions.
7211 It would seem we don't have to do this for nested functions because
7212 they are supposed to be output and then the outer function gimplified
7213 first, but the g++ front end doesn't always do it that way. */
7214 unshare_body (body_p, fndecl);
7215 unvisit_body (body_p, fndecl);
7216
7217 /* Make sure input_location isn't set to something weird. */
7218 input_location = DECL_SOURCE_LOCATION (fndecl);
7219
7220 /* Resolve callee-copies. This has to be done before processing
7221 the body so that DECL_VALUE_EXPR gets processed correctly. */
7222 parm_stmts = (do_parms) ? gimplify_parameters () : NULL;
7223
7224 /* Gimplify the function's body. */
7225 seq = NULL;
7226 gimplify_stmt (body_p, &seq);
7227 outer_bind = gimple_seq_first_stmt (seq);
7228 if (!outer_bind)
7229 {
7230 outer_bind = gimple_build_nop ();
7231 gimplify_seq_add_stmt (&seq, outer_bind);
7232 }
7233
7234 /* The body must contain exactly one statement, a GIMPLE_BIND. If this is
7235 not the case, wrap everything in a GIMPLE_BIND to make it so. */
7236 if (gimple_code (outer_bind) == GIMPLE_BIND
7237 && gimple_seq_first (seq) == gimple_seq_last (seq))
7238 ;
7239 else
7240 outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);
7241
7242 *body_p = NULL_TREE;
7243
7244 /* If we had callee-copy statements, insert them at the beginning
7245 of the function. */
7246 if (!gimple_seq_empty_p (parm_stmts))
7247 {
7248 gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
7249 gimple_bind_set_body (outer_bind, parm_stmts);
7250 }
7251
7252 pop_gimplify_context (outer_bind);
7253 gcc_assert (gimplify_ctxp == NULL);
7254
7255 #ifdef ENABLE_TYPES_CHECKING
7256 if (!errorcount && !sorrycount)
7257 verify_types_in_gimple_seq (gimple_bind_body (outer_bind));
7258 #endif
7259
7260 timevar_pop (TV_TREE_GIMPLIFY);
7261 input_location = saved_location;
7262
7263 return outer_bind;
7264 }
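
/* Illustrative sketch, not part of the original gimplify.c: for a trivial
   function

     int f (void) { int i = 1; return i; }

   the GIMPLE_BIND returned by gimplify_body has a body that prints roughly
   as (the temporary name is hypothetical)

     int i;
     int D.1500;

     i = 1;
     D.1500 = i;
     return D.1500;  */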
7265
7266 /* Entry point to the gimplification pass. FNDECL is the FUNCTION_DECL
7267 node for the function we want to gimplify.
7268
7269 Returns the sequence of GIMPLE statements corresponding to the body
7270 of FNDECL. */
7271
7272 void
7273 gimplify_function_tree (tree fndecl)
7274 {
7275 tree oldfn, parm, ret;
7276 gimple_seq seq;
7277 gimple bind;
7278
7279 oldfn = current_function_decl;
7280 current_function_decl = fndecl;
7281 if (DECL_STRUCT_FUNCTION (fndecl))
7282 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
7283 else
7284 push_struct_function (fndecl);
7285
7286 for (parm = DECL_ARGUMENTS (fndecl); parm ; parm = TREE_CHAIN (parm))
7287 {
7288 /* Preliminarily mark non-addressed complex variables as eligible
7289 for promotion to gimple registers. We'll transform their uses
7290 as we find them. */
7291 if ((TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
7292 || TREE_CODE (TREE_TYPE (parm)) == VECTOR_TYPE)
7293 && !TREE_THIS_VOLATILE (parm)
7294 && !needs_to_live_in_memory (parm))
7295 DECL_GIMPLE_REG_P (parm) = 1;
7296 }
7297
7298 ret = DECL_RESULT (fndecl);
7299 if ((TREE_CODE (TREE_TYPE (ret)) == COMPLEX_TYPE
7300 || TREE_CODE (TREE_TYPE (ret)) == VECTOR_TYPE)
7301 && !needs_to_live_in_memory (ret))
7302 DECL_GIMPLE_REG_P (ret) = 1;
7303
7304 bind = gimplify_body (&DECL_SAVED_TREE (fndecl), fndecl, true);
7305
7306 /* The tree body of the function is no longer needed, replace it
7307 with the new GIMPLE body. */
7308 seq = gimple_seq_alloc ();
7309 gimple_seq_add_stmt (&seq, bind);
7310 gimple_set_body (fndecl, seq);
7311
7312 /* If we're instrumenting function entry/exit, then prepend the call to
7313 the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
7314 catch the exit hook. */
7315 /* ??? Add some way to ignore exceptions for this TFE. */
7316 if (flag_instrument_function_entry_exit
7317 && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
7318 && !flag_instrument_functions_exclude_p (fndecl))
7319 {
7320 tree x;
7321 gimple new_bind;
7322 gimple tf;
7323 gimple_seq cleanup = NULL, body = NULL;
7324
7325 x = implicit_built_in_decls[BUILT_IN_PROFILE_FUNC_EXIT];
7326 gimplify_seq_add_stmt (&cleanup, gimple_build_call (x, 0));
7327 tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);
7328
7329 x = implicit_built_in_decls[BUILT_IN_PROFILE_FUNC_ENTER];
7330 gimplify_seq_add_stmt (&body, gimple_build_call (x, 0));
7331 gimplify_seq_add_stmt (&body, tf);
7332 new_bind = gimple_build_bind (NULL, body, gimple_bind_block (bind));
7333 /* Clear the block for BIND, since it is no longer directly inside
7334 the function, but within a try block. */
7335 gimple_bind_set_block (bind, NULL);
7336
7337 /* Replace the current function body with the body
7338 wrapped in the try/finally TF. */
7339 seq = gimple_seq_alloc ();
7340 gimple_seq_add_stmt (&seq, new_bind);
7341 gimple_set_body (fndecl, seq);
7342 }
7343
7344 DECL_SAVED_TREE (fndecl) = NULL_TREE;
7345
7346 current_function_decl = oldfn;
7347 pop_cfun ();
7348 }
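
/* Illustrative sketch, not part of the original gimplify.c: when function
   entry/exit instrumentation is active, the wrapping above turns the
   original outer bind B into roughly

     new_bind
       {
         call to BUILT_IN_PROFILE_FUNC_ENTER;
         try
           {
             B   (its BLOCK now carried by new_bind)
           }
         finally
           {
             call to BUILT_IN_PROFILE_FUNC_EXIT;
           }
       }

   so the exit hook runs on every normal exit from the function.  */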
7349
7350
7351 /* Some transformations like inlining may invalidate the GIMPLE form
7352 for operands. This function traverses all the operands in STMT and
7353 gimplifies anything that is not a valid gimple operand. Any new
7354 GIMPLE statements are inserted before *GSI_P. */
7355
7356 void
7357 gimple_regimplify_operands (gimple stmt, gimple_stmt_iterator *gsi_p)
7358 {
7359 size_t i, num_ops;
7360 tree orig_lhs = NULL_TREE, lhs, t;
7361 gimple_seq pre = NULL;
7362 gimple post_stmt = NULL;
7363 struct gimplify_ctx gctx;
7364
7365 push_gimplify_context (&gctx);
7366 gimplify_ctxp->into_ssa = gimple_in_ssa_p (cfun);
7367
7368 switch (gimple_code (stmt))
7369 {
7370 case GIMPLE_COND:
7371 gimplify_expr (gimple_cond_lhs_ptr (stmt), &pre, NULL,
7372 is_gimple_val, fb_rvalue);
7373 gimplify_expr (gimple_cond_rhs_ptr (stmt), &pre, NULL,
7374 is_gimple_val, fb_rvalue);
7375 break;
7376 case GIMPLE_OMP_ATOMIC_LOAD:
7377 gimplify_expr (gimple_omp_atomic_load_rhs_ptr (stmt), &pre, NULL,
7378 is_gimple_val, fb_rvalue);
7379 break;
7380 case GIMPLE_ASM:
7381 {
7382 size_t i, noutputs = gimple_asm_noutputs (stmt);
7383 const char *constraint, **oconstraints;
7384 bool allows_mem, allows_reg, is_inout;
7385
7386 oconstraints
7387 = (const char **) alloca ((noutputs) * sizeof (const char *));
7388 for (i = 0; i < noutputs; i++)
7389 {
7390 tree op = gimple_asm_output_op (stmt, i);
7391 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
7392 oconstraints[i] = constraint;
7393 parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
7394 &allows_reg, &is_inout);
7395 gimplify_expr (&TREE_VALUE (op), &pre, NULL,
7396 is_inout ? is_gimple_min_lval : is_gimple_lvalue,
7397 fb_lvalue | fb_mayfail);
7398 }
7399 for (i = 0; i < gimple_asm_ninputs (stmt); i++)
7400 {
7401 tree op = gimple_asm_input_op (stmt, i);
7402 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
7403 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
7404 oconstraints, &allows_mem, &allows_reg);
7405 if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (op))) && allows_mem)
7406 allows_reg = 0;
7407 if (!allows_reg && allows_mem)
7408 gimplify_expr (&TREE_VALUE (op), &pre, NULL,
7409 is_gimple_lvalue, fb_lvalue | fb_mayfail);
7410 else
7411 gimplify_expr (&TREE_VALUE (op), &pre, NULL,
7412 is_gimple_asm_val, fb_rvalue);
7413 }
7414 }
7415 break;
7416 default:
7417 /* NOTE: We start gimplifying operands from last to first to
7418 make sure that side-effects on the RHS of calls, assignments
7419 and ASMs are executed before the LHS. The ordering is not
7420 important for other statements. */
7421 num_ops = gimple_num_ops (stmt);
7422 orig_lhs = gimple_get_lhs (stmt);
7423 for (i = num_ops; i > 0; i--)
7424 {
7425 tree op = gimple_op (stmt, i - 1);
7426 if (op == NULL_TREE)
7427 continue;
7428 if (i == 1 && (is_gimple_call (stmt) || is_gimple_assign (stmt)))
7429 gimplify_expr (&op, &pre, NULL, is_gimple_lvalue, fb_lvalue);
7430 else if (i == 2
7431 && is_gimple_assign (stmt)
7432 && num_ops == 2
7433 && get_gimple_rhs_class (gimple_expr_code (stmt))
7434 == GIMPLE_SINGLE_RHS)
7435 gimplify_expr (&op, &pre, NULL,
7436 rhs_predicate_for (gimple_assign_lhs (stmt)),
7437 fb_rvalue);
7438 else if (i == 2 && is_gimple_call (stmt))
7439 {
7440 if (TREE_CODE (op) == FUNCTION_DECL)
7441 continue;
7442 gimplify_expr (&op, &pre, NULL, is_gimple_call_addr, fb_rvalue);
7443 }
7444 else
7445 gimplify_expr (&op, &pre, NULL, is_gimple_val, fb_rvalue);
7446 gimple_set_op (stmt, i - 1, op);
7447 }
7448
7449 lhs = gimple_get_lhs (stmt);
7450 /* If regimplification of the LHS changed it in a way that requires
7451 a simple RHS, create a temporary. */
7452 if (orig_lhs != lhs && !is_gimple_formal_tmp_var (lhs))
7453 {
7454 bool need_temp = false;
7455
7456 if (is_gimple_assign (stmt)
7457 && num_ops == 2
7458 && get_gimple_rhs_class (gimple_expr_code (stmt))
7459 == GIMPLE_SINGLE_RHS)
7460 gimplify_expr (gimple_assign_rhs1_ptr (stmt), &pre, NULL,
7461 rhs_predicate_for (gimple_assign_lhs (stmt)),
7462 fb_rvalue);
7463 else if (is_gimple_reg (lhs))
7464 {
7465 if (is_gimple_reg_type (TREE_TYPE (lhs)))
7466 {
7467 if (is_gimple_call (stmt))
7468 {
7469 i = gimple_call_flags (stmt);
7470 if ((i & ECF_LOOPING_CONST_OR_PURE)
7471 || !(i & (ECF_CONST | ECF_PURE)))
7472 need_temp = true;
7473 }
7474 if (stmt_can_throw_internal (stmt))
7475 need_temp = true;
7476 }
7477 }
7478 else
7479 {
7480 if (is_gimple_reg_type (TREE_TYPE (lhs)))
7481 need_temp = true;
7482 else if (TYPE_MODE (TREE_TYPE (lhs)) != BLKmode)
7483 {
7484 if (is_gimple_call (stmt))
7485 {
7486 tree fndecl = gimple_call_fndecl (stmt);
7487
7488 if (!aggregate_value_p (TREE_TYPE (lhs), fndecl)
7489 && !(fndecl && DECL_RESULT (fndecl)
7490 && DECL_BY_REFERENCE (DECL_RESULT (fndecl))))
7491 need_temp = true;
7492 }
7493 else
7494 need_temp = true;
7495 }
7496 }
7497 if (need_temp)
7498 {
7499 tree temp = create_tmp_var (TREE_TYPE (lhs), NULL);
7500
7501 DECL_GIMPLE_FORMAL_TEMP_P (temp) = 1;
7502 if (TREE_CODE (TREE_TYPE (lhs)) == COMPLEX_TYPE
7503 || TREE_CODE (TREE_TYPE (lhs)) == VECTOR_TYPE)
7504 DECL_GIMPLE_REG_P (temp) = 1;
7505 if (TREE_CODE (orig_lhs) == SSA_NAME)
7506 orig_lhs = SSA_NAME_VAR (orig_lhs);
7507 if (TREE_CODE (orig_lhs) == VAR_DECL
7508 && DECL_BASED_ON_RESTRICT_P (orig_lhs))
7509 {
7510 DECL_BASED_ON_RESTRICT_P (temp) = 1;
7511 SET_DECL_RESTRICT_BASE (temp,
7512 DECL_GET_RESTRICT_BASE (orig_lhs));
7513 }
7514
7515 if (gimple_in_ssa_p (cfun))
7516 temp = make_ssa_name (temp, NULL);
7517 gimple_set_lhs (stmt, temp);
7518 post_stmt = gimple_build_assign (lhs, temp);
7519 if (TREE_CODE (lhs) == SSA_NAME)
7520 SSA_NAME_DEF_STMT (lhs) = post_stmt;
7521 }
7522 }
7523 break;
7524 }
7525
7526 if (gimple_referenced_vars (cfun))
7527 for (t = gimplify_ctxp->temps; t ; t = TREE_CHAIN (t))
7528 add_referenced_var (t);
7529
7530 if (!gimple_seq_empty_p (pre))
7531 {
7532 if (gimple_in_ssa_p (cfun))
7533 {
7534 gimple_stmt_iterator i;
7535
7536 for (i = gsi_start (pre); !gsi_end_p (i); gsi_next (&i))
7537 mark_symbols_for_renaming (gsi_stmt (i));
7538 }
7539 gsi_insert_seq_before (gsi_p, pre, GSI_SAME_STMT);
7540 }
7541 if (post_stmt)
7542 gsi_insert_after (gsi_p, post_stmt, GSI_NEW_STMT);
7543
7544 pop_gimplify_context (NULL);
7545 }
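
/* Illustrative sketch only, not part of the original gimplify.c: a
   hypothetical helper showing the typical calling pattern for
   gimple_regimplify_operands.  COND_STMT must already be in a basic
   block.  */

static void
example_set_cond_lhs_and_regimplify (gimple cond_stmt, tree new_lhs)
{
  gimple_stmt_iterator gsi = gsi_for_stmt (cond_stmt);

  /* NEW_LHS may not be a valid GIMPLE value (e.g. a COMPONENT_REF produced
     by substitution); regimplification splits it out into statements
     inserted before COND_STMT.  */
  gimple_cond_set_lhs (cond_stmt, new_lhs);
  gimple_regimplify_operands (cond_stmt, &gsi);
}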
7546
7547
7548 /* Expands EXPR into a list of GIMPLE statements STMTS. If SIMPLE is true,
7549 force the result to be either an SSA name or an invariant, otherwise
7550 just force it to be a rhs expression. If VAR is not NULL, make the
7551 base variable of the final destination be VAR if suitable. */
7552
7553 tree
7554 force_gimple_operand (tree expr, gimple_seq *stmts, bool simple, tree var)
7555 {
7556 tree t;
7557 enum gimplify_status ret;
7558 gimple_predicate gimple_test_f;
7559 struct gimplify_ctx gctx;
7560
7561 *stmts = NULL;
7562
7563 if (is_gimple_val (expr))
7564 return expr;
7565
7566 gimple_test_f = simple ? is_gimple_val : is_gimple_reg_rhs;
7567
7568 push_gimplify_context (&gctx);
7569 gimplify_ctxp->into_ssa = gimple_in_ssa_p (cfun);
7570 gimplify_ctxp->allow_rhs_cond_expr = true;
7571
7572 if (var)
7573 expr = build2 (MODIFY_EXPR, TREE_TYPE (var), var, expr);
7574
7575 if (TREE_CODE (expr) != MODIFY_EXPR
7576 && TREE_TYPE (expr) == void_type_node)
7577 {
7578 gimplify_and_add (expr, stmts);
7579 expr = NULL_TREE;
7580 }
7581 else
7582 {
7583 ret = gimplify_expr (&expr, stmts, NULL, gimple_test_f, fb_rvalue);
7584 gcc_assert (ret != GS_ERROR);
7585 }
7586
7587 if (gimple_referenced_vars (cfun))
7588 for (t = gimplify_ctxp->temps; t ; t = TREE_CHAIN (t))
7589 add_referenced_var (t);
7590
7591 pop_gimplify_context (NULL);
7592
7593 return expr;
7594 }
7595
7596 /* Invokes force_gimple_operand for EXPR with parameters SIMPLE_P and VAR. If
7597 some statements are produced, emits them at GSI. If BEFORE is true,
7598 the statements are inserted before GSI, otherwise they are inserted after
7599 it. M specifies the way GSI moves after insertion (GSI_SAME_STMT or
7600 GSI_CONTINUE_LINKING are the usual values). */
7601
7602 tree
7603 force_gimple_operand_gsi (gimple_stmt_iterator *gsi, tree expr,
7604 bool simple_p, tree var, bool before,
7605 enum gsi_iterator_update m)
7606 {
7607 gimple_seq stmts;
7608
7609 expr = force_gimple_operand (expr, &stmts, simple_p, var);
7610
7611 if (!gimple_seq_empty_p (stmts))
7612 {
7613 if (gimple_in_ssa_p (cfun))
7614 {
7615 gimple_stmt_iterator i;
7616
7617 for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
7618 mark_symbols_for_renaming (gsi_stmt (i));
7619 }
7620
7621 if (before)
7622 gsi_insert_seq_before (gsi, stmts, m);
7623 else
7624 gsi_insert_seq_after (gsi, stmts, m);
7625 }
7626
7627 return expr;
7628 }
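
/* Illustrative sketch only, not part of the original gimplify.c: a
   hypothetical helper showing the common way a pass materializes a value
   as a simple operand right before an existing statement.  A real caller
   would also have to ensure BASE is marked addressable.  */

static tree
example_force_addr_before_stmt (gimple stmt, tree base)
{
  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
  tree addr = build_fold_addr_expr (base);

  /* Any statements needed to compute ADDR are inserted before STMT; with
     SIMPLE_P true the returned tree is an SSA name or an invariant.  */
  return force_gimple_operand_gsi (&gsi, addr, true, NULL_TREE,
				   true, GSI_SAME_STMT);
}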
7629
7630 #include "gt-gimplify.h"