re PR middle-end/39127 (Invalid GIMPLE with OpenMP)
[gcc.git] / gcc / gimplify.c
1 /* Tree lowering pass. This pass converts the GENERIC functions-as-trees
2 tree representation into the GIMPLE form.
3 Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008
4 Free Software Foundation, Inc.
5 Major work done by Sebastian Pop <s.pop@laposte.net>,
6 Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.
7
8 This file is part of GCC.
9
10 GCC is free software; you can redistribute it and/or modify it under
11 the terms of the GNU General Public License as published by the Free
12 Software Foundation; either version 3, or (at your option) any later
13 version.
14
15 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
16 WARRANTY; without even the implied warranty of MERCHANTABILITY or
17 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 for more details.
19
20 You should have received a copy of the GNU General Public License
21 along with GCC; see the file COPYING3. If not see
22 <http://www.gnu.org/licenses/>. */
23
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "tm.h"
28 #include "tree.h"
29 #include "rtl.h"
30 #include "varray.h"
31 #include "gimple.h"
32 #include "tree-iterator.h"
33 #include "tree-inline.h"
34 #include "diagnostic.h"
35 #include "langhooks.h"
36 #include "langhooks-def.h"
37 #include "tree-flow.h"
38 #include "cgraph.h"
39 #include "timevar.h"
40 #include "except.h"
41 #include "hashtab.h"
42 #include "flags.h"
43 #include "real.h"
44 #include "function.h"
45 #include "output.h"
46 #include "expr.h"
47 #include "ggc.h"
48 #include "toplev.h"
49 #include "target.h"
50 #include "optabs.h"
51 #include "pointer-set.h"
52 #include "splay-tree.h"
53 #include "vec.h"
54 #include "gimple.h"
55
56
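/* Flags recording the data-sharing class of each variable seen while
   gimplifying the body of an OpenMP construct. The values are bit flags
   so they can be combined in the per-context splay tree below. */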
57 enum gimplify_omp_var_data
58 {
59 GOVD_SEEN = 1,
60 GOVD_EXPLICIT = 2,
61 GOVD_SHARED = 4,
62 GOVD_PRIVATE = 8,
63 GOVD_FIRSTPRIVATE = 16,
64 GOVD_LASTPRIVATE = 32,
65 GOVD_REDUCTION = 64,
66 GOVD_LOCAL = 128,
67 GOVD_DEBUG_PRIVATE = 256,
68 GOVD_PRIVATE_OUTER_REF = 512,
69 GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
70 | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LOCAL)
71 };
72
73
74 enum omp_region_type
75 {
76 ORT_WORKSHARE = 0,
77 ORT_TASK = 1,
78 ORT_PARALLEL = 2,
79 ORT_COMBINED_PARALLEL = 3
80 };
81
82 struct gimplify_omp_ctx
83 {
84 struct gimplify_omp_ctx *outer_context;
85 splay_tree variables;
86 struct pointer_set_t *privatized_types;
87 location_t location;
88 enum omp_clause_default_kind default_kind;
89 enum omp_region_type region_type;
90 };
91
92 static struct gimplify_ctx *gimplify_ctxp;
93 static struct gimplify_omp_ctx *gimplify_omp_ctxp;
94
95
96 /* Formal (expression) temporary table handling: Multiple occurrences of
97 the same scalar expression are evaluated into the same temporary. */
98
99 typedef struct gimple_temp_hash_elt
100 {
101 tree val; /* Key */
102 tree temp; /* Value */
103 } elt_t;
104
105 /* Forward declarations. */
106 static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
107
108 /* Mark X addressable. Unlike the langhook, we expect X to be in GIMPLE
109 form and we don't do any syntax checking. */
110 static void
111 mark_addressable (tree x)
112 {
113 while (handled_component_p (x))
114 x = TREE_OPERAND (x, 0);
115 if (TREE_CODE (x) != VAR_DECL && TREE_CODE (x) != PARM_DECL)
116 return;
117 TREE_ADDRESSABLE (x) = 1;
118 }
119
120 /* Return a hash value for a formal temporary table entry. */
121
122 static hashval_t
123 gimple_tree_hash (const void *p)
124 {
125 tree t = ((const elt_t *) p)->val;
126 return iterative_hash_expr (t, 0);
127 }
128
129 /* Compare two formal temporary table entries. */
130
131 static int
132 gimple_tree_eq (const void *p1, const void *p2)
133 {
134 tree t1 = ((const elt_t *) p1)->val;
135 tree t2 = ((const elt_t *) p2)->val;
136 enum tree_code code = TREE_CODE (t1);
137
138 if (TREE_CODE (t2) != code
139 || TREE_TYPE (t1) != TREE_TYPE (t2))
140 return 0;
141
142 if (!operand_equal_p (t1, t2, 0))
143 return 0;
144
145 /* Only allow them to compare equal if they also hash equal; otherwise
146 results are nondeterministic, and we fail bootstrap comparison. */
147 gcc_assert (gimple_tree_hash (p1) == gimple_tree_hash (p2));
148
149 return 1;
150 }
151
152 /* Link gimple statement GS to the end of the sequence *SEQ_P. If
153 *SEQ_P is NULL, a new sequence is allocated. This function is
154 similar to gimple_seq_add_stmt, but does not scan the operands.
155 During gimplification, we need to manipulate statement sequences
156 before the def/use vectors have been constructed. */
157
158 static void
159 gimplify_seq_add_stmt (gimple_seq *seq_p, gimple gs)
160 {
161 gimple_stmt_iterator si;
162
163 if (gs == NULL)
164 return;
165
166 if (*seq_p == NULL)
167 *seq_p = gimple_seq_alloc ();
168
169 si = gsi_last (*seq_p);
170
171 gsi_insert_after_without_update (&si, gs, GSI_NEW_STMT);
172 }
173
174 /* Append sequence SRC to the end of sequence *DST_P. If *DST_P is
175 NULL, a new sequence is allocated. This function is
176 similar to gimple_seq_add_seq, but does not scan the operands.
177 During gimplification, we need to manipulate statement sequences
178 before the def/use vectors have been constructed. */
179
180 static void
181 gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
182 {
183 gimple_stmt_iterator si;
184
185 if (src == NULL)
186 return;
187
188 if (*dst_p == NULL)
189 *dst_p = gimple_seq_alloc ();
190
191 si = gsi_last (*dst_p);
192 gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
193 }
194
195 /* Set up a context for the gimplifier. */
196
197 void
198 push_gimplify_context (struct gimplify_ctx *c)
199 {
200 memset (c, '\0', sizeof (*c));
201 c->prev_context = gimplify_ctxp;
202 gimplify_ctxp = c;
203 }
204
205 /* Tear down a context for the gimplifier. If BODY is non-null, then
206 put the temporaries into the outer BIND_EXPR. Otherwise, put them
207 in the local_decls.
208
209 BODY is not a sequence, but the first tuple in a sequence. */
210
211 void
212 pop_gimplify_context (gimple body)
213 {
214 struct gimplify_ctx *c = gimplify_ctxp;
215 tree t;
216
217 gcc_assert (c && (c->bind_expr_stack == NULL
218 || VEC_empty (gimple, c->bind_expr_stack)));
219 VEC_free (gimple, heap, c->bind_expr_stack);
220 gimplify_ctxp = c->prev_context;
221
222 for (t = c->temps; t ; t = TREE_CHAIN (t))
223 DECL_GIMPLE_FORMAL_TEMP_P (t) = 0;
224
225 if (body)
226 declare_vars (c->temps, body, false);
227 else
228 record_vars (c->temps);
229
230 if (c->temp_htab)
231 htab_delete (c->temp_htab);
232 }
233
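/* Push the GIMPLE_BIND tuple GIMPLE_BIND onto the stack of binds kept
   in the current gimplify context. */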
234 static void
235 gimple_push_bind_expr (gimple gimple_bind)
236 {
237 if (gimplify_ctxp->bind_expr_stack == NULL)
238 gimplify_ctxp->bind_expr_stack = VEC_alloc (gimple, heap, 8);
239 VEC_safe_push (gimple, heap, gimplify_ctxp->bind_expr_stack, gimple_bind);
240 }
241
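/* Pop the innermost GIMPLE_BIND from the bind stack of the current
   gimplify context. */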
242 static void
243 gimple_pop_bind_expr (void)
244 {
245 VEC_pop (gimple, gimplify_ctxp->bind_expr_stack);
246 }
247
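/* Return the innermost GIMPLE_BIND currently being gimplified, i.e. the
   top of the bind stack. */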
248 gimple
249 gimple_current_bind_expr (void)
250 {
251 return VEC_last (gimple, gimplify_ctxp->bind_expr_stack);
252 }
253
254 /* Return the stack of GIMPLE_BINDs created during gimplification. */
255
256 VEC(gimple, heap) *
257 gimple_bind_expr_stack (void)
258 {
259 return gimplify_ctxp->bind_expr_stack;
260 }
261
262 /* Returns true iff there is a COND_EXPR between us and the innermost
263 CLEANUP_POINT_EXPR. This info is used by gimple_push_cleanup. */
264
265 static bool
266 gimple_conditional_context (void)
267 {
268 return gimplify_ctxp->conditions > 0;
269 }
270
271 /* Note that we've entered a COND_EXPR. */
272
273 static void
274 gimple_push_condition (void)
275 {
276 #ifdef ENABLE_GIMPLE_CHECKING
277 if (gimplify_ctxp->conditions == 0)
278 gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
279 #endif
280 ++(gimplify_ctxp->conditions);
281 }
282
283 /* Note that we've left a COND_EXPR. If we're back at unconditional scope
284 now, add any conditional cleanups we've seen to the prequeue. */
285
286 static void
287 gimple_pop_condition (gimple_seq *pre_p)
288 {
289 int conds = --(gimplify_ctxp->conditions);
290
291 gcc_assert (conds >= 0);
292 if (conds == 0)
293 {
294 gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
295 gimplify_ctxp->conditional_cleanups = NULL;
296 }
297 }
298
299 /* A stable comparison routine for use with splay trees and DECLs. */
300
301 static int
302 splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
303 {
304 tree a = (tree) xa;
305 tree b = (tree) xb;
306
307 return DECL_UID (a) - DECL_UID (b);
308 }
309
310 /* Create a new omp construct that deals with variable remapping. */
311
312 static struct gimplify_omp_ctx *
313 new_omp_context (enum omp_region_type region_type)
314 {
315 struct gimplify_omp_ctx *c;
316
317 c = XCNEW (struct gimplify_omp_ctx);
318 c->outer_context = gimplify_omp_ctxp;
319 c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
320 c->privatized_types = pointer_set_create ();
321 c->location = input_location;
322 c->region_type = region_type;
323 if (region_type != ORT_TASK)
324 c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
325 else
326 c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;
327
328 return c;
329 }
330
331 /* Destroy an omp construct that deals with variable remapping. */
332
333 static void
334 delete_omp_context (struct gimplify_omp_ctx *c)
335 {
336 splay_tree_delete (c->variables);
337 pointer_set_destroy (c->privatized_types);
338 XDELETE (c);
339 }
340
341 static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
342 static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);
343
344 /* A subroutine of append_to_statement_list{,_force}. T is not NULL. */
345
346 static void
347 append_to_statement_list_1 (tree t, tree *list_p)
348 {
349 tree list = *list_p;
350 tree_stmt_iterator i;
351
352 if (!list)
353 {
354 if (t && TREE_CODE (t) == STATEMENT_LIST)
355 {
356 *list_p = t;
357 return;
358 }
359 *list_p = list = alloc_stmt_list ();
360 }
361
362 i = tsi_last (list);
363 tsi_link_after (&i, t, TSI_CONTINUE_LINKING);
364 }
365
366 /* Add T to the end of the list container pointed to by LIST_P.
367 If T is an expression with no effects, it is ignored. */
368
369 void
370 append_to_statement_list (tree t, tree *list_p)
371 {
372 if (t && TREE_SIDE_EFFECTS (t))
373 append_to_statement_list_1 (t, list_p);
374 }
375
376 /* Similar, but the statement is always added, regardless of side effects. */
377
378 void
379 append_to_statement_list_force (tree t, tree *list_p)
380 {
381 if (t != NULL_TREE)
382 append_to_statement_list_1 (t, list_p);
383 }
384
385 /* Both gimplify the statement T and append it to *SEQ_P. This function
386 behaves exactly as gimplify_stmt, but you don't have to pass T as a
387 reference. */
388
389 void
390 gimplify_and_add (tree t, gimple_seq *seq_p)
391 {
392 gimplify_stmt (&t, seq_p);
393 }
394
395 /* Gimplify statement T into sequence *SEQ_P, and return the first
396 tuple in the sequence of generated tuples for this statement.
397 Return NULL if gimplifying T produced no tuples. */
398
399 static gimple
400 gimplify_and_return_first (tree t, gimple_seq *seq_p)
401 {
402 gimple_stmt_iterator last = gsi_last (*seq_p);
403
404 gimplify_and_add (t, seq_p);
405
406 if (!gsi_end_p (last))
407 {
408 gsi_next (&last);
409 return gsi_stmt (last);
410 }
411 else
412 return gimple_seq_first_stmt (*seq_p);
413 }
414
415 /* Strip off a legitimate source ending from the input string NAME of
416 length LEN. Rather than having to know the names used by all of
417 our front ends, we strip off an ending of a period followed by
418 up to five characters. (Java uses ".class".) */
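/* For example, "foo.c" is truncated to "foo", while a name whose last
   few characters contain no period is left unchanged. */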
419
420 static inline void
421 remove_suffix (char *name, int len)
422 {
423 int i;
424
425 for (i = 2; i < 8 && len > i; i++)
426 {
427 if (name[len - i] == '.')
428 {
429 name[len - i] = '\0';
430 break;
431 }
432 }
433 }
434
435 /* Subroutine for find_single_pointer_decl. */
436
437 static tree
438 find_single_pointer_decl_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
439 void *data)
440 {
441 tree *pdecl = (tree *) data;
442
443 /* We are only looking for pointers at the same level as the
444 original tree; we must not look through any indirections.
445 Returning anything other than NULL_TREE will cause the caller to
446 not find a base. */
447 if (REFERENCE_CLASS_P (*tp))
448 return *tp;
449
450 if (DECL_P (*tp) && POINTER_TYPE_P (TREE_TYPE (*tp)))
451 {
452 if (*pdecl)
453 {
454 /* We already found a pointer decl; return anything other
455 than NULL_TREE to unwind from walk_tree signalling that
456 we have a duplicate. */
457 return *tp;
458 }
459 *pdecl = *tp;
460 }
461
462 return NULL_TREE;
463 }
464
465 /* Find the single DECL of pointer type in the tree T, used directly
466 rather than via an indirection, and return it. If there are zero
467 or more than one such DECLs, return NULL. */
468
469 static tree
470 find_single_pointer_decl (tree t)
471 {
472 tree decl = NULL_TREE;
473
474 if (walk_tree (&t, find_single_pointer_decl_1, &decl, NULL))
475 {
476 /* find_single_pointer_decl_1 returns a nonzero value, causing
477 walk_tree to return a nonzero value, to indicate that it
478 found more than one pointer DECL or that it found an
479 indirection. */
480 return NULL_TREE;
481 }
482
483 return decl;
484 }
485
486 /* Create a new temporary name with PREFIX. Returns an identifier. */
487
488 static GTY(()) unsigned int tmp_var_id_num;
489
490 tree
491 create_tmp_var_name (const char *prefix)
492 {
493 char *tmp_name;
494
495 if (prefix)
496 {
497 char *preftmp = ASTRDUP (prefix);
498
499 remove_suffix (preftmp, strlen (preftmp));
500 prefix = preftmp;
501 }
502
503 ASM_FORMAT_PRIVATE_NAME (tmp_name, prefix ? prefix : "T", tmp_var_id_num++);
504 return get_identifier (tmp_name);
505 }
506
507
508 /* Create a new temporary variable declaration of type TYPE.
509 Does NOT push it into the current binding. */
510
511 tree
512 create_tmp_var_raw (tree type, const char *prefix)
513 {
514 tree tmp_var;
515 tree new_type;
516
517 /* Make the type of the variable writable. */
518 new_type = build_type_variant (type, 0, 0);
519 TYPE_ATTRIBUTES (new_type) = TYPE_ATTRIBUTES (type);
520
521 tmp_var = build_decl (VAR_DECL, prefix ? create_tmp_var_name (prefix) : NULL,
522 type);
523
524 /* The variable was declared by the compiler. */
525 DECL_ARTIFICIAL (tmp_var) = 1;
526 /* And we don't want debug info for it. */
527 DECL_IGNORED_P (tmp_var) = 1;
528
529 /* Make the variable writable. */
530 TREE_READONLY (tmp_var) = 0;
531
532 DECL_EXTERNAL (tmp_var) = 0;
533 TREE_STATIC (tmp_var) = 0;
534 TREE_USED (tmp_var) = 1;
535
536 return tmp_var;
537 }
538
539 /* Create a new temporary variable declaration of type TYPE. DOES push the
540 variable into the current binding. Further, assume that this is called
541 only from gimplification or optimization, at which point the creation of
542 certain types is a bug. */
543
544 tree
545 create_tmp_var (tree type, const char *prefix)
546 {
547 tree tmp_var;
548
549 /* We don't allow types that are addressable (meaning we can't make copies),
550 or incomplete. We also used to reject variable-sized objects here, but
551 now support those for which a constant upper bound can be obtained.
552 The processing for variable sizes is performed in gimple_add_tmp_var,
553 the point at which it really matters and which may also be reached via
554 paths not going through this function, e.g. after calls to create_tmp_var_raw. */
555 gcc_assert (!TREE_ADDRESSABLE (type) && COMPLETE_TYPE_P (type));
556
557 tmp_var = create_tmp_var_raw (type, prefix);
558 gimple_add_tmp_var (tmp_var);
559 return tmp_var;
560 }
561
562 /* Create a temporary with a name derived from VAL. Subroutine of
563 lookup_tmp_var; nobody else should call this function. */
564
565 static inline tree
566 create_tmp_from_val (tree val)
567 {
568 return create_tmp_var (TREE_TYPE (val), get_name (val));
569 }
570
571 /* Create a temporary to hold the value of VAL. If IS_FORMAL, try to reuse
572 an existing expression temporary. */
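/* For example, when optimizing, two occurrences of the side-effect-free
   expression a + b gimplified as formal temporaries share a single
   temporary; at -O0 each occurrence gets its own temporary instead. */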
573
574 static tree
575 lookup_tmp_var (tree val, bool is_formal)
576 {
577 tree ret;
578
579 /* If not optimizing, never really reuse a temporary. local-alloc
580 won't allocate any variable that is used in more than one basic
581 block, which means it will go into memory, causing much extra
582 work in reload and final and poorer code generation, outweighing
583 the extra memory allocation here. */
584 if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
585 ret = create_tmp_from_val (val);
586 else
587 {
588 elt_t elt, *elt_p;
589 void **slot;
590
591 elt.val = val;
592 if (gimplify_ctxp->temp_htab == NULL)
593 gimplify_ctxp->temp_htab
594 = htab_create (1000, gimple_tree_hash, gimple_tree_eq, free);
595 slot = htab_find_slot (gimplify_ctxp->temp_htab, (void *)&elt, INSERT);
596 if (*slot == NULL)
597 {
598 elt_p = XNEW (elt_t);
599 elt_p->val = val;
600 elt_p->temp = ret = create_tmp_from_val (val);
601 *slot = (void *) elt_p;
602 }
603 else
604 {
605 elt_p = (elt_t *) *slot;
606 ret = elt_p->temp;
607 }
608 }
609
610 if (is_formal)
611 DECL_GIMPLE_FORMAL_TEMP_P (ret) = 1;
612
613 return ret;
614 }
615
616
617 /* Return true if T is a CALL_EXPR or an expression that can be
618 assigned to a temporary. Note that this predicate should only be
619 used during gimplification. See the rationale for this in
620 gimplify_modify_expr. */
621
622 static bool
623 is_gimple_formal_tmp_or_call_rhs (tree t)
624 {
625 return TREE_CODE (t) == CALL_EXPR || is_gimple_formal_tmp_rhs (t);
626 }
627
628 /* Returns true iff T is a valid RHS for an assignment to a renamed
629 user -- or front-end generated artificial -- variable. */
630
631 static bool
632 is_gimple_reg_or_call_rhs (tree t)
633 {
634 /* If the RHS of the MODIFY_EXPR may throw or make a nonlocal goto
635 and the LHS is a user variable, then we need to introduce a formal
636 temporary. This way the optimizers can determine that the user
637 variable is only modified if evaluation of the RHS does not throw.
638
639 Don't force a temp of a non-renamable type; the copy could be
640 arbitrarily expensive. Instead we will generate a VDEF for
641 the assignment. */
642
643 if (is_gimple_reg_type (TREE_TYPE (t))
644 && ((TREE_CODE (t) == CALL_EXPR && TREE_SIDE_EFFECTS (t))
645 || tree_could_throw_p (t)))
646 return false;
647
648 return is_gimple_formal_tmp_or_call_rhs (t);
649 }
650
651 /* Return true if T is a valid memory RHS or a CALL_EXPR. Note that
652 this predicate should only be used during gimplification. See the
653 rationale for this in gimplify_modify_expr. */
654
655 static bool
656 is_gimple_mem_or_call_rhs (tree t)
657 {
658 /* If we're dealing with a renamable type, either source or dest must be
659 a renamed variable. */
660 if (is_gimple_reg_type (TREE_TYPE (t)))
661 return is_gimple_val (t);
662 else
663 return is_gimple_formal_tmp_or_call_rhs (t);
664 }
665
666
667 /* Returns a formal temporary variable initialized with VAL. PRE_P is as
668 in gimplify_expr. Only use this function if:
669
670 1) The value of the unfactored expression represented by VAL will not
671 change between the initialization and use of the temporary, and
672 2) The temporary will not be otherwise modified.
673
674 For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
675 and #2 means it is inappropriate for && temps.
676
677 For other cases, use get_initialized_tmp_var instead. */
678
679 static tree
680 internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
681 bool is_formal)
682 {
683 tree t, mod;
684
685 /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
686 can create an INIT_EXPR and convert it into a GIMPLE_CALL below. */
687 gimplify_expr (&val, pre_p, post_p, is_gimple_formal_tmp_or_call_rhs,
688 fb_rvalue);
689
690 t = lookup_tmp_var (val, is_formal);
691
692 if (is_formal)
693 {
694 tree u = find_single_pointer_decl (val);
695
696 if (u && TREE_CODE (u) == VAR_DECL && DECL_BASED_ON_RESTRICT_P (u))
697 u = DECL_GET_RESTRICT_BASE (u);
698 if (u && TYPE_RESTRICT (TREE_TYPE (u)))
699 {
700 if (DECL_BASED_ON_RESTRICT_P (t))
701 gcc_assert (u == DECL_GET_RESTRICT_BASE (t));
702 else
703 {
704 DECL_BASED_ON_RESTRICT_P (t) = 1;
705 SET_DECL_RESTRICT_BASE (t, u);
706 }
707 }
708 }
709
710 if (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
711 || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
712 DECL_GIMPLE_REG_P (t) = 1;
713
714 mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));
715
716 if (EXPR_HAS_LOCATION (val))
717 SET_EXPR_LOCUS (mod, EXPR_LOCUS (val));
718 else
719 SET_EXPR_LOCATION (mod, input_location);
720
721 /* gimplify_modify_expr might want to reduce this further. */
722 gimplify_and_add (mod, pre_p);
723 ggc_free (mod);
724
725 /* If we're gimplifying into ssa, gimplify_modify_expr will have
726 given our temporary an SSA name. Find and return it. */
727 if (gimplify_ctxp->into_ssa)
728 {
729 gimple last = gimple_seq_last_stmt (*pre_p);
730 t = gimple_get_lhs (last);
731 }
732
733 return t;
734 }
735
736 /* Returns a formal temporary variable initialized with VAL. PRE_P
737 points to a sequence where side-effects needed to compute VAL should be
738 stored. */
739
740 tree
741 get_formal_tmp_var (tree val, gimple_seq *pre_p)
742 {
743 return internal_get_tmp_var (val, pre_p, NULL, true);
744 }
745
746 /* Returns a temporary variable initialized with VAL. PRE_P and POST_P
747 are as in gimplify_expr. */
748
749 tree
750 get_initialized_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p)
751 {
752 return internal_get_tmp_var (val, pre_p, post_p, false);
753 }
754
755 /* Declares all the variables in VARS in SCOPE. If DEBUG_INFO is
756 true, generate debug info for them; otherwise don't. */
757
758 void
759 declare_vars (tree vars, gimple scope, bool debug_info)
760 {
761 tree last = vars;
762 if (last)
763 {
764 tree temps, block;
765
766 gcc_assert (gimple_code (scope) == GIMPLE_BIND);
767
768 temps = nreverse (last);
769
770 block = gimple_bind_block (scope);
771 gcc_assert (!block || TREE_CODE (block) == BLOCK);
772 if (!block || !debug_info)
773 {
774 TREE_CHAIN (last) = gimple_bind_vars (scope);
775 gimple_bind_set_vars (scope, temps);
776 }
777 else
778 {
779 /* We need to attach the nodes both to the BIND_EXPR and to its
780 associated BLOCK for debugging purposes. The key point here
781 is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
782 is a subchain of the BIND_EXPR_VARS of the BIND_EXPR. */
783 if (BLOCK_VARS (block))
784 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
785 else
786 {
787 gimple_bind_set_vars (scope,
788 chainon (gimple_bind_vars (scope), temps));
789 BLOCK_VARS (block) = temps;
790 }
791 }
792 }
793 }
794
795 /* For VAR a VAR_DECL of variable size, try to find a constant upper bound
796 for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly. Abort if
797 no such upper bound can be obtained. */
798
799 static void
800 force_constant_size (tree var)
801 {
802 /* The only attempt we make is by querying the maximum size of objects
803 of the variable's type. */
804
805 HOST_WIDE_INT max_size;
806
807 gcc_assert (TREE_CODE (var) == VAR_DECL);
808
809 max_size = max_int_size_in_bytes (TREE_TYPE (var));
810
811 gcc_assert (max_size >= 0);
812
813 DECL_SIZE_UNIT (var)
814 = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
815 DECL_SIZE (var)
816 = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
817 }
818
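/* Register the temporary variable TMP with the function being gimplified:
   chain it onto the gimplify context's temporaries (marking it local to
   the nearest enclosing parallel when inside an OpenMP context), or record
   it directly in CFUN, or, for nested functions, declare it in the
   outermost GIMPLE_BIND of the function body. */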
819 void
820 gimple_add_tmp_var (tree tmp)
821 {
822 gcc_assert (!TREE_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));
823
824 /* Later processing assumes that the object size is constant, which might
825 not be true at this point. Force the use of a constant upper bound in
826 this case. */
827 if (!host_integerp (DECL_SIZE_UNIT (tmp), 1))
828 force_constant_size (tmp);
829
830 DECL_CONTEXT (tmp) = current_function_decl;
831 DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;
832
833 if (gimplify_ctxp)
834 {
835 TREE_CHAIN (tmp) = gimplify_ctxp->temps;
836 gimplify_ctxp->temps = tmp;
837
838 /* Mark temporaries local within the nearest enclosing parallel. */
839 if (gimplify_omp_ctxp)
840 {
841 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
842 while (ctx && ctx->region_type == ORT_WORKSHARE)
843 ctx = ctx->outer_context;
844 if (ctx)
845 omp_add_variable (ctx, tmp, GOVD_LOCAL | GOVD_SEEN);
846 }
847 }
848 else if (cfun)
849 record_vars (tmp);
850 else
851 {
852 gimple_seq body_seq;
853
854 /* This case is for nested functions. We need to expose the locals
855 they create. */
856 body_seq = gimple_body (current_function_decl);
857 declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
858 }
859 }
860
861 /* Determines whether to assign a location to the statement GS. */
862
863 static bool
864 should_carry_location_p (gimple gs)
865 {
866 /* Don't emit a line note for a label. We particularly don't want to
867 emit one for the break label, since it doesn't actually correspond
868 to the beginning of the loop/switch. */
869 if (gimple_code (gs) == GIMPLE_LABEL)
870 return false;
871
872 return true;
873 }
874
875 /* Same, but for a tree. */
876
877 static bool
878 tree_should_carry_location_p (const_tree stmt)
879 {
880 /* Don't emit a line note for a label. We particularly don't want to
881 emit one for the break label, since it doesn't actually correspond
882 to the beginning of the loop/switch. */
883 if (TREE_CODE (stmt) == LABEL_EXPR)
884 return false;
885
886 /* Do not annotate empty statements, since it confuses gcov. */
887 if (!TREE_SIDE_EFFECTS (stmt))
888 return false;
889
890 return true;
891 }
892
893 /* Return true if a location should not be emitted for this statement
894 by annotate_one_with_location. */
895
896 static inline bool
897 gimple_do_not_emit_location_p (gimple g)
898 {
899 return gimple_plf (g, GF_PLF_1);
900 }
901
902 /* Mark statement G so a location will not be emitted by
903 annotate_one_with_location. */
904
905 static inline void
906 gimple_set_do_not_emit_location (gimple g)
907 {
908 /* The PLF flags are initialized to 0 when a new tuple is created,
909 so no need to initialize it anywhere. */
910 gimple_set_plf (g, GF_PLF_1, true);
911 }
912
913 /* Set the location for gimple statement GS to LOCATION. */
914
915 static void
916 annotate_one_with_location (gimple gs, location_t location)
917 {
918 if (!gimple_has_location (gs)
919 && !gimple_do_not_emit_location_p (gs)
920 && should_carry_location_p (gs))
921 gimple_set_location (gs, location);
922 }
923
924 /* Same, but for tree T. */
925
926 static void
927 tree_annotate_one_with_location (tree t, location_t location)
928 {
929 if (CAN_HAVE_LOCATION_P (t)
930 && ! EXPR_HAS_LOCATION (t) && tree_should_carry_location_p (t))
931 SET_EXPR_LOCATION (t, location);
932 }
933
934
935 /* Set LOCATION for all the statements after iterator GSI in sequence
936 SEQ. If GSI is pointing to the end of the sequence, start with the
937 first statement in SEQ. */
938
939 static void
940 annotate_all_with_location_after (gimple_seq seq, gimple_stmt_iterator gsi,
941 location_t location)
942 {
943 if (gsi_end_p (gsi))
944 gsi = gsi_start (seq);
945 else
946 gsi_next (&gsi);
947
948 for (; !gsi_end_p (gsi); gsi_next (&gsi))
949 annotate_one_with_location (gsi_stmt (gsi), location);
950 }
951
952
953 /* Set the location for all the statements in a sequence STMT_P to LOCATION. */
954
955 void
956 annotate_all_with_location (gimple_seq stmt_p, location_t location)
957 {
958 gimple_stmt_iterator i;
959
960 if (gimple_seq_empty_p (stmt_p))
961 return;
962
963 for (i = gsi_start (stmt_p); !gsi_end_p (i); gsi_next (&i))
964 {
965 gimple gs = gsi_stmt (i);
966 annotate_one_with_location (gs, location);
967 }
968 }
969
970 /* Same, but for statement or statement list in *STMT_P. */
971
972 void
973 tree_annotate_all_with_location (tree *stmt_p, location_t location)
974 {
975 tree_stmt_iterator i;
976
977 if (!*stmt_p)
978 return;
979
980 for (i = tsi_start (*stmt_p); !tsi_end_p (i); tsi_next (&i))
981 {
982 tree t = tsi_stmt (i);
983
984 /* Assuming we've already been gimplified, we shouldn't
985 see nested chaining constructs anymore. */
986 gcc_assert (TREE_CODE (t) != STATEMENT_LIST
987 && TREE_CODE (t) != COMPOUND_EXPR);
988
989 tree_annotate_one_with_location (t, location);
990 }
991 }
992
993
994 /* Similar to copy_tree_r() but do not copy SAVE_EXPR or TARGET_EXPR nodes.
995 These nodes model computations that should only be done once. If we
996 were to unshare something like SAVE_EXPR(i++), the gimplification
997 process would create wrong code. */
998
999 static tree
1000 mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
1001 {
1002 enum tree_code code = TREE_CODE (*tp);
1003 /* Don't unshare types, decls, constants and SAVE_EXPR nodes. */
1004 if (TREE_CODE_CLASS (code) == tcc_type
1005 || TREE_CODE_CLASS (code) == tcc_declaration
1006 || TREE_CODE_CLASS (code) == tcc_constant
1007 || code == SAVE_EXPR || code == TARGET_EXPR
1008 /* We can't do anything sensible with a BLOCK used as an expression,
1009 but we also can't just die when we see it because of non-expression
1010 uses. So just avert our eyes and cross our fingers. Silly Java. */
1011 || code == BLOCK)
1012 *walk_subtrees = 0;
1013 else
1014 {
1015 gcc_assert (code != BIND_EXPR);
1016 copy_tree_r (tp, walk_subtrees, data);
1017 }
1018
1019 return NULL_TREE;
1020 }
1021
1022 /* Callback for walk_tree to unshare most of the shared trees rooted at
1023 *TP. If *TP has been visited already (i.e., TREE_VISITED (*TP) == 1),
1024 then *TP is deep copied by calling copy_tree_r.
1025
1026 This unshares the same trees as copy_tree_r with the exception of
1027 SAVE_EXPR nodes. These nodes model computations that should only be
1028 done once. If we were to unshare something like SAVE_EXPR(i++), the
1029 gimplification process would create wrong code. */
1030
1031 static tree
1032 copy_if_shared_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
1033 void *data ATTRIBUTE_UNUSED)
1034 {
1035 tree t = *tp;
1036 enum tree_code code = TREE_CODE (t);
1037
1038 /* Skip types, decls, and constants. But we do want to look at their
1039 types and the bounds of types. Mark them as visited so we properly
1040 unmark their subtrees on the unmark pass. If we've already seen them,
1041 don't look down further. */
1042 if (TREE_CODE_CLASS (code) == tcc_type
1043 || TREE_CODE_CLASS (code) == tcc_declaration
1044 || TREE_CODE_CLASS (code) == tcc_constant)
1045 {
1046 if (TREE_VISITED (t))
1047 *walk_subtrees = 0;
1048 else
1049 TREE_VISITED (t) = 1;
1050 }
1051
1052 /* If this node has been visited already, unshare it and don't look
1053 any deeper. */
1054 else if (TREE_VISITED (t))
1055 {
1056 walk_tree (tp, mostly_copy_tree_r, NULL, NULL);
1057 *walk_subtrees = 0;
1058 }
1059
1060 /* Otherwise, mark the tree as visited and keep looking. */
1061 else
1062 TREE_VISITED (t) = 1;
1063
1064 return NULL_TREE;
1065 }
1066
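/* Callback for walk_tree to clear the TREE_VISITED flag set by
   copy_if_shared_r. Stop descending below any node that is already
   unvisited. */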
1067 static tree
1068 unmark_visited_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
1069 void *data ATTRIBUTE_UNUSED)
1070 {
1071 if (TREE_VISITED (*tp))
1072 TREE_VISITED (*tp) = 0;
1073 else
1074 *walk_subtrees = 0;
1075
1076 return NULL_TREE;
1077 }
1078
1079 /* Unshare all the trees in BODY_P, a pointer into the body of FNDECL, and the
1080 bodies of any nested functions if we are unsharing the entire body of
1081 FNDECL. */
1082
1083 static void
1084 unshare_body (tree *body_p, tree fndecl)
1085 {
1086 struct cgraph_node *cgn = cgraph_node (fndecl);
1087
1088 walk_tree (body_p, copy_if_shared_r, NULL, NULL);
1089 if (body_p == &DECL_SAVED_TREE (fndecl))
1090 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
1091 unshare_body (&DECL_SAVED_TREE (cgn->decl), cgn->decl);
1092 }
1093
1094 /* Likewise, but mark all trees as not visited. */
1095
1096 static void
1097 unvisit_body (tree *body_p, tree fndecl)
1098 {
1099 struct cgraph_node *cgn = cgraph_node (fndecl);
1100
1101 walk_tree (body_p, unmark_visited_r, NULL, NULL);
1102 if (body_p == &DECL_SAVED_TREE (fndecl))
1103 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
1104 unvisit_body (&DECL_SAVED_TREE (cgn->decl), cgn->decl);
1105 }
1106
1107 /* Unconditionally make an unshared copy of EXPR. This is used for
1108 stored expressions which span multiple functions, such as BINFO_VTABLE,
1109 as the normal unsharing process can't tell that they're shared. */
1110
1111 tree
1112 unshare_expr (tree expr)
1113 {
1114 walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
1115 return expr;
1116 }
1117 \f
1118 /* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
1119 contain statements and have a value. Assign its value to a temporary
1120 and give it void_type_node. Returns the temporary, or NULL_TREE if
1121 WRAPPER was already void. */
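/* For instance (the exact trees depend on the front end), for a statement
   expression used as an initializer,

     int x = ({ foo (); bar (); });

   the wrappers are given void type and either the pending assignment is
   pushed down onto the last value, roughly { foo (); x = bar (); }, or,
   when no assignment was supplied in TEMP, a new "retval" temporary is
   created, initialized with that value and returned. */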
1122
1123 tree
1124 voidify_wrapper_expr (tree wrapper, tree temp)
1125 {
1126 tree type = TREE_TYPE (wrapper);
1127 if (type && !VOID_TYPE_P (type))
1128 {
1129 tree *p;
1130
1131 /* Set p to point to the body of the wrapper. Loop until we find
1132 something that isn't a wrapper. */
1133 for (p = &wrapper; p && *p; )
1134 {
1135 switch (TREE_CODE (*p))
1136 {
1137 case BIND_EXPR:
1138 TREE_SIDE_EFFECTS (*p) = 1;
1139 TREE_TYPE (*p) = void_type_node;
1140 /* For a BIND_EXPR, the body is operand 1. */
1141 p = &BIND_EXPR_BODY (*p);
1142 break;
1143
1144 case CLEANUP_POINT_EXPR:
1145 case TRY_FINALLY_EXPR:
1146 case TRY_CATCH_EXPR:
1147 TREE_SIDE_EFFECTS (*p) = 1;
1148 TREE_TYPE (*p) = void_type_node;
1149 p = &TREE_OPERAND (*p, 0);
1150 break;
1151
1152 case STATEMENT_LIST:
1153 {
1154 tree_stmt_iterator i = tsi_last (*p);
1155 TREE_SIDE_EFFECTS (*p) = 1;
1156 TREE_TYPE (*p) = void_type_node;
1157 p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
1158 }
1159 break;
1160
1161 case COMPOUND_EXPR:
1162 /* Advance to the last statement. Set all container types to void. */
1163 for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
1164 {
1165 TREE_SIDE_EFFECTS (*p) = 1;
1166 TREE_TYPE (*p) = void_type_node;
1167 }
1168 break;
1169
1170 default:
1171 goto out;
1172 }
1173 }
1174
1175 out:
1176 if (p == NULL || IS_EMPTY_STMT (*p))
1177 temp = NULL_TREE;
1178 else if (temp)
1179 {
1180 /* The wrapper is on the RHS of an assignment that we're pushing
1181 down. */
1182 gcc_assert (TREE_CODE (temp) == INIT_EXPR
1183 || TREE_CODE (temp) == MODIFY_EXPR);
1184 TREE_OPERAND (temp, 1) = *p;
1185 *p = temp;
1186 }
1187 else
1188 {
1189 temp = create_tmp_var (type, "retval");
1190 *p = build2 (INIT_EXPR, type, temp, *p);
1191 }
1192
1193 return temp;
1194 }
1195
1196 return NULL_TREE;
1197 }
1198
1199 /* Prepare calls to builtins to SAVE and RESTORE the stack as well as
1200 a temporary through which they communicate. */
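/* The calls returned in *SAVE and *RESTORE correspond roughly to

     saved_stack = __builtin_stack_save ();
     ...
     __builtin_stack_restore (saved_stack);

   with SAVED_STACK being the pointer temporary created below. */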
1201
1202 static void
1203 build_stack_save_restore (gimple *save, gimple *restore)
1204 {
1205 tree tmp_var;
1206
1207 *save = gimple_build_call (implicit_built_in_decls[BUILT_IN_STACK_SAVE], 0);
1208 tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
1209 gimple_call_set_lhs (*save, tmp_var);
1210
1211 *restore = gimple_build_call (implicit_built_in_decls[BUILT_IN_STACK_RESTORE],
1212 1, tmp_var);
1213 }
1214
1215 /* Gimplify a BIND_EXPR. Just voidify and recurse. */
1216
1217 static enum gimplify_status
1218 gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
1219 {
1220 tree bind_expr = *expr_p;
1221 bool old_save_stack = gimplify_ctxp->save_stack;
1222 tree t;
1223 gimple gimple_bind;
1224 gimple_seq body;
1225
1226 tree temp = voidify_wrapper_expr (bind_expr, NULL);
1227
1228 /* Mark variables seen in this bind expr. */
1229 for (t = BIND_EXPR_VARS (bind_expr); t ; t = TREE_CHAIN (t))
1230 {
1231 if (TREE_CODE (t) == VAR_DECL)
1232 {
1233 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
1234
1235 /* Mark variable as local. */
1236 if (ctx && !is_global_var (t)
1237 && (! DECL_SEEN_IN_BIND_EXPR_P (t)
1238 || splay_tree_lookup (ctx->variables,
1239 (splay_tree_key) t) == NULL))
1240 omp_add_variable (gimplify_omp_ctxp, t, GOVD_LOCAL | GOVD_SEEN);
1241
1242 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
1243 }
1244
1245 /* Preliminarily mark non-addressed complex variables as eligible
1246 for promotion to gimple registers. We'll transform their uses
1247 as we find them. */
1248 if ((TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
1249 || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
1250 && !TREE_THIS_VOLATILE (t)
1251 && (TREE_CODE (t) == VAR_DECL && !DECL_HARD_REGISTER (t))
1252 && !needs_to_live_in_memory (t))
1253 DECL_GIMPLE_REG_P (t) = 1;
1254 }
1255
1256 gimple_bind = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
1257 BIND_EXPR_BLOCK (bind_expr));
1258 gimple_push_bind_expr (gimple_bind);
1259
1260 gimplify_ctxp->save_stack = false;
1261
1262 /* Gimplify the body into the GIMPLE_BIND tuple's body. */
1263 body = NULL;
1264 gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
1265 gimple_bind_set_body (gimple_bind, body);
1266
1267 if (gimplify_ctxp->save_stack)
1268 {
1269 gimple stack_save, stack_restore, gs;
1270 gimple_seq cleanup, new_body;
1271
1272 /* Save stack on entry and restore it on exit. Add a try_finally
1273 block to achieve this. Note that mudflap depends on the
1274 format of the emitted code: see mx_register_decls(). */
1275 build_stack_save_restore (&stack_save, &stack_restore);
1276
1277 cleanup = new_body = NULL;
1278 gimplify_seq_add_stmt (&cleanup, stack_restore);
1279 gs = gimple_build_try (gimple_bind_body (gimple_bind), cleanup,
1280 GIMPLE_TRY_FINALLY);
1281
1282 gimplify_seq_add_stmt (&new_body, stack_save);
1283 gimplify_seq_add_stmt (&new_body, gs);
1284 gimple_bind_set_body (gimple_bind, new_body);
1285 }
1286
1287 gimplify_ctxp->save_stack = old_save_stack;
1288 gimple_pop_bind_expr ();
1289
1290 gimplify_seq_add_stmt (pre_p, gimple_bind);
1291
1292 if (temp)
1293 {
1294 *expr_p = temp;
1295 return GS_OK;
1296 }
1297
1298 *expr_p = NULL_TREE;
1299 return GS_ALL_DONE;
1300 }
1301
1302 /* Gimplify a RETURN_EXPR. If the expression to be returned is not a
1303 GIMPLE value, it is assigned to a new temporary and the statement is
1304 re-written to return the temporary.
1305
1306 PRE_P points to the sequence where side effects that must happen before
1307 STMT should be stored. */
1308
1309 static enum gimplify_status
1310 gimplify_return_expr (tree stmt, gimple_seq *pre_p)
1311 {
1312 gimple ret;
1313 tree ret_expr = TREE_OPERAND (stmt, 0);
1314 tree result_decl, result;
1315
1316 if (ret_expr == error_mark_node)
1317 return GS_ERROR;
1318
1319 if (!ret_expr
1320 || TREE_CODE (ret_expr) == RESULT_DECL
1321 || ret_expr == error_mark_node)
1322 {
1323 gimple ret = gimple_build_return (ret_expr);
1324 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1325 gimplify_seq_add_stmt (pre_p, ret);
1326 return GS_ALL_DONE;
1327 }
1328
1329 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
1330 result_decl = NULL_TREE;
1331 else
1332 {
1333 result_decl = TREE_OPERAND (ret_expr, 0);
1334
1335 /* See through a return by reference. */
1336 if (TREE_CODE (result_decl) == INDIRECT_REF)
1337 result_decl = TREE_OPERAND (result_decl, 0);
1338
1339 gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
1340 || TREE_CODE (ret_expr) == INIT_EXPR)
1341 && TREE_CODE (result_decl) == RESULT_DECL);
1342 }
1343
1344 /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
1345 Recall that aggregate_value_p is FALSE for any aggregate type that is
1346 returned in registers. If we're returning values in registers, then
1347 we don't want to extend the lifetime of the RESULT_DECL, particularly
1348 across another call. In addition, for those aggregates for which
1349 hard_function_value generates a PARALLEL, we'll die during normal
1350 expansion of structure assignments; there's special code in expand_return
1351 to handle this case that does not exist in expand_expr. */
1352 if (!result_decl
1353 || aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
1354 result = result_decl;
1355 else if (gimplify_ctxp->return_temp)
1356 result = gimplify_ctxp->return_temp;
1357 else
1358 {
1359 result = create_tmp_var (TREE_TYPE (result_decl), NULL);
1360 if (TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
1361 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
1362 DECL_GIMPLE_REG_P (result) = 1;
1363
1364 /* ??? With complex control flow (usually involving abnormal edges),
1365 we can wind up warning about an uninitialized value for this. Due
1366 to how this variable is constructed and initialized, this is never
1367 true. Give up and never warn. */
1368 TREE_NO_WARNING (result) = 1;
1369
1370 gimplify_ctxp->return_temp = result;
1371 }
1372
1373 /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
1374 Then gimplify the whole thing. */
1375 if (result != result_decl)
1376 TREE_OPERAND (ret_expr, 0) = result;
1377
1378 gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);
1379
1380 ret = gimple_build_return (result);
1381 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1382 gimplify_seq_add_stmt (pre_p, ret);
1383
1384 return GS_ALL_DONE;
1385 }
1386
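/* Lower the variable-sized variable DECL: gimplify its size, allocate its
   storage with a call to __builtin_alloca, and arrange (via DECL_VALUE_EXPR)
   for every later use of DECL to be rewritten as an indirection through a
   new pointer temporary. */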
1387 static void
1388 gimplify_vla_decl (tree decl, gimple_seq *seq_p)
1389 {
1390 /* This is a variable-sized decl. Simplify its size and mark it
1391 for deferred expansion. Note that mudflap depends on the format
1392 of the emitted code: see mx_register_decls(). */
1393 tree t, addr, ptr_type;
1394
1395 gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
1396 gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);
1397
1398 /* All occurrences of this decl in final gimplified code will be
1399 replaced by indirection. Setting DECL_VALUE_EXPR does two
1400 things: First, it lets the rest of the gimplifier know what
1401 replacement to use. Second, it lets the debug info know
1402 where to find the value. */
1403 ptr_type = build_pointer_type (TREE_TYPE (decl));
1404 addr = create_tmp_var (ptr_type, get_name (decl));
1405 DECL_IGNORED_P (addr) = 0;
1406 t = build_fold_indirect_ref (addr);
1407 SET_DECL_VALUE_EXPR (decl, t);
1408 DECL_HAS_VALUE_EXPR_P (decl) = 1;
1409
1410 t = built_in_decls[BUILT_IN_ALLOCA];
1411 t = build_call_expr (t, 1, DECL_SIZE_UNIT (decl));
1412 t = fold_convert (ptr_type, t);
1413 t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
1414
1415 gimplify_and_add (t, seq_p);
1416
1417 /* Indicate that we need to restore the stack level when the
1418 enclosing BIND_EXPR is exited. */
1419 gimplify_ctxp->save_stack = true;
1420 }
1421
1422
1423 /* Gimplifies a DECL_EXPR node *STMT_P by making any necessary allocation
1424 and initialization explicit. */
1425
1426 static enum gimplify_status
1427 gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
1428 {
1429 tree stmt = *stmt_p;
1430 tree decl = DECL_EXPR_DECL (stmt);
1431
1432 *stmt_p = NULL_TREE;
1433
1434 if (TREE_TYPE (decl) == error_mark_node)
1435 return GS_ERROR;
1436
1437 if ((TREE_CODE (decl) == TYPE_DECL
1438 || TREE_CODE (decl) == VAR_DECL)
1439 && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
1440 gimplify_type_sizes (TREE_TYPE (decl), seq_p);
1441
1442 if (TREE_CODE (decl) == VAR_DECL && !DECL_EXTERNAL (decl))
1443 {
1444 tree init = DECL_INITIAL (decl);
1445
1446 if (TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
1447 || (!TREE_STATIC (decl)
1448 && flag_stack_check == GENERIC_STACK_CHECK
1449 && compare_tree_int (DECL_SIZE_UNIT (decl),
1450 STACK_CHECK_MAX_VAR_SIZE) > 0))
1451 gimplify_vla_decl (decl, seq_p);
1452
1453 if (init && init != error_mark_node)
1454 {
1455 if (!TREE_STATIC (decl))
1456 {
1457 DECL_INITIAL (decl) = NULL_TREE;
1458 init = build2 (INIT_EXPR, void_type_node, decl, init);
1459 gimplify_and_add (init, seq_p);
1460 ggc_free (init);
1461 }
1462 else
1463 /* We must still examine initializers for static variables
1464 as they may contain a label address. */
1465 walk_tree (&init, force_labels_r, NULL, NULL);
1466 }
1467
1468 /* Some front ends do not explicitly declare all anonymous
1469 artificial variables. We compensate here by declaring the
1470 variables, though it would be better if the front ends would
1471 explicitly declare them. */
1472 if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
1473 && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
1474 gimple_add_tmp_var (decl);
1475 }
1476
1477 return GS_ALL_DONE;
1478 }
1479
1480 /* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body
1481 and replacing the LOOP_EXPR with goto, but if the loop contains an
1482 EXIT_EXPR, we need to append a label for it to jump to. */
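/* Roughly, LOOP_EXPR <body> is lowered to

     start_label:
       body   (any EXIT_EXPR becomes a conditional goto to exit_label)
       goto start_label;
     exit_label:   (emitted only if the body contained an EXIT_EXPR)  */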
1483
1484 static enum gimplify_status
1485 gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
1486 {
1487 tree saved_label = gimplify_ctxp->exit_label;
1488 tree start_label = create_artificial_label ();
1489
1490 gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));
1491
1492 gimplify_ctxp->exit_label = NULL_TREE;
1493
1494 gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);
1495
1496 gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));
1497
1498 if (gimplify_ctxp->exit_label)
1499 gimplify_seq_add_stmt (pre_p, gimple_build_label (gimplify_ctxp->exit_label));
1500
1501 gimplify_ctxp->exit_label = saved_label;
1502
1503 *expr_p = NULL;
1504 return GS_ALL_DONE;
1505 }
1506
1507 /* Gimplifies a statement list onto a sequence. These may be created either
1508 by an enlightened front-end, or by shortcut_cond_expr. */
1509
1510 static enum gimplify_status
1511 gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
1512 {
1513 tree temp = voidify_wrapper_expr (*expr_p, NULL);
1514
1515 tree_stmt_iterator i = tsi_start (*expr_p);
1516
1517 while (!tsi_end_p (i))
1518 {
1519 gimplify_stmt (tsi_stmt_ptr (i), pre_p);
1520 tsi_delink (&i);
1521 }
1522
1523 if (temp)
1524 {
1525 *expr_p = temp;
1526 return GS_OK;
1527 }
1528
1529 return GS_ALL_DONE;
1530 }
1531
1532 /* Compare two case labels. Because the front end should already have
1533 made sure that case ranges do not overlap, it is enough to only compare
1534 the CASE_LOW values of each case label. */
1535
1536 static int
1537 compare_case_labels (const void *p1, const void *p2)
1538 {
1539 const_tree const case1 = *(const_tree const*)p1;
1540 const_tree const case2 = *(const_tree const*)p2;
1541
1542 /* The 'default' case label always goes first. */
1543 if (!CASE_LOW (case1))
1544 return -1;
1545 else if (!CASE_LOW (case2))
1546 return 1;
1547 else
1548 return tree_int_cst_compare (CASE_LOW (case1), CASE_LOW (case2));
1549 }
1550
1551
1552 /* Sort the case labels in LABEL_VEC in place in ascending order. */
1553
1554 void
1555 sort_case_labels (VEC(tree,heap)* label_vec)
1556 {
1557 size_t len = VEC_length (tree, label_vec);
1558 qsort (VEC_address (tree, label_vec), len, sizeof (tree),
1559 compare_case_labels);
1560 }
1561
1562
1563 /* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
1564 branch to. */
1565
1566 static enum gimplify_status
1567 gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
1568 {
1569 tree switch_expr = *expr_p;
1570 gimple_seq switch_body_seq = NULL;
1571 enum gimplify_status ret;
1572
1573 ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
1574 fb_rvalue);
1575 if (ret == GS_ERROR || ret == GS_UNHANDLED)
1576 return ret;
1577
1578 if (SWITCH_BODY (switch_expr))
1579 {
1580 VEC (tree,heap) *labels;
1581 VEC (tree,heap) *saved_labels;
1582 tree default_case = NULL_TREE;
1583 size_t i, len;
1584 gimple gimple_switch;
1585
1586 /* If someone can be bothered to fill in the labels, they can
1587 be bothered to null out the body too. */
1588 gcc_assert (!SWITCH_LABELS (switch_expr));
1589
1590 /* Save the old labels, get new ones from the body, then restore the old
1591 labels. Keep the gimplified switch body aside to append after the GIMPLE_SWITCH. */
1592 saved_labels = gimplify_ctxp->case_labels;
1593 gimplify_ctxp->case_labels = VEC_alloc (tree, heap, 8);
1594
1595 gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);
1596 labels = gimplify_ctxp->case_labels;
1597 gimplify_ctxp->case_labels = saved_labels;
1598
1599 i = 0;
1600 while (i < VEC_length (tree, labels))
1601 {
1602 tree elt = VEC_index (tree, labels, i);
1603 tree low = CASE_LOW (elt);
1604 bool remove_element = FALSE;
1605
1606 if (low)
1607 {
1608 /* Discard empty ranges. */
1609 tree high = CASE_HIGH (elt);
1610 if (high && tree_int_cst_lt (high, low))
1611 remove_element = TRUE;
1612 }
1613 else
1614 {
1615 /* The default case must be the last label in the list. */
1616 gcc_assert (!default_case);
1617 default_case = elt;
1618 remove_element = TRUE;
1619 }
1620
1621 if (remove_element)
1622 VEC_ordered_remove (tree, labels, i);
1623 else
1624 i++;
1625 }
1626 len = i;
1627
1628 if (!default_case)
1629 {
1630 gimple new_default;
1631
1632 /* If the switch has no default label, add one, so that we jump
1633 around the switch body. */
1634 default_case = build3 (CASE_LABEL_EXPR, void_type_node, NULL_TREE,
1635 NULL_TREE, create_artificial_label ());
1636 new_default = gimple_build_label (CASE_LABEL (default_case));
1637 gimplify_seq_add_stmt (&switch_body_seq, new_default);
1638 }
1639
1640 if (!VEC_empty (tree, labels))
1641 sort_case_labels (labels);
1642
1643 gimple_switch = gimple_build_switch_vec (SWITCH_COND (switch_expr),
1644 default_case, labels);
1645 gimplify_seq_add_stmt (pre_p, gimple_switch);
1646 gimplify_seq_add_seq (pre_p, switch_body_seq);
1647 VEC_free(tree, heap, labels);
1648 }
1649 else
1650 gcc_assert (SWITCH_LABELS (switch_expr));
1651
1652 return GS_ALL_DONE;
1653 }
1654
1655
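/* Gimplify a CASE_LABEL_EXPR. Record the label in the case label vector
   of the nearest enclosing gimplify context that has one (normally that of
   the enclosing SWITCH_EXPR) and emit a GIMPLE_LABEL for it. */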
1656 static enum gimplify_status
1657 gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
1658 {
1659 struct gimplify_ctx *ctxp;
1660 gimple gimple_label;
1661
1662 /* Invalid OpenMP programs can play Duff's Device type games with
1663 #pragma omp parallel. At least in the C front end, we don't
1664 detect such invalid branches until after gimplification. */
1665 for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
1666 if (ctxp->case_labels)
1667 break;
1668
1669 gimple_label = gimple_build_label (CASE_LABEL (*expr_p));
1670 VEC_safe_push (tree, heap, ctxp->case_labels, *expr_p);
1671 gimplify_seq_add_stmt (pre_p, gimple_label);
1672
1673 return GS_ALL_DONE;
1674 }
1675
1676 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
1677 if necessary. */
1678
1679 tree
1680 build_and_jump (tree *label_p)
1681 {
1682 if (label_p == NULL)
1683 /* If there's nowhere to jump, just fall through. */
1684 return NULL_TREE;
1685
1686 if (*label_p == NULL_TREE)
1687 {
1688 tree label = create_artificial_label ();
1689 *label_p = label;
1690 }
1691
1692 return build1 (GOTO_EXPR, void_type_node, *label_p);
1693 }
1694
1695 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
1696 This also involves building a label to jump to and communicating it to
1697 gimplify_loop_expr through gimplify_ctxp->exit_label. */
1698
1699 static enum gimplify_status
1700 gimplify_exit_expr (tree *expr_p)
1701 {
1702 tree cond = TREE_OPERAND (*expr_p, 0);
1703 tree expr;
1704
1705 expr = build_and_jump (&gimplify_ctxp->exit_label);
1706 expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
1707 *expr_p = expr;
1708
1709 return GS_OK;
1710 }
1711
1712 /* A helper function to be called via walk_tree. Mark all labels under *TP
1713 as being forced. To be called for DECL_INITIAL of static variables. */
1714
1715 tree
1716 force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1717 {
1718 if (TYPE_P (*tp))
1719 *walk_subtrees = 0;
1720 if (TREE_CODE (*tp) == LABEL_DECL)
1721 FORCED_LABEL (*tp) = 1;
1722
1723 return NULL_TREE;
1724 }
1725
1726 /* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is
1727 different from its canonical type, wrap the whole thing inside a
1728 NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
1729 type.
1730
1731 The canonical type of a COMPONENT_REF is the type of the field being
1732 referenced--unless the field is a bit-field which can be read directly
1733 in a smaller mode, in which case the canonical type is the
1734 sign-appropriate type corresponding to that mode. */
1735
1736 static void
1737 canonicalize_component_ref (tree *expr_p)
1738 {
1739 tree expr = *expr_p;
1740 tree type;
1741
1742 gcc_assert (TREE_CODE (expr) == COMPONENT_REF);
1743
1744 if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
1745 type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
1746 else
1747 type = TREE_TYPE (TREE_OPERAND (expr, 1));
1748
1749 /* One could argue that all the stuff below is not necessary for
1750 the non-bitfield case and declare it a FE error if type
1751 adjustment would be needed. */
1752 if (TREE_TYPE (expr) != type)
1753 {
1754 #ifdef ENABLE_TYPES_CHECKING
1755 tree old_type = TREE_TYPE (expr);
1756 #endif
1757 int type_quals;
1758
1759 /* We need to preserve qualifiers and propagate them from
1760 operand 0. */
1761 type_quals = TYPE_QUALS (type)
1762 | TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
1763 if (TYPE_QUALS (type) != type_quals)
1764 type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);
1765
1766 /* Set the type of the COMPONENT_REF to the underlying type. */
1767 TREE_TYPE (expr) = type;
1768
1769 #ifdef ENABLE_TYPES_CHECKING
1770 /* It is now a FE error, if the conversion from the canonical
1771 type to the original expression type is not useless. */
1772 gcc_assert (useless_type_conversion_p (old_type, type));
1773 #endif
1774 }
1775 }
1776
1777 /* If a NOP conversion is changing a pointer to array of foo to a pointer
1778 to foo, embed that change in the ADDR_EXPR by converting
1779 T array[U];
1780 (T *)&array
1781 ==>
1782 &array[L]
1783 where L is the lower bound. For simplicity, only do this for constant
1784 lower bound.
1785 The constraint is that the type of &array[L] is trivially convertible
1786 to T *. */
1787
1788 static void
1789 canonicalize_addr_expr (tree *expr_p)
1790 {
1791 tree expr = *expr_p;
1792 tree addr_expr = TREE_OPERAND (expr, 0);
1793 tree datype, ddatype, pddatype;
1794
1795 /* We simplify only conversions from an ADDR_EXPR to a pointer type. */
1796 if (!POINTER_TYPE_P (TREE_TYPE (expr))
1797 || TREE_CODE (addr_expr) != ADDR_EXPR)
1798 return;
1799
1800 /* The addr_expr type should be a pointer to an array. */
1801 datype = TREE_TYPE (TREE_TYPE (addr_expr));
1802 if (TREE_CODE (datype) != ARRAY_TYPE)
1803 return;
1804
1805 /* The pointer to element type shall be trivially convertible to
1806 the expression pointer type. */
1807 ddatype = TREE_TYPE (datype);
1808 pddatype = build_pointer_type (ddatype);
1809 if (!useless_type_conversion_p (pddatype, ddatype))
1810 return;
1811
1812 /* The lower bound and element sizes must be constant. */
1813 if (!TYPE_SIZE_UNIT (ddatype)
1814 || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
1815 || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
1816 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
1817 return;
1818
1819 /* All checks succeeded. Build a new node to merge the cast. */
1820 *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
1821 TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
1822 NULL_TREE, NULL_TREE);
1823 *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);
1824 }
1825
1826 /* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions
1827 underneath as appropriate. */
1828
1829 static enum gimplify_status
1830 gimplify_conversion (tree *expr_p)
1831 {
1832 tree tem;
1833 gcc_assert (CONVERT_EXPR_P (*expr_p));
1834
1835 /* Then strip away all but the outermost conversion. */
1836 STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));
1837
1838 /* And remove the outermost conversion if it's useless. */
1839 if (tree_ssa_useless_type_conversion (*expr_p))
1840 *expr_p = TREE_OPERAND (*expr_p, 0);
1841
1842 /* Attempt to avoid NOP_EXPR by producing a reference to a subtype.
1843 For example this folds (subclass *)&A into &A->subclass, avoiding
1844 the need for a separate statement. */
1845 if (CONVERT_EXPR_P (*expr_p)
1846 && POINTER_TYPE_P (TREE_TYPE (*expr_p))
1847 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (*expr_p, 0)))
1848 && (tem = maybe_fold_offset_to_address
1849 (TREE_OPERAND (*expr_p, 0),
1850 integer_zero_node, TREE_TYPE (*expr_p))) != NULL_TREE)
1851 *expr_p = tem;
1852
1853 /* If we still have a conversion at the toplevel,
1854 then canonicalize some constructs. */
1855 if (CONVERT_EXPR_P (*expr_p))
1856 {
1857 tree sub = TREE_OPERAND (*expr_p, 0);
1858
1859 /* If a NOP conversion is changing the type of a COMPONENT_REF
1860 expression, then canonicalize its type now in order to expose more
1861 redundant conversions. */
1862 if (TREE_CODE (sub) == COMPONENT_REF)
1863 canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));
1864
1865 /* If a NOP conversion is changing a pointer to array of foo
1866 to a pointer to foo, embed that change in the ADDR_EXPR. */
1867 else if (TREE_CODE (sub) == ADDR_EXPR)
1868 canonicalize_addr_expr (expr_p);
1869 }
1870
1871 /* If we have a conversion to a non-register type, force the
1872 use of a VIEW_CONVERT_EXPR instead. */
1873 if (!is_gimple_reg_type (TREE_TYPE (*expr_p)))
1874 *expr_p = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
1875 TREE_OPERAND (*expr_p, 0));
1876
1877 return GS_OK;
1878 }
1879
1880 /* Gimplify a VAR_DECL or PARM_DECL. Returns GS_OK if we expanded a
1881 DECL_VALUE_EXPR, and it's worth re-examining things. */
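/* For instance (an illustrative case, hypothetical names): a local
   variable captured by a nested function typically carries a
   DECL_VALUE_EXPR that is a COMPONENT_REF into the frame object,
   e.g. FRAME.x; substituting it here lets the reference be gimplified
   like any other compound lvalue. */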
1882
1883 static enum gimplify_status
1884 gimplify_var_or_parm_decl (tree *expr_p)
1885 {
1886 tree decl = *expr_p;
1887
1888 /* ??? If this is a local variable, and it has not been seen in any
1889 outer BIND_EXPR, then it's probably the result of a duplicate
1890 declaration, for which we've already issued an error. It would
1891 be really nice if the front end wouldn't leak these at all.
1892 Currently the only known culprit is C++ destructors, as seen
1893 in g++.old-deja/g++.jason/binding.C. */
1894 if (TREE_CODE (decl) == VAR_DECL
1895 && !DECL_SEEN_IN_BIND_EXPR_P (decl)
1896 && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
1897 && decl_function_context (decl) == current_function_decl)
1898 {
1899 gcc_assert (errorcount || sorrycount);
1900 return GS_ERROR;
1901 }
1902
1903 /* When within an OpenMP context, notice uses of variables. */
1904 if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
1905 return GS_ALL_DONE;
1906
1907 /* If the decl is an alias for another expression, substitute it now. */
1908 if (DECL_HAS_VALUE_EXPR_P (decl))
1909 {
1910 *expr_p = unshare_expr (DECL_VALUE_EXPR (decl));
1911 return GS_OK;
1912 }
1913
1914 return GS_ALL_DONE;
1915 }
1916
1917
1918 /* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
1919 node *EXPR_P.
1920
1921 compound_lval
1922 : min_lval '[' val ']'
1923 | min_lval '.' ID
1924 | compound_lval '[' val ']'
1925 | compound_lval '.' ID
1926
1927 This is not part of the original SIMPLE definition, which separates
1928 array and member references, but it seems reasonable to handle them
1929 together. Also, this way we don't run into problems with union
1930 aliasing; gcc requires that for accesses through a union to alias, the
1931 union reference must be explicit, which was not always the case when we
1932 were splitting up array and member refs.
1933
1934 PRE_P points to the sequence where side effects that must happen before
1935 *EXPR_P should be stored.
1936
1937 POST_P points to the sequence where side effects that must happen after
1938 *EXPR_P should be stored. */
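/* As a sketch on hypothetical source: for a reference such as a.b[i].c
   the loop below pushes the COMPONENT_REFs and the ARRAY_REF onto a
   stack; step 1 gimplifies any variable bounds, sizes or offsets, step 2
   gimplifies the base "a", and step 3 gimplifies the index "i" (and any
   other operands) from left to right. */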
1939
1940 static enum gimplify_status
1941 gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
1942 fallback_t fallback)
1943 {
1944 tree *p;
1945 VEC(tree,heap) *stack;
1946 enum gimplify_status ret = GS_OK, tret;
1947 int i;
1948
1949 /* Create a stack of the subexpressions so later we can walk them in
1950 order from inner to outer. */
1951 stack = VEC_alloc (tree, heap, 10);
1952
1953 /* We can handle anything that get_inner_reference can deal with. */
1954 for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
1955 {
1956 restart:
1957 /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs. */
1958 if (TREE_CODE (*p) == INDIRECT_REF)
1959 *p = fold_indirect_ref (*p);
1960
1961 if (handled_component_p (*p))
1962 ;
1963 /* Expand DECL_VALUE_EXPR now. In some cases that may expose
1964 additional COMPONENT_REFs. */
1965 else if ((TREE_CODE (*p) == VAR_DECL || TREE_CODE (*p) == PARM_DECL)
1966 && gimplify_var_or_parm_decl (p) == GS_OK)
1967 goto restart;
1968 else
1969 break;
1970
1971 VEC_safe_push (tree, heap, stack, *p);
1972 }
1973
1974 gcc_assert (VEC_length (tree, stack));
1975
1976 /* Now STACK is a stack of pointers to all the refs we've walked through
1977 and P points to the innermost expression.
1978
1979 Java requires that we elaborate nodes in source order. That
1980 means we must gimplify the inner expression followed by each of
1981 the indices, in order. But we can't gimplify the inner
1982 expression until we deal with any variable bounds, sizes, or
1983 positions in order to deal with PLACEHOLDER_EXPRs.
1984
1985 So we do this in three steps. First we deal with the annotations
1986 for any variables in the components, then we gimplify the base,
1987 then we gimplify any indices, from left to right. */
1988 for (i = VEC_length (tree, stack) - 1; i >= 0; i--)
1989 {
1990 tree t = VEC_index (tree, stack, i);
1991
1992 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
1993 {
1994 /* Gimplify the low bound and element type size and put them into
1995 the ARRAY_REF. If these values are set, they have already been
1996 gimplified. */
1997 if (TREE_OPERAND (t, 2) == NULL_TREE)
1998 {
1999 tree low = unshare_expr (array_ref_low_bound (t));
2000 if (!is_gimple_min_invariant (low))
2001 {
2002 TREE_OPERAND (t, 2) = low;
2003 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
2004 post_p, is_gimple_formal_tmp_reg,
2005 fb_rvalue);
2006 ret = MIN (ret, tret);
2007 }
2008 }
2009
2010 if (!TREE_OPERAND (t, 3))
2011 {
2012 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
2013 tree elmt_size = unshare_expr (array_ref_element_size (t));
2014 tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));
2015
2016 /* Divide the element size by the alignment of the element
2017 type (above). */
2018 elmt_size = size_binop (EXACT_DIV_EXPR, elmt_size, factor);
2019
2020 if (!is_gimple_min_invariant (elmt_size))
2021 {
2022 TREE_OPERAND (t, 3) = elmt_size;
2023 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
2024 post_p, is_gimple_formal_tmp_reg,
2025 fb_rvalue);
2026 ret = MIN (ret, tret);
2027 }
2028 }
2029 }
2030 else if (TREE_CODE (t) == COMPONENT_REF)
2031 {
2032 /* Set the field offset into T and gimplify it. */
2033 if (!TREE_OPERAND (t, 2))
2034 {
2035 tree offset = unshare_expr (component_ref_field_offset (t));
2036 tree field = TREE_OPERAND (t, 1);
2037 tree factor
2038 = size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);
2039
2040 /* Divide the offset by its alignment. */
2041 offset = size_binop (EXACT_DIV_EXPR, offset, factor);
2042
2043 if (!is_gimple_min_invariant (offset))
2044 {
2045 TREE_OPERAND (t, 2) = offset;
2046 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
2047 post_p, is_gimple_formal_tmp_reg,
2048 fb_rvalue);
2049 ret = MIN (ret, tret);
2050 }
2051 }
2052 }
2053 }
2054
2055 /* Step 2 is to gimplify the base expression. Make sure lvalue is set
2056 so as to match the min_lval predicate. Failure to do so may result
2057 in the creation of large aggregate temporaries. */
2058 tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
2059 fallback | fb_lvalue);
2060 ret = MIN (ret, tret);
2061
2062 /* And finally, the indices and operands to BIT_FIELD_REF. During this
2063 loop we also remove any useless conversions. */
2064 for (; VEC_length (tree, stack) > 0; )
2065 {
2066 tree t = VEC_pop (tree, stack);
2067
2068 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
2069 {
2070 /* Gimplify the dimension.
2071 Temporary fix for gcc.c-torture/execute/20040313-1.c.
2072 Gimplify non-constant array indices into a temporary
2073 variable.
2074 FIXME - The real fix is to gimplify post-modify
2075 expressions into a minimal gimple lvalue. However, that
2076 exposes bugs in alias analysis. The alias analyzer does
2077 not handle &PTR->FIELD very well. Will fix after the
2078 branch is merged into mainline (dnovillo 2004-05-03). */
2079 if (!is_gimple_min_invariant (TREE_OPERAND (t, 1)))
2080 {
2081 tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
2082 is_gimple_formal_tmp_reg, fb_rvalue);
2083 ret = MIN (ret, tret);
2084 }
2085 }
2086 else if (TREE_CODE (t) == BIT_FIELD_REF)
2087 {
2088 tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
2089 is_gimple_val, fb_rvalue);
2090 ret = MIN (ret, tret);
2091 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
2092 is_gimple_val, fb_rvalue);
2093 ret = MIN (ret, tret);
2094 }
2095
2096 STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));
2097
2098 /* The innermost expression P may have originally had
2099 TREE_SIDE_EFFECTS set which would have caused all the outer
2100 expressions in *EXPR_P leading to P to also have had
2101 TREE_SIDE_EFFECTS set. */
2102 recalculate_side_effects (t);
2103 }
2104
2105 /* If the outermost expression is a COMPONENT_REF, canonicalize its type. */
2106 if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
2107 {
2108 canonicalize_component_ref (expr_p);
2109 ret = MIN (ret, GS_OK);
2110 }
2111
2112 VEC_free (tree, heap, stack);
2113
2114 return ret;
2115 }
2116
2117 /* Gimplify the self modifying expression pointed to by EXPR_P
2118 (++, --, +=, -=).
2119
2120 PRE_P points to the list where side effects that must happen before
2121 *EXPR_P should be stored.
2122
2123 POST_P points to the list where side effects that must happen after
2124 *EXPR_P should be stored.
2125
2126 WANT_VALUE is nonzero iff we want to use the value of this expression
2127 in another expression. */
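/* As an illustrative sketch (hypothetical source, not a testcase): when
   the value is wanted, a postfix use such as

     y = x++;

   is gimplified roughly as

     y = x;        <-- the LHS read first, used as the result
     x = x + 1;    <-- the update, queued on the post queue

   whereas the prefix forms (and postfix forms whose value is unused)
   are rewritten directly into a MODIFY_EXPR of the updated value. */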
2128
2129 static enum gimplify_status
2130 gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
2131 bool want_value)
2132 {
2133 enum tree_code code;
2134 tree lhs, lvalue, rhs, t1;
2135 gimple_seq post = NULL, *orig_post_p = post_p;
2136 bool postfix;
2137 enum tree_code arith_code;
2138 enum gimplify_status ret;
2139
2140 code = TREE_CODE (*expr_p);
2141
2142 gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
2143 || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);
2144
2145 /* Prefix or postfix? */
2146 if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
2147 /* Faster to treat as prefix if result is not used. */
2148 postfix = want_value;
2149 else
2150 postfix = false;
2151
2152 /* For postfix, make sure the inner expression's post side effects
2153 are executed after side effects from this expression. */
2154 if (postfix)
2155 post_p = &post;
2156
2157 /* Add or subtract? */
2158 if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
2159 arith_code = PLUS_EXPR;
2160 else
2161 arith_code = MINUS_EXPR;
2162
2163 /* Gimplify the LHS into a GIMPLE lvalue. */
2164 lvalue = TREE_OPERAND (*expr_p, 0);
2165 ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
2166 if (ret == GS_ERROR)
2167 return ret;
2168
2169 /* Extract the operands to the arithmetic operation. */
2170 lhs = lvalue;
2171 rhs = TREE_OPERAND (*expr_p, 1);
2172
2173 /* For a postfix operator, we evaluate the LHS to an rvalue and then use
2174 that as the result value and in the postqueue operation. */
2175 if (postfix)
2176 {
2177 ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
2178 if (ret == GS_ERROR)
2179 return ret;
2180 }
2181
2182 /* For pointer increment and decrement, use POINTER_PLUS_EXPR. */
2183 if (POINTER_TYPE_P (TREE_TYPE (lhs)))
2184 {
2185 rhs = fold_convert (sizetype, rhs);
2186 if (arith_code == MINUS_EXPR)
2187 rhs = fold_build1 (NEGATE_EXPR, TREE_TYPE (rhs), rhs);
2188 arith_code = POINTER_PLUS_EXPR;
2189 }
2190
2191 t1 = build2 (arith_code, TREE_TYPE (*expr_p), lhs, rhs);
2192
2193 if (postfix)
2194 {
2195 gimplify_assign (lvalue, t1, orig_post_p);
2196 gimplify_seq_add_seq (orig_post_p, post);
2197 *expr_p = lhs;
2198 return GS_ALL_DONE;
2199 }
2200 else
2201 {
2202 *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
2203 return GS_OK;
2204 }
2205 }
2206
2207
2208 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
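/* Sketch: for an object whose TYPE_SIZE_UNIT is not an INTEGER_CST (for
   example an array type whose element count is a runtime value n, giving
   a size of roughly n * sizeof (element)), the expression is rewritten as
   WITH_SIZE_EXPR <expr, size> so that callers such as gimplify_arg can
   still find the runtime size later. */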
2209
2210 static void
2211 maybe_with_size_expr (tree *expr_p)
2212 {
2213 tree expr = *expr_p;
2214 tree type = TREE_TYPE (expr);
2215 tree size;
2216
2217 /* If we've already wrapped this or the type is error_mark_node, we can't do
2218 anything. */
2219 if (TREE_CODE (expr) == WITH_SIZE_EXPR
2220 || type == error_mark_node)
2221 return;
2222
2223 /* If the size isn't known or is a constant, we have nothing to do. */
2224 size = TYPE_SIZE_UNIT (type);
2225 if (!size || TREE_CODE (size) == INTEGER_CST)
2226 return;
2227
2228 /* Otherwise, make a WITH_SIZE_EXPR. */
2229 size = unshare_expr (size);
2230 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
2231 *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
2232 }
2233
2234
2235 /* Helper for gimplify_call_expr. Gimplify a single argument *ARG_P.
2236 Store any side-effects in PRE_P. CALL_LOCATION is the location of
2237 the CALL_EXPR. */
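/* E.g. (a sketch with hypothetical names): for a call g (p->big_struct)
   the aggregate argument is left as an lvalue (is_gimple_lvalue) to avoid
   copying it through a temporary, while a scalar argument such as a + b
   is pulled out into a register temporary (is_gimple_val). */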
2238
2239 static enum gimplify_status
2240 gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location)
2241 {
2242 bool (*test) (tree);
2243 fallback_t fb;
2244
2245 /* In general, we allow lvalues for function arguments to avoid
2246 extra overhead of copying large aggregates out of even larger
2247 aggregates into temporaries only to copy the temporaries to
2248 the argument list. Make optimizers happy by pulling out to
2249 temporaries those types that fit in registers. */
2250 if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
2251 test = is_gimple_val, fb = fb_rvalue;
2252 else
2253 test = is_gimple_lvalue, fb = fb_either;
2254
2255 /* If this is a variable sized type, we must remember the size. */
2256 maybe_with_size_expr (arg_p);
2257
2258 /* Make sure arguments have the same location as the function call
2259 itself. */
2260 protected_set_expr_location (*arg_p, call_location);
2261
2262 /* There is a sequence point before a function call. Side effects in
2263 the argument list must occur before the actual call. So, when
2264 gimplifying arguments, force gimplify_expr to use an internal
2265 post queue which is then appended to the end of PRE_P. */
2266 return gimplify_expr (arg_p, pre_p, NULL, test, fb);
2267 }
2268
2269
2270 /* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
2271 WANT_VALUE is true if the result of the call is desired. */
2272
2273 static enum gimplify_status
2274 gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
2275 {
2276 tree fndecl, parms, p;
2277 enum gimplify_status ret;
2278 int i, nargs;
2279 gimple call;
2280 bool builtin_va_start_p = FALSE;
2281
2282 gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);
2283
2284 /* For reliable diagnostics during inlining, it is necessary that
2285 every call_expr be annotated with file and line. */
2286 if (! EXPR_HAS_LOCATION (*expr_p))
2287 SET_EXPR_LOCATION (*expr_p, input_location);
2288
2289 /* This may be a call to a builtin function.
2290
2291 Builtin function calls may be transformed into different
2292 (and more efficient) builtin function calls under certain
2293 circumstances. Unfortunately, gimplification can muck things
2294 up enough that the builtin expanders are not aware that certain
2295 transformations are still valid.
2296
2297 So we attempt transformation/gimplification of the call before
2298 we gimplify the CALL_EXPR. At this time we do not manage to
2299 transform all calls in the same manner as the expanders do, but
2300 we do transform most of them. */
2301 fndecl = get_callee_fndecl (*expr_p);
2302 if (fndecl && DECL_BUILT_IN (fndecl))
2303 {
2304 tree new_tree = fold_call_expr (*expr_p, !want_value);
2305
2306 if (new_tree && new_tree != *expr_p)
2307 {
2308 /* There was a transformation of this call which computes the
2309 same value, but in a more efficient way. Return and try
2310 again. */
2311 *expr_p = new_tree;
2312 return GS_OK;
2313 }
2314
2315 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
2316 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_VA_START)
2317 {
2318 builtin_va_start_p = TRUE;
2319 if (call_expr_nargs (*expr_p) < 2)
2320 {
2321 error ("too few arguments to function %<va_start%>");
2322 *expr_p = build_empty_stmt ();
2323 return GS_OK;
2324 }
2325
2326 if (fold_builtin_next_arg (*expr_p, true))
2327 {
2328 *expr_p = build_empty_stmt ();
2329 return GS_OK;
2330 }
2331 }
2332 }
2333
2334 /* There is a sequence point before the call, so any side effects in
2335 the calling expression must occur before the actual call. Force
2336 gimplify_expr to use an internal post queue. */
2337 ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
2338 is_gimple_call_addr, fb_rvalue);
2339
2340 nargs = call_expr_nargs (*expr_p);
2341
2342 /* Get argument types for verification. */
2343 fndecl = get_callee_fndecl (*expr_p);
2344 parms = NULL_TREE;
2345 if (fndecl)
2346 parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
2347 else if (POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_FN (*expr_p))))
2348 parms = TYPE_ARG_TYPES (TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (*expr_p))));
2349
2350 if (fndecl && DECL_ARGUMENTS (fndecl))
2351 p = DECL_ARGUMENTS (fndecl);
2352 else if (parms)
2353 p = parms;
2354 else
2355 {
2356 if (nargs != 0)
2357 CALL_CANNOT_INLINE_P (*expr_p) = 1;
2358 p = NULL_TREE;
2359 }
2360 for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
2361 ;
2362
2363 /* If the last argument is __builtin_va_arg_pack () and it is not
2364 passed as a named argument, decrease the number of CALL_EXPR
2365 arguments and set instead the CALL_EXPR_VA_ARG_PACK flag. */
2366 if (!p
2367 && i < nargs
2368 && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
2369 {
2370 tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
2371 tree last_arg_fndecl = get_callee_fndecl (last_arg);
2372
2373 if (last_arg_fndecl
2374 && TREE_CODE (last_arg_fndecl) == FUNCTION_DECL
2375 && DECL_BUILT_IN_CLASS (last_arg_fndecl) == BUILT_IN_NORMAL
2376 && DECL_FUNCTION_CODE (last_arg_fndecl) == BUILT_IN_VA_ARG_PACK)
2377 {
2378 tree call = *expr_p;
2379
2380 --nargs;
2381 *expr_p = build_call_array (TREE_TYPE (call), CALL_EXPR_FN (call),
2382 nargs, CALL_EXPR_ARGP (call));
2383
2384 /* Copy all CALL_EXPR flags, location and block, except
2385 CALL_EXPR_VA_ARG_PACK flag. */
2386 CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
2387 CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
2388 CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
2389 = CALL_EXPR_RETURN_SLOT_OPT (call);
2390 CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
2391 CALL_CANNOT_INLINE_P (*expr_p) = CALL_CANNOT_INLINE_P (call);
2392 SET_EXPR_LOCUS (*expr_p, EXPR_LOCUS (call));
2393 TREE_BLOCK (*expr_p) = TREE_BLOCK (call);
2394
2395 /* Set CALL_EXPR_VA_ARG_PACK. */
2396 CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
2397 }
2398 }
2399
2400 /* Finally, gimplify the function arguments. */
2401 if (nargs > 0)
2402 {
2403 for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
2404 PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
2405 PUSH_ARGS_REVERSED ? i-- : i++)
2406 {
2407 enum gimplify_status t;
2408
2409 /* Avoid gimplifying the second argument to va_start, which needs to
2410 be the plain PARM_DECL. */
2411 if ((i != 1) || !builtin_va_start_p)
2412 {
2413 t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
2414 EXPR_LOCATION (*expr_p));
2415
2416 if (t == GS_ERROR)
2417 ret = GS_ERROR;
2418 }
2419 }
2420 }
2421
2422 /* Try this again in case gimplification exposed something. */
2423 if (ret != GS_ERROR)
2424 {
2425 tree new_tree = fold_call_expr (*expr_p, !want_value);
2426
2427 if (new_tree && new_tree != *expr_p)
2428 {
2429 /* There was a transformation of this call which computes the
2430 same value, but in a more efficient way. Return and try
2431 again. */
2432 *expr_p = new_tree;
2433 return GS_OK;
2434 }
2435 }
2436 else
2437 {
2438 *expr_p = error_mark_node;
2439 return GS_ERROR;
2440 }
2441
2442 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on the
2443 CALL_EXPR node. This allows us to eliminate redundant or useless
2444 calls to "const" functions. */
2445 if (TREE_CODE (*expr_p) == CALL_EXPR)
2446 {
2447 int flags = call_expr_flags (*expr_p);
2448 if (flags & (ECF_CONST | ECF_PURE)
2449 /* An infinite loop is considered a side effect. */
2450 && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
2451 TREE_SIDE_EFFECTS (*expr_p) = 0;
2452 }
2453
2454 /* If the value is not needed by the caller, emit a new GIMPLE_CALL
2455 and clear *EXPR_P. Otherwise, leave *EXPR_P in its gimplified
2456 form and delegate the creation of a GIMPLE_CALL to
2457 gimplify_modify_expr. This is always possible because when
2458 WANT_VALUE is true, the caller wants the result of this call into
2459 a temporary, which means that we will emit an INIT_EXPR in
2460 internal_get_tmp_var which will then be handled by
2461 gimplify_modify_expr. */
2462 if (!want_value)
2463 {
2464 /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
2465 have to do is replicate it as a GIMPLE_CALL tuple. */
2466 call = gimple_build_call_from_tree (*expr_p);
2467 gimplify_seq_add_stmt (pre_p, call);
2468 *expr_p = NULL_TREE;
2469 }
2470
2471 return ret;
2472 }
2473
2474 /* Handle shortcut semantics in the predicate operand of a COND_EXPR by
2475 rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
2476
2477 TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
2478 condition is true or false, respectively. If null, we should generate
2479 our own to skip over the evaluation of this specific expression.
2480
2481 This function is the tree equivalent of do_jump.
2482
2483 shortcut_cond_r should only be called by shortcut_cond_expr. */
2484
2485 static tree
2486 shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p)
2487 {
2488 tree local_label = NULL_TREE;
2489 tree t, expr = NULL;
2490
2491 /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
2492 retain the shortcut semantics. Just insert the gotos here;
2493 shortcut_cond_expr will append the real blocks later. */
2494 if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
2495 {
2496 /* Turn if (a && b) into
2497
2498 if (a); else goto no;
2499 if (b) goto yes; else goto no;
2500 (no:) */
2501
2502 if (false_label_p == NULL)
2503 false_label_p = &local_label;
2504
2505 t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p);
2506 append_to_statement_list (t, &expr);
2507
2508 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
2509 false_label_p);
2510 append_to_statement_list (t, &expr);
2511 }
2512 else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
2513 {
2514 /* Turn if (a || b) into
2515
2516 if (a) goto yes;
2517 if (b) goto yes; else goto no;
2518 (yes:) */
2519
2520 if (true_label_p == NULL)
2521 true_label_p = &local_label;
2522
2523 t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL);
2524 append_to_statement_list (t, &expr);
2525
2526 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
2527 false_label_p);
2528 append_to_statement_list (t, &expr);
2529 }
2530 else if (TREE_CODE (pred) == COND_EXPR)
2531 {
2532 /* As long as we're messing with gotos, turn if (a ? b : c) into
2533 if (a)
2534 if (b) goto yes; else goto no;
2535 else
2536 if (c) goto yes; else goto no; */
2537 expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
2538 shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
2539 false_label_p),
2540 shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
2541 false_label_p));
2542 }
2543 else
2544 {
2545 expr = build3 (COND_EXPR, void_type_node, pred,
2546 build_and_jump (true_label_p),
2547 build_and_jump (false_label_p));
2548 }
2549
2550 if (local_label)
2551 {
2552 t = build1 (LABEL_EXPR, void_type_node, local_label);
2553 append_to_statement_list (t, &expr);
2554 }
2555
2556 return expr;
2557 }
2558
2559 /* Given a conditional expression EXPR with short-circuit boolean
2560 predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
2561 predicate apart into the equivalent sequence of conditionals. */
2562
2563 static tree
2564 shortcut_cond_expr (tree expr)
2565 {
2566 tree pred = TREE_OPERAND (expr, 0);
2567 tree then_ = TREE_OPERAND (expr, 1);
2568 tree else_ = TREE_OPERAND (expr, 2);
2569 tree true_label, false_label, end_label, t;
2570 tree *true_label_p;
2571 tree *false_label_p;
2572 bool emit_end, emit_false, jump_over_else;
2573 bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
2574 bool else_se = else_ && TREE_SIDE_EFFECTS (else_);
2575
2576 /* First do simple transformations. */
2577 if (!else_se)
2578 {
2579 /* If there is no 'else', turn (a && b) into if (a) if (b). */
2580 while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
2581 {
2582 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
2583 then_ = shortcut_cond_expr (expr);
2584 then_se = then_ && TREE_SIDE_EFFECTS (then_);
2585 pred = TREE_OPERAND (pred, 0);
2586 expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
2587 }
2588 }
2589
2590 if (!then_se)
2591 {
2592 /* If there is no 'then', turn
2593 if (a || b); else d
2594 into
2595 if (a); else if (b); else d. */
2596 while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
2597 {
2598 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
2599 else_ = shortcut_cond_expr (expr);
2600 else_se = else_ && TREE_SIDE_EFFECTS (else_);
2601 pred = TREE_OPERAND (pred, 0);
2602 expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
2603 }
2604 }
2605
2606 /* If we're done, great. */
2607 if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
2608 && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
2609 return expr;
2610
2611 /* Otherwise we need to mess with gotos. Change
2612 if (a) c; else d;
2613 to
2614 if (a); else goto no;
2615 c; goto end;
2616 no: d; end:
2617 and recursively gimplify the condition. */
2618
2619 true_label = false_label = end_label = NULL_TREE;
2620
2621 /* If our arms just jump somewhere, hijack those labels so we don't
2622 generate jumps to jumps. */
2623
2624 if (then_
2625 && TREE_CODE (then_) == GOTO_EXPR
2626 && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL)
2627 {
2628 true_label = GOTO_DESTINATION (then_);
2629 then_ = NULL;
2630 then_se = false;
2631 }
2632
2633 if (else_
2634 && TREE_CODE (else_) == GOTO_EXPR
2635 && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL)
2636 {
2637 false_label = GOTO_DESTINATION (else_);
2638 else_ = NULL;
2639 else_se = false;
2640 }
2641
2642 /* If we aren't hijacking a label for the 'then' branch, it falls through. */
2643 if (true_label)
2644 true_label_p = &true_label;
2645 else
2646 true_label_p = NULL;
2647
2648 /* The 'else' branch also needs a label if it contains interesting code. */
2649 if (false_label || else_se)
2650 false_label_p = &false_label;
2651 else
2652 false_label_p = NULL;
2653
2654 /* If there was nothing else in our arms, just forward the label(s). */
2655 if (!then_se && !else_se)
2656 return shortcut_cond_r (pred, true_label_p, false_label_p);
2657
2658 /* If our last subexpression already has a terminal label, reuse it. */
2659 if (else_se)
2660 expr = expr_last (else_);
2661 else if (then_se)
2662 expr = expr_last (then_);
2663 else
2664 expr = NULL;
2665 if (expr && TREE_CODE (expr) == LABEL_EXPR)
2666 end_label = LABEL_EXPR_LABEL (expr);
2667
2668 /* If we don't care about jumping to the 'else' branch, jump to the end
2669 if the condition is false. */
2670 if (!false_label_p)
2671 false_label_p = &end_label;
2672
2673 /* We only want to emit these labels if we aren't hijacking them. */
2674 emit_end = (end_label == NULL_TREE);
2675 emit_false = (false_label == NULL_TREE);
2676
2677 /* We only emit the jump over the else clause if we have to--if the
2678 then clause may fall through. Otherwise we can wind up with a
2679 useless jump and a useless label at the end of gimplified code,
2680 which will cause us to think that this conditional as a whole
2681 falls through even if it doesn't. If we then inline a function
2682 which ends with such a condition, that can cause us to issue an
2683 inappropriate warning about control reaching the end of a
2684 non-void function. */
2685 jump_over_else = block_may_fallthru (then_);
2686
2687 pred = shortcut_cond_r (pred, true_label_p, false_label_p);
2688
2689 expr = NULL;
2690 append_to_statement_list (pred, &expr);
2691
2692 append_to_statement_list (then_, &expr);
2693 if (else_se)
2694 {
2695 if (jump_over_else)
2696 {
2697 t = build_and_jump (&end_label);
2698 append_to_statement_list (t, &expr);
2699 }
2700 if (emit_false)
2701 {
2702 t = build1 (LABEL_EXPR, void_type_node, false_label);
2703 append_to_statement_list (t, &expr);
2704 }
2705 append_to_statement_list (else_, &expr);
2706 }
2707 if (emit_end && end_label)
2708 {
2709 t = build1 (LABEL_EXPR, void_type_node, end_label);
2710 append_to_statement_list (t, &expr);
2711 }
2712
2713 return expr;
2714 }
2715
2716 /* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */
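/* Minimal sketch of the effect: a comparison such as "a < b" keeps its
   operands but has TREE_TYPE forced to boolean_type_node, a TRUTH_*_EXPR
   additionally gets its operands boolified recursively, and any other
   expression is wrapped in a conversion to boolean_type_node via
   fold_convert. */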
2717
2718 tree
2719 gimple_boolify (tree expr)
2720 {
2721 tree type = TREE_TYPE (expr);
2722
2723 if (TREE_CODE (type) == BOOLEAN_TYPE)
2724 return expr;
2725
2726 switch (TREE_CODE (expr))
2727 {
2728 case TRUTH_AND_EXPR:
2729 case TRUTH_OR_EXPR:
2730 case TRUTH_XOR_EXPR:
2731 case TRUTH_ANDIF_EXPR:
2732 case TRUTH_ORIF_EXPR:
2733 /* Also boolify the arguments of truth exprs. */
2734 TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
2735 /* FALLTHRU */
2736
2737 case TRUTH_NOT_EXPR:
2738 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
2739 /* FALLTHRU */
2740
2741 case EQ_EXPR: case NE_EXPR:
2742 case LE_EXPR: case GE_EXPR: case LT_EXPR: case GT_EXPR:
2743 /* These expressions always produce boolean results. */
2744 TREE_TYPE (expr) = boolean_type_node;
2745 return expr;
2746
2747 default:
2748 /* Other expressions that get here must have boolean values, but
2749 might need to be converted to the appropriate mode. */
2750 return fold_convert (boolean_type_node, expr);
2751 }
2752 }
2753
2754 /* Given a conditional expression *EXPR_P without side effects, gimplify
2755 its operands. New statements are inserted to PRE_P. */
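/* Sketch: a value-producing "a && b ? x : y" whose arms are free of side
   effects keeps its COND_EXPR form; the && is degraded to the
   non-short-circuit TRUTH_AND_EXPR (valid because the operands cannot
   trap or have side effects) so gimplifying the condition does not
   re-introduce a COND_EXPR and recurse forever. */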
2756
2757 static enum gimplify_status
2758 gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
2759 {
2760 tree expr = *expr_p, cond;
2761 enum gimplify_status ret, tret;
2762 enum tree_code code;
2763
2764 cond = gimple_boolify (COND_EXPR_COND (expr));
2765
2766 /* We need to handle && and || specially, as their gimplification
2767 creates pure COND_EXPRs, which would otherwise lead to an infinite cycle. */
2768 code = TREE_CODE (cond);
2769 if (code == TRUTH_ANDIF_EXPR)
2770 TREE_SET_CODE (cond, TRUTH_AND_EXPR);
2771 else if (code == TRUTH_ORIF_EXPR)
2772 TREE_SET_CODE (cond, TRUTH_OR_EXPR);
2773 ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_condexpr, fb_rvalue);
2774 COND_EXPR_COND (*expr_p) = cond;
2775
2776 tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
2777 is_gimple_val, fb_rvalue);
2778 ret = MIN (ret, tret);
2779 tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
2780 is_gimple_val, fb_rvalue);
2781
2782 return MIN (ret, tret);
2783 }
2784
2785 /* Returns true if evaluating EXPR could trap.
2786 EXPR is GENERIC, while tree_could_trap_p can be called
2787 only on GIMPLE. */
2788
2789 static bool
2790 generic_expr_could_trap_p (tree expr)
2791 {
2792 unsigned i, n;
2793
2794 if (!expr || is_gimple_val (expr))
2795 return false;
2796
2797 if (!EXPR_P (expr) || tree_could_trap_p (expr))
2798 return true;
2799
2800 n = TREE_OPERAND_LENGTH (expr);
2801 for (i = 0; i < n; i++)
2802 if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
2803 return true;
2804
2805 return false;
2806 }
2807
2808 /* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
2809 into
2810
2811 if (p) if (p)
2812 t1 = a; a;
2813 else or else
2814 t1 = b; b;
2815 t1;
2816
2817 The second form is used when *EXPR_P is of type void.
2818
2819 PRE_P points to the list where side effects that must happen before
2820 *EXPR_P should be stored. */
2821
2822 static enum gimplify_status
2823 gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
2824 {
2825 tree expr = *expr_p;
2826 tree tmp, type, arm1, arm2;
2827 enum gimplify_status ret;
2828 tree label_true, label_false, label_cont;
2829 bool have_then_clause_p, have_else_clause_p;
2830 gimple gimple_cond;
2831 enum tree_code pred_code;
2832 gimple_seq seq = NULL;
2833
2834 type = TREE_TYPE (expr);
2835
2836 /* If this COND_EXPR has a value, copy the values into a temporary within
2837 the arms. */
2838 if (! VOID_TYPE_P (type))
2839 {
2840 tree result;
2841
2842 /* If an rvalue is ok or we do not require an lvalue, avoid creating
2843 an addressable temporary. */
2844 if (((fallback & fb_rvalue)
2845 || !(fallback & fb_lvalue))
2846 && !TREE_ADDRESSABLE (type))
2847 {
2848 if (gimplify_ctxp->allow_rhs_cond_expr
2849 /* If either branch has side effects or could trap, it can't be
2850 evaluated unconditionally. */
2851 && !TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p, 1))
2852 && !generic_expr_could_trap_p (TREE_OPERAND (*expr_p, 1))
2853 && !TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p, 2))
2854 && !generic_expr_could_trap_p (TREE_OPERAND (*expr_p, 2)))
2855 return gimplify_pure_cond_expr (expr_p, pre_p);
2856
2857 result = tmp = create_tmp_var (TREE_TYPE (expr), "iftmp");
2858 ret = GS_ALL_DONE;
2859 }
2860 else
2861 {
2862 tree type = build_pointer_type (TREE_TYPE (expr));
2863
2864 if (TREE_TYPE (TREE_OPERAND (expr, 1)) != void_type_node)
2865 TREE_OPERAND (expr, 1) =
2866 build_fold_addr_expr (TREE_OPERAND (expr, 1));
2867
2868 if (TREE_TYPE (TREE_OPERAND (expr, 2)) != void_type_node)
2869 TREE_OPERAND (expr, 2) =
2870 build_fold_addr_expr (TREE_OPERAND (expr, 2));
2871
2872 tmp = create_tmp_var (type, "iftmp");
2873
2874 expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (expr, 0),
2875 TREE_OPERAND (expr, 1), TREE_OPERAND (expr, 2));
2876
2877 result = build_fold_indirect_ref (tmp);
2878 }
2879
2880 /* Build the then clause, 't1 = a;'. But don't build an assignment
2881 if this branch is void; in C++ it can be, if it's a throw. */
2882 if (TREE_TYPE (TREE_OPERAND (expr, 1)) != void_type_node)
2883 TREE_OPERAND (expr, 1)
2884 = build2 (MODIFY_EXPR, TREE_TYPE (tmp), tmp, TREE_OPERAND (expr, 1));
2885
2886 /* Build the else clause, 't1 = b;'. */
2887 if (TREE_TYPE (TREE_OPERAND (expr, 2)) != void_type_node)
2888 TREE_OPERAND (expr, 2)
2889 = build2 (MODIFY_EXPR, TREE_TYPE (tmp), tmp, TREE_OPERAND (expr, 2));
2890
2891 TREE_TYPE (expr) = void_type_node;
2892 recalculate_side_effects (expr);
2893
2894 /* Move the COND_EXPR to the prequeue. */
2895 gimplify_stmt (&expr, pre_p);
2896
2897 *expr_p = result;
2898 return GS_ALL_DONE;
2899 }
2900
2901 /* Make sure the condition has BOOLEAN_TYPE. */
2902 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
2903
2904 /* Break apart && and || conditions. */
2905 if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
2906 || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
2907 {
2908 expr = shortcut_cond_expr (expr);
2909
2910 if (expr != *expr_p)
2911 {
2912 *expr_p = expr;
2913
2914 /* We can't rely on gimplify_expr to re-gimplify the expanded
2915 form properly, as cleanups might cause the target labels to be
2916 wrapped in a TRY_FINALLY_EXPR. To prevent that, we need to
2917 set up a conditional context. */
2918 gimple_push_condition ();
2919 gimplify_stmt (expr_p, &seq);
2920 gimple_pop_condition (pre_p);
2921 gimple_seq_add_seq (pre_p, seq);
2922
2923 return GS_ALL_DONE;
2924 }
2925 }
2926
2927 /* Now do the normal gimplification. */
2928
2929 /* Gimplify condition. */
2930 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL, is_gimple_condexpr,
2931 fb_rvalue);
2932 if (ret == GS_ERROR)
2933 return GS_ERROR;
2934 gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);
2935
2936 gimple_push_condition ();
2937
2938 have_then_clause_p = have_else_clause_p = false;
2939 if (TREE_OPERAND (expr, 1) != NULL
2940 && TREE_CODE (TREE_OPERAND (expr, 1)) == GOTO_EXPR
2941 && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 1))) == LABEL_DECL
2942 && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 1)))
2943 == current_function_decl)
2944 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
2945 have different locations, otherwise we end up with incorrect
2946 location information on the branches. */
2947 && (optimize
2948 || !EXPR_HAS_LOCATION (expr)
2949 || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 1))
2950 || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 1))))
2951 {
2952 label_true = GOTO_DESTINATION (TREE_OPERAND (expr, 1));
2953 have_then_clause_p = true;
2954 }
2955 else
2956 label_true = create_artificial_label ();
2957 if (TREE_OPERAND (expr, 2) != NULL
2958 && TREE_CODE (TREE_OPERAND (expr, 2)) == GOTO_EXPR
2959 && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 2))) == LABEL_DECL
2960 && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 2)))
2961 == current_function_decl)
2962 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
2963 have different locations, otherwise we end up with incorrect
2964 location information on the branches. */
2965 && (optimize
2966 || !EXPR_HAS_LOCATION (expr)
2967 || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 2))
2968 || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 2))))
2969 {
2970 label_false = GOTO_DESTINATION (TREE_OPERAND (expr, 2));
2971 have_else_clause_p = true;
2972 }
2973 else
2974 label_false = create_artificial_label ();
2975
2976 gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
2977 &arm2);
2978
2979 gimple_cond = gimple_build_cond (pred_code, arm1, arm2, label_true,
2980 label_false);
2981
2982 gimplify_seq_add_stmt (&seq, gimple_cond);
2983 label_cont = NULL_TREE;
2984 if (!have_then_clause_p)
2985 {
2986 /* For if (...) {} else { code; } put label_true after
2987 the else block. */
2988 if (TREE_OPERAND (expr, 1) == NULL_TREE
2989 && !have_else_clause_p
2990 && TREE_OPERAND (expr, 2) != NULL_TREE)
2991 label_cont = label_true;
2992 else
2993 {
2994 gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
2995 have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
2996 /* For if (...) { code; } else {} or
2997 if (...) { code; } else goto label; or
2998 if (...) { code; return; } else { ... }
2999 label_cont isn't needed. */
3000 if (!have_else_clause_p
3001 && TREE_OPERAND (expr, 2) != NULL_TREE
3002 && gimple_seq_may_fallthru (seq))
3003 {
3004 gimple g;
3005 label_cont = create_artificial_label ();
3006
3007 g = gimple_build_goto (label_cont);
3008
3009 /* GIMPLE_COND's are very low level; they have embedded
3010 gotos. This particular embedded goto should not be marked
3011 with the location of the original COND_EXPR, as it would
3012 correspond to the COND_EXPR's condition, not the ELSE or the
3013 THEN arms. To avoid marking it with the wrong location, flag
3014 it as "no location". */
3015 gimple_set_do_not_emit_location (g);
3016
3017 gimplify_seq_add_stmt (&seq, g);
3018 }
3019 }
3020 }
3021 if (!have_else_clause_p)
3022 {
3023 gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
3024 have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
3025 }
3026 if (label_cont)
3027 gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));
3028
3029 gimple_pop_condition (pre_p);
3030 gimple_seq_add_seq (pre_p, seq);
3031
3032 if (ret == GS_ERROR)
3033 ; /* Do nothing. */
3034 else if (have_then_clause_p || have_else_clause_p)
3035 ret = GS_ALL_DONE;
3036 else
3037 {
3038 /* Both arms are empty; replace the COND_EXPR with its predicate. */
3039 expr = TREE_OPERAND (expr, 0);
3040 gimplify_stmt (&expr, pre_p);
3041 }
3042
3043 *expr_p = NULL;
3044 return ret;
3045 }
3046
3047 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
3048 a call to __builtin_memcpy. */
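/* Rough sketch of the effect on hypothetical user code:

     struct big a, b;
     a = b;            becomes          __builtin_memcpy (&a, &b, SIZE);

   where SIZE is passed in by the caller (e.g. taken from a surrounding
   WITH_SIZE_EXPR), and when the value of the assignment is wanted the
   result is re-exposed as *tmp, with tmp holding memcpy's return value. */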
3049
3050 static enum gimplify_status
3051 gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
3052 gimple_seq *seq_p)
3053 {
3054 tree t, to, to_ptr, from, from_ptr;
3055 gimple gs;
3056
3057 to = TREE_OPERAND (*expr_p, 0);
3058 from = TREE_OPERAND (*expr_p, 1);
3059
3060 from_ptr = build_fold_addr_expr (from);
3061 gimplify_arg (&from_ptr, seq_p, EXPR_LOCATION (*expr_p));
3062
3063 to_ptr = build_fold_addr_expr (to);
3064 gimplify_arg (&to_ptr, seq_p, EXPR_LOCATION (*expr_p));
3065
3066 t = implicit_built_in_decls[BUILT_IN_MEMCPY];
3067
3068 gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);
3069
3070 if (want_value)
3071 {
3072 /* tmp = memcpy() */
3073 t = create_tmp_var (TREE_TYPE (to_ptr), NULL);
3074 gimple_call_set_lhs (gs, t);
3075 gimplify_seq_add_stmt (seq_p, gs);
3076
3077 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
3078 return GS_ALL_DONE;
3079 }
3080
3081 gimplify_seq_add_stmt (seq_p, gs);
3082 *expr_p = NULL;
3083 return GS_ALL_DONE;
3084 }
3085
3086 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
3087 a call to __builtin_memset. In this case we know that the RHS is
3088 a CONSTRUCTOR with an empty element list. */
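/* Rough sketch of the effect on hypothetical user code:

     struct big a;
     a = (struct big){};     becomes      __builtin_memset (&a, 0, SIZE);

   with SIZE supplied by the caller, analogous to the memcpy case above. */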
3089
3090 static enum gimplify_status
3091 gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
3092 gimple_seq *seq_p)
3093 {
3094 tree t, from, to, to_ptr;
3095 gimple gs;
3096
3097 /* Assert our assumptions, to abort instead of producing wrong code
3098 silently if they are not met. Beware that the RHS CONSTRUCTOR might
3099 not be immediately exposed. */
3100 from = TREE_OPERAND (*expr_p, 1);
3101 if (TREE_CODE (from) == WITH_SIZE_EXPR)
3102 from = TREE_OPERAND (from, 0);
3103
3104 gcc_assert (TREE_CODE (from) == CONSTRUCTOR
3105 && VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (from)));
3106
3107 /* Now proceed. */
3108 to = TREE_OPERAND (*expr_p, 0);
3109
3110 to_ptr = build_fold_addr_expr (to);
3111 gimplify_arg (&to_ptr, seq_p, EXPR_LOCATION (*expr_p));
3112 t = implicit_built_in_decls[BUILT_IN_MEMSET];
3113
3114 gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);
3115
3116 if (want_value)
3117 {
3118 /* tmp = memset() */
3119 t = create_tmp_var (TREE_TYPE (to_ptr), NULL);
3120 gimple_call_set_lhs (gs, t);
3121 gimplify_seq_add_stmt (seq_p, gs);
3122
3123 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
3124 return GS_ALL_DONE;
3125 }
3126
3127 gimplify_seq_add_stmt (seq_p, gs);
3128 *expr_p = NULL;
3129 return GS_ALL_DONE;
3130 }
3131
3132 /* A subroutine of gimplify_init_ctor_preeval. Called via walk_tree,
3133 determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
3134 assignment. Returns non-null if we detect a potential overlap. */
3135
3136 struct gimplify_init_ctor_preeval_data
3137 {
3138 /* The base decl of the lhs object. May be NULL, in which case we
3139 have to assume the lhs is indirect. */
3140 tree lhs_base_decl;
3141
3142 /* The alias set of the lhs object. */
3143 alias_set_type lhs_alias_set;
3144 };
3145
3146 static tree
3147 gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
3148 {
3149 struct gimplify_init_ctor_preeval_data *data
3150 = (struct gimplify_init_ctor_preeval_data *) xdata;
3151 tree t = *tp;
3152
3153 /* If we find the base object, obviously we have overlap. */
3154 if (data->lhs_base_decl == t)
3155 return t;
3156
3157 /* If the constructor component is indirect, determine if we have a
3158 potential overlap with the lhs. The only bits of information we
3159 have to go on at this point are addressability and alias sets. */
3160 if (TREE_CODE (t) == INDIRECT_REF
3161 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
3162 && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
3163 return t;
3164
3165 /* If the constructor component is a call, determine if it can hide a
3166 potential overlap with the lhs through an INDIRECT_REF like above. */
3167 if (TREE_CODE (t) == CALL_EXPR)
3168 {
3169 tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));
3170
3171 for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
3172 if (POINTER_TYPE_P (TREE_VALUE (type))
3173 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
3174 && alias_sets_conflict_p (data->lhs_alias_set,
3175 get_alias_set
3176 (TREE_TYPE (TREE_VALUE (type)))))
3177 return t;
3178 }
3179
3180 if (IS_TYPE_OR_DECL_P (t))
3181 *walk_subtrees = 0;
3182 return NULL;
3183 }
3184
3185 /* A subroutine of gimplify_init_constructor. Pre-evaluate EXPR,
3186 force values that overlap with the lhs (as described by *DATA)
3187 into temporaries. */
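/* Illustrative example (hypothetical source): in

     x = (struct S){ .a = x.b, .c = f () };

   the reference to x.b overlaps the lhs and is forced into a temporary,
   and the call f () may be treated as overlapping too if it could reach
   the lhs through a pointer argument, so both are evaluated before the
   element-wise stores are emitted. */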
3188
3189 static void
3190 gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3191 struct gimplify_init_ctor_preeval_data *data)
3192 {
3193 enum gimplify_status one;
3194
3195 /* If the value is constant, then there's nothing to pre-evaluate. */
3196 if (TREE_CONSTANT (*expr_p))
3197 {
3198 /* Ensure it does not have side effects, it might contain a reference to
3199 the object we're initializing. */
3200 gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
3201 return;
3202 }
3203
3204 /* If the type has non-trivial constructors, we can't pre-evaluate. */
3205 if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
3206 return;
3207
3208 /* Recurse for nested constructors. */
3209 if (TREE_CODE (*expr_p) == CONSTRUCTOR)
3210 {
3211 unsigned HOST_WIDE_INT ix;
3212 constructor_elt *ce;
3213 VEC(constructor_elt,gc) *v = CONSTRUCTOR_ELTS (*expr_p);
3214
3215 for (ix = 0; VEC_iterate (constructor_elt, v, ix, ce); ix++)
3216 gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);
3217
3218 return;
3219 }
3220
3221 /* If this is a variable sized type, we must remember the size. */
3222 maybe_with_size_expr (expr_p);
3223
3224 /* Gimplify the constructor element to something appropriate for the rhs
3225 of a MODIFY_EXPR. Given that we know the LHS is an aggregate, we know
3226 the gimplifier will consider this a store to memory. Doing this
3227 gimplification now means that we won't have to deal with complicated
3228 language-specific trees, nor trees like SAVE_EXPR that can induce
3229 exponential search behavior. */
3230 one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
3231 if (one == GS_ERROR)
3232 {
3233 *expr_p = NULL;
3234 return;
3235 }
3236
3237 /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
3238 with the lhs, since "a = { .x=a }" doesn't make sense. This will
3239 always be true for all scalars, since is_gimple_mem_rhs insists on a
3240 temporary variable for them. */
3241 if (DECL_P (*expr_p))
3242 return;
3243
3244 /* If this is of variable size, we have no choice but to assume it doesn't
3245 overlap since we can't make a temporary for it. */
3246 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
3247 return;
3248
3249 /* Otherwise, we must search for overlap ... */
3250 if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
3251 return;
3252
3253 /* ... and if found, force the value into a temporary. */
3254 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
3255 }
3256
3257 /* A subroutine of gimplify_init_ctor_eval. Create a loop for
3258 a RANGE_EXPR in a CONSTRUCTOR for an array.
3259
3260 var = lower;
3261 loop_entry:
3262 object[var] = value;
3263 if (var == upper)
3264 goto loop_exit;
3265 var = var + 1;
3266 goto loop_entry;
3267 loop_exit:
3268
3269 We increment var _after_ the loop exit check because we might otherwise
3270 fail if upper == TYPE_MAX_VALUE (type for upper).
3271
3272 Note that we never have to deal with SAVE_EXPRs here, because this has
3273 already been taken care of for us, in gimplify_init_ctor_preeval(). */
3274
3275 static void gimplify_init_ctor_eval (tree, VEC(constructor_elt,gc) *,
3276 gimple_seq *, bool);
3277
3278 static void
3279 gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
3280 tree value, tree array_elt_type,
3281 gimple_seq *pre_p, bool cleared)
3282 {
3283 tree loop_entry_label, loop_exit_label, fall_thru_label;
3284 tree var, var_type, cref, tmp;
3285
3286 loop_entry_label = create_artificial_label ();
3287 loop_exit_label = create_artificial_label ();
3288 fall_thru_label = create_artificial_label ();
3289
3290 /* Create and initialize the index variable. */
3291 var_type = TREE_TYPE (upper);
3292 var = create_tmp_var (var_type, NULL);
3293 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));
3294
3295 /* Add the loop entry label. */
3296 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));
3297
3298 /* Build the reference. */
3299 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
3300 var, NULL_TREE, NULL_TREE);
3301
3302 /* If the value is a CONSTRUCTOR, call gimplify_init_ctor_eval to do
3303 the store. Otherwise just assign the value to the reference. */
3304
3305 if (TREE_CODE (value) == CONSTRUCTOR)
3306 /* NB we might have to call ourselves recursively through
3307 gimplify_init_ctor_eval if the value is a constructor. */
3308 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
3309 pre_p, cleared);
3310 else
3311 gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));
3312
3313 /* We exit the loop when the index var is equal to the upper bound. */
3314 gimplify_seq_add_stmt (pre_p,
3315 gimple_build_cond (EQ_EXPR, var, upper,
3316 loop_exit_label, fall_thru_label));
3317
3318 gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));
3319
3320 /* Otherwise, increment the index var... */
3321 tmp = build2 (PLUS_EXPR, var_type, var,
3322 fold_convert (var_type, integer_one_node));
3323 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));
3324
3325 /* ...and jump back to the loop entry. */
3326 gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));
3327
3328 /* Add the loop exit label. */
3329 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
3330 }
3331
3332 /* Return true if FDECL is accessing a field that is zero sized. */
3333
3334 static bool
3335 zero_sized_field_decl (const_tree fdecl)
3336 {
3337 if (TREE_CODE (fdecl) == FIELD_DECL && DECL_SIZE (fdecl)
3338 && integer_zerop (DECL_SIZE (fdecl)))
3339 return true;
3340 return false;
3341 }
3342
3343 /* Return true if TYPE is zero sized. */
3344
3345 static bool
3346 zero_sized_type (const_tree type)
3347 {
3348 if (AGGREGATE_TYPE_P (type) && TYPE_SIZE (type)
3349 && integer_zerop (TYPE_SIZE (type)))
3350 return true;
3351 return false;
3352 }
3353
3354 /* A subroutine of gimplify_init_constructor. Generate individual
3355 MODIFY_EXPRs for a CONSTRUCTOR. OBJECT is the LHS against which the
3356 assignments should happen. ELTS is the CONSTRUCTOR_ELTS of the
3357 CONSTRUCTOR. CLEARED is true if the entire LHS object has been
3358 zeroed first. */
3359
3360 static void
3361 gimplify_init_ctor_eval (tree object, VEC(constructor_elt,gc) *elts,
3362 gimple_seq *pre_p, bool cleared)
3363 {
3364 tree array_elt_type = NULL;
3365 unsigned HOST_WIDE_INT ix;
3366 tree purpose, value;
3367
3368 if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
3369 array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));
3370
3371 FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
3372 {
3373 tree cref;
3374
3375 /* NULL values are created above for gimplification errors. */
3376 if (value == NULL)
3377 continue;
3378
3379 if (cleared && initializer_zerop (value))
3380 continue;
3381
3382 /* ??? Here's to hoping the front end fills in all of the indices,
3383 so we don't have to figure out what's missing ourselves. */
3384 gcc_assert (purpose);
3385
3386 /* Skip zero-sized fields, unless value has side-effects. This can
3387 happen with calls to functions returning a zero-sized type, which
3388 we shouldn't discard. As a number of downstream passes don't
3389 expect sets of zero-sized fields, we rely on the gimplification of
3390 the MODIFY_EXPR we make below to drop the assignment statement. */
3391 if (! TREE_SIDE_EFFECTS (value) && zero_sized_field_decl (purpose))
3392 continue;
3393
3394 /* If we have a RANGE_EXPR, we have to build a loop to assign the
3395 whole range. */
3396 if (TREE_CODE (purpose) == RANGE_EXPR)
3397 {
3398 tree lower = TREE_OPERAND (purpose, 0);
3399 tree upper = TREE_OPERAND (purpose, 1);
3400
3401 /* If the lower bound is equal to upper, just treat it as if
3402 upper was the index. */
3403 if (simple_cst_equal (lower, upper))
3404 purpose = upper;
3405 else
3406 {
3407 gimplify_init_ctor_eval_range (object, lower, upper, value,
3408 array_elt_type, pre_p, cleared);
3409 continue;
3410 }
3411 }
3412
3413 if (array_elt_type)
3414 {
3415 /* Do not use bitsizetype for ARRAY_REF indices. */
3416 if (TYPE_DOMAIN (TREE_TYPE (object)))
3417 purpose = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
3418 purpose);
3419 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
3420 purpose, NULL_TREE, NULL_TREE);
3421 }
3422 else
3423 {
3424 gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
3425 cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
3426 unshare_expr (object), purpose, NULL_TREE);
3427 }
3428
3429 if (TREE_CODE (value) == CONSTRUCTOR
3430 && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
3431 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
3432 pre_p, cleared);
3433 else
3434 {
3435 tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
3436 gimplify_and_add (init, pre_p);
3437 ggc_free (init);
3438 }
3439 }
3440 }
3441
3442
3443 /* Returns the appropriate RHS predicate for this LHS. */
3444
3445 gimple_predicate
3446 rhs_predicate_for (tree lhs)
3447 {
3448 if (is_gimple_formal_tmp_var (lhs))
3449 return is_gimple_formal_tmp_or_call_rhs;
3450 else if (is_gimple_reg (lhs))
3451 return is_gimple_reg_or_call_rhs;
3452 else
3453 return is_gimple_mem_or_call_rhs;
3454 }
3455
3456
3457 /* A subroutine of gimplify_modify_expr. Break out elements of a
3458 CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
3459
3460 Note that we still need to clear any elements that don't have explicit
3461 initializers, so if not all elements are initialized we keep the
3462 original MODIFY_EXPR; we just remove all of the constructor elements.
3463
3464 If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
3465 GS_ERROR if we would have to create a temporary when gimplifying
3466 this constructor. Otherwise, return GS_OK.
3467
3468 If NOTIFY_TEMP_CREATION is false, just do the gimplification. */
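/* Illustrative sketch (hypothetical source): an initialization such as

     struct P p = { .x = f (), .y = 2 };

   is broken up roughly into

     p = {};        <-- only if clearing the whole object was chosen
     p.x = f ();
     p.y = 2;

   while a sufficiently large constant initializer may instead be promoted
   to a static object and block-copied. */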
3469
3470 static enum gimplify_status
3471 gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3472 bool want_value, bool notify_temp_creation)
3473 {
3474 tree object;
3475 tree ctor = TREE_OPERAND (*expr_p, 1);
3476 tree type = TREE_TYPE (ctor);
3477 enum gimplify_status ret;
3478 VEC(constructor_elt,gc) *elts;
3479
3480 if (TREE_CODE (ctor) != CONSTRUCTOR)
3481 return GS_UNHANDLED;
3482
3483 if (!notify_temp_creation)
3484 {
3485 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
3486 is_gimple_lvalue, fb_lvalue);
3487 if (ret == GS_ERROR)
3488 return ret;
3489 }
3490
3491 object = TREE_OPERAND (*expr_p, 0);
3492 elts = CONSTRUCTOR_ELTS (ctor);
3493 ret = GS_ALL_DONE;
3494
3495 switch (TREE_CODE (type))
3496 {
3497 case RECORD_TYPE:
3498 case UNION_TYPE:
3499 case QUAL_UNION_TYPE:
3500 case ARRAY_TYPE:
3501 {
3502 struct gimplify_init_ctor_preeval_data preeval_data;
3503 HOST_WIDE_INT num_type_elements, num_ctor_elements;
3504 HOST_WIDE_INT num_nonzero_elements;
3505 bool cleared, valid_const_initializer;
3506
3507 /* Aggregate types must lower constructors to initialization of
3508 individual elements. The exception is that a CONSTRUCTOR node
3509 with no elements indicates zero-initialization of the whole. */
3510 if (VEC_empty (constructor_elt, elts))
3511 {
3512 if (notify_temp_creation)
3513 return GS_OK;
3514 break;
3515 }
3516
3517 /* Fetch information about the constructor to direct later processing.
3518 We might want to make static versions of it in various cases, and
3519 can only do so if it is known to be a valid constant initializer. */
3520 valid_const_initializer
3521 = categorize_ctor_elements (ctor, &num_nonzero_elements,
3522 &num_ctor_elements, &cleared);
3523
3524 /* If a const aggregate variable is being initialized, then it
3525 should never be a loss to promote the variable to be static. */
3526 if (valid_const_initializer
3527 && num_nonzero_elements > 1
3528 && TREE_READONLY (object)
3529 && TREE_CODE (object) == VAR_DECL
3530 && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object)))
3531 {
3532 if (notify_temp_creation)
3533 return GS_ERROR;
3534 DECL_INITIAL (object) = ctor;
3535 TREE_STATIC (object) = 1;
3536 if (!DECL_NAME (object))
3537 DECL_NAME (object) = create_tmp_var_name ("C");
3538 walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);
3539
3540 /* ??? C++ doesn't automatically append a .<number> to the
3541 assembler name, and even when it does, it looks at FE-private
3542 data structures to figure out what that number should be,
3543 which are not set for this variable. I suppose this is
3544 important for local statics for inline functions, which aren't
3545 "local" in the object file sense. So in order to get a unique
3546 TU-local symbol, we must invoke the lhd version now. */
3547 lhd_set_decl_assembler_name (object);
3548
3549 *expr_p = NULL_TREE;
3550 break;
3551 }
3552
3553 /* If there are "lots" of initialized elements, even discounting
3554 those that are not address constants (and thus *must* be
3555 computed at runtime), then partition the constructor into
3556 constant and non-constant parts. Block copy the constant
3557 parts in, then generate code for the non-constant parts. */
3558 /* TODO. There's code in cp/typeck.c to do this. */
3559
3560 num_type_elements = count_type_elements (type, true);
3561
3562 /* If count_type_elements could not determine the number of type elements
3563 for a constant-sized object, assume clearing is needed.
3564 Don't do this for variable-sized objects, as store_constructor
3565 will ignore the clearing of variable-sized objects. */
3566 if (num_type_elements < 0 && int_size_in_bytes (type) >= 0)
3567 cleared = true;
3568 /* If there are "lots" of zeros, then block clear the object first. */
3569 else if (num_type_elements - num_nonzero_elements
3570 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
3571 && num_nonzero_elements < num_type_elements/4)
3572 cleared = true;
3573 /* ??? This bit ought not to be needed. Any element not present
3574 in the initializer should simply be set to zero. Except
3575 we'd need to *find* the elements that are not present, and that
3576 requires trickery to avoid quadratic compile-time behavior in
3577 large cases or excessive memory use in small cases. */
3578 else if (num_ctor_elements < num_type_elements)
3579 cleared = true;
3580
3581 /* If there are "lots" of initialized elements, and all of them
3582 are valid address constants, then the entire initializer can
3583 be dropped to memory, and then memcpy'd out. Don't do this
3584 for sparse arrays, though, as it's more efficient to follow
3585 the standard CONSTRUCTOR behavior of memset followed by
3586 individual element initialization. Also don't do this for small
3587 all-zero initializers (which aren't big enough to merit
3588 clearing), and don't try to make bitwise copies of
3589 TREE_ADDRESSABLE types. */
3590 if (valid_const_initializer
3591 && !(cleared || num_nonzero_elements == 0)
3592 && !TREE_ADDRESSABLE (type))
3593 {
3594 HOST_WIDE_INT size = int_size_in_bytes (type);
3595 unsigned int align;
3596
3597 /* ??? We can still get unbounded array types, at least
3598 from the C++ front end. This seems wrong, but attempt
3599 to work around it for now. */
3600 if (size < 0)
3601 {
3602 size = int_size_in_bytes (TREE_TYPE (object));
3603 if (size >= 0)
3604 TREE_TYPE (ctor) = type = TREE_TYPE (object);
3605 }
3606
3607 /* Find the maximum alignment we can assume for the object. */
3608 /* ??? Make use of DECL_OFFSET_ALIGN. */
3609 if (DECL_P (object))
3610 align = DECL_ALIGN (object);
3611 else
3612 align = TYPE_ALIGN (type);
3613
3614 if (size > 0
3615 && num_nonzero_elements > 1
3616 && !can_move_by_pieces (size, align))
3617 {
3618 tree new_tree;
3619
3620 if (notify_temp_creation)
3621 return GS_ERROR;
3622
3623 new_tree = create_tmp_var_raw (type, "C");
3624
3625 gimple_add_tmp_var (new_tree);
3626 TREE_STATIC (new_tree) = 1;
3627 TREE_READONLY (new_tree) = 1;
3628 DECL_INITIAL (new_tree) = ctor;
3629 if (align > DECL_ALIGN (new_tree))
3630 {
3631 DECL_ALIGN (new_tree) = align;
3632 DECL_USER_ALIGN (new_tree) = 1;
3633 }
3634 walk_tree (&DECL_INITIAL (new_tree), force_labels_r, NULL, NULL);
3635
3636 TREE_OPERAND (*expr_p, 1) = new_tree;
3637
3638 /* This is no longer an assignment of a CONSTRUCTOR, but
3639 we still may have processing to do on the LHS. So
3640 pretend we didn't do anything here to let that happen. */
3641 return GS_UNHANDLED;
3642 }
3643 }
3644
3645 if (notify_temp_creation)
3646 return GS_OK;
3647
3648 /* If there are nonzero elements, pre-evaluate to capture elements
3649 overlapping with the lhs into temporaries. We must do this before
3650 clearing to fetch the values before they are zeroed-out. */
3651 if (num_nonzero_elements > 0)
3652 {
3653 preeval_data.lhs_base_decl = get_base_address (object);
3654 if (!DECL_P (preeval_data.lhs_base_decl))
3655 preeval_data.lhs_base_decl = NULL;
3656 preeval_data.lhs_alias_set = get_alias_set (object);
3657
3658 gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
3659 pre_p, post_p, &preeval_data);
3660 }
3661
3662 if (cleared)
3663 {
3664 /* Zap the CONSTRUCTOR element list, which simplifies this case.
3665 Note that we still have to gimplify, in order to handle the
3666 case of variable sized types. Avoid shared tree structures. */
3667 CONSTRUCTOR_ELTS (ctor) = NULL;
3668 TREE_SIDE_EFFECTS (ctor) = 0;
3669 object = unshare_expr (object);
3670 gimplify_stmt (expr_p, pre_p);
3671 }
3672
3673 /* If we have not block cleared the object, or if there are nonzero
3674 elements in the constructor, add assignments to the individual
3675 scalar fields of the object. */
3676 if (!cleared || num_nonzero_elements > 0)
3677 gimplify_init_ctor_eval (object, elts, pre_p, cleared);
3678
3679 *expr_p = NULL_TREE;
3680 }
3681 break;
3682
3683 case COMPLEX_TYPE:
3684 {
3685 tree r, i;
3686
3687 if (notify_temp_creation)
3688 return GS_OK;
3689
3690 /* Extract the real and imaginary parts out of the ctor. */
3691 gcc_assert (VEC_length (constructor_elt, elts) == 2);
3692 r = VEC_index (constructor_elt, elts, 0)->value;
3693 i = VEC_index (constructor_elt, elts, 1)->value;
3694 if (r == NULL || i == NULL)
3695 {
3696 tree zero = fold_convert (TREE_TYPE (type), integer_zero_node);
3697 if (r == NULL)
3698 r = zero;
3699 if (i == NULL)
3700 i = zero;
3701 }
3702
3703 /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
3704 represent creation of a complex value. */
3705 if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
3706 {
3707 ctor = build_complex (type, r, i);
3708 TREE_OPERAND (*expr_p, 1) = ctor;
3709 }
3710 else
3711 {
3712 ctor = build2 (COMPLEX_EXPR, type, r, i);
3713 TREE_OPERAND (*expr_p, 1) = ctor;
3714 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
3715 pre_p,
3716 post_p,
3717 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
3718 fb_rvalue);
3719 }
3720 }
3721 break;
3722
3723 case VECTOR_TYPE:
3724 {
3725 unsigned HOST_WIDE_INT ix;
3726 constructor_elt *ce;
3727
3728 if (notify_temp_creation)
3729 return GS_OK;
3730
3731 /* Go ahead and simplify constant constructors to VECTOR_CST. */
3732 if (TREE_CONSTANT (ctor))
3733 {
3734 bool constant_p = true;
3735 tree value;
3736
3737 /* Even when ctor is constant, it might contain non-*_CST
3738 elements, such as addresses or trapping values like
3739 1.0/0.0 - 1.0/0.0. Such expressions don't belong
3740 in VECTOR_CST nodes. */
3741 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
3742 if (!CONSTANT_CLASS_P (value))
3743 {
3744 constant_p = false;
3745 break;
3746 }
3747
3748 if (constant_p)
3749 {
3750 TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
3751 break;
3752 }
3753
3754 /* Don't reduce an initializer constant even if we can't
3755 make a VECTOR_CST. It won't do anything for us, and it'll
3756 prevent us from representing it as a single constant. */
3757 if (initializer_constant_valid_p (ctor, type))
3758 break;
3759
3760 TREE_CONSTANT (ctor) = 0;
3761 }
3762
3763 /* Vector types use CONSTRUCTOR all the way through gimple
3764 compilation as a general initializer. */
3765 for (ix = 0; VEC_iterate (constructor_elt, elts, ix, ce); ix++)
3766 {
3767 enum gimplify_status tret;
3768 tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
3769 fb_rvalue);
3770 if (tret == GS_ERROR)
3771 ret = GS_ERROR;
3772 }
3773 if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
3774 TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
3775 }
3776 break;
3777
3778 default:
3779 /* So how did we get a CONSTRUCTOR for a scalar type? */
3780 gcc_unreachable ();
3781 }
3782
3783 if (ret == GS_ERROR)
3784 return GS_ERROR;
3785 else if (want_value)
3786 {
3787 *expr_p = object;
3788 return GS_OK;
3789 }
3790 else
3791 {
3792 /* If we have gimplified both sides of the initializer but have
3793 not emitted an assignment, do so now. */
3794 if (*expr_p)
3795 {
3796 tree lhs = TREE_OPERAND (*expr_p, 0);
3797 tree rhs = TREE_OPERAND (*expr_p, 1);
3798 gimple init = gimple_build_assign (lhs, rhs);
3799 gimplify_seq_add_stmt (pre_p, init);
3800 *expr_p = NULL;
3801 }
3802
3803 return GS_ALL_DONE;
3804 }
3805 }
3806
3807 /* Given a pointer value T, return a simplified version of an
3808 indirection through T, or NULL_TREE if no simplification is
3809 possible. Note that the resulting type may differ from the
3810 pointed-to type, but it will still be compatible from the
3811 langhooks point of view. */
3812
3813 tree
3814 gimple_fold_indirect_ref (tree t)
3815 {
3816 tree type = TREE_TYPE (TREE_TYPE (t));
3817 tree sub = t;
3818 tree subtype;
3819
3820 STRIP_USELESS_TYPE_CONVERSION (sub);
3821 subtype = TREE_TYPE (sub);
3822 if (!POINTER_TYPE_P (subtype))
3823 return NULL_TREE;
3824
3825 if (TREE_CODE (sub) == ADDR_EXPR)
3826 {
3827 tree op = TREE_OPERAND (sub, 0);
3828 tree optype = TREE_TYPE (op);
3829 /* *&p => p */
3830 if (useless_type_conversion_p (type, optype))
3831 return op;
3832
3833 /* *(foo *)&fooarray => fooarray[0] */
3834 if (TREE_CODE (optype) == ARRAY_TYPE
3835 && useless_type_conversion_p (type, TREE_TYPE (optype)))
3836 {
3837 tree type_domain = TYPE_DOMAIN (optype);
3838 tree min_val = size_zero_node;
3839 if (type_domain && TYPE_MIN_VALUE (type_domain))
3840 min_val = TYPE_MIN_VALUE (type_domain);
3841 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
3842 }
3843 }
3844
3845 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
3846 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
3847 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
3848 {
3849 tree type_domain;
3850 tree min_val = size_zero_node;
3851 tree osub = sub;
3852 sub = gimple_fold_indirect_ref (sub);
3853 if (! sub)
3854 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
3855 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
3856 if (type_domain && TYPE_MIN_VALUE (type_domain))
3857 min_val = TYPE_MIN_VALUE (type_domain);
3858 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
3859 }
3860
3861 return NULL_TREE;
3862 }
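
/* Illustrative sketch added for exposition (not part of the original
   source): at the C level the folds above roughly correspond to

     int a[4];
     int (*ap)[4] = &a;

     *(int *)&a  =>  a[0]        (the *(foo *)&fooarray case)
     *&a[2]      =>  a[2]        (the *&p => p case)
     *(int *)ap  =>  (*ap)[0]    (the *(foo *)fooarrptr case)

   The exact trees depend on the front end; this only shows the shapes
   the function recognizes. */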
3863
3864 /* Given a pointer value T, return a simplified version of an
3865 indirection through T, or NULL_TREE if no simplification is
3866 possible. This may only be applied to the RHS of an expression.
3867 Note that the resulting type may differ from the pointed-to
3868 type, but it will still be compatible from the langhooks
3869 point of view. */
3870
3871 static tree
3872 gimple_fold_indirect_ref_rhs (tree t)
3873 {
3874 return gimple_fold_indirect_ref (t);
3875 }
3876
3877 /* Subroutine of gimplify_modify_expr to do simplifications of
3878 MODIFY_EXPRs based on the code of the RHS. We loop for as long as
3879 something changes. */
3880
3881 static enum gimplify_status
3882 gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
3883 gimple_seq *pre_p, gimple_seq *post_p,
3884 bool want_value)
3885 {
3886 enum gimplify_status ret = GS_OK;
3887
3888 while (ret != GS_UNHANDLED)
3889 switch (TREE_CODE (*from_p))
3890 {
3891 case VAR_DECL:
3892 /* If we're assigning from a constant constructor, move the
3893 constructor expression to the RHS of the MODIFY_EXPR. */
3894 if (DECL_INITIAL (*from_p)
3895 && TREE_READONLY (*from_p)
3896 && !TREE_THIS_VOLATILE (*from_p)
3897 && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR)
3898 {
3899 tree old_from = *from_p;
3900
3901 /* Move the constructor into the RHS. */
3902 *from_p = unshare_expr (DECL_INITIAL (*from_p));
3903
3904 /* Let's see if gimplify_init_constructor will need to put
3905 it in memory. If so, revert the change. */
3906 ret = gimplify_init_constructor (expr_p, NULL, NULL, false, true);
3907 if (ret == GS_ERROR)
3908 {
3909 *from_p = old_from;
3910 /* Fall through. */
3911 }
3912 else
3913 {
3914 ret = GS_OK;
3915 break;
3916 }
3917 }
3918 ret = GS_UNHANDLED;
3919 break;
3920 case INDIRECT_REF:
3921 {
3922 /* If we have code like
3923
3924 *(const A*)(A*)&x
3925
3926 where the type of "x" is a (possibly cv-qualified variant
3927 of "A"), treat the entire expression as identical to "x".
3928 This kind of code arises in C++ when an object is bound
3929 to a const reference, and if "x" is a TARGET_EXPR we want
3930 to take advantage of the optimization below. */
3931 tree t = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
3932 if (t)
3933 {
3934 *from_p = t;
3935 ret = GS_OK;
3936 }
3937 else
3938 ret = GS_UNHANDLED;
3939 break;
3940 }
3941
3942 case TARGET_EXPR:
3943 {
3944 /* If we are initializing something from a TARGET_EXPR, strip the
3945 TARGET_EXPR and initialize it directly, if possible. This can't
3946 be done if the initializer is void, since that implies that the
3947 temporary is set in some non-trivial way.
3948
3949 ??? What about code that pulls out the temp and uses it
3950 elsewhere? I think that such code never uses the TARGET_EXPR as
3951 an initializer. If I'm wrong, we'll die because the temp won't
3952 have any RTL. In that case, I guess we'll need to replace
3953 references somehow. */
3954 tree init = TARGET_EXPR_INITIAL (*from_p);
3955
3956 if (init
3957 && !VOID_TYPE_P (TREE_TYPE (init)))
3958 {
3959 *from_p = init;
3960 ret = GS_OK;
3961 }
3962 else
3963 ret = GS_UNHANDLED;
3964 }
3965 break;
3966
3967 case COMPOUND_EXPR:
3968 /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
3969 caught. */
3970 gimplify_compound_expr (from_p, pre_p, true);
3971 ret = GS_OK;
3972 break;
3973
3974 case CONSTRUCTOR:
3975 /* If we're initializing from a CONSTRUCTOR, break this into
3976 individual MODIFY_EXPRs. */
3977 return gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
3978 false);
3979
3980 case COND_EXPR:
3981 /* If we're assigning to a non-register type, push the assignment
3982 down into the branches. This is mandatory for ADDRESSABLE types,
3983 since we cannot generate temporaries for such, but it saves a
3984 copy in other cases as well. */
3985 if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
3986 {
3987 /* This code should mirror the code in gimplify_cond_expr. */
3988 enum tree_code code = TREE_CODE (*expr_p);
3989 tree cond = *from_p;
3990 tree result = *to_p;
3991
3992 ret = gimplify_expr (&result, pre_p, post_p,
3993 is_gimple_lvalue, fb_lvalue);
3994 if (ret != GS_ERROR)
3995 ret = GS_OK;
3996
3997 if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
3998 TREE_OPERAND (cond, 1)
3999 = build2 (code, void_type_node, result,
4000 TREE_OPERAND (cond, 1));
4001 if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
4002 TREE_OPERAND (cond, 2)
4003 = build2 (code, void_type_node, unshare_expr (result),
4004 TREE_OPERAND (cond, 2));
4005
4006 TREE_TYPE (cond) = void_type_node;
4007 recalculate_side_effects (cond);
4008
4009 if (want_value)
4010 {
4011 gimplify_and_add (cond, pre_p);
4012 *expr_p = unshare_expr (result);
4013 }
4014 else
4015 *expr_p = cond;
4016 return ret;
4017 }
4018 else
4019 ret = GS_UNHANDLED;
4020 break;
4021
4022 case CALL_EXPR:
4023 /* For calls that return in memory, give *to_p as the CALL_EXPR's
4024 return slot so that we don't generate a temporary. */
4025 if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
4026 && aggregate_value_p (*from_p, *from_p))
4027 {
4028 bool use_target;
4029
4030 if (!(rhs_predicate_for (*to_p))(*from_p))
4031 /* If we need a temporary, *to_p isn't accurate. */
4032 use_target = false;
4033 else if (TREE_CODE (*to_p) == RESULT_DECL
4034 && DECL_NAME (*to_p) == NULL_TREE
4035 && needs_to_live_in_memory (*to_p))
4036 /* It's OK to use the return slot directly unless it's an NRV. */
4037 use_target = true;
4038 else if (is_gimple_reg_type (TREE_TYPE (*to_p))
4039 || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
4040 /* Don't force regs into memory. */
4041 use_target = false;
4042 else if (TREE_CODE (*to_p) == VAR_DECL
4043 && DECL_GIMPLE_FORMAL_TEMP_P (*to_p))
4044 /* Don't use the original target if it's a formal temp; we
4045 don't want to take its address. */
4046 use_target = false;
4047 else if (TREE_CODE (*expr_p) == INIT_EXPR)
4048 /* It's OK to use the target directly if it's being
4049 initialized. */
4050 use_target = true;
4051 else if (!is_gimple_non_addressable (*to_p))
4052 /* Don't use the original target if it's already addressable;
4053 if its address escapes, and the called function uses the
4054 NRV optimization, a conforming program could see *to_p
4055 change before the called function returns; see c++/19317.
4056 When optimizing, the return_slot pass marks more functions
4057 as safe after we have escape info. */
4058 use_target = false;
4059 else
4060 use_target = true;
4061
4062 if (use_target)
4063 {
4064 CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
4065 mark_addressable (*to_p);
4066 }
4067 }
4068
4069 ret = GS_UNHANDLED;
4070 break;
4071
4072 /* If we're initializing from a container, push the initialization
4073 inside it. */
4074 case CLEANUP_POINT_EXPR:
4075 case BIND_EXPR:
4076 case STATEMENT_LIST:
4077 {
4078 tree wrap = *from_p;
4079 tree t;
4080
4081 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
4082 fb_lvalue);
4083 if (ret != GS_ERROR)
4084 ret = GS_OK;
4085
4086 t = voidify_wrapper_expr (wrap, *expr_p);
4087 gcc_assert (t == *expr_p);
4088
4089 if (want_value)
4090 {
4091 gimplify_and_add (wrap, pre_p);
4092 *expr_p = unshare_expr (*to_p);
4093 }
4094 else
4095 *expr_p = wrap;
4096 return GS_OK;
4097 }
4098
4099 default:
4100 ret = GS_UNHANDLED;
4101 break;
4102 }
4103
4104 return ret;
4105 }
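
/* Illustrative sketch added for exposition (not part of the original
   source): two of the simplifications above, at the C level. For an
   aggregate (non-register) type, the COND_EXPR case pushes the
   assignment into the branches, so roughly

     s = b ? f () : g ();   becomes   if (b) s = f (); else s = g ();

   and the TARGET_EXPR case strips the temporary, so that

     s = TARGET_EXPR <D.1234, init>;   becomes   s = init;

   when the initializer is not void. The temporary name is made up. */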
4106
4107
4108 /* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
4109 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with
4110 DECL_GIMPLE_REG_P set.
4111
4112 IMPORTANT NOTE: This promotion is performed by introducing a load of the
4113 other, unmodified part of the complex object just before the total store.
4114 As a consequence, if the object is still uninitialized, an undefined value
4115 will be loaded into a register, which may result in a spurious exception
4116 if the register is floating-point and the value happens to be a signaling
4117 NaN for example. Then the fully-fledged complex operations lowering pass
4118 followed by a DCE pass are necessary in order to fix things up. */
4119
4120 static enum gimplify_status
4121 gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
4122 bool want_value)
4123 {
4124 enum tree_code code, ocode;
4125 tree lhs, rhs, new_rhs, other, realpart, imagpart;
4126
4127 lhs = TREE_OPERAND (*expr_p, 0);
4128 rhs = TREE_OPERAND (*expr_p, 1);
4129 code = TREE_CODE (lhs);
4130 lhs = TREE_OPERAND (lhs, 0);
4131
4132 ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
4133 other = build1 (ocode, TREE_TYPE (rhs), lhs);
4134 other = get_formal_tmp_var (other, pre_p);
4135
4136 realpart = code == REALPART_EXPR ? rhs : other;
4137 imagpart = code == REALPART_EXPR ? other : rhs;
4138
4139 if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
4140 new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
4141 else
4142 new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
4143
4144 gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
4145 *expr_p = (want_value) ? rhs : NULL_TREE;
4146
4147 return GS_ALL_DONE;
4148 }
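
/* Illustrative sketch added for exposition (not part of the original
   source): for a complex variable z with DECL_GIMPLE_REG_P set, the
   partial store

     __real__ z = x;

   is promoted to a total store along the lines of

     D.1234 = __imag__ z;
     z = COMPLEX_EXPR <x, D.1234>;

   so that z can be kept in a register. The temporary name is made up. */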
4149
4150
4151 /* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
4152
4153 modify_expr
4154 : varname '=' rhs
4155 | '*' ID '=' rhs
4156
4157 PRE_P points to the list where side effects that must happen before
4158 *EXPR_P should be stored.
4159
4160 POST_P points to the list where side effects that must happen after
4161 *EXPR_P should be stored.
4162
4163 WANT_VALUE is nonzero iff we want to use the value of this expression
4164 in another expression. */
4165
4166 static enum gimplify_status
4167 gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
4168 bool want_value)
4169 {
4170 tree *from_p = &TREE_OPERAND (*expr_p, 1);
4171 tree *to_p = &TREE_OPERAND (*expr_p, 0);
4172 enum gimplify_status ret = GS_UNHANDLED;
4173 gimple assign;
4174
4175 gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
4176 || TREE_CODE (*expr_p) == INIT_EXPR);
4177
4178 /* Insert pointer conversions required by the middle-end that are not
4179 required by the frontend. This fixes middle-end type checking,
4180 for example for gcc.dg/redecl-6.c. */
4181 if (POINTER_TYPE_P (TREE_TYPE (*to_p))
4182 && lang_hooks.types_compatible_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
4183 {
4184 STRIP_USELESS_TYPE_CONVERSION (*from_p);
4185 if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
4186 *from_p = fold_convert (TREE_TYPE (*to_p), *from_p);
4187 }
4188
4189 /* See if any simplifications can be done based on what the RHS is. */
4190 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
4191 want_value);
4192 if (ret != GS_UNHANDLED)
4193 return ret;
4194
4195 /* For zero-sized types, only gimplify the left-hand side and right-hand
4196 side as statements and throw away the assignment. Do this after
4197 gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
4198 types properly. */
4199 if (zero_sized_type (TREE_TYPE (*from_p)) && !want_value)
4200 {
4201 gimplify_stmt (from_p, pre_p);
4202 gimplify_stmt (to_p, pre_p);
4203 *expr_p = NULL_TREE;
4204 return GS_ALL_DONE;
4205 }
4206
4207 /* If the value being copied is of variable width, compute the length
4208 of the copy into a WITH_SIZE_EXPR. Note that we need to do this
4209 before gimplifying any of the operands so that we can resolve any
4210 PLACEHOLDER_EXPRs in the size. Also note that the RTL expander uses
4211 the size of the expression to be copied, not of the destination, so
4212 that is what we must do here. */
4213 maybe_with_size_expr (from_p);
4214
4215 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
4216 if (ret == GS_ERROR)
4217 return ret;
4218
4219 /* As a special case, we have to temporarily allow for assignments
4220 with a CALL_EXPR on the RHS. Since in GIMPLE a function call is
4221 a toplevel statement, when gimplifying the GENERIC expression
4222 MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
4223 GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.
4224
4225 Instead, we need to create the tuple GIMPLE_CALL <a, foo>. To
4226 prevent gimplify_expr from trying to create a new temporary for
4227 foo's LHS, we tell it that it should only gimplify until it
4228 reaches the CALL_EXPR. On return from gimplify_expr, the newly
4229 created GIMPLE_CALL <foo> will be the last statement in *PRE_P
4230 and all we need to do here is set 'a' to be its LHS. */
4231 ret = gimplify_expr (from_p, pre_p, post_p, rhs_predicate_for (*to_p),
4232 fb_rvalue);
4233 if (ret == GS_ERROR)
4234 return ret;
4235
4236 /* Now see if the above changed *from_p to something we handle specially. */
4237 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
4238 want_value);
4239 if (ret != GS_UNHANDLED)
4240 return ret;
4241
4242 /* If we've got a variable-sized assignment between two lvalues (i.e. one
4243 that does not involve a call), then we can make things a bit more
4244 straightforward by converting the assignment to memcpy or memset. */
4245 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
4246 {
4247 tree from = TREE_OPERAND (*from_p, 0);
4248 tree size = TREE_OPERAND (*from_p, 1);
4249
4250 if (TREE_CODE (from) == CONSTRUCTOR)
4251 return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);
4252
4253 if (is_gimple_addressable (from))
4254 {
4255 *from_p = from;
4256 return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
4257 pre_p);
4258 }
4259 }
4260
4261 /* Transform partial stores to non-addressable complex variables into
4262 total stores. This allows us to use real instead of virtual operands
4263 for these variables, which improves optimization. */
4264 if ((TREE_CODE (*to_p) == REALPART_EXPR
4265 || TREE_CODE (*to_p) == IMAGPART_EXPR)
4266 && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
4267 return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);
4268
4269 /* Try to alleviate the effects of the gimplification creating artificial
4270 temporaries (see for example is_gimple_reg_rhs) on the debug info. */
4271 if (!gimplify_ctxp->into_ssa
4272 && DECL_P (*from_p)
4273 && DECL_IGNORED_P (*from_p)
4274 && DECL_P (*to_p)
4275 && !DECL_IGNORED_P (*to_p))
4276 {
4277 if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
4278 DECL_NAME (*from_p)
4279 = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
4280 DECL_DEBUG_EXPR_IS_FROM (*from_p) = 1;
4281 SET_DECL_DEBUG_EXPR (*from_p, *to_p);
4282 }
4283
4284 if (TREE_CODE (*from_p) == CALL_EXPR)
4285 {
4286 /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
4287 instead of a GIMPLE_ASSIGN. */
4288 assign = gimple_build_call_from_tree (*from_p);
4289 gimple_call_set_lhs (assign, *to_p);
4290 }
4291 else
4292 assign = gimple_build_assign (*to_p, *from_p);
4293
4294 gimplify_seq_add_stmt (pre_p, assign);
4295
4296 if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
4297 {
4298 /* If we've somehow already got an SSA_NAME on the LHS, then
4299 we've probably modified it twice. Not good. */
4300 gcc_assert (TREE_CODE (*to_p) != SSA_NAME);
4301 *to_p = make_ssa_name (*to_p, assign);
4302 gimple_set_lhs (assign, *to_p);
4303 }
4304
4305 if (want_value)
4306 {
4307 *expr_p = unshare_expr (*to_p);
4308 return GS_OK;
4309 }
4310 else
4311 *expr_p = NULL;
4312
4313 return GS_ALL_DONE;
4314 }
4315
4316 /* Gimplify a comparison between two variable-sized objects. Do this
4317 with a call to BUILT_IN_MEMCMP. */
4318
4319 static enum gimplify_status
4320 gimplify_variable_sized_compare (tree *expr_p)
4321 {
4322 tree op0 = TREE_OPERAND (*expr_p, 0);
4323 tree op1 = TREE_OPERAND (*expr_p, 1);
4324 tree t, arg, dest, src;
4325
4326 arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
4327 arg = unshare_expr (arg);
4328 arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
4329 src = build_fold_addr_expr (op1);
4330 dest = build_fold_addr_expr (op0);
4331 t = implicit_built_in_decls[BUILT_IN_MEMCMP];
4332 t = build_call_expr (t, 3, dest, src, arg);
4333 *expr_p
4334 = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
4335
4336 return GS_OK;
4337 }
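
/* Illustrative sketch added for exposition (not part of the original
   source): a comparison OP0 == OP1 between two variable-sized objects
   is rewritten as roughly

     __builtin_memcmp (&OP0, &OP1, <size of OP0's type>) == 0

   (or != 0 for inequality), where the size expression is the
   TYPE_SIZE_UNIT of OP0's type with PLACEHOLDER_EXPRs substituted. */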
4338
4339 /* Gimplify a comparison between two aggregate objects of integral scalar
4340 mode as a comparison between the bitwise equivalent scalar values. */
4341
4342 static enum gimplify_status
4343 gimplify_scalar_mode_aggregate_compare (tree *expr_p)
4344 {
4345 tree op0 = TREE_OPERAND (*expr_p, 0);
4346 tree op1 = TREE_OPERAND (*expr_p, 1);
4347
4348 tree type = TREE_TYPE (op0);
4349 tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
4350
4351 op0 = fold_build1 (VIEW_CONVERT_EXPR, scalar_type, op0);
4352 op1 = fold_build1 (VIEW_CONVERT_EXPR, scalar_type, op1);
4353
4354 *expr_p
4355 = fold_build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
4356
4357 return GS_OK;
4358 }
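
/* Illustrative sketch added for exposition (not part of the original
   source): for two objects A and B of a 4-byte struct type whose mode
   is an integer mode, A == B becomes roughly

     VIEW_CONVERT_EXPR<unsigned int>(A) == VIEW_CONVERT_EXPR<unsigned int>(B)

   i.e. a bitwise comparison of the underlying scalar values. */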
4359
4360 /* Gimplify TRUTH_ANDIF_EXPR and TRUTH_ORIF_EXPR expressions. EXPR_P
4361 points to the expression to gimplify.
4362
4363 Expressions of the form 'a && b' are gimplified to:
4364
4365 a && b ? true : false
4366
4367 gimplify_cond_expr will do the rest.
4368
4369 PRE_P points to the list where side effects that must happen before
4370 *EXPR_P should be stored. */
4371
4372 static enum gimplify_status
4373 gimplify_boolean_expr (tree *expr_p)
4374 {
4375 /* Preserve the original type of the expression. */
4376 tree type = TREE_TYPE (*expr_p);
4377
4378 *expr_p = build3 (COND_EXPR, type, *expr_p,
4379 fold_convert (type, boolean_true_node),
4380 fold_convert (type, boolean_false_node));
4381
4382 return GS_OK;
4383 }
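
/* Illustrative sketch added for exposition (not part of the original
   source): for

     x = a && b;

   the boolean expression is rewritten to the equivalent of

     x = a && b ? 1 : 0;

   and gimplify_cond_expr then lowers that into conditional jumps and
   plain assignments of 1 and 0 to x. */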
4384
4385 /* Gimplifies an expression sequence. This function gimplifies each
4386 expression and re-writes the original expression with the last
4387 expression of the sequence in GIMPLE form.
4388
4389 PRE_P points to the list where the side effects for all the
4390 expressions in the sequence will be emitted.
4391
4392 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
4393
4394 static enum gimplify_status
4395 gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
4396 {
4397 tree t = *expr_p;
4398
4399 do
4400 {
4401 tree *sub_p = &TREE_OPERAND (t, 0);
4402
4403 if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
4404 gimplify_compound_expr (sub_p, pre_p, false);
4405 else
4406 gimplify_stmt (sub_p, pre_p);
4407
4408 t = TREE_OPERAND (t, 1);
4409 }
4410 while (TREE_CODE (t) == COMPOUND_EXPR);
4411
4412 *expr_p = t;
4413 if (want_value)
4414 return GS_OK;
4415 else
4416 {
4417 gimplify_stmt (expr_p, pre_p);
4418 return GS_ALL_DONE;
4419 }
4420 }
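
/* Illustrative sketch added for exposition (not part of the original
   source): for the sequence

     x = (f (), g (), y + 1);

   f () and g () are gimplified as statements into *PRE_P and the
   COMPOUND_EXPR is replaced by its last operand, leaving roughly

     f ();
     g ();
     x = y + 1; */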
4421
4422
4423 /* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
4424 gimplify. After gimplification, EXPR_P will point to a new temporary
4425 that holds the original value of the SAVE_EXPR node.
4426
4427 PRE_P points to the list where side effects that must happen before
4428 *EXPR_P should be stored. */
4429
4430 static enum gimplify_status
4431 gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4432 {
4433 enum gimplify_status ret = GS_ALL_DONE;
4434 tree val;
4435
4436 gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
4437 val = TREE_OPERAND (*expr_p, 0);
4438
4439 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
4440 if (!SAVE_EXPR_RESOLVED_P (*expr_p))
4441 {
4442 /* The operand may be a void-valued expression, such as the SAVE_EXPRs
4443 generated by the Java frontend for class initialization. It is
4444 being executed only for its side-effects. */
4445 if (TREE_TYPE (val) == void_type_node)
4446 {
4447 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
4448 is_gimple_stmt, fb_none);
4449 val = NULL;
4450 }
4451 else
4452 val = get_initialized_tmp_var (val, pre_p, post_p);
4453
4454 TREE_OPERAND (*expr_p, 0) = val;
4455 SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
4456 }
4457
4458 *expr_p = val;
4459
4460 return ret;
4461 }
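
/* Illustrative sketch added for exposition (not part of the original
   source): for SAVE_EXPR <n * 4>, the first gimplification emits
   something like

     D.1235 = n * 4;

   rewrites the operand to D.1235 and marks the SAVE_EXPR resolved, so
   later occurrences reuse the same temporary. The name is made up. */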
4462
4463 /* Re-write the ADDR_EXPR node pointed to by EXPR_P
4464
4465 unary_expr
4466 : ...
4467 | '&' varname
4468 ...
4469
4470 PRE_P points to the list where side effects that must happen before
4471 *EXPR_P should be stored.
4472
4473 POST_P points to the list where side effects that must happen after
4474 *EXPR_P should be stored. */
4475
4476 static enum gimplify_status
4477 gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4478 {
4479 tree expr = *expr_p;
4480 tree op0 = TREE_OPERAND (expr, 0);
4481 enum gimplify_status ret;
4482
4483 switch (TREE_CODE (op0))
4484 {
4485 case INDIRECT_REF:
4486 case MISALIGNED_INDIRECT_REF:
4487 do_indirect_ref:
4488 /* Check if we are dealing with an expression of the form '&*ptr'.
4489 While the front end folds away '&*ptr' into 'ptr', these
4490 expressions may be generated internally by the compiler (e.g.,
4491 builtins like __builtin_va_end). */
4492 /* Caution: the silent array decomposition semantics we allow for
4493 ADDR_EXPR means we can't always discard the pair. */
4494 /* Gimplification of the ADDR_EXPR operand may drop
4495 cv-qualification conversions, so make sure we add them if
4496 needed. */
4497 {
4498 tree op00 = TREE_OPERAND (op0, 0);
4499 tree t_expr = TREE_TYPE (expr);
4500 tree t_op00 = TREE_TYPE (op00);
4501
4502 if (!useless_type_conversion_p (t_expr, t_op00))
4503 op00 = fold_convert (TREE_TYPE (expr), op00);
4504 *expr_p = op00;
4505 ret = GS_OK;
4506 }
4507 break;
4508
4509 case VIEW_CONVERT_EXPR:
4510 /* Take the address of our operand and then convert it to the type of
4511 this ADDR_EXPR.
4512
4513 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
4514 all clear. The impact of this transformation is even less clear. */
4515
4516 /* If the operand is a useless conversion, look through it. Doing so
4517 guarantees that the ADDR_EXPR and its operand will remain of the
4518 same type. */
4519 if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
4520 op0 = TREE_OPERAND (op0, 0);
4521
4522 *expr_p = fold_convert (TREE_TYPE (expr),
4523 build_fold_addr_expr (TREE_OPERAND (op0, 0)));
4524 ret = GS_OK;
4525 break;
4526
4527 default:
4528 /* We use fb_either here because the C frontend sometimes takes
4529 the address of a call that returns a struct; see
4530 gcc.dg/c99-array-lval-1.c. The gimplifier will correctly make
4531 the implied temporary explicit. */
4532
4533 /* Mark the RHS addressable. */
4534 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
4535 is_gimple_addressable, fb_either);
4536 if (ret == GS_ERROR)
4537 break;
4538
4539 /* We cannot rely on making the RHS addressable if it is
4540 a temporary created by gimplification. In this case create a
4541 new temporary that is initialized by a copy (which will
4542 become a store after we mark it addressable).
4543 This mostly happens if the frontend passed us something that
4544 it could not mark addressable yet, like a Fortran
4545 pass-by-reference parameter (int) floatvar. */
4546 if (is_gimple_formal_tmp_var (TREE_OPERAND (expr, 0)))
4547 TREE_OPERAND (expr, 0)
4548 = get_initialized_tmp_var (TREE_OPERAND (expr, 0), pre_p, post_p);
4549
4550 op0 = TREE_OPERAND (expr, 0);
4551
4552 /* For various reasons, the gimplification of the expression
4553 may have made a new INDIRECT_REF. */
4554 if (TREE_CODE (op0) == INDIRECT_REF)
4555 goto do_indirect_ref;
4556
4557 /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly. */
4558 recompute_tree_invariant_for_addr_expr (expr);
4559
4560 mark_addressable (TREE_OPERAND (expr, 0));
4561 break;
4562 }
4563
4564 return ret;
4565 }
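
/* Illustrative sketch added for exposition (not part of the original
   source): internally generated '&*ptr' pairs collapse back to the
   pointer, with a conversion added when the types differ only by
   qualification, e.g.

     &*(const int *) p   =>   (const int *) p

   while in the default case the operand is gimplified to something
   addressable and then marked TREE_ADDRESSABLE. */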
4566
4567 /* Gimplify the operands of an ASM_EXPR. Input operands should be a gimple
4568 value; output operands should be a gimple lvalue. */
4569
4570 static enum gimplify_status
4571 gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4572 {
4573 tree expr;
4574 int noutputs;
4575 const char **oconstraints;
4576 int i;
4577 tree link;
4578 const char *constraint;
4579 bool allows_mem, allows_reg, is_inout;
4580 enum gimplify_status ret, tret;
4581 gimple stmt;
4582 VEC(tree, gc) *inputs;
4583 VEC(tree, gc) *outputs;
4584 VEC(tree, gc) *clobbers;
4585 tree link_next;
4586
4587 expr = *expr_p;
4588 noutputs = list_length (ASM_OUTPUTS (expr));
4589 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
4590
4591 inputs = outputs = clobbers = NULL;
4592
4593 ret = GS_ALL_DONE;
4594 link_next = NULL_TREE;
4595 for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
4596 {
4597 bool ok;
4598 size_t constraint_len;
4599
4600 link_next = TREE_CHAIN (link);
4601
4602 oconstraints[i]
4603 = constraint
4604 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4605 constraint_len = strlen (constraint);
4606 if (constraint_len == 0)
4607 continue;
4608
4609 ok = parse_output_constraint (&constraint, i, 0, 0,
4610 &allows_mem, &allows_reg, &is_inout);
4611 if (!ok)
4612 {
4613 ret = GS_ERROR;
4614 is_inout = false;
4615 }
4616
4617 if (!allows_reg && allows_mem)
4618 mark_addressable (TREE_VALUE (link));
4619
4620 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
4621 is_inout ? is_gimple_min_lval : is_gimple_lvalue,
4622 fb_lvalue | fb_mayfail);
4623 if (tret == GS_ERROR)
4624 {
4625 error ("invalid lvalue in asm output %d", i);
4626 ret = tret;
4627 }
4628
4629 VEC_safe_push (tree, gc, outputs, link);
4630 TREE_CHAIN (link) = NULL_TREE;
4631
4632 if (is_inout)
4633 {
4634 /* An input/output operand. To give the optimizers more
4635 flexibility, split it into separate input and output
4636 operands. */
4637 tree input;
4638 char buf[10];
4639
4640 /* Turn the in/out constraint into an output constraint. */
4641 char *p = xstrdup (constraint);
4642 p[0] = '=';
4643 TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);
4644
4645 /* And add a matching input constraint. */
4646 if (allows_reg)
4647 {
4648 sprintf (buf, "%d", i);
4649
4650 /* If there are multiple alternatives in the constraint,
4651 handle each of them individually. Those that allow a register
4652 will be replaced with the operand number; the others will stay
4653 unchanged. */
4654 if (strchr (p, ',') != NULL)
4655 {
4656 size_t len = 0, buflen = strlen (buf);
4657 char *beg, *end, *str, *dst;
4658
4659 for (beg = p + 1;;)
4660 {
4661 end = strchr (beg, ',');
4662 if (end == NULL)
4663 end = strchr (beg, '\0');
4664 if ((size_t) (end - beg) < buflen)
4665 len += buflen + 1;
4666 else
4667 len += end - beg + 1;
4668 if (*end)
4669 beg = end + 1;
4670 else
4671 break;
4672 }
4673
4674 str = (char *) alloca (len);
4675 for (beg = p + 1, dst = str;;)
4676 {
4677 const char *tem;
4678 bool mem_p, reg_p, inout_p;
4679
4680 end = strchr (beg, ',');
4681 if (end)
4682 *end = '\0';
4683 beg[-1] = '=';
4684 tem = beg - 1;
4685 parse_output_constraint (&tem, i, 0, 0,
4686 &mem_p, &reg_p, &inout_p);
4687 if (dst != str)
4688 *dst++ = ',';
4689 if (reg_p)
4690 {
4691 memcpy (dst, buf, buflen);
4692 dst += buflen;
4693 }
4694 else
4695 {
4696 if (end)
4697 len = end - beg;
4698 else
4699 len = strlen (beg);
4700 memcpy (dst, beg, len);
4701 dst += len;
4702 }
4703 if (end)
4704 beg = end + 1;
4705 else
4706 break;
4707 }
4708 *dst = '\0';
4709 input = build_string (dst - str, str);
4710 }
4711 else
4712 input = build_string (strlen (buf), buf);
4713 }
4714 else
4715 input = build_string (constraint_len - 1, constraint + 1);
4716
4717 free (p);
4718
4719 input = build_tree_list (build_tree_list (NULL_TREE, input),
4720 unshare_expr (TREE_VALUE (link)));
4721 ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
4722 }
4723 }
4724
4725 link_next = NULL_TREE;
4726 for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
4727 {
4728 link_next = TREE_CHAIN (link);
4729 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4730 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
4731 oconstraints, &allows_mem, &allows_reg);
4732
4733 /* If we can't make copies, we can only accept memory. */
4734 if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (link))))
4735 {
4736 if (allows_mem)
4737 allows_reg = 0;
4738 else
4739 {
4740 error ("impossible constraint in %<asm%>");
4741 error ("non-memory input %d must stay in memory", i);
4742 return GS_ERROR;
4743 }
4744 }
4745
4746 /* If the operand is a memory input, it should be an lvalue. */
4747 if (!allows_reg && allows_mem)
4748 {
4749 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
4750 is_gimple_lvalue, fb_lvalue | fb_mayfail);
4751 mark_addressable (TREE_VALUE (link));
4752 if (tret == GS_ERROR)
4753 {
4754 if (EXPR_HAS_LOCATION (TREE_VALUE (link)))
4755 input_location = EXPR_LOCATION (TREE_VALUE (link));
4756 error ("memory input %d is not directly addressable", i);
4757 ret = tret;
4758 }
4759 }
4760 else
4761 {
4762 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
4763 is_gimple_asm_val, fb_rvalue);
4764 if (tret == GS_ERROR)
4765 ret = tret;
4766 }
4767
4768 TREE_CHAIN (link) = NULL_TREE;
4769 VEC_safe_push (tree, gc, inputs, link);
4770 }
4771
4772 for (link = ASM_CLOBBERS (expr); link; ++i, link = TREE_CHAIN (link))
4773 VEC_safe_push (tree, gc, clobbers, link);
4774
4775 stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
4776 inputs, outputs, clobbers);
4777
4778 gimple_asm_set_volatile (stmt, ASM_VOLATILE_P (expr));
4779 gimple_asm_set_input (stmt, ASM_INPUT_P (expr));
4780
4781 gimplify_seq_add_stmt (pre_p, stmt);
4782
4783 return ret;
4784 }
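
/* Illustrative sketch added for exposition (not part of the original
   source): an in/out asm operand is split into a separate output and a
   matching input, so that roughly

     asm ("incl %0" : "+r" (x));

   becomes

     asm ("incl %0" : "=r" (x) : "0" (x));

   which gives the optimizers independent output and input operands. */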
4785
4786 /* Gimplify a CLEANUP_POINT_EXPR. Currently this works by adding
4787 GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
4788 gimplifying the body, and converting them to GIMPLE_TRY statements when
4789 we return to this function.
4790
4791 FIXME should we complexify the prequeue handling instead? Or use flags
4792 for all the cleanups and let the optimizer tighten them up? The current
4793 code seems pretty fragile; it will break on a cleanup within any
4794 non-conditional nesting. But any such nesting would be broken, anyway;
4795 we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
4796 and continues out of it. We can do that at the RTL level, though, so
4797 having an optimizer to tighten up try/finally regions would be a Good
4798 Thing. */
4799
4800 static enum gimplify_status
4801 gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
4802 {
4803 gimple_stmt_iterator iter;
4804 gimple_seq body_sequence = NULL;
4805
4806 tree temp = voidify_wrapper_expr (*expr_p, NULL);
4807
4808 /* We only care about the number of conditions between the innermost
4809 CLEANUP_POINT_EXPR and the cleanup. So save and reset the count and
4810 any cleanups collected outside the CLEANUP_POINT_EXPR. */
4811 int old_conds = gimplify_ctxp->conditions;
4812 gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
4813 gimplify_ctxp->conditions = 0;
4814 gimplify_ctxp->conditional_cleanups = NULL;
4815
4816 gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);
4817
4818 gimplify_ctxp->conditions = old_conds;
4819 gimplify_ctxp->conditional_cleanups = old_cleanups;
4820
4821 for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
4822 {
4823 gimple wce = gsi_stmt (iter);
4824
4825 if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
4826 {
4827 if (gsi_one_before_end_p (iter))
4828 {
4829 /* Note that gsi_insert_seq_before and gsi_remove do not
4830 scan operands, unlike some other sequence mutators. */
4831 gsi_insert_seq_before_without_update (&iter,
4832 gimple_wce_cleanup (wce),
4833 GSI_SAME_STMT);
4834 gsi_remove (&iter, true);
4835 break;
4836 }
4837 else
4838 {
4839 gimple gtry;
4840 gimple_seq seq;
4841 enum gimple_try_flags kind;
4842
4843 if (gimple_wce_cleanup_eh_only (wce))
4844 kind = GIMPLE_TRY_CATCH;
4845 else
4846 kind = GIMPLE_TRY_FINALLY;
4847 seq = gsi_split_seq_after (iter);
4848
4849 gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
4850 /* Do not use gsi_replace here, as it may scan operands.
4851 We want to do a simple structural modification only. */
4852 *gsi_stmt_ptr (&iter) = gtry;
4853 iter = gsi_start (seq);
4854 }
4855 }
4856 else
4857 gsi_next (&iter);
4858 }
4859
4860 gimplify_seq_add_seq (pre_p, body_sequence);
4861 if (temp)
4862 {
4863 *expr_p = temp;
4864 return GS_OK;
4865 }
4866 else
4867 {
4868 *expr_p = NULL;
4869 return GS_ALL_DONE;
4870 }
4871 }
4872
4873 /* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP
4874 is the cleanup action required. EH_ONLY is true if the cleanup should
4875 only be executed if an exception is thrown, not on normal exit. */
4876
4877 static void
4878 gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p)
4879 {
4880 gimple wce;
4881 gimple_seq cleanup_stmts = NULL;
4882
4883 /* Errors can result in improperly nested cleanups, which results in
4884 confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR. */
4885 if (errorcount || sorrycount)
4886 return;
4887
4888 if (gimple_conditional_context ())
4889 {
4890 /* If we're in a conditional context, this is more complex. We only
4891 want to run the cleanup if we actually ran the initialization that
4892 necessitates it, but we want to run it after the end of the
4893 conditional context. So we wrap the try/finally around the
4894 condition and use a flag to determine whether or not to actually
4895 run the destructor. Thus
4896
4897 test ? f(A()) : 0
4898
4899 becomes (approximately)
4900
4901 flag = 0;
4902 try {
4903 if (test) { A::A(temp); flag = 1; val = f(temp); }
4904 else { val = 0; }
4905 } finally {
4906 if (flag) A::~A(temp);
4907 }
4908 val
4909 */
4910 tree flag = create_tmp_var (boolean_type_node, "cleanup");
4911 gimple ffalse = gimple_build_assign (flag, boolean_false_node);
4912 gimple ftrue = gimple_build_assign (flag, boolean_true_node);
4913
4914 cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
4915 gimplify_stmt (&cleanup, &cleanup_stmts);
4916 wce = gimple_build_wce (cleanup_stmts);
4917
4918 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
4919 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
4920 gimplify_seq_add_stmt (pre_p, ftrue);
4921
4922 /* Because of this manipulation, and the EH edges that jump
4923 threading cannot redirect, the temporary (VAR) will appear
4924 to be used uninitialized. Don't warn. */
4925 TREE_NO_WARNING (var) = 1;
4926 }
4927 else
4928 {
4929 gimplify_stmt (&cleanup, &cleanup_stmts);
4930 wce = gimple_build_wce (cleanup_stmts);
4931 gimple_wce_set_cleanup_eh_only (wce, eh_only);
4932 gimplify_seq_add_stmt (pre_p, wce);
4933 }
4934 }
4935
4936 /* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */
4937
4938 static enum gimplify_status
4939 gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4940 {
4941 tree targ = *expr_p;
4942 tree temp = TARGET_EXPR_SLOT (targ);
4943 tree init = TARGET_EXPR_INITIAL (targ);
4944 enum gimplify_status ret;
4945
4946 if (init)
4947 {
4948 /* TARGET_EXPR temps aren't part of the enclosing block, so add the temp
4949 to the temps list. Also handle variable-length TARGET_EXPRs. */
4950 if (TREE_CODE (DECL_SIZE (temp)) != INTEGER_CST)
4951 {
4952 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
4953 gimplify_type_sizes (TREE_TYPE (temp), pre_p);
4954 gimplify_vla_decl (temp, pre_p);
4955 }
4956 else
4957 gimple_add_tmp_var (temp);
4958
4959 /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
4960 expression is supposed to initialize the slot. */
4961 if (VOID_TYPE_P (TREE_TYPE (init)))
4962 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
4963 else
4964 {
4965 tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
4966 init = init_expr;
4967 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
4968 init = NULL;
4969 ggc_free (init_expr);
4970 }
4971 if (ret == GS_ERROR)
4972 {
4973 /* PR c++/28266 Make sure this is expanded only once. */
4974 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
4975 return GS_ERROR;
4976 }
4977 if (init)
4978 gimplify_and_add (init, pre_p);
4979
4980 /* If needed, push the cleanup for the temp. */
4981 if (TARGET_EXPR_CLEANUP (targ))
4982 gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
4983 CLEANUP_EH_ONLY (targ), pre_p);
4984
4985 /* Only expand this once. */
4986 TREE_OPERAND (targ, 3) = init;
4987 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
4988 }
4989 else
4990 /* We should have expanded this before. */
4991 gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));
4992
4993 *expr_p = temp;
4994 return GS_OK;
4995 }
4996
4997 /* Gimplification of expression trees. */
4998
4999 /* Gimplify an expression which appears in statement context. The
5000 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is
5001 NULL, a new sequence is allocated.
5002
5003 Return true if we actually added a statement to the queue. */
5004
5005 bool
5006 gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
5007 {
5008 gimple_seq_node last;
5009
5010 if (!*seq_p)
5011 *seq_p = gimple_seq_alloc ();
5012
5013 last = gimple_seq_last (*seq_p);
5014 gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
5015 return last != gimple_seq_last (*seq_p);
5016 }
5017
5018
5019 /* Add FIRSTPRIVATE entries for DECL in the OpenMP parallels surrounding
5020 CTX. If entries already exist, force them to be some flavor of private.
5021 If there is no enclosing parallel, do nothing. */
5022
5023 void
5024 omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
5025 {
5026 splay_tree_node n;
5027
5028 if (decl == NULL || !DECL_P (decl))
5029 return;
5030
5031 do
5032 {
5033 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5034 if (n != NULL)
5035 {
5036 if (n->value & GOVD_SHARED)
5037 n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
5038 else
5039 return;
5040 }
5041 else if (ctx->region_type != ORT_WORKSHARE)
5042 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
5043
5044 ctx = ctx->outer_context;
5045 }
5046 while (ctx);
5047 }
5048
5049 /* Similarly for each of the type sizes of TYPE. */
5050
5051 static void
5052 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
5053 {
5054 if (type == NULL || type == error_mark_node)
5055 return;
5056 type = TYPE_MAIN_VARIANT (type);
5057
5058 if (pointer_set_insert (ctx->privatized_types, type))
5059 return;
5060
5061 switch (TREE_CODE (type))
5062 {
5063 case INTEGER_TYPE:
5064 case ENUMERAL_TYPE:
5065 case BOOLEAN_TYPE:
5066 case REAL_TYPE:
5067 case FIXED_POINT_TYPE:
5068 omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
5069 omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
5070 break;
5071
5072 case ARRAY_TYPE:
5073 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
5074 omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
5075 break;
5076
5077 case RECORD_TYPE:
5078 case UNION_TYPE:
5079 case QUAL_UNION_TYPE:
5080 {
5081 tree field;
5082 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
5083 if (TREE_CODE (field) == FIELD_DECL)
5084 {
5085 omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
5086 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
5087 }
5088 }
5089 break;
5090
5091 case POINTER_TYPE:
5092 case REFERENCE_TYPE:
5093 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
5094 break;
5095
5096 default:
5097 break;
5098 }
5099
5100 omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
5101 omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
5102 lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
5103 }
5104
5105 /* Add an entry for DECL in the OpenMP context CTX with FLAGS. */
5106
5107 static void
5108 omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
5109 {
5110 splay_tree_node n;
5111 unsigned int nflags;
5112 tree t;
5113
5114 if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node)
5115 return;
5116
5117 /* Never elide decls whose type has TREE_ADDRESSABLE set. This means
5118 there are constructors involved somewhere. */
5119 if (TREE_ADDRESSABLE (TREE_TYPE (decl))
5120 || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl)))
5121 flags |= GOVD_SEEN;
5122
5123 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5124 if (n != NULL)
5125 {
5126 /* We shouldn't be re-adding the decl with the same data
5127 sharing class. */
5128 gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
5129 /* The only combination of data sharing classes we should see is
5130 FIRSTPRIVATE and LASTPRIVATE. */
5131 nflags = n->value | flags;
5132 gcc_assert ((nflags & GOVD_DATA_SHARE_CLASS)
5133 == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE));
5134 n->value = nflags;
5135 return;
5136 }
5137
5138 /* When adding a variable-sized variable, we have to handle all sorts
5139 of additional bits of data: the pointer replacement variable, and
5140 the parameters of the type. */
5141 if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
5142 {
5143 /* Add the pointer replacement variable as PRIVATE if the variable
5144 replacement is private, else FIRSTPRIVATE since we'll need the
5145 address of the original variable either for SHARED, or for the
5146 copy into or out of the context. */
5147 if (!(flags & GOVD_LOCAL))
5148 {
5149 nflags = flags & GOVD_PRIVATE ? GOVD_PRIVATE : GOVD_FIRSTPRIVATE;
5150 nflags |= flags & GOVD_SEEN;
5151 t = DECL_VALUE_EXPR (decl);
5152 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
5153 t = TREE_OPERAND (t, 0);
5154 gcc_assert (DECL_P (t));
5155 omp_add_variable (ctx, t, nflags);
5156 }
5157
5158 /* Add all of the variable and type parameters (which should have
5159 been gimplified to a formal temporary) as FIRSTPRIVATE. */
5160 omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
5161 omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
5162 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
5163
5164 /* The variable-sized variable itself is never SHARED, only some form
5165 of PRIVATE. The sharing would take place via the pointer variable
5166 which we remapped above. */
5167 if (flags & GOVD_SHARED)
5168 flags = GOVD_PRIVATE | GOVD_DEBUG_PRIVATE
5169 | (flags & (GOVD_SEEN | GOVD_EXPLICIT));
5170
5171 /* We're going to make use of the TYPE_SIZE_UNIT at least in the
5172 alloca statement we generate for the variable, so make sure it
5173 is available. This isn't automatically needed for the SHARED
5174 case, since we won't be allocating local storage then.
5175 For local variables TYPE_SIZE_UNIT might not be gimplified yet;
5176 in that case omp_notice_variable will be called later
5177 on when it is gimplified. */
5178 else if (! (flags & GOVD_LOCAL))
5179 omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
5180 }
5181 else if (lang_hooks.decls.omp_privatize_by_reference (decl))
5182 {
5183 gcc_assert ((flags & GOVD_LOCAL) == 0);
5184 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
5185
5186 /* Similar to the direct variable sized case above, we'll need the
5187 size of references being privatized. */
5188 if ((flags & GOVD_SHARED) == 0)
5189 {
5190 t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
5191 if (TREE_CODE (t) != INTEGER_CST)
5192 omp_notice_variable (ctx, t, true);
5193 }
5194 }
5195
5196 splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
5197 }
5198
5199 /* Record the fact that DECL was used within the OpenMP context CTX.
5200 IN_CODE is true when real code uses DECL, and false when we should
5201 merely emit default(none) errors. Return true if DECL is going to
5202 be remapped and thus DECL shouldn't be gimplified into its
5203 DECL_VALUE_EXPR (if any). */
5204
5205 static bool
5206 omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
5207 {
5208 splay_tree_node n;
5209 unsigned flags = in_code ? GOVD_SEEN : 0;
5210 bool ret = false, shared;
5211
5212 if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node)
5213 return false;
5214
5215 /* Threadprivate variables are predetermined. */
5216 if (is_global_var (decl))
5217 {
5218 if (DECL_THREAD_LOCAL_P (decl))
5219 return false;
5220
5221 if (DECL_HAS_VALUE_EXPR_P (decl))
5222 {
5223 tree value = get_base_address (DECL_VALUE_EXPR (decl));
5224
5225 if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
5226 return false;
5227 }
5228 }
5229
5230 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5231 if (n == NULL)
5232 {
5233 enum omp_clause_default_kind default_kind, kind;
5234 struct gimplify_omp_ctx *octx;
5235
5236 if (ctx->region_type == ORT_WORKSHARE)
5237 goto do_outer;
5238
5239 /* ??? Some compiler-generated variables (like SAVE_EXPRs) could be
5240 remapped firstprivate instead of shared. To some extent this is
5241 addressed in omp_firstprivatize_type_sizes, but not effectively. */
5242 default_kind = ctx->default_kind;
5243 kind = lang_hooks.decls.omp_predetermined_sharing (decl);
5244 if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
5245 default_kind = kind;
5246
5247 switch (default_kind)
5248 {
5249 case OMP_CLAUSE_DEFAULT_NONE:
5250 error ("%qs not specified in enclosing parallel",
5251 IDENTIFIER_POINTER (DECL_NAME (decl)));
5252 error ("%Henclosing parallel", &ctx->location);
5253 /* FALLTHRU */
5254 case OMP_CLAUSE_DEFAULT_SHARED:
5255 flags |= GOVD_SHARED;
5256 break;
5257 case OMP_CLAUSE_DEFAULT_PRIVATE:
5258 flags |= GOVD_PRIVATE;
5259 break;
5260 case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
5261 flags |= GOVD_FIRSTPRIVATE;
5262 break;
5263 case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
5264 /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED. */
5265 gcc_assert (ctx->region_type == ORT_TASK);
5266 if (ctx->outer_context)
5267 omp_notice_variable (ctx->outer_context, decl, in_code);
5268 for (octx = ctx->outer_context; octx; octx = octx->outer_context)
5269 {
5270 splay_tree_node n2;
5271
5272 n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
5273 if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
5274 {
5275 flags |= GOVD_FIRSTPRIVATE;
5276 break;
5277 }
5278 if ((octx->region_type & ORT_PARALLEL) != 0)
5279 break;
5280 }
5281 if (flags & GOVD_FIRSTPRIVATE)
5282 break;
5283 if (octx == NULL
5284 && (TREE_CODE (decl) == PARM_DECL
5285 || (!is_global_var (decl)
5286 && DECL_CONTEXT (decl) == current_function_decl)))
5287 {
5288 flags |= GOVD_FIRSTPRIVATE;
5289 break;
5290 }
5291 flags |= GOVD_SHARED;
5292 break;
5293 default:
5294 gcc_unreachable ();
5295 }
5296
5297 if ((flags & GOVD_PRIVATE)
5298 && lang_hooks.decls.omp_private_outer_ref (decl))
5299 flags |= GOVD_PRIVATE_OUTER_REF;
5300
5301 omp_add_variable (ctx, decl, flags);
5302
5303 shared = (flags & GOVD_SHARED) != 0;
5304 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
5305 goto do_outer;
5306 }
5307
5308 shared = ((flags | n->value) & GOVD_SHARED) != 0;
5309 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
5310
5311 /* If nothing changed, there's nothing left to do. */
5312 if ((n->value & flags) == flags)
5313 return ret;
5314 flags |= n->value;
5315 n->value = flags;
5316
5317 do_outer:
5318 /* If the variable is private in the current context, then we don't
5319 need to propagate anything to an outer context. */
5320 if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
5321 return ret;
5322 if (ctx->outer_context
5323 && omp_notice_variable (ctx->outer_context, decl, in_code))
5324 return true;
5325 return ret;
5326 }
5327
5328 /* Verify that DECL is private within CTX. If there's specific information
5329 to the contrary in the innermost scope, generate an error. */
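/* For illustration (not part of the original sources): with a combined
   construct such as

     #pragma omp parallel for firstprivate (i)
     for (i = 0; i < n; i++)
       ...

   the front end typically splits the clauses so that firstprivate (i) ends
   up on the enclosing parallel context.  The ORT_COMBINED_PARALLEL test
   below still lets this function spot the conflict and report that the
   iteration variable should not be firstprivate.  */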
5330
5331 static bool
5332 omp_is_private (struct gimplify_omp_ctx *ctx, tree decl)
5333 {
5334 splay_tree_node n;
5335
5336 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5337 if (n != NULL)
5338 {
5339 if (n->value & GOVD_SHARED)
5340 {
5341 if (ctx == gimplify_omp_ctxp)
5342 {
5343 error ("iteration variable %qs should be private",
5344 IDENTIFIER_POINTER (DECL_NAME (decl)));
5345 n->value = GOVD_PRIVATE;
5346 return true;
5347 }
5348 else
5349 return false;
5350 }
5351 else if ((n->value & GOVD_EXPLICIT) != 0
5352 && (ctx == gimplify_omp_ctxp
5353 || (ctx->region_type == ORT_COMBINED_PARALLEL
5354 && gimplify_omp_ctxp->outer_context == ctx)))
5355 {
5356 if ((n->value & GOVD_FIRSTPRIVATE) != 0)
5357 error ("iteration variable %qs should not be firstprivate",
5358 IDENTIFIER_POINTER (DECL_NAME (decl)));
5359 else if ((n->value & GOVD_REDUCTION) != 0)
5360 error ("iteration variable %qs should not be reduction",
5361 IDENTIFIER_POINTER (DECL_NAME (decl)));
5362 }
5363 return (ctx == gimplify_omp_ctxp
5364 || (ctx->region_type == ORT_COMBINED_PARALLEL
5365 && gimplify_omp_ctxp->outer_context == ctx));
5366 }
5367
5368 if (ctx->region_type != ORT_WORKSHARE)
5369 return false;
5370 else if (ctx->outer_context)
5371 return omp_is_private (ctx->outer_context, decl);
5372 return false;
5373 }
5374
5375 /* Return true if DECL is private within a parallel region
5376 that binds to the current construct's context or in parallel
5377 region's REDUCTION clause. */
5378
5379 static bool
5380 omp_check_private (struct gimplify_omp_ctx *ctx, tree decl)
5381 {
5382 splay_tree_node n;
5383
5384 do
5385 {
5386 ctx = ctx->outer_context;
5387 if (ctx == NULL)
5388 return !(is_global_var (decl)
5389 /* References might be private, but might be shared too. */
5390 || lang_hooks.decls.omp_privatize_by_reference (decl));
5391
5392 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
5393 if (n != NULL)
5394 return (n->value & GOVD_SHARED) == 0;
5395 }
5396 while (ctx->region_type == ORT_WORKSHARE);
5397 return false;
5398 }
5399
5400 /* Scan the OpenMP clauses in *LIST_P, installing mappings into a new
5401 and previous omp contexts. */
5402
5403 static void
5404 gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
5405 enum omp_region_type region_type)
5406 {
5407 struct gimplify_omp_ctx *ctx, *outer_ctx;
5408 struct gimplify_ctx gctx;
5409 tree c;
5410
5411 ctx = new_omp_context (region_type);
5412 outer_ctx = ctx->outer_context;
5413
5414 while ((c = *list_p) != NULL)
5415 {
5416 bool remove = false;
5417 bool notice_outer = true;
5418 const char *check_non_private = NULL;
5419 unsigned int flags;
5420 tree decl;
5421
5422 switch (OMP_CLAUSE_CODE (c))
5423 {
5424 case OMP_CLAUSE_PRIVATE:
5425 flags = GOVD_PRIVATE | GOVD_EXPLICIT;
5426 if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
5427 {
5428 flags |= GOVD_PRIVATE_OUTER_REF;
5429 OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
5430 }
5431 else
5432 notice_outer = false;
5433 goto do_add;
5434 case OMP_CLAUSE_SHARED:
5435 flags = GOVD_SHARED | GOVD_EXPLICIT;
5436 goto do_add;
5437 case OMP_CLAUSE_FIRSTPRIVATE:
5438 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
5439 check_non_private = "firstprivate";
5440 goto do_add;
5441 case OMP_CLAUSE_LASTPRIVATE:
5442 flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
5443 check_non_private = "lastprivate";
5444 goto do_add;
5445 case OMP_CLAUSE_REDUCTION:
5446 flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
5447 check_non_private = "reduction";
5448 goto do_add;
5449
5450 do_add:
5451 decl = OMP_CLAUSE_DECL (c);
5452 if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node)
5453 {
5454 remove = true;
5455 break;
5456 }
5457 omp_add_variable (ctx, decl, flags);
5458 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5459 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5460 {
5461 omp_add_variable (ctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
5462 GOVD_LOCAL | GOVD_SEEN);
5463 gimplify_omp_ctxp = ctx;
5464 push_gimplify_context (&gctx);
5465
5466 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = gimple_seq_alloc ();
5467 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = gimple_seq_alloc ();
5468
5469 gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
5470 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
5471 pop_gimplify_context
5472 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
5473 push_gimplify_context (&gctx);
5474 gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
5475 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
5476 pop_gimplify_context
5477 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
5478 OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
5479 OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;
5480
5481 gimplify_omp_ctxp = outer_ctx;
5482 }
5483 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5484 && OMP_CLAUSE_LASTPRIVATE_STMT (c))
5485 {
5486 gimplify_omp_ctxp = ctx;
5487 push_gimplify_context (&gctx);
5488 if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
5489 {
5490 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
5491 NULL, NULL);
5492 TREE_SIDE_EFFECTS (bind) = 1;
5493 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
5494 OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
5495 }
5496 gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
5497 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
5498 pop_gimplify_context
5499 (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
5500 OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;
5501
5502 gimplify_omp_ctxp = outer_ctx;
5503 }
5504 if (notice_outer)
5505 goto do_notice;
5506 break;
5507
5508 case OMP_CLAUSE_COPYIN:
5509 case OMP_CLAUSE_COPYPRIVATE:
5510 decl = OMP_CLAUSE_DECL (c);
5511 if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node)
5512 {
5513 remove = true;
5514 break;
5515 }
5516 do_notice:
5517 if (outer_ctx)
5518 omp_notice_variable (outer_ctx, decl, true);
5519 if (check_non_private
5520 && region_type == ORT_WORKSHARE
5521 && omp_check_private (ctx, decl))
5522 {
5523 error ("%s variable %qs is private in outer context",
5524 check_non_private, IDENTIFIER_POINTER (DECL_NAME (decl)));
5525 remove = true;
5526 }
5527 break;
5528
5529 case OMP_CLAUSE_IF:
5530 OMP_CLAUSE_OPERAND (c, 0)
5531 = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
5532 /* Fall through. */
5533
5534 case OMP_CLAUSE_SCHEDULE:
5535 case OMP_CLAUSE_NUM_THREADS:
5536 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
5537 is_gimple_val, fb_rvalue) == GS_ERROR)
5538 remove = true;
5539 break;
5540
5541 case OMP_CLAUSE_NOWAIT:
5542 case OMP_CLAUSE_ORDERED:
5543 case OMP_CLAUSE_UNTIED:
5544 case OMP_CLAUSE_COLLAPSE:
5545 break;
5546
5547 case OMP_CLAUSE_DEFAULT:
5548 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
5549 break;
5550
5551 default:
5552 gcc_unreachable ();
5553 }
5554
5555 if (remove)
5556 *list_p = OMP_CLAUSE_CHAIN (c);
5557 else
5558 list_p = &OMP_CLAUSE_CHAIN (c);
5559 }
5560
5561 gimplify_omp_ctxp = ctx;
5562 }
5563
5564 /* For all variables that were not actually used within the context,
5565 remove PRIVATE, SHARED, and FIRSTPRIVATE clauses. */
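/* For illustration (not part of the original sources): given

     int x = 1;
     #pragma omp parallel
       foo (x);

   no clause mentions x, but omp_notice_variable records it as
   GOVD_SHARED | GOVD_SEEN while the body is gimplified.  The callback
   below then materializes an implicit shared (x) clause, while
   gimplify_adjust_omp_clauses drops explicit private/shared/firstprivate
   clauses whose variable was never actually seen in the body.  */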
5566
5567 static int
5568 gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
5569 {
5570 tree *list_p = (tree *) data;
5571 tree decl = (tree) n->key;
5572 unsigned flags = n->value;
5573 enum omp_clause_code code;
5574 tree clause;
5575 bool private_debug;
5576
5577 if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
5578 return 0;
5579 if ((flags & GOVD_SEEN) == 0)
5580 return 0;
5581 if (flags & GOVD_DEBUG_PRIVATE)
5582 {
5583 gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_PRIVATE);
5584 private_debug = true;
5585 }
5586 else
5587 private_debug
5588 = lang_hooks.decls.omp_private_debug_clause (decl,
5589 !!(flags & GOVD_SHARED));
5590 if (private_debug)
5591 code = OMP_CLAUSE_PRIVATE;
5592 else if (flags & GOVD_SHARED)
5593 {
5594 if (is_global_var (decl))
5595 {
5596 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
5597 while (ctx != NULL)
5598 {
5599 splay_tree_node on
5600 = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
5601 if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
5602 | GOVD_PRIVATE | GOVD_REDUCTION)) != 0)
5603 break;
5604 ctx = ctx->outer_context;
5605 }
5606 if (ctx == NULL)
5607 return 0;
5608 }
5609 code = OMP_CLAUSE_SHARED;
5610 }
5611 else if (flags & GOVD_PRIVATE)
5612 code = OMP_CLAUSE_PRIVATE;
5613 else if (flags & GOVD_FIRSTPRIVATE)
5614 code = OMP_CLAUSE_FIRSTPRIVATE;
5615 else
5616 gcc_unreachable ();
5617
5618 clause = build_omp_clause (code);
5619 OMP_CLAUSE_DECL (clause) = decl;
5620 OMP_CLAUSE_CHAIN (clause) = *list_p;
5621 if (private_debug)
5622 OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
5623 else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
5624 OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
5625 *list_p = clause;
5626 lang_hooks.decls.omp_finish_clause (clause);
5627
5628 return 0;
5629 }
5630
5631 static void
5632 gimplify_adjust_omp_clauses (tree *list_p)
5633 {
5634 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
5635 tree c, decl;
5636
5637 while ((c = *list_p) != NULL)
5638 {
5639 splay_tree_node n;
5640 bool remove = false;
5641
5642 switch (OMP_CLAUSE_CODE (c))
5643 {
5644 case OMP_CLAUSE_PRIVATE:
5645 case OMP_CLAUSE_SHARED:
5646 case OMP_CLAUSE_FIRSTPRIVATE:
5647 decl = OMP_CLAUSE_DECL (c);
5648 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
5649 remove = !(n->value & GOVD_SEEN);
5650 if (! remove)
5651 {
5652 bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
5653 if ((n->value & GOVD_DEBUG_PRIVATE)
5654 || lang_hooks.decls.omp_private_debug_clause (decl, shared))
5655 {
5656 gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
5657 || ((n->value & GOVD_DATA_SHARE_CLASS)
5658 == GOVD_PRIVATE));
5659 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
5660 OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
5661 }
5662 }
5663 break;
5664
5665 case OMP_CLAUSE_LASTPRIVATE:
5666 /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
5667 accurately reflect the presence of a FIRSTPRIVATE clause. */
5668 decl = OMP_CLAUSE_DECL (c);
5669 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
5670 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
5671 = (n->value & GOVD_FIRSTPRIVATE) != 0;
5672 break;
5673
5674 case OMP_CLAUSE_REDUCTION:
5675 case OMP_CLAUSE_COPYIN:
5676 case OMP_CLAUSE_COPYPRIVATE:
5677 case OMP_CLAUSE_IF:
5678 case OMP_CLAUSE_NUM_THREADS:
5679 case OMP_CLAUSE_SCHEDULE:
5680 case OMP_CLAUSE_NOWAIT:
5681 case OMP_CLAUSE_ORDERED:
5682 case OMP_CLAUSE_DEFAULT:
5683 case OMP_CLAUSE_UNTIED:
5684 case OMP_CLAUSE_COLLAPSE:
5685 break;
5686
5687 default:
5688 gcc_unreachable ();
5689 }
5690
5691 if (remove)
5692 *list_p = OMP_CLAUSE_CHAIN (c);
5693 else
5694 list_p = &OMP_CLAUSE_CHAIN (c);
5695 }
5696
5697 /* Add in any implicit data sharing. */
5698 splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, list_p);
5699
5700 gimplify_omp_ctxp = ctx->outer_context;
5701 delete_omp_context (ctx);
5702 }
5703
5704 /* Gimplify the contents of an OMP_PARALLEL statement. This involves
5705 gimplification of the body, as well as scanning the body for used
5706 variables. We need to do this scan now, because variable-sized
5707 decls will be decomposed during gimplification. */
5708
5709 static void
5710 gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
5711 {
5712 tree expr = *expr_p;
5713 gimple g;
5714 gimple_seq body = NULL;
5715 struct gimplify_ctx gctx;
5716
5717 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
5718 OMP_PARALLEL_COMBINED (expr)
5719 ? ORT_COMBINED_PARALLEL
5720 : ORT_PARALLEL);
5721
5722 push_gimplify_context (&gctx);
5723
5724 g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
5725 if (gimple_code (g) == GIMPLE_BIND)
5726 pop_gimplify_context (g);
5727 else
5728 pop_gimplify_context (NULL);
5729
5730 gimplify_adjust_omp_clauses (&OMP_PARALLEL_CLAUSES (expr));
5731
5732 g = gimple_build_omp_parallel (body,
5733 OMP_PARALLEL_CLAUSES (expr),
5734 NULL_TREE, NULL_TREE);
5735 if (OMP_PARALLEL_COMBINED (expr))
5736 gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
5737 gimplify_seq_add_stmt (pre_p, g);
5738 *expr_p = NULL_TREE;
5739 }
5740
5741 /* Gimplify the contents of an OMP_TASK statement. This involves
5742 gimplification of the body, as well as scanning the body for used
5743 variables. We need to do this scan now, because variable-sized
5744 decls will be decomposed during gimplification. */
5745
5746 static void
5747 gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
5748 {
5749 tree expr = *expr_p;
5750 gimple g;
5751 gimple_seq body = NULL;
5752 struct gimplify_ctx gctx;
5753
5754 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p, ORT_TASK);
5755
5756 push_gimplify_context (&gctx);
5757
5758 g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
5759 if (gimple_code (g) == GIMPLE_BIND)
5760 pop_gimplify_context (g);
5761 else
5762 pop_gimplify_context (NULL);
5763
5764 gimplify_adjust_omp_clauses (&OMP_TASK_CLAUSES (expr));
5765
5766 g = gimple_build_omp_task (body,
5767 OMP_TASK_CLAUSES (expr),
5768 NULL_TREE, NULL_TREE,
5769 NULL_TREE, NULL_TREE, NULL_TREE);
5770 gimplify_seq_add_stmt (pre_p, g);
5771 *expr_p = NULL_TREE;
5772 }
5773
5774 /* Gimplify the gross structure of an OMP_FOR statement. */
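/* Illustrative sketch (not in the original sources): for

     #pragma omp for
     for (i = 0; i < n; i++)
       body;

   OMP_FOR_INIT holds i = 0, OMP_FOR_COND holds i < n and OMP_FOR_INCR
   holds i++.  The code below canonicalizes the increment into the
   MODIFY_EXPR form i = i + 1, gimplifies the bound expressions into
   is_gimple_val operands, makes sure i is privatized, and substitutes a
   private temporary for i in the loop control expressions when i is not
   a gimple register.  */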
5775
5776 static enum gimplify_status
5777 gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
5778 {
5779 tree for_stmt, decl, var, t;
5780 enum gimplify_status ret = GS_OK;
5781 gimple gfor;
5782 gimple_seq for_body, for_pre_body;
5783 int i;
5784
5785 for_stmt = *expr_p;
5786
5787 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p,
5788 ORT_WORKSHARE);
5789
5790 /* Handle OMP_FOR_INIT. */
5791 for_pre_body = NULL;
5792 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
5793 OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;
5794
5795 for_body = gimple_seq_alloc ();
5796 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
5797 == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
5798 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
5799 == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));
5800 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
5801 {
5802 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
5803 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
5804 decl = TREE_OPERAND (t, 0);
5805 gcc_assert (DECL_P (decl));
5806 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
5807 || POINTER_TYPE_P (TREE_TYPE (decl)));
5808
5809 /* Make sure the iteration variable is private. */
5810 if (omp_is_private (gimplify_omp_ctxp, decl))
5811 omp_notice_variable (gimplify_omp_ctxp, decl, true);
5812 else
5813 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
5814
5815 /* If DECL is not a gimple register, create a temporary variable to act
5816 as an iteration counter. This is valid, since DECL cannot be
5817 modified in the body of the loop. */
5818 if (!is_gimple_reg (decl))
5819 {
5820 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
5821 TREE_OPERAND (t, 0) = var;
5822
5823 gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));
5824
5825 omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
5826 }
5827 else
5828 var = decl;
5829
5830 ret |= gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
5831 is_gimple_val, fb_rvalue);
5832 if (ret == GS_ERROR)
5833 return ret;
5834
5835 /* Handle OMP_FOR_COND. */
5836 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
5837 gcc_assert (COMPARISON_CLASS_P (t));
5838 gcc_assert (TREE_OPERAND (t, 0) == decl);
5839
5840 ret |= gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
5841 is_gimple_val, fb_rvalue);
5842
5843 /* Handle OMP_FOR_INCR. */
5844 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
5845 switch (TREE_CODE (t))
5846 {
5847 case PREINCREMENT_EXPR:
5848 case POSTINCREMENT_EXPR:
5849 t = build_int_cst (TREE_TYPE (decl), 1);
5850 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
5851 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
5852 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
5853 break;
5854
5855 case PREDECREMENT_EXPR:
5856 case POSTDECREMENT_EXPR:
5857 t = build_int_cst (TREE_TYPE (decl), -1);
5858 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
5859 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
5860 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
5861 break;
5862
5863 case MODIFY_EXPR:
5864 gcc_assert (TREE_OPERAND (t, 0) == decl);
5865 TREE_OPERAND (t, 0) = var;
5866
5867 t = TREE_OPERAND (t, 1);
5868 switch (TREE_CODE (t))
5869 {
5870 case PLUS_EXPR:
5871 if (TREE_OPERAND (t, 1) == decl)
5872 {
5873 TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
5874 TREE_OPERAND (t, 0) = var;
5875 break;
5876 }
5877
5878 /* Fallthru. */
5879 case MINUS_EXPR:
5880 case POINTER_PLUS_EXPR:
5881 gcc_assert (TREE_OPERAND (t, 0) == decl);
5882 TREE_OPERAND (t, 0) = var;
5883 break;
5884 default:
5885 gcc_unreachable ();
5886 }
5887
5888 ret |= gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
5889 is_gimple_val, fb_rvalue);
5890 break;
5891
5892 default:
5893 gcc_unreachable ();
5894 }
5895
5896 if (var != decl || TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1)
5897 {
5898 tree c;
5899 for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
5900 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5901 && OMP_CLAUSE_DECL (c) == decl
5902 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
5903 {
5904 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
5905 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
5906 gcc_assert (TREE_OPERAND (t, 0) == var);
5907 t = TREE_OPERAND (t, 1);
5908 gcc_assert (TREE_CODE (t) == PLUS_EXPR
5909 || TREE_CODE (t) == MINUS_EXPR
5910 || TREE_CODE (t) == POINTER_PLUS_EXPR);
5911 gcc_assert (TREE_OPERAND (t, 0) == var);
5912 t = build2 (TREE_CODE (t), TREE_TYPE (decl), decl,
5913 TREE_OPERAND (t, 1));
5914 gimplify_assign (decl, t,
5915 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
5916 }
5917 }
5918 }
5919
5920 gimplify_and_add (OMP_FOR_BODY (for_stmt), &for_body);
5921
5922 gimplify_adjust_omp_clauses (&OMP_FOR_CLAUSES (for_stmt));
5923
5924 gfor = gimple_build_omp_for (for_body, OMP_FOR_CLAUSES (for_stmt),
5925 TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
5926 for_pre_body);
5927
5928 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
5929 {
5930 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
5931 gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
5932 gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
5933 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
5934 gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
5935 gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
5936 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
5937 gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
5938 }
5939
5940 gimplify_seq_add_stmt (pre_p, gfor);
5941 return ret == GS_ALL_DONE ? GS_ALL_DONE : GS_ERROR;
5942 }
5943
5944 /* Gimplify the gross structure of other OpenMP worksharing constructs.
5945 In particular, OMP_SECTIONS and OMP_SINGLE. */
5946
5947 static void
5948 gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
5949 {
5950 tree expr = *expr_p;
5951 gimple stmt;
5952 gimple_seq body = NULL;
5953
5954 gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ORT_WORKSHARE);
5955 gimplify_and_add (OMP_BODY (expr), &body);
5956 gimplify_adjust_omp_clauses (&OMP_CLAUSES (expr));
5957
5958 if (TREE_CODE (expr) == OMP_SECTIONS)
5959 stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
5960 else if (TREE_CODE (expr) == OMP_SINGLE)
5961 stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
5962 else
5963 gcc_unreachable ();
5964
5965 gimplify_seq_add_stmt (pre_p, stmt);
5966 }
5967
5968 /* A subroutine of gimplify_omp_atomic. The front end is supposed to have
5969 stabilized the lhs of the atomic operation as *ADDR. Return true if
5970 EXPR is this stabilized form. */
5971
5972 static bool
5973 goa_lhs_expr_p (tree expr, tree addr)
5974 {
5975 /* Also include casts to other type variants. The C front end is fond
5976 of adding these for e.g. volatile variables. This is like
5977 STRIP_TYPE_NOPS but includes the main variant lookup. */
5978 while ((CONVERT_EXPR_P (expr)
5979 || TREE_CODE (expr) == NON_LVALUE_EXPR)
5980 && TREE_OPERAND (expr, 0) != error_mark_node
5981 && (TYPE_MAIN_VARIANT (TREE_TYPE (expr))
5982 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (expr, 0)))))
5983 expr = TREE_OPERAND (expr, 0);
5984
5985 if (TREE_CODE (expr) == INDIRECT_REF)
5986 {
5987 expr = TREE_OPERAND (expr, 0);
5988 while (expr != addr
5989 && (CONVERT_EXPR_P (expr)
5990 || TREE_CODE (expr) == NON_LVALUE_EXPR)
5991 && TREE_CODE (expr) == TREE_CODE (addr)
5992 && TYPE_MAIN_VARIANT (TREE_TYPE (expr))
5993 == TYPE_MAIN_VARIANT (TREE_TYPE (addr)))
5994 {
5995 expr = TREE_OPERAND (expr, 0);
5996 addr = TREE_OPERAND (addr, 0);
5997 }
5998 if (expr == addr)
5999 return true;
6000 return (TREE_CODE (addr) == ADDR_EXPR
6001 && TREE_CODE (expr) == ADDR_EXPR
6002 && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
6003 }
6004 if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
6005 return true;
6006 return false;
6007 }
6008
6009 /* Walk *EXPR_P and replace
6010 appearances of *LHS_ADDR with LHS_VAR. If an expression does not involve
6011 the lhs, evaluate it into a temporary. Return 1 if the lhs appeared as
6012 a subexpression, 0 if it did not, or -1 if an error was encountered. */
6013
6014 static int
6015 goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
6016 tree lhs_var)
6017 {
6018 tree expr = *expr_p;
6019 int saw_lhs;
6020
6021 if (goa_lhs_expr_p (expr, lhs_addr))
6022 {
6023 *expr_p = lhs_var;
6024 return 1;
6025 }
6026 if (is_gimple_val (expr))
6027 return 0;
6028
6029 saw_lhs = 0;
6030 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
6031 {
6032 case tcc_binary:
6033 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
6034 lhs_var);
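      /* FALLTHRU: operand 0 of a binary expression is stabilized by the
         tcc_unary code below as well.  */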
6035 case tcc_unary:
6036 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
6037 lhs_var);
6038 break;
6039 default:
6040 break;
6041 }
6042
6043 if (saw_lhs == 0)
6044 {
6045 enum gimplify_status gs;
6046 gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
6047 if (gs != GS_ALL_DONE)
6048 saw_lhs = -1;
6049 }
6050
6051 return saw_lhs;
6052 }
6053
6054
6055 /* Gimplify an OMP_ATOMIC statement. */
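/* Illustrative sketch (not in the original sources): the front end hands
   us OMP_ATOMIC with operand 0 the address of the updated location and
   operand 1 the new value, so that

     #pragma omp atomic
     x = x + inc;

   arrives roughly as OMP_ATOMIC <&x, *&x + inc>.  goa_stabilize_expr
   replaces the load of *&x on the right-hand side with a fresh temporary,
   and the statement is lowered to a GIMPLE_OMP_ATOMIC_LOAD of that
   temporary from the address followed by a GIMPLE_OMP_ATOMIC_STORE of the
   rewritten value.  */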
6056
6057 static enum gimplify_status
6058 gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
6059 {
6060 tree addr = TREE_OPERAND (*expr_p, 0);
6061 tree rhs = TREE_OPERAND (*expr_p, 1);
6062 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
6063 tree tmp_load;
6064
6065 tmp_load = create_tmp_var (type, NULL);
6066 if (goa_stabilize_expr (&rhs, pre_p, addr, tmp_load) < 0)
6067 return GS_ERROR;
6068
6069 if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
6070 != GS_ALL_DONE)
6071 return GS_ERROR;
6072
6073 gimplify_seq_add_stmt (pre_p, gimple_build_omp_atomic_load (tmp_load, addr));
6074 if (gimplify_expr (&rhs, pre_p, NULL, is_gimple_val, fb_rvalue)
6075 != GS_ALL_DONE)
6076 return GS_ERROR;
6077 gimplify_seq_add_stmt (pre_p, gimple_build_omp_atomic_store (rhs));
6078 *expr_p = NULL;
6079
6080 return GS_ALL_DONE;
6081 }
6082
6083
6084 /* Converts the GENERIC expression tree *EXPR_P to GIMPLE. If the
6085 expression produces a value to be used as an operand inside a GIMPLE
6086 statement, the value will be stored back in *EXPR_P. This value will
6087 be a tree of class tcc_declaration, tcc_constant, tcc_reference or
6088 an SSA_NAME. The corresponding sequence of GIMPLE statements is
6089 emitted in PRE_P and POST_P.
6090
6091 Additionally, this process may overwrite parts of the input
6092 expression during gimplification. Ideally, it should be
6093 possible to do non-destructive gimplification.
6094
6095 EXPR_P points to the GENERIC expression to convert to GIMPLE. If
6096 the expression needs to evaluate to a value to be used as
6097 an operand in a GIMPLE statement, this value will be stored in
6098 *EXPR_P on exit. This happens when the caller specifies one
6099 of fb_lvalue or fb_rvalue fallback flags.
6100
6101 PRE_P will contain the sequence of GIMPLE statements corresponding
6102 to the evaluation of EXPR and all the side-effects that must
6103 be executed before the main expression. On exit, the last
6104 statement of PRE_P is the core statement being gimplified. For
6105 instance, when gimplifying 'if (++a)' the last statement in
6106 PRE_P will be 'if (t.1)' where t.1 is the result of
6107 pre-incrementing 'a'.
6108
6109 POST_P will contain the sequence of GIMPLE statements corresponding
6110 to the evaluation of all the side-effects that must be executed
6111 after the main expression. If this is NULL, the post
6112 side-effects are stored at the end of PRE_P.
6113
6114 The reason why the output is split in two is to handle post
6115 side-effects explicitly. In some cases, an expression may have
6116 inner and outer post side-effects which need to be emitted in
6117 an order different from the one given by the recursive
6118 traversal. For instance, for the expression (*p--)++ the post
6119 side-effects of '--' must actually occur *after* the post
6120 side-effects of '++'. However, gimplification will first visit
6121 the inner expression, so if a separate POST sequence was not
6122 used, the resulting sequence would be:
6123
6124 1 t.1 = *p
6125 2 p = p - 1
6126 3 t.2 = t.1 + 1
6127 4 *p = t.2
6128
6129 However, the post-decrement operation in line #2 must not be
6130 evaluated until after the store to *p at line #4, so the
6131 correct sequence should be:
6132
6133 1 t.1 = *p
6134 2 t.2 = t.1 + 1
6135 3 *p = t.2
6136 4 p = p - 1
6137
6138 So, by specifying a separate post queue, it is possible
6139 to emit the post side-effects in the correct order.
6140 If POST_P is NULL, an internal queue will be used. Before
6141 returning to the caller, the sequence POST_P is appended to
6142 the main output sequence PRE_P.
6143
6144 GIMPLE_TEST_F points to a function that takes a tree T and
6145 returns nonzero if T is in the GIMPLE form requested by the
6146 caller. The GIMPLE predicates are in tree-gimple.c.
6147
6148 FALLBACK tells the function what sort of a temporary we want if
6149 gimplification cannot produce an expression that complies with
6150 GIMPLE_TEST_F.
6151
6152 fb_none means that no temporary should be generated
6153 fb_rvalue means that an rvalue is OK to generate
6154 fb_lvalue means that an lvalue is OK to generate
6155 fb_either means that either is OK, but an lvalue is preferable.
6156 fb_mayfail means that gimplification may fail (in which case
6157 GS_ERROR will be returned)
6158
6159 The return value is either GS_ERROR or GS_ALL_DONE, since this
6160 function iterates until EXPR is completely gimplified or an error
6161 occurs. */
6162
6163 enum gimplify_status
6164 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
6165 bool (*gimple_test_f) (tree), fallback_t fallback)
6166 {
6167 tree tmp;
6168 gimple_seq internal_pre = NULL;
6169 gimple_seq internal_post = NULL;
6170 tree save_expr;
6171 bool is_statement;
6172 location_t saved_location;
6173 enum gimplify_status ret;
6174 gimple_stmt_iterator pre_last_gsi, post_last_gsi;
6175
6176 save_expr = *expr_p;
6177 if (save_expr == NULL_TREE)
6178 return GS_ALL_DONE;
6179
6180 /* If we are gimplifying a top-level statement, PRE_P must be valid. */
6181 is_statement = gimple_test_f == is_gimple_stmt;
6182 if (is_statement)
6183 gcc_assert (pre_p);
6184
6185 /* Consistency checks. */
6186 if (gimple_test_f == is_gimple_reg)
6187 gcc_assert (fallback & (fb_rvalue | fb_lvalue));
6188 else if (gimple_test_f == is_gimple_val
6189 || gimple_test_f == is_gimple_formal_tmp_rhs
6190 || gimple_test_f == is_gimple_formal_tmp_or_call_rhs
6191 || gimple_test_f == is_gimple_formal_tmp_reg
6192 || gimple_test_f == is_gimple_formal_tmp_var
6193 || gimple_test_f == is_gimple_call_addr
6194 || gimple_test_f == is_gimple_condexpr
6195 || gimple_test_f == is_gimple_mem_rhs
6196 || gimple_test_f == is_gimple_mem_or_call_rhs
6197 || gimple_test_f == is_gimple_reg_rhs
6198 || gimple_test_f == is_gimple_reg_or_call_rhs
6199 || gimple_test_f == is_gimple_asm_val)
6200 gcc_assert (fallback & fb_rvalue);
6201 else if (gimple_test_f == is_gimple_min_lval
6202 || gimple_test_f == is_gimple_lvalue)
6203 gcc_assert (fallback & fb_lvalue);
6204 else if (gimple_test_f == is_gimple_addressable)
6205 gcc_assert (fallback & fb_either);
6206 else if (gimple_test_f == is_gimple_stmt)
6207 gcc_assert (fallback == fb_none);
6208 else
6209 {
6210 /* We should have recognized the GIMPLE_TEST_F predicate to
6211 know what kind of fallback to use in case a temporary is
6212 needed to hold the value or address of *EXPR_P. */
6213 gcc_unreachable ();
6214 }
6215
6216 /* We used to check the predicate here and return immediately if it
6217 succeeds. This is wrong; the design is for gimplification to be
6218 idempotent, and for the predicates to only test for valid forms, not
6219 whether they are fully simplified. */
6220 if (pre_p == NULL)
6221 pre_p = &internal_pre;
6222
6223 if (post_p == NULL)
6224 post_p = &internal_post;
6225
6226 /* Remember the last statements added to PRE_P and POST_P. Every
6227 new statement added by the gimplification helpers needs to be
6228 annotated with location information. To centralize the
6229 responsibility, we remember the last statement that had been
6230 added to both queues before gimplifying *EXPR_P. If
6231 gimplification produces new statements in PRE_P and POST_P, those
6232 statements will be annotated with the same location information
6233 as *EXPR_P. */
6234 pre_last_gsi = gsi_last (*pre_p);
6235 post_last_gsi = gsi_last (*post_p);
6236
6237 saved_location = input_location;
6238 if (save_expr != error_mark_node
6239 && EXPR_HAS_LOCATION (*expr_p))
6240 input_location = EXPR_LOCATION (*expr_p);
6241
6242 /* Loop over the specific gimplifiers until the toplevel node
6243 remains the same. */
6244 do
6245 {
6246 /* Strip away as many useless type conversions as possible
6247 at the toplevel. */
6248 STRIP_USELESS_TYPE_CONVERSION (*expr_p);
6249
6250 /* Remember the expr. */
6251 save_expr = *expr_p;
6252
6253 /* Die, die, die, my darling. */
6254 if (save_expr == error_mark_node
6255 || (TREE_TYPE (save_expr)
6256 && TREE_TYPE (save_expr) == error_mark_node))
6257 {
6258 ret = GS_ERROR;
6259 break;
6260 }
6261
6262 /* Do any language-specific gimplification. */
6263 ret = lang_hooks.gimplify_expr (expr_p, pre_p, post_p);
6264 if (ret == GS_OK)
6265 {
6266 if (*expr_p == NULL_TREE)
6267 break;
6268 if (*expr_p != save_expr)
6269 continue;
6270 }
6271 else if (ret != GS_UNHANDLED)
6272 break;
6273
6274 ret = GS_OK;
6275 switch (TREE_CODE (*expr_p))
6276 {
6277 /* First deal with the special cases. */
6278
6279 case POSTINCREMENT_EXPR:
6280 case POSTDECREMENT_EXPR:
6281 case PREINCREMENT_EXPR:
6282 case PREDECREMENT_EXPR:
6283 ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
6284 fallback != fb_none);
6285 break;
6286
6287 case ARRAY_REF:
6288 case ARRAY_RANGE_REF:
6289 case REALPART_EXPR:
6290 case IMAGPART_EXPR:
6291 case COMPONENT_REF:
6292 case VIEW_CONVERT_EXPR:
6293 ret = gimplify_compound_lval (expr_p, pre_p, post_p,
6294 fallback ? fallback : fb_rvalue);
6295 break;
6296
6297 case COND_EXPR:
6298 ret = gimplify_cond_expr (expr_p, pre_p, fallback);
6299
6300 /* C99 code may assign to an array in a structure value of a
6301 conditional expression, and this has undefined behavior
6302 only on execution, so create a temporary if an lvalue is
6303 required. */
6304 if (fallback == fb_lvalue)
6305 {
6306 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
6307 mark_addressable (*expr_p);
6308 }
6309 break;
6310
6311 case CALL_EXPR:
6312 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
6313
6314 /* C99 code may assign to an array in a structure returned
6315 from a function, and this has undefined behavior only on
6316 execution, so create a temporary if an lvalue is
6317 required. */
6318 if (fallback == fb_lvalue)
6319 {
6320 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
6321 mark_addressable (*expr_p);
6322 }
6323 break;
6324
6325 case TREE_LIST:
6326 gcc_unreachable ();
6327
6328 case COMPOUND_EXPR:
6329 ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
6330 break;
6331
6332 case MODIFY_EXPR:
6333 case INIT_EXPR:
6334 ret = gimplify_modify_expr (expr_p, pre_p, post_p,
6335 fallback != fb_none);
6336 break;
6337
6338 case TRUTH_ANDIF_EXPR:
6339 case TRUTH_ORIF_EXPR:
6340 ret = gimplify_boolean_expr (expr_p);
6341 break;
6342
6343 case TRUTH_NOT_EXPR:
6344 if (TREE_CODE (TREE_TYPE (*expr_p)) != BOOLEAN_TYPE)
6345 {
6346 tree type = TREE_TYPE (*expr_p);
6347 *expr_p = fold_convert (type, gimple_boolify (*expr_p));
6348 ret = GS_OK;
6349 break;
6350 }
6351
6352 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
6353 is_gimple_val, fb_rvalue);
6354 recalculate_side_effects (*expr_p);
6355 break;
6356
6357 case ADDR_EXPR:
6358 ret = gimplify_addr_expr (expr_p, pre_p, post_p);
6359 break;
6360
6361 case VA_ARG_EXPR:
6362 ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
6363 break;
6364
6365 CASE_CONVERT:
6366 if (IS_EMPTY_STMT (*expr_p))
6367 {
6368 ret = GS_ALL_DONE;
6369 break;
6370 }
6371
6372 if (VOID_TYPE_P (TREE_TYPE (*expr_p))
6373 || fallback == fb_none)
6374 {
6375 /* Just strip a conversion to void (or in void context) and
6376 try again. */
6377 *expr_p = TREE_OPERAND (*expr_p, 0);
6378 break;
6379 }
6380
6381 ret = gimplify_conversion (expr_p);
6382 if (ret == GS_ERROR)
6383 break;
6384 if (*expr_p != save_expr)
6385 break;
6386 /* FALLTHRU */
6387
6388 case FIX_TRUNC_EXPR:
6389 /* unary_expr: ... | '(' cast ')' val | ... */
6390 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
6391 is_gimple_val, fb_rvalue);
6392 recalculate_side_effects (*expr_p);
6393 break;
6394
6395 case INDIRECT_REF:
6396 *expr_p = fold_indirect_ref (*expr_p);
6397 if (*expr_p != save_expr)
6398 break;
6399 /* else fall through. */
6400 case ALIGN_INDIRECT_REF:
6401 case MISALIGNED_INDIRECT_REF:
6402 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
6403 is_gimple_reg, fb_rvalue);
6404 recalculate_side_effects (*expr_p);
6405 break;
6406
6407 /* Constants need not be gimplified. */
6408 case INTEGER_CST:
6409 case REAL_CST:
6410 case FIXED_CST:
6411 case STRING_CST:
6412 case COMPLEX_CST:
6413 case VECTOR_CST:
6414 ret = GS_ALL_DONE;
6415 break;
6416
6417 case CONST_DECL:
6418 /* If we require an lvalue, such as for ADDR_EXPR, retain the
6419 CONST_DECL node. Otherwise the decl is replaceable by its
6420 value. */
6421 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
6422 if (fallback & fb_lvalue)
6423 ret = GS_ALL_DONE;
6424 else
6425 *expr_p = DECL_INITIAL (*expr_p);
6426 break;
6427
6428 case DECL_EXPR:
6429 ret = gimplify_decl_expr (expr_p, pre_p);
6430 break;
6431
6432 case EXC_PTR_EXPR:
6433 /* FIXME make this a decl. */
6434 ret = GS_ALL_DONE;
6435 break;
6436
6437 case BIND_EXPR:
6438 ret = gimplify_bind_expr (expr_p, pre_p);
6439 break;
6440
6441 case LOOP_EXPR:
6442 ret = gimplify_loop_expr (expr_p, pre_p);
6443 break;
6444
6445 case SWITCH_EXPR:
6446 ret = gimplify_switch_expr (expr_p, pre_p);
6447 break;
6448
6449 case EXIT_EXPR:
6450 ret = gimplify_exit_expr (expr_p);
6451 break;
6452
6453 case GOTO_EXPR:
6454 /* If the target is not a LABEL_DECL, then it is a computed jump
6455 and the target needs to be gimplified. */
6456 if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
6457 {
6458 ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
6459 NULL, is_gimple_val, fb_rvalue);
6460 if (ret == GS_ERROR)
6461 break;
6462 }
6463 gimplify_seq_add_stmt (pre_p,
6464 gimple_build_goto (GOTO_DESTINATION (*expr_p)));
6465 break;
6466
6467 case PREDICT_EXPR:
6468 gimplify_seq_add_stmt (pre_p,
6469 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
6470 PREDICT_EXPR_OUTCOME (*expr_p)));
6471 ret = GS_ALL_DONE;
6472 break;
6473
6474 case LABEL_EXPR:
6475 ret = GS_ALL_DONE;
6476 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
6477 == current_function_decl);
6478 gimplify_seq_add_stmt (pre_p,
6479 gimple_build_label (LABEL_EXPR_LABEL (*expr_p)));
6480 break;
6481
6482 case CASE_LABEL_EXPR:
6483 ret = gimplify_case_label_expr (expr_p, pre_p);
6484 break;
6485
6486 case RETURN_EXPR:
6487 ret = gimplify_return_expr (*expr_p, pre_p);
6488 break;
6489
6490 case CONSTRUCTOR:
6491 /* Don't reduce this in place; let gimplify_init_constructor work its
6492 magic. But if we're just elaborating this for side effects, just
6493 gimplify any element that has side-effects. */
6494 if (fallback == fb_none)
6495 {
6496 unsigned HOST_WIDE_INT ix;
6497 constructor_elt *ce;
6498 tree temp = NULL_TREE;
6499 for (ix = 0;
6500 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (*expr_p),
6501 ix, ce);
6502 ix++)
6503 if (TREE_SIDE_EFFECTS (ce->value))
6504 append_to_statement_list (ce->value, &temp);
6505
6506 *expr_p = temp;
6507 ret = GS_OK;
6508 }
6509 /* C99 code may assign to an array in a constructed
6510 structure or union, and this has undefined behavior only
6511 on execution, so create a temporary if an lvalue is
6512 required. */
6513 else if (fallback == fb_lvalue)
6514 {
6515 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
6516 mark_addressable (*expr_p);
6517 }
6518 else
6519 ret = GS_ALL_DONE;
6520 break;
6521
6522 /* The following are special cases that are not handled by the
6523 original GIMPLE grammar. */
6524
6525 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
6526 eliminated. */
6527 case SAVE_EXPR:
6528 ret = gimplify_save_expr (expr_p, pre_p, post_p);
6529 break;
6530
6531 case BIT_FIELD_REF:
6532 {
6533 enum gimplify_status r0, r1, r2;
6534
6535 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
6536 post_p, is_gimple_lvalue, fb_either);
6537 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
6538 post_p, is_gimple_val, fb_rvalue);
6539 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
6540 post_p, is_gimple_val, fb_rvalue);
6541 recalculate_side_effects (*expr_p);
6542
6543 ret = MIN (r0, MIN (r1, r2));
6544 }
6545 break;
6546
6547 case NON_LVALUE_EXPR:
6548 /* This should have been stripped above. */
6549 gcc_unreachable ();
6550
6551 case ASM_EXPR:
6552 ret = gimplify_asm_expr (expr_p, pre_p, post_p);
6553 break;
6554
6555 case TRY_FINALLY_EXPR:
6556 case TRY_CATCH_EXPR:
6557 {
6558 gimple_seq eval, cleanup;
6559 gimple try_;
6560
6561 eval = cleanup = NULL;
6562 gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
6563 gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
6564 /* Don't create bogus GIMPLE_TRY with empty cleanup. */
6565 if (gimple_seq_empty_p (cleanup))
6566 {
6567 gimple_seq_add_seq (pre_p, eval);
6568 ret = GS_ALL_DONE;
6569 break;
6570 }
6571 try_ = gimple_build_try (eval, cleanup,
6572 TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
6573 ? GIMPLE_TRY_FINALLY
6574 : GIMPLE_TRY_CATCH);
6575 if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
6576 gimple_try_set_catch_is_cleanup (try_,
6577 TRY_CATCH_IS_CLEANUP (*expr_p));
6578 gimplify_seq_add_stmt (pre_p, try_);
6579 ret = GS_ALL_DONE;
6580 break;
6581 }
6582
6583 case CLEANUP_POINT_EXPR:
6584 ret = gimplify_cleanup_point_expr (expr_p, pre_p);
6585 break;
6586
6587 case TARGET_EXPR:
6588 ret = gimplify_target_expr (expr_p, pre_p, post_p);
6589 break;
6590
6591 case CATCH_EXPR:
6592 {
6593 gimple c;
6594 gimple_seq handler = NULL;
6595 gimplify_and_add (CATCH_BODY (*expr_p), &handler);
6596 c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
6597 gimplify_seq_add_stmt (pre_p, c);
6598 ret = GS_ALL_DONE;
6599 break;
6600 }
6601
6602 case EH_FILTER_EXPR:
6603 {
6604 gimple ehf;
6605 gimple_seq failure = NULL;
6606
6607 gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
6608 ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
6609 gimple_eh_filter_set_must_not_throw
6610 (ehf, EH_FILTER_MUST_NOT_THROW (*expr_p));
6611 gimplify_seq_add_stmt (pre_p, ehf);
6612 ret = GS_ALL_DONE;
6613 break;
6614 }
6615
6616 case CHANGE_DYNAMIC_TYPE_EXPR:
6617 {
6618 gimple cdt;
6619
6620 ret = gimplify_expr (&CHANGE_DYNAMIC_TYPE_LOCATION (*expr_p),
6621 pre_p, post_p, is_gimple_reg, fb_lvalue);
6622 cdt = gimple_build_cdt (CHANGE_DYNAMIC_TYPE_NEW_TYPE (*expr_p),
6623 CHANGE_DYNAMIC_TYPE_LOCATION (*expr_p));
6624 gimplify_seq_add_stmt (pre_p, cdt);
6625 ret = GS_ALL_DONE;
6626 }
6627 break;
6628
6629 case OBJ_TYPE_REF:
6630 {
6631 enum gimplify_status r0, r1;
6632 r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
6633 post_p, is_gimple_val, fb_rvalue);
6634 r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
6635 post_p, is_gimple_val, fb_rvalue);
6636 TREE_SIDE_EFFECTS (*expr_p) = 0;
6637 ret = MIN (r0, r1);
6638 }
6639 break;
6640
6641 case LABEL_DECL:
6642 /* We get here when taking the address of a label. We mark
6643 the label as "forced", meaning it can never be removed and
6644 it is a potential target for any computed goto. */
6645 FORCED_LABEL (*expr_p) = 1;
6646 ret = GS_ALL_DONE;
6647 break;
6648
6649 case STATEMENT_LIST:
6650 ret = gimplify_statement_list (expr_p, pre_p);
6651 break;
6652
6653 case WITH_SIZE_EXPR:
6654 {
6655 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
6656 post_p == &internal_post ? NULL : post_p,
6657 gimple_test_f, fallback);
6658 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
6659 is_gimple_val, fb_rvalue);
6660 }
6661 break;
6662
6663 case VAR_DECL:
6664 case PARM_DECL:
6665 ret = gimplify_var_or_parm_decl (expr_p);
6666 break;
6667
6668 case RESULT_DECL:
6669 /* When within an OpenMP context, notice uses of variables. */
6670 if (gimplify_omp_ctxp)
6671 omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
6672 ret = GS_ALL_DONE;
6673 break;
6674
6675 case SSA_NAME:
6676 /* Allow callbacks into the gimplifier during optimization. */
6677 ret = GS_ALL_DONE;
6678 break;
6679
6680 case OMP_PARALLEL:
6681 gimplify_omp_parallel (expr_p, pre_p);
6682 ret = GS_ALL_DONE;
6683 break;
6684
6685 case OMP_TASK:
6686 gimplify_omp_task (expr_p, pre_p);
6687 ret = GS_ALL_DONE;
6688 break;
6689
6690 case OMP_FOR:
6691 ret = gimplify_omp_for (expr_p, pre_p);
6692 break;
6693
6694 case OMP_SECTIONS:
6695 case OMP_SINGLE:
6696 gimplify_omp_workshare (expr_p, pre_p);
6697 ret = GS_ALL_DONE;
6698 break;
6699
6700 case OMP_SECTION:
6701 case OMP_MASTER:
6702 case OMP_ORDERED:
6703 case OMP_CRITICAL:
6704 {
6705 gimple_seq body = NULL;
6706 gimple g;
6707
6708 gimplify_and_add (OMP_BODY (*expr_p), &body);
6709 switch (TREE_CODE (*expr_p))
6710 {
6711 case OMP_SECTION:
6712 g = gimple_build_omp_section (body);
6713 break;
6714 case OMP_MASTER:
6715 g = gimple_build_omp_master (body);
6716 break;
6717 case OMP_ORDERED:
6718 g = gimple_build_omp_ordered (body);
6719 break;
6720 case OMP_CRITICAL:
6721 g = gimple_build_omp_critical (body,
6722 OMP_CRITICAL_NAME (*expr_p));
6723 break;
6724 default:
6725 gcc_unreachable ();
6726 }
6727 gimplify_seq_add_stmt (pre_p, g);
6728 ret = GS_ALL_DONE;
6729 break;
6730 }
6731
6732 case OMP_ATOMIC:
6733 ret = gimplify_omp_atomic (expr_p, pre_p);
6734 break;
6735
6736 case POINTER_PLUS_EXPR:
6737 /* Convert ((type *)A)+offset into &A->field_of_type_and_offset.
6738 The latter is a gimple immediate, saving the need for an extra
6739 statement. */
6740 if (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == INTEGER_CST
6741 && (tmp = maybe_fold_offset_to_address
6742 (TREE_OPERAND (*expr_p, 0), TREE_OPERAND (*expr_p, 1),
6743 TREE_TYPE (*expr_p))))
6744 {
6745 *expr_p = tmp;
6746 break;
6747 }
6748 /* Convert (void *)&a + 4 into (void *)&a[1]. */
6749 if (TREE_CODE (TREE_OPERAND (*expr_p, 0)) == NOP_EXPR
6750 && TREE_CODE (TREE_OPERAND (*expr_p, 1)) == INTEGER_CST
6751 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*expr_p,
6752 0),0)))
6753 && (tmp = maybe_fold_offset_to_address
6754 (TREE_OPERAND (TREE_OPERAND (*expr_p, 0), 0),
6755 TREE_OPERAND (*expr_p, 1),
6756 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*expr_p, 0),
6757 0)))))
6758 {
6759 *expr_p = fold_convert (TREE_TYPE (*expr_p), tmp);
6760 break;
6761 }
6762 /* FALLTHRU */
6763
6764 default:
6765 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
6766 {
6767 case tcc_comparison:
6768 /* Handle comparison of objects of non scalar mode aggregates
6769 with a call to memcmp. It would be nice to only have to do
6770 this for variable-sized objects, but then we'd have to allow
6771 the same nest of reference nodes we allow for MODIFY_EXPR and
6772 that's too complex.
6773
6774 Compare scalar mode aggregates as scalar mode values. Using
6775 memcmp for them would be very inefficient at best, and is
6776 plain wrong if bitfields are involved. */
6777 {
6778 tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
6779
6780 if (!AGGREGATE_TYPE_P (type))
6781 goto expr_2;
6782 else if (TYPE_MODE (type) != BLKmode)
6783 ret = gimplify_scalar_mode_aggregate_compare (expr_p);
6784 else
6785 ret = gimplify_variable_sized_compare (expr_p);
6786
6787 break;
6788 }
6789
6790 /* If *EXPR_P does not need to be special-cased, handle it
6791 according to its class. */
6792 case tcc_unary:
6793 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
6794 post_p, is_gimple_val, fb_rvalue);
6795 break;
6796
6797 case tcc_binary:
6798 expr_2:
6799 {
6800 enum gimplify_status r0, r1;
6801
6802 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
6803 post_p, is_gimple_val, fb_rvalue);
6804 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
6805 post_p, is_gimple_val, fb_rvalue);
6806
6807 ret = MIN (r0, r1);
6808 break;
6809 }
6810
6811 case tcc_declaration:
6812 case tcc_constant:
6813 ret = GS_ALL_DONE;
6814 goto dont_recalculate;
6815
6816 default:
6817 gcc_assert (TREE_CODE (*expr_p) == TRUTH_AND_EXPR
6818 || TREE_CODE (*expr_p) == TRUTH_OR_EXPR
6819 || TREE_CODE (*expr_p) == TRUTH_XOR_EXPR);
6820 goto expr_2;
6821 }
6822
6823 recalculate_side_effects (*expr_p);
6824
6825 dont_recalculate:
6826 break;
6827 }
6828
6829 /* If we replaced *expr_p, gimplify again. */
6830 if (ret == GS_OK && (*expr_p == NULL || *expr_p == save_expr))
6831 ret = GS_ALL_DONE;
6832 }
6833 while (ret == GS_OK);
6834
6835 /* If we encountered an error_mark somewhere nested inside, either
6836 stub out the statement or propagate the error back out. */
6837 if (ret == GS_ERROR)
6838 {
6839 if (is_statement)
6840 *expr_p = NULL;
6841 goto out;
6842 }
6843
6844 /* This was only valid as a return value from the langhook, which
6845 we handled. Make sure it doesn't escape from any other context. */
6846 gcc_assert (ret != GS_UNHANDLED);
6847
6848 if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
6849 {
6850 /* We aren't looking for a value, and we don't have a valid
6851 statement. If it doesn't have side-effects, throw it away. */
6852 if (!TREE_SIDE_EFFECTS (*expr_p))
6853 *expr_p = NULL;
6854 else if (!TREE_THIS_VOLATILE (*expr_p))
6855 {
6856 /* This is probably a _REF that contains something nested that
6857 has side effects. Recurse through the operands to find it. */
6858 enum tree_code code = TREE_CODE (*expr_p);
6859
6860 switch (code)
6861 {
6862 case COMPONENT_REF:
6863 case REALPART_EXPR:
6864 case IMAGPART_EXPR:
6865 case VIEW_CONVERT_EXPR:
6866 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
6867 gimple_test_f, fallback);
6868 break;
6869
6870 case ARRAY_REF:
6871 case ARRAY_RANGE_REF:
6872 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
6873 gimple_test_f, fallback);
6874 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
6875 gimple_test_f, fallback);
6876 break;
6877
6878 default:
6879 /* Anything else with side-effects must be converted to
6880 a valid statement before we get here. */
6881 gcc_unreachable ();
6882 }
6883
6884 *expr_p = NULL;
6885 }
6886 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
6887 && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode)
6888 {
6889 /* Historically, the compiler has treated a bare reference
6890 to a non-BLKmode volatile lvalue as forcing a load. */
6891 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
6892
6893 /* Normally, we do not want to create a temporary for a
6894 TREE_ADDRESSABLE type because such a type should not be
6895 copied by bitwise-assignment. However, we make an
6896 exception here, as all we are doing here is ensuring that
6897 we read the bytes that make up the type. We use
6898 create_tmp_var_raw because create_tmp_var will abort when
6899 given a TREE_ADDRESSABLE type. */
6900 tree tmp = create_tmp_var_raw (type, "vol");
6901 gimple_add_tmp_var (tmp);
6902 gimplify_assign (tmp, *expr_p, pre_p);
6903 *expr_p = NULL;
6904 }
6905 else
6906 /* We can't do anything useful with a volatile reference to
6907 an incomplete type, so just throw it away. Likewise for
6908 a BLKmode type, since any implicit inner load should
6909 already have been turned into an explicit one by the
6910 gimplification process. */
6911 *expr_p = NULL;
6912 }
6913
6914 /* If we are gimplifying at the statement level, we're done. Tack
6915 everything together and return. */
6916 if (fallback == fb_none || is_statement)
6917 {
6918 /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
6919 it out for GC to reclaim it. */
6920 *expr_p = NULL_TREE;
6921
6922 if (!gimple_seq_empty_p (internal_pre)
6923 || !gimple_seq_empty_p (internal_post))
6924 {
6925 gimplify_seq_add_seq (&internal_pre, internal_post);
6926 gimplify_seq_add_seq (pre_p, internal_pre);
6927 }
6928
6929 /* The result of gimplifying *EXPR_P is going to be the last few
6930 statements in *PRE_P and *POST_P. Add location information
6931 to all the statements that were added by the gimplification
6932 helpers. */
6933 if (!gimple_seq_empty_p (*pre_p))
6934 annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);
6935
6936 if (!gimple_seq_empty_p (*post_p))
6937 annotate_all_with_location_after (*post_p, post_last_gsi,
6938 input_location);
6939
6940 goto out;
6941 }
6942
6943 #ifdef ENABLE_GIMPLE_CHECKING
6944 if (*expr_p)
6945 {
6946 enum tree_code code = TREE_CODE (*expr_p);
6947 /* These expressions should already be in gimple IR form. */
6948 gcc_assert (code != MODIFY_EXPR
6949 && code != ASM_EXPR
6950 && code != BIND_EXPR
6951 && code != CATCH_EXPR
6952 && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
6953 && code != EH_FILTER_EXPR
6954 && code != GOTO_EXPR
6955 && code != LABEL_EXPR
6956 && code != LOOP_EXPR
6957 && code != RESX_EXPR
6958 && code != SWITCH_EXPR
6959 && code != TRY_FINALLY_EXPR
6960 && code != OMP_CRITICAL
6961 && code != OMP_FOR
6962 && code != OMP_MASTER
6963 && code != OMP_ORDERED
6964 && code != OMP_PARALLEL
6965 && code != OMP_SECTIONS
6966 && code != OMP_SECTION
6967 && code != OMP_SINGLE);
6968 }
6969 #endif
6970
6971 /* Otherwise we're gimplifying a subexpression, so the resulting
6972 value is interesting. If it's a valid operand that matches
6973 GIMPLE_TEST_F, we're done. Unless we are handling some
6974 post-effects internally; if that's the case, we need to copy into
6975 a temporary before adding the post-effects to POST_P. */
6976 if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
6977 goto out;
6978
6979 /* Otherwise, we need to create a new temporary for the gimplified
6980 expression. */
6981
6982 /* We can't return an lvalue if we have an internal postqueue. The
6983 object the lvalue refers to would (probably) be modified by the
6984 postqueue; we need to copy the value out first, which means an
6985 rvalue. */
6986 if ((fallback & fb_lvalue)
6987 && gimple_seq_empty_p (internal_post)
6988 && is_gimple_addressable (*expr_p))
6989 {
6990 /* An lvalue will do. Take the address of the expression, store it
6991 in a temporary, and replace the expression with an INDIRECT_REF of
6992 that temporary. */
6993 tmp = build_fold_addr_expr (*expr_p);
6994 gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
6995 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (tmp)), tmp);
6996 }
6997 else if ((fallback & fb_rvalue) && is_gimple_formal_tmp_or_call_rhs (*expr_p))
6998 {
6999 /* An rvalue will do. Assign the gimplified expression into a
7000 new temporary TMP and replace the original expression with
7001 TMP. First, make sure that the expression has a type so that
7002 it can be assigned into a temporary. */
7003 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
7004
7005 if (!gimple_seq_empty_p (internal_post) || (fallback & fb_lvalue))
7006 /* The postqueue might change the value of the expression between
7007 the initialization and use of the temporary, so we can't use a
7008 formal temp. FIXME do we care? */
7009 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
7010 else
7011 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
7012
7013 if (TREE_CODE (*expr_p) != SSA_NAME)
7014 DECL_GIMPLE_FORMAL_TEMP_P (*expr_p) = 1;
7015 }
7016 else
7017 {
7018 #ifdef ENABLE_GIMPLE_CHECKING
7019 if (!(fallback & fb_mayfail))
7020 {
7021 fprintf (stderr, "gimplification failed:\n");
7022 print_generic_expr (stderr, *expr_p, 0);
7023 debug_tree (*expr_p);
7024 internal_error ("gimplification failed");
7025 }
7026 #endif
7027 gcc_assert (fallback & fb_mayfail);
7028
7029 /* If this is an asm statement, and the user asked for the
7030 impossible, don't die. Fail and let gimplify_asm_expr
7031 issue an error. */
7032 ret = GS_ERROR;
7033 goto out;
7034 }
7035
7036 /* Make sure the temporary matches our predicate. */
7037 gcc_assert ((*gimple_test_f) (*expr_p));
7038
7039 if (!gimple_seq_empty_p (internal_post))
7040 {
7041 annotate_all_with_location (internal_post, input_location);
7042 gimplify_seq_add_seq (pre_p, internal_post);
7043 }
7044
7045 out:
7046 input_location = saved_location;
7047 return ret;
7048 }
7049
7050 /* Look through TYPE for variable-sized objects and gimplify each such
7051 size that we find. Add to LIST_P any statements generated. */
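/* For illustration (not part of the original sources): in

     void f (int n) { int a[n]; ... }

   the type of a has a variable TYPE_SIZE and its index domain a variable
   TYPE_MAX_VALUE.  The walk below gimplifies each such size or bound
   exactly once (guarded by TYPE_SIZES_GIMPLIFIED) so that later uses of
   the type see already-evaluated gimple values.  */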
7052
7053 void
7054 gimplify_type_sizes (tree type, gimple_seq *list_p)
7055 {
7056 tree field, t;
7057
7058 if (type == NULL || type == error_mark_node)
7059 return;
7060
7061 /* We first do the main variant, then copy into any other variants. */
7062 type = TYPE_MAIN_VARIANT (type);
7063
7064 /* Avoid infinite recursion. */
7065 if (TYPE_SIZES_GIMPLIFIED (type))
7066 return;
7067
7068 TYPE_SIZES_GIMPLIFIED (type) = 1;
7069
7070 switch (TREE_CODE (type))
7071 {
7072 case INTEGER_TYPE:
7073 case ENUMERAL_TYPE:
7074 case BOOLEAN_TYPE:
7075 case REAL_TYPE:
7076 case FIXED_POINT_TYPE:
7077 gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
7078 gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);
7079
7080 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
7081 {
7082 TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
7083 TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
7084 }
7085 break;
7086
7087 case ARRAY_TYPE:
7088 /* These types may not have declarations, so handle them here. */
7089 gimplify_type_sizes (TREE_TYPE (type), list_p);
7090 gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
7091 /* When not optimizing, ensure VLA bounds aren't removed. */
7092 if (!optimize
7093 && TYPE_DOMAIN (type)
7094 && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
7095 {
7096 t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
7097 if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
7098 DECL_IGNORED_P (t) = 0;
7099 t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
7100 if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
7101 DECL_IGNORED_P (t) = 0;
7102 }
7103 break;
7104
7105 case RECORD_TYPE:
7106 case UNION_TYPE:
7107 case QUAL_UNION_TYPE:
7108 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
7109 if (TREE_CODE (field) == FIELD_DECL)
7110 {
7111 gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
7112 gimplify_type_sizes (TREE_TYPE (field), list_p);
7113 }
7114 break;
7115
7116 case POINTER_TYPE:
7117 case REFERENCE_TYPE:
7118 /* We used to recurse on the pointed-to type here, which turned out to
7119 be incorrect because its definition might refer to variables not
7120 yet initialized at this point if a forward declaration is involved.
7121
7122 It was actually useful for anonymous pointed-to types to ensure
7123 that the sizes evaluation dominates every possible later use of the
7124 values. Restricting to such types here would be safe since there
7125 is no possible forward declaration around, but would introduce an
7126 undesirable middle-end semantic to anonymity. We then defer to
7127 front-ends the responsibility of ensuring that the sizes are
7128 evaluated both early and late enough, e.g. by attaching artificial
7129 type declarations to the tree. */
7130 break;
7131
7132 default:
7133 break;
7134 }
7135
7136 gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
7137 gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);
7138
7139 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
7140 {
7141 TYPE_SIZE (t) = TYPE_SIZE (type);
7142 TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
7143 TYPE_SIZES_GIMPLIFIED (t) = 1;
7144 }
7145 }
7146
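/* As a user-level illustration of the sizes this function gimplifies,
   consider a C99 variable-length array (a minimal sketch; temporary names
   and the exact dump syntax depend on the front end and flags):

       void
       f (int n)
       {
         int a[n];
         a[0] = 0;
       }

   Gimplifying the sizes of the type of A evaluates the domain bound and the
   byte size into temporaries once, up front, roughly

       D.1 = n + -1;                upper bound of TYPE_DOMAIN
       D.2 = (sizetype) n * 4;      TYPE_SIZE_UNIT of the array type

   so that every later use of the type, e.g. "sizeof a" or the address
   arithmetic for "a[0]", reuses the already-evaluated values.  */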
7147 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
7148 a size or position, has had all of its SAVE_EXPRs evaluated.
7149 We add any required statements to *STMT_P. */
7150
7151 void
7152 gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
7153 {
7154 tree type, expr = *expr_p;
7155
7156 /* We don't do anything if the value isn't there, is constant, or contains
7157 a PLACEHOLDER_EXPR. We also don't want to do anything if it's already
7158 a VAR_DECL: if it's a VAR_DECL from another function, the gimplifier
7159 would want to replace it with a new variable, and that would cause problems
7160 if this type is used outside the function. Such a VAR_DECL is fine here. */
7161 if (expr == NULL_TREE || TREE_CONSTANT (expr)
7162 || TREE_CODE (expr) == VAR_DECL
7163 || CONTAINS_PLACEHOLDER_P (expr))
7164 return;
7165
7166 type = TREE_TYPE (expr);
7167 *expr_p = unshare_expr (expr);
7168
7169 gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue);
7170 expr = *expr_p;
7171
7172 /* Verify that we have an exact type match with the original expression.
7173 In particular, we do not wish to drop a "sizetype" in favour of a
7174 type of similar dimensions. We don't want to pollute the generic
7175 type-stripping code with this knowledge because it doesn't matter
7176 for the bulk of GENERIC/GIMPLE. It only matters that TYPE_SIZE_UNIT
7177 and friends retain their "sizetype-ness". */
7178 if (TREE_TYPE (expr) != type
7179 && TREE_CODE (type) == INTEGER_TYPE
7180 && TYPE_IS_SIZETYPE (type))
7181 {
7182 tree tmp;
7183 gimple stmt;
7184
7185 *expr_p = create_tmp_var (type, NULL);
7186 tmp = build1 (NOP_EXPR, type, expr);
7187 stmt = gimplify_assign (*expr_p, tmp, stmt_p);
7188 if (EXPR_HAS_LOCATION (expr))
7189 gimple_set_location (stmt, *EXPR_LOCUS (expr));
7190 else
7191 gimple_set_location (stmt, input_location);
7192 }
7193 }
7194
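/* A sketch of the type-preserving step above, with made-up temporary names:
   if the sizetype expression "n * 4" gimplifies into a temporary D.2 whose
   type is merely a same-width integer type, we do not use D.2 directly for
   the size but emit, in GENERIC terms,

       sizetype tmp.3;
       tmp.3 = (sizetype) D.2;      NOP_EXPR conversion, no value change

   and make *EXPR_P refer to tmp.3, so that TYPE_SIZE_UNIT and friends keep
   their "sizetype-ness" as required by the comment above.  */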
7195
7196 /* Gimplify the body of statements pointed to by BODY_P and return a
7197 GIMPLE_BIND containing the sequence of GIMPLE statements
7198 corresponding to BODY_P. FNDECL is the function decl containing
7199 *BODY_P. */
7200
7201 gimple
7202 gimplify_body (tree *body_p, tree fndecl, bool do_parms)
7203 {
7204 location_t saved_location = input_location;
7205 gimple_seq parm_stmts, seq;
7206 gimple outer_bind;
7207 struct gimplify_ctx gctx;
7208
7209 timevar_push (TV_TREE_GIMPLIFY);
7210
7211 /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
7212 gimplification. */
7213 default_rtl_profile ();
7214
7215 gcc_assert (gimplify_ctxp == NULL);
7216 push_gimplify_context (&gctx);
7217
7218 /* Unshare most shared trees in the body and in that of any nested functions.
7219 It would seem we don't have to do this for nested functions because
7220 they are supposed to be output and then the outer function gimplified
7221 first, but the g++ front end doesn't always do it that way. */
7222 unshare_body (body_p, fndecl);
7223 unvisit_body (body_p, fndecl);
7224
7225 /* Make sure input_location isn't set to something weird. */
7226 input_location = DECL_SOURCE_LOCATION (fndecl);
7227
7228 /* Resolve callee-copies. This has to be done before processing
7229 the body so that DECL_VALUE_EXPR gets processed correctly. */
7230 parm_stmts = (do_parms) ? gimplify_parameters () : NULL;
7231
7232 /* Gimplify the function's body. */
7233 seq = NULL;
7234 gimplify_stmt (body_p, &seq);
7235 outer_bind = gimple_seq_first_stmt (seq);
7236 if (!outer_bind)
7237 {
7238 outer_bind = gimple_build_nop ();
7239 gimplify_seq_add_stmt (&seq, outer_bind);
7240 }
7241
7242 /* The body must contain exactly one statement, a GIMPLE_BIND. If this is
7243 not the case, wrap everything in a GIMPLE_BIND to make it so. */
7244 if (gimple_code (outer_bind) == GIMPLE_BIND
7245 && gimple_seq_first (seq) == gimple_seq_last (seq))
7246 ;
7247 else
7248 outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);
7249
7250 *body_p = NULL_TREE;
7251
7252 /* If we had callee-copies statements, insert them at the beginning
7253 of the function. */
7254 if (!gimple_seq_empty_p (parm_stmts))
7255 {
7256 gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
7257 gimple_bind_set_body (outer_bind, parm_stmts);
7258 }
7259
7260 pop_gimplify_context (outer_bind);
7261 gcc_assert (gimplify_ctxp == NULL);
7262
7263 #ifdef ENABLE_TYPES_CHECKING
7264 if (!errorcount && !sorrycount)
7265 verify_types_in_gimple_seq (gimple_bind_body (outer_bind));
7266 #endif
7267
7268 timevar_pop (TV_TREE_GIMPLIFY);
7269 input_location = saved_location;
7270
7271 return outer_bind;
7272 }
7273
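/* For illustration of the single-GIMPLE_BIND invariant enforced above, a
   rough sketch (temporary names are invented and dumps vary by front end):

       int
       f (int x, int y)
       {
         return x + y * 2;
       }

   gimplifies to approximately

       f (x, y)
       {
         int D.1;
         int D.2;

         D.1 = y * 2;
         D.2 = x + D.1;
         return D.2;
       }

   where the outermost braces are the GIMPLE_BIND returned above and the
   declarations are the temporaries owned by that bind.  */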
7274 /* Entry point to the gimplification pass. FNDECL is the FUNCTION_DECL
7275 node for the function we want to gimplify.
7276
7277 The sequence of GIMPLE statements corresponding to the body of FNDECL
7278 is attached to FNDECL with gimple_set_body; nothing is returned. */
7279
7280 void
7281 gimplify_function_tree (tree fndecl)
7282 {
7283 tree oldfn, parm, ret;
7284 gimple_seq seq;
7285 gimple bind;
7286
7287 oldfn = current_function_decl;
7288 current_function_decl = fndecl;
7289 if (DECL_STRUCT_FUNCTION (fndecl))
7290 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
7291 else
7292 push_struct_function (fndecl);
7293
7294 for (parm = DECL_ARGUMENTS (fndecl); parm ; parm = TREE_CHAIN (parm))
7295 {
7296 /* Preliminarily mark non-addressed complex variables as eligible
7297 for promotion to gimple registers. We'll transform their uses
7298 as we find them. */
7299 if ((TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
7300 || TREE_CODE (TREE_TYPE (parm)) == VECTOR_TYPE)
7301 && !TREE_THIS_VOLATILE (parm)
7302 && !needs_to_live_in_memory (parm))
7303 DECL_GIMPLE_REG_P (parm) = 1;
7304 }
7305
7306 ret = DECL_RESULT (fndecl);
7307 if ((TREE_CODE (TREE_TYPE (ret)) == COMPLEX_TYPE
7308 || TREE_CODE (TREE_TYPE (ret)) == VECTOR_TYPE)
7309 && !needs_to_live_in_memory (ret))
7310 DECL_GIMPLE_REG_P (ret) = 1;
7311
7312 bind = gimplify_body (&DECL_SAVED_TREE (fndecl), fndecl, true);
7313
7314 /* The tree body of the function is no longer needed, replace it
7315 with the new GIMPLE body. */
7316 seq = gimple_seq_alloc ();
7317 gimple_seq_add_stmt (&seq, bind);
7318 gimple_set_body (fndecl, seq);
7319
7320 /* If we're instrumenting function entry/exit, then prepend the call to
7321 the entry hook and wrap the whole function body in a GIMPLE_TRY_FINALLY
7322 so that the exit hook runs on every path out of the function. */
7323 /* ??? Add some way to ignore exceptions for this TFE. */
7324 if (flag_instrument_function_entry_exit
7325 && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
7326 && !flag_instrument_functions_exclude_p (fndecl))
7327 {
7328 tree x;
7329 gimple new_bind;
7330 gimple tf;
7331 gimple_seq cleanup = NULL, body = NULL;
7332
7333 x = implicit_built_in_decls[BUILT_IN_PROFILE_FUNC_EXIT];
7334 gimplify_seq_add_stmt (&cleanup, gimple_build_call (x, 0));
7335 tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);
7336
7337 x = implicit_built_in_decls[BUILT_IN_PROFILE_FUNC_ENTER];
7338 gimplify_seq_add_stmt (&body, gimple_build_call (x, 0));
7339 gimplify_seq_add_stmt (&body, tf);
7340 new_bind = gimple_build_bind (NULL, body, gimple_bind_block (bind));
7341 /* Clear the block for BIND, since it is no longer directly inside
7342 the function, but within a try block. */
7343 gimple_bind_set_block (bind, NULL);
7344
7345 /* Replace the current function body with the body
7346 wrapped in the try/finally TF. */
7347 seq = gimple_seq_alloc ();
7348 gimple_seq_add_stmt (&seq, new_bind);
7349 gimple_set_body (fndecl, seq);
7350 }
7351
7352 DECL_SAVED_TREE (fndecl) = NULL_TREE;
7353
7354 current_function_decl = oldfn;
7355 pop_cfun ();
7356 }
7357
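/* The user-visible effect of the instrumentation above: with
   -finstrument-functions, each instrumented function behaves as if its body
   were wrapped in a try/finally that calls the profiling hooks.  The hooks
   are ordinary user-supplied functions, which must not themselves be
   instrumented; a minimal sketch of their declarations:

       void __cyg_profile_func_enter (void *this_fn, void *call_site)
            __attribute__ ((no_instrument_function));
       void __cyg_profile_func_exit (void *this_fn, void *call_site)
            __attribute__ ((no_instrument_function));

   Conceptually the gimplified body becomes

       __cyg_profile_func_enter (...);     BUILT_IN_PROFILE_FUNC_ENTER
       try
         {
           ... original GIMPLE_BIND ...
         }
       finally
         {
           __cyg_profile_func_exit (...);  BUILT_IN_PROFILE_FUNC_EXIT
         }

   with the hook arguments filled in later, when the builtins are expanded.  */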
7358
7359 /* Some transformations like inlining may invalidate the GIMPLE form
7360 for operands. This function traverses all the operands in STMT and
7361 gimplifies anything that is not a valid gimple operand. Any new
7362 GIMPLE statements are inserted before *GSI_P. */
7363
7364 void
7365 gimple_regimplify_operands (gimple stmt, gimple_stmt_iterator *gsi_p)
7366 {
7367 size_t i, num_ops;
7368 tree orig_lhs = NULL_TREE, lhs, t;
7369 gimple_seq pre = NULL;
7370 gimple post_stmt = NULL;
7371 struct gimplify_ctx gctx;
7372
7373 push_gimplify_context (&gctx);
7374 gimplify_ctxp->into_ssa = gimple_in_ssa_p (cfun);
7375
7376 switch (gimple_code (stmt))
7377 {
7378 case GIMPLE_COND:
7379 gimplify_expr (gimple_cond_lhs_ptr (stmt), &pre, NULL,
7380 is_gimple_val, fb_rvalue);
7381 gimplify_expr (gimple_cond_rhs_ptr (stmt), &pre, NULL,
7382 is_gimple_val, fb_rvalue);
7383 break;
7384 case GIMPLE_SWITCH:
7385 gimplify_expr (gimple_switch_index_ptr (stmt), &pre, NULL,
7386 is_gimple_val, fb_rvalue);
7387 break;
7388 case GIMPLE_OMP_ATOMIC_LOAD:
7389 gimplify_expr (gimple_omp_atomic_load_rhs_ptr (stmt), &pre, NULL,
7390 is_gimple_val, fb_rvalue);
7391 break;
7392 case GIMPLE_ASM:
7393 {
7394 size_t i, noutputs = gimple_asm_noutputs (stmt);
7395 const char *constraint, **oconstraints;
7396 bool allows_mem, allows_reg, is_inout;
7397
7398 oconstraints
7399 = (const char **) alloca ((noutputs) * sizeof (const char *));
7400 for (i = 0; i < noutputs; i++)
7401 {
7402 tree op = gimple_asm_output_op (stmt, i);
7403 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
7404 oconstraints[i] = constraint;
7405 parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
7406 &allows_reg, &is_inout);
7407 gimplify_expr (&TREE_VALUE (op), &pre, NULL,
7408 is_inout ? is_gimple_min_lval : is_gimple_lvalue,
7409 fb_lvalue | fb_mayfail);
7410 }
7411 for (i = 0; i < gimple_asm_ninputs (stmt); i++)
7412 {
7413 tree op = gimple_asm_input_op (stmt, i);
7414 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
7415 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
7416 oconstraints, &allows_mem, &allows_reg);
7417 if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (op))) && allows_mem)
7418 allows_reg = 0;
7419 if (!allows_reg && allows_mem)
7420 gimplify_expr (&TREE_VALUE (op), &pre, NULL,
7421 is_gimple_lvalue, fb_lvalue | fb_mayfail);
7422 else
7423 gimplify_expr (&TREE_VALUE (op), &pre, NULL,
7424 is_gimple_asm_val, fb_rvalue);
7425 }
7426 }
7427 break;
7428 default:
7429 /* NOTE: We start gimplifying operands from last to first to
7430 make sure that side-effects on the RHS of calls, assignments
7431 and ASMs are executed before the LHS. The ordering is not
7432 important for other statements. */
7433 num_ops = gimple_num_ops (stmt);
7434 orig_lhs = gimple_get_lhs (stmt);
7435 for (i = num_ops; i > 0; i--)
7436 {
7437 tree op = gimple_op (stmt, i - 1);
7438 if (op == NULL_TREE)
7439 continue;
7440 if (i == 1 && (is_gimple_call (stmt) || is_gimple_assign (stmt)))
7441 gimplify_expr (&op, &pre, NULL, is_gimple_lvalue, fb_lvalue);
7442 else if (i == 2
7443 && is_gimple_assign (stmt)
7444 && num_ops == 2
7445 && get_gimple_rhs_class (gimple_expr_code (stmt))
7446 == GIMPLE_SINGLE_RHS)
7447 gimplify_expr (&op, &pre, NULL,
7448 rhs_predicate_for (gimple_assign_lhs (stmt)),
7449 fb_rvalue);
7450 else if (i == 2 && is_gimple_call (stmt))
7451 {
7452 if (TREE_CODE (op) == FUNCTION_DECL)
7453 continue;
7454 gimplify_expr (&op, &pre, NULL, is_gimple_call_addr, fb_rvalue);
7455 }
7456 else
7457 gimplify_expr (&op, &pre, NULL, is_gimple_val, fb_rvalue);
7458 gimple_set_op (stmt, i - 1, op);
7459 }
7460
7461 lhs = gimple_get_lhs (stmt);
7462 /* If the LHS changed in a way that requires a simple RHS,
7463 create a temporary. */
7464 if (lhs && !is_gimple_formal_tmp_var (lhs))
7465 {
7466 bool need_temp = false;
7467
7468 if (is_gimple_assign (stmt)
7469 && num_ops == 2
7470 && get_gimple_rhs_class (gimple_expr_code (stmt))
7471 == GIMPLE_SINGLE_RHS)
7472 gimplify_expr (gimple_assign_rhs1_ptr (stmt), &pre, NULL,
7473 rhs_predicate_for (gimple_assign_lhs (stmt)),
7474 fb_rvalue);
7475 else if (is_gimple_reg (lhs))
7476 {
7477 if (is_gimple_reg_type (TREE_TYPE (lhs)))
7478 {
7479 if (is_gimple_call (stmt))
7480 {
7481 i = gimple_call_flags (stmt);
7482 if ((i & ECF_LOOPING_CONST_OR_PURE)
7483 || !(i & (ECF_CONST | ECF_PURE)))
7484 need_temp = true;
7485 }
7486 if (stmt_can_throw_internal (stmt))
7487 need_temp = true;
7488 }
7489 }
7490 else
7491 {
7492 if (is_gimple_reg_type (TREE_TYPE (lhs)))
7493 need_temp = true;
7494 else if (TYPE_MODE (TREE_TYPE (lhs)) != BLKmode)
7495 {
7496 if (is_gimple_call (stmt))
7497 {
7498 tree fndecl = gimple_call_fndecl (stmt);
7499
7500 if (!aggregate_value_p (TREE_TYPE (lhs), fndecl)
7501 && !(fndecl && DECL_RESULT (fndecl)
7502 && DECL_BY_REFERENCE (DECL_RESULT (fndecl))))
7503 need_temp = true;
7504 }
7505 else
7506 need_temp = true;
7507 }
7508 }
7509 if (need_temp)
7510 {
7511 tree temp = create_tmp_var (TREE_TYPE (lhs), NULL);
7512
7513 DECL_GIMPLE_FORMAL_TEMP_P (temp) = 1;
7514 if (TREE_CODE (TREE_TYPE (lhs)) == COMPLEX_TYPE
7515 || TREE_CODE (TREE_TYPE (lhs)) == VECTOR_TYPE)
7516 DECL_GIMPLE_REG_P (temp) = 1;
7517 if (TREE_CODE (orig_lhs) == SSA_NAME)
7518 orig_lhs = SSA_NAME_VAR (orig_lhs);
7519 if (TREE_CODE (orig_lhs) == VAR_DECL
7520 && DECL_BASED_ON_RESTRICT_P (orig_lhs))
7521 {
7522 DECL_BASED_ON_RESTRICT_P (temp) = 1;
7523 SET_DECL_RESTRICT_BASE (temp,
7524 DECL_GET_RESTRICT_BASE (orig_lhs));
7525 }
7526
7527 if (gimple_in_ssa_p (cfun))
7528 temp = make_ssa_name (temp, NULL);
7529 gimple_set_lhs (stmt, temp);
7530 post_stmt = gimple_build_assign (lhs, temp);
7531 if (TREE_CODE (lhs) == SSA_NAME)
7532 SSA_NAME_DEF_STMT (lhs) = post_stmt;
7533 }
7534 }
7535 break;
7536 }
7537
7538 if (gimple_referenced_vars (cfun))
7539 for (t = gimplify_ctxp->temps; t ; t = TREE_CHAIN (t))
7540 add_referenced_var (t);
7541
7542 if (!gimple_seq_empty_p (pre))
7543 {
7544 if (gimple_in_ssa_p (cfun))
7545 {
7546 gimple_stmt_iterator i;
7547
7548 for (i = gsi_start (pre); !gsi_end_p (i); gsi_next (&i))
7549 mark_symbols_for_renaming (gsi_stmt (i));
7550 }
7551 gsi_insert_seq_before (gsi_p, pre, GSI_SAME_STMT);
7552 }
7553 if (post_stmt)
7554 gsi_insert_after (gsi_p, post_stmt, GSI_NEW_STMT);
7555
7556 pop_gimplify_context (NULL);
7557 }
7558
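/* A minimal sketch of a caller, assuming a pass that runs on the CFG and has
   just rewritten some statement operands with possibly non-GIMPLE trees (the
   predicate my_pass_rewrote_p is hypothetical):

       basic_block bb;
       gimple_stmt_iterator gsi;

       FOR_EACH_BB (bb)
         for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
           {
             gimple stmt = gsi_stmt (gsi);
             if (my_pass_rewrote_p (stmt))
               gimple_regimplify_operands (stmt, &gsi);
           }

   Statements needed to re-legalize the operands are inserted before GSI; if
   the LHS itself had to be replaced by a temporary, the copy back to the
   original LHS is inserted right after GSI.  */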
7559
7560 /* Expand EXPR to a list of gimple statements STMTS. If SIMPLE is true,
7561 force the result to be either an SSA_NAME or an invariant; otherwise
7562 just force it to be a GIMPLE rhs expression. If VAR is not NULL, make
7563 the base variable of the final destination be VAR if suitable. */
7564
7565 tree
7566 force_gimple_operand (tree expr, gimple_seq *stmts, bool simple, tree var)
7567 {
7568 tree t;
7569 enum gimplify_status ret;
7570 gimple_predicate gimple_test_f;
7571 struct gimplify_ctx gctx;
7572
7573 *stmts = NULL;
7574
7575 if (is_gimple_val (expr))
7576 return expr;
7577
7578 gimple_test_f = simple ? is_gimple_val : is_gimple_reg_rhs;
7579
7580 push_gimplify_context (&gctx);
7581 gimplify_ctxp->into_ssa = gimple_in_ssa_p (cfun);
7582 gimplify_ctxp->allow_rhs_cond_expr = true;
7583
7584 if (var)
7585 expr = build2 (MODIFY_EXPR, TREE_TYPE (var), var, expr);
7586
7587 if (TREE_CODE (expr) != MODIFY_EXPR
7588 && TREE_TYPE (expr) == void_type_node)
7589 {
7590 gimplify_and_add (expr, stmts);
7591 expr = NULL_TREE;
7592 }
7593 else
7594 {
7595 ret = gimplify_expr (&expr, stmts, NULL, gimple_test_f, fb_rvalue);
7596 gcc_assert (ret != GS_ERROR);
7597 }
7598
7599 if (gimple_referenced_vars (cfun))
7600 for (t = gimplify_ctxp->temps; t ; t = TREE_CHAIN (t))
7601 add_referenced_var (t);
7602
7603 pop_gimplify_context (NULL);
7604
7605 return expr;
7606 }
7607
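/* A minimal usage sketch, with hypothetical locals GSI (a statement iterator
   positioned at the statement that needs the value) and N, STEP (existing
   GIMPLE values): build a GENERIC expression, force it to a GIMPLE value, and
   place the companion statements ourselves:

       gimple_seq stmts = NULL;
       tree bound = fold_build2 (MULT_EXPR, sizetype, n, step);

       bound = force_gimple_operand (bound, &stmts, true, NULL_TREE);
       if (!gimple_seq_empty_p (stmts))
         gsi_insert_seq_before (&gsi, stmts, GSI_SAME_STMT);

   BOUND is then a GIMPLE value (an SSA name or an invariant when in SSA form)
   usable directly as an operand.  */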
7608 /* Invoke force_gimple_operand for EXPR with parameters SIMPLE_P and VAR.
7609 If some statements are produced, emit them at GSI. If BEFORE is true,
7610 the statements are inserted before GSI, otherwise they are inserted after
7611 it. M specifies the way GSI moves after insertion (GSI_SAME_STMT or
7612 GSI_CONTINUE_LINKING are the usual values). */
7613
7614 tree
7615 force_gimple_operand_gsi (gimple_stmt_iterator *gsi, tree expr,
7616 bool simple_p, tree var, bool before,
7617 enum gsi_iterator_update m)
7618 {
7619 gimple_seq stmts;
7620
7621 expr = force_gimple_operand (expr, &stmts, simple_p, var);
7622
7623 if (!gimple_seq_empty_p (stmts))
7624 {
7625 if (gimple_in_ssa_p (cfun))
7626 {
7627 gimple_stmt_iterator i;
7628
7629 for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
7630 mark_symbols_for_renaming (gsi_stmt (i));
7631 }
7632
7633 if (before)
7634 gsi_insert_seq_before (gsi, stmts, m);
7635 else
7636 gsi_insert_seq_after (gsi, stmts, m);
7637 }
7638
7639 return expr;
7640 }
7641
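/* A minimal usage sketch with a hypothetical statement iterator GSI and an
   existing GIMPLE value I: legalize a freshly built index expression and let
   the helper insert any companion statements before the current statement:

       tree idx = fold_build2 (MULT_EXPR, sizetype, i, size_int (4));

       idx = force_gimple_operand_gsi (&gsi, idx, true, NULL_TREE,
                                       true, GSI_SAME_STMT);

   IDX can then be used directly as an operand of the statement at GSI.  */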
7642 #include "gt-gimplify.h"