toplev.c: Include varray.h for statistics dumping.
[gcc.git] / gcc / gimplify.c
1 /* Tree lowering pass. This pass converts the GENERIC functions-as-trees
2 tree representation into the GIMPLE form.
3 Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
4 Free Software Foundation, Inc.
5 Major work done by Sebastian Pop <s.pop@laposte.net>,
6 Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.
7
8 This file is part of GCC.
9
10 GCC is free software; you can redistribute it and/or modify it under
11 the terms of the GNU General Public License as published by the Free
12 Software Foundation; either version 3, or (at your option) any later
13 version.
14
15 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
16 WARRANTY; without even the implied warranty of MERCHANTABILITY or
17 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 for more details.
19
20 You should have received a copy of the GNU General Public License
21 along with GCC; see the file COPYING3. If not see
22 <http://www.gnu.org/licenses/>. */
23
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "tm.h"
28 #include "tree.h"
29 #include "rtl.h"
30 #include "gimple.h"
31 #include "tree-iterator.h"
32 #include "tree-inline.h"
33 #include "diagnostic.h"
34 #include "langhooks.h"
35 #include "langhooks-def.h"
36 #include "tree-flow.h"
37 #include "cgraph.h"
38 #include "timevar.h"
39 #include "except.h"
40 #include "hashtab.h"
41 #include "flags.h"
42 #include "real.h"
43 #include "function.h"
44 #include "output.h"
45 #include "expr.h"
46 #include "ggc.h"
47 #include "toplev.h"
48 #include "target.h"
49 #include "optabs.h"
50 #include "pointer-set.h"
51 #include "splay-tree.h"
52 #include "vec.h"
53 #include "gimple.h"
54 #include "tree-pass.h"
55
56
/* Flags recorded per variable while gimplifying OpenMP constructs,
   describing how the variable is shared between threads.  */

enum gimplify_omp_var_data
{
  GOVD_SEEN = 1 << 0,
  GOVD_EXPLICIT = 1 << 1,
  GOVD_SHARED = 1 << 2,
  GOVD_PRIVATE = 1 << 3,
  GOVD_FIRSTPRIVATE = 1 << 4,
  GOVD_LASTPRIVATE = 1 << 5,
  GOVD_REDUCTION = 1 << 6,
  GOVD_LOCAL = 1 << 7,
  GOVD_DEBUG_PRIVATE = 1 << 8,
  GOVD_PRIVATE_OUTER_REF = 1 << 9,

  /* Mask covering every flag that names a data-sharing class.  */
  GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
			   | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LOCAL)
};
72
73
/* Kind of OpenMP region a gimplify_omp_ctx describes.  */

enum omp_region_type
{
  ORT_WORKSHARE = 0,	/* for/sections/single: no data-sharing scope.  */
  ORT_TASK = 1,
  ORT_PARALLEL = 2,
  ORT_COMBINED_PARALLEL = 3
};
81
/* Per-construct state used while gimplifying the body of an OpenMP
   region; contexts nest via OUTER_CONTEXT.  */

struct gimplify_omp_ctx
{
  struct gimplify_omp_ctx *outer_context;	/* Enclosing OMP context, or NULL.  */
  splay_tree variables;				/* Map keyed by decl, holding GOVD_* flags.  */
  struct pointer_set_t *privatized_types;	/* Types already privatized in this region.  */
  location_t location;				/* Source location of the construct.  */
  enum omp_clause_default_kind default_kind;	/* default() clause in effect.  */
  enum omp_region_type region_type;		/* What kind of region this is.  */
};
91
/* The innermost gimplification context, or NULL when not gimplifying.  */
static struct gimplify_ctx *gimplify_ctxp;

/* The innermost OpenMP region context, or NULL outside any OMP region.  */
static struct gimplify_omp_ctx *gimplify_omp_ctxp;
94
95
/* Formal (expression) temporary table handling: Multiple occurrences of
   the same scalar expression are evaluated into the same temporary.  */

typedef struct gimple_temp_hash_elt
{
  tree val;   /* Key: the expression whose value is cached.  */
  tree temp;  /* Value: the temporary created for VAL.  */
} elt_t;

/* Forward declarations.  */
static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
107
108 /* Mark X addressable. Unlike the langhook we expect X to be in gimple
109 form and we don't do any syntax checking. */
110 void
111 mark_addressable (tree x)
112 {
113 while (handled_component_p (x))
114 x = TREE_OPERAND (x, 0);
115 if (TREE_CODE (x) != VAR_DECL
116 && TREE_CODE (x) != PARM_DECL
117 && TREE_CODE (x) != RESULT_DECL)
118 return ;
119 TREE_ADDRESSABLE (x) = 1;
120 }
121
122 /* Return a hash value for a formal temporary table entry. */
123
124 static hashval_t
125 gimple_tree_hash (const void *p)
126 {
127 tree t = ((const elt_t *) p)->val;
128 return iterative_hash_expr (t, 0);
129 }
130
131 /* Compare two formal temporary table entries. */
132
133 static int
134 gimple_tree_eq (const void *p1, const void *p2)
135 {
136 tree t1 = ((const elt_t *) p1)->val;
137 tree t2 = ((const elt_t *) p2)->val;
138 enum tree_code code = TREE_CODE (t1);
139
140 if (TREE_CODE (t2) != code
141 || TREE_TYPE (t1) != TREE_TYPE (t2))
142 return 0;
143
144 if (!operand_equal_p (t1, t2, 0))
145 return 0;
146
147 /* Only allow them to compare equal if they also hash equal; otherwise
148 results are nondeterminate, and we fail bootstrap comparison. */
149 gcc_assert (gimple_tree_hash (p1) == gimple_tree_hash (p2));
150
151 return 1;
152 }
153
154 /* Link gimple statement GS to the end of the sequence *SEQ_P. If
155 *SEQ_P is NULL, a new sequence is allocated. This function is
156 similar to gimple_seq_add_stmt, but does not scan the operands.
157 During gimplification, we need to manipulate statement sequences
158 before the def/use vectors have been constructed. */
159
160 static void
161 gimplify_seq_add_stmt (gimple_seq *seq_p, gimple gs)
162 {
163 gimple_stmt_iterator si;
164
165 if (gs == NULL)
166 return;
167
168 if (*seq_p == NULL)
169 *seq_p = gimple_seq_alloc ();
170
171 si = gsi_last (*seq_p);
172
173 gsi_insert_after_without_update (&si, gs, GSI_NEW_STMT);
174 }
175
176 /* Append sequence SRC to the end of sequence *DST_P. If *DST_P is
177 NULL, a new sequence is allocated. This function is
178 similar to gimple_seq_add_seq, but does not scan the operands.
179 During gimplification, we need to manipulate statement sequences
180 before the def/use vectors have been constructed. */
181
182 static void
183 gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
184 {
185 gimple_stmt_iterator si;
186
187 if (src == NULL)
188 return;
189
190 if (*dst_p == NULL)
191 *dst_p = gimple_seq_alloc ();
192
193 si = gsi_last (*dst_p);
194 gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
195 }
196
197 /* Set up a context for the gimplifier. */
198
199 void
200 push_gimplify_context (struct gimplify_ctx *c)
201 {
202 memset (c, '\0', sizeof (*c));
203 c->prev_context = gimplify_ctxp;
204 gimplify_ctxp = c;
205 }
206
207 /* Tear down a context for the gimplifier. If BODY is non-null, then
208 put the temporaries into the outer BIND_EXPR. Otherwise, put them
209 in the local_decls.
210
211 BODY is not a sequence, but the first tuple in a sequence. */
212
213 void
214 pop_gimplify_context (gimple body)
215 {
216 struct gimplify_ctx *c = gimplify_ctxp;
217
218 gcc_assert (c && (c->bind_expr_stack == NULL
219 || VEC_empty (gimple, c->bind_expr_stack)));
220 VEC_free (gimple, heap, c->bind_expr_stack);
221 gimplify_ctxp = c->prev_context;
222
223 if (body)
224 declare_vars (c->temps, body, false);
225 else
226 record_vars (c->temps);
227
228 if (c->temp_htab)
229 htab_delete (c->temp_htab);
230 }
231
/* Push GIMPLE_BIND onto the stack of binds currently being gimplified,
   allocating the stack lazily on first use.  */

static void
gimple_push_bind_expr (gimple gimple_bind)
{
  if (gimplify_ctxp->bind_expr_stack == NULL)
    gimplify_ctxp->bind_expr_stack = VEC_alloc (gimple, heap, 8);
  VEC_safe_push (gimple, heap, gimplify_ctxp->bind_expr_stack, gimple_bind);
}
239
/* Pop the innermost GIMPLE_BIND from the bind stack.  */

static void
gimple_pop_bind_expr (void)
{
  VEC_pop (gimple, gimplify_ctxp->bind_expr_stack);
}
245
/* Return the innermost GIMPLE_BIND currently being gimplified.  */

gimple
gimple_current_bind_expr (void)
{
  return VEC_last (gimple, gimplify_ctxp->bind_expr_stack);
}
251
/* Return the stack GIMPLE_BINDs created during gimplification.
   May be NULL if no bind has been pushed yet.  */

VEC(gimple, heap) *
gimple_bind_expr_stack (void)
{
  return gimplify_ctxp->bind_expr_stack;
}
259
/* Returns true iff there is a COND_EXPR between us and the innermost
   CLEANUP_POINT_EXPR.  This info is used by gimple_push_cleanup.
   CONDITIONS counts the nesting depth of COND_EXPRs entered.  */

static bool
gimple_conditional_context (void)
{
  return gimplify_ctxp->conditions > 0;
}
268
/* Note that we've entered a COND_EXPR.  */

static void
gimple_push_condition (void)
{
#ifdef ENABLE_GIMPLE_CHECKING
  /* Entering the outermost conditional context: no conditional
     cleanups may be pending yet.  */
  if (gimplify_ctxp->conditions == 0)
    gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
#endif
  ++(gimplify_ctxp->conditions);
}
280
281 /* Note that we've left a COND_EXPR. If we're back at unconditional scope
282 now, add any conditional cleanups we've seen to the prequeue. */
283
284 static void
285 gimple_pop_condition (gimple_seq *pre_p)
286 {
287 int conds = --(gimplify_ctxp->conditions);
288
289 gcc_assert (conds >= 0);
290 if (conds == 0)
291 {
292 gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
293 gimplify_ctxp->conditional_cleanups = NULL;
294 }
295 }
296
297 /* A stable comparison routine for use with splay trees and DECLs. */
298
299 static int
300 splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
301 {
302 tree a = (tree) xa;
303 tree b = (tree) xb;
304
305 return DECL_UID (a) - DECL_UID (b);
306 }
307
308 /* Create a new omp construct that deals with variable remapping. */
309
310 static struct gimplify_omp_ctx *
311 new_omp_context (enum omp_region_type region_type)
312 {
313 struct gimplify_omp_ctx *c;
314
315 c = XCNEW (struct gimplify_omp_ctx);
316 c->outer_context = gimplify_omp_ctxp;
317 c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
318 c->privatized_types = pointer_set_create ();
319 c->location = input_location;
320 c->region_type = region_type;
321 if (region_type != ORT_TASK)
322 c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
323 else
324 c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;
325
326 return c;
327 }
328
329 /* Destroy an omp construct that deals with variable remapping. */
330
331 static void
332 delete_omp_context (struct gimplify_omp_ctx *c)
333 {
334 splay_tree_delete (c->variables);
335 pointer_set_destroy (c->privatized_types);
336 XDELETE (c);
337 }
338
/* Forward declarations for the OMP variable machinery defined later in
   this file; needed by gimple_add_tmp_var and the bind gimplifier.  */
static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);
341
342 /* A subroutine of append_to_statement_list{,_force}. T is not NULL. */
343
344 static void
345 append_to_statement_list_1 (tree t, tree *list_p)
346 {
347 tree list = *list_p;
348 tree_stmt_iterator i;
349
350 if (!list)
351 {
352 if (t && TREE_CODE (t) == STATEMENT_LIST)
353 {
354 *list_p = t;
355 return;
356 }
357 *list_p = list = alloc_stmt_list ();
358 }
359
360 i = tsi_last (list);
361 tsi_link_after (&i, t, TSI_CONTINUE_LINKING);
362 }
363
364 /* Add T to the end of the list container pointed to by LIST_P.
365 If T is an expression with no effects, it is ignored. */
366
367 void
368 append_to_statement_list (tree t, tree *list_p)
369 {
370 if (t && TREE_SIDE_EFFECTS (t))
371 append_to_statement_list_1 (t, list_p);
372 }
373
374 /* Similar, but the statement is always added, regardless of side effects. */
375
376 void
377 append_to_statement_list_force (tree t, tree *list_p)
378 {
379 if (t != NULL_TREE)
380 append_to_statement_list_1 (t, list_p);
381 }
382
383 /* Both gimplify the statement T and append it to *SEQ_P. This function
384 behaves exactly as gimplify_stmt, but you don't have to pass T as a
385 reference. */
386
387 void
388 gimplify_and_add (tree t, gimple_seq *seq_p)
389 {
390 gimplify_stmt (&t, seq_p);
391 }
392
/* Gimplify statement T into sequence *SEQ_P, and return the first
   tuple in the sequence of generated tuples for this statement.
   Return NULL if gimplifying T produced no tuples.  */

static gimple
gimplify_and_return_first (tree t, gimple_seq *seq_p)
{
  /* Remember where the sequence ended before T was gimplified, so the
     first statement T contributed can be found afterwards.  */
  gimple_stmt_iterator last = gsi_last (*seq_p);

  gimplify_and_add (t, seq_p);

  if (!gsi_end_p (last))
    {
      /* The sequence was non-empty: T's first tuple follows the old
	 last statement.  */
      gsi_next (&last);
      return gsi_stmt (last);
    }
  else
    /* The sequence was empty, so everything in it now came from T.
       This is NULL when T produced no tuples at all.  */
    return gimple_seq_first_stmt (*seq_p);
}
412
/* Strip off a legitimate source ending from the input string NAME of
   length LEN.  Rather than having to know the names used by all of
   our front ends, we strip off an ending of a period followed by
   up to five characters.  (Java uses ".class".)  */

static inline void
remove_suffix (char *name, int len)
{
  int pos;

  /* Scan backwards from the end, looking at offsets 2..7 for the dot
     that starts the suffix; truncate there if found.  */
  for (pos = 2; pos < 8; pos++)
    {
      if (len <= pos)
	break;
      if (name[len - pos] == '.')
	{
	  name[len - pos] = '\0';
	  break;
	}
    }
}
432
433 /* Create a new temporary name with PREFIX. Returns an identifier. */
434
435 static GTY(()) unsigned int tmp_var_id_num;
436
437 tree
438 create_tmp_var_name (const char *prefix)
439 {
440 char *tmp_name;
441
442 if (prefix)
443 {
444 char *preftmp = ASTRDUP (prefix);
445
446 remove_suffix (preftmp, strlen (preftmp));
447 prefix = preftmp;
448 }
449
450 ASM_FORMAT_PRIVATE_NAME (tmp_name, prefix ? prefix : "T", tmp_var_id_num++);
451 return get_identifier (tmp_name);
452 }
453
454
455 /* Create a new temporary variable declaration of type TYPE.
456 Does NOT push it into the current binding. */
457
458 tree
459 create_tmp_var_raw (tree type, const char *prefix)
460 {
461 tree tmp_var;
462 tree new_type;
463
464 /* Make the type of the variable writable. */
465 new_type = build_type_variant (type, 0, 0);
466 TYPE_ATTRIBUTES (new_type) = TYPE_ATTRIBUTES (type);
467
468 tmp_var = build_decl (input_location,
469 VAR_DECL, prefix ? create_tmp_var_name (prefix) : NULL,
470 type);
471
472 /* The variable was declared by the compiler. */
473 DECL_ARTIFICIAL (tmp_var) = 1;
474 /* And we don't want debug info for it. */
475 DECL_IGNORED_P (tmp_var) = 1;
476
477 /* Make the variable writable. */
478 TREE_READONLY (tmp_var) = 0;
479
480 DECL_EXTERNAL (tmp_var) = 0;
481 TREE_STATIC (tmp_var) = 0;
482 TREE_USED (tmp_var) = 1;
483
484 return tmp_var;
485 }
486
487 /* Create a new temporary variable declaration of type TYPE. DOES push the
488 variable into the current binding. Further, assume that this is called
489 only from gimplification or optimization, at which point the creation of
490 certain types are bugs. */
491
492 tree
493 create_tmp_var (tree type, const char *prefix)
494 {
495 tree tmp_var;
496
497 /* We don't allow types that are addressable (meaning we can't make copies),
498 or incomplete. We also used to reject every variable size objects here,
499 but now support those for which a constant upper bound can be obtained.
500 The processing for variable sizes is performed in gimple_add_tmp_var,
501 point at which it really matters and possibly reached via paths not going
502 through this function, e.g. after direct calls to create_tmp_var_raw. */
503 gcc_assert (!TREE_ADDRESSABLE (type) && COMPLETE_TYPE_P (type));
504
505 tmp_var = create_tmp_var_raw (type, prefix);
506 gimple_add_tmp_var (tmp_var);
507 return tmp_var;
508 }
509
510 /* Create a new temporary variable declaration of type TYPE by calling
511 create_tmp_var and if TYPE is a vector or a complex number, mark the new
512 temporary as gimple register. */
513
514 tree
515 create_tmp_reg (tree type, const char *prefix)
516 {
517 tree tmp;
518
519 tmp = create_tmp_var (type, prefix);
520 if (TREE_CODE (type) == COMPLEX_TYPE
521 || TREE_CODE (type) == VECTOR_TYPE)
522 DECL_GIMPLE_REG_P (tmp) = 1;
523
524 return tmp;
525 }
526
527 /* Create a temporary with a name derived from VAL. Subroutine of
528 lookup_tmp_var; nobody else should call this function. */
529
530 static inline tree
531 create_tmp_from_val (tree val)
532 {
533 return create_tmp_var (TREE_TYPE (val), get_name (val));
534 }
535
536 /* Create a temporary to hold the value of VAL. If IS_FORMAL, try to reuse
537 an existing expression temporary. */
538
539 static tree
540 lookup_tmp_var (tree val, bool is_formal)
541 {
542 tree ret;
543
544 /* If not optimizing, never really reuse a temporary. local-alloc
545 won't allocate any variable that is used in more than one basic
546 block, which means it will go into memory, causing much extra
547 work in reload and final and poorer code generation, outweighing
548 the extra memory allocation here. */
549 if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
550 ret = create_tmp_from_val (val);
551 else
552 {
553 elt_t elt, *elt_p;
554 void **slot;
555
556 elt.val = val;
557 if (gimplify_ctxp->temp_htab == NULL)
558 gimplify_ctxp->temp_htab
559 = htab_create (1000, gimple_tree_hash, gimple_tree_eq, free);
560 slot = htab_find_slot (gimplify_ctxp->temp_htab, (void *)&elt, INSERT);
561 if (*slot == NULL)
562 {
563 elt_p = XNEW (elt_t);
564 elt_p->val = val;
565 elt_p->temp = ret = create_tmp_from_val (val);
566 *slot = (void *) elt_p;
567 }
568 else
569 {
570 elt_p = (elt_t *) *slot;
571 ret = elt_p->temp;
572 }
573 }
574
575 return ret;
576 }
577
578
579 /* Return true if T is a CALL_EXPR or an expression that can be
580 assignmed to a temporary. Note that this predicate should only be
581 used during gimplification. See the rationale for this in
582 gimplify_modify_expr. */
583
584 static bool
585 is_gimple_reg_rhs_or_call (tree t)
586 {
587 return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
588 || TREE_CODE (t) == CALL_EXPR);
589 }
590
591 /* Return true if T is a valid memory RHS or a CALL_EXPR. Note that
592 this predicate should only be used during gimplification. See the
593 rationale for this in gimplify_modify_expr. */
594
595 static bool
596 is_gimple_mem_rhs_or_call (tree t)
597 {
598 /* If we're dealing with a renamable type, either source or dest must be
599 a renamed variable. */
600 if (is_gimple_reg_type (TREE_TYPE (t)))
601 return is_gimple_val (t);
602 else
603 return (is_gimple_val (t) || is_gimple_lvalue (t)
604 || TREE_CODE (t) == CALL_EXPR);
605 }
606
607 /* Helper for get_formal_tmp_var and get_initialized_tmp_var. */
608
609 static tree
610 internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
611 bool is_formal)
612 {
613 tree t, mod;
614
615 /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
616 can create an INIT_EXPR and convert it into a GIMPLE_CALL below. */
617 gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
618 fb_rvalue);
619
620 t = lookup_tmp_var (val, is_formal);
621
622 if (is_formal
623 && (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
624 || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE))
625 DECL_GIMPLE_REG_P (t) = 1;
626
627 mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));
628
629 if (EXPR_HAS_LOCATION (val))
630 SET_EXPR_LOCATION (mod, EXPR_LOCATION (val));
631 else
632 SET_EXPR_LOCATION (mod, input_location);
633
634 /* gimplify_modify_expr might want to reduce this further. */
635 gimplify_and_add (mod, pre_p);
636 ggc_free (mod);
637
638 /* If we're gimplifying into ssa, gimplify_modify_expr will have
639 given our temporary an SSA name. Find and return it. */
640 if (gimplify_ctxp->into_ssa)
641 {
642 gimple last = gimple_seq_last_stmt (*pre_p);
643 t = gimple_get_lhs (last);
644 }
645
646 return t;
647 }
648
649 /* Returns a formal temporary variable initialized with VAL. PRE_P is as
650 in gimplify_expr. Only use this function if:
651
652 1) The value of the unfactored expression represented by VAL will not
653 change between the initialization and use of the temporary, and
654 2) The temporary will not be otherwise modified.
655
656 For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
657 and #2 means it is inappropriate for && temps.
658
659 For other cases, use get_initialized_tmp_var instead. */
660
661 tree
662 get_formal_tmp_var (tree val, gimple_seq *pre_p)
663 {
664 return internal_get_tmp_var (val, pre_p, NULL, true);
665 }
666
667 /* Returns a temporary variable initialized with VAL. PRE_P and POST_P
668 are as in gimplify_expr. */
669
670 tree
671 get_initialized_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p)
672 {
673 return internal_get_tmp_var (val, pre_p, post_p, false);
674 }
675
/* Declares all the variables in VARS in SCOPE.  If DEBUG_INFO is
   true, generate debug info for them; otherwise don't.

   SCOPE must be a GIMPLE_BIND.  VARS is a TREE_CHAIN of decls; it is
   destructively reversed (nreverse) before being spliced into the
   bind's variable list.  */

void
declare_vars (tree vars, gimple scope, bool debug_info)
{
  tree last = vars;
  if (last)
    {
      tree temps, block;

      gcc_assert (gimple_code (scope) == GIMPLE_BIND);

      /* Reverse in place; LAST keeps pointing at what is now the tail
	 of TEMPS.  */
      temps = nreverse (last);

      block = gimple_bind_block (scope);
      gcc_assert (!block || TREE_CODE (block) == BLOCK);
      if (!block || !debug_info)
	{
	  /* Prepend TEMPS to the bind's variables: chain the old list
	     after the tail, then install the new head.  */
	  TREE_CHAIN (last) = gimple_bind_vars (scope);
	  gimple_bind_set_vars (scope, temps);
	}
      else
	{
	  /* We need to attach the nodes both to the BIND_EXPR and to its
	     associated BLOCK for debugging purposes.  The key point here
	     is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
	     is a subchain of the BIND_EXPR_VARS of the BIND_EXPR.  */
	  if (BLOCK_VARS (block))
	    BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
	  else
	    {
	      gimple_bind_set_vars (scope,
				    chainon (gimple_bind_vars (scope), temps));
	      BLOCK_VARS (block) = temps;
	    }
	}
    }
}
715
716 /* For VAR a VAR_DECL of variable size, try to find a constant upper bound
717 for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly. Abort if
718 no such upper bound can be obtained. */
719
720 static void
721 force_constant_size (tree var)
722 {
723 /* The only attempt we make is by querying the maximum size of objects
724 of the variable's type. */
725
726 HOST_WIDE_INT max_size;
727
728 gcc_assert (TREE_CODE (var) == VAR_DECL);
729
730 max_size = max_int_size_in_bytes (TREE_TYPE (var));
731
732 gcc_assert (max_size >= 0);
733
734 DECL_SIZE_UNIT (var)
735 = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
736 DECL_SIZE (var)
737 = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
738 }
739
/* Register temporary TMP with the current function: give it a DECL
   context and record it in the appropriate scope -- the current
   gimplify context's temp list, the function's local decls, or the
   enclosing (nested) function's outermost bind.  TMP must not already
   be chained anywhere or seen in a BIND_EXPR.  */

void
gimple_add_tmp_var (tree tmp)
{
  gcc_assert (!TREE_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));

  /* Later processing assumes that the object size is constant, which might
     not be true at this point.  Force the use of a constant upper bound in
     this case.  */
  if (!host_integerp (DECL_SIZE_UNIT (tmp), 1))
    force_constant_size (tmp);

  DECL_CONTEXT (tmp) = current_function_decl;
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;

  if (gimplify_ctxp)
    {
      /* Gimplifying: queue TMP on the context; it is declared for real
	 when the context is popped (see pop_gimplify_context).  */
      TREE_CHAIN (tmp) = gimplify_ctxp->temps;
      gimplify_ctxp->temps = tmp;

      /* Mark temporaries local within the nearest enclosing parallel.  */
      if (gimplify_omp_ctxp)
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  while (ctx && ctx->region_type == ORT_WORKSHARE)
	    ctx = ctx->outer_context;
	  if (ctx)
	    omp_add_variable (ctx, tmp, GOVD_LOCAL | GOVD_SEEN);
	}
    }
  else if (cfun)
    record_vars (tmp);
  else
    {
      gimple_seq body_seq;

      /* This case is for nested functions.  We need to expose the locals
	 they create.  */
      body_seq = gimple_body (current_function_decl);
      declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
    }
}
781
782 /* Determines whether to assign a location to the statement GS. */
783
784 static bool
785 should_carry_location_p (gimple gs)
786 {
787 /* Don't emit a line note for a label. We particularly don't want to
788 emit one for the break label, since it doesn't actually correspond
789 to the beginning of the loop/switch. */
790 if (gimple_code (gs) == GIMPLE_LABEL)
791 return false;
792
793 return true;
794 }
795
796
797 /* Return true if a location should not be emitted for this statement
798 by annotate_one_with_location. */
799
800 static inline bool
801 gimple_do_not_emit_location_p (gimple g)
802 {
803 return gimple_plf (g, GF_PLF_1);
804 }
805
806 /* Mark statement G so a location will not be emitted by
807 annotate_one_with_location. */
808
809 static inline void
810 gimple_set_do_not_emit_location (gimple g)
811 {
812 /* The PLF flags are initialized to 0 when a new tuple is created,
813 so no need to initialize it anywhere. */
814 gimple_set_plf (g, GF_PLF_1, true);
815 }
816
817 /* Set the location for gimple statement GS to LOCATION. */
818
819 static void
820 annotate_one_with_location (gimple gs, location_t location)
821 {
822 if (!gimple_has_location (gs)
823 && !gimple_do_not_emit_location_p (gs)
824 && should_carry_location_p (gs))
825 gimple_set_location (gs, location);
826 }
827
828
829 /* Set LOCATION for all the statements after iterator GSI in sequence
830 SEQ. If GSI is pointing to the end of the sequence, start with the
831 first statement in SEQ. */
832
833 static void
834 annotate_all_with_location_after (gimple_seq seq, gimple_stmt_iterator gsi,
835 location_t location)
836 {
837 if (gsi_end_p (gsi))
838 gsi = gsi_start (seq);
839 else
840 gsi_next (&gsi);
841
842 for (; !gsi_end_p (gsi); gsi_next (&gsi))
843 annotate_one_with_location (gsi_stmt (gsi), location);
844 }
845
846
847 /* Set the location for all the statements in a sequence STMT_P to LOCATION. */
848
849 void
850 annotate_all_with_location (gimple_seq stmt_p, location_t location)
851 {
852 gimple_stmt_iterator i;
853
854 if (gimple_seq_empty_p (stmt_p))
855 return;
856
857 for (i = gsi_start (stmt_p); !gsi_end_p (i); gsi_next (&i))
858 {
859 gimple gs = gsi_stmt (i);
860 annotate_one_with_location (gs, location);
861 }
862 }
863
864
865 /* Similar to copy_tree_r() but do not copy SAVE_EXPR or TARGET_EXPR nodes.
866 These nodes model computations that should only be done once. If we
867 were to unshare something like SAVE_EXPR(i++), the gimplification
868 process would create wrong code. */
869
870 static tree
871 mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
872 {
873 enum tree_code code = TREE_CODE (*tp);
874 /* Don't unshare types, decls, constants and SAVE_EXPR nodes. */
875 if (TREE_CODE_CLASS (code) == tcc_type
876 || TREE_CODE_CLASS (code) == tcc_declaration
877 || TREE_CODE_CLASS (code) == tcc_constant
878 || code == SAVE_EXPR || code == TARGET_EXPR
879 /* We can't do anything sensible with a BLOCK used as an expression,
880 but we also can't just die when we see it because of non-expression
881 uses. So just avert our eyes and cross our fingers. Silly Java. */
882 || code == BLOCK)
883 *walk_subtrees = 0;
884 else
885 {
886 gcc_assert (code != BIND_EXPR);
887 copy_tree_r (tp, walk_subtrees, data);
888 }
889
890 return NULL_TREE;
891 }
892
/* Callback for walk_tree to unshare most of the shared trees rooted at
   *TP.  If *TP has been visited already (i.e., TREE_VISITED (*TP) == 1),
   then *TP is deep copied by calling copy_tree_r.

   This unshares the same trees as copy_tree_r with the exception of
   SAVE_EXPR nodes.  These nodes model computations that should only be
   done once.  If we were to unshare something like SAVE_EXPR(i++), the
   gimplification process would create wrong code.

   Uses TREE_VISITED as a "seen before" bit; unmark_visited_r clears it
   again afterwards.  */

static tree
copy_if_shared_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
		  void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Skip types, decls, and constants.  But we do want to look at their
     types and the bounds of types.  Mark them as visited so we properly
     unmark their subtrees on the unmark pass.  If we've already seen them,
     don't look down further.  */
  if (TREE_CODE_CLASS (code) == tcc_type
      || TREE_CODE_CLASS (code) == tcc_declaration
      || TREE_CODE_CLASS (code) == tcc_constant)
    {
      if (TREE_VISITED (t))
	*walk_subtrees = 0;
      else
	TREE_VISITED (t) = 1;
    }

  /* If this node has been visited already, unshare it and don't look
     any deeper.  */
  else if (TREE_VISITED (t))
    {
      /* The copy produced by mostly_copy_tree_r has TREE_VISITED clear,
	 so its subtree does not get copied a second time.  */
      walk_tree (tp, mostly_copy_tree_r, NULL, NULL);
      *walk_subtrees = 0;
    }

  /* Otherwise, mark the tree as visited and keep looking.  */
  else
    TREE_VISITED (t) = 1;

  return NULL_TREE;
}
937
938 static tree
939 unmark_visited_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
940 void *data ATTRIBUTE_UNUSED)
941 {
942 if (TREE_VISITED (*tp))
943 TREE_VISITED (*tp) = 0;
944 else
945 *walk_subtrees = 0;
946
947 return NULL_TREE;
948 }
949
/* Unshare all the trees in BODY_P, a pointer into the body of FNDECL, and the
   bodies of any nested functions if we are unsharing the entire body of
   FNDECL.  */

static void
unshare_body (tree *body_p, tree fndecl)
{
  /* NOTE(review): cgraph_node is called unconditionally, before the
     recursion test below -- presumably it may create the node as a side
     effect, so do not move it into the conditional.  */
  struct cgraph_node *cgn = cgraph_node (fndecl);

  walk_tree (body_p, copy_if_shared_r, NULL, NULL);
  /* Only recurse into nested functions when unsharing the whole body.  */
  if (body_p == &DECL_SAVED_TREE (fndecl))
    for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
      unshare_body (&DECL_SAVED_TREE (cgn->decl), cgn->decl);
}
964
/* Likewise, but mark all trees as not visited.  Mirrors unshare_body,
   clearing the TREE_VISITED marks it set.  */

static void
unvisit_body (tree *body_p, tree fndecl)
{
  struct cgraph_node *cgn = cgraph_node (fndecl);

  walk_tree (body_p, unmark_visited_r, NULL, NULL);
  /* Only recurse into nested functions when unvisiting the whole body.  */
  if (body_p == &DECL_SAVED_TREE (fndecl))
    for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
      unvisit_body (&DECL_SAVED_TREE (cgn->decl), cgn->decl);
}
977
978 /* Unconditionally make an unshared copy of EXPR. This is used when using
979 stored expressions which span multiple functions, such as BINFO_VTABLE,
980 as the normal unsharing process can't tell that they're shared. */
981
982 tree
983 unshare_expr (tree expr)
984 {
985 walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
986 return expr;
987 }
988 \f
/* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
   contain statements and have a value.  Assign its value to a temporary
   and give it void_type_node.  Returns the temporary, or NULL_TREE if
   WRAPPER was already void.

   TEMP, if non-null, is an INIT_EXPR or MODIFY_EXPR whose RHS will be
   replaced by the wrapper's value expression; otherwise a fresh
   temporary is created.  */

tree
voidify_wrapper_expr (tree wrapper, tree temp)
{
  tree type = TREE_TYPE (wrapper);
  if (type && !VOID_TYPE_P (type))
    {
      tree *p;

      /* Set p to point to the body of the wrapper.  Loop until we find
	 something that isn't a wrapper.  Every wrapper traversed is
	 voidified on the way down.  */
      for (p = &wrapper; p && *p; )
	{
	  switch (TREE_CODE (*p))
	    {
	    case BIND_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      /* For a BIND_EXPR, the body is operand 1.  */
	      p = &BIND_EXPR_BODY (*p);
	      break;

	    case CLEANUP_POINT_EXPR:
	    case TRY_FINALLY_EXPR:
	    case TRY_CATCH_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TREE_OPERAND (*p, 0);
	      break;

	    case STATEMENT_LIST:
	      {
		/* The value of a statement list is its last statement.  */
		tree_stmt_iterator i = tsi_last (*p);
		TREE_SIDE_EFFECTS (*p) = 1;
		TREE_TYPE (*p) = void_type_node;
		p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
	      }
	      break;

	    case COMPOUND_EXPR:
	      /* Advance to the last statement.  Set all container types to void.  */
	      for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		}
	      break;

	    default:
	      /* *P is the value-producing expression itself.  */
	      goto out;
	    }
	}

    out:
      if (p == NULL || IS_EMPTY_STMT (*p))
	/* Nothing produces a value; no temporary is needed.  */
	temp = NULL_TREE;
      else if (temp)
	{
	  /* The wrapper is on the RHS of an assignment that we're pushing
	     down.  */
	  gcc_assert (TREE_CODE (temp) == INIT_EXPR
		      || TREE_CODE (temp) == MODIFY_EXPR);
	  TREE_OPERAND (temp, 1) = *p;
	  *p = temp;
	}
      else
	{
	  /* Capture the wrapper's value in a fresh temporary.  */
	  temp = create_tmp_var (type, "retval");
	  *p = build2 (INIT_EXPR, type, temp, *p);
	}

      return temp;
    }

  return NULL_TREE;
}
1069
1070 /* Prepare calls to builtins to SAVE and RESTORE the stack as well as
1071 a temporary through which they communicate. */
1072
1073 static void
1074 build_stack_save_restore (gimple *save, gimple *restore)
1075 {
1076 tree tmp_var;
1077
1078 *save = gimple_build_call (implicit_built_in_decls[BUILT_IN_STACK_SAVE], 0);
1079 tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
1080 gimple_call_set_lhs (*save, tmp_var);
1081
1082 *restore = gimple_build_call (implicit_built_in_decls[BUILT_IN_STACK_RESTORE],
1083 1, tmp_var);
1084 }
1085
/* Gimplify a BIND_EXPR.  Just voidify and recurse.  The gimplified
   GIMPLE_BIND is appended to PRE_P; *EXPR_P is replaced with the
   temporary holding the bind's value (if voidify_wrapper_expr created
   one) or NULL_TREE.  */

static enum gimplify_status
gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree bind_expr = *expr_p;
  bool old_save_stack = gimplify_ctxp->save_stack;
  tree t;
  gimple gimple_bind;
  gimple_seq body;

  tree temp = voidify_wrapper_expr (bind_expr, NULL);

  /* Mark variables seen in this bind expr.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = TREE_CHAIN (t))
    {
      if (TREE_CODE (t) == VAR_DECL)
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;

	  /* Mark variable as local.  Only do so in an OpenMP context
	     when the variable is not yet recorded there.  */
	  if (ctx && !is_global_var (t)
	      && (! DECL_SEEN_IN_BIND_EXPR_P (t)
		  || splay_tree_lookup (ctx->variables,
					(splay_tree_key) t) == NULL))
	    omp_add_variable (gimplify_omp_ctxp, t, GOVD_LOCAL | GOVD_SEEN);

	  DECL_SEEN_IN_BIND_EXPR_P (t) = 1;

	  if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
	    cfun->has_local_explicit_reg_vars = true;
	}

      /* Preliminarily mark non-addressed complex variables as eligible
	 for promotion to gimple registers.  We'll transform their uses
	 as we find them.
	 We exclude complex types if not optimizing because they can be
	 subject to partial stores in GNU C by means of the __real__ and
	 __imag__ operators and we cannot promote them to total stores
	 (see gimplify_modify_expr_complex_part).  */
      if (optimize
	  && (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
	      || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
	  && !TREE_THIS_VOLATILE (t)
	  && (TREE_CODE (t) == VAR_DECL && !DECL_HARD_REGISTER (t))
	  && !needs_to_live_in_memory (t))
	DECL_GIMPLE_REG_P (t) = 1;
    }

  gimple_bind = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
                                   BIND_EXPR_BLOCK (bind_expr));
  gimple_push_bind_expr (gimple_bind);

  /* Clear save_stack so we can tell whether the body (e.g. a VLA
     declaration, see gimplify_vla_decl) requested a stack save.  */
  gimplify_ctxp->save_stack = false;

  /* Gimplify the body into the GIMPLE_BIND tuple's body.  */
  body = NULL;
  gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
  gimple_bind_set_body (gimple_bind, body);

  if (gimplify_ctxp->save_stack)
    {
      gimple stack_save, stack_restore, gs;
      gimple_seq cleanup, new_body;

      /* Save stack on entry and restore it on exit.  Add a try_finally
	 block to achieve this.  Note that mudflap depends on the
	 format of the emitted code: see mx_register_decls().  */
      build_stack_save_restore (&stack_save, &stack_restore);

      cleanup = new_body = NULL;
      gimplify_seq_add_stmt (&cleanup, stack_restore);
      gs = gimple_build_try (gimple_bind_body (gimple_bind), cleanup,
			     GIMPLE_TRY_FINALLY);

      gimplify_seq_add_stmt (&new_body, stack_save);
      gimplify_seq_add_stmt (&new_body, gs);
      gimple_bind_set_body (gimple_bind, new_body);
    }

  /* Restore the enclosing bind's save_stack state.  */
  gimplify_ctxp->save_stack = old_save_stack;
  gimple_pop_bind_expr ();

  gimplify_seq_add_stmt (pre_p, gimple_bind);

  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL_TREE;
  return GS_ALL_DONE;
}
1180
1181 /* Gimplify a RETURN_EXPR. If the expression to be returned is not a
1182 GIMPLE value, it is assigned to a new temporary and the statement is
1183 re-written to return the temporary.
1184
1185 PRE_P points to the sequence where side effects that must happen before
1186 STMT should be stored. */
1187
1188 static enum gimplify_status
1189 gimplify_return_expr (tree stmt, gimple_seq *pre_p)
1190 {
1191 gimple ret;
1192 tree ret_expr = TREE_OPERAND (stmt, 0);
1193 tree result_decl, result;
1194
1195 if (ret_expr == error_mark_node)
1196 return GS_ERROR;
1197
1198 if (!ret_expr
1199 || TREE_CODE (ret_expr) == RESULT_DECL
1200 || ret_expr == error_mark_node)
1201 {
1202 gimple ret = gimple_build_return (ret_expr);
1203 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1204 gimplify_seq_add_stmt (pre_p, ret);
1205 return GS_ALL_DONE;
1206 }
1207
1208 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
1209 result_decl = NULL_TREE;
1210 else
1211 {
1212 result_decl = TREE_OPERAND (ret_expr, 0);
1213
1214 /* See through a return by reference. */
1215 if (TREE_CODE (result_decl) == INDIRECT_REF)
1216 result_decl = TREE_OPERAND (result_decl, 0);
1217
1218 gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
1219 || TREE_CODE (ret_expr) == INIT_EXPR)
1220 && TREE_CODE (result_decl) == RESULT_DECL);
1221 }
1222
1223 /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
1224 Recall that aggregate_value_p is FALSE for any aggregate type that is
1225 returned in registers. If we're returning values in registers, then
1226 we don't want to extend the lifetime of the RESULT_DECL, particularly
1227 across another call. In addition, for those aggregates for which
1228 hard_function_value generates a PARALLEL, we'll die during normal
1229 expansion of structure assignments; there's special code in expand_return
1230 to handle this case that does not exist in expand_expr. */
1231 if (!result_decl
1232 || aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
1233 result = result_decl;
1234 else if (gimplify_ctxp->return_temp)
1235 result = gimplify_ctxp->return_temp;
1236 else
1237 {
1238 result = create_tmp_reg (TREE_TYPE (result_decl), NULL);
1239
1240 /* ??? With complex control flow (usually involving abnormal edges),
1241 we can wind up warning about an uninitialized value for this. Due
1242 to how this variable is constructed and initialized, this is never
1243 true. Give up and never warn. */
1244 TREE_NO_WARNING (result) = 1;
1245
1246 gimplify_ctxp->return_temp = result;
1247 }
1248
1249 /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
1250 Then gimplify the whole thing. */
1251 if (result != result_decl)
1252 TREE_OPERAND (ret_expr, 0) = result;
1253
1254 gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);
1255
1256 ret = gimple_build_return (result);
1257 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1258 gimplify_seq_add_stmt (pre_p, ret);
1259
1260 return GS_ALL_DONE;
1261 }
1262
/* Gimplify DECL, a VAR_DECL whose size is not a compile-time constant:
   simplify its size expressions, allocate its storage via
   __builtin_alloca, and arrange for all further references to go
   through a pointer temporary.  Generated statements are appended to
   *SEQ_P.  */

static void
gimplify_vla_decl (tree decl, gimple_seq *seq_p)
{
  /* This is a variable-sized decl.  Simplify its size and mark it
     for deferred expansion.  Note that mudflap depends on the format
     of the emitted code: see mx_register_decls().  */
  tree t, addr, ptr_type;

  gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
  gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);

  /* All occurrences of this decl in final gimplified code will be
     replaced by indirection.  Setting DECL_VALUE_EXPR does two
     things: First, it lets the rest of the gimplifier know what
     replacement to use.  Second, it lets the debug info know
     where to find the value.  */
  ptr_type = build_pointer_type (TREE_TYPE (decl));
  addr = create_tmp_var (ptr_type, get_name (decl));
  DECL_IGNORED_P (addr) = 0;
  t = build_fold_indirect_ref (addr);
  SET_DECL_VALUE_EXPR (decl, t);
  DECL_HAS_VALUE_EXPR_P (decl) = 1;

  /* addr = (ptr_type) __builtin_alloca (DECL_SIZE_UNIT (decl));  */
  t = built_in_decls[BUILT_IN_ALLOCA];
  t = build_call_expr (t, 1, DECL_SIZE_UNIT (decl));
  t = fold_convert (ptr_type, t);
  t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);

  gimplify_and_add (t, seq_p);

  /* Indicate that we need to restore the stack level when the
     enclosing BIND_EXPR is exited.  */
  gimplify_ctxp->save_stack = true;
}
1297
1298
/* Gimplifies a DECL_EXPR node *STMT_P by making any necessary allocation
   and initialization explicit.  Allocation/initialization statements are
   appended to SEQ_P; *STMT_P is cleared.  */

static enum gimplify_status
gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
{
  tree stmt = *stmt_p;
  tree decl = DECL_EXPR_DECL (stmt);

  *stmt_p = NULL_TREE;

  if (TREE_TYPE (decl) == error_mark_node)
    return GS_ERROR;

  if ((TREE_CODE (decl) == TYPE_DECL
       || TREE_CODE (decl) == VAR_DECL)
      && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
    gimplify_type_sizes (TREE_TYPE (decl), seq_p);

  if (TREE_CODE (decl) == VAR_DECL && !DECL_EXTERNAL (decl))
    {
      tree init = DECL_INITIAL (decl);

      /* Treat as a VLA any decl whose size is not constant, and also
	 any large automatic when -fstack-check=generic is in effect,
	 so that the allocation is emitted explicitly and can be
	 checked.  */
      if (TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
	  || (!TREE_STATIC (decl)
	      && flag_stack_check == GENERIC_STACK_CHECK
	      && compare_tree_int (DECL_SIZE_UNIT (decl),
				   STACK_CHECK_MAX_VAR_SIZE) > 0))
	gimplify_vla_decl (decl, seq_p);

      if (init && init != error_mark_node)
	{
	  if (!TREE_STATIC (decl))
	    {
	      /* Turn the initializer into an explicit assignment
		 statement; the tree node itself is no longer needed.  */
	      DECL_INITIAL (decl) = NULL_TREE;
	      init = build2 (INIT_EXPR, void_type_node, decl, init);
	      gimplify_and_add (init, seq_p);
	      ggc_free (init);
	    }
	  else
	    /* We must still examine initializers for static variables
	       as they may contain a label address.  */
	    walk_tree (&init, force_labels_r, NULL, NULL);
	}

      /* Some front ends do not explicitly declare all anonymous
	 artificial variables.  We compensate here by declaring the
	 variables, though it would be better if the front ends would
	 explicitly declare them.  */
      if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
	  && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
	gimple_add_tmp_var (decl);
    }

  return GS_ALL_DONE;
}
1355
1356 /* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body
1357 and replacing the LOOP_EXPR with goto, but if the loop contains an
1358 EXIT_EXPR, we need to append a label for it to jump to. */
1359
1360 static enum gimplify_status
1361 gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
1362 {
1363 tree saved_label = gimplify_ctxp->exit_label;
1364 tree start_label = create_artificial_label (UNKNOWN_LOCATION);
1365
1366 gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));
1367
1368 gimplify_ctxp->exit_label = NULL_TREE;
1369
1370 gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);
1371
1372 gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));
1373
1374 if (gimplify_ctxp->exit_label)
1375 gimplify_seq_add_stmt (pre_p, gimple_build_label (gimplify_ctxp->exit_label));
1376
1377 gimplify_ctxp->exit_label = saved_label;
1378
1379 *expr_p = NULL;
1380 return GS_ALL_DONE;
1381 }
1382
1383 /* Gimplifies a statement list onto a sequence. These may be created either
1384 by an enlightened front-end, or by shortcut_cond_expr. */
1385
1386 static enum gimplify_status
1387 gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
1388 {
1389 tree temp = voidify_wrapper_expr (*expr_p, NULL);
1390
1391 tree_stmt_iterator i = tsi_start (*expr_p);
1392
1393 while (!tsi_end_p (i))
1394 {
1395 gimplify_stmt (tsi_stmt_ptr (i), pre_p);
1396 tsi_delink (&i);
1397 }
1398
1399 if (temp)
1400 {
1401 *expr_p = temp;
1402 return GS_OK;
1403 }
1404
1405 return GS_ALL_DONE;
1406 }
1407
1408 /* Compare two case labels. Because the front end should already have
1409 made sure that case ranges do not overlap, it is enough to only compare
1410 the CASE_LOW values of each case label. */
1411
1412 static int
1413 compare_case_labels (const void *p1, const void *p2)
1414 {
1415 const_tree const case1 = *(const_tree const*)p1;
1416 const_tree const case2 = *(const_tree const*)p2;
1417
1418 /* The 'default' case label always goes first. */
1419 if (!CASE_LOW (case1))
1420 return -1;
1421 else if (!CASE_LOW (case2))
1422 return 1;
1423 else
1424 return tree_int_cst_compare (CASE_LOW (case1), CASE_LOW (case2));
1425 }
1426
1427
1428 /* Sort the case labels in LABEL_VEC in place in ascending order. */
1429
1430 void
1431 sort_case_labels (VEC(tree,heap)* label_vec)
1432 {
1433 size_t len = VEC_length (tree, label_vec);
1434 qsort (VEC_address (tree, label_vec), len, sizeof (tree),
1435 compare_case_labels);
1436 }
1437
1438
/* Gimplify a SWITCH_EXPR, and collect a TREE_VEC of the labels it can
   branch to.  Emits a GIMPLE_SWITCH followed by the gimplified body
   onto PRE_P.  */

static enum gimplify_status
gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree switch_expr = *expr_p;
  gimple_seq switch_body_seq = NULL;
  enum gimplify_status ret;

  ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
                       fb_rvalue);
  if (ret == GS_ERROR || ret == GS_UNHANDLED)
    return ret;

  if (SWITCH_BODY (switch_expr))
    {
      VEC (tree,heap) *labels;
      VEC (tree,heap) *saved_labels;
      tree default_case = NULL_TREE;
      size_t i, len;
      gimple gimple_switch;

      /* If someone can be bothered to fill in the labels, they can
	 be bothered to null out the body too.  */
      gcc_assert (!SWITCH_LABELS (switch_expr));

      /* save old labels, get new ones from body, then restore the old
	 labels.  Save all the things from the switch body to append after.  */
      saved_labels = gimplify_ctxp->case_labels;
      gimplify_ctxp->case_labels = VEC_alloc (tree, heap, 8);

      gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);
      labels = gimplify_ctxp->case_labels;
      gimplify_ctxp->case_labels = saved_labels;

      /* Weed out the labels vector: drop empty ranges, and pull the
	 default label (the one with no CASE_LOW) out into DEFAULT_CASE.  */
      i = 0;
      while (i < VEC_length (tree, labels))
	{
	  tree elt = VEC_index (tree, labels, i);
	  tree low = CASE_LOW (elt);
	  bool remove_element = FALSE;

	  if (low)
	    {
	      /* Discard empty ranges.  */
	      tree high = CASE_HIGH (elt);
	      if (high && tree_int_cst_lt (high, low))
		remove_element = TRUE;
	    }
	  else
	    {
	      /* The default case must be the last label in the list.  */
	      gcc_assert (!default_case);
	      default_case = elt;
	      remove_element = TRUE;
	    }

	  if (remove_element)
	    VEC_ordered_remove (tree, labels, i);
	  else
	    i++;
	}
      len = i;

      if (!VEC_empty (tree, labels))
	sort_case_labels (labels);

      if (!default_case)
	{
	  tree type = TREE_TYPE (switch_expr);

	  /* If the switch has no default label, add one, so that we jump
	     around the switch body.  If the labels already cover the whole
	     range of type, add the default label pointing to one of the
	     existing labels.  */
	  if (type == void_type_node)
	    type = TREE_TYPE (SWITCH_COND (switch_expr));
	  if (len
	      && INTEGRAL_TYPE_P (type)
	      && TYPE_MIN_VALUE (type)
	      && TYPE_MAX_VALUE (type)
	      && tree_int_cst_equal (CASE_LOW (VEC_index (tree, labels, 0)),
				     TYPE_MIN_VALUE (type)))
	    {
	      tree low, high = CASE_HIGH (VEC_index (tree, labels, len - 1));
	      if (!high)
		high = CASE_LOW (VEC_index (tree, labels, len - 1));
	      if (tree_int_cst_equal (high, TYPE_MAX_VALUE (type)))
		{
		  /* The labels span [TYPE_MIN, TYPE_MAX]; check for gaps
		     between consecutive (sorted) labels using double-word
		     (low/high) integer arithmetic.  */
		  for (i = 1; i < len; i++)
		    {
		      high = CASE_LOW (VEC_index (tree, labels, i));
		      low = CASE_HIGH (VEC_index (tree, labels, i - 1));
		      if (!low)
			low = CASE_LOW (VEC_index (tree, labels, i - 1));
		      if ((TREE_INT_CST_LOW (low) + 1
			   != TREE_INT_CST_LOW (high))
			  || (TREE_INT_CST_HIGH (low)
			      + (TREE_INT_CST_LOW (high) == 0)
			      != TREE_INT_CST_HIGH (high)))
			break;
		    }
		  /* No gaps: the default can simply reuse the first
		     label; it is unreachable anyway.  */
		  if (i == len)
		    default_case = build3 (CASE_LABEL_EXPR, void_type_node,
					   NULL_TREE, NULL_TREE,
					   CASE_LABEL (VEC_index (tree,
								  labels, 0)));
		}
	    }

	  if (!default_case)
	    {
	      gimple new_default;

	      /* Synthesize a default label placed after the body, so
		 an unmatched value jumps around the switch body.  */
	      default_case
		= build3 (CASE_LABEL_EXPR, void_type_node,
			  NULL_TREE, NULL_TREE,
			  create_artificial_label (UNKNOWN_LOCATION));
	      new_default = gimple_build_label (CASE_LABEL (default_case));
	      gimplify_seq_add_stmt (&switch_body_seq, new_default);
	    }
	}

      gimple_switch = gimple_build_switch_vec (SWITCH_COND (switch_expr),
					       default_case, labels);
      gimplify_seq_add_stmt (pre_p, gimple_switch);
      gimplify_seq_add_seq (pre_p, switch_body_seq);
      VEC_free(tree, heap, labels);
    }
  else
    gcc_assert (SWITCH_LABELS (switch_expr));

  return GS_ALL_DONE;
}
1574
1575
1576 static enum gimplify_status
1577 gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
1578 {
1579 struct gimplify_ctx *ctxp;
1580 gimple gimple_label;
1581
1582 /* Invalid OpenMP programs can play Duff's Device type games with
1583 #pragma omp parallel. At least in the C front end, we don't
1584 detect such invalid branches until after gimplification. */
1585 for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
1586 if (ctxp->case_labels)
1587 break;
1588
1589 gimple_label = gimple_build_label (CASE_LABEL (*expr_p));
1590 VEC_safe_push (tree, heap, ctxp->case_labels, *expr_p);
1591 gimplify_seq_add_stmt (pre_p, gimple_label);
1592
1593 return GS_ALL_DONE;
1594 }
1595
1596 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
1597 if necessary. */
1598
1599 tree
1600 build_and_jump (tree *label_p)
1601 {
1602 if (label_p == NULL)
1603 /* If there's nowhere to jump, just fall through. */
1604 return NULL_TREE;
1605
1606 if (*label_p == NULL_TREE)
1607 {
1608 tree label = create_artificial_label (UNKNOWN_LOCATION);
1609 *label_p = label;
1610 }
1611
1612 return build1 (GOTO_EXPR, void_type_node, *label_p);
1613 }
1614
1615 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
1616 This also involves building a label to jump to and communicating it to
1617 gimplify_loop_expr through gimplify_ctxp->exit_label. */
1618
1619 static enum gimplify_status
1620 gimplify_exit_expr (tree *expr_p)
1621 {
1622 tree cond = TREE_OPERAND (*expr_p, 0);
1623 tree expr;
1624
1625 expr = build_and_jump (&gimplify_ctxp->exit_label);
1626 expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
1627 *expr_p = expr;
1628
1629 return GS_OK;
1630 }
1631
1632 /* A helper function to be called via walk_tree. Mark all labels under *TP
1633 as being forced. To be called for DECL_INITIAL of static variables. */
1634
1635 tree
1636 force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1637 {
1638 if (TYPE_P (*tp))
1639 *walk_subtrees = 0;
1640 if (TREE_CODE (*tp) == LABEL_DECL)
1641 FORCED_LABEL (*tp) = 1;
1642
1643 return NULL_TREE;
1644 }
1645
/* *EXPR_P is a COMPONENT_REF being used as an rvalue.  If its type is
   different from its canonical type, wrap the whole thing inside a
   NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
   type.

   The canonical type of a COMPONENT_REF is the type of the field being
   referenced--unless the field is a bit-field which can be read directly
   in a smaller mode, in which case the canonical type is the
   sign-appropriate type corresponding to that mode.  */

static void
canonicalize_component_ref (tree *expr_p)
{
  tree expr = *expr_p;
  tree type;

  gcc_assert (TREE_CODE (expr) == COMPONENT_REF);

  /* For integral fields, get_unwidened gives the narrowest type the
     field can be read in (the bit-field case); otherwise the canonical
     type is simply the field's declared type.  */
  if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
    type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
  else
    type = TREE_TYPE (TREE_OPERAND (expr, 1));

  /* One could argue that all the stuff below is not necessary for
     the non-bitfield case and declare it a FE error if type
     adjustment would be needed.  */
  if (TREE_TYPE (expr) != type)
    {
#ifdef ENABLE_TYPES_CHECKING
      tree old_type = TREE_TYPE (expr);
#endif
      int type_quals;

      /* We need to preserve qualifiers and propagate them from
	 operand 0.  */
      type_quals = TYPE_QUALS (type)
	| TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
      if (TYPE_QUALS (type) != type_quals)
	type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);

      /* Set the type of the COMPONENT_REF to the underlying type.  */
      TREE_TYPE (expr) = type;

#ifdef ENABLE_TYPES_CHECKING
      /* It is now a FE error, if the conversion from the canonical
	 type to the original expression type is not useless.  */
      gcc_assert (useless_type_conversion_p (old_type, type));
#endif
    }
}
1696
/* If a NOP conversion is changing a pointer to array of foo to a pointer
   to foo, embed that change in the ADDR_EXPR by converting
      T array[U];
      (T *)&array
   ==>
      &array[L]
   where L is the lower bound.  For simplicity, only do this for constant
   lower bound.
   The constraint is that the type of &array[L] is trivially convertible
   to T *.  */

static void
canonicalize_addr_expr (tree *expr_p)
{
  tree expr = *expr_p;
  tree addr_expr = TREE_OPERAND (expr, 0);
  tree datype, ddatype, pddatype;

  /* We simplify only conversions from an ADDR_EXPR to a pointer type.  */
  if (!POINTER_TYPE_P (TREE_TYPE (expr))
      || TREE_CODE (addr_expr) != ADDR_EXPR)
    return;

  /* The addr_expr type should be a pointer to an array.  */
  datype = TREE_TYPE (TREE_TYPE (addr_expr));
  if (TREE_CODE (datype) != ARRAY_TYPE)
    return;

  /* The pointer to element type shall be trivially convertible to
     the expression pointer type.  */
  ddatype = TREE_TYPE (datype);
  pddatype = build_pointer_type (ddatype);
  if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
				  pddatype))
    return;

  /* The lower bound and element sizes must be constant.  */
  if (!TYPE_SIZE_UNIT (ddatype)
      || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
      || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
      || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
    return;

  /* All checks succeeded.  Build a new node to merge the cast.  */
  *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
		    TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
		    NULL_TREE, NULL_TREE);
  *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);

  /* We can have stripped a required restrict qualifier above.  */
  if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
    *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
}
1750
/* *EXPR_P is a NOP_EXPR or CONVERT_EXPR.  Remove it and/or other conversions
   underneath as appropriate.  Always returns GS_OK so the caller
   re-examines the (possibly rewritten) expression.  */

static enum gimplify_status
gimplify_conversion (tree *expr_p)
{
  tree tem;
  location_t loc = EXPR_LOCATION (*expr_p);
  gcc_assert (CONVERT_EXPR_P (*expr_p));

  /* Then strip away all but the outermost conversion.  */
  STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));

  /* And remove the outermost conversion if it's useless.  */
  if (tree_ssa_useless_type_conversion (*expr_p))
    *expr_p = TREE_OPERAND (*expr_p, 0);

  /* Attempt to avoid NOP_EXPR by producing reference to a subtype.
     For example this fold (subclass *)&A into &A->subclass avoiding
     a need for statement.  */
  if (CONVERT_EXPR_P (*expr_p)
      && POINTER_TYPE_P (TREE_TYPE (*expr_p))
      && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (*expr_p, 0)))
      && (tem = maybe_fold_offset_to_address
	  (EXPR_LOCATION (*expr_p), TREE_OPERAND (*expr_p, 0),
	   integer_zero_node, TREE_TYPE (*expr_p))) != NULL_TREE)
    *expr_p = tem;

  /* If we still have a conversion at the toplevel,
     then canonicalize some constructs.  */
  if (CONVERT_EXPR_P (*expr_p))
    {
      tree sub = TREE_OPERAND (*expr_p, 0);

      /* If a NOP conversion is changing the type of a COMPONENT_REF
	 expression, then canonicalize its type now in order to expose more
	 redundant conversions.  */
      if (TREE_CODE (sub) == COMPONENT_REF)
	canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));

      /* If a NOP conversion is changing a pointer to array of foo
	 to a pointer to foo, embed that change in the ADDR_EXPR.  */
      else if (TREE_CODE (sub) == ADDR_EXPR)
	canonicalize_addr_expr (expr_p);
    }

  /* If we have a conversion to a non-register type force the
     use of a VIEW_CONVERT_EXPR instead.  */
  if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
    *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
			       TREE_OPERAND (*expr_p, 0));

  return GS_OK;
}
1805
1806 /* Nonlocal VLAs seen in the current function. */
1807 static struct pointer_set_t *nonlocal_vlas;
1808
/* Gimplify a VAR_DECL or PARM_DECL.  Returns GS_OK if we expanded a
   DECL_VALUE_EXPR, and it's worth re-examining things.  */

static enum gimplify_status
gimplify_var_or_parm_decl (tree *expr_p)
{
  tree decl = *expr_p;

  /* ??? If this is a local variable, and it has not been seen in any
     outer BIND_EXPR, then it's probably the result of a duplicate
     declaration, for which we've already issued an error.  It would
     be really nice if the front end wouldn't leak these at all.
     Currently the only known culprit is C++ destructors, as seen
     in g++.old-deja/g++.jason/binding.C.  */
  if (TREE_CODE (decl) == VAR_DECL
      && !DECL_SEEN_IN_BIND_EXPR_P (decl)
      && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
      && decl_function_context (decl) == current_function_decl)
    {
      gcc_assert (errorcount || sorrycount);
      return GS_ERROR;
    }

  /* When within an OpenMP context, notice uses of variables.  */
  if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
    return GS_ALL_DONE;

  /* If the decl is an alias for another expression, substitute it now.  */
  if (DECL_HAS_VALUE_EXPR_P (decl))
    {
      tree value_expr = DECL_VALUE_EXPR (decl);

      /* For referenced nonlocal VLAs add a decl for debugging purposes
	 to the current function.  */
      if (TREE_CODE (decl) == VAR_DECL
	  && TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
	  && nonlocal_vlas != NULL
	  && TREE_CODE (value_expr) == INDIRECT_REF
	  && TREE_CODE (TREE_OPERAND (value_expr, 0)) == VAR_DECL
	  && decl_function_context (decl) != current_function_decl)
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  while (ctx && ctx->region_type == ORT_WORKSHARE)
	    ctx = ctx->outer_context;
	  /* Only add the debug copy once per decl; pointer_set_insert
	     returns nonzero if DECL was already recorded.  */
	  if (!ctx && !pointer_set_insert (nonlocal_vlas, decl))
	    {
	      tree copy = copy_node (decl), block;

	      lang_hooks.dup_lang_specific_decl (copy);
	      SET_DECL_RTL (copy, NULL_RTX);
	      TREE_USED (copy) = 1;
	      /* Chain the copy into the outermost block's variables so
		 the debug info can find it.  */
	      block = DECL_INITIAL (current_function_decl);
	      TREE_CHAIN (copy) = BLOCK_VARS (block);
	      BLOCK_VARS (block) = copy;
	      SET_DECL_VALUE_EXPR (copy, unshare_expr (value_expr));
	      DECL_HAS_VALUE_EXPR_P (copy) = 1;
	    }
	}

      *expr_p = unshare_expr (value_expr);
      return GS_OK;
    }

  return GS_ALL_DONE;
}
1874

/* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
   node *EXPR_P.

      compound_lval
	      : min_lval '[' val ']'
	      | min_lval '.' ID
	      | compound_lval '[' val ']'
	      | compound_lval '.' ID

   This is not part of the original SIMPLE definition, which separates
   array and member references, but it seems reasonable to handle them
   together.  Also, this way we don't run into problems with union
   aliasing; gcc requires that for accesses through a union to alias, the
   union reference must be explicit, which was not always the case when we
   were splitting up array and member refs.

   PRE_P points to the sequence where side effects that must happen before
   *EXPR_P should be stored.

   POST_P points to the sequence where side effects that must happen after
   *EXPR_P should be stored.  */

static enum gimplify_status
gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			fallback_t fallback)
{
  tree *p;
  VEC(tree,heap) *stack;
  enum gimplify_status ret = GS_OK, tret;
  int i;
  location_t loc = EXPR_LOCATION (*expr_p);

  /* Create a stack of the subexpressions so later we can walk them in
     order from inner to outer.  */
  stack = VEC_alloc (tree, heap, 10);

  /* We can handle anything that get_inner_reference can deal with.  */
  for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
    {
    restart:
      /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs.  */
      if (TREE_CODE (*p) == INDIRECT_REF)
	*p = fold_indirect_ref_loc (loc, *p);

      if (handled_component_p (*p))
	;
      /* Expand DECL_VALUE_EXPR now.  In some cases that may expose
	 additional COMPONENT_REFs.  */
      else if ((TREE_CODE (*p) == VAR_DECL || TREE_CODE (*p) == PARM_DECL)
	       && gimplify_var_or_parm_decl (p) == GS_OK)
	goto restart;
      else
	break;

      VEC_safe_push (tree, heap, stack, *p);
    }

  gcc_assert (VEC_length (tree, stack));

  /* Now STACK is a stack of pointers to all the refs we've walked through
     and P points to the innermost expression.

     Java requires that we elaborated nodes in source order.  That
     means we must gimplify the inner expression followed by each of
     the indices, in order.  But we can't gimplify the inner
     expression until we deal with any variable bounds, sizes, or
     positions in order to deal with PLACEHOLDER_EXPRs.

     So we do this in three steps.  First we deal with the annotations
     for any variables in the components, then we gimplify the base,
     then we gimplify any indices, from left to right.  */
  for (i = VEC_length (tree, stack) - 1; i >= 0; i--)
    {
      tree t = VEC_index (tree, stack, i);

      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
	{
	  /* Gimplify the low bound and element type size and put them into
	     the ARRAY_REF.  If these values are set, they have already been
	     gimplified.  */
	  if (TREE_OPERAND (t, 2) == NULL_TREE)
	    {
	      tree low = unshare_expr (array_ref_low_bound (t));
	      if (!is_gimple_min_invariant (low))
		{
		  TREE_OPERAND (t, 2) = low;
		  tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
					post_p, is_gimple_reg,
					fb_rvalue);
		  ret = MIN (ret, tret);
		}
	    }

	  if (!TREE_OPERAND (t, 3))
	    {
	      tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
	      tree elmt_size = unshare_expr (array_ref_element_size (t));
	      tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));

	      /* Divide the element size by the alignment of the element
		 type (above).  */
	      elmt_size = size_binop_loc (loc, EXACT_DIV_EXPR, elmt_size, factor);

	      if (!is_gimple_min_invariant (elmt_size))
		{
		  TREE_OPERAND (t, 3) = elmt_size;
		  tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
					post_p, is_gimple_reg,
					fb_rvalue);
		  ret = MIN (ret, tret);
		}
	    }
	}
      else if (TREE_CODE (t) == COMPONENT_REF)
	{
	  /* Set the field offset into T and gimplify it.  */
	  if (!TREE_OPERAND (t, 2))
	    {
	      tree offset = unshare_expr (component_ref_field_offset (t));
	      tree field = TREE_OPERAND (t, 1);
	      tree factor
		= size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);

	      /* Divide the offset by its alignment.  */
	      offset = size_binop_loc (loc, EXACT_DIV_EXPR, offset, factor);

	      if (!is_gimple_min_invariant (offset))
		{
		  TREE_OPERAND (t, 2) = offset;
		  tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
					post_p, is_gimple_reg,
					fb_rvalue);
		  ret = MIN (ret, tret);
		}
	    }
	}
    }

  /* Step 2 is to gimplify the base expression.  Make sure lvalue is set
     so as to match the min_lval predicate.  Failure to do so may result
     in the creation of large aggregate temporaries.  */
  tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
			fallback | fb_lvalue);
  ret = MIN (ret, tret);

  /* And finally, the indices and operands to BIT_FIELD_REF.  During this
     loop we also remove any useless conversions.  */
  for (; VEC_length (tree, stack) > 0; )
    {
      tree t = VEC_pop (tree, stack);

      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
	{
	  /* Gimplify the dimension.  */
	  if (!is_gimple_min_invariant (TREE_OPERAND (t, 1)))
	    {
	      tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
				    is_gimple_val, fb_rvalue);
	      ret = MIN (ret, tret);
	    }
	}
      else if (TREE_CODE (t) == BIT_FIELD_REF)
	{
	  tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
				is_gimple_val, fb_rvalue);
	  ret = MIN (ret, tret);
	  tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
				is_gimple_val, fb_rvalue);
	  ret = MIN (ret, tret);
	}

      STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));

      /* The innermost expression P may have originally had
	 TREE_SIDE_EFFECTS set which would have caused all the outer
	 expressions in *EXPR_P leading to P to also have had
	 TREE_SIDE_EFFECTS set.  */
      recalculate_side_effects (t);
    }

  /* If the outermost expression is a COMPONENT_REF, canonicalize its type.  */
  if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
    {
      canonicalize_component_ref (expr_p);
      ret = MIN (ret, GS_OK);
    }

  VEC_free (tree, heap, stack);

  return ret;
}
2067
/* Gimplify the self modifying expression pointed to by EXPR_P
   (++, --, +=, -=).

   PRE_P points to the list where side effects that must happen before
   *EXPR_P should be stored.

   POST_P points to the list where side effects that must happen after
   *EXPR_P should be stored.

   WANT_VALUE is nonzero iff we want to use the value of this expression
   in another expression.

   Returns GS_ALL_DONE for the postfix case (the result is the saved
   pre-modification value), GS_OK for the prefix case (the expression is
   rewritten as a MODIFY_EXPR for further gimplification), or GS_ERROR.  */

static enum gimplify_status
gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			bool want_value)
{
  enum tree_code code;
  tree lhs, lvalue, rhs, t1;
  /* Private post queue for the postfix case; spliced into the caller's
     queue (orig_post_p) after the update assignment, below.  */
  gimple_seq post = NULL, *orig_post_p = post_p;
  bool postfix;
  enum tree_code arith_code;
  enum gimplify_status ret;
  location_t loc = EXPR_LOCATION (*expr_p);

  code = TREE_CODE (*expr_p);

  gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
	      || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);

  /* Prefix or postfix?  */
  if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
    /* Faster to treat as prefix if result is not used.  */
    postfix = want_value;
  else
    postfix = false;

  /* For postfix, make sure the inner expression's post side effects
     are executed after side effects from this expression.  */
  if (postfix)
    post_p = &post;

  /* Add or subtract?  */
  if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
    arith_code = PLUS_EXPR;
  else
    arith_code = MINUS_EXPR;

  /* Gimplify the LHS into a GIMPLE lvalue.  */
  lvalue = TREE_OPERAND (*expr_p, 0);
  ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
  if (ret == GS_ERROR)
    return ret;

  /* Extract the operands to the arithmetic operation.  */
  lhs = lvalue;
  rhs = TREE_OPERAND (*expr_p, 1);

  /* For postfix operator, we evaluate the LHS to an rvalue and then use
     that as the result value and in the postqueue operation.  We also
     make sure to make lvalue a minimal lval, see
     gcc.c-torture/execute/20040313-1.c for an example where this matters.  */
  if (postfix)
    {
      if (!is_gimple_min_lval (lvalue))
	{
	  /* Force the lvalue through a pointer so the same object is
	     named by both the read below and the deferred store.  */
	  mark_addressable (lvalue);
	  lvalue = build_fold_addr_expr_loc (input_location, lvalue);
	  gimplify_expr (&lvalue, pre_p, post_p, is_gimple_val, fb_rvalue);
	  lvalue = build_fold_indirect_ref_loc (input_location, lvalue);
	}
      /* LHS now holds the pre-modification value.  */
      ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
      if (ret == GS_ERROR)
	return ret;
    }

  /* For POINTERs increment, use POINTER_PLUS_EXPR.  */
  if (POINTER_TYPE_P (TREE_TYPE (lhs)))
    {
      rhs = fold_convert_loc (loc, sizetype, rhs);
      if (arith_code == MINUS_EXPR)
	rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
      arith_code = POINTER_PLUS_EXPR;
    }

  /* T1 is the updated value: lhs <op> rhs.  */
  t1 = build2 (arith_code, TREE_TYPE (*expr_p), lhs, rhs);

  if (postfix)
    {
      /* Emit the store into the caller's post queue, followed by any
	 side effects the operands queued on our private post queue, so
	 everything runs after the value is consumed.  */
      gimplify_assign (lvalue, t1, orig_post_p);
      gimplify_seq_add_seq (orig_post_p, post);
      /* The saved pre-modification value is the expression's result.  */
      *expr_p = lhs;
      return GS_ALL_DONE;
    }
  else
    {
      /* Prefix: rewrite as a plain assignment and let the caller
	 gimplify it.  */
      *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
      return GS_OK;
    }
}
2167
2168
2169 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
2170
2171 static void
2172 maybe_with_size_expr (tree *expr_p)
2173 {
2174 tree expr = *expr_p;
2175 tree type = TREE_TYPE (expr);
2176 tree size;
2177
2178 /* If we've already wrapped this or the type is error_mark_node, we can't do
2179 anything. */
2180 if (TREE_CODE (expr) == WITH_SIZE_EXPR
2181 || type == error_mark_node)
2182 return;
2183
2184 /* If the size isn't known or is a constant, we have nothing to do. */
2185 size = TYPE_SIZE_UNIT (type);
2186 if (!size || TREE_CODE (size) == INTEGER_CST)
2187 return;
2188
2189 /* Otherwise, make a WITH_SIZE_EXPR. */
2190 size = unshare_expr (size);
2191 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
2192 *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
2193 }
2194
2195
2196 /* Helper for gimplify_call_expr. Gimplify a single argument *ARG_P
2197 Store any side-effects in PRE_P. CALL_LOCATION is the location of
2198 the CALL_EXPR. */
2199
2200 static enum gimplify_status
2201 gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location)
2202 {
2203 bool (*test) (tree);
2204 fallback_t fb;
2205
2206 /* In general, we allow lvalues for function arguments to avoid
2207 extra overhead of copying large aggregates out of even larger
2208 aggregates into temporaries only to copy the temporaries to
2209 the argument list. Make optimizers happy by pulling out to
2210 temporaries those types that fit in registers. */
2211 if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
2212 test = is_gimple_val, fb = fb_rvalue;
2213 else
2214 test = is_gimple_lvalue, fb = fb_either;
2215
2216 /* If this is a variable sized type, we must remember the size. */
2217 maybe_with_size_expr (arg_p);
2218
2219 /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c. */
2220 /* Make sure arguments have the same location as the function call
2221 itself. */
2222 protected_set_expr_location (*arg_p, call_location);
2223
2224 /* There is a sequence point before a function call. Side effects in
2225 the argument list must occur before the actual call. So, when
2226 gimplifying arguments, force gimplify_expr to use an internal
2227 post queue which is then appended to the end of PRE_P. */
2228 return gimplify_expr (arg_p, pre_p, NULL, test, fb);
2229 }
2230
2231
/* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
   WANT_VALUE is true if the result of the call is desired.

   Returns GS_OK when the call was folded/rewritten and should be
   gimplified again, GS_ALL_DONE or GS_OK on success, GS_ERROR on
   failure (in which case *EXPR_P is error_mark_node).  */

static enum gimplify_status
gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
{
  tree fndecl, parms, p;
  enum gimplify_status ret;
  int i, nargs;
  gimple call;
  bool builtin_va_start_p = FALSE;
  location_t loc = EXPR_LOCATION (*expr_p);

  gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);

  /* For reliable diagnostics during inlining, it is necessary that
     every call_expr be annotated with file and line.  */
  if (! EXPR_HAS_LOCATION (*expr_p))
    SET_EXPR_LOCATION (*expr_p, input_location);

  /* This may be a call to a builtin function.

     Builtin function calls may be transformed into different
     (and more efficient) builtin function calls under certain
     circumstances.  Unfortunately, gimplification can muck things
     up enough that the builtin expanders are not aware that certain
     transformations are still valid.

     So we attempt transformation/gimplification of the call before
     we gimplify the CALL_EXPR.  At this time we do not manage to
     transform all calls in the same manner as the expanders do, but
     we do transform most of them.  */
  fndecl = get_callee_fndecl (*expr_p);
  if (fndecl && DECL_BUILT_IN (fndecl))
    {
      tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);

      if (new_tree && new_tree != *expr_p)
	{
	  /* There was a transformation of this call which computes the
	     same value, but in a more efficient way.  Return and try
	     again.  */
	  *expr_p = new_tree;
	  return GS_OK;
	}

      /* va_start needs special handling: its second argument must stay
	 a plain PARM_DECL (see the argument loop below), and a
	 malformed call is diagnosed here rather than later.  */
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
          && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_VA_START)
        {
	  builtin_va_start_p = TRUE;
	  if (call_expr_nargs (*expr_p) < 2)
	    {
	      error ("too few arguments to function %<va_start%>");
	      *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
	      return GS_OK;
	    }

	  if (fold_builtin_next_arg (*expr_p, true))
	    {
	      *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
	      return GS_OK;
	    }
	}
    }

  /* There is a sequence point before the call, so any side effects in
     the calling expression must occur before the actual call.  Force
     gimplify_expr to use an internal post queue.  */
  ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
		       is_gimple_call_addr, fb_rvalue);

  nargs = call_expr_nargs (*expr_p);

  /* Get argument types for verification.  */
  fndecl = get_callee_fndecl (*expr_p);
  parms = NULL_TREE;
  if (fndecl)
    parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  else if (POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_FN (*expr_p))))
    parms = TYPE_ARG_TYPES (TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (*expr_p))));

  if (fndecl && DECL_ARGUMENTS (fndecl))
    p = DECL_ARGUMENTS (fndecl);
  else if (parms)
    p = parms;
  else
    p = NULL_TREE;
  /* Walk the declared parameters alongside the first NARGS arguments.
     Afterward, P == NULL_TREE with I < NARGS means argument I and the
     following ones were not passed as named parameters (i.e. they go
     into the variadic part of the call).  */
  for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
    ;

  /* If the last argument is __builtin_va_arg_pack () and it is not
     passed as a named argument, decrease the number of CALL_EXPR
     arguments and set instead the CALL_EXPR_VA_ARG_PACK flag.  */
  if (!p
      && i < nargs
      && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
    {
      tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
      tree last_arg_fndecl = get_callee_fndecl (last_arg);

      if (last_arg_fndecl
	  && TREE_CODE (last_arg_fndecl) == FUNCTION_DECL
	  && DECL_BUILT_IN_CLASS (last_arg_fndecl) == BUILT_IN_NORMAL
	  && DECL_FUNCTION_CODE (last_arg_fndecl) == BUILT_IN_VA_ARG_PACK)
	{
	  tree call = *expr_p;

	  /* Rebuild the CALL_EXPR without the trailing
	     __builtin_va_arg_pack () argument.  */
	  --nargs;
	  *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
					  CALL_EXPR_FN (call),
					  nargs, CALL_EXPR_ARGP (call));

	  /* Copy all CALL_EXPR flags, location and block, except
	     CALL_EXPR_VA_ARG_PACK flag.  */
	  CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
	  CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
	  CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
	    = CALL_EXPR_RETURN_SLOT_OPT (call);
	  CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
	  CALL_CANNOT_INLINE_P (*expr_p) = CALL_CANNOT_INLINE_P (call);
	  SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));
	  TREE_BLOCK (*expr_p) = TREE_BLOCK (call);

	  /* Set CALL_EXPR_VA_ARG_PACK.  */
	  CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
	}
    }

  /* Finally, gimplify the function arguments.  The iteration direction
     follows PUSH_ARGS_REVERSED -- presumably to match the order in
     which the target pushes arguments; TODO confirm against calls.c.  */
  if (nargs > 0)
    {
      for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
           PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
           PUSH_ARGS_REVERSED ? i-- : i++)
        {
          enum gimplify_status t;

          /* Avoid gimplifying the second argument to va_start, which needs to
             be the plain PARM_DECL.  */
          if ((i != 1) || !builtin_va_start_p)
            {
              t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
				EXPR_LOCATION (*expr_p));

              if (t == GS_ERROR)
                ret = GS_ERROR;
            }
        }
    }

  /* Verify the function result.  */
  if (want_value && fndecl
      && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl))))
    {
      error_at (loc, "using result of function returning %<void%>");
      ret = GS_ERROR;
    }

  /* Try this again in case gimplification exposed something.  */
  if (ret != GS_ERROR)
    {
      tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);

      if (new_tree && new_tree != *expr_p)
	{
	  /* There was a transformation of this call which computes the
	     same value, but in a more efficient way.  Return and try
	     again.  */
	  *expr_p = new_tree;
	  return GS_OK;
	}
    }
  else
    {
      *expr_p = error_mark_node;
      return GS_ERROR;
    }

  /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
     decl.  This allows us to eliminate redundant or useless
     calls to "const" functions.  */
  if (TREE_CODE (*expr_p) == CALL_EXPR)
    {
      int flags = call_expr_flags (*expr_p);
      if (flags & (ECF_CONST | ECF_PURE)
	  /* An infinite loop is considered a side effect.  */
	  && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
	TREE_SIDE_EFFECTS (*expr_p) = 0;
    }

  /* If the value is not needed by the caller, emit a new GIMPLE_CALL
     and clear *EXPR_P.  Otherwise, leave *EXPR_P in its gimplified
     form and delegate the creation of a GIMPLE_CALL to
     gimplify_modify_expr.  This is always possible because when
     WANT_VALUE is true, the caller wants the result of this call into
     a temporary, which means that we will emit an INIT_EXPR in
     internal_get_tmp_var which will then be handled by
     gimplify_modify_expr.  */
  if (!want_value)
    {
      /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
	 have to do is replicate it as a GIMPLE_CALL tuple.  */
      call = gimple_build_call_from_tree (*expr_p);
      gimplify_seq_add_stmt (pre_p, call);
      *expr_p = NULL_TREE;
    }

  return ret;
}
2441
/* Handle shortcut semantics in the predicate operand of a COND_EXPR by
   rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.

   TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
   condition is true or false, respectively.  If null, we should generate
   our own to skip over the evaluation of this specific expression.

   LOCUS is the source location of the COND_EXPR.

   This function is the tree equivalent of do_jump.

   shortcut_cond_r should only be called by shortcut_cond_expr.

   Returns a statement list implementing the jumps; a label pointed to
   by TRUE_LABEL_P/FALSE_LABEL_P may be created (via build_and_jump) as
   a side effect, for the caller to emit.  */

static tree
shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
		 location_t locus)
{
  /* Label created locally when the caller passed no label for the
     branch that should simply fall through past this expression.  */
  tree local_label = NULL_TREE;
  tree t, expr = NULL;

  /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
     retain the shortcut semantics.  Just insert the gotos here;
     shortcut_cond_expr will append the real blocks later.  */
  if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
    {
      location_t new_locus;

      /* Turn if (a && b) into

	 if (a); else goto no;
	 if (b) goto yes; else goto no;
	 (no:) */

      if (false_label_p == NULL)
	false_label_p = &local_label;

      /* Keep the original source location on the first 'if'.  */
      t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus);
      append_to_statement_list (t, &expr);

      /* Set the source location of the && on the second 'if'.  */
      new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
      t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
			   new_locus);
      append_to_statement_list (t, &expr);
    }
  else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
    {
      location_t new_locus;

      /* Turn if (a || b) into

	 if (a) goto yes;
	 if (b) goto yes; else goto no;
	 (yes:) */

      if (true_label_p == NULL)
	true_label_p = &local_label;

      /* Keep the original source location on the first 'if'.  */
      t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus);
      append_to_statement_list (t, &expr);

      /* Set the source location of the || on the second 'if'.  */
      new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
      t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
			   new_locus);
      append_to_statement_list (t, &expr);
    }
  else if (TREE_CODE (pred) == COND_EXPR)
    {
      location_t new_locus;

      /* As long as we're messing with gotos, turn if (a ? b : c) into
	 if (a)
	   if (b) goto yes; else goto no;
	 else
	   if (c) goto yes; else goto no;  */

      /* Keep the original source location on the first 'if'.  Set the source
	 location of the ? on the second 'if'.  */
      new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
      expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
		     shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
				      false_label_p, locus),
		     shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
				      false_label_p, new_locus));
    }
  else
    {
      /* Base case: a simple predicate.  build_and_jump creates the
	 label on demand if *TRUE_LABEL_P/*FALSE_LABEL_P is still null,
	 or emits nothing for a null label pointer (fallthrough).  */
      expr = build3 (COND_EXPR, void_type_node, pred,
		     build_and_jump (true_label_p),
		     build_and_jump (false_label_p));
      SET_EXPR_LOCATION (expr, locus);
    }

  /* If we created a fallthrough label above, emit it here at the end.  */
  if (local_label)
    {
      t = build1 (LABEL_EXPR, void_type_node, local_label);
      append_to_statement_list (t, &expr);
    }

  return expr;
}
2546
/* Given a conditional expression EXPR with short-circuit boolean
   predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
   predicate apart into the equivalent sequence of conditionals.

   Returns either EXPR itself (when no rewriting was needed) or a new
   statement list implementing the same semantics with gotos.  */

static tree
shortcut_cond_expr (tree expr)
{
  tree pred = TREE_OPERAND (expr, 0);
  tree then_ = TREE_OPERAND (expr, 1);
  tree else_ = TREE_OPERAND (expr, 2);
  tree true_label, false_label, end_label, t;
  tree *true_label_p;
  tree *false_label_p;
  bool emit_end, emit_false, jump_over_else;
  /* Whether each arm contains code worth executing.  */
  bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
  bool else_se = else_ && TREE_SIDE_EFFECTS (else_);

  /* First do simple transformations.  */
  if (!else_se)
    {
      /* If there is no 'else', turn
	   if (a && b) then c
	 into
	   if (a) if (b) then c.  */
      while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
	{
	  /* Keep the original source location on the first 'if'.  */
	  location_t locus = EXPR_HAS_LOCATION (expr)
			     ? EXPR_LOCATION (expr) : input_location;
	  TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
	  /* Set the source location of the && on the second 'if'.  */
	  if (EXPR_HAS_LOCATION (pred))
	    SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
	  then_ = shortcut_cond_expr (expr);
	  then_se = then_ && TREE_SIDE_EFFECTS (then_);
	  pred = TREE_OPERAND (pred, 0);
	  expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
	  SET_EXPR_LOCATION (expr, locus);
	}
    }

  if (!then_se)
    {
      /* If there is no 'then', turn
	   if (a || b); else d
	 into
	   if (a); else if (b); else d.  */
      while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
	{
	  /* Keep the original source location on the first 'if'.  */
	  location_t locus = EXPR_HAS_LOCATION (expr)
			     ? EXPR_LOCATION (expr) : input_location;
	  TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
	  /* Set the source location of the || on the second 'if'.  */
	  if (EXPR_HAS_LOCATION (pred))
	    SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
	  else_ = shortcut_cond_expr (expr);
	  else_se = else_ && TREE_SIDE_EFFECTS (else_);
	  pred = TREE_OPERAND (pred, 0);
	  expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
	  SET_EXPR_LOCATION (expr, locus);
	}
    }

  /* If we're done, great.  */
  if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
      && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
    return expr;

  /* Otherwise we need to mess with gotos.  Change
       if (a) c; else d;
     to
       if (a); else goto no;
       c; goto end;
       no: d; end:
     and recursively gimplify the condition.  */

  true_label = false_label = end_label = NULL_TREE;

  /* If our arms just jump somewhere, hijack those labels so we don't
     generate jumps to jumps.  */

  if (then_
      && TREE_CODE (then_) == GOTO_EXPR
      && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL)
    {
      true_label = GOTO_DESTINATION (then_);
      then_ = NULL;
      then_se = false;
    }

  if (else_
      && TREE_CODE (else_) == GOTO_EXPR
      && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL)
    {
      false_label = GOTO_DESTINATION (else_);
      else_ = NULL;
      else_se = false;
    }

  /* If we aren't hijacking a label for the 'then' branch, it falls through.  */
  if (true_label)
    true_label_p = &true_label;
  else
    true_label_p = NULL;

  /* The 'else' branch also needs a label if it contains interesting code.  */
  if (false_label || else_se)
    false_label_p = &false_label;
  else
    false_label_p = NULL;

  /* If there was nothing else in our arms, just forward the label(s).  */
  if (!then_se && !else_se)
    return shortcut_cond_r (pred, true_label_p, false_label_p,
			    EXPR_HAS_LOCATION (expr)
			    ? EXPR_LOCATION (expr) : input_location);

  /* If our last subexpression already has a terminal label, reuse it.  */
  if (else_se)
    t = expr_last (else_);
  else if (then_se)
    t = expr_last (then_);
  else
    t = NULL;
  if (t && TREE_CODE (t) == LABEL_EXPR)
    end_label = LABEL_EXPR_LABEL (t);

  /* If we don't care about jumping to the 'else' branch, jump to the end
     if the condition is false.  */
  if (!false_label_p)
    false_label_p = &end_label;

  /* We only want to emit these labels if we aren't hijacking them.  */
  emit_end = (end_label == NULL_TREE);
  emit_false = (false_label == NULL_TREE);

  /* We only emit the jump over the else clause if we have to--if the
     then clause may fall through.  Otherwise we can wind up with a
     useless jump and a useless label at the end of gimplified code,
     which will cause us to think that this conditional as a whole
     falls through even if it doesn't.  If we then inline a function
     which ends with such a condition, that can cause us to issue an
     inappropriate warning about control reaching the end of a
     non-void function.  */
  jump_over_else = block_may_fallthru (then_);

  /* Lower the predicate into jumps; shortcut_cond_r may create the
     labels we emit below.  */
  pred = shortcut_cond_r (pred, true_label_p, false_label_p,
			  EXPR_HAS_LOCATION (expr)
			  ? EXPR_LOCATION (expr) : input_location);

  expr = NULL;
  append_to_statement_list (pred, &expr);

  append_to_statement_list (then_, &expr);
  if (else_se)
    {
      if (jump_over_else)
	{
	  /* Give the 'goto end' the location of the last statement of
	     the then-arm, so diagnostics don't point elsewhere.  */
	  tree last = expr_last (expr);
	  t = build_and_jump (&end_label);
	  if (EXPR_HAS_LOCATION (last))
	    SET_EXPR_LOCATION (t, EXPR_LOCATION (last));
	  append_to_statement_list (t, &expr);
	}
      if (emit_false)
	{
	  t = build1 (LABEL_EXPR, void_type_node, false_label);
	  append_to_statement_list (t, &expr);
	}
      append_to_statement_list (else_, &expr);
    }
  if (emit_end && end_label)
    {
      t = build1 (LABEL_EXPR, void_type_node, end_label);
      append_to_statement_list (t, &expr);
    }

  return expr;
}
2727
/* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE.
   Returns EXPR (possibly modified in place, or converted) with boolean
   type.  Truth expressions have their operands boolified recursively.  */

tree
gimple_boolify (tree expr)
{
  tree type = TREE_TYPE (expr);
  location_t loc = EXPR_LOCATION (expr);

  /* Special-case `__builtin_expect (x, y) != 0': boolify X through the
     builtin so the expectation survives in boolean form.  */
  if (TREE_CODE (expr) == NE_EXPR
      && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR
      && integer_zerop (TREE_OPERAND (expr, 1)))
    {
      tree call = TREE_OPERAND (expr, 0);
      tree fn = get_callee_fndecl (call);

      /* For __builtin_expect ((long) (x), y) recurse into x as well
	 if x is truth_value_p.  */
      if (fn
	  && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL
	  && DECL_FUNCTION_CODE (fn) == BUILT_IN_EXPECT
	  && call_expr_nargs (call) == 2)
	{
	  tree arg = CALL_EXPR_ARG (call, 0);
	  if (arg)
	    {
	      /* Look through the (long) cast __builtin_expect imposes.  */
	      if (TREE_CODE (arg) == NOP_EXPR
		  && TREE_TYPE (arg) == TREE_TYPE (call))
		arg = TREE_OPERAND (arg, 0);
	      if (truth_value_p (TREE_CODE (arg)))
		{
		  arg = gimple_boolify (arg);
		  CALL_EXPR_ARG (call, 0)
		    = fold_convert_loc (loc, TREE_TYPE (call), arg);
		}
	    }
	}
    }

  /* Already boolean: nothing to do.  */
  if (TREE_CODE (type) == BOOLEAN_TYPE)
    return expr;

  switch (TREE_CODE (expr))
    {
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      /* Also boolify the arguments of truth exprs.  */
      TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
      /* FALLTHRU */

    case TRUTH_NOT_EXPR:
      TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
      /* FALLTHRU */

    case EQ_EXPR: case NE_EXPR:
    case LE_EXPR: case GE_EXPR: case LT_EXPR: case GT_EXPR:
      /* These expressions always produce boolean results.  */
      TREE_TYPE (expr) = boolean_type_node;
      return expr;

    default:
      /* Other expressions that get here must have boolean values, but
	 might need to be converted to the appropriate mode.  */
      return fold_convert_loc (loc, boolean_type_node, expr);
    }
}
2796
2797 /* Given a conditional expression *EXPR_P without side effects, gimplify
2798 its operands. New statements are inserted to PRE_P. */
2799
2800 static enum gimplify_status
2801 gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
2802 {
2803 tree expr = *expr_p, cond;
2804 enum gimplify_status ret, tret;
2805 enum tree_code code;
2806
2807 cond = gimple_boolify (COND_EXPR_COND (expr));
2808
2809 /* We need to handle && and || specially, as their gimplification
2810 creates pure cond_expr, thus leading to an infinite cycle otherwise. */
2811 code = TREE_CODE (cond);
2812 if (code == TRUTH_ANDIF_EXPR)
2813 TREE_SET_CODE (cond, TRUTH_AND_EXPR);
2814 else if (code == TRUTH_ORIF_EXPR)
2815 TREE_SET_CODE (cond, TRUTH_OR_EXPR);
2816 ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_condexpr, fb_rvalue);
2817 COND_EXPR_COND (*expr_p) = cond;
2818
2819 tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
2820 is_gimple_val, fb_rvalue);
2821 ret = MIN (ret, tret);
2822 tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
2823 is_gimple_val, fb_rvalue);
2824
2825 return MIN (ret, tret);
2826 }
2827
2828 /* Returns true if evaluating EXPR could trap.
2829 EXPR is GENERIC, while tree_could_trap_p can be called
2830 only on GIMPLE. */
2831
2832 static bool
2833 generic_expr_could_trap_p (tree expr)
2834 {
2835 unsigned i, n;
2836
2837 if (!expr || is_gimple_val (expr))
2838 return false;
2839
2840 if (!EXPR_P (expr) || tree_could_trap_p (expr))
2841 return true;
2842
2843 n = TREE_OPERAND_LENGTH (expr);
2844 for (i = 0; i < n; i++)
2845 if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
2846 return true;
2847
2848 return false;
2849 }
2850
2851 /* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
2852 into
2853
2854 if (p) if (p)
2855 t1 = a; a;
2856 else or else
2857 t1 = b; b;
2858 t1;
2859
2860 The second form is used when *EXPR_P is of type void.
2861
2862 PRE_P points to the list where side effects that must happen before
2863 *EXPR_P should be stored. */
2864
2865 static enum gimplify_status
2866 gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
2867 {
2868 tree expr = *expr_p;
2869 tree type = TREE_TYPE (expr);
2870 location_t loc = EXPR_LOCATION (expr);
2871 tree tmp, arm1, arm2;
2872 enum gimplify_status ret;
2873 tree label_true, label_false, label_cont;
2874 bool have_then_clause_p, have_else_clause_p;
2875 gimple gimple_cond;
2876 enum tree_code pred_code;
2877 gimple_seq seq = NULL;
2878
2879 /* If this COND_EXPR has a value, copy the values into a temporary within
2880 the arms. */
2881 if (!VOID_TYPE_P (type))
2882 {
2883 tree then_ = TREE_OPERAND (expr, 1), else_ = TREE_OPERAND (expr, 2);
2884 tree result;
2885
2886 /* If either an rvalue is ok or we do not require an lvalue, create the
2887 temporary. But we cannot do that if the type is addressable. */
2888 if (((fallback & fb_rvalue) || !(fallback & fb_lvalue))
2889 && !TREE_ADDRESSABLE (type))
2890 {
2891 if (gimplify_ctxp->allow_rhs_cond_expr
2892 /* If either branch has side effects or could trap, it can't be
2893 evaluated unconditionally. */
2894 && !TREE_SIDE_EFFECTS (then_)
2895 && !generic_expr_could_trap_p (then_)
2896 && !TREE_SIDE_EFFECTS (else_)
2897 && !generic_expr_could_trap_p (else_))
2898 return gimplify_pure_cond_expr (expr_p, pre_p);
2899
2900 tmp = create_tmp_var (type, "iftmp");
2901 result = tmp;
2902 }
2903
2904 /* Otherwise, only create and copy references to the values. */
2905 else
2906 {
2907 type = build_pointer_type (type);
2908
2909 if (!VOID_TYPE_P (TREE_TYPE (then_)))
2910 then_ = build_fold_addr_expr_loc (loc, then_);
2911
2912 if (!VOID_TYPE_P (TREE_TYPE (else_)))
2913 else_ = build_fold_addr_expr_loc (loc, else_);
2914
2915 expr
2916 = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_);
2917
2918 tmp = create_tmp_var (type, "iftmp");
2919 result = build_fold_indirect_ref_loc (loc, tmp);
2920 }
2921
2922 /* Build the new then clause, `tmp = then_;'. But don't build the
2923 assignment if the value is void; in C++ it can be if it's a throw. */
2924 if (!VOID_TYPE_P (TREE_TYPE (then_)))
2925 TREE_OPERAND (expr, 1) = build2 (MODIFY_EXPR, type, tmp, then_);
2926
2927 /* Similarly, build the new else clause, `tmp = else_;'. */
2928 if (!VOID_TYPE_P (TREE_TYPE (else_)))
2929 TREE_OPERAND (expr, 2) = build2 (MODIFY_EXPR, type, tmp, else_);
2930
2931 TREE_TYPE (expr) = void_type_node;
2932 recalculate_side_effects (expr);
2933
2934 /* Move the COND_EXPR to the prequeue. */
2935 gimplify_stmt (&expr, pre_p);
2936
2937 *expr_p = result;
2938 return GS_ALL_DONE;
2939 }
2940
2941 /* Make sure the condition has BOOLEAN_TYPE. */
2942 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
2943
2944 /* Break apart && and || conditions. */
2945 if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
2946 || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
2947 {
2948 expr = shortcut_cond_expr (expr);
2949
2950 if (expr != *expr_p)
2951 {
2952 *expr_p = expr;
2953
2954 /* We can't rely on gimplify_expr to re-gimplify the expanded
2955 form properly, as cleanups might cause the target labels to be
2956 wrapped in a TRY_FINALLY_EXPR. To prevent that, we need to
2957 set up a conditional context. */
2958 gimple_push_condition ();
2959 gimplify_stmt (expr_p, &seq);
2960 gimple_pop_condition (pre_p);
2961 gimple_seq_add_seq (pre_p, seq);
2962
2963 return GS_ALL_DONE;
2964 }
2965 }
2966
2967 /* Now do the normal gimplification. */
2968
2969 /* Gimplify condition. */
2970 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL, is_gimple_condexpr,
2971 fb_rvalue);
2972 if (ret == GS_ERROR)
2973 return GS_ERROR;
2974 gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);
2975
2976 gimple_push_condition ();
2977
2978 have_then_clause_p = have_else_clause_p = false;
2979 if (TREE_OPERAND (expr, 1) != NULL
2980 && TREE_CODE (TREE_OPERAND (expr, 1)) == GOTO_EXPR
2981 && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 1))) == LABEL_DECL
2982 && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 1)))
2983 == current_function_decl)
2984 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
2985 have different locations, otherwise we end up with incorrect
2986 location information on the branches. */
2987 && (optimize
2988 || !EXPR_HAS_LOCATION (expr)
2989 || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 1))
2990 || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 1))))
2991 {
2992 label_true = GOTO_DESTINATION (TREE_OPERAND (expr, 1));
2993 have_then_clause_p = true;
2994 }
2995 else
2996 label_true = create_artificial_label (UNKNOWN_LOCATION);
2997 if (TREE_OPERAND (expr, 2) != NULL
2998 && TREE_CODE (TREE_OPERAND (expr, 2)) == GOTO_EXPR
2999 && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 2))) == LABEL_DECL
3000 && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 2)))
3001 == current_function_decl)
3002 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
3003 have different locations, otherwise we end up with incorrect
3004 location information on the branches. */
3005 && (optimize
3006 || !EXPR_HAS_LOCATION (expr)
3007 || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 2))
3008 || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 2))))
3009 {
3010 label_false = GOTO_DESTINATION (TREE_OPERAND (expr, 2));
3011 have_else_clause_p = true;
3012 }
3013 else
3014 label_false = create_artificial_label (UNKNOWN_LOCATION);
3015
3016 gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
3017 &arm2);
3018
3019 gimple_cond = gimple_build_cond (pred_code, arm1, arm2, label_true,
3020 label_false);
3021
3022 gimplify_seq_add_stmt (&seq, gimple_cond);
3023 label_cont = NULL_TREE;
3024 if (!have_then_clause_p)
3025 {
3026 /* For if (...) {} else { code; } put label_true after
3027 the else block. */
3028 if (TREE_OPERAND (expr, 1) == NULL_TREE
3029 && !have_else_clause_p
3030 && TREE_OPERAND (expr, 2) != NULL_TREE)
3031 label_cont = label_true;
3032 else
3033 {
3034 gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
3035 have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
3036 /* For if (...) { code; } else {} or
3037 if (...) { code; } else goto label; or
3038 if (...) { code; return; } else { ... }
3039 label_cont isn't needed. */
3040 if (!have_else_clause_p
3041 && TREE_OPERAND (expr, 2) != NULL_TREE
3042 && gimple_seq_may_fallthru (seq))
3043 {
3044 gimple g;
3045 label_cont = create_artificial_label (UNKNOWN_LOCATION);
3046
3047 g = gimple_build_goto (label_cont);
3048
3049 /* GIMPLE_COND's are very low level; they have embedded
3050 gotos. This particular embedded goto should not be marked
3051 with the location of the original COND_EXPR, as it would
3052 correspond to the COND_EXPR's condition, not the ELSE or the
3053 THEN arms. To avoid marking it with the wrong location, flag
3054 it as "no location". */
3055 gimple_set_do_not_emit_location (g);
3056
3057 gimplify_seq_add_stmt (&seq, g);
3058 }
3059 }
3060 }
3061 if (!have_else_clause_p)
3062 {
3063 gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
3064 have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
3065 }
3066 if (label_cont)
3067 gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));
3068
3069 gimple_pop_condition (pre_p);
3070 gimple_seq_add_seq (pre_p, seq);
3071
3072 if (ret == GS_ERROR)
3073 ; /* Do nothing. */
3074 else if (have_then_clause_p || have_else_clause_p)
3075 ret = GS_ALL_DONE;
3076 else
3077 {
3078 /* Both arms are empty; replace the COND_EXPR with its predicate. */
3079 expr = TREE_OPERAND (expr, 0);
3080 gimplify_stmt (&expr, pre_p);
3081 }
3082
3083 *expr_p = NULL;
3084 return ret;
3085 }
3086
3087 /* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
3088 to be marked addressable.
3089
3090 We cannot rely on such an expression being directly markable if a temporary
3091 has been created by the gimplification. In this case, we create another
3092 temporary and initialize it with a copy, which will become a store after we
3093 mark it addressable. This can happen if the front-end passed us something
3094 that it could not mark addressable yet, like a Fortran pass-by-reference
3095 parameter (int) floatvar. */
3096
3097 static void
3098 prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p)
3099 {
3100 while (handled_component_p (*expr_p))
3101 expr_p = &TREE_OPERAND (*expr_p, 0);
3102 if (is_gimple_reg (*expr_p))
3103 *expr_p = get_initialized_tmp_var (*expr_p, seq_p, NULL);
3104 }
3105
3106 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
3107 a call to __builtin_memcpy. */
3108
3109 static enum gimplify_status
3110 gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
3111 gimple_seq *seq_p)
3112 {
3113 tree t, to, to_ptr, from, from_ptr;
3114 gimple gs;
3115 location_t loc = EXPR_LOCATION (*expr_p);
3116
3117 to = TREE_OPERAND (*expr_p, 0);
3118 from = TREE_OPERAND (*expr_p, 1);
3119
3120 /* Mark the RHS addressable. Beware that it may not be possible to do so
3121 directly if a temporary has been created by the gimplification. */
3122 prepare_gimple_addressable (&from, seq_p);
3123
3124 mark_addressable (from);
3125 from_ptr = build_fold_addr_expr_loc (loc, from);
3126 gimplify_arg (&from_ptr, seq_p, loc);
3127
3128 mark_addressable (to);
3129 to_ptr = build_fold_addr_expr_loc (loc, to);
3130 gimplify_arg (&to_ptr, seq_p, loc);
3131
3132 t = implicit_built_in_decls[BUILT_IN_MEMCPY];
3133
3134 gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);
3135
3136 if (want_value)
3137 {
3138 /* tmp = memcpy() */
3139 t = create_tmp_var (TREE_TYPE (to_ptr), NULL);
3140 gimple_call_set_lhs (gs, t);
3141 gimplify_seq_add_stmt (seq_p, gs);
3142
3143 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
3144 return GS_ALL_DONE;
3145 }
3146
3147 gimplify_seq_add_stmt (seq_p, gs);
3148 *expr_p = NULL;
3149 return GS_ALL_DONE;
3150 }
3151
3152 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
3153 a call to __builtin_memset. In this case we know that the RHS is
3154 a CONSTRUCTOR with an empty element list. */
3155
3156 static enum gimplify_status
3157 gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
3158 gimple_seq *seq_p)
3159 {
3160 tree t, from, to, to_ptr;
3161 gimple gs;
3162 location_t loc = EXPR_LOCATION (*expr_p);
3163
3164 /* Assert our assumptions, to abort instead of producing wrong code
3165 silently if they are not met. Beware that the RHS CONSTRUCTOR might
3166 not be immediately exposed. */
3167 from = TREE_OPERAND (*expr_p, 1);
3168 if (TREE_CODE (from) == WITH_SIZE_EXPR)
3169 from = TREE_OPERAND (from, 0);
3170
3171 gcc_assert (TREE_CODE (from) == CONSTRUCTOR
3172 && VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (from)));
3173
3174 /* Now proceed. */
3175 to = TREE_OPERAND (*expr_p, 0);
3176
3177 to_ptr = build_fold_addr_expr_loc (loc, to);
3178 gimplify_arg (&to_ptr, seq_p, loc);
3179 t = implicit_built_in_decls[BUILT_IN_MEMSET];
3180
3181 gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);
3182
3183 if (want_value)
3184 {
3185 /* tmp = memset() */
3186 t = create_tmp_var (TREE_TYPE (to_ptr), NULL);
3187 gimple_call_set_lhs (gs, t);
3188 gimplify_seq_add_stmt (seq_p, gs);
3189
3190 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
3191 return GS_ALL_DONE;
3192 }
3193
3194 gimplify_seq_add_stmt (seq_p, gs);
3195 *expr_p = NULL;
3196 return GS_ALL_DONE;
3197 }
3198
3199 /* A subroutine of gimplify_init_ctor_preeval. Called via walk_tree,
3200 determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
3201 assignment. Returns non-null if we detect a potential overlap. */
3202
/* Data passed through walk_tree to gimplify_init_ctor_preeval_1,
   describing the LHS of the assignment whose CONSTRUCTOR is being
   scanned for overlap.  */
struct gimplify_init_ctor_preeval_data
{
  /* The base decl of the lhs object.  May be NULL, in which case we
     have to assume the lhs is indirect.  */
  tree lhs_base_decl;

  /* The alias set of the lhs object, used for conflict queries against
     indirect references found in the constructor.  */
  alias_set_type lhs_alias_set;
};
3212
3213 static tree
3214 gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
3215 {
3216 struct gimplify_init_ctor_preeval_data *data
3217 = (struct gimplify_init_ctor_preeval_data *) xdata;
3218 tree t = *tp;
3219
3220 /* If we find the base object, obviously we have overlap. */
3221 if (data->lhs_base_decl == t)
3222 return t;
3223
3224 /* If the constructor component is indirect, determine if we have a
3225 potential overlap with the lhs. The only bits of information we
3226 have to go on at this point are addressability and alias sets. */
3227 if (TREE_CODE (t) == INDIRECT_REF
3228 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
3229 && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
3230 return t;
3231
3232 /* If the constructor component is a call, determine if it can hide a
3233 potential overlap with the lhs through an INDIRECT_REF like above. */
3234 if (TREE_CODE (t) == CALL_EXPR)
3235 {
3236 tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));
3237
3238 for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
3239 if (POINTER_TYPE_P (TREE_VALUE (type))
3240 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
3241 && alias_sets_conflict_p (data->lhs_alias_set,
3242 get_alias_set
3243 (TREE_TYPE (TREE_VALUE (type)))))
3244 return t;
3245 }
3246
3247 if (IS_TYPE_OR_DECL_P (t))
3248 *walk_subtrees = 0;
3249 return NULL;
3250 }
3251
3252 /* A subroutine of gimplify_init_constructor. Pre-evaluate EXPR,
3253 force values that overlap with the lhs (as described by *DATA)
3254 into temporaries. */
3255
3256 static void
3257 gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3258 struct gimplify_init_ctor_preeval_data *data)
3259 {
3260 enum gimplify_status one;
3261
3262 /* If the value is constant, then there's nothing to pre-evaluate. */
3263 if (TREE_CONSTANT (*expr_p))
3264 {
3265 /* Ensure it does not have side effects, it might contain a reference to
3266 the object we're initializing. */
3267 gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
3268 return;
3269 }
3270
3271 /* If the type has non-trivial constructors, we can't pre-evaluate. */
3272 if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
3273 return;
3274
3275 /* Recurse for nested constructors. */
3276 if (TREE_CODE (*expr_p) == CONSTRUCTOR)
3277 {
3278 unsigned HOST_WIDE_INT ix;
3279 constructor_elt *ce;
3280 VEC(constructor_elt,gc) *v = CONSTRUCTOR_ELTS (*expr_p);
3281
3282 for (ix = 0; VEC_iterate (constructor_elt, v, ix, ce); ix++)
3283 gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);
3284
3285 return;
3286 }
3287
3288 /* If this is a variable sized type, we must remember the size. */
3289 maybe_with_size_expr (expr_p);
3290
3291 /* Gimplify the constructor element to something appropriate for the rhs
3292 of a MODIFY_EXPR. Given that we know the LHS is an aggregate, we know
3293 the gimplifier will consider this a store to memory. Doing this
3294 gimplification now means that we won't have to deal with complicated
3295 language-specific trees, nor trees like SAVE_EXPR that can induce
3296 exponential search behavior. */
3297 one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
3298 if (one == GS_ERROR)
3299 {
3300 *expr_p = NULL;
3301 return;
3302 }
3303
3304 /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
3305 with the lhs, since "a = { .x=a }" doesn't make sense. This will
3306 always be true for all scalars, since is_gimple_mem_rhs insists on a
3307 temporary variable for them. */
3308 if (DECL_P (*expr_p))
3309 return;
3310
3311 /* If this is of variable size, we have no choice but to assume it doesn't
3312 overlap since we can't make a temporary for it. */
3313 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
3314 return;
3315
3316 /* Otherwise, we must search for overlap ... */
3317 if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
3318 return;
3319
3320 /* ... and if found, force the value into a temporary. */
3321 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
3322 }
3323
3324 /* A subroutine of gimplify_init_ctor_eval. Create a loop for
3325 a RANGE_EXPR in a CONSTRUCTOR for an array.
3326
3327 var = lower;
3328 loop_entry:
3329 object[var] = value;
3330 if (var == upper)
3331 goto loop_exit;
3332 var = var + 1;
3333 goto loop_entry;
3334 loop_exit:
3335
3336 We increment var _after_ the loop exit check because we might otherwise
3337 fail if upper == TYPE_MAX_VALUE (type for upper).
3338
3339 Note that we never have to deal with SAVE_EXPRs here, because this has
3340 already been taken care of for us, in gimplify_init_ctor_preeval(). */
3341
3342 static void gimplify_init_ctor_eval (tree, VEC(constructor_elt,gc) *,
3343 gimple_seq *, bool);
3344
3345 static void
3346 gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
3347 tree value, tree array_elt_type,
3348 gimple_seq *pre_p, bool cleared)
3349 {
3350 tree loop_entry_label, loop_exit_label, fall_thru_label;
3351 tree var, var_type, cref, tmp;
3352
3353 loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
3354 loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
3355 fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);
3356
3357 /* Create and initialize the index variable. */
3358 var_type = TREE_TYPE (upper);
3359 var = create_tmp_var (var_type, NULL);
3360 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));
3361
3362 /* Add the loop entry label. */
3363 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));
3364
3365 /* Build the reference. */
3366 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
3367 var, NULL_TREE, NULL_TREE);
3368
3369 /* If we are a constructor, just call gimplify_init_ctor_eval to do
3370 the store. Otherwise just assign value to the reference. */
3371
3372 if (TREE_CODE (value) == CONSTRUCTOR)
3373 /* NB we might have to call ourself recursively through
3374 gimplify_init_ctor_eval if the value is a constructor. */
3375 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
3376 pre_p, cleared);
3377 else
3378 gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));
3379
3380 /* We exit the loop when the index var is equal to the upper bound. */
3381 gimplify_seq_add_stmt (pre_p,
3382 gimple_build_cond (EQ_EXPR, var, upper,
3383 loop_exit_label, fall_thru_label));
3384
3385 gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));
3386
3387 /* Otherwise, increment the index var... */
3388 tmp = build2 (PLUS_EXPR, var_type, var,
3389 fold_convert (var_type, integer_one_node));
3390 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));
3391
3392 /* ...and jump back to the loop entry. */
3393 gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));
3394
3395 /* Add the loop exit label. */
3396 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
3397 }
3398
3399 /* Return true if FDECL is accessing a field that is zero sized. */
3400
3401 static bool
3402 zero_sized_field_decl (const_tree fdecl)
3403 {
3404 if (TREE_CODE (fdecl) == FIELD_DECL && DECL_SIZE (fdecl)
3405 && integer_zerop (DECL_SIZE (fdecl)))
3406 return true;
3407 return false;
3408 }
3409
3410 /* Return true if TYPE is zero sized. */
3411
3412 static bool
3413 zero_sized_type (const_tree type)
3414 {
3415 if (AGGREGATE_TYPE_P (type) && TYPE_SIZE (type)
3416 && integer_zerop (TYPE_SIZE (type)))
3417 return true;
3418 return false;
3419 }
3420
3421 /* A subroutine of gimplify_init_constructor. Generate individual
3422 MODIFY_EXPRs for a CONSTRUCTOR. OBJECT is the LHS against which the
3423 assignments should happen. ELTS is the CONSTRUCTOR_ELTS of the
3424 CONSTRUCTOR. CLEARED is true if the entire LHS object has been
3425 zeroed first. */
3426
static void
gimplify_init_ctor_eval (tree object, VEC(constructor_elt,gc) *elts,
			 gimple_seq *pre_p, bool cleared)
{
  tree array_elt_type = NULL;
  unsigned HOST_WIDE_INT ix;
  tree purpose, value;

  /* For arrays, cache the main variant of the element type; non-NULL
     array_elt_type also distinguishes array from record handling below.  */
  if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
    array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));

  FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
    {
      tree cref;

      /* NULL values are created above for gimplification errors.  */
      if (value == NULL)
	continue;

      /* A zero value needs no individual store once the whole object
	 has been block-cleared.  */
      if (cleared && initializer_zerop (value))
	continue;

      /* ??? Here's to hoping the front end fills in all of the indices,
	 so we don't have to figure out what's missing ourselves.  */
      gcc_assert (purpose);

      /* Skip zero-sized fields, unless value has side-effects.  This can
	 happen with calls to functions returning a zero-sized type, which
	 we shouldn't discard.  As a number of downstream passes don't
	 expect sets of zero-sized fields, we rely on the gimplification of
	 the MODIFY_EXPR we make below to drop the assignment statement.  */
      if (! TREE_SIDE_EFFECTS (value) && zero_sized_field_decl (purpose))
	continue;

      /* If we have a RANGE_EXPR, we have to build a loop to assign the
	 whole range.  */
      if (TREE_CODE (purpose) == RANGE_EXPR)
	{
	  tree lower = TREE_OPERAND (purpose, 0);
	  tree upper = TREE_OPERAND (purpose, 1);

	  /* If the lower bound is equal to upper, just treat it as if
	     upper was the index.  */
	  if (simple_cst_equal (lower, upper))
	    purpose = upper;
	  else
	    {
	      gimplify_init_ctor_eval_range (object, lower, upper, value,
					     array_elt_type, pre_p, cleared);
	      continue;
	    }
	}

      /* Build the reference to the element: object[purpose] for arrays,
	 object.purpose for records.  */
      if (array_elt_type)
	{
	  /* Do not use bitsizetype for ARRAY_REF indices.  */
	  if (TYPE_DOMAIN (TREE_TYPE (object)))
	    purpose = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
				    purpose);
	  cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
			 purpose, NULL_TREE, NULL_TREE);
	}
      else
	{
	  gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
	  cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
			 unshare_expr (object), purpose, NULL_TREE);
	}

      /* Recurse for nested constructors; vector constructors are kept
	 whole and assigned as a unit.  */
      if (TREE_CODE (value) == CONSTRUCTOR
	  && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
	gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
				 pre_p, cleared);
      else
	{
	  /* Emit cref = value as an INIT_EXPR; the node is only a
	     gimplification scratch, so free it afterwards.  */
	  tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
	  gimplify_and_add (init, pre_p);
	  ggc_free (init);
	}
    }
}
3508
3509
3510 /* Returns the appropriate RHS predicate for this LHS. */
3511
3512 gimple_predicate
3513 rhs_predicate_for (tree lhs)
3514 {
3515 if (is_gimple_reg (lhs))
3516 return is_gimple_reg_rhs_or_call;
3517 else
3518 return is_gimple_mem_rhs_or_call;
3519 }
3520
3521 /* Gimplify a C99 compound literal expression. This just means adding
3522 the DECL_EXPR before the current statement and using its anonymous
3523 decl instead. */
3524
3525 static enum gimplify_status
3526 gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p)
3527 {
3528 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
3529 tree decl = DECL_EXPR_DECL (decl_s);
3530 /* Mark the decl as addressable if the compound literal
3531 expression is addressable now, otherwise it is marked too late
3532 after we gimplify the initialization expression. */
3533 if (TREE_ADDRESSABLE (*expr_p))
3534 TREE_ADDRESSABLE (decl) = 1;
3535
3536 /* Preliminarily mark non-addressed complex variables as eligible
3537 for promotion to gimple registers. We'll transform their uses
3538 as we find them. */
3539 if ((TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE
3540 || TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE)
3541 && !TREE_THIS_VOLATILE (decl)
3542 && !needs_to_live_in_memory (decl))
3543 DECL_GIMPLE_REG_P (decl) = 1;
3544
3545 /* This decl isn't mentioned in the enclosing block, so add it to the
3546 list of temps. FIXME it seems a bit of a kludge to say that
3547 anonymous artificial vars aren't pushed, but everything else is. */
3548 if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
3549 gimple_add_tmp_var (decl);
3550
3551 gimplify_and_add (decl_s, pre_p);
3552 *expr_p = decl;
3553 return GS_OK;
3554 }
3555
3556 /* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
3557 return a new CONSTRUCTOR if something changed. */
3558
static tree
optimize_compound_literals_in_ctor (tree orig_ctor)
{
  tree ctor = orig_ctor;
  VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (ctor);
  unsigned int idx, num = VEC_length (constructor_elt, elts);

  for (idx = 0; idx < num; idx++)
    {
      tree value = VEC_index (constructor_elt, elts, idx)->value;
      tree newval = value;
      /* Recurse into nested constructors; for a compound literal whose
	 decl and value are both non-addressable, replace the literal by
	 the (recursively optimized) initializer of its decl.  */
      if (TREE_CODE (value) == CONSTRUCTOR)
	newval = optimize_compound_literals_in_ctor (value);
      else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
	{
	  tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
	  tree decl = DECL_EXPR_DECL (decl_s);
	  tree init = DECL_INITIAL (decl);

	  if (!TREE_ADDRESSABLE (value)
	      && !TREE_ADDRESSABLE (decl)
	      && init)
	    newval = optimize_compound_literals_in_ctor (init);
	}
      if (newval == value)
	continue;

      /* Copy-on-write: the first time an element actually changes, copy
	 the constructor node and its element vector so the original is
	 left untouched for other users.  */
      if (ctor == orig_ctor)
	{
	  ctor = copy_node (orig_ctor);
	  CONSTRUCTOR_ELTS (ctor) = VEC_copy (constructor_elt, gc, elts);
	  elts = CONSTRUCTOR_ELTS (ctor);
	}
      VEC_index (constructor_elt, elts, idx)->value = newval;
    }
  /* Returns ORIG_CTOR unchanged when nothing was replaced.  */
  return ctor;
}
3596
3597
3598
3599 /* A subroutine of gimplify_modify_expr. Break out elements of a
3600 CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
3601
3602 Note that we still need to clear any elements that don't have explicit
3603 initializers, so if not all elements are initialized we keep the
3604 original MODIFY_EXPR, we just remove all of the constructor elements.
3605
3606 If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
3607 GS_ERROR if we would have to create a temporary when gimplifying
3608 this constructor. Otherwise, return GS_OK.
3609
3610 If NOTIFY_TEMP_CREATION is false, just do the gimplification. */
3611
static enum gimplify_status
gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			   bool want_value, bool notify_temp_creation)
{
  tree object, ctor, type;
  enum gimplify_status ret;
  VEC(constructor_elt,gc) *elts;

  gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);

  /* Gimplify the LHS first, unless we are only probing whether a
     temporary would be created.  */
  if (!notify_temp_creation)
    {
      ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
			   is_gimple_lvalue, fb_lvalue);
      if (ret == GS_ERROR)
	return ret;
    }

  /* Fold embedded compound literals in the constructor before
     inspecting it.  */
  object = TREE_OPERAND (*expr_p, 0);
  ctor = TREE_OPERAND (*expr_p, 1) =
    optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
  type = TREE_TYPE (ctor);
  elts = CONSTRUCTOR_ELTS (ctor);
  ret = GS_ALL_DONE;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case ARRAY_TYPE:
      {
	struct gimplify_init_ctor_preeval_data preeval_data;
	HOST_WIDE_INT num_type_elements, num_ctor_elements;
	HOST_WIDE_INT num_nonzero_elements;
	bool cleared, valid_const_initializer;

	/* Aggregate types must lower constructors to initialization of
	   individual elements.  The exception is that a CONSTRUCTOR node
	   with no elements indicates zero-initialization of the whole.  */
	if (VEC_empty (constructor_elt, elts))
	  {
	    if (notify_temp_creation)
	      return GS_OK;
	    break;
	  }

	/* Fetch information about the constructor to direct later processing.
	   We might want to make static versions of it in various cases, and
	   can only do so if it known to be a valid constant initializer.  */
	valid_const_initializer
	  = categorize_ctor_elements (ctor, &num_nonzero_elements,
				      &num_ctor_elements, &cleared);

	/* If a const aggregate variable is being initialized, then it
	   should never be a lose to promote the variable to be static.  */
	if (valid_const_initializer
	    && num_nonzero_elements > 1
	    && TREE_READONLY (object)
	    && TREE_CODE (object) == VAR_DECL
	    && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object)))
	  {
	    /* Promotion creates a static, so a probing caller must be
	       told a temporary would be needed.  */
	    if (notify_temp_creation)
	      return GS_ERROR;
	    DECL_INITIAL (object) = ctor;
	    TREE_STATIC (object) = 1;
	    if (!DECL_NAME (object))
	      DECL_NAME (object) = create_tmp_var_name ("C");
	    walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);

	    /* ??? C++ doesn't automatically append a .<number> to the
	       assembler name, and even when it does, it looks a FE private
	       data structures to figure out what that number should be,
	       which are not set for this variable.  I suppose this is
	       important for local statics for inline functions, which aren't
	       "local" in the object file sense.  So in order to get a unique
	       TU-local symbol, we must invoke the lhd version now.  */
	    lhd_set_decl_assembler_name (object);

	    *expr_p = NULL_TREE;
	    break;
	  }

	/* If there are "lots" of initialized elements, even discounting
	   those that are not address constants (and thus *must* be
	   computed at runtime), then partition the constructor into
	   constant and non-constant parts.  Block copy the constant
	   parts in, then generate code for the non-constant parts.  */
	/* TODO.  There's code in cp/typeck.c to do this.  */

	num_type_elements = count_type_elements (type, true);

	/* If count_type_elements could not determine number of type elements
	   for a constant-sized object, assume clearing is needed.
	   Don't do this for variable-sized objects, as store_constructor
	   will ignore the clearing of variable-sized objects.  */
	if (num_type_elements < 0 && int_size_in_bytes (type) >= 0)
	  cleared = true;
	/* If there are "lots" of zeros, then block clear the object first.  */
	else if (num_type_elements - num_nonzero_elements
		 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
		 && num_nonzero_elements < num_type_elements/4)
	  cleared = true;
	/* ??? This bit ought not be needed.  For any element not present
	   in the initializer, we should simply set them to zero.  Except
	   we'd need to *find* the elements that are not present, and that
	   requires trickery to avoid quadratic compile-time behavior in
	   large cases or excessive memory use in small cases.  */
	else if (num_ctor_elements < num_type_elements)
	  cleared = true;

	/* If there are "lots" of initialized elements, and all of them
	   are valid address constants, then the entire initializer can
	   be dropped to memory, and then memcpy'd out.  Don't do this
	   for sparse arrays, though, as it's more efficient to follow
	   the standard CONSTRUCTOR behavior of memset followed by
	   individual element initialization.  Also don't do this for small
	   all-zero initializers (which aren't big enough to merit
	   clearing), and don't try to make bitwise copies of
	   TREE_ADDRESSABLE types.  */
	if (valid_const_initializer
	    && !(cleared || num_nonzero_elements == 0)
	    && !TREE_ADDRESSABLE (type))
	  {
	    HOST_WIDE_INT size = int_size_in_bytes (type);
	    unsigned int align;

	    /* ??? We can still get unbounded array types, at least
	       from the C++ front end.  This seems wrong, but attempt
	       to work around it for now.  */
	    if (size < 0)
	      {
		size = int_size_in_bytes (TREE_TYPE (object));
		if (size >= 0)
		  TREE_TYPE (ctor) = type = TREE_TYPE (object);
	      }

	    /* Find the maximum alignment we can assume for the object.  */
	    /* ??? Make use of DECL_OFFSET_ALIGN.  */
	    if (DECL_P (object))
	      align = DECL_ALIGN (object);
	    else
	      align = TYPE_ALIGN (type);

	    if (size > 0
		&& num_nonzero_elements > 1
		&& !can_move_by_pieces (size, align))
	      {
		if (notify_temp_creation)
		  return GS_ERROR;

		walk_tree (&ctor, force_labels_r, NULL, NULL);
		TREE_OPERAND (*expr_p, 1) = tree_output_constant_def (ctor);

		/* This is no longer an assignment of a CONSTRUCTOR, but
		   we still may have processing to do on the LHS.  So
		   pretend we didn't do anything here to let that happen.  */
		return GS_UNHANDLED;
	      }
	  }

	/* If the target is volatile and we have non-zero elements
	   initialize the target from a temporary.  */
	if (TREE_THIS_VOLATILE (object)
	    && !TREE_ADDRESSABLE (type)
	    && num_nonzero_elements > 0)
	  {
	    tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type), NULL);
	    TREE_OPERAND (*expr_p, 0) = temp;
	    *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
			      *expr_p,
			      build2 (MODIFY_EXPR, void_type_node,
				      object, temp));
	    return GS_OK;
	  }

	if (notify_temp_creation)
	  return GS_OK;

	/* If there are nonzero elements and if needed, pre-evaluate to capture
	   elements overlapping with the lhs into temporaries.  We must do this
	   before clearing to fetch the values before they are zeroed-out.  */
	if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR)
	  {
	    preeval_data.lhs_base_decl = get_base_address (object);
	    if (!DECL_P (preeval_data.lhs_base_decl))
	      preeval_data.lhs_base_decl = NULL;
	    preeval_data.lhs_alias_set = get_alias_set (object);

	    gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
					pre_p, post_p, &preeval_data);
	  }

	if (cleared)
	  {
	    /* Zap the CONSTRUCTOR element list, which simplifies this case.
	       Note that we still have to gimplify, in order to handle the
	       case of variable sized types.  Avoid shared tree structures.  */
	    CONSTRUCTOR_ELTS (ctor) = NULL;
	    TREE_SIDE_EFFECTS (ctor) = 0;
	    object = unshare_expr (object);
	    gimplify_stmt (expr_p, pre_p);
	  }

	/* If we have not block cleared the object, or if there are nonzero
	   elements in the constructor, add assignments to the individual
	   scalar fields of the object.  */
	if (!cleared || num_nonzero_elements > 0)
	  gimplify_init_ctor_eval (object, elts, pre_p, cleared);

	*expr_p = NULL_TREE;
      }
      break;

    case COMPLEX_TYPE:
      {
	tree r, i;

	if (notify_temp_creation)
	  return GS_OK;

	/* Extract the real and imaginary parts out of the ctor.  */
	gcc_assert (VEC_length (constructor_elt, elts) == 2);
	r = VEC_index (constructor_elt, elts, 0)->value;
	i = VEC_index (constructor_elt, elts, 1)->value;
	/* A missing part defaults to zero.  */
	if (r == NULL || i == NULL)
	  {
	    tree zero = fold_convert (TREE_TYPE (type), integer_zero_node);
	    if (r == NULL)
	      r = zero;
	    if (i == NULL)
	      i = zero;
	  }

	/* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
	   represent creation of a complex value.  */
	if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
	  {
	    ctor = build_complex (type, r, i);
	    TREE_OPERAND (*expr_p, 1) = ctor;
	  }
	else
	  {
	    ctor = build2 (COMPLEX_EXPR, type, r, i);
	    TREE_OPERAND (*expr_p, 1) = ctor;
	    ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
				 pre_p,
				 post_p,
				 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
				 fb_rvalue);
	  }
      }
      break;

    case VECTOR_TYPE:
      {
	unsigned HOST_WIDE_INT ix;
	constructor_elt *ce;

	if (notify_temp_creation)
	  return GS_OK;

	/* Go ahead and simplify constant constructors to VECTOR_CST.  */
	if (TREE_CONSTANT (ctor))
	  {
	    bool constant_p = true;
	    tree value;

	    /* Even when ctor is constant, it might contain non-*_CST
	       elements, such as addresses or trapping values like
	       1.0/0.0 - 1.0/0.0.  Such expressions don't belong
	       in VECTOR_CST nodes.  */
	    FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
	      if (!CONSTANT_CLASS_P (value))
		{
		  constant_p = false;
		  break;
		}

	    if (constant_p)
	      {
		TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
		break;
	      }

	    /* Don't reduce an initializer constant even if we can't
	       make a VECTOR_CST.  It won't do anything for us, and it'll
	       prevent us from representing it as a single constant.  */
	    if (initializer_constant_valid_p (ctor, type))
	      break;

	    TREE_CONSTANT (ctor) = 0;
	  }

	/* Vector types use CONSTRUCTOR all the way through gimple
	   compilation as a general initializer.  */
	for (ix = 0; VEC_iterate (constructor_elt, elts, ix, ce); ix++)
	  {
	    enum gimplify_status tret;
	    tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
				  fb_rvalue);
	    if (tret == GS_ERROR)
	      ret = GS_ERROR;
	  }
	/* A non-register LHS needs the whole CONSTRUCTOR in a formal
	   temporary.  */
	if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
	  TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
      }
      break;

    default:
      /* So how did we get a CONSTRUCTOR for a scalar type?  */
      gcc_unreachable ();
    }

  if (ret == GS_ERROR)
    return GS_ERROR;
  else if (want_value)
    {
      *expr_p = object;
      return GS_OK;
    }
  else
    {
      /* If we have gimplified both sides of the initializer but have
	 not emitted an assignment, do so now.  */
      if (*expr_p)
	{
	  tree lhs = TREE_OPERAND (*expr_p, 0);
	  tree rhs = TREE_OPERAND (*expr_p, 1);
	  gimple init = gimple_build_assign (lhs, rhs);
	  gimplify_seq_add_stmt (pre_p, init);
	  *expr_p = NULL;
	}

      return GS_ALL_DONE;
    }
}
3949
3950 /* Given a pointer value OP0, return a simplified version of an
3951 indirection through OP0, or NULL_TREE if no simplification is
3952 possible. Note that the resulting type may be different from
3953 the type pointed to in the sense that it is still compatible
3954 from the langhooks point of view. */
3955
3956 tree
3957 gimple_fold_indirect_ref (tree t)
3958 {
3959 tree type = TREE_TYPE (TREE_TYPE (t));
3960 tree sub = t;
3961 tree subtype;
3962
3963 STRIP_NOPS (sub);
3964 subtype = TREE_TYPE (sub);
3965 if (!POINTER_TYPE_P (subtype))
3966 return NULL_TREE;
3967
3968 if (TREE_CODE (sub) == ADDR_EXPR)
3969 {
3970 tree op = TREE_OPERAND (sub, 0);
3971 tree optype = TREE_TYPE (op);
3972 /* *&p => p */
3973 if (useless_type_conversion_p (type, optype))
3974 return op;
3975
3976 /* *(foo *)&fooarray => fooarray[0] */
3977 if (TREE_CODE (optype) == ARRAY_TYPE
3978 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
3979 && useless_type_conversion_p (type, TREE_TYPE (optype)))
3980 {
3981 tree type_domain = TYPE_DOMAIN (optype);
3982 tree min_val = size_zero_node;
3983 if (type_domain && TYPE_MIN_VALUE (type_domain))
3984 min_val = TYPE_MIN_VALUE (type_domain);
3985 if (TREE_CODE (min_val) == INTEGER_CST)
3986 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
3987 }
3988 /* *(foo *)&complexfoo => __real__ complexfoo */
3989 else if (TREE_CODE (optype) == COMPLEX_TYPE
3990 && useless_type_conversion_p (type, TREE_TYPE (optype)))
3991 return fold_build1 (REALPART_EXPR, type, op);
3992 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
3993 else if (TREE_CODE (optype) == VECTOR_TYPE
3994 && useless_type_conversion_p (type, TREE_TYPE (optype)))
3995 {
3996 tree part_width = TYPE_SIZE (type);
3997 tree index = bitsize_int (0);
3998 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
3999 }
4000 }
4001
4002 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
4003 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
4004 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
4005 {
4006 tree op00 = TREE_OPERAND (sub, 0);
4007 tree op01 = TREE_OPERAND (sub, 1);
4008 tree op00type;
4009
4010 STRIP_NOPS (op00);
4011 op00type = TREE_TYPE (op00);
4012 if (TREE_CODE (op00) == ADDR_EXPR
4013 && TREE_CODE (TREE_TYPE (op00type)) == VECTOR_TYPE
4014 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (op00type))))
4015 {
4016 HOST_WIDE_INT offset = tree_low_cst (op01, 0);
4017 tree part_width = TYPE_SIZE (type);
4018 unsigned HOST_WIDE_INT part_widthi
4019 = tree_low_cst (part_width, 0) / BITS_PER_UNIT;
4020 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
4021 tree index = bitsize_int (indexi);
4022 if (offset / part_widthi
4023 <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (op00type)))
4024 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (op00, 0),
4025 part_width, index);
4026 }
4027 }
4028
4029 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
4030 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
4031 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
4032 {
4033 tree op00 = TREE_OPERAND (sub, 0);
4034 tree op01 = TREE_OPERAND (sub, 1);
4035 tree op00type;
4036
4037 STRIP_NOPS (op00);
4038 op00type = TREE_TYPE (op00);
4039 if (TREE_CODE (op00) == ADDR_EXPR
4040 && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
4041 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (op00type))))
4042 {
4043 tree size = TYPE_SIZE_UNIT (type);
4044 if (tree_int_cst_equal (size, op01))
4045 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (op00, 0));
4046 }
4047 }
4048
4049 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
4050 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
4051 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
4052 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
4053 {
4054 tree type_domain;
4055 tree min_val = size_zero_node;
4056 tree osub = sub;
4057 sub = gimple_fold_indirect_ref (sub);
4058 if (! sub)
4059 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
4060 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
4061 if (type_domain && TYPE_MIN_VALUE (type_domain))
4062 min_val = TYPE_MIN_VALUE (type_domain);
4063 if (TREE_CODE (min_val) == INTEGER_CST)
4064 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
4065 }
4066
4067 return NULL_TREE;
4068 }
4069
4070 /* Given a pointer value OP0, return a simplified version of an
4071 indirection through OP0, or NULL_TREE if no simplification is
4072 possible. This may only be applied to a rhs of an expression.
4073 Note that the resulting type may be different from the type pointed
4074 to in the sense that it is still compatible from the langhooks
4075 point of view. */
4076
4077 static tree
4078 gimple_fold_indirect_ref_rhs (tree t)
4079 {
4080 return gimple_fold_indirect_ref (t);
4081 }
4082
4083 /* Subroutine of gimplify_modify_expr to do simplifications of
4084 MODIFY_EXPRs based on the code of the RHS. We loop for as long as
4085 something changes. */
4086
static enum gimplify_status
gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
			  gimple_seq *pre_p, gimple_seq *post_p,
			  bool want_value)
{
  enum gimplify_status ret = GS_OK;

  /* Keep simplifying until a pass leaves the RHS unhandled.  In this
     loop GS_UNHANDLED means "no further special-casing applies", not
     an error; the caller then falls back to generic gimplification.  */
  while (ret != GS_UNHANDLED)
    switch (TREE_CODE (*from_p))
      {
      case VAR_DECL:
	/* If we're assigning from a read-only variable initialized with
	   a constructor, do the direct assignment from the constructor,
	   but only if neither source nor target are volatile since this
	   latter assignment might end up being done on a per-field basis.  */
	if (DECL_INITIAL (*from_p)
	    && TREE_READONLY (*from_p)
	    && !TREE_THIS_VOLATILE (*from_p)
	    && !TREE_THIS_VOLATILE (*to_p)
	    && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR)
	  {
	    tree old_from = *from_p;

	    /* Move the constructor into the RHS.  */
	    *from_p = unshare_expr (DECL_INITIAL (*from_p));

	    /* Let's see if gimplify_init_constructor will need to put
	       it in memory.  If so, revert the change.  */
	    ret = gimplify_init_constructor (expr_p, NULL, NULL, false, true);
	    if (ret == GS_ERROR)
	      {
		*from_p = old_from;
		/* Fall through.  */
	      }
	    else
	      {
		/* The constructor substitution succeeded; loop again to
		   let the CONSTRUCTOR case handle the new RHS.  */
		ret = GS_OK;
		break;
	      }
	  }
	ret = GS_UNHANDLED;
	break;
      case INDIRECT_REF:
	{
	  /* If we have code like

	     *(const A*)(A*)&x

	     where the type of "x" is a (possibly cv-qualified variant
	     of "A"), treat the entire expression as identical to "x".
	     This kind of code arises in C++ when an object is bound
	     to a const reference, and if "x" is a TARGET_EXPR we want
	     to take advantage of the optimization below.  */
	  tree t = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
	  if (t)
	    {
	      *from_p = t;
	      ret = GS_OK;
	    }
	  else
	    ret = GS_UNHANDLED;
	  break;
	}

      case TARGET_EXPR:
	{
	  /* If we are initializing something from a TARGET_EXPR, strip the
	     TARGET_EXPR and initialize it directly, if possible.  This can't
	     be done if the initializer is void, since that implies that the
	     temporary is set in some non-trivial way.

	     ??? What about code that pulls out the temp and uses it
	     elsewhere? I think that such code never uses the TARGET_EXPR as
	     an initializer.  If I'm wrong, we'll die because the temp won't
	     have any RTL.  In that case, I guess we'll need to replace
	     references somehow.  */
	  tree init = TARGET_EXPR_INITIAL (*from_p);

	  if (init
	      && !VOID_TYPE_P (TREE_TYPE (init)))
	    {
	      *from_p = init;
	      ret = GS_OK;
	    }
	  else
	    ret = GS_UNHANDLED;
	}
	break;

      case COMPOUND_EXPR:
	/* Remove any COMPOUND_EXPR in the RHS so the following cases will be
	   caught.  */
	gimplify_compound_expr (from_p, pre_p, true);
	ret = GS_OK;
	break;

      case CONSTRUCTOR:
	/* If we're initializing from a CONSTRUCTOR, break this into
	   individual MODIFY_EXPRs.  */
	return gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
					  false);

      case COND_EXPR:
	/* If we're assigning to a non-register type, push the assignment
	   down into the branches.  This is mandatory for ADDRESSABLE types,
	   since we cannot generate temporaries for such, but it saves a
	   copy in other cases as well.  */
	if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
	  {
	    /* This code should mirror the code in gimplify_cond_expr. */
	    enum tree_code code = TREE_CODE (*expr_p);
	    tree cond = *from_p;
	    tree result = *to_p;

	    ret = gimplify_expr (&result, pre_p, post_p,
				 is_gimple_lvalue, fb_lvalue);
	    if (ret != GS_ERROR)
	      ret = GS_OK;

	    /* Wrap each non-void arm of the COND_EXPR in an assignment
	       to RESULT, then void the COND_EXPR itself.  */
	    if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
	      TREE_OPERAND (cond, 1)
		= build2 (code, void_type_node, result,
			  TREE_OPERAND (cond, 1));
	    if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
	      TREE_OPERAND (cond, 2)
		= build2 (code, void_type_node, unshare_expr (result),
			  TREE_OPERAND (cond, 2));

	    TREE_TYPE (cond) = void_type_node;
	    recalculate_side_effects (cond);

	    if (want_value)
	      {
		gimplify_and_add (cond, pre_p);
		*expr_p = unshare_expr (result);
	      }
	    else
	      *expr_p = cond;
	    return ret;
	  }
	else
	  ret = GS_UNHANDLED;
	break;

      case CALL_EXPR:
	/* For calls that return in memory, give *to_p as the CALL_EXPR's
	   return slot so that we don't generate a temporary.  */
	if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
	    && aggregate_value_p (*from_p, *from_p))
	  {
	    bool use_target;

	    if (!(rhs_predicate_for (*to_p))(*from_p))
	      /* If we need a temporary, *to_p isn't accurate.  */
	      use_target = false;
	    else if (TREE_CODE (*to_p) == RESULT_DECL
		     && DECL_NAME (*to_p) == NULL_TREE
		     && needs_to_live_in_memory (*to_p))
	      /* It's OK to use the return slot directly unless it's an NRV. */
	      use_target = true;
	    else if (is_gimple_reg_type (TREE_TYPE (*to_p))
		     || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
	      /* Don't force regs into memory.  */
	      use_target = false;
	    else if (TREE_CODE (*expr_p) == INIT_EXPR)
	      /* It's OK to use the target directly if it's being
		 initialized. */
	      use_target = true;
	    else if (!is_gimple_non_addressable (*to_p))
	      /* Don't use the original target if it's already addressable;
		 if its address escapes, and the called function uses the
		 NRV optimization, a conforming program could see *to_p
		 change before the called function returns; see c++/19317.
		 When optimizing, the return_slot pass marks more functions
		 as safe after we have escape info.  */
	      use_target = false;
	    else
	      use_target = true;

	    if (use_target)
	      {
		CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
		mark_addressable (*to_p);
	      }
	  }

	/* The call itself still needs generic gimplification.  */
	ret = GS_UNHANDLED;
	break;

      case WITH_SIZE_EXPR:
	/* Likewise for calls that return an aggregate of non-constant size,
	   since we would not be able to generate a temporary at all.  */
	if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR)
	  {
	    *from_p = TREE_OPERAND (*from_p, 0);
	    ret = GS_OK;
	  }
	else
	  ret = GS_UNHANDLED;
	break;

	/* If we're initializing from a container, push the initialization
	   inside it.  */
      case CLEANUP_POINT_EXPR:
      case BIND_EXPR:
      case STATEMENT_LIST:
	{
	  tree wrap = *from_p;
	  tree t;

	  ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
			       fb_lvalue);
	  if (ret != GS_ERROR)
	    ret = GS_OK;

	  /* voidify_wrapper_expr moves the assignment inside the wrapper
	     and is expected to return the whole expression unchanged.  */
	  t = voidify_wrapper_expr (wrap, *expr_p);
	  gcc_assert (t == *expr_p);

	  if (want_value)
	    {
	      gimplify_and_add (wrap, pre_p);
	      *expr_p = unshare_expr (*to_p);
	    }
	  else
	    *expr_p = wrap;
	  return GS_OK;
	}

      case COMPOUND_LITERAL_EXPR:
	{
	  tree complit = TREE_OPERAND (*expr_p, 1);
	  tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
	  tree decl = DECL_EXPR_DECL (decl_s);
	  tree init = DECL_INITIAL (decl);

	  /* struct T x = (struct T) { 0, 1, 2 } can be optimized
	     into struct T x = { 0, 1, 2 } if the address of the
	     compound literal has never been taken.  */
	  if (!TREE_ADDRESSABLE (complit)
	      && !TREE_ADDRESSABLE (decl)
	      && init)
	    {
	      *expr_p = copy_node (*expr_p);
	      TREE_OPERAND (*expr_p, 1) = init;
	      return GS_OK;
	    }
	}
	/* FALLTHRU -- if the optimization does not apply, treat the
	   compound literal like any other unhandled RHS.  */

      default:
	ret = GS_UNHANDLED;
	break;
      }

  return ret;
}
4342
4343
4344 /* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
4345 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with
4346 DECL_GIMPLE_REG_P set.
4347
4348 IMPORTANT NOTE: This promotion is performed by introducing a load of the
4349 other, unmodified part of the complex object just before the total store.
4350 As a consequence, if the object is still uninitialized, an undefined value
4351 will be loaded into a register, which may result in a spurious exception
4352 if the register is floating-point and the value happens to be a signaling
4353 NaN for example. Then the fully-fledged complex operations lowering pass
4354 followed by a DCE pass are necessary in order to fix things up. */
4355
4356 static enum gimplify_status
4357 gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
4358 bool want_value)
4359 {
4360 enum tree_code code, ocode;
4361 tree lhs, rhs, new_rhs, other, realpart, imagpart;
4362
4363 lhs = TREE_OPERAND (*expr_p, 0);
4364 rhs = TREE_OPERAND (*expr_p, 1);
4365 code = TREE_CODE (lhs);
4366 lhs = TREE_OPERAND (lhs, 0);
4367
4368 ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
4369 other = build1 (ocode, TREE_TYPE (rhs), lhs);
4370 other = get_formal_tmp_var (other, pre_p);
4371
4372 realpart = code == REALPART_EXPR ? rhs : other;
4373 imagpart = code == REALPART_EXPR ? other : rhs;
4374
4375 if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
4376 new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
4377 else
4378 new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
4379
4380 gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
4381 *expr_p = (want_value) ? rhs : NULL_TREE;
4382
4383 return GS_ALL_DONE;
4384 }
4385
4386
4387 /* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
4388
4389 modify_expr
4390 : varname '=' rhs
4391 | '*' ID '=' rhs
4392
4393 PRE_P points to the list where side effects that must happen before
4394 *EXPR_P should be stored.
4395
4396 POST_P points to the list where side effects that must happen after
4397 *EXPR_P should be stored.
4398
4399 WANT_VALUE is nonzero iff we want to use the value of this expression
4400 in another expression. */
4401
static enum gimplify_status
gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
		      bool want_value)
{
  tree *from_p = &TREE_OPERAND (*expr_p, 1);
  tree *to_p = &TREE_OPERAND (*expr_p, 0);
  enum gimplify_status ret = GS_UNHANDLED;
  gimple assign;
  location_t loc = EXPR_LOCATION (*expr_p);

  gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
	      || TREE_CODE (*expr_p) == INIT_EXPR);

  /* Insert pointer conversions required by the middle-end that are not
     required by the frontend.  This fixes middle-end type checking for
     for example gcc.dg/redecl-6.c.  */
  if (POINTER_TYPE_P (TREE_TYPE (*to_p)))
    {
      STRIP_USELESS_TYPE_CONVERSION (*from_p);
      if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
	*from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
    }

  /* See if any simplifications can be done based on what the RHS is.  */
  ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
				  want_value);
  if (ret != GS_UNHANDLED)
    return ret;

  /* For zero sized types only gimplify the left hand side and right hand
     side as statements and throw away the assignment.  Do this after
     gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
     types properly.  */
  if (zero_sized_type (TREE_TYPE (*from_p)) && !want_value)
    {
      gimplify_stmt (from_p, pre_p);
      gimplify_stmt (to_p, pre_p);
      *expr_p = NULL_TREE;
      return GS_ALL_DONE;
    }

  /* If the value being copied is of variable width, compute the length
     of the copy into a WITH_SIZE_EXPR.   Note that we need to do this
     before gimplifying any of the operands so that we can resolve any
     PLACEHOLDER_EXPRs in the size.  Also note that the RTL expander uses
     the size of the expression to be copied, not of the destination, so
     that is what we must do here.  */
  maybe_with_size_expr (from_p);

  /* Gimplify the LHS first; its side effects must execute before the
     RHS's per GENERIC evaluation order.  */
  ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
  if (ret == GS_ERROR)
    return ret;

  /* As a special case, we have to temporarily allow for assignments
     with a CALL_EXPR on the RHS.  Since in GIMPLE a function call is
     a toplevel statement, when gimplifying the GENERIC expression
     MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
     GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.

     Instead, we need to create the tuple GIMPLE_CALL <a, foo>.  To
     prevent gimplify_expr from trying to create a new temporary for
     foo's LHS, we tell it that it should only gimplify until it
     reaches the CALL_EXPR.  On return from gimplify_expr, the newly
     created GIMPLE_CALL <foo> will be the last statement in *PRE_P
     and all we need to do here is set 'a' to be its LHS.  */
  ret = gimplify_expr (from_p, pre_p, post_p, rhs_predicate_for (*to_p),
		       fb_rvalue);
  if (ret == GS_ERROR)
    return ret;

  /* Now see if the above changed *from_p to something we handle specially.  */
  ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
				  want_value);
  if (ret != GS_UNHANDLED)
    return ret;

  /* If we've got a variable sized assignment between two lvalues (i.e. does
     not involve a call), then we can make things a bit more straightforward
     by converting the assignment to memcpy or memset.  */
  if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
    {
      tree from = TREE_OPERAND (*from_p, 0);
      tree size = TREE_OPERAND (*from_p, 1);

      if (TREE_CODE (from) == CONSTRUCTOR)
	return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);

      if (is_gimple_addressable (from))
	{
	  *from_p = from;
	  return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
						 pre_p);
	}
    }

  /* Transform partial stores to non-addressable complex variables into
     total stores.  This allows us to use real instead of virtual operands
     for these variables, which improves optimization.  */
  if ((TREE_CODE (*to_p) == REALPART_EXPR
       || TREE_CODE (*to_p) == IMAGPART_EXPR)
      && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
    return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);

  /* Try to alleviate the effects of the gimplification creating artificial
     temporaries (see for example is_gimple_reg_rhs) on the debug info.
     Give the ignored temporary a debug-friendly name derived from the
     user variable it feeds, and record the debug expression.  */
  if (!gimplify_ctxp->into_ssa
      && DECL_P (*from_p)
      && DECL_IGNORED_P (*from_p)
      && DECL_P (*to_p)
      && !DECL_IGNORED_P (*to_p))
    {
      if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
	DECL_NAME (*from_p)
	  = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
      DECL_DEBUG_EXPR_IS_FROM (*from_p) = 1;
      SET_DECL_DEBUG_EXPR (*from_p, *to_p);
   }

  if (TREE_CODE (*from_p) == CALL_EXPR)
    {
      /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
	 instead of a GIMPLE_ASSIGN.  */
      assign = gimple_build_call_from_tree (*from_p);
      /* A noreturn call never produces a value to assign.  */
      if (!gimple_call_noreturn_p (assign))
	gimple_call_set_lhs (assign, *to_p);
    }
  else
    {
      assign = gimple_build_assign (*to_p, *from_p);
      gimple_set_location (assign, EXPR_LOCATION (*expr_p));
    }

  gimplify_seq_add_stmt (pre_p, assign);

  if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
    {
      /* If we've somehow already got an SSA_NAME on the LHS, then
	 we've probably modified it twice.  Not good.  */
      gcc_assert (TREE_CODE (*to_p) != SSA_NAME);
      *to_p = make_ssa_name (*to_p, assign);
      gimple_set_lhs (assign, *to_p);
    }

  if (want_value)
    {
      /* The value of the assignment is the (unshared) LHS.  */
      *expr_p = unshare_expr (*to_p);
      return GS_OK;
    }
  else
    *expr_p = NULL;

  return GS_ALL_DONE;
}
4555
4556 /* Gimplify a comparison between two variable-sized objects. Do this
4557 with a call to BUILT_IN_MEMCMP. */
4558
4559 static enum gimplify_status
4560 gimplify_variable_sized_compare (tree *expr_p)
4561 {
4562 tree op0 = TREE_OPERAND (*expr_p, 0);
4563 tree op1 = TREE_OPERAND (*expr_p, 1);
4564 tree t, arg, dest, src;
4565 location_t loc = EXPR_LOCATION (*expr_p);
4566
4567 arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
4568 arg = unshare_expr (arg);
4569 arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
4570 src = build_fold_addr_expr_loc (loc, op1);
4571 dest = build_fold_addr_expr_loc (loc, op0);
4572 t = implicit_built_in_decls[BUILT_IN_MEMCMP];
4573 t = build_call_expr_loc (loc, t, 3, dest, src, arg);
4574 *expr_p
4575 = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
4576
4577 return GS_OK;
4578 }
4579
4580 /* Gimplify a comparison between two aggregate objects of integral scalar
4581 mode as a comparison between the bitwise equivalent scalar values. */
4582
4583 static enum gimplify_status
4584 gimplify_scalar_mode_aggregate_compare (tree *expr_p)
4585 {
4586 location_t loc = EXPR_LOCATION (*expr_p);
4587 tree op0 = TREE_OPERAND (*expr_p, 0);
4588 tree op1 = TREE_OPERAND (*expr_p, 1);
4589
4590 tree type = TREE_TYPE (op0);
4591 tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
4592
4593 op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
4594 op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);
4595
4596 *expr_p
4597 = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
4598
4599 return GS_OK;
4600 }
4601
4602 /* Gimplify TRUTH_ANDIF_EXPR and TRUTH_ORIF_EXPR expressions. EXPR_P
4603 points to the expression to gimplify.
4604
4605 Expressions of the form 'a && b' are gimplified to:
4606
4607 a && b ? true : false
4608
4609 LOCUS is the source location to be put on the generated COND_EXPR.
4610 gimplify_cond_expr will do the rest. */
4611
4612 static enum gimplify_status
4613 gimplify_boolean_expr (tree *expr_p, location_t locus)
4614 {
4615 /* Preserve the original type of the expression. */
4616 tree type = TREE_TYPE (*expr_p);
4617
4618 *expr_p = build3 (COND_EXPR, type, *expr_p,
4619 fold_convert_loc (locus, type, boolean_true_node),
4620 fold_convert_loc (locus, type, boolean_false_node));
4621
4622 SET_EXPR_LOCATION (*expr_p, locus);
4623
4624 return GS_OK;
4625 }
4626
4627 /* Gimplifies an expression sequence. This function gimplifies each
4628 expression and re-writes the original expression with the last
4629 expression of the sequence in GIMPLE form.
4630
4631 PRE_P points to the list where the side effects for all the
4632 expressions in the sequence will be emitted.
4633
4634 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
4635
4636 static enum gimplify_status
4637 gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
4638 {
4639 tree t = *expr_p;
4640
4641 do
4642 {
4643 tree *sub_p = &TREE_OPERAND (t, 0);
4644
4645 if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
4646 gimplify_compound_expr (sub_p, pre_p, false);
4647 else
4648 gimplify_stmt (sub_p, pre_p);
4649
4650 t = TREE_OPERAND (t, 1);
4651 }
4652 while (TREE_CODE (t) == COMPOUND_EXPR);
4653
4654 *expr_p = t;
4655 if (want_value)
4656 return GS_OK;
4657 else
4658 {
4659 gimplify_stmt (expr_p, pre_p);
4660 return GS_ALL_DONE;
4661 }
4662 }
4663
4664
4665 /* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
4666 gimplify. After gimplification, EXPR_P will point to a new temporary
4667 that holds the original value of the SAVE_EXPR node.
4668
4669 PRE_P points to the list where side effects that must happen before
4670 *EXPR_P should be stored. */
4671
static enum gimplify_status
gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  enum gimplify_status ret = GS_ALL_DONE;
  tree val;

  gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
  val = TREE_OPERAND (*expr_p, 0);

  /* If the SAVE_EXPR has not been resolved, then evaluate it once.  */
  if (!SAVE_EXPR_RESOLVED_P (*expr_p))
    {
      /* The operand may be a void-valued expression such as SAVE_EXPRs
	 generated by the Java frontend for class initialization.  It is
	 being executed only for its side-effects.  */
      if (TREE_TYPE (val) == void_type_node)
	{
	  ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
			       is_gimple_stmt, fb_none);
	  /* A void SAVE_EXPR has no value; *EXPR_P becomes NULL below.  */
	  val = NULL;
	}
      else
	/* Evaluate the operand into a temporary exactly once.  */
	val = get_initialized_tmp_var (val, pre_p, post_p);

      /* Record the result and mark the SAVE_EXPR resolved so that any
	 later gimplification of shared references reuses VAL instead
	 of re-evaluating the operand.  */
      TREE_OPERAND (*expr_p, 0) = val;
      SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
    }

  /* Replace the SAVE_EXPR by its (possibly previously recorded)
     resolved value.  */
  *expr_p = val;

  return ret;
}
4704
4705 /* Re-write the ADDR_EXPR node pointed to by EXPR_P
4706
4707 unary_expr
4708 : ...
4709 | '&' varname
4710 ...
4711
4712 PRE_P points to the list where side effects that must happen before
4713 *EXPR_P should be stored.
4714
4715 POST_P points to the list where side effects that must happen after
4716 *EXPR_P should be stored. */
4717
static enum gimplify_status
gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree expr = *expr_p;
  tree op0 = TREE_OPERAND (expr, 0);
  enum gimplify_status ret;
  location_t loc = EXPR_LOCATION (*expr_p);

  switch (TREE_CODE (op0))
    {
    case INDIRECT_REF:
    case MISALIGNED_INDIRECT_REF:
    /* The default case jumps back here when gimplification turns the
       operand into an INDIRECT_REF.  */
    do_indirect_ref:
      /* Check if we are dealing with an expression of the form '&*ptr'.
	 While the front end folds away '&*ptr' into 'ptr', these
	 expressions may be generated internally by the compiler (e.g.,
	 builtins like __builtin_va_end).  */
      /* Caution: the silent array decomposition semantics we allow for
	 ADDR_EXPR means we can't always discard the pair.  */
      /* Gimplification of the ADDR_EXPR operand may drop
	 cv-qualification conversions, so make sure we add them if
	 needed.  */
      {
	tree op00 = TREE_OPERAND (op0, 0);
	tree t_expr = TREE_TYPE (expr);
	tree t_op00 = TREE_TYPE (op00);

	if (!useless_type_conversion_p (t_expr, t_op00))
	  op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
	*expr_p = op00;
	ret = GS_OK;
      }
      break;

    case VIEW_CONVERT_EXPR:
      /* Take the address of our operand and then convert it to the type of
	 this ADDR_EXPR.

	 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
	 all clear.  The impact of this transformation is even less clear.  */

      /* If the operand is a useless conversion, look through it.  Doing so
	 guarantees that the ADDR_EXPR and its operand will remain of the
	 same type.  */
      if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
	op0 = TREE_OPERAND (op0, 0);

      *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
				  build_fold_addr_expr_loc (loc,
							TREE_OPERAND (op0, 0)));
      ret = GS_OK;
      break;

    default:
      /* We use fb_either here because the C frontend sometimes takes
	 the address of a call that returns a struct; see
	 gcc.dg/c99-array-lval-1.c.  The gimplifier will correctly make
	 the implied temporary explicit.  */

      /* Make the operand addressable.  */
      ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
			   is_gimple_addressable, fb_either);
      if (ret == GS_ERROR)
	break;

      /* Then mark it.  Beware that it may not be possible to do so directly
	 if a temporary has been created by the gimplification.  */
      prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p);

      /* Re-read the operand: the calls above may have replaced it.  */
      op0 = TREE_OPERAND (expr, 0);

      /* For various reasons, the gimplification of the expression
	 may have made a new INDIRECT_REF.  */
      if (TREE_CODE (op0) == INDIRECT_REF)
	goto do_indirect_ref;

      mark_addressable (TREE_OPERAND (expr, 0));

      /* The FEs may end up building ADDR_EXPRs early on a decl with
	 an incomplete type.  Re-build ADDR_EXPRs in canonical form
	 here.  */
      if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr))))
	*expr_p = build_fold_addr_expr (op0);

      /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly.  */
      recompute_tree_invariant_for_addr_expr (*expr_p);

      /* If we re-built the ADDR_EXPR add a conversion to the original type
	 if required.  */
      if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
	*expr_p = fold_convert (TREE_TYPE (expr), *expr_p);

      break;
    }

  return ret;
}
4815
4816 /* Gimplify the operands of an ASM_EXPR. Input operands should be a gimple
4817 value; output operands should be a gimple lvalue. */
4818
4819 static enum gimplify_status
4820 gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4821 {
4822 tree expr;
4823 int noutputs;
4824 const char **oconstraints;
4825 int i;
4826 tree link;
4827 const char *constraint;
4828 bool allows_mem, allows_reg, is_inout;
4829 enum gimplify_status ret, tret;
4830 gimple stmt;
4831 VEC(tree, gc) *inputs;
4832 VEC(tree, gc) *outputs;
4833 VEC(tree, gc) *clobbers;
4834 VEC(tree, gc) *labels;
4835 tree link_next;
4836
4837 expr = *expr_p;
4838 noutputs = list_length (ASM_OUTPUTS (expr));
4839 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
4840
4841 inputs = outputs = clobbers = labels = NULL;
4842
4843 ret = GS_ALL_DONE;
4844 link_next = NULL_TREE;
4845 for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
4846 {
4847 bool ok;
4848 size_t constraint_len;
4849
4850 link_next = TREE_CHAIN (link);
4851
4852 oconstraints[i]
4853 = constraint
4854 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4855 constraint_len = strlen (constraint);
4856 if (constraint_len == 0)
4857 continue;
4858
4859 ok = parse_output_constraint (&constraint, i, 0, 0,
4860 &allows_mem, &allows_reg, &is_inout);
4861 if (!ok)
4862 {
4863 ret = GS_ERROR;
4864 is_inout = false;
4865 }
4866
4867 if (!allows_reg && allows_mem)
4868 mark_addressable (TREE_VALUE (link));
4869
4870 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
4871 is_inout ? is_gimple_min_lval : is_gimple_lvalue,
4872 fb_lvalue | fb_mayfail);
4873 if (tret == GS_ERROR)
4874 {
4875 error ("invalid lvalue in asm output %d", i);
4876 ret = tret;
4877 }
4878
4879 VEC_safe_push (tree, gc, outputs, link);
4880 TREE_CHAIN (link) = NULL_TREE;
4881
4882 if (is_inout)
4883 {
4884 /* An input/output operand. To give the optimizers more
4885 flexibility, split it into separate input and output
4886 operands. */
4887 tree input;
4888 char buf[10];
4889
4890 /* Turn the in/out constraint into an output constraint. */
4891 char *p = xstrdup (constraint);
4892 p[0] = '=';
4893 TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);
4894
4895 /* And add a matching input constraint. */
4896 if (allows_reg)
4897 {
4898 sprintf (buf, "%d", i);
4899
4900 /* If there are multiple alternatives in the constraint,
4901 handle each of them individually. Those that allow register
4902 will be replaced with operand number, the others will stay
4903 unchanged. */
4904 if (strchr (p, ',') != NULL)
4905 {
4906 size_t len = 0, buflen = strlen (buf);
4907 char *beg, *end, *str, *dst;
4908
4909 for (beg = p + 1;;)
4910 {
4911 end = strchr (beg, ',');
4912 if (end == NULL)
4913 end = strchr (beg, '\0');
4914 if ((size_t) (end - beg) < buflen)
4915 len += buflen + 1;
4916 else
4917 len += end - beg + 1;
4918 if (*end)
4919 beg = end + 1;
4920 else
4921 break;
4922 }
4923
4924 str = (char *) alloca (len);
4925 for (beg = p + 1, dst = str;;)
4926 {
4927 const char *tem;
4928 bool mem_p, reg_p, inout_p;
4929
4930 end = strchr (beg, ',');
4931 if (end)
4932 *end = '\0';
4933 beg[-1] = '=';
4934 tem = beg - 1;
4935 parse_output_constraint (&tem, i, 0, 0,
4936 &mem_p, &reg_p, &inout_p);
4937 if (dst != str)
4938 *dst++ = ',';
4939 if (reg_p)
4940 {
4941 memcpy (dst, buf, buflen);
4942 dst += buflen;
4943 }
4944 else
4945 {
4946 if (end)
4947 len = end - beg;
4948 else
4949 len = strlen (beg);
4950 memcpy (dst, beg, len);
4951 dst += len;
4952 }
4953 if (end)
4954 beg = end + 1;
4955 else
4956 break;
4957 }
4958 *dst = '\0';
4959 input = build_string (dst - str, str);
4960 }
4961 else
4962 input = build_string (strlen (buf), buf);
4963 }
4964 else
4965 input = build_string (constraint_len - 1, constraint + 1);
4966
4967 free (p);
4968
4969 input = build_tree_list (build_tree_list (NULL_TREE, input),
4970 unshare_expr (TREE_VALUE (link)));
4971 ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
4972 }
4973 }
4974
4975 link_next = NULL_TREE;
4976 for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
4977 {
4978 link_next = TREE_CHAIN (link);
4979 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4980 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
4981 oconstraints, &allows_mem, &allows_reg);
4982
4983 /* If we can't make copies, we can only accept memory. */
4984 if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (link))))
4985 {
4986 if (allows_mem)
4987 allows_reg = 0;
4988 else
4989 {
4990 error ("impossible constraint in %<asm%>");
4991 error ("non-memory input %d must stay in memory", i);
4992 return GS_ERROR;
4993 }
4994 }
4995
4996 /* If the operand is a memory input, it should be an lvalue. */
4997 if (!allows_reg && allows_mem)
4998 {
4999 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
5000 is_gimple_lvalue, fb_lvalue | fb_mayfail);
5001 mark_addressable (TREE_VALUE (link));
5002 if (tret == GS_ERROR)
5003 {
5004 if (EXPR_HAS_LOCATION (TREE_VALUE (link)))
5005 input_location = EXPR_LOCATION (TREE_VALUE (link));
5006 error ("memory input %d is not directly addressable", i);
5007 ret = tret;
5008 }
5009 }
5010 else
5011 {
5012 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
5013 is_gimple_asm_val, fb_rvalue);
5014 if (tret == GS_ERROR)
5015 ret = tret;
5016 }
5017
5018 TREE_CHAIN (link) = NULL_TREE;
5019 VEC_safe_push (tree, gc, inputs, link);
5020 }
5021
5022 for (link = ASM_CLOBBERS (expr); link; ++i, link = TREE_CHAIN (link))
5023 VEC_safe_push (tree, gc, clobbers, link);
5024
5025 for (link = ASM_LABELS (expr); link; ++i, link = TREE_CHAIN (link))
5026 VEC_safe_push (tree, gc, labels, link);
5027
5028 /* Do not add ASMs with errors to the gimple IL stream. */
5029 if (ret != GS_ERROR)
5030 {
5031 stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
5032 inputs, outputs, clobbers, labels);
5033
5034 gimple_asm_set_volatile (stmt, ASM_VOLATILE_P (expr));
5035 gimple_asm_set_input (stmt, ASM_INPUT_P (expr));
5036
5037 gimplify_seq_add_stmt (pre_p, stmt);
5038 }
5039
5040 return ret;
5041 }
5042
5043 /* Gimplify a CLEANUP_POINT_EXPR. Currently this works by adding
5044 GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
5045 gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
5046 return to this function.
5047
5048 FIXME should we complexify the prequeue handling instead? Or use flags
5049 for all the cleanups and let the optimizer tighten them up? The current
5050 code seems pretty fragile; it will break on a cleanup within any
5051 non-conditional nesting. But any such nesting would be broken, anyway;
5052 we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
5053 and continues out of it. We can do that at the RTL level, though, so
5054 having an optimizer to tighten up try/finally regions would be a Good
5055 Thing. */
5056
static enum gimplify_status
gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
{
  gimple_stmt_iterator iter;
  gimple_seq body_sequence = NULL;

  /* If the wrapped expression computes a value, rewrite the wrapper so the
     value lands in a temporary; TEMP is that temporary, or NULL for void.  */
  tree temp = voidify_wrapper_expr (*expr_p, NULL);

  /* We only care about the number of conditions between the innermost
     CLEANUP_POINT_EXPR and the cleanup.  So save and reset the count and
     any cleanups collected outside the CLEANUP_POINT_EXPR.  */
  int old_conds = gimplify_ctxp->conditions;
  gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
  gimplify_ctxp->conditions = 0;
  gimplify_ctxp->conditional_cleanups = NULL;

  gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);

  gimplify_ctxp->conditions = old_conds;
  gimplify_ctxp->conditional_cleanups = old_cleanups;

  /* Walk the gimplified body, turning each GIMPLE_WITH_CLEANUP_EXPR marker
     into a GIMPLE_TRY that protects the statements following it.  */
  for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
    {
      gimple wce = gsi_stmt (iter);

      if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
	{
	  if (gsi_one_before_end_p (iter))
	    {
	      /* The WCE is the last statement of the sequence, so there is
		 nothing left to protect: splice the cleanup in directly in
		 place of the marker.
		 Note that gsi_insert_seq_before and gsi_remove do not
		 scan operands, unlike some other sequence mutators.  */
	      gsi_insert_seq_before_without_update (&iter,
						    gimple_wce_cleanup (wce),
						    GSI_SAME_STMT);
	      gsi_remove (&iter, true);
	      break;
	    }
	  else
	    {
	      gimple gtry;
	      gimple_seq seq;
	      enum gimple_try_flags kind;

	      /* EH-only cleanups run on the exception path only, hence
		 TRY_CATCH; normal cleanups always run, hence TRY_FINALLY.  */
	      if (gimple_wce_cleanup_eh_only (wce))
		kind = GIMPLE_TRY_CATCH;
	      else
		kind = GIMPLE_TRY_FINALLY;
	      seq = gsi_split_seq_after (iter);

	      gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
	      /* Do not use gsi_replace here, as it may scan operands.
		 We want to do a simple structural modification only.  */
	      *gsi_stmt_ptr (&iter) = gtry;
	      /* Continue scanning inside the protected subsequence; it may
		 contain further WCE markers.  */
	      iter = gsi_start (seq);
	    }
	}
      else
	gsi_next (&iter);
    }

  gimplify_seq_add_seq (pre_p, body_sequence);
  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }
  else
    {
      *expr_p = NULL;
      return GS_ALL_DONE;
    }
}
5129
5130 /* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP
5131 is the cleanup action required. EH_ONLY is true if the cleanup should
5132 only be executed if an exception is thrown, not on normal exit. */
5133
5134 static void
5135 gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p)
5136 {
5137 gimple wce;
5138 gimple_seq cleanup_stmts = NULL;
5139
5140 /* Errors can result in improperly nested cleanups. Which results in
5141 confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR. */
5142 if (errorcount || sorrycount)
5143 return;
5144
5145 if (gimple_conditional_context ())
5146 {
5147 /* If we're in a conditional context, this is more complex. We only
5148 want to run the cleanup if we actually ran the initialization that
5149 necessitates it, but we want to run it after the end of the
5150 conditional context. So we wrap the try/finally around the
5151 condition and use a flag to determine whether or not to actually
5152 run the destructor. Thus
5153
5154 test ? f(A()) : 0
5155
5156 becomes (approximately)
5157
5158 flag = 0;
5159 try {
5160 if (test) { A::A(temp); flag = 1; val = f(temp); }
5161 else { val = 0; }
5162 } finally {
5163 if (flag) A::~A(temp);
5164 }
5165 val
5166 */
5167 tree flag = create_tmp_var (boolean_type_node, "cleanup");
5168 gimple ffalse = gimple_build_assign (flag, boolean_false_node);
5169 gimple ftrue = gimple_build_assign (flag, boolean_true_node);
5170
5171 cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
5172 gimplify_stmt (&cleanup, &cleanup_stmts);
5173 wce = gimple_build_wce (cleanup_stmts);
5174
5175 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
5176 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
5177 gimplify_seq_add_stmt (pre_p, ftrue);
5178
5179 /* Because of this manipulation, and the EH edges that jump
5180 threading cannot redirect, the temporary (VAR) will appear
5181 to be used uninitialized. Don't warn. */
5182 TREE_NO_WARNING (var) = 1;
5183 }
5184 else
5185 {
5186 gimplify_stmt (&cleanup, &cleanup_stmts);
5187 wce = gimple_build_wce (cleanup_stmts);
5188 gimple_wce_set_cleanup_eh_only (wce, eh_only);
5189 gimplify_seq_add_stmt (pre_p, wce);
5190 }
5191 }
5192
5193 /* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */
5194
static enum gimplify_status
gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree targ = *expr_p;
  tree temp = TARGET_EXPR_SLOT (targ);
  tree init = TARGET_EXPR_INITIAL (targ);
  enum gimplify_status ret;

  if (init)
    {
      /* TARGET_EXPR temps aren't part of the enclosing block, so add it
	 to the temps list.  Handle also variable length TARGET_EXPRs.  */
      if (TREE_CODE (DECL_SIZE (temp)) != INTEGER_CST)
	{
	  if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
	    gimplify_type_sizes (TREE_TYPE (temp), pre_p);
	  gimplify_vla_decl (temp, pre_p);
	}
      else
	gimple_add_tmp_var (temp);

      /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
	 expression is supposed to initialize the slot.  */
      if (VOID_TYPE_P (TREE_TYPE (init)))
	ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
      else
	{
	  /* Otherwise build an explicit INIT_EXPR storing into the slot
	     and gimplify that instead.  The INIT_EXPR node itself is not
	     referenced after gimplification, so hand it back to the GC.  */
	  tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
	  init = init_expr;
	  ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
	  init = NULL;
	  ggc_free (init_expr);
	}
      if (ret == GS_ERROR)
	{
	  /* PR c++/28266 Make sure this is expanded only once. */
	  TARGET_EXPR_INITIAL (targ) = NULL_TREE;
	  return GS_ERROR;
	}
      /* If gimplification left any of the initializer behind, emit it.  */
      if (init)
	gimplify_and_add (init, pre_p);

      /* If needed, push the cleanup for the temp.  */
      if (TARGET_EXPR_CLEANUP (targ))
	gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
			     CLEANUP_EH_ONLY (targ), pre_p);

      /* Only expand this once.  */
      TREE_OPERAND (targ, 3) = init;
      TARGET_EXPR_INITIAL (targ) = NULL_TREE;
    }
  else
    /* We should have expanded this before.  */
    gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));

  /* The TARGET_EXPR evaluates to its slot.  */
  *expr_p = temp;
  return GS_OK;
}
5253
5254 /* Gimplification of expression trees. */
5255
5256 /* Gimplify an expression which appears at statement context. The
5257 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is
5258 NULL, a new sequence is allocated.
5259
5260 Return true if we actually added a statement to the queue. */
5261
5262 bool
5263 gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
5264 {
5265 gimple_seq_node last;
5266
5267 if (!*seq_p)
5268 *seq_p = gimple_seq_alloc ();
5269
5270 last = gimple_seq_last (*seq_p);
5271 gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
5272 return last != gimple_seq_last (*seq_p);
5273 }
5274
5275
5276 /* Add FIRSTPRIVATE entries for DECL in the OpenMP the surrounding parallels
5277 to CTX. If entries already exist, force them to be some flavor of private.
5278 If there is no enclosing parallel, do nothing. */
5279
5280 void
5281 omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
5282 {
5283 splay_tree_node n;
5284
5285 if (decl == NULL || !DECL_P (decl))
5286 return;
5287
5288 do
5289 {
5290 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5291 if (n != NULL)
5292 {
5293 if (n->value & GOVD_SHARED)
5294 n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
5295 else
5296 return;
5297 }
5298 else if (ctx->region_type != ORT_WORKSHARE)
5299 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
5300
5301 ctx = ctx->outer_context;
5302 }
5303 while (ctx);
5304 }
5305
5306 /* Similarly for each of the type sizes of TYPE. */
5307
5308 static void
5309 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
5310 {
5311 if (type == NULL || type == error_mark_node)
5312 return;
5313 type = TYPE_MAIN_VARIANT (type);
5314
5315 if (pointer_set_insert (ctx->privatized_types, type))
5316 return;
5317
5318 switch (TREE_CODE (type))
5319 {
5320 case INTEGER_TYPE:
5321 case ENUMERAL_TYPE:
5322 case BOOLEAN_TYPE:
5323 case REAL_TYPE:
5324 case FIXED_POINT_TYPE:
5325 omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
5326 omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
5327 break;
5328
5329 case ARRAY_TYPE:
5330 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
5331 omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
5332 break;
5333
5334 case RECORD_TYPE:
5335 case UNION_TYPE:
5336 case QUAL_UNION_TYPE:
5337 {
5338 tree field;
5339 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
5340 if (TREE_CODE (field) == FIELD_DECL)
5341 {
5342 omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
5343 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
5344 }
5345 }
5346 break;
5347
5348 case POINTER_TYPE:
5349 case REFERENCE_TYPE:
5350 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
5351 break;
5352
5353 default:
5354 break;
5355 }
5356
5357 omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
5358 omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
5359 lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
5360 }
5361
5362 /* Add an entry for DECL in the OpenMP context CTX with FLAGS. */
5363
static void
omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
{
  splay_tree_node n;
  unsigned int nflags;
  tree t;

  if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node)
    return;

  /* Never elide decls whose type has TREE_ADDRESSABLE set.  This means
     there are constructors involved somewhere.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (decl))
      || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl)))
    flags |= GOVD_SEEN;

  n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
  if (n != NULL)
    {
      /* We shouldn't be re-adding the decl with the same data
	 sharing class.  */
      gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
      /* The only combination of data sharing classes we should see is
	 FIRSTPRIVATE and LASTPRIVATE.  */
      nflags = n->value | flags;
      gcc_assert ((nflags & GOVD_DATA_SHARE_CLASS)
		  == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE));
      n->value = nflags;
      return;
    }

  /* When adding a variable-sized variable, we have to handle all sorts
     of additional bits of data: the pointer replacement variable, and
     the parameters of the type.  */
  if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
    {
      /* Add the pointer replacement variable as PRIVATE if the variable
	 replacement is private, else FIRSTPRIVATE since we'll need the
	 address of the original variable either for SHARED, or for the
	 copy into or out of the context.  */
      if (!(flags & GOVD_LOCAL))
	{
	  nflags = flags & GOVD_PRIVATE ? GOVD_PRIVATE : GOVD_FIRSTPRIVATE;
	  nflags |= flags & GOVD_SEEN;
	  /* Variable-sized decls have a DECL_VALUE_EXPR of the form *PTR;
	     recover PTR and record it too.  */
	  t = DECL_VALUE_EXPR (decl);
	  gcc_assert (TREE_CODE (t) == INDIRECT_REF);
	  t = TREE_OPERAND (t, 0);
	  gcc_assert (DECL_P (t));
	  omp_add_variable (ctx, t, nflags);
	}

      /* Add all of the variable and type parameters (which should have
	 been gimplified to a formal temporary) as FIRSTPRIVATE.  */
      omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
      omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));

      /* The variable-sized variable itself is never SHARED, only some form
	 of PRIVATE.  The sharing would take place via the pointer variable
	 which we remapped above.  */
      if (flags & GOVD_SHARED)
	flags = GOVD_PRIVATE | GOVD_DEBUG_PRIVATE
		| (flags & (GOVD_SEEN | GOVD_EXPLICIT));

      /* We're going to make use of the TYPE_SIZE_UNIT at least in the
	 alloca statement we generate for the variable, so make sure it
	 is available.  This isn't automatically needed for the SHARED
	 case, since we won't be allocating local storage then.
	 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
	 in this case omp_notice_variable will be called later
	 on when it is gimplified.  */
      else if (! (flags & GOVD_LOCAL))
	omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
    }
  else if (lang_hooks.decls.omp_privatize_by_reference (decl))
    {
      gcc_assert ((flags & GOVD_LOCAL) == 0);
      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));

      /* Similar to the direct variable sized case above, we'll need the
	 size of references being privatized.  */
      if ((flags & GOVD_SHARED) == 0)
	{
	  t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
	  if (TREE_CODE (t) != INTEGER_CST)
	    omp_notice_variable (ctx, t, true);
	}
    }

  /* Finally record DECL itself with the (possibly adjusted) FLAGS.  */
  splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
}
5455
5456 /* Record the fact that DECL was used within the OpenMP context CTX.
5457 IN_CODE is true when real code uses DECL, and false when we should
5458 merely emit default(none) errors. Return true if DECL is going to
5459 be remapped and thus DECL shouldn't be gimplified into its
5460 DECL_VALUE_EXPR (if any). */
5461
static bool
omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
{
  splay_tree_node n;
  unsigned flags = in_code ? GOVD_SEEN : 0;
  bool ret = false, shared;

  if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node)
    return false;

  /* Threadprivate variables are predetermined.  */
  if (is_global_var (decl))
    {
      if (DECL_THREAD_LOCAL_P (decl))
	return false;

      /* A global whose DECL_VALUE_EXPR is rooted at a thread-local decl
	 counts as threadprivate too.  */
      if (DECL_HAS_VALUE_EXPR_P (decl))
	{
	  tree value = get_base_address (DECL_VALUE_EXPR (decl));

	  if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
	    return false;
	}
    }

  n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
  if (n == NULL)
    {
      /* DECL is not yet known in this context; derive its implicit
	 data-sharing class from the applicable default clause.  */
      enum omp_clause_default_kind default_kind, kind;
      struct gimplify_omp_ctx *octx;

      /* Worksharing contexts carry no data-sharing of their own; defer
	 entirely to the enclosing context.  */
      if (ctx->region_type == ORT_WORKSHARE)
	goto do_outer;

      /* ??? Some compiler-generated variables (like SAVE_EXPRs) could be
	 remapped firstprivate instead of shared.  To some extent this is
	 addressed in omp_firstprivatize_type_sizes, but not effectively.  */
      default_kind = ctx->default_kind;
      kind = lang_hooks.decls.omp_predetermined_sharing (decl);
      if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
	default_kind = kind;

      switch (default_kind)
	{
	case OMP_CLAUSE_DEFAULT_NONE:
	  error ("%qE not specified in enclosing parallel",
		 DECL_NAME (decl));
	  error_at (ctx->location, "enclosing parallel");
	  /* FALLTHRU */
	case OMP_CLAUSE_DEFAULT_SHARED:
	  flags |= GOVD_SHARED;
	  break;
	case OMP_CLAUSE_DEFAULT_PRIVATE:
	  flags |= GOVD_PRIVATE;
	  break;
	case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
	  flags |= GOVD_FIRSTPRIVATE;
	  break;
	case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
	  /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED.  */
	  gcc_assert (ctx->region_type == ORT_TASK);
	  if (ctx->outer_context)
	    omp_notice_variable (ctx->outer_context, decl, in_code);
	  /* Scan outward up to and including the innermost parallel: if
	     any context there already makes DECL non-shared, the task
	     must capture it firstprivate.  */
	  for (octx = ctx->outer_context; octx; octx = octx->outer_context)
	    {
	      splay_tree_node n2;

	      n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
	      if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
		{
		  flags |= GOVD_FIRSTPRIVATE;
		  break;
		}
	      if ((octx->region_type & ORT_PARALLEL) != 0)
		break;
	    }
	  if (flags & GOVD_FIRSTPRIVATE)
	    break;
	  /* No enclosing context at all: parameters and function-local
	     variables default to firstprivate for the task.  */
	  if (octx == NULL
	      && (TREE_CODE (decl) == PARM_DECL
		  || (!is_global_var (decl)
		      && DECL_CONTEXT (decl) == current_function_decl)))
	    {
	      flags |= GOVD_FIRSTPRIVATE;
	      break;
	    }
	  flags |= GOVD_SHARED;
	  break;
	default:
	  gcc_unreachable ();
	}

      if ((flags & GOVD_PRIVATE)
	  && lang_hooks.decls.omp_private_outer_ref (decl))
	flags |= GOVD_PRIVATE_OUTER_REF;

      omp_add_variable (ctx, decl, flags);

      shared = (flags & GOVD_SHARED) != 0;
      ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
      goto do_outer;
    }

  /* DECL is already recorded.  If this is its first real use and it is
     variable-sized, also mark its pointer replacement as seen.  */
  if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
      && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
      && DECL_SIZE (decl)
      && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
    {
      splay_tree_node n2;
      tree t = DECL_VALUE_EXPR (decl);
      gcc_assert (TREE_CODE (t) == INDIRECT_REF);
      t = TREE_OPERAND (t, 0);
      gcc_assert (DECL_P (t));
      n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
      n2->value |= GOVD_SEEN;
    }

  shared = ((flags | n->value) & GOVD_SHARED) != 0;
  ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);

  /* If nothing changed, there's nothing left to do.  */
  if ((n->value & flags) == flags)
    return ret;
  flags |= n->value;
  n->value = flags;

 do_outer:
  /* If the variable is private in the current context, then we don't
     need to propagate anything to an outer context.  */
  if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
    return ret;
  if (ctx->outer_context
      && omp_notice_variable (ctx->outer_context, decl, in_code))
    return true;
  return ret;
}
5598
5599 /* Verify that DECL is private within CTX. If there's specific information
5600 to the contrary in the innermost scope, generate an error. */
5601
5602 static bool
5603 omp_is_private (struct gimplify_omp_ctx *ctx, tree decl)
5604 {
5605 splay_tree_node n;
5606
5607 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5608 if (n != NULL)
5609 {
5610 if (n->value & GOVD_SHARED)
5611 {
5612 if (ctx == gimplify_omp_ctxp)
5613 {
5614 error ("iteration variable %qE should be private",
5615 DECL_NAME (decl));
5616 n->value = GOVD_PRIVATE;
5617 return true;
5618 }
5619 else
5620 return false;
5621 }
5622 else if ((n->value & GOVD_EXPLICIT) != 0
5623 && (ctx == gimplify_omp_ctxp
5624 || (ctx->region_type == ORT_COMBINED_PARALLEL
5625 && gimplify_omp_ctxp->outer_context == ctx)))
5626 {
5627 if ((n->value & GOVD_FIRSTPRIVATE) != 0)
5628 error ("iteration variable %qE should not be firstprivate",
5629 DECL_NAME (decl));
5630 else if ((n->value & GOVD_REDUCTION) != 0)
5631 error ("iteration variable %qE should not be reduction",
5632 DECL_NAME (decl));
5633 }
5634 return (ctx == gimplify_omp_ctxp
5635 || (ctx->region_type == ORT_COMBINED_PARALLEL
5636 && gimplify_omp_ctxp->outer_context == ctx));
5637 }
5638
5639 if (ctx->region_type != ORT_WORKSHARE)
5640 return false;
5641 else if (ctx->outer_context)
5642 return omp_is_private (ctx->outer_context, decl);
5643 return false;
5644 }
5645
5646 /* Return true if DECL is private within a parallel region
5647 that binds to the current construct's context or in parallel
5648 region's REDUCTION clause. */
5649
5650 static bool
5651 omp_check_private (struct gimplify_omp_ctx *ctx, tree decl)
5652 {
5653 splay_tree_node n;
5654
5655 do
5656 {
5657 ctx = ctx->outer_context;
5658 if (ctx == NULL)
5659 return !(is_global_var (decl)
5660 /* References might be private, but might be shared too. */
5661 || lang_hooks.decls.omp_privatize_by_reference (decl));
5662
5663 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
5664 if (n != NULL)
5665 return (n->value & GOVD_SHARED) == 0;
5666 }
5667 while (ctx->region_type == ORT_WORKSHARE);
5668 return false;
5669 }
5670
5671 /* Scan the OpenMP clauses in *LIST_P, installing mappings into a new
5672 and previous omp contexts. */
5673
static void
gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
			   enum omp_region_type region_type)
{
  struct gimplify_omp_ctx *ctx, *outer_ctx;
  struct gimplify_ctx gctx;
  tree c;

  /* Open a fresh OpenMP context; it is installed as gimplify_omp_ctxp
     at the very end, once all clauses have been scanned.  */
  ctx = new_omp_context (region_type);
  outer_ctx = ctx->outer_context;

  while ((c = *list_p) != NULL)
    {
      bool remove = false;
      bool notice_outer = true;
      const char *check_non_private = NULL;
      unsigned int flags;
      tree decl;

      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_PRIVATE:
	  flags = GOVD_PRIVATE | GOVD_EXPLICIT;
	  if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
	    {
	      flags |= GOVD_PRIVATE_OUTER_REF;
	      OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
	    }
	  else
	    notice_outer = false;
	  goto do_add;
	case OMP_CLAUSE_SHARED:
	  flags = GOVD_SHARED | GOVD_EXPLICIT;
	  goto do_add;
	case OMP_CLAUSE_FIRSTPRIVATE:
	  flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
	  check_non_private = "firstprivate";
	  goto do_add;
	case OMP_CLAUSE_LASTPRIVATE:
	  flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
	  check_non_private = "lastprivate";
	  goto do_add;
	case OMP_CLAUSE_REDUCTION:
	  flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
	  check_non_private = "reduction";
	  goto do_add;

	do_add:
	  /* Common path for all data-sharing clauses: record the decl
	     in the new context with the flags chosen above.  */
	  decl = OMP_CLAUSE_DECL (c);
	  if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node)
	    {
	      remove = true;
	      break;
	    }
	  omp_add_variable (ctx, decl, flags);
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	      && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    {
	      /* Gimplify the reduction INIT and MERGE expressions inside
		 the new context, so their temporaries stay local to it.  */
	      omp_add_variable (ctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
				GOVD_LOCAL | GOVD_SEEN);
	      gimplify_omp_ctxp = ctx;
	      push_gimplify_context (&gctx);

	      OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = gimple_seq_alloc ();
	      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = gimple_seq_alloc ();

	      gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
				&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
	      pop_gimplify_context
		(gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
	      push_gimplify_context (&gctx);
	      gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
				&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
	      pop_gimplify_context
		(gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
	      OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
	      OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;

	      gimplify_omp_ctxp = outer_ctx;
	    }
	  else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
		   && OMP_CLAUSE_LASTPRIVATE_STMT (c))
	    {
	      gimplify_omp_ctxp = ctx;
	      push_gimplify_context (&gctx);
	      /* Wrap a bare statement in a BIND_EXPR so gimplification
		 has a scope to collect temporaries in.  */
	      if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
		{
		  tree bind = build3 (BIND_EXPR, void_type_node, NULL,
				      NULL, NULL);
		  TREE_SIDE_EFFECTS (bind) = 1;
		  BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
		  OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
		}
	      gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
				&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
	      pop_gimplify_context
		(gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
	      OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;

	      gimplify_omp_ctxp = outer_ctx;
	    }
	  if (notice_outer)
	    goto do_notice;
	  break;

	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_COPYPRIVATE:
	  decl = OMP_CLAUSE_DECL (c);
	  if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node)
	    {
	      remove = true;
	      break;
	    }
	do_notice:
	  /* Let the enclosing context know the decl is used there too.  */
	  if (outer_ctx)
	    omp_notice_variable (outer_ctx, decl, true);
	  /* A worksharing construct may not privatize a variable that an
	     enclosing parallel region already made private.  */
	  if (check_non_private
	      && region_type == ORT_WORKSHARE
	      && omp_check_private (ctx, decl))
	    {
	      error ("%s variable %qE is private in outer context",
		     check_non_private, DECL_NAME (decl));
	      remove = true;
	    }
	  break;

	case OMP_CLAUSE_IF:
	  OMP_CLAUSE_OPERAND (c, 0)
	    = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
	  /* Fall through.  */

	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_NUM_THREADS:
	  /* Clause operands must be reduced to a gimple value.  */
	  if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
			     is_gimple_val, fb_rvalue) == GS_ERROR)
	    remove = true;
	  break;

	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_COLLAPSE:
	  /* No operands and nothing to record.  */
	  break;

	case OMP_CLAUSE_DEFAULT:
	  ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
	  break;

	default:
	  gcc_unreachable ();
	}

      if (remove)
	*list_p = OMP_CLAUSE_CHAIN (c);
      else
	list_p = &OMP_CLAUSE_CHAIN (c);
    }

  gimplify_omp_ctxp = ctx;
}
5834
5835 /* For all variables that were not actually used within the context,
5836 remove PRIVATE, SHARED, and FIRSTPRIVATE clauses. */
5837
5838 static int
5839 gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
5840 {
5841 tree *list_p = (tree *) data;
5842 tree decl = (tree) n->key;
5843 unsigned flags = n->value;
5844 enum omp_clause_code code;
5845 tree clause;
5846 bool private_debug;
5847
5848 if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
5849 return 0;
5850 if ((flags & GOVD_SEEN) == 0)
5851 return 0;
5852 if (flags & GOVD_DEBUG_PRIVATE)
5853 {
5854 gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_PRIVATE);
5855 private_debug = true;
5856 }
5857 else
5858 private_debug
5859 = lang_hooks.decls.omp_private_debug_clause (decl,
5860 !!(flags & GOVD_SHARED));
5861 if (private_debug)
5862 code = OMP_CLAUSE_PRIVATE;
5863 else if (flags & GOVD_SHARED)
5864 {
5865 if (is_global_var (decl))
5866 {
5867 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
5868 while (ctx != NULL)
5869 {
5870 splay_tree_node on
5871 = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
5872 if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
5873 | GOVD_PRIVATE | GOVD_REDUCTION)) != 0)
5874 break;
5875 ctx = ctx->outer_context;
5876 }
5877 if (ctx == NULL)
5878 return 0;
5879 }
5880 code = OMP_CLAUSE_SHARED;
5881 }
5882 else if (flags & GOVD_PRIVATE)
5883 code = OMP_CLAUSE_PRIVATE;
5884 else if (flags & GOVD_FIRSTPRIVATE)
5885 code = OMP_CLAUSE_FIRSTPRIVATE;
5886 else
5887 gcc_unreachable ();
5888
5889 clause = build_omp_clause (input_location, code);
5890 OMP_CLAUSE_DECL (clause) = decl;
5891 OMP_CLAUSE_CHAIN (clause) = *list_p;
5892 if (private_debug)
5893 OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
5894 else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
5895 OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
5896 *list_p = clause;
5897 lang_hooks.decls.omp_finish_clause (clause);
5898
5899 return 0;
5900 }
5901
/* Post-process the clause list in *LIST_P against the data-sharing
   information collected in gimplify_omp_ctxp: drop clauses for variables
   never actually seen, append implicit data-sharing clauses, and pop the
   OpenMP context.  */

static void
gimplify_adjust_omp_clauses (tree *list_p)
{
  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
  tree c, decl;

  while ((c = *list_p) != NULL)
    {
      splay_tree_node n;
      bool remove = false;

      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_SHARED:
	case OMP_CLAUSE_FIRSTPRIVATE:
	  decl = OMP_CLAUSE_DECL (c);
	  n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	  /* Drop the clause entirely if the variable was never seen.  */
	  remove = !(n->value & GOVD_SEEN);
	  if (! remove)
	    {
	      bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
	      if ((n->value & GOVD_DEBUG_PRIVATE)
		  || lang_hooks.decls.omp_private_debug_clause (decl, shared))
		{
		  /* Rewrite the clause as a debug-marked PRIVATE —
		     see OMP_CLAUSE_PRIVATE_DEBUG.  */
		  gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
			      || ((n->value & GOVD_DATA_SHARE_CLASS)
				  == GOVD_PRIVATE));
		  OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
		  OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
		}
	    }
	  break;

	case OMP_CLAUSE_LASTPRIVATE:
	  /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
	     accurately reflect the presence of a FIRSTPRIVATE clause.  */
	  decl = OMP_CLAUSE_DECL (c);
	  n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	  OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
	    = (n->value & GOVD_FIRSTPRIVATE) != 0;
	  break;

	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_COLLAPSE:
	  /* These clauses need no adjustment here.  */
	  break;

	default:
	  gcc_unreachable ();
	}

      if (remove)
	*list_p = OMP_CLAUSE_CHAIN (c);
      else
	list_p = &OMP_CLAUSE_CHAIN (c);
    }

  /* Add in any implicit data sharing.  */
  splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, list_p);

  gimplify_omp_ctxp = ctx->outer_context;
  delete_omp_context (ctx);
}
5974
5975 /* Gimplify the contents of an OMP_PARALLEL statement. This involves
5976 gimplification of the body, as well as scanning the body for used
5977 variables. We need to do this scan now, because variable-sized
5978 decls will be decomposed during gimplification. */
5979
5980 static void
5981 gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
5982 {
5983 tree expr = *expr_p;
5984 gimple g;
5985 gimple_seq body = NULL;
5986 struct gimplify_ctx gctx;
5987
5988 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
5989 OMP_PARALLEL_COMBINED (expr)
5990 ? ORT_COMBINED_PARALLEL
5991 : ORT_PARALLEL);
5992
5993 push_gimplify_context (&gctx);
5994
5995 g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
5996 if (gimple_code (g) == GIMPLE_BIND)
5997 pop_gimplify_context (g);
5998 else
5999 pop_gimplify_context (NULL);
6000
6001 gimplify_adjust_omp_clauses (&OMP_PARALLEL_CLAUSES (expr));
6002
6003 g = gimple_build_omp_parallel (body,
6004 OMP_PARALLEL_CLAUSES (expr),
6005 NULL_TREE, NULL_TREE);
6006 if (OMP_PARALLEL_COMBINED (expr))
6007 gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
6008 gimplify_seq_add_stmt (pre_p, g);
6009 *expr_p = NULL_TREE;
6010 }
6011
6012 /* Gimplify the contents of an OMP_TASK statement. This involves
6013 gimplification of the body, as well as scanning the body for used
6014 variables. We need to do this scan now, because variable-sized
6015 decls will be decomposed during gimplification. */
6016
6017 static void
6018 gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
6019 {
6020 tree expr = *expr_p;
6021 gimple g;
6022 gimple_seq body = NULL;
6023 struct gimplify_ctx gctx;
6024
6025 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p, ORT_TASK);
6026
6027 push_gimplify_context (&gctx);
6028
6029 g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
6030 if (gimple_code (g) == GIMPLE_BIND)
6031 pop_gimplify_context (g);
6032 else
6033 pop_gimplify_context (NULL);
6034
6035 gimplify_adjust_omp_clauses (&OMP_TASK_CLAUSES (expr));
6036
6037 g = gimple_build_omp_task (body,
6038 OMP_TASK_CLAUSES (expr),
6039 NULL_TREE, NULL_TREE,
6040 NULL_TREE, NULL_TREE, NULL_TREE);
6041 gimplify_seq_add_stmt (pre_p, g);
6042 *expr_p = NULL_TREE;
6043 }
6044
/* Gimplify the gross structure of an OMP_FOR statement.  OMP_FOR_INIT,
   OMP_FOR_COND and OMP_FOR_INCR are parallel TREE_VECs, one element per
   collapsed loop dimension; each dimension is normalized so that the
   iteration variable is a gimple register and the increment has the
   shape VAR = VAR +/- STEP.  Returns GS_ALL_DONE on success or
   GS_ERROR if gimplification of any operand failed.  */

static enum gimplify_status
gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
{
  tree for_stmt, decl, var, t;
  enum gimplify_status ret = GS_ALL_DONE;
  enum gimplify_status tret;
  gimple gfor;
  gimple_seq for_body, for_pre_body;
  int i;

  for_stmt = *expr_p;

  gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p,
			     ORT_WORKSHARE);

  /* Handle OMP_FOR_INIT.  */
  for_pre_body = NULL;
  gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
  OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;

  for_body = gimple_seq_alloc ();
  /* The three TREE_VECs must describe the same number of dimensions.  */
  gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
	      == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
  gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
	      == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));
  for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
    {
      t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
      gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
      decl = TREE_OPERAND (t, 0);
      gcc_assert (DECL_P (decl));
      gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
		  || POINTER_TYPE_P (TREE_TYPE (decl)));

      /* Make sure the iteration variable is private.  */
      if (omp_is_private (gimplify_omp_ctxp, decl))
	omp_notice_variable (gimplify_omp_ctxp, decl, true);
      else
	omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);

      /* If DECL is not a gimple register, create a temporary variable to act
	 as an iteration counter.  This is valid, since DECL cannot be
	 modified in the body of the loop.  */
      if (!is_gimple_reg (decl))
	{
	  var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
	  TREE_OPERAND (t, 0) = var;

	  /* Copy the counter back into DECL at the top of the body so
	     user code still sees the right value.  */
	  gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));

	  omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
	}
      else
	var = decl;

      /* Gimplify the initial value into the pre-body.  */
      tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
			    is_gimple_val, fb_rvalue);
      ret = MIN (ret, tret);
      if (ret == GS_ERROR)
	return ret;

      /* Handle OMP_FOR_COND.  */
      t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
      gcc_assert (COMPARISON_CLASS_P (t));
      gcc_assert (TREE_OPERAND (t, 0) == decl);

      tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
			    is_gimple_val, fb_rvalue);
      ret = MIN (ret, tret);

      /* Handle OMP_FOR_INCR.  Rewrite every increment form into
	 VAR = VAR +/- STEP.  */
      t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
      switch (TREE_CODE (t))
	{
	case PREINCREMENT_EXPR:
	case POSTINCREMENT_EXPR:
	  t = build_int_cst (TREE_TYPE (decl), 1);
	  t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
	  t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
	  TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
	  break;

	case PREDECREMENT_EXPR:
	case POSTDECREMENT_EXPR:
	  t = build_int_cst (TREE_TYPE (decl), -1);
	  t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
	  t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
	  TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
	  break;

	case MODIFY_EXPR:
	  gcc_assert (TREE_OPERAND (t, 0) == decl);
	  TREE_OPERAND (t, 0) = var;

	  t = TREE_OPERAND (t, 1);
	  switch (TREE_CODE (t))
	    {
	    case PLUS_EXPR:
	      if (TREE_OPERAND (t, 1) == decl)
		{
		  /* STEP + DECL: commute so the variable is operand 0.  */
		  TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
		  TREE_OPERAND (t, 0) = var;
		  break;
		}

	      /* Fallthru.  */
	    case MINUS_EXPR:
	    case POINTER_PLUS_EXPR:
	      gcc_assert (TREE_OPERAND (t, 0) == decl);
	      TREE_OPERAND (t, 0) = var;
	      break;
	    default:
	      gcc_unreachable ();
	    }

	  tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
				is_gimple_val, fb_rvalue);
	  ret = MIN (ret, tret);
	  break;

	default:
	  gcc_unreachable ();
	}

      /* When a temporary replaced DECL, or several dimensions are
	 collapsed, a lastprivate clause for DECL must recompute DECL's
	 final value from the (already normalized) increment.  */
      if (var != decl || TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1)
	{
	  tree c;
	  for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
	    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
		&& OMP_CLAUSE_DECL (c) == decl
		&& OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
	      {
		t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
		gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
		gcc_assert (TREE_OPERAND (t, 0) == var);
		t = TREE_OPERAND (t, 1);
		gcc_assert (TREE_CODE (t) == PLUS_EXPR
			    || TREE_CODE (t) == MINUS_EXPR
			    || TREE_CODE (t) == POINTER_PLUS_EXPR);
		gcc_assert (TREE_OPERAND (t, 0) == var);
		t = build2 (TREE_CODE (t), TREE_TYPE (decl), decl,
			    TREE_OPERAND (t, 1));
		gimplify_assign (decl, t,
				 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
	      }
	}
    }

  gimplify_and_add (OMP_FOR_BODY (for_stmt), &for_body);

  gimplify_adjust_omp_clauses (&OMP_FOR_CLAUSES (for_stmt));

  /* Build the GIMPLE_OMP_FOR and copy the normalized index, bound and
     step expressions into its per-dimension operand slots.  */
  gfor = gimple_build_omp_for (for_body, OMP_FOR_CLAUSES (for_stmt),
			       TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
			       for_pre_body);

  for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
    {
      t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
      gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
      gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
      t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
      gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
      gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
      t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
      gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
    }

  gimplify_seq_add_stmt (pre_p, gfor);
  return ret == GS_ALL_DONE ? GS_ALL_DONE : GS_ERROR;
}
6218
6219 /* Gimplify the gross structure of other OpenMP worksharing constructs.
6220 In particular, OMP_SECTIONS and OMP_SINGLE. */
6221
6222 static void
6223 gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
6224 {
6225 tree expr = *expr_p;
6226 gimple stmt;
6227 gimple_seq body = NULL;
6228
6229 gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ORT_WORKSHARE);
6230 gimplify_and_add (OMP_BODY (expr), &body);
6231 gimplify_adjust_omp_clauses (&OMP_CLAUSES (expr));
6232
6233 if (TREE_CODE (expr) == OMP_SECTIONS)
6234 stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
6235 else if (TREE_CODE (expr) == OMP_SINGLE)
6236 stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
6237 else
6238 gcc_unreachable ();
6239
6240 gimplify_seq_add_stmt (pre_p, stmt);
6241 }
6242
6243 /* A subroutine of gimplify_omp_atomic. The front end is supposed to have
6244 stabilized the lhs of the atomic operation as *ADDR. Return true if
6245 EXPR is this stabilized form. */
6246
6247 static bool
6248 goa_lhs_expr_p (tree expr, tree addr)
6249 {
6250 /* Also include casts to other type variants. The C front end is fond
6251 of adding these for e.g. volatile variables. This is like
6252 STRIP_TYPE_NOPS but includes the main variant lookup. */
6253 STRIP_USELESS_TYPE_CONVERSION (expr);
6254
6255 if (TREE_CODE (expr) == INDIRECT_REF)
6256 {
6257 expr = TREE_OPERAND (expr, 0);
6258 while (expr != addr
6259 && (CONVERT_EXPR_P (expr)
6260 || TREE_CODE (expr) == NON_LVALUE_EXPR)
6261 && TREE_CODE (expr) == TREE_CODE (addr)
6262 && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr)))
6263 {
6264 expr = TREE_OPERAND (expr, 0);
6265 addr = TREE_OPERAND (addr, 0);
6266 }
6267 if (expr == addr)
6268 return true;
6269 return (TREE_CODE (addr) == ADDR_EXPR
6270 && TREE_CODE (expr) == ADDR_EXPR
6271 && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
6272 }
6273 if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
6274 return true;
6275 return false;
6276 }
6277
/* Walk *EXPR_P and replace
   appearances of *LHS_ADDR with LHS_VAR.  If an expression does not involve
   the lhs, evaluate it into a temporary.  Return 1 if the lhs appeared as
   a subexpression, 0 if it did not, or -1 if an error was encountered.  */

static int
goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
		    tree lhs_var)
{
  tree expr = *expr_p;
  int saw_lhs;

  /* The whole expression is the lhs itself: substitute the loaded
     temporary for it.  */
  if (goa_lhs_expr_p (expr, lhs_addr))
    {
      *expr_p = lhs_var;
      return 1;
    }
  /* Already a gimple value; nothing to stabilize.  */
  if (is_gimple_val (expr))
    return 0;

  /* Recurse into operands, OR-ing together whether any of them
     contained the lhs.  */
  saw_lhs = 0;
  switch (TREE_CODE_CLASS (TREE_CODE (expr)))
    {
    case tcc_binary:
    case tcc_comparison:
      saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
				     lhs_var);
      /* FALLTHRU: operand 0 of a binary expression is handled by the
	 unary code below.  */
    case tcc_unary:
      saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
				     lhs_var);
      break;
    case tcc_expression:
      switch (TREE_CODE (expr))
	{
	case TRUTH_ANDIF_EXPR:
	case TRUTH_ORIF_EXPR:
	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
					 lhs_addr, lhs_var);
	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
					 lhs_addr, lhs_var);
	  break;
	default:
	  break;
	}
      break;
    default:
      break;
    }

  /* The lhs occurs nowhere in EXPR: evaluate EXPR into a temporary so
     it is computed once, before the atomic load/store pair.  */
  if (saw_lhs == 0)
    {
      enum gimplify_status gs;
      gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
      if (gs != GS_ALL_DONE)
	saw_lhs = -1;
    }

  return saw_lhs;
}
6337
6338
6339 /* Gimplify an OMP_ATOMIC statement. */
6340
6341 static enum gimplify_status
6342 gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
6343 {
6344 tree addr = TREE_OPERAND (*expr_p, 0);
6345 tree rhs = TREE_OPERAND (*expr_p, 1);
6346 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
6347 tree tmp_load;
6348
6349 tmp_load = create_tmp_reg (type, NULL);
6350 if (goa_stabilize_expr (&rhs, pre_p, addr, tmp_load) < 0)
6351 return GS_ERROR;
6352
6353 if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
6354 != GS_ALL_DONE)
6355 return GS_ERROR;
6356
6357 gimplify_seq_add_stmt (pre_p, gimple_build_omp_atomic_load (tmp_load, addr));
6358 if (gimplify_expr (&rhs, pre_p, NULL, is_gimple_val, fb_rvalue)
6359 != GS_ALL_DONE)
6360 return GS_ERROR;
6361 gimplify_seq_add_stmt (pre_p, gimple_build_omp_atomic_store (rhs));
6362 *expr_p = NULL;
6363
6364 return GS_ALL_DONE;
6365 }
6366
6367
6368 /* Converts the GENERIC expression tree *EXPR_P to GIMPLE. If the
6369 expression produces a value to be used as an operand inside a GIMPLE
6370 statement, the value will be stored back in *EXPR_P. This value will
6371 be a tree of class tcc_declaration, tcc_constant, tcc_reference or
6372 an SSA_NAME. The corresponding sequence of GIMPLE statements is
6373 emitted in PRE_P and POST_P.
6374
6375 Additionally, this process may overwrite parts of the input
6376 expression during gimplification. Ideally, it should be
6377 possible to do non-destructive gimplification.
6378
6379 EXPR_P points to the GENERIC expression to convert to GIMPLE. If
6380 the expression needs to evaluate to a value to be used as
6381 an operand in a GIMPLE statement, this value will be stored in
6382 *EXPR_P on exit. This happens when the caller specifies one
6383 of fb_lvalue or fb_rvalue fallback flags.
6384
6385 PRE_P will contain the sequence of GIMPLE statements corresponding
6386 to the evaluation of EXPR and all the side-effects that must
6387 be executed before the main expression. On exit, the last
6388 statement of PRE_P is the core statement being gimplified. For
6389 instance, when gimplifying 'if (++a)' the last statement in
6390 PRE_P will be 'if (t.1)' where t.1 is the result of
6391 pre-incrementing 'a'.
6392
6393 POST_P will contain the sequence of GIMPLE statements corresponding
6394 to the evaluation of all the side-effects that must be executed
6395 after the main expression. If this is NULL, the post
6396 side-effects are stored at the end of PRE_P.
6397
6398 The reason why the output is split in two is to handle post
6399 side-effects explicitly. In some cases, an expression may have
6400 inner and outer post side-effects which need to be emitted in
6401 an order different from the one given by the recursive
6402 traversal. For instance, for the expression (*p--)++ the post
6403 side-effects of '--' must actually occur *after* the post
6404 side-effects of '++'. However, gimplification will first visit
6405 the inner expression, so if a separate POST sequence was not
6406 used, the resulting sequence would be:
6407
6408 1 t.1 = *p
6409 2 p = p - 1
6410 3 t.2 = t.1 + 1
6411 4 *p = t.2
6412
6413 However, the post-decrement operation in line #2 must not be
6414 evaluated until after the store to *p at line #4, so the
6415 correct sequence should be:
6416
6417 1 t.1 = *p
6418 2 t.2 = t.1 + 1
6419 3 *p = t.2
6420 4 p = p - 1
6421
6422 So, by specifying a separate post queue, it is possible
6423 to emit the post side-effects in the correct order.
6424 If POST_P is NULL, an internal queue will be used. Before
6425 returning to the caller, the sequence POST_P is appended to
6426 the main output sequence PRE_P.
6427
6428 GIMPLE_TEST_F points to a function that takes a tree T and
6429 returns nonzero if T is in the GIMPLE form requested by the
   caller.  The GIMPLE predicates are in gimple.c.
6431
6432 FALLBACK tells the function what sort of a temporary we want if
6433 gimplification cannot produce an expression that complies with
6434 GIMPLE_TEST_F.
6435
6436 fb_none means that no temporary should be generated
6437 fb_rvalue means that an rvalue is OK to generate
6438 fb_lvalue means that an lvalue is OK to generate
6439 fb_either means that either is OK, but an lvalue is preferable.
6440 fb_mayfail means that gimplification may fail (in which case
6441 GS_ERROR will be returned)
6442
6443 The return value is either GS_ERROR or GS_ALL_DONE, since this
6444 function iterates until EXPR is completely gimplified or an error
6445 occurs. */
6446
6447 enum gimplify_status
6448 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
6449 bool (*gimple_test_f) (tree), fallback_t fallback)
6450 {
6451 tree tmp;
6452 gimple_seq internal_pre = NULL;
6453 gimple_seq internal_post = NULL;
6454 tree save_expr;
6455 bool is_statement;
6456 location_t saved_location;
6457 enum gimplify_status ret;
6458 gimple_stmt_iterator pre_last_gsi, post_last_gsi;
6459
6460 save_expr = *expr_p;
6461 if (save_expr == NULL_TREE)
6462 return GS_ALL_DONE;
6463
6464 /* If we are gimplifying a top-level statement, PRE_P must be valid. */
6465 is_statement = gimple_test_f == is_gimple_stmt;
6466 if (is_statement)
6467 gcc_assert (pre_p);
6468
6469 /* Consistency checks. */
6470 if (gimple_test_f == is_gimple_reg)
6471 gcc_assert (fallback & (fb_rvalue | fb_lvalue));
6472 else if (gimple_test_f == is_gimple_val
6473 || gimple_test_f == is_gimple_call_addr
6474 || gimple_test_f == is_gimple_condexpr
6475 || gimple_test_f == is_gimple_mem_rhs
6476 || gimple_test_f == is_gimple_mem_rhs_or_call
6477 || gimple_test_f == is_gimple_reg_rhs
6478 || gimple_test_f == is_gimple_reg_rhs_or_call
6479 || gimple_test_f == is_gimple_asm_val)
6480 gcc_assert (fallback & fb_rvalue);
6481 else if (gimple_test_f == is_gimple_min_lval
6482 || gimple_test_f == is_gimple_lvalue)
6483 gcc_assert (fallback & fb_lvalue);
6484 else if (gimple_test_f == is_gimple_addressable)
6485 gcc_assert (fallback & fb_either);
6486 else if (gimple_test_f == is_gimple_stmt)
6487 gcc_assert (fallback == fb_none);
6488 else
6489 {
6490 /* We should have recognized the GIMPLE_TEST_F predicate to
6491 know what kind of fallback to use in case a temporary is
6492 needed to hold the value or address of *EXPR_P. */
6493 gcc_unreachable ();
6494 }
6495
6496 /* We used to check the predicate here and return immediately if it
6497 succeeds. This is wrong; the design is for gimplification to be
6498 idempotent, and for the predicates to only test for valid forms, not
6499 whether they are fully simplified. */
6500 if (pre_p == NULL)
6501 pre_p = &internal_pre;
6502
6503 if (post_p == NULL)
6504 post_p = &internal_post;
6505
6506 /* Remember the last statements added to PRE_P and POST_P. Every
6507 new statement added by the gimplification helpers needs to be
6508 annotated with location information. To centralize the
6509 responsibility, we remember the last statement that had been
6510 added to both queues before gimplifying *EXPR_P. If
6511 gimplification produces new statements in PRE_P and POST_P, those
6512 statements will be annotated with the same location information
6513 as *EXPR_P. */
6514 pre_last_gsi = gsi_last (*pre_p);
6515 post_last_gsi = gsi_last (*post_p);
6516
6517 saved_location = input_location;
6518 if (save_expr != error_mark_node
6519 && EXPR_HAS_LOCATION (*expr_p))
6520 input_location = EXPR_LOCATION (*expr_p);
6521
6522 /* Loop over the specific gimplifiers until the toplevel node
6523 remains the same. */
6524 do
6525 {
6526 /* Strip away as many useless type conversions as possible
6527 at the toplevel. */
6528 STRIP_USELESS_TYPE_CONVERSION (*expr_p);
6529
6530 /* Remember the expr. */
6531 save_expr = *expr_p;
6532
6533 /* Die, die, die, my darling. */
6534 if (save_expr == error_mark_node
6535 || (TREE_TYPE (save_expr)
6536 && TREE_TYPE (save_expr) == error_mark_node))
6537 {
6538 ret = GS_ERROR;
6539 break;
6540 }
6541
6542 /* Do any language-specific gimplification. */
6543 ret = ((enum gimplify_status)
6544 lang_hooks.gimplify_expr (expr_p, pre_p, post_p));
6545 if (ret == GS_OK)
6546 {
6547 if (*expr_p == NULL_TREE)
6548 break;
6549 if (*expr_p != save_expr)
6550 continue;
6551 }
6552 else if (ret != GS_UNHANDLED)
6553 break;
6554
6555 ret = GS_OK;
6556 switch (TREE_CODE (*expr_p))
6557 {
6558 /* First deal with the special cases. */
6559
6560 case POSTINCREMENT_EXPR:
6561 case POSTDECREMENT_EXPR:
6562 case PREINCREMENT_EXPR:
6563 case PREDECREMENT_EXPR:
6564 ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
6565 fallback != fb_none);
6566 break;
6567
6568 case ARRAY_REF:
6569 case ARRAY_RANGE_REF:
6570 case REALPART_EXPR:
6571 case IMAGPART_EXPR:
6572 case COMPONENT_REF:
6573 case VIEW_CONVERT_EXPR:
6574 ret = gimplify_compound_lval (expr_p, pre_p, post_p,
6575 fallback ? fallback : fb_rvalue);
6576 break;
6577
6578 case COND_EXPR:
6579 ret = gimplify_cond_expr (expr_p, pre_p, fallback);
6580
6581 /* C99 code may assign to an array in a structure value of a
6582 conditional expression, and this has undefined behavior
6583 only on execution, so create a temporary if an lvalue is
6584 required. */
6585 if (fallback == fb_lvalue)
6586 {
6587 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
6588 mark_addressable (*expr_p);
6589 }
6590 break;
6591
6592 case CALL_EXPR:
6593 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
6594
6595 /* C99 code may assign to an array in a structure returned
6596 from a function, and this has undefined behavior only on
6597 execution, so create a temporary if an lvalue is
6598 required. */
6599 if (fallback == fb_lvalue)
6600 {
6601 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
6602 mark_addressable (*expr_p);
6603 }
6604 break;
6605
6606 case TREE_LIST:
6607 gcc_unreachable ();
6608
6609 case COMPOUND_EXPR:
6610 ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
6611 break;
6612
6613 case COMPOUND_LITERAL_EXPR:
6614 ret = gimplify_compound_literal_expr (expr_p, pre_p);
6615 break;
6616
6617 case MODIFY_EXPR:
6618 case INIT_EXPR:
6619 ret = gimplify_modify_expr (expr_p, pre_p, post_p,
6620 fallback != fb_none);
6621 break;
6622
6623 case TRUTH_ANDIF_EXPR:
6624 case TRUTH_ORIF_EXPR:
6625 /* Pass the source location of the outer expression. */
6626 ret = gimplify_boolean_expr (expr_p, saved_location);
6627 break;
6628
6629 case TRUTH_NOT_EXPR:
6630 if (TREE_CODE (TREE_TYPE (*expr_p)) != BOOLEAN_TYPE)
6631 {
6632 tree type = TREE_TYPE (*expr_p);
6633 *expr_p = fold_convert (type, gimple_boolify (*expr_p));
6634 ret = GS_OK;
6635 break;
6636 }
6637
6638 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
6639 is_gimple_val, fb_rvalue);
6640 recalculate_side_effects (*expr_p);
6641 break;
6642
6643 case ADDR_EXPR:
6644 ret = gimplify_addr_expr (expr_p, pre_p, post_p);
6645 break;
6646
6647 case VA_ARG_EXPR:
6648 ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
6649 break;
6650
6651 CASE_CONVERT:
6652 if (IS_EMPTY_STMT (*expr_p))
6653 {
6654 ret = GS_ALL_DONE;
6655 break;
6656 }
6657
6658 if (VOID_TYPE_P (TREE_TYPE (*expr_p))
6659 || fallback == fb_none)
6660 {
6661 /* Just strip a conversion to void (or in void context) and
6662 try again. */
6663 *expr_p = TREE_OPERAND (*expr_p, 0);
6664 break;
6665 }
6666
6667 ret = gimplify_conversion (expr_p);
6668 if (ret == GS_ERROR)
6669 break;
6670 if (*expr_p != save_expr)
6671 break;
6672 /* FALLTHRU */
6673
6674 case FIX_TRUNC_EXPR:
6675 /* unary_expr: ... | '(' cast ')' val | ... */
6676 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
6677 is_gimple_val, fb_rvalue);
6678 recalculate_side_effects (*expr_p);
6679 break;
6680
6681 case INDIRECT_REF:
6682 *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
6683 if (*expr_p != save_expr)
6684 break;
6685 /* else fall through. */
6686 case ALIGN_INDIRECT_REF:
6687 case MISALIGNED_INDIRECT_REF:
6688 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
6689 is_gimple_reg, fb_rvalue);
6690 recalculate_side_effects (*expr_p);
6691 break;
6692
6693 /* Constants need not be gimplified. */
6694 case INTEGER_CST:
6695 case REAL_CST:
6696 case FIXED_CST:
6697 case STRING_CST:
6698 case COMPLEX_CST:
6699 case VECTOR_CST:
6700 ret = GS_ALL_DONE;
6701 break;
6702
6703 case CONST_DECL:
6704 /* If we require an lvalue, such as for ADDR_EXPR, retain the
6705 CONST_DECL node. Otherwise the decl is replaceable by its
6706 value. */
6707 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
6708 if (fallback & fb_lvalue)
6709 ret = GS_ALL_DONE;
6710 else
6711 *expr_p = DECL_INITIAL (*expr_p);
6712 break;
6713
6714 case DECL_EXPR:
6715 ret = gimplify_decl_expr (expr_p, pre_p);
6716 break;
6717
6718 case BIND_EXPR:
6719 ret = gimplify_bind_expr (expr_p, pre_p);
6720 break;
6721
6722 case LOOP_EXPR:
6723 ret = gimplify_loop_expr (expr_p, pre_p);
6724 break;
6725
6726 case SWITCH_EXPR:
6727 ret = gimplify_switch_expr (expr_p, pre_p);
6728 break;
6729
6730 case EXIT_EXPR:
6731 ret = gimplify_exit_expr (expr_p);
6732 break;
6733
6734 case GOTO_EXPR:
6735 /* If the target is not LABEL, then it is a computed jump
6736 and the target needs to be gimplified. */
6737 if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
6738 {
6739 ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
6740 NULL, is_gimple_val, fb_rvalue);
6741 if (ret == GS_ERROR)
6742 break;
6743 }
6744 gimplify_seq_add_stmt (pre_p,
6745 gimple_build_goto (GOTO_DESTINATION (*expr_p)));
6746 break;
6747
6748 case PREDICT_EXPR:
6749 gimplify_seq_add_stmt (pre_p,
6750 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
6751 PREDICT_EXPR_OUTCOME (*expr_p)));
6752 ret = GS_ALL_DONE;
6753 break;
6754
6755 case LABEL_EXPR:
6756 ret = GS_ALL_DONE;
6757 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
6758 == current_function_decl);
6759 gimplify_seq_add_stmt (pre_p,
6760 gimple_build_label (LABEL_EXPR_LABEL (*expr_p)));
6761 break;
6762
6763 case CASE_LABEL_EXPR:
6764 ret = gimplify_case_label_expr (expr_p, pre_p);
6765 break;
6766
6767 case RETURN_EXPR:
6768 ret = gimplify_return_expr (*expr_p, pre_p);
6769 break;
6770
6771 case CONSTRUCTOR:
6772 /* Don't reduce this in place; let gimplify_init_constructor work its
	     magic.  But if we're just elaborating this for side effects, just
6774 gimplify any element that has side-effects. */
6775 if (fallback == fb_none)
6776 {
6777 unsigned HOST_WIDE_INT ix;
6778 constructor_elt *ce;
6779 tree temp = NULL_TREE;
6780 for (ix = 0;
6781 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (*expr_p),
6782 ix, ce);
6783 ix++)
6784 if (TREE_SIDE_EFFECTS (ce->value))
6785 append_to_statement_list (ce->value, &temp);
6786
6787 *expr_p = temp;
6788 ret = GS_OK;
6789 }
6790 /* C99 code may assign to an array in a constructed
6791 structure or union, and this has undefined behavior only
6792 on execution, so create a temporary if an lvalue is
6793 required. */
6794 else if (fallback == fb_lvalue)
6795 {
6796 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
6797 mark_addressable (*expr_p);
6798 }
6799 else
6800 ret = GS_ALL_DONE;
6801 break;
6802
6803 /* The following are special cases that are not handled by the
6804 original GIMPLE grammar. */
6805
6806 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
6807 eliminated. */
6808 case SAVE_EXPR:
6809 ret = gimplify_save_expr (expr_p, pre_p, post_p);
6810 break;
6811
6812 case BIT_FIELD_REF:
6813 {
6814 enum gimplify_status r0, r1, r2;
6815
6816 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
6817 post_p, is_gimple_lvalue, fb_either);
6818 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
6819 post_p, is_gimple_val, fb_rvalue);
6820 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
6821 post_p, is_gimple_val, fb_rvalue);
6822 recalculate_side_effects (*expr_p);
6823
6824 ret = MIN (r0, MIN (r1, r2));
6825 }
6826 break;
6827
6828 case TARGET_MEM_REF:
6829 {
6830 enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;
6831
6832 if (TMR_SYMBOL (*expr_p))
6833 r0 = gimplify_expr (&TMR_SYMBOL (*expr_p), pre_p,
6834 post_p, is_gimple_lvalue, fb_either);
6835 else if (TMR_BASE (*expr_p))
6836 r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
6837 post_p, is_gimple_val, fb_either);
6838 if (TMR_INDEX (*expr_p))
6839 r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
6840 post_p, is_gimple_val, fb_rvalue);
6841 /* TMR_STEP and TMR_OFFSET are always integer constants. */
6842 ret = MIN (r0, r1);
6843 }
6844 break;
6845
6846 case NON_LVALUE_EXPR:
6847 /* This should have been stripped above. */
6848 gcc_unreachable ();
6849
6850 case ASM_EXPR:
6851 ret = gimplify_asm_expr (expr_p, pre_p, post_p);
6852 break;
6853
6854 case TRY_FINALLY_EXPR:
6855 case TRY_CATCH_EXPR:
6856 {
6857 gimple_seq eval, cleanup;
6858 gimple try_;
6859
6860 eval = cleanup = NULL;
6861 gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
6862 gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
6863 /* Don't create bogus GIMPLE_TRY with empty cleanup. */
6864 if (gimple_seq_empty_p (cleanup))
6865 {
6866 gimple_seq_add_seq (pre_p, eval);
6867 ret = GS_ALL_DONE;
6868 break;
6869 }
6870 try_ = gimple_build_try (eval, cleanup,
6871 TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
6872 ? GIMPLE_TRY_FINALLY
6873 : GIMPLE_TRY_CATCH);
6874 if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
6875 gimple_try_set_catch_is_cleanup (try_,
6876 TRY_CATCH_IS_CLEANUP (*expr_p));
6877 gimplify_seq_add_stmt (pre_p, try_);
6878 ret = GS_ALL_DONE;
6879 break;
6880 }
6881
6882 case CLEANUP_POINT_EXPR:
6883 ret = gimplify_cleanup_point_expr (expr_p, pre_p);
6884 break;
6885
6886 case TARGET_EXPR:
6887 ret = gimplify_target_expr (expr_p, pre_p, post_p);
6888 break;
6889
6890 case CATCH_EXPR:
6891 {
6892 gimple c;
6893 gimple_seq handler = NULL;
6894 gimplify_and_add (CATCH_BODY (*expr_p), &handler);
6895 c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
6896 gimplify_seq_add_stmt (pre_p, c);
6897 ret = GS_ALL_DONE;
6898 break;
6899 }
6900
6901 case EH_FILTER_EXPR:
6902 {
6903 gimple ehf;
6904 gimple_seq failure = NULL;
6905
6906 gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
6907 ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
6908 gimple_set_no_warning (ehf, TREE_NO_WARNING (*expr_p));
6909 gimplify_seq_add_stmt (pre_p, ehf);
6910 ret = GS_ALL_DONE;
6911 break;
6912 }
6913
6914 case OBJ_TYPE_REF:
6915 {
6916 enum gimplify_status r0, r1;
6917 r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
6918 post_p, is_gimple_val, fb_rvalue);
6919 r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
6920 post_p, is_gimple_val, fb_rvalue);
6921 TREE_SIDE_EFFECTS (*expr_p) = 0;
6922 ret = MIN (r0, r1);
6923 }
6924 break;
6925
6926 case LABEL_DECL:
6927 /* We get here when taking the address of a label. We mark
6928 the label as "forced"; meaning it can never be removed and
6929 it is a potential target for any computed goto. */
6930 FORCED_LABEL (*expr_p) = 1;
6931 ret = GS_ALL_DONE;
6932 break;
6933
6934 case STATEMENT_LIST:
6935 ret = gimplify_statement_list (expr_p, pre_p);
6936 break;
6937
6938 case WITH_SIZE_EXPR:
6939 {
6940 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
6941 post_p == &internal_post ? NULL : post_p,
6942 gimple_test_f, fallback);
6943 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
6944 is_gimple_val, fb_rvalue);
6945 }
6946 break;
6947
6948 case VAR_DECL:
6949 case PARM_DECL:
6950 ret = gimplify_var_or_parm_decl (expr_p);
6951 break;
6952
6953 case RESULT_DECL:
6954 /* When within an OpenMP context, notice uses of variables. */
6955 if (gimplify_omp_ctxp)
6956 omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
6957 ret = GS_ALL_DONE;
6958 break;
6959
6960 case SSA_NAME:
6961 /* Allow callbacks into the gimplifier during optimization. */
6962 ret = GS_ALL_DONE;
6963 break;
6964
6965 case OMP_PARALLEL:
6966 gimplify_omp_parallel (expr_p, pre_p);
6967 ret = GS_ALL_DONE;
6968 break;
6969
6970 case OMP_TASK:
6971 gimplify_omp_task (expr_p, pre_p);
6972 ret = GS_ALL_DONE;
6973 break;
6974
6975 case OMP_FOR:
6976 ret = gimplify_omp_for (expr_p, pre_p);
6977 break;
6978
6979 case OMP_SECTIONS:
6980 case OMP_SINGLE:
6981 gimplify_omp_workshare (expr_p, pre_p);
6982 ret = GS_ALL_DONE;
6983 break;
6984
6985 case OMP_SECTION:
6986 case OMP_MASTER:
6987 case OMP_ORDERED:
6988 case OMP_CRITICAL:
6989 {
6990 gimple_seq body = NULL;
6991 gimple g;
6992
6993 gimplify_and_add (OMP_BODY (*expr_p), &body);
6994 switch (TREE_CODE (*expr_p))
6995 {
6996 case OMP_SECTION:
6997 g = gimple_build_omp_section (body);
6998 break;
6999 case OMP_MASTER:
7000 g = gimple_build_omp_master (body);
7001 break;
7002 case OMP_ORDERED:
7003 g = gimple_build_omp_ordered (body);
7004 break;
7005 case OMP_CRITICAL:
7006 g = gimple_build_omp_critical (body,
7007 OMP_CRITICAL_NAME (*expr_p));
7008 break;
7009 default:
7010 gcc_unreachable ();
7011 }
7012 gimplify_seq_add_stmt (pre_p, g);
7013 ret = GS_ALL_DONE;
7014 break;
7015 }
7016
7017 case OMP_ATOMIC:
7018 ret = gimplify_omp_atomic (expr_p, pre_p);
7019 break;
7020
7021 case POINTER_PLUS_EXPR:
7022 /* Convert ((type *)A)+offset into &A->field_of_type_and_offset.
7023 The second is gimple immediate saving a need for extra statement.
7024 */
7025 if (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == INTEGER_CST
7026 && (tmp = maybe_fold_offset_to_address
7027 (EXPR_LOCATION (*expr_p),
7028 TREE_OPERAND (*expr_p, 0), TREE_OPERAND (*expr_p, 1),
7029 TREE_TYPE (*expr_p))))
7030 {
7031 *expr_p = tmp;
7032 break;
7033 }
7034 /* Convert (void *)&a + 4 into (void *)&a[1]. */
7035 if (TREE_CODE (TREE_OPERAND (*expr_p, 0)) == NOP_EXPR
7036 && TREE_CODE (TREE_OPERAND (*expr_p, 1)) == INTEGER_CST
7037 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*expr_p,
7038 0),0)))
7039 && (tmp = maybe_fold_offset_to_address
7040 (EXPR_LOCATION (*expr_p),
7041 TREE_OPERAND (TREE_OPERAND (*expr_p, 0), 0),
7042 TREE_OPERAND (*expr_p, 1),
7043 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*expr_p, 0),
7044 0)))))
7045 {
7046 *expr_p = fold_convert (TREE_TYPE (*expr_p), tmp);
7047 break;
7048 }
7049 /* FALLTHRU */
7050
7051 default:
7052 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
7053 {
7054 case tcc_comparison:
7055 /* Handle comparison of objects of non scalar mode aggregates
7056 with a call to memcmp. It would be nice to only have to do
7057 this for variable-sized objects, but then we'd have to allow
7058 the same nest of reference nodes we allow for MODIFY_EXPR and
7059 that's too complex.
7060
7061 Compare scalar mode aggregates as scalar mode values. Using
7062 memcmp for them would be very inefficient at best, and is
7063 plain wrong if bitfields are involved. */
7064 {
7065 tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
7066
7067 if (!AGGREGATE_TYPE_P (type))
7068 goto expr_2;
7069 else if (TYPE_MODE (type) != BLKmode)
7070 ret = gimplify_scalar_mode_aggregate_compare (expr_p);
7071 else
7072 ret = gimplify_variable_sized_compare (expr_p);
7073
7074 break;
7075 }
7076
7077 /* If *EXPR_P does not need to be special-cased, handle it
7078 according to its class. */
7079 case tcc_unary:
7080 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
7081 post_p, is_gimple_val, fb_rvalue);
7082 break;
7083
7084 case tcc_binary:
7085 expr_2:
7086 {
7087 enum gimplify_status r0, r1;
7088
7089 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
7090 post_p, is_gimple_val, fb_rvalue);
7091 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
7092 post_p, is_gimple_val, fb_rvalue);
7093
7094 ret = MIN (r0, r1);
7095 break;
7096 }
7097
7098 case tcc_declaration:
7099 case tcc_constant:
7100 ret = GS_ALL_DONE;
7101 goto dont_recalculate;
7102
7103 default:
7104 gcc_assert (TREE_CODE (*expr_p) == TRUTH_AND_EXPR
7105 || TREE_CODE (*expr_p) == TRUTH_OR_EXPR
7106 || TREE_CODE (*expr_p) == TRUTH_XOR_EXPR);
7107 goto expr_2;
7108 }
7109
7110 recalculate_side_effects (*expr_p);
7111
7112 dont_recalculate:
7113 break;
7114 }
7115
7116 /* If we replaced *expr_p, gimplify again. */
7117 if (ret == GS_OK && (*expr_p == NULL || *expr_p == save_expr))
7118 ret = GS_ALL_DONE;
7119 }
7120 while (ret == GS_OK);
7121
7122 /* If we encountered an error_mark somewhere nested inside, either
7123 stub out the statement or propagate the error back out. */
7124 if (ret == GS_ERROR)
7125 {
7126 if (is_statement)
7127 *expr_p = NULL;
7128 goto out;
7129 }
7130
7131 /* This was only valid as a return value from the langhook, which
7132 we handled. Make sure it doesn't escape from any other context. */
7133 gcc_assert (ret != GS_UNHANDLED);
7134
7135 if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
7136 {
7137 /* We aren't looking for a value, and we don't have a valid
7138 statement. If it doesn't have side-effects, throw it away. */
7139 if (!TREE_SIDE_EFFECTS (*expr_p))
7140 *expr_p = NULL;
7141 else if (!TREE_THIS_VOLATILE (*expr_p))
7142 {
7143 /* This is probably a _REF that contains something nested that
7144 has side effects. Recurse through the operands to find it. */
7145 enum tree_code code = TREE_CODE (*expr_p);
7146
7147 switch (code)
7148 {
7149 case COMPONENT_REF:
7150 case REALPART_EXPR:
7151 case IMAGPART_EXPR:
7152 case VIEW_CONVERT_EXPR:
7153 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
7154 gimple_test_f, fallback);
7155 break;
7156
7157 case ARRAY_REF:
7158 case ARRAY_RANGE_REF:
7159 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
7160 gimple_test_f, fallback);
7161 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
7162 gimple_test_f, fallback);
7163 break;
7164
7165 default:
7166 /* Anything else with side-effects must be converted to
7167 a valid statement before we get here. */
7168 gcc_unreachable ();
7169 }
7170
7171 *expr_p = NULL;
7172 }
7173 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
7174 && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode)
7175 {
7176 /* Historically, the compiler has treated a bare reference
7177 to a non-BLKmode volatile lvalue as forcing a load. */
7178 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
7179
7180 /* Normally, we do not want to create a temporary for a
7181 TREE_ADDRESSABLE type because such a type should not be
7182 copied by bitwise-assignment. However, we make an
7183 exception here, as all we are doing here is ensuring that
7184 we read the bytes that make up the type. We use
7185 create_tmp_var_raw because create_tmp_var will abort when
7186 given a TREE_ADDRESSABLE type. */
7187 tree tmp = create_tmp_var_raw (type, "vol");
7188 gimple_add_tmp_var (tmp);
7189 gimplify_assign (tmp, *expr_p, pre_p);
7190 *expr_p = NULL;
7191 }
7192 else
7193 /* We can't do anything useful with a volatile reference to
7194 an incomplete type, so just throw it away. Likewise for
7195 a BLKmode type, since any implicit inner load should
7196 already have been turned into an explicit one by the
7197 gimplification process. */
7198 *expr_p = NULL;
7199 }
7200
7201 /* If we are gimplifying at the statement level, we're done. Tack
7202 everything together and return. */
7203 if (fallback == fb_none || is_statement)
7204 {
7205 /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
7206 it out for GC to reclaim it. */
7207 *expr_p = NULL_TREE;
7208
7209 if (!gimple_seq_empty_p (internal_pre)
7210 || !gimple_seq_empty_p (internal_post))
7211 {
7212 gimplify_seq_add_seq (&internal_pre, internal_post);
7213 gimplify_seq_add_seq (pre_p, internal_pre);
7214 }
7215
7216 /* The result of gimplifying *EXPR_P is going to be the last few
7217 statements in *PRE_P and *POST_P. Add location information
7218 to all the statements that were added by the gimplification
7219 helpers. */
7220 if (!gimple_seq_empty_p (*pre_p))
7221 annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);
7222
7223 if (!gimple_seq_empty_p (*post_p))
7224 annotate_all_with_location_after (*post_p, post_last_gsi,
7225 input_location);
7226
7227 goto out;
7228 }
7229
7230 #ifdef ENABLE_GIMPLE_CHECKING
7231 if (*expr_p)
7232 {
7233 enum tree_code code = TREE_CODE (*expr_p);
7234 /* These expressions should already be in gimple IR form. */
7235 gcc_assert (code != MODIFY_EXPR
7236 && code != ASM_EXPR
7237 && code != BIND_EXPR
7238 && code != CATCH_EXPR
7239 && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
7240 && code != EH_FILTER_EXPR
7241 && code != GOTO_EXPR
7242 && code != LABEL_EXPR
7243 && code != LOOP_EXPR
7244 && code != SWITCH_EXPR
7245 && code != TRY_FINALLY_EXPR
7246 && code != OMP_CRITICAL
7247 && code != OMP_FOR
7248 && code != OMP_MASTER
7249 && code != OMP_ORDERED
7250 && code != OMP_PARALLEL
7251 && code != OMP_SECTIONS
7252 && code != OMP_SECTION
7253 && code != OMP_SINGLE);
7254 }
7255 #endif
7256
7257 /* Otherwise we're gimplifying a subexpression, so the resulting
7258 value is interesting. If it's a valid operand that matches
7259 GIMPLE_TEST_F, we're done. Unless we are handling some
7260 post-effects internally; if that's the case, we need to copy into
7261 a temporary before adding the post-effects to POST_P. */
7262 if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
7263 goto out;
7264
7265 /* Otherwise, we need to create a new temporary for the gimplified
7266 expression. */
7267
7268 /* We can't return an lvalue if we have an internal postqueue. The
7269 object the lvalue refers to would (probably) be modified by the
7270 postqueue; we need to copy the value out first, which means an
7271 rvalue. */
7272 if ((fallback & fb_lvalue)
7273 && gimple_seq_empty_p (internal_post)
7274 && is_gimple_addressable (*expr_p))
7275 {
7276 /* An lvalue will do. Take the address of the expression, store it
7277 in a temporary, and replace the expression with an INDIRECT_REF of
7278 that temporary. */
7279 tmp = build_fold_addr_expr_loc (input_location, *expr_p);
7280 gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
7281 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (tmp)), tmp);
7282 }
7283 else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
7284 {
7285 /* An rvalue will do. Assign the gimplified expression into a
7286 new temporary TMP and replace the original expression with
7287 TMP. First, make sure that the expression has a type so that
7288 it can be assigned into a temporary. */
7289 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
7290
7291 if (!gimple_seq_empty_p (internal_post) || (fallback & fb_lvalue))
7292 /* The postqueue might change the value of the expression between
7293 the initialization and use of the temporary, so we can't use a
7294 formal temp. FIXME do we care? */
7295 {
7296 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
7297 if (TREE_CODE (TREE_TYPE (*expr_p)) == COMPLEX_TYPE
7298 || TREE_CODE (TREE_TYPE (*expr_p)) == VECTOR_TYPE)
7299 DECL_GIMPLE_REG_P (*expr_p) = 1;
7300 }
7301 else
7302 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
7303 }
7304 else
7305 {
7306 #ifdef ENABLE_GIMPLE_CHECKING
7307 if (!(fallback & fb_mayfail))
7308 {
7309 fprintf (stderr, "gimplification failed:\n");
7310 print_generic_expr (stderr, *expr_p, 0);
7311 debug_tree (*expr_p);
7312 internal_error ("gimplification failed");
7313 }
7314 #endif
7315 gcc_assert (fallback & fb_mayfail);
7316
7317 /* If this is an asm statement, and the user asked for the
7318 impossible, don't die. Fail and let gimplify_asm_expr
7319 issue an error. */
7320 ret = GS_ERROR;
7321 goto out;
7322 }
7323
7324 /* Make sure the temporary matches our predicate. */
7325 gcc_assert ((*gimple_test_f) (*expr_p));
7326
7327 if (!gimple_seq_empty_p (internal_post))
7328 {
7329 annotate_all_with_location (internal_post, input_location);
7330 gimplify_seq_add_seq (pre_p, internal_post);
7331 }
7332
7333 out:
7334 input_location = saved_location;
7335 return ret;
7336 }
7337
7338 /* Look through TYPE for variable-sized objects and gimplify each such
7339 size that we find. Add to LIST_P any statements generated. */
7340
7341 void
7342 gimplify_type_sizes (tree type, gimple_seq *list_p)
7343 {
7344 tree field, t;
7345
7346 if (type == NULL || type == error_mark_node)
7347 return;
7348
7349 /* We first do the main variant, then copy into any other variants. */
7350 type = TYPE_MAIN_VARIANT (type);
7351
7352 /* Avoid infinite recursion. */
7353 if (TYPE_SIZES_GIMPLIFIED (type))
7354 return;
7355
7356 TYPE_SIZES_GIMPLIFIED (type) = 1;
7357
7358 switch (TREE_CODE (type))
7359 {
7360 case INTEGER_TYPE:
7361 case ENUMERAL_TYPE:
7362 case BOOLEAN_TYPE:
7363 case REAL_TYPE:
7364 case FIXED_POINT_TYPE:
7365 gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
7366 gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);
7367
7368 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
7369 {
7370 TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
7371 TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
7372 }
7373 break;
7374
7375 case ARRAY_TYPE:
7376 /* These types may not have declarations, so handle them here. */
7377 gimplify_type_sizes (TREE_TYPE (type), list_p);
7378 gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
7379 /* Ensure VLA bounds aren't removed, for -O0 they should be variables
7380 with assigned stack slots, for -O1+ -g they should be tracked
7381 by VTA. */
7382 if (TYPE_DOMAIN (type)
7383 && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
7384 {
7385 t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
7386 if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
7387 DECL_IGNORED_P (t) = 0;
7388 t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
7389 if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
7390 DECL_IGNORED_P (t) = 0;
7391 }
7392 break;
7393
7394 case RECORD_TYPE:
7395 case UNION_TYPE:
7396 case QUAL_UNION_TYPE:
7397 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
7398 if (TREE_CODE (field) == FIELD_DECL)
7399 {
7400 gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
7401 gimplify_one_sizepos (&DECL_SIZE (field), list_p);
7402 gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
7403 gimplify_type_sizes (TREE_TYPE (field), list_p);
7404 }
7405 break;
7406
7407 case POINTER_TYPE:
7408 case REFERENCE_TYPE:
7409 /* We used to recurse on the pointed-to type here, which turned out to
7410 be incorrect because its definition might refer to variables not
7411 yet initialized at this point if a forward declaration is involved.
7412
7413 It was actually useful for anonymous pointed-to types to ensure
7414 that the sizes evaluation dominates every possible later use of the
7415 values. Restricting to such types here would be safe since there
7416 is no possible forward declaration around, but would introduce an
7417 undesirable middle-end semantic to anonymity. We then defer to
7418 front-ends the responsibility of ensuring that the sizes are
7419 evaluated both early and late enough, e.g. by attaching artificial
7420 type declarations to the tree. */
7421 break;
7422
7423 default:
7424 break;
7425 }
7426
7427 gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
7428 gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);
7429
7430 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
7431 {
7432 TYPE_SIZE (t) = TYPE_SIZE (type);
7433 TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
7434 TYPE_SIZES_GIMPLIFIED (t) = 1;
7435 }
7436 }
7437
7438 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
7439 a size or position, has had all of its SAVE_EXPRs evaluated.
7440 We add any required statements to *STMT_P. */
7441
7442 void
7443 gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
7444 {
7445 tree type, expr = *expr_p;
7446
7447 /* We don't do anything if the value isn't there, is constant, or contains
7448 A PLACEHOLDER_EXPR. We also don't want to do anything if it's already
7449 a VAR_DECL. If it's a VAR_DECL from another function, the gimplifier
7450 will want to replace it with a new variable, but that will cause problems
7451 if this type is from outside the function. It's OK to have that here. */
7452 if (expr == NULL_TREE || TREE_CONSTANT (expr)
7453 || TREE_CODE (expr) == VAR_DECL
7454 || CONTAINS_PLACEHOLDER_P (expr))
7455 return;
7456
7457 type = TREE_TYPE (expr);
7458 *expr_p = unshare_expr (expr);
7459
7460 gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue);
7461 expr = *expr_p;
7462
7463 /* Verify that we've an exact type match with the original expression.
7464 In particular, we do not wish to drop a "sizetype" in favour of a
7465 type of similar dimensions. We don't want to pollute the generic
7466 type-stripping code with this knowledge because it doesn't matter
7467 for the bulk of GENERIC/GIMPLE. It only matters that TYPE_SIZE_UNIT
7468 and friends retain their "sizetype-ness". */
7469 if (TREE_TYPE (expr) != type
7470 && TREE_CODE (type) == INTEGER_TYPE
7471 && TYPE_IS_SIZETYPE (type))
7472 {
7473 tree tmp;
7474 gimple stmt;
7475
7476 *expr_p = create_tmp_var (type, NULL);
7477 tmp = build1 (NOP_EXPR, type, expr);
7478 stmt = gimplify_assign (*expr_p, tmp, stmt_p);
7479 if (EXPR_HAS_LOCATION (expr))
7480 gimple_set_location (stmt, EXPR_LOCATION (expr));
7481 else
7482 gimple_set_location (stmt, input_location);
7483 }
7484 }
7485
7486
7487 /* Gimplify the body of statements pointed to by BODY_P and return a
7488 GIMPLE_BIND containing the sequence of GIMPLE statements
7489 corresponding to BODY_P. FNDECL is the function decl containing
7490 *BODY_P. */
7491
7492 gimple
7493 gimplify_body (tree *body_p, tree fndecl, bool do_parms)
7494 {
7495 location_t saved_location = input_location;
7496 gimple_seq parm_stmts, seq;
7497 gimple outer_bind;
7498 struct gimplify_ctx gctx;
7499
7500 timevar_push (TV_TREE_GIMPLIFY);
7501
7502 /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
7503 gimplification. */
7504 default_rtl_profile ();
7505
7506 gcc_assert (gimplify_ctxp == NULL);
7507 push_gimplify_context (&gctx);
7508
7509 /* Unshare most shared trees in the body and in that of any nested functions.
7510 It would seem we don't have to do this for nested functions because
7511 they are supposed to be output and then the outer function gimplified
7512 first, but the g++ front end doesn't always do it that way. */
7513 unshare_body (body_p, fndecl);
7514 unvisit_body (body_p, fndecl);
7515
7516 if (cgraph_node (fndecl)->origin)
7517 nonlocal_vlas = pointer_set_create ();
7518
7519 /* Make sure input_location isn't set to something weird. */
7520 input_location = DECL_SOURCE_LOCATION (fndecl);
7521
7522 /* Resolve callee-copies. This has to be done before processing
7523 the body so that DECL_VALUE_EXPR gets processed correctly. */
7524 parm_stmts = (do_parms) ? gimplify_parameters () : NULL;
7525
7526 /* Gimplify the function's body. */
7527 seq = NULL;
7528 gimplify_stmt (body_p, &seq);
7529 outer_bind = gimple_seq_first_stmt (seq);
7530 if (!outer_bind)
7531 {
7532 outer_bind = gimple_build_nop ();
7533 gimplify_seq_add_stmt (&seq, outer_bind);
7534 }
7535
7536 /* The body must contain exactly one statement, a GIMPLE_BIND. If this is
7537 not the case, wrap everything in a GIMPLE_BIND to make it so. */
7538 if (gimple_code (outer_bind) == GIMPLE_BIND
7539 && gimple_seq_first (seq) == gimple_seq_last (seq))
7540 ;
7541 else
7542 outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);
7543
7544 *body_p = NULL_TREE;
7545
7546 /* If we had callee-copies statements, insert them at the beginning
7547 of the function and clear DECL_VALUE_EXPR_P on the parameters. */
7548 if (!gimple_seq_empty_p (parm_stmts))
7549 {
7550 tree parm;
7551
7552 gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
7553 gimple_bind_set_body (outer_bind, parm_stmts);
7554
7555 for (parm = DECL_ARGUMENTS (current_function_decl);
7556 parm; parm = TREE_CHAIN (parm))
7557 if (DECL_HAS_VALUE_EXPR_P (parm))
7558 {
7559 DECL_HAS_VALUE_EXPR_P (parm) = 0;
7560 DECL_IGNORED_P (parm) = 0;
7561 }
7562 }
7563
7564 if (nonlocal_vlas)
7565 {
7566 pointer_set_destroy (nonlocal_vlas);
7567 nonlocal_vlas = NULL;
7568 }
7569
7570 pop_gimplify_context (outer_bind);
7571 gcc_assert (gimplify_ctxp == NULL);
7572
7573 #ifdef ENABLE_TYPES_CHECKING
7574 if (!errorcount && !sorrycount)
7575 verify_types_in_gimple_seq (gimple_bind_body (outer_bind));
7576 #endif
7577
7578 timevar_pop (TV_TREE_GIMPLIFY);
7579 input_location = saved_location;
7580
7581 return outer_bind;
7582 }
7583
7584 /* Entry point to the gimplification pass. FNDECL is the FUNCTION_DECL
7585 node for the function we want to gimplify.
7586
7587 Returns the sequence of GIMPLE statements corresponding to the body
7588 of FNDECL. */
7589
7590 void
7591 gimplify_function_tree (tree fndecl)
7592 {
7593 tree oldfn, parm, ret;
7594 gimple_seq seq;
7595 gimple bind;
7596
7597 gcc_assert (!gimple_body (fndecl));
7598
7599 oldfn = current_function_decl;
7600 current_function_decl = fndecl;
7601 if (DECL_STRUCT_FUNCTION (fndecl))
7602 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
7603 else
7604 push_struct_function (fndecl);
7605
7606 for (parm = DECL_ARGUMENTS (fndecl); parm ; parm = TREE_CHAIN (parm))
7607 {
7608 /* Preliminarily mark non-addressed complex variables as eligible
7609 for promotion to gimple registers. We'll transform their uses
7610 as we find them. */
7611 if ((TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
7612 || TREE_CODE (TREE_TYPE (parm)) == VECTOR_TYPE)
7613 && !TREE_THIS_VOLATILE (parm)
7614 && !needs_to_live_in_memory (parm))
7615 DECL_GIMPLE_REG_P (parm) = 1;
7616 }
7617
7618 ret = DECL_RESULT (fndecl);
7619 if ((TREE_CODE (TREE_TYPE (ret)) == COMPLEX_TYPE
7620 || TREE_CODE (TREE_TYPE (ret)) == VECTOR_TYPE)
7621 && !needs_to_live_in_memory (ret))
7622 DECL_GIMPLE_REG_P (ret) = 1;
7623
7624 bind = gimplify_body (&DECL_SAVED_TREE (fndecl), fndecl, true);
7625
7626 /* The tree body of the function is no longer needed, replace it
7627 with the new GIMPLE body. */
7628 seq = gimple_seq_alloc ();
7629 gimple_seq_add_stmt (&seq, bind);
7630 gimple_set_body (fndecl, seq);
7631
7632 /* If we're instrumenting function entry/exit, then prepend the call to
7633 the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
7634 catch the exit hook. */
7635 /* ??? Add some way to ignore exceptions for this TFE. */
7636 if (flag_instrument_function_entry_exit
7637 && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
7638 && !flag_instrument_functions_exclude_p (fndecl))
7639 {
7640 tree x;
7641 gimple new_bind;
7642 gimple tf;
7643 gimple_seq cleanup = NULL, body = NULL;
7644
7645 x = implicit_built_in_decls[BUILT_IN_PROFILE_FUNC_EXIT];
7646 gimplify_seq_add_stmt (&cleanup, gimple_build_call (x, 0));
7647 tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);
7648
7649 x = implicit_built_in_decls[BUILT_IN_PROFILE_FUNC_ENTER];
7650 gimplify_seq_add_stmt (&body, gimple_build_call (x, 0));
7651 gimplify_seq_add_stmt (&body, tf);
7652 new_bind = gimple_build_bind (NULL, body, gimple_bind_block (bind));
7653 /* Clear the block for BIND, since it is no longer directly inside
7654 the function, but within a try block. */
7655 gimple_bind_set_block (bind, NULL);
7656
7657 /* Replace the current function body with the body
7658 wrapped in the try/finally TF. */
7659 seq = gimple_seq_alloc ();
7660 gimple_seq_add_stmt (&seq, new_bind);
7661 gimple_set_body (fndecl, seq);
7662 }
7663
7664 DECL_SAVED_TREE (fndecl) = NULL_TREE;
7665 cfun->curr_properties = PROP_gimple_any;
7666
7667 current_function_decl = oldfn;
7668 pop_cfun ();
7669 }
7670
7671
/* Some transformations like inlining may invalidate the GIMPLE form
   for operands.  This function traverses all the operands in STMT and
   gimplifies anything that is not a valid gimple operand.  Any new
   GIMPLE statements are inserted before *GSI_P.

   Statement kinds with special operand predicates (GIMPLE_COND,
   GIMPLE_SWITCH, GIMPLE_OMP_ATOMIC_LOAD, GIMPLE_ASM) are handled
   explicitly; everything else goes through the generic operand walk
   in the default case.  */

void
gimple_regimplify_operands (gimple stmt, gimple_stmt_iterator *gsi_p)
{
  size_t i, num_ops;
  tree orig_lhs = NULL_TREE, lhs, t;
  gimple_seq pre = NULL;         /* Statements to emit before STMT.  */
  gimple post_stmt = NULL;       /* Copy-back assignment emitted after STMT.  */
  struct gimplify_ctx gctx;

  push_gimplify_context (&gctx);
  gimplify_ctxp->into_ssa = gimple_in_ssa_p (cfun);

  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      /* Both comparison operands must be gimple values.  */
      gimplify_expr (gimple_cond_lhs_ptr (stmt), &pre, NULL,
		     is_gimple_val, fb_rvalue);
      gimplify_expr (gimple_cond_rhs_ptr (stmt), &pre, NULL,
		     is_gimple_val, fb_rvalue);
      break;
    case GIMPLE_SWITCH:
      gimplify_expr (gimple_switch_index_ptr (stmt), &pre, NULL,
		     is_gimple_val, fb_rvalue);
      break;
    case GIMPLE_OMP_ATOMIC_LOAD:
      gimplify_expr (gimple_omp_atomic_load_rhs_ptr (stmt), &pre, NULL,
		     is_gimple_val, fb_rvalue);
      break;
    case GIMPLE_ASM:
      {
	size_t i, noutputs = gimple_asm_noutputs (stmt);
	const char *constraint, **oconstraints;
	bool allows_mem, allows_reg, is_inout;

	/* Collect output constraints first; input-constraint parsing
	   below needs them to resolve matching constraints.  */
	oconstraints
	  = (const char **) alloca ((noutputs) * sizeof (const char *));
	for (i = 0; i < noutputs; i++)
	  {
	    tree op = gimple_asm_output_op (stmt, i);
	    constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
	    oconstraints[i] = constraint;
	    parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
				     &allows_reg, &is_inout);
	    /* In/out operands must be addressable lvalues; pure outputs
	       only need to be lvalues.  */
	    gimplify_expr (&TREE_VALUE (op), &pre, NULL,
			   is_inout ? is_gimple_min_lval : is_gimple_lvalue,
			   fb_lvalue | fb_mayfail);
	  }
	for (i = 0; i < gimple_asm_ninputs (stmt); i++)
	  {
	    tree op = gimple_asm_input_op (stmt, i);
	    constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
	    parse_input_constraint (&constraint, 0, 0, noutputs, 0,
				    oconstraints, &allows_mem, &allows_reg);
	    /* TREE_ADDRESSABLE types must be passed in memory when the
	       constraint allows it.  */
	    if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (op))) && allows_mem)
	      allows_reg = 0;
	    if (!allows_reg && allows_mem)
	      gimplify_expr (&TREE_VALUE (op), &pre, NULL,
			     is_gimple_lvalue, fb_lvalue | fb_mayfail);
	    else
	      gimplify_expr (&TREE_VALUE (op), &pre, NULL,
			     is_gimple_asm_val, fb_rvalue);
	  }
      }
      break;
    default:
      /* NOTE: We start gimplifying operands from last to first to
	 make sure that side-effects on the RHS of calls, assignments
	 and ASMs are executed before the LHS.  The ordering is not
	 important for other statements.  */
      num_ops = gimple_num_ops (stmt);
      orig_lhs = gimple_get_lhs (stmt);
      for (i = num_ops; i > 0; i--)
	{
	  tree op = gimple_op (stmt, i - 1);
	  if (op == NULL_TREE)
	    continue;
	  /* Operand 0 of calls/assignments is the LHS.  */
	  if (i == 1 && (is_gimple_call (stmt) || is_gimple_assign (stmt)))
	    gimplify_expr (&op, &pre, NULL, is_gimple_lvalue, fb_lvalue);
	  /* Single-RHS assignment: predicate depends on what the LHS
	     can accept.  */
	  else if (i == 2
		   && is_gimple_assign (stmt)
		   && num_ops == 2
		   && get_gimple_rhs_class (gimple_expr_code (stmt))
		      == GIMPLE_SINGLE_RHS)
	    gimplify_expr (&op, &pre, NULL,
			   rhs_predicate_for (gimple_assign_lhs (stmt)),
			   fb_rvalue);
	  /* Operand 1 of a call is the callee address.  */
	  else if (i == 2 && is_gimple_call (stmt))
	    {
	      if (TREE_CODE (op) == FUNCTION_DECL)
		continue;
	      gimplify_expr (&op, &pre, NULL, is_gimple_call_addr, fb_rvalue);
	    }
	  else
	    gimplify_expr (&op, &pre, NULL, is_gimple_val, fb_rvalue);
	  gimple_set_op (stmt, i - 1, op);
	}

      lhs = gimple_get_lhs (stmt);
      /* If the LHS changed it in a way that requires a simple RHS,
	 create temporary.  */
      if (lhs && !is_gimple_reg (lhs))
	{
	  bool need_temp = false;

	  if (is_gimple_assign (stmt)
	      && num_ops == 2
	      && get_gimple_rhs_class (gimple_expr_code (stmt))
		 == GIMPLE_SINGLE_RHS)
	    gimplify_expr (gimple_assign_rhs1_ptr (stmt), &pre, NULL,
			   rhs_predicate_for (gimple_assign_lhs (stmt)),
			   fb_rvalue);
	  else if (is_gimple_reg (lhs))
	    {
	      if (is_gimple_reg_type (TREE_TYPE (lhs)))
		{
		  if (is_gimple_call (stmt))
		    {
		      /* Calls that are not const/pure (or are looping
			 const/pure) may have side effects, so their
			 result must go through a temporary.  */
		      i = gimple_call_flags (stmt);
		      if ((i & ECF_LOOPING_CONST_OR_PURE)
			  || !(i & (ECF_CONST | ECF_PURE)))
			need_temp = true;
		    }
		  if (stmt_can_throw_internal (stmt))
		    need_temp = true;
		}
	    }
	  else
	    {
	      if (is_gimple_reg_type (TREE_TYPE (lhs)))
		need_temp = true;
	      else if (TYPE_MODE (TREE_TYPE (lhs)) != BLKmode)
		{
		  if (is_gimple_call (stmt))
		    {
		      tree fndecl = gimple_call_fndecl (stmt);

		      /* No temporary needed when the value is returned
			 in memory anyway (aggregate return or
			 DECL_BY_REFERENCE result).  */
		      if (!aggregate_value_p (TREE_TYPE (lhs), fndecl)
			  && !(fndecl && DECL_RESULT (fndecl)
			       && DECL_BY_REFERENCE (DECL_RESULT (fndecl))))
			need_temp = true;
		    }
		  else
		    need_temp = true;
		}
	    }
	  if (need_temp)
	    {
	      /* Store into a fresh register temporary and copy it back
	         into the original LHS after STMT.  */
	      tree temp = create_tmp_reg (TREE_TYPE (lhs), NULL);

	      /* NOTE(review): ORIG_LHS is not read after this point, so
		 this assignment appears to be dead — confirm against the
		 upstream history before removing.  */
	      if (TREE_CODE (orig_lhs) == SSA_NAME)
		orig_lhs = SSA_NAME_VAR (orig_lhs);

	      if (gimple_in_ssa_p (cfun))
		temp = make_ssa_name (temp, NULL);
	      gimple_set_lhs (stmt, temp);
	      post_stmt = gimple_build_assign (lhs, temp);
	      if (TREE_CODE (lhs) == SSA_NAME)
		SSA_NAME_DEF_STMT (lhs) = post_stmt;
	    }
	}
      break;
    }

  /* Register any temporaries the gimplifier created.  */
  if (gimple_referenced_vars (cfun))
    for (t = gimplify_ctxp->temps; t ; t = TREE_CHAIN (t))
      add_referenced_var (t);

  /* Emit the pre-queue before STMT, renaming symbols if in SSA form.  */
  if (!gimple_seq_empty_p (pre))
    {
      if (gimple_in_ssa_p (cfun))
	{
	  gimple_stmt_iterator i;

	  for (i = gsi_start (pre); !gsi_end_p (i); gsi_next (&i))
	    mark_symbols_for_renaming (gsi_stmt (i));
	}
      gsi_insert_seq_before (gsi_p, pre, GSI_SAME_STMT);
    }
  /* The copy-back of the temporary LHS goes right after STMT.  */
  if (post_stmt)
    gsi_insert_after (gsi_p, post_stmt, GSI_NEW_STMT);

  pop_gimplify_context (NULL);
}
7860
7861
7862 /* Expands EXPR to list of gimple statements STMTS. If SIMPLE is true,
7863 force the result to be either ssa_name or an invariant, otherwise
7864 just force it to be a rhs expression. If VAR is not NULL, make the
7865 base variable of the final destination be VAR if suitable. */
7866
7867 tree
7868 force_gimple_operand (tree expr, gimple_seq *stmts, bool simple, tree var)
7869 {
7870 tree t;
7871 enum gimplify_status ret;
7872 gimple_predicate gimple_test_f;
7873 struct gimplify_ctx gctx;
7874
7875 *stmts = NULL;
7876
7877 if (is_gimple_val (expr))
7878 return expr;
7879
7880 gimple_test_f = simple ? is_gimple_val : is_gimple_reg_rhs;
7881
7882 push_gimplify_context (&gctx);
7883 gimplify_ctxp->into_ssa = gimple_in_ssa_p (cfun);
7884 gimplify_ctxp->allow_rhs_cond_expr = true;
7885
7886 if (var)
7887 expr = build2 (MODIFY_EXPR, TREE_TYPE (var), var, expr);
7888
7889 if (TREE_CODE (expr) != MODIFY_EXPR
7890 && TREE_TYPE (expr) == void_type_node)
7891 {
7892 gimplify_and_add (expr, stmts);
7893 expr = NULL_TREE;
7894 }
7895 else
7896 {
7897 ret = gimplify_expr (&expr, stmts, NULL, gimple_test_f, fb_rvalue);
7898 gcc_assert (ret != GS_ERROR);
7899 }
7900
7901 if (gimple_referenced_vars (cfun))
7902 for (t = gimplify_ctxp->temps; t ; t = TREE_CHAIN (t))
7903 add_referenced_var (t);
7904
7905 pop_gimplify_context (NULL);
7906
7907 return expr;
7908 }
7909
7910 /* Invokes force_gimple_operand for EXPR with parameters SIMPLE_P and VAR. If
7911 some statements are produced, emits them at GSI. If BEFORE is true.
7912 the statements are appended before GSI, otherwise they are appended after
7913 it. M specifies the way GSI moves after insertion (GSI_SAME_STMT or
7914 GSI_CONTINUE_LINKING are the usual values). */
7915
7916 tree
7917 force_gimple_operand_gsi (gimple_stmt_iterator *gsi, tree expr,
7918 bool simple_p, tree var, bool before,
7919 enum gsi_iterator_update m)
7920 {
7921 gimple_seq stmts;
7922
7923 expr = force_gimple_operand (expr, &stmts, simple_p, var);
7924
7925 if (!gimple_seq_empty_p (stmts))
7926 {
7927 if (gimple_in_ssa_p (cfun))
7928 {
7929 gimple_stmt_iterator i;
7930
7931 for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
7932 mark_symbols_for_renaming (gsi_stmt (i));
7933 }
7934
7935 if (before)
7936 gsi_insert_seq_before (gsi, stmts, m);
7937 else
7938 gsi_insert_seq_after (gsi, stmts, m);
7939 }
7940
7941 return expr;
7942 }
7943
7944 #include "gt-gimplify.h"