Update copyright notices of my recent commits.
[gcc.git] / gcc / gimplify.c
1 /* Tree lowering pass. This pass converts the GENERIC functions-as-trees
2 tree representation into the GIMPLE form.
3 Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
4 Free Software Foundation, Inc.
5 Major work done by Sebastian Pop <s.pop@laposte.net>,
6 Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.
7
8 This file is part of GCC.
9
10 GCC is free software; you can redistribute it and/or modify it under
11 the terms of the GNU General Public License as published by the Free
12 Software Foundation; either version 3, or (at your option) any later
13 version.
14
15 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
16 WARRANTY; without even the implied warranty of MERCHANTABILITY or
17 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 for more details.
19
20 You should have received a copy of the GNU General Public License
21 along with GCC; see the file COPYING3. If not see
22 <http://www.gnu.org/licenses/>. */
23
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "tm.h"
28 #include "tree.h"
29 #include "rtl.h"
30 #include "varray.h"
31 #include "gimple.h"
32 #include "tree-iterator.h"
33 #include "tree-inline.h"
34 #include "diagnostic.h"
35 #include "langhooks.h"
36 #include "langhooks-def.h"
37 #include "tree-flow.h"
38 #include "cgraph.h"
39 #include "timevar.h"
40 #include "except.h"
41 #include "hashtab.h"
42 #include "flags.h"
43 #include "real.h"
44 #include "function.h"
45 #include "output.h"
46 #include "expr.h"
47 #include "ggc.h"
48 #include "toplev.h"
49 #include "target.h"
50 #include "optabs.h"
51 #include "pointer-set.h"
52 #include "splay-tree.h"
53 #include "vec.h"
54 #include "gimple.h"
55 #include "tree-pass.h"
56
57
/* Per-variable flags stored in a gimplify_omp_ctx's splay tree,
   recording how each DECL is referenced and shared inside an
   OpenMP region.  The values are bit flags and may be or'ed.  */
enum gimplify_omp_var_data
{
  GOVD_SEEN = 1,		/* Variable was referenced in the region.  */
  GOVD_EXPLICIT = 2,		/* Sharing given by an explicit clause.  */
  GOVD_SHARED = 4,		/* OpenMP 'shared' semantics.  */
  GOVD_PRIVATE = 8,		/* OpenMP 'private' semantics.  */
  GOVD_FIRSTPRIVATE = 16,	/* OpenMP 'firstprivate' semantics.  */
  GOVD_LASTPRIVATE = 32,	/* OpenMP 'lastprivate' semantics.  */
  GOVD_REDUCTION = 64,		/* OpenMP 'reduction' semantics.  */
  GOVD_LOCAL = 128,		/* Declared locally within the region.  */
  GOVD_DEBUG_PRIVATE = 256,
  GOVD_PRIVATE_OUTER_REF = 512,	/* Private clause also references the
				   outer-context copy of the variable.  */

  /* Mask selecting the data-sharing class bits above.  */
  GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
			   | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LOCAL)
};
73
74
/* Kind of OpenMP region a gimplify_omp_ctx describes.  Used, among
   other things, to decide the default data-sharing kind and how far
   outward temporaries must be marked local.  */
enum omp_region_type
{
  ORT_WORKSHARE = 0,		/* Worksharing construct (for, sections...).  */
  ORT_TASK = 1,			/* Explicit task.  */
  ORT_PARALLEL = 2,		/* Parallel region.  */
  ORT_COMBINED_PARALLEL = 3	/* Combined parallel + worksharing.  */
};
82
/* Gimplification state for one OpenMP region.  Contexts nest through
   OUTER_CONTEXT, mirroring the nesting of OMP constructs.  */
struct gimplify_omp_ctx
{
  struct gimplify_omp_ctx *outer_context; /* Enclosing region, if any.  */
  splay_tree variables;			  /* DECL -> gimplify_omp_var_data
					     flags, keyed by DECL_UID.  */
  struct pointer_set_t *privatized_types; /* Types seen privatized here;
					     presumably used to avoid
					     re-remapping — confirm at uses.  */
  location_t location;			  /* Source location of the region.  */
  enum omp_clause_default_kind default_kind; /* default() clause in effect.  */
  enum omp_region_type region_type;	  /* Kind of region (see above).  */
};
92
93 static struct gimplify_ctx *gimplify_ctxp;
94 static struct gimplify_omp_ctx *gimplify_omp_ctxp;
95
96
97 /* Formal (expression) temporary table handling: Multiple occurrences of
98 the same scalar expression are evaluated into the same temporary. */
99
/* Entry in the formal-temporary table: maps a scalar expression (the
   key) to the temporary variable already created to hold its value,
   so repeated occurrences reuse one temporary.  */
typedef struct gimple_temp_hash_elt
{
  tree val;   /* Key: the expression.  */
  tree temp;  /* Value: the temporary holding VAL.  */
} elt_t;
105
106 /* Forward declarations. */
107 static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
108
109 /* Mark X addressable. Unlike the langhook we expect X to be in gimple
110 form and we don't do any syntax checking. */
111 void
112 mark_addressable (tree x)
113 {
114 while (handled_component_p (x))
115 x = TREE_OPERAND (x, 0);
116 if (TREE_CODE (x) != VAR_DECL
117 && TREE_CODE (x) != PARM_DECL
118 && TREE_CODE (x) != RESULT_DECL)
119 return ;
120 TREE_ADDRESSABLE (x) = 1;
121 }
122
123 /* Return a hash value for a formal temporary table entry. */
124
125 static hashval_t
126 gimple_tree_hash (const void *p)
127 {
128 tree t = ((const elt_t *) p)->val;
129 return iterative_hash_expr (t, 0);
130 }
131
132 /* Compare two formal temporary table entries. */
133
134 static int
135 gimple_tree_eq (const void *p1, const void *p2)
136 {
137 tree t1 = ((const elt_t *) p1)->val;
138 tree t2 = ((const elt_t *) p2)->val;
139 enum tree_code code = TREE_CODE (t1);
140
141 if (TREE_CODE (t2) != code
142 || TREE_TYPE (t1) != TREE_TYPE (t2))
143 return 0;
144
145 if (!operand_equal_p (t1, t2, 0))
146 return 0;
147
148 /* Only allow them to compare equal if they also hash equal; otherwise
149 results are nondeterminate, and we fail bootstrap comparison. */
150 gcc_assert (gimple_tree_hash (p1) == gimple_tree_hash (p2));
151
152 return 1;
153 }
154
155 /* Link gimple statement GS to the end of the sequence *SEQ_P. If
156 *SEQ_P is NULL, a new sequence is allocated. This function is
157 similar to gimple_seq_add_stmt, but does not scan the operands.
158 During gimplification, we need to manipulate statement sequences
159 before the def/use vectors have been constructed. */
160
161 static void
162 gimplify_seq_add_stmt (gimple_seq *seq_p, gimple gs)
163 {
164 gimple_stmt_iterator si;
165
166 if (gs == NULL)
167 return;
168
169 if (*seq_p == NULL)
170 *seq_p = gimple_seq_alloc ();
171
172 si = gsi_last (*seq_p);
173
174 gsi_insert_after_without_update (&si, gs, GSI_NEW_STMT);
175 }
176
177 /* Append sequence SRC to the end of sequence *DST_P. If *DST_P is
178 NULL, a new sequence is allocated. This function is
179 similar to gimple_seq_add_seq, but does not scan the operands.
180 During gimplification, we need to manipulate statement sequences
181 before the def/use vectors have been constructed. */
182
183 static void
184 gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
185 {
186 gimple_stmt_iterator si;
187
188 if (src == NULL)
189 return;
190
191 if (*dst_p == NULL)
192 *dst_p = gimple_seq_alloc ();
193
194 si = gsi_last (*dst_p);
195 gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
196 }
197
198 /* Set up a context for the gimplifier. */
199
200 void
201 push_gimplify_context (struct gimplify_ctx *c)
202 {
203 memset (c, '\0', sizeof (*c));
204 c->prev_context = gimplify_ctxp;
205 gimplify_ctxp = c;
206 }
207
208 /* Tear down a context for the gimplifier. If BODY is non-null, then
209 put the temporaries into the outer BIND_EXPR. Otherwise, put them
210 in the local_decls.
211
212 BODY is not a sequence, but the first tuple in a sequence. */
213
void
pop_gimplify_context (gimple body)
{
  struct gimplify_ctx *c = gimplify_ctxp;

  /* Every GIMPLE_BIND pushed during gimplification must have been
     popped again by now.  */
  gcc_assert (c && (c->bind_expr_stack == NULL
		    || VEC_empty (gimple, c->bind_expr_stack)));
  VEC_free (gimple, heap, c->bind_expr_stack);
  gimplify_ctxp = c->prev_context;

  /* Hand the accumulated temporaries either to BODY's bind or to the
     function's local declaration list.  */
  if (body)
    declare_vars (c->temps, body, false);
  else
    record_vars (c->temps);

  /* The formal-temporary table is only allocated on demand.  */
  if (c->temp_htab)
    htab_delete (c->temp_htab);
}
232
/* Push GIMPLE_BIND onto the current context's stack of active
   GIMPLE_BINDs.  */

static void
gimple_push_bind_expr (gimple gimple_bind)
{
  /* Allocate the stack lazily; eight slots cover typical nesting.  */
  if (gimplify_ctxp->bind_expr_stack == NULL)
    gimplify_ctxp->bind_expr_stack = VEC_alloc (gimple, heap, 8);
  VEC_safe_push (gimple, heap, gimplify_ctxp->bind_expr_stack, gimple_bind);
}
240
/* Pop the innermost GIMPLE_BIND off the current context's stack.  */

static void
gimple_pop_bind_expr (void)
{
  VEC_pop (gimple, gimplify_ctxp->bind_expr_stack);
}
246
/* Return the innermost GIMPLE_BIND currently being gimplified.  */

gimple
gimple_current_bind_expr (void)
{
  return VEC_last (gimple, gimplify_ctxp->bind_expr_stack);
}
252
253 /* Return the stack GIMPLE_BINDs created during gimplification. */
254
VEC(gimple, heap) *
gimple_bind_expr_stack (void)
{
  /* Innermost bind is the last element of the vector.  */
  return gimplify_ctxp->bind_expr_stack;
}
260
261 /* Returns true iff there is a COND_EXPR between us and the innermost
262 CLEANUP_POINT_EXPR. This info is used by gimple_push_cleanup. */
263
static bool
gimple_conditional_context (void)
{
  /* CONDITIONS counts the COND_EXPR nesting depth; see
     gimple_push_condition/gimple_pop_condition.  */
  return gimplify_ctxp->conditions > 0;
}
269
270 /* Note that we've entered a COND_EXPR. */
271
static void
gimple_push_condition (void)
{
#ifdef ENABLE_GIMPLE_CHECKING
  /* On entering the outermost conditional, no conditional cleanups
     may be pending from a previous conditional.  */
  if (gimplify_ctxp->conditions == 0)
    gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
#endif
  ++(gimplify_ctxp->conditions);
}
281
282 /* Note that we've left a COND_EXPR. If we're back at unconditional scope
283 now, add any conditional cleanups we've seen to the prequeue. */
284
285 static void
286 gimple_pop_condition (gimple_seq *pre_p)
287 {
288 int conds = --(gimplify_ctxp->conditions);
289
290 gcc_assert (conds >= 0);
291 if (conds == 0)
292 {
293 gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
294 gimplify_ctxp->conditional_cleanups = NULL;
295 }
296 }
297
298 /* A stable comparison routine for use with splay trees and DECLs. */
299
300 static int
301 splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
302 {
303 tree a = (tree) xa;
304 tree b = (tree) xb;
305
306 return DECL_UID (a) - DECL_UID (b);
307 }
308
309 /* Create a new omp construct that deals with variable remapping. */
310
311 static struct gimplify_omp_ctx *
312 new_omp_context (enum omp_region_type region_type)
313 {
314 struct gimplify_omp_ctx *c;
315
316 c = XCNEW (struct gimplify_omp_ctx);
317 c->outer_context = gimplify_omp_ctxp;
318 c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
319 c->privatized_types = pointer_set_create ();
320 c->location = input_location;
321 c->region_type = region_type;
322 if (region_type != ORT_TASK)
323 c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
324 else
325 c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;
326
327 return c;
328 }
329
330 /* Destroy an omp construct that deals with variable remapping. */
331
static void
delete_omp_context (struct gimplify_omp_ctx *c)
{
  /* Release the per-context tables, then the context itself.  */
  splay_tree_delete (c->variables);
  pointer_set_destroy (c->privatized_types);
  XDELETE (c);
}
339
340 static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
341 static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);
342
343 /* A subroutine of append_to_statement_list{,_force}. T is not NULL. */
344
345 static void
346 append_to_statement_list_1 (tree t, tree *list_p)
347 {
348 tree list = *list_p;
349 tree_stmt_iterator i;
350
351 if (!list)
352 {
353 if (t && TREE_CODE (t) == STATEMENT_LIST)
354 {
355 *list_p = t;
356 return;
357 }
358 *list_p = list = alloc_stmt_list ();
359 }
360
361 i = tsi_last (list);
362 tsi_link_after (&i, t, TSI_CONTINUE_LINKING);
363 }
364
365 /* Add T to the end of the list container pointed to by LIST_P.
366 If T is an expression with no effects, it is ignored. */
367
368 void
369 append_to_statement_list (tree t, tree *list_p)
370 {
371 if (t && TREE_SIDE_EFFECTS (t))
372 append_to_statement_list_1 (t, list_p);
373 }
374
375 /* Similar, but the statement is always added, regardless of side effects. */
376
377 void
378 append_to_statement_list_force (tree t, tree *list_p)
379 {
380 if (t != NULL_TREE)
381 append_to_statement_list_1 (t, list_p);
382 }
383
384 /* Both gimplify the statement T and append it to *SEQ_P. This function
385 behaves exactly as gimplify_stmt, but you don't have to pass T as a
386 reference. */
387
void
gimplify_and_add (tree t, gimple_seq *seq_p)
{
  /* gimplify_stmt takes T by reference; the local copy absorbs any
     rewriting it does.  */
  gimplify_stmt (&t, seq_p);
}
393
394 /* Gimplify statement T into sequence *SEQ_P, and return the first
395 tuple in the sequence of generated tuples for this statement.
396 Return NULL if gimplifying T produced no tuples. */
397
398 static gimple
399 gimplify_and_return_first (tree t, gimple_seq *seq_p)
400 {
401 gimple_stmt_iterator last = gsi_last (*seq_p);
402
403 gimplify_and_add (t, seq_p);
404
405 if (!gsi_end_p (last))
406 {
407 gsi_next (&last);
408 return gsi_stmt (last);
409 }
410 else
411 return gimple_seq_first_stmt (*seq_p);
412 }
413
414 /* Strip off a legitimate source ending from the input string NAME of
415 length LEN. Rather than having to know the names used by all of
416 our front ends, we strip off an ending of a period followed by
417 up to five characters. (Java uses ".class".) */
418
static inline void
remove_suffix (char *name, int len)
{
  int pos;

  /* Walk backward from the end of NAME, looking at most seven
     characters back (and never at the first character), and truncate
     at the first period found.  */
  for (pos = len - 2; pos > 0 && pos > len - 8; pos--)
    if (name[pos] == '.')
      {
	name[pos] = '\0';
	return;
      }
}
433
434 /* Create a new temporary name with PREFIX. Returns an identifier. */
435
436 static GTY(()) unsigned int tmp_var_id_num;
437
tree
create_tmp_var_name (const char *prefix)
{
  char *tmp_name;

  if (prefix)
    {
      /* Work on a stack copy so the caller's string is untouched.  */
      char *preftmp = ASTRDUP (prefix);

      remove_suffix (preftmp, strlen (preftmp));
      prefix = preftmp;
    }

  /* Produce a unique, target-formatted private name from the prefix
     (or "T" if none) and a global counter.  */
  ASM_FORMAT_PRIVATE_NAME (tmp_name, prefix ? prefix : "T", tmp_var_id_num++);
  return get_identifier (tmp_name);
}
454
455
456 /* Create a new temporary variable declaration of type TYPE.
457 Does NOT push it into the current binding. */
458
tree
create_tmp_var_raw (tree type, const char *prefix)
{
  tree tmp_var;
  tree new_type;

  /* Make the type of the variable writable.  */
  new_type = build_type_variant (type, 0, 0);
  TYPE_ATTRIBUTES (new_type) = TYPE_ATTRIBUTES (type);
  /* NOTE(review): NEW_TYPE is not used below — building the variant
     presumably registers it as TYPE's unqualified variant; confirm.  */

  tmp_var = build_decl (input_location,
			VAR_DECL, prefix ? create_tmp_var_name (prefix) : NULL,
			type);

  /* The variable was declared by the compiler.  */
  DECL_ARTIFICIAL (tmp_var) = 1;
  /* And we don't want debug info for it.  */
  DECL_IGNORED_P (tmp_var) = 1;

  /* Make the variable writable.  */
  TREE_READONLY (tmp_var) = 0;

  /* A local, defined, used temporary — never external or static.  */
  DECL_EXTERNAL (tmp_var) = 0;
  TREE_STATIC (tmp_var) = 0;
  TREE_USED (tmp_var) = 1;

  return tmp_var;
}
487
488 /* Create a new temporary variable declaration of type TYPE. DOES push the
489 variable into the current binding. Further, assume that this is called
490 only from gimplification or optimization, at which point the creation of
491 certain types are bugs. */
492
tree
create_tmp_var (tree type, const char *prefix)
{
  tree tmp_var;

  /* We don't allow types that are addressable (meaning we can't make copies),
     or incomplete.  We also used to reject every variable size objects here,
     but now support those for which a constant upper bound can be obtained.
     The processing for variable sizes is performed in gimple_add_tmp_var,
     point at which it really matters and possibly reached via paths not going
     through this function, e.g. after direct calls to create_tmp_var_raw.  */
  gcc_assert (!TREE_ADDRESSABLE (type) && COMPLETE_TYPE_P (type));

  /* Build the decl, then push it into the current binding.  */
  tmp_var = create_tmp_var_raw (type, prefix);
  gimple_add_tmp_var (tmp_var);
  return tmp_var;
}
510
511 /* Create a temporary with a name derived from VAL. Subroutine of
512 lookup_tmp_var; nobody else should call this function. */
513
static inline tree
create_tmp_from_val (tree val)
{
  /* The temporary inherits VAL's type and (if any) its name.  */
  return create_tmp_var (TREE_TYPE (val), get_name (val));
}
519
520 /* Create a temporary to hold the value of VAL. If IS_FORMAL, try to reuse
521 an existing expression temporary. */
522
static tree
lookup_tmp_var (tree val, bool is_formal)
{
  tree ret;

  /* If not optimizing, never really reuse a temporary.  local-alloc
     won't allocate any variable that is used in more than one basic
     block, which means it will go into memory, causing much extra
     work in reload and final and poorer code generation, outweighing
     the extra memory allocation here.  Expressions with side effects
     can never share a temporary either.  */
  if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
    ret = create_tmp_from_val (val);
  else
    {
      elt_t elt, *elt_p;
      void **slot;

      /* Probe the formal-temporary table, creating it on first use.  */
      elt.val = val;
      if (gimplify_ctxp->temp_htab == NULL)
        gimplify_ctxp->temp_htab
	  = htab_create (1000, gimple_tree_hash, gimple_tree_eq, free);
      slot = htab_find_slot (gimplify_ctxp->temp_htab, (void *)&elt, INSERT);
      if (*slot == NULL)
	{
	  /* First occurrence of VAL: create a temporary and remember it.  */
	  elt_p = XNEW (elt_t);
	  elt_p->val = val;
	  elt_p->temp = ret = create_tmp_from_val (val);
	  *slot = (void *) elt_p;
	}
      else
	{
	  /* VAL was seen before: reuse its temporary.  */
	  elt_p = (elt_t *) *slot;
          ret = elt_p->temp;
	}
    }

  return ret;
}
561
562
563 /* Return true if T is a CALL_EXPR or an expression that can be
564 assignmed to a temporary. Note that this predicate should only be
565 used during gimplification. See the rationale for this in
566 gimplify_modify_expr. */
567
static bool
is_gimple_reg_rhs_or_call (tree t)
{
  /* Any valid GIMPLE rhs class qualifies, plus calls (which become
     GIMPLE_CALL tuples rather than assignments).  */
  return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
	  || TREE_CODE (t) == CALL_EXPR);
}
574
575 /* Return true if T is a valid memory RHS or a CALL_EXPR. Note that
576 this predicate should only be used during gimplification. See the
577 rationale for this in gimplify_modify_expr. */
578
579 static bool
580 is_gimple_mem_rhs_or_call (tree t)
581 {
582 /* If we're dealing with a renamable type, either source or dest must be
583 a renamed variable. */
584 if (is_gimple_reg_type (TREE_TYPE (t)))
585 return is_gimple_val (t);
586 else
587 return (is_gimple_val (t) || is_gimple_lvalue (t)
588 || TREE_CODE (t) == CALL_EXPR);
589 }
590
591 /* Helper for get_formal_tmp_var and get_initialized_tmp_var. */
592
static tree
internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
                      bool is_formal)
{
  tree t, mod;

  /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
     can create an INIT_EXPR and convert it into a GIMPLE_CALL below.  */
  gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
		 fb_rvalue);

  t = lookup_tmp_var (val, is_formal);

  /* Formal complex/vector temporaries can live in gimple registers.  */
  if (is_formal
      && (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
	  || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE))
    DECL_GIMPLE_REG_P (t) = 1;

  /* Build "t = val", unsharing VAL since it may appear elsewhere.  */
  mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));

  /* Carry VAL's location onto the initialization if it has one.  */
  if (EXPR_HAS_LOCATION (val))
    SET_EXPR_LOCATION (mod, EXPR_LOCATION (val));
  else
    SET_EXPR_LOCATION (mod, input_location);

  /* gimplify_modify_expr might want to reduce this further.  */
  gimplify_and_add (mod, pre_p);
  ggc_free (mod);

  /* If we're gimplifying into ssa, gimplify_modify_expr will have
     given our temporary an SSA name.  Find and return it.  */
  if (gimplify_ctxp->into_ssa)
    {
      gimple last = gimple_seq_last_stmt (*pre_p);
      t = gimple_get_lhs (last);
    }

  return t;
}
632
633 /* Returns a formal temporary variable initialized with VAL. PRE_P is as
634 in gimplify_expr. Only use this function if:
635
636 1) The value of the unfactored expression represented by VAL will not
637 change between the initialization and use of the temporary, and
638 2) The temporary will not be otherwise modified.
639
640 For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
641 and #2 means it is inappropriate for && temps.
642
643 For other cases, use get_initialized_tmp_var instead. */
644
tree
get_formal_tmp_var (tree val, gimple_seq *pre_p)
{
  /* Formal temporaries may be shared across occurrences of VAL.  */
  return internal_get_tmp_var (val, pre_p, NULL, true);
}
650
651 /* Returns a temporary variable initialized with VAL. PRE_P and POST_P
652 are as in gimplify_expr. */
653
tree
get_initialized_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p)
{
  /* Non-formal: always a fresh temporary, never shared.  */
  return internal_get_tmp_var (val, pre_p, post_p, false);
}
659
660 /* Declares all the variables in VARS in SCOPE. If DEBUG_INFO is
661 true, generate debug info for them; otherwise don't. */
662
void
declare_vars (tree vars, gimple scope, bool debug_info)
{
  tree last = vars;
  if (last)
    {
      tree temps, block;

      gcc_assert (gimple_code (scope) == GIMPLE_BIND);

      /* VARS arrives newest-first; reverse it.  After this, TEMPS is
	 the new head and LAST is the tail of the same chain.  */
      temps = nreverse (last);

      block = gimple_bind_block (scope);
      gcc_assert (!block || TREE_CODE (block) == BLOCK);
      if (!block || !debug_info)
	{
	  /* Splice TEMPS in front of the bind's existing vars.  */
	  TREE_CHAIN (last) = gimple_bind_vars (scope);
	  gimple_bind_set_vars (scope, temps);
	}
      else
	{
	  /* We need to attach the nodes both to the BIND_EXPR and to its
	     associated BLOCK for debugging purposes.  The key point here
	     is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
	     is a subchain of the BIND_EXPR_VARS of the BIND_EXPR.  */
	  if (BLOCK_VARS (block))
	    BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
	  else
	    {
	      gimple_bind_set_vars (scope,
				    chainon (gimple_bind_vars (scope), temps));
	      BLOCK_VARS (block) = temps;
	    }
	}
    }
}
699
700 /* For VAR a VAR_DECL of variable size, try to find a constant upper bound
701 for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly. Abort if
702 no such upper bound can be obtained. */
703
static void
force_constant_size (tree var)
{
  /* The only attempt we make is by querying the maximum size of objects
     of the variable's type.  */

  HOST_WIDE_INT max_size;

  gcc_assert (TREE_CODE (var) == VAR_DECL);

  max_size = max_int_size_in_bytes (TREE_TYPE (var));

  /* A negative result means no constant upper bound exists; we cannot
     proceed in that case.  */
  gcc_assert (max_size >= 0);

  /* Pin both the byte and bit sizes to the constant bound.  */
  DECL_SIZE_UNIT (var)
    = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
  DECL_SIZE (var)
    = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
}
723
/* Push the temporary variable TMP into the current binding.  TMP must
   not yet be chained anywhere or have been seen in a bind expr.  */

void
gimple_add_tmp_var (tree tmp)
{
  gcc_assert (!TREE_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));

  /* Later processing assumes that the object size is constant, which might
     not be true at this point.  Force the use of a constant upper bound in
     this case.  */
  if (!host_integerp (DECL_SIZE_UNIT (tmp), 1))
    force_constant_size (tmp);

  DECL_CONTEXT (tmp) = current_function_decl;
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;

  if (gimplify_ctxp)
    {
      /* Chain onto the gimplifier's temporary list; pop_gimplify_context
	 will declare them in the right scope.  */
      TREE_CHAIN (tmp) = gimplify_ctxp->temps;
      gimplify_ctxp->temps = tmp;

      /* Mark temporaries local within the nearest enclosing parallel.  */
      if (gimplify_omp_ctxp)
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  while (ctx && ctx->region_type == ORT_WORKSHARE)
	    ctx = ctx->outer_context;
	  if (ctx)
	    omp_add_variable (ctx, tmp, GOVD_LOCAL | GOVD_SEEN);
	}
    }
  else if (cfun)
    record_vars (tmp);
  else
    {
      gimple_seq body_seq;

      /* This case is for nested functions.  We need to expose the locals
	 they create.  */
      body_seq = gimple_body (current_function_decl);
      declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
    }
}
765
766 /* Determines whether to assign a location to the statement GS. */
767
768 static bool
769 should_carry_location_p (gimple gs)
770 {
771 /* Don't emit a line note for a label. We particularly don't want to
772 emit one for the break label, since it doesn't actually correspond
773 to the beginning of the loop/switch. */
774 if (gimple_code (gs) == GIMPLE_LABEL)
775 return false;
776
777 return true;
778 }
779
780
781 /* Return true if a location should not be emitted for this statement
782 by annotate_one_with_location. */
783
static inline bool
gimple_do_not_emit_location_p (gimple g)
{
  /* The flag lives in pass-local-flag 1; see
     gimple_set_do_not_emit_location.  */
  return gimple_plf (g, GF_PLF_1);
}
789
790 /* Mark statement G so a location will not be emitted by
791 annotate_one_with_location. */
792
static inline void
gimple_set_do_not_emit_location (gimple g)
{
  /* The PLF flags are initialized to 0 when a new tuple is created,
     so no need to initialize it anywhere.  */
  gimple_set_plf (g, GF_PLF_1, true);
}
800
801 /* Set the location for gimple statement GS to LOCATION. */
802
803 static void
804 annotate_one_with_location (gimple gs, location_t location)
805 {
806 if (!gimple_has_location (gs)
807 && !gimple_do_not_emit_location_p (gs)
808 && should_carry_location_p (gs))
809 gimple_set_location (gs, location);
810 }
811
812
813 /* Set LOCATION for all the statements after iterator GSI in sequence
814 SEQ. If GSI is pointing to the end of the sequence, start with the
815 first statement in SEQ. */
816
static void
annotate_all_with_location_after (gimple_seq seq, gimple_stmt_iterator gsi,
				  location_t location)
{
  /* An end iterator means "annotate the whole sequence"; otherwise skip
     GSI itself and start with its successor.  */
  if (gsi_end_p (gsi))
    gsi = gsi_start (seq);
  else
    gsi_next (&gsi);

  for (; !gsi_end_p (gsi); gsi_next (&gsi))
    annotate_one_with_location (gsi_stmt (gsi), location);
}
829
830
831 /* Set the location for all the statements in a sequence STMT_P to LOCATION. */
832
833 void
834 annotate_all_with_location (gimple_seq stmt_p, location_t location)
835 {
836 gimple_stmt_iterator i;
837
838 if (gimple_seq_empty_p (stmt_p))
839 return;
840
841 for (i = gsi_start (stmt_p); !gsi_end_p (i); gsi_next (&i))
842 {
843 gimple gs = gsi_stmt (i);
844 annotate_one_with_location (gs, location);
845 }
846 }
847
848
849 /* Similar to copy_tree_r() but do not copy SAVE_EXPR or TARGET_EXPR nodes.
850 These nodes model computations that should only be done once. If we
851 were to unshare something like SAVE_EXPR(i++), the gimplification
852 process would create wrong code. */
853
static tree
mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
{
  enum tree_code code = TREE_CODE (*tp);
  /* Don't unshare types, decls, constants and SAVE_EXPR nodes.  */
  if (TREE_CODE_CLASS (code) == tcc_type
      || TREE_CODE_CLASS (code) == tcc_declaration
      || TREE_CODE_CLASS (code) == tcc_constant
      || code == SAVE_EXPR || code == TARGET_EXPR
      /* We can't do anything sensible with a BLOCK used as an expression,
	 but we also can't just die when we see it because of non-expression
	 uses.  So just avert our eyes and cross our fingers.  Silly Java.  */
      || code == BLOCK)
    *walk_subtrees = 0;
  else
    {
      /* BIND_EXPRs must have been lowered before unsharing runs.  */
      gcc_assert (code != BIND_EXPR);
      copy_tree_r (tp, walk_subtrees, data);
    }

  return NULL_TREE;
}
876
877 /* Callback for walk_tree to unshare most of the shared trees rooted at
878 *TP. If *TP has been visited already (i.e., TREE_VISITED (*TP) == 1),
879 then *TP is deep copied by calling copy_tree_r.
880
881 This unshares the same trees as copy_tree_r with the exception of
882 SAVE_EXPR nodes. These nodes model computations that should only be
883 done once. If we were to unshare something like SAVE_EXPR(i++), the
884 gimplification process would create wrong code. */
885
static tree
copy_if_shared_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
		  void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Skip types, decls, and constants.  But we do want to look at their
     types and the bounds of types.  Mark them as visited so we properly
     unmark their subtrees on the unmark pass.  If we've already seen them,
     don't look down further.  */
  if (TREE_CODE_CLASS (code) == tcc_type
      || TREE_CODE_CLASS (code) == tcc_declaration
      || TREE_CODE_CLASS (code) == tcc_constant)
    {
      if (TREE_VISITED (t))
	*walk_subtrees = 0;
      else
	TREE_VISITED (t) = 1;
    }

  /* If this node has been visited already, unshare it and don't look
     any deeper.  */
  else if (TREE_VISITED (t))
    {
      /* The deep copy gets fresh nodes, so there is no need to descend
	 into the copy again.  */
      walk_tree (tp, mostly_copy_tree_r, NULL, NULL);
      *walk_subtrees = 0;
    }

  /* Otherwise, mark the tree as visited and keep looking.  */
  else
    TREE_VISITED (t) = 1;

  return NULL_TREE;
}
921
922 static tree
923 unmark_visited_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
924 void *data ATTRIBUTE_UNUSED)
925 {
926 if (TREE_VISITED (*tp))
927 TREE_VISITED (*tp) = 0;
928 else
929 *walk_subtrees = 0;
930
931 return NULL_TREE;
932 }
933
934 /* Unshare all the trees in BODY_P, a pointer into the body of FNDECL, and the
935 bodies of any nested functions if we are unsharing the entire body of
936 FNDECL. */
937
static void
unshare_body (tree *body_p, tree fndecl)
{
  struct cgraph_node *cgn = cgraph_node (fndecl);

  walk_tree (body_p, copy_if_shared_r, NULL, NULL);
  /* Only recurse into nested functions when we were handed the whole
     saved tree of FNDECL, not some sub-expression of it.  */
  if (body_p == &DECL_SAVED_TREE (fndecl))
    for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
      unshare_body (&DECL_SAVED_TREE (cgn->decl), cgn->decl);
}
948
949 /* Likewise, but mark all trees as not visited. */
950
static void
unvisit_body (tree *body_p, tree fndecl)
{
  struct cgraph_node *cgn = cgraph_node (fndecl);

  walk_tree (body_p, unmark_visited_r, NULL, NULL);
  /* Mirror unshare_body: recurse into nested functions only when
     processing FNDECL's entire saved tree.  */
  if (body_p == &DECL_SAVED_TREE (fndecl))
    for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
      unvisit_body (&DECL_SAVED_TREE (cgn->decl), cgn->decl);
}
961
962 /* Unconditionally make an unshared copy of EXPR. This is used when using
963 stored expressions which span multiple functions, such as BINFO_VTABLE,
964 as the normal unsharing process can't tell that they're shared. */
965
tree
unshare_expr (tree expr)
{
  /* mostly_copy_tree_r copies unconditionally (except types, decls,
     constants, SAVE_EXPRs...), so no visited marks are needed here.  */
  walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
  return expr;
}
972 \f
973 /* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
974 contain statements and have a value. Assign its value to a temporary
975 and give it void_type_node. Returns the temporary, or NULL_TREE if
976 WRAPPER was already void. */
977
tree
voidify_wrapper_expr (tree wrapper, tree temp)
{
  tree type = TREE_TYPE (wrapper);
  if (type && !VOID_TYPE_P (type))
    {
      tree *p;

      /* Set p to point to the body of the wrapper.  Loop until we find
	 something that isn't a wrapper.  Every wrapper visited on the
	 way down is voidified, since its value moves to *p.  */
      for (p = &wrapper; p && *p; )
	{
	  switch (TREE_CODE (*p))
	    {
	    case BIND_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      /* For a BIND_EXPR, the body is operand 1.  */
	      p = &BIND_EXPR_BODY (*p);
	      break;

	    case CLEANUP_POINT_EXPR:
	    case TRY_FINALLY_EXPR:
	    case TRY_CATCH_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TREE_OPERAND (*p, 0);
	      break;

	    case STATEMENT_LIST:
	      {
		/* The value of a statement list is its last statement.  */
		tree_stmt_iterator i = tsi_last (*p);
		TREE_SIDE_EFFECTS (*p) = 1;
		TREE_TYPE (*p) = void_type_node;
		p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
	      }
	      break;

	    case COMPOUND_EXPR:
	      /* Advance to the last statement.  Set all container types to void.  */
	      for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		}
	      break;

	    default:
	      goto out;
	    }
	}

    out:
      if (p == NULL || IS_EMPTY_STMT (*p))
	temp = NULL_TREE;
      else if (temp)
	{
	  /* The wrapper is on the RHS of an assignment that we're pushing
	     down.  */
	  gcc_assert (TREE_CODE (temp) == INIT_EXPR
		      || TREE_CODE (temp) == MODIFY_EXPR);
	  TREE_OPERAND (temp, 1) = *p;
	  *p = temp;
	}
      else
	{
	  /* No destination supplied: capture the value in a fresh
	     "retval" temporary at the innermost value position.  */
	  temp = create_tmp_var (type, "retval");
	  *p = build2 (INIT_EXPR, type, temp, *p);
	}

      return temp;
    }

  return NULL_TREE;
}
1053
1054 /* Prepare calls to builtins to SAVE and RESTORE the stack as well as
1055 a temporary through which they communicate. */
1056
static void
build_stack_save_restore (gimple *save, gimple *restore)
{
  tree tmp_var;

  /* saved_stack.N = __builtin_stack_save ();  */
  *save = gimple_build_call (implicit_built_in_decls[BUILT_IN_STACK_SAVE], 0);
  tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
  gimple_call_set_lhs (*save, tmp_var);

  /* __builtin_stack_restore (saved_stack.N);  */
  *restore = gimple_build_call (implicit_built_in_decls[BUILT_IN_STACK_RESTORE],
				1, tmp_var);
}
1069
1070 /* Gimplify a BIND_EXPR. Just voidify and recurse. */
1071
static enum gimplify_status
gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree bind_expr = *expr_p;
  bool old_save_stack = gimplify_ctxp->save_stack;
  tree t;
  gimple gimple_bind;
  gimple_seq body;

  /* If the BIND_EXPR is used for its value, voidify it; TEMP then
     holds the temporary that receives that value (NULL otherwise).  */
  tree temp = voidify_wrapper_expr (bind_expr, NULL);

  /* Mark variables seen in this bind expr.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = TREE_CHAIN (t))
    {
      if (TREE_CODE (t) == VAR_DECL)
        {
          struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;

          /* Mark variable as local.  */
          if (ctx && !is_global_var (t)
              && (! DECL_SEEN_IN_BIND_EXPR_P (t)
                  || splay_tree_lookup (ctx->variables,
                                        (splay_tree_key) t) == NULL))
            omp_add_variable (gimplify_omp_ctxp, t, GOVD_LOCAL | GOVD_SEEN);

          DECL_SEEN_IN_BIND_EXPR_P (t) = 1;

          /* Record on CFUN that an explicit hard-register local exists,
             so later passes can take that into account.  */
          if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
            cfun->has_local_explicit_reg_vars = true;
        }

      /* Preliminarily mark non-addressed complex variables as eligible
         for promotion to gimple registers.  We'll transform their uses
         as we find them.
         We exclude complex types if not optimizing because they can be
         subject to partial stores in GNU C by means of the __real__ and
         __imag__ operators and we cannot promote them to total stores
         (see gimplify_modify_expr_complex_part).  */
      if (optimize
          && (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
              || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
          && !TREE_THIS_VOLATILE (t)
          && (TREE_CODE (t) == VAR_DECL && !DECL_HARD_REGISTER (t))
          && !needs_to_live_in_memory (t))
        DECL_GIMPLE_REG_P (t) = 1;
    }

  gimple_bind = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
                                   BIND_EXPR_BLOCK (bind_expr));
  gimple_push_bind_expr (gimple_bind);

  /* Reset save_stack so we can tell whether the body below needs a
     stack save/restore (e.g. contains a VLA); restored at the end.  */
  gimplify_ctxp->save_stack = false;

  /* Gimplify the body into the GIMPLE_BIND tuple's body.  */
  body = NULL;
  gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
  gimple_bind_set_body (gimple_bind, body);

  if (gimplify_ctxp->save_stack)
    {
      gimple stack_save, stack_restore, gs;
      gimple_seq cleanup, new_body;

      /* Save stack on entry and restore it on exit.  Add a try_finally
         block to achieve this.  Note that mudflap depends on the
         format of the emitted code: see mx_register_decls().  */
      build_stack_save_restore (&stack_save, &stack_restore);

      cleanup = new_body = NULL;
      gimplify_seq_add_stmt (&cleanup, stack_restore);
      gs = gimple_build_try (gimple_bind_body (gimple_bind), cleanup,
                             GIMPLE_TRY_FINALLY);

      gimplify_seq_add_stmt (&new_body, stack_save);
      gimplify_seq_add_stmt (&new_body, gs);
      gimple_bind_set_body (gimple_bind, new_body);
    }

  gimplify_ctxp->save_stack = old_save_stack;
  gimple_pop_bind_expr ();

  gimplify_seq_add_stmt (pre_p, gimple_bind);

  /* If voidify_wrapper_expr produced a value temporary, the BIND_EXPR
     is replaced by that temporary and needs re-examination (GS_OK).  */
  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL_TREE;
  return GS_ALL_DONE;
}
1164
1165 /* Gimplify a RETURN_EXPR. If the expression to be returned is not a
1166 GIMPLE value, it is assigned to a new temporary and the statement is
1167 re-written to return the temporary.
1168
1169 PRE_P points to the sequence where side effects that must happen before
1170 STMT should be stored. */
1171
1172 static enum gimplify_status
1173 gimplify_return_expr (tree stmt, gimple_seq *pre_p)
1174 {
1175 gimple ret;
1176 tree ret_expr = TREE_OPERAND (stmt, 0);
1177 tree result_decl, result;
1178
1179 if (ret_expr == error_mark_node)
1180 return GS_ERROR;
1181
1182 if (!ret_expr
1183 || TREE_CODE (ret_expr) == RESULT_DECL
1184 || ret_expr == error_mark_node)
1185 {
1186 gimple ret = gimple_build_return (ret_expr);
1187 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1188 gimplify_seq_add_stmt (pre_p, ret);
1189 return GS_ALL_DONE;
1190 }
1191
1192 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
1193 result_decl = NULL_TREE;
1194 else
1195 {
1196 result_decl = TREE_OPERAND (ret_expr, 0);
1197
1198 /* See through a return by reference. */
1199 if (TREE_CODE (result_decl) == INDIRECT_REF)
1200 result_decl = TREE_OPERAND (result_decl, 0);
1201
1202 gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
1203 || TREE_CODE (ret_expr) == INIT_EXPR)
1204 && TREE_CODE (result_decl) == RESULT_DECL);
1205 }
1206
1207 /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
1208 Recall that aggregate_value_p is FALSE for any aggregate type that is
1209 returned in registers. If we're returning values in registers, then
1210 we don't want to extend the lifetime of the RESULT_DECL, particularly
1211 across another call. In addition, for those aggregates for which
1212 hard_function_value generates a PARALLEL, we'll die during normal
1213 expansion of structure assignments; there's special code in expand_return
1214 to handle this case that does not exist in expand_expr. */
1215 if (!result_decl
1216 || aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
1217 result = result_decl;
1218 else if (gimplify_ctxp->return_temp)
1219 result = gimplify_ctxp->return_temp;
1220 else
1221 {
1222 result = create_tmp_var (TREE_TYPE (result_decl), NULL);
1223 if (TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
1224 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
1225 DECL_GIMPLE_REG_P (result) = 1;
1226
1227 /* ??? With complex control flow (usually involving abnormal edges),
1228 we can wind up warning about an uninitialized value for this. Due
1229 to how this variable is constructed and initialized, this is never
1230 true. Give up and never warn. */
1231 TREE_NO_WARNING (result) = 1;
1232
1233 gimplify_ctxp->return_temp = result;
1234 }
1235
1236 /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
1237 Then gimplify the whole thing. */
1238 if (result != result_decl)
1239 TREE_OPERAND (ret_expr, 0) = result;
1240
1241 gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);
1242
1243 ret = gimple_build_return (result);
1244 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1245 gimplify_seq_add_stmt (pre_p, ret);
1246
1247 return GS_ALL_DONE;
1248 }
1249
1250 static void
1251 gimplify_vla_decl (tree decl, gimple_seq *seq_p)
1252 {
1253 /* This is a variable-sized decl. Simplify its size and mark it
1254 for deferred expansion. Note that mudflap depends on the format
1255 of the emitted code: see mx_register_decls(). */
1256 tree t, addr, ptr_type;
1257
1258 gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
1259 gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);
1260
1261 /* All occurrences of this decl in final gimplified code will be
1262 replaced by indirection. Setting DECL_VALUE_EXPR does two
1263 things: First, it lets the rest of the gimplifier know what
1264 replacement to use. Second, it lets the debug info know
1265 where to find the value. */
1266 ptr_type = build_pointer_type (TREE_TYPE (decl));
1267 addr = create_tmp_var (ptr_type, get_name (decl));
1268 DECL_IGNORED_P (addr) = 0;
1269 t = build_fold_indirect_ref (addr);
1270 SET_DECL_VALUE_EXPR (decl, t);
1271 DECL_HAS_VALUE_EXPR_P (decl) = 1;
1272
1273 t = built_in_decls[BUILT_IN_ALLOCA];
1274 t = build_call_expr (t, 1, DECL_SIZE_UNIT (decl));
1275 t = fold_convert (ptr_type, t);
1276 t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
1277
1278 gimplify_and_add (t, seq_p);
1279
1280 /* Indicate that we need to restore the stack level when the
1281 enclosing BIND_EXPR is exited. */
1282 gimplify_ctxp->save_stack = true;
1283 }
1284
1285
1286 /* Gimplifies a DECL_EXPR node *STMT_P by making any necessary allocation
1287 and initialization explicit. */
1288
static enum gimplify_status
gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
{
  tree stmt = *stmt_p;
  tree decl = DECL_EXPR_DECL (stmt);

  /* The DECL_EXPR statement itself is consumed here; everything it
     implies is emitted onto SEQ_P.  */
  *stmt_p = NULL_TREE;

  if (TREE_TYPE (decl) == error_mark_node)
    return GS_ERROR;

  /* Gimplify any variable sizes/positions in the type, exactly once.  */
  if ((TREE_CODE (decl) == TYPE_DECL
       || TREE_CODE (decl) == VAR_DECL)
      && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
    gimplify_type_sizes (TREE_TYPE (decl), seq_p);

  if (TREE_CODE (decl) == VAR_DECL && !DECL_EXTERNAL (decl))
    {
      tree init = DECL_INITIAL (decl);

      /* A variable-sized decl -- or a fixed-size automatic big enough to
         trip generic stack checking -- gets explicit alloca treatment.  */
      if (TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
          || (!TREE_STATIC (decl)
              && flag_stack_check == GENERIC_STACK_CHECK
              && compare_tree_int (DECL_SIZE_UNIT (decl),
                                   STACK_CHECK_MAX_VAR_SIZE) > 0))
        gimplify_vla_decl (decl, seq_p);

      if (init && init != error_mark_node)
        {
          if (!TREE_STATIC (decl))
            {
              /* Turn the initializer into an explicit INIT_EXPR statement;
                 DECL_INITIAL is cleared since it is now redundant.  */
              DECL_INITIAL (decl) = NULL_TREE;
              init = build2 (INIT_EXPR, void_type_node, decl, init);
              gimplify_and_add (init, seq_p);
              ggc_free (init);
            }
          else
            /* We must still examine initializers for static variables
               as they may contain a label address.  */
            walk_tree (&init, force_labels_r, NULL, NULL);
        }

      /* Some front ends do not explicitly declare all anonymous
         artificial variables.  We compensate here by declaring the
         variables, though it would be better if the front ends would
         explicitly declare them.  */
      if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
          && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
        gimple_add_tmp_var (decl);
    }

  return GS_ALL_DONE;
}
1342
1343 /* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body
1344 and replacing the LOOP_EXPR with goto, but if the loop contains an
1345 EXIT_EXPR, we need to append a label for it to jump to. */
1346
1347 static enum gimplify_status
1348 gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
1349 {
1350 tree saved_label = gimplify_ctxp->exit_label;
1351 tree start_label = create_artificial_label (UNKNOWN_LOCATION);
1352
1353 gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));
1354
1355 gimplify_ctxp->exit_label = NULL_TREE;
1356
1357 gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);
1358
1359 gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));
1360
1361 if (gimplify_ctxp->exit_label)
1362 gimplify_seq_add_stmt (pre_p, gimple_build_label (gimplify_ctxp->exit_label));
1363
1364 gimplify_ctxp->exit_label = saved_label;
1365
1366 *expr_p = NULL;
1367 return GS_ALL_DONE;
1368 }
1369
1370 /* Gimplifies a statement list onto a sequence. These may be created either
1371 by an enlightened front-end, or by shortcut_cond_expr. */
1372
1373 static enum gimplify_status
1374 gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
1375 {
1376 tree temp = voidify_wrapper_expr (*expr_p, NULL);
1377
1378 tree_stmt_iterator i = tsi_start (*expr_p);
1379
1380 while (!tsi_end_p (i))
1381 {
1382 gimplify_stmt (tsi_stmt_ptr (i), pre_p);
1383 tsi_delink (&i);
1384 }
1385
1386 if (temp)
1387 {
1388 *expr_p = temp;
1389 return GS_OK;
1390 }
1391
1392 return GS_ALL_DONE;
1393 }
1394
1395 /* Compare two case labels. Because the front end should already have
1396 made sure that case ranges do not overlap, it is enough to only compare
1397 the CASE_LOW values of each case label. */
1398
1399 static int
1400 compare_case_labels (const void *p1, const void *p2)
1401 {
1402 const_tree const case1 = *(const_tree const*)p1;
1403 const_tree const case2 = *(const_tree const*)p2;
1404
1405 /* The 'default' case label always goes first. */
1406 if (!CASE_LOW (case1))
1407 return -1;
1408 else if (!CASE_LOW (case2))
1409 return 1;
1410 else
1411 return tree_int_cst_compare (CASE_LOW (case1), CASE_LOW (case2));
1412 }
1413
1414
1415 /* Sort the case labels in LABEL_VEC in place in ascending order. */
1416
1417 void
1418 sort_case_labels (VEC(tree,heap)* label_vec)
1419 {
1420 size_t len = VEC_length (tree, label_vec);
1421 qsort (VEC_address (tree, label_vec), len, sizeof (tree),
1422 compare_case_labels);
1423 }
1424
1425
1426 /* Gimplify a SWITCH_EXPR, and collect a TREE_VEC of the labels it can
1427 branch to. */
1428
static enum gimplify_status
gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree switch_expr = *expr_p;
  gimple_seq switch_body_seq = NULL;
  enum gimplify_status ret;

  ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
                       fb_rvalue);
  if (ret == GS_ERROR || ret == GS_UNHANDLED)
    return ret;

  if (SWITCH_BODY (switch_expr))
    {
      VEC (tree,heap) *labels;
      VEC (tree,heap) *saved_labels;
      tree default_case = NULL_TREE;
      size_t i, len;
      gimple gimple_switch;

      /* If someone can be bothered to fill in the labels, they can
         be bothered to null out the body too.  */
      gcc_assert (!SWITCH_LABELS (switch_expr));

      /* save old labels, get new ones from body, then restore the old
         labels.  Save all the things from the switch body to append after.  */
      saved_labels = gimplify_ctxp->case_labels;
      gimplify_ctxp->case_labels = VEC_alloc (tree, heap, 8);

      gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);
      labels = gimplify_ctxp->case_labels;
      gimplify_ctxp->case_labels = saved_labels;

      /* Weed out the labels collected from the body: drop empty ranges
         and pull the default case (the one with no CASE_LOW) out of the
         vector.  */
      i = 0;
      while (i < VEC_length (tree, labels))
        {
          tree elt = VEC_index (tree, labels, i);
          tree low = CASE_LOW (elt);
          bool remove_element = FALSE;

          if (low)
            {
              /* Discard empty ranges.  */
              tree high = CASE_HIGH (elt);
              if (high && tree_int_cst_lt (high, low))
                remove_element = TRUE;
            }
          else
            {
              /* The default case must be the last label in the list.  */
              gcc_assert (!default_case);
              default_case = elt;
              remove_element = TRUE;
            }

          if (remove_element)
            VEC_ordered_remove (tree, labels, i);
          else
            i++;
        }
      len = i;

      if (!VEC_empty (tree, labels))
        sort_case_labels (labels);

      if (!default_case)
        {
          tree type = TREE_TYPE (switch_expr);

          /* If the switch has no default label, add one, so that we jump
             around the switch body.  If the labels already cover the whole
             range of type, add the default label pointing to one of the
             existing labels.  */
          if (type == void_type_node)
            type = TREE_TYPE (SWITCH_COND (switch_expr));
          if (len
              && INTEGRAL_TYPE_P (type)
              && TYPE_MIN_VALUE (type)
              && TYPE_MAX_VALUE (type)
              && tree_int_cst_equal (CASE_LOW (VEC_index (tree, labels, 0)),
                                     TYPE_MIN_VALUE (type)))
            {
              tree low, high = CASE_HIGH (VEC_index (tree, labels, len - 1));
              if (!high)
                high = CASE_LOW (VEC_index (tree, labels, len - 1));
              if (tree_int_cst_equal (high, TYPE_MAX_VALUE (type)))
                {
                  /* Check the sorted labels for full coverage: each
                     label's low bound must be exactly one more than the
                     previous label's high bound, computed with
                     double-word arithmetic on the INTEGER_CST halves
                     (carry into the high word when the low word of HIGH
                     wrapped to zero).  */
                  for (i = 1; i < len; i++)
                    {
                      high = CASE_LOW (VEC_index (tree, labels, i));
                      low = CASE_HIGH (VEC_index (tree, labels, i - 1));
                      if (!low)
                        low = CASE_LOW (VEC_index (tree, labels, i - 1));
                      if ((TREE_INT_CST_LOW (low) + 1
                           != TREE_INT_CST_LOW (high))
                          || (TREE_INT_CST_HIGH (low)
                              + (TREE_INT_CST_LOW (high) == 0)
                              != TREE_INT_CST_HIGH (high)))
                        break;
                    }
                  /* No gaps: reuse the first label as the default.  */
                  if (i == len)
                    default_case = build3 (CASE_LABEL_EXPR, void_type_node,
                                           NULL_TREE, NULL_TREE,
                                           CASE_LABEL (VEC_index (tree,
                                                                  labels, 0)));
                }
            }

          if (!default_case)
            {
              gimple new_default;

              /* Labels do not cover the range: make a fresh default
                 label and append it at the end of the switch body so
                 the default falls through past all cases.  */
              default_case
                = build3 (CASE_LABEL_EXPR, void_type_node,
                          NULL_TREE, NULL_TREE,
                          create_artificial_label (UNKNOWN_LOCATION));
              new_default = gimple_build_label (CASE_LABEL (default_case));
              gimplify_seq_add_stmt (&switch_body_seq, new_default);
            }
        }

      gimple_switch = gimple_build_switch_vec (SWITCH_COND (switch_expr),
                                               default_case, labels);
      gimplify_seq_add_stmt (pre_p, gimple_switch);
      gimplify_seq_add_seq (pre_p, switch_body_seq);
      VEC_free(tree, heap, labels);
    }
  else
    gcc_assert (SWITCH_LABELS (switch_expr));

  return GS_ALL_DONE;
}
1561
1562
1563 static enum gimplify_status
1564 gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
1565 {
1566 struct gimplify_ctx *ctxp;
1567 gimple gimple_label;
1568
1569 /* Invalid OpenMP programs can play Duff's Device type games with
1570 #pragma omp parallel. At least in the C front end, we don't
1571 detect such invalid branches until after gimplification. */
1572 for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
1573 if (ctxp->case_labels)
1574 break;
1575
1576 gimple_label = gimple_build_label (CASE_LABEL (*expr_p));
1577 VEC_safe_push (tree, heap, ctxp->case_labels, *expr_p);
1578 gimplify_seq_add_stmt (pre_p, gimple_label);
1579
1580 return GS_ALL_DONE;
1581 }
1582
1583 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
1584 if necessary. */
1585
1586 tree
1587 build_and_jump (tree *label_p)
1588 {
1589 if (label_p == NULL)
1590 /* If there's nowhere to jump, just fall through. */
1591 return NULL_TREE;
1592
1593 if (*label_p == NULL_TREE)
1594 {
1595 tree label = create_artificial_label (UNKNOWN_LOCATION);
1596 *label_p = label;
1597 }
1598
1599 return build1 (GOTO_EXPR, void_type_node, *label_p);
1600 }
1601
1602 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
1603 This also involves building a label to jump to and communicating it to
1604 gimplify_loop_expr through gimplify_ctxp->exit_label. */
1605
1606 static enum gimplify_status
1607 gimplify_exit_expr (tree *expr_p)
1608 {
1609 tree cond = TREE_OPERAND (*expr_p, 0);
1610 tree expr;
1611
1612 expr = build_and_jump (&gimplify_ctxp->exit_label);
1613 expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
1614 *expr_p = expr;
1615
1616 return GS_OK;
1617 }
1618
1619 /* A helper function to be called via walk_tree. Mark all labels under *TP
1620 as being forced. To be called for DECL_INITIAL of static variables. */
1621
1622 tree
1623 force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1624 {
1625 if (TYPE_P (*tp))
1626 *walk_subtrees = 0;
1627 if (TREE_CODE (*tp) == LABEL_DECL)
1628 FORCED_LABEL (*tp) = 1;
1629
1630 return NULL_TREE;
1631 }
1632
1633 /* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is
1634 different from its canonical type, wrap the whole thing inside a
1635 NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
1636 type.
1637
1638 The canonical type of a COMPONENT_REF is the type of the field being
1639 referenced--unless the field is a bit-field which can be read directly
1640 in a smaller mode, in which case the canonical type is the
1641 sign-appropriate type corresponding to that mode. */
1642
static void
canonicalize_component_ref (tree *expr_p)
{
  tree expr = *expr_p;
  tree type;

  gcc_assert (TREE_CODE (expr) == COMPONENT_REF);

  /* For integral fields, get_unwidened picks the sign-appropriate type
     of the smallest mode the bit-field can be read in; otherwise the
     canonical type is simply the field's type.  */
  if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
    type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
  else
    type = TREE_TYPE (TREE_OPERAND (expr, 1));

  /* One could argue that all the stuff below is not necessary for
     the non-bitfield case and declare it a FE error if type
     adjustment would be needed.  */
  if (TREE_TYPE (expr) != type)
    {
#ifdef ENABLE_TYPES_CHECKING
      tree old_type = TREE_TYPE (expr);
#endif
      int type_quals;

      /* We need to preserve qualifiers and propagate them from
         operand 0.  */
      type_quals = TYPE_QUALS (type)
        | TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
      if (TYPE_QUALS (type) != type_quals)
        type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);

      /* Set the type of the COMPONENT_REF to the underlying type.  */
      TREE_TYPE (expr) = type;

#ifdef ENABLE_TYPES_CHECKING
      /* It is now a FE error, if the conversion from the canonical
         type to the original expression type is not useless.  */
      gcc_assert (useless_type_conversion_p (old_type, type));
#endif
    }
}
1683
1684 /* If a NOP conversion is changing a pointer to array of foo to a pointer
1685 to foo, embed that change in the ADDR_EXPR by converting
1686 T array[U];
1687 (T *)&array
1688 ==>
1689 &array[L]
1690 where L is the lower bound. For simplicity, only do this for constant
1691 lower bound.
1692 The constraint is that the type of &array[L] is trivially convertible
1693 to T *. */
1694
static void
canonicalize_addr_expr (tree *expr_p)
{
  tree expr = *expr_p;
  tree addr_expr = TREE_OPERAND (expr, 0);
  tree datype, ddatype, pddatype;

  /* We simplify only conversions from an ADDR_EXPR to a pointer type.  */
  if (!POINTER_TYPE_P (TREE_TYPE (expr))
      || TREE_CODE (addr_expr) != ADDR_EXPR)
    return;

  /* The addr_expr type should be a pointer to an array.  */
  datype = TREE_TYPE (TREE_TYPE (addr_expr));
  if (TREE_CODE (datype) != ARRAY_TYPE)
    return;

  /* The pointer to element type shall be trivially convertible to
     the expression pointer type.  */
  ddatype = TREE_TYPE (datype);
  pddatype = build_pointer_type (ddatype);
  if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
                                  pddatype))
    return;

  /* The lower bound and element sizes must be constant.  */
  if (!TYPE_SIZE_UNIT (ddatype)
      || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
      || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
      || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
    return;

  /* All checks succeeded.  Build a new node to merge the cast:
     (T *)&array  ==>  &array[L]  with L the (constant) lower bound.  */
  *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
                    TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
                    NULL_TREE, NULL_TREE);
  *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);

  /* We can have stripped a required restrict qualifier above.  */
  if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
    *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
}
1737
1738 /* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions
1739 underneath as appropriate. */
1740
static enum gimplify_status
gimplify_conversion (tree *expr_p)
{
  tree tem;
  location_t loc = EXPR_LOCATION (*expr_p);
  gcc_assert (CONVERT_EXPR_P (*expr_p));

  /* Then strip away all but the outermost conversion.  */
  STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));

  /* And remove the outermost conversion if it's useless.  */
  if (tree_ssa_useless_type_conversion (*expr_p))
    *expr_p = TREE_OPERAND (*expr_p, 0);

  /* Attempt to avoid NOP_EXPR by producing reference to a subtype.
     For example this fold (subclass *)&A into &A->subclass avoiding
     a need for statement.  */
  if (CONVERT_EXPR_P (*expr_p)
      && POINTER_TYPE_P (TREE_TYPE (*expr_p))
      && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (*expr_p, 0)))
      && (tem = maybe_fold_offset_to_address
          (EXPR_LOCATION (*expr_p), TREE_OPERAND (*expr_p, 0),
           integer_zero_node, TREE_TYPE (*expr_p))) != NULL_TREE)
    *expr_p = tem;

  /* If we still have a conversion at the toplevel,
     then canonicalize some constructs.  */
  if (CONVERT_EXPR_P (*expr_p))
    {
      tree sub = TREE_OPERAND (*expr_p, 0);

      /* If a NOP conversion is changing the type of a COMPONENT_REF
         expression, then canonicalize its type now in order to expose more
         redundant conversions.  */
      if (TREE_CODE (sub) == COMPONENT_REF)
        canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));

      /* If a NOP conversion is changing a pointer to array of foo
         to a pointer to foo, embed that change in the ADDR_EXPR.  */
      else if (TREE_CODE (sub) == ADDR_EXPR)
        canonicalize_addr_expr (expr_p);
    }

  /* If we have a conversion to a non-register type force the
     use of a VIEW_CONVERT_EXPR instead.  */
  if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
    *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
                               TREE_OPERAND (*expr_p, 0));

  return GS_OK;
}
1792
1793 /* Nonlocal VLAs seen in the current function. */
1794 static struct pointer_set_t *nonlocal_vlas;
1795
1796 /* Gimplify a VAR_DECL or PARM_DECL. Returns GS_OK if we expanded a
1797 DECL_VALUE_EXPR, and it's worth re-examining things. */
1798
static enum gimplify_status
gimplify_var_or_parm_decl (tree *expr_p)
{
  tree decl = *expr_p;

  /* ??? If this is a local variable, and it has not been seen in any
     outer BIND_EXPR, then it's probably the result of a duplicate
     declaration, for which we've already issued an error.  It would
     be really nice if the front end wouldn't leak these at all.
     Currently the only known culprit is C++ destructors, as seen
     in g++.old-deja/g++.jason/binding.C.  */
  if (TREE_CODE (decl) == VAR_DECL
      && !DECL_SEEN_IN_BIND_EXPR_P (decl)
      && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
      && decl_function_context (decl) == current_function_decl)
    {
      gcc_assert (errorcount || sorrycount);
      return GS_ERROR;
    }

  /* When within an OpenMP context, notice uses of variables.  */
  if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
    return GS_ALL_DONE;

  /* If the decl is an alias for another expression, substitute it now.  */
  if (DECL_HAS_VALUE_EXPR_P (decl))
    {
      tree value_expr = DECL_VALUE_EXPR (decl);

      /* For referenced nonlocal VLAs add a decl for debugging purposes
         to the current function.  */
      if (TREE_CODE (decl) == VAR_DECL
          && TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
          && nonlocal_vlas != NULL
          && TREE_CODE (value_expr) == INDIRECT_REF
          && TREE_CODE (TREE_OPERAND (value_expr, 0)) == VAR_DECL
          && decl_function_context (decl) != current_function_decl)
        {
          struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
          /* Skip workshare contexts; only add the debug copy when not
             inside an OpenMP region.  */
          while (ctx && ctx->region_type == ORT_WORKSHARE)
            ctx = ctx->outer_context;
          /* pointer_set_insert returns nonzero if DECL was already
             recorded, so each nonlocal VLA gets at most one copy.  */
          if (!ctx && !pointer_set_insert (nonlocal_vlas, decl))
            {
              tree copy = copy_node (decl), block;

              lang_hooks.dup_lang_specific_decl (copy);
              SET_DECL_RTL (copy, NULL_RTX);
              TREE_USED (copy) = 1;
              /* Chain the copy onto the outermost block of the current
                 function so debug info can find it.  */
              block = DECL_INITIAL (current_function_decl);
              TREE_CHAIN (copy) = BLOCK_VARS (block);
              BLOCK_VARS (block) = copy;
              SET_DECL_VALUE_EXPR (copy, unshare_expr (value_expr));
              DECL_HAS_VALUE_EXPR_P (copy) = 1;
            }
        }

      /* Substitute an unshared copy of the value expression and ask the
         caller to re-examine the result.  */
      *expr_p = unshare_expr (value_expr);
      return GS_OK;
    }

  return GS_ALL_DONE;
}
1861
1862
1863 /* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
1864 node *EXPR_P.
1865
1866 compound_lval
1867 : min_lval '[' val ']'
1868 | min_lval '.' ID
1869 | compound_lval '[' val ']'
1870 | compound_lval '.' ID
1871
1872 This is not part of the original SIMPLE definition, which separates
1873 array and member references, but it seems reasonable to handle them
1874 together. Also, this way we don't run into problems with union
1875 aliasing; gcc requires that for accesses through a union to alias, the
1876 union reference must be explicit, which was not always the case when we
1877 were splitting up array and member refs.
1878
1879 PRE_P points to the sequence where side effects that must happen before
1880 *EXPR_P should be stored.
1881
1882 POST_P points to the sequence where side effects that must happen after
1883 *EXPR_P should be stored. */
1884
static enum gimplify_status
gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
                        fallback_t fallback)
{
  tree *p;
  VEC(tree,heap) *stack;
  enum gimplify_status ret = GS_OK, tret;
  int i;
  location_t loc = EXPR_LOCATION (*expr_p);

  /* Create a stack of the subexpressions so later we can walk them in
     order from inner to outer.  */
  stack = VEC_alloc (tree, heap, 10);

  /* We can handle anything that get_inner_reference can deal with.  */
  for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
    {
    restart:
      /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs.  */
      if (TREE_CODE (*p) == INDIRECT_REF)
        *p = fold_indirect_ref_loc (loc, *p);

      if (handled_component_p (*p))
        ;
      /* Expand DECL_VALUE_EXPR now.  In some cases that may expose
         additional COMPONENT_REFs.  */
      else if ((TREE_CODE (*p) == VAR_DECL || TREE_CODE (*p) == PARM_DECL)
               && gimplify_var_or_parm_decl (p) == GS_OK)
        goto restart;
      else
        break;

      VEC_safe_push (tree, heap, stack, *p);
    }

  gcc_assert (VEC_length (tree, stack));

  /* Now STACK is a stack of pointers to all the refs we've walked through
     and P points to the innermost expression.

     Java requires that we elaborated nodes in source order.  That
     means we must gimplify the inner expression followed by each of
     the indices, in order.  But we can't gimplify the inner
     expression until we deal with any variable bounds, sizes, or
     positions in order to deal with PLACEHOLDER_EXPRs.

     So we do this in three steps.  First we deal with the annotations
     for any variables in the components, then we gimplify the base,
     then we gimplify any indices, from left to right.  */
  for (i = VEC_length (tree, stack) - 1; i >= 0; i--)
    {
      tree t = VEC_index (tree, stack, i);

      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
        {
          /* Gimplify the low bound and element type size and put them into
             the ARRAY_REF.  If these values are set, they have already been
             gimplified.  */
          if (TREE_OPERAND (t, 2) == NULL_TREE)
            {
              tree low = unshare_expr (array_ref_low_bound (t));
              if (!is_gimple_min_invariant (low))
                {
                  TREE_OPERAND (t, 2) = low;
                  tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
                                        post_p, is_gimple_reg,
                                        fb_rvalue);
                  ret = MIN (ret, tret);
                }
            }

          if (!TREE_OPERAND (t, 3))
            {
              tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
              tree elmt_size = unshare_expr (array_ref_element_size (t));
              tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));

              /* Divide the element size by the alignment of the element
                 type (above).  */
              elmt_size = size_binop_loc (loc, EXACT_DIV_EXPR, elmt_size, factor);

              if (!is_gimple_min_invariant (elmt_size))
                {
                  TREE_OPERAND (t, 3) = elmt_size;
                  tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
                                        post_p, is_gimple_reg,
                                        fb_rvalue);
                  ret = MIN (ret, tret);
                }
            }
        }
      else if (TREE_CODE (t) == COMPONENT_REF)
        {
          /* Set the field offset into T and gimplify it.  */
          if (!TREE_OPERAND (t, 2))
            {
              tree offset = unshare_expr (component_ref_field_offset (t));
              tree field = TREE_OPERAND (t, 1);
              tree factor
                = size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);

              /* Divide the offset by its alignment.  */
              offset = size_binop_loc (loc, EXACT_DIV_EXPR, offset, factor);

              if (!is_gimple_min_invariant (offset))
                {
                  TREE_OPERAND (t, 2) = offset;
                  tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
                                        post_p, is_gimple_reg,
                                        fb_rvalue);
                  ret = MIN (ret, tret);
                }
            }
        }
    }

  /* Step 2 is to gimplify the base expression.  Make sure lvalue is set
     so as to match the min_lval predicate.  Failure to do so may result
     in the creation of large aggregate temporaries.  */
  tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
                        fallback | fb_lvalue);
  ret = MIN (ret, tret);

  /* And finally, the indices and operands to BIT_FIELD_REF.  During this
     loop we also remove any useless conversions.  */
  for (; VEC_length (tree, stack) > 0; )
    {
      tree t = VEC_pop (tree, stack);

      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
        {
          /* Gimplify the dimension.  */
          if (!is_gimple_min_invariant (TREE_OPERAND (t, 1)))
            {
              tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
                                    is_gimple_val, fb_rvalue);
              ret = MIN (ret, tret);
            }
        }
      else if (TREE_CODE (t) == BIT_FIELD_REF)
        {
          /* Both the bit size (operand 1) and position (operand 2) need
             gimplifying.  */
          tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
                                is_gimple_val, fb_rvalue);
          ret = MIN (ret, tret);
          tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
                                is_gimple_val, fb_rvalue);
          ret = MIN (ret, tret);
        }

      STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));

      /* The innermost expression P may have originally had
         TREE_SIDE_EFFECTS set which would have caused all the outer
         expressions in *EXPR_P leading to P to also have had
         TREE_SIDE_EFFECTS set.  */
      recalculate_side_effects (t);
    }

  /* If the outermost expression is a COMPONENT_REF, canonicalize its type.  */
  if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
    {
      canonicalize_component_ref (expr_p);
      ret = MIN (ret, GS_OK);
    }

  VEC_free (tree, heap, stack);

  return ret;
}
2054
/* Gimplify the self modifying expression pointed to by EXPR_P
   (++, --, +=, -=).

   PRE_P points to the list where side effects that must happen before
   *EXPR_P should be stored.

   POST_P points to the list where side effects that must happen after
   *EXPR_P should be stored.

   WANT_VALUE is nonzero iff we want to use the value of this expression
   in another expression.

   Returns GS_ALL_DONE for a postfix operator (the expression is fully
   replaced by the pre-modification value), GS_OK for a prefix operator
   (the expression is rewritten into a MODIFY_EXPR still to be
   gimplified), or GS_ERROR if gimplifying an operand failed.  */

static enum gimplify_status
gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
                        bool want_value)
{
  enum tree_code code;
  tree lhs, lvalue, rhs, t1;
  /* For postfix operators we redirect POST_P to a local queue so we can
     control where the stored-back value lands; ORIG_POST_P remembers the
     caller's queue.  */
  gimple_seq post = NULL, *orig_post_p = post_p;
  bool postfix;
  enum tree_code arith_code;
  enum gimplify_status ret;
  location_t loc = EXPR_LOCATION (*expr_p);

  code = TREE_CODE (*expr_p);

  gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
              || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);

  /* Prefix or postfix?  */
  if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
    /* Faster to treat as prefix if result is not used.  */
    postfix = want_value;
  else
    postfix = false;

  /* For postfix, make sure the inner expression's post side effects
     are executed after side effects from this expression.  */
  if (postfix)
    post_p = &post;

  /* Add or subtract?  */
  if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
    arith_code = PLUS_EXPR;
  else
    arith_code = MINUS_EXPR;

  /* Gimplify the LHS into a GIMPLE lvalue.  */
  lvalue = TREE_OPERAND (*expr_p, 0);
  ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
  if (ret == GS_ERROR)
    return ret;

  /* Extract the operands to the arithmetic operation.  */
  lhs = lvalue;
  rhs = TREE_OPERAND (*expr_p, 1);

  /* For postfix operator, we evaluate the LHS to an rvalue and then use
     that as the result value and in the postqueue operation.  We also
     make sure to make lvalue a minimal lval, see
     gcc.c-torture/execute/20040313-1.c for an example where this matters.  */
  if (postfix)
    {
      if (!is_gimple_min_lval (lvalue))
	{
	  /* Force the lvalue through a pointer so the post-queue store
	     refers to the same object without re-evaluating subexpressions
	     (e.g. an array index) a second time.  */
	  mark_addressable (lvalue);
	  lvalue = build_fold_addr_expr_loc (input_location, lvalue);
	  gimplify_expr (&lvalue, pre_p, post_p, is_gimple_val, fb_rvalue);
	  lvalue = build_fold_indirect_ref_loc (input_location, lvalue);
	}
      ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
      if (ret == GS_ERROR)
	return ret;
    }

  /* For POINTERs increment, use POINTER_PLUS_EXPR.  */
  if (POINTER_TYPE_P (TREE_TYPE (lhs)))
    {
      /* POINTER_PLUS_EXPR only adds, and its offset operand is sizetype,
	 so a decrement becomes addition of the negated offset.  */
      rhs = fold_convert_loc (loc, sizetype, rhs);
      if (arith_code == MINUS_EXPR)
	rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
      arith_code = POINTER_PLUS_EXPR;
    }

  t1 = build2 (arith_code, TREE_TYPE (*expr_p), lhs, rhs);

  if (postfix)
    {
      /* Store the new value on the caller's post queue, then append any
	 post side effects collected from the operands; the expression's
	 value is the pre-modification LHS.  */
      gimplify_assign (lvalue, t1, orig_post_p);
      gimplify_seq_add_seq (orig_post_p, post);
      *expr_p = lhs;
      return GS_ALL_DONE;
    }
  else
    {
      *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
      return GS_OK;
    }
}
2154
2155
2156 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
2157
2158 static void
2159 maybe_with_size_expr (tree *expr_p)
2160 {
2161 tree expr = *expr_p;
2162 tree type = TREE_TYPE (expr);
2163 tree size;
2164
2165 /* If we've already wrapped this or the type is error_mark_node, we can't do
2166 anything. */
2167 if (TREE_CODE (expr) == WITH_SIZE_EXPR
2168 || type == error_mark_node)
2169 return;
2170
2171 /* If the size isn't known or is a constant, we have nothing to do. */
2172 size = TYPE_SIZE_UNIT (type);
2173 if (!size || TREE_CODE (size) == INTEGER_CST)
2174 return;
2175
2176 /* Otherwise, make a WITH_SIZE_EXPR. */
2177 size = unshare_expr (size);
2178 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
2179 *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
2180 }
2181
2182
2183 /* Helper for gimplify_call_expr. Gimplify a single argument *ARG_P
2184 Store any side-effects in PRE_P. CALL_LOCATION is the location of
2185 the CALL_EXPR. */
2186
2187 static enum gimplify_status
2188 gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location)
2189 {
2190 bool (*test) (tree);
2191 fallback_t fb;
2192
2193 /* In general, we allow lvalues for function arguments to avoid
2194 extra overhead of copying large aggregates out of even larger
2195 aggregates into temporaries only to copy the temporaries to
2196 the argument list. Make optimizers happy by pulling out to
2197 temporaries those types that fit in registers. */
2198 if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
2199 test = is_gimple_val, fb = fb_rvalue;
2200 else
2201 test = is_gimple_lvalue, fb = fb_either;
2202
2203 /* If this is a variable sized type, we must remember the size. */
2204 maybe_with_size_expr (arg_p);
2205
2206 /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c. */
2207 /* Make sure arguments have the same location as the function call
2208 itself. */
2209 protected_set_expr_location (*arg_p, call_location);
2210
2211 /* There is a sequence point before a function call. Side effects in
2212 the argument list must occur before the actual call. So, when
2213 gimplifying arguments, force gimplify_expr to use an internal
2214 post queue which is then appended to the end of PRE_P. */
2215 return gimplify_expr (arg_p, pre_p, NULL, test, fb);
2216 }
2217
2218
/* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
   WANT_VALUE is true if the result of the call is desired.

   On success, either emits a GIMPLE_CALL into PRE_P and clears *EXPR_P
   (when WANT_VALUE is false), or leaves *EXPR_P as a gimplified
   CALL_EXPR for the caller to consume.  Returns GS_OK when the call was
   folded and must be gimplified again, GS_ERROR on error, otherwise the
   combined status of the operand gimplifications.  */

static enum gimplify_status
gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
{
  tree fndecl, parms, p;
  enum gimplify_status ret;
  int i, nargs;
  gimple call;
  bool builtin_va_start_p = FALSE;
  location_t loc = EXPR_LOCATION (*expr_p);

  gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);

  /* For reliable diagnostics during inlining, it is necessary that
     every call_expr be annotated with file and line.  */
  if (! EXPR_HAS_LOCATION (*expr_p))
    SET_EXPR_LOCATION (*expr_p, input_location);

  /* This may be a call to a builtin function.

     Builtin function calls may be transformed into different
     (and more efficient) builtin function calls under certain
     circumstances.  Unfortunately, gimplification can muck things
     up enough that the builtin expanders are not aware that certain
     transformations are still valid.

     So we attempt transformation/gimplification of the call before
     we gimplify the CALL_EXPR.  At this time we do not manage to
     transform all calls in the same manner as the expanders do, but
     we do transform most of them.  */
  fndecl = get_callee_fndecl (*expr_p);
  if (fndecl && DECL_BUILT_IN (fndecl))
    {
      tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);

      if (new_tree && new_tree != *expr_p)
	{
	  /* There was a transformation of this call which computes the
	     same value, but in a more efficient way.  Return and try
	     again.  */
	  *expr_p = new_tree;
	  return GS_OK;
	}

      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	  && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_VA_START)
        {
	  /* Diagnose malformed va_start calls up front; a valid one is
	     remembered so its second argument is left ungimplified below.  */
	  builtin_va_start_p = TRUE;
	  if (call_expr_nargs (*expr_p) < 2)
	    {
	      error ("too few arguments to function %<va_start%>");
	      *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
	      return GS_OK;
	    }

	  if (fold_builtin_next_arg (*expr_p, true))
	    {
	      *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
	      return GS_OK;
	    }
	}
    }

  /* There is a sequence point before the call, so any side effects in
     the calling expression must occur before the actual call.  Force
     gimplify_expr to use an internal post queue.  */
  ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
		       is_gimple_call_addr, fb_rvalue);

  nargs = call_expr_nargs (*expr_p);

  /* Get argument types for verification.  */
  fndecl = get_callee_fndecl (*expr_p);
  parms = NULL_TREE;
  if (fndecl)
    parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  else if (POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_FN (*expr_p))))
    parms = TYPE_ARG_TYPES (TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (*expr_p))));

  if (fndecl && DECL_ARGUMENTS (fndecl))
    p = DECL_ARGUMENTS (fndecl);
  else if (parms)
    p = parms;
  else
    p = NULL_TREE;
  /* Walk P alongside the actual arguments; afterwards P is NULL iff all
     NARGS arguments fall past the named parameters.  */
  for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
    ;

  /* If the last argument is __builtin_va_arg_pack () and it is not
     passed as a named argument, decrease the number of CALL_EXPR
     arguments and set instead the CALL_EXPR_VA_ARG_PACK flag.  */
  if (!p
      && i < nargs
      && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
    {
      tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
      tree last_arg_fndecl = get_callee_fndecl (last_arg);

      if (last_arg_fndecl
	  && TREE_CODE (last_arg_fndecl) == FUNCTION_DECL
	  && DECL_BUILT_IN_CLASS (last_arg_fndecl) == BUILT_IN_NORMAL
	  && DECL_FUNCTION_CODE (last_arg_fndecl) == BUILT_IN_VA_ARG_PACK)
	{
	  tree call = *expr_p;

	  --nargs;
	  *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
					  CALL_EXPR_FN (call),
					  nargs, CALL_EXPR_ARGP (call));

	  /* Copy all CALL_EXPR flags, location and block, except
	     CALL_EXPR_VA_ARG_PACK flag.  */
	  CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
	  CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
	  CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
	    = CALL_EXPR_RETURN_SLOT_OPT (call);
	  CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
	  CALL_CANNOT_INLINE_P (*expr_p) = CALL_CANNOT_INLINE_P (call);
	  SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));
	  TREE_BLOCK (*expr_p) = TREE_BLOCK (call);

	  /* Set CALL_EXPR_VA_ARG_PACK.  */
	  CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
	}
    }

  /* Finally, gimplify the function arguments.  */
  if (nargs > 0)
    {
      /* Iterate in the direction arguments will be pushed, per the
	 target's PUSH_ARGS_REVERSED setting.  */
      for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
           PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
           PUSH_ARGS_REVERSED ? i-- : i++)
        {
          enum gimplify_status t;

          /* Avoid gimplifying the second argument to va_start, which needs to
             be the plain PARM_DECL.  */
          if ((i != 1) || !builtin_va_start_p)
            {
              t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
				EXPR_LOCATION (*expr_p));

              if (t == GS_ERROR)
                ret = GS_ERROR;
            }
        }
    }

  /* Verify the function result.  */
  if (want_value && fndecl
      && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl))))
    {
      error_at (loc, "using result of function returning %<void%>");
      ret = GS_ERROR;
    }

  /* Try this again in case gimplification exposed something.  */
  if (ret != GS_ERROR)
    {
      tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);

      if (new_tree && new_tree != *expr_p)
	{
	  /* There was a transformation of this call which computes the
	     same value, but in a more efficient way.  Return and try
	     again.  */
	  *expr_p = new_tree;
	  return GS_OK;
	}
    }
  else
    {
      *expr_p = error_mark_node;
      return GS_ERROR;
    }

  /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
     decl.  This allows us to eliminate redundant or useless
     calls to "const" functions.  */
  if (TREE_CODE (*expr_p) == CALL_EXPR)
    {
      int flags = call_expr_flags (*expr_p);
      if (flags & (ECF_CONST | ECF_PURE)
	  /* An infinite loop is considered a side effect.  */
	  && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
	TREE_SIDE_EFFECTS (*expr_p) = 0;
    }

  /* If the value is not needed by the caller, emit a new GIMPLE_CALL
     and clear *EXPR_P.  Otherwise, leave *EXPR_P in its gimplified
     form and delegate the creation of a GIMPLE_CALL to
     gimplify_modify_expr.  This is always possible because when
     WANT_VALUE is true, the caller wants the result of this call into
     a temporary, which means that we will emit an INIT_EXPR in
     internal_get_tmp_var which will then be handled by
     gimplify_modify_expr.  */
  if (!want_value)
    {
      /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
	 have to do is replicate it as a GIMPLE_CALL tuple.  */
      call = gimple_build_call_from_tree (*expr_p);
      gimplify_seq_add_stmt (pre_p, call);
      *expr_p = NULL_TREE;
    }

  return ret;
}
2428
/* Handle shortcut semantics in the predicate operand of a COND_EXPR by
   rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.

   TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
   condition is true or false, respectively.  If null, we should generate
   our own to skip over the evaluation of this specific expression.

   LOCUS is the source location of the COND_EXPR.

   Returns the statement list implementing the rewritten condition.

   This function is the tree equivalent of do_jump.

   shortcut_cond_r should only be called by shortcut_cond_expr.  */

static tree
shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
		 location_t locus)
{
  /* Label emitted at the end when a null label pointer forced us to
     invent a local skip-over label.  */
  tree local_label = NULL_TREE;
  tree t, expr = NULL;

  /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
     retain the shortcut semantics.  Just insert the gotos here;
     shortcut_cond_expr will append the real blocks later.  */
  if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
    {
      location_t new_locus;

      /* Turn if (a && b) into

	 if (a); else goto no;
	 if (b) goto yes; else goto no;
	 (no:) */

      if (false_label_p == NULL)
	false_label_p = &local_label;

      /* Keep the original source location on the first 'if'.  */
      t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus);
      append_to_statement_list (t, &expr);

      /* Set the source location of the && on the second 'if'.  */
      new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
      t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
			   new_locus);
      append_to_statement_list (t, &expr);
    }
  else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
    {
      location_t new_locus;

      /* Turn if (a || b) into

	 if (a) goto yes;
	 if (b) goto yes; else goto no;
	 (yes:) */

      if (true_label_p == NULL)
	true_label_p = &local_label;

      /* Keep the original source location on the first 'if'.  */
      t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus);
      append_to_statement_list (t, &expr);

      /* Set the source location of the || on the second 'if'.  */
      new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
      t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
			   new_locus);
      append_to_statement_list (t, &expr);
    }
  else if (TREE_CODE (pred) == COND_EXPR)
    {
      location_t new_locus;

      /* As long as we're messing with gotos, turn if (a ? b : c) into
	 if (a)
	   if (b) goto yes; else goto no;
	 else
	   if (c) goto yes; else goto no;  */

      /* Keep the original source location on the first 'if'.  Set the source
	 location of the ? on the second 'if'.  */
      new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
      expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
		     shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
				      false_label_p, locus),
		     shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
				      false_label_p, new_locus));
    }
  else
    {
      /* Simple predicate: emit 'if (pred) goto yes; else goto no;'.  */
      expr = build3 (COND_EXPR, void_type_node, pred,
		     build_and_jump (true_label_p),
		     build_and_jump (false_label_p));
      SET_EXPR_LOCATION (expr, locus);
    }

  if (local_label)
    {
      t = build1 (LABEL_EXPR, void_type_node, local_label);
      append_to_statement_list (t, &expr);
    }

  return expr;
}
2533
/* Given a conditional expression EXPR with short-circuit boolean
   predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
   predicate apart into the equivalent sequence of conditionals.
   Returns the rewritten statement list (or EXPR unchanged when the
   predicate contains no shortcut operators after simplification).  */

static tree
shortcut_cond_expr (tree expr)
{
  tree pred = TREE_OPERAND (expr, 0);
  tree then_ = TREE_OPERAND (expr, 1);
  tree else_ = TREE_OPERAND (expr, 2);
  tree true_label, false_label, end_label, t;
  tree *true_label_p;
  tree *false_label_p;
  bool emit_end, emit_false, jump_over_else;
  /* Whether each arm contains code with side effects; arms without them
     can be dropped entirely.  */
  bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
  bool else_se = else_ && TREE_SIDE_EFFECTS (else_);

  /* First do simple transformations.  */
  if (!else_se)
    {
      /* If there is no 'else', turn
	   if (a && b) then c
	 into
	   if (a) if (b) then c.  */
      while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
	{
	  /* Keep the original source location on the first 'if'.  */
	  location_t locus = EXPR_HAS_LOCATION (expr)
			     ? EXPR_LOCATION (expr) : input_location;
	  TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
	  /* Set the source location of the && on the second 'if'.  */
	  if (EXPR_HAS_LOCATION (pred))
	    SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
	  then_ = shortcut_cond_expr (expr);
	  then_se = then_ && TREE_SIDE_EFFECTS (then_);
	  pred = TREE_OPERAND (pred, 0);
	  expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
	  SET_EXPR_LOCATION (expr, locus);
	}
    }

  if (!then_se)
    {
      /* If there is no 'then', turn
	   if (a || b); else d
	 into
	   if (a); else if (b); else d.  */
      while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
	{
	  /* Keep the original source location on the first 'if'.  */
	  location_t locus = EXPR_HAS_LOCATION (expr)
			     ? EXPR_LOCATION (expr) : input_location;
	  TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
	  /* Set the source location of the || on the second 'if'.  */
	  if (EXPR_HAS_LOCATION (pred))
	    SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
	  else_ = shortcut_cond_expr (expr);
	  else_se = else_ && TREE_SIDE_EFFECTS (else_);
	  pred = TREE_OPERAND (pred, 0);
	  expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
	  SET_EXPR_LOCATION (expr, locus);
	}
    }

  /* If we're done, great.  */
  if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
      && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
    return expr;

  /* Otherwise we need to mess with gotos.  Change
       if (a) c; else d;
     to
       if (a); else goto no;
       c; goto end;
       no: d; end:
     and recursively gimplify the condition.  */

  true_label = false_label = end_label = NULL_TREE;

  /* If our arms just jump somewhere, hijack those labels so we don't
     generate jumps to jumps.  */

  if (then_
      && TREE_CODE (then_) == GOTO_EXPR
      && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL)
    {
      true_label = GOTO_DESTINATION (then_);
      then_ = NULL;
      then_se = false;
    }

  if (else_
      && TREE_CODE (else_) == GOTO_EXPR
      && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL)
    {
      false_label = GOTO_DESTINATION (else_);
      else_ = NULL;
      else_se = false;
    }

  /* If we aren't hijacking a label for the 'then' branch, it falls through.  */
  if (true_label)
    true_label_p = &true_label;
  else
    true_label_p = NULL;

  /* The 'else' branch also needs a label if it contains interesting code.  */
  if (false_label || else_se)
    false_label_p = &false_label;
  else
    false_label_p = NULL;

  /* If there was nothing else in our arms, just forward the label(s).  */
  if (!then_se && !else_se)
    return shortcut_cond_r (pred, true_label_p, false_label_p,
			    EXPR_HAS_LOCATION (expr)
			    ? EXPR_LOCATION (expr) : input_location);

  /* If our last subexpression already has a terminal label, reuse it.  */
  if (else_se)
    t = expr_last (else_);
  else if (then_se)
    t = expr_last (then_);
  else
    t = NULL;
  if (t && TREE_CODE (t) == LABEL_EXPR)
    end_label = LABEL_EXPR_LABEL (t);

  /* If we don't care about jumping to the 'else' branch, jump to the end
     if the condition is false.  */
  if (!false_label_p)
    false_label_p = &end_label;

  /* We only want to emit these labels if we aren't hijacking them.  */
  emit_end = (end_label == NULL_TREE);
  emit_false = (false_label == NULL_TREE);

  /* We only emit the jump over the else clause if we have to--if the
     then clause may fall through.  Otherwise we can wind up with a
     useless jump and a useless label at the end of gimplified code,
     which will cause us to think that this conditional as a whole
     falls through even if it doesn't.  If we then inline a function
     which ends with such a condition, that can cause us to issue an
     inappropriate warning about control reaching the end of a
     non-void function.  */
  jump_over_else = block_may_fallthru (then_);

  pred = shortcut_cond_r (pred, true_label_p, false_label_p,
			  EXPR_HAS_LOCATION (expr)
			  ? EXPR_LOCATION (expr) : input_location);

  expr = NULL;
  append_to_statement_list (pred, &expr);

  append_to_statement_list (then_, &expr);
  if (else_se)
    {
      if (jump_over_else)
	{
	  /* Give the skip-the-else jump the location of the last statement
	     of the then arm, not of the whole conditional.  */
	  tree last = expr_last (expr);
	  t = build_and_jump (&end_label);
	  if (EXPR_HAS_LOCATION (last))
	    SET_EXPR_LOCATION (t, EXPR_LOCATION (last));
	  append_to_statement_list (t, &expr);
	}
      if (emit_false)
	{
	  t = build1 (LABEL_EXPR, void_type_node, false_label);
	  append_to_statement_list (t, &expr);
	}
      append_to_statement_list (else_, &expr);
    }
  if (emit_end && end_label)
    {
      t = build1 (LABEL_EXPR, void_type_node, end_label);
      append_to_statement_list (t, &expr);
    }

  return expr;
}
2714
2715 /* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */
2716
2717 tree
2718 gimple_boolify (tree expr)
2719 {
2720 tree type = TREE_TYPE (expr);
2721 location_t loc = EXPR_LOCATION (expr);
2722
2723 if (TREE_CODE (type) == BOOLEAN_TYPE)
2724 return expr;
2725
2726 switch (TREE_CODE (expr))
2727 {
2728 case TRUTH_AND_EXPR:
2729 case TRUTH_OR_EXPR:
2730 case TRUTH_XOR_EXPR:
2731 case TRUTH_ANDIF_EXPR:
2732 case TRUTH_ORIF_EXPR:
2733 /* Also boolify the arguments of truth exprs. */
2734 TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
2735 /* FALLTHRU */
2736
2737 case TRUTH_NOT_EXPR:
2738 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
2739 /* FALLTHRU */
2740
2741 case EQ_EXPR: case NE_EXPR:
2742 case LE_EXPR: case GE_EXPR: case LT_EXPR: case GT_EXPR:
2743 /* These expressions always produce boolean results. */
2744 TREE_TYPE (expr) = boolean_type_node;
2745 return expr;
2746
2747 default:
2748 /* Other expressions that get here must have boolean values, but
2749 might need to be converted to the appropriate mode. */
2750 return fold_convert_loc (loc, boolean_type_node, expr);
2751 }
2752 }
2753
2754 /* Given a conditional expression *EXPR_P without side effects, gimplify
2755 its operands. New statements are inserted to PRE_P. */
2756
2757 static enum gimplify_status
2758 gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
2759 {
2760 tree expr = *expr_p, cond;
2761 enum gimplify_status ret, tret;
2762 enum tree_code code;
2763
2764 cond = gimple_boolify (COND_EXPR_COND (expr));
2765
2766 /* We need to handle && and || specially, as their gimplification
2767 creates pure cond_expr, thus leading to an infinite cycle otherwise. */
2768 code = TREE_CODE (cond);
2769 if (code == TRUTH_ANDIF_EXPR)
2770 TREE_SET_CODE (cond, TRUTH_AND_EXPR);
2771 else if (code == TRUTH_ORIF_EXPR)
2772 TREE_SET_CODE (cond, TRUTH_OR_EXPR);
2773 ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_condexpr, fb_rvalue);
2774 COND_EXPR_COND (*expr_p) = cond;
2775
2776 tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
2777 is_gimple_val, fb_rvalue);
2778 ret = MIN (ret, tret);
2779 tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
2780 is_gimple_val, fb_rvalue);
2781
2782 return MIN (ret, tret);
2783 }
2784
2785 /* Returns true if evaluating EXPR could trap.
2786 EXPR is GENERIC, while tree_could_trap_p can be called
2787 only on GIMPLE. */
2788
2789 static bool
2790 generic_expr_could_trap_p (tree expr)
2791 {
2792 unsigned i, n;
2793
2794 if (!expr || is_gimple_val (expr))
2795 return false;
2796
2797 if (!EXPR_P (expr) || tree_could_trap_p (expr))
2798 return true;
2799
2800 n = TREE_OPERAND_LENGTH (expr);
2801 for (i = 0; i < n; i++)
2802 if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
2803 return true;
2804
2805 return false;
2806 }
2807
/* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
   into

     if (p)			if (p)
       t1 = a;			  a;
     else		or	else
       t1 = b;			  b;
     t1;

   The second form is used when *EXPR_P is of type void.

   PRE_P points to the list where side effects that must happen before
   *EXPR_P should be stored.

   FALLBACK tells what kind of result (rvalue/lvalue) the caller can
   accept; it decides whether the temporary can be a plain variable or
   must be addressed through a pointer.  */

static enum gimplify_status
gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
{
  tree expr = *expr_p;
  tree tmp, type, arm1, arm2;
  enum gimplify_status ret;
  tree label_true, label_false, label_cont;
  bool have_then_clause_p, have_else_clause_p;
  gimple gimple_cond;
  enum tree_code pred_code;
  gimple_seq seq = NULL;
  location_t loc = EXPR_LOCATION (*expr_p);

  type = TREE_TYPE (expr);

  /* If this COND_EXPR has a value, copy the values into a temporary within
     the arms.  */
  if (! VOID_TYPE_P (type))
    {
      tree result;

      /* If an rvalue is ok or we do not require an lvalue, avoid creating
	 an addressable temporary.  */
      if (((fallback & fb_rvalue)
	   || !(fallback & fb_lvalue))
	  && !TREE_ADDRESSABLE (type))
	{
	  if (gimplify_ctxp->allow_rhs_cond_expr
	      /* If either branch has side effects or could trap, it can't be
		 evaluated unconditionally.  */
	      && !TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p, 1))
	      && !generic_expr_could_trap_p (TREE_OPERAND (*expr_p, 1))
	      && !TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p, 2))
	      && !generic_expr_could_trap_p (TREE_OPERAND (*expr_p, 2)))
	    return gimplify_pure_cond_expr (expr_p, pre_p);

	  result = tmp = create_tmp_var (TREE_TYPE (expr), "iftmp");
	  ret = GS_ALL_DONE;
	}
      else
	{
	  /* An lvalue result is needed: make the temporary a pointer and
	     have each arm compute the address of its value.  */
	  tree type = build_pointer_type (TREE_TYPE (expr));

	  if (TREE_TYPE (TREE_OPERAND (expr, 1)) != void_type_node)
	    TREE_OPERAND (expr, 1) =
	      build_fold_addr_expr_loc (loc, TREE_OPERAND (expr, 1));

	  if (TREE_TYPE (TREE_OPERAND (expr, 2)) != void_type_node)
	    TREE_OPERAND (expr, 2) =
	      build_fold_addr_expr_loc (loc, TREE_OPERAND (expr, 2));

	  tmp = create_tmp_var (type, "iftmp");

	  expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (expr, 0),
			 TREE_OPERAND (expr, 1), TREE_OPERAND (expr, 2));

	  result = build_fold_indirect_ref_loc (loc, tmp);
	}

      /* Build the then clause, 't1 = a;'.  But don't build an assignment
	 if this branch is void; in C++ it can be, if it's a throw.  */
      if (TREE_TYPE (TREE_OPERAND (expr, 1)) != void_type_node)
	TREE_OPERAND (expr, 1)
	  = build2 (MODIFY_EXPR, TREE_TYPE (tmp), tmp, TREE_OPERAND (expr, 1));

      /* Build the else clause, 't1 = b;'.  */
      if (TREE_TYPE (TREE_OPERAND (expr, 2)) != void_type_node)
	TREE_OPERAND (expr, 2)
	  = build2 (MODIFY_EXPR, TREE_TYPE (tmp), tmp, TREE_OPERAND (expr, 2));

      TREE_TYPE (expr) = void_type_node;
      recalculate_side_effects (expr);

      /* Move the COND_EXPR to the prequeue.  */
      gimplify_stmt (&expr, pre_p);

      *expr_p = result;
      return GS_ALL_DONE;
    }

  /* Make sure the condition has BOOLEAN_TYPE.  */
  TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));

  /* Break apart && and || conditions.  */
  if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
      || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
    {
      expr = shortcut_cond_expr (expr);

      if (expr != *expr_p)
	{
	  *expr_p = expr;

	  /* We can't rely on gimplify_expr to re-gimplify the expanded
	     form properly, as cleanups might cause the target labels to be
	     wrapped in a TRY_FINALLY_EXPR.  To prevent that, we need to
	     set up a conditional context.  */
	  gimple_push_condition ();
	  gimplify_stmt (expr_p, &seq);
	  gimple_pop_condition (pre_p);
	  gimple_seq_add_seq (pre_p, seq);

	  return GS_ALL_DONE;
	}
    }

  /* Now do the normal gimplification.  */

  /* Gimplify condition.  */
  ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL, is_gimple_condexpr,
		       fb_rvalue);
  if (ret == GS_ERROR)
    return GS_ERROR;
  gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);

  gimple_push_condition ();

  /* If an arm is itself a goto to a local label, reuse that label as the
     branch target instead of creating an artificial one.  */
  have_then_clause_p = have_else_clause_p = false;
  if (TREE_OPERAND (expr, 1) != NULL
      && TREE_CODE (TREE_OPERAND (expr, 1)) == GOTO_EXPR
      && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 1))) == LABEL_DECL
      && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 1)))
	  == current_function_decl)
      /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
	 have different locations, otherwise we end up with incorrect
	 location information on the branches.  */
      && (optimize
	  || !EXPR_HAS_LOCATION (expr)
	  || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 1))
	  || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 1))))
    {
      label_true = GOTO_DESTINATION (TREE_OPERAND (expr, 1));
      have_then_clause_p = true;
    }
  else
    label_true = create_artificial_label (UNKNOWN_LOCATION);
  if (TREE_OPERAND (expr, 2) != NULL
      && TREE_CODE (TREE_OPERAND (expr, 2)) == GOTO_EXPR
      && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 2))) == LABEL_DECL
      && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 2)))
	  == current_function_decl)
      /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
	 have different locations, otherwise we end up with incorrect
	 location information on the branches.  */
      && (optimize
	  || !EXPR_HAS_LOCATION (expr)
	  || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 2))
	  || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 2))))
    {
      label_false = GOTO_DESTINATION (TREE_OPERAND (expr, 2));
      have_else_clause_p = true;
    }
  else
    label_false = create_artificial_label (UNKNOWN_LOCATION);

  gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
				 &arm2);

  gimple_cond = gimple_build_cond (pred_code, arm1, arm2, label_true,
                                   label_false);

  gimplify_seq_add_stmt (&seq, gimple_cond);
  label_cont = NULL_TREE;
  if (!have_then_clause_p)
    {
      /* For if (...) {} else { code; } put label_true after
	 the else block.  */
      if (TREE_OPERAND (expr, 1) == NULL_TREE
	  && !have_else_clause_p
	  && TREE_OPERAND (expr, 2) != NULL_TREE)
	label_cont = label_true;
      else
	{
	  gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
	  have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
	  /* For if (...) { code; } else {} or
	     if (...) { code; } else goto label; or
	     if (...) { code; return; } else { ... }
	     label_cont isn't needed.  */
	  if (!have_else_clause_p
	      && TREE_OPERAND (expr, 2) != NULL_TREE
	      && gimple_seq_may_fallthru (seq))
	    {
	      gimple g;
	      label_cont = create_artificial_label (UNKNOWN_LOCATION);

	      g = gimple_build_goto (label_cont);

	      /* GIMPLE_COND's are very low level; they have embedded
		 gotos.  This particular embedded goto should not be marked
		 with the location of the original COND_EXPR, as it would
		 correspond to the COND_EXPR's condition, not the ELSE or the
		 THEN arms.  To avoid marking it with the wrong location, flag
		 it as "no location".  */
	      gimple_set_do_not_emit_location (g);

	      gimplify_seq_add_stmt (&seq, g);
	    }
	}
    }
  if (!have_else_clause_p)
    {
      gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
      have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
    }
  if (label_cont)
    gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));

  gimple_pop_condition (pre_p);
  gimple_seq_add_seq (pre_p, seq);

  if (ret == GS_ERROR)
    ; /* Do nothing.  */
  else if (have_then_clause_p || have_else_clause_p)
    ret = GS_ALL_DONE;
  else
    {
      /* Both arms are empty; replace the COND_EXPR with its predicate.  */
      expr = TREE_OPERAND (expr, 0);
      gimplify_stmt (&expr, pre_p);
    }

  *expr_p = NULL;
  return ret;
}
3047
3048 /* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
3049 to be marked addressable.
3050
3051 We cannot rely on such an expression being directly markable if a temporary
3052 has been created by the gimplification. In this case, we create another
3053 temporary and initialize it with a copy, which will become a store after we
3054 mark it addressable. This can happen if the front-end passed us something
3055 that it could not mark addressable yet, like a Fortran pass-by-reference
3056 parameter (int) floatvar. */
3057
3058 static void
3059 prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p)
3060 {
3061 while (handled_component_p (*expr_p))
3062 expr_p = &TREE_OPERAND (*expr_p, 0);
3063 if (is_gimple_reg (*expr_p))
3064 *expr_p = get_initialized_tmp_var (*expr_p, seq_p, NULL);
3065 }
3066
3067 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
3068 a call to __builtin_memcpy. */
3069
3070 static enum gimplify_status
3071 gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
3072 gimple_seq *seq_p)
3073 {
3074 tree t, to, to_ptr, from, from_ptr;
3075 gimple gs;
3076 location_t loc = EXPR_LOCATION (*expr_p);
3077
3078 to = TREE_OPERAND (*expr_p, 0);
3079 from = TREE_OPERAND (*expr_p, 1);
3080
3081 /* Mark the RHS addressable. Beware that it may not be possible to do so
3082 directly if a temporary has been created by the gimplification. */
3083 prepare_gimple_addressable (&from, seq_p);
3084
3085 mark_addressable (from);
3086 from_ptr = build_fold_addr_expr_loc (loc, from);
3087 gimplify_arg (&from_ptr, seq_p, loc);
3088
3089 mark_addressable (to);
3090 to_ptr = build_fold_addr_expr_loc (loc, to);
3091 gimplify_arg (&to_ptr, seq_p, loc);
3092
3093 t = implicit_built_in_decls[BUILT_IN_MEMCPY];
3094
3095 gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);
3096
3097 if (want_value)
3098 {
3099 /* tmp = memcpy() */
3100 t = create_tmp_var (TREE_TYPE (to_ptr), NULL);
3101 gimple_call_set_lhs (gs, t);
3102 gimplify_seq_add_stmt (seq_p, gs);
3103
3104 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
3105 return GS_ALL_DONE;
3106 }
3107
3108 gimplify_seq_add_stmt (seq_p, gs);
3109 *expr_p = NULL;
3110 return GS_ALL_DONE;
3111 }
3112
3113 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
3114 a call to __builtin_memset. In this case we know that the RHS is
3115 a CONSTRUCTOR with an empty element list. */
3116
3117 static enum gimplify_status
3118 gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
3119 gimple_seq *seq_p)
3120 {
3121 tree t, from, to, to_ptr;
3122 gimple gs;
3123 location_t loc = EXPR_LOCATION (*expr_p);
3124
3125 /* Assert our assumptions, to abort instead of producing wrong code
3126 silently if they are not met. Beware that the RHS CONSTRUCTOR might
3127 not be immediately exposed. */
3128 from = TREE_OPERAND (*expr_p, 1);
3129 if (TREE_CODE (from) == WITH_SIZE_EXPR)
3130 from = TREE_OPERAND (from, 0);
3131
3132 gcc_assert (TREE_CODE (from) == CONSTRUCTOR
3133 && VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (from)));
3134
3135 /* Now proceed. */
3136 to = TREE_OPERAND (*expr_p, 0);
3137
3138 to_ptr = build_fold_addr_expr_loc (loc, to);
3139 gimplify_arg (&to_ptr, seq_p, loc);
3140 t = implicit_built_in_decls[BUILT_IN_MEMSET];
3141
3142 gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);
3143
3144 if (want_value)
3145 {
3146 /* tmp = memset() */
3147 t = create_tmp_var (TREE_TYPE (to_ptr), NULL);
3148 gimple_call_set_lhs (gs, t);
3149 gimplify_seq_add_stmt (seq_p, gs);
3150
3151 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
3152 return GS_ALL_DONE;
3153 }
3154
3155 gimplify_seq_add_stmt (seq_p, gs);
3156 *expr_p = NULL;
3157 return GS_ALL_DONE;
3158 }
3159
/* A subroutine of gimplify_init_ctor_preeval.  Called via walk_tree,
   determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
   assignment.  Returns non-null if we detect a potential overlap.  */

/* Context passed (as the walk_tree data pointer) to
   gimplify_init_ctor_preeval_1 while scanning constructor elements.  */

struct gimplify_init_ctor_preeval_data
{
  /* The base decl of the lhs object.  May be NULL, in which case we
     have to assume the lhs is indirect.  */
  tree lhs_base_decl;

  /* The alias set of the lhs object, used to test whether indirect
     references in the constructor may conflict with the lhs.  */
  alias_set_type lhs_alias_set;
};
3173
3174 static tree
3175 gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
3176 {
3177 struct gimplify_init_ctor_preeval_data *data
3178 = (struct gimplify_init_ctor_preeval_data *) xdata;
3179 tree t = *tp;
3180
3181 /* If we find the base object, obviously we have overlap. */
3182 if (data->lhs_base_decl == t)
3183 return t;
3184
3185 /* If the constructor component is indirect, determine if we have a
3186 potential overlap with the lhs. The only bits of information we
3187 have to go on at this point are addressability and alias sets. */
3188 if (TREE_CODE (t) == INDIRECT_REF
3189 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
3190 && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
3191 return t;
3192
3193 /* If the constructor component is a call, determine if it can hide a
3194 potential overlap with the lhs through an INDIRECT_REF like above. */
3195 if (TREE_CODE (t) == CALL_EXPR)
3196 {
3197 tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));
3198
3199 for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
3200 if (POINTER_TYPE_P (TREE_VALUE (type))
3201 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
3202 && alias_sets_conflict_p (data->lhs_alias_set,
3203 get_alias_set
3204 (TREE_TYPE (TREE_VALUE (type)))))
3205 return t;
3206 }
3207
3208 if (IS_TYPE_OR_DECL_P (t))
3209 *walk_subtrees = 0;
3210 return NULL;
3211 }
3212
/* A subroutine of gimplify_init_constructor.  Pre-evaluate EXPR,
   force values that overlap with the lhs (as described by *DATA)
   into temporaries.

   EXPR_P points to a constructor element value; PRE_P/POST_P receive any
   statements generated by gimplification.  On gimplification failure the
   element is replaced by NULL (callers skip NULL values).  */

static void
gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			    struct gimplify_init_ctor_preeval_data *data)
{
  enum gimplify_status one;

  /* If the value is constant, then there's nothing to pre-evaluate.  */
  if (TREE_CONSTANT (*expr_p))
    {
      /* Ensure it does not have side effects, it might contain a reference to
	 the object we're initializing.  */
      gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
      return;
    }

  /* If the type has non-trivial constructors, we can't pre-evaluate.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
    return;

  /* Recurse for nested constructors.  */
  if (TREE_CODE (*expr_p) == CONSTRUCTOR)
    {
      unsigned HOST_WIDE_INT ix;
      constructor_elt *ce;
      VEC(constructor_elt,gc) *v = CONSTRUCTOR_ELTS (*expr_p);

      for (ix = 0; VEC_iterate (constructor_elt, v, ix, ce); ix++)
	gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);

      return;
    }

  /* If this is a variable sized type, we must remember the size.  */
  maybe_with_size_expr (expr_p);

  /* Gimplify the constructor element to something appropriate for the rhs
     of a MODIFY_EXPR.  Given that we know the LHS is an aggregate, we know
     the gimplifier will consider this a store to memory.  Doing this
     gimplification now means that we won't have to deal with complicated
     language-specific trees, nor trees like SAVE_EXPR that can induce
     exponential search behavior.  */
  one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
  if (one == GS_ERROR)
    {
      /* Leave a NULL element; the caller skips those.  */
      *expr_p = NULL;
      return;
    }

  /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
     with the lhs, since "a = { .x=a }" doesn't make sense.  This will
     always be true for all scalars, since is_gimple_mem_rhs insists on a
     temporary variable for them.  */
  if (DECL_P (*expr_p))
    return;

  /* If this is of variable size, we have no choice but to assume it doesn't
     overlap since we can't make a temporary for it.  */
  if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
    return;

  /* Otherwise, we must search for overlap ...  (walk_tree returns the
     offending subtree, i.e. non-NULL, when the callback detects one).  */
  if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
    return;

  /* ... and if found, force the value into a temporary.  */
  *expr_p = get_formal_tmp_var (*expr_p, pre_p);
}
3284
3285 /* A subroutine of gimplify_init_ctor_eval. Create a loop for
3286 a RANGE_EXPR in a CONSTRUCTOR for an array.
3287
3288 var = lower;
3289 loop_entry:
3290 object[var] = value;
3291 if (var == upper)
3292 goto loop_exit;
3293 var = var + 1;
3294 goto loop_entry;
3295 loop_exit:
3296
3297 We increment var _after_ the loop exit check because we might otherwise
3298 fail if upper == TYPE_MAX_VALUE (type for upper).
3299
3300 Note that we never have to deal with SAVE_EXPRs here, because this has
3301 already been taken care of for us, in gimplify_init_ctor_preeval(). */
3302
3303 static void gimplify_init_ctor_eval (tree, VEC(constructor_elt,gc) *,
3304 gimple_seq *, bool);
3305
3306 static void
3307 gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
3308 tree value, tree array_elt_type,
3309 gimple_seq *pre_p, bool cleared)
3310 {
3311 tree loop_entry_label, loop_exit_label, fall_thru_label;
3312 tree var, var_type, cref, tmp;
3313
3314 loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
3315 loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
3316 fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);
3317
3318 /* Create and initialize the index variable. */
3319 var_type = TREE_TYPE (upper);
3320 var = create_tmp_var (var_type, NULL);
3321 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));
3322
3323 /* Add the loop entry label. */
3324 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));
3325
3326 /* Build the reference. */
3327 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
3328 var, NULL_TREE, NULL_TREE);
3329
3330 /* If we are a constructor, just call gimplify_init_ctor_eval to do
3331 the store. Otherwise just assign value to the reference. */
3332
3333 if (TREE_CODE (value) == CONSTRUCTOR)
3334 /* NB we might have to call ourself recursively through
3335 gimplify_init_ctor_eval if the value is a constructor. */
3336 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
3337 pre_p, cleared);
3338 else
3339 gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));
3340
3341 /* We exit the loop when the index var is equal to the upper bound. */
3342 gimplify_seq_add_stmt (pre_p,
3343 gimple_build_cond (EQ_EXPR, var, upper,
3344 loop_exit_label, fall_thru_label));
3345
3346 gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));
3347
3348 /* Otherwise, increment the index var... */
3349 tmp = build2 (PLUS_EXPR, var_type, var,
3350 fold_convert (var_type, integer_one_node));
3351 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));
3352
3353 /* ...and jump back to the loop entry. */
3354 gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));
3355
3356 /* Add the loop exit label. */
3357 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
3358 }
3359
3360 /* Return true if FDECL is accessing a field that is zero sized. */
3361
3362 static bool
3363 zero_sized_field_decl (const_tree fdecl)
3364 {
3365 if (TREE_CODE (fdecl) == FIELD_DECL && DECL_SIZE (fdecl)
3366 && integer_zerop (DECL_SIZE (fdecl)))
3367 return true;
3368 return false;
3369 }
3370
3371 /* Return true if TYPE is zero sized. */
3372
3373 static bool
3374 zero_sized_type (const_tree type)
3375 {
3376 if (AGGREGATE_TYPE_P (type) && TYPE_SIZE (type)
3377 && integer_zerop (TYPE_SIZE (type)))
3378 return true;
3379 return false;
3380 }
3381
/* A subroutine of gimplify_init_constructor.  Generate individual
   MODIFY_EXPRs for a CONSTRUCTOR.  OBJECT is the LHS against which the
   assignments should happen.  ELTS is the CONSTRUCTOR_ELTS of the
   CONSTRUCTOR.  CLEARED is true if the entire LHS object has been
   zeroed first.  Generated statements are appended to PRE_P.  */

static void
gimplify_init_ctor_eval (tree object, VEC(constructor_elt,gc) *elts,
			 gimple_seq *pre_p, bool cleared)
{
  tree array_elt_type = NULL;
  unsigned HOST_WIDE_INT ix;
  tree purpose, value;

  /* For arrays, remember the element type so we can build ARRAY_REFs.  */
  if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
    array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));

  FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
    {
      tree cref;

      /* NULL values are created above for gimplification errors.  */
      if (value == NULL)
	continue;

      /* Elements already zero need no store when the object was
	 block-cleared first.  */
      if (cleared && initializer_zerop (value))
	continue;

      /* ??? Here's to hoping the front end fills in all of the indices,
	 so we don't have to figure out what's missing ourselves.  */
      gcc_assert (purpose);

      /* Skip zero-sized fields, unless value has side-effects.  This can
	 happen with calls to functions returning a zero-sized type, which
	 we shouldn't discard.  As a number of downstream passes don't
	 expect sets of zero-sized fields, we rely on the gimplification of
	 the MODIFY_EXPR we make below to drop the assignment statement.  */
      if (! TREE_SIDE_EFFECTS (value) && zero_sized_field_decl (purpose))
	continue;

      /* If we have a RANGE_EXPR, we have to build a loop to assign the
	 whole range.  */
      if (TREE_CODE (purpose) == RANGE_EXPR)
	{
	  tree lower = TREE_OPERAND (purpose, 0);
	  tree upper = TREE_OPERAND (purpose, 1);

	  /* If the lower bound is equal to upper, just treat it as if
	     upper was the index.  */
	  if (simple_cst_equal (lower, upper))
	    purpose = upper;
	  else
	    {
	      gimplify_init_ctor_eval_range (object, lower, upper, value,
					     array_elt_type, pre_p, cleared);
	      continue;
	    }
	}

      if (array_elt_type)
	{
	  /* Do not use bitsizetype for ARRAY_REF indices.  */
	  if (TYPE_DOMAIN (TREE_TYPE (object)))
	    purpose = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
				    purpose);
	  cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
			 purpose, NULL_TREE, NULL_TREE);
	}
      else
	{
	  /* Otherwise PURPOSE must name a field of the record/union.  */
	  gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
	  cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
			 unshare_expr (object), purpose, NULL_TREE);
	}

      /* Recurse into nested constructors (except vector constructors,
	 which are handled as whole values); otherwise emit and gimplify
	 a single element initialization.  */
      if (TREE_CODE (value) == CONSTRUCTOR
	  && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
	gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
				 pre_p, cleared);
      else
	{
	  tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
	  gimplify_and_add (init, pre_p);
	  /* The INIT_EXPR node itself is no longer needed once
	     gimplified; release it to the garbage collector.  */
	  ggc_free (init);
	}
    }
}
3469
3470
3471 /* Returns the appropriate RHS predicate for this LHS. */
3472
3473 gimple_predicate
3474 rhs_predicate_for (tree lhs)
3475 {
3476 if (is_gimple_reg (lhs))
3477 return is_gimple_reg_rhs_or_call;
3478 else
3479 return is_gimple_mem_rhs_or_call;
3480 }
3481
/* Gimplify a C99 compound literal expression.  This just means adding
   the DECL_EXPR before the current statement and using its anonymous
   decl instead.  EXPR_P points to the COMPOUND_LITERAL_EXPR; PRE_P
   receives the gimplified DECL_EXPR.  Always returns GS_OK.  */

static enum gimplify_status
gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
  tree decl = DECL_EXPR_DECL (decl_s);
  /* Mark the decl as addressable if the compound literal
     expression is addressable now, otherwise it is marked too late
     after we gimplify the initialization expression.  */
  if (TREE_ADDRESSABLE (*expr_p))
    TREE_ADDRESSABLE (decl) = 1;

  /* Preliminarily mark non-addressed complex variables as eligible
     for promotion to gimple registers.  We'll transform their uses
     as we find them.  */
  if ((TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE
       || TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE)
      && !TREE_THIS_VOLATILE (decl)
      && !needs_to_live_in_memory (decl))
    DECL_GIMPLE_REG_P (decl) = 1;

  /* This decl isn't mentioned in the enclosing block, so add it to the
     list of temps.  FIXME it seems a bit of a kludge to say that
     anonymous artificial vars aren't pushed, but everything else is.  */
  if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
    gimple_add_tmp_var (decl);

  /* Emit the declaration (and its initializer) before the current
     statement, then stand in the decl for the literal.  */
  gimplify_and_add (decl_s, pre_p);
  *expr_p = decl;
  return GS_OK;
}
3516
/* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
   return a new CONSTRUCTOR if something changed.  The replacement is
   copy-on-write: ORIG_CTOR is returned unchanged when no element was
   rewritten.  */

static tree
optimize_compound_literals_in_ctor (tree orig_ctor)
{
  tree ctor = orig_ctor;
  VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (ctor);
  unsigned int idx, num = VEC_length (constructor_elt, elts);

  for (idx = 0; idx < num; idx++)
    {
      tree value = VEC_index (constructor_elt, elts, idx)->value;
      tree newval = value;
      /* Recurse into nested constructors.  */
      if (TREE_CODE (value) == CONSTRUCTOR)
	newval = optimize_compound_literals_in_ctor (value);
      else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
	{
	  tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
	  tree decl = DECL_EXPR_DECL (decl_s);
	  tree init = DECL_INITIAL (decl);

	  /* Replace the literal by its initializer, but only if neither
	     the literal nor its decl is addressable (replacing would
	     lose the object's identity) and an initializer exists.  */
	  if (!TREE_ADDRESSABLE (value)
	      && !TREE_ADDRESSABLE (decl)
	      && init)
	    newval = optimize_compound_literals_in_ctor (init);
	}
      if (newval == value)
	continue;

      /* First change: unshare the constructor before mutating it, so
	 ORIG_CTOR is left intact for other users.  */
      if (ctor == orig_ctor)
	{
	  ctor = copy_node (orig_ctor);
	  CONSTRUCTOR_ELTS (ctor) = VEC_copy (constructor_elt, gc, elts);
	  elts = CONSTRUCTOR_ELTS (ctor);
	}
      VEC_index (constructor_elt, elts, idx)->value = newval;
    }
  return ctor;
}
3557


/* A subroutine of gimplify_modify_expr.  Break out elements of a
   CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.

   Note that we still need to clear any elements that don't have explicit
   initializers, so if not all elements are initialized we keep the
   original MODIFY_EXPR, we just remove all of the constructor elements.

   If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
   GS_ERROR if we would have to create a temporary when gimplifying
   this constructor.  Otherwise, return GS_OK.

   If NOTIFY_TEMP_CREATION is false, just do the gimplification.

   EXPR_P points to the MODIFY_EXPR whose RHS is the CONSTRUCTOR;
   PRE_P/POST_P collect emitted statements; WANT_VALUE is true when the
   caller needs the assigned value as the expression's result.  */

static enum gimplify_status
gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			   bool want_value, bool notify_temp_creation)
{
  tree object, ctor, type;
  enum gimplify_status ret;
  VEC(constructor_elt,gc) *elts;

  gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);

  /* Gimplify the LHS first (skipped in the probe-only mode).  */
  if (!notify_temp_creation)
    {
      ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
			   is_gimple_lvalue, fb_lvalue);
      if (ret == GS_ERROR)
	return ret;
    }

  object = TREE_OPERAND (*expr_p, 0);
  ctor = TREE_OPERAND (*expr_p, 1) =
    optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
  type = TREE_TYPE (ctor);
  elts = CONSTRUCTOR_ELTS (ctor);
  ret = GS_ALL_DONE;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case ARRAY_TYPE:
      {
	struct gimplify_init_ctor_preeval_data preeval_data;
	HOST_WIDE_INT num_type_elements, num_ctor_elements;
	HOST_WIDE_INT num_nonzero_elements;
	bool cleared, valid_const_initializer;

	/* Aggregate types must lower constructors to initialization of
	   individual elements.  The exception is that a CONSTRUCTOR node
	   with no elements indicates zero-initialization of the whole.  */
	if (VEC_empty (constructor_elt, elts))
	  {
	    if (notify_temp_creation)
	      return GS_OK;
	    break;
	  }

	/* Fetch information about the constructor to direct later processing.
	   We might want to make static versions of it in various cases, and
	   can only do so if it known to be a valid constant initializer.  */
	valid_const_initializer
	  = categorize_ctor_elements (ctor, &num_nonzero_elements,
				      &num_ctor_elements, &cleared);

	/* If a const aggregate variable is being initialized, then it
	   should never be a lose to promote the variable to be static.  */
	if (valid_const_initializer
	    && num_nonzero_elements > 1
	    && TREE_READONLY (object)
	    && TREE_CODE (object) == VAR_DECL
	    && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object)))
	  {
	    /* Promoting requires a temporary-like static; refuse in
	       probe-only mode.  */
	    if (notify_temp_creation)
	      return GS_ERROR;
	    DECL_INITIAL (object) = ctor;
	    TREE_STATIC (object) = 1;
	    if (!DECL_NAME (object))
	      DECL_NAME (object) = create_tmp_var_name ("C");
	    walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);

	    /* ??? C++ doesn't automatically append a .<number> to the
	       assembler name, and even when it does, it looks a FE private
	       data structures to figure out what that number should be,
	       which are not set for this variable.  I suppose this is
	       important for local statics for inline functions, which aren't
	       "local" in the object file sense.  So in order to get a unique
	       TU-local symbol, we must invoke the lhd version now.  */
	    lhd_set_decl_assembler_name (object);

	    *expr_p = NULL_TREE;
	    break;
	  }

	/* If there are "lots" of initialized elements, even discounting
	   those that are not address constants (and thus *must* be
	   computed at runtime), then partition the constructor into
	   constant and non-constant parts.  Block copy the constant
	   parts in, then generate code for the non-constant parts.  */
	/* TODO.  There's code in cp/typeck.c to do this.  */

	num_type_elements = count_type_elements (type, true);

	/* If count_type_elements could not determine number of type elements
	   for a constant-sized object, assume clearing is needed.
	   Don't do this for variable-sized objects, as store_constructor
	   will ignore the clearing of variable-sized objects.  */
	if (num_type_elements < 0 && int_size_in_bytes (type) >= 0)
	  cleared = true;
	/* If there are "lots" of zeros, then block clear the object first.  */
	else if (num_type_elements - num_nonzero_elements
		 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
		 && num_nonzero_elements < num_type_elements/4)
	  cleared = true;
	/* ??? This bit ought not be needed.  For any element not present
	   in the initializer, we should simply set them to zero.  Except
	   we'd need to *find* the elements that are not present, and that
	   requires trickery to avoid quadratic compile-time behavior in
	   large cases or excessive memory use in small cases.  */
	else if (num_ctor_elements < num_type_elements)
	  cleared = true;

	/* If there are "lots" of initialized elements, and all of them
	   are valid address constants, then the entire initializer can
	   be dropped to memory, and then memcpy'd out.  Don't do this
	   for sparse arrays, though, as it's more efficient to follow
	   the standard CONSTRUCTOR behavior of memset followed by
	   individual element initialization.  Also don't do this for small
	   all-zero initializers (which aren't big enough to merit
	   clearing), and don't try to make bitwise copies of
	   TREE_ADDRESSABLE types.  */
	if (valid_const_initializer
	    && !(cleared || num_nonzero_elements == 0)
	    && !TREE_ADDRESSABLE (type))
	  {
	    HOST_WIDE_INT size = int_size_in_bytes (type);
	    unsigned int align;

	    /* ??? We can still get unbounded array types, at least
	       from the C++ front end.  This seems wrong, but attempt
	       to work around it for now.  */
	    if (size < 0)
	      {
		size = int_size_in_bytes (TREE_TYPE (object));
		if (size >= 0)
		  TREE_TYPE (ctor) = type = TREE_TYPE (object);
	      }

	    /* Find the maximum alignment we can assume for the object.  */
	    /* ??? Make use of DECL_OFFSET_ALIGN.  */
	    if (DECL_P (object))
	      align = DECL_ALIGN (object);
	    else
	      align = TYPE_ALIGN (type);

	    if (size > 0
		&& num_nonzero_elements > 1
		&& !can_move_by_pieces (size, align))
	      {
		tree new_tree;

		if (notify_temp_creation)
		  return GS_ERROR;

		/* Materialize the initializer as a static constant and
		   let the assignment become a block copy from it.  */
		new_tree = create_tmp_var_raw (type, "C");

		gimple_add_tmp_var (new_tree);
		TREE_STATIC (new_tree) = 1;
		TREE_READONLY (new_tree) = 1;
		DECL_INITIAL (new_tree) = ctor;
		if (align > DECL_ALIGN (new_tree))
		  {
		    DECL_ALIGN (new_tree) = align;
		    DECL_USER_ALIGN (new_tree) = 1;
		  }
		walk_tree (&DECL_INITIAL (new_tree), force_labels_r, NULL, NULL);

		TREE_OPERAND (*expr_p, 1) = new_tree;

		/* This is no longer an assignment of a CONSTRUCTOR, but
		   we still may have processing to do on the LHS.  So
		   pretend we didn't do anything here to let that happen.  */
		return GS_UNHANDLED;
	      }
	  }

	if (notify_temp_creation)
	  return GS_OK;

	/* If there are nonzero elements, pre-evaluate to capture elements
	   overlapping with the lhs into temporaries.  We must do this before
	   clearing to fetch the values before they are zeroed-out.  */
	if (num_nonzero_elements > 0)
	  {
	    preeval_data.lhs_base_decl = get_base_address (object);
	    if (!DECL_P (preeval_data.lhs_base_decl))
	      preeval_data.lhs_base_decl = NULL;
	    preeval_data.lhs_alias_set = get_alias_set (object);

	    gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
					pre_p, post_p, &preeval_data);
	  }

	if (cleared)
	  {
	    /* Zap the CONSTRUCTOR element list, which simplifies this case.
	       Note that we still have to gimplify, in order to handle the
	       case of variable sized types.  Avoid shared tree structures.  */
	    CONSTRUCTOR_ELTS (ctor) = NULL;
	    TREE_SIDE_EFFECTS (ctor) = 0;
	    object = unshare_expr (object);
	    gimplify_stmt (expr_p, pre_p);
	  }

	/* If we have not block cleared the object, or if there are nonzero
	   elements in the constructor, add assignments to the individual
	   scalar fields of the object.  */
	if (!cleared || num_nonzero_elements > 0)
	  gimplify_init_ctor_eval (object, elts, pre_p, cleared);

	*expr_p = NULL_TREE;
      }
      break;

    case COMPLEX_TYPE:
      {
	tree r, i;

	if (notify_temp_creation)
	  return GS_OK;

	/* Extract the real and imaginary parts out of the ctor.  */
	gcc_assert (VEC_length (constructor_elt, elts) == 2);
	r = VEC_index (constructor_elt, elts, 0)->value;
	i = VEC_index (constructor_elt, elts, 1)->value;
	if (r == NULL || i == NULL)
	  {
	    /* Missing parts default to zero.  */
	    tree zero = fold_convert (TREE_TYPE (type), integer_zero_node);
	    if (r == NULL)
	      r = zero;
	    if (i == NULL)
	      i = zero;
	  }

	/* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
	   represent creation of a complex value.  */
	if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
	  {
	    ctor = build_complex (type, r, i);
	    TREE_OPERAND (*expr_p, 1) = ctor;
	  }
	else
	  {
	    ctor = build2 (COMPLEX_EXPR, type, r, i);
	    TREE_OPERAND (*expr_p, 1) = ctor;
	    ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
				 pre_p,
				 post_p,
				 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
				 fb_rvalue);
	  }
      }
      break;

    case VECTOR_TYPE:
      {
	unsigned HOST_WIDE_INT ix;
	constructor_elt *ce;

	if (notify_temp_creation)
	  return GS_OK;

	/* Go ahead and simplify constant constructors to VECTOR_CST.  */
	if (TREE_CONSTANT (ctor))
	  {
	    bool constant_p = true;
	    tree value;

	    /* Even when ctor is constant, it might contain non-*_CST
	       elements, such as addresses or trapping values like
	       1.0/0.0 - 1.0/0.0.  Such expressions don't belong
	       in VECTOR_CST nodes.  */
	    FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
	      if (!CONSTANT_CLASS_P (value))
		{
		  constant_p = false;
		  break;
		}

	    if (constant_p)
	      {
		TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
		break;
	      }

	    /* Don't reduce an initializer constant even if we can't
	       make a VECTOR_CST.  It won't do anything for us, and it'll
	       prevent us from representing it as a single constant.  */
	    if (initializer_constant_valid_p (ctor, type))
	      break;

	    TREE_CONSTANT (ctor) = 0;
	  }

	/* Vector types use CONSTRUCTOR all the way through gimple
	   compilation as a general initializer.  */
	for (ix = 0; VEC_iterate (constructor_elt, elts, ix, ce); ix++)
	  {
	    enum gimplify_status tret;
	    tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
				  fb_rvalue);
	    if (tret == GS_ERROR)
	      ret = GS_ERROR;
	  }
	/* A memory LHS cannot take the CONSTRUCTOR directly; force it
	   into a formal temporary first.  */
	if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
	  TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
      }
      break;

    default:
      /* So how did we get a CONSTRUCTOR for a scalar type?  */
      gcc_unreachable ();
    }

  if (ret == GS_ERROR)
    return GS_ERROR;
  else if (want_value)
    {
      /* The caller wants the assigned value; the object itself is it.  */
      *expr_p = object;
      return GS_OK;
    }
  else
    {
      /* If we have gimplified both sides of the initializer but have
	 not emitted an assignment, do so now.  */
      if (*expr_p)
	{
	  tree lhs = TREE_OPERAND (*expr_p, 0);
	  tree rhs = TREE_OPERAND (*expr_p, 1);
	  gimple init = gimple_build_assign (lhs, rhs);
	  gimplify_seq_add_stmt (pre_p, init);
	  *expr_p = NULL;
	}

      return GS_ALL_DONE;
    }
}
3909
/* Given a pointer value OP0, return a simplified version of an
   indirection through OP0, or NULL_TREE if no simplification is
   possible.  Note that the resulting type may be different from
   the type pointed to in the sense that it is still compatible
   from the langhooks point of view.  */

tree
gimple_fold_indirect_ref (tree t)
{
  /* TYPE is the type the indirection should produce: what T points to.  */
  tree type = TREE_TYPE (TREE_TYPE (t));
  tree sub = t;
  tree subtype;

  STRIP_USELESS_TYPE_CONVERSION (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p */
      if (useless_type_conversion_p (type, optype))
	return op;

      /* *(foo *)&fooarray => fooarray[0] */
      if (TREE_CODE (optype) == ARRAY_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  /* Index with the array's lower bound (default zero).  */
	  tree type_domain = TYPE_DOMAIN (optype);
	  tree min_val = size_zero_node;
	  if (type_domain && TYPE_MIN_VALUE (type_domain))
	    min_val = TYPE_MIN_VALUE (type_domain);
	  return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
	}
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      tree osub = sub;
      /* Try to fold the inner indirection first; fall back to an
	 explicit INDIRECT_REF of the original operand.  */
      sub = gimple_fold_indirect_ref (sub);
      if (! sub)
	sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}
3966
3967 /* Given a pointer value OP0, return a simplified version of an
3968 indirection through OP0, or NULL_TREE if no simplification is
3969 possible. This may only be applied to a rhs of an expression.
3970 Note that the resulting type may be different from the type pointed
3971 to in the sense that it is still compatible from the langhooks
3972 point of view. */
3973
3974 static tree
3975 gimple_fold_indirect_ref_rhs (tree t)
3976 {
3977 return gimple_fold_indirect_ref (t);
3978 }
3979
/* Subroutine of gimplify_modify_expr to do simplifications of
   MODIFY_EXPRs based on the code of the RHS.  We loop for as long as
   something changes.

   *EXPR_P is the whole assignment; *FROM_P and *TO_P point at its RHS
   and LHS operands respectively.  PRE_P and POST_P collect side effects
   that must be emitted before/after the assignment.  WANT_VALUE is true
   when the value of the assignment is used in a containing expression.

   Returns GS_UNHANDLED when no simplification applied (the caller then
   gimplifies the assignment the ordinary way), GS_ERROR on failure, or
   another gimplify_status when the expression was rewritten.  */

static enum gimplify_status
gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
			  gimple_seq *pre_p, gimple_seq *post_p,
			  bool want_value)
{
  enum gimplify_status ret = GS_OK;

  /* Each case either rewrites *from_p (and leaves ret == GS_OK so we
     re-dispatch on the new RHS code), returns, or sets GS_UNHANDLED to
     terminate the loop.  */
  while (ret != GS_UNHANDLED)
    switch (TREE_CODE (*from_p))
      {
      case VAR_DECL:
	/* If we're assigning from a read-only variable initialized with
	   a constructor, do the direct assignment from the constructor,
	   but only if neither source nor target are volatile since this
	   latter assignment might end up being done on a per-field basis.  */
	if (DECL_INITIAL (*from_p)
	    && TREE_READONLY (*from_p)
	    && !TREE_THIS_VOLATILE (*from_p)
	    && !TREE_THIS_VOLATILE (*to_p)
	    && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR)
	  {
	    tree old_from = *from_p;

	    /* Move the constructor into the RHS.  */
	    *from_p = unshare_expr (DECL_INITIAL (*from_p));

	    /* Let's see if gimplify_init_constructor will need to put
	       it in memory.  If so, revert the change.  */
	    ret = gimplify_init_constructor (expr_p, NULL, NULL, false, true);
	    if (ret == GS_ERROR)
	      {
		*from_p = old_from;
		/* Fall through.  */
	      }
	    else
	      {
		ret = GS_OK;
		break;
	      }
	  }
	ret = GS_UNHANDLED;
	break;
      case INDIRECT_REF:
	{
	  /* If we have code like

	     *(const A*)(A*)&x

	     where the type of "x" is a (possibly cv-qualified variant
	     of "A"), treat the entire expression as identical to "x".
	     This kind of code arises in C++ when an object is bound
	     to a const reference, and if "x" is a TARGET_EXPR we want
	     to take advantage of the optimization below.  */
	  tree t = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
	  if (t)
	    {
	      *from_p = t;
	      ret = GS_OK;
	    }
	  else
	    ret = GS_UNHANDLED;
	  break;
	}

      case TARGET_EXPR:
	{
	  /* If we are initializing something from a TARGET_EXPR, strip the
	     TARGET_EXPR and initialize it directly, if possible.  This can't
	     be done if the initializer is void, since that implies that the
	     temporary is set in some non-trivial way.

	     ??? What about code that pulls out the temp and uses it
	     elsewhere?  I think that such code never uses the TARGET_EXPR as
	     an initializer.  If I'm wrong, we'll die because the temp won't
	     have any RTL.  In that case, I guess we'll need to replace
	     references somehow.  */
	  tree init = TARGET_EXPR_INITIAL (*from_p);

	  if (init
	      && !VOID_TYPE_P (TREE_TYPE (init)))
	    {
	      *from_p = init;
	      ret = GS_OK;
	    }
	  else
	    ret = GS_UNHANDLED;
	}
	break;

      case COMPOUND_EXPR:
	/* Remove any COMPOUND_EXPR in the RHS so the following cases will be
	   caught.  */
	gimplify_compound_expr (from_p, pre_p, true);
	ret = GS_OK;
	break;

      case CONSTRUCTOR:
	/* If we're initializing from a CONSTRUCTOR, break this into
	   individual MODIFY_EXPRs.  */
	return gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
					  false);

      case COND_EXPR:
	/* If we're assigning to a non-register type, push the assignment
	   down into the branches.  This is mandatory for ADDRESSABLE types,
	   since we cannot generate temporaries for such, but it saves a
	   copy in other cases as well.  */
	if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
	  {
	    /* This code should mirror the code in gimplify_cond_expr. */
	    enum tree_code code = TREE_CODE (*expr_p);
	    tree cond = *from_p;
	    tree result = *to_p;

	    ret = gimplify_expr (&result, pre_p, post_p,
				 is_gimple_lvalue, fb_lvalue);
	    if (ret != GS_ERROR)
	      ret = GS_OK;

	    /* Turn each non-void arm of the COND_EXPR into an assignment
	       of that arm's value to RESULT; the second arm needs an
	       unshared copy of RESULT to keep GENERIC tree sharing legal.  */
	    if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
	      TREE_OPERAND (cond, 1)
		= build2 (code, void_type_node, result,
			  TREE_OPERAND (cond, 1));
	    if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
	      TREE_OPERAND (cond, 2)
		= build2 (code, void_type_node, unshare_expr (result),
			  TREE_OPERAND (cond, 2));

	    TREE_TYPE (cond) = void_type_node;
	    recalculate_side_effects (cond);

	    if (want_value)
	      {
		gimplify_and_add (cond, pre_p);
		*expr_p = unshare_expr (result);
	      }
	    else
	      *expr_p = cond;
	    return ret;
	  }
	else
	  ret = GS_UNHANDLED;
	break;

      case CALL_EXPR:
	/* For calls that return in memory, give *to_p as the CALL_EXPR's
	   return slot so that we don't generate a temporary.  */
	if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
	    && aggregate_value_p (*from_p, *from_p))
	  {
	    bool use_target;

	    if (!(rhs_predicate_for (*to_p))(*from_p))
	      /* If we need a temporary, *to_p isn't accurate.  */
	      use_target = false;
	    else if (TREE_CODE (*to_p) == RESULT_DECL
		     && DECL_NAME (*to_p) == NULL_TREE
		     && needs_to_live_in_memory (*to_p))
	      /* It's OK to use the return slot directly unless it's an NRV. */
	      use_target = true;
	    else if (is_gimple_reg_type (TREE_TYPE (*to_p))
		     || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
	      /* Don't force regs into memory.  */
	      use_target = false;
	    else if (TREE_CODE (*expr_p) == INIT_EXPR)
	      /* It's OK to use the target directly if it's being
		 initialized. */
	      use_target = true;
	    else if (!is_gimple_non_addressable (*to_p))
	      /* Don't use the original target if it's already addressable;
		 if its address escapes, and the called function uses the
		 NRV optimization, a conforming program could see *to_p
		 change before the called function returns; see c++/19317.
		 When optimizing, the return_slot pass marks more functions
		 as safe after we have escape info.  */
	      use_target = false;
	    else
	      use_target = true;

	    if (use_target)
	      {
		CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
		mark_addressable (*to_p);
	      }
	  }

	/* Even when the return slot was claimed, the call itself is
	   still gimplified by the ordinary path.  */
	ret = GS_UNHANDLED;
	break;

	/* If we're initializing from a container, push the initialization
	   inside it.  */
      case CLEANUP_POINT_EXPR:
      case BIND_EXPR:
      case STATEMENT_LIST:
	{
	  tree wrap = *from_p;
	  tree t;

	  ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
			       fb_lvalue);
	  if (ret != GS_ERROR)
	    ret = GS_OK;

	  t = voidify_wrapper_expr (wrap, *expr_p);
	  gcc_assert (t == *expr_p);

	  if (want_value)
	    {
	      gimplify_and_add (wrap, pre_p);
	      *expr_p = unshare_expr (*to_p);
	    }
	  else
	    *expr_p = wrap;
	  return GS_OK;
	}

      case COMPOUND_LITERAL_EXPR:
	{
	  tree complit = TREE_OPERAND (*expr_p, 1);
	  tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
	  tree decl = DECL_EXPR_DECL (decl_s);
	  tree init = DECL_INITIAL (decl);

	  /* struct T x = (struct T) { 0, 1, 2 } can be optimized
	     into struct T x = { 0, 1, 2 } if the address of the
	     compound literal has never been taken.  */
	  if (!TREE_ADDRESSABLE (complit)
	      && !TREE_ADDRESSABLE (decl)
	      && init)
	    {
	      *expr_p = copy_node (*expr_p);
	      TREE_OPERAND (*expr_p, 1) = init;
	      return GS_OK;
	    }
	}
	/* FALLTHRU: if the optimization does not apply, report the
	   expression as unhandled like any other RHS code.  */

      default:
	ret = GS_UNHANDLED;
	break;
      }

  return ret;
}
4227
4228
4229 /* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
4230 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with
4231 DECL_GIMPLE_REG_P set.
4232
4233 IMPORTANT NOTE: This promotion is performed by introducing a load of the
4234 other, unmodified part of the complex object just before the total store.
4235 As a consequence, if the object is still uninitialized, an undefined value
4236 will be loaded into a register, which may result in a spurious exception
4237 if the register is floating-point and the value happens to be a signaling
4238 NaN for example. Then the fully-fledged complex operations lowering pass
4239 followed by a DCE pass are necessary in order to fix things up. */
4240
4241 static enum gimplify_status
4242 gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
4243 bool want_value)
4244 {
4245 enum tree_code code, ocode;
4246 tree lhs, rhs, new_rhs, other, realpart, imagpart;
4247
4248 lhs = TREE_OPERAND (*expr_p, 0);
4249 rhs = TREE_OPERAND (*expr_p, 1);
4250 code = TREE_CODE (lhs);
4251 lhs = TREE_OPERAND (lhs, 0);
4252
4253 ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
4254 other = build1 (ocode, TREE_TYPE (rhs), lhs);
4255 other = get_formal_tmp_var (other, pre_p);
4256
4257 realpart = code == REALPART_EXPR ? rhs : other;
4258 imagpart = code == REALPART_EXPR ? other : rhs;
4259
4260 if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
4261 new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
4262 else
4263 new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
4264
4265 gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
4266 *expr_p = (want_value) ? rhs : NULL_TREE;
4267
4268 return GS_ALL_DONE;
4269 }
4270
4271
/* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.

      modify_expr
	      : varname '=' rhs
	      | '*' ID '=' rhs

   PRE_P points to the list where side effects that must happen before
	*EXPR_P should be stored.

   POST_P points to the list where side effects that must happen after
	*EXPR_P should be stored.

   WANT_VALUE is nonzero iff we want to use the value of this expression
	in another expression.

   Returns GS_OK (with *EXPR_P set to the LHS value) when WANT_VALUE,
   GS_ALL_DONE otherwise, or GS_ERROR on failure.  */

static enum gimplify_status
gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
		      bool want_value)
{
  tree *from_p = &TREE_OPERAND (*expr_p, 1);
  tree *to_p = &TREE_OPERAND (*expr_p, 0);
  enum gimplify_status ret = GS_UNHANDLED;
  gimple assign;
  location_t loc = EXPR_LOCATION (*expr_p);

  gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
	      || TREE_CODE (*expr_p) == INIT_EXPR);

  /* Insert pointer conversions required by the middle-end that are not
     required by the frontend.  This fixes middle-end type checking for
     for example gcc.dg/redecl-6.c.  */
  if (POINTER_TYPE_P (TREE_TYPE (*to_p)))
    {
      STRIP_USELESS_TYPE_CONVERSION (*from_p);
      if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
	*from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
    }

  /* See if any simplifications can be done based on what the RHS is.  */
  ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
				  want_value);
  if (ret != GS_UNHANDLED)
    return ret;

  /* For zero sized types only gimplify the left hand side and right hand
     side as statements and throw away the assignment.  Do this after
     gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
     types properly.  */
  if (zero_sized_type (TREE_TYPE (*from_p)) && !want_value)
    {
      gimplify_stmt (from_p, pre_p);
      gimplify_stmt (to_p, pre_p);
      *expr_p = NULL_TREE;
      return GS_ALL_DONE;
    }

  /* If the value being copied is of variable width, compute the length
     of the copy into a WITH_SIZE_EXPR.   Note that we need to do this
     before gimplifying any of the operands so that we can resolve any
     PLACEHOLDER_EXPRs in the size.  Also note that the RTL expander uses
     the size of the expression to be copied, not of the destination, so
     that is what we must do here.  */
  maybe_with_size_expr (from_p);

  /* Gimplify the LHS first so its side effects are sequenced before
     those of the RHS in PRE_P.  */
  ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
  if (ret == GS_ERROR)
    return ret;

  /* As a special case, we have to temporarily allow for assignments
     with a CALL_EXPR on the RHS.  Since in GIMPLE a function call is
     a toplevel statement, when gimplifying the GENERIC expression
     MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
     GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.

     Instead, we need to create the tuple GIMPLE_CALL <a, foo>.  To
     prevent gimplify_expr from trying to create a new temporary for
     foo's LHS, we tell it that it should only gimplify until it
     reaches the CALL_EXPR.  On return from gimplify_expr, the newly
     created GIMPLE_CALL <foo> will be the last statement in *PRE_P
     and all we need to do here is set 'a' to be its LHS.  */
  ret = gimplify_expr (from_p, pre_p, post_p, rhs_predicate_for (*to_p),
		       fb_rvalue);
  if (ret == GS_ERROR)
    return ret;

  /* Now see if the above changed *from_p to something we handle specially.  */
  ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
				  want_value);
  if (ret != GS_UNHANDLED)
    return ret;

  /* If we've got a variable sized assignment between two lvalues (i.e. does
     not involve a call), then we can make things a bit more straightforward
     by converting the assignment to memcpy or memset.  */
  if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
    {
      tree from = TREE_OPERAND (*from_p, 0);
      tree size = TREE_OPERAND (*from_p, 1);

      if (TREE_CODE (from) == CONSTRUCTOR)
	return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);

      if (is_gimple_addressable (from))
	{
	  *from_p = from;
	  return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
						 pre_p);
	}
    }

  /* Transform partial stores to non-addressable complex variables into
     total stores.  This allows us to use real instead of virtual operands
     for these variables, which improves optimization.  */
  if ((TREE_CODE (*to_p) == REALPART_EXPR
       || TREE_CODE (*to_p) == IMAGPART_EXPR)
      && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
    return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);

  /* Try to alleviate the effects of the gimplification creating artificial
     temporaries (see for example is_gimple_reg_rhs) on the debug info.  */
  if (!gimplify_ctxp->into_ssa
      && DECL_P (*from_p)
      && DECL_IGNORED_P (*from_p)
      && DECL_P (*to_p)
      && !DECL_IGNORED_P (*to_p))
    {
      /* Name the artificial temporary after the user variable it feeds
	 and record the debug relationship between the two.  */
      if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
	DECL_NAME (*from_p)
	  = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
      DECL_DEBUG_EXPR_IS_FROM (*from_p) = 1;
      SET_DECL_DEBUG_EXPR (*from_p, *to_p);
   }

  if (TREE_CODE (*from_p) == CALL_EXPR)
    {
      /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
	 instead of a GIMPLE_ASSIGN.  A noreturn call never produces a
	 value, so it gets no LHS.  */
      assign = gimple_build_call_from_tree (*from_p);
      if (!gimple_call_noreturn_p (assign))
	gimple_call_set_lhs (assign, *to_p);
    }
  else
    {
      assign = gimple_build_assign (*to_p, *from_p);
      gimple_set_location (assign, EXPR_LOCATION (*expr_p));
    }

  gimplify_seq_add_stmt (pre_p, assign);

  if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
    {
      /* If we've somehow already got an SSA_NAME on the LHS, then
	 we've probably modified it twice.  Not good.  */
      gcc_assert (TREE_CODE (*to_p) != SSA_NAME);
      *to_p = make_ssa_name (*to_p, assign);
      gimple_set_lhs (assign, *to_p);
    }

  if (want_value)
    {
      *expr_p = unshare_expr (*to_p);
      return GS_OK;
    }
  else
    *expr_p = NULL;

  return GS_ALL_DONE;
}
4440
4441 /* Gimplify a comparison between two variable-sized objects. Do this
4442 with a call to BUILT_IN_MEMCMP. */
4443
4444 static enum gimplify_status
4445 gimplify_variable_sized_compare (tree *expr_p)
4446 {
4447 tree op0 = TREE_OPERAND (*expr_p, 0);
4448 tree op1 = TREE_OPERAND (*expr_p, 1);
4449 tree t, arg, dest, src;
4450 location_t loc = EXPR_LOCATION (*expr_p);
4451
4452 arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
4453 arg = unshare_expr (arg);
4454 arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
4455 src = build_fold_addr_expr_loc (loc, op1);
4456 dest = build_fold_addr_expr_loc (loc, op0);
4457 t = implicit_built_in_decls[BUILT_IN_MEMCMP];
4458 t = build_call_expr_loc (loc, t, 3, dest, src, arg);
4459 *expr_p
4460 = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
4461
4462 return GS_OK;
4463 }
4464
4465 /* Gimplify a comparison between two aggregate objects of integral scalar
4466 mode as a comparison between the bitwise equivalent scalar values. */
4467
4468 static enum gimplify_status
4469 gimplify_scalar_mode_aggregate_compare (tree *expr_p)
4470 {
4471 location_t loc = EXPR_LOCATION (*expr_p);
4472 tree op0 = TREE_OPERAND (*expr_p, 0);
4473 tree op1 = TREE_OPERAND (*expr_p, 1);
4474
4475 tree type = TREE_TYPE (op0);
4476 tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
4477
4478 op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
4479 op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);
4480
4481 *expr_p
4482 = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
4483
4484 return GS_OK;
4485 }
4486
4487 /* Gimplify TRUTH_ANDIF_EXPR and TRUTH_ORIF_EXPR expressions. EXPR_P
4488 points to the expression to gimplify.
4489
4490 Expressions of the form 'a && b' are gimplified to:
4491
4492 a && b ? true : false
4493
4494 LOCUS is the source location to be put on the generated COND_EXPR.
4495 gimplify_cond_expr will do the rest. */
4496
4497 static enum gimplify_status
4498 gimplify_boolean_expr (tree *expr_p, location_t locus)
4499 {
4500 /* Preserve the original type of the expression. */
4501 tree type = TREE_TYPE (*expr_p);
4502
4503 *expr_p = build3 (COND_EXPR, type, *expr_p,
4504 fold_convert_loc (locus, type, boolean_true_node),
4505 fold_convert_loc (locus, type, boolean_false_node));
4506
4507 SET_EXPR_LOCATION (*expr_p, locus);
4508
4509 return GS_OK;
4510 }
4511
4512 /* Gimplifies an expression sequence. This function gimplifies each
4513 expression and re-writes the original expression with the last
4514 expression of the sequence in GIMPLE form.
4515
4516 PRE_P points to the list where the side effects for all the
4517 expressions in the sequence will be emitted.
4518
4519 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
4520
4521 static enum gimplify_status
4522 gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
4523 {
4524 tree t = *expr_p;
4525
4526 do
4527 {
4528 tree *sub_p = &TREE_OPERAND (t, 0);
4529
4530 if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
4531 gimplify_compound_expr (sub_p, pre_p, false);
4532 else
4533 gimplify_stmt (sub_p, pre_p);
4534
4535 t = TREE_OPERAND (t, 1);
4536 }
4537 while (TREE_CODE (t) == COMPOUND_EXPR);
4538
4539 *expr_p = t;
4540 if (want_value)
4541 return GS_OK;
4542 else
4543 {
4544 gimplify_stmt (expr_p, pre_p);
4545 return GS_ALL_DONE;
4546 }
4547 }
4548
4549
4550 /* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
4551 gimplify. After gimplification, EXPR_P will point to a new temporary
4552 that holds the original value of the SAVE_EXPR node.
4553
4554 PRE_P points to the list where side effects that must happen before
4555 *EXPR_P should be stored. */
4556
4557 static enum gimplify_status
4558 gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4559 {
4560 enum gimplify_status ret = GS_ALL_DONE;
4561 tree val;
4562
4563 gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
4564 val = TREE_OPERAND (*expr_p, 0);
4565
4566 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
4567 if (!SAVE_EXPR_RESOLVED_P (*expr_p))
4568 {
4569 /* The operand may be a void-valued expression such as SAVE_EXPRs
4570 generated by the Java frontend for class initialization. It is
4571 being executed only for its side-effects. */
4572 if (TREE_TYPE (val) == void_type_node)
4573 {
4574 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
4575 is_gimple_stmt, fb_none);
4576 val = NULL;
4577 }
4578 else
4579 val = get_initialized_tmp_var (val, pre_p, post_p);
4580
4581 TREE_OPERAND (*expr_p, 0) = val;
4582 SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
4583 }
4584
4585 *expr_p = val;
4586
4587 return ret;
4588 }
4589
/* Re-write the ADDR_EXPR node pointed to by EXPR_P

      unary_expr
	      : ...
	      | '&' varname
	      ...

    PRE_P points to the list where side effects that must happen before
	*EXPR_P should be stored.

    POST_P points to the list where side effects that must happen after
	*EXPR_P should be stored.

    Returns GS_OK on success, or GS_ERROR when the operand cannot be
    made addressable.  */

static enum gimplify_status
gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree expr = *expr_p;
  tree op0 = TREE_OPERAND (expr, 0);
  enum gimplify_status ret;
  location_t loc = EXPR_LOCATION (*expr_p);

  switch (TREE_CODE (op0))
    {
    case INDIRECT_REF:
    case MISALIGNED_INDIRECT_REF:
    do_indirect_ref:
      /* Check if we are dealing with an expression of the form '&*ptr'.
	 While the front end folds away '&*ptr' into 'ptr', these
	 expressions may be generated internally by the compiler (e.g.,
	 builtins like __builtin_va_end).  */
      /* Caution: the silent array decomposition semantics we allow for
	 ADDR_EXPR means we can't always discard the pair.  */
      /* Gimplification of the ADDR_EXPR operand may drop
	 cv-qualification conversions, so make sure we add them if
	 needed.  */
      {
	tree op00 = TREE_OPERAND (op0, 0);
	tree t_expr = TREE_TYPE (expr);
	tree t_op00 = TREE_TYPE (op00);

	/* '&*ptr' simplifies to 'ptr', converted if the types differ.  */
        if (!useless_type_conversion_p (t_expr, t_op00))
	  op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
	*expr_p = op00;
	ret = GS_OK;
      }
      break;

    case VIEW_CONVERT_EXPR:
      /* Take the address of our operand and then convert it to the type of
	 this ADDR_EXPR.

	 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
	 all clear.  The impact of this transformation is even less clear.  */

      /* If the operand is a useless conversion, look through it.  Doing so
	 guarantees that the ADDR_EXPR and its operand will remain of the
	 same type.  */
      if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
	op0 = TREE_OPERAND (op0, 0);

      /* &VIEW_CONVERT_EXPR <T> (x) becomes (T *) &x.  */
      *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
				  build_fold_addr_expr_loc (loc,
							TREE_OPERAND (op0, 0)));
      ret = GS_OK;
      break;

    default:
      /* We use fb_either here because the C frontend sometimes takes
	 the address of a call that returns a struct; see
	 gcc.dg/c99-array-lval-1.c.  The gimplifier will correctly make
	 the implied temporary explicit.  */

      /* Make the operand addressable.  */
      ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
			   is_gimple_addressable, fb_either);
      if (ret == GS_ERROR)
	break;

      /* Then mark it.  Beware that it may not be possible to do so directly
	 if a temporary has been created by the gimplification.  */
      prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p);

      op0 = TREE_OPERAND (expr, 0);

      /* For various reasons, the gimplification of the expression
	 may have made a new INDIRECT_REF.  */
      if (TREE_CODE (op0) == INDIRECT_REF)
	goto do_indirect_ref;

      mark_addressable (TREE_OPERAND (expr, 0));

      /* The FEs may end up building ADDR_EXPRs early on a decl with
	 an incomplete type.  Re-build ADDR_EXPRs in canonical form
	 here.  */
      if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr))))
	*expr_p = build_fold_addr_expr (op0);

      /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly.  */
      recompute_tree_invariant_for_addr_expr (*expr_p);

      /* If we re-built the ADDR_EXPR add a conversion to the original type
         if required.  */
      if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
	*expr_p = fold_convert (TREE_TYPE (expr), *expr_p);

      break;
    }

  return ret;
}
4700
/* Gimplify the operands of an ASM_EXPR.  Input operands should be a gimple
   value; output operands should be a gimple lvalue.

   In/out ("+") operands are split into a separate output and a matching
   input so the optimizers see them independently.  On success a
   GIMPLE_ASM is appended to PRE_P; statements with constraint errors
   are diagnosed and dropped.  Returns GS_ALL_DONE or GS_ERROR.  */

static enum gimplify_status
gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree expr;
  int noutputs;
  const char **oconstraints;
  int i;
  tree link;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;
  enum gimplify_status ret, tret;
  gimple stmt;
  VEC(tree, gc) *inputs;
  VEC(tree, gc) *outputs;
  VEC(tree, gc) *clobbers;
  VEC(tree, gc) *labels;
  tree link_next;

  expr = *expr_p;
  noutputs = list_length (ASM_OUTPUTS (expr));
  /* Remember each output constraint string; parse_input_constraint
     needs them to resolve matching ("0", "1", ...) input constraints.  */
  oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));

  inputs = outputs = clobbers = labels = NULL;

  ret = GS_ALL_DONE;
  link_next = NULL_TREE;
  for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
    {
      bool ok;
      size_t constraint_len;

      /* Save the chain successor now: the link is detached below.  */
      link_next = TREE_CHAIN (link);

      oconstraints[i]
	= constraint
	= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      constraint_len = strlen (constraint);
      if (constraint_len == 0)
	continue;

      ok = parse_output_constraint (&constraint, i, 0, 0,
				    &allows_mem, &allows_reg, &is_inout);
      if (!ok)
	{
	  ret = GS_ERROR;
	  is_inout = false;
	}

      if (!allows_reg && allows_mem)
	mark_addressable (TREE_VALUE (link));

      tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
			    is_inout ? is_gimple_min_lval : is_gimple_lvalue,
			    fb_lvalue | fb_mayfail);
      if (tret == GS_ERROR)
	{
	  error ("invalid lvalue in asm output %d", i);
	  ret = tret;
	}

      VEC_safe_push (tree, gc, outputs, link);
      TREE_CHAIN (link) = NULL_TREE;

      if (is_inout)
	{
	  /* An input/output operand.  To give the optimizers more
	     flexibility, split it into separate input and output
	     operands.  */
	  tree input;
	  char buf[10];

	  /* Turn the in/out constraint into an output constraint.  */
	  char *p = xstrdup (constraint);
	  p[0] = '=';
	  TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);

	  /* And add a matching input constraint.  */
	  if (allows_reg)
	    {
	      sprintf (buf, "%d", i);

	      /* If there are multiple alternatives in the constraint,
		 handle each of them individually.  Those that allow register
		 will be replaced with operand number, the others will stay
		 unchanged.  */
	      if (strchr (p, ',') != NULL)
		{
		  size_t len = 0, buflen = strlen (buf);
		  char *beg, *end, *str, *dst;

		  /* First pass: compute an upper bound on the length of
		     the rewritten constraint string.  */
		  for (beg = p + 1;;)
		    {
		      end = strchr (beg, ',');
		      if (end == NULL)
			end = strchr (beg, '\0');
		      if ((size_t) (end - beg) < buflen)
			len += buflen + 1;
		      else
			len += end - beg + 1;
		      if (*end)
			beg = end + 1;
		      else
			break;
		    }

		  /* Second pass: rewrite each alternative, substituting
		     the operand number for register alternatives.  */
		  str = (char *) alloca (len);
		  for (beg = p + 1, dst = str;;)
		    {
		      const char *tem;
		      bool mem_p, reg_p, inout_p;

		      end = strchr (beg, ',');
		      if (end)
			*end = '\0';
		      /* Temporarily prefix '=' so the alternative parses
			 as an output constraint.  */
		      beg[-1] = '=';
		      tem = beg - 1;
		      parse_output_constraint (&tem, i, 0, 0,
					       &mem_p, &reg_p, &inout_p);
		      if (dst != str)
			*dst++ = ',';
		      if (reg_p)
			{
			  memcpy (dst, buf, buflen);
			  dst += buflen;
			}
		      else
			{
			  if (end)
			    len = end - beg;
			  else
			    len = strlen (beg);
			  memcpy (dst, beg, len);
			  dst += len;
			}
		      if (end)
			beg = end + 1;
		      else
			break;
		    }
		  *dst = '\0';
		  input = build_string (dst - str, str);
		}
	      else
		input = build_string (strlen (buf), buf);
	    }
	  else
	    /* Memory-only operand: drop the leading '+' and reuse the
	       remainder of the constraint as the input constraint.  */
	    input = build_string (constraint_len - 1, constraint + 1);

	  free (p);

	  input = build_tree_list (build_tree_list (NULL_TREE, input),
				   unshare_expr (TREE_VALUE (link)));
	  ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
	}
    }

  link_next = NULL_TREE;
  for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
    {
      link_next = TREE_CHAIN (link);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0,
			      oconstraints, &allows_mem, &allows_reg);

      /* If we can't make copies, we can only accept memory.  */
      if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (link))))
	{
	  if (allows_mem)
	    allows_reg = 0;
	  else
	    {
	      error ("impossible constraint in %<asm%>");
	      error ("non-memory input %d must stay in memory", i);
	      return GS_ERROR;
	    }
	}

      /* If the operand is a memory input, it should be an lvalue.  */
      if (!allows_reg && allows_mem)
	{
	  tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
				is_gimple_lvalue, fb_lvalue | fb_mayfail);
	  mark_addressable (TREE_VALUE (link));
	  if (tret == GS_ERROR)
	    {
	      if (EXPR_HAS_LOCATION (TREE_VALUE (link)))
		input_location = EXPR_LOCATION (TREE_VALUE (link));
	      error ("memory input %d is not directly addressable", i);
	      ret = tret;
	    }
	}
      else
	{
	  tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
				is_gimple_asm_val, fb_rvalue);
	  if (tret == GS_ERROR)
	    ret = tret;
	}

      TREE_CHAIN (link) = NULL_TREE;
      VEC_safe_push (tree, gc, inputs, link);
    }

  for (link = ASM_CLOBBERS (expr); link; ++i, link = TREE_CHAIN (link))
    VEC_safe_push (tree, gc, clobbers, link);

  for (link = ASM_LABELS (expr); link; ++i, link = TREE_CHAIN (link))
    VEC_safe_push (tree, gc, labels, link);

  /* Do not add ASMs with errors to the gimple IL stream.  */
  if (ret != GS_ERROR)
    {
      stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
				   inputs, outputs, clobbers, labels);

      gimple_asm_set_volatile (stmt, ASM_VOLATILE_P (expr));
      gimple_asm_set_input (stmt, ASM_INPUT_P (expr));

      gimplify_seq_add_stmt (pre_p, stmt);
    }

  return ret;
}
4927
/* Gimplify a CLEANUP_POINT_EXPR.  Currently this works by adding
   GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
   gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
   return to this function.

   FIXME should we complexify the prequeue handling instead?  Or use flags
   for all the cleanups and let the optimizer tighten them up?  The current
   code seems pretty fragile; it will break on a cleanup within any
   non-conditional nesting.  But any such nesting would be broken, anyway;
   we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
   and continues out of it.  We can do that at the RTL level, though, so
   having an optimizer to tighten up try/finally regions would be a Good
   Thing.  */

static enum gimplify_status
gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
{
  gimple_stmt_iterator iter;
  gimple_seq body_sequence = NULL;

  /* If *EXPR_P is used in value context, voidify_wrapper_expr arranges
     for the value to land in a temporary and returns it; otherwise TEMP
     is NULL.  */
  tree temp = voidify_wrapper_expr (*expr_p, NULL);

  /* We only care about the number of conditions between the innermost
     CLEANUP_POINT_EXPR and the cleanup.  So save and reset the count and
     any cleanups collected outside the CLEANUP_POINT_EXPR.  */
  int old_conds = gimplify_ctxp->conditions;
  gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
  gimplify_ctxp->conditions = 0;
  gimplify_ctxp->conditional_cleanups = NULL;

  /* Gimplify the wrapped expression; cleanups encountered along the way
     appear as GIMPLE_WITH_CLEANUP_EXPR markers in BODY_SEQUENCE.  */
  gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);

  gimplify_ctxp->conditions = old_conds;
  gimplify_ctxp->conditional_cleanups = old_cleanups;

  /* Walk the gimplified body, converting each GIMPLE_WITH_CLEANUP_EXPR
     into a GIMPLE_TRY protecting the statements that follow it.  */
  for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
    {
      gimple wce = gsi_stmt (iter);

      if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
	{
	  if (gsi_one_before_end_p (iter))
	    {
	      /* The WCE is the last statement, so there is nothing for
		 the cleanup to protect; just splice the cleanup
		 statements in its place.
		 Note that gsi_insert_seq_before and gsi_remove do not
		 scan operands, unlike some other sequence mutators.  */
	      gsi_insert_seq_before_without_update (&iter,
						    gimple_wce_cleanup (wce),
						    GSI_SAME_STMT);
	      gsi_remove (&iter, true);
	      break;
	    }
	  else
	    {
	      gimple gtry;
	      gimple_seq seq;
	      enum gimple_try_flags kind;

	      /* An EH-only cleanup becomes a try/catch; otherwise a
		 try/finally that also runs on normal fallthrough.  */
	      if (gimple_wce_cleanup_eh_only (wce))
		kind = GIMPLE_TRY_CATCH;
	      else
		kind = GIMPLE_TRY_FINALLY;
	      seq = gsi_split_seq_after (iter);

	      gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
	      /* Do not use gsi_replace here, as it may scan operands.
		 We want to do a simple structural modification only.  */
	      *gsi_stmt_ptr (&iter) = gtry;
	      /* Keep scanning inside the protected sequence, which may
		 contain further WCEs.  */
	      iter = gsi_start (seq);
	    }
	}
      else
	gsi_next (&iter);
    }

  gimplify_seq_add_seq (pre_p, body_sequence);
  if (temp)
    {
      /* Value context: the result is TEMP, which still needs further
	 gimplification.  */
      *expr_p = temp;
      return GS_OK;
    }
  else
    {
      *expr_p = NULL;
      return GS_ALL_DONE;
    }
}
5014
/* Insert a cleanup marker for gimplify_cleanup_point_expr.  CLEANUP
   is the cleanup action required.  EH_ONLY is true if the cleanup should
   only be executed if an exception is thrown, not on normal exit.
   VAR is the variable whose initialization the cleanup belongs to;
   PRE_P is the statement sequence the marker is appended to.  */

static void
gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p)
{
  gimple wce;
  gimple_seq cleanup_stmts = NULL;

  /* Errors can result in improperly nested cleanups.  Which results in
     confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR.  */
  if (errorcount || sorrycount)
    return;

  if (gimple_conditional_context ())
    {
      /* If we're in a conditional context, this is more complex.  We only
	 want to run the cleanup if we actually ran the initialization that
	 necessitates it, but we want to run it after the end of the
	 conditional context.  So we wrap the try/finally around the
	 condition and use a flag to determine whether or not to actually
	 run the destructor.  Thus

	   test ? f(A()) : 0

	 becomes (approximately)

	   flag = 0;
	   try {
	     if (test) { A::A(temp); flag = 1; val = f(temp); }
	     else { val = 0; }
	   } finally {
	     if (flag) A::~A(temp);
	   }
	   val
      */
      tree flag = create_tmp_var (boolean_type_node, "cleanup");
      gimple ffalse = gimple_build_assign (flag, boolean_false_node);
      gimple ftrue = gimple_build_assign (flag, boolean_true_node);

      /* Guard the cleanup action with the flag.  */
      cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
      gimplify_stmt (&cleanup, &cleanup_stmts);
      wce = gimple_build_wce (cleanup_stmts);

      /* Clear the flag in the conditional-cleanup sequence, register the
	 guarded cleanup there too, and set the flag right after the
	 initialization in *PRE_P.  */
      gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
      gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
      gimplify_seq_add_stmt (pre_p, ftrue);

      /* Because of this manipulation, and the EH edges that jump
	 threading cannot redirect, the temporary (VAR) will appear
	 to be used uninitialized.  Don't warn.  */
      TREE_NO_WARNING (var) = 1;
    }
  else
    {
      /* Unconditional case: just append the marker; it is turned into
	 a GIMPLE_TRY by gimplify_cleanup_point_expr.  */
      gimplify_stmt (&cleanup, &cleanup_stmts);
      wce = gimple_build_wce (cleanup_stmts);
      gimple_wce_set_cleanup_eh_only (wce, eh_only);
      gimplify_seq_add_stmt (pre_p, wce);
    }
}
5077
/* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR.
   The TARGET_EXPR's slot becomes the value of the expression; its
   initializer is gimplified into *PRE_P and any cleanup is registered
   via gimple_push_cleanup.  */

static enum gimplify_status
gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree targ = *expr_p;
  tree temp = TARGET_EXPR_SLOT (targ);
  tree init = TARGET_EXPR_INITIAL (targ);
  enum gimplify_status ret;

  if (init)
    {
      /* TARGET_EXPR temps aren't part of the enclosing block, so add it
	 to the temps list.  Handle also variable length TARGET_EXPRs.  */
      if (TREE_CODE (DECL_SIZE (temp)) != INTEGER_CST)
	{
	  if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
	    gimplify_type_sizes (TREE_TYPE (temp), pre_p);
	  gimplify_vla_decl (temp, pre_p);
	}
      else
	gimple_add_tmp_var (temp);

      /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
	 expression is supposed to initialize the slot.  */
      if (VOID_TYPE_P (TREE_TYPE (init)))
	ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
      else
	{
	  /* Otherwise build an explicit assignment into the slot and
	     gimplify that instead.  */
	  tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
	  init = init_expr;
	  ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
	  init = NULL;
	  /* The INIT_EXPR node has been consumed into *PRE_P; return
	     the tree node itself to the GC allocator.  */
	  ggc_free (init_expr);
	}
      if (ret == GS_ERROR)
	{
	  /* PR c++/28266 Make sure this is expanded only once. */
	  TARGET_EXPR_INITIAL (targ) = NULL_TREE;
	  return GS_ERROR;
	}
      /* Only the void-initializer path above can leave INIT non-NULL;
	 emit whatever remains of it.  */
      if (init)
	gimplify_and_add (init, pre_p);

      /* If needed, push the cleanup for the temp. */
      if (TARGET_EXPR_CLEANUP (targ))
	gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
			     CLEANUP_EH_ONLY (targ), pre_p);

      /* Only expand this once. */
      TREE_OPERAND (targ, 3) = init;
      TARGET_EXPR_INITIAL (targ) = NULL_TREE;
    }
  else
    /* We should have expanded this before. */
    gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));

  *expr_p = temp;
  return GS_OK;
}
5138
5139 /* Gimplification of expression trees. */
5140
5141 /* Gimplify an expression which appears at statement context. The
5142 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is
5143 NULL, a new sequence is allocated.
5144
5145 Return true if we actually added a statement to the queue. */
5146
5147 bool
5148 gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
5149 {
5150 gimple_seq_node last;
5151
5152 if (!*seq_p)
5153 *seq_p = gimple_seq_alloc ();
5154
5155 last = gimple_seq_last (*seq_p);
5156 gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
5157 return last != gimple_seq_last (*seq_p);
5158 }
5159
5160
5161 /* Add FIRSTPRIVATE entries for DECL in the OpenMP the surrounding parallels
5162 to CTX. If entries already exist, force them to be some flavor of private.
5163 If there is no enclosing parallel, do nothing. */
5164
5165 void
5166 omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
5167 {
5168 splay_tree_node n;
5169
5170 if (decl == NULL || !DECL_P (decl))
5171 return;
5172
5173 do
5174 {
5175 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5176 if (n != NULL)
5177 {
5178 if (n->value & GOVD_SHARED)
5179 n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
5180 else
5181 return;
5182 }
5183 else if (ctx->region_type != ORT_WORKSHARE)
5184 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
5185
5186 ctx = ctx->outer_context;
5187 }
5188 while (ctx);
5189 }
5190
/* Similarly for each of the type sizes of TYPE.  Recursively walks the
   structure of TYPE and firstprivatizes every size/offset expression
   it contains.  */

static void
omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
{
  if (type == NULL || type == error_mark_node)
    return;
  type = TYPE_MAIN_VARIANT (type);

  /* Process each type at most once per context; this also stops the
     recursion on cyclic type graphs.  */
  if (pointer_set_insert (ctx->privatized_types, type))
    return;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
      /* Scalar types: the bounds may be non-constant expressions.  */
      omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
      omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
      break;

    case ARRAY_TYPE:
      /* Both the element type and the index domain may carry sizes.  */
      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
      omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
	tree field;
	for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
	  if (TREE_CODE (field) == FIELD_DECL)
	    {
	      omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
	      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
	    }
      }
      break;

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
      break;

    default:
      break;
    }

  /* The overall size expressions apply to every kind of type.  */
  omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
  omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
  /* Let the front end handle any language-specific sizes.  */
  lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
}
5246
/* Add an entry for DECL in the OpenMP context CTX with FLAGS.  */

static void
omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
{
  splay_tree_node n;
  unsigned int nflags;
  tree t;

  /* Bail out on erroneous inputs; diagnostics were issued elsewhere.  */
  if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node)
    return;

  /* Never elide decls whose type has TREE_ADDRESSABLE set.  This means
     there are constructors involved somewhere.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (decl))
      || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl)))
    flags |= GOVD_SEEN;

  n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
  if (n != NULL)
    {
      /* We shouldn't be re-adding the decl with the same data
	 sharing class.  */
      gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
      /* The only combination of data sharing classes we should see is
	 FIRSTPRIVATE and LASTPRIVATE.  */
      nflags = n->value | flags;
      gcc_assert ((nflags & GOVD_DATA_SHARE_CLASS)
		  == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE));
      n->value = nflags;
      return;
    }

  /* When adding a variable-sized variable, we have to handle all sorts
     of additional bits of data: the pointer replacement variable, and
     the parameters of the type.  */
  if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
    {
      /* Add the pointer replacement variable as PRIVATE if the variable
	 replacement is private, else FIRSTPRIVATE since we'll need the
	 address of the original variable either for SHARED, or for the
	 copy into or out of the context.  */
      if (!(flags & GOVD_LOCAL))
	{
	  nflags = flags & GOVD_PRIVATE ? GOVD_PRIVATE : GOVD_FIRSTPRIVATE;
	  nflags |= flags & GOVD_SEEN;
	  /* The replacement pointer lives in DECL's value expr, which
	     must have the form *ptr_var.  */
	  t = DECL_VALUE_EXPR (decl);
	  gcc_assert (TREE_CODE (t) == INDIRECT_REF);
	  t = TREE_OPERAND (t, 0);
	  gcc_assert (DECL_P (t));
	  omp_add_variable (ctx, t, nflags);
	}

      /* Add all of the variable and type parameters (which should have
	 been gimplified to a formal temporary) as FIRSTPRIVATE.  */
      omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
      omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));

      /* The variable-sized variable itself is never SHARED, only some form
	 of PRIVATE.  The sharing would take place via the pointer variable
	 which we remapped above.  */
      if (flags & GOVD_SHARED)
	flags = GOVD_PRIVATE | GOVD_DEBUG_PRIVATE
		| (flags & (GOVD_SEEN | GOVD_EXPLICIT));

      /* We're going to make use of the TYPE_SIZE_UNIT at least in the
	 alloca statement we generate for the variable, so make sure it
	 is available.  This isn't automatically needed for the SHARED
	 case, since we won't be allocating local storage then.
	 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
	 in this case omp_notice_variable will be called later
	 on when it is gimplified.  */
      else if (! (flags & GOVD_LOCAL))
	omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
    }
  else if (lang_hooks.decls.omp_privatize_by_reference (decl))
    {
      gcc_assert ((flags & GOVD_LOCAL) == 0);
      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));

      /* Similar to the direct variable sized case above, we'll need the
	 size of references being privatized.  */
      if ((flags & GOVD_SHARED) == 0)
	{
	  t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
	  if (TREE_CODE (t) != INTEGER_CST)
	    omp_notice_variable (ctx, t, true);
	}
    }

  /* Finally record DECL itself with the (possibly adjusted) FLAGS.  */
  splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
}
5340
/* Record the fact that DECL was used within the OpenMP context CTX.
   IN_CODE is true when real code uses DECL, and false when we should
   merely emit default(none) errors.  Return true if DECL is going to
   be remapped and thus DECL shouldn't be gimplified into its
   DECL_VALUE_EXPR (if any).  */

static bool
omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
{
  splay_tree_node n;
  unsigned flags = in_code ? GOVD_SEEN : 0;
  bool ret = false, shared;

  if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node)
    return false;

  /* Threadprivate variables are predetermined.  */
  if (is_global_var (decl))
    {
      if (DECL_THREAD_LOCAL_P (decl))
	return false;

      /* Also check whether DECL's value expr is rooted in a
	 thread-local variable.  */
      if (DECL_HAS_VALUE_EXPR_P (decl))
	{
	  tree value = get_base_address (DECL_VALUE_EXPR (decl));

	  if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
	    return false;
	}
    }

  n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
  if (n == NULL)
    {
      /* First time DECL is seen in CTX: compute its implicit
	 data-sharing attribute and record it.  */
      enum omp_clause_default_kind default_kind, kind;
      struct gimplify_omp_ctx *octx;

      if (ctx->region_type == ORT_WORKSHARE)
	goto do_outer;

      /* ??? Some compiler-generated variables (like SAVE_EXPRs) could be
	 remapped firstprivate instead of shared.  To some extent this is
	 addressed in omp_firstprivatize_type_sizes, but not effectively.  */
      default_kind = ctx->default_kind;
      kind = lang_hooks.decls.omp_predetermined_sharing (decl);
      if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
	default_kind = kind;

      switch (default_kind)
	{
	case OMP_CLAUSE_DEFAULT_NONE:
	  /* default(none) requires every variable to be listed
	     explicitly; diagnose, then treat as shared.  */
	  error ("%qE not specified in enclosing parallel",
		 DECL_NAME (decl));
	  error_at (ctx->location, "enclosing parallel");
	  /* FALLTHRU */
	case OMP_CLAUSE_DEFAULT_SHARED:
	  flags |= GOVD_SHARED;
	  break;
	case OMP_CLAUSE_DEFAULT_PRIVATE:
	  flags |= GOVD_PRIVATE;
	  break;
	case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
	  flags |= GOVD_FIRSTPRIVATE;
	  break;
	case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
	  /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED.  */
	  gcc_assert (ctx->region_type == ORT_TASK);
	  if (ctx->outer_context)
	    omp_notice_variable (ctx->outer_context, decl, in_code);
	  /* Scan outward up to and including the innermost enclosing
	     parallel; if any context makes DECL non-shared, the task
	     sees it firstprivate.  */
	  for (octx = ctx->outer_context; octx; octx = octx->outer_context)
	    {
	      splay_tree_node n2;

	      n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
	      if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
		{
		  flags |= GOVD_FIRSTPRIVATE;
		  break;
		}
	      if ((octx->region_type & ORT_PARALLEL) != 0)
		break;
	    }
	  if (flags & GOVD_FIRSTPRIVATE)
	    break;
	  /* A parameter or function-local variable with no enclosing
	     context claiming it is firstprivate for the task.  */
	  if (octx == NULL
	      && (TREE_CODE (decl) == PARM_DECL
		  || (!is_global_var (decl)
		      && DECL_CONTEXT (decl) == current_function_decl)))
	    {
	      flags |= GOVD_FIRSTPRIVATE;
	      break;
	    }
	  flags |= GOVD_SHARED;
	  break;
	default:
	  gcc_unreachable ();
	}

      if ((flags & GOVD_PRIVATE)
	  && lang_hooks.decls.omp_private_outer_ref (decl))
	flags |= GOVD_PRIVATE_OUTER_REF;

      omp_add_variable (ctx, decl, flags);

      shared = (flags & GOVD_SHARED) != 0;
      ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
      goto do_outer;
    }

  /* DECL is already mapped in CTX.  On the first real (non-local) use
     of a variable-length DECL, also mark its pointer replacement
     variable as seen.  */
  if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
      && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
      && DECL_SIZE (decl)
      && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
    {
      splay_tree_node n2;
      tree t = DECL_VALUE_EXPR (decl);
      gcc_assert (TREE_CODE (t) == INDIRECT_REF);
      t = TREE_OPERAND (t, 0);
      gcc_assert (DECL_P (t));
      n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
      n2->value |= GOVD_SEEN;
    }

  shared = ((flags | n->value) & GOVD_SHARED) != 0;
  ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);

  /* If nothing changed, there's nothing left to do.  */
  if ((n->value & flags) == flags)
    return ret;
  flags |= n->value;
  n->value = flags;

 do_outer:
  /* If the variable is private in the current context, then we don't
     need to propagate anything to an outer context.  */
  if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
    return ret;
  if (ctx->outer_context
      && omp_notice_variable (ctx->outer_context, decl, in_code))
    return true;
  return ret;
}
5483
5484 /* Verify that DECL is private within CTX. If there's specific information
5485 to the contrary in the innermost scope, generate an error. */
5486
5487 static bool
5488 omp_is_private (struct gimplify_omp_ctx *ctx, tree decl)
5489 {
5490 splay_tree_node n;
5491
5492 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5493 if (n != NULL)
5494 {
5495 if (n->value & GOVD_SHARED)
5496 {
5497 if (ctx == gimplify_omp_ctxp)
5498 {
5499 error ("iteration variable %qE should be private",
5500 DECL_NAME (decl));
5501 n->value = GOVD_PRIVATE;
5502 return true;
5503 }
5504 else
5505 return false;
5506 }
5507 else if ((n->value & GOVD_EXPLICIT) != 0
5508 && (ctx == gimplify_omp_ctxp
5509 || (ctx->region_type == ORT_COMBINED_PARALLEL
5510 && gimplify_omp_ctxp->outer_context == ctx)))
5511 {
5512 if ((n->value & GOVD_FIRSTPRIVATE) != 0)
5513 error ("iteration variable %qE should not be firstprivate",
5514 DECL_NAME (decl));
5515 else if ((n->value & GOVD_REDUCTION) != 0)
5516 error ("iteration variable %qE should not be reduction",
5517 DECL_NAME (decl));
5518 }
5519 return (ctx == gimplify_omp_ctxp
5520 || (ctx->region_type == ORT_COMBINED_PARALLEL
5521 && gimplify_omp_ctxp->outer_context == ctx));
5522 }
5523
5524 if (ctx->region_type != ORT_WORKSHARE)
5525 return false;
5526 else if (ctx->outer_context)
5527 return omp_is_private (ctx->outer_context, decl);
5528 return false;
5529 }
5530
5531 /* Return true if DECL is private within a parallel region
5532 that binds to the current construct's context or in parallel
5533 region's REDUCTION clause. */
5534
5535 static bool
5536 omp_check_private (struct gimplify_omp_ctx *ctx, tree decl)
5537 {
5538 splay_tree_node n;
5539
5540 do
5541 {
5542 ctx = ctx->outer_context;
5543 if (ctx == NULL)
5544 return !(is_global_var (decl)
5545 /* References might be private, but might be shared too. */
5546 || lang_hooks.decls.omp_privatize_by_reference (decl));
5547
5548 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
5549 if (n != NULL)
5550 return (n->value & GOVD_SHARED) == 0;
5551 }
5552 while (ctx->region_type == ORT_WORKSHARE);
5553 return false;
5554 }
5555
/* Scan the OpenMP clauses in *LIST_P, installing mappings into a new
   and previous omp contexts.  */

static void
gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
			   enum omp_region_type region_type)
{
  struct gimplify_omp_ctx *ctx, *outer_ctx;
  struct gimplify_ctx gctx;
  tree c;

  /* Open a new OpenMP context for the construct; it is installed as
     gimplify_omp_ctxp at the end of this function.  */
  ctx = new_omp_context (region_type);
  outer_ctx = ctx->outer_context;

  while ((c = *list_p) != NULL)
    {
      bool remove = false;
      bool notice_outer = true;
      const char *check_non_private = NULL;
      unsigned int flags;
      tree decl;

      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_PRIVATE:
	  flags = GOVD_PRIVATE | GOVD_EXPLICIT;
	  if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
	    {
	      flags |= GOVD_PRIVATE_OUTER_REF;
	      OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
	    }
	  else
	    notice_outer = false;
	  goto do_add;
	case OMP_CLAUSE_SHARED:
	  flags = GOVD_SHARED | GOVD_EXPLICIT;
	  goto do_add;
	case OMP_CLAUSE_FIRSTPRIVATE:
	  flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
	  check_non_private = "firstprivate";
	  goto do_add;
	case OMP_CLAUSE_LASTPRIVATE:
	  flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
	  check_non_private = "lastprivate";
	  goto do_add;
	case OMP_CLAUSE_REDUCTION:
	  flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
	  check_non_private = "reduction";
	  goto do_add;

	do_add:
	  decl = OMP_CLAUSE_DECL (c);
	  if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node)
	    {
	      remove = true;
	      break;
	    }
	  omp_add_variable (ctx, decl, flags);
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	      && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    {
	      /* Gimplify the reduction init and merge expressions with
		 the new context current, so the placeholder resolves
		 against this construct's mappings.  */
	      omp_add_variable (ctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
				GOVD_LOCAL | GOVD_SEEN);
	      gimplify_omp_ctxp = ctx;
	      push_gimplify_context (&gctx);

	      OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = gimple_seq_alloc ();
	      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = gimple_seq_alloc ();

	      gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
				&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
	      pop_gimplify_context
		(gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
	      push_gimplify_context (&gctx);
	      gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
				&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
	      pop_gimplify_context
		(gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
	      /* The tree forms have been consumed into the GIMPLE
		 sequences above.  */
	      OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
	      OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;

	      gimplify_omp_ctxp = outer_ctx;
	    }
	  else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
		   && OMP_CLAUSE_LASTPRIVATE_STMT (c))
	    {
	      gimplify_omp_ctxp = ctx;
	      push_gimplify_context (&gctx);
	      /* Ensure the statement is wrapped in a BIND_EXPR before
		 gimplifying it.  */
	      if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
		{
		  tree bind = build3 (BIND_EXPR, void_type_node, NULL,
				      NULL, NULL);
		  TREE_SIDE_EFFECTS (bind) = 1;
		  BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
		  OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
		}
	      gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
				&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
	      pop_gimplify_context
		(gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
	      OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;

	      gimplify_omp_ctxp = outer_ctx;
	    }
	  if (notice_outer)
	    goto do_notice;
	  break;

	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_COPYPRIVATE:
	  decl = OMP_CLAUSE_DECL (c);
	  if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node)
	    {
	      remove = true;
	      break;
	    }
	do_notice:
	  /* Record the use of DECL in the enclosing context as well.  */
	  if (outer_ctx)
	    omp_notice_variable (outer_ctx, decl, true);
	  /* On a worksharing construct, firstprivate/lastprivate/
	     reduction must not name a variable that is private in the
	     enclosing parallel.  */
	  if (check_non_private
	      && region_type == ORT_WORKSHARE
	      && omp_check_private (ctx, decl))
	    {
	      error ("%s variable %qE is private in outer context",
		     check_non_private, DECL_NAME (decl));
	      remove = true;
	    }
	  break;

	case OMP_CLAUSE_IF:
	  OMP_CLAUSE_OPERAND (c, 0)
	    = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
	  /* Fall through.  */

	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_NUM_THREADS:
	  /* These carry an expression operand that must become a
	     GIMPLE value.  */
	  if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
			     is_gimple_val, fb_rvalue) == GS_ERROR)
	    remove = true;
	  break;

	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_COLLAPSE:
	  /* No operands to process.  */
	  break;

	case OMP_CLAUSE_DEFAULT:
	  ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
	  break;

	default:
	  gcc_unreachable ();
	}

      /* Either unlink an erroneous clause or advance to the next.  */
      if (remove)
	*list_p = OMP_CLAUSE_CHAIN (c);
      else
	list_p = &OMP_CLAUSE_CHAIN (c);
    }

  gimplify_omp_ctxp = ctx;
}
5719
/* For all variables that were not actually used within the context,
   remove PRIVATE, SHARED, and FIRSTPRIVATE clauses.  */

/* Splay-tree callback used by gimplify_adjust_omp_clauses.  N maps a
   decl to its GOVD_* flags and DATA points at the head of the clause
   list.  Prepend an implicit data-sharing clause for a decl that was
   seen but has no explicit clause.  Always returns 0 so the walk
   continues over all entries.  */

static int
gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
{
  tree *list_p = (tree *) data;
  tree decl = (tree) n->key;
  unsigned flags = n->value;
  enum omp_clause_code code;
  tree clause;
  bool private_debug;

  /* Explicit clauses and context-local decls need no implicit clause.  */
  if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
    return 0;
  /* Nor do decls that were never actually used in the region.  */
  if ((flags & GOVD_SEEN) == 0)
    return 0;
  if (flags & GOVD_DEBUG_PRIVATE)
    {
      gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_PRIVATE);
      private_debug = true;
    }
  else
    private_debug
      = lang_hooks.decls.omp_private_debug_clause (decl,
						   !!(flags & GOVD_SHARED));
  if (private_debug)
    code = OMP_CLAUSE_PRIVATE;
  else if (flags & GOVD_SHARED)
    {
      if (is_global_var (decl))
	{
	  /* A shared global only needs a clause when some enclosing
	     context makes it non-shared; otherwise emit nothing.  */
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
	  while (ctx != NULL)
	    {
	      splay_tree_node on
		= splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	      if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
				      | GOVD_PRIVATE | GOVD_REDUCTION)) != 0)
		break;
	      ctx = ctx->outer_context;
	    }
	  if (ctx == NULL)
	    return 0;
	}
      code = OMP_CLAUSE_SHARED;
    }
  else if (flags & GOVD_PRIVATE)
    code = OMP_CLAUSE_PRIVATE;
  else if (flags & GOVD_FIRSTPRIVATE)
    code = OMP_CLAUSE_FIRSTPRIVATE;
  else
    gcc_unreachable ();

  /* Build the new clause and push it onto the front of the list.  */
  clause = build_omp_clause (input_location, code);
  OMP_CLAUSE_DECL (clause) = decl;
  OMP_CLAUSE_CHAIN (clause) = *list_p;
  if (private_debug)
    OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
  else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
    OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
  *list_p = clause;
  /* Let the front end finalize the clause.  */
  lang_hooks.decls.omp_finish_clause (clause);

  return 0;
}
5786
/* Post-process the clause list *LIST_P after the construct's body has
   been gimplified: drop clauses for variables never seen in the body,
   convert clauses kept only for debug info, record lastprivate/
   firstprivate overlap, add implicit data-sharing clauses, and close
   the current OpenMP context.  */

static void
gimplify_adjust_omp_clauses (tree *list_p)
{
  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
  tree c, decl;

  while ((c = *list_p) != NULL)
    {
      splay_tree_node n;
      bool remove = false;

      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_SHARED:
	case OMP_CLAUSE_FIRSTPRIVATE:
	  decl = OMP_CLAUSE_DECL (c);
	  n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	  /* Drop the clause if the variable was never seen in the
	     region's body.  */
	  remove = !(n->value & GOVD_SEEN);
	  if (! remove)
	    {
	      bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
	      if ((n->value & GOVD_DEBUG_PRIVATE)
		  || lang_hooks.decls.omp_private_debug_clause (decl, shared))
		{
		  /* Convert the clause to a PRIVATE one marked as kept
		     for debug information only.  */
		  gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
			      || ((n->value & GOVD_DATA_SHARE_CLASS)
				  == GOVD_PRIVATE));
		  OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
		  OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
		}
	    }
	  break;

	case OMP_CLAUSE_LASTPRIVATE:
	  /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
	     accurately reflect the presence of a FIRSTPRIVATE clause.  */
	  decl = OMP_CLAUSE_DECL (c);
	  n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	  OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
	    = (n->value & GOVD_FIRSTPRIVATE) != 0;
	  break;

	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_COLLAPSE:
	  /* Nothing to adjust for these.  */
	  break;

	default:
	  gcc_unreachable ();
	}

      if (remove)
	*list_p = OMP_CLAUSE_CHAIN (c);
      else
	list_p = &OMP_CLAUSE_CHAIN (c);
    }

  /* Add in any implicit data sharing.  */
  splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, list_p);

  /* Restore the enclosing OpenMP context and dispose of this one.  */
  gimplify_omp_ctxp = ctx->outer_context;
  delete_omp_context (ctx);
}
5859
5860 /* Gimplify the contents of an OMP_PARALLEL statement. This involves
5861 gimplification of the body, as well as scanning the body for used
5862 variables. We need to do this scan now, because variable-sized
5863 decls will be decomposed during gimplification. */
5864
5865 static void
5866 gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
5867 {
5868 tree expr = *expr_p;
5869 gimple g;
5870 gimple_seq body = NULL;
5871 struct gimplify_ctx gctx;
5872
5873 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
5874 OMP_PARALLEL_COMBINED (expr)
5875 ? ORT_COMBINED_PARALLEL
5876 : ORT_PARALLEL);
5877
5878 push_gimplify_context (&gctx);
5879
5880 g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
5881 if (gimple_code (g) == GIMPLE_BIND)
5882 pop_gimplify_context (g);
5883 else
5884 pop_gimplify_context (NULL);
5885
5886 gimplify_adjust_omp_clauses (&OMP_PARALLEL_CLAUSES (expr));
5887
5888 g = gimple_build_omp_parallel (body,
5889 OMP_PARALLEL_CLAUSES (expr),
5890 NULL_TREE, NULL_TREE);
5891 if (OMP_PARALLEL_COMBINED (expr))
5892 gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
5893 gimplify_seq_add_stmt (pre_p, g);
5894 *expr_p = NULL_TREE;
5895 }
5896
5897 /* Gimplify the contents of an OMP_TASK statement. This involves
5898 gimplification of the body, as well as scanning the body for used
5899 variables. We need to do this scan now, because variable-sized
5900 decls will be decomposed during gimplification. */
5901
5902 static void
5903 gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
5904 {
5905 tree expr = *expr_p;
5906 gimple g;
5907 gimple_seq body = NULL;
5908 struct gimplify_ctx gctx;
5909
5910 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p, ORT_TASK);
5911
5912 push_gimplify_context (&gctx);
5913
5914 g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
5915 if (gimple_code (g) == GIMPLE_BIND)
5916 pop_gimplify_context (g);
5917 else
5918 pop_gimplify_context (NULL);
5919
5920 gimplify_adjust_omp_clauses (&OMP_TASK_CLAUSES (expr));
5921
5922 g = gimple_build_omp_task (body,
5923 OMP_TASK_CLAUSES (expr),
5924 NULL_TREE, NULL_TREE,
5925 NULL_TREE, NULL_TREE, NULL_TREE);
5926 gimplify_seq_add_stmt (pre_p, g);
5927 *expr_p = NULL_TREE;
5928 }
5929
/* Gimplify the gross structure of an OMP_FOR statement.  The iteration
   variables, initial values, bounds and increments held in the
   OMP_FOR_INIT/COND/INCR vectors are gimplified in place; the loop body
   goes into a fresh sequence.  A GIMPLE_OMP_FOR is appended to PRE_P.
   Returns GS_ALL_DONE on success, GS_ERROR otherwise.  */

static enum gimplify_status
gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
{
  tree for_stmt, decl, var, t;
  enum gimplify_status ret = GS_ALL_DONE;
  enum gimplify_status tret;
  gimple gfor;
  gimple_seq for_body, for_pre_body;
  int i;

  for_stmt = *expr_p;

  gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p,
			     ORT_WORKSHARE);

  /* Handle OMP_FOR_INIT.  */
  for_pre_body = NULL;
  gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
  OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;

  for_body = gimple_seq_alloc ();
  /* The three vectors must describe the same number of nested loops.  */
  gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
	      == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
  gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
	      == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));
  for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
    {
      /* Each init element must be DECL = VALUE with an integral or
	 pointer-typed iteration variable.  */
      t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
      gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
      decl = TREE_OPERAND (t, 0);
      gcc_assert (DECL_P (decl));
      gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
		  || POINTER_TYPE_P (TREE_TYPE (decl)));

      /* Make sure the iteration variable is private.  */
      if (omp_is_private (gimplify_omp_ctxp, decl))
	omp_notice_variable (gimplify_omp_ctxp, decl, true);
      else
	omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);

      /* If DECL is not a gimple register, create a temporary variable to act
	 as an iteration counter.  This is valid, since DECL cannot be
	 modified in the body of the loop.  */
      if (!is_gimple_reg (decl))
	{
	  var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
	  TREE_OPERAND (t, 0) = var;

	  /* Copy the counter back into DECL at the top of the body so
	     the body still observes the original variable.  */
	  gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));

	  omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
	}
      else
	var = decl;

      /* Gimplify the initial value into the pre-body.  */
      tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
			    is_gimple_val, fb_rvalue);
      ret = MIN (ret, tret);
      if (ret == GS_ERROR)
	return ret;

      /* Handle OMP_FOR_COND.  */
      t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
      gcc_assert (COMPARISON_CLASS_P (t));
      gcc_assert (TREE_OPERAND (t, 0) == decl);

      tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
			    is_gimple_val, fb_rvalue);
      ret = MIN (ret, tret);

      /* Handle OMP_FOR_INCR.  ++/-- forms are canonicalized into
	 VAR = VAR + CST assignments.  */
      t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
      switch (TREE_CODE (t))
	{
	case PREINCREMENT_EXPR:
	case POSTINCREMENT_EXPR:
	  t = build_int_cst (TREE_TYPE (decl), 1);
	  t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
	  t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
	  TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
	  break;

	case PREDECREMENT_EXPR:
	case POSTDECREMENT_EXPR:
	  /* Decrement is expressed as adding -1.  */
	  t = build_int_cst (TREE_TYPE (decl), -1);
	  t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
	  t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
	  TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
	  break;

	case MODIFY_EXPR:
	  gcc_assert (TREE_OPERAND (t, 0) == decl);
	  TREE_OPERAND (t, 0) = var;

	  t = TREE_OPERAND (t, 1);
	  switch (TREE_CODE (t))
	    {
	    case PLUS_EXPR:
	      if (TREE_OPERAND (t, 1) == decl)
		{
		  /* Canonicalize STEP + DECL into VAR + STEP.  */
		  TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
		  TREE_OPERAND (t, 0) = var;
		  break;
		}

	      /* Fallthru.  */
	    case MINUS_EXPR:
	    case POINTER_PLUS_EXPR:
	      gcc_assert (TREE_OPERAND (t, 0) == decl);
	      TREE_OPERAND (t, 0) = var;
	      break;
	    default:
	      gcc_unreachable ();
	    }

	  /* Gimplify the step expression into the pre-body.  */
	  tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
				is_gimple_val, fb_rvalue);
	  ret = MIN (ret, tret);
	  break;

	default:
	  gcc_unreachable ();
	}

      if (var != decl || TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1)
	{
	  /* When a temporary counter stands in for DECL (or several
	     loops are collapsed), fill in any empty lastprivate
	     sequence for DECL with DECL = DECL <op> STEP, so the final
	     value is computed from DECL itself.  */
	  tree c;
	  for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
	    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
		&& OMP_CLAUSE_DECL (c) == decl
		&& OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
	      {
		t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
		gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
		gcc_assert (TREE_OPERAND (t, 0) == var);
		t = TREE_OPERAND (t, 1);
		gcc_assert (TREE_CODE (t) == PLUS_EXPR
			    || TREE_CODE (t) == MINUS_EXPR
			    || TREE_CODE (t) == POINTER_PLUS_EXPR);
		gcc_assert (TREE_OPERAND (t, 0) == var);
		t = build2 (TREE_CODE (t), TREE_TYPE (decl), decl,
			    TREE_OPERAND (t, 1));
		gimplify_assign (decl, t,
				 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
	      }
	}
    }

  gimplify_and_add (OMP_FOR_BODY (for_stmt), &for_body);

  gimplify_adjust_omp_clauses (&OMP_FOR_CLAUSES (for_stmt));

  gfor = gimple_build_omp_for (for_body, OMP_FOR_CLAUSES (for_stmt),
			       TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
			       for_pre_body);

  /* Transfer the gimplified index, bounds, condition codes and steps
     into the GIMPLE_OMP_FOR statement.  */
  for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
    {
      t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
      gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
      gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
      t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
      gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
      gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
      t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
      gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
    }

  gimplify_seq_add_stmt (pre_p, gfor);
  return ret == GS_ALL_DONE ? GS_ALL_DONE : GS_ERROR;
}
6103
6104 /* Gimplify the gross structure of other OpenMP worksharing constructs.
6105 In particular, OMP_SECTIONS and OMP_SINGLE. */
6106
6107 static void
6108 gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
6109 {
6110 tree expr = *expr_p;
6111 gimple stmt;
6112 gimple_seq body = NULL;
6113
6114 gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ORT_WORKSHARE);
6115 gimplify_and_add (OMP_BODY (expr), &body);
6116 gimplify_adjust_omp_clauses (&OMP_CLAUSES (expr));
6117
6118 if (TREE_CODE (expr) == OMP_SECTIONS)
6119 stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
6120 else if (TREE_CODE (expr) == OMP_SINGLE)
6121 stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
6122 else
6123 gcc_unreachable ();
6124
6125 gimplify_seq_add_stmt (pre_p, stmt);
6126 }
6127
/* A subroutine of gimplify_omp_atomic.  The front end is supposed to have
   stabilized the lhs of the atomic operation as *ADDR.  Return true if
   EXPR is this stabilized form.  */

static bool
goa_lhs_expr_p (tree expr, tree addr)
{
  /* Also include casts to other type variants.  The C front end is fond
     of adding these for e.g. volatile variables.  This is like
     STRIP_TYPE_NOPS but includes the main variant lookup.  */
  STRIP_USELESS_TYPE_CONVERSION (expr);

  if (TREE_CODE (expr) == INDIRECT_REF)
    {
      expr = TREE_OPERAND (expr, 0);
      /* Peel identical conversion wrappers off EXPR and ADDR in
	 lockstep, as long as both carry the same tree code and
	 compatible types.  */
      while (expr != addr
	     && (CONVERT_EXPR_P (expr)
		 || TREE_CODE (expr) == NON_LVALUE_EXPR)
	     && TREE_CODE (expr) == TREE_CODE (addr)
	     && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr)))
	{
	  expr = TREE_OPERAND (expr, 0);
	  addr = TREE_OPERAND (addr, 0);
	}
      if (expr == addr)
	return true;
      /* Also accept distinct ADDR_EXPR nodes wrapping the same
	 underlying object.  */
      return (TREE_CODE (addr) == ADDR_EXPR
	      && TREE_CODE (expr) == ADDR_EXPR
	      && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
    }
  /* EXPR may also be the object whose address ADDR takes.  */
  if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
    return true;
  return false;
}
6162
/* Walk *EXPR_P and replace
   appearances of *LHS_ADDR with LHS_VAR.  If an expression does not involve
   the lhs, evaluate it into a temporary.  Return 1 if the lhs appeared as
   a subexpression, 0 if it did not, or -1 if an error was encountered.  */

static int
goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
		    tree lhs_var)
{
  tree expr = *expr_p;
  int saw_lhs;

  /* EXPR is exactly the stabilized lhs: substitute the variable that
     will hold the atomically loaded value.  */
  if (goa_lhs_expr_p (expr, lhs_addr))
    {
      *expr_p = lhs_var;
      return 1;
    }
  /* A GIMPLE value cannot contain the lhs and needs no work.  */
  if (is_gimple_val (expr))
    return 0;

  saw_lhs = 0;
  switch (TREE_CODE_CLASS (TREE_CODE (expr)))
    {
    case tcc_binary:
    case tcc_comparison:
      saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
				     lhs_var);
      /* FALLTHRU: binary nodes also stabilize operand 0 below.  */
    case tcc_unary:
      saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
				     lhs_var);
      break;
    case tcc_expression:
      switch (TREE_CODE (expr))
	{
	case TRUTH_ANDIF_EXPR:
	case TRUTH_ORIF_EXPR:
	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
					 lhs_addr, lhs_var);
	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
					 lhs_addr, lhs_var);
	  break;
	default:
	  break;
	}
      break;
    default:
      break;
    }

  /* The lhs did not occur anywhere inside EXPR: evaluate the whole
     expression into a temporary up front.  */
  if (saw_lhs == 0)
    {
      enum gimplify_status gs;
      gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
      if (gs != GS_ALL_DONE)
	saw_lhs = -1;
    }

  return saw_lhs;
}
6222
6223
6224 /* Gimplify an OMP_ATOMIC statement. */
6225
6226 static enum gimplify_status
6227 gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
6228 {
6229 tree addr = TREE_OPERAND (*expr_p, 0);
6230 tree rhs = TREE_OPERAND (*expr_p, 1);
6231 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
6232 tree tmp_load;
6233
6234 tmp_load = create_tmp_var (type, NULL);
6235 if (TREE_CODE (type) == COMPLEX_TYPE || TREE_CODE (type) == VECTOR_TYPE)
6236 DECL_GIMPLE_REG_P (tmp_load) = 1;
6237 if (goa_stabilize_expr (&rhs, pre_p, addr, tmp_load) < 0)
6238 return GS_ERROR;
6239
6240 if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
6241 != GS_ALL_DONE)
6242 return GS_ERROR;
6243
6244 gimplify_seq_add_stmt (pre_p, gimple_build_omp_atomic_load (tmp_load, addr));
6245 if (gimplify_expr (&rhs, pre_p, NULL, is_gimple_val, fb_rvalue)
6246 != GS_ALL_DONE)
6247 return GS_ERROR;
6248 gimplify_seq_add_stmt (pre_p, gimple_build_omp_atomic_store (rhs));
6249 *expr_p = NULL;
6250
6251 return GS_ALL_DONE;
6252 }
6253
6254
6255 /* Converts the GENERIC expression tree *EXPR_P to GIMPLE. If the
6256 expression produces a value to be used as an operand inside a GIMPLE
6257 statement, the value will be stored back in *EXPR_P. This value will
6258 be a tree of class tcc_declaration, tcc_constant, tcc_reference or
6259 an SSA_NAME. The corresponding sequence of GIMPLE statements is
6260 emitted in PRE_P and POST_P.
6261
6262 Additionally, this process may overwrite parts of the input
6263 expression during gimplification. Ideally, it should be
6264 possible to do non-destructive gimplification.
6265
6266 EXPR_P points to the GENERIC expression to convert to GIMPLE. If
6267 the expression needs to evaluate to a value to be used as
6268 an operand in a GIMPLE statement, this value will be stored in
6269 *EXPR_P on exit. This happens when the caller specifies one
6270 of fb_lvalue or fb_rvalue fallback flags.
6271
6272 PRE_P will contain the sequence of GIMPLE statements corresponding
6273 to the evaluation of EXPR and all the side-effects that must
6274 be executed before the main expression. On exit, the last
6275 statement of PRE_P is the core statement being gimplified. For
6276 instance, when gimplifying 'if (++a)' the last statement in
6277 PRE_P will be 'if (t.1)' where t.1 is the result of
6278 pre-incrementing 'a'.
6279
6280 POST_P will contain the sequence of GIMPLE statements corresponding
6281 to the evaluation of all the side-effects that must be executed
6282 after the main expression. If this is NULL, the post
6283 side-effects are stored at the end of PRE_P.
6284
6285 The reason why the output is split in two is to handle post
6286 side-effects explicitly. In some cases, an expression may have
6287 inner and outer post side-effects which need to be emitted in
6288 an order different from the one given by the recursive
6289 traversal. For instance, for the expression (*p--)++ the post
6290 side-effects of '--' must actually occur *after* the post
6291 side-effects of '++'. However, gimplification will first visit
6292 the inner expression, so if a separate POST sequence was not
6293 used, the resulting sequence would be:
6294
6295 1 t.1 = *p
6296 2 p = p - 1
6297 3 t.2 = t.1 + 1
6298 4 *p = t.2
6299
6300 However, the post-decrement operation in line #2 must not be
6301 evaluated until after the store to *p at line #4, so the
6302 correct sequence should be:
6303
6304 1 t.1 = *p
6305 2 t.2 = t.1 + 1
6306 3 *p = t.2
6307 4 p = p - 1
6308
6309 So, by specifying a separate post queue, it is possible
6310 to emit the post side-effects in the correct order.
6311 If POST_P is NULL, an internal queue will be used. Before
6312 returning to the caller, the sequence POST_P is appended to
6313 the main output sequence PRE_P.
6314
6315 GIMPLE_TEST_F points to a function that takes a tree T and
6316 returns nonzero if T is in the GIMPLE form requested by the
6317 caller. The GIMPLE predicates are in tree-gimple.c.
6318
6319 FALLBACK tells the function what sort of a temporary we want if
6320 gimplification cannot produce an expression that complies with
6321 GIMPLE_TEST_F.
6322
6323 fb_none means that no temporary should be generated
6324 fb_rvalue means that an rvalue is OK to generate
6325 fb_lvalue means that an lvalue is OK to generate
6326 fb_either means that either is OK, but an lvalue is preferable.
6327 fb_mayfail means that gimplification may fail (in which case
6328 GS_ERROR will be returned)
6329
6330 The return value is either GS_ERROR or GS_ALL_DONE, since this
6331 function iterates until EXPR is completely gimplified or an error
6332 occurs. */
6333
6334 enum gimplify_status
6335 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
6336 bool (*gimple_test_f) (tree), fallback_t fallback)
6337 {
6338 tree tmp;
6339 gimple_seq internal_pre = NULL;
6340 gimple_seq internal_post = NULL;
6341 tree save_expr;
6342 bool is_statement;
6343 location_t saved_location;
6344 enum gimplify_status ret;
6345 gimple_stmt_iterator pre_last_gsi, post_last_gsi;
6346
6347 save_expr = *expr_p;
6348 if (save_expr == NULL_TREE)
6349 return GS_ALL_DONE;
6350
6351 /* If we are gimplifying a top-level statement, PRE_P must be valid. */
6352 is_statement = gimple_test_f == is_gimple_stmt;
6353 if (is_statement)
6354 gcc_assert (pre_p);
6355
6356 /* Consistency checks. */
6357 if (gimple_test_f == is_gimple_reg)
6358 gcc_assert (fallback & (fb_rvalue | fb_lvalue));
6359 else if (gimple_test_f == is_gimple_val
6360 || gimple_test_f == is_gimple_call_addr
6361 || gimple_test_f == is_gimple_condexpr
6362 || gimple_test_f == is_gimple_mem_rhs
6363 || gimple_test_f == is_gimple_mem_rhs_or_call
6364 || gimple_test_f == is_gimple_reg_rhs
6365 || gimple_test_f == is_gimple_reg_rhs_or_call
6366 || gimple_test_f == is_gimple_asm_val)
6367 gcc_assert (fallback & fb_rvalue);
6368 else if (gimple_test_f == is_gimple_min_lval
6369 || gimple_test_f == is_gimple_lvalue)
6370 gcc_assert (fallback & fb_lvalue);
6371 else if (gimple_test_f == is_gimple_addressable)
6372 gcc_assert (fallback & fb_either);
6373 else if (gimple_test_f == is_gimple_stmt)
6374 gcc_assert (fallback == fb_none);
6375 else
6376 {
6377 /* We should have recognized the GIMPLE_TEST_F predicate to
6378 know what kind of fallback to use in case a temporary is
6379 needed to hold the value or address of *EXPR_P. */
6380 gcc_unreachable ();
6381 }
6382
6383 /* We used to check the predicate here and return immediately if it
6384 succeeds. This is wrong; the design is for gimplification to be
6385 idempotent, and for the predicates to only test for valid forms, not
6386 whether they are fully simplified. */
6387 if (pre_p == NULL)
6388 pre_p = &internal_pre;
6389
6390 if (post_p == NULL)
6391 post_p = &internal_post;
6392
6393 /* Remember the last statements added to PRE_P and POST_P. Every
6394 new statement added by the gimplification helpers needs to be
6395 annotated with location information. To centralize the
6396 responsibility, we remember the last statement that had been
6397 added to both queues before gimplifying *EXPR_P. If
6398 gimplification produces new statements in PRE_P and POST_P, those
6399 statements will be annotated with the same location information
6400 as *EXPR_P. */
6401 pre_last_gsi = gsi_last (*pre_p);
6402 post_last_gsi = gsi_last (*post_p);
6403
6404 saved_location = input_location;
6405 if (save_expr != error_mark_node
6406 && EXPR_HAS_LOCATION (*expr_p))
6407 input_location = EXPR_LOCATION (*expr_p);
6408
6409 /* Loop over the specific gimplifiers until the toplevel node
6410 remains the same. */
6411 do
6412 {
6413 /* Strip away as many useless type conversions as possible
6414 at the toplevel. */
6415 STRIP_USELESS_TYPE_CONVERSION (*expr_p);
6416
6417 /* Remember the expr. */
6418 save_expr = *expr_p;
6419
6420 /* Die, die, die, my darling. */
6421 if (save_expr == error_mark_node
6422 || (TREE_TYPE (save_expr)
6423 && TREE_TYPE (save_expr) == error_mark_node))
6424 {
6425 ret = GS_ERROR;
6426 break;
6427 }
6428
6429 /* Do any language-specific gimplification. */
6430 ret = ((enum gimplify_status)
6431 lang_hooks.gimplify_expr (expr_p, pre_p, post_p));
6432 if (ret == GS_OK)
6433 {
6434 if (*expr_p == NULL_TREE)
6435 break;
6436 if (*expr_p != save_expr)
6437 continue;
6438 }
6439 else if (ret != GS_UNHANDLED)
6440 break;
6441
6442 ret = GS_OK;
6443 switch (TREE_CODE (*expr_p))
6444 {
6445 /* First deal with the special cases. */
6446
6447 case POSTINCREMENT_EXPR:
6448 case POSTDECREMENT_EXPR:
6449 case PREINCREMENT_EXPR:
6450 case PREDECREMENT_EXPR:
6451 ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
6452 fallback != fb_none);
6453 break;
6454
6455 case ARRAY_REF:
6456 case ARRAY_RANGE_REF:
6457 case REALPART_EXPR:
6458 case IMAGPART_EXPR:
6459 case COMPONENT_REF:
6460 case VIEW_CONVERT_EXPR:
6461 ret = gimplify_compound_lval (expr_p, pre_p, post_p,
6462 fallback ? fallback : fb_rvalue);
6463 break;
6464
6465 case COND_EXPR:
6466 ret = gimplify_cond_expr (expr_p, pre_p, fallback);
6467
6468 /* C99 code may assign to an array in a structure value of a
6469 conditional expression, and this has undefined behavior
6470 only on execution, so create a temporary if an lvalue is
6471 required. */
6472 if (fallback == fb_lvalue)
6473 {
6474 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
6475 mark_addressable (*expr_p);
6476 }
6477 break;
6478
6479 case CALL_EXPR:
6480 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
6481
6482 /* C99 code may assign to an array in a structure returned
6483 from a function, and this has undefined behavior only on
6484 execution, so create a temporary if an lvalue is
6485 required. */
6486 if (fallback == fb_lvalue)
6487 {
6488 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
6489 mark_addressable (*expr_p);
6490 }
6491 break;
6492
6493 case TREE_LIST:
6494 gcc_unreachable ();
6495
6496 case COMPOUND_EXPR:
6497 ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
6498 break;
6499
6500 case COMPOUND_LITERAL_EXPR:
6501 ret = gimplify_compound_literal_expr (expr_p, pre_p);
6502 break;
6503
6504 case MODIFY_EXPR:
6505 case INIT_EXPR:
6506 ret = gimplify_modify_expr (expr_p, pre_p, post_p,
6507 fallback != fb_none);
6508 break;
6509
6510 case TRUTH_ANDIF_EXPR:
6511 case TRUTH_ORIF_EXPR:
6512 /* Pass the source location of the outer expression. */
6513 ret = gimplify_boolean_expr (expr_p, saved_location);
6514 break;
6515
6516 case TRUTH_NOT_EXPR:
6517 if (TREE_CODE (TREE_TYPE (*expr_p)) != BOOLEAN_TYPE)
6518 {
6519 tree type = TREE_TYPE (*expr_p);
6520 *expr_p = fold_convert (type, gimple_boolify (*expr_p));
6521 ret = GS_OK;
6522 break;
6523 }
6524
6525 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
6526 is_gimple_val, fb_rvalue);
6527 recalculate_side_effects (*expr_p);
6528 break;
6529
6530 case ADDR_EXPR:
6531 ret = gimplify_addr_expr (expr_p, pre_p, post_p);
6532 break;
6533
6534 case VA_ARG_EXPR:
6535 ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
6536 break;
6537
6538 CASE_CONVERT:
6539 if (IS_EMPTY_STMT (*expr_p))
6540 {
6541 ret = GS_ALL_DONE;
6542 break;
6543 }
6544
6545 if (VOID_TYPE_P (TREE_TYPE (*expr_p))
6546 || fallback == fb_none)
6547 {
6548 /* Just strip a conversion to void (or in void context) and
6549 try again. */
6550 *expr_p = TREE_OPERAND (*expr_p, 0);
6551 break;
6552 }
6553
6554 ret = gimplify_conversion (expr_p);
6555 if (ret == GS_ERROR)
6556 break;
6557 if (*expr_p != save_expr)
6558 break;
6559 /* FALLTHRU */
6560
6561 case FIX_TRUNC_EXPR:
6562 /* unary_expr: ... | '(' cast ')' val | ... */
6563 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
6564 is_gimple_val, fb_rvalue);
6565 recalculate_side_effects (*expr_p);
6566 break;
6567
6568 case INDIRECT_REF:
6569 *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
6570 if (*expr_p != save_expr)
6571 break;
6572 /* else fall through. */
6573 case ALIGN_INDIRECT_REF:
6574 case MISALIGNED_INDIRECT_REF:
6575 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
6576 is_gimple_reg, fb_rvalue);
6577 recalculate_side_effects (*expr_p);
6578 break;
6579
6580 /* Constants need not be gimplified. */
6581 case INTEGER_CST:
6582 case REAL_CST:
6583 case FIXED_CST:
6584 case STRING_CST:
6585 case COMPLEX_CST:
6586 case VECTOR_CST:
6587 ret = GS_ALL_DONE;
6588 break;
6589
6590 case CONST_DECL:
6591 /* If we require an lvalue, such as for ADDR_EXPR, retain the
6592 CONST_DECL node. Otherwise the decl is replaceable by its
6593 value. */
6594 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
6595 if (fallback & fb_lvalue)
6596 ret = GS_ALL_DONE;
6597 else
6598 *expr_p = DECL_INITIAL (*expr_p);
6599 break;
6600
6601 case DECL_EXPR:
6602 ret = gimplify_decl_expr (expr_p, pre_p);
6603 break;
6604
6605 case BIND_EXPR:
6606 ret = gimplify_bind_expr (expr_p, pre_p);
6607 break;
6608
6609 case LOOP_EXPR:
6610 ret = gimplify_loop_expr (expr_p, pre_p);
6611 break;
6612
6613 case SWITCH_EXPR:
6614 ret = gimplify_switch_expr (expr_p, pre_p);
6615 break;
6616
6617 case EXIT_EXPR:
6618 ret = gimplify_exit_expr (expr_p);
6619 break;
6620
6621 case GOTO_EXPR:
6622 /* If the target is not LABEL, then it is a computed jump
6623 and the target needs to be gimplified. */
6624 if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
6625 {
6626 ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
6627 NULL, is_gimple_val, fb_rvalue);
6628 if (ret == GS_ERROR)
6629 break;
6630 }
6631 gimplify_seq_add_stmt (pre_p,
6632 gimple_build_goto (GOTO_DESTINATION (*expr_p)));
6633 break;
6634
6635 case PREDICT_EXPR:
6636 gimplify_seq_add_stmt (pre_p,
6637 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
6638 PREDICT_EXPR_OUTCOME (*expr_p)));
6639 ret = GS_ALL_DONE;
6640 break;
6641
6642 case LABEL_EXPR:
6643 ret = GS_ALL_DONE;
6644 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
6645 == current_function_decl);
6646 gimplify_seq_add_stmt (pre_p,
6647 gimple_build_label (LABEL_EXPR_LABEL (*expr_p)));
6648 break;
6649
6650 case CASE_LABEL_EXPR:
6651 ret = gimplify_case_label_expr (expr_p, pre_p);
6652 break;
6653
6654 case RETURN_EXPR:
6655 ret = gimplify_return_expr (*expr_p, pre_p);
6656 break;
6657
6658 case CONSTRUCTOR:
6659 /* Don't reduce this in place; let gimplify_init_constructor work its
6660 magic. Buf if we're just elaborating this for side effects, just
6661 gimplify any element that has side-effects. */
6662 if (fallback == fb_none)
6663 {
6664 unsigned HOST_WIDE_INT ix;
6665 constructor_elt *ce;
6666 tree temp = NULL_TREE;
6667 for (ix = 0;
6668 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (*expr_p),
6669 ix, ce);
6670 ix++)
6671 if (TREE_SIDE_EFFECTS (ce->value))
6672 append_to_statement_list (ce->value, &temp);
6673
6674 *expr_p = temp;
6675 ret = GS_OK;
6676 }
6677 /* C99 code may assign to an array in a constructed
6678 structure or union, and this has undefined behavior only
6679 on execution, so create a temporary if an lvalue is
6680 required. */
6681 else if (fallback == fb_lvalue)
6682 {
6683 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
6684 mark_addressable (*expr_p);
6685 }
6686 else
6687 ret = GS_ALL_DONE;
6688 break;
6689
6690 /* The following are special cases that are not handled by the
6691 original GIMPLE grammar. */
6692
6693 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
6694 eliminated. */
6695 case SAVE_EXPR:
6696 ret = gimplify_save_expr (expr_p, pre_p, post_p);
6697 break;
6698
6699 case BIT_FIELD_REF:
6700 {
6701 enum gimplify_status r0, r1, r2;
6702
6703 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
6704 post_p, is_gimple_lvalue, fb_either);
6705 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
6706 post_p, is_gimple_val, fb_rvalue);
6707 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
6708 post_p, is_gimple_val, fb_rvalue);
6709 recalculate_side_effects (*expr_p);
6710
6711 ret = MIN (r0, MIN (r1, r2));
6712 }
6713 break;
6714
6715 case TARGET_MEM_REF:
6716 {
6717 enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;
6718
6719 if (TMR_SYMBOL (*expr_p))
6720 r0 = gimplify_expr (&TMR_SYMBOL (*expr_p), pre_p,
6721 post_p, is_gimple_lvalue, fb_either);
6722 else if (TMR_BASE (*expr_p))
6723 r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
6724 post_p, is_gimple_val, fb_either);
6725 if (TMR_INDEX (*expr_p))
6726 r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
6727 post_p, is_gimple_val, fb_rvalue);
6728 /* TMR_STEP and TMR_OFFSET are always integer constants. */
6729 ret = MIN (r0, r1);
6730 }
6731 break;
6732
6733 case NON_LVALUE_EXPR:
6734 /* This should have been stripped above. */
6735 gcc_unreachable ();
6736
6737 case ASM_EXPR:
6738 ret = gimplify_asm_expr (expr_p, pre_p, post_p);
6739 break;
6740
6741 case TRY_FINALLY_EXPR:
6742 case TRY_CATCH_EXPR:
6743 {
6744 gimple_seq eval, cleanup;
6745 gimple try_;
6746
6747 eval = cleanup = NULL;
6748 gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
6749 gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
6750 /* Don't create bogus GIMPLE_TRY with empty cleanup. */
6751 if (gimple_seq_empty_p (cleanup))
6752 {
6753 gimple_seq_add_seq (pre_p, eval);
6754 ret = GS_ALL_DONE;
6755 break;
6756 }
6757 try_ = gimple_build_try (eval, cleanup,
6758 TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
6759 ? GIMPLE_TRY_FINALLY
6760 : GIMPLE_TRY_CATCH);
6761 if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
6762 gimple_try_set_catch_is_cleanup (try_,
6763 TRY_CATCH_IS_CLEANUP (*expr_p));
6764 gimplify_seq_add_stmt (pre_p, try_);
6765 ret = GS_ALL_DONE;
6766 break;
6767 }
6768
6769 case CLEANUP_POINT_EXPR:
6770 ret = gimplify_cleanup_point_expr (expr_p, pre_p);
6771 break;
6772
6773 case TARGET_EXPR:
6774 ret = gimplify_target_expr (expr_p, pre_p, post_p);
6775 break;
6776
6777 case CATCH_EXPR:
6778 {
6779 gimple c;
6780 gimple_seq handler = NULL;
6781 gimplify_and_add (CATCH_BODY (*expr_p), &handler);
6782 c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
6783 gimplify_seq_add_stmt (pre_p, c);
6784 ret = GS_ALL_DONE;
6785 break;
6786 }
6787
6788 case EH_FILTER_EXPR:
6789 {
6790 gimple ehf;
6791 gimple_seq failure = NULL;
6792
6793 gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
6794 ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
6795 gimple_set_no_warning (ehf, TREE_NO_WARNING (*expr_p));
6796 gimplify_seq_add_stmt (pre_p, ehf);
6797 ret = GS_ALL_DONE;
6798 break;
6799 }
6800
6801 case OBJ_TYPE_REF:
6802 {
6803 enum gimplify_status r0, r1;
6804 r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
6805 post_p, is_gimple_val, fb_rvalue);
6806 r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
6807 post_p, is_gimple_val, fb_rvalue);
6808 TREE_SIDE_EFFECTS (*expr_p) = 0;
6809 ret = MIN (r0, r1);
6810 }
6811 break;
6812
6813 case LABEL_DECL:
6814 /* We get here when taking the address of a label. We mark
6815 the label as "forced"; meaning it can never be removed and
6816 it is a potential target for any computed goto. */
6817 FORCED_LABEL (*expr_p) = 1;
6818 ret = GS_ALL_DONE;
6819 break;
6820
6821 case STATEMENT_LIST:
6822 ret = gimplify_statement_list (expr_p, pre_p);
6823 break;
6824
6825 case WITH_SIZE_EXPR:
6826 {
6827 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
6828 post_p == &internal_post ? NULL : post_p,
6829 gimple_test_f, fallback);
6830 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
6831 is_gimple_val, fb_rvalue);
6832 }
6833 break;
6834
6835 case VAR_DECL:
6836 case PARM_DECL:
6837 ret = gimplify_var_or_parm_decl (expr_p);
6838 break;
6839
6840 case RESULT_DECL:
6841 /* When within an OpenMP context, notice uses of variables. */
6842 if (gimplify_omp_ctxp)
6843 omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
6844 ret = GS_ALL_DONE;
6845 break;
6846
6847 case SSA_NAME:
6848 /* Allow callbacks into the gimplifier during optimization. */
6849 ret = GS_ALL_DONE;
6850 break;
6851
6852 case OMP_PARALLEL:
6853 gimplify_omp_parallel (expr_p, pre_p);
6854 ret = GS_ALL_DONE;
6855 break;
6856
6857 case OMP_TASK:
6858 gimplify_omp_task (expr_p, pre_p);
6859 ret = GS_ALL_DONE;
6860 break;
6861
6862 case OMP_FOR:
6863 ret = gimplify_omp_for (expr_p, pre_p);
6864 break;
6865
6866 case OMP_SECTIONS:
6867 case OMP_SINGLE:
6868 gimplify_omp_workshare (expr_p, pre_p);
6869 ret = GS_ALL_DONE;
6870 break;
6871
6872 case OMP_SECTION:
6873 case OMP_MASTER:
6874 case OMP_ORDERED:
6875 case OMP_CRITICAL:
6876 {
6877 gimple_seq body = NULL;
6878 gimple g;
6879
6880 gimplify_and_add (OMP_BODY (*expr_p), &body);
6881 switch (TREE_CODE (*expr_p))
6882 {
6883 case OMP_SECTION:
6884 g = gimple_build_omp_section (body);
6885 break;
6886 case OMP_MASTER:
6887 g = gimple_build_omp_master (body);
6888 break;
6889 case OMP_ORDERED:
6890 g = gimple_build_omp_ordered (body);
6891 break;
6892 case OMP_CRITICAL:
6893 g = gimple_build_omp_critical (body,
6894 OMP_CRITICAL_NAME (*expr_p));
6895 break;
6896 default:
6897 gcc_unreachable ();
6898 }
6899 gimplify_seq_add_stmt (pre_p, g);
6900 ret = GS_ALL_DONE;
6901 break;
6902 }
6903
6904 case OMP_ATOMIC:
6905 ret = gimplify_omp_atomic (expr_p, pre_p);
6906 break;
6907
6908 case POINTER_PLUS_EXPR:
6909 /* Convert ((type *)A)+offset into &A->field_of_type_and_offset.
6910 The second is gimple immediate saving a need for extra statement.
6911 */
6912 if (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == INTEGER_CST
6913 && (tmp = maybe_fold_offset_to_address
6914 (EXPR_LOCATION (*expr_p),
6915 TREE_OPERAND (*expr_p, 0), TREE_OPERAND (*expr_p, 1),
6916 TREE_TYPE (*expr_p))))
6917 {
6918 *expr_p = tmp;
6919 break;
6920 }
6921 /* Convert (void *)&a + 4 into (void *)&a[1]. */
6922 if (TREE_CODE (TREE_OPERAND (*expr_p, 0)) == NOP_EXPR
6923 && TREE_CODE (TREE_OPERAND (*expr_p, 1)) == INTEGER_CST
6924 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*expr_p,
6925 0),0)))
6926 && (tmp = maybe_fold_offset_to_address
6927 (EXPR_LOCATION (*expr_p),
6928 TREE_OPERAND (TREE_OPERAND (*expr_p, 0), 0),
6929 TREE_OPERAND (*expr_p, 1),
6930 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*expr_p, 0),
6931 0)))))
6932 {
6933 *expr_p = fold_convert (TREE_TYPE (*expr_p), tmp);
6934 break;
6935 }
6936 /* FALLTHRU */
6937
6938 default:
6939 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
6940 {
6941 case tcc_comparison:
6942 /* Handle comparison of objects of non scalar mode aggregates
6943 with a call to memcmp. It would be nice to only have to do
6944 this for variable-sized objects, but then we'd have to allow
6945 the same nest of reference nodes we allow for MODIFY_EXPR and
6946 that's too complex.
6947
6948 Compare scalar mode aggregates as scalar mode values. Using
6949 memcmp for them would be very inefficient at best, and is
6950 plain wrong if bitfields are involved. */
6951 {
6952 tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
6953
6954 if (!AGGREGATE_TYPE_P (type))
6955 goto expr_2;
6956 else if (TYPE_MODE (type) != BLKmode)
6957 ret = gimplify_scalar_mode_aggregate_compare (expr_p);
6958 else
6959 ret = gimplify_variable_sized_compare (expr_p);
6960
6961 break;
6962 }
6963
6964 /* If *EXPR_P does not need to be special-cased, handle it
6965 according to its class. */
6966 case tcc_unary:
6967 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
6968 post_p, is_gimple_val, fb_rvalue);
6969 break;
6970
6971 case tcc_binary:
6972 expr_2:
6973 {
6974 enum gimplify_status r0, r1;
6975
6976 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
6977 post_p, is_gimple_val, fb_rvalue);
6978 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
6979 post_p, is_gimple_val, fb_rvalue);
6980
6981 ret = MIN (r0, r1);
6982 break;
6983 }
6984
6985 case tcc_declaration:
6986 case tcc_constant:
6987 ret = GS_ALL_DONE;
6988 goto dont_recalculate;
6989
6990 default:
6991 gcc_assert (TREE_CODE (*expr_p) == TRUTH_AND_EXPR
6992 || TREE_CODE (*expr_p) == TRUTH_OR_EXPR
6993 || TREE_CODE (*expr_p) == TRUTH_XOR_EXPR);
6994 goto expr_2;
6995 }
6996
6997 recalculate_side_effects (*expr_p);
6998
6999 dont_recalculate:
7000 break;
7001 }
7002
7003 /* If we replaced *expr_p, gimplify again. */
7004 if (ret == GS_OK && (*expr_p == NULL || *expr_p == save_expr))
7005 ret = GS_ALL_DONE;
7006 }
7007 while (ret == GS_OK);
7008
7009 /* If we encountered an error_mark somewhere nested inside, either
7010 stub out the statement or propagate the error back out. */
7011 if (ret == GS_ERROR)
7012 {
7013 if (is_statement)
7014 *expr_p = NULL;
7015 goto out;
7016 }
7017
7018 /* This was only valid as a return value from the langhook, which
7019 we handled. Make sure it doesn't escape from any other context. */
7020 gcc_assert (ret != GS_UNHANDLED);
7021
7022 if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
7023 {
7024 /* We aren't looking for a value, and we don't have a valid
7025 statement. If it doesn't have side-effects, throw it away. */
7026 if (!TREE_SIDE_EFFECTS (*expr_p))
7027 *expr_p = NULL;
7028 else if (!TREE_THIS_VOLATILE (*expr_p))
7029 {
7030 /* This is probably a _REF that contains something nested that
7031 has side effects. Recurse through the operands to find it. */
7032 enum tree_code code = TREE_CODE (*expr_p);
7033
7034 switch (code)
7035 {
7036 case COMPONENT_REF:
7037 case REALPART_EXPR:
7038 case IMAGPART_EXPR:
7039 case VIEW_CONVERT_EXPR:
7040 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
7041 gimple_test_f, fallback);
7042 break;
7043
7044 case ARRAY_REF:
7045 case ARRAY_RANGE_REF:
7046 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
7047 gimple_test_f, fallback);
7048 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
7049 gimple_test_f, fallback);
7050 break;
7051
7052 default:
7053 /* Anything else with side-effects must be converted to
7054 a valid statement before we get here. */
7055 gcc_unreachable ();
7056 }
7057
7058 *expr_p = NULL;
7059 }
7060 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
7061 && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode)
7062 {
7063 /* Historically, the compiler has treated a bare reference
7064 to a non-BLKmode volatile lvalue as forcing a load. */
7065 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
7066
7067 /* Normally, we do not want to create a temporary for a
7068 TREE_ADDRESSABLE type because such a type should not be
7069 copied by bitwise-assignment. However, we make an
7070 exception here, as all we are doing here is ensuring that
7071 we read the bytes that make up the type. We use
7072 create_tmp_var_raw because create_tmp_var will abort when
7073 given a TREE_ADDRESSABLE type. */
7074 tree tmp = create_tmp_var_raw (type, "vol");
7075 gimple_add_tmp_var (tmp);
7076 gimplify_assign (tmp, *expr_p, pre_p);
7077 *expr_p = NULL;
7078 }
7079 else
7080 /* We can't do anything useful with a volatile reference to
7081 an incomplete type, so just throw it away. Likewise for
7082 a BLKmode type, since any implicit inner load should
7083 already have been turned into an explicit one by the
7084 gimplification process. */
7085 *expr_p = NULL;
7086 }
7087
7088 /* If we are gimplifying at the statement level, we're done. Tack
7089 everything together and return. */
7090 if (fallback == fb_none || is_statement)
7091 {
7092 /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
7093 it out for GC to reclaim it. */
7094 *expr_p = NULL_TREE;
7095
7096 if (!gimple_seq_empty_p (internal_pre)
7097 || !gimple_seq_empty_p (internal_post))
7098 {
7099 gimplify_seq_add_seq (&internal_pre, internal_post);
7100 gimplify_seq_add_seq (pre_p, internal_pre);
7101 }
7102
7103 /* The result of gimplifying *EXPR_P is going to be the last few
7104 statements in *PRE_P and *POST_P. Add location information
7105 to all the statements that were added by the gimplification
7106 helpers. */
7107 if (!gimple_seq_empty_p (*pre_p))
7108 annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);
7109
7110 if (!gimple_seq_empty_p (*post_p))
7111 annotate_all_with_location_after (*post_p, post_last_gsi,
7112 input_location);
7113
7114 goto out;
7115 }
7116
7117 #ifdef ENABLE_GIMPLE_CHECKING
7118 if (*expr_p)
7119 {
7120 enum tree_code code = TREE_CODE (*expr_p);
7121 /* These expressions should already be in gimple IR form. */
7122 gcc_assert (code != MODIFY_EXPR
7123 && code != ASM_EXPR
7124 && code != BIND_EXPR
7125 && code != CATCH_EXPR
7126 && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
7127 && code != EH_FILTER_EXPR
7128 && code != GOTO_EXPR
7129 && code != LABEL_EXPR
7130 && code != LOOP_EXPR
7131 && code != SWITCH_EXPR
7132 && code != TRY_FINALLY_EXPR
7133 && code != OMP_CRITICAL
7134 && code != OMP_FOR
7135 && code != OMP_MASTER
7136 && code != OMP_ORDERED
7137 && code != OMP_PARALLEL
7138 && code != OMP_SECTIONS
7139 && code != OMP_SECTION
7140 && code != OMP_SINGLE);
7141 }
7142 #endif
7143
7144 /* Otherwise we're gimplifying a subexpression, so the resulting
7145 value is interesting. If it's a valid operand that matches
7146 GIMPLE_TEST_F, we're done. Unless we are handling some
7147 post-effects internally; if that's the case, we need to copy into
7148 a temporary before adding the post-effects to POST_P. */
7149 if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
7150 goto out;
7151
7152 /* Otherwise, we need to create a new temporary for the gimplified
7153 expression. */
7154
7155 /* We can't return an lvalue if we have an internal postqueue. The
7156 object the lvalue refers to would (probably) be modified by the
7157 postqueue; we need to copy the value out first, which means an
7158 rvalue. */
7159 if ((fallback & fb_lvalue)
7160 && gimple_seq_empty_p (internal_post)
7161 && is_gimple_addressable (*expr_p))
7162 {
7163 /* An lvalue will do. Take the address of the expression, store it
7164 in a temporary, and replace the expression with an INDIRECT_REF of
7165 that temporary. */
7166 tmp = build_fold_addr_expr_loc (input_location, *expr_p);
7167 gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
7168 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (tmp)), tmp);
7169 }
7170 else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
7171 {
7172 /* An rvalue will do. Assign the gimplified expression into a
7173 new temporary TMP and replace the original expression with
7174 TMP. First, make sure that the expression has a type so that
7175 it can be assigned into a temporary. */
7176 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
7177
7178 if (!gimple_seq_empty_p (internal_post) || (fallback & fb_lvalue))
7179 /* The postqueue might change the value of the expression between
7180 the initialization and use of the temporary, so we can't use a
7181 formal temp. FIXME do we care? */
7182 {
7183 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
7184 if (TREE_CODE (TREE_TYPE (*expr_p)) == COMPLEX_TYPE
7185 || TREE_CODE (TREE_TYPE (*expr_p)) == VECTOR_TYPE)
7186 DECL_GIMPLE_REG_P (*expr_p) = 1;
7187 }
7188 else
7189 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
7190 }
7191 else
7192 {
7193 #ifdef ENABLE_GIMPLE_CHECKING
7194 if (!(fallback & fb_mayfail))
7195 {
7196 fprintf (stderr, "gimplification failed:\n");
7197 print_generic_expr (stderr, *expr_p, 0);
7198 debug_tree (*expr_p);
7199 internal_error ("gimplification failed");
7200 }
7201 #endif
7202 gcc_assert (fallback & fb_mayfail);
7203
7204 /* If this is an asm statement, and the user asked for the
7205 impossible, don't die. Fail and let gimplify_asm_expr
7206 issue an error. */
7207 ret = GS_ERROR;
7208 goto out;
7209 }
7210
7211 /* Make sure the temporary matches our predicate. */
7212 gcc_assert ((*gimple_test_f) (*expr_p));
7213
7214 if (!gimple_seq_empty_p (internal_post))
7215 {
7216 annotate_all_with_location (internal_post, input_location);
7217 gimplify_seq_add_seq (pre_p, internal_post);
7218 }
7219
7220 out:
7221 input_location = saved_location;
7222 return ret;
7223 }
7224
/* Look through TYPE for variable-sized objects and gimplify each such
   size that we find.  Add to LIST_P any statements generated.

   Only the main variant of TYPE is processed; the resulting gimplified
   size trees are then shared with every other variant.  The walk is
   guarded by TYPE_SIZES_GIMPLIFIED to avoid infinite recursion on
   self-referential types.  */

void
gimplify_type_sizes (tree type, gimple_seq *list_p)
{
  tree field, t;

  if (type == NULL || type == error_mark_node)
    return;

  /* We first do the main variant, then copy into any other variants.  */
  type = TYPE_MAIN_VARIANT (type);

  /* Avoid infinite recursion.  */
  if (TYPE_SIZES_GIMPLIFIED (type))
    return;

  TYPE_SIZES_GIMPLIFIED (type) = 1;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
      /* Gimplify the (possibly variable) bounds, then mirror the
	 gimplified trees into every variant so all variants keep
	 sharing the same bound expressions.  */
      gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
      gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);

      for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
	{
	  TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
	  TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
	}
      break;

    case ARRAY_TYPE:
      /* These types may not have declarations, so handle them here.  */
      gimplify_type_sizes (TREE_TYPE (type), list_p);
      gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
      /* When not optimizing, ensure VLA bounds aren't removed.  */
      if (!optimize
	  && TYPE_DOMAIN (type)
	  && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
	{
	  /* Clearing DECL_IGNORED_P on the artificial bound variables
	     keeps them around (e.g. for the debugger) at -O0.  */
	  t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
	  if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
	    DECL_IGNORED_P (t) = 0;
	  t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
	  if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
	    DECL_IGNORED_P (t) = 0;
	}
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      /* Gimplify the offset and size of each field, and recurse into
	 the field types themselves.  */
      for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
	if (TREE_CODE (field) == FIELD_DECL)
	  {
	    gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
	    gimplify_one_sizepos (&DECL_SIZE (field), list_p);
	    gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
	    gimplify_type_sizes (TREE_TYPE (field), list_p);
	  }
      break;

    case POINTER_TYPE:
    case REFERENCE_TYPE:
	/* We used to recurse on the pointed-to type here, which turned out to
	   be incorrect because its definition might refer to variables not
	   yet initialized at this point if a forward declaration is involved.

	   It was actually useful for anonymous pointed-to types to ensure
	   that the sizes evaluation dominates every possible later use of the
	   values.  Restricting to such types here would be safe since there
	   is no possible forward declaration around, but would introduce an
	   undesirable middle-end semantic to anonymity.  We then defer to
	   front-ends the responsibility of ensuring that the sizes are
	   evaluated both early and late enough, e.g. by attaching artificial
	   type declarations to the tree.  */
      break;

    default:
      break;
    }

  /* Gimplify the overall size/size-unit and share the results with
     every variant, marking them gimplified as well.  */
  gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
  gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);

  for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
    {
      TYPE_SIZE (t) = TYPE_SIZE (type);
      TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
      TYPE_SIZES_GIMPLIFIED (t) = 1;
    }
}
7323
/* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
   a size or position, has had all of its SAVE_EXPRs evaluated.
   We add any required statements to *STMT_P.  */

void
gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
{
  tree type, expr = *expr_p;

  /* We don't do anything if the value isn't there, is constant, or contains
     A PLACEHOLDER_EXPR.  We also don't want to do anything if it's already
     a VAR_DECL.  If it's a VAR_DECL from another function, the gimplifier
     will want to replace it with a new variable, but that will cause problems
     if this type is from outside the function.  It's OK to have that here.  */
  if (expr == NULL_TREE || TREE_CONSTANT (expr)
      || TREE_CODE (expr) == VAR_DECL
      || CONTAINS_PLACEHOLDER_P (expr))
    return;

  /* Remember the original type, then gimplify an unshared copy of the
     expression so the gimplifier may freely rewrite it.  */
  type = TREE_TYPE (expr);
  *expr_p = unshare_expr (expr);

  gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue);
  expr = *expr_p;

  /* Verify that we've an exact type match with the original expression.
     In particular, we do not wish to drop a "sizetype" in favour of a
     type of similar dimensions.  We don't want to pollute the generic
     type-stripping code with this knowledge because it doesn't matter
     for the bulk of GENERIC/GIMPLE.  It only matters that TYPE_SIZE_UNIT
     and friends retain their "sizetype-ness".  */
  if (TREE_TYPE (expr) != type
      && TREE_CODE (type) == INTEGER_TYPE
      && TYPE_IS_SIZETYPE (type))
    {
      tree tmp;
      gimple stmt;

      /* Reinstate the original sizetype by assigning a NOP_EXPR
	 conversion of the gimplified value to a fresh temporary of
	 that type, carrying over the source location if any.  */
      *expr_p = create_tmp_var (type, NULL);
      tmp = build1 (NOP_EXPR, type, expr);
      stmt = gimplify_assign (*expr_p, tmp, stmt_p);
      if (EXPR_HAS_LOCATION (expr))
	gimple_set_location (stmt, EXPR_LOCATION (expr));
      else
	gimple_set_location (stmt, input_location);
    }
}
7371
7372
/* Gimplify the body of statements pointed to by BODY_P and return a
   GIMPLE_BIND containing the sequence of GIMPLE statements
   corresponding to BODY_P.  FNDECL is the function decl containing
   *BODY_P.  If DO_PARMS is true, callee-copied parameters are
   resolved via gimplify_parameters and the resulting statements are
   prepended to the body.  On return *BODY_P is cleared.  */

gimple
gimplify_body (tree *body_p, tree fndecl, bool do_parms)
{
  location_t saved_location = input_location;
  gimple_seq parm_stmts, seq;
  gimple outer_bind;
  struct gimplify_ctx gctx;

  timevar_push (TV_TREE_GIMPLIFY);

  /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
     gimplification.  */
  default_rtl_profile ();

  gcc_assert (gimplify_ctxp == NULL);
  push_gimplify_context (&gctx);

  /* Unshare most shared trees in the body and in that of any nested functions.
     It would seem we don't have to do this for nested functions because
     they are supposed to be output and then the outer function gimplified
     first, but the g++ front end doesn't always do it that way.  */
  unshare_body (body_p, fndecl);
  unvisit_body (body_p, fndecl);

  /* A non-NULL cgraph origin means FNDECL is a nested function; set up
     the set used to track VLAs of the enclosing function it references
     (consumed during gimplification, destroyed below).  */
  if (cgraph_node (fndecl)->origin)
    nonlocal_vlas = pointer_set_create ();

  /* Make sure input_location isn't set to something weird.  */
  input_location = DECL_SOURCE_LOCATION (fndecl);

  /* Resolve callee-copies.  This has to be done before processing
     the body so that DECL_VALUE_EXPR gets processed correctly.  */
  parm_stmts = (do_parms) ? gimplify_parameters () : NULL;

  /* Gimplify the function's body.  */
  seq = NULL;
  gimplify_stmt (body_p, &seq);
  outer_bind = gimple_seq_first_stmt (seq);
  if (!outer_bind)
    {
      /* An entirely empty body still needs one statement to anchor
	 the bind we return.  */
      outer_bind = gimple_build_nop ();
      gimplify_seq_add_stmt (&seq, outer_bind);
    }

  /* The body must contain exactly one statement, a GIMPLE_BIND.  If this is
     not the case, wrap everything in a GIMPLE_BIND to make it so.  */
  if (gimple_code (outer_bind) == GIMPLE_BIND
      && gimple_seq_first (seq) == gimple_seq_last (seq))
    ;
  else
    outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);

  *body_p = NULL_TREE;

  /* If we had callee-copies statements, insert them at the beginning
     of the function.  */
  if (!gimple_seq_empty_p (parm_stmts))
    {
      gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
      gimple_bind_set_body (outer_bind, parm_stmts);
    }

  if (nonlocal_vlas)
    {
      pointer_set_destroy (nonlocal_vlas);
      nonlocal_vlas = NULL;
    }

  pop_gimplify_context (outer_bind);
  gcc_assert (gimplify_ctxp == NULL);

#ifdef ENABLE_TYPES_CHECKING
  if (!errorcount && !sorrycount)
    verify_types_in_gimple_seq (gimple_bind_body (outer_bind));
#endif

  timevar_pop (TV_TREE_GIMPLIFY);
  input_location = saved_location;

  return outer_bind;
}
7459
/* Entry point to the gimplification pass.  FNDECL is the FUNCTION_DECL
   node for the function we want to gimplify.

   Returns the sequence of GIMPLE statements corresponding to the body
   of FNDECL.  */

void
gimplify_function_tree (tree fndecl)
{
  tree oldfn, parm, ret;
  gimple_seq seq;
  gimple bind;

  gcc_assert (!gimple_body (fndecl));

  /* Temporarily make FNDECL the current function so the helpers below
     operate on the right cfun; restored before returning.  */
  oldfn = current_function_decl;
  current_function_decl = fndecl;
  if (DECL_STRUCT_FUNCTION (fndecl))
    push_cfun (DECL_STRUCT_FUNCTION (fndecl));
  else
    push_struct_function (fndecl);

  for (parm = DECL_ARGUMENTS (fndecl); parm ; parm = TREE_CHAIN (parm))
    {
      /* Preliminarily mark non-addressed complex variables as eligible
	 for promotion to gimple registers.  We'll transform their uses
	 as we find them.  */
      if ((TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
	   || TREE_CODE (TREE_TYPE (parm)) == VECTOR_TYPE)
	  && !TREE_THIS_VOLATILE (parm)
	  && !needs_to_live_in_memory (parm))
	DECL_GIMPLE_REG_P (parm) = 1;
    }

  /* Likewise for the return value.  */
  ret = DECL_RESULT (fndecl);
  if ((TREE_CODE (TREE_TYPE (ret)) == COMPLEX_TYPE
       || TREE_CODE (TREE_TYPE (ret)) == VECTOR_TYPE)
      && !needs_to_live_in_memory (ret))
    DECL_GIMPLE_REG_P (ret) = 1;

  bind = gimplify_body (&DECL_SAVED_TREE (fndecl), fndecl, true);

  /* The tree body of the function is no longer needed, replace it
     with the new GIMPLE body.  */
  seq = gimple_seq_alloc ();
  gimple_seq_add_stmt (&seq, bind);
  gimple_set_body (fndecl, seq);

  /* If we're instrumenting function entry/exit, then prepend the call to
     the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
     catch the exit hook.  */
  /* ??? Add some way to ignore exceptions for this TFE.  */
  if (flag_instrument_function_entry_exit
      && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
      && !flag_instrument_functions_exclude_p (fndecl))
    {
      tree x;
      gimple new_bind;
      gimple tf;
      gimple_seq cleanup = NULL, body = NULL;

      /* The exit hook becomes the cleanup of a GIMPLE_TRY_FINALLY, so
	 it runs on every exit path of the function.  */
      x = implicit_built_in_decls[BUILT_IN_PROFILE_FUNC_EXIT];
      gimplify_seq_add_stmt (&cleanup, gimple_build_call (x, 0));
      tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);

      x = implicit_built_in_decls[BUILT_IN_PROFILE_FUNC_ENTER];
      gimplify_seq_add_stmt (&body, gimple_build_call (x, 0));
      gimplify_seq_add_stmt (&body, tf);
      new_bind = gimple_build_bind (NULL, body, gimple_bind_block (bind));
      /* Clear the block for BIND, since it is no longer directly inside
	 the function, but within a try block.  */
      gimple_bind_set_block (bind, NULL);

      /* Replace the current function body with the body
	 wrapped in the try/finally TF.  */
      seq = gimple_seq_alloc ();
      gimple_seq_add_stmt (&seq, new_bind);
      gimple_set_body (fndecl, seq);
    }

  DECL_SAVED_TREE (fndecl) = NULL_TREE;
  cfun->curr_properties = PROP_gimple_any;

  current_function_decl = oldfn;
  pop_cfun ();
}
7546
7547
/* Some transformations like inlining may invalidate the GIMPLE form
   for operands.  This function traverses all the operands in STMT and
   gimplifies anything that is not a valid gimple operand.  Any new
   GIMPLE statements are inserted before *GSI_P; a compensation copy
   for a replaced LHS, if needed, is inserted after it.  */

void
gimple_regimplify_operands (gimple stmt, gimple_stmt_iterator *gsi_p)
{
  size_t i, num_ops;
  tree orig_lhs = NULL_TREE, lhs, t;
  gimple_seq pre = NULL;
  gimple post_stmt = NULL;
  struct gimplify_ctx gctx;

  push_gimplify_context (&gctx);
  gimplify_ctxp->into_ssa = gimple_in_ssa_p (cfun);

  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      /* Both comparison operands must be gimple values.  */
      gimplify_expr (gimple_cond_lhs_ptr (stmt), &pre, NULL,
		     is_gimple_val, fb_rvalue);
      gimplify_expr (gimple_cond_rhs_ptr (stmt), &pre, NULL,
		     is_gimple_val, fb_rvalue);
      break;
    case GIMPLE_SWITCH:
      gimplify_expr (gimple_switch_index_ptr (stmt), &pre, NULL,
		     is_gimple_val, fb_rvalue);
      break;
    case GIMPLE_OMP_ATOMIC_LOAD:
      gimplify_expr (gimple_omp_atomic_load_rhs_ptr (stmt), &pre, NULL,
		     is_gimple_val, fb_rvalue);
      break;
    case GIMPLE_ASM:
      {
	size_t i, noutputs = gimple_asm_noutputs (stmt);
	const char *constraint, **oconstraints;
	bool allows_mem, allows_reg, is_inout;

	oconstraints
	  = (const char **) alloca ((noutputs) * sizeof (const char *));
	/* Re-gimplify each output operand against the predicate its
	   constraint demands.  */
	for (i = 0; i < noutputs; i++)
	  {
	    tree op = gimple_asm_output_op (stmt, i);
	    constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
	    oconstraints[i] = constraint;
	    parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
				     &allows_reg, &is_inout);
	    gimplify_expr (&TREE_VALUE (op), &pre, NULL,
			   is_inout ? is_gimple_min_lval : is_gimple_lvalue,
			   fb_lvalue | fb_mayfail);
	  }
	/* Likewise for the inputs; an addressable type that may live
	   in memory forces the memory form.  */
	for (i = 0; i < gimple_asm_ninputs (stmt); i++)
	  {
	    tree op = gimple_asm_input_op (stmt, i);
	    constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
	    parse_input_constraint (&constraint, 0, 0, noutputs, 0,
				    oconstraints, &allows_mem, &allows_reg);
	    if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (op))) && allows_mem)
	      allows_reg = 0;
	    if (!allows_reg && allows_mem)
	      gimplify_expr (&TREE_VALUE (op), &pre, NULL,
			     is_gimple_lvalue, fb_lvalue | fb_mayfail);
	    else
	      gimplify_expr (&TREE_VALUE (op), &pre, NULL,
			     is_gimple_asm_val, fb_rvalue);
	  }
      }
      break;
    default:
      /* NOTE: We start gimplifying operands from last to first to
	 make sure that side-effects on the RHS of calls, assignments
	 and ASMs are executed before the LHS.  The ordering is not
	 important for other statements.  */
      num_ops = gimple_num_ops (stmt);
      orig_lhs = gimple_get_lhs (stmt);
      for (i = num_ops; i > 0; i--)
	{
	  tree op = gimple_op (stmt, i - 1);
	  if (op == NULL_TREE)
	    continue;
	  /* Operand 0 of a call or assignment is its LHS; operand 1 of
	     a single-rhs assignment follows the predicate implied by
	     the LHS; operand 1 of a call is the callee address.  */
	  if (i == 1 && (is_gimple_call (stmt) || is_gimple_assign (stmt)))
	    gimplify_expr (&op, &pre, NULL, is_gimple_lvalue, fb_lvalue);
	  else if (i == 2
		   && is_gimple_assign (stmt)
		   && num_ops == 2
		   && get_gimple_rhs_class (gimple_expr_code (stmt))
		      == GIMPLE_SINGLE_RHS)
	    gimplify_expr (&op, &pre, NULL,
			   rhs_predicate_for (gimple_assign_lhs (stmt)),
			   fb_rvalue);
	  else if (i == 2 && is_gimple_call (stmt))
	    {
	      if (TREE_CODE (op) == FUNCTION_DECL)
		continue;
	      gimplify_expr (&op, &pre, NULL, is_gimple_call_addr, fb_rvalue);
	    }
	  else
	    gimplify_expr (&op, &pre, NULL, is_gimple_val, fb_rvalue);
	  gimple_set_op (stmt, i - 1, op);
	}

      lhs = gimple_get_lhs (stmt);
      /* If the LHS changed it in a way that requires a simple RHS,
	 create temporary.  */
      if (lhs && !is_gimple_reg (lhs))
	{
	  bool need_temp = false;

	  if (is_gimple_assign (stmt)
	      && num_ops == 2
	      && get_gimple_rhs_class (gimple_expr_code (stmt))
		 == GIMPLE_SINGLE_RHS)
	    gimplify_expr (gimple_assign_rhs1_ptr (stmt), &pre, NULL,
			   rhs_predicate_for (gimple_assign_lhs (stmt)),
			   fb_rvalue);
	  else if (is_gimple_reg (lhs))
	    {
	      if (is_gimple_reg_type (TREE_TYPE (lhs)))
		{
		  if (is_gimple_call (stmt))
		    {
		      /* Calls that are not const/pure (or that loop)
			 and statements that can throw internally must
			 not store straight into a register LHS.  */
		      i = gimple_call_flags (stmt);
		      if ((i & ECF_LOOPING_CONST_OR_PURE)
			  || !(i & (ECF_CONST | ECF_PURE)))
			need_temp = true;
		    }
		  if (stmt_can_throw_internal (stmt))
		    need_temp = true;
		}
	    }
	  else
	    {
	      if (is_gimple_reg_type (TREE_TYPE (lhs)))
		need_temp = true;
	      else if (TYPE_MODE (TREE_TYPE (lhs)) != BLKmode)
		{
		  if (is_gimple_call (stmt))
		    {
		      tree fndecl = gimple_call_fndecl (stmt);

		      /* A value returned in registers needs the
			 intermediate copy, unless it is returned by
			 invisible reference.  */
		      if (!aggregate_value_p (TREE_TYPE (lhs), fndecl)
			  && !(fndecl && DECL_RESULT (fndecl)
			       && DECL_BY_REFERENCE (DECL_RESULT (fndecl))))
			need_temp = true;
		    }
		  else
		    need_temp = true;
		}
	    }
	  if (need_temp)
	    {
	      /* Store into a fresh temporary and copy it to the real
		 LHS afterwards (POST_STMT, inserted after GSI_P).  */
	      tree temp = create_tmp_var (TREE_TYPE (lhs), NULL);

	      if (TREE_CODE (TREE_TYPE (lhs)) == COMPLEX_TYPE
		  || TREE_CODE (TREE_TYPE (lhs)) == VECTOR_TYPE)
		DECL_GIMPLE_REG_P (temp) = 1;
	      if (TREE_CODE (orig_lhs) == SSA_NAME)
		orig_lhs = SSA_NAME_VAR (orig_lhs);

	      if (gimple_in_ssa_p (cfun))
		temp = make_ssa_name (temp, NULL);
	      gimple_set_lhs (stmt, temp);
	      post_stmt = gimple_build_assign (lhs, temp);
	      if (TREE_CODE (lhs) == SSA_NAME)
		SSA_NAME_DEF_STMT (lhs) = post_stmt;
	    }
	}
      break;
    }

  /* Register any temporaries the gimplifier created.  */
  if (gimple_referenced_vars (cfun))
    for (t = gimplify_ctxp->temps; t ; t = TREE_CHAIN (t))
      add_referenced_var (t);

  /* Emit the preamble statements, marking symbols for renaming when
     in SSA form, then the LHS compensation copy if one was made.  */
  if (!gimple_seq_empty_p (pre))
    {
      if (gimple_in_ssa_p (cfun))
	{
	  gimple_stmt_iterator i;

	  for (i = gsi_start (pre); !gsi_end_p (i); gsi_next (&i))
	    mark_symbols_for_renaming (gsi_stmt (i));
	}
      gsi_insert_seq_before (gsi_p, pre, GSI_SAME_STMT);
    }
  if (post_stmt)
    gsi_insert_after (gsi_p, post_stmt, GSI_NEW_STMT);

  pop_gimplify_context (NULL);
}
7739
7740
7741 /* Expands EXPR to list of gimple statements STMTS. If SIMPLE is true,
7742 force the result to be either ssa_name or an invariant, otherwise
7743 just force it to be a rhs expression. If VAR is not NULL, make the
7744 base variable of the final destination be VAR if suitable. */
7745
7746 tree
7747 force_gimple_operand (tree expr, gimple_seq *stmts, bool simple, tree var)
7748 {
7749 tree t;
7750 enum gimplify_status ret;
7751 gimple_predicate gimple_test_f;
7752 struct gimplify_ctx gctx;
7753
7754 *stmts = NULL;
7755
7756 if (is_gimple_val (expr))
7757 return expr;
7758
7759 gimple_test_f = simple ? is_gimple_val : is_gimple_reg_rhs;
7760
7761 push_gimplify_context (&gctx);
7762 gimplify_ctxp->into_ssa = gimple_in_ssa_p (cfun);
7763 gimplify_ctxp->allow_rhs_cond_expr = true;
7764
7765 if (var)
7766 expr = build2 (MODIFY_EXPR, TREE_TYPE (var), var, expr);
7767
7768 if (TREE_CODE (expr) != MODIFY_EXPR
7769 && TREE_TYPE (expr) == void_type_node)
7770 {
7771 gimplify_and_add (expr, stmts);
7772 expr = NULL_TREE;
7773 }
7774 else
7775 {
7776 ret = gimplify_expr (&expr, stmts, NULL, gimple_test_f, fb_rvalue);
7777 gcc_assert (ret != GS_ERROR);
7778 }
7779
7780 if (gimple_referenced_vars (cfun))
7781 for (t = gimplify_ctxp->temps; t ; t = TREE_CHAIN (t))
7782 add_referenced_var (t);
7783
7784 pop_gimplify_context (NULL);
7785
7786 return expr;
7787 }
7788
7789 /* Invokes force_gimple_operand for EXPR with parameters SIMPLE_P and VAR. If
7790 some statements are produced, emits them at GSI. If BEFORE is true.
7791 the statements are appended before GSI, otherwise they are appended after
7792 it. M specifies the way GSI moves after insertion (GSI_SAME_STMT or
7793 GSI_CONTINUE_LINKING are the usual values). */
7794
7795 tree
7796 force_gimple_operand_gsi (gimple_stmt_iterator *gsi, tree expr,
7797 bool simple_p, tree var, bool before,
7798 enum gsi_iterator_update m)
7799 {
7800 gimple_seq stmts;
7801
7802 expr = force_gimple_operand (expr, &stmts, simple_p, var);
7803
7804 if (!gimple_seq_empty_p (stmts))
7805 {
7806 if (gimple_in_ssa_p (cfun))
7807 {
7808 gimple_stmt_iterator i;
7809
7810 for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
7811 mark_symbols_for_renaming (gsi_stmt (i));
7812 }
7813
7814 if (before)
7815 gsi_insert_seq_before (gsi, stmts, m);
7816 else
7817 gsi_insert_seq_after (gsi, stmts, m);
7818 }
7819
7820 return expr;
7821 }
7822
7823 #include "gt-gimplify.h"