gimplify.c (gimple_regimplify_operands): Only mark the LHS of statements for renaming.
[gcc.git] / gcc / gimplify.c
1 /* Tree lowering pass. This pass converts the GENERIC functions-as-trees
2 tree representation into the GIMPLE form.
3 Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011,
4 2012 Free Software Foundation, Inc.
5 Major work done by Sebastian Pop <s.pop@laposte.net>,
6 Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.
7
8 This file is part of GCC.
9
10 GCC is free software; you can redistribute it and/or modify it under
11 the terms of the GNU General Public License as published by the Free
12 Software Foundation; either version 3, or (at your option) any later
13 version.
14
15 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
16 WARRANTY; without even the implied warranty of MERCHANTABILITY or
17 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 for more details.
19
20 You should have received a copy of the GNU General Public License
21 along with GCC; see the file COPYING3. If not see
22 <http://www.gnu.org/licenses/>. */
23
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "tm.h"
28 #include "tree.h"
29 #include "gimple.h"
30 #include "tree-iterator.h"
31 #include "tree-inline.h"
32 #include "tree-pretty-print.h"
33 #include "langhooks.h"
34 #include "tree-flow.h"
35 #include "cgraph.h"
36 #include "timevar.h"
37 #include "hashtab.h"
38 #include "flags.h"
39 #include "function.h"
40 #include "output.h"
41 #include "ggc.h"
42 #include "diagnostic-core.h"
43 #include "target.h"
44 #include "pointer-set.h"
45 #include "splay-tree.h"
46 #include "vec.h"
47 #include "gimple.h"
48 #include "tree-pass.h"
49
50 #include "langhooks-def.h" /* FIXME: for lhd_set_decl_assembler_name. */
51 #include "expr.h" /* FIXME: for can_move_by_pieces
52 and STACK_CHECK_MAX_VAR_SIZE. */
53
/* Flags recorded per variable in a gimplify_omp_ctx's splay tree,
   describing how an OpenMP construct uses that variable.  The values
   are distinct bits so they can be OR'ed together.  */

enum gimplify_omp_var_data
{
  GOVD_SEEN = 1,
  GOVD_EXPLICIT = 2,
  GOVD_SHARED = 4,
  GOVD_PRIVATE = 8,
  GOVD_FIRSTPRIVATE = 16,
  GOVD_LASTPRIVATE = 32,
  GOVD_REDUCTION = 64,
  GOVD_LOCAL = 128,
  GOVD_DEBUG_PRIVATE = 256,
  GOVD_PRIVATE_OUTER_REF = 512,
  /* Mask covering every bit that describes a data-sharing class.  */
  GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
			   | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LOCAL)
};


/* Kind of OpenMP region a gimplify_omp_ctx describes.  Note that
   ORT_COMBINED_PARALLEL and ORT_UNTIED_TASK include the bit of their
   base region kind (ORT_PARALLEL resp. ORT_TASK).  */

enum omp_region_type
{
  ORT_WORKSHARE = 0,
  ORT_PARALLEL = 2,
  ORT_COMBINED_PARALLEL = 3,
  ORT_TASK = 4,
  ORT_UNTIED_TASK = 5
};

/* Per-construct state used while gimplifying inside an OpenMP region.  */

struct gimplify_omp_ctx
{
  struct gimplify_omp_ctx *outer_context; /* Enclosing OpenMP context.  */
  splay_tree variables;			  /* DECL -> gimplify_omp_var_data.  */
  struct pointer_set_t *privatized_types; /* Types already privatized here.  */
  location_t location;
  enum omp_clause_default_kind default_kind;
  enum omp_region_type region_type;
};

/* The innermost gimplification context and OpenMP context.  */
static struct gimplify_ctx *gimplify_ctxp;
static struct gimplify_omp_ctx *gimplify_omp_ctxp;


/* Formal (expression) temporary table handling: multiple occurrences of
   the same scalar expression are evaluated into the same temporary.  */

typedef struct gimple_temp_hash_elt
{
  tree val;   /* Key */
  tree temp;  /* Value */
} elt_t;

/* Forward declaration.  */
static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
105
106 /* Mark X addressable. Unlike the langhook we expect X to be in gimple
107 form and we don't do any syntax checking. */
108
109 void
110 mark_addressable (tree x)
111 {
112 while (handled_component_p (x))
113 x = TREE_OPERAND (x, 0);
114 if (TREE_CODE (x) == MEM_REF
115 && TREE_CODE (TREE_OPERAND (x, 0)) == ADDR_EXPR)
116 x = TREE_OPERAND (TREE_OPERAND (x, 0), 0);
117 if (TREE_CODE (x) != VAR_DECL
118 && TREE_CODE (x) != PARM_DECL
119 && TREE_CODE (x) != RESULT_DECL)
120 return;
121 TREE_ADDRESSABLE (x) = 1;
122 }
123
124 /* Return a hash value for a formal temporary table entry. */
125
126 static hashval_t
127 gimple_tree_hash (const void *p)
128 {
129 tree t = ((const elt_t *) p)->val;
130 return iterative_hash_expr (t, 0);
131 }
132
133 /* Compare two formal temporary table entries. */
134
135 static int
136 gimple_tree_eq (const void *p1, const void *p2)
137 {
138 tree t1 = ((const elt_t *) p1)->val;
139 tree t2 = ((const elt_t *) p2)->val;
140 enum tree_code code = TREE_CODE (t1);
141
142 if (TREE_CODE (t2) != code
143 || TREE_TYPE (t1) != TREE_TYPE (t2))
144 return 0;
145
146 if (!operand_equal_p (t1, t2, 0))
147 return 0;
148
149 #ifdef ENABLE_CHECKING
150 /* Only allow them to compare equal if they also hash equal; otherwise
151 results are nondeterminate, and we fail bootstrap comparison. */
152 gcc_assert (gimple_tree_hash (p1) == gimple_tree_hash (p2));
153 #endif
154
155 return 1;
156 }
157
158 /* Link gimple statement GS to the end of the sequence *SEQ_P. If
159 *SEQ_P is NULL, a new sequence is allocated. This function is
160 similar to gimple_seq_add_stmt, but does not scan the operands.
161 During gimplification, we need to manipulate statement sequences
162 before the def/use vectors have been constructed. */
163
164 void
165 gimple_seq_add_stmt_without_update (gimple_seq *seq_p, gimple gs)
166 {
167 gimple_stmt_iterator si;
168
169 if (gs == NULL)
170 return;
171
172 si = gsi_last (*seq_p);
173 gsi_insert_after_without_update (&si, gs, GSI_NEW_STMT);
174 }
175
/* Shorter alias name for gimple_seq_add_stmt_without_update, for use in
   gimplify.c only.  Like that function, it appends GS to *SEQ_P without
   scanning operands.  */

static inline void
gimplify_seq_add_stmt (gimple_seq *seq_p, gimple gs)
{
  gimple_seq_add_stmt_without_update (seq_p, gs);
}
184
185 /* Append sequence SRC to the end of sequence *DST_P. If *DST_P is
186 NULL, a new sequence is allocated. This function is
187 similar to gimple_seq_add_seq, but does not scan the operands.
188 During gimplification, we need to manipulate statement sequences
189 before the def/use vectors have been constructed. */
190
191 static void
192 gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
193 {
194 gimple_stmt_iterator si;
195
196 if (src == NULL)
197 return;
198
199 si = gsi_last (*dst_p);
200 gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
201 }
202
203 /* Set up a context for the gimplifier. */
204
205 void
206 push_gimplify_context (struct gimplify_ctx *c)
207 {
208 memset (c, '\0', sizeof (*c));
209 c->prev_context = gimplify_ctxp;
210 gimplify_ctxp = c;
211 }
212
213 /* Tear down a context for the gimplifier. If BODY is non-null, then
214 put the temporaries into the outer BIND_EXPR. Otherwise, put them
215 in the local_decls.
216
217 BODY is not a sequence, but the first tuple in a sequence. */
218
219 void
220 pop_gimplify_context (gimple body)
221 {
222 struct gimplify_ctx *c = gimplify_ctxp;
223
224 gcc_assert (c && (c->bind_expr_stack == NULL
225 || VEC_empty (gimple, c->bind_expr_stack)));
226 VEC_free (gimple, heap, c->bind_expr_stack);
227 gimplify_ctxp = c->prev_context;
228
229 if (body)
230 declare_vars (c->temps, body, false);
231 else
232 record_vars (c->temps);
233
234 if (c->temp_htab)
235 htab_delete (c->temp_htab);
236 }
237
238 /* Push a GIMPLE_BIND tuple onto the stack of bindings. */
239
240 static void
241 gimple_push_bind_expr (gimple gimple_bind)
242 {
243 if (gimplify_ctxp->bind_expr_stack == NULL)
244 gimplify_ctxp->bind_expr_stack = VEC_alloc (gimple, heap, 8);
245 VEC_safe_push (gimple, heap, gimplify_ctxp->bind_expr_stack, gimple_bind);
246 }
247
/* Pop the innermost (most recently pushed) element off the stack of
   bindings.  Note VEC_pop removes the LAST element of the vector, not
   the first as the old comment claimed.  */

static void
gimple_pop_bind_expr (void)
{
  VEC_pop (gimple, gimplify_ctxp->bind_expr_stack);
}
255
/* Return the innermost (most recently pushed) element of the stack of
   bindings.  Note VEC_last yields the LAST element of the vector, not
   the first as the old comment claimed.  */

gimple
gimple_current_bind_expr (void)
{
  return VEC_last (gimple, gimplify_ctxp->bind_expr_stack);
}
263
264 /* Return the stack of bindings created during gimplification. */
265
266 VEC(gimple, heap) *
267 gimple_bind_expr_stack (void)
268 {
269 return gimplify_ctxp->bind_expr_stack;
270 }
271
272 /* Return true iff there is a COND_EXPR between us and the innermost
273 CLEANUP_POINT_EXPR. This info is used by gimple_push_cleanup. */
274
275 static bool
276 gimple_conditional_context (void)
277 {
278 return gimplify_ctxp->conditions > 0;
279 }
280
281 /* Note that we've entered a COND_EXPR. */
282
283 static void
284 gimple_push_condition (void)
285 {
286 #ifdef ENABLE_GIMPLE_CHECKING
287 if (gimplify_ctxp->conditions == 0)
288 gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
289 #endif
290 ++(gimplify_ctxp->conditions);
291 }
292
293 /* Note that we've left a COND_EXPR. If we're back at unconditional scope
294 now, add any conditional cleanups we've seen to the prequeue. */
295
296 static void
297 gimple_pop_condition (gimple_seq *pre_p)
298 {
299 int conds = --(gimplify_ctxp->conditions);
300
301 gcc_assert (conds >= 0);
302 if (conds == 0)
303 {
304 gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
305 gimplify_ctxp->conditional_cleanups = NULL;
306 }
307 }
308
309 /* A stable comparison routine for use with splay trees and DECLs. */
310
311 static int
312 splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
313 {
314 tree a = (tree) xa;
315 tree b = (tree) xb;
316
317 return DECL_UID (a) - DECL_UID (b);
318 }
319
320 /* Create a new omp construct that deals with variable remapping. */
321
322 static struct gimplify_omp_ctx *
323 new_omp_context (enum omp_region_type region_type)
324 {
325 struct gimplify_omp_ctx *c;
326
327 c = XCNEW (struct gimplify_omp_ctx);
328 c->outer_context = gimplify_omp_ctxp;
329 c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
330 c->privatized_types = pointer_set_create ();
331 c->location = input_location;
332 c->region_type = region_type;
333 if ((region_type & ORT_TASK) == 0)
334 c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
335 else
336 c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;
337
338 return c;
339 }
340
341 /* Destroy an omp construct that deals with variable remapping. */
342
343 static void
344 delete_omp_context (struct gimplify_omp_ctx *c)
345 {
346 splay_tree_delete (c->variables);
347 pointer_set_destroy (c->privatized_types);
348 XDELETE (c);
349 }
350
351 static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
352 static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);
353
/* Both gimplify the statement T and append it to *SEQ_P.  This function
   behaves exactly as gimplify_stmt, but you don't have to pass T as a
   reference.  (The local copy of T may be rewritten by gimplify_stmt;
   the caller's tree is not.)  */

void
gimplify_and_add (tree t, gimple_seq *seq_p)
{
  gimplify_stmt (&t, seq_p);
}
363
/* Gimplify statement T into sequence *SEQ_P, and return the first
   tuple in the sequence of generated tuples for this statement.
   Return NULL if gimplifying T produced no tuples.  */

static gimple
gimplify_and_return_first (tree t, gimple_seq *seq_p)
{
  /* Remember where the end of the sequence was BEFORE gimplifying T, so
     the first statement generated for T can be found afterwards.  */
  gimple_stmt_iterator last = gsi_last (*seq_p);

  gimplify_and_add (t, seq_p);

  if (!gsi_end_p (last))
    {
      /* The sequence was non-empty: the first new statement is the one
	 immediately after the previously-last statement.  */
      gsi_next (&last);
      return gsi_stmt (last);
    }
  else
    /* The sequence was empty before, so T's output (if any) starts at
       the head of the sequence.  */
    return gimple_seq_first_stmt (*seq_p);
}
383
/* Strip off a legitimate source ending from the input string NAME of
   length LEN.  Rather than having to know the names used by all of
   our front ends, we strip off an ending of a period followed by
   up to six characters.  (Java uses ".class".)  The rightmost such
   period within range wins; NAME is truncated in place.  */

static inline void
remove_suffix (char *name, int len)
{
  int offset;

  /* Probe positions len-2 .. len-7, i.e. the candidate dot must have at
     least one character before it and at most six after it.  */
  for (offset = 2; offset < 8 && len > offset; offset++)
    if (name[len - offset] == '.')
      {
	name[len - offset] = '\0';
	return;
      }
}
403
404 /* Create a new temporary name with PREFIX. Return an identifier. */
405
406 static GTY(()) unsigned int tmp_var_id_num;
407
408 tree
409 create_tmp_var_name (const char *prefix)
410 {
411 char *tmp_name;
412
413 if (prefix)
414 {
415 char *preftmp = ASTRDUP (prefix);
416
417 remove_suffix (preftmp, strlen (preftmp));
418 clean_symbol_name (preftmp);
419
420 prefix = preftmp;
421 }
422
423 ASM_FORMAT_PRIVATE_NAME (tmp_name, prefix ? prefix : "T", tmp_var_id_num++);
424 return get_identifier (tmp_name);
425 }
426
427 /* Create a new temporary variable declaration of type TYPE.
428 Do NOT push it into the current binding. */
429
430 tree
431 create_tmp_var_raw (tree type, const char *prefix)
432 {
433 tree tmp_var;
434
435 tmp_var = build_decl (input_location,
436 VAR_DECL, prefix ? create_tmp_var_name (prefix) : NULL,
437 type);
438
439 /* The variable was declared by the compiler. */
440 DECL_ARTIFICIAL (tmp_var) = 1;
441 /* And we don't want debug info for it. */
442 DECL_IGNORED_P (tmp_var) = 1;
443
444 /* Make the variable writable. */
445 TREE_READONLY (tmp_var) = 0;
446
447 DECL_EXTERNAL (tmp_var) = 0;
448 TREE_STATIC (tmp_var) = 0;
449 TREE_USED (tmp_var) = 1;
450
451 return tmp_var;
452 }
453
454 /* Create a new temporary variable declaration of type TYPE. DO push the
455 variable into the current binding. Further, assume that this is called
456 only from gimplification or optimization, at which point the creation of
457 certain types are bugs. */
458
459 tree
460 create_tmp_var (tree type, const char *prefix)
461 {
462 tree tmp_var;
463
464 /* We don't allow types that are addressable (meaning we can't make copies),
465 or incomplete. We also used to reject every variable size objects here,
466 but now support those for which a constant upper bound can be obtained.
467 The processing for variable sizes is performed in gimple_add_tmp_var,
468 point at which it really matters and possibly reached via paths not going
469 through this function, e.g. after direct calls to create_tmp_var_raw. */
470 gcc_assert (!TREE_ADDRESSABLE (type) && COMPLETE_TYPE_P (type));
471
472 tmp_var = create_tmp_var_raw (type, prefix);
473 gimple_add_tmp_var (tmp_var);
474 return tmp_var;
475 }
476
477 /* Create a new temporary variable declaration of type TYPE by calling
478 create_tmp_var and if TYPE is a vector or a complex number, mark the new
479 temporary as gimple register. */
480
481 tree
482 create_tmp_reg (tree type, const char *prefix)
483 {
484 tree tmp;
485
486 tmp = create_tmp_var (type, prefix);
487 if (TREE_CODE (type) == COMPLEX_TYPE
488 || TREE_CODE (type) == VECTOR_TYPE)
489 DECL_GIMPLE_REG_P (tmp) = 1;
490
491 return tmp;
492 }
493
494 /* Create a temporary with a name derived from VAL. Subroutine of
495 lookup_tmp_var; nobody else should call this function. */
496
497 static inline tree
498 create_tmp_from_val (tree val)
499 {
500 /* Drop all qualifiers and address-space information from the value type. */
501 return create_tmp_var (TYPE_MAIN_VARIANT (TREE_TYPE (val)), get_name (val));
502 }
503
/* Create a temporary to hold the value of VAL.  If IS_FORMAL, try to reuse
   an existing expression temporary by looking VAL up in a per-context
   hash table keyed on the expression itself.  */

static tree
lookup_tmp_var (tree val, bool is_formal)
{
  tree ret;

  /* If not optimizing, never really reuse a temporary.  local-alloc
     won't allocate any variable that is used in more than one basic
     block, which means it will go into memory, causing much extra
     work in reload and final and poorer code generation, outweighing
     the extra memory allocation here.  */
  if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
    ret = create_tmp_from_val (val);
  else
    {
      elt_t elt, *elt_p;
      void **slot;

      elt.val = val;
      /* Create the hash table lazily, on first reuse attempt.  */
      if (gimplify_ctxp->temp_htab == NULL)
        gimplify_ctxp->temp_htab
	  = htab_create (1000, gimple_tree_hash, gimple_tree_eq, free);
      slot = htab_find_slot (gimplify_ctxp->temp_htab, (void *)&elt, INSERT);
      if (*slot == NULL)
	{
	  /* First occurrence of this expression: make a fresh temporary
	     and record it for later occurrences.  */
	  elt_p = XNEW (elt_t);
	  elt_p->val = val;
	  elt_p->temp = ret = create_tmp_from_val (val);
	  *slot = (void *) elt_p;
	}
      else
	{
	  /* Seen before: reuse the recorded temporary.  */
	  elt_p = (elt_t *) *slot;
          ret = elt_p->temp;
	}
    }

  return ret;
}
545
546 /* Returns true iff T is a valid RHS for an assignment to a renamed
547 user -- or front-end generated artificial -- variable. */
548
549 static bool
550 is_gimple_reg_rhs (tree t)
551 {
552 return get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS;
553 }
554
555 /* Returns true iff T is a valid RHS for an assignment to an un-renamed
556 LHS, or for a call argument. */
557
558 static bool
559 is_gimple_mem_rhs (tree t)
560 {
561 /* If we're dealing with a renamable type, either source or dest must be
562 a renamed variable. */
563 if (is_gimple_reg_type (TREE_TYPE (t)))
564 return is_gimple_val (t);
565 else
566 return is_gimple_val (t) || is_gimple_lvalue (t);
567 }
568
569 /* Return true if T is a CALL_EXPR or an expression that can be
570 assigned to a temporary. Note that this predicate should only be
571 used during gimplification. See the rationale for this in
572 gimplify_modify_expr. */
573
574 static bool
575 is_gimple_reg_rhs_or_call (tree t)
576 {
577 return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
578 || TREE_CODE (t) == CALL_EXPR);
579 }
580
581 /* Return true if T is a valid memory RHS or a CALL_EXPR. Note that
582 this predicate should only be used during gimplification. See the
583 rationale for this in gimplify_modify_expr. */
584
585 static bool
586 is_gimple_mem_rhs_or_call (tree t)
587 {
588 /* If we're dealing with a renamable type, either source or dest must be
589 a renamed variable. */
590 if (is_gimple_reg_type (TREE_TYPE (t)))
591 return is_gimple_val (t);
592 else
593 return (is_gimple_val (t) || is_gimple_lvalue (t)
594 || TREE_CODE (t) == CALL_EXPR);
595 }
596
/* Helper for get_formal_tmp_var and get_initialized_tmp_var.  Gimplify
   VAL, create (or reuse, if IS_FORMAL) a temporary for it, append the
   initialization to *PRE_P, and return the temporary (or its SSA name
   when gimplifying into SSA form).  */

static tree
internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
		      bool is_formal)
{
  tree t, mod;

  /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
     can create an INIT_EXPR and convert it into a GIMPLE_CALL below.  */
  gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
		 fb_rvalue);

  t = lookup_tmp_var (val, is_formal);

  /* Formal temporaries of complex or vector type may live in
     registers; mark them so.  */
  if (is_formal
      && (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
	  || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE))
    DECL_GIMPLE_REG_P (t) = 1;

  mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));

  SET_EXPR_LOCATION (mod, EXPR_LOC_OR_HERE (val));

  /* gimplify_modify_expr might want to reduce this further.  */
  gimplify_and_add (mod, pre_p);
  ggc_free (mod);

  /* If we're gimplifying into ssa, gimplify_modify_expr will have
     given our temporary an SSA name.  Find and return it.  */
  if (gimplify_ctxp->into_ssa)
    {
      gimple last = gimple_seq_last_stmt (*pre_p);
      t = gimple_get_lhs (last);
    }

  return t;
}
635
/* Return a formal temporary variable initialized with VAL.  PRE_P is as
   in gimplify_expr.  Only use this function if:

   1) The value of the unfactored expression represented by VAL will not
      change between the initialization and use of the temporary, and
   2) The temporary will not be otherwise modified.

   For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
   and #2 means it is inappropriate for && temps.

   For other cases, use get_initialized_tmp_var instead.  */

tree
get_formal_tmp_var (tree val, gimple_seq *pre_p)
{
  /* is_formal == true: allow the temporary to be shared with other
     occurrences of the same expression.  */
  return internal_get_tmp_var (val, pre_p, NULL, true);
}
653
/* Return a temporary variable initialized with VAL.  PRE_P and POST_P
   are as in gimplify_expr.  Unlike get_formal_tmp_var, the temporary is
   never shared with other expressions.  */

tree
get_initialized_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p)
{
  return internal_get_tmp_var (val, pre_p, post_p, false);
}
662
/* Declare all the variables in VARS in SCOPE.  If DEBUG_INFO is true,
   generate debug info for them; otherwise don't.  SCOPE must be a
   GIMPLE_BIND tuple.  */

void
declare_vars (tree vars, gimple scope, bool debug_info)
{
  tree last = vars;
  if (last)
    {
      tree temps, block;

      gcc_assert (gimple_code (scope) == GIMPLE_BIND);

      /* Reverse the chain in place; LAST now points at the final
	 element of TEMPS.  */
      temps = nreverse (last);

      block = gimple_bind_block (scope);
      gcc_assert (!block || TREE_CODE (block) == BLOCK);
      if (!block || !debug_info)
	{
	  /* No BLOCK (or no debug info wanted): simply prepend TEMPS to
	     the bind's variable chain, splicing at LAST.  */
	  DECL_CHAIN (last) = gimple_bind_vars (scope);
	  gimple_bind_set_vars (scope, temps);
	}
      else
	{
	  /* We need to attach the nodes both to the BIND_EXPR and to its
	     associated BLOCK for debugging purposes.  The key point here
	     is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
	     is a subchain of the BIND_EXPR_VARS of the BIND_EXPR.  */
	  if (BLOCK_VARS (block))
	    BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
	  else
	    {
	      gimple_bind_set_vars (scope,
				    chainon (gimple_bind_vars (scope), temps));
	      BLOCK_VARS (block) = temps;
	    }
	}
    }
}
702
/* For VAR a VAR_DECL of variable size, try to find a constant upper bound
   for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly.  Abort if
   no such upper bound can be obtained.  */

static void
force_constant_size (tree var)
{
  /* The only attempt we make is by querying the maximum size of objects
     of the variable's type.  */

  HOST_WIDE_INT max_size;

  gcc_assert (TREE_CODE (var) == VAR_DECL);

  max_size = max_int_size_in_bytes (TREE_TYPE (var));

  /* A negative result means no constant bound exists -- that is a bug
     at this point.  */
  gcc_assert (max_size >= 0);

  DECL_SIZE_UNIT (var)
    = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
  DECL_SIZE (var)
    = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
}
726
/* Push the temporary variable TMP into the current binding.  TMP must
   not already be chained anywhere or have been seen in a BIND_EXPR.  */

void
gimple_add_tmp_var (tree tmp)
{
  gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));

  /* Later processing assumes that the object size is constant, which might
     not be true at this point.  Force the use of a constant upper bound in
     this case.  */
  if (!host_integerp (DECL_SIZE_UNIT (tmp), 1))
    force_constant_size (tmp);

  DECL_CONTEXT (tmp) = current_function_decl;
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;

  if (gimplify_ctxp)
    {
      /* During gimplification: chain TMP onto the context's temporaries.  */
      DECL_CHAIN (tmp) = gimplify_ctxp->temps;
      gimplify_ctxp->temps = tmp;

      /* Mark temporaries local within the nearest enclosing parallel.  */
      if (gimplify_omp_ctxp)
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  while (ctx && ctx->region_type == ORT_WORKSHARE)
	    ctx = ctx->outer_context;
	  if (ctx)
	    omp_add_variable (ctx, tmp, GOVD_LOCAL | GOVD_SEEN);
	}
    }
  else if (cfun)
    /* Outside gimplification but inside a function: record directly.  */
    record_vars (tmp);
  else
    {
      gimple_seq body_seq;

      /* This case is for nested functions.  We need to expose the locals
	 they create.  */
      body_seq = gimple_body (current_function_decl);
      declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
    }
}
770
771 /* Determine whether to assign a location to the statement GS. */
772
773 static bool
774 should_carry_location_p (gimple gs)
775 {
776 /* Don't emit a line note for a label. We particularly don't want to
777 emit one for the break label, since it doesn't actually correspond
778 to the beginning of the loop/switch. */
779 if (gimple_code (gs) == GIMPLE_LABEL)
780 return false;
781
782 return true;
783 }
784
/* Return true if a location should not be emitted for this statement
   by annotate_one_with_location.  The flag lives in pass-local flag 1
   of G, set by gimple_set_do_not_emit_location.  */

static inline bool
gimple_do_not_emit_location_p (gimple g)
{
  return gimple_plf (g, GF_PLF_1);
}
793
/* Mark statement G so a location will not be emitted by
   annotate_one_with_location.  */

static inline void
gimple_set_do_not_emit_location (gimple g)
{
  /* The PLF flags are initialized to 0 when a new tuple is created,
     so no need to initialize it anywhere.  */
  gimple_set_plf (g, GF_PLF_1, true);
}
804
805 /* Set the location for gimple statement GS to LOCATION. */
806
807 static void
808 annotate_one_with_location (gimple gs, location_t location)
809 {
810 if (!gimple_has_location (gs)
811 && !gimple_do_not_emit_location_p (gs)
812 && should_carry_location_p (gs))
813 gimple_set_location (gs, location);
814 }
815
816 /* Set LOCATION for all the statements after iterator GSI in sequence
817 SEQ. If GSI is pointing to the end of the sequence, start with the
818 first statement in SEQ. */
819
820 static void
821 annotate_all_with_location_after (gimple_seq seq, gimple_stmt_iterator gsi,
822 location_t location)
823 {
824 if (gsi_end_p (gsi))
825 gsi = gsi_start (seq);
826 else
827 gsi_next (&gsi);
828
829 for (; !gsi_end_p (gsi); gsi_next (&gsi))
830 annotate_one_with_location (gsi_stmt (gsi), location);
831 }
832
833 /* Set the location for all the statements in a sequence STMT_P to LOCATION. */
834
835 void
836 annotate_all_with_location (gimple_seq stmt_p, location_t location)
837 {
838 gimple_stmt_iterator i;
839
840 if (gimple_seq_empty_p (stmt_p))
841 return;
842
843 for (i = gsi_start (stmt_p); !gsi_end_p (i); gsi_next (&i))
844 {
845 gimple gs = gsi_stmt (i);
846 annotate_one_with_location (gs, location);
847 }
848 }
849 \f
850 /* This page contains routines to unshare tree nodes, i.e. to duplicate tree
851 nodes that are referenced more than once in GENERIC functions. This is
852 necessary because gimplification (translation into GIMPLE) is performed
853 by modifying tree nodes in-place, so gimplication of a shared node in a
854 first context could generate an invalid GIMPLE form in a second context.
855
856 This is achieved with a simple mark/copy/unmark algorithm that walks the
857 GENERIC representation top-down, marks nodes with TREE_VISITED the first
858 time it encounters them, duplicates them if they already have TREE_VISITED
859 set, and finally removes the TREE_VISITED marks it has set.
860
861 The algorithm works only at the function level, i.e. it generates a GENERIC
862 representation of a function with no nodes shared within the function when
863 passed a GENERIC function (except for nodes that are allowed to be shared).
864
865 At the global level, it is also necessary to unshare tree nodes that are
866 referenced in more than one function, for the same aforementioned reason.
867 This requires some cooperation from the front-end. There are 2 strategies:
868
869 1. Manual unsharing. The front-end needs to call unshare_expr on every
870 expression that might end up being shared across functions.
871
872 2. Deep unsharing. This is an extension of regular unsharing. Instead
873 of calling unshare_expr on expressions that might be shared across
874 functions, the front-end pre-marks them with TREE_VISITED. This will
875 ensure that they are unshared on the first reference within functions
876 when the regular unsharing algorithm runs. The counterpart is that
877 this algorithm must look deeper than for manual unsharing, which is
878 specified by LANG_HOOKS_DEEP_UNSHARING.
879
880 If there are only few specific cases of node sharing across functions, it is
881 probably easier for a front-end to unshare the expressions manually. On the
882 contrary, if the expressions generated at the global level are as widespread
883 as expressions generated within functions, deep unsharing is very likely the
884 way to go. */
885
/* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
   These nodes model computations that must be done once.  If we were to
   unshare something like SAVE_EXPR(i++), the gimplification process would
   create wrong code.  However, if DATA is non-null, it must hold a pointer
   set that is used to unshare the subtrees of these nodes.

   TP/WALK_SUBTREES follow the walk_tree callback convention; the return
   value is always NULL_TREE (the walk is never cut short).  */

static tree
mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
     copy their subtrees if we can make sure to do it only once.  The
     pointer-set insert returns zero the first time T is seen.  */
  if (code == SAVE_EXPR || code == TARGET_EXPR || code == BIND_EXPR)
    {
      if (data && !pointer_set_insert ((struct pointer_set_t *)data, t))
	;
      else
	*walk_subtrees = 0;
    }

  /* Stop at types, decls, constants like copy_tree_r.  */
  else if (TREE_CODE_CLASS (code) == tcc_type
	   || TREE_CODE_CLASS (code) == tcc_declaration
	   || TREE_CODE_CLASS (code) == tcc_constant
	   /* We can't do anything sensible with a BLOCK used as an
	      expression, but we also can't just die when we see it
	      because of non-expression uses.  So we avert our eyes
	      and cross our fingers.  Silly Java.  */
	   || code == BLOCK)
    *walk_subtrees = 0;

  /* Cope with the statement expression extension.  */
  else if (code == STATEMENT_LIST)
    ;

  /* Leave the bulk of the work to copy_tree_r itself.  */
  else
    copy_tree_r (tp, walk_subtrees, NULL);

  return NULL_TREE;
}
929
/* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
   If *TP has been visited already, then *TP is deeply copied by calling
   mostly_copy_tree_r.  DATA is passed to mostly_copy_tree_r unmodified.
   Uses TREE_VISITED as the "seen before" mark; unmark_visited_r clears
   the marks afterwards.  */

static tree
copy_if_shared_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Skip types, decls, and constants.  But we do want to look at their
     types and the bounds of types.  Mark them as visited so we properly
     unmark their subtrees on the unmark pass.  If we've already seen them,
     don't look down further.  */
  if (TREE_CODE_CLASS (code) == tcc_type
      || TREE_CODE_CLASS (code) == tcc_declaration
      || TREE_CODE_CLASS (code) == tcc_constant)
    {
      if (TREE_VISITED (t))
	*walk_subtrees = 0;
      else
	TREE_VISITED (t) = 1;
    }

  /* If this node has been visited already, unshare it and don't look
     any deeper.  */
  else if (TREE_VISITED (t))
    {
      walk_tree (tp, mostly_copy_tree_r, data, NULL);
      *walk_subtrees = 0;
    }

  /* Otherwise, mark the node as visited and keep looking.  */
  else
    TREE_VISITED (t) = 1;

  return NULL_TREE;
}
968
/* Unshare most of the shared trees rooted at *TP.  DATA is passed to the
   copy_if_shared_r callback unmodified (NULL, or a pointer set for deep
   unsharing).  */

static inline void
copy_if_shared (tree *tp, void *data)
{
  walk_tree (tp, copy_if_shared_r, data, NULL);
}
977
978 /* Unshare all the trees in the body of FNDECL, as well as in the bodies of
979 any nested functions. */
980
981 static void
982 unshare_body (tree fndecl)
983 {
984 struct cgraph_node *cgn = cgraph_get_node (fndecl);
985 /* If the language requires deep unsharing, we need a pointer set to make
986 sure we don't repeatedly unshare subtrees of unshareable nodes. */
987 struct pointer_set_t *visited
988 = lang_hooks.deep_unsharing ? pointer_set_create () : NULL;
989
990 copy_if_shared (&DECL_SAVED_TREE (fndecl), visited);
991 copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl)), visited);
992 copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)), visited);
993
994 if (visited)
995 pointer_set_destroy (visited);
996
997 if (cgn)
998 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
999 unshare_body (cgn->symbol.decl);
1000 }
1001
1002 /* Callback for walk_tree to unmark the visited trees rooted at *TP.
1003 Subtrees are walked until the first unvisited node is encountered. */
1004
1005 static tree
1006 unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1007 {
1008 tree t = *tp;
1009
1010 /* If this node has been visited, unmark it and keep looking. */
1011 if (TREE_VISITED (t))
1012 TREE_VISITED (t) = 0;
1013
1014 /* Otherwise, don't look any deeper. */
1015 else
1016 *walk_subtrees = 0;
1017
1018 return NULL_TREE;
1019 }
1020
/* Unmark the visited trees rooted at *TP, i.e. clear the TREE_VISITED
   flags set by copy_if_shared_r.  */

static inline void
unmark_visited (tree *tp)
{
  walk_tree (tp, unmark_visited_r, NULL, NULL);
}
1028
1029 /* Likewise, but mark all trees as not visited. */
1030
static void
unvisit_body (tree fndecl)
{
  struct cgraph_node *cgn = cgraph_get_node (fndecl);

  /* Clear TREE_VISITED on the saved body and on the result decl's size
     expressions -- the same three roots that unshare_body walks.  */
  unmark_visited (&DECL_SAVED_TREE (fndecl));
  unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl)));
  unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)));

  /* Also process the bodies of any nested functions.  */
  if (cgn)
    for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
      unvisit_body (cgn->symbol.decl);
}
1044
1045 /* Unconditionally make an unshared copy of EXPR. This is used when using
1046 stored expressions which span multiple functions, such as BINFO_VTABLE,
1047 as the normal unsharing process can't tell that they're shared. */
1048
tree
unshare_expr (tree expr)
{
  /* mostly_copy_tree_r duplicates nodes without consulting TREE_VISITED,
     so this copies unconditionally rather than copy-if-shared.  */
  walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
  return expr;
}
1055 \f
1056 /* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
1057 contain statements and have a value. Assign its value to a temporary
1058 and give it void_type_node. Return the temporary, or NULL_TREE if
1059 WRAPPER was already void. */
1060
tree
voidify_wrapper_expr (tree wrapper, tree temp)
{
  tree type = TREE_TYPE (wrapper);
  if (type && !VOID_TYPE_P (type))
    {
      tree *p;

      /* Set p to point to the body of the wrapper.  Loop until we find
         something that isn't a wrapper.  Every wrapper traversed on the
         way down is voidified, since its value now flows through TEMP
         (or a fresh temporary) instead.  */
      for (p = &wrapper; p && *p; )
        {
          switch (TREE_CODE (*p))
            {
            case BIND_EXPR:
              TREE_SIDE_EFFECTS (*p) = 1;
              TREE_TYPE (*p) = void_type_node;
              /* For a BIND_EXPR, the body is operand 1.  */
              p = &BIND_EXPR_BODY (*p);
              break;

            case CLEANUP_POINT_EXPR:
            case TRY_FINALLY_EXPR:
            case TRY_CATCH_EXPR:
              TREE_SIDE_EFFECTS (*p) = 1;
              TREE_TYPE (*p) = void_type_node;
              p = &TREE_OPERAND (*p, 0);
              break;

            case STATEMENT_LIST:
              {
                /* The value of a STATEMENT_LIST is its last statement;
                   an empty list produces no value at all (p = NULL).  */
                tree_stmt_iterator i = tsi_last (*p);
                TREE_SIDE_EFFECTS (*p) = 1;
                TREE_TYPE (*p) = void_type_node;
                p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
              }
              break;

            case COMPOUND_EXPR:
              /* Advance to the last statement.  Set all container types to
                 void.  */
              for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
                {
                  TREE_SIDE_EFFECTS (*p) = 1;
                  TREE_TYPE (*p) = void_type_node;
                }
              break;

            case TRANSACTION_EXPR:
              TREE_SIDE_EFFECTS (*p) = 1;
              TREE_TYPE (*p) = void_type_node;
              p = &TRANSACTION_EXPR_BODY (*p);
              break;

            default:
              /* Assume that any tree upon which voidify_wrapper_expr is
                 directly called is a wrapper, and that its body is op0.  */
              if (p == &wrapper)
                {
                  TREE_SIDE_EFFECTS (*p) = 1;
                  TREE_TYPE (*p) = void_type_node;
                  p = &TREE_OPERAND (*p, 0);
                  break;
                }
              /* A non-wrapper below the top level is the value-producing
                 expression we were looking for.  */
              goto out;
            }
        }

    out:
      if (p == NULL || IS_EMPTY_STMT (*p))
        temp = NULL_TREE;
      else if (temp)
        {
          /* The wrapper is on the RHS of an assignment that we're pushing
             down.  */
          gcc_assert (TREE_CODE (temp) == INIT_EXPR
                      || TREE_CODE (temp) == MODIFY_EXPR);
          TREE_OPERAND (temp, 1) = *p;
          *p = temp;
        }
      else
        {
          /* No assignment supplied: create a temporary to capture the
             value and graft an INIT_EXPR in place of the value expr.  */
          temp = create_tmp_var (type, "retval");
          *p = build2 (INIT_EXPR, type, temp, *p);
        }

      return temp;
    }

  return NULL_TREE;
}
1152
1153 /* Prepare calls to builtins to SAVE and RESTORE the stack as well as
1154 a temporary through which they communicate. */
1155
static void
build_stack_save_restore (gimple *save, gimple *restore)
{
  tree tmp_var;

  /* saved_stack = __builtin_stack_save ();  */
  *save = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE), 0);
  tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
  gimple_call_set_lhs (*save, tmp_var);

  /* __builtin_stack_restore (saved_stack);  */
  *restore
    = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE),
                         1, tmp_var);
}
1169
1170 /* Gimplify a BIND_EXPR. Just voidify and recurse. */
1171
static enum gimplify_status
gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree bind_expr = *expr_p;
  bool old_save_stack = gimplify_ctxp->save_stack;
  tree t;
  gimple gimple_bind;
  gimple_seq body, cleanup;
  gimple stack_save;

  /* If the BIND_EXPR has a value, capture it in TEMP so it can be
     returned to the caller after the bind is emitted.  */
  tree temp = voidify_wrapper_expr (bind_expr, NULL);

  /* Mark variables seen in this bind expr.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
    {
      if (TREE_CODE (t) == VAR_DECL)
        {
          struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;

          /* Mark variable as local.  */
          if (ctx && !DECL_EXTERNAL (t)
              && (! DECL_SEEN_IN_BIND_EXPR_P (t)
                  || splay_tree_lookup (ctx->variables,
                                        (splay_tree_key) t) == NULL))
            omp_add_variable (gimplify_omp_ctxp, t, GOVD_LOCAL | GOVD_SEEN);

          DECL_SEEN_IN_BIND_EXPR_P (t) = 1;

          if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
            cfun->has_local_explicit_reg_vars = true;
        }

      /* Preliminarily mark non-addressed complex variables as eligible
         for promotion to gimple registers.  We'll transform their uses
         as we find them.  */
      if ((TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
           || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
          && !TREE_THIS_VOLATILE (t)
          && (TREE_CODE (t) == VAR_DECL && !DECL_HARD_REGISTER (t))
          && !needs_to_live_in_memory (t))
        DECL_GIMPLE_REG_P (t) = 1;
    }

  gimple_bind = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
                                   BIND_EXPR_BLOCK (bind_expr));
  gimple_push_bind_expr (gimple_bind);

  /* Reset save_stack so we can tell whether anything *inside* this bind
     (e.g. a VLA) requires a stack save/restore pair.  */
  gimplify_ctxp->save_stack = false;

  /* Gimplify the body into the GIMPLE_BIND tuple's body.  */
  body = NULL;
  gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
  gimple_bind_set_body (gimple_bind, body);

  cleanup = NULL;
  stack_save = NULL;
  if (gimplify_ctxp->save_stack)
    {
      gimple stack_restore;

      /* Save stack on entry and restore it on exit.  Add a try_finally
         block to achieve this.  Note that mudflap depends on the
         format of the emitted code: see mx_register_decls().  */
      build_stack_save_restore (&stack_save, &stack_restore);

      gimplify_seq_add_stmt (&cleanup, stack_restore);
    }

  /* Add clobbers for all variables that go out of scope.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
    {
      if (TREE_CODE (t) == VAR_DECL
          && !is_global_var (t)
          && DECL_CONTEXT (t) == current_function_decl
          && !DECL_HARD_REGISTER (t)
          && !TREE_THIS_VOLATILE (t)
          && !DECL_HAS_VALUE_EXPR_P (t)
          /* Only care for variables that have to be in memory.  Others
             will be rewritten into SSA names, hence moved to the top-level.  */
          && !is_gimple_reg (t))
        {
          /* An empty CONSTRUCTOR marked TREE_THIS_VOLATILE is GIMPLE's
             representation of a variable "clobber" (end of lifetime).  */
          tree clobber = build_constructor (TREE_TYPE (t), NULL);
          TREE_THIS_VOLATILE (clobber) = 1;
          gimplify_seq_add_stmt (&cleanup, gimple_build_assign (t, clobber));
        }
    }

  if (cleanup)
    {
      gimple gs;
      gimple_seq new_body;

      /* Wrap the body in a GIMPLE_TRY_FINALLY running the stack restore
         and the clobbers on every exit path.  */
      new_body = NULL;
      gs = gimple_build_try (gimple_bind_body (gimple_bind), cleanup,
                             GIMPLE_TRY_FINALLY);

      if (stack_save)
        gimplify_seq_add_stmt (&new_body, stack_save);
      gimplify_seq_add_stmt (&new_body, gs);
      gimple_bind_set_body (gimple_bind, new_body);
    }

  gimplify_ctxp->save_stack = old_save_stack;
  gimple_pop_bind_expr ();

  gimplify_seq_add_stmt (pre_p, gimple_bind);

  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL_TREE;
  return GS_ALL_DONE;
}
1288
1289 /* Gimplify a RETURN_EXPR. If the expression to be returned is not a
1290 GIMPLE value, it is assigned to a new temporary and the statement is
1291 re-written to return the temporary.
1292
1293 PRE_P points to the sequence where side effects that must happen before
1294 STMT should be stored. */
1295
1296 static enum gimplify_status
1297 gimplify_return_expr (tree stmt, gimple_seq *pre_p)
1298 {
1299 gimple ret;
1300 tree ret_expr = TREE_OPERAND (stmt, 0);
1301 tree result_decl, result;
1302
1303 if (ret_expr == error_mark_node)
1304 return GS_ERROR;
1305
1306 if (!ret_expr
1307 || TREE_CODE (ret_expr) == RESULT_DECL
1308 || ret_expr == error_mark_node)
1309 {
1310 gimple ret = gimple_build_return (ret_expr);
1311 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1312 gimplify_seq_add_stmt (pre_p, ret);
1313 return GS_ALL_DONE;
1314 }
1315
1316 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
1317 result_decl = NULL_TREE;
1318 else
1319 {
1320 result_decl = TREE_OPERAND (ret_expr, 0);
1321
1322 /* See through a return by reference. */
1323 if (TREE_CODE (result_decl) == INDIRECT_REF)
1324 result_decl = TREE_OPERAND (result_decl, 0);
1325
1326 gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
1327 || TREE_CODE (ret_expr) == INIT_EXPR)
1328 && TREE_CODE (result_decl) == RESULT_DECL);
1329 }
1330
1331 /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
1332 Recall that aggregate_value_p is FALSE for any aggregate type that is
1333 returned in registers. If we're returning values in registers, then
1334 we don't want to extend the lifetime of the RESULT_DECL, particularly
1335 across another call. In addition, for those aggregates for which
1336 hard_function_value generates a PARALLEL, we'll die during normal
1337 expansion of structure assignments; there's special code in expand_return
1338 to handle this case that does not exist in expand_expr. */
1339 if (!result_decl)
1340 result = NULL_TREE;
1341 else if (aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
1342 {
1343 if (TREE_CODE (DECL_SIZE (result_decl)) != INTEGER_CST)
1344 {
1345 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl)))
1346 gimplify_type_sizes (TREE_TYPE (result_decl), pre_p);
1347 /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
1348 should be effectively allocated by the caller, i.e. all calls to
1349 this function must be subject to the Return Slot Optimization. */
1350 gimplify_one_sizepos (&DECL_SIZE (result_decl), pre_p);
1351 gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl), pre_p);
1352 }
1353 result = result_decl;
1354 }
1355 else if (gimplify_ctxp->return_temp)
1356 result = gimplify_ctxp->return_temp;
1357 else
1358 {
1359 result = create_tmp_reg (TREE_TYPE (result_decl), NULL);
1360
1361 /* ??? With complex control flow (usually involving abnormal edges),
1362 we can wind up warning about an uninitialized value for this. Due
1363 to how this variable is constructed and initialized, this is never
1364 true. Give up and never warn. */
1365 TREE_NO_WARNING (result) = 1;
1366
1367 gimplify_ctxp->return_temp = result;
1368 }
1369
1370 /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
1371 Then gimplify the whole thing. */
1372 if (result != result_decl)
1373 TREE_OPERAND (ret_expr, 0) = result;
1374
1375 gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);
1376
1377 ret = gimple_build_return (result);
1378 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1379 gimplify_seq_add_stmt (pre_p, ret);
1380
1381 return GS_ALL_DONE;
1382 }
1383
1384 /* Gimplify a variable-length array DECL. */
1385
static void
gimplify_vla_decl (tree decl, gimple_seq *seq_p)
{
  /* This is a variable-sized decl.  Simplify its size and mark it
     for deferred expansion.  Note that mudflap depends on the format
     of the emitted code: see mx_register_decls().  */
  tree t, addr, ptr_type;

  /* Gimplify the size expressions first so their side effects land in
     SEQ_P ahead of the allocation that uses them.  */
  gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
  gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);

  /* All occurrences of this decl in final gimplified code will be
     replaced by indirection.  Setting DECL_VALUE_EXPR does two
     things: First, it lets the rest of the gimplifier know what
     replacement to use.  Second, it lets the debug info know
     where to find the value.  */
  ptr_type = build_pointer_type (TREE_TYPE (decl));
  addr = create_tmp_var (ptr_type, get_name (decl));
  DECL_IGNORED_P (addr) = 0;
  t = build_fold_indirect_ref (addr);
  TREE_THIS_NOTRAP (t) = 1;
  SET_DECL_VALUE_EXPR (decl, t);
  DECL_HAS_VALUE_EXPR_P (decl) = 1;

  /* Allocate the storage: addr = __builtin_alloca_with_align (size, align).  */
  t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
  t = build_call_expr (t, 2, DECL_SIZE_UNIT (decl),
                       size_int (DECL_ALIGN (decl)));
  /* The call has been built for a variable-sized object.  */
  CALL_ALLOCA_FOR_VAR_P (t) = 1;
  t = fold_convert (ptr_type, t);
  t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);

  gimplify_and_add (t, seq_p);

  /* Indicate that we need to restore the stack level when the
     enclosing BIND_EXPR is exited.  */
  gimplify_ctxp->save_stack = true;
}
1424
1425 /* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
1426 and initialization explicit. */
1427
static enum gimplify_status
gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
{
  tree stmt = *stmt_p;
  tree decl = DECL_EXPR_DECL (stmt);

  /* The DECL_EXPR itself is consumed here; everything it implies is
     emitted into SEQ_P instead.  */
  *stmt_p = NULL_TREE;

  if (TREE_TYPE (decl) == error_mark_node)
    return GS_ERROR;

  if ((TREE_CODE (decl) == TYPE_DECL
       || TREE_CODE (decl) == VAR_DECL)
      && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
    gimplify_type_sizes (TREE_TYPE (decl), seq_p);

  /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
     in case its size expressions contain problematic nodes like CALL_EXPR.  */
  if (TREE_CODE (decl) == TYPE_DECL
      && DECL_ORIGINAL_TYPE (decl)
      && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl)))
    gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl), seq_p);

  if (TREE_CODE (decl) == VAR_DECL && !DECL_EXTERNAL (decl))
    {
      tree init = DECL_INITIAL (decl);

      /* Treat the decl as a VLA if its size is non-constant, or if generic
         stack checking forces large fixed-size locals into the VLA path.  */
      if (TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
          || (!TREE_STATIC (decl)
              && flag_stack_check == GENERIC_STACK_CHECK
              && compare_tree_int (DECL_SIZE_UNIT (decl),
                                   STACK_CHECK_MAX_VAR_SIZE) > 0))
        gimplify_vla_decl (decl, seq_p);

      /* Some front ends do not explicitly declare all anonymous
         artificial variables.  We compensate here by declaring the
         variables, though it would be better if the front ends would
         explicitly declare them.  */
      if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
          && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
        gimple_add_tmp_var (decl);

      if (init && init != error_mark_node)
        {
          if (!TREE_STATIC (decl))
            {
              /* Turn the initializer into an explicit INIT_EXPR statement
                 and gimplify it; the tree is dead afterwards.  */
              DECL_INITIAL (decl) = NULL_TREE;
              init = build2 (INIT_EXPR, void_type_node, decl, init);
              gimplify_and_add (init, seq_p);
              ggc_free (init);
            }
          else
            /* We must still examine initializers for static variables
               as they may contain a label address.  */
            walk_tree (&init, force_labels_r, NULL, NULL);
        }
    }

  return GS_ALL_DONE;
}
1488
1489 /* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body
1490 and replacing the LOOP_EXPR with goto, but if the loop contains an
1491 EXIT_EXPR, we need to append a label for it to jump to. */
1492
static enum gimplify_status
gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
{
  /* Save the outer loop's exit label; nested loops each get their own.  */
  tree saved_label = gimplify_ctxp->exit_label;
  tree start_label = create_artificial_label (UNKNOWN_LOCATION);

  /* start:  */
  gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));

  /* The exit label is created lazily by gimplify_exit_expr if the body
     actually contains an EXIT_EXPR.  */
  gimplify_ctxp->exit_label = NULL_TREE;

  gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);

  /* goto start;  */
  gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));

  if (gimplify_ctxp->exit_label)
    gimplify_seq_add_stmt (pre_p,
                           gimple_build_label (gimplify_ctxp->exit_label));

  gimplify_ctxp->exit_label = saved_label;

  *expr_p = NULL;
  return GS_ALL_DONE;
}
1516
1517 /* Gimplify a statement list onto a sequence. These may be created either
1518 by an enlightened front-end, or by shortcut_cond_expr. */
1519
1520 static enum gimplify_status
1521 gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
1522 {
1523 tree temp = voidify_wrapper_expr (*expr_p, NULL);
1524
1525 tree_stmt_iterator i = tsi_start (*expr_p);
1526
1527 while (!tsi_end_p (i))
1528 {
1529 gimplify_stmt (tsi_stmt_ptr (i), pre_p);
1530 tsi_delink (&i);
1531 }
1532
1533 if (temp)
1534 {
1535 *expr_p = temp;
1536 return GS_OK;
1537 }
1538
1539 return GS_ALL_DONE;
1540 }
1541 \f
1542 /* Compare two case labels. Because the front end should already have
1543 made sure that case ranges do not overlap, it is enough to only compare
1544 the CASE_LOW values of each case label. */
1545
1546 static int
1547 compare_case_labels (const void *p1, const void *p2)
1548 {
1549 const_tree const case1 = *(const_tree const*)p1;
1550 const_tree const case2 = *(const_tree const*)p2;
1551
1552 /* The 'default' case label always goes first. */
1553 if (!CASE_LOW (case1))
1554 return -1;
1555 else if (!CASE_LOW (case2))
1556 return 1;
1557 else
1558 return tree_int_cst_compare (CASE_LOW (case1), CASE_LOW (case2));
1559 }
1560
1561 /* Sort the case labels in LABEL_VEC in place in ascending order. */
1562
void
sort_case_labels (VEC(tree,heap)* label_vec)
{
  /* compare_case_labels puts a default case (no CASE_LOW) first and
     orders the rest by ascending CASE_LOW.  */
  VEC_qsort (tree, label_vec, compare_case_labels);
}
1568 \f
1569 /* Prepare a vector of case labels to be used in a GIMPLE_SWITCH statement.
1570
1571 LABELS is a vector that contains all case labels to look at.
1572
1573 INDEX_TYPE is the type of the switch index expression. Case labels
1574 in LABELS are discarded if their values are not in the value range
1575 covered by INDEX_TYPE. The remaining case label values are folded
1576 to INDEX_TYPE.
1577
1578 If a default case exists in LABELS, it is removed from LABELS and
1579 returned in DEFAULT_CASEP. If no default case exists, but the
1580 case labels already cover the whole range of INDEX_TYPE, a default
1581 case is returned pointing to one of the existing case labels.
1582 Otherwise DEFAULT_CASEP is set to NULL_TREE.
1583
1584 DEFAULT_CASEP may be NULL, in which case the above comment doesn't
1585 apply and no action is taken regardless of whether a default case is
1586 found or not. */
1587
void
preprocess_case_label_vec_for_gimple (VEC(tree,heap) *labels,
                                      tree index_type,
                                      tree *default_casep)
{
  tree min_value, max_value;
  tree default_case = NULL_TREE;
  size_t i, len;

  /* First pass: walk LABELS in place, dropping unreachable labels,
     canonicalizing ranges, and pulling out the default case.  */
  i = 0;
  min_value = TYPE_MIN_VALUE (index_type);
  max_value = TYPE_MAX_VALUE (index_type);
  while (i < VEC_length (tree, labels))
    {
      tree elt = VEC_index (tree, labels, i);
      tree low = CASE_LOW (elt);
      tree high = CASE_HIGH (elt);
      bool remove_element = FALSE;

      if (low)
        {
          gcc_checking_assert (TREE_CODE (low) == INTEGER_CST);
          gcc_checking_assert (!high || TREE_CODE (high) == INTEGER_CST);

          /* This is a non-default case label, i.e. it has a value.

             See if the case label is reachable within the range of
             the index type.  Remove out-of-range case values.  Turn
             case ranges into a canonical form (high > low strictly)
             and convert the case label values to the index type.

             NB: The type of gimple_switch_index() may be the promoted
             type, but the case labels retain the original type.  */

          if (high)
            {
              /* This is a case range.  Discard empty ranges.
                 If the bounds or the range are equal, turn this
                 into a simple (one-value) case.  */
              int cmp = tree_int_cst_compare (high, low);
              if (cmp < 0)
                remove_element = TRUE;
              else if (cmp == 0)
                high = NULL_TREE;
            }

          if (! high)
            {
              /* If the simple case value is unreachable, ignore it.  */
              if ((TREE_CODE (min_value) == INTEGER_CST
                   && tree_int_cst_compare (low, min_value) < 0)
                  || (TREE_CODE (max_value) == INTEGER_CST
                      && tree_int_cst_compare (low, max_value) > 0))
                remove_element = TRUE;
              else
                low = fold_convert (index_type, low);
            }
          else
            {
              /* If the entire case range is unreachable, ignore it.  */
              if ((TREE_CODE (min_value) == INTEGER_CST
                   && tree_int_cst_compare (high, min_value) < 0)
                  || (TREE_CODE (max_value) == INTEGER_CST
                      && tree_int_cst_compare (low, max_value) > 0))
                remove_element = TRUE;
              else
                {
                  /* If the lower bound is less than the index type's
                     minimum value, truncate the range bounds.  */
                  if (TREE_CODE (min_value) == INTEGER_CST
                      && tree_int_cst_compare (low, min_value) < 0)
                    low = min_value;
                  low = fold_convert (index_type, low);

                  /* If the upper bound is greater than the index type's
                     maximum value, truncate the range bounds.  */
                  if (TREE_CODE (max_value) == INTEGER_CST
                      && tree_int_cst_compare (high, max_value) > 0)
                    high = max_value;
                  high = fold_convert (index_type, high);

                  /* We may have folded a case range to a one-value case.  */
                  if (tree_int_cst_equal (low, high))
                    high = NULL_TREE;
                }
            }

          CASE_LOW (elt) = low;
          CASE_HIGH (elt) = high;
        }
      else
        {
          /* At most one default case is allowed per switch.  */
          gcc_assert (!default_case);
          default_case = elt;
          /* The default case must be passed separately to the
             gimple_build_switch routines.  But if DEFAULT_CASEP
             is NULL, we do not remove the default case (it would
             be completely lost).  */
          if (default_casep)
            remove_element = TRUE;
        }

      if (remove_element)
        VEC_ordered_remove (tree, labels, i);
      else
        i++;
    }
  len = i;

  if (!VEC_empty (tree, labels))
    sort_case_labels (labels);

  if (default_casep && !default_case)
    {
      /* If the switch has no default label, add one, so that we jump
         around the switch body.  If the labels already cover the whole
         range of the switch index_type, add the default label pointing
         to one of the existing labels.  */
      if (len
          && TYPE_MIN_VALUE (index_type)
          && TYPE_MAX_VALUE (index_type)
          && tree_int_cst_equal (CASE_LOW (VEC_index (tree, labels, 0)),
                                 TYPE_MIN_VALUE (index_type)))
        {
          tree low, high = CASE_HIGH (VEC_index (tree, labels, len - 1));
          if (!high)
            high = CASE_LOW (VEC_index (tree, labels, len - 1));
          if (tree_int_cst_equal (high, TYPE_MAX_VALUE (index_type)))
            {
              /* The sorted labels span [min, max]; now check they are
                 gapless, i.e. each label starts exactly one past the end
                 of the previous one.  */
              for (i = 1; i < len; i++)
                {
                  high = CASE_LOW (VEC_index (tree, labels, i));
                  low = CASE_HIGH (VEC_index (tree, labels, i - 1));
                  if (!low)
                    low = CASE_LOW (VEC_index (tree, labels, i - 1));
                  /* Double-word test for LOW + 1 == HIGH: compare the low
                     words, then the high words with a carry when the low
                     word of HIGH wrapped to zero.  */
                  if ((TREE_INT_CST_LOW (low) + 1
                       != TREE_INT_CST_LOW (high))
                      || (TREE_INT_CST_HIGH (low)
                          + (TREE_INT_CST_LOW (high) == 0)
                          != TREE_INT_CST_HIGH (high)))
                    break;
                }
              if (i == len)
                {
                  /* No gaps: any label will do as the default target.  */
                  tree label = CASE_LABEL (VEC_index (tree, labels, 0));
                  default_case = build_case_label (NULL_TREE, NULL_TREE,
                                                   label);
                }
            }
        }
    }

  if (default_casep)
    *default_casep = default_case;
}
1743 \f
1744 /* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
1745 branch to. */
1746
static enum gimplify_status
gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree switch_expr = *expr_p;
  gimple_seq switch_body_seq = NULL;
  enum gimplify_status ret;
  /* The type of the SWITCH_EXPR itself is the index type; fall back to
     the condition's type when it is absent.  */
  tree index_type = TREE_TYPE (switch_expr);
  if (index_type == NULL_TREE)
    index_type = TREE_TYPE (SWITCH_COND (switch_expr));

  ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
                       fb_rvalue);
  if (ret == GS_ERROR || ret == GS_UNHANDLED)
    return ret;

  if (SWITCH_BODY (switch_expr))
    {
      VEC (tree,heap) *labels;
      VEC (tree,heap) *saved_labels;
      tree default_case = NULL_TREE;
      gimple gimple_switch;

      /* If someone can be bothered to fill in the labels, they can
         be bothered to null out the body too.  */
      gcc_assert (!SWITCH_LABELS (switch_expr));

      /* Save old labels, get new ones from body, then restore the old
         labels.  Save all the things from the switch body to append after.  */
      saved_labels = gimplify_ctxp->case_labels;
      gimplify_ctxp->case_labels = VEC_alloc (tree, heap, 8);

      /* Gimplifying the body populates case_labels via
         gimplify_case_label_expr.  */
      gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);
      labels = gimplify_ctxp->case_labels;
      gimplify_ctxp->case_labels = saved_labels;

      /* Canonicalize the labels and extract (or synthesize) the default.  */
      preprocess_case_label_vec_for_gimple (labels, index_type,
                                            &default_case);

      if (!default_case)
        {
          gimple new_default;

          /* No usable default: create one that just falls out of the
             switch, by placing its label at the end of the body.  */
          default_case
            = build_case_label (NULL_TREE, NULL_TREE,
                                create_artificial_label (UNKNOWN_LOCATION));
          new_default = gimple_build_label (CASE_LABEL (default_case));
          gimplify_seq_add_stmt (&switch_body_seq, new_default);
        }

      gimple_switch = gimple_build_switch_vec (SWITCH_COND (switch_expr),
                                               default_case, labels);
      gimplify_seq_add_stmt (pre_p, gimple_switch);
      gimplify_seq_add_seq (pre_p, switch_body_seq);
      VEC_free(tree, heap, labels);
    }
  else
    gcc_assert (SWITCH_LABELS (switch_expr));

  return GS_ALL_DONE;
}
1807
1808 /* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P. */
1809
static enum gimplify_status
gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
{
  struct gimplify_ctx *ctxp;
  gimple gimple_label;

  /* Invalid OpenMP programs can play Duff's Device type games with
     #pragma omp parallel.  At least in the C front end, we don't
     detect such invalid branches until after gimplification.
     Walk outward through the contexts until one with a case_labels
     vector (i.e. an enclosing switch) is found.  */
  for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
    if (ctxp->case_labels)
      break;

  /* Emit the label and record the CASE_LABEL_EXPR for the switch.  */
  gimple_label = gimple_build_label (CASE_LABEL (*expr_p));
  VEC_safe_push (tree, heap, ctxp->case_labels, *expr_p);
  gimplify_seq_add_stmt (pre_p, gimple_label);

  return GS_ALL_DONE;
}
1829
1830 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
1831 if necessary. */
1832
1833 tree
1834 build_and_jump (tree *label_p)
1835 {
1836 if (label_p == NULL)
1837 /* If there's nowhere to jump, just fall through. */
1838 return NULL_TREE;
1839
1840 if (*label_p == NULL_TREE)
1841 {
1842 tree label = create_artificial_label (UNKNOWN_LOCATION);
1843 *label_p = label;
1844 }
1845
1846 return build1 (GOTO_EXPR, void_type_node, *label_p);
1847 }
1848
1849 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
1850 This also involves building a label to jump to and communicating it to
1851 gimplify_loop_expr through gimplify_ctxp->exit_label. */
1852
1853 static enum gimplify_status
1854 gimplify_exit_expr (tree *expr_p)
1855 {
1856 tree cond = TREE_OPERAND (*expr_p, 0);
1857 tree expr;
1858
1859 expr = build_and_jump (&gimplify_ctxp->exit_label);
1860 expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
1861 *expr_p = expr;
1862
1863 return GS_OK;
1864 }
1865
1866 /* A helper function to be called via walk_tree. Mark all labels under *TP
1867 as being forced. To be called for DECL_INITIAL of static variables. */
1868
1869 tree
1870 force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1871 {
1872 if (TYPE_P (*tp))
1873 *walk_subtrees = 0;
1874 if (TREE_CODE (*tp) == LABEL_DECL)
1875 FORCED_LABEL (*tp) = 1;
1876
1877 return NULL_TREE;
1878 }
1879
1880 /* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is
1881 different from its canonical type, wrap the whole thing inside a
1882 NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
1883 type.
1884
1885 The canonical type of a COMPONENT_REF is the type of the field being
1886 referenced--unless the field is a bit-field which can be read directly
1887 in a smaller mode, in which case the canonical type is the
1888 sign-appropriate type corresponding to that mode. */
1889
static void
canonicalize_component_ref (tree *expr_p)
{
  tree expr = *expr_p;
  tree type;

  gcc_assert (TREE_CODE (expr) == COMPONENT_REF);

  /* For integral types, get_unwidened may pick a narrower mode that the
     bit-field can be read in; otherwise use the field's declared type.  */
  if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
    type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
  else
    type = TREE_TYPE (TREE_OPERAND (expr, 1));

  /* One could argue that all the stuff below is not necessary for
     the non-bitfield case and declare it a FE error if type
     adjustment would be needed.  */
  if (TREE_TYPE (expr) != type)
    {
#ifdef ENABLE_TYPES_CHECKING
      tree old_type = TREE_TYPE (expr);
#endif
      int type_quals;

      /* We need to preserve qualifiers and propagate them from
         operand 0.  */
      type_quals = TYPE_QUALS (type)
        | TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
      if (TYPE_QUALS (type) != type_quals)
        type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);

      /* Set the type of the COMPONENT_REF to the underlying type.  */
      TREE_TYPE (expr) = type;

#ifdef ENABLE_TYPES_CHECKING
      /* It is now a FE error, if the conversion from the canonical
         type to the original expression type is not useless.  */
      gcc_assert (useless_type_conversion_p (old_type, type));
#endif
    }
}
1930
1931 /* If a NOP conversion is changing a pointer to array of foo to a pointer
1932 to foo, embed that change in the ADDR_EXPR by converting
1933 T array[U];
1934 (T *)&array
1935 ==>
1936 &array[L]
1937 where L is the lower bound. For simplicity, only do this for constant
1938 lower bound.
1939 The constraint is that the type of &array[L] is trivially convertible
1940 to T *. */
1941
static void
canonicalize_addr_expr (tree *expr_p)
{
  tree expr = *expr_p;
  tree addr_expr = TREE_OPERAND (expr, 0);
  tree datype, ddatype, pddatype;

  /* We simplify only conversions from an ADDR_EXPR to a pointer type.  */
  if (!POINTER_TYPE_P (TREE_TYPE (expr))
      || TREE_CODE (addr_expr) != ADDR_EXPR)
    return;

  /* The addr_expr type should be a pointer to an array.  */
  datype = TREE_TYPE (TREE_TYPE (addr_expr));
  if (TREE_CODE (datype) != ARRAY_TYPE)
    return;

  /* The pointer to element type shall be trivially convertible to
     the expression pointer type.  */
  ddatype = TREE_TYPE (datype);
  pddatype = build_pointer_type (ddatype);
  if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
                                  pddatype))
    return;

  /* The lower bound and element sizes must be constant.  */
  if (!TYPE_SIZE_UNIT (ddatype)
      || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
      || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
      || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
    return;

  /* All checks succeeded.  Build a new node to merge the cast:
     (T *)&array becomes &array[L] where L is the domain's minimum.  */
  *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
                    TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
                    NULL_TREE, NULL_TREE);
  *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);

  /* We can have stripped a required restrict qualifier above.  */
  if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
    *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
}
1984
1985 /* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions
1986 underneath as appropriate. */
1987
static enum gimplify_status
gimplify_conversion (tree *expr_p)
{
  location_t loc = EXPR_LOCATION (*expr_p);
  gcc_assert (CONVERT_EXPR_P (*expr_p));

  /* Then strip away all but the outermost conversion.  */
  STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));

  /* And remove the outermost conversion if it's useless.  */
  if (tree_ssa_useless_type_conversion (*expr_p))
    *expr_p = TREE_OPERAND (*expr_p, 0);

  /* If we still have a conversion at the toplevel,
     then canonicalize some constructs.  */
  if (CONVERT_EXPR_P (*expr_p))
    {
      tree sub = TREE_OPERAND (*expr_p, 0);

      /* If a NOP conversion is changing the type of a COMPONENT_REF
         expression, then canonicalize its type now in order to expose more
         redundant conversions.  */
      if (TREE_CODE (sub) == COMPONENT_REF)
        canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));

      /* If a NOP conversion is changing a pointer to array of foo
         to a pointer to foo, embed that change in the ADDR_EXPR.  */
      else if (TREE_CODE (sub) == ADDR_EXPR)
        canonicalize_addr_expr (expr_p);
    }

  /* If we have a conversion to a non-register type force the
     use of a VIEW_CONVERT_EXPR instead.  */
  if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
    *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
                               TREE_OPERAND (*expr_p, 0));

  return GS_OK;
}
2027
2028 /* Nonlocal VLAs seen in the current function. */
2029 static struct pointer_set_t *nonlocal_vlas;
2030
/* Gimplify a VAR_DECL or PARM_DECL.  Return GS_OK if we expanded a
   DECL_VALUE_EXPR, and it's worth re-examining things.  */

static enum gimplify_status
gimplify_var_or_parm_decl (tree *expr_p)
{
  tree decl = *expr_p;

  /* ??? If this is a local variable, and it has not been seen in any
     outer BIND_EXPR, then it's probably the result of a duplicate
     declaration, for which we've already issued an error.  It would
     be really nice if the front end wouldn't leak these at all.
     Currently the only known culprit is C++ destructors, as seen
     in g++.old-deja/g++.jason/binding.C.  */
  if (TREE_CODE (decl) == VAR_DECL
      && !DECL_SEEN_IN_BIND_EXPR_P (decl)
      && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
      && decl_function_context (decl) == current_function_decl)
    {
      gcc_assert (seen_error ());
      return GS_ERROR;
    }

  /* When within an OpenMP context, notice uses of variables.  This must
     happen before any DECL_VALUE_EXPR substitution below so the OpenMP
     machinery sees the original decl.  */
  if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
    return GS_ALL_DONE;

  /* If the decl is an alias for another expression, substitute it now.  */
  if (DECL_HAS_VALUE_EXPR_P (decl))
    {
      tree value_expr = DECL_VALUE_EXPR (decl);

      /* For referenced nonlocal VLAs add a decl for debugging purposes
	 to the current function.  */
      if (TREE_CODE (decl) == VAR_DECL
	  && TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
	  && nonlocal_vlas != NULL
	  && TREE_CODE (value_expr) == INDIRECT_REF
	  && TREE_CODE (TREE_OPERAND (value_expr, 0)) == VAR_DECL
	  && decl_function_context (decl) != current_function_decl)
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  /* Skip enclosing worksharing regions; only create the debug
	     copy when we are not inside any other OpenMP context.  */
	  while (ctx && ctx->region_type == ORT_WORKSHARE)
	    ctx = ctx->outer_context;
	  /* pointer_set_insert returns nonzero if DECL was already
	     recorded, so each nonlocal VLA gets at most one copy.  */
	  if (!ctx && !pointer_set_insert (nonlocal_vlas, decl))
	    {
	      tree copy = copy_node (decl), block;

	      lang_hooks.dup_lang_specific_decl (copy);
	      /* The copy is for debug information only: detach any RTL
		 and chain it into the outermost block of the current
		 function, sharing the original's value expression.  */
	      SET_DECL_RTL (copy, 0);
	      TREE_USED (copy) = 1;
	      block = DECL_INITIAL (current_function_decl);
	      DECL_CHAIN (copy) = BLOCK_VARS (block);
	      BLOCK_VARS (block) = copy;
	      SET_DECL_VALUE_EXPR (copy, unshare_expr (value_expr));
	      DECL_HAS_VALUE_EXPR_P (copy) = 1;
	    }
	}

      /* Replace the decl with an unshared copy of its value expression
	 and ask the caller to re-gimplify the result.  */
      *expr_p = unshare_expr (value_expr);
      return GS_OK;
    }

  return GS_ALL_DONE;
}
2096
/* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
   node *EXPR_P.

      compound_lval
	      : min_lval '[' val ']'
	      | min_lval '.' ID
	      | compound_lval '[' val ']'
	      | compound_lval '.' ID

   This is not part of the original SIMPLE definition, which separates
   array and member references, but it seems reasonable to handle them
   together.  Also, this way we don't run into problems with union
   aliasing; gcc requires that for accesses through a union to alias, the
   union reference must be explicit, which was not always the case when we
   were splitting up array and member refs.

   PRE_P points to the sequence where side effects that must happen before
   *EXPR_P should be stored.

   POST_P points to the sequence where side effects that must happen after
   *EXPR_P should be stored.  */

static enum gimplify_status
gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			fallback_t fallback)
{
  tree *p;
  VEC(tree,heap) *stack;
  enum gimplify_status ret = GS_ALL_DONE, tret;
  int i;
  location_t loc = EXPR_LOCATION (*expr_p);
  tree expr = *expr_p;

  /* Create a stack of the subexpressions so later we can walk them in
     order from inner to outer.  */
  stack = VEC_alloc (tree, heap, 10);

  /* We can handle anything that get_inner_reference can deal with.  */
  for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
    {
    restart:
      /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs.  */
      if (TREE_CODE (*p) == INDIRECT_REF)
	*p = fold_indirect_ref_loc (loc, *p);

      if (handled_component_p (*p))
	;
      /* Expand DECL_VALUE_EXPR now.  In some cases that may expose
	 additional COMPONENT_REFs.  */
      else if ((TREE_CODE (*p) == VAR_DECL || TREE_CODE (*p) == PARM_DECL)
	       && gimplify_var_or_parm_decl (p) == GS_OK)
	goto restart;
      else
	break;

      VEC_safe_push (tree, heap, stack, *p);
    }

  gcc_assert (VEC_length (tree, stack));

  /* Now STACK is a stack of pointers to all the refs we've walked through
     and P points to the innermost expression.

     Java requires that we elaborated nodes in source order.  That
     means we must gimplify the inner expression followed by each of
     the indices, in order.  But we can't gimplify the inner
     expression until we deal with any variable bounds, sizes, or
     positions in order to deal with PLACEHOLDER_EXPRs.

     So we do this in three steps.  First we deal with the annotations
     for any variables in the components, then we gimplify the base,
     then we gimplify any indices, from left to right.  */
  for (i = VEC_length (tree, stack) - 1; i >= 0; i--)
    {
      tree t = VEC_index (tree, stack, i);

      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
	{
	  /* Gimplify the low bound and element type size and put them into
	     the ARRAY_REF.  If these values are set, they have already been
	     gimplified.  */
	  if (TREE_OPERAND (t, 2) == NULL_TREE)
	    {
	      tree low = unshare_expr (array_ref_low_bound (t));
	      if (!is_gimple_min_invariant (low))
		{
		  TREE_OPERAND (t, 2) = low;
		  tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
					post_p, is_gimple_reg,
					fb_rvalue);
		  ret = MIN (ret, tret);
		}
	    }
	  else
	    {
	      tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
				    is_gimple_reg, fb_rvalue);
	      ret = MIN (ret, tret);
	    }

	  if (TREE_OPERAND (t, 3) == NULL_TREE)
	    {
	      tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
	      tree elmt_size = unshare_expr (array_ref_element_size (t));
	      tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));

	      /* Divide the element size by the alignment of the element
		 type (above).  */
	      elmt_size
		= size_binop_loc (loc, EXACT_DIV_EXPR, elmt_size, factor);

	      if (!is_gimple_min_invariant (elmt_size))
		{
		  TREE_OPERAND (t, 3) = elmt_size;
		  tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
					post_p, is_gimple_reg,
					fb_rvalue);
		  ret = MIN (ret, tret);
		}
	    }
	  else
	    {
	      tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
				    is_gimple_reg, fb_rvalue);
	      ret = MIN (ret, tret);
	    }
	}
      else if (TREE_CODE (t) == COMPONENT_REF)
	{
	  /* Set the field offset into T and gimplify it.  */
	  if (TREE_OPERAND (t, 2) == NULL_TREE)
	    {
	      tree offset = unshare_expr (component_ref_field_offset (t));
	      tree field = TREE_OPERAND (t, 1);
	      tree factor
		= size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);

	      /* Divide the offset by its alignment.  */
	      offset = size_binop_loc (loc, EXACT_DIV_EXPR, offset, factor);

	      if (!is_gimple_min_invariant (offset))
		{
		  TREE_OPERAND (t, 2) = offset;
		  tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
					post_p, is_gimple_reg,
					fb_rvalue);
		  ret = MIN (ret, tret);
		}
	    }
	  else
	    {
	      tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
				    is_gimple_reg, fb_rvalue);
	      ret = MIN (ret, tret);
	    }
	}
    }

  /* Step 2 is to gimplify the base expression.  Make sure lvalue is set
     so as to match the min_lval predicate.  Failure to do so may result
     in the creation of large aggregate temporaries.  */
  tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
			fallback | fb_lvalue);
  ret = MIN (ret, tret);

  /* And finally, the indices and operands to BIT_FIELD_REF.  During this
     loop we also remove any useless conversions.  */
  for (; VEC_length (tree, stack) > 0; )
    {
      tree t = VEC_pop (tree, stack);

      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
	{
	  /* Gimplify the dimension.  */
	  if (!is_gimple_min_invariant (TREE_OPERAND (t, 1)))
	    {
	      tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
				    is_gimple_val, fb_rvalue);
	      ret = MIN (ret, tret);
	    }
	}
      else if (TREE_CODE (t) == BIT_FIELD_REF)
	{
	  /* Both the bit size and the bit position operands must be
	     gimple values.  */
	  tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
				is_gimple_val, fb_rvalue);
	  ret = MIN (ret, tret);
	  tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
				is_gimple_val, fb_rvalue);
	  ret = MIN (ret, tret);
	}

      STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));

      /* The innermost expression P may have originally had
	 TREE_SIDE_EFFECTS set which would have caused all the outer
	 expressions in *EXPR_P leading to P to also have had
	 TREE_SIDE_EFFECTS set.  */
      recalculate_side_effects (t);
    }

  /* If the outermost expression is a COMPONENT_REF, canonicalize its type.  */
  if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
    {
      canonicalize_component_ref (expr_p);
    }

  VEC_free (tree, heap, stack);

  /* If nothing changed we must have returned GS_ALL_DONE throughout.  */
  gcc_assert (*expr_p == expr || ret != GS_ALL_DONE);

  return ret;
}
2309
/* Gimplify the self modifying expression pointed to by EXPR_P
   (++, --, +=, -=).

   PRE_P points to the list where side effects that must happen before
   *EXPR_P should be stored.

   POST_P points to the list where side effects that must happen after
   *EXPR_P should be stored.

   WANT_VALUE is nonzero iff we want to use the value of this expression
   in another expression.  */

static enum gimplify_status
gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			bool want_value)
{
  enum tree_code code;
  tree lhs, lvalue, rhs, t1;
  gimple_seq post = NULL, *orig_post_p = post_p;
  bool postfix;
  enum tree_code arith_code;
  enum gimplify_status ret;
  location_t loc = EXPR_LOCATION (*expr_p);

  code = TREE_CODE (*expr_p);

  gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
	      || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);

  /* Prefix or postfix?  */
  if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
    /* Faster to treat as prefix if result is not used.  */
    postfix = want_value;
  else
    postfix = false;

  /* For postfix, make sure the inner expression's post side effects
     are executed after side effects from this expression.  */
  if (postfix)
    post_p = &post;

  /* Add or subtract?  */
  if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
    arith_code = PLUS_EXPR;
  else
    arith_code = MINUS_EXPR;

  /* Gimplify the LHS into a GIMPLE lvalue.  */
  lvalue = TREE_OPERAND (*expr_p, 0);
  ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
  if (ret == GS_ERROR)
    return ret;

  /* Extract the operands to the arithmetic operation.  */
  lhs = lvalue;
  rhs = TREE_OPERAND (*expr_p, 1);

  /* For postfix operator, we evaluate the LHS to an rvalue and then use
     that as the result value and in the postqueue operation.  We also
     make sure to make lvalue a minimal lval, see
     gcc.c-torture/execute/20040313-1.c for an example where this matters.  */
  if (postfix)
    {
      if (!is_gimple_min_lval (lvalue))
	{
	  /* Rewrite the lvalue as *&lvalue so that the address is
	     computed once; the gimplified address becomes a gimple
	     value reused by both the load and the store.  */
	  mark_addressable (lvalue);
	  lvalue = build_fold_addr_expr_loc (input_location, lvalue);
	  gimplify_expr (&lvalue, pre_p, post_p, is_gimple_val, fb_rvalue);
	  lvalue = build_fold_indirect_ref_loc (input_location, lvalue);
	}
      ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
      if (ret == GS_ERROR)
	return ret;
    }

  /* For pointer increment and decrement, rewrite as a
     POINTER_PLUS_EXPR with a sizetype offset (negated for MINUS).  */
  if (POINTER_TYPE_P (TREE_TYPE (lhs)))
    {
      rhs = convert_to_ptrofftype_loc (loc, rhs);
      if (arith_code == MINUS_EXPR)
	rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
      arith_code = POINTER_PLUS_EXPR;
    }

  if (postfix)
    {
      /* Save the original value in a temporary, emit the assignment to
	 PRE_P, queue the inner post side effects, and make the saved
	 value the result of the whole expression.  */
      tree t2 = get_initialized_tmp_var (lhs, pre_p, NULL);
      t1 = build2 (arith_code, TREE_TYPE (*expr_p), t2, rhs);
      gimplify_assign (lvalue, t1, pre_p);
      gimplify_seq_add_seq (orig_post_p, post);
      *expr_p = t2;
      return GS_ALL_DONE;
    }
  else
    {
      /* Prefix: the result is the updated lvalue itself.  */
      t1 = build2 (arith_code, TREE_TYPE (*expr_p), lhs, rhs);
      *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
      return GS_OK;
    }
}
2410
2411 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
2412
2413 static void
2414 maybe_with_size_expr (tree *expr_p)
2415 {
2416 tree expr = *expr_p;
2417 tree type = TREE_TYPE (expr);
2418 tree size;
2419
2420 /* If we've already wrapped this or the type is error_mark_node, we can't do
2421 anything. */
2422 if (TREE_CODE (expr) == WITH_SIZE_EXPR
2423 || type == error_mark_node)
2424 return;
2425
2426 /* If the size isn't known or is a constant, we have nothing to do. */
2427 size = TYPE_SIZE_UNIT (type);
2428 if (!size || TREE_CODE (size) == INTEGER_CST)
2429 return;
2430
2431 /* Otherwise, make a WITH_SIZE_EXPR. */
2432 size = unshare_expr (size);
2433 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
2434 *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
2435 }
2436
2437 /* Helper for gimplify_call_expr. Gimplify a single argument *ARG_P
2438 Store any side-effects in PRE_P. CALL_LOCATION is the location of
2439 the CALL_EXPR. */
2440
2441 static enum gimplify_status
2442 gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location)
2443 {
2444 bool (*test) (tree);
2445 fallback_t fb;
2446
2447 /* In general, we allow lvalues for function arguments to avoid
2448 extra overhead of copying large aggregates out of even larger
2449 aggregates into temporaries only to copy the temporaries to
2450 the argument list. Make optimizers happy by pulling out to
2451 temporaries those types that fit in registers. */
2452 if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
2453 test = is_gimple_val, fb = fb_rvalue;
2454 else
2455 {
2456 test = is_gimple_lvalue, fb = fb_either;
2457 /* Also strip a TARGET_EXPR that would force an extra copy. */
2458 if (TREE_CODE (*arg_p) == TARGET_EXPR)
2459 {
2460 tree init = TARGET_EXPR_INITIAL (*arg_p);
2461 if (init
2462 && !VOID_TYPE_P (TREE_TYPE (init)))
2463 *arg_p = init;
2464 }
2465 }
2466
2467 /* If this is a variable sized type, we must remember the size. */
2468 maybe_with_size_expr (arg_p);
2469
2470 /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c. */
2471 /* Make sure arguments have the same location as the function call
2472 itself. */
2473 protected_set_expr_location (*arg_p, call_location);
2474
2475 /* There is a sequence point before a function call. Side effects in
2476 the argument list must occur before the actual call. So, when
2477 gimplifying arguments, force gimplify_expr to use an internal
2478 post queue which is then appended to the end of PRE_P. */
2479 return gimplify_expr (arg_p, pre_p, NULL, test, fb);
2480 }
2481
/* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
   WANT_VALUE is true if the result of the call is desired.  */

static enum gimplify_status
gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
{
  tree fndecl, parms, p, fnptrtype;
  enum gimplify_status ret;
  int i, nargs;
  gimple call;
  bool builtin_va_start_p = FALSE;
  location_t loc = EXPR_LOCATION (*expr_p);

  gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);

  /* For reliable diagnostics during inlining, it is necessary that
     every call_expr be annotated with file and line.  */
  if (! EXPR_HAS_LOCATION (*expr_p))
    SET_EXPR_LOCATION (*expr_p, input_location);

  /* This may be a call to a builtin function.

     Builtin function calls may be transformed into different
     (and more efficient) builtin function calls under certain
     circumstances.  Unfortunately, gimplification can muck things
     up enough that the builtin expanders are not aware that certain
     transformations are still valid.

     So we attempt transformation/gimplification of the call before
     we gimplify the CALL_EXPR.  At this time we do not manage to
     transform all calls in the same manner as the expanders do, but
     we do transform most of them.  */
  fndecl = get_callee_fndecl (*expr_p);
  if (fndecl && DECL_BUILT_IN (fndecl))
    {
      tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);

      if (new_tree && new_tree != *expr_p)
	{
	  /* There was a transformation of this call which computes the
	     same value, but in a more efficient way.  Return and try
	     again.  */
	  *expr_p = new_tree;
	  return GS_OK;
	}

      /* va_start needs special-casing: its second argument must stay a
	 plain PARM_DECL and must not be gimplified (see the argument
	 loop below).  */
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	  && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_VA_START)
        {
	  builtin_va_start_p = TRUE;
	  if (call_expr_nargs (*expr_p) < 2)
	    {
	      error ("too few arguments to function %<va_start%>");
	      *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
	      return GS_OK;
	    }

	  if (fold_builtin_next_arg (*expr_p, true))
	    {
	      *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
	      return GS_OK;
	    }
	}
    }

  /* Remember the original function pointer type.  */
  fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));

  /* There is a sequence point before the call, so any side effects in
     the calling expression must occur before the actual call.  Force
     gimplify_expr to use an internal post queue.  */
  ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
		       is_gimple_call_addr, fb_rvalue);

  nargs = call_expr_nargs (*expr_p);

  /* Get argument types for verification.  */
  fndecl = get_callee_fndecl (*expr_p);
  parms = NULL_TREE;
  if (fndecl)
    parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  else if (POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_FN (*expr_p))))
    parms = TYPE_ARG_TYPES (TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (*expr_p))));

  /* Walk P over the declared parameters so that afterwards P is null
     iff every argument corresponds to a named parameter.  */
  if (fndecl && DECL_ARGUMENTS (fndecl))
    p = DECL_ARGUMENTS (fndecl);
  else if (parms)
    p = parms;
  else
    p = NULL_TREE;
  for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
    ;

  /* If the last argument is __builtin_va_arg_pack () and it is not
     passed as a named argument, decrease the number of CALL_EXPR
     arguments and set instead the CALL_EXPR_VA_ARG_PACK flag.  */
  if (!p
      && i < nargs
      && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
    {
      tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
      tree last_arg_fndecl = get_callee_fndecl (last_arg);

      if (last_arg_fndecl
	  && TREE_CODE (last_arg_fndecl) == FUNCTION_DECL
	  && DECL_BUILT_IN_CLASS (last_arg_fndecl) == BUILT_IN_NORMAL
	  && DECL_FUNCTION_CODE (last_arg_fndecl) == BUILT_IN_VA_ARG_PACK)
	{
	  tree call = *expr_p;

	  --nargs;
	  *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
					  CALL_EXPR_FN (call),
					  nargs, CALL_EXPR_ARGP (call));

	  /* Copy all CALL_EXPR flags, location and block, except
	     CALL_EXPR_VA_ARG_PACK flag.  */
	  CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
	  CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
	  CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
	    = CALL_EXPR_RETURN_SLOT_OPT (call);
	  CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
	  SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));
	  TREE_BLOCK (*expr_p) = TREE_BLOCK (call);

	  /* Set CALL_EXPR_VA_ARG_PACK.  */
	  CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
	}
    }

  /* Finally, gimplify the function arguments.  */
  if (nargs > 0)
    {
      for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
           PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
           PUSH_ARGS_REVERSED ? i-- : i++)
        {
          enum gimplify_status t;

          /* Avoid gimplifying the second argument to va_start, which needs to
             be the plain PARM_DECL.  */
          if ((i != 1) || !builtin_va_start_p)
            {
              t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
				EXPR_LOCATION (*expr_p));

              if (t == GS_ERROR)
                ret = GS_ERROR;
            }
        }
    }

  /* Verify the function result.  */
  if (want_value && fndecl
      && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype))))
    {
      error_at (loc, "using result of function returning %<void%>");
      ret = GS_ERROR;
    }

  /* Try this again in case gimplification exposed something.  */
  if (ret != GS_ERROR)
    {
      tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);

      if (new_tree && new_tree != *expr_p)
	{
	  /* There was a transformation of this call which computes the
	     same value, but in a more efficient way.  Return and try
	     again.  */
	  *expr_p = new_tree;
	  return GS_OK;
	}
    }
  else
    {
      *expr_p = error_mark_node;
      return GS_ERROR;
    }

  /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
     decl.  This allows us to eliminate redundant or useless
     calls to "const" functions.  */
  if (TREE_CODE (*expr_p) == CALL_EXPR)
    {
      int flags = call_expr_flags (*expr_p);
      if (flags & (ECF_CONST | ECF_PURE)
	  /* An infinite loop is considered a side effect.  */
	  && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
	TREE_SIDE_EFFECTS (*expr_p) = 0;
    }

  /* If the value is not needed by the caller, emit a new GIMPLE_CALL
     and clear *EXPR_P.  Otherwise, leave *EXPR_P in its gimplified
     form and delegate the creation of a GIMPLE_CALL to
     gimplify_modify_expr.  This is always possible because when
     WANT_VALUE is true, the caller wants the result of this call into
     a temporary, which means that we will emit an INIT_EXPR in
     internal_get_tmp_var which will then be handled by
     gimplify_modify_expr.  */
  if (!want_value)
    {
      /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
	 have to do is replicate it as a GIMPLE_CALL tuple.  */
      gimple_stmt_iterator gsi;
      call = gimple_build_call_from_tree (*expr_p);
      gimple_call_set_fntype (call, TREE_TYPE (fnptrtype));
      gimplify_seq_add_stmt (pre_p, call);
      gsi = gsi_last (*pre_p);
      fold_stmt (&gsi);
      *expr_p = NULL_TREE;
    }
  else
    /* Remember the original function type.  */
    CALL_EXPR_FN (*expr_p) = build1 (NOP_EXPR, fnptrtype,
				     CALL_EXPR_FN (*expr_p));

  return ret;
}
2701
/* Handle shortcut semantics in the predicate operand of a COND_EXPR by
   rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.

   TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
   condition is true or false, respectively.  If null, we should generate
   our own to skip over the evaluation of this specific expression.

   LOCUS is the source location of the COND_EXPR.

   This function is the tree equivalent of do_jump.

   shortcut_cond_r should only be called by shortcut_cond_expr.  */

static tree
shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
		 location_t locus)
{
  tree local_label = NULL_TREE;
  tree t, expr = NULL;

  /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
     retain the shortcut semantics.  Just insert the gotos here;
     shortcut_cond_expr will append the real blocks later.  */
  if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
    {
      location_t new_locus;

      /* Turn if (a && b) into

	 if (a); else goto no;
	 if (b) goto yes; else goto no;
	 (no:) */

      /* Both halves share the 'no' target; synthesize a local label
	 for it when the caller didn't supply one.  */
      if (false_label_p == NULL)
	false_label_p = &local_label;

      /* Keep the original source location on the first 'if'.  */
      t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus);
      append_to_statement_list (t, &expr);

      /* Set the source location of the && on the second 'if'.  */
      new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
      t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
			   new_locus);
      append_to_statement_list (t, &expr);
    }
  else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
    {
      location_t new_locus;

      /* Turn if (a || b) into

	 if (a) goto yes;
	 if (b) goto yes; else goto no;
	 (yes:) */

      /* Both halves share the 'yes' target; synthesize a local label
	 for it when the caller didn't supply one.  */
      if (true_label_p == NULL)
	true_label_p = &local_label;

      /* Keep the original source location on the first 'if'.  */
      t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus);
      append_to_statement_list (t, &expr);

      /* Set the source location of the || on the second 'if'.  */
      new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
      t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
			   new_locus);
      append_to_statement_list (t, &expr);
    }
  else if (TREE_CODE (pred) == COND_EXPR
	   && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 1)))
	   && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 2))))
    {
      location_t new_locus;

      /* As long as we're messing with gotos, turn if (a ? b : c) into
	 if (a)
	   if (b) goto yes; else goto no;
	 else
	   if (c) goto yes; else goto no;

	 Don't do this if one of the arms has void type, which can happen
	 in C++ when the arm is throw.  */

      /* Keep the original source location on the first 'if'.  Set the source
	 location of the ? on the second 'if'.  */
      new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
      expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
		     shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
				      false_label_p, locus),
		     shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
				      false_label_p, new_locus));
    }
  else
    {
      /* Base case: a simple predicate becomes a COND_EXPR whose arms
	 jump to the requested labels (build_and_jump creates a label
	 through the pointer when none exists yet).  */
      expr = build3 (COND_EXPR, void_type_node, pred,
		     build_and_jump (true_label_p),
		     build_and_jump (false_label_p));
      SET_EXPR_LOCATION (expr, locus);
    }

  /* If we created a local fall-through label above, emit it here so
     control rejoins after this subexpression.  */
  if (local_label)
    {
      t = build1 (LABEL_EXPR, void_type_node, local_label);
      append_to_statement_list (t, &expr);
    }

  return expr;
}
2811
/* Given a conditional expression EXPR with short-circuit boolean
   predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
   predicate apart into the equivalent sequence of conditionals.  */

static tree
shortcut_cond_expr (tree expr)
{
  tree pred = TREE_OPERAND (expr, 0);
  tree then_ = TREE_OPERAND (expr, 1);
  tree else_ = TREE_OPERAND (expr, 2);
  tree true_label, false_label, end_label, t;
  tree *true_label_p;
  tree *false_label_p;
  bool emit_end, emit_false, jump_over_else;
  bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
  bool else_se = else_ && TREE_SIDE_EFFECTS (else_);

  /* First do simple transformations.  */
  if (!else_se)
    {
      /* If there is no 'else', turn
	   if (a && b) then c
	 into
	   if (a) if (b) then c.  */
      while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
	{
	  /* Keep the original source location on the first 'if'.  */
	  location_t locus = EXPR_LOC_OR_HERE (expr);
	  TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
	  /* Set the source location of the && on the second 'if'.  */
	  if (EXPR_HAS_LOCATION (pred))
	    SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
	  then_ = shortcut_cond_expr (expr);
	  then_se = then_ && TREE_SIDE_EFFECTS (then_);
	  pred = TREE_OPERAND (pred, 0);
	  expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
	  SET_EXPR_LOCATION (expr, locus);
	}
    }

  if (!then_se)
    {
      /* If there is no 'then', turn
	   if (a || b); else d
	 into
	   if (a); else if (b); else d.  */
      while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
	{
	  /* Keep the original source location on the first 'if'.  */
	  location_t locus = EXPR_LOC_OR_HERE (expr);
	  TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
	  /* Set the source location of the || on the second 'if'.  */
	  if (EXPR_HAS_LOCATION (pred))
	    SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
	  else_ = shortcut_cond_expr (expr);
	  else_se = else_ && TREE_SIDE_EFFECTS (else_);
	  pred = TREE_OPERAND (pred, 0);
	  expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
	  SET_EXPR_LOCATION (expr, locus);
	}
    }

  /* If we're done, great.  */
  if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
      && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
    return expr;

  /* Otherwise we need to mess with gotos.  Change
       if (a) c; else d;
     to
       if (a); else goto no;
       c; goto end;
       no: d; end:
     and recursively gimplify the condition.  */

  true_label = false_label = end_label = NULL_TREE;

  /* If our arms just jump somewhere, hijack those labels so we don't
     generate jumps to jumps.  */

  if (then_
      && TREE_CODE (then_) == GOTO_EXPR
      && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL)
    {
      true_label = GOTO_DESTINATION (then_);
      then_ = NULL;
      then_se = false;
    }

  if (else_
      && TREE_CODE (else_) == GOTO_EXPR
      && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL)
    {
      false_label = GOTO_DESTINATION (else_);
      else_ = NULL;
      else_se = false;
    }

  /* If we aren't hijacking a label for the 'then' branch, it falls through.  */
  if (true_label)
    true_label_p = &true_label;
  else
    true_label_p = NULL;

  /* The 'else' branch also needs a label if it contains interesting code.  */
  if (false_label || else_se)
    false_label_p = &false_label;
  else
    false_label_p = NULL;

  /* If there was nothing else in our arms, just forward the label(s).  */
  if (!then_se && !else_se)
    return shortcut_cond_r (pred, true_label_p, false_label_p,
			    EXPR_LOC_OR_HERE (expr));

  /* If our last subexpression already has a terminal label, reuse it.  */
  if (else_se)
    t = expr_last (else_);
  else if (then_se)
    t = expr_last (then_);
  else
    t = NULL;
  if (t && TREE_CODE (t) == LABEL_EXPR)
    end_label = LABEL_EXPR_LABEL (t);

  /* If we don't care about jumping to the 'else' branch, jump to the end
     if the condition is false.  */
  if (!false_label_p)
    false_label_p = &end_label;

  /* We only want to emit these labels if we aren't hijacking them.  */
  emit_end = (end_label == NULL_TREE);
  emit_false = (false_label == NULL_TREE);

  /* We only emit the jump over the else clause if we have to--if the
     then clause may fall through.  Otherwise we can wind up with a
     useless jump and a useless label at the end of gimplified code,
     which will cause us to think that this conditional as a whole
     falls through even if it doesn't.  If we then inline a function
     which ends with such a condition, that can cause us to issue an
     inappropriate warning about control reaching the end of a
     non-void function.  */
  jump_over_else = block_may_fallthru (then_);

  /* Expand the (possibly compound) predicate into conditional gotos
     targeting the labels chosen above.  */
  pred = shortcut_cond_r (pred, true_label_p, false_label_p,
			  EXPR_LOC_OR_HERE (expr));

  expr = NULL;
  append_to_statement_list (pred, &expr);

  append_to_statement_list (then_, &expr);
  if (else_se)
    {
      if (jump_over_else)
	{
	  tree last = expr_last (expr);
	  t = build_and_jump (&end_label);
	  if (EXPR_HAS_LOCATION (last))
	    SET_EXPR_LOCATION (t, EXPR_LOCATION (last));
	  append_to_statement_list (t, &expr);
	}
      if (emit_false)
	{
	  t = build1 (LABEL_EXPR, void_type_node, false_label);
	  append_to_statement_list (t, &expr);
	}
      append_to_statement_list (else_, &expr);
    }
  if (emit_end && end_label)
    {
      t = build1 (LABEL_EXPR, void_type_node, end_label);
      append_to_statement_list (t, &expr);
    }

  return expr;
}
2988
/* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE.
   Truth expressions and comparisons are retyped in place; anything
   else is wrapped in a conversion to boolean_type_node.  */

tree
gimple_boolify (tree expr)
{
  tree type = TREE_TYPE (expr);
  location_t loc = EXPR_LOCATION (expr);

  /* Recognize the pattern __builtin_expect (...) != 0, which front ends
     emit when __builtin_expect appears in a condition, so we can boolify
     inside the call's first argument as well.  */
  if (TREE_CODE (expr) == NE_EXPR
      && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR
      && integer_zerop (TREE_OPERAND (expr, 1)))
    {
      tree call = TREE_OPERAND (expr, 0);
      tree fn = get_callee_fndecl (call);

      /* For __builtin_expect ((long) (x), y) recurse into x as well
	 if x is truth_value_p.  */
      if (fn
	  && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL
	  && DECL_FUNCTION_CODE (fn) == BUILT_IN_EXPECT
	  && call_expr_nargs (call) == 2)
	{
	  tree arg = CALL_EXPR_ARG (call, 0);
	  if (arg)
	    {
	      /* Look through the cast to the call's (long) type.  */
	      if (TREE_CODE (arg) == NOP_EXPR
		  && TREE_TYPE (arg) == TREE_TYPE (call))
		arg = TREE_OPERAND (arg, 0);
	      if (truth_value_p (TREE_CODE (arg)))
		{
		  arg = gimple_boolify (arg);
		  CALL_EXPR_ARG (call, 0)
		    = fold_convert_loc (loc, TREE_TYPE (call), arg);
		}
	    }
	}
    }

  switch (TREE_CODE (expr))
    {
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      /* Also boolify the arguments of truth exprs.  */
      TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
      /* FALLTHRU */

    case TRUTH_NOT_EXPR:
      TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));

      /* These expressions always produce boolean results.  */
      if (TREE_CODE (type) != BOOLEAN_TYPE)
	TREE_TYPE (expr) = boolean_type_node;
      return expr;

    default:
      if (COMPARISON_CLASS_P (expr))
	{
	  /* These expressions always produce boolean results.  */
	  if (TREE_CODE (type) != BOOLEAN_TYPE)
	    TREE_TYPE (expr) = boolean_type_node;
	  return expr;
	}
      /* Other expressions that get here must have boolean values, but
	 might need to be converted to the appropriate mode.  */
      if (TREE_CODE (type) == BOOLEAN_TYPE)
	return expr;
      return fold_convert_loc (loc, boolean_type_node, expr);
    }
}
3061
3062 /* Given a conditional expression *EXPR_P without side effects, gimplify
3063 its operands. New statements are inserted to PRE_P. */
3064
3065 static enum gimplify_status
3066 gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
3067 {
3068 tree expr = *expr_p, cond;
3069 enum gimplify_status ret, tret;
3070 enum tree_code code;
3071
3072 cond = gimple_boolify (COND_EXPR_COND (expr));
3073
3074 /* We need to handle && and || specially, as their gimplification
3075 creates pure cond_expr, thus leading to an infinite cycle otherwise. */
3076 code = TREE_CODE (cond);
3077 if (code == TRUTH_ANDIF_EXPR)
3078 TREE_SET_CODE (cond, TRUTH_AND_EXPR);
3079 else if (code == TRUTH_ORIF_EXPR)
3080 TREE_SET_CODE (cond, TRUTH_OR_EXPR);
3081 ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_condexpr, fb_rvalue);
3082 COND_EXPR_COND (*expr_p) = cond;
3083
3084 tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
3085 is_gimple_val, fb_rvalue);
3086 ret = MIN (ret, tret);
3087 tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
3088 is_gimple_val, fb_rvalue);
3089
3090 return MIN (ret, tret);
3091 }
3092
3093 /* Return true if evaluating EXPR could trap.
3094 EXPR is GENERIC, while tree_could_trap_p can be called
3095 only on GIMPLE. */
3096
3097 static bool
3098 generic_expr_could_trap_p (tree expr)
3099 {
3100 unsigned i, n;
3101
3102 if (!expr || is_gimple_val (expr))
3103 return false;
3104
3105 if (!EXPR_P (expr) || tree_could_trap_p (expr))
3106 return true;
3107
3108 n = TREE_OPERAND_LENGTH (expr);
3109 for (i = 0; i < n; i++)
3110 if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
3111 return true;
3112
3113 return false;
3114 }
3115
/* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
   into

   if (p)			if (p)
     t1 = a;			  a;
   else				or else
     t1 = b;			  b;
   t1;

   The second form is used when *EXPR_P is of type void.

   PRE_P points to the list where side effects that must happen before
   *EXPR_P should be stored.

   FALLBACK tells which kinds of values (rvalue/lvalue) the caller can
   accept for the result.  */

static enum gimplify_status
gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
{
  tree expr = *expr_p;
  tree type = TREE_TYPE (expr);
  location_t loc = EXPR_LOCATION (expr);
  tree tmp, arm1, arm2;
  enum gimplify_status ret;
  tree label_true, label_false, label_cont;
  bool have_then_clause_p, have_else_clause_p;
  gimple gimple_cond;
  enum tree_code pred_code;
  gimple_seq seq = NULL;

  /* If this COND_EXPR has a value, copy the values into a temporary within
     the arms.  */
  if (!VOID_TYPE_P (type))
    {
      tree then_ = TREE_OPERAND (expr, 1), else_ = TREE_OPERAND (expr, 2);
      tree result;

      /* If either an rvalue is ok or we do not require an lvalue, create the
	 temporary.  But we cannot do that if the type is addressable.  */
      if (((fallback & fb_rvalue) || !(fallback & fb_lvalue))
	  && !TREE_ADDRESSABLE (type))
	{
	  if (gimplify_ctxp->allow_rhs_cond_expr
	      /* If either branch has side effects or could trap, it can't be
		 evaluated unconditionally.  */
	      && !TREE_SIDE_EFFECTS (then_)
	      && !generic_expr_could_trap_p (then_)
	      && !TREE_SIDE_EFFECTS (else_)
	      && !generic_expr_could_trap_p (else_))
	    return gimplify_pure_cond_expr (expr_p, pre_p);

	  tmp = create_tmp_var (type, "iftmp");
	  result = tmp;
	}

      /* Otherwise, only create and copy references to the values.  */
      else
	{
	  /* An lvalue is required: rewrite the COND_EXPR to select between
	     the addresses of the arms and dereference the chosen pointer.  */
	  type = build_pointer_type (type);

	  if (!VOID_TYPE_P (TREE_TYPE (then_)))
	    then_ = build_fold_addr_expr_loc (loc, then_);

	  if (!VOID_TYPE_P (TREE_TYPE (else_)))
	    else_ = build_fold_addr_expr_loc (loc, else_);

	  expr
	    = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_);

	  tmp = create_tmp_var (type, "iftmp");
	  result = build_simple_mem_ref_loc (loc, tmp);
	}

      /* Build the new then clause, `tmp = then_;'.  But don't build the
	 assignment if the value is void; in C++ it can be if it's a throw.  */
      if (!VOID_TYPE_P (TREE_TYPE (then_)))
	TREE_OPERAND (expr, 1) = build2 (MODIFY_EXPR, type, tmp, then_);

      /* Similarly, build the new else clause, `tmp = else_;'.  */
      if (!VOID_TYPE_P (TREE_TYPE (else_)))
	TREE_OPERAND (expr, 2) = build2 (MODIFY_EXPR, type, tmp, else_);

      TREE_TYPE (expr) = void_type_node;
      recalculate_side_effects (expr);

      /* Move the COND_EXPR to the prequeue.  */
      gimplify_stmt (&expr, pre_p);

      *expr_p = result;
      return GS_ALL_DONE;
    }

  /* Remove any COMPOUND_EXPR so the following cases will be caught.  */
  STRIP_TYPE_NOPS (TREE_OPERAND (expr, 0));
  if (TREE_CODE (TREE_OPERAND (expr, 0)) == COMPOUND_EXPR)
    gimplify_compound_expr (&TREE_OPERAND (expr, 0), pre_p, true);

  /* Make sure the condition has BOOLEAN_TYPE.  */
  TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));

  /* Break apart && and || conditions.  */
  if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
      || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
    {
      expr = shortcut_cond_expr (expr);

      if (expr != *expr_p)
	{
	  *expr_p = expr;

	  /* We can't rely on gimplify_expr to re-gimplify the expanded
	     form properly, as cleanups might cause the target labels to be
	     wrapped in a TRY_FINALLY_EXPR.  To prevent that, we need to
	     set up a conditional context.  */
	  gimple_push_condition ();
	  gimplify_stmt (expr_p, &seq);
	  gimple_pop_condition (pre_p);
	  gimple_seq_add_seq (pre_p, seq);

	  return GS_ALL_DONE;
	}
    }

  /* Now do the normal gimplification.  */

  /* Gimplify condition.  */
  ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL, is_gimple_condexpr,
		       fb_rvalue);
  if (ret == GS_ERROR)
    return GS_ERROR;
  gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);

  gimple_push_condition ();

  /* If an arm is just a goto to a label in the current function, reuse
     that label as the corresponding edge of the GIMPLE_COND instead of
     creating an artificial one.  */
  have_then_clause_p = have_else_clause_p = false;
  if (TREE_OPERAND (expr, 1) != NULL
      && TREE_CODE (TREE_OPERAND (expr, 1)) == GOTO_EXPR
      && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 1))) == LABEL_DECL
      && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 1)))
	  == current_function_decl)
      /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
	 have different locations, otherwise we end up with incorrect
	 location information on the branches.  */
      && (optimize
	  || !EXPR_HAS_LOCATION (expr)
	  || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 1))
	  || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 1))))
    {
      label_true = GOTO_DESTINATION (TREE_OPERAND (expr, 1));
      have_then_clause_p = true;
    }
  else
    label_true = create_artificial_label (UNKNOWN_LOCATION);
  if (TREE_OPERAND (expr, 2) != NULL
      && TREE_CODE (TREE_OPERAND (expr, 2)) == GOTO_EXPR
      && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 2))) == LABEL_DECL
      && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 2)))
	  == current_function_decl)
      /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
	 have different locations, otherwise we end up with incorrect
	 location information on the branches.  */
      && (optimize
	  || !EXPR_HAS_LOCATION (expr)
	  || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 2))
	  || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 2))))
    {
      label_false = GOTO_DESTINATION (TREE_OPERAND (expr, 2));
      have_else_clause_p = true;
    }
  else
    label_false = create_artificial_label (UNKNOWN_LOCATION);

  gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
				 &arm2);

  gimple_cond = gimple_build_cond (pred_code, arm1, arm2, label_true,
                                   label_false);

  gimplify_seq_add_stmt (&seq, gimple_cond);
  label_cont = NULL_TREE;
  if (!have_then_clause_p)
    {
      /* For if (...) {} else { code; } put label_true after
	 the else block.  */
      if (TREE_OPERAND (expr, 1) == NULL_TREE
	  && !have_else_clause_p
	  && TREE_OPERAND (expr, 2) != NULL_TREE)
	label_cont = label_true;
      else
	{
	  gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
	  have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
	  /* For if (...) { code; } else {} or
	     if (...) { code; } else goto label; or
	     if (...) { code; return; } else { ... }
	     label_cont isn't needed.  */
	  if (!have_else_clause_p
	      && TREE_OPERAND (expr, 2) != NULL_TREE
	      && gimple_seq_may_fallthru (seq))
	    {
	      gimple g;
	      label_cont = create_artificial_label (UNKNOWN_LOCATION);

	      g = gimple_build_goto (label_cont);

	      /* GIMPLE_COND's are very low level; they have embedded
		 gotos.  This particular embedded goto should not be marked
		 with the location of the original COND_EXPR, as it would
		 correspond to the COND_EXPR's condition, not the ELSE or the
		 THEN arms.  To avoid marking it with the wrong location, flag
		 it as "no location".  */
	      gimple_set_do_not_emit_location (g);

	      gimplify_seq_add_stmt (&seq, g);
	    }
	}
    }
  if (!have_else_clause_p)
    {
      gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
      have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
    }
  if (label_cont)
    gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));

  gimple_pop_condition (pre_p);
  gimple_seq_add_seq (pre_p, seq);

  if (ret == GS_ERROR)
    ; /* Do nothing.  */
  else if (have_then_clause_p || have_else_clause_p)
    ret = GS_ALL_DONE;
  else
    {
      /* Both arms are empty; replace the COND_EXPR with its predicate.  */
      expr = TREE_OPERAND (expr, 0);
      gimplify_stmt (&expr, pre_p);
    }

  *expr_p = NULL;
  return ret;
}
3356
3357 /* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
3358 to be marked addressable.
3359
3360 We cannot rely on such an expression being directly markable if a temporary
3361 has been created by the gimplification. In this case, we create another
3362 temporary and initialize it with a copy, which will become a store after we
3363 mark it addressable. This can happen if the front-end passed us something
3364 that it could not mark addressable yet, like a Fortran pass-by-reference
3365 parameter (int) floatvar. */
3366
3367 static void
3368 prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p)
3369 {
3370 while (handled_component_p (*expr_p))
3371 expr_p = &TREE_OPERAND (*expr_p, 0);
3372 if (is_gimple_reg (*expr_p))
3373 *expr_p = get_initialized_tmp_var (*expr_p, seq_p, NULL);
3374 }
3375
/* A subroutine of gimplify_modify_expr.  Replace a MODIFY_EXPR with
   a call to __builtin_memcpy.

   *EXPR_P is the assignment and SIZE the number of bytes to copy.  If
   WANT_VALUE is true, the call's return value (the destination pointer)
   is captured in a temporary and *EXPR_P becomes a dereference of it, so
   the expression still yields a value; otherwise *EXPR_P is cleared.
   Generated statements are appended to SEQ_P.  */

static enum gimplify_status
gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
				gimple_seq *seq_p)
{
  tree t, to, to_ptr, from, from_ptr;
  gimple gs;
  location_t loc = EXPR_LOCATION (*expr_p);

  to = TREE_OPERAND (*expr_p, 0);
  from = TREE_OPERAND (*expr_p, 1);

  /* Mark the RHS addressable.  Beware that it may not be possible to do so
     directly if a temporary has been created by the gimplification.  */
  prepare_gimple_addressable (&from, seq_p);

  mark_addressable (from);
  from_ptr = build_fold_addr_expr_loc (loc, from);
  gimplify_arg (&from_ptr, seq_p, loc);

  mark_addressable (to);
  to_ptr = build_fold_addr_expr_loc (loc, to);
  gimplify_arg (&to_ptr, seq_p, loc);

  t = builtin_decl_implicit (BUILT_IN_MEMCPY);

  gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);

  if (want_value)
    {
      /* tmp = memcpy() */
      t = create_tmp_var (TREE_TYPE (to_ptr), NULL);
      gimple_call_set_lhs (gs, t);
      gimplify_seq_add_stmt (seq_p, gs);

      /* memcpy returns the destination pointer; dereference it so the
	 whole expression evaluates to the stored object.  */
      *expr_p = build_simple_mem_ref (t);
      return GS_ALL_DONE;
    }

  gimplify_seq_add_stmt (seq_p, gs);
  *expr_p = NULL;
  return GS_ALL_DONE;
}
3421
3422 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
3423 a call to __builtin_memset. In this case we know that the RHS is
3424 a CONSTRUCTOR with an empty element list. */
3425
3426 static enum gimplify_status
3427 gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
3428 gimple_seq *seq_p)
3429 {
3430 tree t, from, to, to_ptr;
3431 gimple gs;
3432 location_t loc = EXPR_LOCATION (*expr_p);
3433
3434 /* Assert our assumptions, to abort instead of producing wrong code
3435 silently if they are not met. Beware that the RHS CONSTRUCTOR might
3436 not be immediately exposed. */
3437 from = TREE_OPERAND (*expr_p, 1);
3438 if (TREE_CODE (from) == WITH_SIZE_EXPR)
3439 from = TREE_OPERAND (from, 0);
3440
3441 gcc_assert (TREE_CODE (from) == CONSTRUCTOR
3442 && VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (from)));
3443
3444 /* Now proceed. */
3445 to = TREE_OPERAND (*expr_p, 0);
3446
3447 to_ptr = build_fold_addr_expr_loc (loc, to);
3448 gimplify_arg (&to_ptr, seq_p, loc);
3449 t = builtin_decl_implicit (BUILT_IN_MEMSET);
3450
3451 gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);
3452
3453 if (want_value)
3454 {
3455 /* tmp = memset() */
3456 t = create_tmp_var (TREE_TYPE (to_ptr), NULL);
3457 gimple_call_set_lhs (gs, t);
3458 gimplify_seq_add_stmt (seq_p, gs);
3459
3460 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
3461 return GS_ALL_DONE;
3462 }
3463
3464 gimplify_seq_add_stmt (seq_p, gs);
3465 *expr_p = NULL;
3466 return GS_ALL_DONE;
3467 }
3468
/* A subroutine of gimplify_init_ctor_preeval.  Called via walk_tree,
   determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
   assignment.  Return non-null if we detect a potential overlap.
   This struct carries the walk's per-invocation state describing
   the lhs.  */

struct gimplify_init_ctor_preeval_data
{
  /* The base decl of the lhs object.  May be NULL, in which case we
     have to assume the lhs is indirect.  */
  tree lhs_base_decl;

  /* The alias set of the lhs object, used for conflict checks against
     indirect constructor components.  */
  alias_set_type lhs_alias_set;
};
3482
/* walk_tree callback implementing the overlap check described above
   struct gimplify_init_ctor_preeval_data.  XDATA points to that struct.
   Returns the offending tree (stopping the walk) on potential overlap,
   NULL to continue walking.  */

static tree
gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
{
  struct gimplify_init_ctor_preeval_data *data
    = (struct gimplify_init_ctor_preeval_data *) xdata;
  tree t = *tp;

  /* If we find the base object, obviously we have overlap.  */
  if (data->lhs_base_decl == t)
    return t;

  /* If the constructor component is indirect, determine if we have a
     potential overlap with the lhs.  The only bits of information we
     have to go on at this point are addressability and alias sets.  */
  if ((INDIRECT_REF_P (t)
       || TREE_CODE (t) == MEM_REF)
      && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
      && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
    return t;

  /* If the constructor component is a call, determine if it can hide a
     potential overlap with the lhs through an INDIRECT_REF like above.
     ??? Ugh - this is completely broken.  In fact this whole analysis
     doesn't look conservative.  */
  if (TREE_CODE (t) == CALL_EXPR)
    {
      tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));

      /* Check each pointer-typed parameter of the callee for a possible
	 alias conflict with the lhs.  */
      for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
	if (POINTER_TYPE_P (TREE_VALUE (type))
	    && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
	    && alias_sets_conflict_p (data->lhs_alias_set,
				      get_alias_set
				        (TREE_TYPE (TREE_VALUE (type)))))
	  return t;
    }

  /* Types and decls have no interesting subtrees; prune the walk.  */
  if (IS_TYPE_OR_DECL_P (t))
    *walk_subtrees = 0;
  return NULL;
}
3524
/* A subroutine of gimplify_init_constructor.  Pre-evaluate EXPR,
   force values that overlap with the lhs (as described by *DATA)
   into temporaries.

   Side-effect statements are emitted to PRE_P/POST_P.  On a
   gimplification error, *EXPR_P is set to NULL.  */

static void
gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			    struct gimplify_init_ctor_preeval_data *data)
{
  enum gimplify_status one;

  /* If the value is constant, then there's nothing to pre-evaluate.  */
  if (TREE_CONSTANT (*expr_p))
    {
      /* Ensure it does not have side effects, it might contain a reference to
	 the object we're initializing.  */
      gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
      return;
    }

  /* If the type has non-trivial constructors, we can't pre-evaluate.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
    return;

  /* Recurse for nested constructors.  */
  if (TREE_CODE (*expr_p) == CONSTRUCTOR)
    {
      unsigned HOST_WIDE_INT ix;
      constructor_elt *ce;
      VEC(constructor_elt,gc) *v = CONSTRUCTOR_ELTS (*expr_p);

      FOR_EACH_VEC_ELT (constructor_elt, v, ix, ce)
	gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);

      return;
    }

  /* If this is a variable sized type, we must remember the size.  */
  maybe_with_size_expr (expr_p);

  /* Gimplify the constructor element to something appropriate for the rhs
     of a MODIFY_EXPR.  Given that we know the LHS is an aggregate, we know
     the gimplifier will consider this a store to memory.  Doing this
     gimplification now means that we won't have to deal with complicated
     language-specific trees, nor trees like SAVE_EXPR that can induce
     exponential search behavior.  */
  one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
  if (one == GS_ERROR)
    {
      *expr_p = NULL;
      return;
    }

  /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
     with the lhs, since "a = { .x=a }" doesn't make sense.  This will
     always be true for all scalars, since is_gimple_mem_rhs insists on a
     temporary variable for them.  */
  if (DECL_P (*expr_p))
    return;

  /* If this is of variable size, we have no choice but to assume it doesn't
     overlap since we can't make a temporary for it.  */
  if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
    return;

  /* Otherwise, we must search for overlap ...  */
  if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
    return;

  /* ... and if found, force the value into a temporary.  */
  *expr_p = get_formal_tmp_var (*expr_p, pre_p);
}
3596
/* A subroutine of gimplify_init_ctor_eval.  Create a loop for
   a RANGE_EXPR in a CONSTRUCTOR for an array.

      var = lower;
    loop_entry:
      object[var] = value;
      if (var == upper)
	goto loop_exit;
      var = var + 1;
      goto loop_entry;
    loop_exit:

   We increment var _after_ the loop exit check because we might otherwise
   fail if upper == TYPE_MAX_VALUE (type for upper).

   Note that we never have to deal with SAVE_EXPRs here, because this has
   already been taken care of for us, in gimplify_init_ctor_preeval().

   OBJECT is the array being initialized, [LOWER, UPPER] the inclusive
   index range, VALUE the value stored at each index, ARRAY_ELT_TYPE the
   element type, and CLEARED is true if the whole object has been zeroed
   first.  Statements are emitted to PRE_P.  */

static void gimplify_init_ctor_eval (tree, VEC(constructor_elt,gc) *,
				     gimple_seq *, bool);

static void
gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
			       tree value, tree array_elt_type,
			       gimple_seq *pre_p, bool cleared)
{
  tree loop_entry_label, loop_exit_label, fall_thru_label;
  tree var, var_type, cref, tmp;

  loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
  loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
  fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);

  /* Create and initialize the index variable.  */
  var_type = TREE_TYPE (upper);
  var = create_tmp_var (var_type, NULL);
  gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));

  /* Add the loop entry label.  */
  gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));

  /* Build the reference.  */
  cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
		 var, NULL_TREE, NULL_TREE);

  /* If we are a constructor, just call gimplify_init_ctor_eval to do
     the store.  Otherwise just assign value to the reference.  */

  if (TREE_CODE (value) == CONSTRUCTOR)
    /* NB we might have to call ourself recursively through
       gimplify_init_ctor_eval if the value is a constructor.  */
    gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
			     pre_p, cleared);
  else
    gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));

  /* We exit the loop when the index var is equal to the upper bound.  */
  gimplify_seq_add_stmt (pre_p,
			 gimple_build_cond (EQ_EXPR, var, upper,
					    loop_exit_label, fall_thru_label));

  gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));

  /* Otherwise, increment the index var...  */
  tmp = build2 (PLUS_EXPR, var_type, var,
		fold_convert (var_type, integer_one_node));
  gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));

  /* ...and jump back to the loop entry.  */
  gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));

  /* Add the loop exit label.  */
  gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
}
3671
3672 /* Return true if FDECL is accessing a field that is zero sized. */
3673
3674 static bool
3675 zero_sized_field_decl (const_tree fdecl)
3676 {
3677 if (TREE_CODE (fdecl) == FIELD_DECL && DECL_SIZE (fdecl)
3678 && integer_zerop (DECL_SIZE (fdecl)))
3679 return true;
3680 return false;
3681 }
3682
3683 /* Return true if TYPE is zero sized. */
3684
3685 static bool
3686 zero_sized_type (const_tree type)
3687 {
3688 if (AGGREGATE_TYPE_P (type) && TYPE_SIZE (type)
3689 && integer_zerop (TYPE_SIZE (type)))
3690 return true;
3691 return false;
3692 }
3693
/* A subroutine of gimplify_init_constructor.  Generate individual
   MODIFY_EXPRs for a CONSTRUCTOR.  OBJECT is the LHS against which the
   assignments should happen.  ELTS is the CONSTRUCTOR_ELTS of the
   CONSTRUCTOR.  CLEARED is true if the entire LHS object has been
   zeroed first.  Statements are emitted to PRE_P.  */

static void
gimplify_init_ctor_eval (tree object, VEC(constructor_elt,gc) *elts,
			 gimple_seq *pre_p, bool cleared)
{
  tree array_elt_type = NULL;
  unsigned HOST_WIDE_INT ix;
  tree purpose, value;

  /* Non-NULL array_elt_type marks that we are initializing an array.  */
  if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
    array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));

  FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
    {
      tree cref;

      /* NULL values are created above for gimplification errors.  */
      if (value == NULL)
	continue;

      /* Elements that are already zero can be skipped when the whole
	 object was cleared beforehand.  */
      if (cleared && initializer_zerop (value))
	continue;

      /* ??? Here's to hoping the front end fills in all of the indices,
	 so we don't have to figure out what's missing ourselves.  */
      gcc_assert (purpose);

      /* Skip zero-sized fields, unless value has side-effects.  This can
	 happen with calls to functions returning a zero-sized type, which
	 we shouldn't discard.  As a number of downstream passes don't
	 expect sets of zero-sized fields, we rely on the gimplification of
	 the MODIFY_EXPR we make below to drop the assignment statement.  */
      if (! TREE_SIDE_EFFECTS (value) && zero_sized_field_decl (purpose))
	continue;

      /* If we have a RANGE_EXPR, we have to build a loop to assign the
	 whole range.  */
      if (TREE_CODE (purpose) == RANGE_EXPR)
	{
	  tree lower = TREE_OPERAND (purpose, 0);
	  tree upper = TREE_OPERAND (purpose, 1);

	  /* If the lower bound is equal to upper, just treat it as if
	     upper was the index.  */
	  if (simple_cst_equal (lower, upper))
	    purpose = upper;
	  else
	    {
	      gimplify_init_ctor_eval_range (object, lower, upper, value,
					     array_elt_type, pre_p, cleared);
	      continue;
	    }
	}

      if (array_elt_type)
	{
	  /* Do not use bitsizetype for ARRAY_REF indices.  */
	  if (TYPE_DOMAIN (TREE_TYPE (object)))
	    purpose
	      = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
			      purpose);
	  cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
			 purpose, NULL_TREE, NULL_TREE);
	}
      else
	{
	  gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
	  cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
			 unshare_expr (object), purpose, NULL_TREE);
	}

      /* Nested aggregate constructors recurse; everything else becomes
	 an INIT_EXPR that is gimplified in place.  */
      if (TREE_CODE (value) == CONSTRUCTOR
	  && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
	gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
				 pre_p, cleared);
      else
	{
	  tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
	  gimplify_and_add (init, pre_p);
	  ggc_free (init);
	}
    }
}
3782
3783 /* Return the appropriate RHS predicate for this LHS. */
3784
3785 gimple_predicate
3786 rhs_predicate_for (tree lhs)
3787 {
3788 if (is_gimple_reg (lhs))
3789 return is_gimple_reg_rhs_or_call;
3790 else
3791 return is_gimple_mem_rhs_or_call;
3792 }
3793
/* Gimplify a C99 compound literal expression.  This just means adding
   the DECL_EXPR before the current statement and using its anonymous
   decl instead.  FALLBACK tells whether the caller needs an lvalue.
   Always returns GS_OK with *EXPR_P replaced by the decl.  */

static enum gimplify_status
gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p,
				fallback_t fallback)
{
  tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
  tree decl = DECL_EXPR_DECL (decl_s);
  /* Mark the decl as addressable if the compound literal
     expression is addressable now, otherwise it is marked too late
     after we gimplify the initialization expression.  */
  if (TREE_ADDRESSABLE (*expr_p))
    TREE_ADDRESSABLE (decl) = 1;

  /* Preliminarily mark non-addressed complex variables as eligible
     for promotion to gimple registers.  We'll transform their uses
     as we find them.  */
  if ((TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE
       || TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE)
      && !TREE_THIS_VOLATILE (decl)
      && !needs_to_live_in_memory (decl))
    DECL_GIMPLE_REG_P (decl) = 1;

  /* If the decl is not addressable, then it is being used in some
     expression or on the right hand side of a statement, and it can
     be put into a readonly data section.  */
  if (!TREE_ADDRESSABLE (decl) && (fallback & fb_lvalue) == 0)
    TREE_READONLY (decl) = 1;

  /* This decl isn't mentioned in the enclosing block, so add it to the
     list of temps.  FIXME it seems a bit of a kludge to say that
     anonymous artificial vars aren't pushed, but everything else is.  */
  if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
    gimple_add_tmp_var (decl);

  gimplify_and_add (decl_s, pre_p);
  *expr_p = decl;
  return GS_OK;
}
3835
/* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
   return a new CONSTRUCTOR if something changed.  ORIG_CTOR is shared
   (returned unmodified) when no element changes: the constructor is
   only copied, copy-on-write style, the first time a replacement is
   made.  */

static tree
optimize_compound_literals_in_ctor (tree orig_ctor)
{
  tree ctor = orig_ctor;
  VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (ctor);
  unsigned int idx, num = VEC_length (constructor_elt, elts);

  for (idx = 0; idx < num; idx++)
    {
      tree value = VEC_index (constructor_elt, elts, idx)->value;
      tree newval = value;
      /* Recurse into nested constructors.  */
      if (TREE_CODE (value) == CONSTRUCTOR)
	newval = optimize_compound_literals_in_ctor (value);
      else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
	{
	  tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
	  tree decl = DECL_EXPR_DECL (decl_s);
	  tree init = DECL_INITIAL (decl);

	  /* Replace a non-addressable compound literal by its
	     initializer, itself recursively optimized.  */
	  if (!TREE_ADDRESSABLE (value)
	      && !TREE_ADDRESSABLE (decl)
	      && init)
	    newval = optimize_compound_literals_in_ctor (init);
	}
      if (newval == value)
	continue;

      /* First change: make a private copy of the constructor and its
	 element vector before editing.  */
      if (ctor == orig_ctor)
	{
	  ctor = copy_node (orig_ctor);
	  CONSTRUCTOR_ELTS (ctor) = VEC_copy (constructor_elt, gc, elts);
	  elts = CONSTRUCTOR_ELTS (ctor);
	}
      VEC_index (constructor_elt, elts, idx)->value = newval;
    }
  return ctor;
}
3876
/* A subroutine of gimplify_modify_expr.  Break out elements of a
   CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.

   Note that we still need to clear any elements that don't have explicit
   initializers, so if not all elements are initialized we keep the
   original MODIFY_EXPR, we just remove all of the constructor elements.

   If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
   GS_ERROR if we would have to create a temporary when gimplifying
   this constructor.  Otherwise, return GS_OK.

   If NOTIFY_TEMP_CREATION is false, just do the gimplification.  */

static enum gimplify_status
gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			   bool want_value, bool notify_temp_creation)
{
  tree object, ctor, type;
  enum gimplify_status ret;
  VEC(constructor_elt,gc) *elts;

  gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);

  /* In probing mode (notify_temp_creation) we must not emit anything,
     so only gimplify the LHS when actually gimplifying.  */
  if (!notify_temp_creation)
    {
      ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
			   is_gimple_lvalue, fb_lvalue);
      if (ret == GS_ERROR)
	return ret;
    }

  object = TREE_OPERAND (*expr_p, 0);
  /* Fold un-addressed compound literals inside the initializer before
     inspecting it; this may replace the RHS with a fresh CONSTRUCTOR.  */
  ctor = TREE_OPERAND (*expr_p, 1) =
    optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
  type = TREE_TYPE (ctor);
  elts = CONSTRUCTOR_ELTS (ctor);
  ret = GS_ALL_DONE;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case ARRAY_TYPE:
      {
	struct gimplify_init_ctor_preeval_data preeval_data;
	HOST_WIDE_INT num_ctor_elements, num_nonzero_elements;
	bool cleared, complete_p, valid_const_initializer;

	/* Aggregate types must lower constructors to initialization of
	   individual elements.  The exception is that a CONSTRUCTOR node
	   with no elements indicates zero-initialization of the whole.  */
	if (VEC_empty (constructor_elt, elts))
	  {
	    if (notify_temp_creation)
	      return GS_OK;
	    break;
	  }

	/* Fetch information about the constructor to direct later processing.
	   We might want to make static versions of it in various cases, and
	   can only do so if it known to be a valid constant initializer.  */
	valid_const_initializer
	  = categorize_ctor_elements (ctor, &num_nonzero_elements,
				      &num_ctor_elements, &complete_p);

	/* If a const aggregate variable is being initialized, then it
	   should never be a lose to promote the variable to be static.  */
	if (valid_const_initializer
	    && num_nonzero_elements > 1
	    && TREE_READONLY (object)
	    && TREE_CODE (object) == VAR_DECL
	    && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object)))
	  {
	    /* Promotion would not create a temporary, but probing mode
	       still reports GS_ERROR so the caller keeps the original.  */
	    if (notify_temp_creation)
	      return GS_ERROR;
	    DECL_INITIAL (object) = ctor;
	    TREE_STATIC (object) = 1;
	    if (!DECL_NAME (object))
	      DECL_NAME (object) = create_tmp_var_name ("C");
	    walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);

	    /* ??? C++ doesn't automatically append a .<number> to the
	       assembler name, and even when it does, it looks a FE private
	       data structures to figure out what that number should be,
	       which are not set for this variable.  I suppose this is
	       important for local statics for inline functions, which aren't
	       "local" in the object file sense.  So in order to get a unique
	       TU-local symbol, we must invoke the lhd version now.  */
	    lhd_set_decl_assembler_name (object);

	    *expr_p = NULL_TREE;
	    break;
	  }

	/* If there are "lots" of initialized elements, even discounting
	   those that are not address constants (and thus *must* be
	   computed at runtime), then partition the constructor into
	   constant and non-constant parts.  Block copy the constant
	   parts in, then generate code for the non-constant parts.  */
	/* TODO.  There's code in cp/typeck.c to do this.  */

	if (int_size_in_bytes (TREE_TYPE (ctor)) < 0)
	  /* store_constructor will ignore the clearing of variable-sized
	     objects.  Initializers for such objects must explicitly set
	     every field that needs to be set.  */
	  cleared = false;
	else if (!complete_p)
	  /* If the constructor isn't complete, clear the whole object
	     beforehand.

	     ??? This ought not to be needed.  For any element not present
	     in the initializer, we should simply set them to zero.  Except
	     we'd need to *find* the elements that are not present, and that
	     requires trickery to avoid quadratic compile-time behavior in
	     large cases or excessive memory use in small cases.  */
	  cleared = true;
	else if (num_ctor_elements - num_nonzero_elements
		 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
		 && num_nonzero_elements < num_ctor_elements / 4)
	  /* If there are "lots" of zeros, it's more efficient to clear
	     the memory and then set the nonzero elements.  */
	  cleared = true;
	else
	  cleared = false;

	/* If there are "lots" of initialized elements, and all of them
	   are valid address constants, then the entire initializer can
	   be dropped to memory, and then memcpy'd out.  Don't do this
	   for sparse arrays, though, as it's more efficient to follow
	   the standard CONSTRUCTOR behavior of memset followed by
	   individual element initialization.  Also don't do this for small
	   all-zero initializers (which aren't big enough to merit
	   clearing), and don't try to make bitwise copies of
	   TREE_ADDRESSABLE types.  */
	if (valid_const_initializer
	    && !(cleared || num_nonzero_elements == 0)
	    && !TREE_ADDRESSABLE (type))
	  {
	    HOST_WIDE_INT size = int_size_in_bytes (type);
	    unsigned int align;

	    /* ??? We can still get unbounded array types, at least
	       from the C++ front end.  This seems wrong, but attempt
	       to work around it for now.  */
	    if (size < 0)
	      {
		size = int_size_in_bytes (TREE_TYPE (object));
		if (size >= 0)
		  TREE_TYPE (ctor) = type = TREE_TYPE (object);
	      }

	    /* Find the maximum alignment we can assume for the object.  */
	    /* ??? Make use of DECL_OFFSET_ALIGN.  */
	    if (DECL_P (object))
	      align = DECL_ALIGN (object);
	    else
	      align = TYPE_ALIGN (type);

	    if (size > 0
		&& num_nonzero_elements > 1
		&& !can_move_by_pieces (size, align))
	      {
		/* This path creates a static temporary, which is exactly
		   what probing mode must report.  */
		if (notify_temp_creation)
		  return GS_ERROR;

		walk_tree (&ctor, force_labels_r, NULL, NULL);
		ctor = tree_output_constant_def (ctor);
		if (!useless_type_conversion_p (type, TREE_TYPE (ctor)))
		  ctor = build1 (VIEW_CONVERT_EXPR, type, ctor);
		TREE_OPERAND (*expr_p, 1) = ctor;

		/* This is no longer an assignment of a CONSTRUCTOR, but
		   we still may have processing to do on the LHS.  So
		   pretend we didn't do anything here to let that happen.  */
		return GS_UNHANDLED;
	      }
	  }

	/* If the target is volatile, we have non-zero elements and more than
	   one field to assign, initialize the target from a temporary.  */
	if (TREE_THIS_VOLATILE (object)
	    && !TREE_ADDRESSABLE (type)
	    && num_nonzero_elements > 0
	    && VEC_length (constructor_elt, elts) > 1)
	  {
	    /* Build the ctor into a temporary, then copy the temporary to
	       OBJECT with a single (volatile) assignment.  */
	    tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type), NULL);
	    TREE_OPERAND (*expr_p, 0) = temp;
	    *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
			      *expr_p,
			      build2 (MODIFY_EXPR, void_type_node,
				      object, temp));
	    return GS_OK;
	  }

	if (notify_temp_creation)
	  return GS_OK;

	/* If there are nonzero elements and if needed, pre-evaluate to capture
	   elements overlapping with the lhs into temporaries.  We must do this
	   before clearing to fetch the values before they are zeroed-out.  */
	if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR)
	  {
	    preeval_data.lhs_base_decl = get_base_address (object);
	    if (!DECL_P (preeval_data.lhs_base_decl))
	      preeval_data.lhs_base_decl = NULL;
	    preeval_data.lhs_alias_set = get_alias_set (object);

	    gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
					pre_p, post_p, &preeval_data);
	  }

	if (cleared)
	  {
	    /* Zap the CONSTRUCTOR element list, which simplifies this case.
	       Note that we still have to gimplify, in order to handle the
	       case of variable sized types.  Avoid shared tree structures.  */
	    CONSTRUCTOR_ELTS (ctor) = NULL;
	    TREE_SIDE_EFFECTS (ctor) = 0;
	    object = unshare_expr (object);
	    gimplify_stmt (expr_p, pre_p);
	  }

	/* If we have not block cleared the object, or if there are nonzero
	   elements in the constructor, add assignments to the individual
	   scalar fields of the object.  */
	if (!cleared || num_nonzero_elements > 0)
	  gimplify_init_ctor_eval (object, elts, pre_p, cleared);

	*expr_p = NULL_TREE;
      }
      break;

    case COMPLEX_TYPE:
      {
	tree r, i;

	if (notify_temp_creation)
	  return GS_OK;

	/* Extract the real and imaginary parts out of the ctor.  */
	gcc_assert (VEC_length (constructor_elt, elts) == 2);
	r = VEC_index (constructor_elt, elts, 0)->value;
	i = VEC_index (constructor_elt, elts, 1)->value;
	/* A missing part reads as zero.  */
	if (r == NULL || i == NULL)
	  {
	    tree zero = build_zero_cst (TREE_TYPE (type));
	    if (r == NULL)
	      r = zero;
	    if (i == NULL)
	      i = zero;
	  }

	/* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
	   represent creation of a complex value.  */
	if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
	  {
	    ctor = build_complex (type, r, i);
	    TREE_OPERAND (*expr_p, 1) = ctor;
	  }
	else
	  {
	    ctor = build2 (COMPLEX_EXPR, type, r, i);
	    TREE_OPERAND (*expr_p, 1) = ctor;
	    ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
				 pre_p,
				 post_p,
				 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
				 fb_rvalue);
	  }
      }
      break;

    case VECTOR_TYPE:
      {
	unsigned HOST_WIDE_INT ix;
	constructor_elt *ce;

	if (notify_temp_creation)
	  return GS_OK;

	/* Go ahead and simplify constant constructors to VECTOR_CST.  */
	if (TREE_CONSTANT (ctor))
	  {
	    bool constant_p = true;
	    tree value;

	    /* Even when ctor is constant, it might contain non-*_CST
	       elements, such as addresses or trapping values like
	       1.0/0.0 - 1.0/0.0.  Such expressions don't belong
	       in VECTOR_CST nodes.  */
	    FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
	      if (!CONSTANT_CLASS_P (value))
		{
		  constant_p = false;
		  break;
		}

	    if (constant_p)
	      {
		TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
		break;
	      }

	    /* Don't reduce an initializer constant even if we can't
	       make a VECTOR_CST.  It won't do anything for us, and it'll
	       prevent us from representing it as a single constant.  */
	    if (initializer_constant_valid_p (ctor, type))
	      break;

	    TREE_CONSTANT (ctor) = 0;
	  }

	/* Vector types use CONSTRUCTOR all the way through gimple
	   compilation as a general initializer.  */
	FOR_EACH_VEC_ELT (constructor_elt, elts, ix, ce)
	  {
	    enum gimplify_status tret;
	    tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
				  fb_rvalue);
	    if (tret == GS_ERROR)
	      ret = GS_ERROR;
	  }
	/* A memory LHS cannot take a bare CONSTRUCTOR on the RHS; force
	   the ctor into a formal temporary first.  */
	if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
	  TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
      }
      break;

    default:
      /* So how did we get a CONSTRUCTOR for a scalar type?  */
      gcc_unreachable ();
    }

  if (ret == GS_ERROR)
    return GS_ERROR;
  else if (want_value)
    {
      *expr_p = object;
      return GS_OK;
    }
  else
    {
      /* If we have gimplified both sides of the initializer but have
	 not emitted an assignment, do so now.  */
      if (*expr_p)
	{
	  tree lhs = TREE_OPERAND (*expr_p, 0);
	  tree rhs = TREE_OPERAND (*expr_p, 1);
	  gimple init = gimple_build_assign (lhs, rhs);
	  gimplify_seq_add_stmt (pre_p, init);
	  *expr_p = NULL;
	}

      return GS_ALL_DONE;
    }
}
4233
4234 /* Given a pointer value OP0, return a simplified version of an
4235 indirection through OP0, or NULL_TREE if no simplification is
4236 possible. Note that the resulting type may be different from
4237 the type pointed to in the sense that it is still compatible
4238 from the langhooks point of view. */
4239
4240 tree
4241 gimple_fold_indirect_ref (tree t)
4242 {
4243 tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
4244 tree sub = t;
4245 tree subtype;
4246
4247 STRIP_NOPS (sub);
4248 subtype = TREE_TYPE (sub);
4249 if (!POINTER_TYPE_P (subtype))
4250 return NULL_TREE;
4251
4252 if (TREE_CODE (sub) == ADDR_EXPR)
4253 {
4254 tree op = TREE_OPERAND (sub, 0);
4255 tree optype = TREE_TYPE (op);
4256 /* *&p => p */
4257 if (useless_type_conversion_p (type, optype))
4258 return op;
4259
4260 /* *(foo *)&fooarray => fooarray[0] */
4261 if (TREE_CODE (optype) == ARRAY_TYPE
4262 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
4263 && useless_type_conversion_p (type, TREE_TYPE (optype)))
4264 {
4265 tree type_domain = TYPE_DOMAIN (optype);
4266 tree min_val = size_zero_node;
4267 if (type_domain && TYPE_MIN_VALUE (type_domain))
4268 min_val = TYPE_MIN_VALUE (type_domain);
4269 if (TREE_CODE (min_val) == INTEGER_CST)
4270 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
4271 }
4272 /* *(foo *)&complexfoo => __real__ complexfoo */
4273 else if (TREE_CODE (optype) == COMPLEX_TYPE
4274 && useless_type_conversion_p (type, TREE_TYPE (optype)))
4275 return fold_build1 (REALPART_EXPR, type, op);
4276 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
4277 else if (TREE_CODE (optype) == VECTOR_TYPE
4278 && useless_type_conversion_p (type, TREE_TYPE (optype)))
4279 {
4280 tree part_width = TYPE_SIZE (type);
4281 tree index = bitsize_int (0);
4282 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
4283 }
4284 }
4285
4286 /* *(p + CST) -> ... */
4287 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
4288 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
4289 {
4290 tree addr = TREE_OPERAND (sub, 0);
4291 tree off = TREE_OPERAND (sub, 1);
4292 tree addrtype;
4293
4294 STRIP_NOPS (addr);
4295 addrtype = TREE_TYPE (addr);
4296
4297 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
4298 if (TREE_CODE (addr) == ADDR_EXPR
4299 && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
4300 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
4301 && host_integerp (off, 1))
4302 {
4303 unsigned HOST_WIDE_INT offset = tree_low_cst (off, 1);
4304 tree part_width = TYPE_SIZE (type);
4305 unsigned HOST_WIDE_INT part_widthi
4306 = tree_low_cst (part_width, 0) / BITS_PER_UNIT;
4307 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
4308 tree index = bitsize_int (indexi);
4309 if (offset / part_widthi
4310 <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
4311 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
4312 part_width, index);
4313 }
4314
4315 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
4316 if (TREE_CODE (addr) == ADDR_EXPR
4317 && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
4318 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
4319 {
4320 tree size = TYPE_SIZE_UNIT (type);
4321 if (tree_int_cst_equal (size, off))
4322 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
4323 }
4324
4325 /* *(p + CST) -> MEM_REF <p, CST>. */
4326 if (TREE_CODE (addr) != ADDR_EXPR
4327 || DECL_P (TREE_OPERAND (addr, 0)))
4328 return fold_build2 (MEM_REF, type,
4329 addr,
4330 build_int_cst_wide (ptype,
4331 TREE_INT_CST_LOW (off),
4332 TREE_INT_CST_HIGH (off)));
4333 }
4334
4335 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
4336 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
4337 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
4338 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
4339 {
4340 tree type_domain;
4341 tree min_val = size_zero_node;
4342 tree osub = sub;
4343 sub = gimple_fold_indirect_ref (sub);
4344 if (! sub)
4345 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
4346 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
4347 if (type_domain && TYPE_MIN_VALUE (type_domain))
4348 min_val = TYPE_MIN_VALUE (type_domain);
4349 if (TREE_CODE (min_val) == INTEGER_CST)
4350 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
4351 }
4352
4353 return NULL_TREE;
4354 }
4355
4356 /* Given a pointer value OP0, return a simplified version of an
4357 indirection through OP0, or NULL_TREE if no simplification is
4358 possible. This may only be applied to a rhs of an expression.
4359 Note that the resulting type may be different from the type pointed
4360 to in the sense that it is still compatible from the langhooks
4361 point of view. */
4362
4363 static tree
4364 gimple_fold_indirect_ref_rhs (tree t)
4365 {
4366 return gimple_fold_indirect_ref (t);
4367 }
4368
/* Subroutine of gimplify_modify_expr to do simplifications of
   MODIFY_EXPRs based on the code of the RHS.  We loop for as long as
   something changes.

   *EXPR_P is the whole assignment; FROM_P and TO_P point at its RHS and
   LHS operands.  PRE_P/POST_P collect emitted side effects.  WANT_VALUE
   is true when the caller uses the value of the assignment.  Returns
   GS_UNHANDLED when no simplification applied.  */

static enum gimplify_status
gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
			  gimple_seq *pre_p, gimple_seq *post_p,
			  bool want_value)
{
  enum gimplify_status ret = GS_UNHANDLED;
  bool changed;

  /* Iterate to a fixpoint: each transformation may expose another.  */
  do
    {
      changed = false;
      switch (TREE_CODE (*from_p))
	{
	case VAR_DECL:
	  /* If we're assigning from a read-only variable initialized with
	     a constructor, do the direct assignment from the constructor,
	     but only if neither source nor target are volatile since this
	     latter assignment might end up being done on a per-field basis.  */
	  if (DECL_INITIAL (*from_p)
	      && TREE_READONLY (*from_p)
	      && !TREE_THIS_VOLATILE (*from_p)
	      && !TREE_THIS_VOLATILE (*to_p)
	      && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR)
	    {
	      tree old_from = *from_p;
	      enum gimplify_status subret;

	      /* Move the constructor into the RHS.  */
	      *from_p = unshare_expr (DECL_INITIAL (*from_p));

	      /* Let's see if gimplify_init_constructor will need to put
		 it in memory.  */
	      subret = gimplify_init_constructor (expr_p, NULL, NULL,
						  false, true);
	      if (subret == GS_ERROR)
		{
		  /* If so, revert the change.  */
		  *from_p = old_from;
		}
	      else
		{
		  ret = GS_OK;
		  changed = true;
		}
	    }
	  break;
	case INDIRECT_REF:
	  {
	    /* If we have code like

	       *(const A*)(A*)&x

	       where the type of "x" is a (possibly cv-qualified variant
	       of "A"), treat the entire expression as identical to "x".
	       This kind of code arises in C++ when an object is bound
	       to a const reference, and if "x" is a TARGET_EXPR we want
	       to take advantage of the optimization below.  */
	    bool volatile_p = TREE_THIS_VOLATILE (*from_p);
	    tree t = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
	    if (t)
	      {
		/* Keep the original volatility on the folded reference;
		   a bare decl cannot carry it, so re-wrap in a MEM_REF.  */
		if (TREE_THIS_VOLATILE (t) != volatile_p)
		  {
		    if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
		      t = build_simple_mem_ref_loc (EXPR_LOCATION (*from_p),
						    build_fold_addr_expr (t));
		    if (REFERENCE_CLASS_P (t))
		      TREE_THIS_VOLATILE (t) = volatile_p;
		  }
		*from_p = t;
		ret = GS_OK;
		changed = true;
	      }
	    break;
	  }

	case TARGET_EXPR:
	  {
	    /* If we are initializing something from a TARGET_EXPR, strip the
	       TARGET_EXPR and initialize it directly, if possible.  This can't
	       be done if the initializer is void, since that implies that the
	       temporary is set in some non-trivial way.

	       ??? What about code that pulls out the temp and uses it
	       elsewhere?  I think that such code never uses the TARGET_EXPR as
	       an initializer.  If I'm wrong, we'll die because the temp won't
	       have any RTL.  In that case, I guess we'll need to replace
	       references somehow.  */
	    tree init = TARGET_EXPR_INITIAL (*from_p);

	    if (init
		&& !VOID_TYPE_P (TREE_TYPE (init)))
	      {
		*from_p = init;
		ret = GS_OK;
		changed = true;
	      }
	  }
	  break;

	case COMPOUND_EXPR:
	  /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
	     caught.  */
	  gimplify_compound_expr (from_p, pre_p, true);
	  ret = GS_OK;
	  changed = true;
	  break;

	case CONSTRUCTOR:
	  /* If we already made some changes, let the front end have a
	     crack at this before we break it down.  */
	  if (ret != GS_UNHANDLED)
	    break;
	  /* If we're initializing from a CONSTRUCTOR, break this into
	     individual MODIFY_EXPRs.  */
	  return gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
					    false);

	case COND_EXPR:
	  /* If we're assigning to a non-register type, push the assignment
	     down into the branches.  This is mandatory for ADDRESSABLE types,
	     since we cannot generate temporaries for such, but it saves a
	     copy in other cases as well.  */
	  if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
	    {
	      /* This code should mirror the code in gimplify_cond_expr.  */
	      enum tree_code code = TREE_CODE (*expr_p);
	      tree cond = *from_p;
	      tree result = *to_p;

	      ret = gimplify_expr (&result, pre_p, post_p,
				   is_gimple_lvalue, fb_lvalue);
	      if (ret != GS_ERROR)
		ret = GS_OK;

	      /* Rewrite each non-void arm into "result = arm".  */
	      if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
		TREE_OPERAND (cond, 1)
		  = build2 (code, void_type_node, result,
			    TREE_OPERAND (cond, 1));
	      if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
		TREE_OPERAND (cond, 2)
		  = build2 (code, void_type_node, unshare_expr (result),
			    TREE_OPERAND (cond, 2));

	      TREE_TYPE (cond) = void_type_node;
	      recalculate_side_effects (cond);

	      if (want_value)
		{
		  gimplify_and_add (cond, pre_p);
		  *expr_p = unshare_expr (result);
		}
	      else
		*expr_p = cond;
	      return ret;
	    }
	  break;

	case CALL_EXPR:
	  /* For calls that return in memory, give *to_p as the CALL_EXPR's
	     return slot so that we don't generate a temporary.  */
	  if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
	      && aggregate_value_p (*from_p, *from_p))
	    {
	      bool use_target;

	      if (!(rhs_predicate_for (*to_p))(*from_p))
		/* If we need a temporary, *to_p isn't accurate.  */
		use_target = false;
	      /* It's OK to use the return slot directly unless it's an NRV.  */
	      else if (TREE_CODE (*to_p) == RESULT_DECL
		       && DECL_NAME (*to_p) == NULL_TREE
		       && needs_to_live_in_memory (*to_p))
		use_target = true;
	      else if (is_gimple_reg_type (TREE_TYPE (*to_p))
		       || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
		/* Don't force regs into memory.  */
		use_target = false;
	      else if (TREE_CODE (*expr_p) == INIT_EXPR)
		/* It's OK to use the target directly if it's being
		   initialized.  */
		use_target = true;
	      else if (variably_modified_type_p (TREE_TYPE (*to_p), NULL_TREE))
		/* Always use the target and thus RSO for variable-sized types.
		   GIMPLE cannot deal with a variable-sized assignment
		   embedded in a call statement.  */
		use_target = true;
	      else if (TREE_CODE (*to_p) != SSA_NAME
		      && (!is_gimple_variable (*to_p)
			  || needs_to_live_in_memory (*to_p)))
		/* Don't use the original target if it's already addressable;
		   if its address escapes, and the called function uses the
		   NRV optimization, a conforming program could see *to_p
		   change before the called function returns; see c++/19317.
		   When optimizing, the return_slot pass marks more functions
		   as safe after we have escape info.  */
		use_target = false;
	      else
		use_target = true;

	      if (use_target)
		{
		  CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
		  mark_addressable (*to_p);
		}
	    }
	  break;

	case WITH_SIZE_EXPR:
	  /* Likewise for calls that return an aggregate of non-constant size,
	     since we would not be able to generate a temporary at all.  */
	  if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR)
	    {
	      *from_p = TREE_OPERAND (*from_p, 0);
	      /* We don't change ret in this case because the
		 WITH_SIZE_EXPR might have been added in
		 gimplify_modify_expr, so returning GS_OK would lead to an
		 infinite loop.  */
	      changed = true;
	    }
	  break;

	  /* If we're initializing from a container, push the initialization
	     inside it.  */
	case CLEANUP_POINT_EXPR:
	case BIND_EXPR:
	case STATEMENT_LIST:
	  {
	    tree wrap = *from_p;
	    tree t;

	    ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
				 fb_lvalue);
	    if (ret != GS_ERROR)
	      ret = GS_OK;

	    /* Make the assignment the innermost expression of the wrapper.  */
	    t = voidify_wrapper_expr (wrap, *expr_p);
	    gcc_assert (t == *expr_p);

	    if (want_value)
	      {
		gimplify_and_add (wrap, pre_p);
		*expr_p = unshare_expr (*to_p);
	      }
	    else
	      *expr_p = wrap;
	    return GS_OK;
	  }

	case COMPOUND_LITERAL_EXPR:
	  {
	    tree complit = TREE_OPERAND (*expr_p, 1);
	    tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
	    tree decl = DECL_EXPR_DECL (decl_s);
	    tree init = DECL_INITIAL (decl);

	    /* struct T x = (struct T) { 0, 1, 2 } can be optimized
	       into struct T x = { 0, 1, 2 } if the address of the
	       compound literal has never been taken.  */
	    if (!TREE_ADDRESSABLE (complit)
		&& !TREE_ADDRESSABLE (decl)
		&& init)
	      {
		*expr_p = copy_node (*expr_p);
		TREE_OPERAND (*expr_p, 1) = init;
		return GS_OK;
	      }
	  }
	  /* FALLTHRU */

	default:
	  break;
	}
    }
  while (changed);

  return ret;
}
4650
4651
4652 /* Return true if T looks like a valid GIMPLE statement. */
4653
4654 static bool
4655 is_gimple_stmt (tree t)
4656 {
4657 const enum tree_code code = TREE_CODE (t);
4658
4659 switch (code)
4660 {
4661 case NOP_EXPR:
4662 /* The only valid NOP_EXPR is the empty statement. */
4663 return IS_EMPTY_STMT (t);
4664
4665 case BIND_EXPR:
4666 case COND_EXPR:
4667 /* These are only valid if they're void. */
4668 return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));
4669
4670 case SWITCH_EXPR:
4671 case GOTO_EXPR:
4672 case RETURN_EXPR:
4673 case LABEL_EXPR:
4674 case CASE_LABEL_EXPR:
4675 case TRY_CATCH_EXPR:
4676 case TRY_FINALLY_EXPR:
4677 case EH_FILTER_EXPR:
4678 case CATCH_EXPR:
4679 case ASM_EXPR:
4680 case STATEMENT_LIST:
4681 case OMP_PARALLEL:
4682 case OMP_FOR:
4683 case OMP_SECTIONS:
4684 case OMP_SECTION:
4685 case OMP_SINGLE:
4686 case OMP_MASTER:
4687 case OMP_ORDERED:
4688 case OMP_CRITICAL:
4689 case OMP_TASK:
4690 /* These are always void. */
4691 return true;
4692
4693 case CALL_EXPR:
4694 case MODIFY_EXPR:
4695 case PREDICT_EXPR:
4696 /* These are valid regardless of their type. */
4697 return true;
4698
4699 default:
4700 return false;
4701 }
4702 }
4703
4704
4705 /* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
4706 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with
4707 DECL_GIMPLE_REG_P set.
4708
4709 IMPORTANT NOTE: This promotion is performed by introducing a load of the
4710 other, unmodified part of the complex object just before the total store.
4711 As a consequence, if the object is still uninitialized, an undefined value
4712 will be loaded into a register, which may result in a spurious exception
4713 if the register is floating-point and the value happens to be a signaling
4714 NaN for example. Then the fully-fledged complex operations lowering pass
4715 followed by a DCE pass are necessary in order to fix things up. */
4716
4717 static enum gimplify_status
4718 gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
4719 bool want_value)
4720 {
4721 enum tree_code code, ocode;
4722 tree lhs, rhs, new_rhs, other, realpart, imagpart;
4723
4724 lhs = TREE_OPERAND (*expr_p, 0);
4725 rhs = TREE_OPERAND (*expr_p, 1);
4726 code = TREE_CODE (lhs);
4727 lhs = TREE_OPERAND (lhs, 0);
4728
4729 ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
4730 other = build1 (ocode, TREE_TYPE (rhs), lhs);
4731 TREE_NO_WARNING (other) = 1;
4732 other = get_formal_tmp_var (other, pre_p);
4733
4734 realpart = code == REALPART_EXPR ? rhs : other;
4735 imagpart = code == REALPART_EXPR ? other : rhs;
4736
4737 if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
4738 new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
4739 else
4740 new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
4741
4742 gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
4743 *expr_p = (want_value) ? rhs : NULL_TREE;
4744
4745 return GS_ALL_DONE;
4746 }
4747
4748 /* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
4749
4750 modify_expr
4751 : varname '=' rhs
4752 | '*' ID '=' rhs
4753
4754 PRE_P points to the list where side effects that must happen before
4755 *EXPR_P should be stored.
4756
4757 POST_P points to the list where side effects that must happen after
4758 *EXPR_P should be stored.
4759
4760 WANT_VALUE is nonzero iff we want to use the value of this expression
4761 in another expression. */
4762
4763 static enum gimplify_status
4764 gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
4765 bool want_value)
4766 {
4767 tree *from_p = &TREE_OPERAND (*expr_p, 1);
4768 tree *to_p = &TREE_OPERAND (*expr_p, 0);
4769 enum gimplify_status ret = GS_UNHANDLED;
4770 gimple assign;
4771 location_t loc = EXPR_LOCATION (*expr_p);
4772
4773 gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
4774 || TREE_CODE (*expr_p) == INIT_EXPR);
4775
4776 /* Trying to simplify a clobber using normal logic doesn't work,
4777 so handle it here. */
4778 if (TREE_CLOBBER_P (*from_p))
4779 {
4780 gcc_assert (!want_value && TREE_CODE (*to_p) == VAR_DECL);
4781 gimplify_seq_add_stmt (pre_p, gimple_build_assign (*to_p, *from_p));
4782 *expr_p = NULL;
4783 return GS_ALL_DONE;
4784 }
4785
4786 /* Insert pointer conversions required by the middle-end that are not
4787 required by the frontend. This fixes middle-end type checking for
4788 for example gcc.dg/redecl-6.c. */
4789 if (POINTER_TYPE_P (TREE_TYPE (*to_p)))
4790 {
4791 STRIP_USELESS_TYPE_CONVERSION (*from_p);
4792 if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
4793 *from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
4794 }
4795
4796 /* See if any simplifications can be done based on what the RHS is. */
4797 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
4798 want_value);
4799 if (ret != GS_UNHANDLED)
4800 return ret;
4801
4802 /* For zero sized types only gimplify the left hand side and right hand
4803 side as statements and throw away the assignment. Do this after
4804 gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
4805 types properly. */
4806 if (zero_sized_type (TREE_TYPE (*from_p)) && !want_value)
4807 {
4808 gimplify_stmt (from_p, pre_p);
4809 gimplify_stmt (to_p, pre_p);
4810 *expr_p = NULL_TREE;
4811 return GS_ALL_DONE;
4812 }
4813
4814 /* If the value being copied is of variable width, compute the length
4815 of the copy into a WITH_SIZE_EXPR. Note that we need to do this
4816 before gimplifying any of the operands so that we can resolve any
4817 PLACEHOLDER_EXPRs in the size. Also note that the RTL expander uses
4818 the size of the expression to be copied, not of the destination, so
4819 that is what we must do here. */
4820 maybe_with_size_expr (from_p);
4821
4822 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
4823 if (ret == GS_ERROR)
4824 return ret;
4825
4826 /* As a special case, we have to temporarily allow for assignments
4827 with a CALL_EXPR on the RHS. Since in GIMPLE a function call is
4828 a toplevel statement, when gimplifying the GENERIC expression
4829 MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
4830 GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.
4831
4832 Instead, we need to create the tuple GIMPLE_CALL <a, foo>. To
4833 prevent gimplify_expr from trying to create a new temporary for
4834 foo's LHS, we tell it that it should only gimplify until it
4835 reaches the CALL_EXPR. On return from gimplify_expr, the newly
4836 created GIMPLE_CALL <foo> will be the last statement in *PRE_P
4837 and all we need to do here is set 'a' to be its LHS. */
4838 ret = gimplify_expr (from_p, pre_p, post_p, rhs_predicate_for (*to_p),
4839 fb_rvalue);
4840 if (ret == GS_ERROR)
4841 return ret;
4842
4843 /* Now see if the above changed *from_p to something we handle specially. */
4844 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
4845 want_value);
4846 if (ret != GS_UNHANDLED)
4847 return ret;
4848
4849 /* If we've got a variable sized assignment between two lvalues (i.e. does
4850 not involve a call), then we can make things a bit more straightforward
4851 by converting the assignment to memcpy or memset. */
4852 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
4853 {
4854 tree from = TREE_OPERAND (*from_p, 0);
4855 tree size = TREE_OPERAND (*from_p, 1);
4856
4857 if (TREE_CODE (from) == CONSTRUCTOR)
4858 return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);
4859
4860 if (is_gimple_addressable (from))
4861 {
4862 *from_p = from;
4863 return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
4864 pre_p);
4865 }
4866 }
4867
4868 /* Transform partial stores to non-addressable complex variables into
4869 total stores. This allows us to use real instead of virtual operands
4870 for these variables, which improves optimization. */
4871 if ((TREE_CODE (*to_p) == REALPART_EXPR
4872 || TREE_CODE (*to_p) == IMAGPART_EXPR)
4873 && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
4874 return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);
4875
4876 /* Try to alleviate the effects of the gimplification creating artificial
4877 temporaries (see for example is_gimple_reg_rhs) on the debug info. */
4878 if (!gimplify_ctxp->into_ssa
4879 && TREE_CODE (*from_p) == VAR_DECL
4880 && DECL_IGNORED_P (*from_p)
4881 && DECL_P (*to_p)
4882 && !DECL_IGNORED_P (*to_p))
4883 {
4884 if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
4885 DECL_NAME (*from_p)
4886 = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
4887 DECL_DEBUG_EXPR_IS_FROM (*from_p) = 1;
4888 SET_DECL_DEBUG_EXPR (*from_p, *to_p);
4889 }
4890
4891 if (want_value && TREE_THIS_VOLATILE (*to_p))
4892 *from_p = get_initialized_tmp_var (*from_p, pre_p, post_p);
4893
4894 if (TREE_CODE (*from_p) == CALL_EXPR)
4895 {
4896 /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
4897 instead of a GIMPLE_ASSIGN. */
4898 tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*from_p));
4899 CALL_EXPR_FN (*from_p) = TREE_OPERAND (CALL_EXPR_FN (*from_p), 0);
4900 STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p));
4901 assign = gimple_build_call_from_tree (*from_p);
4902 gimple_call_set_fntype (assign, TREE_TYPE (fnptrtype));
4903 if (!gimple_call_noreturn_p (assign))
4904 gimple_call_set_lhs (assign, *to_p);
4905 }
4906 else
4907 {
4908 assign = gimple_build_assign (*to_p, *from_p);
4909 gimple_set_location (assign, EXPR_LOCATION (*expr_p));
4910 }
4911
4912 gimplify_seq_add_stmt (pre_p, assign);
4913
4914 if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
4915 {
4916 /* If we've somehow already got an SSA_NAME on the LHS, then
4917 we've probably modified it twice. Not good. */
4918 gcc_assert (TREE_CODE (*to_p) != SSA_NAME);
4919 *to_p = make_ssa_name (*to_p, assign);
4920 gimple_set_lhs (assign, *to_p);
4921 }
4922
4923 if (want_value)
4924 {
4925 *expr_p = TREE_THIS_VOLATILE (*to_p) ? *from_p : unshare_expr (*to_p);
4926 return GS_OK;
4927 }
4928 else
4929 *expr_p = NULL;
4930
4931 return GS_ALL_DONE;
4932 }
4933
4934 /* Gimplify a comparison between two variable-sized objects. Do this
4935 with a call to BUILT_IN_MEMCMP. */
4936
4937 static enum gimplify_status
4938 gimplify_variable_sized_compare (tree *expr_p)
4939 {
4940 location_t loc = EXPR_LOCATION (*expr_p);
4941 tree op0 = TREE_OPERAND (*expr_p, 0);
4942 tree op1 = TREE_OPERAND (*expr_p, 1);
4943 tree t, arg, dest, src, expr;
4944
4945 arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
4946 arg = unshare_expr (arg);
4947 arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
4948 src = build_fold_addr_expr_loc (loc, op1);
4949 dest = build_fold_addr_expr_loc (loc, op0);
4950 t = builtin_decl_implicit (BUILT_IN_MEMCMP);
4951 t = build_call_expr_loc (loc, t, 3, dest, src, arg);
4952
4953 expr
4954 = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
4955 SET_EXPR_LOCATION (expr, loc);
4956 *expr_p = expr;
4957
4958 return GS_OK;
4959 }
4960
4961 /* Gimplify a comparison between two aggregate objects of integral scalar
4962 mode as a comparison between the bitwise equivalent scalar values. */
4963
4964 static enum gimplify_status
4965 gimplify_scalar_mode_aggregate_compare (tree *expr_p)
4966 {
4967 location_t loc = EXPR_LOCATION (*expr_p);
4968 tree op0 = TREE_OPERAND (*expr_p, 0);
4969 tree op1 = TREE_OPERAND (*expr_p, 1);
4970
4971 tree type = TREE_TYPE (op0);
4972 tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
4973
4974 op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
4975 op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);
4976
4977 *expr_p
4978 = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
4979
4980 return GS_OK;
4981 }
4982
4983 /* Gimplify an expression sequence. This function gimplifies each
4984 expression and rewrites the original expression with the last
4985 expression of the sequence in GIMPLE form.
4986
4987 PRE_P points to the list where the side effects for all the
4988 expressions in the sequence will be emitted.
4989
4990 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
4991
4992 static enum gimplify_status
4993 gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
4994 {
4995 tree t = *expr_p;
4996
4997 do
4998 {
4999 tree *sub_p = &TREE_OPERAND (t, 0);
5000
5001 if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
5002 gimplify_compound_expr (sub_p, pre_p, false);
5003 else
5004 gimplify_stmt (sub_p, pre_p);
5005
5006 t = TREE_OPERAND (t, 1);
5007 }
5008 while (TREE_CODE (t) == COMPOUND_EXPR);
5009
5010 *expr_p = t;
5011 if (want_value)
5012 return GS_OK;
5013 else
5014 {
5015 gimplify_stmt (expr_p, pre_p);
5016 return GS_ALL_DONE;
5017 }
5018 }
5019
5020 /* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
5021 gimplify. After gimplification, EXPR_P will point to a new temporary
5022 that holds the original value of the SAVE_EXPR node.
5023
5024 PRE_P points to the list where side effects that must happen before
5025 *EXPR_P should be stored. */
5026
5027 static enum gimplify_status
5028 gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5029 {
5030 enum gimplify_status ret = GS_ALL_DONE;
5031 tree val;
5032
5033 gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
5034 val = TREE_OPERAND (*expr_p, 0);
5035
5036 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
5037 if (!SAVE_EXPR_RESOLVED_P (*expr_p))
5038 {
5039 /* The operand may be a void-valued expression such as SAVE_EXPRs
5040 generated by the Java frontend for class initialization. It is
5041 being executed only for its side-effects. */
5042 if (TREE_TYPE (val) == void_type_node)
5043 {
5044 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
5045 is_gimple_stmt, fb_none);
5046 val = NULL;
5047 }
5048 else
5049 val = get_initialized_tmp_var (val, pre_p, post_p);
5050
5051 TREE_OPERAND (*expr_p, 0) = val;
5052 SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
5053 }
5054
5055 *expr_p = val;
5056
5057 return ret;
5058 }
5059
/* Rewrite the ADDR_EXPR node pointed to by EXPR_P

	unary_expr
		: ...
		| '&' varname
		...

   PRE_P points to the list where side effects that must happen before
   *EXPR_P should be stored.

   POST_P points to the list where side effects that must happen after
   *EXPR_P should be stored.

   Returns GS_OK when *EXPR_P was rewritten, GS_ERROR if gimplifying
   the operand failed, or whatever status gimplify_expr produced for
   the default case.  */

static enum gimplify_status
gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree expr = *expr_p;
  tree op0 = TREE_OPERAND (expr, 0);
  enum gimplify_status ret;
  location_t loc = EXPR_LOCATION (*expr_p);

  switch (TREE_CODE (op0))
    {
    case INDIRECT_REF:
    do_indirect_ref:
      /* Check if we are dealing with an expression of the form '&*ptr'.
	 While the front end folds away '&*ptr' into 'ptr', these
	 expressions may be generated internally by the compiler (e.g.,
	 builtins like __builtin_va_end).  */
      /* Caution: the silent array decomposition semantics we allow for
	 ADDR_EXPR means we can't always discard the pair.  */
      /* Gimplification of the ADDR_EXPR operand may drop
	 cv-qualification conversions, so make sure we add them if
	 needed.  */
      {
	tree op00 = TREE_OPERAND (op0, 0);
	tree t_expr = TREE_TYPE (expr);
	tree t_op00 = TREE_TYPE (op00);

	/* '&*ptr' collapses to 'ptr', converted to the ADDR_EXPR's
	   type when the pointer types differ meaningfully.  */
	if (!useless_type_conversion_p (t_expr, t_op00))
	  op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
	*expr_p = op00;
	ret = GS_OK;
      }
      break;

    case VIEW_CONVERT_EXPR:
      /* Take the address of our operand and then convert it to the type of
	 this ADDR_EXPR.

	 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
	 all clear.  The impact of this transformation is even less clear.  */

      /* If the operand is a useless conversion, look through it.  Doing so
	 guarantees that the ADDR_EXPR and its operand will remain of the
	 same type.  */
      if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
	op0 = TREE_OPERAND (op0, 0);

      /* &VIEW_CONVERT_EXPR<T>(x) becomes (T *) &x.  */
      *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
				  build_fold_addr_expr_loc (loc,
							TREE_OPERAND (op0, 0)));
      ret = GS_OK;
      break;

    default:
      /* We use fb_either here because the C frontend sometimes takes
	 the address of a call that returns a struct; see
	 gcc.dg/c99-array-lval-1.c.  The gimplifier will correctly make
	 the implied temporary explicit.  */

      /* Make the operand addressable.  */
      ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
			   is_gimple_addressable, fb_either);
      if (ret == GS_ERROR)
	break;

      /* Then mark it.  Beware that it may not be possible to do so directly
	 if a temporary has been created by the gimplification.  */
      prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p);

      op0 = TREE_OPERAND (expr, 0);

      /* For various reasons, the gimplification of the expression
	 may have made a new INDIRECT_REF.  */
      if (TREE_CODE (op0) == INDIRECT_REF)
	goto do_indirect_ref;

      mark_addressable (TREE_OPERAND (expr, 0));

      /* The FEs may end up building ADDR_EXPRs early on a decl with
	 an incomplete type.  Re-build ADDR_EXPRs in canonical form
	 here.  */
      if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr))))
	*expr_p = build_fold_addr_expr (op0);

      /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly.  */
      recompute_tree_invariant_for_addr_expr (*expr_p);

      /* If we re-built the ADDR_EXPR add a conversion to the original type
	 if required.  */
      if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
	*expr_p = fold_convert (TREE_TYPE (expr), *expr_p);

      break;
    }

  return ret;
}
5169
/* Gimplify the operands of an ASM_EXPR.  Input operands should be a gimple
   value; output operands should be a gimple lvalue.  In/out ("+")
   operands are split into a separate output and a matching input.  On
   success a GIMPLE_ASM is appended to *PRE_P; statements with constraint
   or lvalue errors are diagnosed and not emitted.  */

static enum gimplify_status
gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree expr;
  int noutputs;
  const char **oconstraints;
  int i;
  tree link;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;
  enum gimplify_status ret, tret;
  gimple stmt;
  VEC(tree, gc) *inputs;
  VEC(tree, gc) *outputs;
  VEC(tree, gc) *clobbers;
  VEC(tree, gc) *labels;
  tree link_next;

  expr = *expr_p;
  noutputs = list_length (ASM_OUTPUTS (expr));
  /* Remember each output constraint string; parse_input_constraint
     needs them to resolve matching ("0", "1", ...) input constraints.  */
  oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));

  inputs = outputs = clobbers = labels = NULL;

  ret = GS_ALL_DONE;
  link_next = NULL_TREE;
  for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
    {
      bool ok;
      size_t constraint_len;

      /* Save the successor now: the chain is cut below when LINK is
	 pushed onto the outputs vector.  */
      link_next = TREE_CHAIN (link);

      oconstraints[i]
	= constraint
	= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      constraint_len = strlen (constraint);
      if (constraint_len == 0)
	continue;

      ok = parse_output_constraint (&constraint, i, 0, 0,
				    &allows_mem, &allows_reg, &is_inout);
      if (!ok)
	{
	  ret = GS_ERROR;
	  is_inout = false;
	}

      /* Memory-only operands must live in addressable storage.  */
      if (!allows_reg && allows_mem)
	mark_addressable (TREE_VALUE (link));

      tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
			    is_inout ? is_gimple_min_lval : is_gimple_lvalue,
			    fb_lvalue | fb_mayfail);
      if (tret == GS_ERROR)
	{
	  error ("invalid lvalue in asm output %d", i);
	  ret = tret;
	}

      VEC_safe_push (tree, gc, outputs, link);
      TREE_CHAIN (link) = NULL_TREE;

      if (is_inout)
	{
	  /* An input/output operand.  To give the optimizers more
	     flexibility, split it into separate input and output
	     operands.  */
	  tree input;
	  /* NOTE(review): buf holds the decimal rendering of the operand
	     index I; 10 bytes only fits indices below 10^9 — confirm an
	     upper bound on asm operand counts.  */
	  char buf[10];

	  /* Turn the in/out constraint into an output constraint.  */
	  char *p = xstrdup (constraint);
	  p[0] = '=';
	  TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);

	  /* And add a matching input constraint.  */
	  if (allows_reg)
	    {
	      sprintf (buf, "%d", i);

	      /* If there are multiple alternatives in the constraint,
		 handle each of them individually.  Those that allow register
		 will be replaced with operand number, the others will stay
		 unchanged.  */
	      if (strchr (p, ',') != NULL)
		{
		  size_t len = 0, buflen = strlen (buf);
		  char *beg, *end, *str, *dst;

		  /* First pass: compute an upper bound on the length of
		     the rewritten constraint string.  */
		  for (beg = p + 1;;)
		    {
		      end = strchr (beg, ',');
		      if (end == NULL)
			end = strchr (beg, '\0');
		      if ((size_t) (end - beg) < buflen)
			len += buflen + 1;
		      else
			len += end - beg + 1;
		      if (*end)
			beg = end + 1;
		      else
			break;
		    }

		  /* Second pass: build the new constraint, substituting
		     the operand number for register-capable alternatives.  */
		  str = (char *) alloca (len);
		  for (beg = p + 1, dst = str;;)
		    {
		      const char *tem;
		      bool mem_p, reg_p, inout_p;

		      end = strchr (beg, ',');
		      if (end)
			*end = '\0';
		      /* Temporarily prefix '=' so the alternative parses
			 as an output constraint.  */
		      beg[-1] = '=';
		      tem = beg - 1;
		      parse_output_constraint (&tem, i, 0, 0,
					       &mem_p, &reg_p, &inout_p);
		      if (dst != str)
			*dst++ = ',';
		      if (reg_p)
			{
			  memcpy (dst, buf, buflen);
			  dst += buflen;
			}
		      else
			{
			  if (end)
			    len = end - beg;
			  else
			    len = strlen (beg);
			  memcpy (dst, beg, len);
			  dst += len;
			}
		      if (end)
			beg = end + 1;
		      else
			break;
		    }
		  *dst = '\0';
		  input = build_string (dst - str, str);
		}
	      else
		input = build_string (strlen (buf), buf);
	    }
	  else
	    /* No register allowed: reuse the original constraint minus
	       the leading '+'.  */
	    input = build_string (constraint_len - 1, constraint + 1);

	  free (p);

	  /* Chain the synthesized input onto the asm's input list so the
	     loop below gimplifies it like any other input.  */
	  input = build_tree_list (build_tree_list (NULL_TREE, input),
				   unshare_expr (TREE_VALUE (link)));
	  ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
	}
    }

  /* Now the inputs, continuing the operand numbering started above.  */
  link_next = NULL_TREE;
  for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
    {
      link_next = TREE_CHAIN (link);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0,
			      oconstraints, &allows_mem, &allows_reg);

      /* If we can't make copies, we can only accept memory.  */
      if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (link))))
	{
	  if (allows_mem)
	    allows_reg = 0;
	  else
	    {
	      error ("impossible constraint in %<asm%>");
	      error ("non-memory input %d must stay in memory", i);
	      return GS_ERROR;
	    }
	}

      /* If the operand is a memory input, it should be an lvalue.  */
      if (!allows_reg && allows_mem)
	{
	  tree inputv = TREE_VALUE (link);
	  STRIP_NOPS (inputv);
	  /* Increment/decrement expressions cannot serve as directly
	     addressable memory inputs; poison them.  */
	  if (TREE_CODE (inputv) == PREDECREMENT_EXPR
	      || TREE_CODE (inputv) == PREINCREMENT_EXPR
	      || TREE_CODE (inputv) == POSTDECREMENT_EXPR
	      || TREE_CODE (inputv) == POSTINCREMENT_EXPR)
	    TREE_VALUE (link) = error_mark_node;
	  tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
				is_gimple_lvalue, fb_lvalue | fb_mayfail);
	  mark_addressable (TREE_VALUE (link));
	  if (tret == GS_ERROR)
	    {
	      if (EXPR_HAS_LOCATION (TREE_VALUE (link)))
		input_location = EXPR_LOCATION (TREE_VALUE (link));
	      error ("memory input %d is not directly addressable", i);
	      ret = tret;
	    }
	}
      else
	{
	  tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
				is_gimple_asm_val, fb_rvalue);
	  if (tret == GS_ERROR)
	    ret = tret;
	}

      TREE_CHAIN (link) = NULL_TREE;
      VEC_safe_push (tree, gc, inputs, link);
    }

  for (link = ASM_CLOBBERS (expr); link; ++i, link = TREE_CHAIN (link))
    VEC_safe_push (tree, gc, clobbers, link);

  for (link = ASM_LABELS (expr); link; ++i, link = TREE_CHAIN (link))
    VEC_safe_push (tree, gc, labels, link);

  /* Do not add ASMs with errors to the gimple IL stream.  */
  if (ret != GS_ERROR)
    {
      stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
				   inputs, outputs, clobbers, labels);

      gimple_asm_set_volatile (stmt, ASM_VOLATILE_P (expr));
      gimple_asm_set_input (stmt, ASM_INPUT_P (expr));

      gimplify_seq_add_stmt (pre_p, stmt);
    }

  return ret;
}
5403
/* Gimplify a CLEANUP_POINT_EXPR.  Currently this works by adding
   GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
   gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
   return to this function.

   FIXME should we complexify the prequeue handling instead?  Or use flags
   for all the cleanups and let the optimizer tighten them up?  The current
   code seems pretty fragile; it will break on a cleanup within any
   non-conditional nesting.  But any such nesting would be broken, anyway;
   we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
   and continues out of it.  We can do that at the RTL level, though, so
   having an optimizer to tighten up try/finally regions would be a Good
   Thing.  */

static enum gimplify_status
gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
{
  gimple_stmt_iterator iter;
  gimple_seq body_sequence = NULL;

  /* If the expression has a value, TEMP receives it and becomes the
     replacement for *EXPR_P; otherwise TEMP is NULL.  */
  tree temp = voidify_wrapper_expr (*expr_p, NULL);

  /* We only care about the number of conditions between the innermost
     CLEANUP_POINT_EXPR and the cleanup.  So save and reset the count and
     any cleanups collected outside the CLEANUP_POINT_EXPR.  */
  int old_conds = gimplify_ctxp->conditions;
  gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
  bool old_in_cleanup_point_expr = gimplify_ctxp->in_cleanup_point_expr;
  gimplify_ctxp->conditions = 0;
  gimplify_ctxp->conditional_cleanups = NULL;
  gimplify_ctxp->in_cleanup_point_expr = true;

  /* Gimplify the body; any cleanups it contains show up as
     GIMPLE_WITH_CLEANUP_EXPR markers in BODY_SEQUENCE.  */
  gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);

  gimplify_ctxp->conditions = old_conds;
  gimplify_ctxp->conditional_cleanups = old_cleanups;
  gimplify_ctxp->in_cleanup_point_expr = old_in_cleanup_point_expr;

  /* Walk BODY_SEQUENCE, replacing each cleanup marker with either the
     inlined cleanup (when nothing follows it) or a GIMPLE_TRY wrapping
     the statements that follow it.  */
  for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
    {
      gimple wce = gsi_stmt (iter);

      if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
	{
	  if (gsi_one_before_end_p (iter))
	    {
	      /* Note that gsi_insert_seq_before and gsi_remove do not
		 scan operands, unlike some other sequence mutators.  */
	      if (!gimple_wce_cleanup_eh_only (wce))
		gsi_insert_seq_before_without_update (&iter,
						      gimple_wce_cleanup (wce),
						      GSI_SAME_STMT);
	      gsi_remove (&iter, true);
	      break;
	    }
	  else
	    {
	      gimple gtry;
	      gimple_seq seq;
	      enum gimple_try_flags kind;

	      /* EH-only cleanups become catch handlers; ordinary ones
		 run on every exit path via try/finally.  */
	      if (gimple_wce_cleanup_eh_only (wce))
		kind = GIMPLE_TRY_CATCH;
	      else
		kind = GIMPLE_TRY_FINALLY;
	      seq = gsi_split_seq_after (iter);

	      gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
	      /* Do not use gsi_replace here, as it may scan operands.
		 We want to do a simple structural modification only.  */
	      gsi_set_stmt (&iter, gtry);
	      /* Continue scanning inside the new try body, which may
		 contain further cleanup markers.  */
	      iter = gsi_start (gtry->gimple_try.eval);
	    }
	}
      else
	gsi_next (&iter);
    }

  gimplify_seq_add_seq (pre_p, body_sequence);
  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }
  else
    {
      *expr_p = NULL;
      return GS_ALL_DONE;
    }
}
5494
5495 /* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP
5496 is the cleanup action required. EH_ONLY is true if the cleanup should
5497 only be executed if an exception is thrown, not on normal exit. */
5498
5499 static void
5500 gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p)
5501 {
5502 gimple wce;
5503 gimple_seq cleanup_stmts = NULL;
5504
5505 /* Errors can result in improperly nested cleanups. Which results in
5506 confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR. */
5507 if (seen_error ())
5508 return;
5509
5510 if (gimple_conditional_context ())
5511 {
5512 /* If we're in a conditional context, this is more complex. We only
5513 want to run the cleanup if we actually ran the initialization that
5514 necessitates it, but we want to run it after the end of the
5515 conditional context. So we wrap the try/finally around the
5516 condition and use a flag to determine whether or not to actually
5517 run the destructor. Thus
5518
5519 test ? f(A()) : 0
5520
5521 becomes (approximately)
5522
5523 flag = 0;
5524 try {
5525 if (test) { A::A(temp); flag = 1; val = f(temp); }
5526 else { val = 0; }
5527 } finally {
5528 if (flag) A::~A(temp);
5529 }
5530 val
5531 */
5532 tree flag = create_tmp_var (boolean_type_node, "cleanup");
5533 gimple ffalse = gimple_build_assign (flag, boolean_false_node);
5534 gimple ftrue = gimple_build_assign (flag, boolean_true_node);
5535
5536 cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
5537 gimplify_stmt (&cleanup, &cleanup_stmts);
5538 wce = gimple_build_wce (cleanup_stmts);
5539
5540 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
5541 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
5542 gimplify_seq_add_stmt (pre_p, ftrue);
5543
5544 /* Because of this manipulation, and the EH edges that jump
5545 threading cannot redirect, the temporary (VAR) will appear
5546 to be used uninitialized. Don't warn. */
5547 TREE_NO_WARNING (var) = 1;
5548 }
5549 else
5550 {
5551 gimplify_stmt (&cleanup, &cleanup_stmts);
5552 wce = gimple_build_wce (cleanup_stmts);
5553 gimple_wce_set_cleanup_eh_only (wce, eh_only);
5554 gimplify_seq_add_stmt (pre_p, wce);
5555 }
5556 }
5557
/* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR.
   The slot temporary replaces *EXPR_P; the initializer is emitted into
   *PRE_P and any cleanup is registered via gimple_push_cleanup.  */

static enum gimplify_status
gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree targ = *expr_p;
  tree temp = TARGET_EXPR_SLOT (targ);
  tree init = TARGET_EXPR_INITIAL (targ);
  enum gimplify_status ret;

  if (init)
    {
      tree cleanup = NULL_TREE;

      /* TARGET_EXPR temps aren't part of the enclosing block, so add it
	 to the temps list.  Handle also variable length TARGET_EXPRs.  */
      if (TREE_CODE (DECL_SIZE (temp)) != INTEGER_CST)
	{
	  if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
	    gimplify_type_sizes (TREE_TYPE (temp), pre_p);
	  gimplify_vla_decl (temp, pre_p);
	}
      else
	gimple_add_tmp_var (temp);

      /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
	 expression is supposed to initialize the slot.  */
      if (VOID_TYPE_P (TREE_TYPE (init)))
	ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
      else
	{
	  /* Otherwise build TEMP = INIT and gimplify that; the INIT_EXPR
	     node itself is scratch and can be freed afterwards.  */
	  tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
	  init = init_expr;
	  ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
	  init = NULL;
	  ggc_free (init_expr);
	}
      if (ret == GS_ERROR)
	{
	  /* PR c++/28266 Make sure this is expanded only once. */
	  TARGET_EXPR_INITIAL (targ) = NULL_TREE;
	  return GS_ERROR;
	}
      /* Anything gimplify_expr left in INIT still needs to be emitted.  */
      if (init)
	gimplify_and_add (init, pre_p);

      /* If needed, push the cleanup for the temp.  */
      if (TARGET_EXPR_CLEANUP (targ))
	{
	  if (CLEANUP_EH_ONLY (targ))
	    gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
				 CLEANUP_EH_ONLY (targ), pre_p);
	  else
	    /* Defer pushing so a scope-end clobber can be merged in.  */
	    cleanup = TARGET_EXPR_CLEANUP (targ);
	}

      /* Add a clobber for the temporary going out of scope, like
	 gimplify_bind_expr.  */
      if (gimplify_ctxp->in_cleanup_point_expr
	  && needs_to_live_in_memory (temp))
	{
	  /* An empty volatile CONSTRUCTOR assignment is GIMPLE's
	     "variable is dead" clobber marker.  */
	  tree clobber = build_constructor (TREE_TYPE (temp), NULL);
	  TREE_THIS_VOLATILE (clobber) = true;
	  clobber = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, clobber);
	  if (cleanup)
	    cleanup = build2 (COMPOUND_EXPR, void_type_node, cleanup,
			      clobber);
	  else
	    cleanup = clobber;
	}

      if (cleanup)
	gimple_push_cleanup (temp, cleanup, false, pre_p);

      /* Only expand this once.  */
      TREE_OPERAND (targ, 3) = init;
      TARGET_EXPR_INITIAL (targ) = NULL_TREE;
    }
  else
    /* We should have expanded this before.  */
    gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));

  *expr_p = temp;
  return GS_OK;
}
5643
5644 /* Gimplification of expression trees. */
5645
5646 /* Gimplify an expression which appears at statement context. The
5647 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is
5648 NULL, a new sequence is allocated.
5649
5650 Return true if we actually added a statement to the queue. */
5651
5652 bool
5653 gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
5654 {
5655 gimple_seq_node last;
5656
5657 last = gimple_seq_last (*seq_p);
5658 gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
5659 return last != gimple_seq_last (*seq_p);
5660 }
5661
5662 /* Add FIRSTPRIVATE entries for DECL in the OpenMP the surrounding parallels
5663 to CTX. If entries already exist, force them to be some flavor of private.
5664 If there is no enclosing parallel, do nothing. */
5665
5666 void
5667 omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
5668 {
5669 splay_tree_node n;
5670
5671 if (decl == NULL || !DECL_P (decl))
5672 return;
5673
5674 do
5675 {
5676 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5677 if (n != NULL)
5678 {
5679 if (n->value & GOVD_SHARED)
5680 n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
5681 else
5682 return;
5683 }
5684 else if (ctx->region_type != ORT_WORKSHARE)
5685 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
5686
5687 ctx = ctx->outer_context;
5688 }
5689 while (ctx);
5690 }
5691
5692 /* Similarly for each of the type sizes of TYPE. */
5693
5694 static void
5695 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
5696 {
5697 if (type == NULL || type == error_mark_node)
5698 return;
5699 type = TYPE_MAIN_VARIANT (type);
5700
5701 if (pointer_set_insert (ctx->privatized_types, type))
5702 return;
5703
5704 switch (TREE_CODE (type))
5705 {
5706 case INTEGER_TYPE:
5707 case ENUMERAL_TYPE:
5708 case BOOLEAN_TYPE:
5709 case REAL_TYPE:
5710 case FIXED_POINT_TYPE:
5711 omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
5712 omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
5713 break;
5714
5715 case ARRAY_TYPE:
5716 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
5717 omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
5718 break;
5719
5720 case RECORD_TYPE:
5721 case UNION_TYPE:
5722 case QUAL_UNION_TYPE:
5723 {
5724 tree field;
5725 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
5726 if (TREE_CODE (field) == FIELD_DECL)
5727 {
5728 omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
5729 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
5730 }
5731 }
5732 break;
5733
5734 case POINTER_TYPE:
5735 case REFERENCE_TYPE:
5736 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
5737 break;
5738
5739 default:
5740 break;
5741 }
5742
5743 omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
5744 omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
5745 lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
5746 }
5747
/* Add an entry for DECL in the OpenMP context CTX with FLAGS.  Handles
   the extra bookkeeping needed by variable-sized decls and decls
   privatized by reference.  */

static void
omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
{
  splay_tree_node n;
  unsigned int nflags;
  tree t;

  if (error_operand_p (decl))
    return;

  /* Never elide decls whose type has TREE_ADDRESSABLE set.  This means
     there are constructors involved somewhere.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (decl))
      || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl)))
    flags |= GOVD_SEEN;

  n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
  if (n != NULL)
    {
      /* We shouldn't be re-adding the decl with the same data
	 sharing class.  */
      gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
      /* The only combination of data sharing classes we should see is
	 FIRSTPRIVATE and LASTPRIVATE.  */
      nflags = n->value | flags;
      gcc_assert ((nflags & GOVD_DATA_SHARE_CLASS)
		  == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE));
      n->value = nflags;
      return;
    }

  /* When adding a variable-sized variable, we have to handle all sorts
     of additional bits of data: the pointer replacement variable, and
     the parameters of the type.  */
  if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
    {
      /* Add the pointer replacement variable as PRIVATE if the variable
	 replacement is private, else FIRSTPRIVATE since we'll need the
	 address of the original variable either for SHARED, or for the
	 copy into or out of the context.  */
      if (!(flags & GOVD_LOCAL))
	{
	  nflags = flags & GOVD_PRIVATE ? GOVD_PRIVATE : GOVD_FIRSTPRIVATE;
	  nflags |= flags & GOVD_SEEN;
	  /* The replacement pointer lives inside DECL's value expr,
	     which is expected to be *ptr.  */
	  t = DECL_VALUE_EXPR (decl);
	  gcc_assert (TREE_CODE (t) == INDIRECT_REF);
	  t = TREE_OPERAND (t, 0);
	  gcc_assert (DECL_P (t));
	  omp_add_variable (ctx, t, nflags);
	}

      /* Add all of the variable and type parameters (which should have
	 been gimplified to a formal temporary) as FIRSTPRIVATE.  */
      omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
      omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));

      /* The variable-sized variable itself is never SHARED, only some form
	 of PRIVATE.  The sharing would take place via the pointer variable
	 which we remapped above.  */
      if (flags & GOVD_SHARED)
	flags = GOVD_PRIVATE | GOVD_DEBUG_PRIVATE
		| (flags & (GOVD_SEEN | GOVD_EXPLICIT));

      /* We're going to make use of the TYPE_SIZE_UNIT at least in the
	 alloca statement we generate for the variable, so make sure it
	 is available.  This isn't automatically needed for the SHARED
	 case, since we won't be allocating local storage then.
	 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
	 in this case omp_notice_variable will be called later
	 on when it is gimplified.  */
      else if (! (flags & GOVD_LOCAL)
	       && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl))))
	omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
    }
  else if (lang_hooks.decls.omp_privatize_by_reference (decl))
    {
      gcc_assert ((flags & GOVD_LOCAL) == 0);
      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));

      /* Similar to the direct variable sized case above, we'll need the
	 size of references being privatized.  */
      if ((flags & GOVD_SHARED) == 0)
	{
	  t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
	  if (TREE_CODE (t) != INTEGER_CST)
	    omp_notice_variable (ctx, t, true);
	}
    }

  splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
}
5842
5843 /* Notice a threadprivate variable DECL used in OpenMP context CTX.
5844 This just prints out diagnostics about threadprivate variable uses
5845 in untied tasks. If DECL2 is non-NULL, prevent this warning
5846 on that variable. */
5847
5848 static bool
5849 omp_notice_threadprivate_variable (struct gimplify_omp_ctx *ctx, tree decl,
5850 tree decl2)
5851 {
5852 splay_tree_node n;
5853
5854 if (ctx->region_type != ORT_UNTIED_TASK)
5855 return false;
5856 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5857 if (n == NULL)
5858 {
5859 error ("threadprivate variable %qE used in untied task",
5860 DECL_NAME (decl));
5861 error_at (ctx->location, "enclosing task");
5862 splay_tree_insert (ctx->variables, (splay_tree_key)decl, 0);
5863 }
5864 if (decl2)
5865 splay_tree_insert (ctx->variables, (splay_tree_key)decl2, 0);
5866 return false;
5867 }
5868
/* Record the fact that DECL was used within the OpenMP context CTX.
   IN_CODE is true when real code uses DECL, and false when we should
   merely emit default(none) errors.  Return true if DECL is going to
   be remapped and thus DECL shouldn't be gimplified into its
   DECL_VALUE_EXPR (if any).  */

static bool
omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
{
  splay_tree_node n;
  unsigned flags = in_code ? GOVD_SEEN : 0;
  bool ret = false, shared;

  if (error_operand_p (decl))
    return false;

  /* Threadprivate variables are predetermined.  */
  if (is_global_var (decl))
    {
      if (DECL_THREAD_LOCAL_P (decl))
	return omp_notice_threadprivate_variable (ctx, decl, NULL_TREE);

      /* Also catch the case where the value expression is based on a
	 threadprivate variable.  */
      if (DECL_HAS_VALUE_EXPR_P (decl))
	{
	  tree value = get_base_address (DECL_VALUE_EXPR (decl));

	  if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
	    return omp_notice_threadprivate_variable (ctx, decl, value);
	}
    }

  n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
  if (n == NULL)
    {
      /* First time DECL is seen in CTX: compute its implicit
	 data-sharing attribute from the applicable default clause
	 and record it.  */
      enum omp_clause_default_kind default_kind, kind;
      struct gimplify_omp_ctx *octx;

      if (ctx->region_type == ORT_WORKSHARE)
	goto do_outer;

      /* ??? Some compiler-generated variables (like SAVE_EXPRs) could be
	 remapped firstprivate instead of shared.  To some extent this is
	 addressed in omp_firstprivatize_type_sizes, but not effectively.  */
      default_kind = ctx->default_kind;
      /* A language-predetermined sharing overrides the default clause.  */
      kind = lang_hooks.decls.omp_predetermined_sharing (decl);
      if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
	default_kind = kind;

      switch (default_kind)
	{
	case OMP_CLAUSE_DEFAULT_NONE:
	  error ("%qE not specified in enclosing parallel",
		 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)));
	  if ((ctx->region_type & ORT_TASK) != 0)
	    error_at (ctx->location, "enclosing task");
	  else
	    error_at (ctx->location, "enclosing parallel");
	  /* FALLTHRU */
	case OMP_CLAUSE_DEFAULT_SHARED:
	  flags |= GOVD_SHARED;
	  break;
	case OMP_CLAUSE_DEFAULT_PRIVATE:
	  flags |= GOVD_PRIVATE;
	  break;
	case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
	  flags |= GOVD_FIRSTPRIVATE;
	  break;
	case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
	  /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED.  */
	  gcc_assert ((ctx->region_type & ORT_TASK) != 0);
	  if (ctx->outer_context)
	    omp_notice_variable (ctx->outer_context, decl, in_code);
	  /* Scan outward up to and including the nearest parallel; if
	     some context there makes DECL other than shared, the task
	     defaults to firstprivate.  */
	  for (octx = ctx->outer_context; octx; octx = octx->outer_context)
	    {
	      splay_tree_node n2;

	      n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
	      if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
		{
		  flags |= GOVD_FIRSTPRIVATE;
		  break;
		}
	      if ((octx->region_type & ORT_PARALLEL) != 0)
		break;
	    }
	  if (flags & GOVD_FIRSTPRIVATE)
	    break;
	  /* No enclosing parallel at all: parameters and function-local
	     variables likewise default to firstprivate.  */
	  if (octx == NULL
	      && (TREE_CODE (decl) == PARM_DECL
		  || (!is_global_var (decl)
		      && DECL_CONTEXT (decl) == current_function_decl)))
	    {
	      flags |= GOVD_FIRSTPRIVATE;
	      break;
	    }
	  flags |= GOVD_SHARED;
	  break;
	default:
	  gcc_unreachable ();
	}

      if ((flags & GOVD_PRIVATE)
	  && lang_hooks.decls.omp_private_outer_ref (decl))
	flags |= GOVD_PRIVATE_OUTER_REF;

      omp_add_variable (ctx, decl, flags);

      shared = (flags & GOVD_SHARED) != 0;
      ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
      goto do_outer;
    }

  /* DECL is already known in CTX.  When a variable-sized DECL becomes
     really used for the first time, also mark the pointer variable
     behind its DECL_VALUE_EXPR as seen.  */
  if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
      && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
      && DECL_SIZE (decl)
      && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
    {
      splay_tree_node n2;
      tree t = DECL_VALUE_EXPR (decl);
      gcc_assert (TREE_CODE (t) == INDIRECT_REF);
      t = TREE_OPERAND (t, 0);
      gcc_assert (DECL_P (t));
      n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
      n2->value |= GOVD_SEEN;
    }

  shared = ((flags | n->value) & GOVD_SHARED) != 0;
  ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);

  /* If nothing changed, there's nothing left to do.  */
  if ((n->value & flags) == flags)
    return ret;
  flags |= n->value;
  n->value = flags;

 do_outer:
  /* If the variable is private in the current context, then we don't
     need to propagate anything to an outer context.  */
  if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
    return ret;
  if (ctx->outer_context
      && omp_notice_variable (ctx->outer_context, decl, in_code))
    return true;
  return ret;
}
6014
6015 /* Verify that DECL is private within CTX. If there's specific information
6016 to the contrary in the innermost scope, generate an error. */
6017
6018 static bool
6019 omp_is_private (struct gimplify_omp_ctx *ctx, tree decl)
6020 {
6021 splay_tree_node n;
6022
6023 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6024 if (n != NULL)
6025 {
6026 if (n->value & GOVD_SHARED)
6027 {
6028 if (ctx == gimplify_omp_ctxp)
6029 {
6030 error ("iteration variable %qE should be private",
6031 DECL_NAME (decl));
6032 n->value = GOVD_PRIVATE;
6033 return true;
6034 }
6035 else
6036 return false;
6037 }
6038 else if ((n->value & GOVD_EXPLICIT) != 0
6039 && (ctx == gimplify_omp_ctxp
6040 || (ctx->region_type == ORT_COMBINED_PARALLEL
6041 && gimplify_omp_ctxp->outer_context == ctx)))
6042 {
6043 if ((n->value & GOVD_FIRSTPRIVATE) != 0)
6044 error ("iteration variable %qE should not be firstprivate",
6045 DECL_NAME (decl));
6046 else if ((n->value & GOVD_REDUCTION) != 0)
6047 error ("iteration variable %qE should not be reduction",
6048 DECL_NAME (decl));
6049 }
6050 return (ctx == gimplify_omp_ctxp
6051 || (ctx->region_type == ORT_COMBINED_PARALLEL
6052 && gimplify_omp_ctxp->outer_context == ctx));
6053 }
6054
6055 if (ctx->region_type != ORT_WORKSHARE)
6056 return false;
6057 else if (ctx->outer_context)
6058 return omp_is_private (ctx->outer_context, decl);
6059 return false;
6060 }
6061
6062 /* Return true if DECL is private within a parallel region
6063 that binds to the current construct's context or in parallel
6064 region's REDUCTION clause. */
6065
6066 static bool
6067 omp_check_private (struct gimplify_omp_ctx *ctx, tree decl)
6068 {
6069 splay_tree_node n;
6070
6071 do
6072 {
6073 ctx = ctx->outer_context;
6074 if (ctx == NULL)
6075 return !(is_global_var (decl)
6076 /* References might be private, but might be shared too. */
6077 || lang_hooks.decls.omp_privatize_by_reference (decl));
6078
6079 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6080 if (n != NULL)
6081 return (n->value & GOVD_SHARED) == 0;
6082 }
6083 while (ctx->region_type == ORT_WORKSHARE);
6084 return false;
6085 }
6086
/* Scan the OpenMP clauses in *LIST_P, installing mappings into a new
   and previous omp contexts.  */

static void
gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
			   enum omp_region_type region_type)
{
  struct gimplify_omp_ctx *ctx, *outer_ctx;
  struct gimplify_ctx gctx;
  tree c;

  /* The new context becomes current (gimplify_omp_ctxp) only at the
     very end; clause operands are gimplified outside it.  */
  ctx = new_omp_context (region_type);
  outer_ctx = ctx->outer_context;

  while ((c = *list_p) != NULL)
    {
      bool remove = false;
      bool notice_outer = true;
      const char *check_non_private = NULL;
      unsigned int flags;
      tree decl;

      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_PRIVATE:
	  flags = GOVD_PRIVATE | GOVD_EXPLICIT;
	  if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
	    {
	      flags |= GOVD_PRIVATE_OUTER_REF;
	      OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
	    }
	  else
	    notice_outer = false;
	  goto do_add;
	case OMP_CLAUSE_SHARED:
	  flags = GOVD_SHARED | GOVD_EXPLICIT;
	  goto do_add;
	case OMP_CLAUSE_FIRSTPRIVATE:
	  flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
	  check_non_private = "firstprivate";
	  goto do_add;
	case OMP_CLAUSE_LASTPRIVATE:
	  flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
	  check_non_private = "lastprivate";
	  goto do_add;
	case OMP_CLAUSE_REDUCTION:
	  flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
	  check_non_private = "reduction";
	  goto do_add;

	do_add:
	  decl = OMP_CLAUSE_DECL (c);
	  if (error_operand_p (decl))
	    {
	      remove = true;
	      break;
	    }
	  omp_add_variable (ctx, decl, flags);
	  /* User-defined reductions carry INIT and MERGE trees that must
	     be gimplified now, within the new context, into the GIMPLE
	     sequences stored on the clause.  */
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	      && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    {
	      omp_add_variable (ctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
				GOVD_LOCAL | GOVD_SEEN);
	      gimplify_omp_ctxp = ctx;
	      push_gimplify_context (&gctx);

	      OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
	      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;

	      gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
				&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
	      pop_gimplify_context
		(gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
	      push_gimplify_context (&gctx);
	      gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
				&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
	      pop_gimplify_context
		(gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
	      OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
	      OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;

	      gimplify_omp_ctxp = outer_ctx;
	    }
	  else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
		   && OMP_CLAUSE_LASTPRIVATE_STMT (c))
	    {
	      /* Likewise gimplify the LASTPRIVATE statement; wrap it in
		 a BIND_EXPR first so any temporaries get a scope.  */
	      gimplify_omp_ctxp = ctx;
	      push_gimplify_context (&gctx);
	      if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
		{
		  tree bind = build3 (BIND_EXPR, void_type_node, NULL,
				      NULL, NULL);
		  TREE_SIDE_EFFECTS (bind) = 1;
		  BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
		  OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
		}
	      gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
				&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
	      pop_gimplify_context
		(gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
	      OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;

	      gimplify_omp_ctxp = outer_ctx;
	    }
	  if (notice_outer)
	    goto do_notice;
	  break;

	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_COPYPRIVATE:
	  decl = OMP_CLAUSE_DECL (c);
	  if (error_operand_p (decl))
	    {
	      remove = true;
	      break;
	    }
	do_notice:
	  /* Record the use in the enclosing context, and diagnose
	     firstprivate/lastprivate/reduction clauses on worksharing
	     constructs whose variable is private in the outer region.  */
	  if (outer_ctx)
	    omp_notice_variable (outer_ctx, decl, true);
	  if (check_non_private
	      && region_type == ORT_WORKSHARE
	      && omp_check_private (ctx, decl))
	    {
	      error ("%s variable %qE is private in outer context",
		     check_non_private, DECL_NAME (decl));
	      remove = true;
	    }
	  break;

	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_IF:
	  OMP_CLAUSE_OPERAND (c, 0)
	    = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
	  /* Fall through.  */

	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_NUM_THREADS:
	  /* Clause operands are evaluated in the enclosing context, so
	     they are gimplified into PRE_P.  */
	  if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
			     is_gimple_val, fb_rvalue) == GS_ERROR)
	    remove = true;
	  break;

	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_MERGEABLE:
	  /* No operands to process.  */
	  break;

	case OMP_CLAUSE_DEFAULT:
	  ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
	  break;

	default:
	  gcc_unreachable ();
	}

      if (remove)
	*list_p = OMP_CLAUSE_CHAIN (c);
      else
	list_p = &OMP_CLAUSE_CHAIN (c);
    }

  gimplify_omp_ctxp = ctx;
}
6252
6253 /* For all variables that were not actually used within the context,
6254 remove PRIVATE, SHARED, and FIRSTPRIVATE clauses. */
6255
6256 static int
6257 gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
6258 {
6259 tree *list_p = (tree *) data;
6260 tree decl = (tree) n->key;
6261 unsigned flags = n->value;
6262 enum omp_clause_code code;
6263 tree clause;
6264 bool private_debug;
6265
6266 if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
6267 return 0;
6268 if ((flags & GOVD_SEEN) == 0)
6269 return 0;
6270 if (flags & GOVD_DEBUG_PRIVATE)
6271 {
6272 gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_PRIVATE);
6273 private_debug = true;
6274 }
6275 else
6276 private_debug
6277 = lang_hooks.decls.omp_private_debug_clause (decl,
6278 !!(flags & GOVD_SHARED));
6279 if (private_debug)
6280 code = OMP_CLAUSE_PRIVATE;
6281 else if (flags & GOVD_SHARED)
6282 {
6283 if (is_global_var (decl))
6284 {
6285 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
6286 while (ctx != NULL)
6287 {
6288 splay_tree_node on
6289 = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6290 if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
6291 | GOVD_PRIVATE | GOVD_REDUCTION)) != 0)
6292 break;
6293 ctx = ctx->outer_context;
6294 }
6295 if (ctx == NULL)
6296 return 0;
6297 }
6298 code = OMP_CLAUSE_SHARED;
6299 }
6300 else if (flags & GOVD_PRIVATE)
6301 code = OMP_CLAUSE_PRIVATE;
6302 else if (flags & GOVD_FIRSTPRIVATE)
6303 code = OMP_CLAUSE_FIRSTPRIVATE;
6304 else
6305 gcc_unreachable ();
6306
6307 clause = build_omp_clause (input_location, code);
6308 OMP_CLAUSE_DECL (clause) = decl;
6309 OMP_CLAUSE_CHAIN (clause) = *list_p;
6310 if (private_debug)
6311 OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
6312 else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
6313 OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
6314 *list_p = clause;
6315 lang_hooks.decls.omp_finish_clause (clause);
6316
6317 return 0;
6318 }
6319
/* Post-process the clause list *LIST_P after the region body has been
   gimplified: remove PRIVATE/SHARED/FIRSTPRIVATE clauses for variables
   never actually seen in the body, turn debug-only copies into PRIVATE
   with the DEBUG flag set, record on each LASTPRIVATE clause whether
   the variable is also FIRSTPRIVATE, append implicitly determined
   clauses, and finally pop and free the current OpenMP context.  */

static void
gimplify_adjust_omp_clauses (tree *list_p)
{
  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
  tree c, decl;

  while ((c = *list_p) != NULL)
    {
      splay_tree_node n;
      bool remove = false;

      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_SHARED:
	case OMP_CLAUSE_FIRSTPRIVATE:
	  decl = OMP_CLAUSE_DECL (c);
	  n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	  /* Unused variables do not need a data-sharing clause.  */
	  remove = !(n->value & GOVD_SEEN);
	  if (! remove)
	    {
	      bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
	      if ((n->value & GOVD_DEBUG_PRIVATE)
		  || lang_hooks.decls.omp_private_debug_clause (decl, shared))
		{
		  gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
			      || ((n->value & GOVD_DATA_SHARE_CLASS)
				  == GOVD_PRIVATE));
		  OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
		  OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
		}
	    }
	  break;

	case OMP_CLAUSE_LASTPRIVATE:
	  /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
	     accurately reflect the presence of a FIRSTPRIVATE clause.  */
	  decl = OMP_CLAUSE_DECL (c);
	  n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	  OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
	    = (n->value & GOVD_FIRSTPRIVATE) != 0;
	  break;

	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_MERGEABLE:
	  /* These clauses need no adjustment here.  */
	  break;

	default:
	  gcc_unreachable ();
	}

      if (remove)
	*list_p = OMP_CLAUSE_CHAIN (c);
      else
	list_p = &OMP_CLAUSE_CHAIN (c);
    }

  /* Add in any implicit data sharing.  */
  splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, list_p);

  gimplify_omp_ctxp = ctx->outer_context;
  delete_omp_context (ctx);
}
6394
6395 /* Gimplify the contents of an OMP_PARALLEL statement. This involves
6396 gimplification of the body, as well as scanning the body for used
6397 variables. We need to do this scan now, because variable-sized
6398 decls will be decomposed during gimplification. */
6399
6400 static void
6401 gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
6402 {
6403 tree expr = *expr_p;
6404 gimple g;
6405 gimple_seq body = NULL;
6406 struct gimplify_ctx gctx;
6407
6408 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
6409 OMP_PARALLEL_COMBINED (expr)
6410 ? ORT_COMBINED_PARALLEL
6411 : ORT_PARALLEL);
6412
6413 push_gimplify_context (&gctx);
6414
6415 g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
6416 if (gimple_code (g) == GIMPLE_BIND)
6417 pop_gimplify_context (g);
6418 else
6419 pop_gimplify_context (NULL);
6420
6421 gimplify_adjust_omp_clauses (&OMP_PARALLEL_CLAUSES (expr));
6422
6423 g = gimple_build_omp_parallel (body,
6424 OMP_PARALLEL_CLAUSES (expr),
6425 NULL_TREE, NULL_TREE);
6426 if (OMP_PARALLEL_COMBINED (expr))
6427 gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
6428 gimplify_seq_add_stmt (pre_p, g);
6429 *expr_p = NULL_TREE;
6430 }
6431
6432 /* Gimplify the contents of an OMP_TASK statement. This involves
6433 gimplification of the body, as well as scanning the body for used
6434 variables. We need to do this scan now, because variable-sized
6435 decls will be decomposed during gimplification. */
6436
6437 static void
6438 gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
6439 {
6440 tree expr = *expr_p;
6441 gimple g;
6442 gimple_seq body = NULL;
6443 struct gimplify_ctx gctx;
6444
6445 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p,
6446 find_omp_clause (OMP_TASK_CLAUSES (expr),
6447 OMP_CLAUSE_UNTIED)
6448 ? ORT_UNTIED_TASK : ORT_TASK);
6449
6450 push_gimplify_context (&gctx);
6451
6452 g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
6453 if (gimple_code (g) == GIMPLE_BIND)
6454 pop_gimplify_context (g);
6455 else
6456 pop_gimplify_context (NULL);
6457
6458 gimplify_adjust_omp_clauses (&OMP_TASK_CLAUSES (expr));
6459
6460 g = gimple_build_omp_task (body,
6461 OMP_TASK_CLAUSES (expr),
6462 NULL_TREE, NULL_TREE,
6463 NULL_TREE, NULL_TREE, NULL_TREE);
6464 gimplify_seq_add_stmt (pre_p, g);
6465 *expr_p = NULL_TREE;
6466 }
6467
/* Gimplify the gross structure of an OMP_FOR statement.  */

static enum gimplify_status
gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
{
  tree for_stmt, decl, var, t;
  enum gimplify_status ret = GS_ALL_DONE;
  enum gimplify_status tret;
  gimple gfor;
  gimple_seq for_body, for_pre_body;
  int i;

  for_stmt = *expr_p;

  gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p,
			     ORT_WORKSHARE);

  /* Handle OMP_FOR_INIT.  */
  for_pre_body = NULL;
  gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
  OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;

  for_body = NULL;
  /* INIT, COND and INCR are parallel vectors with one element per
     collapsed loop.  */
  gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
	      == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
  gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
	      == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));
  for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
    {
      t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
      gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
      decl = TREE_OPERAND (t, 0);
      gcc_assert (DECL_P (decl));
      gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
		  || POINTER_TYPE_P (TREE_TYPE (decl)));

      /* Make sure the iteration variable is private.  */
      if (omp_is_private (gimplify_omp_ctxp, decl))
	omp_notice_variable (gimplify_omp_ctxp, decl, true);
      else
	omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);

      /* If DECL is not a gimple register, create a temporary variable to act
	 as an iteration counter.  This is valid, since DECL cannot be
	 modified in the body of the loop.  */
      if (!is_gimple_reg (decl))
	{
	  var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
	  TREE_OPERAND (t, 0) = var;

	  /* Copy the counter back into DECL at the top of the body so
	     user code still sees the declared variable's value.  */
	  gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));

	  omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
	}
      else
	var = decl;

      /* Gimplify the initial value into the pre-body.  */
      tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
			    is_gimple_val, fb_rvalue);
      ret = MIN (ret, tret);
      if (ret == GS_ERROR)
	return ret;

      /* Handle OMP_FOR_COND.  */
      t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
      gcc_assert (COMPARISON_CLASS_P (t));
      gcc_assert (TREE_OPERAND (t, 0) == decl);

      tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
			    is_gimple_val, fb_rvalue);
      ret = MIN (ret, tret);

      /* Handle OMP_FOR_INCR.  Canonicalize the increment into
	 VAR = VAR <op> STEP form.  */
      t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
      switch (TREE_CODE (t))
	{
	case PREINCREMENT_EXPR:
	case POSTINCREMENT_EXPR:
	  t = build_int_cst (TREE_TYPE (decl), 1);
	  t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
	  t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
	  TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
	  break;

	case PREDECREMENT_EXPR:
	case POSTDECREMENT_EXPR:
	  t = build_int_cst (TREE_TYPE (decl), -1);
	  t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
	  t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
	  TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
	  break;

	case MODIFY_EXPR:
	  gcc_assert (TREE_OPERAND (t, 0) == decl);
	  TREE_OPERAND (t, 0) = var;

	  t = TREE_OPERAND (t, 1);
	  switch (TREE_CODE (t))
	    {
	    case PLUS_EXPR:
	      if (TREE_OPERAND (t, 1) == decl)
		{
		  /* DECL is the second addend; swap the operands so VAR
		     ends up first.  */
		  TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
		  TREE_OPERAND (t, 0) = var;
		  break;
		}

	      /* Fallthru.  */
	    case MINUS_EXPR:
	    case POINTER_PLUS_EXPR:
	      gcc_assert (TREE_OPERAND (t, 0) == decl);
	      TREE_OPERAND (t, 0) = var;
	      break;
	    default:
	      gcc_unreachable ();
	    }

	  tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
				is_gimple_val, fb_rvalue);
	  ret = MIN (ret, tret);
	  break;

	default:
	  gcc_unreachable ();
	}

      if (var != decl || TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1)
	{
	  /* When a temporary was substituted for DECL (or the loops are
	     collapsed), seed any empty lastprivate sequence for DECL with
	     DECL = DECL <op> STEP, derived from the canonicalized
	     increment above.  */
	  tree c;
	  for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
	    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
		&& OMP_CLAUSE_DECL (c) == decl
		&& OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
	      {
		t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
		gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
		gcc_assert (TREE_OPERAND (t, 0) == var);
		t = TREE_OPERAND (t, 1);
		gcc_assert (TREE_CODE (t) == PLUS_EXPR
			    || TREE_CODE (t) == MINUS_EXPR
			    || TREE_CODE (t) == POINTER_PLUS_EXPR);
		gcc_assert (TREE_OPERAND (t, 0) == var);
		t = build2 (TREE_CODE (t), TREE_TYPE (decl), decl,
			    TREE_OPERAND (t, 1));
		gimplify_assign (decl, t,
				 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
	      }
	}
    }

  gimplify_and_add (OMP_FOR_BODY (for_stmt), &for_body);

  gimplify_adjust_omp_clauses (&OMP_FOR_CLAUSES (for_stmt));

  gfor = gimple_build_omp_for (for_body, OMP_FOR_CLAUSES (for_stmt),
			       TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
			       for_pre_body);

  /* Transfer each loop's index, bounds and increment from the
     front-end trees onto the GIMPLE_OMP_FOR statement.  */
  for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
    {
      t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
      gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
      gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
      t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
      gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
      gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
      t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
      gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
    }

  gimplify_seq_add_stmt (pre_p, gfor);
  return ret == GS_ALL_DONE ? GS_ALL_DONE : GS_ERROR;
}
6641
6642 /* Gimplify the gross structure of other OpenMP worksharing constructs.
6643 In particular, OMP_SECTIONS and OMP_SINGLE. */
6644
6645 static void
6646 gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
6647 {
6648 tree expr = *expr_p;
6649 gimple stmt;
6650 gimple_seq body = NULL;
6651
6652 gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ORT_WORKSHARE);
6653 gimplify_and_add (OMP_BODY (expr), &body);
6654 gimplify_adjust_omp_clauses (&OMP_CLAUSES (expr));
6655
6656 if (TREE_CODE (expr) == OMP_SECTIONS)
6657 stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
6658 else if (TREE_CODE (expr) == OMP_SINGLE)
6659 stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
6660 else
6661 gcc_unreachable ();
6662
6663 gimplify_seq_add_stmt (pre_p, stmt);
6664 }
6665
6666 /* A subroutine of gimplify_omp_atomic. The front end is supposed to have
6667 stabilized the lhs of the atomic operation as *ADDR. Return true if
6668 EXPR is this stabilized form. */
6669
6670 static bool
6671 goa_lhs_expr_p (tree expr, tree addr)
6672 {
6673 /* Also include casts to other type variants. The C front end is fond
6674 of adding these for e.g. volatile variables. This is like
6675 STRIP_TYPE_NOPS but includes the main variant lookup. */
6676 STRIP_USELESS_TYPE_CONVERSION (expr);
6677
6678 if (TREE_CODE (expr) == INDIRECT_REF)
6679 {
6680 expr = TREE_OPERAND (expr, 0);
6681 while (expr != addr
6682 && (CONVERT_EXPR_P (expr)
6683 || TREE_CODE (expr) == NON_LVALUE_EXPR)
6684 && TREE_CODE (expr) == TREE_CODE (addr)
6685 && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr)))
6686 {
6687 expr = TREE_OPERAND (expr, 0);
6688 addr = TREE_OPERAND (addr, 0);
6689 }
6690 if (expr == addr)
6691 return true;
6692 return (TREE_CODE (addr) == ADDR_EXPR
6693 && TREE_CODE (expr) == ADDR_EXPR
6694 && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
6695 }
6696 if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
6697 return true;
6698 return false;
6699 }
6700
/* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR.  If an
   expression does not involve the lhs, evaluate it into a temporary.
   Return 1 if the lhs appeared as a subexpression, 0 if it did not,
   or -1 if an error was encountered.  */

static int
goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
		    tree lhs_var)
{
  tree expr = *expr_p;
  int saw_lhs;

  /* The whole expression is the lhs: substitute the loaded temporary.  */
  if (goa_lhs_expr_p (expr, lhs_addr))
    {
      *expr_p = lhs_var;
      return 1;
    }
  /* A gimple value cannot contain the lhs and needs no stabilization.  */
  if (is_gimple_val (expr))
    return 0;

  /* Recurse into operands, accumulating whether any contained the lhs.  */
  saw_lhs = 0;
  switch (TREE_CODE_CLASS (TREE_CODE (expr)))
    {
    case tcc_binary:
    case tcc_comparison:
      saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
				     lhs_var);
      /* FALLTHRU: binary and comparison nodes also process operand 0.  */
    case tcc_unary:
      saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
				     lhs_var);
      break;
    case tcc_expression:
      switch (TREE_CODE (expr))
	{
	case TRUTH_ANDIF_EXPR:
	case TRUTH_ORIF_EXPR:
	case TRUTH_AND_EXPR:
	case TRUTH_OR_EXPR:
	case TRUTH_XOR_EXPR:
	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
					 lhs_addr, lhs_var);
	  /* FALLTHRU: binary truth nodes also process operand 0.  */
	case TRUTH_NOT_EXPR:
	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
					 lhs_addr, lhs_var);
	  break;
	case COMPOUND_EXPR:
	  /* Break out any preevaluations from cp_build_modify_expr.  */
	  for (; TREE_CODE (expr) == COMPOUND_EXPR;
	       expr = TREE_OPERAND (expr, 1))
	    gimplify_stmt (&TREE_OPERAND (expr, 0), pre_p);
	  *expr_p = expr;
	  return goa_stabilize_expr (expr_p, pre_p, lhs_addr, lhs_var);
	default:
	  break;
	}
      break;
    default:
      break;
    }

  /* The lhs did not appear anywhere in this subexpression: evaluate it
     into a temporary ahead of the atomic region.  */
  if (saw_lhs == 0)
    {
      enum gimplify_status gs;
      gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
      if (gs != GS_ALL_DONE)
	saw_lhs = -1;
    }

  return saw_lhs;
}
6771
6772 /* Gimplify an OMP_ATOMIC statement. */
6773
6774 static enum gimplify_status
6775 gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
6776 {
6777 tree addr = TREE_OPERAND (*expr_p, 0);
6778 tree rhs = TREE_CODE (*expr_p) == OMP_ATOMIC_READ
6779 ? NULL : TREE_OPERAND (*expr_p, 1);
6780 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
6781 tree tmp_load;
6782 gimple loadstmt, storestmt;
6783
6784 tmp_load = create_tmp_reg (type, NULL);
6785 if (rhs && goa_stabilize_expr (&rhs, pre_p, addr, tmp_load) < 0)
6786 return GS_ERROR;
6787
6788 if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
6789 != GS_ALL_DONE)
6790 return GS_ERROR;
6791
6792 loadstmt = gimple_build_omp_atomic_load (tmp_load, addr);
6793 gimplify_seq_add_stmt (pre_p, loadstmt);
6794 if (rhs && gimplify_expr (&rhs, pre_p, NULL, is_gimple_val, fb_rvalue)
6795 != GS_ALL_DONE)
6796 return GS_ERROR;
6797
6798 if (TREE_CODE (*expr_p) == OMP_ATOMIC_READ)
6799 rhs = tmp_load;
6800 storestmt = gimple_build_omp_atomic_store (rhs);
6801 gimplify_seq_add_stmt (pre_p, storestmt);
6802 switch (TREE_CODE (*expr_p))
6803 {
6804 case OMP_ATOMIC_READ:
6805 case OMP_ATOMIC_CAPTURE_OLD:
6806 *expr_p = tmp_load;
6807 gimple_omp_atomic_set_need_value (loadstmt);
6808 break;
6809 case OMP_ATOMIC_CAPTURE_NEW:
6810 *expr_p = rhs;
6811 gimple_omp_atomic_set_need_value (storestmt);
6812 break;
6813 default:
6814 *expr_p = NULL;
6815 break;
6816 }
6817
6818 return GS_ALL_DONE;
6819 }
6820
6821 /* Gimplify a TRANSACTION_EXPR. This involves gimplification of the
6822 body, and adding some EH bits. */
6823
6824 static enum gimplify_status
6825 gimplify_transaction (tree *expr_p, gimple_seq *pre_p)
6826 {
6827 tree expr = *expr_p, temp, tbody = TRANSACTION_EXPR_BODY (expr);
6828 gimple g;
6829 gimple_seq body = NULL;
6830 struct gimplify_ctx gctx;
6831 int subcode = 0;
6832
6833 /* Wrap the transaction body in a BIND_EXPR so we have a context
6834 where to put decls for OpenMP. */
6835 if (TREE_CODE (tbody) != BIND_EXPR)
6836 {
6837 tree bind = build3 (BIND_EXPR, void_type_node, NULL, tbody, NULL);
6838 TREE_SIDE_EFFECTS (bind) = 1;
6839 SET_EXPR_LOCATION (bind, EXPR_LOCATION (tbody));
6840 TRANSACTION_EXPR_BODY (expr) = bind;
6841 }
6842
6843 push_gimplify_context (&gctx);
6844 temp = voidify_wrapper_expr (*expr_p, NULL);
6845
6846 g = gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr), &body);
6847 pop_gimplify_context (g);
6848
6849 g = gimple_build_transaction (body, NULL);
6850 if (TRANSACTION_EXPR_OUTER (expr))
6851 subcode = GTMA_IS_OUTER;
6852 else if (TRANSACTION_EXPR_RELAXED (expr))
6853 subcode = GTMA_IS_RELAXED;
6854 gimple_transaction_set_subcode (g, subcode);
6855
6856 gimplify_seq_add_stmt (pre_p, g);
6857
6858 if (temp)
6859 {
6860 *expr_p = temp;
6861 return GS_OK;
6862 }
6863
6864 *expr_p = NULL_TREE;
6865 return GS_ALL_DONE;
6866 }
6867
6868 /* Convert the GENERIC expression tree *EXPR_P to GIMPLE. If the
6869 expression produces a value to be used as an operand inside a GIMPLE
6870 statement, the value will be stored back in *EXPR_P. This value will
6871 be a tree of class tcc_declaration, tcc_constant, tcc_reference or
6872 an SSA_NAME. The corresponding sequence of GIMPLE statements is
6873 emitted in PRE_P and POST_P.
6874
6875 Additionally, this process may overwrite parts of the input
6876 expression during gimplification. Ideally, it should be
6877 possible to do non-destructive gimplification.
6878
6879 EXPR_P points to the GENERIC expression to convert to GIMPLE. If
6880 the expression needs to evaluate to a value to be used as
6881 an operand in a GIMPLE statement, this value will be stored in
6882 *EXPR_P on exit. This happens when the caller specifies one
6883 of fb_lvalue or fb_rvalue fallback flags.
6884
6885 PRE_P will contain the sequence of GIMPLE statements corresponding
6886 to the evaluation of EXPR and all the side-effects that must
6887 be executed before the main expression. On exit, the last
6888 statement of PRE_P is the core statement being gimplified. For
6889 instance, when gimplifying 'if (++a)' the last statement in
6890 PRE_P will be 'if (t.1)' where t.1 is the result of
6891 pre-incrementing 'a'.
6892
6893 POST_P will contain the sequence of GIMPLE statements corresponding
6894 to the evaluation of all the side-effects that must be executed
6895 after the main expression. If this is NULL, the post
6896 side-effects are stored at the end of PRE_P.
6897
6898 The reason why the output is split in two is to handle post
6899 side-effects explicitly. In some cases, an expression may have
6900 inner and outer post side-effects which need to be emitted in
6901 an order different from the one given by the recursive
6902 traversal. For instance, for the expression (*p--)++ the post
6903 side-effects of '--' must actually occur *after* the post
6904 side-effects of '++'. However, gimplification will first visit
6905 the inner expression, so if a separate POST sequence was not
6906 used, the resulting sequence would be:
6907
6908 1 t.1 = *p
6909 2 p = p - 1
6910 3 t.2 = t.1 + 1
6911 4 *p = t.2
6912
6913 However, the post-decrement operation in line #2 must not be
6914 evaluated until after the store to *p at line #4, so the
6915 correct sequence should be:
6916
6917 1 t.1 = *p
6918 2 t.2 = t.1 + 1
6919 3 *p = t.2
6920 4 p = p - 1
6921
6922 So, by specifying a separate post queue, it is possible
6923 to emit the post side-effects in the correct order.
6924 If POST_P is NULL, an internal queue will be used. Before
6925 returning to the caller, the sequence POST_P is appended to
6926 the main output sequence PRE_P.
6927
6928 GIMPLE_TEST_F points to a function that takes a tree T and
6929 returns nonzero if T is in the GIMPLE form requested by the
6930 caller. The GIMPLE predicates are in gimple.c.
6931
6932 FALLBACK tells the function what sort of a temporary we want if
6933 gimplification cannot produce an expression that complies with
6934 GIMPLE_TEST_F.
6935
6936 fb_none means that no temporary should be generated
6937 fb_rvalue means that an rvalue is OK to generate
6938 fb_lvalue means that an lvalue is OK to generate
6939 fb_either means that either is OK, but an lvalue is preferable.
6940 fb_mayfail means that gimplification may fail (in which case
6941 GS_ERROR will be returned)
6942
6943 The return value is either GS_ERROR or GS_ALL_DONE, since this
6944 function iterates until EXPR is completely gimplified or an error
6945 occurs. */
6946
6947 enum gimplify_status
6948 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
6949 bool (*gimple_test_f) (tree), fallback_t fallback)
6950 {
6951 tree tmp;
6952 gimple_seq internal_pre = NULL;
6953 gimple_seq internal_post = NULL;
6954 tree save_expr;
6955 bool is_statement;
6956 location_t saved_location;
6957 enum gimplify_status ret;
6958 gimple_stmt_iterator pre_last_gsi, post_last_gsi;
6959
6960 save_expr = *expr_p;
6961 if (save_expr == NULL_TREE)
6962 return GS_ALL_DONE;
6963
6964 /* If we are gimplifying a top-level statement, PRE_P must be valid. */
6965 is_statement = gimple_test_f == is_gimple_stmt;
6966 if (is_statement)
6967 gcc_assert (pre_p);
6968
6969 /* Consistency checks. */
6970 if (gimple_test_f == is_gimple_reg)
6971 gcc_assert (fallback & (fb_rvalue | fb_lvalue));
6972 else if (gimple_test_f == is_gimple_val
6973 || gimple_test_f == is_gimple_call_addr
6974 || gimple_test_f == is_gimple_condexpr
6975 || gimple_test_f == is_gimple_mem_rhs
6976 || gimple_test_f == is_gimple_mem_rhs_or_call
6977 || gimple_test_f == is_gimple_reg_rhs
6978 || gimple_test_f == is_gimple_reg_rhs_or_call
6979 || gimple_test_f == is_gimple_asm_val
6980 || gimple_test_f == is_gimple_mem_ref_addr)
6981 gcc_assert (fallback & fb_rvalue);
6982 else if (gimple_test_f == is_gimple_min_lval
6983 || gimple_test_f == is_gimple_lvalue)
6984 gcc_assert (fallback & fb_lvalue);
6985 else if (gimple_test_f == is_gimple_addressable)
6986 gcc_assert (fallback & fb_either);
6987 else if (gimple_test_f == is_gimple_stmt)
6988 gcc_assert (fallback == fb_none);
6989 else
6990 {
6991 /* We should have recognized the GIMPLE_TEST_F predicate to
6992 know what kind of fallback to use in case a temporary is
6993 needed to hold the value or address of *EXPR_P. */
6994 gcc_unreachable ();
6995 }
6996
6997 /* We used to check the predicate here and return immediately if it
6998 succeeds. This is wrong; the design is for gimplification to be
6999 idempotent, and for the predicates to only test for valid forms, not
7000 whether they are fully simplified. */
7001 if (pre_p == NULL)
7002 pre_p = &internal_pre;
7003
7004 if (post_p == NULL)
7005 post_p = &internal_post;
7006
7007 /* Remember the last statements added to PRE_P and POST_P. Every
7008 new statement added by the gimplification helpers needs to be
7009 annotated with location information. To centralize the
7010 responsibility, we remember the last statement that had been
7011 added to both queues before gimplifying *EXPR_P. If
7012 gimplification produces new statements in PRE_P and POST_P, those
7013 statements will be annotated with the same location information
7014 as *EXPR_P. */
7015 pre_last_gsi = gsi_last (*pre_p);
7016 post_last_gsi = gsi_last (*post_p);
7017
7018 saved_location = input_location;
7019 if (save_expr != error_mark_node
7020 && EXPR_HAS_LOCATION (*expr_p))
7021 input_location = EXPR_LOCATION (*expr_p);
7022
7023 /* Loop over the specific gimplifiers until the toplevel node
7024 remains the same. */
7025 do
7026 {
7027 /* Strip away as many useless type conversions as possible
7028 at the toplevel. */
7029 STRIP_USELESS_TYPE_CONVERSION (*expr_p);
7030
7031 /* Remember the expr. */
7032 save_expr = *expr_p;
7033
7034 /* Die, die, die, my darling. */
7035 if (save_expr == error_mark_node
7036 || (TREE_TYPE (save_expr)
7037 && TREE_TYPE (save_expr) == error_mark_node))
7038 {
7039 ret = GS_ERROR;
7040 break;
7041 }
7042
7043 /* Do any language-specific gimplification. */
7044 ret = ((enum gimplify_status)
7045 lang_hooks.gimplify_expr (expr_p, pre_p, post_p));
7046 if (ret == GS_OK)
7047 {
7048 if (*expr_p == NULL_TREE)
7049 break;
7050 if (*expr_p != save_expr)
7051 continue;
7052 }
7053 else if (ret != GS_UNHANDLED)
7054 break;
7055
7056 /* Make sure that all the cases set 'ret' appropriately. */
7057 ret = GS_UNHANDLED;
7058 switch (TREE_CODE (*expr_p))
7059 {
7060 /* First deal with the special cases. */
7061
7062 case POSTINCREMENT_EXPR:
7063 case POSTDECREMENT_EXPR:
7064 case PREINCREMENT_EXPR:
7065 case PREDECREMENT_EXPR:
7066 ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
7067 fallback != fb_none);
7068 break;
7069
7070 case ARRAY_REF:
7071 case ARRAY_RANGE_REF:
7072 case REALPART_EXPR:
7073 case IMAGPART_EXPR:
7074 case COMPONENT_REF:
7075 case VIEW_CONVERT_EXPR:
7076 ret = gimplify_compound_lval (expr_p, pre_p, post_p,
7077 fallback ? fallback : fb_rvalue);
7078 break;
7079
7080 case COND_EXPR:
7081 ret = gimplify_cond_expr (expr_p, pre_p, fallback);
7082
7083 /* C99 code may assign to an array in a structure value of a
7084 conditional expression, and this has undefined behavior
7085 only on execution, so create a temporary if an lvalue is
7086 required. */
7087 if (fallback == fb_lvalue)
7088 {
7089 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
7090 mark_addressable (*expr_p);
7091 ret = GS_OK;
7092 }
7093 break;
7094
7095 case CALL_EXPR:
7096 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
7097
7098 /* C99 code may assign to an array in a structure returned
7099 from a function, and this has undefined behavior only on
7100 execution, so create a temporary if an lvalue is
7101 required. */
7102 if (fallback == fb_lvalue)
7103 {
7104 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
7105 mark_addressable (*expr_p);
7106 ret = GS_OK;
7107 }
7108 break;
7109
7110 case TREE_LIST:
7111 gcc_unreachable ();
7112
7113 case COMPOUND_EXPR:
7114 ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
7115 break;
7116
7117 case COMPOUND_LITERAL_EXPR:
7118 ret = gimplify_compound_literal_expr (expr_p, pre_p, fallback);
7119 break;
7120
7121 case MODIFY_EXPR:
7122 case INIT_EXPR:
7123 ret = gimplify_modify_expr (expr_p, pre_p, post_p,
7124 fallback != fb_none);
7125 break;
7126
7127 case TRUTH_ANDIF_EXPR:
7128 case TRUTH_ORIF_EXPR:
7129 {
7130 /* Preserve the original type of the expression and the
7131 source location of the outer expression. */
7132 tree org_type = TREE_TYPE (*expr_p);
7133 *expr_p = gimple_boolify (*expr_p);
7134 *expr_p = build3_loc (input_location, COND_EXPR,
7135 org_type, *expr_p,
7136 fold_convert_loc
7137 (input_location,
7138 org_type, boolean_true_node),
7139 fold_convert_loc
7140 (input_location,
7141 org_type, boolean_false_node));
7142 ret = GS_OK;
7143 break;
7144 }
7145
7146 case TRUTH_NOT_EXPR:
7147 {
7148 tree type = TREE_TYPE (*expr_p);
7149 /* The parsers are careful to generate TRUTH_NOT_EXPR
7150 only with operands that are always zero or one.
7151 We do not fold here but handle the only interesting case
7152 manually, as fold may re-introduce the TRUTH_NOT_EXPR. */
7153 *expr_p = gimple_boolify (*expr_p);
7154 if (TYPE_PRECISION (TREE_TYPE (*expr_p)) == 1)
7155 *expr_p = build1_loc (input_location, BIT_NOT_EXPR,
7156 TREE_TYPE (*expr_p),
7157 TREE_OPERAND (*expr_p, 0));
7158 else
7159 *expr_p = build2_loc (input_location, BIT_XOR_EXPR,
7160 TREE_TYPE (*expr_p),
7161 TREE_OPERAND (*expr_p, 0),
7162 build_int_cst (TREE_TYPE (*expr_p), 1));
7163 if (!useless_type_conversion_p (type, TREE_TYPE (*expr_p)))
7164 *expr_p = fold_convert_loc (input_location, type, *expr_p);
7165 ret = GS_OK;
7166 break;
7167 }
7168
7169 case ADDR_EXPR:
7170 ret = gimplify_addr_expr (expr_p, pre_p, post_p);
7171 break;
7172
7173 case VA_ARG_EXPR:
7174 ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
7175 break;
7176
7177 CASE_CONVERT:
7178 if (IS_EMPTY_STMT (*expr_p))
7179 {
7180 ret = GS_ALL_DONE;
7181 break;
7182 }
7183
7184 if (VOID_TYPE_P (TREE_TYPE (*expr_p))
7185 || fallback == fb_none)
7186 {
7187 /* Just strip a conversion to void (or in void context) and
7188 try again. */
7189 *expr_p = TREE_OPERAND (*expr_p, 0);
7190 ret = GS_OK;
7191 break;
7192 }
7193
7194 ret = gimplify_conversion (expr_p);
7195 if (ret == GS_ERROR)
7196 break;
7197 if (*expr_p != save_expr)
7198 break;
7199 /* FALLTHRU */
7200
7201 case FIX_TRUNC_EXPR:
7202 /* unary_expr: ... | '(' cast ')' val | ... */
7203 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
7204 is_gimple_val, fb_rvalue);
7205 recalculate_side_effects (*expr_p);
7206 break;
7207
7208 case INDIRECT_REF:
7209 {
7210 bool volatilep = TREE_THIS_VOLATILE (*expr_p);
7211 bool notrap = TREE_THIS_NOTRAP (*expr_p);
7212 tree saved_ptr_type = TREE_TYPE (TREE_OPERAND (*expr_p, 0));
7213
7214 *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
7215 if (*expr_p != save_expr)
7216 {
7217 ret = GS_OK;
7218 break;
7219 }
7220
7221 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
7222 is_gimple_reg, fb_rvalue);
7223 if (ret == GS_ERROR)
7224 break;
7225
7226 recalculate_side_effects (*expr_p);
7227 *expr_p = fold_build2_loc (input_location, MEM_REF,
7228 TREE_TYPE (*expr_p),
7229 TREE_OPERAND (*expr_p, 0),
7230 build_int_cst (saved_ptr_type, 0));
7231 TREE_THIS_VOLATILE (*expr_p) = volatilep;
7232 TREE_THIS_NOTRAP (*expr_p) = notrap;
7233 ret = GS_OK;
7234 break;
7235 }
7236
 7237 	  /* We arrive here through the various re-gimplification paths.  */
7238 case MEM_REF:
7239 /* First try re-folding the whole thing. */
7240 tmp = fold_binary (MEM_REF, TREE_TYPE (*expr_p),
7241 TREE_OPERAND (*expr_p, 0),
7242 TREE_OPERAND (*expr_p, 1));
7243 if (tmp)
7244 {
7245 *expr_p = tmp;
7246 recalculate_side_effects (*expr_p);
7247 ret = GS_OK;
7248 break;
7249 }
7250 /* Avoid re-gimplifying the address operand if it is already
7251 in suitable form. Re-gimplifying would mark the address
7252 operand addressable. Always gimplify when not in SSA form
7253 as we still may have to gimplify decls with value-exprs. */
7254 if (!gimplify_ctxp || !gimplify_ctxp->into_ssa
7255 || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p, 0)))
7256 {
7257 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
7258 is_gimple_mem_ref_addr, fb_rvalue);
7259 if (ret == GS_ERROR)
7260 break;
7261 }
7262 recalculate_side_effects (*expr_p);
7263 ret = GS_ALL_DONE;
7264 break;
7265
7266 /* Constants need not be gimplified. */
7267 case INTEGER_CST:
7268 case REAL_CST:
7269 case FIXED_CST:
7270 case STRING_CST:
7271 case COMPLEX_CST:
7272 case VECTOR_CST:
7273 ret = GS_ALL_DONE;
7274 break;
7275
7276 case CONST_DECL:
7277 /* If we require an lvalue, such as for ADDR_EXPR, retain the
7278 CONST_DECL node. Otherwise the decl is replaceable by its
7279 value. */
7280 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
7281 if (fallback & fb_lvalue)
7282 ret = GS_ALL_DONE;
7283 else
7284 {
7285 *expr_p = DECL_INITIAL (*expr_p);
7286 ret = GS_OK;
7287 }
7288 break;
7289
7290 case DECL_EXPR:
7291 ret = gimplify_decl_expr (expr_p, pre_p);
7292 break;
7293
7294 case BIND_EXPR:
7295 ret = gimplify_bind_expr (expr_p, pre_p);
7296 break;
7297
7298 case LOOP_EXPR:
7299 ret = gimplify_loop_expr (expr_p, pre_p);
7300 break;
7301
7302 case SWITCH_EXPR:
7303 ret = gimplify_switch_expr (expr_p, pre_p);
7304 break;
7305
7306 case EXIT_EXPR:
7307 ret = gimplify_exit_expr (expr_p);
7308 break;
7309
7310 case GOTO_EXPR:
 7311 	  /* If the target is not a LABEL_DECL, then it is a computed jump
 7312 	     and the target needs to be gimplified.  */
7313 if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
7314 {
7315 ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
7316 NULL, is_gimple_val, fb_rvalue);
7317 if (ret == GS_ERROR)
7318 break;
7319 }
7320 gimplify_seq_add_stmt (pre_p,
7321 gimple_build_goto (GOTO_DESTINATION (*expr_p)));
7322 ret = GS_ALL_DONE;
7323 break;
7324
7325 case PREDICT_EXPR:
7326 gimplify_seq_add_stmt (pre_p,
7327 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
7328 PREDICT_EXPR_OUTCOME (*expr_p)));
7329 ret = GS_ALL_DONE;
7330 break;
7331
7332 case LABEL_EXPR:
7333 ret = GS_ALL_DONE;
7334 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
7335 == current_function_decl);
7336 gimplify_seq_add_stmt (pre_p,
7337 gimple_build_label (LABEL_EXPR_LABEL (*expr_p)));
7338 break;
7339
7340 case CASE_LABEL_EXPR:
7341 ret = gimplify_case_label_expr (expr_p, pre_p);
7342 break;
7343
7344 case RETURN_EXPR:
7345 ret = gimplify_return_expr (*expr_p, pre_p);
7346 break;
7347
7348 case CONSTRUCTOR:
7349 /* Don't reduce this in place; let gimplify_init_constructor work its
 7350 	     magic.  But if we're just elaborating this for side effects, just
7351 gimplify any element that has side-effects. */
7352 if (fallback == fb_none)
7353 {
7354 unsigned HOST_WIDE_INT ix;
7355 tree val;
7356 tree temp = NULL_TREE;
7357 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p), ix, val)
7358 if (TREE_SIDE_EFFECTS (val))
7359 append_to_statement_list (val, &temp);
7360
7361 *expr_p = temp;
7362 ret = temp ? GS_OK : GS_ALL_DONE;
7363 }
7364 /* C99 code may assign to an array in a constructed
7365 structure or union, and this has undefined behavior only
7366 on execution, so create a temporary if an lvalue is
7367 required. */
7368 else if (fallback == fb_lvalue)
7369 {
7370 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
7371 mark_addressable (*expr_p);
7372 ret = GS_OK;
7373 }
7374 else
7375 ret = GS_ALL_DONE;
7376 break;
7377
7378 /* The following are special cases that are not handled by the
7379 original GIMPLE grammar. */
7380
7381 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
7382 eliminated. */
7383 case SAVE_EXPR:
7384 ret = gimplify_save_expr (expr_p, pre_p, post_p);
7385 break;
7386
7387 case BIT_FIELD_REF:
7388 {
7389 enum gimplify_status r0, r1, r2;
7390
7391 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
7392 post_p, is_gimple_lvalue, fb_either);
7393 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
7394 post_p, is_gimple_val, fb_rvalue);
7395 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
7396 post_p, is_gimple_val, fb_rvalue);
7397 recalculate_side_effects (*expr_p);
7398
7399 ret = MIN (r0, MIN (r1, r2));
7400 }
7401 break;
7402
7403 case TARGET_MEM_REF:
7404 {
7405 enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;
7406
7407 if (TMR_BASE (*expr_p))
7408 r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
7409 post_p, is_gimple_mem_ref_addr, fb_either);
7410 if (TMR_INDEX (*expr_p))
7411 r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
7412 post_p, is_gimple_val, fb_rvalue);
7413 if (TMR_INDEX2 (*expr_p))
7414 r1 = gimplify_expr (&TMR_INDEX2 (*expr_p), pre_p,
7415 post_p, is_gimple_val, fb_rvalue);
7416 /* TMR_STEP and TMR_OFFSET are always integer constants. */
7417 ret = MIN (r0, r1);
7418 }
7419 break;
7420
7421 case NON_LVALUE_EXPR:
7422 /* This should have been stripped above. */
7423 gcc_unreachable ();
7424
7425 case ASM_EXPR:
7426 ret = gimplify_asm_expr (expr_p, pre_p, post_p);
7427 break;
7428
7429 case TRY_FINALLY_EXPR:
7430 case TRY_CATCH_EXPR:
7431 {
7432 gimple_seq eval, cleanup;
7433 gimple try_;
7434
7435 eval = cleanup = NULL;
7436 gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
7437 gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
7438 /* Don't create bogus GIMPLE_TRY with empty cleanup. */
7439 if (gimple_seq_empty_p (cleanup))
7440 {
7441 gimple_seq_add_seq (pre_p, eval);
7442 ret = GS_ALL_DONE;
7443 break;
7444 }
7445 try_ = gimple_build_try (eval, cleanup,
7446 TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
7447 ? GIMPLE_TRY_FINALLY
7448 : GIMPLE_TRY_CATCH);
7449 if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
7450 gimple_try_set_catch_is_cleanup (try_,
7451 TRY_CATCH_IS_CLEANUP (*expr_p));
7452 gimplify_seq_add_stmt (pre_p, try_);
7453 ret = GS_ALL_DONE;
7454 break;
7455 }
7456
7457 case CLEANUP_POINT_EXPR:
7458 ret = gimplify_cleanup_point_expr (expr_p, pre_p);
7459 break;
7460
7461 case TARGET_EXPR:
7462 ret = gimplify_target_expr (expr_p, pre_p, post_p);
7463 break;
7464
7465 case CATCH_EXPR:
7466 {
7467 gimple c;
7468 gimple_seq handler = NULL;
7469 gimplify_and_add (CATCH_BODY (*expr_p), &handler);
7470 c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
7471 gimplify_seq_add_stmt (pre_p, c);
7472 ret = GS_ALL_DONE;
7473 break;
7474 }
7475
7476 case EH_FILTER_EXPR:
7477 {
7478 gimple ehf;
7479 gimple_seq failure = NULL;
7480
7481 gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
7482 ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
7483 gimple_set_no_warning (ehf, TREE_NO_WARNING (*expr_p));
7484 gimplify_seq_add_stmt (pre_p, ehf);
7485 ret = GS_ALL_DONE;
7486 break;
7487 }
7488
7489 case OBJ_TYPE_REF:
7490 {
7491 enum gimplify_status r0, r1;
7492 r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
7493 post_p, is_gimple_val, fb_rvalue);
7494 r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
7495 post_p, is_gimple_val, fb_rvalue);
7496 TREE_SIDE_EFFECTS (*expr_p) = 0;
7497 ret = MIN (r0, r1);
7498 }
7499 break;
7500
7501 case LABEL_DECL:
7502 /* We get here when taking the address of a label. We mark
7503 the label as "forced"; meaning it can never be removed and
7504 it is a potential target for any computed goto. */
7505 FORCED_LABEL (*expr_p) = 1;
7506 ret = GS_ALL_DONE;
7507 break;
7508
7509 case STATEMENT_LIST:
7510 ret = gimplify_statement_list (expr_p, pre_p);
7511 break;
7512
7513 case WITH_SIZE_EXPR:
7514 {
7515 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
7516 post_p == &internal_post ? NULL : post_p,
7517 gimple_test_f, fallback);
7518 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
7519 is_gimple_val, fb_rvalue);
7520 ret = GS_ALL_DONE;
7521 }
7522 break;
7523
7524 case VAR_DECL:
7525 case PARM_DECL:
7526 ret = gimplify_var_or_parm_decl (expr_p);
7527 break;
7528
7529 case RESULT_DECL:
7530 /* When within an OpenMP context, notice uses of variables. */
7531 if (gimplify_omp_ctxp)
7532 omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
7533 ret = GS_ALL_DONE;
7534 break;
7535
7536 case SSA_NAME:
7537 /* Allow callbacks into the gimplifier during optimization. */
7538 ret = GS_ALL_DONE;
7539 break;
7540
7541 case OMP_PARALLEL:
7542 gimplify_omp_parallel (expr_p, pre_p);
7543 ret = GS_ALL_DONE;
7544 break;
7545
7546 case OMP_TASK:
7547 gimplify_omp_task (expr_p, pre_p);
7548 ret = GS_ALL_DONE;
7549 break;
7550
7551 case OMP_FOR:
7552 ret = gimplify_omp_for (expr_p, pre_p);
7553 break;
7554
7555 case OMP_SECTIONS:
7556 case OMP_SINGLE:
7557 gimplify_omp_workshare (expr_p, pre_p);
7558 ret = GS_ALL_DONE;
7559 break;
7560
7561 case OMP_SECTION:
7562 case OMP_MASTER:
7563 case OMP_ORDERED:
7564 case OMP_CRITICAL:
7565 {
7566 gimple_seq body = NULL;
7567 gimple g;
7568
7569 gimplify_and_add (OMP_BODY (*expr_p), &body);
7570 switch (TREE_CODE (*expr_p))
7571 {
7572 case OMP_SECTION:
7573 g = gimple_build_omp_section (body);
7574 break;
7575 case OMP_MASTER:
7576 g = gimple_build_omp_master (body);
7577 break;
7578 case OMP_ORDERED:
7579 g = gimple_build_omp_ordered (body);
7580 break;
7581 case OMP_CRITICAL:
7582 g = gimple_build_omp_critical (body,
7583 OMP_CRITICAL_NAME (*expr_p));
7584 break;
7585 default:
7586 gcc_unreachable ();
7587 }
7588 gimplify_seq_add_stmt (pre_p, g);
7589 ret = GS_ALL_DONE;
7590 break;
7591 }
7592
7593 case OMP_ATOMIC:
7594 case OMP_ATOMIC_READ:
7595 case OMP_ATOMIC_CAPTURE_OLD:
7596 case OMP_ATOMIC_CAPTURE_NEW:
7597 ret = gimplify_omp_atomic (expr_p, pre_p);
7598 break;
7599
7600 case TRANSACTION_EXPR:
7601 ret = gimplify_transaction (expr_p, pre_p);
7602 break;
7603
7604 case TRUTH_AND_EXPR:
7605 case TRUTH_OR_EXPR:
7606 case TRUTH_XOR_EXPR:
7607 {
7608 tree orig_type = TREE_TYPE (*expr_p);
7609 tree new_type, xop0, xop1;
7610 *expr_p = gimple_boolify (*expr_p);
7611 new_type = TREE_TYPE (*expr_p);
7612 if (!useless_type_conversion_p (orig_type, new_type))
7613 {
7614 *expr_p = fold_convert_loc (input_location, orig_type, *expr_p);
7615 ret = GS_OK;
7616 break;
7617 }
7618
7619 /* Boolified binary truth expressions are semantically equivalent
7620 to bitwise binary expressions. Canonicalize them to the
7621 bitwise variant. */
7622 switch (TREE_CODE (*expr_p))
7623 {
7624 case TRUTH_AND_EXPR:
7625 TREE_SET_CODE (*expr_p, BIT_AND_EXPR);
7626 break;
7627 case TRUTH_OR_EXPR:
7628 TREE_SET_CODE (*expr_p, BIT_IOR_EXPR);
7629 break;
7630 case TRUTH_XOR_EXPR:
7631 TREE_SET_CODE (*expr_p, BIT_XOR_EXPR);
7632 break;
7633 default:
7634 break;
7635 }
7636 /* Now make sure that operands have compatible type to
7637 expression's new_type. */
7638 xop0 = TREE_OPERAND (*expr_p, 0);
7639 xop1 = TREE_OPERAND (*expr_p, 1);
7640 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop0)))
7641 TREE_OPERAND (*expr_p, 0) = fold_convert_loc (input_location,
7642 new_type,
7643 xop0);
7644 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop1)))
7645 TREE_OPERAND (*expr_p, 1) = fold_convert_loc (input_location,
7646 new_type,
7647 xop1);
7648 /* Continue classified as tcc_binary. */
7649 goto expr_2;
7650 }
7651
7652 case FMA_EXPR:
7653 case VEC_PERM_EXPR:
7654 /* Classified as tcc_expression. */
7655 goto expr_3;
7656
7657 case POINTER_PLUS_EXPR:
7658 {
7659 enum gimplify_status r0, r1;
7660 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
7661 post_p, is_gimple_val, fb_rvalue);
7662 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
7663 post_p, is_gimple_val, fb_rvalue);
7664 recalculate_side_effects (*expr_p);
7665 ret = MIN (r0, r1);
7666 /* Convert &X + CST to invariant &MEM[&X, CST]. Do this
7667 after gimplifying operands - this is similar to how
7668 it would be folding all gimplified stmts on creation
7669 to have them canonicalized, which is what we eventually
7670 should do anyway. */
7671 if (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == INTEGER_CST
7672 && is_gimple_min_invariant (TREE_OPERAND (*expr_p, 0)))
7673 {
7674 *expr_p = build_fold_addr_expr_with_type_loc
7675 (input_location,
7676 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (*expr_p)),
7677 TREE_OPERAND (*expr_p, 0),
7678 fold_convert (ptr_type_node,
7679 TREE_OPERAND (*expr_p, 1))),
7680 TREE_TYPE (*expr_p));
7681 ret = MIN (ret, GS_OK);
7682 }
7683 break;
7684 }
7685
7686 default:
7687 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
7688 {
7689 case tcc_comparison:
7690 /* Handle comparison of objects of non scalar mode aggregates
7691 with a call to memcmp. It would be nice to only have to do
7692 this for variable-sized objects, but then we'd have to allow
7693 the same nest of reference nodes we allow for MODIFY_EXPR and
7694 that's too complex.
7695
7696 Compare scalar mode aggregates as scalar mode values. Using
7697 memcmp for them would be very inefficient at best, and is
7698 plain wrong if bitfields are involved. */
7699 {
7700 tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
7701
7702 /* Vector comparisons need no boolification. */
7703 if (TREE_CODE (type) == VECTOR_TYPE)
7704 goto expr_2;
7705 else if (!AGGREGATE_TYPE_P (type))
7706 {
7707 tree org_type = TREE_TYPE (*expr_p);
7708 *expr_p = gimple_boolify (*expr_p);
7709 if (!useless_type_conversion_p (org_type,
7710 TREE_TYPE (*expr_p)))
7711 {
7712 *expr_p = fold_convert_loc (input_location,
7713 org_type, *expr_p);
7714 ret = GS_OK;
7715 }
7716 else
7717 goto expr_2;
7718 }
7719 else if (TYPE_MODE (type) != BLKmode)
7720 ret = gimplify_scalar_mode_aggregate_compare (expr_p);
7721 else
7722 ret = gimplify_variable_sized_compare (expr_p);
7723
7724 break;
7725 }
7726
7727 /* If *EXPR_P does not need to be special-cased, handle it
7728 according to its class. */
7729 case tcc_unary:
7730 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
7731 post_p, is_gimple_val, fb_rvalue);
7732 break;
7733
7734 case tcc_binary:
7735 expr_2:
7736 {
7737 enum gimplify_status r0, r1;
7738
7739 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
7740 post_p, is_gimple_val, fb_rvalue);
7741 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
7742 post_p, is_gimple_val, fb_rvalue);
7743
7744 ret = MIN (r0, r1);
7745 break;
7746 }
7747
7748 expr_3:
7749 {
7750 enum gimplify_status r0, r1, r2;
7751
7752 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
7753 post_p, is_gimple_val, fb_rvalue);
7754 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
7755 post_p, is_gimple_val, fb_rvalue);
7756 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
7757 post_p, is_gimple_val, fb_rvalue);
7758
7759 ret = MIN (MIN (r0, r1), r2);
7760 break;
7761 }
7762
7763 case tcc_declaration:
7764 case tcc_constant:
7765 ret = GS_ALL_DONE;
7766 goto dont_recalculate;
7767
7768 default:
7769 gcc_unreachable ();
7770 }
7771
7772 recalculate_side_effects (*expr_p);
7773
7774 dont_recalculate:
7775 break;
7776 }
7777
7778 gcc_assert (*expr_p || ret != GS_OK);
7779 }
7780 while (ret == GS_OK);
7781
7782 /* If we encountered an error_mark somewhere nested inside, either
7783 stub out the statement or propagate the error back out. */
7784 if (ret == GS_ERROR)
7785 {
7786 if (is_statement)
7787 *expr_p = NULL;
7788 goto out;
7789 }
7790
7791 /* This was only valid as a return value from the langhook, which
7792 we handled. Make sure it doesn't escape from any other context. */
7793 gcc_assert (ret != GS_UNHANDLED);
7794
7795 if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
7796 {
7797 /* We aren't looking for a value, and we don't have a valid
7798 statement. If it doesn't have side-effects, throw it away. */
7799 if (!TREE_SIDE_EFFECTS (*expr_p))
7800 *expr_p = NULL;
7801 else if (!TREE_THIS_VOLATILE (*expr_p))
7802 {
7803 /* This is probably a _REF that contains something nested that
7804 has side effects. Recurse through the operands to find it. */
7805 enum tree_code code = TREE_CODE (*expr_p);
7806
7807 switch (code)
7808 {
7809 case COMPONENT_REF:
7810 case REALPART_EXPR:
7811 case IMAGPART_EXPR:
7812 case VIEW_CONVERT_EXPR:
7813 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
7814 gimple_test_f, fallback);
7815 break;
7816
7817 case ARRAY_REF:
7818 case ARRAY_RANGE_REF:
7819 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
7820 gimple_test_f, fallback);
7821 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
7822 gimple_test_f, fallback);
7823 break;
7824
7825 default:
7826 /* Anything else with side-effects must be converted to
7827 a valid statement before we get here. */
7828 gcc_unreachable ();
7829 }
7830
7831 *expr_p = NULL;
7832 }
7833 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
7834 && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode)
7835 {
7836 /* Historically, the compiler has treated a bare reference
7837 to a non-BLKmode volatile lvalue as forcing a load. */
7838 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
7839
7840 /* Normally, we do not want to create a temporary for a
7841 TREE_ADDRESSABLE type because such a type should not be
7842 copied by bitwise-assignment. However, we make an
7843 exception here, as all we are doing here is ensuring that
7844 we read the bytes that make up the type. We use
7845 create_tmp_var_raw because create_tmp_var will abort when
7846 given a TREE_ADDRESSABLE type. */
7847 tree tmp = create_tmp_var_raw (type, "vol");
7848 gimple_add_tmp_var (tmp);
7849 gimplify_assign (tmp, *expr_p, pre_p);
7850 *expr_p = NULL;
7851 }
7852 else
7853 /* We can't do anything useful with a volatile reference to
7854 an incomplete type, so just throw it away. Likewise for
7855 a BLKmode type, since any implicit inner load should
7856 already have been turned into an explicit one by the
7857 gimplification process. */
7858 *expr_p = NULL;
7859 }
7860
7861 /* If we are gimplifying at the statement level, we're done. Tack
7862 everything together and return. */
7863 if (fallback == fb_none || is_statement)
7864 {
7865 /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
7866 it out for GC to reclaim it. */
7867 *expr_p = NULL_TREE;
7868
7869 if (!gimple_seq_empty_p (internal_pre)
7870 || !gimple_seq_empty_p (internal_post))
7871 {
7872 gimplify_seq_add_seq (&internal_pre, internal_post);
7873 gimplify_seq_add_seq (pre_p, internal_pre);
7874 }
7875
7876 /* The result of gimplifying *EXPR_P is going to be the last few
7877 statements in *PRE_P and *POST_P. Add location information
7878 to all the statements that were added by the gimplification
7879 helpers. */
7880 if (!gimple_seq_empty_p (*pre_p))
7881 annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);
7882
7883 if (!gimple_seq_empty_p (*post_p))
7884 annotate_all_with_location_after (*post_p, post_last_gsi,
7885 input_location);
7886
7887 goto out;
7888 }
7889
7890 #ifdef ENABLE_GIMPLE_CHECKING
7891 if (*expr_p)
7892 {
7893 enum tree_code code = TREE_CODE (*expr_p);
7894 /* These expressions should already be in gimple IR form. */
7895 gcc_assert (code != MODIFY_EXPR
7896 && code != ASM_EXPR
7897 && code != BIND_EXPR
7898 && code != CATCH_EXPR
7899 && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
7900 && code != EH_FILTER_EXPR
7901 && code != GOTO_EXPR
7902 && code != LABEL_EXPR
7903 && code != LOOP_EXPR
7904 && code != SWITCH_EXPR
7905 && code != TRY_FINALLY_EXPR
7906 && code != OMP_CRITICAL
7907 && code != OMP_FOR
7908 && code != OMP_MASTER
7909 && code != OMP_ORDERED
7910 && code != OMP_PARALLEL
7911 && code != OMP_SECTIONS
7912 && code != OMP_SECTION
7913 && code != OMP_SINGLE);
7914 }
7915 #endif
7916
7917 /* Otherwise we're gimplifying a subexpression, so the resulting
7918 value is interesting. If it's a valid operand that matches
7919 GIMPLE_TEST_F, we're done. Unless we are handling some
7920 post-effects internally; if that's the case, we need to copy into
7921 a temporary before adding the post-effects to POST_P. */
7922 if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
7923 goto out;
7924
7925 /* Otherwise, we need to create a new temporary for the gimplified
7926 expression. */
7927
7928 /* We can't return an lvalue if we have an internal postqueue. The
7929 object the lvalue refers to would (probably) be modified by the
7930 postqueue; we need to copy the value out first, which means an
7931 rvalue. */
7932 if ((fallback & fb_lvalue)
7933 && gimple_seq_empty_p (internal_post)
7934 && is_gimple_addressable (*expr_p))
7935 {
7936 /* An lvalue will do. Take the address of the expression, store it
7937 in a temporary, and replace the expression with an INDIRECT_REF of
7938 that temporary. */
7939 tmp = build_fold_addr_expr_loc (input_location, *expr_p);
7940 gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
7941 *expr_p = build_simple_mem_ref (tmp);
7942 }
7943 else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
7944 {
7945 /* An rvalue will do. Assign the gimplified expression into a
7946 new temporary TMP and replace the original expression with
7947 TMP. First, make sure that the expression has a type so that
7948 it can be assigned into a temporary. */
7949 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
7950 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
7951 }
7952 else
7953 {
7954 #ifdef ENABLE_GIMPLE_CHECKING
7955 if (!(fallback & fb_mayfail))
7956 {
7957 fprintf (stderr, "gimplification failed:\n");
7958 print_generic_expr (stderr, *expr_p, 0);
7959 debug_tree (*expr_p);
7960 internal_error ("gimplification failed");
7961 }
7962 #endif
7963 gcc_assert (fallback & fb_mayfail);
7964
7965 /* If this is an asm statement, and the user asked for the
7966 impossible, don't die. Fail and let gimplify_asm_expr
7967 issue an error. */
7968 ret = GS_ERROR;
7969 goto out;
7970 }
7971
7972 /* Make sure the temporary matches our predicate. */
7973 gcc_assert ((*gimple_test_f) (*expr_p));
7974
7975 if (!gimple_seq_empty_p (internal_post))
7976 {
7977 annotate_all_with_location (internal_post, input_location);
7978 gimplify_seq_add_seq (pre_p, internal_post);
7979 }
7980
7981 out:
7982 input_location = saved_location;
7983 return ret;
7984 }
7985
7986 /* Look through TYPE for variable-sized objects and gimplify each such
7987 size that we find. Add to LIST_P any statements generated. */
7988
void
gimplify_type_sizes (tree type, gimple_seq *list_p)
{
  tree field, t;

  /* Nothing to do for a missing or erroneous type.  */
  if (type == NULL || type == error_mark_node)
    return;

  /* We first do the main variant, then copy into any other variants.  */
  type = TYPE_MAIN_VARIANT (type);

  /* Avoid infinite recursion.  */
  if (TYPE_SIZES_GIMPLIFIED (type))
    return;

  /* Mark before recursing so self-referential types terminate.  */
  TYPE_SIZES_GIMPLIFIED (type) = 1;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
      /* Scalar types: gimplify the bounds, then copy the (possibly
	 replaced) bound trees into every variant of the type.  */
      gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
      gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);

      for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
	{
	  TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
	  TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
	}
      break;

    case ARRAY_TYPE:
      /* These types may not have declarations, so handle them here.  */
      gimplify_type_sizes (TREE_TYPE (type), list_p);
      gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
      /* Ensure VLA bounds aren't removed, for -O0 they should be variables
	 with assigned stack slots, for -O1+ -g they should be tracked
	 by VTA.  */
      if (!(TYPE_NAME (type)
	    && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
	    && DECL_IGNORED_P (TYPE_NAME (type)))
	  && TYPE_DOMAIN (type)
	  && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
	{
	  /* Un-ignore artificial VAR_DECL bounds so they survive.  */
	  t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
	  if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
	    DECL_IGNORED_P (t) = 0;
	  t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
	  if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
	    DECL_IGNORED_P (t) = 0;
	}
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      /* Gimplify the offset and sizes of every field, and recurse on
	 the field types themselves.  */
      for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	if (TREE_CODE (field) == FIELD_DECL)
	  {
	    gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
	    gimplify_one_sizepos (&DECL_SIZE (field), list_p);
	    gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
	    gimplify_type_sizes (TREE_TYPE (field), list_p);
	  }
      break;

    case POINTER_TYPE:
    case REFERENCE_TYPE:
	/* We used to recurse on the pointed-to type here, which turned out to
	   be incorrect because its definition might refer to variables not
	   yet initialized at this point if a forward declaration is involved.

	   It was actually useful for anonymous pointed-to types to ensure
	   that the sizes evaluation dominates every possible later use of the
	   values.  Restricting to such types here would be safe since there
	   is no possible forward declaration around, but would introduce an
	   undesirable middle-end semantic to anonymity.  We then defer to
	   front-ends the responsibility of ensuring that the sizes are
	   evaluated both early and late enough, e.g. by attaching artificial
	   type declarations to the tree.  */
      break;

    default:
      break;
    }

  /* Gimplify the overall size, then copy the results and the
     gimplified flag into every variant.  */
  gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
  gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);

  for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
    {
      TYPE_SIZE (t) = TYPE_SIZE (type);
      TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
      TYPE_SIZES_GIMPLIFIED (t) = 1;
    }
}
8088
8089 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
8090 a size or position, has had all of its SAVE_EXPRs evaluated.
8091 We add any required statements to *STMT_P. */
8092
8093 void
8094 gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
8095 {
8096 tree expr = *expr_p;
8097
8098 /* We don't do anything if the value isn't there, is constant, or contains
8099 A PLACEHOLDER_EXPR. We also don't want to do anything if it's already
8100 a VAR_DECL. If it's a VAR_DECL from another function, the gimplifier
8101 will want to replace it with a new variable, but that will cause problems
8102 if this type is from outside the function. It's OK to have that here. */
8103 if (expr == NULL_TREE || TREE_CONSTANT (expr)
8104 || TREE_CODE (expr) == VAR_DECL
8105 || CONTAINS_PLACEHOLDER_P (expr))
8106 return;
8107
8108 *expr_p = unshare_expr (expr);
8109
8110 gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue);
8111 expr = *expr_p;
8112 }
8113
8114 /* Gimplify the body of statements of FNDECL and return a GIMPLE_BIND node
8115 containing the sequence of corresponding GIMPLE statements. If DO_PARMS
8116 is true, also gimplify the parameters. */
8117
gimple
gimplify_body (tree fndecl, bool do_parms)
{
  location_t saved_location = input_location;
  gimple_seq parm_stmts, seq;
  gimple outer_bind;
  struct gimplify_ctx gctx;
  struct cgraph_node *cgn;

  timevar_push (TV_TREE_GIMPLIFY);

  /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
     gimplification.  */
  default_rtl_profile ();

  /* No gimplify context may be live: we push a fresh one here and pop
     it before returning.  */
  gcc_assert (gimplify_ctxp == NULL);
  push_gimplify_context (&gctx);

  /* Unshare most shared trees in the body and in that of any nested functions.
     It would seem we don't have to do this for nested functions because
     they are supposed to be output and then the outer function gimplified
     first, but the g++ front end doesn't always do it that way.  */
  unshare_body (fndecl);
  unvisit_body (fndecl);

  /* For nested functions (cgraph origin set), create the set used to
     record nonlocal VLAs seen in the body; destroyed below.  */
  cgn = cgraph_get_node (fndecl);
  if (cgn && cgn->origin)
    nonlocal_vlas = pointer_set_create ();

  /* Make sure input_location isn't set to something weird.  */
  input_location = DECL_SOURCE_LOCATION (fndecl);

  /* Resolve callee-copies.  This has to be done before processing
     the body so that DECL_VALUE_EXPR gets processed correctly.  */
  parm_stmts = do_parms ? gimplify_parameters () : NULL;

  /* Gimplify the function's body.  */
  seq = NULL;
  gimplify_stmt (&DECL_SAVED_TREE (fndecl), &seq);
  outer_bind = gimple_seq_first_stmt (seq);
  if (!outer_bind)
    {
      /* An empty body still needs a statement to hang things on.  */
      outer_bind = gimple_build_nop ();
      gimplify_seq_add_stmt (&seq, outer_bind);
    }

  /* The body must contain exactly one statement, a GIMPLE_BIND.  If this is
     not the case, wrap everything in a GIMPLE_BIND to make it so.  */
  if (gimple_code (outer_bind) == GIMPLE_BIND
      && gimple_seq_first (seq) == gimple_seq_last (seq))
    ;
  else
    outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);

  /* The GENERIC body is no longer needed.  */
  DECL_SAVED_TREE (fndecl) = NULL_TREE;

  /* If we had callee-copies statements, insert them at the beginning
     of the function and clear DECL_VALUE_EXPR_P on the parameters.  */
  if (!gimple_seq_empty_p (parm_stmts))
    {
      tree parm;

      gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
      gimple_bind_set_body (outer_bind, parm_stmts);

      for (parm = DECL_ARGUMENTS (current_function_decl);
	   parm; parm = DECL_CHAIN (parm))
	if (DECL_HAS_VALUE_EXPR_P (parm))
	  {
	    DECL_HAS_VALUE_EXPR_P (parm) = 0;
	    DECL_IGNORED_P (parm) = 0;
	  }
    }

  if (nonlocal_vlas)
    {
      pointer_set_destroy (nonlocal_vlas);
      nonlocal_vlas = NULL;
    }

  pop_gimplify_context (outer_bind);
  gcc_assert (gimplify_ctxp == NULL);

  /* Sanity-check the result unless errors were already reported.  */
  if (!seen_error ())
    verify_gimple_in_seq (gimple_bind_body (outer_bind));

  timevar_pop (TV_TREE_GIMPLIFY);
  input_location = saved_location;

  return outer_bind;
}
8209
/* VEC of C strings, used below for the
   -finstrument-functions-exclude-* option lists.  */
typedef char *char_p; /* For DEF_VEC_P.  */
DEF_VEC_P(char_p);
DEF_VEC_ALLOC_P(char_p,heap);
8213
8214 /* Return whether we should exclude FNDECL from instrumentation. */
8215
8216 static bool
8217 flag_instrument_functions_exclude_p (tree fndecl)
8218 {
8219 VEC(char_p,heap) *vec;
8220
8221 vec = (VEC(char_p,heap) *) flag_instrument_functions_exclude_functions;
8222 if (VEC_length (char_p, vec) > 0)
8223 {
8224 const char *name;
8225 int i;
8226 char *s;
8227
8228 name = lang_hooks.decl_printable_name (fndecl, 0);
8229 FOR_EACH_VEC_ELT (char_p, vec, i, s)
8230 if (strstr (name, s) != NULL)
8231 return true;
8232 }
8233
8234 vec = (VEC(char_p,heap) *) flag_instrument_functions_exclude_files;
8235 if (VEC_length (char_p, vec) > 0)
8236 {
8237 const char *name;
8238 int i;
8239 char *s;
8240
8241 name = DECL_SOURCE_FILE (fndecl);
8242 FOR_EACH_VEC_ELT (char_p, vec, i, s)
8243 if (strstr (name, s) != NULL)
8244 return true;
8245 }
8246
8247 return false;
8248 }
8249
8250 /* Entry point to the gimplification pass. FNDECL is the FUNCTION_DECL
8251 node for the function we want to gimplify.
8252
8253 Return the sequence of GIMPLE statements corresponding to the body
8254 of FNDECL. */
8255
void
gimplify_function_tree (tree fndecl)
{
  tree oldfn, parm, ret;
  gimple_seq seq;
  gimple bind;

  /* FNDECL must not have been gimplified already.  */
  gcc_assert (!gimple_body (fndecl));

  /* Enter FNDECL's context; restored before returning.  */
  oldfn = current_function_decl;
  current_function_decl = fndecl;
  if (DECL_STRUCT_FUNCTION (fndecl))
    push_cfun (DECL_STRUCT_FUNCTION (fndecl));
  else
    push_struct_function (fndecl);

  for (parm = DECL_ARGUMENTS (fndecl); parm ; parm = DECL_CHAIN (parm))
    {
      /* Preliminarily mark non-addressed complex variables as eligible
	 for promotion to gimple registers.  We'll transform their uses
	 as we find them.  */
      if ((TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
	   || TREE_CODE (TREE_TYPE (parm)) == VECTOR_TYPE)
	  && !TREE_THIS_VOLATILE (parm)
	  && !needs_to_live_in_memory (parm))
	DECL_GIMPLE_REG_P (parm) = 1;
    }

  /* Likewise for the result decl.  */
  ret = DECL_RESULT (fndecl);
  if ((TREE_CODE (TREE_TYPE (ret)) == COMPLEX_TYPE
       || TREE_CODE (TREE_TYPE (ret)) == VECTOR_TYPE)
      && !needs_to_live_in_memory (ret))
    DECL_GIMPLE_REG_P (ret) = 1;

  bind = gimplify_body (fndecl, true);

  /* The tree body of the function is no longer needed, replace it
     with the new GIMPLE body.  */
  seq = NULL;
  gimple_seq_add_stmt (&seq, bind);
  gimple_set_body (fndecl, seq);

  /* If we're instrumenting function entry/exit, then prepend the call to
     the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
     catch the exit hook.  */
  /* ??? Add some way to ignore exceptions for this TFE.  */
  if (flag_instrument_function_entry_exit
      && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
      && !flag_instrument_functions_exclude_p (fndecl))
    {
      tree x;
      gimple new_bind;
      gimple tf;
      gimple_seq cleanup = NULL, body = NULL;
      tree tmp_var;
      gimple call;

      /* Build the finally clause:
	   return_addr = __builtin_return_address (0);
	   __builtin_profile_func_exit (this_fn, return_addr);  */
      x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
      call = gimple_build_call (x, 1, integer_zero_node);
      tmp_var = create_tmp_var (ptr_type_node, "return_addr");
      gimple_call_set_lhs (call, tmp_var);
      gimplify_seq_add_stmt (&cleanup, call);
      x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_EXIT);
      call = gimple_build_call (x, 2,
				build_fold_addr_expr (current_function_decl),
				tmp_var);
      gimplify_seq_add_stmt (&cleanup, call);
      tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);

      /* Build the entry call similarly, then the try/finally after it.  */
      x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
      call = gimple_build_call (x, 1, integer_zero_node);
      tmp_var = create_tmp_var (ptr_type_node, "return_addr");
      gimple_call_set_lhs (call, tmp_var);
      gimplify_seq_add_stmt (&body, call);
      x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_ENTER);
      call = gimple_build_call (x, 2,
				build_fold_addr_expr (current_function_decl),
				tmp_var);
      gimplify_seq_add_stmt (&body, call);
      gimplify_seq_add_stmt (&body, tf);
      new_bind = gimple_build_bind (NULL, body, gimple_bind_block (bind));
      /* Clear the block for BIND, since it is no longer directly inside
	 the function, but within a try block.  */
      gimple_bind_set_block (bind, NULL);

      /* Replace the current function body with the body
	 wrapped in the try/finally TF.  */
      seq = NULL;
      gimple_seq_add_stmt (&seq, new_bind);
      gimple_set_body (fndecl, seq);
    }

  DECL_SAVED_TREE (fndecl) = NULL_TREE;
  cfun->curr_properties = PROP_gimple_any;

  /* Restore the caller's context.  */
  current_function_decl = oldfn;
  pop_cfun ();
}
8354
8355 /* Some transformations like inlining may invalidate the GIMPLE form
8356 for operands. This function traverses all the operands in STMT and
8357 gimplifies anything that is not a valid gimple operand. Any new
8358 GIMPLE statements are inserted before *GSI_P. */
8359
void
gimple_regimplify_operands (gimple stmt, gimple_stmt_iterator *gsi_p)
{
  size_t i, num_ops;
  tree orig_lhs = NULL_TREE, lhs, t;
  gimple_seq pre = NULL;
  gimple post_stmt = NULL;
  struct gimplify_ctx gctx;

  push_gimplify_context (&gctx);
  /* Gimplify into SSA form if the function already is in SSA.  */
  gimplify_ctxp->into_ssa = gimple_in_ssa_p (cfun);

  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      /* Both comparison operands must be gimple values.  */
      gimplify_expr (gimple_cond_lhs_ptr (stmt), &pre, NULL,
		     is_gimple_val, fb_rvalue);
      gimplify_expr (gimple_cond_rhs_ptr (stmt), &pre, NULL,
		     is_gimple_val, fb_rvalue);
      break;
    case GIMPLE_SWITCH:
      gimplify_expr (gimple_switch_index_ptr (stmt), &pre, NULL,
		     is_gimple_val, fb_rvalue);
      break;
    case GIMPLE_OMP_ATOMIC_LOAD:
      gimplify_expr (gimple_omp_atomic_load_rhs_ptr (stmt), &pre, NULL,
		     is_gimple_val, fb_rvalue);
      break;
    case GIMPLE_ASM:
      {
	size_t i, noutputs = gimple_asm_noutputs (stmt);
	const char *constraint, **oconstraints;
	bool allows_mem, allows_reg, is_inout;

	oconstraints
	  = (const char **) alloca ((noutputs) * sizeof (const char *));
	/* Re-gimplify each output to the lvalue form its constraint
	   requires; collect the constraints for the input pass below.  */
	for (i = 0; i < noutputs; i++)
	  {
	    tree op = gimple_asm_output_op (stmt, i);
	    constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
	    oconstraints[i] = constraint;
	    parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
				     &allows_reg, &is_inout);
	    gimplify_expr (&TREE_VALUE (op), &pre, NULL,
			   is_inout ? is_gimple_min_lval : is_gimple_lvalue,
			   fb_lvalue | fb_mayfail);
	  }
	/* Inputs: memory-only operands stay lvalues, others become
	   valid asm rvalues.  */
	for (i = 0; i < gimple_asm_ninputs (stmt); i++)
	  {
	    tree op = gimple_asm_input_op (stmt, i);
	    constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
	    parse_input_constraint (&constraint, 0, 0, noutputs, 0,
				    oconstraints, &allows_mem, &allows_reg);
	    if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (op))) && allows_mem)
	      allows_reg = 0;
	    if (!allows_reg && allows_mem)
	      gimplify_expr (&TREE_VALUE (op), &pre, NULL,
			     is_gimple_lvalue, fb_lvalue | fb_mayfail);
	    else
	      gimplify_expr (&TREE_VALUE (op), &pre, NULL,
			     is_gimple_asm_val, fb_rvalue);
	  }
      }
      break;
    default:
      /* NOTE: We start gimplifying operands from last to first to
	 make sure that side-effects on the RHS of calls, assignments
	 and ASMs are executed before the LHS.  The ordering is not
	 important for other statements.  */
      num_ops = gimple_num_ops (stmt);
      orig_lhs = gimple_get_lhs (stmt);
      for (i = num_ops; i > 0; i--)
	{
	  tree op = gimple_op (stmt, i - 1);
	  if (op == NULL_TREE)
	    continue;
	  /* Operand 0 of a call or assignment is its LHS.  */
	  if (i == 1 && (is_gimple_call (stmt) || is_gimple_assign (stmt)))
	    gimplify_expr (&op, &pre, NULL, is_gimple_lvalue, fb_lvalue);
	  else if (i == 2
		   && is_gimple_assign (stmt)
		   && num_ops == 2
		   && get_gimple_rhs_class (gimple_expr_code (stmt))
		      == GIMPLE_SINGLE_RHS)
	    /* Single-RHS assignment: the predicate depends on the LHS.  */
	    gimplify_expr (&op, &pre, NULL,
			   rhs_predicate_for (gimple_assign_lhs (stmt)),
			   fb_rvalue);
	  else if (i == 2 && is_gimple_call (stmt))
	    {
	      /* A direct callee needs no gimplification.  */
	      if (TREE_CODE (op) == FUNCTION_DECL)
		continue;
	      gimplify_expr (&op, &pre, NULL, is_gimple_call_addr, fb_rvalue);
	    }
	  else
	    gimplify_expr (&op, &pre, NULL, is_gimple_val, fb_rvalue);
	  gimple_set_op (stmt, i - 1, op);
	}

      lhs = gimple_get_lhs (stmt);
      /* If the LHS changed it in a way that requires a simple RHS,
	 create temporary.  */
      if (lhs && !is_gimple_reg (lhs))
	{
	  bool need_temp = false;

	  if (is_gimple_assign (stmt)
	      && num_ops == 2
	      && get_gimple_rhs_class (gimple_expr_code (stmt))
		 == GIMPLE_SINGLE_RHS)
	    gimplify_expr (gimple_assign_rhs1_ptr (stmt), &pre, NULL,
			   rhs_predicate_for (gimple_assign_lhs (stmt)),
			   fb_rvalue);
	  else if (is_gimple_reg (lhs))
	    {
	      if (is_gimple_reg_type (TREE_TYPE (lhs)))
		{
		  /* Calls that are not const/pure (or may loop even if
		     const/pure), and statements that can throw
		     internally, get their result via a temporary.  */
		  if (is_gimple_call (stmt))
		    {
		      i = gimple_call_flags (stmt);
		      if ((i & ECF_LOOPING_CONST_OR_PURE)
			  || !(i & (ECF_CONST | ECF_PURE)))
			need_temp = true;
		    }
		  if (stmt_can_throw_internal (stmt))
		    need_temp = true;
		}
	    }
	  else
	    {
	      if (is_gimple_reg_type (TREE_TYPE (lhs)))
		need_temp = true;
	      else if (TYPE_MODE (TREE_TYPE (lhs)) != BLKmode)
		{
		  if (is_gimple_call (stmt))
		    {
		      tree fndecl = gimple_call_fndecl (stmt);

		      /* No temporary needed when the value is returned
			 in memory anyway (aggregate return or
			 DECL_BY_REFERENCE result).  */
		      if (!aggregate_value_p (TREE_TYPE (lhs), fndecl)
			  && !(fndecl && DECL_RESULT (fndecl)
			       && DECL_BY_REFERENCE (DECL_RESULT (fndecl))))
			need_temp = true;
		    }
		  else
		    need_temp = true;
		}
	    }
	  if (need_temp)
	    {
	      /* Compute into a fresh temporary and copy it to the
		 original LHS afterwards (POST_STMT, inserted below).  */
	      tree temp = create_tmp_reg (TREE_TYPE (lhs), NULL);

	      if (TREE_CODE (orig_lhs) == SSA_NAME)
		orig_lhs = SSA_NAME_VAR (orig_lhs);

	      if (gimple_in_ssa_p (cfun))
		temp = make_ssa_name (temp, NULL);
	      gimple_set_lhs (stmt, temp);
	      post_stmt = gimple_build_assign (lhs, temp);
	      if (TREE_CODE (lhs) == SSA_NAME)
		SSA_NAME_DEF_STMT (lhs) = post_stmt;
	    }
	}
      break;
    }

  /* Register any temporaries the gimplifier created.  */
  if (gimple_referenced_vars (cfun))
    for (t = gimplify_ctxp->temps; t ; t = TREE_CHAIN (t))
      add_referenced_var (t);

  if (!gimple_seq_empty_p (pre))
    {
      if (gimple_in_ssa_p (cfun))
	{
	  gimple_stmt_iterator i;

	  /* Only the LHS of each new statement may need renaming
	     into SSA form (see the HEAD changelog entry).  */
	  for (i = gsi_start (pre); !gsi_end_p (i); gsi_next (&i))
	    {
	      tree lhs = gimple_get_lhs (gsi_stmt (i));
	      if (lhs
		  && TREE_CODE (lhs) != SSA_NAME
		  && is_gimple_reg (lhs))
		mark_sym_for_renaming (lhs);
	    }
	}
      gsi_insert_seq_before (gsi_p, pre, GSI_SAME_STMT);
    }
  if (post_stmt)
    gsi_insert_after (gsi_p, post_stmt, GSI_NEW_STMT);

  pop_gimplify_context (NULL);
}
8549
8550 /* Expand EXPR to list of gimple statements STMTS. GIMPLE_TEST_F specifies
8551 the predicate that will hold for the result. If VAR is not NULL, make the
8552 base variable of the final destination be VAR if suitable. */
8553
tree
force_gimple_operand_1 (tree expr, gimple_seq *stmts,
			gimple_predicate gimple_test_f, tree var)
{
  tree t;
  enum gimplify_status ret;
  struct gimplify_ctx gctx;

  *stmts = NULL;

  /* gimple_test_f might be more strict than is_gimple_val, make
     sure we pass both.  Just checking gimple_test_f doesn't work
     because most gimple predicates do not work recursively.  */
  if (is_gimple_val (expr)
      && (*gimple_test_f) (expr))
    return expr;

  push_gimplify_context (&gctx);
  gimplify_ctxp->into_ssa = gimple_in_ssa_p (cfun);
  gimplify_ctxp->allow_rhs_cond_expr = true;

  /* If a destination was requested, gimplify VAR = EXPR instead.  */
  if (var)
    expr = build2 (MODIFY_EXPR, TREE_TYPE (var), var, expr);

  if (TREE_CODE (expr) != MODIFY_EXPR
      && TREE_TYPE (expr) == void_type_node)
    {
      /* A void expression yields no value; just emit its statements.  */
      gimplify_and_add (expr, stmts);
      expr = NULL_TREE;
    }
  else
    {
      ret = gimplify_expr (&expr, stmts, NULL, gimple_test_f, fb_rvalue);
      gcc_assert (ret != GS_ERROR);
    }

  /* Register any temporaries the gimplifier created.  */
  if (gimple_referenced_vars (cfun))
    for (t = gimplify_ctxp->temps; t ; t = DECL_CHAIN (t))
      add_referenced_var (t);

  /* In SSA form, symbols assigned by the new statements must be
     marked for renaming.  */
  if (!gimple_seq_empty_p (*stmts)
      && gimplify_ctxp->into_ssa)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start (*stmts); !gsi_end_p (i); gsi_next (&i))
	{
	  tree lhs = gimple_get_lhs (gsi_stmt (i));
	  if (lhs
	      && TREE_CODE (lhs) != SSA_NAME
	      && is_gimple_reg (lhs))
	    mark_sym_for_renaming (lhs);
	}
    }

  pop_gimplify_context (NULL);

  return expr;
}
8613
8614 /* Expand EXPR to list of gimple statements STMTS. If SIMPLE is true,
8615 force the result to be either ssa_name or an invariant, otherwise
8616 just force it to be a rhs expression. If VAR is not NULL, make the
8617 base variable of the final destination be VAR if suitable. */
8618
8619 tree
8620 force_gimple_operand (tree expr, gimple_seq *stmts, bool simple, tree var)
8621 {
8622 return force_gimple_operand_1 (expr, stmts,
8623 simple ? is_gimple_val : is_gimple_reg_rhs,
8624 var);
8625 }
8626
8627 /* Invoke force_gimple_operand_1 for EXPR with parameters GIMPLE_TEST_F
8628 and VAR. If some statements are produced, emits them at GSI.
   If BEFORE is true, the statements are appended before GSI, otherwise
8630 they are appended after it. M specifies the way GSI moves after
8631 insertion (GSI_SAME_STMT or GSI_CONTINUE_LINKING are the usual values). */
8632
8633 tree
8634 force_gimple_operand_gsi_1 (gimple_stmt_iterator *gsi, tree expr,
8635 gimple_predicate gimple_test_f,
8636 tree var, bool before,
8637 enum gsi_iterator_update m)
8638 {
8639 gimple_seq stmts;
8640
8641 expr = force_gimple_operand_1 (expr, &stmts, gimple_test_f, var);
8642
8643 if (!gimple_seq_empty_p (stmts))
8644 {
8645 if (before)
8646 gsi_insert_seq_before (gsi, stmts, m);
8647 else
8648 gsi_insert_seq_after (gsi, stmts, m);
8649 }
8650
8651 return expr;
8652 }
8653
8654 /* Invoke force_gimple_operand_1 for EXPR with parameter VAR.
8655 If SIMPLE is true, force the result to be either ssa_name or an invariant,
8656 otherwise just force it to be a rhs expression. If some statements are
8657 produced, emits them at GSI. If BEFORE is true, the statements are
8658 appended before GSI, otherwise they are appended after it. M specifies
8659 the way GSI moves after insertion (GSI_SAME_STMT or GSI_CONTINUE_LINKING
8660 are the usual values). */
8661
8662 tree
8663 force_gimple_operand_gsi (gimple_stmt_iterator *gsi, tree expr,
8664 bool simple_p, tree var, bool before,
8665 enum gsi_iterator_update m)
8666 {
8667 return force_gimple_operand_gsi_1 (gsi, expr,
8668 simple_p
8669 ? is_gimple_val : is_gimple_reg_rhs,
8670 var, before, m);
8671 }
8672
8673
8674 #include "gt-gimplify.h"