re PR middle-end/28724 (atomic produces type mismatches)
gcc/gimplify.c
1 /* Tree lowering pass. This pass converts the GENERIC functions-as-trees
2 tree representation into the GIMPLE form.
3 Copyright (C) 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.
4 Major work done by Sebastian Pop <s.pop@laposte.net>,
5 Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 2, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING. If not, write to the Free
21 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
22 02110-1301, USA. */
23
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "tm.h"
28 #include "tree.h"
29 #include "rtl.h"
30 #include "varray.h"
31 #include "tree-gimple.h"
32 #include "tree-inline.h"
33 #include "diagnostic.h"
34 #include "langhooks.h"
35 #include "langhooks-def.h"
36 #include "tree-flow.h"
37 #include "cgraph.h"
38 #include "timevar.h"
39 #include "except.h"
40 #include "hashtab.h"
41 #include "flags.h"
42 #include "real.h"
43 #include "function.h"
44 #include "output.h"
45 #include "expr.h"
46 #include "ggc.h"
47 #include "toplev.h"
48 #include "target.h"
49 #include "optabs.h"
50 #include "pointer-set.h"
51
52
53 enum gimplify_omp_var_data
54 {
55 GOVD_SEEN = 1,
56 GOVD_EXPLICIT = 2,
57 GOVD_SHARED = 4,
58 GOVD_PRIVATE = 8,
59 GOVD_FIRSTPRIVATE = 16,
60 GOVD_LASTPRIVATE = 32,
61 GOVD_REDUCTION = 64,
62 GOVD_LOCAL = 128,
63 GOVD_DEBUG_PRIVATE = 256,
64 GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
65 | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LOCAL)
66 };
67
68 struct gimplify_omp_ctx
69 {
70 struct gimplify_omp_ctx *outer_context;
71 splay_tree variables;
72 struct pointer_set_t *privatized_types;
73 location_t location;
74 enum omp_clause_default_kind default_kind;
75 bool is_parallel;
76 bool is_combined_parallel;
77 };
78
79 struct gimplify_ctx
80 {
81 struct gimplify_ctx *prev_context;
82
83 tree current_bind_expr;
84 tree temps;
85 tree conditional_cleanups;
86 tree exit_label;
87 tree return_temp;
88
89 VEC(tree,heap) *case_labels;
90 /* The formal temporary table. Should this be persistent? */
91 htab_t temp_htab;
92
93 int conditions;
94 bool save_stack;
95 bool into_ssa;
96 };
97
98 static struct gimplify_ctx *gimplify_ctxp;
99 static struct gimplify_omp_ctx *gimplify_omp_ctxp;
100
101
102
103 /* Formal (expression) temporary table handling: Multiple occurrences of
104 the same scalar expression are evaluated into the same temporary. */
105
106 typedef struct gimple_temp_hash_elt
107 {
108 tree val; /* Key */
109 tree temp; /* Value */
110 } elt_t;
111
112 /* Forward declarations. */
113 static enum gimplify_status gimplify_compound_expr (tree *, tree *, bool);
114 #ifdef ENABLE_CHECKING
115 static bool cpt_same_type (tree a, tree b);
116 #endif
117
118
119 /* Return a hash value for a formal temporary table entry. */
120
121 static hashval_t
122 gimple_tree_hash (const void *p)
123 {
124 tree t = ((const elt_t *) p)->val;
125 return iterative_hash_expr (t, 0);
126 }
127
128 /* Compare two formal temporary table entries. */
129
130 static int
131 gimple_tree_eq (const void *p1, const void *p2)
132 {
133 tree t1 = ((const elt_t *) p1)->val;
134 tree t2 = ((const elt_t *) p2)->val;
135 enum tree_code code = TREE_CODE (t1);
136
137 if (TREE_CODE (t2) != code
138 || TREE_TYPE (t1) != TREE_TYPE (t2))
139 return 0;
140
141 if (!operand_equal_p (t1, t2, 0))
142 return 0;
143
144 /* Only allow them to compare equal if they also hash equal; otherwise
145 results are nondeterministic, and we fail bootstrap comparison. */
146 gcc_assert (gimple_tree_hash (p1) == gimple_tree_hash (p2));
147
148 return 1;
149 }
150
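/* For illustration (temporary names invented): with optimization enabled,
   the table above makes repeated gimplifications of the same scalar
   expression target one temporary. Both occurrences of a + b in

       x = (a + b) * c;   y = (a + b) * d;

   are evaluated into a single D.1 rather than into two distinct
   temporaries, which keeps the IL smaller and makes the redundancy easy
   for later passes to remove (see lookup_tmp_var below). */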
151 /* Set up a context for the gimplifier. */
152
153 void
154 push_gimplify_context (void)
155 {
156 struct gimplify_ctx *c;
157
158 c = (struct gimplify_ctx *) xcalloc (1, sizeof (struct gimplify_ctx));
159 c->prev_context = gimplify_ctxp;
160 if (optimize)
161 c->temp_htab = htab_create (1000, gimple_tree_hash, gimple_tree_eq, free);
162
163 gimplify_ctxp = c;
164 }
165
166 /* Tear down a context for the gimplifier. If BODY is non-null, then
167 put the temporaries into the outer BIND_EXPR. Otherwise, put them
168 in the unexpanded_var_list. */
169
170 void
171 pop_gimplify_context (tree body)
172 {
173 struct gimplify_ctx *c = gimplify_ctxp;
174 tree t;
175
176 gcc_assert (c && !c->current_bind_expr);
177 gimplify_ctxp = c->prev_context;
178
179 for (t = c->temps; t ; t = TREE_CHAIN (t))
180 DECL_GIMPLE_FORMAL_TEMP_P (t) = 0;
181
182 if (body)
183 declare_vars (c->temps, body, false);
184 else
185 record_vars (c->temps);
186
187 if (optimize)
188 htab_delete (c->temp_htab);
189 free (c);
190 }
191
192 static void
193 gimple_push_bind_expr (tree bind)
194 {
195 TREE_CHAIN (bind) = gimplify_ctxp->current_bind_expr;
196 gimplify_ctxp->current_bind_expr = bind;
197 }
198
199 static void
200 gimple_pop_bind_expr (void)
201 {
202 gimplify_ctxp->current_bind_expr
203 = TREE_CHAIN (gimplify_ctxp->current_bind_expr);
204 }
205
206 tree
207 gimple_current_bind_expr (void)
208 {
209 return gimplify_ctxp->current_bind_expr;
210 }
211
212 /* Returns true iff there is a COND_EXPR between us and the innermost
213 CLEANUP_POINT_EXPR. This info is used by gimple_push_cleanup. */
214
215 static bool
216 gimple_conditional_context (void)
217 {
218 return gimplify_ctxp->conditions > 0;
219 }
220
221 /* Note that we've entered a COND_EXPR. */
222
223 static void
224 gimple_push_condition (void)
225 {
226 #ifdef ENABLE_CHECKING
227 if (gimplify_ctxp->conditions == 0)
228 gcc_assert (!gimplify_ctxp->conditional_cleanups);
229 #endif
230 ++(gimplify_ctxp->conditions);
231 }
232
233 /* Note that we've left a COND_EXPR. If we're back at unconditional scope
234 now, add any conditional cleanups we've seen to the prequeue. */
235
236 static void
237 gimple_pop_condition (tree *pre_p)
238 {
239 int conds = --(gimplify_ctxp->conditions);
240
241 gcc_assert (conds >= 0);
242 if (conds == 0)
243 {
244 append_to_statement_list (gimplify_ctxp->conditional_cleanups, pre_p);
245 gimplify_ctxp->conditional_cleanups = NULL_TREE;
246 }
247 }
248
249 /* A stable comparison routine for use with splay trees and DECLs. */
250
251 static int
252 splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
253 {
254 tree a = (tree) xa;
255 tree b = (tree) xb;
256
257 return DECL_UID (a) - DECL_UID (b);
258 }
259
260 /* Create a new omp construct that deals with variable remapping. */
261
262 static struct gimplify_omp_ctx *
263 new_omp_context (bool is_parallel, bool is_combined_parallel)
264 {
265 struct gimplify_omp_ctx *c;
266
267 c = XCNEW (struct gimplify_omp_ctx);
268 c->outer_context = gimplify_omp_ctxp;
269 c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
270 c->privatized_types = pointer_set_create ();
271 c->location = input_location;
272 c->is_parallel = is_parallel;
273 c->is_combined_parallel = is_combined_parallel;
274 c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
275
276 return c;
277 }
278
279 /* Destroy an omp construct that deals with variable remapping. */
280
281 static void
282 delete_omp_context (struct gimplify_omp_ctx *c)
283 {
284 splay_tree_delete (c->variables);
285 pointer_set_destroy (c->privatized_types);
286 XDELETE (c);
287 }
288
289 static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
290 static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);
291
292 /* A subroutine of append_to_statement_list{,_force}. T is not NULL. */
293
294 static void
295 append_to_statement_list_1 (tree t, tree *list_p)
296 {
297 tree list = *list_p;
298 tree_stmt_iterator i;
299
300 if (!list)
301 {
302 if (t && TREE_CODE (t) == STATEMENT_LIST)
303 {
304 *list_p = t;
305 return;
306 }
307 *list_p = list = alloc_stmt_list ();
308 }
309
310 i = tsi_last (list);
311 tsi_link_after (&i, t, TSI_CONTINUE_LINKING);
312 }
313
314 /* Add T to the end of the list container pointed to by LIST_P.
315 If T is an expression with no effects, it is ignored. */
316
317 void
318 append_to_statement_list (tree t, tree *list_p)
319 {
320 if (t && TREE_SIDE_EFFECTS (t))
321 append_to_statement_list_1 (t, list_p);
322 }
323
324 /* Similar, but the statement is always added, regardless of side effects. */
325
326 void
327 append_to_statement_list_force (tree t, tree *list_p)
328 {
329 if (t != NULL_TREE)
330 append_to_statement_list_1 (t, list_p);
331 }
332
333 /* Both gimplify the statement T and append it to LIST_P. */
334
335 void
336 gimplify_and_add (tree t, tree *list_p)
337 {
338 gimplify_stmt (&t);
339 append_to_statement_list (t, list_p);
340 }
341
342 /* Strip off a legitimate source ending from the input string NAME of
343 length LEN. Rather than having to know the names used by all of
344 our front ends, we strip off an ending of a period followed by
345 up to five characters. (Java uses ".class".) */
346
347 static inline void
348 remove_suffix (char *name, int len)
349 {
350 int i;
351
352 for (i = 2; i < 8 && len > i; i++)
353 {
354 if (name[len - i] == '.')
355 {
356 name[len - i] = '\0';
357 break;
358 }
359 }
360 }
361
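/* A stand-alone sketch of the scan above, not part of GCC; it repeats the
   same loop so it can be compiled and run by itself. */
#include <stdio.h>
#include <string.h>

int
main (void)
{
  char name[] = "Shape.class";  /* e.g. a prefix derived from a Java name */
  int len = strlen (name);
  int i;

  /* Same scan as remove_suffix: look for a '.' near the end of the
     string and truncate there. */
  for (i = 2; i < 8 && len > i; i++)
    if (name[len - i] == '.')
      {
        name[len - i] = '\0';
        break;
      }

  printf ("%s\n", name);        /* prints "Shape" */
  return 0;
}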
362 /* Create a nameless artificial label and put it in the current function
363 context. Returns the newly created label. */
364
365 tree
366 create_artificial_label (void)
367 {
368 tree lab = build_decl (LABEL_DECL, NULL_TREE, void_type_node);
369
370 DECL_ARTIFICIAL (lab) = 1;
371 DECL_IGNORED_P (lab) = 1;
372 DECL_CONTEXT (lab) = current_function_decl;
373 return lab;
374 }
375
376 /* Subroutine for find_single_pointer_decl. */
377
378 static tree
379 find_single_pointer_decl_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
380 void *data)
381 {
382 tree *pdecl = (tree *) data;
383
384 if (DECL_P (*tp) && POINTER_TYPE_P (TREE_TYPE (*tp)))
385 {
386 if (*pdecl)
387 {
388 /* We already found a pointer decl; return anything other
389 than NULL_TREE to unwind from walk_tree signalling that
390 we have a duplicate. */
391 return *tp;
392 }
393 *pdecl = *tp;
394 }
395
396 return NULL_TREE;
397 }
398
399 /* Find the single DECL of pointer type in the tree T and return it.
400 If there are zero or more than one such DECLs, return NULL. */
401
402 static tree
403 find_single_pointer_decl (tree t)
404 {
405 tree decl = NULL_TREE;
406
407 if (walk_tree (&t, find_single_pointer_decl_1, &decl, NULL))
408 {
409 /* find_single_pointer_decl_1 returns a nonzero value, causing
410 walk_tree to return a nonzero value, to indicate that it
411 found more than one pointer DECL. */
412 return NULL_TREE;
413 }
414
415 return decl;
416 }
417
418 /* Create a new temporary name with PREFIX. Returns an identifier. */
419
420 static GTY(()) unsigned int tmp_var_id_num;
421
422 tree
423 create_tmp_var_name (const char *prefix)
424 {
425 char *tmp_name;
426
427 if (prefix)
428 {
429 char *preftmp = ASTRDUP (prefix);
430
431 remove_suffix (preftmp, strlen (preftmp));
432 prefix = preftmp;
433 }
434
435 ASM_FORMAT_PRIVATE_NAME (tmp_name, prefix ? prefix : "T", tmp_var_id_num++);
436 return get_identifier (tmp_name);
437 }
438
439
440 /* Create a new temporary variable declaration of type TYPE.
441 Does NOT push it into the current binding. */
442
443 tree
444 create_tmp_var_raw (tree type, const char *prefix)
445 {
446 tree tmp_var;
447 tree new_type;
448
449 /* Make the type of the variable writable. */
450 new_type = build_type_variant (type, 0, 0);
451 TYPE_ATTRIBUTES (new_type) = TYPE_ATTRIBUTES (type);
452
453 tmp_var = build_decl (VAR_DECL, prefix ? create_tmp_var_name (prefix) : NULL,
454 type);
455
456 /* The variable was declared by the compiler. */
457 DECL_ARTIFICIAL (tmp_var) = 1;
458 /* And we don't want debug info for it. */
459 DECL_IGNORED_P (tmp_var) = 1;
460
461 /* Make the variable writable. */
462 TREE_READONLY (tmp_var) = 0;
463
464 DECL_EXTERNAL (tmp_var) = 0;
465 TREE_STATIC (tmp_var) = 0;
466 TREE_USED (tmp_var) = 1;
467
468 return tmp_var;
469 }
470
471 /* Create a new temporary variable declaration of type TYPE. DOES push the
472 variable into the current binding. Further, assume that this is called
473 only from gimplification or optimization, at which point the creation of
474 certain types is a bug. */
475
476 tree
477 create_tmp_var (tree type, const char *prefix)
478 {
479 tree tmp_var;
480
481 /* We don't allow types that are addressable (meaning we can't make copies),
482 or incomplete. We also used to reject all variable-sized objects here,
483 but now support those for which a constant upper bound can be obtained.
484 The processing for variable sizes is performed in gimple_add_tmp_var,
485 the point at which it really matters and which may be reached via paths not
486 going through this function, e.g. after direct calls to create_tmp_var_raw. */
487 gcc_assert (!TREE_ADDRESSABLE (type) && COMPLETE_TYPE_P (type));
488
489 tmp_var = create_tmp_var_raw (type, prefix);
490 gimple_add_tmp_var (tmp_var);
491 return tmp_var;
492 }
493
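/* Usage sketch, not taken from these lines: a typical caller writes

       tree tmp = create_tmp_var (TREE_TYPE (expr), "iftmp");

   and then emits an assignment "tmp = expr"; prefixes such as "iftmp" and
   "retval" seen in GIMPLE dumps come from calls of this form. */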
494 /* Given a tree, try to return a useful variable name that we can use
495 to prefix a temporary that is being assigned the value of the tree.
496 I.E. given <temp> = &A, return A. */
497
498 const char *
499 get_name (tree t)
500 {
501 tree stripped_decl;
502
503 stripped_decl = t;
504 STRIP_NOPS (stripped_decl);
505 if (DECL_P (stripped_decl) && DECL_NAME (stripped_decl))
506 return IDENTIFIER_POINTER (DECL_NAME (stripped_decl));
507 else
508 {
509 switch (TREE_CODE (stripped_decl))
510 {
511 case ADDR_EXPR:
512 return get_name (TREE_OPERAND (stripped_decl, 0));
513 break;
514 default:
515 return NULL;
516 }
517 }
518 }
519
520 /* Create a temporary with a name derived from VAL. Subroutine of
521 lookup_tmp_var; nobody else should call this function. */
522
523 static inline tree
524 create_tmp_from_val (tree val)
525 {
526 return create_tmp_var (TYPE_MAIN_VARIANT (TREE_TYPE (val)), get_name (val));
527 }
528
529 /* Create a temporary to hold the value of VAL. If IS_FORMAL, try to reuse
530 an existing expression temporary. */
531
532 static tree
533 lookup_tmp_var (tree val, bool is_formal)
534 {
535 tree ret;
536
537 /* If not optimizing, never really reuse a temporary. local-alloc
538 won't allocate any variable that is used in more than one basic
539 block, which means it will go into memory, causing much extra
540 work in reload and final and poorer code generation, outweighing
541 the extra memory allocation here. */
542 if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
543 ret = create_tmp_from_val (val);
544 else
545 {
546 elt_t elt, *elt_p;
547 void **slot;
548
549 elt.val = val;
550 slot = htab_find_slot (gimplify_ctxp->temp_htab, (void *)&elt, INSERT);
551 if (*slot == NULL)
552 {
553 elt_p = XNEW (elt_t);
554 elt_p->val = val;
555 elt_p->temp = ret = create_tmp_from_val (val);
556 *slot = (void *) elt_p;
557 }
558 else
559 {
560 elt_p = (elt_t *) *slot;
561 ret = elt_p->temp;
562 }
563 }
564
565 if (is_formal)
566 DECL_GIMPLE_FORMAL_TEMP_P (ret) = 1;
567
568 return ret;
569 }
570
571 /* Returns a formal temporary variable initialized with VAL. PRE_P is as
572 in gimplify_expr. Only use this function if:
573
574 1) The value of the unfactored expression represented by VAL will not
575 change between the initialization and use of the temporary, and
576 2) The temporary will not be otherwise modified.
577
578 For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
579 and #2 means it is inappropriate for && temps.
580
581 For other cases, use get_initialized_tmp_var instead. */
582
583 static tree
584 internal_get_tmp_var (tree val, tree *pre_p, tree *post_p, bool is_formal)
585 {
586 tree t, mod;
587
588 gimplify_expr (&val, pre_p, post_p, is_gimple_formal_tmp_rhs, fb_rvalue);
589
590 t = lookup_tmp_var (val, is_formal);
591
592 if (is_formal)
593 {
594 tree u = find_single_pointer_decl (val);
595
596 if (u && TREE_CODE (u) == VAR_DECL && DECL_BASED_ON_RESTRICT_P (u))
597 u = DECL_GET_RESTRICT_BASE (u);
598 if (u && TYPE_RESTRICT (TREE_TYPE (u)))
599 {
600 if (DECL_BASED_ON_RESTRICT_P (t))
601 gcc_assert (u == DECL_GET_RESTRICT_BASE (t));
602 else
603 {
604 DECL_BASED_ON_RESTRICT_P (t) = 1;
605 SET_DECL_RESTRICT_BASE (t, u);
606 }
607 }
608 }
609
610 if (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE)
611 DECL_COMPLEX_GIMPLE_REG_P (t) = 1;
612
613 mod = build2 (INIT_EXPR, TREE_TYPE (t), t, val);
614
615 if (EXPR_HAS_LOCATION (val))
616 SET_EXPR_LOCUS (mod, EXPR_LOCUS (val));
617 else
618 SET_EXPR_LOCATION (mod, input_location);
619
620 /* gimplify_modify_expr might want to reduce this further. */
621 gimplify_and_add (mod, pre_p);
622
623 /* If we're gimplifying into ssa, gimplify_modify_expr will have
624 given our temporary an ssa name. Find and return it. */
625 if (gimplify_ctxp->into_ssa)
626 t = TREE_OPERAND (mod, 0);
627
628 return t;
629 }
630
631 /* Returns a formal temporary variable initialized with VAL. PRE_P
632 points to a statement list where side-effects needed to compute VAL
633 should be stored. */
634
635 tree
636 get_formal_tmp_var (tree val, tree *pre_p)
637 {
638 return internal_get_tmp_var (val, pre_p, NULL, true);
639 }
640
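/* For illustration (temporary name invented): get_formal_tmp_var (a + b,
   pre_p) appends roughly "D.1 = a + b" to *PRE_P and returns D.1, so the
   caller can use D.1 wherever the original expression stood. */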
641 /* Returns a temporary variable initialized with VAL. PRE_P and POST_P
642 are as in gimplify_expr. */
643
644 tree
645 get_initialized_tmp_var (tree val, tree *pre_p, tree *post_p)
646 {
647 return internal_get_tmp_var (val, pre_p, post_p, false);
648 }
649
650 /* Declares all the variables in VARS in SCOPE. If DEBUG_INFO is
651 true, generate debug info for them; otherwise don't. */
652
653 void
654 declare_vars (tree vars, tree scope, bool debug_info)
655 {
656 tree last = vars;
657 if (last)
658 {
659 tree temps, block;
660
661 /* C99 mode puts the default 'return 0;' for main outside the outer
662 braces. So drill down until we find an actual scope. */
663 while (TREE_CODE (scope) == COMPOUND_EXPR)
664 scope = TREE_OPERAND (scope, 0);
665
666 gcc_assert (TREE_CODE (scope) == BIND_EXPR);
667
668 temps = nreverse (last);
669
670 block = BIND_EXPR_BLOCK (scope);
671 if (!block || !debug_info)
672 {
673 TREE_CHAIN (last) = BIND_EXPR_VARS (scope);
674 BIND_EXPR_VARS (scope) = temps;
675 }
676 else
677 {
678 /* We need to attach the nodes both to the BIND_EXPR and to its
679 associated BLOCK for debugging purposes. The key point here
680 is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
681 is a subchain of the BIND_EXPR_VARS of the BIND_EXPR. */
682 if (BLOCK_VARS (block))
683 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
684 else
685 {
686 BIND_EXPR_VARS (scope) = chainon (BIND_EXPR_VARS (scope), temps);
687 BLOCK_VARS (block) = temps;
688 }
689 }
690 }
691 }
692
693 /* For VAR a VAR_DECL of variable size, try to find a constant upper bound
694 for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly. Abort if
695 no such upper bound can be obtained. */
696
697 static void
698 force_constant_size (tree var)
699 {
700 /* The only attempt we make is by querying the maximum size of objects
701 of the variable's type. */
702
703 HOST_WIDE_INT max_size;
704
705 gcc_assert (TREE_CODE (var) == VAR_DECL);
706
707 max_size = max_int_size_in_bytes (TREE_TYPE (var));
708
709 gcc_assert (max_size >= 0);
710
711 DECL_SIZE_UNIT (var)
712 = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
713 DECL_SIZE (var)
714 = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
715 }
716
717 void
718 gimple_add_tmp_var (tree tmp)
719 {
720 gcc_assert (!TREE_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));
721
722 /* Later processing assumes that the object size is constant, which might
723 not be true at this point. Force the use of a constant upper bound in
724 this case. */
725 if (!host_integerp (DECL_SIZE_UNIT (tmp), 1))
726 force_constant_size (tmp);
727
728 DECL_CONTEXT (tmp) = current_function_decl;
729 DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;
730
731 if (gimplify_ctxp)
732 {
733 TREE_CHAIN (tmp) = gimplify_ctxp->temps;
734 gimplify_ctxp->temps = tmp;
735
736 /* Mark temporaries local within the nearest enclosing parallel. */
737 if (gimplify_omp_ctxp)
738 {
739 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
740 while (ctx && !ctx->is_parallel)
741 ctx = ctx->outer_context;
742 if (ctx)
743 omp_add_variable (ctx, tmp, GOVD_LOCAL | GOVD_SEEN);
744 }
745 }
746 else if (cfun)
747 record_vars (tmp);
748 else
749 declare_vars (tmp, DECL_SAVED_TREE (current_function_decl), false);
750 }
751
752 /* Determines whether to assign a locus to the statement STMT. */
753
754 static bool
755 should_carry_locus_p (tree stmt)
756 {
757 /* Don't emit a line note for a label. We particularly don't want to
758 emit one for the break label, since it doesn't actually correspond
759 to the beginning of the loop/switch. */
760 if (TREE_CODE (stmt) == LABEL_EXPR)
761 return false;
762
763 /* Do not annotate empty statements, since it confuses gcov. */
764 if (!TREE_SIDE_EFFECTS (stmt))
765 return false;
766
767 return true;
768 }
769
770 static void
771 annotate_one_with_locus (tree t, location_t locus)
772 {
773 if (EXPR_P (t) && ! EXPR_HAS_LOCATION (t) && should_carry_locus_p (t))
774 SET_EXPR_LOCATION (t, locus);
775 }
776
777 void
778 annotate_all_with_locus (tree *stmt_p, location_t locus)
779 {
780 tree_stmt_iterator i;
781
782 if (!*stmt_p)
783 return;
784
785 for (i = tsi_start (*stmt_p); !tsi_end_p (i); tsi_next (&i))
786 {
787 tree t = tsi_stmt (i);
788
789 /* Assuming we've already been gimplified, we shouldn't
790 see nested chaining constructs anymore. */
791 gcc_assert (TREE_CODE (t) != STATEMENT_LIST
792 && TREE_CODE (t) != COMPOUND_EXPR);
793
794 annotate_one_with_locus (t, locus);
795 }
796 }
797
798 /* Similar to copy_tree_r() but do not copy SAVE_EXPR or TARGET_EXPR nodes.
799 These nodes model computations that should only be done once. If we
800 were to unshare something like SAVE_EXPR(i++), the gimplification
801 process would create wrong code. */
802
803 static tree
804 mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
805 {
806 enum tree_code code = TREE_CODE (*tp);
807 /* Don't unshare types, decls, constants and SAVE_EXPR nodes. */
808 if (TREE_CODE_CLASS (code) == tcc_type
809 || TREE_CODE_CLASS (code) == tcc_declaration
810 || TREE_CODE_CLASS (code) == tcc_constant
811 || code == SAVE_EXPR || code == TARGET_EXPR
812 /* We can't do anything sensible with a BLOCK used as an expression,
813 but we also can't just die when we see it because of non-expression
814 uses. So just avert our eyes and cross our fingers. Silly Java. */
815 || code == BLOCK)
816 *walk_subtrees = 0;
817 else
818 {
819 gcc_assert (code != BIND_EXPR);
820 copy_tree_r (tp, walk_subtrees, data);
821 }
822
823 return NULL_TREE;
824 }
825
826 /* Callback for walk_tree to unshare most of the shared trees rooted at
827 *TP. If *TP has been visited already (i.e., TREE_VISITED (*TP) == 1),
828 then *TP is deep copied by calling copy_tree_r.
829
830 This unshares the same trees as copy_tree_r with the exception of
831 SAVE_EXPR nodes. These nodes model computations that should only be
832 done once. If we were to unshare something like SAVE_EXPR(i++), the
833 gimplification process would create wrong code. */
834
835 static tree
836 copy_if_shared_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
837 void *data ATTRIBUTE_UNUSED)
838 {
839 tree t = *tp;
840 enum tree_code code = TREE_CODE (t);
841
842 /* Skip types, decls, and constants. But we do want to look at their
843 types and the bounds of types. Mark them as visited so we properly
844 unmark their subtrees on the unmark pass. If we've already seen them,
845 don't look down further. */
846 if (TREE_CODE_CLASS (code) == tcc_type
847 || TREE_CODE_CLASS (code) == tcc_declaration
848 || TREE_CODE_CLASS (code) == tcc_constant)
849 {
850 if (TREE_VISITED (t))
851 *walk_subtrees = 0;
852 else
853 TREE_VISITED (t) = 1;
854 }
855
856 /* If this node has been visited already, unshare it and don't look
857 any deeper. */
858 else if (TREE_VISITED (t))
859 {
860 walk_tree (tp, mostly_copy_tree_r, NULL, NULL);
861 *walk_subtrees = 0;
862 }
863
864 /* Otherwise, mark the tree as visited and keep looking. */
865 else
866 TREE_VISITED (t) = 1;
867
868 return NULL_TREE;
869 }
870
871 static tree
872 unmark_visited_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
873 void *data ATTRIBUTE_UNUSED)
874 {
875 if (TREE_VISITED (*tp))
876 TREE_VISITED (*tp) = 0;
877 else
878 *walk_subtrees = 0;
879
880 return NULL_TREE;
881 }
882
883 /* Unshare all the trees in BODY_P, a pointer into the body of FNDECL, and the
884 bodies of any nested functions if we are unsharing the entire body of
885 FNDECL. */
886
887 static void
888 unshare_body (tree *body_p, tree fndecl)
889 {
890 struct cgraph_node *cgn = cgraph_node (fndecl);
891
892 walk_tree (body_p, copy_if_shared_r, NULL, NULL);
893 if (body_p == &DECL_SAVED_TREE (fndecl))
894 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
895 unshare_body (&DECL_SAVED_TREE (cgn->decl), cgn->decl);
896 }
897
898 /* Likewise, but mark all trees as not visited. */
899
900 static void
901 unvisit_body (tree *body_p, tree fndecl)
902 {
903 struct cgraph_node *cgn = cgraph_node (fndecl);
904
905 walk_tree (body_p, unmark_visited_r, NULL, NULL);
906 if (body_p == &DECL_SAVED_TREE (fndecl))
907 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
908 unvisit_body (&DECL_SAVED_TREE (cgn->decl), cgn->decl);
909 }
910
911 /* Unshare T and all the trees reached from T via TREE_CHAIN. */
912
913 static void
914 unshare_all_trees (tree t)
915 {
916 walk_tree (&t, copy_if_shared_r, NULL, NULL);
917 walk_tree (&t, unmark_visited_r, NULL, NULL);
918 }
919
920 /* Unconditionally make an unshared copy of EXPR. This is used for
921 stored expressions which span multiple functions, such as BINFO_VTABLE,
922 as the normal unsharing process can't tell that they're shared. */
923
924 tree
925 unshare_expr (tree expr)
926 {
927 walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
928 return expr;
929 }
930
931 /* A terser interface for building a representation of an exception
932 specification. */
933
934 tree
935 gimple_build_eh_filter (tree body, tree allowed, tree failure)
936 {
937 tree t;
938
939 /* FIXME should the allowed types go in TREE_TYPE? */
940 t = build2 (EH_FILTER_EXPR, void_type_node, allowed, NULL_TREE);
941 append_to_statement_list (failure, &EH_FILTER_FAILURE (t));
942
943 t = build2 (TRY_CATCH_EXPR, void_type_node, NULL_TREE, t);
944 append_to_statement_list (body, &TREE_OPERAND (t, 0));
945
946 return t;
947 }
948
949 \f
950 /* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
951 contain statements and have a value. Assign its value to a temporary
952 and give it void_type_node. Returns the temporary, or NULL_TREE if
953 WRAPPER was already void. */
954
955 tree
956 voidify_wrapper_expr (tree wrapper, tree temp)
957 {
958 if (!VOID_TYPE_P (TREE_TYPE (wrapper)))
959 {
960 tree *p, sub = wrapper;
961
962 restart:
963 /* Set p to point to the body of the wrapper. */
964 switch (TREE_CODE (sub))
965 {
966 case BIND_EXPR:
967 /* For a BIND_EXPR, the body is operand 1. */
968 p = &BIND_EXPR_BODY (sub);
969 break;
970
971 default:
972 p = &TREE_OPERAND (sub, 0);
973 break;
974 }
975
976 /* Advance to the last statement. Set all container types to void. */
977 if (TREE_CODE (*p) == STATEMENT_LIST)
978 {
979 tree_stmt_iterator i = tsi_last (*p);
980 p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
981 }
982 else
983 {
984 for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
985 {
986 TREE_SIDE_EFFECTS (*p) = 1;
987 TREE_TYPE (*p) = void_type_node;
988 }
989 }
990
991 if (p == NULL || IS_EMPTY_STMT (*p))
992 ;
993 /* Look through exception handling. */
994 else if (TREE_CODE (*p) == TRY_FINALLY_EXPR
995 || TREE_CODE (*p) == TRY_CATCH_EXPR)
996 {
997 sub = *p;
998 goto restart;
999 }
1000 /* The C++ frontend already did this for us. */
1001 else if (TREE_CODE (*p) == INIT_EXPR
1002 || TREE_CODE (*p) == TARGET_EXPR)
1003 temp = TREE_OPERAND (*p, 0);
1004 /* If we're returning a dereference, move the dereference
1005 outside the wrapper. */
1006 else if (TREE_CODE (*p) == INDIRECT_REF)
1007 {
1008 tree ptr = TREE_OPERAND (*p, 0);
1009 temp = create_tmp_var (TREE_TYPE (ptr), "retval");
1010 *p = build2 (MODIFY_EXPR, TREE_TYPE (ptr), temp, ptr);
1011 temp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (temp)), temp);
1012 /* If this is a BIND_EXPR for a const inline function, it might not
1013 have TREE_SIDE_EFFECTS set. That is no longer accurate. */
1014 TREE_SIDE_EFFECTS (wrapper) = 1;
1015 }
1016 else
1017 {
1018 if (!temp)
1019 temp = create_tmp_var (TREE_TYPE (wrapper), "retval");
1020 *p = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, *p);
1021 TREE_SIDE_EFFECTS (wrapper) = 1;
1022 }
1023
1024 TREE_TYPE (wrapper) = void_type_node;
1025 return temp;
1026 }
1027
1028 return NULL_TREE;
1029 }
1030
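/* For illustration, not from the sources: given a statement expression
   used for its value,

       x = ({ int i = f (); i + 1; });

   the BIND_EXPR wrapper is given void type, its last statement becomes an
   assignment to a "retval" temporary (or to TEMP if the caller supplied
   one), and that temporary is what then gets assigned to x. */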
1031 /* Prepare calls to builtins to SAVE and RESTORE the stack as well as
1032 a temporary through which they communicate. */
1033
1034 static void
1035 build_stack_save_restore (tree *save, tree *restore)
1036 {
1037 tree save_call, tmp_var;
1038
1039 save_call =
1040 build_function_call_expr (implicit_built_in_decls[BUILT_IN_STACK_SAVE],
1041 NULL_TREE);
1042 tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
1043
1044 *save = build2 (MODIFY_EXPR, ptr_type_node, tmp_var, save_call);
1045 *restore =
1046 build_function_call_expr (implicit_built_in_decls[BUILT_IN_STACK_RESTORE],
1047 tree_cons (NULL_TREE, tmp_var, NULL_TREE));
1048 }
1049
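/* For illustration: gimplify_bind_expr below uses this pair to wrap any
   block that allocated variable-sized storage, roughly

       saved_stack.1 = __builtin_stack_save ();
       try
         {
           ... body containing __builtin_alloca calls ...
         }
       finally
         {
           __builtin_stack_restore (saved_stack.1);
         }

   so the space is reclaimed on every exit from the block. The ".1" suffix
   is invented; only the "saved_stack" prefix comes from the code above. */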
1050 /* Gimplify a BIND_EXPR. Just voidify and recurse. */
1051
1052 static enum gimplify_status
1053 gimplify_bind_expr (tree *expr_p, tree temp, tree *pre_p)
1054 {
1055 tree bind_expr = *expr_p;
1056 bool old_save_stack = gimplify_ctxp->save_stack;
1057 tree t;
1058
1059 temp = voidify_wrapper_expr (bind_expr, temp);
1060
1061 /* Mark variables seen in this bind expr. */
1062 for (t = BIND_EXPR_VARS (bind_expr); t ; t = TREE_CHAIN (t))
1063 {
1064 if (TREE_CODE (t) == VAR_DECL)
1065 {
1066 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
1067
1068 /* Mark variable as local. */
1069 if (ctx && !is_global_var (t)
1070 && (! DECL_SEEN_IN_BIND_EXPR_P (t)
1071 || splay_tree_lookup (ctx->variables,
1072 (splay_tree_key) t) == NULL))
1073 omp_add_variable (gimplify_omp_ctxp, t, GOVD_LOCAL | GOVD_SEEN);
1074
1075 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
1076 }
1077
1078 /* Preliminarily mark non-addressed complex variables as eligible
1079 for promotion to gimple registers. We'll transform their uses
1080 as we find them. */
1081 if (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
1082 && !TREE_THIS_VOLATILE (t)
1083 && (TREE_CODE (t) == VAR_DECL && !DECL_HARD_REGISTER (t))
1084 && !needs_to_live_in_memory (t))
1085 DECL_COMPLEX_GIMPLE_REG_P (t) = 1;
1086 }
1087
1088 gimple_push_bind_expr (bind_expr);
1089 gimplify_ctxp->save_stack = false;
1090
1091 gimplify_to_stmt_list (&BIND_EXPR_BODY (bind_expr));
1092
1093 if (gimplify_ctxp->save_stack)
1094 {
1095 tree stack_save, stack_restore;
1096
1097 /* Save stack on entry and restore it on exit. Add a try_finally
1098 block to achieve this. Note that mudflap depends on the
1099 format of the emitted code: see mx_register_decls(). */
1100 build_stack_save_restore (&stack_save, &stack_restore);
1101
1102 t = build2 (TRY_FINALLY_EXPR, void_type_node,
1103 BIND_EXPR_BODY (bind_expr), NULL_TREE);
1104 append_to_statement_list (stack_restore, &TREE_OPERAND (t, 1));
1105
1106 BIND_EXPR_BODY (bind_expr) = NULL_TREE;
1107 append_to_statement_list (stack_save, &BIND_EXPR_BODY (bind_expr));
1108 append_to_statement_list (t, &BIND_EXPR_BODY (bind_expr));
1109 }
1110
1111 gimplify_ctxp->save_stack = old_save_stack;
1112 gimple_pop_bind_expr ();
1113
1114 if (temp)
1115 {
1116 *expr_p = temp;
1117 append_to_statement_list (bind_expr, pre_p);
1118 return GS_OK;
1119 }
1120 else
1121 return GS_ALL_DONE;
1122 }
1123
1124 /* Gimplify a RETURN_EXPR. If the expression to be returned is not a
1125 GIMPLE value, it is assigned to a new temporary and the statement is
1126 re-written to return the temporary.
1127
1128 PRE_P points to the list where side effects that must happen before
1129 STMT should be stored. */
1130
1131 static enum gimplify_status
1132 gimplify_return_expr (tree stmt, tree *pre_p)
1133 {
1134 tree ret_expr = TREE_OPERAND (stmt, 0);
1135 tree result_decl, result;
1136
1137 if (!ret_expr || TREE_CODE (ret_expr) == RESULT_DECL
1138 || ret_expr == error_mark_node)
1139 return GS_ALL_DONE;
1140
1141 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
1142 result_decl = NULL_TREE;
1143 else
1144 {
1145 result_decl = TREE_OPERAND (ret_expr, 0);
1146 if (TREE_CODE (result_decl) == INDIRECT_REF)
1147 /* See through a return by reference. */
1148 result_decl = TREE_OPERAND (result_decl, 0);
1149
1150 gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
1151 || TREE_CODE (ret_expr) == INIT_EXPR)
1152 && TREE_CODE (result_decl) == RESULT_DECL);
1153 }
1154
1155 /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
1156 Recall that aggregate_value_p is FALSE for any aggregate type that is
1157 returned in registers. If we're returning values in registers, then
1158 we don't want to extend the lifetime of the RESULT_DECL, particularly
1159 across another call. In addition, for those aggregates for which
1160 hard_function_value generates a PARALLEL, we'll die during normal
1161 expansion of structure assignments; there's special code in expand_return
1162 to handle this case that does not exist in expand_expr. */
1163 if (!result_decl
1164 || aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
1165 result = result_decl;
1166 else if (gimplify_ctxp->return_temp)
1167 result = gimplify_ctxp->return_temp;
1168 else
1169 {
1170 result = create_tmp_var (TREE_TYPE (result_decl), NULL);
1171
1172 /* ??? With complex control flow (usually involving abnormal edges),
1173 we can wind up warning about an uninitialized value for this. Due
1174 to how this variable is constructed and initialized, this is never
1175 true. Give up and never warn. */
1176 TREE_NO_WARNING (result) = 1;
1177
1178 gimplify_ctxp->return_temp = result;
1179 }
1180
1181 /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
1182 Then gimplify the whole thing. */
1183 if (result != result_decl)
1184 TREE_OPERAND (ret_expr, 0) = result;
1185
1186 gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);
1187
1188 /* If we didn't use a temporary, then the result is just the result_decl.
1189 Otherwise we need a simple copy. This should already be gimple. */
1190 if (result == result_decl)
1191 ret_expr = result;
1192 else
1193 ret_expr = build2 (MODIFY_EXPR, TREE_TYPE (result), result_decl, result);
1194 TREE_OPERAND (stmt, 0) = ret_expr;
1195
1196 return GS_ALL_DONE;
1197 }
1198
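/* For illustration (temporary name invented): for a value returned in
   registers, a return such as

       return a + b;

   is rewritten so the value is computed into the shared return temporary
   first, roughly

       D.2 = a + b;
       return <retval> = D.2;

   where D.2 is gimplify_ctxp->return_temp and is reused by every return
   statement in the function. */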
1199 /* Gimplifies a DECL_EXPR node *STMT_P by making any necessary allocation
1200 and initialization explicit. */
1201
1202 static enum gimplify_status
1203 gimplify_decl_expr (tree *stmt_p)
1204 {
1205 tree stmt = *stmt_p;
1206 tree decl = DECL_EXPR_DECL (stmt);
1207
1208 *stmt_p = NULL_TREE;
1209
1210 if (TREE_TYPE (decl) == error_mark_node)
1211 return GS_ERROR;
1212
1213 if ((TREE_CODE (decl) == TYPE_DECL
1214 || TREE_CODE (decl) == VAR_DECL)
1215 && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
1216 gimplify_type_sizes (TREE_TYPE (decl), stmt_p);
1217
1218 if (TREE_CODE (decl) == VAR_DECL && !DECL_EXTERNAL (decl))
1219 {
1220 tree init = DECL_INITIAL (decl);
1221
1222 if (!TREE_CONSTANT (DECL_SIZE (decl)))
1223 {
1224 /* This is a variable-sized decl. Simplify its size and mark it
1225 for deferred expansion. Note that mudflap depends on the format
1226 of the emitted code: see mx_register_decls(). */
1227 tree t, args, addr, ptr_type;
1228
1229 gimplify_one_sizepos (&DECL_SIZE (decl), stmt_p);
1230 gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), stmt_p);
1231
1232 /* All occurrences of this decl in final gimplified code will be
1233 replaced by indirection. Setting DECL_VALUE_EXPR does two
1234 things: First, it lets the rest of the gimplifier know what
1235 replacement to use. Second, it lets the debug info know
1236 where to find the value. */
1237 ptr_type = build_pointer_type (TREE_TYPE (decl));
1238 addr = create_tmp_var (ptr_type, get_name (decl));
1239 DECL_IGNORED_P (addr) = 0;
1240 t = build_fold_indirect_ref (addr);
1241 SET_DECL_VALUE_EXPR (decl, t);
1242 DECL_HAS_VALUE_EXPR_P (decl) = 1;
1243
1244 args = tree_cons (NULL, DECL_SIZE_UNIT (decl), NULL);
1245 t = built_in_decls[BUILT_IN_ALLOCA];
1246 t = build_function_call_expr (t, args);
1247 t = fold_convert (ptr_type, t);
1248 t = build2 (MODIFY_EXPR, void_type_node, addr, t);
1249
1250 gimplify_and_add (t, stmt_p);
1251
1252 /* Indicate that we need to restore the stack level when the
1253 enclosing BIND_EXPR is exited. */
1254 gimplify_ctxp->save_stack = true;
1255 }
1256
1257 if (init && init != error_mark_node)
1258 {
1259 if (!TREE_STATIC (decl))
1260 {
1261 DECL_INITIAL (decl) = NULL_TREE;
1262 init = build2 (INIT_EXPR, void_type_node, decl, init);
1263 gimplify_and_add (init, stmt_p);
1264 }
1265 else
1266 /* We must still examine initializers for static variables
1267 as they may contain a label address. */
1268 walk_tree (&init, force_labels_r, NULL, NULL);
1269 }
1270
1271 /* Some front ends do not explicitly declare all anonymous
1272 artificial variables. We compensate here by declaring the
1273 variables, though it would be better if the front ends would
1274 explicitly declare them. */
1275 if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
1276 && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
1277 gimple_add_tmp_var (decl);
1278 }
1279
1280 return GS_ALL_DONE;
1281 }
1282
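/* For illustration (names invented): for a C99 variable-sized decl such as

       void g (int n) { char buf[n]; ... }

   the code above emits, roughly,

       char *buf.1;
       buf.1 = __builtin_alloca (<size of buf>);

   rewrites every later use of "buf" to *buf.1 via DECL_VALUE_EXPR, and
   sets gimplify_ctxp->save_stack so the enclosing BIND_EXPR saves and
   restores the stack pointer (see gimplify_bind_expr above). */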
1283 /* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body
1284 and replacing the LOOP_EXPR with goto, but if the loop contains an
1285 EXIT_EXPR, we need to append a label for it to jump to. */
1286
1287 static enum gimplify_status
1288 gimplify_loop_expr (tree *expr_p, tree *pre_p)
1289 {
1290 tree saved_label = gimplify_ctxp->exit_label;
1291 tree start_label = build1 (LABEL_EXPR, void_type_node, NULL_TREE);
1292 tree jump_stmt = build_and_jump (&LABEL_EXPR_LABEL (start_label));
1293
1294 append_to_statement_list (start_label, pre_p);
1295
1296 gimplify_ctxp->exit_label = NULL_TREE;
1297
1298 gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);
1299
1300 if (gimplify_ctxp->exit_label)
1301 {
1302 append_to_statement_list (jump_stmt, pre_p);
1303 *expr_p = build1 (LABEL_EXPR, void_type_node, gimplify_ctxp->exit_label);
1304 }
1305 else
1306 *expr_p = jump_stmt;
1307
1308 gimplify_ctxp->exit_label = saved_label;
1309
1310 return GS_ALL_DONE;
1311 }
1312
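/* For illustration (label names invented): a LOOP_EXPR for an infinite
   loop becomes

       L0:;
       body
       goto L0;

   and if the body contained an EXIT_EXPR, gimplify_exit_expr records an
   exit label so the result is

       L0:;
       if (<exit condition>) goto L1;
       ...
       goto L0;
       L1:;  */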
1313 /* Compare two case labels. Because the front end should already have
1314 made sure that case ranges do not overlap, it is enough to only compare
1315 the CASE_LOW values of each case label. */
1316
1317 static int
1318 compare_case_labels (const void *p1, const void *p2)
1319 {
1320 tree case1 = *(tree *)p1;
1321 tree case2 = *(tree *)p2;
1322
1323 return tree_int_cst_compare (CASE_LOW (case1), CASE_LOW (case2));
1324 }
1325
1326 /* Sort the case labels in LABEL_VEC in place in ascending order. */
1327
1328 void
1329 sort_case_labels (tree label_vec)
1330 {
1331 size_t len = TREE_VEC_LENGTH (label_vec);
1332 tree default_case = TREE_VEC_ELT (label_vec, len - 1);
1333
1334 if (CASE_LOW (default_case))
1335 {
1336 size_t i;
1337
1338 /* The last label in the vector should be the default case
1339 but it is not. */
1340 for (i = 0; i < len; ++i)
1341 {
1342 tree t = TREE_VEC_ELT (label_vec, i);
1343 if (!CASE_LOW (t))
1344 {
1345 default_case = t;
1346 TREE_VEC_ELT (label_vec, i) = TREE_VEC_ELT (label_vec, len - 1);
1347 TREE_VEC_ELT (label_vec, len - 1) = default_case;
1348 break;
1349 }
1350 }
1351 }
1352
1353 qsort (&TREE_VEC_ELT (label_vec, 0), len - 1, sizeof (tree),
1354 compare_case_labels);
1355 }
1356
1357 /* Gimplify a SWITCH_EXPR, and collect a TREE_VEC of the labels it can
1358 branch to. */
1359
1360 static enum gimplify_status
1361 gimplify_switch_expr (tree *expr_p, tree *pre_p)
1362 {
1363 tree switch_expr = *expr_p;
1364 enum gimplify_status ret;
1365
1366 ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL,
1367 is_gimple_val, fb_rvalue);
1368
1369 if (SWITCH_BODY (switch_expr))
1370 {
1371 VEC(tree,heap) *labels, *saved_labels;
1372 tree label_vec, default_case = NULL_TREE;
1373 size_t i, len;
1374
1375 /* If someone can be bothered to fill in the labels, they can
1376 be bothered to null out the body too. */
1377 gcc_assert (!SWITCH_LABELS (switch_expr));
1378
1379 saved_labels = gimplify_ctxp->case_labels;
1380 gimplify_ctxp->case_labels = VEC_alloc (tree, heap, 8);
1381
1382 gimplify_to_stmt_list (&SWITCH_BODY (switch_expr));
1383
1384 labels = gimplify_ctxp->case_labels;
1385 gimplify_ctxp->case_labels = saved_labels;
1386
1387 i = 0;
1388 while (i < VEC_length (tree, labels))
1389 {
1390 tree elt = VEC_index (tree, labels, i);
1391 tree low = CASE_LOW (elt);
1392 bool remove_element = FALSE;
1393
1394 if (low)
1395 {
1396 /* Discard empty ranges. */
1397 tree high = CASE_HIGH (elt);
1398 if (high && INT_CST_LT (high, low))
1399 remove_element = TRUE;
1400 }
1401 else
1402 {
1403 /* The default case must be the last label in the list. */
1404 gcc_assert (!default_case);
1405 default_case = elt;
1406 remove_element = TRUE;
1407 }
1408
1409 if (remove_element)
1410 VEC_ordered_remove (tree, labels, i);
1411 else
1412 i++;
1413 }
1414 len = i;
1415
1416 label_vec = make_tree_vec (len + 1);
1417 SWITCH_LABELS (*expr_p) = label_vec;
1418 append_to_statement_list (switch_expr, pre_p);
1419
1420 if (! default_case)
1421 {
1422 /* If the switch has no default label, add one, so that we jump
1423 around the switch body. */
1424 default_case = build3 (CASE_LABEL_EXPR, void_type_node, NULL_TREE,
1425 NULL_TREE, create_artificial_label ());
1426 append_to_statement_list (SWITCH_BODY (switch_expr), pre_p);
1427 *expr_p = build1 (LABEL_EXPR, void_type_node,
1428 CASE_LABEL (default_case));
1429 }
1430 else
1431 *expr_p = SWITCH_BODY (switch_expr);
1432
1433 for (i = 0; i < len; ++i)
1434 TREE_VEC_ELT (label_vec, i) = VEC_index (tree, labels, i);
1435 TREE_VEC_ELT (label_vec, len) = default_case;
1436
1437 VEC_free (tree, heap, labels);
1438
1439 sort_case_labels (label_vec);
1440
1441 SWITCH_BODY (switch_expr) = NULL;
1442 }
1443 else
1444 gcc_assert (SWITCH_LABELS (switch_expr));
1445
1446 return ret;
1447 }
1448
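/* For illustration (label names invented): after gimplification a switch
   such as

       switch (x) { case 1: f (); break; default: g (); }

   is left as the SWITCH_EXPR itself, whose SWITCH_LABELS vector holds the
   sorted CASE_LABEL_EXPRs, followed by the flattened body:

       switch (x) <case 1: L1, default: L2>
       L1:; f (); goto L3;
       L2:; g ();
       L3:;

   and SWITCH_BODY is cleared. */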
1449 static enum gimplify_status
1450 gimplify_case_label_expr (tree *expr_p)
1451 {
1452 tree expr = *expr_p;
1453 struct gimplify_ctx *ctxp;
1454
1455 /* Invalid OpenMP programs can play Duff's Device type games with
1456 #pragma omp parallel. At least in the C front end, we don't
1457 detect such invalid branches until after gimplification. */
1458 for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
1459 if (ctxp->case_labels)
1460 break;
1461
1462 VEC_safe_push (tree, heap, ctxp->case_labels, expr);
1463 *expr_p = build1 (LABEL_EXPR, void_type_node, CASE_LABEL (expr));
1464 return GS_ALL_DONE;
1465 }
1466
1467 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
1468 if necessary. */
1469
1470 tree
1471 build_and_jump (tree *label_p)
1472 {
1473 if (label_p == NULL)
1474 /* If there's nowhere to jump, just fall through. */
1475 return NULL_TREE;
1476
1477 if (*label_p == NULL_TREE)
1478 {
1479 tree label = create_artificial_label ();
1480 *label_p = label;
1481 }
1482
1483 return build1 (GOTO_EXPR, void_type_node, *label_p);
1484 }
1485
1486 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
1487 This also involves building a label to jump to and communicating it to
1488 gimplify_loop_expr through gimplify_ctxp->exit_label. */
1489
1490 static enum gimplify_status
1491 gimplify_exit_expr (tree *expr_p)
1492 {
1493 tree cond = TREE_OPERAND (*expr_p, 0);
1494 tree expr;
1495
1496 expr = build_and_jump (&gimplify_ctxp->exit_label);
1497 expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
1498 *expr_p = expr;
1499
1500 return GS_OK;
1501 }
1502
1503 /* A helper function to be called via walk_tree. Mark all labels under *TP
1504 as being forced. To be called for DECL_INITIAL of static variables. */
1505
1506 tree
1507 force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1508 {
1509 if (TYPE_P (*tp))
1510 *walk_subtrees = 0;
1511 if (TREE_CODE (*tp) == LABEL_DECL)
1512 FORCED_LABEL (*tp) = 1;
1513
1514 return NULL_TREE;
1515 }
1516
1517 /* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is
1518 different from its canonical type, wrap the whole thing inside a
1519 NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
1520 type.
1521
1522 The canonical type of a COMPONENT_REF is the type of the field being
1523 referenced--unless the field is a bit-field which can be read directly
1524 in a smaller mode, in which case the canonical type is the
1525 sign-appropriate type corresponding to that mode. */
1526
1527 static void
1528 canonicalize_component_ref (tree *expr_p)
1529 {
1530 tree expr = *expr_p;
1531 tree type;
1532
1533 gcc_assert (TREE_CODE (expr) == COMPONENT_REF);
1534
1535 if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
1536 type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
1537 else
1538 type = TREE_TYPE (TREE_OPERAND (expr, 1));
1539
1540 if (TREE_TYPE (expr) != type)
1541 {
1542 tree old_type = TREE_TYPE (expr);
1543
1544 /* Set the type of the COMPONENT_REF to the underlying type. */
1545 TREE_TYPE (expr) = type;
1546
1547 /* And wrap the whole thing inside a NOP_EXPR. */
1548 expr = build1 (NOP_EXPR, old_type, expr);
1549
1550 *expr_p = expr;
1551 }
1552 }
1553
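/* For illustration (declarations invented): given

       struct S { unsigned f : 8; } s;
       unsigned x = s.f;

   the bit-field read s.f can be done directly in a byte-sized mode, so
   the COMPONENT_REF's type is narrowed accordingly and a NOP_EXPR back to
   the original type is wrapped around it, exposing the widening
   conversion to later simplification. */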
1554 /* If a NOP conversion is changing a pointer to array of foo to a pointer
1555 to foo, embed that change in the ADDR_EXPR by converting
1556 T array[U];
1557 (T *)&array
1558 ==>
1559 &array[L]
1560 where L is the lower bound. For simplicity, only do this for constant
1561 lower bound. */
1562
1563 static void
1564 canonicalize_addr_expr (tree *expr_p)
1565 {
1566 tree expr = *expr_p;
1567 tree ctype = TREE_TYPE (expr);
1568 tree addr_expr = TREE_OPERAND (expr, 0);
1569 tree atype = TREE_TYPE (addr_expr);
1570 tree dctype, datype, ddatype, otype, obj_expr;
1571
1572 /* Both cast and addr_expr types should be pointers. */
1573 if (!POINTER_TYPE_P (ctype) || !POINTER_TYPE_P (atype))
1574 return;
1575
1576 /* The addr_expr type should be a pointer to an array. */
1577 datype = TREE_TYPE (atype);
1578 if (TREE_CODE (datype) != ARRAY_TYPE)
1579 return;
1580
1581 /* Both cast and addr_expr types should address the same object type. */
1582 dctype = TREE_TYPE (ctype);
1583 ddatype = TREE_TYPE (datype);
1584 if (!lang_hooks.types_compatible_p (ddatype, dctype))
1585 return;
1586
1587 /* The addr_expr and the object type should match. */
1588 obj_expr = TREE_OPERAND (addr_expr, 0);
1589 otype = TREE_TYPE (obj_expr);
1590 if (!lang_hooks.types_compatible_p (otype, datype))
1591 return;
1592
1593 /* The lower bound and element sizes must be constant. */
1594 if (!TYPE_SIZE_UNIT (dctype)
1595 || TREE_CODE (TYPE_SIZE_UNIT (dctype)) != INTEGER_CST
1596 || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
1597 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
1598 return;
1599
1600 /* All checks succeeded. Build a new node to merge the cast. */
1601 *expr_p = build4 (ARRAY_REF, dctype, obj_expr,
1602 TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
1603 TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
1604 size_binop (EXACT_DIV_EXPR, TYPE_SIZE_UNIT (dctype),
1605 size_int (TYPE_ALIGN_UNIT (dctype))));
1606 *expr_p = build1 (ADDR_EXPR, ctype, *expr_p);
1607 }
1608
1609 /* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions
1610 underneath as appropriate. */
1611
1612 static enum gimplify_status
1613 gimplify_conversion (tree *expr_p)
1614 {
1615 gcc_assert (TREE_CODE (*expr_p) == NOP_EXPR
1616 || TREE_CODE (*expr_p) == CONVERT_EXPR);
1617
1618 /* Then strip away all but the outermost conversion. */
1619 STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));
1620
1621 /* And remove the outermost conversion if it's useless. */
1622 if (tree_ssa_useless_type_conversion (*expr_p))
1623 *expr_p = TREE_OPERAND (*expr_p, 0);
1624
1625 /* If we still have a conversion at the toplevel,
1626 then canonicalize some constructs. */
1627 if (TREE_CODE (*expr_p) == NOP_EXPR || TREE_CODE (*expr_p) == CONVERT_EXPR)
1628 {
1629 tree sub = TREE_OPERAND (*expr_p, 0);
1630
1631 /* If a NOP conversion is changing the type of a COMPONENT_REF
1632 expression, then canonicalize its type now in order to expose more
1633 redundant conversions. */
1634 if (TREE_CODE (sub) == COMPONENT_REF)
1635 canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));
1636
1637 /* If a NOP conversion is changing a pointer to array of foo
1638 to a pointer to foo, embed that change in the ADDR_EXPR. */
1639 else if (TREE_CODE (sub) == ADDR_EXPR)
1640 canonicalize_addr_expr (expr_p);
1641 }
1642
1643 return GS_OK;
1644 }
1645
1646 /* Gimplify a VAR_DECL or PARM_DECL. Returns GS_OK if we expanded a
1647 DECL_VALUE_EXPR, and it's worth re-examining things. */
1648
1649 static enum gimplify_status
1650 gimplify_var_or_parm_decl (tree *expr_p)
1651 {
1652 tree decl = *expr_p;
1653
1654 /* ??? If this is a local variable, and it has not been seen in any
1655 outer BIND_EXPR, then it's probably the result of a duplicate
1656 declaration, for which we've already issued an error. It would
1657 be really nice if the front end wouldn't leak these at all.
1658 Currently the only known culprit is C++ destructors, as seen
1659 in g++.old-deja/g++.jason/binding.C. */
1660 if (TREE_CODE (decl) == VAR_DECL
1661 && !DECL_SEEN_IN_BIND_EXPR_P (decl)
1662 && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
1663 && decl_function_context (decl) == current_function_decl)
1664 {
1665 gcc_assert (errorcount || sorrycount);
1666 return GS_ERROR;
1667 }
1668
1669 /* When within an OpenMP context, notice uses of variables. */
1670 if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
1671 return GS_ALL_DONE;
1672
1673 /* If the decl is an alias for another expression, substitute it now. */
1674 if (DECL_HAS_VALUE_EXPR_P (decl))
1675 {
1676 *expr_p = unshare_expr (DECL_VALUE_EXPR (decl));
1677 return GS_OK;
1678 }
1679
1680 return GS_ALL_DONE;
1681 }
1682
1683
1684 /* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
1685 node pointed to by EXPR_P.
1686
1687 compound_lval
1688 : min_lval '[' val ']'
1689 | min_lval '.' ID
1690 | compound_lval '[' val ']'
1691 | compound_lval '.' ID
1692
1693 This is not part of the original SIMPLE definition, which separates
1694 array and member references, but it seems reasonable to handle them
1695 together. Also, this way we don't run into problems with union
1696 aliasing; gcc requires that for accesses through a union to alias, the
1697 union reference must be explicit, which was not always the case when we
1698 were splitting up array and member refs.
1699
1700 PRE_P points to the list where side effects that must happen before
1701 *EXPR_P should be stored.
1702
1703 POST_P points to the list where side effects that must happen after
1704 *EXPR_P should be stored. */
1705
1706 static enum gimplify_status
1707 gimplify_compound_lval (tree *expr_p, tree *pre_p,
1708 tree *post_p, fallback_t fallback)
1709 {
1710 tree *p;
1711 VEC(tree,heap) *stack;
1712 enum gimplify_status ret = GS_OK, tret;
1713 int i;
1714
1715 /* Create a stack of the subexpressions so later we can walk them in
1716 order from inner to outer. */
1717 stack = VEC_alloc (tree, heap, 10);
1718
1719 /* We can handle anything that get_inner_reference can deal with. */
1720 for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
1721 {
1722 restart:
1723 /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs. */
1724 if (TREE_CODE (*p) == INDIRECT_REF)
1725 *p = fold_indirect_ref (*p);
1726
1727 if (handled_component_p (*p))
1728 ;
1729 /* Expand DECL_VALUE_EXPR now. In some cases that may expose
1730 additional COMPONENT_REFs. */
1731 else if ((TREE_CODE (*p) == VAR_DECL || TREE_CODE (*p) == PARM_DECL)
1732 && gimplify_var_or_parm_decl (p) == GS_OK)
1733 goto restart;
1734 else
1735 break;
1736
1737 VEC_safe_push (tree, heap, stack, *p);
1738 }
1739
1740 gcc_assert (VEC_length (tree, stack));
1741
1742 /* Now STACK is a stack of pointers to all the refs we've walked through
1743 and P points to the innermost expression.
1744
1745 Java requires that we elaborate nodes in source order. That
1746 means we must gimplify the inner expression followed by each of
1747 the indices, in order. But we can't gimplify the inner
1748 expression until we deal with any variable bounds, sizes, or
1749 positions in order to deal with PLACEHOLDER_EXPRs.
1750
1751 So we do this in three steps. First we deal with the annotations
1752 for any variables in the components, then we gimplify the base,
1753 then we gimplify any indices, from left to right. */
1754 for (i = VEC_length (tree, stack) - 1; i >= 0; i--)
1755 {
1756 tree t = VEC_index (tree, stack, i);
1757
1758 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
1759 {
1760 /* Gimplify the low bound and element type size and put them into
1761 the ARRAY_REF. If these values are set, they have already been
1762 gimplified. */
1763 if (!TREE_OPERAND (t, 2))
1764 {
1765 tree low = unshare_expr (array_ref_low_bound (t));
1766 if (!is_gimple_min_invariant (low))
1767 {
1768 TREE_OPERAND (t, 2) = low;
1769 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
1770 is_gimple_formal_tmp_reg, fb_rvalue);
1771 ret = MIN (ret, tret);
1772 }
1773 }
1774
1775 if (!TREE_OPERAND (t, 3))
1776 {
1777 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
1778 tree elmt_size = unshare_expr (array_ref_element_size (t));
1779 tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));
1780
1781 /* Divide the element size by the alignment of the element
1782 type (above). */
1783 elmt_size = size_binop (EXACT_DIV_EXPR, elmt_size, factor);
1784
1785 if (!is_gimple_min_invariant (elmt_size))
1786 {
1787 TREE_OPERAND (t, 3) = elmt_size;
1788 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
1789 is_gimple_formal_tmp_reg, fb_rvalue);
1790 ret = MIN (ret, tret);
1791 }
1792 }
1793 }
1794 else if (TREE_CODE (t) == COMPONENT_REF)
1795 {
1796 /* Set the field offset into T and gimplify it. */
1797 if (!TREE_OPERAND (t, 2))
1798 {
1799 tree offset = unshare_expr (component_ref_field_offset (t));
1800 tree field = TREE_OPERAND (t, 1);
1801 tree factor
1802 = size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);
1803
1804 /* Divide the offset by its alignment. */
1805 offset = size_binop (EXACT_DIV_EXPR, offset, factor);
1806
1807 if (!is_gimple_min_invariant (offset))
1808 {
1809 TREE_OPERAND (t, 2) = offset;
1810 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
1811 is_gimple_formal_tmp_reg, fb_rvalue);
1812 ret = MIN (ret, tret);
1813 }
1814 }
1815 }
1816 }
1817
1818 /* Step 2 is to gimplify the base expression. Make sure fb_lvalue is set
1819 so as to match the min_lval predicate. Failure to do so may result
1820 in the creation of large aggregate temporaries. */
1821 tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
1822 fallback | fb_lvalue);
1823 ret = MIN (ret, tret);
1824
1825 /* And finally, the indices and operands to BIT_FIELD_REF. During this
1826 loop we also remove any useless conversions. */
1827 for (; VEC_length (tree, stack) > 0; )
1828 {
1829 tree t = VEC_pop (tree, stack);
1830
1831 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
1832 {
1833 /* Gimplify the dimension.
1834 Temporary fix for gcc.c-torture/execute/20040313-1.c.
1835 Gimplify non-constant array indices into a temporary
1836 variable.
1837 FIXME - The real fix is to gimplify post-modify
1838 expressions into a minimal gimple lvalue. However, that
1839 exposes bugs in alias analysis. The alias analyzer does
1840 not handle &PTR->FIELD very well. Will fix after the
1841 branch is merged into mainline (dnovillo 2004-05-03). */
1842 if (!is_gimple_min_invariant (TREE_OPERAND (t, 1)))
1843 {
1844 tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
1845 is_gimple_formal_tmp_reg, fb_rvalue);
1846 ret = MIN (ret, tret);
1847 }
1848 }
1849 else if (TREE_CODE (t) == BIT_FIELD_REF)
1850 {
1851 tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
1852 is_gimple_val, fb_rvalue);
1853 ret = MIN (ret, tret);
1854 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
1855 is_gimple_val, fb_rvalue);
1856 ret = MIN (ret, tret);
1857 }
1858
1859 STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));
1860
1861 /* The innermost expression P may have originally had TREE_SIDE_EFFECTS
1862 set which would have caused all the outer expressions in EXPR_P
1863 leading to P to also have had TREE_SIDE_EFFECTS set. */
1864 recalculate_side_effects (t);
1865 }
1866
1867 tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval, fallback);
1868 ret = MIN (ret, tret);
1869
1870 /* If the outermost expression is a COMPONENT_REF, canonicalize its type. */
1871 if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
1872 {
1873 canonicalize_component_ref (expr_p);
1874 ret = MIN (ret, GS_OK);
1875 }
1876
1877 VEC_free (tree, heap, stack);
1878
1879 return ret;
1880 }
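/* An illustrative example of the three steps above (a sketch only; the
   temporary name and the exact size expression are not authoritative): for
   a C99 VLA "int a[n][m]", gimplifying the store "a[i][j] = 0" first
   evaluates the variable element size of the inner reference a[i] into a
   temporary, e.g.

     D.1 = (sizetype) m;

   records it in operand 3 of that ARRAY_REF, then gimplifies the base "a",
   and finally the indices "i" and "j" from left to right.  */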
1881
1882 /* Gimplify the self modifying expression pointed to by EXPR_P
1883 (++, --, +=, -=).
1884
1885 PRE_P points to the list where side effects that must happen before
1886 *EXPR_P should be stored.
1887
1888 POST_P points to the list where side effects that must happen after
1889 *EXPR_P should be stored.
1890
1891 WANT_VALUE is nonzero iff we want to use the value of this expression
1892 in another expression. */
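/* An illustrative sketch (not a guarantee of the exact output): given
   "b = a++;" with a simple local "a", the postfix case below yields

     b = a;
     a = a + 1;     <-- emitted through the post queue, after the use

   while a bare "a++;" (WANT_VALUE false) is handled as the prefix form and
   becomes just "a = a + 1;".  */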
1893
1894 static enum gimplify_status
1895 gimplify_self_mod_expr (tree *expr_p, tree *pre_p, tree *post_p,
1896 bool want_value)
1897 {
1898 enum tree_code code;
1899 tree lhs, lvalue, rhs, t1;
1900 bool postfix;
1901 enum tree_code arith_code;
1902 enum gimplify_status ret;
1903
1904 code = TREE_CODE (*expr_p);
1905
1906 gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
1907 || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);
1908
1909 /* Prefix or postfix? */
1910 if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
1911 /* Faster to treat as prefix if result is not used. */
1912 postfix = want_value;
1913 else
1914 postfix = false;
1915
1916 /* Add or subtract? */
1917 if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
1918 arith_code = PLUS_EXPR;
1919 else
1920 arith_code = MINUS_EXPR;
1921
1922 /* Gimplify the LHS into a GIMPLE lvalue. */
1923 lvalue = TREE_OPERAND (*expr_p, 0);
1924 ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
1925 if (ret == GS_ERROR)
1926 return ret;
1927
1928 /* Extract the operands to the arithmetic operation. */
1929 lhs = lvalue;
1930 rhs = TREE_OPERAND (*expr_p, 1);
1931
1932 /* For a postfix operator, we evaluate the LHS to an rvalue and then use
1933 that as the result value and in the postqueue operation. */
1934 if (postfix)
1935 {
1936 ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
1937 if (ret == GS_ERROR)
1938 return ret;
1939 }
1940
1941 t1 = build2 (arith_code, TREE_TYPE (*expr_p), lhs, rhs);
1942 t1 = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
1943
1944 if (postfix)
1945 {
1946 gimplify_and_add (t1, post_p);
1947 *expr_p = lhs;
1948 return GS_ALL_DONE;
1949 }
1950 else
1951 {
1952 *expr_p = t1;
1953 return GS_OK;
1954 }
1955 }
1956
1957 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
1958
1959 static void
1960 maybe_with_size_expr (tree *expr_p)
1961 {
1962 tree expr = *expr_p;
1963 tree type = TREE_TYPE (expr);
1964 tree size;
1965
1966 /* If we've already wrapped this or the type is error_mark_node, we can't do
1967 anything. */
1968 if (TREE_CODE (expr) == WITH_SIZE_EXPR
1969 || type == error_mark_node)
1970 return;
1971
1972 /* If the size isn't known or is a constant, we have nothing to do. */
1973 size = TYPE_SIZE_UNIT (type);
1974 if (!size || TREE_CODE (size) == INTEGER_CST)
1975 return;
1976
1977 /* Otherwise, make a WITH_SIZE_EXPR. */
1978 size = unshare_expr (size);
1979 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
1980 *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
1981 }
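/* Illustrative example (assuming a 4-byte int): for an object of C99 VLA
   type "int[n]", TYPE_SIZE_UNIT is the expression "(sizetype) n * 4" rather
   than an INTEGER_CST, so the object is wrapped as

     WITH_SIZE_EXPR <expr, (sizetype) n * 4>

   and the size expression travels along with the value, e.g. into argument
   lists via gimplify_arg below.  */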
1982
1983 /* Subroutine of gimplify_call_expr: Gimplify a single argument. */
1984
1985 static enum gimplify_status
1986 gimplify_arg (tree *expr_p, tree *pre_p)
1987 {
1988 bool (*test) (tree);
1989 fallback_t fb;
1990
1991 /* In general, we allow lvalues for function arguments to avoid
1992 extra overhead of copying large aggregates out of even larger
1993 aggregates into temporaries only to copy the temporaries to
1994 the argument list. Make optimizers happy by pulling out to
1995 temporaries those types that fit in registers. */
1996 if (is_gimple_reg_type (TREE_TYPE (*expr_p)))
1997 test = is_gimple_val, fb = fb_rvalue;
1998 else
1999 test = is_gimple_lvalue, fb = fb_either;
2000
2001 /* If this is a variable sized type, we must remember the size. */
2002 maybe_with_size_expr (expr_p);
2003
2004 /* There is a sequence point before a function call. Side effects in
2005 the argument list must occur before the actual call. So, when
2006 gimplifying arguments, force gimplify_expr to use an internal
2007 post queue which is then appended to the end of PRE_P. */
2008 return gimplify_expr (expr_p, pre_p, NULL, test, fb);
2009 }
2010
2011 /* Gimplify the CALL_EXPR node pointed to by EXPR_P. PRE_P points to the
2012 list where side effects that must happen before *EXPR_P should be stored.
2013 WANT_VALUE is true if the result of the call is desired. */
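/* For instance (illustrative; whether this fires depends on the target and
   on which builtins are enabled), fold_builtin below may rewrite

     printf ("%s\n", s);     into     puts (s);

   after which the rewritten CALL_EXPR is returned with GS_OK so that it is
   gimplified again from the top.  */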
2014
2015 static enum gimplify_status
2016 gimplify_call_expr (tree *expr_p, tree *pre_p, bool want_value)
2017 {
2018 tree decl;
2019 tree arglist;
2020 enum gimplify_status ret;
2021
2022 gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);
2023
2024 /* For reliable diagnostics during inlining, it is necessary that
2025 every call_expr be annotated with file and line. */
2026 if (! EXPR_HAS_LOCATION (*expr_p))
2027 SET_EXPR_LOCATION (*expr_p, input_location);
2028
2029 /* This may be a call to a builtin function.
2030
2031 Builtin function calls may be transformed into different
2032 (and more efficient) builtin function calls under certain
2033 circumstances. Unfortunately, gimplification can muck things
2034 up enough that the builtin expanders are not aware that certain
2035 transformations are still valid.
2036
2037 So we attempt transformation/gimplification of the call before
2038 we gimplify the CALL_EXPR. At this time we do not manage to
2039 transform all calls in the same manner as the expanders do, but
2040 we do transform most of them. */
2041 decl = get_callee_fndecl (*expr_p);
2042 if (decl && DECL_BUILT_IN (decl))
2043 {
2044 tree arglist = TREE_OPERAND (*expr_p, 1);
2045 tree new = fold_builtin (decl, arglist, !want_value);
2046
2047 if (new && new != *expr_p)
2048 {
2049 /* There was a transformation of this call which computes the
2050 same value, but in a more efficient way. Return and try
2051 again. */
2052 *expr_p = new;
2053 return GS_OK;
2054 }
2055
2056 if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
2057 && DECL_FUNCTION_CODE (decl) == BUILT_IN_VA_START)
2058 {
2059 if (!arglist || !TREE_CHAIN (arglist))
2060 {
2061 error ("too few arguments to function %<va_start%>");
2062 *expr_p = build_empty_stmt ();
2063 return GS_OK;
2064 }
2065
2066 if (fold_builtin_next_arg (TREE_CHAIN (arglist)))
2067 {
2068 *expr_p = build_empty_stmt ();
2069 return GS_OK;
2070 }
2071 /* Avoid gimplifying the second argument to va_start, which needs
2072 to be the plain PARM_DECL. */
2073 return gimplify_arg (&TREE_VALUE (TREE_OPERAND (*expr_p, 1)), pre_p);
2074 }
2075 }
2076
2077 /* There is a sequence point before the call, so any side effects in
2078 the calling expression must occur before the actual call. Force
2079 gimplify_expr to use an internal post queue. */
2080 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, NULL,
2081 is_gimple_call_addr, fb_rvalue);
2082
2083 if (PUSH_ARGS_REVERSED)
2084 TREE_OPERAND (*expr_p, 1) = nreverse (TREE_OPERAND (*expr_p, 1));
2085 for (arglist = TREE_OPERAND (*expr_p, 1); arglist;
2086 arglist = TREE_CHAIN (arglist))
2087 {
2088 enum gimplify_status t;
2089
2090 t = gimplify_arg (&TREE_VALUE (arglist), pre_p);
2091
2092 if (t == GS_ERROR)
2093 ret = GS_ERROR;
2094 }
2095 if (PUSH_ARGS_REVERSED)
2096 TREE_OPERAND (*expr_p, 1) = nreverse (TREE_OPERAND (*expr_p, 1));
2097
2098 /* Try this again in case gimplification exposed something. */
2099 if (ret != GS_ERROR)
2100 {
2101 decl = get_callee_fndecl (*expr_p);
2102 if (decl && DECL_BUILT_IN (decl))
2103 {
2104 tree arglist = TREE_OPERAND (*expr_p, 1);
2105 tree new = fold_builtin (decl, arglist, !want_value);
2106
2107 if (new && new != *expr_p)
2108 {
2109 /* There was a transformation of this call which computes the
2110 same value, but in a more efficient way. Return and try
2111 again. */
2112 *expr_p = new;
2113 return GS_OK;
2114 }
2115 }
2116 }
2117
2118 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on the
2119 call. This allows us to eliminate redundant or useless
2120 calls to "const" functions. */
2121 if (TREE_CODE (*expr_p) == CALL_EXPR
2122 && (call_expr_flags (*expr_p) & (ECF_CONST | ECF_PURE)))
2123 TREE_SIDE_EFFECTS (*expr_p) = 0;
2124
2125 return ret;
2126 }
2127
2128 /* Handle shortcut semantics in the predicate operand of a COND_EXPR by
2129 rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
2130
2131 TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
2132 condition is true or false, respectively. If null, we should generate
2133 our own to skip over the evaluation of this specific expression.
2134
2135 This function is the tree equivalent of do_jump.
2136
2137 shortcut_cond_r should only be called by shortcut_cond_expr. */
2138
2139 static tree
2140 shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p)
2141 {
2142 tree local_label = NULL_TREE;
2143 tree t, expr = NULL;
2144
2145 /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
2146 retain the shortcut semantics. Just insert the gotos here;
2147 shortcut_cond_expr will append the real blocks later. */
2148 if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
2149 {
2150 /* Turn if (a && b) into
2151
2152 if (a); else goto no;
2153 if (b) goto yes; else goto no;
2154 (no:) */
2155
2156 if (false_label_p == NULL)
2157 false_label_p = &local_label;
2158
2159 t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p);
2160 append_to_statement_list (t, &expr);
2161
2162 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
2163 false_label_p);
2164 append_to_statement_list (t, &expr);
2165 }
2166 else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
2167 {
2168 /* Turn if (a || b) into
2169
2170 if (a) goto yes;
2171 if (b) goto yes; else goto no;
2172 (yes:) */
2173
2174 if (true_label_p == NULL)
2175 true_label_p = &local_label;
2176
2177 t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL);
2178 append_to_statement_list (t, &expr);
2179
2180 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
2181 false_label_p);
2182 append_to_statement_list (t, &expr);
2183 }
2184 else if (TREE_CODE (pred) == COND_EXPR)
2185 {
2186 /* As long as we're messing with gotos, turn if (a ? b : c) into
2187 if (a)
2188 if (b) goto yes; else goto no;
2189 else
2190 if (c) goto yes; else goto no; */
2191 expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
2192 shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
2193 false_label_p),
2194 shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
2195 false_label_p));
2196 }
2197 else
2198 {
2199 expr = build3 (COND_EXPR, void_type_node, pred,
2200 build_and_jump (true_label_p),
2201 build_and_jump (false_label_p));
2202 }
2203
2204 if (local_label)
2205 {
2206 t = build1 (LABEL_EXPR, void_type_node, local_label);
2207 append_to_statement_list (t, &expr);
2208 }
2209
2210 return expr;
2211 }
2212
2213 static tree
2214 shortcut_cond_expr (tree expr)
2215 {
2216 tree pred = TREE_OPERAND (expr, 0);
2217 tree then_ = TREE_OPERAND (expr, 1);
2218 tree else_ = TREE_OPERAND (expr, 2);
2219 tree true_label, false_label, end_label, t;
2220 tree *true_label_p;
2221 tree *false_label_p;
2222 bool emit_end, emit_false, jump_over_else;
2223 bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
2224 bool else_se = else_ && TREE_SIDE_EFFECTS (else_);
2225
2226 /* First do simple transformations. */
2227 if (!else_se)
2228 {
2229 /* If there is no 'else', turn (a && b) into if (a) if (b). */
2230 while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
2231 {
2232 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
2233 then_ = shortcut_cond_expr (expr);
2234 then_se = then_ && TREE_SIDE_EFFECTS (then_);
2235 pred = TREE_OPERAND (pred, 0);
2236 expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
2237 }
2238 }
2239 if (!then_se)
2240 {
2241 /* If there is no 'then', turn
2242 if (a || b); else d
2243 into
2244 if (a); else if (b); else d. */
2245 while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
2246 {
2247 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
2248 else_ = shortcut_cond_expr (expr);
2249 else_se = else_ && TREE_SIDE_EFFECTS (else_);
2250 pred = TREE_OPERAND (pred, 0);
2251 expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
2252 }
2253 }
2254
2255 /* If we're done, great. */
2256 if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
2257 && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
2258 return expr;
2259
2260 /* Otherwise we need to mess with gotos. Change
2261 if (a) c; else d;
2262 to
2263 if (a); else goto no;
2264 c; goto end;
2265 no: d; end:
2266 and recursively gimplify the condition. */
2267
2268 true_label = false_label = end_label = NULL_TREE;
2269
2270 /* If our arms just jump somewhere, hijack those labels so we don't
2271 generate jumps to jumps. */
2272
2273 if (then_
2274 && TREE_CODE (then_) == GOTO_EXPR
2275 && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL)
2276 {
2277 true_label = GOTO_DESTINATION (then_);
2278 then_ = NULL;
2279 then_se = false;
2280 }
2281
2282 if (else_
2283 && TREE_CODE (else_) == GOTO_EXPR
2284 && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL)
2285 {
2286 false_label = GOTO_DESTINATION (else_);
2287 else_ = NULL;
2288 else_se = false;
2289 }
2290
2291 /* If we aren't hijacking a label for the 'then' branch, it falls through. */
2292 if (true_label)
2293 true_label_p = &true_label;
2294 else
2295 true_label_p = NULL;
2296
2297 /* The 'else' branch also needs a label if it contains interesting code. */
2298 if (false_label || else_se)
2299 false_label_p = &false_label;
2300 else
2301 false_label_p = NULL;
2302
2303 /* If there was nothing else in our arms, just forward the label(s). */
2304 if (!then_se && !else_se)
2305 return shortcut_cond_r (pred, true_label_p, false_label_p);
2306
2307 /* If our last subexpression already has a terminal label, reuse it. */
2308 if (else_se)
2309 expr = expr_last (else_);
2310 else if (then_se)
2311 expr = expr_last (then_);
2312 else
2313 expr = NULL;
2314 if (expr && TREE_CODE (expr) == LABEL_EXPR)
2315 end_label = LABEL_EXPR_LABEL (expr);
2316
2317 /* If we don't care about jumping to the 'else' branch, jump to the end
2318 if the condition is false. */
2319 if (!false_label_p)
2320 false_label_p = &end_label;
2321
2322 /* We only want to emit these labels if we aren't hijacking them. */
2323 emit_end = (end_label == NULL_TREE);
2324 emit_false = (false_label == NULL_TREE);
2325
2326 /* We only emit the jump over the else clause if we have to--if the
2327 then clause may fall through. Otherwise we can wind up with a
2328 useless jump and a useless label at the end of gimplified code,
2329 which will cause us to think that this conditional as a whole
2330 falls through even if it doesn't. If we then inline a function
2331 which ends with such a condition, that can cause us to issue an
2332 inappropriate warning about control reaching the end of a
2333 non-void function. */
2334 jump_over_else = block_may_fallthru (then_);
2335
2336 pred = shortcut_cond_r (pred, true_label_p, false_label_p);
2337
2338 expr = NULL;
2339 append_to_statement_list (pred, &expr);
2340
2341 append_to_statement_list (then_, &expr);
2342 if (else_se)
2343 {
2344 if (jump_over_else)
2345 {
2346 t = build_and_jump (&end_label);
2347 append_to_statement_list (t, &expr);
2348 }
2349 if (emit_false)
2350 {
2351 t = build1 (LABEL_EXPR, void_type_node, false_label);
2352 append_to_statement_list (t, &expr);
2353 }
2354 append_to_statement_list (else_, &expr);
2355 }
2356 if (emit_end && end_label)
2357 {
2358 t = build1 (LABEL_EXPR, void_type_node, end_label);
2359 append_to_statement_list (t, &expr);
2360 }
2361
2362 return expr;
2363 }
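/* Putting shortcut_cond_r and shortcut_cond_expr together, an illustrative
   sketch (label names invented): for

     if (a && b) c (); else d ();

   the generated statement list is approximately

     if (a) ; else goto no;
     if (b) ; else goto no;
     c ();
     goto end;
   no:
     d ();
   end:

   which gimplify_cond_expr then regimplifies statement by statement.  */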
2364
2365 /* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */
2366
2367 tree
2368 gimple_boolify (tree expr)
2369 {
2370 tree type = TREE_TYPE (expr);
2371
2372 if (TREE_CODE (type) == BOOLEAN_TYPE)
2373 return expr;
2374
2375 switch (TREE_CODE (expr))
2376 {
2377 case TRUTH_AND_EXPR:
2378 case TRUTH_OR_EXPR:
2379 case TRUTH_XOR_EXPR:
2380 case TRUTH_ANDIF_EXPR:
2381 case TRUTH_ORIF_EXPR:
2382 /* Also boolify the arguments of truth exprs. */
2383 TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
2384 /* FALLTHRU */
2385
2386 case TRUTH_NOT_EXPR:
2387 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
2388 /* FALLTHRU */
2389
2390 case EQ_EXPR: case NE_EXPR:
2391 case LE_EXPR: case GE_EXPR: case LT_EXPR: case GT_EXPR:
2392 /* These expressions always produce boolean results. */
2393 TREE_TYPE (expr) = boolean_type_node;
2394 return expr;
2395
2396 default:
2397 /* Other expressions that get here must have boolean values, but
2398 might need to be converted to the appropriate mode. */
2399 return fold_convert (boolean_type_node, expr);
2400 }
2401 }
2402
2403 /* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
2404 into
2405
2406 if (p)                  if (p)
2407   t1 = a;                 a;
2408 else           or       else
2409   t1 = b;                 b;
2410 t1;
2411
2412 The second form is used when *EXPR_P is of type void.
2413
2414 T1 is a fresh temporary created by this function when a value is needed.
2415
2416 PRE_P points to the list where side effects that must happen before
2417 *EXPR_P should be stored. */
2418
2419 static enum gimplify_status
2420 gimplify_cond_expr (tree *expr_p, tree *pre_p, fallback_t fallback)
2421 {
2422 tree expr = *expr_p;
2423 tree tmp, tmp2, type;
2424 enum gimplify_status ret;
2425
2426 type = TREE_TYPE (expr);
2427
2428 /* If this COND_EXPR has a value, copy the values into a temporary within
2429 the arms. */
2430 if (! VOID_TYPE_P (type))
2431 {
2432 tree result;
2433
2434 if ((fallback & fb_lvalue) == 0)
2435 {
2436 result = tmp2 = tmp = create_tmp_var (TREE_TYPE (expr), "iftmp");
2437 ret = GS_ALL_DONE;
2438 }
2439 else
2440 {
2441 tree type = build_pointer_type (TREE_TYPE (expr));
2442
2443 if (TREE_TYPE (TREE_OPERAND (expr, 1)) != void_type_node)
2444 TREE_OPERAND (expr, 1) =
2445 build_fold_addr_expr (TREE_OPERAND (expr, 1));
2446
2447 if (TREE_TYPE (TREE_OPERAND (expr, 2)) != void_type_node)
2448 TREE_OPERAND (expr, 2) =
2449 build_fold_addr_expr (TREE_OPERAND (expr, 2));
2450
2451 tmp2 = tmp = create_tmp_var (type, "iftmp");
2452
2453 expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (expr, 0),
2454 TREE_OPERAND (expr, 1), TREE_OPERAND (expr, 2));
2455
2456 result = build_fold_indirect_ref (tmp);
2457 ret = GS_ALL_DONE;
2458 }
2459
2460 /* Build the then clause, 't1 = a;'. But don't build an assignment
2461 if this branch is void; in C++ it can be, if it's a throw. */
2462 if (TREE_TYPE (TREE_OPERAND (expr, 1)) != void_type_node)
2463 TREE_OPERAND (expr, 1)
2464 = build2 (MODIFY_EXPR, void_type_node, tmp, TREE_OPERAND (expr, 1));
2465
2466 /* Build the else clause, 't1 = b;'. */
2467 if (TREE_TYPE (TREE_OPERAND (expr, 2)) != void_type_node)
2468 TREE_OPERAND (expr, 2)
2469 = build2 (MODIFY_EXPR, void_type_node, tmp2, TREE_OPERAND (expr, 2));
2470
2471 TREE_TYPE (expr) = void_type_node;
2472 recalculate_side_effects (expr);
2473
2474 /* Move the COND_EXPR to the prequeue. */
2475 gimplify_and_add (expr, pre_p);
2476
2477 *expr_p = result;
2478 return ret;
2479 }
2480
2481 /* Make sure the condition has BOOLEAN_TYPE. */
2482 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
2483
2484 /* Break apart && and || conditions. */
2485 if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
2486 || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
2487 {
2488 expr = shortcut_cond_expr (expr);
2489
2490 if (expr != *expr_p)
2491 {
2492 *expr_p = expr;
2493
2494 /* We can't rely on gimplify_expr to re-gimplify the expanded
2495 form properly, as cleanups might cause the target labels to be
2496 wrapped in a TRY_FINALLY_EXPR. To prevent that, we need to
2497 set up a conditional context. */
2498 gimple_push_condition ();
2499 gimplify_stmt (expr_p);
2500 gimple_pop_condition (pre_p);
2501
2502 return GS_ALL_DONE;
2503 }
2504 }
2505
2506 /* Now do the normal gimplification. */
2507 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL,
2508 is_gimple_condexpr, fb_rvalue);
2509
2510 gimple_push_condition ();
2511
2512 gimplify_to_stmt_list (&TREE_OPERAND (expr, 1));
2513 gimplify_to_stmt_list (&TREE_OPERAND (expr, 2));
2514 recalculate_side_effects (expr);
2515
2516 gimple_pop_condition (pre_p);
2517
2518 if (ret == GS_ERROR)
2519 ;
2520 else if (TREE_SIDE_EFFECTS (TREE_OPERAND (expr, 1)))
2521 ret = GS_ALL_DONE;
2522 else if (TREE_SIDE_EFFECTS (TREE_OPERAND (expr, 2)))
2523 /* Rewrite "if (a); else b" to "if (!a) b" */
2524 {
2525 TREE_OPERAND (expr, 0) = invert_truthvalue (TREE_OPERAND (expr, 0));
2526 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL,
2527 is_gimple_condexpr, fb_rvalue);
2528
2529 tmp = TREE_OPERAND (expr, 1);
2530 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 2);
2531 TREE_OPERAND (expr, 2) = tmp;
2532 }
2533 else
2534 /* Both arms are empty; replace the COND_EXPR with its predicate. */
2535 expr = TREE_OPERAND (expr, 0);
2536
2537 *expr_p = expr;
2538 return ret;
2539 }
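/* When the COND_EXPR is itself used as an lvalue (the fb_lvalue case above),
   e.g. the C++ conditional lvalue "(p ? a : b) = 1;", the arms are rewritten
   to take addresses and the result is a dereference of the temporary,
   roughly (temporary name illustrative)

     if (p) iftmp.1 = &a; else iftmp.1 = &b;
     *iftmp.1 = 1;  */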
2540
2541 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
2542 a call to __builtin_memcpy. */
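/* Sketch of the transformation (illustrative only): a variable-sized
   assignment "d = s", where both operands are addressable and SIZE is the
   run-time size recorded by the enclosing WITH_SIZE_EXPR, becomes roughly

     __builtin_memcpy (&d, &s, SIZE);

   and, when the value of the assignment is still wanted, the call's return
   value (a pointer to d) is cast back and dereferenced so the expression
   still yields the stored object.  */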
2543
2544 static enum gimplify_status
2545 gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value)
2546 {
2547 tree args, t, to, to_ptr, from;
2548
2549 to = TREE_OPERAND (*expr_p, 0);
2550 from = TREE_OPERAND (*expr_p, 1);
2551
2552 args = tree_cons (NULL, size, NULL);
2553
2554 t = build_fold_addr_expr (from);
2555 args = tree_cons (NULL, t, args);
2556
2557 to_ptr = build_fold_addr_expr (to);
2558 args = tree_cons (NULL, to_ptr, args);
2559 t = implicit_built_in_decls[BUILT_IN_MEMCPY];
2560 t = build_function_call_expr (t, args);
2561
2562 if (want_value)
2563 {
2564 t = build1 (NOP_EXPR, TREE_TYPE (to_ptr), t);
2565 t = build1 (INDIRECT_REF, TREE_TYPE (to), t);
2566 }
2567
2568 *expr_p = t;
2569 return GS_OK;
2570 }
2571
2572 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
2573 a call to __builtin_memset. In this case we know that the RHS is
2574 a CONSTRUCTOR with an empty element list. */
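/* Sketch (illustrative only): clearing a variable-sized object with an
   empty CONSTRUCTOR, "d = {}", becomes roughly

     __builtin_memset (&d, 0, SIZE);

   with SIZE taken from the enclosing WITH_SIZE_EXPR.  */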
2575
2576 static enum gimplify_status
2577 gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value)
2578 {
2579 tree args, t, to, to_ptr;
2580
2581 to = TREE_OPERAND (*expr_p, 0);
2582
2583 args = tree_cons (NULL, size, NULL);
2584
2585 args = tree_cons (NULL, integer_zero_node, args);
2586
2587 to_ptr = build_fold_addr_expr (to);
2588 args = tree_cons (NULL, to_ptr, args);
2589 t = implicit_built_in_decls[BUILT_IN_MEMSET];
2590 t = build_function_call_expr (t, args);
2591
2592 if (want_value)
2593 {
2594 t = build1 (NOP_EXPR, TREE_TYPE (to_ptr), t);
2595 t = build1 (INDIRECT_REF, TREE_TYPE (to), t);
2596 }
2597
2598 *expr_p = t;
2599 return GS_OK;
2600 }
2601
2602 /* A subroutine of gimplify_init_ctor_preeval. Called via walk_tree,
2603 determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
2604 assignment. Returns non-null if we detect a potential overlap. */
2605
2606 struct gimplify_init_ctor_preeval_data
2607 {
2608 /* The base decl of the lhs object. May be NULL, in which case we
2609 have to assume the lhs is indirect. */
2610 tree lhs_base_decl;
2611
2612 /* The alias set of the lhs object. */
2613 int lhs_alias_set;
2614 };
2615
2616 static tree
2617 gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
2618 {
2619 struct gimplify_init_ctor_preeval_data *data
2620 = (struct gimplify_init_ctor_preeval_data *) xdata;
2621 tree t = *tp;
2622
2623 /* If we find the base object, obviously we have overlap. */
2624 if (data->lhs_base_decl == t)
2625 return t;
2626
2627 /* If the constructor component is indirect, determine if we have a
2628 potential overlap with the lhs. The only bits of information we
2629 have to go on at this point are addressability and alias sets. */
2630 if (TREE_CODE (t) == INDIRECT_REF
2631 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
2632 && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
2633 return t;
2634
2635 if (IS_TYPE_OR_DECL_P (t))
2636 *walk_subtrees = 0;
2637 return NULL;
2638 }
2639
2640 /* A subroutine of gimplify_init_constructor. Pre-evaluate *EXPR_P,
2641 force values that overlap with the lhs (as described by *DATA)
2642 into temporaries. */
2643
2644 static void
2645 gimplify_init_ctor_preeval (tree *expr_p, tree *pre_p, tree *post_p,
2646 struct gimplify_init_ctor_preeval_data *data)
2647 {
2648 enum gimplify_status one;
2649
2650 /* If the value is invariant, then there's nothing to pre-evaluate.
2651 But ensure it doesn't have any side-effects since a SAVE_EXPR is
2652 invariant but has side effects and might contain a reference to
2653 the object we're initializing. */
2654 if (TREE_INVARIANT (*expr_p) && !TREE_SIDE_EFFECTS (*expr_p))
2655 return;
2656
2657 /* If the type has non-trivial constructors, we can't pre-evaluate. */
2658 if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
2659 return;
2660
2661 /* Recurse for nested constructors. */
2662 if (TREE_CODE (*expr_p) == CONSTRUCTOR)
2663 {
2664 unsigned HOST_WIDE_INT ix;
2665 constructor_elt *ce;
2666 VEC(constructor_elt,gc) *v = CONSTRUCTOR_ELTS (*expr_p);
2667
2668 for (ix = 0; VEC_iterate (constructor_elt, v, ix, ce); ix++)
2669 gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);
2670 return;
2671 }
2672
2673 /* We can't preevaluate if the type contains a placeholder. */
2674 if (type_contains_placeholder_p (TREE_TYPE (*expr_p)))
2675 return;
2676
2677 /* Gimplify the constructor element to something appropriate for the rhs
2678 of a MODIFY_EXPR. Given that we know the lhs is an aggregate, we know
2679 the gimplifier will consider this a store to memory. Doing this
2680 gimplification now means that we won't have to deal with complicated
2681 language-specific trees, nor trees like SAVE_EXPR that can induce
2682 exponential search behavior. */
2683 one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
2684 if (one == GS_ERROR)
2685 {
2686 *expr_p = NULL;
2687 return;
2688 }
2689
2690 /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
2691 with the lhs, since "a = { .x=a }" doesn't make sense. This will
2692 always be true for all scalars, since is_gimple_mem_rhs insists on a
2693 temporary variable for them. */
2694 if (DECL_P (*expr_p))
2695 return;
2696
2697 /* If this is of variable size, we have no choice but to assume it doesn't
2698 overlap since we can't make a temporary for it. */
2699 if (!TREE_CONSTANT (TYPE_SIZE (TREE_TYPE (*expr_p))))
2700 return;
2701
2702 /* Otherwise, we must search for overlap ... */
2703 if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
2704 return;
2705
2706 /* ... and if found, force the value into a temporary. */
2707 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
2708 }
2709
2710 /* A subroutine of gimplify_init_ctor_eval. Create a loop for
2711 a RANGE_EXPR in a CONSTRUCTOR for an array.
2712
2713 var = lower;
2714 loop_entry:
2715 object[var] = value;
2716 if (var == upper)
2717 goto loop_exit;
2718 var = var + 1;
2719 goto loop_entry;
2720 loop_exit:
2721
2722 We increment var _after_ the loop exit check because we might otherwise
2723 fail if upper == TYPE_MAX_VALUE (type for upper).
2724
2725 Note that we never have to deal with SAVE_EXPRs here, because this has
2726 already been taken care of for us, in gimplify_init_ctor_preeval(). */
2727
2728 static void gimplify_init_ctor_eval (tree, VEC(constructor_elt,gc) *,
2729 tree *, bool);
2730
2731 static void
2732 gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
2733 tree value, tree array_elt_type,
2734 tree *pre_p, bool cleared)
2735 {
2736 tree loop_entry_label, loop_exit_label;
2737 tree var, var_type, cref;
2738
2739 loop_entry_label = create_artificial_label ();
2740 loop_exit_label = create_artificial_label ();
2741
2742 /* Create and initialize the index variable. */
2743 var_type = TREE_TYPE (upper);
2744 var = create_tmp_var (var_type, NULL);
2745 append_to_statement_list (build2 (MODIFY_EXPR, var_type, var, lower), pre_p);
2746
2747 /* Add the loop entry label. */
2748 append_to_statement_list (build1 (LABEL_EXPR,
2749 void_type_node,
2750 loop_entry_label),
2751 pre_p);
2752
2753 /* Build the reference. */
2754 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
2755 var, NULL_TREE, NULL_TREE);
2756
2757 /* If the value is itself a CONSTRUCTOR, just call gimplify_init_ctor_eval
2758 to do the store. Otherwise just assign value to the reference. */
2759
2760 if (TREE_CODE (value) == CONSTRUCTOR)
2761 /* NB we might have to call ourselves recursively through
2762 gimplify_init_ctor_eval if the value is a constructor. */
2763 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
2764 pre_p, cleared);
2765 else
2766 append_to_statement_list (build2 (MODIFY_EXPR, TREE_TYPE (cref),
2767 cref, value),
2768 pre_p);
2769
2770 /* We exit the loop when the index var is equal to the upper bound. */
2771 gimplify_and_add (build3 (COND_EXPR, void_type_node,
2772 build2 (EQ_EXPR, boolean_type_node,
2773 var, upper),
2774 build1 (GOTO_EXPR,
2775 void_type_node,
2776 loop_exit_label),
2777 NULL_TREE),
2778 pre_p);
2779
2780 /* Otherwise, increment the index var... */
2781 append_to_statement_list (build2 (MODIFY_EXPR, var_type, var,
2782 build2 (PLUS_EXPR, var_type, var,
2783 fold_convert (var_type,
2784 integer_one_node))),
2785 pre_p);
2786
2787 /* ...and jump back to the loop entry. */
2788 append_to_statement_list (build1 (GOTO_EXPR,
2789 void_type_node,
2790 loop_entry_label),
2791 pre_p);
2792
2793 /* Add the loop exit label. */
2794 append_to_statement_list (build1 (LABEL_EXPR,
2795 void_type_node,
2796 loop_exit_label),
2797 pre_p);
2798 }
2799
2800 /* Return true if FDECL is accessing a field that is zero sized. */
2801
2802 static bool
2803 zero_sized_field_decl (tree fdecl)
2804 {
2805 if (TREE_CODE (fdecl) == FIELD_DECL && DECL_SIZE (fdecl)
2806 && integer_zerop (DECL_SIZE (fdecl)))
2807 return true;
2808 return false;
2809 }
2810
2811 /* Return true if TYPE is zero sized. */
2812
2813 static bool
2814 zero_sized_type (tree type)
2815 {
2816 if (AGGREGATE_TYPE_P (type) && TYPE_SIZE (type)
2817 && integer_zerop (TYPE_SIZE (type)))
2818 return true;
2819 return false;
2820 }
2821
2822 /* A subroutine of gimplify_init_constructor. Generate individual
2823 MODIFY_EXPRs for a CONSTRUCTOR. OBJECT is the LHS against which the
2824 assignments should happen. ELTS is the CONSTRUCTOR_ELTS of the
2825 CONSTRUCTOR. CLEARED is true if the entire LHS object has been
2826 zeroed first. */
2827
2828 static void
2829 gimplify_init_ctor_eval (tree object, VEC(constructor_elt,gc) *elts,
2830 tree *pre_p, bool cleared)
2831 {
2832 tree array_elt_type = NULL;
2833 unsigned HOST_WIDE_INT ix;
2834 tree purpose, value;
2835
2836 if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
2837 array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));
2838
2839 FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
2840 {
2841 tree cref, init;
2842
2843 /* NULL values are created above for gimplification errors. */
2844 if (value == NULL)
2845 continue;
2846
2847 if (cleared && initializer_zerop (value))
2848 continue;
2849
2850 /* ??? Here's to hoping the front end fills in all of the indices,
2851 so we don't have to figure out what's missing ourselves. */
2852 gcc_assert (purpose);
2853
2854 /* Skip zero-sized fields, unless value has side-effects. This can
2855 happen with calls to functions returning a zero-sized type, which
2856 we shouldn't discard. As a number of downstream passes don't
2857 expect sets of zero-sized fields, we rely on the gimplification of
2858 the MODIFY_EXPR we make below to drop the assignment statement. */
2859 if (! TREE_SIDE_EFFECTS (value) && zero_sized_field_decl (purpose))
2860 continue;
2861
2862 /* If we have a RANGE_EXPR, we have to build a loop to assign the
2863 whole range. */
2864 if (TREE_CODE (purpose) == RANGE_EXPR)
2865 {
2866 tree lower = TREE_OPERAND (purpose, 0);
2867 tree upper = TREE_OPERAND (purpose, 1);
2868
2869 /* If the lower bound is equal to upper, just treat it as if
2870 upper was the index. */
2871 if (simple_cst_equal (lower, upper))
2872 purpose = upper;
2873 else
2874 {
2875 gimplify_init_ctor_eval_range (object, lower, upper, value,
2876 array_elt_type, pre_p, cleared);
2877 continue;
2878 }
2879 }
2880
2881 if (array_elt_type)
2882 {
2883 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
2884 purpose, NULL_TREE, NULL_TREE);
2885 }
2886 else
2887 {
2888 gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
2889 cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
2890 unshare_expr (object), purpose, NULL_TREE);
2891 }
2892
2893 if (TREE_CODE (value) == CONSTRUCTOR
2894 && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
2895 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
2896 pre_p, cleared);
2897 else
2898 {
2899 init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
2900 gimplify_and_add (init, pre_p);
2901 }
2902 }
2903 }
2904
2905 /* A subroutine of gimplify_modify_expr. Break out elements of a
2906 CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
2907
2908 Note that we still need to clear any elements that don't have explicit
2909 initializers, so if not all elements are initialized we keep the
2910 original MODIFY_EXPR, we just remove all of the constructor elements. */
2911
2912 static enum gimplify_status
2913 gimplify_init_constructor (tree *expr_p, tree *pre_p,
2914 tree *post_p, bool want_value)
2915 {
2916 tree object;
2917 tree ctor = TREE_OPERAND (*expr_p, 1);
2918 tree type = TREE_TYPE (ctor);
2919 enum gimplify_status ret;
2920 VEC(constructor_elt,gc) *elts;
2921
2922 if (TREE_CODE (ctor) != CONSTRUCTOR)
2923 return GS_UNHANDLED;
2924
2925 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
2926 is_gimple_lvalue, fb_lvalue);
2927 if (ret == GS_ERROR)
2928 return ret;
2929 object = TREE_OPERAND (*expr_p, 0);
2930
2931 elts = CONSTRUCTOR_ELTS (ctor);
2932
2933 ret = GS_ALL_DONE;
2934 switch (TREE_CODE (type))
2935 {
2936 case RECORD_TYPE:
2937 case UNION_TYPE:
2938 case QUAL_UNION_TYPE:
2939 case ARRAY_TYPE:
2940 {
2941 struct gimplify_init_ctor_preeval_data preeval_data;
2942 HOST_WIDE_INT num_type_elements, num_ctor_elements;
2943 HOST_WIDE_INT num_nonzero_elements;
2944 bool cleared, valid_const_initializer;
2945
2946 /* Aggregate types must lower constructors to initialization of
2947 individual elements. The exception is that a CONSTRUCTOR node
2948 with no elements indicates zero-initialization of the whole. */
2949 if (VEC_empty (constructor_elt, elts))
2950 break;
2951
2952 /* Fetch information about the constructor to direct later processing.
2953 We might want to make static versions of it in various cases, and
2954 can only do so if it is known to be a valid constant initializer. */
2955 valid_const_initializer
2956 = categorize_ctor_elements (ctor, &num_nonzero_elements,
2957 &num_ctor_elements, &cleared);
2958
2959 /* If a const aggregate variable is being initialized, then it
2960 should never be a loss to promote the variable to be static. */
2961 if (valid_const_initializer
2962 && num_nonzero_elements > 1
2963 && TREE_READONLY (object)
2964 && TREE_CODE (object) == VAR_DECL)
2965 {
2966 DECL_INITIAL (object) = ctor;
2967 TREE_STATIC (object) = 1;
2968 if (!DECL_NAME (object))
2969 DECL_NAME (object) = create_tmp_var_name ("C");
2970 walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);
2971
2972 /* ??? C++ doesn't automatically append a .<number> to the
2973 assembler name, and even when it does, it looks at FE private
2974 data structures to figure out what that number should be,
2975 which are not set for this variable. I suppose this is
2976 important for local statics for inline functions, which aren't
2977 "local" in the object file sense. So in order to get a unique
2978 TU-local symbol, we must invoke the lhd version now. */
2979 lhd_set_decl_assembler_name (object);
2980
2981 *expr_p = NULL_TREE;
2982 break;
2983 }
2984
2985 /* If there are "lots" of initialized elements, even discounting
2986 those that are not address constants (and thus *must* be
2987 computed at runtime), then partition the constructor into
2988 constant and non-constant parts. Block copy the constant
2989 parts in, then generate code for the non-constant parts. */
2990 /* TODO. There's code in cp/typeck.c to do this. */
2991
2992 num_type_elements = count_type_elements (type, true);
2993
2994 /* If count_type_elements could not determine number of type elements
2995 for a constant-sized object, assume clearing is needed.
2996 Don't do this for variable-sized objects, as store_constructor
2997 will ignore the clearing of variable-sized objects. */
2998 if (num_type_elements < 0 && int_size_in_bytes (type) >= 0)
2999 cleared = true;
3000 /* If there are "lots" of zeros, then block clear the object first. */
3001 else if (num_type_elements - num_nonzero_elements > CLEAR_RATIO
3002 && num_nonzero_elements < num_type_elements/4)
3003 cleared = true;
3004 /* ??? This bit ought not be needed. For any element not present
3005 in the initializer, we should simply set it to zero. Except
3006 we'd need to *find* the elements that are not present, and that
3007 requires trickery to avoid quadratic compile-time behavior in
3008 large cases or excessive memory use in small cases. */
3009 else if (num_ctor_elements < num_type_elements)
3010 cleared = true;
3011
3012 /* If there are "lots" of initialized elements, and all of them
3013 are valid address constants, then the entire initializer can
3014 be dropped to memory, and then memcpy'd out. Don't do this
3015 for sparse arrays, though, as it's more efficient to follow
3016 the standard CONSTRUCTOR behavior of memset followed by
3017 individual element initialization. */
3018 if (valid_const_initializer && !cleared)
3019 {
3020 HOST_WIDE_INT size = int_size_in_bytes (type);
3021 unsigned int align;
3022
3023 /* ??? We can still get unbounded array types, at least
3024 from the C++ front end. This seems wrong, but attempt
3025 to work around it for now. */
3026 if (size < 0)
3027 {
3028 size = int_size_in_bytes (TREE_TYPE (object));
3029 if (size >= 0)
3030 TREE_TYPE (ctor) = type = TREE_TYPE (object);
3031 }
3032
3033 /* Find the maximum alignment we can assume for the object. */
3034 /* ??? Make use of DECL_OFFSET_ALIGN. */
3035 if (DECL_P (object))
3036 align = DECL_ALIGN (object);
3037 else
3038 align = TYPE_ALIGN (type);
3039
3040 if (size > 0 && !can_move_by_pieces (size, align))
3041 {
3042 tree new = create_tmp_var_raw (type, "C");
3043
3044 gimple_add_tmp_var (new);
3045 TREE_STATIC (new) = 1;
3046 TREE_READONLY (new) = 1;
3047 DECL_INITIAL (new) = ctor;
3048 if (align > DECL_ALIGN (new))
3049 {
3050 DECL_ALIGN (new) = align;
3051 DECL_USER_ALIGN (new) = 1;
3052 }
3053 walk_tree (&DECL_INITIAL (new), force_labels_r, NULL, NULL);
3054
3055 TREE_OPERAND (*expr_p, 1) = new;
3056
3057 /* This is no longer an assignment of a CONSTRUCTOR, but
3058 we still may have processing to do on the LHS. So
3059 pretend we didn't do anything here to let that happen. */
3060 return GS_UNHANDLED;
3061 }
3062 }
3063
3064 if (cleared)
3065 {
3066 /* Zap the CONSTRUCTOR element list, which simplifies this case.
3067 Note that we still have to gimplify, in order to handle the
3068 case of variable sized types. Avoid shared tree structures. */
3069 CONSTRUCTOR_ELTS (ctor) = NULL;
3070 object = unshare_expr (object);
3071 gimplify_stmt (expr_p);
3072 append_to_statement_list (*expr_p, pre_p);
3073 }
3074
3075 /* If we have not block cleared the object, or if there are nonzero
3076 elements in the constructor, add assignments to the individual
3077 scalar fields of the object. */
3078 if (!cleared || num_nonzero_elements > 0)
3079 {
3080 preeval_data.lhs_base_decl = get_base_address (object);
3081 if (!DECL_P (preeval_data.lhs_base_decl))
3082 preeval_data.lhs_base_decl = NULL;
3083 preeval_data.lhs_alias_set = get_alias_set (object);
3084
3085 gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
3086 pre_p, post_p, &preeval_data);
3087 gimplify_init_ctor_eval (object, elts, pre_p, cleared);
3088 }
3089
3090 *expr_p = NULL_TREE;
3091 }
3092 break;
3093
3094 case COMPLEX_TYPE:
3095 {
3096 tree r, i;
3097
3098 /* Extract the real and imaginary parts out of the ctor. */
3099 gcc_assert (VEC_length (constructor_elt, elts) == 2);
3100 r = VEC_index (constructor_elt, elts, 0)->value;
3101 i = VEC_index (constructor_elt, elts, 1)->value;
3102 if (r == NULL || i == NULL)
3103 {
3104 tree zero = fold_convert (TREE_TYPE (type), integer_zero_node);
3105 if (r == NULL)
3106 r = zero;
3107 if (i == NULL)
3108 i = zero;
3109 }
3110
3111 /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
3112 represent creation of a complex value. */
3113 if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
3114 {
3115 ctor = build_complex (type, r, i);
3116 TREE_OPERAND (*expr_p, 1) = ctor;
3117 }
3118 else
3119 {
3120 ctor = build2 (COMPLEX_EXPR, type, r, i);
3121 TREE_OPERAND (*expr_p, 1) = ctor;
3122 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
3123 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
3124 fb_rvalue);
3125 }
3126 }
3127 break;
3128
3129 case VECTOR_TYPE:
3130 {
3131 unsigned HOST_WIDE_INT ix;
3132 constructor_elt *ce;
3133
3134 /* Go ahead and simplify constant constructors to VECTOR_CST. */
3135 if (TREE_CONSTANT (ctor))
3136 {
3137 bool constant_p = true;
3138 tree value;
3139
3140 /* Even when ctor is constant, it might contain non-*_CST
3141 elements (e.g. { 1.0/0.0 - 1.0/0.0, 0.0 }) and those don't
3142 belong in VECTOR_CST nodes. */
3143 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
3144 if (!CONSTANT_CLASS_P (value))
3145 {
3146 constant_p = false;
3147 break;
3148 }
3149
3150 if (constant_p)
3151 {
3152 TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
3153 break;
3154 }
3155 }
3156
3157 /* Vector types use CONSTRUCTOR all the way through gimple
3158 compilation as a general initializer. */
3159 for (ix = 0; VEC_iterate (constructor_elt, elts, ix, ce); ix++)
3160 {
3161 enum gimplify_status tret;
3162 tret = gimplify_expr (&ce->value, pre_p, post_p,
3163 is_gimple_val, fb_rvalue);
3164 if (tret == GS_ERROR)
3165 ret = GS_ERROR;
3166 }
3167 }
3168 break;
3169
3170 default:
3171 /* So how did we get a CONSTRUCTOR for a scalar type? */
3172 gcc_unreachable ();
3173 }
3174
3175 if (ret == GS_ERROR)
3176 return GS_ERROR;
3177 else if (want_value)
3178 {
3179 append_to_statement_list (*expr_p, pre_p);
3180 *expr_p = object;
3181 return GS_OK;
3182 }
3183 else
3184 return GS_ALL_DONE;
3185 }
3186
3187 /* Given a pointer value T, return a simplified version of an
3188 indirection through T, or NULL_TREE if no simplification is
3189 possible. This may only be applied to a rhs of an expression.
3190 Note that the resulting type may be different from the type pointed
3191 to in the sense that it is still compatible from the langhooks
3192 point of view. */
3193
3194 static tree
3195 fold_indirect_ref_rhs (tree t)
3196 {
3197 tree type = TREE_TYPE (TREE_TYPE (t));
3198 tree sub = t;
3199 tree subtype;
3200
3201 STRIP_NOPS (sub);
3202 subtype = TREE_TYPE (sub);
3203 if (!POINTER_TYPE_P (subtype))
3204 return NULL_TREE;
3205
3206 if (TREE_CODE (sub) == ADDR_EXPR)
3207 {
3208 tree op = TREE_OPERAND (sub, 0);
3209 tree optype = TREE_TYPE (op);
3210 /* *&p => p */
3211 if (lang_hooks.types_compatible_p (type, optype))
3212 return op;
3213 /* *(foo *)&fooarray => fooarray[0] */
3214 else if (TREE_CODE (optype) == ARRAY_TYPE
3215 && lang_hooks.types_compatible_p (type, TREE_TYPE (optype)))
3216 {
3217 tree type_domain = TYPE_DOMAIN (optype);
3218 tree min_val = size_zero_node;
3219 if (type_domain && TYPE_MIN_VALUE (type_domain))
3220 min_val = TYPE_MIN_VALUE (type_domain);
3221 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
3222 }
3223 }
3224
3225 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
3226 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
3227 && lang_hooks.types_compatible_p (type, TREE_TYPE (TREE_TYPE (subtype))))
3228 {
3229 tree type_domain;
3230 tree min_val = size_zero_node;
3231 tree osub = sub;
3232 sub = fold_indirect_ref_rhs (sub);
3233 if (! sub)
3234 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
3235 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
3236 if (type_domain && TYPE_MIN_VALUE (type_domain))
3237 min_val = TYPE_MIN_VALUE (type_domain);
3238 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
3239 }
3240
3241 return NULL_TREE;
3242 }
3243
3244 /* Subroutine of gimplify_modify_expr to do simplifications of MODIFY_EXPRs
3245 based on the code of the RHS. We loop for as long as something changes. */
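/* One illustrative case handled below (CALL_EXPR): when an aggregate that
   is returned in memory (per aggregate_value_p, so this is ABI dependent)
   directly initializes a variable,

     struct big x = f ();

   the call is marked CALL_EXPR_RETURN_SLOT_OPT so that f stores its result
   straight into "x" rather than into a temporary that would then have to be
   block copied.  */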
3246
3247 static enum gimplify_status
3248 gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p, tree *pre_p,
3249 tree *post_p, bool want_value)
3250 {
3251 enum gimplify_status ret = GS_OK;
3252
3253 while (ret != GS_UNHANDLED)
3254 switch (TREE_CODE (*from_p))
3255 {
3256 case INDIRECT_REF:
3257 {
3258 /* If we have code like
3259
3260 *(const A*)(A*)&x
3261
3262 where the type of "x" is a (possibly cv-qualified variant
3263 of "A"), treat the entire expression as identical to "x".
3264 This kind of code arises in C++ when an object is bound
3265 to a const reference, and if "x" is a TARGET_EXPR we want
3266 to take advantage of the optimization below. */
3267 tree t = fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
3268 if (t)
3269 {
3270 *from_p = t;
3271 ret = GS_OK;
3272 }
3273 else
3274 ret = GS_UNHANDLED;
3275 break;
3276 }
3277
3278 case TARGET_EXPR:
3279 {
3280 /* If we are initializing something from a TARGET_EXPR, strip the
3281 TARGET_EXPR and initialize it directly, if possible. This can't
3282 be done if the initializer is void, since that implies that the
3283 temporary is set in some non-trivial way.
3284
3285 ??? What about code that pulls out the temp and uses it
3286 elsewhere? I think that such code never uses the TARGET_EXPR as
3287 an initializer. If I'm wrong, we'll die because the temp won't
3288 have any RTL. In that case, I guess we'll need to replace
3289 references somehow. */
3290 tree init = TARGET_EXPR_INITIAL (*from_p);
3291
3292 if (!VOID_TYPE_P (TREE_TYPE (init)))
3293 {
3294 *from_p = init;
3295 ret = GS_OK;
3296 }
3297 else
3298 ret = GS_UNHANDLED;
3299 }
3300 break;
3301
3302 case COMPOUND_EXPR:
3303 /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
3304 caught. */
3305 gimplify_compound_expr (from_p, pre_p, true);
3306 ret = GS_OK;
3307 break;
3308
3309 case CONSTRUCTOR:
3310 /* If we're initializing from a CONSTRUCTOR, break this into
3311 individual MODIFY_EXPRs. */
3312 return gimplify_init_constructor (expr_p, pre_p, post_p, want_value);
3313
3314 case COND_EXPR:
3315 /* If we're assigning to a non-register type, push the assignment
3316 down into the branches. This is mandatory for ADDRESSABLE types,
3317 since we cannot generate temporaries for such, but it saves a
3318 copy in other cases as well. */
3319 if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
3320 {
3321 /* This code should mirror the code in gimplify_cond_expr. */
3322 enum tree_code code = TREE_CODE (*expr_p);
3323 tree cond = *from_p;
3324 tree result = *to_p;
3325
3326 ret = gimplify_expr (&result, pre_p, post_p,
3327 is_gimple_min_lval, fb_lvalue);
3328 if (ret != GS_ERROR)
3329 ret = GS_OK;
3330
3331 if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
3332 TREE_OPERAND (cond, 1)
3333 = build2 (code, void_type_node, result,
3334 TREE_OPERAND (cond, 1));
3335 if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
3336 TREE_OPERAND (cond, 2)
3337 = build2 (code, void_type_node, unshare_expr (result),
3338 TREE_OPERAND (cond, 2));
3339
3340 TREE_TYPE (cond) = void_type_node;
3341 recalculate_side_effects (cond);
3342
3343 if (want_value)
3344 {
3345 gimplify_and_add (cond, pre_p);
3346 *expr_p = unshare_expr (result);
3347 }
3348 else
3349 *expr_p = cond;
3350 return ret;
3351 }
3352 else
3353 ret = GS_UNHANDLED;
3354 break;
3355
3356 case CALL_EXPR:
3357 /* For calls that return in memory, give *to_p as the CALL_EXPR's
3358 return slot so that we don't generate a temporary. */
3359 if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
3360 && aggregate_value_p (*from_p, *from_p))
3361 {
3362 bool use_target;
3363
3364 if (!(rhs_predicate_for (*to_p))(*from_p))
3365 /* If we need a temporary, *to_p isn't accurate. */
3366 use_target = false;
3367 else if (TREE_CODE (*to_p) == RESULT_DECL
3368 && DECL_NAME (*to_p) == NULL_TREE
3369 && needs_to_live_in_memory (*to_p))
3370 /* It's OK to use the return slot directly unless it's an NRV. */
3371 use_target = true;
3372 else if (is_gimple_reg_type (TREE_TYPE (*to_p))
3373 || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
3374 /* Don't force regs into memory. */
3375 use_target = false;
3376 else if (TREE_CODE (*to_p) == VAR_DECL
3377 && DECL_GIMPLE_FORMAL_TEMP_P (*to_p))
3378 /* Don't use the original target if it's a formal temp; we
3379 don't want to take its address. */
3380 use_target = false;
3381 else if (TREE_CODE (*expr_p) == INIT_EXPR)
3382 /* It's OK to use the target directly if it's being
3383 initialized. */
3384 use_target = true;
3385 else if (!is_gimple_non_addressable (*to_p))
3386 /* Don't use the original target if it's already addressable;
3387 if its address escapes, and the called function uses the
3388 NRV optimization, a conforming program could see *to_p
3389 change before the called function returns; see c++/19317.
3390 When optimizing, the return_slot pass marks more functions
3391 as safe after we have escape info. */
3392 use_target = false;
3393 else
3394 use_target = true;
3395
3396 if (use_target)
3397 {
3398 CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
3399 lang_hooks.mark_addressable (*to_p);
3400 }
3401 }
3402
3403 ret = GS_UNHANDLED;
3404 break;
3405
3406 default:
3407 ret = GS_UNHANDLED;
3408 break;
3409 }
3410
3411 return ret;
3412 }
3413
3414 /* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
3415 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with
3416 DECL_COMPLEX_GIMPLE_REG_P set. */
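/* Sketch of the promotion (temporary name illustrative): for a complex
   variable "c" with DECL_COMPLEX_GIMPLE_REG_P set,

     __real c = x;

   becomes

     D.1 = __imag c;
     c = COMPLEX_EXPR <x, D.1>;

   so that later passes always see whole definitions of "c".  */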
3417
3418 static enum gimplify_status
3419 gimplify_modify_expr_complex_part (tree *expr_p, tree *pre_p, bool want_value)
3420 {
3421 enum tree_code code, ocode;
3422 tree lhs, rhs, new_rhs, other, realpart, imagpart;
3423
3424 lhs = TREE_OPERAND (*expr_p, 0);
3425 rhs = TREE_OPERAND (*expr_p, 1);
3426 code = TREE_CODE (lhs);
3427 lhs = TREE_OPERAND (lhs, 0);
3428
3429 ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
3430 other = build1 (ocode, TREE_TYPE (rhs), lhs);
3431 other = get_formal_tmp_var (other, pre_p);
3432
3433 realpart = code == REALPART_EXPR ? rhs : other;
3434 imagpart = code == REALPART_EXPR ? other : rhs;
3435
3436 if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
3437 new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
3438 else
3439 new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
3440
3441 TREE_OPERAND (*expr_p, 0) = lhs;
3442 TREE_OPERAND (*expr_p, 1) = new_rhs;
3443
3444 if (want_value)
3445 {
3446 append_to_statement_list (*expr_p, pre_p);
3447 *expr_p = rhs;
3448 }
3449
3450 return GS_ALL_DONE;
3451 }
3452
3453 /* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
3454
3455 modify_expr
3456 : varname '=' rhs
3457 | '*' ID '=' rhs
3458
3459 PRE_P points to the list where side effects that must happen before
3460 *EXPR_P should be stored.
3461
3462 POST_P points to the list where side effects that must happen after
3463 *EXPR_P should be stored.
3464
3465 WANT_VALUE is nonzero iff we want to use the value of this expression
3466 in another expression. */
3467
3468 static enum gimplify_status
3469 gimplify_modify_expr (tree *expr_p, tree *pre_p, tree *post_p, bool want_value)
3470 {
3471 tree *from_p = &TREE_OPERAND (*expr_p, 1);
3472 tree *to_p = &TREE_OPERAND (*expr_p, 0);
3473 enum gimplify_status ret = GS_UNHANDLED;
3474
3475 gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
3476 || TREE_CODE (*expr_p) == INIT_EXPR);
3477
3478 /* For zero sized types only gimplify the left hand side and right hand side
3479 as statements and throw away the assignment. */
3480 if (zero_sized_type (TREE_TYPE (*from_p)))
3481 {
3482 gimplify_stmt (from_p);
3483 gimplify_stmt (to_p);
3484 append_to_statement_list (*from_p, pre_p);
3485 append_to_statement_list (*to_p, pre_p);
3486 *expr_p = NULL_TREE;
3487 return GS_ALL_DONE;
3488 }
3489
3490 /* See if any simplifications can be done based on what the RHS is. */
3491 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
3492 want_value);
3493 if (ret != GS_UNHANDLED)
3494 return ret;
3495
3496 /* If the value being copied is of variable width, compute the length
3497 of the copy into a WITH_SIZE_EXPR. Note that we need to do this
3498 before gimplifying any of the operands so that we can resolve any
3499 PLACEHOLDER_EXPRs in the size. Also note that the RTL expander uses
3500 the size of the expression to be copied, not of the destination, so
3501 that is what we must use here. */
3502 maybe_with_size_expr (from_p);
3503
3504 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
3505 if (ret == GS_ERROR)
3506 return ret;
3507
3508 ret = gimplify_expr (from_p, pre_p, post_p,
3509 rhs_predicate_for (*to_p), fb_rvalue);
3510 if (ret == GS_ERROR)
3511 return ret;
3512
3513 /* Now see if the above changed *from_p to something we handle specially. */
3514 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
3515 want_value);
3516 if (ret != GS_UNHANDLED)
3517 return ret;
3518
3519 /* If we've got a variable sized assignment between two lvalues (i.e. does
3520 not involve a call), then we can make things a bit more straightforward
3521 by converting the assignment to memcpy or memset. */
3522 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
3523 {
3524 tree from = TREE_OPERAND (*from_p, 0);
3525 tree size = TREE_OPERAND (*from_p, 1);
3526
3527 if (TREE_CODE (from) == CONSTRUCTOR)
3528 return gimplify_modify_expr_to_memset (expr_p, size, want_value);
3529 if (is_gimple_addressable (from))
3530 {
3531 *from_p = from;
3532 return gimplify_modify_expr_to_memcpy (expr_p, size, want_value);
3533 }
3534 }
3535
3536 /* Transform partial stores to non-addressable complex variables into
3537 total stores. This allows us to use real instead of virtual operands
3538 for these variables, which improves optimization. */
3539 if ((TREE_CODE (*to_p) == REALPART_EXPR
3540 || TREE_CODE (*to_p) == IMAGPART_EXPR)
3541 && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
3542 return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);
3543
3544 if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
3545 {
3546 /* If we've somehow already got an SSA_NAME on the LHS, then
3547 we've probably modified it twice. Not good. */
3548 gcc_assert (TREE_CODE (*to_p) != SSA_NAME);
3549 *to_p = make_ssa_name (*to_p, *expr_p);
3550 }
3551
3552 if (want_value)
3553 {
3554 append_to_statement_list (*expr_p, pre_p);
3555 *expr_p = *to_p;
3556 return GS_OK;
3557 }
3558
3559 return GS_ALL_DONE;
3560 }
3561
3562 /* Gimplify a comparison between two variable-sized objects. Do this
3563 with a call to BUILT_IN_MEMCMP. */
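/* Illustrative example: for operands of a variable-sized type, "a == b"
   is rewritten roughly as

     __builtin_memcmp (&a, &b, SIZE) == 0

   where SIZE is the type's TYPE_SIZE_UNIT with any PLACEHOLDER_EXPRs
   substituted from the first operand.  */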
3564
3565 static enum gimplify_status
3566 gimplify_variable_sized_compare (tree *expr_p)
3567 {
3568 tree op0 = TREE_OPERAND (*expr_p, 0);
3569 tree op1 = TREE_OPERAND (*expr_p, 1);
3570 tree args, t, dest;
3571
3572 t = TYPE_SIZE_UNIT (TREE_TYPE (op0));
3573 t = unshare_expr (t);
3574 t = SUBSTITUTE_PLACEHOLDER_IN_EXPR (t, op0);
3575 args = tree_cons (NULL, t, NULL);
3576 t = build_fold_addr_expr (op1);
3577 args = tree_cons (NULL, t, args);
3578 dest = build_fold_addr_expr (op0);
3579 args = tree_cons (NULL, dest, args);
3580 t = implicit_built_in_decls[BUILT_IN_MEMCMP];
3581 t = build_function_call_expr (t, args);
3582 *expr_p
3583 = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
3584
3585 return GS_OK;
3586 }
3587
3588 /* Gimplify a comparison between two aggregate objects of integral scalar
3589 mode as a comparison between the bitwise equivalent scalar values. */
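/* For example (a sketch; the integer type actually chosen comes from the
   language hook): comparing two 8-byte structures whose TYPE_MODE is
   DImode is rewritten as roughly

     VIEW_CONVERT_EXPR<long long>(a) == VIEW_CONVERT_EXPR<long long>(b)

   i.e. the bits of each operand are reinterpreted as a scalar of the
   same mode and compared directly.  */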
3590
3591 static enum gimplify_status
3592 gimplify_scalar_mode_aggregate_compare (tree *expr_p)
3593 {
3594 tree op0 = TREE_OPERAND (*expr_p, 0);
3595 tree op1 = TREE_OPERAND (*expr_p, 1);
3596
3597 tree type = TREE_TYPE (op0);
3598 tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
3599
3600 op0 = fold_build1 (VIEW_CONVERT_EXPR, scalar_type, op0);
3601 op1 = fold_build1 (VIEW_CONVERT_EXPR, scalar_type, op1);
3602
3603 *expr_p
3604 = fold_build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
3605
3606 return GS_OK;
3607 }
3608
3609 /* Gimplify TRUTH_ANDIF_EXPR and TRUTH_ORIF_EXPR expressions. EXPR_P
3610 points to the expression to gimplify.
3611
3612 Expressions of the form 'a && b' are gimplified to:
3613
3614 a && b ? true : false
3615
3616 gimplify_cond_expr will do the rest.
3617
3618 PRE_P points to the list where side effects that must happen before
3619 *EXPR_P should be stored. */
3620
3621 static enum gimplify_status
3622 gimplify_boolean_expr (tree *expr_p)
3623 {
3624 /* Preserve the original type of the expression. */
3625 tree type = TREE_TYPE (*expr_p);
3626
3627 *expr_p = build3 (COND_EXPR, type, *expr_p,
3628 fold_convert (type, boolean_true_node),
3629 fold_convert (type, boolean_false_node));
3630
3631 return GS_OK;
3632 }
3633
3634 /* Gimplifies an expression sequence. This function gimplifies each
3635 expression and re-writes the original expression with the last
3636 expression of the sequence in GIMPLE form.
3637
3638 PRE_P points to the list where the side effects for all the
3639 expressions in the sequence will be emitted.
3640
3641 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
3642 /* ??? Should rearrange to share the pre-queue with all the indirect
3643 invocations of gimplify_expr. Would probably save on creations
3644 of statement_list nodes. */
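/* For example (a sketch): gimplifying '(a = f (), b = g (), a + b)' emits
   the two assignments onto PRE_P as statements and leaves only 'a + b' in
   *EXPR_P; when the value is not wanted, that final expression is then
   gimplified as a statement as well.  */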
3645
3646 static enum gimplify_status
3647 gimplify_compound_expr (tree *expr_p, tree *pre_p, bool want_value)
3648 {
3649 tree t = *expr_p;
3650
3651 do
3652 {
3653 tree *sub_p = &TREE_OPERAND (t, 0);
3654
3655 if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
3656 gimplify_compound_expr (sub_p, pre_p, false);
3657 else
3658 gimplify_stmt (sub_p);
3659 append_to_statement_list (*sub_p, pre_p);
3660
3661 t = TREE_OPERAND (t, 1);
3662 }
3663 while (TREE_CODE (t) == COMPOUND_EXPR);
3664
3665 *expr_p = t;
3666 if (want_value)
3667 return GS_OK;
3668 else
3669 {
3670 gimplify_stmt (expr_p);
3671 return GS_ALL_DONE;
3672 }
3673 }
3674
3675 /* Gimplifies a statement list. These may be created either by an
3676 enlightened front-end, or by shortcut_cond_expr. */
3677
3678 static enum gimplify_status
3679 gimplify_statement_list (tree *expr_p)
3680 {
3681 tree_stmt_iterator i = tsi_start (*expr_p);
3682
3683 while (!tsi_end_p (i))
3684 {
3685 tree t;
3686
3687 gimplify_stmt (tsi_stmt_ptr (i));
3688
3689 t = tsi_stmt (i);
3690 if (t == NULL)
3691 tsi_delink (&i);
3692 else if (TREE_CODE (t) == STATEMENT_LIST)
3693 {
3694 tsi_link_before (&i, t, TSI_SAME_STMT);
3695 tsi_delink (&i);
3696 }
3697 else
3698 tsi_next (&i);
3699 }
3700
3701 return GS_ALL_DONE;
3702 }
3703
3704 /* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
3705 gimplify. After gimplification, EXPR_P will point to a new temporary
3706 that holds the original value of the SAVE_EXPR node.
3707
3708 PRE_P points to the list where side effects that must happen before
3709 *EXPR_P should be stored. */
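/* A sketch (temporary name invented): for a shared SAVE_EXPR <i++> node,
   the first gimplification evaluates 'i++' into a temporary D.1234 and
   marks the node resolved; later occurrences of the same node simply
   rewrite to D.1234, so the increment happens exactly once.  */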
3710
3711 static enum gimplify_status
3712 gimplify_save_expr (tree *expr_p, tree *pre_p, tree *post_p)
3713 {
3714 enum gimplify_status ret = GS_ALL_DONE;
3715 tree val;
3716
3717 gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
3718 val = TREE_OPERAND (*expr_p, 0);
3719
3720 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
3721 if (!SAVE_EXPR_RESOLVED_P (*expr_p))
3722 {
3723 /* The operand may be a void-valued expression, such as the SAVE_EXPRs
3724 generated by the Java frontend for class initialization. It is
3725 being executed only for its side-effects. */
3726 if (TREE_TYPE (val) == void_type_node)
3727 {
3728 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
3729 is_gimple_stmt, fb_none);
3730 append_to_statement_list (TREE_OPERAND (*expr_p, 0), pre_p);
3731 val = NULL;
3732 }
3733 else
3734 val = get_initialized_tmp_var (val, pre_p, post_p);
3735
3736 TREE_OPERAND (*expr_p, 0) = val;
3737 SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
3738 }
3739
3740 *expr_p = val;
3741
3742 return ret;
3743 }
3744
3745 /* Re-write the ADDR_EXPR node pointed to by EXPR_P
3746
3747 unary_expr
3748 : ...
3749 | '&' varname
3750 ...
3751
3752 PRE_P points to the list where side effects that must happen before
3753 *EXPR_P should be stored.
3754
3755 POST_P points to the list where side effects that must happen after
3756 *EXPR_P should be stored. */
3757
3758 static enum gimplify_status
3759 gimplify_addr_expr (tree *expr_p, tree *pre_p, tree *post_p)
3760 {
3761 tree expr = *expr_p;
3762 tree op0 = TREE_OPERAND (expr, 0);
3763 enum gimplify_status ret;
3764
3765 switch (TREE_CODE (op0))
3766 {
3767 case INDIRECT_REF:
3768 case MISALIGNED_INDIRECT_REF:
3769 do_indirect_ref:
3770 /* Check if we are dealing with an expression of the form '&*ptr'.
3771 While the front end folds away '&*ptr' into 'ptr', these
3772 expressions may be generated internally by the compiler (e.g.,
3773 builtins like __builtin_va_end). */
3774 /* Caution: the silent array decomposition semantics we allow for
3775 ADDR_EXPR mean we can't always discard the pair. */
3776 /* Gimplification of the ADDR_EXPR operand may drop
3777 cv-qualification conversions, so make sure we add them if
3778 needed. */
3779 {
3780 tree op00 = TREE_OPERAND (op0, 0);
3781 tree t_expr = TREE_TYPE (expr);
3782 tree t_op00 = TREE_TYPE (op00);
3783
3784 if (!lang_hooks.types_compatible_p (t_expr, t_op00))
3785 {
3786 #ifdef ENABLE_CHECKING
3787 tree t_op0 = TREE_TYPE (op0);
3788 gcc_assert (POINTER_TYPE_P (t_expr)
3789 && cpt_same_type (TREE_CODE (t_op0) == ARRAY_TYPE
3790 ? TREE_TYPE (t_op0) : t_op0,
3791 TREE_TYPE (t_expr))
3792 && POINTER_TYPE_P (t_op00)
3793 && cpt_same_type (t_op0, TREE_TYPE (t_op00)));
3794 #endif
3795 op00 = fold_convert (TREE_TYPE (expr), op00);
3796 }
3797 *expr_p = op00;
3798 ret = GS_OK;
3799 }
3800 break;
3801
3802 case VIEW_CONVERT_EXPR:
3803 /* Take the address of our operand and then convert it to the type of
3804 this ADDR_EXPR.
3805
3806 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
3807 all clear. The impact of this transformation is even less clear. */
3808
3809 /* If the operand is a useless conversion, look through it. Doing so
3810 guarantees that the ADDR_EXPR and its operand will remain of the
3811 same type. */
3812 if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
3813 op0 = TREE_OPERAND (op0, 0);
3814
3815 *expr_p = fold_convert (TREE_TYPE (expr),
3816 build_fold_addr_expr (TREE_OPERAND (op0, 0)));
3817 ret = GS_OK;
3818 break;
3819
3820 default:
3821 /* We use fb_either here because the C frontend sometimes takes
3822 the address of a call that returns a struct; see
3823 gcc.dg/c99-array-lval-1.c. The gimplifier will correctly make
3824 the implied temporary explicit. */
3825 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
3826 is_gimple_addressable, fb_either);
3827 if (ret != GS_ERROR)
3828 {
3829 op0 = TREE_OPERAND (expr, 0);
3830
3831 /* For various reasons, the gimplification of the expression
3832 may have made a new INDIRECT_REF. */
3833 if (TREE_CODE (op0) == INDIRECT_REF)
3834 goto do_indirect_ref;
3835
3836 /* Make sure TREE_INVARIANT, TREE_CONSTANT, and TREE_SIDE_EFFECTS
3837 are set properly. */
3838 recompute_tree_invariant_for_addr_expr (expr);
3839
3840 /* Mark the RHS addressable. */
3841 lang_hooks.mark_addressable (TREE_OPERAND (expr, 0));
3842 }
3843 break;
3844 }
3845
3846 return ret;
3847 }
3848
3849 /* Gimplify the operands of an ASM_EXPR. Input operands should be a gimple
3850 value; output operands should be a gimple lvalue. */
3851
3852 static enum gimplify_status
3853 gimplify_asm_expr (tree *expr_p, tree *pre_p, tree *post_p)
3854 {
3855 tree expr = *expr_p;
3856 int noutputs = list_length (ASM_OUTPUTS (expr));
3857 const char **oconstraints
3858 = (const char **) alloca ((noutputs) * sizeof (const char *));
3859 int i;
3860 tree link;
3861 const char *constraint;
3862 bool allows_mem, allows_reg, is_inout;
3863 enum gimplify_status ret, tret;
3864
3865 ret = GS_ALL_DONE;
3866 for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = TREE_CHAIN (link))
3867 {
3868 size_t constraint_len;
3869 oconstraints[i] = constraint
3870 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
3871 constraint_len = strlen (constraint);
3872 if (constraint_len == 0)
3873 continue;
3874
3875 parse_output_constraint (&constraint, i, 0, 0,
3876 &allows_mem, &allows_reg, &is_inout);
3877
3878 if (!allows_reg && allows_mem)
3879 lang_hooks.mark_addressable (TREE_VALUE (link));
3880
3881 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
3882 is_inout ? is_gimple_min_lval : is_gimple_lvalue,
3883 fb_lvalue | fb_mayfail);
3884 if (tret == GS_ERROR)
3885 {
3886 error ("invalid lvalue in asm output %d", i);
3887 ret = tret;
3888 }
3889
3890 if (is_inout)
3891 {
3892 /* An input/output operand. To give the optimizers more
3893 flexibility, split it into separate input and output
3894 operands. */
3895 tree input;
3896 char buf[10];
3897
3898 /* Turn the in/out constraint into an output constraint. */
3899 char *p = xstrdup (constraint);
3900 p[0] = '=';
3901 TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);
3902
3903 /* And add a matching input constraint. */
3904 if (allows_reg)
3905 {
3906 sprintf (buf, "%d", i);
3907
3908 /* If there are multiple alternatives in the constraint,
3909 handle each of them individually. Those that allow register
3910 will be replaced with operand number, the others will stay
3911 unchanged. */
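/* Example (a sketch): for output operand 0 with constraint "+r,m", the
   output constraint is rewritten to "=r,m" above, and the matching input
   constraint built here becomes "0,m" -- alternatives that allow a
   register are replaced by the operand number, the rest are copied
   unchanged.  */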
3912 if (strchr (p, ',') != NULL)
3913 {
3914 size_t len = 0, buflen = strlen (buf);
3915 char *beg, *end, *str, *dst;
3916
3917 for (beg = p + 1;;)
3918 {
3919 end = strchr (beg, ',');
3920 if (end == NULL)
3921 end = strchr (beg, '\0');
3922 if ((size_t) (end - beg) < buflen)
3923 len += buflen + 1;
3924 else
3925 len += end - beg + 1;
3926 if (*end)
3927 beg = end + 1;
3928 else
3929 break;
3930 }
3931
3932 str = (char *) alloca (len);
3933 for (beg = p + 1, dst = str;;)
3934 {
3935 const char *tem;
3936 bool mem_p, reg_p, inout_p;
3937
3938 end = strchr (beg, ',');
3939 if (end)
3940 *end = '\0';
3941 beg[-1] = '=';
3942 tem = beg - 1;
3943 parse_output_constraint (&tem, i, 0, 0,
3944 &mem_p, &reg_p, &inout_p);
3945 if (dst != str)
3946 *dst++ = ',';
3947 if (reg_p)
3948 {
3949 memcpy (dst, buf, buflen);
3950 dst += buflen;
3951 }
3952 else
3953 {
3954 if (end)
3955 len = end - beg;
3956 else
3957 len = strlen (beg);
3958 memcpy (dst, beg, len);
3959 dst += len;
3960 }
3961 if (end)
3962 beg = end + 1;
3963 else
3964 break;
3965 }
3966 *dst = '\0';
3967 input = build_string (dst - str, str);
3968 }
3969 else
3970 input = build_string (strlen (buf), buf);
3971 }
3972 else
3973 input = build_string (constraint_len - 1, constraint + 1);
3974
3975 free (p);
3976
3977 input = build_tree_list (build_tree_list (NULL_TREE, input),
3978 unshare_expr (TREE_VALUE (link)));
3979 ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
3980 }
3981 }
3982
3983 for (link = ASM_INPUTS (expr); link; ++i, link = TREE_CHAIN (link))
3984 {
3985 constraint
3986 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
3987 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
3988 oconstraints, &allows_mem, &allows_reg);
3989
3990 /* If the operand is a memory input, it should be an lvalue. */
3991 if (!allows_reg && allows_mem)
3992 {
3993 lang_hooks.mark_addressable (TREE_VALUE (link));
3994 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
3995 is_gimple_lvalue, fb_lvalue | fb_mayfail);
3996 if (tret == GS_ERROR)
3997 {
3998 error ("memory input %d is not directly addressable", i);
3999 ret = tret;
4000 }
4001 }
4002 else
4003 {
4004 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
4005 is_gimple_asm_val, fb_rvalue);
4006 if (tret == GS_ERROR)
4007 ret = tret;
4008 }
4009 }
4010
4011 return ret;
4012 }
4013
4014 /* Gimplify a CLEANUP_POINT_EXPR. Currently this works by adding
4015 WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
4016 gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
4017 return to this function.
4018
4019 FIXME should we complexify the prequeue handling instead? Or use flags
4020 for all the cleanups and let the optimizer tighten them up? The current
4021 code seems pretty fragile; it will break on a cleanup within any
4022 non-conditional nesting. But any such nesting would be broken, anyway;
4023 we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
4024 and continues out of it. We can do that at the RTL level, though, so
4025 having an optimizer to tighten up try/finally regions would be a Good
4026 Thing. */
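/* A sketch of the rewrite done below: a gimplified body of the form

     stmt1;
     WITH_CLEANUP_EXPR <cleanup>;
     stmt2;
     stmt3;

   becomes

     stmt1;
     try { stmt2; stmt3; } finally { cleanup; }

   with TRY_CATCH_EXPR used instead of TRY_FINALLY_EXPR when the cleanup
   is CLEANUP_EH_ONLY; if the marker is the last statement, the cleanup is
   simply emitted in place.  */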
4027
4028 static enum gimplify_status
4029 gimplify_cleanup_point_expr (tree *expr_p, tree *pre_p)
4030 {
4031 tree_stmt_iterator iter;
4032 tree body;
4033
4034 tree temp = voidify_wrapper_expr (*expr_p, NULL);
4035
4036 /* We only care about the number of conditions between the innermost
4037 CLEANUP_POINT_EXPR and the cleanup. So save and reset the count and
4038 any cleanups collected outside the CLEANUP_POINT_EXPR. */
4039 int old_conds = gimplify_ctxp->conditions;
4040 tree old_cleanups = gimplify_ctxp->conditional_cleanups;
4041 gimplify_ctxp->conditions = 0;
4042 gimplify_ctxp->conditional_cleanups = NULL_TREE;
4043
4044 body = TREE_OPERAND (*expr_p, 0);
4045 gimplify_to_stmt_list (&body);
4046
4047 gimplify_ctxp->conditions = old_conds;
4048 gimplify_ctxp->conditional_cleanups = old_cleanups;
4049
4050 for (iter = tsi_start (body); !tsi_end_p (iter); )
4051 {
4052 tree *wce_p = tsi_stmt_ptr (iter);
4053 tree wce = *wce_p;
4054
4055 if (TREE_CODE (wce) == WITH_CLEANUP_EXPR)
4056 {
4057 if (tsi_one_before_end_p (iter))
4058 {
4059 tsi_link_before (&iter, TREE_OPERAND (wce, 0), TSI_SAME_STMT);
4060 tsi_delink (&iter);
4061 break;
4062 }
4063 else
4064 {
4065 tree sl, tfe;
4066 enum tree_code code;
4067
4068 if (CLEANUP_EH_ONLY (wce))
4069 code = TRY_CATCH_EXPR;
4070 else
4071 code = TRY_FINALLY_EXPR;
4072
4073 sl = tsi_split_statement_list_after (&iter);
4074 tfe = build2 (code, void_type_node, sl, NULL_TREE);
4075 append_to_statement_list (TREE_OPERAND (wce, 0),
4076 &TREE_OPERAND (tfe, 1));
4077 *wce_p = tfe;
4078 iter = tsi_start (sl);
4079 }
4080 }
4081 else
4082 tsi_next (&iter);
4083 }
4084
4085 if (temp)
4086 {
4087 *expr_p = temp;
4088 append_to_statement_list (body, pre_p);
4089 return GS_OK;
4090 }
4091 else
4092 {
4093 *expr_p = body;
4094 return GS_ALL_DONE;
4095 }
4096 }
4097
4098 /* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP
4099 is the cleanup action required. */
4100
4101 static void
4102 gimple_push_cleanup (tree var, tree cleanup, bool eh_only, tree *pre_p)
4103 {
4104 tree wce;
4105
4106 /* Errors can result in improperly nested cleanups, which in turn cause
4107 confusion when trying to resolve the WITH_CLEANUP_EXPR. */
4108 if (errorcount || sorrycount)
4109 return;
4110
4111 if (gimple_conditional_context ())
4112 {
4113 /* If we're in a conditional context, this is more complex. We only
4114 want to run the cleanup if we actually ran the initialization that
4115 necessitates it, but we want to run it after the end of the
4116 conditional context. So we wrap the try/finally around the
4117 condition and use a flag to determine whether or not to actually
4118 run the destructor. Thus
4119
4120 test ? f(A()) : 0
4121
4122 becomes (approximately)
4123
4124 flag = 0;
4125 try {
4126 if (test) { A::A(temp); flag = 1; val = f(temp); }
4127 else { val = 0; }
4128 } finally {
4129 if (flag) A::~A(temp);
4130 }
4131 val
4132 */
4133
4134 tree flag = create_tmp_var (boolean_type_node, "cleanup");
4135 tree ffalse = build2 (MODIFY_EXPR, void_type_node, flag,
4136 boolean_false_node);
4137 tree ftrue = build2 (MODIFY_EXPR, void_type_node, flag,
4138 boolean_true_node);
4139 cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
4140 wce = build1 (WITH_CLEANUP_EXPR, void_type_node, cleanup);
4141 append_to_statement_list (ffalse, &gimplify_ctxp->conditional_cleanups);
4142 append_to_statement_list (wce, &gimplify_ctxp->conditional_cleanups);
4143 append_to_statement_list (ftrue, pre_p);
4144
4145 /* Because of this manipulation, and the EH edges that jump
4146 threading cannot redirect, the temporary (VAR) will appear
4147 to be used uninitialized. Don't warn. */
4148 TREE_NO_WARNING (var) = 1;
4149 }
4150 else
4151 {
4152 wce = build1 (WITH_CLEANUP_EXPR, void_type_node, cleanup);
4153 CLEANUP_EH_ONLY (wce) = eh_only;
4154 append_to_statement_list (wce, pre_p);
4155 }
4156
4157 gimplify_stmt (&TREE_OPERAND (wce, 0));
4158 }
4159
4160 /* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */
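/* A sketch: for TARGET_EXPR <slot, init>, SLOT is registered as a local
   temporary, 'slot = init' is gimplified onto PRE_P (unless INIT has void
   type, in which case its mere evaluation fills the slot), any
   TARGET_EXPR_CLEANUP is pushed as a cleanup for SLOT, and *EXPR_P is
   replaced by SLOT itself.  */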
4161
4162 static enum gimplify_status
4163 gimplify_target_expr (tree *expr_p, tree *pre_p, tree *post_p)
4164 {
4165 tree targ = *expr_p;
4166 tree temp = TARGET_EXPR_SLOT (targ);
4167 tree init = TARGET_EXPR_INITIAL (targ);
4168 enum gimplify_status ret;
4169
4170 if (init)
4171 {
4172 /* TARGET_EXPR temps aren't part of the enclosing block, so add this one
4173 to the temps list. */
4174 gimple_add_tmp_var (temp);
4175
4176 /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
4177 expression is supposed to initialize the slot. */
4178 if (VOID_TYPE_P (TREE_TYPE (init)))
4179 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
4180 else
4181 {
4182 /* Special handling for BIND_EXPR can result in fewer temps. */
4183 ret = GS_OK;
4184 if (TREE_CODE (init) == BIND_EXPR)
4185 gimplify_bind_expr (&init, temp, pre_p);
4186 if (init != temp)
4187 {
4188 init = build2 (INIT_EXPR, void_type_node, temp, init);
4189 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt,
4190 fb_none);
4191 }
4192 }
4193 if (ret == GS_ERROR)
4194 return GS_ERROR;
4195 append_to_statement_list (init, pre_p);
4196
4197 /* If needed, push the cleanup for the temp. */
4198 if (TARGET_EXPR_CLEANUP (targ))
4199 {
4200 gimplify_stmt (&TARGET_EXPR_CLEANUP (targ));
4201 gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
4202 CLEANUP_EH_ONLY (targ), pre_p);
4203 }
4204
4205 /* Only expand this once. */
4206 TREE_OPERAND (targ, 3) = init;
4207 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
4208 }
4209 else
4210 /* We should have expanded this before. */
4211 gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));
4212
4213 *expr_p = temp;
4214 return GS_OK;
4215 }
4216
4217 /* Gimplification of expression trees. */
4218
4219 /* Gimplify an expression which appears at statement context; usually, this
4220 means replacing it with a suitably gimple STATEMENT_LIST. */
4221
4222 void
4223 gimplify_stmt (tree *stmt_p)
4224 {
4225 gimplify_expr (stmt_p, NULL, NULL, is_gimple_stmt, fb_none);
4226 }
4227
4228 /* Similarly, but force the result to be a STATEMENT_LIST. */
4229
4230 void
4231 gimplify_to_stmt_list (tree *stmt_p)
4232 {
4233 gimplify_stmt (stmt_p);
4234 if (!*stmt_p)
4235 *stmt_p = alloc_stmt_list ();
4236 else if (TREE_CODE (*stmt_p) != STATEMENT_LIST)
4237 {
4238 tree t = *stmt_p;
4239 *stmt_p = alloc_stmt_list ();
4240 append_to_statement_list (t, stmt_p);
4241 }
4242 }
4243
4244
4245 /* Add FIRSTPRIVATE entries for DECL in CTX and in the surrounding OpenMP
4246 parallels. If entries already exist, force them to be some flavor of
4247 private. If there is no enclosing parallel, do nothing. */
4248
4249 void
4250 omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
4251 {
4252 splay_tree_node n;
4253
4254 if (decl == NULL || !DECL_P (decl))
4255 return;
4256
4257 do
4258 {
4259 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
4260 if (n != NULL)
4261 {
4262 if (n->value & GOVD_SHARED)
4263 n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
4264 else
4265 return;
4266 }
4267 else if (ctx->is_parallel)
4268 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
4269
4270 ctx = ctx->outer_context;
4271 }
4272 while (ctx);
4273 }
4274
4275 /* Similarly for each of the type sizes of TYPE. */
4276
4277 static void
4278 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
4279 {
4280 if (type == NULL || type == error_mark_node)
4281 return;
4282 type = TYPE_MAIN_VARIANT (type);
4283
4284 if (pointer_set_insert (ctx->privatized_types, type))
4285 return;
4286
4287 switch (TREE_CODE (type))
4288 {
4289 case INTEGER_TYPE:
4290 case ENUMERAL_TYPE:
4291 case BOOLEAN_TYPE:
4292 case REAL_TYPE:
4293 omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
4294 omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
4295 break;
4296
4297 case ARRAY_TYPE:
4298 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
4299 omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
4300 break;
4301
4302 case RECORD_TYPE:
4303 case UNION_TYPE:
4304 case QUAL_UNION_TYPE:
4305 {
4306 tree field;
4307 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
4308 if (TREE_CODE (field) == FIELD_DECL)
4309 {
4310 omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
4311 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
4312 }
4313 }
4314 break;
4315
4316 case POINTER_TYPE:
4317 case REFERENCE_TYPE:
4318 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
4319 break;
4320
4321 default:
4322 break;
4323 }
4324
4325 omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
4326 omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
4327 lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
4328 }
4329
4330 /* Add an entry for DECL in the OpenMP context CTX with FLAGS. */
4331
4332 static void
4333 omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
4334 {
4335 splay_tree_node n;
4336 unsigned int nflags;
4337 tree t;
4338
4339 if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node)
4340 return;
4341
4342 /* Never elide decls whose type has TREE_ADDRESSABLE set. This means
4343 there are constructors involved somewhere. */
4344 if (TREE_ADDRESSABLE (TREE_TYPE (decl))
4345 || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl)))
4346 flags |= GOVD_SEEN;
4347
4348 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
4349 if (n != NULL)
4350 {
4351 /* We shouldn't be re-adding the decl with the same data
4352 sharing class. */
4353 gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
4354 /* The only combination of data sharing classes we should see is
4355 FIRSTPRIVATE and LASTPRIVATE. */
4356 nflags = n->value | flags;
4357 gcc_assert ((nflags & GOVD_DATA_SHARE_CLASS)
4358 == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE));
4359 n->value = nflags;
4360 return;
4361 }
4362
4363 /* When adding a variable-sized variable, we have to handle all sorts
4364 of additional bits of data: the pointer replacement variable, and
4365 the parameters of the type. */
4366 if (DECL_SIZE (decl) && !TREE_CONSTANT (DECL_SIZE (decl)))
4367 {
4368 /* Add the pointer replacement variable as PRIVATE if the variable
4369 replacement is private, else FIRSTPRIVATE since we'll need the
4370 address of the original variable either for SHARED, or for the
4371 copy into or out of the context. */
4372 if (!(flags & GOVD_LOCAL))
4373 {
4374 nflags = flags & GOVD_PRIVATE ? GOVD_PRIVATE : GOVD_FIRSTPRIVATE;
4375 nflags |= flags & GOVD_SEEN;
4376 t = DECL_VALUE_EXPR (decl);
4377 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
4378 t = TREE_OPERAND (t, 0);
4379 gcc_assert (DECL_P (t));
4380 omp_add_variable (ctx, t, nflags);
4381 }
4382
4383 /* Add all of the variable and type parameters (which should have
4384 been gimplified to a formal temporary) as FIRSTPRIVATE. */
4385 omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
4386 omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
4387 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
4388
4389 /* The variable-sized variable itself is never SHARED, only some form
4390 of PRIVATE. The sharing would take place via the pointer variable
4391 which we remapped above. */
4392 if (flags & GOVD_SHARED)
4393 flags = GOVD_PRIVATE | GOVD_DEBUG_PRIVATE
4394 | (flags & (GOVD_SEEN | GOVD_EXPLICIT));
4395
4396 /* We're going to make use of the TYPE_SIZE_UNIT at least in the
4397 alloca statement we generate for the variable, so make sure it
4398 is available. This isn't automatically needed for the SHARED
4399 case, since we won't be allocating local storage then. */
4400 else
4401 omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
4402 }
4403 else if (lang_hooks.decls.omp_privatize_by_reference (decl))
4404 {
4405 gcc_assert ((flags & GOVD_LOCAL) == 0);
4406 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
4407
4408 /* Similar to the direct variable sized case above, we'll need the
4409 size of references being privatized. */
4410 if ((flags & GOVD_SHARED) == 0)
4411 {
4412 t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
4413 if (!TREE_CONSTANT (t))
4414 omp_notice_variable (ctx, t, true);
4415 }
4416 }
4417
4418 splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
4419 }
4420
4421 /* Record the fact that DECL was used within the OpenMP context CTX.
4422 IN_CODE is true when real code uses DECL, and false when we should
4423 merely emit default(none) errors. Return true if DECL is going to
4424 be remapped and thus DECL shouldn't be gimplified into its
4425 DECL_VALUE_EXPR (if any). */
4426
4427 static bool
4428 omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
4429 {
4430 splay_tree_node n;
4431 unsigned flags = in_code ? GOVD_SEEN : 0;
4432 bool ret = false, shared;
4433
4434 if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node)
4435 return false;
4436
4437 /* Threadprivate variables are predetermined. */
4438 if (is_global_var (decl))
4439 {
4440 if (DECL_THREAD_LOCAL_P (decl))
4441 return false;
4442
4443 if (DECL_HAS_VALUE_EXPR_P (decl))
4444 {
4445 tree value = get_base_address (DECL_VALUE_EXPR (decl));
4446
4447 if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
4448 return false;
4449 }
4450 }
4451
4452 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
4453 if (n == NULL)
4454 {
4455 enum omp_clause_default_kind default_kind, kind;
4456
4457 if (!ctx->is_parallel)
4458 goto do_outer;
4459
4460 /* ??? Some compiler-generated variables (like SAVE_EXPRs) could be
4461 remapped firstprivate instead of shared. To some extent this is
4462 addressed in omp_firstprivatize_type_sizes, but not effectively. */
4463 default_kind = ctx->default_kind;
4464 kind = lang_hooks.decls.omp_predetermined_sharing (decl);
4465 if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
4466 default_kind = kind;
4467
4468 switch (default_kind)
4469 {
4470 case OMP_CLAUSE_DEFAULT_NONE:
4471 error ("%qs not specified in enclosing parallel",
4472 IDENTIFIER_POINTER (DECL_NAME (decl)));
4473 error ("%Henclosing parallel", &ctx->location);
4474 /* FALLTHRU */
4475 case OMP_CLAUSE_DEFAULT_SHARED:
4476 flags |= GOVD_SHARED;
4477 break;
4478 case OMP_CLAUSE_DEFAULT_PRIVATE:
4479 flags |= GOVD_PRIVATE;
4480 break;
4481 default:
4482 gcc_unreachable ();
4483 }
4484
4485 omp_add_variable (ctx, decl, flags);
4486
4487 shared = (flags & GOVD_SHARED) != 0;
4488 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
4489 goto do_outer;
4490 }
4491
4492 shared = ((flags | n->value) & GOVD_SHARED) != 0;
4493 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
4494
4495 /* If nothing changed, there's nothing left to do. */
4496 if ((n->value & flags) == flags)
4497 return ret;
4498 flags |= n->value;
4499 n->value = flags;
4500
4501 do_outer:
4502 /* If the variable is private in the current context, then we don't
4503 need to propagate anything to an outer context. */
4504 if (flags & GOVD_PRIVATE)
4505 return ret;
4506 if (ctx->outer_context
4507 && omp_notice_variable (ctx->outer_context, decl, in_code))
4508 return true;
4509 return ret;
4510 }
4511
4512 /* Verify that DECL is private within CTX. If there's specific information
4513 to the contrary in the innermost scope, generate an error. */
4514
4515 static bool
4516 omp_is_private (struct gimplify_omp_ctx *ctx, tree decl)
4517 {
4518 splay_tree_node n;
4519
4520 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
4521 if (n != NULL)
4522 {
4523 if (n->value & GOVD_SHARED)
4524 {
4525 if (ctx == gimplify_omp_ctxp)
4526 {
4527 error ("iteration variable %qs should be private",
4528 IDENTIFIER_POINTER (DECL_NAME (decl)));
4529 n->value = GOVD_PRIVATE;
4530 return true;
4531 }
4532 else
4533 return false;
4534 }
4535 else if ((n->value & GOVD_EXPLICIT) != 0
4536 && (ctx == gimplify_omp_ctxp
4537 || (ctx->is_combined_parallel
4538 && gimplify_omp_ctxp->outer_context == ctx)))
4539 {
4540 if ((n->value & GOVD_FIRSTPRIVATE) != 0)
4541 error ("iteration variable %qs should not be firstprivate",
4542 IDENTIFIER_POINTER (DECL_NAME (decl)));
4543 else if ((n->value & GOVD_REDUCTION) != 0)
4544 error ("iteration variable %qs should not be reduction",
4545 IDENTIFIER_POINTER (DECL_NAME (decl)));
4546 }
4547 return true;
4548 }
4549
4550 if (ctx->is_parallel)
4551 return false;
4552 else if (ctx->outer_context)
4553 return omp_is_private (ctx->outer_context, decl);
4554 else
4555 return !is_global_var (decl);
4556 }
4557
4558 /* Scan the OpenMP clauses in *LIST_P, installing mappings into a new
4559 and previous omp contexts. */
4560
4561 static void
4562 gimplify_scan_omp_clauses (tree *list_p, tree *pre_p, bool in_parallel,
4563 bool in_combined_parallel)
4564 {
4565 struct gimplify_omp_ctx *ctx, *outer_ctx;
4566 tree c;
4567
4568 ctx = new_omp_context (in_parallel, in_combined_parallel);
4569 outer_ctx = ctx->outer_context;
4570
4571 while ((c = *list_p) != NULL)
4572 {
4573 enum gimplify_status gs;
4574 bool remove = false;
4575 bool notice_outer = true;
4576 unsigned int flags;
4577 tree decl;
4578
4579 switch (OMP_CLAUSE_CODE (c))
4580 {
4581 case OMP_CLAUSE_PRIVATE:
4582 flags = GOVD_PRIVATE | GOVD_EXPLICIT;
4583 notice_outer = false;
4584 goto do_add;
4585 case OMP_CLAUSE_SHARED:
4586 flags = GOVD_SHARED | GOVD_EXPLICIT;
4587 goto do_add;
4588 case OMP_CLAUSE_FIRSTPRIVATE:
4589 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
4590 goto do_add;
4591 case OMP_CLAUSE_LASTPRIVATE:
4592 flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
4593 goto do_add;
4594 case OMP_CLAUSE_REDUCTION:
4595 flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
4596 goto do_add;
4597
4598 do_add:
4599 decl = OMP_CLAUSE_DECL (c);
4600 if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node)
4601 {
4602 remove = true;
4603 break;
4604 }
4605 /* Handle NRV results passed by reference. */
4606 if (TREE_CODE (decl) == INDIRECT_REF
4607 && TREE_CODE (TREE_OPERAND (decl, 0)) == RESULT_DECL
4608 && DECL_BY_REFERENCE (TREE_OPERAND (decl, 0)))
4609 OMP_CLAUSE_DECL (c) = decl = TREE_OPERAND (decl, 0);
4610 omp_add_variable (ctx, decl, flags);
4611 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
4612 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4613 {
4614 omp_add_variable (ctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
4615 GOVD_LOCAL | GOVD_SEEN);
4616 gimplify_omp_ctxp = ctx;
4617 push_gimplify_context ();
4618 gimplify_stmt (&OMP_CLAUSE_REDUCTION_INIT (c));
4619 pop_gimplify_context (OMP_CLAUSE_REDUCTION_INIT (c));
4620 push_gimplify_context ();
4621 gimplify_stmt (&OMP_CLAUSE_REDUCTION_MERGE (c));
4622 pop_gimplify_context (OMP_CLAUSE_REDUCTION_MERGE (c));
4623 gimplify_omp_ctxp = outer_ctx;
4624 }
4625 if (notice_outer)
4626 goto do_notice;
4627 break;
4628
4629 case OMP_CLAUSE_COPYIN:
4630 case OMP_CLAUSE_COPYPRIVATE:
4631 decl = OMP_CLAUSE_DECL (c);
4632 if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node)
4633 {
4634 remove = true;
4635 break;
4636 }
4637 /* Handle NRV results passed by reference. */
4638 if (TREE_CODE (decl) == INDIRECT_REF
4639 && TREE_CODE (TREE_OPERAND (decl, 0)) == RESULT_DECL
4640 && DECL_BY_REFERENCE (TREE_OPERAND (decl, 0)))
4641 OMP_CLAUSE_DECL (c) = decl = TREE_OPERAND (decl, 0);
4642 do_notice:
4643 if (outer_ctx)
4644 omp_notice_variable (outer_ctx, decl, true);
4645 break;
4646
4647 case OMP_CLAUSE_IF:
4648 OMP_CLAUSE_OPERAND (c, 0)
4649 = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
4650 /* Fall through. */
4651
4652 case OMP_CLAUSE_SCHEDULE:
4653 case OMP_CLAUSE_NUM_THREADS:
4654 gs = gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
4655 is_gimple_val, fb_rvalue);
4656 if (gs == GS_ERROR)
4657 remove = true;
4658 break;
4659
4660 case OMP_CLAUSE_NOWAIT:
4661 case OMP_CLAUSE_ORDERED:
4662 break;
4663
4664 case OMP_CLAUSE_DEFAULT:
4665 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
4666 break;
4667
4668 default:
4669 gcc_unreachable ();
4670 }
4671
4672 if (remove)
4673 *list_p = OMP_CLAUSE_CHAIN (c);
4674 else
4675 list_p = &OMP_CLAUSE_CHAIN (c);
4676 }
4677
4678 gimplify_omp_ctxp = ctx;
4679 }
4680
4681 /* For all variables that were not actually used within the context,
4682 remove PRIVATE, SHARED, and FIRSTPRIVATE clauses. */
4683
4684 static int
4685 gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
4686 {
4687 tree *list_p = (tree *) data;
4688 tree decl = (tree) n->key;
4689 unsigned flags = n->value;
4690 enum omp_clause_code code;
4691 tree clause;
4692 bool private_debug;
4693
4694 if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
4695 return 0;
4696 if ((flags & GOVD_SEEN) == 0)
4697 return 0;
4698 if (flags & GOVD_DEBUG_PRIVATE)
4699 {
4700 gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_PRIVATE);
4701 private_debug = true;
4702 }
4703 else
4704 private_debug
4705 = lang_hooks.decls.omp_private_debug_clause (decl,
4706 !!(flags & GOVD_SHARED));
4707 if (private_debug)
4708 code = OMP_CLAUSE_PRIVATE;
4709 else if (flags & GOVD_SHARED)
4710 {
4711 if (is_global_var (decl))
4712 return 0;
4713 code = OMP_CLAUSE_SHARED;
4714 }
4715 else if (flags & GOVD_PRIVATE)
4716 code = OMP_CLAUSE_PRIVATE;
4717 else if (flags & GOVD_FIRSTPRIVATE)
4718 code = OMP_CLAUSE_FIRSTPRIVATE;
4719 else
4720 gcc_unreachable ();
4721
4722 clause = build_omp_clause (code);
4723 OMP_CLAUSE_DECL (clause) = decl;
4724 OMP_CLAUSE_CHAIN (clause) = *list_p;
4725 if (private_debug)
4726 OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
4727 *list_p = clause;
4728
4729 return 0;
4730 }
4731
4732 static void
4733 gimplify_adjust_omp_clauses (tree *list_p)
4734 {
4735 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
4736 tree c, decl;
4737
4738 while ((c = *list_p) != NULL)
4739 {
4740 splay_tree_node n;
4741 bool remove = false;
4742
4743 switch (OMP_CLAUSE_CODE (c))
4744 {
4745 case OMP_CLAUSE_PRIVATE:
4746 case OMP_CLAUSE_SHARED:
4747 case OMP_CLAUSE_FIRSTPRIVATE:
4748 decl = OMP_CLAUSE_DECL (c);
4749 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
4750 remove = !(n->value & GOVD_SEEN);
4751 if (! remove)
4752 {
4753 bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
4754 if ((n->value & GOVD_DEBUG_PRIVATE)
4755 || lang_hooks.decls.omp_private_debug_clause (decl, shared))
4756 {
4757 gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
4758 || ((n->value & GOVD_DATA_SHARE_CLASS)
4759 == GOVD_PRIVATE));
4760 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
4761 OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
4762 }
4763 }
4764 break;
4765
4766 case OMP_CLAUSE_LASTPRIVATE:
4767 /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
4768 accurately reflect the presence of a FIRSTPRIVATE clause. */
4769 decl = OMP_CLAUSE_DECL (c);
4770 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
4771 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
4772 = (n->value & GOVD_FIRSTPRIVATE) != 0;
4773 break;
4774
4775 case OMP_CLAUSE_REDUCTION:
4776 case OMP_CLAUSE_COPYIN:
4777 case OMP_CLAUSE_COPYPRIVATE:
4778 case OMP_CLAUSE_IF:
4779 case OMP_CLAUSE_NUM_THREADS:
4780 case OMP_CLAUSE_SCHEDULE:
4781 case OMP_CLAUSE_NOWAIT:
4782 case OMP_CLAUSE_ORDERED:
4783 case OMP_CLAUSE_DEFAULT:
4784 break;
4785
4786 default:
4787 gcc_unreachable ();
4788 }
4789
4790 if (remove)
4791 *list_p = OMP_CLAUSE_CHAIN (c);
4792 else
4793 list_p = &OMP_CLAUSE_CHAIN (c);
4794 }
4795
4796 /* Add in any implicit data sharing. */
4797 splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, list_p);
4798
4799 gimplify_omp_ctxp = ctx->outer_context;
4800 delete_omp_context (ctx);
4801 }
4802
4803 /* Gimplify the contents of an OMP_PARALLEL statement. This involves
4804 gimplification of the body, as well as scanning the body for used
4805 variables. We need to do this scan now, because variable-sized
4806 decls will be decomposed during gimplification. */
4807
4808 static enum gimplify_status
4809 gimplify_omp_parallel (tree *expr_p, tree *pre_p)
4810 {
4811 tree expr = *expr_p;
4812
4813 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p, true,
4814 OMP_PARALLEL_COMBINED (expr));
4815
4816 push_gimplify_context ();
4817
4818 gimplify_stmt (&OMP_PARALLEL_BODY (expr));
4819
4820 if (TREE_CODE (OMP_PARALLEL_BODY (expr)) == BIND_EXPR)
4821 pop_gimplify_context (OMP_PARALLEL_BODY (expr));
4822 else
4823 pop_gimplify_context (NULL_TREE);
4824
4825 gimplify_adjust_omp_clauses (&OMP_PARALLEL_CLAUSES (expr));
4826
4827 return GS_ALL_DONE;
4828 }
4829
4830 /* Gimplify the gross structure of an OMP_FOR statement. */
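/* A sketch of the canonicalization done below (identifiers invented):

     #pragma omp for
     for (i = 0; i < n; i++)
       ...

   has the iteration variable I forced to be private in the OpenMP
   context, the bound N gimplified into OMP_FOR_PRE_BODY when it is not
   already a gimple value, and the increment rewritten into the explicit
   MODIFY_EXPR 'i = i + 1'.  */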
4831
4832 static enum gimplify_status
4833 gimplify_omp_for (tree *expr_p, tree *pre_p)
4834 {
4835 tree for_stmt, decl, t;
4836 enum gimplify_status ret = 0;
4837
4838 for_stmt = *expr_p;
4839
4840 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p, false, false);
4841
4842 t = OMP_FOR_INIT (for_stmt);
4843 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
4844 decl = TREE_OPERAND (t, 0);
4845 gcc_assert (DECL_P (decl));
4846 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl)));
4847
4848 /* Make sure the iteration variable is private. */
4849 if (omp_is_private (gimplify_omp_ctxp, decl))
4850 omp_notice_variable (gimplify_omp_ctxp, decl, true);
4851 else
4852 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
4853
4854 ret |= gimplify_expr (&TREE_OPERAND (t, 1), &OMP_FOR_PRE_BODY (for_stmt),
4855 NULL, is_gimple_val, fb_rvalue);
4856
4857 t = OMP_FOR_COND (for_stmt);
4858 gcc_assert (COMPARISON_CLASS_P (t));
4859 gcc_assert (TREE_OPERAND (t, 0) == decl);
4860
4861 ret |= gimplify_expr (&TREE_OPERAND (t, 1), &OMP_FOR_PRE_BODY (for_stmt),
4862 NULL, is_gimple_val, fb_rvalue);
4863
4864 t = OMP_FOR_INCR (for_stmt);
4865 switch (TREE_CODE (t))
4866 {
4867 case PREINCREMENT_EXPR:
4868 case POSTINCREMENT_EXPR:
4869 t = build_int_cst (TREE_TYPE (decl), 1);
4870 goto build_modify;
4871 case PREDECREMENT_EXPR:
4872 case POSTDECREMENT_EXPR:
4873 t = build_int_cst (TREE_TYPE (decl), -1);
4874 goto build_modify;
4875 build_modify:
4876 t = build2 (PLUS_EXPR, TREE_TYPE (decl), decl, t);
4877 t = build2 (MODIFY_EXPR, void_type_node, decl, t);
4878 OMP_FOR_INCR (for_stmt) = t;
4879 break;
4880
4881 case MODIFY_EXPR:
4882 gcc_assert (TREE_OPERAND (t, 0) == decl);
4883 t = TREE_OPERAND (t, 1);
4884 switch (TREE_CODE (t))
4885 {
4886 case PLUS_EXPR:
4887 if (TREE_OPERAND (t, 1) == decl)
4888 {
4889 TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
4890 TREE_OPERAND (t, 0) = decl;
4891 break;
4892 }
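/* FALLTHRU -- if the iteration variable was not operand 1, the
   assertion below checks that it is operand 0. */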
4893 case MINUS_EXPR:
4894 gcc_assert (TREE_OPERAND (t, 0) == decl);
4895 break;
4896 default:
4897 gcc_unreachable ();
4898 }
4899
4900 ret |= gimplify_expr (&TREE_OPERAND (t, 1), &OMP_FOR_PRE_BODY (for_stmt),
4901 NULL, is_gimple_val, fb_rvalue);
4902 break;
4903
4904 default:
4905 gcc_unreachable ();
4906 }
4907
4908 gimplify_to_stmt_list (&OMP_FOR_BODY (for_stmt));
4909 gimplify_adjust_omp_clauses (&OMP_FOR_CLAUSES (for_stmt));
4910
4911 return ret == GS_ALL_DONE ? GS_ALL_DONE : GS_ERROR;
4912 }
4913
4914 /* Gimplify the gross structure of other OpenMP worksharing constructs.
4915 In particular, OMP_SECTIONS and OMP_SINGLE. */
4916
4917 static enum gimplify_status
4918 gimplify_omp_workshare (tree *expr_p, tree *pre_p)
4919 {
4920 tree stmt = *expr_p;
4921
4922 gimplify_scan_omp_clauses (&OMP_CLAUSES (stmt), pre_p, false, false);
4923 gimplify_to_stmt_list (&OMP_BODY (stmt));
4924 gimplify_adjust_omp_clauses (&OMP_CLAUSES (stmt));
4925
4926 return GS_ALL_DONE;
4927 }
4928
4929 /* A subroutine of gimplify_omp_atomic. The front end is supposed to have
4930 stabilized the lhs of the atomic operation as *ADDR. Return true if
4931 EXPR is this stabilized form. */
4932
4933 static bool
4934 goa_lhs_expr_p (tree expr, tree addr)
4935 {
4936 /* Also include casts to other type variants. The C front end is fond
4937 of adding these for e.g. volatile variables. This is like
4938 STRIP_TYPE_NOPS but includes the main variant lookup. */
4939 while ((TREE_CODE (expr) == NOP_EXPR
4940 || TREE_CODE (expr) == CONVERT_EXPR
4941 || TREE_CODE (expr) == NON_LVALUE_EXPR)
4942 && TREE_OPERAND (expr, 0) != error_mark_node
4943 && (TYPE_MAIN_VARIANT (TREE_TYPE (expr))
4944 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (expr, 0)))))
4945 expr = TREE_OPERAND (expr, 0);
4946
4947 if (TREE_CODE (expr) == INDIRECT_REF && TREE_OPERAND (expr, 0) == addr)
4948 return true;
4949 if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
4950 return true;
4951 return false;
4952 }
4953
4954 /* A subroutine of gimplify_omp_atomic. Attempt to implement the atomic
4955 operation as a __sync_fetch_and_op builtin. INDEX is log2 of the
4956 size of the data type, and thus usable to find the index of the builtin
4957 decl. Returns GS_UNHANDLED if the expression is not of the proper form. */
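/* For example (a sketch): '#pragma omp atomic' applied to 'x += 5' with a
   4-byte int X gives INDEX == 2, and the call built below is roughly

     __sync_fetch_and_add_4 (&x, 5);

   provided the target implements the corresponding sync optab.  */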
4958
4959 static enum gimplify_status
4960 gimplify_omp_atomic_fetch_op (tree *expr_p, tree addr, tree rhs, int index)
4961 {
4962 enum built_in_function base;
4963 tree decl, args, itype;
4964 enum insn_code *optab;
4965
4966 /* Check for one of the supported fetch-op operations. */
4967 switch (TREE_CODE (rhs))
4968 {
4969 case PLUS_EXPR:
4970 base = BUILT_IN_FETCH_AND_ADD_N;
4971 optab = sync_add_optab;
4972 break;
4973 case MINUS_EXPR:
4974 base = BUILT_IN_FETCH_AND_SUB_N;
4975 optab = sync_add_optab;
4976 break;
4977 case BIT_AND_EXPR:
4978 base = BUILT_IN_FETCH_AND_AND_N;
4979 optab = sync_and_optab;
4980 break;
4981 case BIT_IOR_EXPR:
4982 base = BUILT_IN_FETCH_AND_OR_N;
4983 optab = sync_ior_optab;
4984 break;
4985 case BIT_XOR_EXPR:
4986 base = BUILT_IN_FETCH_AND_XOR_N;
4987 optab = sync_xor_optab;
4988 break;
4989 default:
4990 return GS_UNHANDLED;
4991 }
4992
4993 /* Make sure the expression is of the proper form. */
4994 if (goa_lhs_expr_p (TREE_OPERAND (rhs, 0), addr))
4995 rhs = TREE_OPERAND (rhs, 1);
4996 else if (commutative_tree_code (TREE_CODE (rhs))
4997 && goa_lhs_expr_p (TREE_OPERAND (rhs, 1), addr))
4998 rhs = TREE_OPERAND (rhs, 0);
4999 else
5000 return GS_UNHANDLED;
5001
5002 decl = built_in_decls[base + index + 1];
5003 itype = TREE_TYPE (TREE_TYPE (decl));
5004
5005 if (optab[TYPE_MODE (itype)] == CODE_FOR_nothing)
5006 return GS_UNHANDLED;
5007
5008 args = tree_cons (NULL, fold_convert (itype, rhs), NULL);
5009 args = tree_cons (NULL, addr, args);
5010 *expr_p = build_function_call_expr (decl, args);
5011 return GS_OK;
5012 }
5013
5014 /* A subroutine of gimplify_omp_atomic_pipeline. Walk *EXPR_P and replace
5015 appearances of *LHS_ADDR with LHS_VAR. If an expression does not involve
5016 the lhs, evaluate it into a temporary. Return 1 if the lhs appeared as
5017 a subexpression, 0 if it did not, or -1 if an error was encountered. */
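/* For example (a sketch): with X the atomic lhs, stabilizing 'x + foo ()'
   evaluates the call to foo into a temporary on PRE_P, replaces the
   occurrence of X with LHS_VAR, and returns 1 because the lhs appeared.  */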
5018
5019 static int
5020 goa_stabilize_expr (tree *expr_p, tree *pre_p, tree lhs_addr, tree lhs_var)
5021 {
5022 tree expr = *expr_p;
5023 int saw_lhs;
5024
5025 if (goa_lhs_expr_p (expr, lhs_addr))
5026 {
5027 *expr_p = lhs_var;
5028 return 1;
5029 }
5030 if (is_gimple_val (expr))
5031 return 0;
5032
5033 saw_lhs = 0;
5034 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
5035 {
5036 case tcc_binary:
5037 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
5038 lhs_addr, lhs_var);
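/* FALLTHRU -- a binary expression also needs its operand 0 handled
   by the unary case below. */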
5039 case tcc_unary:
5040 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
5041 lhs_addr, lhs_var);
5042 break;
5043 default:
5044 break;
5045 }
5046
5047 if (saw_lhs == 0)
5048 {
5049 enum gimplify_status gs;
5050 gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
5051 if (gs != GS_ALL_DONE)
5052 saw_lhs = -1;
5053 }
5054
5055 return saw_lhs;
5056 }
5057
5058 /* A subroutine of gimplify_omp_atomic. Implement the atomic operation as:
5059
5060 oldval = *addr;
5061 repeat:
5062 newval = rhs; // with oldval replacing *addr in rhs
5063 oldval = __sync_val_compare_and_swap (addr, oldval, newval);
5064 if (oldval != newval)
5065 goto repeat;
5066
5067 INDEX is log2 of the size of the data type, and thus usable to find the
5068 index of the builtin decl. */
5069
5070 static enum gimplify_status
5071 gimplify_omp_atomic_pipeline (tree *expr_p, tree *pre_p, tree addr,
5072 tree rhs, int index)
5073 {
5074 tree oldval, oldival, oldival2, newval, newival, label;
5075 tree type, itype, cmpxchg, args, x, iaddr;
5076
5077 cmpxchg = built_in_decls[BUILT_IN_VAL_COMPARE_AND_SWAP_N + index + 1];
5078 type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
5079 itype = TREE_TYPE (TREE_TYPE (cmpxchg));
5080
5081 if (sync_compare_and_swap[TYPE_MODE (itype)] == CODE_FOR_nothing)
5082 return GS_UNHANDLED;
5083
5084 oldval = create_tmp_var (type, NULL);
5085 newval = create_tmp_var (type, NULL);
5086
5087 /* Precompute as much of RHS as possible. In the same walk, replace
5088 occurrences of the lhs value with our temporary. */
5089 if (goa_stabilize_expr (&rhs, pre_p, addr, oldval) < 0)
5090 return GS_ERROR;
5091
5092 x = build_fold_indirect_ref (addr);
5093 x = build2 (MODIFY_EXPR, void_type_node, oldval, x);
5094 gimplify_and_add (x, pre_p);
5095
5096 /* For floating-point values, we'll need to view-convert them to integers
5097 so that we can perform the atomic compare and swap. Simplify the
5098 following code by always setting up the "i"ntegral variables. */
5099 if (INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type))
5100 {
5101 oldival = oldval;
5102 newival = newval;
5103 iaddr = addr;
5104 }
5105 else
5106 {
5107 oldival = create_tmp_var (itype, NULL);
5108 newival = create_tmp_var (itype, NULL);
5109
5110 x = build1 (VIEW_CONVERT_EXPR, itype, oldval);
5111 x = build2 (MODIFY_EXPR, void_type_node, oldival, x);
5112 gimplify_and_add (x, pre_p);
5113 iaddr = fold_convert (build_pointer_type (itype), addr);
5114 }
5115
5116 oldival2 = create_tmp_var (itype, NULL);
5117
5118 label = create_artificial_label ();
5119 x = build1 (LABEL_EXPR, void_type_node, label);
5120 gimplify_and_add (x, pre_p);
5121
5122 x = build2 (MODIFY_EXPR, void_type_node, newval, rhs);
5123 gimplify_and_add (x, pre_p);
5124
5125 if (newval != newival)
5126 {
5127 x = build1 (VIEW_CONVERT_EXPR, itype, newval);
5128 x = build2 (MODIFY_EXPR, void_type_node, newival, x);
5129 gimplify_and_add (x, pre_p);
5130 }
5131
5132 x = build2 (MODIFY_EXPR, void_type_node, oldival2,
5133 fold_convert (itype, oldival));
5134 gimplify_and_add (x, pre_p);
5135
5136 args = tree_cons (NULL, fold_convert (itype, newival), NULL);
5137 args = tree_cons (NULL, fold_convert (itype, oldival), args);
5138 args = tree_cons (NULL, iaddr, args);
5139 x = build_function_call_expr (cmpxchg, args);
5140 if (oldval == oldival)
5141 x = fold_convert (type, x);
5142 x = build2 (MODIFY_EXPR, void_type_node, oldival, x);
5143 gimplify_and_add (x, pre_p);
5144
5145 /* For floating point, be prepared for the loop backedge. */
5146 if (oldval != oldival)
5147 {
5148 x = build1 (VIEW_CONVERT_EXPR, type, oldival);
5149 x = build2 (MODIFY_EXPR, void_type_node, oldval, x);
5150 gimplify_and_add (x, pre_p);
5151 }
5152
5153 /* Note that we always perform the comparison as an integer, even for
5154 floating point. This allows the atomic operation to properly
5155 succeed even with NaNs and -0.0. */
5156 x = build3 (COND_EXPR, void_type_node,
5157 build2 (NE_EXPR, boolean_type_node, oldival, oldival2),
5158 build1 (GOTO_EXPR, void_type_node, label), NULL);
5159 gimplify_and_add (x, pre_p);
5160
5161 *expr_p = NULL;
5162 return GS_ALL_DONE;
5163 }
5164
5165 /* A subroutine of gimplify_omp_atomic. Implement the atomic operation as:
5166
5167 GOMP_atomic_start ();
5168 *addr = rhs;
5169 GOMP_atomic_end ();
5170
5171 The result is not globally atomic, but works so long as all parallel
5172 references are within #pragma omp atomic directives. According to
5173 responses received from omp@openmp.org, this appears to be within spec,
5174 which makes sense, since that's how several other compilers handle
5175 this situation as well. */
5176
5177 static enum gimplify_status
5178 gimplify_omp_atomic_mutex (tree *expr_p, tree *pre_p, tree addr, tree rhs)
5179 {
5180 tree t;
5181
5182 t = built_in_decls[BUILT_IN_GOMP_ATOMIC_START];
5183 t = build_function_call_expr (t, NULL);
5184 gimplify_and_add (t, pre_p);
5185
5186 t = build_fold_indirect_ref (addr);
5187 t = build2 (MODIFY_EXPR, void_type_node, t, rhs);
5188 gimplify_and_add (t, pre_p);
5189
5190 t = built_in_decls[BUILT_IN_GOMP_ATOMIC_END];
5191 t = build_function_call_expr (t, NULL);
5192 gimplify_and_add (t, pre_p);
5193
5194 *expr_p = NULL;
5195 return GS_ALL_DONE;
5196 }
5197
5198 /* Gimplify an OMP_ATOMIC statement. */
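/* For example (a sketch): for 'float f; ... #pragma omp atomic f += x;'
   the type is 4 bytes wide (INDEX == 2); floating-point types are not
   handled by the fetch-and-op builtins, so the compare-and-swap loop
   above is used, view-converting the value to a 4-byte integer type for
   __sync_val_compare_and_swap_4.  */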
5199
5200 static enum gimplify_status
5201 gimplify_omp_atomic (tree *expr_p, tree *pre_p)
5202 {
5203 tree addr = TREE_OPERAND (*expr_p, 0);
5204 tree rhs = TREE_OPERAND (*expr_p, 1);
5205 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
5206 HOST_WIDE_INT index;
5207
5208 /* Make sure the type is one of the supported sizes. */
5209 index = tree_low_cst (TYPE_SIZE_UNIT (type), 1);
5210 index = exact_log2 (index);
5211 if (index >= 0 && index <= 4)
5212 {
5213 enum gimplify_status gs;
5214 unsigned int align;
5215
5216 if (DECL_P (TREE_OPERAND (addr, 0)))
5217 align = DECL_ALIGN_UNIT (TREE_OPERAND (addr, 0));
5218 else if (TREE_CODE (TREE_OPERAND (addr, 0)) == COMPONENT_REF
5219 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (addr, 0), 1))
5220 == FIELD_DECL)
5221 align = DECL_ALIGN_UNIT (TREE_OPERAND (TREE_OPERAND (addr, 0), 1));
5222 else
5223 align = TYPE_ALIGN_UNIT (type);
5224
5225 /* __sync builtins require strict data alignment. */
5226 if (exact_log2 (align) >= index)
5227 {
5228 /* When possible, use specialized atomic update functions. */
5229 if (INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type))
5230 {
5231 gs = gimplify_omp_atomic_fetch_op (expr_p, addr, rhs, index);
5232 if (gs != GS_UNHANDLED)
5233 return gs;
5234 }
5235
5236 /* If we don't have specialized __sync builtins, try and implement
5237 as a compare and swap loop. */
5238 gs = gimplify_omp_atomic_pipeline (expr_p, pre_p, addr, rhs, index);
5239 if (gs != GS_UNHANDLED)
5240 return gs;
5241 }
5242 }
5243
5244 /* The ultimate fallback is wrapping the operation in a mutex. */
5245 return gimplify_omp_atomic_mutex (expr_p, pre_p, addr, rhs);
5246 }
5247
5248 /* Gimplifies the expression tree pointed to by EXPR_P. Returns GS_ERROR
5249 if gimplification failed.
5250
5251 PRE_P points to the list where side effects that must happen before
5252 EXPR should be stored.
5253
5254 POST_P points to the list where side effects that must happen after
5255 EXPR should be stored, or NULL if there is no suitable list. In
5256 that case, we copy the result to a temporary, emit the
5257 post-effects, and then return the temporary.
5258
5259 GIMPLE_TEST_F points to a function that takes a tree T and
5260 returns nonzero if T is in the GIMPLE form requested by the
5261 caller. The GIMPLE predicates are in tree-gimple.c.
5262
5263 This test is used twice. Before gimplification, the test is
5264 invoked to determine whether *EXPR_P is already gimple enough. If
5265 that fails, *EXPR_P is gimplified according to its code and
5266 GIMPLE_TEST_F is called again. If the test still fails, then a new
5267 temporary variable is created and assigned the value of the
5268 gimplified expression.
5269
5270 FALLBACK tells the function what sort of a temporary we want. If the 1
5271 bit is set, an rvalue is OK. If the 2 bit is set, an lvalue is OK.
5272 If both are set, either is OK, but an lvalue is preferable.
5273
5274 The return value is either GS_ERROR or GS_ALL_DONE, since this function
5275 iterates until solution. */
5276
5277 enum gimplify_status
5278 gimplify_expr (tree *expr_p, tree *pre_p, tree *post_p,
5279 bool (* gimple_test_f) (tree), fallback_t fallback)
5280 {
5281 tree tmp;
5282 tree internal_pre = NULL_TREE;
5283 tree internal_post = NULL_TREE;
5284 tree save_expr;
5285 int is_statement = (pre_p == NULL);
5286 location_t saved_location;
5287 enum gimplify_status ret;
5288
5289 save_expr = *expr_p;
5290 if (save_expr == NULL_TREE)
5291 return GS_ALL_DONE;
5292
5293 /* We used to check the predicate here and return immediately if it
5294 succeeds. This is wrong; the design is for gimplification to be
5295 idempotent, and for the predicates to only test for valid forms, not
5296 whether they are fully simplified. */
5297
5298 /* Set up our internal queues if needed. */
5299 if (pre_p == NULL)
5300 pre_p = &internal_pre;
5301 if (post_p == NULL)
5302 post_p = &internal_post;
5303
5304 saved_location = input_location;
5305 if (save_expr != error_mark_node
5306 && EXPR_HAS_LOCATION (*expr_p))
5307 input_location = EXPR_LOCATION (*expr_p);
5308
5309 /* Loop over the specific gimplifiers until the toplevel node
5310 remains the same. */
5311 do
5312 {
5313 /* Strip away as many useless type conversions as possible
5314 at the toplevel. */
5315 STRIP_USELESS_TYPE_CONVERSION (*expr_p);
5316
5317 /* Remember the expr. */
5318 save_expr = *expr_p;
5319
5320 /* Die, die, die, my darling. */
5321 if (save_expr == error_mark_node
5322 || (TREE_TYPE (save_expr)
5323 && TREE_TYPE (save_expr) == error_mark_node))
5324 {
5325 ret = GS_ERROR;
5326 break;
5327 }
5328
5329 /* Do any language-specific gimplification. */
5330 ret = lang_hooks.gimplify_expr (expr_p, pre_p, post_p);
5331 if (ret == GS_OK)
5332 {
5333 if (*expr_p == NULL_TREE)
5334 break;
5335 if (*expr_p != save_expr)
5336 continue;
5337 }
5338 else if (ret != GS_UNHANDLED)
5339 break;
5340
5341 ret = GS_OK;
5342 switch (TREE_CODE (*expr_p))
5343 {
5344 /* First deal with the special cases. */
5345
5346 case POSTINCREMENT_EXPR:
5347 case POSTDECREMENT_EXPR:
5348 case PREINCREMENT_EXPR:
5349 case PREDECREMENT_EXPR:
5350 ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
5351 fallback != fb_none);
5352 break;
5353
5354 case ARRAY_REF:
5355 case ARRAY_RANGE_REF:
5356 case REALPART_EXPR:
5357 case IMAGPART_EXPR:
5358 case COMPONENT_REF:
5359 case VIEW_CONVERT_EXPR:
5360 ret = gimplify_compound_lval (expr_p, pre_p, post_p,
5361 fallback ? fallback : fb_rvalue);
5362 break;
5363
5364 case COND_EXPR:
5365 ret = gimplify_cond_expr (expr_p, pre_p, fallback);
5366 /* C99 code may assign to an array in a structure value of a
5367 conditional expression, and this has undefined behavior
5368 only on execution, so create a temporary if an lvalue is
5369 required. */
5370 if (fallback == fb_lvalue)
5371 {
5372 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
5373 lang_hooks.mark_addressable (*expr_p);
5374 }
5375 break;
5376
5377 case CALL_EXPR:
5378 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
5379 /* C99 code may assign to an array in a structure returned
5380 from a function, and this has undefined behavior only on
5381 execution, so create a temporary if an lvalue is
5382 required. */
5383 if (fallback == fb_lvalue)
5384 {
5385 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
5386 lang_hooks.mark_addressable (*expr_p);
5387 }
5388 break;
5389
5390 case TREE_LIST:
5391 gcc_unreachable ();
5392
5393 case COMPOUND_EXPR:
5394 ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
5395 break;
5396
5397 case MODIFY_EXPR:
5398 case INIT_EXPR:
5399 ret = gimplify_modify_expr (expr_p, pre_p, post_p,
5400 fallback != fb_none);
5401
5402 /* The distinction between MODIFY_EXPR and INIT_EXPR is no longer
5403 useful. */
5404 if (*expr_p && TREE_CODE (*expr_p) == INIT_EXPR)
5405 TREE_SET_CODE (*expr_p, MODIFY_EXPR);
5406 break;
5407
5408 case TRUTH_ANDIF_EXPR:
5409 case TRUTH_ORIF_EXPR:
5410 ret = gimplify_boolean_expr (expr_p);
5411 break;
5412
5413 case TRUTH_NOT_EXPR:
5414 TREE_OPERAND (*expr_p, 0)
5415 = gimple_boolify (TREE_OPERAND (*expr_p, 0));
5416 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
5417 is_gimple_val, fb_rvalue);
5418 recalculate_side_effects (*expr_p);
5419 break;
5420
5421 case ADDR_EXPR:
5422 ret = gimplify_addr_expr (expr_p, pre_p, post_p);
5423 break;
5424
5425 case VA_ARG_EXPR:
5426 ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
5427 break;
5428
5429 case CONVERT_EXPR:
5430 case NOP_EXPR:
5431 if (IS_EMPTY_STMT (*expr_p))
5432 {
5433 ret = GS_ALL_DONE;
5434 break;
5435 }
5436
5437 if (VOID_TYPE_P (TREE_TYPE (*expr_p))
5438 || fallback == fb_none)
5439 {
5440 /* Just strip a conversion to void (or in void context) and
5441 try again. */
5442 *expr_p = TREE_OPERAND (*expr_p, 0);
5443 break;
5444 }
5445
5446 ret = gimplify_conversion (expr_p);
5447 if (ret == GS_ERROR)
5448 break;
5449 if (*expr_p != save_expr)
5450 break;
5451 /* FALLTHRU */
5452
5453 case FIX_TRUNC_EXPR:
5454 case FIX_CEIL_EXPR:
5455 case FIX_FLOOR_EXPR:
5456 case FIX_ROUND_EXPR:
5457 /* unary_expr: ... | '(' cast ')' val | ... */
5458 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
5459 is_gimple_val, fb_rvalue);
5460 recalculate_side_effects (*expr_p);
5461 break;
5462
5463 case INDIRECT_REF:
5464 *expr_p = fold_indirect_ref (*expr_p);
5465 if (*expr_p != save_expr)
5466 break;
5467 /* else fall through. */
5468 case ALIGN_INDIRECT_REF:
5469 case MISALIGNED_INDIRECT_REF:
5470 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
5471 is_gimple_reg, fb_rvalue);
5472 recalculate_side_effects (*expr_p);
5473 break;
5474
5475 /* Constants need not be gimplified. */
5476 case INTEGER_CST:
5477 case REAL_CST:
5478 case STRING_CST:
5479 case COMPLEX_CST:
5480 case VECTOR_CST:
5481 ret = GS_ALL_DONE;
5482 break;
5483
5484 case CONST_DECL:
5485 /* If we require an lvalue, such as for ADDR_EXPR, retain the
5486 CONST_DECL node. Otherwise the decl is replaceable by its
5487 value. */
5488 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
5489 if (fallback & fb_lvalue)
5490 ret = GS_ALL_DONE;
5491 else
5492 *expr_p = DECL_INITIAL (*expr_p);
5493 break;
5494
5495 case DECL_EXPR:
5496 ret = gimplify_decl_expr (expr_p);
5497 break;
5498
5499 case EXC_PTR_EXPR:
5500 /* FIXME make this a decl. */
5501 ret = GS_ALL_DONE;
5502 break;
5503
5504 case BIND_EXPR:
5505 ret = gimplify_bind_expr (expr_p, NULL, pre_p);
5506 break;
5507
5508 case LOOP_EXPR:
5509 ret = gimplify_loop_expr (expr_p, pre_p);
5510 break;
5511
5512 case SWITCH_EXPR:
5513 ret = gimplify_switch_expr (expr_p, pre_p);
5514 break;
5515
5516 case EXIT_EXPR:
5517 ret = gimplify_exit_expr (expr_p);
5518 break;
5519
5520 case GOTO_EXPR:
5521 /* If the target is not a LABEL_DECL, then it is a computed jump
5522 and the target needs to be gimplified. */
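/* A hedged illustration using the GNU labels-as-values extension
   (hypothetical identifiers):

     void g (int i)
     {
       static void *targets[] = { &&l0, &&l1 };
       goto *targets[i];
     l0: return;
     l1: return;
     }

   The destination of the computed goto is an expression rather than a
   LABEL_DECL, so it is gimplified to a GIMPLE value here.  */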
5523 if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
5524 ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
5525 NULL, is_gimple_val, fb_rvalue);
5526 break;
5527
5528 case LABEL_EXPR:
5529 ret = GS_ALL_DONE;
5530 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
5531 == current_function_decl);
5532 break;
5533
5534 case CASE_LABEL_EXPR:
5535 ret = gimplify_case_label_expr (expr_p);
5536 break;
5537
5538 case RETURN_EXPR:
5539 ret = gimplify_return_expr (*expr_p, pre_p);
5540 break;
5541
5542 case CONSTRUCTOR:
5543 /* Don't reduce this in place; let gimplify_init_constructor work its
5544 magic.  But if we're just elaborating this for side effects, just
5545 gimplify any element that has side-effects. */
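/* Hedged, conceptual illustration: if a constructor value such as
     { f (), 2 }
   is elaborated only for its side effects (fallback == fb_none), the
   loop below keeps just the call to f () and drops the constant
   element.  */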
5546 if (fallback == fb_none)
5547 {
5548 unsigned HOST_WIDE_INT ix;
5549 constructor_elt *ce;
5550 tree temp = NULL_TREE;
5551 for (ix = 0;
5552 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (*expr_p),
5553 ix, ce);
5554 ix++)
5555 if (TREE_SIDE_EFFECTS (ce->value))
5556 append_to_statement_list (ce->value, &temp);
5557
5558 *expr_p = temp;
5559 ret = GS_OK;
5560 }
5561 /* C99 code may assign to an array in a constructed
5562 structure or union, and this has undefined behavior only
5563 on execution, so create a temporary if an lvalue is
5564 required. */
5565 else if (fallback == fb_lvalue)
5566 {
5567 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
5568 lang_hooks.mark_addressable (*expr_p);
5569 }
5570 else
5571 ret = GS_ALL_DONE;
5572 break;
5573
5574 /* The following are special cases that are not handled by the
5575 original GIMPLE grammar. */
5576
5577 /* A SAVE_EXPR node is converted into a GIMPLE identifier and
5578 eliminated. */
5579 case SAVE_EXPR:
5580 ret = gimplify_save_expr (expr_p, pre_p, post_p);
5581 break;
5582
5583 case BIT_FIELD_REF:
5584 {
5585 enum gimplify_status r0, r1, r2;
5586
5587 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
5588 is_gimple_lvalue, fb_either);
5589 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
5590 is_gimple_val, fb_rvalue);
5591 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p, post_p,
5592 is_gimple_val, fb_rvalue);
5593 recalculate_side_effects (*expr_p);
5594
5595 ret = MIN (r0, MIN (r1, r2));
5596 }
5597 break;
5598
5599 case NON_LVALUE_EXPR:
5600 /* This should have been stripped above. */
5601 gcc_unreachable ();
5602
5603 case ASM_EXPR:
5604 ret = gimplify_asm_expr (expr_p, pre_p, post_p);
5605 break;
5606
5607 case TRY_FINALLY_EXPR:
5608 case TRY_CATCH_EXPR:
5609 gimplify_to_stmt_list (&TREE_OPERAND (*expr_p, 0));
5610 gimplify_to_stmt_list (&TREE_OPERAND (*expr_p, 1));
5611 ret = GS_ALL_DONE;
5612 break;
5613
5614 case CLEANUP_POINT_EXPR:
5615 ret = gimplify_cleanup_point_expr (expr_p, pre_p);
5616 break;
5617
5618 case TARGET_EXPR:
5619 ret = gimplify_target_expr (expr_p, pre_p, post_p);
5620 break;
5621
5622 case CATCH_EXPR:
5623 gimplify_to_stmt_list (&CATCH_BODY (*expr_p));
5624 ret = GS_ALL_DONE;
5625 break;
5626
5627 case EH_FILTER_EXPR:
5628 gimplify_to_stmt_list (&EH_FILTER_FAILURE (*expr_p));
5629 ret = GS_ALL_DONE;
5630 break;
5631
5632 case OBJ_TYPE_REF:
5633 {
5634 enum gimplify_status r0, r1;
5635 r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p, post_p,
5636 is_gimple_val, fb_rvalue);
5637 r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p, post_p,
5638 is_gimple_val, fb_rvalue);
5639 ret = MIN (r0, r1);
5640 }
5641 break;
5642
5643 case LABEL_DECL:
5644 /* We get here when taking the address of a label. We mark
5645 the label as "forced", meaning it can never be removed and
5646 it is a potential target for any computed goto. */
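/* Hedged illustration (GNU C address-of-label extension, hypothetical
   identifiers):

     void g (void)
     {
       void *p = &&done;
       goto *p;
     done: ;
     }

   Taking &&done reaches this case with the LABEL_DECL for "done",
   which is then marked as a forced label.  */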
5647 FORCED_LABEL (*expr_p) = 1;
5648 ret = GS_ALL_DONE;
5649 break;
5650
5651 case STATEMENT_LIST:
5652 ret = gimplify_statement_list (expr_p);
5653 break;
5654
5655 case WITH_SIZE_EXPR:
5656 {
5657 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
5658 post_p == &internal_post ? NULL : post_p,
5659 gimple_test_f, fallback);
5660 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
5661 is_gimple_val, fb_rvalue);
5662 }
5663 break;
5664
5665 case VAR_DECL:
5666 case PARM_DECL:
5667 ret = gimplify_var_or_parm_decl (expr_p);
5668 break;
5669
5670 case RESULT_DECL:
5671 /* When within an OpenMP context, notice uses of variables. */
5672 if (gimplify_omp_ctxp)
5673 omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
5674 ret = GS_ALL_DONE;
5675 break;
5676
5677 case SSA_NAME:
5678 /* Allow callbacks into the gimplifier during optimization. */
5679 ret = GS_ALL_DONE;
5680 break;
5681
5682 case OMP_PARALLEL:
5683 ret = gimplify_omp_parallel (expr_p, pre_p);
5684 break;
5685
5686 case OMP_FOR:
5687 ret = gimplify_omp_for (expr_p, pre_p);
5688 break;
5689
5690 case OMP_SECTIONS:
5691 case OMP_SINGLE:
5692 ret = gimplify_omp_workshare (expr_p, pre_p);
5693 break;
5694
5695 case OMP_SECTION:
5696 case OMP_MASTER:
5697 case OMP_ORDERED:
5698 case OMP_CRITICAL:
5699 gimplify_to_stmt_list (&OMP_BODY (*expr_p));
5700 break;
5701
5702 case OMP_ATOMIC:
5703 ret = gimplify_omp_atomic (expr_p, pre_p);
5704 break;
5705
5706 case OMP_RETURN:
5707 case OMP_CONTINUE:
5708 ret = GS_ALL_DONE;
5709 break;
5710
5711 default:
5712 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
5713 {
5714 case tcc_comparison:
5715 /* Handle comparisons of aggregate objects that do not have scalar
5716 mode by emitting a call to memcmp.  It would be nice to only have to do
5717 this for variable-sized objects, but then we'd have to allow
5718 the same nest of reference nodes we allow for MODIFY_EXPR and
5719 that's too complex.
5720
5721 Compare scalar mode aggregates as scalar mode values. Using
5722 memcmp for them would be very inefficient at best, and is
5723 plain wrong if bitfields are involved. */
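/* Hedged sketch of the dispatch below (the exact lowering is done by
   gimplify_scalar_mode_aggregate_compare and
   gimplify_variable_sized_compare):

     a == b  ->  compare a and b as scalar values of the aggregate's
                 mode                                   (non-BLKmode)
     a == b  ->  memcmp (&a, &b, sizeof a) == 0         (BLKmode)  */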
5724
5725 {
5726 tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
5727
5728 if (!AGGREGATE_TYPE_P (type))
5729 goto expr_2;
5730 else if (TYPE_MODE (type) != BLKmode)
5731 ret = gimplify_scalar_mode_aggregate_compare (expr_p);
5732 else
5733 ret = gimplify_variable_sized_compare (expr_p);
5734
5735 break;
5736 }
5737
5738 /* If *EXPR_P does not need to be special-cased, handle it
5739 according to its class. */
5740 case tcc_unary:
5741 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
5742 post_p, is_gimple_val, fb_rvalue);
5743 break;
5744
5745 case tcc_binary:
5746 expr_2:
5747 {
5748 enum gimplify_status r0, r1;
5749
5750 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
5751 post_p, is_gimple_val, fb_rvalue);
5752 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
5753 post_p, is_gimple_val, fb_rvalue);
5754
5755 ret = MIN (r0, r1);
5756 break;
5757 }
5758
5759 case tcc_declaration:
5760 case tcc_constant:
5761 ret = GS_ALL_DONE;
5762 goto dont_recalculate;
5763
5764 default:
5765 gcc_assert (TREE_CODE (*expr_p) == TRUTH_AND_EXPR
5766 || TREE_CODE (*expr_p) == TRUTH_OR_EXPR
5767 || TREE_CODE (*expr_p) == TRUTH_XOR_EXPR);
5768 goto expr_2;
5769 }
5770
5771 recalculate_side_effects (*expr_p);
5772 dont_recalculate:
5773 break;
5774 }
5775
5776 /* If we replaced *expr_p, gimplify again. */
5777 if (ret == GS_OK && (*expr_p == NULL || *expr_p == save_expr))
5778 ret = GS_ALL_DONE;
5779 }
5780 while (ret == GS_OK);
5781
5782 /* If we encountered an error_mark somewhere nested inside, either
5783 stub out the statement or propagate the error back out. */
5784 if (ret == GS_ERROR)
5785 {
5786 if (is_statement)
5787 *expr_p = NULL;
5788 goto out;
5789 }
5790
5791 /* This was only valid as a return value from the langhook, which
5792 we handled. Make sure it doesn't escape from any other context. */
5793 gcc_assert (ret != GS_UNHANDLED);
5794
5795 if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
5796 {
5797 /* We aren't looking for a value, and we don't have a valid
5798 statement. If it doesn't have side-effects, throw it away. */
5799 if (!TREE_SIDE_EFFECTS (*expr_p))
5800 *expr_p = NULL;
5801 else if (!TREE_THIS_VOLATILE (*expr_p))
5802 {
5803 /* This is probably a _REF that contains something nested that
5804 has side effects. Recurse through the operands to find it. */
5805 enum tree_code code = TREE_CODE (*expr_p);
5806
5807 switch (code)
5808 {
5809 case COMPONENT_REF:
5810 case REALPART_EXPR:
5811 case IMAGPART_EXPR:
5812 case VIEW_CONVERT_EXPR:
5813 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
5814 gimple_test_f, fallback);
5815 break;
5816
5817 case ARRAY_REF: case ARRAY_RANGE_REF:
5818 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
5819 gimple_test_f, fallback);
5820 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
5821 gimple_test_f, fallback);
5822 break;
5823
5824 default:
5825 /* Anything else with side-effects must be converted to
5826 a valid statement before we get here. */
5827 gcc_unreachable ();
5828 }
5829
5830 *expr_p = NULL;
5831 }
5832 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p)))
5833 {
5834 /* Historically, the compiler has treated a bare
5835 reference to a volatile lvalue as forcing a load. */
5836 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
5837 /* Normally, we do not want to create a temporary for a
5838 TREE_ADDRESSABLE type because such a type should not be
5839 copied by bitwise-assignment. However, we make an
5840 exception here, as all we are doing here is ensuring that
5841 we read the bytes that make up the type. We use
5842 create_tmp_var_raw because create_tmp_var will abort when
5843 given a TREE_ADDRESSABLE type. */
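/* Hedged illustration: for

     volatile int v;
     void g (void) { v; }

   the bare expression statement becomes roughly "vol.N = v", so the
   volatile location is still read.  */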
5844 tree tmp = create_tmp_var_raw (type, "vol");
5845 gimple_add_tmp_var (tmp);
5846 *expr_p = build2 (MODIFY_EXPR, type, tmp, *expr_p);
5847 }
5848 else
5849 /* We can't do anything useful with a volatile reference to
5850 incomplete type, so just throw it away. */
5851 *expr_p = NULL;
5852 }
5853
5854 /* If we are gimplifying at the statement level, we're done. Tack
5855 everything together and replace the original statement with the
5856 gimplified form. */
5857 if (fallback == fb_none || is_statement)
5858 {
5859 if (internal_pre || internal_post)
5860 {
5861 append_to_statement_list (*expr_p, &internal_pre);
5862 append_to_statement_list (internal_post, &internal_pre);
5863 annotate_all_with_locus (&internal_pre, input_location);
5864 *expr_p = internal_pre;
5865 }
5866 else if (!*expr_p)
5867 ;
5868 else if (TREE_CODE (*expr_p) == STATEMENT_LIST)
5869 annotate_all_with_locus (expr_p, input_location);
5870 else
5871 annotate_one_with_locus (*expr_p, input_location);
5872 goto out;
5873 }
5874
5875 /* Otherwise we're gimplifying a subexpression, so the resulting value is
5876 interesting. */
5877
5878 /* If it's sufficiently simple already, we're done. Unless we are
5879 handling some post-effects internally; if that's the case, we need to
5880 copy into a temp before adding the post-effects to the tree. */
5881 if (!internal_post && (*gimple_test_f) (*expr_p))
5882 goto out;
5883
5884 /* Otherwise, we need to create a new temporary for the gimplified
5885 expression. */
5886
5887 /* We can't return an lvalue if we have an internal postqueue. The
5888 object the lvalue refers to would (probably) be modified by the
5889 postqueue; we need to copy the value out first, which means an
5890 rvalue. */
5891 if ((fallback & fb_lvalue) && !internal_post
5892 && is_gimple_addressable (*expr_p))
5893 {
5894 /* An lvalue will do. Take the address of the expression, store it
5895 in a temporary, and replace the expression with an INDIRECT_REF of
5896 that temporary. */
5897 tmp = build_fold_addr_expr (*expr_p);
5898 gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
5899 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (tmp)), tmp);
5900 }
5901 else if ((fallback & fb_rvalue) && is_gimple_formal_tmp_rhs (*expr_p))
5902 {
5903 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
5904
5905 /* An rvalue will do. Assign the gimplified expression into a new
5906 temporary TMP and replace the original expression with TMP. */
5907
5908 if (internal_post || (fallback & fb_lvalue))
5909 /* The postqueue might change the value of the expression between
5910 the initialization and use of the temporary, so we can't use a
5911 formal temp. FIXME do we care? */
5912 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
5913 else
5914 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
5915
5916 if (TREE_CODE (*expr_p) != SSA_NAME)
5917 DECL_GIMPLE_FORMAL_TEMP_P (*expr_p) = 1;
5918 }
5919 else
5920 {
5921 #ifdef ENABLE_CHECKING
5922 if (!(fallback & fb_mayfail))
5923 {
5924 fprintf (stderr, "gimplification failed:\n");
5925 print_generic_expr (stderr, *expr_p, 0);
5926 debug_tree (*expr_p);
5927 internal_error ("gimplification failed");
5928 }
5929 #endif
5930 gcc_assert (fallback & fb_mayfail);
5931 /* If this is an asm statement, and the user asked for the
5932 impossible, don't die. Fail and let gimplify_asm_expr
5933 issue an error. */
5934 ret = GS_ERROR;
5935 goto out;
5936 }
5937
5938 /* Make sure the temporary matches our predicate. */
5939 gcc_assert ((*gimple_test_f) (*expr_p));
5940
5941 if (internal_post)
5942 {
5943 annotate_all_with_locus (&internal_post, input_location);
5944 append_to_statement_list (internal_post, pre_p);
5945 }
5946
5947 out:
5948 input_location = saved_location;
5949 return ret;
5950 }
5951
5952 /* Look through TYPE for variable-sized objects and gimplify each such
5953 size that we find. Add to LIST_P any statements generated. */
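/* Hedged illustration: for a C99 variable length array such as

     void f (int n) { char a[n]; }

   the size of a's type depends on "n"; those size computations are
   gimplified here and any statements produced are added to LIST_P.  */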
5954
5955 void
5956 gimplify_type_sizes (tree type, tree *list_p)
5957 {
5958 tree field, t;
5959
5960 if (type == NULL || type == error_mark_node)
5961 return;
5962
5963 /* We first do the main variant, then copy into any other variants. */
5964 type = TYPE_MAIN_VARIANT (type);
5965
5966 /* Avoid infinite recursion. */
5967 if (TYPE_SIZES_GIMPLIFIED (type))
5968 return;
5969
5970 TYPE_SIZES_GIMPLIFIED (type) = 1;
5971
5972 switch (TREE_CODE (type))
5973 {
5974 case INTEGER_TYPE:
5975 case ENUMERAL_TYPE:
5976 case BOOLEAN_TYPE:
5977 case REAL_TYPE:
5978 gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
5979 gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);
5980
5981 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
5982 {
5983 TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
5984 TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
5985 }
5986 break;
5987
5988 case ARRAY_TYPE:
5989 /* These types may not have declarations, so handle them here. */
5990 gimplify_type_sizes (TREE_TYPE (type), list_p);
5991 gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
5992 break;
5993
5994 case RECORD_TYPE:
5995 case UNION_TYPE:
5996 case QUAL_UNION_TYPE:
5997 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
5998 if (TREE_CODE (field) == FIELD_DECL)
5999 {
6000 gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
6001 gimplify_type_sizes (TREE_TYPE (field), list_p);
6002 }
6003 break;
6004
6005 case POINTER_TYPE:
6006 case REFERENCE_TYPE:
6007 gimplify_type_sizes (TREE_TYPE (type), list_p);
6008 break;
6009
6010 default:
6011 break;
6012 }
6013
6014 gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
6015 gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);
6016
6017 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6018 {
6019 TYPE_SIZE (t) = TYPE_SIZE (type);
6020 TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
6021 TYPE_SIZES_GIMPLIFIED (t) = 1;
6022 }
6023 }
6024
6025 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
6026 a size or position, has had all of its SAVE_EXPRs evaluated.
6027 We add any required statements to STMT_P. */
6028
6029 void
6030 gimplify_one_sizepos (tree *expr_p, tree *stmt_p)
6031 {
6032 tree type, expr = *expr_p;
6033
6034 /* We don't do anything if the value isn't there, is constant, or contains
6035 a PLACEHOLDER_EXPR.  We also don't want to do anything if it's already
6036 a VAR_DECL. If it's a VAR_DECL from another function, the gimplifier
6037 will want to replace it with a new variable, but that will cause problems
6038 if this type is from outside the function. It's OK to have that here. */
6039 if (expr == NULL_TREE || TREE_CONSTANT (expr)
6040 || TREE_CODE (expr) == VAR_DECL
6041 || CONTAINS_PLACEHOLDER_P (expr))
6042 return;
6043
6044 type = TREE_TYPE (expr);
6045 *expr_p = unshare_expr (expr);
6046
6047 gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue);
6048 expr = *expr_p;
6049
6050 /* Verify that we have an exact type match with the original expression.
6051 In particular, we do not wish to drop a "sizetype" in favour of a
6052 type of similar dimensions. We don't want to pollute the generic
6053 type-stripping code with this knowledge because it doesn't matter
6054 for the bulk of GENERIC/GIMPLE. It only matters that TYPE_SIZE_UNIT
6055 and friends retain their "sizetype-ness". */
6056 if (TREE_TYPE (expr) != type
6057 && TREE_CODE (type) == INTEGER_TYPE
6058 && TYPE_IS_SIZETYPE (type))
6059 {
6060 tree tmp;
6061
6062 *expr_p = create_tmp_var (type, NULL);
6063 tmp = build1 (NOP_EXPR, type, expr);
6064 tmp = build2 (MODIFY_EXPR, type, *expr_p, tmp);
6065 if (EXPR_HAS_LOCATION (expr))
6066 SET_EXPR_LOCUS (tmp, EXPR_LOCUS (expr));
6067 else
6068 SET_EXPR_LOCATION (tmp, input_location);
6069
6070 gimplify_and_add (tmp, stmt_p);
6071 }
6072 }
6073 \f
6074 #ifdef ENABLE_CHECKING
6075 /* Compare types A and B for a "close enough" match. */
6076
6077 static bool
6078 cpt_same_type (tree a, tree b)
6079 {
6080 if (lang_hooks.types_compatible_p (a, b))
6081 return true;
6082
6083 /* ??? The C++ FE decomposes METHOD_TYPES to FUNCTION_TYPES and doesn't
6084 link them together. This routine is intended to catch type errors
6085 that will affect the optimizers, and the optimizers don't add new
6086 dereferences of function pointers, so ignore it. */
6087 if ((TREE_CODE (a) == FUNCTION_TYPE || TREE_CODE (a) == METHOD_TYPE)
6088 && (TREE_CODE (b) == FUNCTION_TYPE || TREE_CODE (b) == METHOD_TYPE))
6089 return true;
6090
6091 /* ??? The C FE pushes type qualifiers after the fact into the type of
6092 the element from the type of the array. See build_unary_op's handling
6093 of ADDR_EXPR. This seems wrong -- if we were going to do this, we
6094 should have done it when creating the variable in the first place.
6095 Alternately, why aren't the two array types made variants? */
6096 if (TREE_CODE (a) == ARRAY_TYPE && TREE_CODE (b) == ARRAY_TYPE)
6097 return cpt_same_type (TREE_TYPE (a), TREE_TYPE (b));
6098
6099 /* And because of those, we have to recurse down through pointers. */
6100 if (POINTER_TYPE_P (a) && POINTER_TYPE_P (b))
6101 return cpt_same_type (TREE_TYPE (a), TREE_TYPE (b));
6102
6103 return false;
6104 }
6105
6106 /* Check for some cases of the front end missing cast expressions.
6107 The type of a dereference should correspond to the pointer type;
6108 similarly the type of an address should match its object. */
6109
6110 static tree
6111 check_pointer_types_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
6112 void *data ATTRIBUTE_UNUSED)
6113 {
6114 tree t = *tp;
6115 tree ptype, otype, dtype;
6116
6117 switch (TREE_CODE (t))
6118 {
6119 case INDIRECT_REF:
6120 case ARRAY_REF:
6121 otype = TREE_TYPE (t);
6122 ptype = TREE_TYPE (TREE_OPERAND (t, 0));
6123 dtype = TREE_TYPE (ptype);
6124 gcc_assert (cpt_same_type (otype, dtype));
6125 break;
6126
6127 case ADDR_EXPR:
6128 ptype = TREE_TYPE (t);
6129 otype = TREE_TYPE (TREE_OPERAND (t, 0));
6130 dtype = TREE_TYPE (ptype);
6131 if (!cpt_same_type (otype, dtype))
6132 {
6133 /* &array is allowed to produce a pointer to the element, rather than
6134 a pointer to the array type. We must allow this in order to
6135 properly represent assigning the address of an array in C into
6136 a pointer to the element type. */
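/* Hedged illustration: for

     int a[10];
     int *p = a;

   the decay of "a" is represented as an ADDR_EXPR of the array whose
   type is "int *", i.e. a pointer to the element rather than to the
   array, which is exactly what the assertion below allows.  */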
6137 gcc_assert (TREE_CODE (otype) == ARRAY_TYPE
6138 && POINTER_TYPE_P (ptype)
6139 && cpt_same_type (TREE_TYPE (otype), dtype));
6140 break;
6141 }
6142 break;
6143
6144 default:
6145 return NULL_TREE;
6146 }
6147
6148
6149 return NULL_TREE;
6150 }
6151 #endif
6152
6153 /* Gimplify the body of statements pointed to by BODY_P. FNDECL is the
6154 function decl containing BODY. */
6155
6156 void
6157 gimplify_body (tree *body_p, tree fndecl, bool do_parms)
6158 {
6159 location_t saved_location = input_location;
6160 tree body, parm_stmts;
6161
6162 timevar_push (TV_TREE_GIMPLIFY);
6163
6164 gcc_assert (gimplify_ctxp == NULL);
6165 push_gimplify_context ();
6166
6167 /* Unshare most shared trees in the body and in that of any nested functions.
6168 It would seem we don't have to do this for nested functions because
6169 they are supposed to be output and then the outer function gimplified
6170 first, but the g++ front end doesn't always do it that way. */
6171 unshare_body (body_p, fndecl);
6172 unvisit_body (body_p, fndecl);
6173
6174 /* Make sure input_location isn't set to something weird. */
6175 input_location = DECL_SOURCE_LOCATION (fndecl);
6176
6177 /* Resolve callee-copies. This has to be done before processing
6178 the body so that DECL_VALUE_EXPR gets processed correctly. */
6179 parm_stmts = do_parms ? gimplify_parameters () : NULL;
6180
6181 /* Gimplify the function's body. */
6182 gimplify_stmt (body_p);
6183 body = *body_p;
6184
6185 if (!body)
6186 body = alloc_stmt_list ();
6187 else if (TREE_CODE (body) == STATEMENT_LIST)
6188 {
6189 tree t = expr_only (*body_p);
6190 if (t)
6191 body = t;
6192 }
6193
6194 /* If there isn't an outer BIND_EXPR, add one. */
6195 if (TREE_CODE (body) != BIND_EXPR)
6196 {
6197 tree b = build3 (BIND_EXPR, void_type_node, NULL_TREE,
6198 NULL_TREE, NULL_TREE);
6199 TREE_SIDE_EFFECTS (b) = 1;
6200 append_to_statement_list_force (body, &BIND_EXPR_BODY (b));
6201 body = b;
6202 }
6203
6204 /* If we had callee-copies statements, insert them at the beginning
6205 of the function. */
6206 if (parm_stmts)
6207 {
6208 append_to_statement_list_force (BIND_EXPR_BODY (body), &parm_stmts);
6209 BIND_EXPR_BODY (body) = parm_stmts;
6210 }
6211
6212 /* Unshare again, in case gimplification was sloppy. */
6213 unshare_all_trees (body);
6214
6215 *body_p = body;
6216
6217 pop_gimplify_context (body);
6218 gcc_assert (gimplify_ctxp == NULL);
6219
6220 #ifdef ENABLE_CHECKING
6221 walk_tree (body_p, check_pointer_types_r, NULL, NULL);
6222 #endif
6223
6224 timevar_pop (TV_TREE_GIMPLIFY);
6225 input_location = saved_location;
6226 }
6227
6228 /* Entry point to the gimplification pass. FNDECL is the FUNCTION_DECL
6229 node for the function we want to gimplify. */
6230
6231 void
6232 gimplify_function_tree (tree fndecl)
6233 {
6234 tree oldfn, parm, ret;
6235
6236 oldfn = current_function_decl;
6237 current_function_decl = fndecl;
6238 cfun = DECL_STRUCT_FUNCTION (fndecl);
6239 if (cfun == NULL)
6240 allocate_struct_function (fndecl);
6241
6242 for (parm = DECL_ARGUMENTS (fndecl); parm ; parm = TREE_CHAIN (parm))
6243 {
6244 /* Preliminarily mark non-addressed complex variables as eligible
6245 for promotion to gimple registers. We'll transform their uses
6246 as we find them. */
6247 if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
6248 && !TREE_THIS_VOLATILE (parm)
6249 && !needs_to_live_in_memory (parm))
6250 DECL_COMPLEX_GIMPLE_REG_P (parm) = 1;
6251 }
6252
6253 ret = DECL_RESULT (fndecl);
6254 if (TREE_CODE (TREE_TYPE (ret)) == COMPLEX_TYPE
6255 && !needs_to_live_in_memory (ret))
6256 DECL_COMPLEX_GIMPLE_REG_P (ret) = 1;
6257
6258 gimplify_body (&DECL_SAVED_TREE (fndecl), fndecl, true);
6259
6260 /* If we're instrumenting function entry/exit, then prepend the call to
6261 the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
6262 catch the exit hook. */
6263 /* ??? Add some way to ignore exceptions for this TFE. */
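/* A hedged sketch of the tree built below:

     BIND_EXPR
       call to BUILT_IN_PROFILE_FUNC_ENTER
       TRY_FINALLY_EXPR
         body:    original DECL_SAVED_TREE
         finally: call to BUILT_IN_PROFILE_FUNC_EXIT  */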
6264 if (flag_instrument_function_entry_exit
6265 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl))
6266 {
6267 tree tf, x, bind;
6268
6269 tf = build2 (TRY_FINALLY_EXPR, void_type_node, NULL, NULL);
6270 TREE_SIDE_EFFECTS (tf) = 1;
6271 x = DECL_SAVED_TREE (fndecl);
6272 append_to_statement_list (x, &TREE_OPERAND (tf, 0));
6273 x = implicit_built_in_decls[BUILT_IN_PROFILE_FUNC_EXIT];
6274 x = build_function_call_expr (x, NULL);
6275 append_to_statement_list (x, &TREE_OPERAND (tf, 1));
6276
6277 bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
6278 TREE_SIDE_EFFECTS (bind) = 1;
6279 x = implicit_built_in_decls[BUILT_IN_PROFILE_FUNC_ENTER];
6280 x = build_function_call_expr (x, NULL);
6281 append_to_statement_list (x, &BIND_EXPR_BODY (bind));
6282 append_to_statement_list (tf, &BIND_EXPR_BODY (bind));
6283
6284 DECL_SAVED_TREE (fndecl) = bind;
6285 }
6286
6287 current_function_decl = oldfn;
6288 cfun = oldfn ? DECL_STRUCT_FUNCTION (oldfn) : NULL;
6289 }
6290
6291 \f
6292 /* Expands EXPR to a list of gimple statements STMTS.  If SIMPLE is true,
6293 force the result to be either ssa_name or an invariant, otherwise
6294 just force it to be a rhs expression. If VAR is not NULL, make the
6295 base variable of the final destination be VAR if suitable. */
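/* Hedged usage sketch (a hypothetical caller, not from this file), given
   some tree EXPR and a block_stmt_iterator BSI:

     tree stmts;
     tree val = force_gimple_operand (expr, &stmts, true, NULL_TREE);
     if (stmts)
       bsi_insert_before (&bsi, stmts, BSI_SAME_STMT);

   which is essentially what force_gimple_operand_bsi below does.  */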
6296
6297 tree
6298 force_gimple_operand (tree expr, tree *stmts, bool simple, tree var)
6299 {
6300 tree t;
6301 enum gimplify_status ret;
6302 gimple_predicate gimple_test_f;
6303
6304 *stmts = NULL_TREE;
6305
6306 if (is_gimple_val (expr))
6307 return expr;
6308
6309 gimple_test_f = simple ? is_gimple_val : is_gimple_reg_rhs;
6310
6311 push_gimplify_context ();
6312 gimplify_ctxp->into_ssa = in_ssa_p;
6313
6314 if (var)
6315 expr = build2 (MODIFY_EXPR, TREE_TYPE (var), var, expr);
6316
6317 ret = gimplify_expr (&expr, stmts, NULL,
6318 gimple_test_f, fb_rvalue);
6319 gcc_assert (ret != GS_ERROR);
6320
6321 if (referenced_vars)
6322 {
6323 for (t = gimplify_ctxp->temps; t ; t = TREE_CHAIN (t))
6324 add_referenced_var (t);
6325 }
6326
6327 pop_gimplify_context (NULL);
6328
6329 return expr;
6330 }
6331
6332 /* Invokes force_gimple_operand for EXPR with parameters SIMPLE_P and VAR. If
6333 some statements are produced, emits them before BSI. */
6334
6335 tree
6336 force_gimple_operand_bsi (block_stmt_iterator *bsi, tree expr,
6337 bool simple_p, tree var)
6338 {
6339 tree stmts;
6340
6341 expr = force_gimple_operand (expr, &stmts, simple_p, var);
6342 if (stmts)
6343 bsi_insert_before (bsi, stmts, BSI_SAME_STMT);
6344
6345 return expr;
6346 }
6347
6348 #include "gt-gimplify.h"