1 /* Tree lowering pass. This pass converts the GENERIC functions-as-trees
2 tree representation into the GIMPLE form.
3 Copyright (C) 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.
4 Major work done by Sebastian Pop <s.pop@laposte.net>,
5 Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 2, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING. If not, write to the Free
21 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
22 02110-1301, USA. */
23
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "tm.h"
28 #include "tree.h"
29 #include "rtl.h"
30 #include "varray.h"
31 #include "tree-gimple.h"
32 #include "tree-inline.h"
33 #include "diagnostic.h"
34 #include "langhooks.h"
35 #include "langhooks-def.h"
36 #include "tree-flow.h"
37 #include "cgraph.h"
38 #include "timevar.h"
39 #include "except.h"
40 #include "hashtab.h"
41 #include "flags.h"
42 #include "real.h"
43 #include "function.h"
44 #include "output.h"
45 #include "expr.h"
46 #include "ggc.h"
47 #include "toplev.h"
48 #include "target.h"
49 #include "optabs.h"
50 #include "pointer-set.h"
51
52
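/* Flags describing how a variable is used within an OpenMP context.  A
   bitmask of these values is recorded for each variable seen, in the
   splay tree of the enclosing gimplify_omp_ctx.  */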
53 enum gimplify_omp_var_data
54 {
55 GOVD_SEEN = 1,
56 GOVD_EXPLICIT = 2,
57 GOVD_SHARED = 4,
58 GOVD_PRIVATE = 8,
59 GOVD_FIRSTPRIVATE = 16,
60 GOVD_LASTPRIVATE = 32,
61 GOVD_REDUCTION = 64,
62 GOVD_LOCAL = 128,
63 GOVD_DEBUG_PRIVATE = 256,
64 GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
65 | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LOCAL)
66 };
67
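/* State kept while gimplifying the body of an OpenMP construct: the
   data-sharing flags recorded for each variable seen, the set of types
   already privatized, and the default data-sharing kind in effect.  */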
68 struct gimplify_omp_ctx
69 {
70 struct gimplify_omp_ctx *outer_context;
71 splay_tree variables;
72 struct pointer_set_t *privatized_types;
73 location_t location;
74 enum omp_clause_default_kind default_kind;
75 bool is_parallel;
76 bool is_combined_parallel;
77 };
78
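/* State for one invocation of the gimplifier.  Contexts are pushed by
   push_gimplify_context and popped by pop_gimplify_context, and nest
   through prev_context when gimplification is reentered.  */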
79 struct gimplify_ctx
80 {
81 struct gimplify_ctx *prev_context;
82
83 tree current_bind_expr;
84 tree temps;
85 tree conditional_cleanups;
86 tree exit_label;
87 tree return_temp;
88
89 VEC(tree,heap) *case_labels;
90 /* The formal temporary table. Should this be persistent? */
91 htab_t temp_htab;
92
93 int conditions;
94 bool save_stack;
95 bool into_ssa;
96 };
97
98 static struct gimplify_ctx *gimplify_ctxp;
99 static struct gimplify_omp_ctx *gimplify_omp_ctxp;
100
101
102
103 /* Formal (expression) temporary table handling: Multiple occurrences of
104 the same scalar expression are evaluated into the same temporary. */
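/* For instance, when optimizing, two occurrences of the expression 'a + b'
   gimplified in the same context are assigned to one shared formal
   temporary rather than to two distinct temporaries (see lookup_tmp_var).  */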
105
106 typedef struct gimple_temp_hash_elt
107 {
108 tree val; /* Key */
109 tree temp; /* Value */
110 } elt_t;
111
112 /* Forward declarations. */
113 static enum gimplify_status gimplify_compound_expr (tree *, tree *, bool);
114 #ifdef ENABLE_CHECKING
115 static bool cpt_same_type (tree a, tree b);
116 #endif
117
118
119 /* Return a hash value for a formal temporary table entry. */
120
121 static hashval_t
122 gimple_tree_hash (const void *p)
123 {
124 tree t = ((const elt_t *) p)->val;
125 return iterative_hash_expr (t, 0);
126 }
127
128 /* Compare two formal temporary table entries. */
129
130 static int
131 gimple_tree_eq (const void *p1, const void *p2)
132 {
133 tree t1 = ((const elt_t *) p1)->val;
134 tree t2 = ((const elt_t *) p2)->val;
135 enum tree_code code = TREE_CODE (t1);
136
137 if (TREE_CODE (t2) != code
138 || TREE_TYPE (t1) != TREE_TYPE (t2))
139 return 0;
140
141 if (!operand_equal_p (t1, t2, 0))
142 return 0;
143
144 /* Only allow them to compare equal if they also hash equal; otherwise
145 results are nondeterministic, and we fail bootstrap comparison. */
146 gcc_assert (gimple_tree_hash (p1) == gimple_tree_hash (p2));
147
148 return 1;
149 }
150
151 /* Set up a context for the gimplifier. */
152
153 void
154 push_gimplify_context (void)
155 {
156 struct gimplify_ctx *c;
157
158 c = (struct gimplify_ctx *) xcalloc (1, sizeof (struct gimplify_ctx));
159 c->prev_context = gimplify_ctxp;
160 if (optimize)
161 c->temp_htab = htab_create (1000, gimple_tree_hash, gimple_tree_eq, free);
162
163 gimplify_ctxp = c;
164 }
165
166 /* Tear down a context for the gimplifier. If BODY is non-null, then
167 put the temporaries into the outer BIND_EXPR. Otherwise, put them
168 in the unexpanded_var_list. */
169
170 void
171 pop_gimplify_context (tree body)
172 {
173 struct gimplify_ctx *c = gimplify_ctxp;
174 tree t;
175
176 gcc_assert (c && !c->current_bind_expr);
177 gimplify_ctxp = c->prev_context;
178
179 for (t = c->temps; t ; t = TREE_CHAIN (t))
180 DECL_GIMPLE_FORMAL_TEMP_P (t) = 0;
181
182 if (body)
183 declare_vars (c->temps, body, false);
184 else
185 record_vars (c->temps);
186
187 if (optimize)
188 htab_delete (c->temp_htab);
189 free (c);
190 }
191
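/* Push BIND onto the stack of BIND_EXPRs currently being gimplified,
   chaining it through TREE_CHAIN.  */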
192 static void
193 gimple_push_bind_expr (tree bind)
194 {
195 TREE_CHAIN (bind) = gimplify_ctxp->current_bind_expr;
196 gimplify_ctxp->current_bind_expr = bind;
197 }
198
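/* Pop the innermost BIND_EXPR off the stack maintained by
   gimple_push_bind_expr.  */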
199 static void
200 gimple_pop_bind_expr (void)
201 {
202 gimplify_ctxp->current_bind_expr
203 = TREE_CHAIN (gimplify_ctxp->current_bind_expr);
204 }
205
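/* Return the BIND_EXPR currently being gimplified, or NULL_TREE if
   there is none.  */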
206 tree
207 gimple_current_bind_expr (void)
208 {
209 return gimplify_ctxp->current_bind_expr;
210 }
211
212 /* Returns true iff there is a COND_EXPR between us and the innermost
213 CLEANUP_POINT_EXPR. This info is used by gimple_push_cleanup. */
214
215 static bool
216 gimple_conditional_context (void)
217 {
218 return gimplify_ctxp->conditions > 0;
219 }
220
221 /* Note that we've entered a COND_EXPR. */
222
223 static void
224 gimple_push_condition (void)
225 {
226 #ifdef ENABLE_CHECKING
227 if (gimplify_ctxp->conditions == 0)
228 gcc_assert (!gimplify_ctxp->conditional_cleanups);
229 #endif
230 ++(gimplify_ctxp->conditions);
231 }
232
233 /* Note that we've left a COND_EXPR. If we're back at unconditional scope
234 now, add any conditional cleanups we've seen to the prequeue. */
235
236 static void
237 gimple_pop_condition (tree *pre_p)
238 {
239 int conds = --(gimplify_ctxp->conditions);
240
241 gcc_assert (conds >= 0);
242 if (conds == 0)
243 {
244 append_to_statement_list (gimplify_ctxp->conditional_cleanups, pre_p);
245 gimplify_ctxp->conditional_cleanups = NULL_TREE;
246 }
247 }
248
249 /* A stable comparison routine for use with splay trees and DECLs. */
250
251 static int
252 splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
253 {
254 tree a = (tree) xa;
255 tree b = (tree) xb;
256
257 return DECL_UID (a) - DECL_UID (b);
258 }
259
260 /* Create a new omp construct that deals with variable remapping. */
261
262 static struct gimplify_omp_ctx *
263 new_omp_context (bool is_parallel, bool is_combined_parallel)
264 {
265 struct gimplify_omp_ctx *c;
266
267 c = XCNEW (struct gimplify_omp_ctx);
268 c->outer_context = gimplify_omp_ctxp;
269 c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
270 c->privatized_types = pointer_set_create ();
271 c->location = input_location;
272 c->is_parallel = is_parallel;
273 c->is_combined_parallel = is_combined_parallel;
274 c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
275
276 return c;
277 }
278
279 /* Destroy an omp construct that deals with variable remapping. */
280
281 static void
282 delete_omp_context (struct gimplify_omp_ctx *c)
283 {
284 splay_tree_delete (c->variables);
285 pointer_set_destroy (c->privatized_types);
286 XDELETE (c);
287 }
288
289 static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
290 static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);
291
292 /* A subroutine of append_to_statement_list{,_force}. T is not NULL. */
293
294 static void
295 append_to_statement_list_1 (tree t, tree *list_p)
296 {
297 tree list = *list_p;
298 tree_stmt_iterator i;
299
300 if (!list)
301 {
302 if (t && TREE_CODE (t) == STATEMENT_LIST)
303 {
304 *list_p = t;
305 return;
306 }
307 *list_p = list = alloc_stmt_list ();
308 }
309
310 i = tsi_last (list);
311 tsi_link_after (&i, t, TSI_CONTINUE_LINKING);
312 }
313
314 /* Add T to the end of the list container pointed to by LIST_P.
315 If T is an expression with no effects, it is ignored. */
316
317 void
318 append_to_statement_list (tree t, tree *list_p)
319 {
320 if (t && TREE_SIDE_EFFECTS (t))
321 append_to_statement_list_1 (t, list_p);
322 }
323
324 /* Similar, but the statement is always added, regardless of side effects. */
325
326 void
327 append_to_statement_list_force (tree t, tree *list_p)
328 {
329 if (t != NULL_TREE)
330 append_to_statement_list_1 (t, list_p);
331 }
332
333 /* Both gimplify the statement T and append it to LIST_P. */
334
335 void
336 gimplify_and_add (tree t, tree *list_p)
337 {
338 gimplify_stmt (&t);
339 append_to_statement_list (t, list_p);
340 }
341
342 /* Strip off a legitimate source ending from the input string NAME of
343 length LEN. Rather than having to know the names used by all of
344 our front ends, we strip off an ending of a period followed by
345 up to five characters. (Java uses ".class".) */
346
347 static inline void
348 remove_suffix (char *name, int len)
349 {
350 int i;
351
352 for (i = 2; i < 8 && len > i; i++)
353 {
354 if (name[len - i] == '.')
355 {
356 name[len - i] = '\0';
357 break;
358 }
359 }
360 }
361
362 /* Create a nameless artificial label and put it in the current function
363 context. Returns the newly created label. */
364
365 tree
366 create_artificial_label (void)
367 {
368 tree lab = build_decl (LABEL_DECL, NULL_TREE, void_type_node);
369
370 DECL_ARTIFICIAL (lab) = 1;
371 DECL_IGNORED_P (lab) = 1;
372 DECL_CONTEXT (lab) = current_function_decl;
373 return lab;
374 }
375
376 /* Subroutine for find_single_pointer_decl. */
377
378 static tree
379 find_single_pointer_decl_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
380 void *data)
381 {
382 tree *pdecl = (tree *) data;
383
384 if (DECL_P (*tp) && POINTER_TYPE_P (TREE_TYPE (*tp)))
385 {
386 if (*pdecl)
387 {
388 /* We already found a pointer decl; return anything other
389 than NULL_TREE to unwind from walk_tree signalling that
390 we have a duplicate. */
391 return *tp;
392 }
393 *pdecl = *tp;
394 }
395
396 return NULL_TREE;
397 }
398
399 /* Find the single DECL of pointer type in the tree T and return it.
400 If there are zero or more than one such DECLs, return NULL. */
401
402 static tree
403 find_single_pointer_decl (tree t)
404 {
405 tree decl = NULL_TREE;
406
407 if (walk_tree (&t, find_single_pointer_decl_1, &decl, NULL))
408 {
409 /* find_single_pointer_decl_1 returns a nonzero value, causing
410 walk_tree to return a nonzero value, to indicate that it
411 found more than one pointer DECL. */
412 return NULL_TREE;
413 }
414
415 return decl;
416 }
417
418 /* Create a new temporary name with PREFIX. Returns an identifier. */
419
420 static GTY(()) unsigned int tmp_var_id_num;
421
422 tree
423 create_tmp_var_name (const char *prefix)
424 {
425 char *tmp_name;
426
427 if (prefix)
428 {
429 char *preftmp = ASTRDUP (prefix);
430
431 remove_suffix (preftmp, strlen (preftmp));
432 prefix = preftmp;
433 }
434
435 ASM_FORMAT_PRIVATE_NAME (tmp_name, prefix ? prefix : "T", tmp_var_id_num++);
436 return get_identifier (tmp_name);
437 }
438
439
440 /* Create a new temporary variable declaration of type TYPE.
441 Does NOT push it into the current binding. */
442
443 tree
444 create_tmp_var_raw (tree type, const char *prefix)
445 {
446 tree tmp_var;
447 tree new_type;
448
449 /* Make the type of the variable writable. */
450 new_type = build_type_variant (type, 0, 0);
451 TYPE_ATTRIBUTES (new_type) = TYPE_ATTRIBUTES (type);
452
453 tmp_var = build_decl (VAR_DECL, prefix ? create_tmp_var_name (prefix) : NULL,
454 type);
455
456 /* The variable was declared by the compiler. */
457 DECL_ARTIFICIAL (tmp_var) = 1;
458 /* And we don't want debug info for it. */
459 DECL_IGNORED_P (tmp_var) = 1;
460
461 /* Make the variable writable. */
462 TREE_READONLY (tmp_var) = 0;
463
464 DECL_EXTERNAL (tmp_var) = 0;
465 TREE_STATIC (tmp_var) = 0;
466 TREE_USED (tmp_var) = 1;
467
468 return tmp_var;
469 }
470
471 /* Create a new temporary variable declaration of type TYPE. DOES push the
472 variable into the current binding. Further, assume that this is called
473 only from gimplification or optimization, at which point the creation of
474 certain types is a bug. */
475
476 tree
477 create_tmp_var (tree type, const char *prefix)
478 {
479 tree tmp_var;
480
481 /* We don't allow types that are addressable (meaning we can't make copies),
482 or incomplete. We used to reject all variable-sized objects here as well,
483 but we now support those for which a constant upper bound can be obtained.
484 The processing for variable sizes is performed in gimple_add_tmp_var, the
485 point at which it really matters and which may be reached via paths not
486 going through this function, e.g. after direct calls to create_tmp_var_raw. */
487 gcc_assert (!TREE_ADDRESSABLE (type) && COMPLETE_TYPE_P (type));
488
489 tmp_var = create_tmp_var_raw (type, prefix);
490 gimple_add_tmp_var (tmp_var);
491 return tmp_var;
492 }
493
494 /* Given a tree, try to return a useful variable name that we can use
495 to prefix a temporary that is being assigned the value of the tree.
496 For example, given <temp> = &A, return A. */
497
498 const char *
499 get_name (tree t)
500 {
501 tree stripped_decl;
502
503 stripped_decl = t;
504 STRIP_NOPS (stripped_decl);
505 if (DECL_P (stripped_decl) && DECL_NAME (stripped_decl))
506 return IDENTIFIER_POINTER (DECL_NAME (stripped_decl));
507 else
508 {
509 switch (TREE_CODE (stripped_decl))
510 {
511 case ADDR_EXPR:
512 return get_name (TREE_OPERAND (stripped_decl, 0));
513 break;
514 default:
515 return NULL;
516 }
517 }
518 }
519
520 /* Create a temporary with a name derived from VAL. Subroutine of
521 lookup_tmp_var; nobody else should call this function. */
522
523 static inline tree
524 create_tmp_from_val (tree val)
525 {
526 return create_tmp_var (TYPE_MAIN_VARIANT (TREE_TYPE (val)), get_name (val));
527 }
528
529 /* Create a temporary to hold the value of VAL. If IS_FORMAL, try to reuse
530 an existing expression temporary. */
531
532 static tree
533 lookup_tmp_var (tree val, bool is_formal)
534 {
535 tree ret;
536
537 /* If not optimizing, never really reuse a temporary. local-alloc
538 won't allocate any variable that is used in more than one basic
539 block, which means it will go into memory, causing much extra
540 work in reload and final and poorer code generation, outweighing
541 the extra memory allocation here. */
542 if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
543 ret = create_tmp_from_val (val);
544 else
545 {
546 elt_t elt, *elt_p;
547 void **slot;
548
549 elt.val = val;
550 slot = htab_find_slot (gimplify_ctxp->temp_htab, (void *)&elt, INSERT);
551 if (*slot == NULL)
552 {
553 elt_p = XNEW (elt_t);
554 elt_p->val = val;
555 elt_p->temp = ret = create_tmp_from_val (val);
556 *slot = (void *) elt_p;
557 }
558 else
559 {
560 elt_p = (elt_t *) *slot;
561 ret = elt_p->temp;
562 }
563 }
564
565 if (is_formal)
566 DECL_GIMPLE_FORMAL_TEMP_P (ret) = 1;
567
568 return ret;
569 }
570
571 /* Returns a formal temporary variable initialized with VAL. PRE_P is as
572 in gimplify_expr. Only use this function if:
573
574 1) The value of the unfactored expression represented by VAL will not
575 change between the initialization and use of the temporary, and
576 2) The temporary will not be otherwise modified.
577
578 For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
579 and #2 means it is inappropriate for && temps.
580
581 For other cases, use get_initialized_tmp_var instead. */
582
583 static tree
584 internal_get_tmp_var (tree val, tree *pre_p, tree *post_p, bool is_formal)
585 {
586 tree t, mod;
587
588 gimplify_expr (&val, pre_p, post_p, is_gimple_formal_tmp_rhs, fb_rvalue);
589
590 t = lookup_tmp_var (val, is_formal);
591
592 if (is_formal)
593 {
594 tree u = find_single_pointer_decl (val);
595
596 if (u && TREE_CODE (u) == VAR_DECL && DECL_BASED_ON_RESTRICT_P (u))
597 u = DECL_GET_RESTRICT_BASE (u);
598 if (u && TYPE_RESTRICT (TREE_TYPE (u)))
599 {
600 if (DECL_BASED_ON_RESTRICT_P (t))
601 gcc_assert (u == DECL_GET_RESTRICT_BASE (t));
602 else
603 {
604 DECL_BASED_ON_RESTRICT_P (t) = 1;
605 SET_DECL_RESTRICT_BASE (t, u);
606 }
607 }
608 }
609
610 if (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE)
611 DECL_COMPLEX_GIMPLE_REG_P (t) = 1;
612
613 mod = build2 (INIT_EXPR, TREE_TYPE (t), t, val);
614
615 if (EXPR_HAS_LOCATION (val))
616 SET_EXPR_LOCUS (mod, EXPR_LOCUS (val));
617 else
618 SET_EXPR_LOCATION (mod, input_location);
619
620 /* gimplify_modify_expr might want to reduce this further. */
621 gimplify_and_add (mod, pre_p);
622
623 /* If we're gimplifying into ssa, gimplify_modify_expr will have
624 given our temporary an ssa name. Find and return it. */
625 if (gimplify_ctxp->into_ssa)
626 t = TREE_OPERAND (mod, 0);
627
628 return t;
629 }
630
631 /* Returns a formal temporary variable initialized with VAL. PRE_P
632 points to a statement list where side-effects needed to compute VAL
633 should be stored. */
634
635 tree
636 get_formal_tmp_var (tree val, tree *pre_p)
637 {
638 return internal_get_tmp_var (val, pre_p, NULL, true);
639 }
640
641 /* Returns a temporary variable initialized with VAL. PRE_P and POST_P
642 are as in gimplify_expr. */
643
644 tree
645 get_initialized_tmp_var (tree val, tree *pre_p, tree *post_p)
646 {
647 return internal_get_tmp_var (val, pre_p, post_p, false);
648 }
649
650 /* Declares all the variables in VARS in SCOPE. If DEBUG_INFO is
651 true, generate debug info for them; otherwise don't. */
652
653 void
654 declare_vars (tree vars, tree scope, bool debug_info)
655 {
656 tree last = vars;
657 if (last)
658 {
659 tree temps, block;
660
661 /* C99 mode puts the default 'return 0;' for main outside the outer
662 braces. So drill down until we find an actual scope. */
663 while (TREE_CODE (scope) == COMPOUND_EXPR)
664 scope = TREE_OPERAND (scope, 0);
665
666 gcc_assert (TREE_CODE (scope) == BIND_EXPR);
667
668 temps = nreverse (last);
669
670 block = BIND_EXPR_BLOCK (scope);
671 if (!block || !debug_info)
672 {
673 TREE_CHAIN (last) = BIND_EXPR_VARS (scope);
674 BIND_EXPR_VARS (scope) = temps;
675 }
676 else
677 {
678 /* We need to attach the nodes both to the BIND_EXPR and to its
679 associated BLOCK for debugging purposes. The key point here
680 is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
681 is a subchain of the BIND_EXPR_VARS of the BIND_EXPR. */
682 if (BLOCK_VARS (block))
683 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
684 else
685 {
686 BIND_EXPR_VARS (scope) = chainon (BIND_EXPR_VARS (scope), temps);
687 BLOCK_VARS (block) = temps;
688 }
689 }
690 }
691 }
692
693 /* For VAR a VAR_DECL of variable size, try to find a constant upper bound
694 for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly. Abort if
695 no such upper bound can be obtained. */
696
697 static void
698 force_constant_size (tree var)
699 {
700 /* The only attempt we make is by querying the maximum size of objects
701 of the variable's type. */
702
703 HOST_WIDE_INT max_size;
704
705 gcc_assert (TREE_CODE (var) == VAR_DECL);
706
707 max_size = max_int_size_in_bytes (TREE_TYPE (var));
708
709 gcc_assert (max_size >= 0);
710
711 DECL_SIZE_UNIT (var)
712 = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
713 DECL_SIZE (var)
714 = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
715 }
716
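/* Add the temporary variable TMP to the current function.  Force a
   constant size on it if needed, then record it either in the current
   gimplify context, in the function's unexpanded_var_list, or directly
   in the outermost BIND_EXPR of the saved function body.  */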
717 void
718 gimple_add_tmp_var (tree tmp)
719 {
720 gcc_assert (!TREE_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));
721
722 /* Later processing assumes that the object size is constant, which might
723 not be true at this point. Force the use of a constant upper bound in
724 this case. */
725 if (!host_integerp (DECL_SIZE_UNIT (tmp), 1))
726 force_constant_size (tmp);
727
728 DECL_CONTEXT (tmp) = current_function_decl;
729 DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;
730
731 if (gimplify_ctxp)
732 {
733 TREE_CHAIN (tmp) = gimplify_ctxp->temps;
734 gimplify_ctxp->temps = tmp;
735
736 /* Mark temporaries local within the nearest enclosing parallel. */
737 if (gimplify_omp_ctxp)
738 {
739 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
740 while (ctx && !ctx->is_parallel)
741 ctx = ctx->outer_context;
742 if (ctx)
743 omp_add_variable (ctx, tmp, GOVD_LOCAL | GOVD_SEEN);
744 }
745 }
746 else if (cfun)
747 record_vars (tmp);
748 else
749 declare_vars (tmp, DECL_SAVED_TREE (current_function_decl), false);
750 }
751
752 /* Determines whether to assign a locus to the statement STMT. */
753
754 static bool
755 should_carry_locus_p (tree stmt)
756 {
757 /* Don't emit a line note for a label. We particularly don't want to
758 emit one for the break label, since it doesn't actually correspond
759 to the beginning of the loop/switch. */
760 if (TREE_CODE (stmt) == LABEL_EXPR)
761 return false;
762
763 /* Do not annotate empty statements, since it confuses gcov. */
764 if (!TREE_SIDE_EFFECTS (stmt))
765 return false;
766
767 return true;
768 }
769
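/* Set the location of statement T to LOCUS, unless it already has a
   location or should not carry one.  */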
770 static void
771 annotate_one_with_locus (tree t, location_t locus)
772 {
773 if (EXPR_P (t) && ! EXPR_HAS_LOCATION (t) && should_carry_locus_p (t))
774 SET_EXPR_LOCATION (t, locus);
775 }
776
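/* Annotate every statement in the statement list *STMT_P with LOCUS.  */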
777 void
778 annotate_all_with_locus (tree *stmt_p, location_t locus)
779 {
780 tree_stmt_iterator i;
781
782 if (!*stmt_p)
783 return;
784
785 for (i = tsi_start (*stmt_p); !tsi_end_p (i); tsi_next (&i))
786 {
787 tree t = tsi_stmt (i);
788
789 /* Assuming we've already been gimplified, we shouldn't
790 see nested chaining constructs anymore. */
791 gcc_assert (TREE_CODE (t) != STATEMENT_LIST
792 && TREE_CODE (t) != COMPOUND_EXPR);
793
794 annotate_one_with_locus (t, locus);
795 }
796 }
797
798 /* Similar to copy_tree_r() but do not copy SAVE_EXPR or TARGET_EXPR nodes.
799 These nodes model computations that should only be done once. If we
800 were to unshare something like SAVE_EXPR(i++), the gimplification
801 process would create wrong code. */
802
803 static tree
804 mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
805 {
806 enum tree_code code = TREE_CODE (*tp);
807 /* Don't unshare types, decls, constants and SAVE_EXPR nodes. */
808 if (TREE_CODE_CLASS (code) == tcc_type
809 || TREE_CODE_CLASS (code) == tcc_declaration
810 || TREE_CODE_CLASS (code) == tcc_constant
811 || code == SAVE_EXPR || code == TARGET_EXPR
812 /* We can't do anything sensible with a BLOCK used as an expression,
813 but we also can't just die when we see it because of non-expression
814 uses. So just avert our eyes and cross our fingers. Silly Java. */
815 || code == BLOCK)
816 *walk_subtrees = 0;
817 else
818 {
819 gcc_assert (code != BIND_EXPR);
820 copy_tree_r (tp, walk_subtrees, data);
821 }
822
823 return NULL_TREE;
824 }
825
826 /* Callback for walk_tree to unshare most of the shared trees rooted at
827 *TP. If *TP has been visited already (i.e., TREE_VISITED (*TP) == 1),
828 then *TP is deep copied by calling copy_tree_r.
829
830 This unshares the same trees as copy_tree_r with the exception of
831 SAVE_EXPR nodes. These nodes model computations that should only be
832 done once. If we were to unshare something like SAVE_EXPR(i++), the
833 gimplification process would create wrong code. */
834
835 static tree
836 copy_if_shared_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
837 void *data ATTRIBUTE_UNUSED)
838 {
839 tree t = *tp;
840 enum tree_code code = TREE_CODE (t);
841
842 /* Skip types, decls, and constants. But we do want to look at their
843 types and the bounds of types. Mark them as visited so we properly
844 unmark their subtrees on the unmark pass. If we've already seen them,
845 don't look down further. */
846 if (TREE_CODE_CLASS (code) == tcc_type
847 || TREE_CODE_CLASS (code) == tcc_declaration
848 || TREE_CODE_CLASS (code) == tcc_constant)
849 {
850 if (TREE_VISITED (t))
851 *walk_subtrees = 0;
852 else
853 TREE_VISITED (t) = 1;
854 }
855
856 /* If this node has been visited already, unshare it and don't look
857 any deeper. */
858 else if (TREE_VISITED (t))
859 {
860 walk_tree (tp, mostly_copy_tree_r, NULL, NULL);
861 *walk_subtrees = 0;
862 }
863
864 /* Otherwise, mark the tree as visited and keep looking. */
865 else
866 TREE_VISITED (t) = 1;
867
868 return NULL_TREE;
869 }
870
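/* Callback for walk_tree to clear the TREE_VISITED flags set by
   copy_if_shared_r.  Stop walking below nodes that were never visited.  */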
871 static tree
872 unmark_visited_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
873 void *data ATTRIBUTE_UNUSED)
874 {
875 if (TREE_VISITED (*tp))
876 TREE_VISITED (*tp) = 0;
877 else
878 *walk_subtrees = 0;
879
880 return NULL_TREE;
881 }
882
883 /* Unshare all the trees in BODY_P, a pointer into the body of FNDECL, and the
884 bodies of any nested functions if we are unsharing the entire body of
885 FNDECL. */
886
887 static void
888 unshare_body (tree *body_p, tree fndecl)
889 {
890 struct cgraph_node *cgn = cgraph_node (fndecl);
891
892 walk_tree (body_p, copy_if_shared_r, NULL, NULL);
893 if (body_p == &DECL_SAVED_TREE (fndecl))
894 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
895 unshare_body (&DECL_SAVED_TREE (cgn->decl), cgn->decl);
896 }
897
898 /* Likewise, but mark all trees as not visited. */
899
900 static void
901 unvisit_body (tree *body_p, tree fndecl)
902 {
903 struct cgraph_node *cgn = cgraph_node (fndecl);
904
905 walk_tree (body_p, unmark_visited_r, NULL, NULL);
906 if (body_p == &DECL_SAVED_TREE (fndecl))
907 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
908 unvisit_body (&DECL_SAVED_TREE (cgn->decl), cgn->decl);
909 }
910
911 /* Unshare T and all the trees reached from T via TREE_CHAIN. */
912
913 static void
914 unshare_all_trees (tree t)
915 {
916 walk_tree (&t, copy_if_shared_r, NULL, NULL);
917 walk_tree (&t, unmark_visited_r, NULL, NULL);
918 }
919
920 /* Unconditionally make an unshared copy of EXPR. This is used for
921 stored expressions which span multiple functions, such as BINFO_VTABLE,
922 since the normal unsharing process can't tell that they're shared. */
923
924 tree
925 unshare_expr (tree expr)
926 {
927 walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
928 return expr;
929 }
930
931 /* A terser interface for building a representation of an exception
932 specification. */
933
934 tree
935 gimple_build_eh_filter (tree body, tree allowed, tree failure)
936 {
937 tree t;
938
939 /* FIXME should the allowed types go in TREE_TYPE? */
940 t = build2 (EH_FILTER_EXPR, void_type_node, allowed, NULL_TREE);
941 append_to_statement_list (failure, &EH_FILTER_FAILURE (t));
942
943 t = build2 (TRY_CATCH_EXPR, void_type_node, NULL_TREE, t);
944 append_to_statement_list (body, &TREE_OPERAND (t, 0));
945
946 return t;
947 }
948
949 \f
950 /* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
951 contain statements and have a value. Assign its value to a temporary
952 and give it void_type_node. Returns the temporary, or NULL_TREE if
953 WRAPPER was already void. */
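/* For example, if the last statement of WRAPPER computes 'a + b', it is
   rewritten as an assignment of 'a + b' to TEMP (a fresh temporary is
   created when none is supplied), WRAPPER's type becomes void, and the
   temporary is returned so the caller can use it in place of WRAPPER's
   value.  */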
954
955 tree
956 voidify_wrapper_expr (tree wrapper, tree temp)
957 {
958 if (!VOID_TYPE_P (TREE_TYPE (wrapper)))
959 {
960 tree *p, sub = wrapper;
961
962 restart:
963 /* Set p to point to the body of the wrapper. */
964 switch (TREE_CODE (sub))
965 {
966 case BIND_EXPR:
967 /* For a BIND_EXPR, the body is operand 1. */
968 p = &BIND_EXPR_BODY (sub);
969 break;
970
971 default:
972 p = &TREE_OPERAND (sub, 0);
973 break;
974 }
975
976 /* Advance to the last statement. Set all container types to void. */
977 if (TREE_CODE (*p) == STATEMENT_LIST)
978 {
979 tree_stmt_iterator i = tsi_last (*p);
980 p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
981 }
982 else
983 {
984 for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
985 {
986 TREE_SIDE_EFFECTS (*p) = 1;
987 TREE_TYPE (*p) = void_type_node;
988 }
989 }
990
991 if (p == NULL || IS_EMPTY_STMT (*p))
992 ;
993 /* Look through exception handling. */
994 else if (TREE_CODE (*p) == TRY_FINALLY_EXPR
995 || TREE_CODE (*p) == TRY_CATCH_EXPR)
996 {
997 sub = *p;
998 goto restart;
999 }
1000 /* The C++ frontend already did this for us. */
1001 else if (TREE_CODE (*p) == INIT_EXPR
1002 || TREE_CODE (*p) == TARGET_EXPR)
1003 temp = TREE_OPERAND (*p, 0);
1004 /* If we're returning a dereference, move the dereference
1005 outside the wrapper. */
1006 else if (TREE_CODE (*p) == INDIRECT_REF)
1007 {
1008 tree ptr = TREE_OPERAND (*p, 0);
1009 temp = create_tmp_var (TREE_TYPE (ptr), "retval");
1010 *p = build2 (MODIFY_EXPR, TREE_TYPE (ptr), temp, ptr);
1011 temp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (temp)), temp);
1012 /* If this is a BIND_EXPR for a const inline function, it might not
1013 have TREE_SIDE_EFFECTS set. That is no longer accurate. */
1014 TREE_SIDE_EFFECTS (wrapper) = 1;
1015 }
1016 else
1017 {
1018 if (!temp)
1019 temp = create_tmp_var (TREE_TYPE (wrapper), "retval");
1020 *p = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, *p);
1021 TREE_SIDE_EFFECTS (wrapper) = 1;
1022 }
1023
1024 TREE_TYPE (wrapper) = void_type_node;
1025 return temp;
1026 }
1027
1028 return NULL_TREE;
1029 }
1030
1031 /* Prepare calls to builtins to SAVE and RESTORE the stack as well as
1032 a temporary through which they communicate. */
1033
1034 static void
1035 build_stack_save_restore (tree *save, tree *restore)
1036 {
1037 tree save_call, tmp_var;
1038
1039 save_call =
1040 build_function_call_expr (implicit_built_in_decls[BUILT_IN_STACK_SAVE],
1041 NULL_TREE);
1042 tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
1043
1044 *save = build2 (MODIFY_EXPR, ptr_type_node, tmp_var, save_call);
1045 *restore =
1046 build_function_call_expr (implicit_built_in_decls[BUILT_IN_STACK_RESTORE],
1047 tree_cons (NULL_TREE, tmp_var, NULL_TREE));
1048 }
1049
1050 /* Gimplify a BIND_EXPR. Just voidify and recurse. */
1051
1052 static enum gimplify_status
1053 gimplify_bind_expr (tree *expr_p, tree temp, tree *pre_p)
1054 {
1055 tree bind_expr = *expr_p;
1056 bool old_save_stack = gimplify_ctxp->save_stack;
1057 tree t;
1058
1059 temp = voidify_wrapper_expr (bind_expr, temp);
1060
1061 /* Mark variables seen in this bind expr. */
1062 for (t = BIND_EXPR_VARS (bind_expr); t ; t = TREE_CHAIN (t))
1063 {
1064 if (TREE_CODE (t) == VAR_DECL)
1065 {
1066 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
1067
1068 /* Mark variable as local. */
1069 if (ctx && !is_global_var (t)
1070 && (! DECL_SEEN_IN_BIND_EXPR_P (t)
1071 || splay_tree_lookup (ctx->variables,
1072 (splay_tree_key) t) == NULL))
1073 omp_add_variable (gimplify_omp_ctxp, t, GOVD_LOCAL | GOVD_SEEN);
1074
1075 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
1076 }
1077
1078 /* Preliminarily mark non-addressed complex variables as eligible
1079 for promotion to gimple registers. We'll transform their uses
1080 as we find them. */
1081 if (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
1082 && !TREE_THIS_VOLATILE (t)
1083 && (TREE_CODE (t) == VAR_DECL && !DECL_HARD_REGISTER (t))
1084 && !needs_to_live_in_memory (t))
1085 DECL_COMPLEX_GIMPLE_REG_P (t) = 1;
1086 }
1087
1088 gimple_push_bind_expr (bind_expr);
1089 gimplify_ctxp->save_stack = false;
1090
1091 gimplify_to_stmt_list (&BIND_EXPR_BODY (bind_expr));
1092
1093 if (gimplify_ctxp->save_stack)
1094 {
1095 tree stack_save, stack_restore;
1096
1097 /* Save stack on entry and restore it on exit. Add a try_finally
1098 block to achieve this. Note that mudflap depends on the
1099 format of the emitted code: see mx_register_decls(). */
1100 build_stack_save_restore (&stack_save, &stack_restore);
1101
1102 t = build2 (TRY_FINALLY_EXPR, void_type_node,
1103 BIND_EXPR_BODY (bind_expr), NULL_TREE);
1104 append_to_statement_list (stack_restore, &TREE_OPERAND (t, 1));
1105
1106 BIND_EXPR_BODY (bind_expr) = NULL_TREE;
1107 append_to_statement_list (stack_save, &BIND_EXPR_BODY (bind_expr));
1108 append_to_statement_list (t, &BIND_EXPR_BODY (bind_expr));
1109 }
1110
1111 gimplify_ctxp->save_stack = old_save_stack;
1112 gimple_pop_bind_expr ();
1113
1114 if (temp)
1115 {
1116 *expr_p = temp;
1117 append_to_statement_list (bind_expr, pre_p);
1118 return GS_OK;
1119 }
1120 else
1121 return GS_ALL_DONE;
1122 }
1123
1124 /* Gimplify a RETURN_EXPR. If the expression to be returned is not a
1125 GIMPLE value, it is assigned to a new temporary and the statement is
1126 re-written to return the temporary.
1127
1128 PRE_P points to the list where side effects that must happen before
1129 STMT should be stored. */
1130
1131 static enum gimplify_status
1132 gimplify_return_expr (tree stmt, tree *pre_p)
1133 {
1134 tree ret_expr = TREE_OPERAND (stmt, 0);
1135 tree result_decl, result;
1136
1137 if (!ret_expr || TREE_CODE (ret_expr) == RESULT_DECL
1138 || ret_expr == error_mark_node)
1139 return GS_ALL_DONE;
1140
1141 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
1142 result_decl = NULL_TREE;
1143 else
1144 {
1145 result_decl = TREE_OPERAND (ret_expr, 0);
1146 if (TREE_CODE (result_decl) == INDIRECT_REF)
1147 /* See through a return by reference. */
1148 result_decl = TREE_OPERAND (result_decl, 0);
1149
1150 gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
1151 || TREE_CODE (ret_expr) == INIT_EXPR)
1152 && TREE_CODE (result_decl) == RESULT_DECL);
1153 }
1154
1155 /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
1156 Recall that aggregate_value_p is FALSE for any aggregate type that is
1157 returned in registers. If we're returning values in registers, then
1158 we don't want to extend the lifetime of the RESULT_DECL, particularly
1159 across another call. In addition, for those aggregates for which
1160 hard_function_value generates a PARALLEL, we'll die during normal
1161 expansion of structure assignments; there's special code in expand_return
1162 to handle this case that does not exist in expand_expr. */
1163 if (!result_decl
1164 || aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
1165 result = result_decl;
1166 else if (gimplify_ctxp->return_temp)
1167 result = gimplify_ctxp->return_temp;
1168 else
1169 {
1170 result = create_tmp_var (TREE_TYPE (result_decl), NULL);
1171
1172 /* ??? With complex control flow (usually involving abnormal edges),
1173 we can wind up warning about an uninitialized value for this. Due
1174 to how this variable is constructed and initialized, this is never
1175 true. Give up and never warn. */
1176 TREE_NO_WARNING (result) = 1;
1177
1178 gimplify_ctxp->return_temp = result;
1179 }
1180
1181 /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
1182 Then gimplify the whole thing. */
1183 if (result != result_decl)
1184 TREE_OPERAND (ret_expr, 0) = result;
1185
1186 gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);
1187
1188 /* If we didn't use a temporary, then the result is just the result_decl.
1189 Otherwise we need a simple copy. This should already be gimple. */
1190 if (result == result_decl)
1191 ret_expr = result;
1192 else
1193 ret_expr = build2 (MODIFY_EXPR, TREE_TYPE (result), result_decl, result);
1194 TREE_OPERAND (stmt, 0) = ret_expr;
1195
1196 return GS_ALL_DONE;
1197 }
1198
1199 /* Gimplifies a DECL_EXPR node *STMT_P by making any necessary allocation
1200 and initialization explicit. */
1201
1202 static enum gimplify_status
1203 gimplify_decl_expr (tree *stmt_p)
1204 {
1205 tree stmt = *stmt_p;
1206 tree decl = DECL_EXPR_DECL (stmt);
1207
1208 *stmt_p = NULL_TREE;
1209
1210 if (TREE_TYPE (decl) == error_mark_node)
1211 return GS_ERROR;
1212
1213 if ((TREE_CODE (decl) == TYPE_DECL
1214 || TREE_CODE (decl) == VAR_DECL)
1215 && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
1216 gimplify_type_sizes (TREE_TYPE (decl), stmt_p);
1217
1218 if (TREE_CODE (decl) == VAR_DECL && !DECL_EXTERNAL (decl))
1219 {
1220 tree init = DECL_INITIAL (decl);
1221
1222 if (!TREE_CONSTANT (DECL_SIZE (decl)))
1223 {
1224 /* This is a variable-sized decl. Simplify its size and mark it
1225 for deferred expansion. Note that mudflap depends on the format
1226 of the emitted code: see mx_register_decls(). */
1227 tree t, args, addr, ptr_type;
1228
1229 gimplify_one_sizepos (&DECL_SIZE (decl), stmt_p);
1230 gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), stmt_p);
1231
1232 /* All occurrences of this decl in final gimplified code will be
1233 replaced by indirection. Setting DECL_VALUE_EXPR does two
1234 things: First, it lets the rest of the gimplifier know what
1235 replacement to use. Second, it lets the debug info know
1236 where to find the value. */
1237 ptr_type = build_pointer_type (TREE_TYPE (decl));
1238 addr = create_tmp_var (ptr_type, get_name (decl));
1239 DECL_IGNORED_P (addr) = 0;
1240 t = build_fold_indirect_ref (addr);
1241 SET_DECL_VALUE_EXPR (decl, t);
1242 DECL_HAS_VALUE_EXPR_P (decl) = 1;
1243
1244 args = tree_cons (NULL, DECL_SIZE_UNIT (decl), NULL);
1245 t = built_in_decls[BUILT_IN_ALLOCA];
1246 t = build_function_call_expr (t, args);
1247 t = fold_convert (ptr_type, t);
1248 t = build2 (MODIFY_EXPR, void_type_node, addr, t);
1249
1250 gimplify_and_add (t, stmt_p);
1251
1252 /* Indicate that we need to restore the stack level when the
1253 enclosing BIND_EXPR is exited. */
1254 gimplify_ctxp->save_stack = true;
1255 }
1256
1257 if (init && init != error_mark_node)
1258 {
1259 if (!TREE_STATIC (decl))
1260 {
1261 DECL_INITIAL (decl) = NULL_TREE;
1262 init = build2 (INIT_EXPR, void_type_node, decl, init);
1263 gimplify_and_add (init, stmt_p);
1264 }
1265 else
1266 /* We must still examine initializers for static variables
1267 as they may contain a label address. */
1268 walk_tree (&init, force_labels_r, NULL, NULL);
1269 }
1270
1271 /* Some front ends do not explicitly declare all anonymous
1272 artificial variables. We compensate here by declaring the
1273 variables, though it would be better if the front ends would
1274 explicitly declare them. */
1275 if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
1276 && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
1277 gimple_add_tmp_var (decl);
1278 }
1279
1280 return GS_ALL_DONE;
1281 }
1282
1283 /* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body
1284 and replacing the LOOP_EXPR with goto, but if the loop contains an
1285 EXIT_EXPR, we need to append a label for it to jump to. */
1286
1287 static enum gimplify_status
1288 gimplify_loop_expr (tree *expr_p, tree *pre_p)
1289 {
1290 tree saved_label = gimplify_ctxp->exit_label;
1291 tree start_label = build1 (LABEL_EXPR, void_type_node, NULL_TREE);
1292 tree jump_stmt = build_and_jump (&LABEL_EXPR_LABEL (start_label));
1293
1294 append_to_statement_list (start_label, pre_p);
1295
1296 gimplify_ctxp->exit_label = NULL_TREE;
1297
1298 gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);
1299
1300 if (gimplify_ctxp->exit_label)
1301 {
1302 append_to_statement_list (jump_stmt, pre_p);
1303 *expr_p = build1 (LABEL_EXPR, void_type_node, gimplify_ctxp->exit_label);
1304 }
1305 else
1306 *expr_p = jump_stmt;
1307
1308 gimplify_ctxp->exit_label = saved_label;
1309
1310 return GS_ALL_DONE;
1311 }
1312
1313 /* Compare two case labels. Because the front end should already have
1314 made sure that case ranges do not overlap, it is enough to only compare
1315 the CASE_LOW values of each case label. */
1316
1317 static int
1318 compare_case_labels (const void *p1, const void *p2)
1319 {
1320 tree case1 = *(tree *)p1;
1321 tree case2 = *(tree *)p2;
1322
1323 return tree_int_cst_compare (CASE_LOW (case1), CASE_LOW (case2));
1324 }
1325
1326 /* Sort the case labels in LABEL_VEC in place in ascending order. */
1327
1328 void
1329 sort_case_labels (tree label_vec)
1330 {
1331 size_t len = TREE_VEC_LENGTH (label_vec);
1332 tree default_case = TREE_VEC_ELT (label_vec, len - 1);
1333
1334 if (CASE_LOW (default_case))
1335 {
1336 size_t i;
1337
1338 /* The last label in the vector should be the default case but it is
1339 not, so find the real default case and swap it into the last slot. */
1340 for (i = 0; i < len; ++i)
1341 {
1342 tree t = TREE_VEC_ELT (label_vec, i);
1343 if (!CASE_LOW (t))
1344 {
1345 default_case = t;
1346 TREE_VEC_ELT (label_vec, i) = TREE_VEC_ELT (label_vec, len - 1);
1347 TREE_VEC_ELT (label_vec, len - 1) = default_case;
1348 break;
1349 }
1350 }
1351 }
1352
1353 qsort (&TREE_VEC_ELT (label_vec, 0), len - 1, sizeof (tree),
1354 compare_case_labels);
1355 }
1356
1357 /* Gimplify a SWITCH_EXPR, and collect a TREE_VEC of the labels it can
1358 branch to. */
1359
1360 static enum gimplify_status
1361 gimplify_switch_expr (tree *expr_p, tree *pre_p)
1362 {
1363 tree switch_expr = *expr_p;
1364 enum gimplify_status ret;
1365
1366 ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL,
1367 is_gimple_val, fb_rvalue);
1368
1369 if (SWITCH_BODY (switch_expr))
1370 {
1371 VEC(tree,heap) *labels, *saved_labels;
1372 tree label_vec, default_case = NULL_TREE;
1373 size_t i, len;
1374
1375 /* If someone can be bothered to fill in the labels, they can
1376 be bothered to null out the body too. */
1377 gcc_assert (!SWITCH_LABELS (switch_expr));
1378
1379 saved_labels = gimplify_ctxp->case_labels;
1380 gimplify_ctxp->case_labels = VEC_alloc (tree, heap, 8);
1381
1382 gimplify_to_stmt_list (&SWITCH_BODY (switch_expr));
1383
1384 labels = gimplify_ctxp->case_labels;
1385 gimplify_ctxp->case_labels = saved_labels;
1386
1387 i = 0;
1388 while (i < VEC_length (tree, labels))
1389 {
1390 tree elt = VEC_index (tree, labels, i);
1391 tree low = CASE_LOW (elt);
1392 bool remove_element = FALSE;
1393
1394 if (low)
1395 {
1396 /* Discard empty ranges. */
1397 tree high = CASE_HIGH (elt);
1398 if (high && INT_CST_LT (high, low))
1399 remove_element = TRUE;
1400 }
1401 else
1402 {
1403 /* The default case must be the last label in the list. */
1404 gcc_assert (!default_case);
1405 default_case = elt;
1406 remove_element = TRUE;
1407 }
1408
1409 if (remove_element)
1410 VEC_ordered_remove (tree, labels, i);
1411 else
1412 i++;
1413 }
1414 len = i;
1415
1416 label_vec = make_tree_vec (len + 1);
1417 SWITCH_LABELS (*expr_p) = label_vec;
1418 append_to_statement_list (switch_expr, pre_p);
1419
1420 if (! default_case)
1421 {
1422 /* If the switch has no default label, add one, so that we jump
1423 around the switch body. */
1424 default_case = build3 (CASE_LABEL_EXPR, void_type_node, NULL_TREE,
1425 NULL_TREE, create_artificial_label ());
1426 append_to_statement_list (SWITCH_BODY (switch_expr), pre_p);
1427 *expr_p = build1 (LABEL_EXPR, void_type_node,
1428 CASE_LABEL (default_case));
1429 }
1430 else
1431 *expr_p = SWITCH_BODY (switch_expr);
1432
1433 for (i = 0; i < len; ++i)
1434 TREE_VEC_ELT (label_vec, i) = VEC_index (tree, labels, i);
1435 TREE_VEC_ELT (label_vec, len) = default_case;
1436
1437 VEC_free (tree, heap, labels);
1438
1439 sort_case_labels (label_vec);
1440
1441 SWITCH_BODY (switch_expr) = NULL;
1442 }
1443 else
1444 gcc_assert (SWITCH_LABELS (switch_expr));
1445
1446 return ret;
1447 }
1448
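/* Gimplify a CASE_LABEL_EXPR.  Record it in the innermost gimplify
   context that is collecting case labels (see gimplify_switch_expr) and
   replace it with a plain LABEL_EXPR for its label.  */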
1449 static enum gimplify_status
1450 gimplify_case_label_expr (tree *expr_p)
1451 {
1452 tree expr = *expr_p;
1453 struct gimplify_ctx *ctxp;
1454
1455 /* Invalid OpenMP programs can play Duff's Device type games with
1456 #pragma omp parallel. At least in the C front end, we don't
1457 detect such invalid branches until after gimplification. */
1458 for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
1459 if (ctxp->case_labels)
1460 break;
1461
1462 VEC_safe_push (tree, heap, ctxp->case_labels, expr);
1463 *expr_p = build1 (LABEL_EXPR, void_type_node, CASE_LABEL (expr));
1464 return GS_ALL_DONE;
1465 }
1466
1467 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
1468 if necessary. */
1469
1470 tree
1471 build_and_jump (tree *label_p)
1472 {
1473 if (label_p == NULL)
1474 /* If there's nowhere to jump, just fall through. */
1475 return NULL_TREE;
1476
1477 if (*label_p == NULL_TREE)
1478 {
1479 tree label = create_artificial_label ();
1480 *label_p = label;
1481 }
1482
1483 return build1 (GOTO_EXPR, void_type_node, *label_p);
1484 }
1485
1486 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
1487 This also involves building a label to jump to and communicating it to
1488 gimplify_loop_expr through gimplify_ctxp->exit_label. */
1489
1490 static enum gimplify_status
1491 gimplify_exit_expr (tree *expr_p)
1492 {
1493 tree cond = TREE_OPERAND (*expr_p, 0);
1494 tree expr;
1495
1496 expr = build_and_jump (&gimplify_ctxp->exit_label);
1497 expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
1498 *expr_p = expr;
1499
1500 return GS_OK;
1501 }
1502
1503 /* A helper function to be called via walk_tree. Mark all labels under *TP
1504 as being forced. To be called for DECL_INITIAL of static variables. */
1505
1506 tree
1507 force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1508 {
1509 if (TYPE_P (*tp))
1510 *walk_subtrees = 0;
1511 if (TREE_CODE (*tp) == LABEL_DECL)
1512 FORCED_LABEL (*tp) = 1;
1513
1514 return NULL_TREE;
1515 }
1516
1517 /* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is
1518 different from its canonical type, wrap the whole thing inside a
1519 NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
1520 type.
1521
1522 The canonical type of a COMPONENT_REF is the type of the field being
1523 referenced--unless the field is a bit-field which can be read directly
1524 in a smaller mode, in which case the canonical type is the
1525 sign-appropriate type corresponding to that mode. */
1526
1527 static void
1528 canonicalize_component_ref (tree *expr_p)
1529 {
1530 tree expr = *expr_p;
1531 tree type;
1532
1533 gcc_assert (TREE_CODE (expr) == COMPONENT_REF);
1534
1535 if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
1536 type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
1537 else
1538 type = TREE_TYPE (TREE_OPERAND (expr, 1));
1539
1540 if (TREE_TYPE (expr) != type)
1541 {
1542 tree old_type = TREE_TYPE (expr);
1543
1544 /* Set the type of the COMPONENT_REF to the underlying type. */
1545 TREE_TYPE (expr) = type;
1546
1547 /* And wrap the whole thing inside a NOP_EXPR. */
1548 expr = build1 (NOP_EXPR, old_type, expr);
1549
1550 *expr_p = expr;
1551 }
1552 }
1553
1554 /* If a NOP conversion is changing a pointer to array of foo to a pointer
1555 to foo, embed that change in the ADDR_EXPR by converting
1556 T array[U];
1557 (T *)&array
1558 ==>
1559 &array[L]
1560 where L is the lower bound. For simplicity, only do this for constant
1561 lower bound. */
1562
1563 static void
1564 canonicalize_addr_expr (tree *expr_p)
1565 {
1566 tree expr = *expr_p;
1567 tree ctype = TREE_TYPE (expr);
1568 tree addr_expr = TREE_OPERAND (expr, 0);
1569 tree atype = TREE_TYPE (addr_expr);
1570 tree dctype, datype, ddatype, otype, obj_expr;
1571
1572 /* Both cast and addr_expr types should be pointers. */
1573 if (!POINTER_TYPE_P (ctype) || !POINTER_TYPE_P (atype))
1574 return;
1575
1576 /* The addr_expr type should be a pointer to an array. */
1577 datype = TREE_TYPE (atype);
1578 if (TREE_CODE (datype) != ARRAY_TYPE)
1579 return;
1580
1581 /* Both cast and addr_expr types should address the same object type. */
1582 dctype = TREE_TYPE (ctype);
1583 ddatype = TREE_TYPE (datype);
1584 if (!lang_hooks.types_compatible_p (ddatype, dctype))
1585 return;
1586
1587 /* The addr_expr and the object type should match. */
1588 obj_expr = TREE_OPERAND (addr_expr, 0);
1589 otype = TREE_TYPE (obj_expr);
1590 if (!lang_hooks.types_compatible_p (otype, datype))
1591 return;
1592
1593 /* The lower bound and element sizes must be constant. */
1594 if (!TYPE_SIZE_UNIT (dctype)
1595 || TREE_CODE (TYPE_SIZE_UNIT (dctype)) != INTEGER_CST
1596 || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
1597 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
1598 return;
1599
1600 /* All checks succeeded. Build a new node to merge the cast. */
1601 *expr_p = build4 (ARRAY_REF, dctype, obj_expr,
1602 TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
1603 TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
1604 size_binop (EXACT_DIV_EXPR, TYPE_SIZE_UNIT (dctype),
1605 size_int (TYPE_ALIGN_UNIT (dctype))));
1606 *expr_p = build1 (ADDR_EXPR, ctype, *expr_p);
1607 }
1608
1609 /* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions
1610 underneath as appropriate. */
1611
1612 static enum gimplify_status
1613 gimplify_conversion (tree *expr_p)
1614 {
1615 gcc_assert (TREE_CODE (*expr_p) == NOP_EXPR
1616 || TREE_CODE (*expr_p) == CONVERT_EXPR);
1617
1618 /* Then strip away all but the outermost conversion. */
1619 STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));
1620
1621 /* And remove the outermost conversion if it's useless. */
1622 if (tree_ssa_useless_type_conversion (*expr_p))
1623 *expr_p = TREE_OPERAND (*expr_p, 0);
1624
1625 /* If we still have a conversion at the toplevel,
1626 then canonicalize some constructs. */
1627 if (TREE_CODE (*expr_p) == NOP_EXPR || TREE_CODE (*expr_p) == CONVERT_EXPR)
1628 {
1629 tree sub = TREE_OPERAND (*expr_p, 0);
1630
1631 /* If a NOP conversion is changing the type of a COMPONENT_REF
1632 expression, then canonicalize its type now in order to expose more
1633 redundant conversions. */
1634 if (TREE_CODE (sub) == COMPONENT_REF)
1635 canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));
1636
1637 /* If a NOP conversion is changing a pointer to array of foo
1638 to a pointer to foo, embed that change in the ADDR_EXPR. */
1639 else if (TREE_CODE (sub) == ADDR_EXPR)
1640 canonicalize_addr_expr (expr_p);
1641 }
1642
1643 return GS_OK;
1644 }
1645
1646 /* Gimplify a VAR_DECL or PARM_DECL. Returns GS_OK if we expanded a
1647 DECL_VALUE_EXPR, and it's worth re-examining things. */
1648
1649 static enum gimplify_status
1650 gimplify_var_or_parm_decl (tree *expr_p)
1651 {
1652 tree decl = *expr_p;
1653
1654 /* ??? If this is a local variable, and it has not been seen in any
1655 outer BIND_EXPR, then it's probably the result of a duplicate
1656 declaration, for which we've already issued an error. It would
1657 be really nice if the front end wouldn't leak these at all.
1658 Currently the only known culprit is C++ destructors, as seen
1659 in g++.old-deja/g++.jason/binding.C. */
1660 if (TREE_CODE (decl) == VAR_DECL
1661 && !DECL_SEEN_IN_BIND_EXPR_P (decl)
1662 && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
1663 && decl_function_context (decl) == current_function_decl)
1664 {
1665 gcc_assert (errorcount || sorrycount);
1666 return GS_ERROR;
1667 }
1668
1669 /* When within an OpenMP context, notice uses of variables. */
1670 if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
1671 return GS_ALL_DONE;
1672
1673 /* If the decl is an alias for another expression, substitute it now. */
1674 if (DECL_HAS_VALUE_EXPR_P (decl))
1675 {
1676 *expr_p = unshare_expr (DECL_VALUE_EXPR (decl));
1677 return GS_OK;
1678 }
1679
1680 return GS_ALL_DONE;
1681 }
1682
1683
1684 /* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
1685 node pointed to by EXPR_P.
1686
1687 compound_lval
1688 : min_lval '[' val ']'
1689 | min_lval '.' ID
1690 | compound_lval '[' val ']'
1691 | compound_lval '.' ID
1692
1693 This is not part of the original SIMPLE definition, which separates
1694 array and member references, but it seems reasonable to handle them
1695 together. Also, this way we don't run into problems with union
1696 aliasing; gcc requires that for accesses through a union to alias, the
1697 union reference must be explicit, which was not always the case when we
1698 were splitting up array and member refs.
1699
1700 PRE_P points to the list where side effects that must happen before
1701 *EXPR_P should be stored.
1702
1703 POST_P points to the list where side effects that must happen after
1704 *EXPR_P should be stored. */
1705
1706 static enum gimplify_status
1707 gimplify_compound_lval (tree *expr_p, tree *pre_p,
1708 tree *post_p, fallback_t fallback)
1709 {
1710 tree *p;
1711 VEC(tree,heap) *stack;
1712 enum gimplify_status ret = GS_OK, tret;
1713 int i;
1714
1715 /* Create a stack of the subexpressions so later we can walk them in
1716 order from inner to outer. */
1717 stack = VEC_alloc (tree, heap, 10);
1718
1719 /* We can handle anything that get_inner_reference can deal with. */
1720 for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
1721 {
1722 restart:
1723 /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs. */
1724 if (TREE_CODE (*p) == INDIRECT_REF)
1725 *p = fold_indirect_ref (*p);
1726
1727 if (handled_component_p (*p))
1728 ;
1729 /* Expand DECL_VALUE_EXPR now. In some cases that may expose
1730 additional COMPONENT_REFs. */
1731 else if ((TREE_CODE (*p) == VAR_DECL || TREE_CODE (*p) == PARM_DECL)
1732 && gimplify_var_or_parm_decl (p) == GS_OK)
1733 goto restart;
1734 else
1735 break;
1736
1737 VEC_safe_push (tree, heap, stack, *p);
1738 }
1739
1740 gcc_assert (VEC_length (tree, stack));
1741
1742 /* Now STACK is a stack of pointers to all the refs we've walked through
1743 and P points to the innermost expression.
1744
1745 Java requires that we elaborate nodes in source order. That
1746 means we must gimplify the inner expression followed by each of
1747 the indices, in order. But we can't gimplify the inner
1748 expression until we deal with any variable bounds, sizes, or
1749 positions in order to deal with PLACEHOLDER_EXPRs.
1750
1751 So we do this in three steps. First we deal with the annotations
1752 for any variables in the components, then we gimplify the base,
1753 then we gimplify any indices, from left to right. */
1754 for (i = VEC_length (tree, stack) - 1; i >= 0; i--)
1755 {
1756 tree t = VEC_index (tree, stack, i);
1757
1758 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
1759 {
1760 /* Gimplify the low bound and element type size and put them into
1761 the ARRAY_REF. If these values are set, they have already been
1762 gimplified. */
1763 if (!TREE_OPERAND (t, 2))
1764 {
1765 tree low = unshare_expr (array_ref_low_bound (t));
1766 if (!is_gimple_min_invariant (low))
1767 {
1768 TREE_OPERAND (t, 2) = low;
1769 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
1770 is_gimple_formal_tmp_reg, fb_rvalue);
1771 ret = MIN (ret, tret);
1772 }
1773 }
1774
1775 if (!TREE_OPERAND (t, 3))
1776 {
1777 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
1778 tree elmt_size = unshare_expr (array_ref_element_size (t));
1779 tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));
1780
1781 /* Divide the element size by the alignment of the element
1782 type (above). */
1783 elmt_size = size_binop (EXACT_DIV_EXPR, elmt_size, factor);
1784
1785 if (!is_gimple_min_invariant (elmt_size))
1786 {
1787 TREE_OPERAND (t, 3) = elmt_size;
1788 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
1789 is_gimple_formal_tmp_reg, fb_rvalue);
1790 ret = MIN (ret, tret);
1791 }
1792 }
1793 }
1794 else if (TREE_CODE (t) == COMPONENT_REF)
1795 {
1796 /* Set the field offset into T and gimplify it. */
1797 if (!TREE_OPERAND (t, 2))
1798 {
1799 tree offset = unshare_expr (component_ref_field_offset (t));
1800 tree field = TREE_OPERAND (t, 1);
1801 tree factor
1802 = size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);
1803
1804 /* Divide the offset by its alignment. */
1805 offset = size_binop (EXACT_DIV_EXPR, offset, factor);
1806
1807 if (!is_gimple_min_invariant (offset))
1808 {
1809 TREE_OPERAND (t, 2) = offset;
1810 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
1811 is_gimple_formal_tmp_reg, fb_rvalue);
1812 ret = MIN (ret, tret);
1813 }
1814 }
1815 }
1816 }
1817
1818 /* Step 2 is to gimplify the base expression. Make sure lvalue is set
1819 so as to match the min_lval predicate. Failure to do so may result
1820 in the creation of large aggregate temporaries. */
1821 tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
1822 fallback | fb_lvalue);
1823 ret = MIN (ret, tret);
1824
1825 /* And finally, the indices and operands to BIT_FIELD_REF. During this
1826 loop we also remove any useless conversions. */
1827 for (; VEC_length (tree, stack) > 0; )
1828 {
1829 tree t = VEC_pop (tree, stack);
1830
1831 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
1832 {
1833 /* Gimplify the dimension.
1834 Temporary fix for gcc.c-torture/execute/20040313-1.c.
1835 Gimplify non-constant array indices into a temporary
1836 variable.
1837 FIXME - The real fix is to gimplify post-modify
1838 expressions into a minimal gimple lvalue. However, that
1839 exposes bugs in alias analysis. The alias analyzer does
1840 not handle &PTR->FIELD very well. Will fix after the
1841 branch is merged into mainline (dnovillo 2004-05-03). */
1842 if (!is_gimple_min_invariant (TREE_OPERAND (t, 1)))
1843 {
1844 tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
1845 is_gimple_formal_tmp_reg, fb_rvalue);
1846 ret = MIN (ret, tret);
1847 }
1848 }
1849 else if (TREE_CODE (t) == BIT_FIELD_REF)
1850 {
1851 tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
1852 is_gimple_val, fb_rvalue);
1853 ret = MIN (ret, tret);
1854 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
1855 is_gimple_val, fb_rvalue);
1856 ret = MIN (ret, tret);
1857 }
1858
1859 STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));
1860
1861 /* The innermost expression P may have originally had TREE_SIDE_EFFECTS
1862 set which would have caused all the outer expressions in EXPR_P
1863 leading to P to also have had TREE_SIDE_EFFECTS set. */
1864 recalculate_side_effects (t);
1865 }
1866
1867 tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval, fallback);
1868 ret = MIN (ret, tret);
1869
1870 /* If the outermost expression is a COMPONENT_REF, canonicalize its type. */
1871 if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
1872 {
1873 canonicalize_component_ref (expr_p);
1874 ret = MIN (ret, GS_OK);
1875 }
1876
1877 VEC_free (tree, heap, stack);
1878
1879 return ret;
1880 }
1881
1882 /* Gimplify the self modifying expression pointed to by EXPR_P
1883 (++, --, +=, -=).
1884
1885 PRE_P points to the list where side effects that must happen before
1886 *EXPR_P should be stored.
1887
1888 POST_P points to the list where side effects that must happen after
1889 *EXPR_P should be stored.
1890
1891 WANT_VALUE is nonzero iff we want to use the value of this expression
1892 in another expression. */
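/* For illustration (names are made up), "b = a++;" is gimplified roughly as

     b = a;
     a = a + 1;   <-- emitted on the post queue

   while the prefix form "b = ++a;" simply becomes "a = a + 1; b = a;" with
   no post-queue entry.  */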
1893
1894 static enum gimplify_status
1895 gimplify_self_mod_expr (tree *expr_p, tree *pre_p, tree *post_p,
1896 bool want_value)
1897 {
1898 enum tree_code code;
1899 tree lhs, lvalue, rhs, t1;
1900 bool postfix;
1901 enum tree_code arith_code;
1902 enum gimplify_status ret;
1903
1904 code = TREE_CODE (*expr_p);
1905
1906 gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
1907 || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);
1908
1909 /* Prefix or postfix? */
1910 if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
1911 /* Faster to treat as prefix if result is not used. */
1912 postfix = want_value;
1913 else
1914 postfix = false;
1915
1916 /* Add or subtract? */
1917 if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
1918 arith_code = PLUS_EXPR;
1919 else
1920 arith_code = MINUS_EXPR;
1921
1922 /* Gimplify the LHS into a GIMPLE lvalue. */
1923 lvalue = TREE_OPERAND (*expr_p, 0);
1924 ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
1925 if (ret == GS_ERROR)
1926 return ret;
1927
1928 /* Extract the operands to the arithmetic operation. */
1929 lhs = lvalue;
1930 rhs = TREE_OPERAND (*expr_p, 1);
1931
1932 /* For a postfix operator, we evaluate the LHS to an rvalue and then use
1933 that as the result value and in the postqueue operation. */
1934 if (postfix)
1935 {
1936 ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
1937 if (ret == GS_ERROR)
1938 return ret;
1939 }
1940
1941 t1 = build2 (arith_code, TREE_TYPE (*expr_p), lhs, rhs);
1942 t1 = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
1943
1944 if (postfix)
1945 {
1946 gimplify_and_add (t1, post_p);
1947 *expr_p = lhs;
1948 return GS_ALL_DONE;
1949 }
1950 else
1951 {
1952 *expr_p = t1;
1953 return GS_OK;
1954 }
1955 }
1956
1957 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
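/* For example, copying an object of type "char[n]" has a non-constant size,
   so the expression is wrapped as WITH_SIZE_EXPR <expr, n> and the size
   travels with it down to the expander.  (Schematic; the size used is
   TYPE_SIZE_UNIT with any PLACEHOLDER_EXPRs substituted.)  */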
1958
1959 static void
1960 maybe_with_size_expr (tree *expr_p)
1961 {
1962 tree expr = *expr_p;
1963 tree type = TREE_TYPE (expr);
1964 tree size;
1965
1966 /* If we've already wrapped this or the type is error_mark_node, we can't do
1967 anything. */
1968 if (TREE_CODE (expr) == WITH_SIZE_EXPR
1969 || type == error_mark_node)
1970 return;
1971
1972 /* If the size isn't known or is a constant, we have nothing to do. */
1973 size = TYPE_SIZE_UNIT (type);
1974 if (!size || TREE_CODE (size) == INTEGER_CST)
1975 return;
1976
1977 /* Otherwise, make a WITH_SIZE_EXPR. */
1978 size = unshare_expr (size);
1979 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
1980 *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
1981 }
1982
1983 /* Subroutine of gimplify_call_expr: Gimplify a single argument. */
1984
1985 static enum gimplify_status
1986 gimplify_arg (tree *expr_p, tree *pre_p)
1987 {
1988 bool (*test) (tree);
1989 fallback_t fb;
1990
1991 /* In general, we allow lvalues for function arguments to avoid
1992 extra overhead of copying large aggregates out of even larger
1993 aggregates into temporaries only to copy the temporaries to
1994 the argument list. Make optimizers happy by pulling out to
1995 temporaries those types that fit in registers. */
1996 if (is_gimple_reg_type (TREE_TYPE (*expr_p)))
1997 test = is_gimple_val, fb = fb_rvalue;
1998 else
1999 test = is_gimple_lvalue, fb = fb_either;
2000
2001 /* If this is a variable sized type, we must remember the size. */
2002 maybe_with_size_expr (expr_p);
2003
2004 /* There is a sequence point before a function call. Side effects in
2005 the argument list must occur before the actual call. So, when
2006 gimplifying arguments, force gimplify_expr to use an internal
2007 post queue which is then appended to the end of PRE_P. */
2008 return gimplify_expr (expr_p, pre_p, NULL, test, fb);
2009 }
2010
2011 /* Gimplify the CALL_EXPR node pointed to by EXPR_P. PRE_P points to the
2012 list where side effects that must happen before *EXPR_P should be stored.
2013 WANT_VALUE is true if the result of the call is desired. */
2014
2015 static enum gimplify_status
2016 gimplify_call_expr (tree *expr_p, tree *pre_p, bool want_value)
2017 {
2018 tree decl;
2019 tree arglist;
2020 enum gimplify_status ret;
2021
2022 gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);
2023
2024 /* For reliable diagnostics during inlining, it is necessary that
2025 every call_expr be annotated with file and line. */
2026 if (! EXPR_HAS_LOCATION (*expr_p))
2027 SET_EXPR_LOCATION (*expr_p, input_location);
2028
2029 /* This may be a call to a builtin function.
2030
2031 Builtin function calls may be transformed into different
2032 (and more efficient) builtin function calls under certain
2033 circumstances. Unfortunately, gimplification can muck things
2034 up enough that the builtin expanders are not aware that certain
2035 transformations are still valid.
2036
2037 So we attempt transformation/gimplification of the call before
2038 we gimplify the CALL_EXPR. At this time we do not manage to
2039 transform all calls in the same manner as the expanders do, but
2040 we do transform most of them. */
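/* For example, a call such as strlen ("hi") may be folded to the constant 2
   at this point, and printf ("foo\n") may become puts ("foo"), before the
   call's operands are gimplified.  Exactly which transformations apply is
   up to fold_builtin.  */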
2041 decl = get_callee_fndecl (*expr_p);
2042 if (decl && DECL_BUILT_IN (decl))
2043 {
2044 tree arglist = TREE_OPERAND (*expr_p, 1);
2045 tree new = fold_builtin (decl, arglist, !want_value);
2046
2047 if (new && new != *expr_p)
2048 {
2049 /* There was a transformation of this call which computes the
2050 same value, but in a more efficient way. Return and try
2051 again. */
2052 *expr_p = new;
2053 return GS_OK;
2054 }
2055
2056 if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
2057 && DECL_FUNCTION_CODE (decl) == BUILT_IN_VA_START)
2058 {
2059 if (!arglist || !TREE_CHAIN (arglist))
2060 {
2061 error ("too few arguments to function %<va_start%>");
2062 *expr_p = build_empty_stmt ();
2063 return GS_OK;
2064 }
2065
2066 if (fold_builtin_next_arg (TREE_CHAIN (arglist)))
2067 {
2068 *expr_p = build_empty_stmt ();
2069 return GS_OK;
2070 }
2071 /* Avoid gimplifying the second argument to va_start, which needs
2072 to be the plain PARM_DECL. */
2073 return gimplify_arg (&TREE_VALUE (TREE_OPERAND (*expr_p, 1)), pre_p);
2074 }
2075 }
2076
2077 /* There is a sequence point before the call, so any side effects in
2078 the calling expression must occur before the actual call. Force
2079 gimplify_expr to use an internal post queue. */
2080 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, NULL,
2081 is_gimple_call_addr, fb_rvalue);
2082
2083 if (PUSH_ARGS_REVERSED)
2084 TREE_OPERAND (*expr_p, 1) = nreverse (TREE_OPERAND (*expr_p, 1));
2085 for (arglist = TREE_OPERAND (*expr_p, 1); arglist;
2086 arglist = TREE_CHAIN (arglist))
2087 {
2088 enum gimplify_status t;
2089
2090 t = gimplify_arg (&TREE_VALUE (arglist), pre_p);
2091
2092 if (t == GS_ERROR)
2093 ret = GS_ERROR;
2094 }
2095 if (PUSH_ARGS_REVERSED)
2096 TREE_OPERAND (*expr_p, 1) = nreverse (TREE_OPERAND (*expr_p, 1));
2097
2098 /* Try this again in case gimplification exposed something. */
2099 if (ret != GS_ERROR)
2100 {
2101 decl = get_callee_fndecl (*expr_p);
2102 if (decl && DECL_BUILT_IN (decl))
2103 {
2104 tree arglist = TREE_OPERAND (*expr_p, 1);
2105 tree new = fold_builtin (decl, arglist, !want_value);
2106
2107 if (new && new != *expr_p)
2108 {
2109 /* There was a transformation of this call which computes the
2110 same value, but in a more efficient way. Return and try
2111 again. */
2112 *expr_p = new;
2113 return GS_OK;
2114 }
2115 }
2116 }
2117
2118 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on the
2119 CALL_EXPR itself. This allows us to eliminate redundant or useless
2120 calls to "const" functions. */
2121 if (TREE_CODE (*expr_p) == CALL_EXPR
2122 && (call_expr_flags (*expr_p) & (ECF_CONST | ECF_PURE)))
2123 TREE_SIDE_EFFECTS (*expr_p) = 0;
2124
2125 return ret;
2126 }
2127
2128 /* Handle shortcut semantics in the predicate operand of a COND_EXPR by
2129 rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
2130
2131 TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
2132 condition is true or false, respectively. If null, we should generate
2133 our own to skip over the evaluation of this specific expression.
2134
2135 This function is the tree equivalent of do_jump.
2136
2137 shortcut_cond_r should only be called by shortcut_cond_expr. */
2138
2139 static tree
2140 shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p)
2141 {
2142 tree local_label = NULL_TREE;
2143 tree t, expr = NULL;
2144
2145 /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
2146 retain the shortcut semantics. Just insert the gotos here;
2147 shortcut_cond_expr will append the real blocks later. */
2148 if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
2149 {
2150 /* Turn if (a && b) into
2151
2152 if (a); else goto no;
2153 if (b) goto yes; else goto no;
2154 (no:) */
2155
2156 if (false_label_p == NULL)
2157 false_label_p = &local_label;
2158
2159 t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p);
2160 append_to_statement_list (t, &expr);
2161
2162 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
2163 false_label_p);
2164 append_to_statement_list (t, &expr);
2165 }
2166 else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
2167 {
2168 /* Turn if (a || b) into
2169
2170 if (a) goto yes;
2171 if (b) goto yes; else goto no;
2172 (yes:) */
2173
2174 if (true_label_p == NULL)
2175 true_label_p = &local_label;
2176
2177 t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL);
2178 append_to_statement_list (t, &expr);
2179
2180 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
2181 false_label_p);
2182 append_to_statement_list (t, &expr);
2183 }
2184 else if (TREE_CODE (pred) == COND_EXPR)
2185 {
2186 /* As long as we're messing with gotos, turn if (a ? b : c) into
2187 if (a)
2188 if (b) goto yes; else goto no;
2189 else
2190 if (c) goto yes; else goto no; */
2191 expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
2192 shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
2193 false_label_p),
2194 shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
2195 false_label_p));
2196 }
2197 else
2198 {
2199 expr = build3 (COND_EXPR, void_type_node, pred,
2200 build_and_jump (true_label_p),
2201 build_and_jump (false_label_p));
2202 }
2203
2204 if (local_label)
2205 {
2206 t = build1 (LABEL_EXPR, void_type_node, local_label);
2207 append_to_statement_list (t, &expr);
2208 }
2209
2210 return expr;
2211 }
2212
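/* Rewrite the COND_EXPR EXPR so that any TRUTH_ANDIF_EXPR or
   TRUTH_ORIF_EXPR in its predicate is expanded into explicit gotos while
   preserving shortcut semantics.  For example (schematically),

     if (a && b) c; else d;

   becomes

     if (a); else goto no;
     if (b); else goto no;
     c; goto end;
     no: d;
     end:

   Simpler restructurings are tried first when one arm is empty.  */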
2213 static tree
2214 shortcut_cond_expr (tree expr)
2215 {
2216 tree pred = TREE_OPERAND (expr, 0);
2217 tree then_ = TREE_OPERAND (expr, 1);
2218 tree else_ = TREE_OPERAND (expr, 2);
2219 tree true_label, false_label, end_label, t;
2220 tree *true_label_p;
2221 tree *false_label_p;
2222 bool emit_end, emit_false, jump_over_else;
2223 bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
2224 bool else_se = else_ && TREE_SIDE_EFFECTS (else_);
2225
2226 /* First do simple transformations. */
2227 if (!else_se)
2228 {
2229 /* If there is no 'else', turn (a && b) into if (a) if (b). */
2230 while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
2231 {
2232 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
2233 then_ = shortcut_cond_expr (expr);
2234 then_se = then_ && TREE_SIDE_EFFECTS (then_);
2235 pred = TREE_OPERAND (pred, 0);
2236 expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
2237 }
2238 }
2239 if (!then_se)
2240 {
2241 /* If there is no 'then', turn
2242 if (a || b); else d
2243 into
2244 if (a); else if (b); else d. */
2245 while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
2246 {
2247 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
2248 else_ = shortcut_cond_expr (expr);
2249 else_se = else_ && TREE_SIDE_EFFECTS (else_);
2250 pred = TREE_OPERAND (pred, 0);
2251 expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
2252 }
2253 }
2254
2255 /* If we're done, great. */
2256 if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
2257 && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
2258 return expr;
2259
2260 /* Otherwise we need to mess with gotos. Change
2261 if (a) c; else d;
2262 to
2263 if (a); else goto no;
2264 c; goto end;
2265 no: d; end:
2266 and recursively gimplify the condition. */
2267
2268 true_label = false_label = end_label = NULL_TREE;
2269
2270 /* If our arms just jump somewhere, hijack those labels so we don't
2271 generate jumps to jumps. */
2272
2273 if (then_
2274 && TREE_CODE (then_) == GOTO_EXPR
2275 && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL)
2276 {
2277 true_label = GOTO_DESTINATION (then_);
2278 then_ = NULL;
2279 then_se = false;
2280 }
2281
2282 if (else_
2283 && TREE_CODE (else_) == GOTO_EXPR
2284 && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL)
2285 {
2286 false_label = GOTO_DESTINATION (else_);
2287 else_ = NULL;
2288 else_se = false;
2289 }
2290
2291 /* If we aren't hijacking a label for the 'then' branch, it falls through. */
2292 if (true_label)
2293 true_label_p = &true_label;
2294 else
2295 true_label_p = NULL;
2296
2297 /* The 'else' branch also needs a label if it contains interesting code. */
2298 if (false_label || else_se)
2299 false_label_p = &false_label;
2300 else
2301 false_label_p = NULL;
2302
2303 /* If there was nothing else in our arms, just forward the label(s). */
2304 if (!then_se && !else_se)
2305 return shortcut_cond_r (pred, true_label_p, false_label_p);
2306
2307 /* If our last subexpression already has a terminal label, reuse it. */
2308 if (else_se)
2309 expr = expr_last (else_);
2310 else if (then_se)
2311 expr = expr_last (then_);
2312 else
2313 expr = NULL;
2314 if (expr && TREE_CODE (expr) == LABEL_EXPR)
2315 end_label = LABEL_EXPR_LABEL (expr);
2316
2317 /* If we don't care about jumping to the 'else' branch, jump to the end
2318 if the condition is false. */
2319 if (!false_label_p)
2320 false_label_p = &end_label;
2321
2322 /* We only want to emit these labels if we aren't hijacking them. */
2323 emit_end = (end_label == NULL_TREE);
2324 emit_false = (false_label == NULL_TREE);
2325
2326 /* We only emit the jump over the else clause if we have to--if the
2327 then clause may fall through. Otherwise we can wind up with a
2328 useless jump and a useless label at the end of gimplified code,
2329 which will cause us to think that this conditional as a whole
2330 falls through even if it doesn't. If we then inline a function
2331 which ends with such a condition, that can cause us to issue an
2332 inappropriate warning about control reaching the end of a
2333 non-void function. */
2334 jump_over_else = block_may_fallthru (then_);
2335
2336 pred = shortcut_cond_r (pred, true_label_p, false_label_p);
2337
2338 expr = NULL;
2339 append_to_statement_list (pred, &expr);
2340
2341 append_to_statement_list (then_, &expr);
2342 if (else_se)
2343 {
2344 if (jump_over_else)
2345 {
2346 t = build_and_jump (&end_label);
2347 append_to_statement_list (t, &expr);
2348 }
2349 if (emit_false)
2350 {
2351 t = build1 (LABEL_EXPR, void_type_node, false_label);
2352 append_to_statement_list (t, &expr);
2353 }
2354 append_to_statement_list (else_, &expr);
2355 }
2356 if (emit_end && end_label)
2357 {
2358 t = build1 (LABEL_EXPR, void_type_node, end_label);
2359 append_to_statement_list (t, &expr);
2360 }
2361
2362 return expr;
2363 }
2364
2365 /* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */
2366
2367 tree
2368 gimple_boolify (tree expr)
2369 {
2370 tree type = TREE_TYPE (expr);
2371
2372 if (TREE_CODE (type) == BOOLEAN_TYPE)
2373 return expr;
2374
2375 switch (TREE_CODE (expr))
2376 {
2377 case TRUTH_AND_EXPR:
2378 case TRUTH_OR_EXPR:
2379 case TRUTH_XOR_EXPR:
2380 case TRUTH_ANDIF_EXPR:
2381 case TRUTH_ORIF_EXPR:
2382 /* Also boolify the arguments of truth exprs. */
2383 TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
2384 /* FALLTHRU */
2385
2386 case TRUTH_NOT_EXPR:
2387 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
2388 /* FALLTHRU */
2389
2390 case EQ_EXPR: case NE_EXPR:
2391 case LE_EXPR: case GE_EXPR: case LT_EXPR: case GT_EXPR:
2392 /* These expressions always produce boolean results. */
2393 TREE_TYPE (expr) = boolean_type_node;
2394 return expr;
2395
2396 default:
2397 /* Other expressions that get here must have boolean values, but
2398 might need to be converted to the appropriate mode. */
2399 return fold_convert (boolean_type_node, expr);
2400 }
2401 }
2402
2403 /* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
2404 into
2405
2406 if (p) if (p)
2407 t1 = a; a;
2408 else or else
2409 t1 = b; b;
2410 t1;
2411
2412 The second form is used when *EXPR_P is of type void.
2413
2414 TARGET is the tree for T1 above.
2415
2416 PRE_P points to the list where side effects that must happen before
2417 *EXPR_P should be stored. */
2418
2419 static enum gimplify_status
2420 gimplify_cond_expr (tree *expr_p, tree *pre_p, fallback_t fallback)
2421 {
2422 tree expr = *expr_p;
2423 tree tmp, tmp2, type;
2424 enum gimplify_status ret;
2425
2426 type = TREE_TYPE (expr);
2427
2428 /* If this COND_EXPR has a value, copy the values into a temporary within
2429 the arms. */
2430 if (! VOID_TYPE_P (type))
2431 {
2432 tree result;
2433
2434 if ((fallback & fb_lvalue) == 0)
2435 {
2436 result = tmp2 = tmp = create_tmp_var (TREE_TYPE (expr), "iftmp");
2437 ret = GS_ALL_DONE;
2438 }
2439 else
2440 {
2441 tree type = build_pointer_type (TREE_TYPE (expr));
2442
2443 if (TREE_TYPE (TREE_OPERAND (expr, 1)) != void_type_node)
2444 TREE_OPERAND (expr, 1) =
2445 build_fold_addr_expr (TREE_OPERAND (expr, 1));
2446
2447 if (TREE_TYPE (TREE_OPERAND (expr, 2)) != void_type_node)
2448 TREE_OPERAND (expr, 2) =
2449 build_fold_addr_expr (TREE_OPERAND (expr, 2));
2450
2451 tmp2 = tmp = create_tmp_var (type, "iftmp");
2452
2453 expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (expr, 0),
2454 TREE_OPERAND (expr, 1), TREE_OPERAND (expr, 2));
2455
2456 result = build_fold_indirect_ref (tmp);
2457 ret = GS_ALL_DONE;
2458 }
2459
2460 /* Build the then clause, 't1 = a;'. But don't build an assignment
2461 if this branch is void; in C++ it can be, if it's a throw. */
2462 if (TREE_TYPE (TREE_OPERAND (expr, 1)) != void_type_node)
2463 TREE_OPERAND (expr, 1)
2464 = build2 (MODIFY_EXPR, void_type_node, tmp, TREE_OPERAND (expr, 1));
2465
2466 /* Build the else clause, 't1 = b;'. */
2467 if (TREE_TYPE (TREE_OPERAND (expr, 2)) != void_type_node)
2468 TREE_OPERAND (expr, 2)
2469 = build2 (MODIFY_EXPR, void_type_node, tmp2, TREE_OPERAND (expr, 2));
2470
2471 TREE_TYPE (expr) = void_type_node;
2472 recalculate_side_effects (expr);
2473
2474 /* Move the COND_EXPR to the prequeue. */
2475 gimplify_and_add (expr, pre_p);
2476
2477 *expr_p = result;
2478 return ret;
2479 }
2480
2481 /* Make sure the condition has BOOLEAN_TYPE. */
2482 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
2483
2484 /* Break apart && and || conditions. */
2485 if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
2486 || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
2487 {
2488 expr = shortcut_cond_expr (expr);
2489
2490 if (expr != *expr_p)
2491 {
2492 *expr_p = expr;
2493
2494 /* We can't rely on gimplify_expr to re-gimplify the expanded
2495 form properly, as cleanups might cause the target labels to be
2496 wrapped in a TRY_FINALLY_EXPR. To prevent that, we need to
2497 set up a conditional context. */
2498 gimple_push_condition ();
2499 gimplify_stmt (expr_p);
2500 gimple_pop_condition (pre_p);
2501
2502 return GS_ALL_DONE;
2503 }
2504 }
2505
2506 /* Now do the normal gimplification. */
2507 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL,
2508 is_gimple_condexpr, fb_rvalue);
2509
2510 gimple_push_condition ();
2511
2512 gimplify_to_stmt_list (&TREE_OPERAND (expr, 1));
2513 gimplify_to_stmt_list (&TREE_OPERAND (expr, 2));
2514 recalculate_side_effects (expr);
2515
2516 gimple_pop_condition (pre_p);
2517
2518 if (ret == GS_ERROR)
2519 ;
2520 else if (TREE_SIDE_EFFECTS (TREE_OPERAND (expr, 1)))
2521 ret = GS_ALL_DONE;
2522 else if (TREE_SIDE_EFFECTS (TREE_OPERAND (expr, 2)))
2523 /* Rewrite "if (a); else b" to "if (!a) b" */
2524 {
2525 TREE_OPERAND (expr, 0) = invert_truthvalue (TREE_OPERAND (expr, 0));
2526 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL,
2527 is_gimple_condexpr, fb_rvalue);
2528
2529 tmp = TREE_OPERAND (expr, 1);
2530 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 2);
2531 TREE_OPERAND (expr, 2) = tmp;
2532 }
2533 else
2534 /* Both arms are empty; replace the COND_EXPR with its predicate. */
2535 expr = TREE_OPERAND (expr, 0);
2536
2537 *expr_p = expr;
2538 return ret;
2539 }
2540
2541 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
2542 a call to __builtin_memcpy. */
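/* Schematically, "*p = *q" for a variable-sized type of SIZE bytes becomes

     __builtin_memcpy (p, q, SIZE);

   and, if the value of the assignment is wanted, the call's return value
   (the destination pointer) is dereferenced to yield the stored object.
   SIZE here stands for the WITH_SIZE_EXPR size supplied by the caller.  */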
2543
2544 static enum gimplify_status
2545 gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value)
2546 {
2547 tree args, t, to, to_ptr, from;
2548
2549 to = TREE_OPERAND (*expr_p, 0);
2550 from = TREE_OPERAND (*expr_p, 1);
2551
2552 args = tree_cons (NULL, size, NULL);
2553
2554 t = build_fold_addr_expr (from);
2555 args = tree_cons (NULL, t, args);
2556
2557 to_ptr = build_fold_addr_expr (to);
2558 args = tree_cons (NULL, to_ptr, args);
2559 t = implicit_built_in_decls[BUILT_IN_MEMCPY];
2560 t = build_function_call_expr (t, args);
2561
2562 if (want_value)
2563 {
2564 t = build1 (NOP_EXPR, TREE_TYPE (to_ptr), t);
2565 t = build1 (INDIRECT_REF, TREE_TYPE (to), t);
2566 }
2567
2568 *expr_p = t;
2569 return GS_OK;
2570 }
2571
2572 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
2573 a call to __builtin_memset. In this case we know that the RHS is
2574 a CONSTRUCTOR with an empty element list. */
2575
2576 static enum gimplify_status
2577 gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value)
2578 {
2579 tree args, t, to, to_ptr;
2580
2581 to = TREE_OPERAND (*expr_p, 0);
2582
2583 args = tree_cons (NULL, size, NULL);
2584
2585 args = tree_cons (NULL, integer_zero_node, args);
2586
2587 to_ptr = build_fold_addr_expr (to);
2588 args = tree_cons (NULL, to_ptr, args);
2589 t = implicit_built_in_decls[BUILT_IN_MEMSET];
2590 t = build_function_call_expr (t, args);
2591
2592 if (want_value)
2593 {
2594 t = build1 (NOP_EXPR, TREE_TYPE (to_ptr), t);
2595 t = build1 (INDIRECT_REF, TREE_TYPE (to), t);
2596 }
2597
2598 *expr_p = t;
2599 return GS_OK;
2600 }
2601
2602 /* A subroutine of gimplify_init_ctor_preeval. Called via walk_tree,
2603 determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
2604 assignment. Returns non-null if we detect a potential overlap. */
2605
2606 struct gimplify_init_ctor_preeval_data
2607 {
2608 /* The base decl of the lhs object. May be NULL, in which case we
2609 have to assume the lhs is indirect. */
2610 tree lhs_base_decl;
2611
2612 /* The alias set of the lhs object. */
2613 int lhs_alias_set;
2614 };
2615
2616 static tree
2617 gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
2618 {
2619 struct gimplify_init_ctor_preeval_data *data
2620 = (struct gimplify_init_ctor_preeval_data *) xdata;
2621 tree t = *tp;
2622
2623 /* If we find the base object, obviously we have overlap. */
2624 if (data->lhs_base_decl == t)
2625 return t;
2626
2627 /* If the constructor component is indirect, determine if we have a
2628 potential overlap with the lhs. The only bits of information we
2629 have to go on at this point are addressability and alias sets. */
2630 if (TREE_CODE (t) == INDIRECT_REF
2631 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
2632 && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
2633 return t;
2634
2635 if (IS_TYPE_OR_DECL_P (t))
2636 *walk_subtrees = 0;
2637 return NULL;
2638 }
2639
2640 /* A subroutine of gimplify_init_constructor. Pre-evaluate *EXPR_P,
2641 force values that overlap with the lhs (as described by *DATA)
2642 into temporaries. */
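/* For example, in an assignment like "a = { .x = a.y, .y = a.x }" the reads
   of a.y and a.x must be forced into temporaries so that they happen before
   any clearing of, or stores into, "a".  */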
2643
2644 static void
2645 gimplify_init_ctor_preeval (tree *expr_p, tree *pre_p, tree *post_p,
2646 struct gimplify_init_ctor_preeval_data *data)
2647 {
2648 enum gimplify_status one;
2649
2650 /* If the value is invariant, then there's nothing to pre-evaluate.
2651 But ensure it doesn't have any side-effects since a SAVE_EXPR is
2652 invariant but has side effects and might contain a reference to
2653 the object we're initializing. */
2654 if (TREE_INVARIANT (*expr_p) && !TREE_SIDE_EFFECTS (*expr_p))
2655 return;
2656
2657 /* If the type has non-trivial constructors, we can't pre-evaluate. */
2658 if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
2659 return;
2660
2661 /* Recurse for nested constructors. */
2662 if (TREE_CODE (*expr_p) == CONSTRUCTOR)
2663 {
2664 unsigned HOST_WIDE_INT ix;
2665 constructor_elt *ce;
2666 VEC(constructor_elt,gc) *v = CONSTRUCTOR_ELTS (*expr_p);
2667
2668 for (ix = 0; VEC_iterate (constructor_elt, v, ix, ce); ix++)
2669 gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);
2670 return;
2671 }
2672
2673 /* We can't preevaluate if the type contains a placeholder. */
2674 if (type_contains_placeholder_p (TREE_TYPE (*expr_p)))
2675 return;
2676
2677 /* Gimplify the constructor element to something appropriate for the rhs
2678 of a MODIFY_EXPR. Given that we know the lhs is an aggregate, we know
2679 the gimplifier will consider this a store to memory. Doing this
2680 gimplification now means that we won't have to deal with complicated
2681 language-specific trees, nor trees like SAVE_EXPR that can induce
2682 exponential search behavior. */
2683 one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
2684 if (one == GS_ERROR)
2685 {
2686 *expr_p = NULL;
2687 return;
2688 }
2689
2690 /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
2691 with the lhs, since "a = { .x=a }" doesn't make sense. This will
2692 always be true for all scalars, since is_gimple_mem_rhs insists on a
2693 temporary variable for them. */
2694 if (DECL_P (*expr_p))
2695 return;
2696
2697 /* If this is of variable size, we have no choice but to assume it doesn't
2698 overlap since we can't make a temporary for it. */
2699 if (!TREE_CONSTANT (TYPE_SIZE (TREE_TYPE (*expr_p))))
2700 return;
2701
2702 /* Otherwise, we must search for overlap ... */
2703 if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
2704 return;
2705
2706 /* ... and if found, force the value into a temporary. */
2707 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
2708 }
2709
2710 /* A subroutine of gimplify_init_ctor_eval. Create a loop for
2711 a RANGE_EXPR in a CONSTRUCTOR for an array.
2712
2713 var = lower;
2714 loop_entry:
2715 object[var] = value;
2716 if (var == upper)
2717 goto loop_exit;
2718 var = var + 1;
2719 goto loop_entry;
2720 loop_exit:
2721
2722 We increment var _after_ the loop exit check because we might otherwise
2723 fail if upper == TYPE_MAX_VALUE (type for upper).
2724
2725 Note that we never have to deal with SAVE_EXPRs here, because this has
2726 already been taken care of for us, in gimplify_init_ctor_preeval(). */
2727
2728 static void gimplify_init_ctor_eval (tree, VEC(constructor_elt,gc) *,
2729 tree *, bool);
2730
2731 static void
2732 gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
2733 tree value, tree array_elt_type,
2734 tree *pre_p, bool cleared)
2735 {
2736 tree loop_entry_label, loop_exit_label;
2737 tree var, var_type, cref;
2738
2739 loop_entry_label = create_artificial_label ();
2740 loop_exit_label = create_artificial_label ();
2741
2742 /* Create and initialize the index variable. */
2743 var_type = TREE_TYPE (upper);
2744 var = create_tmp_var (var_type, NULL);
2745 append_to_statement_list (build2 (MODIFY_EXPR, var_type, var, lower), pre_p);
2746
2747 /* Add the loop entry label. */
2748 append_to_statement_list (build1 (LABEL_EXPR,
2749 void_type_node,
2750 loop_entry_label),
2751 pre_p);
2752
2753 /* Build the reference. */
2754 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
2755 var, NULL_TREE, NULL_TREE);
2756
2757 /* If we are a constructor, just call gimplify_init_ctor_eval to do
2758 the store. Otherwise just assign value to the reference. */
2759
2760 if (TREE_CODE (value) == CONSTRUCTOR)
2761 /* NB we might have to call ourself recursively through
2762 gimplify_init_ctor_eval if the value is a constructor. */
2763 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
2764 pre_p, cleared);
2765 else
2766 append_to_statement_list (build2 (MODIFY_EXPR, TREE_TYPE (cref),
2767 cref, value),
2768 pre_p);
2769
2770 /* We exit the loop when the index var is equal to the upper bound. */
2771 gimplify_and_add (build3 (COND_EXPR, void_type_node,
2772 build2 (EQ_EXPR, boolean_type_node,
2773 var, upper),
2774 build1 (GOTO_EXPR,
2775 void_type_node,
2776 loop_exit_label),
2777 NULL_TREE),
2778 pre_p);
2779
2780 /* Otherwise, increment the index var... */
2781 append_to_statement_list (build2 (MODIFY_EXPR, var_type, var,
2782 build2 (PLUS_EXPR, var_type, var,
2783 fold_convert (var_type,
2784 integer_one_node))),
2785 pre_p);
2786
2787 /* ...and jump back to the loop entry. */
2788 append_to_statement_list (build1 (GOTO_EXPR,
2789 void_type_node,
2790 loop_entry_label),
2791 pre_p);
2792
2793 /* Add the loop exit label. */
2794 append_to_statement_list (build1 (LABEL_EXPR,
2795 void_type_node,
2796 loop_exit_label),
2797 pre_p);
2798 }
2799
2800 /* Return true if FDECL is a FIELD_DECL of zero size. */
2801
2802 static bool
2803 zero_sized_field_decl (tree fdecl)
2804 {
2805 if (TREE_CODE (fdecl) == FIELD_DECL && DECL_SIZE (fdecl)
2806 && integer_zerop (DECL_SIZE (fdecl)))
2807 return true;
2808 return false;
2809 }
2810
2811 /* Return true if TYPE is zero sized. */
2812
2813 static bool
2814 zero_sized_type (tree type)
2815 {
2816 if (AGGREGATE_TYPE_P (type) && TYPE_SIZE (type)
2817 && integer_zerop (TYPE_SIZE (type)))
2818 return true;
2819 return false;
2820 }
2821
2822 /* A subroutine of gimplify_init_constructor. Generate individual
2823 MODIFY_EXPRs for a CONSTRUCTOR. OBJECT is the LHS against which the
2824 assignments should happen. ELTS is the CONSTRUCTOR_ELTS of the
2825 CONSTRUCTOR. CLEARED is true if the entire LHS object has been
2826 zeroed first. */
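/* For example (GNU C designated ranges), the initializer

     int a[6] = { [0] = 1, [2 ... 4] = 7 };

   is emitted as "a[0] = 1" plus a small loop built by
   gimplify_init_ctor_eval_range that stores 7 into a[2] through a[4],
   after whatever block clear the caller decided on.  */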
2827
2828 static void
2829 gimplify_init_ctor_eval (tree object, VEC(constructor_elt,gc) *elts,
2830 tree *pre_p, bool cleared)
2831 {
2832 tree array_elt_type = NULL;
2833 unsigned HOST_WIDE_INT ix;
2834 tree purpose, value;
2835
2836 if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
2837 array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));
2838
2839 FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
2840 {
2841 tree cref, init;
2842
2843 /* NULL values are created above for gimplification errors. */
2844 if (value == NULL)
2845 continue;
2846
2847 if (cleared && initializer_zerop (value))
2848 continue;
2849
2850 /* ??? Here's to hoping the front end fills in all of the indices,
2851 so we don't have to figure out what's missing ourselves. */
2852 gcc_assert (purpose);
2853
2854 /* Skip zero-sized fields, unless value has side-effects. This can
2855 happen with calls to functions returning a zero-sized type, which
2856 we shouldn't discard. As a number of downstream passes don't
2857 expect sets of zero-sized fields, we rely on the gimplification of
2858 the MODIFY_EXPR we make below to drop the assignment statement. */
2859 if (! TREE_SIDE_EFFECTS (value) && zero_sized_field_decl (purpose))
2860 continue;
2861
2862 /* If we have a RANGE_EXPR, we have to build a loop to assign the
2863 whole range. */
2864 if (TREE_CODE (purpose) == RANGE_EXPR)
2865 {
2866 tree lower = TREE_OPERAND (purpose, 0);
2867 tree upper = TREE_OPERAND (purpose, 1);
2868
2869 /* If the lower bound is equal to upper, just treat it as if
2870 upper was the index. */
2871 if (simple_cst_equal (lower, upper))
2872 purpose = upper;
2873 else
2874 {
2875 gimplify_init_ctor_eval_range (object, lower, upper, value,
2876 array_elt_type, pre_p, cleared);
2877 continue;
2878 }
2879 }
2880
2881 if (array_elt_type)
2882 {
2883 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
2884 purpose, NULL_TREE, NULL_TREE);
2885 }
2886 else
2887 {
2888 gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
2889 cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
2890 unshare_expr (object), purpose, NULL_TREE);
2891 }
2892
2893 if (TREE_CODE (value) == CONSTRUCTOR
2894 && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
2895 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
2896 pre_p, cleared);
2897 else
2898 {
2899 init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
2900 gimplify_and_add (init, pre_p);
2901 }
2902 }
2903 }
2904
2905 /* A subroutine of gimplify_modify_expr. Break out elements of a
2906 CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
2907
2908 Note that we still need to clear any elements that don't have explicit
2909 initializers, so if not all elements are initialized we keep the
2910 original MODIFY_EXPR, we just remove all of the constructor elements. */
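/* Roughly: "struct S s = { 0 };" degenerates into a block clear; a large
   constant initializer may instead be promoted to a static object and
   block-copied; and any remaining non-constant elements are assigned one
   by one.  The exact choice depends on the heuristics below (CLEAR_RATIO,
   object size and alignment).  */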
2911
2912 static enum gimplify_status
2913 gimplify_init_constructor (tree *expr_p, tree *pre_p,
2914 tree *post_p, bool want_value)
2915 {
2916 tree object;
2917 tree ctor = TREE_OPERAND (*expr_p, 1);
2918 tree type = TREE_TYPE (ctor);
2919 enum gimplify_status ret;
2920 VEC(constructor_elt,gc) *elts;
2921
2922 if (TREE_CODE (ctor) != CONSTRUCTOR)
2923 return GS_UNHANDLED;
2924
2925 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
2926 is_gimple_lvalue, fb_lvalue);
2927 if (ret == GS_ERROR)
2928 return ret;
2929 object = TREE_OPERAND (*expr_p, 0);
2930
2931 elts = CONSTRUCTOR_ELTS (ctor);
2932
2933 ret = GS_ALL_DONE;
2934 switch (TREE_CODE (type))
2935 {
2936 case RECORD_TYPE:
2937 case UNION_TYPE:
2938 case QUAL_UNION_TYPE:
2939 case ARRAY_TYPE:
2940 {
2941 struct gimplify_init_ctor_preeval_data preeval_data;
2942 HOST_WIDE_INT num_type_elements, num_ctor_elements;
2943 HOST_WIDE_INT num_nonzero_elements;
2944 bool cleared, valid_const_initializer;
2945
2946 /* Aggregate types must lower constructors to initialization of
2947 individual elements. The exception is that a CONSTRUCTOR node
2948 with no elements indicates zero-initialization of the whole. */
2949 if (VEC_empty (constructor_elt, elts))
2950 break;
2951
2952 /* Fetch information about the constructor to direct later processing.
2953 We might want to make static versions of it in various cases, and
2954 can only do so if it is known to be a valid constant initializer. */
2955 valid_const_initializer
2956 = categorize_ctor_elements (ctor, &num_nonzero_elements,
2957 &num_ctor_elements, &cleared);
2958
2959 /* If a const aggregate variable is being initialized, then it
2960 should never be a loss to promote the variable to be static. */
2961 if (valid_const_initializer
2962 && num_nonzero_elements > 1
2963 && TREE_READONLY (object)
2964 && TREE_CODE (object) == VAR_DECL)
2965 {
2966 DECL_INITIAL (object) = ctor;
2967 TREE_STATIC (object) = 1;
2968 if (!DECL_NAME (object))
2969 DECL_NAME (object) = create_tmp_var_name ("C");
2970 walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);
2971
2972 /* ??? C++ doesn't automatically append a .<number> to the
2973 assembler name, and even when it does, it looks at FE-private
2974 data structures to figure out what that number should be,
2975 which are not set for this variable. I suppose this is
2976 important for local statics for inline functions, which aren't
2977 "local" in the object file sense. So in order to get a unique
2978 TU-local symbol, we must invoke the lhd version now. */
2979 lhd_set_decl_assembler_name (object);
2980
2981 *expr_p = NULL_TREE;
2982 break;
2983 }
2984
2985 /* If there are "lots" of initialized elements, even discounting
2986 those that are not address constants (and thus *must* be
2987 computed at runtime), then partition the constructor into
2988 constant and non-constant parts. Block copy the constant
2989 parts in, then generate code for the non-constant parts. */
2990 /* TODO. There's code in cp/typeck.c to do this. */
2991
2992 num_type_elements = count_type_elements (type, true);
2993
2994 /* If count_type_elements could not determine the number of type elements
2995 for a constant-sized object, assume clearing is needed.
2996 Don't do this for variable-sized objects, as store_constructor
2997 will ignore the clearing of variable-sized objects. */
2998 if (num_type_elements < 0 && int_size_in_bytes (type) >= 0)
2999 cleared = true;
3000 /* If there are "lots" of zeros, then block clear the object first. */
3001 else if (num_type_elements - num_nonzero_elements > CLEAR_RATIO
3002 && num_nonzero_elements < num_type_elements/4)
3003 cleared = true;
3004 /* ??? This bit ought not to be needed. For any elements not present
3005 in the initializer, we should simply set them to zero. Except
3006 we'd need to *find* the elements that are not present, and that
3007 requires trickery to avoid quadratic compile-time behavior in
3008 large cases or excessive memory use in small cases. */
3009 else if (num_ctor_elements < num_type_elements)
3010 cleared = true;
3011
3012 /* If there are "lots" of initialized elements, and all of them
3013 are valid address constants, then the entire initializer can
3014 be dropped to memory, and then memcpy'd out. Don't do this
3015 for sparse arrays, though, as it's more efficient to follow
3016 the standard CONSTRUCTOR behavior of memset followed by
3017 individual element initialization. */
3018 if (valid_const_initializer && !cleared)
3019 {
3020 HOST_WIDE_INT size = int_size_in_bytes (type);
3021 unsigned int align;
3022
3023 /* ??? We can still get unbounded array types, at least
3024 from the C++ front end. This seems wrong, but attempt
3025 to work around it for now. */
3026 if (size < 0)
3027 {
3028 size = int_size_in_bytes (TREE_TYPE (object));
3029 if (size >= 0)
3030 TREE_TYPE (ctor) = type = TREE_TYPE (object);
3031 }
3032
3033 /* Find the maximum alignment we can assume for the object. */
3034 /* ??? Make use of DECL_OFFSET_ALIGN. */
3035 if (DECL_P (object))
3036 align = DECL_ALIGN (object);
3037 else
3038 align = TYPE_ALIGN (type);
3039
3040 if (size > 0 && !can_move_by_pieces (size, align))
3041 {
3042 tree new = create_tmp_var_raw (type, "C");
3043
3044 gimple_add_tmp_var (new);
3045 TREE_STATIC (new) = 1;
3046 TREE_READONLY (new) = 1;
3047 DECL_INITIAL (new) = ctor;
3048 if (align > DECL_ALIGN (new))
3049 {
3050 DECL_ALIGN (new) = align;
3051 DECL_USER_ALIGN (new) = 1;
3052 }
3053 walk_tree (&DECL_INITIAL (new), force_labels_r, NULL, NULL);
3054
3055 TREE_OPERAND (*expr_p, 1) = new;
3056
3057 /* This is no longer an assignment of a CONSTRUCTOR, but
3058 we still may have processing to do on the LHS. So
3059 pretend we didn't do anything here to let that happen. */
3060 return GS_UNHANDLED;
3061 }
3062 }
3063
3064 /* If there are nonzero elements, pre-evaluate to capture elements
3065 overlapping with the lhs into temporaries. We must do this before
3066 clearing to fetch the values before they are zeroed-out. */
3067 if (num_nonzero_elements > 0)
3068 {
3069 preeval_data.lhs_base_decl = get_base_address (object);
3070 if (!DECL_P (preeval_data.lhs_base_decl))
3071 preeval_data.lhs_base_decl = NULL;
3072 preeval_data.lhs_alias_set = get_alias_set (object);
3073
3074 gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
3075 pre_p, post_p, &preeval_data);
3076 }
3077
3078 if (cleared)
3079 {
3080 /* Zap the CONSTRUCTOR element list, which simplifies this case.
3081 Note that we still have to gimplify, in order to handle the
3082 case of variable sized types. Avoid shared tree structures. */
3083 CONSTRUCTOR_ELTS (ctor) = NULL;
3084 object = unshare_expr (object);
3085 gimplify_stmt (expr_p);
3086 append_to_statement_list (*expr_p, pre_p);
3087 }
3088
3089 /* If we have not block cleared the object, or if there are nonzero
3090 elements in the constructor, add assignments to the individual
3091 scalar fields of the object. */
3092 if (!cleared || num_nonzero_elements > 0)
3093 gimplify_init_ctor_eval (object, elts, pre_p, cleared);
3094
3095 *expr_p = NULL_TREE;
3096 }
3097 break;
3098
3099 case COMPLEX_TYPE:
3100 {
3101 tree r, i;
3102
3103 /* Extract the real and imaginary parts out of the ctor. */
3104 gcc_assert (VEC_length (constructor_elt, elts) == 2);
3105 r = VEC_index (constructor_elt, elts, 0)->value;
3106 i = VEC_index (constructor_elt, elts, 1)->value;
3107 if (r == NULL || i == NULL)
3108 {
3109 tree zero = fold_convert (TREE_TYPE (type), integer_zero_node);
3110 if (r == NULL)
3111 r = zero;
3112 if (i == NULL)
3113 i = zero;
3114 }
3115
3116 /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
3117 represent creation of a complex value. */
3118 if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
3119 {
3120 ctor = build_complex (type, r, i);
3121 TREE_OPERAND (*expr_p, 1) = ctor;
3122 }
3123 else
3124 {
3125 ctor = build2 (COMPLEX_EXPR, type, r, i);
3126 TREE_OPERAND (*expr_p, 1) = ctor;
3127 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
3128 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
3129 fb_rvalue);
3130 }
3131 }
3132 break;
3133
3134 case VECTOR_TYPE:
3135 {
3136 unsigned HOST_WIDE_INT ix;
3137 constructor_elt *ce;
3138
3139 /* Go ahead and simplify constant constructors to VECTOR_CST. */
3140 if (TREE_CONSTANT (ctor))
3141 {
3142 bool constant_p = true;
3143 tree value;
3144
3145 /* Even when ctor is constant, it might contain non-*_CST
3146 elements (e.g. { 1.0/0.0 - 1.0/0.0, 0.0 }) and those don't
3147 belong into VECTOR_CST nodes. */
3148 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
3149 if (!CONSTANT_CLASS_P (value))
3150 {
3151 constant_p = false;
3152 break;
3153 }
3154
3155 if (constant_p)
3156 {
3157 TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
3158 break;
3159 }
3160 }
3161
3162 /* Vector types use CONSTRUCTOR all the way through gimple
3163 compilation as a general initializer. */
3164 for (ix = 0; VEC_iterate (constructor_elt, elts, ix, ce); ix++)
3165 {
3166 enum gimplify_status tret;
3167 tret = gimplify_expr (&ce->value, pre_p, post_p,
3168 is_gimple_val, fb_rvalue);
3169 if (tret == GS_ERROR)
3170 ret = GS_ERROR;
3171 }
3172 }
3173 break;
3174
3175 default:
3176 /* So how did we get a CONSTRUCTOR for a scalar type? */
3177 gcc_unreachable ();
3178 }
3179
3180 if (ret == GS_ERROR)
3181 return GS_ERROR;
3182 else if (want_value)
3183 {
3184 append_to_statement_list (*expr_p, pre_p);
3185 *expr_p = object;
3186 return GS_OK;
3187 }
3188 else
3189 return GS_ALL_DONE;
3190 }
3191
3192 /* Given a pointer value OP0, return a simplified version of an
3193 indirection through OP0, or NULL_TREE if no simplification is
3194 possible. This may only be applied to a rhs of an expression.
3195 Note that the resulting type may be different from the type pointed
3196 to in the sense that it is still compatible from the langhooks
3197 point of view. */
3198
3199 static tree
3200 fold_indirect_ref_rhs (tree t)
3201 {
3202 tree type = TREE_TYPE (TREE_TYPE (t));
3203 tree sub = t;
3204 tree subtype;
3205
3206 STRIP_NOPS (sub);
3207 subtype = TREE_TYPE (sub);
3208 if (!POINTER_TYPE_P (subtype))
3209 return NULL_TREE;
3210
3211 if (TREE_CODE (sub) == ADDR_EXPR)
3212 {
3213 tree op = TREE_OPERAND (sub, 0);
3214 tree optype = TREE_TYPE (op);
3215 /* *&p => p */
3216 if (lang_hooks.types_compatible_p (type, optype))
3217 return op;
3218 /* *(foo *)&fooarray => fooarray[0] */
3219 else if (TREE_CODE (optype) == ARRAY_TYPE
3220 && lang_hooks.types_compatible_p (type, TREE_TYPE (optype)))
3221 {
3222 tree type_domain = TYPE_DOMAIN (optype);
3223 tree min_val = size_zero_node;
3224 if (type_domain && TYPE_MIN_VALUE (type_domain))
3225 min_val = TYPE_MIN_VALUE (type_domain);
3226 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
3227 }
3228 }
3229
3230 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
3231 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
3232 && lang_hooks.types_compatible_p (type, TREE_TYPE (TREE_TYPE (subtype))))
3233 {
3234 tree type_domain;
3235 tree min_val = size_zero_node;
3236 tree osub = sub;
3237 sub = fold_indirect_ref_rhs (sub);
3238 if (! sub)
3239 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
3240 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
3241 if (type_domain && TYPE_MIN_VALUE (type_domain))
3242 min_val = TYPE_MIN_VALUE (type_domain);
3243 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
3244 }
3245
3246 return NULL_TREE;
3247 }
3248
3249 /* Subroutine of gimplify_modify_expr to do simplifications of MODIFY_EXPRs
3250 based on the code of the RHS. We loop for as long as something changes. */
3251
3252 static enum gimplify_status
3253 gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p, tree *pre_p,
3254 tree *post_p, bool want_value)
3255 {
3256 enum gimplify_status ret = GS_OK;
3257
3258 while (ret != GS_UNHANDLED)
3259 switch (TREE_CODE (*from_p))
3260 {
3261 case INDIRECT_REF:
3262 {
3263 /* If we have code like
3264
3265 *(const A*)(A*)&x
3266
3267 where the type of "x" is a (possibly cv-qualified variant
3268 of "A"), treat the entire expression as identical to "x".
3269 This kind of code arises in C++ when an object is bound
3270 to a const reference, and if "x" is a TARGET_EXPR we want
3271 to take advantage of the optimization below. */
3272 tree t = fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
3273 if (t)
3274 {
3275 *from_p = t;
3276 ret = GS_OK;
3277 }
3278 else
3279 ret = GS_UNHANDLED;
3280 break;
3281 }
3282
3283 case TARGET_EXPR:
3284 {
3285 /* If we are initializing something from a TARGET_EXPR, strip the
3286 TARGET_EXPR and initialize it directly, if possible. This can't
3287 be done if the initializer is void, since that implies that the
3288 temporary is set in some non-trivial way.
3289
3290 ??? What about code that pulls out the temp and uses it
3291 elsewhere? I think that such code never uses the TARGET_EXPR as
3292 an initializer. If I'm wrong, we'll die because the temp won't
3293 have any RTL. In that case, I guess we'll need to replace
3294 references somehow. */
3295 tree init = TARGET_EXPR_INITIAL (*from_p);
3296
3297 if (!VOID_TYPE_P (TREE_TYPE (init)))
3298 {
3299 *from_p = init;
3300 ret = GS_OK;
3301 }
3302 else
3303 ret = GS_UNHANDLED;
3304 }
3305 break;
3306
3307 case COMPOUND_EXPR:
3308 /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
3309 caught. */
3310 gimplify_compound_expr (from_p, pre_p, true);
3311 ret = GS_OK;
3312 break;
3313
3314 case CONSTRUCTOR:
3315 /* If we're initializing from a CONSTRUCTOR, break this into
3316 individual MODIFY_EXPRs. */
3317 return gimplify_init_constructor (expr_p, pre_p, post_p, want_value);
3318
3319 case COND_EXPR:
3320 /* If we're assigning to a non-register type, push the assignment
3321 down into the branches. This is mandatory for ADDRESSABLE types,
3322 since we cannot generate temporaries for such, but it saves a
3323 copy in other cases as well. */
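/* E.g. "x = c ? y : z" with an aggregate X becomes, roughly,
   "if (c) x = y; else x = z;", avoiding an aggregate temporary.  */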
3324 if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
3325 {
3326 /* This code should mirror the code in gimplify_cond_expr. */
3327 enum tree_code code = TREE_CODE (*expr_p);
3328 tree cond = *from_p;
3329 tree result = *to_p;
3330
3331 ret = gimplify_expr (&result, pre_p, post_p,
3332 is_gimple_min_lval, fb_lvalue);
3333 if (ret != GS_ERROR)
3334 ret = GS_OK;
3335
3336 if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
3337 TREE_OPERAND (cond, 1)
3338 = build2 (code, void_type_node, result,
3339 TREE_OPERAND (cond, 1));
3340 if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
3341 TREE_OPERAND (cond, 2)
3342 = build2 (code, void_type_node, unshare_expr (result),
3343 TREE_OPERAND (cond, 2));
3344
3345 TREE_TYPE (cond) = void_type_node;
3346 recalculate_side_effects (cond);
3347
3348 if (want_value)
3349 {
3350 gimplify_and_add (cond, pre_p);
3351 *expr_p = unshare_expr (result);
3352 }
3353 else
3354 *expr_p = cond;
3355 return ret;
3356 }
3357 else
3358 ret = GS_UNHANDLED;
3359 break;
3360
3361 case CALL_EXPR:
3362 /* For calls that return in memory, give *to_p as the CALL_EXPR's
3363 return slot so that we don't generate a temporary. */
3364 if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
3365 && aggregate_value_p (*from_p, *from_p))
3366 {
3367 bool use_target;
3368
3369 if (!(rhs_predicate_for (*to_p))(*from_p))
3370 /* If we need a temporary, *to_p isn't accurate. */
3371 use_target = false;
3372 else if (TREE_CODE (*to_p) == RESULT_DECL
3373 && DECL_NAME (*to_p) == NULL_TREE
3374 && needs_to_live_in_memory (*to_p))
3375 /* It's OK to use the return slot directly unless it's an NRV. */
3376 use_target = true;
3377 else if (is_gimple_reg_type (TREE_TYPE (*to_p))
3378 || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
3379 /* Don't force regs into memory. */
3380 use_target = false;
3381 else if (TREE_CODE (*to_p) == VAR_DECL
3382 && DECL_GIMPLE_FORMAL_TEMP_P (*to_p))
3383 /* Don't use the original target if it's a formal temp; we
3384 don't want to take its address. */
3385 use_target = false;
3386 else if (TREE_CODE (*expr_p) == INIT_EXPR)
3387 /* It's OK to use the target directly if it's being
3388 initialized. */
3389 use_target = true;
3390 else if (!is_gimple_non_addressable (*to_p))
3391 /* Don't use the original target if it's already addressable;
3392 if its address escapes, and the called function uses the
3393 NRV optimization, a conforming program could see *to_p
3394 change before the called function returns; see c++/19317.
3395 When optimizing, the return_slot pass marks more functions
3396 as safe after we have escape info. */
3397 use_target = false;
3398 else
3399 use_target = true;
3400
3401 if (use_target)
3402 {
3403 CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
3404 lang_hooks.mark_addressable (*to_p);
3405 }
3406 }
3407
3408 ret = GS_UNHANDLED;
3409 break;
3410
3411 default:
3412 ret = GS_UNHANDLED;
3413 break;
3414 }
3415
3416 return ret;
3417 }
3418
3419 /* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
3420 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with
3421 DECL_COMPLEX_GIMPLE_REG_P set. */
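/* For example (D.1 is a made-up temporary), "__real__ z = r" becomes

     D.1 = __imag__ z;
     z = COMPLEX_EXPR <r, D.1>;

   so the complex variable Z is always written as a whole.  */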
3422
3423 static enum gimplify_status
3424 gimplify_modify_expr_complex_part (tree *expr_p, tree *pre_p, bool want_value)
3425 {
3426 enum tree_code code, ocode;
3427 tree lhs, rhs, new_rhs, other, realpart, imagpart;
3428
3429 lhs = TREE_OPERAND (*expr_p, 0);
3430 rhs = TREE_OPERAND (*expr_p, 1);
3431 code = TREE_CODE (lhs);
3432 lhs = TREE_OPERAND (lhs, 0);
3433
3434 ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
3435 other = build1 (ocode, TREE_TYPE (rhs), lhs);
3436 other = get_formal_tmp_var (other, pre_p);
3437
3438 realpart = code == REALPART_EXPR ? rhs : other;
3439 imagpart = code == REALPART_EXPR ? other : rhs;
3440
3441 if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
3442 new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
3443 else
3444 new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
3445
3446 TREE_OPERAND (*expr_p, 0) = lhs;
3447 TREE_OPERAND (*expr_p, 1) = new_rhs;
3448
3449 if (want_value)
3450 {
3451 append_to_statement_list (*expr_p, pre_p);
3452 *expr_p = rhs;
3453 }
3454
3455 return GS_ALL_DONE;
3456 }
3457
3458 /* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
3459
3460 modify_expr
3461 : varname '=' rhs
3462 | '*' ID '=' rhs
3463
3464 PRE_P points to the list where side effects that must happen before
3465 *EXPR_P should be stored.
3466
3467 POST_P points to the list where side effects that must happen after
3468 *EXPR_P should be stored.
3469
3470 WANT_VALUE is nonzero iff we want to use the value of this expression
3471 in another expression. */
3472
3473 static enum gimplify_status
3474 gimplify_modify_expr (tree *expr_p, tree *pre_p, tree *post_p, bool want_value)
3475 {
3476 tree *from_p = &TREE_OPERAND (*expr_p, 1);
3477 tree *to_p = &TREE_OPERAND (*expr_p, 0);
3478 enum gimplify_status ret = GS_UNHANDLED;
3479
3480 gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
3481 || TREE_CODE (*expr_p) == INIT_EXPR);
3482
3483 /* For zero-sized types, only gimplify the left and right hand sides as
3484 statements and throw away the assignment. */
3485 if (zero_sized_type (TREE_TYPE (*from_p)))
3486 {
3487 gimplify_stmt (from_p);
3488 gimplify_stmt (to_p);
3489 append_to_statement_list (*from_p, pre_p);
3490 append_to_statement_list (*to_p, pre_p);
3491 *expr_p = NULL_TREE;
3492 return GS_ALL_DONE;
3493 }
3494
3495 /* See if any simplifications can be done based on what the RHS is. */
3496 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
3497 want_value);
3498 if (ret != GS_UNHANDLED)
3499 return ret;
3500
3501 /* If the value being copied is of variable width, compute the length
3502 of the copy into a WITH_SIZE_EXPR. Note that we need to do this
3503 before gimplifying any of the operands so that we can resolve any
3504 PLACEHOLDER_EXPRs in the size. Also note that the RTL expander uses
3505 the size of the expression to be copied, not of the destination, so
3506 that is what we must use here. */
3507 maybe_with_size_expr (from_p);
3508
3509 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
3510 if (ret == GS_ERROR)
3511 return ret;
3512
3513 ret = gimplify_expr (from_p, pre_p, post_p,
3514 rhs_predicate_for (*to_p), fb_rvalue);
3515 if (ret == GS_ERROR)
3516 return ret;
3517
3518 /* Now see if the above changed *from_p to something we handle specially. */
3519 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
3520 want_value);
3521 if (ret != GS_UNHANDLED)
3522 return ret;
3523
3524 /* If we've got a variable-sized assignment between two lvalues (i.e. one that
3525 does not involve a call), then we can make things a bit more straightforward
3526 by converting the assignment to memcpy or memset. */
3527 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
3528 {
3529 tree from = TREE_OPERAND (*from_p, 0);
3530 tree size = TREE_OPERAND (*from_p, 1);
3531
3532 if (TREE_CODE (from) == CONSTRUCTOR)
3533 return gimplify_modify_expr_to_memset (expr_p, size, want_value);
3534 if (is_gimple_addressable (from))
3535 {
3536 *from_p = from;
3537 return gimplify_modify_expr_to_memcpy (expr_p, size, want_value);
3538 }
3539 }
3540
3541 /* Transform partial stores to non-addressable complex variables into
3542 total stores. This allows us to use real instead of virtual operands
3543 for these variables, which improves optimization. */
3544 if ((TREE_CODE (*to_p) == REALPART_EXPR
3545 || TREE_CODE (*to_p) == IMAGPART_EXPR)
3546 && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
3547 return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);
3548
3549 if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
3550 {
3551 /* If we've somehow already got an SSA_NAME on the LHS, then
3552 we've probably modified it twice. Not good. */
3553 gcc_assert (TREE_CODE (*to_p) != SSA_NAME);
3554 *to_p = make_ssa_name (*to_p, *expr_p);
3555 }
3556
3557 if (want_value)
3558 {
3559 append_to_statement_list (*expr_p, pre_p);
3560 *expr_p = *to_p;
3561 return GS_OK;
3562 }
3563
3564 return GS_ALL_DONE;
3565 }
3566
3567 /* Gimplify a comparison between two variable-sized objects. Do this
3568 with a call to BUILT_IN_MEMCMP. */
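/* Schematically, "a == b" for a variable-sized type becomes
   "__builtin_memcmp (&a, &b, size) == 0" (and likewise "!= 0" for
   inequality), where "size" is the object's computed size in bytes.  */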
3569
3570 static enum gimplify_status
3571 gimplify_variable_sized_compare (tree *expr_p)
3572 {
3573 tree op0 = TREE_OPERAND (*expr_p, 0);
3574 tree op1 = TREE_OPERAND (*expr_p, 1);
3575 tree args, t, dest;
3576
3577 t = TYPE_SIZE_UNIT (TREE_TYPE (op0));
3578 t = unshare_expr (t);
3579 t = SUBSTITUTE_PLACEHOLDER_IN_EXPR (t, op0);
3580 args = tree_cons (NULL, t, NULL);
3581 t = build_fold_addr_expr (op1);
3582 args = tree_cons (NULL, t, args);
3583 dest = build_fold_addr_expr (op0);
3584 args = tree_cons (NULL, dest, args);
3585 t = implicit_built_in_decls[BUILT_IN_MEMCMP];
3586 t = build_function_call_expr (t, args);
3587 *expr_p
3588 = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
3589
3590 return GS_OK;
3591 }
3592
3593 /* Gimplify a comparison between two aggregate objects of integral scalar
3594 mode as a comparison between the bitwise equivalent scalar values. */
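
/* For illustration (a sketch with an invented type, not literal output),
   comparing two aggregates whose mode is an integral scalar mode, e.g.

     struct S { char c[4]; } x, y;   (TYPE_MODE of struct S is SImode here)
     ... x == y ...

   is rewritten as a comparison of the bitwise-equivalent scalars:

     VIEW_CONVERT_EXPR<unsigned int>(x) == VIEW_CONVERT_EXPR<unsigned int>(y)  */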
3595
3596 static enum gimplify_status
3597 gimplify_scalar_mode_aggregate_compare (tree *expr_p)
3598 {
3599 tree op0 = TREE_OPERAND (*expr_p, 0);
3600 tree op1 = TREE_OPERAND (*expr_p, 1);
3601
3602 tree type = TREE_TYPE (op0);
3603 tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
3604
3605 op0 = fold_build1 (VIEW_CONVERT_EXPR, scalar_type, op0);
3606 op1 = fold_build1 (VIEW_CONVERT_EXPR, scalar_type, op1);
3607
3608 *expr_p
3609 = fold_build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
3610
3611 return GS_OK;
3612 }
3613
3614 /* Gimplify TRUTH_ANDIF_EXPR and TRUTH_ORIF_EXPR expressions. EXPR_P
3615 points to the expression to gimplify.
3616
3617 Expressions of the form 'a && b' are gimplified to:
3618
3619 a && b ? true : false
3620
3621 gimplify_cond_expr will do the rest.
3622
3623 PRE_P points to the list where side effects that must happen before
3624 *EXPR_P should be stored. */
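
/* For illustration (invented names; a sketch of the overall effect, not a
   literal dump), with the rewrite described above an assignment like

     x = a && b;

   first becomes

     x = a && b ? 1 : 0;

   and gimplify_cond_expr then lowers the COND_EXPR into explicit control
   flow, roughly

     if (a) { if (b) x = 1; else x = 0; } else x = 0;  */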
3625
3626 static enum gimplify_status
3627 gimplify_boolean_expr (tree *expr_p)
3628 {
3629 /* Preserve the original type of the expression. */
3630 tree type = TREE_TYPE (*expr_p);
3631
3632 *expr_p = build3 (COND_EXPR, type, *expr_p,
3633 fold_convert (type, boolean_true_node),
3634 fold_convert (type, boolean_false_node));
3635
3636 return GS_OK;
3637 }
3638
3639 /* Gimplifies an expression sequence. This function gimplifies each
3640 expression and re-writes the original expression with the last
3641 expression of the sequence in GIMPLE form.
3642
3643 PRE_P points to the list where the side effects for all the
3644 expressions in the sequence will be emitted.
3645
3646 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
3647 /* ??? Should rearrange to share the pre-queue with all the indirect
3648 invocations of gimplify_expr. Would probably save on creations
3649 of statement_list nodes. */
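
/* For illustration (invented names; a sketch of the effect), gimplifying
   the sequence

     (foo (), bar (), x + y)

   with WANT_VALUE set emits

     foo ();
     bar ();

   onto PRE_P and leaves the last operand, here "x + y", in *EXPR_P for the
   caller to gimplify further.  */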
3650
3651 static enum gimplify_status
3652 gimplify_compound_expr (tree *expr_p, tree *pre_p, bool want_value)
3653 {
3654 tree t = *expr_p;
3655
3656 do
3657 {
3658 tree *sub_p = &TREE_OPERAND (t, 0);
3659
3660 if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
3661 gimplify_compound_expr (sub_p, pre_p, false);
3662 else
3663 gimplify_stmt (sub_p);
3664 append_to_statement_list (*sub_p, pre_p);
3665
3666 t = TREE_OPERAND (t, 1);
3667 }
3668 while (TREE_CODE (t) == COMPOUND_EXPR);
3669
3670 *expr_p = t;
3671 if (want_value)
3672 return GS_OK;
3673 else
3674 {
3675 gimplify_stmt (expr_p);
3676 return GS_ALL_DONE;
3677 }
3678 }
3679
3680 /* Gimplifies a statement list. These may be created either by an
3681 enlightened front-end, or by shortcut_cond_expr. */
3682
3683 static enum gimplify_status
3684 gimplify_statement_list (tree *expr_p)
3685 {
3686 tree_stmt_iterator i = tsi_start (*expr_p);
3687
3688 while (!tsi_end_p (i))
3689 {
3690 tree t;
3691
3692 gimplify_stmt (tsi_stmt_ptr (i));
3693
3694 t = tsi_stmt (i);
3695 if (t == NULL)
3696 tsi_delink (&i);
3697 else if (TREE_CODE (t) == STATEMENT_LIST)
3698 {
3699 tsi_link_before (&i, t, TSI_SAME_STMT);
3700 tsi_delink (&i);
3701 }
3702 else
3703 tsi_next (&i);
3704 }
3705
3706 return GS_ALL_DONE;
3707 }
3708
3709 /* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
3710 gimplify. After gimplification, EXPR_P will point to a new temporary
3711 that holds the original value of the SAVE_EXPR node.
3712
3713 PRE_P points to the list where side effects that must happen before
3714 *EXPR_P should be stored. */
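
/* For illustration (the temporary name is invented), a tree such as

     SAVE_EXPR <f ()>

   is evaluated once into a temporary,

     D.1234 = f ();

   emitted on PRE_P, and every later occurrence of the same SAVE_EXPR node
   then reduces to D.1234 because the node is marked SAVE_EXPR_RESOLVED_P.  */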
3715
3716 static enum gimplify_status
3717 gimplify_save_expr (tree *expr_p, tree *pre_p, tree *post_p)
3718 {
3719 enum gimplify_status ret = GS_ALL_DONE;
3720 tree val;
3721
3722 gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
3723 val = TREE_OPERAND (*expr_p, 0);
3724
3725 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
3726 if (!SAVE_EXPR_RESOLVED_P (*expr_p))
3727 {
3728 /* The operand may be a void-valued expression such as SAVE_EXPRs
3729 generated by the Java frontend for class initialization. It is
3730 being executed only for its side-effects. */
3731 if (TREE_TYPE (val) == void_type_node)
3732 {
3733 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
3734 is_gimple_stmt, fb_none);
3735 append_to_statement_list (TREE_OPERAND (*expr_p, 0), pre_p);
3736 val = NULL;
3737 }
3738 else
3739 val = get_initialized_tmp_var (val, pre_p, post_p);
3740
3741 TREE_OPERAND (*expr_p, 0) = val;
3742 SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
3743 }
3744
3745 *expr_p = val;
3746
3747 return ret;
3748 }
3749
3750 /* Re-write the ADDR_EXPR node pointed to by EXPR_P
3751
3752 unary_expr
3753 : ...
3754 | '&' varname
3755 ...
3756
3757 PRE_P points to the list where side effects that must happen before
3758 *EXPR_P should be stored.
3759
3760 POST_P points to the list where side effects that must happen after
3761 *EXPR_P should be stored. */
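
/* For illustration (a sketch; names invented), the interesting cases are:

     &*p                       -->  p   (possibly converted to the type of
                                          the original ADDR_EXPR)
     &VIEW_CONVERT_EXPR<T>(x)  -->  (T *) &x
     &x                        -->  &x  with x marked addressable and the
                                          ADDR_EXPR's flags recomputed.  */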
3762
3763 static enum gimplify_status
3764 gimplify_addr_expr (tree *expr_p, tree *pre_p, tree *post_p)
3765 {
3766 tree expr = *expr_p;
3767 tree op0 = TREE_OPERAND (expr, 0);
3768 enum gimplify_status ret;
3769
3770 switch (TREE_CODE (op0))
3771 {
3772 case INDIRECT_REF:
3773 case MISALIGNED_INDIRECT_REF:
3774 do_indirect_ref:
3775 /* Check if we are dealing with an expression of the form '&*ptr'.
3776 While the front end folds away '&*ptr' into 'ptr', these
3777 expressions may be generated internally by the compiler (e.g.,
3778 builtins like __builtin_va_end). */
3779 /* Caution: the silent array decomposition semantics we allow for
3780 ADDR_EXPR means we can't always discard the pair. */
3781 /* Gimplification of the ADDR_EXPR operand may drop
3782 cv-qualification conversions, so make sure we add them if
3783 needed. */
3784 {
3785 tree op00 = TREE_OPERAND (op0, 0);
3786 tree t_expr = TREE_TYPE (expr);
3787 tree t_op00 = TREE_TYPE (op00);
3788
3789 if (!lang_hooks.types_compatible_p (t_expr, t_op00))
3790 {
3791 #ifdef ENABLE_CHECKING
3792 tree t_op0 = TREE_TYPE (op0);
3793 gcc_assert (POINTER_TYPE_P (t_expr)
3794 && cpt_same_type (TREE_CODE (t_op0) == ARRAY_TYPE
3795 ? TREE_TYPE (t_op0) : t_op0,
3796 TREE_TYPE (t_expr))
3797 && POINTER_TYPE_P (t_op00)
3798 && cpt_same_type (t_op0, TREE_TYPE (t_op00)));
3799 #endif
3800 op00 = fold_convert (TREE_TYPE (expr), op00);
3801 }
3802 *expr_p = op00;
3803 ret = GS_OK;
3804 }
3805 break;
3806
3807 case VIEW_CONVERT_EXPR:
3808 /* Take the address of our operand and then convert it to the type of
3809 this ADDR_EXPR.
3810
3811 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
3812 all clear. The impact of this transformation is even less clear. */
3813
3814 /* If the operand is a useless conversion, look through it. Doing so
3815 guarantees that the ADDR_EXPR and its operand will remain of the
3816 same type. */
3817 if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
3818 op0 = TREE_OPERAND (op0, 0);
3819
3820 *expr_p = fold_convert (TREE_TYPE (expr),
3821 build_fold_addr_expr (TREE_OPERAND (op0, 0)));
3822 ret = GS_OK;
3823 break;
3824
3825 default:
3826 /* We use fb_either here because the C frontend sometimes takes
3827 the address of a call that returns a struct; see
3828 gcc.dg/c99-array-lval-1.c. The gimplifier will correctly make
3829 the implied temporary explicit. */
3830 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
3831 is_gimple_addressable, fb_either);
3832 if (ret != GS_ERROR)
3833 {
3834 op0 = TREE_OPERAND (expr, 0);
3835
3836 /* For various reasons, the gimplification of the expression
3837 may have made a new INDIRECT_REF. */
3838 if (TREE_CODE (op0) == INDIRECT_REF)
3839 goto do_indirect_ref;
3840
3841 /* Make sure TREE_INVARIANT, TREE_CONSTANT, and TREE_SIDE_EFFECTS
3842 are set properly. */
3843 recompute_tree_invariant_for_addr_expr (expr);
3844
3845 /* Mark the RHS addressable. */
3846 lang_hooks.mark_addressable (TREE_OPERAND (expr, 0));
3847 }
3848 break;
3849 }
3850
3851 return ret;
3852 }
3853
3854 /* Gimplify the operands of an ASM_EXPR. Input operands should be gimple
3855 values; output operands should be gimple lvalues. */
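
/* For illustration (a sketch with an invented operand), an in/out operand
   such as

     asm ("..." : "+r" (x));

   is split below into a pure output plus a matching input,

     asm ("..." : "=r" (x) : "0" (x));

   while an operand whose constraint does not allow a register instead gets
   the remaining constraint letters (e.g. "m") and a copy of the same
   expression as its matching input.  */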
3856
3857 static enum gimplify_status
3858 gimplify_asm_expr (tree *expr_p, tree *pre_p, tree *post_p)
3859 {
3860 tree expr = *expr_p;
3861 int noutputs = list_length (ASM_OUTPUTS (expr));
3862 const char **oconstraints
3863 = (const char **) alloca ((noutputs) * sizeof (const char *));
3864 int i;
3865 tree link;
3866 const char *constraint;
3867 bool allows_mem, allows_reg, is_inout;
3868 enum gimplify_status ret, tret;
3869
3870 ret = GS_ALL_DONE;
3871 for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = TREE_CHAIN (link))
3872 {
3873 size_t constraint_len;
3874 oconstraints[i] = constraint
3875 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
3876 constraint_len = strlen (constraint);
3877 if (constraint_len == 0)
3878 continue;
3879
3880 parse_output_constraint (&constraint, i, 0, 0,
3881 &allows_mem, &allows_reg, &is_inout);
3882
3883 if (!allows_reg && allows_mem)
3884 lang_hooks.mark_addressable (TREE_VALUE (link));
3885
3886 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
3887 is_inout ? is_gimple_min_lval : is_gimple_lvalue,
3888 fb_lvalue | fb_mayfail);
3889 if (tret == GS_ERROR)
3890 {
3891 error ("invalid lvalue in asm output %d", i);
3892 ret = tret;
3893 }
3894
3895 if (is_inout)
3896 {
3897 /* An input/output operand. To give the optimizers more
3898 flexibility, split it into separate input and output
3899 operands. */
3900 tree input;
3901 char buf[10];
3902
3903 /* Turn the in/out constraint into an output constraint. */
3904 char *p = xstrdup (constraint);
3905 p[0] = '=';
3906 TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);
3907
3908 /* And add a matching input constraint. */
3909 if (allows_reg)
3910 {
3911 sprintf (buf, "%d", i);
3912
3913 /* If there are multiple alternatives in the constraint,
3914 handle each of them individually. Those that allow a register
3915 will be replaced with the operand number; the others will stay
3916 unchanged. */
3917 if (strchr (p, ',') != NULL)
3918 {
3919 size_t len = 0, buflen = strlen (buf);
3920 char *beg, *end, *str, *dst;
3921
3922 for (beg = p + 1;;)
3923 {
3924 end = strchr (beg, ',');
3925 if (end == NULL)
3926 end = strchr (beg, '\0');
3927 if ((size_t) (end - beg) < buflen)
3928 len += buflen + 1;
3929 else
3930 len += end - beg + 1;
3931 if (*end)
3932 beg = end + 1;
3933 else
3934 break;
3935 }
3936
3937 str = (char *) alloca (len);
3938 for (beg = p + 1, dst = str;;)
3939 {
3940 const char *tem;
3941 bool mem_p, reg_p, inout_p;
3942
3943 end = strchr (beg, ',');
3944 if (end)
3945 *end = '\0';
3946 beg[-1] = '=';
3947 tem = beg - 1;
3948 parse_output_constraint (&tem, i, 0, 0,
3949 &mem_p, &reg_p, &inout_p);
3950 if (dst != str)
3951 *dst++ = ',';
3952 if (reg_p)
3953 {
3954 memcpy (dst, buf, buflen);
3955 dst += buflen;
3956 }
3957 else
3958 {
3959 if (end)
3960 len = end - beg;
3961 else
3962 len = strlen (beg);
3963 memcpy (dst, beg, len);
3964 dst += len;
3965 }
3966 if (end)
3967 beg = end + 1;
3968 else
3969 break;
3970 }
3971 *dst = '\0';
3972 input = build_string (dst - str, str);
3973 }
3974 else
3975 input = build_string (strlen (buf), buf);
3976 }
3977 else
3978 input = build_string (constraint_len - 1, constraint + 1);
3979
3980 free (p);
3981
3982 input = build_tree_list (build_tree_list (NULL_TREE, input),
3983 unshare_expr (TREE_VALUE (link)));
3984 ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
3985 }
3986 }
3987
3988 for (link = ASM_INPUTS (expr); link; ++i, link = TREE_CHAIN (link))
3989 {
3990 constraint
3991 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
3992 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
3993 oconstraints, &allows_mem, &allows_reg);
3994
3995 /* If the operand is a memory input, it should be an lvalue. */
3996 if (!allows_reg && allows_mem)
3997 {
3998 lang_hooks.mark_addressable (TREE_VALUE (link));
3999 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
4000 is_gimple_lvalue, fb_lvalue | fb_mayfail);
4001 if (tret == GS_ERROR)
4002 {
4003 error ("memory input %d is not directly addressable", i);
4004 ret = tret;
4005 }
4006 }
4007 else
4008 {
4009 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
4010 is_gimple_asm_val, fb_rvalue);
4011 if (tret == GS_ERROR)
4012 ret = tret;
4013 }
4014 }
4015
4016 return ret;
4017 }
4018
4019 /* Gimplify a CLEANUP_POINT_EXPR. Currently this works by adding
4020 WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
4021 gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
4022 return to this function.
4023
4024 FIXME should we complexify the prequeue handling instead? Or use flags
4025 for all the cleanups and let the optimizer tighten them up? The current
4026 code seems pretty fragile; it will break on a cleanup within any
4027 non-conditional nesting. But any such nesting would be broken, anyway;
4028 we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
4029 and continues out of it. We can do that at the RTL level, though, so
4030 having an optimizer to tighten up try/finally regions would be a Good
4031 Thing. */
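
/* For illustration (a sketch; statement names invented), a body of

     stmt1;
     WITH_CLEANUP_EXPR <cleanup1>;
     stmt2;
     WITH_CLEANUP_EXPR <cleanup2>;
     stmt3;

   is rewritten by the loop below into nested try/finally regions, roughly

     stmt1;
     try {
       stmt2;
       try {
         stmt3;
       } finally {
         cleanup2;
       }
     } finally {
       cleanup1;
     }

   (or TRY_CATCH_EXPR where CLEANUP_EH_ONLY is set).  */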
4032
4033 static enum gimplify_status
4034 gimplify_cleanup_point_expr (tree *expr_p, tree *pre_p)
4035 {
4036 tree_stmt_iterator iter;
4037 tree body;
4038
4039 tree temp = voidify_wrapper_expr (*expr_p, NULL);
4040
4041 /* We only care about the number of conditions between the innermost
4042 CLEANUP_POINT_EXPR and the cleanup. So save and reset the count and
4043 any cleanups collected outside the CLEANUP_POINT_EXPR. */
4044 int old_conds = gimplify_ctxp->conditions;
4045 tree old_cleanups = gimplify_ctxp->conditional_cleanups;
4046 gimplify_ctxp->conditions = 0;
4047 gimplify_ctxp->conditional_cleanups = NULL_TREE;
4048
4049 body = TREE_OPERAND (*expr_p, 0);
4050 gimplify_to_stmt_list (&body);
4051
4052 gimplify_ctxp->conditions = old_conds;
4053 gimplify_ctxp->conditional_cleanups = old_cleanups;
4054
4055 for (iter = tsi_start (body); !tsi_end_p (iter); )
4056 {
4057 tree *wce_p = tsi_stmt_ptr (iter);
4058 tree wce = *wce_p;
4059
4060 if (TREE_CODE (wce) == WITH_CLEANUP_EXPR)
4061 {
4062 if (tsi_one_before_end_p (iter))
4063 {
4064 tsi_link_before (&iter, TREE_OPERAND (wce, 0), TSI_SAME_STMT);
4065 tsi_delink (&iter);
4066 break;
4067 }
4068 else
4069 {
4070 tree sl, tfe;
4071 enum tree_code code;
4072
4073 if (CLEANUP_EH_ONLY (wce))
4074 code = TRY_CATCH_EXPR;
4075 else
4076 code = TRY_FINALLY_EXPR;
4077
4078 sl = tsi_split_statement_list_after (&iter);
4079 tfe = build2 (code, void_type_node, sl, NULL_TREE);
4080 append_to_statement_list (TREE_OPERAND (wce, 0),
4081 &TREE_OPERAND (tfe, 1));
4082 *wce_p = tfe;
4083 iter = tsi_start (sl);
4084 }
4085 }
4086 else
4087 tsi_next (&iter);
4088 }
4089
4090 if (temp)
4091 {
4092 *expr_p = temp;
4093 append_to_statement_list (body, pre_p);
4094 return GS_OK;
4095 }
4096 else
4097 {
4098 *expr_p = body;
4099 return GS_ALL_DONE;
4100 }
4101 }
4102
4103 /* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP
4104 is the cleanup action required. */
4105
4106 static void
4107 gimple_push_cleanup (tree var, tree cleanup, bool eh_only, tree *pre_p)
4108 {
4109 tree wce;
4110
4111 /* Errors can result in improperly nested cleanups, which results in
4112 confusion when trying to resolve the WITH_CLEANUP_EXPR. */
4113 if (errorcount || sorrycount)
4114 return;
4115
4116 if (gimple_conditional_context ())
4117 {
4118 /* If we're in a conditional context, this is more complex. We only
4119 want to run the cleanup if we actually ran the initialization that
4120 necessitates it, but we want to run it after the end of the
4121 conditional context. So we wrap the try/finally around the
4122 condition and use a flag to determine whether or not to actually
4123 run the destructor. Thus
4124
4125 test ? f(A()) : 0
4126
4127 becomes (approximately)
4128
4129 flag = 0;
4130 try {
4131 if (test) { A::A(temp); flag = 1; val = f(temp); }
4132 else { val = 0; }
4133 } finally {
4134 if (flag) A::~A(temp);
4135 }
4136 val
4137 */
4138
4139 tree flag = create_tmp_var (boolean_type_node, "cleanup");
4140 tree ffalse = build2 (MODIFY_EXPR, void_type_node, flag,
4141 boolean_false_node);
4142 tree ftrue = build2 (MODIFY_EXPR, void_type_node, flag,
4143 boolean_true_node);
4144 cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
4145 wce = build1 (WITH_CLEANUP_EXPR, void_type_node, cleanup);
4146 append_to_statement_list (ffalse, &gimplify_ctxp->conditional_cleanups);
4147 append_to_statement_list (wce, &gimplify_ctxp->conditional_cleanups);
4148 append_to_statement_list (ftrue, pre_p);
4149
4150 /* Because of this manipulation, and the EH edges that jump
4151 threading cannot redirect, the temporary (VAR) will appear
4152 to be used uninitialized. Don't warn. */
4153 TREE_NO_WARNING (var) = 1;
4154 }
4155 else
4156 {
4157 wce = build1 (WITH_CLEANUP_EXPR, void_type_node, cleanup);
4158 CLEANUP_EH_ONLY (wce) = eh_only;
4159 append_to_statement_list (wce, pre_p);
4160 }
4161
4162 gimplify_stmt (&TREE_OPERAND (wce, 0));
4163 }
4164
4165 /* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */
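
/* For illustration (a sketch with invented names), a TARGET_EXPR such as

     TARGET_EXPR <D.1234, S::S (&D.1234)>

   used as a statement or operand is lowered below to

     D.1234 declared as a function-local temporary
     S::S (&D.1234);          appended to PRE_P
     ... D.1234 ...           *EXPR_P replaced by the slot

   with any TARGET_EXPR_CLEANUP pushed via gimple_push_cleanup so the
   destructor runs at the enclosing cleanup point.  */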
4166
4167 static enum gimplify_status
4168 gimplify_target_expr (tree *expr_p, tree *pre_p, tree *post_p)
4169 {
4170 tree targ = *expr_p;
4171 tree temp = TARGET_EXPR_SLOT (targ);
4172 tree init = TARGET_EXPR_INITIAL (targ);
4173 enum gimplify_status ret;
4174
4175 if (init)
4176 {
4177 /* TARGET_EXPR temps aren't part of the enclosing block, so add this
4178 one to the temps list. */
4179 gimple_add_tmp_var (temp);
4180
4181 /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
4182 expression is supposed to initialize the slot. */
4183 if (VOID_TYPE_P (TREE_TYPE (init)))
4184 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
4185 else
4186 {
4187 /* Special handling for BIND_EXPR can result in fewer temps. */
4188 ret = GS_OK;
4189 if (TREE_CODE (init) == BIND_EXPR)
4190 gimplify_bind_expr (&init, temp, pre_p);
4191 if (init != temp)
4192 {
4193 init = build2 (INIT_EXPR, void_type_node, temp, init);
4194 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt,
4195 fb_none);
4196 }
4197 }
4198 if (ret == GS_ERROR)
4199 return GS_ERROR;
4200 append_to_statement_list (init, pre_p);
4201
4202 /* If needed, push the cleanup for the temp. */
4203 if (TARGET_EXPR_CLEANUP (targ))
4204 {
4205 gimplify_stmt (&TARGET_EXPR_CLEANUP (targ));
4206 gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
4207 CLEANUP_EH_ONLY (targ), pre_p);
4208 }
4209
4210 /* Only expand this once. */
4211 TREE_OPERAND (targ, 3) = init;
4212 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
4213 }
4214 else
4215 /* We should have expanded this before. */
4216 gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));
4217
4218 *expr_p = temp;
4219 return GS_OK;
4220 }
4221
4222 /* Gimplification of expression trees. */
4223
4224 /* Gimplify an expression which appears at statement context; usually, this
4225 means replacing it with a suitably gimple STATEMENT_LIST. */
4226
4227 void
4228 gimplify_stmt (tree *stmt_p)
4229 {
4230 gimplify_expr (stmt_p, NULL, NULL, is_gimple_stmt, fb_none);
4231 }
4232
4233 /* Similarly, but force the result to be a STATEMENT_LIST. */
4234
4235 void
4236 gimplify_to_stmt_list (tree *stmt_p)
4237 {
4238 gimplify_stmt (stmt_p);
4239 if (!*stmt_p)
4240 *stmt_p = alloc_stmt_list ();
4241 else if (TREE_CODE (*stmt_p) != STATEMENT_LIST)
4242 {
4243 tree t = *stmt_p;
4244 *stmt_p = alloc_stmt_list ();
4245 append_to_statement_list (t, stmt_p);
4246 }
4247 }
4248
4249
4250 /* Add FIRSTPRIVATE entries for DECL in CTX and the OpenMP parallels
4251 surrounding it. If entries already exist, force them to be some flavor
4252 of private. If there is no enclosing parallel, do nothing. */
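
/* For illustration (a sketch; the temporary name and sizes are invented),
   for a variable-sized array "int a[n]" the gimplified size of a's type is
   held in a temporary, e.g. D.1234 = n * 4.  When a is used inside a
   parallel, the walk below makes sure D.1234 is FIRSTPRIVATE in every
   enclosing parallel (demoting a SHARED entry if one exists), so each
   thread gets the size by value.  */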
4253
4254 void
4255 omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
4256 {
4257 splay_tree_node n;
4258
4259 if (decl == NULL || !DECL_P (decl))
4260 return;
4261
4262 do
4263 {
4264 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
4265 if (n != NULL)
4266 {
4267 if (n->value & GOVD_SHARED)
4268 n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
4269 else
4270 return;
4271 }
4272 else if (ctx->is_parallel)
4273 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
4274
4275 ctx = ctx->outer_context;
4276 }
4277 while (ctx);
4278 }
4279
4280 /* Similarly for each of the type sizes of TYPE. */
4281
4282 static void
4283 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
4284 {
4285 if (type == NULL || type == error_mark_node)
4286 return;
4287 type = TYPE_MAIN_VARIANT (type);
4288
4289 if (pointer_set_insert (ctx->privatized_types, type))
4290 return;
4291
4292 switch (TREE_CODE (type))
4293 {
4294 case INTEGER_TYPE:
4295 case ENUMERAL_TYPE:
4296 case BOOLEAN_TYPE:
4297 case REAL_TYPE:
4298 omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
4299 omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
4300 break;
4301
4302 case ARRAY_TYPE:
4303 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
4304 omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
4305 break;
4306
4307 case RECORD_TYPE:
4308 case UNION_TYPE:
4309 case QUAL_UNION_TYPE:
4310 {
4311 tree field;
4312 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
4313 if (TREE_CODE (field) == FIELD_DECL)
4314 {
4315 omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
4316 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
4317 }
4318 }
4319 break;
4320
4321 case POINTER_TYPE:
4322 case REFERENCE_TYPE:
4323 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
4324 break;
4325
4326 default:
4327 break;
4328 }
4329
4330 omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
4331 omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
4332 lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
4333 }
4334
4335 /* Add an entry for DECL in the OpenMP context CTX with FLAGS. */
4336
4337 static void
4338 omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
4339 {
4340 splay_tree_node n;
4341 unsigned int nflags;
4342 tree t;
4343
4344 if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node)
4345 return;
4346
4347 /* Never elide decls whose type has TREE_ADDRESSABLE set. This means
4348 there are constructors involved somewhere. */
4349 if (TREE_ADDRESSABLE (TREE_TYPE (decl))
4350 || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl)))
4351 flags |= GOVD_SEEN;
4352
4353 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
4354 if (n != NULL)
4355 {
4356 /* We shouldn't be re-adding the decl with the same data
4357 sharing class. */
4358 gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
4359 /* The only combination of data sharing classes we should see is
4360 FIRSTPRIVATE and LASTPRIVATE. */
4361 nflags = n->value | flags;
4362 gcc_assert ((nflags & GOVD_DATA_SHARE_CLASS)
4363 == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE));
4364 n->value = nflags;
4365 return;
4366 }
4367
4368 /* When adding a variable-sized variable, we have to handle all sorts
4369 of additional bits of data: the pointer replacement variable, and
4370 the parameters of the type. */
4371 if (DECL_SIZE (decl) && !TREE_CONSTANT (DECL_SIZE (decl)))
4372 {
4373 /* Add the pointer replacement variable as PRIVATE if the variable
4374 replacement is private, else FIRSTPRIVATE since we'll need the
4375 address of the original variable either for SHARED, or for the
4376 copy into or out of the context. */
4377 if (!(flags & GOVD_LOCAL))
4378 {
4379 nflags = flags & GOVD_PRIVATE ? GOVD_PRIVATE : GOVD_FIRSTPRIVATE;
4380 nflags |= flags & GOVD_SEEN;
4381 t = DECL_VALUE_EXPR (decl);
4382 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
4383 t = TREE_OPERAND (t, 0);
4384 gcc_assert (DECL_P (t));
4385 omp_add_variable (ctx, t, nflags);
4386 }
4387
4388 /* Add all of the variable and type parameters (which should have
4389 been gimplified to a formal temporary) as FIRSTPRIVATE. */
4390 omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
4391 omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
4392 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
4393
4394 /* The variable-sized variable itself is never SHARED, only some form
4395 of PRIVATE. The sharing would take place via the pointer variable
4396 which we remapped above. */
4397 if (flags & GOVD_SHARED)
4398 flags = GOVD_PRIVATE | GOVD_DEBUG_PRIVATE
4399 | (flags & (GOVD_SEEN | GOVD_EXPLICIT));
4400
4401 /* We're going to make use of the TYPE_SIZE_UNIT at least in the
4402 alloca statement we generate for the variable, so make sure it
4403 is available. This isn't automatically needed for the SHARED
4404 case, since we won't be allocating local storage then. */
4405 else
4406 omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
4407 }
4408 else if (lang_hooks.decls.omp_privatize_by_reference (decl))
4409 {
4410 gcc_assert ((flags & GOVD_LOCAL) == 0);
4411 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
4412
4413 /* Similar to the direct variable sized case above, we'll need the
4414 size of references being privatized. */
4415 if ((flags & GOVD_SHARED) == 0)
4416 {
4417 t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
4418 if (!TREE_CONSTANT (t))
4419 omp_notice_variable (ctx, t, true);
4420 }
4421 }
4422
4423 splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
4424 }
4425
4426 /* Record the fact that DECL was used within the OpenMP context CTX.
4427 IN_CODE is true when real code uses DECL, and false when we should
4428 merely emit default(none) errors. Return true if DECL is going to
4429 be remapped and thus DECL shouldn't be gimplified into its
4430 DECL_VALUE_EXPR (if any). */
4431
4432 static bool
4433 omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
4434 {
4435 splay_tree_node n;
4436 unsigned flags = in_code ? GOVD_SEEN : 0;
4437 bool ret = false, shared;
4438
4439 if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node)
4440 return false;
4441
4442 /* Threadprivate variables are predetermined. */
4443 if (is_global_var (decl))
4444 {
4445 if (DECL_THREAD_LOCAL_P (decl))
4446 return false;
4447
4448 if (DECL_HAS_VALUE_EXPR_P (decl))
4449 {
4450 tree value = get_base_address (DECL_VALUE_EXPR (decl));
4451
4452 if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
4453 return false;
4454 }
4455 }
4456
4457 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
4458 if (n == NULL)
4459 {
4460 enum omp_clause_default_kind default_kind, kind;
4461
4462 if (!ctx->is_parallel)
4463 goto do_outer;
4464
4465 /* ??? Some compiler-generated variables (like SAVE_EXPRs) could be
4466 remapped firstprivate instead of shared. To some extent this is
4467 addressed in omp_firstprivatize_type_sizes, but not effectively. */
4468 default_kind = ctx->default_kind;
4469 kind = lang_hooks.decls.omp_predetermined_sharing (decl);
4470 if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
4471 default_kind = kind;
4472
4473 switch (default_kind)
4474 {
4475 case OMP_CLAUSE_DEFAULT_NONE:
4476 error ("%qs not specified in enclosing parallel",
4477 IDENTIFIER_POINTER (DECL_NAME (decl)));
4478 error ("%Henclosing parallel", &ctx->location);
4479 /* FALLTHRU */
4480 case OMP_CLAUSE_DEFAULT_SHARED:
4481 flags |= GOVD_SHARED;
4482 break;
4483 case OMP_CLAUSE_DEFAULT_PRIVATE:
4484 flags |= GOVD_PRIVATE;
4485 break;
4486 default:
4487 gcc_unreachable ();
4488 }
4489
4490 omp_add_variable (ctx, decl, flags);
4491
4492 shared = (flags & GOVD_SHARED) != 0;
4493 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
4494 goto do_outer;
4495 }
4496
4497 shared = ((flags | n->value) & GOVD_SHARED) != 0;
4498 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
4499
4500 /* If nothing changed, there's nothing left to do. */
4501 if ((n->value & flags) == flags)
4502 return ret;
4503 flags |= n->value;
4504 n->value = flags;
4505
4506 do_outer:
4507 /* If the variable is private in the current context, then we don't
4508 need to propagate anything to an outer context. */
4509 if (flags & GOVD_PRIVATE)
4510 return ret;
4511 if (ctx->outer_context
4512 && omp_notice_variable (ctx->outer_context, decl, in_code))
4513 return true;
4514 return ret;
4515 }
4516
4517 /* Verify that DECL is private within CTX. If there's specific information
4518 to the contrary in the innermost scope, generate an error. */
4519
4520 static bool
4521 omp_is_private (struct gimplify_omp_ctx *ctx, tree decl)
4522 {
4523 splay_tree_node n;
4524
4525 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
4526 if (n != NULL)
4527 {
4528 if (n->value & GOVD_SHARED)
4529 {
4530 if (ctx == gimplify_omp_ctxp)
4531 {
4532 error ("iteration variable %qs should be private",
4533 IDENTIFIER_POINTER (DECL_NAME (decl)));
4534 n->value = GOVD_PRIVATE;
4535 return true;
4536 }
4537 else
4538 return false;
4539 }
4540 else if ((n->value & GOVD_EXPLICIT) != 0
4541 && (ctx == gimplify_omp_ctxp
4542 || (ctx->is_combined_parallel
4543 && gimplify_omp_ctxp->outer_context == ctx)))
4544 {
4545 if ((n->value & GOVD_FIRSTPRIVATE) != 0)
4546 error ("iteration variable %qs should not be firstprivate",
4547 IDENTIFIER_POINTER (DECL_NAME (decl)));
4548 else if ((n->value & GOVD_REDUCTION) != 0)
4549 error ("iteration variable %qs should not be reduction",
4550 IDENTIFIER_POINTER (DECL_NAME (decl)));
4551 }
4552 return true;
4553 }
4554
4555 if (ctx->is_parallel)
4556 return false;
4557 else if (ctx->outer_context)
4558 return omp_is_private (ctx->outer_context, decl);
4559 else
4560 return !is_global_var (decl);
4561 }
4562
4563 /* Scan the OpenMP clauses in *LIST_P, installing mappings into a new
4564 OMP context and, where needed, into the surrounding OMP contexts. */
4565
4566 static void
4567 gimplify_scan_omp_clauses (tree *list_p, tree *pre_p, bool in_parallel,
4568 bool in_combined_parallel)
4569 {
4570 struct gimplify_omp_ctx *ctx, *outer_ctx;
4571 tree c;
4572
4573 ctx = new_omp_context (in_parallel, in_combined_parallel);
4574 outer_ctx = ctx->outer_context;
4575
4576 while ((c = *list_p) != NULL)
4577 {
4578 enum gimplify_status gs;
4579 bool remove = false;
4580 bool notice_outer = true;
4581 unsigned int flags;
4582 tree decl;
4583
4584 switch (OMP_CLAUSE_CODE (c))
4585 {
4586 case OMP_CLAUSE_PRIVATE:
4587 flags = GOVD_PRIVATE | GOVD_EXPLICIT;
4588 notice_outer = false;
4589 goto do_add;
4590 case OMP_CLAUSE_SHARED:
4591 flags = GOVD_SHARED | GOVD_EXPLICIT;
4592 goto do_add;
4593 case OMP_CLAUSE_FIRSTPRIVATE:
4594 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
4595 goto do_add;
4596 case OMP_CLAUSE_LASTPRIVATE:
4597 flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
4598 goto do_add;
4599 case OMP_CLAUSE_REDUCTION:
4600 flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
4601 goto do_add;
4602
4603 do_add:
4604 decl = OMP_CLAUSE_DECL (c);
4605 if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node)
4606 {
4607 remove = true;
4608 break;
4609 }
4610 /* Handle NRV results passed by reference. */
4611 if (TREE_CODE (decl) == INDIRECT_REF
4612 && TREE_CODE (TREE_OPERAND (decl, 0)) == RESULT_DECL
4613 && DECL_BY_REFERENCE (TREE_OPERAND (decl, 0)))
4614 OMP_CLAUSE_DECL (c) = decl = TREE_OPERAND (decl, 0);
4615 omp_add_variable (ctx, decl, flags);
4616 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
4617 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4618 {
4619 omp_add_variable (ctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
4620 GOVD_LOCAL | GOVD_SEEN);
4621 gimplify_omp_ctxp = ctx;
4622 push_gimplify_context ();
4623 gimplify_stmt (&OMP_CLAUSE_REDUCTION_INIT (c));
4624 pop_gimplify_context (OMP_CLAUSE_REDUCTION_INIT (c));
4625 push_gimplify_context ();
4626 gimplify_stmt (&OMP_CLAUSE_REDUCTION_MERGE (c));
4627 pop_gimplify_context (OMP_CLAUSE_REDUCTION_MERGE (c));
4628 gimplify_omp_ctxp = outer_ctx;
4629 }
4630 if (notice_outer)
4631 goto do_notice;
4632 break;
4633
4634 case OMP_CLAUSE_COPYIN:
4635 case OMP_CLAUSE_COPYPRIVATE:
4636 decl = OMP_CLAUSE_DECL (c);
4637 if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node)
4638 {
4639 remove = true;
4640 break;
4641 }
4642 /* Handle NRV results passed by reference. */
4643 if (TREE_CODE (decl) == INDIRECT_REF
4644 && TREE_CODE (TREE_OPERAND (decl, 0)) == RESULT_DECL
4645 && DECL_BY_REFERENCE (TREE_OPERAND (decl, 0)))
4646 OMP_CLAUSE_DECL (c) = decl = TREE_OPERAND (decl, 0);
4647 do_notice:
4648 if (outer_ctx)
4649 omp_notice_variable (outer_ctx, decl, true);
4650 break;
4651
4652 case OMP_CLAUSE_IF:
4653 OMP_CLAUSE_OPERAND (c, 0)
4654 = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
4655 /* Fall through. */
4656
4657 case OMP_CLAUSE_SCHEDULE:
4658 case OMP_CLAUSE_NUM_THREADS:
4659 gs = gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
4660 is_gimple_val, fb_rvalue);
4661 if (gs == GS_ERROR)
4662 remove = true;
4663 break;
4664
4665 case OMP_CLAUSE_NOWAIT:
4666 case OMP_CLAUSE_ORDERED:
4667 break;
4668
4669 case OMP_CLAUSE_DEFAULT:
4670 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
4671 break;
4672
4673 default:
4674 gcc_unreachable ();
4675 }
4676
4677 if (remove)
4678 *list_p = OMP_CLAUSE_CHAIN (c);
4679 else
4680 list_p = &OMP_CLAUSE_CHAIN (c);
4681 }
4682
4683 gimplify_omp_ctxp = ctx;
4684 }
4685
4686 /* Splay-tree callback: add an implicit PRIVATE, SHARED, or FIRSTPRIVATE
4687 clause for each variable actually used in the context but lacking an explicit clause. */
4688
4689 static int
4690 gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
4691 {
4692 tree *list_p = (tree *) data;
4693 tree decl = (tree) n->key;
4694 unsigned flags = n->value;
4695 enum omp_clause_code code;
4696 tree clause;
4697 bool private_debug;
4698
4699 if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
4700 return 0;
4701 if ((flags & GOVD_SEEN) == 0)
4702 return 0;
4703 if (flags & GOVD_DEBUG_PRIVATE)
4704 {
4705 gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_PRIVATE);
4706 private_debug = true;
4707 }
4708 else
4709 private_debug
4710 = lang_hooks.decls.omp_private_debug_clause (decl,
4711 !!(flags & GOVD_SHARED));
4712 if (private_debug)
4713 code = OMP_CLAUSE_PRIVATE;
4714 else if (flags & GOVD_SHARED)
4715 {
4716 if (is_global_var (decl))
4717 return 0;
4718 code = OMP_CLAUSE_SHARED;
4719 }
4720 else if (flags & GOVD_PRIVATE)
4721 code = OMP_CLAUSE_PRIVATE;
4722 else if (flags & GOVD_FIRSTPRIVATE)
4723 code = OMP_CLAUSE_FIRSTPRIVATE;
4724 else
4725 gcc_unreachable ();
4726
4727 clause = build_omp_clause (code);
4728 OMP_CLAUSE_DECL (clause) = decl;
4729 OMP_CLAUSE_CHAIN (clause) = *list_p;
4730 if (private_debug)
4731 OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
4732 *list_p = clause;
4733
4734 return 0;
4735 }
4736
4737 static void
4738 gimplify_adjust_omp_clauses (tree *list_p)
4739 {
4740 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
4741 tree c, decl;
4742
4743 while ((c = *list_p) != NULL)
4744 {
4745 splay_tree_node n;
4746 bool remove = false;
4747
4748 switch (OMP_CLAUSE_CODE (c))
4749 {
4750 case OMP_CLAUSE_PRIVATE:
4751 case OMP_CLAUSE_SHARED:
4752 case OMP_CLAUSE_FIRSTPRIVATE:
4753 decl = OMP_CLAUSE_DECL (c);
4754 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
4755 remove = !(n->value & GOVD_SEEN);
4756 if (! remove)
4757 {
4758 bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
4759 if ((n->value & GOVD_DEBUG_PRIVATE)
4760 || lang_hooks.decls.omp_private_debug_clause (decl, shared))
4761 {
4762 gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
4763 || ((n->value & GOVD_DATA_SHARE_CLASS)
4764 == GOVD_PRIVATE));
4765 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
4766 OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
4767 }
4768 }
4769 break;
4770
4771 case OMP_CLAUSE_LASTPRIVATE:
4772 /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
4773 accurately reflect the presence of a FIRSTPRIVATE clause. */
4774 decl = OMP_CLAUSE_DECL (c);
4775 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
4776 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
4777 = (n->value & GOVD_FIRSTPRIVATE) != 0;
4778 break;
4779
4780 case OMP_CLAUSE_REDUCTION:
4781 case OMP_CLAUSE_COPYIN:
4782 case OMP_CLAUSE_COPYPRIVATE:
4783 case OMP_CLAUSE_IF:
4784 case OMP_CLAUSE_NUM_THREADS:
4785 case OMP_CLAUSE_SCHEDULE:
4786 case OMP_CLAUSE_NOWAIT:
4787 case OMP_CLAUSE_ORDERED:
4788 case OMP_CLAUSE_DEFAULT:
4789 break;
4790
4791 default:
4792 gcc_unreachable ();
4793 }
4794
4795 if (remove)
4796 *list_p = OMP_CLAUSE_CHAIN (c);
4797 else
4798 list_p = &OMP_CLAUSE_CHAIN (c);
4799 }
4800
4801 /* Add in any implicit data sharing. */
4802 splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, list_p);
4803
4804 gimplify_omp_ctxp = ctx->outer_context;
4805 delete_omp_context (ctx);
4806 }
4807
4808 /* Gimplify the contents of an OMP_PARALLEL statement. This involves
4809 gimplification of the body, as well as scanning the body for used
4810 variables. We need to do this scan now, because variable-sized
4811 decls will be decomposed during gimplification. */
4812
4813 static enum gimplify_status
4814 gimplify_omp_parallel (tree *expr_p, tree *pre_p)
4815 {
4816 tree expr = *expr_p;
4817
4818 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p, true,
4819 OMP_PARALLEL_COMBINED (expr));
4820
4821 push_gimplify_context ();
4822
4823 gimplify_stmt (&OMP_PARALLEL_BODY (expr));
4824
4825 if (TREE_CODE (OMP_PARALLEL_BODY (expr)) == BIND_EXPR)
4826 pop_gimplify_context (OMP_PARALLEL_BODY (expr));
4827 else
4828 pop_gimplify_context (NULL_TREE);
4829
4830 gimplify_adjust_omp_clauses (&OMP_PARALLEL_CLAUSES (expr));
4831
4832 return GS_ALL_DONE;
4833 }
4834
4835 /* Gimplify the gross structure of an OMP_FOR statement. */
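
/* For illustration (a sketch of the canonicalization done below), the
   increment of

     #pragma omp for
     for (i = 0; i < n; i++)

   is rewritten as the explicit assignment

     i = i + 1;

   and an increment written as "i = 5 + i" has its operands swapped so the
   iteration variable is always the first operand of the PLUS_EXPR.  */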
4836
4837 static enum gimplify_status
4838 gimplify_omp_for (tree *expr_p, tree *pre_p)
4839 {
4840 tree for_stmt, decl, t;
4841 enum gimplify_status ret = 0;
4842
4843 for_stmt = *expr_p;
4844
4845 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p, false, false);
4846
4847 t = OMP_FOR_INIT (for_stmt);
4848 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
4849 decl = TREE_OPERAND (t, 0);
4850 gcc_assert (DECL_P (decl));
4851 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl)));
4852
4853 /* Make sure the iteration variable is private. */
4854 if (omp_is_private (gimplify_omp_ctxp, decl))
4855 omp_notice_variable (gimplify_omp_ctxp, decl, true);
4856 else
4857 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
4858
4859 ret |= gimplify_expr (&TREE_OPERAND (t, 1), &OMP_FOR_PRE_BODY (for_stmt),
4860 NULL, is_gimple_val, fb_rvalue);
4861
4862 t = OMP_FOR_COND (for_stmt);
4863 gcc_assert (COMPARISON_CLASS_P (t));
4864 gcc_assert (TREE_OPERAND (t, 0) == decl);
4865
4866 ret |= gimplify_expr (&TREE_OPERAND (t, 1), &OMP_FOR_PRE_BODY (for_stmt),
4867 NULL, is_gimple_val, fb_rvalue);
4868
4869 t = OMP_FOR_INCR (for_stmt);
4870 switch (TREE_CODE (t))
4871 {
4872 case PREINCREMENT_EXPR:
4873 case POSTINCREMENT_EXPR:
4874 t = build_int_cst (TREE_TYPE (decl), 1);
4875 goto build_modify;
4876 case PREDECREMENT_EXPR:
4877 case POSTDECREMENT_EXPR:
4878 t = build_int_cst (TREE_TYPE (decl), -1);
4879 goto build_modify;
4880 build_modify:
4881 t = build2 (PLUS_EXPR, TREE_TYPE (decl), decl, t);
4882 t = build2 (MODIFY_EXPR, void_type_node, decl, t);
4883 OMP_FOR_INCR (for_stmt) = t;
4884 break;
4885
4886 case MODIFY_EXPR:
4887 gcc_assert (TREE_OPERAND (t, 0) == decl);
4888 t = TREE_OPERAND (t, 1);
4889 switch (TREE_CODE (t))
4890 {
4891 case PLUS_EXPR:
4892 if (TREE_OPERAND (t, 1) == decl)
4893 {
4894 TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
4895 TREE_OPERAND (t, 0) = decl;
4896 break;
4897 }
4898 case MINUS_EXPR:
4899 gcc_assert (TREE_OPERAND (t, 0) == decl);
4900 break;
4901 default:
4902 gcc_unreachable ();
4903 }
4904
4905 ret |= gimplify_expr (&TREE_OPERAND (t, 1), &OMP_FOR_PRE_BODY (for_stmt),
4906 NULL, is_gimple_val, fb_rvalue);
4907 break;
4908
4909 default:
4910 gcc_unreachable ();
4911 }
4912
4913 gimplify_to_stmt_list (&OMP_FOR_BODY (for_stmt));
4914 gimplify_adjust_omp_clauses (&OMP_FOR_CLAUSES (for_stmt));
4915
4916 return ret == GS_ALL_DONE ? GS_ALL_DONE : GS_ERROR;
4917 }
4918
4919 /* Gimplify the gross structure of other OpenMP worksharing constructs.
4920 In particular, OMP_SECTIONS and OMP_SINGLE. */
4921
4922 static enum gimplify_status
4923 gimplify_omp_workshare (tree *expr_p, tree *pre_p)
4924 {
4925 tree stmt = *expr_p;
4926
4927 gimplify_scan_omp_clauses (&OMP_CLAUSES (stmt), pre_p, false, false);
4928 gimplify_to_stmt_list (&OMP_BODY (stmt));
4929 gimplify_adjust_omp_clauses (&OMP_CLAUSES (stmt));
4930
4931 return GS_ALL_DONE;
4932 }
4933
4934 /* A subroutine of gimplify_omp_atomic. The front end is supposed to have
4935 stabilized the lhs of the atomic operation as *ADDR. Return true if
4936 EXPR is this stabilized form. */
4937
4938 static bool
4939 goa_lhs_expr_p (tree expr, tree addr)
4940 {
4941 /* Also include casts to other type variants. The C front end is fond
4942 of adding these for e.g. volatile variables. This is like
4943 STRIP_TYPE_NOPS but includes the main variant lookup. */
4944 while ((TREE_CODE (expr) == NOP_EXPR
4945 || TREE_CODE (expr) == CONVERT_EXPR
4946 || TREE_CODE (expr) == NON_LVALUE_EXPR)
4947 && TREE_OPERAND (expr, 0) != error_mark_node
4948 && (TYPE_MAIN_VARIANT (TREE_TYPE (expr))
4949 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (expr, 0)))))
4950 expr = TREE_OPERAND (expr, 0);
4951
4952 if (TREE_CODE (expr) == INDIRECT_REF && TREE_OPERAND (expr, 0) == addr)
4953 return true;
4954 if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
4955 return true;
4956 return false;
4957 }
4958
4959 /* A subroutine of gimplify_omp_atomic. Attempt to implement the atomic
4960 operation as a __sync_fetch_and_op builtin. INDEX is log2 of the
4961 size of the data type, and thus usable to find the index of the builtin
4962 decl. Returns GS_UNHANDLED if the expression is not of the proper form. */
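
/* For illustration (a sketch; the builtin actually chosen depends on the
   operand size and on target support), an atomic update such as

     #pragma omp atomic
     x += 5;

   arrives here with RHS of the form *&x + 5 and, when x is a 4-byte
   integer, is rewritten into the call

     __sync_fetch_and_add_4 (&x, 5);

   with the result discarded.  */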
4963
4964 static enum gimplify_status
4965 gimplify_omp_atomic_fetch_op (tree *expr_p, tree addr, tree rhs, int index)
4966 {
4967 enum built_in_function base;
4968 tree decl, args, itype;
4969 enum insn_code *optab;
4970
4971 /* Check for one of the supported fetch-op operations. */
4972 switch (TREE_CODE (rhs))
4973 {
4974 case PLUS_EXPR:
4975 base = BUILT_IN_FETCH_AND_ADD_N;
4976 optab = sync_add_optab;
4977 break;
4978 case MINUS_EXPR:
4979 base = BUILT_IN_FETCH_AND_SUB_N;
4980 optab = sync_add_optab;
4981 break;
4982 case BIT_AND_EXPR:
4983 base = BUILT_IN_FETCH_AND_AND_N;
4984 optab = sync_and_optab;
4985 break;
4986 case BIT_IOR_EXPR:
4987 base = BUILT_IN_FETCH_AND_OR_N;
4988 optab = sync_ior_optab;
4989 break;
4990 case BIT_XOR_EXPR:
4991 base = BUILT_IN_FETCH_AND_XOR_N;
4992 optab = sync_xor_optab;
4993 break;
4994 default:
4995 return GS_UNHANDLED;
4996 }
4997
4998 /* Make sure the expression is of the proper form. */
4999 if (goa_lhs_expr_p (TREE_OPERAND (rhs, 0), addr))
5000 rhs = TREE_OPERAND (rhs, 1);
5001 else if (commutative_tree_code (TREE_CODE (rhs))
5002 && goa_lhs_expr_p (TREE_OPERAND (rhs, 1), addr))
5003 rhs = TREE_OPERAND (rhs, 0);
5004 else
5005 return GS_UNHANDLED;
5006
5007 decl = built_in_decls[base + index + 1];
5008 itype = TREE_TYPE (TREE_TYPE (decl));
5009
5010 if (optab[TYPE_MODE (itype)] == CODE_FOR_nothing)
5011 return GS_UNHANDLED;
5012
5013 args = tree_cons (NULL, fold_convert (itype, rhs), NULL);
5014 args = tree_cons (NULL, addr, args);
5015 *expr_p = build_function_call_expr (decl, args);
5016 return GS_OK;
5017 }
5018
5019 /* A subroutine of gimplify_omp_atomic_pipeline. Walk *EXPR_P and replace
5020 appearances of *LHS_ADDR with LHS_VAR. If an expression does not involve
5021 the lhs, evaluate it into a temporary. Return 1 if the lhs appeared as
5022 a subexpression, 0 if it did not, or -1 if an error was encountered. */
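
/* For illustration (a sketch; temporary names invented), stabilizing the
   rhs of

     #pragma omp atomic
     x = x + f (y);

   with LHS_ADDR == &x and LHS_VAR == oldval replaces the load of x with
   oldval and evaluates the non-lhs part into a temporary on PRE_P:

     D.1234 = f (y);
     ... oldval + D.1234 ...  */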
5023
5024 static int
5025 goa_stabilize_expr (tree *expr_p, tree *pre_p, tree lhs_addr, tree lhs_var)
5026 {
5027 tree expr = *expr_p;
5028 int saw_lhs;
5029
5030 if (goa_lhs_expr_p (expr, lhs_addr))
5031 {
5032 *expr_p = lhs_var;
5033 return 1;
5034 }
5035 if (is_gimple_val (expr))
5036 return 0;
5037
5038 saw_lhs = 0;
5039 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
5040 {
5041 case tcc_binary:
5042 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
5043 lhs_addr, lhs_var);
5044 case tcc_unary:
5045 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
5046 lhs_addr, lhs_var);
5047 break;
5048 default:
5049 break;
5050 }
5051
5052 if (saw_lhs == 0)
5053 {
5054 enum gimplify_status gs;
5055 gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
5056 if (gs != GS_ALL_DONE)
5057 saw_lhs = -1;
5058 }
5059
5060 return saw_lhs;
5061 }
5062
5063 /* A subroutine of gimplify_omp_atomic. Implement the atomic operation as:
5064
5065 oldval = *addr;
5066 repeat:
5067 newval = rhs; // with oldval replacing *addr in rhs
5068 oldval = __sync_val_compare_and_swap (addr, oldval, newval);
5069 if (oldval != newval)
5070 goto repeat;
5071
5072 INDEX is log2 of the size of the data type, and thus usable to find the
5073 index of the builtin decl. */
5074
5075 static enum gimplify_status
5076 gimplify_omp_atomic_pipeline (tree *expr_p, tree *pre_p, tree addr,
5077 tree rhs, int index)
5078 {
5079 tree oldval, oldival, oldival2, newval, newival, label;
5080 tree type, itype, cmpxchg, args, x, iaddr;
5081
5082 cmpxchg = built_in_decls[BUILT_IN_VAL_COMPARE_AND_SWAP_N + index + 1];
5083 type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
5084 itype = TREE_TYPE (TREE_TYPE (cmpxchg));
5085
5086 if (sync_compare_and_swap[TYPE_MODE (itype)] == CODE_FOR_nothing)
5087 return GS_UNHANDLED;
5088
5089 oldval = create_tmp_var (type, NULL);
5090 newval = create_tmp_var (type, NULL);
5091
5092 /* Precompute as much of RHS as possible. In the same walk, replace
5093 occurrences of the lhs value with our temporary. */
5094 if (goa_stabilize_expr (&rhs, pre_p, addr, oldval) < 0)
5095 return GS_ERROR;
5096
5097 x = build_fold_indirect_ref (addr);
5098 x = build2 (MODIFY_EXPR, void_type_node, oldval, x);
5099 gimplify_and_add (x, pre_p);
5100
5101 /* For floating-point values, we'll need to view-convert them to integers
5102 so that we can perform the atomic compare and swap. Simplify the
5103 following code by always setting up the "i"ntegral variables. */
5104 if (INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type))
5105 {
5106 oldival = oldval;
5107 newival = newval;
5108 iaddr = addr;
5109 }
5110 else
5111 {
5112 oldival = create_tmp_var (itype, NULL);
5113 newival = create_tmp_var (itype, NULL);
5114
5115 x = build1 (VIEW_CONVERT_EXPR, itype, oldval);
5116 x = build2 (MODIFY_EXPR, void_type_node, oldival, x);
5117 gimplify_and_add (x, pre_p);
5118 iaddr = fold_convert (build_pointer_type (itype), addr);
5119 }
5120
5121 oldival2 = create_tmp_var (itype, NULL);
5122
5123 label = create_artificial_label ();
5124 x = build1 (LABEL_EXPR, void_type_node, label);
5125 gimplify_and_add (x, pre_p);
5126
5127 x = build2 (MODIFY_EXPR, void_type_node, newval, rhs);
5128 gimplify_and_add (x, pre_p);
5129
5130 if (newval != newival)
5131 {
5132 x = build1 (VIEW_CONVERT_EXPR, itype, newval);
5133 x = build2 (MODIFY_EXPR, void_type_node, newival, x);
5134 gimplify_and_add (x, pre_p);
5135 }
5136
5137 x = build2 (MODIFY_EXPR, void_type_node, oldival2,
5138 fold_convert (itype, oldival));
5139 gimplify_and_add (x, pre_p);
5140
5141 args = tree_cons (NULL, fold_convert (itype, newival), NULL);
5142 args = tree_cons (NULL, fold_convert (itype, oldival), args);
5143 args = tree_cons (NULL, iaddr, args);
5144 x = build_function_call_expr (cmpxchg, args);
5145 if (oldval == oldival)
5146 x = fold_convert (type, x);
5147 x = build2 (MODIFY_EXPR, void_type_node, oldival, x);
5148 gimplify_and_add (x, pre_p);
5149
5150 /* For floating point, be prepared for the loop backedge. */
5151 if (oldval != oldival)
5152 {
5153 x = build1 (VIEW_CONVERT_EXPR, type, oldival);
5154 x = build2 (MODIFY_EXPR, void_type_node, oldval, x);
5155 gimplify_and_add (x, pre_p);
5156 }
5157
5158 /* Note that we always perform the comparison as an integer, even for
5159 floating point. This allows the atomic operation to properly
5160 succeed even with NaNs and -0.0. */
5161 x = build3 (COND_EXPR, void_type_node,
5162 build2 (NE_EXPR, boolean_type_node, oldival, oldival2),
5163 build1 (GOTO_EXPR, void_type_node, label), NULL);
5164 gimplify_and_add (x, pre_p);
5165
5166 *expr_p = NULL;
5167 return GS_ALL_DONE;
5168 }
5169
5170 /* A subroutine of gimplify_omp_atomic. Implement the atomic operation as:
5171
5172 GOMP_atomic_start ();
5173 *addr = rhs;
5174 GOMP_atomic_end ();
5175
5176 The result is not globally atomic, but works so long as all parallel
5177 references are within #pragma omp atomic directives. According to
5178 responses received from omp@openmp.org, this appears to be within spec,
5179 which makes sense, since that's how several other compilers handle
5180 this situation as well. */
5181
5182 static enum gimplify_status
5183 gimplify_omp_atomic_mutex (tree *expr_p, tree *pre_p, tree addr, tree rhs)
5184 {
5185 tree t;
5186
5187 t = built_in_decls[BUILT_IN_GOMP_ATOMIC_START];
5188 t = build_function_call_expr (t, NULL);
5189 gimplify_and_add (t, pre_p);
5190
5191 t = build_fold_indirect_ref (addr);
5192 t = build2 (MODIFY_EXPR, void_type_node, t, rhs);
5193 gimplify_and_add (t, pre_p);
5194
5195 t = built_in_decls[BUILT_IN_GOMP_ATOMIC_END];
5196 t = build_function_call_expr (t, NULL);
5197 gimplify_and_add (t, pre_p);
5198
5199 *expr_p = NULL;
5200 return GS_ALL_DONE;
5201 }
5202
5203 /* Gimplify an OMP_ATOMIC statement. */
5204
5205 static enum gimplify_status
5206 gimplify_omp_atomic (tree *expr_p, tree *pre_p)
5207 {
5208 tree addr = TREE_OPERAND (*expr_p, 0);
5209 tree rhs = TREE_OPERAND (*expr_p, 1);
5210 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
5211 HOST_WIDE_INT index;
5212
5213 /* Make sure the type is one of the supported sizes. */
5214 index = tree_low_cst (TYPE_SIZE_UNIT (type), 1);
5215 index = exact_log2 (index);
5216 if (index >= 0 && index <= 4)
5217 {
5218 enum gimplify_status gs;
5219 unsigned int align;
5220
5221 if (DECL_P (TREE_OPERAND (addr, 0)))
5222 align = DECL_ALIGN_UNIT (TREE_OPERAND (addr, 0));
5223 else if (TREE_CODE (TREE_OPERAND (addr, 0)) == COMPONENT_REF
5224 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (addr, 0), 1))
5225 == FIELD_DECL)
5226 align = DECL_ALIGN_UNIT (TREE_OPERAND (TREE_OPERAND (addr, 0), 1));
5227 else
5228 align = TYPE_ALIGN_UNIT (type);
5229
5230 /* __sync builtins require strict data alignment. */
5231 if (exact_log2 (align) >= index)
5232 {
5233 /* When possible, use specialized atomic update functions. */
5234 if (INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type))
5235 {
5236 gs = gimplify_omp_atomic_fetch_op (expr_p, addr, rhs, index);
5237 if (gs != GS_UNHANDLED)
5238 return gs;
5239 }
5240
5241 /* If we don't have specialized __sync builtins, try and implement
5242 as a compare and swap loop. */
5243 gs = gimplify_omp_atomic_pipeline (expr_p, pre_p, addr, rhs, index);
5244 if (gs != GS_UNHANDLED)
5245 return gs;
5246 }
5247 }
5248
5249 /* The ultimate fallback is wrapping the operation in a mutex. */
5250 return gimplify_omp_atomic_mutex (expr_p, pre_p, addr, rhs);
5251 }
5252
5253 /* Gimplifies the expression tree pointed to by EXPR_P. Returns GS_ERROR
5254 if gimplification failed.
5255
5256 PRE_P points to the list where side effects that must happen before
5257 EXPR should be stored.
5258
5259 POST_P points to the list where side effects that must happen after
5260 EXPR should be stored, or NULL if there is no suitable list. In
5261 that case, we copy the result to a temporary, emit the
5262 post-effects, and then return the temporary.
5263
5264 GIMPLE_TEST_F points to a function that takes a tree T and
5265 returns nonzero if T is in the GIMPLE form requested by the
5266 caller. The GIMPLE predicates are in tree-gimple.c.
5267
5268 This test is used twice. Before gimplification, the test is
5269 invoked to determine whether *EXPR_P is already gimple enough. If
5270 that fails, *EXPR_P is gimplified according to its code and
5271 GIMPLE_TEST_F is called again. If the test still fails, then a new
5272 temporary variable is created and assigned the value of the
5273 gimplified expression.
5274
5275 FALLBACK tells the function what sort of a temporary we want. If the 1
5276 bit is set, an rvalue is OK. If the 2 bit is set, an lvalue is OK.
5277 If both are set, either is OK, but an lvalue is preferable.
5278
5279 The return value is either GS_ERROR or GS_ALL_DONE, since this function
5280 iterates until a solution is found. */
5281
5282 enum gimplify_status
5283 gimplify_expr (tree *expr_p, tree *pre_p, tree *post_p,
5284 bool (* gimple_test_f) (tree), fallback_t fallback)
5285 {
5286 tree tmp;
5287 tree internal_pre = NULL_TREE;
5288 tree internal_post = NULL_TREE;
5289 tree save_expr;
5290 int is_statement = (pre_p == NULL);
5291 location_t saved_location;
5292 enum gimplify_status ret;
5293
5294 save_expr = *expr_p;
5295 if (save_expr == NULL_TREE)
5296 return GS_ALL_DONE;
5297
5298 /* We used to check the predicate here and return immediately if it
5299 succeeded. This is wrong; the design is for gimplification to be
5300 idempotent, and for the predicates to only test for valid forms, not
5301 whether they are fully simplified. */
5302
5303 /* Set up our internal queues if needed. */
5304 if (pre_p == NULL)
5305 pre_p = &internal_pre;
5306 if (post_p == NULL)
5307 post_p = &internal_post;
5308
5309 saved_location = input_location;
5310 if (save_expr != error_mark_node
5311 && EXPR_HAS_LOCATION (*expr_p))
5312 input_location = EXPR_LOCATION (*expr_p);
5313
5314 /* Loop over the specific gimplifiers until the toplevel node
5315 remains the same. */
5316 do
5317 {
5318 /* Strip away as many useless type conversions as possible
5319 at the toplevel. */
5320 STRIP_USELESS_TYPE_CONVERSION (*expr_p);
5321
5322 /* Remember the expr. */
5323 save_expr = *expr_p;
5324
5325 /* Die, die, die, my darling. */
5326 if (save_expr == error_mark_node
5327 || (TREE_TYPE (save_expr)
5328 && TREE_TYPE (save_expr) == error_mark_node))
5329 {
5330 ret = GS_ERROR;
5331 break;
5332 }
5333
5334 /* Do any language-specific gimplification. */
5335 ret = lang_hooks.gimplify_expr (expr_p, pre_p, post_p);
5336 if (ret == GS_OK)
5337 {
5338 if (*expr_p == NULL_TREE)
5339 break;
5340 if (*expr_p != save_expr)
5341 continue;
5342 }
5343 else if (ret != GS_UNHANDLED)
5344 break;
5345
5346 ret = GS_OK;
5347 switch (TREE_CODE (*expr_p))
5348 {
5349 /* First deal with the special cases. */
5350
5351 case POSTINCREMENT_EXPR:
5352 case POSTDECREMENT_EXPR:
5353 case PREINCREMENT_EXPR:
5354 case PREDECREMENT_EXPR:
5355 ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
5356 fallback != fb_none);
5357 break;
5358
5359 case ARRAY_REF:
5360 case ARRAY_RANGE_REF:
5361 case REALPART_EXPR:
5362 case IMAGPART_EXPR:
5363 case COMPONENT_REF:
5364 case VIEW_CONVERT_EXPR:
5365 ret = gimplify_compound_lval (expr_p, pre_p, post_p,
5366 fallback ? fallback : fb_rvalue);
5367 break;
5368
5369 case COND_EXPR:
5370 ret = gimplify_cond_expr (expr_p, pre_p, fallback);
5371 /* C99 code may assign to an array in a structure value of a
5372 conditional expression, and this has undefined behavior
5373 only on execution, so create a temporary if an lvalue is
5374 required. */
5375 if (fallback == fb_lvalue)
5376 {
5377 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
5378 lang_hooks.mark_addressable (*expr_p);
5379 }
5380 break;
5381
5382 case CALL_EXPR:
5383 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
5384 /* C99 code may assign to an array in a structure returned
5385 from a function, and this has undefined behavior only on
5386 execution, so create a temporary if an lvalue is
5387 required. */
5388 if (fallback == fb_lvalue)
5389 {
5390 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
5391 lang_hooks.mark_addressable (*expr_p);
5392 }
5393 break;
5394
5395 case TREE_LIST:
5396 gcc_unreachable ();
5397
5398 case COMPOUND_EXPR:
5399 ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
5400 break;
5401
5402 case MODIFY_EXPR:
5403 case INIT_EXPR:
5404 ret = gimplify_modify_expr (expr_p, pre_p, post_p,
5405 fallback != fb_none);
5406
5407 /* The distinction between MODIFY_EXPR and INIT_EXPR is no longer
5408 useful. */
5409 if (*expr_p && TREE_CODE (*expr_p) == INIT_EXPR)
5410 TREE_SET_CODE (*expr_p, MODIFY_EXPR);
5411 break;
5412
5413 case TRUTH_ANDIF_EXPR:
5414 case TRUTH_ORIF_EXPR:
5415 ret = gimplify_boolean_expr (expr_p);
5416 break;
5417
5418 case TRUTH_NOT_EXPR:
5419 TREE_OPERAND (*expr_p, 0)
5420 = gimple_boolify (TREE_OPERAND (*expr_p, 0));
5421 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
5422 is_gimple_val, fb_rvalue);
5423 recalculate_side_effects (*expr_p);
5424 break;
5425
5426 case ADDR_EXPR:
5427 ret = gimplify_addr_expr (expr_p, pre_p, post_p);
5428 break;
5429
5430 case VA_ARG_EXPR:
5431 ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
5432 break;
5433
5434 case CONVERT_EXPR:
5435 case NOP_EXPR:
5436 if (IS_EMPTY_STMT (*expr_p))
5437 {
5438 ret = GS_ALL_DONE;
5439 break;
5440 }
5441
5442 if (VOID_TYPE_P (TREE_TYPE (*expr_p))
5443 || fallback == fb_none)
5444 {
5445 /* Just strip a conversion to void (or in void context) and
5446 try again. */
5447 *expr_p = TREE_OPERAND (*expr_p, 0);
5448 break;
5449 }
5450
5451 ret = gimplify_conversion (expr_p);
5452 if (ret == GS_ERROR)
5453 break;
5454 if (*expr_p != save_expr)
5455 break;
5456 /* FALLTHRU */
5457
5458 case FIX_TRUNC_EXPR:
5459 case FIX_CEIL_EXPR:
5460 case FIX_FLOOR_EXPR:
5461 case FIX_ROUND_EXPR:
5462 /* unary_expr: ... | '(' cast ')' val | ... */
5463 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
5464 is_gimple_val, fb_rvalue);
5465 recalculate_side_effects (*expr_p);
5466 break;
5467
5468 case INDIRECT_REF:
5469 *expr_p = fold_indirect_ref (*expr_p);
5470 if (*expr_p != save_expr)
5471 break;
5472 /* else fall through. */
5473 case ALIGN_INDIRECT_REF:
5474 case MISALIGNED_INDIRECT_REF:
5475 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
5476 is_gimple_reg, fb_rvalue);
5477 recalculate_side_effects (*expr_p);
5478 break;
5479
5480 /* Constants need not be gimplified. */
5481 case INTEGER_CST:
5482 case REAL_CST:
5483 case STRING_CST:
5484 case COMPLEX_CST:
5485 case VECTOR_CST:
5486 ret = GS_ALL_DONE;
5487 break;
5488
5489 case CONST_DECL:
5490 /* If we require an lvalue, such as for ADDR_EXPR, retain the
5491 CONST_DECL node. Otherwise the decl is replaceable by its
5492 value. */
5493 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
5494 if (fallback & fb_lvalue)
5495 ret = GS_ALL_DONE;
5496 else
5497 *expr_p = DECL_INITIAL (*expr_p);
5498 break;
5499
5500 case DECL_EXPR:
5501 ret = gimplify_decl_expr (expr_p);
5502 break;
5503
5504 case EXC_PTR_EXPR:
5505 /* FIXME make this a decl. */
5506 ret = GS_ALL_DONE;
5507 break;
5508
5509 case BIND_EXPR:
5510 ret = gimplify_bind_expr (expr_p, NULL, pre_p);
5511 break;
5512
5513 case LOOP_EXPR:
5514 ret = gimplify_loop_expr (expr_p, pre_p);
5515 break;
5516
5517 case SWITCH_EXPR:
5518 ret = gimplify_switch_expr (expr_p, pre_p);
5519 break;
5520
5521 case EXIT_EXPR:
5522 ret = gimplify_exit_expr (expr_p);
5523 break;
5524
5525 case GOTO_EXPR:
5526 /* If the target is not a LABEL_DECL, then it is a computed jump
5527 and the target needs to be gimplified. */
5528 if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
5529 ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
5530 NULL, is_gimple_val, fb_rvalue);
5531 break;
5532
5533 case LABEL_EXPR:
5534 ret = GS_ALL_DONE;
5535 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
5536 == current_function_decl);
5537 break;
5538
5539 case CASE_LABEL_EXPR:
5540 ret = gimplify_case_label_expr (expr_p);
5541 break;
5542
5543 case RETURN_EXPR:
5544 ret = gimplify_return_expr (*expr_p, pre_p);
5545 break;
5546
5547 case CONSTRUCTOR:
5548 /* Don't reduce this in place; let gimplify_init_constructor work its
5549 magic. But if we're only elaborating this for side effects, just
5550 gimplify any element that has side effects. */
5551 if (fallback == fb_none)
5552 {
5553 unsigned HOST_WIDE_INT ix;
5554 constructor_elt *ce;
5555 tree temp = NULL_TREE;
5556 for (ix = 0;
5557 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (*expr_p),
5558 ix, ce);
5559 ix++)
5560 if (TREE_SIDE_EFFECTS (ce->value))
5561 append_to_statement_list (ce->value, &temp);
5562
5563 *expr_p = temp;
5564 ret = GS_OK;
5565 }
5566 /* C99 code may assign to an array in a constructed
5567 structure or union, and this has undefined behavior only
5568 on execution, so create a temporary if an lvalue is
5569 required. */
5570 else if (fallback == fb_lvalue)
5571 {
5572 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
5573 lang_hooks.mark_addressable (*expr_p);
5574 }
5575 else
5576 ret = GS_ALL_DONE;
5577 break;
5578
5579 /* The following are special cases that are not handled by the
5580 original GIMPLE grammar. */
5581
5582 /* SAVE_EXPR nodes are converted into GIMPLE identifiers and
5583 eliminated. */
5584 case SAVE_EXPR:
5585 ret = gimplify_save_expr (expr_p, pre_p, post_p);
5586 break;
5587
5588 case BIT_FIELD_REF:
5589 {
5590 enum gimplify_status r0, r1, r2;
5591
5592 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
5593 is_gimple_lvalue, fb_either);
5594 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
5595 is_gimple_val, fb_rvalue);
5596 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p, post_p,
5597 is_gimple_val, fb_rvalue);
5598 recalculate_side_effects (*expr_p);
5599
5600 ret = MIN (r0, MIN (r1, r2));
5601 }
5602 break;
5603
5604 case NON_LVALUE_EXPR:
5605 /* This should have been stripped above. */
5606 gcc_unreachable ();
5607
5608 case ASM_EXPR:
5609 ret = gimplify_asm_expr (expr_p, pre_p, post_p);
5610 break;
5611
5612 case TRY_FINALLY_EXPR:
5613 case TRY_CATCH_EXPR:
5614 gimplify_to_stmt_list (&TREE_OPERAND (*expr_p, 0));
5615 gimplify_to_stmt_list (&TREE_OPERAND (*expr_p, 1));
5616 ret = GS_ALL_DONE;
5617 break;
5618
5619 case CLEANUP_POINT_EXPR:
5620 ret = gimplify_cleanup_point_expr (expr_p, pre_p);
5621 break;
5622
5623 case TARGET_EXPR:
5624 ret = gimplify_target_expr (expr_p, pre_p, post_p);
5625 break;
5626
5627 case CATCH_EXPR:
5628 gimplify_to_stmt_list (&CATCH_BODY (*expr_p));
5629 ret = GS_ALL_DONE;
5630 break;
5631
5632 case EH_FILTER_EXPR:
5633 gimplify_to_stmt_list (&EH_FILTER_FAILURE (*expr_p));
5634 ret = GS_ALL_DONE;
5635 break;
5636
5637 case OBJ_TYPE_REF:
5638 {
5639 enum gimplify_status r0, r1;
5640 r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p, post_p,
5641 is_gimple_val, fb_rvalue);
5642 r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p, post_p,
5643 is_gimple_val, fb_rvalue);
5644 ret = MIN (r0, r1);
5645 }
5646 break;
5647
5648 case LABEL_DECL:
5649 /* We get here when taking the address of a label. We mark
5650 the label as "forced", meaning it can never be removed and
5651 is a potential target of any computed goto. */
5652 FORCED_LABEL (*expr_p) = 1;
5653 ret = GS_ALL_DONE;
5654 break;
5655
5656 case STATEMENT_LIST:
5657 ret = gimplify_statement_list (expr_p);
5658 break;
5659
5660 case WITH_SIZE_EXPR:
5661 {
5662 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
5663 post_p == &internal_post ? NULL : post_p,
5664 gimple_test_f, fallback);
5665 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
5666 is_gimple_val, fb_rvalue);
5667 }
5668 break;
5669
5670 case VAR_DECL:
5671 case PARM_DECL:
5672 ret = gimplify_var_or_parm_decl (expr_p);
5673 break;
5674
5675 case RESULT_DECL:
5676 /* When within an OpenMP context, notice uses of variables. */
5677 if (gimplify_omp_ctxp)
5678 omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
5679 ret = GS_ALL_DONE;
5680 break;
5681
5682 case SSA_NAME:
5683 /* Allow callbacks into the gimplifier during optimization. */
5684 ret = GS_ALL_DONE;
5685 break;
5686
5687 case OMP_PARALLEL:
5688 ret = gimplify_omp_parallel (expr_p, pre_p);
5689 break;
5690
5691 case OMP_FOR:
5692 ret = gimplify_omp_for (expr_p, pre_p);
5693 break;
5694
5695 case OMP_SECTIONS:
5696 case OMP_SINGLE:
5697 ret = gimplify_omp_workshare (expr_p, pre_p);
5698 break;
5699
5700 case OMP_SECTION:
5701 case OMP_MASTER:
5702 case OMP_ORDERED:
5703 case OMP_CRITICAL:
5704 gimplify_to_stmt_list (&OMP_BODY (*expr_p));
5705 break;
5706
5707 case OMP_ATOMIC:
5708 ret = gimplify_omp_atomic (expr_p, pre_p);
5709 break;
5710
5711 case OMP_RETURN:
5712 case OMP_CONTINUE:
5713 ret = GS_ALL_DONE;
5714 break;
5715
5716 default:
5717 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
5718 {
5719 case tcc_comparison:
5720 /* Handle comparisons of non-scalar-mode aggregate objects
5721 with a call to memcmp. It would be nice to only have to do
5722 this for variable-sized objects, but then we'd have to allow
5723 the same nest of reference nodes we allow for MODIFY_EXPR and
5724 that's too complex.
5725
5726 Compare scalar mode aggregates as scalar mode values. Using
5727 memcmp for them would be very inefficient at best, and is
5728 plain wrong if bitfields are involved. */
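/* As a rough sketch of the lowering (see the helpers called below for
   the exact form): a BLKmode aggregate comparison becomes something like
     memcmp (&a, &b, sizeof (a)) == 0   (or != 0),
   whereas a scalar-mode aggregate is compared by reinterpreting both
   operands as integers of that mode and comparing those values.  */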
5729
5730 {
5731 tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
5732
5733 if (!AGGREGATE_TYPE_P (type))
5734 goto expr_2;
5735 else if (TYPE_MODE (type) != BLKmode)
5736 ret = gimplify_scalar_mode_aggregate_compare (expr_p);
5737 else
5738 ret = gimplify_variable_sized_compare (expr_p);
5739
5740 break;
5741 }
5742
5743 /* If *EXPR_P does not need to be special-cased, handle it
5744 according to its class. */
5745 case tcc_unary:
5746 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
5747 post_p, is_gimple_val, fb_rvalue);
5748 break;
5749
5750 case tcc_binary:
5751 expr_2:
5752 {
5753 enum gimplify_status r0, r1;
5754
5755 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
5756 post_p, is_gimple_val, fb_rvalue);
5757 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
5758 post_p, is_gimple_val, fb_rvalue);
5759
5760 ret = MIN (r0, r1);
5761 break;
5762 }
5763
5764 case tcc_declaration:
5765 case tcc_constant:
5766 ret = GS_ALL_DONE;
5767 goto dont_recalculate;
5768
5769 default:
5770 gcc_assert (TREE_CODE (*expr_p) == TRUTH_AND_EXPR
5771 || TREE_CODE (*expr_p) == TRUTH_OR_EXPR
5772 || TREE_CODE (*expr_p) == TRUTH_XOR_EXPR);
5773 goto expr_2;
5774 }
5775
5776 recalculate_side_effects (*expr_p);
5777 dont_recalculate:
5778 break;
5779 }
5780
5781 /* If we replaced *expr_p, gimplify again. */
5782 if (ret == GS_OK && (*expr_p == NULL || *expr_p == save_expr))
5783 ret = GS_ALL_DONE;
5784 }
5785 while (ret == GS_OK);
5786
5787 /* If we encountered an error_mark somewhere nested inside, either
5788 stub out the statement or propagate the error back out. */
5789 if (ret == GS_ERROR)
5790 {
5791 if (is_statement)
5792 *expr_p = NULL;
5793 goto out;
5794 }
5795
5796 /* This was only valid as a return value from the langhook, which
5797 we handled. Make sure it doesn't escape from any other context. */
5798 gcc_assert (ret != GS_UNHANDLED);
5799
5800 if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
5801 {
5802 /* We aren't looking for a value, and we don't have a valid
5803 statement. If it doesn't have side-effects, throw it away. */
5804 if (!TREE_SIDE_EFFECTS (*expr_p))
5805 *expr_p = NULL;
5806 else if (!TREE_THIS_VOLATILE (*expr_p))
5807 {
5808 /* This is probably a _REF that contains something nested that
5809 has side effects. Recurse through the operands to find it. */
5810 enum tree_code code = TREE_CODE (*expr_p);
5811
5812 switch (code)
5813 {
5814 case COMPONENT_REF:
5815 case REALPART_EXPR:
5816 case IMAGPART_EXPR:
5817 case VIEW_CONVERT_EXPR:
5818 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
5819 gimple_test_f, fallback);
5820 break;
5821
5822 case ARRAY_REF: case ARRAY_RANGE_REF:
5823 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
5824 gimple_test_f, fallback);
5825 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
5826 gimple_test_f, fallback);
5827 break;
5828
5829 default:
5830 /* Anything else with side-effects must be converted to
5831 a valid statement before we get here. */
5832 gcc_unreachable ();
5833 }
5834
5835 *expr_p = NULL;
5836 }
5837 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p)))
5838 {
5839 /* Historically, the compiler has treated a bare
5840 reference to a volatile lvalue as forcing a load. */
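/* For example, given "volatile int v;", the bare expression statement
   "v;" is rewritten below as roughly "vol.N = v;" so that the implied
   load is not discarded.  */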
5841 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
5842 /* Normally, we do not want to create a temporary for a
5843 TREE_ADDRESSABLE type because such a type should not be
5844 copied by bitwise-assignment. However, we make an
5845 exception here, as all we are doing is ensuring that
5846 we read the bytes that make up the type. We use
5847 create_tmp_var_raw because create_tmp_var will abort when
5848 given a TREE_ADDRESSABLE type. */
5849 tree tmp = create_tmp_var_raw (type, "vol");
5850 gimple_add_tmp_var (tmp);
5851 *expr_p = build2 (MODIFY_EXPR, type, tmp, *expr_p);
5852 }
5853 else
5854 /* We can't do anything useful with a volatile reference to
5855 incomplete type, so just throw it away. */
5856 *expr_p = NULL;
5857 }
5858
5859 /* If we are gimplifying at the statement level, we're done. Tack
5860 everything together and replace the original statement with the
5861 gimplified form. */
5862 if (fallback == fb_none || is_statement)
5863 {
5864 if (internal_pre || internal_post)
5865 {
5866 append_to_statement_list (*expr_p, &internal_pre);
5867 append_to_statement_list (internal_post, &internal_pre);
5868 annotate_all_with_locus (&internal_pre, input_location);
5869 *expr_p = internal_pre;
5870 }
5871 else if (!*expr_p)
5872 ;
5873 else if (TREE_CODE (*expr_p) == STATEMENT_LIST)
5874 annotate_all_with_locus (expr_p, input_location);
5875 else
5876 annotate_one_with_locus (*expr_p, input_location);
5877 goto out;
5878 }
5879
5880 /* Otherwise we're gimplifying a subexpression, so the resulting value is
5881 interesting. */
5882
5883 /* If it's sufficiently simple already, we're done. Unless we are
5884 handling some post-effects internally; if that's the case, we need to
5885 copy into a temp before adding the post-effects to the tree. */
5886 if (!internal_post && (*gimple_test_f) (*expr_p))
5887 goto out;
5888
5889 /* Otherwise, we need to create a new temporary for the gimplified
5890 expression. */
5891
5892 /* We can't return an lvalue if we have an internal postqueue. The
5893 object the lvalue refers to would (probably) be modified by the
5894 postqueue; we need to copy the value out first, which means an
5895 rvalue. */
5896 if ((fallback & fb_lvalue) && !internal_post
5897 && is_gimple_addressable (*expr_p))
5898 {
5899 /* An lvalue will do. Take the address of the expression, store it
5900 in a temporary, and replace the expression with an INDIRECT_REF of
5901 that temporary. */
5902 tmp = build_fold_addr_expr (*expr_p);
5903 gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
5904 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (tmp)), tmp);
5905 }
5906 else if ((fallback & fb_rvalue) && is_gimple_formal_tmp_rhs (*expr_p))
5907 {
5908 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
5909
5910 /* An rvalue will do. Assign the gimplified expression into a new
5911 temporary TMP and replace the original expression with TMP. */
5912
5913 if (internal_post || (fallback & fb_lvalue))
5914 /* The postqueue might change the value of the expression between
5915 the initialization and use of the temporary, so we can't use a
5916 formal temp. FIXME do we care? */
5917 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
5918 else
5919 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
5920
5921 if (TREE_CODE (*expr_p) != SSA_NAME)
5922 DECL_GIMPLE_FORMAL_TEMP_P (*expr_p) = 1;
5923 }
5924 else
5925 {
5926 #ifdef ENABLE_CHECKING
5927 if (!(fallback & fb_mayfail))
5928 {
5929 fprintf (stderr, "gimplification failed:\n");
5930 print_generic_expr (stderr, *expr_p, 0);
5931 debug_tree (*expr_p);
5932 internal_error ("gimplification failed");
5933 }
5934 #endif
5935 gcc_assert (fallback & fb_mayfail);
5936 /* If this is an asm statement, and the user asked for the
5937 impossible, don't die. Fail and let gimplify_asm_expr
5938 issue an error. */
5939 ret = GS_ERROR;
5940 goto out;
5941 }
5942
5943 /* Make sure the temporary matches our predicate. */
5944 gcc_assert ((*gimple_test_f) (*expr_p));
5945
5946 if (internal_post)
5947 {
5948 annotate_all_with_locus (&internal_post, input_location);
5949 append_to_statement_list (internal_post, pre_p);
5950 }
5951
5952 out:
5953 input_location = saved_location;
5954 return ret;
5955 }
5956
5957 /* Look through TYPE for variable-sized objects and gimplify each such
5958 size that we find. Add to LIST_P any statements generated. */
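/* For example (a hypothetical use): for a C declaration such as
     void f (int n) { char buf[n + 1]; ... }
   the size expressions attached to the array type are gimplified here
   into temporaries, so later passes see simple values instead of
   arbitrary expressions.  */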
5959
5960 void
5961 gimplify_type_sizes (tree type, tree *list_p)
5962 {
5963 tree field, t;
5964
5965 if (type == NULL || type == error_mark_node)
5966 return;
5967
5968 /* We first do the main variant, then copy into any other variants. */
5969 type = TYPE_MAIN_VARIANT (type);
5970
5971 /* Avoid infinite recursion. */
5972 if (TYPE_SIZES_GIMPLIFIED (type))
5973 return;
5974
5975 TYPE_SIZES_GIMPLIFIED (type) = 1;
5976
5977 switch (TREE_CODE (type))
5978 {
5979 case INTEGER_TYPE:
5980 case ENUMERAL_TYPE:
5981 case BOOLEAN_TYPE:
5982 case REAL_TYPE:
5983 gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
5984 gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);
5985
5986 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
5987 {
5988 TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
5989 TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
5990 }
5991 break;
5992
5993 case ARRAY_TYPE:
5994 /* These types may not have declarations, so handle them here. */
5995 gimplify_type_sizes (TREE_TYPE (type), list_p);
5996 gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
5997 break;
5998
5999 case RECORD_TYPE:
6000 case UNION_TYPE:
6001 case QUAL_UNION_TYPE:
6002 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
6003 if (TREE_CODE (field) == FIELD_DECL)
6004 {
6005 gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
6006 gimplify_type_sizes (TREE_TYPE (field), list_p);
6007 }
6008 break;
6009
6010 case POINTER_TYPE:
6011 case REFERENCE_TYPE:
6012 gimplify_type_sizes (TREE_TYPE (type), list_p);
6013 break;
6014
6015 default:
6016 break;
6017 }
6018
6019 gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
6020 gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);
6021
6022 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6023 {
6024 TYPE_SIZE (t) = TYPE_SIZE (type);
6025 TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
6026 TYPE_SIZES_GIMPLIFIED (t) = 1;
6027 }
6028 }
6029
6030 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
6031 a size or position, has had all of its SAVE_EXPRs evaluated.
6032 We add any required statements to STMT_P. */
6033
6034 void
6035 gimplify_one_sizepos (tree *expr_p, tree *stmt_p)
6036 {
6037 tree type, expr = *expr_p;
6038
6039 /* We don't do anything if the value isn't there, is constant, or contains
6040 a PLACEHOLDER_EXPR. Nor do we do anything if it's already a VAR_DECL:
6041 a VAR_DECL from another function would otherwise be replaced with a new
6042 variable by the gimplifier, and that would cause problems if this type
6043 is from outside the function. Leaving such a VAR_DECL alone is fine. */
6044 if (expr == NULL_TREE || TREE_CONSTANT (expr)
6045 || TREE_CODE (expr) == VAR_DECL
6046 || CONTAINS_PLACEHOLDER_P (expr))
6047 return;
6048
6049 type = TREE_TYPE (expr);
6050 *expr_p = unshare_expr (expr);
6051
6052 gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue);
6053 expr = *expr_p;
6054
6055 /* Verify that we have an exact type match with the original expression.
6056 In particular, we do not wish to drop a "sizetype" in favour of a
6057 type of similar dimensions. We don't want to pollute the generic
6058 type-stripping code with this knowledge because it doesn't matter
6059 for the bulk of GENERIC/GIMPLE. It only matters that TYPE_SIZE_UNIT
6060 and friends retain their "sizetype-ness". */
6061 if (TREE_TYPE (expr) != type
6062 && TREE_CODE (type) == INTEGER_TYPE
6063 && TYPE_IS_SIZETYPE (type))
6064 {
6065 tree tmp;
6066
6067 *expr_p = create_tmp_var (type, NULL);
6068 tmp = build1 (NOP_EXPR, type, expr);
6069 tmp = build2 (MODIFY_EXPR, type, *expr_p, tmp);
6070 if (EXPR_HAS_LOCATION (expr))
6071 SET_EXPR_LOCUS (tmp, EXPR_LOCUS (expr));
6072 else
6073 SET_EXPR_LOCATION (tmp, input_location);
6074
6075 gimplify_and_add (tmp, stmt_p);
6076 }
6077 }
6078 \f
6079 #ifdef ENABLE_CHECKING
6080 /* Compare types A and B for a "close enough" match. */
6081
6082 static bool
6083 cpt_same_type (tree a, tree b)
6084 {
6085 if (lang_hooks.types_compatible_p (a, b))
6086 return true;
6087
6088 /* ??? The C++ FE decomposes METHOD_TYPES to FUNCTION_TYPES and doesn't
6089 link them together. This routine is intended to catch type errors
6090 that will affect the optimizers, and the optimizers don't add new
6091 dereferences of function pointers, so ignore it. */
6092 if ((TREE_CODE (a) == FUNCTION_TYPE || TREE_CODE (a) == METHOD_TYPE)
6093 && (TREE_CODE (b) == FUNCTION_TYPE || TREE_CODE (b) == METHOD_TYPE))
6094 return true;
6095
6096 /* ??? The C FE pushes type qualifiers after the fact into the type of
6097 the element from the type of the array. See build_unary_op's handling
6098 of ADDR_EXPR. This seems wrong -- if we were going to do this, we
6099 should have done it when creating the variable in the first place.
6100 Alternately, why aren't the two array types made variants? */
6101 if (TREE_CODE (a) == ARRAY_TYPE && TREE_CODE (b) == ARRAY_TYPE)
6102 return cpt_same_type (TREE_TYPE (a), TREE_TYPE (b));
6103
6104 /* And because of those, we have to recurse down through pointers. */
6105 if (POINTER_TYPE_P (a) && POINTER_TYPE_P (b))
6106 return cpt_same_type (TREE_TYPE (a), TREE_TYPE (b));
6107
6108 return false;
6109 }
6110
6111 /* Check for some cases of the front end missing cast expressions.
6112 The type of a dereference should correspond to the pointer type;
6113 similarly the type of an address should match its object. */
6114
6115 static tree
6116 check_pointer_types_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
6117 void *data ATTRIBUTE_UNUSED)
6118 {
6119 tree t = *tp;
6120 tree ptype, otype, dtype;
6121
6122 switch (TREE_CODE (t))
6123 {
6124 case INDIRECT_REF:
6125 case ARRAY_REF:
6126 otype = TREE_TYPE (t);
6127 ptype = TREE_TYPE (TREE_OPERAND (t, 0));
6128 dtype = TREE_TYPE (ptype);
6129 gcc_assert (cpt_same_type (otype, dtype));
6130 break;
6131
6132 case ADDR_EXPR:
6133 ptype = TREE_TYPE (t);
6134 otype = TREE_TYPE (TREE_OPERAND (t, 0));
6135 dtype = TREE_TYPE (ptype);
6136 if (!cpt_same_type (otype, dtype))
6137 {
6138 /* &array is allowed to produce a pointer to the element, rather than
6139 a pointer to the array type. We must allow this in order to
6140 properly represent assigning the address of an array in C into
6141 a pointer to the element type. */
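/* A hypothetical example: for "int a[10]; int *p = a;" the decay of A
   may be represented as an ADDR_EXPR of the array whose type is
   already "int *", which is the form permitted here.  */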
6142 gcc_assert (TREE_CODE (otype) == ARRAY_TYPE
6143 && POINTER_TYPE_P (ptype)
6144 && cpt_same_type (TREE_TYPE (otype), dtype));
6145 break;
6146 }
6147 break;
6148
6149 default:
6150 return NULL_TREE;
6151 }
6152
6153
6154 return NULL_TREE;
6155 }
6156 #endif
6157
6158 /* Gimplify the body of statements pointed to by BODY_P. FNDECL is the
6159 function decl containing BODY. */
6160
6161 void
6162 gimplify_body (tree *body_p, tree fndecl, bool do_parms)
6163 {
6164 location_t saved_location = input_location;
6165 tree body, parm_stmts;
6166
6167 timevar_push (TV_TREE_GIMPLIFY);
6168
6169 gcc_assert (gimplify_ctxp == NULL);
6170 push_gimplify_context ();
6171
6172 /* Unshare most shared trees in the body and in that of any nested functions.
6173 It would seem we don't have to do this for nested functions because
6174 they are supposed to be output and then the outer function gimplified
6175 first, but the g++ front end doesn't always do it that way. */
6176 unshare_body (body_p, fndecl);
6177 unvisit_body (body_p, fndecl);
6178
6179 /* Make sure input_location isn't set to something weird. */
6180 input_location = DECL_SOURCE_LOCATION (fndecl);
6181
6182 /* Resolve callee-copies. This has to be done before processing
6183 the body so that DECL_VALUE_EXPR gets processed correctly. */
6184 parm_stmts = do_parms ? gimplify_parameters () : NULL;
6185
6186 /* Gimplify the function's body. */
6187 gimplify_stmt (body_p);
6188 body = *body_p;
6189
6190 if (!body)
6191 body = alloc_stmt_list ();
6192 else if (TREE_CODE (body) == STATEMENT_LIST)
6193 {
6194 tree t = expr_only (*body_p);
6195 if (t)
6196 body = t;
6197 }
6198
6199 /* If there isn't an outer BIND_EXPR, add one. */
6200 if (TREE_CODE (body) != BIND_EXPR)
6201 {
6202 tree b = build3 (BIND_EXPR, void_type_node, NULL_TREE,
6203 NULL_TREE, NULL_TREE);
6204 TREE_SIDE_EFFECTS (b) = 1;
6205 append_to_statement_list_force (body, &BIND_EXPR_BODY (b));
6206 body = b;
6207 }
6208
6209 /* If we had callee-copies statements, insert them at the beginning
6210 of the function. */
6211 if (parm_stmts)
6212 {
6213 append_to_statement_list_force (BIND_EXPR_BODY (body), &parm_stmts);
6214 BIND_EXPR_BODY (body) = parm_stmts;
6215 }
6216
6217 /* Unshare again, in case gimplification was sloppy. */
6218 unshare_all_trees (body);
6219
6220 *body_p = body;
6221
6222 pop_gimplify_context (body);
6223 gcc_assert (gimplify_ctxp == NULL);
6224
6225 #ifdef ENABLE_CHECKING
6226 walk_tree (body_p, check_pointer_types_r, NULL, NULL);
6227 #endif
6228
6229 timevar_pop (TV_TREE_GIMPLIFY);
6230 input_location = saved_location;
6231 }
6232
6233 /* Entry point to the gimplification pass. FNDECL is the FUNCTION_DECL
6234 node for the function we want to gimplify. */
6235
6236 void
6237 gimplify_function_tree (tree fndecl)
6238 {
6239 tree oldfn, parm, ret;
6240
6241 oldfn = current_function_decl;
6242 current_function_decl = fndecl;
6243 cfun = DECL_STRUCT_FUNCTION (fndecl);
6244 if (cfun == NULL)
6245 allocate_struct_function (fndecl);
6246
6247 for (parm = DECL_ARGUMENTS (fndecl); parm ; parm = TREE_CHAIN (parm))
6248 {
6249 /* Preliminarily mark non-addressed complex variables as eligible
6250 for promotion to gimple registers. We'll transform their uses
6251 as we find them. */
6252 if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
6253 && !TREE_THIS_VOLATILE (parm)
6254 && !needs_to_live_in_memory (parm))
6255 DECL_COMPLEX_GIMPLE_REG_P (parm) = 1;
6256 }
6257
6258 ret = DECL_RESULT (fndecl);
6259 if (TREE_CODE (TREE_TYPE (ret)) == COMPLEX_TYPE
6260 && !needs_to_live_in_memory (ret))
6261 DECL_COMPLEX_GIMPLE_REG_P (ret) = 1;
6262
6263 gimplify_body (&DECL_SAVED_TREE (fndecl), fndecl, true);
6264
6265 /* If we're instrumenting function entry/exit, then prepend the call to
6266 the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
6267 catch the exit hook. */
6268 /* ??? Add some way to ignore exceptions for this TFE. */
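/* A sketch of the resulting tree (using the builtins' conventional
   names):
     {
       __builtin_profile_func_enter ();
       try
         {
           <original DECL_SAVED_TREE>
         }
       finally
         {
           __builtin_profile_func_exit ();
         }
     }  */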
6269 if (flag_instrument_function_entry_exit
6270 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl))
6271 {
6272 tree tf, x, bind;
6273
6274 tf = build2 (TRY_FINALLY_EXPR, void_type_node, NULL, NULL);
6275 TREE_SIDE_EFFECTS (tf) = 1;
6276 x = DECL_SAVED_TREE (fndecl);
6277 append_to_statement_list (x, &TREE_OPERAND (tf, 0));
6278 x = implicit_built_in_decls[BUILT_IN_PROFILE_FUNC_EXIT];
6279 x = build_function_call_expr (x, NULL);
6280 append_to_statement_list (x, &TREE_OPERAND (tf, 1));
6281
6282 bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
6283 TREE_SIDE_EFFECTS (bind) = 1;
6284 x = implicit_built_in_decls[BUILT_IN_PROFILE_FUNC_ENTER];
6285 x = build_function_call_expr (x, NULL);
6286 append_to_statement_list (x, &BIND_EXPR_BODY (bind));
6287 append_to_statement_list (tf, &BIND_EXPR_BODY (bind));
6288
6289 DECL_SAVED_TREE (fndecl) = bind;
6290 }
6291
6292 current_function_decl = oldfn;
6293 cfun = oldfn ? DECL_STRUCT_FUNCTION (oldfn) : NULL;
6294 }
6295
6296 \f
6297 /* Expand EXPR into a list of gimple statements STMTS. If SIMPLE is true,
6298 force the result to be either an ssa_name or an invariant; otherwise
6299 just force it to be a rhs expression. If VAR is not NULL, make the
6300 base variable of the final destination be VAR if suitable. */
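/* A usage sketch (hypothetical caller): a pass that needs EXPR as a
   GIMPLE value can do
     tree stmts, val;
     val = force_gimple_operand (expr, &stmts, true, NULL_TREE);
     if (stmts)
       bsi_insert_before (&bsi, stmts, BSI_SAME_STMT);
   which is what force_gimple_operand_bsi below packages up.  */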
6301
6302 tree
6303 force_gimple_operand (tree expr, tree *stmts, bool simple, tree var)
6304 {
6305 tree t;
6306 enum gimplify_status ret;
6307 gimple_predicate gimple_test_f;
6308
6309 *stmts = NULL_TREE;
6310
6311 if (is_gimple_val (expr))
6312 return expr;
6313
6314 gimple_test_f = simple ? is_gimple_val : is_gimple_reg_rhs;
6315
6316 push_gimplify_context ();
6317 gimplify_ctxp->into_ssa = in_ssa_p;
6318
6319 if (var)
6320 expr = build2 (MODIFY_EXPR, TREE_TYPE (var), var, expr);
6321
6322 ret = gimplify_expr (&expr, stmts, NULL,
6323 gimple_test_f, fb_rvalue);
6324 gcc_assert (ret != GS_ERROR);
6325
6326 if (referenced_vars)
6327 {
6328 for (t = gimplify_ctxp->temps; t ; t = TREE_CHAIN (t))
6329 add_referenced_var (t);
6330 }
6331
6332 pop_gimplify_context (NULL);
6333
6334 return expr;
6335 }
6336
6337 /* Invokes force_gimple_operand for EXPR with parameters SIMPLE_P and VAR. If
6338 some statements are produced, emits them before BSI. */
6339
6340 tree
6341 force_gimple_operand_bsi (block_stmt_iterator *bsi, tree expr,
6342 bool simple_p, tree var)
6343 {
6344 tree stmts;
6345
6346 expr = force_gimple_operand (expr, &stmts, simple_p, var);
6347 if (stmts)
6348 bsi_insert_before (bsi, stmts, BSI_SAME_STMT);
6349
6350 return expr;
6351 }
6352
6353 #include "gt-gimplify.h"