Merge gimple-tuples-branch into mainline.
1 /* Tree lowering pass. This pass converts the GENERIC functions-as-trees
2 tree representation into the GIMPLE form.
3 Copyright (C) 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.
4 Major work done by Sebastian Pop <s.pop@laposte.net>,
5 Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 2, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING. If not, write to the Free
21 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
22 02110-1301, USA. */
23
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "tm.h"
28 #include "tree.h"
29 #include "rtl.h"
30 #include "varray.h"
31 #include "tree-gimple.h"
32 #include "tree-inline.h"
33 #include "diagnostic.h"
34 #include "langhooks.h"
35 #include "langhooks-def.h"
36 #include "tree-flow.h"
37 #include "cgraph.h"
38 #include "timevar.h"
39 #include "except.h"
40 #include "hashtab.h"
41 #include "flags.h"
42 #include "real.h"
43 #include "function.h"
44 #include "output.h"
45 #include "expr.h"
46 #include "ggc.h"
47 #include "toplev.h"
48 #include "target.h"
49 #include "optabs.h"
50 #include "pointer-set.h"
51
52
53 enum gimplify_omp_var_data
54 {
55 GOVD_SEEN = 1,
56 GOVD_EXPLICIT = 2,
57 GOVD_SHARED = 4,
58 GOVD_PRIVATE = 8,
59 GOVD_FIRSTPRIVATE = 16,
60 GOVD_LASTPRIVATE = 32,
61 GOVD_REDUCTION = 64,
62 GOVD_LOCAL = 128,
63 GOVD_DEBUG_PRIVATE = 256,
64 GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
65 | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LOCAL)
66 };
67
68 struct gimplify_omp_ctx
69 {
70 struct gimplify_omp_ctx *outer_context;
71 splay_tree variables;
72 struct pointer_set_t *privatized_types;
73 location_t location;
74 enum omp_clause_default_kind default_kind;
75 bool is_parallel;
76 bool is_combined_parallel;
77 };
78
79 struct gimplify_ctx
80 {
81 struct gimplify_ctx *prev_context;
82
83 tree current_bind_expr;
84 tree temps;
85 tree conditional_cleanups;
86 tree exit_label;
87 tree return_temp;
88
89 VEC(tree,heap) *case_labels;
90 /* The formal temporary table. Should this be persistent? */
91 htab_t temp_htab;
92
93 int conditions;
94 bool save_stack;
95 bool into_ssa;
96 };
97
98 static struct gimplify_ctx *gimplify_ctxp;
99 static struct gimplify_omp_ctx *gimplify_omp_ctxp;
100
101
102
103 /* Formal (expression) temporary table handling: Multiple occurrences of
104 the same scalar expression are evaluated into the same temporary. */
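/* A rough sketch (temporary names are arbitrary): with optimization enabled,
   gimplifying

     a = (b + c) * (b + c);

   finds "b + c" in this table on its second occurrence, so both uses are
   evaluated into the same formal temporary, e.g.

     D.1234 = b + c;
     a = D.1234 * D.1234;

   instead of creating a distinct temporary per occurrence.  */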
105
106 typedef struct gimple_temp_hash_elt
107 {
108 tree val; /* Key */
109 tree temp; /* Value */
110 } elt_t;
111
112 /* Forward declarations. */
113 static enum gimplify_status gimplify_compound_expr (tree *, tree *, bool);
114 #ifdef ENABLE_CHECKING
115 static bool cpt_same_type (tree a, tree b);
116 #endif
117
118
119 /* Return a hash value for a formal temporary table entry. */
120
121 static hashval_t
122 gimple_tree_hash (const void *p)
123 {
124 tree t = ((const elt_t *) p)->val;
125 return iterative_hash_expr (t, 0);
126 }
127
128 /* Compare two formal temporary table entries. */
129
130 static int
131 gimple_tree_eq (const void *p1, const void *p2)
132 {
133 tree t1 = ((const elt_t *) p1)->val;
134 tree t2 = ((const elt_t *) p2)->val;
135 enum tree_code code = TREE_CODE (t1);
136
137 if (TREE_CODE (t2) != code
138 || TREE_TYPE (t1) != TREE_TYPE (t2))
139 return 0;
140
141 if (!operand_equal_p (t1, t2, 0))
142 return 0;
143
144 /* Only allow them to compare equal if they also hash equal; otherwise
145 results are nondeterministic, and we fail bootstrap comparison. */
146 gcc_assert (gimple_tree_hash (p1) == gimple_tree_hash (p2));
147
148 return 1;
149 }
150
151 /* Set up a context for the gimplifier. */
152
153 void
154 push_gimplify_context (void)
155 {
156 struct gimplify_ctx *c;
157
158 c = (struct gimplify_ctx *) xcalloc (1, sizeof (struct gimplify_ctx));
159 c->prev_context = gimplify_ctxp;
160 if (optimize)
161 c->temp_htab = htab_create (1000, gimple_tree_hash, gimple_tree_eq, free);
162
163 gimplify_ctxp = c;
164 }
165
166 /* Tear down a context for the gimplifier. If BODY is non-null, then
167 put the temporaries into the outer BIND_EXPR. Otherwise, put them
168 in the unexpanded_var_list. */
169
170 void
171 pop_gimplify_context (tree body)
172 {
173 struct gimplify_ctx *c = gimplify_ctxp;
174 tree t;
175
176 gcc_assert (c && !c->current_bind_expr);
177 gimplify_ctxp = c->prev_context;
178
179 for (t = c->temps; t ; t = TREE_CHAIN (t))
180 DECL_GIMPLE_FORMAL_TEMP_P (t) = 0;
181
182 if (body)
183 declare_vars (c->temps, body, false);
184 else
185 record_vars (c->temps);
186
187 if (optimize)
188 htab_delete (c->temp_htab);
189 free (c);
190 }
191
192 static void
193 gimple_push_bind_expr (tree bind)
194 {
195 TREE_CHAIN (bind) = gimplify_ctxp->current_bind_expr;
196 gimplify_ctxp->current_bind_expr = bind;
197 }
198
199 static void
200 gimple_pop_bind_expr (void)
201 {
202 gimplify_ctxp->current_bind_expr
203 = TREE_CHAIN (gimplify_ctxp->current_bind_expr);
204 }
205
206 tree
207 gimple_current_bind_expr (void)
208 {
209 return gimplify_ctxp->current_bind_expr;
210 }
211
212 /* Returns true iff there is a COND_EXPR between us and the innermost
213 CLEANUP_POINT_EXPR. This info is used by gimple_push_cleanup. */
214
215 static bool
216 gimple_conditional_context (void)
217 {
218 return gimplify_ctxp->conditions > 0;
219 }
220
221 /* Note that we've entered a COND_EXPR. */
222
223 static void
224 gimple_push_condition (void)
225 {
226 #ifdef ENABLE_CHECKING
227 if (gimplify_ctxp->conditions == 0)
228 gcc_assert (!gimplify_ctxp->conditional_cleanups);
229 #endif
230 ++(gimplify_ctxp->conditions);
231 }
232
233 /* Note that we've left a COND_EXPR. If we're back at unconditional scope
234 now, add any conditional cleanups we've seen to the prequeue. */
235
236 static void
237 gimple_pop_condition (tree *pre_p)
238 {
239 int conds = --(gimplify_ctxp->conditions);
240
241 gcc_assert (conds >= 0);
242 if (conds == 0)
243 {
244 append_to_statement_list (gimplify_ctxp->conditional_cleanups, pre_p);
245 gimplify_ctxp->conditional_cleanups = NULL_TREE;
246 }
247 }
248
249 /* A stable comparison routine for use with splay trees and DECLs. */
250
251 static int
252 splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
253 {
254 tree a = (tree) xa;
255 tree b = (tree) xb;
256
257 return DECL_UID (a) - DECL_UID (b);
258 }
259
260 /* Create a new omp construct that deals with variable remapping. */
261
262 static struct gimplify_omp_ctx *
263 new_omp_context (bool is_parallel, bool is_combined_parallel)
264 {
265 struct gimplify_omp_ctx *c;
266
267 c = XCNEW (struct gimplify_omp_ctx);
268 c->outer_context = gimplify_omp_ctxp;
269 c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
270 c->privatized_types = pointer_set_create ();
271 c->location = input_location;
272 c->is_parallel = is_parallel;
273 c->is_combined_parallel = is_combined_parallel;
274 c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
275
276 return c;
277 }
278
279 /* Destroy an omp construct that deals with variable remapping. */
280
281 static void
282 delete_omp_context (struct gimplify_omp_ctx *c)
283 {
284 splay_tree_delete (c->variables);
285 pointer_set_destroy (c->privatized_types);
286 XDELETE (c);
287 }
288
289 static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
290 static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);
291
292 /* A subroutine of append_to_statement_list{,_force}. T is not NULL. */
293
294 static void
295 append_to_statement_list_1 (tree t, tree *list_p)
296 {
297 tree list = *list_p;
298 tree_stmt_iterator i;
299
300 if (!list)
301 {
302 if (t && TREE_CODE (t) == STATEMENT_LIST)
303 {
304 *list_p = t;
305 return;
306 }
307 *list_p = list = alloc_stmt_list ();
308 }
309
310 i = tsi_last (list);
311 tsi_link_after (&i, t, TSI_CONTINUE_LINKING);
312 }
313
314 /* Add T to the end of the list container pointed to by LIST_P.
315 If T is an expression with no effects, it is ignored. */
316
317 void
318 append_to_statement_list (tree t, tree *list_p)
319 {
320 if (t && TREE_SIDE_EFFECTS (t))
321 append_to_statement_list_1 (t, list_p);
322 }
323
324 /* Similar, but the statement is always added, regardless of side effects. */
325
326 void
327 append_to_statement_list_force (tree t, tree *list_p)
328 {
329 if (t != NULL_TREE)
330 append_to_statement_list_1 (t, list_p);
331 }
332
333 /* Both gimplify the statement T and append it to LIST_P. */
334
335 void
336 gimplify_and_add (tree t, tree *list_p)
337 {
338 gimplify_stmt (&t);
339 append_to_statement_list (t, list_p);
340 }
341
342 /* Strip off a legitimate source ending from the input string NAME of
343 length LEN. Rather than having to know the names used by all of
344 our front ends, we strip off an ending of a period followed by
345 up to five characters. (Java uses ".class".) */
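/* For example (illustrative only), a NAME of "simple.c" or "Simple.class"
   is truncated at the period, leaving "simple" or "Simple" to serve as the
   temporary-name prefix.  */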
346
347 static inline void
348 remove_suffix (char *name, int len)
349 {
350 int i;
351
352 for (i = 2; i < 8 && len > i; i++)
353 {
354 if (name[len - i] == '.')
355 {
356 name[len - i] = '\0';
357 break;
358 }
359 }
360 }
361
362 /* Create a nameless artificial label and put it in the current function
363 context. Returns the newly created label. */
364
365 tree
366 create_artificial_label (void)
367 {
368 tree lab = build_decl (LABEL_DECL, NULL_TREE, void_type_node);
369
370 DECL_ARTIFICIAL (lab) = 1;
371 DECL_IGNORED_P (lab) = 1;
372 DECL_CONTEXT (lab) = current_function_decl;
373 return lab;
374 }
375
376 /* Subroutine for find_single_pointer_decl. */
377
378 static tree
379 find_single_pointer_decl_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
380 void *data)
381 {
382 tree *pdecl = (tree *) data;
383
384 if (DECL_P (*tp) && POINTER_TYPE_P (TREE_TYPE (*tp)))
385 {
386 if (*pdecl)
387 {
388 /* We already found a pointer decl; return anything other
389 than NULL_TREE to unwind from walk_tree signalling that
390 we have a duplicate. */
391 return *tp;
392 }
393 *pdecl = *tp;
394 }
395
396 return NULL_TREE;
397 }
398
399 /* Find the single DECL of pointer type in the tree T and return it.
400 If there are zero or more than one such DECLs, return NULL. */
401
402 static tree
403 find_single_pointer_decl (tree t)
404 {
405 tree decl = NULL_TREE;
406
407 if (walk_tree (&t, find_single_pointer_decl_1, &decl, NULL))
408 {
409 /* find_single_pointer_decl_1 returns a nonzero value, causing
410 walk_tree to return a nonzero value, to indicate that it
411 found more than one pointer DECL. */
412 return NULL_TREE;
413 }
414
415 return decl;
416 }
417
418 /* Create a new temporary name with PREFIX. Returns an identifier. */
419
420 static GTY(()) unsigned int tmp_var_id_num;
421
422 tree
423 create_tmp_var_name (const char *prefix)
424 {
425 char *tmp_name;
426
427 if (prefix)
428 {
429 char *preftmp = ASTRDUP (prefix);
430
431 remove_suffix (preftmp, strlen (preftmp));
432 prefix = preftmp;
433 }
434
435 ASM_FORMAT_PRIVATE_NAME (tmp_name, prefix ? prefix : "T", tmp_var_id_num++);
436 return get_identifier (tmp_name);
437 }
438
439
440 /* Create a new temporary variable declaration of type TYPE.
441 Does NOT push it into the current binding. */
442
443 tree
444 create_tmp_var_raw (tree type, const char *prefix)
445 {
446 tree tmp_var;
447 tree new_type;
448
449 /* Make the type of the variable writable. */
450 new_type = build_type_variant (type, 0, 0);
451 TYPE_ATTRIBUTES (new_type) = TYPE_ATTRIBUTES (type);
452
453 tmp_var = build_decl (VAR_DECL, prefix ? create_tmp_var_name (prefix) : NULL,
454 type);
455
456 /* The variable was declared by the compiler. */
457 DECL_ARTIFICIAL (tmp_var) = 1;
458 /* And we don't want debug info for it. */
459 DECL_IGNORED_P (tmp_var) = 1;
460
461 /* Make the variable writable. */
462 TREE_READONLY (tmp_var) = 0;
463
464 DECL_EXTERNAL (tmp_var) = 0;
465 TREE_STATIC (tmp_var) = 0;
466 TREE_USED (tmp_var) = 1;
467
468 return tmp_var;
469 }
470
471 /* Create a new temporary variable declaration of type TYPE. DOES push the
472 variable into the current binding. Further, assume that this is called
473 only from gimplification or optimization, at which point the creation of
474 certain types are bugs. */
475
476 tree
477 create_tmp_var (tree type, const char *prefix)
478 {
479 tree tmp_var;
480
481 /* We don't allow types that are addressable (meaning we can't make copies),
482 or incomplete. We also used to reject all variable-sized objects here,
483 but now support those for which a constant upper bound can be obtained.
484 The processing for variable sizes is performed in gimple_add_tmp_var, the
485 point at which it really matters; that point may also be reached via paths
486 not going through this function, e.g. after direct calls to create_tmp_var_raw. */
487 gcc_assert (!TREE_ADDRESSABLE (type) && COMPLETE_TYPE_P (type));
488
489 tmp_var = create_tmp_var_raw (type, prefix);
490 gimple_add_tmp_var (tmp_var);
491 return tmp_var;
492 }
493
494 /* Given a tree, try to return a useful variable name that we can use
495 to prefix a temporary that is being assigned the value of the tree.
496 I.e., given <temp> = &A, return A. */
497
498 const char *
499 get_name (tree t)
500 {
501 tree stripped_decl;
502
503 stripped_decl = t;
504 STRIP_NOPS (stripped_decl);
505 if (DECL_P (stripped_decl) && DECL_NAME (stripped_decl))
506 return IDENTIFIER_POINTER (DECL_NAME (stripped_decl));
507 else
508 {
509 switch (TREE_CODE (stripped_decl))
510 {
511 case ADDR_EXPR:
512 return get_name (TREE_OPERAND (stripped_decl, 0));
513 default:
514 return NULL;
515 }
516 }
517 }
518
519 /* Create a temporary with a name derived from VAL. Subroutine of
520 lookup_tmp_var; nobody else should call this function. */
521
522 static inline tree
523 create_tmp_from_val (tree val)
524 {
525 return create_tmp_var (TYPE_MAIN_VARIANT (TREE_TYPE (val)), get_name (val));
526 }
527
528 /* Create a temporary to hold the value of VAL. If IS_FORMAL, try to reuse
529 an existing expression temporary. */
530
531 static tree
532 lookup_tmp_var (tree val, bool is_formal)
533 {
534 tree ret;
535
536 /* If not optimizing, never really reuse a temporary. local-alloc
537 won't allocate any variable that is used in more than one basic
538 block, which means it will go into memory, causing much extra
539 work in reload and final and poorer code generation, outweighing
540 the extra memory allocation here. */
541 if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
542 ret = create_tmp_from_val (val);
543 else
544 {
545 elt_t elt, *elt_p;
546 void **slot;
547
548 elt.val = val;
549 slot = htab_find_slot (gimplify_ctxp->temp_htab, (void *)&elt, INSERT);
550 if (*slot == NULL)
551 {
552 elt_p = XNEW (elt_t);
553 elt_p->val = val;
554 elt_p->temp = ret = create_tmp_from_val (val);
555 *slot = (void *) elt_p;
556 }
557 else
558 {
559 elt_p = (elt_t *) *slot;
560 ret = elt_p->temp;
561 }
562 }
563
564 if (is_formal)
565 DECL_GIMPLE_FORMAL_TEMP_P (ret) = 1;
566
567 return ret;
568 }
569
570 /* Returns a formal temporary variable initialized with VAL. PRE_P is as
571 in gimplify_expr. Only use this function if:
572
573 1) The value of the unfactored expression represented by VAL will not
574 change between the initialization and use of the temporary, and
575 2) The temporary will not be otherwise modified.
576
577 For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
578 and #2 means it is inappropriate for && temps.
579
580 For other cases, use get_initialized_tmp_var instead. */
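/* A minimal sketch of the intended use (temporary names are arbitrary):
   gimplifying an rvalue such as "a + b" through get_formal_tmp_var emits

     D.1235 = a + b;

   into the PRE_P statement list and returns D.1235, which can then stand
   for the expression wherever a GIMPLE value is required.  */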
581
582 static tree
583 internal_get_tmp_var (tree val, tree *pre_p, tree *post_p, bool is_formal)
584 {
585 tree t, mod;
586
587 gimplify_expr (&val, pre_p, post_p, is_gimple_formal_tmp_rhs, fb_rvalue);
588
589 t = lookup_tmp_var (val, is_formal);
590
591 if (is_formal)
592 {
593 tree u = find_single_pointer_decl (val);
594
595 if (u && TREE_CODE (u) == VAR_DECL && DECL_BASED_ON_RESTRICT_P (u))
596 u = DECL_GET_RESTRICT_BASE (u);
597 if (u && TYPE_RESTRICT (TREE_TYPE (u)))
598 {
599 if (DECL_BASED_ON_RESTRICT_P (t))
600 gcc_assert (u == DECL_GET_RESTRICT_BASE (t));
601 else
602 {
603 DECL_BASED_ON_RESTRICT_P (t) = 1;
604 SET_DECL_RESTRICT_BASE (t, u);
605 }
606 }
607 }
608
609 if (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE)
610 DECL_COMPLEX_GIMPLE_REG_P (t) = 1;
611
612 mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));
613
614 if (EXPR_HAS_LOCATION (val))
615 SET_EXPR_LOCUS (mod, EXPR_LOCUS (val));
616 else
617 SET_EXPR_LOCATION (mod, input_location);
618
619 /* gimplify_modify_expr might want to reduce this further. */
620 gimplify_and_add (mod, pre_p);
621
622 /* If we're gimplifying into ssa, gimplify_modify_expr will have
623 given our temporary an ssa name. Find and return it. */
624 if (gimplify_ctxp->into_ssa)
625 t = TREE_OPERAND (mod, 0);
626
627 return t;
628 }
629
630 /* Returns a formal temporary variable initialized with VAL. PRE_P
631 points to a statement list where side-effects needed to compute VAL
632 should be stored. */
633
634 tree
635 get_formal_tmp_var (tree val, tree *pre_p)
636 {
637 return internal_get_tmp_var (val, pre_p, NULL, true);
638 }
639
640 /* Returns a temporary variable initialized with VAL. PRE_P and POST_P
641 are as in gimplify_expr. */
642
643 tree
644 get_initialized_tmp_var (tree val, tree *pre_p, tree *post_p)
645 {
646 return internal_get_tmp_var (val, pre_p, post_p, false);
647 }
648
649 /* Declares all the variables in VARS in SCOPE. If DEBUG_INFO is
650 true, generate debug info for them; otherwise don't. */
651
652 void
653 declare_vars (tree vars, tree scope, bool debug_info)
654 {
655 tree last = vars;
656 if (last)
657 {
658 tree temps, block;
659
660 /* C99 mode puts the default 'return 0;' for main outside the outer
661 braces. So drill down until we find an actual scope. */
662 while (TREE_CODE (scope) == COMPOUND_EXPR)
663 scope = TREE_OPERAND (scope, 0);
664
665 gcc_assert (TREE_CODE (scope) == BIND_EXPR);
666
667 temps = nreverse (last);
668
669 block = BIND_EXPR_BLOCK (scope);
670 if (!block || !debug_info)
671 {
672 TREE_CHAIN (last) = BIND_EXPR_VARS (scope);
673 BIND_EXPR_VARS (scope) = temps;
674 }
675 else
676 {
677 /* We need to attach the nodes both to the BIND_EXPR and to its
678 associated BLOCK for debugging purposes. The key point here
679 is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
680 is a subchain of the BIND_EXPR_VARS of the BIND_EXPR. */
681 if (BLOCK_VARS (block))
682 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
683 else
684 {
685 BIND_EXPR_VARS (scope) = chainon (BIND_EXPR_VARS (scope), temps);
686 BLOCK_VARS (block) = temps;
687 }
688 }
689 }
690 }
691
692 /* For VAR a VAR_DECL of variable size, try to find a constant upper bound
693 for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly. Abort if
694 no such upper bound can be obtained. */
695
696 static void
697 force_constant_size (tree var)
698 {
699 /* The only attempt we make is by querying the maximum size of objects
700 of the variable's type. */
701
702 HOST_WIDE_INT max_size;
703
704 gcc_assert (TREE_CODE (var) == VAR_DECL);
705
706 max_size = max_int_size_in_bytes (TREE_TYPE (var));
707
708 gcc_assert (max_size >= 0);
709
710 DECL_SIZE_UNIT (var)
711 = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
712 DECL_SIZE (var)
713 = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
714 }
715
716 void
717 gimple_add_tmp_var (tree tmp)
718 {
719 gcc_assert (!TREE_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));
720
721 /* Later processing assumes that the object size is constant, which might
722 not be true at this point. Force the use of a constant upper bound in
723 this case. */
724 if (!host_integerp (DECL_SIZE_UNIT (tmp), 1))
725 force_constant_size (tmp);
726
727 DECL_CONTEXT (tmp) = current_function_decl;
728 DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;
729
730 if (gimplify_ctxp)
731 {
732 TREE_CHAIN (tmp) = gimplify_ctxp->temps;
733 gimplify_ctxp->temps = tmp;
734
735 /* Mark temporaries local within the nearest enclosing parallel. */
736 if (gimplify_omp_ctxp)
737 {
738 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
739 while (ctx && !ctx->is_parallel)
740 ctx = ctx->outer_context;
741 if (ctx)
742 omp_add_variable (ctx, tmp, GOVD_LOCAL | GOVD_SEEN);
743 }
744 }
745 else if (cfun)
746 record_vars (tmp);
747 else
748 declare_vars (tmp, DECL_SAVED_TREE (current_function_decl), false);
749 }
750
751 /* Determines whether to assign a locus to the statement STMT. */
752
753 static bool
754 should_carry_locus_p (tree stmt)
755 {
756 /* Don't emit a line note for a label. We particularly don't want to
757 emit one for the break label, since it doesn't actually correspond
758 to the beginning of the loop/switch. */
759 if (TREE_CODE (stmt) == LABEL_EXPR)
760 return false;
761
762 /* Do not annotate empty statements, since it confuses gcov. */
763 if (!TREE_SIDE_EFFECTS (stmt))
764 return false;
765
766 return true;
767 }
768
769 static void
770 annotate_one_with_locus (tree t, location_t locus)
771 {
772 if (CAN_HAVE_LOCATION_P (t)
773 && ! EXPR_HAS_LOCATION (t) && should_carry_locus_p (t))
774 SET_EXPR_LOCATION (t, locus);
775 }
776
777 void
778 annotate_all_with_locus (tree *stmt_p, location_t locus)
779 {
780 tree_stmt_iterator i;
781
782 if (!*stmt_p)
783 return;
784
785 for (i = tsi_start (*stmt_p); !tsi_end_p (i); tsi_next (&i))
786 {
787 tree t = tsi_stmt (i);
788
789 /* Assuming we've already been gimplified, we shouldn't
790 see nested chaining constructs anymore. */
791 gcc_assert (TREE_CODE (t) != STATEMENT_LIST
792 && TREE_CODE (t) != COMPOUND_EXPR);
793
794 annotate_one_with_locus (t, locus);
795 }
796 }
797
798 /* Similar to copy_tree_r() but do not copy SAVE_EXPR or TARGET_EXPR nodes.
799 These nodes model computations that should only be done once. If we
800 were to unshare something like SAVE_EXPR(i++), the gimplification
801 process would create wrong code. */
802
803 static tree
804 mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
805 {
806 enum tree_code code = TREE_CODE (*tp);
807 /* Don't unshare types, decls, constants, SAVE_EXPR or TARGET_EXPR nodes. */
808 if (TREE_CODE_CLASS (code) == tcc_type
809 || TREE_CODE_CLASS (code) == tcc_declaration
810 || TREE_CODE_CLASS (code) == tcc_constant
811 || code == SAVE_EXPR || code == TARGET_EXPR
812 /* We can't do anything sensible with a BLOCK used as an expression,
813 but we also can't just die when we see it because of non-expression
814 uses. So just avert our eyes and cross our fingers. Silly Java. */
815 || code == BLOCK)
816 *walk_subtrees = 0;
817 else
818 {
819 gcc_assert (code != BIND_EXPR);
820 copy_tree_r (tp, walk_subtrees, data);
821 }
822
823 return NULL_TREE;
824 }
825
826 /* Callback for walk_tree to unshare most of the shared trees rooted at
827 *TP. If *TP has been visited already (i.e., TREE_VISITED (*TP) == 1),
828 then *TP is deep copied by calling copy_tree_r.
829
830 This unshares the same trees as copy_tree_r with the exception of
831 SAVE_EXPR nodes. These nodes model computations that should only be
832 done once. If we were to unshare something like SAVE_EXPR(i++), the
833 gimplification process would create wrong code. */
834
835 static tree
836 copy_if_shared_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
837 void *data ATTRIBUTE_UNUSED)
838 {
839 tree t = *tp;
840 enum tree_code code = TREE_CODE (t);
841
842 /* Skip types, decls, and constants. But we do want to look at their
843 types and the bounds of types. Mark them as visited so we properly
844 unmark their subtrees on the unmark pass. If we've already seen them,
845 don't look down further. */
846 if (TREE_CODE_CLASS (code) == tcc_type
847 || TREE_CODE_CLASS (code) == tcc_declaration
848 || TREE_CODE_CLASS (code) == tcc_constant)
849 {
850 if (TREE_VISITED (t))
851 *walk_subtrees = 0;
852 else
853 TREE_VISITED (t) = 1;
854 }
855
856 /* If this node has been visited already, unshare it and don't look
857 any deeper. */
858 else if (TREE_VISITED (t))
859 {
860 walk_tree (tp, mostly_copy_tree_r, NULL, NULL);
861 *walk_subtrees = 0;
862 }
863
864 /* Otherwise, mark the tree as visited and keep looking. */
865 else
866 TREE_VISITED (t) = 1;
867
868 return NULL_TREE;
869 }
870
871 static tree
872 unmark_visited_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
873 void *data ATTRIBUTE_UNUSED)
874 {
875 if (TREE_VISITED (*tp))
876 TREE_VISITED (*tp) = 0;
877 else
878 *walk_subtrees = 0;
879
880 return NULL_TREE;
881 }
882
883 /* Unshare all the trees in BODY_P, a pointer into the body of FNDECL, and the
884 bodies of any nested functions if we are unsharing the entire body of
885 FNDECL. */
886
887 static void
888 unshare_body (tree *body_p, tree fndecl)
889 {
890 struct cgraph_node *cgn = cgraph_node (fndecl);
891
892 walk_tree (body_p, copy_if_shared_r, NULL, NULL);
893 if (body_p == &DECL_SAVED_TREE (fndecl))
894 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
895 unshare_body (&DECL_SAVED_TREE (cgn->decl), cgn->decl);
896 }
897
898 /* Likewise, but mark all trees as not visited. */
899
900 static void
901 unvisit_body (tree *body_p, tree fndecl)
902 {
903 struct cgraph_node *cgn = cgraph_node (fndecl);
904
905 walk_tree (body_p, unmark_visited_r, NULL, NULL);
906 if (body_p == &DECL_SAVED_TREE (fndecl))
907 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
908 unvisit_body (&DECL_SAVED_TREE (cgn->decl), cgn->decl);
909 }
910
911 /* Unshare T and all the trees reached from T via TREE_CHAIN. */
912
913 static void
914 unshare_all_trees (tree t)
915 {
916 walk_tree (&t, copy_if_shared_r, NULL, NULL);
917 walk_tree (&t, unmark_visited_r, NULL, NULL);
918 }
919
920 /* Unconditionally make an unshared copy of EXPR. This is used for
921 stored expressions which span multiple functions, such as BINFO_VTABLE,
922 as the normal unsharing process can't tell that they're shared. */
923
924 tree
925 unshare_expr (tree expr)
926 {
927 walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
928 return expr;
929 }
930
931 /* A terser interface for building a representation of an exception
932 specification. */
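/* A loose sketch of the tree this builds:

     TRY_CATCH_EXPR
       <BODY>
       EH_FILTER_EXPR <ALLOWED> { <FAILURE> }

   i.e. BODY runs normally, and an exception whose type is not listed in
   ALLOWED runs FAILURE instead.  */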
933
934 tree
935 gimple_build_eh_filter (tree body, tree allowed, tree failure)
936 {
937 tree t;
938
939 /* FIXME should the allowed types go in TREE_TYPE? */
940 t = build2 (EH_FILTER_EXPR, void_type_node, allowed, NULL_TREE);
941 append_to_statement_list (failure, &EH_FILTER_FAILURE (t));
942
943 t = build2 (TRY_CATCH_EXPR, void_type_node, NULL_TREE, t);
944 append_to_statement_list (body, &TREE_OPERAND (t, 0));
945
946 return t;
947 }
948
949 \f
950 /* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
951 contain statements and have a value. Assign its value to a temporary
952 and give it void_type_node. Returns the temporary, or NULL_TREE if
953 WRAPPER was already void. */
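/* An illustrative sketch (temporary names are arbitrary): a GNU
   statement-expression used as an rvalue,

     x = ({ int i = f (); i + 1; });

   reaches this function as a BIND_EXPR whose value is "i + 1"; the wrapper
   is given void type and its last statement becomes an assignment of
   "i + 1" to TEMP (or to a fresh "retval.N" temporary when TEMP is null),
   which is then returned to the caller.  */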
954
955 tree
956 voidify_wrapper_expr (tree wrapper, tree temp)
957 {
958 tree type = TREE_TYPE (wrapper);
959 if (type && !VOID_TYPE_P (type))
960 {
961 tree *p;
962
963 /* Set p to point to the body of the wrapper. Loop until we find
964 something that isn't a wrapper. */
965 for (p = &wrapper; p && *p; )
966 {
967 switch (TREE_CODE (*p))
968 {
969 case BIND_EXPR:
970 TREE_SIDE_EFFECTS (*p) = 1;
971 TREE_TYPE (*p) = void_type_node;
972 /* For a BIND_EXPR, the body is operand 1. */
973 p = &BIND_EXPR_BODY (*p);
974 break;
975
976 case CLEANUP_POINT_EXPR:
977 case TRY_FINALLY_EXPR:
978 case TRY_CATCH_EXPR:
979 TREE_SIDE_EFFECTS (*p) = 1;
980 TREE_TYPE (*p) = void_type_node;
981 p = &TREE_OPERAND (*p, 0);
982 break;
983
984 case STATEMENT_LIST:
985 {
986 tree_stmt_iterator i = tsi_last (*p);
987 TREE_SIDE_EFFECTS (*p) = 1;
988 TREE_TYPE (*p) = void_type_node;
989 p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
990 }
991 break;
992
993 case COMPOUND_EXPR:
994 /* Advance to the last statement. Set all container types to void. */
995 for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
996 {
997 TREE_SIDE_EFFECTS (*p) = 1;
998 TREE_TYPE (*p) = void_type_node;
999 }
1000 break;
1001
1002 default:
1003 goto out;
1004 }
1005 }
1006
1007 out:
1008 if (p == NULL || IS_EMPTY_STMT (*p))
1009 temp = NULL_TREE;
1010 else if (temp)
1011 {
1012 /* The wrapper is on the RHS of an assignment that we're pushing
1013 down. */
1014 gcc_assert (TREE_CODE (temp) == INIT_EXPR
1015 || TREE_CODE (temp) == MODIFY_EXPR);
1016 TREE_OPERAND (temp, 1) = *p;
1017 *p = temp;
1018 }
1019 else
1020 {
1021 temp = create_tmp_var (type, "retval");
1022 *p = build2 (INIT_EXPR, type, temp, *p);
1023 }
1024
1025 return temp;
1026 }
1027
1028 return NULL_TREE;
1029 }
1030
1031 /* Prepare calls to builtins to SAVE and RESTORE the stack as well as
1032 a temporary through which they communicate. */
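/* Roughly, the two statements produced look like (temporary name arbitrary):

     saved_stack.1 = __builtin_stack_save ();
     ...
     __builtin_stack_restore (saved_stack.1);

   so that stack space allocated between the two points (e.g. by alloca or
   variable-sized locals) is released again on exit.  */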
1033
1034 static void
1035 build_stack_save_restore (tree *save, tree *restore)
1036 {
1037 tree save_call, tmp_var;
1038
1039 save_call =
1040 build_function_call_expr (implicit_built_in_decls[BUILT_IN_STACK_SAVE],
1041 NULL_TREE);
1042 tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
1043
1044 *save = build2 (GIMPLE_MODIFY_STMT, ptr_type_node, tmp_var, save_call);
1045 *restore =
1046 build_function_call_expr (implicit_built_in_decls[BUILT_IN_STACK_RESTORE],
1047 tree_cons (NULL_TREE, tmp_var, NULL_TREE));
1048 }
1049
1050 /* Gimplify a BIND_EXPR. Just voidify and recurse. */
1051
1052 static enum gimplify_status
1053 gimplify_bind_expr (tree *expr_p, tree *pre_p)
1054 {
1055 tree bind_expr = *expr_p;
1056 bool old_save_stack = gimplify_ctxp->save_stack;
1057 tree t;
1058
1059 tree temp = voidify_wrapper_expr (bind_expr, NULL);
1060
1061 /* Mark variables seen in this bind expr. */
1062 for (t = BIND_EXPR_VARS (bind_expr); t ; t = TREE_CHAIN (t))
1063 {
1064 if (TREE_CODE (t) == VAR_DECL)
1065 {
1066 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
1067
1068 /* Mark variable as local. */
1069 if (ctx && !is_global_var (t)
1070 && (! DECL_SEEN_IN_BIND_EXPR_P (t)
1071 || splay_tree_lookup (ctx->variables,
1072 (splay_tree_key) t) == NULL))
1073 omp_add_variable (gimplify_omp_ctxp, t, GOVD_LOCAL | GOVD_SEEN);
1074
1075 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
1076 }
1077
1078 /* Preliminarily mark non-addressed complex variables as eligible
1079 for promotion to gimple registers. We'll transform their uses
1080 as we find them. */
1081 if (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
1082 && !TREE_THIS_VOLATILE (t)
1083 && (TREE_CODE (t) == VAR_DECL && !DECL_HARD_REGISTER (t))
1084 && !needs_to_live_in_memory (t))
1085 DECL_COMPLEX_GIMPLE_REG_P (t) = 1;
1086 }
1087
1088 gimple_push_bind_expr (bind_expr);
1089 gimplify_ctxp->save_stack = false;
1090
1091 gimplify_to_stmt_list (&BIND_EXPR_BODY (bind_expr));
1092
1093 if (gimplify_ctxp->save_stack)
1094 {
1095 tree stack_save, stack_restore;
1096
1097 /* Save stack on entry and restore it on exit. Add a try_finally
1098 block to achieve this. Note that mudflap depends on the
1099 format of the emitted code: see mx_register_decls(). */
1100 build_stack_save_restore (&stack_save, &stack_restore);
1101
1102 t = build2 (TRY_FINALLY_EXPR, void_type_node,
1103 BIND_EXPR_BODY (bind_expr), NULL_TREE);
1104 append_to_statement_list (stack_restore, &TREE_OPERAND (t, 1));
1105
1106 BIND_EXPR_BODY (bind_expr) = NULL_TREE;
1107 append_to_statement_list (stack_save, &BIND_EXPR_BODY (bind_expr));
1108 append_to_statement_list (t, &BIND_EXPR_BODY (bind_expr));
1109 }
1110
1111 gimplify_ctxp->save_stack = old_save_stack;
1112 gimple_pop_bind_expr ();
1113
1114 if (temp)
1115 {
1116 *expr_p = temp;
1117 append_to_statement_list (bind_expr, pre_p);
1118 return GS_OK;
1119 }
1120 else
1121 return GS_ALL_DONE;
1122 }
1123
1124 /* Gimplify a RETURN_EXPR. If the expression to be returned is not a
1125 GIMPLE value, it is assigned to a new temporary and the statement is
1126 re-written to return the temporary.
1127
1128 PRE_P points to the list where side effects that must happen before
1129 STMT should be stored. */
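/* For illustration (a sketch; temporary names are arbitrary), a statement
   such as

     return a + b;

   in a function whose value is returned in registers is rewritten along
   the lines of

     D.1236 = a + b;
     return D.1236;

   with the same temporary reused by every return statement in the function
   via gimplify_ctxp->return_temp.  */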
1130
1131 static enum gimplify_status
1132 gimplify_return_expr (tree stmt, tree *pre_p)
1133 {
1134 tree ret_expr = TREE_OPERAND (stmt, 0);
1135 tree result_decl, result;
1136
1137 if (!ret_expr || TREE_CODE (ret_expr) == RESULT_DECL
1138 || ret_expr == error_mark_node)
1139 return GS_ALL_DONE;
1140
1141 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
1142 result_decl = NULL_TREE;
1143 else
1144 {
1145 result_decl = GENERIC_TREE_OPERAND (ret_expr, 0);
1146 if (TREE_CODE (result_decl) == INDIRECT_REF)
1147 /* See through a return by reference. */
1148 result_decl = TREE_OPERAND (result_decl, 0);
1149
1150 gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
1151 || TREE_CODE (ret_expr) == GIMPLE_MODIFY_STMT
1152 || TREE_CODE (ret_expr) == INIT_EXPR)
1153 && TREE_CODE (result_decl) == RESULT_DECL);
1154 }
1155
1156 /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
1157 Recall that aggregate_value_p is FALSE for any aggregate type that is
1158 returned in registers. If we're returning values in registers, then
1159 we don't want to extend the lifetime of the RESULT_DECL, particularly
1160 across another call. In addition, for those aggregates for which
1161 hard_function_value generates a PARALLEL, we'll die during normal
1162 expansion of structure assignments; there's special code in expand_return
1163 to handle this case that does not exist in expand_expr. */
1164 if (!result_decl
1165 || aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
1166 result = result_decl;
1167 else if (gimplify_ctxp->return_temp)
1168 result = gimplify_ctxp->return_temp;
1169 else
1170 {
1171 result = create_tmp_var (TREE_TYPE (result_decl), NULL);
1172
1173 /* ??? With complex control flow (usually involving abnormal edges),
1174 we can wind up warning about an uninitialized value for this. Due
1175 to how this variable is constructed and initialized, this is never
1176 true. Give up and never warn. */
1177 TREE_NO_WARNING (result) = 1;
1178
1179 gimplify_ctxp->return_temp = result;
1180 }
1181
1182 /* Smash the lhs of the GIMPLE_MODIFY_STMT to the temporary we plan to use.
1183 Then gimplify the whole thing. */
1184 if (result != result_decl)
1185 GENERIC_TREE_OPERAND (ret_expr, 0) = result;
1186
1187 gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);
1188
1189 /* If we didn't use a temporary, then the result is just the result_decl.
1190 Otherwise we need a simple copy. This should already be gimple. */
1191 if (result == result_decl)
1192 ret_expr = result;
1193 else
1194 ret_expr = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (result), result_decl,
1195 result);
1196 TREE_OPERAND (stmt, 0) = ret_expr;
1197
1198 return GS_ALL_DONE;
1199 }
1200
1201 /* Gimplifies a DECL_EXPR node *STMT_P by making any necessary allocation
1202 and initialization explicit. */
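/* A sketch of the variable-sized case (names are arbitrary): for a block
   containing

     int a[n];

   the array is accessed through a pointer temporary: the sizes are
   gimplified, something like "a.1 = __builtin_alloca (D.size);" is
   emitted, and DECL_VALUE_EXPR maps every later use of "a" to "*a.1".
   The enclosing BIND_EXPR is also asked to save and restore the stack
   pointer (gimplify_ctxp->save_stack).  */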
1203
1204 static enum gimplify_status
1205 gimplify_decl_expr (tree *stmt_p)
1206 {
1207 tree stmt = *stmt_p;
1208 tree decl = DECL_EXPR_DECL (stmt);
1209
1210 *stmt_p = NULL_TREE;
1211
1212 if (TREE_TYPE (decl) == error_mark_node)
1213 return GS_ERROR;
1214
1215 if ((TREE_CODE (decl) == TYPE_DECL
1216 || TREE_CODE (decl) == VAR_DECL)
1217 && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
1218 gimplify_type_sizes (TREE_TYPE (decl), stmt_p);
1219
1220 if (TREE_CODE (decl) == VAR_DECL && !DECL_EXTERNAL (decl))
1221 {
1222 tree init = DECL_INITIAL (decl);
1223
1224 if (TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1225 {
1226 /* This is a variable-sized decl. Simplify its size and mark it
1227 for deferred expansion. Note that mudflap depends on the format
1228 of the emitted code: see mx_register_decls(). */
1229 tree t, args, addr, ptr_type;
1230
1231 gimplify_one_sizepos (&DECL_SIZE (decl), stmt_p);
1232 gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), stmt_p);
1233
1234 /* All occurrences of this decl in final gimplified code will be
1235 replaced by indirection. Setting DECL_VALUE_EXPR does two
1236 things: First, it lets the rest of the gimplifier know what
1237 replacement to use. Second, it lets the debug info know
1238 where to find the value. */
1239 ptr_type = build_pointer_type (TREE_TYPE (decl));
1240 addr = create_tmp_var (ptr_type, get_name (decl));
1241 DECL_IGNORED_P (addr) = 0;
1242 t = build_fold_indirect_ref (addr);
1243 SET_DECL_VALUE_EXPR (decl, t);
1244 DECL_HAS_VALUE_EXPR_P (decl) = 1;
1245
1246 args = tree_cons (NULL, DECL_SIZE_UNIT (decl), NULL);
1247 t = built_in_decls[BUILT_IN_ALLOCA];
1248 t = build_function_call_expr (t, args);
1249 t = fold_convert (ptr_type, t);
1250 t = build2 (GIMPLE_MODIFY_STMT, void_type_node, addr, t);
1251
1252 gimplify_and_add (t, stmt_p);
1253
1254 /* Indicate that we need to restore the stack level when the
1255 enclosing BIND_EXPR is exited. */
1256 gimplify_ctxp->save_stack = true;
1257 }
1258
1259 if (init && init != error_mark_node)
1260 {
1261 if (!TREE_STATIC (decl))
1262 {
1263 DECL_INITIAL (decl) = NULL_TREE;
1264 init = build2 (INIT_EXPR, void_type_node, decl, init);
1265 gimplify_and_add (init, stmt_p);
1266 }
1267 else
1268 /* We must still examine initializers for static variables
1269 as they may contain a label address. */
1270 walk_tree (&init, force_labels_r, NULL, NULL);
1271 }
1272
1273 /* Some front ends do not explicitly declare all anonymous
1274 artificial variables. We compensate here by declaring the
1275 variables, though it would be better if the front ends would
1276 explicitly declare them. */
1277 if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
1278 && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
1279 gimple_add_tmp_var (decl);
1280 }
1281
1282 return GS_ALL_DONE;
1283 }
1284
1285 /* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body
1286 and replacing the LOOP_EXPR with goto, but if the loop contains an
1287 EXIT_EXPR, we need to append a label for it to jump to. */
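/* Sketch of the rewrite: a LOOP_EXPR becomes

     <start>:;
       ... body ...
     goto <start>;

   and when the body contained an EXIT_EXPR, the LOOP_EXPR itself is
   replaced by the exit label that the EXIT_EXPR branches to, emitted just
   after the backward goto.  */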
1288
1289 static enum gimplify_status
1290 gimplify_loop_expr (tree *expr_p, tree *pre_p)
1291 {
1292 tree saved_label = gimplify_ctxp->exit_label;
1293 tree start_label = build1 (LABEL_EXPR, void_type_node, NULL_TREE);
1294 tree jump_stmt = build_and_jump (&LABEL_EXPR_LABEL (start_label));
1295
1296 append_to_statement_list (start_label, pre_p);
1297
1298 gimplify_ctxp->exit_label = NULL_TREE;
1299
1300 gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);
1301
1302 if (gimplify_ctxp->exit_label)
1303 {
1304 append_to_statement_list (jump_stmt, pre_p);
1305 *expr_p = build1 (LABEL_EXPR, void_type_node, gimplify_ctxp->exit_label);
1306 }
1307 else
1308 *expr_p = jump_stmt;
1309
1310 gimplify_ctxp->exit_label = saved_label;
1311
1312 return GS_ALL_DONE;
1313 }
1314
1315 /* Compare two case labels. Because the front end should already have
1316 made sure that case ranges do not overlap, it is enough to only compare
1317 the CASE_LOW values of each case label. */
1318
1319 static int
1320 compare_case_labels (const void *p1, const void *p2)
1321 {
1322 tree case1 = *(tree *)p1;
1323 tree case2 = *(tree *)p2;
1324
1325 return tree_int_cst_compare (CASE_LOW (case1), CASE_LOW (case2));
1326 }
1327
1328 /* Sort the case labels in LABEL_VEC in place in ascending order. */
1329
1330 void
1331 sort_case_labels (tree label_vec)
1332 {
1333 size_t len = TREE_VEC_LENGTH (label_vec);
1334 tree default_case = TREE_VEC_ELT (label_vec, len - 1);
1335
1336 if (CASE_LOW (default_case))
1337 {
1338 size_t i;
1339
1340 /* The last label in the vector should be the default case
1341 but it is not. */
1342 for (i = 0; i < len; ++i)
1343 {
1344 tree t = TREE_VEC_ELT (label_vec, i);
1345 if (!CASE_LOW (t))
1346 {
1347 default_case = t;
1348 TREE_VEC_ELT (label_vec, i) = TREE_VEC_ELT (label_vec, len - 1);
1349 TREE_VEC_ELT (label_vec, len - 1) = default_case;
1350 break;
1351 }
1352 }
1353 }
1354
1355 qsort (&TREE_VEC_ELT (label_vec, 0), len - 1, sizeof (tree),
1356 compare_case_labels);
1357 }
1358
1359 /* Gimplify a SWITCH_EXPR, and collect a TREE_VEC of the labels it can
1360 branch to. */
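/* Sketch of the result: the case labels encountered while gimplifying the
   body are collected through gimplify_ctxp->case_labels, sorted, and
   stored in SWITCH_LABELS as a TREE_VEC whose last element is the default
   case; the body itself is emitted after the SWITCH_EXPR and SWITCH_BODY
   is cleared.  A default label is invented when the source had none, so
   that control can jump past the body.  */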
1361
1362 static enum gimplify_status
1363 gimplify_switch_expr (tree *expr_p, tree *pre_p)
1364 {
1365 tree switch_expr = *expr_p;
1366 enum gimplify_status ret;
1367
1368 ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL,
1369 is_gimple_val, fb_rvalue);
1370
1371 if (SWITCH_BODY (switch_expr))
1372 {
1373 VEC(tree,heap) *labels, *saved_labels;
1374 tree label_vec, default_case = NULL_TREE;
1375 size_t i, len;
1376
1377 /* If someone can be bothered to fill in the labels, they can
1378 be bothered to null out the body too. */
1379 gcc_assert (!SWITCH_LABELS (switch_expr));
1380
1381 saved_labels = gimplify_ctxp->case_labels;
1382 gimplify_ctxp->case_labels = VEC_alloc (tree, heap, 8);
1383
1384 gimplify_to_stmt_list (&SWITCH_BODY (switch_expr));
1385
1386 labels = gimplify_ctxp->case_labels;
1387 gimplify_ctxp->case_labels = saved_labels;
1388
1389 i = 0;
1390 while (i < VEC_length (tree, labels))
1391 {
1392 tree elt = VEC_index (tree, labels, i);
1393 tree low = CASE_LOW (elt);
1394 bool remove_element = FALSE;
1395
1396 if (low)
1397 {
1398 /* Discard empty ranges. */
1399 tree high = CASE_HIGH (elt);
1400 if (high && INT_CST_LT (high, low))
1401 remove_element = TRUE;
1402 }
1403 else
1404 {
1405 /* The default case must be the last label in the list. */
1406 gcc_assert (!default_case);
1407 default_case = elt;
1408 remove_element = TRUE;
1409 }
1410
1411 if (remove_element)
1412 VEC_ordered_remove (tree, labels, i);
1413 else
1414 i++;
1415 }
1416 len = i;
1417
1418 label_vec = make_tree_vec (len + 1);
1419 SWITCH_LABELS (*expr_p) = label_vec;
1420 append_to_statement_list (switch_expr, pre_p);
1421
1422 if (! default_case)
1423 {
1424 /* If the switch has no default label, add one, so that we jump
1425 around the switch body. */
1426 default_case = build3 (CASE_LABEL_EXPR, void_type_node, NULL_TREE,
1427 NULL_TREE, create_artificial_label ());
1428 append_to_statement_list (SWITCH_BODY (switch_expr), pre_p);
1429 *expr_p = build1 (LABEL_EXPR, void_type_node,
1430 CASE_LABEL (default_case));
1431 }
1432 else
1433 *expr_p = SWITCH_BODY (switch_expr);
1434
1435 for (i = 0; i < len; ++i)
1436 TREE_VEC_ELT (label_vec, i) = VEC_index (tree, labels, i);
1437 TREE_VEC_ELT (label_vec, len) = default_case;
1438
1439 VEC_free (tree, heap, labels);
1440
1441 sort_case_labels (label_vec);
1442
1443 SWITCH_BODY (switch_expr) = NULL;
1444 }
1445 else
1446 gcc_assert (SWITCH_LABELS (switch_expr));
1447
1448 return ret;
1449 }
1450
1451 static enum gimplify_status
1452 gimplify_case_label_expr (tree *expr_p)
1453 {
1454 tree expr = *expr_p;
1455 struct gimplify_ctx *ctxp;
1456
1457 /* Invalid OpenMP programs can play Duff's Device type games with
1458 #pragma omp parallel. At least in the C front end, we don't
1459 detect such invalid branches until after gimplification. */
1460 for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
1461 if (ctxp->case_labels)
1462 break;
1463
1464 VEC_safe_push (tree, heap, ctxp->case_labels, expr);
1465 *expr_p = build1 (LABEL_EXPR, void_type_node, CASE_LABEL (expr));
1466 return GS_ALL_DONE;
1467 }
1468
1469 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
1470 if necessary. */
1471
1472 tree
1473 build_and_jump (tree *label_p)
1474 {
1475 if (label_p == NULL)
1476 /* If there's nowhere to jump, just fall through. */
1477 return NULL_TREE;
1478
1479 if (*label_p == NULL_TREE)
1480 {
1481 tree label = create_artificial_label ();
1482 *label_p = label;
1483 }
1484
1485 return build1 (GOTO_EXPR, void_type_node, *label_p);
1486 }
1487
1488 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
1489 This also involves building a label to jump to and communicating it to
1490 gimplify_loop_expr through gimplify_ctxp->exit_label. */
1491
1492 static enum gimplify_status
1493 gimplify_exit_expr (tree *expr_p)
1494 {
1495 tree cond = TREE_OPERAND (*expr_p, 0);
1496 tree expr;
1497
1498 expr = build_and_jump (&gimplify_ctxp->exit_label);
1499 expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
1500 *expr_p = expr;
1501
1502 return GS_OK;
1503 }
1504
1505 /* A helper function to be called via walk_tree. Mark all labels under *TP
1506 as being forced. To be called for DECL_INITIAL of static variables. */
1507
1508 tree
1509 force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1510 {
1511 if (TYPE_P (*tp))
1512 *walk_subtrees = 0;
1513 if (TREE_CODE (*tp) == LABEL_DECL)
1514 FORCED_LABEL (*tp) = 1;
1515
1516 return NULL_TREE;
1517 }
1518
1519 /* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is
1520 different from its canonical type, wrap the whole thing inside a
1521 NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
1522 type.
1523
1524 The canonical type of a COMPONENT_REF is the type of the field being
1525 referenced--unless the field is a bit-field which can be read directly
1526 in a smaller mode, in which case the canonical type is the
1527 sign-appropriate type corresponding to that mode. */
1528
1529 static void
1530 canonicalize_component_ref (tree *expr_p)
1531 {
1532 tree expr = *expr_p;
1533 tree type;
1534
1535 gcc_assert (TREE_CODE (expr) == COMPONENT_REF);
1536
1537 if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
1538 type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
1539 else
1540 type = TREE_TYPE (TREE_OPERAND (expr, 1));
1541
1542 if (TREE_TYPE (expr) != type)
1543 {
1544 tree old_type = TREE_TYPE (expr);
1545
1546 /* Set the type of the COMPONENT_REF to the underlying type. */
1547 TREE_TYPE (expr) = type;
1548
1549 /* And wrap the whole thing inside a NOP_EXPR. */
1550 expr = build1 (NOP_EXPR, old_type, expr);
1551
1552 *expr_p = expr;
1553 }
1554 }
1555
1556 /* If a NOP conversion is changing a pointer to array of foo to a pointer
1557 to foo, embed that change in the ADDR_EXPR by converting
1558 T array[U];
1559 (T *)&array
1560 ==>
1561 &array[L]
1562 where L is the lower bound. For simplicity, only do this for constant
1563 lower bound. */
1564
1565 static void
1566 canonicalize_addr_expr (tree *expr_p)
1567 {
1568 tree expr = *expr_p;
1569 tree ctype = TREE_TYPE (expr);
1570 tree addr_expr = TREE_OPERAND (expr, 0);
1571 tree atype = TREE_TYPE (addr_expr);
1572 tree dctype, datype, ddatype, otype, obj_expr;
1573
1574 /* Both cast and addr_expr types should be pointers. */
1575 if (!POINTER_TYPE_P (ctype) || !POINTER_TYPE_P (atype))
1576 return;
1577
1578 /* The addr_expr type should be a pointer to an array. */
1579 datype = TREE_TYPE (atype);
1580 if (TREE_CODE (datype) != ARRAY_TYPE)
1581 return;
1582
1583 /* Both cast and addr_expr types should address the same object type. */
1584 dctype = TREE_TYPE (ctype);
1585 ddatype = TREE_TYPE (datype);
1586 if (!lang_hooks.types_compatible_p (ddatype, dctype))
1587 return;
1588
1589 /* The addr_expr and the object type should match. */
1590 obj_expr = TREE_OPERAND (addr_expr, 0);
1591 otype = TREE_TYPE (obj_expr);
1592 if (!lang_hooks.types_compatible_p (otype, datype))
1593 return;
1594
1595 /* The lower bound and element sizes must be constant. */
1596 if (!TYPE_SIZE_UNIT (dctype)
1597 || TREE_CODE (TYPE_SIZE_UNIT (dctype)) != INTEGER_CST
1598 || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
1599 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
1600 return;
1601
1602 /* All checks succeeded. Build a new node to merge the cast. */
1603 *expr_p = build4 (ARRAY_REF, dctype, obj_expr,
1604 TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
1605 TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
1606 size_binop (EXACT_DIV_EXPR, TYPE_SIZE_UNIT (dctype),
1607 size_int (TYPE_ALIGN_UNIT (dctype))));
1608 *expr_p = build1 (ADDR_EXPR, ctype, *expr_p);
1609 }
1610
1611 /* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions
1612 underneath as appropriate. */
1613
1614 static enum gimplify_status
1615 gimplify_conversion (tree *expr_p)
1616 {
1617 gcc_assert (TREE_CODE (*expr_p) == NOP_EXPR
1618 || TREE_CODE (*expr_p) == CONVERT_EXPR);
1619
1620 /* Then strip away all but the outermost conversion. */
1621 STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));
1622
1623 /* And remove the outermost conversion if it's useless. */
1624 if (tree_ssa_useless_type_conversion (*expr_p))
1625 *expr_p = TREE_OPERAND (*expr_p, 0);
1626
1627 /* If we still have a conversion at the toplevel,
1628 then canonicalize some constructs. */
1629 if (TREE_CODE (*expr_p) == NOP_EXPR || TREE_CODE (*expr_p) == CONVERT_EXPR)
1630 {
1631 tree sub = TREE_OPERAND (*expr_p, 0);
1632
1633 /* If a NOP conversion is changing the type of a COMPONENT_REF
1634 expression, then canonicalize its type now in order to expose more
1635 redundant conversions. */
1636 if (TREE_CODE (sub) == COMPONENT_REF)
1637 canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));
1638
1639 /* If a NOP conversion is changing a pointer to array of foo
1640 to a pointer to foo, embed that change in the ADDR_EXPR. */
1641 else if (TREE_CODE (sub) == ADDR_EXPR)
1642 canonicalize_addr_expr (expr_p);
1643 }
1644
1645 return GS_OK;
1646 }
1647
1648 /* Gimplify a VAR_DECL or PARM_DECL. Returns GS_OK if we expanded a
1649 DECL_VALUE_EXPR, and it's worth re-examining things. */
1650
1651 static enum gimplify_status
1652 gimplify_var_or_parm_decl (tree *expr_p)
1653 {
1654 tree decl = *expr_p;
1655
1656 /* ??? If this is a local variable, and it has not been seen in any
1657 outer BIND_EXPR, then it's probably the result of a duplicate
1658 declaration, for which we've already issued an error. It would
1659 be really nice if the front end wouldn't leak these at all.
1660 Currently the only known culprit is C++ destructors, as seen
1661 in g++.old-deja/g++.jason/binding.C. */
1662 if (TREE_CODE (decl) == VAR_DECL
1663 && !DECL_SEEN_IN_BIND_EXPR_P (decl)
1664 && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
1665 && decl_function_context (decl) == current_function_decl)
1666 {
1667 gcc_assert (errorcount || sorrycount);
1668 return GS_ERROR;
1669 }
1670
1671 /* When within an OpenMP context, notice uses of variables. */
1672 if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
1673 return GS_ALL_DONE;
1674
1675 /* If the decl is an alias for another expression, substitute it now. */
1676 if (DECL_HAS_VALUE_EXPR_P (decl))
1677 {
1678 *expr_p = unshare_expr (DECL_VALUE_EXPR (decl));
1679 return GS_OK;
1680 }
1681
1682 return GS_ALL_DONE;
1683 }
1684
1685
1686 /* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
1687 node pointed to by EXPR_P.
1688
1689 compound_lval
1690 : min_lval '[' val ']'
1691 | min_lval '.' ID
1692 | compound_lval '[' val ']'
1693 | compound_lval '.' ID
1694
1695 This is not part of the original SIMPLE definition, which separates
1696 array and member references, but it seems reasonable to handle them
1697 together. Also, this way we don't run into problems with union
1698 aliasing; gcc requires that for accesses through a union to alias, the
1699 union reference must be explicit, which was not always the case when we
1700 were splitting up array and member refs.
1701
1702 PRE_P points to the list where side effects that must happen before
1703 *EXPR_P should be stored.
1704
1705 POST_P points to the list where side effects that must happen after
1706 *EXPR_P should be stored. */
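/* A small worked example (sketch): for "x = a.b[i].c" the reference chain
   COMPONENT_REF -> ARRAY_REF -> COMPONENT_REF is pushed onto the stack,
   any variable bounds, sizes and offsets are gimplified first, then the
   base "a", and finally the index "i" (forced into a temporary register
   when it is not invariant), leaving a single compound lvalue for the
   assignment.  */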
1707
1708 static enum gimplify_status
1709 gimplify_compound_lval (tree *expr_p, tree *pre_p,
1710 tree *post_p, fallback_t fallback)
1711 {
1712 tree *p;
1713 VEC(tree,heap) *stack;
1714 enum gimplify_status ret = GS_OK, tret;
1715 int i;
1716
1717 /* Create a stack of the subexpressions so later we can walk them in
1718 order from inner to outer. */
1719 stack = VEC_alloc (tree, heap, 10);
1720
1721 /* We can handle anything that get_inner_reference can deal with. */
1722 for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
1723 {
1724 restart:
1725 /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs. */
1726 if (TREE_CODE (*p) == INDIRECT_REF)
1727 *p = fold_indirect_ref (*p);
1728
1729 if (handled_component_p (*p))
1730 ;
1731 /* Expand DECL_VALUE_EXPR now. In some cases that may expose
1732 additional COMPONENT_REFs. */
1733 else if ((TREE_CODE (*p) == VAR_DECL || TREE_CODE (*p) == PARM_DECL)
1734 && gimplify_var_or_parm_decl (p) == GS_OK)
1735 goto restart;
1736 else
1737 break;
1738
1739 VEC_safe_push (tree, heap, stack, *p);
1740 }
1741
1742 gcc_assert (VEC_length (tree, stack));
1743
1744 /* Now STACK is a stack of pointers to all the refs we've walked through
1745 and P points to the innermost expression.
1746
1747 Java requires that we elaborate nodes in source order. That
1748 means we must gimplify the inner expression followed by each of
1749 the indices, in order. But we can't gimplify the inner
1750 expression until we deal with any variable bounds, sizes, or
1751 positions in order to deal with PLACEHOLDER_EXPRs.
1752
1753 So we do this in three steps. First we deal with the annotations
1754 for any variables in the components, then we gimplify the base,
1755 then we gimplify any indices, from left to right. */
1756 for (i = VEC_length (tree, stack) - 1; i >= 0; i--)
1757 {
1758 tree t = VEC_index (tree, stack, i);
1759
1760 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
1761 {
1762 /* Gimplify the low bound and element type size and put them into
1763 the ARRAY_REF. If these values are set, they have already been
1764 gimplified. */
1765 if (!TREE_OPERAND (t, 2))
1766 {
1767 tree low = unshare_expr (array_ref_low_bound (t));
1768 if (!is_gimple_min_invariant (low))
1769 {
1770 TREE_OPERAND (t, 2) = low;
1771 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
1772 is_gimple_formal_tmp_reg, fb_rvalue);
1773 ret = MIN (ret, tret);
1774 }
1775 }
1776
1777 if (!TREE_OPERAND (t, 3))
1778 {
1779 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
1780 tree elmt_size = unshare_expr (array_ref_element_size (t));
1781 tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));
1782
1783 /* Divide the element size by the alignment of the element
1784 type (above). */
1785 elmt_size = size_binop (EXACT_DIV_EXPR, elmt_size, factor);
1786
1787 if (!is_gimple_min_invariant (elmt_size))
1788 {
1789 TREE_OPERAND (t, 3) = elmt_size;
1790 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
1791 is_gimple_formal_tmp_reg, fb_rvalue);
1792 ret = MIN (ret, tret);
1793 }
1794 }
1795 }
1796 else if (TREE_CODE (t) == COMPONENT_REF)
1797 {
1798 /* Set the field offset into T and gimplify it. */
1799 if (!TREE_OPERAND (t, 2))
1800 {
1801 tree offset = unshare_expr (component_ref_field_offset (t));
1802 tree field = TREE_OPERAND (t, 1);
1803 tree factor
1804 = size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);
1805
1806 /* Divide the offset by its alignment. */
1807 offset = size_binop (EXACT_DIV_EXPR, offset, factor);
1808
1809 if (!is_gimple_min_invariant (offset))
1810 {
1811 TREE_OPERAND (t, 2) = offset;
1812 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
1813 is_gimple_formal_tmp_reg, fb_rvalue);
1814 ret = MIN (ret, tret);
1815 }
1816 }
1817 }
1818 }
1819
1820 /* Step 2 is to gimplify the base expression. Make sure lvalue is set
1821 so as to match the min_lval predicate. Failure to do so may result
1822 in the creation of large aggregate temporaries. */
1823 tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
1824 fallback | fb_lvalue);
1825 ret = MIN (ret, tret);
1826
1827 /* And finally, the indices and operands to BIT_FIELD_REF. During this
1828 loop we also remove any useless conversions. */
1829 for (; VEC_length (tree, stack) > 0; )
1830 {
1831 tree t = VEC_pop (tree, stack);
1832
1833 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
1834 {
1835 /* Gimplify the dimension.
1836 Temporary fix for gcc.c-torture/execute/20040313-1.c.
1837 Gimplify non-constant array indices into a temporary
1838 variable.
1839 FIXME - The real fix is to gimplify post-modify
1840 expressions into a minimal gimple lvalue. However, that
1841 exposes bugs in alias analysis. The alias analyzer does
1842 not handle &PTR->FIELD very well. Will fix after the
1843 branch is merged into mainline (dnovillo 2004-05-03). */
1844 if (!is_gimple_min_invariant (TREE_OPERAND (t, 1)))
1845 {
1846 tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
1847 is_gimple_formal_tmp_reg, fb_rvalue);
1848 ret = MIN (ret, tret);
1849 }
1850 }
1851 else if (TREE_CODE (t) == BIT_FIELD_REF)
1852 {
1853 tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
1854 is_gimple_val, fb_rvalue);
1855 ret = MIN (ret, tret);
1856 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
1857 is_gimple_val, fb_rvalue);
1858 ret = MIN (ret, tret);
1859 }
1860
1861 STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));
1862
1863 /* The innermost expression P may have originally had TREE_SIDE_EFFECTS
1864 set which would have caused all the outer expressions in EXPR_P
1865 leading to P to also have had TREE_SIDE_EFFECTS set. */
1866 recalculate_side_effects (t);
1867 }
1868
1869 tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval, fallback);
1870 ret = MIN (ret, tret);
1871
1872 /* If the outermost expression is a COMPONENT_REF, canonicalize its type. */
1873 if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
1874 {
1875 canonicalize_component_ref (expr_p);
1876 ret = MIN (ret, GS_OK);
1877 }
1878
1879 VEC_free (tree, heap, stack);
1880
1881 return ret;
1882 }
1883
1884 /* Gimplify the self modifying expression pointed to by EXPR_P
1885 (++, --, +=, -=).
1886
1887 PRE_P points to the list where side effects that must happen before
1888 *EXPR_P should be stored.
1889
1890 POST_P points to the list where side effects that must happen after
1891 *EXPR_P should be stored.
1892
1893 WANT_VALUE is nonzero iff we want to use the value of this expression
1894 in another expression. */
1895
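/* Informal example (for illustration only): for a register variable "a",
   the statement "a++;" is handled as the prefix form and simply becomes
   "a = a + 1".  When the value is wanted, as in "b = a++;", the LHS is
   first evaluated to an rvalue and the increment is queued on the
   internal post queue, so the result is roughly

     b = a;
     a = a + 1;  */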
1896 static enum gimplify_status
1897 gimplify_self_mod_expr (tree *expr_p, tree *pre_p, tree *post_p,
1898 bool want_value)
1899 {
1900 enum tree_code code;
1901 tree lhs, lvalue, rhs, t1, post = NULL, *orig_post_p = post_p;
1902 bool postfix;
1903 enum tree_code arith_code;
1904 enum gimplify_status ret;
1905
1906 code = TREE_CODE (*expr_p);
1907
1908 gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
1909 || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);
1910
1911 /* Prefix or postfix? */
1912 if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
1913 /* Faster to treat as prefix if result is not used. */
1914 postfix = want_value;
1915 else
1916 postfix = false;
1917
1918 /* For postfix, make sure the inner expression's post side effects
1919 are executed after side effects from this expression. */
1920 if (postfix)
1921 post_p = &post;
1922
1923 /* Add or subtract? */
1924 if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
1925 arith_code = PLUS_EXPR;
1926 else
1927 arith_code = MINUS_EXPR;
1928
1929 /* Gimplify the LHS into a GIMPLE lvalue. */
1930 lvalue = TREE_OPERAND (*expr_p, 0);
1931 ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
1932 if (ret == GS_ERROR)
1933 return ret;
1934
1935 /* Extract the operands to the arithmetic operation. */
1936 lhs = lvalue;
1937 rhs = TREE_OPERAND (*expr_p, 1);
1938
1939 /* For a postfix operator, we evaluate the LHS to an rvalue and then use
1940 that as the result value and in the postqueue operation. */
1941 if (postfix)
1942 {
1943 ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
1944 if (ret == GS_ERROR)
1945 return ret;
1946 }
1947
1948 t1 = build2 (arith_code, TREE_TYPE (*expr_p), lhs, rhs);
1949 t1 = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (lvalue), lvalue, t1);
1950
1951 if (postfix)
1952 {
1953 gimplify_and_add (t1, orig_post_p);
1954 append_to_statement_list (post, orig_post_p);
1955 *expr_p = lhs;
1956 return GS_ALL_DONE;
1957 }
1958 else
1959 {
1960 *expr_p = t1;
1961 return GS_OK;
1962 }
1963 }
1964
1965 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
1966
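/* Informal example (for illustration only): if EXPR has the variably
   sized type "char[n]", it becomes WITH_SIZE_EXPR <EXPR, n'>, where n'
   is the unshared TYPE_SIZE_UNIT of the type with any PLACEHOLDER_EXPRs
   substituted.  The recorded size is what later lets gimplify_modify_expr
   lower copies of such objects to memcpy/memset.  */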
1967 static void
1968 maybe_with_size_expr (tree *expr_p)
1969 {
1970 tree expr = *expr_p;
1971 tree type = TREE_TYPE (expr);
1972 tree size;
1973
1974 /* If we've already wrapped this or the type is error_mark_node, we can't do
1975 anything. */
1976 if (TREE_CODE (expr) == WITH_SIZE_EXPR
1977 || type == error_mark_node)
1978 return;
1979
1980 /* If the size isn't known or is a constant, we have nothing to do. */
1981 size = TYPE_SIZE_UNIT (type);
1982 if (!size || TREE_CODE (size) == INTEGER_CST)
1983 return;
1984
1985 /* Otherwise, make a WITH_SIZE_EXPR. */
1986 size = unshare_expr (size);
1987 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
1988 *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
1989 }
1990
1991 /* Subroutine of gimplify_call_expr: Gimplify a single argument. */
1992
1993 static enum gimplify_status
1994 gimplify_arg (tree *expr_p, tree *pre_p)
1995 {
1996 bool (*test) (tree);
1997 fallback_t fb;
1998
1999 /* In general, we allow lvalues for function arguments to avoid
2000 extra overhead of copying large aggregates out of even larger
2001 aggregates into temporaries only to copy the temporaries to
2002 the argument list. Make optimizers happy by pulling out to
2003 temporaries those types that fit in registers. */
2004 if (is_gimple_reg_type (TREE_TYPE (*expr_p)))
2005 test = is_gimple_val, fb = fb_rvalue;
2006 else
2007 test = is_gimple_lvalue, fb = fb_either;
2008
2009 /* If this is a variable sized type, we must remember the size. */
2010 maybe_with_size_expr (expr_p);
2011
2012 /* There is a sequence point before a function call. Side effects in
2013 the argument list must occur before the actual call. So, when
2014 gimplifying arguments, force gimplify_expr to use an internal
2015 post queue which is then appended to the end of PRE_P. */
2016 return gimplify_expr (expr_p, pre_p, NULL, test, fb);
2017 }
2018
2019 /* Gimplify the CALL_EXPR node pointed to by EXPR_P. PRE_P points to the
2020 list where side effects that must happen before *EXPR_P should be stored.
2021 WANT_VALUE is true if the result of the call is desired. */
2022
2023 static enum gimplify_status
2024 gimplify_call_expr (tree *expr_p, tree *pre_p, bool want_value)
2025 {
2026 tree decl;
2027 tree arglist;
2028 enum gimplify_status ret;
2029
2030 gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);
2031
2032 /* For reliable diagnostics during inlining, it is necessary that
2033 every call_expr be annotated with file and line. */
2034 if (! EXPR_HAS_LOCATION (*expr_p))
2035 SET_EXPR_LOCATION (*expr_p, input_location);
2036
2037 /* This may be a call to a builtin function.
2038
2039 Builtin function calls may be transformed into different
2040 (and more efficient) builtin function calls under certain
2041 circumstances. Unfortunately, gimplification can muck things
2042 up enough that the builtin expanders are not aware that certain
2043 transformations are still valid.
2044
2045 So we attempt transformation/gimplification of the call before
2046 we gimplify the CALL_EXPR. At this time we do not manage to
2047 transform all calls in the same manner as the expanders do, but
2048 we do transform most of them. */
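  /* For example (illustrative only), a call such as
     __builtin_strlen ("abc") may be folded by fold_builtin to the
     integer constant 3 here, before the call and its arguments are
     gimplified below.  */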
2049 decl = get_callee_fndecl (*expr_p);
2050 if (decl && DECL_BUILT_IN (decl))
2051 {
2052 tree arglist = TREE_OPERAND (*expr_p, 1);
2053 tree new = fold_builtin (decl, arglist, !want_value);
2054
2055 if (new && new != *expr_p)
2056 {
2057 /* There was a transformation of this call which computes the
2058 same value, but in a more efficient way. Return and try
2059 again. */
2060 *expr_p = new;
2061 return GS_OK;
2062 }
2063
2064 if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
2065 && DECL_FUNCTION_CODE (decl) == BUILT_IN_VA_START)
2066 {
2067 if (!arglist || !TREE_CHAIN (arglist))
2068 {
2069 error ("too few arguments to function %<va_start%>");
2070 *expr_p = build_empty_stmt ();
2071 return GS_OK;
2072 }
2073
2074 if (fold_builtin_next_arg (TREE_CHAIN (arglist)))
2075 {
2076 *expr_p = build_empty_stmt ();
2077 return GS_OK;
2078 }
2079 /* Avoid gimplifying the second argument to va_start, which needs
2080 to be the plain PARM_DECL. */
2081 return gimplify_arg (&TREE_VALUE (TREE_OPERAND (*expr_p, 1)), pre_p);
2082 }
2083 }
2084
2085 /* There is a sequence point before the call, so any side effects in
2086 the calling expression must occur before the actual call. Force
2087 gimplify_expr to use an internal post queue. */
2088 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, NULL,
2089 is_gimple_call_addr, fb_rvalue);
2090
2091 if (PUSH_ARGS_REVERSED)
2092 TREE_OPERAND (*expr_p, 1) = nreverse (TREE_OPERAND (*expr_p, 1));
2093 for (arglist = TREE_OPERAND (*expr_p, 1); arglist;
2094 arglist = TREE_CHAIN (arglist))
2095 {
2096 enum gimplify_status t;
2097
2098 t = gimplify_arg (&TREE_VALUE (arglist), pre_p);
2099
2100 if (t == GS_ERROR)
2101 ret = GS_ERROR;
2102 }
2103 if (PUSH_ARGS_REVERSED)
2104 TREE_OPERAND (*expr_p, 1) = nreverse (TREE_OPERAND (*expr_p, 1));
2105
2106 /* Try this again in case gimplification exposed something. */
2107 if (ret != GS_ERROR)
2108 {
2109 decl = get_callee_fndecl (*expr_p);
2110 if (decl && DECL_BUILT_IN (decl))
2111 {
2112 tree arglist = TREE_OPERAND (*expr_p, 1);
2113 tree new = fold_builtin (decl, arglist, !want_value);
2114
2115 if (new && new != *expr_p)
2116 {
2117 /* There was a transformation of this call which computes the
2118 same value, but in a more efficient way. Return and try
2119 again. */
2120 *expr_p = new;
2121 return GS_OK;
2122 }
2123 }
2124 }
2125
2126 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on the
2127 call itself. This allows us to eliminate redundant or useless
2128 calls to "const" functions. */
2129 if (TREE_CODE (*expr_p) == CALL_EXPR
2130 && (call_expr_flags (*expr_p) & (ECF_CONST | ECF_PURE)))
2131 TREE_SIDE_EFFECTS (*expr_p) = 0;
2132
2133 return ret;
2134 }
2135
2136 /* Handle shortcut semantics in the predicate operand of a COND_EXPR by
2137 rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
2138
2139 TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
2140 condition is true or false, respectively. If null, we should generate
2141 our own to skip over the evaluation of this specific expression.
2142
2143 This function is the tree equivalent of do_jump.
2144
2145 shortcut_cond_r should only be called by shortcut_cond_expr. */
2146
2147 static tree
2148 shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p)
2149 {
2150 tree local_label = NULL_TREE;
2151 tree t, expr = NULL;
2152
2153 /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
2154 retain the shortcut semantics. Just insert the gotos here;
2155 shortcut_cond_expr will append the real blocks later. */
2156 if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
2157 {
2158 /* Turn if (a && b) into
2159
2160 if (a); else goto no;
2161 if (b) goto yes; else goto no;
2162 (no:) */
2163
2164 if (false_label_p == NULL)
2165 false_label_p = &local_label;
2166
2167 t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p);
2168 append_to_statement_list (t, &expr);
2169
2170 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
2171 false_label_p);
2172 append_to_statement_list (t, &expr);
2173 }
2174 else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
2175 {
2176 /* Turn if (a || b) into
2177
2178 if (a) goto yes;
2179 if (b) goto yes; else goto no;
2180 (yes:) */
2181
2182 if (true_label_p == NULL)
2183 true_label_p = &local_label;
2184
2185 t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL);
2186 append_to_statement_list (t, &expr);
2187
2188 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
2189 false_label_p);
2190 append_to_statement_list (t, &expr);
2191 }
2192 else if (TREE_CODE (pred) == COND_EXPR)
2193 {
2194 /* As long as we're messing with gotos, turn if (a ? b : c) into
2195 if (a)
2196 if (b) goto yes; else goto no;
2197 else
2198 if (c) goto yes; else goto no; */
2199 expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
2200 shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
2201 false_label_p),
2202 shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
2203 false_label_p));
2204 }
2205 else
2206 {
2207 expr = build3 (COND_EXPR, void_type_node, pred,
2208 build_and_jump (true_label_p),
2209 build_and_jump (false_label_p));
2210 }
2211
2212 if (local_label)
2213 {
2214 t = build1 (LABEL_EXPR, void_type_node, local_label);
2215 append_to_statement_list (t, &expr);
2216 }
2217
2218 return expr;
2219 }
2220
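/* Rewrite the COND_EXPR EXPR, whose predicate contains TRUTH_ANDIF_EXPR
   or TRUTH_ORIF_EXPR, into a sequence of simple COND_EXPRs, GOTO_EXPRs
   and LABEL_EXPRs that preserves the short-circuit semantics.  As an
   informal illustration, "if (a && b) x = 1; else x = 2;" becomes
   roughly

     if (a) ; else goto no;
     if (b) ; else goto no;
     x = 1;
     goto end;
   no:
     x = 2;
   end:  */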
2221 static tree
2222 shortcut_cond_expr (tree expr)
2223 {
2224 tree pred = TREE_OPERAND (expr, 0);
2225 tree then_ = TREE_OPERAND (expr, 1);
2226 tree else_ = TREE_OPERAND (expr, 2);
2227 tree true_label, false_label, end_label, t;
2228 tree *true_label_p;
2229 tree *false_label_p;
2230 bool emit_end, emit_false, jump_over_else;
2231 bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
2232 bool else_se = else_ && TREE_SIDE_EFFECTS (else_);
2233
2234 /* First do simple transformations. */
2235 if (!else_se)
2236 {
2237 /* If there is no 'else', turn (a && b) into if (a) if (b). */
2238 while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
2239 {
2240 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
2241 then_ = shortcut_cond_expr (expr);
2242 then_se = then_ && TREE_SIDE_EFFECTS (then_);
2243 pred = TREE_OPERAND (pred, 0);
2244 expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
2245 }
2246 }
2247 if (!then_se)
2248 {
2249 /* If there is no 'then', turn
2250 if (a || b); else d
2251 into
2252 if (a); else if (b); else d. */
2253 while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
2254 {
2255 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
2256 else_ = shortcut_cond_expr (expr);
2257 else_se = else_ && TREE_SIDE_EFFECTS (else_);
2258 pred = TREE_OPERAND (pred, 0);
2259 expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
2260 }
2261 }
2262
2263 /* If we're done, great. */
2264 if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
2265 && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
2266 return expr;
2267
2268 /* Otherwise we need to mess with gotos. Change
2269 if (a) c; else d;
2270 to
2271 if (a); else goto no;
2272 c; goto end;
2273 no: d; end:
2274 and recursively gimplify the condition. */
2275
2276 true_label = false_label = end_label = NULL_TREE;
2277
2278 /* If our arms just jump somewhere, hijack those labels so we don't
2279 generate jumps to jumps. */
2280
2281 if (then_
2282 && TREE_CODE (then_) == GOTO_EXPR
2283 && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL)
2284 {
2285 true_label = GOTO_DESTINATION (then_);
2286 then_ = NULL;
2287 then_se = false;
2288 }
2289
2290 if (else_
2291 && TREE_CODE (else_) == GOTO_EXPR
2292 && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL)
2293 {
2294 false_label = GOTO_DESTINATION (else_);
2295 else_ = NULL;
2296 else_se = false;
2297 }
2298
2299 /* If we aren't hijacking a label for the 'then' branch, it falls through. */
2300 if (true_label)
2301 true_label_p = &true_label;
2302 else
2303 true_label_p = NULL;
2304
2305 /* The 'else' branch also needs a label if it contains interesting code. */
2306 if (false_label || else_se)
2307 false_label_p = &false_label;
2308 else
2309 false_label_p = NULL;
2310
2311 /* If there was nothing else in our arms, just forward the label(s). */
2312 if (!then_se && !else_se)
2313 return shortcut_cond_r (pred, true_label_p, false_label_p);
2314
2315 /* If our last subexpression already has a terminal label, reuse it. */
2316 if (else_se)
2317 expr = expr_last (else_);
2318 else if (then_se)
2319 expr = expr_last (then_);
2320 else
2321 expr = NULL;
2322 if (expr && TREE_CODE (expr) == LABEL_EXPR)
2323 end_label = LABEL_EXPR_LABEL (expr);
2324
2325 /* If we don't care about jumping to the 'else' branch, jump to the end
2326 if the condition is false. */
2327 if (!false_label_p)
2328 false_label_p = &end_label;
2329
2330 /* We only want to emit these labels if we aren't hijacking them. */
2331 emit_end = (end_label == NULL_TREE);
2332 emit_false = (false_label == NULL_TREE);
2333
2334 /* We only emit the jump over the else clause if we have to--if the
2335 then clause may fall through. Otherwise we can wind up with a
2336 useless jump and a useless label at the end of gimplified code,
2337 which will cause us to think that this conditional as a whole
2338 falls through even if it doesn't. If we then inline a function
2339 which ends with such a condition, that can cause us to issue an
2340 inappropriate warning about control reaching the end of a
2341 non-void function. */
2342 jump_over_else = block_may_fallthru (then_);
2343
2344 pred = shortcut_cond_r (pred, true_label_p, false_label_p);
2345
2346 expr = NULL;
2347 append_to_statement_list (pred, &expr);
2348
2349 append_to_statement_list (then_, &expr);
2350 if (else_se)
2351 {
2352 if (jump_over_else)
2353 {
2354 t = build_and_jump (&end_label);
2355 append_to_statement_list (t, &expr);
2356 }
2357 if (emit_false)
2358 {
2359 t = build1 (LABEL_EXPR, void_type_node, false_label);
2360 append_to_statement_list (t, &expr);
2361 }
2362 append_to_statement_list (else_, &expr);
2363 }
2364 if (emit_end && end_label)
2365 {
2366 t = build1 (LABEL_EXPR, void_type_node, end_label);
2367 append_to_statement_list (t, &expr);
2368 }
2369
2370 return expr;
2371 }
2372
2373 /* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */
2374
2375 tree
2376 gimple_boolify (tree expr)
2377 {
2378 tree type = TREE_TYPE (expr);
2379
2380 if (TREE_CODE (type) == BOOLEAN_TYPE)
2381 return expr;
2382
2383 switch (TREE_CODE (expr))
2384 {
2385 case TRUTH_AND_EXPR:
2386 case TRUTH_OR_EXPR:
2387 case TRUTH_XOR_EXPR:
2388 case TRUTH_ANDIF_EXPR:
2389 case TRUTH_ORIF_EXPR:
2390 /* Also boolify the arguments of truth exprs. */
2391 TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
2392 /* FALLTHRU */
2393
2394 case TRUTH_NOT_EXPR:
2395 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
2396 /* FALLTHRU */
2397
2398 case EQ_EXPR: case NE_EXPR:
2399 case LE_EXPR: case GE_EXPR: case LT_EXPR: case GT_EXPR:
2400 /* These expressions always produce boolean results. */
2401 TREE_TYPE (expr) = boolean_type_node;
2402 return expr;
2403
2404 default:
2405 /* Other expressions that get here must have boolean values, but
2406 might need to be converted to the appropriate mode. */
2407 return fold_convert (boolean_type_node, expr);
2408 }
2409 }
2410
2411 /* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
2412 into
2413
2414 if (p) if (p)
2415 t1 = a; a;
2416 else or else
2417 t1 = b; b;
2418 t1;
2419
2420 The second form is used when *EXPR_P is of type void.
2421
2422 TARGET is the tree for T1 above.
2423
2424 PRE_P points to the list where side effects that must happen before
2425 *EXPR_P should be stored. */
2426
2427 static enum gimplify_status
2428 gimplify_cond_expr (tree *expr_p, tree *pre_p, fallback_t fallback)
2429 {
2430 tree expr = *expr_p;
2431 tree tmp, tmp2, type;
2432 enum gimplify_status ret;
2433
2434 type = TREE_TYPE (expr);
2435
2436 /* If this COND_EXPR has a value, copy the values into a temporary within
2437 the arms. */
2438 if (! VOID_TYPE_P (type))
2439 {
2440 tree result;
2441
2442 if ((fallback & fb_lvalue) == 0)
2443 {
2444 result = tmp2 = tmp = create_tmp_var (TREE_TYPE (expr), "iftmp");
2445 ret = GS_ALL_DONE;
2446 }
2447 else
2448 {
2449 tree type = build_pointer_type (TREE_TYPE (expr));
2450
2451 if (TREE_TYPE (TREE_OPERAND (expr, 1)) != void_type_node)
2452 TREE_OPERAND (expr, 1) =
2453 build_fold_addr_expr (TREE_OPERAND (expr, 1));
2454
2455 if (TREE_TYPE (TREE_OPERAND (expr, 2)) != void_type_node)
2456 TREE_OPERAND (expr, 2) =
2457 build_fold_addr_expr (TREE_OPERAND (expr, 2));
2458
2459 tmp2 = tmp = create_tmp_var (type, "iftmp");
2460
2461 expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (expr, 0),
2462 TREE_OPERAND (expr, 1), TREE_OPERAND (expr, 2));
2463
2464 result = build_fold_indirect_ref (tmp);
2465 ret = GS_ALL_DONE;
2466 }
2467
2468 /* Build the then clause, 't1 = a;'. But don't build an assignment
2469 if this branch is void; in C++ it can be, if it's a throw. */
2470 if (TREE_TYPE (TREE_OPERAND (expr, 1)) != void_type_node)
2471 TREE_OPERAND (expr, 1)
2472 = build2 (GIMPLE_MODIFY_STMT, void_type_node, tmp,
2473 TREE_OPERAND (expr, 1));
2474
2475 /* Build the else clause, 't1 = b;'. */
2476 if (TREE_TYPE (TREE_OPERAND (expr, 2)) != void_type_node)
2477 TREE_OPERAND (expr, 2)
2478 = build2 (GIMPLE_MODIFY_STMT, void_type_node, tmp2,
2479 TREE_OPERAND (expr, 2));
2480
2481 TREE_TYPE (expr) = void_type_node;
2482 recalculate_side_effects (expr);
2483
2484 /* Move the COND_EXPR to the prequeue. */
2485 gimplify_and_add (expr, pre_p);
2486
2487 *expr_p = result;
2488 return ret;
2489 }
2490
2491 /* Make sure the condition has BOOLEAN_TYPE. */
2492 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
2493
2494 /* Break apart && and || conditions. */
2495 if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
2496 || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
2497 {
2498 expr = shortcut_cond_expr (expr);
2499
2500 if (expr != *expr_p)
2501 {
2502 *expr_p = expr;
2503
2504 /* We can't rely on gimplify_expr to re-gimplify the expanded
2505 form properly, as cleanups might cause the target labels to be
2506 wrapped in a TRY_FINALLY_EXPR. To prevent that, we need to
2507 set up a conditional context. */
2508 gimple_push_condition ();
2509 gimplify_stmt (expr_p);
2510 gimple_pop_condition (pre_p);
2511
2512 return GS_ALL_DONE;
2513 }
2514 }
2515
2516 /* Now do the normal gimplification. */
2517 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL,
2518 is_gimple_condexpr, fb_rvalue);
2519
2520 gimple_push_condition ();
2521
2522 gimplify_to_stmt_list (&TREE_OPERAND (expr, 1));
2523 gimplify_to_stmt_list (&TREE_OPERAND (expr, 2));
2524 recalculate_side_effects (expr);
2525
2526 gimple_pop_condition (pre_p);
2527
2528 if (ret == GS_ERROR)
2529 ;
2530 else if (TREE_SIDE_EFFECTS (TREE_OPERAND (expr, 1)))
2531 ret = GS_ALL_DONE;
2532 else if (TREE_SIDE_EFFECTS (TREE_OPERAND (expr, 2)))
2533 /* Rewrite "if (a); else b" to "if (!a) b" */
2534 {
2535 TREE_OPERAND (expr, 0) = invert_truthvalue (TREE_OPERAND (expr, 0));
2536 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL,
2537 is_gimple_condexpr, fb_rvalue);
2538
2539 tmp = TREE_OPERAND (expr, 1);
2540 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 2);
2541 TREE_OPERAND (expr, 2) = tmp;
2542 }
2543 else
2544 /* Both arms are empty; replace the COND_EXPR with its predicate. */
2545 expr = TREE_OPERAND (expr, 0);
2546
2547 *expr_p = expr;
2548 return ret;
2549 }
2550
2551 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
2552 a call to __builtin_memcpy. */
2553
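/* Informal example (for illustration only): for objects X and Y of a
   variably sized type, the assignment "X = Y" is rewritten here into
   roughly

     __builtin_memcpy (&X, &Y, SIZE);

   where SIZE is the size recorded by the enclosing WITH_SIZE_EXPR.
   When WANT_VALUE is set, the call is additionally wrapped so that
   "*(type *) __builtin_memcpy (&X, &Y, SIZE)" still yields the stored
   value.  */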
2554 static enum gimplify_status
2555 gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value)
2556 {
2557 tree args, t, to, to_ptr, from;
2558
2559 to = GENERIC_TREE_OPERAND (*expr_p, 0);
2560 from = GENERIC_TREE_OPERAND (*expr_p, 1);
2561
2562 args = tree_cons (NULL, size, NULL);
2563
2564 t = build_fold_addr_expr (from);
2565 args = tree_cons (NULL, t, args);
2566
2567 to_ptr = build_fold_addr_expr (to);
2568 args = tree_cons (NULL, to_ptr, args);
2569 t = implicit_built_in_decls[BUILT_IN_MEMCPY];
2570 t = build_function_call_expr (t, args);
2571
2572 if (want_value)
2573 {
2574 t = build1 (NOP_EXPR, TREE_TYPE (to_ptr), t);
2575 t = build1 (INDIRECT_REF, TREE_TYPE (to), t);
2576 }
2577
2578 *expr_p = t;
2579 return GS_OK;
2580 }
2581
2582 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
2583 a call to __builtin_memset. In this case we know that the RHS is
2584 a CONSTRUCTOR with an empty element list. */
2585
2586 static enum gimplify_status
2587 gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value)
2588 {
2589 tree args, t, to, to_ptr;
2590
2591 to = GENERIC_TREE_OPERAND (*expr_p, 0);
2592
2593 args = tree_cons (NULL, size, NULL);
2594
2595 args = tree_cons (NULL, integer_zero_node, args);
2596
2597 to_ptr = build_fold_addr_expr (to);
2598 args = tree_cons (NULL, to_ptr, args);
2599 t = implicit_built_in_decls[BUILT_IN_MEMSET];
2600 t = build_function_call_expr (t, args);
2601
2602 if (want_value)
2603 {
2604 t = build1 (NOP_EXPR, TREE_TYPE (to_ptr), t);
2605 t = build1 (INDIRECT_REF, TREE_TYPE (to), t);
2606 }
2607
2608 *expr_p = t;
2609 return GS_OK;
2610 }
2611
2612 /* A subroutine of gimplify_init_ctor_preeval. Called via walk_tree,
2613 determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
2614 assignment. Returns non-null if we detect a potential overlap. */
2615
2616 struct gimplify_init_ctor_preeval_data
2617 {
2618 /* The base decl of the lhs object. May be NULL, in which case we
2619 have to assume the lhs is indirect. */
2620 tree lhs_base_decl;
2621
2622 /* The alias set of the lhs object. */
2623 int lhs_alias_set;
2624 };
2625
2626 static tree
2627 gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
2628 {
2629 struct gimplify_init_ctor_preeval_data *data
2630 = (struct gimplify_init_ctor_preeval_data *) xdata;
2631 tree t = *tp;
2632
2633 /* If we find the base object, obviously we have overlap. */
2634 if (data->lhs_base_decl == t)
2635 return t;
2636
2637 /* If the constructor component is indirect, determine if we have a
2638 potential overlap with the lhs. The only bits of information we
2639 have to go on at this point are addressability and alias sets. */
2640 if (TREE_CODE (t) == INDIRECT_REF
2641 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
2642 && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
2643 return t;
2644
2645 if (IS_TYPE_OR_DECL_P (t))
2646 *walk_subtrees = 0;
2647 return NULL;
2648 }
2649
2650 /* A subroutine of gimplify_init_constructor. Pre-evaluate *EXPR_P,
2651 force values that overlap with the lhs (as described by *DATA)
2652 into temporaries. */
2653
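/* Informal example (hypothetical source): in

     s = (struct S) { s.b, s.a };

   both initializer values overlap the destination, so they are forced
   into temporaries here before the element-wise stores generated later
   can clobber them.  */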
2654 static void
2655 gimplify_init_ctor_preeval (tree *expr_p, tree *pre_p, tree *post_p,
2656 struct gimplify_init_ctor_preeval_data *data)
2657 {
2658 enum gimplify_status one;
2659
2660 /* If the value is invariant, then there's nothing to pre-evaluate.
2661 But ensure it doesn't have any side-effects since a SAVE_EXPR is
2662 invariant but has side effects and might contain a reference to
2663 the object we're initializing. */
2664 if (TREE_INVARIANT (*expr_p) && !TREE_SIDE_EFFECTS (*expr_p))
2665 return;
2666
2667 /* If the type has non-trivial constructors, we can't pre-evaluate. */
2668 if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
2669 return;
2670
2671 /* Recurse for nested constructors. */
2672 if (TREE_CODE (*expr_p) == CONSTRUCTOR)
2673 {
2674 unsigned HOST_WIDE_INT ix;
2675 constructor_elt *ce;
2676 VEC(constructor_elt,gc) *v = CONSTRUCTOR_ELTS (*expr_p);
2677
2678 for (ix = 0; VEC_iterate (constructor_elt, v, ix, ce); ix++)
2679 gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);
2680 return;
2681 }
2682
2683 /* If this is a variable sized type, we must remember the size. */
2684 maybe_with_size_expr (expr_p);
2685
2686 /* Gimplify the constructor element to something appropriate for the rhs
2687 of a MODIFY_EXPR. Given that we know the lhs is an aggregate, we know
2688 the gimplifier will consider this a store to memory. Doing this
2689 gimplification now means that we won't have to deal with complicated
2690 language-specific trees, nor trees like SAVE_EXPR that can induce
2691 exponential search behavior. */
2692 one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
2693 if (one == GS_ERROR)
2694 {
2695 *expr_p = NULL;
2696 return;
2697 }
2698
2699 /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
2700 with the lhs, since "a = { .x=a }" doesn't make sense. This will
2701 always be true for all scalars, since is_gimple_mem_rhs insists on a
2702 temporary variable for them. */
2703 if (DECL_P (*expr_p))
2704 return;
2705
2706 /* If this is of variable size, we have no choice but to assume it doesn't
2707 overlap since we can't make a temporary for it. */
2708 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
2709 return;
2710
2711 /* Otherwise, we must search for overlap ... */
2712 if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
2713 return;
2714
2715 /* ... and if found, force the value into a temporary. */
2716 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
2717 }
2718
2719 /* A subroutine of gimplify_init_ctor_eval. Create a loop for
2720 a RANGE_EXPR in a CONSTRUCTOR for an array.
2721
2722 var = lower;
2723 loop_entry:
2724 object[var] = value;
2725 if (var == upper)
2726 goto loop_exit;
2727 var = var + 1;
2728 goto loop_entry;
2729 loop_exit:
2730
2731 We increment var _after_ the loop exit check because we might otherwise
2732 fail if upper == TYPE_MAX_VALUE (type for upper).
2733
2734 Note that we never have to deal with SAVE_EXPRs here, because this has
2735 already been taken care of for us, in gimplify_init_ctor_preeval(). */
2736
2737 static void gimplify_init_ctor_eval (tree, VEC(constructor_elt,gc) *,
2738 tree *, bool);
2739
2740 static void
2741 gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
2742 tree value, tree array_elt_type,
2743 tree *pre_p, bool cleared)
2744 {
2745 tree loop_entry_label, loop_exit_label;
2746 tree var, var_type, cref;
2747
2748 loop_entry_label = create_artificial_label ();
2749 loop_exit_label = create_artificial_label ();
2750
2751 /* Create and initialize the index variable. */
2752 var_type = TREE_TYPE (upper);
2753 var = create_tmp_var (var_type, NULL);
2754 append_to_statement_list (build2 (GIMPLE_MODIFY_STMT, var_type, var, lower),
2755 pre_p);
2756
2757 /* Add the loop entry label. */
2758 append_to_statement_list (build1 (LABEL_EXPR,
2759 void_type_node,
2760 loop_entry_label),
2761 pre_p);
2762
2763 /* Build the reference. */
2764 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
2765 var, NULL_TREE, NULL_TREE);
2766
2767 /* If we are a constructor, just call gimplify_init_ctor_eval to do
2768 the store. Otherwise just assign value to the reference. */
2769
2770 if (TREE_CODE (value) == CONSTRUCTOR)
2771 /* NB we might have to call ourself recursively through
2772 gimplify_init_ctor_eval if the value is a constructor. */
2773 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
2774 pre_p, cleared);
2775 else
2776 append_to_statement_list (build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (cref),
2777 cref, value),
2778 pre_p);
2779
2780 /* We exit the loop when the index var is equal to the upper bound. */
2781 gimplify_and_add (build3 (COND_EXPR, void_type_node,
2782 build2 (EQ_EXPR, boolean_type_node,
2783 var, upper),
2784 build1 (GOTO_EXPR,
2785 void_type_node,
2786 loop_exit_label),
2787 NULL_TREE),
2788 pre_p);
2789
2790 /* Otherwise, increment the index var... */
2791 append_to_statement_list (build2 (GIMPLE_MODIFY_STMT, var_type, var,
2792 build2 (PLUS_EXPR, var_type, var,
2793 fold_convert (var_type,
2794 integer_one_node))),
2795 pre_p);
2796
2797 /* ...and jump back to the loop entry. */
2798 append_to_statement_list (build1 (GOTO_EXPR,
2799 void_type_node,
2800 loop_entry_label),
2801 pre_p);
2802
2803 /* Add the loop exit label. */
2804 append_to_statement_list (build1 (LABEL_EXPR,
2805 void_type_node,
2806 loop_exit_label),
2807 pre_p);
2808 }
2809
2810 /* Return true if FDECL is a FIELD_DECL of zero size. */
2811
2812 static bool
2813 zero_sized_field_decl (tree fdecl)
2814 {
2815 if (TREE_CODE (fdecl) == FIELD_DECL && DECL_SIZE (fdecl)
2816 && integer_zerop (DECL_SIZE (fdecl)))
2817 return true;
2818 return false;
2819 }
2820
2821 /* Return true if TYPE is zero sized. */
2822
2823 static bool
2824 zero_sized_type (tree type)
2825 {
2826 if (AGGREGATE_TYPE_P (type) && TYPE_SIZE (type)
2827 && integer_zerop (TYPE_SIZE (type)))
2828 return true;
2829 return false;
2830 }
2831
2832 /* A subroutine of gimplify_init_constructor. Generate individual
2833 MODIFY_EXPRs for a CONSTRUCTOR. OBJECT is the LHS against which the
2834 assignments should happen. ELTS is the CONSTRUCTOR_ELTS of the
2835 CONSTRUCTOR. CLEARED is true if the entire LHS object has been
2836 zeroed first. */
2837
2838 static void
2839 gimplify_init_ctor_eval (tree object, VEC(constructor_elt,gc) *elts,
2840 tree *pre_p, bool cleared)
2841 {
2842 tree array_elt_type = NULL;
2843 unsigned HOST_WIDE_INT ix;
2844 tree purpose, value;
2845
2846 if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
2847 array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));
2848
2849 FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
2850 {
2851 tree cref, init;
2852
2853 /* NULL values are created above for gimplification errors. */
2854 if (value == NULL)
2855 continue;
2856
2857 if (cleared && initializer_zerop (value))
2858 continue;
2859
2860 /* ??? Here's to hoping the front end fills in all of the indices,
2861 so we don't have to figure out what's missing ourselves. */
2862 gcc_assert (purpose);
2863
2864 /* Skip zero-sized fields, unless value has side-effects. This can
2865 happen with calls to functions returning a zero-sized type, which
2866 we shouldn't discard. As a number of downstream passes don't
2867 expect sets of zero-sized fields, we rely on the gimplification of
2868 the MODIFY_EXPR we make below to drop the assignment statement. */
2869 if (! TREE_SIDE_EFFECTS (value) && zero_sized_field_decl (purpose))
2870 continue;
2871
2872 /* If we have a RANGE_EXPR, we have to build a loop to assign the
2873 whole range. */
2874 if (TREE_CODE (purpose) == RANGE_EXPR)
2875 {
2876 tree lower = TREE_OPERAND (purpose, 0);
2877 tree upper = TREE_OPERAND (purpose, 1);
2878
2879 /* If the lower bound is equal to upper, just treat it as if
2880 upper was the index. */
2881 if (simple_cst_equal (lower, upper))
2882 purpose = upper;
2883 else
2884 {
2885 gimplify_init_ctor_eval_range (object, lower, upper, value,
2886 array_elt_type, pre_p, cleared);
2887 continue;
2888 }
2889 }
2890
2891 if (array_elt_type)
2892 {
2893 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
2894 purpose, NULL_TREE, NULL_TREE);
2895 }
2896 else
2897 {
2898 gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
2899 cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
2900 unshare_expr (object), purpose, NULL_TREE);
2901 }
2902
2903 if (TREE_CODE (value) == CONSTRUCTOR
2904 && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
2905 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
2906 pre_p, cleared);
2907 else
2908 {
2909 init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
2910 gimplify_and_add (init, pre_p);
2911 }
2912 }
2913 }
2914
2915 /* A subroutine of gimplify_modify_expr. Break out elements of a
2916 CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
2917
2918 Note that we still need to clear any elements that don't have explicit
2919 initializers, so if not all elements are initialized we keep the
2920 original MODIFY_EXPR; we just remove all of the constructor elements. */
2921
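/* Informal example (assuming the usual clearing heuristics apply): for

     struct S { int a, b, c; } s = { .a = 1 };

   not every member is covered, so the code below keeps the assignment
   with an emptied CONSTRUCTOR, which zero-initializes the whole of S,
   and then emits the individual store "s.a = 1".  */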
2922 static enum gimplify_status
2923 gimplify_init_constructor (tree *expr_p, tree *pre_p,
2924 tree *post_p, bool want_value)
2925 {
2926 tree object;
2927 tree ctor = GENERIC_TREE_OPERAND (*expr_p, 1);
2928 tree type = TREE_TYPE (ctor);
2929 enum gimplify_status ret;
2930 VEC(constructor_elt,gc) *elts;
2931
2932 if (TREE_CODE (ctor) != CONSTRUCTOR)
2933 return GS_UNHANDLED;
2934
2935 ret = gimplify_expr (&GENERIC_TREE_OPERAND (*expr_p, 0), pre_p, post_p,
2936 is_gimple_lvalue, fb_lvalue);
2937 if (ret == GS_ERROR)
2938 return ret;
2939 object = GENERIC_TREE_OPERAND (*expr_p, 0);
2940
2941 elts = CONSTRUCTOR_ELTS (ctor);
2942
2943 ret = GS_ALL_DONE;
2944 switch (TREE_CODE (type))
2945 {
2946 case RECORD_TYPE:
2947 case UNION_TYPE:
2948 case QUAL_UNION_TYPE:
2949 case ARRAY_TYPE:
2950 {
2951 struct gimplify_init_ctor_preeval_data preeval_data;
2952 HOST_WIDE_INT num_type_elements, num_ctor_elements;
2953 HOST_WIDE_INT num_nonzero_elements;
2954 bool cleared, valid_const_initializer;
2955
2956 /* Aggregate types must lower constructors to initialization of
2957 individual elements. The exception is that a CONSTRUCTOR node
2958 with no elements indicates zero-initialization of the whole. */
2959 if (VEC_empty (constructor_elt, elts))
2960 break;
2961
2962 /* Fetch information about the constructor to direct later processing.
2963 We might want to make static versions of it in various cases, and
2964 can only do so if it is known to be a valid constant initializer.
2965 valid_const_initializer
2966 = categorize_ctor_elements (ctor, &num_nonzero_elements,
2967 &num_ctor_elements, &cleared);
2968
2969 /* If a const aggregate variable is being initialized, then it
2970 should never be a loss to promote the variable to be static. */
2971 if (valid_const_initializer
2972 && num_nonzero_elements > 1
2973 && TREE_READONLY (object)
2974 && TREE_CODE (object) == VAR_DECL)
2975 {
2976 DECL_INITIAL (object) = ctor;
2977 TREE_STATIC (object) = 1;
2978 if (!DECL_NAME (object))
2979 DECL_NAME (object) = create_tmp_var_name ("C");
2980 walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);
2981
2982 /* ??? C++ doesn't automatically append a .<number> to the
2983 assembler name, and even when it does, it looks at FE private
2984 data structures to figure out what that number should be,
2985 which are not set for this variable. I suppose this is
2986 important for local statics for inline functions, which aren't
2987 "local" in the object file sense. So in order to get a unique
2988 TU-local symbol, we must invoke the lhd version now. */
2989 lhd_set_decl_assembler_name (object);
2990
2991 *expr_p = NULL_TREE;
2992 break;
2993 }
2994
2995 /* If there are "lots" of initialized elements, even discounting
2996 those that are not address constants (and thus *must* be
2997 computed at runtime), then partition the constructor into
2998 constant and non-constant parts. Block copy the constant
2999 parts in, then generate code for the non-constant parts. */
3000 /* TODO. There's code in cp/typeck.c to do this. */
3001
3002 num_type_elements = count_type_elements (type, true);
3003
3004 /* If count_type_elements could not determine the number of type elements
3005 for a constant-sized object, assume clearing is needed.
3006 Don't do this for variable-sized objects, as store_constructor
3007 will ignore the clearing of variable-sized objects. */
3008 if (num_type_elements < 0 && int_size_in_bytes (type) >= 0)
3009 cleared = true;
3010 /* If there are "lots" of zeros, then block clear the object first. */
3011 else if (num_type_elements - num_nonzero_elements > CLEAR_RATIO
3012 && num_nonzero_elements < num_type_elements/4)
3013 cleared = true;
3014 /* ??? This bit ought not be needed. For any element not present
3015 in the initializer, we should simply set them to zero. Except
3016 we'd need to *find* the elements that are not present, and that
3017 requires trickery to avoid quadratic compile-time behavior in
3018 large cases or excessive memory use in small cases. */
3019 else if (num_ctor_elements < num_type_elements)
3020 cleared = true;
3021
3022 /* If there are "lots" of initialized elements, and all of them
3023 are valid address constants, then the entire initializer can
3024 be dropped to memory, and then memcpy'd out. Don't do this
3025 for sparse arrays, though, as it's more efficient to follow
3026 the standard CONSTRUCTOR behavior of memset followed by
3027 individual element initialization. */
3028 if (valid_const_initializer && !cleared)
3029 {
3030 HOST_WIDE_INT size = int_size_in_bytes (type);
3031 unsigned int align;
3032
3033 /* ??? We can still get unbounded array types, at least
3034 from the C++ front end. This seems wrong, but attempt
3035 to work around it for now. */
3036 if (size < 0)
3037 {
3038 size = int_size_in_bytes (TREE_TYPE (object));
3039 if (size >= 0)
3040 TREE_TYPE (ctor) = type = TREE_TYPE (object);
3041 }
3042
3043 /* Find the maximum alignment we can assume for the object. */
3044 /* ??? Make use of DECL_OFFSET_ALIGN. */
3045 if (DECL_P (object))
3046 align = DECL_ALIGN (object);
3047 else
3048 align = TYPE_ALIGN (type);
3049
3050 if (size > 0 && !can_move_by_pieces (size, align))
3051 {
3052 tree new = create_tmp_var_raw (type, "C");
3053
3054 gimple_add_tmp_var (new);
3055 TREE_STATIC (new) = 1;
3056 TREE_READONLY (new) = 1;
3057 DECL_INITIAL (new) = ctor;
3058 if (align > DECL_ALIGN (new))
3059 {
3060 DECL_ALIGN (new) = align;
3061 DECL_USER_ALIGN (new) = 1;
3062 }
3063 walk_tree (&DECL_INITIAL (new), force_labels_r, NULL, NULL);
3064
3065 GENERIC_TREE_OPERAND (*expr_p, 1) = new;
3066
3067 /* This is no longer an assignment of a CONSTRUCTOR, but
3068 we still may have processing to do on the LHS. So
3069 pretend we didn't do anything here to let that happen. */
3070 return GS_UNHANDLED;
3071 }
3072 }
3073
3074 /* If there are nonzero elements, pre-evaluate to capture elements
3075 overlapping with the lhs into temporaries. We must do this before
3076 clearing to fetch the values before they are zeroed-out. */
3077 if (num_nonzero_elements > 0)
3078 {
3079 preeval_data.lhs_base_decl = get_base_address (object);
3080 if (!DECL_P (preeval_data.lhs_base_decl))
3081 preeval_data.lhs_base_decl = NULL;
3082 preeval_data.lhs_alias_set = get_alias_set (object);
3083
3084 gimplify_init_ctor_preeval (&GENERIC_TREE_OPERAND (*expr_p, 1),
3085 pre_p, post_p, &preeval_data);
3086 }
3087
3088 if (cleared)
3089 {
3090 /* Zap the CONSTRUCTOR element list, which simplifies this case.
3091 Note that we still have to gimplify, in order to handle the
3092 case of variable sized types. Avoid shared tree structures. */
3093 CONSTRUCTOR_ELTS (ctor) = NULL;
3094 object = unshare_expr (object);
3095 gimplify_stmt (expr_p);
3096 append_to_statement_list (*expr_p, pre_p);
3097 }
3098
3099 /* If we have not block cleared the object, or if there are nonzero
3100 elements in the constructor, add assignments to the individual
3101 scalar fields of the object. */
3102 if (!cleared || num_nonzero_elements > 0)
3103 gimplify_init_ctor_eval (object, elts, pre_p, cleared);
3104
3105 *expr_p = NULL_TREE;
3106 }
3107 break;
3108
3109 case COMPLEX_TYPE:
3110 {
3111 tree r, i;
3112
3113 /* Extract the real and imaginary parts out of the ctor. */
3114 gcc_assert (VEC_length (constructor_elt, elts) == 2);
3115 r = VEC_index (constructor_elt, elts, 0)->value;
3116 i = VEC_index (constructor_elt, elts, 1)->value;
3117 if (r == NULL || i == NULL)
3118 {
3119 tree zero = fold_convert (TREE_TYPE (type), integer_zero_node);
3120 if (r == NULL)
3121 r = zero;
3122 if (i == NULL)
3123 i = zero;
3124 }
3125
3126 /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
3127 represent creation of a complex value. */
3128 if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
3129 {
3130 ctor = build_complex (type, r, i);
3131 TREE_OPERAND (*expr_p, 1) = ctor;
3132 }
3133 else
3134 {
3135 ctor = build2 (COMPLEX_EXPR, type, r, i);
3136 TREE_OPERAND (*expr_p, 1) = ctor;
3137 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
3138 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
3139 fb_rvalue);
3140 }
3141 }
3142 break;
3143
3144 case VECTOR_TYPE:
3145 {
3146 unsigned HOST_WIDE_INT ix;
3147 constructor_elt *ce;
3148
3149 /* Go ahead and simplify constant constructors to VECTOR_CST. */
3150 if (TREE_CONSTANT (ctor))
3151 {
3152 bool constant_p = true;
3153 tree value;
3154
3155 /* Even when ctor is constant, it might contain non-*_CST
3156 elements (e.g. { 1.0/0.0 - 1.0/0.0, 0.0 }) and those don't
3157 belong in VECTOR_CST nodes. */
3158 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
3159 if (!CONSTANT_CLASS_P (value))
3160 {
3161 constant_p = false;
3162 break;
3163 }
3164
3165 if (constant_p)
3166 {
3167 TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
3168 break;
3169 }
3170
3171 /* Don't reduce a TREE_CONSTANT vector ctor even if we can't
3172 make a VECTOR_CST. It won't do anything for us, and it'll
3173 prevent us from representing it as a single constant. */
3174 break;
3175 }
3176
3177 /* Vector types use CONSTRUCTOR all the way through gimple
3178 compilation as a general initializer. */
3179 for (ix = 0; VEC_iterate (constructor_elt, elts, ix, ce); ix++)
3180 {
3181 enum gimplify_status tret;
3182 tret = gimplify_expr (&ce->value, pre_p, post_p,
3183 is_gimple_val, fb_rvalue);
3184 if (tret == GS_ERROR)
3185 ret = GS_ERROR;
3186 }
3187 }
3188 break;
3189
3190 default:
3191 /* So how did we get a CONSTRUCTOR for a scalar type? */
3192 gcc_unreachable ();
3193 }
3194
3195 if (ret == GS_ERROR)
3196 return GS_ERROR;
3197 else if (want_value)
3198 {
3199 append_to_statement_list (*expr_p, pre_p);
3200 *expr_p = object;
3201 return GS_OK;
3202 }
3203 else
3204 return GS_ALL_DONE;
3205 }
3206
3207 /* Given a pointer value T, return a simplified version of an
3208 indirection through T, or NULL_TREE if no simplification is
3209 possible. This may only be applied to the rhs of an expression.
3210 Note that the resulting type may differ from the type pointed
3211 to, but it will still be compatible with it from the langhooks
3212 point of view. */
3213
3214 static tree
3215 fold_indirect_ref_rhs (tree t)
3216 {
3217 tree type = TREE_TYPE (TREE_TYPE (t));
3218 tree sub = t;
3219 tree subtype;
3220
3221 STRIP_USELESS_TYPE_CONVERSION (sub);
3222 subtype = TREE_TYPE (sub);
3223 if (!POINTER_TYPE_P (subtype))
3224 return NULL_TREE;
3225
3226 if (TREE_CODE (sub) == ADDR_EXPR)
3227 {
3228 tree op = TREE_OPERAND (sub, 0);
3229 tree optype = TREE_TYPE (op);
3230 /* *&p => p */
3231 if (lang_hooks.types_compatible_p (type, optype))
3232 return op;
3233 /* *(foo *)&fooarray => fooarray[0] */
3234 else if (TREE_CODE (optype) == ARRAY_TYPE
3235 && lang_hooks.types_compatible_p (type, TREE_TYPE (optype)))
3236 {
3237 tree type_domain = TYPE_DOMAIN (optype);
3238 tree min_val = size_zero_node;
3239 if (type_domain && TYPE_MIN_VALUE (type_domain))
3240 min_val = TYPE_MIN_VALUE (type_domain);
3241 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
3242 }
3243 }
3244
3245 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
3246 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
3247 && lang_hooks.types_compatible_p (type, TREE_TYPE (TREE_TYPE (subtype))))
3248 {
3249 tree type_domain;
3250 tree min_val = size_zero_node;
3251 tree osub = sub;
3252 sub = fold_indirect_ref_rhs (sub);
3253 if (! sub)
3254 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
3255 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
3256 if (type_domain && TYPE_MIN_VALUE (type_domain))
3257 min_val = TYPE_MIN_VALUE (type_domain);
3258 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
3259 }
3260
3261 return NULL_TREE;
3262 }
3263
3264 /* Subroutine of gimplify_modify_expr to do simplifications of MODIFY_EXPRs
3265 based on the code of the RHS. We loop for as long as something changes. */
3266
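/* One illustrative case handled below: when the RHS is a COND_EXPR and
   the type being assigned is not a register type, the assignment is
   pushed into the arms, so that, informally,

     x = c ? f () : g ();

   becomes "if (c) x = f (); else x = g ();", avoiding an aggregate
   temporary.  */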
3267 static enum gimplify_status
3268 gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p, tree *pre_p,
3269 tree *post_p, bool want_value)
3270 {
3271 enum gimplify_status ret = GS_OK;
3272
3273 while (ret != GS_UNHANDLED)
3274 switch (TREE_CODE (*from_p))
3275 {
3276 case INDIRECT_REF:
3277 {
3278 /* If we have code like
3279
3280 *(const A*)(A*)&x
3281
3282 where the type of "x" is a (possibly cv-qualified variant
3283 of "A"), treat the entire expression as identical to "x".
3284 This kind of code arises in C++ when an object is bound
3285 to a const reference, and if "x" is a TARGET_EXPR we want
3286 to take advantage of the optimization below. */
3287 tree t = fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
3288 if (t)
3289 {
3290 *from_p = t;
3291 ret = GS_OK;
3292 }
3293 else
3294 ret = GS_UNHANDLED;
3295 break;
3296 }
3297
3298 case TARGET_EXPR:
3299 {
3300 /* If we are initializing something from a TARGET_EXPR, strip the
3301 TARGET_EXPR and initialize it directly, if possible. This can't
3302 be done if the initializer is void, since that implies that the
3303 temporary is set in some non-trivial way.
3304
3305 ??? What about code that pulls out the temp and uses it
3306 elsewhere? I think that such code never uses the TARGET_EXPR as
3307 an initializer. If I'm wrong, we'll die because the temp won't
3308 have any RTL. In that case, I guess we'll need to replace
3309 references somehow. */
3310 tree init = TARGET_EXPR_INITIAL (*from_p);
3311
3312 if (!VOID_TYPE_P (TREE_TYPE (init)))
3313 {
3314 *from_p = init;
3315 ret = GS_OK;
3316 }
3317 else
3318 ret = GS_UNHANDLED;
3319 }
3320 break;
3321
3322 case COMPOUND_EXPR:
3323 /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
3324 caught. */
3325 gimplify_compound_expr (from_p, pre_p, true);
3326 ret = GS_OK;
3327 break;
3328
3329 case CONSTRUCTOR:
3330 /* If we're initializing from a CONSTRUCTOR, break this into
3331 individual MODIFY_EXPRs. */
3332 return gimplify_init_constructor (expr_p, pre_p, post_p, want_value);
3333
3334 case COND_EXPR:
3335 /* If we're assigning to a non-register type, push the assignment
3336 down into the branches. This is mandatory for ADDRESSABLE types,
3337 since we cannot generate temporaries for such, but it saves a
3338 copy in other cases as well. */
3339 if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
3340 {
3341 /* This code should mirror the code in gimplify_cond_expr. */
3342 enum tree_code code = TREE_CODE (*expr_p);
3343 tree cond = *from_p;
3344 tree result = *to_p;
3345
3346 ret = gimplify_expr (&result, pre_p, post_p,
3347 is_gimple_min_lval, fb_lvalue);
3348 if (ret != GS_ERROR)
3349 ret = GS_OK;
3350
3351 if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
3352 TREE_OPERAND (cond, 1)
3353 = build2 (code, void_type_node, result,
3354 TREE_OPERAND (cond, 1));
3355 if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
3356 TREE_OPERAND (cond, 2)
3357 = build2 (code, void_type_node, unshare_expr (result),
3358 TREE_OPERAND (cond, 2));
3359
3360 TREE_TYPE (cond) = void_type_node;
3361 recalculate_side_effects (cond);
3362
3363 if (want_value)
3364 {
3365 gimplify_and_add (cond, pre_p);
3366 *expr_p = unshare_expr (result);
3367 }
3368 else
3369 *expr_p = cond;
3370 return ret;
3371 }
3372 else
3373 ret = GS_UNHANDLED;
3374 break;
3375
3376 case CALL_EXPR:
3377 /* For calls that return in memory, give *to_p as the CALL_EXPR's
3378 return slot so that we don't generate a temporary. */
3379 if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
3380 && aggregate_value_p (*from_p, *from_p))
3381 {
3382 bool use_target;
3383
3384 if (!(rhs_predicate_for (*to_p))(*from_p))
3385 /* If we need a temporary, *to_p isn't accurate. */
3386 use_target = false;
3387 else if (TREE_CODE (*to_p) == RESULT_DECL
3388 && DECL_NAME (*to_p) == NULL_TREE
3389 && needs_to_live_in_memory (*to_p))
3390 /* It's OK to use the return slot directly unless it's an NRV. */
3391 use_target = true;
3392 else if (is_gimple_reg_type (TREE_TYPE (*to_p))
3393 || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
3394 /* Don't force regs into memory. */
3395 use_target = false;
3396 else if (TREE_CODE (*to_p) == VAR_DECL
3397 && DECL_GIMPLE_FORMAL_TEMP_P (*to_p))
3398 /* Don't use the original target if it's a formal temp; we
3399 don't want to take its address. */
3400 use_target = false;
3401 else if (TREE_CODE (*expr_p) == INIT_EXPR)
3402 /* It's OK to use the target directly if it's being
3403 initialized. */
3404 use_target = true;
3405 else if (!is_gimple_non_addressable (*to_p))
3406 /* Don't use the original target if it's already addressable;
3407 if its address escapes, and the called function uses the
3408 NRV optimization, a conforming program could see *to_p
3409 change before the called function returns; see c++/19317.
3410 When optimizing, the return_slot pass marks more functions
3411 as safe after we have escape info. */
3412 use_target = false;
3413 else
3414 use_target = true;
3415
3416 if (use_target)
3417 {
3418 CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
3419 lang_hooks.mark_addressable (*to_p);
3420 }
3421 }
3422
3423 ret = GS_UNHANDLED;
3424 break;
3425
3426 /* If we're initializing from a container, push the initialization
3427 inside it. */
3428 case CLEANUP_POINT_EXPR:
3429 case BIND_EXPR:
3430 case STATEMENT_LIST:
3431 {
3432 tree wrap = *from_p;
3433 tree t;
3434
3435 ret = gimplify_expr (to_p, pre_p, post_p,
3436 is_gimple_min_lval, fb_lvalue);
3437 if (ret != GS_ERROR)
3438 ret = GS_OK;
3439
3440 t = voidify_wrapper_expr (wrap, *expr_p);
3441 gcc_assert (t == *expr_p);
3442
3443 if (want_value)
3444 {
3445 gimplify_and_add (wrap, pre_p);
3446 *expr_p = unshare_expr (*to_p);
3447 }
3448 else
3449 *expr_p = wrap;
3450 return GS_OK;
3451 }
3452
3453 default:
3454 ret = GS_UNHANDLED;
3455 break;
3456 }
3457
3458 return ret;
3459 }
3460
3461 /* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
3462 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with
3463 DECL_COMPLEX_GIMPLE_REG_P set. */
3464
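/* Informal example: if X is a complex variable that has been marked as
   a GIMPLE register, the partial store "__real__ X = r" becomes a
   total store, roughly

     tmp = __imag__ X;
     X = COMPLEX_EXPR <r, tmp>;

   where "tmp" stands for the formal temporary created below.  */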
3465 static enum gimplify_status
3466 gimplify_modify_expr_complex_part (tree *expr_p, tree *pre_p, bool want_value)
3467 {
3468 enum tree_code code, ocode;
3469 tree lhs, rhs, new_rhs, other, realpart, imagpart;
3470
3471 lhs = GENERIC_TREE_OPERAND (*expr_p, 0);
3472 rhs = GENERIC_TREE_OPERAND (*expr_p, 1);
3473 code = TREE_CODE (lhs);
3474 lhs = TREE_OPERAND (lhs, 0);
3475
3476 ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
3477 other = build1 (ocode, TREE_TYPE (rhs), lhs);
3478 other = get_formal_tmp_var (other, pre_p);
3479
3480 realpart = code == REALPART_EXPR ? rhs : other;
3481 imagpart = code == REALPART_EXPR ? other : rhs;
3482
3483 if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
3484 new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
3485 else
3486 new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
3487
3488 GENERIC_TREE_OPERAND (*expr_p, 0) = lhs;
3489 GENERIC_TREE_OPERAND (*expr_p, 1) = new_rhs;
3490
3491 if (want_value)
3492 {
3493 append_to_statement_list (*expr_p, pre_p);
3494 *expr_p = rhs;
3495 }
3496
3497 return GS_ALL_DONE;
3498 }
3499
3500
3501 /* Destructively convert the TREE pointer in TP into a gimple tuple if
3502 appropriate. */
3503
3504 static void
3505 tree_to_gimple_tuple (tree *tp)
3506 {
3507
3508 switch (TREE_CODE (*tp))
3509 {
3510 case GIMPLE_MODIFY_STMT:
3511 return;
3512 case MODIFY_EXPR:
3513 {
3514 struct gimple_stmt *gs;
3515 tree lhs = TREE_OPERAND (*tp, 0);
3516 bool def_stmt_self_p = false;
3517
3518 if (TREE_CODE (lhs) == SSA_NAME)
3519 {
3520 if (SSA_NAME_DEF_STMT (lhs) == *tp)
3521 def_stmt_self_p = true;
3522 }
3523
3524 gs = &make_node (GIMPLE_MODIFY_STMT)->gstmt;
3525 gs->base = (*tp)->base;
3526 /* The set to base above overwrites the CODE. */
3527 TREE_SET_CODE ((tree) gs, GIMPLE_MODIFY_STMT);
3528
3529 gs->locus = EXPR_LOCUS (*tp);
3530 gs->operands[0] = TREE_OPERAND (*tp, 0);
3531 gs->operands[1] = TREE_OPERAND (*tp, 1);
3532 gs->block = TREE_BLOCK (*tp);
3533 *tp = (tree)gs;
3534
3535 /* If we re-gimplify a set to an SSA_NAME, we must change the
3536 SSA name's DEF_STMT link. */
3537 if (def_stmt_self_p)
3538 SSA_NAME_DEF_STMT (GIMPLE_STMT_OPERAND (*tp, 0)) = *tp;
3539
3540 return;
3541 }
3542 default:
3543 break;
3544 }
3545 }
3546
3547 /* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
3548
3549 modify_expr
3550 : varname '=' rhs
3551 | '*' ID '=' rhs
3552
3553 PRE_P points to the list where side effects that must happen before
3554 *EXPR_P should be stored.
3555
3556 POST_P points to the list where side effects that must happen after
3557 *EXPR_P should be stored.
3558
3559 WANT_VALUE is nonzero iff we want to use the value of this expression
3560 in another expression. */
3561
3562 static enum gimplify_status
3563 gimplify_modify_expr (tree *expr_p, tree *pre_p, tree *post_p, bool want_value)
3564 {
3565 tree *from_p = &GENERIC_TREE_OPERAND (*expr_p, 1);
3566 tree *to_p = &GENERIC_TREE_OPERAND (*expr_p, 0);
3567 enum gimplify_status ret = GS_UNHANDLED;
3568
3569 gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
3570 || TREE_CODE (*expr_p) == GIMPLE_MODIFY_STMT
3571 || TREE_CODE (*expr_p) == INIT_EXPR);
3572
3573 /* For zero sized types, only gimplify the left hand side and right hand side
3574 as statements and throw away the assignment. */
3575 if (zero_sized_type (TREE_TYPE (*from_p)))
3576 {
3577 gimplify_stmt (from_p);
3578 gimplify_stmt (to_p);
3579 append_to_statement_list (*from_p, pre_p);
3580 append_to_statement_list (*to_p, pre_p);
3581 *expr_p = NULL_TREE;
3582 return GS_ALL_DONE;
3583 }
3584
3585 /* See if any simplifications can be done based on what the RHS is. */
3586 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
3587 want_value);
3588 if (ret != GS_UNHANDLED)
3589 return ret;
3590
3591 /* If the value being copied is of variable width, compute the length
3592 of the copy into a WITH_SIZE_EXPR. Note that we need to do this
3593 before gimplifying any of the operands so that we can resolve any
3594 PLACEHOLDER_EXPRs in the size. Also note that the RTL expander uses
3595 the size of the expression to be copied, not of the destination, so
3596 that is what we must use here. */
3597 maybe_with_size_expr (from_p);
3598
3599 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
3600 if (ret == GS_ERROR)
3601 return ret;
3602
3603 ret = gimplify_expr (from_p, pre_p, post_p,
3604 rhs_predicate_for (*to_p), fb_rvalue);
3605 if (ret == GS_ERROR)
3606 return ret;
3607
3608 /* Now see if the above changed *from_p to something we handle specially. */
3609 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
3610 want_value);
3611 if (ret != GS_UNHANDLED)
3612 return ret;
3613
3614 /* If we've got a variable sized assignment between two lvalues (i.e. one
3615 that does not involve a call), then we can make things a bit more straightforward
3616 by converting the assignment to memcpy or memset. */
3617 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
3618 {
3619 tree from = TREE_OPERAND (*from_p, 0);
3620 tree size = TREE_OPERAND (*from_p, 1);
3621
3622 if (TREE_CODE (from) == CONSTRUCTOR)
3623 return gimplify_modify_expr_to_memset (expr_p, size, want_value);
3624 if (is_gimple_addressable (from))
3625 {
3626 *from_p = from;
3627 return gimplify_modify_expr_to_memcpy (expr_p, size, want_value);
3628 }
3629 }
3630
3631 /* Transform partial stores to non-addressable complex variables into
3632 total stores. This allows us to use real instead of virtual operands
3633 for these variables, which improves optimization. */
3634 if ((TREE_CODE (*to_p) == REALPART_EXPR
3635 || TREE_CODE (*to_p) == IMAGPART_EXPR)
3636 && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
3637 return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);
3638
3639 if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
3640 {
3641 /* If we've somehow already got an SSA_NAME on the LHS, then
3642 we've probably modified it twice. Not good. */
3643 gcc_assert (TREE_CODE (*to_p) != SSA_NAME);
3644 *to_p = make_ssa_name (*to_p, *expr_p);
3645 }
3646
3647 if (want_value)
3648 {
3649 tree_to_gimple_tuple (expr_p);
3650
3651 append_to_statement_list (*expr_p, pre_p);
3652 *expr_p = *to_p;
3653 return GS_OK;
3654 }
3655
3656 return GS_ALL_DONE;
3657 }
3658
3659 /* Gimplify a comparison between two variable-sized objects. Do this
3660 with a call to BUILT_IN_MEMCMP. */
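/* For illustration (a rough sketch): a comparison  a == b  between two
   variable-sized objects becomes roughly

     memcmp (&a, &b, <size of a>) == 0

   where the size expression has had any PLACEHOLDER_EXPRs substituted
   from A, and  !=  is handled the same way.  */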
3661
3662 static enum gimplify_status
3663 gimplify_variable_sized_compare (tree *expr_p)
3664 {
3665 tree op0 = TREE_OPERAND (*expr_p, 0);
3666 tree op1 = TREE_OPERAND (*expr_p, 1);
3667 tree args, t, dest;
3668
3669 t = TYPE_SIZE_UNIT (TREE_TYPE (op0));
3670 t = unshare_expr (t);
3671 t = SUBSTITUTE_PLACEHOLDER_IN_EXPR (t, op0);
3672 args = tree_cons (NULL, t, NULL);
3673 t = build_fold_addr_expr (op1);
3674 args = tree_cons (NULL, t, args);
3675 dest = build_fold_addr_expr (op0);
3676 args = tree_cons (NULL, dest, args);
3677 t = implicit_built_in_decls[BUILT_IN_MEMCMP];
3678 t = build_function_call_expr (t, args);
3679 *expr_p
3680 = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
3681
3682 return GS_OK;
3683 }
3684
3685 /* Gimplify a comparison between two aggregate objects of integral scalar
3686 mode as a comparison between the bitwise equivalent scalar values. */
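/* For illustration (a rough sketch, target dependent): comparing two
   4-byte aggregates whose TYPE_MODE is an integral mode becomes roughly

     VIEW_CONVERT_EXPR<unsigned-of-same-mode>(a)
       == VIEW_CONVERT_EXPR<unsigned-of-same-mode>(b)

   i.e. a single scalar comparison of the bitwise images.  */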
3687
3688 static enum gimplify_status
3689 gimplify_scalar_mode_aggregate_compare (tree *expr_p)
3690 {
3691 tree op0 = TREE_OPERAND (*expr_p, 0);
3692 tree op1 = TREE_OPERAND (*expr_p, 1);
3693
3694 tree type = TREE_TYPE (op0);
3695 tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
3696
3697 op0 = fold_build1 (VIEW_CONVERT_EXPR, scalar_type, op0);
3698 op1 = fold_build1 (VIEW_CONVERT_EXPR, scalar_type, op1);
3699
3700 *expr_p
3701 = fold_build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
3702
3703 return GS_OK;
3704 }
3705
3706 /* Gimplify TRUTH_ANDIF_EXPR and TRUTH_ORIF_EXPR expressions. EXPR_P
3707 points to the expression to gimplify.
3708
3709 Expressions of the form 'a && b' are gimplified to:
3710
3711 a && b ? true : false
3712
3713 gimplify_cond_expr will do the rest.
3714
3715 PRE_P points to the list where side effects that must happen before
3716 *EXPR_P should be stored. */
3717
3718 static enum gimplify_status
3719 gimplify_boolean_expr (tree *expr_p)
3720 {
3721 /* Preserve the original type of the expression. */
3722 tree type = TREE_TYPE (*expr_p);
3723
3724 *expr_p = build3 (COND_EXPR, type, *expr_p,
3725 fold_convert (type, boolean_true_node),
3726 fold_convert (type, boolean_false_node));
3727
3728 return GS_OK;
3729 }
3730
3731 /* Gimplifies an expression sequence. This function gimplifies each
3732 expression and re-writes the original expression with the last
3733 expression of the sequence in GIMPLE form.
3734
3735 PRE_P points to the list where the side effects for all the
3736 expressions in the sequence will be emitted.
3737
3738 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
3739 /* ??? Should rearrange to share the pre-queue with all the indirect
3740 invocations of gimplify_expr. Would probably save on creations
3741 of statement_list nodes. */
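/* For illustration (a rough sketch): gimplifying  (a, b, c)  emits  a
   and  b  as statements onto PRE_P and leaves  c  in *EXPR_P; when
   WANT_VALUE is false,  c  itself is gimplified as a statement too.  */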
3742
3743 static enum gimplify_status
3744 gimplify_compound_expr (tree *expr_p, tree *pre_p, bool want_value)
3745 {
3746 tree t = *expr_p;
3747
3748 do
3749 {
3750 tree *sub_p = &TREE_OPERAND (t, 0);
3751
3752 if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
3753 gimplify_compound_expr (sub_p, pre_p, false);
3754 else
3755 gimplify_stmt (sub_p);
3756 append_to_statement_list (*sub_p, pre_p);
3757
3758 t = TREE_OPERAND (t, 1);
3759 }
3760 while (TREE_CODE (t) == COMPOUND_EXPR);
3761
3762 *expr_p = t;
3763 if (want_value)
3764 return GS_OK;
3765 else
3766 {
3767 gimplify_stmt (expr_p);
3768 return GS_ALL_DONE;
3769 }
3770 }
3771
3772 /* Gimplifies a statement list. These may be created either by an
3773 enlightened front-end, or by shortcut_cond_expr. */
3774
3775 static enum gimplify_status
3776 gimplify_statement_list (tree *expr_p, tree *pre_p)
3777 {
3778 tree temp = voidify_wrapper_expr (*expr_p, NULL);
3779
3780 tree_stmt_iterator i = tsi_start (*expr_p);
3781
3782 while (!tsi_end_p (i))
3783 {
3784 tree t;
3785
3786 gimplify_stmt (tsi_stmt_ptr (i));
3787
3788 t = tsi_stmt (i);
3789 if (t == NULL)
3790 tsi_delink (&i);
3791 else if (TREE_CODE (t) == STATEMENT_LIST)
3792 {
3793 tsi_link_before (&i, t, TSI_SAME_STMT);
3794 tsi_delink (&i);
3795 }
3796 else
3797 tsi_next (&i);
3798 }
3799
3800 if (temp)
3801 {
3802 append_to_statement_list (*expr_p, pre_p);
3803 *expr_p = temp;
3804 return GS_OK;
3805 }
3806
3807 return GS_ALL_DONE;
3808 }
3809
3810 /* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
3811 gimplify. After gimplification, EXPR_P will point to a new temporary
3812 that holds the original value of the SAVE_EXPR node.
3813
3814 PRE_P points to the list where side effects that must happen before
3815 *EXPR_P should be stored. */
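/* For illustration (a rough sketch): the first time  SAVE_EXPR <i + 1>
   is seen, an assignment  tmp = i + 1  is emitted to PRE_P, the node is
   marked SAVE_EXPR_RESOLVED_P, and this and every later use reduces to
   tmp .  A void-valued operand is simply emitted as a statement.  */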
3816
3817 static enum gimplify_status
3818 gimplify_save_expr (tree *expr_p, tree *pre_p, tree *post_p)
3819 {
3820 enum gimplify_status ret = GS_ALL_DONE;
3821 tree val;
3822
3823 gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
3824 val = TREE_OPERAND (*expr_p, 0);
3825
3826 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
3827 if (!SAVE_EXPR_RESOLVED_P (*expr_p))
3828 {
3829 /* The operand may be a void-valued expression such as the SAVE_EXPRs
3830 generated by the Java frontend for class initialization. It is
3831 being executed only for its side-effects. */
3832 if (TREE_TYPE (val) == void_type_node)
3833 {
3834 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
3835 is_gimple_stmt, fb_none);
3836 append_to_statement_list (TREE_OPERAND (*expr_p, 0), pre_p);
3837 val = NULL;
3838 }
3839 else
3840 val = get_initialized_tmp_var (val, pre_p, post_p);
3841
3842 TREE_OPERAND (*expr_p, 0) = val;
3843 SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
3844 }
3845
3846 *expr_p = val;
3847
3848 return ret;
3849 }
3850
3851 /* Re-write the ADDR_EXPR node pointed to by EXPR_P
3852
3853 unary_expr
3854 : ...
3855 | '&' varname
3856 ...
3857
3858 PRE_P points to the list where side effects that must happen before
3859 *EXPR_P should be stored.
3860
3861 POST_P points to the list where side effects that must happen after
3862 *EXPR_P should be stored. */
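/* For illustration (a rough sketch): '&*p' collapses back to 'p' (with a
   conversion when the pointer types differ only in qualification); for a
   general operand, the operand is gimplified to something addressable
   first, and if that yields '*tmp' the pair again collapses to 'tmp'.  */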
3863
3864 static enum gimplify_status
3865 gimplify_addr_expr (tree *expr_p, tree *pre_p, tree *post_p)
3866 {
3867 tree expr = *expr_p;
3868 tree op0 = TREE_OPERAND (expr, 0);
3869 enum gimplify_status ret;
3870
3871 switch (TREE_CODE (op0))
3872 {
3873 case INDIRECT_REF:
3874 case MISALIGNED_INDIRECT_REF:
3875 do_indirect_ref:
3876 /* Check if we are dealing with an expression of the form '&*ptr'.
3877 While the front end folds away '&*ptr' into 'ptr', these
3878 expressions may be generated internally by the compiler (e.g.,
3879 builtins like __builtin_va_end). */
3880 /* Caution: the silent array decomposition semantics we allow for
3881 ADDR_EXPR means we can't always discard the pair. */
3882 /* Gimplification of the ADDR_EXPR operand may drop
3883 cv-qualification conversions, so make sure we add them if
3884 needed. */
3885 {
3886 tree op00 = TREE_OPERAND (op0, 0);
3887 tree t_expr = TREE_TYPE (expr);
3888 tree t_op00 = TREE_TYPE (op00);
3889
3890 if (!lang_hooks.types_compatible_p (t_expr, t_op00))
3891 {
3892 #ifdef ENABLE_CHECKING
3893 tree t_op0 = TREE_TYPE (op0);
3894 gcc_assert (POINTER_TYPE_P (t_expr)
3895 && cpt_same_type (TREE_CODE (t_op0) == ARRAY_TYPE
3896 ? TREE_TYPE (t_op0) : t_op0,
3897 TREE_TYPE (t_expr))
3898 && POINTER_TYPE_P (t_op00)
3899 && cpt_same_type (t_op0, TREE_TYPE (t_op00)));
3900 #endif
3901 op00 = fold_convert (TREE_TYPE (expr), op00);
3902 }
3903 *expr_p = op00;
3904 ret = GS_OK;
3905 }
3906 break;
3907
3908 case VIEW_CONVERT_EXPR:
3909 /* Take the address of our operand and then convert it to the type of
3910 this ADDR_EXPR.
3911
3912 ??? The interaction of VIEW_CONVERT_EXPR and aliasing is not at
3913 all clear. The impact of this transformation is even less clear. */
3914
3915 /* If the operand is a useless conversion, look through it. Doing so
3916 guarantees that the ADDR_EXPR and its operand will remain of the
3917 same type. */
3918 if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
3919 op0 = TREE_OPERAND (op0, 0);
3920
3921 *expr_p = fold_convert (TREE_TYPE (expr),
3922 build_fold_addr_expr (TREE_OPERAND (op0, 0)));
3923 ret = GS_OK;
3924 break;
3925
3926 default:
3927 /* We use fb_either here because the C frontend sometimes takes
3928 the address of a call that returns a struct; see
3929 gcc.dg/c99-array-lval-1.c. The gimplifier will correctly make
3930 the implied temporary explicit. */
3931 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
3932 is_gimple_addressable, fb_either);
3933 if (ret != GS_ERROR)
3934 {
3935 op0 = TREE_OPERAND (expr, 0);
3936
3937 /* For various reasons, the gimplification of the expression
3938 may have made a new INDIRECT_REF. */
3939 if (TREE_CODE (op0) == INDIRECT_REF)
3940 goto do_indirect_ref;
3941
3942 /* Make sure TREE_INVARIANT, TREE_CONSTANT, and TREE_SIDE_EFFECTS
3943 is set properly. */
3944 recompute_tree_invariant_for_addr_expr (expr);
3945
3946 /* Mark the RHS addressable. */
3947 lang_hooks.mark_addressable (TREE_OPERAND (expr, 0));
3948 }
3949 break;
3950 }
3951
3952 return ret;
3953 }
3954
3955 /* Gimplify the operands of an ASM_EXPR. Input operands should be gimple
3956 values; output operands should be gimple lvalues. */
3957
3958 static enum gimplify_status
3959 gimplify_asm_expr (tree *expr_p, tree *pre_p, tree *post_p)
3960 {
3961 tree expr = *expr_p;
3962 int noutputs = list_length (ASM_OUTPUTS (expr));
3963 const char **oconstraints
3964 = (const char **) alloca ((noutputs) * sizeof (const char *));
3965 int i;
3966 tree link;
3967 const char *constraint;
3968 bool allows_mem, allows_reg, is_inout;
3969 enum gimplify_status ret, tret;
3970
3971 ret = GS_ALL_DONE;
3972 for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = TREE_CHAIN (link))
3973 {
3974 size_t constraint_len;
3975 oconstraints[i] = constraint
3976 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
3977 constraint_len = strlen (constraint);
3978 if (constraint_len == 0)
3979 continue;
3980
3981 parse_output_constraint (&constraint, i, 0, 0,
3982 &allows_mem, &allows_reg, &is_inout);
3983
3984 if (!allows_reg && allows_mem)
3985 lang_hooks.mark_addressable (TREE_VALUE (link));
3986
3987 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
3988 is_inout ? is_gimple_min_lval : is_gimple_lvalue,
3989 fb_lvalue | fb_mayfail);
3990 if (tret == GS_ERROR)
3991 {
3992 error ("invalid lvalue in asm output %d", i);
3993 ret = tret;
3994 }
3995
3996 if (is_inout)
3997 {
3998 /* An input/output operand. To give the optimizers more
3999 flexibility, split it into separate input and output
4000 operands. */
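/* For example (a sketch): an operand written "+r" (x) becomes the
   output "=r" (x) plus a matching input "0" (x) referring back to
   output operand 0.  */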
4001 tree input;
4002 char buf[10];
4003
4004 /* Turn the in/out constraint into an output constraint. */
4005 char *p = xstrdup (constraint);
4006 p[0] = '=';
4007 TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);
4008
4009 /* And add a matching input constraint. */
4010 if (allows_reg)
4011 {
4012 sprintf (buf, "%d", i);
4013
4014 /* If there are multiple alternatives in the constraint,
4015 handle each of them individually. Those that allow a register
4016 will be replaced with the operand number; the others will stay
4017 unchanged. */
4018 if (strchr (p, ',') != NULL)
4019 {
4020 size_t len = 0, buflen = strlen (buf);
4021 char *beg, *end, *str, *dst;
4022
4023 for (beg = p + 1;;)
4024 {
4025 end = strchr (beg, ',');
4026 if (end == NULL)
4027 end = strchr (beg, '\0');
4028 if ((size_t) (end - beg) < buflen)
4029 len += buflen + 1;
4030 else
4031 len += end - beg + 1;
4032 if (*end)
4033 beg = end + 1;
4034 else
4035 break;
4036 }
4037
4038 str = (char *) alloca (len);
4039 for (beg = p + 1, dst = str;;)
4040 {
4041 const char *tem;
4042 bool mem_p, reg_p, inout_p;
4043
4044 end = strchr (beg, ',');
4045 if (end)
4046 *end = '\0';
4047 beg[-1] = '=';
4048 tem = beg - 1;
4049 parse_output_constraint (&tem, i, 0, 0,
4050 &mem_p, &reg_p, &inout_p);
4051 if (dst != str)
4052 *dst++ = ',';
4053 if (reg_p)
4054 {
4055 memcpy (dst, buf, buflen);
4056 dst += buflen;
4057 }
4058 else
4059 {
4060 if (end)
4061 len = end - beg;
4062 else
4063 len = strlen (beg);
4064 memcpy (dst, beg, len);
4065 dst += len;
4066 }
4067 if (end)
4068 beg = end + 1;
4069 else
4070 break;
4071 }
4072 *dst = '\0';
4073 input = build_string (dst - str, str);
4074 }
4075 else
4076 input = build_string (strlen (buf), buf);
4077 }
4078 else
4079 input = build_string (constraint_len - 1, constraint + 1);
4080
4081 free (p);
4082
4083 input = build_tree_list (build_tree_list (NULL_TREE, input),
4084 unshare_expr (TREE_VALUE (link)));
4085 ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
4086 }
4087 }
4088
4089 for (link = ASM_INPUTS (expr); link; ++i, link = TREE_CHAIN (link))
4090 {
4091 constraint
4092 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4093 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
4094 oconstraints, &allows_mem, &allows_reg);
4095
4096 /* If the operand is a memory input, it should be an lvalue. */
4097 if (!allows_reg && allows_mem)
4098 {
4099 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
4100 is_gimple_lvalue, fb_lvalue | fb_mayfail);
4101 lang_hooks.mark_addressable (TREE_VALUE (link));
4102 if (tret == GS_ERROR)
4103 {
4104 error ("memory input %d is not directly addressable", i);
4105 ret = tret;
4106 }
4107 }
4108 else
4109 {
4110 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
4111 is_gimple_asm_val, fb_rvalue);
4112 if (tret == GS_ERROR)
4113 ret = tret;
4114 }
4115 }
4116
4117 return ret;
4118 }
4119
4120 /* Gimplify a CLEANUP_POINT_EXPR. Currently this works by adding
4121 WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
4122 gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
4123 return to this function.
4124
4125 FIXME should we complexify the prequeue handling instead? Or use flags
4126 for all the cleanups and let the optimizer tighten them up? The current
4127 code seems pretty fragile; it will break on a cleanup within any
4128 non-conditional nesting. But any such nesting would be broken, anyway;
4129 we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
4130 and continues out of it. We can do that at the RTL level, though, so
4131 having an optimizer to tighten up try/finally regions would be a Good
4132 Thing. */
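/* For illustration (a rough sketch): a gimplified body of the form

     stmt1; WITH_CLEANUP_EXPR <cleanup>; stmt2; stmt3;

   is rewritten here as

     stmt1; try { stmt2; stmt3; } finally { cleanup; }

   (or try/catch when the cleanup is CLEANUP_EH_ONLY); a WITH_CLEANUP_EXPR
   that is the last statement is simply replaced by its cleanup.  */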
4133
4134 static enum gimplify_status
4135 gimplify_cleanup_point_expr (tree *expr_p, tree *pre_p)
4136 {
4137 tree_stmt_iterator iter;
4138 tree body;
4139
4140 tree temp = voidify_wrapper_expr (*expr_p, NULL);
4141
4142 /* We only care about the number of conditions between the innermost
4143 CLEANUP_POINT_EXPR and the cleanup. So save and reset the count and
4144 any cleanups collected outside the CLEANUP_POINT_EXPR. */
4145 int old_conds = gimplify_ctxp->conditions;
4146 tree old_cleanups = gimplify_ctxp->conditional_cleanups;
4147 gimplify_ctxp->conditions = 0;
4148 gimplify_ctxp->conditional_cleanups = NULL_TREE;
4149
4150 body = TREE_OPERAND (*expr_p, 0);
4151 gimplify_to_stmt_list (&body);
4152
4153 gimplify_ctxp->conditions = old_conds;
4154 gimplify_ctxp->conditional_cleanups = old_cleanups;
4155
4156 for (iter = tsi_start (body); !tsi_end_p (iter); )
4157 {
4158 tree *wce_p = tsi_stmt_ptr (iter);
4159 tree wce = *wce_p;
4160
4161 if (TREE_CODE (wce) == WITH_CLEANUP_EXPR)
4162 {
4163 if (tsi_one_before_end_p (iter))
4164 {
4165 tsi_link_before (&iter, TREE_OPERAND (wce, 0), TSI_SAME_STMT);
4166 tsi_delink (&iter);
4167 break;
4168 }
4169 else
4170 {
4171 tree sl, tfe;
4172 enum tree_code code;
4173
4174 if (CLEANUP_EH_ONLY (wce))
4175 code = TRY_CATCH_EXPR;
4176 else
4177 code = TRY_FINALLY_EXPR;
4178
4179 sl = tsi_split_statement_list_after (&iter);
4180 tfe = build2 (code, void_type_node, sl, NULL_TREE);
4181 append_to_statement_list (TREE_OPERAND (wce, 0),
4182 &TREE_OPERAND (tfe, 1));
4183 *wce_p = tfe;
4184 iter = tsi_start (sl);
4185 }
4186 }
4187 else
4188 tsi_next (&iter);
4189 }
4190
4191 if (temp)
4192 {
4193 *expr_p = temp;
4194 append_to_statement_list (body, pre_p);
4195 return GS_OK;
4196 }
4197 else
4198 {
4199 *expr_p = body;
4200 return GS_ALL_DONE;
4201 }
4202 }
4203
4204 /* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP
4205 is the cleanup action required. */
4206
4207 static void
4208 gimple_push_cleanup (tree var, tree cleanup, bool eh_only, tree *pre_p)
4209 {
4210 tree wce;
4211
4212 /* Errors can result in improperly nested cleanups, which results in
4213 confusion when trying to resolve the WITH_CLEANUP_EXPR. */
4214 if (errorcount || sorrycount)
4215 return;
4216
4217 if (gimple_conditional_context ())
4218 {
4219 /* If we're in a conditional context, this is more complex. We only
4220 want to run the cleanup if we actually ran the initialization that
4221 necessitates it, but we want to run it after the end of the
4222 conditional context. So we wrap the try/finally around the
4223 condition and use a flag to determine whether or not to actually
4224 run the destructor. Thus
4225
4226 test ? f(A()) : 0
4227
4228 becomes (approximately)
4229
4230 flag = 0;
4231 try {
4232 if (test) { A::A(temp); flag = 1; val = f(temp); }
4233 else { val = 0; }
4234 } finally {
4235 if (flag) A::~A(temp);
4236 }
4237 val
4238 */
4239
4240 tree flag = create_tmp_var (boolean_type_node, "cleanup");
4241 tree ffalse = build2 (GIMPLE_MODIFY_STMT, void_type_node, flag,
4242 boolean_false_node);
4243 tree ftrue = build2 (GIMPLE_MODIFY_STMT, void_type_node, flag,
4244 boolean_true_node);
4245 cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
4246 wce = build1 (WITH_CLEANUP_EXPR, void_type_node, cleanup);
4247 append_to_statement_list (ffalse, &gimplify_ctxp->conditional_cleanups);
4248 append_to_statement_list (wce, &gimplify_ctxp->conditional_cleanups);
4249 append_to_statement_list (ftrue, pre_p);
4250
4251 /* Because of this manipulation, and the EH edges that jump
4252 threading cannot redirect, the temporary (VAR) will appear
4253 to be used uninitialized. Don't warn. */
4254 TREE_NO_WARNING (var) = 1;
4255 }
4256 else
4257 {
4258 wce = build1 (WITH_CLEANUP_EXPR, void_type_node, cleanup);
4259 CLEANUP_EH_ONLY (wce) = eh_only;
4260 append_to_statement_list (wce, pre_p);
4261 }
4262
4263 gimplify_stmt (&TREE_OPERAND (wce, 0));
4264 }
4265
4266 /* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */
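/* For illustration (a rough sketch; D.123 stands for the artificial slot
   decl): for TARGET_EXPR <D.123, init, cleanup> the slot is added to the
   temporaries, the initialization is emitted to PRE_P as  D.123 = init
   (or as a bare statement when INIT is void), any cleanup is registered
   via gimple_push_cleanup, and *EXPR_P is replaced by the slot D.123.  */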
4267
4268 static enum gimplify_status
4269 gimplify_target_expr (tree *expr_p, tree *pre_p, tree *post_p)
4270 {
4271 tree targ = *expr_p;
4272 tree temp = TARGET_EXPR_SLOT (targ);
4273 tree init = TARGET_EXPR_INITIAL (targ);
4274 enum gimplify_status ret;
4275
4276 if (init)
4277 {
4278 /* TARGET_EXPR temps aren't part of the enclosing block, so add this
4279 one to the temps list. */
4280 gimple_add_tmp_var (temp);
4281
4282 /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
4283 expression is supposed to initialize the slot. */
4284 if (VOID_TYPE_P (TREE_TYPE (init)))
4285 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
4286 else
4287 {
4288 init = build2 (INIT_EXPR, void_type_node, temp, init);
4289 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt,
4290 fb_none);
4291 }
4292 if (ret == GS_ERROR)
4293 return GS_ERROR;
4294 append_to_statement_list (init, pre_p);
4295
4296 /* If needed, push the cleanup for the temp. */
4297 if (TARGET_EXPR_CLEANUP (targ))
4298 {
4299 gimplify_stmt (&TARGET_EXPR_CLEANUP (targ));
4300 gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
4301 CLEANUP_EH_ONLY (targ), pre_p);
4302 }
4303
4304 /* Only expand this once. */
4305 TREE_OPERAND (targ, 3) = init;
4306 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
4307 }
4308 else
4309 /* We should have expanded this before. */
4310 gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));
4311
4312 *expr_p = temp;
4313 return GS_OK;
4314 }
4315
4316 /* Gimplification of expression trees. */
4317
4318 /* Gimplify an expression which appears at statement context; usually, this
4319 means replacing it with a suitably gimple STATEMENT_LIST. */
4320
4321 void
4322 gimplify_stmt (tree *stmt_p)
4323 {
4324 gimplify_expr (stmt_p, NULL, NULL, is_gimple_stmt, fb_none);
4325 }
4326
4327 /* Similarly, but force the result to be a STATEMENT_LIST. */
4328
4329 void
4330 gimplify_to_stmt_list (tree *stmt_p)
4331 {
4332 gimplify_stmt (stmt_p);
4333 if (!*stmt_p)
4334 *stmt_p = alloc_stmt_list ();
4335 else if (TREE_CODE (*stmt_p) != STATEMENT_LIST)
4336 {
4337 tree t = *stmt_p;
4338 *stmt_p = alloc_stmt_list ();
4339 append_to_statement_list (t, stmt_p);
4340 }
4341 }
4342
4343
4344 /* Add FIRSTPRIVATE entries for DECL in CTX and the OpenMP parallels
4345 surrounding it. If entries already exist, force them to be some flavor
4346 of private. If there is no enclosing parallel, do nothing. */
4347
4348 void
4349 omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
4350 {
4351 splay_tree_node n;
4352
4353 if (decl == NULL || !DECL_P (decl))
4354 return;
4355
4356 do
4357 {
4358 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
4359 if (n != NULL)
4360 {
4361 if (n->value & GOVD_SHARED)
4362 n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
4363 else
4364 return;
4365 }
4366 else if (ctx->is_parallel)
4367 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
4368
4369 ctx = ctx->outer_context;
4370 }
4371 while (ctx);
4372 }
4373
4374 /* Similarly for each of the type sizes of TYPE. */
4375
4376 static void
4377 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
4378 {
4379 if (type == NULL || type == error_mark_node)
4380 return;
4381 type = TYPE_MAIN_VARIANT (type);
4382
4383 if (pointer_set_insert (ctx->privatized_types, type))
4384 return;
4385
4386 switch (TREE_CODE (type))
4387 {
4388 case INTEGER_TYPE:
4389 case ENUMERAL_TYPE:
4390 case BOOLEAN_TYPE:
4391 case REAL_TYPE:
4392 omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
4393 omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
4394 break;
4395
4396 case ARRAY_TYPE:
4397 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
4398 omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
4399 break;
4400
4401 case RECORD_TYPE:
4402 case UNION_TYPE:
4403 case QUAL_UNION_TYPE:
4404 {
4405 tree field;
4406 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
4407 if (TREE_CODE (field) == FIELD_DECL)
4408 {
4409 omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
4410 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
4411 }
4412 }
4413 break;
4414
4415 case POINTER_TYPE:
4416 case REFERENCE_TYPE:
4417 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
4418 break;
4419
4420 default:
4421 break;
4422 }
4423
4424 omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
4425 omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
4426 lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
4427 }
4428
4429 /* Add an entry for DECL in the OpenMP context CTX with FLAGS. */
4430
4431 static void
4432 omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
4433 {
4434 splay_tree_node n;
4435 unsigned int nflags;
4436 tree t;
4437
4438 if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node)
4439 return;
4440
4441 /* Never elide decls whose type has TREE_ADDRESSABLE set. This means
4442 there are constructors involved somewhere. */
4443 if (TREE_ADDRESSABLE (TREE_TYPE (decl))
4444 || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl)))
4445 flags |= GOVD_SEEN;
4446
4447 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
4448 if (n != NULL)
4449 {
4450 /* We shouldn't be re-adding the decl with the same data
4451 sharing class. */
4452 gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
4453 /* The only combination of data sharing classes we should see is
4454 FIRSTPRIVATE and LASTPRIVATE. */
4455 nflags = n->value | flags;
4456 gcc_assert ((nflags & GOVD_DATA_SHARE_CLASS)
4457 == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE));
4458 n->value = nflags;
4459 return;
4460 }
4461
4462 /* When adding a variable-sized variable, we have to handle all sorts
4463 of additional bits of data: the pointer replacement variable, and
4464 the parameters of the type. */
4465 if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
4466 {
4467 /* Add the pointer replacement variable as PRIVATE if the variable
4468 replacement is private, else FIRSTPRIVATE since we'll need the
4469 address of the original variable either for SHARED, or for the
4470 copy into or out of the context. */
4471 if (!(flags & GOVD_LOCAL))
4472 {
4473 nflags = flags & GOVD_PRIVATE ? GOVD_PRIVATE : GOVD_FIRSTPRIVATE;
4474 nflags |= flags & GOVD_SEEN;
4475 t = DECL_VALUE_EXPR (decl);
4476 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
4477 t = TREE_OPERAND (t, 0);
4478 gcc_assert (DECL_P (t));
4479 omp_add_variable (ctx, t, nflags);
4480 }
4481
4482 /* Add all of the variable and type parameters (which should have
4483 been gimplified to a formal temporary) as FIRSTPRIVATE. */
4484 omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
4485 omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
4486 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
4487
4488 /* The variable-sized variable itself is never SHARED, only some form
4489 of PRIVATE. The sharing would take place via the pointer variable
4490 which we remapped above. */
4491 if (flags & GOVD_SHARED)
4492 flags = GOVD_PRIVATE | GOVD_DEBUG_PRIVATE
4493 | (flags & (GOVD_SEEN | GOVD_EXPLICIT));
4494
4495 /* We're going to make use of the TYPE_SIZE_UNIT at least in the
4496 alloca statement we generate for the variable, so make sure it
4497 is available. This isn't automatically needed for the SHARED
4498 case, since we won't be allocating local storage then. */
4499 else
4500 omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
4501 }
4502 else if (lang_hooks.decls.omp_privatize_by_reference (decl))
4503 {
4504 gcc_assert ((flags & GOVD_LOCAL) == 0);
4505 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
4506
4507 /* Similar to the direct variable sized case above, we'll need the
4508 size of references being privatized. */
4509 if ((flags & GOVD_SHARED) == 0)
4510 {
4511 t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
4512 if (TREE_CODE (t) != INTEGER_CST)
4513 omp_notice_variable (ctx, t, true);
4514 }
4515 }
4516
4517 splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
4518 }
4519
4520 /* Record the fact that DECL was used within the OpenMP context CTX.
4521 IN_CODE is true when real code uses DECL, and false when we should
4522 merely emit default(none) errors. Return true if DECL is going to
4523 be remapped and thus DECL shouldn't be gimplified into its
4524 DECL_VALUE_EXPR (if any). */
4525
4526 static bool
4527 omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
4528 {
4529 splay_tree_node n;
4530 unsigned flags = in_code ? GOVD_SEEN : 0;
4531 bool ret = false, shared;
4532
4533 if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node)
4534 return false;
4535
4536 /* Threadprivate variables are predetermined. */
4537 if (is_global_var (decl))
4538 {
4539 if (DECL_THREAD_LOCAL_P (decl))
4540 return false;
4541
4542 if (DECL_HAS_VALUE_EXPR_P (decl))
4543 {
4544 tree value = get_base_address (DECL_VALUE_EXPR (decl));
4545
4546 if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
4547 return false;
4548 }
4549 }
4550
4551 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
4552 if (n == NULL)
4553 {
4554 enum omp_clause_default_kind default_kind, kind;
4555
4556 if (!ctx->is_parallel)
4557 goto do_outer;
4558
4559 /* ??? Some compiler-generated variables (like SAVE_EXPRs) could be
4560 remapped firstprivate instead of shared. To some extent this is
4561 addressed in omp_firstprivatize_type_sizes, but not effectively. */
4562 default_kind = ctx->default_kind;
4563 kind = lang_hooks.decls.omp_predetermined_sharing (decl);
4564 if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
4565 default_kind = kind;
4566
4567 switch (default_kind)
4568 {
4569 case OMP_CLAUSE_DEFAULT_NONE:
4570 error ("%qs not specified in enclosing parallel",
4571 IDENTIFIER_POINTER (DECL_NAME (decl)));
4572 error ("%Henclosing parallel", &ctx->location);
4573 /* FALLTHRU */
4574 case OMP_CLAUSE_DEFAULT_SHARED:
4575 flags |= GOVD_SHARED;
4576 break;
4577 case OMP_CLAUSE_DEFAULT_PRIVATE:
4578 flags |= GOVD_PRIVATE;
4579 break;
4580 default:
4581 gcc_unreachable ();
4582 }
4583
4584 omp_add_variable (ctx, decl, flags);
4585
4586 shared = (flags & GOVD_SHARED) != 0;
4587 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
4588 goto do_outer;
4589 }
4590
4591 shared = ((flags | n->value) & GOVD_SHARED) != 0;
4592 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
4593
4594 /* If nothing changed, there's nothing left to do. */
4595 if ((n->value & flags) == flags)
4596 return ret;
4597 flags |= n->value;
4598 n->value = flags;
4599
4600 do_outer:
4601 /* If the variable is private in the current context, then we don't
4602 need to propagate anything to an outer context. */
4603 if (flags & GOVD_PRIVATE)
4604 return ret;
4605 if (ctx->outer_context
4606 && omp_notice_variable (ctx->outer_context, decl, in_code))
4607 return true;
4608 return ret;
4609 }
4610
4611 /* Verify that DECL is private within CTX. If there's specific information
4612 to the contrary in the innermost scope, generate an error. */
4613
4614 static bool
4615 omp_is_private (struct gimplify_omp_ctx *ctx, tree decl)
4616 {
4617 splay_tree_node n;
4618
4619 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
4620 if (n != NULL)
4621 {
4622 if (n->value & GOVD_SHARED)
4623 {
4624 if (ctx == gimplify_omp_ctxp)
4625 {
4626 error ("iteration variable %qs should be private",
4627 IDENTIFIER_POINTER (DECL_NAME (decl)));
4628 n->value = GOVD_PRIVATE;
4629 return true;
4630 }
4631 else
4632 return false;
4633 }
4634 else if ((n->value & GOVD_EXPLICIT) != 0
4635 && (ctx == gimplify_omp_ctxp
4636 || (ctx->is_combined_parallel
4637 && gimplify_omp_ctxp->outer_context == ctx)))
4638 {
4639 if ((n->value & GOVD_FIRSTPRIVATE) != 0)
4640 error ("iteration variable %qs should not be firstprivate",
4641 IDENTIFIER_POINTER (DECL_NAME (decl)));
4642 else if ((n->value & GOVD_REDUCTION) != 0)
4643 error ("iteration variable %qs should not be reduction",
4644 IDENTIFIER_POINTER (DECL_NAME (decl)));
4645 }
4646 return true;
4647 }
4648
4649 if (ctx->is_parallel)
4650 return false;
4651 else if (ctx->outer_context)
4652 return omp_is_private (ctx->outer_context, decl);
4653 else
4654 return !is_global_var (decl);
4655 }
4656
4657 /* Scan the OpenMP clauses in *LIST_P, installing mappings into a new
4658 OMP context and noticing the variables in any enclosing contexts. */
4659
4660 static void
4661 gimplify_scan_omp_clauses (tree *list_p, tree *pre_p, bool in_parallel,
4662 bool in_combined_parallel)
4663 {
4664 struct gimplify_omp_ctx *ctx, *outer_ctx;
4665 tree c;
4666
4667 ctx = new_omp_context (in_parallel, in_combined_parallel);
4668 outer_ctx = ctx->outer_context;
4669
4670 while ((c = *list_p) != NULL)
4671 {
4672 enum gimplify_status gs;
4673 bool remove = false;
4674 bool notice_outer = true;
4675 unsigned int flags;
4676 tree decl;
4677
4678 switch (OMP_CLAUSE_CODE (c))
4679 {
4680 case OMP_CLAUSE_PRIVATE:
4681 flags = GOVD_PRIVATE | GOVD_EXPLICIT;
4682 notice_outer = false;
4683 goto do_add;
4684 case OMP_CLAUSE_SHARED:
4685 flags = GOVD_SHARED | GOVD_EXPLICIT;
4686 goto do_add;
4687 case OMP_CLAUSE_FIRSTPRIVATE:
4688 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
4689 goto do_add;
4690 case OMP_CLAUSE_LASTPRIVATE:
4691 flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
4692 goto do_add;
4693 case OMP_CLAUSE_REDUCTION:
4694 flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
4695 goto do_add;
4696
4697 do_add:
4698 decl = OMP_CLAUSE_DECL (c);
4699 if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node)
4700 {
4701 remove = true;
4702 break;
4703 }
4704 /* Handle NRV results passed by reference. */
4705 if (TREE_CODE (decl) == INDIRECT_REF
4706 && TREE_CODE (TREE_OPERAND (decl, 0)) == RESULT_DECL
4707 && DECL_BY_REFERENCE (TREE_OPERAND (decl, 0)))
4708 OMP_CLAUSE_DECL (c) = decl = TREE_OPERAND (decl, 0);
4709 omp_add_variable (ctx, decl, flags);
4710 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
4711 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4712 {
4713 omp_add_variable (ctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
4714 GOVD_LOCAL | GOVD_SEEN);
4715 gimplify_omp_ctxp = ctx;
4716 push_gimplify_context ();
4717 gimplify_stmt (&OMP_CLAUSE_REDUCTION_INIT (c));
4718 pop_gimplify_context (OMP_CLAUSE_REDUCTION_INIT (c));
4719 push_gimplify_context ();
4720 gimplify_stmt (&OMP_CLAUSE_REDUCTION_MERGE (c));
4721 pop_gimplify_context (OMP_CLAUSE_REDUCTION_MERGE (c));
4722 gimplify_omp_ctxp = outer_ctx;
4723 }
4724 if (notice_outer)
4725 goto do_notice;
4726 break;
4727
4728 case OMP_CLAUSE_COPYIN:
4729 case OMP_CLAUSE_COPYPRIVATE:
4730 decl = OMP_CLAUSE_DECL (c);
4731 if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node)
4732 {
4733 remove = true;
4734 break;
4735 }
4736 /* Handle NRV results passed by reference. */
4737 if (TREE_CODE (decl) == INDIRECT_REF
4738 && TREE_CODE (TREE_OPERAND (decl, 0)) == RESULT_DECL
4739 && DECL_BY_REFERENCE (TREE_OPERAND (decl, 0)))
4740 OMP_CLAUSE_DECL (c) = decl = TREE_OPERAND (decl, 0);
4741 do_notice:
4742 if (outer_ctx)
4743 omp_notice_variable (outer_ctx, decl, true);
4744 break;
4745
4746 case OMP_CLAUSE_IF:
4747 OMP_CLAUSE_OPERAND (c, 0)
4748 = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
4749 /* Fall through. */
4750
4751 case OMP_CLAUSE_SCHEDULE:
4752 case OMP_CLAUSE_NUM_THREADS:
4753 gs = gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
4754 is_gimple_val, fb_rvalue);
4755 if (gs == GS_ERROR)
4756 remove = true;
4757 break;
4758
4759 case OMP_CLAUSE_NOWAIT:
4760 case OMP_CLAUSE_ORDERED:
4761 break;
4762
4763 case OMP_CLAUSE_DEFAULT:
4764 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
4765 break;
4766
4767 default:
4768 gcc_unreachable ();
4769 }
4770
4771 if (remove)
4772 *list_p = OMP_CLAUSE_CHAIN (c);
4773 else
4774 list_p = &OMP_CLAUSE_CHAIN (c);
4775 }
4776
4777 gimplify_omp_ctxp = ctx;
4778 }
4779
4780 /* For all variables that were not actually used within the context,
4781 remove PRIVATE, SHARED, and FIRSTPRIVATE clauses. */
4782
4783 static int
4784 gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
4785 {
4786 tree *list_p = (tree *) data;
4787 tree decl = (tree) n->key;
4788 unsigned flags = n->value;
4789 enum omp_clause_code code;
4790 tree clause;
4791 bool private_debug;
4792
4793 if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
4794 return 0;
4795 if ((flags & GOVD_SEEN) == 0)
4796 return 0;
4797 if (flags & GOVD_DEBUG_PRIVATE)
4798 {
4799 gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_PRIVATE);
4800 private_debug = true;
4801 }
4802 else
4803 private_debug
4804 = lang_hooks.decls.omp_private_debug_clause (decl,
4805 !!(flags & GOVD_SHARED));
4806 if (private_debug)
4807 code = OMP_CLAUSE_PRIVATE;
4808 else if (flags & GOVD_SHARED)
4809 {
4810 if (is_global_var (decl))
4811 return 0;
4812 code = OMP_CLAUSE_SHARED;
4813 }
4814 else if (flags & GOVD_PRIVATE)
4815 code = OMP_CLAUSE_PRIVATE;
4816 else if (flags & GOVD_FIRSTPRIVATE)
4817 code = OMP_CLAUSE_FIRSTPRIVATE;
4818 else
4819 gcc_unreachable ();
4820
4821 clause = build_omp_clause (code);
4822 OMP_CLAUSE_DECL (clause) = decl;
4823 OMP_CLAUSE_CHAIN (clause) = *list_p;
4824 if (private_debug)
4825 OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
4826 *list_p = clause;
4827
4828 return 0;
4829 }
4830
4831 static void
4832 gimplify_adjust_omp_clauses (tree *list_p)
4833 {
4834 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
4835 tree c, decl;
4836
4837 while ((c = *list_p) != NULL)
4838 {
4839 splay_tree_node n;
4840 bool remove = false;
4841
4842 switch (OMP_CLAUSE_CODE (c))
4843 {
4844 case OMP_CLAUSE_PRIVATE:
4845 case OMP_CLAUSE_SHARED:
4846 case OMP_CLAUSE_FIRSTPRIVATE:
4847 decl = OMP_CLAUSE_DECL (c);
4848 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
4849 remove = !(n->value & GOVD_SEEN);
4850 if (! remove)
4851 {
4852 bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
4853 if ((n->value & GOVD_DEBUG_PRIVATE)
4854 || lang_hooks.decls.omp_private_debug_clause (decl, shared))
4855 {
4856 gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
4857 || ((n->value & GOVD_DATA_SHARE_CLASS)
4858 == GOVD_PRIVATE));
4859 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
4860 OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
4861 }
4862 }
4863 break;
4864
4865 case OMP_CLAUSE_LASTPRIVATE:
4866 /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
4867 accurately reflect the presence of a FIRSTPRIVATE clause. */
4868 decl = OMP_CLAUSE_DECL (c);
4869 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
4870 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
4871 = (n->value & GOVD_FIRSTPRIVATE) != 0;
4872 break;
4873
4874 case OMP_CLAUSE_REDUCTION:
4875 case OMP_CLAUSE_COPYIN:
4876 case OMP_CLAUSE_COPYPRIVATE:
4877 case OMP_CLAUSE_IF:
4878 case OMP_CLAUSE_NUM_THREADS:
4879 case OMP_CLAUSE_SCHEDULE:
4880 case OMP_CLAUSE_NOWAIT:
4881 case OMP_CLAUSE_ORDERED:
4882 case OMP_CLAUSE_DEFAULT:
4883 break;
4884
4885 default:
4886 gcc_unreachable ();
4887 }
4888
4889 if (remove)
4890 *list_p = OMP_CLAUSE_CHAIN (c);
4891 else
4892 list_p = &OMP_CLAUSE_CHAIN (c);
4893 }
4894
4895 /* Add in any implicit data sharing. */
4896 splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, list_p);
4897
4898 gimplify_omp_ctxp = ctx->outer_context;
4899 delete_omp_context (ctx);
4900 }
4901
4902 /* Gimplify the contents of an OMP_PARALLEL statement. This involves
4903 gimplification of the body, as well as scanning the body for used
4904 variables. We need to do this scan now, because variable-sized
4905 decls will be decomposed during gimplification. */
4906
4907 static enum gimplify_status
4908 gimplify_omp_parallel (tree *expr_p, tree *pre_p)
4909 {
4910 tree expr = *expr_p;
4911
4912 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p, true,
4913 OMP_PARALLEL_COMBINED (expr));
4914
4915 push_gimplify_context ();
4916
4917 gimplify_stmt (&OMP_PARALLEL_BODY (expr));
4918
4919 if (TREE_CODE (OMP_PARALLEL_BODY (expr)) == BIND_EXPR)
4920 pop_gimplify_context (OMP_PARALLEL_BODY (expr));
4921 else
4922 pop_gimplify_context (NULL_TREE);
4923
4924 gimplify_adjust_omp_clauses (&OMP_PARALLEL_CLAUSES (expr));
4925
4926 return GS_ALL_DONE;
4927 }
4928
4929 /* Gimplify the gross structure of an OMP_FOR statement. */
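/* For illustration (a rough sketch): the increment of an OMP_FOR written
   as  i++ , ++i , i--  or  --i  is canonicalized below into the tuple
   i = i + 1  (or  i = i + -1 ), and an increment of the form  i = c + i
   is commuted into  i = i + c  so the iteration variable is always the
   first operand.  */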
4930
4931 static enum gimplify_status
4932 gimplify_omp_for (tree *expr_p, tree *pre_p)
4933 {
4934 tree for_stmt, decl, t;
4935 enum gimplify_status ret = GS_OK;
4936
4937 for_stmt = *expr_p;
4938
4939 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p, false, false);
4940
4941 t = OMP_FOR_INIT (for_stmt);
4942 gcc_assert (TREE_CODE (t) == MODIFY_EXPR
4943 || TREE_CODE (t) == GIMPLE_MODIFY_STMT);
4944 decl = GENERIC_TREE_OPERAND (t, 0);
4945 gcc_assert (DECL_P (decl));
4946 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl)));
4947
4948 /* Make sure the iteration variable is private. */
4949 if (omp_is_private (gimplify_omp_ctxp, decl))
4950 omp_notice_variable (gimplify_omp_ctxp, decl, true);
4951 else
4952 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
4953
4954 ret |= gimplify_expr (&GENERIC_TREE_OPERAND (t, 1),
4955 &OMP_FOR_PRE_BODY (for_stmt),
4956 NULL, is_gimple_val, fb_rvalue);
4957
4958 tree_to_gimple_tuple (&OMP_FOR_INIT (for_stmt));
4959
4960 t = OMP_FOR_COND (for_stmt);
4961 gcc_assert (COMPARISON_CLASS_P (t));
4962 gcc_assert (GENERIC_TREE_OPERAND (t, 0) == decl);
4963
4964 ret |= gimplify_expr (&GENERIC_TREE_OPERAND (t, 1),
4965 &OMP_FOR_PRE_BODY (for_stmt),
4966 NULL, is_gimple_val, fb_rvalue);
4967
4968 tree_to_gimple_tuple (&OMP_FOR_INCR (for_stmt));
4969 t = OMP_FOR_INCR (for_stmt);
4970 switch (TREE_CODE (t))
4971 {
4972 case PREINCREMENT_EXPR:
4973 case POSTINCREMENT_EXPR:
4974 t = build_int_cst (TREE_TYPE (decl), 1);
4975 goto build_modify;
4976 case PREDECREMENT_EXPR:
4977 case POSTDECREMENT_EXPR:
4978 t = build_int_cst (TREE_TYPE (decl), -1);
4979 goto build_modify;
4980 build_modify:
4981 t = build2 (PLUS_EXPR, TREE_TYPE (decl), decl, t);
4982 t = build2 (GIMPLE_MODIFY_STMT, void_type_node, decl, t);
4983 OMP_FOR_INCR (for_stmt) = t;
4984 break;
4985
4986 case GIMPLE_MODIFY_STMT:
4987 gcc_assert (GIMPLE_STMT_OPERAND (t, 0) == decl);
4988 t = GIMPLE_STMT_OPERAND (t, 1);
4989 switch (TREE_CODE (t))
4990 {
4991 case PLUS_EXPR:
4992 if (TREE_OPERAND (t, 1) == decl)
4993 {
4994 TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
4995 TREE_OPERAND (t, 0) = decl;
4996 break;
4997 }
4998 case MINUS_EXPR:
4999 gcc_assert (TREE_OPERAND (t, 0) == decl);
5000 break;
5001 default:
5002 gcc_unreachable ();
5003 }
5004
5005 ret |= gimplify_expr (&TREE_OPERAND (t, 1), &OMP_FOR_PRE_BODY (for_stmt),
5006 NULL, is_gimple_val, fb_rvalue);
5007 break;
5008
5009 default:
5010 gcc_unreachable ();
5011 }
5012
5013 gimplify_to_stmt_list (&OMP_FOR_BODY (for_stmt));
5014 gimplify_adjust_omp_clauses (&OMP_FOR_CLAUSES (for_stmt));
5015
5016 return ret == GS_ALL_DONE ? GS_ALL_DONE : GS_ERROR;
5017 }
5018
5019 /* Gimplify the gross structure of other OpenMP worksharing constructs.
5020 In particular, OMP_SECTIONS and OMP_SINGLE. */
5021
5022 static enum gimplify_status
5023 gimplify_omp_workshare (tree *expr_p, tree *pre_p)
5024 {
5025 tree stmt = *expr_p;
5026
5027 gimplify_scan_omp_clauses (&OMP_CLAUSES (stmt), pre_p, false, false);
5028 gimplify_to_stmt_list (&OMP_BODY (stmt));
5029 gimplify_adjust_omp_clauses (&OMP_CLAUSES (stmt));
5030
5031 return GS_ALL_DONE;
5032 }
5033
5034 /* A subroutine of gimplify_omp_atomic. The front end is supposed to have
5035 stabilized the lhs of the atomic operation as *ADDR. Return true if
5036 EXPR is this stabilized form. */
5037
5038 static bool
5039 goa_lhs_expr_p (tree expr, tree addr)
5040 {
5041 /* Also include casts to other type variants. The C front end is fond
5042 of adding these for e.g. volatile variables. This is like
5043 STRIP_TYPE_NOPS but includes the main variant lookup. */
5044 while ((TREE_CODE (expr) == NOP_EXPR
5045 || TREE_CODE (expr) == CONVERT_EXPR
5046 || TREE_CODE (expr) == NON_LVALUE_EXPR)
5047 && TREE_OPERAND (expr, 0) != error_mark_node
5048 && (TYPE_MAIN_VARIANT (TREE_TYPE (expr))
5049 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (expr, 0)))))
5050 expr = TREE_OPERAND (expr, 0);
5051
5052 if (TREE_CODE (expr) == INDIRECT_REF && TREE_OPERAND (expr, 0) == addr)
5053 return true;
5054 if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
5055 return true;
5056 return false;
5057 }
5058
5059 /* A subroutine of gimplify_omp_atomic. Attempt to implement the atomic
5060 operation as a __sync_fetch_and_op builtin. INDEX is log2 of the
5061 size of the data type, and thus usable to find the index of the builtin
5062 decl. Returns GS_UNHANDLED if the expression is not of the proper form. */
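/* For illustration (a rough sketch; the exact builtin depends on INDEX):
   an atomic update whose RHS is  *addr + 5  on a 4-byte type is rewritten
   into a call along the lines of

     __sync_fetch_and_add_4 (addr, 5)

   and similarly for -, &, | and ^.  */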
5063
5064 static enum gimplify_status
5065 gimplify_omp_atomic_fetch_op (tree *expr_p, tree addr, tree rhs, int index)
5066 {
5067 enum built_in_function base;
5068 tree decl, args, itype;
5069 enum insn_code *optab;
5070
5071 /* Check for one of the supported fetch-op operations. */
5072 switch (TREE_CODE (rhs))
5073 {
5074 case PLUS_EXPR:
5075 base = BUILT_IN_FETCH_AND_ADD_N;
5076 optab = sync_add_optab;
5077 break;
5078 case MINUS_EXPR:
5079 base = BUILT_IN_FETCH_AND_SUB_N;
5080 optab = sync_add_optab;
5081 break;
5082 case BIT_AND_EXPR:
5083 base = BUILT_IN_FETCH_AND_AND_N;
5084 optab = sync_and_optab;
5085 break;
5086 case BIT_IOR_EXPR:
5087 base = BUILT_IN_FETCH_AND_OR_N;
5088 optab = sync_ior_optab;
5089 break;
5090 case BIT_XOR_EXPR:
5091 base = BUILT_IN_FETCH_AND_XOR_N;
5092 optab = sync_xor_optab;
5093 break;
5094 default:
5095 return GS_UNHANDLED;
5096 }
5097
5098 /* Make sure the expression is of the proper form. */
5099 if (goa_lhs_expr_p (TREE_OPERAND (rhs, 0), addr))
5100 rhs = TREE_OPERAND (rhs, 1);
5101 else if (commutative_tree_code (TREE_CODE (rhs))
5102 && goa_lhs_expr_p (TREE_OPERAND (rhs, 1), addr))
5103 rhs = TREE_OPERAND (rhs, 0);
5104 else
5105 return GS_UNHANDLED;
5106
5107 decl = built_in_decls[base + index + 1];
5108 itype = TREE_TYPE (TREE_TYPE (decl));
5109
5110 if (optab[TYPE_MODE (itype)] == CODE_FOR_nothing)
5111 return GS_UNHANDLED;
5112
5113 args = tree_cons (NULL, fold_convert (itype, rhs), NULL);
5114 args = tree_cons (NULL, addr, args);
5115 *expr_p = build_function_call_expr (decl, args);
5116 return GS_OK;
5117 }
5118
5119 /* A subroutine of gimplify_omp_atomic_pipeline. Walk *EXPR_P and replace
5120 occurrences of *LHS_ADDR with LHS_VAR. If an expression does not involve
5121 the lhs, evaluate it into a temporary. Return 1 if the lhs appeared as
5122 a subexpression, 0 if it did not, or -1 if an error was encountered. */
5123
5124 static int
5125 goa_stabilize_expr (tree *expr_p, tree *pre_p, tree lhs_addr, tree lhs_var)
5126 {
5127 tree expr = *expr_p;
5128 int saw_lhs;
5129
5130 if (goa_lhs_expr_p (expr, lhs_addr))
5131 {
5132 *expr_p = lhs_var;
5133 return 1;
5134 }
5135 if (is_gimple_val (expr))
5136 return 0;
5137
5138 saw_lhs = 0;
5139 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
5140 {
5141 case tcc_binary:
5142 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
5143 lhs_addr, lhs_var);
5144 case tcc_unary:
5145 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
5146 lhs_addr, lhs_var);
5147 break;
5148 default:
5149 break;
5150 }
5151
5152 if (saw_lhs == 0)
5153 {
5154 enum gimplify_status gs;
5155 gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
5156 if (gs != GS_ALL_DONE)
5157 saw_lhs = -1;
5158 }
5159
5160 return saw_lhs;
5161 }
5162
5163 /* A subroutine of gimplify_omp_atomic. Implement the atomic operation as:
5164
5165 oldval = *addr;
5166 repeat:
5167 newval = rhs; // with oldval replacing *addr in rhs
5168 oldval = __sync_val_compare_and_swap (addr, oldval, newval);
5169 if (oldval != newval)
5170 goto repeat;
5171
5172 INDEX is log2 of the size of the data type, and thus usable to find the
5173 index of the builtin decl. */
5174
5175 static enum gimplify_status
5176 gimplify_omp_atomic_pipeline (tree *expr_p, tree *pre_p, tree addr,
5177 tree rhs, int index)
5178 {
5179 tree oldval, oldival, oldival2, newval, newival, label;
5180 tree type, itype, cmpxchg, args, x, iaddr;
5181
5182 cmpxchg = built_in_decls[BUILT_IN_VAL_COMPARE_AND_SWAP_N + index + 1];
5183 type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
5184 itype = TREE_TYPE (TREE_TYPE (cmpxchg));
5185
5186 if (sync_compare_and_swap[TYPE_MODE (itype)] == CODE_FOR_nothing)
5187 return GS_UNHANDLED;
5188
5189 oldval = create_tmp_var (type, NULL);
5190 newval = create_tmp_var (type, NULL);
5191
5192 /* Precompute as much of RHS as possible. In the same walk, replace
5193 occurrences of the lhs value with our temporary. */
5194 if (goa_stabilize_expr (&rhs, pre_p, addr, oldval) < 0)
5195 return GS_ERROR;
5196
5197 x = build_fold_indirect_ref (addr);
5198 x = build2 (GIMPLE_MODIFY_STMT, void_type_node, oldval, x);
5199 gimplify_and_add (x, pre_p);
5200
5201 /* For floating-point values, we'll need to view-convert them to integers
5202 so that we can perform the atomic compare and swap. Simplify the
5203 following code by always setting up the "i"ntegral variables. */
5204 if (INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type))
5205 {
5206 oldival = oldval;
5207 newival = newval;
5208 iaddr = addr;
5209 }
5210 else
5211 {
5212 oldival = create_tmp_var (itype, NULL);
5213 newival = create_tmp_var (itype, NULL);
5214
5215 x = build1 (VIEW_CONVERT_EXPR, itype, oldval);
5216 x = build2 (GIMPLE_MODIFY_STMT, void_type_node, oldival, x);
5217 gimplify_and_add (x, pre_p);
5218 iaddr = fold_convert (build_pointer_type (itype), addr);
5219 }
5220
5221 oldival2 = create_tmp_var (itype, NULL);
5222
5223 label = create_artificial_label ();
5224 x = build1 (LABEL_EXPR, void_type_node, label);
5225 gimplify_and_add (x, pre_p);
5226
5227 x = build2 (GIMPLE_MODIFY_STMT, void_type_node, newval, rhs);
5228 gimplify_and_add (x, pre_p);
5229
5230 if (newval != newival)
5231 {
5232 x = build1 (VIEW_CONVERT_EXPR, itype, newval);
5233 x = build2 (GIMPLE_MODIFY_STMT, void_type_node, newival, x);
5234 gimplify_and_add (x, pre_p);
5235 }
5236
5237 x = build2 (GIMPLE_MODIFY_STMT, void_type_node, oldival2,
5238 fold_convert (itype, oldival));
5239 gimplify_and_add (x, pre_p);
5240
5241 args = tree_cons (NULL, fold_convert (itype, newival), NULL);
5242 args = tree_cons (NULL, fold_convert (itype, oldival), args);
5243 args = tree_cons (NULL, iaddr, args);
5244 x = build_function_call_expr (cmpxchg, args);
5245 if (oldval == oldival)
5246 x = fold_convert (type, x);
5247 x = build2 (GIMPLE_MODIFY_STMT, void_type_node, oldival, x);
5248 gimplify_and_add (x, pre_p);
5249
5250 /* For floating point, be prepared for the loop backedge. */
5251 if (oldval != oldival)
5252 {
5253 x = build1 (VIEW_CONVERT_EXPR, type, oldival);
5254 x = build2 (GIMPLE_MODIFY_STMT, void_type_node, oldval, x);
5255 gimplify_and_add (x, pre_p);
5256 }
5257
5258 /* Note that we always perform the comparison as an integer, even for
5259 floating point. This allows the atomic operation to properly
5260 succeed even with NaNs and -0.0. */
5261 x = build3 (COND_EXPR, void_type_node,
5262 build2 (NE_EXPR, boolean_type_node, oldival, oldival2),
5263 build1 (GOTO_EXPR, void_type_node, label), NULL);
5264 gimplify_and_add (x, pre_p);
5265
5266 *expr_p = NULL;
5267 return GS_ALL_DONE;
5268 }
5269
5270 /* A subroutine of gimplify_omp_atomic. Implement the atomic operation as:
5271
5272 GOMP_atomic_start ();
5273 *addr = rhs;
5274 GOMP_atomic_end ();
5275
5276 The result is not globally atomic, but works so long as all parallel
5277 references are within #pragma omp atomic directives. According to
5278 responses received from omp@openmp.org, this appears to be within spec,
5279 which makes sense, since that's how several other compilers handle
5280 this situation as well. */
5281
5282 static enum gimplify_status
5283 gimplify_omp_atomic_mutex (tree *expr_p, tree *pre_p, tree addr, tree rhs)
5284 {
5285 tree t;
5286
5287 t = built_in_decls[BUILT_IN_GOMP_ATOMIC_START];
5288 t = build_function_call_expr (t, NULL);
5289 gimplify_and_add (t, pre_p);
5290
5291 t = build_fold_indirect_ref (addr);
5292 t = build2 (GIMPLE_MODIFY_STMT, void_type_node, t, rhs);
5293 gimplify_and_add (t, pre_p);
5294
5295 t = built_in_decls[BUILT_IN_GOMP_ATOMIC_END];
5296 t = build_function_call_expr (t, NULL);
5297 gimplify_and_add (t, pre_p);
5298
5299 *expr_p = NULL;
5300 return GS_ALL_DONE;
5301 }
5302
5303 /* Gimplify an OMP_ATOMIC statement. */
5304
5305 static enum gimplify_status
5306 gimplify_omp_atomic (tree *expr_p, tree *pre_p)
5307 {
5308 tree addr = TREE_OPERAND (*expr_p, 0);
5309 tree rhs = TREE_OPERAND (*expr_p, 1);
5310 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
5311 HOST_WIDE_INT index;
5312
5313 /* Make sure the type is one of the supported sizes. */
5314 index = tree_low_cst (TYPE_SIZE_UNIT (type), 1);
5315 index = exact_log2 (index);
5316 if (index >= 0 && index <= 4)
5317 {
5318 enum gimplify_status gs;
5319 unsigned int align;
5320
5321 if (DECL_P (TREE_OPERAND (addr, 0)))
5322 align = DECL_ALIGN_UNIT (TREE_OPERAND (addr, 0));
5323 else if (TREE_CODE (TREE_OPERAND (addr, 0)) == COMPONENT_REF
5324 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (addr, 0), 1))
5325 == FIELD_DECL)
5326 align = DECL_ALIGN_UNIT (TREE_OPERAND (TREE_OPERAND (addr, 0), 1));
5327 else
5328 align = TYPE_ALIGN_UNIT (type);
5329
5330 /* __sync builtins require strict data alignment. */
5331 if (exact_log2 (align) >= index)
5332 {
5333 /* When possible, use specialized atomic update functions. */
5334 if (INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type))
5335 {
5336 gs = gimplify_omp_atomic_fetch_op (expr_p, addr, rhs, index);
5337 if (gs != GS_UNHANDLED)
5338 return gs;
5339 }
5340
5341 /* If we don't have specialized __sync builtins, try to implement the
5342 operation as a compare-and-swap loop. */
5343 gs = gimplify_omp_atomic_pipeline (expr_p, pre_p, addr, rhs, index);
5344 if (gs != GS_UNHANDLED)
5345 return gs;
5346 }
5347 }
5348
5349 /* The ultimate fallback is wrapping the operation in a mutex. */
5350 return gimplify_omp_atomic_mutex (expr_p, pre_p, addr, rhs);
5351 }
5352
5353 /* Gimplifies the expression tree pointed to by EXPR_P. Return GS_ERROR if
5354 gimplification failed.
5355
5356 PRE_P points to the list where side effects that must happen before
5357 EXPR should be stored.
5358
5359 POST_P points to the list where side effects that must happen after
5360 EXPR should be stored, or NULL if there is no suitable list. In
5361 that case, we copy the result to a temporary, emit the
5362 post-effects, and then return the temporary.
5363
5364 GIMPLE_TEST_F points to a function that takes a tree T and
5365 returns nonzero if T is in the GIMPLE form requested by the
5366 caller. The GIMPLE predicates are in tree-gimple.c.
5367
5368 This test is used twice. Before gimplification, the test is
5369 invoked to determine whether *EXPR_P is already gimple enough. If
5370 that fails, *EXPR_P is gimplified according to its code and
5371 GIMPLE_TEST_F is called again. If the test still fails, then a new
5372 temporary variable is created and assigned the value of the
5373 gimplified expression.
5374
5375 FALLBACK tells the function what sort of a temporary we want. If the 1
5376 bit is set, an rvalue is OK. If the 2 bit is set, an lvalue is OK.
5377 If both are set, either is OK, but an lvalue is preferable.
5378
5379 The return value is either GS_ERROR or GS_ALL_DONE, since this function
5380 iterates until a solution is found. */
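/* For illustration (a rough sketch): gimplifying  a + b * c  against the
   is_gimple_val predicate emits something like

     t1 = b * c;
     t2 = a + t1;

   onto PRE_P and leaves the temporary  t2  in *EXPR_P.  */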
5381
5382 enum gimplify_status
5383 gimplify_expr (tree *expr_p, tree *pre_p, tree *post_p,
5384 bool (* gimple_test_f) (tree), fallback_t fallback)
5385 {
5386 tree tmp;
5387 tree internal_pre = NULL_TREE;
5388 tree internal_post = NULL_TREE;
5389 tree save_expr;
5390 int is_statement = (pre_p == NULL);
5391 location_t saved_location;
5392 enum gimplify_status ret;
5393
5394 save_expr = *expr_p;
5395 if (save_expr == NULL_TREE)
5396 return GS_ALL_DONE;
5397
5398 /* We used to check the predicate here and return immediately if it
5399 succeeded. This is wrong; the design is for gimplification to be
5400 idempotent, and for the predicates to only test for valid forms, not
5401 whether they are fully simplified. */
5402
5403 /* Set up our internal queues if needed. */
5404 if (pre_p == NULL)
5405 pre_p = &internal_pre;
5406 if (post_p == NULL)
5407 post_p = &internal_post;
5408
5409 saved_location = input_location;
5410 if (save_expr != error_mark_node
5411 && EXPR_HAS_LOCATION (*expr_p))
5412 input_location = EXPR_LOCATION (*expr_p);
5413
5414 /* Loop over the specific gimplifiers until the toplevel node
5415 remains the same. */
5416 do
5417 {
5418 /* Strip away as many useless type conversions as possible
5419 at the toplevel. */
5420 STRIP_USELESS_TYPE_CONVERSION (*expr_p);
5421
5422 /* Remember the expr. */
5423 save_expr = *expr_p;
5424
5425 /* Bail out if the expression or its type is erroneous. */
5426 if (save_expr == error_mark_node
5427 || (!GIMPLE_STMT_P (save_expr)
5428 && TREE_TYPE (save_expr)
5429 && TREE_TYPE (save_expr) == error_mark_node))
5430 {
5431 ret = GS_ERROR;
5432 break;
5433 }
5434
5435 /* Do any language-specific gimplification. */
5436 ret = lang_hooks.gimplify_expr (expr_p, pre_p, post_p);
5437 if (ret == GS_OK)
5438 {
5439 if (*expr_p == NULL_TREE)
5440 break;
5441 if (*expr_p != save_expr)
5442 continue;
5443 }
5444 else if (ret != GS_UNHANDLED)
5445 break;
5446
5447 ret = GS_OK;
5448 switch (TREE_CODE (*expr_p))
5449 {
5450 /* First deal with the special cases. */
5451
5452 case POSTINCREMENT_EXPR:
5453 case POSTDECREMENT_EXPR:
5454 case PREINCREMENT_EXPR:
5455 case PREDECREMENT_EXPR:
5456 ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
5457 fallback != fb_none);
5458 break;
5459
5460 case ARRAY_REF:
5461 case ARRAY_RANGE_REF:
5462 case REALPART_EXPR:
5463 case IMAGPART_EXPR:
5464 case COMPONENT_REF:
5465 case VIEW_CONVERT_EXPR:
5466 ret = gimplify_compound_lval (expr_p, pre_p, post_p,
5467 fallback ? fallback : fb_rvalue);
5468 break;
5469
5470 case COND_EXPR:
5471 ret = gimplify_cond_expr (expr_p, pre_p, fallback);
5472 /* C99 code may assign to an array in a structure value of a
5473 conditional expression, and this has undefined behavior
5474 only on execution, so create a temporary if an lvalue is
5475 required. */
5476 if (fallback == fb_lvalue)
5477 {
5478 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
5479 lang_hooks.mark_addressable (*expr_p);
5480 }
5481 break;
5482
5483 case CALL_EXPR:
5484 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
5485 /* C99 code may assign to an array in a structure returned
5486 from a function, and this has undefined behavior only on
5487 execution, so create a temporary if an lvalue is
5488 required. */
5489 if (fallback == fb_lvalue)
5490 {
5491 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
5492 lang_hooks.mark_addressable (*expr_p);
5493 }
5494 break;
5495
5496 case TREE_LIST:
5497 gcc_unreachable ();
5498
5499 case COMPOUND_EXPR:
5500 ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
5501 break;
5502
5503 case MODIFY_EXPR:
5504 case GIMPLE_MODIFY_STMT:
5505 case INIT_EXPR:
5506 ret = gimplify_modify_expr (expr_p, pre_p, post_p,
5507 fallback != fb_none);
5508
5509 if (*expr_p)
5510 {
5511 /* The distinction between MODIFY_EXPR and INIT_EXPR is no longer
5512 useful. */
5513 if (TREE_CODE (*expr_p) == INIT_EXPR)
5514 TREE_SET_CODE (*expr_p, MODIFY_EXPR);
5515
5516 /* Convert MODIFY_EXPR to GIMPLE_MODIFY_STMT. */
5517 if (TREE_CODE (*expr_p) == MODIFY_EXPR)
5518 tree_to_gimple_tuple (expr_p);
5519 }
5520
5521 break;
5522
5523 case TRUTH_ANDIF_EXPR:
5524 case TRUTH_ORIF_EXPR:
5525 ret = gimplify_boolean_expr (expr_p);
5526 break;
5527
5528 case TRUTH_NOT_EXPR:
5529 TREE_OPERAND (*expr_p, 0)
5530 = gimple_boolify (TREE_OPERAND (*expr_p, 0));
5531 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
5532 is_gimple_val, fb_rvalue);
5533 recalculate_side_effects (*expr_p);
5534 break;
5535
5536 case ADDR_EXPR:
5537 ret = gimplify_addr_expr (expr_p, pre_p, post_p);
5538 break;
5539
5540 case VA_ARG_EXPR:
5541 ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
5542 break;
5543
5544 case CONVERT_EXPR:
5545 case NOP_EXPR:
5546 if (IS_EMPTY_STMT (*expr_p))
5547 {
5548 ret = GS_ALL_DONE;
5549 break;
5550 }
5551
5552 if (VOID_TYPE_P (TREE_TYPE (*expr_p))
5553 || fallback == fb_none)
5554 {
5555 /* Just strip a conversion to void (or in void context) and
5556 try again. */
5557 *expr_p = TREE_OPERAND (*expr_p, 0);
5558 break;
5559 }
5560
5561 ret = gimplify_conversion (expr_p);
5562 if (ret == GS_ERROR)
5563 break;
5564 if (*expr_p != save_expr)
5565 break;
5566 /* FALLTHRU */
5567
5568 case FIX_TRUNC_EXPR:
5569 /* unary_expr: ... | '(' cast ')' val | ... */
5570 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
5571 is_gimple_val, fb_rvalue);
5572 recalculate_side_effects (*expr_p);
5573 break;
5574
5575 case INDIRECT_REF:
5576 *expr_p = fold_indirect_ref (*expr_p);
5577 if (*expr_p != save_expr)
5578 break;
5579 /* else fall through. */
5580 case ALIGN_INDIRECT_REF:
5581 case MISALIGNED_INDIRECT_REF:
5582 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
5583 is_gimple_reg, fb_rvalue);
5584 recalculate_side_effects (*expr_p);
5585 break;
5586
5587 /* Constants need not be gimplified. */
5588 case INTEGER_CST:
5589 case REAL_CST:
5590 case STRING_CST:
5591 case COMPLEX_CST:
5592 case VECTOR_CST:
5593 ret = GS_ALL_DONE;
5594 break;
5595
5596 case CONST_DECL:
5597 /* If we require an lvalue, such as for ADDR_EXPR, retain the
5598 CONST_DECL node. Otherwise the decl is replaceable by its
5599 value. */
5600 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
5601 if (fallback & fb_lvalue)
5602 ret = GS_ALL_DONE;
5603 else
5604 *expr_p = DECL_INITIAL (*expr_p);
5605 break;
5606
5607 case DECL_EXPR:
5608 ret = gimplify_decl_expr (expr_p);
5609 break;
5610
5611 case EXC_PTR_EXPR:
5612 /* FIXME make this a decl. */
5613 ret = GS_ALL_DONE;
5614 break;
5615
5616 case BIND_EXPR:
5617 ret = gimplify_bind_expr (expr_p, pre_p);
5618 break;
5619
5620 case LOOP_EXPR:
5621 ret = gimplify_loop_expr (expr_p, pre_p);
5622 break;
5623
5624 case SWITCH_EXPR:
5625 ret = gimplify_switch_expr (expr_p, pre_p);
5626 break;
5627
5628 case EXIT_EXPR:
5629 ret = gimplify_exit_expr (expr_p);
5630 break;
5631
5632 case GOTO_EXPR:
5633 /* If the target is not a LABEL_DECL, then it is a computed jump
5634 and the target needs to be gimplified. */
5635 if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
5636 ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
5637 NULL, is_gimple_val, fb_rvalue);
5638 break;
5639
5640 case LABEL_EXPR:
5641 ret = GS_ALL_DONE;
5642 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
5643 == current_function_decl);
5644 break;
5645
5646 case CASE_LABEL_EXPR:
5647 ret = gimplify_case_label_expr (expr_p);
5648 break;
5649
5650 case RETURN_EXPR:
5651 ret = gimplify_return_expr (*expr_p, pre_p);
5652 break;
5653
5654 case CONSTRUCTOR:
5655 /* Don't reduce this in place; let gimplify_init_constructor work its
5656 magic. But if we're just elaborating this for side effects, only
5657 gimplify the elements that have side-effects. */
5658 if (fallback == fb_none)
5659 {
5660 unsigned HOST_WIDE_INT ix;
5661 constructor_elt *ce;
5662 tree temp = NULL_TREE;
5663 for (ix = 0;
5664 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (*expr_p),
5665 ix, ce);
5666 ix++)
5667 if (TREE_SIDE_EFFECTS (ce->value))
5668 append_to_statement_list (ce->value, &temp);
5669
5670 *expr_p = temp;
5671 ret = GS_OK;
5672 }
5673 /* C99 code may assign to an array in a constructed
5674 structure or union, and this has undefined behavior only
5675 on execution, so create a temporary if an lvalue is
5676 required. */
5677 else if (fallback == fb_lvalue)
5678 {
5679 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
5680 lang_hooks.mark_addressable (*expr_p);
5681 }
5682 else
5683 ret = GS_ALL_DONE;
5684 break;
5685
5686 /* The following are special cases that are not handled by the
5687 original GIMPLE grammar. */
5688
5689 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
5690 eliminated. */
5691 case SAVE_EXPR:
5692 ret = gimplify_save_expr (expr_p, pre_p, post_p);
5693 break;
5694
5695 case BIT_FIELD_REF:
5696 {
5697 enum gimplify_status r0, r1, r2;
5698
5699 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
5700 is_gimple_lvalue, fb_either);
5701 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
5702 is_gimple_val, fb_rvalue);
5703 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p, post_p,
5704 is_gimple_val, fb_rvalue);
5705 recalculate_side_effects (*expr_p);
5706
5707 ret = MIN (r0, MIN (r1, r2));
5708 }
5709 break;
5710
5711 case NON_LVALUE_EXPR:
5712 /* This should have been stripped above. */
5713 gcc_unreachable ();
5714
5715 case ASM_EXPR:
5716 ret = gimplify_asm_expr (expr_p, pre_p, post_p);
5717 break;
5718
5719 case TRY_FINALLY_EXPR:
5720 case TRY_CATCH_EXPR:
5721 gimplify_to_stmt_list (&TREE_OPERAND (*expr_p, 0));
5722 gimplify_to_stmt_list (&TREE_OPERAND (*expr_p, 1));
5723 ret = GS_ALL_DONE;
5724 break;
5725
5726 case CLEANUP_POINT_EXPR:
5727 ret = gimplify_cleanup_point_expr (expr_p, pre_p);
5728 break;
5729
5730 case TARGET_EXPR:
5731 ret = gimplify_target_expr (expr_p, pre_p, post_p);
5732 break;
5733
5734 case CATCH_EXPR:
5735 gimplify_to_stmt_list (&CATCH_BODY (*expr_p));
5736 ret = GS_ALL_DONE;
5737 break;
5738
5739 case EH_FILTER_EXPR:
5740 gimplify_to_stmt_list (&EH_FILTER_FAILURE (*expr_p));
5741 ret = GS_ALL_DONE;
5742 break;
5743
5744 case OBJ_TYPE_REF:
5745 {
5746 enum gimplify_status r0, r1;
5747 r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p, post_p,
5748 is_gimple_val, fb_rvalue);
5749 r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p, post_p,
5750 is_gimple_val, fb_rvalue);
5751 ret = MIN (r0, r1);
5752 }
5753 break;
5754
5755 case LABEL_DECL:
5756 /* We get here when taking the address of a label. We mark
5757 the label as "forced", meaning it can never be removed and
5758 is a potential target for any computed goto. */
5759 FORCED_LABEL (*expr_p) = 1;
5760 ret = GS_ALL_DONE;
5761 break;
5762
5763 case STATEMENT_LIST:
5764 ret = gimplify_statement_list (expr_p, pre_p);
5765 break;
5766
5767 case WITH_SIZE_EXPR:
5768 {
5769 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
5770 post_p == &internal_post ? NULL : post_p,
5771 gimple_test_f, fallback);
5772 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
5773 is_gimple_val, fb_rvalue);
5774 }
5775 break;
5776
5777 case VAR_DECL:
5778 case PARM_DECL:
5779 ret = gimplify_var_or_parm_decl (expr_p);
5780 break;
5781
5782 case RESULT_DECL:
5783 /* When within an OpenMP context, notice uses of variables. */
5784 if (gimplify_omp_ctxp)
5785 omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
5786 ret = GS_ALL_DONE;
5787 break;
5788
5789 case SSA_NAME:
5790 /* Allow callbacks into the gimplifier during optimization. */
5791 ret = GS_ALL_DONE;
5792 break;
5793
5794 case OMP_PARALLEL:
5795 ret = gimplify_omp_parallel (expr_p, pre_p);
5796 break;
5797
5798 case OMP_FOR:
5799 ret = gimplify_omp_for (expr_p, pre_p);
5800 break;
5801
5802 case OMP_SECTIONS:
5803 case OMP_SINGLE:
5804 ret = gimplify_omp_workshare (expr_p, pre_p);
5805 break;
5806
5807 case OMP_SECTION:
5808 case OMP_MASTER:
5809 case OMP_ORDERED:
5810 case OMP_CRITICAL:
5811 gimplify_to_stmt_list (&OMP_BODY (*expr_p));
5812 break;
5813
5814 case OMP_ATOMIC:
5815 ret = gimplify_omp_atomic (expr_p, pre_p);
5816 break;
5817
5818 case OMP_RETURN:
5819 case OMP_CONTINUE:
5820 ret = GS_ALL_DONE;
5821 break;
5822
5823 default:
5824 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
5825 {
5826 case tcc_comparison:
5827 /* Handle comparison of non-scalar-mode aggregate objects with
5828 a call to memcmp. It would be nice to only have to do
5829 this for variable-sized objects, but then we'd have to allow
5830 the same nest of reference nodes we allow for MODIFY_EXPR and
5831 that's too complex.
5832
5833 Compare scalar mode aggregates as scalar mode values. Using
5834 memcmp for them would be very inefficient at best, and is
5835 plain wrong if bitfields are involved. */
5836
5837 {
5838 tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
5839
5840 if (!AGGREGATE_TYPE_P (type))
5841 goto expr_2;
5842 else if (TYPE_MODE (type) != BLKmode)
5843 ret = gimplify_scalar_mode_aggregate_compare (expr_p);
5844 else
5845 ret = gimplify_variable_sized_compare (expr_p);
5846
5847 break;
5848 }
5849
5850 /* If *EXPR_P does not need to be special-cased, handle it
5851 according to its class. */
5852 case tcc_unary:
5853 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
5854 post_p, is_gimple_val, fb_rvalue);
5855 break;
5856
5857 case tcc_binary:
5858 expr_2:
5859 {
5860 enum gimplify_status r0, r1;
5861
5862 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
5863 post_p, is_gimple_val, fb_rvalue);
5864 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
5865 post_p, is_gimple_val, fb_rvalue);
5866
5867 ret = MIN (r0, r1);
5868 break;
5869 }
5870
5871 case tcc_declaration:
5872 case tcc_constant:
5873 ret = GS_ALL_DONE;
5874 goto dont_recalculate;
5875
5876 default:
5877 gcc_assert (TREE_CODE (*expr_p) == TRUTH_AND_EXPR
5878 || TREE_CODE (*expr_p) == TRUTH_OR_EXPR
5879 || TREE_CODE (*expr_p) == TRUTH_XOR_EXPR);
5880 goto expr_2;
5881 }
5882
5883 recalculate_side_effects (*expr_p);
5884 dont_recalculate:
5885 break;
5886 }
5887
5888 /* If we replaced *expr_p, gimplify again. */
5889 if (ret == GS_OK && (*expr_p == NULL || *expr_p == save_expr))
5890 ret = GS_ALL_DONE;
5891 }
5892 while (ret == GS_OK);
5893
5894 /* If we encountered an error_mark somewhere nested inside, either
5895 stub out the statement or propagate the error back out. */
5896 if (ret == GS_ERROR)
5897 {
5898 if (is_statement)
5899 *expr_p = NULL;
5900 goto out;
5901 }
5902
5903 /* GS_UNHANDLED was only valid as a return value from the langhook,
5904 which we handled. Make sure it doesn't escape from any other context. */
5905 gcc_assert (ret != GS_UNHANDLED);
5906
5907 if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
5908 {
5909 /* We aren't looking for a value, and we don't have a valid
5910 statement. If it doesn't have side-effects, throw it away. */
5911 if (!TREE_SIDE_EFFECTS (*expr_p))
5912 *expr_p = NULL;
5913 else if (!TREE_THIS_VOLATILE (*expr_p))
5914 {
5915 /* This is probably a _REF that contains something nested that
5916 has side effects. Recurse through the operands to find it. */
5917 enum tree_code code = TREE_CODE (*expr_p);
5918
5919 switch (code)
5920 {
5921 case COMPONENT_REF:
5922 case REALPART_EXPR:
5923 case IMAGPART_EXPR:
5924 case VIEW_CONVERT_EXPR:
5925 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
5926 gimple_test_f, fallback);
5927 break;
5928
5929 case ARRAY_REF:
5930 case ARRAY_RANGE_REF:
5931 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
5932 gimple_test_f, fallback);
5933 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
5934 gimple_test_f, fallback);
5935 break;
5936
5937 default:
5938 /* Anything else with side-effects must be converted to
5939 a valid statement before we get here. */
5940 gcc_unreachable ();
5941 }
5942
5943 *expr_p = NULL;
5944 }
5945 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
5946 && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode)
5947 {
5948 /* Historically, the compiler has treated a bare reference
5949 to a non-BLKmode volatile lvalue as forcing a load. */
5950 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
5951 /* Normally, we do not want to create a temporary for a
5952 TREE_ADDRESSABLE type because such a type should not be
5953 copied by bitwise-assignment. However, we make an
5954 exception here, as all we are doing here is ensuring that
5955 we read the bytes that make up the type. We use
5956 create_tmp_var_raw because create_tmp_var will abort when
5957 given a TREE_ADDRESSABLE type. */
5958 tree tmp = create_tmp_var_raw (type, "vol");
5959 gimple_add_tmp_var (tmp);
5960 *expr_p = build2 (GIMPLE_MODIFY_STMT, type, tmp, *expr_p);
5961 }
5962 else
5963 /* We can't do anything useful with a volatile reference to
5964 an incomplete type, so just throw it away. Likewise for
5965 a BLKmode type, since any implicit inner load should
5966 already have been turned into an explicit one by the
5967 gimplification process. */
5968 *expr_p = NULL;
5969 }
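
/* Added illustration: the volatile handling just above means that a
   statement consisting of nothing but a volatile read, e.g.

       volatile int x;
       ...
       x;

   is kept as a load into a temporary (named "vol" above) instead of
   being discarded, preserving the access to the volatile object.  */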
5970
5971 /* If we are gimplifying at the statement level, we're done. Tack
5972 everything together and replace the original statement with the
5973 gimplified form. */
5974 if (fallback == fb_none || is_statement)
5975 {
5976 if (internal_pre || internal_post)
5977 {
5978 append_to_statement_list (*expr_p, &internal_pre);
5979 append_to_statement_list (internal_post, &internal_pre);
5980 annotate_all_with_locus (&internal_pre, input_location);
5981 *expr_p = internal_pre;
5982 }
5983 else if (!*expr_p)
5984 ;
5985 else if (TREE_CODE (*expr_p) == STATEMENT_LIST)
5986 annotate_all_with_locus (expr_p, input_location);
5987 else
5988 annotate_one_with_locus (*expr_p, input_location);
5989 goto out;
5990 }
5991
5992 /* Otherwise we're gimplifying a subexpression, so the resulting value is
5993 interesting. */
5994
5995 /* If it's sufficiently simple already, we're done. Unless we are
5996 handling some post-effects internally; if that's the case, we need to
5997 copy into a temp before adding the post-effects to the tree. */
5998 if (!internal_post && (*gimple_test_f) (*expr_p))
5999 goto out;
6000
6001 /* Otherwise, we need to create a new temporary for the gimplified
6002 expression. */
6003
6004 /* We can't return an lvalue if we have an internal postqueue. The
6005 object the lvalue refers to would (probably) be modified by the
6006 postqueue; we need to copy the value out first, which means an
6007 rvalue. */
6008 if ((fallback & fb_lvalue) && !internal_post
6009 && is_gimple_addressable (*expr_p))
6010 {
6011 /* An lvalue will do. Take the address of the expression, store it
6012 in a temporary, and replace the expression with an INDIRECT_REF of
6013 that temporary. */
6014 tmp = build_fold_addr_expr (*expr_p);
6015 gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
6016 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (tmp)), tmp);
6017 }
6018 else if ((fallback & fb_rvalue) && is_gimple_formal_tmp_rhs (*expr_p))
6019 {
6020 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
6021
6022 /* An rvalue will do. Assign the gimplified expression into a new
6023 temporary TMP and replace the original expression with TMP. */
6024
6025 if (internal_post || (fallback & fb_lvalue))
6026 /* The postqueue might change the value of the expression between
6027 the initialization and use of the temporary, so we can't use a
6028 formal temp. FIXME do we care? */
6029 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
6030 else
6031 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
6032
6033 if (TREE_CODE (*expr_p) != SSA_NAME)
6034 DECL_GIMPLE_FORMAL_TEMP_P (*expr_p) = 1;
6035 }
6036 else
6037 {
6038 #ifdef ENABLE_CHECKING
6039 if (!(fallback & fb_mayfail))
6040 {
6041 fprintf (stderr, "gimplification failed:\n");
6042 print_generic_expr (stderr, *expr_p, 0);
6043 debug_tree (*expr_p);
6044 internal_error ("gimplification failed");
6045 }
6046 #endif
6047 gcc_assert (fallback & fb_mayfail);
6048 /* If this is an asm statement, and the user asked for the
6049 impossible, don't die. Fail and let gimplify_asm_expr
6050 issue an error. */
6051 ret = GS_ERROR;
6052 goto out;
6053 }
6054
6055 /* Make sure the temporary matches our predicate. */
6056 gcc_assert ((*gimple_test_f) (*expr_p));
6057
6058 if (internal_post)
6059 {
6060 annotate_all_with_locus (&internal_post, input_location);
6061 append_to_statement_list (internal_post, pre_p);
6062 }
6063
6064 out:
6065 input_location = saved_location;
6066 return ret;
6067 }
6068
6069 /* Look through TYPE for variable-sized objects and gimplify each such
6070 size that we find. Add to LIST_P any statements generated. */
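
/* Illustrative example (added; not original documentation): for a C99
   variable-length array such as

       void f (int n)
       {
         double a[n];
         ...
       }

   TYPE_SIZE and TYPE_SIZE_UNIT of the array type are non-constant
   expressions involving N.  Gimplifying them here evaluates each size
   into a temporary exactly once, so later uses of the type see simple
   GIMPLE values rather than arbitrary expressions.  */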
6071
6072 void
6073 gimplify_type_sizes (tree type, tree *list_p)
6074 {
6075 tree field, t;
6076
6077 if (type == NULL || type == error_mark_node)
6078 return;
6079
6080 /* We first do the main variant, then copy into any other variants. */
6081 type = TYPE_MAIN_VARIANT (type);
6082
6083 /* Avoid infinite recursion. */
6084 if (TYPE_SIZES_GIMPLIFIED (type))
6085 return;
6086
6087 TYPE_SIZES_GIMPLIFIED (type) = 1;
6088
6089 switch (TREE_CODE (type))
6090 {
6091 case INTEGER_TYPE:
6092 case ENUMERAL_TYPE:
6093 case BOOLEAN_TYPE:
6094 case REAL_TYPE:
6095 gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
6096 gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);
6097
6098 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6099 {
6100 TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
6101 TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
6102 }
6103 break;
6104
6105 case ARRAY_TYPE:
6106 /* These types may not have declarations, so handle them here. */
6107 gimplify_type_sizes (TREE_TYPE (type), list_p);
6108 gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
6109 break;
6110
6111 case RECORD_TYPE:
6112 case UNION_TYPE:
6113 case QUAL_UNION_TYPE:
6114 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
6115 if (TREE_CODE (field) == FIELD_DECL)
6116 {
6117 gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
6118 gimplify_type_sizes (TREE_TYPE (field), list_p);
6119 }
6120 break;
6121
6122 case POINTER_TYPE:
6123 case REFERENCE_TYPE:
6124 /* We used to recurse on the pointed-to type here, which turned out to
6125 be incorrect because its definition might refer to variables not
6126 yet initialized at this point if a forward declaration is involved.
6127
6128 It was actually useful for anonymous pointed-to types to ensure
6129 that the sizes evaluation dominates every possible later use of the
6130 values. Restricting to such types here would be safe since there
6131 is no possible forward declaration around, but would introduce an
6132 undesirable middle-end semantic to anonymity. We then defer to
6133 front-ends the responsibility of ensuring that the sizes are
6134 evaluated both early and late enough, e.g. by attaching artificial
6135 type declarations to the tree. */
6136 break;
6137
6138 default:
6139 break;
6140 }
6141
6142 gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
6143 gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);
6144
6145 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6146 {
6147 TYPE_SIZE (t) = TYPE_SIZE (type);
6148 TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
6149 TYPE_SIZES_GIMPLIFIED (t) = 1;
6150 }
6151 }
6152
6153 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
6154 a size or position, has had all of its SAVE_EXPRs evaluated.
6155 We add any required statements to STMT_P. */
6156
6157 void
6158 gimplify_one_sizepos (tree *expr_p, tree *stmt_p)
6159 {
6160 tree type, expr = *expr_p;
6161
6162 /* We don't do anything if the value isn't there, is constant, or contains
6163 a PLACEHOLDER_EXPR. We also don't want to do anything if it's already
6164 a VAR_DECL. If it's a VAR_DECL from another function, the gimplifier
6165 will want to replace it with a new variable, but that would cause problems
6166 if this type is from outside the function, so it is OK to leave it here. */
6167 if (expr == NULL_TREE || TREE_CONSTANT (expr)
6168 || TREE_CODE (expr) == VAR_DECL
6169 || CONTAINS_PLACEHOLDER_P (expr))
6170 return;
6171
6172 type = TREE_TYPE (expr);
6173 *expr_p = unshare_expr (expr);
6174
6175 gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue);
6176 expr = *expr_p;
6177
6178 /* Verify that we have an exact type match with the original expression.
6179 In particular, we do not wish to drop a "sizetype" in favour of a
6180 type of similar dimensions. We don't want to pollute the generic
6181 type-stripping code with this knowledge because it doesn't matter
6182 for the bulk of GENERIC/GIMPLE. It only matters that TYPE_SIZE_UNIT
6183 and friends retain their "sizetype-ness". */
6184 if (TREE_TYPE (expr) != type
6185 && TREE_CODE (type) == INTEGER_TYPE
6186 && TYPE_IS_SIZETYPE (type))
6187 {
6188 tree tmp;
6189
6190 *expr_p = create_tmp_var (type, NULL);
6191 tmp = build1 (NOP_EXPR, type, expr);
6192 tmp = build2 (GIMPLE_MODIFY_STMT, type, *expr_p, tmp);
6193 if (EXPR_HAS_LOCATION (expr))
6194 SET_EXPR_LOCUS (tmp, EXPR_LOCUS (expr));
6195 else
6196 SET_EXPR_LOCATION (tmp, input_location);
6197
6198 gimplify_and_add (tmp, stmt_p);
6199 }
6200 }
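
/* Added note: the temporary created above amounts to emitting

       <sizetype> tmp;
       tmp = (<sizetype>) <gimplified size expression>;

   and using TMP as the size from then on, so TYPE_SIZE_UNIT and friends
   keep their sizetype-ness even when the gimplified expression ends up
   in a variable of a merely similar integer type.  */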
6201 \f
6202 #ifdef ENABLE_CHECKING
6203 /* Compare types A and B for a "close enough" match. */
6204
6205 static bool
6206 cpt_same_type (tree a, tree b)
6207 {
6208 if (lang_hooks.types_compatible_p (a, b))
6209 return true;
6210
6211 /* ??? The C++ FE decomposes METHOD_TYPES to FUNCTION_TYPES and doesn't
6212 link them together. This routine is intended to catch type errors
6213 that will affect the optimizers, and the optimizers don't add new
6214 dereferences of function pointers, so ignore it. */
6215 if ((TREE_CODE (a) == FUNCTION_TYPE || TREE_CODE (a) == METHOD_TYPE)
6216 && (TREE_CODE (b) == FUNCTION_TYPE || TREE_CODE (b) == METHOD_TYPE))
6217 return true;
6218
6219 /* ??? The C FE pushes type qualifiers after the fact into the type of
6220 the element from the type of the array. See build_unary_op's handling
6221 of ADDR_EXPR. This seems wrong -- if we were going to do this, we
6222 should have done it when creating the variable in the first place.
6223 Alternately, why aren't the two array types made variants? */
6224 if (TREE_CODE (a) == ARRAY_TYPE && TREE_CODE (b) == ARRAY_TYPE)
6225 return cpt_same_type (TREE_TYPE (a), TREE_TYPE (b));
6226
6227 /* And because of those, we have to recurse down through pointers. */
6228 if (POINTER_TYPE_P (a) && POINTER_TYPE_P (b))
6229 return cpt_same_type (TREE_TYPE (a), TREE_TYPE (b));
6230
6231 return false;
6232 }
6233
6234 /* Check for some cases of the front end missing cast expressions.
6235 The type of a dereference should correspond to the pointer type;
6236 similarly the type of an address should match its object. */
6237
6238 static tree
6239 check_pointer_types_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
6240 void *data ATTRIBUTE_UNUSED)
6241 {
6242 tree t = *tp;
6243 tree ptype, otype, dtype;
6244
6245 switch (TREE_CODE (t))
6246 {
6247 case INDIRECT_REF:
6248 case ARRAY_REF:
6249 otype = TREE_TYPE (t);
6250 ptype = TREE_TYPE (TREE_OPERAND (t, 0));
6251 dtype = TREE_TYPE (ptype);
6252 gcc_assert (cpt_same_type (otype, dtype));
6253 break;
6254
6255 case ADDR_EXPR:
6256 ptype = TREE_TYPE (t);
6257 otype = TREE_TYPE (TREE_OPERAND (t, 0));
6258 dtype = TREE_TYPE (ptype);
6259 if (!cpt_same_type (otype, dtype))
6260 {
6261 /* &array is allowed to produce a pointer to the element, rather than
6262 a pointer to the array type. We must allow this in order to
6263 properly represent assigning the address of an array in C into
6264 a pointer to the element type. */
6265 gcc_assert (TREE_CODE (otype) == ARRAY_TYPE
6266 && POINTER_TYPE_P (ptype)
6267 && cpt_same_type (TREE_TYPE (otype), dtype));
6268 break;
6269 }
6270 break;
6271
6272 default:
6273 return NULL_TREE;
6274 }
6275
6276
6277 return NULL_TREE;
6278 }
6279 #endif
6280
6281 /* Gimplify the body of statements pointed to by BODY_P. FNDECL is the
6282 function decl containing BODY. */
6283
6284 void
6285 gimplify_body (tree *body_p, tree fndecl, bool do_parms)
6286 {
6287 location_t saved_location = input_location;
6288 tree body, parm_stmts;
6289
6290 timevar_push (TV_TREE_GIMPLIFY);
6291
6292 gcc_assert (gimplify_ctxp == NULL);
6293 push_gimplify_context ();
6294
6295 /* Unshare most shared trees in the body and in that of any nested functions.
6296 It would seem we don't have to do this for nested functions because
6297 they are supposed to be output and then the outer function gimplified
6298 first, but the g++ front end doesn't always do it that way. */
6299 unshare_body (body_p, fndecl);
6300 unvisit_body (body_p, fndecl);
6301
6302 /* Make sure input_location isn't set to something weird. */
6303 input_location = DECL_SOURCE_LOCATION (fndecl);
6304
6305 /* Resolve callee-copies. This has to be done before processing
6306 the body so that DECL_VALUE_EXPR gets processed correctly. */
6307 parm_stmts = do_parms ? gimplify_parameters () : NULL;
6308
6309 /* Gimplify the function's body. */
6310 gimplify_stmt (body_p);
6311 body = *body_p;
6312
6313 if (!body)
6314 body = alloc_stmt_list ();
6315 else if (TREE_CODE (body) == STATEMENT_LIST)
6316 {
6317 tree t = expr_only (*body_p);
6318 if (t)
6319 body = t;
6320 }
6321
6322 /* If there isn't an outer BIND_EXPR, add one. */
6323 if (TREE_CODE (body) != BIND_EXPR)
6324 {
6325 tree b = build3 (BIND_EXPR, void_type_node, NULL_TREE,
6326 NULL_TREE, NULL_TREE);
6327 TREE_SIDE_EFFECTS (b) = 1;
6328 append_to_statement_list_force (body, &BIND_EXPR_BODY (b));
6329 body = b;
6330 }
6331
6332 /* If we had callee-copies statements, insert them at the beginning
6333 of the function. */
6334 if (parm_stmts)
6335 {
6336 append_to_statement_list_force (BIND_EXPR_BODY (body), &parm_stmts);
6337 BIND_EXPR_BODY (body) = parm_stmts;
6338 }
6339
6340 /* Unshare again, in case gimplification was sloppy. */
6341 unshare_all_trees (body);
6342
6343 *body_p = body;
6344
6345 pop_gimplify_context (body);
6346 gcc_assert (gimplify_ctxp == NULL);
6347
6348 #ifdef ENABLE_CHECKING
6349 walk_tree (body_p, check_pointer_types_r, NULL, NULL);
6350 #endif
6351
6352 timevar_pop (TV_TREE_GIMPLIFY);
6353 input_location = saved_location;
6354 }
6355
6356 /* Entry point to the gimplification pass. FNDECL is the FUNCTION_DECL
6357 node for the function we want to gimplify. */
6358
6359 void
6360 gimplify_function_tree (tree fndecl)
6361 {
6362 tree oldfn, parm, ret;
6363
6364 oldfn = current_function_decl;
6365 current_function_decl = fndecl;
6366 cfun = DECL_STRUCT_FUNCTION (fndecl);
6367 if (cfun == NULL)
6368 allocate_struct_function (fndecl);
6369
6370 for (parm = DECL_ARGUMENTS (fndecl); parm ; parm = TREE_CHAIN (parm))
6371 {
6372 /* Preliminarily mark non-addressed complex variables as eligible
6373 for promotion to gimple registers. We'll transform their uses
6374 as we find them. */
6375 if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
6376 && !TREE_THIS_VOLATILE (parm)
6377 && !needs_to_live_in_memory (parm))
6378 DECL_COMPLEX_GIMPLE_REG_P (parm) = 1;
6379 }
6380
6381 ret = DECL_RESULT (fndecl);
6382 if (TREE_CODE (TREE_TYPE (ret)) == COMPLEX_TYPE
6383 && !needs_to_live_in_memory (ret))
6384 DECL_COMPLEX_GIMPLE_REG_P (ret) = 1;
6385
6386 gimplify_body (&DECL_SAVED_TREE (fndecl), fndecl, true);
6387
6388 /* If we're instrumenting function entry/exit, then prepend the call to
6389 the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
6390 catch the exit hook. */
6391 /* ??? Add some way to ignore exceptions for this TFE. */
6392 if (flag_instrument_function_entry_exit
6393 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl))
6394 {
6395 tree tf, x, bind;
6396
6397 tf = build2 (TRY_FINALLY_EXPR, void_type_node, NULL, NULL);
6398 TREE_SIDE_EFFECTS (tf) = 1;
6399 x = DECL_SAVED_TREE (fndecl);
6400 append_to_statement_list (x, &TREE_OPERAND (tf, 0));
6401 x = implicit_built_in_decls[BUILT_IN_PROFILE_FUNC_EXIT];
6402 x = build_function_call_expr (x, NULL);
6403 append_to_statement_list (x, &TREE_OPERAND (tf, 1));
6404
6405 bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
6406 TREE_SIDE_EFFECTS (bind) = 1;
6407 x = implicit_built_in_decls[BUILT_IN_PROFILE_FUNC_ENTER];
6408 x = build_function_call_expr (x, NULL);
6409 append_to_statement_list (x, &BIND_EXPR_BODY (bind));
6410 append_to_statement_list (tf, &BIND_EXPR_BODY (bind));
6411
6412 DECL_SAVED_TREE (fndecl) = bind;
6413 }
6414
6415 cfun->gimplified = true;
6416 current_function_decl = oldfn;
6417 cfun = oldfn ? DECL_STRUCT_FUNCTION (oldfn) : NULL;
6418 }
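
/* Added illustration: with -finstrument-functions the body built above
   has the shape

       {
         <call BUILT_IN_PROFILE_FUNC_ENTER>;
         try
           {
             <original DECL_SAVED_TREE>
           }
         finally
           {
             <call BUILT_IN_PROFILE_FUNC_EXIT>;
           }
       }

   (a BIND_EXPR wrapping a TRY_FINALLY_EXPR), so the exit hook runs on
   every normal exit from the function.  */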
6419 \f
6420 /* Expand EXPR to a list of GIMPLE statements STMTS. If SIMPLE is true,
6421 force the result to be either an SSA_NAME or an invariant; otherwise
6422 just force it to be a GIMPLE rhs expression. If VAR is not NULL, make the
6423 base variable of the final destination be VAR if suitable. */
6424
6425 tree
6426 force_gimple_operand (tree expr, tree *stmts, bool simple, tree var)
6427 {
6428 tree t;
6429 enum gimplify_status ret;
6430 gimple_predicate gimple_test_f;
6431
6432 *stmts = NULL_TREE;
6433
6434 if (is_gimple_val (expr))
6435 return expr;
6436
6437 gimple_test_f = simple ? is_gimple_val : is_gimple_reg_rhs;
6438
6439 push_gimplify_context ();
6440 gimplify_ctxp->into_ssa = gimple_in_ssa_p (cfun);
6441
6442 if (var)
6443 expr = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (var), var, expr);
6444
6445 ret = gimplify_expr (&expr, stmts, NULL,
6446 gimple_test_f, fb_rvalue);
6447 gcc_assert (ret != GS_ERROR);
6448
6449 if (gimple_referenced_vars (cfun))
6450 {
6451 for (t = gimplify_ctxp->temps; t ; t = TREE_CHAIN (t))
6452 add_referenced_var (t);
6453 }
6454
6455 pop_gimplify_context (NULL);
6456
6457 return expr;
6458 }
6459
6460 /* Invoke force_gimple_operand on EXPR with parameters SIMPLE_P and VAR.
6461 If any statements are produced, emit them before BSI. */
6462
6463 tree
6464 force_gimple_operand_bsi (block_stmt_iterator *bsi, tree expr,
6465 bool simple_p, tree var)
6466 {
6467 tree stmts;
6468
6469 expr = force_gimple_operand (expr, &stmts, simple_p, var);
6470 if (stmts)
6471 bsi_insert_before (bsi, stmts, BSI_SAME_STMT);
6472
6473 return expr;
6474 }
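
/* Usage sketch (added; BASE, OFFSET and SUM are hypothetical locals of a
   caller): a pass that needs BASE + OFFSET as a GIMPLE value in front of
   the statement at BSI can write

       tree sum = fold_build2 (PLUS_EXPR, TREE_TYPE (base), base, offset);
       sum = force_gimple_operand_bsi (&bsi, sum, true, NULL_TREE);

   Any statements needed to compute the value are inserted before BSI and
   SUM is then a GIMPLE value suitable for building new statements.  */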
6475
6476 #include "gt-gimplify.h"