gcc/gimplify.c
1 /* Tree lowering pass. This pass converts the GENERIC functions-as-trees
2 tree representation into the GIMPLE form.
3 Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007
4 Free Software Foundation, Inc.
5 Major work done by Sebastian Pop <s.pop@laposte.net>,
6 Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.
7
8 This file is part of GCC.
9
10 GCC is free software; you can redistribute it and/or modify it under
11 the terms of the GNU General Public License as published by the Free
12 Software Foundation; either version 2, or (at your option) any later
13 version.
14
15 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
16 WARRANTY; without even the implied warranty of MERCHANTABILITY or
17 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 for more details.
19
20 You should have received a copy of the GNU General Public License
21 along with GCC; see the file COPYING. If not, write to the Free
22 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
23 02110-1301, USA. */
24
25 #include "config.h"
26 #include "system.h"
27 #include "coretypes.h"
28 #include "tm.h"
29 #include "tree.h"
30 #include "rtl.h"
31 #include "varray.h"
32 #include "tree-gimple.h"
33 #include "tree-inline.h"
34 #include "diagnostic.h"
35 #include "langhooks.h"
36 #include "langhooks-def.h"
37 #include "tree-flow.h"
38 #include "cgraph.h"
39 #include "timevar.h"
40 #include "except.h"
41 #include "hashtab.h"
42 #include "flags.h"
43 #include "real.h"
44 #include "function.h"
45 #include "output.h"
46 #include "expr.h"
47 #include "ggc.h"
48 #include "toplev.h"
49 #include "target.h"
50 #include "optabs.h"
51 #include "pointer-set.h"
52 #include "splay-tree.h"
53
54
55 enum gimplify_omp_var_data
56 {
57 GOVD_SEEN = 1,
58 GOVD_EXPLICIT = 2,
59 GOVD_SHARED = 4,
60 GOVD_PRIVATE = 8,
61 GOVD_FIRSTPRIVATE = 16,
62 GOVD_LASTPRIVATE = 32,
63 GOVD_REDUCTION = 64,
64 GOVD_LOCAL = 128,
65 GOVD_DEBUG_PRIVATE = 256,
66 GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
67 | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LOCAL)
68 };
69
70 struct gimplify_omp_ctx
71 {
72 struct gimplify_omp_ctx *outer_context;
73 splay_tree variables;
74 struct pointer_set_t *privatized_types;
75 location_t location;
76 enum omp_clause_default_kind default_kind;
77 bool is_parallel;
78 bool is_combined_parallel;
79 };
80
81 struct gimplify_ctx
82 {
83 struct gimplify_ctx *prev_context;
84
85 tree current_bind_expr;
86 tree temps;
87 tree conditional_cleanups;
88 tree exit_label;
89 tree return_temp;
90
91 VEC(tree,heap) *case_labels;
92 /* The formal temporary table. Should this be persistent? */
93 htab_t temp_htab;
94
95 int conditions;
96 bool save_stack;
97 bool into_ssa;
98 };
99
100 static struct gimplify_ctx *gimplify_ctxp;
101 static struct gimplify_omp_ctx *gimplify_omp_ctxp;
102
103
104
105 /* Formal (expression) temporary table handling: Multiple occurrences of
106 the same scalar expression are evaluated into the same temporary. */
107
108 typedef struct gimple_temp_hash_elt
109 {
110 tree val; /* Key */
111 tree temp; /* Value */
112 } elt_t;
113
114 /* Forward declarations. */
115 static enum gimplify_status gimplify_compound_expr (tree *, tree *, bool);
116 #ifdef ENABLE_CHECKING
117 static bool cpt_same_type (tree a, tree b);
118 #endif
119
120
121 /* Return a hash value for a formal temporary table entry. */
122
123 static hashval_t
124 gimple_tree_hash (const void *p)
125 {
126 tree t = ((const elt_t *) p)->val;
127 return iterative_hash_expr (t, 0);
128 }
129
130 /* Compare two formal temporary table entries. */
131
132 static int
133 gimple_tree_eq (const void *p1, const void *p2)
134 {
135 tree t1 = ((const elt_t *) p1)->val;
136 tree t2 = ((const elt_t *) p2)->val;
137 enum tree_code code = TREE_CODE (t1);
138
139 if (TREE_CODE (t2) != code
140 || TREE_TYPE (t1) != TREE_TYPE (t2))
141 return 0;
142
143 if (!operand_equal_p (t1, t2, 0))
144 return 0;
145
146 /* Only allow them to compare equal if they also hash equal; otherwise
147 results are nondeterministic, and we fail bootstrap comparison. */
148 gcc_assert (gimple_tree_hash (p1) == gimple_tree_hash (p2));
149
150 return 1;
151 }
152
153 /* Set up a context for the gimplifier. */
154
155 void
156 push_gimplify_context (void)
157 {
158 struct gimplify_ctx *c;
159
160 c = (struct gimplify_ctx *) xcalloc (1, sizeof (struct gimplify_ctx));
161 c->prev_context = gimplify_ctxp;
162 if (optimize)
163 c->temp_htab = htab_create (1000, gimple_tree_hash, gimple_tree_eq, free);
164
165 gimplify_ctxp = c;
166 }
167
168 /* Tear down a context for the gimplifier. If BODY is non-null, then
169 put the temporaries into the outer BIND_EXPR. Otherwise, put them
170 in the unexpanded_var_list. */
171
172 void
173 pop_gimplify_context (tree body)
174 {
175 struct gimplify_ctx *c = gimplify_ctxp;
176 tree t;
177
178 gcc_assert (c && !c->current_bind_expr);
179 gimplify_ctxp = c->prev_context;
180
181 for (t = c->temps; t ; t = TREE_CHAIN (t))
182 DECL_GIMPLE_FORMAL_TEMP_P (t) = 0;
183
184 if (body)
185 declare_vars (c->temps, body, false);
186 else
187 record_vars (c->temps);
188
189 if (optimize)
190 htab_delete (c->temp_htab);
191 free (c);
192 }
193
194 static void
195 gimple_push_bind_expr (tree bind)
196 {
197 TREE_CHAIN (bind) = gimplify_ctxp->current_bind_expr;
198 gimplify_ctxp->current_bind_expr = bind;
199 }
200
201 static void
202 gimple_pop_bind_expr (void)
203 {
204 gimplify_ctxp->current_bind_expr
205 = TREE_CHAIN (gimplify_ctxp->current_bind_expr);
206 }
207
208 tree
209 gimple_current_bind_expr (void)
210 {
211 return gimplify_ctxp->current_bind_expr;
212 }
213
214 /* Returns true iff there is a COND_EXPR between us and the innermost
215 CLEANUP_POINT_EXPR. This info is used by gimple_push_cleanup. */
216
217 static bool
218 gimple_conditional_context (void)
219 {
220 return gimplify_ctxp->conditions > 0;
221 }
222
223 /* Note that we've entered a COND_EXPR. */
224
225 static void
226 gimple_push_condition (void)
227 {
228 #ifdef ENABLE_CHECKING
229 if (gimplify_ctxp->conditions == 0)
230 gcc_assert (!gimplify_ctxp->conditional_cleanups);
231 #endif
232 ++(gimplify_ctxp->conditions);
233 }
234
235 /* Note that we've left a COND_EXPR. If we're back at unconditional scope
236 now, add any conditional cleanups we've seen to the prequeue. */
237
238 static void
239 gimple_pop_condition (tree *pre_p)
240 {
241 int conds = --(gimplify_ctxp->conditions);
242
243 gcc_assert (conds >= 0);
244 if (conds == 0)
245 {
246 append_to_statement_list (gimplify_ctxp->conditional_cleanups, pre_p);
247 gimplify_ctxp->conditional_cleanups = NULL_TREE;
248 }
249 }
250
251 /* A stable comparison routine for use with splay trees and DECLs. */
252
253 static int
254 splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
255 {
256 tree a = (tree) xa;
257 tree b = (tree) xb;
258
259 return DECL_UID (a) - DECL_UID (b);
260 }
261
262 /* Create a new omp construct that deals with variable remapping. */
263
264 static struct gimplify_omp_ctx *
265 new_omp_context (bool is_parallel, bool is_combined_parallel)
266 {
267 struct gimplify_omp_ctx *c;
268
269 c = XCNEW (struct gimplify_omp_ctx);
270 c->outer_context = gimplify_omp_ctxp;
271 c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
272 c->privatized_types = pointer_set_create ();
273 c->location = input_location;
274 c->is_parallel = is_parallel;
275 c->is_combined_parallel = is_combined_parallel;
276 c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
277
278 return c;
279 }
280
281 /* Destroy an omp construct that deals with variable remapping. */
282
283 static void
284 delete_omp_context (struct gimplify_omp_ctx *c)
285 {
286 splay_tree_delete (c->variables);
287 pointer_set_destroy (c->privatized_types);
288 XDELETE (c);
289 }
290
291 static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
292 static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);
293
294 /* A subroutine of append_to_statement_list{,_force}. T is not NULL. */
295
296 static void
297 append_to_statement_list_1 (tree t, tree *list_p)
298 {
299 tree list = *list_p;
300 tree_stmt_iterator i;
301
302 if (!list)
303 {
304 if (t && TREE_CODE (t) == STATEMENT_LIST)
305 {
306 *list_p = t;
307 return;
308 }
309 *list_p = list = alloc_stmt_list ();
310 }
311
312 i = tsi_last (list);
313 tsi_link_after (&i, t, TSI_CONTINUE_LINKING);
314 }
315
316 /* Add T to the end of the list container pointed to by LIST_P.
317 If T is an expression with no effects, it is ignored. */
318
319 void
320 append_to_statement_list (tree t, tree *list_p)
321 {
322 if (t && TREE_SIDE_EFFECTS (t))
323 append_to_statement_list_1 (t, list_p);
324 }
325
326 /* Similar, but the statement is always added, regardless of side effects. */
327
328 void
329 append_to_statement_list_force (tree t, tree *list_p)
330 {
331 if (t != NULL_TREE)
332 append_to_statement_list_1 (t, list_p);
333 }
334
335 /* Both gimplify the statement T and append it to LIST_P. */
336
337 void
338 gimplify_and_add (tree t, tree *list_p)
339 {
340 gimplify_stmt (&t);
341 append_to_statement_list (t, list_p);
342 }
343
344 /* Strip off a legitimate source ending from the input string NAME of
345 length LEN. Rather than having to know the names used by all of
346 our front ends, we strip off an ending of a period followed by
347 up to five characters. (Java uses ".class".) */
348
349 static inline void
350 remove_suffix (char *name, int len)
351 {
352 int i;
353
354 for (i = 2; i < 8 && len > i; i++)
355 {
356 if (name[len - i] == '.')
357 {
358 name[len - i] = '\0';
359 break;
360 }
361 }
362 }
363
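/* Standalone sketch, not part of gimplify.c: exercising the suffix
   stripping above on a local copy of the loop, so the ".class" example
   from the comment can be seen directly.  */
#include <stdio.h>
#include <string.h>

static void
strip_suffix (char *name, int len)
{
  int i;

  for (i = 2; i < 8 && len > i; i++)
    if (name[len - i] == '.')
      {
        name[len - i] = '\0';
        break;
      }
}

int
main (void)
{
  char a[] = "simple.class";
  char b[] = "expr.c";

  strip_suffix (a, strlen (a));
  strip_suffix (b, strlen (b));
  printf ("%s %s\n", a, b);    /* prints "simple expr" */
  return 0;
}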
364 /* Create a nameless artificial label and put it in the current function
365 context. Returns the newly created label. */
366
367 tree
368 create_artificial_label (void)
369 {
370 tree lab = build_decl (LABEL_DECL, NULL_TREE, void_type_node);
371
372 DECL_ARTIFICIAL (lab) = 1;
373 DECL_IGNORED_P (lab) = 1;
374 DECL_CONTEXT (lab) = current_function_decl;
375 return lab;
376 }
377
378 /* Subroutine for find_single_pointer_decl. */
379
380 static tree
381 find_single_pointer_decl_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
382 void *data)
383 {
384 tree *pdecl = (tree *) data;
385
386 if (DECL_P (*tp) && POINTER_TYPE_P (TREE_TYPE (*tp)))
387 {
388 if (*pdecl)
389 {
390 /* We already found a pointer decl; return anything other
391 than NULL_TREE to unwind from walk_tree signalling that
392 we have a duplicate. */
393 return *tp;
394 }
395 *pdecl = *tp;
396 }
397
398 return NULL_TREE;
399 }
400
401 /* Find the single DECL of pointer type in the tree T and return it.
402 If there are zero or more than one such DECLs, return NULL. */
403
404 static tree
405 find_single_pointer_decl (tree t)
406 {
407 tree decl = NULL_TREE;
408
409 if (walk_tree (&t, find_single_pointer_decl_1, &decl, NULL))
410 {
411 /* find_single_pointer_decl_1 returns a nonzero value, causing
412 walk_tree to return a nonzero value, to indicate that it
413 found more than one pointer DECL. */
414 return NULL_TREE;
415 }
416
417 return decl;
418 }
419
420 /* Create a new temporary name with PREFIX. Returns an identifier. */
421
422 static GTY(()) unsigned int tmp_var_id_num;
423
424 tree
425 create_tmp_var_name (const char *prefix)
426 {
427 char *tmp_name;
428
429 if (prefix)
430 {
431 char *preftmp = ASTRDUP (prefix);
432
433 remove_suffix (preftmp, strlen (preftmp));
434 prefix = preftmp;
435 }
436
437 ASM_FORMAT_PRIVATE_NAME (tmp_name, prefix ? prefix : "T", tmp_var_id_num++);
438 return get_identifier (tmp_name);
439 }
440
441
442 /* Create a new temporary variable declaration of type TYPE.
443 Does NOT push it into the current binding. */
444
445 tree
446 create_tmp_var_raw (tree type, const char *prefix)
447 {
448 tree tmp_var;
449 tree new_type;
450
451 /* Make the type of the variable writable. */
452 new_type = build_type_variant (type, 0, 0);
453 TYPE_ATTRIBUTES (new_type) = TYPE_ATTRIBUTES (type);
454
455 tmp_var = build_decl (VAR_DECL, prefix ? create_tmp_var_name (prefix) : NULL,
456 type);
457
458 /* The variable was declared by the compiler. */
459 DECL_ARTIFICIAL (tmp_var) = 1;
460 /* And we don't want debug info for it. */
461 DECL_IGNORED_P (tmp_var) = 1;
462
463 /* Make the variable writable. */
464 TREE_READONLY (tmp_var) = 0;
465
466 DECL_EXTERNAL (tmp_var) = 0;
467 TREE_STATIC (tmp_var) = 0;
468 TREE_USED (tmp_var) = 1;
469
470 return tmp_var;
471 }
472
473 /* Create a new temporary variable declaration of type TYPE. DOES push the
474 variable into the current binding. Further, assume that this is called
475 only from gimplification or optimization, at which point the creation of
476 certain types are bugs. */
477
478 tree
479 create_tmp_var (tree type, const char *prefix)
480 {
481 tree tmp_var;
482
483 /* We don't allow types that are addressable (meaning we can't make copies),
484 or incomplete. We also used to reject all variable-sized objects here,
485 but now support those for which a constant upper bound can be obtained.
486 The processing for variable sizes is performed in gimple_add_tmp_var,
487 the point at which it really matters; it may also be reached via paths
488 not going through this function, e.g. after direct calls to create_tmp_var_raw. */
489 gcc_assert (!TREE_ADDRESSABLE (type) && COMPLETE_TYPE_P (type));
490
491 tmp_var = create_tmp_var_raw (type, prefix);
492 gimple_add_tmp_var (tmp_var);
493 return tmp_var;
494 }
495
496 /* Given a tree, try to return a useful variable name that we can use
497 to prefix a temporary that is being assigned the value of the tree.
498 I.e., given <temp> = &A, return A. */
499
500 const char *
501 get_name (tree t)
502 {
503 tree stripped_decl;
504
505 stripped_decl = t;
506 STRIP_NOPS (stripped_decl);
507 if (DECL_P (stripped_decl) && DECL_NAME (stripped_decl))
508 return IDENTIFIER_POINTER (DECL_NAME (stripped_decl));
509 else
510 {
511 switch (TREE_CODE (stripped_decl))
512 {
513 case ADDR_EXPR:
514 return get_name (TREE_OPERAND (stripped_decl, 0));
515 default:
516 return NULL;
517 }
518 }
519 }
520
521 /* Create a temporary with a name derived from VAL. Subroutine of
522 lookup_tmp_var; nobody else should call this function. */
523
524 static inline tree
525 create_tmp_from_val (tree val)
526 {
527 return create_tmp_var (TYPE_MAIN_VARIANT (TREE_TYPE (val)), get_name (val));
528 }
529
530 /* Create a temporary to hold the value of VAL. If IS_FORMAL, try to reuse
531 an existing expression temporary. */
532
533 static tree
534 lookup_tmp_var (tree val, bool is_formal)
535 {
536 tree ret;
537
538 /* If not optimizing, never really reuse a temporary. local-alloc
539 won't allocate any variable that is used in more than one basic
540 block, which means it will go into memory, causing much extra
541 work in reload and final and poorer code generation, outweighing
542 the extra memory allocation here. */
543 if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
544 ret = create_tmp_from_val (val);
545 else
546 {
547 elt_t elt, *elt_p;
548 void **slot;
549
550 elt.val = val;
551 slot = htab_find_slot (gimplify_ctxp->temp_htab, (void *)&elt, INSERT);
552 if (*slot == NULL)
553 {
554 elt_p = XNEW (elt_t);
555 elt_p->val = val;
556 elt_p->temp = ret = create_tmp_from_val (val);
557 *slot = (void *) elt_p;
558 }
559 else
560 {
561 elt_p = (elt_t *) *slot;
562 ret = elt_p->temp;
563 }
564 }
565
566 if (is_formal)
567 DECL_GIMPLE_FORMAL_TEMP_P (ret) = 1;
568
569 return ret;
570 }
571
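/* Standalone sketch of the idea behind the formal temporary table,
   using a hypothetical fixed-size memo instead of GCC's htab: the
   first lookup of a key creates a temporary, later lookups of an
   equal key hand back the same one.  */
#include <stdio.h>
#include <string.h>

struct memo { const char *key; int temp; };
static struct memo table[16];
static int n_entries, next_temp;

static int
lookup_temp (const char *key)
{
  int i;

  for (i = 0; i < n_entries; i++)
    if (strcmp (table[i].key, key) == 0)
      return table[i].temp;          /* reuse the existing temporary */

  table[n_entries].key = key;
  table[n_entries].temp = next_temp++;
  return table[n_entries++].temp;
}

int
main (void)
{
  int t1 = lookup_temp ("a + b");
  int t2 = lookup_temp ("c * d");
  int t3 = lookup_temp ("a + b");

  printf ("%d %d %d\n", t1, t2, t3);   /* 0 1 0: the repeated key reuses temp 0 */
  return 0;
}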
572 /* Returns a formal temporary variable initialized with VAL. PRE_P is as
573 in gimplify_expr. Only use this function if:
574
575 1) The value of the unfactored expression represented by VAL will not
576 change between the initialization and use of the temporary, and
577 2) The temporary will not be otherwise modified.
578
579 For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
580 and #2 means it is inappropriate for && temps.
581
582 For other cases, use get_initialized_tmp_var instead. */
583
584 static tree
585 internal_get_tmp_var (tree val, tree *pre_p, tree *post_p, bool is_formal)
586 {
587 tree t, mod;
588
589 gimplify_expr (&val, pre_p, post_p, is_gimple_formal_tmp_rhs, fb_rvalue);
590
591 t = lookup_tmp_var (val, is_formal);
592
593 if (is_formal)
594 {
595 tree u = find_single_pointer_decl (val);
596
597 if (u && TREE_CODE (u) == VAR_DECL && DECL_BASED_ON_RESTRICT_P (u))
598 u = DECL_GET_RESTRICT_BASE (u);
599 if (u && TYPE_RESTRICT (TREE_TYPE (u)))
600 {
601 if (DECL_BASED_ON_RESTRICT_P (t))
602 gcc_assert (u == DECL_GET_RESTRICT_BASE (t));
603 else
604 {
605 DECL_BASED_ON_RESTRICT_P (t) = 1;
606 SET_DECL_RESTRICT_BASE (t, u);
607 }
608 }
609 }
610
611 if (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
612 || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
613 DECL_GIMPLE_REG_P (t) = 1;
614
615 mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));
616
617 if (EXPR_HAS_LOCATION (val))
618 SET_EXPR_LOCUS (mod, EXPR_LOCUS (val));
619 else
620 SET_EXPR_LOCATION (mod, input_location);
621
622 /* gimplify_modify_expr might want to reduce this further. */
623 gimplify_and_add (mod, pre_p);
624
625 /* If we're gimplifying into ssa, gimplify_modify_expr will have
626 given our temporary an ssa name. Find and return it. */
627 if (gimplify_ctxp->into_ssa)
628 t = TREE_OPERAND (mod, 0);
629
630 return t;
631 }
632
633 /* Returns a formal temporary variable initialized with VAL. PRE_P
634 points to a statement list where side-effects needed to compute VAL
635 should be stored. */
636
637 tree
638 get_formal_tmp_var (tree val, tree *pre_p)
639 {
640 return internal_get_tmp_var (val, pre_p, NULL, true);
641 }
642
643 /* Returns a temporary variable initialized with VAL. PRE_P and POST_P
644 are as in gimplify_expr. */
645
646 tree
647 get_initialized_tmp_var (tree val, tree *pre_p, tree *post_p)
648 {
649 return internal_get_tmp_var (val, pre_p, post_p, false);
650 }
651
652 /* Declares all the variables in VARS in SCOPE. If DEBUG_INFO is
653 true, generate debug info for them; otherwise don't. */
654
655 void
656 declare_vars (tree vars, tree scope, bool debug_info)
657 {
658 tree last = vars;
659 if (last)
660 {
661 tree temps, block;
662
663 /* C99 mode puts the default 'return 0;' for main outside the outer
664 braces. So drill down until we find an actual scope. */
665 while (TREE_CODE (scope) == COMPOUND_EXPR)
666 scope = TREE_OPERAND (scope, 0);
667
668 gcc_assert (TREE_CODE (scope) == BIND_EXPR);
669
670 temps = nreverse (last);
671
672 block = BIND_EXPR_BLOCK (scope);
673 if (!block || !debug_info)
674 {
675 TREE_CHAIN (last) = BIND_EXPR_VARS (scope);
676 BIND_EXPR_VARS (scope) = temps;
677 }
678 else
679 {
680 /* We need to attach the nodes both to the BIND_EXPR and to its
681 associated BLOCK for debugging purposes. The key point here
682 is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
683 is a subchain of the BIND_EXPR_VARS of the BIND_EXPR. */
684 if (BLOCK_VARS (block))
685 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
686 else
687 {
688 BIND_EXPR_VARS (scope) = chainon (BIND_EXPR_VARS (scope), temps);
689 BLOCK_VARS (block) = temps;
690 }
691 }
692 }
693 }
694
695 /* For VAR a VAR_DECL of variable size, try to find a constant upper bound
696 for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly. Abort if
697 no such upper bound can be obtained. */
698
699 static void
700 force_constant_size (tree var)
701 {
702 /* The only attempt we make is by querying the maximum size of objects
703 of the variable's type. */
704
705 HOST_WIDE_INT max_size;
706
707 gcc_assert (TREE_CODE (var) == VAR_DECL);
708
709 max_size = max_int_size_in_bytes (TREE_TYPE (var));
710
711 gcc_assert (max_size >= 0);
712
713 DECL_SIZE_UNIT (var)
714 = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
715 DECL_SIZE (var)
716 = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
717 }
718
719 void
720 gimple_add_tmp_var (tree tmp)
721 {
722 gcc_assert (!TREE_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));
723
724 /* Later processing assumes that the object size is constant, which might
725 not be true at this point. Force the use of a constant upper bound in
726 this case. */
727 if (!host_integerp (DECL_SIZE_UNIT (tmp), 1))
728 force_constant_size (tmp);
729
730 DECL_CONTEXT (tmp) = current_function_decl;
731 DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;
732
733 if (gimplify_ctxp)
734 {
735 TREE_CHAIN (tmp) = gimplify_ctxp->temps;
736 gimplify_ctxp->temps = tmp;
737
738 /* Mark temporaries local within the nearest enclosing parallel. */
739 if (gimplify_omp_ctxp)
740 {
741 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
742 while (ctx && !ctx->is_parallel)
743 ctx = ctx->outer_context;
744 if (ctx)
745 omp_add_variable (ctx, tmp, GOVD_LOCAL | GOVD_SEEN);
746 }
747 }
748 else if (cfun)
749 record_vars (tmp);
750 else
751 declare_vars (tmp, DECL_SAVED_TREE (current_function_decl), false);
752 }
753
754 /* Determines whether to assign a locus to the statement STMT. */
755
756 static bool
757 should_carry_locus_p (tree stmt)
758 {
759 /* Don't emit a line note for a label. We particularly don't want to
760 emit one for the break label, since it doesn't actually correspond
761 to the beginning of the loop/switch. */
762 if (TREE_CODE (stmt) == LABEL_EXPR)
763 return false;
764
765 /* Do not annotate empty statements, since it confuses gcov. */
766 if (!TREE_SIDE_EFFECTS (stmt))
767 return false;
768
769 return true;
770 }
771
772 static void
773 annotate_one_with_locus (tree t, location_t locus)
774 {
775 if (CAN_HAVE_LOCATION_P (t)
776 && ! EXPR_HAS_LOCATION (t) && should_carry_locus_p (t))
777 SET_EXPR_LOCATION (t, locus);
778 }
779
780 void
781 annotate_all_with_locus (tree *stmt_p, location_t locus)
782 {
783 tree_stmt_iterator i;
784
785 if (!*stmt_p)
786 return;
787
788 for (i = tsi_start (*stmt_p); !tsi_end_p (i); tsi_next (&i))
789 {
790 tree t = tsi_stmt (i);
791
792 /* Assuming we've already been gimplified, we shouldn't
793 see nested chaining constructs anymore. */
794 gcc_assert (TREE_CODE (t) != STATEMENT_LIST
795 && TREE_CODE (t) != COMPOUND_EXPR);
796
797 annotate_one_with_locus (t, locus);
798 }
799 }
800
801 /* Similar to copy_tree_r() but do not copy SAVE_EXPR or TARGET_EXPR nodes.
802 These nodes model computations that should only be done once. If we
803 were to unshare something like SAVE_EXPR(i++), the gimplification
804 process would create wrong code. */
805
806 static tree
807 mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
808 {
809 enum tree_code code = TREE_CODE (*tp);
810 /* Don't unshare types, decls, constants and SAVE_EXPR nodes. */
811 if (TREE_CODE_CLASS (code) == tcc_type
812 || TREE_CODE_CLASS (code) == tcc_declaration
813 || TREE_CODE_CLASS (code) == tcc_constant
814 || code == SAVE_EXPR || code == TARGET_EXPR
815 /* We can't do anything sensible with a BLOCK used as an expression,
816 but we also can't just die when we see it because of non-expression
817 uses. So just avert our eyes and cross our fingers. Silly Java. */
818 || code == BLOCK)
819 *walk_subtrees = 0;
820 else
821 {
822 gcc_assert (code != BIND_EXPR);
823 copy_tree_r (tp, walk_subtrees, data);
824 }
825
826 return NULL_TREE;
827 }
828
829 /* Callback for walk_tree to unshare most of the shared trees rooted at
830 *TP. If *TP has been visited already (i.e., TREE_VISITED (*TP) == 1),
831 then *TP is deep copied by calling copy_tree_r.
832
833 This unshares the same trees as copy_tree_r with the exception of
834 SAVE_EXPR nodes. These nodes model computations that should only be
835 done once. If we were to unshare something like SAVE_EXPR(i++), the
836 gimplification process would create wrong code. */
837
838 static tree
839 copy_if_shared_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
840 void *data ATTRIBUTE_UNUSED)
841 {
842 tree t = *tp;
843 enum tree_code code = TREE_CODE (t);
844
845 /* Skip types, decls, and constants. But we do want to look at their
846 types and the bounds of types. Mark them as visited so we properly
847 unmark their subtrees on the unmark pass. If we've already seen them,
848 don't look down further. */
849 if (TREE_CODE_CLASS (code) == tcc_type
850 || TREE_CODE_CLASS (code) == tcc_declaration
851 || TREE_CODE_CLASS (code) == tcc_constant)
852 {
853 if (TREE_VISITED (t))
854 *walk_subtrees = 0;
855 else
856 TREE_VISITED (t) = 1;
857 }
858
859 /* If this node has been visited already, unshare it and don't look
860 any deeper. */
861 else if (TREE_VISITED (t))
862 {
863 walk_tree (tp, mostly_copy_tree_r, NULL, NULL);
864 *walk_subtrees = 0;
865 }
866
867 /* Otherwise, mark the tree as visited and keep looking. */
868 else
869 TREE_VISITED (t) = 1;
870
871 return NULL_TREE;
872 }
873
874 static tree
875 unmark_visited_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
876 void *data ATTRIBUTE_UNUSED)
877 {
878 if (TREE_VISITED (*tp))
879 TREE_VISITED (*tp) = 0;
880 else
881 *walk_subtrees = 0;
882
883 return NULL_TREE;
884 }
885
886 /* Unshare all the trees in BODY_P, a pointer into the body of FNDECL, and the
887 bodies of any nested functions if we are unsharing the entire body of
888 FNDECL. */
889
890 static void
891 unshare_body (tree *body_p, tree fndecl)
892 {
893 struct cgraph_node *cgn = cgraph_node (fndecl);
894
895 walk_tree (body_p, copy_if_shared_r, NULL, NULL);
896 if (body_p == &DECL_SAVED_TREE (fndecl))
897 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
898 unshare_body (&DECL_SAVED_TREE (cgn->decl), cgn->decl);
899 }
900
901 /* Likewise, but mark all trees as not visited. */
902
903 static void
904 unvisit_body (tree *body_p, tree fndecl)
905 {
906 struct cgraph_node *cgn = cgraph_node (fndecl);
907
908 walk_tree (body_p, unmark_visited_r, NULL, NULL);
909 if (body_p == &DECL_SAVED_TREE (fndecl))
910 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
911 unvisit_body (&DECL_SAVED_TREE (cgn->decl), cgn->decl);
912 }
913
914 /* Unshare T and all the trees reached from T via TREE_CHAIN. */
915
916 static void
917 unshare_all_trees (tree t)
918 {
919 walk_tree (&t, copy_if_shared_r, NULL, NULL);
920 walk_tree (&t, unmark_visited_r, NULL, NULL);
921 }
922
923 /* Unconditionally make an unshared copy of EXPR. This is used when using
924 stored expressions which span multiple functions, such as BINFO_VTABLE,
925 as the normal unsharing process can't tell that they're shared. */
926
927 tree
928 unshare_expr (tree expr)
929 {
930 walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
931 return expr;
932 }
933
934 /* A terser interface for building a representation of an exception
935 specification. */
936
937 tree
938 gimple_build_eh_filter (tree body, tree allowed, tree failure)
939 {
940 tree t;
941
942 /* FIXME should the allowed types go in TREE_TYPE? */
943 t = build2 (EH_FILTER_EXPR, void_type_node, allowed, NULL_TREE);
944 append_to_statement_list (failure, &EH_FILTER_FAILURE (t));
945
946 t = build2 (TRY_CATCH_EXPR, void_type_node, NULL_TREE, t);
947 append_to_statement_list (body, &TREE_OPERAND (t, 0));
948
949 return t;
950 }
951
952 \f
953 /* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
954 contain statements and have a value. Assign its value to a temporary
955 and give it void_type_node. Returns the temporary, or NULL_TREE if
956 WRAPPER was already void. */
957
958 tree
959 voidify_wrapper_expr (tree wrapper, tree temp)
960 {
961 tree type = TREE_TYPE (wrapper);
962 if (type && !VOID_TYPE_P (type))
963 {
964 tree *p;
965
966 /* Set p to point to the body of the wrapper. Loop until we find
967 something that isn't a wrapper. */
968 for (p = &wrapper; p && *p; )
969 {
970 switch (TREE_CODE (*p))
971 {
972 case BIND_EXPR:
973 TREE_SIDE_EFFECTS (*p) = 1;
974 TREE_TYPE (*p) = void_type_node;
975 /* For a BIND_EXPR, the body is operand 1. */
976 p = &BIND_EXPR_BODY (*p);
977 break;
978
979 case CLEANUP_POINT_EXPR:
980 case TRY_FINALLY_EXPR:
981 case TRY_CATCH_EXPR:
982 TREE_SIDE_EFFECTS (*p) = 1;
983 TREE_TYPE (*p) = void_type_node;
984 p = &TREE_OPERAND (*p, 0);
985 break;
986
987 case STATEMENT_LIST:
988 {
989 tree_stmt_iterator i = tsi_last (*p);
990 TREE_SIDE_EFFECTS (*p) = 1;
991 TREE_TYPE (*p) = void_type_node;
992 p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
993 }
994 break;
995
996 case COMPOUND_EXPR:
997 /* Advance to the last statement. Set all container types to void. */
998 for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
999 {
1000 TREE_SIDE_EFFECTS (*p) = 1;
1001 TREE_TYPE (*p) = void_type_node;
1002 }
1003 break;
1004
1005 default:
1006 goto out;
1007 }
1008 }
1009
1010 out:
1011 if (p == NULL || IS_EMPTY_STMT (*p))
1012 temp = NULL_TREE;
1013 else if (temp)
1014 {
1015 /* The wrapper is on the RHS of an assignment that we're pushing
1016 down. */
1017 gcc_assert (TREE_CODE (temp) == INIT_EXPR
1018 || TREE_CODE (temp) == GIMPLE_MODIFY_STMT
1019 || TREE_CODE (temp) == MODIFY_EXPR);
1020 GENERIC_TREE_OPERAND (temp, 1) = *p;
1021 *p = temp;
1022 }
1023 else
1024 {
1025 temp = create_tmp_var (type, "retval");
1026 *p = build2 (INIT_EXPR, type, temp, *p);
1027 }
1028
1029 return temp;
1030 }
1031
1032 return NULL_TREE;
1033 }
1034
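/* Standalone illustration of the transformation described above, written
   with GNU C statement expressions (an assumption made only for this
   example): a wrapper used for its value has the pending assignment
   pushed down to its last value-producing statement, and the wrapper
   itself becomes void.  */
#include <stdio.h>

int
main (void)
{
  int x = 3, t;

  /* Before: the wrapper appears on the right-hand side of an assignment.  */
  t = ({ int y = x * 2; y + 1; });
  printf ("%d\n", t);              /* 7 */

  /* After voidification: the last statement carries the assignment.  */
  { int y = x * 2; t = y + 1; }
  printf ("%d\n", t);              /* 7 again */

  return 0;
}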
1035 /* Prepare calls to builtins to SAVE and RESTORE the stack as well as
1036 a temporary through which they communicate. */
1037
1038 static void
1039 build_stack_save_restore (tree *save, tree *restore)
1040 {
1041 tree save_call, tmp_var;
1042
1043 save_call =
1044 build_call_expr (implicit_built_in_decls[BUILT_IN_STACK_SAVE], 0);
1045 tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
1046
1047 *save = build_gimple_modify_stmt (tmp_var, save_call);
1048 *restore =
1049 build_call_expr (implicit_built_in_decls[BUILT_IN_STACK_RESTORE],
1050 1, tmp_var);
1051 }
1052
1053 /* Gimplify a BIND_EXPR. Just voidify and recurse. */
1054
1055 static enum gimplify_status
1056 gimplify_bind_expr (tree *expr_p, tree *pre_p)
1057 {
1058 tree bind_expr = *expr_p;
1059 bool old_save_stack = gimplify_ctxp->save_stack;
1060 tree t;
1061
1062 tree temp = voidify_wrapper_expr (bind_expr, NULL);
1063
1064 /* Mark variables seen in this bind expr. */
1065 for (t = BIND_EXPR_VARS (bind_expr); t ; t = TREE_CHAIN (t))
1066 {
1067 if (TREE_CODE (t) == VAR_DECL)
1068 {
1069 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
1070
1071 /* Mark variable as local. */
1072 if (ctx && !is_global_var (t)
1073 && (! DECL_SEEN_IN_BIND_EXPR_P (t)
1074 || splay_tree_lookup (ctx->variables,
1075 (splay_tree_key) t) == NULL))
1076 omp_add_variable (gimplify_omp_ctxp, t, GOVD_LOCAL | GOVD_SEEN);
1077
1078 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
1079 }
1080
1081 /* Preliminarily mark non-addressed complex variables as eligible
1082 for promotion to gimple registers. We'll transform their uses
1083 as we find them. */
1084 if ((TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
1085 || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
1086 && !TREE_THIS_VOLATILE (t)
1087 && (TREE_CODE (t) == VAR_DECL && !DECL_HARD_REGISTER (t))
1088 && !needs_to_live_in_memory (t))
1089 DECL_GIMPLE_REG_P (t) = 1;
1090 }
1091
1092 gimple_push_bind_expr (bind_expr);
1093 gimplify_ctxp->save_stack = false;
1094
1095 gimplify_to_stmt_list (&BIND_EXPR_BODY (bind_expr));
1096
1097 if (gimplify_ctxp->save_stack)
1098 {
1099 tree stack_save, stack_restore;
1100
1101 /* Save stack on entry and restore it on exit. Add a try_finally
1102 block to achieve this. Note that mudflap depends on the
1103 format of the emitted code: see mx_register_decls(). */
1104 build_stack_save_restore (&stack_save, &stack_restore);
1105
1106 t = build2 (TRY_FINALLY_EXPR, void_type_node,
1107 BIND_EXPR_BODY (bind_expr), NULL_TREE);
1108 append_to_statement_list (stack_restore, &TREE_OPERAND (t, 1));
1109
1110 BIND_EXPR_BODY (bind_expr) = NULL_TREE;
1111 append_to_statement_list (stack_save, &BIND_EXPR_BODY (bind_expr));
1112 append_to_statement_list (t, &BIND_EXPR_BODY (bind_expr));
1113 }
1114
1115 gimplify_ctxp->save_stack = old_save_stack;
1116 gimple_pop_bind_expr ();
1117
1118 if (temp)
1119 {
1120 *expr_p = temp;
1121 append_to_statement_list (bind_expr, pre_p);
1122 return GS_OK;
1123 }
1124 else
1125 return GS_ALL_DONE;
1126 }
1127
1128 /* Gimplify a RETURN_EXPR. If the expression to be returned is not a
1129 GIMPLE value, it is assigned to a new temporary and the statement is
1130 re-written to return the temporary.
1131
1132 PRE_P points to the list where side effects that must happen before
1133 STMT should be stored. */
1134
1135 static enum gimplify_status
1136 gimplify_return_expr (tree stmt, tree *pre_p)
1137 {
1138 tree ret_expr = TREE_OPERAND (stmt, 0);
1139 tree result_decl, result;
1140
1141 if (!ret_expr || TREE_CODE (ret_expr) == RESULT_DECL
1142 || ret_expr == error_mark_node)
1143 return GS_ALL_DONE;
1144
1145 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
1146 result_decl = NULL_TREE;
1147 else
1148 {
1149 result_decl = GENERIC_TREE_OPERAND (ret_expr, 0);
1150 if (TREE_CODE (result_decl) == INDIRECT_REF)
1151 /* See through a return by reference. */
1152 result_decl = TREE_OPERAND (result_decl, 0);
1153
1154 gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
1155 || TREE_CODE (ret_expr) == GIMPLE_MODIFY_STMT
1156 || TREE_CODE (ret_expr) == INIT_EXPR)
1157 && TREE_CODE (result_decl) == RESULT_DECL);
1158 }
1159
1160 /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
1161 Recall that aggregate_value_p is FALSE for any aggregate type that is
1162 returned in registers. If we're returning values in registers, then
1163 we don't want to extend the lifetime of the RESULT_DECL, particularly
1164 across another call. In addition, for those aggregates for which
1165 hard_function_value generates a PARALLEL, we'll die during normal
1166 expansion of structure assignments; there's special code in expand_return
1167 to handle this case that does not exist in expand_expr. */
1168 if (!result_decl
1169 || aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
1170 result = result_decl;
1171 else if (gimplify_ctxp->return_temp)
1172 result = gimplify_ctxp->return_temp;
1173 else
1174 {
1175 result = create_tmp_var (TREE_TYPE (result_decl), NULL);
1176 if (TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
1177 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
1178 DECL_GIMPLE_REG_P (result) = 1;
1179
1180 /* ??? With complex control flow (usually involving abnormal edges),
1181 we can wind up warning about an uninitialized value for this. Due
1182 to how this variable is constructed and initialized, this is never
1183 true. Give up and never warn. */
1184 TREE_NO_WARNING (result) = 1;
1185
1186 gimplify_ctxp->return_temp = result;
1187 }
1188
1189 /* Smash the lhs of the GIMPLE_MODIFY_STMT to the temporary we plan to use.
1190 Then gimplify the whole thing. */
1191 if (result != result_decl)
1192 GENERIC_TREE_OPERAND (ret_expr, 0) = result;
1193
1194 gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);
1195
1196 /* If we didn't use a temporary, then the result is just the result_decl.
1197 Otherwise we need a simple copy. This should already be gimple. */
1198 if (result == result_decl)
1199 ret_expr = result;
1200 else
1201 ret_expr = build_gimple_modify_stmt (result_decl, result);
1202 TREE_OPERAND (stmt, 0) = ret_expr;
1203
1204 return GS_ALL_DONE;
1205 }
1206
1207 /* Gimplifies a DECL_EXPR node *STMT_P by making any necessary allocation
1208 and initialization explicit. */
1209
1210 static enum gimplify_status
1211 gimplify_decl_expr (tree *stmt_p)
1212 {
1213 tree stmt = *stmt_p;
1214 tree decl = DECL_EXPR_DECL (stmt);
1215
1216 *stmt_p = NULL_TREE;
1217
1218 if (TREE_TYPE (decl) == error_mark_node)
1219 return GS_ERROR;
1220
1221 if ((TREE_CODE (decl) == TYPE_DECL
1222 || TREE_CODE (decl) == VAR_DECL)
1223 && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
1224 gimplify_type_sizes (TREE_TYPE (decl), stmt_p);
1225
1226 if (TREE_CODE (decl) == VAR_DECL && !DECL_EXTERNAL (decl))
1227 {
1228 tree init = DECL_INITIAL (decl);
1229
1230 if (TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1231 {
1232 /* This is a variable-sized decl. Simplify its size and mark it
1233 for deferred expansion. Note that mudflap depends on the format
1234 of the emitted code: see mx_register_decls(). */
1235 tree t, addr, ptr_type;
1236
1237 gimplify_one_sizepos (&DECL_SIZE (decl), stmt_p);
1238 gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), stmt_p);
1239
1240 /* All occurrences of this decl in final gimplified code will be
1241 replaced by indirection. Setting DECL_VALUE_EXPR does two
1242 things: First, it lets the rest of the gimplifier know what
1243 replacement to use. Second, it lets the debug info know
1244 where to find the value. */
1245 ptr_type = build_pointer_type (TREE_TYPE (decl));
1246 addr = create_tmp_var (ptr_type, get_name (decl));
1247 DECL_IGNORED_P (addr) = 0;
1248 t = build_fold_indirect_ref (addr);
1249 SET_DECL_VALUE_EXPR (decl, t);
1250 DECL_HAS_VALUE_EXPR_P (decl) = 1;
1251
1252 t = built_in_decls[BUILT_IN_ALLOCA];
1253 t = build_call_expr (t, 1, DECL_SIZE_UNIT (decl));
1254 t = fold_convert (ptr_type, t);
1255 t = build_gimple_modify_stmt (addr, t);
1256
1257 gimplify_and_add (t, stmt_p);
1258
1259 /* Indicate that we need to restore the stack level when the
1260 enclosing BIND_EXPR is exited. */
1261 gimplify_ctxp->save_stack = true;
1262 }
1263
1264 if (init && init != error_mark_node)
1265 {
1266 if (!TREE_STATIC (decl))
1267 {
1268 DECL_INITIAL (decl) = NULL_TREE;
1269 init = build2 (INIT_EXPR, void_type_node, decl, init);
1270 gimplify_and_add (init, stmt_p);
1271 }
1272 else
1273 /* We must still examine initializers for static variables
1274 as they may contain a label address. */
1275 walk_tree (&init, force_labels_r, NULL, NULL);
1276 }
1277
1278 /* Some front ends do not explicitly declare all anonymous
1279 artificial variables. We compensate here by declaring the
1280 variables, though it would be better if the front ends would
1281 explicitly declare them. */
1282 if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
1283 && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
1284 gimple_add_tmp_var (decl);
1285 }
1286
1287 return GS_ALL_DONE;
1288 }
1289
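/* Hand-written analogue of the variable-sized-decl lowering above, not
   compiler output: the object lives behind a pointer obtained from
   GCC's __builtin_alloca, and every use of the decl goes through that
   pointer, mirroring the DECL_VALUE_EXPR indirection.  */
#include <stdio.h>

static int
sum_first_last (int n)
{
  /* Source form:   int a[n];  a[0] = 1;  a[n - 1] = 2;  ...
     Lowered form:  addr = alloca (size); uses of the decl go through addr.  */
  int *a_addr = (int *) __builtin_alloca (n * sizeof (int));

  a_addr[0] = 1;
  a_addr[n - 1] = 2;
  return a_addr[0] + a_addr[n - 1];
}

int
main (void)
{
  printf ("%d\n", sum_first_last (8));   /* 3 */
  return 0;
}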
1290 /* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body
1291 and replacing the LOOP_EXPR with goto, but if the loop contains an
1292 EXIT_EXPR, we need to append a label for it to jump to. */
1293
1294 static enum gimplify_status
1295 gimplify_loop_expr (tree *expr_p, tree *pre_p)
1296 {
1297 tree saved_label = gimplify_ctxp->exit_label;
1298 tree start_label = build1 (LABEL_EXPR, void_type_node, NULL_TREE);
1299 tree jump_stmt = build_and_jump (&LABEL_EXPR_LABEL (start_label));
1300
1301 append_to_statement_list (start_label, pre_p);
1302
1303 gimplify_ctxp->exit_label = NULL_TREE;
1304
1305 gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);
1306
1307 if (gimplify_ctxp->exit_label)
1308 {
1309 append_to_statement_list (jump_stmt, pre_p);
1310 *expr_p = build1 (LABEL_EXPR, void_type_node, gimplify_ctxp->exit_label);
1311 }
1312 else
1313 *expr_p = jump_stmt;
1314
1315 gimplify_ctxp->exit_label = saved_label;
1316
1317 return GS_ALL_DONE;
1318 }
1319
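/* Hand-written picture of the shape a LOOP_EXPR is lowered to, for
   illustration only: a start label, the body, and a jump back; the
   exit label exists only because the body contained an exit test.  */
#include <stdio.h>

int
main (void)
{
  int i = 0;

 start:                      /* the start label emitted before the body */
  if (i >= 3)
    goto done;               /* what an EXIT_EXPR inside the body becomes */
  printf ("%d\n", i++);
  goto start;                /* the jump back appended after the body */

 done:                       /* gimplify_ctxp->exit_label, emitted only if used */
  return 0;
}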
1320 /* Compare two case labels. Because the front end should already have
1321 made sure that case ranges do not overlap, it is enough to only compare
1322 the CASE_LOW values of each case label. */
1323
1324 static int
1325 compare_case_labels (const void *p1, const void *p2)
1326 {
1327 tree case1 = *(tree *)p1;
1328 tree case2 = *(tree *)p2;
1329
1330 return tree_int_cst_compare (CASE_LOW (case1), CASE_LOW (case2));
1331 }
1332
1333 /* Sort the case labels in LABEL_VEC in place in ascending order. */
1334
1335 void
1336 sort_case_labels (tree label_vec)
1337 {
1338 size_t len = TREE_VEC_LENGTH (label_vec);
1339 tree default_case = TREE_VEC_ELT (label_vec, len - 1);
1340
1341 if (CASE_LOW (default_case))
1342 {
1343 size_t i;
1344
1345 /* The last label in the vector should be the default case
1346 but it is not. */
1347 for (i = 0; i < len; ++i)
1348 {
1349 tree t = TREE_VEC_ELT (label_vec, i);
1350 if (!CASE_LOW (t))
1351 {
1352 default_case = t;
1353 TREE_VEC_ELT (label_vec, i) = TREE_VEC_ELT (label_vec, len - 1);
1354 TREE_VEC_ELT (label_vec, len - 1) = default_case;
1355 break;
1356 }
1357 }
1358 }
1359
1360 qsort (&TREE_VEC_ELT (label_vec, 0), len - 1, sizeof (tree),
1361 compare_case_labels);
1362 }
1363
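/* Standalone sketch of the same qsort-plus-comparator pattern, applied
   to plain structs keyed on a "low" value (an analogue, not the tree
   API); sort_case_labels above additionally keeps the default label in
   the last slot and sorts only the entries before it.  */
#include <stdio.h>
#include <stdlib.h>

struct fake_case { int low; const char *name; };

static int
compare_fake_cases (const void *p1, const void *p2)
{
  const struct fake_case *c1 = (const struct fake_case *) p1;
  const struct fake_case *c2 = (const struct fake_case *) p2;

  return (c1->low > c2->low) - (c1->low < c2->low);
}

int
main (void)
{
  struct fake_case v[] = { { 7, "L7" }, { 2, "L2" }, { 5, "L5" } };

  qsort (v, 3, sizeof v[0], compare_fake_cases);
  printf ("%s %s %s\n", v[0].name, v[1].name, v[2].name);   /* L2 L5 L7 */
  return 0;
}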
1364 /* Gimplify a SWITCH_EXPR, and collect a TREE_VEC of the labels it can
1365 branch to. */
1366
1367 static enum gimplify_status
1368 gimplify_switch_expr (tree *expr_p, tree *pre_p)
1369 {
1370 tree switch_expr = *expr_p;
1371 enum gimplify_status ret;
1372
1373 ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL,
1374 is_gimple_val, fb_rvalue);
1375
1376 if (SWITCH_BODY (switch_expr))
1377 {
1378 VEC(tree,heap) *labels, *saved_labels;
1379 tree label_vec, default_case = NULL_TREE;
1380 size_t i, len;
1381
1382 /* If someone can be bothered to fill in the labels, they can
1383 be bothered to null out the body too. */
1384 gcc_assert (!SWITCH_LABELS (switch_expr));
1385
1386 saved_labels = gimplify_ctxp->case_labels;
1387 gimplify_ctxp->case_labels = VEC_alloc (tree, heap, 8);
1388
1389 gimplify_to_stmt_list (&SWITCH_BODY (switch_expr));
1390
1391 labels = gimplify_ctxp->case_labels;
1392 gimplify_ctxp->case_labels = saved_labels;
1393
1394 i = 0;
1395 while (i < VEC_length (tree, labels))
1396 {
1397 tree elt = VEC_index (tree, labels, i);
1398 tree low = CASE_LOW (elt);
1399 bool remove_element = FALSE;
1400
1401 if (low)
1402 {
1403 /* Discard empty ranges. */
1404 tree high = CASE_HIGH (elt);
1405 if (high && INT_CST_LT (high, low))
1406 remove_element = TRUE;
1407 }
1408 else
1409 {
1410 /* The default case must be the last label in the list. */
1411 gcc_assert (!default_case);
1412 default_case = elt;
1413 remove_element = TRUE;
1414 }
1415
1416 if (remove_element)
1417 VEC_ordered_remove (tree, labels, i);
1418 else
1419 i++;
1420 }
1421 len = i;
1422
1423 label_vec = make_tree_vec (len + 1);
1424 SWITCH_LABELS (*expr_p) = label_vec;
1425 append_to_statement_list (switch_expr, pre_p);
1426
1427 if (! default_case)
1428 {
1429 /* If the switch has no default label, add one, so that we jump
1430 around the switch body. */
1431 default_case = build3 (CASE_LABEL_EXPR, void_type_node, NULL_TREE,
1432 NULL_TREE, create_artificial_label ());
1433 append_to_statement_list (SWITCH_BODY (switch_expr), pre_p);
1434 *expr_p = build1 (LABEL_EXPR, void_type_node,
1435 CASE_LABEL (default_case));
1436 }
1437 else
1438 *expr_p = SWITCH_BODY (switch_expr);
1439
1440 for (i = 0; i < len; ++i)
1441 TREE_VEC_ELT (label_vec, i) = VEC_index (tree, labels, i);
1442 TREE_VEC_ELT (label_vec, len) = default_case;
1443
1444 VEC_free (tree, heap, labels);
1445
1446 sort_case_labels (label_vec);
1447
1448 SWITCH_BODY (switch_expr) = NULL;
1449 }
1450 else
1451 gcc_assert (SWITCH_LABELS (switch_expr));
1452
1453 return ret;
1454 }
1455
1456 static enum gimplify_status
1457 gimplify_case_label_expr (tree *expr_p)
1458 {
1459 tree expr = *expr_p;
1460 struct gimplify_ctx *ctxp;
1461
1462 /* Invalid OpenMP programs can play Duff's Device type games with
1463 #pragma omp parallel. At least in the C front end, we don't
1464 detect such invalid branches until after gimplification. */
1465 for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
1466 if (ctxp->case_labels)
1467 break;
1468
1469 VEC_safe_push (tree, heap, ctxp->case_labels, expr);
1470 *expr_p = build1 (LABEL_EXPR, void_type_node, CASE_LABEL (expr));
1471 return GS_ALL_DONE;
1472 }
1473
1474 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
1475 if necessary. */
1476
1477 tree
1478 build_and_jump (tree *label_p)
1479 {
1480 if (label_p == NULL)
1481 /* If there's nowhere to jump, just fall through. */
1482 return NULL_TREE;
1483
1484 if (*label_p == NULL_TREE)
1485 {
1486 tree label = create_artificial_label ();
1487 *label_p = label;
1488 }
1489
1490 return build1 (GOTO_EXPR, void_type_node, *label_p);
1491 }
1492
1493 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
1494 This also involves building a label to jump to and communicating it to
1495 gimplify_loop_expr through gimplify_ctxp->exit_label. */
1496
1497 static enum gimplify_status
1498 gimplify_exit_expr (tree *expr_p)
1499 {
1500 tree cond = TREE_OPERAND (*expr_p, 0);
1501 tree expr;
1502
1503 expr = build_and_jump (&gimplify_ctxp->exit_label);
1504 expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
1505 *expr_p = expr;
1506
1507 return GS_OK;
1508 }
1509
1510 /* A helper function to be called via walk_tree. Mark all labels under *TP
1511 as being forced. To be called for DECL_INITIAL of static variables. */
1512
1513 tree
1514 force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1515 {
1516 if (TYPE_P (*tp))
1517 *walk_subtrees = 0;
1518 if (TREE_CODE (*tp) == LABEL_DECL)
1519 FORCED_LABEL (*tp) = 1;
1520
1521 return NULL_TREE;
1522 }
1523
1524 /* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is
1525 different from its canonical type, wrap the whole thing inside a
1526 NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
1527 type.
1528
1529 The canonical type of a COMPONENT_REF is the type of the field being
1530 referenced--unless the field is a bit-field which can be read directly
1531 in a smaller mode, in which case the canonical type is the
1532 sign-appropriate type corresponding to that mode. */
1533
1534 static void
1535 canonicalize_component_ref (tree *expr_p)
1536 {
1537 tree expr = *expr_p;
1538 tree type;
1539
1540 gcc_assert (TREE_CODE (expr) == COMPONENT_REF);
1541
1542 if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
1543 type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
1544 else
1545 type = TREE_TYPE (TREE_OPERAND (expr, 1));
1546
1547 if (TREE_TYPE (expr) != type)
1548 {
1549 tree old_type = TREE_TYPE (expr);
1550
1551 /* Set the type of the COMPONENT_REF to the underlying type. */
1552 TREE_TYPE (expr) = type;
1553
1554 /* And wrap the whole thing inside a NOP_EXPR. */
1555 expr = build1 (NOP_EXPR, old_type, expr);
1556
1557 *expr_p = expr;
1558 }
1559 }
1560
1561 /* If a NOP conversion is changing a pointer to array of foo to a pointer
1562 to foo, embed that change in the ADDR_EXPR by converting
1563 T array[U];
1564 (T *)&array
1565 ==>
1566 &array[L]
1567 where L is the lower bound. For simplicity, only do this for constant
1568 lower bound. */
1569
1570 static void
1571 canonicalize_addr_expr (tree *expr_p)
1572 {
1573 tree expr = *expr_p;
1574 tree ctype = TREE_TYPE (expr);
1575 tree addr_expr = TREE_OPERAND (expr, 0);
1576 tree atype = TREE_TYPE (addr_expr);
1577 tree dctype, datype, ddatype, otype, obj_expr;
1578
1579 /* Both cast and addr_expr types should be pointers. */
1580 if (!POINTER_TYPE_P (ctype) || !POINTER_TYPE_P (atype))
1581 return;
1582
1583 /* The addr_expr type should be a pointer to an array. */
1584 datype = TREE_TYPE (atype);
1585 if (TREE_CODE (datype) != ARRAY_TYPE)
1586 return;
1587
1588 /* Both cast and addr_expr types should address the same object type. */
1589 dctype = TREE_TYPE (ctype);
1590 ddatype = TREE_TYPE (datype);
1591 if (!lang_hooks.types_compatible_p (ddatype, dctype))
1592 return;
1593
1594 /* The addr_expr and the object type should match. */
1595 obj_expr = TREE_OPERAND (addr_expr, 0);
1596 otype = TREE_TYPE (obj_expr);
1597 if (!lang_hooks.types_compatible_p (otype, datype))
1598 return;
1599
1600 /* The lower bound and element sizes must be constant. */
1601 if (!TYPE_SIZE_UNIT (dctype)
1602 || TREE_CODE (TYPE_SIZE_UNIT (dctype)) != INTEGER_CST
1603 || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
1604 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
1605 return;
1606
1607 /* All checks succeeded. Build a new node to merge the cast. */
1608 *expr_p = build4 (ARRAY_REF, dctype, obj_expr,
1609 TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
1610 TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
1611 size_binop (EXACT_DIV_EXPR, TYPE_SIZE_UNIT (dctype),
1612 size_int (TYPE_ALIGN_UNIT (dctype))));
1613 *expr_p = build1 (ADDR_EXPR, ctype, *expr_p);
1614 }
1615
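/* Standalone check, ordinary C for illustration: the two forms the
   transformation above equates both denote the first array element.  */
#include <stdio.h>

int
main (void)
{
  int array[4] = { 10, 20, 30, 40 };
  int *p = (int *) &array;     /* (T *)&array           */
  int *q = &array[0];          /* &array[L] with L == 0  */

  printf ("%d %d %d\n", *p, *q, p == q);   /* 10 10 1 */
  return 0;
}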
1616 /* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions
1617 underneath as appropriate. */
1618
1619 static enum gimplify_status
1620 gimplify_conversion (tree *expr_p)
1621 {
1622 gcc_assert (TREE_CODE (*expr_p) == NOP_EXPR
1623 || TREE_CODE (*expr_p) == CONVERT_EXPR);
1624
1625 /* Then strip away all but the outermost conversion. */
1626 STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));
1627
1628 /* And remove the outermost conversion if it's useless. */
1629 if (tree_ssa_useless_type_conversion (*expr_p))
1630 *expr_p = TREE_OPERAND (*expr_p, 0);
1631
1632 /* If we still have a conversion at the toplevel,
1633 then canonicalize some constructs. */
1634 if (TREE_CODE (*expr_p) == NOP_EXPR || TREE_CODE (*expr_p) == CONVERT_EXPR)
1635 {
1636 tree sub = TREE_OPERAND (*expr_p, 0);
1637
1638 /* If a NOP conversion is changing the type of a COMPONENT_REF
1639 expression, then canonicalize its type now in order to expose more
1640 redundant conversions. */
1641 if (TREE_CODE (sub) == COMPONENT_REF)
1642 canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));
1643
1644 /* If a NOP conversion is changing a pointer to array of foo
1645 to a pointer to foo, embed that change in the ADDR_EXPR. */
1646 else if (TREE_CODE (sub) == ADDR_EXPR)
1647 canonicalize_addr_expr (expr_p);
1648 }
1649
1650 return GS_OK;
1651 }
1652
1653 /* Gimplify a VAR_DECL or PARM_DECL. Returns GS_OK if we expanded a
1654 DECL_VALUE_EXPR, and it's worth re-examining things. */
1655
1656 static enum gimplify_status
1657 gimplify_var_or_parm_decl (tree *expr_p)
1658 {
1659 tree decl = *expr_p;
1660
1661 /* ??? If this is a local variable, and it has not been seen in any
1662 outer BIND_EXPR, then it's probably the result of a duplicate
1663 declaration, for which we've already issued an error. It would
1664 be really nice if the front end wouldn't leak these at all.
1665 Currently the only known culprit is C++ destructors, as seen
1666 in g++.old-deja/g++.jason/binding.C. */
1667 if (TREE_CODE (decl) == VAR_DECL
1668 && !DECL_SEEN_IN_BIND_EXPR_P (decl)
1669 && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
1670 && decl_function_context (decl) == current_function_decl)
1671 {
1672 gcc_assert (errorcount || sorrycount);
1673 return GS_ERROR;
1674 }
1675
1676 /* When within an OpenMP context, notice uses of variables. */
1677 if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
1678 return GS_ALL_DONE;
1679
1680 /* If the decl is an alias for another expression, substitute it now. */
1681 if (DECL_HAS_VALUE_EXPR_P (decl))
1682 {
1683 *expr_p = unshare_expr (DECL_VALUE_EXPR (decl));
1684 return GS_OK;
1685 }
1686
1687 return GS_ALL_DONE;
1688 }
1689
1690
1691 /* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
1692 node pointed to by EXPR_P.
1693
1694 compound_lval
1695 : min_lval '[' val ']'
1696 | min_lval '.' ID
1697 | compound_lval '[' val ']'
1698 | compound_lval '.' ID
1699
1700 This is not part of the original SIMPLE definition, which separates
1701 array and member references, but it seems reasonable to handle them
1702 together. Also, this way we don't run into problems with union
1703 aliasing; gcc requires that for accesses through a union to alias, the
1704 union reference must be explicit, which was not always the case when we
1705 were splitting up array and member refs.
1706
1707 PRE_P points to the list where side effects that must happen before
1708 *EXPR_P should be stored.
1709
1710 POST_P points to the list where side effects that must happen after
1711 *EXPR_P should be stored. */
1712
1713 static enum gimplify_status
1714 gimplify_compound_lval (tree *expr_p, tree *pre_p,
1715 tree *post_p, fallback_t fallback)
1716 {
1717 tree *p;
1718 VEC(tree,heap) *stack;
1719 enum gimplify_status ret = GS_OK, tret;
1720 int i;
1721
1722 /* Create a stack of the subexpressions so later we can walk them in
1723 order from inner to outer. */
1724 stack = VEC_alloc (tree, heap, 10);
1725
1726 /* We can handle anything that get_inner_reference can deal with. */
1727 for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
1728 {
1729 restart:
1730 /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs. */
1731 if (TREE_CODE (*p) == INDIRECT_REF)
1732 *p = fold_indirect_ref (*p);
1733
1734 if (handled_component_p (*p))
1735 ;
1736 /* Expand DECL_VALUE_EXPR now. In some cases that may expose
1737 additional COMPONENT_REFs. */
1738 else if ((TREE_CODE (*p) == VAR_DECL || TREE_CODE (*p) == PARM_DECL)
1739 && gimplify_var_or_parm_decl (p) == GS_OK)
1740 goto restart;
1741 else
1742 break;
1743
1744 VEC_safe_push (tree, heap, stack, *p);
1745 }
1746
1747 gcc_assert (VEC_length (tree, stack));
1748
1749 /* Now STACK is a stack of pointers to all the refs we've walked through
1750 and P points to the innermost expression.
1751
1752 Java requires that we elaborate nodes in source order. That
1753 means we must gimplify the inner expression followed by each of
1754 the indices, in order. But we can't gimplify the inner
1755 expression until we deal with any variable bounds, sizes, or
1756 positions in order to deal with PLACEHOLDER_EXPRs.
1757
1758 So we do this in three steps. First we deal with the annotations
1759 for any variables in the components, then we gimplify the base,
1760 then we gimplify any indices, from left to right. */
1761 for (i = VEC_length (tree, stack) - 1; i >= 0; i--)
1762 {
1763 tree t = VEC_index (tree, stack, i);
1764
1765 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
1766 {
1767 /* Gimplify the low bound and element type size and put them into
1768 the ARRAY_REF. If these values are set, they have already been
1769 gimplified. */
1770 if (!TREE_OPERAND (t, 2))
1771 {
1772 tree low = unshare_expr (array_ref_low_bound (t));
1773 if (!is_gimple_min_invariant (low))
1774 {
1775 TREE_OPERAND (t, 2) = low;
1776 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
1777 is_gimple_formal_tmp_reg, fb_rvalue);
1778 ret = MIN (ret, tret);
1779 }
1780 }
1781
1782 if (!TREE_OPERAND (t, 3))
1783 {
1784 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
1785 tree elmt_size = unshare_expr (array_ref_element_size (t));
1786 tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));
1787
1788 /* Divide the element size by the alignment of the element
1789 type (above). */
1790 elmt_size = size_binop (EXACT_DIV_EXPR, elmt_size, factor);
1791
1792 if (!is_gimple_min_invariant (elmt_size))
1793 {
1794 TREE_OPERAND (t, 3) = elmt_size;
1795 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
1796 is_gimple_formal_tmp_reg, fb_rvalue);
1797 ret = MIN (ret, tret);
1798 }
1799 }
1800 }
1801 else if (TREE_CODE (t) == COMPONENT_REF)
1802 {
1803 /* Set the field offset into T and gimplify it. */
1804 if (!TREE_OPERAND (t, 2))
1805 {
1806 tree offset = unshare_expr (component_ref_field_offset (t));
1807 tree field = TREE_OPERAND (t, 1);
1808 tree factor
1809 = size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);
1810
1811 /* Divide the offset by its alignment. */
1812 offset = size_binop (EXACT_DIV_EXPR, offset, factor);
1813
1814 if (!is_gimple_min_invariant (offset))
1815 {
1816 TREE_OPERAND (t, 2) = offset;
1817 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
1818 is_gimple_formal_tmp_reg, fb_rvalue);
1819 ret = MIN (ret, tret);
1820 }
1821 }
1822 }
1823 }
1824
1825 /* Step 2 is to gimplify the base expression. Make sure lvalue is set
1826 so as to match the min_lval predicate. Failure to do so may result
1827 in the creation of large aggregate temporaries. */
1828 tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
1829 fallback | fb_lvalue);
1830 ret = MIN (ret, tret);
1831
1832 /* And finally, the indices and operands to BIT_FIELD_REF. During this
1833 loop we also remove any useless conversions. */
1834 for (; VEC_length (tree, stack) > 0; )
1835 {
1836 tree t = VEC_pop (tree, stack);
1837
1838 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
1839 {
1840 /* Gimplify the dimension.
1841 Temporary fix for gcc.c-torture/execute/20040313-1.c.
1842 Gimplify non-constant array indices into a temporary
1843 variable.
1844 FIXME - The real fix is to gimplify post-modify
1845 expressions into a minimal gimple lvalue. However, that
1846 exposes bugs in alias analysis. The alias analyzer does
1847 not handle &PTR->FIELD very well. Will fix after the
1848 branch is merged into mainline (dnovillo 2004-05-03). */
1849 if (!is_gimple_min_invariant (TREE_OPERAND (t, 1)))
1850 {
1851 tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
1852 is_gimple_formal_tmp_reg, fb_rvalue);
1853 ret = MIN (ret, tret);
1854 }
1855 }
1856 else if (TREE_CODE (t) == BIT_FIELD_REF)
1857 {
1858 tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
1859 is_gimple_val, fb_rvalue);
1860 ret = MIN (ret, tret);
1861 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
1862 is_gimple_val, fb_rvalue);
1863 ret = MIN (ret, tret);
1864 }
1865
1866 STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));
1867
1868 /* The innermost expression P may have originally had TREE_SIDE_EFFECTS
1869 set which would have caused all the outer expressions in EXPR_P
1870 leading to P to also have had TREE_SIDE_EFFECTS set. */
1871 recalculate_side_effects (t);
1872 }
1873
1874 tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval, fallback);
1875 ret = MIN (ret, tret);
1876
1877 /* If the outermost expression is a COMPONENT_REF, canonicalize its type. */
1878 if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
1879 {
1880 canonicalize_component_ref (expr_p);
1881 ret = MIN (ret, GS_OK);
1882 }
1883
1884 VEC_free (tree, heap, stack);
1885
1886 return ret;
1887 }
1888
1889 /* Gimplify the self modifying expression pointed to by EXPR_P
1890 (++, --, +=, -=).
1891
1892 PRE_P points to the list where side effects that must happen before
1893 *EXPR_P should be stored.
1894
1895 POST_P points to the list where side effects that must happen after
1896 *EXPR_P should be stored.
1897
1898 WANT_VALUE is nonzero iff we want to use the value of this expression
1899 in another expression. */
1900
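/* For example (informal sketch, with "a" and "b" plain integer
   variables):

     b = a++;

   gimplifies roughly to

     t = a;
     b = t;
     a = t + 1;

   where the increment is emitted from the postqueue after the
   enclosing assignment, while a postfix operator whose value is not
   used, or a prefix operator, is handled as the simple assignment
   "a = a + 1".  */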
1901 static enum gimplify_status
1902 gimplify_self_mod_expr (tree *expr_p, tree *pre_p, tree *post_p,
1903 bool want_value)
1904 {
1905 enum tree_code code;
1906 tree lhs, lvalue, rhs, t1, post = NULL, *orig_post_p = post_p;
1907 bool postfix;
1908 enum tree_code arith_code;
1909 enum gimplify_status ret;
1910
1911 code = TREE_CODE (*expr_p);
1912
1913 gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
1914 || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);
1915
1916 /* Prefix or postfix? */
1917 if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
1918 /* Faster to treat as prefix if result is not used. */
1919 postfix = want_value;
1920 else
1921 postfix = false;
1922
1923 /* For postfix, make sure the inner expression's post side effects
1924 are executed after side effects from this expression. */
1925 if (postfix)
1926 post_p = &post;
1927
1928 /* Add or subtract? */
1929 if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
1930 arith_code = PLUS_EXPR;
1931 else
1932 arith_code = MINUS_EXPR;
1933
1934 /* Gimplify the LHS into a GIMPLE lvalue. */
1935 lvalue = TREE_OPERAND (*expr_p, 0);
1936 ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
1937 if (ret == GS_ERROR)
1938 return ret;
1939
1940 /* Extract the operands to the arithmetic operation. */
1941 lhs = lvalue;
1942 rhs = TREE_OPERAND (*expr_p, 1);
1943
1944 /* For a postfix operator, we evaluate the LHS to an rvalue and then use
1945 that both as the result value and in the postqueue operation. */
1946 if (postfix)
1947 {
1948 ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
1949 if (ret == GS_ERROR)
1950 return ret;
1951 }
1952
1953 t1 = build2 (arith_code, TREE_TYPE (*expr_p), lhs, rhs);
1954 t1 = build_gimple_modify_stmt (lvalue, t1);
1955
1956 if (postfix)
1957 {
1958 gimplify_and_add (t1, orig_post_p);
1959 append_to_statement_list (post, orig_post_p);
1960 *expr_p = lhs;
1961 return GS_ALL_DONE;
1962 }
1963 else
1964 {
1965 *expr_p = t1;
1966 return GS_OK;
1967 }
1968 }
1969
1970 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
1971
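/* For example (informal sketch), if EXPR has a C99 variable length
   array type such as "char[n]", it is rewritten as
   WITH_SIZE_EXPR <EXPR, n> so that later users of the value, e.g. the
   memcpy lowering of variable sized assignments below, still know how
   many bytes are involved.  */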
1972 static void
1973 maybe_with_size_expr (tree *expr_p)
1974 {
1975 tree expr = *expr_p;
1976 tree type = TREE_TYPE (expr);
1977 tree size;
1978
1979 /* If we've already wrapped this or the type is error_mark_node, we can't do
1980 anything. */
1981 if (TREE_CODE (expr) == WITH_SIZE_EXPR
1982 || type == error_mark_node)
1983 return;
1984
1985 /* If the size isn't known or is a constant, we have nothing to do. */
1986 size = TYPE_SIZE_UNIT (type);
1987 if (!size || TREE_CODE (size) == INTEGER_CST)
1988 return;
1989
1990 /* Otherwise, make a WITH_SIZE_EXPR. */
1991 size = unshare_expr (size);
1992 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
1993 *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
1994 }
1995
1996 /* Subroutine of gimplify_call_expr: Gimplify a single argument. */
1997
1998 static enum gimplify_status
1999 gimplify_arg (tree *expr_p, tree *pre_p)
2000 {
2001 bool (*test) (tree);
2002 fallback_t fb;
2003
2004 /* In general, we allow lvalues for function arguments to avoid
2005 extra overhead of copying large aggregates out of even larger
2006 aggregates into temporaries only to copy the temporaries to
2007 the argument list. Make optimizers happy by pulling out to
2008 temporaries those types that fit in registers. */
2009 if (is_gimple_reg_type (TREE_TYPE (*expr_p)))
2010 test = is_gimple_val, fb = fb_rvalue;
2011 else
2012 test = is_gimple_lvalue, fb = fb_either;
2013
2014 /* If this is a variable sized type, we must remember the size. */
2015 maybe_with_size_expr (expr_p);
2016
2017 /* There is a sequence point before a function call. Side effects in
2018 the argument list must occur before the actual call. So, when
2019 gimplifying arguments, force gimplify_expr to use an internal
2020 post queue which is then appended to the end of PRE_P. */
2021 return gimplify_expr (expr_p, pre_p, NULL, test, fb);
2022 }
2023
2024 /* Gimplify the CALL_EXPR node pointed to by EXPR_P. PRE_P points to the
2025 list where side effects that must happen before *EXPR_P should be stored.
2026 WANT_VALUE is true if the result of the call is desired. */
2027
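/* For example (informal sketch), a call such as

     foo (bar (x), y + 1);

   gimplifies roughly to

     t1 = bar (x);
     t2 = y + 1;
     foo (t1, t2);

   with "t1" and "t2" temporaries; because there is a sequence point
   before a call, all side effects of the arguments are emitted before
   the call itself.  */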
2028 static enum gimplify_status
2029 gimplify_call_expr (tree *expr_p, tree *pre_p, bool want_value)
2030 {
2031 tree decl;
2032 enum gimplify_status ret;
2033 int i, nargs;
2034
2035 gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);
2036
2037 /* For reliable diagnostics during inlining, it is necessary that
2038 every call_expr be annotated with file and line. */
2039 if (! EXPR_HAS_LOCATION (*expr_p))
2040 SET_EXPR_LOCATION (*expr_p, input_location);
2041
2042 /* This may be a call to a builtin function.
2043
2044 Builtin function calls may be transformed into different
2045 (and more efficient) builtin function calls under certain
2046 circumstances. Unfortunately, gimplification can muck things
2047 up enough that the builtin expanders are not aware that certain
2048 transformations are still valid.
2049
2050 So we attempt transformation/gimplification of the call before
2051 we gimplify the CALL_EXPR. At this time we do not manage to
2052 transform all calls in the same manner as the expanders do, but
2053 we do transform most of them. */
2054 decl = get_callee_fndecl (*expr_p);
2055 if (decl && DECL_BUILT_IN (decl))
2056 {
2057 tree new = fold_call_expr (*expr_p, !want_value);
2058
2059 if (new && new != *expr_p)
2060 {
2061 /* There was a transformation of this call which computes the
2062 same value, but in a more efficient way. Return and try
2063 again. */
2064 *expr_p = new;
2065 return GS_OK;
2066 }
2067
2068 if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
2069 && DECL_FUNCTION_CODE (decl) == BUILT_IN_VA_START)
2070 {
2071 if (call_expr_nargs (*expr_p) < 2)
2072 {
2073 error ("too few arguments to function %<va_start%>");
2074 *expr_p = build_empty_stmt ();
2075 return GS_OK;
2076 }
2077
2078 if (fold_builtin_next_arg (*expr_p, true))
2079 {
2080 *expr_p = build_empty_stmt ();
2081 return GS_OK;
2082 }
2083 /* Avoid gimplifying the second argument to va_start, which needs
2084 to be the plain PARM_DECL. */
2085 return gimplify_arg (&CALL_EXPR_ARG (*expr_p, 0), pre_p);
2086 }
2087 }
2088
2089 /* There is a sequence point before the call, so any side effects in
2090 the calling expression must occur before the actual call. Force
2091 gimplify_expr to use an internal post queue. */
2092 ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
2093 is_gimple_call_addr, fb_rvalue);
2094
2095 nargs = call_expr_nargs (*expr_p);
2096
2097 for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
2098 PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
2099 PUSH_ARGS_REVERSED ? i-- : i++)
2100 {
2101 enum gimplify_status t;
2102
2103 t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p);
2104
2105 if (t == GS_ERROR)
2106 ret = GS_ERROR;
2107 }
2108
2109 /* Try this again in case gimplification exposed something. */
2110 if (ret != GS_ERROR)
2111 {
2112 tree new = fold_call_expr (*expr_p, !want_value);
2113
2114 if (new && new != *expr_p)
2115 {
2116 /* There was a transformation of this call which computes the
2117 same value, but in a more efficient way. Return and try
2118 again. */
2119 *expr_p = new;
2120 return GS_OK;
2121 }
2122 }
2123
2124 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
2125 decl. This allows us to eliminate redundant or useless
2126 calls to "const" functions. */
2127 if (TREE_CODE (*expr_p) == CALL_EXPR
2128 && (call_expr_flags (*expr_p) & (ECF_CONST | ECF_PURE)))
2129 TREE_SIDE_EFFECTS (*expr_p) = 0;
2130
2131 return ret;
2132 }
2133
2134 /* Handle shortcut semantics in the predicate operand of a COND_EXPR by
2135 rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
2136
2137 TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
2138 condition is true or false, respectively. If null, we should generate
2139 our own to skip over the evaluation of this specific expression.
2140
2141 This function is the tree equivalent of do_jump.
2142
2143 shortcut_cond_r should only be called by shortcut_cond_expr. */
2144
2145 static tree
2146 shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p)
2147 {
2148 tree local_label = NULL_TREE;
2149 tree t, expr = NULL;
2150
2151 /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
2152 retain the shortcut semantics. Just insert the gotos here;
2153 shortcut_cond_expr will append the real blocks later. */
2154 if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
2155 {
2156 /* Turn if (a && b) into
2157
2158 if (a); else goto no;
2159 if (b) goto yes; else goto no;
2160 (no:) */
2161
2162 if (false_label_p == NULL)
2163 false_label_p = &local_label;
2164
2165 t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p);
2166 append_to_statement_list (t, &expr);
2167
2168 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
2169 false_label_p);
2170 append_to_statement_list (t, &expr);
2171 }
2172 else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
2173 {
2174 /* Turn if (a || b) into
2175
2176 if (a) goto yes;
2177 if (b) goto yes; else goto no;
2178 (yes:) */
2179
2180 if (true_label_p == NULL)
2181 true_label_p = &local_label;
2182
2183 t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL);
2184 append_to_statement_list (t, &expr);
2185
2186 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
2187 false_label_p);
2188 append_to_statement_list (t, &expr);
2189 }
2190 else if (TREE_CODE (pred) == COND_EXPR)
2191 {
2192 /* As long as we're messing with gotos, turn if (a ? b : c) into
2193 if (a)
2194 if (b) goto yes; else goto no;
2195 else
2196 if (c) goto yes; else goto no; */
2197 expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
2198 shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
2199 false_label_p),
2200 shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
2201 false_label_p));
2202 }
2203 else
2204 {
2205 expr = build3 (COND_EXPR, void_type_node, pred,
2206 build_and_jump (true_label_p),
2207 build_and_jump (false_label_p));
2208 }
2209
2210 if (local_label)
2211 {
2212 t = build1 (LABEL_EXPR, void_type_node, local_label);
2213 append_to_statement_list (t, &expr);
2214 }
2215
2216 return expr;
2217 }
2218
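/* Handle shortcut semantics in the predicate of the COND_EXPR EXPR,
   which is used as a statement, by expanding it into explicit jumps.
   For example (informal sketch)

     if (a && b) c; else d;

   becomes roughly

     if (a); else goto no;
     if (b); else goto no;
     c; goto end;
     no: d;
     end:

   reusing or hijacking labels from the arms where possible so that we
   do not generate jumps to jumps.  */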
2219 static tree
2220 shortcut_cond_expr (tree expr)
2221 {
2222 tree pred = TREE_OPERAND (expr, 0);
2223 tree then_ = TREE_OPERAND (expr, 1);
2224 tree else_ = TREE_OPERAND (expr, 2);
2225 tree true_label, false_label, end_label, t;
2226 tree *true_label_p;
2227 tree *false_label_p;
2228 bool emit_end, emit_false, jump_over_else;
2229 bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
2230 bool else_se = else_ && TREE_SIDE_EFFECTS (else_);
2231
2232 /* First do simple transformations. */
2233 if (!else_se)
2234 {
2235 /* If there is no 'else', turn (a && b) into if (a) if (b). */
2236 while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
2237 {
2238 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
2239 then_ = shortcut_cond_expr (expr);
2240 then_se = then_ && TREE_SIDE_EFFECTS (then_);
2241 pred = TREE_OPERAND (pred, 0);
2242 expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
2243 }
2244 }
2245 if (!then_se)
2246 {
2247 /* If there is no 'then', turn
2248 if (a || b); else d
2249 into
2250 if (a); else if (b); else d. */
2251 while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
2252 {
2253 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
2254 else_ = shortcut_cond_expr (expr);
2255 else_se = else_ && TREE_SIDE_EFFECTS (else_);
2256 pred = TREE_OPERAND (pred, 0);
2257 expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
2258 }
2259 }
2260
2261 /* If we're done, great. */
2262 if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
2263 && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
2264 return expr;
2265
2266 /* Otherwise we need to mess with gotos. Change
2267 if (a) c; else d;
2268 to
2269 if (a); else goto no;
2270 c; goto end;
2271 no: d; end:
2272 and recursively gimplify the condition. */
2273
2274 true_label = false_label = end_label = NULL_TREE;
2275
2276 /* If our arms just jump somewhere, hijack those labels so we don't
2277 generate jumps to jumps. */
2278
2279 if (then_
2280 && TREE_CODE (then_) == GOTO_EXPR
2281 && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL)
2282 {
2283 true_label = GOTO_DESTINATION (then_);
2284 then_ = NULL;
2285 then_se = false;
2286 }
2287
2288 if (else_
2289 && TREE_CODE (else_) == GOTO_EXPR
2290 && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL)
2291 {
2292 false_label = GOTO_DESTINATION (else_);
2293 else_ = NULL;
2294 else_se = false;
2295 }
2296
2297 /* If we aren't hijacking a label for the 'then' branch, it falls through. */
2298 if (true_label)
2299 true_label_p = &true_label;
2300 else
2301 true_label_p = NULL;
2302
2303 /* The 'else' branch also needs a label if it contains interesting code. */
2304 if (false_label || else_se)
2305 false_label_p = &false_label;
2306 else
2307 false_label_p = NULL;
2308
2309 /* If there was nothing else in our arms, just forward the label(s). */
2310 if (!then_se && !else_se)
2311 return shortcut_cond_r (pred, true_label_p, false_label_p);
2312
2313 /* If our last subexpression already has a terminal label, reuse it. */
2314 if (else_se)
2315 expr = expr_last (else_);
2316 else if (then_se)
2317 expr = expr_last (then_);
2318 else
2319 expr = NULL;
2320 if (expr && TREE_CODE (expr) == LABEL_EXPR)
2321 end_label = LABEL_EXPR_LABEL (expr);
2322
2323 /* If we don't care about jumping to the 'else' branch, jump to the end
2324 if the condition is false. */
2325 if (!false_label_p)
2326 false_label_p = &end_label;
2327
2328 /* We only want to emit these labels if we aren't hijacking them. */
2329 emit_end = (end_label == NULL_TREE);
2330 emit_false = (false_label == NULL_TREE);
2331
2332 /* We only emit the jump over the else clause if we have to--if the
2333 then clause may fall through. Otherwise we can wind up with a
2334 useless jump and a useless label at the end of gimplified code,
2335 which will cause us to think that this conditional as a whole
2336 falls through even if it doesn't. If we then inline a function
2337 which ends with such a condition, that can cause us to issue an
2338 inappropriate warning about control reaching the end of a
2339 non-void function. */
2340 jump_over_else = block_may_fallthru (then_);
2341
2342 pred = shortcut_cond_r (pred, true_label_p, false_label_p);
2343
2344 expr = NULL;
2345 append_to_statement_list (pred, &expr);
2346
2347 append_to_statement_list (then_, &expr);
2348 if (else_se)
2349 {
2350 if (jump_over_else)
2351 {
2352 t = build_and_jump (&end_label);
2353 append_to_statement_list (t, &expr);
2354 }
2355 if (emit_false)
2356 {
2357 t = build1 (LABEL_EXPR, void_type_node, false_label);
2358 append_to_statement_list (t, &expr);
2359 }
2360 append_to_statement_list (else_, &expr);
2361 }
2362 if (emit_end && end_label)
2363 {
2364 t = build1 (LABEL_EXPR, void_type_node, end_label);
2365 append_to_statement_list (t, &expr);
2366 }
2367
2368 return expr;
2369 }
2370
2371 /* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */
2372
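/* For example (informal sketch), in C the controlling expression of an
   "if" has int type; gimple_boolify gives a comparison such as
   "a < b" the type boolean_type_node directly, recurses into the
   operands of TRUTH_* expressions, and wraps anything else, such as a
   plain int flag, in a conversion to boolean_type_node.  */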
2373 tree
2374 gimple_boolify (tree expr)
2375 {
2376 tree type = TREE_TYPE (expr);
2377
2378 if (TREE_CODE (type) == BOOLEAN_TYPE)
2379 return expr;
2380
2381 switch (TREE_CODE (expr))
2382 {
2383 case TRUTH_AND_EXPR:
2384 case TRUTH_OR_EXPR:
2385 case TRUTH_XOR_EXPR:
2386 case TRUTH_ANDIF_EXPR:
2387 case TRUTH_ORIF_EXPR:
2388 /* Also boolify the arguments of truth exprs. */
2389 TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
2390 /* FALLTHRU */
2391
2392 case TRUTH_NOT_EXPR:
2393 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
2394 /* FALLTHRU */
2395
2396 case EQ_EXPR: case NE_EXPR:
2397 case LE_EXPR: case GE_EXPR: case LT_EXPR: case GT_EXPR:
2398 /* These expressions always produce boolean results. */
2399 TREE_TYPE (expr) = boolean_type_node;
2400 return expr;
2401
2402 default:
2403 /* Other expressions that get here must have boolean values, but
2404 might need to be converted to the appropriate mode. */
2405 return fold_convert (boolean_type_node, expr);
2406 }
2407 }
2408
2409 /* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
2410 into
2411
2412 if (p) if (p)
2413 t1 = a; a;
2414 else or else
2415 t1 = b; b;
2416 t1;
2417
2418 The second form is used when *EXPR_P is of type void.
2419
2420 TARGET is the tree for T1 above.
2421
2422 PRE_P points to the list where side effects that must happen before
2423 *EXPR_P should be stored. */
2424
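/* An additional note on the lvalue case (informal sketch): when
   FALLBACK allows an lvalue, the arms are rewritten to compute their
   addresses into a pointer temporary and the result is the
   dereference of that temporary, e.g. for a C++ conditional used as
   an lvalue,

     (p ? a : b) = 1;

   becomes roughly

     if (p) iftmp = &a; else iftmp = &b;
     *iftmp = 1;  */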
2425 static enum gimplify_status
2426 gimplify_cond_expr (tree *expr_p, tree *pre_p, fallback_t fallback)
2427 {
2428 tree expr = *expr_p;
2429 tree tmp, tmp2, type;
2430 enum gimplify_status ret;
2431
2432 type = TREE_TYPE (expr);
2433
2434 /* If this COND_EXPR has a value, copy the values into a temporary within
2435 the arms. */
2436 if (! VOID_TYPE_P (type))
2437 {
2438 tree result;
2439
2440 if ((fallback & fb_lvalue) == 0)
2441 {
2442 result = tmp2 = tmp = create_tmp_var (TREE_TYPE (expr), "iftmp");
2443 ret = GS_ALL_DONE;
2444 }
2445 else
2446 {
2447 tree type = build_pointer_type (TREE_TYPE (expr));
2448
2449 if (TREE_TYPE (TREE_OPERAND (expr, 1)) != void_type_node)
2450 TREE_OPERAND (expr, 1) =
2451 build_fold_addr_expr (TREE_OPERAND (expr, 1));
2452
2453 if (TREE_TYPE (TREE_OPERAND (expr, 2)) != void_type_node)
2454 TREE_OPERAND (expr, 2) =
2455 build_fold_addr_expr (TREE_OPERAND (expr, 2));
2456
2457 tmp2 = tmp = create_tmp_var (type, "iftmp");
2458
2459 expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (expr, 0),
2460 TREE_OPERAND (expr, 1), TREE_OPERAND (expr, 2));
2461
2462 result = build_fold_indirect_ref (tmp);
2463 ret = GS_ALL_DONE;
2464 }
2465
2466 /* Build the then clause, 't1 = a;'. But don't build an assignment
2467 if this branch is void; in C++ it can be, if it's a throw. */
2468 if (TREE_TYPE (TREE_OPERAND (expr, 1)) != void_type_node)
2469 TREE_OPERAND (expr, 1)
2470 = build_gimple_modify_stmt (tmp, TREE_OPERAND (expr, 1));
2471
2472 /* Build the else clause, 't1 = b;'. */
2473 if (TREE_TYPE (TREE_OPERAND (expr, 2)) != void_type_node)
2474 TREE_OPERAND (expr, 2)
2475 = build_gimple_modify_stmt (tmp2, TREE_OPERAND (expr, 2));
2476
2477 TREE_TYPE (expr) = void_type_node;
2478 recalculate_side_effects (expr);
2479
2480 /* Move the COND_EXPR to the prequeue. */
2481 gimplify_and_add (expr, pre_p);
2482
2483 *expr_p = result;
2484 return ret;
2485 }
2486
2487 /* Make sure the condition has BOOLEAN_TYPE. */
2488 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
2489
2490 /* Break apart && and || conditions. */
2491 if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
2492 || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
2493 {
2494 expr = shortcut_cond_expr (expr);
2495
2496 if (expr != *expr_p)
2497 {
2498 *expr_p = expr;
2499
2500 /* We can't rely on gimplify_expr to re-gimplify the expanded
2501 form properly, as cleanups might cause the target labels to be
2502 wrapped in a TRY_FINALLY_EXPR. To prevent that, we need to
2503 set up a conditional context. */
2504 gimple_push_condition ();
2505 gimplify_stmt (expr_p);
2506 gimple_pop_condition (pre_p);
2507
2508 return GS_ALL_DONE;
2509 }
2510 }
2511
2512 /* Now do the normal gimplification. */
2513 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL,
2514 is_gimple_condexpr, fb_rvalue);
2515
2516 gimple_push_condition ();
2517
2518 gimplify_to_stmt_list (&TREE_OPERAND (expr, 1));
2519 gimplify_to_stmt_list (&TREE_OPERAND (expr, 2));
2520 recalculate_side_effects (expr);
2521
2522 gimple_pop_condition (pre_p);
2523
2524 if (ret == GS_ERROR)
2525 ;
2526 else if (TREE_SIDE_EFFECTS (TREE_OPERAND (expr, 1)))
2527 ret = GS_ALL_DONE;
2528 else if (TREE_SIDE_EFFECTS (TREE_OPERAND (expr, 2)))
2529 /* Rewrite "if (a); else b" to "if (!a) b" */
2530 {
2531 TREE_OPERAND (expr, 0) = invert_truthvalue (TREE_OPERAND (expr, 0));
2532 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL,
2533 is_gimple_condexpr, fb_rvalue);
2534
2535 tmp = TREE_OPERAND (expr, 1);
2536 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 2);
2537 TREE_OPERAND (expr, 2) = tmp;
2538 }
2539 else
2540 /* Both arms are empty; replace the COND_EXPR with its predicate. */
2541 expr = TREE_OPERAND (expr, 0);
2542
2543 *expr_p = expr;
2544 return ret;
2545 }
2546
2547 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
2548 a call to __builtin_memcpy. */
2549
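/* For example (informal sketch), a variable sized assignment whose
   source is addressable,

     *to = *from;   with byte count SIZE taken from a WITH_SIZE_EXPR,

   becomes roughly

     __builtin_memcpy (&*to, &*from, SIZE);

   and when the value of the assignment is wanted the return value of
   memcpy is converted back to the destination's pointer type and
   dereferenced to stand for the stored object.  */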
2550 static enum gimplify_status
2551 gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value)
2552 {
2553 tree t, to, to_ptr, from, from_ptr;
2554
2555 to = GENERIC_TREE_OPERAND (*expr_p, 0);
2556 from = GENERIC_TREE_OPERAND (*expr_p, 1);
2557
2558 from_ptr = build_fold_addr_expr (from);
2559
2560 to_ptr = build_fold_addr_expr (to);
2561 t = implicit_built_in_decls[BUILT_IN_MEMCPY];
2562 t = build_call_expr (t, 3, to_ptr, from_ptr, size);
2563
2564 if (want_value)
2565 {
2566 t = build1 (NOP_EXPR, TREE_TYPE (to_ptr), t);
2567 t = build1 (INDIRECT_REF, TREE_TYPE (to), t);
2568 }
2569
2570 *expr_p = t;
2571 return GS_OK;
2572 }
2573
2574 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
2575 a call to __builtin_memset. In this case we know that the RHS is
2576 a CONSTRUCTOR with an empty element list. */
2577
2578 static enum gimplify_status
2579 gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value)
2580 {
2581 tree t, to, to_ptr;
2582
2583 to = GENERIC_TREE_OPERAND (*expr_p, 0);
2584
2585 to_ptr = build_fold_addr_expr (to);
2586 t = implicit_built_in_decls[BUILT_IN_MEMSET];
2587 t = build_call_expr (t, 3, to_ptr, integer_zero_node, size);
2588
2589 if (want_value)
2590 {
2591 t = build1 (NOP_EXPR, TREE_TYPE (to_ptr), t);
2592 t = build1 (INDIRECT_REF, TREE_TYPE (to), t);
2593 }
2594
2595 *expr_p = t;
2596 return GS_OK;
2597 }
2598
2599 /* A subroutine of gimplify_init_ctor_preeval. Called via walk_tree,
2600 determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
2601 assignment. Returns non-null if we detect a potential overlap. */
2602
2603 struct gimplify_init_ctor_preeval_data
2604 {
2605 /* The base decl of the lhs object. May be NULL, in which case we
2606 have to assume the lhs is indirect. */
2607 tree lhs_base_decl;
2608
2609 /* The alias set of the lhs object. */
2610 int lhs_alias_set;
2611 };
2612
2613 static tree
2614 gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
2615 {
2616 struct gimplify_init_ctor_preeval_data *data
2617 = (struct gimplify_init_ctor_preeval_data *) xdata;
2618 tree t = *tp;
2619
2620 /* If we find the base object, obviously we have overlap. */
2621 if (data->lhs_base_decl == t)
2622 return t;
2623
2624 /* If the constructor component is indirect, determine if we have a
2625 potential overlap with the lhs. The only bits of information we
2626 have to go on at this point are addressability and alias sets. */
2627 if (TREE_CODE (t) == INDIRECT_REF
2628 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
2629 && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
2630 return t;
2631
2632 /* If the constructor component is a call, determine if it can hide a
2633 potential overlap with the lhs through an INDIRECT_REF like above. */
2634 if (TREE_CODE (t) == CALL_EXPR)
2635 {
2636 tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));
2637
2638 for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
2639 if (POINTER_TYPE_P (TREE_VALUE (type))
2640 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
2641 && alias_sets_conflict_p (data->lhs_alias_set,
2642 get_alias_set
2643 (TREE_TYPE (TREE_VALUE (type)))))
2644 return t;
2645 }
2646
2647 if (IS_TYPE_OR_DECL_P (t))
2648 *walk_subtrees = 0;
2649 return NULL;
2650 }
2651
2652 /* A subroutine of gimplify_init_constructor. Pre-evaluate *EXPR_P,
2653 force values that overlap with the lhs (as described by *DATA)
2654 into temporaries. */
2655
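/* For example (informal sketch), in

     a = (struct S) { .x = a.y, .y = 0 };

   the value "a.y" refers to the object being stored, so it is
   evaluated into a temporary before the element-wise stores that the
   constructor expands to:

     t = a.y;  a.x = t;  a.y = 0;  */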
2656 static void
2657 gimplify_init_ctor_preeval (tree *expr_p, tree *pre_p, tree *post_p,
2658 struct gimplify_init_ctor_preeval_data *data)
2659 {
2660 enum gimplify_status one;
2661
2662 /* If the value is invariant, then there's nothing to pre-evaluate.
2663 But ensure it doesn't have any side-effects since a SAVE_EXPR is
2664 invariant but has side effects and might contain a reference to
2665 the object we're initializing. */
2666 if (TREE_INVARIANT (*expr_p) && !TREE_SIDE_EFFECTS (*expr_p))
2667 return;
2668
2669 /* If the type has non-trivial constructors, we can't pre-evaluate. */
2670 if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
2671 return;
2672
2673 /* Recurse for nested constructors. */
2674 if (TREE_CODE (*expr_p) == CONSTRUCTOR)
2675 {
2676 unsigned HOST_WIDE_INT ix;
2677 constructor_elt *ce;
2678 VEC(constructor_elt,gc) *v = CONSTRUCTOR_ELTS (*expr_p);
2679
2680 for (ix = 0; VEC_iterate (constructor_elt, v, ix, ce); ix++)
2681 gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);
2682 return;
2683 }
2684
2685 /* If this is a variable sized type, we must remember the size. */
2686 maybe_with_size_expr (expr_p);
2687
2688 /* Gimplify the constructor element to something appropriate for the rhs
2689 of a MODIFY_EXPR. Given that we know the lhs is an aggregate, we know
2690 the gimplifier will consider this a store to memory. Doing this
2691 gimplification now means that we won't have to deal with complicated
2692 language-specific trees, nor trees like SAVE_EXPR that can induce
2693 exponential search behavior. */
2694 one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
2695 if (one == GS_ERROR)
2696 {
2697 *expr_p = NULL;
2698 return;
2699 }
2700
2701 /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
2702 with the lhs, since "a = { .x=a }" doesn't make sense. This will
2703 always be true for all scalars, since is_gimple_mem_rhs insists on a
2704 temporary variable for them. */
2705 if (DECL_P (*expr_p))
2706 return;
2707
2708 /* If this is of variable size, we have no choice but to assume it doesn't
2709 overlap since we can't make a temporary for it. */
2710 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
2711 return;
2712
2713 /* Otherwise, we must search for overlap ... */
2714 if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
2715 return;
2716
2717 /* ... and if found, force the value into a temporary. */
2718 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
2719 }
2720
2721 /* A subroutine of gimplify_init_ctor_eval. Create a loop for
2722 a RANGE_EXPR in a CONSTRUCTOR for an array.
2723
2724 var = lower;
2725 loop_entry:
2726 object[var] = value;
2727 if (var == upper)
2728 goto loop_exit;
2729 var = var + 1;
2730 goto loop_entry;
2731 loop_exit:
2732
2733 We increment var _after_ the loop exit check because we might otherwise
2734 fail if upper == TYPE_MAX_VALUE (type for upper).
2735
2736 Note that we never have to deal with SAVE_EXPRs here, because this has
2737 already been taken care of for us, in gimplify_init_ctor_preeval(). */
2738
2739 static void gimplify_init_ctor_eval (tree, VEC(constructor_elt,gc) *,
2740 tree *, bool);
2741
2742 static void
2743 gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
2744 tree value, tree array_elt_type,
2745 tree *pre_p, bool cleared)
2746 {
2747 tree loop_entry_label, loop_exit_label;
2748 tree var, var_type, cref, tmp;
2749
2750 loop_entry_label = create_artificial_label ();
2751 loop_exit_label = create_artificial_label ();
2752
2753 /* Create and initialize the index variable. */
2754 var_type = TREE_TYPE (upper);
2755 var = create_tmp_var (var_type, NULL);
2756 append_to_statement_list (build_gimple_modify_stmt (var, lower), pre_p);
2757
2758 /* Add the loop entry label. */
2759 append_to_statement_list (build1 (LABEL_EXPR,
2760 void_type_node,
2761 loop_entry_label),
2762 pre_p);
2763
2764 /* Build the reference. */
2765 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
2766 var, NULL_TREE, NULL_TREE);
2767
2768 /* If the value is itself a CONSTRUCTOR, just call gimplify_init_ctor_eval
2769 to do the store. Otherwise just assign VALUE to the reference. */
2770
2771 if (TREE_CODE (value) == CONSTRUCTOR)
2772 /* NB we might have to call ourselves recursively through
2773 gimplify_init_ctor_eval if the value is a constructor. */
2774 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
2775 pre_p, cleared);
2776 else
2777 append_to_statement_list (build_gimple_modify_stmt (cref, value), pre_p);
2778
2779 /* We exit the loop when the index var is equal to the upper bound. */
2780 gimplify_and_add (build3 (COND_EXPR, void_type_node,
2781 build2 (EQ_EXPR, boolean_type_node,
2782 var, upper),
2783 build1 (GOTO_EXPR,
2784 void_type_node,
2785 loop_exit_label),
2786 NULL_TREE),
2787 pre_p);
2788
2789 /* Otherwise, increment the index var... */
2790 tmp = build2 (PLUS_EXPR, var_type, var,
2791 fold_convert (var_type, integer_one_node));
2792 append_to_statement_list (build_gimple_modify_stmt (var, tmp), pre_p);
2793
2794 /* ...and jump back to the loop entry. */
2795 append_to_statement_list (build1 (GOTO_EXPR,
2796 void_type_node,
2797 loop_entry_label),
2798 pre_p);
2799
2800 /* Add the loop exit label. */
2801 append_to_statement_list (build1 (LABEL_EXPR,
2802 void_type_node,
2803 loop_exit_label),
2804 pre_p);
2805 }
2806
2807 /* Return true if FDECL is accessing a field that is zero sized. */
2808
2809 static bool
2810 zero_sized_field_decl (tree fdecl)
2811 {
2812 if (TREE_CODE (fdecl) == FIELD_DECL && DECL_SIZE (fdecl)
2813 && integer_zerop (DECL_SIZE (fdecl)))
2814 return true;
2815 return false;
2816 }
2817
2818 /* Return true if TYPE is zero sized. */
2819
2820 static bool
2821 zero_sized_type (tree type)
2822 {
2823 if (AGGREGATE_TYPE_P (type) && TYPE_SIZE (type)
2824 && integer_zerop (TYPE_SIZE (type)))
2825 return true;
2826 return false;
2827 }
2828
2829 /* A subroutine of gimplify_init_constructor. Generate individual
2830 MODIFY_EXPRs for a CONSTRUCTOR. OBJECT is the LHS against which the
2831 assignments should happen. ELTS is the CONSTRUCTOR_ELTS of the
2832 CONSTRUCTOR. CLEARED is true if the entire LHS object has been
2833 zeroed first. */
2834
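/* For example (informal sketch),

     s = (struct S) { .a = 1, .b = f () };

   is broken up into roughly

     s.a = 1;
     s.b = f ();

   and when CLEARED is true any element whose initializer is zero is
   simply skipped, since the block clear already took care of it.  */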
2835 static void
2836 gimplify_init_ctor_eval (tree object, VEC(constructor_elt,gc) *elts,
2837 tree *pre_p, bool cleared)
2838 {
2839 tree array_elt_type = NULL;
2840 unsigned HOST_WIDE_INT ix;
2841 tree purpose, value;
2842
2843 if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
2844 array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));
2845
2846 FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
2847 {
2848 tree cref, init;
2849
2850 /* NULL values are created above for gimplification errors. */
2851 if (value == NULL)
2852 continue;
2853
2854 if (cleared && initializer_zerop (value))
2855 continue;
2856
2857 /* ??? Here's to hoping the front end fills in all of the indices,
2858 so we don't have to figure out what's missing ourselves. */
2859 gcc_assert (purpose);
2860
2861 /* Skip zero-sized fields, unless value has side-effects. This can
2862 happen with calls to functions returning a zero-sized type, which
2863 we shouldn't discard. As a number of downstream passes don't
2864 expect sets of zero-sized fields, we rely on the gimplification of
2865 the MODIFY_EXPR we make below to drop the assignment statement. */
2866 if (! TREE_SIDE_EFFECTS (value) && zero_sized_field_decl (purpose))
2867 continue;
2868
2869 /* If we have a RANGE_EXPR, we have to build a loop to assign the
2870 whole range. */
2871 if (TREE_CODE (purpose) == RANGE_EXPR)
2872 {
2873 tree lower = TREE_OPERAND (purpose, 0);
2874 tree upper = TREE_OPERAND (purpose, 1);
2875
2876 /* If the lower bound is equal to upper, just treat it as if
2877 upper was the index. */
2878 if (simple_cst_equal (lower, upper))
2879 purpose = upper;
2880 else
2881 {
2882 gimplify_init_ctor_eval_range (object, lower, upper, value,
2883 array_elt_type, pre_p, cleared);
2884 continue;
2885 }
2886 }
2887
2888 if (array_elt_type)
2889 {
2890 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
2891 purpose, NULL_TREE, NULL_TREE);
2892 }
2893 else
2894 {
2895 gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
2896 cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
2897 unshare_expr (object), purpose, NULL_TREE);
2898 }
2899
2900 if (TREE_CODE (value) == CONSTRUCTOR
2901 && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
2902 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
2903 pre_p, cleared);
2904 else
2905 {
2906 init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
2907 gimplify_and_add (init, pre_p);
2908 }
2909 }
2910 }
2911
2912 /* A subroutine of gimplify_modify_expr. Break out elements of a
2913 CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
2914
2915 Note that we still need to clear any elements that don't have explicit
2916 initializers, so if not all elements are initialized we keep the
2917 original MODIFY_EXPR; we just remove all of the constructor elements. */
2918
2919 static enum gimplify_status
2920 gimplify_init_constructor (tree *expr_p, tree *pre_p,
2921 tree *post_p, bool want_value)
2922 {
2923 tree object;
2924 tree ctor = GENERIC_TREE_OPERAND (*expr_p, 1);
2925 tree type = TREE_TYPE (ctor);
2926 enum gimplify_status ret;
2927 VEC(constructor_elt,gc) *elts;
2928
2929 if (TREE_CODE (ctor) != CONSTRUCTOR)
2930 return GS_UNHANDLED;
2931
2932 ret = gimplify_expr (&GENERIC_TREE_OPERAND (*expr_p, 0), pre_p, post_p,
2933 is_gimple_lvalue, fb_lvalue);
2934 if (ret == GS_ERROR)
2935 return ret;
2936 object = GENERIC_TREE_OPERAND (*expr_p, 0);
2937
2938 elts = CONSTRUCTOR_ELTS (ctor);
2939
2940 ret = GS_ALL_DONE;
2941 switch (TREE_CODE (type))
2942 {
2943 case RECORD_TYPE:
2944 case UNION_TYPE:
2945 case QUAL_UNION_TYPE:
2946 case ARRAY_TYPE:
2947 {
2948 struct gimplify_init_ctor_preeval_data preeval_data;
2949 HOST_WIDE_INT num_type_elements, num_ctor_elements;
2950 HOST_WIDE_INT num_nonzero_elements;
2951 bool cleared, valid_const_initializer;
2952
2953 /* Aggregate types must lower constructors to initialization of
2954 individual elements. The exception is that a CONSTRUCTOR node
2955 with no elements indicates zero-initialization of the whole. */
2956 if (VEC_empty (constructor_elt, elts))
2957 break;
2958
2959 /* Fetch information about the constructor to direct later processing.
2960 We might want to make static versions of it in various cases, and
2961 can only do so if it is known to be a valid constant initializer. */
2962 valid_const_initializer
2963 = categorize_ctor_elements (ctor, &num_nonzero_elements,
2964 &num_ctor_elements, &cleared);
2965
2966 /* If a const aggregate variable is being initialized, then it
2967 should never be a loss to promote the variable to be static. */
2968 if (valid_const_initializer
2969 && num_nonzero_elements > 1
2970 && TREE_READONLY (object)
2971 && TREE_CODE (object) == VAR_DECL)
2972 {
2973 DECL_INITIAL (object) = ctor;
2974 TREE_STATIC (object) = 1;
2975 if (!DECL_NAME (object))
2976 DECL_NAME (object) = create_tmp_var_name ("C");
2977 walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);
2978
2979 /* ??? C++ doesn't automatically append a .<number> to the
2980 assembler name, and even when it does, it looks at FE private
2981 data structures to figure out what that number should be,
2982 which are not set for this variable. I suppose this is
2983 important for local statics for inline functions, which aren't
2984 "local" in the object file sense. So in order to get a unique
2985 TU-local symbol, we must invoke the lhd version now. */
2986 lhd_set_decl_assembler_name (object);
2987
2988 *expr_p = NULL_TREE;
2989 break;
2990 }
2991
2992 /* If there are "lots" of initialized elements, even discounting
2993 those that are not address constants (and thus *must* be
2994 computed at runtime), then partition the constructor into
2995 constant and non-constant parts. Block copy the constant
2996 parts in, then generate code for the non-constant parts. */
2997 /* TODO. There's code in cp/typeck.c to do this. */
2998
2999 num_type_elements = count_type_elements (type, true);
3000
3001 /* If count_type_elements could not determine the number of type elements
3002 for a constant-sized object, assume clearing is needed.
3003 Don't do this for variable-sized objects, as store_constructor
3004 will ignore the clearing of variable-sized objects. */
3005 if (num_type_elements < 0 && int_size_in_bytes (type) >= 0)
3006 cleared = true;
3007 /* If there are "lots" of zeros, then block clear the object first. */
3008 else if (num_type_elements - num_nonzero_elements > CLEAR_RATIO
3009 && num_nonzero_elements < num_type_elements/4)
3010 cleared = true;
3011 /* ??? This bit ought not be needed. For any element not present
3012 in the initializer, we should simply set it to zero. Except
3013 we'd need to *find* the elements that are not present, and that
3014 requires trickery to avoid quadratic compile-time behavior in
3015 large cases or excessive memory use in small cases. */
3016 else if (num_ctor_elements < num_type_elements)
3017 cleared = true;
3018
3019 /* If there are "lots" of initialized elements, and all of them
3020 are valid address constants, then the entire initializer can
3021 be dropped to memory, and then memcpy'd out. Don't do this
3022 for sparse arrays, though, as it's more efficient to follow
3023 the standard CONSTRUCTOR behavior of memset followed by
3024 individual element initialization. */
3025 if (valid_const_initializer && !cleared)
3026 {
3027 HOST_WIDE_INT size = int_size_in_bytes (type);
3028 unsigned int align;
3029
3030 /* ??? We can still get unbounded array types, at least
3031 from the C++ front end. This seems wrong, but attempt
3032 to work around it for now. */
3033 if (size < 0)
3034 {
3035 size = int_size_in_bytes (TREE_TYPE (object));
3036 if (size >= 0)
3037 TREE_TYPE (ctor) = type = TREE_TYPE (object);
3038 }
3039
3040 /* Find the maximum alignment we can assume for the object. */
3041 /* ??? Make use of DECL_OFFSET_ALIGN. */
3042 if (DECL_P (object))
3043 align = DECL_ALIGN (object);
3044 else
3045 align = TYPE_ALIGN (type);
3046
3047 if (size > 0 && !can_move_by_pieces (size, align))
3048 {
3049 tree new = create_tmp_var_raw (type, "C");
3050
3051 gimple_add_tmp_var (new);
3052 TREE_STATIC (new) = 1;
3053 TREE_READONLY (new) = 1;
3054 DECL_INITIAL (new) = ctor;
3055 if (align > DECL_ALIGN (new))
3056 {
3057 DECL_ALIGN (new) = align;
3058 DECL_USER_ALIGN (new) = 1;
3059 }
3060 walk_tree (&DECL_INITIAL (new), force_labels_r, NULL, NULL);
3061
3062 GENERIC_TREE_OPERAND (*expr_p, 1) = new;
3063
3064 /* This is no longer an assignment of a CONSTRUCTOR, but
3065 we still may have processing to do on the LHS. So
3066 pretend we didn't do anything here to let that happen. */
3067 return GS_UNHANDLED;
3068 }
3069 }
3070
3071 /* If there are nonzero elements, pre-evaluate to capture elements
3072 overlapping with the lhs into temporaries. We must do this before
3073 clearing to fetch the values before they are zeroed-out. */
3074 if (num_nonzero_elements > 0)
3075 {
3076 preeval_data.lhs_base_decl = get_base_address (object);
3077 if (!DECL_P (preeval_data.lhs_base_decl))
3078 preeval_data.lhs_base_decl = NULL;
3079 preeval_data.lhs_alias_set = get_alias_set (object);
3080
3081 gimplify_init_ctor_preeval (&GENERIC_TREE_OPERAND (*expr_p, 1),
3082 pre_p, post_p, &preeval_data);
3083 }
3084
3085 if (cleared)
3086 {
3087 /* Zap the CONSTRUCTOR element list, which simplifies this case.
3088 Note that we still have to gimplify, in order to handle the
3089 case of variable sized types. Avoid shared tree structures. */
3090 CONSTRUCTOR_ELTS (ctor) = NULL;
3091 object = unshare_expr (object);
3092 gimplify_stmt (expr_p);
3093 append_to_statement_list (*expr_p, pre_p);
3094 }
3095
3096 /* If we have not block cleared the object, or if there are nonzero
3097 elements in the constructor, add assignments to the individual
3098 scalar fields of the object. */
3099 if (!cleared || num_nonzero_elements > 0)
3100 gimplify_init_ctor_eval (object, elts, pre_p, cleared);
3101
3102 *expr_p = NULL_TREE;
3103 }
3104 break;
3105
3106 case COMPLEX_TYPE:
3107 {
3108 tree r, i;
3109
3110 /* Extract the real and imaginary parts out of the ctor. */
3111 gcc_assert (VEC_length (constructor_elt, elts) == 2);
3112 r = VEC_index (constructor_elt, elts, 0)->value;
3113 i = VEC_index (constructor_elt, elts, 1)->value;
3114 if (r == NULL || i == NULL)
3115 {
3116 tree zero = fold_convert (TREE_TYPE (type), integer_zero_node);
3117 if (r == NULL)
3118 r = zero;
3119 if (i == NULL)
3120 i = zero;
3121 }
3122
3123 /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
3124 represent creation of a complex value. */
3125 if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
3126 {
3127 ctor = build_complex (type, r, i);
3128 TREE_OPERAND (*expr_p, 1) = ctor;
3129 }
3130 else
3131 {
3132 ctor = build2 (COMPLEX_EXPR, type, r, i);
3133 TREE_OPERAND (*expr_p, 1) = ctor;
3134 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
3135 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
3136 fb_rvalue);
3137 }
3138 }
3139 break;
3140
3141 case VECTOR_TYPE:
3142 {
3143 unsigned HOST_WIDE_INT ix;
3144 constructor_elt *ce;
3145
3146 /* Go ahead and simplify constant constructors to VECTOR_CST. */
3147 if (TREE_CONSTANT (ctor))
3148 {
3149 bool constant_p = true;
3150 tree value;
3151
3152 /* Even when ctor is constant, it might contain non-*_CST
3153 elements (e.g. { 1.0/0.0 - 1.0/0.0, 0.0 }) and those don't
3154 belong into VECTOR_CST nodes. */
3155 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
3156 if (!CONSTANT_CLASS_P (value))
3157 {
3158 constant_p = false;
3159 break;
3160 }
3161
3162 if (constant_p)
3163 {
3164 TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
3165 break;
3166 }
3167
3168 /* Don't reduce a TREE_CONSTANT vector ctor even if we can't
3169 make a VECTOR_CST. It won't do anything for us, and it'll
3170 prevent us from representing it as a single constant. */
3171 break;
3172 }
3173
3174 /* Vector types use CONSTRUCTOR all the way through gimple
3175 compilation as a general initializer. */
3176 for (ix = 0; VEC_iterate (constructor_elt, elts, ix, ce); ix++)
3177 {
3178 enum gimplify_status tret;
3179 tret = gimplify_expr (&ce->value, pre_p, post_p,
3180 is_gimple_val, fb_rvalue);
3181 if (tret == GS_ERROR)
3182 ret = GS_ERROR;
3183 }
3184 if (!is_gimple_reg (GENERIC_TREE_OPERAND (*expr_p, 0)))
3185 GENERIC_TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
3186 }
3187 break;
3188
3189 default:
3190 /* So how did we get a CONSTRUCTOR for a scalar type? */
3191 gcc_unreachable ();
3192 }
3193
3194 if (ret == GS_ERROR)
3195 return GS_ERROR;
3196 else if (want_value)
3197 {
3198 append_to_statement_list (*expr_p, pre_p);
3199 *expr_p = object;
3200 return GS_OK;
3201 }
3202 else
3203 return GS_ALL_DONE;
3204 }
3205
3206 /* Given a pointer value OP0, return a simplified version of an
3207 indirection through OP0, or NULL_TREE if no simplification is
3208 possible. This may only be applied to a rhs of an expression.
3209 Note that the resulting type may be different from the type pointed
3210 to in the sense that it is still compatible from the langhooks
3211 point of view. */
3212
3213 static tree
3214 fold_indirect_ref_rhs (tree t)
3215 {
3216 tree type = TREE_TYPE (TREE_TYPE (t));
3217 tree sub = t;
3218 tree subtype;
3219
3220 STRIP_USELESS_TYPE_CONVERSION (sub);
3221 subtype = TREE_TYPE (sub);
3222 if (!POINTER_TYPE_P (subtype))
3223 return NULL_TREE;
3224
3225 if (TREE_CODE (sub) == ADDR_EXPR)
3226 {
3227 tree op = TREE_OPERAND (sub, 0);
3228 tree optype = TREE_TYPE (op);
3229 /* *&p => p */
3230 if (lang_hooks.types_compatible_p (type, optype))
3231 return op;
3232 /* *(foo *)&fooarray => fooarray[0] */
3233 else if (TREE_CODE (optype) == ARRAY_TYPE
3234 && lang_hooks.types_compatible_p (type, TREE_TYPE (optype)))
3235 {
3236 tree type_domain = TYPE_DOMAIN (optype);
3237 tree min_val = size_zero_node;
3238 if (type_domain && TYPE_MIN_VALUE (type_domain))
3239 min_val = TYPE_MIN_VALUE (type_domain);
3240 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
3241 }
3242 }
3243
3244 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
3245 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
3246 && lang_hooks.types_compatible_p (type, TREE_TYPE (TREE_TYPE (subtype))))
3247 {
3248 tree type_domain;
3249 tree min_val = size_zero_node;
3250 tree osub = sub;
3251 sub = fold_indirect_ref_rhs (sub);
3252 if (! sub)
3253 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
3254 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
3255 if (type_domain && TYPE_MIN_VALUE (type_domain))
3256 min_val = TYPE_MIN_VALUE (type_domain);
3257 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
3258 }
3259
3260 return NULL_TREE;
3261 }
3262
3263 /* Subroutine of gimplify_modify_expr to do simplifications of MODIFY_EXPRs
3264 based on the code of the RHS. We loop for as long as something changes. */
3265
3266 static enum gimplify_status
3267 gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p, tree *pre_p,
3268 tree *post_p, bool want_value)
3269 {
3270 enum gimplify_status ret = GS_OK;
3271
3272 while (ret != GS_UNHANDLED)
3273 switch (TREE_CODE (*from_p))
3274 {
3275 case INDIRECT_REF:
3276 {
3277 /* If we have code like
3278
3279 *(const A*)(A*)&x
3280
3281 where the type of "x" is a (possibly cv-qualified variant
3282 of "A"), treat the entire expression as identical to "x".
3283 This kind of code arises in C++ when an object is bound
3284 to a const reference, and if "x" is a TARGET_EXPR we want
3285 to take advantage of the optimization below. */
3286 tree t = fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
3287 if (t)
3288 {
3289 *from_p = t;
3290 ret = GS_OK;
3291 }
3292 else
3293 ret = GS_UNHANDLED;
3294 break;
3295 }
3296
3297 case TARGET_EXPR:
3298 {
3299 /* If we are initializing something from a TARGET_EXPR, strip the
3300 TARGET_EXPR and initialize it directly, if possible. This can't
3301 be done if the initializer is void, since that implies that the
3302 temporary is set in some non-trivial way.
3303
3304 ??? What about code that pulls out the temp and uses it
3305 elsewhere? I think that such code never uses the TARGET_EXPR as
3306 an initializer. If I'm wrong, we'll die because the temp won't
3307 have any RTL. In that case, I guess we'll need to replace
3308 references somehow. */
3309 tree init = TARGET_EXPR_INITIAL (*from_p);
3310
3311 if (!VOID_TYPE_P (TREE_TYPE (init)))
3312 {
3313 *from_p = init;
3314 ret = GS_OK;
3315 }
3316 else
3317 ret = GS_UNHANDLED;
3318 }
3319 break;
3320
3321 case COMPOUND_EXPR:
3322 /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
3323 caught. */
3324 gimplify_compound_expr (from_p, pre_p, true);
3325 ret = GS_OK;
3326 break;
3327
3328 case CONSTRUCTOR:
3329 /* If we're initializing from a CONSTRUCTOR, break this into
3330 individual MODIFY_EXPRs. */
3331 return gimplify_init_constructor (expr_p, pre_p, post_p, want_value);
3332
3333 case COND_EXPR:
3334 /* If we're assigning to a non-register type, push the assignment
3335 down into the branches. This is mandatory for ADDRESSABLE types,
3336 since we cannot generate temporaries for such, but it saves a
3337 copy in other cases as well. */
3338 if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
3339 {
3340 /* This code should mirror the code in gimplify_cond_expr. */
3341 enum tree_code code = TREE_CODE (*expr_p);
3342 tree cond = *from_p;
3343 tree result = *to_p;
3344
3345 ret = gimplify_expr (&result, pre_p, post_p,
3346 is_gimple_min_lval, fb_lvalue);
3347 if (ret != GS_ERROR)
3348 ret = GS_OK;
3349
3350 if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
3351 TREE_OPERAND (cond, 1)
3352 = build2 (code, void_type_node, result,
3353 TREE_OPERAND (cond, 1));
3354 if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
3355 TREE_OPERAND (cond, 2)
3356 = build2 (code, void_type_node, unshare_expr (result),
3357 TREE_OPERAND (cond, 2));
3358
3359 TREE_TYPE (cond) = void_type_node;
3360 recalculate_side_effects (cond);
3361
3362 if (want_value)
3363 {
3364 gimplify_and_add (cond, pre_p);
3365 *expr_p = unshare_expr (result);
3366 }
3367 else
3368 *expr_p = cond;
3369 return ret;
3370 }
3371 else
3372 ret = GS_UNHANDLED;
3373 break;
3374
3375 case CALL_EXPR:
3376 /* For calls that return in memory, give *to_p as the CALL_EXPR's
3377 return slot so that we don't generate a temporary. */
3378 if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
3379 && aggregate_value_p (*from_p, *from_p))
3380 {
3381 bool use_target;
3382
3383 if (!(rhs_predicate_for (*to_p))(*from_p))
3384 /* If we need a temporary, *to_p isn't accurate. */
3385 use_target = false;
3386 else if (TREE_CODE (*to_p) == RESULT_DECL
3387 && DECL_NAME (*to_p) == NULL_TREE
3388 && needs_to_live_in_memory (*to_p))
3389 /* It's OK to use the return slot directly unless it's an NRV. */
3390 use_target = true;
3391 else if (is_gimple_reg_type (TREE_TYPE (*to_p))
3392 || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
3393 /* Don't force regs into memory. */
3394 use_target = false;
3395 else if (TREE_CODE (*to_p) == VAR_DECL
3396 && DECL_GIMPLE_FORMAL_TEMP_P (*to_p))
3397 /* Don't use the original target if it's a formal temp; we
3398 don't want to take its address. */
3399 use_target = false;
3400 else if (TREE_CODE (*expr_p) == INIT_EXPR)
3401 /* It's OK to use the target directly if it's being
3402 initialized. */
3403 use_target = true;
3404 else if (!is_gimple_non_addressable (*to_p))
3405 /* Don't use the original target if it's already addressable;
3406 if its address escapes, and the called function uses the
3407 NRV optimization, a conforming program could see *to_p
3408 change before the called function returns; see c++/19317.
3409 When optimizing, the return_slot pass marks more functions
3410 as safe after we have escape info. */
3411 use_target = false;
3412 else
3413 use_target = true;
3414
3415 if (use_target)
3416 {
3417 CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
3418 lang_hooks.mark_addressable (*to_p);
3419 }
3420 }
3421
3422 ret = GS_UNHANDLED;
3423 break;
3424
3425 /* If we're initializing from a container, push the initialization
3426 inside it. */
3427 case CLEANUP_POINT_EXPR:
3428 case BIND_EXPR:
3429 case STATEMENT_LIST:
3430 {
3431 tree wrap = *from_p;
3432 tree t;
3433
3434 ret = gimplify_expr (to_p, pre_p, post_p,
3435 is_gimple_min_lval, fb_lvalue);
3436 if (ret != GS_ERROR)
3437 ret = GS_OK;
3438
3439 t = voidify_wrapper_expr (wrap, *expr_p);
3440 gcc_assert (t == *expr_p);
3441
3442 if (want_value)
3443 {
3444 gimplify_and_add (wrap, pre_p);
3445 *expr_p = unshare_expr (*to_p);
3446 }
3447 else
3448 *expr_p = wrap;
3449 return GS_OK;
3450 }
3451
3452 default:
3453 ret = GS_UNHANDLED;
3454 break;
3455 }
3456
3457 return ret;
3458 }
3459
3460 /* Destructively convert the TREE pointer in TP into a gimple tuple if
3461 appropriate. */
3462
3463 static void
3464 tree_to_gimple_tuple (tree *tp)
3465 {
3466
3467 switch (TREE_CODE (*tp))
3468 {
3469 case GIMPLE_MODIFY_STMT:
3470 return;
3471 case MODIFY_EXPR:
3472 {
3473 struct gimple_stmt *gs;
3474 tree lhs = TREE_OPERAND (*tp, 0);
3475 bool def_stmt_self_p = false;
3476
3477 if (TREE_CODE (lhs) == SSA_NAME)
3478 {
3479 if (SSA_NAME_DEF_STMT (lhs) == *tp)
3480 def_stmt_self_p = true;
3481 }
3482
3483 gs = &make_node (GIMPLE_MODIFY_STMT)->gstmt;
3484 gs->base = (*tp)->base;
3485 /* The set to base above overwrites the CODE. */
3486 TREE_SET_CODE ((tree) gs, GIMPLE_MODIFY_STMT);
3487
3488 gs->locus = EXPR_LOCUS (*tp);
3489 gs->operands[0] = TREE_OPERAND (*tp, 0);
3490 gs->operands[1] = TREE_OPERAND (*tp, 1);
3491 gs->block = TREE_BLOCK (*tp);
3492 *tp = (tree)gs;
3493
3494 /* If we re-gimplify a set to an SSA_NAME, we must change the
3495 SSA name's DEF_STMT link. */
3496 if (def_stmt_self_p)
3497 SSA_NAME_DEF_STMT (GIMPLE_STMT_OPERAND (*tp, 0)) = *tp;
3498
3499 return;
3500 }
3501 default:
3502 break;
3503 }
3504 }
3505
3506 /* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
3507 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with
3508 DECL_GIMPLE_REG_P set. */
3509
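/* For example (informal sketch), with "z" a complex variable that has
   DECL_GIMPLE_REG_P set,

     __real__ z = r;

   becomes roughly

     t = __imag__ z;
     z = COMPLEX_EXPR <r, t>;

   so that "z" is always assigned as a whole and can be kept in a real
   rather than a virtual operand.  */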
3510 static enum gimplify_status
3511 gimplify_modify_expr_complex_part (tree *expr_p, tree *pre_p, bool want_value)
3512 {
3513 enum tree_code code, ocode;
3514 tree lhs, rhs, new_rhs, other, realpart, imagpart;
3515
3516 lhs = GENERIC_TREE_OPERAND (*expr_p, 0);
3517 rhs = GENERIC_TREE_OPERAND (*expr_p, 1);
3518 code = TREE_CODE (lhs);
3519 lhs = TREE_OPERAND (lhs, 0);
3520
3521 ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
3522 other = build1 (ocode, TREE_TYPE (rhs), lhs);
3523 other = get_formal_tmp_var (other, pre_p);
3524
3525 realpart = code == REALPART_EXPR ? rhs : other;
3526 imagpart = code == REALPART_EXPR ? other : rhs;
3527
3528 if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
3529 new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
3530 else
3531 new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
3532
3533 GENERIC_TREE_OPERAND (*expr_p, 0) = lhs;
3534 GENERIC_TREE_OPERAND (*expr_p, 1) = new_rhs;
3535
3536 if (want_value)
3537 {
3538 tree_to_gimple_tuple (expr_p);
3539
3540 append_to_statement_list (*expr_p, pre_p);
3541 *expr_p = rhs;
3542 }
3543
3544 return GS_ALL_DONE;
3545 }
3546
3547 /* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
3548
3549 modify_expr
3550 : varname '=' rhs
3551 | '*' ID '=' rhs
3552
3553 PRE_P points to the list where side effects that must happen before
3554 *EXPR_P should be stored.
3555
3556 POST_P points to the list where side effects that must happen after
3557 *EXPR_P should be stored.
3558
3559 WANT_VALUE is nonzero iff we want to use the value of this expression
3560 in another expression. */
3561
3562 static enum gimplify_status
3563 gimplify_modify_expr (tree *expr_p, tree *pre_p, tree *post_p, bool want_value)
3564 {
3565 tree *from_p = &GENERIC_TREE_OPERAND (*expr_p, 1);
3566 tree *to_p = &GENERIC_TREE_OPERAND (*expr_p, 0);
3567 enum gimplify_status ret = GS_UNHANDLED;
3568
3569 gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
3570 || TREE_CODE (*expr_p) == GIMPLE_MODIFY_STMT
3571 || TREE_CODE (*expr_p) == INIT_EXPR);
3572
3573 /* For zero sized types, only gimplify the left hand side and right hand side
3574 as statements and throw away the assignment. */
3575 if (zero_sized_type (TREE_TYPE (*from_p)))
3576 {
3577 gimplify_stmt (from_p);
3578 gimplify_stmt (to_p);
3579 append_to_statement_list (*from_p, pre_p);
3580 append_to_statement_list (*to_p, pre_p);
3581 *expr_p = NULL_TREE;
3582 return GS_ALL_DONE;
3583 }
3584
3585 /* See if any simplifications can be done based on what the RHS is. */
3586 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
3587 want_value);
3588 if (ret != GS_UNHANDLED)
3589 return ret;
3590
3591 /* If the value being copied is of variable width, compute the length
3592 of the copy into a WITH_SIZE_EXPR. Note that we need to do this
3593 before gimplifying any of the operands so that we can resolve any
3594 PLACEHOLDER_EXPRs in the size. Also note that the RTL expander uses
3595 the size of the expression to be copied, not of the destination, so
3596 that is what we must use here. */
3597 maybe_with_size_expr (from_p);
3598
3599 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
3600 if (ret == GS_ERROR)
3601 return ret;
3602
3603 ret = gimplify_expr (from_p, pre_p, post_p,
3604 rhs_predicate_for (*to_p), fb_rvalue);
3605 if (ret == GS_ERROR)
3606 return ret;
3607
3608 /* Now see if the above changed *from_p to something we handle specially. */
3609 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
3610 want_value);
3611 if (ret != GS_UNHANDLED)
3612 return ret;
3613
3614 /* If we've got a variable-sized assignment between two lvalues (i.e. one that
3615 does not involve a call), then we can make things a bit more straightforward
3616 by converting the assignment to memcpy or memset. */
3617 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
3618 {
3619 tree from = TREE_OPERAND (*from_p, 0);
3620 tree size = TREE_OPERAND (*from_p, 1);
3621
3622 if (TREE_CODE (from) == CONSTRUCTOR)
3623 return gimplify_modify_expr_to_memset (expr_p, size, want_value);
3624 if (is_gimple_addressable (from))
3625 {
3626 *from_p = from;
3627 return gimplify_modify_expr_to_memcpy (expr_p, size, want_value);
3628 }
3629 }
3630
3631 /* Transform partial stores to non-addressable complex variables into
3632 total stores. This allows us to use real instead of virtual operands
3633 for these variables, which improves optimization. */
3634 if ((TREE_CODE (*to_p) == REALPART_EXPR
3635 || TREE_CODE (*to_p) == IMAGPART_EXPR)
3636 && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
3637 return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);
3638
3639 if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
3640 {
3641 /* If we've somehow already got an SSA_NAME on the LHS, then
3642 we've probably modified it twice. Not good.  */
3643 gcc_assert (TREE_CODE (*to_p) != SSA_NAME);
3644 *to_p = make_ssa_name (*to_p, *expr_p);
3645 }
3646
3647 /* Try to alleviate the effects of the gimplification creating artificial
3648 temporaries (see for example is_gimple_reg_rhs) on the debug info. */
3649 if (!gimplify_ctxp->into_ssa
3650 && DECL_P (*from_p) && DECL_IGNORED_P (*from_p)
3651 && DECL_P (*to_p) && !DECL_IGNORED_P (*to_p))
3652 {
3653 if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
3654 DECL_NAME (*from_p)
3655 = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
3656 DECL_DEBUG_EXPR_IS_FROM (*from_p) = 1;
3657 SET_DECL_DEBUG_EXPR (*from_p, *to_p);
3658 }
3659
3660 if (want_value)
3661 {
3662 tree_to_gimple_tuple (expr_p);
3663
3664 append_to_statement_list (*expr_p, pre_p);
3665 *expr_p = *to_p;
3666 return GS_OK;
3667 }
3668
3669 return GS_ALL_DONE;
3670 }
3671
3672 /* Gimplify a comparison between two variable-sized objects. Do this
3673 with a call to BUILT_IN_MEMCMP. */
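/* Illustrative only: a comparison X == Y between two variable-sized
   objects becomes roughly

	__builtin_memcmp (&X, &Y, <TYPE_SIZE_UNIT of X's type>) == 0

   where the size expression is unshared and any PLACEHOLDER_EXPRs in it
   are substituted using X.  */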
3674
3675 static enum gimplify_status
3676 gimplify_variable_sized_compare (tree *expr_p)
3677 {
3678 tree op0 = TREE_OPERAND (*expr_p, 0);
3679 tree op1 = TREE_OPERAND (*expr_p, 1);
3680 tree t, arg, dest, src;
3681
3682 arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
3683 arg = unshare_expr (arg);
3684 arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
3685 src = build_fold_addr_expr (op1);
3686 dest = build_fold_addr_expr (op0);
3687 t = implicit_built_in_decls[BUILT_IN_MEMCMP];
3688 t = build_call_expr (t, 3, dest, src, arg);
3689 *expr_p
3690 = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
3691
3692 return GS_OK;
3693 }
3694
3695 /* Gimplify a comparison between two aggregate objects of integral scalar
3696 mode as a comparison between the bitwise equivalent scalar values. */
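/* Illustrative only: for two aggregates A and B whose type fits in an
   integral machine mode (say SImode), A == B is rewritten as

	VIEW_CONVERT_EXPR<unsigned int>(A) == VIEW_CONVERT_EXPR<unsigned int>(B)

   i.e. the bitwise-equivalent scalar values are compared directly.  */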
3697
3698 static enum gimplify_status
3699 gimplify_scalar_mode_aggregate_compare (tree *expr_p)
3700 {
3701 tree op0 = TREE_OPERAND (*expr_p, 0);
3702 tree op1 = TREE_OPERAND (*expr_p, 1);
3703
3704 tree type = TREE_TYPE (op0);
3705 tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
3706
3707 op0 = fold_build1 (VIEW_CONVERT_EXPR, scalar_type, op0);
3708 op1 = fold_build1 (VIEW_CONVERT_EXPR, scalar_type, op1);
3709
3710 *expr_p
3711 = fold_build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
3712
3713 return GS_OK;
3714 }
3715
3716 /* Gimplify TRUTH_ANDIF_EXPR and TRUTH_ORIF_EXPR expressions. EXPR_P
3717 points to the expression to gimplify.
3718
3719 Expressions of the form 'a && b' are gimplified to:
3720
3721 a && b ? true : false
3722
3723 gimplify_cond_expr will do the rest.
3724
3725 PRE_P points to the list where side effects that must happen before
3726 *EXPR_P should be stored. */
3727
3728 static enum gimplify_status
3729 gimplify_boolean_expr (tree *expr_p)
3730 {
3731 /* Preserve the original type of the expression. */
3732 tree type = TREE_TYPE (*expr_p);
3733
3734 *expr_p = build3 (COND_EXPR, type, *expr_p,
3735 fold_convert (type, boolean_true_node),
3736 fold_convert (type, boolean_false_node));
3737
3738 return GS_OK;
3739 }
3740
3741 /* Gimplifies an expression sequence. This function gimplifies each
3742 expression and re-writes the original expression with the last
3743 expression of the sequence in GIMPLE form.
3744
3745 PRE_P points to the list where the side effects for all the
3746 expressions in the sequence will be emitted.
3747
3748 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
3749 /* ??? Should rearrange to share the pre-queue with all the indirect
3750 invocations of gimplify_expr. Would probably save on creations
3751 of statement_list nodes. */
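/* Illustrative only: for (a, b, c) the side effects of a and b are
   gimplified and appended to PRE_P as statements, and *EXPR_P is rewritten
   to c; when WANT_VALUE is false, c is then gimplified as a statement as
   well.  */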
3752
3753 static enum gimplify_status
3754 gimplify_compound_expr (tree *expr_p, tree *pre_p, bool want_value)
3755 {
3756 tree t = *expr_p;
3757
3758 do
3759 {
3760 tree *sub_p = &TREE_OPERAND (t, 0);
3761
3762 if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
3763 gimplify_compound_expr (sub_p, pre_p, false);
3764 else
3765 gimplify_stmt (sub_p);
3766 append_to_statement_list (*sub_p, pre_p);
3767
3768 t = TREE_OPERAND (t, 1);
3769 }
3770 while (TREE_CODE (t) == COMPOUND_EXPR);
3771
3772 *expr_p = t;
3773 if (want_value)
3774 return GS_OK;
3775 else
3776 {
3777 gimplify_stmt (expr_p);
3778 return GS_ALL_DONE;
3779 }
3780 }
3781
3782 /* Gimplifies a statement list. These may be created either by an
3783 enlightened front-end, or by shortcut_cond_expr. */
3784
3785 static enum gimplify_status
3786 gimplify_statement_list (tree *expr_p, tree *pre_p)
3787 {
3788 tree temp = voidify_wrapper_expr (*expr_p, NULL);
3789
3790 tree_stmt_iterator i = tsi_start (*expr_p);
3791
3792 while (!tsi_end_p (i))
3793 {
3794 tree t;
3795
3796 gimplify_stmt (tsi_stmt_ptr (i));
3797
3798 t = tsi_stmt (i);
3799 if (t == NULL)
3800 tsi_delink (&i);
3801 else if (TREE_CODE (t) == STATEMENT_LIST)
3802 {
3803 tsi_link_before (&i, t, TSI_SAME_STMT);
3804 tsi_delink (&i);
3805 }
3806 else
3807 tsi_next (&i);
3808 }
3809
3810 if (temp)
3811 {
3812 append_to_statement_list (*expr_p, pre_p);
3813 *expr_p = temp;
3814 return GS_OK;
3815 }
3816
3817 return GS_ALL_DONE;
3818 }
3819
3820 /* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
3821 gimplify. After gimplification, EXPR_P will point to a new temporary
3822 that holds the original value of the SAVE_EXPR node.
3823
3824 PRE_P points to the list where side effects that must happen before
3825 *EXPR_P should be stored. */
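/* Illustrative only: an unresolved SAVE_EXPR <E>, with E of non-void type,
   becomes

	tmp = E;		<-- appended to PRE_P
	... tmp ...		<-- *EXPR_P

   and the node is marked SAVE_EXPR_RESOLVED_P so later occurrences reuse
   the same temporary.  */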
3826
3827 static enum gimplify_status
3828 gimplify_save_expr (tree *expr_p, tree *pre_p, tree *post_p)
3829 {
3830 enum gimplify_status ret = GS_ALL_DONE;
3831 tree val;
3832
3833 gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
3834 val = TREE_OPERAND (*expr_p, 0);
3835
3836 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
3837 if (!SAVE_EXPR_RESOLVED_P (*expr_p))
3838 {
3839 /* The operand may be a void-valued expression such as SAVE_EXPRs
3840 generated by the Java frontend for class initialization. It is
3841 being executed only for its side-effects. */
3842 if (TREE_TYPE (val) == void_type_node)
3843 {
3844 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
3845 is_gimple_stmt, fb_none);
3846 append_to_statement_list (TREE_OPERAND (*expr_p, 0), pre_p);
3847 val = NULL;
3848 }
3849 else
3850 val = get_initialized_tmp_var (val, pre_p, post_p);
3851
3852 TREE_OPERAND (*expr_p, 0) = val;
3853 SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
3854 }
3855
3856 *expr_p = val;
3857
3858 return ret;
3859 }
3860
3861 /* Re-write the ADDR_EXPR node pointed to by EXPR_P
3862
3863 unary_expr
3864 : ...
3865 | '&' varname
3866 ...
3867
3868 PRE_P points to the list where side effects that must happen before
3869 *EXPR_P should be stored.
3870
3871 POST_P points to the list where side effects that must happen after
3872 *EXPR_P should be stored. */
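/* Illustrative only: an expression of the form '&*ptr' that reaches the
   gimplifier (e.g. from builtins such as __builtin_va_end) is folded back
   to 'ptr', with a conversion added if the pointer types are not
   compatible.  */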
3873
3874 static enum gimplify_status
3875 gimplify_addr_expr (tree *expr_p, tree *pre_p, tree *post_p)
3876 {
3877 tree expr = *expr_p;
3878 tree op0 = TREE_OPERAND (expr, 0);
3879 enum gimplify_status ret;
3880
3881 switch (TREE_CODE (op0))
3882 {
3883 case INDIRECT_REF:
3884 case MISALIGNED_INDIRECT_REF:
3885 do_indirect_ref:
3886 /* Check if we are dealing with an expression of the form '&*ptr'.
3887 While the front end folds away '&*ptr' into 'ptr', these
3888 expressions may be generated internally by the compiler (e.g.,
3889 builtins like __builtin_va_end). */
3890 /* Caution: the silent array decomposition semantics we allow for
3891 ADDR_EXPR means we can't always discard the pair. */
3892 /* Gimplification of the ADDR_EXPR operand may drop
3893 cv-qualification conversions, so make sure we add them if
3894 needed. */
3895 {
3896 tree op00 = TREE_OPERAND (op0, 0);
3897 tree t_expr = TREE_TYPE (expr);
3898 tree t_op00 = TREE_TYPE (op00);
3899
3900 if (!lang_hooks.types_compatible_p (t_expr, t_op00))
3901 {
3902 #ifdef ENABLE_CHECKING
3903 tree t_op0 = TREE_TYPE (op0);
3904 gcc_assert (POINTER_TYPE_P (t_expr)
3905 && cpt_same_type (TREE_CODE (t_op0) == ARRAY_TYPE
3906 ? TREE_TYPE (t_op0) : t_op0,
3907 TREE_TYPE (t_expr))
3908 && POINTER_TYPE_P (t_op00)
3909 && cpt_same_type (t_op0, TREE_TYPE (t_op00)));
3910 #endif
3911 op00 = fold_convert (TREE_TYPE (expr), op00);
3912 }
3913 *expr_p = op00;
3914 ret = GS_OK;
3915 }
3916 break;
3917
3918 case VIEW_CONVERT_EXPR:
3919 /* Take the address of our operand and then convert it to the type of
3920 this ADDR_EXPR.
3921
3922 ??? The interactions of VIEW_CONVERT_EXPR and aliasing are not at
3923 all clear. The impact of this transformation is even less clear. */
3924
3925 /* If the operand is a useless conversion, look through it. Doing so
3926 guarantees that the ADDR_EXPR and its operand will remain of the
3927 same type. */
3928 if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
3929 op0 = TREE_OPERAND (op0, 0);
3930
3931 *expr_p = fold_convert (TREE_TYPE (expr),
3932 build_fold_addr_expr (TREE_OPERAND (op0, 0)));
3933 ret = GS_OK;
3934 break;
3935
3936 default:
3937 /* We use fb_either here because the C frontend sometimes takes
3938 the address of a call that returns a struct; see
3939 gcc.dg/c99-array-lval-1.c. The gimplifier will correctly make
3940 the implied temporary explicit. */
3941 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
3942 is_gimple_addressable, fb_either);
3943 if (ret != GS_ERROR)
3944 {
3945 op0 = TREE_OPERAND (expr, 0);
3946
3947 /* For various reasons, the gimplification of the expression
3948 may have made a new INDIRECT_REF. */
3949 if (TREE_CODE (op0) == INDIRECT_REF)
3950 goto do_indirect_ref;
3951
3952 /* Make sure TREE_INVARIANT, TREE_CONSTANT, and TREE_SIDE_EFFECTS
3953 is set properly. */
3954 recompute_tree_invariant_for_addr_expr (expr);
3955
3956 /* Mark the RHS addressable. */
3957 lang_hooks.mark_addressable (TREE_OPERAND (expr, 0));
3958 }
3959 break;
3960 }
3961
3962 return ret;
3963 }
3964
3965 /* Gimplify the operands of an ASM_EXPR. Input operands should be gimple
3966 values; output operands should be gimple lvalues. */
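/* Illustrative only: an in/out operand such as "+r" (x) is split into an
   output "=r" (x) plus a matching input that names it by operand number
   (e.g. "0" (x)), which gives the optimizers separate input and output
   operands to work with.  */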
3967
3968 static enum gimplify_status
3969 gimplify_asm_expr (tree *expr_p, tree *pre_p, tree *post_p)
3970 {
3971 tree expr = *expr_p;
3972 int noutputs = list_length (ASM_OUTPUTS (expr));
3973 const char **oconstraints
3974 = (const char **) alloca ((noutputs) * sizeof (const char *));
3975 int i;
3976 tree link;
3977 const char *constraint;
3978 bool allows_mem, allows_reg, is_inout;
3979 enum gimplify_status ret, tret;
3980
3981 ret = GS_ALL_DONE;
3982 for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = TREE_CHAIN (link))
3983 {
3984 size_t constraint_len;
3985 oconstraints[i] = constraint
3986 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
3987 constraint_len = strlen (constraint);
3988 if (constraint_len == 0)
3989 continue;
3990
3991 parse_output_constraint (&constraint, i, 0, 0,
3992 &allows_mem, &allows_reg, &is_inout);
3993
3994 if (!allows_reg && allows_mem)
3995 lang_hooks.mark_addressable (TREE_VALUE (link));
3996
3997 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
3998 is_inout ? is_gimple_min_lval : is_gimple_lvalue,
3999 fb_lvalue | fb_mayfail);
4000 if (tret == GS_ERROR)
4001 {
4002 error ("invalid lvalue in asm output %d", i);
4003 ret = tret;
4004 }
4005
4006 if (is_inout)
4007 {
4008 /* An input/output operand. To give the optimizers more
4009 flexibility, split it into separate input and output
4010 operands. */
4011 tree input;
4012 char buf[10];
4013
4014 /* Turn the in/out constraint into an output constraint. */
4015 char *p = xstrdup (constraint);
4016 p[0] = '=';
4017 TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);
4018
4019 /* And add a matching input constraint. */
4020 if (allows_reg)
4021 {
4022 sprintf (buf, "%d", i);
4023
4024 /* If there are multiple alternatives in the constraint,
4025 handle each of them individually. Those that allow a register
4026 will be replaced with the operand number; the others will stay
4027 unchanged. */
4028 if (strchr (p, ',') != NULL)
4029 {
4030 size_t len = 0, buflen = strlen (buf);
4031 char *beg, *end, *str, *dst;
4032
4033 for (beg = p + 1;;)
4034 {
4035 end = strchr (beg, ',');
4036 if (end == NULL)
4037 end = strchr (beg, '\0');
4038 if ((size_t) (end - beg) < buflen)
4039 len += buflen + 1;
4040 else
4041 len += end - beg + 1;
4042 if (*end)
4043 beg = end + 1;
4044 else
4045 break;
4046 }
4047
4048 str = (char *) alloca (len);
4049 for (beg = p + 1, dst = str;;)
4050 {
4051 const char *tem;
4052 bool mem_p, reg_p, inout_p;
4053
4054 end = strchr (beg, ',');
4055 if (end)
4056 *end = '\0';
4057 beg[-1] = '=';
4058 tem = beg - 1;
4059 parse_output_constraint (&tem, i, 0, 0,
4060 &mem_p, &reg_p, &inout_p);
4061 if (dst != str)
4062 *dst++ = ',';
4063 if (reg_p)
4064 {
4065 memcpy (dst, buf, buflen);
4066 dst += buflen;
4067 }
4068 else
4069 {
4070 if (end)
4071 len = end - beg;
4072 else
4073 len = strlen (beg);
4074 memcpy (dst, beg, len);
4075 dst += len;
4076 }
4077 if (end)
4078 beg = end + 1;
4079 else
4080 break;
4081 }
4082 *dst = '\0';
4083 input = build_string (dst - str, str);
4084 }
4085 else
4086 input = build_string (strlen (buf), buf);
4087 }
4088 else
4089 input = build_string (constraint_len - 1, constraint + 1);
4090
4091 free (p);
4092
4093 input = build_tree_list (build_tree_list (NULL_TREE, input),
4094 unshare_expr (TREE_VALUE (link)));
4095 ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
4096 }
4097 }
4098
4099 for (link = ASM_INPUTS (expr); link; ++i, link = TREE_CHAIN (link))
4100 {
4101 constraint
4102 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4103 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
4104 oconstraints, &allows_mem, &allows_reg);
4105
4106 /* If the operand is a memory input, it should be an lvalue. */
4107 if (!allows_reg && allows_mem)
4108 {
4109 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
4110 is_gimple_lvalue, fb_lvalue | fb_mayfail);
4111 lang_hooks.mark_addressable (TREE_VALUE (link));
4112 if (tret == GS_ERROR)
4113 {
4114 error ("memory input %d is not directly addressable", i);
4115 ret = tret;
4116 }
4117 }
4118 else
4119 {
4120 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
4121 is_gimple_asm_val, fb_rvalue);
4122 if (tret == GS_ERROR)
4123 ret = tret;
4124 }
4125 }
4126
4127 return ret;
4128 }
4129
4130 /* Gimplify a CLEANUP_POINT_EXPR. Currently this works by adding
4131 WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
4132 gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
4133 return to this function.
4134
4135 FIXME should we complexify the prequeue handling instead? Or use flags
4136 for all the cleanups and let the optimizer tighten them up? The current
4137 code seems pretty fragile; it will break on a cleanup within any
4138 non-conditional nesting. But any such nesting would be broken, anyway;
4139 we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
4140 and continues out of it. We can do that at the RTL level, though, so
4141 having an optimizer to tighten up try/finally regions would be a Good
4142 Thing. */
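/* Illustrative only: a gimplified body of the form

	stmt1; WCE <cleanup>; stmt2; stmt3;

   where WCE is a WITH_CLEANUP_EXPR, is converted here into

	stmt1;
	try { stmt2; stmt3; } finally { cleanup; }

   (a TRY_CATCH_EXPR instead when CLEANUP_EH_ONLY is set on the WCE).  */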
4143
4144 static enum gimplify_status
4145 gimplify_cleanup_point_expr (tree *expr_p, tree *pre_p)
4146 {
4147 tree_stmt_iterator iter;
4148 tree body;
4149
4150 tree temp = voidify_wrapper_expr (*expr_p, NULL);
4151
4152 /* We only care about the number of conditions between the innermost
4153 CLEANUP_POINT_EXPR and the cleanup. So save and reset the count and
4154 any cleanups collected outside the CLEANUP_POINT_EXPR. */
4155 int old_conds = gimplify_ctxp->conditions;
4156 tree old_cleanups = gimplify_ctxp->conditional_cleanups;
4157 gimplify_ctxp->conditions = 0;
4158 gimplify_ctxp->conditional_cleanups = NULL_TREE;
4159
4160 body = TREE_OPERAND (*expr_p, 0);
4161 gimplify_to_stmt_list (&body);
4162
4163 gimplify_ctxp->conditions = old_conds;
4164 gimplify_ctxp->conditional_cleanups = old_cleanups;
4165
4166 for (iter = tsi_start (body); !tsi_end_p (iter); )
4167 {
4168 tree *wce_p = tsi_stmt_ptr (iter);
4169 tree wce = *wce_p;
4170
4171 if (TREE_CODE (wce) == WITH_CLEANUP_EXPR)
4172 {
4173 if (tsi_one_before_end_p (iter))
4174 {
4175 tsi_link_before (&iter, TREE_OPERAND (wce, 0), TSI_SAME_STMT);
4176 tsi_delink (&iter);
4177 break;
4178 }
4179 else
4180 {
4181 tree sl, tfe;
4182 enum tree_code code;
4183
4184 if (CLEANUP_EH_ONLY (wce))
4185 code = TRY_CATCH_EXPR;
4186 else
4187 code = TRY_FINALLY_EXPR;
4188
4189 sl = tsi_split_statement_list_after (&iter);
4190 tfe = build2 (code, void_type_node, sl, NULL_TREE);
4191 append_to_statement_list (TREE_OPERAND (wce, 0),
4192 &TREE_OPERAND (tfe, 1));
4193 *wce_p = tfe;
4194 iter = tsi_start (sl);
4195 }
4196 }
4197 else
4198 tsi_next (&iter);
4199 }
4200
4201 if (temp)
4202 {
4203 *expr_p = temp;
4204 append_to_statement_list (body, pre_p);
4205 return GS_OK;
4206 }
4207 else
4208 {
4209 *expr_p = body;
4210 return GS_ALL_DONE;
4211 }
4212 }
4213
4214 /* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP
4215 is the cleanup action required. */
4216
4217 static void
4218 gimple_push_cleanup (tree var, tree cleanup, bool eh_only, tree *pre_p)
4219 {
4220 tree wce;
4221
4222 /* Errors can result in improperly nested cleanups, which leads to
4223 confusion when trying to resolve the WITH_CLEANUP_EXPR. */
4224 if (errorcount || sorrycount)
4225 return;
4226
4227 if (gimple_conditional_context ())
4228 {
4229 /* If we're in a conditional context, this is more complex. We only
4230 want to run the cleanup if we actually ran the initialization that
4231 necessitates it, but we want to run it after the end of the
4232 conditional context. So we wrap the try/finally around the
4233 condition and use a flag to determine whether or not to actually
4234 run the destructor. Thus
4235
4236 test ? f(A()) : 0
4237
4238 becomes (approximately)
4239
4240 flag = 0;
4241 try {
4242 if (test) { A::A(temp); flag = 1; val = f(temp); }
4243 else { val = 0; }
4244 } finally {
4245 if (flag) A::~A(temp);
4246 }
4247 val
4248 */
4249
4250 tree flag = create_tmp_var (boolean_type_node, "cleanup");
4251 tree ffalse = build_gimple_modify_stmt (flag, boolean_false_node);
4252 tree ftrue = build_gimple_modify_stmt (flag, boolean_true_node);
4253 cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
4254 wce = build1 (WITH_CLEANUP_EXPR, void_type_node, cleanup);
4255 append_to_statement_list (ffalse, &gimplify_ctxp->conditional_cleanups);
4256 append_to_statement_list (wce, &gimplify_ctxp->conditional_cleanups);
4257 append_to_statement_list (ftrue, pre_p);
4258
4259 /* Because of this manipulation, and the EH edges that jump
4260 threading cannot redirect, the temporary (VAR) will appear
4261 to be used uninitialized. Don't warn. */
4262 TREE_NO_WARNING (var) = 1;
4263 }
4264 else
4265 {
4266 wce = build1 (WITH_CLEANUP_EXPR, void_type_node, cleanup);
4267 CLEANUP_EH_ONLY (wce) = eh_only;
4268 append_to_statement_list (wce, pre_p);
4269 }
4270
4271 gimplify_stmt (&TREE_OPERAND (wce, 0));
4272 }
4273
4274 /* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */
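/* Illustrative only: TARGET_EXPR <D.1234, init, cleanup> is lowered to

	D.1234 = init;		<-- appended to PRE_P
	<cleanup pushed for D.1234, if any>

   *EXPR_P becomes the slot D.1234, and TARGET_EXPR_INITIAL is cleared so
   the initializer is expanded only once.  */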
4275
4276 static enum gimplify_status
4277 gimplify_target_expr (tree *expr_p, tree *pre_p, tree *post_p)
4278 {
4279 tree targ = *expr_p;
4280 tree temp = TARGET_EXPR_SLOT (targ);
4281 tree init = TARGET_EXPR_INITIAL (targ);
4282 enum gimplify_status ret;
4283
4284 if (init)
4285 {
4286 /* TARGET_EXPR temps aren't part of the enclosing block, so add this one
4287 to the temps list. */
4288 gimple_add_tmp_var (temp);
4289
4290 /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
4291 expression is supposed to initialize the slot. */
4292 if (VOID_TYPE_P (TREE_TYPE (init)))
4293 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
4294 else
4295 {
4296 init = build2 (INIT_EXPR, void_type_node, temp, init);
4297 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt,
4298 fb_none);
4299 }
4300 if (ret == GS_ERROR)
4301 {
4302 /* PR c++/28266 Make sure this is expanded only once. */
4303 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
4304 return GS_ERROR;
4305 }
4306 append_to_statement_list (init, pre_p);
4307
4308 /* If needed, push the cleanup for the temp. */
4309 if (TARGET_EXPR_CLEANUP (targ))
4310 {
4311 gimplify_stmt (&TARGET_EXPR_CLEANUP (targ));
4312 gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
4313 CLEANUP_EH_ONLY (targ), pre_p);
4314 }
4315
4316 /* Only expand this once. */
4317 TREE_OPERAND (targ, 3) = init;
4318 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
4319 }
4320 else
4321 /* We should have expanded this before. */
4322 gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));
4323
4324 *expr_p = temp;
4325 return GS_OK;
4326 }
4327
4328 /* Gimplification of expression trees. */
4329
4330 /* Gimplify an expression which appears at statement context; usually, this
4331 means replacing it with a suitably gimple STATEMENT_LIST. */
4332
4333 void
4334 gimplify_stmt (tree *stmt_p)
4335 {
4336 gimplify_expr (stmt_p, NULL, NULL, is_gimple_stmt, fb_none);
4337 }
4338
4339 /* Similarly, but force the result to be a STATEMENT_LIST. */
4340
4341 void
4342 gimplify_to_stmt_list (tree *stmt_p)
4343 {
4344 gimplify_stmt (stmt_p);
4345 if (!*stmt_p)
4346 *stmt_p = alloc_stmt_list ();
4347 else if (TREE_CODE (*stmt_p) != STATEMENT_LIST)
4348 {
4349 tree t = *stmt_p;
4350 *stmt_p = alloc_stmt_list ();
4351 append_to_statement_list (t, stmt_p);
4352 }
4353 }
4354
4355
4356 /* Add FIRSTPRIVATE entries for DECL in CTX and the surrounding OpenMP parallels.
4357 If entries already exist, force them to be some flavor of private.
4358 If there is no enclosing parallel, do nothing. */
4359
4360 void
4361 omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
4362 {
4363 splay_tree_node n;
4364
4365 if (decl == NULL || !DECL_P (decl))
4366 return;
4367
4368 do
4369 {
4370 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
4371 if (n != NULL)
4372 {
4373 if (n->value & GOVD_SHARED)
4374 n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
4375 else
4376 return;
4377 }
4378 else if (ctx->is_parallel)
4379 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
4380
4381 ctx = ctx->outer_context;
4382 }
4383 while (ctx);
4384 }
4385
4386 /* Similarly for each of the type sizes of TYPE. */
4387
4388 static void
4389 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
4390 {
4391 if (type == NULL || type == error_mark_node)
4392 return;
4393 type = TYPE_MAIN_VARIANT (type);
4394
4395 if (pointer_set_insert (ctx->privatized_types, type))
4396 return;
4397
4398 switch (TREE_CODE (type))
4399 {
4400 case INTEGER_TYPE:
4401 case ENUMERAL_TYPE:
4402 case BOOLEAN_TYPE:
4403 case REAL_TYPE:
4404 omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
4405 omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
4406 break;
4407
4408 case ARRAY_TYPE:
4409 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
4410 omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
4411 break;
4412
4413 case RECORD_TYPE:
4414 case UNION_TYPE:
4415 case QUAL_UNION_TYPE:
4416 {
4417 tree field;
4418 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
4419 if (TREE_CODE (field) == FIELD_DECL)
4420 {
4421 omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
4422 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
4423 }
4424 }
4425 break;
4426
4427 case POINTER_TYPE:
4428 case REFERENCE_TYPE:
4429 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
4430 break;
4431
4432 default:
4433 break;
4434 }
4435
4436 omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
4437 omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
4438 lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
4439 }
4440
4441 /* Add an entry for DECL in the OpenMP context CTX with FLAGS. */
4442
4443 static void
4444 omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
4445 {
4446 splay_tree_node n;
4447 unsigned int nflags;
4448 tree t;
4449
4450 if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node)
4451 return;
4452
4453 /* Never elide decls whose type has TREE_ADDRESSABLE set. This means
4454 there are constructors involved somewhere. */
4455 if (TREE_ADDRESSABLE (TREE_TYPE (decl))
4456 || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl)))
4457 flags |= GOVD_SEEN;
4458
4459 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
4460 if (n != NULL)
4461 {
4462 /* We shouldn't be re-adding the decl with the same data
4463 sharing class. */
4464 gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
4465 /* The only combination of data sharing classes we should see is
4466 FIRSTPRIVATE and LASTPRIVATE. */
4467 nflags = n->value | flags;
4468 gcc_assert ((nflags & GOVD_DATA_SHARE_CLASS)
4469 == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE));
4470 n->value = nflags;
4471 return;
4472 }
4473
4474 /* When adding a variable-sized variable, we have to handle all sorts
4475 of additional bits of data: the pointer replacement variable, and
4476 the parameters of the type. */
4477 if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
4478 {
4479 /* Add the pointer replacement variable as PRIVATE if the variable
4480 replacement is private, else FIRSTPRIVATE since we'll need the
4481 address of the original variable either for SHARED, or for the
4482 copy into or out of the context. */
4483 if (!(flags & GOVD_LOCAL))
4484 {
4485 nflags = flags & GOVD_PRIVATE ? GOVD_PRIVATE : GOVD_FIRSTPRIVATE;
4486 nflags |= flags & GOVD_SEEN;
4487 t = DECL_VALUE_EXPR (decl);
4488 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
4489 t = TREE_OPERAND (t, 0);
4490 gcc_assert (DECL_P (t));
4491 omp_add_variable (ctx, t, nflags);
4492 }
4493
4494 /* Add all of the variable and type parameters (which should have
4495 been gimplified to a formal temporary) as FIRSTPRIVATE. */
4496 omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
4497 omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
4498 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
4499
4500 /* The variable-sized variable itself is never SHARED, only some form
4501 of PRIVATE. The sharing would take place via the pointer variable
4502 which we remapped above. */
4503 if (flags & GOVD_SHARED)
4504 flags = GOVD_PRIVATE | GOVD_DEBUG_PRIVATE
4505 | (flags & (GOVD_SEEN | GOVD_EXPLICIT));
4506
4507 /* We're going to make use of the TYPE_SIZE_UNIT at least in the
4508 alloca statement we generate for the variable, so make sure it
4509 is available. This isn't automatically needed for the SHARED
4510 case, since we won't be allocating local storage then.
4511 For local variables TYPE_SIZE_UNIT might not be gimplified yet;
4512 in this case omp_notice_variable will be called later
4513 on when it is gimplified. */
4514 else if (! (flags & GOVD_LOCAL))
4515 omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
4516 }
4517 else if (lang_hooks.decls.omp_privatize_by_reference (decl))
4518 {
4519 gcc_assert ((flags & GOVD_LOCAL) == 0);
4520 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
4521
4522 /* Similar to the direct variable sized case above, we'll need the
4523 size of references being privatized. */
4524 if ((flags & GOVD_SHARED) == 0)
4525 {
4526 t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
4527 if (TREE_CODE (t) != INTEGER_CST)
4528 omp_notice_variable (ctx, t, true);
4529 }
4530 }
4531
4532 splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
4533 }
4534
4535 /* Record the fact that DECL was used within the OpenMP context CTX.
4536 IN_CODE is true when real code uses DECL, and false when we should
4537 merely emit default(none) errors. Return true if DECL is going to
4538 be remapped and thus DECL shouldn't be gimplified into its
4539 DECL_VALUE_EXPR (if any). */
4540
4541 static bool
4542 omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
4543 {
4544 splay_tree_node n;
4545 unsigned flags = in_code ? GOVD_SEEN : 0;
4546 bool ret = false, shared;
4547
4548 if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node)
4549 return false;
4550
4551 /* Threadprivate variables are predetermined. */
4552 if (is_global_var (decl))
4553 {
4554 if (DECL_THREAD_LOCAL_P (decl))
4555 return false;
4556
4557 if (DECL_HAS_VALUE_EXPR_P (decl))
4558 {
4559 tree value = get_base_address (DECL_VALUE_EXPR (decl));
4560
4561 if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
4562 return false;
4563 }
4564 }
4565
4566 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
4567 if (n == NULL)
4568 {
4569 enum omp_clause_default_kind default_kind, kind;
4570
4571 if (!ctx->is_parallel)
4572 goto do_outer;
4573
4574 /* ??? Some compiler-generated variables (like SAVE_EXPRs) could be
4575 remapped firstprivate instead of shared. To some extent this is
4576 addressed in omp_firstprivatize_type_sizes, but not effectively. */
4577 default_kind = ctx->default_kind;
4578 kind = lang_hooks.decls.omp_predetermined_sharing (decl);
4579 if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
4580 default_kind = kind;
4581
4582 switch (default_kind)
4583 {
4584 case OMP_CLAUSE_DEFAULT_NONE:
4585 error ("%qs not specified in enclosing parallel",
4586 IDENTIFIER_POINTER (DECL_NAME (decl)));
4587 error ("%Henclosing parallel", &ctx->location);
4588 /* FALLTHRU */
4589 case OMP_CLAUSE_DEFAULT_SHARED:
4590 flags |= GOVD_SHARED;
4591 break;
4592 case OMP_CLAUSE_DEFAULT_PRIVATE:
4593 flags |= GOVD_PRIVATE;
4594 break;
4595 default:
4596 gcc_unreachable ();
4597 }
4598
4599 omp_add_variable (ctx, decl, flags);
4600
4601 shared = (flags & GOVD_SHARED) != 0;
4602 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
4603 goto do_outer;
4604 }
4605
4606 shared = ((flags | n->value) & GOVD_SHARED) != 0;
4607 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
4608
4609 /* If nothing changed, there's nothing left to do. */
4610 if ((n->value & flags) == flags)
4611 return ret;
4612 flags |= n->value;
4613 n->value = flags;
4614
4615 do_outer:
4616 /* If the variable is private in the current context, then we don't
4617 need to propagate anything to an outer context. */
4618 if (flags & GOVD_PRIVATE)
4619 return ret;
4620 if (ctx->outer_context
4621 && omp_notice_variable (ctx->outer_context, decl, in_code))
4622 return true;
4623 return ret;
4624 }
4625
4626 /* Verify that DECL is private within CTX. If there's specific information
4627 to the contrary in the innermost scope, generate an error. */
4628
4629 static bool
4630 omp_is_private (struct gimplify_omp_ctx *ctx, tree decl)
4631 {
4632 splay_tree_node n;
4633
4634 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
4635 if (n != NULL)
4636 {
4637 if (n->value & GOVD_SHARED)
4638 {
4639 if (ctx == gimplify_omp_ctxp)
4640 {
4641 error ("iteration variable %qs should be private",
4642 IDENTIFIER_POINTER (DECL_NAME (decl)));
4643 n->value = GOVD_PRIVATE;
4644 return true;
4645 }
4646 else
4647 return false;
4648 }
4649 else if ((n->value & GOVD_EXPLICIT) != 0
4650 && (ctx == gimplify_omp_ctxp
4651 || (ctx->is_combined_parallel
4652 && gimplify_omp_ctxp->outer_context == ctx)))
4653 {
4654 if ((n->value & GOVD_FIRSTPRIVATE) != 0)
4655 error ("iteration variable %qs should not be firstprivate",
4656 IDENTIFIER_POINTER (DECL_NAME (decl)));
4657 else if ((n->value & GOVD_REDUCTION) != 0)
4658 error ("iteration variable %qs should not be reduction",
4659 IDENTIFIER_POINTER (DECL_NAME (decl)));
4660 }
4661 return true;
4662 }
4663
4664 if (ctx->is_parallel)
4665 return false;
4666 else if (ctx->outer_context)
4667 return omp_is_private (ctx->outer_context, decl);
4668 else
4669 return !is_global_var (decl);
4670 }
4671
4672 /* Return true if DECL is private within a parallel region
4673 that binds to the current construct's context, or appears in that parallel
4674 region's REDUCTION clause. */
4675
4676 static bool
4677 omp_check_private (struct gimplify_omp_ctx *ctx, tree decl)
4678 {
4679 splay_tree_node n;
4680
4681 do
4682 {
4683 ctx = ctx->outer_context;
4684 if (ctx == NULL)
4685 return !(is_global_var (decl)
4686 /* References might be private, but might be shared too. */
4687 || lang_hooks.decls.omp_privatize_by_reference (decl));
4688
4689 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
4690 if (n != NULL)
4691 return (n->value & GOVD_SHARED) == 0;
4692 }
4693 while (!ctx->is_parallel);
4694 return false;
4695 }
4696
4697 /* Scan the OpenMP clauses in *LIST_P, installing mappings into a new
4698 omp context and, as needed, into enclosing omp contexts. */
4699
4700 static void
4701 gimplify_scan_omp_clauses (tree *list_p, tree *pre_p, bool in_parallel,
4702 bool in_combined_parallel)
4703 {
4704 struct gimplify_omp_ctx *ctx, *outer_ctx;
4705 tree c;
4706
4707 ctx = new_omp_context (in_parallel, in_combined_parallel);
4708 outer_ctx = ctx->outer_context;
4709
4710 while ((c = *list_p) != NULL)
4711 {
4712 enum gimplify_status gs;
4713 bool remove = false;
4714 bool notice_outer = true;
4715 const char *check_non_private = NULL;
4716 unsigned int flags;
4717 tree decl;
4718
4719 switch (OMP_CLAUSE_CODE (c))
4720 {
4721 case OMP_CLAUSE_PRIVATE:
4722 flags = GOVD_PRIVATE | GOVD_EXPLICIT;
4723 notice_outer = false;
4724 goto do_add;
4725 case OMP_CLAUSE_SHARED:
4726 flags = GOVD_SHARED | GOVD_EXPLICIT;
4727 goto do_add;
4728 case OMP_CLAUSE_FIRSTPRIVATE:
4729 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
4730 check_non_private = "firstprivate";
4731 goto do_add;
4732 case OMP_CLAUSE_LASTPRIVATE:
4733 flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
4734 check_non_private = "lastprivate";
4735 goto do_add;
4736 case OMP_CLAUSE_REDUCTION:
4737 flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
4738 check_non_private = "reduction";
4739 goto do_add;
4740
4741 do_add:
4742 decl = OMP_CLAUSE_DECL (c);
4743 if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node)
4744 {
4745 remove = true;
4746 break;
4747 }
4748 omp_add_variable (ctx, decl, flags);
4749 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
4750 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4751 {
4752 omp_add_variable (ctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
4753 GOVD_LOCAL | GOVD_SEEN);
4754 gimplify_omp_ctxp = ctx;
4755 push_gimplify_context ();
4756 gimplify_stmt (&OMP_CLAUSE_REDUCTION_INIT (c));
4757 pop_gimplify_context (OMP_CLAUSE_REDUCTION_INIT (c));
4758 push_gimplify_context ();
4759 gimplify_stmt (&OMP_CLAUSE_REDUCTION_MERGE (c));
4760 pop_gimplify_context (OMP_CLAUSE_REDUCTION_MERGE (c));
4761 gimplify_omp_ctxp = outer_ctx;
4762 }
4763 if (notice_outer)
4764 goto do_notice;
4765 break;
4766
4767 case OMP_CLAUSE_COPYIN:
4768 case OMP_CLAUSE_COPYPRIVATE:
4769 decl = OMP_CLAUSE_DECL (c);
4770 if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node)
4771 {
4772 remove = true;
4773 break;
4774 }
4775 do_notice:
4776 if (outer_ctx)
4777 omp_notice_variable (outer_ctx, decl, true);
4778 if (check_non_private
4779 && !in_parallel
4780 && omp_check_private (ctx, decl))
4781 {
4782 error ("%s variable %qs is private in outer context",
4783 check_non_private, IDENTIFIER_POINTER (DECL_NAME (decl)));
4784 remove = true;
4785 }
4786 break;
4787
4788 case OMP_CLAUSE_IF:
4789 OMP_CLAUSE_OPERAND (c, 0)
4790 = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
4791 /* Fall through. */
4792
4793 case OMP_CLAUSE_SCHEDULE:
4794 case OMP_CLAUSE_NUM_THREADS:
4795 gs = gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
4796 is_gimple_val, fb_rvalue);
4797 if (gs == GS_ERROR)
4798 remove = true;
4799 break;
4800
4801 case OMP_CLAUSE_NOWAIT:
4802 case OMP_CLAUSE_ORDERED:
4803 break;
4804
4805 case OMP_CLAUSE_DEFAULT:
4806 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
4807 break;
4808
4809 default:
4810 gcc_unreachable ();
4811 }
4812
4813 if (remove)
4814 *list_p = OMP_CLAUSE_CHAIN (c);
4815 else
4816 list_p = &OMP_CLAUSE_CHAIN (c);
4817 }
4818
4819 gimplify_omp_ctxp = ctx;
4820 }
4821
4822 /* For all variables that were not actually used within the context,
4823 remove PRIVATE, SHARED, and FIRSTPRIVATE clauses. */
4824
4825 static int
4826 gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
4827 {
4828 tree *list_p = (tree *) data;
4829 tree decl = (tree) n->key;
4830 unsigned flags = n->value;
4831 enum omp_clause_code code;
4832 tree clause;
4833 bool private_debug;
4834
4835 if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
4836 return 0;
4837 if ((flags & GOVD_SEEN) == 0)
4838 return 0;
4839 if (flags & GOVD_DEBUG_PRIVATE)
4840 {
4841 gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_PRIVATE);
4842 private_debug = true;
4843 }
4844 else
4845 private_debug
4846 = lang_hooks.decls.omp_private_debug_clause (decl,
4847 !!(flags & GOVD_SHARED));
4848 if (private_debug)
4849 code = OMP_CLAUSE_PRIVATE;
4850 else if (flags & GOVD_SHARED)
4851 {
4852 if (is_global_var (decl))
4853 return 0;
4854 code = OMP_CLAUSE_SHARED;
4855 }
4856 else if (flags & GOVD_PRIVATE)
4857 code = OMP_CLAUSE_PRIVATE;
4858 else if (flags & GOVD_FIRSTPRIVATE)
4859 code = OMP_CLAUSE_FIRSTPRIVATE;
4860 else
4861 gcc_unreachable ();
4862
4863 clause = build_omp_clause (code);
4864 OMP_CLAUSE_DECL (clause) = decl;
4865 OMP_CLAUSE_CHAIN (clause) = *list_p;
4866 if (private_debug)
4867 OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
4868 *list_p = clause;
4869
4870 return 0;
4871 }
4872
4873 static void
4874 gimplify_adjust_omp_clauses (tree *list_p)
4875 {
4876 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
4877 tree c, decl;
4878
4879 while ((c = *list_p) != NULL)
4880 {
4881 splay_tree_node n;
4882 bool remove = false;
4883
4884 switch (OMP_CLAUSE_CODE (c))
4885 {
4886 case OMP_CLAUSE_PRIVATE:
4887 case OMP_CLAUSE_SHARED:
4888 case OMP_CLAUSE_FIRSTPRIVATE:
4889 decl = OMP_CLAUSE_DECL (c);
4890 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
4891 remove = !(n->value & GOVD_SEEN);
4892 if (! remove)
4893 {
4894 bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
4895 if ((n->value & GOVD_DEBUG_PRIVATE)
4896 || lang_hooks.decls.omp_private_debug_clause (decl, shared))
4897 {
4898 gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
4899 || ((n->value & GOVD_DATA_SHARE_CLASS)
4900 == GOVD_PRIVATE));
4901 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
4902 OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
4903 }
4904 }
4905 break;
4906
4907 case OMP_CLAUSE_LASTPRIVATE:
4908 /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
4909 accurately reflect the presence of a FIRSTPRIVATE clause. */
4910 decl = OMP_CLAUSE_DECL (c);
4911 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
4912 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
4913 = (n->value & GOVD_FIRSTPRIVATE) != 0;
4914 break;
4915
4916 case OMP_CLAUSE_REDUCTION:
4917 case OMP_CLAUSE_COPYIN:
4918 case OMP_CLAUSE_COPYPRIVATE:
4919 case OMP_CLAUSE_IF:
4920 case OMP_CLAUSE_NUM_THREADS:
4921 case OMP_CLAUSE_SCHEDULE:
4922 case OMP_CLAUSE_NOWAIT:
4923 case OMP_CLAUSE_ORDERED:
4924 case OMP_CLAUSE_DEFAULT:
4925 break;
4926
4927 default:
4928 gcc_unreachable ();
4929 }
4930
4931 if (remove)
4932 *list_p = OMP_CLAUSE_CHAIN (c);
4933 else
4934 list_p = &OMP_CLAUSE_CHAIN (c);
4935 }
4936
4937 /* Add in any implicit data sharing. */
4938 splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, list_p);
4939
4940 gimplify_omp_ctxp = ctx->outer_context;
4941 delete_omp_context (ctx);
4942 }
4943
4944 /* Gimplify the contents of an OMP_PARALLEL statement. This involves
4945 gimplification of the body, as well as scanning the body for used
4946 variables. We need to do this scan now, because variable-sized
4947 decls will be decomposed during gimplification. */
4948
4949 static enum gimplify_status
4950 gimplify_omp_parallel (tree *expr_p, tree *pre_p)
4951 {
4952 tree expr = *expr_p;
4953
4954 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p, true,
4955 OMP_PARALLEL_COMBINED (expr));
4956
4957 push_gimplify_context ();
4958
4959 gimplify_stmt (&OMP_PARALLEL_BODY (expr));
4960
4961 if (TREE_CODE (OMP_PARALLEL_BODY (expr)) == BIND_EXPR)
4962 pop_gimplify_context (OMP_PARALLEL_BODY (expr));
4963 else
4964 pop_gimplify_context (NULL_TREE);
4965
4966 gimplify_adjust_omp_clauses (&OMP_PARALLEL_CLAUSES (expr));
4967
4968 return GS_ALL_DONE;
4969 }
4970
4971 /* Gimplify the gross structure of an OMP_FOR statement. */
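/* Illustrative only: the loop increment, e.g. i++ or i--, is canonicalized
   into a GIMPLE_MODIFY_STMT of the form

	i = i + 1;	(resp.  i = i + -1;)

   and the initial value and loop bound expressions are gimplified into
   OMP_FOR_PRE_BODY.  */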
4972
4973 static enum gimplify_status
4974 gimplify_omp_for (tree *expr_p, tree *pre_p)
4975 {
4976 tree for_stmt, decl, t;
4977 enum gimplify_status ret = GS_OK;
4978
4979 for_stmt = *expr_p;
4980
4981 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p, false, false);
4982
4983 t = OMP_FOR_INIT (for_stmt);
4984 gcc_assert (TREE_CODE (t) == MODIFY_EXPR
4985 || TREE_CODE (t) == GIMPLE_MODIFY_STMT);
4986 decl = GENERIC_TREE_OPERAND (t, 0);
4987 gcc_assert (DECL_P (decl));
4988 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl)));
4989
4990 /* Make sure the iteration variable is private. */
4991 if (omp_is_private (gimplify_omp_ctxp, decl))
4992 omp_notice_variable (gimplify_omp_ctxp, decl, true);
4993 else
4994 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
4995
4996 ret |= gimplify_expr (&GENERIC_TREE_OPERAND (t, 1),
4997 &OMP_FOR_PRE_BODY (for_stmt),
4998 NULL, is_gimple_val, fb_rvalue);
4999
5000 tree_to_gimple_tuple (&OMP_FOR_INIT (for_stmt));
5001
5002 t = OMP_FOR_COND (for_stmt);
5003 gcc_assert (COMPARISON_CLASS_P (t));
5004 gcc_assert (GENERIC_TREE_OPERAND (t, 0) == decl);
5005
5006 ret |= gimplify_expr (&GENERIC_TREE_OPERAND (t, 1),
5007 &OMP_FOR_PRE_BODY (for_stmt),
5008 NULL, is_gimple_val, fb_rvalue);
5009
5010 tree_to_gimple_tuple (&OMP_FOR_INCR (for_stmt));
5011 t = OMP_FOR_INCR (for_stmt);
5012 switch (TREE_CODE (t))
5013 {
5014 case PREINCREMENT_EXPR:
5015 case POSTINCREMENT_EXPR:
5016 t = build_int_cst (TREE_TYPE (decl), 1);
5017 t = build2 (PLUS_EXPR, TREE_TYPE (decl), decl, t);
5018 t = build_gimple_modify_stmt (decl, t);
5019 OMP_FOR_INCR (for_stmt) = t;
5020 break;
5021
5022 case PREDECREMENT_EXPR:
5023 case POSTDECREMENT_EXPR:
5024 t = build_int_cst (TREE_TYPE (decl), -1);
5025 t = build2 (PLUS_EXPR, TREE_TYPE (decl), decl, t);
5026 t = build_gimple_modify_stmt (decl, t);
5027 OMP_FOR_INCR (for_stmt) = t;
5028 break;
5029
5030 case GIMPLE_MODIFY_STMT:
5031 gcc_assert (GIMPLE_STMT_OPERAND (t, 0) == decl);
5032 t = GIMPLE_STMT_OPERAND (t, 1);
5033 switch (TREE_CODE (t))
5034 {
5035 case PLUS_EXPR:
5036 if (TREE_OPERAND (t, 1) == decl)
5037 {
5038 TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
5039 TREE_OPERAND (t, 0) = decl;
5040 break;
5041 }
5042 case MINUS_EXPR:
5043 gcc_assert (TREE_OPERAND (t, 0) == decl);
5044 break;
5045 default:
5046 gcc_unreachable ();
5047 }
5048
5049 ret |= gimplify_expr (&TREE_OPERAND (t, 1), &OMP_FOR_PRE_BODY (for_stmt),
5050 NULL, is_gimple_val, fb_rvalue);
5051 break;
5052
5053 default:
5054 gcc_unreachable ();
5055 }
5056
5057 gimplify_to_stmt_list (&OMP_FOR_BODY (for_stmt));
5058 gimplify_adjust_omp_clauses (&OMP_FOR_CLAUSES (for_stmt));
5059
5060 return ret == GS_ALL_DONE ? GS_ALL_DONE : GS_ERROR;
5061 }
5062
5063 /* Gimplify the gross structure of other OpenMP worksharing constructs.
5064 In particular, OMP_SECTIONS and OMP_SINGLE. */
5065
5066 static enum gimplify_status
5067 gimplify_omp_workshare (tree *expr_p, tree *pre_p)
5068 {
5069 tree stmt = *expr_p;
5070
5071 gimplify_scan_omp_clauses (&OMP_CLAUSES (stmt), pre_p, false, false);
5072 gimplify_to_stmt_list (&OMP_BODY (stmt));
5073 gimplify_adjust_omp_clauses (&OMP_CLAUSES (stmt));
5074
5075 return GS_ALL_DONE;
5076 }
5077
5078 /* A subroutine of gimplify_omp_atomic. The front end is supposed to have
5079 stabilized the lhs of the atomic operation as *ADDR. Return true if
5080 EXPR is this stabilized form. */
5081
5082 static bool
5083 goa_lhs_expr_p (tree expr, tree addr)
5084 {
5085 /* Also include casts to other type variants. The C front end is fond
5086 of adding these for e.g. volatile variables. This is like
5087 STRIP_TYPE_NOPS but includes the main variant lookup. */
5088 while ((TREE_CODE (expr) == NOP_EXPR
5089 || TREE_CODE (expr) == CONVERT_EXPR
5090 || TREE_CODE (expr) == NON_LVALUE_EXPR)
5091 && TREE_OPERAND (expr, 0) != error_mark_node
5092 && (TYPE_MAIN_VARIANT (TREE_TYPE (expr))
5093 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (expr, 0)))))
5094 expr = TREE_OPERAND (expr, 0);
5095
5096 if (TREE_CODE (expr) == INDIRECT_REF && TREE_OPERAND (expr, 0) == addr)
5097 return true;
5098 if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
5099 return true;
5100 return false;
5101 }
5102
5103 /* A subroutine of gimplify_omp_atomic. Attempt to implement the atomic
5104 operation as a __sync_fetch_and_op builtin. INDEX is log2 of the
5105 size of the data type, and thus usable to find the index of the builtin
5106 decl. Returns GS_UNHANDLED if the expression is not of the proper form. */
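/* Illustrative only: with a 4-byte int x (so INDEX is 2),

	#pragma omp atomic
	x = x + 1;

   matches the fetch-op form and is lowered to roughly

	__sync_fetch_and_add_4 (&x, 1);

   provided the target implements the corresponding sync optab.  */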
5107
5108 static enum gimplify_status
5109 gimplify_omp_atomic_fetch_op (tree *expr_p, tree addr, tree rhs, int index)
5110 {
5111 enum built_in_function base;
5112 tree decl, itype;
5113 enum insn_code *optab;
5114
5115 /* Check for one of the supported fetch-op operations. */
5116 switch (TREE_CODE (rhs))
5117 {
5118 case PLUS_EXPR:
5119 base = BUILT_IN_FETCH_AND_ADD_N;
5120 optab = sync_add_optab;
5121 break;
5122 case MINUS_EXPR:
5123 base = BUILT_IN_FETCH_AND_SUB_N;
5124 optab = sync_add_optab;
5125 break;
5126 case BIT_AND_EXPR:
5127 base = BUILT_IN_FETCH_AND_AND_N;
5128 optab = sync_and_optab;
5129 break;
5130 case BIT_IOR_EXPR:
5131 base = BUILT_IN_FETCH_AND_OR_N;
5132 optab = sync_ior_optab;
5133 break;
5134 case BIT_XOR_EXPR:
5135 base = BUILT_IN_FETCH_AND_XOR_N;
5136 optab = sync_xor_optab;
5137 break;
5138 default:
5139 return GS_UNHANDLED;
5140 }
5141
5142 /* Make sure the expression is of the proper form. */
5143 if (goa_lhs_expr_p (TREE_OPERAND (rhs, 0), addr))
5144 rhs = TREE_OPERAND (rhs, 1);
5145 else if (commutative_tree_code (TREE_CODE (rhs))
5146 && goa_lhs_expr_p (TREE_OPERAND (rhs, 1), addr))
5147 rhs = TREE_OPERAND (rhs, 0);
5148 else
5149 return GS_UNHANDLED;
5150
5151 decl = built_in_decls[base + index + 1];
5152 itype = TREE_TYPE (TREE_TYPE (decl));
5153
5154 if (optab[TYPE_MODE (itype)] == CODE_FOR_nothing)
5155 return GS_UNHANDLED;
5156
5157 *expr_p = build_call_expr (decl, 2, addr, fold_convert (itype, rhs));
5158 return GS_OK;
5159 }
5160
5161 /* A subroutine of gimplify_omp_atomic_pipeline. Walk *EXPR_P and replace
5162 appearances of *LHS_ADDR with LHS_VAR. If an expression does not involve
5163 the lhs, evaluate it into a temporary. Return 1 if the lhs appeared as
5164 a subexpression, 0 if it did not, or -1 if an error was encountered. */
5165
5166 static int
5167 goa_stabilize_expr (tree *expr_p, tree *pre_p, tree lhs_addr, tree lhs_var)
5168 {
5169 tree expr = *expr_p;
5170 int saw_lhs;
5171
5172 if (goa_lhs_expr_p (expr, lhs_addr))
5173 {
5174 *expr_p = lhs_var;
5175 return 1;
5176 }
5177 if (is_gimple_val (expr))
5178 return 0;
5179
5180 saw_lhs = 0;
5181 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
5182 {
5183 case tcc_binary:
5184 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
5185 lhs_addr, lhs_var);
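/* FALLTHRU */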
5186 case tcc_unary:
5187 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
5188 lhs_addr, lhs_var);
5189 break;
5190 default:
5191 break;
5192 }
5193
5194 if (saw_lhs == 0)
5195 {
5196 enum gimplify_status gs;
5197 gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
5198 if (gs != GS_ALL_DONE)
5199 saw_lhs = -1;
5200 }
5201
5202 return saw_lhs;
5203 }
5204
5205 /* A subroutine of gimplify_omp_atomic. Implement the atomic operation as:
5206
5207 oldval = *addr;
5208 repeat:
5209 newval = rhs; // with oldval replacing *addr in rhs
5210 oldval = __sync_val_compare_and_swap (addr, oldval, newval);
5211 if (oldval != newval)
5212 goto repeat;
5213
5214 INDEX is log2 of the size of the data type, and thus usable to find the
5215 index of the builtin decl. */
5216
5217 static enum gimplify_status
5218 gimplify_omp_atomic_pipeline (tree *expr_p, tree *pre_p, tree addr,
5219 tree rhs, int index)
5220 {
5221 tree oldval, oldival, oldival2, newval, newival, label;
5222 tree type, itype, cmpxchg, x, iaddr;
5223
5224 cmpxchg = built_in_decls[BUILT_IN_VAL_COMPARE_AND_SWAP_N + index + 1];
5225 type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
5226 itype = TREE_TYPE (TREE_TYPE (cmpxchg));
5227
5228 if (sync_compare_and_swap[TYPE_MODE (itype)] == CODE_FOR_nothing)
5229 return GS_UNHANDLED;
5230
5231 oldval = create_tmp_var (type, NULL);
5232 newval = create_tmp_var (type, NULL);
5233
5234 /* Precompute as much of RHS as possible. In the same walk, replace
5235 occurrences of the lhs value with our temporary. */
5236 if (goa_stabilize_expr (&rhs, pre_p, addr, oldval) < 0)
5237 return GS_ERROR;
5238
5239 x = build_fold_indirect_ref (addr);
5240 x = build_gimple_modify_stmt (oldval, x);
5241 gimplify_and_add (x, pre_p);
5242
5243 /* For floating-point values, we'll need to view-convert them to integers
5244 so that we can perform the atomic compare and swap. Simplify the
5245 following code by always setting up the "i"ntegral variables. */
5246 if (INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type))
5247 {
5248 oldival = oldval;
5249 newival = newval;
5250 iaddr = addr;
5251 }
5252 else
5253 {
5254 oldival = create_tmp_var (itype, NULL);
5255 newival = create_tmp_var (itype, NULL);
5256
5257 x = build1 (VIEW_CONVERT_EXPR, itype, oldval);
5258 x = build_gimple_modify_stmt (oldival, x);
5259 gimplify_and_add (x, pre_p);
5260 iaddr = fold_convert (build_pointer_type (itype), addr);
5261 }
5262
5263 oldival2 = create_tmp_var (itype, NULL);
5264
5265 label = create_artificial_label ();
5266 x = build1 (LABEL_EXPR, void_type_node, label);
5267 gimplify_and_add (x, pre_p);
5268
5269 x = build_gimple_modify_stmt (newval, rhs);
5270 gimplify_and_add (x, pre_p);
5271
5272 if (newval != newival)
5273 {
5274 x = build1 (VIEW_CONVERT_EXPR, itype, newval);
5275 x = build_gimple_modify_stmt (newival, x);
5276 gimplify_and_add (x, pre_p);
5277 }
5278
5279 x = build_gimple_modify_stmt (oldival2, fold_convert (itype, oldival));
5280 gimplify_and_add (x, pre_p);
5281
5282 x = build_call_expr (cmpxchg, 3, iaddr, fold_convert (itype, oldival),
5283 fold_convert (itype, newival));
5284 if (oldval == oldival)
5285 x = fold_convert (type, x);
5286 x = build_gimple_modify_stmt (oldival, x);
5287 gimplify_and_add (x, pre_p);
5288
5289 /* For floating point, be prepared for the loop backedge. */
5290 if (oldval != oldival)
5291 {
5292 x = build1 (VIEW_CONVERT_EXPR, type, oldival);
5293 x = build_gimple_modify_stmt (oldval, x);
5294 gimplify_and_add (x, pre_p);
5295 }
5296
5297 /* Note that we always perform the comparison as an integer, even for
5298 floating point. This allows the atomic operation to properly
5299 succeed even with NaNs and -0.0. */
5300 x = build3 (COND_EXPR, void_type_node,
5301 build2 (NE_EXPR, boolean_type_node, oldival, oldival2),
5302 build1 (GOTO_EXPR, void_type_node, label), NULL);
5303 gimplify_and_add (x, pre_p);
5304
5305 *expr_p = NULL;
5306 return GS_ALL_DONE;
5307 }
5308
5309 /* A subroutine of gimplify_omp_atomic. Implement the atomic operation as:
5310
5311 GOMP_atomic_start ();
5312 *addr = rhs;
5313 GOMP_atomic_end ();
5314
5315 The result is not globally atomic, but works so long as all parallel
5316 references are within #pragma omp atomic directives. According to
5317 responses received from omp@openmp.org, this appears to be within spec,
5318 which makes sense, since that's how several other compilers handle
5319 this situation as well. */
5320
5321 static enum gimplify_status
5322 gimplify_omp_atomic_mutex (tree *expr_p, tree *pre_p, tree addr, tree rhs)
5323 {
5324 tree t;
5325
5326 t = built_in_decls[BUILT_IN_GOMP_ATOMIC_START];
5327 t = build_call_expr (t, 0);
5328 gimplify_and_add (t, pre_p);
5329
5330 t = build_fold_indirect_ref (addr);
5331 t = build_gimple_modify_stmt (t, rhs);
5332 gimplify_and_add (t, pre_p);
5333
5334 t = built_in_decls[BUILT_IN_GOMP_ATOMIC_END];
5335 t = build_call_expr (t, 0);
5336 gimplify_and_add (t, pre_p);
5337
5338 *expr_p = NULL;
5339 return GS_ALL_DONE;
5340 }
5341
5342 /* Gimplify an OMP_ATOMIC statement. */
5343
5344 static enum gimplify_status
5345 gimplify_omp_atomic (tree *expr_p, tree *pre_p)
5346 {
5347 tree addr = TREE_OPERAND (*expr_p, 0);
5348 tree rhs = TREE_OPERAND (*expr_p, 1);
5349 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
5350 HOST_WIDE_INT index;
5351
5352 /* Make sure the type is one of the supported sizes. */
5353 index = tree_low_cst (TYPE_SIZE_UNIT (type), 1);
5354 index = exact_log2 (index);
5355 if (index >= 0 && index <= 4)
5356 {
5357 enum gimplify_status gs;
5358 unsigned int align;
5359
5360 if (DECL_P (TREE_OPERAND (addr, 0)))
5361 align = DECL_ALIGN_UNIT (TREE_OPERAND (addr, 0));
5362 else if (TREE_CODE (TREE_OPERAND (addr, 0)) == COMPONENT_REF
5363 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (addr, 0), 1))
5364 == FIELD_DECL)
5365 align = DECL_ALIGN_UNIT (TREE_OPERAND (TREE_OPERAND (addr, 0), 1));
5366 else
5367 align = TYPE_ALIGN_UNIT (type);
5368
5369 /* __sync builtins require strict data alignment. */
5370 if (exact_log2 (align) >= index)
5371 {
5372 /* When possible, use specialized atomic update functions. */
5373 if (INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type))
5374 {
5375 gs = gimplify_omp_atomic_fetch_op (expr_p, addr, rhs, index);
5376 if (gs != GS_UNHANDLED)
5377 return gs;
5378 }
5379
5380 /* If we don't have specialized __sync builtins, try to implement this
5381 as a compare-and-swap loop. */
5382 gs = gimplify_omp_atomic_pipeline (expr_p, pre_p, addr, rhs, index);
5383 if (gs != GS_UNHANDLED)
5384 return gs;
5385 }
5386 }
5387
5388 /* The ultimate fallback is wrapping the operation in a mutex. */
5389 return gimplify_omp_atomic_mutex (expr_p, pre_p, addr, rhs);
5390 }
5391
5392 /* Gimplifies the expression tree pointed to by EXPR_P. Return GS_ERROR if
5393 gimplification failed.
5394
5395 PRE_P points to the list where side effects that must happen before
5396 EXPR should be stored.
5397
5398 POST_P points to the list where side effects that must happen after
5399 EXPR should be stored, or NULL if there is no suitable list. In
5400 that case, we copy the result to a temporary, emit the
5401 post-effects, and then return the temporary.
5402
5403 GIMPLE_TEST_F points to a function that takes a tree T and
5404 returns nonzero if T is in the GIMPLE form requested by the
5405 caller. The GIMPLE predicates are in tree-gimple.c.
5406
5407 This test is used twice. Before gimplification, the test is
5408 invoked to determine whether *EXPR_P is already gimple enough. If
5409 that fails, *EXPR_P is gimplified according to its code and
5410 GIMPLE_TEST_F is called again. If the test still fails, then a new
5411 temporary variable is created and assigned the value of the
5412 gimplified expression.
5413
5414 FALLBACK tells the function what sort of a temporary we want. If the 1
5415 bit is set, an rvalue is OK. If the 2 bit is set, an lvalue is OK.
5416 If both are set, either is OK, but an lvalue is preferable.
5417
5418 The return value is either GS_ERROR or GS_ALL_DONE, since this function
5419 iterates until it reaches a solution. */
5420
5421 enum gimplify_status
5422 gimplify_expr (tree *expr_p, tree *pre_p, tree *post_p,
5423 bool (* gimple_test_f) (tree), fallback_t fallback)
5424 {
5425 tree tmp;
5426 tree internal_pre = NULL_TREE;
5427 tree internal_post = NULL_TREE;
5428 tree save_expr;
5429 int is_statement = (pre_p == NULL);
5430 location_t saved_location;
5431 enum gimplify_status ret;
5432
5433 save_expr = *expr_p;
5434 if (save_expr == NULL_TREE)
5435 return GS_ALL_DONE;
5436
5437 /* We used to check the predicate here and return immediately if it
5438 succeeds. This is wrong; the design is for gimplification to be
5439 idempotent, and for the predicates to only test for valid forms, not
5440 whether they are fully simplified. */
5441
5442 /* Set up our internal queues if needed. */
5443 if (pre_p == NULL)
5444 pre_p = &internal_pre;
5445 if (post_p == NULL)
5446 post_p = &internal_post;
5447
5448 saved_location = input_location;
5449 if (save_expr != error_mark_node
5450 && EXPR_HAS_LOCATION (*expr_p))
5451 input_location = EXPR_LOCATION (*expr_p);
5452
5453 /* Loop over the specific gimplifiers until the toplevel node
5454 remains the same. */
5455 do
5456 {
5457 /* Strip away as many useless type conversions as possible
5458 at the toplevel. */
5459 STRIP_USELESS_TYPE_CONVERSION (*expr_p);
5460
5461 /* Remember the expr. */
5462 save_expr = *expr_p;
5463
5464 /* Die, die, die, my darling. */
5465 if (save_expr == error_mark_node
5466 || (!GIMPLE_STMT_P (save_expr)
5467 && TREE_TYPE (save_expr)
5468 && TREE_TYPE (save_expr) == error_mark_node))
5469 {
5470 ret = GS_ERROR;
5471 break;
5472 }
5473
5474 /* Do any language-specific gimplification. */
5475 ret = lang_hooks.gimplify_expr (expr_p, pre_p, post_p);
5476 if (ret == GS_OK)
5477 {
5478 if (*expr_p == NULL_TREE)
5479 break;
5480 if (*expr_p != save_expr)
5481 continue;
5482 }
5483 else if (ret != GS_UNHANDLED)
5484 break;
5485
5486 ret = GS_OK;
5487 switch (TREE_CODE (*expr_p))
5488 {
5489 /* First deal with the special cases. */
5490
5491 case POSTINCREMENT_EXPR:
5492 case POSTDECREMENT_EXPR:
5493 case PREINCREMENT_EXPR:
5494 case PREDECREMENT_EXPR:
5495 ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
5496 fallback != fb_none);
5497 break;
5498
5499 case ARRAY_REF:
5500 case ARRAY_RANGE_REF:
5501 case REALPART_EXPR:
5502 case IMAGPART_EXPR:
5503 case COMPONENT_REF:
5504 case VIEW_CONVERT_EXPR:
5505 ret = gimplify_compound_lval (expr_p, pre_p, post_p,
5506 fallback ? fallback : fb_rvalue);
5507 break;
5508
5509 case COND_EXPR:
5510 ret = gimplify_cond_expr (expr_p, pre_p, fallback);
5511 /* C99 code may assign to an array in a structure value of a
5512 conditional expression, and this has undefined behavior
5513 only on execution, so create a temporary if an lvalue is
5514 required. */
5515 if (fallback == fb_lvalue)
5516 {
5517 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
5518 lang_hooks.mark_addressable (*expr_p);
5519 }
5520 break;
5521
5522 case CALL_EXPR:
5523 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
5524 /* C99 code may assign to an array in a structure returned
5525 from a function, and this has undefined behavior only on
5526 execution, so create a temporary if an lvalue is
5527 required. */
5528 if (fallback == fb_lvalue)
5529 {
5530 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
5531 lang_hooks.mark_addressable (*expr_p);
5532 }
5533 break;
5534
5535 case TREE_LIST:
5536 gcc_unreachable ();
5537
5538 case COMPOUND_EXPR:
5539 ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
5540 break;
5541
5542 case MODIFY_EXPR:
5543 case GIMPLE_MODIFY_STMT:
5544 case INIT_EXPR:
5545 ret = gimplify_modify_expr (expr_p, pre_p, post_p,
5546 fallback != fb_none);
5547
5548 if (*expr_p)
5549 {
5550 /* The distinction between MODIFY_EXPR and INIT_EXPR is no longer
5551 useful. */
5552 if (TREE_CODE (*expr_p) == INIT_EXPR)
5553 TREE_SET_CODE (*expr_p, MODIFY_EXPR);
5554
5555 /* Convert MODIFY_EXPR to GIMPLE_MODIFY_STMT. */
5556 if (TREE_CODE (*expr_p) == MODIFY_EXPR)
5557 tree_to_gimple_tuple (expr_p);
5558 }
5559
5560 break;
5561
5562 case TRUTH_ANDIF_EXPR:
5563 case TRUTH_ORIF_EXPR:
5564 ret = gimplify_boolean_expr (expr_p);
5565 break;
5566
5567 case TRUTH_NOT_EXPR:
5568 TREE_OPERAND (*expr_p, 0)
5569 = gimple_boolify (TREE_OPERAND (*expr_p, 0));
5570 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
5571 is_gimple_val, fb_rvalue);
5572 recalculate_side_effects (*expr_p);
5573 break;
5574
5575 case ADDR_EXPR:
5576 ret = gimplify_addr_expr (expr_p, pre_p, post_p);
5577 break;
5578
5579 case VA_ARG_EXPR:
5580 ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
5581 break;
5582
5583 case CONVERT_EXPR:
5584 case NOP_EXPR:
5585 if (IS_EMPTY_STMT (*expr_p))
5586 {
5587 ret = GS_ALL_DONE;
5588 break;
5589 }
5590
5591 if (VOID_TYPE_P (TREE_TYPE (*expr_p))
5592 || fallback == fb_none)
5593 {
5594 /* Just strip a conversion to void (or in void context) and
5595 try again. */
5596 *expr_p = TREE_OPERAND (*expr_p, 0);
5597 break;
5598 }
5599
5600 ret = gimplify_conversion (expr_p);
5601 if (ret == GS_ERROR)
5602 break;
5603 if (*expr_p != save_expr)
5604 break;
5605 /* FALLTHRU */
5606
5607 case FIX_TRUNC_EXPR:
5608 /* unary_expr: ... | '(' cast ')' val | ... */
5609 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
5610 is_gimple_val, fb_rvalue);
5611 recalculate_side_effects (*expr_p);
5612 break;
5613
5614 case INDIRECT_REF:
5615 *expr_p = fold_indirect_ref (*expr_p);
5616 if (*expr_p != save_expr)
5617 break;
5618 /* else fall through. */
5619 case ALIGN_INDIRECT_REF:
5620 case MISALIGNED_INDIRECT_REF:
5621 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
5622 is_gimple_reg, fb_rvalue);
5623 recalculate_side_effects (*expr_p);
5624 break;
5625
5626 /* Constants need not be gimplified. */
5627 case INTEGER_CST:
5628 case REAL_CST:
5629 case STRING_CST:
5630 case COMPLEX_CST:
5631 case VECTOR_CST:
5632 ret = GS_ALL_DONE;
5633 break;
5634
5635 case CONST_DECL:
5636 /* If we require an lvalue, such as for ADDR_EXPR, retain the
5637 CONST_DECL node. Otherwise the decl is replaceable by its
5638 value. */
5639 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
5640 if (fallback & fb_lvalue)
5641 ret = GS_ALL_DONE;
5642 else
5643 *expr_p = DECL_INITIAL (*expr_p);
5644 break;
5645
5646 case DECL_EXPR:
5647 ret = gimplify_decl_expr (expr_p);
5648 break;
5649
5650 case EXC_PTR_EXPR:
5651 /* FIXME make this a decl. */
5652 ret = GS_ALL_DONE;
5653 break;
5654
5655 case BIND_EXPR:
5656 ret = gimplify_bind_expr (expr_p, pre_p);
5657 break;
5658
5659 case LOOP_EXPR:
5660 ret = gimplify_loop_expr (expr_p, pre_p);
5661 break;
5662
5663 case SWITCH_EXPR:
5664 ret = gimplify_switch_expr (expr_p, pre_p);
5665 break;
5666
5667 case EXIT_EXPR:
5668 ret = gimplify_exit_expr (expr_p);
5669 break;
5670
5671 case GOTO_EXPR:
5672 /* If the target is not a LABEL_DECL, then it is a computed jump
5673 and the target needs to be gimplified. */
5674 if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
5675 ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
5676 NULL, is_gimple_val, fb_rvalue);
5677 break;
5678
5679 case LABEL_EXPR:
5680 ret = GS_ALL_DONE;
5681 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
5682 == current_function_decl);
5683 break;
5684
5685 case CASE_LABEL_EXPR:
5686 ret = gimplify_case_label_expr (expr_p);
5687 break;
5688
5689 case RETURN_EXPR:
5690 ret = gimplify_return_expr (*expr_p, pre_p);
5691 break;
5692
5693 case CONSTRUCTOR:
5694 /* Don't reduce this in place; let gimplify_init_constructor work its
5695 magic. But if we're only elaborating this for side effects, just
5696 gimplify any element that has side-effects. */
5697 if (fallback == fb_none)
5698 {
5699 unsigned HOST_WIDE_INT ix;
5700 constructor_elt *ce;
5701 tree temp = NULL_TREE;
5702 for (ix = 0;
5703 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (*expr_p),
5704 ix, ce);
5705 ix++)
5706 if (TREE_SIDE_EFFECTS (ce->value))
5707 append_to_statement_list (ce->value, &temp);
5708
5709 *expr_p = temp;
5710 ret = GS_OK;
5711 }
5712 /* C99 code may assign to an array in a constructed
5713 structure or union, and this has undefined behavior only
5714 on execution, so create a temporary if an lvalue is
5715 required. */
5716 else if (fallback == fb_lvalue)
5717 {
5718 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
5719 lang_hooks.mark_addressable (*expr_p);
5720 }
5721 else
5722 ret = GS_ALL_DONE;
5723 break;
5724
5725 /* The following are special cases that are not handled by the
5726 original GIMPLE grammar. */
5727
5728 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
5729 eliminated. */
5730 case SAVE_EXPR:
5731 ret = gimplify_save_expr (expr_p, pre_p, post_p);
5732 break;
5733
5734 case BIT_FIELD_REF:
5735 {
5736 enum gimplify_status r0, r1, r2;
5737
5738 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
5739 is_gimple_lvalue, fb_either);
5740 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
5741 is_gimple_val, fb_rvalue);
5742 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p, post_p,
5743 is_gimple_val, fb_rvalue);
5744 recalculate_side_effects (*expr_p);
5745
5746 ret = MIN (r0, MIN (r1, r2));
5747 }
5748 break;
5749
5750 case NON_LVALUE_EXPR:
5751 /* This should have been stripped above. */
5752 gcc_unreachable ();
5753
5754 case ASM_EXPR:
5755 ret = gimplify_asm_expr (expr_p, pre_p, post_p);
5756 break;
5757
5758 case TRY_FINALLY_EXPR:
5759 case TRY_CATCH_EXPR:
5760 gimplify_to_stmt_list (&TREE_OPERAND (*expr_p, 0));
5761 gimplify_to_stmt_list (&TREE_OPERAND (*expr_p, 1));
5762 ret = GS_ALL_DONE;
5763 break;
5764
5765 case CLEANUP_POINT_EXPR:
5766 ret = gimplify_cleanup_point_expr (expr_p, pre_p);
5767 break;
5768
5769 case TARGET_EXPR:
5770 ret = gimplify_target_expr (expr_p, pre_p, post_p);
5771 break;
5772
5773 case CATCH_EXPR:
5774 gimplify_to_stmt_list (&CATCH_BODY (*expr_p));
5775 ret = GS_ALL_DONE;
5776 break;
5777
5778 case EH_FILTER_EXPR:
5779 gimplify_to_stmt_list (&EH_FILTER_FAILURE (*expr_p));
5780 ret = GS_ALL_DONE;
5781 break;
5782
5783 case OBJ_TYPE_REF:
5784 {
5785 enum gimplify_status r0, r1;
5786 r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p, post_p,
5787 is_gimple_val, fb_rvalue);
5788 r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p, post_p,
5789 is_gimple_val, fb_rvalue);
5790 ret = MIN (r0, r1);
5791 }
5792 break;
5793
5794 case LABEL_DECL:
5795 /* We get here when taking the address of a label. We mark
5796 the label as "forced", meaning it can never be removed and
5797 it is a potential target for any computed goto. */
5798 FORCED_LABEL (*expr_p) = 1;
5799 ret = GS_ALL_DONE;
5800 break;
5801
5802 case STATEMENT_LIST:
5803 ret = gimplify_statement_list (expr_p, pre_p);
5804 break;
5805
5806 case WITH_SIZE_EXPR:
5807 {
5808 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
5809 post_p == &internal_post ? NULL : post_p,
5810 gimple_test_f, fallback);
5811 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
5812 is_gimple_val, fb_rvalue);
5813 }
5814 break;
5815
5816 case VAR_DECL:
5817 case PARM_DECL:
5818 ret = gimplify_var_or_parm_decl (expr_p);
5819 break;
5820
5821 case RESULT_DECL:
5822 /* When within an OpenMP context, notice uses of variables. */
5823 if (gimplify_omp_ctxp)
5824 omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
5825 ret = GS_ALL_DONE;
5826 break;
5827
5828 case SSA_NAME:
5829 /* Allow callbacks into the gimplifier during optimization. */
5830 ret = GS_ALL_DONE;
5831 break;
5832
5833 case OMP_PARALLEL:
5834 ret = gimplify_omp_parallel (expr_p, pre_p);
5835 break;
5836
5837 case OMP_FOR:
5838 ret = gimplify_omp_for (expr_p, pre_p);
5839 break;
5840
5841 case OMP_SECTIONS:
5842 case OMP_SINGLE:
5843 ret = gimplify_omp_workshare (expr_p, pre_p);
5844 break;
5845
5846 case OMP_SECTION:
5847 case OMP_MASTER:
5848 case OMP_ORDERED:
5849 case OMP_CRITICAL:
5850 gimplify_to_stmt_list (&OMP_BODY (*expr_p));
5851 break;
5852
5853 case OMP_ATOMIC:
5854 ret = gimplify_omp_atomic (expr_p, pre_p);
5855 break;
5856
5857 case OMP_RETURN:
5858 case OMP_CONTINUE:
5859 ret = GS_ALL_DONE;
5860 break;
5861
5862 default:
5863 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
5864 {
5865 case tcc_comparison:
5866 /* Handle comparisons of aggregate objects of non-scalar mode
5867 with a call to memcmp. It would be nice to only have to do
5868 this for variable-sized objects, but then we'd have to allow
5869 the same nest of reference nodes we allow for MODIFY_EXPR and
5870 that's too complex.
5871
5872 Compare scalar mode aggregates as scalar mode values. Using
5873 memcmp for them would be very inefficient at best, and is
5874 plain wrong if bitfields are involved. */
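/* For example (schematically), an equality test of two BLKmode records
   A and B is rewritten as

       memcmp (&A, &B, sizeof (A)) == 0

   while a record narrow enough to have an integer mode is compared
   directly as a value of that mode.  */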
5875
5876 {
5877 tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
5878
5879 if (!AGGREGATE_TYPE_P (type))
5880 goto expr_2;
5881 else if (TYPE_MODE (type) != BLKmode)
5882 ret = gimplify_scalar_mode_aggregate_compare (expr_p);
5883 else
5884 ret = gimplify_variable_sized_compare (expr_p);
5885
5886 break;
5887 }
5888
5889 /* If *EXPR_P does not need to be special-cased, handle it
5890 according to its class. */
5891 case tcc_unary:
5892 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
5893 post_p, is_gimple_val, fb_rvalue);
5894 break;
5895
5896 case tcc_binary:
5897 expr_2:
5898 {
5899 enum gimplify_status r0, r1;
5900
5901 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
5902 post_p, is_gimple_val, fb_rvalue);
5903 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
5904 post_p, is_gimple_val, fb_rvalue);
5905
5906 ret = MIN (r0, r1);
5907 break;
5908 }
5909
5910 case tcc_declaration:
5911 case tcc_constant:
5912 ret = GS_ALL_DONE;
5913 goto dont_recalculate;
5914
5915 default:
5916 gcc_assert (TREE_CODE (*expr_p) == TRUTH_AND_EXPR
5917 || TREE_CODE (*expr_p) == TRUTH_OR_EXPR
5918 || TREE_CODE (*expr_p) == TRUTH_XOR_EXPR);
5919 goto expr_2;
5920 }
5921
5922 recalculate_side_effects (*expr_p);
5923 dont_recalculate:
5924 break;
5925 }
5926
5927 /* If we replaced *expr_p, loop back and gimplify it again; otherwise we're done. */
5928 if (ret == GS_OK && (*expr_p == NULL || *expr_p == save_expr))
5929 ret = GS_ALL_DONE;
5930 }
5931 while (ret == GS_OK);
5932
5933 /* If we encountered an error_mark somewhere nested inside, either
5934 stub out the statement or propagate the error back out. */
5935 if (ret == GS_ERROR)
5936 {
5937 if (is_statement)
5938 *expr_p = NULL;
5939 goto out;
5940 }
5941
5942 /* This was only valid as a return value from the langhook, which
5943 we handled. Make sure it doesn't escape from any other context. */
5944 gcc_assert (ret != GS_UNHANDLED);
5945
5946 if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
5947 {
5948 /* We aren't looking for a value, and we don't have a valid
5949 statement. If it doesn't have side-effects, throw it away. */
5950 if (!TREE_SIDE_EFFECTS (*expr_p))
5951 *expr_p = NULL;
5952 else if (!TREE_THIS_VOLATILE (*expr_p))
5953 {
5954 /* This is probably a _REF that contains something nested that
5955 has side effects. Recurse through the operands to find it. */
5956 enum tree_code code = TREE_CODE (*expr_p);
5957
5958 switch (code)
5959 {
5960 case COMPONENT_REF:
5961 case REALPART_EXPR:
5962 case IMAGPART_EXPR:
5963 case VIEW_CONVERT_EXPR:
5964 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
5965 gimple_test_f, fallback);
5966 break;
5967
5968 case ARRAY_REF:
5969 case ARRAY_RANGE_REF:
5970 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
5971 gimple_test_f, fallback);
5972 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
5973 gimple_test_f, fallback);
5974 break;
5975
5976 default:
5977 /* Anything else with side-effects must be converted to
5978 a valid statement before we get here. */
5979 gcc_unreachable ();
5980 }
5981
5982 *expr_p = NULL;
5983 }
5984 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
5985 && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode)
5986 {
5987 /* Historically, the compiler has treated a bare reference
5988 to a non-BLKmode volatile lvalue as forcing a load. */
5989 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
5990 /* Normally, we do not want to create a temporary for a
5991 TREE_ADDRESSABLE type because such a type should not be
5992 copied by bitwise-assignment. However, we make an
5993 exception here, as all we are doing here is ensuring that
5994 we read the bytes that make up the type. We use
5995 create_tmp_var_raw because create_tmp_var will abort when
5996 given a TREE_ADDRESSABLE type. */
5997 tree tmp = create_tmp_var_raw (type, "vol");
5998 gimple_add_tmp_var (tmp);
5999 *expr_p = build_gimple_modify_stmt (tmp, *expr_p);
6000 }
6001 else
6002 /* We can't do anything useful with a volatile reference to
6003 an incomplete type, so just throw it away. Likewise for
6004 a BLKmode type, since any implicit inner load should
6005 already have been turned into an explicit one by the
6006 gimplification process. */
6007 *expr_p = NULL;
6008 }
6009
6010 /* If we are gimplifying at the statement level, we're done. Tack
6011 everything together and replace the original statement with the
6012 gimplified form. */
6013 if (fallback == fb_none || is_statement)
6014 {
6015 if (internal_pre || internal_post)
6016 {
6017 append_to_statement_list (*expr_p, &internal_pre);
6018 append_to_statement_list (internal_post, &internal_pre);
6019 annotate_all_with_locus (&internal_pre, input_location);
6020 *expr_p = internal_pre;
6021 }
6022 else if (!*expr_p)
6023 ;
6024 else if (TREE_CODE (*expr_p) == STATEMENT_LIST)
6025 annotate_all_with_locus (expr_p, input_location);
6026 else
6027 annotate_one_with_locus (*expr_p, input_location);
6028 goto out;
6029 }
6030
6031 /* Otherwise we're gimplifying a subexpression, so the resulting value is
6032 interesting. */
6033
6034 /* If it's sufficiently simple already, we're done. Unless we are
6035 handling some post-effects internally; if that's the case, we need to
6036 copy into a temp before adding the post-effects to the tree. */
6037 if (!internal_post && (*gimple_test_f) (*expr_p))
6038 goto out;
6039
6040 /* Otherwise, we need to create a new temporary for the gimplified
6041 expression. */
6042
6043 /* We can't return an lvalue if we have an internal postqueue. The
6044 object the lvalue refers to would (probably) be modified by the
6045 postqueue; we need to copy the value out first, which means an
6046 rvalue. */
6047 if ((fallback & fb_lvalue) && !internal_post
6048 && is_gimple_addressable (*expr_p))
6049 {
6050 /* An lvalue will do. Take the address of the expression, store it
6051 in a temporary, and replace the expression with an INDIRECT_REF of
6052 that temporary. */
6053 tmp = build_fold_addr_expr (*expr_p);
6054 gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
6055 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (tmp)), tmp);
6056 }
6057 else if ((fallback & fb_rvalue) && is_gimple_formal_tmp_rhs (*expr_p))
6058 {
6059 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
6060
6061 /* An rvalue will do. Assign the gimplified expression into a new
6062 temporary TMP and replace the original expression with TMP. */
6063
6064 if (internal_post || (fallback & fb_lvalue))
6065 /* The postqueue might change the value of the expression between
6066 the initialization and use of the temporary, so we can't use a
6067 formal temp. FIXME do we care? */
6068 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
6069 else
6070 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
6071
6072 if (TREE_CODE (*expr_p) != SSA_NAME)
6073 DECL_GIMPLE_FORMAL_TEMP_P (*expr_p) = 1;
6074 }
6075 else
6076 {
6077 #ifdef ENABLE_CHECKING
6078 if (!(fallback & fb_mayfail))
6079 {
6080 fprintf (stderr, "gimplification failed:\n");
6081 print_generic_expr (stderr, *expr_p, 0);
6082 debug_tree (*expr_p);
6083 internal_error ("gimplification failed");
6084 }
6085 #endif
6086 gcc_assert (fallback & fb_mayfail);
6087 /* If this is an asm statement, and the user asked for the
6088 impossible, don't die. Fail and let gimplify_asm_expr
6089 issue an error. */
6090 ret = GS_ERROR;
6091 goto out;
6092 }
6093
6094 /* Make sure the temporary matches our predicate. */
6095 gcc_assert ((*gimple_test_f) (*expr_p));
6096
6097 if (internal_post)
6098 {
6099 annotate_all_with_locus (&internal_post, input_location);
6100 append_to_statement_list (internal_post, pre_p);
6101 }
6102
6103 out:
6104 input_location = saved_location;
6105 return ret;
6106 }
6107
6108 /* Look through TYPE for variable-sized objects and gimplify each such
6109 size that we find. Add to LIST_P any statements generated. */
6110
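/* For example, for a C99 variable-length array type such as "int a[n]",
   TYPE_SIZE and the bounds of TYPE_DOMAIN depend on 'n'; they are reduced
   here to GIMPLE values, with the statements that compute them added to
   LIST_P. (A sketch only; the exact trees depend on the front end.)  */
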
6111 void
6112 gimplify_type_sizes (tree type, tree *list_p)
6113 {
6114 tree field, t;
6115
6116 if (type == NULL || type == error_mark_node)
6117 return;
6118
6119 /* We first do the main variant, then copy into any other variants. */
6120 type = TYPE_MAIN_VARIANT (type);
6121
6122 /* Avoid infinite recursion. */
6123 if (TYPE_SIZES_GIMPLIFIED (type))
6124 return;
6125
6126 TYPE_SIZES_GIMPLIFIED (type) = 1;
6127
6128 switch (TREE_CODE (type))
6129 {
6130 case INTEGER_TYPE:
6131 case ENUMERAL_TYPE:
6132 case BOOLEAN_TYPE:
6133 case REAL_TYPE:
6134 gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
6135 gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);
6136
6137 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6138 {
6139 TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
6140 TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
6141 }
6142 break;
6143
6144 case ARRAY_TYPE:
6145 /* These types may not have declarations, so handle them here. */
6146 gimplify_type_sizes (TREE_TYPE (type), list_p);
6147 gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
6148 break;
6149
6150 case RECORD_TYPE:
6151 case UNION_TYPE:
6152 case QUAL_UNION_TYPE:
6153 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
6154 if (TREE_CODE (field) == FIELD_DECL)
6155 {
6156 gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
6157 gimplify_type_sizes (TREE_TYPE (field), list_p);
6158 }
6159 break;
6160
6161 case POINTER_TYPE:
6162 case REFERENCE_TYPE:
6163 /* We used to recurse on the pointed-to type here, which turned out to
6164 be incorrect because its definition might refer to variables not
6165 yet initialized at this point if a forward declaration is involved.
6166
6167 It was actually useful for anonymous pointed-to types to ensure
6168 that the sizes evaluation dominates every possible later use of the
6169 values. Restricting to such types here would be safe since there
6170 is no possible forward declaration around, but would introduce an
6171 undesirable middle-end semantic to anonymity. We then defer to
6172 front-ends the responsibility of ensuring that the sizes are
6173 evaluated both early and late enough, e.g. by attaching artificial
6174 type declarations to the tree. */
6175 break;
6176
6177 default:
6178 break;
6179 }
6180
6181 gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
6182 gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);
6183
6184 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6185 {
6186 TYPE_SIZE (t) = TYPE_SIZE (type);
6187 TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
6188 TYPE_SIZES_GIMPLIFIED (t) = 1;
6189 }
6190 }
6191
6192 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
6193 a size or position, has had all of its SAVE_EXPRs evaluated.
6194 We add any required statements to STMT_P. */
6195
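/* For example, a size such as SAVE_EXPR <N * 4> is evaluated into a
   temporary whose initializing statement is appended to STMT_P, while
   constants, VAR_DECLs and anything containing a PLACEHOLDER_EXPR are
   left untouched (see the test below).  */
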
6196 void
6197 gimplify_one_sizepos (tree *expr_p, tree *stmt_p)
6198 {
6199 tree type, expr = *expr_p;
6200
6201 /* We don't do anything if the value isn't there, is constant, or contains
6202 a PLACEHOLDER_EXPR. We also don't want to do anything if it's already
6203 a VAR_DECL. If it's a VAR_DECL from another function, the gimplifier
6204 will want to replace it with a new variable, but that will cause problems
6205 if this type is from outside the function. It's OK to have that here. */
6206 if (expr == NULL_TREE || TREE_CONSTANT (expr)
6207 || TREE_CODE (expr) == VAR_DECL
6208 || CONTAINS_PLACEHOLDER_P (expr))
6209 return;
6210
6211 type = TREE_TYPE (expr);
6212 *expr_p = unshare_expr (expr);
6213
6214 gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue);
6215 expr = *expr_p;
6216
6217 /* Verify that we have an exact type match with the original expression.
6218 In particular, we do not wish to drop a "sizetype" in favour of a
6219 type of similar dimensions. We don't want to pollute the generic
6220 type-stripping code with this knowledge because it doesn't matter
6221 for the bulk of GENERIC/GIMPLE. It only matters that TYPE_SIZE_UNIT
6222 and friends retain their "sizetype-ness". */
6223 if (TREE_TYPE (expr) != type
6224 && TREE_CODE (type) == INTEGER_TYPE
6225 && TYPE_IS_SIZETYPE (type))
6226 {
6227 tree tmp;
6228
6229 *expr_p = create_tmp_var (type, NULL);
6230 tmp = build1 (NOP_EXPR, type, expr);
6231 tmp = build_gimple_modify_stmt (*expr_p, tmp);
6232 if (EXPR_HAS_LOCATION (expr))
6233 SET_EXPR_LOCUS (tmp, EXPR_LOCUS (expr));
6234 else
6235 SET_EXPR_LOCATION (tmp, input_location);
6236
6237 gimplify_and_add (tmp, stmt_p);
6238 }
6239 }
6240 \f
6241 #ifdef ENABLE_CHECKING
6242 /* Compare types A and B for a "close enough" match. */
6243
6244 static bool
6245 cpt_same_type (tree a, tree b)
6246 {
6247 if (lang_hooks.types_compatible_p (a, b))
6248 return true;
6249
6250 /* ??? The C++ FE decomposes METHOD_TYPES to FUNCTION_TYPES and doesn't
6251 link them together. This routine is intended to catch type errors
6252 that will affect the optimizers, and the optimizers don't add new
6253 dereferences of function pointers, so ignore it. */
6254 if ((TREE_CODE (a) == FUNCTION_TYPE || TREE_CODE (a) == METHOD_TYPE)
6255 && (TREE_CODE (b) == FUNCTION_TYPE || TREE_CODE (b) == METHOD_TYPE))
6256 return true;
6257
6258 /* ??? The C FE pushes type qualifiers after the fact into the type of
6259 the element from the type of the array. See build_unary_op's handling
6260 of ADDR_EXPR. This seems wrong -- if we were going to do this, we
6261 should have done it when creating the variable in the first place.
6262 Alternately, why aren't the two array types made variants? */
6263 if (TREE_CODE (a) == ARRAY_TYPE && TREE_CODE (b) == ARRAY_TYPE)
6264 return cpt_same_type (TREE_TYPE (a), TREE_TYPE (b));
6265
6266 /* And because of those, we have to recurse down through pointers. */
6267 if (POINTER_TYPE_P (a) && POINTER_TYPE_P (b))
6268 return cpt_same_type (TREE_TYPE (a), TREE_TYPE (b));
6269
6270 return false;
6271 }
6272
6273 /* Check for some cases of the front end missing cast expressions.
6274 The type of a dereference should correspond to the pointer type;
6275 similarly the type of an address should match its object. */
6276
6277 static tree
6278 check_pointer_types_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
6279 void *data ATTRIBUTE_UNUSED)
6280 {
6281 tree t = *tp;
6282 tree ptype, otype, dtype;
6283
6284 switch (TREE_CODE (t))
6285 {
6286 case INDIRECT_REF:
6287 case ARRAY_REF:
6288 otype = TREE_TYPE (t);
6289 ptype = TREE_TYPE (TREE_OPERAND (t, 0));
6290 dtype = TREE_TYPE (ptype);
6291 gcc_assert (cpt_same_type (otype, dtype));
6292 break;
6293
6294 case ADDR_EXPR:
6295 ptype = TREE_TYPE (t);
6296 otype = TREE_TYPE (TREE_OPERAND (t, 0));
6297 dtype = TREE_TYPE (ptype);
6298 if (!cpt_same_type (otype, dtype))
6299 {
6300 /* &array is allowed to produce a pointer to the element, rather than
6301 a pointer to the array type. We must allow this in order to
6302 properly represent assigning the address of an array in C into
6303 a pointer to the element type. */
6304 gcc_assert (TREE_CODE (otype) == ARRAY_TYPE
6305 && POINTER_TYPE_P (ptype)
6306 && cpt_same_type (TREE_TYPE (otype), dtype));
6307 break;
6308 }
6309 break;
6310
6311 default:
6312 return NULL_TREE;
6313 }
6314
6315
6316 return NULL_TREE;
6317 }
6318 #endif
6319
6320 /* Gimplify the body of statements pointed to by BODY_P. FNDECL is the
6321 function decl containing BODY. */
6322
6323 void
6324 gimplify_body (tree *body_p, tree fndecl, bool do_parms)
6325 {
6326 location_t saved_location = input_location;
6327 tree body, parm_stmts;
6328
6329 timevar_push (TV_TREE_GIMPLIFY);
6330
6331 gcc_assert (gimplify_ctxp == NULL);
6332 push_gimplify_context ();
6333
6334 /* Unshare most shared trees in the body and in that of any nested functions.
6335 It would seem we don't have to do this for nested functions because
6336 they are supposed to be output and then the outer function gimplified
6337 first, but the g++ front end doesn't always do it that way. */
6338 unshare_body (body_p, fndecl);
6339 unvisit_body (body_p, fndecl);
6340
6341 /* Make sure input_location isn't set to something weird. */
6342 input_location = DECL_SOURCE_LOCATION (fndecl);
6343
6344 /* Resolve callee-copies. This has to be done before processing
6345 the body so that DECL_VALUE_EXPR gets processed correctly. */
6346 parm_stmts = do_parms ? gimplify_parameters () : NULL;
6347
6348 /* Gimplify the function's body. */
6349 gimplify_stmt (body_p);
6350 body = *body_p;
6351
6352 if (!body)
6353 body = alloc_stmt_list ();
6354 else if (TREE_CODE (body) == STATEMENT_LIST)
6355 {
6356 tree t = expr_only (*body_p);
6357 if (t)
6358 body = t;
6359 }
6360
6361 /* If there isn't an outer BIND_EXPR, add one. */
6362 if (TREE_CODE (body) != BIND_EXPR)
6363 {
6364 tree b = build3 (BIND_EXPR, void_type_node, NULL_TREE,
6365 NULL_TREE, NULL_TREE);
6366 TREE_SIDE_EFFECTS (b) = 1;
6367 append_to_statement_list_force (body, &BIND_EXPR_BODY (b));
6368 body = b;
6369 }
6370
6371 /* If we had callee-copies statements, insert them at the beginning
6372 of the function. */
6373 if (parm_stmts)
6374 {
6375 append_to_statement_list_force (BIND_EXPR_BODY (body), &parm_stmts);
6376 BIND_EXPR_BODY (body) = parm_stmts;
6377 }
6378
6379 /* Unshare again, in case gimplification was sloppy. */
6380 unshare_all_trees (body);
6381
6382 *body_p = body;
6383
6384 pop_gimplify_context (body);
6385 gcc_assert (gimplify_ctxp == NULL);
6386
6387 #ifdef ENABLE_CHECKING
6388 walk_tree (body_p, check_pointer_types_r, NULL, NULL);
6389 #endif
6390
6391 timevar_pop (TV_TREE_GIMPLIFY);
6392 input_location = saved_location;
6393 }
6394
6395 /* Entry point to the gimplification pass. FNDECL is the FUNCTION_DECL
6396 node for the function we want to gimplify. */
6397
6398 void
6399 gimplify_function_tree (tree fndecl)
6400 {
6401 tree oldfn, parm, ret;
6402
6403 oldfn = current_function_decl;
6404 current_function_decl = fndecl;
6405 cfun = DECL_STRUCT_FUNCTION (fndecl);
6406 if (cfun == NULL)
6407 allocate_struct_function (fndecl);
6408
6409 for (parm = DECL_ARGUMENTS (fndecl); parm ; parm = TREE_CHAIN (parm))
6410 {
6411 /* Preliminarily mark non-addressed complex variables as eligible
6412 for promotion to gimple registers. We'll transform their uses
6413 as we find them. */
6414 if ((TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
6415 || TREE_CODE (TREE_TYPE (parm)) == VECTOR_TYPE)
6416 && !TREE_THIS_VOLATILE (parm)
6417 && !needs_to_live_in_memory (parm))
6418 DECL_GIMPLE_REG_P (parm) = 1;
6419 }
6420
6421 ret = DECL_RESULT (fndecl);
6422 if ((TREE_CODE (TREE_TYPE (ret)) == COMPLEX_TYPE
6423 || TREE_CODE (TREE_TYPE (ret)) == VECTOR_TYPE)
6424 && !needs_to_live_in_memory (ret))
6425 DECL_GIMPLE_REG_P (ret) = 1;
6426
6427 gimplify_body (&DECL_SAVED_TREE (fndecl), fndecl, true);
6428
6429 /* If we're instrumenting function entry/exit, then prepend the call to
6430 the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
6431 catch the exit hook. */
6432 /* ??? Add some way to ignore exceptions for this TFE. */
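/* Schematically, the instrumented body built below is

       BIND_EXPR
         call BUILT_IN_PROFILE_FUNC_ENTER;
         TRY_FINALLY_EXPR
           <original DECL_SAVED_TREE>
         finally:
           call BUILT_IN_PROFILE_FUNC_EXIT;  */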
6433 if (flag_instrument_function_entry_exit
6434 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl))
6435 {
6436 tree tf, x, bind;
6437
6438 tf = build2 (TRY_FINALLY_EXPR, void_type_node, NULL, NULL);
6439 TREE_SIDE_EFFECTS (tf) = 1;
6440 x = DECL_SAVED_TREE (fndecl);
6441 append_to_statement_list (x, &TREE_OPERAND (tf, 0));
6442 x = implicit_built_in_decls[BUILT_IN_PROFILE_FUNC_EXIT];
6443 x = build_call_expr (x, 0);
6444 append_to_statement_list (x, &TREE_OPERAND (tf, 1));
6445
6446 bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
6447 TREE_SIDE_EFFECTS (bind) = 1;
6448 x = implicit_built_in_decls[BUILT_IN_PROFILE_FUNC_ENTER];
6449 x = build_call_expr (x, 0);
6450 append_to_statement_list (x, &BIND_EXPR_BODY (bind));
6451 append_to_statement_list (tf, &BIND_EXPR_BODY (bind));
6452
6453 DECL_SAVED_TREE (fndecl) = bind;
6454 }
6455
6456 cfun->gimplified = true;
6457 current_function_decl = oldfn;
6458 cfun = oldfn ? DECL_STRUCT_FUNCTION (oldfn) : NULL;
6459 }
6460 \f
6461 /* Expands EXPR into the list of GIMPLE statements STMTS. If SIMPLE is true,
6462 force the result to be either an SSA_NAME or an invariant; otherwise
6463 just force it to be a valid GIMPLE rhs expression. If VAR is not NULL, make the
6464 base variable of the final destination be VAR if suitable. */
6465
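/* A typical use (see force_gimple_operand_bsi below) is

       tree stmts;
       expr = force_gimple_operand (expr, &stmts, true, NULL_TREE);
       if (stmts)
         bsi_insert_before (bsi, stmts, BSI_SAME_STMT);

   With SIMPLE true the result satisfies is_gimple_val; with SIMPLE false
   it only has to satisfy is_gimple_reg_rhs.  */
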
6466 tree
6467 force_gimple_operand (tree expr, tree *stmts, bool simple, tree var)
6468 {
6469 tree t;
6470 enum gimplify_status ret;
6471 gimple_predicate gimple_test_f;
6472
6473 *stmts = NULL_TREE;
6474
6475 if (is_gimple_val (expr))
6476 return expr;
6477
6478 gimple_test_f = simple ? is_gimple_val : is_gimple_reg_rhs;
6479
6480 push_gimplify_context ();
6481 gimplify_ctxp->into_ssa = gimple_in_ssa_p (cfun);
6482
6483 if (var)
6484 expr = build_gimple_modify_stmt (var, expr);
6485
6486 ret = gimplify_expr (&expr, stmts, NULL,
6487 gimple_test_f, fb_rvalue);
6488 gcc_assert (ret != GS_ERROR);
6489
6490 if (gimple_referenced_vars (cfun))
6491 {
6492 for (t = gimplify_ctxp->temps; t ; t = TREE_CHAIN (t))
6493 add_referenced_var (t);
6494 }
6495
6496 pop_gimplify_context (NULL);
6497
6498 return expr;
6499 }
6500
6501 /* Invokes force_gimple_operand for EXPR with parameters SIMPLE_P and VAR. If
6502 some statements are produced, emits them before BSI. */
6503
6504 tree
6505 force_gimple_operand_bsi (block_stmt_iterator *bsi, tree expr,
6506 bool simple_p, tree var)
6507 {
6508 tree stmts;
6509
6510 expr = force_gimple_operand (expr, &stmts, simple_p, var);
6511 if (stmts)
6512 bsi_insert_before (bsi, stmts, BSI_SAME_STMT);
6513
6514 return expr;
6515 }
6516
6517 #include "gt-gimplify.h"