1 /* Tree lowering pass. This pass converts the GENERIC functions-as-trees
2 tree representation into the GIMPLE form.
3 Copyright (C) 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
4 Major work done by Sebastian Pop <s.pop@laposte.net>,
5 Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 2, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING. If not, write to the Free
21 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
22 02110-1301, USA. */
23
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "tm.h"
28 #include "tree.h"
29 #include "rtl.h"
30 #include "varray.h"
31 #include "tree-gimple.h"
32 #include "tree-inline.h"
33 #include "diagnostic.h"
34 #include "langhooks.h"
35 #include "langhooks-def.h"
36 #include "tree-flow.h"
37 #include "cgraph.h"
38 #include "timevar.h"
39 #include "except.h"
40 #include "hashtab.h"
41 #include "flags.h"
42 #include "real.h"
43 #include "function.h"
44 #include "output.h"
45 #include "expr.h"
46 #include "ggc.h"
47 #include "toplev.h"
48 #include "target.h"
49 #include "optabs.h"
50 #include "pointer-set.h"
51
52
53 enum gimplify_omp_var_data
54 {
55 GOVD_SEEN = 1,
56 GOVD_EXPLICIT = 2,
57 GOVD_SHARED = 4,
58 GOVD_PRIVATE = 8,
59 GOVD_FIRSTPRIVATE = 16,
60 GOVD_LASTPRIVATE = 32,
61 GOVD_REDUCTION = 64,
62 GOVD_LOCAL = 128,
63 GOVD_DEBUG_PRIVATE = 256,
64 GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
65 | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LOCAL)
66 };
67
68 struct gimplify_omp_ctx
69 {
70 struct gimplify_omp_ctx *outer_context;
71 splay_tree variables;
72 struct pointer_set_t *privatized_types;
73 location_t location;
74 enum omp_clause_default_kind default_kind;
75 bool is_parallel;
76 };
77
78 struct gimplify_ctx
79 {
80 struct gimplify_ctx *prev_context;
81
82 tree current_bind_expr;
83 tree temps;
84 tree conditional_cleanups;
85 tree exit_label;
86 tree return_temp;
87
88 VEC(tree,heap) *case_labels;
89 /* The formal temporary table. Should this be persistent? */
90 htab_t temp_htab;
91
92 int conditions;
93 bool save_stack;
94 bool into_ssa;
95
96 /* When gimplifying combined omp parallel directives (omp parallel
97      loop and omp parallel sections), any prefix code needed to set up
98 the associated worksharing construct needs to be emitted in the
99      pre-queue of its parent parallel; otherwise the lowering process
100 will move that code to the child function. Similarly, we need to
101 move up to the gimplification context of the parent parallel
102 directive so temporaries are declared in the right context. */
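  /* For example, with "#pragma omp parallel for", the statements that
     compute the loop bounds are queued here, on the parent parallel's
     pre-queue, rather than at the point of the loop; otherwise lowering
     would move them into the child function created for the region.  */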
103 tree *combined_pre_p;
104 struct gimplify_ctx *combined_ctxp;
105 };
106
107 static struct gimplify_ctx *gimplify_ctxp;
108 static struct gimplify_omp_ctx *gimplify_omp_ctxp;
109
110
111
112 /* Formal (expression) temporary table handling: Multiple occurrences of
113 the same scalar expression are evaluated into the same temporary. */
114
115 typedef struct gimple_temp_hash_elt
116 {
117 tree val; /* Key */
118 tree temp; /* Value */
119 } elt_t;
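/* Illustrative sketch (the temporary name D.1234 is made up): when
   optimizing, two occurrences of the side-effect-free expression
   "a + b" are funneled through the same formal temporary,

       D.1234 = a + b;   ... use D.1234 ...
       D.1234 = a + b;   ... use D.1234 ...

   which makes the redundancy easy for later SSA passes to clean up.  */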
120
121 /* Forward declarations. */
122 static enum gimplify_status gimplify_compound_expr (tree *, tree *, bool);
123 #ifdef ENABLE_CHECKING
124 static bool cpt_same_type (tree a, tree b);
125 #endif
126
127
128 /* Return a hash value for a formal temporary table entry. */
129
130 static hashval_t
131 gimple_tree_hash (const void *p)
132 {
133 tree t = ((const elt_t *) p)->val;
134 return iterative_hash_expr (t, 0);
135 }
136
137 /* Compare two formal temporary table entries. */
138
139 static int
140 gimple_tree_eq (const void *p1, const void *p2)
141 {
142 tree t1 = ((const elt_t *) p1)->val;
143 tree t2 = ((const elt_t *) p2)->val;
144 enum tree_code code = TREE_CODE (t1);
145
146 if (TREE_CODE (t2) != code
147 || TREE_TYPE (t1) != TREE_TYPE (t2))
148 return 0;
149
150 if (!operand_equal_p (t1, t2, 0))
151 return 0;
152
153 /* Only allow them to compare equal if they also hash equal; otherwise
154      results are nondeterministic, and we fail bootstrap comparison.  */
155 gcc_assert (gimple_tree_hash (p1) == gimple_tree_hash (p2));
156
157 return 1;
158 }
159
160 /* Set up a context for the gimplifier. */
161
162 void
163 push_gimplify_context (void)
164 {
165 struct gimplify_ctx *c;
166
167 c = (struct gimplify_ctx *) xcalloc (1, sizeof (struct gimplify_ctx));
168 c->prev_context = gimplify_ctxp;
169 if (optimize)
170 c->temp_htab = htab_create (1000, gimple_tree_hash, gimple_tree_eq, free);
171
172 gimplify_ctxp = c;
173 }
174
175 /* Tear down a context for the gimplifier. If BODY is non-null, then
176 put the temporaries into the outer BIND_EXPR. Otherwise, put them
177 in the unexpanded_var_list. */
178
179 void
180 pop_gimplify_context (tree body)
181 {
182 struct gimplify_ctx *c = gimplify_ctxp;
183 tree t;
184
185 gcc_assert (c && !c->current_bind_expr);
186 gimplify_ctxp = c->prev_context;
187
188 for (t = c->temps; t ; t = TREE_CHAIN (t))
189 DECL_GIMPLE_FORMAL_TEMP_P (t) = 0;
190
191 if (body)
192 declare_tmp_vars (c->temps, body);
193 else
194 record_vars (c->temps);
195
196 if (optimize)
197 htab_delete (c->temp_htab);
198 free (c);
199 }
200
201 static void
202 gimple_push_bind_expr (tree bind)
203 {
204 TREE_CHAIN (bind) = gimplify_ctxp->current_bind_expr;
205 gimplify_ctxp->current_bind_expr = bind;
206 }
207
208 static void
209 gimple_pop_bind_expr (void)
210 {
211 gimplify_ctxp->current_bind_expr
212 = TREE_CHAIN (gimplify_ctxp->current_bind_expr);
213 }
214
215 tree
216 gimple_current_bind_expr (void)
217 {
218 return gimplify_ctxp->current_bind_expr;
219 }
220
221 /* Returns true iff there is a COND_EXPR between us and the innermost
222 CLEANUP_POINT_EXPR. This info is used by gimple_push_cleanup. */
223
224 static bool
225 gimple_conditional_context (void)
226 {
227 return gimplify_ctxp->conditions > 0;
228 }
229
230 /* Note that we've entered a COND_EXPR. */
231
232 static void
233 gimple_push_condition (void)
234 {
235 #ifdef ENABLE_CHECKING
236 if (gimplify_ctxp->conditions == 0)
237 gcc_assert (!gimplify_ctxp->conditional_cleanups);
238 #endif
239 ++(gimplify_ctxp->conditions);
240 }
241
242 /* Note that we've left a COND_EXPR. If we're back at unconditional scope
243 now, add any conditional cleanups we've seen to the prequeue. */
244
245 static void
246 gimple_pop_condition (tree *pre_p)
247 {
248 int conds = --(gimplify_ctxp->conditions);
249
250 gcc_assert (conds >= 0);
251 if (conds == 0)
252 {
253 append_to_statement_list (gimplify_ctxp->conditional_cleanups, pre_p);
254 gimplify_ctxp->conditional_cleanups = NULL_TREE;
255 }
256 }
257
258 /* A stable comparison routine for use with splay trees and DECLs. */
259
260 static int
261 splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
262 {
263 tree a = (tree) xa;
264 tree b = (tree) xb;
265
266 return DECL_UID (a) - DECL_UID (b);
267 }
268
269 /* Create a new omp construct that deals with variable remapping. */
270
271 static struct gimplify_omp_ctx *
272 new_omp_context (bool is_parallel)
273 {
274 struct gimplify_omp_ctx *c;
275
276 c = XCNEW (struct gimplify_omp_ctx);
277 c->outer_context = gimplify_omp_ctxp;
278 c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
279 c->privatized_types = pointer_set_create ();
280 c->location = input_location;
281 c->is_parallel = is_parallel;
282 c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
283
284 return c;
285 }
286
287 /* Destroy an omp construct that deals with variable remapping. */
288
289 static void
290 delete_omp_context (struct gimplify_omp_ctx *c)
291 {
292 splay_tree_delete (c->variables);
293 pointer_set_destroy (c->privatized_types);
294 XDELETE (c);
295 }
296
297 static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
298 static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);
299
300 /* A subroutine of append_to_statement_list{,_force}. T is not NULL. */
301
302 static void
303 append_to_statement_list_1 (tree t, tree *list_p)
304 {
305 tree list = *list_p;
306 tree_stmt_iterator i;
307
308 if (!list)
309 {
310 if (t && TREE_CODE (t) == STATEMENT_LIST)
311 {
312 *list_p = t;
313 return;
314 }
315 *list_p = list = alloc_stmt_list ();
316 }
317
318 i = tsi_last (list);
319 tsi_link_after (&i, t, TSI_CONTINUE_LINKING);
320 }
321
322 /* Add T to the end of the list container pointed to by LIST_P.
323 If T is an expression with no effects, it is ignored. */
324
325 void
326 append_to_statement_list (tree t, tree *list_p)
327 {
328 if (t && TREE_SIDE_EFFECTS (t))
329 append_to_statement_list_1 (t, list_p);
330 }
331
332 /* Similar, but the statement is always added, regardless of side effects. */
333
334 void
335 append_to_statement_list_force (tree t, tree *list_p)
336 {
337 if (t != NULL_TREE)
338 append_to_statement_list_1 (t, list_p);
339 }
340
341 /* Both gimplify the statement T and append it to LIST_P. */
342
343 void
344 gimplify_and_add (tree t, tree *list_p)
345 {
346 gimplify_stmt (&t);
347 append_to_statement_list (t, list_p);
348 }
349
350 /* Strip off a legitimate source ending from the input string NAME of
351 length LEN. Rather than having to know the names used by all of
352 our front ends, we strip off an ending of a period followed by
353 up to five characters. (Java uses ".class".) */
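/* For example (illustrative): "foo.c" becomes "foo" and "Shape.class"
   becomes "Shape"; a name with no such suffix is left unchanged.  */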
354
355 static inline void
356 remove_suffix (char *name, int len)
357 {
358 int i;
359
360 for (i = 2; i < 8 && len > i; i++)
361 {
362 if (name[len - i] == '.')
363 {
364 name[len - i] = '\0';
365 break;
366 }
367 }
368 }
369
370 /* Create a nameless artificial label and put it in the current function
371 context. Returns the newly created label. */
372
373 tree
374 create_artificial_label (void)
375 {
376 tree lab = build_decl (LABEL_DECL, NULL_TREE, void_type_node);
377
378 DECL_ARTIFICIAL (lab) = 1;
379 DECL_IGNORED_P (lab) = 1;
380 DECL_CONTEXT (lab) = current_function_decl;
381 return lab;
382 }
383
384 /* Subroutine for find_single_pointer_decl. */
385
386 static tree
387 find_single_pointer_decl_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
388 void *data)
389 {
390 tree *pdecl = (tree *) data;
391
392 if (DECL_P (*tp) && POINTER_TYPE_P (TREE_TYPE (*tp)))
393 {
394 if (*pdecl)
395 {
396 /* We already found a pointer decl; return anything other
397 than NULL_TREE to unwind from walk_tree signalling that
398 we have a duplicate. */
399 return *tp;
400 }
401 *pdecl = *tp;
402 }
403
404 return NULL_TREE;
405 }
406
407 /* Find the single DECL of pointer type in the tree T and return it.
408 If there are zero or more than one such DECLs, return NULL. */
409
410 static tree
411 find_single_pointer_decl (tree t)
412 {
413 tree decl = NULL_TREE;
414
415 if (walk_tree (&t, find_single_pointer_decl_1, &decl, NULL))
416 {
417 /* find_single_pointer_decl_1 returns a non-zero value, causing
418 walk_tree to return a non-zero value, to indicate that it
419 found more than one pointer DECL. */
420 return NULL_TREE;
421 }
422
423 return decl;
424 }
425
426 /* Create a new temporary name with PREFIX. Returns an identifier. */
427
428 static GTY(()) unsigned int tmp_var_id_num;
429
430 tree
431 create_tmp_var_name (const char *prefix)
432 {
433 char *tmp_name;
434
435 if (prefix)
436 {
437 char *preftmp = ASTRDUP (prefix);
438
439 remove_suffix (preftmp, strlen (preftmp));
440 prefix = preftmp;
441 }
442
443 ASM_FORMAT_PRIVATE_NAME (tmp_name, prefix ? prefix : "T", tmp_var_id_num++);
444 return get_identifier (tmp_name);
445 }
446
447
448 /* Create a new temporary variable declaration of type TYPE.
449 Does NOT push it into the current binding. */
450
451 tree
452 create_tmp_var_raw (tree type, const char *prefix)
453 {
454 tree tmp_var;
455 tree new_type;
456
457 /* Make the type of the variable writable. */
458 new_type = build_type_variant (type, 0, 0);
459 TYPE_ATTRIBUTES (new_type) = TYPE_ATTRIBUTES (type);
460
461 tmp_var = build_decl (VAR_DECL, prefix ? create_tmp_var_name (prefix) : NULL,
462 type);
463
464 /* The variable was declared by the compiler. */
465 DECL_ARTIFICIAL (tmp_var) = 1;
466 /* And we don't want debug info for it. */
467 DECL_IGNORED_P (tmp_var) = 1;
468
469 /* Make the variable writable. */
470 TREE_READONLY (tmp_var) = 0;
471
472 DECL_EXTERNAL (tmp_var) = 0;
473 TREE_STATIC (tmp_var) = 0;
474 TREE_USED (tmp_var) = 1;
475
476 return tmp_var;
477 }
478
479 /* Create a new temporary variable declaration of type TYPE. DOES push the
480 variable into the current binding. Further, assume that this is called
481 only from gimplification or optimization, at which point the creation of
482 certain types are bugs. */
483
484 tree
485 create_tmp_var (tree type, const char *prefix)
486 {
487 tree tmp_var;
488
489 /* We don't allow types that are addressable (meaning we can't make copies),
490 incomplete, or of variable size. */
491 gcc_assert (!TREE_ADDRESSABLE (type)
492 && COMPLETE_TYPE_P (type)
493 && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST);
494
495 tmp_var = create_tmp_var_raw (type, prefix);
496 gimple_add_tmp_var (tmp_var);
497 return tmp_var;
498 }
499
500 /* Given a tree, try to return a useful variable name that we can use
501 to prefix a temporary that is being assigned the value of the tree.
502    I.e., given <temp> = &A, return A.  */
503
504 const char *
505 get_name (tree t)
506 {
507 tree stripped_decl;
508
509 stripped_decl = t;
510 STRIP_NOPS (stripped_decl);
511 if (DECL_P (stripped_decl) && DECL_NAME (stripped_decl))
512 return IDENTIFIER_POINTER (DECL_NAME (stripped_decl));
513 else
514 {
515 switch (TREE_CODE (stripped_decl))
516 {
517 case ADDR_EXPR:
518 return get_name (TREE_OPERAND (stripped_decl, 0));
519 break;
520 default:
521 return NULL;
522 }
523 }
524 }
525
526 /* Create a temporary with a name derived from VAL. Subroutine of
527 lookup_tmp_var; nobody else should call this function. */
528
529 static inline tree
530 create_tmp_from_val (tree val)
531 {
532 return create_tmp_var (TYPE_MAIN_VARIANT (TREE_TYPE (val)), get_name (val));
533 }
534
535 /* Create a temporary to hold the value of VAL. If IS_FORMAL, try to reuse
536 an existing expression temporary. */
537
538 static tree
539 lookup_tmp_var (tree val, bool is_formal)
540 {
541 tree ret;
542
543 /* If not optimizing, never really reuse a temporary. local-alloc
544 won't allocate any variable that is used in more than one basic
545 block, which means it will go into memory, causing much extra
546 work in reload and final and poorer code generation, outweighing
547 the extra memory allocation here. */
548 if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
549 ret = create_tmp_from_val (val);
550 else
551 {
552 elt_t elt, *elt_p;
553 void **slot;
554
555 elt.val = val;
556 slot = htab_find_slot (gimplify_ctxp->temp_htab, (void *)&elt, INSERT);
557 if (*slot == NULL)
558 {
559 elt_p = XNEW (elt_t);
560 elt_p->val = val;
561 elt_p->temp = ret = create_tmp_from_val (val);
562 *slot = (void *) elt_p;
563 }
564 else
565 {
566 elt_p = (elt_t *) *slot;
567 ret = elt_p->temp;
568 }
569 }
570
571 if (is_formal)
572 DECL_GIMPLE_FORMAL_TEMP_P (ret) = 1;
573
574 return ret;
575 }
576
577 /* Returns a formal temporary variable initialized with VAL. PRE_P is as
578 in gimplify_expr. Only use this function if:
579
580 1) The value of the unfactored expression represented by VAL will not
581 change between the initialization and use of the temporary, and
582 2) The temporary will not be otherwise modified.
583
584 For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
585 and #2 means it is inappropriate for && temps.
586
587 For other cases, use get_initialized_tmp_var instead. */
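/* Illustrative use (the temporary name is made up): when gimplifying
   "a = (b + c) * d", the subexpression "b + c" is pulled out through
   this helper, giving roughly

       D.1235 = b + c;
       a = D.1235 * d;

   with the first assignment emitted on PRE_P.  */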
588
589 static tree
590 internal_get_tmp_var (tree val, tree *pre_p, tree *post_p, bool is_formal)
591 {
592 tree t, mod;
593
594 gimplify_expr (&val, pre_p, post_p, is_gimple_formal_tmp_rhs, fb_rvalue);
595
596 t = lookup_tmp_var (val, is_formal);
597
598 if (is_formal)
599 {
600 tree u = find_single_pointer_decl (val);
601
602 if (u && TREE_CODE (u) == VAR_DECL && DECL_BASED_ON_RESTRICT_P (u))
603 u = DECL_GET_RESTRICT_BASE (u);
604 if (u && TYPE_RESTRICT (TREE_TYPE (u)))
605 {
606 if (DECL_BASED_ON_RESTRICT_P (t))
607 gcc_assert (u == DECL_GET_RESTRICT_BASE (t));
608 else
609 {
610 DECL_BASED_ON_RESTRICT_P (t) = 1;
611 SET_DECL_RESTRICT_BASE (t, u);
612 }
613 }
614 }
615
616 if (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE)
617 DECL_COMPLEX_GIMPLE_REG_P (t) = 1;
618
619 mod = build2 (MODIFY_EXPR, TREE_TYPE (t), t, val);
620
621 if (EXPR_HAS_LOCATION (val))
622 SET_EXPR_LOCUS (mod, EXPR_LOCUS (val));
623 else
624 SET_EXPR_LOCATION (mod, input_location);
625
626 /* gimplify_modify_expr might want to reduce this further. */
627 gimplify_and_add (mod, pre_p);
628
629 /* If we're gimplifying into ssa, gimplify_modify_expr will have
630 given our temporary an ssa name. Find and return it. */
631 if (gimplify_ctxp->into_ssa)
632 t = TREE_OPERAND (mod, 0);
633
634 return t;
635 }
636
637 tree
638 get_formal_tmp_var (tree val, tree *pre_p)
639 {
640 return internal_get_tmp_var (val, pre_p, NULL, true);
641 }
642
643 /* Returns a temporary variable initialized with VAL. PRE_P and POST_P
644 are as in gimplify_expr. */
645
646 tree
647 get_initialized_tmp_var (tree val, tree *pre_p, tree *post_p)
648 {
649 return internal_get_tmp_var (val, pre_p, post_p, false);
650 }
651
652 /* Declares all the variables in VARS in SCOPE. */
653
654 void
655 declare_tmp_vars (tree vars, tree scope)
656 {
657 tree last = vars;
658 if (last)
659 {
660 tree temps;
661
662 /* C99 mode puts the default 'return 0;' for main outside the outer
663 braces. So drill down until we find an actual scope. */
664 while (TREE_CODE (scope) == COMPOUND_EXPR)
665 scope = TREE_OPERAND (scope, 0);
666
667 gcc_assert (TREE_CODE (scope) == BIND_EXPR);
668
669 temps = nreverse (last);
670 TREE_CHAIN (last) = BIND_EXPR_VARS (scope);
671 BIND_EXPR_VARS (scope) = temps;
672 }
673 }
674
675 void
676 gimple_add_tmp_var (tree tmp)
677 {
678 gcc_assert (!TREE_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));
679
680 DECL_CONTEXT (tmp) = current_function_decl;
681 DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;
682
683 if (gimplify_ctxp)
684 {
685 TREE_CHAIN (tmp) = gimplify_ctxp->temps;
686 gimplify_ctxp->temps = tmp;
687
688 /* Mark temporaries local within the nearest enclosing parallel. */
689 if (gimplify_omp_ctxp)
690 {
691 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
692 while (ctx && !ctx->is_parallel)
693 ctx = ctx->outer_context;
694 if (ctx)
695 omp_add_variable (ctx, tmp, GOVD_LOCAL | GOVD_SEEN);
696 }
697 }
698 else if (cfun)
699 record_vars (tmp);
700 else
701 declare_tmp_vars (tmp, DECL_SAVED_TREE (current_function_decl));
702 }
703
704 /* Determines whether to assign a locus to the statement STMT. */
705
706 static bool
707 should_carry_locus_p (tree stmt)
708 {
709 /* Don't emit a line note for a label. We particularly don't want to
710 emit one for the break label, since it doesn't actually correspond
711 to the beginning of the loop/switch. */
712 if (TREE_CODE (stmt) == LABEL_EXPR)
713 return false;
714
715 /* Do not annotate empty statements, since it confuses gcov. */
716 if (!TREE_SIDE_EFFECTS (stmt))
717 return false;
718
719 return true;
720 }
721
722 static void
723 annotate_one_with_locus (tree t, location_t locus)
724 {
725 if (EXPR_P (t) && ! EXPR_HAS_LOCATION (t) && should_carry_locus_p (t))
726 SET_EXPR_LOCATION (t, locus);
727 }
728
729 void
730 annotate_all_with_locus (tree *stmt_p, location_t locus)
731 {
732 tree_stmt_iterator i;
733
734 if (!*stmt_p)
735 return;
736
737 for (i = tsi_start (*stmt_p); !tsi_end_p (i); tsi_next (&i))
738 {
739 tree t = tsi_stmt (i);
740
741 /* Assuming we've already been gimplified, we shouldn't
742 see nested chaining constructs anymore. */
743 gcc_assert (TREE_CODE (t) != STATEMENT_LIST
744 && TREE_CODE (t) != COMPOUND_EXPR);
745
746 annotate_one_with_locus (t, locus);
747 }
748 }
749
750 /* Similar to copy_tree_r() but do not copy SAVE_EXPR or TARGET_EXPR nodes.
751 These nodes model computations that should only be done once. If we
752 were to unshare something like SAVE_EXPR(i++), the gimplification
753 process would create wrong code. */
754
755 static tree
756 mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
757 {
758 enum tree_code code = TREE_CODE (*tp);
759 /* Don't unshare types, decls, constants and SAVE_EXPR nodes. */
760 if (TREE_CODE_CLASS (code) == tcc_type
761 || TREE_CODE_CLASS (code) == tcc_declaration
762 || TREE_CODE_CLASS (code) == tcc_constant
763 || code == SAVE_EXPR || code == TARGET_EXPR
764 /* We can't do anything sensible with a BLOCK used as an expression,
765 but we also can't just die when we see it because of non-expression
766 uses. So just avert our eyes and cross our fingers. Silly Java. */
767 || code == BLOCK)
768 *walk_subtrees = 0;
769 else
770 {
771 gcc_assert (code != BIND_EXPR);
772 copy_tree_r (tp, walk_subtrees, data);
773 }
774
775 return NULL_TREE;
776 }
777
778 /* Callback for walk_tree to unshare most of the shared trees rooted at
779 *TP. If *TP has been visited already (i.e., TREE_VISITED (*TP) == 1),
780 then *TP is deep copied by calling copy_tree_r.
781
782 This unshares the same trees as copy_tree_r with the exception of
783 SAVE_EXPR nodes. These nodes model computations that should only be
784 done once. If we were to unshare something like SAVE_EXPR(i++), the
785 gimplification process would create wrong code. */
786
787 static tree
788 copy_if_shared_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
789 void *data ATTRIBUTE_UNUSED)
790 {
791 tree t = *tp;
792 enum tree_code code = TREE_CODE (t);
793
794 /* Skip types, decls, and constants. But we do want to look at their
795 types and the bounds of types. Mark them as visited so we properly
796 unmark their subtrees on the unmark pass. If we've already seen them,
797 don't look down further. */
798 if (TREE_CODE_CLASS (code) == tcc_type
799 || TREE_CODE_CLASS (code) == tcc_declaration
800 || TREE_CODE_CLASS (code) == tcc_constant)
801 {
802 if (TREE_VISITED (t))
803 *walk_subtrees = 0;
804 else
805 TREE_VISITED (t) = 1;
806 }
807
808 /* If this node has been visited already, unshare it and don't look
809 any deeper. */
810 else if (TREE_VISITED (t))
811 {
812 walk_tree (tp, mostly_copy_tree_r, NULL, NULL);
813 *walk_subtrees = 0;
814 }
815
816 /* Otherwise, mark the tree as visited and keep looking. */
817 else
818 TREE_VISITED (t) = 1;
819
820 return NULL_TREE;
821 }
822
823 static tree
824 unmark_visited_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
825 void *data ATTRIBUTE_UNUSED)
826 {
827 if (TREE_VISITED (*tp))
828 TREE_VISITED (*tp) = 0;
829 else
830 *walk_subtrees = 0;
831
832 return NULL_TREE;
833 }
834
835 /* Unshare all the trees in BODY_P, a pointer into the body of FNDECL, and the
836 bodies of any nested functions if we are unsharing the entire body of
837 FNDECL. */
838
839 static void
840 unshare_body (tree *body_p, tree fndecl)
841 {
842 struct cgraph_node *cgn = cgraph_node (fndecl);
843
844 walk_tree (body_p, copy_if_shared_r, NULL, NULL);
845 if (body_p == &DECL_SAVED_TREE (fndecl))
846 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
847 unshare_body (&DECL_SAVED_TREE (cgn->decl), cgn->decl);
848 }
849
850 /* Likewise, but mark all trees as not visited. */
851
852 static void
853 unvisit_body (tree *body_p, tree fndecl)
854 {
855 struct cgraph_node *cgn = cgraph_node (fndecl);
856
857 walk_tree (body_p, unmark_visited_r, NULL, NULL);
858 if (body_p == &DECL_SAVED_TREE (fndecl))
859 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
860 unvisit_body (&DECL_SAVED_TREE (cgn->decl), cgn->decl);
861 }
862
863 /* Unshare T and all the trees reached from T via TREE_CHAIN. */
864
865 static void
866 unshare_all_trees (tree t)
867 {
868 walk_tree (&t, copy_if_shared_r, NULL, NULL);
869 walk_tree (&t, unmark_visited_r, NULL, NULL);
870 }
871
872 /* Unconditionally make an unshared copy of EXPR. This is used when using
873 stored expressions which span multiple functions, such as BINFO_VTABLE,
874 as the normal unsharing process can't tell that they're shared. */
875
876 tree
877 unshare_expr (tree expr)
878 {
879 walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
880 return expr;
881 }
882
883 /* A terser interface for building a representation of an exception
884 specification. */
885
886 tree
887 gimple_build_eh_filter (tree body, tree allowed, tree failure)
888 {
889 tree t;
890
891 /* FIXME should the allowed types go in TREE_TYPE? */
892 t = build2 (EH_FILTER_EXPR, void_type_node, allowed, NULL_TREE);
893 append_to_statement_list (failure, &EH_FILTER_FAILURE (t));
894
895 t = build2 (TRY_CATCH_EXPR, void_type_node, NULL_TREE, t);
896 append_to_statement_list (body, &TREE_OPERAND (t, 0));
897
898 return t;
899 }
900
901 \f
902 /* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
903 contain statements and have a value. Assign its value to a temporary
904 and give it void_type_node. Returns the temporary, or NULL_TREE if
905 WRAPPER was already void. */
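/* Illustrative example: for a statement-expression used as a value,

       x = ({ foo (); y; });

   the last statement of the BIND_EXPR is rewritten to "temp = y;", the
   wrapper's type becomes void, and TEMP is returned so the caller can
   go on to build "x = temp;".  */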
906
907 tree
908 voidify_wrapper_expr (tree wrapper, tree temp)
909 {
910 if (!VOID_TYPE_P (TREE_TYPE (wrapper)))
911 {
912 tree *p, sub = wrapper;
913
914 restart:
915 /* Set p to point to the body of the wrapper. */
916 switch (TREE_CODE (sub))
917 {
918 case BIND_EXPR:
919 /* For a BIND_EXPR, the body is operand 1. */
920 p = &BIND_EXPR_BODY (sub);
921 break;
922
923 default:
924 p = &TREE_OPERAND (sub, 0);
925 break;
926 }
927
928 /* Advance to the last statement. Set all container types to void. */
929 if (TREE_CODE (*p) == STATEMENT_LIST)
930 {
931 tree_stmt_iterator i = tsi_last (*p);
932 p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
933 }
934 else
935 {
936 for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
937 {
938 TREE_SIDE_EFFECTS (*p) = 1;
939 TREE_TYPE (*p) = void_type_node;
940 }
941 }
942
943 if (p == NULL || IS_EMPTY_STMT (*p))
944 ;
945 /* Look through exception handling. */
946 else if (TREE_CODE (*p) == TRY_FINALLY_EXPR
947 || TREE_CODE (*p) == TRY_CATCH_EXPR)
948 {
949 sub = *p;
950 goto restart;
951 }
952 /* The C++ frontend already did this for us. */
953 else if (TREE_CODE (*p) == INIT_EXPR
954 || TREE_CODE (*p) == TARGET_EXPR)
955 temp = TREE_OPERAND (*p, 0);
956 /* If we're returning a dereference, move the dereference
957 outside the wrapper. */
958 else if (TREE_CODE (*p) == INDIRECT_REF)
959 {
960 tree ptr = TREE_OPERAND (*p, 0);
961 temp = create_tmp_var (TREE_TYPE (ptr), "retval");
962 *p = build2 (MODIFY_EXPR, TREE_TYPE (ptr), temp, ptr);
963 temp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (temp)), temp);
964 /* If this is a BIND_EXPR for a const inline function, it might not
965 have TREE_SIDE_EFFECTS set. That is no longer accurate. */
966 TREE_SIDE_EFFECTS (wrapper) = 1;
967 }
968 else
969 {
970 if (!temp)
971 temp = create_tmp_var (TREE_TYPE (wrapper), "retval");
972 *p = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, *p);
973 TREE_SIDE_EFFECTS (wrapper) = 1;
974 }
975
976 TREE_TYPE (wrapper) = void_type_node;
977 return temp;
978 }
979
980 return NULL_TREE;
981 }
982
983 /* Prepare calls to builtins to SAVE and RESTORE the stack as well as
984 a temporary through which they communicate. */
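/* The generated pair looks roughly like this (temporary name made up):

       saved_stack.1 = __builtin_stack_save ();
       ...
       __builtin_stack_restore (saved_stack.1);  */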
985
986 static void
987 build_stack_save_restore (tree *save, tree *restore)
988 {
989 tree save_call, tmp_var;
990
991 save_call =
992 build_function_call_expr (implicit_built_in_decls[BUILT_IN_STACK_SAVE],
993 NULL_TREE);
994 tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
995
996 *save = build2 (MODIFY_EXPR, ptr_type_node, tmp_var, save_call);
997 *restore =
998 build_function_call_expr (implicit_built_in_decls[BUILT_IN_STACK_RESTORE],
999 tree_cons (NULL_TREE, tmp_var, NULL_TREE));
1000 }
1001
1002 /* Gimplify a BIND_EXPR. Just voidify and recurse. */
1003
1004 static enum gimplify_status
1005 gimplify_bind_expr (tree *expr_p, tree temp, tree *pre_p)
1006 {
1007 tree bind_expr = *expr_p;
1008 bool old_save_stack = gimplify_ctxp->save_stack;
1009 tree t;
1010
1011 temp = voidify_wrapper_expr (bind_expr, temp);
1012
1013 /* Mark variables seen in this bind expr. */
1014 for (t = BIND_EXPR_VARS (bind_expr); t ; t = TREE_CHAIN (t))
1015 {
1016 if (TREE_CODE (t) == VAR_DECL)
1017 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
1018
1019 /* Preliminarily mark non-addressed complex variables as eligible
1020 for promotion to gimple registers. We'll transform their uses
1021 as we find them. */
1022 if (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
1023 && !TREE_THIS_VOLATILE (t)
1024 && (TREE_CODE (t) == VAR_DECL && !DECL_HARD_REGISTER (t))
1025 && !needs_to_live_in_memory (t))
1026 DECL_COMPLEX_GIMPLE_REG_P (t) = 1;
1027 }
1028
1029 /* Mark variables seen in this bind expr as locals. */
1030 if (gimplify_omp_ctxp)
1031 {
1032 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
1033
1034 for (t = BIND_EXPR_VARS (bind_expr); t ; t = TREE_CHAIN (t))
1035 if (TREE_CODE (t) == VAR_DECL && !is_global_var (t))
1036 omp_add_variable (ctx, t, GOVD_LOCAL | GOVD_SEEN);
1037 }
1038
1039 gimple_push_bind_expr (bind_expr);
1040 gimplify_ctxp->save_stack = false;
1041
1042 gimplify_to_stmt_list (&BIND_EXPR_BODY (bind_expr));
1043
1044 if (gimplify_ctxp->save_stack)
1045 {
1046 tree stack_save, stack_restore;
1047
1048 /* Save stack on entry and restore it on exit. Add a try_finally
1049 block to achieve this. Note that mudflap depends on the
1050 format of the emitted code: see mx_register_decls(). */
1051 build_stack_save_restore (&stack_save, &stack_restore);
1052
1053 t = build2 (TRY_FINALLY_EXPR, void_type_node,
1054 BIND_EXPR_BODY (bind_expr), NULL_TREE);
1055 append_to_statement_list (stack_restore, &TREE_OPERAND (t, 1));
1056
1057 BIND_EXPR_BODY (bind_expr) = NULL_TREE;
1058 append_to_statement_list (stack_save, &BIND_EXPR_BODY (bind_expr));
1059 append_to_statement_list (t, &BIND_EXPR_BODY (bind_expr));
1060 }
1061
1062 gimplify_ctxp->save_stack = old_save_stack;
1063 gimple_pop_bind_expr ();
1064
1065 if (temp)
1066 {
1067 *expr_p = temp;
1068 append_to_statement_list (bind_expr, pre_p);
1069 return GS_OK;
1070 }
1071 else
1072 return GS_ALL_DONE;
1073 }
1074
1075 /* Gimplify a RETURN_EXPR. If the expression to be returned is not a
1076 GIMPLE value, it is assigned to a new temporary and the statement is
1077 re-written to return the temporary.
1078
1079 PRE_P points to the list where side effects that must happen before
1080 STMT should be stored. */
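/* Illustrative transformation (temporary name made up): for a value
   returned in registers,

       return a + b;

   becomes roughly

       D.1236 = a + b;
       return <retval> = D.1236;

   with the computation of D.1236 emitted on PRE_P.  */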
1081
1082 static enum gimplify_status
1083 gimplify_return_expr (tree stmt, tree *pre_p)
1084 {
1085 tree ret_expr = TREE_OPERAND (stmt, 0);
1086 tree result_decl, result;
1087
1088 if (!ret_expr || TREE_CODE (ret_expr) == RESULT_DECL
1089 || ret_expr == error_mark_node)
1090 return GS_ALL_DONE;
1091
1092 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
1093 result_decl = NULL_TREE;
1094 else
1095 {
1096 result_decl = TREE_OPERAND (ret_expr, 0);
1097 if (TREE_CODE (result_decl) == INDIRECT_REF)
1098 /* See through a return by reference. */
1099 result_decl = TREE_OPERAND (result_decl, 0);
1100
1101 gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
1102 || TREE_CODE (ret_expr) == INIT_EXPR)
1103 && TREE_CODE (result_decl) == RESULT_DECL);
1104 }
1105
1106 /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
1107 Recall that aggregate_value_p is FALSE for any aggregate type that is
1108 returned in registers. If we're returning values in registers, then
1109 we don't want to extend the lifetime of the RESULT_DECL, particularly
1110 across another call. In addition, for those aggregates for which
1111 hard_function_value generates a PARALLEL, we'll die during normal
1112 expansion of structure assignments; there's special code in expand_return
1113 to handle this case that does not exist in expand_expr. */
1114 if (!result_decl
1115 || aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
1116 result = result_decl;
1117 else if (gimplify_ctxp->return_temp)
1118 result = gimplify_ctxp->return_temp;
1119 else
1120 {
1121 result = create_tmp_var (TREE_TYPE (result_decl), NULL);
1122
1123 /* ??? With complex control flow (usually involving abnormal edges),
1124 we can wind up warning about an uninitialized value for this. Due
1125 to how this variable is constructed and initialized, this is never
1126 true. Give up and never warn. */
1127 TREE_NO_WARNING (result) = 1;
1128
1129 gimplify_ctxp->return_temp = result;
1130 }
1131
1132 /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
1133 Then gimplify the whole thing. */
1134 if (result != result_decl)
1135 TREE_OPERAND (ret_expr, 0) = result;
1136
1137 gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);
1138
1139 /* If we didn't use a temporary, then the result is just the result_decl.
1140 Otherwise we need a simple copy. This should already be gimple. */
1141 if (result == result_decl)
1142 ret_expr = result;
1143 else
1144 ret_expr = build2 (MODIFY_EXPR, TREE_TYPE (result), result_decl, result);
1145 TREE_OPERAND (stmt, 0) = ret_expr;
1146
1147 return GS_ALL_DONE;
1148 }
1149
1150 /* Gimplifies a DECL_EXPR node *STMT_P by making any necessary allocation
1151 and initialization explicit. */
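/* Illustrative example (temporary names made up): for the
   variable-length array declaration

       int a[n];

   the byte size is gimplified and the storage allocated explicitly,
   roughly

       D.1240 = (sizetype) n * 4;        (assuming a 4-byte int)
       a.3 = __builtin_alloca (D.1240);

   and later uses of "a" are rewritten via DECL_VALUE_EXPR to *a.3.  */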
1152
1153 static enum gimplify_status
1154 gimplify_decl_expr (tree *stmt_p)
1155 {
1156 tree stmt = *stmt_p;
1157 tree decl = DECL_EXPR_DECL (stmt);
1158
1159 *stmt_p = NULL_TREE;
1160
1161 if (TREE_TYPE (decl) == error_mark_node)
1162 return GS_ERROR;
1163
1164 if ((TREE_CODE (decl) == TYPE_DECL
1165 || TREE_CODE (decl) == VAR_DECL)
1166 && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
1167 gimplify_type_sizes (TREE_TYPE (decl), stmt_p);
1168
1169 if (TREE_CODE (decl) == VAR_DECL && !DECL_EXTERNAL (decl))
1170 {
1171 tree init = DECL_INITIAL (decl);
1172
1173 if (!TREE_CONSTANT (DECL_SIZE (decl)))
1174 {
1175 /* This is a variable-sized decl. Simplify its size and mark it
1176 for deferred expansion. Note that mudflap depends on the format
1177 of the emitted code: see mx_register_decls(). */
1178 tree t, args, addr, ptr_type;
1179
1180 gimplify_one_sizepos (&DECL_SIZE (decl), stmt_p);
1181 gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), stmt_p);
1182
1183 /* All occurrences of this decl in final gimplified code will be
1184 replaced by indirection. Setting DECL_VALUE_EXPR does two
1185 things: First, it lets the rest of the gimplifier know what
1186 replacement to use. Second, it lets the debug info know
1187 where to find the value. */
1188 ptr_type = build_pointer_type (TREE_TYPE (decl));
1189 addr = create_tmp_var (ptr_type, get_name (decl));
1190 DECL_IGNORED_P (addr) = 0;
1191 t = build_fold_indirect_ref (addr);
1192 SET_DECL_VALUE_EXPR (decl, t);
1193 DECL_HAS_VALUE_EXPR_P (decl) = 1;
1194
1195 args = tree_cons (NULL, DECL_SIZE_UNIT (decl), NULL);
1196 t = built_in_decls[BUILT_IN_ALLOCA];
1197 t = build_function_call_expr (t, args);
1198 t = fold_convert (ptr_type, t);
1199 t = build2 (MODIFY_EXPR, void_type_node, addr, t);
1200
1201 gimplify_and_add (t, stmt_p);
1202
1203 /* Indicate that we need to restore the stack level when the
1204 enclosing BIND_EXPR is exited. */
1205 gimplify_ctxp->save_stack = true;
1206 }
1207
1208 if (init && init != error_mark_node)
1209 {
1210 if (!TREE_STATIC (decl))
1211 {
1212 DECL_INITIAL (decl) = NULL_TREE;
1213 init = build2 (MODIFY_EXPR, void_type_node, decl, init);
1214 gimplify_and_add (init, stmt_p);
1215 }
1216 else
1217 /* We must still examine initializers for static variables
1218 as they may contain a label address. */
1219 walk_tree (&init, force_labels_r, NULL, NULL);
1220 }
1221
1222 /* This decl isn't mentioned in the enclosing block, so add it to the
1223 list of temps. FIXME it seems a bit of a kludge to say that
1224 anonymous artificial vars aren't pushed, but everything else is. */
1225 if (DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
1226 gimple_add_tmp_var (decl);
1227 }
1228
1229 return GS_ALL_DONE;
1230 }
1231
1232 /* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body
1233 and replacing the LOOP_EXPR with goto, but if the loop contains an
1234 EXIT_EXPR, we need to append a label for it to jump to. */
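/* Illustrative lowering (label names made up): a LOOP_EXPR whose body
   contains "exit when <cond>" becomes roughly

       L0:;
       <body, where the exit became "if (<cond>) goto L1;">
       goto L0;
       L1:;

   If the body contains no EXIT_EXPR, only the start label and the
   back-edge goto are emitted.  */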
1235
1236 static enum gimplify_status
1237 gimplify_loop_expr (tree *expr_p, tree *pre_p)
1238 {
1239 tree saved_label = gimplify_ctxp->exit_label;
1240 tree start_label = build1 (LABEL_EXPR, void_type_node, NULL_TREE);
1241 tree jump_stmt = build_and_jump (&LABEL_EXPR_LABEL (start_label));
1242
1243 append_to_statement_list (start_label, pre_p);
1244
1245 gimplify_ctxp->exit_label = NULL_TREE;
1246
1247 gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);
1248
1249 if (gimplify_ctxp->exit_label)
1250 {
1251 append_to_statement_list (jump_stmt, pre_p);
1252 *expr_p = build1 (LABEL_EXPR, void_type_node, gimplify_ctxp->exit_label);
1253 }
1254 else
1255 *expr_p = jump_stmt;
1256
1257 gimplify_ctxp->exit_label = saved_label;
1258
1259 return GS_ALL_DONE;
1260 }
1261
1262 /* Compare two case labels. Because the front end should already have
1263 made sure that case ranges do not overlap, it is enough to only compare
1264 the CASE_LOW values of each case label. */
1265
1266 static int
1267 compare_case_labels (const void *p1, const void *p2)
1268 {
1269 tree case1 = *(tree *)p1;
1270 tree case2 = *(tree *)p2;
1271
1272 return tree_int_cst_compare (CASE_LOW (case1), CASE_LOW (case2));
1273 }
1274
1275 /* Sort the case labels in LABEL_VEC in place in ascending order. */
1276
1277 void
1278 sort_case_labels (tree label_vec)
1279 {
1280 size_t len = TREE_VEC_LENGTH (label_vec);
1281 tree default_case = TREE_VEC_ELT (label_vec, len - 1);
1282
1283 if (CASE_LOW (default_case))
1284 {
1285 size_t i;
1286
1287 /* The last label in the vector should be the default case
1288 but it is not. */
1289 for (i = 0; i < len; ++i)
1290 {
1291 tree t = TREE_VEC_ELT (label_vec, i);
1292 if (!CASE_LOW (t))
1293 {
1294 default_case = t;
1295 TREE_VEC_ELT (label_vec, i) = TREE_VEC_ELT (label_vec, len - 1);
1296 TREE_VEC_ELT (label_vec, len - 1) = default_case;
1297 break;
1298 }
1299 }
1300 }
1301
1302 qsort (&TREE_VEC_ELT (label_vec, 0), len - 1, sizeof (tree),
1303 compare_case_labels);
1304 }
1305
1306 /* Gimplify a SWITCH_EXPR, and collect a TREE_VEC of the labels it can
1307 branch to. */
1308
1309 static enum gimplify_status
1310 gimplify_switch_expr (tree *expr_p, tree *pre_p)
1311 {
1312 tree switch_expr = *expr_p;
1313 enum gimplify_status ret;
1314
1315 ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL,
1316 is_gimple_val, fb_rvalue);
1317
1318 if (SWITCH_BODY (switch_expr))
1319 {
1320 VEC(tree,heap) *labels, *saved_labels;
1321 tree label_vec, default_case = NULL_TREE;
1322 size_t i, len;
1323
1324 /* If someone can be bothered to fill in the labels, they can
1325 be bothered to null out the body too. */
1326 gcc_assert (!SWITCH_LABELS (switch_expr));
1327
1328 saved_labels = gimplify_ctxp->case_labels;
1329 gimplify_ctxp->case_labels = VEC_alloc (tree, heap, 8);
1330
1331 gimplify_to_stmt_list (&SWITCH_BODY (switch_expr));
1332
1333 labels = gimplify_ctxp->case_labels;
1334 gimplify_ctxp->case_labels = saved_labels;
1335
1336 len = VEC_length (tree, labels);
1337
1338 for (i = 0; i < len; ++i)
1339 {
1340 tree t = VEC_index (tree, labels, i);
1341 if (!CASE_LOW (t))
1342 {
1343 /* The default case must be the last label in the list. */
1344 default_case = t;
1345 VEC_replace (tree, labels, i, VEC_index (tree, labels, len - 1));
1346 len--;
1347 break;
1348 }
1349 }
1350
1351 label_vec = make_tree_vec (len + 1);
1352 SWITCH_LABELS (*expr_p) = label_vec;
1353 append_to_statement_list (switch_expr, pre_p);
1354
1355 if (! default_case)
1356 {
1357 /* If the switch has no default label, add one, so that we jump
1358 around the switch body. */
1359 default_case = build3 (CASE_LABEL_EXPR, void_type_node, NULL_TREE,
1360 NULL_TREE, create_artificial_label ());
1361 append_to_statement_list (SWITCH_BODY (switch_expr), pre_p);
1362 *expr_p = build1 (LABEL_EXPR, void_type_node,
1363 CASE_LABEL (default_case));
1364 }
1365 else
1366 *expr_p = SWITCH_BODY (switch_expr);
1367
1368 for (i = 0; i < len; ++i)
1369 TREE_VEC_ELT (label_vec, i) = VEC_index (tree, labels, i);
1370 TREE_VEC_ELT (label_vec, len) = default_case;
1371
1372 VEC_free (tree, heap, labels);
1373
1374 sort_case_labels (label_vec);
1375
1376 SWITCH_BODY (switch_expr) = NULL;
1377 }
1378 else
1379 gcc_assert (SWITCH_LABELS (switch_expr));
1380
1381 return ret;
1382 }
1383
1384 static enum gimplify_status
1385 gimplify_case_label_expr (tree *expr_p)
1386 {
1387 tree expr = *expr_p;
1388 struct gimplify_ctx *ctxp;
1389
1390 /* Invalid OpenMP programs can play Duff's Device type games with
1391 #pragma omp parallel. At least in the C front end, we don't
1392 detect such invalid branches until after gimplification. */
1393 for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
1394 if (ctxp->case_labels)
1395 break;
1396
1397 VEC_safe_push (tree, heap, ctxp->case_labels, expr);
1398 *expr_p = build1 (LABEL_EXPR, void_type_node, CASE_LABEL (expr));
1399 return GS_ALL_DONE;
1400 }
1401
1402 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
1403 if necessary. */
1404
1405 tree
1406 build_and_jump (tree *label_p)
1407 {
1408 if (label_p == NULL)
1409 /* If there's nowhere to jump, just fall through. */
1410 return NULL_TREE;
1411
1412 if (*label_p == NULL_TREE)
1413 {
1414 tree label = create_artificial_label ();
1415 *label_p = label;
1416 }
1417
1418 return build1 (GOTO_EXPR, void_type_node, *label_p);
1419 }
1420
1421 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
1422 This also involves building a label to jump to and communicating it to
1423 gimplify_loop_expr through gimplify_ctxp->exit_label. */
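/* e.g. (label name made up): "EXIT_EXPR <cond>" becomes roughly
   "if (cond) goto exit_label;", and gimplify_loop_expr later places
   exit_label after the loop.  */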
1424
1425 static enum gimplify_status
1426 gimplify_exit_expr (tree *expr_p)
1427 {
1428 tree cond = TREE_OPERAND (*expr_p, 0);
1429 tree expr;
1430
1431 expr = build_and_jump (&gimplify_ctxp->exit_label);
1432 expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
1433 *expr_p = expr;
1434
1435 return GS_OK;
1436 }
1437
1438 /* A helper function to be called via walk_tree. Mark all labels under *TP
1439 as being forced. To be called for DECL_INITIAL of static variables. */
1440
1441 tree
1442 force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1443 {
1444 if (TYPE_P (*tp))
1445 *walk_subtrees = 0;
1446 if (TREE_CODE (*tp) == LABEL_DECL)
1447 FORCED_LABEL (*tp) = 1;
1448
1449 return NULL_TREE;
1450 }
1451
1452 /* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is
1453 different from its canonical type, wrap the whole thing inside a
1454 NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
1455 type.
1456
1457 The canonical type of a COMPONENT_REF is the type of the field being
1458 referenced--unless the field is a bit-field which can be read directly
1459 in a smaller mode, in which case the canonical type is the
1460 sign-appropriate type corresponding to that mode. */
1461
1462 static void
1463 canonicalize_component_ref (tree *expr_p)
1464 {
1465 tree expr = *expr_p;
1466 tree type;
1467
1468 gcc_assert (TREE_CODE (expr) == COMPONENT_REF);
1469
1470 if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
1471 type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
1472 else
1473 type = TREE_TYPE (TREE_OPERAND (expr, 1));
1474
1475 if (TREE_TYPE (expr) != type)
1476 {
1477 tree old_type = TREE_TYPE (expr);
1478
1479 /* Set the type of the COMPONENT_REF to the underlying type. */
1480 TREE_TYPE (expr) = type;
1481
1482 /* And wrap the whole thing inside a NOP_EXPR. */
1483 expr = build1 (NOP_EXPR, old_type, expr);
1484
1485 *expr_p = expr;
1486 }
1487 }
1488
1489 /* If a NOP conversion is changing a pointer to array of foo to a pointer
1490 to foo, embed that change in the ADDR_EXPR by converting
1491 T array[U];
1492 (T *)&array
1493 ==>
1494 &array[L]
1495 where L is the lower bound. For simplicity, only do this for constant
1496 lower bound. */
1497
1498 static void
1499 canonicalize_addr_expr (tree *expr_p)
1500 {
1501 tree expr = *expr_p;
1502 tree ctype = TREE_TYPE (expr);
1503 tree addr_expr = TREE_OPERAND (expr, 0);
1504 tree atype = TREE_TYPE (addr_expr);
1505 tree dctype, datype, ddatype, otype, obj_expr;
1506
1507 /* Both cast and addr_expr types should be pointers. */
1508 if (!POINTER_TYPE_P (ctype) || !POINTER_TYPE_P (atype))
1509 return;
1510
1511 /* The addr_expr type should be a pointer to an array. */
1512 datype = TREE_TYPE (atype);
1513 if (TREE_CODE (datype) != ARRAY_TYPE)
1514 return;
1515
1516 /* Both cast and addr_expr types should address the same object type. */
1517 dctype = TREE_TYPE (ctype);
1518 ddatype = TREE_TYPE (datype);
1519 if (!lang_hooks.types_compatible_p (ddatype, dctype))
1520 return;
1521
1522 /* The addr_expr and the object type should match. */
1523 obj_expr = TREE_OPERAND (addr_expr, 0);
1524 otype = TREE_TYPE (obj_expr);
1525 if (!lang_hooks.types_compatible_p (otype, datype))
1526 return;
1527
1528 /* The lower bound and element sizes must be constant. */
1529 if (!TYPE_SIZE_UNIT (dctype)
1530 || TREE_CODE (TYPE_SIZE_UNIT (dctype)) != INTEGER_CST
1531 || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
1532 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
1533 return;
1534
1535 /* All checks succeeded. Build a new node to merge the cast. */
1536 *expr_p = build4 (ARRAY_REF, dctype, obj_expr,
1537 TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
1538 TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
1539 size_binop (EXACT_DIV_EXPR, TYPE_SIZE_UNIT (dctype),
1540 size_int (TYPE_ALIGN_UNIT (dctype))));
1541 *expr_p = build1 (ADDR_EXPR, ctype, *expr_p);
1542 }
1543
1544 /* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions
1545 underneath as appropriate. */
1546
1547 static enum gimplify_status
1548 gimplify_conversion (tree *expr_p)
1549 {
1550 gcc_assert (TREE_CODE (*expr_p) == NOP_EXPR
1551 || TREE_CODE (*expr_p) == CONVERT_EXPR);
1552
1553 /* Then strip away all but the outermost conversion. */
1554 STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));
1555
1556 /* And remove the outermost conversion if it's useless. */
1557 if (tree_ssa_useless_type_conversion (*expr_p))
1558 *expr_p = TREE_OPERAND (*expr_p, 0);
1559
1560 /* If we still have a conversion at the toplevel,
1561 then canonicalize some constructs. */
1562 if (TREE_CODE (*expr_p) == NOP_EXPR || TREE_CODE (*expr_p) == CONVERT_EXPR)
1563 {
1564 tree sub = TREE_OPERAND (*expr_p, 0);
1565
1566 /* If a NOP conversion is changing the type of a COMPONENT_REF
1567 expression, then canonicalize its type now in order to expose more
1568 redundant conversions. */
1569 if (TREE_CODE (sub) == COMPONENT_REF)
1570 canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));
1571
1572 /* If a NOP conversion is changing a pointer to array of foo
1573 to a pointer to foo, embed that change in the ADDR_EXPR. */
1574 else if (TREE_CODE (sub) == ADDR_EXPR)
1575 canonicalize_addr_expr (expr_p);
1576 }
1577
1578 return GS_OK;
1579 }
1580
1581 /* Gimplify a VAR_DECL or PARM_DECL. Returns GS_OK if we expanded a
1582 DECL_VALUE_EXPR, and it's worth re-examining things. */
1583
1584 static enum gimplify_status
1585 gimplify_var_or_parm_decl (tree *expr_p)
1586 {
1587 tree decl = *expr_p;
1588
1589 /* ??? If this is a local variable, and it has not been seen in any
1590 outer BIND_EXPR, then it's probably the result of a duplicate
1591 declaration, for which we've already issued an error. It would
1592 be really nice if the front end wouldn't leak these at all.
1593 Currently the only known culprit is C++ destructors, as seen
1594 in g++.old-deja/g++.jason/binding.C. */
1595 if (TREE_CODE (decl) == VAR_DECL
1596 && !DECL_SEEN_IN_BIND_EXPR_P (decl)
1597 && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
1598 && decl_function_context (decl) == current_function_decl)
1599 {
1600 gcc_assert (errorcount || sorrycount);
1601 return GS_ERROR;
1602 }
1603
1604 /* When within an OpenMP context, notice uses of variables. */
1605 if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
1606 return GS_ALL_DONE;
1607
1608 /* If the decl is an alias for another expression, substitute it now. */
1609 if (DECL_HAS_VALUE_EXPR_P (decl))
1610 {
1611 *expr_p = unshare_expr (DECL_VALUE_EXPR (decl));
1612 return GS_OK;
1613 }
1614
1615 return GS_ALL_DONE;
1616 }
1617
1618
1619 /* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
1620 node pointed to by EXPR_P.
1621
1622 compound_lval
1623 : min_lval '[' val ']'
1624 | min_lval '.' ID
1625 | compound_lval '[' val ']'
1626 | compound_lval '.' ID
1627
1628 This is not part of the original SIMPLE definition, which separates
1629 array and member references, but it seems reasonable to handle them
1630 together. Also, this way we don't run into problems with union
1631 aliasing; gcc requires that for accesses through a union to alias, the
1632 union reference must be explicit, which was not always the case when we
1633 were splitting up array and member refs.
1634
1635 PRE_P points to the list where side effects that must happen before
1636 *EXPR_P should be stored.
1637
1638 POST_P points to the list where side effects that must happen after
1639 *EXPR_P should be stored. */
1640
1641 static enum gimplify_status
1642 gimplify_compound_lval (tree *expr_p, tree *pre_p,
1643 tree *post_p, fallback_t fallback)
1644 {
1645 tree *p;
1646 VEC(tree,heap) *stack;
1647 enum gimplify_status ret = GS_OK, tret;
1648 int i;
1649
1650 /* Create a stack of the subexpressions so later we can walk them in
1651 order from inner to outer. */
1652 stack = VEC_alloc (tree, heap, 10);
1653
1654 /* We can handle anything that get_inner_reference can deal with. */
1655 for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
1656 {
1657 restart:
1658 /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs. */
1659 if (TREE_CODE (*p) == INDIRECT_REF)
1660 *p = fold_indirect_ref (*p);
1661
1662 if (handled_component_p (*p))
1663 ;
1664 /* Expand DECL_VALUE_EXPR now. In some cases that may expose
1665 additional COMPONENT_REFs. */
1666 else if ((TREE_CODE (*p) == VAR_DECL || TREE_CODE (*p) == PARM_DECL)
1667 && gimplify_var_or_parm_decl (p) == GS_OK)
1668 goto restart;
1669 else
1670 break;
1671
1672 VEC_safe_push (tree, heap, stack, *p);
1673 }
1674
1675 gcc_assert (VEC_length (tree, stack));
1676
1677 /* Now STACK is a stack of pointers to all the refs we've walked through
1678 and P points to the innermost expression.
1679
1680      Java requires that we elaborate nodes in source order.  That
1681 means we must gimplify the inner expression followed by each of
1682 the indices, in order. But we can't gimplify the inner
1683 expression until we deal with any variable bounds, sizes, or
1684 positions in order to deal with PLACEHOLDER_EXPRs.
1685
1686 So we do this in three steps. First we deal with the annotations
1687 for any variables in the components, then we gimplify the base,
1688 then we gimplify any indices, from left to right. */
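  /* For instance (illustrative), for "a.b[i]" the stack holds the
     ARRAY_REF and the COMPONENT_REF, P ends up pointing at "a", and any
     variable bounds or sizes are gimplified before "a" and "i" are.  */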
1689 for (i = VEC_length (tree, stack) - 1; i >= 0; i--)
1690 {
1691 tree t = VEC_index (tree, stack, i);
1692
1693 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
1694 {
1695 /* Gimplify the low bound and element type size and put them into
1696 the ARRAY_REF. If these values are set, they have already been
1697 gimplified. */
1698 if (!TREE_OPERAND (t, 2))
1699 {
1700 tree low = unshare_expr (array_ref_low_bound (t));
1701 if (!is_gimple_min_invariant (low))
1702 {
1703 TREE_OPERAND (t, 2) = low;
1704 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
1705 is_gimple_formal_tmp_reg, fb_rvalue);
1706 ret = MIN (ret, tret);
1707 }
1708 }
1709
1710 if (!TREE_OPERAND (t, 3))
1711 {
1712 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
1713 tree elmt_size = unshare_expr (array_ref_element_size (t));
1714 tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));
1715
1716 /* Divide the element size by the alignment of the element
1717 type (above). */
1718 elmt_size = size_binop (EXACT_DIV_EXPR, elmt_size, factor);
1719
1720 if (!is_gimple_min_invariant (elmt_size))
1721 {
1722 TREE_OPERAND (t, 3) = elmt_size;
1723 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
1724 is_gimple_formal_tmp_reg, fb_rvalue);
1725 ret = MIN (ret, tret);
1726 }
1727 }
1728 }
1729 else if (TREE_CODE (t) == COMPONENT_REF)
1730 {
1731 /* Set the field offset into T and gimplify it. */
1732 if (!TREE_OPERAND (t, 2))
1733 {
1734 tree offset = unshare_expr (component_ref_field_offset (t));
1735 tree field = TREE_OPERAND (t, 1);
1736 tree factor
1737 = size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);
1738
1739 /* Divide the offset by its alignment. */
1740 offset = size_binop (EXACT_DIV_EXPR, offset, factor);
1741
1742 if (!is_gimple_min_invariant (offset))
1743 {
1744 TREE_OPERAND (t, 2) = offset;
1745 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
1746 is_gimple_formal_tmp_reg, fb_rvalue);
1747 ret = MIN (ret, tret);
1748 }
1749 }
1750 }
1751 }
1752
1753 /* Step 2 is to gimplify the base expression. Make sure lvalue is set
1754 so as to match the min_lval predicate. Failure to do so may result
1755 in the creation of large aggregate temporaries. */
1756 tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
1757 fallback | fb_lvalue);
1758 ret = MIN (ret, tret);
1759
1760 /* And finally, the indices and operands to BIT_FIELD_REF. During this
1761 loop we also remove any useless conversions. */
1762 for (; VEC_length (tree, stack) > 0; )
1763 {
1764 tree t = VEC_pop (tree, stack);
1765
1766 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
1767 {
1768 /* Gimplify the dimension.
1769 Temporary fix for gcc.c-torture/execute/20040313-1.c.
1770 Gimplify non-constant array indices into a temporary
1771 variable.
1772 FIXME - The real fix is to gimplify post-modify
1773 expressions into a minimal gimple lvalue. However, that
1774 exposes bugs in alias analysis. The alias analyzer does
1775 not handle &PTR->FIELD very well. Will fix after the
1776 branch is merged into mainline (dnovillo 2004-05-03). */
1777 if (!is_gimple_min_invariant (TREE_OPERAND (t, 1)))
1778 {
1779 tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
1780 is_gimple_formal_tmp_reg, fb_rvalue);
1781 ret = MIN (ret, tret);
1782 }
1783 }
1784 else if (TREE_CODE (t) == BIT_FIELD_REF)
1785 {
1786 tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
1787 is_gimple_val, fb_rvalue);
1788 ret = MIN (ret, tret);
1789 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
1790 is_gimple_val, fb_rvalue);
1791 ret = MIN (ret, tret);
1792 }
1793
1794 STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));
1795
1796 /* The innermost expression P may have originally had TREE_SIDE_EFFECTS
1797 set which would have caused all the outer expressions in EXPR_P
1798 leading to P to also have had TREE_SIDE_EFFECTS set. */
1799 recalculate_side_effects (t);
1800 }
1801
1802 tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval, fallback);
1803 ret = MIN (ret, tret);
1804
1805 /* If the outermost expression is a COMPONENT_REF, canonicalize its type. */
1806 if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
1807 {
1808 canonicalize_component_ref (expr_p);
1809 ret = MIN (ret, GS_OK);
1810 }
1811
1812 VEC_free (tree, heap, stack);
1813
1814 return ret;
1815 }
1816
1817 /* Gimplify the self modifying expression pointed to by EXPR_P
1818 (++, --, +=, -=).
1819
1820 PRE_P points to the list where side effects that must happen before
1821 *EXPR_P should be stored.
1822
1823 POST_P points to the list where side effects that must happen after
1824 *EXPR_P should be stored.
1825
1826 WANT_VALUE is nonzero iff we want to use the value of this expression
1827 in another expression. */
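/* Illustrative lowerings (temporary name made up):

       prefix,  use (++i):   i = i + 1;  use (i);
       postfix, use (i++):   i.4 = i;  use (i.4);  i = i.4 + 1;

   For postfix, the increment is queued on POST_P, after the use.  */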
1828
1829 static enum gimplify_status
1830 gimplify_self_mod_expr (tree *expr_p, tree *pre_p, tree *post_p,
1831 bool want_value)
1832 {
1833 enum tree_code code;
1834 tree lhs, lvalue, rhs, t1;
1835 bool postfix;
1836 enum tree_code arith_code;
1837 enum gimplify_status ret;
1838
1839 code = TREE_CODE (*expr_p);
1840
1841 gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
1842 || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);
1843
1844 /* Prefix or postfix? */
1845 if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
1846 /* Faster to treat as prefix if result is not used. */
1847 postfix = want_value;
1848 else
1849 postfix = false;
1850
1851 /* Add or subtract? */
1852 if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
1853 arith_code = PLUS_EXPR;
1854 else
1855 arith_code = MINUS_EXPR;
1856
1857 /* Gimplify the LHS into a GIMPLE lvalue. */
1858 lvalue = TREE_OPERAND (*expr_p, 0);
1859 ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
1860 if (ret == GS_ERROR)
1861 return ret;
1862
1863 /* Extract the operands to the arithmetic operation. */
1864 lhs = lvalue;
1865 rhs = TREE_OPERAND (*expr_p, 1);
1866
1867 /* For a postfix operator, we evaluate the LHS into an rvalue and then use
1868 that both as the result value and in the post queue operation. */
1869 if (postfix)
1870 {
1871 ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
1872 if (ret == GS_ERROR)
1873 return ret;
1874 }
1875
1876 t1 = build2 (arith_code, TREE_TYPE (*expr_p), lhs, rhs);
1877 t1 = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
1878
1879 if (postfix)
1880 {
1881 gimplify_and_add (t1, post_p);
1882 *expr_p = lhs;
1883 return GS_ALL_DONE;
1884 }
1885 else
1886 {
1887 *expr_p = t1;
1888 return GS_OK;
1889 }
1890 }
1891
1892 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
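/* A rough sketch: for an object whose type is the variably sized char[N],
   TYPE_SIZE_UNIT is N rather than an INTEGER_CST, so an rvalue of that
   type is rewritten as
     WITH_SIZE_EXPR <expr, N>
   which lets later code (such as the memcpy/memset lowering of
   assignments below) know how many bytes are involved.  */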
1893
1894 static void
1895 maybe_with_size_expr (tree *expr_p)
1896 {
1897 tree expr = *expr_p;
1898 tree type = TREE_TYPE (expr);
1899 tree size;
1900
1901 /* If we've already wrapped this or the type is error_mark_node, we can't do
1902 anything. */
1903 if (TREE_CODE (expr) == WITH_SIZE_EXPR
1904 || type == error_mark_node)
1905 return;
1906
1907 /* If the size isn't known or is a constant, we have nothing to do. */
1908 size = TYPE_SIZE_UNIT (type);
1909 if (!size || TREE_CODE (size) == INTEGER_CST)
1910 return;
1911
1912 /* Otherwise, make a WITH_SIZE_EXPR. */
1913 size = unshare_expr (size);
1914 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
1915 *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
1916 }
1917
1918 /* Subroutine of gimplify_call_expr: Gimplify a single argument. */
1919
1920 static enum gimplify_status
1921 gimplify_arg (tree *expr_p, tree *pre_p)
1922 {
1923 bool (*test) (tree);
1924 fallback_t fb;
1925
1926 /* In general, we allow lvalues for function arguments to avoid
1927 extra overhead of copying large aggregates out of even larger
1928 aggregates into temporaries only to copy the temporaries to
1929 the argument list. Make optimizers happy by pulling out to
1930 temporaries those types that fit in registers. */
1931 if (is_gimple_reg_type (TREE_TYPE (*expr_p)))
1932 test = is_gimple_val, fb = fb_rvalue;
1933 else
1934 test = is_gimple_lvalue, fb = fb_either;
1935
1936 /* If this is a variable sized type, we must remember the size. */
1937 maybe_with_size_expr (expr_p);
1938
1939 /* There is a sequence point before a function call. Side effects in
1940 the argument list must occur before the actual call. So, when
1941 gimplifying arguments, force gimplify_expr to use an internal
1942 post queue which is then appended to the end of PRE_P. */
1943 return gimplify_expr (expr_p, pre_p, NULL, test, fb);
1944 }
1945
1946 /* Gimplify the CALL_EXPR node pointed to by EXPR_P. PRE_P points to the
1947 list where side effects that must happen before *EXPR_P should be stored.
1948 WANT_VALUE is true if the result of the call is desired. */
1949
1950 static enum gimplify_status
1951 gimplify_call_expr (tree *expr_p, tree *pre_p, bool want_value)
1952 {
1953 tree decl;
1954 tree arglist;
1955 enum gimplify_status ret;
1956
1957 gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);
1958
1959 /* For reliable diagnostics during inlining, it is necessary that
1960 every call_expr be annotated with file and line. */
1961 if (! EXPR_HAS_LOCATION (*expr_p))
1962 SET_EXPR_LOCATION (*expr_p, input_location);
1963
1964 /* This may be a call to a builtin function.
1965
1966 Builtin function calls may be transformed into different
1967 (and more efficient) builtin function calls under certain
1968 circumstances. Unfortunately, gimplification can muck things
1969 up enough that the builtin expanders are not aware that certain
1970 transformations are still valid.
1971
1972 So we attempt transformation/gimplification of the call before
1973 we gimplify the CALL_EXPR. At this time we do not manage to
1974 transform all calls in the same manner as the expanders do, but
1975 we do transform most of them. */
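/* For instance (one possible folding, not an exhaustive list): a call
   such as strlen ("abc") may be folded here to the constant 3, in which
   case we return GS_OK below and let the caller gimplify the folded
   form.  */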
1976 decl = get_callee_fndecl (*expr_p);
1977 if (decl && DECL_BUILT_IN (decl))
1978 {
1979 tree fndecl = get_callee_fndecl (*expr_p);
1980 tree arglist = TREE_OPERAND (*expr_p, 1);
1981 tree new = fold_builtin (fndecl, arglist, !want_value);
1982
1983 if (new && new != *expr_p)
1984 {
1985 /* There was a transformation of this call which computes the
1986 same value, but in a more efficient way. Return and try
1987 again. */
1988 *expr_p = new;
1989 return GS_OK;
1990 }
1991
1992 if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
1993 && DECL_FUNCTION_CODE (decl) == BUILT_IN_VA_START)
1994 {
1995 if (!arglist || !TREE_CHAIN (arglist))
1996 {
1997 error ("too few arguments to function %<va_start%>");
1998 *expr_p = build_empty_stmt ();
1999 return GS_OK;
2000 }
2001
2002 if (fold_builtin_next_arg (TREE_CHAIN (arglist)))
2003 {
2004 *expr_p = build_empty_stmt ();
2005 return GS_OK;
2006 }
2007 /* Avoid gimplifying the second argument to va_start, which needs
2008 to be the plain PARM_DECL. */
2009 return gimplify_arg (&TREE_VALUE (TREE_OPERAND (*expr_p, 1)), pre_p);
2010 }
2011 }
2012
2013 /* There is a sequence point before the call, so any side effects in
2014 the calling expression must occur before the actual call. Force
2015 gimplify_expr to use an internal post queue. */
2016 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, NULL,
2017 is_gimple_call_addr, fb_rvalue);
2018
2019 if (PUSH_ARGS_REVERSED)
2020 TREE_OPERAND (*expr_p, 1) = nreverse (TREE_OPERAND (*expr_p, 1));
2021 for (arglist = TREE_OPERAND (*expr_p, 1); arglist;
2022 arglist = TREE_CHAIN (arglist))
2023 {
2024 enum gimplify_status t;
2025
2026 t = gimplify_arg (&TREE_VALUE (arglist), pre_p);
2027
2028 if (t == GS_ERROR)
2029 ret = GS_ERROR;
2030 }
2031 if (PUSH_ARGS_REVERSED)
2032 TREE_OPERAND (*expr_p, 1) = nreverse (TREE_OPERAND (*expr_p, 1));
2033
2034 /* Try this again in case gimplification exposed something. */
2035 if (ret != GS_ERROR && decl && DECL_BUILT_IN (decl))
2036 {
2037 tree fndecl = get_callee_fndecl (*expr_p);
2038 tree arglist = TREE_OPERAND (*expr_p, 1);
2039 tree new = fold_builtin (fndecl, arglist, !want_value);
2040
2041 if (new && new != *expr_p)
2042 {
2043 /* There was a transformation of this call which computes the
2044 same value, but in a more efficient way. Return and try
2045 again. */
2046 *expr_p = new;
2047 return GS_OK;
2048 }
2049 }
2050
2051 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on the
2052 CALL_EXPR. This allows us to eliminate redundant or useless
2053 calls to "const" functions. */
2054 if (TREE_CODE (*expr_p) == CALL_EXPR
2055 && (call_expr_flags (*expr_p) & (ECF_CONST | ECF_PURE)))
2056 TREE_SIDE_EFFECTS (*expr_p) = 0;
2057
2058 return ret;
2059 }
2060
2061 /* Handle shortcut semantics in the predicate operand of a COND_EXPR by
2062 rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
2063
2064 TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
2065 condition is true or false, respectively. If null, we should generate
2066 our own to skip over the evaluation of this specific expression.
2067
2068 This function is the tree equivalent of do_jump.
2069
2070 shortcut_cond_r should only be called by shortcut_cond_expr. */
2071
2072 static tree
2073 shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p)
2074 {
2075 tree local_label = NULL_TREE;
2076 tree t, expr = NULL;
2077
2078 /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
2079 retain the shortcut semantics. Just insert the gotos here;
2080 shortcut_cond_expr will append the real blocks later. */
2081 if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
2082 {
2083 /* Turn if (a && b) into
2084
2085 if (a); else goto no;
2086 if (b) goto yes; else goto no;
2087 (no:) */
2088
2089 if (false_label_p == NULL)
2090 false_label_p = &local_label;
2091
2092 t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p);
2093 append_to_statement_list (t, &expr);
2094
2095 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
2096 false_label_p);
2097 append_to_statement_list (t, &expr);
2098 }
2099 else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
2100 {
2101 /* Turn if (a || b) into
2102
2103 if (a) goto yes;
2104 if (b) goto yes; else goto no;
2105 (yes:) */
2106
2107 if (true_label_p == NULL)
2108 true_label_p = &local_label;
2109
2110 t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL);
2111 append_to_statement_list (t, &expr);
2112
2113 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
2114 false_label_p);
2115 append_to_statement_list (t, &expr);
2116 }
2117 else if (TREE_CODE (pred) == COND_EXPR)
2118 {
2119 /* As long as we're messing with gotos, turn if (a ? b : c) into
2120 if (a)
2121 if (b) goto yes; else goto no;
2122 else
2123 if (c) goto yes; else goto no; */
2124 expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
2125 shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
2126 false_label_p),
2127 shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
2128 false_label_p));
2129 }
2130 else
2131 {
2132 expr = build3 (COND_EXPR, void_type_node, pred,
2133 build_and_jump (true_label_p),
2134 build_and_jump (false_label_p));
2135 }
2136
2137 if (local_label)
2138 {
2139 t = build1 (LABEL_EXPR, void_type_node, local_label);
2140 append_to_statement_list (t, &expr);
2141 }
2142
2143 return expr;
2144 }
2145
2146 static tree
2147 shortcut_cond_expr (tree expr)
2148 {
2149 tree pred = TREE_OPERAND (expr, 0);
2150 tree then_ = TREE_OPERAND (expr, 1);
2151 tree else_ = TREE_OPERAND (expr, 2);
2152 tree true_label, false_label, end_label, t;
2153 tree *true_label_p;
2154 tree *false_label_p;
2155 bool emit_end, emit_false, jump_over_else;
2156 bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
2157 bool else_se = else_ && TREE_SIDE_EFFECTS (else_);
2158
2159 /* First do simple transformations. */
2160 if (!else_se)
2161 {
2162 /* If there is no 'else', turn (a && b) into if (a) if (b). */
2163 while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
2164 {
2165 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
2166 then_ = shortcut_cond_expr (expr);
2167 then_se = then_ && TREE_SIDE_EFFECTS (then_);
2168 pred = TREE_OPERAND (pred, 0);
2169 expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
2170 }
2171 }
2172 if (!then_se)
2173 {
2174 /* If there is no 'then', turn
2175 if (a || b); else d
2176 into
2177 if (a); else if (b); else d. */
2178 while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
2179 {
2180 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
2181 else_ = shortcut_cond_expr (expr);
2182 else_se = else_ && TREE_SIDE_EFFECTS (else_);
2183 pred = TREE_OPERAND (pred, 0);
2184 expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
2185 }
2186 }
2187
2188 /* If we're done, great. */
2189 if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
2190 && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
2191 return expr;
2192
2193 /* Otherwise we need to mess with gotos. Change
2194 if (a) c; else d;
2195 to
2196 if (a); else goto no;
2197 c; goto end;
2198 no: d; end:
2199 and recursively gimplify the condition. */
2200
2201 true_label = false_label = end_label = NULL_TREE;
2202
2203 /* If our arms just jump somewhere, hijack those labels so we don't
2204 generate jumps to jumps. */
2205
2206 if (then_
2207 && TREE_CODE (then_) == GOTO_EXPR
2208 && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL)
2209 {
2210 true_label = GOTO_DESTINATION (then_);
2211 then_ = NULL;
2212 then_se = false;
2213 }
2214
2215 if (else_
2216 && TREE_CODE (else_) == GOTO_EXPR
2217 && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL)
2218 {
2219 false_label = GOTO_DESTINATION (else_);
2220 else_ = NULL;
2221 else_se = false;
2222 }
2223
2224 /* If we aren't hijacking a label for the 'then' branch, it falls through. */
2225 if (true_label)
2226 true_label_p = &true_label;
2227 else
2228 true_label_p = NULL;
2229
2230 /* The 'else' branch also needs a label if it contains interesting code. */
2231 if (false_label || else_se)
2232 false_label_p = &false_label;
2233 else
2234 false_label_p = NULL;
2235
2236 /* If there was nothing else in our arms, just forward the label(s). */
2237 if (!then_se && !else_se)
2238 return shortcut_cond_r (pred, true_label_p, false_label_p);
2239
2240 /* If our last subexpression already has a terminal label, reuse it. */
2241 if (else_se)
2242 expr = expr_last (else_);
2243 else if (then_se)
2244 expr = expr_last (then_);
2245 else
2246 expr = NULL;
2247 if (expr && TREE_CODE (expr) == LABEL_EXPR)
2248 end_label = LABEL_EXPR_LABEL (expr);
2249
2250 /* If we don't care about jumping to the 'else' branch, jump to the end
2251 if the condition is false. */
2252 if (!false_label_p)
2253 false_label_p = &end_label;
2254
2255 /* We only want to emit these labels if we aren't hijacking them. */
2256 emit_end = (end_label == NULL_TREE);
2257 emit_false = (false_label == NULL_TREE);
2258
2259 /* We only emit the jump over the else clause if we have to--if the
2260 then clause may fall through. Otherwise we can wind up with a
2261 useless jump and a useless label at the end of gimplified code,
2262 which will cause us to think that this conditional as a whole
2263 falls through even if it doesn't. If we then inline a function
2264 which ends with such a condition, that can cause us to issue an
2265 inappropriate warning about control reaching the end of a
2266 non-void function. */
2267 jump_over_else = block_may_fallthru (then_);
2268
2269 pred = shortcut_cond_r (pred, true_label_p, false_label_p);
2270
2271 expr = NULL;
2272 append_to_statement_list (pred, &expr);
2273
2274 append_to_statement_list (then_, &expr);
2275 if (else_se)
2276 {
2277 if (jump_over_else)
2278 {
2279 t = build_and_jump (&end_label);
2280 append_to_statement_list (t, &expr);
2281 }
2282 if (emit_false)
2283 {
2284 t = build1 (LABEL_EXPR, void_type_node, false_label);
2285 append_to_statement_list (t, &expr);
2286 }
2287 append_to_statement_list (else_, &expr);
2288 }
2289 if (emit_end && end_label)
2290 {
2291 t = build1 (LABEL_EXPR, void_type_node, end_label);
2292 append_to_statement_list (t, &expr);
2293 }
2294
2295 return expr;
2296 }
2297
2298 /* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */
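/* For example: an operand of plain integer type that reaches the default
   case below is wrapped in a conversion to boolean_type_node, while a
   comparison such as 'a < b' only has its type reset, since it already
   yields a boolean value.  */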
2299
2300 static tree
2301 gimple_boolify (tree expr)
2302 {
2303 tree type = TREE_TYPE (expr);
2304
2305 if (TREE_CODE (type) == BOOLEAN_TYPE)
2306 return expr;
2307
2308 switch (TREE_CODE (expr))
2309 {
2310 case TRUTH_AND_EXPR:
2311 case TRUTH_OR_EXPR:
2312 case TRUTH_XOR_EXPR:
2313 case TRUTH_ANDIF_EXPR:
2314 case TRUTH_ORIF_EXPR:
2315 /* Also boolify the arguments of truth exprs. */
2316 TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
2317 /* FALLTHRU */
2318
2319 case TRUTH_NOT_EXPR:
2320 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
2321 /* FALLTHRU */
2322
2323 case EQ_EXPR: case NE_EXPR:
2324 case LE_EXPR: case GE_EXPR: case LT_EXPR: case GT_EXPR:
2325 /* These expressions always produce boolean results. */
2326 TREE_TYPE (expr) = boolean_type_node;
2327 return expr;
2328
2329 default:
2330 /* Other expressions that get here must have boolean values, but
2331 might need to be converted to the appropriate mode. */
2332 return convert (boolean_type_node, expr);
2333 }
2334 }
2335
2336 /* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
2337 into
2338
2339 if (p) if (p)
2340 t1 = a; a;
2341 else or else
2342 t1 = b; b;
2343 t1;
2344
2345 The second form is used when *EXPR_P is of type void.
2346
2347 TARGET is the tree for T1 above.
2348
2349 PRE_P points to the list where side effects that must happen before
2350 *EXPR_P should be stored.
2351
2352 POST_P points to the list where side effects that must happen after
2353 *EXPR_P should be stored. */
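/* A rough sketch of the lvalue case handled below (no TARGET, and the
   caller accepts an lvalue): for C++ code such as
     (p ? a : b) = 1;
   the arms are rewritten to take addresses, giving roughly
     if (p) iftmp = &a; else iftmp = &b;
   and *EXPR_P becomes *iftmp, which can then be assigned through.  */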
2354
2355 static enum gimplify_status
2356 gimplify_cond_expr (tree *expr_p, tree *pre_p, tree *post_p, tree target,
2357 fallback_t fallback)
2358 {
2359 tree expr = *expr_p;
2360 tree tmp, tmp2, type;
2361 enum gimplify_status ret;
2362
2363 type = TREE_TYPE (expr);
2364
2365 /* If this COND_EXPR has a value, copy the values into a temporary within
2366 the arms. */
2367 if (! VOID_TYPE_P (type))
2368 {
2369 tree result;
2370
2371 if (target)
2372 {
2373 ret = gimplify_expr (&target, pre_p, post_p,
2374 is_gimple_min_lval, fb_lvalue);
2375 if (ret != GS_ERROR)
2376 ret = GS_OK;
2377 result = tmp = target;
2378 tmp2 = unshare_expr (target);
2379 }
2380 else if ((fallback & fb_lvalue) == 0)
2381 {
2382 result = tmp2 = tmp = create_tmp_var (TREE_TYPE (expr), "iftmp");
2383 ret = GS_ALL_DONE;
2384 }
2385 else
2386 {
2387 tree type = build_pointer_type (TREE_TYPE (expr));
2388
2389 if (TREE_TYPE (TREE_OPERAND (expr, 1)) != void_type_node)
2390 TREE_OPERAND (expr, 1) =
2391 build_fold_addr_expr (TREE_OPERAND (expr, 1));
2392
2393 if (TREE_TYPE (TREE_OPERAND (expr, 2)) != void_type_node)
2394 TREE_OPERAND (expr, 2) =
2395 build_fold_addr_expr (TREE_OPERAND (expr, 2));
2396
2397 tmp2 = tmp = create_tmp_var (type, "iftmp");
2398
2399 expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (expr, 0),
2400 TREE_OPERAND (expr, 1), TREE_OPERAND (expr, 2));
2401
2402 result = build_fold_indirect_ref (tmp);
2403 ret = GS_ALL_DONE;
2404 }
2405
2406 /* Build the then clause, 't1 = a;'. But don't build an assignment
2407 if this branch is void; in C++ it can be, if it's a throw. */
2408 if (TREE_TYPE (TREE_OPERAND (expr, 1)) != void_type_node)
2409 TREE_OPERAND (expr, 1)
2410 = build2 (MODIFY_EXPR, void_type_node, tmp, TREE_OPERAND (expr, 1));
2411
2412 /* Build the else clause, 't1 = b;'. */
2413 if (TREE_TYPE (TREE_OPERAND (expr, 2)) != void_type_node)
2414 TREE_OPERAND (expr, 2)
2415 = build2 (MODIFY_EXPR, void_type_node, tmp2, TREE_OPERAND (expr, 2));
2416
2417 TREE_TYPE (expr) = void_type_node;
2418 recalculate_side_effects (expr);
2419
2420 /* Move the COND_EXPR to the prequeue. */
2421 gimplify_and_add (expr, pre_p);
2422
2423 *expr_p = result;
2424 return ret;
2425 }
2426
2427 /* Make sure the condition has BOOLEAN_TYPE. */
2428 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
2429
2430 /* Break apart && and || conditions. */
2431 if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
2432 || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
2433 {
2434 expr = shortcut_cond_expr (expr);
2435
2436 if (expr != *expr_p)
2437 {
2438 *expr_p = expr;
2439
2440 /* We can't rely on gimplify_expr to re-gimplify the expanded
2441 form properly, as cleanups might cause the target labels to be
2442 wrapped in a TRY_FINALLY_EXPR. To prevent that, we need to
2443 set up a conditional context. */
2444 gimple_push_condition ();
2445 gimplify_stmt (expr_p);
2446 gimple_pop_condition (pre_p);
2447
2448 return GS_ALL_DONE;
2449 }
2450 }
2451
2452 /* Now do the normal gimplification. */
2453 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL,
2454 is_gimple_condexpr, fb_rvalue);
2455
2456 gimple_push_condition ();
2457
2458 gimplify_to_stmt_list (&TREE_OPERAND (expr, 1));
2459 gimplify_to_stmt_list (&TREE_OPERAND (expr, 2));
2460 recalculate_side_effects (expr);
2461
2462 gimple_pop_condition (pre_p);
2463
2464 if (ret == GS_ERROR)
2465 ;
2466 else if (TREE_SIDE_EFFECTS (TREE_OPERAND (expr, 1)))
2467 ret = GS_ALL_DONE;
2468 else if (TREE_SIDE_EFFECTS (TREE_OPERAND (expr, 2)))
2469 /* Rewrite "if (a); else b" to "if (!a) b" */
2470 {
2471 TREE_OPERAND (expr, 0) = invert_truthvalue (TREE_OPERAND (expr, 0));
2472 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL,
2473 is_gimple_condexpr, fb_rvalue);
2474
2475 tmp = TREE_OPERAND (expr, 1);
2476 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 2);
2477 TREE_OPERAND (expr, 2) = tmp;
2478 }
2479 else
2480 /* Both arms are empty; replace the COND_EXPR with its predicate. */
2481 expr = TREE_OPERAND (expr, 0);
2482
2483 *expr_p = expr;
2484 return ret;
2485 }
2486
2487 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
2488 a call to __builtin_memcpy. */
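/* A rough sketch: for a variable-sized assignment 'to = from' whose size
   was captured in a WITH_SIZE_EXPR, we build
     __builtin_memcpy (&to, &from, size);
   and, if the value of the assignment is wanted, hand back the memcpy
   result converted back to a pointer to TO's type and dereferenced.  */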
2489
2490 static enum gimplify_status
2491 gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value)
2492 {
2493 tree args, t, to, to_ptr, from;
2494
2495 to = TREE_OPERAND (*expr_p, 0);
2496 from = TREE_OPERAND (*expr_p, 1);
2497
2498 args = tree_cons (NULL, size, NULL);
2499
2500 t = build_fold_addr_expr (from);
2501 args = tree_cons (NULL, t, args);
2502
2503 to_ptr = build_fold_addr_expr (to);
2504 args = tree_cons (NULL, to_ptr, args);
2505 t = implicit_built_in_decls[BUILT_IN_MEMCPY];
2506 t = build_function_call_expr (t, args);
2507
2508 if (want_value)
2509 {
2510 t = build1 (NOP_EXPR, TREE_TYPE (to_ptr), t);
2511 t = build1 (INDIRECT_REF, TREE_TYPE (to), t);
2512 }
2513
2514 *expr_p = t;
2515 return GS_OK;
2516 }
2517
2518 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
2519 a call to __builtin_memset. In this case we know that the RHS is
2520 a CONSTRUCTOR with an empty element list. */
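/* A rough sketch: a variable-sized 'to = { }' becomes
     __builtin_memset (&to, 0, size);
   with the return value treated as in the memcpy case above.  */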
2521
2522 static enum gimplify_status
2523 gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value)
2524 {
2525 tree args, t, to, to_ptr;
2526
2527 to = TREE_OPERAND (*expr_p, 0);
2528
2529 args = tree_cons (NULL, size, NULL);
2530
2531 args = tree_cons (NULL, integer_zero_node, args);
2532
2533 to_ptr = build_fold_addr_expr (to);
2534 args = tree_cons (NULL, to_ptr, args);
2535 t = implicit_built_in_decls[BUILT_IN_MEMSET];
2536 t = build_function_call_expr (t, args);
2537
2538 if (want_value)
2539 {
2540 t = build1 (NOP_EXPR, TREE_TYPE (to_ptr), t);
2541 t = build1 (INDIRECT_REF, TREE_TYPE (to), t);
2542 }
2543
2544 *expr_p = t;
2545 return GS_OK;
2546 }
2547
2548 /* A subroutine of gimplify_init_ctor_preeval. Called via walk_tree,
2549 determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
2550 assignment. Returns non-null if we detect a potential overlap. */
2551
2552 struct gimplify_init_ctor_preeval_data
2553 {
2554 /* The base decl of the lhs object. May be NULL, in which case we
2555 have to assume the lhs is indirect. */
2556 tree lhs_base_decl;
2557
2558 /* The alias set of the lhs object. */
2559 int lhs_alias_set;
2560 };
2561
2562 static tree
2563 gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
2564 {
2565 struct gimplify_init_ctor_preeval_data *data
2566 = (struct gimplify_init_ctor_preeval_data *) xdata;
2567 tree t = *tp;
2568
2569 /* If we find the base object, obviously we have overlap. */
2570 if (data->lhs_base_decl == t)
2571 return t;
2572
2573 /* If the constructor component is indirect, determine if we have a
2574 potential overlap with the lhs. The only bits of information we
2575 have to go on at this point are addressability and alias sets. */
2576 if (TREE_CODE (t) == INDIRECT_REF
2577 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
2578 && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
2579 return t;
2580
2581 if (IS_TYPE_OR_DECL_P (t))
2582 *walk_subtrees = 0;
2583 return NULL;
2584 }
2585
2586 /* A subroutine of gimplify_init_constructor. Pre-evaluate *EXPR_P,
2587 force values that overlap with the lhs (as described by *DATA)
2588 into temporaries. */
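/* For example (field names purely illustrative): in
     x = (struct S) { .a = 1, .inner = x.other };
   where .inner and .other are themselves aggregates, x.other refers to
   the object being written, so its value is copied into a temporary
   before the element-by-element stores into x are emitted.  */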
2589
2590 static void
2591 gimplify_init_ctor_preeval (tree *expr_p, tree *pre_p, tree *post_p,
2592 struct gimplify_init_ctor_preeval_data *data)
2593 {
2594 enum gimplify_status one;
2595
2596 /* If the value is invariant, then there's nothing to pre-evaluate.
2597 But ensure it doesn't have any side-effects since a SAVE_EXPR is
2598 invariant but has side effects and might contain a reference to
2599 the object we're initializing. */
2600 if (TREE_INVARIANT (*expr_p) && !TREE_SIDE_EFFECTS (*expr_p))
2601 return;
2602
2603 /* If the type has non-trivial constructors, we can't pre-evaluate. */
2604 if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
2605 return;
2606
2607 /* Recurse for nested constructors. */
2608 if (TREE_CODE (*expr_p) == CONSTRUCTOR)
2609 {
2610 unsigned HOST_WIDE_INT ix;
2611 constructor_elt *ce;
2612 VEC(constructor_elt,gc) *v = CONSTRUCTOR_ELTS (*expr_p);
2613
2614 for (ix = 0; VEC_iterate (constructor_elt, v, ix, ce); ix++)
2615 gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);
2616 return;
2617 }
2618
2619 /* We can't preevaluate if the type contains a placeholder. */
2620 if (type_contains_placeholder_p (TREE_TYPE (*expr_p)))
2621 return;
2622
2623 /* Gimplify the constructor element to something appropriate for the rhs
2624 of a MODIFY_EXPR. Given that we know the lhs is an aggregate, we know
2625 the gimplifier will consider this a store to memory. Doing this
2626 gimplification now means that we won't have to deal with complicated
2627 language-specific trees, nor trees like SAVE_EXPR that can induce
2628 exponential search behavior. */
2629 one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
2630 if (one == GS_ERROR)
2631 {
2632 *expr_p = NULL;
2633 return;
2634 }
2635
2636 /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
2637 with the lhs, since "a = { .x=a }" doesn't make sense. This will
2638 always be true for all scalars, since is_gimple_mem_rhs insists on a
2639 temporary variable for them. */
2640 if (DECL_P (*expr_p))
2641 return;
2642
2643 /* If this is of variable size, we have no choice but to assume it doesn't
2644 overlap since we can't make a temporary for it. */
2645 if (!TREE_CONSTANT (TYPE_SIZE (TREE_TYPE (*expr_p))))
2646 return;
2647
2648 /* Otherwise, we must search for overlap ... */
2649 if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
2650 return;
2651
2652 /* ... and if found, force the value into a temporary. */
2653 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
2654 }
2655
2656 /* A subroutine of gimplify_init_ctor_eval. Create a loop for
2657 a RANGE_EXPR in a CONSTRUCTOR for an array.
2658
2659 var = lower;
2660 loop_entry:
2661 object[var] = value;
2662 if (var == upper)
2663 goto loop_exit;
2664 var = var + 1;
2665 goto loop_entry;
2666 loop_exit:
2667
2668 We increment var _after_ the loop exit check because we might otherwise
2669 fail if upper == TYPE_MAX_VALUE (type for upper).
2670
2671 Note that we never have to deal with SAVE_EXPRs here, because this has
2672 already been taken care of for us, in gimplify_init_ctor_preeval(). */
2673
2674 static void gimplify_init_ctor_eval (tree, VEC(constructor_elt,gc) *,
2675 tree *, bool);
2676
2677 static void
2678 gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
2679 tree value, tree array_elt_type,
2680 tree *pre_p, bool cleared)
2681 {
2682 tree loop_entry_label, loop_exit_label;
2683 tree var, var_type, cref;
2684
2685 loop_entry_label = create_artificial_label ();
2686 loop_exit_label = create_artificial_label ();
2687
2688 /* Create and initialize the index variable. */
2689 var_type = TREE_TYPE (upper);
2690 var = create_tmp_var (var_type, NULL);
2691 append_to_statement_list (build2 (MODIFY_EXPR, var_type, var, lower), pre_p);
2692
2693 /* Add the loop entry label. */
2694 append_to_statement_list (build1 (LABEL_EXPR,
2695 void_type_node,
2696 loop_entry_label),
2697 pre_p);
2698
2699 /* Build the reference. */
2700 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
2701 var, NULL_TREE, NULL_TREE);
2702
2703 /* If we are a constructor, just call gimplify_init_ctor_eval to do
2704 the store. Otherwise just assign value to the reference. */
2705
2706 if (TREE_CODE (value) == CONSTRUCTOR)
2707 /* NB we might have to call ourself recursively through
2708 gimplify_init_ctor_eval if the value is a constructor. */
2709 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
2710 pre_p, cleared);
2711 else
2712 append_to_statement_list (build2 (MODIFY_EXPR, TREE_TYPE (cref),
2713 cref, value),
2714 pre_p);
2715
2716 /* We exit the loop when the index var is equal to the upper bound. */
2717 gimplify_and_add (build3 (COND_EXPR, void_type_node,
2718 build2 (EQ_EXPR, boolean_type_node,
2719 var, upper),
2720 build1 (GOTO_EXPR,
2721 void_type_node,
2722 loop_exit_label),
2723 NULL_TREE),
2724 pre_p);
2725
2726 /* Otherwise, increment the index var... */
2727 append_to_statement_list (build2 (MODIFY_EXPR, var_type, var,
2728 build2 (PLUS_EXPR, var_type, var,
2729 fold_convert (var_type,
2730 integer_one_node))),
2731 pre_p);
2732
2733 /* ...and jump back to the loop entry. */
2734 append_to_statement_list (build1 (GOTO_EXPR,
2735 void_type_node,
2736 loop_entry_label),
2737 pre_p);
2738
2739 /* Add the loop exit label. */
2740 append_to_statement_list (build1 (LABEL_EXPR,
2741 void_type_node,
2742 loop_exit_label),
2743 pre_p);
2744 }
2745
2746 /* Return true if FDECL is a FIELD_DECL whose size is zero. */
2747
2748 static bool
2749 zero_sized_field_decl (tree fdecl)
2750 {
2751 if (TREE_CODE (fdecl) == FIELD_DECL && DECL_SIZE (fdecl)
2752 && integer_zerop (DECL_SIZE (fdecl)))
2753 return true;
2754 return false;
2755 }
2756
2757 /* Return true if TYPE is zero sized. */
2758
2759 static bool
2760 zero_sized_type (tree type)
2761 {
2762 if (AGGREGATE_TYPE_P (type) && TYPE_SIZE (type)
2763 && integer_zerop (TYPE_SIZE (type)))
2764 return true;
2765 return false;
2766 }
2767
2768 /* A subroutine of gimplify_init_constructor. Generate individual
2769 MODIFY_EXPRs for a CONSTRUCTOR. OBJECT is the LHS against which the
2770 assignments should happen. ELTS is the CONSTRUCTOR_ELTS of the
2771 CONSTRUCTOR. CLEARED is true if the entire LHS object has been
2772 zeroed first. */
2773
2774 static void
2775 gimplify_init_ctor_eval (tree object, VEC(constructor_elt,gc) *elts,
2776 tree *pre_p, bool cleared)
2777 {
2778 tree array_elt_type = NULL;
2779 unsigned HOST_WIDE_INT ix;
2780 tree purpose, value;
2781
2782 if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
2783 array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));
2784
2785 FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
2786 {
2787 tree cref, init;
2788
2789 /* NULL values are created above for gimplification errors. */
2790 if (value == NULL)
2791 continue;
2792
2793 if (cleared && initializer_zerop (value))
2794 continue;
2795
2796 /* ??? Here's to hoping the front end fills in all of the indices,
2797 so we don't have to figure out what's missing ourselves. */
2798 gcc_assert (purpose);
2799
2800 /* Skip zero-sized fields, unless value has side-effects. This can
2801 happen with calls to functions returning a zero-sized type, which
2802 we shouldn't discard. As a number of downstream passes don't
2803 expect sets of zero-sized fields, we rely on the gimplification of
2804 the MODIFY_EXPR we make below to drop the assignment statement. */
2805 if (! TREE_SIDE_EFFECTS (value) && zero_sized_field_decl (purpose))
2806 continue;
2807
2808 /* If we have a RANGE_EXPR, we have to build a loop to assign the
2809 whole range. */
2810 if (TREE_CODE (purpose) == RANGE_EXPR)
2811 {
2812 tree lower = TREE_OPERAND (purpose, 0);
2813 tree upper = TREE_OPERAND (purpose, 1);
2814
2815 /* If the lower bound is equal to upper, just treat it as if
2816 upper was the index. */
2817 if (simple_cst_equal (lower, upper))
2818 purpose = upper;
2819 else
2820 {
2821 gimplify_init_ctor_eval_range (object, lower, upper, value,
2822 array_elt_type, pre_p, cleared);
2823 continue;
2824 }
2825 }
2826
2827 if (array_elt_type)
2828 {
2829 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
2830 purpose, NULL_TREE, NULL_TREE);
2831 }
2832 else
2833 {
2834 gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
2835 cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
2836 unshare_expr (object), purpose, NULL_TREE);
2837 }
2838
2839 if (TREE_CODE (value) == CONSTRUCTOR
2840 && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
2841 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
2842 pre_p, cleared);
2843 else
2844 {
2845 init = build2 (MODIFY_EXPR, TREE_TYPE (cref), cref, value);
2846 gimplify_and_add (init, pre_p);
2847 }
2848 }
2849 }
2850
2851 /* A subroutine of gimplify_modify_expr. Break out elements of a
2852 CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
2853
2854 Note that we still need to clear any elements that don't have explicit
2855 initializers, so if not all elements are initialized we keep the
2856 original MODIFY_EXPR, we just remove all of the constructor elements. */
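/* A rough sketch:
     int a[8] = { 1 };
   initializes fewer elements than the type has, so CLEARED is set; the
   element list is dropped and 'a = {}' is emitted first (to become a
   block clear), followed by the single explicit store 'a[0] = 1'.  */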
2857
2858 static enum gimplify_status
2859 gimplify_init_constructor (tree *expr_p, tree *pre_p,
2860 tree *post_p, bool want_value)
2861 {
2862 tree object;
2863 tree ctor = TREE_OPERAND (*expr_p, 1);
2864 tree type = TREE_TYPE (ctor);
2865 enum gimplify_status ret;
2866 VEC(constructor_elt,gc) *elts;
2867
2868 if (TREE_CODE (ctor) != CONSTRUCTOR)
2869 return GS_UNHANDLED;
2870
2871 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
2872 is_gimple_lvalue, fb_lvalue);
2873 if (ret == GS_ERROR)
2874 return ret;
2875 object = TREE_OPERAND (*expr_p, 0);
2876
2877 elts = CONSTRUCTOR_ELTS (ctor);
2878
2879 ret = GS_ALL_DONE;
2880 switch (TREE_CODE (type))
2881 {
2882 case RECORD_TYPE:
2883 case UNION_TYPE:
2884 case QUAL_UNION_TYPE:
2885 case ARRAY_TYPE:
2886 {
2887 struct gimplify_init_ctor_preeval_data preeval_data;
2888 HOST_WIDE_INT num_type_elements, num_ctor_elements;
2889 HOST_WIDE_INT num_nonzero_elements, num_nonconstant_elements;
2890 bool cleared;
2891
2892 /* Aggregate types must lower constructors to initialization of
2893 individual elements. The exception is that a CONSTRUCTOR node
2894 with no elements indicates zero-initialization of the whole. */
2895 if (VEC_empty (constructor_elt, elts))
2896 break;
2897
2898 categorize_ctor_elements (ctor, &num_nonzero_elements,
2899 &num_nonconstant_elements,
2900 &num_ctor_elements, &cleared);
2901
2902 /* If a const aggregate variable is being initialized, then it
2903 should never be a loss to promote the variable to be static. */
2904 if (num_nonconstant_elements == 0
2905 && num_nonzero_elements > 1
2906 && TREE_READONLY (object)
2907 && TREE_CODE (object) == VAR_DECL)
2908 {
2909 DECL_INITIAL (object) = ctor;
2910 TREE_STATIC (object) = 1;
2911 if (!DECL_NAME (object))
2912 DECL_NAME (object) = create_tmp_var_name ("C");
2913 walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);
2914
2915 /* ??? C++ doesn't automatically append a .<number> to the
2916 assembler name, and even when it does, it looks at FE private
2917 data structures to figure out what that number should be,
2918 which are not set for this variable. I suppose this is
2919 important for local statics for inline functions, which aren't
2920 "local" in the object file sense. So in order to get a unique
2921 TU-local symbol, we must invoke the lhd version now. */
2922 lhd_set_decl_assembler_name (object);
2923
2924 *expr_p = NULL_TREE;
2925 break;
2926 }
2927
2928 /* If there are "lots" of initialized elements, even discounting
2929 those that are not address constants (and thus *must* be
2930 computed at runtime), then partition the constructor into
2931 constant and non-constant parts. Block copy the constant
2932 parts in, then generate code for the non-constant parts. */
2933 /* TODO. There's code in cp/typeck.c to do this. */
2934
2935 num_type_elements = count_type_elements (type, true);
2936
2937 /* If count_type_elements could not determine number of type elements
2938 for a constant-sized object, assume clearing is needed.
2939 Don't do this for variable-sized objects, as store_constructor
2940 will ignore the clearing of variable-sized objects. */
2941 if (num_type_elements < 0 && int_size_in_bytes (type) >= 0)
2942 cleared = true;
2943 /* If there are "lots" of zeros, then block clear the object first. */
2944 else if (num_type_elements - num_nonzero_elements > CLEAR_RATIO
2945 && num_nonzero_elements < num_type_elements/4)
2946 cleared = true;
2947 /* ??? This bit ought not be needed. For any element not present
2948 in the initializer, we should simply set it to zero. Except
2949 we'd need to *find* the elements that are not present, and that
2950 requires trickery to avoid quadratic compile-time behavior in
2951 large cases or excessive memory use in small cases. */
2952 else if (num_ctor_elements < num_type_elements)
2953 cleared = true;
2954
2955 /* If there are "lots" of initialized elements, and all of them
2956 are valid address constants, then the entire initializer can
2957 be dropped to memory, and then memcpy'd out. Don't do this
2958 for sparse arrays, though, as it's more efficient to follow
2959 the standard CONSTRUCTOR behavior of memset followed by
2960 individual element initialization. */
2961 if (num_nonconstant_elements == 0 && !cleared)
2962 {
2963 HOST_WIDE_INT size = int_size_in_bytes (type);
2964 unsigned int align;
2965
2966 /* ??? We can still get unbounded array types, at least
2967 from the C++ front end. This seems wrong, but attempt
2968 to work around it for now. */
2969 if (size < 0)
2970 {
2971 size = int_size_in_bytes (TREE_TYPE (object));
2972 if (size >= 0)
2973 TREE_TYPE (ctor) = type = TREE_TYPE (object);
2974 }
2975
2976 /* Find the maximum alignment we can assume for the object. */
2977 /* ??? Make use of DECL_OFFSET_ALIGN. */
2978 if (DECL_P (object))
2979 align = DECL_ALIGN (object);
2980 else
2981 align = TYPE_ALIGN (type);
2982
2983 if (size > 0 && !can_move_by_pieces (size, align))
2984 {
2985 tree new = create_tmp_var_raw (type, "C");
2986
2987 gimple_add_tmp_var (new);
2988 TREE_STATIC (new) = 1;
2989 TREE_READONLY (new) = 1;
2990 DECL_INITIAL (new) = ctor;
2991 if (align > DECL_ALIGN (new))
2992 {
2993 DECL_ALIGN (new) = align;
2994 DECL_USER_ALIGN (new) = 1;
2995 }
2996 walk_tree (&DECL_INITIAL (new), force_labels_r, NULL, NULL);
2997
2998 TREE_OPERAND (*expr_p, 1) = new;
2999
3000 /* This is no longer an assignment of a CONSTRUCTOR, but
3001 we still may have processing to do on the LHS. So
3002 pretend we didn't do anything here to let that happen. */
3003 return GS_UNHANDLED;
3004 }
3005 }
3006
3007 if (cleared)
3008 {
3009 /* Zap the CONSTRUCTOR element list, which simplifies this case.
3010 Note that we still have to gimplify, in order to handle the
3011 case of variable sized types. Avoid shared tree structures. */
3012 CONSTRUCTOR_ELTS (ctor) = NULL;
3013 object = unshare_expr (object);
3014 gimplify_stmt (expr_p);
3015 append_to_statement_list (*expr_p, pre_p);
3016 }
3017
3018 /* If we have not block cleared the object, or if there are nonzero
3019 elements in the constructor, add assignments to the individual
3020 scalar fields of the object. */
3021 if (!cleared || num_nonzero_elements > 0)
3022 {
3023 preeval_data.lhs_base_decl = get_base_address (object);
3024 if (!DECL_P (preeval_data.lhs_base_decl))
3025 preeval_data.lhs_base_decl = NULL;
3026 preeval_data.lhs_alias_set = get_alias_set (object);
3027
3028 gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
3029 pre_p, post_p, &preeval_data);
3030 gimplify_init_ctor_eval (object, elts, pre_p, cleared);
3031 }
3032
3033 *expr_p = NULL_TREE;
3034 }
3035 break;
3036
3037 case COMPLEX_TYPE:
3038 {
3039 tree r, i;
3040
3041 /* Extract the real and imaginary parts out of the ctor. */
3042 gcc_assert (VEC_length (constructor_elt, elts) == 2);
3043 r = VEC_index (constructor_elt, elts, 0)->value;
3044 i = VEC_index (constructor_elt, elts, 1)->value;
3045 if (r == NULL || i == NULL)
3046 {
3047 tree zero = convert (TREE_TYPE (type), integer_zero_node);
3048 if (r == NULL)
3049 r = zero;
3050 if (i == NULL)
3051 i = zero;
3052 }
3053
3054 /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
3055 represent creation of a complex value. */
3056 if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
3057 {
3058 ctor = build_complex (type, r, i);
3059 TREE_OPERAND (*expr_p, 1) = ctor;
3060 }
3061 else
3062 {
3063 ctor = build2 (COMPLEX_EXPR, type, r, i);
3064 TREE_OPERAND (*expr_p, 1) = ctor;
3065 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
3066 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
3067 fb_rvalue);
3068 }
3069 }
3070 break;
3071
3072 case VECTOR_TYPE:
3073 {
3074 unsigned HOST_WIDE_INT ix;
3075 constructor_elt *ce;
3076
3077 /* Go ahead and simplify constant constructors to VECTOR_CST. */
3078 if (TREE_CONSTANT (ctor))
3079 {
3080 bool constant_p = true;
3081 tree value;
3082
3083 /* Even when ctor is constant, it might contain non-*_CST
3084 elements (e.g. { 1.0/0.0 - 1.0/0.0, 0.0 }) and those don't
3085 belong in VECTOR_CST nodes. */
3086 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
3087 if (!CONSTANT_CLASS_P (value))
3088 {
3089 constant_p = false;
3090 break;
3091 }
3092
3093 if (constant_p)
3094 {
3095 TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
3096 break;
3097 }
3098 }
3099
3100 /* Vector types use CONSTRUCTOR all the way through gimple
3101 compilation as a general initializer. */
3102 for (ix = 0; VEC_iterate (constructor_elt, elts, ix, ce); ix++)
3103 {
3104 enum gimplify_status tret;
3105 tret = gimplify_expr (&ce->value, pre_p, post_p,
3106 is_gimple_val, fb_rvalue);
3107 if (tret == GS_ERROR)
3108 ret = GS_ERROR;
3109 }
3110 }
3111 break;
3112
3113 default:
3114 /* So how did we get a CONSTRUCTOR for a scalar type? */
3115 gcc_unreachable ();
3116 }
3117
3118 if (ret == GS_ERROR)
3119 return GS_ERROR;
3120 else if (want_value)
3121 {
3122 append_to_statement_list (*expr_p, pre_p);
3123 *expr_p = object;
3124 return GS_OK;
3125 }
3126 else
3127 return GS_ALL_DONE;
3128 }
3129
3130 /* Given a pointer value OP0, return a simplified version of an
3131 indirection through OP0, or NULL_TREE if no simplification is
3132 possible. This may only be applied to the rhs of an expression.
3133 Note that the resulting type may differ from the pointed-to type,
3134 but the two remain compatible from the langhooks
3135 point of view. */
3136
3137 static tree
3138 fold_indirect_ref_rhs (tree t)
3139 {
3140 tree type = TREE_TYPE (TREE_TYPE (t));
3141 tree sub = t;
3142 tree subtype;
3143
3144 STRIP_NOPS (sub);
3145 subtype = TREE_TYPE (sub);
3146 if (!POINTER_TYPE_P (subtype))
3147 return NULL_TREE;
3148
3149 if (TREE_CODE (sub) == ADDR_EXPR)
3150 {
3151 tree op = TREE_OPERAND (sub, 0);
3152 tree optype = TREE_TYPE (op);
3153 /* *&p => p */
3154 if (lang_hooks.types_compatible_p (type, optype))
3155 return op;
3156 /* *(foo *)&fooarray => fooarray[0] */
3157 else if (TREE_CODE (optype) == ARRAY_TYPE
3158 && lang_hooks.types_compatible_p (type, TREE_TYPE (optype)))
3159 {
3160 tree type_domain = TYPE_DOMAIN (optype);
3161 tree min_val = size_zero_node;
3162 if (type_domain && TYPE_MIN_VALUE (type_domain))
3163 min_val = TYPE_MIN_VALUE (type_domain);
3164 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
3165 }
3166 }
3167
3168 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
3169 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
3170 && lang_hooks.types_compatible_p (type, TREE_TYPE (TREE_TYPE (subtype))))
3171 {
3172 tree type_domain;
3173 tree min_val = size_zero_node;
3174 tree osub = sub;
3175 sub = fold_indirect_ref_rhs (sub);
3176 if (! sub)
3177 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
3178 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
3179 if (type_domain && TYPE_MIN_VALUE (type_domain))
3180 min_val = TYPE_MIN_VALUE (type_domain);
3181 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
3182 }
3183
3184 return NULL_TREE;
3185 }
3186
3187 /* Subroutine of gimplify_modify_expr to do simplifications of MODIFY_EXPRs
3188 based on the code of the RHS. We loop for as long as something changes. */
3189
3190 static enum gimplify_status
3191 gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p, tree *pre_p,
3192 tree *post_p, bool want_value)
3193 {
3194 enum gimplify_status ret = GS_OK;
3195
3196 while (ret != GS_UNHANDLED)
3197 switch (TREE_CODE (*from_p))
3198 {
3199 case INDIRECT_REF:
3200 {
3201 /* If we have code like
3202
3203 *(const A*)(A*)&x
3204
3205 where the type of "x" is a (possibly cv-qualified variant
3206 of "A"), treat the entire expression as identical to "x".
3207 This kind of code arises in C++ when an object is bound
3208 to a const reference, and if "x" is a TARGET_EXPR we want
3209 to take advantage of the optimization below. */
3210 tree t = fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
3211 if (t)
3212 {
3213 *from_p = t;
3214 ret = GS_OK;
3215 }
3216 else
3217 ret = GS_UNHANDLED;
3218 break;
3219 }
3220
3221 case TARGET_EXPR:
3222 {
3223 /* If we are initializing something from a TARGET_EXPR, strip the
3224 TARGET_EXPR and initialize it directly, if possible. This can't
3225 be done if the initializer is void, since that implies that the
3226 temporary is set in some non-trivial way.
3227
3228 ??? What about code that pulls out the temp and uses it
3229 elsewhere? I think that such code never uses the TARGET_EXPR as
3230 an initializer. If I'm wrong, we'll die because the temp won't
3231 have any RTL. In that case, I guess we'll need to replace
3232 references somehow. */
3233 tree init = TARGET_EXPR_INITIAL (*from_p);
3234
3235 if (!VOID_TYPE_P (TREE_TYPE (init)))
3236 {
3237 *from_p = init;
3238 ret = GS_OK;
3239 }
3240 else
3241 ret = GS_UNHANDLED;
3242 }
3243 break;
3244
3245 case COMPOUND_EXPR:
3246 /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
3247 caught. */
3248 gimplify_compound_expr (from_p, pre_p, true);
3249 ret = GS_OK;
3250 break;
3251
3252 case CONSTRUCTOR:
3253 /* If we're initializing from a CONSTRUCTOR, break this into
3254 individual MODIFY_EXPRs. */
3255 return gimplify_init_constructor (expr_p, pre_p, post_p, want_value);
3256
3257 case COND_EXPR:
3258 /* If we're assigning to a non-register type, push the assignment
3259 down into the branches. This is mandatory for ADDRESSABLE types,
3260 since we cannot generate temporaries for such, but it saves a
3261 copy in other cases as well. */
3262 if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
3263 {
3264 *expr_p = *from_p;
3265 return gimplify_cond_expr (expr_p, pre_p, post_p, *to_p,
3266 fb_rvalue);
3267 }
3268 else
3269 ret = GS_UNHANDLED;
3270 break;
3271
3272 case CALL_EXPR:
3273 /* For calls that return in memory, give *to_p as the CALL_EXPR's
3274 return slot so that we don't generate a temporary. */
3275 if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
3276 && aggregate_value_p (*from_p, *from_p))
3277 {
3278 bool use_target;
3279
3280 if (TREE_CODE (*to_p) == RESULT_DECL
3281 && needs_to_live_in_memory (*to_p))
3282 /* It's always OK to use the return slot directly. */
3283 use_target = true;
3284 else if (!is_gimple_non_addressable (*to_p))
3285 /* Don't use the original target if it's already addressable;
3286 if its address escapes, and the called function uses the
3287 NRV optimization, a conforming program could see *to_p
3288 change before the called function returns; see c++/19317.
3289 When optimizing, the return_slot pass marks more functions
3290 as safe after we have escape info. */
3291 use_target = false;
3292 else if (TREE_CODE (*to_p) != PARM_DECL
3293 && DECL_GIMPLE_FORMAL_TEMP_P (*to_p))
3294 /* Don't use the original target if it's a formal temp; we
3295 don't want to take its address. */
3296 use_target = false;
3297 else if (is_gimple_reg_type (TREE_TYPE (*to_p)))
3298 /* Also don't force regs into memory. */
3299 use_target = false;
3300 else
3301 use_target = true;
3302
3303 if (use_target)
3304 {
3305 CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
3306 lang_hooks.mark_addressable (*to_p);
3307 }
3308 }
3309
3310 ret = GS_UNHANDLED;
3311 break;
3312
3313 default:
3314 ret = GS_UNHANDLED;
3315 break;
3316 }
3317
3318 return ret;
3319 }
3320
3321 /* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
3322 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with
3323 DECL_COMPLEX_GIMPLE_REG_P set. */
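/* A rough sketch (with D.123 standing for a temporary):
     __real__ z = r;
   becomes
     D.123 = __imag__ z;
     z = COMPLEX_EXPR <r, D.123>;
   so that z is only ever assigned as a whole and can remain a gimple
   register.  */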
3324
3325 static enum gimplify_status
3326 gimplify_modify_expr_complex_part (tree *expr_p, tree *pre_p, bool want_value)
3327 {
3328 enum tree_code code, ocode;
3329 tree lhs, rhs, new_rhs, other, realpart, imagpart;
3330
3331 lhs = TREE_OPERAND (*expr_p, 0);
3332 rhs = TREE_OPERAND (*expr_p, 1);
3333 code = TREE_CODE (lhs);
3334 lhs = TREE_OPERAND (lhs, 0);
3335
3336 ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
3337 other = build1 (ocode, TREE_TYPE (rhs), lhs);
3338 other = get_formal_tmp_var (other, pre_p);
3339
3340 realpart = code == REALPART_EXPR ? rhs : other;
3341 imagpart = code == REALPART_EXPR ? other : rhs;
3342
3343 if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
3344 new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
3345 else
3346 new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
3347
3348 TREE_OPERAND (*expr_p, 0) = lhs;
3349 TREE_OPERAND (*expr_p, 1) = new_rhs;
3350
3351 if (want_value)
3352 {
3353 append_to_statement_list (*expr_p, pre_p);
3354 *expr_p = rhs;
3355 }
3356
3357 return GS_ALL_DONE;
3358 }
3359
3360 /* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
3361
3362 modify_expr
3363 : varname '=' rhs
3364 | '*' ID '=' rhs
3365
3366 PRE_P points to the list where side effects that must happen before
3367 *EXPR_P should be stored.
3368
3369 POST_P points to the list where side effects that must happen after
3370 *EXPR_P should be stored.
3371
3372 WANT_VALUE is nonzero iff we want to use the value of this expression
3373 in another expression. */
3374
3375 static enum gimplify_status
3376 gimplify_modify_expr (tree *expr_p, tree *pre_p, tree *post_p, bool want_value)
3377 {
3378 tree *from_p = &TREE_OPERAND (*expr_p, 1);
3379 tree *to_p = &TREE_OPERAND (*expr_p, 0);
3380 enum gimplify_status ret = GS_UNHANDLED;
3381
3382 gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
3383 || TREE_CODE (*expr_p) == INIT_EXPR);
3384
3385 /* The distinction between MODIFY_EXPR and INIT_EXPR is no longer useful. */
3386 if (TREE_CODE (*expr_p) == INIT_EXPR)
3387 TREE_SET_CODE (*expr_p, MODIFY_EXPR);
3388
3389 /* For zero sized types only gimplify the left hand side and right hand side
3390 as statements and throw away the assignment. */
3391 if (zero_sized_type (TREE_TYPE (*from_p)))
3392 {
3393 gimplify_stmt (from_p);
3394 gimplify_stmt (to_p);
3395 append_to_statement_list (*from_p, pre_p);
3396 append_to_statement_list (*to_p, pre_p);
3397 *expr_p = NULL_TREE;
3398 return GS_ALL_DONE;
3399 }
3400
3401 /* See if any simplifications can be done based on what the RHS is. */
3402 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
3403 want_value);
3404 if (ret != GS_UNHANDLED)
3405 return ret;
3406
3407 /* If the value being copied is of variable width, compute the length
3408 of the copy into a WITH_SIZE_EXPR. Note that we need to do this
3409 before gimplifying any of the operands so that we can resolve any
3410 PLACEHOLDER_EXPRs in the size. Also note that the RTL expander uses
3411 the size of the expression to be copied, not of the destination, so
3412 that is what we must use here. */
3413 maybe_with_size_expr (from_p);
3414
3415 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
3416 if (ret == GS_ERROR)
3417 return ret;
3418
3419 ret = gimplify_expr (from_p, pre_p, post_p,
3420 rhs_predicate_for (*to_p), fb_rvalue);
3421 if (ret == GS_ERROR)
3422 return ret;
3423
3424 /* Now see if the above changed *from_p to something we handle specially. */
3425 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
3426 want_value);
3427 if (ret != GS_UNHANDLED)
3428 return ret;
3429
3430 /* If we've got a variable sized assignment between two lvalues (i.e. one that
3431 does not involve a call), then we can make things a bit more straightforward
3432 by converting the assignment to memcpy or memset. */
3433 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
3434 {
3435 tree from = TREE_OPERAND (*from_p, 0);
3436 tree size = TREE_OPERAND (*from_p, 1);
3437
3438 if (TREE_CODE (from) == CONSTRUCTOR)
3439 return gimplify_modify_expr_to_memset (expr_p, size, want_value);
3440 if (is_gimple_addressable (from))
3441 {
3442 *from_p = from;
3443 return gimplify_modify_expr_to_memcpy (expr_p, size, want_value);
3444 }
3445 }
3446
3447 /* Transform partial stores to non-addressable complex variables into
3448 total stores. This allows us to use real instead of virtual operands
3449 for these variables, which improves optimization. */
3450 if ((TREE_CODE (*to_p) == REALPART_EXPR
3451 || TREE_CODE (*to_p) == IMAGPART_EXPR)
3452 && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
3453 return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);
3454
3455 if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
3456 {
3457 /* If we've somehow already got an SSA_NAME on the LHS, then
3458 we've probably modified it twice. Not good. */
3459 gcc_assert (TREE_CODE (*to_p) != SSA_NAME);
3460 *to_p = make_ssa_name (*to_p, *expr_p);
3461 }
3462
3463 if (want_value)
3464 {
3465 append_to_statement_list (*expr_p, pre_p);
3466 *expr_p = *to_p;
3467 return GS_OK;
3468 }
3469
3470 return GS_ALL_DONE;
3471 }
3472
3473 /* Gimplify a comparison between two variable-sized objects. Do this
3474 with a call to BUILT_IN_MEMCMP. */
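/* A rough sketch: for A and B of variably sized type,
     a == b
   becomes
     __builtin_memcmp (&a, &b, <size of a in bytes>) == 0
   and 'a != b' is handled the same way against zero.  */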
3475
3476 static enum gimplify_status
3477 gimplify_variable_sized_compare (tree *expr_p)
3478 {
3479 tree op0 = TREE_OPERAND (*expr_p, 0);
3480 tree op1 = TREE_OPERAND (*expr_p, 1);
3481 tree args, t, dest;
3482
3483 t = TYPE_SIZE_UNIT (TREE_TYPE (op0));
3484 t = unshare_expr (t);
3485 t = SUBSTITUTE_PLACEHOLDER_IN_EXPR (t, op0);
3486 args = tree_cons (NULL, t, NULL);
3487 t = build_fold_addr_expr (op1);
3488 args = tree_cons (NULL, t, args);
3489 dest = build_fold_addr_expr (op0);
3490 args = tree_cons (NULL, dest, args);
3491 t = implicit_built_in_decls[BUILT_IN_MEMCMP];
3492 t = build_function_call_expr (t, args);
3493 *expr_p
3494 = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
3495
3496 return GS_OK;
3497 }
3498
3499 /* Gimplify TRUTH_ANDIF_EXPR and TRUTH_ORIF_EXPR expressions. EXPR_P
3500 points to the expression to gimplify.
3501
3502 Expressions of the form 'a && b' are gimplified to:
3503
3504 a && b ? true : false
3505
3506 gimplify_cond_expr will do the rest.
3507
3508 PRE_P points to the list where side effects that must happen before
3509 *EXPR_P should be stored. */
3510
3511 static enum gimplify_status
3512 gimplify_boolean_expr (tree *expr_p)
3513 {
3514 /* Preserve the original type of the expression. */
3515 tree type = TREE_TYPE (*expr_p);
3516
3517 *expr_p = build3 (COND_EXPR, type, *expr_p,
3518 convert (type, boolean_true_node),
3519 convert (type, boolean_false_node));
3520
3521 return GS_OK;
3522 }
3523
3524 /* Gimplifies an expression sequence. This function gimplifies each
3525 expression and re-writes the original expression with the last
3526 expression of the sequence in GIMPLE form.
3527
3528 PRE_P points to the list where the side effects for all the
3529 expressions in the sequence will be emitted.
3530
3531 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
3532 /* ??? Should rearrange to share the pre-queue with all the indirect
3533 invocations of gimplify_expr. Would probably save on creations
3534 of statement_list nodes. */
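/* A rough sketch: in
     x = (f (), g (), y);
   the calls f () and g () are emitted onto PRE_P as statements and the
   value of the sequence becomes plain 'y', so the enclosing assignment
   reduces to 'x = y'.  */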
3535
3536 static enum gimplify_status
3537 gimplify_compound_expr (tree *expr_p, tree *pre_p, bool want_value)
3538 {
3539 tree t = *expr_p;
3540
3541 do
3542 {
3543 tree *sub_p = &TREE_OPERAND (t, 0);
3544
3545 if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
3546 gimplify_compound_expr (sub_p, pre_p, false);
3547 else
3548 gimplify_stmt (sub_p);
3549 append_to_statement_list (*sub_p, pre_p);
3550
3551 t = TREE_OPERAND (t, 1);
3552 }
3553 while (TREE_CODE (t) == COMPOUND_EXPR);
3554
3555 *expr_p = t;
3556 if (want_value)
3557 return GS_OK;
3558 else
3559 {
3560 gimplify_stmt (expr_p);
3561 return GS_ALL_DONE;
3562 }
3563 }
3564
3565 /* Gimplifies a statement list. These may be created either by an
3566 enlightened front-end, or by shortcut_cond_expr. */
3567
3568 static enum gimplify_status
3569 gimplify_statement_list (tree *expr_p)
3570 {
3571 tree_stmt_iterator i = tsi_start (*expr_p);
3572
3573 while (!tsi_end_p (i))
3574 {
3575 tree t;
3576
3577 gimplify_stmt (tsi_stmt_ptr (i));
3578
3579 t = tsi_stmt (i);
3580 if (t == NULL)
3581 tsi_delink (&i);
3582 else if (TREE_CODE (t) == STATEMENT_LIST)
3583 {
3584 tsi_link_before (&i, t, TSI_SAME_STMT);
3585 tsi_delink (&i);
3586 }
3587 else
3588 tsi_next (&i);
3589 }
3590
3591 return GS_ALL_DONE;
3592 }
3593
3594 /* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
3595 gimplify. After gimplification, EXPR_P will point to a new temporary
3596 that holds the original value of the SAVE_EXPR node.
3597
3598 PRE_P points to the list where side effects that must happen before
3599 *EXPR_P should be stored. */
3600
3601 static enum gimplify_status
3602 gimplify_save_expr (tree *expr_p, tree *pre_p, tree *post_p)
3603 {
3604 enum gimplify_status ret = GS_ALL_DONE;
3605 tree val;
3606
3607 gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
3608 val = TREE_OPERAND (*expr_p, 0);
3609
3610 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
3611 if (!SAVE_EXPR_RESOLVED_P (*expr_p))
3612 {
3613 /* The operand may be a void-valued expression such as SAVE_EXPRs
3614 generated by the Java frontend for class initialization. It is
3615 being executed only for its side-effects. */
3616 if (TREE_TYPE (val) == void_type_node)
3617 {
3618 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
3619 is_gimple_stmt, fb_none);
3620 append_to_statement_list (TREE_OPERAND (*expr_p, 0), pre_p);
3621 val = NULL;
3622 }
3623 else
3624 val = get_initialized_tmp_var (val, pre_p, post_p);
3625
3626 TREE_OPERAND (*expr_p, 0) = val;
3627 SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
3628 }
3629
3630 *expr_p = val;
3631
3632 return ret;
3633 }
3634
3635 /* Re-write the ADDR_EXPR node pointed to by EXPR_P
3636
3637 unary_expr
3638 : ...
3639 | '&' varname
3640 ...
3641
3642 PRE_P points to the list where side effects that must happen before
3643 *EXPR_P should be stored.
3644
3645 POST_P points to the list where side effects that must happen after
3646 *EXPR_P should be stored. */
3647
3648 static enum gimplify_status
3649 gimplify_addr_expr (tree *expr_p, tree *pre_p, tree *post_p)
3650 {
3651 tree expr = *expr_p;
3652 tree op0 = TREE_OPERAND (expr, 0);
3653 enum gimplify_status ret;
3654
3655 switch (TREE_CODE (op0))
3656 {
3657 case INDIRECT_REF:
3658 case MISALIGNED_INDIRECT_REF:
3659 do_indirect_ref:
3660 /* Check if we are dealing with an expression of the form '&*ptr'.
3661 While the front end folds away '&*ptr' into 'ptr', these
3662 expressions may be generated internally by the compiler (e.g.,
3663 builtins like __builtin_va_end). */
3664 /* Caution: the silent array decomposition semantics we allow for
3665 ADDR_EXPR means we can't always discard the pair. */
3666 /* Gimplification of the ADDR_EXPR operand may drop
3667 cv-qualification conversions, so make sure we add them if
3668 needed. */
3669 {
3670 tree op00 = TREE_OPERAND (op0, 0);
3671 tree t_expr = TREE_TYPE (expr);
3672 tree t_op00 = TREE_TYPE (op00);
3673
3674 if (!lang_hooks.types_compatible_p (t_expr, t_op00))
3675 {
3676 #ifdef ENABLE_CHECKING
3677 tree t_op0 = TREE_TYPE (op0);
3678 gcc_assert (POINTER_TYPE_P (t_expr)
3679 && cpt_same_type (TREE_CODE (t_op0) == ARRAY_TYPE
3680 ? TREE_TYPE (t_op0) : t_op0,
3681 TREE_TYPE (t_expr))
3682 && POINTER_TYPE_P (t_op00)
3683 && cpt_same_type (t_op0, TREE_TYPE (t_op00)));
3684 #endif
3685 op00 = fold_convert (TREE_TYPE (expr), op00);
3686 }
3687 *expr_p = op00;
3688 ret = GS_OK;
3689 }
3690 break;
3691
3692 case VIEW_CONVERT_EXPR:
3693 /* Take the address of our operand and then convert it to the type of
3694 this ADDR_EXPR.
3695
3696 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
3697 all clear. The impact of this transformation is even less clear. */
3698
3699 /* If the operand is a useless conversion, look through it. Doing so
3700 guarantees that the ADDR_EXPR and its operand will remain of the
3701 same type. */
3702 if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
3703 op0 = TREE_OPERAND (op0, 0);
3704
3705 *expr_p = fold_convert (TREE_TYPE (expr),
3706 build_fold_addr_expr (TREE_OPERAND (op0, 0)));
3707 ret = GS_OK;
3708 break;
3709
3710 default:
3711 /* We use fb_either here because the C frontend sometimes takes
3712 the address of a call that returns a struct; see
3713 gcc.dg/c99-array-lval-1.c. The gimplifier will correctly make
3714 the implied temporary explicit. */
3715 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
3716 is_gimple_addressable, fb_either);
3717 if (ret != GS_ERROR)
3718 {
3719 op0 = TREE_OPERAND (expr, 0);
3720
3721 /* For various reasons, the gimplification of the expression
3722 may have made a new INDIRECT_REF. */
3723 if (TREE_CODE (op0) == INDIRECT_REF)
3724 goto do_indirect_ref;
3725
3726 /* Make sure TREE_INVARIANT, TREE_CONSTANT, and TREE_SIDE_EFFECTS
3727 are set properly. */
3728 recompute_tree_invariant_for_addr_expr (expr);
3729
3730 /* Mark the RHS addressable. */
3731 lang_hooks.mark_addressable (TREE_OPERAND (expr, 0));
3732 }
3733 break;
3734 }
3735
3736 return ret;
3737 }
3738
3739 /* Gimplify the operands of an ASM_EXPR. Input operands should be gimple
3740 values; output operands should be gimple lvalues. */
3741
3742 static enum gimplify_status
3743 gimplify_asm_expr (tree *expr_p, tree *pre_p, tree *post_p)
3744 {
3745 tree expr = *expr_p;
3746 int noutputs = list_length (ASM_OUTPUTS (expr));
3747 const char **oconstraints
3748 = (const char **) alloca ((noutputs) * sizeof (const char *));
3749 int i;
3750 tree link;
3751 const char *constraint;
3752 bool allows_mem, allows_reg, is_inout;
3753 enum gimplify_status ret, tret;
3754
3755 ret = GS_ALL_DONE;
3756 for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = TREE_CHAIN (link))
3757 {
3758 size_t constraint_len;
3759 oconstraints[i] = constraint
3760 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
3761 constraint_len = strlen (constraint);
3762 if (constraint_len == 0)
3763 continue;
3764
3765 parse_output_constraint (&constraint, i, 0, 0,
3766 &allows_mem, &allows_reg, &is_inout);
3767
3768 if (!allows_reg && allows_mem)
3769 lang_hooks.mark_addressable (TREE_VALUE (link));
3770
3771 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
3772 is_inout ? is_gimple_min_lval : is_gimple_lvalue,
3773 fb_lvalue | fb_mayfail);
3774 if (tret == GS_ERROR)
3775 {
3776 error ("invalid lvalue in asm output %d", i);
3777 ret = tret;
3778 }
3779
3780 if (is_inout)
3781 {
3782 /* An input/output operand. To give the optimizers more
3783 flexibility, split it into separate input and output
3784 operands. */
3785 tree input;
3786 char buf[10];
3787
3788 /* Turn the in/out constraint into an output constraint. */
3789 char *p = xstrdup (constraint);
3790 p[0] = '=';
3791 TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);
3792
3793 /* And add a matching input constraint. */
3794 if (allows_reg)
3795 {
3796 sprintf (buf, "%d", i);
3797
3798 /* If there are multiple alternatives in the constraint,
3799 handle each of them individually. Those that allow a register
3800 will be replaced with the operand number; the others will stay
3801 unchanged. */
3802 if (strchr (p, ',') != NULL)
3803 {
3804 size_t len = 0, buflen = strlen (buf);
3805 char *beg, *end, *str, *dst;
3806
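                  /* First pass over the alternatives: compute how much space
                     the rewritten constraint string will need.  */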
3807 for (beg = p + 1;;)
3808 {
3809 end = strchr (beg, ',');
3810 if (end == NULL)
3811 end = strchr (beg, '\0');
3812 if ((size_t) (end - beg) < buflen)
3813 len += buflen + 1;
3814 else
3815 len += end - beg + 1;
3816 if (*end)
3817 beg = end + 1;
3818 else
3819 break;
3820 }
3821
3822 str = (char *) alloca (len);
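                  /* Second pass: copy each alternative, substituting the
                     operand number for those that allow a register.  */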
3823 for (beg = p + 1, dst = str;;)
3824 {
3825 const char *tem;
3826 bool mem_p, reg_p, inout_p;
3827
3828 end = strchr (beg, ',');
3829 if (end)
3830 *end = '\0';
3831 beg[-1] = '=';
3832 tem = beg - 1;
3833 parse_output_constraint (&tem, i, 0, 0,
3834 &mem_p, &reg_p, &inout_p);
3835 if (dst != str)
3836 *dst++ = ',';
3837 if (reg_p)
3838 {
3839 memcpy (dst, buf, buflen);
3840 dst += buflen;
3841 }
3842 else
3843 {
3844 if (end)
3845 len = end - beg;
3846 else
3847 len = strlen (beg);
3848 memcpy (dst, beg, len);
3849 dst += len;
3850 }
3851 if (end)
3852 beg = end + 1;
3853 else
3854 break;
3855 }
3856 *dst = '\0';
3857 input = build_string (dst - str, str);
3858 }
3859 else
3860 input = build_string (strlen (buf), buf);
3861 }
3862 else
3863 input = build_string (constraint_len - 1, constraint + 1);
3864
3865 free (p);
3866
3867 input = build_tree_list (build_tree_list (NULL_TREE, input),
3868 unshare_expr (TREE_VALUE (link)));
3869 ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
3870 }
3871 }
3872
3873 for (link = ASM_INPUTS (expr); link; ++i, link = TREE_CHAIN (link))
3874 {
3875 constraint
3876 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
3877 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
3878 oconstraints, &allows_mem, &allows_reg);
3879
3880 /* If the operand is a memory input, it should be an lvalue. */
3881 if (!allows_reg && allows_mem)
3882 {
3883 lang_hooks.mark_addressable (TREE_VALUE (link));
3884 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
3885 is_gimple_lvalue, fb_lvalue | fb_mayfail);
3886 if (tret == GS_ERROR)
3887 {
3888 error ("memory input %d is not directly addressable", i);
3889 ret = tret;
3890 }
3891 }
3892 else
3893 {
3894 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
3895 is_gimple_asm_val, fb_rvalue);
3896 if (tret == GS_ERROR)
3897 ret = tret;
3898 }
3899 }
3900
3901 return ret;
3902 }
3903
3904 /* Gimplify a CLEANUP_POINT_EXPR. Currently this works by adding
3905 WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
3906 gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
3907 return to this function.
3908
3909 FIXME should we complexify the prequeue handling instead? Or use flags
3910 for all the cleanups and let the optimizer tighten them up? The current
3911 code seems pretty fragile; it will break on a cleanup within any
3912 non-conditional nesting. But any such nesting would be broken, anyway;
3913 we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
3914 and continues out of it. We can do that at the RTL level, though, so
3915 having an optimizer to tighten up try/finally regions would be a Good
3916 Thing. */
3917
3918 static enum gimplify_status
3919 gimplify_cleanup_point_expr (tree *expr_p, tree *pre_p)
3920 {
3921 tree_stmt_iterator iter;
3922 tree body;
3923
3924 tree temp = voidify_wrapper_expr (*expr_p, NULL);
3925
3926 /* We only care about the number of conditions between the innermost
3927 CLEANUP_POINT_EXPR and the cleanup. So save and reset the count and
3928 any cleanups collected outside the CLEANUP_POINT_EXPR. */
3929 int old_conds = gimplify_ctxp->conditions;
3930 tree old_cleanups = gimplify_ctxp->conditional_cleanups;
3931 gimplify_ctxp->conditions = 0;
3932 gimplify_ctxp->conditional_cleanups = NULL_TREE;
3933
3934 body = TREE_OPERAND (*expr_p, 0);
3935 gimplify_to_stmt_list (&body);
3936
3937 gimplify_ctxp->conditions = old_conds;
3938 gimplify_ctxp->conditional_cleanups = old_cleanups;
3939
3940 for (iter = tsi_start (body); !tsi_end_p (iter); )
3941 {
3942 tree *wce_p = tsi_stmt_ptr (iter);
3943 tree wce = *wce_p;
3944
3945 if (TREE_CODE (wce) == WITH_CLEANUP_EXPR)
3946 {
3947 if (tsi_one_before_end_p (iter))
3948 {
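              /* The cleanup is the last statement in the list, so there is
                 nothing after it to protect; just emit the cleanup action
                 itself.  */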
3949 tsi_link_before (&iter, TREE_OPERAND (wce, 0), TSI_SAME_STMT);
3950 tsi_delink (&iter);
3951 break;
3952 }
3953 else
3954 {
3955 tree sl, tfe;
3956 enum tree_code code;
3957
3958 if (CLEANUP_EH_ONLY (wce))
3959 code = TRY_CATCH_EXPR;
3960 else
3961 code = TRY_FINALLY_EXPR;
3962
3963 sl = tsi_split_statement_list_after (&iter);
3964 tfe = build2 (code, void_type_node, sl, NULL_TREE);
3965 append_to_statement_list (TREE_OPERAND (wce, 0),
3966 &TREE_OPERAND (tfe, 1));
3967 *wce_p = tfe;
3968 iter = tsi_start (sl);
3969 }
3970 }
3971 else
3972 tsi_next (&iter);
3973 }
3974
3975 if (temp)
3976 {
3977 *expr_p = temp;
3978 append_to_statement_list (body, pre_p);
3979 return GS_OK;
3980 }
3981 else
3982 {
3983 *expr_p = body;
3984 return GS_ALL_DONE;
3985 }
3986 }
3987
3988 /* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP
3989 is the cleanup action required. */
3990
3991 static void
3992 gimple_push_cleanup (tree var, tree cleanup, bool eh_only, tree *pre_p)
3993 {
3994 tree wce;
3995
3996 /* Errors can result in improperly nested cleanups, which in turn cause
3997 confusion when trying to resolve the WITH_CLEANUP_EXPR. */
3998 if (errorcount || sorrycount)
3999 return;
4000
4001 if (gimple_conditional_context ())
4002 {
4003 /* If we're in a conditional context, this is more complex. We only
4004 want to run the cleanup if we actually ran the initialization that
4005 necessitates it, but we want to run it after the end of the
4006 conditional context. So we wrap the try/finally around the
4007 condition and use a flag to determine whether or not to actually
4008 run the destructor. Thus
4009
4010 test ? f(A()) : 0
4011
4012 becomes (approximately)
4013
4014 flag = 0;
4015 try {
4016 if (test) { A::A(temp); flag = 1; val = f(temp); }
4017 else { val = 0; }
4018 } finally {
4019 if (flag) A::~A(temp);
4020 }
4021 val
4022 */
4023
4024 tree flag = create_tmp_var (boolean_type_node, "cleanup");
4025 tree ffalse = build2 (MODIFY_EXPR, void_type_node, flag,
4026 boolean_false_node);
4027 tree ftrue = build2 (MODIFY_EXPR, void_type_node, flag,
4028 boolean_true_node);
4029 cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
4030 wce = build1 (WITH_CLEANUP_EXPR, void_type_node, cleanup);
4031 append_to_statement_list (ffalse, &gimplify_ctxp->conditional_cleanups);
4032 append_to_statement_list (wce, &gimplify_ctxp->conditional_cleanups);
4033 append_to_statement_list (ftrue, pre_p);
4034
4035 /* Because of this manipulation, and the EH edges that jump
4036 threading cannot redirect, the temporary (VAR) will appear
4037 to be used uninitialized. Don't warn. */
4038 TREE_NO_WARNING (var) = 1;
4039 }
4040 else
4041 {
4042 wce = build1 (WITH_CLEANUP_EXPR, void_type_node, cleanup);
4043 CLEANUP_EH_ONLY (wce) = eh_only;
4044 append_to_statement_list (wce, pre_p);
4045 }
4046
4047 gimplify_stmt (&TREE_OPERAND (wce, 0));
4048 }
4049
4050 /* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */
4051
4052 static enum gimplify_status
4053 gimplify_target_expr (tree *expr_p, tree *pre_p, tree *post_p)
4054 {
4055 tree targ = *expr_p;
4056 tree temp = TARGET_EXPR_SLOT (targ);
4057 tree init = TARGET_EXPR_INITIAL (targ);
4058 enum gimplify_status ret;
4059
4060 if (init)
4061 {
4062 /* TARGET_EXPR temps aren't part of the enclosing block, so add this
4063 one to the temps list. */
4064 gimple_add_tmp_var (temp);
4065
4066 /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
4067 expression is supposed to initialize the slot. */
4068 if (VOID_TYPE_P (TREE_TYPE (init)))
4069 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
4070 else
4071 {
4072 /* Special handling for BIND_EXPR can result in fewer temps. */
4073 ret = GS_OK;
4074 if (TREE_CODE (init) == BIND_EXPR)
4075 gimplify_bind_expr (&init, temp, pre_p);
4076 if (init != temp)
4077 {
4078 init = build2 (MODIFY_EXPR, void_type_node, temp, init);
4079 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt,
4080 fb_none);
4081 }
4082 }
4083 if (ret == GS_ERROR)
4084 return GS_ERROR;
4085 append_to_statement_list (init, pre_p);
4086
4087 /* If needed, push the cleanup for the temp. */
4088 if (TARGET_EXPR_CLEANUP (targ))
4089 {
4090 gimplify_stmt (&TARGET_EXPR_CLEANUP (targ));
4091 gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
4092 CLEANUP_EH_ONLY (targ), pre_p);
4093 }
4094
4095 /* Only expand this once. */
4096 TREE_OPERAND (targ, 3) = init;
4097 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
4098 }
4099 else
4100 /* We should have expanded this before. */
4101 gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));
4102
4103 *expr_p = temp;
4104 return GS_OK;
4105 }
4106
4107 /* Gimplification of expression trees. */
4108
4109 /* Gimplify an expression which appears at statement context; usually, this
4110 means replacing it with a suitably gimple STATEMENT_LIST. */
4111
4112 void
4113 gimplify_stmt (tree *stmt_p)
4114 {
4115 gimplify_expr (stmt_p, NULL, NULL, is_gimple_stmt, fb_none);
4116 }
4117
4118 /* Similarly, but force the result to be a STATEMENT_LIST. */
4119
4120 void
4121 gimplify_to_stmt_list (tree *stmt_p)
4122 {
4123 gimplify_stmt (stmt_p);
4124 if (!*stmt_p)
4125 *stmt_p = alloc_stmt_list ();
4126 else if (TREE_CODE (*stmt_p) != STATEMENT_LIST)
4127 {
4128 tree t = *stmt_p;
4129 *stmt_p = alloc_stmt_list ();
4130 append_to_statement_list (t, stmt_p);
4131 }
4132 }
4133
4134 /* Gimplify *EXPR_P as if it had been used inside the gimplification
4135 context CTX_P. The other arguments are as in gimplify_expr. */
4136
4137 static enum gimplify_status
4138 gimplify_expr_in_ctx (tree *expr_p, tree *pre_p, tree *post_p,
4139 bool (* gimple_test_f) (tree), fallback_t fallback,
4140 struct gimplify_ctx *ctx_p,
4141 struct gimplify_omp_ctx *omp_ctx_p)
4142 {
4143 enum gimplify_status ret;
4144 struct gimplify_ctx *prev_ctxp;
4145 struct gimplify_omp_ctx *prev_omp_ctxp;
4146
4147 prev_ctxp = gimplify_ctxp;
4148 gimplify_ctxp = ctx_p;
4149 prev_omp_ctxp = gimplify_omp_ctxp;
4150 gimplify_omp_ctxp = omp_ctx_p;
4151 ret = gimplify_expr (expr_p, pre_p, post_p, gimple_test_f, fallback);
4152 gimplify_ctxp = prev_ctxp;
4153 gimplify_omp_ctxp = prev_omp_ctxp;
4154
4155 return ret;
4156 }
4157
4158 /* Add FIRSTPRIVATE entries for DECL in CTX and in the OpenMP parallels
4159 surrounding it. If entries already exist, force them to be some flavor
4160 of private. If there is no enclosing parallel, do nothing. */
4161
4162 void
4163 omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
4164 {
4165 splay_tree_node n;
4166
4167 if (decl == NULL || !DECL_P (decl))
4168 return;
4169
4170 do
4171 {
4172 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
4173 if (n != NULL)
4174 {
4175 if (n->value & GOVD_SHARED)
4176 n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
4177 else
4178 return;
4179 }
4180 else if (ctx->is_parallel)
4181 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
4182
4183 ctx = ctx->outer_context;
4184 }
4185 while (ctx);
4186 }
4187
4188 /* Similarly for each of the type sizes of TYPE. */
4189
4190 static void
4191 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
4192 {
4193 if (type == NULL || type == error_mark_node)
4194 return;
4195 type = TYPE_MAIN_VARIANT (type);
4196
4197 if (pointer_set_insert (ctx->privatized_types, type))
4198 return;
4199
4200 switch (TREE_CODE (type))
4201 {
4202 case INTEGER_TYPE:
4203 case ENUMERAL_TYPE:
4204 case BOOLEAN_TYPE:
4205 case CHAR_TYPE:
4206 case REAL_TYPE:
4207 omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
4208 omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
4209 break;
4210
4211 case ARRAY_TYPE:
4212 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
4213 omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
4214 break;
4215
4216 case RECORD_TYPE:
4217 case UNION_TYPE:
4218 case QUAL_UNION_TYPE:
4219 {
4220 tree field;
4221 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
4222 if (TREE_CODE (field) == FIELD_DECL)
4223 {
4224 omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
4225 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
4226 }
4227 }
4228 break;
4229
4230 case POINTER_TYPE:
4231 case REFERENCE_TYPE:
4232 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
4233 break;
4234
4235 default:
4236 break;
4237 }
4238
4239 omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
4240 omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
4241 lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
4242 }
4243
4244 /* Add an entry for DECL in the OpenMP context CTX with FLAGS. */
4245
4246 static void
4247 omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
4248 {
4249 splay_tree_node n;
4250 unsigned int nflags;
4251 tree t;
4252
4253 if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node)
4254 return;
4255
4256 /* Never elide decls whose type has TREE_ADDRESSABLE set. This means
4257 there are constructors involved somewhere. */
4258 if (TREE_ADDRESSABLE (TREE_TYPE (decl))
4259 || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl)))
4260 flags |= GOVD_SEEN;
4261
4262 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
4263 if (n != NULL)
4264 {
4265 /* We shouldn't be re-adding the decl with the same data
4266 sharing class. */
4267 gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
4268 /* The only combination of data sharing classes we should see is
4269 FIRSTPRIVATE and LASTPRIVATE. */
4270 nflags = n->value | flags;
4271 gcc_assert ((nflags & GOVD_DATA_SHARE_CLASS)
4272 == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE));
4273 n->value = nflags;
4274 return;
4275 }
4276
4277 /* When adding a variable-sized variable, we have to handle all sorts
4278 of additional bits of data: the pointer replacement variable, and
4279 the parameters of the type. */
4280 if (!TREE_CONSTANT (DECL_SIZE (decl)))
4281 {
4282 /* Add the pointer replacement variable as PRIVATE if the variable
4283 replacement is private, else FIRSTPRIVATE since we'll need the
4284 address of the original variable either for SHARED, or for the
4285 copy into or out of the context. */
4286 if (!(flags & GOVD_LOCAL))
4287 {
4288 nflags = flags & GOVD_PRIVATE ? GOVD_PRIVATE : GOVD_FIRSTPRIVATE;
4289 nflags |= flags & GOVD_SEEN;
4290 t = DECL_VALUE_EXPR (decl);
4291 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
4292 t = TREE_OPERAND (t, 0);
4293 gcc_assert (DECL_P (t));
4294 omp_add_variable (ctx, t, nflags);
4295 }
4296
4297 /* Add all of the variable and type parameters (which should have
4298 been gimplified to a formal temporary) as FIRSTPRIVATE. */
4299 omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
4300 omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
4301 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
4302
4303 /* The variable-sized variable itself is never SHARED, only some form
4304 of PRIVATE. The sharing would take place via the pointer variable
4305 which we remapped above. */
4306 if (flags & GOVD_SHARED)
4307 flags = GOVD_PRIVATE | GOVD_DEBUG_PRIVATE
4308 | (flags & (GOVD_SEEN | GOVD_EXPLICIT));
4309
4310 /* We're going to make use of the TYPE_SIZE_UNIT at least in the
4311 alloca statement we generate for the variable, so make sure it
4312 is available. This isn't automatically needed for the SHARED
4313 case, since we won't be allocating local storage then. */
4314 else
4315 omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
4316 }
4317 else if (lang_hooks.decls.omp_privatize_by_reference (decl))
4318 {
4319 gcc_assert ((flags & GOVD_LOCAL) == 0);
4320 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
4321
4322 /* Similar to the direct variable sized case above, we'll need the
4323 size of references being privatized. */
4324 if ((flags & GOVD_SHARED) == 0)
4325 {
4326 t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
4327 if (!TREE_CONSTANT (t))
4328 omp_notice_variable (ctx, t, true);
4329 }
4330 }
4331
4332 splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
4333 }
4334
4335 /* Record the fact that DECL was used within the OpenMP context CTX.
4336 IN_CODE is true when real code uses DECL, and false when we should
4337 merely emit default(none) errors. Return true if DECL is going to
4338 be remapped and thus DECL shouldn't be gimplified into its
4339 DECL_VALUE_EXPR (if any). */
4340
4341 static bool
4342 omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
4343 {
4344 splay_tree_node n;
4345 unsigned flags = in_code ? GOVD_SEEN : 0;
4346 bool ret = false, shared;
4347
4348 if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node)
4349 return false;
4350
4351 /* Threadprivate variables are predetermined. */
4352 if (is_global_var (decl))
4353 {
4354 if (DECL_THREAD_LOCAL_P (decl))
4355 return false;
4356
4357 if (DECL_HAS_VALUE_EXPR_P (decl))
4358 {
4359 tree value = get_base_address (DECL_VALUE_EXPR (decl));
4360
4361 if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
4362 return false;
4363 }
4364 }
4365
4366 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
4367 if (n == NULL)
4368 {
4369 enum omp_clause_default_kind default_kind, kind;
4370
4371 if (!ctx->is_parallel)
4372 goto do_outer;
4373
4374 /* ??? Some compiler-generated variables (like SAVE_EXPRs) could be
4375 remapped firstprivate instead of shared. To some extent this is
4376 addressed in omp_firstprivatize_type_sizes, but not effectively. */
4377 default_kind = ctx->default_kind;
4378 kind = lang_hooks.decls.omp_predetermined_sharing (decl);
4379 if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
4380 default_kind = kind;
4381
4382 switch (default_kind)
4383 {
4384 case OMP_CLAUSE_DEFAULT_NONE:
4385 error ("%qs not specified in enclosing parallel",
4386 IDENTIFIER_POINTER (DECL_NAME (decl)));
4387 error ("%Henclosing parallel", &ctx->location);
4388 /* FALLTHRU */
4389 case OMP_CLAUSE_DEFAULT_SHARED:
4390 flags |= GOVD_SHARED;
4391 break;
4392 case OMP_CLAUSE_DEFAULT_PRIVATE:
4393 flags |= GOVD_PRIVATE;
4394 break;
4395 default:
4396 gcc_unreachable ();
4397 }
4398
4399 omp_add_variable (ctx, decl, flags);
4400
4401 shared = (flags & GOVD_SHARED) != 0;
4402 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
4403 goto do_outer;
4404 }
4405
4406 shared = ((flags | n->value) & GOVD_SHARED) != 0;
4407 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
4408
4409 /* If nothing changed, there's nothing left to do. */
4410 if ((n->value & flags) == flags)
4411 return ret;
4412 flags |= n->value;
4413 n->value = flags;
4414
4415 do_outer:
4416 /* If the variable is private in the current context, then we don't
4417 need to propagate anything to an outer context. */
4418 if (flags & GOVD_PRIVATE)
4419 return ret;
4420 if (ctx->outer_context
4421 && omp_notice_variable (ctx->outer_context, decl, in_code))
4422 return true;
4423 return ret;
4424 }
4425
4426 /* Verify that DECL is private within CTX. If there's specific information
4427 to the contrary in the innermost scope, generate an error. */
4428
4429 static bool
4430 omp_is_private (struct gimplify_omp_ctx *ctx, tree decl)
4431 {
4432 splay_tree_node n;
4433
4434 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
4435 if (n != NULL)
4436 {
4437 if (n->value & GOVD_SHARED)
4438 {
4439 if (ctx == gimplify_omp_ctxp)
4440 error ("iteration variable %qs should be private",
4441 IDENTIFIER_POINTER (DECL_NAME (decl)));
4442 n->value = GOVD_PRIVATE;
4443 }
4444 return true;
4445 }
4446
4447 if (ctx->outer_context)
4448 return omp_is_private (ctx->outer_context, decl);
4449 else if (ctx->is_parallel)
4450 return false;
4451 else
4452 return !is_global_var (decl);
4453 }
4454
4455 /* Scan the OpenMP clauses in *LIST_P, installing mappings into a new
4456 omp context and noticing the affected variables in enclosing contexts. */
4457
4458 static void
4459 gimplify_scan_omp_clauses (tree *list_p, tree *pre_p, bool in_parallel)
4460 {
4461 struct gimplify_omp_ctx *ctx, *outer_ctx;
4462 tree c;
4463
4464 ctx = new_omp_context (in_parallel);
4465 outer_ctx = ctx->outer_context;
4466
4467 while ((c = *list_p) != NULL)
4468 {
4469 enum gimplify_status gs;
4470 bool remove = false;
4471 bool notice_outer = true;
4472 unsigned int flags;
4473 tree decl;
4474
4475 switch (TREE_CODE (c))
4476 {
4477 case OMP_CLAUSE_PRIVATE:
4478 flags = GOVD_PRIVATE | GOVD_EXPLICIT;
4479 notice_outer = false;
4480 goto do_add;
4481 case OMP_CLAUSE_SHARED:
4482 flags = GOVD_SHARED | GOVD_EXPLICIT;
4483 goto do_add;
4484 case OMP_CLAUSE_FIRSTPRIVATE:
4485 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
4486 goto do_add;
4487 case OMP_CLAUSE_LASTPRIVATE:
4488 flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
4489 goto do_add;
4490 case OMP_CLAUSE_REDUCTION:
4491 flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
4492 goto do_add;
4493
4494 do_add:
4495 decl = OMP_CLAUSE_DECL (c);
4496 if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node)
4497 {
4498 remove = true;
4499 break;
4500 }
4501 omp_add_variable (ctx, decl, flags);
4502 if (TREE_CODE (c) == OMP_CLAUSE_REDUCTION
4503 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4504 {
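              /* Gimplify the reduction init and merge expressions in their
                 own gimplification contexts so any temporaries they create
                 stay local to the clause; the placeholder itself is treated
                 as a local variable.  */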
4505 omp_add_variable (ctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
4506 GOVD_LOCAL);
4507 gimplify_omp_ctxp = ctx;
4508 push_gimplify_context ();
4509 gimplify_stmt (&OMP_CLAUSE_REDUCTION_INIT (c));
4510 pop_gimplify_context (OMP_CLAUSE_REDUCTION_INIT (c));
4511 push_gimplify_context ();
4512 gimplify_stmt (&OMP_CLAUSE_REDUCTION_MERGE (c));
4513 pop_gimplify_context (OMP_CLAUSE_REDUCTION_MERGE (c));
4514 gimplify_omp_ctxp = outer_ctx;
4515 }
4516 if (notice_outer)
4517 goto do_notice;
4518 break;
4519
4520 case OMP_CLAUSE_COPYIN:
4521 case OMP_CLAUSE_COPYPRIVATE:
4522 decl = OMP_CLAUSE_DECL (c);
4523 if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node)
4524 {
4525 remove = true;
4526 break;
4527 }
4528 do_notice:
4529 if (outer_ctx)
4530 omp_notice_variable (outer_ctx, decl, true);
4531 break;
4532
4533 case OMP_CLAUSE_SCHEDULE:
4534 if (gimplify_ctxp->combined_pre_p)
4535 {
4536 gcc_assert (gimplify_omp_ctxp == outer_ctx);
4537 gs = gimplify_expr_in_ctx (&OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (c),
4538 gimplify_ctxp->combined_pre_p, NULL,
4539 is_gimple_val, fb_rvalue,
4540 gimplify_ctxp->combined_ctxp,
4541 outer_ctx->outer_context);
4542 if (gs == GS_ERROR)
4543 remove = true;
4544 break;
4545 }
4546 /* FALLTHRU */
4547 case OMP_CLAUSE_IF:
4548 case OMP_CLAUSE_NUM_THREADS:
4549 gs = gimplify_expr (&TREE_OPERAND (c, 0), pre_p, NULL,
4550 is_gimple_val, fb_rvalue);
4551 if (gs == GS_ERROR)
4552 remove = true;
4553 break;
4554
4555 case OMP_CLAUSE_NOWAIT:
4556 case OMP_CLAUSE_ORDERED:
4557 break;
4558
4559 case OMP_CLAUSE_DEFAULT:
4560 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
4561 break;
4562
4563 default:
4564 gcc_unreachable ();
4565 }
4566
4567 if (remove)
4568 *list_p = OMP_CLAUSE_CHAIN (c);
4569 else
4570 list_p = &OMP_CLAUSE_CHAIN (c);
4571 }
4572
4573 gimplify_omp_ctxp = ctx;
4574 }
4575
4576 /* Subroutine of gimplify_adjust_omp_clauses: build an implicit data-sharing
4577 clause for the variable N describes, unless it was explicit, local, or unseen. */
4578
4579 static int
4580 gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
4581 {
4582 tree *list_p = (tree *) data;
4583 tree decl = (tree) n->key;
4584 unsigned flags = n->value;
4585 enum tree_code code;
4586 tree clause;
4587 bool private_debug;
4588
4589 if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
4590 return 0;
4591 if ((flags & GOVD_SEEN) == 0)
4592 return 0;
4593 if (flags & GOVD_DEBUG_PRIVATE)
4594 {
4595 gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_PRIVATE);
4596 private_debug = true;
4597 }
4598 else
4599 private_debug
4600 = lang_hooks.decls.omp_private_debug_clause (decl,
4601 !!(flags & GOVD_SHARED));
4602 if (private_debug)
4603 code = OMP_CLAUSE_PRIVATE;
4604 else if (flags & GOVD_SHARED)
4605 {
4606 if (is_global_var (decl))
4607 return 0;
4608 code = OMP_CLAUSE_SHARED;
4609 }
4610 else if (flags & GOVD_PRIVATE)
4611 code = OMP_CLAUSE_PRIVATE;
4612 else if (flags & GOVD_FIRSTPRIVATE)
4613 code = OMP_CLAUSE_FIRSTPRIVATE;
4614 else
4615 gcc_unreachable ();
4616
4617 clause = build1 (code, void_type_node, decl);
4618 OMP_CLAUSE_CHAIN (clause) = *list_p;
4619 if (private_debug)
4620 OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
4621 *list_p = clause;
4622
4623 return 0;
4624 }
4625
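/* Post-process the clause list *LIST_P after the body of the construct has
   been gimplified: drop explicit data-sharing clauses for variables that were
   never actually seen, rewrite clauses kept only for debugging as PRIVATE
   with the DEBUG flag, record whether LASTPRIVATE variables are also
   FIRSTPRIVATE, and append the implicit clauses collected while scanning.  */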
4626 static void
4627 gimplify_adjust_omp_clauses (tree *list_p)
4628 {
4629 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
4630 tree c, decl;
4631
4632 while ((c = *list_p) != NULL)
4633 {
4634 splay_tree_node n;
4635 bool remove = false;
4636
4637 switch (TREE_CODE (c))
4638 {
4639 case OMP_CLAUSE_PRIVATE:
4640 case OMP_CLAUSE_SHARED:
4641 case OMP_CLAUSE_FIRSTPRIVATE:
4642 decl = OMP_CLAUSE_DECL (c);
4643 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
4644 remove = !(n->value & GOVD_SEEN);
4645 if (! remove)
4646 {
4647 bool shared = TREE_CODE (c) == OMP_CLAUSE_SHARED;
4648 if ((n->value & GOVD_DEBUG_PRIVATE)
4649 || lang_hooks.decls.omp_private_debug_clause (decl, shared))
4650 {
4651 gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
4652 || ((n->value & GOVD_DATA_SHARE_CLASS)
4653 == GOVD_PRIVATE));
4654 TREE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
4655 OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
4656 }
4657 }
4658 break;
4659
4660 case OMP_CLAUSE_LASTPRIVATE:
4661 /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
4662 accurately reflect the presence of a FIRSTPRIVATE clause. */
4663 decl = OMP_CLAUSE_DECL (c);
4664 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
4665 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
4666 = (n->value & GOVD_FIRSTPRIVATE) != 0;
4667 break;
4668
4669 case OMP_CLAUSE_REDUCTION:
4670 case OMP_CLAUSE_COPYIN:
4671 case OMP_CLAUSE_COPYPRIVATE:
4672 case OMP_CLAUSE_IF:
4673 case OMP_CLAUSE_NUM_THREADS:
4674 case OMP_CLAUSE_SCHEDULE:
4675 case OMP_CLAUSE_NOWAIT:
4676 case OMP_CLAUSE_ORDERED:
4677 case OMP_CLAUSE_DEFAULT:
4678 break;
4679
4680 default:
4681 gcc_unreachable ();
4682 }
4683
4684 if (remove)
4685 *list_p = OMP_CLAUSE_CHAIN (c);
4686 else
4687 list_p = &OMP_CLAUSE_CHAIN (c);
4688 }
4689
4690 /* Add in any implicit data sharing. */
4691 splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, list_p);
4692
4693 gimplify_omp_ctxp = ctx->outer_context;
4694 delete_omp_context (ctx);
4695 }
4696
4697 /* Gimplify the contents of an OMP_PARALLEL statement. This involves
4698 gimplification of the body, as well as scanning the body for used
4699 variables. We need to do this scan now, because variable-sized
4700 decls will be decomposed during gimplification. */
4701
4702 static enum gimplify_status
4703 gimplify_omp_parallel (tree *expr_p, tree *pre_p)
4704 {
4705 tree expr = *expr_p;
4706
4707 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p, true);
4708
4709 push_gimplify_context ();
4710
4711 if (determine_parallel_type (expr) == IS_COMBINED_PARALLEL)
4712 {
4713 gimplify_ctxp->combined_pre_p = pre_p;
4714 gimplify_ctxp->combined_ctxp = gimplify_ctxp->prev_context;
4715 }
4716
4717 gimplify_stmt (&OMP_PARALLEL_BODY (expr));
4718 pop_gimplify_context (OMP_PARALLEL_BODY (expr));
4719
4720 gimplify_ctxp->combined_pre_p = NULL;
4721 gimplify_ctxp->combined_ctxp = NULL;
4722
4723 gimplify_adjust_omp_clauses (&OMP_PARALLEL_CLAUSES (expr));
4724
4725 return GS_ALL_DONE;
4726 }
4727
4728 /* Gimplify the gross structure of an OMP_FOR statement. */
4729
4730 static enum gimplify_status
4731 gimplify_omp_for (tree *expr_p, tree *pre_p)
4732 {
4733 tree for_stmt, decl, t;
4734 enum gimplify_status ret = 0;
4735 struct gimplify_omp_ctx *outer_combined_omp_ctxp = NULL;
4736
4737 for_stmt = *expr_p;
4738
4739 if (gimplify_ctxp->combined_pre_p)
4740 outer_combined_omp_ctxp = gimplify_omp_ctxp->outer_context;
4741
4742 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p, false);
4743
4744 t = OMP_FOR_INIT (for_stmt);
4745 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
4746 decl = TREE_OPERAND (t, 0);
4747 gcc_assert (DECL_P (decl));
4748 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl)));
4749 gcc_assert (!TYPE_UNSIGNED (TREE_TYPE (decl)));
4750
4751 /* Make sure the iteration variable is private. */
4752 if (omp_is_private (gimplify_omp_ctxp, decl))
4753 omp_notice_variable (gimplify_omp_ctxp, decl, true);
4754 else
4755 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
4756
4757 /* Gimplify inside our parent's context if this is part of a combined
4758 parallel+workshare directive. */
4759 if (gimplify_ctxp->combined_pre_p)
4760 ret |= gimplify_expr_in_ctx (&TREE_OPERAND (t, 1),
4761 gimplify_ctxp->combined_pre_p, NULL,
4762 is_gimple_val, fb_rvalue,
4763 gimplify_ctxp->combined_ctxp,
4764 outer_combined_omp_ctxp);
4765 else
4766 ret |= gimplify_expr (&TREE_OPERAND (t, 1), &OMP_FOR_PRE_BODY (for_stmt),
4767 NULL, is_gimple_val, fb_rvalue);
4768
4769 t = OMP_FOR_COND (for_stmt);
4770 gcc_assert (COMPARISON_CLASS_P (t));
4771 gcc_assert (TREE_OPERAND (t, 0) == decl);
4772
4773 /* Gimplify inside our parent's context if this is part of a combined
4774 parallel+workshare directive. */
4775 if (gimplify_ctxp->combined_pre_p)
4776 ret |= gimplify_expr_in_ctx (&TREE_OPERAND (t, 1),
4777 gimplify_ctxp->combined_pre_p, NULL,
4778 is_gimple_val, fb_rvalue,
4779 gimplify_ctxp->combined_ctxp,
4780 outer_combined_omp_ctxp);
4781 else
4782 ret |= gimplify_expr (&TREE_OPERAND (t, 1), &OMP_FOR_PRE_BODY (for_stmt),
4783 NULL, is_gimple_val, fb_rvalue);
4784
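  /* Canonicalize the increment: pre/post increment and decrement become an
     explicit DECL = DECL +/- 1 assignment, and for a MODIFY_EXPR we make sure
     DECL appears as the first operand of the arithmetic.  */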
4785 t = OMP_FOR_INCR (for_stmt);
4786 switch (TREE_CODE (t))
4787 {
4788 case PREINCREMENT_EXPR:
4789 case POSTINCREMENT_EXPR:
4790 t = build_int_cst (TREE_TYPE (decl), 1);
4791 goto build_modify;
4792 case PREDECREMENT_EXPR:
4793 case POSTDECREMENT_EXPR:
4794 t = build_int_cst (TREE_TYPE (decl), -1);
4795 goto build_modify;
4796 build_modify:
4797 t = build2 (PLUS_EXPR, TREE_TYPE (decl), decl, t);
4798 t = build2 (MODIFY_EXPR, void_type_node, decl, t);
4799 OMP_FOR_INCR (for_stmt) = t;
4800 break;
4801
4802 case MODIFY_EXPR:
4803 gcc_assert (TREE_OPERAND (t, 0) == decl);
4804 t = TREE_OPERAND (t, 1);
4805 switch (TREE_CODE (t))
4806 {
4807 case PLUS_EXPR:
4808 if (TREE_OPERAND (t, 1) == decl)
4809 {
4810 TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
4811 TREE_OPERAND (t, 0) = decl;
4812 break;
4813 }
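              /* FALLTHRU */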
4814 case MINUS_EXPR:
4815 gcc_assert (TREE_OPERAND (t, 0) == decl);
4816 break;
4817 default:
4818 gcc_unreachable ();
4819 }
4820
4821 /* Gimplify inside our parent's context if this is part of a
4822 combined parallel+workshare directive. */
4823 if (gimplify_ctxp->combined_pre_p)
4824 ret |= gimplify_expr_in_ctx (&TREE_OPERAND (t, 1),
4825 gimplify_ctxp->combined_pre_p, NULL,
4826 is_gimple_val, fb_rvalue,
4827 gimplify_ctxp->combined_ctxp,
4828 outer_combined_omp_ctxp);
4829 else
4830 ret |= gimplify_expr (&TREE_OPERAND (t, 1),
4831 &OMP_FOR_PRE_BODY (for_stmt), NULL,
4832 is_gimple_val, fb_rvalue);
4833 break;
4834
4835 default:
4836 gcc_unreachable ();
4837 }
4838
4839 gimplify_to_stmt_list (&OMP_FOR_BODY (for_stmt));
4840 gimplify_adjust_omp_clauses (&OMP_FOR_CLAUSES (for_stmt));
4841
4842 return ret == GS_ALL_DONE ? GS_ALL_DONE : GS_ERROR;
4843 }
4844
4845 /* Gimplify the gross structure of other OpenMP worksharing constructs.
4846 In particular, OMP_SECTIONS and OMP_SINGLE. */
4847
4848 static enum gimplify_status
4849 gimplify_omp_workshare (tree *expr_p, tree *pre_p)
4850 {
4851 tree stmt = *expr_p;
4852
4853 gimplify_scan_omp_clauses (&OMP_CLAUSES (stmt), pre_p, false);
4854 gimplify_to_stmt_list (&OMP_BODY (stmt));
4855 gimplify_adjust_omp_clauses (&OMP_CLAUSES (stmt));
4856
4857 return GS_ALL_DONE;
4858 }
4859
4860 /* A subroutine of gimplify_omp_atomic. The front end is supposed to have
4861 stabilized the lhs of the atomic operation as *ADDR. Return true if
4862 EXPR is this stabilized form. */
4863
4864 static bool
4865 goa_lhs_expr_p (tree expr, tree addr)
4866 {
4867 /* Also include casts to other type variants. The C front end is fond
4868 of adding these for e.g. volatile variables. This is like
4869 STRIP_TYPE_NOPS but includes the main variant lookup. */
4870 while ((TREE_CODE (expr) == NOP_EXPR
4871 || TREE_CODE (expr) == CONVERT_EXPR
4872 || TREE_CODE (expr) == NON_LVALUE_EXPR)
4873 && TREE_OPERAND (expr, 0) != error_mark_node
4874 && (TYPE_MAIN_VARIANT (TREE_TYPE (expr))
4875 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (expr, 0)))))
4876 expr = TREE_OPERAND (expr, 0);
4877
4878 if (TREE_CODE (expr) == INDIRECT_REF && TREE_OPERAND (expr, 0) == addr)
4879 return true;
4880 if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
4881 return true;
4882 return false;
4883 }
4884
4885 /* A subroutine of gimplify_omp_atomic. Attempt to implement the atomic
4886 operation as a __sync_fetch_and_op builtin. INDEX is log2 of the
4887 size of the data type, and thus usable to find the index of the builtin
4888 decl. Returns GS_UNHANDLED if the expression is not of the proper form. */
4889
4890 static enum gimplify_status
4891 gimplify_omp_atomic_fetch_op (tree *expr_p, tree addr, tree rhs, int index)
4892 {
4893 enum built_in_function base;
4894 tree decl, args, itype;
4895 enum insn_code *optab;
4896
4897 /* Check for one of the supported fetch-op operations. */
4898 switch (TREE_CODE (rhs))
4899 {
4900 case PLUS_EXPR:
4901 base = BUILT_IN_FETCH_AND_ADD_N;
4902 optab = sync_add_optab;
4903 break;
4904 case MINUS_EXPR:
4905 base = BUILT_IN_FETCH_AND_SUB_N;
4906 optab = sync_add_optab;
4907 break;
4908 case BIT_AND_EXPR:
4909 base = BUILT_IN_FETCH_AND_AND_N;
4910 optab = sync_and_optab;
4911 break;
4912 case BIT_IOR_EXPR:
4913 base = BUILT_IN_FETCH_AND_OR_N;
4914 optab = sync_ior_optab;
4915 break;
4916 case BIT_XOR_EXPR:
4917 base = BUILT_IN_FETCH_AND_XOR_N;
4918 optab = sync_xor_optab;
4919 break;
4920 default:
4921 return GS_UNHANDLED;
4922 }
4923
4924 /* Make sure the expression is of the proper form. */
4925 if (goa_lhs_expr_p (TREE_OPERAND (rhs, 0), addr))
4926 rhs = TREE_OPERAND (rhs, 1);
4927 else if (commutative_tree_code (TREE_CODE (rhs))
4928 && goa_lhs_expr_p (TREE_OPERAND (rhs, 1), addr))
4929 rhs = TREE_OPERAND (rhs, 0);
4930 else
4931 return GS_UNHANDLED;
4932
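  /* The sized variants of each builtin follow its _N entry, so INDEX
     (log2 of the access size) + 1 selects the matching decl.  */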
4933 decl = built_in_decls[base + index + 1];
4934 itype = TREE_TYPE (TREE_TYPE (decl));
4935
4936 if (optab[TYPE_MODE (itype)] == CODE_FOR_nothing)
4937 return GS_UNHANDLED;
4938
4939 args = tree_cons (NULL, fold_convert (itype, rhs), NULL);
4940 args = tree_cons (NULL, addr, args);
4941 *expr_p = build_function_call_expr (decl, args);
4942 return GS_OK;
4943 }
4944
4945 /* A subroutine of gimplify_omp_atomic_pipeline. Walk *EXPR_P and replace
4946 appearances of *LHS_ADDR with LHS_VAR. If an expression does not involve
4947 the lhs, evaluate it into a temporary. Return 1 if the lhs appeared as
4948 a subexpression, 0 if it did not, or -1 if an error was encountered. */
4949
4950 static int
4951 goa_stabilize_expr (tree *expr_p, tree *pre_p, tree lhs_addr, tree lhs_var)
4952 {
4953 tree expr = *expr_p;
4954 int saw_lhs;
4955
4956 if (goa_lhs_expr_p (expr, lhs_addr))
4957 {
4958 *expr_p = lhs_var;
4959 return 1;
4960 }
4961 if (is_gimple_val (expr))
4962 return 0;
4963
4964 saw_lhs = 0;
4965 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
4966 {
4967 case tcc_binary:
4968 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
4969 lhs_addr, lhs_var);
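      /* FALLTHRU */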
4970 case tcc_unary:
4971 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
4972 lhs_addr, lhs_var);
4973 break;
4974 default:
4975 break;
4976 }
4977
4978 if (saw_lhs == 0)
4979 {
4980 enum gimplify_status gs;
4981 gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
4982 if (gs != GS_ALL_DONE)
4983 saw_lhs = -1;
4984 }
4985
4986 return saw_lhs;
4987 }
4988
4989 /* A subroutine of gimplify_omp_atomic. Implement the atomic operation as:
4990
4991 oldval = *addr;
4992 repeat:
4993 newval = rhs; // with oldval replacing *addr in rhs
4994 oldval2 = oldval;
4995 oldval = __sync_val_compare_and_swap (addr, oldval2, newval);
4996 if (oldval != oldval2) goto repeat;
4997
4998 INDEX is log2 of the size of the data type, and thus usable to find the
4999 index of the builtin decl. */
5000
5001 static enum gimplify_status
5002 gimplify_omp_atomic_pipeline (tree *expr_p, tree *pre_p, tree addr,
5003 tree rhs, int index)
5004 {
5005 tree oldval, oldival, oldival2, newval, newival, label;
5006 tree type, itype, cmpxchg, args, x, iaddr;
5007
5008 cmpxchg = built_in_decls[BUILT_IN_VAL_COMPARE_AND_SWAP_N + index + 1];
5009 type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
5010 itype = TREE_TYPE (TREE_TYPE (cmpxchg));
5011
5012 if (sync_compare_and_swap[TYPE_MODE (itype)] == CODE_FOR_nothing)
5013 return GS_UNHANDLED;
5014
5015 oldval = create_tmp_var (type, NULL);
5016 newval = create_tmp_var (type, NULL);
5017
5018 /* Precompute as much of RHS as possible. In the same walk, replace
5019 occurrences of the lhs value with our temporary. */
5020 if (goa_stabilize_expr (&rhs, pre_p, addr, oldval) < 0)
5021 return GS_ERROR;
5022
5023 x = build_fold_indirect_ref (addr);
5024 x = build2 (MODIFY_EXPR, void_type_node, oldval, x);
5025 gimplify_and_add (x, pre_p);
5026
5027 /* For floating-point values, we'll need to view-convert them to integers
5028 so that we can perform the atomic compare and swap. Simplify the
5029 following code by always setting up the "i"ntegral variables. */
5030 if (INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type))
5031 {
5032 oldival = oldval;
5033 newival = newval;
5034 iaddr = addr;
5035 }
5036 else
5037 {
5038 oldival = create_tmp_var (itype, NULL);
5039 newival = create_tmp_var (itype, NULL);
5040
5041 x = build1 (VIEW_CONVERT_EXPR, itype, oldval);
5042 x = build2 (MODIFY_EXPR, void_type_node, oldival, x);
5043 gimplify_and_add (x, pre_p);
5044 iaddr = fold_convert (build_pointer_type (itype), addr);
5045 }
5046
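  /* OLDIVAL2 remembers the value we expected *ADDR to contain; if the
     compare-and-swap returns something else, another thread changed it
     and we loop back to try again.  */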
5047 oldival2 = create_tmp_var (itype, NULL);
5048
5049 label = create_artificial_label ();
5050 x = build1 (LABEL_EXPR, void_type_node, label);
5051 gimplify_and_add (x, pre_p);
5052
5053 x = build2 (MODIFY_EXPR, void_type_node, newval, rhs);
5054 gimplify_and_add (x, pre_p);
5055
5056 if (newval != newival)
5057 {
5058 x = build1 (VIEW_CONVERT_EXPR, itype, newval);
5059 x = build2 (MODIFY_EXPR, void_type_node, newival, x);
5060 gimplify_and_add (x, pre_p);
5061 }
5062
5063 x = build2 (MODIFY_EXPR, void_type_node, oldival2, oldival);
5064 gimplify_and_add (x, pre_p);
5065
5066 args = tree_cons (NULL, fold_convert (itype, newival), NULL);
5067 args = tree_cons (NULL, fold_convert (itype, oldival), args);
5068 args = tree_cons (NULL, iaddr, args);
5069 x = build_function_call_expr (cmpxchg, args);
5070 if (oldval == oldival)
5071 x = fold_convert (type, x);
5072 x = build2 (MODIFY_EXPR, void_type_node, oldival, x);
5073 gimplify_and_add (x, pre_p);
5074
5075 /* For floating point, be prepared for the loop backedge. */
5076 if (oldval != oldival)
5077 {
5078 x = build1 (VIEW_CONVERT_EXPR, type, oldival);
5079 x = build2 (MODIFY_EXPR, void_type_node, oldval, x);
5080 gimplify_and_add (x, pre_p);
5081 }
5082
5083 /* Note that we always perform the comparison as an integer, even for
5084 floating point. This allows the atomic operation to properly
5085 succeed even with NaNs and -0.0. */
5086 x = build3 (COND_EXPR, void_type_node,
5087 build2 (NE_EXPR, boolean_type_node, oldival, oldival2),
5088 build1 (GOTO_EXPR, void_type_node, label), NULL);
5089 gimplify_and_add (x, pre_p);
5090
5091 *expr_p = NULL;
5092 return GS_ALL_DONE;
5093 }
5094
5095 /* A subroutine of gimplify_omp_atomic. Implement the atomic operation as:
5096
5097 GOMP_atomic_start ();
5098 *addr = rhs;
5099 GOMP_atomic_end ();
5100
5101 The result is not globally atomic, but works so long as all parallel
5102 references are within #pragma omp atomic directives. According to
5103 responses received from omp@openmp.org, this appears to be within spec.
5104 Which makes sense, since that's how several other compilers handle
5105 this situation as well. */
5106
5107 static enum gimplify_status
5108 gimplify_omp_atomic_mutex (tree *expr_p, tree *pre_p, tree addr, tree rhs)
5109 {
5110 tree t;
5111
5112 t = built_in_decls[BUILT_IN_GOMP_ATOMIC_START];
5113 t = build_function_call_expr (t, NULL);
5114 gimplify_and_add (t, pre_p);
5115
5116 t = build_fold_indirect_ref (addr);
5117 t = build2 (MODIFY_EXPR, void_type_node, t, rhs);
5118 gimplify_and_add (t, pre_p);
5119
5120 t = built_in_decls[BUILT_IN_GOMP_ATOMIC_END];
5121 t = build_function_call_expr (t, NULL);
5122 gimplify_and_add (t, pre_p);
5123
5124 *expr_p = NULL;
5125 return GS_ALL_DONE;
5126 }
5127
5128 /* Gimplify an OMP_ATOMIC statement. */
5129
5130 static enum gimplify_status
5131 gimplify_omp_atomic (tree *expr_p, tree *pre_p)
5132 {
5133 tree addr = TREE_OPERAND (*expr_p, 0);
5134 tree rhs = TREE_OPERAND (*expr_p, 1);
5135 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
5136 HOST_WIDE_INT index;
5137
5138 /* Make sure the type is one of the supported sizes. */
5139 index = tree_low_cst (TYPE_SIZE_UNIT (type), 1);
5140 index = exact_log2 (index);
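  /* Supported access sizes are 1, 2, 4, 8 and 16 bytes, i.e. a log2
     between 0 and 4.  */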
5141 if (index >= 0 && index <= 4)
5142 {
5143 enum gimplify_status gs;
5144 unsigned int align;
5145
5146 if (DECL_P (TREE_OPERAND (addr, 0)))
5147 align = DECL_ALIGN_UNIT (TREE_OPERAND (addr, 0));
5148 else if (TREE_CODE (TREE_OPERAND (addr, 0)) == COMPONENT_REF
5149 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (addr, 0), 1))
5150 == FIELD_DECL)
5151 align = DECL_ALIGN_UNIT (TREE_OPERAND (TREE_OPERAND (addr, 0), 1));
5152 else
5153 align = TYPE_ALIGN_UNIT (type);
5154
5155 /* __sync builtins require strict data alignment. */
5156 if (exact_log2 (align) >= index)
5157 {
5158 /* When possible, use specialized atomic update functions. */
5159 if (INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type))
5160 {
5161 gs = gimplify_omp_atomic_fetch_op (expr_p, addr, rhs, index);
5162 if (gs != GS_UNHANDLED)
5163 return gs;
5164 }
5165
5166 /* If we don't have specialized __sync builtins, try and implement
5167 as a compare and swap loop. */
5168 gs = gimplify_omp_atomic_pipeline (expr_p, pre_p, addr, rhs, index);
5169 if (gs != GS_UNHANDLED)
5170 return gs;
5171 }
5172 }
5173
5174 /* The ultimate fallback is wrapping the operation in a mutex. */
5175 return gimplify_omp_atomic_mutex (expr_p, pre_p, addr, rhs);
5176 }
5177
5178 /* Gimplifies the expression tree pointed to by EXPR_P. Return GS_ERROR if
5179 gimplification failed.
5180
5181 PRE_P points to the list where side effects that must happen before
5182 EXPR should be stored.
5183
5184 POST_P points to the list where side effects that must happen after
5185 EXPR should be stored, or NULL if there is no suitable list. In
5186 that case, we copy the result to a temporary, emit the
5187 post-effects, and then return the temporary.
5188
5189 GIMPLE_TEST_F points to a function that takes a tree T and
5190 returns nonzero if T is in the GIMPLE form requested by the
5191 caller. The GIMPLE predicates are in tree-gimple.c.
5192
5193 This test is used twice. Before gimplification, the test is
5194 invoked to determine whether *EXPR_P is already gimple enough. If
5195 that fails, *EXPR_P is gimplified according to its code and
5196 GIMPLE_TEST_F is called again. If the test still fails, then a new
5197 temporary variable is created and assigned the value of the
5198 gimplified expression.
5199
5200 FALLBACK tells the function what sort of a temporary we want. If the 1
5201 bit is set, an rvalue is OK. If the 2 bit is set, an lvalue is OK.
5202 If both are set, either is OK, but an lvalue is preferable.
5203
5204 The return value is either GS_ERROR or GS_ALL_DONE, since this function
5205 iterates until solution. */
5206
5207 enum gimplify_status
5208 gimplify_expr (tree *expr_p, tree *pre_p, tree *post_p,
5209 bool (* gimple_test_f) (tree), fallback_t fallback)
5210 {
5211 tree tmp;
5212 tree internal_pre = NULL_TREE;
5213 tree internal_post = NULL_TREE;
5214 tree save_expr;
5215 int is_statement = (pre_p == NULL);
5216 location_t saved_location;
5217 enum gimplify_status ret;
5218
5219 save_expr = *expr_p;
5220 if (save_expr == NULL_TREE)
5221 return GS_ALL_DONE;
5222
5223 /* We used to check the predicate here and return immediately if it
5224 succeeds. This is wrong; the design is for gimplification to be
5225 idempotent, and for the predicates to only test for valid forms, not
5226 whether they are fully simplified. */
5227
5228 /* Set up our internal queues if needed. */
5229 if (pre_p == NULL)
5230 pre_p = &internal_pre;
5231 if (post_p == NULL)
5232 post_p = &internal_post;
5233
5234 saved_location = input_location;
5235 if (save_expr != error_mark_node
5236 && EXPR_HAS_LOCATION (*expr_p))
5237 input_location = EXPR_LOCATION (*expr_p);
5238
5239 /* Loop over the specific gimplifiers until the toplevel node
5240 remains the same. */
5241 do
5242 {
5243 /* Strip away as many useless type conversions as possible
5244 at the toplevel. */
5245 STRIP_USELESS_TYPE_CONVERSION (*expr_p);
5246
5247 /* Remember the expr. */
5248 save_expr = *expr_p;
5249
5250 /* Die, die, die, my darling. */
5251 if (save_expr == error_mark_node
5252 || (TREE_TYPE (save_expr)
5253 && TREE_TYPE (save_expr) == error_mark_node))
5254 {
5255 ret = GS_ERROR;
5256 break;
5257 }
5258
5259 /* Do any language-specific gimplification. */
5260 ret = lang_hooks.gimplify_expr (expr_p, pre_p, post_p);
5261 if (ret == GS_OK)
5262 {
5263 if (*expr_p == NULL_TREE)
5264 break;
5265 if (*expr_p != save_expr)
5266 continue;
5267 }
5268 else if (ret != GS_UNHANDLED)
5269 break;
5270
5271 ret = GS_OK;
5272 switch (TREE_CODE (*expr_p))
5273 {
5274 /* First deal with the special cases. */
5275
5276 case POSTINCREMENT_EXPR:
5277 case POSTDECREMENT_EXPR:
5278 case PREINCREMENT_EXPR:
5279 case PREDECREMENT_EXPR:
5280 ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
5281 fallback != fb_none);
5282 break;
5283
5284 case ARRAY_REF:
5285 case ARRAY_RANGE_REF:
5286 case REALPART_EXPR:
5287 case IMAGPART_EXPR:
5288 case COMPONENT_REF:
5289 case VIEW_CONVERT_EXPR:
5290 ret = gimplify_compound_lval (expr_p, pre_p, post_p,
5291 fallback ? fallback : fb_rvalue);
5292 break;
5293
5294 case COND_EXPR:
5295 ret = gimplify_cond_expr (expr_p, pre_p, post_p, NULL_TREE,
5296 fallback);
5297 /* C99 code may assign to an array in a structure value of a
5298 conditional expression, and this has undefined behavior
5299 only on execution, so create a temporary if an lvalue is
5300 required. */
5301 if (fallback == fb_lvalue)
5302 {
5303 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
5304 lang_hooks.mark_addressable (*expr_p);
5305 }
5306 break;
5307
5308 case CALL_EXPR:
5309 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
5310 /* C99 code may assign to an array in a structure returned
5311 from a function, and this has undefined behavior only on
5312 execution, so create a temporary if an lvalue is
5313 required. */
5314 if (fallback == fb_lvalue)
5315 {
5316 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
5317 lang_hooks.mark_addressable (*expr_p);
5318 }
5319 break;
5320
5321 case TREE_LIST:
5322 gcc_unreachable ();
5323
5324 case COMPOUND_EXPR:
5325 ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
5326 break;
5327
5328 case MODIFY_EXPR:
5329 case INIT_EXPR:
5330 ret = gimplify_modify_expr (expr_p, pre_p, post_p,
5331 fallback != fb_none);
5332 break;
5333
5334 case TRUTH_ANDIF_EXPR:
5335 case TRUTH_ORIF_EXPR:
5336 ret = gimplify_boolean_expr (expr_p);
5337 break;
5338
5339 case TRUTH_NOT_EXPR:
5340 TREE_OPERAND (*expr_p, 0)
5341 = gimple_boolify (TREE_OPERAND (*expr_p, 0));
5342 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
5343 is_gimple_val, fb_rvalue);
5344 recalculate_side_effects (*expr_p);
5345 break;
5346
5347 case ADDR_EXPR:
5348 ret = gimplify_addr_expr (expr_p, pre_p, post_p);
5349 break;
5350
5351 case VA_ARG_EXPR:
5352 ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
5353 break;
5354
5355 case CONVERT_EXPR:
5356 case NOP_EXPR:
5357 if (IS_EMPTY_STMT (*expr_p))
5358 {
5359 ret = GS_ALL_DONE;
5360 break;
5361 }
5362
5363 if (VOID_TYPE_P (TREE_TYPE (*expr_p))
5364 || fallback == fb_none)
5365 {
5366 /* Just strip a conversion to void (or in void context) and
5367 try again. */
5368 *expr_p = TREE_OPERAND (*expr_p, 0);
5369 break;
5370 }
5371
5372 ret = gimplify_conversion (expr_p);
5373 if (ret == GS_ERROR)
5374 break;
5375 if (*expr_p != save_expr)
5376 break;
5377 /* FALLTHRU */
5378
5379 case FIX_TRUNC_EXPR:
5380 case FIX_CEIL_EXPR:
5381 case FIX_FLOOR_EXPR:
5382 case FIX_ROUND_EXPR:
5383 /* unary_expr: ... | '(' cast ')' val | ... */
5384 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
5385 is_gimple_val, fb_rvalue);
5386 recalculate_side_effects (*expr_p);
5387 break;
5388
5389 case INDIRECT_REF:
5390 *expr_p = fold_indirect_ref (*expr_p);
5391 if (*expr_p != save_expr)
5392 break;
5393 /* else fall through. */
5394 case ALIGN_INDIRECT_REF:
5395 case MISALIGNED_INDIRECT_REF:
5396 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
5397 is_gimple_reg, fb_rvalue);
5398 recalculate_side_effects (*expr_p);
5399 break;
5400
5401 /* Constants need not be gimplified. */
5402 case INTEGER_CST:
5403 case REAL_CST:
5404 case STRING_CST:
5405 case COMPLEX_CST:
5406 case VECTOR_CST:
5407 ret = GS_ALL_DONE;
5408 break;
5409
5410 case CONST_DECL:
5411 /* If we require an lvalue, such as for ADDR_EXPR, retain the
5412 CONST_DECL node. Otherwise the decl is replaceable by its
5413 value. */
5414 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
5415 if (fallback & fb_lvalue)
5416 ret = GS_ALL_DONE;
5417 else
5418 *expr_p = DECL_INITIAL (*expr_p);
5419 break;
5420
5421 case DECL_EXPR:
5422 ret = gimplify_decl_expr (expr_p);
5423 break;
5424
5425 case EXC_PTR_EXPR:
5426 /* FIXME make this a decl. */
5427 ret = GS_ALL_DONE;
5428 break;
5429
5430 case BIND_EXPR:
5431 ret = gimplify_bind_expr (expr_p, NULL, pre_p);
5432 break;
5433
5434 case LOOP_EXPR:
5435 ret = gimplify_loop_expr (expr_p, pre_p);
5436 break;
5437
5438 case SWITCH_EXPR:
5439 ret = gimplify_switch_expr (expr_p, pre_p);
5440 break;
5441
5442 case EXIT_EXPR:
5443 ret = gimplify_exit_expr (expr_p);
5444 break;
5445
5446 case GOTO_EXPR:
5447 /* If the target is not a LABEL_DECL, then it is a computed jump
5448 and the target needs to be gimplified. */
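	  /* E.g. the GNU labels-as-values extension
	       goto *dispatch[i];
	     leaves a non-LABEL_DECL destination that must be reduced to a
	     GIMPLE value here.  */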
5449 if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
5450 ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
5451 NULL, is_gimple_val, fb_rvalue);
5452 break;
5453
5454 case LABEL_EXPR:
5455 ret = GS_ALL_DONE;
5456 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
5457 == current_function_decl);
5458 break;
5459
5460 case CASE_LABEL_EXPR:
5461 ret = gimplify_case_label_expr (expr_p);
5462 break;
5463
5464 case RETURN_EXPR:
5465 ret = gimplify_return_expr (*expr_p, pre_p);
5466 break;
5467
5468 case CONSTRUCTOR:
5469 /* Don't reduce this in place; let gimplify_init_constructor work its
5470 magic. But if we're just elaborating this for side effects, just
5471 gimplify any element that has side-effects. */
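	  /* For instance, if the constructor { 1, f () } is only being
	     elaborated for its side effects, all that survives below is the
	     call to f ().  */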
5472 if (fallback == fb_none)
5473 {
5474 unsigned HOST_WIDE_INT ix;
5475 constructor_elt *ce;
5476 tree temp = NULL_TREE;
5477 for (ix = 0;
5478 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (*expr_p),
5479 ix, ce);
5480 ix++)
5481 if (TREE_SIDE_EFFECTS (ce->value))
5482 append_to_statement_list (ce->value, &temp);
5483
5484 *expr_p = temp;
5485 ret = GS_OK;
5486 }
5487 /* C99 code may assign to an array in a constructed
5488 structure or union, and this has undefined behavior only
5489 on execution, so create a temporary if an lvalue is
5490 required. */
5491 else if (fallback == fb_lvalue)
5492 {
5493 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
5494 lang_hooks.mark_addressable (*expr_p);
5495 }
5496 else
5497 ret = GS_ALL_DONE;
5498 break;
5499
5500 /* The following are special cases that are not handled by the
5501 original GIMPLE grammar. */
5502
5503 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
5504 eliminated. */
5505 case SAVE_EXPR:
5506 ret = gimplify_save_expr (expr_p, pre_p, post_p);
5507 break;
5508
5509 case BIT_FIELD_REF:
5510 {
5511 enum gimplify_status r0, r1, r2;
5512
5513 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
5514 is_gimple_lvalue, fb_either);
5515 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
5516 is_gimple_val, fb_rvalue);
5517 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p, post_p,
5518 is_gimple_val, fb_rvalue);
5519 recalculate_side_effects (*expr_p);
5520
5521 ret = MIN (r0, MIN (r1, r2));
5522 }
5523 break;
5524
5525 case NON_LVALUE_EXPR:
5526 /* This should have been stripped above. */
5527 gcc_unreachable ();
5528
5529 case ASM_EXPR:
5530 ret = gimplify_asm_expr (expr_p, pre_p, post_p);
5531 break;
5532
5533 case TRY_FINALLY_EXPR:
5534 case TRY_CATCH_EXPR:
5535 gimplify_to_stmt_list (&TREE_OPERAND (*expr_p, 0));
5536 gimplify_to_stmt_list (&TREE_OPERAND (*expr_p, 1));
5537 ret = GS_ALL_DONE;
5538 break;
5539
5540 case CLEANUP_POINT_EXPR:
5541 ret = gimplify_cleanup_point_expr (expr_p, pre_p);
5542 break;
5543
5544 case TARGET_EXPR:
5545 ret = gimplify_target_expr (expr_p, pre_p, post_p);
5546 break;
5547
5548 case CATCH_EXPR:
5549 gimplify_to_stmt_list (&CATCH_BODY (*expr_p));
5550 ret = GS_ALL_DONE;
5551 break;
5552
5553 case EH_FILTER_EXPR:
5554 gimplify_to_stmt_list (&EH_FILTER_FAILURE (*expr_p));
5555 ret = GS_ALL_DONE;
5556 break;
5557
5558 case OBJ_TYPE_REF:
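	  /* OBJ_TYPE_REF is how the C++ front end annotates an indirect
	     (virtual) call target; both the object and the pointer-valued
	     expression must become GIMPLE values.  */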
5559 {
5560 enum gimplify_status r0, r1;
5561 r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p, post_p,
5562 is_gimple_val, fb_rvalue);
5563 r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p, post_p,
5564 is_gimple_val, fb_rvalue);
5565 ret = MIN (r0, r1);
5566 }
5567 break;
5568
5569 case LABEL_DECL:
5570 /* We get here when taking the address of a label. We mark
5571 the label as "forced", meaning it can never be removed and
5572 it is a potential target for any computed goto. */
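	  /* This is the labels-as-values extension, e.g.
	       void *p = &&retry;
	     which forces RETRY to survive even if it appears unused.  */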
5573 FORCED_LABEL (*expr_p) = 1;
5574 ret = GS_ALL_DONE;
5575 break;
5576
5577 case STATEMENT_LIST:
5578 ret = gimplify_statement_list (expr_p);
5579 break;
5580
5581 case WITH_SIZE_EXPR:
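	  /* A WITH_SIZE_EXPR pairs a value (operand 0) with its run-time
	     size (operand 1); gimplify the value as the context demands and
	     force the size to a GIMPLE value.  */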
5582 {
5583 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
5584 post_p == &internal_post ? NULL : post_p,
5585 gimple_test_f, fallback);
5586 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
5587 is_gimple_val, fb_rvalue);
5588 }
5589 break;
5590
5591 case VAR_DECL:
5592 case PARM_DECL:
5593 ret = gimplify_var_or_parm_decl (expr_p);
5594 break;
5595
5596 case SSA_NAME:
5597 /* Allow callbacks into the gimplifier during optimization. */
5598 ret = GS_ALL_DONE;
5599 break;
5600
5601 case OMP_PARALLEL:
5602 ret = gimplify_omp_parallel (expr_p, pre_p);
5603 break;
5604
5605 case OMP_FOR:
5606 ret = gimplify_omp_for (expr_p, pre_p);
5607 break;
5608
5609 case OMP_SECTIONS:
5610 case OMP_SINGLE:
5611 ret = gimplify_omp_workshare (expr_p, pre_p);
5612 break;
5613
5614 case OMP_SECTION:
5615 case OMP_MASTER:
5616 case OMP_ORDERED:
5617 case OMP_CRITICAL:
5618 gimplify_to_stmt_list (&OMP_BODY (*expr_p));
5619 break;
5620
5621 case OMP_ATOMIC:
5622 ret = gimplify_omp_atomic (expr_p, pre_p);
5623 break;
5624
5625 default:
5626 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
5627 {
5628 case tcc_comparison:
5629 /* If this is a comparison of objects of aggregate type,
5630 handle it specially (by converting to a call to
5631 memcmp). It would be nice to only have to do this
5632 for variable-sized objects, but then we'd have to
5633 allow the same nest of reference nodes we allow for
5634 MODIFY_EXPR and that's too complex. */
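	      /* Roughly, an aggregate comparison
		   a == b
		 ends up as something along the lines of
		   memcmp (&a, &b, sizeof a) == 0
		 via gimplify_variable_sized_compare.  */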
5635 if (!AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (*expr_p, 1))))
5636 goto expr_2;
5637 ret = gimplify_variable_sized_compare (expr_p);
5638 break;
5639
5640 /* If *EXPR_P does not need to be special-cased, handle it
5641 according to its class. */
5642 case tcc_unary:
5643 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
5644 post_p, is_gimple_val, fb_rvalue);
5645 break;
5646
5647 case tcc_binary:
5648 expr_2:
5649 {
5650 enum gimplify_status r0, r1;
5651
5652 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
5653 post_p, is_gimple_val, fb_rvalue);
5654 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
5655 post_p, is_gimple_val, fb_rvalue);
5656
5657 ret = MIN (r0, r1);
5658 break;
5659 }
5660
5661 case tcc_declaration:
5662 case tcc_constant:
5663 ret = GS_ALL_DONE;
5664 goto dont_recalculate;
5665
5666 default:
5667 gcc_assert (TREE_CODE (*expr_p) == TRUTH_AND_EXPR
5668 || TREE_CODE (*expr_p) == TRUTH_OR_EXPR
5669 || TREE_CODE (*expr_p) == TRUTH_XOR_EXPR);
5670 goto expr_2;
5671 }
5672
5673 recalculate_side_effects (*expr_p);
5674 dont_recalculate:
5675 break;
5676 }
5677
5678 /* If we replaced *expr_p, gimplify again. */
5679 if (ret == GS_OK && (*expr_p == NULL || *expr_p == save_expr))
5680 ret = GS_ALL_DONE;
5681 }
5682 while (ret == GS_OK);
5683
5684 /* If we encountered an error_mark somewhere nested inside, either
5685 stub out the statement or propagate the error back out. */
5686 if (ret == GS_ERROR)
5687 {
5688 if (is_statement)
5689 *expr_p = NULL;
5690 goto out;
5691 }
5692
5693 /* This was only valid as a return value from the langhook, which
5694 we handled. Make sure it doesn't escape from any other context. */
5695 gcc_assert (ret != GS_UNHANDLED);
5696
5697 if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
5698 {
5699 /* We aren't looking for a value, and we don't have a valid
5700 statement. If it doesn't have side-effects, throw it away. */
5701 if (!TREE_SIDE_EFFECTS (*expr_p))
5702 *expr_p = NULL;
5703 else if (!TREE_THIS_VOLATILE (*expr_p))
5704 {
5705 /* This is probably a _REF that contains something nested that
5706 has side effects. Recurse through the operands to find it. */
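	  /* E.g. a bare statement such as
	       a[f ()];
	     has no useful value, but the call in the index must still be
	     emitted.  */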
5707 enum tree_code code = TREE_CODE (*expr_p);
5708
5709 switch (code)
5710 {
5711 case COMPONENT_REF:
5712 case REALPART_EXPR: case IMAGPART_EXPR:
5713 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
5714 gimple_test_f, fallback);
5715 break;
5716
5717 case ARRAY_REF: case ARRAY_RANGE_REF:
5718 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
5719 gimple_test_f, fallback);
5720 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
5721 gimple_test_f, fallback);
5722 break;
5723
5724 default:
5725 /* Anything else with side-effects must be converted to
5726 a valid statement before we get here. */
5727 gcc_unreachable ();
5728 }
5729
5730 *expr_p = NULL;
5731 }
5732 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p)))
5733 {
5734 /* Historically, the compiler has treated a bare
5735 reference to a volatile lvalue as forcing a load. */
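	  /* E.g. a statement consisting of nothing but
	       v;
	     where V is a volatile int still performs a read, implemented
	     below as an assignment of V into a fresh temporary.  */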
5736 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
5737 /* Normally, we do not want to create a temporary for a
5738 TREE_ADDRESSABLE type because such a type should not be
5739 copied by bitwise-assignment. However, we make an
5740 exception here, as all we are doing here is ensuring that
5741 we read the bytes that make up the type. We use
5742 create_tmp_var_raw because create_tmp_var will abort when
5743 given a TREE_ADDRESSABLE type. */
5744 tree tmp = create_tmp_var_raw (type, "vol");
5745 gimple_add_tmp_var (tmp);
5746 *expr_p = build2 (MODIFY_EXPR, type, tmp, *expr_p);
5747 }
5748 else
5749 /* We can't do anything useful with a volatile reference to
5750 incomplete type, so just throw it away. */
5751 *expr_p = NULL;
5752 }
5753
5754 /* If we are gimplifying at the statement level, we're done. Tack
5755 everything together and replace the original statement with the
5756 gimplified form. */
5757 if (fallback == fb_none || is_statement)
5758 {
5759 if (internal_pre || internal_post)
5760 {
5761 append_to_statement_list (*expr_p, &internal_pre);
5762 append_to_statement_list (internal_post, &internal_pre);
5763 annotate_all_with_locus (&internal_pre, input_location);
5764 *expr_p = internal_pre;
5765 }
5766 else if (!*expr_p)
5767 ;
5768 else if (TREE_CODE (*expr_p) == STATEMENT_LIST)
5769 annotate_all_with_locus (expr_p, input_location);
5770 else
5771 annotate_one_with_locus (*expr_p, input_location);
5772 goto out;
5773 }
5774
5775 /* Otherwise we're gimplifying a subexpression, so the resulting value is
5776 interesting. */
5777
5778 /* If it's sufficiently simple already, we're done. Unless we are
5779 handling some post-effects internally; if that's the case, we need to
5780 copy into a temp before adding the post-effects to the tree. */
5781 if (!internal_post && (*gimple_test_f) (*expr_p))
5782 goto out;
5783
5784 /* Otherwise, we need to create a new temporary for the gimplified
5785 expression. */
5786
5787 /* We can't return an lvalue if we have an internal postqueue. The
5788 object the lvalue refers to would (probably) be modified by the
5789 postqueue; we need to copy the value out first, which means an
5790 rvalue. */
5791 if ((fallback & fb_lvalue) && !internal_post
5792 && is_gimple_addressable (*expr_p))
5793 {
5794 /* An lvalue will do. Take the address of the expression, store it
5795 in a temporary, and replace the expression with an INDIRECT_REF of
5796 that temporary. */
5797 tmp = build_fold_addr_expr (*expr_p);
5798 gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
5799 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (tmp)), tmp);
5800 }
5801 else if ((fallback & fb_rvalue) && is_gimple_formal_tmp_rhs (*expr_p))
5802 {
5803 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
5804
5805 /* An rvalue will do. Assign the gimplified expression into a new
5806 temporary TMP and replace the original expression with TMP. */
5807
5808 if (internal_post || (fallback & fb_lvalue))
5809 /* The postqueue might change the value of the expression between
5810 the initialization and use of the temporary, so we can't use a
5811 formal temp. FIXME do we care? */
5812 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
5813 else
5814 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
5815
5816 if (TREE_CODE (*expr_p) != SSA_NAME)
5817 DECL_GIMPLE_FORMAL_TEMP_P (*expr_p) = 1;
5818 }
5819 else
5820 {
5821 #ifdef ENABLE_CHECKING
5822 if (!(fallback & fb_mayfail))
5823 {
5824 fprintf (stderr, "gimplification failed:\n");
5825 print_generic_expr (stderr, *expr_p, 0);
5826 debug_tree (*expr_p);
5827 internal_error ("gimplification failed");
5828 }
5829 #endif
5830 gcc_assert (fallback & fb_mayfail);
5831 /* If this is an asm statement, and the user asked for the
5832 impossible, don't die. Fail and let gimplify_asm_expr
5833 issue an error. */
5834 ret = GS_ERROR;
5835 goto out;
5836 }
5837
5838 /* Make sure the temporary matches our predicate. */
5839 gcc_assert ((*gimple_test_f) (*expr_p));
5840
5841 if (internal_post)
5842 {
5843 annotate_all_with_locus (&internal_post, input_location);
5844 append_to_statement_list (internal_post, pre_p);
5845 }
5846
5847 out:
5848 input_location = saved_location;
5849 return ret;
5850 }
5851
5852 /* Look through TYPE for variable-sized objects and gimplify each such
5853 size that we find. Add to LIST_P any statements generated. */
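/* A typical source of such sizes is a C99 variable-length array: in
     void f (int n) { char buf[n + 1]; ... }
   the TYPE_SIZE of BUF's type depends on N, so evaluating it
   requires statements of its own.  */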
5854
5855 void
5856 gimplify_type_sizes (tree type, tree *list_p)
5857 {
5858 tree field, t;
5859
5860 if (type == NULL || type == error_mark_node)
5861 return;
5862
5863 /* We first do the main variant, then copy into any other variants. */
5864 type = TYPE_MAIN_VARIANT (type);
5865
5866 /* Avoid infinite recursion. */
5867 if (TYPE_SIZES_GIMPLIFIED (type))
5868 return;
5869
5870 TYPE_SIZES_GIMPLIFIED (type) = 1;
5871
5872 switch (TREE_CODE (type))
5873 {
5874 case INTEGER_TYPE:
5875 case ENUMERAL_TYPE:
5876 case BOOLEAN_TYPE:
5877 case CHAR_TYPE:
5878 case REAL_TYPE:
5879 gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
5880 gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);
5881
5882 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
5883 {
5884 TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
5885 TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
5886 }
5887 break;
5888
5889 case ARRAY_TYPE:
5890 /* These types may not have declarations, so handle them here. */
5891 gimplify_type_sizes (TREE_TYPE (type), list_p);
5892 gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
5893 break;
5894
5895 case RECORD_TYPE:
5896 case UNION_TYPE:
5897 case QUAL_UNION_TYPE:
5898 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
5899 if (TREE_CODE (field) == FIELD_DECL)
5900 {
5901 gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
5902 gimplify_type_sizes (TREE_TYPE (field), list_p);
5903 }
5904 break;
5905
5906 case POINTER_TYPE:
5907 case REFERENCE_TYPE:
5908 gimplify_type_sizes (TREE_TYPE (type), list_p);
5909 break;
5910
5911 default:
5912 break;
5913 }
5914
5915 gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
5916 gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);
5917
5918 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
5919 {
5920 TYPE_SIZE (t) = TYPE_SIZE (type);
5921 TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
5922 TYPE_SIZES_GIMPLIFIED (t) = 1;
5923 }
5924 }
5925
5926 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
5927 a size or position, has had all of its SAVE_EXPRs evaluated.
5928 We add any required statements to STMT_P. */
5929
5930 void
5931 gimplify_one_sizepos (tree *expr_p, tree *stmt_p)
5932 {
5933 tree type, expr = *expr_p;
5934
5935 /* We don't do anything if the value isn't there, is constant, or contains
5936 a PLACEHOLDER_EXPR. We also don't want to do anything if it's already
5937 a VAR_DECL. If it's a VAR_DECL from another function, the gimplifier
5938 will want to replace it with a new variable, but that will cause problems
5939 if this type is from outside the function. It's OK to have that here. */
5940 if (expr == NULL_TREE || TREE_CONSTANT (expr)
5941 || TREE_CODE (expr) == VAR_DECL
5942 || CONTAINS_PLACEHOLDER_P (expr))
5943 return;
5944
5945 type = TREE_TYPE (expr);
5946 *expr_p = unshare_expr (expr);
5947
5948 gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue);
5949 expr = *expr_p;
5950
5951 /* Verify that we have an exact type match with the original expression.
5952 In particular, we do not wish to drop a "sizetype" in favour of a
5953 type of similar dimensions. We don't want to pollute the generic
5954 type-stripping code with this knowledge because it doesn't matter
5955 for the bulk of GENERIC/GIMPLE. It only matters that TYPE_SIZE_UNIT
5956 and friends retain their "sizetype-ness". */
5957 if (TREE_TYPE (expr) != type
5958 && TREE_CODE (type) == INTEGER_TYPE
5959 && TYPE_IS_SIZETYPE (type))
5960 {
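      /* Convert EXPR to TYPE through an explicit NOP_EXPR and assign the
	 result to a fresh temporary of TYPE, so that *EXPR_P retains its
	 sizetype.  */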
5961 tree tmp;
5962
5963 *expr_p = create_tmp_var (type, NULL);
5964 tmp = build1 (NOP_EXPR, type, expr);
5965 tmp = build2 (MODIFY_EXPR, type, *expr_p, tmp);
5966 if (EXPR_HAS_LOCATION (expr))
5967 SET_EXPR_LOCUS (tmp, EXPR_LOCUS (expr));
5968 else
5969 SET_EXPR_LOCATION (tmp, input_location);
5970
5971 gimplify_and_add (tmp, stmt_p);
5972 }
5973 }
5974 \f
5975 #ifdef ENABLE_CHECKING
5976 /* Compare types A and B for a "close enough" match. */
5977
5978 static bool
5979 cpt_same_type (tree a, tree b)
5980 {
5981 if (lang_hooks.types_compatible_p (a, b))
5982 return true;
5983
5984 /* ??? The C++ FE decomposes METHOD_TYPES to FUNCTION_TYPES and doesn't
5985 link them together. This routine is intended to catch type errors
5986 that will affect the optimizers, and the optimizers don't add new
5987 dereferences of function pointers, so ignore it. */
5988 if ((TREE_CODE (a) == FUNCTION_TYPE || TREE_CODE (a) == METHOD_TYPE)
5989 && (TREE_CODE (b) == FUNCTION_TYPE || TREE_CODE (b) == METHOD_TYPE))
5990 return true;
5991
5992 /* ??? The C FE pushes type qualifiers after the fact into the type of
5993 the element from the type of the array. See build_unary_op's handling
5994 of ADDR_EXPR. This seems wrong -- if we were going to do this, we
5995 should have done it when creating the variable in the first place.
5996 Alternately, why aren't the two array types made variants? */
5997 if (TREE_CODE (a) == ARRAY_TYPE && TREE_CODE (b) == ARRAY_TYPE)
5998 return cpt_same_type (TREE_TYPE (a), TREE_TYPE (b));
5999
6000 /* And because of those, we have to recurse down through pointers. */
6001 if (POINTER_TYPE_P (a) && POINTER_TYPE_P (b))
6002 return cpt_same_type (TREE_TYPE (a), TREE_TYPE (b));
6003
6004 return false;
6005 }
6006
6007 /* Check for some cases of the front end missing cast expressions.
6008 The type of a dereference should correspond to the pointer type;
6009 similarly the type of an address should match its object. */
6010
6011 static tree
6012 check_pointer_types_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
6013 void *data ATTRIBUTE_UNUSED)
6014 {
6015 tree t = *tp;
6016 tree ptype, otype, dtype;
6017
6018 switch (TREE_CODE (t))
6019 {
6020 case INDIRECT_REF:
6021 case ARRAY_REF:
6022 otype = TREE_TYPE (t);
6023 ptype = TREE_TYPE (TREE_OPERAND (t, 0));
6024 dtype = TREE_TYPE (ptype);
6025 gcc_assert (cpt_same_type (otype, dtype));
6026 break;
6027
6028 case ADDR_EXPR:
6029 ptype = TREE_TYPE (t);
6030 otype = TREE_TYPE (TREE_OPERAND (t, 0));
6031 dtype = TREE_TYPE (ptype);
6032 if (!cpt_same_type (otype, dtype))
6033 {
6034 /* &array is allowed to produce a pointer to the element, rather than
6035 a pointer to the array type. We must allow this in order to
6036 properly represent assigning the address of an array in C into a
6037 pointer to the element type. */
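	  /* E.g. for
	       int a[10];
	       int *p = a;
	     the ADDR_EXPR built for the decay of A has type "int *" while
	     its operand has type "int[10]".  */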
6038 gcc_assert (TREE_CODE (otype) == ARRAY_TYPE
6039 && POINTER_TYPE_P (ptype)
6040 && cpt_same_type (TREE_TYPE (otype), dtype));
6041 break;
6042 }
6043 break;
6044
6045 default:
6046 return NULL_TREE;
6047 }
6048
6049
6050 return NULL_TREE;
6051 }
6052 #endif
6053
6054 /* Gimplify the body of statements pointed to by BODY_P. FNDECL is the
6055 function decl containing BODY. */
6056
6057 void
6058 gimplify_body (tree *body_p, tree fndecl, bool do_parms)
6059 {
6060 location_t saved_location = input_location;
6061 tree body, parm_stmts;
6062
6063 timevar_push (TV_TREE_GIMPLIFY);
6064
6065 gcc_assert (gimplify_ctxp == NULL);
6066 push_gimplify_context ();
6067
6068 /* Unshare most shared trees in the body and in that of any nested functions.
6069 It would seem we don't have to do this for nested functions because
6070 they are supposed to be output and then the outer function gimplified
6071 first, but the g++ front end doesn't always do it that way. */
6072 unshare_body (body_p, fndecl);
6073 unvisit_body (body_p, fndecl);
6074
6075 /* Make sure input_location isn't set to something weird. */
6076 input_location = DECL_SOURCE_LOCATION (fndecl);
6077
6078 /* Resolve callee-copies. This has to be done before processing
6079 the body so that DECL_VALUE_EXPR gets processed correctly. */
6080 parm_stmts = do_parms ? gimplify_parameters () : NULL;
6081
6082 /* Gimplify the function's body. */
6083 gimplify_stmt (body_p);
6084 body = *body_p;
6085
6086 if (!body)
6087 body = alloc_stmt_list ();
6088 else if (TREE_CODE (body) == STATEMENT_LIST)
6089 {
6090 tree t = expr_only (*body_p);
6091 if (t)
6092 body = t;
6093 }
6094
6095 /* If there isn't an outer BIND_EXPR, add one. */
6096 if (TREE_CODE (body) != BIND_EXPR)
6097 {
6098 tree b = build3 (BIND_EXPR, void_type_node, NULL_TREE,
6099 NULL_TREE, NULL_TREE);
6100 TREE_SIDE_EFFECTS (b) = 1;
6101 append_to_statement_list_force (body, &BIND_EXPR_BODY (b));
6102 body = b;
6103 }
6104
6105 /* If we had callee-copies statements, insert them at the beginning
6106 of the function. */
6107 if (parm_stmts)
6108 {
6109 append_to_statement_list_force (BIND_EXPR_BODY (body), &parm_stmts);
6110 BIND_EXPR_BODY (body) = parm_stmts;
6111 }
6112
6113 /* Unshare again, in case gimplification was sloppy. */
6114 unshare_all_trees (body);
6115
6116 *body_p = body;
6117
6118 pop_gimplify_context (body);
6119 gcc_assert (gimplify_ctxp == NULL);
6120
6121 #ifdef ENABLE_CHECKING
6122 walk_tree (body_p, check_pointer_types_r, NULL, NULL);
6123 #endif
6124
6125 timevar_pop (TV_TREE_GIMPLIFY);
6126 input_location = saved_location;
6127 }
6128
6129 /* Entry point to the gimplification pass. FNDECL is the FUNCTION_DECL
6130 node for the function we want to gimplify. */
6131
6132 void
6133 gimplify_function_tree (tree fndecl)
6134 {
6135 tree oldfn, parm, ret;
6136
6137 oldfn = current_function_decl;
6138 current_function_decl = fndecl;
6139 cfun = DECL_STRUCT_FUNCTION (fndecl);
6140 if (cfun == NULL)
6141 allocate_struct_function (fndecl);
6142
6143 for (parm = DECL_ARGUMENTS (fndecl); parm ; parm = TREE_CHAIN (parm))
6144 {
6145 /* Preliminarily mark non-addressed complex variables as eligible
6146 for promotion to gimple registers. We'll transform their uses
6147 as we find them. */
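      /* E.g. a parameter declared "_Complex double z" whose address is
	 never taken can be treated as a GIMPLE register and later split
	 into separate real and imaginary parts.  */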
6148 if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
6149 && !TREE_THIS_VOLATILE (parm)
6150 && !needs_to_live_in_memory (parm))
6151 DECL_COMPLEX_GIMPLE_REG_P (parm) = 1;
6152 }
6153
6154 ret = DECL_RESULT (fndecl);
6155 if (TREE_CODE (TREE_TYPE (ret)) == COMPLEX_TYPE
6156 && !needs_to_live_in_memory (ret))
6157 DECL_COMPLEX_GIMPLE_REG_P (ret) = 1;
6158
6159 gimplify_body (&DECL_SAVED_TREE (fndecl), fndecl, true);
6160
6161 /* If we're instrumenting function entry/exit, then prepend the call to
6162 the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
6163 catch the exit hook. */
6164 /* ??? Add some way to ignore exceptions for this TFE. */
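  /* The tree built below has roughly the shape
       BIND_EXPR
	 entry hook call
	 TRY_FINALLY_EXPR
	   <original DECL_SAVED_TREE>
	   exit hook call
     so the exit hook also runs when the body is left early.  */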
6165 if (flag_instrument_function_entry_exit
6166 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl))
6167 {
6168 tree tf, x, bind;
6169
6170 tf = build2 (TRY_FINALLY_EXPR, void_type_node, NULL, NULL);
6171 TREE_SIDE_EFFECTS (tf) = 1;
6172 x = DECL_SAVED_TREE (fndecl);
6173 append_to_statement_list (x, &TREE_OPERAND (tf, 0));
6174 x = implicit_built_in_decls[BUILT_IN_PROFILE_FUNC_EXIT];
6175 x = build_function_call_expr (x, NULL);
6176 append_to_statement_list (x, &TREE_OPERAND (tf, 1));
6177
6178 bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
6179 TREE_SIDE_EFFECTS (bind) = 1;
6180 x = implicit_built_in_decls[BUILT_IN_PROFILE_FUNC_ENTER];
6181 x = build_function_call_expr (x, NULL);
6182 append_to_statement_list (x, &BIND_EXPR_BODY (bind));
6183 append_to_statement_list (tf, &BIND_EXPR_BODY (bind));
6184
6185 DECL_SAVED_TREE (fndecl) = bind;
6186 }
6187
6188 current_function_decl = oldfn;
6189 cfun = oldfn ? DECL_STRUCT_FUNCTION (oldfn) : NULL;
6190 }
6191
6192 \f
6193 /* Expand EXPR into a list of GIMPLE statements in STMTS. If SIMPLE is true,
6194 force the result to be either an SSA_NAME or an invariant; otherwise
6195 just force it to be a GIMPLE rhs expression. If VAR is not NULL, make the
6196 base variable of the final destination be VAR if suitable. */
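/* For instance, with SIMPLE true an EXPR such as "a + b * c" might be
   returned as a temporary T while STMTS receives something like
     T.1 = b * c;
     T = a + T.1;
   (temporary names purely illustrative).  */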
6197
6198 tree
6199 force_gimple_operand (tree expr, tree *stmts, bool simple, tree var)
6200 {
6201 tree t;
6202 enum gimplify_status ret;
6203 gimple_predicate gimple_test_f;
6204
6205 *stmts = NULL_TREE;
6206
6207 if (is_gimple_val (expr))
6208 return expr;
6209
6210 gimple_test_f = simple ? is_gimple_val : is_gimple_reg_rhs;
6211
6212 push_gimplify_context ();
6213 gimplify_ctxp->into_ssa = in_ssa_p;
6214
6215 if (var)
6216 expr = build2 (MODIFY_EXPR, TREE_TYPE (var), var, expr);
6217
6218 ret = gimplify_expr (&expr, stmts, NULL,
6219 gimple_test_f, fb_rvalue);
6220 gcc_assert (ret != GS_ERROR);
6221
6222 if (referenced_vars)
6223 {
6224 for (t = gimplify_ctxp->temps; t ; t = TREE_CHAIN (t))
6225 add_referenced_tmp_var (t);
6226 }
6227
6228 pop_gimplify_context (NULL);
6229
6230 return expr;
6231 }
6232
6233 /* Invoke force_gimple_operand for EXPR with parameters SIMPLE_P and VAR. If
6234 any statements are produced, insert them before BSI. */
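/* For example, a pass that needs an expression as a GIMPLE operand
   just before the statement at BSI might do
     op = force_gimple_operand_bsi (&bsi, expr, true, NULL_TREE);
   and then use OP directly when building its new statement.  */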
6235
6236 tree
6237 force_gimple_operand_bsi (block_stmt_iterator *bsi, tree expr,
6238 bool simple_p, tree var)
6239 {
6240 tree stmts;
6241
6242 expr = force_gimple_operand (expr, &stmts, simple_p, var);
6243 if (stmts)
6244 bsi_insert_before (bsi, stmts, BSI_SAME_STMT);
6245
6246 return expr;
6247 }
6248
6249 #include "gt-gimplify.h"