1 /* Tree lowering pass. This pass converts the GENERIC functions-as-trees
2 tree representation into the GIMPLE form.
3 Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007
4 Free Software Foundation, Inc.
5 Major work done by Sebastian Pop <s.pop@laposte.net>,
6 Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.
7
8 This file is part of GCC.
9
10 GCC is free software; you can redistribute it and/or modify it under
11 the terms of the GNU General Public License as published by the Free
12 Software Foundation; either version 2, or (at your option) any later
13 version.
14
15 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
16 WARRANTY; without even the implied warranty of MERCHANTABILITY or
17 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 for more details.
19
20 You should have received a copy of the GNU General Public License
21 along with GCC; see the file COPYING. If not, write to the Free
22 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
23 02110-1301, USA. */
24
25 #include "config.h"
26 #include "system.h"
27 #include "coretypes.h"
28 #include "tm.h"
29 #include "tree.h"
30 #include "rtl.h"
31 #include "varray.h"
32 #include "tree-gimple.h"
33 #include "tree-inline.h"
34 #include "diagnostic.h"
35 #include "langhooks.h"
36 #include "langhooks-def.h"
37 #include "tree-flow.h"
38 #include "cgraph.h"
39 #include "timevar.h"
40 #include "except.h"
41 #include "hashtab.h"
42 #include "flags.h"
43 #include "real.h"
44 #include "function.h"
45 #include "output.h"
46 #include "expr.h"
47 #include "ggc.h"
48 #include "toplev.h"
49 #include "target.h"
50 #include "optabs.h"
51 #include "pointer-set.h"
52
53
54 enum gimplify_omp_var_data
55 {
56 GOVD_SEEN = 1,
57 GOVD_EXPLICIT = 2,
58 GOVD_SHARED = 4,
59 GOVD_PRIVATE = 8,
60 GOVD_FIRSTPRIVATE = 16,
61 GOVD_LASTPRIVATE = 32,
62 GOVD_REDUCTION = 64,
63 GOVD_LOCAL = 128,
64 GOVD_DEBUG_PRIVATE = 256,
65 GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
66 | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LOCAL)
67 };
68
69 struct gimplify_omp_ctx
70 {
71 struct gimplify_omp_ctx *outer_context;
72 splay_tree variables;
73 struct pointer_set_t *privatized_types;
74 location_t location;
75 enum omp_clause_default_kind default_kind;
76 bool is_parallel;
77 bool is_combined_parallel;
78 };
79
80 struct gimplify_ctx
81 {
82 struct gimplify_ctx *prev_context;
83
84 tree current_bind_expr;
85 tree temps;
86 tree conditional_cleanups;
87 tree exit_label;
88 tree return_temp;
89
90 VEC(tree,heap) *case_labels;
91 /* The formal temporary table. Should this be persistent? */
92 htab_t temp_htab;
93
94 int conditions;
95 bool save_stack;
96 bool into_ssa;
97 };
98
99 static struct gimplify_ctx *gimplify_ctxp;
100 static struct gimplify_omp_ctx *gimplify_omp_ctxp;
101
102
103
104 /* Formal (expression) temporary table handling: Multiple occurrences of
105 the same scalar expression are evaluated into the same temporary. */
106
107 typedef struct gimple_temp_hash_elt
108 {
109 tree val; /* Key */
110 tree temp; /* Value */
111 } elt_t;
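
/* As a rough illustration of the table's purpose: with optimization
   enabled, gimplifying a statement such as

     x = (a + b) + (a + b);

   evaluates both occurrences of the scalar expression a + b into one
   and the same formal temporary, so the result is roughly

     T.1 = a + b;
     x = T.1 + T.1;

   (the temporary's actual name comes from create_tmp_var_name, and any
   redundant re-evaluation of the shared temporary is trivial for later
   passes to remove).  */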
112
113 /* Forward declarations. */
114 static enum gimplify_status gimplify_compound_expr (tree *, tree *, bool);
115 #ifdef ENABLE_CHECKING
116 static bool cpt_same_type (tree a, tree b);
117 #endif
118
119
120 /* Return a hash value for a formal temporary table entry. */
121
122 static hashval_t
123 gimple_tree_hash (const void *p)
124 {
125 tree t = ((const elt_t *) p)->val;
126 return iterative_hash_expr (t, 0);
127 }
128
129 /* Compare two formal temporary table entries. */
130
131 static int
132 gimple_tree_eq (const void *p1, const void *p2)
133 {
134 tree t1 = ((const elt_t *) p1)->val;
135 tree t2 = ((const elt_t *) p2)->val;
136 enum tree_code code = TREE_CODE (t1);
137
138 if (TREE_CODE (t2) != code
139 || TREE_TYPE (t1) != TREE_TYPE (t2))
140 return 0;
141
142 if (!operand_equal_p (t1, t2, 0))
143 return 0;
144
145 /* Only allow them to compare equal if they also hash equal; otherwise
146 results are nondeterministic, and we fail bootstrap comparison. */
147 gcc_assert (gimple_tree_hash (p1) == gimple_tree_hash (p2));
148
149 return 1;
150 }
151
152 /* Set up a context for the gimplifier. */
153
154 void
155 push_gimplify_context (void)
156 {
157 struct gimplify_ctx *c;
158
159 c = (struct gimplify_ctx *) xcalloc (1, sizeof (struct gimplify_ctx));
160 c->prev_context = gimplify_ctxp;
161 if (optimize)
162 c->temp_htab = htab_create (1000, gimple_tree_hash, gimple_tree_eq, free);
163
164 gimplify_ctxp = c;
165 }
166
167 /* Tear down a context for the gimplifier. If BODY is non-null, then
168 put the temporaries into the outer BIND_EXPR. Otherwise, put them
169 in the unexpanded_var_list. */
170
171 void
172 pop_gimplify_context (tree body)
173 {
174 struct gimplify_ctx *c = gimplify_ctxp;
175 tree t;
176
177 gcc_assert (c && !c->current_bind_expr);
178 gimplify_ctxp = c->prev_context;
179
180 for (t = c->temps; t ; t = TREE_CHAIN (t))
181 DECL_GIMPLE_FORMAL_TEMP_P (t) = 0;
182
183 if (body)
184 declare_vars (c->temps, body, false);
185 else
186 record_vars (c->temps);
187
188 if (optimize)
189 htab_delete (c->temp_htab);
190 free (c);
191 }
192
193 static void
194 gimple_push_bind_expr (tree bind)
195 {
196 TREE_CHAIN (bind) = gimplify_ctxp->current_bind_expr;
197 gimplify_ctxp->current_bind_expr = bind;
198 }
199
200 static void
201 gimple_pop_bind_expr (void)
202 {
203 gimplify_ctxp->current_bind_expr
204 = TREE_CHAIN (gimplify_ctxp->current_bind_expr);
205 }
206
207 tree
208 gimple_current_bind_expr (void)
209 {
210 return gimplify_ctxp->current_bind_expr;
211 }
212
213 /* Returns true iff there is a COND_EXPR between us and the innermost
214 CLEANUP_POINT_EXPR. This info is used by gimple_push_cleanup. */
215
216 static bool
217 gimple_conditional_context (void)
218 {
219 return gimplify_ctxp->conditions > 0;
220 }
221
222 /* Note that we've entered a COND_EXPR. */
223
224 static void
225 gimple_push_condition (void)
226 {
227 #ifdef ENABLE_CHECKING
228 if (gimplify_ctxp->conditions == 0)
229 gcc_assert (!gimplify_ctxp->conditional_cleanups);
230 #endif
231 ++(gimplify_ctxp->conditions);
232 }
233
234 /* Note that we've left a COND_EXPR. If we're back at unconditional scope
235 now, add any conditional cleanups we've seen to the prequeue. */
236
237 static void
238 gimple_pop_condition (tree *pre_p)
239 {
240 int conds = --(gimplify_ctxp->conditions);
241
242 gcc_assert (conds >= 0);
243 if (conds == 0)
244 {
245 append_to_statement_list (gimplify_ctxp->conditional_cleanups, pre_p);
246 gimplify_ctxp->conditional_cleanups = NULL_TREE;
247 }
248 }
249
250 /* A stable comparison routine for use with splay trees and DECLs. */
251
252 static int
253 splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
254 {
255 tree a = (tree) xa;
256 tree b = (tree) xb;
257
258 return DECL_UID (a) - DECL_UID (b);
259 }
260
261 /* Create a new omp construct that deals with variable remapping. */
262
263 static struct gimplify_omp_ctx *
264 new_omp_context (bool is_parallel, bool is_combined_parallel)
265 {
266 struct gimplify_omp_ctx *c;
267
268 c = XCNEW (struct gimplify_omp_ctx);
269 c->outer_context = gimplify_omp_ctxp;
270 c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
271 c->privatized_types = pointer_set_create ();
272 c->location = input_location;
273 c->is_parallel = is_parallel;
274 c->is_combined_parallel = is_combined_parallel;
275 c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
276
277 return c;
278 }
279
280 /* Destroy an omp construct that deals with variable remapping. */
281
282 static void
283 delete_omp_context (struct gimplify_omp_ctx *c)
284 {
285 splay_tree_delete (c->variables);
286 pointer_set_destroy (c->privatized_types);
287 XDELETE (c);
288 }
289
290 static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
291 static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);
292
293 /* A subroutine of append_to_statement_list{,_force}. T is not NULL. */
294
295 static void
296 append_to_statement_list_1 (tree t, tree *list_p)
297 {
298 tree list = *list_p;
299 tree_stmt_iterator i;
300
301 if (!list)
302 {
303 if (t && TREE_CODE (t) == STATEMENT_LIST)
304 {
305 *list_p = t;
306 return;
307 }
308 *list_p = list = alloc_stmt_list ();
309 }
310
311 i = tsi_last (list);
312 tsi_link_after (&i, t, TSI_CONTINUE_LINKING);
313 }
314
315 /* Add T to the end of the list container pointed to by LIST_P.
316 If T is an expression with no effects, it is ignored. */
317
318 void
319 append_to_statement_list (tree t, tree *list_p)
320 {
321 if (t && TREE_SIDE_EFFECTS (t))
322 append_to_statement_list_1 (t, list_p);
323 }
324
325 /* Similar, but the statement is always added, regardless of side effects. */
326
327 void
328 append_to_statement_list_force (tree t, tree *list_p)
329 {
330 if (t != NULL_TREE)
331 append_to_statement_list_1 (t, list_p);
332 }
333
334 /* Both gimplify the statement T and append it to LIST_P. */
335
336 void
337 gimplify_and_add (tree t, tree *list_p)
338 {
339 gimplify_stmt (&t);
340 append_to_statement_list (t, list_p);
341 }
342
343 /* Strip off a legitimate source ending from the input string NAME of
344 length LEN. Rather than having to know the names used by all of
345 our front ends, we strip off an ending of a period followed by
346 up to five characters. (Java uses ".class".) */
347
348 static inline void
349 remove_suffix (char *name, int len)
350 {
351 int i;
352
353 for (i = 2; i < 8 && len > i; i++)
354 {
355 if (name[len - i] == '.')
356 {
357 name[len - i] = '\0';
358 break;
359 }
360 }
361 }
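
/* For example, given the string "foo.c" with LEN 5, the loop above
   finds the '.' at position LEN - 2 and truncates the string to "foo";
   likewise "Test.class" (LEN 10) becomes "Test".  A name whose last
   few characters contain no period is left unchanged.  */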
362
363 /* Create a nameless artificial label and put it in the current function
364 context. Returns the newly created label. */
365
366 tree
367 create_artificial_label (void)
368 {
369 tree lab = build_decl (LABEL_DECL, NULL_TREE, void_type_node);
370
371 DECL_ARTIFICIAL (lab) = 1;
372 DECL_IGNORED_P (lab) = 1;
373 DECL_CONTEXT (lab) = current_function_decl;
374 return lab;
375 }
376
377 /* Subroutine for find_single_pointer_decl. */
378
379 static tree
380 find_single_pointer_decl_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
381 void *data)
382 {
383 tree *pdecl = (tree *) data;
384
385 if (DECL_P (*tp) && POINTER_TYPE_P (TREE_TYPE (*tp)))
386 {
387 if (*pdecl)
388 {
389 /* We already found a pointer decl; return anything other
390 than NULL_TREE to unwind from walk_tree signalling that
391 we have a duplicate. */
392 return *tp;
393 }
394 *pdecl = *tp;
395 }
396
397 return NULL_TREE;
398 }
399
400 /* Find the single DECL of pointer type in the tree T and return it.
401 If there are zero or more than one such DECLs, return NULL. */
402
403 static tree
404 find_single_pointer_decl (tree t)
405 {
406 tree decl = NULL_TREE;
407
408 if (walk_tree (&t, find_single_pointer_decl_1, &decl, NULL))
409 {
410 /* find_single_pointer_decl_1 returns a nonzero value, causing
411 walk_tree to return a nonzero value, to indicate that it
412 found more than one pointer DECL. */
413 return NULL_TREE;
414 }
415
416 return decl;
417 }
418
419 /* Create a new temporary name with PREFIX. Returns an identifier. */
420
421 static GTY(()) unsigned int tmp_var_id_num;
422
423 tree
424 create_tmp_var_name (const char *prefix)
425 {
426 char *tmp_name;
427
428 if (prefix)
429 {
430 char *preftmp = ASTRDUP (prefix);
431
432 remove_suffix (preftmp, strlen (preftmp));
433 prefix = preftmp;
434 }
435
436 ASM_FORMAT_PRIVATE_NAME (tmp_name, prefix ? prefix : "T", tmp_var_id_num++);
437 return get_identifier (tmp_name);
438 }
439
440
441 /* Create a new temporary variable declaration of type TYPE.
442 Does NOT push it into the current binding. */
443
444 tree
445 create_tmp_var_raw (tree type, const char *prefix)
446 {
447 tree tmp_var;
448 tree new_type;
449
450 /* Make the type of the variable writable. */
451 new_type = build_type_variant (type, 0, 0);
452 TYPE_ATTRIBUTES (new_type) = TYPE_ATTRIBUTES (type);
453
454 tmp_var = build_decl (VAR_DECL, prefix ? create_tmp_var_name (prefix) : NULL,
455 type);
456
457 /* The variable was declared by the compiler. */
458 DECL_ARTIFICIAL (tmp_var) = 1;
459 /* And we don't want debug info for it. */
460 DECL_IGNORED_P (tmp_var) = 1;
461
462 /* Make the variable writable. */
463 TREE_READONLY (tmp_var) = 0;
464
465 DECL_EXTERNAL (tmp_var) = 0;
466 TREE_STATIC (tmp_var) = 0;
467 TREE_USED (tmp_var) = 1;
468
469 return tmp_var;
470 }
471
472 /* Create a new temporary variable declaration of type TYPE. DOES push the
473 variable into the current binding. Further, assume that this is called
474 only from gimplification or optimization, at which point the creation of
475 certain types is a bug. */
476
477 tree
478 create_tmp_var (tree type, const char *prefix)
479 {
480 tree tmp_var;
481
482 /* We don't allow types that are addressable (meaning we can't make copies),
483 or incomplete. We also used to reject all variable-sized objects here,
484 but now support those for which a constant upper bound can be obtained.
485 The processing for variable sizes is performed in gimple_add_tmp_var,
486 the point at which it really matters; that point may also be reached via
487 paths that bypass this function, e.g. after direct calls to create_tmp_var_raw. */
488 gcc_assert (!TREE_ADDRESSABLE (type) && COMPLETE_TYPE_P (type));
489
490 tmp_var = create_tmp_var_raw (type, prefix);
491 gimple_add_tmp_var (tmp_var);
492 return tmp_var;
493 }
494
495 /* Given a tree, try to return a useful variable name that we can use
496 to prefix a temporary that is being assigned the value of the tree.
497 I.e., given <temp> = &A, return A. */
498
499 const char *
500 get_name (tree t)
501 {
502 tree stripped_decl;
503
504 stripped_decl = t;
505 STRIP_NOPS (stripped_decl);
506 if (DECL_P (stripped_decl) && DECL_NAME (stripped_decl))
507 return IDENTIFIER_POINTER (DECL_NAME (stripped_decl));
508 else
509 {
510 switch (TREE_CODE (stripped_decl))
511 {
512 case ADDR_EXPR:
513 return get_name (TREE_OPERAND (stripped_decl, 0));
514 default:
515 return NULL;
516 }
517 }
518 }
519
520 /* Create a temporary with a name derived from VAL. Subroutine of
521 lookup_tmp_var; nobody else should call this function. */
522
523 static inline tree
524 create_tmp_from_val (tree val)
525 {
526 return create_tmp_var (TYPE_MAIN_VARIANT (TREE_TYPE (val)), get_name (val));
527 }
528
529 /* Create a temporary to hold the value of VAL. If IS_FORMAL, try to reuse
530 an existing expression temporary. */
531
532 static tree
533 lookup_tmp_var (tree val, bool is_formal)
534 {
535 tree ret;
536
537 /* If not optimizing, never really reuse a temporary. local-alloc
538 won't allocate any variable that is used in more than one basic
539 block, which means it will go into memory, causing much extra
540 work in reload and final and poorer code generation, outweighing
541 the extra memory allocation here. */
542 if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
543 ret = create_tmp_from_val (val);
544 else
545 {
546 elt_t elt, *elt_p;
547 void **slot;
548
549 elt.val = val;
550 slot = htab_find_slot (gimplify_ctxp->temp_htab, (void *)&elt, INSERT);
551 if (*slot == NULL)
552 {
553 elt_p = XNEW (elt_t);
554 elt_p->val = val;
555 elt_p->temp = ret = create_tmp_from_val (val);
556 *slot = (void *) elt_p;
557 }
558 else
559 {
560 elt_p = (elt_t *) *slot;
561 ret = elt_p->temp;
562 }
563 }
564
565 if (is_formal)
566 DECL_GIMPLE_FORMAL_TEMP_P (ret) = 1;
567
568 return ret;
569 }
570
571 /* Returns a formal temporary variable initialized with VAL. PRE_P is as
572 in gimplify_expr. Only use this function if:
573
574 1) The value of the unfactored expression represented by VAL will not
575 change between the initialization and use of the temporary, and
576 2) The temporary will not be otherwise modified.
577
578 For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
579 and #2 means it is inappropriate for && temps.
580
581 For other cases, use get_initialized_tmp_var instead. */
582
583 static tree
584 internal_get_tmp_var (tree val, tree *pre_p, tree *post_p, bool is_formal)
585 {
586 tree t, mod;
587
588 gimplify_expr (&val, pre_p, post_p, is_gimple_formal_tmp_rhs, fb_rvalue);
589
590 t = lookup_tmp_var (val, is_formal);
591
592 if (is_formal)
593 {
594 tree u = find_single_pointer_decl (val);
595
596 if (u && TREE_CODE (u) == VAR_DECL && DECL_BASED_ON_RESTRICT_P (u))
597 u = DECL_GET_RESTRICT_BASE (u);
598 if (u && TYPE_RESTRICT (TREE_TYPE (u)))
599 {
600 if (DECL_BASED_ON_RESTRICT_P (t))
601 gcc_assert (u == DECL_GET_RESTRICT_BASE (t));
602 else
603 {
604 DECL_BASED_ON_RESTRICT_P (t) = 1;
605 SET_DECL_RESTRICT_BASE (t, u);
606 }
607 }
608 }
609
610 if (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
611 || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
612 DECL_GIMPLE_REG_P (t) = 1;
613
614 mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));
615
616 if (EXPR_HAS_LOCATION (val))
617 SET_EXPR_LOCUS (mod, EXPR_LOCUS (val));
618 else
619 SET_EXPR_LOCATION (mod, input_location);
620
621 /* gimplify_modify_expr might want to reduce this further. */
622 gimplify_and_add (mod, pre_p);
623
624 /* If we're gimplifying into ssa, gimplify_modify_expr will have
625 given our temporary an ssa name. Find and return it. */
626 if (gimplify_ctxp->into_ssa)
627 t = TREE_OPERAND (mod, 0);
628
629 return t;
630 }
631
632 /* Returns a formal temporary variable initialized with VAL. PRE_P
633 points to a statement list where side-effects needed to compute VAL
634 should be stored. */
635
636 tree
637 get_formal_tmp_var (tree val, tree *pre_p)
638 {
639 return internal_get_tmp_var (val, pre_p, NULL, true);
640 }
641
642 /* Returns a temporary variable initialized with VAL. PRE_P and POST_P
643 are as in gimplify_expr. */
644
645 tree
646 get_initialized_tmp_var (tree val, tree *pre_p, tree *post_p)
647 {
648 return internal_get_tmp_var (val, pre_p, post_p, false);
649 }
650
651 /* Declares all the variables in VARS in SCOPE. If DEBUG_INFO is
652 true, generate debug info for them; otherwise don't. */
653
654 void
655 declare_vars (tree vars, tree scope, bool debug_info)
656 {
657 tree last = vars;
658 if (last)
659 {
660 tree temps, block;
661
662 /* C99 mode puts the default 'return 0;' for main outside the outer
663 braces. So drill down until we find an actual scope. */
664 while (TREE_CODE (scope) == COMPOUND_EXPR)
665 scope = TREE_OPERAND (scope, 0);
666
667 gcc_assert (TREE_CODE (scope) == BIND_EXPR);
668
669 temps = nreverse (last);
670
671 block = BIND_EXPR_BLOCK (scope);
672 if (!block || !debug_info)
673 {
674 TREE_CHAIN (last) = BIND_EXPR_VARS (scope);
675 BIND_EXPR_VARS (scope) = temps;
676 }
677 else
678 {
679 /* We need to attach the nodes both to the BIND_EXPR and to its
680 associated BLOCK for debugging purposes. The key point here
681 is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
682 is a subchain of the BIND_EXPR_VARS of the BIND_EXPR. */
683 if (BLOCK_VARS (block))
684 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
685 else
686 {
687 BIND_EXPR_VARS (scope) = chainon (BIND_EXPR_VARS (scope), temps);
688 BLOCK_VARS (block) = temps;
689 }
690 }
691 }
692 }
693
694 /* For VAR a VAR_DECL of variable size, try to find a constant upper bound
695 for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly. Abort if
696 no such upper bound can be obtained. */
697
698 static void
699 force_constant_size (tree var)
700 {
701 /* The only attempt we make is by querying the maximum size of objects
702 of the variable's type. */
703
704 HOST_WIDE_INT max_size;
705
706 gcc_assert (TREE_CODE (var) == VAR_DECL);
707
708 max_size = max_int_size_in_bytes (TREE_TYPE (var));
709
710 gcc_assert (max_size >= 0);
711
712 DECL_SIZE_UNIT (var)
713 = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
714 DECL_SIZE (var)
715 = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
716 }
717
718 void
719 gimple_add_tmp_var (tree tmp)
720 {
721 gcc_assert (!TREE_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));
722
723 /* Later processing assumes that the object size is constant, which might
724 not be true at this point. Force the use of a constant upper bound in
725 this case. */
726 if (!host_integerp (DECL_SIZE_UNIT (tmp), 1))
727 force_constant_size (tmp);
728
729 DECL_CONTEXT (tmp) = current_function_decl;
730 DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;
731
732 if (gimplify_ctxp)
733 {
734 TREE_CHAIN (tmp) = gimplify_ctxp->temps;
735 gimplify_ctxp->temps = tmp;
736
737 /* Mark temporaries local within the nearest enclosing parallel. */
738 if (gimplify_omp_ctxp)
739 {
740 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
741 while (ctx && !ctx->is_parallel)
742 ctx = ctx->outer_context;
743 if (ctx)
744 omp_add_variable (ctx, tmp, GOVD_LOCAL | GOVD_SEEN);
745 }
746 }
747 else if (cfun)
748 record_vars (tmp);
749 else
750 declare_vars (tmp, DECL_SAVED_TREE (current_function_decl), false);
751 }
752
753 /* Determines whether to assign a locus to the statement STMT. */
754
755 static bool
756 should_carry_locus_p (tree stmt)
757 {
758 /* Don't emit a line note for a label. We particularly don't want to
759 emit one for the break label, since it doesn't actually correspond
760 to the beginning of the loop/switch. */
761 if (TREE_CODE (stmt) == LABEL_EXPR)
762 return false;
763
764 /* Do not annotate empty statements, since it confuses gcov. */
765 if (!TREE_SIDE_EFFECTS (stmt))
766 return false;
767
768 return true;
769 }
770
771 static void
772 annotate_one_with_locus (tree t, location_t locus)
773 {
774 if (CAN_HAVE_LOCATION_P (t)
775 && ! EXPR_HAS_LOCATION (t) && should_carry_locus_p (t))
776 SET_EXPR_LOCATION (t, locus);
777 }
778
779 void
780 annotate_all_with_locus (tree *stmt_p, location_t locus)
781 {
782 tree_stmt_iterator i;
783
784 if (!*stmt_p)
785 return;
786
787 for (i = tsi_start (*stmt_p); !tsi_end_p (i); tsi_next (&i))
788 {
789 tree t = tsi_stmt (i);
790
791 /* Assuming we've already been gimplified, we shouldn't
792 see nested chaining constructs anymore. */
793 gcc_assert (TREE_CODE (t) != STATEMENT_LIST
794 && TREE_CODE (t) != COMPOUND_EXPR);
795
796 annotate_one_with_locus (t, locus);
797 }
798 }
799
800 /* Similar to copy_tree_r() but do not copy SAVE_EXPR or TARGET_EXPR nodes.
801 These nodes model computations that should only be done once. If we
802 were to unshare something like SAVE_EXPR(i++), the gimplification
803 process would create wrong code. */
804
805 static tree
806 mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
807 {
808 enum tree_code code = TREE_CODE (*tp);
809 /* Don't unshare types, decls, constants and SAVE_EXPR nodes. */
810 if (TREE_CODE_CLASS (code) == tcc_type
811 || TREE_CODE_CLASS (code) == tcc_declaration
812 || TREE_CODE_CLASS (code) == tcc_constant
813 || code == SAVE_EXPR || code == TARGET_EXPR
814 /* We can't do anything sensible with a BLOCK used as an expression,
815 but we also can't just die when we see it because of non-expression
816 uses. So just avert our eyes and cross our fingers. Silly Java. */
817 || code == BLOCK)
818 *walk_subtrees = 0;
819 else
820 {
821 gcc_assert (code != BIND_EXPR);
822 copy_tree_r (tp, walk_subtrees, data);
823 }
824
825 return NULL_TREE;
826 }
827
828 /* Callback for walk_tree to unshare most of the shared trees rooted at
829 *TP. If *TP has been visited already (i.e., TREE_VISITED (*TP) == 1),
830 then *TP is deep copied by calling copy_tree_r.
831
832 This unshares the same trees as copy_tree_r with the exception of
833 SAVE_EXPR nodes. These nodes model computations that should only be
834 done once. If we were to unshare something like SAVE_EXPR(i++), the
835 gimplification process would create wrong code. */
836
837 static tree
838 copy_if_shared_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
839 void *data ATTRIBUTE_UNUSED)
840 {
841 tree t = *tp;
842 enum tree_code code = TREE_CODE (t);
843
844 /* Skip types, decls, and constants. But we do want to look at their
845 types and the bounds of types. Mark them as visited so we properly
846 unmark their subtrees on the unmark pass. If we've already seen them,
847 don't look down further. */
848 if (TREE_CODE_CLASS (code) == tcc_type
849 || TREE_CODE_CLASS (code) == tcc_declaration
850 || TREE_CODE_CLASS (code) == tcc_constant)
851 {
852 if (TREE_VISITED (t))
853 *walk_subtrees = 0;
854 else
855 TREE_VISITED (t) = 1;
856 }
857
858 /* If this node has been visited already, unshare it and don't look
859 any deeper. */
860 else if (TREE_VISITED (t))
861 {
862 walk_tree (tp, mostly_copy_tree_r, NULL, NULL);
863 *walk_subtrees = 0;
864 }
865
866 /* Otherwise, mark the tree as visited and keep looking. */
867 else
868 TREE_VISITED (t) = 1;
869
870 return NULL_TREE;
871 }
872
873 static tree
874 unmark_visited_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
875 void *data ATTRIBUTE_UNUSED)
876 {
877 if (TREE_VISITED (*tp))
878 TREE_VISITED (*tp) = 0;
879 else
880 *walk_subtrees = 0;
881
882 return NULL_TREE;
883 }
884
885 /* Unshare all the trees in BODY_P, a pointer into the body of FNDECL, and the
886 bodies of any nested functions if we are unsharing the entire body of
887 FNDECL. */
888
889 static void
890 unshare_body (tree *body_p, tree fndecl)
891 {
892 struct cgraph_node *cgn = cgraph_node (fndecl);
893
894 walk_tree (body_p, copy_if_shared_r, NULL, NULL);
895 if (body_p == &DECL_SAVED_TREE (fndecl))
896 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
897 unshare_body (&DECL_SAVED_TREE (cgn->decl), cgn->decl);
898 }
899
900 /* Likewise, but mark all trees as not visited. */
901
902 static void
903 unvisit_body (tree *body_p, tree fndecl)
904 {
905 struct cgraph_node *cgn = cgraph_node (fndecl);
906
907 walk_tree (body_p, unmark_visited_r, NULL, NULL);
908 if (body_p == &DECL_SAVED_TREE (fndecl))
909 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
910 unvisit_body (&DECL_SAVED_TREE (cgn->decl), cgn->decl);
911 }
912
913 /* Unshare T and all the trees reached from T via TREE_CHAIN. */
914
915 static void
916 unshare_all_trees (tree t)
917 {
918 walk_tree (&t, copy_if_shared_r, NULL, NULL);
919 walk_tree (&t, unmark_visited_r, NULL, NULL);
920 }
921
922 /* Unconditionally make an unshared copy of EXPR. This is used when using
923 stored expressions which span multiple functions, such as BINFO_VTABLE,
924 as the normal unsharing process can't tell that they're shared. */
925
926 tree
927 unshare_expr (tree expr)
928 {
929 walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
930 return expr;
931 }
932
933 /* A terser interface for building a representation of an exception
934 specification. */
935
936 tree
937 gimple_build_eh_filter (tree body, tree allowed, tree failure)
938 {
939 tree t;
940
941 /* FIXME should the allowed types go in TREE_TYPE? */
942 t = build2 (EH_FILTER_EXPR, void_type_node, allowed, NULL_TREE);
943 append_to_statement_list (failure, &EH_FILTER_FAILURE (t));
944
945 t = build2 (TRY_CATCH_EXPR, void_type_node, NULL_TREE, t);
946 append_to_statement_list (body, &TREE_OPERAND (t, 0));
947
948 return t;
949 }
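
/* A sketch of the tree built here, in GENERIC-like notation:

     TRY_CATCH_EXPR
       <BODY>
       EH_FILTER_EXPR <ALLOWED>
         EH_FILTER_FAILURE <FAILURE>

   i.e. BODY executes normally, and an exception whose type does not
   match ALLOWED transfers control to FAILURE.  */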
950
951 \f
952 /* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
953 contain statements and have a value. Assign its value to a temporary
954 and give it void_type_node. Returns the temporary, or NULL_TREE if
955 WRAPPER was already void. */
956
957 tree
958 voidify_wrapper_expr (tree wrapper, tree temp)
959 {
960 tree type = TREE_TYPE (wrapper);
961 if (type && !VOID_TYPE_P (type))
962 {
963 tree *p;
964
965 /* Set p to point to the body of the wrapper. Loop until we find
966 something that isn't a wrapper. */
967 for (p = &wrapper; p && *p; )
968 {
969 switch (TREE_CODE (*p))
970 {
971 case BIND_EXPR:
972 TREE_SIDE_EFFECTS (*p) = 1;
973 TREE_TYPE (*p) = void_type_node;
974 /* For a BIND_EXPR, the body is operand 1. */
975 p = &BIND_EXPR_BODY (*p);
976 break;
977
978 case CLEANUP_POINT_EXPR:
979 case TRY_FINALLY_EXPR:
980 case TRY_CATCH_EXPR:
981 TREE_SIDE_EFFECTS (*p) = 1;
982 TREE_TYPE (*p) = void_type_node;
983 p = &TREE_OPERAND (*p, 0);
984 break;
985
986 case STATEMENT_LIST:
987 {
988 tree_stmt_iterator i = tsi_last (*p);
989 TREE_SIDE_EFFECTS (*p) = 1;
990 TREE_TYPE (*p) = void_type_node;
991 p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
992 }
993 break;
994
995 case COMPOUND_EXPR:
996 /* Advance to the last statement. Set all container types to void. */
997 for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
998 {
999 TREE_SIDE_EFFECTS (*p) = 1;
1000 TREE_TYPE (*p) = void_type_node;
1001 }
1002 break;
1003
1004 default:
1005 goto out;
1006 }
1007 }
1008
1009 out:
1010 if (p == NULL || IS_EMPTY_STMT (*p))
1011 temp = NULL_TREE;
1012 else if (temp)
1013 {
1014 /* The wrapper is on the RHS of an assignment that we're pushing
1015 down. */
1016 gcc_assert (TREE_CODE (temp) == INIT_EXPR
1017 || TREE_CODE (temp) == GIMPLE_MODIFY_STMT
1018 || TREE_CODE (temp) == MODIFY_EXPR);
1019 GENERIC_TREE_OPERAND (temp, 1) = *p;
1020 *p = temp;
1021 }
1022 else
1023 {
1024 temp = create_tmp_var (type, "retval");
1025 *p = build2 (INIT_EXPR, type, temp, *p);
1026 }
1027
1028 return temp;
1029 }
1030
1031 return NULL_TREE;
1032 }
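
/* A rough example: a GNU statement expression used for its value,

     x = ({ int i = foo (); i + 2; });

   reaches the gimplifier as an assignment whose right-hand side is a
   BIND_EXPR.  The code above pushes the assignment down onto the last
   value-producing statement inside the wrapper (or, when no assignment
   was supplied, invents a "retval" temporary), after which the wrapper
   itself can safely be given void type:

     {
       int i = foo ();
       x = i + 2;
     }
   */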
1033
1034 /* Prepare calls to builtins to SAVE and RESTORE the stack as well as
1035 a temporary through which they communicate. */
1036
1037 static void
1038 build_stack_save_restore (tree *save, tree *restore)
1039 {
1040 tree save_call, tmp_var;
1041
1042 save_call =
1043 build_function_call_expr (implicit_built_in_decls[BUILT_IN_STACK_SAVE],
1044 NULL_TREE);
1045 tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
1046
1047 *save = build2 (GIMPLE_MODIFY_STMT, ptr_type_node, tmp_var, save_call);
1048 *restore =
1049 build_function_call_expr (implicit_built_in_decls[BUILT_IN_STACK_RESTORE],
1050 tree_cons (NULL_TREE, tmp_var, NULL_TREE));
1051 }
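
/* The two statements built here look roughly like

     saved_stack.N = __builtin_stack_save ();
     ...
     __builtin_stack_restore (saved_stack.N);

   and are wrapped around a BIND_EXPR's body by gimplify_bind_expr
   whenever something inside it set gimplify_ctxp->save_stack (e.g. a
   variable-sized local), so the stack space is reclaimed on exit.  */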
1052
1053 /* Gimplify a BIND_EXPR. Just voidify and recurse. */
1054
1055 static enum gimplify_status
1056 gimplify_bind_expr (tree *expr_p, tree *pre_p)
1057 {
1058 tree bind_expr = *expr_p;
1059 bool old_save_stack = gimplify_ctxp->save_stack;
1060 tree t;
1061
1062 tree temp = voidify_wrapper_expr (bind_expr, NULL);
1063
1064 /* Mark variables seen in this bind expr. */
1065 for (t = BIND_EXPR_VARS (bind_expr); t ; t = TREE_CHAIN (t))
1066 {
1067 if (TREE_CODE (t) == VAR_DECL)
1068 {
1069 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
1070
1071 /* Mark variable as local. */
1072 if (ctx && !is_global_var (t)
1073 && (! DECL_SEEN_IN_BIND_EXPR_P (t)
1074 || splay_tree_lookup (ctx->variables,
1075 (splay_tree_key) t) == NULL))
1076 omp_add_variable (gimplify_omp_ctxp, t, GOVD_LOCAL | GOVD_SEEN);
1077
1078 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
1079 }
1080
1081 /* Preliminarily mark non-addressed complex variables as eligible
1082 for promotion to gimple registers. We'll transform their uses
1083 as we find them. */
1084 if ((TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
1085 || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
1086 && !TREE_THIS_VOLATILE (t)
1087 && (TREE_CODE (t) == VAR_DECL && !DECL_HARD_REGISTER (t))
1088 && !needs_to_live_in_memory (t))
1089 DECL_GIMPLE_REG_P (t) = 1;
1090 }
1091
1092 gimple_push_bind_expr (bind_expr);
1093 gimplify_ctxp->save_stack = false;
1094
1095 gimplify_to_stmt_list (&BIND_EXPR_BODY (bind_expr));
1096
1097 if (gimplify_ctxp->save_stack)
1098 {
1099 tree stack_save, stack_restore;
1100
1101 /* Save stack on entry and restore it on exit. Add a try_finally
1102 block to achieve this. Note that mudflap depends on the
1103 format of the emitted code: see mx_register_decls(). */
1104 build_stack_save_restore (&stack_save, &stack_restore);
1105
1106 t = build2 (TRY_FINALLY_EXPR, void_type_node,
1107 BIND_EXPR_BODY (bind_expr), NULL_TREE);
1108 append_to_statement_list (stack_restore, &TREE_OPERAND (t, 1));
1109
1110 BIND_EXPR_BODY (bind_expr) = NULL_TREE;
1111 append_to_statement_list (stack_save, &BIND_EXPR_BODY (bind_expr));
1112 append_to_statement_list (t, &BIND_EXPR_BODY (bind_expr));
1113 }
1114
1115 gimplify_ctxp->save_stack = old_save_stack;
1116 gimple_pop_bind_expr ();
1117
1118 if (temp)
1119 {
1120 *expr_p = temp;
1121 append_to_statement_list (bind_expr, pre_p);
1122 return GS_OK;
1123 }
1124 else
1125 return GS_ALL_DONE;
1126 }
1127
1128 /* Gimplify a RETURN_EXPR. If the expression to be returned is not a
1129 GIMPLE value, it is assigned to a new temporary and the statement is
1130 re-written to return the temporary.
1131
1132 PRE_P points to the list where side effects that must happen before
1133 STMT should be stored. */
1134
1135 static enum gimplify_status
1136 gimplify_return_expr (tree stmt, tree *pre_p)
1137 {
1138 tree ret_expr = TREE_OPERAND (stmt, 0);
1139 tree result_decl, result;
1140
1141 if (!ret_expr || TREE_CODE (ret_expr) == RESULT_DECL
1142 || ret_expr == error_mark_node)
1143 return GS_ALL_DONE;
1144
1145 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
1146 result_decl = NULL_TREE;
1147 else
1148 {
1149 result_decl = GENERIC_TREE_OPERAND (ret_expr, 0);
1150 if (TREE_CODE (result_decl) == INDIRECT_REF)
1151 /* See through a return by reference. */
1152 result_decl = TREE_OPERAND (result_decl, 0);
1153
1154 gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
1155 || TREE_CODE (ret_expr) == GIMPLE_MODIFY_STMT
1156 || TREE_CODE (ret_expr) == INIT_EXPR)
1157 && TREE_CODE (result_decl) == RESULT_DECL);
1158 }
1159
1160 /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
1161 Recall that aggregate_value_p is FALSE for any aggregate type that is
1162 returned in registers. If we're returning values in registers, then
1163 we don't want to extend the lifetime of the RESULT_DECL, particularly
1164 across another call. In addition, for those aggregates for which
1165 hard_function_value generates a PARALLEL, we'll die during normal
1166 expansion of structure assignments; there's special code in expand_return
1167 to handle this case that does not exist in expand_expr. */
1168 if (!result_decl
1169 || aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
1170 result = result_decl;
1171 else if (gimplify_ctxp->return_temp)
1172 result = gimplify_ctxp->return_temp;
1173 else
1174 {
1175 result = create_tmp_var (TREE_TYPE (result_decl), NULL);
1176 if (TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
1177 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
1178 DECL_GIMPLE_REG_P (result) = 1;
1179
1180 /* ??? With complex control flow (usually involving abnormal edges),
1181 we can wind up warning about an uninitialized value for this. Due
1182 to how this variable is constructed and initialized, this is never
1183 true. Give up and never warn. */
1184 TREE_NO_WARNING (result) = 1;
1185
1186 gimplify_ctxp->return_temp = result;
1187 }
1188
1189 /* Smash the lhs of the GIMPLE_MODIFY_STMT to the temporary we plan to use.
1190 Then gimplify the whole thing. */
1191 if (result != result_decl)
1192 GENERIC_TREE_OPERAND (ret_expr, 0) = result;
1193
1194 gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);
1195
1196 /* If we didn't use a temporary, then the result is just the result_decl.
1197 Otherwise we need a simple copy. This should already be gimple. */
1198 if (result == result_decl)
1199 ret_expr = result;
1200 else
1201 ret_expr = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (result), result_decl,
1202 result);
1203 TREE_OPERAND (stmt, 0) = ret_expr;
1204
1205 return GS_ALL_DONE;
1206 }
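
/* For a function whose value comes back in registers, e.g.

     int f (int a, int b) { return a + b; }

   the code above computes the value into the shared return temporary
   first and then assigns it to the RESULT_DECL, so the statement
   gimplifies to roughly

     D.1234 = a + b;
     return <retval> = D.1234;

   (temporary names and dump syntax are illustrative).  Values that
   aggregate_value_p says are returned in memory keep the bare
   RESULT_DECL instead.  */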
1207
1208 /* Gimplifies a DECL_EXPR node *STMT_P by making any necessary allocation
1209 and initialization explicit. */
1210
1211 static enum gimplify_status
1212 gimplify_decl_expr (tree *stmt_p)
1213 {
1214 tree stmt = *stmt_p;
1215 tree decl = DECL_EXPR_DECL (stmt);
1216
1217 *stmt_p = NULL_TREE;
1218
1219 if (TREE_TYPE (decl) == error_mark_node)
1220 return GS_ERROR;
1221
1222 if ((TREE_CODE (decl) == TYPE_DECL
1223 || TREE_CODE (decl) == VAR_DECL)
1224 && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
1225 gimplify_type_sizes (TREE_TYPE (decl), stmt_p);
1226
1227 if (TREE_CODE (decl) == VAR_DECL && !DECL_EXTERNAL (decl))
1228 {
1229 tree init = DECL_INITIAL (decl);
1230
1231 if (TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1232 {
1233 /* This is a variable-sized decl. Simplify its size and mark it
1234 for deferred expansion. Note that mudflap depends on the format
1235 of the emitted code: see mx_register_decls(). */
1236 tree t, args, addr, ptr_type;
1237
1238 gimplify_one_sizepos (&DECL_SIZE (decl), stmt_p);
1239 gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), stmt_p);
1240
1241 /* All occurrences of this decl in final gimplified code will be
1242 replaced by indirection. Setting DECL_VALUE_EXPR does two
1243 things: First, it lets the rest of the gimplifier know what
1244 replacement to use. Second, it lets the debug info know
1245 where to find the value. */
1246 ptr_type = build_pointer_type (TREE_TYPE (decl));
1247 addr = create_tmp_var (ptr_type, get_name (decl));
1248 DECL_IGNORED_P (addr) = 0;
1249 t = build_fold_indirect_ref (addr);
1250 SET_DECL_VALUE_EXPR (decl, t);
1251 DECL_HAS_VALUE_EXPR_P (decl) = 1;
1252
1253 args = tree_cons (NULL, DECL_SIZE_UNIT (decl), NULL);
1254 t = built_in_decls[BUILT_IN_ALLOCA];
1255 t = build_function_call_expr (t, args);
1256 t = fold_convert (ptr_type, t);
1257 t = build2 (GIMPLE_MODIFY_STMT, void_type_node, addr, t);
1258
1259 gimplify_and_add (t, stmt_p);
1260
1261 /* Indicate that we need to restore the stack level when the
1262 enclosing BIND_EXPR is exited. */
1263 gimplify_ctxp->save_stack = true;
1264 }
1265
1266 if (init && init != error_mark_node)
1267 {
1268 if (!TREE_STATIC (decl))
1269 {
1270 DECL_INITIAL (decl) = NULL_TREE;
1271 init = build2 (INIT_EXPR, void_type_node, decl, init);
1272 gimplify_and_add (init, stmt_p);
1273 }
1274 else
1275 /* We must still examine initializers for static variables
1276 as they may contain a label address. */
1277 walk_tree (&init, force_labels_r, NULL, NULL);
1278 }
1279
1280 /* Some front ends do not explicitly declare all anonymous
1281 artificial variables. We compensate here by declaring the
1282 variables, though it would be better if the front ends would
1283 explicitly declare them. */
1284 if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
1285 && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
1286 gimple_add_tmp_var (decl);
1287 }
1288
1289 return GS_ALL_DONE;
1290 }
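
/* A sketch of the variable-sized case: for

     void g (int n) { char buf[n]; ... }

   the code above creates a pointer temporary, say buf.P, sets
   DECL_VALUE_EXPR (buf) to *buf.P so that later uses of buf are
   rewritten as indirections, and emits roughly

     buf.P = __builtin_alloca (D.SIZE_UNIT);

   while also setting save_stack so that gimplify_bind_expr wraps the
   enclosing scope in a stack save/restore pair.  (The temporary names
   and the exact size expression are illustrative.)  */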
1291
1292 /* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body
1293 and replacing the LOOP_EXPR with goto, but if the loop contains an
1294 EXIT_EXPR, we need to append a label for it to jump to. */
1295
1296 static enum gimplify_status
1297 gimplify_loop_expr (tree *expr_p, tree *pre_p)
1298 {
1299 tree saved_label = gimplify_ctxp->exit_label;
1300 tree start_label = build1 (LABEL_EXPR, void_type_node, NULL_TREE);
1301 tree jump_stmt = build_and_jump (&LABEL_EXPR_LABEL (start_label));
1302
1303 append_to_statement_list (start_label, pre_p);
1304
1305 gimplify_ctxp->exit_label = NULL_TREE;
1306
1307 gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);
1308
1309 if (gimplify_ctxp->exit_label)
1310 {
1311 append_to_statement_list (jump_stmt, pre_p);
1312 *expr_p = build1 (LABEL_EXPR, void_type_node, gimplify_ctxp->exit_label);
1313 }
1314 else
1315 *expr_p = jump_stmt;
1316
1317 gimplify_ctxp->exit_label = saved_label;
1318
1319 return GS_ALL_DONE;
1320 }
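
/* For instance, a GENERIC loop LOOP_EXPR <body>, where an EXIT_EXPR in
   the body becomes "if (cond) goto out;" via gimplify_exit_expr,
   gimplifies to roughly

     start:
       ... body ...
       goto start;
     out:

   with the trailing "out" label emitted only when an exit was actually
   seen.  */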
1321
1322 /* Compare two case labels. Because the front end should already have
1323 made sure that case ranges do not overlap, it is enough to only compare
1324 the CASE_LOW values of each case label. */
1325
1326 static int
1327 compare_case_labels (const void *p1, const void *p2)
1328 {
1329 tree case1 = *(tree *)p1;
1330 tree case2 = *(tree *)p2;
1331
1332 return tree_int_cst_compare (CASE_LOW (case1), CASE_LOW (case2));
1333 }
1334
1335 /* Sort the case labels in LABEL_VEC in place in ascending order. */
1336
1337 void
1338 sort_case_labels (tree label_vec)
1339 {
1340 size_t len = TREE_VEC_LENGTH (label_vec);
1341 tree default_case = TREE_VEC_ELT (label_vec, len - 1);
1342
1343 if (CASE_LOW (default_case))
1344 {
1345 size_t i;
1346
1347 /* The default case is not the last label in the vector, so find
1348 it and move it to the end. */
1349 for (i = 0; i < len; ++i)
1350 {
1351 tree t = TREE_VEC_ELT (label_vec, i);
1352 if (!CASE_LOW (t))
1353 {
1354 default_case = t;
1355 TREE_VEC_ELT (label_vec, i) = TREE_VEC_ELT (label_vec, len - 1);
1356 TREE_VEC_ELT (label_vec, len - 1) = default_case;
1357 break;
1358 }
1359 }
1360 }
1361
1362 qsort (&TREE_VEC_ELT (label_vec, 0), len - 1, sizeof (tree),
1363 compare_case_labels);
1364 }
1365
1366 /* Gimplify a SWITCH_EXPR, and collect a TREE_VEC of the labels it can
1367 branch to. */
1368
1369 static enum gimplify_status
1370 gimplify_switch_expr (tree *expr_p, tree *pre_p)
1371 {
1372 tree switch_expr = *expr_p;
1373 enum gimplify_status ret;
1374
1375 ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL,
1376 is_gimple_val, fb_rvalue);
1377
1378 if (SWITCH_BODY (switch_expr))
1379 {
1380 VEC(tree,heap) *labels, *saved_labels;
1381 tree label_vec, default_case = NULL_TREE;
1382 size_t i, len;
1383
1384 /* If someone can be bothered to fill in the labels, they can
1385 be bothered to null out the body too. */
1386 gcc_assert (!SWITCH_LABELS (switch_expr));
1387
1388 saved_labels = gimplify_ctxp->case_labels;
1389 gimplify_ctxp->case_labels = VEC_alloc (tree, heap, 8);
1390
1391 gimplify_to_stmt_list (&SWITCH_BODY (switch_expr));
1392
1393 labels = gimplify_ctxp->case_labels;
1394 gimplify_ctxp->case_labels = saved_labels;
1395
1396 i = 0;
1397 while (i < VEC_length (tree, labels))
1398 {
1399 tree elt = VEC_index (tree, labels, i);
1400 tree low = CASE_LOW (elt);
1401 bool remove_element = FALSE;
1402
1403 if (low)
1404 {
1405 /* Discard empty ranges. */
1406 tree high = CASE_HIGH (elt);
1407 if (high && INT_CST_LT (high, low))
1408 remove_element = TRUE;
1409 }
1410 else
1411 {
1412 /* The default case must be the last label in the list. */
1413 gcc_assert (!default_case);
1414 default_case = elt;
1415 remove_element = TRUE;
1416 }
1417
1418 if (remove_element)
1419 VEC_ordered_remove (tree, labels, i);
1420 else
1421 i++;
1422 }
1423 len = i;
1424
1425 label_vec = make_tree_vec (len + 1);
1426 SWITCH_LABELS (*expr_p) = label_vec;
1427 append_to_statement_list (switch_expr, pre_p);
1428
1429 if (! default_case)
1430 {
1431 /* If the switch has no default label, add one, so that we jump
1432 around the switch body. */
1433 default_case = build3 (CASE_LABEL_EXPR, void_type_node, NULL_TREE,
1434 NULL_TREE, create_artificial_label ());
1435 append_to_statement_list (SWITCH_BODY (switch_expr), pre_p);
1436 *expr_p = build1 (LABEL_EXPR, void_type_node,
1437 CASE_LABEL (default_case));
1438 }
1439 else
1440 *expr_p = SWITCH_BODY (switch_expr);
1441
1442 for (i = 0; i < len; ++i)
1443 TREE_VEC_ELT (label_vec, i) = VEC_index (tree, labels, i);
1444 TREE_VEC_ELT (label_vec, len) = default_case;
1445
1446 VEC_free (tree, heap, labels);
1447
1448 sort_case_labels (label_vec);
1449
1450 SWITCH_BODY (switch_expr) = NULL;
1451 }
1452 else
1453 gcc_assert (SWITCH_LABELS (switch_expr));
1454
1455 return ret;
1456 }
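
/* As an example of the transformation above, for

     switch (x) { case 1: f (); default: g (); }

   the body is gimplified into a plain statement list in which each
   CASE_LABEL_EXPR has become an ordinary label, the collected cases
   (sorted, default last) are recorded in SWITCH_LABELS, and
   SWITCH_BODY is cleared, leaving roughly

     switch (x) <case 1: L0, default: L1>
     L0:
       f ();
     L1:
       g ();

   (label names and dump syntax are illustrative; as in the source,
   case 1 falls through to the default here).  */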
1457
1458 static enum gimplify_status
1459 gimplify_case_label_expr (tree *expr_p)
1460 {
1461 tree expr = *expr_p;
1462 struct gimplify_ctx *ctxp;
1463
1464 /* Invalid OpenMP programs can play Duff's Device type games with
1465 #pragma omp parallel. At least in the C front end, we don't
1466 detect such invalid branches until after gimplification. */
1467 for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
1468 if (ctxp->case_labels)
1469 break;
1470
1471 VEC_safe_push (tree, heap, ctxp->case_labels, expr);
1472 *expr_p = build1 (LABEL_EXPR, void_type_node, CASE_LABEL (expr));
1473 return GS_ALL_DONE;
1474 }
1475
1476 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
1477 if necessary. */
1478
1479 tree
1480 build_and_jump (tree *label_p)
1481 {
1482 if (label_p == NULL)
1483 /* If there's nowhere to jump, just fall through. */
1484 return NULL_TREE;
1485
1486 if (*label_p == NULL_TREE)
1487 {
1488 tree label = create_artificial_label ();
1489 *label_p = label;
1490 }
1491
1492 return build1 (GOTO_EXPR, void_type_node, *label_p);
1493 }
1494
1495 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
1496 This also involves building a label to jump to and communicating it to
1497 gimplify_loop_expr through gimplify_ctxp->exit_label. */
1498
1499 static enum gimplify_status
1500 gimplify_exit_expr (tree *expr_p)
1501 {
1502 tree cond = TREE_OPERAND (*expr_p, 0);
1503 tree expr;
1504
1505 expr = build_and_jump (&gimplify_ctxp->exit_label);
1506 expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
1507 *expr_p = expr;
1508
1509 return GS_OK;
1510 }
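
/* For instance, the GENERIC "exit the loop when COND holds",
   EXIT_EXPR <cond>, becomes roughly

     if (cond) goto out;

   where "out" is the label created on demand here and later placed
   after the loop by gimplify_loop_expr.  */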
1511
1512 /* A helper function to be called via walk_tree. Mark all labels under *TP
1513 as being forced. To be called for DECL_INITIAL of static variables. */
1514
1515 tree
1516 force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1517 {
1518 if (TYPE_P (*tp))
1519 *walk_subtrees = 0;
1520 if (TREE_CODE (*tp) == LABEL_DECL)
1521 FORCED_LABEL (*tp) = 1;
1522
1523 return NULL_TREE;
1524 }
1525
1526 /* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is
1527 different from its canonical type, wrap the whole thing inside a
1528 NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
1529 type.
1530
1531 The canonical type of a COMPONENT_REF is the type of the field being
1532 referenced--unless the field is a bit-field which can be read directly
1533 in a smaller mode, in which case the canonical type is the
1534 sign-appropriate type corresponding to that mode. */
1535
1536 static void
1537 canonicalize_component_ref (tree *expr_p)
1538 {
1539 tree expr = *expr_p;
1540 tree type;
1541
1542 gcc_assert (TREE_CODE (expr) == COMPONENT_REF);
1543
1544 if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
1545 type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
1546 else
1547 type = TREE_TYPE (TREE_OPERAND (expr, 1));
1548
1549 if (TREE_TYPE (expr) != type)
1550 {
1551 tree old_type = TREE_TYPE (expr);
1552
1553 /* Set the type of the COMPONENT_REF to the underlying type. */
1554 TREE_TYPE (expr) = type;
1555
1556 /* And wrap the whole thing inside a NOP_EXPR. */
1557 expr = build1 (NOP_EXPR, old_type, expr);
1558
1559 *expr_p = expr;
1560 }
1561 }
1562
1563 /* If a NOP conversion is changing a pointer to array of foo to a pointer
1564 to foo, embed that change in the ADDR_EXPR by converting
1565 T array[U];
1566 (T *)&array
1567 ==>
1568 &array[L]
1569 where L is the lower bound. For simplicity, only do this for constant
1570 lower bound. */
1571
1572 static void
1573 canonicalize_addr_expr (tree *expr_p)
1574 {
1575 tree expr = *expr_p;
1576 tree ctype = TREE_TYPE (expr);
1577 tree addr_expr = TREE_OPERAND (expr, 0);
1578 tree atype = TREE_TYPE (addr_expr);
1579 tree dctype, datype, ddatype, otype, obj_expr;
1580
1581 /* Both cast and addr_expr types should be pointers. */
1582 if (!POINTER_TYPE_P (ctype) || !POINTER_TYPE_P (atype))
1583 return;
1584
1585 /* The addr_expr type should be a pointer to an array. */
1586 datype = TREE_TYPE (atype);
1587 if (TREE_CODE (datype) != ARRAY_TYPE)
1588 return;
1589
1590 /* Both cast and addr_expr types should address the same object type. */
1591 dctype = TREE_TYPE (ctype);
1592 ddatype = TREE_TYPE (datype);
1593 if (!lang_hooks.types_compatible_p (ddatype, dctype))
1594 return;
1595
1596 /* The addr_expr and the object type should match. */
1597 obj_expr = TREE_OPERAND (addr_expr, 0);
1598 otype = TREE_TYPE (obj_expr);
1599 if (!lang_hooks.types_compatible_p (otype, datype))
1600 return;
1601
1602 /* The lower bound and element sizes must be constant. */
1603 if (!TYPE_SIZE_UNIT (dctype)
1604 || TREE_CODE (TYPE_SIZE_UNIT (dctype)) != INTEGER_CST
1605 || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
1606 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
1607 return;
1608
1609 /* All checks succeeded. Build a new node to merge the cast. */
1610 *expr_p = build4 (ARRAY_REF, dctype, obj_expr,
1611 TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
1612 TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
1613 size_binop (EXACT_DIV_EXPR, TYPE_SIZE_UNIT (dctype),
1614 size_int (TYPE_ALIGN_UNIT (dctype))));
1615 *expr_p = build1 (ADDR_EXPR, ctype, *expr_p);
1616 }
1617
1618 /* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions
1619 underneath as appropriate. */
1620
1621 static enum gimplify_status
1622 gimplify_conversion (tree *expr_p)
1623 {
1624 gcc_assert (TREE_CODE (*expr_p) == NOP_EXPR
1625 || TREE_CODE (*expr_p) == CONVERT_EXPR);
1626
1627 /* Then strip away all but the outermost conversion. */
1628 STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));
1629
1630 /* And remove the outermost conversion if it's useless. */
1631 if (tree_ssa_useless_type_conversion (*expr_p))
1632 *expr_p = TREE_OPERAND (*expr_p, 0);
1633
1634 /* If we still have a conversion at the toplevel,
1635 then canonicalize some constructs. */
1636 if (TREE_CODE (*expr_p) == NOP_EXPR || TREE_CODE (*expr_p) == CONVERT_EXPR)
1637 {
1638 tree sub = TREE_OPERAND (*expr_p, 0);
1639
1640 /* If a NOP conversion is changing the type of a COMPONENT_REF
1641 expression, then canonicalize its type now in order to expose more
1642 redundant conversions. */
1643 if (TREE_CODE (sub) == COMPONENT_REF)
1644 canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));
1645
1646 /* If a NOP conversion is changing a pointer to array of foo
1647 to a pointer to foo, embed that change in the ADDR_EXPR. */
1648 else if (TREE_CODE (sub) == ADDR_EXPR)
1649 canonicalize_addr_expr (expr_p);
1650 }
1651
1652 return GS_OK;
1653 }
1654
1655 /* Gimplify a VAR_DECL or PARM_DECL. Returns GS_OK if we expanded a
1656 DECL_VALUE_EXPR, and it's worth re-examining things. */
1657
1658 static enum gimplify_status
1659 gimplify_var_or_parm_decl (tree *expr_p)
1660 {
1661 tree decl = *expr_p;
1662
1663 /* ??? If this is a local variable, and it has not been seen in any
1664 outer BIND_EXPR, then it's probably the result of a duplicate
1665 declaration, for which we've already issued an error. It would
1666 be really nice if the front end wouldn't leak these at all.
1667 Currently the only known culprit is C++ destructors, as seen
1668 in g++.old-deja/g++.jason/binding.C. */
1669 if (TREE_CODE (decl) == VAR_DECL
1670 && !DECL_SEEN_IN_BIND_EXPR_P (decl)
1671 && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
1672 && decl_function_context (decl) == current_function_decl)
1673 {
1674 gcc_assert (errorcount || sorrycount);
1675 return GS_ERROR;
1676 }
1677
1678 /* When within an OpenMP context, notice uses of variables. */
1679 if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
1680 return GS_ALL_DONE;
1681
1682 /* If the decl is an alias for another expression, substitute it now. */
1683 if (DECL_HAS_VALUE_EXPR_P (decl))
1684 {
1685 *expr_p = unshare_expr (DECL_VALUE_EXPR (decl));
1686 return GS_OK;
1687 }
1688
1689 return GS_ALL_DONE;
1690 }
1691
1692
1693 /* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
1694 node pointed to by EXPR_P.
1695
1696 compound_lval
1697 : min_lval '[' val ']'
1698 | min_lval '.' ID
1699 | compound_lval '[' val ']'
1700 | compound_lval '.' ID
1701
1702 This is not part of the original SIMPLE definition, which separates
1703 array and member references, but it seems reasonable to handle them
1704 together. Also, this way we don't run into problems with union
1705 aliasing; gcc requires that for accesses through a union to alias, the
1706 union reference must be explicit, which was not always the case when we
1707 were splitting up array and member refs.
1708
1709 PRE_P points to the list where side effects that must happen before
1710 *EXPR_P should be stored.
1711
1712 POST_P points to the list where side effects that must happen after
1713 *EXPR_P should be stored. */
1714
1715 static enum gimplify_status
1716 gimplify_compound_lval (tree *expr_p, tree *pre_p,
1717 tree *post_p, fallback_t fallback)
1718 {
1719 tree *p;
1720 VEC(tree,heap) *stack;
1721 enum gimplify_status ret = GS_OK, tret;
1722 int i;
1723
1724 /* Create a stack of the subexpressions so later we can walk them in
1725 order from inner to outer. */
1726 stack = VEC_alloc (tree, heap, 10);
1727
1728 /* We can handle anything that get_inner_reference can deal with. */
1729 for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
1730 {
1731 restart:
1732 /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs. */
1733 if (TREE_CODE (*p) == INDIRECT_REF)
1734 *p = fold_indirect_ref (*p);
1735
1736 if (handled_component_p (*p))
1737 ;
1738 /* Expand DECL_VALUE_EXPR now. In some cases that may expose
1739 additional COMPONENT_REFs. */
1740 else if ((TREE_CODE (*p) == VAR_DECL || TREE_CODE (*p) == PARM_DECL)
1741 && gimplify_var_or_parm_decl (p) == GS_OK)
1742 goto restart;
1743 else
1744 break;
1745
1746 VEC_safe_push (tree, heap, stack, *p);
1747 }
1748
1749 gcc_assert (VEC_length (tree, stack));
1750
1751 /* Now STACK is a stack of pointers to all the refs we've walked through
1752 and P points to the innermost expression.
1753
1754 Java requires that we elaborate nodes in source order. That
1755 means we must gimplify the inner expression followed by each of
1756 the indices, in order. But we can't gimplify the inner
1757 expression until we deal with any variable bounds, sizes, or
1758 positions in order to deal with PLACEHOLDER_EXPRs.
1759
1760 So we do this in three steps. First we deal with the annotations
1761 for any variables in the components, then we gimplify the base,
1762 then we gimplify any indices, from left to right. */
1763 for (i = VEC_length (tree, stack) - 1; i >= 0; i--)
1764 {
1765 tree t = VEC_index (tree, stack, i);
1766
1767 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
1768 {
1769 /* Gimplify the low bound and element type size and put them into
1770 the ARRAY_REF. If these values are set, they have already been
1771 gimplified. */
1772 if (!TREE_OPERAND (t, 2))
1773 {
1774 tree low = unshare_expr (array_ref_low_bound (t));
1775 if (!is_gimple_min_invariant (low))
1776 {
1777 TREE_OPERAND (t, 2) = low;
1778 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
1779 is_gimple_formal_tmp_reg, fb_rvalue);
1780 ret = MIN (ret, tret);
1781 }
1782 }
1783
1784 if (!TREE_OPERAND (t, 3))
1785 {
1786 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
1787 tree elmt_size = unshare_expr (array_ref_element_size (t));
1788 tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));
1789
1790 /* Divide the element size by the alignment of the element
1791 type (above). */
1792 elmt_size = size_binop (EXACT_DIV_EXPR, elmt_size, factor);
1793
1794 if (!is_gimple_min_invariant (elmt_size))
1795 {
1796 TREE_OPERAND (t, 3) = elmt_size;
1797 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
1798 is_gimple_formal_tmp_reg, fb_rvalue);
1799 ret = MIN (ret, tret);
1800 }
1801 }
1802 }
1803 else if (TREE_CODE (t) == COMPONENT_REF)
1804 {
1805 /* Set the field offset into T and gimplify it. */
1806 if (!TREE_OPERAND (t, 2))
1807 {
1808 tree offset = unshare_expr (component_ref_field_offset (t));
1809 tree field = TREE_OPERAND (t, 1);
1810 tree factor
1811 = size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);
1812
1813 /* Divide the offset by its alignment. */
1814 offset = size_binop (EXACT_DIV_EXPR, offset, factor);
1815
1816 if (!is_gimple_min_invariant (offset))
1817 {
1818 TREE_OPERAND (t, 2) = offset;
1819 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
1820 is_gimple_formal_tmp_reg, fb_rvalue);
1821 ret = MIN (ret, tret);
1822 }
1823 }
1824 }
1825 }
1826
1827 /* Step 2 is to gimplify the base expression. Make sure lvalue is set
1828 so as to match the min_lval predicate. Failure to do so may result
1829 in the creation of large aggregate temporaries. */
1830 tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
1831 fallback | fb_lvalue);
1832 ret = MIN (ret, tret);
1833
1834 /* And finally, the indices and operands to BIT_FIELD_REF. During this
1835 loop we also remove any useless conversions. */
1836 for (; VEC_length (tree, stack) > 0; )
1837 {
1838 tree t = VEC_pop (tree, stack);
1839
1840 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
1841 {
1842 /* Gimplify the dimension.
1843 Temporary fix for gcc.c-torture/execute/20040313-1.c.
1844 Gimplify non-constant array indices into a temporary
1845 variable.
1846 FIXME - The real fix is to gimplify post-modify
1847 expressions into a minimal gimple lvalue. However, that
1848 exposes bugs in alias analysis. The alias analyzer does
1849 not handle &PTR->FIELD very well. Will fix after the
1850 branch is merged into mainline (dnovillo 2004-05-03). */
1851 if (!is_gimple_min_invariant (TREE_OPERAND (t, 1)))
1852 {
1853 tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
1854 is_gimple_formal_tmp_reg, fb_rvalue);
1855 ret = MIN (ret, tret);
1856 }
1857 }
1858 else if (TREE_CODE (t) == BIT_FIELD_REF)
1859 {
1860 tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
1861 is_gimple_val, fb_rvalue);
1862 ret = MIN (ret, tret);
1863 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
1864 is_gimple_val, fb_rvalue);
1865 ret = MIN (ret, tret);
1866 }
1867
1868 STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));
1869
1870 /* The innermost expression P may have originally had TREE_SIDE_EFFECTS
1871 set which would have caused all the outer expressions in EXPR_P
1872 leading to P to also have had TREE_SIDE_EFFECTS set. */
1873 recalculate_side_effects (t);
1874 }
1875
1876 tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval, fallback);
1877 ret = MIN (ret, tret);
1878
1879 /* If the outermost expression is a COMPONENT_REF, canonicalize its type. */
1880 if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
1881 {
1882 canonicalize_component_ref (expr_p);
1883 ret = MIN (ret, GS_OK);
1884 }
1885
1886 VEC_free (tree, heap, stack);
1887
1888 return ret;
1889 }
1890
1891 /* Gimplify the self modifying expression pointed to by EXPR_P
1892 (++, --, +=, -=).
1893
1894 PRE_P points to the list where side effects that must happen before
1895 *EXPR_P should be stored.
1896
1897 POST_P points to the list where side effects that must happen after
1898 *EXPR_P should be stored.
1899
1900 WANT_VALUE is nonzero iff we want to use the value of this expression
1901 in another expression. */
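/* As a rough sketch (an illustrative example): with the value wanted,
   "b = a++" is gimplified along the lines of

     b = a;
     a = a + 1;

   where the increment is queued on the post list, while "a++" with the
   value unused is handled exactly like the prefix form "a = a + 1".  */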
1902
1903 static enum gimplify_status
1904 gimplify_self_mod_expr (tree *expr_p, tree *pre_p, tree *post_p,
1905 bool want_value)
1906 {
1907 enum tree_code code;
1908 tree lhs, lvalue, rhs, t1, post = NULL, *orig_post_p = post_p;
1909 bool postfix;
1910 enum tree_code arith_code;
1911 enum gimplify_status ret;
1912
1913 code = TREE_CODE (*expr_p);
1914
1915 gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
1916 || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);
1917
1918 /* Prefix or postfix? */
1919 if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
1920 /* Faster to treat as prefix if result is not used. */
1921 postfix = want_value;
1922 else
1923 postfix = false;
1924
1925 /* For postfix, make sure the inner expression's post side effects
1926 are executed after side effects from this expression. */
1927 if (postfix)
1928 post_p = &post;
1929
1930 /* Add or subtract? */
1931 if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
1932 arith_code = PLUS_EXPR;
1933 else
1934 arith_code = MINUS_EXPR;
1935
1936 /* Gimplify the LHS into a GIMPLE lvalue. */
1937 lvalue = TREE_OPERAND (*expr_p, 0);
1938 ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
1939 if (ret == GS_ERROR)
1940 return ret;
1941
1942 /* Extract the operands to the arithmetic operation. */
1943 lhs = lvalue;
1944 rhs = TREE_OPERAND (*expr_p, 1);
1945
1946 /* For a postfix operator, we evaluate the LHS to an rvalue and then use
1947 that as the result value and in the postqueue operation. */
1948 if (postfix)
1949 {
1950 ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
1951 if (ret == GS_ERROR)
1952 return ret;
1953 }
1954
1955 t1 = build2 (arith_code, TREE_TYPE (*expr_p), lhs, rhs);
1956 t1 = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (lvalue), lvalue, t1);
1957
1958 if (postfix)
1959 {
1960 gimplify_and_add (t1, orig_post_p);
1961 append_to_statement_list (post, orig_post_p);
1962 *expr_p = lhs;
1963 return GS_ALL_DONE;
1964 }
1965 else
1966 {
1967 *expr_p = t1;
1968 return GS_OK;
1969 }
1970 }
1971
1972 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
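/* For example (an illustrative sketch): if EXPR has the variably sized
   type char[n], it is rewritten as roughly

     WITH_SIZE_EXPR <expr, n>

   i.e. the expression paired with its TYPE_SIZE_UNIT (with any
   PLACEHOLDER_EXPRs substituted), so later consumers such as the call
   and assignment gimplifiers still know how many bytes are involved.  */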
1973
1974 static void
1975 maybe_with_size_expr (tree *expr_p)
1976 {
1977 tree expr = *expr_p;
1978 tree type = TREE_TYPE (expr);
1979 tree size;
1980
1981 /* If we've already wrapped this or the type is error_mark_node, we can't do
1982 anything. */
1983 if (TREE_CODE (expr) == WITH_SIZE_EXPR
1984 || type == error_mark_node)
1985 return;
1986
1987 /* If the size isn't known or is a constant, we have nothing to do. */
1988 size = TYPE_SIZE_UNIT (type);
1989 if (!size || TREE_CODE (size) == INTEGER_CST)
1990 return;
1991
1992 /* Otherwise, make a WITH_SIZE_EXPR. */
1993 size = unshare_expr (size);
1994 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
1995 *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
1996 }
1997
1998 /* Subroutine of gimplify_call_expr: Gimplify a single argument. */
1999
2000 static enum gimplify_status
2001 gimplify_arg (tree *expr_p, tree *pre_p)
2002 {
2003 bool (*test) (tree);
2004 fallback_t fb;
2005
2006 /* In general, we allow lvalues for function arguments to avoid
2007 extra overhead of copying large aggregates out of even larger
2008 aggregates into temporaries only to copy the temporaries to
2009 the argument list. Make optimizers happy by pulling out to
2010 temporaries those types that fit in registers. */
2011 if (is_gimple_reg_type (TREE_TYPE (*expr_p)))
2012 test = is_gimple_val, fb = fb_rvalue;
2013 else
2014 test = is_gimple_lvalue, fb = fb_either;
2015
2016 /* If this is a variable sized type, we must remember the size. */
2017 maybe_with_size_expr (expr_p);
2018
2019 /* There is a sequence point before a function call. Side effects in
2020 the argument list must occur before the actual call. So, when
2021 gimplifying arguments, force gimplify_expr to use an internal
2022 post queue which is then appended to the end of PRE_P. */
2023 return gimplify_expr (expr_p, pre_p, NULL, test, fb);
2024 }
2025
2026 /* Gimplify the CALL_EXPR node pointed to by EXPR_P. PRE_P points to the
2027 list where side effects that must happen before *EXPR_P should be stored.
2028 WANT_VALUE is true if the result of the call is desired. */
2029
2030 static enum gimplify_status
2031 gimplify_call_expr (tree *expr_p, tree *pre_p, bool want_value)
2032 {
2033 tree decl;
2034 tree arglist;
2035 enum gimplify_status ret;
2036
2037 gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);
2038
2039 /* For reliable diagnostics during inlining, it is necessary that
2040 every call_expr be annotated with file and line. */
2041 if (! EXPR_HAS_LOCATION (*expr_p))
2042 SET_EXPR_LOCATION (*expr_p, input_location);
2043
2044 /* This may be a call to a builtin function.
2045
2046 Builtin function calls may be transformed into different
2047 (and more efficient) builtin function calls under certain
2048 circumstances. Unfortunately, gimplification can muck things
2049 up enough that the builtin expanders are not aware that certain
2050 transformations are still valid.
2051
2052 So we attempt transformation/gimplification of the call before
2053 we gimplify the CALL_EXPR. At this time we do not manage to
2054 transform all calls in the same manner as the expanders do, but
2055 we do transform most of them. */
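/* For instance (one assumed example among many), fold_builtin may
   rewrite

     printf ("hello\n");

   into the cheaper

     puts ("hello");

   in which case we return GS_OK below so the new call is gimplified on
   the next pass.  */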
2056 decl = get_callee_fndecl (*expr_p);
2057 if (decl && DECL_BUILT_IN (decl))
2058 {
2059 tree arglist = TREE_OPERAND (*expr_p, 1);
2060 tree new = fold_builtin (decl, arglist, !want_value);
2061
2062 if (new && new != *expr_p)
2063 {
2064 /* There was a transformation of this call which computes the
2065 same value, but in a more efficient way. Return and try
2066 again. */
2067 *expr_p = new;
2068 return GS_OK;
2069 }
2070
2071 if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
2072 && DECL_FUNCTION_CODE (decl) == BUILT_IN_VA_START)
2073 {
2074 if (!arglist || !TREE_CHAIN (arglist))
2075 {
2076 error ("too few arguments to function %<va_start%>");
2077 *expr_p = build_empty_stmt ();
2078 return GS_OK;
2079 }
2080
2081 if (fold_builtin_next_arg (TREE_CHAIN (arglist)))
2082 {
2083 *expr_p = build_empty_stmt ();
2084 return GS_OK;
2085 }
2086 /* Avoid gimplifying the second argument to va_start, which needs
2087 to be the plain PARM_DECL. */
2088 return gimplify_arg (&TREE_VALUE (TREE_OPERAND (*expr_p, 1)), pre_p);
2089 }
2090 }
2091
2092 /* There is a sequence point before the call, so any side effects in
2093 the calling expression must occur before the actual call. Force
2094 gimplify_expr to use an internal post queue. */
2095 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, NULL,
2096 is_gimple_call_addr, fb_rvalue);
2097
2098 if (PUSH_ARGS_REVERSED)
2099 TREE_OPERAND (*expr_p, 1) = nreverse (TREE_OPERAND (*expr_p, 1));
2100 for (arglist = TREE_OPERAND (*expr_p, 1); arglist;
2101 arglist = TREE_CHAIN (arglist))
2102 {
2103 enum gimplify_status t;
2104
2105 t = gimplify_arg (&TREE_VALUE (arglist), pre_p);
2106
2107 if (t == GS_ERROR)
2108 ret = GS_ERROR;
2109 }
2110 if (PUSH_ARGS_REVERSED)
2111 TREE_OPERAND (*expr_p, 1) = nreverse (TREE_OPERAND (*expr_p, 1));
2112
2113 /* Try this again in case gimplification exposed something. */
2114 if (ret != GS_ERROR)
2115 {
2116 decl = get_callee_fndecl (*expr_p);
2117 if (decl && DECL_BUILT_IN (decl))
2118 {
2119 tree arglist = TREE_OPERAND (*expr_p, 1);
2120 tree new = fold_builtin (decl, arglist, !want_value);
2121
2122 if (new && new != *expr_p)
2123 {
2124 /* There was a transformation of this call which computes the
2125 same value, but in a more efficient way. Return and try
2126 again. */
2127 *expr_p = new;
2128 return GS_OK;
2129 }
2130 }
2131 }
2132
2133 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on the
2134 CALL_EXPR itself. This allows us to eliminate redundant or useless
2135 calls to "const" functions. */
2136 if (TREE_CODE (*expr_p) == CALL_EXPR
2137 && (call_expr_flags (*expr_p) & (ECF_CONST | ECF_PURE)))
2138 TREE_SIDE_EFFECTS (*expr_p) = 0;
2139
2140 return ret;
2141 }
2142
2143 /* Handle shortcut semantics in the predicate operand of a COND_EXPR by
2144 rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
2145
2146 TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
2147 condition is true or false, respectively. If null, we should generate
2148 our own to skip over the evaluation of this specific expression.
2149
2150 This function is the tree equivalent of do_jump.
2151
2152 shortcut_cond_r should only be called by shortcut_cond_expr. */
2153
2154 static tree
2155 shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p)
2156 {
2157 tree local_label = NULL_TREE;
2158 tree t, expr = NULL;
2159
2160 /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
2161 retain the shortcut semantics. Just insert the gotos here;
2162 shortcut_cond_expr will append the real blocks later. */
2163 if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
2164 {
2165 /* Turn if (a && b) into
2166
2167 if (a); else goto no;
2168 if (b) goto yes; else goto no;
2169 (no:) */
2170
2171 if (false_label_p == NULL)
2172 false_label_p = &local_label;
2173
2174 t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p);
2175 append_to_statement_list (t, &expr);
2176
2177 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
2178 false_label_p);
2179 append_to_statement_list (t, &expr);
2180 }
2181 else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
2182 {
2183 /* Turn if (a || b) into
2184
2185 if (a) goto yes;
2186 if (b) goto yes; else goto no;
2187 (yes:) */
2188
2189 if (true_label_p == NULL)
2190 true_label_p = &local_label;
2191
2192 t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL);
2193 append_to_statement_list (t, &expr);
2194
2195 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
2196 false_label_p);
2197 append_to_statement_list (t, &expr);
2198 }
2199 else if (TREE_CODE (pred) == COND_EXPR)
2200 {
2201 /* As long as we're messing with gotos, turn if (a ? b : c) into
2202 if (a)
2203 if (b) goto yes; else goto no;
2204 else
2205 if (c) goto yes; else goto no; */
2206 expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
2207 shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
2208 false_label_p),
2209 shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
2210 false_label_p));
2211 }
2212 else
2213 {
2214 expr = build3 (COND_EXPR, void_type_node, pred,
2215 build_and_jump (true_label_p),
2216 build_and_jump (false_label_p));
2217 }
2218
2219 if (local_label)
2220 {
2221 t = build1 (LABEL_EXPR, void_type_node, local_label);
2222 append_to_statement_list (t, &expr);
2223 }
2224
2225 return expr;
2226 }
2227
2228 static tree
2229 shortcut_cond_expr (tree expr)
2230 {
2231 tree pred = TREE_OPERAND (expr, 0);
2232 tree then_ = TREE_OPERAND (expr, 1);
2233 tree else_ = TREE_OPERAND (expr, 2);
2234 tree true_label, false_label, end_label, t;
2235 tree *true_label_p;
2236 tree *false_label_p;
2237 bool emit_end, emit_false, jump_over_else;
2238 bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
2239 bool else_se = else_ && TREE_SIDE_EFFECTS (else_);
2240
2241 /* First do simple transformations. */
2242 if (!else_se)
2243 {
2244 /* If there is no 'else', turn (a && b) into if (a) if (b). */
2245 while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
2246 {
2247 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
2248 then_ = shortcut_cond_expr (expr);
2249 then_se = then_ && TREE_SIDE_EFFECTS (then_);
2250 pred = TREE_OPERAND (pred, 0);
2251 expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
2252 }
2253 }
2254 if (!then_se)
2255 {
2256 /* If there is no 'then', turn
2257 if (a || b); else d
2258 into
2259 if (a); else if (b); else d. */
2260 while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
2261 {
2262 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
2263 else_ = shortcut_cond_expr (expr);
2264 else_se = else_ && TREE_SIDE_EFFECTS (else_);
2265 pred = TREE_OPERAND (pred, 0);
2266 expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
2267 }
2268 }
2269
2270 /* If we're done, great. */
2271 if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
2272 && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
2273 return expr;
2274
2275 /* Otherwise we need to mess with gotos. Change
2276 if (a) c; else d;
2277 to
2278 if (a); else goto no;
2279 c; goto end;
2280 no: d; end:
2281 and recursively gimplify the condition. */
2282
2283 true_label = false_label = end_label = NULL_TREE;
2284
2285 /* If our arms just jump somewhere, hijack those labels so we don't
2286 generate jumps to jumps. */
2287
2288 if (then_
2289 && TREE_CODE (then_) == GOTO_EXPR
2290 && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL)
2291 {
2292 true_label = GOTO_DESTINATION (then_);
2293 then_ = NULL;
2294 then_se = false;
2295 }
2296
2297 if (else_
2298 && TREE_CODE (else_) == GOTO_EXPR
2299 && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL)
2300 {
2301 false_label = GOTO_DESTINATION (else_);
2302 else_ = NULL;
2303 else_se = false;
2304 }
2305
2306 /* If we aren't hijacking a label for the 'then' branch, it falls through. */
2307 if (true_label)
2308 true_label_p = &true_label;
2309 else
2310 true_label_p = NULL;
2311
2312 /* The 'else' branch also needs a label if it contains interesting code. */
2313 if (false_label || else_se)
2314 false_label_p = &false_label;
2315 else
2316 false_label_p = NULL;
2317
2318 /* If there was nothing else in our arms, just forward the label(s). */
2319 if (!then_se && !else_se)
2320 return shortcut_cond_r (pred, true_label_p, false_label_p);
2321
2322 /* If our last subexpression already has a terminal label, reuse it. */
2323 if (else_se)
2324 expr = expr_last (else_);
2325 else if (then_se)
2326 expr = expr_last (then_);
2327 else
2328 expr = NULL;
2329 if (expr && TREE_CODE (expr) == LABEL_EXPR)
2330 end_label = LABEL_EXPR_LABEL (expr);
2331
2332 /* If we don't care about jumping to the 'else' branch, jump to the end
2333 if the condition is false. */
2334 if (!false_label_p)
2335 false_label_p = &end_label;
2336
2337 /* We only want to emit these labels if we aren't hijacking them. */
2338 emit_end = (end_label == NULL_TREE);
2339 emit_false = (false_label == NULL_TREE);
2340
2341 /* We only emit the jump over the else clause if we have to--if the
2342 then clause may fall through. Otherwise we can wind up with a
2343 useless jump and a useless label at the end of gimplified code,
2344 which will cause us to think that this conditional as a whole
2345 falls through even if it doesn't. If we then inline a function
2346 which ends with such a condition, that can cause us to issue an
2347 inappropriate warning about control reaching the end of a
2348 non-void function. */
2349 jump_over_else = block_may_fallthru (then_);
2350
2351 pred = shortcut_cond_r (pred, true_label_p, false_label_p);
2352
2353 expr = NULL;
2354 append_to_statement_list (pred, &expr);
2355
2356 append_to_statement_list (then_, &expr);
2357 if (else_se)
2358 {
2359 if (jump_over_else)
2360 {
2361 t = build_and_jump (&end_label);
2362 append_to_statement_list (t, &expr);
2363 }
2364 if (emit_false)
2365 {
2366 t = build1 (LABEL_EXPR, void_type_node, false_label);
2367 append_to_statement_list (t, &expr);
2368 }
2369 append_to_statement_list (else_, &expr);
2370 }
2371 if (emit_end && end_label)
2372 {
2373 t = build1 (LABEL_EXPR, void_type_node, end_label);
2374 append_to_statement_list (t, &expr);
2375 }
2376
2377 return expr;
2378 }
2379
2380 /* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */
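/* For example (an illustrative note): a comparison such as "a < b",
   which the C front end types as int, simply has its type switched to
   boolean_type_node here, while an integer-valued condition like
   "i & 4" falls into the default case and is converted, conceptually
   yielding (i & 4) != 0.  */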
2381
2382 tree
2383 gimple_boolify (tree expr)
2384 {
2385 tree type = TREE_TYPE (expr);
2386
2387 if (TREE_CODE (type) == BOOLEAN_TYPE)
2388 return expr;
2389
2390 switch (TREE_CODE (expr))
2391 {
2392 case TRUTH_AND_EXPR:
2393 case TRUTH_OR_EXPR:
2394 case TRUTH_XOR_EXPR:
2395 case TRUTH_ANDIF_EXPR:
2396 case TRUTH_ORIF_EXPR:
2397 /* Also boolify the arguments of truth exprs. */
2398 TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
2399 /* FALLTHRU */
2400
2401 case TRUTH_NOT_EXPR:
2402 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
2403 /* FALLTHRU */
2404
2405 case EQ_EXPR: case NE_EXPR:
2406 case LE_EXPR: case GE_EXPR: case LT_EXPR: case GT_EXPR:
2407 /* These expressions always produce boolean results. */
2408 TREE_TYPE (expr) = boolean_type_node;
2409 return expr;
2410
2411 default:
2412 /* Other expressions that get here must have boolean values, but
2413 might need to be converted to the appropriate mode. */
2414 return fold_convert (boolean_type_node, expr);
2415 }
2416 }
2417
2418 /* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
2419 into
2420
2421 if (p)                      if (p)
2422   t1 = a;                     a;
2423 else               or      else
2424   t1 = b;                     b;
2425 t1;
2426
2427 The second form is used when *EXPR_P is of type void.
2428
2429 TARGET is the tree for T1 above.
2430
2431 PRE_P points to the list where side effects that must happen before
2432 *EXPR_P should be stored. */
2433
2434 static enum gimplify_status
2435 gimplify_cond_expr (tree *expr_p, tree *pre_p, fallback_t fallback)
2436 {
2437 tree expr = *expr_p;
2438 tree tmp, tmp2, type;
2439 enum gimplify_status ret;
2440
2441 type = TREE_TYPE (expr);
2442
2443 /* If this COND_EXPR has a value, copy the values into a temporary within
2444 the arms. */
2445 if (! VOID_TYPE_P (type))
2446 {
2447 tree result;
2448
2449 if ((fallback & fb_lvalue) == 0)
2450 {
2451 result = tmp2 = tmp = create_tmp_var (TREE_TYPE (expr), "iftmp");
2452 ret = GS_ALL_DONE;
2453 }
2454 else
2455 {
2456 tree type = build_pointer_type (TREE_TYPE (expr));
2457
2458 if (TREE_TYPE (TREE_OPERAND (expr, 1)) != void_type_node)
2459 TREE_OPERAND (expr, 1) =
2460 build_fold_addr_expr (TREE_OPERAND (expr, 1));
2461
2462 if (TREE_TYPE (TREE_OPERAND (expr, 2)) != void_type_node)
2463 TREE_OPERAND (expr, 2) =
2464 build_fold_addr_expr (TREE_OPERAND (expr, 2));
2465
2466 tmp2 = tmp = create_tmp_var (type, "iftmp");
2467
2468 expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (expr, 0),
2469 TREE_OPERAND (expr, 1), TREE_OPERAND (expr, 2));
2470
2471 result = build_fold_indirect_ref (tmp);
2472 ret = GS_ALL_DONE;
2473 }
2474
2475 /* Build the then clause, 't1 = a;'. But don't build an assignment
2476 if this branch is void; in C++ it can be, if it's a throw. */
2477 if (TREE_TYPE (TREE_OPERAND (expr, 1)) != void_type_node)
2478 TREE_OPERAND (expr, 1)
2479 = build2 (GIMPLE_MODIFY_STMT, void_type_node, tmp,
2480 TREE_OPERAND (expr, 1));
2481
2482 /* Build the else clause, 't1 = b;'. */
2483 if (TREE_TYPE (TREE_OPERAND (expr, 2)) != void_type_node)
2484 TREE_OPERAND (expr, 2)
2485 = build2 (GIMPLE_MODIFY_STMT, void_type_node, tmp2,
2486 TREE_OPERAND (expr, 2));
2487
2488 TREE_TYPE (expr) = void_type_node;
2489 recalculate_side_effects (expr);
2490
2491 /* Move the COND_EXPR to the prequeue. */
2492 gimplify_and_add (expr, pre_p);
2493
2494 *expr_p = result;
2495 return ret;
2496 }
2497
2498 /* Make sure the condition has BOOLEAN_TYPE. */
2499 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
2500
2501 /* Break apart && and || conditions. */
2502 if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
2503 || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
2504 {
2505 expr = shortcut_cond_expr (expr);
2506
2507 if (expr != *expr_p)
2508 {
2509 *expr_p = expr;
2510
2511 /* We can't rely on gimplify_expr to re-gimplify the expanded
2512 form properly, as cleanups might cause the target labels to be
2513 wrapped in a TRY_FINALLY_EXPR. To prevent that, we need to
2514 set up a conditional context. */
2515 gimple_push_condition ();
2516 gimplify_stmt (expr_p);
2517 gimple_pop_condition (pre_p);
2518
2519 return GS_ALL_DONE;
2520 }
2521 }
2522
2523 /* Now do the normal gimplification. */
2524 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL,
2525 is_gimple_condexpr, fb_rvalue);
2526
2527 gimple_push_condition ();
2528
2529 gimplify_to_stmt_list (&TREE_OPERAND (expr, 1));
2530 gimplify_to_stmt_list (&TREE_OPERAND (expr, 2));
2531 recalculate_side_effects (expr);
2532
2533 gimple_pop_condition (pre_p);
2534
2535 if (ret == GS_ERROR)
2536 ;
2537 else if (TREE_SIDE_EFFECTS (TREE_OPERAND (expr, 1)))
2538 ret = GS_ALL_DONE;
2539 else if (TREE_SIDE_EFFECTS (TREE_OPERAND (expr, 2)))
2540 /* Rewrite "if (a); else b" to "if (!a) b" */
2541 {
2542 TREE_OPERAND (expr, 0) = invert_truthvalue (TREE_OPERAND (expr, 0));
2543 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL,
2544 is_gimple_condexpr, fb_rvalue);
2545
2546 tmp = TREE_OPERAND (expr, 1);
2547 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 2);
2548 TREE_OPERAND (expr, 2) = tmp;
2549 }
2550 else
2551 /* Both arms are empty; replace the COND_EXPR with its predicate. */
2552 expr = TREE_OPERAND (expr, 0);
2553
2554 *expr_p = expr;
2555 return ret;
2556 }
2557
2558 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
2559 a call to __builtin_memcpy. */
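/* As a rough sketch (illustrative only): given "*to = *from" where the
   type has non-constant size SIZE, the assignment becomes

     __builtin_memcpy (&*to, &*from, SIZE);

   and when the value of the assignment is wanted, the call's result is
   cast back to the destination pointer type and dereferenced so *EXPR_P
   still yields the destination object.  */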
2560
2561 static enum gimplify_status
2562 gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value)
2563 {
2564 tree args, t, to, to_ptr, from;
2565
2566 to = GENERIC_TREE_OPERAND (*expr_p, 0);
2567 from = GENERIC_TREE_OPERAND (*expr_p, 1);
2568
2569 args = tree_cons (NULL, size, NULL);
2570
2571 t = build_fold_addr_expr (from);
2572 args = tree_cons (NULL, t, args);
2573
2574 to_ptr = build_fold_addr_expr (to);
2575 args = tree_cons (NULL, to_ptr, args);
2576 t = implicit_built_in_decls[BUILT_IN_MEMCPY];
2577 t = build_function_call_expr (t, args);
2578
2579 if (want_value)
2580 {
2581 t = build1 (NOP_EXPR, TREE_TYPE (to_ptr), t);
2582 t = build1 (INDIRECT_REF, TREE_TYPE (to), t);
2583 }
2584
2585 *expr_p = t;
2586 return GS_OK;
2587 }
2588
2589 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
2590 a call to __builtin_memset. In this case we know that the RHS is
2591 a CONSTRUCTOR with an empty element list. */
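/* As a rough sketch (illustrative only): "*to = {}" with a variable
   sized type becomes

     __builtin_memset (&*to, 0, SIZE);

   with the same return-value handling as the memcpy case above.  */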
2592
2593 static enum gimplify_status
2594 gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value)
2595 {
2596 tree args, t, to, to_ptr;
2597
2598 to = GENERIC_TREE_OPERAND (*expr_p, 0);
2599
2600 args = tree_cons (NULL, size, NULL);
2601
2602 args = tree_cons (NULL, integer_zero_node, args);
2603
2604 to_ptr = build_fold_addr_expr (to);
2605 args = tree_cons (NULL, to_ptr, args);
2606 t = implicit_built_in_decls[BUILT_IN_MEMSET];
2607 t = build_function_call_expr (t, args);
2608
2609 if (want_value)
2610 {
2611 t = build1 (NOP_EXPR, TREE_TYPE (to_ptr), t);
2612 t = build1 (INDIRECT_REF, TREE_TYPE (to), t);
2613 }
2614
2615 *expr_p = t;
2616 return GS_OK;
2617 }
2618
2619 /* A subroutine of gimplify_init_ctor_preeval. Called via walk_tree,
2620 determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
2621 assignment. Returns non-null if we detect a potential overlap. */
2622
2623 struct gimplify_init_ctor_preeval_data
2624 {
2625 /* The base decl of the lhs object. May be NULL, in which case we
2626 have to assume the lhs is indirect. */
2627 tree lhs_base_decl;
2628
2629 /* The alias set of the lhs object. */
2630 int lhs_alias_set;
2631 };
2632
2633 static tree
2634 gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
2635 {
2636 struct gimplify_init_ctor_preeval_data *data
2637 = (struct gimplify_init_ctor_preeval_data *) xdata;
2638 tree t = *tp;
2639
2640 /* If we find the base object, obviously we have overlap. */
2641 if (data->lhs_base_decl == t)
2642 return t;
2643
2644 /* If the constructor component is indirect, determine if we have a
2645 potential overlap with the lhs. The only bits of information we
2646 have to go on at this point are addressability and alias sets. */
2647 if (TREE_CODE (t) == INDIRECT_REF
2648 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
2649 && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
2650 return t;
2651
2652 if (IS_TYPE_OR_DECL_P (t))
2653 *walk_subtrees = 0;
2654 return NULL;
2655 }
2656
2657 /* A subroutine of gimplify_init_constructor. Pre-evaluate *EXPR_P,
2658 force values that overlap with the lhs (as described by *DATA)
2659 into temporaries. */
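/* An assumed example of why this matters: for an assignment whose
   CONSTRUCTOR refers back to the destination, conceptually

     a = { .x = 1, .y = a.x };

   the value a.x must be loaded into a temporary before the individual
   stores (or a block clear) begin, or the store to a.x could clobber
   it first.  */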
2660
2661 static void
2662 gimplify_init_ctor_preeval (tree *expr_p, tree *pre_p, tree *post_p,
2663 struct gimplify_init_ctor_preeval_data *data)
2664 {
2665 enum gimplify_status one;
2666
2667 /* If the value is invariant, then there's nothing to pre-evaluate.
2668 But ensure it doesn't have any side-effects since a SAVE_EXPR is
2669 invariant but has side effects and might contain a reference to
2670 the object we're initializing. */
2671 if (TREE_INVARIANT (*expr_p) && !TREE_SIDE_EFFECTS (*expr_p))
2672 return;
2673
2674 /* If the type has non-trivial constructors, we can't pre-evaluate. */
2675 if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
2676 return;
2677
2678 /* Recurse for nested constructors. */
2679 if (TREE_CODE (*expr_p) == CONSTRUCTOR)
2680 {
2681 unsigned HOST_WIDE_INT ix;
2682 constructor_elt *ce;
2683 VEC(constructor_elt,gc) *v = CONSTRUCTOR_ELTS (*expr_p);
2684
2685 for (ix = 0; VEC_iterate (constructor_elt, v, ix, ce); ix++)
2686 gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);
2687 return;
2688 }
2689
2690 /* If this is a variable sized type, we must remember the size. */
2691 maybe_with_size_expr (expr_p);
2692
2693 /* Gimplify the constructor element to something appropriate for the rhs
2694 of a MODIFY_EXPR. Given that we know the lhs is an aggregate, we know
2695 the gimplifier will consider this a store to memory. Doing this
2696 gimplification now means that we won't have to deal with complicated
2697 language-specific trees, nor trees like SAVE_EXPR that can induce
2698 exponential search behavior. */
2699 one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
2700 if (one == GS_ERROR)
2701 {
2702 *expr_p = NULL;
2703 return;
2704 }
2705
2706 /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
2707 with the lhs, since "a = { .x=a }" doesn't make sense. This will
2708 always be true for all scalars, since is_gimple_mem_rhs insists on a
2709 temporary variable for them. */
2710 if (DECL_P (*expr_p))
2711 return;
2712
2713 /* If this is of variable size, we have no choice but to assume it doesn't
2714 overlap since we can't make a temporary for it. */
2715 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
2716 return;
2717
2718 /* Otherwise, we must search for overlap ... */
2719 if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
2720 return;
2721
2722 /* ... and if found, force the value into a temporary. */
2723 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
2724 }
2725
2726 /* A subroutine of gimplify_init_ctor_eval. Create a loop for
2727 a RANGE_EXPR in a CONSTRUCTOR for an array.
2728
2729 var = lower;
2730 loop_entry:
2731 object[var] = value;
2732 if (var == upper)
2733 goto loop_exit;
2734 var = var + 1;
2735 goto loop_entry;
2736 loop_exit:
2737
2738 We increment var _after_ the loop exit check because we might otherwise
2739 fail if upper == TYPE_MAX_VALUE (type for upper).
2740
2741 Note that we never have to deal with SAVE_EXPRs here, because this has
2742 already been taken care of for us, in gimplify_init_ctor_preeval(). */
2743
2744 static void gimplify_init_ctor_eval (tree, VEC(constructor_elt,gc) *,
2745 tree *, bool);
2746
2747 static void
2748 gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
2749 tree value, tree array_elt_type,
2750 tree *pre_p, bool cleared)
2751 {
2752 tree loop_entry_label, loop_exit_label;
2753 tree var, var_type, cref;
2754
2755 loop_entry_label = create_artificial_label ();
2756 loop_exit_label = create_artificial_label ();
2757
2758 /* Create and initialize the index variable. */
2759 var_type = TREE_TYPE (upper);
2760 var = create_tmp_var (var_type, NULL);
2761 append_to_statement_list (build2 (GIMPLE_MODIFY_STMT, var_type, var, lower),
2762 pre_p);
2763
2764 /* Add the loop entry label. */
2765 append_to_statement_list (build1 (LABEL_EXPR,
2766 void_type_node,
2767 loop_entry_label),
2768 pre_p);
2769
2770 /* Build the reference. */
2771 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
2772 var, NULL_TREE, NULL_TREE);
2773
2774 /* If the value is itself a CONSTRUCTOR, just call gimplify_init_ctor_eval
2775 to do the store. Otherwise just assign value to the reference. */
2776
2777 if (TREE_CODE (value) == CONSTRUCTOR)
2778 /* NB we might have to call ourself recursively through
2779 gimplify_init_ctor_eval if the value is a constructor. */
2780 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
2781 pre_p, cleared);
2782 else
2783 append_to_statement_list (build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (cref),
2784 cref, value),
2785 pre_p);
2786
2787 /* We exit the loop when the index var is equal to the upper bound. */
2788 gimplify_and_add (build3 (COND_EXPR, void_type_node,
2789 build2 (EQ_EXPR, boolean_type_node,
2790 var, upper),
2791 build1 (GOTO_EXPR,
2792 void_type_node,
2793 loop_exit_label),
2794 NULL_TREE),
2795 pre_p);
2796
2797 /* Otherwise, increment the index var... */
2798 append_to_statement_list (build2 (GIMPLE_MODIFY_STMT, var_type, var,
2799 build2 (PLUS_EXPR, var_type, var,
2800 fold_convert (var_type,
2801 integer_one_node))),
2802 pre_p);
2803
2804 /* ...and jump back to the loop entry. */
2805 append_to_statement_list (build1 (GOTO_EXPR,
2806 void_type_node,
2807 loop_entry_label),
2808 pre_p);
2809
2810 /* Add the loop exit label. */
2811 append_to_statement_list (build1 (LABEL_EXPR,
2812 void_type_node,
2813 loop_exit_label),
2814 pre_p);
2815 }
2816
2817 /* Return true if FDECL is a FIELD_DECL whose size is zero. */
2818
2819 static bool
2820 zero_sized_field_decl (tree fdecl)
2821 {
2822 if (TREE_CODE (fdecl) == FIELD_DECL && DECL_SIZE (fdecl)
2823 && integer_zerop (DECL_SIZE (fdecl)))
2824 return true;
2825 return false;
2826 }
2827
2828 /* Return true if TYPE is zero sized. */
2829
2830 static bool
2831 zero_sized_type (tree type)
2832 {
2833 if (AGGREGATE_TYPE_P (type) && TYPE_SIZE (type)
2834 && integer_zerop (TYPE_SIZE (type)))
2835 return true;
2836 return false;
2837 }
2838
2839 /* A subroutine of gimplify_init_constructor. Generate individual
2840 MODIFY_EXPRs for a CONSTRUCTOR. OBJECT is the LHS against which the
2841 assignments should happen. ELTS is the CONSTRUCTOR_ELTS of the
2842 CONSTRUCTOR. CLEARED is true if the entire LHS object has been
2843 zeroed first. */
2844
2845 static void
2846 gimplify_init_ctor_eval (tree object, VEC(constructor_elt,gc) *elts,
2847 tree *pre_p, bool cleared)
2848 {
2849 tree array_elt_type = NULL;
2850 unsigned HOST_WIDE_INT ix;
2851 tree purpose, value;
2852
2853 if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
2854 array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));
2855
2856 FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
2857 {
2858 tree cref, init;
2859
2860 /* NULL values are created above for gimplification errors. */
2861 if (value == NULL)
2862 continue;
2863
2864 if (cleared && initializer_zerop (value))
2865 continue;
2866
2867 /* ??? Here's to hoping the front end fills in all of the indices,
2868 so we don't have to figure out what's missing ourselves. */
2869 gcc_assert (purpose);
2870
2871 /* Skip zero-sized fields, unless value has side-effects. This can
2872 happen with calls to functions returning a zero-sized type, which
2873 we shouldn't discard. As a number of downstream passes don't
2874 expect sets of zero-sized fields, we rely on the gimplification of
2875 the MODIFY_EXPR we make below to drop the assignment statement. */
2876 if (! TREE_SIDE_EFFECTS (value) && zero_sized_field_decl (purpose))
2877 continue;
2878
2879 /* If we have a RANGE_EXPR, we have to build a loop to assign the
2880 whole range. */
2881 if (TREE_CODE (purpose) == RANGE_EXPR)
2882 {
2883 tree lower = TREE_OPERAND (purpose, 0);
2884 tree upper = TREE_OPERAND (purpose, 1);
2885
2886 /* If the lower bound is equal to upper, just treat it as if
2887 upper was the index. */
2888 if (simple_cst_equal (lower, upper))
2889 purpose = upper;
2890 else
2891 {
2892 gimplify_init_ctor_eval_range (object, lower, upper, value,
2893 array_elt_type, pre_p, cleared);
2894 continue;
2895 }
2896 }
2897
2898 if (array_elt_type)
2899 {
2900 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
2901 purpose, NULL_TREE, NULL_TREE);
2902 }
2903 else
2904 {
2905 gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
2906 cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
2907 unshare_expr (object), purpose, NULL_TREE);
2908 }
2909
2910 if (TREE_CODE (value) == CONSTRUCTOR
2911 && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
2912 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
2913 pre_p, cleared);
2914 else
2915 {
2916 init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
2917 gimplify_and_add (init, pre_p);
2918 }
2919 }
2920 }
2921
2922 /* A subroutine of gimplify_modify_expr. Break out elements of a
2923 CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
2924
2925 Note that we still need to clear any elements that don't have explicit
2926 initializers, so if not all elements are initialized we keep the
2927 original MODIFY_EXPR, we just remove all of the constructor elements. */
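/* A rough sketch of the common aggregate case (illustrative; the exact
   choices are made by the code below): an initializer such as

     struct S s = { .a = 1, .b = f () };

   is typically lowered into an optional block clear followed by
   individual stores, roughly

     s = {};        (only when clearing is chosen)
     s.a = 1;
     s.b = f ();

   whereas a large constant initializer may instead be promoted to a
   static object and block-copied.  */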
2928
2929 static enum gimplify_status
2930 gimplify_init_constructor (tree *expr_p, tree *pre_p,
2931 tree *post_p, bool want_value)
2932 {
2933 tree object;
2934 tree ctor = GENERIC_TREE_OPERAND (*expr_p, 1);
2935 tree type = TREE_TYPE (ctor);
2936 enum gimplify_status ret;
2937 VEC(constructor_elt,gc) *elts;
2938
2939 if (TREE_CODE (ctor) != CONSTRUCTOR)
2940 return GS_UNHANDLED;
2941
2942 ret = gimplify_expr (&GENERIC_TREE_OPERAND (*expr_p, 0), pre_p, post_p,
2943 is_gimple_lvalue, fb_lvalue);
2944 if (ret == GS_ERROR)
2945 return ret;
2946 object = GENERIC_TREE_OPERAND (*expr_p, 0);
2947
2948 elts = CONSTRUCTOR_ELTS (ctor);
2949
2950 ret = GS_ALL_DONE;
2951 switch (TREE_CODE (type))
2952 {
2953 case RECORD_TYPE:
2954 case UNION_TYPE:
2955 case QUAL_UNION_TYPE:
2956 case ARRAY_TYPE:
2957 {
2958 struct gimplify_init_ctor_preeval_data preeval_data;
2959 HOST_WIDE_INT num_type_elements, num_ctor_elements;
2960 HOST_WIDE_INT num_nonzero_elements;
2961 bool cleared, valid_const_initializer;
2962
2963 /* Aggregate types must lower constructors to initialization of
2964 individual elements. The exception is that a CONSTRUCTOR node
2965 with no elements indicates zero-initialization of the whole. */
2966 if (VEC_empty (constructor_elt, elts))
2967 break;
2968
2969 /* Fetch information about the constructor to direct later processing.
2970 We might want to make static versions of it in various cases, and
2971 can only do so if it is known to be a valid constant initializer. */
2972 valid_const_initializer
2973 = categorize_ctor_elements (ctor, &num_nonzero_elements,
2974 &num_ctor_elements, &cleared);
2975
2976 /* If a const aggregate variable is being initialized, then it
2977 should never be a loss to promote the variable to be static. */
2978 if (valid_const_initializer
2979 && num_nonzero_elements > 1
2980 && TREE_READONLY (object)
2981 && TREE_CODE (object) == VAR_DECL)
2982 {
2983 DECL_INITIAL (object) = ctor;
2984 TREE_STATIC (object) = 1;
2985 if (!DECL_NAME (object))
2986 DECL_NAME (object) = create_tmp_var_name ("C");
2987 walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);
2988
2989 /* ??? C++ doesn't automatically append a .<number> to the
2990 assembler name, and even when it does, it looks at FE private
2991 data structures to figure out what that number should be,
2992 which are not set for this variable. I suppose this is
2993 important for local statics for inline functions, which aren't
2994 "local" in the object file sense. So in order to get a unique
2995 TU-local symbol, we must invoke the lhd version now. */
2996 lhd_set_decl_assembler_name (object);
2997
2998 *expr_p = NULL_TREE;
2999 break;
3000 }
3001
3002 /* If there are "lots" of initialized elements, even discounting
3003 those that are not address constants (and thus *must* be
3004 computed at runtime), then partition the constructor into
3005 constant and non-constant parts. Block copy the constant
3006 parts in, then generate code for the non-constant parts. */
3007 /* TODO. There's code in cp/typeck.c to do this. */
3008
3009 num_type_elements = count_type_elements (type, true);
3010
3011 /* If count_type_elements could not determine the number of type elements
3012 for a constant-sized object, assume clearing is needed.
3013 Don't do this for variable-sized objects, as store_constructor
3014 will ignore the clearing of variable-sized objects. */
3015 if (num_type_elements < 0 && int_size_in_bytes (type) >= 0)
3016 cleared = true;
3017 /* If there are "lots" of zeros, then block clear the object first. */
3018 else if (num_type_elements - num_nonzero_elements > CLEAR_RATIO
3019 && num_nonzero_elements < num_type_elements/4)
3020 cleared = true;
3021 /* ??? This bit ought not be needed. For any element not present
3022 in the initializer, we should simply set it to zero. Except
3023 we'd need to *find* the elements that are not present, and that
3024 requires trickery to avoid quadratic compile-time behavior in
3025 large cases or excessive memory use in small cases. */
3026 else if (num_ctor_elements < num_type_elements)
3027 cleared = true;
3028
3029 /* If there are "lots" of initialized elements, and all of them
3030 are valid address constants, then the entire initializer can
3031 be dropped to memory, and then memcpy'd out. Don't do this
3032 for sparse arrays, though, as it's more efficient to follow
3033 the standard CONSTRUCTOR behavior of memset followed by
3034 individual element initialization. */
3035 if (valid_const_initializer && !cleared)
3036 {
3037 HOST_WIDE_INT size = int_size_in_bytes (type);
3038 unsigned int align;
3039
3040 /* ??? We can still get unbounded array types, at least
3041 from the C++ front end. This seems wrong, but attempt
3042 to work around it for now. */
3043 if (size < 0)
3044 {
3045 size = int_size_in_bytes (TREE_TYPE (object));
3046 if (size >= 0)
3047 TREE_TYPE (ctor) = type = TREE_TYPE (object);
3048 }
3049
3050 /* Find the maximum alignment we can assume for the object. */
3051 /* ??? Make use of DECL_OFFSET_ALIGN. */
3052 if (DECL_P (object))
3053 align = DECL_ALIGN (object);
3054 else
3055 align = TYPE_ALIGN (type);
3056
3057 if (size > 0 && !can_move_by_pieces (size, align))
3058 {
3059 tree new = create_tmp_var_raw (type, "C");
3060
3061 gimple_add_tmp_var (new);
3062 TREE_STATIC (new) = 1;
3063 TREE_READONLY (new) = 1;
3064 DECL_INITIAL (new) = ctor;
3065 if (align > DECL_ALIGN (new))
3066 {
3067 DECL_ALIGN (new) = align;
3068 DECL_USER_ALIGN (new) = 1;
3069 }
3070 walk_tree (&DECL_INITIAL (new), force_labels_r, NULL, NULL);
3071
3072 GENERIC_TREE_OPERAND (*expr_p, 1) = new;
3073
3074 /* This is no longer an assignment of a CONSTRUCTOR, but
3075 we still may have processing to do on the LHS. So
3076 pretend we didn't do anything here to let that happen. */
3077 return GS_UNHANDLED;
3078 }
3079 }
3080
3081 /* If there are nonzero elements, pre-evaluate to capture elements
3082 overlapping with the lhs into temporaries. We must do this before
3083 clearing to fetch the values before they are zeroed-out. */
3084 if (num_nonzero_elements > 0)
3085 {
3086 preeval_data.lhs_base_decl = get_base_address (object);
3087 if (!DECL_P (preeval_data.lhs_base_decl))
3088 preeval_data.lhs_base_decl = NULL;
3089 preeval_data.lhs_alias_set = get_alias_set (object);
3090
3091 gimplify_init_ctor_preeval (&GENERIC_TREE_OPERAND (*expr_p, 1),
3092 pre_p, post_p, &preeval_data);
3093 }
3094
3095 if (cleared)
3096 {
3097 /* Zap the CONSTRUCTOR element list, which simplifies this case.
3098 Note that we still have to gimplify, in order to handle the
3099 case of variable sized types. Avoid shared tree structures. */
3100 CONSTRUCTOR_ELTS (ctor) = NULL;
3101 object = unshare_expr (object);
3102 gimplify_stmt (expr_p);
3103 append_to_statement_list (*expr_p, pre_p);
3104 }
3105
3106 /* If we have not block cleared the object, or if there are nonzero
3107 elements in the constructor, add assignments to the individual
3108 scalar fields of the object. */
3109 if (!cleared || num_nonzero_elements > 0)
3110 gimplify_init_ctor_eval (object, elts, pre_p, cleared);
3111
3112 *expr_p = NULL_TREE;
3113 }
3114 break;
3115
3116 case COMPLEX_TYPE:
3117 {
3118 tree r, i;
3119
3120 /* Extract the real and imaginary parts out of the ctor. */
3121 gcc_assert (VEC_length (constructor_elt, elts) == 2);
3122 r = VEC_index (constructor_elt, elts, 0)->value;
3123 i = VEC_index (constructor_elt, elts, 1)->value;
3124 if (r == NULL || i == NULL)
3125 {
3126 tree zero = fold_convert (TREE_TYPE (type), integer_zero_node);
3127 if (r == NULL)
3128 r = zero;
3129 if (i == NULL)
3130 i = zero;
3131 }
3132
3133 /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
3134 represent creation of a complex value. */
3135 if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
3136 {
3137 ctor = build_complex (type, r, i);
3138 TREE_OPERAND (*expr_p, 1) = ctor;
3139 }
3140 else
3141 {
3142 ctor = build2 (COMPLEX_EXPR, type, r, i);
3143 TREE_OPERAND (*expr_p, 1) = ctor;
3144 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
3145 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
3146 fb_rvalue);
3147 }
3148 }
3149 break;
3150
3151 case VECTOR_TYPE:
3152 {
3153 unsigned HOST_WIDE_INT ix;
3154 constructor_elt *ce;
3155
3156 /* Go ahead and simplify constant constructors to VECTOR_CST. */
3157 if (TREE_CONSTANT (ctor))
3158 {
3159 bool constant_p = true;
3160 tree value;
3161
3162 /* Even when ctor is constant, it might contain non-*_CST
3163 elements (e.g. { 1.0/0.0 - 1.0/0.0, 0.0 }) and those don't
3164 belong in VECTOR_CST nodes. */
3165 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
3166 if (!CONSTANT_CLASS_P (value))
3167 {
3168 constant_p = false;
3169 break;
3170 }
3171
3172 if (constant_p)
3173 {
3174 TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
3175 break;
3176 }
3177
3178 /* Don't reduce a TREE_CONSTANT vector ctor even if we can't
3179 make a VECTOR_CST. It won't do anything for us, and it'll
3180 prevent us from representing it as a single constant. */
3181 break;
3182 }
3183
3184 /* Vector types use CONSTRUCTOR all the way through gimple
3185 compilation as a general initializer. */
3186 for (ix = 0; VEC_iterate (constructor_elt, elts, ix, ce); ix++)
3187 {
3188 enum gimplify_status tret;
3189 tret = gimplify_expr (&ce->value, pre_p, post_p,
3190 is_gimple_val, fb_rvalue);
3191 if (tret == GS_ERROR)
3192 ret = GS_ERROR;
3193 }
3194 if (!is_gimple_reg (GENERIC_TREE_OPERAND (*expr_p, 0)))
3195 GENERIC_TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
3196 }
3197 break;
3198
3199 default:
3200 /* So how did we get a CONSTRUCTOR for a scalar type? */
3201 gcc_unreachable ();
3202 }
3203
3204 if (ret == GS_ERROR)
3205 return GS_ERROR;
3206 else if (want_value)
3207 {
3208 append_to_statement_list (*expr_p, pre_p);
3209 *expr_p = object;
3210 return GS_OK;
3211 }
3212 else
3213 return GS_ALL_DONE;
3214 }
3215
3216 /* Given a pointer value OP0, return a simplified version of an
3217 indirection through OP0, or NULL_TREE if no simplification is
3218 possible. This may only be applied to a rhs of an expression.
3219 Note that the resulting type may differ from the pointed-to type,
3220 but it will still be compatible with it from the langhooks
3221 point of view. */
3222
3223 static tree
3224 fold_indirect_ref_rhs (tree t)
3225 {
3226 tree type = TREE_TYPE (TREE_TYPE (t));
3227 tree sub = t;
3228 tree subtype;
3229
3230 STRIP_USELESS_TYPE_CONVERSION (sub);
3231 subtype = TREE_TYPE (sub);
3232 if (!POINTER_TYPE_P (subtype))
3233 return NULL_TREE;
3234
3235 if (TREE_CODE (sub) == ADDR_EXPR)
3236 {
3237 tree op = TREE_OPERAND (sub, 0);
3238 tree optype = TREE_TYPE (op);
3239 /* *&p => p */
3240 if (lang_hooks.types_compatible_p (type, optype))
3241 return op;
3242 /* *(foo *)&fooarray => fooarray[0] */
3243 else if (TREE_CODE (optype) == ARRAY_TYPE
3244 && lang_hooks.types_compatible_p (type, TREE_TYPE (optype)))
3245 {
3246 tree type_domain = TYPE_DOMAIN (optype);
3247 tree min_val = size_zero_node;
3248 if (type_domain && TYPE_MIN_VALUE (type_domain))
3249 min_val = TYPE_MIN_VALUE (type_domain);
3250 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
3251 }
3252 }
3253
3254 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
3255 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
3256 && lang_hooks.types_compatible_p (type, TREE_TYPE (TREE_TYPE (subtype))))
3257 {
3258 tree type_domain;
3259 tree min_val = size_zero_node;
3260 tree osub = sub;
3261 sub = fold_indirect_ref_rhs (sub);
3262 if (! sub)
3263 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
3264 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
3265 if (type_domain && TYPE_MIN_VALUE (type_domain))
3266 min_val = TYPE_MIN_VALUE (type_domain);
3267 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
3268 }
3269
3270 return NULL_TREE;
3271 }
3272
3273 /* Subroutine of gimplify_modify_expr to do simplifications of MODIFY_EXPRs
3274 based on the code of the RHS. We loop for as long as something changes. */
3275
3276 static enum gimplify_status
3277 gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p, tree *pre_p,
3278 tree *post_p, bool want_value)
3279 {
3280 enum gimplify_status ret = GS_OK;
3281
3282 while (ret != GS_UNHANDLED)
3283 switch (TREE_CODE (*from_p))
3284 {
3285 case INDIRECT_REF:
3286 {
3287 /* If we have code like
3288
3289 *(const A*)(A*)&x
3290
3291 where the type of "x" is a (possibly cv-qualified variant
3292 of "A"), treat the entire expression as identical to "x".
3293 This kind of code arises in C++ when an object is bound
3294 to a const reference, and if "x" is a TARGET_EXPR we want
3295 to take advantage of the optimization below. */
3296 tree t = fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
3297 if (t)
3298 {
3299 *from_p = t;
3300 ret = GS_OK;
3301 }
3302 else
3303 ret = GS_UNHANDLED;
3304 break;
3305 }
3306
3307 case TARGET_EXPR:
3308 {
3309 /* If we are initializing something from a TARGET_EXPR, strip the
3310 TARGET_EXPR and initialize it directly, if possible. This can't
3311 be done if the initializer is void, since that implies that the
3312 temporary is set in some non-trivial way.
3313
3314 ??? What about code that pulls out the temp and uses it
3315 elsewhere? I think that such code never uses the TARGET_EXPR as
3316 an initializer. If I'm wrong, we'll die because the temp won't
3317 have any RTL. In that case, I guess we'll need to replace
3318 references somehow. */
3319 tree init = TARGET_EXPR_INITIAL (*from_p);
3320
3321 if (!VOID_TYPE_P (TREE_TYPE (init)))
3322 {
3323 *from_p = init;
3324 ret = GS_OK;
3325 }
3326 else
3327 ret = GS_UNHANDLED;
3328 }
3329 break;
3330
3331 case COMPOUND_EXPR:
3332 /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
3333 caught. */
3334 gimplify_compound_expr (from_p, pre_p, true);
3335 ret = GS_OK;
3336 break;
3337
3338 case CONSTRUCTOR:
3339 /* If we're initializing from a CONSTRUCTOR, break this into
3340 individual MODIFY_EXPRs. */
3341 return gimplify_init_constructor (expr_p, pre_p, post_p, want_value);
3342
3343 case COND_EXPR:
3344 /* If we're assigning to a non-register type, push the assignment
3345 down into the branches. This is mandatory for ADDRESSABLE types,
3346 since we cannot generate temporaries for such, but it saves a
3347 copy in other cases as well. */
3348 if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
3349 {
3350 /* This code should mirror the code in gimplify_cond_expr. */
3351 enum tree_code code = TREE_CODE (*expr_p);
3352 tree cond = *from_p;
3353 tree result = *to_p;
3354
3355 ret = gimplify_expr (&result, pre_p, post_p,
3356 is_gimple_min_lval, fb_lvalue);
3357 if (ret != GS_ERROR)
3358 ret = GS_OK;
3359
3360 if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
3361 TREE_OPERAND (cond, 1)
3362 = build2 (code, void_type_node, result,
3363 TREE_OPERAND (cond, 1));
3364 if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
3365 TREE_OPERAND (cond, 2)
3366 = build2 (code, void_type_node, unshare_expr (result),
3367 TREE_OPERAND (cond, 2));
3368
3369 TREE_TYPE (cond) = void_type_node;
3370 recalculate_side_effects (cond);
3371
3372 if (want_value)
3373 {
3374 gimplify_and_add (cond, pre_p);
3375 *expr_p = unshare_expr (result);
3376 }
3377 else
3378 *expr_p = cond;
3379 return ret;
3380 }
3381 else
3382 ret = GS_UNHANDLED;
3383 break;
3384
3385 case CALL_EXPR:
3386 /* For calls that return in memory, give *to_p as the CALL_EXPR's
3387 return slot so that we don't generate a temporary. */
3388 if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
3389 && aggregate_value_p (*from_p, *from_p))
3390 {
3391 bool use_target;
3392
3393 if (!(rhs_predicate_for (*to_p))(*from_p))
3394 /* If we need a temporary, *to_p isn't accurate. */
3395 use_target = false;
3396 else if (TREE_CODE (*to_p) == RESULT_DECL
3397 && DECL_NAME (*to_p) == NULL_TREE
3398 && needs_to_live_in_memory (*to_p))
3399 /* It's OK to use the return slot directly unless it's an NRV. */
3400 use_target = true;
3401 else if (is_gimple_reg_type (TREE_TYPE (*to_p))
3402 || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
3403 /* Don't force regs into memory. */
3404 use_target = false;
3405 else if (TREE_CODE (*to_p) == VAR_DECL
3406 && DECL_GIMPLE_FORMAL_TEMP_P (*to_p))
3407 /* Don't use the original target if it's a formal temp; we
3408 don't want to take its address. */
3409 use_target = false;
3410 else if (TREE_CODE (*expr_p) == INIT_EXPR)
3411 /* It's OK to use the target directly if it's being
3412 initialized. */
3413 use_target = true;
3414 else if (!is_gimple_non_addressable (*to_p))
3415 /* Don't use the original target if it's already addressable;
3416 if its address escapes, and the called function uses the
3417 NRV optimization, a conforming program could see *to_p
3418 change before the called function returns; see c++/19317.
3419 When optimizing, the return_slot pass marks more functions
3420 as safe after we have escape info. */
3421 use_target = false;
3422 else
3423 use_target = true;
3424
3425 if (use_target)
3426 {
3427 CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
3428 lang_hooks.mark_addressable (*to_p);
3429 }
3430 }
3431
3432 ret = GS_UNHANDLED;
3433 break;
3434
3435 /* If we're initializing from a container, push the initialization
3436 inside it. */
3437 case CLEANUP_POINT_EXPR:
3438 case BIND_EXPR:
3439 case STATEMENT_LIST:
3440 {
3441 tree wrap = *from_p;
3442 tree t;
3443
3444 ret = gimplify_expr (to_p, pre_p, post_p,
3445 is_gimple_min_lval, fb_lvalue);
3446 if (ret != GS_ERROR)
3447 ret = GS_OK;
3448
3449 t = voidify_wrapper_expr (wrap, *expr_p);
3450 gcc_assert (t == *expr_p);
3451
3452 if (want_value)
3453 {
3454 gimplify_and_add (wrap, pre_p);
3455 *expr_p = unshare_expr (*to_p);
3456 }
3457 else
3458 *expr_p = wrap;
3459 return GS_OK;
3460 }
3461
3462 default:
3463 ret = GS_UNHANDLED;
3464 break;
3465 }
3466
3467 return ret;
3468 }
3469
3470 /* Destructively convert the tree pointed to by TP into a gimple tuple if
3471 appropriate. */
3472
3473 static void
3474 tree_to_gimple_tuple (tree *tp)
3475 {
3476
3477 switch (TREE_CODE (*tp))
3478 {
3479 case GIMPLE_MODIFY_STMT:
3480 return;
3481 case MODIFY_EXPR:
3482 {
3483 struct gimple_stmt *gs;
3484 tree lhs = TREE_OPERAND (*tp, 0);
3485 bool def_stmt_self_p = false;
3486
3487 if (TREE_CODE (lhs) == SSA_NAME)
3488 {
3489 if (SSA_NAME_DEF_STMT (lhs) == *tp)
3490 def_stmt_self_p = true;
3491 }
3492
3493 gs = &make_node (GIMPLE_MODIFY_STMT)->gstmt;
3494 gs->base = (*tp)->base;
3495 /* The set to base above overwrites the CODE. */
3496 TREE_SET_CODE ((tree) gs, GIMPLE_MODIFY_STMT);
3497
3498 gs->locus = EXPR_LOCUS (*tp);
3499 gs->operands[0] = TREE_OPERAND (*tp, 0);
3500 gs->operands[1] = TREE_OPERAND (*tp, 1);
3501 gs->block = TREE_BLOCK (*tp);
3502 *tp = (tree)gs;
3503
3504 /* If we re-gimplify a set to an SSA_NAME, we must change the
3505 SSA name's DEF_STMT link. */
3506 if (def_stmt_self_p)
3507 SSA_NAME_DEF_STMT (GIMPLE_STMT_OPERAND (*tp, 0)) = *tp;
3508
3509 return;
3510 }
3511 default:
3512 break;
3513 }
3514 }
3515
3516 /* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
3517 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with
3518 DECL_GIMPLE_REG_P set. */
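/* As a rough sketch (illustrative): for such a variable c, a partial
   store like

     __real__ c = x;

   is rewritten as a total store, roughly

     t = __imag__ c;
     c = COMPLEX_EXPR <x, t>;

   where t is a fresh formal temporary, so c can keep using real rather
   than virtual operands.  */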
3519
3520 static enum gimplify_status
3521 gimplify_modify_expr_complex_part (tree *expr_p, tree *pre_p, bool want_value)
3522 {
3523 enum tree_code code, ocode;
3524 tree lhs, rhs, new_rhs, other, realpart, imagpart;
3525
3526 lhs = GENERIC_TREE_OPERAND (*expr_p, 0);
3527 rhs = GENERIC_TREE_OPERAND (*expr_p, 1);
3528 code = TREE_CODE (lhs);
3529 lhs = TREE_OPERAND (lhs, 0);
3530
3531 ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
3532 other = build1 (ocode, TREE_TYPE (rhs), lhs);
3533 other = get_formal_tmp_var (other, pre_p);
3534
3535 realpart = code == REALPART_EXPR ? rhs : other;
3536 imagpart = code == REALPART_EXPR ? other : rhs;
3537
3538 if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
3539 new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
3540 else
3541 new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
3542
3543 GENERIC_TREE_OPERAND (*expr_p, 0) = lhs;
3544 GENERIC_TREE_OPERAND (*expr_p, 1) = new_rhs;
3545
3546 if (want_value)
3547 {
3548 tree_to_gimple_tuple (expr_p);
3549
3550 append_to_statement_list (*expr_p, pre_p);
3551 *expr_p = rhs;
3552 }
3553
3554 return GS_ALL_DONE;
3555 }
3556
3557 /* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
3558
3559 modify_expr
3560 : varname '=' rhs
3561 | '*' ID '=' rhs
3562
3563 PRE_P points to the list where side effects that must happen before
3564 *EXPR_P should be stored.
3565
3566 POST_P points to the list where side effects that must happen after
3567 *EXPR_P should be stored.
3568
3569 WANT_VALUE is nonzero iff we want to use the value of this expression
3570 in another expression. */
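/* As a rough sketch of the ordinary path (illustrative only): an
   assignment such as

     a = b + c * d;

   is gimplified by simplifying the RHS first, giving roughly

     t1 = c * d;
     a = b + t1;

   with t1 a fresh temporary; special RHS forms (CONSTRUCTOR, CALL_EXPR
   return slots, WITH_SIZE_EXPR, ...) are instead handled by the helper
   routines above.  */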
3571
3572 static enum gimplify_status
3573 gimplify_modify_expr (tree *expr_p, tree *pre_p, tree *post_p, bool want_value)
3574 {
3575 tree *from_p = &GENERIC_TREE_OPERAND (*expr_p, 1);
3576 tree *to_p = &GENERIC_TREE_OPERAND (*expr_p, 0);
3577 enum gimplify_status ret = GS_UNHANDLED;
3578
3579 gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
3580 || TREE_CODE (*expr_p) == GIMPLE_MODIFY_STMT
3581 || TREE_CODE (*expr_p) == INIT_EXPR);
3582
3583 /* For zero sized types, only gimplify the left hand side and right hand side
3584 as statements and throw away the assignment. */
3585 if (zero_sized_type (TREE_TYPE (*from_p)))
3586 {
3587 gimplify_stmt (from_p);
3588 gimplify_stmt (to_p);
3589 append_to_statement_list (*from_p, pre_p);
3590 append_to_statement_list (*to_p, pre_p);
3591 *expr_p = NULL_TREE;
3592 return GS_ALL_DONE;
3593 }
3594
3595 /* See if any simplifications can be done based on what the RHS is. */
3596 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
3597 want_value);
3598 if (ret != GS_UNHANDLED)
3599 return ret;
3600
3601 /* If the value being copied is of variable width, compute the length
3602 of the copy into a WITH_SIZE_EXPR. Note that we need to do this
3603 before gimplifying any of the operands so that we can resolve any
3604 PLACEHOLDER_EXPRs in the size. Also note that the RTL expander uses
3605 the size of the expression to be copied, not of the destination, so
3606      that is what we must use here.  */
3607 maybe_with_size_expr (from_p);
3608
3609 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
3610 if (ret == GS_ERROR)
3611 return ret;
3612
3613 ret = gimplify_expr (from_p, pre_p, post_p,
3614 rhs_predicate_for (*to_p), fb_rvalue);
3615 if (ret == GS_ERROR)
3616 return ret;
3617
3618 /* Now see if the above changed *from_p to something we handle specially. */
3619 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
3620 want_value);
3621 if (ret != GS_UNHANDLED)
3622 return ret;
3623
3624   /* If we've got a variable-sized assignment between two lvalues (i.e. one
3625      that does not involve a call), then we can make things a bit more
3626      straightforward by converting the assignment to memcpy or memset.  */
3627 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
3628 {
3629 tree from = TREE_OPERAND (*from_p, 0);
3630 tree size = TREE_OPERAND (*from_p, 1);
3631
3632 if (TREE_CODE (from) == CONSTRUCTOR)
3633 return gimplify_modify_expr_to_memset (expr_p, size, want_value);
3634 if (is_gimple_addressable (from))
3635 {
3636 *from_p = from;
3637 return gimplify_modify_expr_to_memcpy (expr_p, size, want_value);
3638 }
3639 }
3640
3641 /* Transform partial stores to non-addressable complex variables into
3642 total stores. This allows us to use real instead of virtual operands
3643 for these variables, which improves optimization. */
3644 if ((TREE_CODE (*to_p) == REALPART_EXPR
3645 || TREE_CODE (*to_p) == IMAGPART_EXPR)
3646 && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
3647 return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);
3648
3649 if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
3650 {
3651 /* If we've somehow already got an SSA_NAME on the LHS, then
3652 	 we've probably modified it twice.  Not good.  */
3653 gcc_assert (TREE_CODE (*to_p) != SSA_NAME);
3654 *to_p = make_ssa_name (*to_p, *expr_p);
3655 }
3656
3657 if (want_value)
3658 {
3659 tree_to_gimple_tuple (expr_p);
3660
3661 append_to_statement_list (*expr_p, pre_p);
3662 *expr_p = *to_p;
3663 return GS_OK;
3664 }
3665
3666 return GS_ALL_DONE;
3667 }
3668
3669 /* Gimplify a comparison between two variable-sized objects. Do this
3670 with a call to BUILT_IN_MEMCMP. */
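/* Roughly (illustrative), a comparison "a == b" between two objects of
   variable size is rewritten as

     __builtin_memcmp (&a, &b, <size of a>) == 0

   with the size expression unshared and any PLACEHOLDER_EXPRs in it
   substituted from A.  */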
3671
3672 static enum gimplify_status
3673 gimplify_variable_sized_compare (tree *expr_p)
3674 {
3675 tree op0 = TREE_OPERAND (*expr_p, 0);
3676 tree op1 = TREE_OPERAND (*expr_p, 1);
3677 tree args, t, dest;
3678
3679 t = TYPE_SIZE_UNIT (TREE_TYPE (op0));
3680 t = unshare_expr (t);
3681 t = SUBSTITUTE_PLACEHOLDER_IN_EXPR (t, op0);
3682 args = tree_cons (NULL, t, NULL);
3683 t = build_fold_addr_expr (op1);
3684 args = tree_cons (NULL, t, args);
3685 dest = build_fold_addr_expr (op0);
3686 args = tree_cons (NULL, dest, args);
3687 t = implicit_built_in_decls[BUILT_IN_MEMCMP];
3688 t = build_function_call_expr (t, args);
3689 *expr_p
3690 = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
3691
3692 return GS_OK;
3693 }
3694
3695 /* Gimplify a comparison between two aggregate objects of integral scalar
3696 mode as a comparison between the bitwise equivalent scalar values. */
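/* E.g. (illustrative), for two 4-byte structs X and Y this builds
   something like

     VIEW_CONVERT_EXPR<unsigned int>(x) == VIEW_CONVERT_EXPR<unsigned int>(y)

   using an unsigned integer type of the same machine mode.  */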
3697
3698 static enum gimplify_status
3699 gimplify_scalar_mode_aggregate_compare (tree *expr_p)
3700 {
3701 tree op0 = TREE_OPERAND (*expr_p, 0);
3702 tree op1 = TREE_OPERAND (*expr_p, 1);
3703
3704 tree type = TREE_TYPE (op0);
3705 tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
3706
3707 op0 = fold_build1 (VIEW_CONVERT_EXPR, scalar_type, op0);
3708 op1 = fold_build1 (VIEW_CONVERT_EXPR, scalar_type, op1);
3709
3710 *expr_p
3711 = fold_build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
3712
3713 return GS_OK;
3714 }
3715
3716 /* Gimplify TRUTH_ANDIF_EXPR and TRUTH_ORIF_EXPR expressions. EXPR_P
3717 points to the expression to gimplify.
3718
3719 Expressions of the form 'a && b' are gimplified to:
3720
3721 a && b ? true : false
3722
3723 gimplify_cond_expr will do the rest.
3724
3725 PRE_P points to the list where side effects that must happen before
3726 *EXPR_P should be stored. */
3727
3728 static enum gimplify_status
3729 gimplify_boolean_expr (tree *expr_p)
3730 {
3731 /* Preserve the original type of the expression. */
3732 tree type = TREE_TYPE (*expr_p);
3733
3734 *expr_p = build3 (COND_EXPR, type, *expr_p,
3735 fold_convert (type, boolean_true_node),
3736 fold_convert (type, boolean_false_node));
3737
3738 return GS_OK;
3739 }
3740
3741 /* Gimplifies an expression sequence. This function gimplifies each
3742 expression and re-writes the original expression with the last
3743 expression of the sequence in GIMPLE form.
3744
3745 PRE_P points to the list where the side effects for all the
3746 expressions in the sequence will be emitted.
3747
3748 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
3749 /* ??? Should rearrange to share the pre-queue with all the indirect
3750 invocations of gimplify_expr. Would probably save on creations
3751 of statement_list nodes. */
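/* For example (illustrative), gimplifying "(f (), g (), x)" emits the calls
   to f and g onto PRE_P and leaves just "x" in *EXPR_P, which is then itself
   gimplified as a statement if WANT_VALUE is false.  */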
3752
3753 static enum gimplify_status
3754 gimplify_compound_expr (tree *expr_p, tree *pre_p, bool want_value)
3755 {
3756 tree t = *expr_p;
3757
3758 do
3759 {
3760 tree *sub_p = &TREE_OPERAND (t, 0);
3761
3762 if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
3763 gimplify_compound_expr (sub_p, pre_p, false);
3764 else
3765 gimplify_stmt (sub_p);
3766 append_to_statement_list (*sub_p, pre_p);
3767
3768 t = TREE_OPERAND (t, 1);
3769 }
3770 while (TREE_CODE (t) == COMPOUND_EXPR);
3771
3772 *expr_p = t;
3773 if (want_value)
3774 return GS_OK;
3775 else
3776 {
3777 gimplify_stmt (expr_p);
3778 return GS_ALL_DONE;
3779 }
3780 }
3781
3782 /* Gimplifies a statement list. These may be created either by an
3783 enlightened front-end, or by shortcut_cond_expr. */
3784
3785 static enum gimplify_status
3786 gimplify_statement_list (tree *expr_p, tree *pre_p)
3787 {
3788 tree temp = voidify_wrapper_expr (*expr_p, NULL);
3789
3790 tree_stmt_iterator i = tsi_start (*expr_p);
3791
3792 while (!tsi_end_p (i))
3793 {
3794 tree t;
3795
3796 gimplify_stmt (tsi_stmt_ptr (i));
3797
3798 t = tsi_stmt (i);
3799 if (t == NULL)
3800 tsi_delink (&i);
3801 else if (TREE_CODE (t) == STATEMENT_LIST)
3802 {
3803 tsi_link_before (&i, t, TSI_SAME_STMT);
3804 tsi_delink (&i);
3805 }
3806 else
3807 tsi_next (&i);
3808 }
3809
3810 if (temp)
3811 {
3812 append_to_statement_list (*expr_p, pre_p);
3813 *expr_p = temp;
3814 return GS_OK;
3815 }
3816
3817 return GS_ALL_DONE;
3818 }
3819
3820 /* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
3821 gimplify. After gimplification, EXPR_P will point to a new temporary
3822 that holds the original value of the SAVE_EXPR node.
3823
3824 PRE_P points to the list where side effects that must happen before
3825 *EXPR_P should be stored. */
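/* For instance (illustrative), the first time SAVE_EXPR <n + 1> is seen its
   operand is evaluated into a temporary, say "D.1 = n + 1", on PRE_P; *EXPR_P
   and any later uses of the same SAVE_EXPR then reduce to D.1 (the temporary
   name is made up for the example).  */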
3826
3827 static enum gimplify_status
3828 gimplify_save_expr (tree *expr_p, tree *pre_p, tree *post_p)
3829 {
3830 enum gimplify_status ret = GS_ALL_DONE;
3831 tree val;
3832
3833 gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
3834 val = TREE_OPERAND (*expr_p, 0);
3835
3836 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
3837 if (!SAVE_EXPR_RESOLVED_P (*expr_p))
3838 {
3839 /* The operand may be a void-valued expression such as SAVE_EXPRs
3840 generated by the Java frontend for class initialization. It is
3841 being executed only for its side-effects. */
3842 if (TREE_TYPE (val) == void_type_node)
3843 {
3844 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
3845 is_gimple_stmt, fb_none);
3846 append_to_statement_list (TREE_OPERAND (*expr_p, 0), pre_p);
3847 val = NULL;
3848 }
3849 else
3850 val = get_initialized_tmp_var (val, pre_p, post_p);
3851
3852 TREE_OPERAND (*expr_p, 0) = val;
3853 SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
3854 }
3855
3856 *expr_p = val;
3857
3858 return ret;
3859 }
3860
3861 /* Re-write the ADDR_EXPR node pointed to by EXPR_P
3862
3863 unary_expr
3864 : ...
3865 | '&' varname
3866 ...
3867
3868 PRE_P points to the list where side effects that must happen before
3869 *EXPR_P should be stored.
3870
3871 POST_P points to the list where side effects that must happen after
3872 *EXPR_P should be stored. */
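/* For example (illustrative), "&*p" collapses to "p" (with a conversion if
   the pointer types differ only in qualifiers), and
   "&VIEW_CONVERT_EXPR<T>(x)" becomes the equivalent of "(T *) &x".  */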
3873
3874 static enum gimplify_status
3875 gimplify_addr_expr (tree *expr_p, tree *pre_p, tree *post_p)
3876 {
3877 tree expr = *expr_p;
3878 tree op0 = TREE_OPERAND (expr, 0);
3879 enum gimplify_status ret;
3880
3881 switch (TREE_CODE (op0))
3882 {
3883 case INDIRECT_REF:
3884 case MISALIGNED_INDIRECT_REF:
3885 do_indirect_ref:
3886 /* Check if we are dealing with an expression of the form '&*ptr'.
3887 While the front end folds away '&*ptr' into 'ptr', these
3888 expressions may be generated internally by the compiler (e.g.,
3889 builtins like __builtin_va_end). */
3890 /* Caution: the silent array decomposition semantics we allow for
3891 ADDR_EXPR means we can't always discard the pair. */
3892 /* Gimplification of the ADDR_EXPR operand may drop
3893 cv-qualification conversions, so make sure we add them if
3894 needed. */
3895 {
3896 tree op00 = TREE_OPERAND (op0, 0);
3897 tree t_expr = TREE_TYPE (expr);
3898 tree t_op00 = TREE_TYPE (op00);
3899
3900 if (!lang_hooks.types_compatible_p (t_expr, t_op00))
3901 {
3902 #ifdef ENABLE_CHECKING
3903 tree t_op0 = TREE_TYPE (op0);
3904 gcc_assert (POINTER_TYPE_P (t_expr)
3905 && cpt_same_type (TREE_CODE (t_op0) == ARRAY_TYPE
3906 ? TREE_TYPE (t_op0) : t_op0,
3907 TREE_TYPE (t_expr))
3908 && POINTER_TYPE_P (t_op00)
3909 && cpt_same_type (t_op0, TREE_TYPE (t_op00)));
3910 #endif
3911 op00 = fold_convert (TREE_TYPE (expr), op00);
3912 }
3913 *expr_p = op00;
3914 ret = GS_OK;
3915 }
3916 break;
3917
3918 case VIEW_CONVERT_EXPR:
3919 /* Take the address of our operand and then convert it to the type of
3920 this ADDR_EXPR.
3921
3922 	 ??? The interaction of VIEW_CONVERT_EXPR and aliasing is not at
3923 all clear. The impact of this transformation is even less clear. */
3924
3925 /* If the operand is a useless conversion, look through it. Doing so
3926 guarantees that the ADDR_EXPR and its operand will remain of the
3927 same type. */
3928 if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
3929 op0 = TREE_OPERAND (op0, 0);
3930
3931 *expr_p = fold_convert (TREE_TYPE (expr),
3932 build_fold_addr_expr (TREE_OPERAND (op0, 0)));
3933 ret = GS_OK;
3934 break;
3935
3936 default:
3937 /* We use fb_either here because the C frontend sometimes takes
3938 the address of a call that returns a struct; see
3939 gcc.dg/c99-array-lval-1.c. The gimplifier will correctly make
3940 the implied temporary explicit. */
3941 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
3942 is_gimple_addressable, fb_either);
3943 if (ret != GS_ERROR)
3944 {
3945 op0 = TREE_OPERAND (expr, 0);
3946
3947 /* For various reasons, the gimplification of the expression
3948 may have made a new INDIRECT_REF. */
3949 if (TREE_CODE (op0) == INDIRECT_REF)
3950 goto do_indirect_ref;
3951
3952 /* Make sure TREE_INVARIANT, TREE_CONSTANT, and TREE_SIDE_EFFECTS
3953 	     are set properly.  */
3954 recompute_tree_invariant_for_addr_expr (expr);
3955
3956 /* Mark the RHS addressable. */
3957 lang_hooks.mark_addressable (TREE_OPERAND (expr, 0));
3958 }
3959 break;
3960 }
3961
3962 return ret;
3963 }
3964
3965 /* Gimplify the operands of an ASM_EXPR. Input operands should be a gimple
3966 value; output operands should be a gimple lvalue. */
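/* For example (illustrative), an in/out operand written as "+r" (x) is split
   below into an "=r" (x) output plus a matching-number input, so that the
   optimizers can treat the read and the write independently.  */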
3967
3968 static enum gimplify_status
3969 gimplify_asm_expr (tree *expr_p, tree *pre_p, tree *post_p)
3970 {
3971 tree expr = *expr_p;
3972 int noutputs = list_length (ASM_OUTPUTS (expr));
3973 const char **oconstraints
3974 = (const char **) alloca ((noutputs) * sizeof (const char *));
3975 int i;
3976 tree link;
3977 const char *constraint;
3978 bool allows_mem, allows_reg, is_inout;
3979 enum gimplify_status ret, tret;
3980
3981 ret = GS_ALL_DONE;
3982 for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = TREE_CHAIN (link))
3983 {
3984 size_t constraint_len;
3985 oconstraints[i] = constraint
3986 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
3987 constraint_len = strlen (constraint);
3988 if (constraint_len == 0)
3989 continue;
3990
3991 parse_output_constraint (&constraint, i, 0, 0,
3992 &allows_mem, &allows_reg, &is_inout);
3993
3994 if (!allows_reg && allows_mem)
3995 lang_hooks.mark_addressable (TREE_VALUE (link));
3996
3997 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
3998 is_inout ? is_gimple_min_lval : is_gimple_lvalue,
3999 fb_lvalue | fb_mayfail);
4000 if (tret == GS_ERROR)
4001 {
4002 error ("invalid lvalue in asm output %d", i);
4003 ret = tret;
4004 }
4005
4006 if (is_inout)
4007 {
4008 /* An input/output operand. To give the optimizers more
4009 flexibility, split it into separate input and output
4010 operands. */
4011 tree input;
4012 char buf[10];
4013
4014 /* Turn the in/out constraint into an output constraint. */
4015 char *p = xstrdup (constraint);
4016 p[0] = '=';
4017 TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);
4018
4019 /* And add a matching input constraint. */
4020 if (allows_reg)
4021 {
4022 sprintf (buf, "%d", i);
4023
4024 /* If there are multiple alternatives in the constraint,
4025 		 handle each of them individually.  Those that allow a register
4026 		 will be replaced with the operand number; the others will stay
4027 		 unchanged.  */
4028 if (strchr (p, ',') != NULL)
4029 {
4030 size_t len = 0, buflen = strlen (buf);
4031 char *beg, *end, *str, *dst;
4032
4033 for (beg = p + 1;;)
4034 {
4035 end = strchr (beg, ',');
4036 if (end == NULL)
4037 end = strchr (beg, '\0');
4038 if ((size_t) (end - beg) < buflen)
4039 len += buflen + 1;
4040 else
4041 len += end - beg + 1;
4042 if (*end)
4043 beg = end + 1;
4044 else
4045 break;
4046 }
4047
4048 str = (char *) alloca (len);
4049 for (beg = p + 1, dst = str;;)
4050 {
4051 const char *tem;
4052 bool mem_p, reg_p, inout_p;
4053
4054 end = strchr (beg, ',');
4055 if (end)
4056 *end = '\0';
4057 beg[-1] = '=';
4058 tem = beg - 1;
4059 parse_output_constraint (&tem, i, 0, 0,
4060 &mem_p, &reg_p, &inout_p);
4061 if (dst != str)
4062 *dst++ = ',';
4063 if (reg_p)
4064 {
4065 memcpy (dst, buf, buflen);
4066 dst += buflen;
4067 }
4068 else
4069 {
4070 if (end)
4071 len = end - beg;
4072 else
4073 len = strlen (beg);
4074 memcpy (dst, beg, len);
4075 dst += len;
4076 }
4077 if (end)
4078 beg = end + 1;
4079 else
4080 break;
4081 }
4082 *dst = '\0';
4083 input = build_string (dst - str, str);
4084 }
4085 else
4086 input = build_string (strlen (buf), buf);
4087 }
4088 else
4089 input = build_string (constraint_len - 1, constraint + 1);
4090
4091 free (p);
4092
4093 input = build_tree_list (build_tree_list (NULL_TREE, input),
4094 unshare_expr (TREE_VALUE (link)));
4095 ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
4096 }
4097 }
4098
4099 for (link = ASM_INPUTS (expr); link; ++i, link = TREE_CHAIN (link))
4100 {
4101 constraint
4102 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4103 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
4104 oconstraints, &allows_mem, &allows_reg);
4105
4106 /* If the operand is a memory input, it should be an lvalue. */
4107 if (!allows_reg && allows_mem)
4108 {
4109 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
4110 is_gimple_lvalue, fb_lvalue | fb_mayfail);
4111 lang_hooks.mark_addressable (TREE_VALUE (link));
4112 if (tret == GS_ERROR)
4113 {
4114 error ("memory input %d is not directly addressable", i);
4115 ret = tret;
4116 }
4117 }
4118 else
4119 {
4120 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
4121 is_gimple_asm_val, fb_rvalue);
4122 if (tret == GS_ERROR)
4123 ret = tret;
4124 }
4125 }
4126
4127 return ret;
4128 }
4129
4130 /* Gimplify a CLEANUP_POINT_EXPR. Currently this works by adding
4131 WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
4132 gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
4133 return to this function.
4134
4135 FIXME should we complexify the prequeue handling instead? Or use flags
4136 for all the cleanups and let the optimizer tighten them up? The current
4137 code seems pretty fragile; it will break on a cleanup within any
4138 non-conditional nesting. But any such nesting would be broken, anyway;
4139 we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
4140 and continues out of it. We can do that at the RTL level, though, so
4141 having an optimizer to tighten up try/finally regions would be a Good
4142 Thing. */
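/* Schematically (illustrative), a gimplified body of the form

     stmt1; WITH_CLEANUP_EXPR <cleanup>; stmt2; stmt3;

   is rewritten below as

     stmt1; TRY_FINALLY_EXPR <<stmt2; stmt3;>, cleanup>;

   or a TRY_CATCH_EXPR when the cleanup is CLEANUP_EH_ONLY.  */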
4143
4144 static enum gimplify_status
4145 gimplify_cleanup_point_expr (tree *expr_p, tree *pre_p)
4146 {
4147 tree_stmt_iterator iter;
4148 tree body;
4149
4150 tree temp = voidify_wrapper_expr (*expr_p, NULL);
4151
4152 /* We only care about the number of conditions between the innermost
4153 CLEANUP_POINT_EXPR and the cleanup. So save and reset the count and
4154 any cleanups collected outside the CLEANUP_POINT_EXPR. */
4155 int old_conds = gimplify_ctxp->conditions;
4156 tree old_cleanups = gimplify_ctxp->conditional_cleanups;
4157 gimplify_ctxp->conditions = 0;
4158 gimplify_ctxp->conditional_cleanups = NULL_TREE;
4159
4160 body = TREE_OPERAND (*expr_p, 0);
4161 gimplify_to_stmt_list (&body);
4162
4163 gimplify_ctxp->conditions = old_conds;
4164 gimplify_ctxp->conditional_cleanups = old_cleanups;
4165
4166 for (iter = tsi_start (body); !tsi_end_p (iter); )
4167 {
4168 tree *wce_p = tsi_stmt_ptr (iter);
4169 tree wce = *wce_p;
4170
4171 if (TREE_CODE (wce) == WITH_CLEANUP_EXPR)
4172 {
4173 if (tsi_one_before_end_p (iter))
4174 {
4175 tsi_link_before (&iter, TREE_OPERAND (wce, 0), TSI_SAME_STMT);
4176 tsi_delink (&iter);
4177 break;
4178 }
4179 else
4180 {
4181 tree sl, tfe;
4182 enum tree_code code;
4183
4184 if (CLEANUP_EH_ONLY (wce))
4185 code = TRY_CATCH_EXPR;
4186 else
4187 code = TRY_FINALLY_EXPR;
4188
4189 sl = tsi_split_statement_list_after (&iter);
4190 tfe = build2 (code, void_type_node, sl, NULL_TREE);
4191 append_to_statement_list (TREE_OPERAND (wce, 0),
4192 &TREE_OPERAND (tfe, 1));
4193 *wce_p = tfe;
4194 iter = tsi_start (sl);
4195 }
4196 }
4197 else
4198 tsi_next (&iter);
4199 }
4200
4201 if (temp)
4202 {
4203 *expr_p = temp;
4204 append_to_statement_list (body, pre_p);
4205 return GS_OK;
4206 }
4207 else
4208 {
4209 *expr_p = body;
4210 return GS_ALL_DONE;
4211 }
4212 }
4213
4214 /* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP
4215 is the cleanup action required. */
4216
4217 static void
4218 gimple_push_cleanup (tree var, tree cleanup, bool eh_only, tree *pre_p)
4219 {
4220 tree wce;
4221
4222   /* Errors can result in improperly nested cleanups, which in turn cause
4223      confusion when trying to resolve the WITH_CLEANUP_EXPR.  */
4224 if (errorcount || sorrycount)
4225 return;
4226
4227 if (gimple_conditional_context ())
4228 {
4229 /* If we're in a conditional context, this is more complex. We only
4230 want to run the cleanup if we actually ran the initialization that
4231 necessitates it, but we want to run it after the end of the
4232 conditional context. So we wrap the try/finally around the
4233 condition and use a flag to determine whether or not to actually
4234 run the destructor. Thus
4235
4236 test ? f(A()) : 0
4237
4238 becomes (approximately)
4239
4240 flag = 0;
4241 try {
4242 if (test) { A::A(temp); flag = 1; val = f(temp); }
4243 else { val = 0; }
4244 } finally {
4245 if (flag) A::~A(temp);
4246 }
4247 val
4248 */
4249
4250 tree flag = create_tmp_var (boolean_type_node, "cleanup");
4251 tree ffalse = build2 (GIMPLE_MODIFY_STMT, void_type_node, flag,
4252 boolean_false_node);
4253 tree ftrue = build2 (GIMPLE_MODIFY_STMT, void_type_node, flag,
4254 boolean_true_node);
4255 cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
4256 wce = build1 (WITH_CLEANUP_EXPR, void_type_node, cleanup);
4257 append_to_statement_list (ffalse, &gimplify_ctxp->conditional_cleanups);
4258 append_to_statement_list (wce, &gimplify_ctxp->conditional_cleanups);
4259 append_to_statement_list (ftrue, pre_p);
4260
4261 /* Because of this manipulation, and the EH edges that jump
4262 threading cannot redirect, the temporary (VAR) will appear
4263 to be used uninitialized. Don't warn. */
4264 TREE_NO_WARNING (var) = 1;
4265 }
4266 else
4267 {
4268 wce = build1 (WITH_CLEANUP_EXPR, void_type_node, cleanup);
4269 CLEANUP_EH_ONLY (wce) = eh_only;
4270 append_to_statement_list (wce, pre_p);
4271 }
4272
4273 gimplify_stmt (&TREE_OPERAND (wce, 0));
4274 }
4275
4276 /* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */
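/* For instance (illustrative), TARGET_EXPR <D.1, f ()> causes the
   initialization "D.1 = f ()" (plus any cleanup registered for D.1) to be
   emitted onto PRE_P, and *EXPR_P is replaced by the slot D.1.  */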
4277
4278 static enum gimplify_status
4279 gimplify_target_expr (tree *expr_p, tree *pre_p, tree *post_p)
4280 {
4281 tree targ = *expr_p;
4282 tree temp = TARGET_EXPR_SLOT (targ);
4283 tree init = TARGET_EXPR_INITIAL (targ);
4284 enum gimplify_status ret;
4285
4286 if (init)
4287 {
4288       /* TARGET_EXPR temps aren't part of the enclosing block, so add this
4289 	 one to the temps list.  */
4290 gimple_add_tmp_var (temp);
4291
4292 /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
4293 expression is supposed to initialize the slot. */
4294 if (VOID_TYPE_P (TREE_TYPE (init)))
4295 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
4296 else
4297 {
4298 init = build2 (INIT_EXPR, void_type_node, temp, init);
4299 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt,
4300 fb_none);
4301 }
4302 if (ret == GS_ERROR)
4303 return GS_ERROR;
4304 append_to_statement_list (init, pre_p);
4305
4306 /* If needed, push the cleanup for the temp. */
4307 if (TARGET_EXPR_CLEANUP (targ))
4308 {
4309 gimplify_stmt (&TARGET_EXPR_CLEANUP (targ));
4310 gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
4311 CLEANUP_EH_ONLY (targ), pre_p);
4312 }
4313
4314 /* Only expand this once. */
4315 TREE_OPERAND (targ, 3) = init;
4316 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
4317 }
4318 else
4319 /* We should have expanded this before. */
4320 gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));
4321
4322 *expr_p = temp;
4323 return GS_OK;
4324 }
4325
4326 /* Gimplification of expression trees. */
4327
4328 /* Gimplify an expression which appears in statement context; usually, this
4329 means replacing it with a suitably gimple STATEMENT_LIST. */
4330
4331 void
4332 gimplify_stmt (tree *stmt_p)
4333 {
4334 gimplify_expr (stmt_p, NULL, NULL, is_gimple_stmt, fb_none);
4335 }
4336
4337 /* Similarly, but force the result to be a STATEMENT_LIST. */
4338
4339 void
4340 gimplify_to_stmt_list (tree *stmt_p)
4341 {
4342 gimplify_stmt (stmt_p);
4343 if (!*stmt_p)
4344 *stmt_p = alloc_stmt_list ();
4345 else if (TREE_CODE (*stmt_p) != STATEMENT_LIST)
4346 {
4347 tree t = *stmt_p;
4348 *stmt_p = alloc_stmt_list ();
4349 append_to_statement_list (t, stmt_p);
4350 }
4351 }
4352
4353
4354 /* Add FIRSTPRIVATE entries for DECL in the surrounding OpenMP parallels,
4355    starting at CTX.  If entries already exist, force them to be some flavor
4356    of private.  If there is no enclosing parallel, do nothing.  */
4357
4358 void
4359 omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
4360 {
4361 splay_tree_node n;
4362
4363 if (decl == NULL || !DECL_P (decl))
4364 return;
4365
4366 do
4367 {
4368 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
4369 if (n != NULL)
4370 {
4371 if (n->value & GOVD_SHARED)
4372 n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
4373 else
4374 return;
4375 }
4376 else if (ctx->is_parallel)
4377 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
4378
4379 ctx = ctx->outer_context;
4380 }
4381 while (ctx);
4382 }
4383
4384 /* Similarly for each of the type sizes of TYPE. */
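/* E.g. (illustrative), for a variable-length array type such as "int[n]"
   this makes the gimplified bound and size temporaries firstprivate, so the
   layout of the type can be recomputed inside the parallel.  */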
4385
4386 static void
4387 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
4388 {
4389 if (type == NULL || type == error_mark_node)
4390 return;
4391 type = TYPE_MAIN_VARIANT (type);
4392
4393 if (pointer_set_insert (ctx->privatized_types, type))
4394 return;
4395
4396 switch (TREE_CODE (type))
4397 {
4398 case INTEGER_TYPE:
4399 case ENUMERAL_TYPE:
4400 case BOOLEAN_TYPE:
4401 case REAL_TYPE:
4402 omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
4403 omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
4404 break;
4405
4406 case ARRAY_TYPE:
4407 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
4408 omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
4409 break;
4410
4411 case RECORD_TYPE:
4412 case UNION_TYPE:
4413 case QUAL_UNION_TYPE:
4414 {
4415 tree field;
4416 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
4417 if (TREE_CODE (field) == FIELD_DECL)
4418 {
4419 omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
4420 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
4421 }
4422 }
4423 break;
4424
4425 case POINTER_TYPE:
4426 case REFERENCE_TYPE:
4427 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
4428 break;
4429
4430 default:
4431 break;
4432 }
4433
4434 omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
4435 omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
4436 lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
4437 }
4438
4439 /* Add an entry for DECL in the OpenMP context CTX with FLAGS. */
4440
4441 static void
4442 omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
4443 {
4444 splay_tree_node n;
4445 unsigned int nflags;
4446 tree t;
4447
4448 if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node)
4449 return;
4450
4451 /* Never elide decls whose type has TREE_ADDRESSABLE set. This means
4452 there are constructors involved somewhere. */
4453 if (TREE_ADDRESSABLE (TREE_TYPE (decl))
4454 || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl)))
4455 flags |= GOVD_SEEN;
4456
4457 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
4458 if (n != NULL)
4459 {
4460 /* We shouldn't be re-adding the decl with the same data
4461 sharing class. */
4462 gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
4463 /* The only combination of data sharing classes we should see is
4464 FIRSTPRIVATE and LASTPRIVATE. */
4465 nflags = n->value | flags;
4466 gcc_assert ((nflags & GOVD_DATA_SHARE_CLASS)
4467 == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE));
4468 n->value = nflags;
4469 return;
4470 }
4471
4472 /* When adding a variable-sized variable, we have to handle all sorts
4473 of additional bits of data: the pointer replacement variable, and
4474 the parameters of the type. */
4475 if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
4476 {
4477 /* Add the pointer replacement variable as PRIVATE if the variable
4478 replacement is private, else FIRSTPRIVATE since we'll need the
4479 address of the original variable either for SHARED, or for the
4480 copy into or out of the context. */
4481 if (!(flags & GOVD_LOCAL))
4482 {
4483 nflags = flags & GOVD_PRIVATE ? GOVD_PRIVATE : GOVD_FIRSTPRIVATE;
4484 nflags |= flags & GOVD_SEEN;
4485 t = DECL_VALUE_EXPR (decl);
4486 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
4487 t = TREE_OPERAND (t, 0);
4488 gcc_assert (DECL_P (t));
4489 omp_add_variable (ctx, t, nflags);
4490 }
4491
4492 /* Add all of the variable and type parameters (which should have
4493 been gimplified to a formal temporary) as FIRSTPRIVATE. */
4494 omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
4495 omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
4496 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
4497
4498 /* The variable-sized variable itself is never SHARED, only some form
4499 of PRIVATE. The sharing would take place via the pointer variable
4500 which we remapped above. */
4501 if (flags & GOVD_SHARED)
4502 flags = GOVD_PRIVATE | GOVD_DEBUG_PRIVATE
4503 | (flags & (GOVD_SEEN | GOVD_EXPLICIT));
4504
4505 /* We're going to make use of the TYPE_SIZE_UNIT at least in the
4506 alloca statement we generate for the variable, so make sure it
4507 is available. This isn't automatically needed for the SHARED
4508 case, since we won't be allocating local storage then. */
4509 else
4510 omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
4511 }
4512 else if (lang_hooks.decls.omp_privatize_by_reference (decl))
4513 {
4514 gcc_assert ((flags & GOVD_LOCAL) == 0);
4515 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
4516
4517 /* Similar to the direct variable sized case above, we'll need the
4518 size of references being privatized. */
4519 if ((flags & GOVD_SHARED) == 0)
4520 {
4521 t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
4522 if (TREE_CODE (t) != INTEGER_CST)
4523 omp_notice_variable (ctx, t, true);
4524 }
4525 }
4526
4527 splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
4528 }
4529
4530 /* Record the fact that DECL was used within the OpenMP context CTX.
4531 IN_CODE is true when real code uses DECL, and false when we should
4532 merely emit default(none) errors. Return true if DECL is going to
4533 be remapped and thus DECL shouldn't be gimplified into its
4534 DECL_VALUE_EXPR (if any). */
4535
4536 static bool
4537 omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
4538 {
4539 splay_tree_node n;
4540 unsigned flags = in_code ? GOVD_SEEN : 0;
4541 bool ret = false, shared;
4542
4543 if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node)
4544 return false;
4545
4546 /* Threadprivate variables are predetermined. */
4547 if (is_global_var (decl))
4548 {
4549 if (DECL_THREAD_LOCAL_P (decl))
4550 return false;
4551
4552 if (DECL_HAS_VALUE_EXPR_P (decl))
4553 {
4554 tree value = get_base_address (DECL_VALUE_EXPR (decl));
4555
4556 if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
4557 return false;
4558 }
4559 }
4560
4561 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
4562 if (n == NULL)
4563 {
4564 enum omp_clause_default_kind default_kind, kind;
4565
4566 if (!ctx->is_parallel)
4567 goto do_outer;
4568
4569 /* ??? Some compiler-generated variables (like SAVE_EXPRs) could be
4570 remapped firstprivate instead of shared. To some extent this is
4571 addressed in omp_firstprivatize_type_sizes, but not effectively. */
4572 default_kind = ctx->default_kind;
4573 kind = lang_hooks.decls.omp_predetermined_sharing (decl);
4574 if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
4575 default_kind = kind;
4576
4577 switch (default_kind)
4578 {
4579 case OMP_CLAUSE_DEFAULT_NONE:
4580 error ("%qs not specified in enclosing parallel",
4581 IDENTIFIER_POINTER (DECL_NAME (decl)));
4582 error ("%Henclosing parallel", &ctx->location);
4583 /* FALLTHRU */
4584 case OMP_CLAUSE_DEFAULT_SHARED:
4585 flags |= GOVD_SHARED;
4586 break;
4587 case OMP_CLAUSE_DEFAULT_PRIVATE:
4588 flags |= GOVD_PRIVATE;
4589 break;
4590 default:
4591 gcc_unreachable ();
4592 }
4593
4594 omp_add_variable (ctx, decl, flags);
4595
4596 shared = (flags & GOVD_SHARED) != 0;
4597 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
4598 goto do_outer;
4599 }
4600
4601 shared = ((flags | n->value) & GOVD_SHARED) != 0;
4602 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
4603
4604 /* If nothing changed, there's nothing left to do. */
4605 if ((n->value & flags) == flags)
4606 return ret;
4607 flags |= n->value;
4608 n->value = flags;
4609
4610 do_outer:
4611 /* If the variable is private in the current context, then we don't
4612 need to propagate anything to an outer context. */
4613 if (flags & GOVD_PRIVATE)
4614 return ret;
4615 if (ctx->outer_context
4616 && omp_notice_variable (ctx->outer_context, decl, in_code))
4617 return true;
4618 return ret;
4619 }
4620
4621 /* Verify that DECL is private within CTX. If there's specific information
4622 to the contrary in the innermost scope, generate an error. */
4623
4624 static bool
4625 omp_is_private (struct gimplify_omp_ctx *ctx, tree decl)
4626 {
4627 splay_tree_node n;
4628
4629 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
4630 if (n != NULL)
4631 {
4632 if (n->value & GOVD_SHARED)
4633 {
4634 if (ctx == gimplify_omp_ctxp)
4635 {
4636 error ("iteration variable %qs should be private",
4637 IDENTIFIER_POINTER (DECL_NAME (decl)));
4638 n->value = GOVD_PRIVATE;
4639 return true;
4640 }
4641 else
4642 return false;
4643 }
4644 else if ((n->value & GOVD_EXPLICIT) != 0
4645 && (ctx == gimplify_omp_ctxp
4646 || (ctx->is_combined_parallel
4647 && gimplify_omp_ctxp->outer_context == ctx)))
4648 {
4649 if ((n->value & GOVD_FIRSTPRIVATE) != 0)
4650 error ("iteration variable %qs should not be firstprivate",
4651 IDENTIFIER_POINTER (DECL_NAME (decl)));
4652 else if ((n->value & GOVD_REDUCTION) != 0)
4653 error ("iteration variable %qs should not be reduction",
4654 IDENTIFIER_POINTER (DECL_NAME (decl)));
4655 }
4656 return true;
4657 }
4658
4659 if (ctx->is_parallel)
4660 return false;
4661 else if (ctx->outer_context)
4662 return omp_is_private (ctx->outer_context, decl);
4663 else
4664 return !is_global_var (decl);
4665 }
4666
4667 /* Scan the OpenMP clauses in *LIST_P, installing mappings into a new
4668    omp context as well as the previous omp contexts.  */
4669
4670 static void
4671 gimplify_scan_omp_clauses (tree *list_p, tree *pre_p, bool in_parallel,
4672 bool in_combined_parallel)
4673 {
4674 struct gimplify_omp_ctx *ctx, *outer_ctx;
4675 tree c;
4676
4677 ctx = new_omp_context (in_parallel, in_combined_parallel);
4678 outer_ctx = ctx->outer_context;
4679
4680 while ((c = *list_p) != NULL)
4681 {
4682 enum gimplify_status gs;
4683 bool remove = false;
4684 bool notice_outer = true;
4685 unsigned int flags;
4686 tree decl;
4687
4688 switch (OMP_CLAUSE_CODE (c))
4689 {
4690 case OMP_CLAUSE_PRIVATE:
4691 flags = GOVD_PRIVATE | GOVD_EXPLICIT;
4692 notice_outer = false;
4693 goto do_add;
4694 case OMP_CLAUSE_SHARED:
4695 flags = GOVD_SHARED | GOVD_EXPLICIT;
4696 goto do_add;
4697 case OMP_CLAUSE_FIRSTPRIVATE:
4698 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
4699 goto do_add;
4700 case OMP_CLAUSE_LASTPRIVATE:
4701 flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
4702 goto do_add;
4703 case OMP_CLAUSE_REDUCTION:
4704 flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
4705 goto do_add;
4706
4707 do_add:
4708 decl = OMP_CLAUSE_DECL (c);
4709 if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node)
4710 {
4711 remove = true;
4712 break;
4713 }
4714 /* Handle NRV results passed by reference. */
4715 if (TREE_CODE (decl) == INDIRECT_REF
4716 && TREE_CODE (TREE_OPERAND (decl, 0)) == RESULT_DECL
4717 && DECL_BY_REFERENCE (TREE_OPERAND (decl, 0)))
4718 OMP_CLAUSE_DECL (c) = decl = TREE_OPERAND (decl, 0);
4719 omp_add_variable (ctx, decl, flags);
4720 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
4721 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4722 {
4723 omp_add_variable (ctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
4724 GOVD_LOCAL | GOVD_SEEN);
4725 gimplify_omp_ctxp = ctx;
4726 push_gimplify_context ();
4727 gimplify_stmt (&OMP_CLAUSE_REDUCTION_INIT (c));
4728 pop_gimplify_context (OMP_CLAUSE_REDUCTION_INIT (c));
4729 push_gimplify_context ();
4730 gimplify_stmt (&OMP_CLAUSE_REDUCTION_MERGE (c));
4731 pop_gimplify_context (OMP_CLAUSE_REDUCTION_MERGE (c));
4732 gimplify_omp_ctxp = outer_ctx;
4733 }
4734 if (notice_outer)
4735 goto do_notice;
4736 break;
4737
4738 case OMP_CLAUSE_COPYIN:
4739 case OMP_CLAUSE_COPYPRIVATE:
4740 decl = OMP_CLAUSE_DECL (c);
4741 if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node)
4742 {
4743 remove = true;
4744 break;
4745 }
4746 /* Handle NRV results passed by reference. */
4747 if (TREE_CODE (decl) == INDIRECT_REF
4748 && TREE_CODE (TREE_OPERAND (decl, 0)) == RESULT_DECL
4749 && DECL_BY_REFERENCE (TREE_OPERAND (decl, 0)))
4750 OMP_CLAUSE_DECL (c) = decl = TREE_OPERAND (decl, 0);
4751 do_notice:
4752 if (outer_ctx)
4753 omp_notice_variable (outer_ctx, decl, true);
4754 break;
4755
4756 case OMP_CLAUSE_IF:
4757 OMP_CLAUSE_OPERAND (c, 0)
4758 = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
4759 /* Fall through. */
4760
4761 case OMP_CLAUSE_SCHEDULE:
4762 case OMP_CLAUSE_NUM_THREADS:
4763 gs = gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
4764 is_gimple_val, fb_rvalue);
4765 if (gs == GS_ERROR)
4766 remove = true;
4767 break;
4768
4769 case OMP_CLAUSE_NOWAIT:
4770 case OMP_CLAUSE_ORDERED:
4771 break;
4772
4773 case OMP_CLAUSE_DEFAULT:
4774 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
4775 break;
4776
4777 default:
4778 gcc_unreachable ();
4779 }
4780
4781 if (remove)
4782 *list_p = OMP_CLAUSE_CHAIN (c);
4783 else
4784 list_p = &OMP_CLAUSE_CHAIN (c);
4785 }
4786
4787 gimplify_omp_ctxp = ctx;
4788 }
4789
4790 /* For all variables that were not actually used within the context,
4791 remove PRIVATE, SHARED, and FIRSTPRIVATE clauses. */
4792
4793 static int
4794 gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
4795 {
4796 tree *list_p = (tree *) data;
4797 tree decl = (tree) n->key;
4798 unsigned flags = n->value;
4799 enum omp_clause_code code;
4800 tree clause;
4801 bool private_debug;
4802
4803 if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
4804 return 0;
4805 if ((flags & GOVD_SEEN) == 0)
4806 return 0;
4807 if (flags & GOVD_DEBUG_PRIVATE)
4808 {
4809 gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_PRIVATE);
4810 private_debug = true;
4811 }
4812 else
4813 private_debug
4814 = lang_hooks.decls.omp_private_debug_clause (decl,
4815 !!(flags & GOVD_SHARED));
4816 if (private_debug)
4817 code = OMP_CLAUSE_PRIVATE;
4818 else if (flags & GOVD_SHARED)
4819 {
4820 if (is_global_var (decl))
4821 return 0;
4822 code = OMP_CLAUSE_SHARED;
4823 }
4824 else if (flags & GOVD_PRIVATE)
4825 code = OMP_CLAUSE_PRIVATE;
4826 else if (flags & GOVD_FIRSTPRIVATE)
4827 code = OMP_CLAUSE_FIRSTPRIVATE;
4828 else
4829 gcc_unreachable ();
4830
4831 clause = build_omp_clause (code);
4832 OMP_CLAUSE_DECL (clause) = decl;
4833 OMP_CLAUSE_CHAIN (clause) = *list_p;
4834 if (private_debug)
4835 OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
4836 *list_p = clause;
4837
4838 return 0;
4839 }
4840
4841 static void
4842 gimplify_adjust_omp_clauses (tree *list_p)
4843 {
4844 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
4845 tree c, decl;
4846
4847 while ((c = *list_p) != NULL)
4848 {
4849 splay_tree_node n;
4850 bool remove = false;
4851
4852 switch (OMP_CLAUSE_CODE (c))
4853 {
4854 case OMP_CLAUSE_PRIVATE:
4855 case OMP_CLAUSE_SHARED:
4856 case OMP_CLAUSE_FIRSTPRIVATE:
4857 decl = OMP_CLAUSE_DECL (c);
4858 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
4859 remove = !(n->value & GOVD_SEEN);
4860 if (! remove)
4861 {
4862 bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
4863 if ((n->value & GOVD_DEBUG_PRIVATE)
4864 || lang_hooks.decls.omp_private_debug_clause (decl, shared))
4865 {
4866 gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
4867 || ((n->value & GOVD_DATA_SHARE_CLASS)
4868 == GOVD_PRIVATE));
4869 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
4870 OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
4871 }
4872 }
4873 break;
4874
4875 case OMP_CLAUSE_LASTPRIVATE:
4876 /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
4877 accurately reflect the presence of a FIRSTPRIVATE clause. */
4878 decl = OMP_CLAUSE_DECL (c);
4879 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
4880 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
4881 = (n->value & GOVD_FIRSTPRIVATE) != 0;
4882 break;
4883
4884 case OMP_CLAUSE_REDUCTION:
4885 case OMP_CLAUSE_COPYIN:
4886 case OMP_CLAUSE_COPYPRIVATE:
4887 case OMP_CLAUSE_IF:
4888 case OMP_CLAUSE_NUM_THREADS:
4889 case OMP_CLAUSE_SCHEDULE:
4890 case OMP_CLAUSE_NOWAIT:
4891 case OMP_CLAUSE_ORDERED:
4892 case OMP_CLAUSE_DEFAULT:
4893 break;
4894
4895 default:
4896 gcc_unreachable ();
4897 }
4898
4899 if (remove)
4900 *list_p = OMP_CLAUSE_CHAIN (c);
4901 else
4902 list_p = &OMP_CLAUSE_CHAIN (c);
4903 }
4904
4905 /* Add in any implicit data sharing. */
4906 splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, list_p);
4907
4908 gimplify_omp_ctxp = ctx->outer_context;
4909 delete_omp_context (ctx);
4910 }
4911
4912 /* Gimplify the contents of an OMP_PARALLEL statement. This involves
4913 gimplification of the body, as well as scanning the body for used
4914 variables. We need to do this scan now, because variable-sized
4915 decls will be decomposed during gimplification. */
4916
4917 static enum gimplify_status
4918 gimplify_omp_parallel (tree *expr_p, tree *pre_p)
4919 {
4920 tree expr = *expr_p;
4921
4922 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p, true,
4923 OMP_PARALLEL_COMBINED (expr));
4924
4925 push_gimplify_context ();
4926
4927 gimplify_stmt (&OMP_PARALLEL_BODY (expr));
4928
4929 if (TREE_CODE (OMP_PARALLEL_BODY (expr)) == BIND_EXPR)
4930 pop_gimplify_context (OMP_PARALLEL_BODY (expr));
4931 else
4932 pop_gimplify_context (NULL_TREE);
4933
4934 gimplify_adjust_omp_clauses (&OMP_PARALLEL_CLAUSES (expr));
4935
4936 return GS_ALL_DONE;
4937 }
4938
4939 /* Gimplify the gross structure of an OMP_FOR statement. */
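/* For example (illustrative), a loop increment written as "i++" or "++i" is
   canonicalized below into the GIMPLE_MODIFY_STMT "i = i + 1", and the bound
   in OMP_FOR_COND is gimplified to a gimple value, with its side effects
   placed in OMP_FOR_PRE_BODY.  */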
4940
4941 static enum gimplify_status
4942 gimplify_omp_for (tree *expr_p, tree *pre_p)
4943 {
4944 tree for_stmt, decl, t;
4945 enum gimplify_status ret = GS_OK;
4946
4947 for_stmt = *expr_p;
4948
4949 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p, false, false);
4950
4951 t = OMP_FOR_INIT (for_stmt);
4952 gcc_assert (TREE_CODE (t) == MODIFY_EXPR
4953 || TREE_CODE (t) == GIMPLE_MODIFY_STMT);
4954 decl = GENERIC_TREE_OPERAND (t, 0);
4955 gcc_assert (DECL_P (decl));
4956 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl)));
4957
4958 /* Make sure the iteration variable is private. */
4959 if (omp_is_private (gimplify_omp_ctxp, decl))
4960 omp_notice_variable (gimplify_omp_ctxp, decl, true);
4961 else
4962 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
4963
4964 ret |= gimplify_expr (&GENERIC_TREE_OPERAND (t, 1),
4965 &OMP_FOR_PRE_BODY (for_stmt),
4966 NULL, is_gimple_val, fb_rvalue);
4967
4968 tree_to_gimple_tuple (&OMP_FOR_INIT (for_stmt));
4969
4970 t = OMP_FOR_COND (for_stmt);
4971 gcc_assert (COMPARISON_CLASS_P (t));
4972 gcc_assert (GENERIC_TREE_OPERAND (t, 0) == decl);
4973
4974 ret |= gimplify_expr (&GENERIC_TREE_OPERAND (t, 1),
4975 &OMP_FOR_PRE_BODY (for_stmt),
4976 NULL, is_gimple_val, fb_rvalue);
4977
4978 tree_to_gimple_tuple (&OMP_FOR_INCR (for_stmt));
4979 t = OMP_FOR_INCR (for_stmt);
4980 switch (TREE_CODE (t))
4981 {
4982 case PREINCREMENT_EXPR:
4983 case POSTINCREMENT_EXPR:
4984 t = build_int_cst (TREE_TYPE (decl), 1);
4985 goto build_modify;
4986 case PREDECREMENT_EXPR:
4987 case POSTDECREMENT_EXPR:
4988 t = build_int_cst (TREE_TYPE (decl), -1);
4989 goto build_modify;
4990 build_modify:
4991 t = build2 (PLUS_EXPR, TREE_TYPE (decl), decl, t);
4992 t = build2 (GIMPLE_MODIFY_STMT, void_type_node, decl, t);
4993 OMP_FOR_INCR (for_stmt) = t;
4994 break;
4995
4996 case GIMPLE_MODIFY_STMT:
4997 gcc_assert (GIMPLE_STMT_OPERAND (t, 0) == decl);
4998 t = GIMPLE_STMT_OPERAND (t, 1);
4999 switch (TREE_CODE (t))
5000 {
5001 case PLUS_EXPR:
5002 if (TREE_OPERAND (t, 1) == decl)
5003 {
5004 TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
5005 TREE_OPERAND (t, 0) = decl;
5006 break;
5007 }
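	  /* FALLTHRU */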
5008 case MINUS_EXPR:
5009 gcc_assert (TREE_OPERAND (t, 0) == decl);
5010 break;
5011 default:
5012 gcc_unreachable ();
5013 }
5014
5015 ret |= gimplify_expr (&TREE_OPERAND (t, 1), &OMP_FOR_PRE_BODY (for_stmt),
5016 NULL, is_gimple_val, fb_rvalue);
5017 break;
5018
5019 default:
5020 gcc_unreachable ();
5021 }
5022
5023 gimplify_to_stmt_list (&OMP_FOR_BODY (for_stmt));
5024 gimplify_adjust_omp_clauses (&OMP_FOR_CLAUSES (for_stmt));
5025
5026 return ret == GS_ALL_DONE ? GS_ALL_DONE : GS_ERROR;
5027 }
5028
5029 /* Gimplify the gross structure of other OpenMP worksharing constructs.
5030 In particular, OMP_SECTIONS and OMP_SINGLE. */
5031
5032 static enum gimplify_status
5033 gimplify_omp_workshare (tree *expr_p, tree *pre_p)
5034 {
5035 tree stmt = *expr_p;
5036
5037 gimplify_scan_omp_clauses (&OMP_CLAUSES (stmt), pre_p, false, false);
5038 gimplify_to_stmt_list (&OMP_BODY (stmt));
5039 gimplify_adjust_omp_clauses (&OMP_CLAUSES (stmt));
5040
5041 return GS_ALL_DONE;
5042 }
5043
5044 /* A subroutine of gimplify_omp_atomic. The front end is supposed to have
5045 stabilized the lhs of the atomic operation as *ADDR. Return true if
5046 EXPR is this stabilized form. */
5047
5048 static bool
5049 goa_lhs_expr_p (tree expr, tree addr)
5050 {
5051 /* Also include casts to other type variants. The C front end is fond
5052 of adding these for e.g. volatile variables. This is like
5053 STRIP_TYPE_NOPS but includes the main variant lookup. */
5054 while ((TREE_CODE (expr) == NOP_EXPR
5055 || TREE_CODE (expr) == CONVERT_EXPR
5056 || TREE_CODE (expr) == NON_LVALUE_EXPR)
5057 && TREE_OPERAND (expr, 0) != error_mark_node
5058 && (TYPE_MAIN_VARIANT (TREE_TYPE (expr))
5059 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (expr, 0)))))
5060 expr = TREE_OPERAND (expr, 0);
5061
5062 if (TREE_CODE (expr) == INDIRECT_REF && TREE_OPERAND (expr, 0) == addr)
5063 return true;
5064 if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
5065 return true;
5066 return false;
5067 }
5068
5069 /* A subroutine of gimplify_omp_atomic. Attempt to implement the atomic
5070 operation as a __sync_fetch_and_op builtin. INDEX is log2 of the
5071 size of the data type, and thus usable to find the index of the builtin
5072 decl. Returns GS_UNHANDLED if the expression is not of the proper form. */
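/* E.g. (illustrative), "#pragma omp atomic  x += 1" on a 4-byte integer X
   can be implemented below as __sync_fetch_and_add_4 (&x, 1).  */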
5073
5074 static enum gimplify_status
5075 gimplify_omp_atomic_fetch_op (tree *expr_p, tree addr, tree rhs, int index)
5076 {
5077 enum built_in_function base;
5078 tree decl, args, itype;
5079 enum insn_code *optab;
5080
5081 /* Check for one of the supported fetch-op operations. */
5082 switch (TREE_CODE (rhs))
5083 {
5084 case PLUS_EXPR:
5085 base = BUILT_IN_FETCH_AND_ADD_N;
5086 optab = sync_add_optab;
5087 break;
5088 case MINUS_EXPR:
5089 base = BUILT_IN_FETCH_AND_SUB_N;
5090 optab = sync_add_optab;
5091 break;
5092 case BIT_AND_EXPR:
5093 base = BUILT_IN_FETCH_AND_AND_N;
5094 optab = sync_and_optab;
5095 break;
5096 case BIT_IOR_EXPR:
5097 base = BUILT_IN_FETCH_AND_OR_N;
5098 optab = sync_ior_optab;
5099 break;
5100 case BIT_XOR_EXPR:
5101 base = BUILT_IN_FETCH_AND_XOR_N;
5102 optab = sync_xor_optab;
5103 break;
5104 default:
5105 return GS_UNHANDLED;
5106 }
5107
5108 /* Make sure the expression is of the proper form. */
5109 if (goa_lhs_expr_p (TREE_OPERAND (rhs, 0), addr))
5110 rhs = TREE_OPERAND (rhs, 1);
5111 else if (commutative_tree_code (TREE_CODE (rhs))
5112 && goa_lhs_expr_p (TREE_OPERAND (rhs, 1), addr))
5113 rhs = TREE_OPERAND (rhs, 0);
5114 else
5115 return GS_UNHANDLED;
5116
5117 decl = built_in_decls[base + index + 1];
5118 itype = TREE_TYPE (TREE_TYPE (decl));
5119
5120 if (optab[TYPE_MODE (itype)] == CODE_FOR_nothing)
5121 return GS_UNHANDLED;
5122
5123 args = tree_cons (NULL, fold_convert (itype, rhs), NULL);
5124 args = tree_cons (NULL, addr, args);
5125 *expr_p = build_function_call_expr (decl, args);
5126 return GS_OK;
5127 }
5128
5129 /* A subroutine of gimplify_omp_atomic_pipeline. Walk *EXPR_P and replace
5130 appearances of *LHS_ADDR with LHS_VAR. If an expression does not involve
5131 the lhs, evaluate it into a temporary. Return 1 if the lhs appeared as
5132 a subexpression, 0 if it did not, or -1 if an error was encountered. */
5133
5134 static int
5135 goa_stabilize_expr (tree *expr_p, tree *pre_p, tree lhs_addr, tree lhs_var)
5136 {
5137 tree expr = *expr_p;
5138 int saw_lhs;
5139
5140 if (goa_lhs_expr_p (expr, lhs_addr))
5141 {
5142 *expr_p = lhs_var;
5143 return 1;
5144 }
5145 if (is_gimple_val (expr))
5146 return 0;
5147
5148 saw_lhs = 0;
5149 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
5150 {
5151 case tcc_binary:
5152 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
5153 lhs_addr, lhs_var);
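      /* FALLTHRU */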
5154 case tcc_unary:
5155 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
5156 lhs_addr, lhs_var);
5157 break;
5158 default:
5159 break;
5160 }
5161
5162 if (saw_lhs == 0)
5163 {
5164 enum gimplify_status gs;
5165 gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
5166 if (gs != GS_ALL_DONE)
5167 saw_lhs = -1;
5168 }
5169
5170 return saw_lhs;
5171 }
5172
5173 /* A subroutine of gimplify_omp_atomic. Implement the atomic operation as:
5174
5175 oldval = *addr;
5176 repeat:
5177 newval = rhs; // with oldval replacing *addr in rhs
5178 	oldval = __sync_val_compare_and_swap (addr, prev = oldval, newval);
5179 	if (oldval != prev)	// CAS failed: *addr did not hold PREV
5180 	  goto repeat;
5181
5182 INDEX is log2 of the size of the data type, and thus usable to find the
5183 index of the builtin decl. */
5184
5185 static enum gimplify_status
5186 gimplify_omp_atomic_pipeline (tree *expr_p, tree *pre_p, tree addr,
5187 tree rhs, int index)
5188 {
5189 tree oldval, oldival, oldival2, newval, newival, label;
5190 tree type, itype, cmpxchg, args, x, iaddr;
5191
5192 cmpxchg = built_in_decls[BUILT_IN_VAL_COMPARE_AND_SWAP_N + index + 1];
5193 type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
5194 itype = TREE_TYPE (TREE_TYPE (cmpxchg));
5195
5196 if (sync_compare_and_swap[TYPE_MODE (itype)] == CODE_FOR_nothing)
5197 return GS_UNHANDLED;
5198
5199 oldval = create_tmp_var (type, NULL);
5200 newval = create_tmp_var (type, NULL);
5201
5202 /* Precompute as much of RHS as possible. In the same walk, replace
5203 occurrences of the lhs value with our temporary. */
5204 if (goa_stabilize_expr (&rhs, pre_p, addr, oldval) < 0)
5205 return GS_ERROR;
5206
5207 x = build_fold_indirect_ref (addr);
5208 x = build2 (GIMPLE_MODIFY_STMT, void_type_node, oldval, x);
5209 gimplify_and_add (x, pre_p);
5210
5211 /* For floating-point values, we'll need to view-convert them to integers
5212 so that we can perform the atomic compare and swap. Simplify the
5213 following code by always setting up the "i"ntegral variables. */
5214 if (INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type))
5215 {
5216 oldival = oldval;
5217 newival = newval;
5218 iaddr = addr;
5219 }
5220 else
5221 {
5222 oldival = create_tmp_var (itype, NULL);
5223 newival = create_tmp_var (itype, NULL);
5224
5225 x = build1 (VIEW_CONVERT_EXPR, itype, oldval);
5226 x = build2 (GIMPLE_MODIFY_STMT, void_type_node, oldival, x);
5227 gimplify_and_add (x, pre_p);
5228 iaddr = fold_convert (build_pointer_type (itype), addr);
5229 }
5230
5231 oldival2 = create_tmp_var (itype, NULL);
5232
5233 label = create_artificial_label ();
5234 x = build1 (LABEL_EXPR, void_type_node, label);
5235 gimplify_and_add (x, pre_p);
5236
5237 x = build2 (GIMPLE_MODIFY_STMT, void_type_node, newval, rhs);
5238 gimplify_and_add (x, pre_p);
5239
5240 if (newval != newival)
5241 {
5242 x = build1 (VIEW_CONVERT_EXPR, itype, newval);
5243 x = build2 (GIMPLE_MODIFY_STMT, void_type_node, newival, x);
5244 gimplify_and_add (x, pre_p);
5245 }
5246
5247 x = build2 (GIMPLE_MODIFY_STMT, void_type_node, oldival2,
5248 fold_convert (itype, oldival));
5249 gimplify_and_add (x, pre_p);
5250
5251 args = tree_cons (NULL, fold_convert (itype, newival), NULL);
5252 args = tree_cons (NULL, fold_convert (itype, oldival), args);
5253 args = tree_cons (NULL, iaddr, args);
5254 x = build_function_call_expr (cmpxchg, args);
5255 if (oldval == oldival)
5256 x = fold_convert (type, x);
5257 x = build2 (GIMPLE_MODIFY_STMT, void_type_node, oldival, x);
5258 gimplify_and_add (x, pre_p);
5259
5260 /* For floating point, be prepared for the loop backedge. */
5261 if (oldval != oldival)
5262 {
5263 x = build1 (VIEW_CONVERT_EXPR, type, oldival);
5264 x = build2 (GIMPLE_MODIFY_STMT, void_type_node, oldval, x);
5265 gimplify_and_add (x, pre_p);
5266 }
5267
5268 /* Note that we always perform the comparison as an integer, even for
5269 floating point. This allows the atomic operation to properly
5270 succeed even with NaNs and -0.0. */
5271 x = build3 (COND_EXPR, void_type_node,
5272 build2 (NE_EXPR, boolean_type_node, oldival, oldival2),
5273 build1 (GOTO_EXPR, void_type_node, label), NULL);
5274 gimplify_and_add (x, pre_p);
5275
5276 *expr_p = NULL;
5277 return GS_ALL_DONE;
5278 }
5279
5280 /* A subroutine of gimplify_omp_atomic. Implement the atomic operation as:
5281
5282 GOMP_atomic_start ();
5283 *addr = rhs;
5284 GOMP_atomic_end ();
5285
5286 The result is not globally atomic, but works so long as all parallel
5287 references are within #pragma omp atomic directives. According to
5288    responses received from omp@openmp.org, this appears to be within spec.
5289    That makes sense, since that's how several other compilers handle
5290 this situation as well. */
5291
5292 static enum gimplify_status
5293 gimplify_omp_atomic_mutex (tree *expr_p, tree *pre_p, tree addr, tree rhs)
5294 {
5295 tree t;
5296
5297 t = built_in_decls[BUILT_IN_GOMP_ATOMIC_START];
5298 t = build_function_call_expr (t, NULL);
5299 gimplify_and_add (t, pre_p);
5300
5301 t = build_fold_indirect_ref (addr);
5302 t = build2 (GIMPLE_MODIFY_STMT, void_type_node, t, rhs);
5303 gimplify_and_add (t, pre_p);
5304
5305 t = built_in_decls[BUILT_IN_GOMP_ATOMIC_END];
5306 t = build_function_call_expr (t, NULL);
5307 gimplify_and_add (t, pre_p);
5308
5309 *expr_p = NULL;
5310 return GS_ALL_DONE;
5311 }
5312
5313 /* Gimplify an OMP_ATOMIC statement. */
5314
5315 static enum gimplify_status
5316 gimplify_omp_atomic (tree *expr_p, tree *pre_p)
5317 {
5318 tree addr = TREE_OPERAND (*expr_p, 0);
5319 tree rhs = TREE_OPERAND (*expr_p, 1);
5320 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
5321 HOST_WIDE_INT index;
5322
5323 /* Make sure the type is one of the supported sizes. */
5324 index = tree_low_cst (TYPE_SIZE_UNIT (type), 1);
5325 index = exact_log2 (index);
5326 if (index >= 0 && index <= 4)
5327 {
5328 enum gimplify_status gs;
5329 unsigned int align;
5330
5331 if (DECL_P (TREE_OPERAND (addr, 0)))
5332 align = DECL_ALIGN_UNIT (TREE_OPERAND (addr, 0));
5333 else if (TREE_CODE (TREE_OPERAND (addr, 0)) == COMPONENT_REF
5334 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (addr, 0), 1))
5335 == FIELD_DECL)
5336 align = DECL_ALIGN_UNIT (TREE_OPERAND (TREE_OPERAND (addr, 0), 1));
5337 else
5338 align = TYPE_ALIGN_UNIT (type);
5339
5340 /* __sync builtins require strict data alignment. */
5341 if (exact_log2 (align) >= index)
5342 {
5343 /* When possible, use specialized atomic update functions. */
5344 if (INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type))
5345 {
5346 gs = gimplify_omp_atomic_fetch_op (expr_p, addr, rhs, index);
5347 if (gs != GS_UNHANDLED)
5348 return gs;
5349 }
5350
5351 	  /* If we don't have specialized __sync builtins, try to implement it
5352 	     as a compare-and-swap loop.  */
5353 gs = gimplify_omp_atomic_pipeline (expr_p, pre_p, addr, rhs, index);
5354 if (gs != GS_UNHANDLED)
5355 return gs;
5356 }
5357 }
5358
5359 /* The ultimate fallback is wrapping the operation in a mutex. */
5360 return gimplify_omp_atomic_mutex (expr_p, pre_p, addr, rhs);
5361 }
5362
5363 /* Gimplifies the expression tree pointed to by EXPR_P.  Return GS_ERROR if
5364    gimplification failed.
5365
5366 PRE_P points to the list where side effects that must happen before
5367 EXPR should be stored.
5368
5369 POST_P points to the list where side effects that must happen after
5370 EXPR should be stored, or NULL if there is no suitable list. In
5371 that case, we copy the result to a temporary, emit the
5372 post-effects, and then return the temporary.
5373
5374 GIMPLE_TEST_F points to a function that takes a tree T and
5375 returns nonzero if T is in the GIMPLE form requested by the
5376 caller. The GIMPLE predicates are in tree-gimple.c.
5377
5378 This test is used twice. Before gimplification, the test is
5379 invoked to determine whether *EXPR_P is already gimple enough. If
5380 that fails, *EXPR_P is gimplified according to its code and
5381 GIMPLE_TEST_F is called again. If the test still fails, then a new
5382 temporary variable is created and assigned the value of the
5383 gimplified expression.
5384
5385 FALLBACK tells the function what sort of a temporary we want. If the 1
5386 bit is set, an rvalue is OK. If the 2 bit is set, an lvalue is OK.
5387 If both are set, either is OK, but an lvalue is preferable.
5388
5389 The return value is either GS_ERROR or GS_ALL_DONE, since this function
5390 iterates until solution. */
5391
5392 enum gimplify_status
5393 gimplify_expr (tree *expr_p, tree *pre_p, tree *post_p,
5394 bool (* gimple_test_f) (tree), fallback_t fallback)
5395 {
5396 tree tmp;
5397 tree internal_pre = NULL_TREE;
5398 tree internal_post = NULL_TREE;
5399 tree save_expr;
5400 int is_statement = (pre_p == NULL);
5401 location_t saved_location;
5402 enum gimplify_status ret;
5403
5404 save_expr = *expr_p;
5405 if (save_expr == NULL_TREE)
5406 return GS_ALL_DONE;
5407
5408 /* We used to check the predicate here and return immediately if it
5409 succeeds. This is wrong; the design is for gimplification to be
5410 idempotent, and for the predicates to only test for valid forms, not
5411 whether they are fully simplified. */
5412
5413 /* Set up our internal queues if needed. */
5414 if (pre_p == NULL)
5415 pre_p = &internal_pre;
5416 if (post_p == NULL)
5417 post_p = &internal_post;
5418
5419 saved_location = input_location;
5420 if (save_expr != error_mark_node
5421 && EXPR_HAS_LOCATION (*expr_p))
5422 input_location = EXPR_LOCATION (*expr_p);
5423
5424 /* Loop over the specific gimplifiers until the toplevel node
5425 remains the same. */
5426 do
5427 {
5428 /* Strip away as many useless type conversions as possible
5429 at the toplevel. */
5430 STRIP_USELESS_TYPE_CONVERSION (*expr_p);
5431
5432 /* Remember the expr. */
5433 save_expr = *expr_p;
5434
5435 /* Die, die, die, my darling. */
5436 if (save_expr == error_mark_node
5437 || (!GIMPLE_STMT_P (save_expr)
5438 && TREE_TYPE (save_expr)
5439 && TREE_TYPE (save_expr) == error_mark_node))
5440 {
5441 ret = GS_ERROR;
5442 break;
5443 }
5444
5445 /* Do any language-specific gimplification. */
5446 ret = lang_hooks.gimplify_expr (expr_p, pre_p, post_p);
5447 if (ret == GS_OK)
5448 {
5449 if (*expr_p == NULL_TREE)
5450 break;
5451 if (*expr_p != save_expr)
5452 continue;
5453 }
5454 else if (ret != GS_UNHANDLED)
5455 break;
5456
5457 ret = GS_OK;
5458 switch (TREE_CODE (*expr_p))
5459 {
5460 /* First deal with the special cases. */
5461
5462 case POSTINCREMENT_EXPR:
5463 case POSTDECREMENT_EXPR:
5464 case PREINCREMENT_EXPR:
5465 case PREDECREMENT_EXPR:
5466 ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
5467 fallback != fb_none);
5468 break;
5469
5470 case ARRAY_REF:
5471 case ARRAY_RANGE_REF:
5472 case REALPART_EXPR:
5473 case IMAGPART_EXPR:
5474 case COMPONENT_REF:
5475 case VIEW_CONVERT_EXPR:
5476 ret = gimplify_compound_lval (expr_p, pre_p, post_p,
5477 fallback ? fallback : fb_rvalue);
5478 break;
5479
5480 case COND_EXPR:
5481 ret = gimplify_cond_expr (expr_p, pre_p, fallback);
5482 /* C99 code may assign to an array in a structure value of a
5483 conditional expression, and this has undefined behavior
5484 only on execution, so create a temporary if an lvalue is
5485 required. */
5486 if (fallback == fb_lvalue)
5487 {
5488 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
5489 lang_hooks.mark_addressable (*expr_p);
5490 }
5491 break;
5492
5493 case CALL_EXPR:
5494 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
5495 /* C99 code may assign to an array in a structure returned
5496 from a function, and this has undefined behavior only on
5497 execution, so create a temporary if an lvalue is
5498 required. */
5499 if (fallback == fb_lvalue)
5500 {
5501 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
5502 lang_hooks.mark_addressable (*expr_p);
5503 }
5504 break;
5505
5506 case TREE_LIST:
5507 gcc_unreachable ();
5508
5509 case COMPOUND_EXPR:
5510 ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
5511 break;
5512
5513 case MODIFY_EXPR:
5514 case GIMPLE_MODIFY_STMT:
5515 case INIT_EXPR:
5516 ret = gimplify_modify_expr (expr_p, pre_p, post_p,
5517 fallback != fb_none);
5518
5519 if (*expr_p)
5520 {
5521 /* The distinction between MODIFY_EXPR and INIT_EXPR is no longer
5522 useful. */
5523 if (TREE_CODE (*expr_p) == INIT_EXPR)
5524 TREE_SET_CODE (*expr_p, MODIFY_EXPR);
5525
5526 /* Convert MODIFY_EXPR to GIMPLE_MODIFY_STMT. */
5527 if (TREE_CODE (*expr_p) == MODIFY_EXPR)
5528 tree_to_gimple_tuple (expr_p);
5529 }
5530
5531 break;
5532
5533 case TRUTH_ANDIF_EXPR:
5534 case TRUTH_ORIF_EXPR:
5535 ret = gimplify_boolean_expr (expr_p);
5536 break;
5537
5538 case TRUTH_NOT_EXPR:
5539 TREE_OPERAND (*expr_p, 0)
5540 = gimple_boolify (TREE_OPERAND (*expr_p, 0));
5541 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
5542 is_gimple_val, fb_rvalue);
5543 recalculate_side_effects (*expr_p);
5544 break;
5545
5546 case ADDR_EXPR:
5547 ret = gimplify_addr_expr (expr_p, pre_p, post_p);
5548 break;
5549
5550 case VA_ARG_EXPR:
5551 ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
5552 break;
5553
5554 case CONVERT_EXPR:
5555 case NOP_EXPR:
5556 if (IS_EMPTY_STMT (*expr_p))
5557 {
5558 ret = GS_ALL_DONE;
5559 break;
5560 }
5561
5562 if (VOID_TYPE_P (TREE_TYPE (*expr_p))
5563 || fallback == fb_none)
5564 {
5565 /* Just strip a conversion to void (or in void context) and
5566 try again. */
5567 *expr_p = TREE_OPERAND (*expr_p, 0);
5568 break;
5569 }
5570
5571 ret = gimplify_conversion (expr_p);
5572 if (ret == GS_ERROR)
5573 break;
5574 if (*expr_p != save_expr)
5575 break;
5576 /* FALLTHRU */
5577
5578 case FIX_TRUNC_EXPR:
5579 /* unary_expr: ... | '(' cast ')' val | ... */
5580 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
5581 is_gimple_val, fb_rvalue);
5582 recalculate_side_effects (*expr_p);
5583 break;
5584
5585 case INDIRECT_REF:
5586 *expr_p = fold_indirect_ref (*expr_p);
5587 if (*expr_p != save_expr)
5588 break;
5589 /* else fall through. */
5590 case ALIGN_INDIRECT_REF:
5591 case MISALIGNED_INDIRECT_REF:
5592 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
5593 is_gimple_reg, fb_rvalue);
5594 recalculate_side_effects (*expr_p);
5595 break;
5596
5597 /* Constants need not be gimplified. */
5598 case INTEGER_CST:
5599 case REAL_CST:
5600 case STRING_CST:
5601 case COMPLEX_CST:
5602 case VECTOR_CST:
5603 ret = GS_ALL_DONE;
5604 break;
5605
5606 case CONST_DECL:
5607 /* If we require an lvalue, such as for ADDR_EXPR, retain the
5608 CONST_DECL node. Otherwise the decl is replaceable by its
5609 value. */
5610 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
5611 if (fallback & fb_lvalue)
5612 ret = GS_ALL_DONE;
5613 else
5614 *expr_p = DECL_INITIAL (*expr_p);
5615 break;
5616
5617 case DECL_EXPR:
5618 ret = gimplify_decl_expr (expr_p);
5619 break;
5620
5621 case EXC_PTR_EXPR:
5622 /* FIXME make this a decl. */
5623 ret = GS_ALL_DONE;
5624 break;
5625
5626 case BIND_EXPR:
5627 ret = gimplify_bind_expr (expr_p, pre_p);
5628 break;
5629
5630 case LOOP_EXPR:
5631 ret = gimplify_loop_expr (expr_p, pre_p);
5632 break;
5633
5634 case SWITCH_EXPR:
5635 ret = gimplify_switch_expr (expr_p, pre_p);
5636 break;
5637
5638 case EXIT_EXPR:
5639 ret = gimplify_exit_expr (expr_p);
5640 break;
5641
5642 case GOTO_EXPR:
5643	  /* If the target is not a LABEL_DECL, then it is a computed jump
5644	     and the target needs to be gimplified.  */
5645 if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
5646 ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
5647 NULL, is_gimple_val, fb_rvalue);
5648 break;
5649
5650 case LABEL_EXPR:
5651 ret = GS_ALL_DONE;
5652 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
5653 == current_function_decl);
5654 break;
5655
5656 case CASE_LABEL_EXPR:
5657 ret = gimplify_case_label_expr (expr_p);
5658 break;
5659
5660 case RETURN_EXPR:
5661 ret = gimplify_return_expr (*expr_p, pre_p);
5662 break;
5663
5664 case CONSTRUCTOR:
5665 /* Don't reduce this in place; let gimplify_init_constructor work its
5666	     magic.  But if we're just elaborating this for side effects, just
5667 gimplify any element that has side-effects. */
5668 if (fallback == fb_none)
5669 {
5670 unsigned HOST_WIDE_INT ix;
5671 constructor_elt *ce;
5672 tree temp = NULL_TREE;
5673 for (ix = 0;
5674 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (*expr_p),
5675 ix, ce);
5676 ix++)
5677 if (TREE_SIDE_EFFECTS (ce->value))
5678 append_to_statement_list (ce->value, &temp);
5679
5680 *expr_p = temp;
5681 ret = GS_OK;
5682 }
5683 /* C99 code may assign to an array in a constructed
5684 structure or union, and this has undefined behavior only
5685 on execution, so create a temporary if an lvalue is
5686 required. */
5687 else if (fallback == fb_lvalue)
5688 {
5689 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
5690 lang_hooks.mark_addressable (*expr_p);
5691 }
5692 else
5693 ret = GS_ALL_DONE;
5694 break;
5695
5696 /* The following are special cases that are not handled by the
5697 original GIMPLE grammar. */
5698
5699 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
5700 eliminated. */
5701 case SAVE_EXPR:
5702 ret = gimplify_save_expr (expr_p, pre_p, post_p);
5703 break;
5704
5705 case BIT_FIELD_REF:
5706 {
5707 enum gimplify_status r0, r1, r2;
5708
5709 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
5710 is_gimple_lvalue, fb_either);
5711 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
5712 is_gimple_val, fb_rvalue);
5713 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p, post_p,
5714 is_gimple_val, fb_rvalue);
5715 recalculate_side_effects (*expr_p);
5716
5717 ret = MIN (r0, MIN (r1, r2));
5718 }
5719 break;
5720
5721 case NON_LVALUE_EXPR:
5722 /* This should have been stripped above. */
5723 gcc_unreachable ();
5724
5725 case ASM_EXPR:
5726 ret = gimplify_asm_expr (expr_p, pre_p, post_p);
5727 break;
5728
5729 case TRY_FINALLY_EXPR:
5730 case TRY_CATCH_EXPR:
5731 gimplify_to_stmt_list (&TREE_OPERAND (*expr_p, 0));
5732 gimplify_to_stmt_list (&TREE_OPERAND (*expr_p, 1));
5733 ret = GS_ALL_DONE;
5734 break;
5735
5736 case CLEANUP_POINT_EXPR:
5737 ret = gimplify_cleanup_point_expr (expr_p, pre_p);
5738 break;
5739
5740 case TARGET_EXPR:
5741 ret = gimplify_target_expr (expr_p, pre_p, post_p);
5742 break;
5743
5744 case CATCH_EXPR:
5745 gimplify_to_stmt_list (&CATCH_BODY (*expr_p));
5746 ret = GS_ALL_DONE;
5747 break;
5748
5749 case EH_FILTER_EXPR:
5750 gimplify_to_stmt_list (&EH_FILTER_FAILURE (*expr_p));
5751 ret = GS_ALL_DONE;
5752 break;
5753
5754 case OBJ_TYPE_REF:
5755 {
5756 enum gimplify_status r0, r1;
5757 r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p, post_p,
5758 is_gimple_val, fb_rvalue);
5759 r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p, post_p,
5760 is_gimple_val, fb_rvalue);
5761 ret = MIN (r0, r1);
5762 }
5763 break;
5764
5765 case LABEL_DECL:
5766 /* We get here when taking the address of a label. We mark
5767	     the label as "forced", meaning it can never be removed and
5768 it is a potential target for any computed goto. */
5769 FORCED_LABEL (*expr_p) = 1;
5770 ret = GS_ALL_DONE;
5771 break;
5772
5773 case STATEMENT_LIST:
5774 ret = gimplify_statement_list (expr_p, pre_p);
5775 break;
5776
5777 case WITH_SIZE_EXPR:
5778 {
5779 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
5780 post_p == &internal_post ? NULL : post_p,
5781 gimple_test_f, fallback);
5782 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
5783 is_gimple_val, fb_rvalue);
5784 }
5785 break;
5786
5787 case VAR_DECL:
5788 case PARM_DECL:
5789 ret = gimplify_var_or_parm_decl (expr_p);
5790 break;
5791
5792 case RESULT_DECL:
5793 /* When within an OpenMP context, notice uses of variables. */
5794 if (gimplify_omp_ctxp)
5795 omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
5796 ret = GS_ALL_DONE;
5797 break;
5798
5799 case SSA_NAME:
5800 /* Allow callbacks into the gimplifier during optimization. */
5801 ret = GS_ALL_DONE;
5802 break;
5803
5804 case OMP_PARALLEL:
5805 ret = gimplify_omp_parallel (expr_p, pre_p);
5806 break;
5807
5808 case OMP_FOR:
5809 ret = gimplify_omp_for (expr_p, pre_p);
5810 break;
5811
5812 case OMP_SECTIONS:
5813 case OMP_SINGLE:
5814 ret = gimplify_omp_workshare (expr_p, pre_p);
5815 break;
5816
5817 case OMP_SECTION:
5818 case OMP_MASTER:
5819 case OMP_ORDERED:
5820 case OMP_CRITICAL:
5821 gimplify_to_stmt_list (&OMP_BODY (*expr_p));
5822 break;
5823
5824 case OMP_ATOMIC:
5825 ret = gimplify_omp_atomic (expr_p, pre_p);
5826 break;
5827
5828 case OMP_RETURN:
5829 case OMP_CONTINUE:
5830 ret = GS_ALL_DONE;
5831 break;
5832
5833 default:
5834 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
5835 {
5836 case tcc_comparison:
5837	  /* Handle comparisons of non-scalar-mode aggregates with a call to
5838	     memcmp.  It would be nice to only have to do
5839 this for variable-sized objects, but then we'd have to allow
5840 the same nest of reference nodes we allow for MODIFY_EXPR and
5841 that's too complex.
5842
5843 Compare scalar mode aggregates as scalar mode values. Using
5844 memcmp for them would be very inefficient at best, and is
5845 plain wrong if bitfields are involved. */
5846
5847 {
5848 tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
5849
5850 if (!AGGREGATE_TYPE_P (type))
5851 goto expr_2;
5852 else if (TYPE_MODE (type) != BLKmode)
5853 ret = gimplify_scalar_mode_aggregate_compare (expr_p);
5854 else
5855 ret = gimplify_variable_sized_compare (expr_p);
5856
5857 break;
5858 }
5859
5860 /* If *EXPR_P does not need to be special-cased, handle it
5861 according to its class. */
5862 case tcc_unary:
5863 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
5864 post_p, is_gimple_val, fb_rvalue);
5865 break;
5866
5867 case tcc_binary:
5868 expr_2:
5869 {
5870 enum gimplify_status r0, r1;
5871
5872 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
5873 post_p, is_gimple_val, fb_rvalue);
5874 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
5875 post_p, is_gimple_val, fb_rvalue);
5876
5877 ret = MIN (r0, r1);
5878 break;
5879 }
5880
5881 case tcc_declaration:
5882 case tcc_constant:
5883 ret = GS_ALL_DONE;
5884 goto dont_recalculate;
5885
5886 default:
5887 gcc_assert (TREE_CODE (*expr_p) == TRUTH_AND_EXPR
5888 || TREE_CODE (*expr_p) == TRUTH_OR_EXPR
5889 || TREE_CODE (*expr_p) == TRUTH_XOR_EXPR);
5890 goto expr_2;
5891 }
5892
5893 recalculate_side_effects (*expr_p);
5894 dont_recalculate:
5895 break;
5896 }
5897
5898 /* If we replaced *expr_p, gimplify again. */
5899 if (ret == GS_OK && (*expr_p == NULL || *expr_p == save_expr))
5900 ret = GS_ALL_DONE;
5901 }
5902 while (ret == GS_OK);
5903
5904 /* If we encountered an error_mark somewhere nested inside, either
5905 stub out the statement or propagate the error back out. */
5906 if (ret == GS_ERROR)
5907 {
5908 if (is_statement)
5909 *expr_p = NULL;
5910 goto out;
5911 }
5912
5913 /* This was only valid as a return value from the langhook, which
5914 we handled. Make sure it doesn't escape from any other context. */
5915 gcc_assert (ret != GS_UNHANDLED);
5916
5917 if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
5918 {
5919 /* We aren't looking for a value, and we don't have a valid
5920 statement. If it doesn't have side-effects, throw it away. */
5921 if (!TREE_SIDE_EFFECTS (*expr_p))
5922 *expr_p = NULL;
5923 else if (!TREE_THIS_VOLATILE (*expr_p))
5924 {
5925 /* This is probably a _REF that contains something nested that
5926 has side effects. Recurse through the operands to find it. */
5927 enum tree_code code = TREE_CODE (*expr_p);
5928
5929 switch (code)
5930 {
5931 case COMPONENT_REF:
5932 case REALPART_EXPR:
5933 case IMAGPART_EXPR:
5934 case VIEW_CONVERT_EXPR:
5935 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
5936 gimple_test_f, fallback);
5937 break;
5938
5939 case ARRAY_REF:
5940 case ARRAY_RANGE_REF:
5941 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
5942 gimple_test_f, fallback);
5943 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
5944 gimple_test_f, fallback);
5945 break;
5946
5947 default:
5948 /* Anything else with side-effects must be converted to
5949 a valid statement before we get here. */
5950 gcc_unreachable ();
5951 }
5952
5953 *expr_p = NULL;
5954 }
5955 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
5956 && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode)
5957 {
5958 /* Historically, the compiler has treated a bare reference
5959 to a non-BLKmode volatile lvalue as forcing a load. */
5960 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
5961 /* Normally, we do not want to create a temporary for a
5962 TREE_ADDRESSABLE type because such a type should not be
5963 copied by bitwise-assignment. However, we make an
5964 exception here, as all we are doing here is ensuring that
5965 we read the bytes that make up the type. We use
5966 create_tmp_var_raw because create_tmp_var will abort when
5967 given a TREE_ADDRESSABLE type. */
5968 tree tmp = create_tmp_var_raw (type, "vol");
5969 gimple_add_tmp_var (tmp);
5970 *expr_p = build2 (GIMPLE_MODIFY_STMT, type, tmp, *expr_p);
5971 }
5972 else
5973 /* We can't do anything useful with a volatile reference to
5974 an incomplete type, so just throw it away. Likewise for
5975 a BLKmode type, since any implicit inner load should
5976 already have been turned into an explicit one by the
5977 gimplification process. */
5978 *expr_p = NULL;
5979 }
5980
5981 /* If we are gimplifying at the statement level, we're done. Tack
5982 everything together and replace the original statement with the
5983 gimplified form. */
5984 if (fallback == fb_none || is_statement)
5985 {
5986 if (internal_pre || internal_post)
5987 {
5988 append_to_statement_list (*expr_p, &internal_pre);
5989 append_to_statement_list (internal_post, &internal_pre);
5990 annotate_all_with_locus (&internal_pre, input_location);
5991 *expr_p = internal_pre;
5992 }
5993 else if (!*expr_p)
5994 ;
5995 else if (TREE_CODE (*expr_p) == STATEMENT_LIST)
5996 annotate_all_with_locus (expr_p, input_location);
5997 else
5998 annotate_one_with_locus (*expr_p, input_location);
5999 goto out;
6000 }
6001
6002 /* Otherwise we're gimplifying a subexpression, so the resulting value is
6003 interesting. */
6004
6005 /* If it's sufficiently simple already, we're done. Unless we are
6006 handling some post-effects internally; if that's the case, we need to
6007 copy into a temp before adding the post-effects to the tree. */
6008 if (!internal_post && (*gimple_test_f) (*expr_p))
6009 goto out;
6010
6011 /* Otherwise, we need to create a new temporary for the gimplified
6012 expression. */
6013
6014 /* We can't return an lvalue if we have an internal postqueue. The
6015 object the lvalue refers to would (probably) be modified by the
6016 postqueue; we need to copy the value out first, which means an
6017 rvalue. */
6018 if ((fallback & fb_lvalue) && !internal_post
6019 && is_gimple_addressable (*expr_p))
6020 {
6021 /* An lvalue will do. Take the address of the expression, store it
6022 in a temporary, and replace the expression with an INDIRECT_REF of
6023 that temporary. */
6024 tmp = build_fold_addr_expr (*expr_p);
6025 gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
6026 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (tmp)), tmp);
6027 }
6028 else if ((fallback & fb_rvalue) && is_gimple_formal_tmp_rhs (*expr_p))
6029 {
6030 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
6031
6032 /* An rvalue will do. Assign the gimplified expression into a new
6033 temporary TMP and replace the original expression with TMP. */
6034
6035 if (internal_post || (fallback & fb_lvalue))
6036 /* The postqueue might change the value of the expression between
6037 the initialization and use of the temporary, so we can't use a
6038 formal temp. FIXME do we care? */
6039 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
6040 else
6041 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
6042
6043 if (TREE_CODE (*expr_p) != SSA_NAME)
6044 DECL_GIMPLE_FORMAL_TEMP_P (*expr_p) = 1;
6045 }
6046 else
6047 {
6048 #ifdef ENABLE_CHECKING
6049 if (!(fallback & fb_mayfail))
6050 {
6051 fprintf (stderr, "gimplification failed:\n");
6052 print_generic_expr (stderr, *expr_p, 0);
6053 debug_tree (*expr_p);
6054 internal_error ("gimplification failed");
6055 }
6056 #endif
6057 gcc_assert (fallback & fb_mayfail);
6058 /* If this is an asm statement, and the user asked for the
6059 impossible, don't die. Fail and let gimplify_asm_expr
6060 issue an error. */
6061 ret = GS_ERROR;
6062 goto out;
6063 }
6064
6065 /* Make sure the temporary matches our predicate. */
6066 gcc_assert ((*gimple_test_f) (*expr_p));
6067
6068 if (internal_post)
6069 {
6070 annotate_all_with_locus (&internal_post, input_location);
6071 append_to_statement_list (internal_post, pre_p);
6072 }
6073
6074 out:
6075 input_location = saved_location;
6076 return ret;
6077 }
6078
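/* Illustrative sketch only (hypothetical helper, not part of the pass):
   most callers in this file gimplify one operand at a time, requesting
   a GIMPLE rvalue and propagating failure, along these lines.  */

static enum gimplify_status
gimplify_operand_sketch (tree *expr_p, tree *pre_p, tree *post_p)
{
  enum gimplify_status gs;

  /* Rewrite operand 0 in place; any side effects are queued on
     PRE_P/POST_P by gimplify_expr itself.  */
  gs = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
                      is_gimple_val, fb_rvalue);
  if (gs == GS_ERROR)
    return GS_ERROR;

  recalculate_side_effects (*expr_p);
  return GS_OK;
}
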
6079 /* Look through TYPE for variable-sized objects and gimplify each such
6080 size that we find. Add to LIST_P any statements generated. */
6081
6082 void
6083 gimplify_type_sizes (tree type, tree *list_p)
6084 {
6085 tree field, t;
6086
6087 if (type == NULL || type == error_mark_node)
6088 return;
6089
6090 /* We first do the main variant, then copy into any other variants. */
6091 type = TYPE_MAIN_VARIANT (type);
6092
6093 /* Avoid infinite recursion. */
6094 if (TYPE_SIZES_GIMPLIFIED (type))
6095 return;
6096
6097 TYPE_SIZES_GIMPLIFIED (type) = 1;
6098
6099 switch (TREE_CODE (type))
6100 {
6101 case INTEGER_TYPE:
6102 case ENUMERAL_TYPE:
6103 case BOOLEAN_TYPE:
6104 case REAL_TYPE:
6105 gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
6106 gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);
6107
6108 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6109 {
6110 TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
6111 TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
6112 }
6113 break;
6114
6115 case ARRAY_TYPE:
6116 /* These types may not have declarations, so handle them here. */
6117 gimplify_type_sizes (TREE_TYPE (type), list_p);
6118 gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
6119 break;
6120
6121 case RECORD_TYPE:
6122 case UNION_TYPE:
6123 case QUAL_UNION_TYPE:
6124 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
6125 if (TREE_CODE (field) == FIELD_DECL)
6126 {
6127 gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
6128 gimplify_type_sizes (TREE_TYPE (field), list_p);
6129 }
6130 break;
6131
6132 case POINTER_TYPE:
6133 case REFERENCE_TYPE:
6134 /* We used to recurse on the pointed-to type here, which turned out to
6135 be incorrect because its definition might refer to variables not
6136 yet initialized at this point if a forward declaration is involved.
6137
6138 It was actually useful for anonymous pointed-to types to ensure
6139 that the sizes evaluation dominates every possible later use of the
6140 values. Restricting to such types here would be safe since there
6141	 is no possible forward declaration around, but would introduce
6142	 undesirable middle-end semantics for anonymous types.  We then defer to
6143 front-ends the responsibility of ensuring that the sizes are
6144 evaluated both early and late enough, e.g. by attaching artificial
6145 type declarations to the tree. */
6146 break;
6147
6148 default:
6149 break;
6150 }
6151
6152 gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
6153 gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);
6154
6155 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6156 {
6157 TYPE_SIZE (t) = TYPE_SIZE (type);
6158 TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
6159 TYPE_SIZES_GIMPLIFIED (t) = 1;
6160 }
6161 }
6162
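/* Illustrative sketch only (hypothetical helper, not used here): for a
   C99 declaration such as "int a[n][m]", TYPE_SIZE of the array type is
   an expression involving n and m.  The functions above evaluate such
   expressions into temporaries so that later uses of the sizes are
   plain GIMPLE values, roughly as follows for a declaration DECL.  */

static void
gimplify_decl_sizes_sketch (tree decl, tree *pre_p)
{
  /* Gimplify any variable bounds buried in the type itself...  */
  gimplify_type_sizes (TREE_TYPE (decl), pre_p);

  /* ...and the size annotations stored on the declaration.  */
  gimplify_one_sizepos (&DECL_SIZE (decl), pre_p);
  gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), pre_p);
}
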
6163 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
6164 a size or position, has had all of its SAVE_EXPRs evaluated.
6165 We add any required statements to STMT_P. */
6166
6167 void
6168 gimplify_one_sizepos (tree *expr_p, tree *stmt_p)
6169 {
6170 tree type, expr = *expr_p;
6171
6172 /* We don't do anything if the value isn't there, is constant, or contains
6173	 a PLACEHOLDER_EXPR.  We also don't want to do anything if it's already
6174 a VAR_DECL. If it's a VAR_DECL from another function, the gimplifier
6175 will want to replace it with a new variable, but that will cause problems
6176 if this type is from outside the function. It's OK to have that here. */
6177 if (expr == NULL_TREE || TREE_CONSTANT (expr)
6178 || TREE_CODE (expr) == VAR_DECL
6179 || CONTAINS_PLACEHOLDER_P (expr))
6180 return;
6181
6182 type = TREE_TYPE (expr);
6183 *expr_p = unshare_expr (expr);
6184
6185 gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue);
6186 expr = *expr_p;
6187
6188	 /* Verify that we have an exact type match with the original expression.
6189 In particular, we do not wish to drop a "sizetype" in favour of a
6190 type of similar dimensions. We don't want to pollute the generic
6191 type-stripping code with this knowledge because it doesn't matter
6192 for the bulk of GENERIC/GIMPLE. It only matters that TYPE_SIZE_UNIT
6193 and friends retain their "sizetype-ness". */
6194 if (TREE_TYPE (expr) != type
6195 && TREE_CODE (type) == INTEGER_TYPE
6196 && TYPE_IS_SIZETYPE (type))
6197 {
6198 tree tmp;
6199
6200 *expr_p = create_tmp_var (type, NULL);
6201 tmp = build1 (NOP_EXPR, type, expr);
6202 tmp = build2 (GIMPLE_MODIFY_STMT, type, *expr_p, tmp);
6203 if (EXPR_HAS_LOCATION (expr))
6204 SET_EXPR_LOCUS (tmp, EXPR_LOCUS (expr));
6205 else
6206 SET_EXPR_LOCATION (tmp, input_location);
6207
6208 gimplify_and_add (tmp, stmt_p);
6209 }
6210 }
6211 \f
6212 #ifdef ENABLE_CHECKING
6213 /* Compare types A and B for a "close enough" match. */
6214
6215 static bool
6216 cpt_same_type (tree a, tree b)
6217 {
6218 if (lang_hooks.types_compatible_p (a, b))
6219 return true;
6220
6221 /* ??? The C++ FE decomposes METHOD_TYPES to FUNCTION_TYPES and doesn't
6222 link them together. This routine is intended to catch type errors
6223 that will affect the optimizers, and the optimizers don't add new
6224 dereferences of function pointers, so ignore it. */
6225 if ((TREE_CODE (a) == FUNCTION_TYPE || TREE_CODE (a) == METHOD_TYPE)
6226 && (TREE_CODE (b) == FUNCTION_TYPE || TREE_CODE (b) == METHOD_TYPE))
6227 return true;
6228
6229 /* ??? The C FE pushes type qualifiers after the fact into the type of
6230 the element from the type of the array. See build_unary_op's handling
6231 of ADDR_EXPR. This seems wrong -- if we were going to do this, we
6232 should have done it when creating the variable in the first place.
6233	 Alternatively, why aren't the two array types made variants? */
6234 if (TREE_CODE (a) == ARRAY_TYPE && TREE_CODE (b) == ARRAY_TYPE)
6235 return cpt_same_type (TREE_TYPE (a), TREE_TYPE (b));
6236
6237 /* And because of those, we have to recurse down through pointers. */
6238 if (POINTER_TYPE_P (a) && POINTER_TYPE_P (b))
6239 return cpt_same_type (TREE_TYPE (a), TREE_TYPE (b));
6240
6241 return false;
6242 }
6243
6244	/* Check for some cases where the front end has omitted cast expressions.
6245 The type of a dereference should correspond to the pointer type;
6246 similarly the type of an address should match its object. */
6247
6248 static tree
6249 check_pointer_types_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
6250 void *data ATTRIBUTE_UNUSED)
6251 {
6252 tree t = *tp;
6253 tree ptype, otype, dtype;
6254
6255 switch (TREE_CODE (t))
6256 {
6257 case INDIRECT_REF:
6258 case ARRAY_REF:
6259 otype = TREE_TYPE (t);
6260 ptype = TREE_TYPE (TREE_OPERAND (t, 0));
6261 dtype = TREE_TYPE (ptype);
6262 gcc_assert (cpt_same_type (otype, dtype));
6263 break;
6264
6265 case ADDR_EXPR:
6266 ptype = TREE_TYPE (t);
6267 otype = TREE_TYPE (TREE_OPERAND (t, 0));
6268 dtype = TREE_TYPE (ptype);
6269 if (!cpt_same_type (otype, dtype))
6270 {
6271 /* &array is allowed to produce a pointer to the element, rather than
6272 a pointer to the array type. We must allow this in order to
6273 properly represent assigning the address of an array in C into
6274	 a pointer to the element type.  */
6275 gcc_assert (TREE_CODE (otype) == ARRAY_TYPE
6276 && POINTER_TYPE_P (ptype)
6277 && cpt_same_type (TREE_TYPE (otype), dtype));
6278 break;
6279 }
6280 break;
6281
6282 default:
6283 return NULL_TREE;
6284 }
6285
6286
6287 return NULL_TREE;
6288 }
6289 #endif
6290
6291 /* Gimplify the body of statements pointed to by BODY_P. FNDECL is the
6292 function decl containing BODY. */
6293
6294 void
6295 gimplify_body (tree *body_p, tree fndecl, bool do_parms)
6296 {
6297 location_t saved_location = input_location;
6298 tree body, parm_stmts;
6299
6300 timevar_push (TV_TREE_GIMPLIFY);
6301
6302 gcc_assert (gimplify_ctxp == NULL);
6303 push_gimplify_context ();
6304
6305	 /* Unshare most shared trees in the body and in those of any nested functions.
6306 It would seem we don't have to do this for nested functions because
6307 they are supposed to be output and then the outer function gimplified
6308 first, but the g++ front end doesn't always do it that way. */
6309 unshare_body (body_p, fndecl);
6310 unvisit_body (body_p, fndecl);
6311
6312	 /* Make sure input_location isn't set to something weird.  */
6313 input_location = DECL_SOURCE_LOCATION (fndecl);
6314
6315 /* Resolve callee-copies. This has to be done before processing
6316 the body so that DECL_VALUE_EXPR gets processed correctly. */
6317 parm_stmts = do_parms ? gimplify_parameters () : NULL;
6318
6319 /* Gimplify the function's body. */
6320 gimplify_stmt (body_p);
6321 body = *body_p;
6322
6323 if (!body)
6324 body = alloc_stmt_list ();
6325 else if (TREE_CODE (body) == STATEMENT_LIST)
6326 {
6327 tree t = expr_only (*body_p);
6328 if (t)
6329 body = t;
6330 }
6331
6332 /* If there isn't an outer BIND_EXPR, add one. */
6333 if (TREE_CODE (body) != BIND_EXPR)
6334 {
6335 tree b = build3 (BIND_EXPR, void_type_node, NULL_TREE,
6336 NULL_TREE, NULL_TREE);
6337 TREE_SIDE_EFFECTS (b) = 1;
6338 append_to_statement_list_force (body, &BIND_EXPR_BODY (b));
6339 body = b;
6340 }
6341
6342 /* If we had callee-copies statements, insert them at the beginning
6343 of the function. */
6344 if (parm_stmts)
6345 {
6346 append_to_statement_list_force (BIND_EXPR_BODY (body), &parm_stmts);
6347 BIND_EXPR_BODY (body) = parm_stmts;
6348 }
6349
6350 /* Unshare again, in case gimplification was sloppy. */
6351 unshare_all_trees (body);
6352
6353 *body_p = body;
6354
6355 pop_gimplify_context (body);
6356 gcc_assert (gimplify_ctxp == NULL);
6357
6358 #ifdef ENABLE_CHECKING
6359 walk_tree (body_p, check_pointer_types_r, NULL, NULL);
6360 #endif
6361
6362 timevar_pop (TV_TREE_GIMPLIFY);
6363 input_location = saved_location;
6364 }
6365
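/* Illustrative sketch only (hypothetical helper, not part of this
   file): other parts of the compiler gimplify a detached statement
   with the same push/pop bracketing that gimplify_body uses above.  */

static void
gimplify_detached_stmt_sketch (tree stmt, tree *stmt_list_p)
{
  push_gimplify_context ();
  gimplify_and_add (stmt, stmt_list_p);
  pop_gimplify_context (NULL);
}
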
6366 /* Entry point to the gimplification pass. FNDECL is the FUNCTION_DECL
6367 node for the function we want to gimplify. */
6368
6369 void
6370 gimplify_function_tree (tree fndecl)
6371 {
6372 tree oldfn, parm, ret;
6373
6374 oldfn = current_function_decl;
6375 current_function_decl = fndecl;
6376 cfun = DECL_STRUCT_FUNCTION (fndecl);
6377 if (cfun == NULL)
6378 allocate_struct_function (fndecl);
6379
6380 for (parm = DECL_ARGUMENTS (fndecl); parm ; parm = TREE_CHAIN (parm))
6381 {
6382 /* Preliminarily mark non-addressed complex variables as eligible
6383 for promotion to gimple registers. We'll transform their uses
6384 as we find them. */
6385 if ((TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
6386 || TREE_CODE (TREE_TYPE (parm)) == VECTOR_TYPE)
6387 && !TREE_THIS_VOLATILE (parm)
6388 && !needs_to_live_in_memory (parm))
6389 DECL_GIMPLE_REG_P (parm) = 1;
6390 }
6391
6392 ret = DECL_RESULT (fndecl);
6393 if ((TREE_CODE (TREE_TYPE (ret)) == COMPLEX_TYPE
6394 || TREE_CODE (TREE_TYPE (ret)) == VECTOR_TYPE)
6395 && !needs_to_live_in_memory (ret))
6396 DECL_GIMPLE_REG_P (ret) = 1;
6397
6398 gimplify_body (&DECL_SAVED_TREE (fndecl), fndecl, true);
6399
6400 /* If we're instrumenting function entry/exit, then prepend the call to
6401 the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
6402 catch the exit hook. */
6403 /* ??? Add some way to ignore exceptions for this TFE. */
6404 if (flag_instrument_function_entry_exit
6405 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl))
6406 {
6407 tree tf, x, bind;
6408
6409 tf = build2 (TRY_FINALLY_EXPR, void_type_node, NULL, NULL);
6410 TREE_SIDE_EFFECTS (tf) = 1;
6411 x = DECL_SAVED_TREE (fndecl);
6412 append_to_statement_list (x, &TREE_OPERAND (tf, 0));
6413 x = implicit_built_in_decls[BUILT_IN_PROFILE_FUNC_EXIT];
6414 x = build_function_call_expr (x, NULL);
6415 append_to_statement_list (x, &TREE_OPERAND (tf, 1));
6416
6417 bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
6418 TREE_SIDE_EFFECTS (bind) = 1;
6419 x = implicit_built_in_decls[BUILT_IN_PROFILE_FUNC_ENTER];
6420 x = build_function_call_expr (x, NULL);
6421 append_to_statement_list (x, &BIND_EXPR_BODY (bind));
6422 append_to_statement_list (tf, &BIND_EXPR_BODY (bind));
6423
6424 DECL_SAVED_TREE (fndecl) = bind;
6425 }
6426
6427 cfun->gimplified = true;
6428 current_function_decl = oldfn;
6429 cfun = oldfn ? DECL_STRUCT_FUNCTION (oldfn) : NULL;
6430 }
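
/* Illustrative sketch only: the instrumented function built above has
   the GENERIC shape

     BIND_EXPR
       <call to BUILT_IN_PROFILE_FUNC_ENTER>
       TRY_FINALLY_EXPR
         <original DECL_SAVED_TREE>
         <call to BUILT_IN_PROFILE_FUNC_EXIT>

   so the entry hook runs first and the exit hook is reached through
   the TRY_FINALLY_EXPR cleanup.  */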
6431 \f
6432	/* Expands EXPR to a list of gimple statements STMTS.  If SIMPLE is true,
6433 force the result to be either ssa_name or an invariant, otherwise
6434 just force it to be a rhs expression. If VAR is not NULL, make the
6435 base variable of the final destination be VAR if suitable. */
6436
6437 tree
6438 force_gimple_operand (tree expr, tree *stmts, bool simple, tree var)
6439 {
6440 tree t;
6441 enum gimplify_status ret;
6442 gimple_predicate gimple_test_f;
6443
6444 *stmts = NULL_TREE;
6445
6446 if (is_gimple_val (expr))
6447 return expr;
6448
6449 gimple_test_f = simple ? is_gimple_val : is_gimple_reg_rhs;
6450
6451 push_gimplify_context ();
6452 gimplify_ctxp->into_ssa = gimple_in_ssa_p (cfun);
6453
6454 if (var)
6455 expr = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (var), var, expr);
6456
6457 ret = gimplify_expr (&expr, stmts, NULL,
6458 gimple_test_f, fb_rvalue);
6459 gcc_assert (ret != GS_ERROR);
6460
6461 if (gimple_referenced_vars (cfun))
6462 {
6463 for (t = gimplify_ctxp->temps; t ; t = TREE_CHAIN (t))
6464 add_referenced_var (t);
6465 }
6466
6467 pop_gimplify_context (NULL);
6468
6469 return expr;
6470 }
6471
6472 /* Invokes force_gimple_operand for EXPR with parameters SIMPLE_P and VAR. If
6473 some statements are produced, emits them before BSI. */
6474
6475 tree
6476 force_gimple_operand_bsi (block_stmt_iterator *bsi, tree expr,
6477 bool simple_p, tree var)
6478 {
6479 tree stmts;
6480
6481 expr = force_gimple_operand (expr, &stmts, simple_p, var);
6482 if (stmts)
6483 bsi_insert_before (bsi, stmts, BSI_SAME_STMT);
6484
6485 return expr;
6486 }
6487
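/* Illustrative sketch only (hypothetical helper, not part of this
   file): a typical use from an optimization pass is to build a GENERIC
   expression and let force_gimple_operand_bsi flatten it into GIMPLE
   statements inserted before the statement at *BSI, returning a simple
   value for the result.  BASE and OFFSET are assumed to be existing
   trees of compatible (non-pointer) types.  */

static tree
insert_sum_before_sketch (block_stmt_iterator *bsi, tree base, tree offset)
{
  tree sum = fold_build2 (PLUS_EXPR, TREE_TYPE (base), base,
                          fold_convert (TREE_TYPE (base), offset));

  /* Any statements needed to compute SUM are emitted before *BSI;
     with SIMPLE_P true the result is an SSA name or an invariant.  */
  return force_gimple_operand_bsi (bsi, sum, true, NULL_TREE);
}
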
6488 #include "gt-gimplify.h"