gen-pass-instances.awk: Add len_of_call var in handle_line
[gcc.git] / gcc / gimple-expr.c
1 /* Gimple decl, type, and expression support functions.
2
3 Copyright (C) 2007-2015 Free Software Foundation, Inc.
4 Contributed by Aldy Hernandez <aldyh@redhat.com>
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "backend.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "stringpool.h"
29 #include "gimple-ssa.h"
30 #include "fold-const.h"
31 #include "tree-eh.h"
32 #include "gimplify.h"
33 #include "stor-layout.h"
34 #include "demangle.h"
35 #include "hash-set.h"
36 #include "rtl.h"
37
38 /* ----- Type related ----- */
39
40 /* Return true if the conversion from INNER_TYPE to OUTER_TYPE is a
41 useless type conversion, otherwise return false.
42
43 This function implicitly defines the middle-end type system. With
44 the notion of 'a < b' meaning that useless_type_conversion_p (a, b)
45 holds and 'a > b' meaning that useless_type_conversion_p (b, a) holds,
46 the following invariants shall be fulfilled:
47
48 1) useless_type_conversion_p is transitive.
49 If a < b and b < c then a < c.
50
51 2) useless_type_conversion_p is not symmetric.
52 From a < b does not follow a > b.
53
54 3) Types define the available set of operations applicable to values.
55 A type conversion is useless if the operations for the target type
56 is a subset of the operations for the source type. For example
57 casts to void* are useless, casts from void* are not (void* can't
58 be dereferenced or offsetted, but copied, hence its set of operations
59 is a strict subset of that of all other data pointer types). Casts
60 to const T* are useless (can't be written to), casts from const T*
61 to T* are not. */
62
63 bool
64 useless_type_conversion_p (tree outer_type, tree inner_type)
65 {
66 /* Do the following before stripping toplevel qualifiers. */
67 if (POINTER_TYPE_P (inner_type)
68 && POINTER_TYPE_P (outer_type))
69 {
70 /* Do not lose casts between pointers to different address spaces. */
71 if (TYPE_ADDR_SPACE (TREE_TYPE (outer_type))
72 != TYPE_ADDR_SPACE (TREE_TYPE (inner_type)))
73 return false;
74 /* Do not lose casts to function pointer types. */
75 if ((TREE_CODE (TREE_TYPE (outer_type)) == FUNCTION_TYPE
76 || TREE_CODE (TREE_TYPE (outer_type)) == METHOD_TYPE)
77 && !(TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE
78 || TREE_CODE (TREE_TYPE (inner_type)) == METHOD_TYPE))
79 return false;
80 }
81
82 /* From now on qualifiers on value types do not matter. */
83 inner_type = TYPE_MAIN_VARIANT (inner_type);
84 outer_type = TYPE_MAIN_VARIANT (outer_type);
85
86 if (inner_type == outer_type)
87 return true;
88
89 /* Changes in machine mode are never useless conversions because the RTL
90 middle-end expects explicit conversions between modes. */
91 if (TYPE_MODE (inner_type) != TYPE_MODE (outer_type))
92 return false;
93
94 /* If both the inner and outer types are integral types, then the
95 conversion is not necessary if they have the same mode and
96 signedness and precision, and both or neither are boolean. */
97 if (INTEGRAL_TYPE_P (inner_type)
98 && INTEGRAL_TYPE_P (outer_type))
99 {
100 /* Preserve changes in signedness or precision. */
101 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
102 || TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
103 return false;
104
105 /* Preserve conversions to/from BOOLEAN_TYPE if types are not
106 of precision one. */
107 if (((TREE_CODE (inner_type) == BOOLEAN_TYPE)
108 != (TREE_CODE (outer_type) == BOOLEAN_TYPE))
109 && TYPE_PRECISION (outer_type) != 1)
110 return false;
111
112 /* We don't need to preserve changes in the types minimum or
113 maximum value in general as these do not generate code
114 unless the types precisions are different. */
115 return true;
116 }
117
118 /* Scalar floating point types with the same mode are compatible. */
119 else if (SCALAR_FLOAT_TYPE_P (inner_type)
120 && SCALAR_FLOAT_TYPE_P (outer_type))
121 return true;
122
123 /* Fixed point types with the same mode are compatible. */
124 else if (FIXED_POINT_TYPE_P (inner_type)
125 && FIXED_POINT_TYPE_P (outer_type))
126 return true;
127
128 /* We need to take special care recursing to pointed-to types. */
129 else if (POINTER_TYPE_P (inner_type)
130 && POINTER_TYPE_P (outer_type))
131 {
132 /* We do not care for const qualification of the pointed-to types
133 as const qualification has no semantic value to the middle-end. */
134
135 /* Otherwise pointers/references are equivalent. */
136 return true;
137 }
138
139 /* Recurse for complex types. */
140 else if (TREE_CODE (inner_type) == COMPLEX_TYPE
141 && TREE_CODE (outer_type) == COMPLEX_TYPE)
142 return useless_type_conversion_p (TREE_TYPE (outer_type),
143 TREE_TYPE (inner_type));
144
145 /* Recurse for vector types with the same number of subparts. */
146 else if (TREE_CODE (inner_type) == VECTOR_TYPE
147 && TREE_CODE (outer_type) == VECTOR_TYPE
148 && TYPE_PRECISION (inner_type) == TYPE_PRECISION (outer_type))
149 return useless_type_conversion_p (TREE_TYPE (outer_type),
150 TREE_TYPE (inner_type));
151
152 else if (TREE_CODE (inner_type) == ARRAY_TYPE
153 && TREE_CODE (outer_type) == ARRAY_TYPE)
154 {
155 /* Preserve various attributes. */
156 if (TYPE_REVERSE_STORAGE_ORDER (inner_type)
157 != TYPE_REVERSE_STORAGE_ORDER (outer_type))
158 return false;
159 if (TYPE_STRING_FLAG (inner_type) != TYPE_STRING_FLAG (outer_type))
160 return false;
161
162 /* Conversions from array types with unknown extent to
163 array types with known extent are not useless. */
164 if (!TYPE_DOMAIN (inner_type) && TYPE_DOMAIN (outer_type))
165 return false;
166
167 /* Nor are conversions from array types with non-constant size to
168 array types with constant size or to different size. */
169 if (TYPE_SIZE (outer_type)
170 && TREE_CODE (TYPE_SIZE (outer_type)) == INTEGER_CST
171 && (!TYPE_SIZE (inner_type)
172 || TREE_CODE (TYPE_SIZE (inner_type)) != INTEGER_CST
173 || !tree_int_cst_equal (TYPE_SIZE (outer_type),
174 TYPE_SIZE (inner_type))))
175 return false;
176
177 /* Check conversions between arrays with partially known extents.
178 If the array min/max values are constant they have to match.
179 Otherwise allow conversions to unknown and variable extents.
180 In particular this declares conversions that may change the
181 mode to BLKmode as useless. */
182 if (TYPE_DOMAIN (inner_type)
183 && TYPE_DOMAIN (outer_type)
184 && TYPE_DOMAIN (inner_type) != TYPE_DOMAIN (outer_type))
185 {
186 tree inner_min = TYPE_MIN_VALUE (TYPE_DOMAIN (inner_type));
187 tree outer_min = TYPE_MIN_VALUE (TYPE_DOMAIN (outer_type));
188 tree inner_max = TYPE_MAX_VALUE (TYPE_DOMAIN (inner_type));
189 tree outer_max = TYPE_MAX_VALUE (TYPE_DOMAIN (outer_type));
190
191 /* After gimplification a variable min/max value carries no
192 additional information compared to a NULL value. All that
193 matters has been lowered to be part of the IL. */
194 if (inner_min && TREE_CODE (inner_min) != INTEGER_CST)
195 inner_min = NULL_TREE;
196 if (outer_min && TREE_CODE (outer_min) != INTEGER_CST)
197 outer_min = NULL_TREE;
198 if (inner_max && TREE_CODE (inner_max) != INTEGER_CST)
199 inner_max = NULL_TREE;
200 if (outer_max && TREE_CODE (outer_max) != INTEGER_CST)
201 outer_max = NULL_TREE;
202
203 /* Conversions NULL / variable <- cst are useless, but not
204 the other way around. */
205 if (outer_min
206 && (!inner_min
207 || !tree_int_cst_equal (inner_min, outer_min)))
208 return false;
209 if (outer_max
210 && (!inner_max
211 || !tree_int_cst_equal (inner_max, outer_max)))
212 return false;
213 }
214
215 /* Recurse on the element check. */
216 return useless_type_conversion_p (TREE_TYPE (outer_type),
217 TREE_TYPE (inner_type));
218 }
219
220 else if ((TREE_CODE (inner_type) == FUNCTION_TYPE
221 || TREE_CODE (inner_type) == METHOD_TYPE)
222 && TREE_CODE (inner_type) == TREE_CODE (outer_type))
223 {
224 tree outer_parm, inner_parm;
225
226 /* If the return types are not compatible bail out. */
227 if (!useless_type_conversion_p (TREE_TYPE (outer_type),
228 TREE_TYPE (inner_type)))
229 return false;
230
231 /* Method types should belong to a compatible base class. */
232 if (TREE_CODE (inner_type) == METHOD_TYPE
233 && !useless_type_conversion_p (TYPE_METHOD_BASETYPE (outer_type),
234 TYPE_METHOD_BASETYPE (inner_type)))
235 return false;
236
237 /* A conversion to an unprototyped argument list is ok. */
238 if (!prototype_p (outer_type))
239 return true;
240
241 /* If the unqualified argument types are compatible the conversion
242 is useless. */
243 if (TYPE_ARG_TYPES (outer_type) == TYPE_ARG_TYPES (inner_type))
244 return true;
245
246 for (outer_parm = TYPE_ARG_TYPES (outer_type),
247 inner_parm = TYPE_ARG_TYPES (inner_type);
248 outer_parm && inner_parm;
249 outer_parm = TREE_CHAIN (outer_parm),
250 inner_parm = TREE_CHAIN (inner_parm))
251 if (!useless_type_conversion_p
252 (TYPE_MAIN_VARIANT (TREE_VALUE (outer_parm)),
253 TYPE_MAIN_VARIANT (TREE_VALUE (inner_parm))))
254 return false;
255
256 /* If there is a mismatch in the number of arguments the functions
257 are not compatible. */
258 if (outer_parm || inner_parm)
259 return false;
260
261 /* Defer to the target if necessary. */
262 if (TYPE_ATTRIBUTES (inner_type) || TYPE_ATTRIBUTES (outer_type))
263 return comp_type_attributes (outer_type, inner_type) != 0;
264
265 return true;
266 }
267
268 /* For aggregates we rely on TYPE_CANONICAL exclusively and require
269 explicit conversions for types involving to be structurally
270 compared types. */
271 else if (AGGREGATE_TYPE_P (inner_type)
272 && TREE_CODE (inner_type) == TREE_CODE (outer_type))
273 return TYPE_CANONICAL (inner_type)
274 && TYPE_CANONICAL (inner_type) == TYPE_CANONICAL (outer_type);
275
276 else if (TREE_CODE (inner_type) == OFFSET_TYPE
277 && TREE_CODE (outer_type) == OFFSET_TYPE)
278 return useless_type_conversion_p (TREE_TYPE (outer_type),
279 TREE_TYPE (inner_type))
280 && useless_type_conversion_p
281 (TYPE_OFFSET_BASETYPE (outer_type),
282 TYPE_OFFSET_BASETYPE (inner_type));
283
284 return false;
285 }
286
287
288 /* ----- Decl related ----- */
289
290 /* Set sequence SEQ to be the GIMPLE body for function FN. */
291
292 void
293 gimple_set_body (tree fndecl, gimple_seq seq)
294 {
295 struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
296 if (fn == NULL)
297 {
298 /* If FNDECL still does not have a function structure associated
299 with it, then it does not make sense for it to receive a
300 GIMPLE body. */
301 gcc_assert (seq == NULL);
302 }
303 else
304 fn->gimple_body = seq;
305 }
306
307
308 /* Return the body of GIMPLE statements for function FN. After the
309 CFG pass, the function body doesn't exist anymore because it has
310 been split up into basic blocks. In this case, it returns
311 NULL. */
312
313 gimple_seq
314 gimple_body (tree fndecl)
315 {
316 struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
317 return fn ? fn->gimple_body : NULL;
318 }
319
320 /* Return true when FNDECL has Gimple body either in unlowered
321 or CFG form. */
322 bool
323 gimple_has_body_p (tree fndecl)
324 {
325 struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
326 return (gimple_body (fndecl) || (fn && fn->cfg));
327 }
328
329 /* Return a printable name for symbol DECL. */
330
331 const char *
332 gimple_decl_printable_name (tree decl, int verbosity)
333 {
334 if (!DECL_NAME (decl))
335 return NULL;
336
337 if (DECL_ASSEMBLER_NAME_SET_P (decl))
338 {
339 const char *str, *mangled_str;
340 int dmgl_opts = DMGL_NO_OPTS;
341
342 if (verbosity >= 2)
343 {
344 dmgl_opts = DMGL_VERBOSE
345 | DMGL_ANSI
346 | DMGL_GNU_V3
347 | DMGL_RET_POSTFIX;
348 if (TREE_CODE (decl) == FUNCTION_DECL)
349 dmgl_opts |= DMGL_PARAMS;
350 }
351
352 mangled_str = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
353 str = cplus_demangle_v3 (mangled_str, dmgl_opts);
354 return (str) ? str : mangled_str;
355 }
356
357 return IDENTIFIER_POINTER (DECL_NAME (decl));
358 }
359
360
361 /* Create a new VAR_DECL and copy information from VAR to it. */
362
363 tree
364 copy_var_decl (tree var, tree name, tree type)
365 {
366 tree copy = build_decl (DECL_SOURCE_LOCATION (var), VAR_DECL, name, type);
367
368 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (var);
369 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (var);
370 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (var);
371 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (var);
372 DECL_IGNORED_P (copy) = DECL_IGNORED_P (var);
373 DECL_CONTEXT (copy) = DECL_CONTEXT (var);
374 TREE_NO_WARNING (copy) = TREE_NO_WARNING (var);
375 TREE_USED (copy) = 1;
376 DECL_SEEN_IN_BIND_EXPR_P (copy) = 1;
377 DECL_ATTRIBUTES (copy) = DECL_ATTRIBUTES (var);
378
379 return copy;
380 }
381
382 /* Strip off a legitimate source ending from the input string NAME of
383 length LEN. Rather than having to know the names used by all of
384 our front ends, we strip off an ending of a period followed by
385 up to five characters. (Java uses ".class".) */
386
static inline void
remove_suffix (char *name, int len)
{
  int pos;

  /* Scan backwards from the end of NAME for a period, looking at most
     seven characters back and never past the start of the string.
     Truncate at the first period found.  */
  for (pos = 2; pos < 8; pos++)
    {
      if (len <= pos)
	break;
      if (name[len - pos] == '.')
	{
	  name[len - pos] = '\0';
	  break;
	}
    }
}
401
402 /* Create a new temporary name with PREFIX. Return an identifier. */
403
/* Monotonically increasing counter making each generated temporary
   name unique; persisted via GTY for precompiled headers / GC.  */
static GTY(()) unsigned int tmp_var_id_num;

/* Return a fresh identifier for a temporary, derived from PREFIX (or
   "T" when PREFIX is NULL) plus a unique number.  */

tree
create_tmp_var_name (const char *prefix)
{
  char *tmp_name;

  if (prefix)
    {
      /* Work on a copy so the caller's string is never modified.  */
      char *preftmp = ASTRDUP (prefix);

      /* Drop a source-file-style suffix and sanitize characters that
	 are not valid in assembler symbols.  */
      remove_suffix (preftmp, strlen (preftmp));
      clean_symbol_name (preftmp);

      prefix = preftmp;
    }

  /* Format a target-private name such as "prefix.NNN" into tmp_name.  */
  ASM_FORMAT_PRIVATE_NAME (tmp_name, prefix ? prefix : "T", tmp_var_id_num++);
  return get_identifier (tmp_name);
}
424
425 /* Create a new temporary variable declaration of type TYPE.
426 Do NOT push it into the current binding. */
427
428 tree
429 create_tmp_var_raw (tree type, const char *prefix)
430 {
431 tree tmp_var;
432
433 tmp_var = build_decl (input_location,
434 VAR_DECL, prefix ? create_tmp_var_name (prefix) : NULL,
435 type);
436
437 /* The variable was declared by the compiler. */
438 DECL_ARTIFICIAL (tmp_var) = 1;
439 /* And we don't want debug info for it. */
440 DECL_IGNORED_P (tmp_var) = 1;
441
442 /* Make the variable writable. */
443 TREE_READONLY (tmp_var) = 0;
444
445 DECL_EXTERNAL (tmp_var) = 0;
446 TREE_STATIC (tmp_var) = 0;
447 TREE_USED (tmp_var) = 1;
448
449 return tmp_var;
450 }
451
452 /* Create a new temporary variable declaration of type TYPE. DO push the
453 variable into the current binding. Further, assume that this is called
454 only from gimplification or optimization, at which point the creation of
455 certain types are bugs. */
456
457 tree
458 create_tmp_var (tree type, const char *prefix)
459 {
460 tree tmp_var;
461
462 /* We don't allow types that are addressable (meaning we can't make copies),
463 or incomplete. We also used to reject every variable size objects here,
464 but now support those for which a constant upper bound can be obtained.
465 The processing for variable sizes is performed in gimple_add_tmp_var,
466 point at which it really matters and possibly reached via paths not going
467 through this function, e.g. after direct calls to create_tmp_var_raw. */
468 gcc_assert (!TREE_ADDRESSABLE (type) && COMPLETE_TYPE_P (type));
469
470 tmp_var = create_tmp_var_raw (type, prefix);
471 gimple_add_tmp_var (tmp_var);
472 return tmp_var;
473 }
474
475 /* Create a new temporary variable declaration of type TYPE by calling
476 create_tmp_var and if TYPE is a vector or a complex number, mark the new
477 temporary as gimple register. */
478
479 tree
480 create_tmp_reg (tree type, const char *prefix)
481 {
482 tree tmp;
483
484 tmp = create_tmp_var (type, prefix);
485 if (TREE_CODE (type) == COMPLEX_TYPE
486 || TREE_CODE (type) == VECTOR_TYPE)
487 DECL_GIMPLE_REG_P (tmp) = 1;
488
489 return tmp;
490 }
491
492 /* Create a new temporary variable declaration of type TYPE by calling
493 create_tmp_var and if TYPE is a vector or a complex number, mark the new
494 temporary as gimple register. */
495
496 tree
497 create_tmp_reg_fn (struct function *fn, tree type, const char *prefix)
498 {
499 tree tmp;
500
501 tmp = create_tmp_var_raw (type, prefix);
502 gimple_add_tmp_var_fn (fn, tmp);
503 if (TREE_CODE (type) == COMPLEX_TYPE
504 || TREE_CODE (type) == VECTOR_TYPE)
505 DECL_GIMPLE_REG_P (tmp) = 1;
506
507 return tmp;
508 }
509
510
511 /* ----- Expression related ----- */
512
513 /* Extract the operands and code for expression EXPR into *SUBCODE_P,
514 *OP1_P, *OP2_P and *OP3_P respectively. */
515
516 void
517 extract_ops_from_tree_1 (tree expr, enum tree_code *subcode_p, tree *op1_p,
518 tree *op2_p, tree *op3_p)
519 {
520 enum gimple_rhs_class grhs_class;
521
522 *subcode_p = TREE_CODE (expr);
523 grhs_class = get_gimple_rhs_class (*subcode_p);
524
525 if (grhs_class == GIMPLE_TERNARY_RHS)
526 {
527 *op1_p = TREE_OPERAND (expr, 0);
528 *op2_p = TREE_OPERAND (expr, 1);
529 *op3_p = TREE_OPERAND (expr, 2);
530 }
531 else if (grhs_class == GIMPLE_BINARY_RHS)
532 {
533 *op1_p = TREE_OPERAND (expr, 0);
534 *op2_p = TREE_OPERAND (expr, 1);
535 *op3_p = NULL_TREE;
536 }
537 else if (grhs_class == GIMPLE_UNARY_RHS)
538 {
539 *op1_p = TREE_OPERAND (expr, 0);
540 *op2_p = NULL_TREE;
541 *op3_p = NULL_TREE;
542 }
543 else if (grhs_class == GIMPLE_SINGLE_RHS)
544 {
545 *op1_p = expr;
546 *op2_p = NULL_TREE;
547 *op3_p = NULL_TREE;
548 }
549 else
550 gcc_unreachable ();
551 }
552
553 /* Extract operands for a GIMPLE_COND statement out of COND_EXPR tree COND. */
554
555 void
556 gimple_cond_get_ops_from_tree (tree cond, enum tree_code *code_p,
557 tree *lhs_p, tree *rhs_p)
558 {
559 gcc_assert (COMPARISON_CLASS_P (cond)
560 || TREE_CODE (cond) == TRUTH_NOT_EXPR
561 || is_gimple_min_invariant (cond)
562 || SSA_VAR_P (cond));
563
564 extract_ops_from_tree (cond, code_p, lhs_p, rhs_p);
565
566 /* Canonicalize conditionals of the form 'if (!VAL)'. */
567 if (*code_p == TRUTH_NOT_EXPR)
568 {
569 *code_p = EQ_EXPR;
570 gcc_assert (*lhs_p && *rhs_p == NULL_TREE);
571 *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
572 }
573 /* Canonicalize conditionals of the form 'if (VAL)' */
574 else if (TREE_CODE_CLASS (*code_p) != tcc_comparison)
575 {
576 *code_p = NE_EXPR;
577 gcc_assert (*lhs_p && *rhs_p == NULL_TREE);
578 *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
579 }
580 }
581
582 /* Return true if T is a valid LHS for a GIMPLE assignment expression. */
583
584 bool
585 is_gimple_lvalue (tree t)
586 {
587 return (is_gimple_addressable (t)
588 || TREE_CODE (t) == WITH_SIZE_EXPR
589 /* These are complex lvalues, but don't have addresses, so they
590 go here. */
591 || TREE_CODE (t) == BIT_FIELD_REF);
592 }
593
594 /* Return true if T is a GIMPLE condition. */
595
596 bool
597 is_gimple_condexpr (tree t)
598 {
599 return (is_gimple_val (t) || (COMPARISON_CLASS_P (t)
600 && !tree_could_throw_p (t)
601 && is_gimple_val (TREE_OPERAND (t, 0))
602 && is_gimple_val (TREE_OPERAND (t, 1))));
603 }
604
605 /* Return true if T is a gimple address. */
606
607 bool
608 is_gimple_address (const_tree t)
609 {
610 tree op;
611
612 if (TREE_CODE (t) != ADDR_EXPR)
613 return false;
614
615 op = TREE_OPERAND (t, 0);
616 while (handled_component_p (op))
617 {
618 if ((TREE_CODE (op) == ARRAY_REF
619 || TREE_CODE (op) == ARRAY_RANGE_REF)
620 && !is_gimple_val (TREE_OPERAND (op, 1)))
621 return false;
622
623 op = TREE_OPERAND (op, 0);
624 }
625
626 if (CONSTANT_CLASS_P (op) || TREE_CODE (op) == MEM_REF)
627 return true;
628
629 switch (TREE_CODE (op))
630 {
631 case PARM_DECL:
632 case RESULT_DECL:
633 case LABEL_DECL:
634 case FUNCTION_DECL:
635 case VAR_DECL:
636 case CONST_DECL:
637 return true;
638
639 default:
640 return false;
641 }
642 }
643
644 /* Return true if T is a gimple invariant address. */
645
646 bool
647 is_gimple_invariant_address (const_tree t)
648 {
649 const_tree op;
650
651 if (TREE_CODE (t) != ADDR_EXPR)
652 return false;
653
654 op = strip_invariant_refs (TREE_OPERAND (t, 0));
655 if (!op)
656 return false;
657
658 if (TREE_CODE (op) == MEM_REF)
659 {
660 const_tree op0 = TREE_OPERAND (op, 0);
661 return (TREE_CODE (op0) == ADDR_EXPR
662 && (CONSTANT_CLASS_P (TREE_OPERAND (op0, 0))
663 || decl_address_invariant_p (TREE_OPERAND (op0, 0))));
664 }
665
666 return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
667 }
668
669 /* Return true if T is a gimple invariant address at IPA level
670 (so addresses of variables on stack are not allowed). */
671
672 bool
673 is_gimple_ip_invariant_address (const_tree t)
674 {
675 const_tree op;
676
677 if (TREE_CODE (t) != ADDR_EXPR)
678 return false;
679
680 op = strip_invariant_refs (TREE_OPERAND (t, 0));
681 if (!op)
682 return false;
683
684 if (TREE_CODE (op) == MEM_REF)
685 {
686 const_tree op0 = TREE_OPERAND (op, 0);
687 return (TREE_CODE (op0) == ADDR_EXPR
688 && (CONSTANT_CLASS_P (TREE_OPERAND (op0, 0))
689 || decl_address_ip_invariant_p (TREE_OPERAND (op0, 0))));
690 }
691
692 return CONSTANT_CLASS_P (op) || decl_address_ip_invariant_p (op);
693 }
694
695 /* Return true if T is a GIMPLE minimal invariant. It's a restricted
696 form of function invariant. */
697
698 bool
699 is_gimple_min_invariant (const_tree t)
700 {
701 if (TREE_CODE (t) == ADDR_EXPR)
702 return is_gimple_invariant_address (t);
703
704 return is_gimple_constant (t);
705 }
706
707 /* Return true if T is a GIMPLE interprocedural invariant. It's a restricted
708 form of gimple minimal invariant. */
709
710 bool
711 is_gimple_ip_invariant (const_tree t)
712 {
713 if (TREE_CODE (t) == ADDR_EXPR)
714 return is_gimple_ip_invariant_address (t);
715
716 return is_gimple_constant (t);
717 }
718
719 /* Return true if T is a non-aggregate register variable. */
720
721 bool
722 is_gimple_reg (tree t)
723 {
724 if (virtual_operand_p (t))
725 return false;
726
727 if (TREE_CODE (t) == SSA_NAME)
728 return true;
729
730 if (!is_gimple_variable (t))
731 return false;
732
733 if (!is_gimple_reg_type (TREE_TYPE (t)))
734 return false;
735
736 /* A volatile decl is not acceptable because we can't reuse it as
737 needed. We need to copy it into a temp first. */
738 if (TREE_THIS_VOLATILE (t))
739 return false;
740
741 /* We define "registers" as things that can be renamed as needed,
742 which with our infrastructure does not apply to memory. */
743 if (needs_to_live_in_memory (t))
744 return false;
745
746 /* Hard register variables are an interesting case. For those that
747 are call-clobbered, we don't know where all the calls are, since
748 we don't (want to) take into account which operations will turn
749 into libcalls at the rtl level. For those that are call-saved,
750 we don't currently model the fact that calls may in fact change
751 global hard registers, nor do we examine ASM_CLOBBERS at the tree
752 level, and so miss variable changes that might imply. All around,
753 it seems safest to not do too much optimization with these at the
754 tree level at all. We'll have to rely on the rtl optimizers to
755 clean this up, as there we've got all the appropriate bits exposed. */
756 if (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t))
757 return false;
758
759 /* Complex and vector values must have been put into SSA-like form.
760 That is, no assignments to the individual components. */
761 if (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
762 || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
763 return DECL_GIMPLE_REG_P (t);
764
765 return true;
766 }
767
768
769 /* Return true if T is a GIMPLE rvalue, i.e. an identifier or a constant. */
770
771 bool
772 is_gimple_val (tree t)
773 {
774 /* Make loads from volatiles and memory vars explicit. */
775 if (is_gimple_variable (t)
776 && is_gimple_reg_type (TREE_TYPE (t))
777 && !is_gimple_reg (t))
778 return false;
779
780 return (is_gimple_variable (t) || is_gimple_min_invariant (t));
781 }
782
783 /* Similarly, but accept hard registers as inputs to asm statements. */
784
785 bool
786 is_gimple_asm_val (tree t)
787 {
788 if (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t))
789 return true;
790
791 return is_gimple_val (t);
792 }
793
794 /* Return true if T is a GIMPLE minimal lvalue. */
795
796 bool
797 is_gimple_min_lval (tree t)
798 {
799 if (!(t = CONST_CAST_TREE (strip_invariant_refs (t))))
800 return false;
801 return (is_gimple_id (t) || TREE_CODE (t) == MEM_REF);
802 }
803
804 /* Return true if T is a valid function operand of a CALL_EXPR. */
805
806 bool
807 is_gimple_call_addr (tree t)
808 {
809 return (TREE_CODE (t) == OBJ_TYPE_REF || is_gimple_val (t));
810 }
811
812 /* Return true if T is a valid address operand of a MEM_REF. */
813
814 bool
815 is_gimple_mem_ref_addr (tree t)
816 {
817 return (is_gimple_reg (t)
818 || TREE_CODE (t) == INTEGER_CST
819 || (TREE_CODE (t) == ADDR_EXPR
820 && (CONSTANT_CLASS_P (TREE_OPERAND (t, 0))
821 || decl_address_invariant_p (TREE_OPERAND (t, 0)))));
822 }
823
824 /* Hold trees marked addressable during expand. */
825
826 static hash_set<tree> *mark_addressable_queue;
827
828 /* Mark X as addressable or queue it up if called during expand. We
829 don't want to apply it immediately during expand because decls are
830 made addressable at that point due to RTL-only concerns, such as
831 uses of memcpy for block moves, and TREE_ADDRESSABLE changes
832 is_gimple_reg, which might make it seem like a variable that used
833 to be a gimple_reg shouldn't have been an SSA name. So we queue up
834 this flag setting and only apply it when we're done with GIMPLE and
835 only RTL issues matter. */
836
837 static void
838 mark_addressable_1 (tree x)
839 {
840 if (!currently_expanding_to_rtl)
841 {
842 TREE_ADDRESSABLE (x) = 1;
843 return;
844 }
845
846 if (!mark_addressable_queue)
847 mark_addressable_queue = new hash_set<tree>();
848 mark_addressable_queue->add (x);
849 }
850
851 /* Adaptor for mark_addressable_1 for use in hash_set traversal. */
852
bool
mark_addressable_2 (tree const &x, void * ATTRIBUTE_UNUSED = NULL)
{
  /* Mark X addressable via mark_addressable_1; the second parameter
     and the bool return exist only to satisfy the hash_set traversal
     callback signature.  */
  mark_addressable_1 (x);
  return false;
}
859
860 /* Mark all queued trees as addressable, and empty the queue. To be
861 called right after clearing CURRENTLY_EXPANDING_TO_RTL. */
862
863 void
864 flush_mark_addressable_queue ()
865 {
866 gcc_assert (!currently_expanding_to_rtl);
867 if (mark_addressable_queue)
868 {
869 mark_addressable_queue->traverse<void*, mark_addressable_2> (NULL);
870 delete mark_addressable_queue;
871 mark_addressable_queue = NULL;
872 }
873 }
874
875 /* Mark X addressable. Unlike the langhook we expect X to be in gimple
876 form and we don't do any syntax checking. */
877
878 void
879 mark_addressable (tree x)
880 {
881 while (handled_component_p (x))
882 x = TREE_OPERAND (x, 0);
883 if (TREE_CODE (x) == MEM_REF
884 && TREE_CODE (TREE_OPERAND (x, 0)) == ADDR_EXPR)
885 x = TREE_OPERAND (TREE_OPERAND (x, 0), 0);
886 if (TREE_CODE (x) != VAR_DECL
887 && TREE_CODE (x) != PARM_DECL
888 && TREE_CODE (x) != RESULT_DECL)
889 return;
890 mark_addressable_1 (x);
891
892 /* Also mark the artificial SSA_NAME that points to the partition of X. */
893 if (TREE_CODE (x) == VAR_DECL
894 && !DECL_EXTERNAL (x)
895 && !TREE_STATIC (x)
896 && cfun->gimple_df != NULL
897 && cfun->gimple_df->decls_to_pointers != NULL)
898 {
899 tree *namep = cfun->gimple_df->decls_to_pointers->get (x);
900 if (namep)
901 mark_addressable_1 (*namep);
902 }
903 }
904
905 /* Returns true iff T is a valid RHS for an assignment to a renamed
906 user -- or front-end generated artificial -- variable. */
907
908 bool
909 is_gimple_reg_rhs (tree t)
910 {
911 return get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS;
912 }
913
914 #include "gt-gimple-expr.h"