gcc/gimple-expr.c
/* Gimple decl, type, and expression support functions.

   Copyright (C) 2007-2013 Free Software Foundation, Inc.
   Contributed by Aldy Hernandez <aldyh@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "gimple.h"
#include "demangle.h"

/* ----- Type related ----- */

/* Return true if the conversion from INNER_TYPE to OUTER_TYPE is a
   useless type conversion, otherwise return false.

   This function implicitly defines the middle-end type system.  With
   the notion of 'a < b' meaning that useless_type_conversion_p (a, b)
   holds and 'a > b' meaning that useless_type_conversion_p (b, a) holds,
   the following invariants shall be fulfilled:

     1) useless_type_conversion_p is transitive.
        If a < b and b < c then a < c.

     2) useless_type_conversion_p is not symmetric.
        a < b does not imply a > b.

     3) Types define the available set of operations applicable to values.
        A type conversion is useless if the operations for the target type
        are a subset of the operations for the source type.  For example,
        casts to void* are useless, casts from void* are not (void* can't
        be dereferenced or offset, only copied, hence its set of operations
        is a strict subset of that of all other data pointer types).  Casts
        to const T* are useless (can't be written to), casts from const T*
        to T* are not.  */

bool
useless_type_conversion_p (tree outer_type, tree inner_type)
{
  /* Do the following before stripping toplevel qualifiers.  */
  if (POINTER_TYPE_P (inner_type)
      && POINTER_TYPE_P (outer_type))
    {
      /* Do not lose casts between pointers to different address spaces.  */
      if (TYPE_ADDR_SPACE (TREE_TYPE (outer_type))
          != TYPE_ADDR_SPACE (TREE_TYPE (inner_type)))
        return false;
    }

  /* From now on qualifiers on value types do not matter.  */
  inner_type = TYPE_MAIN_VARIANT (inner_type);
  outer_type = TYPE_MAIN_VARIANT (outer_type);

  if (inner_type == outer_type)
    return true;

  /* If we know the canonical types, compare them.  */
  if (TYPE_CANONICAL (inner_type)
      && TYPE_CANONICAL (inner_type) == TYPE_CANONICAL (outer_type))
    return true;

  /* Changes in machine mode are never useless conversions unless we
     deal with aggregate types in which case we defer to later checks.  */
  if (TYPE_MODE (inner_type) != TYPE_MODE (outer_type)
      && !AGGREGATE_TYPE_P (inner_type))
    return false;

  /* If both the inner and outer types are integral types, then the
     conversion is not necessary if they have the same mode and
     signedness and precision, and both or neither are boolean.  */
  if (INTEGRAL_TYPE_P (inner_type)
      && INTEGRAL_TYPE_P (outer_type))
    {
      /* Preserve changes in signedness or precision.  */
      if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
          || TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
        return false;

      /* Preserve conversions to/from BOOLEAN_TYPE if types are not
         of precision one.  */
      if (((TREE_CODE (inner_type) == BOOLEAN_TYPE)
           != (TREE_CODE (outer_type) == BOOLEAN_TYPE))
          && TYPE_PRECISION (outer_type) != 1)
        return false;

      /* We don't need to preserve changes in a type's minimum or
         maximum value in general as these do not generate code
         unless the type precisions are different.  */
      return true;
    }

  /* Scalar floating point types with the same mode are compatible.  */
  else if (SCALAR_FLOAT_TYPE_P (inner_type)
           && SCALAR_FLOAT_TYPE_P (outer_type))
    return true;

  /* Fixed point types with the same mode are compatible.  */
  else if (FIXED_POINT_TYPE_P (inner_type)
           && FIXED_POINT_TYPE_P (outer_type))
    return true;

  /* We need to take special care recursing to pointed-to types.  */
  else if (POINTER_TYPE_P (inner_type)
           && POINTER_TYPE_P (outer_type))
    {
      /* Do not lose casts to function pointer types.  */
      if ((TREE_CODE (TREE_TYPE (outer_type)) == FUNCTION_TYPE
           || TREE_CODE (TREE_TYPE (outer_type)) == METHOD_TYPE)
          && !(TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE
               || TREE_CODE (TREE_TYPE (inner_type)) == METHOD_TYPE))
        return false;

      /* We do not care for const qualification of the pointed-to types
         as const qualification has no semantic value to the middle-end.  */

      /* Otherwise pointers/references are equivalent.  */
      return true;
    }

  /* Recurse for complex types.  */
  else if (TREE_CODE (inner_type) == COMPLEX_TYPE
           && TREE_CODE (outer_type) == COMPLEX_TYPE)
    return useless_type_conversion_p (TREE_TYPE (outer_type),
                                      TREE_TYPE (inner_type));

  /* Recurse for vector types with the same number of subparts.  */
  else if (TREE_CODE (inner_type) == VECTOR_TYPE
           && TREE_CODE (outer_type) == VECTOR_TYPE
           && TYPE_PRECISION (inner_type) == TYPE_PRECISION (outer_type))
    return useless_type_conversion_p (TREE_TYPE (outer_type),
                                      TREE_TYPE (inner_type));

  else if (TREE_CODE (inner_type) == ARRAY_TYPE
           && TREE_CODE (outer_type) == ARRAY_TYPE)
    {
      /* Preserve string attributes.  */
      if (TYPE_STRING_FLAG (inner_type) != TYPE_STRING_FLAG (outer_type))
        return false;

      /* Conversions from array types with unknown extent to
         array types with known extent are not useless.  */
      if (!TYPE_DOMAIN (inner_type)
          && TYPE_DOMAIN (outer_type))
        return false;

      /* Nor are conversions from array types with non-constant size to
         array types with constant size or to different size.  */
      if (TYPE_SIZE (outer_type)
          && TREE_CODE (TYPE_SIZE (outer_type)) == INTEGER_CST
          && (!TYPE_SIZE (inner_type)
              || TREE_CODE (TYPE_SIZE (inner_type)) != INTEGER_CST
              || !tree_int_cst_equal (TYPE_SIZE (outer_type),
                                      TYPE_SIZE (inner_type))))
        return false;

      /* Check conversions between arrays with partially known extents.
         If the array min/max values are constant they have to match.
         Otherwise allow conversions to unknown and variable extents.
         In particular this declares conversions that may change the
         mode to BLKmode as useless.  */
      if (TYPE_DOMAIN (inner_type)
          && TYPE_DOMAIN (outer_type)
          && TYPE_DOMAIN (inner_type) != TYPE_DOMAIN (outer_type))
        {
          tree inner_min = TYPE_MIN_VALUE (TYPE_DOMAIN (inner_type));
          tree outer_min = TYPE_MIN_VALUE (TYPE_DOMAIN (outer_type));
          tree inner_max = TYPE_MAX_VALUE (TYPE_DOMAIN (inner_type));
          tree outer_max = TYPE_MAX_VALUE (TYPE_DOMAIN (outer_type));

          /* After gimplification a variable min/max value carries no
             additional information compared to a NULL value.  All that
             matters has been lowered to be part of the IL.  */
          if (inner_min && TREE_CODE (inner_min) != INTEGER_CST)
            inner_min = NULL_TREE;
          if (outer_min && TREE_CODE (outer_min) != INTEGER_CST)
            outer_min = NULL_TREE;
          if (inner_max && TREE_CODE (inner_max) != INTEGER_CST)
            inner_max = NULL_TREE;
          if (outer_max && TREE_CODE (outer_max) != INTEGER_CST)
            outer_max = NULL_TREE;

          /* Conversions NULL / variable <- cst are useless, but not
             the other way around.  */
          if (outer_min
              && (!inner_min
                  || !tree_int_cst_equal (inner_min, outer_min)))
            return false;
          if (outer_max
              && (!inner_max
                  || !tree_int_cst_equal (inner_max, outer_max)))
            return false;
        }

      /* Recurse on the element check.  */
      return useless_type_conversion_p (TREE_TYPE (outer_type),
                                        TREE_TYPE (inner_type));
    }

  else if ((TREE_CODE (inner_type) == FUNCTION_TYPE
            || TREE_CODE (inner_type) == METHOD_TYPE)
           && TREE_CODE (inner_type) == TREE_CODE (outer_type))
    {
      tree outer_parm, inner_parm;

      /* If the return types are not compatible bail out.  */
      if (!useless_type_conversion_p (TREE_TYPE (outer_type),
                                      TREE_TYPE (inner_type)))
        return false;

      /* Method types should belong to a compatible base class.  */
      if (TREE_CODE (inner_type) == METHOD_TYPE
          && !useless_type_conversion_p (TYPE_METHOD_BASETYPE (outer_type),
                                         TYPE_METHOD_BASETYPE (inner_type)))
        return false;

      /* A conversion to an unprototyped argument list is ok.  */
      if (!prototype_p (outer_type))
        return true;

      /* If the unqualified argument types are compatible the conversion
         is useless.  */
      if (TYPE_ARG_TYPES (outer_type) == TYPE_ARG_TYPES (inner_type))
        return true;

      for (outer_parm = TYPE_ARG_TYPES (outer_type),
           inner_parm = TYPE_ARG_TYPES (inner_type);
           outer_parm && inner_parm;
           outer_parm = TREE_CHAIN (outer_parm),
           inner_parm = TREE_CHAIN (inner_parm))
        if (!useless_type_conversion_p
              (TYPE_MAIN_VARIANT (TREE_VALUE (outer_parm)),
               TYPE_MAIN_VARIANT (TREE_VALUE (inner_parm))))
          return false;

      /* If there is a mismatch in the number of arguments the functions
         are not compatible.  */
      if (outer_parm || inner_parm)
        return false;

      /* Defer to the target if necessary.  */
      if (TYPE_ATTRIBUTES (inner_type) || TYPE_ATTRIBUTES (outer_type))
        return comp_type_attributes (outer_type, inner_type) != 0;

      return true;
    }

  /* For aggregates we rely on TYPE_CANONICAL exclusively and require
     explicit conversions for types that need to be structurally
     compared.  */
  else if (AGGREGATE_TYPE_P (inner_type)
           && TREE_CODE (inner_type) == TREE_CODE (outer_type))
    return false;

  return false;
}
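
/* Usage sketch (illustrative; TO_TYPE and VAL are hypothetical names in
   some caller, not defined here):

     if (!useless_type_conversion_p (to_type, TREE_TYPE (val)))
       val = fold_convert (to_type, val);

   i.e. an explicit conversion is only built when the predicate reports
   that the two types are not interchangeable in the middle-end.  For
   example, "int" and a "const int" variant of it compare as useless
   (qualifiers are stripped above), while "int" vs. "unsigned int" does
   not, because the signedness check rejects it.  */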


/* ----- Decl related ----- */

/* Set sequence SEQ to be the GIMPLE body for function FNDECL.  */

void
gimple_set_body (tree fndecl, gimple_seq seq)
{
  struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
  if (fn == NULL)
    {
      /* If FNDECL still does not have a function structure associated
         with it, then it does not make sense for it to receive a
         GIMPLE body.  */
      gcc_assert (seq == NULL);
    }
  else
    fn->gimple_body = seq;
}


/* Return the body of GIMPLE statements for function FNDECL.  After the
   CFG pass, the function body no longer exists as a sequence because it
   has been split up into basic blocks.  In that case, this returns
   NULL.  */

gimple_seq
gimple_body (tree fndecl)
{
  struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
  return fn ? fn->gimple_body : NULL;
}

/* Return true when FNDECL has a GIMPLE body, either in unlowered form
   or in CFG form.  */
bool
gimple_has_body_p (tree fndecl)
{
  struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
  return (gimple_body (fndecl) || (fn && fn->cfg));
}

/* Return a printable name for symbol DECL.  */

const char *
gimple_decl_printable_name (tree decl, int verbosity)
{
  if (!DECL_NAME (decl))
    return NULL;

  if (DECL_ASSEMBLER_NAME_SET_P (decl))
    {
      const char *str, *mangled_str;
      int dmgl_opts = DMGL_NO_OPTS;

      if (verbosity >= 2)
        {
          dmgl_opts = DMGL_VERBOSE
                      | DMGL_ANSI
                      | DMGL_GNU_V3
                      | DMGL_RET_POSTFIX;
          if (TREE_CODE (decl) == FUNCTION_DECL)
            dmgl_opts |= DMGL_PARAMS;
        }

      mangled_str = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
      str = cplus_demangle_v3 (mangled_str, dmgl_opts);
      return (str) ? str : mangled_str;
    }

  return IDENTIFIER_POINTER (DECL_NAME (decl));
}
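
/* Example (illustrative): for a C++ FUNCTION_DECL whose assembler name
   is "_Z3fooi", this returns a short demangled form such as "foo" at
   low verbosity, and with VERBOSITY >= 2 a form that also includes the
   parameter list, e.g. "foo(int)"; the exact output depends on the
   demangler options selected above.  */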


/* Create a new VAR_DECL and copy information from VAR to it.  */

tree
copy_var_decl (tree var, tree name, tree type)
{
  tree copy = build_decl (DECL_SOURCE_LOCATION (var), VAR_DECL, name, type);

  TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (var);
  TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (var);
  DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (var);
  DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (var);
  DECL_IGNORED_P (copy) = DECL_IGNORED_P (var);
  DECL_CONTEXT (copy) = DECL_CONTEXT (var);
  TREE_NO_WARNING (copy) = TREE_NO_WARNING (var);
  TREE_USED (copy) = 1;
  DECL_SEEN_IN_BIND_EXPR_P (copy) = 1;
  DECL_ATTRIBUTES (copy) = DECL_ATTRIBUTES (var);

  return copy;
}

/* Given SSA_NAMEs NAME1 and NAME2, return true if they are candidates for
   coalescing together, false otherwise.

   This must stay consistent with var_map_base_init in tree-ssa-live.c.  */

bool
gimple_can_coalesce_p (tree name1, tree name2)
{
  /* First check the SSA_NAME's associated DECL.  We only want to
     coalesce if they have the same DECL or both have no associated DECL.  */
  tree var1 = SSA_NAME_VAR (name1);
  tree var2 = SSA_NAME_VAR (name2);
  var1 = (var1 && (!VAR_P (var1) || !DECL_IGNORED_P (var1))) ? var1 : NULL_TREE;
  var2 = (var2 && (!VAR_P (var2) || !DECL_IGNORED_P (var2))) ? var2 : NULL_TREE;
  if (var1 != var2)
    return false;

  /* Now check the types.  If the types are the same, then we should
     try to coalesce NAME1 and NAME2.  */
  tree t1 = TREE_TYPE (name1);
  tree t2 = TREE_TYPE (name2);
  if (t1 == t2)
    return true;

  /* If the types are not the same, check for a canonical type match.  This
     (for example) allows coalescing when the types are fundamentally the
     same, but just have different names.

     Note pointer types with different address spaces may have the same
     canonical type.  Those are rejected for coalescing by the
     types_compatible_p check.  */
  if (TYPE_CANONICAL (t1)
      && TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2)
      && types_compatible_p (t1, t2))
    return true;

  return false;
}
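
/* Example (illustrative): two SSA names of the same user variable, say
   i_3 and i_7 for "int i", are candidates for coalescing, as are two
   anonymous SSA names of the same type.  An SSA name of type "int" and
   one of type "long" are not, since neither the type identity check nor
   the canonical-type check above can succeed for them.  */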


/* ----- Expression related ----- */

/* Extract the operands and code for expression EXPR into *SUBCODE_P,
   *OP1_P, *OP2_P and *OP3_P respectively.  */

void
extract_ops_from_tree_1 (tree expr, enum tree_code *subcode_p, tree *op1_p,
                         tree *op2_p, tree *op3_p)
{
  enum gimple_rhs_class grhs_class;

  *subcode_p = TREE_CODE (expr);
  grhs_class = get_gimple_rhs_class (*subcode_p);

  if (grhs_class == GIMPLE_TERNARY_RHS)
    {
      *op1_p = TREE_OPERAND (expr, 0);
      *op2_p = TREE_OPERAND (expr, 1);
      *op3_p = TREE_OPERAND (expr, 2);
    }
  else if (grhs_class == GIMPLE_BINARY_RHS)
    {
      *op1_p = TREE_OPERAND (expr, 0);
      *op2_p = TREE_OPERAND (expr, 1);
      *op3_p = NULL_TREE;
    }
  else if (grhs_class == GIMPLE_UNARY_RHS)
    {
      *op1_p = TREE_OPERAND (expr, 0);
      *op2_p = NULL_TREE;
      *op3_p = NULL_TREE;
    }
  else if (grhs_class == GIMPLE_SINGLE_RHS)
    {
      *op1_p = expr;
      *op2_p = NULL_TREE;
      *op3_p = NULL_TREE;
    }
  else
    gcc_unreachable ();
}
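
/* Example (illustrative): for EXPR = a_1 + b_2, a PLUS_EXPR and thus a
   GIMPLE_BINARY_RHS, this sets *SUBCODE_P to PLUS_EXPR, *OP1_P to a_1,
   *OP2_P to b_2 and *OP3_P to NULL_TREE.  For a bare SSA name or
   constant, a GIMPLE_SINGLE_RHS, *OP1_P is EXPR itself and the other
   operand slots are NULL_TREE.  */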

/* Extract operands for a GIMPLE_COND statement out of COND_EXPR tree COND.  */

void
gimple_cond_get_ops_from_tree (tree cond, enum tree_code *code_p,
                               tree *lhs_p, tree *rhs_p)
{
  gcc_assert (TREE_CODE_CLASS (TREE_CODE (cond)) == tcc_comparison
              || TREE_CODE (cond) == TRUTH_NOT_EXPR
              || is_gimple_min_invariant (cond)
              || SSA_VAR_P (cond));

  extract_ops_from_tree (cond, code_p, lhs_p, rhs_p);

  /* Canonicalize conditionals of the form 'if (!VAL)'.  */
  if (*code_p == TRUTH_NOT_EXPR)
    {
      *code_p = EQ_EXPR;
      gcc_assert (*lhs_p && *rhs_p == NULL_TREE);
      *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
    }
  /* Canonicalize conditionals of the form 'if (VAL)'.  */
  else if (TREE_CODE_CLASS (*code_p) != tcc_comparison)
    {
      *code_p = NE_EXPR;
      gcc_assert (*lhs_p && *rhs_p == NULL_TREE);
      *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
    }
}
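
/* Example (illustrative): a condition written as !x_1 (TRUTH_NOT_EXPR)
   comes back canonicalized as "x_1 == 0" (EQ_EXPR against a zero
   constant of x_1's type), a bare x_1 comes back as "x_1 != 0", and a
   comparison such as a_2 < b_3 passes through unchanged.  */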

/* Return true if T is a valid LHS for a GIMPLE assignment expression.  */

bool
is_gimple_lvalue (tree t)
{
  return (is_gimple_addressable (t)
          || TREE_CODE (t) == WITH_SIZE_EXPR
          /* These are complex lvalues, but don't have addresses, so they
             go here.  */
          || TREE_CODE (t) == BIT_FIELD_REF);
}

/* Return true if T is a GIMPLE condition.  */

bool
is_gimple_condexpr (tree t)
{
  return (is_gimple_val (t) || (COMPARISON_CLASS_P (t)
                                && !tree_could_throw_p (t)
                                && is_gimple_val (TREE_OPERAND (t, 0))
                                && is_gimple_val (TREE_OPERAND (t, 1))));
}

/* Return true if T is a gimple address.  */

bool
is_gimple_address (const_tree t)
{
  tree op;

  if (TREE_CODE (t) != ADDR_EXPR)
    return false;

  op = TREE_OPERAND (t, 0);
  while (handled_component_p (op))
    {
      if ((TREE_CODE (op) == ARRAY_REF
           || TREE_CODE (op) == ARRAY_RANGE_REF)
          && !is_gimple_val (TREE_OPERAND (op, 1)))
        return false;

      op = TREE_OPERAND (op, 0);
    }

  if (CONSTANT_CLASS_P (op) || TREE_CODE (op) == MEM_REF)
    return true;

  switch (TREE_CODE (op))
    {
    case PARM_DECL:
    case RESULT_DECL:
    case LABEL_DECL:
    case FUNCTION_DECL:
    case VAR_DECL:
    case CONST_DECL:
      return true;

    default:
      return false;
    }
}
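
/* Example (illustrative): &s.f and &a[i_1] are gimple addresses when
   every array index involved (here i_1) is a gimple value, and &(*p_2)
   qualifies because the base is a MEM_REF.  An ADDR_EXPR whose
   reference uses a non-gimple index, e.g. &a[i + j] before
   gimplification, is rejected.  */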

/* Return true if T is a gimple invariant address.  */

bool
is_gimple_invariant_address (const_tree t)
{
  const_tree op;

  if (TREE_CODE (t) != ADDR_EXPR)
    return false;

  op = strip_invariant_refs (TREE_OPERAND (t, 0));
  if (!op)
    return false;

  if (TREE_CODE (op) == MEM_REF)
    {
      const_tree op0 = TREE_OPERAND (op, 0);
      return (TREE_CODE (op0) == ADDR_EXPR
              && (CONSTANT_CLASS_P (TREE_OPERAND (op0, 0))
                  || decl_address_invariant_p (TREE_OPERAND (op0, 0))));
    }

  return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
}

/* Return true if T is a gimple invariant address at IPA level
   (so addresses of variables on stack are not allowed).  */

bool
is_gimple_ip_invariant_address (const_tree t)
{
  const_tree op;

  if (TREE_CODE (t) != ADDR_EXPR)
    return false;

  op = strip_invariant_refs (TREE_OPERAND (t, 0));
  if (!op)
    return false;

  if (TREE_CODE (op) == MEM_REF)
    {
      const_tree op0 = TREE_OPERAND (op, 0);
      return (TREE_CODE (op0) == ADDR_EXPR
              && (CONSTANT_CLASS_P (TREE_OPERAND (op0, 0))
                  || decl_address_ip_invariant_p (TREE_OPERAND (op0, 0))));
    }

  return CONSTANT_CLASS_P (op) || decl_address_ip_invariant_p (op);
}

/* Return true if T is a GIMPLE minimal invariant.  It's a restricted
   form of function invariant.  */

bool
is_gimple_min_invariant (const_tree t)
{
  if (TREE_CODE (t) == ADDR_EXPR)
    return is_gimple_invariant_address (t);

  return is_gimple_constant (t);
}

/* Return true if T is a GIMPLE interprocedural invariant.  It's a restricted
   form of gimple minimal invariant.  */

bool
is_gimple_ip_invariant (const_tree t)
{
  if (TREE_CODE (t) == ADDR_EXPR)
    return is_gimple_ip_invariant_address (t);

  return is_gimple_constant (t);
}

/* Return true if T is a non-aggregate register variable.  */

bool
is_gimple_reg (tree t)
{
  if (virtual_operand_p (t))
    return false;

  if (TREE_CODE (t) == SSA_NAME)
    return true;

  if (!is_gimple_variable (t))
    return false;

  if (!is_gimple_reg_type (TREE_TYPE (t)))
    return false;

  /* A volatile decl is not acceptable because we can't reuse it as
     needed.  We need to copy it into a temp first.  */
  if (TREE_THIS_VOLATILE (t))
    return false;

  /* We define "registers" as things that can be renamed as needed,
     which with our infrastructure does not apply to memory.  */
  if (needs_to_live_in_memory (t))
    return false;

  /* Hard register variables are an interesting case.  For those that
     are call-clobbered, we don't know where all the calls are, since
     we don't (want to) take into account which operations will turn
     into libcalls at the rtl level.  For those that are call-saved,
     we don't currently model the fact that calls may in fact change
     global hard registers, nor do we examine ASM_CLOBBERS at the tree
     level, and so miss variable changes that those clobbers might imply.
     All around, it seems safest not to do too much optimization with
     these at the tree level at all.  We'll have to rely on the rtl
     optimizers to clean this up, as there we've got all the appropriate
     bits exposed.  */
  if (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t))
    return false;

  /* Complex and vector values must have been put into SSA-like form.
     That is, no assignments to the individual components.  */
  if (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
      || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
    return DECL_GIMPLE_REG_P (t);

  return true;
}


/* Return true if T is a GIMPLE rvalue, i.e. an identifier or a constant.  */

bool
is_gimple_val (tree t)
{
  /* Make loads from volatiles and memory vars explicit.  */
  if (is_gimple_variable (t)
      && is_gimple_reg_type (TREE_TYPE (t))
      && !is_gimple_reg (t))
    return false;

  return (is_gimple_variable (t) || is_gimple_min_invariant (t));
}

/* Similarly, but accept hard registers as inputs to asm statements.  */

bool
is_gimple_asm_val (tree t)
{
  if (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t))
    return true;

  return is_gimple_val (t);
}

/* Return true if T is a GIMPLE minimal lvalue.  */

bool
is_gimple_min_lval (tree t)
{
  if (!(t = CONST_CAST_TREE (strip_invariant_refs (t))))
    return false;
  return (is_gimple_id (t) || TREE_CODE (t) == MEM_REF);
}

/* Return true if T is a valid function operand of a CALL_EXPR.  */

bool
is_gimple_call_addr (tree t)
{
  return (TREE_CODE (t) == OBJ_TYPE_REF || is_gimple_val (t));
}

/* Return true if T is a valid address operand of a MEM_REF.  */

bool
is_gimple_mem_ref_addr (tree t)
{
  return (is_gimple_reg (t)
          || TREE_CODE (t) == INTEGER_CST
          || (TREE_CODE (t) == ADDR_EXPR
              && (CONSTANT_CLASS_P (TREE_OPERAND (t, 0))
                  || decl_address_invariant_p (TREE_OPERAND (t, 0)))));
}
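
/* Example (illustrative): valid MEM_REF addresses include an SSA
   pointer such as p_1, an INTEGER_CST used as a literal address, and
   &some_decl where the decl's address is invariant.  &a[i_2], by
   contrast, is not accepted here, because the ADDR_EXPR operand is an
   ARRAY_REF rather than a constant or an address-invariant decl.  */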