1 /* Language-independent node constructors for parse phase of GNU compiler.
2 Copyright (C) 1987-2019 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* This file contains the low level primitives for operating on tree nodes,
21 including allocation, list operations, interning of identifiers,
22 construction of data type nodes and statement nodes,
23 and construction of type conversion nodes. It also contains
24 tables index by tree code that describe how to take apart
27 It is intended to be language-independent but can occasionally
28 calls language-dependent routines. */
32 #include "coretypes.h"
37 #include "tree-pass.h"
40 #include "diagnostic.h"
43 #include "fold-const.h"
44 #include "stor-layout.h"
47 #include "toplev.h" /* get_random_seed */
49 #include "common/common-target.h"
50 #include "langhooks.h"
51 #include "tree-inline.h"
52 #include "tree-iterator.h"
53 #include "internal-fn.h"
54 #include "gimple-iterator.h"
58 #include "langhooks-def.h"
59 #include "tree-diagnostic.h"
62 #include "print-tree.h"
63 #include "ipa-utils.h"
65 #include "stringpool.h"
69 #include "tree-vector-builder.h"
70 #include "gimple-fold.h"
71 #include "escaped_string.h"
73 /* Tree code classes. */
75 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) TYPE,
76 #define END_OF_BASE_TREE_CODES tcc_exceptional,
78 const enum tree_code_class tree_code_type
[] = {
79 #include "all-tree.def"
83 #undef END_OF_BASE_TREE_CODES
85 /* Table indexed by tree code giving number of expression
86 operands beyond the fixed part of the node structure.
87 Not used for types or decls. */
89 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) LENGTH,
90 #define END_OF_BASE_TREE_CODES 0,
92 const unsigned char tree_code_length
[] = {
93 #include "all-tree.def"
97 #undef END_OF_BASE_TREE_CODES
99 /* Names of tree components.
100 Used for printing out the tree and error messages. */
101 #define DEFTREECODE(SYM, NAME, TYPE, LEN) NAME,
102 #define END_OF_BASE_TREE_CODES "@dummy",
104 static const char *const tree_code_name
[] = {
105 #include "all-tree.def"
109 #undef END_OF_BASE_TREE_CODES
111 /* Each tree code class has an associated string representation.
112 These must correspond to the tree_code_class entries. */
114 const char *const tree_code_class_strings
[] =
129 /* obstack.[ch] explicitly declined to prototype this. */
130 extern int _obstack_allocated_p (struct obstack
*h
, void *obj
);
132 /* Statistics-gathering stuff. */
134 static uint64_t tree_code_counts
[MAX_TREE_CODES
];
135 uint64_t tree_node_counts
[(int) all_kinds
];
136 uint64_t tree_node_sizes
[(int) all_kinds
];
138 /* Keep in sync with tree.h:enum tree_node_kind. */
139 static const char * const tree_node_kind_names
[] = {
158 /* Unique id for next decl created. */
159 static GTY(()) int next_decl_uid
;
160 /* Unique id for next type created. */
161 static GTY(()) unsigned next_type_uid
= 1;
162 /* Unique id for next debug decl created. Use negative numbers,
163 to catch erroneous uses. */
164 static GTY(()) int next_debug_decl_uid
;
166 /* Since we cannot rehash a type after it is in the table, we have to
167 keep the hash code. */
169 struct GTY((for_user
)) type_hash
{
174 /* Initial size of the hash table (rounded to next prime). */
175 #define TYPE_HASH_INITIAL_SIZE 1000
177 struct type_cache_hasher
: ggc_cache_ptr_hash
<type_hash
>
179 static hashval_t
hash (type_hash
*t
) { return t
->hash
; }
180 static bool equal (type_hash
*a
, type_hash
*b
);
183 keep_cache_entry (type_hash
*&t
)
185 return ggc_marked_p (t
->type
);
189 /* Now here is the hash table. When recording a type, it is added to
190 the slot whose index is the hash code. Note that the hash table is
191 used for several kinds of types (function types, array types and
192 array index range types, for now). While all these live in the
193 same table, they are completely independent, and the hash code is
194 computed differently for each of these. */
196 static GTY ((cache
)) hash_table
<type_cache_hasher
> *type_hash_table
;
198 /* Hash table and temporary node for larger integer const values. */
199 static GTY (()) tree int_cst_node
;
201 struct int_cst_hasher
: ggc_cache_ptr_hash
<tree_node
>
203 static hashval_t
hash (tree t
);
204 static bool equal (tree x
, tree y
);
207 static GTY ((cache
)) hash_table
<int_cst_hasher
> *int_cst_hash_table
;
209 /* Class and variable for making sure that there is a single POLY_INT_CST
210 for a given value. */
211 struct poly_int_cst_hasher
: ggc_cache_ptr_hash
<tree_node
>
213 typedef std::pair
<tree
, const poly_wide_int
*> compare_type
;
214 static hashval_t
hash (tree t
);
215 static bool equal (tree x
, const compare_type
&y
);
218 static GTY ((cache
)) hash_table
<poly_int_cst_hasher
> *poly_int_cst_hash_table
;
220 /* Hash table for optimization flags and target option flags. Use the same
221 hash table for both sets of options. Nodes for building the current
222 optimization and target option nodes. The assumption is most of the time
223 the options created will already be in the hash table, so we avoid
224 allocating and freeing up a node repeatably. */
225 static GTY (()) tree cl_optimization_node
;
226 static GTY (()) tree cl_target_option_node
;
228 struct cl_option_hasher
: ggc_cache_ptr_hash
<tree_node
>
230 static hashval_t
hash (tree t
);
231 static bool equal (tree x
, tree y
);
234 static GTY ((cache
)) hash_table
<cl_option_hasher
> *cl_option_hash_table
;
236 /* General tree->tree mapping structure for use in hash tables. */
240 hash_table
<tree_decl_map_cache_hasher
> *debug_expr_for_decl
;
243 hash_table
<tree_decl_map_cache_hasher
> *value_expr_for_decl
;
245 struct tree_vec_map_cache_hasher
: ggc_cache_ptr_hash
<tree_vec_map
>
247 static hashval_t
hash (tree_vec_map
*m
) { return DECL_UID (m
->base
.from
); }
250 equal (tree_vec_map
*a
, tree_vec_map
*b
)
252 return a
->base
.from
== b
->base
.from
;
256 keep_cache_entry (tree_vec_map
*&m
)
258 return ggc_marked_p (m
->base
.from
);
263 hash_table
<tree_vec_map_cache_hasher
> *debug_args_for_decl
;
265 static void set_type_quals (tree
, int);
266 static void print_type_hash_statistics (void);
267 static void print_debug_expr_statistics (void);
268 static void print_value_expr_statistics (void);
270 static tree
build_array_type_1 (tree
, tree
, bool, bool);
272 tree global_trees
[TI_MAX
];
273 tree integer_types
[itk_none
];
275 bool int_n_enabled_p
[NUM_INT_N_ENTS
];
276 struct int_n_trees_t int_n_trees
[NUM_INT_N_ENTS
];
278 bool tree_contains_struct
[MAX_TREE_CODES
][64];
/* Number of operands for each OpenMP clause.  */
unsigned const char omp_clause_num_ops[] =
{
  0, /* OMP_CLAUSE_ERROR  */
  1, /* OMP_CLAUSE_PRIVATE  */
  1, /* OMP_CLAUSE_SHARED  */
  1, /* OMP_CLAUSE_FIRSTPRIVATE  */
  2, /* OMP_CLAUSE_LASTPRIVATE  */
  5, /* OMP_CLAUSE_REDUCTION  */
  5, /* OMP_CLAUSE_TASK_REDUCTION  */
  5, /* OMP_CLAUSE_IN_REDUCTION  */
  1, /* OMP_CLAUSE_COPYIN  */
  1, /* OMP_CLAUSE_COPYPRIVATE  */
  3, /* OMP_CLAUSE_LINEAR  */
  2, /* OMP_CLAUSE_ALIGNED  */
  1, /* OMP_CLAUSE_DEPEND  */
  1, /* OMP_CLAUSE_NONTEMPORAL  */
  1, /* OMP_CLAUSE_UNIFORM  */
  1, /* OMP_CLAUSE_TO_DECLARE  */
  1, /* OMP_CLAUSE_LINK  */
  2, /* OMP_CLAUSE_FROM  */
  2, /* OMP_CLAUSE_TO  */
  2, /* OMP_CLAUSE_MAP  */
  1, /* OMP_CLAUSE_USE_DEVICE_PTR  */
  1, /* OMP_CLAUSE_USE_DEVICE_ADDR  */
  1, /* OMP_CLAUSE_IS_DEVICE_PTR  */
  1, /* OMP_CLAUSE_INCLUSIVE  */
  1, /* OMP_CLAUSE_EXCLUSIVE  */
  2, /* OMP_CLAUSE__CACHE_  */
  2, /* OMP_CLAUSE_GANG  */
  1, /* OMP_CLAUSE_ASYNC  */
  1, /* OMP_CLAUSE_WAIT  */
  0, /* OMP_CLAUSE_AUTO  */
  0, /* OMP_CLAUSE_SEQ  */
  1, /* OMP_CLAUSE__LOOPTEMP_  */
  1, /* OMP_CLAUSE__REDUCTEMP_  */
  1, /* OMP_CLAUSE__CONDTEMP_  */
  1, /* OMP_CLAUSE__SCANTEMP_  */
  1, /* OMP_CLAUSE_IF  */
  1, /* OMP_CLAUSE_NUM_THREADS  */
  1, /* OMP_CLAUSE_SCHEDULE  */
  0, /* OMP_CLAUSE_NOWAIT  */
  1, /* OMP_CLAUSE_ORDERED  */
  0, /* OMP_CLAUSE_DEFAULT  */
  3, /* OMP_CLAUSE_COLLAPSE  */
  0, /* OMP_CLAUSE_UNTIED  */
  1, /* OMP_CLAUSE_FINAL  */
  0, /* OMP_CLAUSE_MERGEABLE  */
  1, /* OMP_CLAUSE_DEVICE  */
  1, /* OMP_CLAUSE_DIST_SCHEDULE  */
  0, /* OMP_CLAUSE_INBRANCH  */
  0, /* OMP_CLAUSE_NOTINBRANCH  */
  1, /* OMP_CLAUSE_NUM_TEAMS  */
  1, /* OMP_CLAUSE_THREAD_LIMIT  */
  0, /* OMP_CLAUSE_PROC_BIND  */
  1, /* OMP_CLAUSE_SAFELEN  */
  1, /* OMP_CLAUSE_SIMDLEN  */
  0, /* OMP_CLAUSE_DEVICE_TYPE  */
  0, /* OMP_CLAUSE_FOR  */
  0, /* OMP_CLAUSE_PARALLEL  */
  0, /* OMP_CLAUSE_SECTIONS  */
  0, /* OMP_CLAUSE_TASKGROUP  */
  1, /* OMP_CLAUSE_PRIORITY  */
  1, /* OMP_CLAUSE_GRAINSIZE  */
  1, /* OMP_CLAUSE_NUM_TASKS  */
  0, /* OMP_CLAUSE_NOGROUP  */
  0, /* OMP_CLAUSE_THREADS  */
  0, /* OMP_CLAUSE_SIMD  */
  1, /* OMP_CLAUSE_HINT  */
  0, /* OMP_CLAUSE_DEFAULTMAP  */
  0, /* OMP_CLAUSE_ORDER  */
  0, /* OMP_CLAUSE_BIND  */
  1, /* OMP_CLAUSE__SIMDUID_  */
  0, /* OMP_CLAUSE__SIMT_  */
  0, /* OMP_CLAUSE_INDEPENDENT  */
  1, /* OMP_CLAUSE_WORKER  */
  1, /* OMP_CLAUSE_VECTOR  */
  1, /* OMP_CLAUSE_NUM_GANGS  */
  1, /* OMP_CLAUSE_NUM_WORKERS  */
  1, /* OMP_CLAUSE_VECTOR_LENGTH  */
  3, /* OMP_CLAUSE_TILE  */
  2, /* OMP_CLAUSE__GRIDDIM_  */
  0, /* OMP_CLAUSE_IF_PRESENT  */
  0, /* OMP_CLAUSE_FINALIZE  */
};
/* Printable names of the OpenMP clauses, indexed by OMP_CLAUSE_CODE;
   must stay in sync with omp_clause_num_ops above.
   NOTE(review): the string initializer was lost in extraction --
   restore the full table from upstream tree.c before building.  */
366 const char * const omp_clause_code_name
[] =
452 /* Return the tree node structure used by tree code CODE. */
454 static inline enum tree_node_structure_enum
455 tree_node_structure_for_code (enum tree_code code
)
457 switch (TREE_CODE_CLASS (code
))
459 case tcc_declaration
:
462 case CONST_DECL
: return TS_CONST_DECL
;
463 case DEBUG_EXPR_DECL
: return TS_DECL_WRTL
;
464 case FIELD_DECL
: return TS_FIELD_DECL
;
465 case FUNCTION_DECL
: return TS_FUNCTION_DECL
;
466 case LABEL_DECL
: return TS_LABEL_DECL
;
467 case PARM_DECL
: return TS_PARM_DECL
;
468 case RESULT_DECL
: return TS_RESULT_DECL
;
469 case TRANSLATION_UNIT_DECL
: return TS_TRANSLATION_UNIT_DECL
;
470 case TYPE_DECL
: return TS_TYPE_DECL
;
471 case VAR_DECL
: return TS_VAR_DECL
;
472 default: return TS_DECL_NON_COMMON
;
475 case tcc_type
: return TS_TYPE_NON_COMMON
;
483 case tcc_vl_exp
: return TS_EXP
;
485 default: /* tcc_constant and tcc_exceptional */
491 /* tcc_constant cases. */
492 case COMPLEX_CST
: return TS_COMPLEX
;
493 case FIXED_CST
: return TS_FIXED_CST
;
494 case INTEGER_CST
: return TS_INT_CST
;
495 case POLY_INT_CST
: return TS_POLY_INT_CST
;
496 case REAL_CST
: return TS_REAL_CST
;
497 case STRING_CST
: return TS_STRING
;
498 case VECTOR_CST
: return TS_VECTOR
;
499 case VOID_CST
: return TS_TYPED
;
501 /* tcc_exceptional cases. */
502 case BLOCK
: return TS_BLOCK
;
503 case CONSTRUCTOR
: return TS_CONSTRUCTOR
;
504 case ERROR_MARK
: return TS_COMMON
;
505 case IDENTIFIER_NODE
: return TS_IDENTIFIER
;
506 case OMP_CLAUSE
: return TS_OMP_CLAUSE
;
507 case OPTIMIZATION_NODE
: return TS_OPTIMIZATION
;
508 case PLACEHOLDER_EXPR
: return TS_COMMON
;
509 case SSA_NAME
: return TS_SSA_NAME
;
510 case STATEMENT_LIST
: return TS_STATEMENT_LIST
;
511 case TARGET_OPTION_NODE
: return TS_TARGET_OPTION
;
512 case TREE_BINFO
: return TS_BINFO
;
513 case TREE_LIST
: return TS_LIST
;
514 case TREE_VEC
: return TS_VEC
;
522 /* Initialize tree_contains_struct to describe the hierarchy of tree
526 initialize_tree_contains_struct (void)
530 for (i
= ERROR_MARK
; i
< LAST_AND_UNUSED_TREE_CODE
; i
++)
533 enum tree_node_structure_enum ts_code
;
535 code
= (enum tree_code
) i
;
536 ts_code
= tree_node_structure_for_code (code
);
538 /* Mark the TS structure itself. */
539 tree_contains_struct
[code
][ts_code
] = 1;
541 /* Mark all the structures that TS is derived from. */
546 case TS_OPTIMIZATION
:
547 case TS_TARGET_OPTION
:
553 case TS_POLY_INT_CST
:
562 case TS_STATEMENT_LIST
:
563 MARK_TS_TYPED (code
);
567 case TS_DECL_MINIMAL
:
573 MARK_TS_COMMON (code
);
576 case TS_TYPE_WITH_LANG_SPECIFIC
:
577 MARK_TS_TYPE_COMMON (code
);
580 case TS_TYPE_NON_COMMON
:
581 MARK_TS_TYPE_WITH_LANG_SPECIFIC (code
);
585 MARK_TS_DECL_MINIMAL (code
);
590 MARK_TS_DECL_COMMON (code
);
593 case TS_DECL_NON_COMMON
:
594 MARK_TS_DECL_WITH_VIS (code
);
597 case TS_DECL_WITH_VIS
:
601 MARK_TS_DECL_WRTL (code
);
605 MARK_TS_DECL_COMMON (code
);
609 MARK_TS_DECL_WITH_VIS (code
);
613 case TS_FUNCTION_DECL
:
614 MARK_TS_DECL_NON_COMMON (code
);
617 case TS_TRANSLATION_UNIT_DECL
:
618 MARK_TS_DECL_COMMON (code
);
626 /* Basic consistency checks for attributes used in fold. */
627 gcc_assert (tree_contains_struct
[FUNCTION_DECL
][TS_DECL_NON_COMMON
]);
628 gcc_assert (tree_contains_struct
[TYPE_DECL
][TS_DECL_NON_COMMON
]);
629 gcc_assert (tree_contains_struct
[CONST_DECL
][TS_DECL_COMMON
]);
630 gcc_assert (tree_contains_struct
[VAR_DECL
][TS_DECL_COMMON
]);
631 gcc_assert (tree_contains_struct
[PARM_DECL
][TS_DECL_COMMON
]);
632 gcc_assert (tree_contains_struct
[RESULT_DECL
][TS_DECL_COMMON
]);
633 gcc_assert (tree_contains_struct
[FUNCTION_DECL
][TS_DECL_COMMON
]);
634 gcc_assert (tree_contains_struct
[TYPE_DECL
][TS_DECL_COMMON
]);
635 gcc_assert (tree_contains_struct
[TRANSLATION_UNIT_DECL
][TS_DECL_COMMON
]);
636 gcc_assert (tree_contains_struct
[LABEL_DECL
][TS_DECL_COMMON
]);
637 gcc_assert (tree_contains_struct
[FIELD_DECL
][TS_DECL_COMMON
]);
638 gcc_assert (tree_contains_struct
[VAR_DECL
][TS_DECL_WRTL
]);
639 gcc_assert (tree_contains_struct
[PARM_DECL
][TS_DECL_WRTL
]);
640 gcc_assert (tree_contains_struct
[RESULT_DECL
][TS_DECL_WRTL
]);
641 gcc_assert (tree_contains_struct
[FUNCTION_DECL
][TS_DECL_WRTL
]);
642 gcc_assert (tree_contains_struct
[LABEL_DECL
][TS_DECL_WRTL
]);
643 gcc_assert (tree_contains_struct
[CONST_DECL
][TS_DECL_MINIMAL
]);
644 gcc_assert (tree_contains_struct
[VAR_DECL
][TS_DECL_MINIMAL
]);
645 gcc_assert (tree_contains_struct
[PARM_DECL
][TS_DECL_MINIMAL
]);
646 gcc_assert (tree_contains_struct
[RESULT_DECL
][TS_DECL_MINIMAL
]);
647 gcc_assert (tree_contains_struct
[FUNCTION_DECL
][TS_DECL_MINIMAL
]);
648 gcc_assert (tree_contains_struct
[TYPE_DECL
][TS_DECL_MINIMAL
]);
649 gcc_assert (tree_contains_struct
[TRANSLATION_UNIT_DECL
][TS_DECL_MINIMAL
]);
650 gcc_assert (tree_contains_struct
[LABEL_DECL
][TS_DECL_MINIMAL
]);
651 gcc_assert (tree_contains_struct
[FIELD_DECL
][TS_DECL_MINIMAL
]);
652 gcc_assert (tree_contains_struct
[VAR_DECL
][TS_DECL_WITH_VIS
]);
653 gcc_assert (tree_contains_struct
[FUNCTION_DECL
][TS_DECL_WITH_VIS
]);
654 gcc_assert (tree_contains_struct
[TYPE_DECL
][TS_DECL_WITH_VIS
]);
655 gcc_assert (tree_contains_struct
[VAR_DECL
][TS_VAR_DECL
]);
656 gcc_assert (tree_contains_struct
[FIELD_DECL
][TS_FIELD_DECL
]);
657 gcc_assert (tree_contains_struct
[PARM_DECL
][TS_PARM_DECL
]);
658 gcc_assert (tree_contains_struct
[LABEL_DECL
][TS_LABEL_DECL
]);
659 gcc_assert (tree_contains_struct
[RESULT_DECL
][TS_RESULT_DECL
]);
660 gcc_assert (tree_contains_struct
[CONST_DECL
][TS_CONST_DECL
]);
661 gcc_assert (tree_contains_struct
[TYPE_DECL
][TS_TYPE_DECL
]);
662 gcc_assert (tree_contains_struct
[FUNCTION_DECL
][TS_FUNCTION_DECL
]);
663 gcc_assert (tree_contains_struct
[IMPORTED_DECL
][TS_DECL_MINIMAL
]);
664 gcc_assert (tree_contains_struct
[IMPORTED_DECL
][TS_DECL_COMMON
]);
665 gcc_assert (tree_contains_struct
[NAMELIST_DECL
][TS_DECL_MINIMAL
]);
666 gcc_assert (tree_contains_struct
[NAMELIST_DECL
][TS_DECL_COMMON
]);
675 /* Initialize the hash table of types. */
677 = hash_table
<type_cache_hasher
>::create_ggc (TYPE_HASH_INITIAL_SIZE
);
680 = hash_table
<tree_decl_map_cache_hasher
>::create_ggc (512);
683 = hash_table
<tree_decl_map_cache_hasher
>::create_ggc (512);
685 int_cst_hash_table
= hash_table
<int_cst_hasher
>::create_ggc (1024);
687 poly_int_cst_hash_table
= hash_table
<poly_int_cst_hasher
>::create_ggc (64);
689 int_cst_node
= make_int_cst (1, 1);
691 cl_option_hash_table
= hash_table
<cl_option_hasher
>::create_ggc (64);
693 cl_optimization_node
= make_node (OPTIMIZATION_NODE
);
694 cl_target_option_node
= make_node (TARGET_OPTION_NODE
);
696 /* Initialize the tree_contains_struct array. */
697 initialize_tree_contains_struct ();
698 lang_hooks
.init_ts ();
702 /* The name of the object as the assembler will see it (but before any
703 translations made by ASM_OUTPUT_LABELREF). Often this is the same
704 as DECL_NAME. It is an IDENTIFIER_NODE. */
706 decl_assembler_name (tree decl
)
708 if (!DECL_ASSEMBLER_NAME_SET_P (decl
))
709 lang_hooks
.set_decl_assembler_name (decl
);
710 return DECL_ASSEMBLER_NAME_RAW (decl
);
713 /* The DECL_ASSEMBLER_NAME_RAW of DECL is being explicitly set to NAME
714 (either of which may be NULL). Inform the FE, if this changes the
718 overwrite_decl_assembler_name (tree decl
, tree name
)
720 if (DECL_ASSEMBLER_NAME_RAW (decl
) != name
)
721 lang_hooks
.overwrite_decl_assembler_name (decl
, name
);
724 /* When the target supports COMDAT groups, this indicates which group the
725 DECL is associated with. This can be either an IDENTIFIER_NODE or a
726 decl, in which case its DECL_ASSEMBLER_NAME identifies the group. */
728 decl_comdat_group (const_tree node
)
730 struct symtab_node
*snode
= symtab_node::get (node
);
733 return snode
->get_comdat_group ();
736 /* Likewise, but make sure it's been reduced to an IDENTIFIER_NODE. */
738 decl_comdat_group_id (const_tree node
)
740 struct symtab_node
*snode
= symtab_node::get (node
);
743 return snode
->get_comdat_group_id ();
746 /* When the target supports named section, return its name as IDENTIFIER_NODE
747 or NULL if it is in no section. */
749 decl_section_name (const_tree node
)
751 struct symtab_node
*snode
= symtab_node::get (node
);
754 return snode
->get_section ();
757 /* Set section name of NODE to VALUE (that is expected to be
760 set_decl_section_name (tree node
, const char *value
)
762 struct symtab_node
*snode
;
766 snode
= symtab_node::get (node
);
770 else if (VAR_P (node
))
771 snode
= varpool_node::get_create (node
);
773 snode
= cgraph_node::get_create (node
);
774 snode
->set_section (value
);
777 /* Return TLS model of a variable NODE. */
779 decl_tls_model (const_tree node
)
781 struct varpool_node
*snode
= varpool_node::get (node
);
783 return TLS_MODEL_NONE
;
784 return snode
->tls_model
;
787 /* Set TLS model of variable NODE to MODEL. */
789 set_decl_tls_model (tree node
, enum tls_model model
)
791 struct varpool_node
*vnode
;
793 if (model
== TLS_MODEL_NONE
)
795 vnode
= varpool_node::get (node
);
800 vnode
= varpool_node::get_create (node
);
801 vnode
->tls_model
= model
;
804 /* Compute the number of bytes occupied by a tree with code CODE.
805 This function cannot be used for nodes that have variable sizes,
806 including TREE_VEC, INTEGER_CST, STRING_CST, and CALL_EXPR. */
808 tree_code_size (enum tree_code code
)
810 switch (TREE_CODE_CLASS (code
))
812 case tcc_declaration
: /* A decl node */
815 case FIELD_DECL
: return sizeof (tree_field_decl
);
816 case PARM_DECL
: return sizeof (tree_parm_decl
);
817 case VAR_DECL
: return sizeof (tree_var_decl
);
818 case LABEL_DECL
: return sizeof (tree_label_decl
);
819 case RESULT_DECL
: return sizeof (tree_result_decl
);
820 case CONST_DECL
: return sizeof (tree_const_decl
);
821 case TYPE_DECL
: return sizeof (tree_type_decl
);
822 case FUNCTION_DECL
: return sizeof (tree_function_decl
);
823 case DEBUG_EXPR_DECL
: return sizeof (tree_decl_with_rtl
);
824 case TRANSLATION_UNIT_DECL
: return sizeof (tree_translation_unit_decl
);
827 case NAMELIST_DECL
: return sizeof (tree_decl_non_common
);
829 gcc_checking_assert (code
>= NUM_TREE_CODES
);
830 return lang_hooks
.tree_size (code
);
833 case tcc_type
: /* a type node */
844 case FIXED_POINT_TYPE
:
850 case QUAL_UNION_TYPE
:
854 case LANG_TYPE
: return sizeof (tree_type_non_common
);
856 gcc_checking_assert (code
>= NUM_TREE_CODES
);
857 return lang_hooks
.tree_size (code
);
860 case tcc_reference
: /* a reference */
861 case tcc_expression
: /* an expression */
862 case tcc_statement
: /* an expression with side effects */
863 case tcc_comparison
: /* a comparison expression */
864 case tcc_unary
: /* a unary arithmetic expression */
865 case tcc_binary
: /* a binary arithmetic expression */
866 return (sizeof (struct tree_exp
)
867 + (TREE_CODE_LENGTH (code
) - 1) * sizeof (tree
));
869 case tcc_constant
: /* a constant */
872 case VOID_CST
: return sizeof (tree_typed
);
873 case INTEGER_CST
: gcc_unreachable ();
874 case POLY_INT_CST
: return sizeof (tree_poly_int_cst
);
875 case REAL_CST
: return sizeof (tree_real_cst
);
876 case FIXED_CST
: return sizeof (tree_fixed_cst
);
877 case COMPLEX_CST
: return sizeof (tree_complex
);
878 case VECTOR_CST
: gcc_unreachable ();
879 case STRING_CST
: gcc_unreachable ();
881 gcc_checking_assert (code
>= NUM_TREE_CODES
);
882 return lang_hooks
.tree_size (code
);
885 case tcc_exceptional
: /* something random, like an identifier. */
888 case IDENTIFIER_NODE
: return lang_hooks
.identifier_size
;
889 case TREE_LIST
: return sizeof (tree_list
);
892 case PLACEHOLDER_EXPR
: return sizeof (tree_common
);
894 case TREE_VEC
: gcc_unreachable ();
895 case OMP_CLAUSE
: gcc_unreachable ();
897 case SSA_NAME
: return sizeof (tree_ssa_name
);
899 case STATEMENT_LIST
: return sizeof (tree_statement_list
);
900 case BLOCK
: return sizeof (struct tree_block
);
901 case CONSTRUCTOR
: return sizeof (tree_constructor
);
902 case OPTIMIZATION_NODE
: return sizeof (tree_optimization_option
);
903 case TARGET_OPTION_NODE
: return sizeof (tree_target_option
);
906 gcc_checking_assert (code
>= NUM_TREE_CODES
);
907 return lang_hooks
.tree_size (code
);
915 /* Compute the number of bytes occupied by NODE. This routine only
916 looks at TREE_CODE, except for those nodes that have variable sizes. */
918 tree_size (const_tree node
)
920 const enum tree_code code
= TREE_CODE (node
);
924 return (sizeof (struct tree_int_cst
)
925 + (TREE_INT_CST_EXT_NUNITS (node
) - 1) * sizeof (HOST_WIDE_INT
));
928 return (offsetof (struct tree_binfo
, base_binfos
)
930 ::embedded_size (BINFO_N_BASE_BINFOS (node
)));
933 return (sizeof (struct tree_vec
)
934 + (TREE_VEC_LENGTH (node
) - 1) * sizeof (tree
));
937 return (sizeof (struct tree_vector
)
938 + (vector_cst_encoded_nelts (node
) - 1) * sizeof (tree
));
941 return TREE_STRING_LENGTH (node
) + offsetof (struct tree_string
, str
) + 1;
944 return (sizeof (struct tree_omp_clause
)
945 + (omp_clause_num_ops
[OMP_CLAUSE_CODE (node
)] - 1)
949 if (TREE_CODE_CLASS (code
) == tcc_vl_exp
)
950 return (sizeof (struct tree_exp
)
951 + (VL_EXP_OPERAND_LENGTH (node
) - 1) * sizeof (tree
));
953 return tree_code_size (code
);
957 /* Return tree node kind based on tree CODE. */
959 static tree_node_kind
960 get_stats_node_kind (enum tree_code code
)
962 enum tree_code_class type
= TREE_CODE_CLASS (code
);
966 case tcc_declaration
: /* A decl node */
968 case tcc_type
: /* a type node */
970 case tcc_statement
: /* an expression with side effects */
972 case tcc_reference
: /* a reference */
974 case tcc_expression
: /* an expression */
975 case tcc_comparison
: /* a comparison expression */
976 case tcc_unary
: /* a unary arithmetic expression */
977 case tcc_binary
: /* a binary arithmetic expression */
979 case tcc_constant
: /* a constant */
981 case tcc_exceptional
: /* something random, like an identifier. */
984 case IDENTIFIER_NODE
:
991 return ssa_name_kind
;
997 return omp_clause_kind
;
1009 /* Record interesting allocation statistics for a tree node with CODE
1013 record_node_allocation_statistics (enum tree_code code
, size_t length
)
1015 if (!GATHER_STATISTICS
)
1018 tree_node_kind kind
= get_stats_node_kind (code
);
1020 tree_code_counts
[(int) code
]++;
1021 tree_node_counts
[(int) kind
]++;
1022 tree_node_sizes
[(int) kind
] += length
;
1025 /* Allocate and return a new UID from the DECL_UID namespace. */
1028 allocate_decl_uid (void)
1030 return next_decl_uid
++;
1033 /* Return a newly allocated node of code CODE. For decl and type
1034 nodes, some other fields are initialized. The rest of the node is
1035 initialized to zero. This function cannot be used for TREE_VEC,
1036 INTEGER_CST or OMP_CLAUSE nodes, which is enforced by asserts in
1039 Achoo! I got a code in the node. */
1042 make_node (enum tree_code code MEM_STAT_DECL
)
1045 enum tree_code_class type
= TREE_CODE_CLASS (code
);
1046 size_t length
= tree_code_size (code
);
1048 record_node_allocation_statistics (code
, length
);
1050 t
= ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT
);
1051 TREE_SET_CODE (t
, code
);
1056 if (code
!= DEBUG_BEGIN_STMT
)
1057 TREE_SIDE_EFFECTS (t
) = 1;
1060 case tcc_declaration
:
1061 if (CODE_CONTAINS_STRUCT (code
, TS_DECL_COMMON
))
1063 if (code
== FUNCTION_DECL
)
1065 SET_DECL_ALIGN (t
, FUNCTION_ALIGNMENT (FUNCTION_BOUNDARY
));
1066 SET_DECL_MODE (t
, FUNCTION_MODE
);
1069 SET_DECL_ALIGN (t
, 1);
1071 DECL_SOURCE_LOCATION (t
) = input_location
;
1072 if (TREE_CODE (t
) == DEBUG_EXPR_DECL
)
1073 DECL_UID (t
) = --next_debug_decl_uid
;
1076 DECL_UID (t
) = allocate_decl_uid ();
1077 SET_DECL_PT_UID (t
, -1);
1079 if (TREE_CODE (t
) == LABEL_DECL
)
1080 LABEL_DECL_UID (t
) = -1;
1085 TYPE_UID (t
) = next_type_uid
++;
1086 SET_TYPE_ALIGN (t
, BITS_PER_UNIT
);
1087 TYPE_USER_ALIGN (t
) = 0;
1088 TYPE_MAIN_VARIANT (t
) = t
;
1089 TYPE_CANONICAL (t
) = t
;
1091 /* Default to no attributes for type, but let target change that. */
1092 TYPE_ATTRIBUTES (t
) = NULL_TREE
;
1093 targetm
.set_default_type_attributes (t
);
1095 /* We have not yet computed the alias set for this type. */
1096 TYPE_ALIAS_SET (t
) = -1;
1100 TREE_CONSTANT (t
) = 1;
1103 case tcc_expression
:
1109 case PREDECREMENT_EXPR
:
1110 case PREINCREMENT_EXPR
:
1111 case POSTDECREMENT_EXPR
:
1112 case POSTINCREMENT_EXPR
:
1113 /* All of these have side-effects, no matter what their
1115 TREE_SIDE_EFFECTS (t
) = 1;
1123 case tcc_exceptional
:
1126 case TARGET_OPTION_NODE
:
1127 TREE_TARGET_OPTION(t
)
1128 = ggc_cleared_alloc
<struct cl_target_option
> ();
1131 case OPTIMIZATION_NODE
:
1132 TREE_OPTIMIZATION (t
)
1133 = ggc_cleared_alloc
<struct cl_optimization
> ();
1142 /* Other classes need no special treatment. */
1149 /* Free tree node. */
1152 free_node (tree node
)
1154 enum tree_code code
= TREE_CODE (node
);
1155 if (GATHER_STATISTICS
)
1157 enum tree_node_kind kind
= get_stats_node_kind (code
);
1159 gcc_checking_assert (tree_code_counts
[(int) TREE_CODE (node
)] != 0);
1160 gcc_checking_assert (tree_node_counts
[(int) kind
] != 0);
1161 gcc_checking_assert (tree_node_sizes
[(int) kind
] >= tree_size (node
));
1163 tree_code_counts
[(int) TREE_CODE (node
)]--;
1164 tree_node_counts
[(int) kind
]--;
1165 tree_node_sizes
[(int) kind
] -= tree_size (node
);
1167 if (CODE_CONTAINS_STRUCT (code
, TS_CONSTRUCTOR
))
1168 vec_free (CONSTRUCTOR_ELTS (node
));
1169 else if (code
== BLOCK
)
1170 vec_free (BLOCK_NONLOCALIZED_VARS (node
));
1171 else if (code
== TREE_BINFO
)
1172 vec_free (BINFO_BASE_ACCESSES (node
));
1176 /* Return a new node with the same contents as NODE except that its
1177 TREE_CHAIN, if it has one, is zero and it has a fresh uid. */
1180 copy_node (tree node MEM_STAT_DECL
)
1183 enum tree_code code
= TREE_CODE (node
);
1186 gcc_assert (code
!= STATEMENT_LIST
);
1188 length
= tree_size (node
);
1189 record_node_allocation_statistics (code
, length
);
1190 t
= ggc_alloc_tree_node_stat (length PASS_MEM_STAT
);
1191 memcpy (t
, node
, length
);
1193 if (CODE_CONTAINS_STRUCT (code
, TS_COMMON
))
1195 TREE_ASM_WRITTEN (t
) = 0;
1196 TREE_VISITED (t
) = 0;
1198 if (TREE_CODE_CLASS (code
) == tcc_declaration
)
1200 if (code
== DEBUG_EXPR_DECL
)
1201 DECL_UID (t
) = --next_debug_decl_uid
;
1204 DECL_UID (t
) = allocate_decl_uid ();
1205 if (DECL_PT_UID_SET_P (node
))
1206 SET_DECL_PT_UID (t
, DECL_PT_UID (node
));
1208 if ((TREE_CODE (node
) == PARM_DECL
|| VAR_P (node
))
1209 && DECL_HAS_VALUE_EXPR_P (node
))
1211 SET_DECL_VALUE_EXPR (t
, DECL_VALUE_EXPR (node
));
1212 DECL_HAS_VALUE_EXPR_P (t
) = 1;
1214 /* DECL_DEBUG_EXPR is copied explicitely by callers. */
1217 DECL_HAS_DEBUG_EXPR_P (t
) = 0;
1218 t
->decl_with_vis
.symtab_node
= NULL
;
1220 if (VAR_P (node
) && DECL_HAS_INIT_PRIORITY_P (node
))
1222 SET_DECL_INIT_PRIORITY (t
, DECL_INIT_PRIORITY (node
));
1223 DECL_HAS_INIT_PRIORITY_P (t
) = 1;
1225 if (TREE_CODE (node
) == FUNCTION_DECL
)
1227 DECL_STRUCT_FUNCTION (t
) = NULL
;
1228 t
->decl_with_vis
.symtab_node
= NULL
;
1231 else if (TREE_CODE_CLASS (code
) == tcc_type
)
1233 TYPE_UID (t
) = next_type_uid
++;
1234 /* The following is so that the debug code for
1235 the copy is different from the original type.
1236 The two statements usually duplicate each other
1237 (because they clear fields of the same union),
1238 but the optimizer should catch that. */
1239 TYPE_SYMTAB_ADDRESS (t
) = 0;
1240 TYPE_SYMTAB_DIE (t
) = 0;
1242 /* Do not copy the values cache. */
1243 if (TYPE_CACHED_VALUES_P (t
))
1245 TYPE_CACHED_VALUES_P (t
) = 0;
1246 TYPE_CACHED_VALUES (t
) = NULL_TREE
;
1249 else if (code
== TARGET_OPTION_NODE
)
1251 TREE_TARGET_OPTION (t
) = ggc_alloc
<struct cl_target_option
>();
1252 memcpy (TREE_TARGET_OPTION (t
), TREE_TARGET_OPTION (node
),
1253 sizeof (struct cl_target_option
));
1255 else if (code
== OPTIMIZATION_NODE
)
1257 TREE_OPTIMIZATION (t
) = ggc_alloc
<struct cl_optimization
>();
1258 memcpy (TREE_OPTIMIZATION (t
), TREE_OPTIMIZATION (node
),
1259 sizeof (struct cl_optimization
));
1265 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
1266 For example, this can copy a list made of TREE_LIST nodes. */
1269 copy_list (tree list
)
1277 head
= prev
= copy_node (list
);
1278 next
= TREE_CHAIN (list
);
1281 TREE_CHAIN (prev
) = copy_node (next
);
1282 prev
= TREE_CHAIN (prev
);
1283 next
= TREE_CHAIN (next
);
1289 /* Return the value that TREE_INT_CST_EXT_NUNITS should have for an
1290 INTEGER_CST with value CST and type TYPE. */
1293 get_int_cst_ext_nunits (tree type
, const wide_int
&cst
)
1295 gcc_checking_assert (cst
.get_precision () == TYPE_PRECISION (type
));
1296 /* We need extra HWIs if CST is an unsigned integer with its
1298 if (TYPE_UNSIGNED (type
) && wi::neg_p (cst
))
1299 return cst
.get_precision () / HOST_BITS_PER_WIDE_INT
+ 1;
1300 return cst
.get_len ();
1303 /* Return a new INTEGER_CST with value CST and type TYPE. */
1306 build_new_int_cst (tree type
, const wide_int
&cst
)
1308 unsigned int len
= cst
.get_len ();
1309 unsigned int ext_len
= get_int_cst_ext_nunits (type
, cst
);
1310 tree nt
= make_int_cst (len
, ext_len
);
1315 TREE_INT_CST_ELT (nt
, ext_len
)
1316 = zext_hwi (-1, cst
.get_precision () % HOST_BITS_PER_WIDE_INT
);
1317 for (unsigned int i
= len
; i
< ext_len
; ++i
)
1318 TREE_INT_CST_ELT (nt
, i
) = -1;
1320 else if (TYPE_UNSIGNED (type
)
1321 && cst
.get_precision () < len
* HOST_BITS_PER_WIDE_INT
)
1324 TREE_INT_CST_ELT (nt
, len
)
1325 = zext_hwi (cst
.elt (len
),
1326 cst
.get_precision () % HOST_BITS_PER_WIDE_INT
);
1329 for (unsigned int i
= 0; i
< len
; i
++)
1330 TREE_INT_CST_ELT (nt
, i
) = cst
.elt (i
);
1331 TREE_TYPE (nt
) = type
;
1335 /* Return a new POLY_INT_CST with coefficients COEFFS and type TYPE. */
1338 build_new_poly_int_cst (tree type
, tree (&coeffs
)[NUM_POLY_INT_COEFFS
]
1341 size_t length
= sizeof (struct tree_poly_int_cst
);
1342 record_node_allocation_statistics (POLY_INT_CST
, length
);
1344 tree t
= ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT
);
1346 TREE_SET_CODE (t
, POLY_INT_CST
);
1347 TREE_CONSTANT (t
) = 1;
1348 TREE_TYPE (t
) = type
;
1349 for (unsigned int i
= 0; i
< NUM_POLY_INT_COEFFS
; ++i
)
1350 POLY_INT_CST_COEFF (t
, i
) = coeffs
[i
];
1354 /* Create a constant tree that contains CST sign-extended to TYPE. */
1357 build_int_cst (tree type
, poly_int64 cst
)
1359 /* Support legacy code. */
1361 type
= integer_type_node
;
1363 return wide_int_to_tree (type
, wi::shwi (cst
, TYPE_PRECISION (type
)));
1366 /* Create a constant tree that contains CST zero-extended to TYPE. */
1369 build_int_cstu (tree type
, poly_uint64 cst
)
1371 return wide_int_to_tree (type
, wi::uhwi (cst
, TYPE_PRECISION (type
)));
1374 /* Create a constant tree that contains CST sign-extended to TYPE. */
1377 build_int_cst_type (tree type
, poly_int64 cst
)
1380 return wide_int_to_tree (type
, wi::shwi (cst
, TYPE_PRECISION (type
)));
1383 /* Constructs tree in type TYPE from with value given by CST. Signedness
1384 of CST is assumed to be the same as the signedness of TYPE. */
1387 double_int_to_tree (tree type
, double_int cst
)
1389 return wide_int_to_tree (type
, widest_int::from (cst
, TYPE_SIGN (type
)));
1392 /* We force the wide_int CST to the range of the type TYPE by sign or
1393 zero extending it. OVERFLOWABLE indicates if we are interested in
1394 overflow of the value, when >0 we are only interested in signed
1395 overflow, for <0 we are interested in any overflow. OVERFLOWED
1396 indicates whether overflow has already occurred. CONST_OVERFLOWED
1397 indicates whether constant overflow has already occurred. We force
1398 T's value to be within range of T's type (by setting to 0 or 1 all
1399 the bits outside the type's range). We set TREE_OVERFLOWED if,
1400 OVERFLOWED is nonzero,
1401 or OVERFLOWABLE is >0 and signed overflow occurs
1402 or OVERFLOWABLE is <0 and any overflow occurs
1403 We return a new tree node for the extended wide_int. The node
1404 is shared if no overflow flags are set. */
1408 force_fit_type (tree type
, const poly_wide_int_ref
&cst
,
1409 int overflowable
, bool overflowed
)
1411 signop sign
= TYPE_SIGN (type
);
1413 /* If we need to set overflow flags, return a new unshared node. */
1414 if (overflowed
|| !wi::fits_to_tree_p (cst
, type
))
1418 || (overflowable
> 0 && sign
== SIGNED
))
1420 poly_wide_int tmp
= poly_wide_int::from (cst
, TYPE_PRECISION (type
),
1423 if (tmp
.is_constant ())
1424 t
= build_new_int_cst (type
, tmp
.coeffs
[0]);
1427 tree coeffs
[NUM_POLY_INT_COEFFS
];
1428 for (unsigned int i
= 0; i
< NUM_POLY_INT_COEFFS
; ++i
)
1430 coeffs
[i
] = build_new_int_cst (type
, tmp
.coeffs
[i
]);
1431 TREE_OVERFLOW (coeffs
[i
]) = 1;
1433 t
= build_new_poly_int_cst (type
, coeffs
);
1435 TREE_OVERFLOW (t
) = 1;
1440 /* Else build a shared node. */
1441 return wide_int_to_tree (type
, cst
);
1444 /* These are the hash table functions for the hash table of INTEGER_CST
1445 nodes of a sizetype. */
1447 /* Return the hash code X, an INTEGER_CST. */
1450 int_cst_hasher::hash (tree x
)
1452 const_tree
const t
= x
;
1453 hashval_t code
= TYPE_UID (TREE_TYPE (t
));
1456 for (i
= 0; i
< TREE_INT_CST_NUNITS (t
); i
++)
1457 code
= iterative_hash_host_wide_int (TREE_INT_CST_ELT(t
, i
), code
);
1462 /* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
1463 is the same as that given by *Y, which is the same. */
1466 int_cst_hasher::equal (tree x
, tree y
)
1468 const_tree
const xt
= x
;
1469 const_tree
const yt
= y
;
1471 if (TREE_TYPE (xt
) != TREE_TYPE (yt
)
1472 || TREE_INT_CST_NUNITS (xt
) != TREE_INT_CST_NUNITS (yt
)
1473 || TREE_INT_CST_EXT_NUNITS (xt
) != TREE_INT_CST_EXT_NUNITS (yt
))
1476 for (int i
= 0; i
< TREE_INT_CST_NUNITS (xt
); i
++)
1477 if (TREE_INT_CST_ELT (xt
, i
) != TREE_INT_CST_ELT (yt
, i
))
1483 /* Create an INT_CST node of TYPE and value CST.
1484 The returned node is always shared. For small integers we use a
1485 per-type vector cache, for larger ones we use a single hash table.
1486 The value is extended from its precision according to the sign of
1487 the type to be a multiple of HOST_BITS_PER_WIDE_INT. This defines
1488 the upper bits and ensures that hashing and value equality based
1489 upon the underlying HOST_WIDE_INTs works without masking. */
1492 wide_int_to_tree_1 (tree type
, const wide_int_ref
&pcst
)
1499 unsigned int prec
= TYPE_PRECISION (type
);
1500 signop sgn
= TYPE_SIGN (type
);
1502 /* Verify that everything is canonical. */
1503 int l
= pcst
.get_len ();
1506 if (pcst
.elt (l
- 1) == 0)
1507 gcc_checking_assert (pcst
.elt (l
- 2) < 0);
1508 if (pcst
.elt (l
- 1) == HOST_WIDE_INT_M1
)
1509 gcc_checking_assert (pcst
.elt (l
- 2) >= 0);
1512 wide_int cst
= wide_int::from (pcst
, prec
, sgn
);
1513 unsigned int ext_len
= get_int_cst_ext_nunits (type
, cst
);
1517 /* We just need to store a single HOST_WIDE_INT. */
1519 if (TYPE_UNSIGNED (type
))
1520 hwi
= cst
.to_uhwi ();
1522 hwi
= cst
.to_shwi ();
1524 switch (TREE_CODE (type
))
1527 gcc_assert (hwi
== 0);
1531 case REFERENCE_TYPE
:
1532 /* Cache NULL pointer and zero bounds. */
1541 /* Cache false or true. */
1543 if (IN_RANGE (hwi
, 0, 1))
1549 if (TYPE_SIGN (type
) == UNSIGNED
)
1552 limit
= INTEGER_SHARE_LIMIT
;
1553 if (IN_RANGE (hwi
, 0, INTEGER_SHARE_LIMIT
- 1))
1558 /* Cache [-1, N). */
1559 limit
= INTEGER_SHARE_LIMIT
+ 1;
1560 if (IN_RANGE (hwi
, -1, INTEGER_SHARE_LIMIT
- 1))
1574 /* Look for it in the type's vector of small shared ints. */
1575 if (!TYPE_CACHED_VALUES_P (type
))
1577 TYPE_CACHED_VALUES_P (type
) = 1;
1578 TYPE_CACHED_VALUES (type
) = make_tree_vec (limit
);
1581 t
= TREE_VEC_ELT (TYPE_CACHED_VALUES (type
), ix
);
1583 /* Make sure no one is clobbering the shared constant. */
1584 gcc_checking_assert (TREE_TYPE (t
) == type
1585 && TREE_INT_CST_NUNITS (t
) == 1
1586 && TREE_INT_CST_OFFSET_NUNITS (t
) == 1
1587 && TREE_INT_CST_EXT_NUNITS (t
) == 1
1588 && TREE_INT_CST_ELT (t
, 0) == hwi
);
1591 /* Create a new shared int. */
1592 t
= build_new_int_cst (type
, cst
);
1593 TREE_VEC_ELT (TYPE_CACHED_VALUES (type
), ix
) = t
;
1598 /* Use the cache of larger shared ints, using int_cst_node as
1601 TREE_INT_CST_ELT (int_cst_node
, 0) = hwi
;
1602 TREE_TYPE (int_cst_node
) = type
;
1604 tree
*slot
= int_cst_hash_table
->find_slot (int_cst_node
, INSERT
);
1608 /* Insert this one into the hash table. */
1611 /* Make a new node for next time round. */
1612 int_cst_node
= make_int_cst (1, 1);
1618 /* The value either hashes properly or we drop it on the floor
1619 for the gc to take care of. There will not be enough of them
1622 tree nt
= build_new_int_cst (type
, cst
);
1623 tree
*slot
= int_cst_hash_table
->find_slot (nt
, INSERT
);
1627 /* Insert this one into the hash table. */
1639 poly_int_cst_hasher::hash (tree t
)
1641 inchash::hash hstate
;
1643 hstate
.add_int (TYPE_UID (TREE_TYPE (t
)));
1644 for (unsigned int i
= 0; i
< NUM_POLY_INT_COEFFS
; ++i
)
1645 hstate
.add_wide_int (wi::to_wide (POLY_INT_CST_COEFF (t
, i
)));
1647 return hstate
.end ();
1651 poly_int_cst_hasher::equal (tree x
, const compare_type
&y
)
1653 if (TREE_TYPE (x
) != y
.first
)
1655 for (unsigned int i
= 0; i
< NUM_POLY_INT_COEFFS
; ++i
)
1656 if (wi::to_wide (POLY_INT_CST_COEFF (x
, i
)) != y
.second
->coeffs
[i
])
1661 /* Build a POLY_INT_CST node with type TYPE and with the elements in VALUES.
1662 The elements must also have type TYPE. */
1665 build_poly_int_cst (tree type
, const poly_wide_int_ref
&values
)
1667 unsigned int prec
= TYPE_PRECISION (type
);
1668 gcc_assert (prec
<= values
.coeffs
[0].get_precision ());
1669 poly_wide_int c
= poly_wide_int::from (values
, prec
, SIGNED
);
1672 h
.add_int (TYPE_UID (type
));
1673 for (unsigned int i
= 0; i
< NUM_POLY_INT_COEFFS
; ++i
)
1674 h
.add_wide_int (c
.coeffs
[i
]);
1675 poly_int_cst_hasher::compare_type
comp (type
, &c
);
1676 tree
*slot
= poly_int_cst_hash_table
->find_slot_with_hash (comp
, h
.end (),
1678 if (*slot
== NULL_TREE
)
1680 tree coeffs
[NUM_POLY_INT_COEFFS
];
1681 for (unsigned int i
= 0; i
< NUM_POLY_INT_COEFFS
; ++i
)
1682 coeffs
[i
] = wide_int_to_tree_1 (type
, c
.coeffs
[i
]);
1683 *slot
= build_new_poly_int_cst (type
, coeffs
);
1688 /* Create a constant tree with value VALUE in type TYPE. */
1691 wide_int_to_tree (tree type
, const poly_wide_int_ref
&value
)
1693 if (value
.is_constant ())
1694 return wide_int_to_tree_1 (type
, value
.coeffs
[0]);
1695 return build_poly_int_cst (type
, value
);
1699 cache_integer_cst (tree t
)
1701 tree type
= TREE_TYPE (t
);
1704 int prec
= TYPE_PRECISION (type
);
1706 gcc_assert (!TREE_OVERFLOW (t
));
1708 switch (TREE_CODE (type
))
1711 gcc_assert (integer_zerop (t
));
1715 case REFERENCE_TYPE
:
1716 /* Cache NULL pointer. */
1717 if (integer_zerop (t
))
1725 /* Cache false or true. */
1727 if (wi::ltu_p (wi::to_wide (t
), 2))
1728 ix
= TREE_INT_CST_ELT (t
, 0);
1733 if (TYPE_UNSIGNED (type
))
1736 limit
= INTEGER_SHARE_LIMIT
;
1738 /* This is a little hokie, but if the prec is smaller than
1739 what is necessary to hold INTEGER_SHARE_LIMIT, then the
1740 obvious test will not get the correct answer. */
1741 if (prec
< HOST_BITS_PER_WIDE_INT
)
1743 if (tree_to_uhwi (t
) < (unsigned HOST_WIDE_INT
) INTEGER_SHARE_LIMIT
)
1744 ix
= tree_to_uhwi (t
);
1746 else if (wi::ltu_p (wi::to_wide (t
), INTEGER_SHARE_LIMIT
))
1747 ix
= tree_to_uhwi (t
);
1752 limit
= INTEGER_SHARE_LIMIT
+ 1;
1754 if (integer_minus_onep (t
))
1756 else if (!wi::neg_p (wi::to_wide (t
)))
1758 if (prec
< HOST_BITS_PER_WIDE_INT
)
1760 if (tree_to_shwi (t
) < INTEGER_SHARE_LIMIT
)
1761 ix
= tree_to_shwi (t
) + 1;
1763 else if (wi::ltu_p (wi::to_wide (t
), INTEGER_SHARE_LIMIT
))
1764 ix
= tree_to_shwi (t
) + 1;
1778 /* Look for it in the type's vector of small shared ints. */
1779 if (!TYPE_CACHED_VALUES_P (type
))
1781 TYPE_CACHED_VALUES_P (type
) = 1;
1782 TYPE_CACHED_VALUES (type
) = make_tree_vec (limit
);
1785 gcc_assert (TREE_VEC_ELT (TYPE_CACHED_VALUES (type
), ix
) == NULL_TREE
);
1786 TREE_VEC_ELT (TYPE_CACHED_VALUES (type
), ix
) = t
;
1790 /* Use the cache of larger shared ints. */
1791 tree
*slot
= int_cst_hash_table
->find_slot (t
, INSERT
);
1792 /* If there is already an entry for the number verify it's the
1795 gcc_assert (wi::to_wide (tree (*slot
)) == wi::to_wide (t
));
1797 /* Otherwise insert this one into the hash table. */
1803 /* Builds an integer constant in TYPE such that lowest BITS bits are ones
1804 and the rest are zeros. */
1807 build_low_bits_mask (tree type
, unsigned bits
)
1809 gcc_assert (bits
<= TYPE_PRECISION (type
));
1811 return wide_int_to_tree (type
, wi::mask (bits
, false,
1812 TYPE_PRECISION (type
)));
1815 /* Checks that X is integer constant that can be expressed in (unsigned)
1816 HOST_WIDE_INT without loss of precision. */
1819 cst_and_fits_in_hwi (const_tree x
)
1821 return (TREE_CODE (x
) == INTEGER_CST
1822 && (tree_fits_shwi_p (x
) || tree_fits_uhwi_p (x
)));
1825 /* Build a newly constructed VECTOR_CST with the given values of
1826 (VECTOR_CST_)LOG2_NPATTERNS and (VECTOR_CST_)NELTS_PER_PATTERN. */
1829 make_vector (unsigned log2_npatterns
,
1830 unsigned int nelts_per_pattern MEM_STAT_DECL
)
1832 gcc_assert (IN_RANGE (nelts_per_pattern
, 1, 3));
1834 unsigned npatterns
= 1 << log2_npatterns
;
1835 unsigned encoded_nelts
= npatterns
* nelts_per_pattern
;
1836 unsigned length
= (sizeof (struct tree_vector
)
1837 + (encoded_nelts
- 1) * sizeof (tree
));
1839 record_node_allocation_statistics (VECTOR_CST
, length
);
1841 t
= ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT
);
1843 TREE_SET_CODE (t
, VECTOR_CST
);
1844 TREE_CONSTANT (t
) = 1;
1845 VECTOR_CST_LOG2_NPATTERNS (t
) = log2_npatterns
;
1846 VECTOR_CST_NELTS_PER_PATTERN (t
) = nelts_per_pattern
;
1851 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1852 are extracted from V, a vector of CONSTRUCTOR_ELT. */
1855 build_vector_from_ctor (tree type
, vec
<constructor_elt
, va_gc
> *v
)
1857 unsigned HOST_WIDE_INT idx
, nelts
;
1860 /* We can't construct a VECTOR_CST for a variable number of elements. */
1861 nelts
= TYPE_VECTOR_SUBPARTS (type
).to_constant ();
1862 tree_vector_builder
vec (type
, nelts
, 1);
1863 FOR_EACH_CONSTRUCTOR_VALUE (v
, idx
, value
)
1865 if (TREE_CODE (value
) == VECTOR_CST
)
1867 /* If NELTS is constant then this must be too. */
1868 unsigned int sub_nelts
= VECTOR_CST_NELTS (value
).to_constant ();
1869 for (unsigned i
= 0; i
< sub_nelts
; ++i
)
1870 vec
.quick_push (VECTOR_CST_ELT (value
, i
));
1873 vec
.quick_push (value
);
1875 while (vec
.length () < nelts
)
1876 vec
.quick_push (build_zero_cst (TREE_TYPE (type
)));
1878 return vec
.build ();
1881 /* Build a vector of type VECTYPE where all the elements are SCs. */
1883 build_vector_from_val (tree vectype
, tree sc
)
1885 unsigned HOST_WIDE_INT i
, nunits
;
1887 if (sc
== error_mark_node
)
1890 /* Verify that the vector type is suitable for SC. Note that there
1891 is some inconsistency in the type-system with respect to restrict
1892 qualifications of pointers. Vector types always have a main-variant
1893 element type and the qualification is applied to the vector-type.
1894 So TREE_TYPE (vector-type) does not return a properly qualified
1895 vector element-type. */
1896 gcc_checking_assert (types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (sc
)),
1897 TREE_TYPE (vectype
)));
1899 if (CONSTANT_CLASS_P (sc
))
1901 tree_vector_builder
v (vectype
, 1, 1);
1905 else if (!TYPE_VECTOR_SUBPARTS (vectype
).is_constant (&nunits
))
1906 return fold_build1 (VEC_DUPLICATE_EXPR
, vectype
, sc
);
1909 vec
<constructor_elt
, va_gc
> *v
;
1910 vec_alloc (v
, nunits
);
1911 for (i
= 0; i
< nunits
; ++i
)
1912 CONSTRUCTOR_APPEND_ELT (v
, NULL_TREE
, sc
);
1913 return build_constructor (vectype
, v
);
1917 /* If TYPE is not a vector type, just return SC, otherwise return
1918 build_vector_from_val (TYPE, SC). */
1921 build_uniform_cst (tree type
, tree sc
)
1923 if (!VECTOR_TYPE_P (type
))
1926 return build_vector_from_val (type
, sc
);
1929 /* Build a vector series of type TYPE in which element I has the value
1930 BASE + I * STEP. The result is a constant if BASE and STEP are constant
1931 and a VEC_SERIES_EXPR otherwise. */
1934 build_vec_series (tree type
, tree base
, tree step
)
1936 if (integer_zerop (step
))
1937 return build_vector_from_val (type
, base
);
1938 if (TREE_CODE (base
) == INTEGER_CST
&& TREE_CODE (step
) == INTEGER_CST
)
1940 tree_vector_builder
builder (type
, 1, 3);
1941 tree elt1
= wide_int_to_tree (TREE_TYPE (base
),
1942 wi::to_wide (base
) + wi::to_wide (step
));
1943 tree elt2
= wide_int_to_tree (TREE_TYPE (base
),
1944 wi::to_wide (elt1
) + wi::to_wide (step
));
1945 builder
.quick_push (base
);
1946 builder
.quick_push (elt1
);
1947 builder
.quick_push (elt2
);
1948 return builder
.build ();
1950 return build2 (VEC_SERIES_EXPR
, type
, base
, step
);
1953 /* Return a vector with the same number of units and number of bits
1954 as VEC_TYPE, but in which the elements are a linear series of unsigned
1955 integers { BASE, BASE + STEP, BASE + STEP * 2, ... }. */
1958 build_index_vector (tree vec_type
, poly_uint64 base
, poly_uint64 step
)
1960 tree index_vec_type
= vec_type
;
1961 tree index_elt_type
= TREE_TYPE (vec_type
);
1962 poly_uint64 nunits
= TYPE_VECTOR_SUBPARTS (vec_type
);
1963 if (!INTEGRAL_TYPE_P (index_elt_type
) || !TYPE_UNSIGNED (index_elt_type
))
1965 index_elt_type
= build_nonstandard_integer_type
1966 (GET_MODE_BITSIZE (SCALAR_TYPE_MODE (index_elt_type
)), true);
1967 index_vec_type
= build_vector_type (index_elt_type
, nunits
);
1970 tree_vector_builder
v (index_vec_type
, 1, 3);
1971 for (unsigned int i
= 0; i
< 3; ++i
)
1972 v
.quick_push (build_int_cstu (index_elt_type
, base
+ i
* step
));
1976 /* Return a VECTOR_CST of type VEC_TYPE in which the first NUM_A
1977 elements are A and the rest are B. */
1980 build_vector_a_then_b (tree vec_type
, unsigned int num_a
, tree a
, tree b
)
1982 gcc_assert (known_le (num_a
, TYPE_VECTOR_SUBPARTS (vec_type
)));
1983 unsigned int count
= constant_lower_bound (TYPE_VECTOR_SUBPARTS (vec_type
));
1984 /* Optimize the constant case. */
1985 if ((count
& 1) == 0 && TYPE_VECTOR_SUBPARTS (vec_type
).is_constant ())
1987 tree_vector_builder
builder (vec_type
, count
, 2);
1988 for (unsigned int i
= 0; i
< count
* 2; ++i
)
1989 builder
.quick_push (i
< num_a
? a
: b
);
1990 return builder
.build ();
1993 /* Something has messed with the elements of CONSTRUCTOR C after it was built;
1994 calculate TREE_CONSTANT and TREE_SIDE_EFFECTS. */
1997 recompute_constructor_flags (tree c
)
2001 bool constant_p
= true;
2002 bool side_effects_p
= false;
2003 vec
<constructor_elt
, va_gc
> *vals
= CONSTRUCTOR_ELTS (c
);
2005 FOR_EACH_CONSTRUCTOR_VALUE (vals
, i
, val
)
2007 /* Mostly ctors will have elts that don't have side-effects, so
2008 the usual case is to scan all the elements. Hence a single
2009 loop for both const and side effects, rather than one loop
2010 each (with early outs). */
2011 if (!TREE_CONSTANT (val
))
2013 if (TREE_SIDE_EFFECTS (val
))
2014 side_effects_p
= true;
2017 TREE_SIDE_EFFECTS (c
) = side_effects_p
;
2018 TREE_CONSTANT (c
) = constant_p
;
2021 /* Make sure that TREE_CONSTANT and TREE_SIDE_EFFECTS are correct for
2025 verify_constructor_flags (tree c
)
2029 bool constant_p
= TREE_CONSTANT (c
);
2030 bool side_effects_p
= TREE_SIDE_EFFECTS (c
);
2031 vec
<constructor_elt
, va_gc
> *vals
= CONSTRUCTOR_ELTS (c
);
2033 FOR_EACH_CONSTRUCTOR_VALUE (vals
, i
, val
)
2035 if (constant_p
&& !TREE_CONSTANT (val
))
2036 internal_error ("non-constant element in constant CONSTRUCTOR");
2037 if (!side_effects_p
&& TREE_SIDE_EFFECTS (val
))
2038 internal_error ("side-effects element in no-side-effects CONSTRUCTOR");
2042 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
2043 are in the vec pointed to by VALS. */
2045 build_constructor (tree type
, vec
<constructor_elt
, va_gc
> *vals MEM_STAT_DECL
)
2047 tree c
= make_node (CONSTRUCTOR PASS_MEM_STAT
);
2049 TREE_TYPE (c
) = type
;
2050 CONSTRUCTOR_ELTS (c
) = vals
;
2052 recompute_constructor_flags (c
);
2057 /* Build a CONSTRUCTOR node made of a single initializer, with the specified
2060 build_constructor_single (tree type
, tree index
, tree value
)
2062 vec
<constructor_elt
, va_gc
> *v
;
2063 constructor_elt elt
= {index
, value
};
2066 v
->quick_push (elt
);
2068 return build_constructor (type
, v
);
2072 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
2073 are in a list pointed to by VALS. */
2075 build_constructor_from_list (tree type
, tree vals
)
2078 vec
<constructor_elt
, va_gc
> *v
= NULL
;
2082 vec_alloc (v
, list_length (vals
));
2083 for (t
= vals
; t
; t
= TREE_CHAIN (t
))
2084 CONSTRUCTOR_APPEND_ELT (v
, TREE_PURPOSE (t
), TREE_VALUE (t
));
2087 return build_constructor (type
, v
);
2090 /* Return a new CONSTRUCTOR node whose type is TYPE. NELTS is the number
2091 of elements, provided as index/value pairs. */
2094 build_constructor_va (tree type
, int nelts
, ...)
2096 vec
<constructor_elt
, va_gc
> *v
= NULL
;
2099 va_start (p
, nelts
);
2100 vec_alloc (v
, nelts
);
2103 tree index
= va_arg (p
, tree
);
2104 tree value
= va_arg (p
, tree
);
2105 CONSTRUCTOR_APPEND_ELT (v
, index
, value
);
2108 return build_constructor (type
, v
);
2111 /* Return a node of type TYPE for which TREE_CLOBBER_P is true. */
2114 build_clobber (tree type
)
2116 tree clobber
= build_constructor (type
, NULL
);
2117 TREE_THIS_VOLATILE (clobber
) = true;
2121 /* Return a new FIXED_CST node whose type is TYPE and value is F. */
2124 build_fixed (tree type
, FIXED_VALUE_TYPE f
)
2127 FIXED_VALUE_TYPE
*fp
;
2129 v
= make_node (FIXED_CST
);
2130 fp
= ggc_alloc
<fixed_value
> ();
2131 memcpy (fp
, &f
, sizeof (FIXED_VALUE_TYPE
));
2133 TREE_TYPE (v
) = type
;
2134 TREE_FIXED_CST_PTR (v
) = fp
;
2138 /* Return a new REAL_CST node whose type is TYPE and value is D. */
2141 build_real (tree type
, REAL_VALUE_TYPE d
)
2144 REAL_VALUE_TYPE
*dp
;
2147 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
2148 Consider doing it via real_convert now. */
2150 v
= make_node (REAL_CST
);
2151 dp
= ggc_alloc
<real_value
> ();
2152 memcpy (dp
, &d
, sizeof (REAL_VALUE_TYPE
));
2154 TREE_TYPE (v
) = type
;
2155 TREE_REAL_CST_PTR (v
) = dp
;
2156 TREE_OVERFLOW (v
) = overflow
;
2160 /* Like build_real, but first truncate D to the type. */
2163 build_real_truncate (tree type
, REAL_VALUE_TYPE d
)
2165 return build_real (type
, real_value_truncate (TYPE_MODE (type
), d
));
2168 /* Return a new REAL_CST node whose type is TYPE
2169 and whose value is the integer value of the INTEGER_CST node I. */
2172 real_value_from_int_cst (const_tree type
, const_tree i
)
2176 /* Clear all bits of the real value type so that we can later do
2177 bitwise comparisons to see if two values are the same. */
2178 memset (&d
, 0, sizeof d
);
2180 real_from_integer (&d
, type
? TYPE_MODE (type
) : VOIDmode
, wi::to_wide (i
),
2181 TYPE_SIGN (TREE_TYPE (i
)));
2185 /* Given a tree representing an integer constant I, return a tree
2186 representing the same value as a floating-point constant of type TYPE. */
2189 build_real_from_int_cst (tree type
, const_tree i
)
2192 int overflow
= TREE_OVERFLOW (i
);
2194 v
= build_real (type
, real_value_from_int_cst (type
, i
));
2196 TREE_OVERFLOW (v
) |= overflow
;
2200 /* Return a newly constructed STRING_CST node whose value is
2201 the LEN characters at STR.
2202 Note that for a C string literal, LEN should include the trailing NUL.
2203 The TREE_TYPE is not initialized. */
2206 build_string (int len
, const char *str
)
2211 /* Do not waste bytes provided by padding of struct tree_string. */
2212 length
= len
+ offsetof (struct tree_string
, str
) + 1;
2214 record_node_allocation_statistics (STRING_CST
, length
);
2216 s
= (tree
) ggc_internal_alloc (length
);
2218 memset (s
, 0, sizeof (struct tree_typed
));
2219 TREE_SET_CODE (s
, STRING_CST
);
2220 TREE_CONSTANT (s
) = 1;
2221 TREE_STRING_LENGTH (s
) = len
;
2222 memcpy (s
->string
.str
, str
, len
);
2223 s
->string
.str
[len
] = '\0';
2228 /* Return a newly constructed COMPLEX_CST node whose value is
2229 specified by the real and imaginary parts REAL and IMAG.
2230 Both REAL and IMAG should be constant nodes. TYPE, if specified,
2231 will be the type of the COMPLEX_CST; otherwise a new type will be made. */
2234 build_complex (tree type
, tree real
, tree imag
)
2236 gcc_assert (CONSTANT_CLASS_P (real
));
2237 gcc_assert (CONSTANT_CLASS_P (imag
));
2239 tree t
= make_node (COMPLEX_CST
);
2241 TREE_REALPART (t
) = real
;
2242 TREE_IMAGPART (t
) = imag
;
2243 TREE_TYPE (t
) = type
? type
: build_complex_type (TREE_TYPE (real
));
2244 TREE_OVERFLOW (t
) = TREE_OVERFLOW (real
) | TREE_OVERFLOW (imag
);
2248 /* Build a complex (inf +- 0i), such as for the result of cproj.
2249 TYPE is the complex tree type of the result. If NEG is true, the
2250 imaginary zero is negative. */
2253 build_complex_inf (tree type
, bool neg
)
2255 REAL_VALUE_TYPE rinf
, rzero
= dconst0
;
2259 return build_complex (type
, build_real (TREE_TYPE (type
), rinf
),
2260 build_real (TREE_TYPE (type
), rzero
));
2263 /* Return the constant 1 in type TYPE. If TYPE has several elements, each
2264 element is set to 1. In particular, this is 1 + i for complex types. */
2267 build_each_one_cst (tree type
)
2269 if (TREE_CODE (type
) == COMPLEX_TYPE
)
2271 tree scalar
= build_one_cst (TREE_TYPE (type
));
2272 return build_complex (type
, scalar
, scalar
);
2275 return build_one_cst (type
);
2278 /* Return a constant of arithmetic type TYPE which is the
2279 multiplicative identity of the set TYPE. */
2282 build_one_cst (tree type
)
2284 switch (TREE_CODE (type
))
2286 case INTEGER_TYPE
: case ENUMERAL_TYPE
: case BOOLEAN_TYPE
:
2287 case POINTER_TYPE
: case REFERENCE_TYPE
:
2289 return build_int_cst (type
, 1);
2292 return build_real (type
, dconst1
);
2294 case FIXED_POINT_TYPE
:
2295 /* We can only generate 1 for accum types. */
2296 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type
)));
2297 return build_fixed (type
, FCONST1 (TYPE_MODE (type
)));
2301 tree scalar
= build_one_cst (TREE_TYPE (type
));
2303 return build_vector_from_val (type
, scalar
);
2307 return build_complex (type
,
2308 build_one_cst (TREE_TYPE (type
)),
2309 build_zero_cst (TREE_TYPE (type
)));
2316 /* Return an integer of type TYPE containing all 1's in as much precision as
2317 it contains, or a complex or vector whose subparts are such integers. */
2320 build_all_ones_cst (tree type
)
2322 if (TREE_CODE (type
) == COMPLEX_TYPE
)
2324 tree scalar
= build_all_ones_cst (TREE_TYPE (type
));
2325 return build_complex (type
, scalar
, scalar
);
2328 return build_minus_one_cst (type
);
2331 /* Return a constant of arithmetic type TYPE which is the
2332 opposite of the multiplicative identity of the set TYPE. */
2335 build_minus_one_cst (tree type
)
2337 switch (TREE_CODE (type
))
2339 case INTEGER_TYPE
: case ENUMERAL_TYPE
: case BOOLEAN_TYPE
:
2340 case POINTER_TYPE
: case REFERENCE_TYPE
:
2342 return build_int_cst (type
, -1);
2345 return build_real (type
, dconstm1
);
2347 case FIXED_POINT_TYPE
:
2348 /* We can only generate 1 for accum types. */
2349 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type
)));
2350 return build_fixed (type
,
2351 fixed_from_double_int (double_int_minus_one
,
2352 SCALAR_TYPE_MODE (type
)));
2356 tree scalar
= build_minus_one_cst (TREE_TYPE (type
));
2358 return build_vector_from_val (type
, scalar
);
2362 return build_complex (type
,
2363 build_minus_one_cst (TREE_TYPE (type
)),
2364 build_zero_cst (TREE_TYPE (type
)));
2371 /* Build 0 constant of type TYPE. This is used by constructor folding
2372 and thus the constant should be represented in memory by
2376 build_zero_cst (tree type
)
2378 switch (TREE_CODE (type
))
2380 case INTEGER_TYPE
: case ENUMERAL_TYPE
: case BOOLEAN_TYPE
:
2381 case POINTER_TYPE
: case REFERENCE_TYPE
:
2382 case OFFSET_TYPE
: case NULLPTR_TYPE
:
2383 return build_int_cst (type
, 0);
2386 return build_real (type
, dconst0
);
2388 case FIXED_POINT_TYPE
:
2389 return build_fixed (type
, FCONST0 (TYPE_MODE (type
)));
2393 tree scalar
= build_zero_cst (TREE_TYPE (type
));
2395 return build_vector_from_val (type
, scalar
);
2400 tree zero
= build_zero_cst (TREE_TYPE (type
));
2402 return build_complex (type
, zero
, zero
);
2406 if (!AGGREGATE_TYPE_P (type
))
2407 return fold_convert (type
, integer_zero_node
);
2408 return build_constructor (type
, NULL
);
2413 /* Build a BINFO with LEN language slots. */
2416 make_tree_binfo (unsigned base_binfos MEM_STAT_DECL
)
2419 size_t length
= (offsetof (struct tree_binfo
, base_binfos
)
2420 + vec
<tree
, va_gc
>::embedded_size (base_binfos
));
2422 record_node_allocation_statistics (TREE_BINFO
, length
);
2424 t
= ggc_alloc_tree_node_stat (length PASS_MEM_STAT
);
2426 memset (t
, 0, offsetof (struct tree_binfo
, base_binfos
));
2428 TREE_SET_CODE (t
, TREE_BINFO
);
2430 BINFO_BASE_BINFOS (t
)->embedded_init (base_binfos
);
2435 /* Create a CASE_LABEL_EXPR tree node and return it. */
2438 build_case_label (tree low_value
, tree high_value
, tree label_decl
)
2440 tree t
= make_node (CASE_LABEL_EXPR
);
2442 TREE_TYPE (t
) = void_type_node
;
2443 SET_EXPR_LOCATION (t
, DECL_SOURCE_LOCATION (label_decl
));
2445 CASE_LOW (t
) = low_value
;
2446 CASE_HIGH (t
) = high_value
;
2447 CASE_LABEL (t
) = label_decl
;
2448 CASE_CHAIN (t
) = NULL_TREE
;
2453 /* Build a newly constructed INTEGER_CST node. LEN and EXT_LEN are the
2454 values of TREE_INT_CST_NUNITS and TREE_INT_CST_EXT_NUNITS respectively.
2455 The latter determines the length of the HOST_WIDE_INT vector. */
2458 make_int_cst (int len
, int ext_len MEM_STAT_DECL
)
2461 int length
= ((ext_len
- 1) * sizeof (HOST_WIDE_INT
)
2462 + sizeof (struct tree_int_cst
));
2465 record_node_allocation_statistics (INTEGER_CST
, length
);
2467 t
= ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT
);
2469 TREE_SET_CODE (t
, INTEGER_CST
);
2470 TREE_INT_CST_NUNITS (t
) = len
;
2471 TREE_INT_CST_EXT_NUNITS (t
) = ext_len
;
2472 /* to_offset can only be applied to trees that are offset_int-sized
2473 or smaller. EXT_LEN is correct if it fits, otherwise the constant
2474 must be exactly the precision of offset_int and so LEN is correct. */
2475 if (ext_len
<= OFFSET_INT_ELTS
)
2476 TREE_INT_CST_OFFSET_NUNITS (t
) = ext_len
;
2478 TREE_INT_CST_OFFSET_NUNITS (t
) = len
;
2480 TREE_CONSTANT (t
) = 1;
2485 /* Build a newly constructed TREE_VEC node of length LEN. */
2488 make_tree_vec (int len MEM_STAT_DECL
)
2491 size_t length
= (len
- 1) * sizeof (tree
) + sizeof (struct tree_vec
);
2493 record_node_allocation_statistics (TREE_VEC
, length
);
2495 t
= ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT
);
2497 TREE_SET_CODE (t
, TREE_VEC
);
2498 TREE_VEC_LENGTH (t
) = len
;
2503 /* Grow a TREE_VEC node to new length LEN. */
2506 grow_tree_vec (tree v
, int len MEM_STAT_DECL
)
2508 gcc_assert (TREE_CODE (v
) == TREE_VEC
);
2510 int oldlen
= TREE_VEC_LENGTH (v
);
2511 gcc_assert (len
> oldlen
);
2513 size_t oldlength
= (oldlen
- 1) * sizeof (tree
) + sizeof (struct tree_vec
);
2514 size_t length
= (len
- 1) * sizeof (tree
) + sizeof (struct tree_vec
);
2516 record_node_allocation_statistics (TREE_VEC
, length
- oldlength
);
2518 v
= (tree
) ggc_realloc (v
, length PASS_MEM_STAT
);
2520 TREE_VEC_LENGTH (v
) = len
;
2525 /* Return 1 if EXPR is the constant zero, whether it is integral, float or
2526 fixed, and scalar, complex or vector. */
2529 zerop (const_tree expr
)
2531 return (integer_zerop (expr
)
2532 || real_zerop (expr
)
2533 || fixed_zerop (expr
));
2536 /* Return 1 if EXPR is the integer constant zero or a complex constant
2537 of zero, or a location wrapper for such a constant. */
2540 integer_zerop (const_tree expr
)
2542 STRIP_ANY_LOCATION_WRAPPER (expr
);
2544 switch (TREE_CODE (expr
))
2547 return wi::to_wide (expr
) == 0;
2549 return (integer_zerop (TREE_REALPART (expr
))
2550 && integer_zerop (TREE_IMAGPART (expr
)));
2552 return (VECTOR_CST_NPATTERNS (expr
) == 1
2553 && VECTOR_CST_DUPLICATE_P (expr
)
2554 && integer_zerop (VECTOR_CST_ENCODED_ELT (expr
, 0)));
2560 /* Return 1 if EXPR is the integer constant one or the corresponding
2561 complex constant, or a location wrapper for such a constant. */
2564 integer_onep (const_tree expr
)
2566 STRIP_ANY_LOCATION_WRAPPER (expr
);
2568 switch (TREE_CODE (expr
))
2571 return wi::eq_p (wi::to_widest (expr
), 1);
2573 return (integer_onep (TREE_REALPART (expr
))
2574 && integer_zerop (TREE_IMAGPART (expr
)));
2576 return (VECTOR_CST_NPATTERNS (expr
) == 1
2577 && VECTOR_CST_DUPLICATE_P (expr
)
2578 && integer_onep (VECTOR_CST_ENCODED_ELT (expr
, 0)));
2584 /* Return 1 if EXPR is the integer constant one. For complex and vector,
2585 return 1 if every piece is the integer constant one.
2586 Also return 1 for location wrappers for such a constant. */
2589 integer_each_onep (const_tree expr
)
2591 STRIP_ANY_LOCATION_WRAPPER (expr
);
2593 if (TREE_CODE (expr
) == COMPLEX_CST
)
2594 return (integer_onep (TREE_REALPART (expr
))
2595 && integer_onep (TREE_IMAGPART (expr
)));
2597 return integer_onep (expr
);
2600 /* Return 1 if EXPR is an integer containing all 1's in as much precision as
2601 it contains, or a complex or vector whose subparts are such integers,
2602 or a location wrapper for such a constant. */
2605 integer_all_onesp (const_tree expr
)
2607 STRIP_ANY_LOCATION_WRAPPER (expr
);
2609 if (TREE_CODE (expr
) == COMPLEX_CST
2610 && integer_all_onesp (TREE_REALPART (expr
))
2611 && integer_all_onesp (TREE_IMAGPART (expr
)))
2614 else if (TREE_CODE (expr
) == VECTOR_CST
)
2615 return (VECTOR_CST_NPATTERNS (expr
) == 1
2616 && VECTOR_CST_DUPLICATE_P (expr
)
2617 && integer_all_onesp (VECTOR_CST_ENCODED_ELT (expr
, 0)));
2619 else if (TREE_CODE (expr
) != INTEGER_CST
)
2622 return (wi::max_value (TYPE_PRECISION (TREE_TYPE (expr
)), UNSIGNED
)
2623 == wi::to_wide (expr
));
2626 /* Return 1 if EXPR is the integer constant minus one, or a location wrapper
2627 for such a constant. */
2630 integer_minus_onep (const_tree expr
)
2632 STRIP_ANY_LOCATION_WRAPPER (expr
);
2634 if (TREE_CODE (expr
) == COMPLEX_CST
)
2635 return (integer_all_onesp (TREE_REALPART (expr
))
2636 && integer_zerop (TREE_IMAGPART (expr
)));
2638 return integer_all_onesp (expr
);
2641 /* Return 1 if EXPR is an integer constant that is a power of 2 (i.e., has only
2642 one bit on), or a location wrapper for such a constant. */
2645 integer_pow2p (const_tree expr
)
2647 STRIP_ANY_LOCATION_WRAPPER (expr
);
2649 if (TREE_CODE (expr
) == COMPLEX_CST
2650 && integer_pow2p (TREE_REALPART (expr
))
2651 && integer_zerop (TREE_IMAGPART (expr
)))
2654 if (TREE_CODE (expr
) != INTEGER_CST
)
2657 return wi::popcount (wi::to_wide (expr
)) == 1;
2660 /* Return 1 if EXPR is an integer constant other than zero or a
2661 complex constant other than zero, or a location wrapper for such a
2665 integer_nonzerop (const_tree expr
)
2667 STRIP_ANY_LOCATION_WRAPPER (expr
);
2669 return ((TREE_CODE (expr
) == INTEGER_CST
2670 && wi::to_wide (expr
) != 0)
2671 || (TREE_CODE (expr
) == COMPLEX_CST
2672 && (integer_nonzerop (TREE_REALPART (expr
))
2673 || integer_nonzerop (TREE_IMAGPART (expr
)))));
2676 /* Return 1 if EXPR is the integer constant one. For vector,
2677 return 1 if every piece is the integer constant minus one
2678 (representing the value TRUE).
2679 Also return 1 for location wrappers for such a constant. */
2682 integer_truep (const_tree expr
)
2684 STRIP_ANY_LOCATION_WRAPPER (expr
);
2686 if (TREE_CODE (expr
) == VECTOR_CST
)
2687 return integer_all_onesp (expr
);
2688 return integer_onep (expr
);
2691 /* Return 1 if EXPR is the fixed-point constant zero, or a location wrapper
2692 for such a constant. */
2695 fixed_zerop (const_tree expr
)
2697 STRIP_ANY_LOCATION_WRAPPER (expr
);
2699 return (TREE_CODE (expr
) == FIXED_CST
2700 && TREE_FIXED_CST (expr
).data
.is_zero ());
2703 /* Return the power of two represented by a tree node known to be a
2707 tree_log2 (const_tree expr
)
2709 if (TREE_CODE (expr
) == COMPLEX_CST
)
2710 return tree_log2 (TREE_REALPART (expr
));
2712 return wi::exact_log2 (wi::to_wide (expr
));
2715 /* Similar, but return the largest integer Y such that 2 ** Y is less
2716 than or equal to EXPR. */
2719 tree_floor_log2 (const_tree expr
)
2721 if (TREE_CODE (expr
) == COMPLEX_CST
)
2722 return tree_log2 (TREE_REALPART (expr
));
2724 return wi::floor_log2 (wi::to_wide (expr
));
2727 /* Return number of known trailing zero bits in EXPR, or, if the value of
2728 EXPR is known to be zero, the precision of it's type. */
2731 tree_ctz (const_tree expr
)
2733 if (!INTEGRAL_TYPE_P (TREE_TYPE (expr
))
2734 && !POINTER_TYPE_P (TREE_TYPE (expr
)))
2737 unsigned int ret1
, ret2
, prec
= TYPE_PRECISION (TREE_TYPE (expr
));
2738 switch (TREE_CODE (expr
))
2741 ret1
= wi::ctz (wi::to_wide (expr
));
2742 return MIN (ret1
, prec
);
2744 ret1
= wi::ctz (get_nonzero_bits (expr
));
2745 return MIN (ret1
, prec
);
2752 ret1
= tree_ctz (TREE_OPERAND (expr
, 0));
2755 ret2
= tree_ctz (TREE_OPERAND (expr
, 1));
2756 return MIN (ret1
, ret2
);
2757 case POINTER_PLUS_EXPR
:
2758 ret1
= tree_ctz (TREE_OPERAND (expr
, 0));
2759 ret2
= tree_ctz (TREE_OPERAND (expr
, 1));
2760 /* Second operand is sizetype, which could be in theory
2761 wider than pointer's precision. Make sure we never
2762 return more than prec. */
2763 ret2
= MIN (ret2
, prec
);
2764 return MIN (ret1
, ret2
);
2766 ret1
= tree_ctz (TREE_OPERAND (expr
, 0));
2767 ret2
= tree_ctz (TREE_OPERAND (expr
, 1));
2768 return MAX (ret1
, ret2
);
2770 ret1
= tree_ctz (TREE_OPERAND (expr
, 0));
2771 ret2
= tree_ctz (TREE_OPERAND (expr
, 1));
2772 return MIN (ret1
+ ret2
, prec
);
2774 ret1
= tree_ctz (TREE_OPERAND (expr
, 0));
2775 if (tree_fits_uhwi_p (TREE_OPERAND (expr
, 1))
2776 && (tree_to_uhwi (TREE_OPERAND (expr
, 1)) < prec
))
2778 ret2
= tree_to_uhwi (TREE_OPERAND (expr
, 1));
2779 return MIN (ret1
+ ret2
, prec
);
2783 if (tree_fits_uhwi_p (TREE_OPERAND (expr
, 1))
2784 && (tree_to_uhwi (TREE_OPERAND (expr
, 1)) < prec
))
2786 ret1
= tree_ctz (TREE_OPERAND (expr
, 0));
2787 ret2
= tree_to_uhwi (TREE_OPERAND (expr
, 1));
2792 case TRUNC_DIV_EXPR
:
2794 case FLOOR_DIV_EXPR
:
2795 case ROUND_DIV_EXPR
:
2796 case EXACT_DIV_EXPR
:
2797 if (TREE_CODE (TREE_OPERAND (expr
, 1)) == INTEGER_CST
2798 && tree_int_cst_sgn (TREE_OPERAND (expr
, 1)) == 1)
2800 int l
= tree_log2 (TREE_OPERAND (expr
, 1));
2803 ret1
= tree_ctz (TREE_OPERAND (expr
, 0));
2811 ret1
= tree_ctz (TREE_OPERAND (expr
, 0));
2812 if (ret1
&& ret1
== TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr
, 0))))
2814 return MIN (ret1
, prec
);
2816 return tree_ctz (TREE_OPERAND (expr
, 0));
2818 ret1
= tree_ctz (TREE_OPERAND (expr
, 1));
2821 ret2
= tree_ctz (TREE_OPERAND (expr
, 2));
2822 return MIN (ret1
, ret2
);
2824 return tree_ctz (TREE_OPERAND (expr
, 1));
2826 ret1
= get_pointer_alignment (CONST_CAST_TREE (expr
));
2827 if (ret1
> BITS_PER_UNIT
)
2829 ret1
= ctz_hwi (ret1
/ BITS_PER_UNIT
);
2830 return MIN (ret1
, prec
);
2838 /* Return 1 if EXPR is the real constant zero. Trailing zeroes matter for
2839 decimal float constants, so don't return 1 for them.
2840 Also return 1 for location wrappers around such a constant. */
2843 real_zerop (const_tree expr
)
2845 STRIP_ANY_LOCATION_WRAPPER (expr
);
2847 switch (TREE_CODE (expr
))
2850 return real_equal (&TREE_REAL_CST (expr
), &dconst0
)
2851 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr
))));
2853 return real_zerop (TREE_REALPART (expr
))
2854 && real_zerop (TREE_IMAGPART (expr
));
2857 /* Don't simply check for a duplicate because the predicate
2858 accepts both +0.0 and -0.0. */
2859 unsigned count
= vector_cst_encoded_nelts (expr
);
2860 for (unsigned int i
= 0; i
< count
; ++i
)
2861 if (!real_zerop (VECTOR_CST_ENCODED_ELT (expr
, i
)))
2870 /* Return 1 if EXPR is the real constant one in real or complex form.
2871 Trailing zeroes matter for decimal float constants, so don't return
2873 Also return 1 for location wrappers around such a constant. */
2876 real_onep (const_tree expr
)
2878 STRIP_ANY_LOCATION_WRAPPER (expr
);
2880 switch (TREE_CODE (expr
))
2883 return real_equal (&TREE_REAL_CST (expr
), &dconst1
)
2884 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr
))));
2886 return real_onep (TREE_REALPART (expr
))
2887 && real_zerop (TREE_IMAGPART (expr
));
2889 return (VECTOR_CST_NPATTERNS (expr
) == 1
2890 && VECTOR_CST_DUPLICATE_P (expr
)
2891 && real_onep (VECTOR_CST_ENCODED_ELT (expr
, 0)));
2897 /* Return 1 if EXPR is the real constant minus one. Trailing zeroes
2898 matter for decimal float constants, so don't return 1 for them.
2899 Also return 1 for location wrappers around such a constant. */
2902 real_minus_onep (const_tree expr
)
2904 STRIP_ANY_LOCATION_WRAPPER (expr
);
2906 switch (TREE_CODE (expr
))
2909 return real_equal (&TREE_REAL_CST (expr
), &dconstm1
)
2910 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr
))));
2912 return real_minus_onep (TREE_REALPART (expr
))
2913 && real_zerop (TREE_IMAGPART (expr
));
2915 return (VECTOR_CST_NPATTERNS (expr
) == 1
2916 && VECTOR_CST_DUPLICATE_P (expr
)
2917 && real_minus_onep (VECTOR_CST_ENCODED_ELT (expr
, 0)));
2923 /* Nonzero if EXP is a constant or a cast of a constant. */
2926 really_constant_p (const_tree exp
)
2928 /* This is not quite the same as STRIP_NOPS. It does more. */
2929 while (CONVERT_EXPR_P (exp
)
2930 || TREE_CODE (exp
) == NON_LVALUE_EXPR
)
2931 exp
= TREE_OPERAND (exp
, 0);
2932 return TREE_CONSTANT (exp
);
2935 /* Return true if T holds a polynomial pointer difference, storing it in
2936 *VALUE if so. A true return means that T's precision is no greater
2937 than 64 bits, which is the largest address space we support, so *VALUE
2938 never loses precision. However, the signedness of the result does
2939 not necessarily match the signedness of T: sometimes an unsigned type
2940 like sizetype is used to encode a value that is actually negative. */
2943 ptrdiff_tree_p (const_tree t
, poly_int64_pod
*value
)
2947 if (TREE_CODE (t
) == INTEGER_CST
)
2949 if (!cst_and_fits_in_hwi (t
))
2951 *value
= int_cst_value (t
);
2954 if (POLY_INT_CST_P (t
))
2956 for (unsigned int i
= 0; i
< NUM_POLY_INT_COEFFS
; ++i
)
2957 if (!cst_and_fits_in_hwi (POLY_INT_CST_COEFF (t
, i
)))
2959 for (unsigned int i
= 0; i
< NUM_POLY_INT_COEFFS
; ++i
)
2960 value
->coeffs
[i
] = int_cst_value (POLY_INT_CST_COEFF (t
, i
));
2967 tree_to_poly_int64 (const_tree t
)
2969 gcc_assert (tree_fits_poly_int64_p (t
));
2970 if (POLY_INT_CST_P (t
))
2971 return poly_int_cst_value (t
).force_shwi ();
2972 return TREE_INT_CST_LOW (t
);
2976 tree_to_poly_uint64 (const_tree t
)
2978 gcc_assert (tree_fits_poly_uint64_p (t
));
2979 if (POLY_INT_CST_P (t
))
2980 return poly_int_cst_value (t
).force_uhwi ();
2981 return TREE_INT_CST_LOW (t
);
2984 /* Return first list element whose TREE_VALUE is ELEM.
2985 Return 0 if ELEM is not in LIST. */
2988 value_member (tree elem
, tree list
)
2992 if (elem
== TREE_VALUE (list
))
2994 list
= TREE_CHAIN (list
);
2999 /* Return first list element whose TREE_PURPOSE is ELEM.
3000 Return 0 if ELEM is not in LIST. */
3003 purpose_member (const_tree elem
, tree list
)
3007 if (elem
== TREE_PURPOSE (list
))
3009 list
= TREE_CHAIN (list
);
3014 /* Return true if ELEM is in V. */
3017 vec_member (const_tree elem
, vec
<tree
, va_gc
> *v
)
3021 FOR_EACH_VEC_SAFE_ELT (v
, ix
, t
)
3027 /* Returns element number IDX (zero-origin) of chain CHAIN, or
3031 chain_index (int idx
, tree chain
)
3033 for (; chain
&& idx
> 0; --idx
)
3034 chain
= TREE_CHAIN (chain
);
3038 /* Return nonzero if ELEM is part of the chain CHAIN. */
3041 chain_member (const_tree elem
, const_tree chain
)
3047 chain
= DECL_CHAIN (chain
);
3053 /* Return the length of a chain of nodes chained through TREE_CHAIN.
3054 We expect a null pointer to mark the end of the chain.
3055 This is the Lisp primitive `length'. */
3058 list_length (const_tree t
)
3061 #ifdef ENABLE_TREE_CHECKING
3069 #ifdef ENABLE_TREE_CHECKING
3072 gcc_assert (p
!= q
);
3080 /* Returns the first FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
3081 UNION_TYPE TYPE, or NULL_TREE if none. */
3084 first_field (const_tree type
)
3086 tree t
= TYPE_FIELDS (type
);
3087 while (t
&& TREE_CODE (t
) != FIELD_DECL
)
3092 /* Concatenate two chains of nodes (chained through TREE_CHAIN)
3093 by modifying the last node in chain 1 to point to chain 2.
3094 This is the Lisp primitive `nconc'. */
3097 chainon (tree op1
, tree op2
)
3106 for (t1
= op1
; TREE_CHAIN (t1
); t1
= TREE_CHAIN (t1
))
3108 TREE_CHAIN (t1
) = op2
;
3110 #ifdef ENABLE_TREE_CHECKING
3113 for (t2
= op2
; t2
; t2
= TREE_CHAIN (t2
))
3114 gcc_assert (t2
!= t1
);
3121 /* Return the last node in a chain of nodes (chained through TREE_CHAIN). */
3124 tree_last (tree chain
)
3128 while ((next
= TREE_CHAIN (chain
)))
3133 /* Reverse the order of elements in the chain T,
3134 and return the new head of the chain (old last element). */
3139 tree prev
= 0, decl
, next
;
3140 for (decl
= t
; decl
; decl
= next
)
3142 /* We shouldn't be using this function to reverse BLOCK chains; we
3143 have blocks_nreverse for that. */
3144 gcc_checking_assert (TREE_CODE (decl
) != BLOCK
);
3145 next
= TREE_CHAIN (decl
);
3146 TREE_CHAIN (decl
) = prev
;
3152 /* Return a newly created TREE_LIST node whose
3153 purpose and value fields are PARM and VALUE. */
3156 build_tree_list (tree parm
, tree value MEM_STAT_DECL
)
3158 tree t
= make_node (TREE_LIST PASS_MEM_STAT
);
3159 TREE_PURPOSE (t
) = parm
;
3160 TREE_VALUE (t
) = value
;
3164 /* Build a chain of TREE_LIST nodes from a vector. */
3167 build_tree_list_vec (const vec
<tree
, va_gc
> *vec MEM_STAT_DECL
)
3169 tree ret
= NULL_TREE
;
3173 FOR_EACH_VEC_SAFE_ELT (vec
, i
, t
)
3175 *pp
= build_tree_list (NULL
, t PASS_MEM_STAT
);
3176 pp
= &TREE_CHAIN (*pp
);
3181 /* Return a newly created TREE_LIST node whose
3182 purpose and value fields are PURPOSE and VALUE
3183 and whose TREE_CHAIN is CHAIN. */
3186 tree_cons (tree purpose
, tree value
, tree chain MEM_STAT_DECL
)
3190 node
= ggc_alloc_tree_node_stat (sizeof (struct tree_list
) PASS_MEM_STAT
);
3191 memset (node
, 0, sizeof (struct tree_common
));
3193 record_node_allocation_statistics (TREE_LIST
, sizeof (struct tree_list
));
3195 TREE_SET_CODE (node
, TREE_LIST
);
3196 TREE_CHAIN (node
) = chain
;
3197 TREE_PURPOSE (node
) = purpose
;
3198 TREE_VALUE (node
) = value
;
3202 /* Return the values of the elements of a CONSTRUCTOR as a vector of
3206 ctor_to_vec (tree ctor
)
3208 vec
<tree
, va_gc
> *vec
;
3209 vec_alloc (vec
, CONSTRUCTOR_NELTS (ctor
));
3213 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor
), ix
, val
)
3214 vec
->quick_push (val
);
3219 /* Return the size nominally occupied by an object of type TYPE
3220 when it resides in memory. The value is measured in units of bytes,
3221 and its data type is that normally used for type sizes
3222 (which is the first type created by make_signed_type or
3223 make_unsigned_type). */
3226 size_in_bytes_loc (location_t loc
, const_tree type
)
3230 if (type
== error_mark_node
)
3231 return integer_zero_node
;
3233 type
= TYPE_MAIN_VARIANT (type
);
3234 t
= TYPE_SIZE_UNIT (type
);
3238 lang_hooks
.types
.incomplete_type_error (loc
, NULL_TREE
, type
);
3239 return size_zero_node
;
3245 /* Return the size of TYPE (in bytes) as a wide integer
3246 or return -1 if the size can vary or is larger than an integer. */
3249 int_size_in_bytes (const_tree type
)
3253 if (type
== error_mark_node
)
3256 type
= TYPE_MAIN_VARIANT (type
);
3257 t
= TYPE_SIZE_UNIT (type
);
3259 if (t
&& tree_fits_uhwi_p (t
))
3260 return TREE_INT_CST_LOW (t
);
3265 /* Return the maximum size of TYPE (in bytes) as a wide integer
3266 or return -1 if the size can vary or is larger than an integer. */
3269 max_int_size_in_bytes (const_tree type
)
3271 HOST_WIDE_INT size
= -1;
3274 /* If this is an array type, check for a possible MAX_SIZE attached. */
3276 if (TREE_CODE (type
) == ARRAY_TYPE
)
3278 size_tree
= TYPE_ARRAY_MAX_SIZE (type
);
3280 if (size_tree
&& tree_fits_uhwi_p (size_tree
))
3281 size
= tree_to_uhwi (size_tree
);
3284 /* If we still haven't been able to get a size, see if the language
3285 can compute a maximum size. */
3289 size_tree
= lang_hooks
.types
.max_size (type
);
3291 if (size_tree
&& tree_fits_uhwi_p (size_tree
))
3292 size
= tree_to_uhwi (size_tree
);
3298 /* Return the bit position of FIELD, in bits from the start of the record.
3299 This is a tree of type bitsizetype. */
3302 bit_position (const_tree field
)
3304 return bit_from_pos (DECL_FIELD_OFFSET (field
),
3305 DECL_FIELD_BIT_OFFSET (field
));
3308 /* Return the byte position of FIELD, in bytes from the start of the record.
3309 This is a tree of type sizetype. */
3312 byte_position (const_tree field
)
3314 return byte_from_pos (DECL_FIELD_OFFSET (field
),
3315 DECL_FIELD_BIT_OFFSET (field
));
3318 /* Likewise, but return as an integer. It must be representable in
3319 that way (since it could be a signed value, we don't have the
3320 option of returning -1 like int_size_in_byte can. */
3323 int_byte_position (const_tree field
)
3325 return tree_to_shwi (byte_position (field
));
3328 /* Return the strictest alignment, in bits, that T is known to have. */
3331 expr_align (const_tree t
)
3333 unsigned int align0
, align1
;
3335 switch (TREE_CODE (t
))
3337 CASE_CONVERT
: case NON_LVALUE_EXPR
:
3338 /* If we have conversions, we know that the alignment of the
3339 object must meet each of the alignments of the types. */
3340 align0
= expr_align (TREE_OPERAND (t
, 0));
3341 align1
= TYPE_ALIGN (TREE_TYPE (t
));
3342 return MAX (align0
, align1
);
3344 case SAVE_EXPR
: case COMPOUND_EXPR
: case MODIFY_EXPR
:
3345 case INIT_EXPR
: case TARGET_EXPR
: case WITH_CLEANUP_EXPR
:
3346 case CLEANUP_POINT_EXPR
:
3347 /* These don't change the alignment of an object. */
3348 return expr_align (TREE_OPERAND (t
, 0));
3351 /* The best we can do is say that the alignment is the least aligned
3353 align0
= expr_align (TREE_OPERAND (t
, 1));
3354 align1
= expr_align (TREE_OPERAND (t
, 2));
3355 return MIN (align0
, align1
);
3357 /* FIXME: LABEL_DECL and CONST_DECL never have DECL_ALIGN set
3358 meaningfully, it's always 1. */
3359 case LABEL_DECL
: case CONST_DECL
:
3360 case VAR_DECL
: case PARM_DECL
: case RESULT_DECL
:
3362 gcc_assert (DECL_ALIGN (t
) != 0);
3363 return DECL_ALIGN (t
);
3369 /* Otherwise take the alignment from that of the type. */
3370 return TYPE_ALIGN (TREE_TYPE (t
));
3373 /* Return, as a tree node, the number of elements for TYPE (which is an
3374 ARRAY_TYPE) minus one. This counts only elements of the top array. */
3377 array_type_nelts (const_tree type
)
3379 tree index_type
, min
, max
;
3381 /* If they did it with unspecified bounds, then we should have already
3382 given an error about it before we got here. */
3383 if (! TYPE_DOMAIN (type
))
3384 return error_mark_node
;
3386 index_type
= TYPE_DOMAIN (type
);
3387 min
= TYPE_MIN_VALUE (index_type
);
3388 max
= TYPE_MAX_VALUE (index_type
);
3390 /* TYPE_MAX_VALUE may not be set if the array has unknown length. */
3392 return error_mark_node
;
3394 return (integer_zerop (min
)
3396 : fold_build2 (MINUS_EXPR
, TREE_TYPE (max
), max
, min
));
3399 /* If arg is static -- a reference to an object in static storage -- then
3400 return the object. This is not the same as the C meaning of `static'.
3401 If arg isn't static, return NULL. */
3406 switch (TREE_CODE (arg
))
3409 /* Nested functions are static, even though taking their address will
3410 involve a trampoline as we unnest the nested function and create
3411 the trampoline on the tree level. */
3415 return ((TREE_STATIC (arg
) || DECL_EXTERNAL (arg
))
3416 && ! DECL_THREAD_LOCAL_P (arg
)
3417 && ! DECL_DLLIMPORT_P (arg
)
3421 return ((TREE_STATIC (arg
) || DECL_EXTERNAL (arg
))
3425 return TREE_STATIC (arg
) ? arg
: NULL
;
3432 /* If the thing being referenced is not a field, then it is
3433 something language specific. */
3434 gcc_assert (TREE_CODE (TREE_OPERAND (arg
, 1)) == FIELD_DECL
);
3436 /* If we are referencing a bitfield, we can't evaluate an
3437 ADDR_EXPR at compile time and so it isn't a constant. */
3438 if (DECL_BIT_FIELD (TREE_OPERAND (arg
, 1)))
3441 return staticp (TREE_OPERAND (arg
, 0));
3447 return TREE_CONSTANT (TREE_OPERAND (arg
, 0)) ? arg
: NULL
;
3450 case ARRAY_RANGE_REF
:
3451 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (arg
))) == INTEGER_CST
3452 && TREE_CODE (TREE_OPERAND (arg
, 1)) == INTEGER_CST
)
3453 return staticp (TREE_OPERAND (arg
, 0));
3457 case COMPOUND_LITERAL_EXPR
:
3458 return TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (arg
)) ? arg
: NULL
;
3468 /* Return whether OP is a DECL whose address is function-invariant. */
3471 decl_address_invariant_p (const_tree op
)
3473 /* The conditions below are slightly less strict than the one in
3476 switch (TREE_CODE (op
))
3485 if ((TREE_STATIC (op
) || DECL_EXTERNAL (op
))
3486 || DECL_THREAD_LOCAL_P (op
)
3487 || DECL_CONTEXT (op
) == current_function_decl
3488 || decl_function_context (op
) == current_function_decl
)
3493 if ((TREE_STATIC (op
) || DECL_EXTERNAL (op
))
3494 || decl_function_context (op
) == current_function_decl
)
3505 /* Return whether OP is a DECL whose address is interprocedural-invariant. */
3508 decl_address_ip_invariant_p (const_tree op
)
3510 /* The conditions below are slightly less strict than the one in
3513 switch (TREE_CODE (op
))
3521 if (((TREE_STATIC (op
) || DECL_EXTERNAL (op
))
3522 && !DECL_DLLIMPORT_P (op
))
3523 || DECL_THREAD_LOCAL_P (op
))
3528 if ((TREE_STATIC (op
) || DECL_EXTERNAL (op
)))
3540 /* Return true if T is function-invariant (internal function, does
3541 not handle arithmetic; that's handled in skip_simple_arithmetic and
3542 tree_invariant_p). */
3545 tree_invariant_p_1 (tree t
)
3549 if (TREE_CONSTANT (t
)
3550 || (TREE_READONLY (t
) && !TREE_SIDE_EFFECTS (t
)))
3553 switch (TREE_CODE (t
))
3559 op
= TREE_OPERAND (t
, 0);
3560 while (handled_component_p (op
))
3562 switch (TREE_CODE (op
))
3565 case ARRAY_RANGE_REF
:
3566 if (!tree_invariant_p (TREE_OPERAND (op
, 1))
3567 || TREE_OPERAND (op
, 2) != NULL_TREE
3568 || TREE_OPERAND (op
, 3) != NULL_TREE
)
3573 if (TREE_OPERAND (op
, 2) != NULL_TREE
)
3579 op
= TREE_OPERAND (op
, 0);
3582 return CONSTANT_CLASS_P (op
) || decl_address_invariant_p (op
);
3591 /* Return true if T is function-invariant. */
3594 tree_invariant_p (tree t
)
3596 tree inner
= skip_simple_arithmetic (t
);
3597 return tree_invariant_p_1 (inner
);
3600 /* Wrap a SAVE_EXPR around EXPR, if appropriate.
3601 Do this to any expression which may be used in more than one place,
3602 but must be evaluated only once.
3604 Normally, expand_expr would reevaluate the expression each time.
3605 Calling save_expr produces something that is evaluated and recorded
3606 the first time expand_expr is called on it. Subsequent calls to
3607 expand_expr just reuse the recorded value.
3609 The call to expand_expr that generates code that actually computes
3610 the value is the first call *at compile time*. Subsequent calls
3611 *at compile time* generate code to use the saved value.
3612 This produces correct result provided that *at run time* control
3613 always flows through the insns made by the first expand_expr
3614 before reaching the other places where the save_expr was evaluated.
3615 You, the caller of save_expr, must make sure this is so.
3617 Constants, and certain read-only nodes, are returned with no
3618 SAVE_EXPR because that is safe. Expressions containing placeholders
3619 are not touched; see tree.def for an explanation of what these
3623 save_expr (tree expr
)
3627 /* If the tree evaluates to a constant, then we don't want to hide that
3628 fact (i.e. this allows further folding, and direct checks for constants).
3629 However, a read-only object that has side effects cannot be bypassed.
3630 Since it is no problem to reevaluate literals, we just return the
3632 inner
= skip_simple_arithmetic (expr
);
3633 if (TREE_CODE (inner
) == ERROR_MARK
)
3636 if (tree_invariant_p_1 (inner
))
3639 /* If INNER contains a PLACEHOLDER_EXPR, we must evaluate it each time, since
3640 it means that the size or offset of some field of an object depends on
3641 the value within another field.
3643 Note that it must not be the case that EXPR contains both a PLACEHOLDER_EXPR
3644 and some variable since it would then need to be both evaluated once and
3645 evaluated more than once. Front-ends must assure this case cannot
3646 happen by surrounding any such subexpressions in their own SAVE_EXPR
3647 and forcing evaluation at the proper time. */
3648 if (contains_placeholder_p (inner
))
3651 expr
= build1_loc (EXPR_LOCATION (expr
), SAVE_EXPR
, TREE_TYPE (expr
), expr
);
3653 /* This expression might be placed ahead of a jump to ensure that the
3654 value was computed on both sides of the jump. So make sure it isn't
3655 eliminated as dead. */
3656 TREE_SIDE_EFFECTS (expr
) = 1;
3660 /* Look inside EXPR into any simple arithmetic operations. Return the
3661 outermost non-arithmetic or non-invariant node. */
3664 skip_simple_arithmetic (tree expr
)
3666 /* We don't care about whether this can be used as an lvalue in this
3668 while (TREE_CODE (expr
) == NON_LVALUE_EXPR
)
3669 expr
= TREE_OPERAND (expr
, 0);
3671 /* If we have simple operations applied to a SAVE_EXPR or to a SAVE_EXPR and
3672 a constant, it will be more efficient to not make another SAVE_EXPR since
3673 it will allow better simplification and GCSE will be able to merge the
3674 computations if they actually occur. */
3677 if (UNARY_CLASS_P (expr
))
3678 expr
= TREE_OPERAND (expr
, 0);
3679 else if (BINARY_CLASS_P (expr
))
3681 if (tree_invariant_p (TREE_OPERAND (expr
, 1)))
3682 expr
= TREE_OPERAND (expr
, 0);
3683 else if (tree_invariant_p (TREE_OPERAND (expr
, 0)))
3684 expr
= TREE_OPERAND (expr
, 1);
3695 /* Look inside EXPR into simple arithmetic operations involving constants.
3696 Return the outermost non-arithmetic or non-constant node. */
3699 skip_simple_constant_arithmetic (tree expr
)
3701 while (TREE_CODE (expr
) == NON_LVALUE_EXPR
)
3702 expr
= TREE_OPERAND (expr
, 0);
3706 if (UNARY_CLASS_P (expr
))
3707 expr
= TREE_OPERAND (expr
, 0);
3708 else if (BINARY_CLASS_P (expr
))
3710 if (TREE_CONSTANT (TREE_OPERAND (expr
, 1)))
3711 expr
= TREE_OPERAND (expr
, 0);
3712 else if (TREE_CONSTANT (TREE_OPERAND (expr
, 0)))
3713 expr
= TREE_OPERAND (expr
, 1);
3724 /* Return which tree structure is used by T. */
3726 enum tree_node_structure_enum
3727 tree_node_structure (const_tree t
)
3729 const enum tree_code code
= TREE_CODE (t
);
3730 return tree_node_structure_for_code (code
);
3733 /* Set various status flags when building a CALL_EXPR object T. */
3736 process_call_operands (tree t
)
3738 bool side_effects
= TREE_SIDE_EFFECTS (t
);
3739 bool read_only
= false;
3740 int i
= call_expr_flags (t
);
3742 /* Calls have side-effects, except those to const or pure functions. */
3743 if ((i
& ECF_LOOPING_CONST_OR_PURE
) || !(i
& (ECF_CONST
| ECF_PURE
)))
3744 side_effects
= true;
3745 /* Propagate TREE_READONLY of arguments for const functions. */
3749 if (!side_effects
|| read_only
)
3750 for (i
= 1; i
< TREE_OPERAND_LENGTH (t
); i
++)
3752 tree op
= TREE_OPERAND (t
, i
);
3753 if (op
&& TREE_SIDE_EFFECTS (op
))
3754 side_effects
= true;
3755 if (op
&& !TREE_READONLY (op
) && !CONSTANT_CLASS_P (op
))
3759 TREE_SIDE_EFFECTS (t
) = side_effects
;
3760 TREE_READONLY (t
) = read_only
;
3763 /* Return true if EXP contains a PLACEHOLDER_EXPR, i.e. if it represents a
3764 size or offset that depends on a field within a record. */
3767 contains_placeholder_p (const_tree exp
)
3769 enum tree_code code
;
3774 code
= TREE_CODE (exp
);
3775 if (code
== PLACEHOLDER_EXPR
)
3778 switch (TREE_CODE_CLASS (code
))
3781 /* Don't look at any PLACEHOLDER_EXPRs that might be in index or bit
3782 position computations since they will be converted into a
3783 WITH_RECORD_EXPR involving the reference, which will assume
3784 here will be valid. */
3785 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp
, 0));
3787 case tcc_exceptional
:
3788 if (code
== TREE_LIST
)
3789 return (CONTAINS_PLACEHOLDER_P (TREE_VALUE (exp
))
3790 || CONTAINS_PLACEHOLDER_P (TREE_CHAIN (exp
)));
3795 case tcc_comparison
:
3796 case tcc_expression
:
3800 /* Ignoring the first operand isn't quite right, but works best. */
3801 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp
, 1));
3804 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp
, 0))
3805 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp
, 1))
3806 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp
, 2)));
3809 /* The save_expr function never wraps anything containing
3810 a PLACEHOLDER_EXPR. */
3817 switch (TREE_CODE_LENGTH (code
))
3820 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp
, 0));
3822 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp
, 0))
3823 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp
, 1)));
3834 const_call_expr_arg_iterator iter
;
3835 FOR_EACH_CONST_CALL_EXPR_ARG (arg
, iter
, exp
)
3836 if (CONTAINS_PLACEHOLDER_P (arg
))
3850 /* Return true if any part of the structure of TYPE involves a PLACEHOLDER_EXPR
3851 directly. This includes size, bounds, qualifiers (for QUAL_UNION_TYPE) and
3855 type_contains_placeholder_1 (const_tree type
)
3857 /* If the size contains a placeholder or the parent type (component type in
3858 the case of arrays) type involves a placeholder, this type does. */
3859 if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type
))
3860 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE_UNIT (type
))
3861 || (!POINTER_TYPE_P (type
)
3863 && type_contains_placeholder_p (TREE_TYPE (type
))))
3866 /* Now do type-specific checks. Note that the last part of the check above
3867 greatly limits what we have to do below. */
3868 switch (TREE_CODE (type
))
3876 case REFERENCE_TYPE
:
3885 case FIXED_POINT_TYPE
:
3886 /* Here we just check the bounds. */
3887 return (CONTAINS_PLACEHOLDER_P (TYPE_MIN_VALUE (type
))
3888 || CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type
)));
3891 /* We have already checked the component type above, so just check
3892 the domain type. Flexible array members have a null domain. */
3893 return TYPE_DOMAIN (type
) ?
3894 type_contains_placeholder_p (TYPE_DOMAIN (type
)) : false;
3898 case QUAL_UNION_TYPE
:
3902 for (field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
3903 if (TREE_CODE (field
) == FIELD_DECL
3904 && (CONTAINS_PLACEHOLDER_P (DECL_FIELD_OFFSET (field
))
3905 || (TREE_CODE (type
) == QUAL_UNION_TYPE
3906 && CONTAINS_PLACEHOLDER_P (DECL_QUALIFIER (field
)))
3907 || type_contains_placeholder_p (TREE_TYPE (field
))))
3918 /* Wrapper around above function used to cache its result. */
3921 type_contains_placeholder_p (tree type
)
3925 /* If the contains_placeholder_bits field has been initialized,
3926 then we know the answer. */
3927 if (TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type
) > 0)
3928 return TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type
) - 1;
3930 /* Indicate that we've seen this type node, and the answer is false.
3931 This is what we want to return if we run into recursion via fields. */
3932 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type
) = 1;
3934 /* Compute the real value. */
3935 result
= type_contains_placeholder_1 (type
);
3937 /* Store the real value. */
3938 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type
) = result
+ 1;
3943 /* Push tree EXP onto vector QUEUE if it is not already present. */
3946 push_without_duplicates (tree exp
, vec
<tree
> *queue
)
3951 FOR_EACH_VEC_ELT (*queue
, i
, iter
)
3952 if (simple_cst_equal (iter
, exp
) == 1)
3956 queue
->safe_push (exp
);
3959 /* Given a tree EXP, find all occurrences of references to fields
3960 in a PLACEHOLDER_EXPR and place them in vector REFS without
3961 duplicates. Also record VAR_DECLs and CONST_DECLs. Note that
3962 we assume here that EXP contains only arithmetic expressions
3963 or CALL_EXPRs with PLACEHOLDER_EXPRs occurring only in their
3967 find_placeholder_in_expr (tree exp
, vec
<tree
> *refs
)
3969 enum tree_code code
= TREE_CODE (exp
);
3973 /* We handle TREE_LIST and COMPONENT_REF separately. */
3974 if (code
== TREE_LIST
)
3976 FIND_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp
), refs
);
3977 FIND_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp
), refs
);
3979 else if (code
== COMPONENT_REF
)
3981 for (inner
= TREE_OPERAND (exp
, 0);
3982 REFERENCE_CLASS_P (inner
);
3983 inner
= TREE_OPERAND (inner
, 0))
3986 if (TREE_CODE (inner
) == PLACEHOLDER_EXPR
)
3987 push_without_duplicates (exp
, refs
);
3989 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 0), refs
);
3992 switch (TREE_CODE_CLASS (code
))
3997 case tcc_declaration
:
3998 /* Variables allocated to static storage can stay. */
3999 if (!TREE_STATIC (exp
))
4000 push_without_duplicates (exp
, refs
);
4003 case tcc_expression
:
4004 /* This is the pattern built in ada/make_aligning_type. */
4005 if (code
== ADDR_EXPR
4006 && TREE_CODE (TREE_OPERAND (exp
, 0)) == PLACEHOLDER_EXPR
)
4008 push_without_duplicates (exp
, refs
);
4014 case tcc_exceptional
:
4017 case tcc_comparison
:
4019 for (i
= 0; i
< TREE_CODE_LENGTH (code
); i
++)
4020 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, i
), refs
);
4024 for (i
= 1; i
< TREE_OPERAND_LENGTH (exp
); i
++)
4025 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, i
), refs
);
4033 /* Given a tree EXP, a FIELD_DECL F, and a replacement value R,
4034 return a tree with all occurrences of references to F in a
4035 PLACEHOLDER_EXPR replaced by R. Also handle VAR_DECLs and
4036 CONST_DECLs. Note that we assume here that EXP contains only
4037 arithmetic expressions or CALL_EXPRs with PLACEHOLDER_EXPRs
4038 occurring only in their argument list. */
4041 substitute_in_expr (tree exp
, tree f
, tree r
)
4043 enum tree_code code
= TREE_CODE (exp
);
4044 tree op0
, op1
, op2
, op3
;
4047 /* We handle TREE_LIST and COMPONENT_REF separately. */
4048 if (code
== TREE_LIST
)
4050 op0
= SUBSTITUTE_IN_EXPR (TREE_CHAIN (exp
), f
, r
);
4051 op1
= SUBSTITUTE_IN_EXPR (TREE_VALUE (exp
), f
, r
);
4052 if (op0
== TREE_CHAIN (exp
) && op1
== TREE_VALUE (exp
))
4055 return tree_cons (TREE_PURPOSE (exp
), op1
, op0
);
4057 else if (code
== COMPONENT_REF
)
4061 /* If this expression is getting a value from a PLACEHOLDER_EXPR
4062 and it is the right field, replace it with R. */
4063 for (inner
= TREE_OPERAND (exp
, 0);
4064 REFERENCE_CLASS_P (inner
);
4065 inner
= TREE_OPERAND (inner
, 0))
4069 op1
= TREE_OPERAND (exp
, 1);
4071 if (TREE_CODE (inner
) == PLACEHOLDER_EXPR
&& op1
== f
)
4074 /* If this expression hasn't been completed let, leave it alone. */
4075 if (TREE_CODE (inner
) == PLACEHOLDER_EXPR
&& !TREE_TYPE (inner
))
4078 op0
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 0), f
, r
);
4079 if (op0
== TREE_OPERAND (exp
, 0))
4083 = fold_build3 (COMPONENT_REF
, TREE_TYPE (exp
), op0
, op1
, NULL_TREE
);
4086 switch (TREE_CODE_CLASS (code
))
4091 case tcc_declaration
:
4097 case tcc_expression
:
4103 case tcc_exceptional
:
4106 case tcc_comparison
:
4108 switch (TREE_CODE_LENGTH (code
))
4114 op0
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 0), f
, r
);
4115 if (op0
== TREE_OPERAND (exp
, 0))
4118 new_tree
= fold_build1 (code
, TREE_TYPE (exp
), op0
);
4122 op0
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 0), f
, r
);
4123 op1
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 1), f
, r
);
4125 if (op0
== TREE_OPERAND (exp
, 0) && op1
== TREE_OPERAND (exp
, 1))
4128 new_tree
= fold_build2 (code
, TREE_TYPE (exp
), op0
, op1
);
4132 op0
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 0), f
, r
);
4133 op1
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 1), f
, r
);
4134 op2
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 2), f
, r
);
4136 if (op0
== TREE_OPERAND (exp
, 0) && op1
== TREE_OPERAND (exp
, 1)
4137 && op2
== TREE_OPERAND (exp
, 2))
4140 new_tree
= fold_build3 (code
, TREE_TYPE (exp
), op0
, op1
, op2
);
4144 op0
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 0), f
, r
);
4145 op1
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 1), f
, r
);
4146 op2
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 2), f
, r
);
4147 op3
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 3), f
, r
);
4149 if (op0
== TREE_OPERAND (exp
, 0) && op1
== TREE_OPERAND (exp
, 1)
4150 && op2
== TREE_OPERAND (exp
, 2)
4151 && op3
== TREE_OPERAND (exp
, 3))
4155 = fold (build4 (code
, TREE_TYPE (exp
), op0
, op1
, op2
, op3
));
4167 new_tree
= NULL_TREE
;
4169 /* If we are trying to replace F with a constant or with another
4170 instance of one of the arguments of the call, inline back
4171 functions which do nothing else than computing a value from
4172 the arguments they are passed. This makes it possible to
4173 fold partially or entirely the replacement expression. */
4174 if (code
== CALL_EXPR
)
4176 bool maybe_inline
= false;
4177 if (CONSTANT_CLASS_P (r
))
4178 maybe_inline
= true;
4180 for (i
= 3; i
< TREE_OPERAND_LENGTH (exp
); i
++)
4181 if (operand_equal_p (TREE_OPERAND (exp
, i
), r
, 0))
4183 maybe_inline
= true;
4188 tree t
= maybe_inline_call_in_expr (exp
);
4190 return SUBSTITUTE_IN_EXPR (t
, f
, r
);
4194 for (i
= 1; i
< TREE_OPERAND_LENGTH (exp
); i
++)
4196 tree op
= TREE_OPERAND (exp
, i
);
4197 tree new_op
= SUBSTITUTE_IN_EXPR (op
, f
, r
);
4201 new_tree
= copy_node (exp
);
4202 TREE_OPERAND (new_tree
, i
) = new_op
;
4208 new_tree
= fold (new_tree
);
4209 if (TREE_CODE (new_tree
) == CALL_EXPR
)
4210 process_call_operands (new_tree
);
4221 TREE_READONLY (new_tree
) |= TREE_READONLY (exp
);
4223 if (code
== INDIRECT_REF
|| code
== ARRAY_REF
|| code
== ARRAY_RANGE_REF
)
4224 TREE_THIS_NOTRAP (new_tree
) |= TREE_THIS_NOTRAP (exp
);
4229 /* Similar, but look for a PLACEHOLDER_EXPR in EXP and find a replacement
4230 for it within OBJ, a tree that is an object or a chain of references. */
4233 substitute_placeholder_in_expr (tree exp
, tree obj
)
4235 enum tree_code code
= TREE_CODE (exp
);
4236 tree op0
, op1
, op2
, op3
;
4239 /* If this is a PLACEHOLDER_EXPR, see if we find a corresponding type
4240 in the chain of OBJ. */
4241 if (code
== PLACEHOLDER_EXPR
)
4243 tree need_type
= TYPE_MAIN_VARIANT (TREE_TYPE (exp
));
4246 for (elt
= obj
; elt
!= 0;
4247 elt
= ((TREE_CODE (elt
) == COMPOUND_EXPR
4248 || TREE_CODE (elt
) == COND_EXPR
)
4249 ? TREE_OPERAND (elt
, 1)
4250 : (REFERENCE_CLASS_P (elt
)
4251 || UNARY_CLASS_P (elt
)
4252 || BINARY_CLASS_P (elt
)
4253 || VL_EXP_CLASS_P (elt
)
4254 || EXPRESSION_CLASS_P (elt
))
4255 ? TREE_OPERAND (elt
, 0) : 0))
4256 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt
)) == need_type
)
4259 for (elt
= obj
; elt
!= 0;
4260 elt
= ((TREE_CODE (elt
) == COMPOUND_EXPR
4261 || TREE_CODE (elt
) == COND_EXPR
)
4262 ? TREE_OPERAND (elt
, 1)
4263 : (REFERENCE_CLASS_P (elt
)
4264 || UNARY_CLASS_P (elt
)
4265 || BINARY_CLASS_P (elt
)
4266 || VL_EXP_CLASS_P (elt
)
4267 || EXPRESSION_CLASS_P (elt
))
4268 ? TREE_OPERAND (elt
, 0) : 0))
4269 if (POINTER_TYPE_P (TREE_TYPE (elt
))
4270 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt
)))
4272 return fold_build1 (INDIRECT_REF
, need_type
, elt
);
4274 /* If we didn't find it, return the original PLACEHOLDER_EXPR. If it
4275 survives until RTL generation, there will be an error. */
4279 /* TREE_LIST is special because we need to look at TREE_VALUE
4280 and TREE_CHAIN, not TREE_OPERANDS. */
4281 else if (code
== TREE_LIST
)
4283 op0
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp
), obj
);
4284 op1
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp
), obj
);
4285 if (op0
== TREE_CHAIN (exp
) && op1
== TREE_VALUE (exp
))
4288 return tree_cons (TREE_PURPOSE (exp
), op1
, op0
);
4291 switch (TREE_CODE_CLASS (code
))
4294 case tcc_declaration
:
4297 case tcc_exceptional
:
4300 case tcc_comparison
:
4301 case tcc_expression
:
4304 switch (TREE_CODE_LENGTH (code
))
4310 op0
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 0), obj
);
4311 if (op0
== TREE_OPERAND (exp
, 0))
4314 new_tree
= fold_build1 (code
, TREE_TYPE (exp
), op0
);
4318 op0
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 0), obj
);
4319 op1
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 1), obj
);
4321 if (op0
== TREE_OPERAND (exp
, 0) && op1
== TREE_OPERAND (exp
, 1))
4324 new_tree
= fold_build2 (code
, TREE_TYPE (exp
), op0
, op1
);
4328 op0
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 0), obj
);
4329 op1
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 1), obj
);
4330 op2
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 2), obj
);
4332 if (op0
== TREE_OPERAND (exp
, 0) && op1
== TREE_OPERAND (exp
, 1)
4333 && op2
== TREE_OPERAND (exp
, 2))
4336 new_tree
= fold_build3 (code
, TREE_TYPE (exp
), op0
, op1
, op2
);
4340 op0
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 0), obj
);
4341 op1
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 1), obj
);
4342 op2
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 2), obj
);
4343 op3
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 3), obj
);
4345 if (op0
== TREE_OPERAND (exp
, 0) && op1
== TREE_OPERAND (exp
, 1)
4346 && op2
== TREE_OPERAND (exp
, 2)
4347 && op3
== TREE_OPERAND (exp
, 3))
4351 = fold (build4 (code
, TREE_TYPE (exp
), op0
, op1
, op2
, op3
));
4363 new_tree
= NULL_TREE
;
4365 for (i
= 1; i
< TREE_OPERAND_LENGTH (exp
); i
++)
4367 tree op
= TREE_OPERAND (exp
, i
);
4368 tree new_op
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (op
, obj
);
4372 new_tree
= copy_node (exp
);
4373 TREE_OPERAND (new_tree
, i
) = new_op
;
4379 new_tree
= fold (new_tree
);
4380 if (TREE_CODE (new_tree
) == CALL_EXPR
)
4381 process_call_operands (new_tree
);
4392 TREE_READONLY (new_tree
) |= TREE_READONLY (exp
);
4394 if (code
== INDIRECT_REF
|| code
== ARRAY_REF
|| code
== ARRAY_RANGE_REF
)
4395 TREE_THIS_NOTRAP (new_tree
) |= TREE_THIS_NOTRAP (exp
);
4401 /* Subroutine of stabilize_reference; this is called for subtrees of
4402 references. Any expression with side-effects must be put in a SAVE_EXPR
4403 to ensure that it is only evaluated once.
4405 We don't put SAVE_EXPR nodes around everything, because assigning very
4406 simple expressions to temporaries causes us to miss good opportunities
4407 for optimizations. Among other things, the opportunity to fold in the
4408 addition of a constant into an addressing mode often gets lost, e.g.
4409 "y[i+1] += x;". In general, we take the approach that we should not make
4410 an assignment unless we are forced into it - i.e., that any non-side effect
4411 operator should be allowed, and that cse should take care of coalescing
4412 multiple utterances of the same expression should that prove fruitful. */
4415 stabilize_reference_1 (tree e
)
4418 enum tree_code code
= TREE_CODE (e
);
4420 /* We cannot ignore const expressions because it might be a reference
4421 to a const array but whose index contains side-effects. But we can
4422 ignore things that are actual constant or that already have been
4423 handled by this function. */
4425 if (tree_invariant_p (e
))
4428 switch (TREE_CODE_CLASS (code
))
4430 case tcc_exceptional
:
4431 /* Always wrap STATEMENT_LIST into SAVE_EXPR, even if it doesn't
4432 have side-effects. */
4433 if (code
== STATEMENT_LIST
)
4434 return save_expr (e
);
4437 case tcc_declaration
:
4438 case tcc_comparison
:
4440 case tcc_expression
:
4443 /* If the expression has side-effects, then encase it in a SAVE_EXPR
4444 so that it will only be evaluated once. */
4445 /* The reference (r) and comparison (<) classes could be handled as
4446 below, but it is generally faster to only evaluate them once. */
4447 if (TREE_SIDE_EFFECTS (e
))
4448 return save_expr (e
);
4452 /* Constants need no processing. In fact, we should never reach
4457 /* Division is slow and tends to be compiled with jumps,
4458 especially the division by powers of 2 that is often
4459 found inside of an array reference. So do it just once. */
4460 if (code
== TRUNC_DIV_EXPR
|| code
== TRUNC_MOD_EXPR
4461 || code
== FLOOR_DIV_EXPR
|| code
== FLOOR_MOD_EXPR
4462 || code
== CEIL_DIV_EXPR
|| code
== CEIL_MOD_EXPR
4463 || code
== ROUND_DIV_EXPR
|| code
== ROUND_MOD_EXPR
)
4464 return save_expr (e
);
4465 /* Recursively stabilize each operand. */
4466 result
= build_nt (code
, stabilize_reference_1 (TREE_OPERAND (e
, 0)),
4467 stabilize_reference_1 (TREE_OPERAND (e
, 1)));
4471 /* Recursively stabilize each operand. */
4472 result
= build_nt (code
, stabilize_reference_1 (TREE_OPERAND (e
, 0)));
4479 TREE_TYPE (result
) = TREE_TYPE (e
);
4480 TREE_READONLY (result
) = TREE_READONLY (e
);
4481 TREE_SIDE_EFFECTS (result
) = TREE_SIDE_EFFECTS (e
);
4482 TREE_THIS_VOLATILE (result
) = TREE_THIS_VOLATILE (e
);
4487 /* Stabilize a reference so that we can use it any number of times
4488 without causing its operands to be evaluated more than once.
4489 Returns the stabilized reference. This works by means of save_expr,
4490 so see the caveats in the comments about save_expr.
4492 Also allows conversion expressions whose operands are references.
4493 Any other kind of expression is returned unchanged. */
4496 stabilize_reference (tree ref
)
4499 enum tree_code code
= TREE_CODE (ref
);
4506 /* No action is needed in this case. */
4511 case FIX_TRUNC_EXPR
:
4512 result
= build_nt (code
, stabilize_reference (TREE_OPERAND (ref
, 0)));
4516 result
= build_nt (INDIRECT_REF
,
4517 stabilize_reference_1 (TREE_OPERAND (ref
, 0)));
4521 result
= build_nt (COMPONENT_REF
,
4522 stabilize_reference (TREE_OPERAND (ref
, 0)),
4523 TREE_OPERAND (ref
, 1), NULL_TREE
);
4527 result
= build_nt (BIT_FIELD_REF
,
4528 stabilize_reference (TREE_OPERAND (ref
, 0)),
4529 TREE_OPERAND (ref
, 1), TREE_OPERAND (ref
, 2));
4530 REF_REVERSE_STORAGE_ORDER (result
) = REF_REVERSE_STORAGE_ORDER (ref
);
4534 result
= build_nt (ARRAY_REF
,
4535 stabilize_reference (TREE_OPERAND (ref
, 0)),
4536 stabilize_reference_1 (TREE_OPERAND (ref
, 1)),
4537 TREE_OPERAND (ref
, 2), TREE_OPERAND (ref
, 3));
4540 case ARRAY_RANGE_REF
:
4541 result
= build_nt (ARRAY_RANGE_REF
,
4542 stabilize_reference (TREE_OPERAND (ref
, 0)),
4543 stabilize_reference_1 (TREE_OPERAND (ref
, 1)),
4544 TREE_OPERAND (ref
, 2), TREE_OPERAND (ref
, 3));
4548 /* We cannot wrap the first expression in a SAVE_EXPR, as then
4549 it wouldn't be ignored. This matters when dealing with
4551 return stabilize_reference_1 (ref
);
4553 /* If arg isn't a kind of lvalue we recognize, make no change.
4554 Caller should recognize the error for an invalid lvalue. */
4559 return error_mark_node
;
4562 TREE_TYPE (result
) = TREE_TYPE (ref
);
4563 TREE_READONLY (result
) = TREE_READONLY (ref
);
4564 TREE_SIDE_EFFECTS (result
) = TREE_SIDE_EFFECTS (ref
);
4565 TREE_THIS_VOLATILE (result
) = TREE_THIS_VOLATILE (ref
);
4570 /* Low-level constructors for expressions. */
4572 /* A helper function for build1 and constant folders. Set TREE_CONSTANT,
4573 and TREE_SIDE_EFFECTS for an ADDR_EXPR. */
4576 recompute_tree_invariant_for_addr_expr (tree t
)
4579 bool tc
= true, se
= false;
4581 gcc_assert (TREE_CODE (t
) == ADDR_EXPR
);
4583 /* We started out assuming this address is both invariant and constant, but
4584 does not have side effects. Now go down any handled components and see if
4585 any of them involve offsets that are either non-constant or non-invariant.
4586 Also check for side-effects.
4588 ??? Note that this code makes no attempt to deal with the case where
4589 taking the address of something causes a copy due to misalignment. */
4591 #define UPDATE_FLAGS(NODE) \
4592 do { tree _node = (NODE); \
4593 if (_node && !TREE_CONSTANT (_node)) tc = false; \
4594 if (_node && TREE_SIDE_EFFECTS (_node)) se = true; } while (0)
4596 for (node
= TREE_OPERAND (t
, 0); handled_component_p (node
);
4597 node
= TREE_OPERAND (node
, 0))
4599 /* If the first operand doesn't have an ARRAY_TYPE, this is a bogus
4600 array reference (probably made temporarily by the G++ front end),
4601 so ignore all the operands. */
4602 if ((TREE_CODE (node
) == ARRAY_REF
4603 || TREE_CODE (node
) == ARRAY_RANGE_REF
)
4604 && TREE_CODE (TREE_TYPE (TREE_OPERAND (node
, 0))) == ARRAY_TYPE
)
4606 UPDATE_FLAGS (TREE_OPERAND (node
, 1));
4607 if (TREE_OPERAND (node
, 2))
4608 UPDATE_FLAGS (TREE_OPERAND (node
, 2));
4609 if (TREE_OPERAND (node
, 3))
4610 UPDATE_FLAGS (TREE_OPERAND (node
, 3));
4612 /* Likewise, just because this is a COMPONENT_REF doesn't mean we have a
4613 FIELD_DECL, apparently. The G++ front end can put something else
4614 there, at least temporarily. */
4615 else if (TREE_CODE (node
) == COMPONENT_REF
4616 && TREE_CODE (TREE_OPERAND (node
, 1)) == FIELD_DECL
)
4618 if (TREE_OPERAND (node
, 2))
4619 UPDATE_FLAGS (TREE_OPERAND (node
, 2));
4623 node
= lang_hooks
.expr_to_decl (node
, &tc
, &se
);
4625 /* Now see what's inside. If it's an INDIRECT_REF, copy our properties from
4626 the address, since &(*a)->b is a form of addition. If it's a constant, the
4627 address is constant too. If it's a decl, its address is constant if the
4628 decl is static. Everything else is not constant and, furthermore,
4629 taking the address of a volatile variable is not volatile. */
4630 if (TREE_CODE (node
) == INDIRECT_REF
4631 || TREE_CODE (node
) == MEM_REF
)
4632 UPDATE_FLAGS (TREE_OPERAND (node
, 0));
4633 else if (CONSTANT_CLASS_P (node
))
4635 else if (DECL_P (node
))
4636 tc
&= (staticp (node
) != NULL_TREE
);
4640 se
|= TREE_SIDE_EFFECTS (node
);
4644 TREE_CONSTANT (t
) = tc
;
4645 TREE_SIDE_EFFECTS (t
) = se
;
4649 /* Build an expression of code CODE, data type TYPE, and operands as
4650 specified. Expressions and reference nodes can be created this way.
4651 Constants, decls, types and misc nodes cannot be.
4653 We define 5 non-variadic functions, from 0 to 4 arguments. This is
4654 enough for all extant tree codes. */
4657 build0 (enum tree_code code
, tree tt MEM_STAT_DECL
)
4661 gcc_assert (TREE_CODE_LENGTH (code
) == 0);
4663 t
= make_node (code PASS_MEM_STAT
);
4670 build1 (enum tree_code code
, tree type
, tree node MEM_STAT_DECL
)
4672 int length
= sizeof (struct tree_exp
);
4675 record_node_allocation_statistics (code
, length
);
4677 gcc_assert (TREE_CODE_LENGTH (code
) == 1);
4679 t
= ggc_alloc_tree_node_stat (length PASS_MEM_STAT
);
4681 memset (t
, 0, sizeof (struct tree_common
));
4683 TREE_SET_CODE (t
, code
);
4685 TREE_TYPE (t
) = type
;
4686 SET_EXPR_LOCATION (t
, UNKNOWN_LOCATION
);
4687 TREE_OPERAND (t
, 0) = node
;
4688 if (node
&& !TYPE_P (node
))
4690 TREE_SIDE_EFFECTS (t
) = TREE_SIDE_EFFECTS (node
);
4691 TREE_READONLY (t
) = TREE_READONLY (node
);
4694 if (TREE_CODE_CLASS (code
) == tcc_statement
)
4696 if (code
!= DEBUG_BEGIN_STMT
)
4697 TREE_SIDE_EFFECTS (t
) = 1;
4702 /* All of these have side-effects, no matter what their
4704 TREE_SIDE_EFFECTS (t
) = 1;
4705 TREE_READONLY (t
) = 0;
4709 /* Whether a dereference is readonly has nothing to do with whether
4710 its operand is readonly. */
4711 TREE_READONLY (t
) = 0;
4716 recompute_tree_invariant_for_addr_expr (t
);
4720 if ((TREE_CODE_CLASS (code
) == tcc_unary
|| code
== VIEW_CONVERT_EXPR
)
4721 && node
&& !TYPE_P (node
)
4722 && TREE_CONSTANT (node
))
4723 TREE_CONSTANT (t
) = 1;
4724 if (TREE_CODE_CLASS (code
) == tcc_reference
4725 && node
&& TREE_THIS_VOLATILE (node
))
4726 TREE_THIS_VOLATILE (t
) = 1;
4733 #define PROCESS_ARG(N) \
4735 TREE_OPERAND (t, N) = arg##N; \
4736 if (arg##N &&!TYPE_P (arg##N)) \
4738 if (TREE_SIDE_EFFECTS (arg##N)) \
4740 if (!TREE_READONLY (arg##N) \
4741 && !CONSTANT_CLASS_P (arg##N)) \
4742 (void) (read_only = 0); \
4743 if (!TREE_CONSTANT (arg##N)) \
4744 (void) (constant = 0); \
4749 build2 (enum tree_code code
, tree tt
, tree arg0
, tree arg1 MEM_STAT_DECL
)
4751 bool constant
, read_only
, side_effects
, div_by_zero
;
4754 gcc_assert (TREE_CODE_LENGTH (code
) == 2);
4756 if ((code
== MINUS_EXPR
|| code
== PLUS_EXPR
|| code
== MULT_EXPR
)
4757 && arg0
&& arg1
&& tt
&& POINTER_TYPE_P (tt
)
4758 /* When sizetype precision doesn't match that of pointers
4759 we need to be able to build explicit extensions or truncations
4760 of the offset argument. */
4761 && TYPE_PRECISION (sizetype
) == TYPE_PRECISION (tt
))
4762 gcc_assert (TREE_CODE (arg0
) == INTEGER_CST
4763 && TREE_CODE (arg1
) == INTEGER_CST
);
4765 if (code
== POINTER_PLUS_EXPR
&& arg0
&& arg1
&& tt
)
4766 gcc_assert (POINTER_TYPE_P (tt
) && POINTER_TYPE_P (TREE_TYPE (arg0
))
4767 && ptrofftype_p (TREE_TYPE (arg1
)));
4769 t
= make_node (code PASS_MEM_STAT
);
4772 /* Below, we automatically set TREE_SIDE_EFFECTS and TREE_READONLY for the
4773 result based on those same flags for the arguments. But if the
4774 arguments aren't really even `tree' expressions, we shouldn't be trying
4777 /* Expressions without side effects may be constant if their
4778 arguments are as well. */
4779 constant
= (TREE_CODE_CLASS (code
) == tcc_comparison
4780 || TREE_CODE_CLASS (code
) == tcc_binary
);
4782 side_effects
= TREE_SIDE_EFFECTS (t
);
4786 case TRUNC_DIV_EXPR
:
4788 case FLOOR_DIV_EXPR
:
4789 case ROUND_DIV_EXPR
:
4790 case EXACT_DIV_EXPR
:
4792 case FLOOR_MOD_EXPR
:
4793 case ROUND_MOD_EXPR
:
4794 case TRUNC_MOD_EXPR
:
4795 div_by_zero
= integer_zerop (arg1
);
4798 div_by_zero
= false;
4804 TREE_SIDE_EFFECTS (t
) = side_effects
;
4805 if (code
== MEM_REF
)
4807 if (arg0
&& TREE_CODE (arg0
) == ADDR_EXPR
)
4809 tree o
= TREE_OPERAND (arg0
, 0);
4810 TREE_READONLY (t
) = TREE_READONLY (o
);
4811 TREE_THIS_VOLATILE (t
) = TREE_THIS_VOLATILE (o
);
4816 TREE_READONLY (t
) = read_only
;
4817 /* Don't mark X / 0 as constant. */
4818 TREE_CONSTANT (t
) = constant
&& !div_by_zero
;
4819 TREE_THIS_VOLATILE (t
)
4820 = (TREE_CODE_CLASS (code
) == tcc_reference
4821 && arg0
&& TREE_THIS_VOLATILE (arg0
));
4829 build3 (enum tree_code code
, tree tt
, tree arg0
, tree arg1
,
4830 tree arg2 MEM_STAT_DECL
)
4832 bool constant
, read_only
, side_effects
;
4835 gcc_assert (TREE_CODE_LENGTH (code
) == 3);
4836 gcc_assert (TREE_CODE_CLASS (code
) != tcc_vl_exp
);
4838 t
= make_node (code PASS_MEM_STAT
);
4843 /* As a special exception, if COND_EXPR has NULL branches, we
4844 assume that it is a gimple statement and always consider
4845 it to have side effects. */
4846 if (code
== COND_EXPR
4847 && tt
== void_type_node
4848 && arg1
== NULL_TREE
4849 && arg2
== NULL_TREE
)
4850 side_effects
= true;
4852 side_effects
= TREE_SIDE_EFFECTS (t
);
4858 if (code
== COND_EXPR
)
4859 TREE_READONLY (t
) = read_only
;
4861 TREE_SIDE_EFFECTS (t
) = side_effects
;
4862 TREE_THIS_VOLATILE (t
)
4863 = (TREE_CODE_CLASS (code
) == tcc_reference
4864 && arg0
&& TREE_THIS_VOLATILE (arg0
));
4870 build4 (enum tree_code code
, tree tt
, tree arg0
, tree arg1
,
4871 tree arg2
, tree arg3 MEM_STAT_DECL
)
4873 bool constant
, read_only
, side_effects
;
4876 gcc_assert (TREE_CODE_LENGTH (code
) == 4);
4878 t
= make_node (code PASS_MEM_STAT
);
4881 side_effects
= TREE_SIDE_EFFECTS (t
);
4888 TREE_SIDE_EFFECTS (t
) = side_effects
;
4889 TREE_THIS_VOLATILE (t
)
4890 = (TREE_CODE_CLASS (code
) == tcc_reference
4891 && arg0
&& TREE_THIS_VOLATILE (arg0
));
4897 build5 (enum tree_code code
, tree tt
, tree arg0
, tree arg1
,
4898 tree arg2
, tree arg3
, tree arg4 MEM_STAT_DECL
)
4900 bool constant
, read_only
, side_effects
;
4903 gcc_assert (TREE_CODE_LENGTH (code
) == 5);
4905 t
= make_node (code PASS_MEM_STAT
);
4908 side_effects
= TREE_SIDE_EFFECTS (t
);
4916 TREE_SIDE_EFFECTS (t
) = side_effects
;
4917 if (code
== TARGET_MEM_REF
)
4919 if (arg0
&& TREE_CODE (arg0
) == ADDR_EXPR
)
4921 tree o
= TREE_OPERAND (arg0
, 0);
4922 TREE_READONLY (t
) = TREE_READONLY (o
);
4923 TREE_THIS_VOLATILE (t
) = TREE_THIS_VOLATILE (o
);
4927 TREE_THIS_VOLATILE (t
)
4928 = (TREE_CODE_CLASS (code
) == tcc_reference
4929 && arg0
&& TREE_THIS_VOLATILE (arg0
));
4934 /* Build a simple MEM_REF tree with the sematics of a plain INDIRECT_REF
4935 on the pointer PTR. */
4938 build_simple_mem_ref_loc (location_t loc
, tree ptr
)
4940 poly_int64 offset
= 0;
4941 tree ptype
= TREE_TYPE (ptr
);
4943 /* For convenience allow addresses that collapse to a simple base
4945 if (TREE_CODE (ptr
) == ADDR_EXPR
4946 && (handled_component_p (TREE_OPERAND (ptr
, 0))
4947 || TREE_CODE (TREE_OPERAND (ptr
, 0)) == MEM_REF
))
4949 ptr
= get_addr_base_and_unit_offset (TREE_OPERAND (ptr
, 0), &offset
);
4951 if (TREE_CODE (ptr
) == MEM_REF
)
4953 offset
+= mem_ref_offset (ptr
).force_shwi ();
4954 ptr
= TREE_OPERAND (ptr
, 0);
4957 ptr
= build_fold_addr_expr (ptr
);
4958 gcc_assert (is_gimple_reg (ptr
) || is_gimple_min_invariant (ptr
));
4960 tem
= build2 (MEM_REF
, TREE_TYPE (ptype
),
4961 ptr
, build_int_cst (ptype
, offset
));
4962 SET_EXPR_LOCATION (tem
, loc
);
4966 /* Return the constant offset of a MEM_REF or TARGET_MEM_REF tree T. */
4969 mem_ref_offset (const_tree t
)
4971 return poly_offset_int::from (wi::to_poly_wide (TREE_OPERAND (t
, 1)),
4975 /* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE
4976 offsetted by OFFSET units. */
4979 build_invariant_address (tree type
, tree base
, poly_int64 offset
)
4981 tree ref
= fold_build2 (MEM_REF
, TREE_TYPE (type
),
4982 build_fold_addr_expr (base
),
4983 build_int_cst (ptr_type_node
, offset
));
4984 tree addr
= build1 (ADDR_EXPR
, type
, ref
);
4985 recompute_tree_invariant_for_addr_expr (addr
);
4989 /* Similar except don't specify the TREE_TYPE
4990 and leave the TREE_SIDE_EFFECTS as 0.
4991 It is permissible for arguments to be null,
4992 or even garbage if their values do not matter. */
4995 build_nt (enum tree_code code
, ...)
5002 gcc_assert (TREE_CODE_CLASS (code
) != tcc_vl_exp
);
5006 t
= make_node (code
);
5007 length
= TREE_CODE_LENGTH (code
);
5009 for (i
= 0; i
< length
; i
++)
5010 TREE_OPERAND (t
, i
) = va_arg (p
, tree
);
5016 /* Similar to build_nt, but for creating a CALL_EXPR object with a
5020 build_nt_call_vec (tree fn
, vec
<tree
, va_gc
> *args
)
5025 ret
= build_vl_exp (CALL_EXPR
, vec_safe_length (args
) + 3);
5026 CALL_EXPR_FN (ret
) = fn
;
5027 CALL_EXPR_STATIC_CHAIN (ret
) = NULL_TREE
;
5028 FOR_EACH_VEC_SAFE_ELT (args
, ix
, t
)
5029 CALL_EXPR_ARG (ret
, ix
) = t
;
5033 /* Create a DECL_... node of code CODE, name NAME (if non-null)
5035 We do NOT enter this node in any sort of symbol table.
5037 LOC is the location of the decl.
5039 layout_decl is used to set up the decl's storage layout.
5040 Other slots are initialized to 0 or null pointers. */
5043 build_decl (location_t loc
, enum tree_code code
, tree name
,
5044 tree type MEM_STAT_DECL
)
5048 t
= make_node (code PASS_MEM_STAT
);
5049 DECL_SOURCE_LOCATION (t
) = loc
;
5051 /* if (type == error_mark_node)
5052 type = integer_type_node; */
5053 /* That is not done, deliberately, so that having error_mark_node
5054 as the type can suppress useless errors in the use of this variable. */
5056 DECL_NAME (t
) = name
;
5057 TREE_TYPE (t
) = type
;
5059 if (code
== VAR_DECL
|| code
== PARM_DECL
|| code
== RESULT_DECL
)
5065 /* Builds and returns function declaration with NAME and TYPE. */
5068 build_fn_decl (const char *name
, tree type
)
5070 tree id
= get_identifier (name
);
5071 tree decl
= build_decl (input_location
, FUNCTION_DECL
, id
, type
);
5073 DECL_EXTERNAL (decl
) = 1;
5074 TREE_PUBLIC (decl
) = 1;
5075 DECL_ARTIFICIAL (decl
) = 1;
5076 TREE_NOTHROW (decl
) = 1;
5081 vec
<tree
, va_gc
> *all_translation_units
;
5083 /* Builds a new translation-unit decl with name NAME, queues it in the
5084 global list of translation-unit decls and returns it. */
5087 build_translation_unit_decl (tree name
)
5089 tree tu
= build_decl (UNKNOWN_LOCATION
, TRANSLATION_UNIT_DECL
,
5091 TRANSLATION_UNIT_LANGUAGE (tu
) = lang_hooks
.name
;
5092 vec_safe_push (all_translation_units
, tu
);
5097 /* BLOCK nodes are used to represent the structure of binding contours
5098 and declarations, once those contours have been exited and their contents
5099 compiled. This information is used for outputting debugging info. */
5102 build_block (tree vars
, tree subblocks
, tree supercontext
, tree chain
)
5104 tree block
= make_node (BLOCK
);
5106 BLOCK_VARS (block
) = vars
;
5107 BLOCK_SUBBLOCKS (block
) = subblocks
;
5108 BLOCK_SUPERCONTEXT (block
) = supercontext
;
5109 BLOCK_CHAIN (block
) = chain
;
5114 /* Like SET_EXPR_LOCATION, but make sure the tree can have a location.
5116 LOC is the location to use in tree T. */
5119 protected_set_expr_location (tree t
, location_t loc
)
5121 if (CAN_HAVE_LOCATION_P (t
))
5122 SET_EXPR_LOCATION (t
, loc
);
5125 /* Data used when collecting DECLs and TYPEs for language data removal. */
5127 class free_lang_data_d
5130 free_lang_data_d () : decls (100), types (100) {}
5132 /* Worklist to avoid excessive recursion. */
5133 auto_vec
<tree
> worklist
;
5135 /* Set of traversed objects. Used to avoid duplicate visits. */
5136 hash_set
<tree
> pset
;
5138 /* Array of symbols to process with free_lang_data_in_decl. */
5139 auto_vec
<tree
> decls
;
5141 /* Array of types to process with free_lang_data_in_type. */
5142 auto_vec
<tree
> types
;
5146 /* Add type or decl T to one of the list of tree nodes that need their
5147 language data removed. The lists are held inside FLD. */
5150 add_tree_to_fld_list (tree t
, class free_lang_data_d
*fld
)
5153 fld
->decls
.safe_push (t
);
5154 else if (TYPE_P (t
))
5155 fld
->types
.safe_push (t
);
5160 /* Push tree node T into FLD->WORKLIST. */
5163 fld_worklist_push (tree t
, class free_lang_data_d
*fld
)
5165 if (t
&& !is_lang_specific (t
) && !fld
->pset
.contains (t
))
5166 fld
->worklist
.safe_push ((t
));
5171 /* Return simplified TYPE_NAME of TYPE. */
5174 fld_simplified_type_name (tree type
)
5176 if (!TYPE_NAME (type
) || TREE_CODE (TYPE_NAME (type
)) != TYPE_DECL
)
5177 return TYPE_NAME (type
);
5178 /* Drop TYPE_DECLs in TYPE_NAME in favor of the identifier in the
5179 TYPE_DECL if the type doesn't have linkage.
5180 this must match fld_ */
5181 if (type
!= TYPE_MAIN_VARIANT (type
)
5182 || (!DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (type
))
5183 && (TREE_CODE (type
) != RECORD_TYPE
5184 || !TYPE_BINFO (type
)
5185 || !BINFO_VTABLE (TYPE_BINFO (type
)))))
5186 return DECL_NAME (TYPE_NAME (type
));
5187 return TYPE_NAME (type
);
5190 /* Do same comparsion as check_qualified_type skipping lang part of type
5191 and be more permissive about type names: we only care that names are
5192 same (for diagnostics) and that ODR names are the same.
5193 If INNER_TYPE is non-NULL, be sure that TREE_TYPE match it. */
5196 fld_type_variant_equal_p (tree t
, tree v
, tree inner_type
)
5198 if (TYPE_QUALS (t
) != TYPE_QUALS (v
)
5199 /* We want to match incomplete variants with complete types.
5200 In this case we need to ignore alignment. */
5201 || ((!RECORD_OR_UNION_TYPE_P (t
) || COMPLETE_TYPE_P (v
))
5202 && (TYPE_ALIGN (t
) != TYPE_ALIGN (v
)
5203 || TYPE_USER_ALIGN (t
) != TYPE_USER_ALIGN (v
)))
5204 || fld_simplified_type_name (t
) != fld_simplified_type_name (v
)
5205 || !attribute_list_equal (TYPE_ATTRIBUTES (t
),
5206 TYPE_ATTRIBUTES (v
))
5207 || (inner_type
&& TREE_TYPE (v
) != inner_type
))
5213 /* Find variant of FIRST that match T and create new one if necessary.
5214 Set TREE_TYPE to INNER_TYPE if non-NULL. */
5217 fld_type_variant (tree first
, tree t
, class free_lang_data_d
*fld
,
5218 tree inner_type
= NULL
)
5220 if (first
== TYPE_MAIN_VARIANT (t
))
5222 for (tree v
= first
; v
; v
= TYPE_NEXT_VARIANT (v
))
5223 if (fld_type_variant_equal_p (t
, v
, inner_type
))
5225 tree v
= build_variant_type_copy (first
);
5226 TYPE_READONLY (v
) = TYPE_READONLY (t
);
5227 TYPE_VOLATILE (v
) = TYPE_VOLATILE (t
);
5228 TYPE_ATOMIC (v
) = TYPE_ATOMIC (t
);
5229 TYPE_RESTRICT (v
) = TYPE_RESTRICT (t
);
5230 TYPE_ADDR_SPACE (v
) = TYPE_ADDR_SPACE (t
);
5231 TYPE_NAME (v
) = TYPE_NAME (t
);
5232 TYPE_ATTRIBUTES (v
) = TYPE_ATTRIBUTES (t
);
5233 TYPE_CANONICAL (v
) = TYPE_CANONICAL (t
);
5234 /* Variants of incomplete types should have alignment
5235 set to BITS_PER_UNIT. Do not copy the actual alignment. */
5236 if (!RECORD_OR_UNION_TYPE_P (v
) || COMPLETE_TYPE_P (v
))
5238 SET_TYPE_ALIGN (v
, TYPE_ALIGN (t
));
5239 TYPE_USER_ALIGN (v
) = TYPE_USER_ALIGN (t
);
5242 TREE_TYPE (v
) = inner_type
;
5243 gcc_checking_assert (fld_type_variant_equal_p (t
,v
, inner_type
));
5244 if (!fld
->pset
.add (v
))
5245 add_tree_to_fld_list (v
, fld
);
5249 /* Map complete types to incomplete types. */
5251 static hash_map
<tree
, tree
> *fld_incomplete_types
;
5253 /* Map types to simplified types. */
5255 static hash_map
<tree
, tree
> *fld_simplified_types
;
5257 /* Produce variant of T whose TREE_TYPE is T2. If it is main variant,
5258 use MAP to prevent duplicates. */
5261 fld_process_array_type (tree t
, tree t2
, hash_map
<tree
, tree
> *map
,
5262 class free_lang_data_d
*fld
)
5264 if (TREE_TYPE (t
) == t2
)
5267 if (TYPE_MAIN_VARIANT (t
) != t
)
5269 return fld_type_variant
5270 (fld_process_array_type (TYPE_MAIN_VARIANT (t
),
5271 TYPE_MAIN_VARIANT (t2
), map
, fld
),
5277 = map
->get_or_insert (t
, &existed
);
5280 array
= build_array_type_1 (t2
, TYPE_DOMAIN (t
),
5281 TYPE_TYPELESS_STORAGE (t
), false);
5282 TYPE_CANONICAL (array
) = TYPE_CANONICAL (t
);
5283 if (!fld
->pset
.add (array
))
5284 add_tree_to_fld_list (array
, fld
);
5289 /* Return CTX after removal of contexts that are not relevant */
5292 fld_decl_context (tree ctx
)
5294 /* Variably modified types are needed for tree_is_indexable to decide
5295 whether the type needs to go to local or global section.
5296 This code is semi-broken but for now it is easiest to keep contexts
5298 if (ctx
&& TYPE_P (ctx
)
5299 && !variably_modified_type_p (ctx
, NULL_TREE
))
5301 while (ctx
&& TYPE_P (ctx
))
5302 ctx
= TYPE_CONTEXT (ctx
);
5307 /* For T being aggregate type try to turn it into a incomplete variant.
5308 Return T if no simplification is possible. */
5311 fld_incomplete_type_of (tree t
, class free_lang_data_d
*fld
)
5315 if (POINTER_TYPE_P (t
))
5317 tree t2
= fld_incomplete_type_of (TREE_TYPE (t
), fld
);
5318 if (t2
!= TREE_TYPE (t
))
5321 if (TREE_CODE (t
) == POINTER_TYPE
)
5322 first
= build_pointer_type_for_mode (t2
, TYPE_MODE (t
),
5323 TYPE_REF_CAN_ALIAS_ALL (t
));
5325 first
= build_reference_type_for_mode (t2
, TYPE_MODE (t
),
5326 TYPE_REF_CAN_ALIAS_ALL (t
));
5327 gcc_assert (TYPE_CANONICAL (t2
) != t2
5328 && TYPE_CANONICAL (t2
) == TYPE_CANONICAL (TREE_TYPE (t
)));
5329 if (!fld
->pset
.add (first
))
5330 add_tree_to_fld_list (first
, fld
);
5331 return fld_type_variant (first
, t
, fld
);
5335 if (TREE_CODE (t
) == ARRAY_TYPE
)
5336 return fld_process_array_type (t
,
5337 fld_incomplete_type_of (TREE_TYPE (t
), fld
),
5338 fld_incomplete_types
, fld
);
5339 if ((!RECORD_OR_UNION_TYPE_P (t
) && TREE_CODE (t
) != ENUMERAL_TYPE
)
5340 || !COMPLETE_TYPE_P (t
))
5342 if (TYPE_MAIN_VARIANT (t
) == t
)
5346 = fld_incomplete_types
->get_or_insert (t
, &existed
);
5350 copy
= build_distinct_type_copy (t
);
5352 /* It is possible that type was not seen by free_lang_data yet. */
5353 if (!fld
->pset
.add (copy
))
5354 add_tree_to_fld_list (copy
, fld
);
5355 TYPE_SIZE (copy
) = NULL
;
5356 TYPE_USER_ALIGN (copy
) = 0;
5357 TYPE_SIZE_UNIT (copy
) = NULL
;
5358 TYPE_CANONICAL (copy
) = TYPE_CANONICAL (t
);
5359 TREE_ADDRESSABLE (copy
) = 0;
5360 if (AGGREGATE_TYPE_P (t
))
5362 SET_TYPE_MODE (copy
, VOIDmode
);
5363 SET_TYPE_ALIGN (copy
, BITS_PER_UNIT
);
5364 TYPE_TYPELESS_STORAGE (copy
) = 0;
5365 TYPE_FIELDS (copy
) = NULL
;
5366 TYPE_BINFO (copy
) = NULL
;
5369 TYPE_VALUES (copy
) = NULL
;
5371 /* Build copy of TYPE_DECL in TYPE_NAME if necessary.
5372 This is needed for ODR violation warnings to come out right (we
5373 want duplicate TYPE_DECLs whenever the type is duplicated because
5374 of ODR violation. Because lang data in the TYPE_DECL may not
5375 have been freed yet, rebuild it from scratch and copy relevant
5377 TYPE_NAME (copy
) = fld_simplified_type_name (copy
);
5378 tree name
= TYPE_NAME (copy
);
5380 if (name
&& TREE_CODE (name
) == TYPE_DECL
)
5382 gcc_checking_assert (TREE_TYPE (name
) == t
);
5383 tree name2
= build_decl (DECL_SOURCE_LOCATION (name
), TYPE_DECL
,
5384 DECL_NAME (name
), copy
);
5385 if (DECL_ASSEMBLER_NAME_SET_P (name
))
5386 SET_DECL_ASSEMBLER_NAME (name2
, DECL_ASSEMBLER_NAME (name
));
5387 SET_DECL_ALIGN (name2
, 0);
5388 DECL_CONTEXT (name2
) = fld_decl_context
5389 (DECL_CONTEXT (name
));
5390 TYPE_NAME (copy
) = name2
;
5395 return (fld_type_variant
5396 (fld_incomplete_type_of (TYPE_MAIN_VARIANT (t
), fld
), t
, fld
));
5399 /* Simplify type T for scenarios where we do not need complete pointer
5403 fld_simplified_type (tree t
, class free_lang_data_d
*fld
)
5407 if (POINTER_TYPE_P (t
))
5408 return fld_incomplete_type_of (t
, fld
);
5409 /* FIXME: This triggers verification error, see PR88140. */
5410 if (TREE_CODE (t
) == ARRAY_TYPE
&& 0)
5411 return fld_process_array_type (t
, fld_simplified_type (TREE_TYPE (t
), fld
),
5412 fld_simplified_types
, fld
);
5416 /* Reset the expression *EXPR_P, a size or position.
5418 ??? We could reset all non-constant sizes or positions. But it's cheap
5419 enough to not do so and refrain from adding workarounds to dwarf2out.c.
5421 We need to reset self-referential sizes or positions because they cannot
5422 be gimplified and thus can contain a CALL_EXPR after the gimplification
5423 is finished, which will run afoul of LTO streaming. And they need to be
5424 reset to something essentially dummy but not constant, so as to preserve
5425 the properties of the object they are attached to. */
5428 free_lang_data_in_one_sizepos (tree
*expr_p
)
5430 tree expr
= *expr_p
;
5431 if (CONTAINS_PLACEHOLDER_P (expr
))
5432 *expr_p
= build0 (PLACEHOLDER_EXPR
, TREE_TYPE (expr
));
5436 /* Reset all the fields in a binfo node BINFO. We only keep
5437 BINFO_VTABLE, which is used by gimple_fold_obj_type_ref. */
5440 free_lang_data_in_binfo (tree binfo
)
5445 gcc_assert (TREE_CODE (binfo
) == TREE_BINFO
);
5447 BINFO_VIRTUALS (binfo
) = NULL_TREE
;
5448 BINFO_BASE_ACCESSES (binfo
) = NULL
;
5449 BINFO_INHERITANCE_CHAIN (binfo
) = NULL_TREE
;
5450 BINFO_SUBVTT_INDEX (binfo
) = NULL_TREE
;
5451 BINFO_VPTR_FIELD (binfo
) = NULL_TREE
;
5453 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (binfo
), i
, t
)
5454 free_lang_data_in_binfo (t
);
5458 /* Reset all language specific information still present in TYPE. */
5461 free_lang_data_in_type (tree type
, class free_lang_data_d
*fld
)
5463 gcc_assert (TYPE_P (type
));
5465 /* Give the FE a chance to remove its own data first. */
5466 lang_hooks
.free_lang_data (type
);
5468 TREE_LANG_FLAG_0 (type
) = 0;
5469 TREE_LANG_FLAG_1 (type
) = 0;
5470 TREE_LANG_FLAG_2 (type
) = 0;
5471 TREE_LANG_FLAG_3 (type
) = 0;
5472 TREE_LANG_FLAG_4 (type
) = 0;
5473 TREE_LANG_FLAG_5 (type
) = 0;
5474 TREE_LANG_FLAG_6 (type
) = 0;
5476 TYPE_NEEDS_CONSTRUCTING (type
) = 0;
5478 /* Purge non-marked variants from the variants chain, so that they
5479 don't reappear in the IL after free_lang_data. */
5480 while (TYPE_NEXT_VARIANT (type
)
5481 && !fld
->pset
.contains (TYPE_NEXT_VARIANT (type
)))
5483 tree t
= TYPE_NEXT_VARIANT (type
);
5484 TYPE_NEXT_VARIANT (type
) = TYPE_NEXT_VARIANT (t
);
5485 /* Turn the removed types into distinct types. */
5486 TYPE_MAIN_VARIANT (t
) = t
;
5487 TYPE_NEXT_VARIANT (t
) = NULL_TREE
;
5490 if (TREE_CODE (type
) == FUNCTION_TYPE
)
5492 TREE_TYPE (type
) = fld_simplified_type (TREE_TYPE (type
), fld
);
5493 /* Remove the const and volatile qualifiers from arguments. The
5494 C++ front end removes them, but the C front end does not,
5495 leading to false ODR violation errors when merging two
5496 instances of the same function signature compiled by
5497 different front ends. */
5498 for (tree p
= TYPE_ARG_TYPES (type
); p
; p
= TREE_CHAIN (p
))
5500 TREE_VALUE (p
) = fld_simplified_type (TREE_VALUE (p
), fld
);
5501 tree arg_type
= TREE_VALUE (p
);
5503 if (TYPE_READONLY (arg_type
) || TYPE_VOLATILE (arg_type
))
5505 int quals
= TYPE_QUALS (arg_type
)
5507 & ~TYPE_QUAL_VOLATILE
;
5508 TREE_VALUE (p
) = build_qualified_type (arg_type
, quals
);
5509 if (!fld
->pset
.add (TREE_VALUE (p
)))
5510 free_lang_data_in_type (TREE_VALUE (p
), fld
);
5512 /* C++ FE uses TREE_PURPOSE to store initial values. */
5513 TREE_PURPOSE (p
) = NULL
;
5516 else if (TREE_CODE (type
) == METHOD_TYPE
)
5518 TREE_TYPE (type
) = fld_simplified_type (TREE_TYPE (type
), fld
);
5519 for (tree p
= TYPE_ARG_TYPES (type
); p
; p
= TREE_CHAIN (p
))
5521 /* C++ FE uses TREE_PURPOSE to store initial values. */
5522 TREE_VALUE (p
) = fld_simplified_type (TREE_VALUE (p
), fld
);
5523 TREE_PURPOSE (p
) = NULL
;
5526 else if (RECORD_OR_UNION_TYPE_P (type
))
5528 /* Remove members that are not FIELD_DECLs from the field list
5529 of an aggregate. These occur in C++. */
5530 for (tree
*prev
= &TYPE_FIELDS (type
), member
; (member
= *prev
);)
5531 if (TREE_CODE (member
) == FIELD_DECL
)
5532 prev
= &DECL_CHAIN (member
);
5534 *prev
= DECL_CHAIN (member
);
5536 TYPE_VFIELD (type
) = NULL_TREE
;
5538 if (TYPE_BINFO (type
))
5540 free_lang_data_in_binfo (TYPE_BINFO (type
));
5541 /* We need to preserve link to bases and virtual table for all
5542 polymorphic types to make devirtualization machinery working. */
5543 if (!BINFO_VTABLE (TYPE_BINFO (type
)))
5544 TYPE_BINFO (type
) = NULL
;
5547 else if (INTEGRAL_TYPE_P (type
)
5548 || SCALAR_FLOAT_TYPE_P (type
)
5549 || FIXED_POINT_TYPE_P (type
))
5551 if (TREE_CODE (type
) == ENUMERAL_TYPE
)
5553 /* Type values are used only for C++ ODR checking. Drop them
5554 for all type variants and non-ODR types.
5555 For ODR types the data is freed in free_odr_warning_data. */
5556 if (TYPE_MAIN_VARIANT (type
) != type
5557 || !type_with_linkage_p (type
))
5558 TYPE_VALUES (type
) = NULL
;
5560 /* Simplify representation by recording only values rather
5561 than const decls. */
5562 for (tree e
= TYPE_VALUES (type
); e
; e
= TREE_CHAIN (e
))
5563 if (TREE_CODE (TREE_VALUE (e
)) == CONST_DECL
)
5564 TREE_VALUE (e
) = DECL_INITIAL (TREE_VALUE (e
));
5566 free_lang_data_in_one_sizepos (&TYPE_MIN_VALUE (type
));
5567 free_lang_data_in_one_sizepos (&TYPE_MAX_VALUE (type
));
5570 TYPE_LANG_SLOT_1 (type
) = NULL_TREE
;
5572 free_lang_data_in_one_sizepos (&TYPE_SIZE (type
));
5573 free_lang_data_in_one_sizepos (&TYPE_SIZE_UNIT (type
));
5575 if (TYPE_CONTEXT (type
)
5576 && TREE_CODE (TYPE_CONTEXT (type
)) == BLOCK
)
5578 tree ctx
= TYPE_CONTEXT (type
);
5581 ctx
= BLOCK_SUPERCONTEXT (ctx
);
5583 while (ctx
&& TREE_CODE (ctx
) == BLOCK
);
5584 TYPE_CONTEXT (type
) = ctx
;
5587 TYPE_STUB_DECL (type
) = NULL
;
5588 TYPE_NAME (type
) = fld_simplified_type_name (type
);
5592 /* Return true if DECL may need an assembler name to be set. */
5595 need_assembler_name_p (tree decl
)
5597 /* We use DECL_ASSEMBLER_NAME to hold mangled type names for One Definition
5598 Rule merging. This makes type_odr_p to return true on those types during
5599 LTO and by comparing the mangled name, we can say what types are intended
5600 to be equivalent across compilation unit.
5602 We do not store names of type_in_anonymous_namespace_p.
5604 Record, union and enumeration type have linkage that allows use
5605 to check type_in_anonymous_namespace_p. We do not mangle compound types
5606 that always can be compared structurally.
5608 Similarly for builtin types, we compare properties of their main variant.
5609 A special case are integer types where mangling do make differences
5610 between char/signed char/unsigned char etc. Storing name for these makes
5611 e.g. -fno-signed-char/-fsigned-char mismatches to be handled well.
5612 See cp/mangle.c:write_builtin_type for details. */
5614 if (TREE_CODE (decl
) == TYPE_DECL
)
5616 if (DECL_NAME (decl
)
5617 && decl
== TYPE_NAME (TREE_TYPE (decl
))
5618 && TYPE_MAIN_VARIANT (TREE_TYPE (decl
)) == TREE_TYPE (decl
)
5619 && !TYPE_ARTIFICIAL (TREE_TYPE (decl
))
5620 && ((TREE_CODE (TREE_TYPE (decl
)) != RECORD_TYPE
5621 && TREE_CODE (TREE_TYPE (decl
)) != UNION_TYPE
)
5622 || TYPE_CXX_ODR_P (TREE_TYPE (decl
)))
5623 && (type_with_linkage_p (TREE_TYPE (decl
))
5624 || TREE_CODE (TREE_TYPE (decl
)) == INTEGER_TYPE
)
5625 && !variably_modified_type_p (TREE_TYPE (decl
), NULL_TREE
))
5626 return !DECL_ASSEMBLER_NAME_SET_P (decl
);
5629 /* Only FUNCTION_DECLs and VAR_DECLs are considered. */
5630 if (!VAR_OR_FUNCTION_DECL_P (decl
))
5633 /* If DECL already has its assembler name set, it does not need a
5635 if (!HAS_DECL_ASSEMBLER_NAME_P (decl
)
5636 || DECL_ASSEMBLER_NAME_SET_P (decl
))
5639 /* Abstract decls do not need an assembler name. */
5640 if (DECL_ABSTRACT_P (decl
))
5643 /* For VAR_DECLs, only static, public and external symbols need an
5646 && !TREE_STATIC (decl
)
5647 && !TREE_PUBLIC (decl
)
5648 && !DECL_EXTERNAL (decl
))
5651 if (TREE_CODE (decl
) == FUNCTION_DECL
)
5653 /* Do not set assembler name on builtins. Allow RTL expansion to
5654 decide whether to expand inline or via a regular call. */
5655 if (fndecl_built_in_p (decl
)
5656 && DECL_BUILT_IN_CLASS (decl
) != BUILT_IN_FRONTEND
)
5659 /* Functions represented in the callgraph need an assembler name. */
5660 if (cgraph_node::get (decl
) != NULL
)
5663 /* Unused and not public functions don't need an assembler name. */
5664 if (!TREE_USED (decl
) && !TREE_PUBLIC (decl
))
5672 /* Reset all language specific information still present in symbol
5676 free_lang_data_in_decl (tree decl
, class free_lang_data_d
*fld
)
5678 gcc_assert (DECL_P (decl
));
5680 /* Give the FE a chance to remove its own data first. */
5681 lang_hooks
.free_lang_data (decl
);
5683 TREE_LANG_FLAG_0 (decl
) = 0;
5684 TREE_LANG_FLAG_1 (decl
) = 0;
5685 TREE_LANG_FLAG_2 (decl
) = 0;
5686 TREE_LANG_FLAG_3 (decl
) = 0;
5687 TREE_LANG_FLAG_4 (decl
) = 0;
5688 TREE_LANG_FLAG_5 (decl
) = 0;
5689 TREE_LANG_FLAG_6 (decl
) = 0;
5691 free_lang_data_in_one_sizepos (&DECL_SIZE (decl
));
5692 free_lang_data_in_one_sizepos (&DECL_SIZE_UNIT (decl
));
5693 if (TREE_CODE (decl
) == FIELD_DECL
)
5695 DECL_FCONTEXT (decl
) = NULL
;
5696 free_lang_data_in_one_sizepos (&DECL_FIELD_OFFSET (decl
));
5697 if (TREE_CODE (DECL_CONTEXT (decl
)) == QUAL_UNION_TYPE
)
5698 DECL_QUALIFIER (decl
) = NULL_TREE
;
5701 if (TREE_CODE (decl
) == FUNCTION_DECL
)
5703 struct cgraph_node
*node
;
5704 /* Frontends do not set TREE_ADDRESSABLE on public variables even though
5705 the address may be taken in other unit, so this flag has no practical
5708 It would make more sense if frontends set TREE_ADDRESSABLE to 0 only
5709 for public objects that indeed cannot be adressed, but it is not
5710 the case. Set the flag to true so we do not get merge failures for
5711 i.e. virtual tables between units that take address of it and
5712 units that don't. */
5713 if (TREE_PUBLIC (decl
))
5714 TREE_ADDRESSABLE (decl
) = true;
5715 TREE_TYPE (decl
) = fld_simplified_type (TREE_TYPE (decl
), fld
);
5716 if (!(node
= cgraph_node::get (decl
))
5717 || (!node
->definition
&& !node
->clones
))
5720 node
->release_body ();
5723 release_function_body (decl
);
5724 DECL_ARGUMENTS (decl
) = NULL
;
5725 DECL_RESULT (decl
) = NULL
;
5726 DECL_INITIAL (decl
) = error_mark_node
;
5729 if (gimple_has_body_p (decl
) || (node
&& node
->thunk
.thunk_p
))
5733 /* If DECL has a gimple body, then the context for its
5734 arguments must be DECL. Otherwise, it doesn't really
5735 matter, as we will not be emitting any code for DECL. In
5736 general, there may be other instances of DECL created by
5737 the front end and since PARM_DECLs are generally shared,
5738 their DECL_CONTEXT changes as the replicas of DECL are
5739 created. The only time where DECL_CONTEXT is important
5740 is for the FUNCTION_DECLs that have a gimple body (since
5741 the PARM_DECL will be used in the function's body). */
5742 for (t
= DECL_ARGUMENTS (decl
); t
; t
= TREE_CHAIN (t
))
5743 DECL_CONTEXT (t
) = decl
;
5744 if (!DECL_FUNCTION_SPECIFIC_TARGET (decl
))
5745 DECL_FUNCTION_SPECIFIC_TARGET (decl
)
5746 = target_option_default_node
;
5747 if (!DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl
))
5748 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl
)
5749 = optimization_default_node
;
5752 /* DECL_SAVED_TREE holds the GENERIC representation for DECL.
5753 At this point, it is not needed anymore. */
5754 DECL_SAVED_TREE (decl
) = NULL_TREE
;
5756 /* Clear the abstract origin if it refers to a method.
5757 Otherwise dwarf2out.c will ICE as we splice functions out of
5758 TYPE_FIELDS and thus the origin will not be output
5760 if (DECL_ABSTRACT_ORIGIN (decl
)
5761 && DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl
))
5762 && RECORD_OR_UNION_TYPE_P
5763 (DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl
))))
5764 DECL_ABSTRACT_ORIGIN (decl
) = NULL_TREE
;
5766 DECL_VINDEX (decl
) = NULL_TREE
;
5768 else if (VAR_P (decl
))
5770 /* See comment above why we set the flag for functoins. */
5771 if (TREE_PUBLIC (decl
))
5772 TREE_ADDRESSABLE (decl
) = true;
5773 if ((DECL_EXTERNAL (decl
)
5774 && (!TREE_STATIC (decl
) || !TREE_READONLY (decl
)))
5775 || (decl_function_context (decl
) && !TREE_STATIC (decl
)))
5776 DECL_INITIAL (decl
) = NULL_TREE
;
5778 else if (TREE_CODE (decl
) == TYPE_DECL
)
5780 DECL_VISIBILITY (decl
) = VISIBILITY_DEFAULT
;
5781 DECL_VISIBILITY_SPECIFIED (decl
) = 0;
5782 TREE_PUBLIC (decl
) = 0;
5783 TREE_PRIVATE (decl
) = 0;
5784 DECL_ARTIFICIAL (decl
) = 0;
5785 TYPE_DECL_SUPPRESS_DEBUG (decl
) = 0;
5786 DECL_INITIAL (decl
) = NULL_TREE
;
5787 DECL_ORIGINAL_TYPE (decl
) = NULL_TREE
;
5788 DECL_MODE (decl
) = VOIDmode
;
5789 SET_DECL_ALIGN (decl
, 0);
5790 /* TREE_TYPE is cleared at WPA time in free_odr_warning_data. */
5792 else if (TREE_CODE (decl
) == FIELD_DECL
)
5794 TREE_TYPE (decl
) = fld_simplified_type (TREE_TYPE (decl
), fld
);
5795 DECL_INITIAL (decl
) = NULL_TREE
;
5797 else if (TREE_CODE (decl
) == TRANSLATION_UNIT_DECL
5798 && DECL_INITIAL (decl
)
5799 && TREE_CODE (DECL_INITIAL (decl
)) == BLOCK
)
5801 /* Strip builtins from the translation-unit BLOCK. We still have targets
5802 without builtin_decl_explicit support and also builtins are shared
5803 nodes and thus we can't use TREE_CHAIN in multiple lists. */
5804 tree
*nextp
= &BLOCK_VARS (DECL_INITIAL (decl
));
5808 if (TREE_CODE (var
) == FUNCTION_DECL
5809 && fndecl_built_in_p (var
))
5810 *nextp
= TREE_CHAIN (var
);
5812 nextp
= &TREE_CHAIN (var
);
5815 /* We need to keep field decls associated with their trees. Otherwise tree
5816 merging may merge some fileds and keep others disjoint wich in turn will
5817 not do well with TREE_CHAIN pointers linking them.
5819 Also do not drop containing types for virtual methods and tables because
5820 these are needed by devirtualization.
5821 C++ destructors are special because C++ frontends sometimes produces
5822 virtual destructor as an alias of non-virtual destructor. In
5823 devirutalization code we always walk through aliases and we need
5824 context to be preserved too. See PR89335 */
5825 if (TREE_CODE (decl
) != FIELD_DECL
5826 && ((TREE_CODE (decl
) != VAR_DECL
&& TREE_CODE (decl
) != FUNCTION_DECL
)
5827 || (!DECL_VIRTUAL_P (decl
)
5828 && (TREE_CODE (decl
) != FUNCTION_DECL
5829 || !DECL_CXX_DESTRUCTOR_P (decl
)))))
5830 DECL_CONTEXT (decl
) = fld_decl_context (DECL_CONTEXT (decl
));
5834 /* Operand callback helper for free_lang_data_in_node. *TP is the
5835 subtree operand being considered. */
5838 find_decls_types_r (tree
*tp
, int *ws
, void *data
)
5841 class free_lang_data_d
*fld
= (class free_lang_data_d
*) data
;
5843 if (TREE_CODE (t
) == TREE_LIST
)
5846 /* Language specific nodes will be removed, so there is no need
5847 to gather anything under them. */
5848 if (is_lang_specific (t
))
5856 /* Note that walk_tree does not traverse every possible field in
5857 decls, so we have to do our own traversals here. */
5858 add_tree_to_fld_list (t
, fld
);
5860 fld_worklist_push (DECL_NAME (t
), fld
);
5861 fld_worklist_push (DECL_CONTEXT (t
), fld
);
5862 fld_worklist_push (DECL_SIZE (t
), fld
);
5863 fld_worklist_push (DECL_SIZE_UNIT (t
), fld
);
5865 /* We are going to remove everything under DECL_INITIAL for
5866 TYPE_DECLs. No point walking them. */
5867 if (TREE_CODE (t
) != TYPE_DECL
)
5868 fld_worklist_push (DECL_INITIAL (t
), fld
);
5870 fld_worklist_push (DECL_ATTRIBUTES (t
), fld
);
5871 fld_worklist_push (DECL_ABSTRACT_ORIGIN (t
), fld
);
5873 if (TREE_CODE (t
) == FUNCTION_DECL
)
5875 fld_worklist_push (DECL_ARGUMENTS (t
), fld
);
5876 fld_worklist_push (DECL_RESULT (t
), fld
);
5878 else if (TREE_CODE (t
) == FIELD_DECL
)
5880 fld_worklist_push (DECL_FIELD_OFFSET (t
), fld
);
5881 fld_worklist_push (DECL_BIT_FIELD_TYPE (t
), fld
);
5882 fld_worklist_push (DECL_FIELD_BIT_OFFSET (t
), fld
);
5883 fld_worklist_push (DECL_FCONTEXT (t
), fld
);
5886 if ((VAR_P (t
) || TREE_CODE (t
) == PARM_DECL
)
5887 && DECL_HAS_VALUE_EXPR_P (t
))
5888 fld_worklist_push (DECL_VALUE_EXPR (t
), fld
);
5890 if (TREE_CODE (t
) != FIELD_DECL
5891 && TREE_CODE (t
) != TYPE_DECL
)
5892 fld_worklist_push (TREE_CHAIN (t
), fld
);
5895 else if (TYPE_P (t
))
5897 /* Note that walk_tree does not traverse every possible field in
5898 types, so we have to do our own traversals here. */
5899 add_tree_to_fld_list (t
, fld
);
5901 if (!RECORD_OR_UNION_TYPE_P (t
))
5902 fld_worklist_push (TYPE_CACHED_VALUES (t
), fld
);
5903 fld_worklist_push (TYPE_SIZE (t
), fld
);
5904 fld_worklist_push (TYPE_SIZE_UNIT (t
), fld
);
5905 fld_worklist_push (TYPE_ATTRIBUTES (t
), fld
);
5906 fld_worklist_push (TYPE_POINTER_TO (t
), fld
);
5907 fld_worklist_push (TYPE_REFERENCE_TO (t
), fld
);
5908 fld_worklist_push (TYPE_NAME (t
), fld
);
5909 /* While we do not stream TYPE_POINTER_TO and TYPE_REFERENCE_TO
5910 lists, we may look types up in these lists and use them while
5911 optimizing the function body. Thus we need to free lang data
5913 if (TREE_CODE (t
) == POINTER_TYPE
)
5914 fld_worklist_push (TYPE_NEXT_PTR_TO (t
), fld
);
5915 if (TREE_CODE (t
) == REFERENCE_TYPE
)
5916 fld_worklist_push (TYPE_NEXT_REF_TO (t
), fld
);
5917 if (!POINTER_TYPE_P (t
))
5918 fld_worklist_push (TYPE_MIN_VALUE_RAW (t
), fld
);
5919 /* TYPE_MAX_VALUE_RAW is TYPE_BINFO for record types. */
5920 if (!RECORD_OR_UNION_TYPE_P (t
))
5921 fld_worklist_push (TYPE_MAX_VALUE_RAW (t
), fld
);
5922 fld_worklist_push (TYPE_MAIN_VARIANT (t
), fld
);
5923 /* Do not walk TYPE_NEXT_VARIANT. We do not stream it and thus
5924 do not and want not to reach unused variants this way. */
5925 if (TYPE_CONTEXT (t
))
5927 tree ctx
= TYPE_CONTEXT (t
);
5928 /* We adjust BLOCK TYPE_CONTEXTs to the innermost non-BLOCK one.
5929 So push that instead. */
5930 while (ctx
&& TREE_CODE (ctx
) == BLOCK
)
5931 ctx
= BLOCK_SUPERCONTEXT (ctx
);
5932 fld_worklist_push (ctx
, fld
);
5934 fld_worklist_push (TYPE_CANONICAL (t
), fld
);
5936 if (RECORD_OR_UNION_TYPE_P (t
) && TYPE_BINFO (t
))
5940 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (TYPE_BINFO (t
)), i
, tem
)
5941 fld_worklist_push (TREE_TYPE (tem
), fld
);
5942 fld_worklist_push (BINFO_TYPE (TYPE_BINFO (t
)), fld
);
5943 fld_worklist_push (BINFO_VTABLE (TYPE_BINFO (t
)), fld
);
5945 if (RECORD_OR_UNION_TYPE_P (t
))
5948 /* Push all TYPE_FIELDS - there can be interleaving interesting
5949 and non-interesting things. */
5950 tem
= TYPE_FIELDS (t
);
5953 if (TREE_CODE (tem
) == FIELD_DECL
)
5954 fld_worklist_push (tem
, fld
);
5955 tem
= TREE_CHAIN (tem
);
5958 if (FUNC_OR_METHOD_TYPE_P (t
))
5959 fld_worklist_push (TYPE_METHOD_BASETYPE (t
), fld
);
5961 fld_worklist_push (TYPE_STUB_DECL (t
), fld
);
5964 else if (TREE_CODE (t
) == BLOCK
)
5966 for (tree
*tem
= &BLOCK_VARS (t
); *tem
; )
5968 if (TREE_CODE (*tem
) != LABEL_DECL
5969 && (TREE_CODE (*tem
) != VAR_DECL
5970 || !auto_var_in_fn_p (*tem
, DECL_CONTEXT (*tem
))))
5972 gcc_assert (TREE_CODE (*tem
) != RESULT_DECL
5973 && TREE_CODE (*tem
) != PARM_DECL
);
5974 *tem
= TREE_CHAIN (*tem
);
5978 fld_worklist_push (*tem
, fld
);
5979 tem
= &TREE_CHAIN (*tem
);
5982 for (tree tem
= BLOCK_SUBBLOCKS (t
); tem
; tem
= BLOCK_CHAIN (tem
))
5983 fld_worklist_push (tem
, fld
);
5984 fld_worklist_push (BLOCK_ABSTRACT_ORIGIN (t
), fld
);
5987 if (TREE_CODE (t
) != IDENTIFIER_NODE
5988 && CODE_CONTAINS_STRUCT (TREE_CODE (t
), TS_TYPED
))
5989 fld_worklist_push (TREE_TYPE (t
), fld
);
5995 /* Find decls and types in T. */
5998 find_decls_types (tree t
, class free_lang_data_d
*fld
)
6002 if (!fld
->pset
.contains (t
))
6003 walk_tree (&t
, find_decls_types_r
, fld
, &fld
->pset
);
6004 if (fld
->worklist
.is_empty ())
6006 t
= fld
->worklist
.pop ();
6010 /* Translate all the types in LIST with the corresponding runtime
6014 get_eh_types_for_runtime (tree list
)
6018 if (list
== NULL_TREE
)
6021 head
= build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list
)));
6023 list
= TREE_CHAIN (list
);
6026 tree n
= build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list
)));
6027 TREE_CHAIN (prev
) = n
;
6028 prev
= TREE_CHAIN (prev
);
6029 list
= TREE_CHAIN (list
);
6036 /* Find decls and types referenced in EH region R and store them in
6037 FLD->DECLS and FLD->TYPES. */
6040 find_decls_types_in_eh_region (eh_region r
, class free_lang_data_d
*fld
)
6051 /* The types referenced in each catch must first be changed to the
6052 EH types used at runtime. This removes references to FE types
6054 for (c
= r
->u
.eh_try
.first_catch
; c
; c
= c
->next_catch
)
6056 c
->type_list
= get_eh_types_for_runtime (c
->type_list
);
6057 walk_tree (&c
->type_list
, find_decls_types_r
, fld
, &fld
->pset
);
6062 case ERT_ALLOWED_EXCEPTIONS
:
6063 r
->u
.allowed
.type_list
6064 = get_eh_types_for_runtime (r
->u
.allowed
.type_list
);
6065 walk_tree (&r
->u
.allowed
.type_list
, find_decls_types_r
, fld
, &fld
->pset
);
6068 case ERT_MUST_NOT_THROW
:
6069 walk_tree (&r
->u
.must_not_throw
.failure_decl
,
6070 find_decls_types_r
, fld
, &fld
->pset
);
6076 /* Find decls and types referenced in cgraph node N and store them in
6077 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
6078 look for *every* kind of DECL and TYPE node reachable from N,
6079 including those embedded inside types and decls (i.e,, TYPE_DECLs,
6080 NAMESPACE_DECLs, etc). */
6083 find_decls_types_in_node (struct cgraph_node
*n
, class free_lang_data_d
*fld
)
6086 struct function
*fn
;
6090 find_decls_types (n
->decl
, fld
);
6092 if (!gimple_has_body_p (n
->decl
))
6095 gcc_assert (current_function_decl
== NULL_TREE
&& cfun
== NULL
);
6097 fn
= DECL_STRUCT_FUNCTION (n
->decl
);
6099 /* Traverse locals. */
6100 FOR_EACH_LOCAL_DECL (fn
, ix
, t
)
6101 find_decls_types (t
, fld
);
6103 /* Traverse EH regions in FN. */
6106 FOR_ALL_EH_REGION_FN (r
, fn
)
6107 find_decls_types_in_eh_region (r
, fld
);
6110 /* Traverse every statement in FN. */
6111 FOR_EACH_BB_FN (bb
, fn
)
6114 gimple_stmt_iterator si
;
6117 for (psi
= gsi_start_phis (bb
); !gsi_end_p (psi
); gsi_next (&psi
))
6119 gphi
*phi
= psi
.phi ();
6121 for (i
= 0; i
< gimple_phi_num_args (phi
); i
++)
6123 tree
*arg_p
= gimple_phi_arg_def_ptr (phi
, i
);
6124 find_decls_types (*arg_p
, fld
);
6128 for (si
= gsi_start_bb (bb
); !gsi_end_p (si
); gsi_next (&si
))
6130 gimple
*stmt
= gsi_stmt (si
);
6132 if (is_gimple_call (stmt
))
6133 find_decls_types (gimple_call_fntype (stmt
), fld
);
6135 for (i
= 0; i
< gimple_num_ops (stmt
); i
++)
6137 tree arg
= gimple_op (stmt
, i
);
6138 find_decls_types (arg
, fld
);
6139 /* find_decls_types doesn't walk TREE_PURPOSE of TREE_LISTs,
6140 which we need for asm stmts. */
6142 && TREE_CODE (arg
) == TREE_LIST
6143 && TREE_PURPOSE (arg
)
6144 && gimple_code (stmt
) == GIMPLE_ASM
)
6145 find_decls_types (TREE_PURPOSE (arg
), fld
);
6152 /* Find decls and types referenced in varpool node N and store them in
6153 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
6154 look for *every* kind of DECL and TYPE node reachable from N,
6155 including those embedded inside types and decls (i.e,, TYPE_DECLs,
6156 NAMESPACE_DECLs, etc). */
6159 find_decls_types_in_var (varpool_node
*v
, class free_lang_data_d
*fld
)
6161 find_decls_types (v
->decl
, fld
);
6164 /* If T needs an assembler name, have one created for it. */
6167 assign_assembler_name_if_needed (tree t
)
6169 if (need_assembler_name_p (t
))
6171 /* When setting DECL_ASSEMBLER_NAME, the C++ mangler may emit
6172 diagnostics that use input_location to show locus
6173 information. The problem here is that, at this point,
6174 input_location is generally anchored to the end of the file
6175 (since the parser is long gone), so we don't have a good
6176 position to pin it to.
6178 To alleviate this problem, this uses the location of T's
6179 declaration. Examples of this are
6180 testsuite/g++.dg/template/cond2.C and
6181 testsuite/g++.dg/template/pr35240.C. */
6182 location_t saved_location
= input_location
;
6183 input_location
= DECL_SOURCE_LOCATION (t
);
6185 decl_assembler_name (t
);
6187 input_location
= saved_location
;
6192 /* Free language specific information for every operand and expression
6193 in every node of the call graph. This process operates in three stages:
6195 1- Every callgraph node and varpool node is traversed looking for
6196 decls and types embedded in them. This is a more exhaustive
6197 search than that done by find_referenced_vars, because it will
6198 also collect individual fields, decls embedded in types, etc.
6200 2- All the decls found are sent to free_lang_data_in_decl.
6202 3- All the types found are sent to free_lang_data_in_type.
6204 The ordering between decls and types is important because
6205 free_lang_data_in_decl sets assembler names, which includes
6206 mangling. So types cannot be freed up until assembler names have
6210 free_lang_data_in_cgraph (class free_lang_data_d
*fld
)
6212 struct cgraph_node
*n
;
6218 /* Find decls and types in the body of every function in the callgraph. */
6219 FOR_EACH_FUNCTION (n
)
6220 find_decls_types_in_node (n
, fld
);
6222 FOR_EACH_VEC_SAFE_ELT (alias_pairs
, i
, p
)
6223 find_decls_types (p
->decl
, fld
);
6225 /* Find decls and types in every varpool symbol. */
6226 FOR_EACH_VARIABLE (v
)
6227 find_decls_types_in_var (v
, fld
);
6229 /* Set the assembler name on every decl found. We need to do this
6230 now because free_lang_data_in_decl will invalidate data needed
6231 for mangling. This breaks mangling on interdependent decls. */
6232 FOR_EACH_VEC_ELT (fld
->decls
, i
, t
)
6233 assign_assembler_name_if_needed (t
);
6235 /* Traverse every decl found freeing its language data. */
6236 FOR_EACH_VEC_ELT (fld
->decls
, i
, t
)
6237 free_lang_data_in_decl (t
, fld
);
6239 /* Traverse every type found freeing its language data. */
6240 FOR_EACH_VEC_ELT (fld
->types
, i
, t
)
6241 free_lang_data_in_type (t
, fld
);
6245 /* Free resources that are used by FE but are not needed once they are done. */
6248 free_lang_data (void)
6251 class free_lang_data_d fld
;
6253 /* If we are the LTO frontend we have freed lang-specific data already. */
6255 || (!flag_generate_lto
&& !flag_generate_offload
))
6257 /* Rebuild type inheritance graph even when not doing LTO to get
6258 consistent profile data. */
6259 rebuild_type_inheritance_graph ();
6263 fld_incomplete_types
= new hash_map
<tree
, tree
>;
6264 fld_simplified_types
= new hash_map
<tree
, tree
>;
6266 /* Provide a dummy TRANSLATION_UNIT_DECL if the FE failed to provide one. */
6267 if (vec_safe_is_empty (all_translation_units
))
6268 build_translation_unit_decl (NULL_TREE
);
6270 /* Allocate and assign alias sets to the standard integer types
6271 while the slots are still in the way the frontends generated them. */
6272 for (i
= 0; i
< itk_none
; ++i
)
6273 if (integer_types
[i
])
6274 TYPE_ALIAS_SET (integer_types
[i
]) = get_alias_set (integer_types
[i
]);
6276 /* Traverse the IL resetting language specific information for
6277 operands, expressions, etc. */
6278 free_lang_data_in_cgraph (&fld
);
6280 /* Create gimple variants for common types. */
6281 for (unsigned i
= 0;
6282 i
< sizeof (builtin_structptr_types
) / sizeof (builtin_structptr_type
);
6284 builtin_structptr_types
[i
].node
= builtin_structptr_types
[i
].base
;
6286 /* Reset some langhooks. Do not reset types_compatible_p, it may
6287 still be used indirectly via the get_alias_set langhook. */
6288 lang_hooks
.dwarf_name
= lhd_dwarf_name
;
6289 lang_hooks
.decl_printable_name
= gimple_decl_printable_name
;
6290 lang_hooks
.gimplify_expr
= lhd_gimplify_expr
;
6291 lang_hooks
.overwrite_decl_assembler_name
= lhd_overwrite_decl_assembler_name
;
6292 lang_hooks
.print_xnode
= lhd_print_tree_nothing
;
6293 lang_hooks
.print_decl
= lhd_print_tree_nothing
;
6294 lang_hooks
.print_type
= lhd_print_tree_nothing
;
6295 lang_hooks
.print_identifier
= lhd_print_tree_nothing
;
6297 lang_hooks
.tree_inlining
.var_mod_type_p
= hook_bool_tree_tree_false
;
6304 FOR_EACH_VEC_ELT (fld
.types
, i
, t
)
6308 /* We do not want the default decl_assembler_name implementation,
6309 rather if we have fixed everything we want a wrapper around it
6310 asserting that all non-local symbols already got their assembler
6311 name and only produce assembler names for local symbols. Or rather
6312 make sure we never call decl_assembler_name on local symbols and
6313 devise a separate, middle-end private scheme for it. */
6315 /* Reset diagnostic machinery. */
6316 tree_diagnostics_defaults (global_dc
);
6318 rebuild_type_inheritance_graph ();
6320 delete fld_incomplete_types
;
6321 delete fld_simplified_types
;
6329 const pass_data pass_data_ipa_free_lang_data
=
6331 SIMPLE_IPA_PASS
, /* type */
6332 "*free_lang_data", /* name */
6333 OPTGROUP_NONE
, /* optinfo_flags */
6334 TV_IPA_FREE_LANG_DATA
, /* tv_id */
6335 0, /* properties_required */
6336 0, /* properties_provided */
6337 0, /* properties_destroyed */
6338 0, /* todo_flags_start */
6339 0, /* todo_flags_finish */
6342 class pass_ipa_free_lang_data
: public simple_ipa_opt_pass
6345 pass_ipa_free_lang_data (gcc::context
*ctxt
)
6346 : simple_ipa_opt_pass (pass_data_ipa_free_lang_data
, ctxt
)
6349 /* opt_pass methods: */
6350 virtual unsigned int execute (function
*) { return free_lang_data (); }
6352 }; // class pass_ipa_free_lang_data
6356 simple_ipa_opt_pass
*
6357 make_pass_ipa_free_lang_data (gcc::context
*ctxt
)
6359 return new pass_ipa_free_lang_data (ctxt
);
6362 /* Set the type qualifiers for TYPE to TYPE_QUALS, which is a bitmask
6363 of the various TYPE_QUAL values. */
6366 set_type_quals (tree type
, int type_quals
)
6368 TYPE_READONLY (type
) = (type_quals
& TYPE_QUAL_CONST
) != 0;
6369 TYPE_VOLATILE (type
) = (type_quals
& TYPE_QUAL_VOLATILE
) != 0;
6370 TYPE_RESTRICT (type
) = (type_quals
& TYPE_QUAL_RESTRICT
) != 0;
6371 TYPE_ATOMIC (type
) = (type_quals
& TYPE_QUAL_ATOMIC
) != 0;
6372 TYPE_ADDR_SPACE (type
) = DECODE_QUAL_ADDR_SPACE (type_quals
);
6375 /* Returns true iff CAND and BASE have equivalent language-specific
6379 check_lang_type (const_tree cand
, const_tree base
)
6381 if (lang_hooks
.types
.type_hash_eq
== NULL
)
6383 /* type_hash_eq currently only applies to these types. */
6384 if (TREE_CODE (cand
) != FUNCTION_TYPE
6385 && TREE_CODE (cand
) != METHOD_TYPE
)
6387 return lang_hooks
.types
.type_hash_eq (cand
, base
);
6390 /* This function checks to see if TYPE matches the size one of the built-in
6391 atomic types, and returns that core atomic type. */
6394 find_atomic_core_type (const_tree type
)
6396 tree base_atomic_type
;
6398 /* Only handle complete types. */
6399 if (!tree_fits_uhwi_p (TYPE_SIZE (type
)))
6402 switch (tree_to_uhwi (TYPE_SIZE (type
)))
6405 base_atomic_type
= atomicQI_type_node
;
6409 base_atomic_type
= atomicHI_type_node
;
6413 base_atomic_type
= atomicSI_type_node
;
6417 base_atomic_type
= atomicDI_type_node
;
6421 base_atomic_type
= atomicTI_type_node
;
6425 base_atomic_type
= NULL_TREE
;
6428 return base_atomic_type
;
6431 /* Returns true iff unqualified CAND and BASE are equivalent. */
6434 check_base_type (const_tree cand
, const_tree base
)
6436 if (TYPE_NAME (cand
) != TYPE_NAME (base
)
6437 /* Apparently this is needed for Objective-C. */
6438 || TYPE_CONTEXT (cand
) != TYPE_CONTEXT (base
)
6439 || !attribute_list_equal (TYPE_ATTRIBUTES (cand
),
6440 TYPE_ATTRIBUTES (base
)))
6442 /* Check alignment. */
6443 if (TYPE_ALIGN (cand
) == TYPE_ALIGN (base
))
6445 /* Atomic types increase minimal alignment. We must to do so as well
6446 or we get duplicated canonical types. See PR88686. */
6447 if ((TYPE_QUALS (cand
) & TYPE_QUAL_ATOMIC
))
6449 /* See if this object can map to a basic atomic type. */
6450 tree atomic_type
= find_atomic_core_type (cand
);
6451 if (atomic_type
&& TYPE_ALIGN (atomic_type
) == TYPE_ALIGN (cand
))
6457 /* Returns true iff CAND is equivalent to BASE with TYPE_QUALS. */
6460 check_qualified_type (const_tree cand
, const_tree base
, int type_quals
)
6462 return (TYPE_QUALS (cand
) == type_quals
6463 && check_base_type (cand
, base
)
6464 && check_lang_type (cand
, base
));
6467 /* Returns true iff CAND is equivalent to BASE with ALIGN. */
6470 check_aligned_type (const_tree cand
, const_tree base
, unsigned int align
)
6472 return (TYPE_QUALS (cand
) == TYPE_QUALS (base
)
6473 && TYPE_NAME (cand
) == TYPE_NAME (base
)
6474 /* Apparently this is needed for Objective-C. */
6475 && TYPE_CONTEXT (cand
) == TYPE_CONTEXT (base
)
6476 /* Check alignment. */
6477 && TYPE_ALIGN (cand
) == align
6478 && attribute_list_equal (TYPE_ATTRIBUTES (cand
),
6479 TYPE_ATTRIBUTES (base
))
6480 && check_lang_type (cand
, base
));
6483 /* Return a version of the TYPE, qualified as indicated by the
6484 TYPE_QUALS, if one exists. If no qualified version exists yet,
6485 return NULL_TREE. */
6488 get_qualified_type (tree type
, int type_quals
)
6490 if (TYPE_QUALS (type
) == type_quals
)
6493 tree mv
= TYPE_MAIN_VARIANT (type
);
6494 if (check_qualified_type (mv
, type
, type_quals
))
6497 /* Search the chain of variants to see if there is already one there just
6498 like the one we need to have. If so, use that existing one. We must
6499 preserve the TYPE_NAME, since there is code that depends on this. */
6500 for (tree
*tp
= &TYPE_NEXT_VARIANT (mv
); *tp
; tp
= &TYPE_NEXT_VARIANT (*tp
))
6501 if (check_qualified_type (*tp
, type
, type_quals
))
6503 /* Put the found variant at the head of the variant list so
6504 frequently searched variants get found faster. The C++ FE
6505 benefits greatly from this. */
6507 *tp
= TYPE_NEXT_VARIANT (t
);
6508 TYPE_NEXT_VARIANT (t
) = TYPE_NEXT_VARIANT (mv
);
6509 TYPE_NEXT_VARIANT (mv
) = t
;
6516 /* Like get_qualified_type, but creates the type if it does not
6517 exist. This function never returns NULL_TREE. */
6520 build_qualified_type (tree type
, int type_quals MEM_STAT_DECL
)
6524 /* See if we already have the appropriate qualified variant. */
6525 t
= get_qualified_type (type
, type_quals
);
6527 /* If not, build it. */
6530 t
= build_variant_type_copy (type PASS_MEM_STAT
);
6531 set_type_quals (t
, type_quals
);
6533 if (((type_quals
& TYPE_QUAL_ATOMIC
) == TYPE_QUAL_ATOMIC
))
6535 /* See if this object can map to a basic atomic type. */
6536 tree atomic_type
= find_atomic_core_type (type
);
6539 /* Ensure the alignment of this type is compatible with
6540 the required alignment of the atomic type. */
6541 if (TYPE_ALIGN (atomic_type
) > TYPE_ALIGN (t
))
6542 SET_TYPE_ALIGN (t
, TYPE_ALIGN (atomic_type
));
6546 if (TYPE_STRUCTURAL_EQUALITY_P (type
))
6547 /* Propagate structural equality. */
6548 SET_TYPE_STRUCTURAL_EQUALITY (t
);
6549 else if (TYPE_CANONICAL (type
) != type
)
6550 /* Build the underlying canonical type, since it is different
6553 tree c
= build_qualified_type (TYPE_CANONICAL (type
), type_quals
);
6554 TYPE_CANONICAL (t
) = TYPE_CANONICAL (c
);
6557 /* T is its own canonical type. */
6558 TYPE_CANONICAL (t
) = t
;
6565 /* Create a variant of type T with alignment ALIGN. */
6568 build_aligned_type (tree type
, unsigned int align
)
6572 if (TYPE_PACKED (type
)
6573 || TYPE_ALIGN (type
) == align
)
6576 for (t
= TYPE_MAIN_VARIANT (type
); t
; t
= TYPE_NEXT_VARIANT (t
))
6577 if (check_aligned_type (t
, type
, align
))
6580 t
= build_variant_type_copy (type
);
6581 SET_TYPE_ALIGN (t
, align
);
6582 TYPE_USER_ALIGN (t
) = 1;
6587 /* Create a new distinct copy of TYPE. The new type is made its own
6588 MAIN_VARIANT. If TYPE requires structural equality checks, the
6589 resulting type requires structural equality checks; otherwise, its
6590 TYPE_CANONICAL points to itself. */
6593 build_distinct_type_copy (tree type MEM_STAT_DECL
)
6595 tree t
= copy_node (type PASS_MEM_STAT
);
6597 TYPE_POINTER_TO (t
) = 0;
6598 TYPE_REFERENCE_TO (t
) = 0;
6600 /* Set the canonical type either to a new equivalence class, or
6601 propagate the need for structural equality checks. */
6602 if (TYPE_STRUCTURAL_EQUALITY_P (type
))
6603 SET_TYPE_STRUCTURAL_EQUALITY (t
);
6605 TYPE_CANONICAL (t
) = t
;
6607 /* Make it its own variant. */
6608 TYPE_MAIN_VARIANT (t
) = t
;
6609 TYPE_NEXT_VARIANT (t
) = 0;
6611 /* Note that it is now possible for TYPE_MIN_VALUE to be a value
6612 whose TREE_TYPE is not t. This can also happen in the Ada
6613 frontend when using subtypes. */
6618 /* Create a new variant of TYPE, equivalent but distinct. This is so
6619 the caller can modify it. TYPE_CANONICAL for the return type will
6620 be equivalent to TYPE_CANONICAL of TYPE, indicating that the types
6621 are considered equal by the language itself (or that both types
6622 require structural equality checks). */
6625 build_variant_type_copy (tree type MEM_STAT_DECL
)
6627 tree t
, m
= TYPE_MAIN_VARIANT (type
);
6629 t
= build_distinct_type_copy (type PASS_MEM_STAT
);
6631 /* Since we're building a variant, assume that it is a non-semantic
6632 variant. This also propagates TYPE_STRUCTURAL_EQUALITY_P. */
6633 TYPE_CANONICAL (t
) = TYPE_CANONICAL (type
);
6634 /* Type variants have no alias set defined. */
6635 TYPE_ALIAS_SET (t
) = -1;
6637 /* Add the new type to the chain of variants of TYPE. */
6638 TYPE_NEXT_VARIANT (t
) = TYPE_NEXT_VARIANT (m
);
6639 TYPE_NEXT_VARIANT (m
) = t
;
6640 TYPE_MAIN_VARIANT (t
) = m
;
6645 /* Return true if the from tree in both tree maps are equal. */
6648 tree_map_base_eq (const void *va
, const void *vb
)
6650 const struct tree_map_base
*const a
= (const struct tree_map_base
*) va
,
6651 *const b
= (const struct tree_map_base
*) vb
;
6652 return (a
->from
== b
->from
);
6655 /* Hash a from tree in a tree_base_map. */
6658 tree_map_base_hash (const void *item
)
6660 return htab_hash_pointer (((const struct tree_map_base
*)item
)->from
);
6663 /* Return true if this tree map structure is marked for garbage collection
6664 purposes. We simply return true if the from tree is marked, so that this
6665 structure goes away when the from tree goes away. */
6668 tree_map_base_marked_p (const void *p
)
6670 return ggc_marked_p (((const struct tree_map_base
*) p
)->from
);
6673 /* Hash a from tree in a tree_map. */
6676 tree_map_hash (const void *item
)
6678 return (((const struct tree_map
*) item
)->hash
);
6681 /* Hash a from tree in a tree_decl_map. */
6684 tree_decl_map_hash (const void *item
)
6686 return DECL_UID (((const struct tree_decl_map
*) item
)->base
.from
);
6689 /* Return the initialization priority for DECL. */
6692 decl_init_priority_lookup (tree decl
)
6694 symtab_node
*snode
= symtab_node::get (decl
);
6697 return DEFAULT_INIT_PRIORITY
;
6699 snode
->get_init_priority ();
6702 /* Return the finalization priority for DECL. */
6705 decl_fini_priority_lookup (tree decl
)
6707 cgraph_node
*node
= cgraph_node::get (decl
);
6710 return DEFAULT_INIT_PRIORITY
;
6712 node
->get_fini_priority ();
6715 /* Set the initialization priority for DECL to PRIORITY. */
6718 decl_init_priority_insert (tree decl
, priority_type priority
)
6720 struct symtab_node
*snode
;
6722 if (priority
== DEFAULT_INIT_PRIORITY
)
6724 snode
= symtab_node::get (decl
);
6728 else if (VAR_P (decl
))
6729 snode
= varpool_node::get_create (decl
);
6731 snode
= cgraph_node::get_create (decl
);
6732 snode
->set_init_priority (priority
);
6735 /* Set the finalization priority for DECL to PRIORITY. */
6738 decl_fini_priority_insert (tree decl
, priority_type priority
)
6740 struct cgraph_node
*node
;
6742 if (priority
== DEFAULT_INIT_PRIORITY
)
6744 node
= cgraph_node::get (decl
);
6749 node
= cgraph_node::get_create (decl
);
6750 node
->set_fini_priority (priority
);
6753 /* Print out the statistics for the DECL_DEBUG_EXPR hash table. */
6756 print_debug_expr_statistics (void)
6758 fprintf (stderr
, "DECL_DEBUG_EXPR hash: size %ld, %ld elements, %f collisions\n",
6759 (long) debug_expr_for_decl
->size (),
6760 (long) debug_expr_for_decl
->elements (),
6761 debug_expr_for_decl
->collisions ());
6764 /* Print out the statistics for the DECL_VALUE_EXPR hash table. */
6767 print_value_expr_statistics (void)
6769 fprintf (stderr
, "DECL_VALUE_EXPR hash: size %ld, %ld elements, %f collisions\n",
6770 (long) value_expr_for_decl
->size (),
6771 (long) value_expr_for_decl
->elements (),
6772 value_expr_for_decl
->collisions ());
6775 /* Lookup a debug expression for FROM, and return it if we find one. */
6778 decl_debug_expr_lookup (tree from
)
6780 struct tree_decl_map
*h
, in
;
6781 in
.base
.from
= from
;
6783 h
= debug_expr_for_decl
->find_with_hash (&in
, DECL_UID (from
));
6789 /* Insert a mapping FROM->TO in the debug expression hashtable. */
6792 decl_debug_expr_insert (tree from
, tree to
)
6794 struct tree_decl_map
*h
;
6796 h
= ggc_alloc
<tree_decl_map
> ();
6797 h
->base
.from
= from
;
6799 *debug_expr_for_decl
->find_slot_with_hash (h
, DECL_UID (from
), INSERT
) = h
;
6802 /* Lookup a value expression for FROM, and return it if we find one. */
6805 decl_value_expr_lookup (tree from
)
6807 struct tree_decl_map
*h
, in
;
6808 in
.base
.from
= from
;
6810 h
= value_expr_for_decl
->find_with_hash (&in
, DECL_UID (from
));
6816 /* Insert a mapping FROM->TO in the value expression hashtable. */
6819 decl_value_expr_insert (tree from
, tree to
)
6821 struct tree_decl_map
*h
;
6823 h
= ggc_alloc
<tree_decl_map
> ();
6824 h
->base
.from
= from
;
6826 *value_expr_for_decl
->find_slot_with_hash (h
, DECL_UID (from
), INSERT
) = h
;
6829 /* Lookup a vector of debug arguments for FROM, and return it if we
6833 decl_debug_args_lookup (tree from
)
6835 struct tree_vec_map
*h
, in
;
6837 if (!DECL_HAS_DEBUG_ARGS_P (from
))
6839 gcc_checking_assert (debug_args_for_decl
!= NULL
);
6840 in
.base
.from
= from
;
6841 h
= debug_args_for_decl
->find_with_hash (&in
, DECL_UID (from
));
6847 /* Insert a mapping FROM->empty vector of debug arguments in the value
6848 expression hashtable. */
6851 decl_debug_args_insert (tree from
)
6853 struct tree_vec_map
*h
;
6856 if (DECL_HAS_DEBUG_ARGS_P (from
))
6857 return decl_debug_args_lookup (from
);
6858 if (debug_args_for_decl
== NULL
)
6859 debug_args_for_decl
= hash_table
<tree_vec_map_cache_hasher
>::create_ggc (64);
6860 h
= ggc_alloc
<tree_vec_map
> ();
6861 h
->base
.from
= from
;
6863 loc
= debug_args_for_decl
->find_slot_with_hash (h
, DECL_UID (from
), INSERT
);
6865 DECL_HAS_DEBUG_ARGS_P (from
) = 1;
6869 /* Hashing of types so that we don't make duplicates.
6870 The entry point is `type_hash_canon'. */
6872 /* Generate the default hash code for TYPE. This is designed for
6873 speed, rather than maximum entropy. */
6876 type_hash_canon_hash (tree type
)
6878 inchash::hash hstate
;
6880 hstate
.add_int (TREE_CODE (type
));
6882 if (TREE_TYPE (type
))
6883 hstate
.add_object (TYPE_HASH (TREE_TYPE (type
)));
6885 for (tree t
= TYPE_ATTRIBUTES (type
); t
; t
= TREE_CHAIN (t
))
6886 /* Just the identifier is adequate to distinguish. */
6887 hstate
.add_object (IDENTIFIER_HASH_VALUE (get_attribute_name (t
)));
6889 switch (TREE_CODE (type
))
6892 hstate
.add_object (TYPE_HASH (TYPE_METHOD_BASETYPE (type
)));
6895 for (tree t
= TYPE_ARG_TYPES (type
); t
; t
= TREE_CHAIN (t
))
6896 if (TREE_VALUE (t
) != error_mark_node
)
6897 hstate
.add_object (TYPE_HASH (TREE_VALUE (t
)));
6901 hstate
.add_object (TYPE_HASH (TYPE_OFFSET_BASETYPE (type
)));
6906 if (TYPE_DOMAIN (type
))
6907 hstate
.add_object (TYPE_HASH (TYPE_DOMAIN (type
)));
6908 if (!AGGREGATE_TYPE_P (TREE_TYPE (type
)))
6910 unsigned typeless
= TYPE_TYPELESS_STORAGE (type
);
6911 hstate
.add_object (typeless
);
6918 tree t
= TYPE_MAX_VALUE (type
);
6920 t
= TYPE_MIN_VALUE (type
);
6921 for (int i
= 0; i
< TREE_INT_CST_NUNITS (t
); i
++)
6922 hstate
.add_object (TREE_INT_CST_ELT (t
, i
));
6927 case FIXED_POINT_TYPE
:
6929 unsigned prec
= TYPE_PRECISION (type
);
6930 hstate
.add_object (prec
);
6935 hstate
.add_poly_int (TYPE_VECTOR_SUBPARTS (type
));
6942 return hstate
.end ();
6945 /* These are the Hashtable callback functions. */
6947 /* Returns true iff the types are equivalent. */
6950 type_cache_hasher::equal (type_hash
*a
, type_hash
*b
)
6952 /* First test the things that are the same for all types. */
6953 if (a
->hash
!= b
->hash
6954 || TREE_CODE (a
->type
) != TREE_CODE (b
->type
)
6955 || TREE_TYPE (a
->type
) != TREE_TYPE (b
->type
)
6956 || !attribute_list_equal (TYPE_ATTRIBUTES (a
->type
),
6957 TYPE_ATTRIBUTES (b
->type
))
6958 || (TREE_CODE (a
->type
) != COMPLEX_TYPE
6959 && TYPE_NAME (a
->type
) != TYPE_NAME (b
->type
)))
6962 /* Be careful about comparing arrays before and after the element type
6963 has been completed; don't compare TYPE_ALIGN unless both types are
6965 if (COMPLETE_TYPE_P (a
->type
) && COMPLETE_TYPE_P (b
->type
)
6966 && (TYPE_ALIGN (a
->type
) != TYPE_ALIGN (b
->type
)
6967 || TYPE_MODE (a
->type
) != TYPE_MODE (b
->type
)))
6970 switch (TREE_CODE (a
->type
))
6975 case REFERENCE_TYPE
:
6980 return known_eq (TYPE_VECTOR_SUBPARTS (a
->type
),
6981 TYPE_VECTOR_SUBPARTS (b
->type
));
6984 if (TYPE_VALUES (a
->type
) != TYPE_VALUES (b
->type
)
6985 && !(TYPE_VALUES (a
->type
)
6986 && TREE_CODE (TYPE_VALUES (a
->type
)) == TREE_LIST
6987 && TYPE_VALUES (b
->type
)
6988 && TREE_CODE (TYPE_VALUES (b
->type
)) == TREE_LIST
6989 && type_list_equal (TYPE_VALUES (a
->type
),
6990 TYPE_VALUES (b
->type
))))
6998 if (TYPE_PRECISION (a
->type
) != TYPE_PRECISION (b
->type
))
7000 return ((TYPE_MAX_VALUE (a
->type
) == TYPE_MAX_VALUE (b
->type
)
7001 || tree_int_cst_equal (TYPE_MAX_VALUE (a
->type
),
7002 TYPE_MAX_VALUE (b
->type
)))
7003 && (TYPE_MIN_VALUE (a
->type
) == TYPE_MIN_VALUE (b
->type
)
7004 || tree_int_cst_equal (TYPE_MIN_VALUE (a
->type
),
7005 TYPE_MIN_VALUE (b
->type
))));
7007 case FIXED_POINT_TYPE
:
7008 return TYPE_SATURATING (a
->type
) == TYPE_SATURATING (b
->type
);
7011 return TYPE_OFFSET_BASETYPE (a
->type
) == TYPE_OFFSET_BASETYPE (b
->type
);
7014 if (TYPE_METHOD_BASETYPE (a
->type
) == TYPE_METHOD_BASETYPE (b
->type
)
7015 && (TYPE_ARG_TYPES (a
->type
) == TYPE_ARG_TYPES (b
->type
)
7016 || (TYPE_ARG_TYPES (a
->type
)
7017 && TREE_CODE (TYPE_ARG_TYPES (a
->type
)) == TREE_LIST
7018 && TYPE_ARG_TYPES (b
->type
)
7019 && TREE_CODE (TYPE_ARG_TYPES (b
->type
)) == TREE_LIST
7020 && type_list_equal (TYPE_ARG_TYPES (a
->type
),
7021 TYPE_ARG_TYPES (b
->type
)))))
7025 /* Don't compare TYPE_TYPELESS_STORAGE flag on aggregates,
7026 where the flag should be inherited from the element type
7027 and can change after ARRAY_TYPEs are created; on non-aggregates
7028 compare it and hash it, scalars will never have that flag set
7029 and we need to differentiate between arrays created by different
7030 front-ends or middle-end created arrays. */
7031 return (TYPE_DOMAIN (a
->type
) == TYPE_DOMAIN (b
->type
)
7032 && (AGGREGATE_TYPE_P (TREE_TYPE (a
->type
))
7033 || (TYPE_TYPELESS_STORAGE (a
->type
)
7034 == TYPE_TYPELESS_STORAGE (b
->type
))));
7038 case QUAL_UNION_TYPE
:
7039 return (TYPE_FIELDS (a
->type
) == TYPE_FIELDS (b
->type
)
7040 || (TYPE_FIELDS (a
->type
)
7041 && TREE_CODE (TYPE_FIELDS (a
->type
)) == TREE_LIST
7042 && TYPE_FIELDS (b
->type
)
7043 && TREE_CODE (TYPE_FIELDS (b
->type
)) == TREE_LIST
7044 && type_list_equal (TYPE_FIELDS (a
->type
),
7045 TYPE_FIELDS (b
->type
))));
7048 if (TYPE_ARG_TYPES (a
->type
) == TYPE_ARG_TYPES (b
->type
)
7049 || (TYPE_ARG_TYPES (a
->type
)
7050 && TREE_CODE (TYPE_ARG_TYPES (a
->type
)) == TREE_LIST
7051 && TYPE_ARG_TYPES (b
->type
)
7052 && TREE_CODE (TYPE_ARG_TYPES (b
->type
)) == TREE_LIST
7053 && type_list_equal (TYPE_ARG_TYPES (a
->type
),
7054 TYPE_ARG_TYPES (b
->type
))))
7062 if (lang_hooks
.types
.type_hash_eq
!= NULL
)
7063 return lang_hooks
.types
.type_hash_eq (a
->type
, b
->type
);
7068 /* Given TYPE, and HASHCODE its hash code, return the canonical
7069 object for an identical type if one already exists.
7070 Otherwise, return TYPE, and record it as the canonical object.
7072 To use this function, first create a type of the sort you want.
7073 Then compute its hash code from the fields of the type that
7074 make it different from other similar types.
7075 Then call this function and use the value. */
7078 type_hash_canon (unsigned int hashcode
, tree type
)
7083 /* The hash table only contains main variants, so ensure that's what we're
7085 gcc_assert (TYPE_MAIN_VARIANT (type
) == type
);
7087 /* The TYPE_ALIGN field of a type is set by layout_type(), so we
7088 must call that routine before comparing TYPE_ALIGNs. */
7094 loc
= type_hash_table
->find_slot_with_hash (&in
, hashcode
, INSERT
);
7097 tree t1
= ((type_hash
*) *loc
)->type
;
7098 gcc_assert (TYPE_MAIN_VARIANT (t1
) == t1
7100 if (TYPE_UID (type
) + 1 == next_type_uid
)
7102 /* Free also min/max values and the cache for integer
7103 types. This can't be done in free_node, as LTO frees
7104 those on its own. */
7105 if (TREE_CODE (type
) == INTEGER_TYPE
)
7107 if (TYPE_MIN_VALUE (type
)
7108 && TREE_TYPE (TYPE_MIN_VALUE (type
)) == type
)
7110 /* Zero is always in TYPE_CACHED_VALUES. */
7111 if (! TYPE_UNSIGNED (type
))
7112 int_cst_hash_table
->remove_elt (TYPE_MIN_VALUE (type
));
7113 ggc_free (TYPE_MIN_VALUE (type
));
7115 if (TYPE_MAX_VALUE (type
)
7116 && TREE_TYPE (TYPE_MAX_VALUE (type
)) == type
)
7118 int_cst_hash_table
->remove_elt (TYPE_MAX_VALUE (type
));
7119 ggc_free (TYPE_MAX_VALUE (type
));
7121 if (TYPE_CACHED_VALUES_P (type
))
7122 ggc_free (TYPE_CACHED_VALUES (type
));
7129 struct type_hash
*h
;
7131 h
= ggc_alloc
<type_hash
> ();
7141 print_type_hash_statistics (void)
7143 fprintf (stderr
, "Type hash: size %ld, %ld elements, %f collisions\n",
7144 (long) type_hash_table
->size (),
7145 (long) type_hash_table
->elements (),
7146 type_hash_table
->collisions ());
7149 /* Given two lists of types
7150 (chains of TREE_LIST nodes with types in the TREE_VALUE slots)
7151 return 1 if the lists contain the same types in the same order.
7152 Also, the TREE_PURPOSEs must match. */
7155 type_list_equal (const_tree l1
, const_tree l2
)
7159 for (t1
= l1
, t2
= l2
; t1
&& t2
; t1
= TREE_CHAIN (t1
), t2
= TREE_CHAIN (t2
))
7160 if (TREE_VALUE (t1
) != TREE_VALUE (t2
)
7161 || (TREE_PURPOSE (t1
) != TREE_PURPOSE (t2
)
7162 && ! (1 == simple_cst_equal (TREE_PURPOSE (t1
), TREE_PURPOSE (t2
))
7163 && (TREE_TYPE (TREE_PURPOSE (t1
))
7164 == TREE_TYPE (TREE_PURPOSE (t2
))))))
7170 /* Returns the number of arguments to the FUNCTION_TYPE or METHOD_TYPE
7171 given by TYPE. If the argument list accepts variable arguments,
7172 then this function counts only the ordinary arguments. */
7175 type_num_arguments (const_tree fntype
)
7179 for (tree t
= TYPE_ARG_TYPES (fntype
); t
; t
= TREE_CHAIN (t
))
7180 /* If the function does not take a variable number of arguments,
7181 the last element in the list will have type `void'. */
7182 if (VOID_TYPE_P (TREE_VALUE (t
)))
7190 /* Return the type of the function TYPE's argument ARGNO if known.
7191 For vararg function's where ARGNO refers to one of the variadic
7192 arguments return null. Otherwise, return a void_type_node for
7193 out-of-bounds ARGNO. */
7196 type_argument_type (const_tree fntype
, unsigned argno
)
7198 /* Treat zero the same as an out-of-bounds argument number. */
7200 return void_type_node
;
7202 function_args_iterator iter
;
7206 FOREACH_FUNCTION_ARGS (fntype
, argtype
, iter
)
7208 /* A vararg function's argument list ends in a null. Otherwise,
7209 an ordinary function's argument list ends with void. Return
7210 null if ARGNO refers to a vararg argument, void_type_node if
7211 it's out of bounds, and the formal argument type otherwise. */
7215 if (i
== argno
|| VOID_TYPE_P (argtype
))
7224 /* Nonzero if integer constants T1 and T2
7225 represent the same constant value. */
7228 tree_int_cst_equal (const_tree t1
, const_tree t2
)
7233 if (t1
== 0 || t2
== 0)
7236 STRIP_ANY_LOCATION_WRAPPER (t1
);
7237 STRIP_ANY_LOCATION_WRAPPER (t2
);
7239 if (TREE_CODE (t1
) == INTEGER_CST
7240 && TREE_CODE (t2
) == INTEGER_CST
7241 && wi::to_widest (t1
) == wi::to_widest (t2
))
7247 /* Return true if T is an INTEGER_CST whose numerical value (extended
7248 according to TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. */
7251 tree_fits_shwi_p (const_tree t
)
7253 return (t
!= NULL_TREE
7254 && TREE_CODE (t
) == INTEGER_CST
7255 && wi::fits_shwi_p (wi::to_widest (t
)));
7258 /* Return true if T is an INTEGER_CST or POLY_INT_CST whose numerical
7259 value (extended according to TYPE_UNSIGNED) fits in a poly_int64. */
7262 tree_fits_poly_int64_p (const_tree t
)
7266 if (POLY_INT_CST_P (t
))
7268 for (unsigned int i
= 0; i
< NUM_POLY_INT_COEFFS
; i
++)
7269 if (!wi::fits_shwi_p (wi::to_wide (POLY_INT_CST_COEFF (t
, i
))))
7273 return (TREE_CODE (t
) == INTEGER_CST
7274 && wi::fits_shwi_p (wi::to_widest (t
)));
7277 /* Return true if T is an INTEGER_CST whose numerical value (extended
7278 according to TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. */
7281 tree_fits_uhwi_p (const_tree t
)
7283 return (t
!= NULL_TREE
7284 && TREE_CODE (t
) == INTEGER_CST
7285 && wi::fits_uhwi_p (wi::to_widest (t
)));
7288 /* Return true if T is an INTEGER_CST or POLY_INT_CST whose numerical
7289 value (extended according to TYPE_UNSIGNED) fits in a poly_uint64. */
7292 tree_fits_poly_uint64_p (const_tree t
)
7296 if (POLY_INT_CST_P (t
))
7298 for (unsigned int i
= 0; i
< NUM_POLY_INT_COEFFS
; i
++)
7299 if (!wi::fits_uhwi_p (wi::to_widest (POLY_INT_CST_COEFF (t
, i
))))
7303 return (TREE_CODE (t
) == INTEGER_CST
7304 && wi::fits_uhwi_p (wi::to_widest (t
)));
7307 /* T is an INTEGER_CST whose numerical value (extended according to
7308 TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. Return that
7312 tree_to_shwi (const_tree t
)
7314 gcc_assert (tree_fits_shwi_p (t
));
7315 return TREE_INT_CST_LOW (t
);
7318 /* T is an INTEGER_CST whose numerical value (extended according to
7319 TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. Return that
7322 unsigned HOST_WIDE_INT
7323 tree_to_uhwi (const_tree t
)
7325 gcc_assert (tree_fits_uhwi_p (t
));
7326 return TREE_INT_CST_LOW (t
);
7329 /* Return the most significant (sign) bit of T. */
7332 tree_int_cst_sign_bit (const_tree t
)
7334 unsigned bitno
= TYPE_PRECISION (TREE_TYPE (t
)) - 1;
7336 return wi::extract_uhwi (wi::to_wide (t
), bitno
, 1);
7339 /* Return an indication of the sign of the integer constant T.
7340 The return value is -1 if T < 0, 0 if T == 0, and 1 if T > 0.
7341 Note that -1 will never be returned if T's type is unsigned. */
7344 tree_int_cst_sgn (const_tree t
)
7346 if (wi::to_wide (t
) == 0)
7348 else if (TYPE_UNSIGNED (TREE_TYPE (t
)))
7350 else if (wi::neg_p (wi::to_wide (t
)))
7356 /* Return the minimum number of bits needed to represent VALUE in a
7357 signed or unsigned type, UNSIGNEDP says which. */
7360 tree_int_cst_min_precision (tree value
, signop sgn
)
7362 /* If the value is negative, compute its negative minus 1. The latter
7363 adjustment is because the absolute value of the largest negative value
7364 is one larger than the largest positive value. This is equivalent to
7365 a bit-wise negation, so use that operation instead. */
7367 if (tree_int_cst_sgn (value
) < 0)
7368 value
= fold_build1 (BIT_NOT_EXPR
, TREE_TYPE (value
), value
);
7370 /* Return the number of bits needed, taking into account the fact
7371 that we need one more bit for a signed than unsigned type.
7372 If value is 0 or -1, the minimum precision is 1 no matter
7373 whether unsignedp is true or false. */
7375 if (integer_zerop (value
))
7378 return tree_floor_log2 (value
) + 1 + (sgn
== SIGNED
? 1 : 0) ;
7381 /* Return truthvalue of whether T1 is the same tree structure as T2.
7382 Return 1 if they are the same.
7383 Return 0 if they are understandably different.
7384 Return -1 if either contains tree structure not understood by
7388 simple_cst_equal (const_tree t1
, const_tree t2
)
7390 enum tree_code code1
, code2
;
7396 if (t1
== 0 || t2
== 0)
7399 /* For location wrappers to be the same, they must be at the same
7400 source location (and wrap the same thing). */
7401 if (location_wrapper_p (t1
) && location_wrapper_p (t2
))
7403 if (EXPR_LOCATION (t1
) != EXPR_LOCATION (t2
))
7405 return simple_cst_equal (TREE_OPERAND (t1
, 0), TREE_OPERAND (t2
, 0));
7408 code1
= TREE_CODE (t1
);
7409 code2
= TREE_CODE (t2
);
7411 if (CONVERT_EXPR_CODE_P (code1
) || code1
== NON_LVALUE_EXPR
)
7413 if (CONVERT_EXPR_CODE_P (code2
)
7414 || code2
== NON_LVALUE_EXPR
)
7415 return simple_cst_equal (TREE_OPERAND (t1
, 0), TREE_OPERAND (t2
, 0));
7417 return simple_cst_equal (TREE_OPERAND (t1
, 0), t2
);
7420 else if (CONVERT_EXPR_CODE_P (code2
)
7421 || code2
== NON_LVALUE_EXPR
)
7422 return simple_cst_equal (t1
, TREE_OPERAND (t2
, 0));
7430 return wi::to_widest (t1
) == wi::to_widest (t2
);
7433 return real_identical (&TREE_REAL_CST (t1
), &TREE_REAL_CST (t2
));
7436 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1
), TREE_FIXED_CST (t2
));
7439 return (TREE_STRING_LENGTH (t1
) == TREE_STRING_LENGTH (t2
)
7440 && ! memcmp (TREE_STRING_POINTER (t1
), TREE_STRING_POINTER (t2
),
7441 TREE_STRING_LENGTH (t1
)));
7445 unsigned HOST_WIDE_INT idx
;
7446 vec
<constructor_elt
, va_gc
> *v1
= CONSTRUCTOR_ELTS (t1
);
7447 vec
<constructor_elt
, va_gc
> *v2
= CONSTRUCTOR_ELTS (t2
);
7449 if (vec_safe_length (v1
) != vec_safe_length (v2
))
7452 for (idx
= 0; idx
< vec_safe_length (v1
); ++idx
)
7453 /* ??? Should we handle also fields here? */
7454 if (!simple_cst_equal ((*v1
)[idx
].value
, (*v2
)[idx
].value
))
7460 return simple_cst_equal (TREE_OPERAND (t1
, 0), TREE_OPERAND (t2
, 0));
7463 cmp
= simple_cst_equal (CALL_EXPR_FN (t1
), CALL_EXPR_FN (t2
));
7466 if (call_expr_nargs (t1
) != call_expr_nargs (t2
))
7469 const_tree arg1
, arg2
;
7470 const_call_expr_arg_iterator iter1
, iter2
;
7471 for (arg1
= first_const_call_expr_arg (t1
, &iter1
),
7472 arg2
= first_const_call_expr_arg (t2
, &iter2
);
7474 arg1
= next_const_call_expr_arg (&iter1
),
7475 arg2
= next_const_call_expr_arg (&iter2
))
7477 cmp
= simple_cst_equal (arg1
, arg2
);
7481 return arg1
== arg2
;
7485 /* Special case: if either target is an unallocated VAR_DECL,
7486 it means that it's going to be unified with whatever the
7487 TARGET_EXPR is really supposed to initialize, so treat it
7488 as being equivalent to anything. */
7489 if ((TREE_CODE (TREE_OPERAND (t1
, 0)) == VAR_DECL
7490 && DECL_NAME (TREE_OPERAND (t1
, 0)) == NULL_TREE
7491 && !DECL_RTL_SET_P (TREE_OPERAND (t1
, 0)))
7492 || (TREE_CODE (TREE_OPERAND (t2
, 0)) == VAR_DECL
7493 && DECL_NAME (TREE_OPERAND (t2
, 0)) == NULL_TREE
7494 && !DECL_RTL_SET_P (TREE_OPERAND (t2
, 0))))
7497 cmp
= simple_cst_equal (TREE_OPERAND (t1
, 0), TREE_OPERAND (t2
, 0));
7502 return simple_cst_equal (TREE_OPERAND (t1
, 1), TREE_OPERAND (t2
, 1));
7504 case WITH_CLEANUP_EXPR
:
7505 cmp
= simple_cst_equal (TREE_OPERAND (t1
, 0), TREE_OPERAND (t2
, 0));
7509 return simple_cst_equal (TREE_OPERAND (t1
, 1), TREE_OPERAND (t1
, 1));
7512 if (TREE_OPERAND (t1
, 1) == TREE_OPERAND (t2
, 1))
7513 return simple_cst_equal (TREE_OPERAND (t1
, 0), TREE_OPERAND (t2
, 0));
7524 if (POLY_INT_CST_P (t1
))
7525 /* A false return means maybe_ne rather than known_ne. */
7526 return known_eq (poly_widest_int::from (poly_int_cst_value (t1
),
7527 TYPE_SIGN (TREE_TYPE (t1
))),
7528 poly_widest_int::from (poly_int_cst_value (t2
),
7529 TYPE_SIGN (TREE_TYPE (t2
))));
7533 /* This general rule works for most tree codes. All exceptions should be
7534 handled above. If this is a language-specific tree code, we can't
7535 trust what might be in the operand, so say we don't know
7537 if ((int) code1
>= (int) LAST_AND_UNUSED_TREE_CODE
)
7540 switch (TREE_CODE_CLASS (code1
))
7544 case tcc_comparison
:
7545 case tcc_expression
:
7549 for (i
= 0; i
< TREE_CODE_LENGTH (code1
); i
++)
7551 cmp
= simple_cst_equal (TREE_OPERAND (t1
, i
), TREE_OPERAND (t2
, i
));
7563 /* Compare the value of T, an INTEGER_CST, with U, an unsigned integer value.
7564 Return -1, 0, or 1 if the value of T is less than, equal to, or greater
7565 than U, respectively. */
7568 compare_tree_int (const_tree t
, unsigned HOST_WIDE_INT u
)
7570 if (tree_int_cst_sgn (t
) < 0)
7572 else if (!tree_fits_uhwi_p (t
))
7574 else if (TREE_INT_CST_LOW (t
) == u
)
7576 else if (TREE_INT_CST_LOW (t
) < u
)
7582 /* Return true if SIZE represents a constant size that is in bounds of
7583 what the middle-end and the backend accepts (covering not more than
7584 half of the address-space).
7585 When PERR is non-null, set *PERR on failure to the description of
7586 why SIZE is not valid. */
7589 valid_constant_size_p (const_tree size
, cst_size_error
*perr
/* = NULL */)
7591 if (POLY_INT_CST_P (size
))
7593 if (TREE_OVERFLOW (size
))
7595 for (unsigned int i
= 0; i
< NUM_POLY_INT_COEFFS
; ++i
)
7596 if (!valid_constant_size_p (POLY_INT_CST_COEFF (size
, i
)))
7601 cst_size_error error
;
7605 if (TREE_CODE (size
) != INTEGER_CST
)
7607 *perr
= cst_size_not_constant
;
7611 if (TREE_OVERFLOW_P (size
))
7613 *perr
= cst_size_overflow
;
7617 if (tree_int_cst_sgn (size
) < 0)
7619 *perr
= cst_size_negative
;
7622 if (!tree_fits_uhwi_p (size
)
7623 || (wi::to_widest (TYPE_MAX_VALUE (sizetype
))
7624 < wi::to_widest (size
) * 2))
7626 *perr
= cst_size_too_big
;
7633 /* Return the precision of the type, or for a complex or vector type the
7634 precision of the type of its elements. */
7637 element_precision (const_tree type
)
7640 type
= TREE_TYPE (type
);
7641 enum tree_code code
= TREE_CODE (type
);
7642 if (code
== COMPLEX_TYPE
|| code
== VECTOR_TYPE
)
7643 type
= TREE_TYPE (type
);
7645 return TYPE_PRECISION (type
);
7648 /* Return true if CODE represents an associative tree code. Otherwise
7651 associative_tree_code (enum tree_code code
)
7670 /* Return true if CODE represents a commutative tree code. Otherwise
7673 commutative_tree_code (enum tree_code code
)
7679 case MULT_HIGHPART_EXPR
:
7687 case UNORDERED_EXPR
:
7691 case TRUTH_AND_EXPR
:
7692 case TRUTH_XOR_EXPR
:
7694 case WIDEN_MULT_EXPR
:
7695 case VEC_WIDEN_MULT_HI_EXPR
:
7696 case VEC_WIDEN_MULT_LO_EXPR
:
7697 case VEC_WIDEN_MULT_EVEN_EXPR
:
7698 case VEC_WIDEN_MULT_ODD_EXPR
:
7707 /* Return true if CODE represents a ternary tree code for which the
7708 first two operands are commutative. Otherwise return false. */
7710 commutative_ternary_tree_code (enum tree_code code
)
7714 case WIDEN_MULT_PLUS_EXPR
:
7715 case WIDEN_MULT_MINUS_EXPR
:
7725 /* Returns true if CODE can overflow. */
7728 operation_can_overflow (enum tree_code code
)
7736 /* Can overflow in various ways. */
7738 case TRUNC_DIV_EXPR
:
7739 case EXACT_DIV_EXPR
:
7740 case FLOOR_DIV_EXPR
:
7742 /* For INT_MIN / -1. */
7749 /* These operators cannot overflow. */
7754 /* Returns true if CODE operating on operands of type TYPE doesn't overflow, or
7755 ftrapv doesn't generate trapping insns for CODE. */
7758 operation_no_trapping_overflow (tree type
, enum tree_code code
)
7760 gcc_checking_assert (ANY_INTEGRAL_TYPE_P (type
));
7762 /* We don't generate instructions that trap on overflow for complex or vector
7764 if (!INTEGRAL_TYPE_P (type
))
7767 if (!TYPE_OVERFLOW_TRAPS (type
))
7777 /* These operators can overflow, and -ftrapv generates trapping code for
7780 case TRUNC_DIV_EXPR
:
7781 case EXACT_DIV_EXPR
:
7782 case FLOOR_DIV_EXPR
:
7785 /* These operators can overflow, but -ftrapv does not generate trapping
7789 /* These operators cannot overflow. */
7794 /* Constructors for pointer, array and function types.
7795 (RECORD_TYPE, UNION_TYPE and ENUMERAL_TYPE nodes are
7796 constructed by language-dependent code, not here.) */
7798 /* Construct, lay out and return the type of pointers to TO_TYPE with
7799 mode MODE. If CAN_ALIAS_ALL is TRUE, indicate this type can
7800 reference all of memory. If such a type has already been
7801 constructed, reuse it. */
7804 build_pointer_type_for_mode (tree to_type
, machine_mode mode
,
7808 bool could_alias
= can_alias_all
;
7810 if (to_type
== error_mark_node
)
7811 return error_mark_node
;
7813 /* If the pointed-to type has the may_alias attribute set, force
7814 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7815 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type
)))
7816 can_alias_all
= true;
7818 /* In some cases, languages will have things that aren't a POINTER_TYPE
7819 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_POINTER_TO.
7820 In that case, return that type without regard to the rest of our
7823 ??? This is a kludge, but consistent with the way this function has
7824 always operated and there doesn't seem to be a good way to avoid this
7826 if (TYPE_POINTER_TO (to_type
) != 0
7827 && TREE_CODE (TYPE_POINTER_TO (to_type
)) != POINTER_TYPE
)
7828 return TYPE_POINTER_TO (to_type
);
7830 /* First, if we already have a type for pointers to TO_TYPE and it's
7831 the proper mode, use it. */
7832 for (t
= TYPE_POINTER_TO (to_type
); t
; t
= TYPE_NEXT_PTR_TO (t
))
7833 if (TYPE_MODE (t
) == mode
&& TYPE_REF_CAN_ALIAS_ALL (t
) == can_alias_all
)
7836 t
= make_node (POINTER_TYPE
);
7838 TREE_TYPE (t
) = to_type
;
7839 SET_TYPE_MODE (t
, mode
);
7840 TYPE_REF_CAN_ALIAS_ALL (t
) = can_alias_all
;
7841 TYPE_NEXT_PTR_TO (t
) = TYPE_POINTER_TO (to_type
);
7842 TYPE_POINTER_TO (to_type
) = t
;
7844 /* During LTO we do not set TYPE_CANONICAL of pointers and references. */
7845 if (TYPE_STRUCTURAL_EQUALITY_P (to_type
) || in_lto_p
)
7846 SET_TYPE_STRUCTURAL_EQUALITY (t
);
7847 else if (TYPE_CANONICAL (to_type
) != to_type
|| could_alias
)
7849 = build_pointer_type_for_mode (TYPE_CANONICAL (to_type
),
7852 /* Lay out the type. This function has many callers that are concerned
7853 with expression-construction, and this simplifies them all. */
7859 /* By default build pointers in ptr_mode. */
7862 build_pointer_type (tree to_type
)
7864 addr_space_t as
= to_type
== error_mark_node
? ADDR_SPACE_GENERIC
7865 : TYPE_ADDR_SPACE (to_type
);
7866 machine_mode pointer_mode
= targetm
.addr_space
.pointer_mode (as
);
7867 return build_pointer_type_for_mode (to_type
, pointer_mode
, false);
7870 /* Same as build_pointer_type_for_mode, but for REFERENCE_TYPE. */
7873 build_reference_type_for_mode (tree to_type
, machine_mode mode
,
7877 bool could_alias
= can_alias_all
;
7879 if (to_type
== error_mark_node
)
7880 return error_mark_node
;
7882 /* If the pointed-to type has the may_alias attribute set, force
7883 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7884 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type
)))
7885 can_alias_all
= true;
7887 /* In some cases, languages will have things that aren't a REFERENCE_TYPE
7888 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_REFERENCE_TO.
7889 In that case, return that type without regard to the rest of our
7892 ??? This is a kludge, but consistent with the way this function has
7893 always operated and there doesn't seem to be a good way to avoid this
7895 if (TYPE_REFERENCE_TO (to_type
) != 0
7896 && TREE_CODE (TYPE_REFERENCE_TO (to_type
)) != REFERENCE_TYPE
)
7897 return TYPE_REFERENCE_TO (to_type
);
7899 /* First, if we already have a type for pointers to TO_TYPE and it's
7900 the proper mode, use it. */
7901 for (t
= TYPE_REFERENCE_TO (to_type
); t
; t
= TYPE_NEXT_REF_TO (t
))
7902 if (TYPE_MODE (t
) == mode
&& TYPE_REF_CAN_ALIAS_ALL (t
) == can_alias_all
)
7905 t
= make_node (REFERENCE_TYPE
);
7907 TREE_TYPE (t
) = to_type
;
7908 SET_TYPE_MODE (t
, mode
);
7909 TYPE_REF_CAN_ALIAS_ALL (t
) = can_alias_all
;
7910 TYPE_NEXT_REF_TO (t
) = TYPE_REFERENCE_TO (to_type
);
7911 TYPE_REFERENCE_TO (to_type
) = t
;
7913 /* During LTO we do not set TYPE_CANONICAL of pointers and references. */
7914 if (TYPE_STRUCTURAL_EQUALITY_P (to_type
) || in_lto_p
)
7915 SET_TYPE_STRUCTURAL_EQUALITY (t
);
7916 else if (TYPE_CANONICAL (to_type
) != to_type
|| could_alias
)
7918 = build_reference_type_for_mode (TYPE_CANONICAL (to_type
),
7927 /* Build the node for the type of references-to-TO_TYPE by default
7931 build_reference_type (tree to_type
)
7933 addr_space_t as
= to_type
== error_mark_node
? ADDR_SPACE_GENERIC
7934 : TYPE_ADDR_SPACE (to_type
);
7935 machine_mode pointer_mode
= targetm
.addr_space
.pointer_mode (as
);
7936 return build_reference_type_for_mode (to_type
, pointer_mode
, false);
7939 #define MAX_INT_CACHED_PREC \
7940 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
7941 static GTY(()) tree nonstandard_integer_type_cache
[2 * MAX_INT_CACHED_PREC
+ 2];
7943 /* Builds a signed or unsigned integer type of precision PRECISION.
7944 Used for C bitfields whose precision does not match that of
7945 built-in target types. */
7947 build_nonstandard_integer_type (unsigned HOST_WIDE_INT precision
,
7953 unsignedp
= MAX_INT_CACHED_PREC
+ 1;
7955 if (precision
<= MAX_INT_CACHED_PREC
)
7957 itype
= nonstandard_integer_type_cache
[precision
+ unsignedp
];
7962 itype
= make_node (INTEGER_TYPE
);
7963 TYPE_PRECISION (itype
) = precision
;
7966 fixup_unsigned_type (itype
);
7968 fixup_signed_type (itype
);
7970 inchash::hash hstate
;
7971 inchash::add_expr (TYPE_MAX_VALUE (itype
), hstate
);
7972 ret
= type_hash_canon (hstate
.end (), itype
);
7973 if (precision
<= MAX_INT_CACHED_PREC
)
7974 nonstandard_integer_type_cache
[precision
+ unsignedp
] = ret
;
7979 #define MAX_BOOL_CACHED_PREC \
7980 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
7981 static GTY(()) tree nonstandard_boolean_type_cache
[MAX_BOOL_CACHED_PREC
+ 1];
7983 /* Builds a boolean type of precision PRECISION.
7984 Used for boolean vectors to choose proper vector element size. */
7986 build_nonstandard_boolean_type (unsigned HOST_WIDE_INT precision
)
7990 if (precision
<= MAX_BOOL_CACHED_PREC
)
7992 type
= nonstandard_boolean_type_cache
[precision
];
7997 type
= make_node (BOOLEAN_TYPE
);
7998 TYPE_PRECISION (type
) = precision
;
7999 fixup_signed_type (type
);
8001 if (precision
<= MAX_INT_CACHED_PREC
)
8002 nonstandard_boolean_type_cache
[precision
] = type
;
8007 /* Create a range of some discrete type TYPE (an INTEGER_TYPE, ENUMERAL_TYPE
8008 or BOOLEAN_TYPE) with low bound LOWVAL and high bound HIGHVAL. If SHARED
8009 is true, reuse such a type that has already been constructed. */
8012 build_range_type_1 (tree type
, tree lowval
, tree highval
, bool shared
)
8014 tree itype
= make_node (INTEGER_TYPE
);
8016 TREE_TYPE (itype
) = type
;
8018 TYPE_MIN_VALUE (itype
) = fold_convert (type
, lowval
);
8019 TYPE_MAX_VALUE (itype
) = highval
? fold_convert (type
, highval
) : NULL
;
8021 TYPE_PRECISION (itype
) = TYPE_PRECISION (type
);
8022 SET_TYPE_MODE (itype
, TYPE_MODE (type
));
8023 TYPE_SIZE (itype
) = TYPE_SIZE (type
);
8024 TYPE_SIZE_UNIT (itype
) = TYPE_SIZE_UNIT (type
);
8025 SET_TYPE_ALIGN (itype
, TYPE_ALIGN (type
));
8026 TYPE_USER_ALIGN (itype
) = TYPE_USER_ALIGN (type
);
8027 SET_TYPE_WARN_IF_NOT_ALIGN (itype
, TYPE_WARN_IF_NOT_ALIGN (type
));
8032 if ((TYPE_MIN_VALUE (itype
)
8033 && TREE_CODE (TYPE_MIN_VALUE (itype
)) != INTEGER_CST
)
8034 || (TYPE_MAX_VALUE (itype
)
8035 && TREE_CODE (TYPE_MAX_VALUE (itype
)) != INTEGER_CST
))
8037 /* Since we cannot reliably merge this type, we need to compare it using
8038 structural equality checks. */
8039 SET_TYPE_STRUCTURAL_EQUALITY (itype
);
8043 hashval_t hash
= type_hash_canon_hash (itype
);
8044 itype
= type_hash_canon (hash
, itype
);
8049 /* Wrapper around build_range_type_1 with SHARED set to true. */
8052 build_range_type (tree type
, tree lowval
, tree highval
)
8054 return build_range_type_1 (type
, lowval
, highval
, true);
8057 /* Wrapper around build_range_type_1 with SHARED set to false. */
8060 build_nonshared_range_type (tree type
, tree lowval
, tree highval
)
8062 return build_range_type_1 (type
, lowval
, highval
, false);
8065 /* Create a type of integers to be the TYPE_DOMAIN of an ARRAY_TYPE.
8066 MAXVAL should be the maximum value in the domain
8067 (one less than the length of the array).
8069 The maximum value that MAXVAL can have is INT_MAX for a HOST_WIDE_INT.
8070 We don't enforce this limit, that is up to caller (e.g. language front end).
8071 The limit exists because the result is a signed type and we don't handle
8072 sizes that use more than one HOST_WIDE_INT. */
8075 build_index_type (tree maxval
)
8077 return build_range_type (sizetype
, size_zero_node
, maxval
);
8080 /* Return true if the debug information for TYPE, a subtype, should be emitted
8081 as a subrange type. If so, set LOWVAL to the low bound and HIGHVAL to the
8082 high bound, respectively. Sometimes doing so unnecessarily obfuscates the
8083 debug info and doesn't reflect the source code. */
8086 subrange_type_for_debug_p (const_tree type
, tree
*lowval
, tree
*highval
)
8088 tree base_type
= TREE_TYPE (type
), low
, high
;
8090 /* Subrange types have a base type which is an integral type. */
8091 if (!INTEGRAL_TYPE_P (base_type
))
8094 /* Get the real bounds of the subtype. */
8095 if (lang_hooks
.types
.get_subrange_bounds
)
8096 lang_hooks
.types
.get_subrange_bounds (type
, &low
, &high
);
8099 low
= TYPE_MIN_VALUE (type
);
8100 high
= TYPE_MAX_VALUE (type
);
8103 /* If the type and its base type have the same representation and the same
8104 name, then the type is not a subrange but a copy of the base type. */
8105 if ((TREE_CODE (base_type
) == INTEGER_TYPE
8106 || TREE_CODE (base_type
) == BOOLEAN_TYPE
)
8107 && int_size_in_bytes (type
) == int_size_in_bytes (base_type
)
8108 && tree_int_cst_equal (low
, TYPE_MIN_VALUE (base_type
))
8109 && tree_int_cst_equal (high
, TYPE_MAX_VALUE (base_type
))
8110 && TYPE_IDENTIFIER (type
) == TYPE_IDENTIFIER (base_type
))
8120 /* Construct, lay out and return the type of arrays of elements with ELT_TYPE
8121 and number of elements specified by the range of values of INDEX_TYPE.
8122 If TYPELESS_STORAGE is true, TYPE_TYPELESS_STORAGE flag is set on the type.
8123 If SHARED is true, reuse such a type that has already been constructed. */
8126 build_array_type_1 (tree elt_type
, tree index_type
, bool typeless_storage
,
8131 if (TREE_CODE (elt_type
) == FUNCTION_TYPE
)
8133 error ("arrays of functions are not meaningful");
8134 elt_type
= integer_type_node
;
8137 t
= make_node (ARRAY_TYPE
);
8138 TREE_TYPE (t
) = elt_type
;
8139 TYPE_DOMAIN (t
) = index_type
;
8140 TYPE_ADDR_SPACE (t
) = TYPE_ADDR_SPACE (elt_type
);
8141 TYPE_TYPELESS_STORAGE (t
) = typeless_storage
;
8144 /* If the element type is incomplete at this point we get marked for
8145 structural equality. Do not record these types in the canonical
8147 if (TYPE_STRUCTURAL_EQUALITY_P (t
))
8152 hashval_t hash
= type_hash_canon_hash (t
);
8153 t
= type_hash_canon (hash
, t
);
8156 if (TYPE_CANONICAL (t
) == t
)
8158 if (TYPE_STRUCTURAL_EQUALITY_P (elt_type
)
8159 || (index_type
&& TYPE_STRUCTURAL_EQUALITY_P (index_type
))
8161 SET_TYPE_STRUCTURAL_EQUALITY (t
);
8162 else if (TYPE_CANONICAL (elt_type
) != elt_type
8163 || (index_type
&& TYPE_CANONICAL (index_type
) != index_type
))
8165 = build_array_type_1 (TYPE_CANONICAL (elt_type
),
8167 ? TYPE_CANONICAL (index_type
) : NULL_TREE
,
8168 typeless_storage
, shared
);
8174 /* Wrapper around build_array_type_1 with SHARED set to true. */
8177 build_array_type (tree elt_type
, tree index_type
, bool typeless_storage
)
8179 return build_array_type_1 (elt_type
, index_type
, typeless_storage
, true);
8182 /* Wrapper around build_array_type_1 with SHARED set to false. */
8185 build_nonshared_array_type (tree elt_type
, tree index_type
)
8187 return build_array_type_1 (elt_type
, index_type
, false, false);
8190 /* Return a representation of ELT_TYPE[NELTS], using indices of type
8194 build_array_type_nelts (tree elt_type
, poly_uint64 nelts
)
8196 return build_array_type (elt_type
, build_index_type (size_int (nelts
- 1)));
8199 /* Recursively examines the array elements of TYPE, until a non-array
8200 element type is found. */
8203 strip_array_types (tree type
)
8205 while (TREE_CODE (type
) == ARRAY_TYPE
)
8206 type
= TREE_TYPE (type
);
8211 /* Computes the canonical argument types from the argument type list
8214 Upon return, *ANY_STRUCTURAL_P will be true iff either it was true
8215 on entry to this function, or if any of the ARGTYPES are
8218 Upon return, *ANY_NONCANONICAL_P will be true iff either it was
8219 true on entry to this function, or if any of the ARGTYPES are
8222 Returns a canonical argument list, which may be ARGTYPES when the
8223 canonical argument list is unneeded (i.e., *ANY_STRUCTURAL_P is
8224 true) or would not differ from ARGTYPES. */
8227 maybe_canonicalize_argtypes (tree argtypes
,
8228 bool *any_structural_p
,
8229 bool *any_noncanonical_p
)
8232 bool any_noncanonical_argtypes_p
= false;
8234 for (arg
= argtypes
; arg
&& !(*any_structural_p
); arg
= TREE_CHAIN (arg
))
8236 if (!TREE_VALUE (arg
) || TREE_VALUE (arg
) == error_mark_node
)
8237 /* Fail gracefully by stating that the type is structural. */
8238 *any_structural_p
= true;
8239 else if (TYPE_STRUCTURAL_EQUALITY_P (TREE_VALUE (arg
)))
8240 *any_structural_p
= true;
8241 else if (TYPE_CANONICAL (TREE_VALUE (arg
)) != TREE_VALUE (arg
)
8242 || TREE_PURPOSE (arg
))
8243 /* If the argument has a default argument, we consider it
8244 non-canonical even though the type itself is canonical.
8245 That way, different variants of function and method types
8246 with default arguments will all point to the variant with
8247 no defaults as their canonical type. */
8248 any_noncanonical_argtypes_p
= true;
8251 if (*any_structural_p
)
8254 if (any_noncanonical_argtypes_p
)
8256 /* Build the canonical list of argument types. */
8257 tree canon_argtypes
= NULL_TREE
;
8258 bool is_void
= false;
8260 for (arg
= argtypes
; arg
; arg
= TREE_CHAIN (arg
))
8262 if (arg
== void_list_node
)
8265 canon_argtypes
= tree_cons (NULL_TREE
,
8266 TYPE_CANONICAL (TREE_VALUE (arg
)),
8270 canon_argtypes
= nreverse (canon_argtypes
);
8272 canon_argtypes
= chainon (canon_argtypes
, void_list_node
);
8274 /* There is a non-canonical type. */
8275 *any_noncanonical_p
= true;
8276 return canon_argtypes
;
8279 /* The canonical argument types are the same as ARGTYPES. */
8283 /* Construct, lay out and return
8284 the type of functions returning type VALUE_TYPE
8285 given arguments of types ARG_TYPES.
8286 ARG_TYPES is a chain of TREE_LIST nodes whose TREE_VALUEs
8287 are data type nodes for the arguments of the function.
8288 If such a type has already been constructed, reuse it. */
8291 build_function_type (tree value_type
, tree arg_types
)
8294 inchash::hash hstate
;
8295 bool any_structural_p
, any_noncanonical_p
;
8296 tree canon_argtypes
;
8298 gcc_assert (arg_types
!= error_mark_node
);
8300 if (TREE_CODE (value_type
) == FUNCTION_TYPE
)
8302 error ("function return type cannot be function");
8303 value_type
= integer_type_node
;
8306 /* Make a node of the sort we want. */
8307 t
= make_node (FUNCTION_TYPE
);
8308 TREE_TYPE (t
) = value_type
;
8309 TYPE_ARG_TYPES (t
) = arg_types
;
8311 /* If we already have such a type, use the old one. */
8312 hashval_t hash
= type_hash_canon_hash (t
);
8313 t
= type_hash_canon (hash
, t
);
8315 /* Set up the canonical type. */
8316 any_structural_p
= TYPE_STRUCTURAL_EQUALITY_P (value_type
);
8317 any_noncanonical_p
= TYPE_CANONICAL (value_type
) != value_type
;
8318 canon_argtypes
= maybe_canonicalize_argtypes (arg_types
,
8320 &any_noncanonical_p
);
8321 if (any_structural_p
)
8322 SET_TYPE_STRUCTURAL_EQUALITY (t
);
8323 else if (any_noncanonical_p
)
8324 TYPE_CANONICAL (t
) = build_function_type (TYPE_CANONICAL (value_type
),
8327 if (!COMPLETE_TYPE_P (t
))
8332 /* Build a function type. The RETURN_TYPE is the type returned by the
8333 function. If VAARGS is set, no void_type_node is appended to the
8334 list. ARGP must be always be terminated be a NULL_TREE. */
8337 build_function_type_list_1 (bool vaargs
, tree return_type
, va_list argp
)
8341 t
= va_arg (argp
, tree
);
8342 for (args
= NULL_TREE
; t
!= NULL_TREE
; t
= va_arg (argp
, tree
))
8343 args
= tree_cons (NULL_TREE
, t
, args
);
8348 if (args
!= NULL_TREE
)
8349 args
= nreverse (args
);
8350 gcc_assert (last
!= void_list_node
);
8352 else if (args
== NULL_TREE
)
8353 args
= void_list_node
;
8357 args
= nreverse (args
);
8358 TREE_CHAIN (last
) = void_list_node
;
8360 args
= build_function_type (return_type
, args
);
8365 /* Build a function type. The RETURN_TYPE is the type returned by the
8366 function. If additional arguments are provided, they are
8367 additional argument types. The list of argument types must always
8368 be terminated by NULL_TREE. */
8371 build_function_type_list (tree return_type
, ...)
8376 va_start (p
, return_type
);
8377 args
= build_function_type_list_1 (false, return_type
, p
);
8382 /* Build a variable argument function type. The RETURN_TYPE is the
8383 type returned by the function. If additional arguments are provided,
8384 they are additional argument types. The list of argument types must
8385 always be terminated by NULL_TREE. */
8388 build_varargs_function_type_list (tree return_type
, ...)
8393 va_start (p
, return_type
);
8394 args
= build_function_type_list_1 (true, return_type
, p
);
8400 /* Build a function type. RETURN_TYPE is the type returned by the
8401 function; VAARGS indicates whether the function takes varargs. The
8402 function takes N named arguments, the types of which are provided in
8406 build_function_type_array_1 (bool vaargs
, tree return_type
, int n
,
8410 tree t
= vaargs
? NULL_TREE
: void_list_node
;
8412 for (i
= n
- 1; i
>= 0; i
--)
8413 t
= tree_cons (NULL_TREE
, arg_types
[i
], t
);
8415 return build_function_type (return_type
, t
);
8418 /* Build a function type. RETURN_TYPE is the type returned by the
8419 function. The function takes N named arguments, the types of which
8420 are provided in ARG_TYPES. */
8423 build_function_type_array (tree return_type
, int n
, tree
*arg_types
)
8425 return build_function_type_array_1 (false, return_type
, n
, arg_types
);
8428 /* Build a variable argument function type. RETURN_TYPE is the type
8429 returned by the function. The function takes N named arguments, the
8430 types of which are provided in ARG_TYPES. */
8433 build_varargs_function_type_array (tree return_type
, int n
, tree
*arg_types
)
8435 return build_function_type_array_1 (true, return_type
, n
, arg_types
);
8438 /* Build a METHOD_TYPE for a member of BASETYPE. The RETTYPE (a TYPE)
8439 and ARGTYPES (a TREE_LIST) are the return type and arguments types
8440 for the method. An implicit additional parameter (of type
8441 pointer-to-BASETYPE) is added to the ARGTYPES. */
8444 build_method_type_directly (tree basetype
,
8450 bool any_structural_p
, any_noncanonical_p
;
8451 tree canon_argtypes
;
8453 /* Make a node of the sort we want. */
8454 t
= make_node (METHOD_TYPE
);
8456 TYPE_METHOD_BASETYPE (t
) = TYPE_MAIN_VARIANT (basetype
);
8457 TREE_TYPE (t
) = rettype
;
8458 ptype
= build_pointer_type (basetype
);
8460 /* The actual arglist for this function includes a "hidden" argument
8461 which is "this". Put it into the list of argument types. */
8462 argtypes
= tree_cons (NULL_TREE
, ptype
, argtypes
);
8463 TYPE_ARG_TYPES (t
) = argtypes
;
8465 /* If we already have such a type, use the old one. */
8466 hashval_t hash
= type_hash_canon_hash (t
);
8467 t
= type_hash_canon (hash
, t
);
8469 /* Set up the canonical type. */
8471 = (TYPE_STRUCTURAL_EQUALITY_P (basetype
)
8472 || TYPE_STRUCTURAL_EQUALITY_P (rettype
));
8474 = (TYPE_CANONICAL (basetype
) != basetype
8475 || TYPE_CANONICAL (rettype
) != rettype
);
8476 canon_argtypes
= maybe_canonicalize_argtypes (TREE_CHAIN (argtypes
),
8478 &any_noncanonical_p
);
8479 if (any_structural_p
)
8480 SET_TYPE_STRUCTURAL_EQUALITY (t
);
8481 else if (any_noncanonical_p
)
8483 = build_method_type_directly (TYPE_CANONICAL (basetype
),
8484 TYPE_CANONICAL (rettype
),
8486 if (!COMPLETE_TYPE_P (t
))
8492 /* Construct, lay out and return the type of methods belonging to class
8493 BASETYPE and whose arguments and values are described by TYPE.
8494 If that type exists already, reuse it.
8495 TYPE must be a FUNCTION_TYPE node. */
8498 build_method_type (tree basetype
, tree type
)
8500 gcc_assert (TREE_CODE (type
) == FUNCTION_TYPE
);
8502 return build_method_type_directly (basetype
,
8504 TYPE_ARG_TYPES (type
));
8507 /* Construct, lay out and return the type of offsets to a value
8508 of type TYPE, within an object of type BASETYPE.
8509 If a suitable offset type exists already, reuse it. */
8512 build_offset_type (tree basetype
, tree type
)
8516 /* Make a node of the sort we want. */
8517 t
= make_node (OFFSET_TYPE
);
8519 TYPE_OFFSET_BASETYPE (t
) = TYPE_MAIN_VARIANT (basetype
);
8520 TREE_TYPE (t
) = type
;
8522 /* If we already have such a type, use the old one. */
8523 hashval_t hash
= type_hash_canon_hash (t
);
8524 t
= type_hash_canon (hash
, t
);
8526 if (!COMPLETE_TYPE_P (t
))
8529 if (TYPE_CANONICAL (t
) == t
)
8531 if (TYPE_STRUCTURAL_EQUALITY_P (basetype
)
8532 || TYPE_STRUCTURAL_EQUALITY_P (type
))
8533 SET_TYPE_STRUCTURAL_EQUALITY (t
);
8534 else if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype
)) != basetype
8535 || TYPE_CANONICAL (type
) != type
)
8537 = build_offset_type (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype
)),
8538 TYPE_CANONICAL (type
));
8544 /* Create a complex type whose components are COMPONENT_TYPE.
8546 If NAMED is true, the type is given a TYPE_NAME. We do not always
8547 do so because this creates a DECL node and thus make the DECL_UIDs
8548 dependent on the type canonicalization hashtable, which is GC-ed,
8549 so the DECL_UIDs would not be stable wrt garbage collection. */
8552 build_complex_type (tree component_type
, bool named
)
8554 gcc_assert (INTEGRAL_TYPE_P (component_type
)
8555 || SCALAR_FLOAT_TYPE_P (component_type
)
8556 || FIXED_POINT_TYPE_P (component_type
));
8558 /* Make a node of the sort we want. */
8559 tree probe
= make_node (COMPLEX_TYPE
);
8561 TREE_TYPE (probe
) = TYPE_MAIN_VARIANT (component_type
);
8563 /* If we already have such a type, use the old one. */
8564 hashval_t hash
= type_hash_canon_hash (probe
);
8565 tree t
= type_hash_canon (hash
, probe
);
8569 /* We created a new type. The hash insertion will have laid
8570 out the type. We need to check the canonicalization and
8571 maybe set the name. */
8572 gcc_checking_assert (COMPLETE_TYPE_P (t
)
8574 && TYPE_CANONICAL (t
) == t
);
8576 if (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (t
)))
8577 SET_TYPE_STRUCTURAL_EQUALITY (t
);
8578 else if (TYPE_CANONICAL (TREE_TYPE (t
)) != TREE_TYPE (t
))
8580 = build_complex_type (TYPE_CANONICAL (TREE_TYPE (t
)), named
);
8582 /* We need to create a name, since complex is a fundamental type. */
8585 const char *name
= NULL
;
8587 if (TREE_TYPE (t
) == char_type_node
)
8588 name
= "complex char";
8589 else if (TREE_TYPE (t
) == signed_char_type_node
)
8590 name
= "complex signed char";
8591 else if (TREE_TYPE (t
) == unsigned_char_type_node
)
8592 name
= "complex unsigned char";
8593 else if (TREE_TYPE (t
) == short_integer_type_node
)
8594 name
= "complex short int";
8595 else if (TREE_TYPE (t
) == short_unsigned_type_node
)
8596 name
= "complex short unsigned int";
8597 else if (TREE_TYPE (t
) == integer_type_node
)
8598 name
= "complex int";
8599 else if (TREE_TYPE (t
) == unsigned_type_node
)
8600 name
= "complex unsigned int";
8601 else if (TREE_TYPE (t
) == long_integer_type_node
)
8602 name
= "complex long int";
8603 else if (TREE_TYPE (t
) == long_unsigned_type_node
)
8604 name
= "complex long unsigned int";
8605 else if (TREE_TYPE (t
) == long_long_integer_type_node
)
8606 name
= "complex long long int";
8607 else if (TREE_TYPE (t
) == long_long_unsigned_type_node
)
8608 name
= "complex long long unsigned int";
8611 TYPE_NAME (t
) = build_decl (UNKNOWN_LOCATION
, TYPE_DECL
,
8612 get_identifier (name
), t
);
8616 return build_qualified_type (t
, TYPE_QUALS (component_type
));
8619 /* If TYPE is a real or complex floating-point type and the target
8620 does not directly support arithmetic on TYPE then return the wider
8621 type to be used for arithmetic on TYPE. Otherwise, return
8625 excess_precision_type (tree type
)
8627 /* The target can give two different responses to the question of
8628 which excess precision mode it would like depending on whether we
8629 are in -fexcess-precision=standard or -fexcess-precision=fast. */
8631 enum excess_precision_type requested_type
8632 = (flag_excess_precision
== EXCESS_PRECISION_FAST
8633 ? EXCESS_PRECISION_TYPE_FAST
8634 : EXCESS_PRECISION_TYPE_STANDARD
);
8636 enum flt_eval_method target_flt_eval_method
8637 = targetm
.c
.excess_precision (requested_type
);
8639 /* The target should not ask for unpredictable float evaluation (though
8640 it might advertise that implicitly the evaluation is unpredictable,
8641 but we don't care about that here, it will have been reported
8642 elsewhere). If it does ask for unpredictable evaluation, we have
8643 nothing to do here. */
8644 gcc_assert (target_flt_eval_method
!= FLT_EVAL_METHOD_UNPREDICTABLE
);
8646 /* Nothing to do. The target has asked for all types we know about
8647 to be computed with their native precision and range. */
8648 if (target_flt_eval_method
== FLT_EVAL_METHOD_PROMOTE_TO_FLOAT16
)
8651 /* The target will promote this type in a target-dependent way, so excess
8652 precision ought to leave it alone. */
8653 if (targetm
.promoted_type (type
) != NULL_TREE
)
8656 machine_mode float16_type_mode
= (float16_type_node
8657 ? TYPE_MODE (float16_type_node
)
8659 machine_mode float_type_mode
= TYPE_MODE (float_type_node
);
8660 machine_mode double_type_mode
= TYPE_MODE (double_type_node
);
8662 switch (TREE_CODE (type
))
8666 machine_mode type_mode
= TYPE_MODE (type
);
8667 switch (target_flt_eval_method
)
8669 case FLT_EVAL_METHOD_PROMOTE_TO_FLOAT
:
8670 if (type_mode
== float16_type_mode
)
8671 return float_type_node
;
8673 case FLT_EVAL_METHOD_PROMOTE_TO_DOUBLE
:
8674 if (type_mode
== float16_type_mode
8675 || type_mode
== float_type_mode
)
8676 return double_type_node
;
8678 case FLT_EVAL_METHOD_PROMOTE_TO_LONG_DOUBLE
:
8679 if (type_mode
== float16_type_mode
8680 || type_mode
== float_type_mode
8681 || type_mode
== double_type_mode
)
8682 return long_double_type_node
;
8691 if (TREE_CODE (TREE_TYPE (type
)) != REAL_TYPE
)
8693 machine_mode type_mode
= TYPE_MODE (TREE_TYPE (type
));
8694 switch (target_flt_eval_method
)
8696 case FLT_EVAL_METHOD_PROMOTE_TO_FLOAT
:
8697 if (type_mode
== float16_type_mode
)
8698 return complex_float_type_node
;
8700 case FLT_EVAL_METHOD_PROMOTE_TO_DOUBLE
:
8701 if (type_mode
== float16_type_mode
8702 || type_mode
== float_type_mode
)
8703 return complex_double_type_node
;
8705 case FLT_EVAL_METHOD_PROMOTE_TO_LONG_DOUBLE
:
8706 if (type_mode
== float16_type_mode
8707 || type_mode
== float_type_mode
8708 || type_mode
== double_type_mode
)
8709 return complex_long_double_type_node
;
8723 /* Return OP, stripped of any conversions to wider types as much as is safe.
8724 Converting the value back to OP's type makes a value equivalent to OP.
8726 If FOR_TYPE is nonzero, we return a value which, if converted to
8727 type FOR_TYPE, would be equivalent to converting OP to type FOR_TYPE.
8729 OP must have integer, real or enumeral type. Pointers are not allowed!
8731 There are some cases where the obvious value we could return
8732 would regenerate to OP if converted to OP's type,
8733 but would not extend like OP to wider types.
8734 If FOR_TYPE indicates such extension is contemplated, we eschew such values.
8735 For example, if OP is (unsigned short)(signed char)-1,
8736 we avoid returning (signed char)-1 if FOR_TYPE is int,
8737 even though extending that to an unsigned short would regenerate OP,
8738 since the result of extending (signed char)-1 to (int)
8739 is different from (int) OP. */
8742 get_unwidened (tree op
, tree for_type
)
8744 /* Set UNS initially if converting OP to FOR_TYPE is a zero-extension. */
8745 tree type
= TREE_TYPE (op
);
8747 = TYPE_PRECISION (for_type
!= 0 ? for_type
: type
);
8749 = (for_type
!= 0 && for_type
!= type
8750 && final_prec
> TYPE_PRECISION (type
)
8751 && TYPE_UNSIGNED (type
));
8754 while (CONVERT_EXPR_P (op
))
8758 /* TYPE_PRECISION on vector types has different meaning
8759 (TYPE_VECTOR_SUBPARTS) and casts from vectors are view conversions,
8760 so avoid them here. */
8761 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op
, 0))) == VECTOR_TYPE
)
8764 bitschange
= TYPE_PRECISION (TREE_TYPE (op
))
8765 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op
, 0)));
8767 /* Truncations are many-one so cannot be removed.
8768 Unless we are later going to truncate down even farther. */
8770 && final_prec
> TYPE_PRECISION (TREE_TYPE (op
)))
8773 /* See what's inside this conversion. If we decide to strip it,
8775 op
= TREE_OPERAND (op
, 0);
8777 /* If we have not stripped any zero-extensions (uns is 0),
8778 we can strip any kind of extension.
8779 If we have previously stripped a zero-extension,
8780 only zero-extensions can safely be stripped.
8781 Any extension can be stripped if the bits it would produce
8782 are all going to be discarded later by truncating to FOR_TYPE. */
8786 if (! uns
|| final_prec
<= TYPE_PRECISION (TREE_TYPE (op
)))
8788 /* TYPE_UNSIGNED says whether this is a zero-extension.
8789 Let's avoid computing it if it does not affect WIN
8790 and if UNS will not be needed again. */
8792 || CONVERT_EXPR_P (op
))
8793 && TYPE_UNSIGNED (TREE_TYPE (op
)))
8801 /* If we finally reach a constant see if it fits in sth smaller and
8802 in that case convert it. */
8803 if (TREE_CODE (win
) == INTEGER_CST
)
8805 tree wtype
= TREE_TYPE (win
);
8806 unsigned prec
= wi::min_precision (wi::to_wide (win
), TYPE_SIGN (wtype
));
8808 prec
= MAX (prec
, final_prec
);
8809 if (prec
< TYPE_PRECISION (wtype
))
8811 tree t
= lang_hooks
.types
.type_for_size (prec
, TYPE_UNSIGNED (wtype
));
8812 if (t
&& TYPE_PRECISION (t
) < TYPE_PRECISION (wtype
))
8813 win
= fold_convert (t
, win
);
8820 /* Return OP or a simpler expression for a narrower value
8821 which can be sign-extended or zero-extended to give back OP.
8822 Store in *UNSIGNEDP_PTR either 1 if the value should be zero-extended
8823 or 0 if the value should be sign-extended. */
8826 get_narrower (tree op
, int *unsignedp_ptr
)
8831 bool integral_p
= INTEGRAL_TYPE_P (TREE_TYPE (op
));
8833 while (TREE_CODE (op
) == NOP_EXPR
)
8836 = (TYPE_PRECISION (TREE_TYPE (op
))
8837 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op
, 0))));
8839 /* Truncations are many-one so cannot be removed. */
8843 /* See what's inside this conversion. If we decide to strip it,
8848 op
= TREE_OPERAND (op
, 0);
8849 /* An extension: the outermost one can be stripped,
8850 but remember whether it is zero or sign extension. */
8852 uns
= TYPE_UNSIGNED (TREE_TYPE (op
));
8853 /* Otherwise, if a sign extension has been stripped,
8854 only sign extensions can now be stripped;
8855 if a zero extension has been stripped, only zero-extensions. */
8856 else if (uns
!= TYPE_UNSIGNED (TREE_TYPE (op
)))
8860 else /* bitschange == 0 */
8862 /* A change in nominal type can always be stripped, but we must
8863 preserve the unsignedness. */
8865 uns
= TYPE_UNSIGNED (TREE_TYPE (op
));
8867 op
= TREE_OPERAND (op
, 0);
8868 /* Keep trying to narrow, but don't assign op to win if it
8869 would turn an integral type into something else. */
8870 if (INTEGRAL_TYPE_P (TREE_TYPE (op
)) != integral_p
)
8877 if (TREE_CODE (op
) == COMPONENT_REF
8878 /* Since type_for_size always gives an integer type. */
8879 && TREE_CODE (TREE_TYPE (op
)) != REAL_TYPE
8880 && TREE_CODE (TREE_TYPE (op
)) != FIXED_POINT_TYPE
8881 /* Ensure field is laid out already. */
8882 && DECL_SIZE (TREE_OPERAND (op
, 1)) != 0
8883 && tree_fits_uhwi_p (DECL_SIZE (TREE_OPERAND (op
, 1))))
8885 unsigned HOST_WIDE_INT innerprec
8886 = tree_to_uhwi (DECL_SIZE (TREE_OPERAND (op
, 1)));
8887 int unsignedp
= (DECL_UNSIGNED (TREE_OPERAND (op
, 1))
8888 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op
, 1))));
8889 tree type
= lang_hooks
.types
.type_for_size (innerprec
, unsignedp
);
8891 /* We can get this structure field in a narrower type that fits it,
8892 but the resulting extension to its nominal type (a fullword type)
8893 must satisfy the same conditions as for other extensions.
8895 Do this only for fields that are aligned (not bit-fields),
8896 because when bit-field insns will be used there is no
8897 advantage in doing this. */
8899 if (innerprec
< TYPE_PRECISION (TREE_TYPE (op
))
8900 && ! DECL_BIT_FIELD (TREE_OPERAND (op
, 1))
8901 && (first
|| uns
== DECL_UNSIGNED (TREE_OPERAND (op
, 1)))
8905 uns
= DECL_UNSIGNED (TREE_OPERAND (op
, 1));
8906 win
= fold_convert (type
, op
);
8910 *unsignedp_ptr
= uns
;
8914 /* Return true if integer constant C has a value that is permissible
8915 for TYPE, an integral type. */
8918 int_fits_type_p (const_tree c
, const_tree type
)
8920 tree type_low_bound
, type_high_bound
;
8921 bool ok_for_low_bound
, ok_for_high_bound
;
8922 signop sgn_c
= TYPE_SIGN (TREE_TYPE (c
));
8924 /* Non-standard boolean types can have arbitrary precision but various
8925 transformations assume that they can only take values 0 and +/-1. */
8926 if (TREE_CODE (type
) == BOOLEAN_TYPE
)
8927 return wi::fits_to_boolean_p (wi::to_wide (c
), type
);
8930 type_low_bound
= TYPE_MIN_VALUE (type
);
8931 type_high_bound
= TYPE_MAX_VALUE (type
);
8933 /* If at least one bound of the type is a constant integer, we can check
8934 ourselves and maybe make a decision. If no such decision is possible, but
8935 this type is a subtype, try checking against that. Otherwise, use
8936 fits_to_tree_p, which checks against the precision.
8938 Compute the status for each possibly constant bound, and return if we see
8939 one does not match. Use ok_for_xxx_bound for this purpose, assigning -1
8940 for "unknown if constant fits", 0 for "constant known *not* to fit" and 1
8941 for "constant known to fit". */
8943 /* Check if c >= type_low_bound. */
8944 if (type_low_bound
&& TREE_CODE (type_low_bound
) == INTEGER_CST
)
8946 if (tree_int_cst_lt (c
, type_low_bound
))
8948 ok_for_low_bound
= true;
8951 ok_for_low_bound
= false;
8953 /* Check if c <= type_high_bound. */
8954 if (type_high_bound
&& TREE_CODE (type_high_bound
) == INTEGER_CST
)
8956 if (tree_int_cst_lt (type_high_bound
, c
))
8958 ok_for_high_bound
= true;
8961 ok_for_high_bound
= false;
8963 /* If the constant fits both bounds, the result is known. */
8964 if (ok_for_low_bound
&& ok_for_high_bound
)
8967 /* Perform some generic filtering which may allow making a decision
8968 even if the bounds are not constant. First, negative integers
8969 never fit in unsigned types, */
8970 if (TYPE_UNSIGNED (type
) && sgn_c
== SIGNED
&& wi::neg_p (wi::to_wide (c
)))
8973 /* Second, narrower types always fit in wider ones. */
8974 if (TYPE_PRECISION (type
) > TYPE_PRECISION (TREE_TYPE (c
)))
8977 /* Third, unsigned integers with top bit set never fit signed types. */
8978 if (!TYPE_UNSIGNED (type
) && sgn_c
== UNSIGNED
)
8980 int prec
= GET_MODE_PRECISION (SCALAR_INT_TYPE_MODE (TREE_TYPE (c
))) - 1;
8981 if (prec
< TYPE_PRECISION (TREE_TYPE (c
)))
8983 /* When a tree_cst is converted to a wide-int, the precision
8984 is taken from the type. However, if the precision of the
8985 mode underneath the type is smaller than that, it is
8986 possible that the value will not fit. The test below
8987 fails if any bit is set between the sign bit of the
8988 underlying mode and the top bit of the type. */
8989 if (wi::zext (wi::to_wide (c
), prec
- 1) != wi::to_wide (c
))
8992 else if (wi::neg_p (wi::to_wide (c
)))
8996 /* If we haven't been able to decide at this point, there nothing more we
8997 can check ourselves here. Look at the base type if we have one and it
8998 has the same precision. */
8999 if (TREE_CODE (type
) == INTEGER_TYPE
9000 && TREE_TYPE (type
) != 0
9001 && TYPE_PRECISION (type
) == TYPE_PRECISION (TREE_TYPE (type
)))
9003 type
= TREE_TYPE (type
);
9007 /* Or to fits_to_tree_p, if nothing else. */
9008 return wi::fits_to_tree_p (wi::to_wide (c
), type
);
9011 /* Stores bounds of an integer TYPE in MIN and MAX. If TYPE has non-constant
9012 bounds or is a POINTER_TYPE, the maximum and/or minimum values that can be
9013 represented (assuming two's-complement arithmetic) within the bit
9014 precision of the type are returned instead. */
9017 get_type_static_bounds (const_tree type
, mpz_t min
, mpz_t max
)
9019 if (!POINTER_TYPE_P (type
) && TYPE_MIN_VALUE (type
)
9020 && TREE_CODE (TYPE_MIN_VALUE (type
)) == INTEGER_CST
)
9021 wi::to_mpz (wi::to_wide (TYPE_MIN_VALUE (type
)), min
, TYPE_SIGN (type
));
9024 if (TYPE_UNSIGNED (type
))
9025 mpz_set_ui (min
, 0);
9028 wide_int mn
= wi::min_value (TYPE_PRECISION (type
), SIGNED
);
9029 wi::to_mpz (mn
, min
, SIGNED
);
9033 if (!POINTER_TYPE_P (type
) && TYPE_MAX_VALUE (type
)
9034 && TREE_CODE (TYPE_MAX_VALUE (type
)) == INTEGER_CST
)
9035 wi::to_mpz (wi::to_wide (TYPE_MAX_VALUE (type
)), max
, TYPE_SIGN (type
));
9038 wide_int mn
= wi::max_value (TYPE_PRECISION (type
), TYPE_SIGN (type
));
9039 wi::to_mpz (mn
, max
, TYPE_SIGN (type
));
9043 /* Return true if VAR is an automatic variable. */
9046 auto_var_p (const_tree var
)
9048 return ((((VAR_P (var
) && ! DECL_EXTERNAL (var
))
9049 || TREE_CODE (var
) == PARM_DECL
)
9050 && ! TREE_STATIC (var
))
9051 || TREE_CODE (var
) == RESULT_DECL
);
9054 /* Return true if VAR is an automatic variable defined in function FN. */
9057 auto_var_in_fn_p (const_tree var
, const_tree fn
)
9059 return (DECL_P (var
) && DECL_CONTEXT (var
) == fn
9060 && (auto_var_p (var
)
9061 || TREE_CODE (var
) == LABEL_DECL
));
9064 /* Subprogram of following function. Called by walk_tree.
9066 Return *TP if it is an automatic variable or parameter of the
9067 function passed in as DATA. */
9070 find_var_from_fn (tree
*tp
, int *walk_subtrees
, void *data
)
9072 tree fn
= (tree
) data
;
9077 else if (DECL_P (*tp
)
9078 && auto_var_in_fn_p (*tp
, fn
))
9084 /* Returns true if T is, contains, or refers to a type with variable
9085 size. For METHOD_TYPEs and FUNCTION_TYPEs we exclude the
9086 arguments, but not the return type. If FN is nonzero, only return
9087 true if a modifier of the type or position of FN is a variable or
9088 parameter inside FN.
9090 This concept is more general than that of C99 'variably modified types':
9091 in C99, a struct type is never variably modified because a VLA may not
9092 appear as a structure member. However, in GNU C code like:
9094 struct S { int i[f()]; };
9096 is valid, and other languages may define similar constructs. */
9099 variably_modified_type_p (tree type
, tree fn
)
9103 /* Test if T is either variable (if FN is zero) or an expression containing
9104 a variable in FN. If TYPE isn't gimplified, return true also if
9105 gimplify_one_sizepos would gimplify the expression into a local
9107 #define RETURN_TRUE_IF_VAR(T) \
9108 do { tree _t = (T); \
9109 if (_t != NULL_TREE \
9110 && _t != error_mark_node \
9111 && !CONSTANT_CLASS_P (_t) \
9112 && TREE_CODE (_t) != PLACEHOLDER_EXPR \
9114 || (!TYPE_SIZES_GIMPLIFIED (type) \
9115 && (TREE_CODE (_t) != VAR_DECL \
9116 && !CONTAINS_PLACEHOLDER_P (_t))) \
9117 || walk_tree (&_t, find_var_from_fn, fn, NULL))) \
9118 return true; } while (0)
9120 if (type
== error_mark_node
)
9123 /* If TYPE itself has variable size, it is variably modified. */
9124 RETURN_TRUE_IF_VAR (TYPE_SIZE (type
));
9125 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type
));
9127 switch (TREE_CODE (type
))
9130 case REFERENCE_TYPE
:
9132 /* Ada can have pointer types refering to themselves indirectly. */
9133 if (TREE_VISITED (type
))
9135 TREE_VISITED (type
) = true;
9136 if (variably_modified_type_p (TREE_TYPE (type
), fn
))
9138 TREE_VISITED (type
) = false;
9141 TREE_VISITED (type
) = false;
9146 /* If TYPE is a function type, it is variably modified if the
9147 return type is variably modified. */
9148 if (variably_modified_type_p (TREE_TYPE (type
), fn
))
9154 case FIXED_POINT_TYPE
:
9157 /* Scalar types are variably modified if their end points
9159 RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type
));
9160 RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type
));
9165 case QUAL_UNION_TYPE
:
9166 /* We can't see if any of the fields are variably-modified by the
9167 definition we normally use, since that would produce infinite
9168 recursion via pointers. */
9169 /* This is variably modified if some field's type is. */
9170 for (t
= TYPE_FIELDS (type
); t
; t
= DECL_CHAIN (t
))
9171 if (TREE_CODE (t
) == FIELD_DECL
)
9173 RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t
));
9174 RETURN_TRUE_IF_VAR (DECL_SIZE (t
));
9175 RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t
));
9177 if (TREE_CODE (type
) == QUAL_UNION_TYPE
)
9178 RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t
));
9183 /* Do not call ourselves to avoid infinite recursion. This is
9184 variably modified if the element type is. */
9185 RETURN_TRUE_IF_VAR (TYPE_SIZE (TREE_TYPE (type
)));
9186 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (TREE_TYPE (type
)));
9193 /* The current language may have other cases to check, but in general,
9194 all other types are not variably modified. */
9195 return lang_hooks
.tree_inlining
.var_mod_type_p (type
, fn
);
9197 #undef RETURN_TRUE_IF_VAR
9200 /* Given a DECL or TYPE, return the scope in which it was declared, or
9201 NULL_TREE if there is no containing scope. */
9204 get_containing_scope (const_tree t
)
9206 return (TYPE_P (t
) ? TYPE_CONTEXT (t
) : DECL_CONTEXT (t
));
9209 /* Returns the ultimate TRANSLATION_UNIT_DECL context of DECL or NULL. */
9212 get_ultimate_context (const_tree decl
)
9214 while (decl
&& TREE_CODE (decl
) != TRANSLATION_UNIT_DECL
)
9216 if (TREE_CODE (decl
) == BLOCK
)
9217 decl
= BLOCK_SUPERCONTEXT (decl
);
9219 decl
= get_containing_scope (decl
);
9224 /* Return the innermost context enclosing DECL that is
9225 a FUNCTION_DECL, or zero if none. */
9228 decl_function_context (const_tree decl
)
9232 if (TREE_CODE (decl
) == ERROR_MARK
)
9235 /* C++ virtual functions use DECL_CONTEXT for the class of the vtable
9236 where we look up the function at runtime. Such functions always take
9237 a first argument of type 'pointer to real context'.
9239 C++ should really be fixed to use DECL_CONTEXT for the real context,
9240 and use something else for the "virtual context". */
9241 else if (TREE_CODE (decl
) == FUNCTION_DECL
&& DECL_VIRTUAL_P (decl
))
9244 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl
)))));
9246 context
= DECL_CONTEXT (decl
);
9248 while (context
&& TREE_CODE (context
) != FUNCTION_DECL
)
9250 if (TREE_CODE (context
) == BLOCK
)
9251 context
= BLOCK_SUPERCONTEXT (context
);
9253 context
= get_containing_scope (context
);
9259 /* Return the innermost context enclosing DECL that is
9260 a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE, or zero if none.
9261 TYPE_DECLs and FUNCTION_DECLs are transparent to this function. */
9264 decl_type_context (const_tree decl
)
9266 tree context
= DECL_CONTEXT (decl
);
9269 switch (TREE_CODE (context
))
9271 case NAMESPACE_DECL
:
9272 case TRANSLATION_UNIT_DECL
:
9277 case QUAL_UNION_TYPE
:
9282 context
= DECL_CONTEXT (context
);
9286 context
= BLOCK_SUPERCONTEXT (context
);
9296 /* CALL is a CALL_EXPR. Return the declaration for the function
9297 called, or NULL_TREE if the called function cannot be
9301 get_callee_fndecl (const_tree call
)
9305 if (call
== error_mark_node
)
9306 return error_mark_node
;
9308 /* It's invalid to call this function with anything but a
9310 gcc_assert (TREE_CODE (call
) == CALL_EXPR
);
9312 /* The first operand to the CALL is the address of the function
9314 addr
= CALL_EXPR_FN (call
);
9316 /* If there is no function, return early. */
9317 if (addr
== NULL_TREE
)
9322 /* If this is a readonly function pointer, extract its initial value. */
9323 if (DECL_P (addr
) && TREE_CODE (addr
) != FUNCTION_DECL
9324 && TREE_READONLY (addr
) && ! TREE_THIS_VOLATILE (addr
)
9325 && DECL_INITIAL (addr
))
9326 addr
= DECL_INITIAL (addr
);
9328 /* If the address is just `&f' for some function `f', then we know
9329 that `f' is being called. */
9330 if (TREE_CODE (addr
) == ADDR_EXPR
9331 && TREE_CODE (TREE_OPERAND (addr
, 0)) == FUNCTION_DECL
)
9332 return TREE_OPERAND (addr
, 0);
9334 /* We couldn't figure out what was being called. */
9338 /* If CALL_EXPR CALL calls a normal built-in function or an internal function,
9339 return the associated function code, otherwise return CFN_LAST. */
9342 get_call_combined_fn (const_tree call
)
9344 /* It's invalid to call this function with anything but a CALL_EXPR. */
9345 gcc_assert (TREE_CODE (call
) == CALL_EXPR
);
9347 if (!CALL_EXPR_FN (call
))
9348 return as_combined_fn (CALL_EXPR_IFN (call
));
9350 tree fndecl
= get_callee_fndecl (call
);
9351 if (fndecl
&& fndecl_built_in_p (fndecl
, BUILT_IN_NORMAL
))
9352 return as_combined_fn (DECL_FUNCTION_CODE (fndecl
));
9357 /* Comparator of indices based on tree_node_counts. */
9360 tree_nodes_cmp (const void *p1
, const void *p2
)
9362 const unsigned *n1
= (const unsigned *)p1
;
9363 const unsigned *n2
= (const unsigned *)p2
;
9365 return tree_node_counts
[*n1
] - tree_node_counts
[*n2
];
9368 /* Comparator of indices based on tree_code_counts. */
9371 tree_codes_cmp (const void *p1
, const void *p2
)
9373 const unsigned *n1
= (const unsigned *)p1
;
9374 const unsigned *n2
= (const unsigned *)p2
;
9376 return tree_code_counts
[*n1
] - tree_code_counts
[*n2
];
9379 #define TREE_MEM_USAGE_SPACES 40
9381 /* Print debugging information about tree nodes generated during the compile,
9382 and any language-specific information. */
9385 dump_tree_statistics (void)
9387 if (GATHER_STATISTICS
)
9389 uint64_t total_nodes
, total_bytes
;
9390 fprintf (stderr
, "\nKind Nodes Bytes\n");
9391 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES
);
9392 total_nodes
= total_bytes
= 0;
9395 auto_vec
<unsigned> indices (all_kinds
);
9396 for (unsigned i
= 0; i
< all_kinds
; i
++)
9397 indices
.quick_push (i
);
9398 indices
.qsort (tree_nodes_cmp
);
9400 for (unsigned i
= 0; i
< (int) all_kinds
; i
++)
9402 unsigned j
= indices
[i
];
9403 fprintf (stderr
, "%-20s %6" PRIu64
"%c %9" PRIu64
"%c\n",
9404 tree_node_kind_names
[j
], SIZE_AMOUNT (tree_node_counts
[j
]),
9405 SIZE_AMOUNT (tree_node_sizes
[j
]));
9406 total_nodes
+= tree_node_counts
[j
];
9407 total_bytes
+= tree_node_sizes
[j
];
9409 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES
);
9410 fprintf (stderr
, "%-20s %6" PRIu64
"%c %9" PRIu64
"%c\n", "Total",
9411 SIZE_AMOUNT (total_nodes
), SIZE_AMOUNT (total_bytes
));
9412 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES
);
9416 fprintf (stderr
, "Code Nodes\n");
9417 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES
);
9419 auto_vec
<unsigned> indices (MAX_TREE_CODES
);
9420 for (unsigned i
= 0; i
< MAX_TREE_CODES
; i
++)
9421 indices
.quick_push (i
);
9422 indices
.qsort (tree_codes_cmp
);
9424 for (unsigned i
= 0; i
< MAX_TREE_CODES
; i
++)
9426 unsigned j
= indices
[i
];
9427 fprintf (stderr
, "%-32s %6" PRIu64
"%c\n",
9428 get_tree_code_name ((enum tree_code
) j
),
9429 SIZE_AMOUNT (tree_code_counts
[j
]));
9431 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES
);
9432 fprintf (stderr
, "\n");
9433 ssanames_print_statistics ();
9434 fprintf (stderr
, "\n");
9435 phinodes_print_statistics ();
9436 fprintf (stderr
, "\n");
9440 fprintf (stderr
, "(No per-node statistics)\n");
9442 print_type_hash_statistics ();
9443 print_debug_expr_statistics ();
9444 print_value_expr_statistics ();
9445 lang_hooks
.print_statistics ();
9448 #define FILE_FUNCTION_FORMAT "_GLOBAL__%s_%s"
/* Generate a crc32 of the low BYTES bytes of VALUE.  BYTES must be
   between 1 and 4; the value is left-justified into 32 bits and folded
   a nibble at a time.  */

unsigned
crc32_unsigned_n (unsigned chksum, unsigned value, unsigned bytes)
{
  /* This relies on the raw feedback's top 4 bits being zero.  */
#define FEEDBACK(X) ((X) * 0x04c11db7)
#define SYNDROME(X) (FEEDBACK ((X) & 1) ^ FEEDBACK ((X) & 2) \
                     ^ FEEDBACK ((X) & 4) ^ FEEDBACK ((X) & 8))

  /* Precomputed feedback for every possible top nibble.  */
  static const unsigned syndromes[16] =
    {
      SYNDROME(0x0), SYNDROME(0x1), SYNDROME(0x2), SYNDROME(0x3),
      SYNDROME(0x4), SYNDROME(0x5), SYNDROME(0x6), SYNDROME(0x7),
      SYNDROME(0x8), SYNDROME(0x9), SYNDROME(0xa), SYNDROME(0xb),
      SYNDROME(0xc), SYNDROME(0xd), SYNDROME(0xe), SYNDROME(0xf),
    };

#undef FEEDBACK
#undef SYNDROME

  value <<= (32 - bytes * 8);
  for (unsigned ix = bytes * 2; ix--; value <<= 4)
    {
      /* Fold the top nibble of VALUE ^ CHKSUM back into the sum.  */
      unsigned feedback = syndromes[((value ^ chksum) >> 28) & 0xf];

      chksum = (chksum << 4) ^ feedback;
    }

  return chksum;
}
9480 /* Generate a crc32 of a string. */
9483 crc32_string (unsigned chksum
, const char *string
)
9486 chksum
= crc32_byte (chksum
, *string
);
9491 /* P is a string that will be used in a symbol. Mask out any characters
9492 that are not valid in that context. */
9495 clean_symbol_name (char *p
)
9499 #ifndef NO_DOLLAR_IN_LABEL /* this for `$'; unlikely, but... -- kr */
9502 #ifndef NO_DOT_IN_LABEL /* this for `.'; unlikely, but... */
9509 static GTY(()) unsigned anon_cnt
= 0; /* Saved for PCH. */
9511 /* Create a unique anonymous identifier. The identifier is still a
9512 valid assembly label. */
9518 #if !defined (NO_DOT_IN_LABEL)
9520 #elif !defined (NO_DOLLAR_IN_LABEL)
9528 int len
= snprintf (buf
, sizeof (buf
), fmt
, anon_cnt
++);
9529 gcc_checking_assert (len
< int (sizeof (buf
)));
9531 tree id
= get_identifier_with_length (buf
, len
);
9532 IDENTIFIER_ANON_P (id
) = true;
9537 /* Generate a name for a special-purpose function.
9538 The generated name may need to be unique across the whole link.
9539 Changes to this function may also require corresponding changes to
9540 xstrdup_mask_random.
9541 TYPE is some string to identify the purpose of this function to the
9542 linker or collect2; it must start with an uppercase letter,
9544 I - for constructors
9546 N - for C++ anonymous namespaces
9547 F - for DWARF unwind frame information. */
9550 get_file_function_name (const char *type
)
9556 /* If we already have a name we know to be unique, just use that. */
9557 if (first_global_object_name
)
9558 p
= q
= ASTRDUP (first_global_object_name
);
9559 /* If the target is handling the constructors/destructors, they
9560 will be local to this file and the name is only necessary for
9562 We also assign sub_I and sub_D sufixes to constructors called from
9563 the global static constructors. These are always local. */
9564 else if (((type
[0] == 'I' || type
[0] == 'D') && targetm
.have_ctors_dtors
)
9565 || (strncmp (type
, "sub_", 4) == 0
9566 && (type
[4] == 'I' || type
[4] == 'D')))
9568 const char *file
= main_input_filename
;
9570 file
= LOCATION_FILE (input_location
);
9571 /* Just use the file's basename, because the full pathname
9572 might be quite long. */
9573 p
= q
= ASTRDUP (lbasename (file
));
9577 /* Otherwise, the name must be unique across the entire link.
9578 We don't have anything that we know to be unique to this translation
9579 unit, so use what we do have and throw in some randomness. */
9581 const char *name
= weak_global_object_name
;
9582 const char *file
= main_input_filename
;
9587 file
= LOCATION_FILE (input_location
);
9589 len
= strlen (file
);
9590 q
= (char *) alloca (9 + 19 + len
+ 1);
9591 memcpy (q
, file
, len
+ 1);
9593 snprintf (q
+ len
, 9 + 19 + 1, "_%08X_" HOST_WIDE_INT_PRINT_HEX
,
9594 crc32_string (0, name
), get_random_seed (false));
9599 clean_symbol_name (q
);
9600 buf
= (char *) alloca (sizeof (FILE_FUNCTION_FORMAT
) + strlen (p
)
9603 /* Set up the name of the file-level functions we may need.
9604 Use a global object (which is already required to be unique over
9605 the program) rather than the file name (which imposes extra
9607 sprintf (buf
, FILE_FUNCTION_FORMAT
, type
, p
);
9609 return get_identifier (buf
);
9612 #if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007)
9614 /* Complain that the tree code of NODE does not match the expected 0
9615 terminated list of trailing codes. The trailing code list can be
9616 empty, for a more vague error message. FILE, LINE, and FUNCTION
9617 are of the caller. */
9620 tree_check_failed (const_tree node
, const char *file
,
9621 int line
, const char *function
, ...)
9625 unsigned length
= 0;
9626 enum tree_code code
;
9628 va_start (args
, function
);
9629 while ((code
= (enum tree_code
) va_arg (args
, int)))
9630 length
+= 4 + strlen (get_tree_code_name (code
));
9635 va_start (args
, function
);
9636 length
+= strlen ("expected ");
9637 buffer
= tmp
= (char *) alloca (length
);
9639 while ((code
= (enum tree_code
) va_arg (args
, int)))
9641 const char *prefix
= length
? " or " : "expected ";
9643 strcpy (tmp
+ length
, prefix
);
9644 length
+= strlen (prefix
);
9645 strcpy (tmp
+ length
, get_tree_code_name (code
));
9646 length
+= strlen (get_tree_code_name (code
));
9651 buffer
= "unexpected node";
9653 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9654 buffer
, get_tree_code_name (TREE_CODE (node
)),
9655 function
, trim_filename (file
), line
);
9658 /* Complain that the tree code of NODE does match the expected 0
9659 terminated list of trailing codes. FILE, LINE, and FUNCTION are of
9663 tree_not_check_failed (const_tree node
, const char *file
,
9664 int line
, const char *function
, ...)
9668 unsigned length
= 0;
9669 enum tree_code code
;
9671 va_start (args
, function
);
9672 while ((code
= (enum tree_code
) va_arg (args
, int)))
9673 length
+= 4 + strlen (get_tree_code_name (code
));
9675 va_start (args
, function
);
9676 buffer
= (char *) alloca (length
);
9678 while ((code
= (enum tree_code
) va_arg (args
, int)))
9682 strcpy (buffer
+ length
, " or ");
9685 strcpy (buffer
+ length
, get_tree_code_name (code
));
9686 length
+= strlen (get_tree_code_name (code
));
9690 internal_error ("tree check: expected none of %s, have %s in %s, at %s:%d",
9691 buffer
, get_tree_code_name (TREE_CODE (node
)),
9692 function
, trim_filename (file
), line
);
9695 /* Similar to tree_check_failed, except that we check for a class of tree
9696 code, given in CL. */
9699 tree_class_check_failed (const_tree node
, const enum tree_code_class cl
,
9700 const char *file
, int line
, const char *function
)
9703 ("tree check: expected class %qs, have %qs (%s) in %s, at %s:%d",
9704 TREE_CODE_CLASS_STRING (cl
),
9705 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node
))),
9706 get_tree_code_name (TREE_CODE (node
)), function
, trim_filename (file
), line
);
9709 /* Similar to tree_check_failed, except that instead of specifying a
9710 dozen codes, use the knowledge that they're all sequential. */
9713 tree_range_check_failed (const_tree node
, const char *file
, int line
,
9714 const char *function
, enum tree_code c1
,
9718 unsigned length
= 0;
9721 for (c
= c1
; c
<= c2
; ++c
)
9722 length
+= 4 + strlen (get_tree_code_name ((enum tree_code
) c
));
9724 length
+= strlen ("expected ");
9725 buffer
= (char *) alloca (length
);
9728 for (c
= c1
; c
<= c2
; ++c
)
9730 const char *prefix
= length
? " or " : "expected ";
9732 strcpy (buffer
+ length
, prefix
);
9733 length
+= strlen (prefix
);
9734 strcpy (buffer
+ length
, get_tree_code_name ((enum tree_code
) c
));
9735 length
+= strlen (get_tree_code_name ((enum tree_code
) c
));
9738 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9739 buffer
, get_tree_code_name (TREE_CODE (node
)),
9740 function
, trim_filename (file
), line
);
9744 /* Similar to tree_check_failed, except that we check that a tree does
9745 not have the specified code, given in CL. */
9748 tree_not_class_check_failed (const_tree node
, const enum tree_code_class cl
,
9749 const char *file
, int line
, const char *function
)
9752 ("tree check: did not expect class %qs, have %qs (%s) in %s, at %s:%d",
9753 TREE_CODE_CLASS_STRING (cl
),
9754 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node
))),
9755 get_tree_code_name (TREE_CODE (node
)), function
, trim_filename (file
), line
);
9759 /* Similar to tree_check_failed but applied to OMP_CLAUSE codes. */
9762 omp_clause_check_failed (const_tree node
, const char *file
, int line
,
9763 const char *function
, enum omp_clause_code code
)
9765 internal_error ("tree check: expected %<omp_clause %s%>, have %qs "
9767 omp_clause_code_name
[code
],
9768 get_tree_code_name (TREE_CODE (node
)),
9769 function
, trim_filename (file
), line
);
9773 /* Similar to tree_range_check_failed but applied to OMP_CLAUSE codes. */
9776 omp_clause_range_check_failed (const_tree node
, const char *file
, int line
,
9777 const char *function
, enum omp_clause_code c1
,
9778 enum omp_clause_code c2
)
9781 unsigned length
= 0;
9784 for (c
= c1
; c
<= c2
; ++c
)
9785 length
+= 4 + strlen (omp_clause_code_name
[c
]);
9787 length
+= strlen ("expected ");
9788 buffer
= (char *) alloca (length
);
9791 for (c
= c1
; c
<= c2
; ++c
)
9793 const char *prefix
= length
? " or " : "expected ";
9795 strcpy (buffer
+ length
, prefix
);
9796 length
+= strlen (prefix
);
9797 strcpy (buffer
+ length
, omp_clause_code_name
[c
]);
9798 length
+= strlen (omp_clause_code_name
[c
]);
9801 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9802 buffer
, omp_clause_code_name
[TREE_CODE (node
)],
9803 function
, trim_filename (file
), line
);
9807 #undef DEFTREESTRUCT
9808 #define DEFTREESTRUCT(VAL, NAME) NAME,
9810 static const char *ts_enum_names
[] = {
9811 #include "treestruct.def"
9813 #undef DEFTREESTRUCT
9815 #define TS_ENUM_NAME(EN) (ts_enum_names[(EN)])
9817 /* Similar to tree_class_check_failed, except that we check for
9818 whether CODE contains the tree structure identified by EN. */
9821 tree_contains_struct_check_failed (const_tree node
,
9822 const enum tree_node_structure_enum en
,
9823 const char *file
, int line
,
9824 const char *function
)
9827 ("tree check: expected tree that contains %qs structure, have %qs in %s, at %s:%d",
9829 get_tree_code_name (TREE_CODE (node
)), function
, trim_filename (file
), line
);
9833 /* Similar to above, except that the check is for the bounds of a TREE_VEC's
9834 (dynamically sized) vector. */
9837 tree_int_cst_elt_check_failed (int idx
, int len
, const char *file
, int line
,
9838 const char *function
)
9841 ("tree check: accessed elt %d of %<tree_int_cst%> with %d elts in %s, "
9843 idx
+ 1, len
, function
, trim_filename (file
), line
);
9846 /* Similar to above, except that the check is for the bounds of a TREE_VEC's
9847 (dynamically sized) vector. */
9850 tree_vec_elt_check_failed (int idx
, int len
, const char *file
, int line
,
9851 const char *function
)
9854 ("tree check: accessed elt %d of %<tree_vec%> with %d elts in %s, at %s:%d",
9855 idx
+ 1, len
, function
, trim_filename (file
), line
);
9858 /* Similar to above, except that the check is for the bounds of the operand
9859 vector of an expression node EXP. */
9862 tree_operand_check_failed (int idx
, const_tree exp
, const char *file
,
9863 int line
, const char *function
)
9865 enum tree_code code
= TREE_CODE (exp
);
9867 ("tree check: accessed operand %d of %s with %d operands in %s, at %s:%d",
9868 idx
+ 1, get_tree_code_name (code
), TREE_OPERAND_LENGTH (exp
),
9869 function
, trim_filename (file
), line
);
9872 /* Similar to above, except that the check is for the number of
9873 operands of an OMP_CLAUSE node. */
9876 omp_clause_operand_check_failed (int idx
, const_tree t
, const char *file
,
9877 int line
, const char *function
)
9880 ("tree check: accessed operand %d of %<omp_clause %s%> with %d operands "
9881 "in %s, at %s:%d", idx
+ 1, omp_clause_code_name
[OMP_CLAUSE_CODE (t
)],
9882 omp_clause_num_ops
[OMP_CLAUSE_CODE (t
)], function
,
9883 trim_filename (file
), line
);
9885 #endif /* ENABLE_TREE_CHECKING */
9887 /* Create a new vector type node holding NUNITS units of type INNERTYPE,
9888 and mapped to the machine mode MODE. Initialize its fields and build
9889 the information necessary for debugging output. */
9892 make_vector_type (tree innertype
, poly_int64 nunits
, machine_mode mode
)
9895 tree mv_innertype
= TYPE_MAIN_VARIANT (innertype
);
9897 t
= make_node (VECTOR_TYPE
);
9898 TREE_TYPE (t
) = mv_innertype
;
9899 SET_TYPE_VECTOR_SUBPARTS (t
, nunits
);
9900 SET_TYPE_MODE (t
, mode
);
9902 if (TYPE_STRUCTURAL_EQUALITY_P (mv_innertype
) || in_lto_p
)
9903 SET_TYPE_STRUCTURAL_EQUALITY (t
);
9904 else if ((TYPE_CANONICAL (mv_innertype
) != innertype
9905 || mode
!= VOIDmode
)
9906 && !VECTOR_BOOLEAN_TYPE_P (t
))
9908 = make_vector_type (TYPE_CANONICAL (mv_innertype
), nunits
, VOIDmode
);
9912 hashval_t hash
= type_hash_canon_hash (t
);
9913 t
= type_hash_canon (hash
, t
);
9915 /* We have built a main variant, based on the main variant of the
9916 inner type. Use it to build the variant we return. */
9917 if ((TYPE_ATTRIBUTES (innertype
) || TYPE_QUALS (innertype
))
9918 && TREE_TYPE (t
) != innertype
)
9919 return build_type_attribute_qual_variant (t
,
9920 TYPE_ATTRIBUTES (innertype
),
9921 TYPE_QUALS (innertype
));
9927 make_or_reuse_type (unsigned size
, int unsignedp
)
9931 if (size
== INT_TYPE_SIZE
)
9932 return unsignedp
? unsigned_type_node
: integer_type_node
;
9933 if (size
== CHAR_TYPE_SIZE
)
9934 return unsignedp
? unsigned_char_type_node
: signed_char_type_node
;
9935 if (size
== SHORT_TYPE_SIZE
)
9936 return unsignedp
? short_unsigned_type_node
: short_integer_type_node
;
9937 if (size
== LONG_TYPE_SIZE
)
9938 return unsignedp
? long_unsigned_type_node
: long_integer_type_node
;
9939 if (size
== LONG_LONG_TYPE_SIZE
)
9940 return (unsignedp
? long_long_unsigned_type_node
9941 : long_long_integer_type_node
);
9943 for (i
= 0; i
< NUM_INT_N_ENTS
; i
++)
9944 if (size
== int_n_data
[i
].bitsize
9945 && int_n_enabled_p
[i
])
9946 return (unsignedp
? int_n_trees
[i
].unsigned_type
9947 : int_n_trees
[i
].signed_type
);
9950 return make_unsigned_type (size
);
9952 return make_signed_type (size
);
9955 /* Create or reuse a fract type by SIZE, UNSIGNEDP, and SATP. */
9958 make_or_reuse_fract_type (unsigned size
, int unsignedp
, int satp
)
9962 if (size
== SHORT_FRACT_TYPE_SIZE
)
9963 return unsignedp
? sat_unsigned_short_fract_type_node
9964 : sat_short_fract_type_node
;
9965 if (size
== FRACT_TYPE_SIZE
)
9966 return unsignedp
? sat_unsigned_fract_type_node
: sat_fract_type_node
;
9967 if (size
== LONG_FRACT_TYPE_SIZE
)
9968 return unsignedp
? sat_unsigned_long_fract_type_node
9969 : sat_long_fract_type_node
;
9970 if (size
== LONG_LONG_FRACT_TYPE_SIZE
)
9971 return unsignedp
? sat_unsigned_long_long_fract_type_node
9972 : sat_long_long_fract_type_node
;
9976 if (size
== SHORT_FRACT_TYPE_SIZE
)
9977 return unsignedp
? unsigned_short_fract_type_node
9978 : short_fract_type_node
;
9979 if (size
== FRACT_TYPE_SIZE
)
9980 return unsignedp
? unsigned_fract_type_node
: fract_type_node
;
9981 if (size
== LONG_FRACT_TYPE_SIZE
)
9982 return unsignedp
? unsigned_long_fract_type_node
9983 : long_fract_type_node
;
9984 if (size
== LONG_LONG_FRACT_TYPE_SIZE
)
9985 return unsignedp
? unsigned_long_long_fract_type_node
9986 : long_long_fract_type_node
;
9989 return make_fract_type (size
, unsignedp
, satp
);
9992 /* Create or reuse an accum type by SIZE, UNSIGNEDP, and SATP. */
9995 make_or_reuse_accum_type (unsigned size
, int unsignedp
, int satp
)
9999 if (size
== SHORT_ACCUM_TYPE_SIZE
)
10000 return unsignedp
? sat_unsigned_short_accum_type_node
10001 : sat_short_accum_type_node
;
10002 if (size
== ACCUM_TYPE_SIZE
)
10003 return unsignedp
? sat_unsigned_accum_type_node
: sat_accum_type_node
;
10004 if (size
== LONG_ACCUM_TYPE_SIZE
)
10005 return unsignedp
? sat_unsigned_long_accum_type_node
10006 : sat_long_accum_type_node
;
10007 if (size
== LONG_LONG_ACCUM_TYPE_SIZE
)
10008 return unsignedp
? sat_unsigned_long_long_accum_type_node
10009 : sat_long_long_accum_type_node
;
10013 if (size
== SHORT_ACCUM_TYPE_SIZE
)
10014 return unsignedp
? unsigned_short_accum_type_node
10015 : short_accum_type_node
;
10016 if (size
== ACCUM_TYPE_SIZE
)
10017 return unsignedp
? unsigned_accum_type_node
: accum_type_node
;
10018 if (size
== LONG_ACCUM_TYPE_SIZE
)
10019 return unsignedp
? unsigned_long_accum_type_node
10020 : long_accum_type_node
;
10021 if (size
== LONG_LONG_ACCUM_TYPE_SIZE
)
10022 return unsignedp
? unsigned_long_long_accum_type_node
10023 : long_long_accum_type_node
;
10026 return make_accum_type (size
, unsignedp
, satp
);
10030 /* Create an atomic variant node for TYPE. This routine is called
10031 during initialization of data types to create the 5 basic atomic
10032 types. The generic build_variant_type function requires these to
10033 already be set up in order to function properly, so cannot be
10034 called from there. If ALIGN is non-zero, then ensure alignment is
10035 overridden to this value. */
10038 build_atomic_base (tree type
, unsigned int align
)
10042 /* Make sure its not already registered. */
10043 if ((t
= get_qualified_type (type
, TYPE_QUAL_ATOMIC
)))
10046 t
= build_variant_type_copy (type
);
10047 set_type_quals (t
, TYPE_QUAL_ATOMIC
);
10050 SET_TYPE_ALIGN (t
, align
);
10055 /* Information about the _FloatN and _FloatNx types. This must be in
10056 the same order as the corresponding TI_* enum values. */
10057 const floatn_type_info floatn_nx_types
[NUM_FLOATN_NX_TYPES
] =
10069 /* Create nodes for all integer types (and error_mark_node) using the sizes
10070 of C datatypes. SIGNED_CHAR specifies whether char is signed. */
10073 build_common_tree_nodes (bool signed_char
)
10077 error_mark_node
= make_node (ERROR_MARK
);
10078 TREE_TYPE (error_mark_node
) = error_mark_node
;
10080 initialize_sizetypes ();
10082 /* Define both `signed char' and `unsigned char'. */
10083 signed_char_type_node
= make_signed_type (CHAR_TYPE_SIZE
);
10084 TYPE_STRING_FLAG (signed_char_type_node
) = 1;
10085 unsigned_char_type_node
= make_unsigned_type (CHAR_TYPE_SIZE
);
10086 TYPE_STRING_FLAG (unsigned_char_type_node
) = 1;
10088 /* Define `char', which is like either `signed char' or `unsigned char'
10089 but not the same as either. */
10092 ? make_signed_type (CHAR_TYPE_SIZE
)
10093 : make_unsigned_type (CHAR_TYPE_SIZE
));
10094 TYPE_STRING_FLAG (char_type_node
) = 1;
10096 short_integer_type_node
= make_signed_type (SHORT_TYPE_SIZE
);
10097 short_unsigned_type_node
= make_unsigned_type (SHORT_TYPE_SIZE
);
10098 integer_type_node
= make_signed_type (INT_TYPE_SIZE
);
10099 unsigned_type_node
= make_unsigned_type (INT_TYPE_SIZE
);
10100 long_integer_type_node
= make_signed_type (LONG_TYPE_SIZE
);
10101 long_unsigned_type_node
= make_unsigned_type (LONG_TYPE_SIZE
);
10102 long_long_integer_type_node
= make_signed_type (LONG_LONG_TYPE_SIZE
);
10103 long_long_unsigned_type_node
= make_unsigned_type (LONG_LONG_TYPE_SIZE
);
10105 for (i
= 0; i
< NUM_INT_N_ENTS
; i
++)
10107 int_n_trees
[i
].signed_type
= make_signed_type (int_n_data
[i
].bitsize
);
10108 int_n_trees
[i
].unsigned_type
= make_unsigned_type (int_n_data
[i
].bitsize
);
10110 if (int_n_enabled_p
[i
])
10112 integer_types
[itk_intN_0
+ i
* 2] = int_n_trees
[i
].signed_type
;
10113 integer_types
[itk_unsigned_intN_0
+ i
* 2] = int_n_trees
[i
].unsigned_type
;
10117 /* Define a boolean type. This type only represents boolean values but
10118 may be larger than char depending on the value of BOOL_TYPE_SIZE. */
10119 boolean_type_node
= make_unsigned_type (BOOL_TYPE_SIZE
);
10120 TREE_SET_CODE (boolean_type_node
, BOOLEAN_TYPE
);
10121 TYPE_PRECISION (boolean_type_node
) = 1;
10122 TYPE_MAX_VALUE (boolean_type_node
) = build_int_cst (boolean_type_node
, 1);
10124 /* Define what type to use for size_t. */
10125 if (strcmp (SIZE_TYPE
, "unsigned int") == 0)
10126 size_type_node
= unsigned_type_node
;
10127 else if (strcmp (SIZE_TYPE
, "long unsigned int") == 0)
10128 size_type_node
= long_unsigned_type_node
;
10129 else if (strcmp (SIZE_TYPE
, "long long unsigned int") == 0)
10130 size_type_node
= long_long_unsigned_type_node
;
10131 else if (strcmp (SIZE_TYPE
, "short unsigned int") == 0)
10132 size_type_node
= short_unsigned_type_node
;
10137 size_type_node
= NULL_TREE
;
10138 for (i
= 0; i
< NUM_INT_N_ENTS
; i
++)
10139 if (int_n_enabled_p
[i
])
10141 char name
[50], altname
[50];
10142 sprintf (name
, "__int%d unsigned", int_n_data
[i
].bitsize
);
10143 sprintf (altname
, "__int%d__ unsigned", int_n_data
[i
].bitsize
);
10145 if (strcmp (name
, SIZE_TYPE
) == 0
10146 || strcmp (altname
, SIZE_TYPE
) == 0)
10148 size_type_node
= int_n_trees
[i
].unsigned_type
;
10151 if (size_type_node
== NULL_TREE
)
10152 gcc_unreachable ();
10155 /* Define what type to use for ptrdiff_t. */
10156 if (strcmp (PTRDIFF_TYPE
, "int") == 0)
10157 ptrdiff_type_node
= integer_type_node
;
10158 else if (strcmp (PTRDIFF_TYPE
, "long int") == 0)
10159 ptrdiff_type_node
= long_integer_type_node
;
10160 else if (strcmp (PTRDIFF_TYPE
, "long long int") == 0)
10161 ptrdiff_type_node
= long_long_integer_type_node
;
10162 else if (strcmp (PTRDIFF_TYPE
, "short int") == 0)
10163 ptrdiff_type_node
= short_integer_type_node
;
10166 ptrdiff_type_node
= NULL_TREE
;
10167 for (int i
= 0; i
< NUM_INT_N_ENTS
; i
++)
10168 if (int_n_enabled_p
[i
])
10170 char name
[50], altname
[50];
10171 sprintf (name
, "__int%d", int_n_data
[i
].bitsize
);
10172 sprintf (altname
, "__int%d__", int_n_data
[i
].bitsize
);
10174 if (strcmp (name
, PTRDIFF_TYPE
) == 0
10175 || strcmp (altname
, PTRDIFF_TYPE
) == 0)
10176 ptrdiff_type_node
= int_n_trees
[i
].signed_type
;
10178 if (ptrdiff_type_node
== NULL_TREE
)
10179 gcc_unreachable ();
10182 /* Fill in the rest of the sized types. Reuse existing type nodes
10184 intQI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (QImode
), 0);
10185 intHI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (HImode
), 0);
10186 intSI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (SImode
), 0);
10187 intDI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (DImode
), 0);
10188 intTI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (TImode
), 0);
10190 unsigned_intQI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (QImode
), 1);
10191 unsigned_intHI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (HImode
), 1);
10192 unsigned_intSI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (SImode
), 1);
10193 unsigned_intDI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (DImode
), 1);
10194 unsigned_intTI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (TImode
), 1);
10196 /* Don't call build_qualified type for atomics. That routine does
10197 special processing for atomics, and until they are initialized
10198 it's better not to make that call.
10200 Check to see if there is a target override for atomic types. */
10202 atomicQI_type_node
= build_atomic_base (unsigned_intQI_type_node
,
10203 targetm
.atomic_align_for_mode (QImode
));
10204 atomicHI_type_node
= build_atomic_base (unsigned_intHI_type_node
,
10205 targetm
.atomic_align_for_mode (HImode
));
10206 atomicSI_type_node
= build_atomic_base (unsigned_intSI_type_node
,
10207 targetm
.atomic_align_for_mode (SImode
));
10208 atomicDI_type_node
= build_atomic_base (unsigned_intDI_type_node
,
10209 targetm
.atomic_align_for_mode (DImode
));
10210 atomicTI_type_node
= build_atomic_base (unsigned_intTI_type_node
,
10211 targetm
.atomic_align_for_mode (TImode
));
10213 access_public_node
= get_identifier ("public");
10214 access_protected_node
= get_identifier ("protected");
10215 access_private_node
= get_identifier ("private");
10217 /* Define these next since types below may used them. */
10218 integer_zero_node
= build_int_cst (integer_type_node
, 0);
10219 integer_one_node
= build_int_cst (integer_type_node
, 1);
10220 integer_three_node
= build_int_cst (integer_type_node
, 3);
10221 integer_minus_one_node
= build_int_cst (integer_type_node
, -1);
10223 size_zero_node
= size_int (0);
10224 size_one_node
= size_int (1);
10225 bitsize_zero_node
= bitsize_int (0);
10226 bitsize_one_node
= bitsize_int (1);
10227 bitsize_unit_node
= bitsize_int (BITS_PER_UNIT
);
10229 boolean_false_node
= TYPE_MIN_VALUE (boolean_type_node
);
10230 boolean_true_node
= TYPE_MAX_VALUE (boolean_type_node
);
10232 void_type_node
= make_node (VOID_TYPE
);
10233 layout_type (void_type_node
);
10235 /* We are not going to have real types in C with less than byte alignment,
10236 so we might as well not have any types that claim to have it. */
10237 SET_TYPE_ALIGN (void_type_node
, BITS_PER_UNIT
);
10238 TYPE_USER_ALIGN (void_type_node
) = 0;
10240 void_node
= make_node (VOID_CST
);
10241 TREE_TYPE (void_node
) = void_type_node
;
10243 null_pointer_node
= build_int_cst (build_pointer_type (void_type_node
), 0);
10244 layout_type (TREE_TYPE (null_pointer_node
));
10246 ptr_type_node
= build_pointer_type (void_type_node
);
10247 const_ptr_type_node
10248 = build_pointer_type (build_type_variant (void_type_node
, 1, 0));
10249 for (unsigned i
= 0;
10250 i
< sizeof (builtin_structptr_types
) / sizeof (builtin_structptr_type
);
10252 builtin_structptr_types
[i
].node
= builtin_structptr_types
[i
].base
;
10254 pointer_sized_int_node
= build_nonstandard_integer_type (POINTER_SIZE
, 1);
10256 float_type_node
= make_node (REAL_TYPE
);
10257 TYPE_PRECISION (float_type_node
) = FLOAT_TYPE_SIZE
;
10258 layout_type (float_type_node
);
10260 double_type_node
= make_node (REAL_TYPE
);
10261 TYPE_PRECISION (double_type_node
) = DOUBLE_TYPE_SIZE
;
10262 layout_type (double_type_node
);
10264 long_double_type_node
= make_node (REAL_TYPE
);
10265 TYPE_PRECISION (long_double_type_node
) = LONG_DOUBLE_TYPE_SIZE
;
10266 layout_type (long_double_type_node
);
10268 for (i
= 0; i
< NUM_FLOATN_NX_TYPES
; i
++)
10270 int n
= floatn_nx_types
[i
].n
;
10271 bool extended
= floatn_nx_types
[i
].extended
;
10272 scalar_float_mode mode
;
10273 if (!targetm
.floatn_mode (n
, extended
).exists (&mode
))
10275 int precision
= GET_MODE_PRECISION (mode
);
10276 /* Work around the rs6000 KFmode having precision 113 not
10278 const struct real_format
*fmt
= REAL_MODE_FORMAT (mode
);
10279 gcc_assert (fmt
->b
== 2 && fmt
->emin
+ fmt
->emax
== 3);
10280 int min_precision
= fmt
->p
+ ceil_log2 (fmt
->emax
- fmt
->emin
);
10282 gcc_assert (min_precision
== n
);
10283 if (precision
< min_precision
)
10284 precision
= min_precision
;
10285 FLOATN_NX_TYPE_NODE (i
) = make_node (REAL_TYPE
);
10286 TYPE_PRECISION (FLOATN_NX_TYPE_NODE (i
)) = precision
;
10287 layout_type (FLOATN_NX_TYPE_NODE (i
));
10288 SET_TYPE_MODE (FLOATN_NX_TYPE_NODE (i
), mode
);
10291 float_ptr_type_node
= build_pointer_type (float_type_node
);
10292 double_ptr_type_node
= build_pointer_type (double_type_node
);
10293 long_double_ptr_type_node
= build_pointer_type (long_double_type_node
);
10294 integer_ptr_type_node
= build_pointer_type (integer_type_node
);
10296 /* Fixed size integer types. */
10297 uint16_type_node
= make_or_reuse_type (16, 1);
10298 uint32_type_node
= make_or_reuse_type (32, 1);
10299 uint64_type_node
= make_or_reuse_type (64, 1);
10301 /* Decimal float types. */
10302 dfloat32_type_node
= make_node (REAL_TYPE
);
10303 TYPE_PRECISION (dfloat32_type_node
) = DECIMAL32_TYPE_SIZE
;
10304 SET_TYPE_MODE (dfloat32_type_node
, SDmode
);
10305 layout_type (dfloat32_type_node
);
10306 dfloat32_ptr_type_node
= build_pointer_type (dfloat32_type_node
);
10308 dfloat64_type_node
= make_node (REAL_TYPE
);
10309 TYPE_PRECISION (dfloat64_type_node
) = DECIMAL64_TYPE_SIZE
;
10310 SET_TYPE_MODE (dfloat64_type_node
, DDmode
);
10311 layout_type (dfloat64_type_node
);
10312 dfloat64_ptr_type_node
= build_pointer_type (dfloat64_type_node
);
10314 dfloat128_type_node
= make_node (REAL_TYPE
);
10315 TYPE_PRECISION (dfloat128_type_node
) = DECIMAL128_TYPE_SIZE
;
10316 SET_TYPE_MODE (dfloat128_type_node
, TDmode
);
10317 layout_type (dfloat128_type_node
);
10318 dfloat128_ptr_type_node
= build_pointer_type (dfloat128_type_node
);
10320 complex_integer_type_node
= build_complex_type (integer_type_node
, true);
10321 complex_float_type_node
= build_complex_type (float_type_node
, true);
10322 complex_double_type_node
= build_complex_type (double_type_node
, true);
10323 complex_long_double_type_node
= build_complex_type (long_double_type_node
,
10326 for (i
= 0; i
< NUM_FLOATN_NX_TYPES
; i
++)
10328 if (FLOATN_NX_TYPE_NODE (i
) != NULL_TREE
)
10329 COMPLEX_FLOATN_NX_TYPE_NODE (i
)
10330 = build_complex_type (FLOATN_NX_TYPE_NODE (i
));
10333 /* Make fixed-point nodes based on sat/non-sat and signed/unsigned. */
10334 #define MAKE_FIXED_TYPE_NODE(KIND,SIZE) \
10335 sat_ ## KIND ## _type_node = \
10336 make_sat_signed_ ## KIND ## _type (SIZE); \
10337 sat_unsigned_ ## KIND ## _type_node = \
10338 make_sat_unsigned_ ## KIND ## _type (SIZE); \
10339 KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
10340 unsigned_ ## KIND ## _type_node = \
10341 make_unsigned_ ## KIND ## _type (SIZE);
10343 #define MAKE_FIXED_TYPE_NODE_WIDTH(KIND,WIDTH,SIZE) \
10344 sat_ ## WIDTH ## KIND ## _type_node = \
10345 make_sat_signed_ ## KIND ## _type (SIZE); \
10346 sat_unsigned_ ## WIDTH ## KIND ## _type_node = \
10347 make_sat_unsigned_ ## KIND ## _type (SIZE); \
10348 WIDTH ## KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
10349 unsigned_ ## WIDTH ## KIND ## _type_node = \
10350 make_unsigned_ ## KIND ## _type (SIZE);
10352 /* Make fixed-point type nodes based on four different widths. */
10353 #define MAKE_FIXED_TYPE_NODE_FAMILY(N1,N2) \
10354 MAKE_FIXED_TYPE_NODE_WIDTH (N1, short_, SHORT_ ## N2 ## _TYPE_SIZE) \
10355 MAKE_FIXED_TYPE_NODE (N1, N2 ## _TYPE_SIZE) \
10356 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_, LONG_ ## N2 ## _TYPE_SIZE) \
10357 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_long_, LONG_LONG_ ## N2 ## _TYPE_SIZE)
10359 /* Make fixed-point mode nodes based on sat/non-sat and signed/unsigned. */
10360 #define MAKE_FIXED_MODE_NODE(KIND,NAME,MODE) \
10361 NAME ## _type_node = \
10362 make_or_reuse_signed_ ## KIND ## _type (GET_MODE_BITSIZE (MODE ## mode)); \
10363 u ## NAME ## _type_node = \
10364 make_or_reuse_unsigned_ ## KIND ## _type \
10365 (GET_MODE_BITSIZE (U ## MODE ## mode)); \
10366 sat_ ## NAME ## _type_node = \
10367 make_or_reuse_sat_signed_ ## KIND ## _type \
10368 (GET_MODE_BITSIZE (MODE ## mode)); \
10369 sat_u ## NAME ## _type_node = \
10370 make_or_reuse_sat_unsigned_ ## KIND ## _type \
10371 (GET_MODE_BITSIZE (U ## MODE ## mode));
10373 /* Fixed-point type and mode nodes. */
10374 MAKE_FIXED_TYPE_NODE_FAMILY (fract
, FRACT
)
10375 MAKE_FIXED_TYPE_NODE_FAMILY (accum
, ACCUM
)
10376 MAKE_FIXED_MODE_NODE (fract
, qq
, QQ
)
10377 MAKE_FIXED_MODE_NODE (fract
, hq
, HQ
)
10378 MAKE_FIXED_MODE_NODE (fract
, sq
, SQ
)
10379 MAKE_FIXED_MODE_NODE (fract
, dq
, DQ
)
10380 MAKE_FIXED_MODE_NODE (fract
, tq
, TQ
)
10381 MAKE_FIXED_MODE_NODE (accum
, ha
, HA
)
10382 MAKE_FIXED_MODE_NODE (accum
, sa
, SA
)
10383 MAKE_FIXED_MODE_NODE (accum
, da
, DA
)
10384 MAKE_FIXED_MODE_NODE (accum
, ta
, TA
)
10387 tree t
= targetm
.build_builtin_va_list ();
10389 /* Many back-ends define record types without setting TYPE_NAME.
10390 If we copied the record type here, we'd keep the original
10391 record type without a name. This breaks name mangling. So,
10392 don't copy record types and let c_common_nodes_and_builtins()
10393 declare the type to be __builtin_va_list. */
10394 if (TREE_CODE (t
) != RECORD_TYPE
)
10395 t
= build_variant_type_copy (t
);
10397 va_list_type_node
= t
;
10400 /* SCEV analyzer global shared trees. */
10401 chrec_dont_know
= make_node (SCEV_NOT_KNOWN
);
10402 TREE_TYPE (chrec_dont_know
) = void_type_node
;
10403 chrec_known
= make_node (SCEV_KNOWN
);
10404 TREE_TYPE (chrec_known
) = void_type_node
;
10407 /* Modify DECL for given flags.
10408 TM_PURE attribute is set only on types, so the function will modify
10409 DECL's type when ECF_TM_PURE is used. */
10412 set_call_expr_flags (tree decl
, int flags
)
10414 if (flags
& ECF_NOTHROW
)
10415 TREE_NOTHROW (decl
) = 1;
10416 if (flags
& ECF_CONST
)
10417 TREE_READONLY (decl
) = 1;
10418 if (flags
& ECF_PURE
)
10419 DECL_PURE_P (decl
) = 1;
10420 if (flags
& ECF_LOOPING_CONST_OR_PURE
)
10421 DECL_LOOPING_CONST_OR_PURE_P (decl
) = 1;
10422 if (flags
& ECF_NOVOPS
)
10423 DECL_IS_NOVOPS (decl
) = 1;
10424 if (flags
& ECF_NORETURN
)
10425 TREE_THIS_VOLATILE (decl
) = 1;
10426 if (flags
& ECF_MALLOC
)
10427 DECL_IS_MALLOC (decl
) = 1;
10428 if (flags
& ECF_RETURNS_TWICE
)
10429 DECL_IS_RETURNS_TWICE (decl
) = 1;
10430 if (flags
& ECF_LEAF
)
10431 DECL_ATTRIBUTES (decl
) = tree_cons (get_identifier ("leaf"),
10432 NULL
, DECL_ATTRIBUTES (decl
));
10433 if (flags
& ECF_COLD
)
10434 DECL_ATTRIBUTES (decl
) = tree_cons (get_identifier ("cold"),
10435 NULL
, DECL_ATTRIBUTES (decl
));
10436 if (flags
& ECF_RET1
)
10437 DECL_ATTRIBUTES (decl
)
10438 = tree_cons (get_identifier ("fn spec"),
10439 build_tree_list (NULL_TREE
, build_string (1, "1")),
10440 DECL_ATTRIBUTES (decl
));
10441 if ((flags
& ECF_TM_PURE
) && flag_tm
)
10442 apply_tm_attr (decl
, get_identifier ("transaction_pure"));
10443 /* Looping const or pure is implied by noreturn.
10444 There is currently no way to declare looping const or looping pure alone. */
10445 gcc_assert (!(flags
& ECF_LOOPING_CONST_OR_PURE
)
10446 || ((flags
& ECF_NORETURN
) && (flags
& (ECF_CONST
| ECF_PURE
))));
10450 /* A subroutine of build_common_builtin_nodes. Define a builtin function. */
10453 local_define_builtin (const char *name
, tree type
, enum built_in_function code
,
10454 const char *library_name
, int ecf_flags
)
10458 decl
= add_builtin_function (name
, type
, code
, BUILT_IN_NORMAL
,
10459 library_name
, NULL_TREE
);
10460 set_call_expr_flags (decl
, ecf_flags
);
10462 set_builtin_decl (code
, decl
, true);
10465 /* Call this function after instantiating all builtins that the language
10466 front end cares about. This will build the rest of the builtins
10467 and internal functions that are relied upon by the tree optimizers and
10471 build_common_builtin_nodes (void)
10476 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE
)
10477 || !builtin_decl_explicit_p (BUILT_IN_ABORT
))
10479 ftype
= build_function_type (void_type_node
, void_list_node
);
10480 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE
))
10481 local_define_builtin ("__builtin_unreachable", ftype
,
10482 BUILT_IN_UNREACHABLE
,
10483 "__builtin_unreachable",
10484 ECF_NOTHROW
| ECF_LEAF
| ECF_NORETURN
10485 | ECF_CONST
| ECF_COLD
);
10486 if (!builtin_decl_explicit_p (BUILT_IN_ABORT
))
10487 local_define_builtin ("__builtin_abort", ftype
, BUILT_IN_ABORT
,
10489 ECF_LEAF
| ECF_NORETURN
| ECF_CONST
| ECF_COLD
);
10492 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY
)
10493 || !builtin_decl_explicit_p (BUILT_IN_MEMMOVE
))
10495 ftype
= build_function_type_list (ptr_type_node
,
10496 ptr_type_node
, const_ptr_type_node
,
10497 size_type_node
, NULL_TREE
);
10499 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY
))
10500 local_define_builtin ("__builtin_memcpy", ftype
, BUILT_IN_MEMCPY
,
10501 "memcpy", ECF_NOTHROW
| ECF_LEAF
| ECF_RET1
);
10502 if (!builtin_decl_explicit_p (BUILT_IN_MEMMOVE
))
10503 local_define_builtin ("__builtin_memmove", ftype
, BUILT_IN_MEMMOVE
,
10504 "memmove", ECF_NOTHROW
| ECF_LEAF
| ECF_RET1
);
10507 if (!builtin_decl_explicit_p (BUILT_IN_MEMCMP
))
10509 ftype
= build_function_type_list (integer_type_node
, const_ptr_type_node
,
10510 const_ptr_type_node
, size_type_node
,
10512 local_define_builtin ("__builtin_memcmp", ftype
, BUILT_IN_MEMCMP
,
10513 "memcmp", ECF_PURE
| ECF_NOTHROW
| ECF_LEAF
);
10516 if (!builtin_decl_explicit_p (BUILT_IN_MEMSET
))
10518 ftype
= build_function_type_list (ptr_type_node
,
10519 ptr_type_node
, integer_type_node
,
10520 size_type_node
, NULL_TREE
);
10521 local_define_builtin ("__builtin_memset", ftype
, BUILT_IN_MEMSET
,
10522 "memset", ECF_NOTHROW
| ECF_LEAF
| ECF_RET1
);
10525 /* If we're checking the stack, `alloca' can throw. */
10526 const int alloca_flags
10527 = ECF_MALLOC
| ECF_LEAF
| (flag_stack_check
? 0 : ECF_NOTHROW
);
10529 if (!builtin_decl_explicit_p (BUILT_IN_ALLOCA
))
10531 ftype
= build_function_type_list (ptr_type_node
,
10532 size_type_node
, NULL_TREE
);
10533 local_define_builtin ("__builtin_alloca", ftype
, BUILT_IN_ALLOCA
,
10534 "alloca", alloca_flags
);
10537 ftype
= build_function_type_list (ptr_type_node
, size_type_node
,
10538 size_type_node
, NULL_TREE
);
10539 local_define_builtin ("__builtin_alloca_with_align", ftype
,
10540 BUILT_IN_ALLOCA_WITH_ALIGN
,
10541 "__builtin_alloca_with_align",
10544 ftype
= build_function_type_list (ptr_type_node
, size_type_node
,
10545 size_type_node
, size_type_node
, NULL_TREE
);
10546 local_define_builtin ("__builtin_alloca_with_align_and_max", ftype
,
10547 BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
,
10548 "__builtin_alloca_with_align_and_max",
10551 ftype
= build_function_type_list (void_type_node
,
10552 ptr_type_node
, ptr_type_node
,
10553 ptr_type_node
, NULL_TREE
);
10554 local_define_builtin ("__builtin_init_trampoline", ftype
,
10555 BUILT_IN_INIT_TRAMPOLINE
,
10556 "__builtin_init_trampoline", ECF_NOTHROW
| ECF_LEAF
);
10557 local_define_builtin ("__builtin_init_heap_trampoline", ftype
,
10558 BUILT_IN_INIT_HEAP_TRAMPOLINE
,
10559 "__builtin_init_heap_trampoline",
10560 ECF_NOTHROW
| ECF_LEAF
);
10561 local_define_builtin ("__builtin_init_descriptor", ftype
,
10562 BUILT_IN_INIT_DESCRIPTOR
,
10563 "__builtin_init_descriptor", ECF_NOTHROW
| ECF_LEAF
);
10565 ftype
= build_function_type_list (ptr_type_node
, ptr_type_node
, NULL_TREE
);
10566 local_define_builtin ("__builtin_adjust_trampoline", ftype
,
10567 BUILT_IN_ADJUST_TRAMPOLINE
,
10568 "__builtin_adjust_trampoline",
10569 ECF_CONST
| ECF_NOTHROW
);
10570 local_define_builtin ("__builtin_adjust_descriptor", ftype
,
10571 BUILT_IN_ADJUST_DESCRIPTOR
,
10572 "__builtin_adjust_descriptor",
10573 ECF_CONST
| ECF_NOTHROW
);
10575 ftype
= build_function_type_list (void_type_node
,
10576 ptr_type_node
, ptr_type_node
, NULL_TREE
);
10577 local_define_builtin ("__builtin_nonlocal_goto", ftype
,
10578 BUILT_IN_NONLOCAL_GOTO
,
10579 "__builtin_nonlocal_goto",
10580 ECF_NORETURN
| ECF_NOTHROW
);
10582 ftype
= build_function_type_list (void_type_node
,
10583 ptr_type_node
, ptr_type_node
, NULL_TREE
);
10584 local_define_builtin ("__builtin_setjmp_setup", ftype
,
10585 BUILT_IN_SETJMP_SETUP
,
10586 "__builtin_setjmp_setup", ECF_NOTHROW
);
10588 ftype
= build_function_type_list (void_type_node
, ptr_type_node
, NULL_TREE
);
10589 local_define_builtin ("__builtin_setjmp_receiver", ftype
,
10590 BUILT_IN_SETJMP_RECEIVER
,
10591 "__builtin_setjmp_receiver", ECF_NOTHROW
| ECF_LEAF
);
10593 ftype
= build_function_type_list (ptr_type_node
, NULL_TREE
);
10594 local_define_builtin ("__builtin_stack_save", ftype
, BUILT_IN_STACK_SAVE
,
10595 "__builtin_stack_save", ECF_NOTHROW
| ECF_LEAF
);
10597 ftype
= build_function_type_list (void_type_node
, ptr_type_node
, NULL_TREE
);
10598 local_define_builtin ("__builtin_stack_restore", ftype
,
10599 BUILT_IN_STACK_RESTORE
,
10600 "__builtin_stack_restore", ECF_NOTHROW
| ECF_LEAF
);
10602 ftype
= build_function_type_list (integer_type_node
, const_ptr_type_node
,
10603 const_ptr_type_node
, size_type_node
,
10605 local_define_builtin ("__builtin_memcmp_eq", ftype
, BUILT_IN_MEMCMP_EQ
,
10606 "__builtin_memcmp_eq",
10607 ECF_PURE
| ECF_NOTHROW
| ECF_LEAF
);
10609 local_define_builtin ("__builtin_strncmp_eq", ftype
, BUILT_IN_STRNCMP_EQ
,
10610 "__builtin_strncmp_eq",
10611 ECF_PURE
| ECF_NOTHROW
| ECF_LEAF
);
10613 local_define_builtin ("__builtin_strcmp_eq", ftype
, BUILT_IN_STRCMP_EQ
,
10614 "__builtin_strcmp_eq",
10615 ECF_PURE
| ECF_NOTHROW
| ECF_LEAF
);
10617 /* If there's a possibility that we might use the ARM EABI, build the
10618 alternate __cxa_end_cleanup node used to resume from C++. */
10619 if (targetm
.arm_eabi_unwinder
)
10621 ftype
= build_function_type_list (void_type_node
, NULL_TREE
);
10622 local_define_builtin ("__builtin_cxa_end_cleanup", ftype
,
10623 BUILT_IN_CXA_END_CLEANUP
,
10624 "__cxa_end_cleanup", ECF_NORETURN
| ECF_LEAF
);
10627 ftype
= build_function_type_list (void_type_node
, ptr_type_node
, NULL_TREE
);
10628 local_define_builtin ("__builtin_unwind_resume", ftype
,
10629 BUILT_IN_UNWIND_RESUME
,
10630 ((targetm_common
.except_unwind_info (&global_options
)
10632 ? "_Unwind_SjLj_Resume" : "_Unwind_Resume"),
10635 if (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS
) == NULL_TREE
)
10637 ftype
= build_function_type_list (ptr_type_node
, integer_type_node
,
10639 local_define_builtin ("__builtin_return_address", ftype
,
10640 BUILT_IN_RETURN_ADDRESS
,
10641 "__builtin_return_address",
10645 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER
)
10646 || !builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT
))
10648 ftype
= build_function_type_list (void_type_node
, ptr_type_node
,
10649 ptr_type_node
, NULL_TREE
);
10650 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER
))
10651 local_define_builtin ("__cyg_profile_func_enter", ftype
,
10652 BUILT_IN_PROFILE_FUNC_ENTER
,
10653 "__cyg_profile_func_enter", 0);
10654 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT
))
10655 local_define_builtin ("__cyg_profile_func_exit", ftype
,
10656 BUILT_IN_PROFILE_FUNC_EXIT
,
10657 "__cyg_profile_func_exit", 0);
10660 /* The exception object and filter values from the runtime. The argument
10661 must be zero before exception lowering, i.e. from the front end. After
10662 exception lowering, it will be the region number for the exception
10663 landing pad. These functions are PURE instead of CONST to prevent
10664 them from being hoisted past the exception edge that will initialize
10665 its value in the landing pad. */
10666 ftype
= build_function_type_list (ptr_type_node
,
10667 integer_type_node
, NULL_TREE
);
10668 ecf_flags
= ECF_PURE
| ECF_NOTHROW
| ECF_LEAF
;
10669 /* Only use TM_PURE if we have TM language support. */
10670 if (builtin_decl_explicit_p (BUILT_IN_TM_LOAD_1
))
10671 ecf_flags
|= ECF_TM_PURE
;
10672 local_define_builtin ("__builtin_eh_pointer", ftype
, BUILT_IN_EH_POINTER
,
10673 "__builtin_eh_pointer", ecf_flags
);
10675 tmp
= lang_hooks
.types
.type_for_mode (targetm
.eh_return_filter_mode (), 0);
10676 ftype
= build_function_type_list (tmp
, integer_type_node
, NULL_TREE
);
10677 local_define_builtin ("__builtin_eh_filter", ftype
, BUILT_IN_EH_FILTER
,
10678 "__builtin_eh_filter", ECF_PURE
| ECF_NOTHROW
| ECF_LEAF
);
10680 ftype
= build_function_type_list (void_type_node
,
10681 integer_type_node
, integer_type_node
,
10683 local_define_builtin ("__builtin_eh_copy_values", ftype
,
10684 BUILT_IN_EH_COPY_VALUES
,
10685 "__builtin_eh_copy_values", ECF_NOTHROW
);
10687 /* Complex multiplication and division. These are handled as builtins
10688 rather than optabs because emit_library_call_value doesn't support
10689 complex. Further, we can do slightly better with folding these
10690 beasties if the real and complex parts of the arguments are separate. */
10694 for (mode
= MIN_MODE_COMPLEX_FLOAT
; mode
<= MAX_MODE_COMPLEX_FLOAT
; ++mode
)
10696 char mode_name_buf
[4], *q
;
10698 enum built_in_function mcode
, dcode
;
10699 tree type
, inner_type
;
10700 const char *prefix
= "__";
10702 if (targetm
.libfunc_gnu_prefix
)
10705 type
= lang_hooks
.types
.type_for_mode ((machine_mode
) mode
, 0);
10708 inner_type
= TREE_TYPE (type
);
10710 ftype
= build_function_type_list (type
, inner_type
, inner_type
,
10711 inner_type
, inner_type
, NULL_TREE
);
10713 mcode
= ((enum built_in_function
)
10714 (BUILT_IN_COMPLEX_MUL_MIN
+ mode
- MIN_MODE_COMPLEX_FLOAT
));
10715 dcode
= ((enum built_in_function
)
10716 (BUILT_IN_COMPLEX_DIV_MIN
+ mode
- MIN_MODE_COMPLEX_FLOAT
));
10718 for (p
= GET_MODE_NAME (mode
), q
= mode_name_buf
; *p
; p
++, q
++)
10722 /* For -ftrapping-math these should throw from a former
10723 -fnon-call-exception stmt. */
10724 built_in_names
[mcode
] = concat (prefix
, "mul", mode_name_buf
, "3",
10726 local_define_builtin (built_in_names
[mcode
], ftype
, mcode
,
10727 built_in_names
[mcode
],
10728 ECF_CONST
| ECF_LEAF
);
10730 built_in_names
[dcode
] = concat (prefix
, "div", mode_name_buf
, "3",
10732 local_define_builtin (built_in_names
[dcode
], ftype
, dcode
,
10733 built_in_names
[dcode
],
10734 ECF_CONST
| ECF_LEAF
);
10738 init_internal_fns ();
10741 /* HACK. GROSS. This is absolutely disgusting. I wish there was a
10744 If we requested a pointer to a vector, build up the pointers that
10745 we stripped off while looking for the inner type. Similarly for
10746 return values from functions.
10748 The argument TYPE is the top of the chain, and BOTTOM is the
10749 new type which we will point to. */
10752 reconstruct_complex_type (tree type
, tree bottom
)
10756 if (TREE_CODE (type
) == POINTER_TYPE
)
10758 inner
= reconstruct_complex_type (TREE_TYPE (type
), bottom
);
10759 outer
= build_pointer_type_for_mode (inner
, TYPE_MODE (type
),
10760 TYPE_REF_CAN_ALIAS_ALL (type
));
10762 else if (TREE_CODE (type
) == REFERENCE_TYPE
)
10764 inner
= reconstruct_complex_type (TREE_TYPE (type
), bottom
);
10765 outer
= build_reference_type_for_mode (inner
, TYPE_MODE (type
),
10766 TYPE_REF_CAN_ALIAS_ALL (type
));
10768 else if (TREE_CODE (type
) == ARRAY_TYPE
)
10770 inner
= reconstruct_complex_type (TREE_TYPE (type
), bottom
);
10771 outer
= build_array_type (inner
, TYPE_DOMAIN (type
));
10773 else if (TREE_CODE (type
) == FUNCTION_TYPE
)
10775 inner
= reconstruct_complex_type (TREE_TYPE (type
), bottom
);
10776 outer
= build_function_type (inner
, TYPE_ARG_TYPES (type
));
10778 else if (TREE_CODE (type
) == METHOD_TYPE
)
10780 inner
= reconstruct_complex_type (TREE_TYPE (type
), bottom
);
10781 /* The build_method_type_directly() routine prepends 'this' to argument list,
10782 so we must compensate by getting rid of it. */
10784 = build_method_type_directly
10785 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (type
))),
10787 TREE_CHAIN (TYPE_ARG_TYPES (type
)));
10789 else if (TREE_CODE (type
) == OFFSET_TYPE
)
10791 inner
= reconstruct_complex_type (TREE_TYPE (type
), bottom
);
10792 outer
= build_offset_type (TYPE_OFFSET_BASETYPE (type
), inner
);
10797 return build_type_attribute_qual_variant (outer
, TYPE_ATTRIBUTES (type
),
10798 TYPE_QUALS (type
));
10801 /* Returns a vector tree node given a mode (integer, vector, or BLKmode) and
10804 build_vector_type_for_mode (tree innertype
, machine_mode mode
)
10807 unsigned int bitsize
;
10809 switch (GET_MODE_CLASS (mode
))
10811 case MODE_VECTOR_BOOL
:
10812 case MODE_VECTOR_INT
:
10813 case MODE_VECTOR_FLOAT
:
10814 case MODE_VECTOR_FRACT
:
10815 case MODE_VECTOR_UFRACT
:
10816 case MODE_VECTOR_ACCUM
:
10817 case MODE_VECTOR_UACCUM
:
10818 nunits
= GET_MODE_NUNITS (mode
);
10822 /* Check that there are no leftover bits. */
10823 bitsize
= GET_MODE_BITSIZE (as_a
<scalar_int_mode
> (mode
));
10824 gcc_assert (bitsize
% TREE_INT_CST_LOW (TYPE_SIZE (innertype
)) == 0);
10825 nunits
= bitsize
/ TREE_INT_CST_LOW (TYPE_SIZE (innertype
));
10829 gcc_unreachable ();
10832 return make_vector_type (innertype
, nunits
, mode
);
10835 /* Similarly, but takes the inner type and number of units, which must be
10839 build_vector_type (tree innertype
, poly_int64 nunits
)
10841 return make_vector_type (innertype
, nunits
, VOIDmode
);
10844 /* Build truth vector with specified length and number of units. */
10847 build_truth_vector_type (poly_uint64 nunits
, poly_uint64 vector_size
)
10849 machine_mode mask_mode
10850 = targetm
.vectorize
.get_mask_mode (nunits
, vector_size
).else_blk ();
10853 if (mask_mode
== BLKmode
)
10854 vsize
= vector_size
* BITS_PER_UNIT
;
10856 vsize
= GET_MODE_BITSIZE (mask_mode
);
10858 unsigned HOST_WIDE_INT esize
= vector_element_size (vsize
, nunits
);
10860 tree bool_type
= build_nonstandard_boolean_type (esize
);
10862 return make_vector_type (bool_type
, nunits
, mask_mode
);
10865 /* Returns a vector type corresponding to a comparison of VECTYPE. */
10868 build_same_sized_truth_vector_type (tree vectype
)
10870 if (VECTOR_BOOLEAN_TYPE_P (vectype
))
10873 poly_uint64 size
= GET_MODE_SIZE (TYPE_MODE (vectype
));
10875 if (known_eq (size
, 0U))
10876 size
= tree_to_uhwi (TYPE_SIZE_UNIT (vectype
));
10878 return build_truth_vector_type (TYPE_VECTOR_SUBPARTS (vectype
), size
);
10881 /* Similarly, but builds a variant type with TYPE_VECTOR_OPAQUE set. */
10884 build_opaque_vector_type (tree innertype
, poly_int64 nunits
)
10886 tree t
= make_vector_type (innertype
, nunits
, VOIDmode
);
10888 /* We always build the non-opaque variant before the opaque one,
10889 so if it already exists, it is TYPE_NEXT_VARIANT of this one. */
10890 cand
= TYPE_NEXT_VARIANT (t
);
10892 && TYPE_VECTOR_OPAQUE (cand
)
10893 && check_qualified_type (cand
, t
, TYPE_QUALS (t
)))
10895 /* Othewise build a variant type and make sure to queue it after
10896 the non-opaque type. */
10897 cand
= build_distinct_type_copy (t
);
10898 TYPE_VECTOR_OPAQUE (cand
) = true;
10899 TYPE_CANONICAL (cand
) = TYPE_CANONICAL (t
);
10900 TYPE_NEXT_VARIANT (cand
) = TYPE_NEXT_VARIANT (t
);
10901 TYPE_NEXT_VARIANT (t
) = cand
;
10902 TYPE_MAIN_VARIANT (cand
) = TYPE_MAIN_VARIANT (t
);
10906 /* Return the value of element I of VECTOR_CST T as a wide_int. */
10909 vector_cst_int_elt (const_tree t
, unsigned int i
)
10911 /* First handle elements that are directly encoded. */
10912 unsigned int encoded_nelts
= vector_cst_encoded_nelts (t
);
10913 if (i
< encoded_nelts
)
10914 return wi::to_wide (VECTOR_CST_ENCODED_ELT (t
, i
));
10916 /* Identify the pattern that contains element I and work out the index of
10917 the last encoded element for that pattern. */
10918 unsigned int npatterns
= VECTOR_CST_NPATTERNS (t
);
10919 unsigned int pattern
= i
% npatterns
;
10920 unsigned int count
= i
/ npatterns
;
10921 unsigned int final_i
= encoded_nelts
- npatterns
+ pattern
;
10923 /* If there are no steps, the final encoded value is the right one. */
10924 if (!VECTOR_CST_STEPPED_P (t
))
10925 return wi::to_wide (VECTOR_CST_ENCODED_ELT (t
, final_i
));
10927 /* Otherwise work out the value from the last two encoded elements. */
10928 tree v1
= VECTOR_CST_ENCODED_ELT (t
, final_i
- npatterns
);
10929 tree v2
= VECTOR_CST_ENCODED_ELT (t
, final_i
);
10930 wide_int diff
= wi::to_wide (v2
) - wi::to_wide (v1
);
10931 return wi::to_wide (v2
) + (count
- 2) * diff
;
10934 /* Return the value of element I of VECTOR_CST T. */
10937 vector_cst_elt (const_tree t
, unsigned int i
)
10939 /* First handle elements that are directly encoded. */
10940 unsigned int encoded_nelts
= vector_cst_encoded_nelts (t
);
10941 if (i
< encoded_nelts
)
10942 return VECTOR_CST_ENCODED_ELT (t
, i
);
10944 /* If there are no steps, the final encoded value is the right one. */
10945 if (!VECTOR_CST_STEPPED_P (t
))
10947 /* Identify the pattern that contains element I and work out the index of
10948 the last encoded element for that pattern. */
10949 unsigned int npatterns
= VECTOR_CST_NPATTERNS (t
);
10950 unsigned int pattern
= i
% npatterns
;
10951 unsigned int final_i
= encoded_nelts
- npatterns
+ pattern
;
10952 return VECTOR_CST_ENCODED_ELT (t
, final_i
);
10955 /* Otherwise work out the value from the last two encoded elements. */
10956 return wide_int_to_tree (TREE_TYPE (TREE_TYPE (t
)),
10957 vector_cst_int_elt (t
, i
));
10960 /* Given an initializer INIT, return TRUE if INIT is zero or some
10961 aggregate of zeros. Otherwise return FALSE. If NONZERO is not
10962 null, set *NONZERO if and only if INIT is known not to be all
10963 zeros. The combination of return value of false and *NONZERO
10964 false implies that INIT may but need not be all zeros. Other
10965 combinations indicate definitive answers. */
10968 initializer_zerop (const_tree init
, bool *nonzero
/* = NULL */)
10974 /* Conservatively clear NONZERO and set it only if INIT is definitely
10980 unsigned HOST_WIDE_INT off
= 0;
10982 switch (TREE_CODE (init
))
10985 if (integer_zerop (init
))
10992 /* ??? Note that this is not correct for C4X float formats. There,
10993 a bit pattern of all zeros is 1.0; 0.0 is encoded with the most
10994 negative exponent. */
10995 if (real_zerop (init
)
10996 && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (init
)))
11003 if (fixed_zerop (init
))
11010 if (integer_zerop (init
)
11011 || (real_zerop (init
)
11012 && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_REALPART (init
)))
11013 && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_IMAGPART (init
)))))
11020 if (VECTOR_CST_NPATTERNS (init
) == 1
11021 && VECTOR_CST_DUPLICATE_P (init
)
11022 && initializer_zerop (VECTOR_CST_ENCODED_ELT (init
, 0)))
11030 if (TREE_CLOBBER_P (init
))
11033 unsigned HOST_WIDE_INT idx
;
11036 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init
), idx
, elt
)
11037 if (!initializer_zerop (elt
, nonzero
))
11045 tree arg
= TREE_OPERAND (init
, 0);
11046 if (TREE_CODE (arg
) != ADDR_EXPR
)
11048 tree offset
= TREE_OPERAND (init
, 1);
11049 if (TREE_CODE (offset
) != INTEGER_CST
11050 || !tree_fits_uhwi_p (offset
))
11052 off
= tree_to_uhwi (offset
);
11055 arg
= TREE_OPERAND (arg
, 0);
11056 if (TREE_CODE (arg
) != STRING_CST
)
11060 /* Fall through. */
11064 gcc_assert (off
<= INT_MAX
);
11067 int n
= TREE_STRING_LENGTH (init
);
11071 /* We need to loop through all elements to handle cases like
11072 "\0" and "\0foobar". */
11073 for (i
= 0; i
< n
; ++i
)
11074 if (TREE_STRING_POINTER (init
)[i
] != '\0')
11088 /* Return true if EXPR is an initializer expression in which every element
11089 is a constant that is numerically equal to 0 or 1. The elements do not
11090 need to be equal to each other. */
11093 initializer_each_zero_or_onep (const_tree expr
)
11095 STRIP_ANY_LOCATION_WRAPPER (expr
);
11097 switch (TREE_CODE (expr
))
11100 return integer_zerop (expr
) || integer_onep (expr
);
11103 return real_zerop (expr
) || real_onep (expr
);
11107 unsigned HOST_WIDE_INT nelts
= vector_cst_encoded_nelts (expr
);
11108 if (VECTOR_CST_STEPPED_P (expr
)
11109 && !TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr
)).is_constant (&nelts
))
11112 for (unsigned int i
= 0; i
< nelts
; ++i
)
11114 tree elt
= vector_cst_elt (expr
, i
);
11115 if (!initializer_each_zero_or_onep (elt
))
11127 /* Check if vector VEC consists of all the equal elements and
11128 that the number of elements corresponds to the type of VEC.
11129 The function returns first element of the vector
11130 or NULL_TREE if the vector is not uniform. */
11132 uniform_vector_p (const_tree vec
)
11135 unsigned HOST_WIDE_INT i
, nelts
;
11137 if (vec
== NULL_TREE
)
11140 gcc_assert (VECTOR_TYPE_P (TREE_TYPE (vec
)));
11142 if (TREE_CODE (vec
) == VEC_DUPLICATE_EXPR
)
11143 return TREE_OPERAND (vec
, 0);
11145 else if (TREE_CODE (vec
) == VECTOR_CST
)
11147 if (VECTOR_CST_NPATTERNS (vec
) == 1 && VECTOR_CST_DUPLICATE_P (vec
))
11148 return VECTOR_CST_ENCODED_ELT (vec
, 0);
11152 else if (TREE_CODE (vec
) == CONSTRUCTOR
11153 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (vec
)).is_constant (&nelts
))
11155 first
= error_mark_node
;
11157 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (vec
), i
, t
)
11164 if (!operand_equal_p (first
, t
, 0))
11176 /* If the argument is INTEGER_CST, return it. If the argument is vector
11177 with all elements the same INTEGER_CST, return that INTEGER_CST. Otherwise
11179 Look through location wrappers. */
11182 uniform_integer_cst_p (tree t
)
11184 STRIP_ANY_LOCATION_WRAPPER (t
);
11186 if (TREE_CODE (t
) == INTEGER_CST
)
11189 if (VECTOR_TYPE_P (TREE_TYPE (t
)))
11191 t
= uniform_vector_p (t
);
11192 if (t
&& TREE_CODE (t
) == INTEGER_CST
)
11199 /* If VECTOR_CST T has a single nonzero element, return the index of that
11200 element, otherwise return -1. */
11203 single_nonzero_element (const_tree t
)
11205 unsigned HOST_WIDE_INT nelts
;
11206 unsigned int repeat_nelts
;
11207 if (VECTOR_CST_NELTS (t
).is_constant (&nelts
))
11208 repeat_nelts
= nelts
;
11209 else if (VECTOR_CST_NELTS_PER_PATTERN (t
) == 2)
11211 nelts
= vector_cst_encoded_nelts (t
);
11212 repeat_nelts
= VECTOR_CST_NPATTERNS (t
);
11218 for (unsigned int i
= 0; i
< nelts
; ++i
)
11220 tree elt
= vector_cst_elt (t
, i
);
11221 if (!integer_zerop (elt
) && !real_zerop (elt
))
11223 if (res
>= 0 || i
>= repeat_nelts
)
11231 /* Build an empty statement at location LOC. */
11234 build_empty_stmt (location_t loc
)
11236 tree t
= build1 (NOP_EXPR
, void_type_node
, size_zero_node
);
11237 SET_EXPR_LOCATION (t
, loc
);
11242 /* Build an OpenMP clause with code CODE. LOC is the location of the
11246 build_omp_clause (location_t loc
, enum omp_clause_code code
)
11251 length
= omp_clause_num_ops
[code
];
11252 size
= (sizeof (struct tree_omp_clause
) + (length
- 1) * sizeof (tree
));
11254 record_node_allocation_statistics (OMP_CLAUSE
, size
);
11256 t
= (tree
) ggc_internal_alloc (size
);
11257 memset (t
, 0, size
);
11258 TREE_SET_CODE (t
, OMP_CLAUSE
);
11259 OMP_CLAUSE_SET_CODE (t
, code
);
11260 OMP_CLAUSE_LOCATION (t
) = loc
;
11265 /* Build a tcc_vl_exp object with code CODE and room for LEN operands. LEN
11266 includes the implicit operand count in TREE_OPERAND 0, and so must be >= 1.
11267 Except for the CODE and operand count field, other storage for the
11268 object is initialized to zeros. */
11271 build_vl_exp (enum tree_code code
, int len MEM_STAT_DECL
)
11274 int length
= (len
- 1) * sizeof (tree
) + sizeof (struct tree_exp
);
11276 gcc_assert (TREE_CODE_CLASS (code
) == tcc_vl_exp
);
11277 gcc_assert (len
>= 1);
11279 record_node_allocation_statistics (code
, length
);
11281 t
= ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT
);
11283 TREE_SET_CODE (t
, code
);
11285 /* Can't use TREE_OPERAND to store the length because if checking is
11286 enabled, it will try to check the length before we store it. :-P */
11287 t
->exp
.operands
[0] = build_int_cst (sizetype
, len
);
11292 /* Helper function for build_call_* functions; build a CALL_EXPR with
11293 indicated RETURN_TYPE, FN, and NARGS, but do not initialize any of
11294 the argument slots. */
11297 build_call_1 (tree return_type
, tree fn
, int nargs
)
11301 t
= build_vl_exp (CALL_EXPR
, nargs
+ 3);
11302 TREE_TYPE (t
) = return_type
;
11303 CALL_EXPR_FN (t
) = fn
;
11304 CALL_EXPR_STATIC_CHAIN (t
) = NULL
;
11309 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
11310 FN and a null static chain slot. NARGS is the number of call arguments
11311 which are specified as "..." arguments. */
11314 build_call_nary (tree return_type
, tree fn
, int nargs
, ...)
11318 va_start (args
, nargs
);
11319 ret
= build_call_valist (return_type
, fn
, nargs
, args
);
11324 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
11325 FN and a null static chain slot. NARGS is the number of call arguments
11326 which are specified as a va_list ARGS. */
11329 build_call_valist (tree return_type
, tree fn
, int nargs
, va_list args
)
11334 t
= build_call_1 (return_type
, fn
, nargs
);
11335 for (i
= 0; i
< nargs
; i
++)
11336 CALL_EXPR_ARG (t
, i
) = va_arg (args
, tree
);
11337 process_call_operands (t
);
11341 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
11342 FN and a null static chain slot. NARGS is the number of call arguments
11343 which are specified as a tree array ARGS. */
11346 build_call_array_loc (location_t loc
, tree return_type
, tree fn
,
11347 int nargs
, const tree
*args
)
11352 t
= build_call_1 (return_type
, fn
, nargs
);
11353 for (i
= 0; i
< nargs
; i
++)
11354 CALL_EXPR_ARG (t
, i
) = args
[i
];
11355 process_call_operands (t
);
11356 SET_EXPR_LOCATION (t
, loc
);
11360 /* Like build_call_array, but takes a vec. */
11363 build_call_vec (tree return_type
, tree fn
, vec
<tree
, va_gc
> *args
)
11368 ret
= build_call_1 (return_type
, fn
, vec_safe_length (args
));
11369 FOR_EACH_VEC_SAFE_ELT (args
, ix
, t
)
11370 CALL_EXPR_ARG (ret
, ix
) = t
;
11371 process_call_operands (ret
);
11375 /* Conveniently construct a function call expression. FNDECL names the
11376 function to be called and N arguments are passed in the array
11380 build_call_expr_loc_array (location_t loc
, tree fndecl
, int n
, tree
*argarray
)
11382 tree fntype
= TREE_TYPE (fndecl
);
11383 tree fn
= build1 (ADDR_EXPR
, build_pointer_type (fntype
), fndecl
);
11385 return fold_build_call_array_loc (loc
, TREE_TYPE (fntype
), fn
, n
, argarray
);
11388 /* Conveniently construct a function call expression. FNDECL names the
11389 function to be called and the arguments are passed in the vector
11393 build_call_expr_loc_vec (location_t loc
, tree fndecl
, vec
<tree
, va_gc
> *vec
)
11395 return build_call_expr_loc_array (loc
, fndecl
, vec_safe_length (vec
),
11396 vec_safe_address (vec
));
11400 /* Conveniently construct a function call expression. FNDECL names the
11401 function to be called, N is the number of arguments, and the "..."
11402 parameters are the argument expressions. */
11405 build_call_expr_loc (location_t loc
, tree fndecl
, int n
, ...)
11408 tree
*argarray
= XALLOCAVEC (tree
, n
);
11412 for (i
= 0; i
< n
; i
++)
11413 argarray
[i
] = va_arg (ap
, tree
);
11415 return build_call_expr_loc_array (loc
, fndecl
, n
, argarray
);
11418 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
11419 varargs macros aren't supported by all bootstrap compilers. */
11422 build_call_expr (tree fndecl
, int n
, ...)
11425 tree
*argarray
= XALLOCAVEC (tree
, n
);
11429 for (i
= 0; i
< n
; i
++)
11430 argarray
[i
] = va_arg (ap
, tree
);
11432 return build_call_expr_loc_array (UNKNOWN_LOCATION
, fndecl
, n
, argarray
);
11435 /* Build an internal call to IFN, with arguments ARGS[0:N-1] and with return
11436 type TYPE. This is just like CALL_EXPR, except its CALL_EXPR_FN is NULL.
11437 It will get gimplified later into an ordinary internal function. */
11440 build_call_expr_internal_loc_array (location_t loc
, internal_fn ifn
,
11441 tree type
, int n
, const tree
*args
)
11443 tree t
= build_call_1 (type
, NULL_TREE
, n
);
11444 for (int i
= 0; i
< n
; ++i
)
11445 CALL_EXPR_ARG (t
, i
) = args
[i
];
11446 SET_EXPR_LOCATION (t
, loc
);
11447 CALL_EXPR_IFN (t
) = ifn
;
11451 /* Build internal call expression. This is just like CALL_EXPR, except
11452 its CALL_EXPR_FN is NULL. It will get gimplified later into ordinary
11453 internal function. */
11456 build_call_expr_internal_loc (location_t loc
, enum internal_fn ifn
,
11457 tree type
, int n
, ...)
11460 tree
*argarray
= XALLOCAVEC (tree
, n
);
11464 for (i
= 0; i
< n
; i
++)
11465 argarray
[i
] = va_arg (ap
, tree
);
11467 return build_call_expr_internal_loc_array (loc
, ifn
, type
, n
, argarray
);
11470 /* Return a function call to FN, if the target is guaranteed to support it,
11473 N is the number of arguments, passed in the "...", and TYPE is the
11474 type of the return value. */
11477 maybe_build_call_expr_loc (location_t loc
, combined_fn fn
, tree type
,
11481 tree
*argarray
= XALLOCAVEC (tree
, n
);
11485 for (i
= 0; i
< n
; i
++)
11486 argarray
[i
] = va_arg (ap
, tree
);
11488 if (internal_fn_p (fn
))
11490 internal_fn ifn
= as_internal_fn (fn
);
11491 if (direct_internal_fn_p (ifn
))
11493 tree_pair types
= direct_internal_fn_types (ifn
, type
, argarray
);
11494 if (!direct_internal_fn_supported_p (ifn
, types
,
11495 OPTIMIZE_FOR_BOTH
))
11498 return build_call_expr_internal_loc_array (loc
, ifn
, type
, n
, argarray
);
11502 tree fndecl
= builtin_decl_implicit (as_builtin_fn (fn
));
11505 return build_call_expr_loc_array (loc
, fndecl
, n
, argarray
);
11509 /* Return a function call to the appropriate builtin alloca variant.
11511 SIZE is the size to be allocated. ALIGN, if non-zero, is the requested
11512 alignment of the allocated area. MAX_SIZE, if non-negative, is an upper
11513 bound for SIZE in case it is not a fixed value. */
11516 build_alloca_call_expr (tree size
, unsigned int align
, HOST_WIDE_INT max_size
)
11520 tree t
= builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
);
11522 build_call_expr (t
, 3, size
, size_int (align
), size_int (max_size
));
11524 else if (align
> 0)
11526 tree t
= builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
11527 return build_call_expr (t
, 2, size
, size_int (align
));
11531 tree t
= builtin_decl_explicit (BUILT_IN_ALLOCA
);
11532 return build_call_expr (t
, 1, size
);
11536 /* Create a new constant string literal of type ELTYPE[SIZE] (or LEN
11537 if SIZE == -1) and return a tree node representing char* pointer to
11538 it as an ADDR_EXPR (ARRAY_REF (ELTYPE, ...)). The STRING_CST value
11539 is the LEN bytes at STR (the representation of the string, which may
11543 build_string_literal (int len
, const char *str
,
11544 tree eltype
/* = char_type_node */,
11545 unsigned HOST_WIDE_INT size
/* = -1 */)
11547 tree t
= build_string (len
, str
);
11548 /* Set the maximum valid index based on the string length or SIZE. */
11549 unsigned HOST_WIDE_INT maxidx
11550 = (size
== HOST_WIDE_INT_M1U
? len
: size
) - 1;
11552 tree index
= build_index_type (size_int (maxidx
));
11553 eltype
= build_type_variant (eltype
, 1, 0);
11554 tree type
= build_array_type (eltype
, index
);
11555 TREE_TYPE (t
) = type
;
11556 TREE_CONSTANT (t
) = 1;
11557 TREE_READONLY (t
) = 1;
11558 TREE_STATIC (t
) = 1;
11560 type
= build_pointer_type (eltype
);
11561 t
= build1 (ADDR_EXPR
, type
,
11562 build4 (ARRAY_REF
, eltype
,
11563 t
, integer_zero_node
, NULL_TREE
, NULL_TREE
));
11569 /* Return true if T (assumed to be a DECL) must be assigned a memory
11573 needs_to_live_in_memory (const_tree t
)
11575 return (TREE_ADDRESSABLE (t
)
11576 || is_global_var (t
)
11577 || (TREE_CODE (t
) == RESULT_DECL
11578 && !DECL_BY_REFERENCE (t
)
11579 && aggregate_value_p (t
, current_function_decl
)));
11582 /* Return value of a constant X and sign-extend it. */
11585 int_cst_value (const_tree x
)
11587 unsigned bits
= TYPE_PRECISION (TREE_TYPE (x
));
11588 unsigned HOST_WIDE_INT val
= TREE_INT_CST_LOW (x
);
11590 /* Make sure the sign-extended value will fit in a HOST_WIDE_INT. */
11591 gcc_assert (cst_and_fits_in_hwi (x
));
11593 if (bits
< HOST_BITS_PER_WIDE_INT
)
11595 bool negative
= ((val
>> (bits
- 1)) & 1) != 0;
11597 val
|= HOST_WIDE_INT_M1U
<< (bits
- 1) << 1;
11599 val
&= ~(HOST_WIDE_INT_M1U
<< (bits
- 1) << 1);
11605 /* If TYPE is an integral or pointer type, return an integer type with
11606 the same precision which is unsigned iff UNSIGNEDP is true, or itself
11607 if TYPE is already an integer type of signedness UNSIGNEDP.
11608 If TYPE is a floating-point type, return an integer type with the same
11609 bitsize and with the signedness given by UNSIGNEDP; this is useful
11610 when doing bit-level operations on a floating-point value. */
11613 signed_or_unsigned_type_for (int unsignedp
, tree type
)
11615 if (ANY_INTEGRAL_TYPE_P (type
) && TYPE_UNSIGNED (type
) == unsignedp
)
11618 if (TREE_CODE (type
) == VECTOR_TYPE
)
11620 tree inner
= TREE_TYPE (type
);
11621 tree inner2
= signed_or_unsigned_type_for (unsignedp
, inner
);
11624 if (inner
== inner2
)
11626 return build_vector_type (inner2
, TYPE_VECTOR_SUBPARTS (type
));
11629 if (TREE_CODE (type
) == COMPLEX_TYPE
)
11631 tree inner
= TREE_TYPE (type
);
11632 tree inner2
= signed_or_unsigned_type_for (unsignedp
, inner
);
11635 if (inner
== inner2
)
11637 return build_complex_type (inner2
);
11641 if (INTEGRAL_TYPE_P (type
)
11642 || POINTER_TYPE_P (type
)
11643 || TREE_CODE (type
) == OFFSET_TYPE
)
11644 bits
= TYPE_PRECISION (type
);
11645 else if (TREE_CODE (type
) == REAL_TYPE
)
11646 bits
= GET_MODE_BITSIZE (SCALAR_TYPE_MODE (type
));
11650 return build_nonstandard_integer_type (bits
, unsignedp
);
11653 /* If TYPE is an integral or pointer type, return an integer type with
11654 the same precision which is unsigned, or itself if TYPE is already an
11655 unsigned integer type. If TYPE is a floating-point type, return an
11656 unsigned integer type with the same bitsize as TYPE. */
11659 unsigned_type_for (tree type
)
11661 return signed_or_unsigned_type_for (1, type
);
11664 /* If TYPE is an integral or pointer type, return an integer type with
11665 the same precision which is signed, or itself if TYPE is already a
11666 signed integer type. If TYPE is a floating-point type, return a
11667 signed integer type with the same bitsize as TYPE. */
11670 signed_type_for (tree type
)
11672 return signed_or_unsigned_type_for (0, type
);
11675 /* If TYPE is a vector type, return a signed integer vector type with the
11676 same width and number of subparts. Otherwise return boolean_type_node. */
11679 truth_type_for (tree type
)
11681 if (TREE_CODE (type
) == VECTOR_TYPE
)
11683 if (VECTOR_BOOLEAN_TYPE_P (type
))
11685 return build_truth_vector_type (TYPE_VECTOR_SUBPARTS (type
),
11686 GET_MODE_SIZE (TYPE_MODE (type
)));
11689 return boolean_type_node
;
11692 /* Returns the largest value obtainable by casting something in INNER type to
11696 upper_bound_in_type (tree outer
, tree inner
)
11698 unsigned int det
= 0;
11699 unsigned oprec
= TYPE_PRECISION (outer
);
11700 unsigned iprec
= TYPE_PRECISION (inner
);
11703 /* Compute a unique number for every combination. */
11704 det
|= (oprec
> iprec
) ? 4 : 0;
11705 det
|= TYPE_UNSIGNED (outer
) ? 2 : 0;
11706 det
|= TYPE_UNSIGNED (inner
) ? 1 : 0;
11708 /* Determine the exponent to use. */
11713 /* oprec <= iprec, outer: signed, inner: don't care. */
11718 /* oprec <= iprec, outer: unsigned, inner: don't care. */
11722 /* oprec > iprec, outer: signed, inner: signed. */
11726 /* oprec > iprec, outer: signed, inner: unsigned. */
11730 /* oprec > iprec, outer: unsigned, inner: signed. */
11734 /* oprec > iprec, outer: unsigned, inner: unsigned. */
11738 gcc_unreachable ();
11741 return wide_int_to_tree (outer
,
11742 wi::mask (prec
, false, TYPE_PRECISION (outer
)));
11745 /* Returns the smallest value obtainable by casting something in INNER type to
11749 lower_bound_in_type (tree outer
, tree inner
)
11751 unsigned oprec
= TYPE_PRECISION (outer
);
11752 unsigned iprec
= TYPE_PRECISION (inner
);
11754 /* If OUTER type is unsigned, we can definitely cast 0 to OUTER type
11756 if (TYPE_UNSIGNED (outer
)
11757 /* If we are widening something of an unsigned type, OUTER type
11758 contains all values of INNER type. In particular, both INNER
11759 and OUTER types have zero in common. */
11760 || (oprec
> iprec
&& TYPE_UNSIGNED (inner
)))
11761 return build_int_cst (outer
, 0);
11764 /* If we are widening a signed type to another signed type, we
11765 want to obtain -2^^(iprec-1). If we are keeping the
11766 precision or narrowing to a signed type, we want to obtain
11768 unsigned prec
= oprec
> iprec
? iprec
: oprec
;
11769 return wide_int_to_tree (outer
,
11770 wi::mask (prec
- 1, true,
11771 TYPE_PRECISION (outer
)));
11775 /* Return nonzero if two operands that are suitable for PHI nodes are
11776 necessarily equal. Specifically, both ARG0 and ARG1 must be either
11777 SSA_NAME or invariant. Note that this is strictly an optimization.
11778 That is, callers of this function can directly call operand_equal_p
11779 and get the same result, only slower. */
11782 operand_equal_for_phi_arg_p (const_tree arg0
, const_tree arg1
)
11786 if (TREE_CODE (arg0
) == SSA_NAME
|| TREE_CODE (arg1
) == SSA_NAME
)
11788 return operand_equal_p (arg0
, arg1
, 0);
11791 /* Returns number of zeros at the end of binary representation of X. */
11794 num_ending_zeros (const_tree x
)
11796 return build_int_cst (TREE_TYPE (x
), wi::ctz (wi::to_wide (x
)));
11800 #define WALK_SUBTREE(NODE) \
11803 result = walk_tree_1 (&(NODE), func, data, pset, lh); \
11809 /* This is a subroutine of walk_tree that walks field of TYPE that are to
11810 be walked whenever a type is seen in the tree. Rest of operands and return
11811 value are as for walk_tree. */
11814 walk_type_fields (tree type
, walk_tree_fn func
, void *data
,
11815 hash_set
<tree
> *pset
, walk_tree_lh lh
)
11817 tree result
= NULL_TREE
;
11819 switch (TREE_CODE (type
))
11822 case REFERENCE_TYPE
:
11824 /* We have to worry about mutually recursive pointers. These can't
11825 be written in C. They can in Ada. It's pathological, but
11826 there's an ACATS test (c38102a) that checks it. Deal with this
11827 by checking if we're pointing to another pointer, that one
11828 points to another pointer, that one does too, and we have no htab.
11829 If so, get a hash table. We check three levels deep to avoid
11830 the cost of the hash table if we don't need one. */
11831 if (POINTER_TYPE_P (TREE_TYPE (type
))
11832 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type
)))
11833 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type
))))
11836 result
= walk_tree_without_duplicates (&TREE_TYPE (type
),
11847 WALK_SUBTREE (TREE_TYPE (type
));
11851 WALK_SUBTREE (TYPE_METHOD_BASETYPE (type
));
11853 /* Fall through. */
11855 case FUNCTION_TYPE
:
11856 WALK_SUBTREE (TREE_TYPE (type
));
11860 /* We never want to walk into default arguments. */
11861 for (arg
= TYPE_ARG_TYPES (type
); arg
; arg
= TREE_CHAIN (arg
))
11862 WALK_SUBTREE (TREE_VALUE (arg
));
11867 /* Don't follow this nodes's type if a pointer for fear that
11868 we'll have infinite recursion. If we have a PSET, then we
11871 || (!POINTER_TYPE_P (TREE_TYPE (type
))
11872 && TREE_CODE (TREE_TYPE (type
)) != OFFSET_TYPE
))
11873 WALK_SUBTREE (TREE_TYPE (type
));
11874 WALK_SUBTREE (TYPE_DOMAIN (type
));
11878 WALK_SUBTREE (TREE_TYPE (type
));
11879 WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type
));
11889 /* Apply FUNC to all the sub-trees of TP in a pre-order traversal. FUNC is
11890 called with the DATA and the address of each sub-tree. If FUNC returns a
11891 non-NULL value, the traversal is stopped, and the value returned by FUNC
11892 is returned. If PSET is non-NULL it is used to record the nodes visited,
11893 and to avoid visiting a node more than once. */
11896 walk_tree_1 (tree
*tp
, walk_tree_fn func
, void *data
,
11897 hash_set
<tree
> *pset
, walk_tree_lh lh
)
11899 enum tree_code code
;
11903 #define WALK_SUBTREE_TAIL(NODE) \
11907 goto tail_recurse; \
11912 /* Skip empty subtrees. */
11916 /* Don't walk the same tree twice, if the user has requested
11917 that we avoid doing so. */
11918 if (pset
&& pset
->add (*tp
))
11921 /* Call the function. */
11923 result
= (*func
) (tp
, &walk_subtrees
, data
);
11925 /* If we found something, return it. */
11929 code
= TREE_CODE (*tp
);
11931 /* Even if we didn't, FUNC may have decided that there was nothing
11932 interesting below this point in the tree. */
11933 if (!walk_subtrees
)
11935 /* But we still need to check our siblings. */
11936 if (code
== TREE_LIST
)
11937 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp
));
11938 else if (code
== OMP_CLAUSE
)
11939 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp
));
11946 result
= (*lh
) (tp
, &walk_subtrees
, func
, data
, pset
);
11947 if (result
|| !walk_subtrees
)
11954 case IDENTIFIER_NODE
:
11961 case PLACEHOLDER_EXPR
:
11965 /* None of these have subtrees other than those already walked
11970 WALK_SUBTREE (TREE_VALUE (*tp
));
11971 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp
));
11976 int len
= TREE_VEC_LENGTH (*tp
);
11981 /* Walk all elements but the first. */
11983 WALK_SUBTREE (TREE_VEC_ELT (*tp
, len
));
11985 /* Now walk the first one as a tail call. */
11986 WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp
, 0));
11990 WALK_SUBTREE (TREE_REALPART (*tp
));
11991 WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp
));
11995 unsigned HOST_WIDE_INT idx
;
11996 constructor_elt
*ce
;
11998 for (idx
= 0; vec_safe_iterate (CONSTRUCTOR_ELTS (*tp
), idx
, &ce
);
12000 WALK_SUBTREE (ce
->value
);
12005 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp
, 0));
12010 for (decl
= BIND_EXPR_VARS (*tp
); decl
; decl
= DECL_CHAIN (decl
))
12012 /* Walk the DECL_INITIAL and DECL_SIZE. We don't want to walk
12013 into declarations that are just mentioned, rather than
12014 declared; they don't really belong to this part of the tree.
12015 And, we can see cycles: the initializer for a declaration
12016 can refer to the declaration itself. */
12017 WALK_SUBTREE (DECL_INITIAL (decl
));
12018 WALK_SUBTREE (DECL_SIZE (decl
));
12019 WALK_SUBTREE (DECL_SIZE_UNIT (decl
));
12021 WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp
));
12024 case STATEMENT_LIST
:
12026 tree_stmt_iterator i
;
12027 for (i
= tsi_start (*tp
); !tsi_end_p (i
); tsi_next (&i
))
12028 WALK_SUBTREE (*tsi_stmt_ptr (i
));
12033 switch (OMP_CLAUSE_CODE (*tp
))
12035 case OMP_CLAUSE_GANG
:
12036 case OMP_CLAUSE__GRIDDIM_
:
12037 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp
, 1));
12040 case OMP_CLAUSE_ASYNC
:
12041 case OMP_CLAUSE_WAIT
:
12042 case OMP_CLAUSE_WORKER
:
12043 case OMP_CLAUSE_VECTOR
:
12044 case OMP_CLAUSE_NUM_GANGS
:
12045 case OMP_CLAUSE_NUM_WORKERS
:
12046 case OMP_CLAUSE_VECTOR_LENGTH
:
12047 case OMP_CLAUSE_PRIVATE
:
12048 case OMP_CLAUSE_SHARED
:
12049 case OMP_CLAUSE_FIRSTPRIVATE
:
12050 case OMP_CLAUSE_COPYIN
:
12051 case OMP_CLAUSE_COPYPRIVATE
:
12052 case OMP_CLAUSE_FINAL
:
12053 case OMP_CLAUSE_IF
:
12054 case OMP_CLAUSE_NUM_THREADS
:
12055 case OMP_CLAUSE_SCHEDULE
:
12056 case OMP_CLAUSE_UNIFORM
:
12057 case OMP_CLAUSE_DEPEND
:
12058 case OMP_CLAUSE_NONTEMPORAL
:
12059 case OMP_CLAUSE_NUM_TEAMS
:
12060 case OMP_CLAUSE_THREAD_LIMIT
:
12061 case OMP_CLAUSE_DEVICE
:
12062 case OMP_CLAUSE_DIST_SCHEDULE
:
12063 case OMP_CLAUSE_SAFELEN
:
12064 case OMP_CLAUSE_SIMDLEN
:
12065 case OMP_CLAUSE_ORDERED
:
12066 case OMP_CLAUSE_PRIORITY
:
12067 case OMP_CLAUSE_GRAINSIZE
:
12068 case OMP_CLAUSE_NUM_TASKS
:
12069 case OMP_CLAUSE_HINT
:
12070 case OMP_CLAUSE_TO_DECLARE
:
12071 case OMP_CLAUSE_LINK
:
12072 case OMP_CLAUSE_USE_DEVICE_PTR
:
12073 case OMP_CLAUSE_USE_DEVICE_ADDR
:
12074 case OMP_CLAUSE_IS_DEVICE_PTR
:
12075 case OMP_CLAUSE_INCLUSIVE
:
12076 case OMP_CLAUSE_EXCLUSIVE
:
12077 case OMP_CLAUSE__LOOPTEMP_
:
12078 case OMP_CLAUSE__REDUCTEMP_
:
12079 case OMP_CLAUSE__CONDTEMP_
:
12080 case OMP_CLAUSE__SCANTEMP_
:
12081 case OMP_CLAUSE__SIMDUID_
:
12082 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp
, 0));
12085 case OMP_CLAUSE_INDEPENDENT
:
12086 case OMP_CLAUSE_NOWAIT
:
12087 case OMP_CLAUSE_DEFAULT
:
12088 case OMP_CLAUSE_UNTIED
:
12089 case OMP_CLAUSE_MERGEABLE
:
12090 case OMP_CLAUSE_PROC_BIND
:
12091 case OMP_CLAUSE_DEVICE_TYPE
:
12092 case OMP_CLAUSE_INBRANCH
:
12093 case OMP_CLAUSE_NOTINBRANCH
:
12094 case OMP_CLAUSE_FOR
:
12095 case OMP_CLAUSE_PARALLEL
:
12096 case OMP_CLAUSE_SECTIONS
:
12097 case OMP_CLAUSE_TASKGROUP
:
12098 case OMP_CLAUSE_NOGROUP
:
12099 case OMP_CLAUSE_THREADS
:
12100 case OMP_CLAUSE_SIMD
:
12101 case OMP_CLAUSE_DEFAULTMAP
:
12102 case OMP_CLAUSE_ORDER
:
12103 case OMP_CLAUSE_BIND
:
12104 case OMP_CLAUSE_AUTO
:
12105 case OMP_CLAUSE_SEQ
:
12106 case OMP_CLAUSE_TILE
:
12107 case OMP_CLAUSE__SIMT_
:
12108 case OMP_CLAUSE_IF_PRESENT
:
12109 case OMP_CLAUSE_FINALIZE
:
12110 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp
));
12112 case OMP_CLAUSE_LASTPRIVATE
:
12113 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp
));
12114 WALK_SUBTREE (OMP_CLAUSE_LASTPRIVATE_STMT (*tp
));
12115 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp
));
12117 case OMP_CLAUSE_COLLAPSE
:
12120 for (i
= 0; i
< 3; i
++)
12121 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp
, i
));
12122 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp
));
12125 case OMP_CLAUSE_LINEAR
:
12126 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp
));
12127 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STEP (*tp
));
12128 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STMT (*tp
));
12129 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp
));
12131 case OMP_CLAUSE_ALIGNED
:
12132 case OMP_CLAUSE_FROM
:
12133 case OMP_CLAUSE_TO
:
12134 case OMP_CLAUSE_MAP
:
12135 case OMP_CLAUSE__CACHE_
:
12136 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp
));
12137 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp
, 1));
12138 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp
));
12140 case OMP_CLAUSE_REDUCTION
:
12141 case OMP_CLAUSE_TASK_REDUCTION
:
12142 case OMP_CLAUSE_IN_REDUCTION
:
12145 for (i
= 0; i
< 5; i
++)
12146 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp
, i
));
12147 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp
));
12151 gcc_unreachable ();
12159 /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
12160 But, we only want to walk once. */
12161 len
= (TREE_OPERAND (*tp
, 3) == TREE_OPERAND (*tp
, 1)) ? 2 : 3;
12162 for (i
= 0; i
< len
; ++i
)
12163 WALK_SUBTREE (TREE_OPERAND (*tp
, i
));
12164 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp
, len
));
12168 /* If this is a TYPE_DECL, walk into the fields of the type that it's
12169 defining. We only want to walk into these fields of a type in this
12170 case and not in the general case of a mere reference to the type.
12172 The criterion is as follows: if the field can be an expression, it
12173 must be walked only here. This should be in keeping with the fields
12174 that are directly gimplified in gimplify_type_sizes in order for the
12175 mark/copy-if-shared/unmark machinery of the gimplifier to work with
12176 variable-sized types.
12178 Note that DECLs get walked as part of processing the BIND_EXPR. */
12179 if (TREE_CODE (DECL_EXPR_DECL (*tp
)) == TYPE_DECL
)
12181 tree
*type_p
= &TREE_TYPE (DECL_EXPR_DECL (*tp
));
12182 if (TREE_CODE (*type_p
) == ERROR_MARK
)
12185 /* Call the function for the type. See if it returns anything or
12186 doesn't want us to continue. If we are to continue, walk both
12187 the normal fields and those for the declaration case. */
12188 result
= (*func
) (type_p
, &walk_subtrees
, data
);
12189 if (result
|| !walk_subtrees
)
12192 /* But do not walk a pointed-to type since it may itself need to
12193 be walked in the declaration case if it isn't anonymous. */
12194 if (!POINTER_TYPE_P (*type_p
))
12196 result
= walk_type_fields (*type_p
, func
, data
, pset
, lh
);
12201 /* If this is a record type, also walk the fields. */
12202 if (RECORD_OR_UNION_TYPE_P (*type_p
))
12206 for (field
= TYPE_FIELDS (*type_p
); field
;
12207 field
= DECL_CHAIN (field
))
12209 /* We'd like to look at the type of the field, but we can
12210 easily get infinite recursion. So assume it's pointed
12211 to elsewhere in the tree. Also, ignore things that
12213 if (TREE_CODE (field
) != FIELD_DECL
)
12216 WALK_SUBTREE (DECL_FIELD_OFFSET (field
));
12217 WALK_SUBTREE (DECL_SIZE (field
));
12218 WALK_SUBTREE (DECL_SIZE_UNIT (field
));
12219 if (TREE_CODE (*type_p
) == QUAL_UNION_TYPE
)
12220 WALK_SUBTREE (DECL_QUALIFIER (field
));
12224 /* Same for scalar types. */
12225 else if (TREE_CODE (*type_p
) == BOOLEAN_TYPE
12226 || TREE_CODE (*type_p
) == ENUMERAL_TYPE
12227 || TREE_CODE (*type_p
) == INTEGER_TYPE
12228 || TREE_CODE (*type_p
) == FIXED_POINT_TYPE
12229 || TREE_CODE (*type_p
) == REAL_TYPE
)
12231 WALK_SUBTREE (TYPE_MIN_VALUE (*type_p
));
12232 WALK_SUBTREE (TYPE_MAX_VALUE (*type_p
));
12235 WALK_SUBTREE (TYPE_SIZE (*type_p
));
12236 WALK_SUBTREE_TAIL (TYPE_SIZE_UNIT (*type_p
));
12241 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code
)))
12245 /* Walk over all the sub-trees of this operand. */
12246 len
= TREE_OPERAND_LENGTH (*tp
);
12248 /* Go through the subtrees. We need to do this in forward order so
12249 that the scope of a FOR_EXPR is handled properly. */
12252 for (i
= 0; i
< len
- 1; ++i
)
12253 WALK_SUBTREE (TREE_OPERAND (*tp
, i
));
12254 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp
, len
- 1));
12257 /* If this is a type, walk the needed fields in the type. */
12258 else if (TYPE_P (*tp
))
12259 return walk_type_fields (*tp
, func
, data
, pset
, lh
);
12263 /* We didn't find what we were looking for. */
12266 #undef WALK_SUBTREE_TAIL
12268 #undef WALK_SUBTREE
12270 /* Like walk_tree, but does not walk duplicate nodes more than once. */
12273 walk_tree_without_duplicates_1 (tree
*tp
, walk_tree_fn func
, void *data
,
12278 hash_set
<tree
> pset
;
12279 result
= walk_tree_1 (tp
, func
, data
, &pset
, lh
);
12285 tree_block (tree t
)
12287 const enum tree_code_class c
= TREE_CODE_CLASS (TREE_CODE (t
));
12289 if (IS_EXPR_CODE_CLASS (c
))
12290 return LOCATION_BLOCK (t
->exp
.locus
);
12291 gcc_unreachable ();
12296 tree_set_block (tree t
, tree b
)
12298 const enum tree_code_class c
= TREE_CODE_CLASS (TREE_CODE (t
));
12300 if (IS_EXPR_CODE_CLASS (c
))
12302 t
->exp
.locus
= set_block (t
->exp
.locus
, b
);
12305 gcc_unreachable ();
12308 /* Create a nameless artificial label and put it in the current
12309 function context. The label has a location of LOC. Returns the
12310 newly created label. */
12313 create_artificial_label (location_t loc
)
12315 tree lab
= build_decl (loc
,
12316 LABEL_DECL
, NULL_TREE
, void_type_node
);
12318 DECL_ARTIFICIAL (lab
) = 1;
12319 DECL_IGNORED_P (lab
) = 1;
12320 DECL_CONTEXT (lab
) = current_function_decl
;
12324 /* Given a tree, try to return a useful variable name that we can use
12325 to prefix a temporary that is being assigned the value of the tree.
12326 I.E. given <temp> = &A, return A. */
12331 tree stripped_decl
;
12334 STRIP_NOPS (stripped_decl
);
12335 if (DECL_P (stripped_decl
) && DECL_NAME (stripped_decl
))
12336 return IDENTIFIER_POINTER (DECL_NAME (stripped_decl
));
12337 else if (TREE_CODE (stripped_decl
) == SSA_NAME
)
12339 tree name
= SSA_NAME_IDENTIFIER (stripped_decl
);
12342 return IDENTIFIER_POINTER (name
);
12346 switch (TREE_CODE (stripped_decl
))
12349 return get_name (TREE_OPERAND (stripped_decl
, 0));
12356 /* Return true if TYPE has a variable argument list. */
12359 stdarg_p (const_tree fntype
)
12361 function_args_iterator args_iter
;
12362 tree n
= NULL_TREE
, t
;
12367 FOREACH_FUNCTION_ARGS (fntype
, t
, args_iter
)
12372 return n
!= NULL_TREE
&& n
!= void_type_node
;
12375 /* Return true if TYPE has a prototype. */
12378 prototype_p (const_tree fntype
)
12382 gcc_assert (fntype
!= NULL_TREE
);
12384 t
= TYPE_ARG_TYPES (fntype
);
12385 return (t
!= NULL_TREE
);
12388 /* If BLOCK is inlined from an __attribute__((__artificial__))
12389 routine, return pointer to location from where it has been
12392 block_nonartificial_location (tree block
)
12394 location_t
*ret
= NULL
;
12396 while (block
&& TREE_CODE (block
) == BLOCK
12397 && BLOCK_ABSTRACT_ORIGIN (block
))
12399 tree ao
= BLOCK_ABSTRACT_ORIGIN (block
);
12400 if (TREE_CODE (ao
) == FUNCTION_DECL
)
12402 /* If AO is an artificial inline, point RET to the
12403 call site locus at which it has been inlined and continue
12404 the loop, in case AO's caller is also an artificial
12406 if (DECL_DECLARED_INLINE_P (ao
)
12407 && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao
)))
12408 ret
= &BLOCK_SOURCE_LOCATION (block
);
12412 else if (TREE_CODE (ao
) != BLOCK
)
12415 block
= BLOCK_SUPERCONTEXT (block
);
12421 /* If EXP is inlined from an __attribute__((__artificial__))
12422 function, return the location of the original call expression. */
12425 tree_nonartificial_location (tree exp
)
12427 location_t
*loc
= block_nonartificial_location (TREE_BLOCK (exp
));
12432 return EXPR_LOCATION (exp
);
12436 /* These are the hash table functions for the hash table of OPTIMIZATION_NODEq
12439 /* Return the hash code X, an OPTIMIZATION_NODE or TARGET_OPTION code. */
12442 cl_option_hasher::hash (tree x
)
12444 const_tree
const t
= x
;
12448 hashval_t hash
= 0;
12450 if (TREE_CODE (t
) == OPTIMIZATION_NODE
)
12452 p
= (const char *)TREE_OPTIMIZATION (t
);
12453 len
= sizeof (struct cl_optimization
);
12456 else if (TREE_CODE (t
) == TARGET_OPTION_NODE
)
12457 return cl_target_option_hash (TREE_TARGET_OPTION (t
));
12460 gcc_unreachable ();
12462 /* assume most opt flags are just 0/1, some are 2-3, and a few might be
12464 for (i
= 0; i
< len
; i
++)
12466 hash
= (hash
<< 4) ^ ((i
<< 2) | p
[i
]);
12471 /* Return nonzero if the value represented by *X (an OPTIMIZATION or
12472 TARGET_OPTION tree node) is the same as that given by *Y, which is the
12476 cl_option_hasher::equal (tree x
, tree y
)
12478 const_tree
const xt
= x
;
12479 const_tree
const yt
= y
;
12481 if (TREE_CODE (xt
) != TREE_CODE (yt
))
12484 if (TREE_CODE (xt
) == OPTIMIZATION_NODE
)
12485 return cl_optimization_option_eq (TREE_OPTIMIZATION (xt
),
12486 TREE_OPTIMIZATION (yt
));
12487 else if (TREE_CODE (xt
) == TARGET_OPTION_NODE
)
12488 return cl_target_option_eq (TREE_TARGET_OPTION (xt
),
12489 TREE_TARGET_OPTION (yt
));
12491 gcc_unreachable ();
12494 /* Build an OPTIMIZATION_NODE based on the options in OPTS. */
12497 build_optimization_node (struct gcc_options
*opts
)
12501 /* Use the cache of optimization nodes. */
12503 cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node
),
12506 tree
*slot
= cl_option_hash_table
->find_slot (cl_optimization_node
, INSERT
);
12510 /* Insert this one into the hash table. */
12511 t
= cl_optimization_node
;
12514 /* Make a new node for next time round. */
12515 cl_optimization_node
= make_node (OPTIMIZATION_NODE
);
12521 /* Build a TARGET_OPTION_NODE based on the options in OPTS. */
12524 build_target_option_node (struct gcc_options
*opts
)
12528 /* Use the cache of optimization nodes. */
12530 cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node
),
12533 tree
*slot
= cl_option_hash_table
->find_slot (cl_target_option_node
, INSERT
);
12537 /* Insert this one into the hash table. */
12538 t
= cl_target_option_node
;
12541 /* Make a new node for next time round. */
12542 cl_target_option_node
= make_node (TARGET_OPTION_NODE
);
12548 /* Clear TREE_TARGET_GLOBALS of all TARGET_OPTION_NODE trees,
12549 so that they aren't saved during PCH writing. */
12552 prepare_target_option_nodes_for_pch (void)
12554 hash_table
<cl_option_hasher
>::iterator iter
= cl_option_hash_table
->begin ();
12555 for (; iter
!= cl_option_hash_table
->end (); ++iter
)
12556 if (TREE_CODE (*iter
) == TARGET_OPTION_NODE
)
12557 TREE_TARGET_GLOBALS (*iter
) = NULL
;
12560 /* Determine the "ultimate origin" of a block. */
12563 block_ultimate_origin (const_tree block
)
12565 tree origin
= BLOCK_ABSTRACT_ORIGIN (block
);
12567 if (origin
== NULL_TREE
)
12571 gcc_checking_assert ((DECL_P (origin
)
12572 && DECL_ORIGIN (origin
) == origin
)
12573 || BLOCK_ORIGIN (origin
) == origin
);
12578 /* Return true iff conversion from INNER_TYPE to OUTER_TYPE generates
12582 tree_nop_conversion_p (const_tree outer_type
, const_tree inner_type
)
12584 /* Do not strip casts into or out of differing address spaces. */
12585 if (POINTER_TYPE_P (outer_type
)
12586 && TYPE_ADDR_SPACE (TREE_TYPE (outer_type
)) != ADDR_SPACE_GENERIC
)
12588 if (!POINTER_TYPE_P (inner_type
)
12589 || (TYPE_ADDR_SPACE (TREE_TYPE (outer_type
))
12590 != TYPE_ADDR_SPACE (TREE_TYPE (inner_type
))))
12593 else if (POINTER_TYPE_P (inner_type
)
12594 && TYPE_ADDR_SPACE (TREE_TYPE (inner_type
)) != ADDR_SPACE_GENERIC
)
12596 /* We already know that outer_type is not a pointer with
12597 a non-generic address space. */
12601 /* Use precision rather then machine mode when we can, which gives
12602 the correct answer even for submode (bit-field) types. */
12603 if ((INTEGRAL_TYPE_P (outer_type
)
12604 || POINTER_TYPE_P (outer_type
)
12605 || TREE_CODE (outer_type
) == OFFSET_TYPE
)
12606 && (INTEGRAL_TYPE_P (inner_type
)
12607 || POINTER_TYPE_P (inner_type
)
12608 || TREE_CODE (inner_type
) == OFFSET_TYPE
))
12609 return TYPE_PRECISION (outer_type
) == TYPE_PRECISION (inner_type
);
12611 /* Otherwise fall back on comparing machine modes (e.g. for
12612 aggregate types, floats). */
12613 return TYPE_MODE (outer_type
) == TYPE_MODE (inner_type
);
12616 /* Return true iff conversion in EXP generates no instruction. Mark
12617 it inline so that we fully inline into the stripping functions even
12618 though we have two uses of this function. */
12621 tree_nop_conversion (const_tree exp
)
12623 tree outer_type
, inner_type
;
12625 if (location_wrapper_p (exp
))
12627 if (!CONVERT_EXPR_P (exp
)
12628 && TREE_CODE (exp
) != NON_LVALUE_EXPR
)
12631 outer_type
= TREE_TYPE (exp
);
12632 inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
12633 if (!inner_type
|| inner_type
== error_mark_node
)
12636 return tree_nop_conversion_p (outer_type
, inner_type
);
12639 /* Return true iff conversion in EXP generates no instruction. Don't
12640 consider conversions changing the signedness. */
12643 tree_sign_nop_conversion (const_tree exp
)
12645 tree outer_type
, inner_type
;
12647 if (!tree_nop_conversion (exp
))
12650 outer_type
= TREE_TYPE (exp
);
12651 inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
12653 return (TYPE_UNSIGNED (outer_type
) == TYPE_UNSIGNED (inner_type
)
12654 && POINTER_TYPE_P (outer_type
) == POINTER_TYPE_P (inner_type
));
12657 /* Strip conversions from EXP according to tree_nop_conversion and
12658 return the resulting expression. */
12661 tree_strip_nop_conversions (tree exp
)
12663 while (tree_nop_conversion (exp
))
12664 exp
= TREE_OPERAND (exp
, 0);
12668 /* Strip conversions from EXP according to tree_sign_nop_conversion
12669 and return the resulting expression. */
12672 tree_strip_sign_nop_conversions (tree exp
)
12674 while (tree_sign_nop_conversion (exp
))
12675 exp
= TREE_OPERAND (exp
, 0);
12679 /* Avoid any floating point extensions from EXP. */
12681 strip_float_extensions (tree exp
)
12683 tree sub
, expt
, subt
;
12685 /* For floating point constant look up the narrowest type that can hold
12686 it properly and handle it like (type)(narrowest_type)constant.
12687 This way we can optimize for instance a=a*2.0 where "a" is float
12688 but 2.0 is double constant. */
12689 if (TREE_CODE (exp
) == REAL_CST
&& !DECIMAL_FLOAT_TYPE_P (TREE_TYPE (exp
)))
12691 REAL_VALUE_TYPE orig
;
12694 orig
= TREE_REAL_CST (exp
);
12695 if (TYPE_PRECISION (TREE_TYPE (exp
)) > TYPE_PRECISION (float_type_node
)
12696 && exact_real_truncate (TYPE_MODE (float_type_node
), &orig
))
12697 type
= float_type_node
;
12698 else if (TYPE_PRECISION (TREE_TYPE (exp
))
12699 > TYPE_PRECISION (double_type_node
)
12700 && exact_real_truncate (TYPE_MODE (double_type_node
), &orig
))
12701 type
= double_type_node
;
12703 return build_real_truncate (type
, orig
);
12706 if (!CONVERT_EXPR_P (exp
))
12709 sub
= TREE_OPERAND (exp
, 0);
12710 subt
= TREE_TYPE (sub
);
12711 expt
= TREE_TYPE (exp
);
12713 if (!FLOAT_TYPE_P (subt
))
12716 if (DECIMAL_FLOAT_TYPE_P (expt
) != DECIMAL_FLOAT_TYPE_P (subt
))
12719 if (TYPE_PRECISION (subt
) > TYPE_PRECISION (expt
))
12722 return strip_float_extensions (sub
);
12725 /* Strip out all handled components that produce invariant
12729 strip_invariant_refs (const_tree op
)
12731 while (handled_component_p (op
))
12733 switch (TREE_CODE (op
))
12736 case ARRAY_RANGE_REF
:
12737 if (!is_gimple_constant (TREE_OPERAND (op
, 1))
12738 || TREE_OPERAND (op
, 2) != NULL_TREE
12739 || TREE_OPERAND (op
, 3) != NULL_TREE
)
12743 case COMPONENT_REF
:
12744 if (TREE_OPERAND (op
, 2) != NULL_TREE
)
12750 op
= TREE_OPERAND (op
, 0);
12756 static GTY(()) tree gcc_eh_personality_decl
;
12758 /* Return the GCC personality function decl. */
12761 lhd_gcc_personality (void)
12763 if (!gcc_eh_personality_decl
)
12764 gcc_eh_personality_decl
= build_personality_function ("gcc");
12765 return gcc_eh_personality_decl
;
12768 /* TARGET is a call target of GIMPLE call statement
12769 (obtained by gimple_call_fn). Return true if it is
12770 OBJ_TYPE_REF representing an virtual call of C++ method.
12771 (As opposed to OBJ_TYPE_REF representing objc calls
12772 through a cast where middle-end devirtualization machinery
12776 virtual_method_call_p (const_tree target
)
12778 if (TREE_CODE (target
) != OBJ_TYPE_REF
)
12780 tree t
= TREE_TYPE (target
);
12781 gcc_checking_assert (TREE_CODE (t
) == POINTER_TYPE
);
12783 if (TREE_CODE (t
) == FUNCTION_TYPE
)
12785 gcc_checking_assert (TREE_CODE (t
) == METHOD_TYPE
);
12786 /* If we do not have BINFO associated, it means that type was built
12787 without devirtualization enabled. Do not consider this a virtual
12789 if (!TYPE_BINFO (obj_type_ref_class (target
)))
12794 /* Lookup sub-BINFO of BINFO of TYPE at offset POS. */
12797 lookup_binfo_at_offset (tree binfo
, tree type
, HOST_WIDE_INT pos
)
12800 tree base_binfo
, b
;
12802 for (i
= 0; BINFO_BASE_ITERATE (binfo
, i
, base_binfo
); i
++)
12803 if (pos
== tree_to_shwi (BINFO_OFFSET (base_binfo
))
12804 && types_same_for_odr (TREE_TYPE (base_binfo
), type
))
12806 else if ((b
= lookup_binfo_at_offset (base_binfo
, type
, pos
)) != NULL
)
12811 /* Try to find a base info of BINFO that would have its field decl at offset
12812 OFFSET within the BINFO type and which is of EXPECTED_TYPE. If it can be
12813 found, return, otherwise return NULL_TREE. */
12816 get_binfo_at_offset (tree binfo
, poly_int64 offset
, tree expected_type
)
12818 tree type
= BINFO_TYPE (binfo
);
12822 HOST_WIDE_INT pos
, size
;
12826 if (types_same_for_odr (type
, expected_type
))
12828 if (maybe_lt (offset
, 0))
12831 for (fld
= TYPE_FIELDS (type
); fld
; fld
= DECL_CHAIN (fld
))
12833 if (TREE_CODE (fld
) != FIELD_DECL
|| !DECL_ARTIFICIAL (fld
))
12836 pos
= int_bit_position (fld
);
12837 size
= tree_to_uhwi (DECL_SIZE (fld
));
12838 if (known_in_range_p (offset
, pos
, size
))
12841 if (!fld
|| TREE_CODE (TREE_TYPE (fld
)) != RECORD_TYPE
)
12844 /* Offset 0 indicates the primary base, whose vtable contents are
12845 represented in the binfo for the derived class. */
12846 else if (maybe_ne (offset
, 0))
12848 tree found_binfo
= NULL
, base_binfo
;
12849 /* Offsets in BINFO are in bytes relative to the whole structure
12850 while POS is in bits relative to the containing field. */
12851 int binfo_offset
= (tree_to_shwi (BINFO_OFFSET (binfo
)) + pos
12854 for (i
= 0; BINFO_BASE_ITERATE (binfo
, i
, base_binfo
); i
++)
12855 if (tree_to_shwi (BINFO_OFFSET (base_binfo
)) == binfo_offset
12856 && types_same_for_odr (TREE_TYPE (base_binfo
), TREE_TYPE (fld
)))
12858 found_binfo
= base_binfo
;
12862 binfo
= found_binfo
;
12864 binfo
= lookup_binfo_at_offset (binfo
, TREE_TYPE (fld
),
12868 type
= TREE_TYPE (fld
);
12873 /* Returns true if X is a typedef decl. */
12876 is_typedef_decl (const_tree x
)
12878 return (x
&& TREE_CODE (x
) == TYPE_DECL
12879 && DECL_ORIGINAL_TYPE (x
) != NULL_TREE
);
12882 /* Returns true iff TYPE is a type variant created for a typedef. */
12885 typedef_variant_p (const_tree type
)
12887 return is_typedef_decl (TYPE_NAME (type
));
12890 /* PR 84195: Replace control characters in "unescaped" with their
12891 escaped equivalents. Allow newlines if -fmessage-length has
12892 been set to a non-zero value. This is done here, rather than
12893 where the attribute is recorded as the message length can
12894 change between these two locations. */
12897 escaped_string::escape (const char *unescaped
)
12900 size_t i
, new_i
, len
;
12905 m_str
= const_cast<char *> (unescaped
);
12908 if (unescaped
== NULL
|| *unescaped
== 0)
12911 len
= strlen (unescaped
);
12915 for (i
= 0; i
< len
; i
++)
12917 char c
= unescaped
[i
];
12922 escaped
[new_i
++] = c
;
12926 if (c
!= '\n' || !pp_is_wrapping_line (global_dc
->printer
))
12928 if (escaped
== NULL
)
12930 /* We only allocate space for a new string if we
12931 actually encounter a control character that
12932 needs replacing. */
12933 escaped
= (char *) xmalloc (len
* 2 + 1);
12934 strncpy (escaped
, unescaped
, i
);
12938 escaped
[new_i
++] = '\\';
12942 case '\a': escaped
[new_i
++] = 'a'; break;
12943 case '\b': escaped
[new_i
++] = 'b'; break;
12944 case '\f': escaped
[new_i
++] = 'f'; break;
12945 case '\n': escaped
[new_i
++] = 'n'; break;
12946 case '\r': escaped
[new_i
++] = 'r'; break;
12947 case '\t': escaped
[new_i
++] = 't'; break;
12948 case '\v': escaped
[new_i
++] = 'v'; break;
12949 default: escaped
[new_i
++] = '?'; break;
12953 escaped
[new_i
++] = c
;
12958 escaped
[new_i
] = 0;
12964 /* Warn about a use of an identifier which was marked deprecated. Returns
12965 whether a warning was given. */
12968 warn_deprecated_use (tree node
, tree attr
)
12970 escaped_string msg
;
12972 if (node
== 0 || !warn_deprecated_decl
)
12978 attr
= DECL_ATTRIBUTES (node
);
12979 else if (TYPE_P (node
))
12981 tree decl
= TYPE_STUB_DECL (node
);
12983 attr
= lookup_attribute ("deprecated",
12984 TYPE_ATTRIBUTES (TREE_TYPE (decl
)));
12989 attr
= lookup_attribute ("deprecated", attr
);
12992 msg
.escape (TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr
))));
12997 auto_diagnostic_group d
;
12999 w
= warning (OPT_Wdeprecated_declarations
,
13000 "%qD is deprecated: %s", node
, (const char *) msg
);
13002 w
= warning (OPT_Wdeprecated_declarations
,
13003 "%qD is deprecated", node
);
13005 inform (DECL_SOURCE_LOCATION (node
), "declared here");
13007 else if (TYPE_P (node
))
13009 tree what
= NULL_TREE
;
13010 tree decl
= TYPE_STUB_DECL (node
);
13012 if (TYPE_NAME (node
))
13014 if (TREE_CODE (TYPE_NAME (node
)) == IDENTIFIER_NODE
)
13015 what
= TYPE_NAME (node
);
13016 else if (TREE_CODE (TYPE_NAME (node
)) == TYPE_DECL
13017 && DECL_NAME (TYPE_NAME (node
)))
13018 what
= DECL_NAME (TYPE_NAME (node
));
13021 auto_diagnostic_group d
;
13025 w
= warning (OPT_Wdeprecated_declarations
,
13026 "%qE is deprecated: %s", what
, (const char *) msg
);
13028 w
= warning (OPT_Wdeprecated_declarations
,
13029 "%qE is deprecated", what
);
13034 w
= warning (OPT_Wdeprecated_declarations
,
13035 "type is deprecated: %s", (const char *) msg
);
13037 w
= warning (OPT_Wdeprecated_declarations
,
13038 "type is deprecated");
13042 inform (DECL_SOURCE_LOCATION (decl
), "declared here");
13048 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
13049 somewhere in it. */
13052 contains_bitfld_component_ref_p (const_tree ref
)
13054 while (handled_component_p (ref
))
13056 if (TREE_CODE (ref
) == COMPONENT_REF
13057 && DECL_BIT_FIELD (TREE_OPERAND (ref
, 1)))
13059 ref
= TREE_OPERAND (ref
, 0);
13065 /* Try to determine whether a TRY_CATCH expression can fall through.
13066 This is a subroutine of block_may_fallthru. */
13069 try_catch_may_fallthru (const_tree stmt
)
13071 tree_stmt_iterator i
;
13073 /* If the TRY block can fall through, the whole TRY_CATCH can
13075 if (block_may_fallthru (TREE_OPERAND (stmt
, 0)))
13078 i
= tsi_start (TREE_OPERAND (stmt
, 1));
13079 switch (TREE_CODE (tsi_stmt (i
)))
13082 /* We expect to see a sequence of CATCH_EXPR trees, each with a
13083 catch expression and a body. The whole TRY_CATCH may fall
13084 through iff any of the catch bodies falls through. */
13085 for (; !tsi_end_p (i
); tsi_next (&i
))
13087 if (block_may_fallthru (CATCH_BODY (tsi_stmt (i
))))
13092 case EH_FILTER_EXPR
:
13093 /* The exception filter expression only matters if there is an
13094 exception. If the exception does not match EH_FILTER_TYPES,
13095 we will execute EH_FILTER_FAILURE, and we will fall through
13096 if that falls through. If the exception does match
13097 EH_FILTER_TYPES, the stack unwinder will continue up the
13098 stack, so we will not fall through. We don't know whether we
13099 will throw an exception which matches EH_FILTER_TYPES or not,
13100 so we just ignore EH_FILTER_TYPES and assume that we might
13101 throw an exception which doesn't match. */
13102 return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i
)));
13105 /* This case represents statements to be executed when an
13106 exception occurs. Those statements are implicitly followed
13107 by a RESX statement to resume execution after the exception.
13108 So in this case the TRY_CATCH never falls through. */
13113 /* Try to determine if we can fall out of the bottom of BLOCK. This guess
13114 need not be 100% accurate; simply be conservative and return true if we
13115 don't know. This is used only to avoid stupidly generating extra code.
13116 If we're wrong, we'll just delete the extra code later. */
13119 block_may_fallthru (const_tree block
)
13121 /* This CONST_CAST is okay because expr_last returns its argument
13122 unmodified and we assign it to a const_tree. */
13123 const_tree stmt
= expr_last (CONST_CAST_TREE (block
));
13125 switch (stmt
? TREE_CODE (stmt
) : ERROR_MARK
)
13129 /* Easy cases. If the last statement of the block implies
13130 control transfer, then we can't fall through. */
13134 /* If there is a default: label or case labels cover all possible
13135 SWITCH_COND values, then the SWITCH_EXPR will transfer control
13136 to some case label in all cases and all we care is whether the
13137 SWITCH_BODY falls through. */
13138 if (SWITCH_ALL_CASES_P (stmt
))
13139 return block_may_fallthru (SWITCH_BODY (stmt
));
13143 if (block_may_fallthru (COND_EXPR_THEN (stmt
)))
13145 return block_may_fallthru (COND_EXPR_ELSE (stmt
));
13148 return block_may_fallthru (BIND_EXPR_BODY (stmt
));
13150 case TRY_CATCH_EXPR
:
13151 return try_catch_may_fallthru (stmt
);
13153 case TRY_FINALLY_EXPR
:
13154 /* The finally clause is always executed after the try clause,
13155 so if it does not fall through, then the try-finally will not
13156 fall through. Otherwise, if the try clause does not fall
13157 through, then when the finally clause falls through it will
13158 resume execution wherever the try clause was going. So the
13159 whole try-finally will only fall through if both the try
13160 clause and the finally clause fall through. */
13161 return (block_may_fallthru (TREE_OPERAND (stmt
, 0))
13162 && block_may_fallthru (TREE_OPERAND (stmt
, 1)));
13165 return block_may_fallthru (TREE_OPERAND (stmt
, 0));
13168 if (TREE_CODE (TREE_OPERAND (stmt
, 1)) == CALL_EXPR
)
13169 stmt
= TREE_OPERAND (stmt
, 1);
13175 /* Functions that do not return do not fall through. */
13176 return (call_expr_flags (stmt
) & ECF_NORETURN
) == 0;
13178 case CLEANUP_POINT_EXPR
:
13179 return block_may_fallthru (TREE_OPERAND (stmt
, 0));
13182 return block_may_fallthru (TREE_OPERAND (stmt
, 1));
13188 return lang_hooks
.block_may_fallthru (stmt
);
/* True if we are using EH to handle cleanups.  */
static bool using_eh_for_cleanups_flag = false;

/* This routine is called from front ends to indicate eh should be used for
   cleanups.  */

void
using_eh_for_cleanups (void)
{
  using_eh_for_cleanups_flag = true;
}
13203 /* Query whether EH is used for cleanups. */
13205 using_eh_for_cleanups_p (void)
13207 return using_eh_for_cleanups_flag
;
13210 /* Wrapper for tree_code_name to ensure that tree code is valid */
13212 get_tree_code_name (enum tree_code code
)
13214 const char *invalid
= "<invalid tree code>";
13216 if (code
>= MAX_TREE_CODES
)
13218 if (code
== 0xa5a5)
13219 return "ggc_freed";
13223 return tree_code_name
[code
];
13226 /* Drops the TREE_OVERFLOW flag from T. */
13229 drop_tree_overflow (tree t
)
13231 gcc_checking_assert (TREE_OVERFLOW (t
));
13233 /* For tree codes with a sharing machinery re-build the result. */
13234 if (poly_int_tree_p (t
))
13235 return wide_int_to_tree (TREE_TYPE (t
), wi::to_poly_wide (t
));
13237 /* For VECTOR_CST, remove the overflow bits from the encoded elements
13238 and canonicalize the result. */
13239 if (TREE_CODE (t
) == VECTOR_CST
)
13241 tree_vector_builder builder
;
13242 builder
.new_unary_operation (TREE_TYPE (t
), t
, true);
13243 unsigned int count
= builder
.encoded_nelts ();
13244 for (unsigned int i
= 0; i
< count
; ++i
)
13246 tree elt
= VECTOR_CST_ELT (t
, i
);
13247 if (TREE_OVERFLOW (elt
))
13248 elt
= drop_tree_overflow (elt
);
13249 builder
.quick_push (elt
);
13251 return builder
.build ();
13254 /* Otherwise, as all tcc_constants are possibly shared, copy the node
13255 and drop the flag. */
13257 TREE_OVERFLOW (t
) = 0;
13259 /* For constants that contain nested constants, drop the flag
13260 from those as well. */
13261 if (TREE_CODE (t
) == COMPLEX_CST
)
13263 if (TREE_OVERFLOW (TREE_REALPART (t
)))
13264 TREE_REALPART (t
) = drop_tree_overflow (TREE_REALPART (t
));
13265 if (TREE_OVERFLOW (TREE_IMAGPART (t
)))
13266 TREE_IMAGPART (t
) = drop_tree_overflow (TREE_IMAGPART (t
));
13272 /* Given a memory reference expression T, return its base address.
13273 The base address of a memory reference expression is the main
13274 object being referenced. For instance, the base address for
13275 'array[i].fld[j]' is 'array'. You can think of this as stripping
13276 away the offset part from a memory address.
13278 This function calls handled_component_p to strip away all the inner
13279 parts of the memory reference until it reaches the base object. */
13282 get_base_address (tree t
)
13284 while (handled_component_p (t
))
13285 t
= TREE_OPERAND (t
, 0);
13287 if ((TREE_CODE (t
) == MEM_REF
13288 || TREE_CODE (t
) == TARGET_MEM_REF
)
13289 && TREE_CODE (TREE_OPERAND (t
, 0)) == ADDR_EXPR
)
13290 t
= TREE_OPERAND (TREE_OPERAND (t
, 0), 0);
13292 /* ??? Either the alias oracle or all callers need to properly deal
13293 with WITH_SIZE_EXPRs before we can look through those. */
13294 if (TREE_CODE (t
) == WITH_SIZE_EXPR
)
13300 /* Return a tree of sizetype representing the size, in bytes, of the element
13301 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
13304 array_ref_element_size (tree exp
)
13306 tree aligned_size
= TREE_OPERAND (exp
, 3);
13307 tree elmt_type
= TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
13308 location_t loc
= EXPR_LOCATION (exp
);
13310 /* If a size was specified in the ARRAY_REF, it's the size measured
13311 in alignment units of the element type. So multiply by that value. */
13314 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
13315 sizetype from another type of the same width and signedness. */
13316 if (TREE_TYPE (aligned_size
) != sizetype
)
13317 aligned_size
= fold_convert_loc (loc
, sizetype
, aligned_size
);
13318 return size_binop_loc (loc
, MULT_EXPR
, aligned_size
,
13319 size_int (TYPE_ALIGN_UNIT (elmt_type
)));
13322 /* Otherwise, take the size from that of the element type. Substitute
13323 any PLACEHOLDER_EXPR that we have. */
13325 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type
), exp
);
13328 /* Return a tree representing the lower bound of the array mentioned in
13329 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
13332 array_ref_low_bound (tree exp
)
13334 tree domain_type
= TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp
, 0)));
13336 /* If a lower bound is specified in EXP, use it. */
13337 if (TREE_OPERAND (exp
, 2))
13338 return TREE_OPERAND (exp
, 2);
13340 /* Otherwise, if there is a domain type and it has a lower bound, use it,
13341 substituting for a PLACEHOLDER_EXPR as needed. */
13342 if (domain_type
&& TYPE_MIN_VALUE (domain_type
))
13343 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type
), exp
);
13345 /* Otherwise, return a zero of the appropriate type. */
13346 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp
, 1)), 0);
13349 /* Return a tree representing the upper bound of the array mentioned in
13350 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
13353 array_ref_up_bound (tree exp
)
13355 tree domain_type
= TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp
, 0)));
13357 /* If there is a domain type and it has an upper bound, use it, substituting
13358 for a PLACEHOLDER_EXPR as needed. */
13359 if (domain_type
&& TYPE_MAX_VALUE (domain_type
))
13360 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type
), exp
);
13362 /* Otherwise fail. */
13366 /* Returns true if REF is an array reference or a component reference
13367 to an array at the end of a structure.
13368 If this is the case, the array may be allocated larger
13369 than its upper bound implies. */
13372 array_at_struct_end_p (tree ref
)
13376 if (TREE_CODE (ref
) == ARRAY_REF
13377 || TREE_CODE (ref
) == ARRAY_RANGE_REF
)
13379 atype
= TREE_TYPE (TREE_OPERAND (ref
, 0));
13380 ref
= TREE_OPERAND (ref
, 0);
13382 else if (TREE_CODE (ref
) == COMPONENT_REF
13383 && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref
, 1))) == ARRAY_TYPE
)
13384 atype
= TREE_TYPE (TREE_OPERAND (ref
, 1));
13388 if (TREE_CODE (ref
) == STRING_CST
)
13391 tree ref_to_array
= ref
;
13392 while (handled_component_p (ref
))
13394 /* If the reference chain contains a component reference to a
13395 non-union type and there follows another field the reference
13396 is not at the end of a structure. */
13397 if (TREE_CODE (ref
) == COMPONENT_REF
)
13399 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (ref
, 0))) == RECORD_TYPE
)
13401 tree nextf
= DECL_CHAIN (TREE_OPERAND (ref
, 1));
13402 while (nextf
&& TREE_CODE (nextf
) != FIELD_DECL
)
13403 nextf
= DECL_CHAIN (nextf
);
13408 /* If we have a multi-dimensional array we do not consider
13409 a non-innermost dimension as flex array if the whole
13410 multi-dimensional array is at struct end.
13411 Same for an array of aggregates with a trailing array
13413 else if (TREE_CODE (ref
) == ARRAY_REF
)
13415 else if (TREE_CODE (ref
) == ARRAY_RANGE_REF
)
13417 /* If we view an underlying object as sth else then what we
13418 gathered up to now is what we have to rely on. */
13419 else if (TREE_CODE (ref
) == VIEW_CONVERT_EXPR
)
13422 gcc_unreachable ();
13424 ref
= TREE_OPERAND (ref
, 0);
13427 /* The array now is at struct end. Treat flexible arrays as
13428 always subject to extend, even into just padding constrained by
13429 an underlying decl. */
13430 if (! TYPE_SIZE (atype
)
13431 || ! TYPE_DOMAIN (atype
)
13432 || ! TYPE_MAX_VALUE (TYPE_DOMAIN (atype
)))
13435 if (TREE_CODE (ref
) == MEM_REF
13436 && TREE_CODE (TREE_OPERAND (ref
, 0)) == ADDR_EXPR
)
13437 ref
= TREE_OPERAND (TREE_OPERAND (ref
, 0), 0);
13439 /* If the reference is based on a declared entity, the size of the array
13440 is constrained by its given domain. (Do not trust commons PR/69368). */
13442 && !(flag_unconstrained_commons
13443 && VAR_P (ref
) && DECL_COMMON (ref
))
13444 && DECL_SIZE_UNIT (ref
)
13445 && TREE_CODE (DECL_SIZE_UNIT (ref
)) == INTEGER_CST
)
13447 /* Check whether the array domain covers all of the available
13450 if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (atype
))) != INTEGER_CST
13451 || TREE_CODE (TYPE_MAX_VALUE (TYPE_DOMAIN (atype
))) != INTEGER_CST
13452 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (atype
))) != INTEGER_CST
)
13454 if (! get_addr_base_and_unit_offset (ref_to_array
, &offset
))
13457 /* If at least one extra element fits it is a flexarray. */
13458 if (known_le ((wi::to_offset (TYPE_MAX_VALUE (TYPE_DOMAIN (atype
)))
13459 - wi::to_offset (TYPE_MIN_VALUE (TYPE_DOMAIN (atype
)))
13461 * wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (atype
))),
13462 wi::to_offset (DECL_SIZE_UNIT (ref
)) - offset
))
13471 /* Return a tree representing the offset, in bytes, of the field referenced
13472 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
13475 component_ref_field_offset (tree exp
)
13477 tree aligned_offset
= TREE_OPERAND (exp
, 2);
13478 tree field
= TREE_OPERAND (exp
, 1);
13479 location_t loc
= EXPR_LOCATION (exp
);
13481 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
13482 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
13484 if (aligned_offset
)
13486 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
13487 sizetype from another type of the same width and signedness. */
13488 if (TREE_TYPE (aligned_offset
) != sizetype
)
13489 aligned_offset
= fold_convert_loc (loc
, sizetype
, aligned_offset
);
13490 return size_binop_loc (loc
, MULT_EXPR
, aligned_offset
,
13491 size_int (DECL_OFFSET_ALIGN (field
)
13495 /* Otherwise, take the offset from that of the field. Substitute
13496 any PLACEHOLDER_EXPR that we have. */
13498 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field
), exp
);
13501 /* Determines the size of the member referenced by the COMPONENT_REF
13502 REF, using its initializer expression if necessary in order to
13503 determine the size of an initialized flexible array member.
13504 Returns the size (which might be zero for an object with
13505 an uninitialized flexible array member) or null if the size
13506 cannot be determined. */
13509 component_ref_size (tree ref
)
13511 gcc_assert (TREE_CODE (ref
) == COMPONENT_REF
);
13513 tree member
= TREE_OPERAND (ref
, 1);
13515 /* If the member is not an array, or is not last, or is an array with
13516 more than one element, return its size. Otherwise it's either
13517 a bona fide flexible array member, or a zero-length array member,
13518 or an array of length one treated as such. */
13519 tree size
= DECL_SIZE_UNIT (member
);
13522 tree memtype
= TREE_TYPE (member
);
13523 if (TREE_CODE (memtype
) != ARRAY_TYPE
13524 || !array_at_struct_end_p (ref
))
13527 if (!integer_zerop (size
))
13528 if (tree dom
= TYPE_DOMAIN (memtype
))
13529 if (tree min
= TYPE_MIN_VALUE (dom
))
13530 if (tree max
= TYPE_MAX_VALUE (dom
))
13531 if (TREE_CODE (min
) == INTEGER_CST
13532 && TREE_CODE (max
) == INTEGER_CST
)
13534 offset_int minidx
= wi::to_offset (min
);
13535 offset_int maxidx
= wi::to_offset (max
);
13536 if (maxidx
- minidx
> 1)
13541 /* If the reference is to a declared object and the member a true
13542 flexible array, try to determine its size from its initializer. */
13543 poly_int64 off
= 0;
13544 tree base
= get_addr_base_and_unit_offset (ref
, &off
);
13545 if (!base
|| !VAR_P (base
))
13548 /* The size of any member of a declared object other than a flexible
13549 array member is that obtained above. */
13553 if (tree init
= DECL_INITIAL (base
))
13554 if (TREE_CODE (init
) == CONSTRUCTOR
)
13556 off
<<= LOG2_BITS_PER_UNIT
;
13557 init
= fold_ctor_reference (NULL_TREE
, init
, off
, 0, base
);
13559 return TYPE_SIZE_UNIT (TREE_TYPE (init
));
13562 /* Return "don't know" for an external non-array object since its
13563 flexible array member can be initialized to have any number of
13564 elements. Otherwise, return zero because the flexible array
13565 member has no elements. */
13566 return (DECL_EXTERNAL (base
) && TREE_CODE (TREE_TYPE (base
)) != ARRAY_TYPE
13567 ? NULL_TREE
: integer_zero_node
);
13570 /* Return the machine mode of T. For vectors, returns the mode of the
13571 inner type. The main use case is to feed the result to HONOR_NANS,
13572 avoiding the BLKmode that a direct TYPE_MODE (T) might return. */
13575 element_mode (const_tree t
)
13579 if (VECTOR_TYPE_P (t
) || TREE_CODE (t
) == COMPLEX_TYPE
)
13581 return TYPE_MODE (t
);
13584 /* Vector types need to re-check the target flags each time we report
13585 the machine mode. We need to do this because attribute target can
13586 change the result of vector_mode_supported_p and have_regs_of_mode
13587 on a per-function basis. Thus the TYPE_MODE of a VECTOR_TYPE can
13588 change on a per-function basis. */
13589 /* ??? Possibly a better solution is to run through all the types
13590 referenced by a function and re-compute the TYPE_MODE once, rather
13591 than make the TYPE_MODE macro call a function. */
13594 vector_type_mode (const_tree t
)
13598 gcc_assert (TREE_CODE (t
) == VECTOR_TYPE
);
13600 mode
= t
->type_common
.mode
;
13601 if (VECTOR_MODE_P (mode
)
13602 && (!targetm
.vector_mode_supported_p (mode
)
13603 || !have_regs_of_mode
[mode
]))
13605 scalar_int_mode innermode
;
13607 /* For integers, try mapping it to a same-sized scalar mode. */
13608 if (is_int_mode (TREE_TYPE (t
)->type_common
.mode
, &innermode
))
13610 poly_int64 size
= (TYPE_VECTOR_SUBPARTS (t
)
13611 * GET_MODE_BITSIZE (innermode
));
13612 scalar_int_mode mode
;
13613 if (int_mode_for_size (size
, 0).exists (&mode
)
13614 && have_regs_of_mode
[mode
])
13624 /* Verify that basic properties of T match TV and thus T can be a variant of
13625 TV. TV should be the more specified variant (i.e. the main variant). */
13628 verify_type_variant (const_tree t
, tree tv
)
13630 /* Type variant can differ by:
13632 - TYPE_QUALS: TYPE_READONLY, TYPE_VOLATILE, TYPE_ATOMIC, TYPE_RESTRICT,
13633 ENCODE_QUAL_ADDR_SPACE.
13634 - main variant may be TYPE_COMPLETE_P and variant types !TYPE_COMPLETE_P
13635 in this case some values may not be set in the variant types
13636 (see TYPE_COMPLETE_P checks).
13637 - it is possible to have TYPE_ARTIFICIAL variant of non-artifical type
13638 - by TYPE_NAME and attributes (i.e. when variant originate by typedef)
13639 - TYPE_CANONICAL (TYPE_ALIAS_SET is the same among variants)
13640 - by the alignment: TYPE_ALIGN and TYPE_USER_ALIGN
13641 - during LTO by TYPE_CONTEXT if type is TYPE_FILE_SCOPE_P
13642 this is necessary to make it possible to merge types form different TUs
13643 - arrays, pointers and references may have TREE_TYPE that is a variant
13644 of TREE_TYPE of their main variants.
13645 - aggregates may have new TYPE_FIELDS list that list variants of
13646 the main variant TYPE_FIELDS.
13647 - vector types may differ by TYPE_VECTOR_OPAQUE
13650 /* Convenience macro for matching individual fields. */
13651 #define verify_variant_match(flag) \
13653 if (flag (tv) != flag (t)) \
13655 error ("type variant differs by %s", #flag); \
13661 /* tree_base checks. */
13663 verify_variant_match (TREE_CODE
);
13664 /* FIXME: Ada builds non-artificial variants of artificial types. */
13665 if (TYPE_ARTIFICIAL (tv
) && 0)
13666 verify_variant_match (TYPE_ARTIFICIAL
);
13667 if (POINTER_TYPE_P (tv
))
13668 verify_variant_match (TYPE_REF_CAN_ALIAS_ALL
);
13669 /* FIXME: TYPE_SIZES_GIMPLIFIED may differs for Ada build. */
13670 verify_variant_match (TYPE_UNSIGNED
);
13671 verify_variant_match (TYPE_PACKED
);
13672 if (TREE_CODE (t
) == REFERENCE_TYPE
)
13673 verify_variant_match (TYPE_REF_IS_RVALUE
);
13674 if (AGGREGATE_TYPE_P (t
))
13675 verify_variant_match (TYPE_REVERSE_STORAGE_ORDER
);
13677 verify_variant_match (TYPE_SATURATING
);
13678 /* FIXME: This check trigger during libstdc++ build. */
13679 if (RECORD_OR_UNION_TYPE_P (t
) && COMPLETE_TYPE_P (t
) && 0)
13680 verify_variant_match (TYPE_FINAL_P
);
13682 /* tree_type_common checks. */
13684 if (COMPLETE_TYPE_P (t
))
13686 verify_variant_match (TYPE_MODE
);
13687 if (TREE_CODE (TYPE_SIZE (t
)) != PLACEHOLDER_EXPR
13688 && TREE_CODE (TYPE_SIZE (tv
)) != PLACEHOLDER_EXPR
)
13689 verify_variant_match (TYPE_SIZE
);
13690 if (TREE_CODE (TYPE_SIZE_UNIT (t
)) != PLACEHOLDER_EXPR
13691 && TREE_CODE (TYPE_SIZE_UNIT (tv
)) != PLACEHOLDER_EXPR
13692 && TYPE_SIZE_UNIT (t
) != TYPE_SIZE_UNIT (tv
))
13694 gcc_assert (!operand_equal_p (TYPE_SIZE_UNIT (t
),
13695 TYPE_SIZE_UNIT (tv
), 0));
13696 error ("type variant has different %<TYPE_SIZE_UNIT%>");
13698 error ("type variant%'s %<TYPE_SIZE_UNIT%>");
13699 debug_tree (TYPE_SIZE_UNIT (tv
));
13700 error ("type%'s %<TYPE_SIZE_UNIT%>");
13701 debug_tree (TYPE_SIZE_UNIT (t
));
13705 verify_variant_match (TYPE_PRECISION
);
13706 verify_variant_match (TYPE_NEEDS_CONSTRUCTING
);
13707 if (RECORD_OR_UNION_TYPE_P (t
))
13708 verify_variant_match (TYPE_TRANSPARENT_AGGR
);
13709 else if (TREE_CODE (t
) == ARRAY_TYPE
)
13710 verify_variant_match (TYPE_NONALIASED_COMPONENT
);
13711 /* During LTO we merge variant lists from diferent translation units
13712 that may differ BY TYPE_CONTEXT that in turn may point
13713 to TRANSLATION_UNIT_DECL.
13714 Ada also builds variants of types with different TYPE_CONTEXT. */
13715 if ((!in_lto_p
|| !TYPE_FILE_SCOPE_P (t
)) && 0)
13716 verify_variant_match (TYPE_CONTEXT
);
13717 if (TREE_CODE (t
) == ARRAY_TYPE
|| TREE_CODE (t
) == INTEGER_TYPE
)
13718 verify_variant_match (TYPE_STRING_FLAG
);
13719 if (TREE_CODE (t
) == RECORD_TYPE
|| TREE_CODE (t
) == UNION_TYPE
)
13720 verify_variant_match (TYPE_CXX_ODR_P
);
13721 if (TYPE_ALIAS_SET_KNOWN_P (t
))
13723 error ("type variant with %<TYPE_ALIAS_SET_KNOWN_P%>");
13728 /* tree_type_non_common checks. */
13730 /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
13731 and dangle the pointer from time to time. */
13732 if (RECORD_OR_UNION_TYPE_P (t
) && TYPE_VFIELD (t
) != TYPE_VFIELD (tv
)
13733 && (in_lto_p
|| !TYPE_VFIELD (tv
)
13734 || TREE_CODE (TYPE_VFIELD (tv
)) != TREE_LIST
))
13736 error ("type variant has different %<TYPE_VFIELD%>");
13740 if ((TREE_CODE (t
) == ENUMERAL_TYPE
&& COMPLETE_TYPE_P (t
))
13741 || TREE_CODE (t
) == INTEGER_TYPE
13742 || TREE_CODE (t
) == BOOLEAN_TYPE
13743 || TREE_CODE (t
) == REAL_TYPE
13744 || TREE_CODE (t
) == FIXED_POINT_TYPE
)
13746 verify_variant_match (TYPE_MAX_VALUE
);
13747 verify_variant_match (TYPE_MIN_VALUE
);
13749 if (TREE_CODE (t
) == METHOD_TYPE
)
13750 verify_variant_match (TYPE_METHOD_BASETYPE
);
13751 if (TREE_CODE (t
) == OFFSET_TYPE
)
13752 verify_variant_match (TYPE_OFFSET_BASETYPE
);
13753 if (TREE_CODE (t
) == ARRAY_TYPE
)
13754 verify_variant_match (TYPE_ARRAY_MAX_SIZE
);
13755 /* FIXME: Be lax and allow TYPE_BINFO to be missing in variant types
13756 or even type's main variant. This is needed to make bootstrap pass
13757 and the bug seems new in GCC 5.
13758 C++ FE should be updated to make this consistent and we should check
13759 that TYPE_BINFO is always NULL for !COMPLETE_TYPE_P and otherwise there
13760 is a match with main variant.
13762 Also disable the check for Java for now because of parser hack that builds
13763 first an dummy BINFO and then sometimes replace it by real BINFO in some
13765 if (RECORD_OR_UNION_TYPE_P (t
) && TYPE_BINFO (t
) && TYPE_BINFO (tv
)
13766 && TYPE_BINFO (t
) != TYPE_BINFO (tv
)
13767 /* FIXME: Java sometimes keep dump TYPE_BINFOs on variant types.
13768 Since there is no cheap way to tell C++/Java type w/o LTO, do checking
13769 at LTO time only. */
13770 && (in_lto_p
&& odr_type_p (t
)))
13772 error ("type variant has different %<TYPE_BINFO%>");
13774 error ("type variant%'s %<TYPE_BINFO%>");
13775 debug_tree (TYPE_BINFO (tv
));
13776 error ("type%'s %<TYPE_BINFO%>");
13777 debug_tree (TYPE_BINFO (t
));
13781 /* Check various uses of TYPE_VALUES_RAW. */
13782 if (TREE_CODE (t
) == ENUMERAL_TYPE
13783 && TYPE_VALUES (t
))
13784 verify_variant_match (TYPE_VALUES
);
13785 else if (TREE_CODE (t
) == ARRAY_TYPE
)
13786 verify_variant_match (TYPE_DOMAIN
);
13787 /* Permit incomplete variants of complete type. While FEs may complete
13788 all variants, this does not happen for C++ templates in all cases. */
13789 else if (RECORD_OR_UNION_TYPE_P (t
)
13790 && COMPLETE_TYPE_P (t
)
13791 && TYPE_FIELDS (t
) != TYPE_FIELDS (tv
))
13795 /* Fortran builds qualified variants as new records with items of
13796 qualified type. Verify that they looks same. */
13797 for (f1
= TYPE_FIELDS (t
), f2
= TYPE_FIELDS (tv
);
13799 f1
= TREE_CHAIN (f1
), f2
= TREE_CHAIN (f2
))
13800 if (TREE_CODE (f1
) != FIELD_DECL
|| TREE_CODE (f2
) != FIELD_DECL
13801 || (TYPE_MAIN_VARIANT (TREE_TYPE (f1
))
13802 != TYPE_MAIN_VARIANT (TREE_TYPE (f2
))
13803 /* FIXME: gfc_nonrestricted_type builds all types as variants
13804 with exception of pointer types. It deeply copies the type
13805 which means that we may end up with a variant type
13806 referring non-variant pointer. We may change it to
13807 produce types as variants, too, like
13808 objc_get_protocol_qualified_type does. */
13809 && !POINTER_TYPE_P (TREE_TYPE (f1
)))
13810 || DECL_FIELD_OFFSET (f1
) != DECL_FIELD_OFFSET (f2
)
13811 || DECL_FIELD_BIT_OFFSET (f1
) != DECL_FIELD_BIT_OFFSET (f2
))
13815 error ("type variant has different %<TYPE_FIELDS%>");
13817 error ("first mismatch is field");
13819 error ("and field");
13824 else if ((TREE_CODE (t
) == FUNCTION_TYPE
|| TREE_CODE (t
) == METHOD_TYPE
))
13825 verify_variant_match (TYPE_ARG_TYPES
);
13826 /* For C++ the qualified variant of array type is really an array type
13827 of qualified TREE_TYPE.
13828 objc builds variants of pointer where pointer to type is a variant, too
13829 in objc_get_protocol_qualified_type. */
13830 if (TREE_TYPE (t
) != TREE_TYPE (tv
)
13831 && ((TREE_CODE (t
) != ARRAY_TYPE
13832 && !POINTER_TYPE_P (t
))
13833 || TYPE_MAIN_VARIANT (TREE_TYPE (t
))
13834 != TYPE_MAIN_VARIANT (TREE_TYPE (tv
))))
13836 error ("type variant has different %<TREE_TYPE%>");
13838 error ("type variant%'s %<TREE_TYPE%>");
13839 debug_tree (TREE_TYPE (tv
));
13840 error ("type%'s %<TREE_TYPE%>");
13841 debug_tree (TREE_TYPE (t
));
13844 if (type_with_alias_set_p (t
)
13845 && !gimple_canonical_types_compatible_p (t
, tv
, false))
13847 error ("type is not compatible with its variant");
13849 error ("type variant%'s %<TREE_TYPE%>");
13850 debug_tree (TREE_TYPE (tv
));
13851 error ("type%'s %<TREE_TYPE%>");
13852 debug_tree (TREE_TYPE (t
));
13856 #undef verify_variant_match
13860 /* The TYPE_CANONICAL merging machinery. It should closely resemble
13861 the middle-end types_compatible_p function. It needs to avoid
13862 claiming types are different for types that should be treated
13863 the same with respect to TBAA. Canonical types are also used
13864 for IL consistency checks via the useless_type_conversion_p
13865 predicate which does not handle all type kinds itself but falls
13866 back to pointer-comparison of TYPE_CANONICAL for aggregates
13869 /* Return true if TYPE_UNSIGNED of TYPE should be ignored for canonical
13870 type calculation because we need to allow inter-operability between signed
13871 and unsigned variants. */
13874 type_with_interoperable_signedness (const_tree type
)
13876 /* Fortran standard require C_SIGNED_CHAR to be interoperable with both
13877 signed char and unsigned char. Similarly fortran FE builds
13878 C_SIZE_T as signed type, while C defines it unsigned. */
13880 return tree_code_for_canonical_type_merging (TREE_CODE (type
))
13882 && (TYPE_PRECISION (type
) == TYPE_PRECISION (signed_char_type_node
)
13883 || TYPE_PRECISION (type
) == TYPE_PRECISION (size_type_node
));
13886 /* Return true iff T1 and T2 are structurally identical for what
13888 This function is used both by lto.c canonical type merging and by the
13889 verifier. If TRUST_TYPE_CANONICAL we do not look into structure of types
13890 that have TYPE_CANONICAL defined and assume them equivalent. This is useful
13891 only for LTO because only in these cases TYPE_CANONICAL equivalence
13892 correspond to one defined by gimple_canonical_types_compatible_p. */
13895 gimple_canonical_types_compatible_p (const_tree t1
, const_tree t2
,
13896 bool trust_type_canonical
)
13898 /* Type variants should be same as the main variant. When not doing sanity
13899 checking to verify this fact, go to main variants and save some work. */
13900 if (trust_type_canonical
)
13902 t1
= TYPE_MAIN_VARIANT (t1
);
13903 t2
= TYPE_MAIN_VARIANT (t2
);
13906 /* Check first for the obvious case of pointer identity. */
13910 /* Check that we have two types to compare. */
13911 if (t1
== NULL_TREE
|| t2
== NULL_TREE
)
13914 /* We consider complete types always compatible with incomplete type.
13915 This does not make sense for canonical type calculation and thus we
13916 need to ensure that we are never called on it.
13918 FIXME: For more correctness the function probably should have three modes
13919 1) mode assuming that types are complete mathcing their structure
13920 2) mode allowing incomplete types but producing equivalence classes
13921 and thus ignoring all info from complete types
13922 3) mode allowing incomplete types to match complete but checking
13923 compatibility between complete types.
13925 1 and 2 can be used for canonical type calculation. 3 is the real
13926 definition of type compatibility that can be used i.e. for warnings during
13927 declaration merging. */
13929 gcc_assert (!trust_type_canonical
13930 || (type_with_alias_set_p (t1
) && type_with_alias_set_p (t2
)));
13932 /* If the types have been previously registered and found equal
13935 if (TYPE_CANONICAL (t1
) && TYPE_CANONICAL (t2
)
13936 && trust_type_canonical
)
13938 /* Do not use TYPE_CANONICAL of pointer types. For LTO streamed types
13939 they are always NULL, but they are set to non-NULL for types
13940 constructed by build_pointer_type and variants. In this case the
13941 TYPE_CANONICAL is more fine grained than the equivalnce we test (where
13942 all pointers are considered equal. Be sure to not return false
13944 gcc_checking_assert (canonical_type_used_p (t1
)
13945 && canonical_type_used_p (t2
));
13946 return TYPE_CANONICAL (t1
) == TYPE_CANONICAL (t2
);
13949 /* For types where we do ODR based TBAA the canonical type is always
13950 set correctly, so we know that types are different if their
13951 canonical types does not match. */
13952 if (trust_type_canonical
13953 && (odr_type_p (t1
) && odr_based_tbaa_p (t1
))
13954 != (odr_type_p (t2
) && odr_based_tbaa_p (t2
)))
13957 /* Can't be the same type if the types don't have the same code. */
13958 enum tree_code code
= tree_code_for_canonical_type_merging (TREE_CODE (t1
));
13959 if (code
!= tree_code_for_canonical_type_merging (TREE_CODE (t2
)))
13962 /* Qualifiers do not matter for canonical type comparison purposes. */
13964 /* Void types and nullptr types are always the same. */
13965 if (TREE_CODE (t1
) == VOID_TYPE
13966 || TREE_CODE (t1
) == NULLPTR_TYPE
)
13969 /* Can't be the same type if they have different mode. */
13970 if (TYPE_MODE (t1
) != TYPE_MODE (t2
))
13973 /* Non-aggregate types can be handled cheaply. */
13974 if (INTEGRAL_TYPE_P (t1
)
13975 || SCALAR_FLOAT_TYPE_P (t1
)
13976 || FIXED_POINT_TYPE_P (t1
)
13977 || TREE_CODE (t1
) == VECTOR_TYPE
13978 || TREE_CODE (t1
) == COMPLEX_TYPE
13979 || TREE_CODE (t1
) == OFFSET_TYPE
13980 || POINTER_TYPE_P (t1
))
13982 /* Can't be the same type if they have different recision. */
13983 if (TYPE_PRECISION (t1
) != TYPE_PRECISION (t2
))
13986 /* In some cases the signed and unsigned types are required to be
13988 if (TYPE_UNSIGNED (t1
) != TYPE_UNSIGNED (t2
)
13989 && !type_with_interoperable_signedness (t1
))
13992 /* Fortran's C_SIGNED_CHAR is !TYPE_STRING_FLAG but needs to be
13993 interoperable with "signed char". Unless all frontends are revisited
13994 to agree on these types, we must ignore the flag completely. */
13996 /* Fortran standard define C_PTR type that is compatible with every
13997 C pointer. For this reason we need to glob all pointers into one.
13998 Still pointers in different address spaces are not compatible. */
13999 if (POINTER_TYPE_P (t1
))
14001 if (TYPE_ADDR_SPACE (TREE_TYPE (t1
))
14002 != TYPE_ADDR_SPACE (TREE_TYPE (t2
)))
14006 /* Tail-recurse to components. */
14007 if (TREE_CODE (t1
) == VECTOR_TYPE
14008 || TREE_CODE (t1
) == COMPLEX_TYPE
)
14009 return gimple_canonical_types_compatible_p (TREE_TYPE (t1
),
14011 trust_type_canonical
);
14016 /* Do type-specific comparisons. */
14017 switch (TREE_CODE (t1
))
14020 /* Array types are the same if the element types are the same and
14021 the number of elements are the same. */
14022 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1
), TREE_TYPE (t2
),
14023 trust_type_canonical
)
14024 || TYPE_STRING_FLAG (t1
) != TYPE_STRING_FLAG (t2
)
14025 || TYPE_REVERSE_STORAGE_ORDER (t1
) != TYPE_REVERSE_STORAGE_ORDER (t2
)
14026 || TYPE_NONALIASED_COMPONENT (t1
) != TYPE_NONALIASED_COMPONENT (t2
))
14030 tree i1
= TYPE_DOMAIN (t1
);
14031 tree i2
= TYPE_DOMAIN (t2
);
14033 /* For an incomplete external array, the type domain can be
14034 NULL_TREE. Check this condition also. */
14035 if (i1
== NULL_TREE
&& i2
== NULL_TREE
)
14037 else if (i1
== NULL_TREE
|| i2
== NULL_TREE
)
14041 tree min1
= TYPE_MIN_VALUE (i1
);
14042 tree min2
= TYPE_MIN_VALUE (i2
);
14043 tree max1
= TYPE_MAX_VALUE (i1
);
14044 tree max2
= TYPE_MAX_VALUE (i2
);
14046 /* The minimum/maximum values have to be the same. */
14049 && ((TREE_CODE (min1
) == PLACEHOLDER_EXPR
14050 && TREE_CODE (min2
) == PLACEHOLDER_EXPR
)
14051 || operand_equal_p (min1
, min2
, 0))))
14054 && ((TREE_CODE (max1
) == PLACEHOLDER_EXPR
14055 && TREE_CODE (max2
) == PLACEHOLDER_EXPR
)
14056 || operand_equal_p (max1
, max2
, 0)))))
14064 case FUNCTION_TYPE
:
14065 /* Function types are the same if the return type and arguments types
14067 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1
), TREE_TYPE (t2
),
14068 trust_type_canonical
))
14071 if (TYPE_ARG_TYPES (t1
) == TYPE_ARG_TYPES (t2
))
14075 tree parms1
, parms2
;
14077 for (parms1
= TYPE_ARG_TYPES (t1
), parms2
= TYPE_ARG_TYPES (t2
);
14079 parms1
= TREE_CHAIN (parms1
), parms2
= TREE_CHAIN (parms2
))
14081 if (!gimple_canonical_types_compatible_p
14082 (TREE_VALUE (parms1
), TREE_VALUE (parms2
),
14083 trust_type_canonical
))
14087 if (parms1
|| parms2
)
14095 case QUAL_UNION_TYPE
:
14099 /* Don't try to compare variants of an incomplete type, before
14100 TYPE_FIELDS has been copied around. */
14101 if (!COMPLETE_TYPE_P (t1
) && !COMPLETE_TYPE_P (t2
))
14105 if (TYPE_REVERSE_STORAGE_ORDER (t1
) != TYPE_REVERSE_STORAGE_ORDER (t2
))
14108 /* For aggregate types, all the fields must be the same. */
14109 for (f1
= TYPE_FIELDS (t1
), f2
= TYPE_FIELDS (t2
);
14111 f1
= TREE_CHAIN (f1
), f2
= TREE_CHAIN (f2
))
14113 /* Skip non-fields and zero-sized fields. */
14114 while (f1
&& (TREE_CODE (f1
) != FIELD_DECL
14116 && integer_zerop (DECL_SIZE (f1
)))))
14117 f1
= TREE_CHAIN (f1
);
14118 while (f2
&& (TREE_CODE (f2
) != FIELD_DECL
14120 && integer_zerop (DECL_SIZE (f2
)))))
14121 f2
= TREE_CHAIN (f2
);
14124 /* The fields must have the same name, offset and type. */
14125 if (DECL_NONADDRESSABLE_P (f1
) != DECL_NONADDRESSABLE_P (f2
)
14126 || !gimple_compare_field_offset (f1
, f2
)
14127 || !gimple_canonical_types_compatible_p
14128 (TREE_TYPE (f1
), TREE_TYPE (f2
),
14129 trust_type_canonical
))
14133 /* If one aggregate has more fields than the other, they
14134 are not the same. */
14142 /* Consider all types with language specific trees in them mutually
14143 compatible. This is executed only from verify_type and false
14144 positives can be tolerated. */
14145 gcc_assert (!in_lto_p
);
14150 /* Verify type T. */
14153 verify_type (const_tree t
)
14155 bool error_found
= false;
14156 tree mv
= TYPE_MAIN_VARIANT (t
);
14159 error ("main variant is not defined");
14160 error_found
= true;
14162 else if (mv
!= TYPE_MAIN_VARIANT (mv
))
14164 error ("%<TYPE_MAIN_VARIANT%> has different %<TYPE_MAIN_VARIANT%>");
14166 error_found
= true;
14168 else if (t
!= mv
&& !verify_type_variant (t
, mv
))
14169 error_found
= true;
14171 tree ct
= TYPE_CANONICAL (t
);
14174 else if (TYPE_CANONICAL (t
) != ct
)
14176 error ("%<TYPE_CANONICAL%> has different %<TYPE_CANONICAL%>");
14178 error_found
= true;
14180 /* Method and function types cannot be used to address memory and thus
14181 TYPE_CANONICAL really matters only for determining useless conversions.
14183 FIXME: C++ FE produce declarations of builtin functions that are not
14184 compatible with main variants. */
14185 else if (TREE_CODE (t
) == FUNCTION_TYPE
)
14188 /* FIXME: gimple_canonical_types_compatible_p cannot compare types
14189 with variably sized arrays because their sizes possibly
14190 gimplified to different variables. */
14191 && !variably_modified_type_p (ct
, NULL
)
14192 && !gimple_canonical_types_compatible_p (t
, ct
, false)
14193 && COMPLETE_TYPE_P (t
))
14195 error ("%<TYPE_CANONICAL%> is not compatible");
14197 error_found
= true;
14200 if (COMPLETE_TYPE_P (t
) && TYPE_CANONICAL (t
)
14201 && TYPE_MODE (t
) != TYPE_MODE (TYPE_CANONICAL (t
)))
14203 error ("%<TYPE_MODE%> of %<TYPE_CANONICAL%> is not compatible");
14205 error_found
= true;
14207 if (TYPE_MAIN_VARIANT (t
) == t
&& ct
&& TYPE_MAIN_VARIANT (ct
) != ct
)
14209 error ("%<TYPE_CANONICAL%> of main variant is not main variant");
14211 debug_tree (TYPE_MAIN_VARIANT (ct
));
14212 error_found
= true;
14216 /* Check various uses of TYPE_MIN_VALUE_RAW. */
14217 if (RECORD_OR_UNION_TYPE_P (t
))
14219 /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
14220 and danagle the pointer from time to time. */
14221 if (TYPE_VFIELD (t
)
14222 && TREE_CODE (TYPE_VFIELD (t
)) != FIELD_DECL
14223 && TREE_CODE (TYPE_VFIELD (t
)) != TREE_LIST
)
14225 error ("%<TYPE_VFIELD%> is not %<FIELD_DECL%> nor %<TREE_LIST%>");
14226 debug_tree (TYPE_VFIELD (t
));
14227 error_found
= true;
14230 else if (TREE_CODE (t
) == POINTER_TYPE
)
14232 if (TYPE_NEXT_PTR_TO (t
)
14233 && TREE_CODE (TYPE_NEXT_PTR_TO (t
)) != POINTER_TYPE
)
14235 error ("%<TYPE_NEXT_PTR_TO%> is not %<POINTER_TYPE%>");
14236 debug_tree (TYPE_NEXT_PTR_TO (t
));
14237 error_found
= true;
14240 else if (TREE_CODE (t
) == REFERENCE_TYPE
)
14242 if (TYPE_NEXT_REF_TO (t
)
14243 && TREE_CODE (TYPE_NEXT_REF_TO (t
)) != REFERENCE_TYPE
)
14245 error ("%<TYPE_NEXT_REF_TO%> is not %<REFERENCE_TYPE%>");
14246 debug_tree (TYPE_NEXT_REF_TO (t
));
14247 error_found
= true;
14250 else if (INTEGRAL_TYPE_P (t
) || TREE_CODE (t
) == REAL_TYPE
14251 || TREE_CODE (t
) == FIXED_POINT_TYPE
)
14253 /* FIXME: The following check should pass:
14254 useless_type_conversion_p (const_cast <tree> (t),
14255 TREE_TYPE (TYPE_MIN_VALUE (t))
14256 but does not for C sizetypes in LTO. */
14259 /* Check various uses of TYPE_MAXVAL_RAW. */
14260 if (RECORD_OR_UNION_TYPE_P (t
))
14262 if (!TYPE_BINFO (t
))
14264 else if (TREE_CODE (TYPE_BINFO (t
)) != TREE_BINFO
)
14266 error ("%<TYPE_BINFO%> is not %<TREE_BINFO%>");
14267 debug_tree (TYPE_BINFO (t
));
14268 error_found
= true;
14270 else if (TREE_TYPE (TYPE_BINFO (t
)) != TYPE_MAIN_VARIANT (t
))
14272 error ("%<TYPE_BINFO%> type is not %<TYPE_MAIN_VARIANT%>");
14273 debug_tree (TREE_TYPE (TYPE_BINFO (t
)));
14274 error_found
= true;
14277 else if (TREE_CODE (t
) == FUNCTION_TYPE
|| TREE_CODE (t
) == METHOD_TYPE
)
14279 if (TYPE_METHOD_BASETYPE (t
)
14280 && TREE_CODE (TYPE_METHOD_BASETYPE (t
)) != RECORD_TYPE
14281 && TREE_CODE (TYPE_METHOD_BASETYPE (t
)) != UNION_TYPE
)
14283 error ("%<TYPE_METHOD_BASETYPE%> is not record nor union");
14284 debug_tree (TYPE_METHOD_BASETYPE (t
));
14285 error_found
= true;
14288 else if (TREE_CODE (t
) == OFFSET_TYPE
)
14290 if (TYPE_OFFSET_BASETYPE (t
)
14291 && TREE_CODE (TYPE_OFFSET_BASETYPE (t
)) != RECORD_TYPE
14292 && TREE_CODE (TYPE_OFFSET_BASETYPE (t
)) != UNION_TYPE
)
14294 error ("%<TYPE_OFFSET_BASETYPE%> is not record nor union");
14295 debug_tree (TYPE_OFFSET_BASETYPE (t
));
14296 error_found
= true;
14299 else if (INTEGRAL_TYPE_P (t
) || TREE_CODE (t
) == REAL_TYPE
14300 || TREE_CODE (t
) == FIXED_POINT_TYPE
)
14302 /* FIXME: The following check should pass:
14303 useless_type_conversion_p (const_cast <tree> (t),
14304 TREE_TYPE (TYPE_MAX_VALUE (t))
14305 but does not for C sizetypes in LTO. */
14307 else if (TREE_CODE (t
) == ARRAY_TYPE
)
14309 if (TYPE_ARRAY_MAX_SIZE (t
)
14310 && TREE_CODE (TYPE_ARRAY_MAX_SIZE (t
)) != INTEGER_CST
)
14312 error ("%<TYPE_ARRAY_MAX_SIZE%> not %<INTEGER_CST%>");
14313 debug_tree (TYPE_ARRAY_MAX_SIZE (t
));
14314 error_found
= true;
14317 else if (TYPE_MAX_VALUE_RAW (t
))
14319 error ("%<TYPE_MAX_VALUE_RAW%> non-NULL");
14320 debug_tree (TYPE_MAX_VALUE_RAW (t
));
14321 error_found
= true;
14324 if (TYPE_LANG_SLOT_1 (t
) && in_lto_p
)
14326 error ("%<TYPE_LANG_SLOT_1 (binfo)%> field is non-NULL");
14327 debug_tree (TYPE_LANG_SLOT_1 (t
));
14328 error_found
= true;
14331 /* Check various uses of TYPE_VALUES_RAW. */
14332 if (TREE_CODE (t
) == ENUMERAL_TYPE
)
14333 for (tree l
= TYPE_VALUES (t
); l
; l
= TREE_CHAIN (l
))
14335 tree value
= TREE_VALUE (l
);
14336 tree name
= TREE_PURPOSE (l
);
14338 /* C FE porduce INTEGER_CST of INTEGER_TYPE, while C++ FE uses
14339 CONST_DECL of ENUMERAL TYPE. */
14340 if (TREE_CODE (value
) != INTEGER_CST
&& TREE_CODE (value
) != CONST_DECL
)
14342 error ("enum value is not %<CONST_DECL%> or %<INTEGER_CST%>");
14343 debug_tree (value
);
14345 error_found
= true;
14347 if (TREE_CODE (TREE_TYPE (value
)) != INTEGER_TYPE
14348 && !useless_type_conversion_p (const_cast <tree
> (t
), TREE_TYPE (value
)))
14350 error ("enum value type is not %<INTEGER_TYPE%> nor convertible "
14352 debug_tree (value
);
14354 error_found
= true;
14356 if (TREE_CODE (name
) != IDENTIFIER_NODE
)
14358 error ("enum value name is not %<IDENTIFIER_NODE%>");
14359 debug_tree (value
);
14361 error_found
= true;
14364 else if (TREE_CODE (t
) == ARRAY_TYPE
)
14366 if (TYPE_DOMAIN (t
) && TREE_CODE (TYPE_DOMAIN (t
)) != INTEGER_TYPE
)
14368 error ("array %<TYPE_DOMAIN%> is not integer type");
14369 debug_tree (TYPE_DOMAIN (t
));
14370 error_found
= true;
14373 else if (RECORD_OR_UNION_TYPE_P (t
))
14375 if (TYPE_FIELDS (t
) && !COMPLETE_TYPE_P (t
) && in_lto_p
)
14377 error ("%<TYPE_FIELDS%> defined in incomplete type");
14378 error_found
= true;
14380 for (tree fld
= TYPE_FIELDS (t
); fld
; fld
= TREE_CHAIN (fld
))
14382 /* TODO: verify properties of decls. */
14383 if (TREE_CODE (fld
) == FIELD_DECL
)
14385 else if (TREE_CODE (fld
) == TYPE_DECL
)
14387 else if (TREE_CODE (fld
) == CONST_DECL
)
14389 else if (VAR_P (fld
))
14391 else if (TREE_CODE (fld
) == TEMPLATE_DECL
)
14393 else if (TREE_CODE (fld
) == USING_DECL
)
14395 else if (TREE_CODE (fld
) == FUNCTION_DECL
)
14399 error ("wrong tree in %<TYPE_FIELDS%> list");
14401 error_found
= true;
14405 else if (TREE_CODE (t
) == INTEGER_TYPE
14406 || TREE_CODE (t
) == BOOLEAN_TYPE
14407 || TREE_CODE (t
) == OFFSET_TYPE
14408 || TREE_CODE (t
) == REFERENCE_TYPE
14409 || TREE_CODE (t
) == NULLPTR_TYPE
14410 || TREE_CODE (t
) == POINTER_TYPE
)
14412 if (TYPE_CACHED_VALUES_P (t
) != (TYPE_CACHED_VALUES (t
) != NULL
))
14414 error ("%<TYPE_CACHED_VALUES_P%> is %i while %<TYPE_CACHED_VALUES%> "
14416 TYPE_CACHED_VALUES_P (t
), (void *)TYPE_CACHED_VALUES (t
));
14417 error_found
= true;
14419 else if (TYPE_CACHED_VALUES_P (t
) && TREE_CODE (TYPE_CACHED_VALUES (t
)) != TREE_VEC
)
14421 error ("%<TYPE_CACHED_VALUES%> is not %<TREE_VEC%>");
14422 debug_tree (TYPE_CACHED_VALUES (t
));
14423 error_found
= true;
14425 /* Verify just enough of cache to ensure that no one copied it to new type.
14426 All copying should go by copy_node that should clear it. */
14427 else if (TYPE_CACHED_VALUES_P (t
))
14430 for (i
= 0; i
< TREE_VEC_LENGTH (TYPE_CACHED_VALUES (t
)); i
++)
14431 if (TREE_VEC_ELT (TYPE_CACHED_VALUES (t
), i
)
14432 && TREE_TYPE (TREE_VEC_ELT (TYPE_CACHED_VALUES (t
), i
)) != t
)
14434 error ("wrong %<TYPE_CACHED_VALUES%> entry");
14435 debug_tree (TREE_VEC_ELT (TYPE_CACHED_VALUES (t
), i
));
14436 error_found
= true;
14441 else if (TREE_CODE (t
) == FUNCTION_TYPE
|| TREE_CODE (t
) == METHOD_TYPE
)
14442 for (tree l
= TYPE_ARG_TYPES (t
); l
; l
= TREE_CHAIN (l
))
14444 /* C++ FE uses TREE_PURPOSE to store initial values. */
14445 if (TREE_PURPOSE (l
) && in_lto_p
)
14447 error ("%<TREE_PURPOSE%> is non-NULL in %<TYPE_ARG_TYPES%> list");
14449 error_found
= true;
14451 if (!TYPE_P (TREE_VALUE (l
)))
14453 error ("wrong entry in %<TYPE_ARG_TYPES%> list");
14455 error_found
= true;
14458 else if (!is_lang_specific (t
) && TYPE_VALUES_RAW (t
))
14460 error ("%<TYPE_VALUES_RAW%> field is non-NULL");
14461 debug_tree (TYPE_VALUES_RAW (t
));
14462 error_found
= true;
14464 if (TREE_CODE (t
) != INTEGER_TYPE
14465 && TREE_CODE (t
) != BOOLEAN_TYPE
14466 && TREE_CODE (t
) != OFFSET_TYPE
14467 && TREE_CODE (t
) != REFERENCE_TYPE
14468 && TREE_CODE (t
) != NULLPTR_TYPE
14469 && TREE_CODE (t
) != POINTER_TYPE
14470 && TYPE_CACHED_VALUES_P (t
))
14472 error ("%<TYPE_CACHED_VALUES_P%> is set while it should not be");
14473 error_found
= true;
14476 /* ipa-devirt makes an assumption that TYPE_METHOD_BASETYPE is always
14477 TYPE_MAIN_VARIANT and it would be odd to add methods only to variatns
14479 if (TREE_CODE (t
) == METHOD_TYPE
14480 && TYPE_MAIN_VARIANT (TYPE_METHOD_BASETYPE (t
)) != TYPE_METHOD_BASETYPE (t
))
14482 error ("%<TYPE_METHOD_BASETYPE%> is not main variant");
14483 error_found
= true;
14488 debug_tree (const_cast <tree
> (t
));
14489 internal_error ("%qs failed", __func__
);
14494 /* Return 1 if ARG interpreted as signed in its precision is known to be
14495 always positive or 2 if ARG is known to be always negative, or 3 if
14496 ARG may be positive or negative. */
14499 get_range_pos_neg (tree arg
)
14501 if (arg
== error_mark_node
)
14504 int prec
= TYPE_PRECISION (TREE_TYPE (arg
));
14506 if (TREE_CODE (arg
) == INTEGER_CST
)
14508 wide_int w
= wi::sext (wi::to_wide (arg
), prec
);
14514 while (CONVERT_EXPR_P (arg
)
14515 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg
, 0)))
14516 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg
, 0))) <= prec
)
14518 arg
= TREE_OPERAND (arg
, 0);
14519 /* Narrower value zero extended into wider type
14520 will always result in positive values. */
14521 if (TYPE_UNSIGNED (TREE_TYPE (arg
))
14522 && TYPE_PRECISION (TREE_TYPE (arg
)) < prec
)
14524 prec
= TYPE_PRECISION (TREE_TYPE (arg
));
14529 if (TREE_CODE (arg
) != SSA_NAME
)
14531 wide_int arg_min
, arg_max
;
14532 while (get_range_info (arg
, &arg_min
, &arg_max
) != VR_RANGE
)
14534 gimple
*g
= SSA_NAME_DEF_STMT (arg
);
14535 if (is_gimple_assign (g
)
14536 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (g
)))
14538 tree t
= gimple_assign_rhs1 (g
);
14539 if (INTEGRAL_TYPE_P (TREE_TYPE (t
))
14540 && TYPE_PRECISION (TREE_TYPE (t
)) <= prec
)
14542 if (TYPE_UNSIGNED (TREE_TYPE (t
))
14543 && TYPE_PRECISION (TREE_TYPE (t
)) < prec
)
14545 prec
= TYPE_PRECISION (TREE_TYPE (t
));
14554 if (TYPE_UNSIGNED (TREE_TYPE (arg
)))
14556 /* For unsigned values, the "positive" range comes
14557 below the "negative" range. */
14558 if (!wi::neg_p (wi::sext (arg_max
, prec
), SIGNED
))
14560 if (wi::neg_p (wi::sext (arg_min
, prec
), SIGNED
))
14565 if (!wi::neg_p (wi::sext (arg_min
, prec
), SIGNED
))
14567 if (wi::neg_p (wi::sext (arg_max
, prec
), SIGNED
))
14576 /* Return true if ARG is marked with the nonnull attribute in the
14577 current function signature. */
14580 nonnull_arg_p (const_tree arg
)
14582 tree t
, attrs
, fntype
;
14583 unsigned HOST_WIDE_INT arg_num
;
14585 gcc_assert (TREE_CODE (arg
) == PARM_DECL
14586 && (POINTER_TYPE_P (TREE_TYPE (arg
))
14587 || TREE_CODE (TREE_TYPE (arg
)) == OFFSET_TYPE
));
14589 /* The static chain decl is always non null. */
14590 if (arg
== cfun
->static_chain_decl
)
14593 /* THIS argument of method is always non-NULL. */
14594 if (TREE_CODE (TREE_TYPE (cfun
->decl
)) == METHOD_TYPE
14595 && arg
== DECL_ARGUMENTS (cfun
->decl
)
14596 && flag_delete_null_pointer_checks
)
14599 /* Values passed by reference are always non-NULL. */
14600 if (TREE_CODE (TREE_TYPE (arg
)) == REFERENCE_TYPE
14601 && flag_delete_null_pointer_checks
)
14604 fntype
= TREE_TYPE (cfun
->decl
);
14605 for (attrs
= TYPE_ATTRIBUTES (fntype
); attrs
; attrs
= TREE_CHAIN (attrs
))
14607 attrs
= lookup_attribute ("nonnull", attrs
);
14609 /* If "nonnull" wasn't specified, we know nothing about the argument. */
14610 if (attrs
== NULL_TREE
)
14613 /* If "nonnull" applies to all the arguments, then ARG is non-null. */
14614 if (TREE_VALUE (attrs
) == NULL_TREE
)
14617 /* Get the position number for ARG in the function signature. */
14618 for (arg_num
= 1, t
= DECL_ARGUMENTS (cfun
->decl
);
14620 t
= DECL_CHAIN (t
), arg_num
++)
14626 gcc_assert (t
== arg
);
14628 /* Now see if ARG_NUM is mentioned in the nonnull list. */
14629 for (t
= TREE_VALUE (attrs
); t
; t
= TREE_CHAIN (t
))
14631 if (compare_tree_int (TREE_VALUE (t
), arg_num
) == 0)
14639 /* Combine LOC and BLOCK to a combined adhoc loc, retaining any range
14643 set_block (location_t loc
, tree block
)
14645 location_t pure_loc
= get_pure_location (loc
);
14646 source_range src_range
= get_range_from_loc (line_table
, loc
);
14647 return COMBINE_LOCATION_DATA (line_table
, pure_loc
, src_range
, block
);
14651 set_source_range (tree expr
, location_t start
, location_t finish
)
14653 source_range src_range
;
14654 src_range
.m_start
= start
;
14655 src_range
.m_finish
= finish
;
14656 return set_source_range (expr
, src_range
);
14660 set_source_range (tree expr
, source_range src_range
)
14662 if (!EXPR_P (expr
))
14663 return UNKNOWN_LOCATION
;
14665 location_t pure_loc
= get_pure_location (EXPR_LOCATION (expr
));
14666 location_t adhoc
= COMBINE_LOCATION_DATA (line_table
,
14670 SET_EXPR_LOCATION (expr
, adhoc
);
14674 /* Return EXPR, potentially wrapped with a node expression LOC,
14675 if !CAN_HAVE_LOCATION_P (expr).
14677 NON_LVALUE_EXPR is used for wrapping constants, apart from STRING_CST.
14678 VIEW_CONVERT_EXPR is used for wrapping non-constants and STRING_CST.
14680 Wrapper nodes can be identified using location_wrapper_p. */
14683 maybe_wrap_with_location (tree expr
, location_t loc
)
14687 if (loc
== UNKNOWN_LOCATION
)
14689 if (CAN_HAVE_LOCATION_P (expr
))
14691 /* We should only be adding wrappers for constants and for decls,
14692 or for some exceptional tree nodes (e.g. BASELINK in the C++ FE). */
14693 gcc_assert (CONSTANT_CLASS_P (expr
)
14695 || EXCEPTIONAL_CLASS_P (expr
));
14697 /* For now, don't add wrappers to exceptional tree nodes, to minimize
14698 any impact of the wrapper nodes. */
14699 if (EXCEPTIONAL_CLASS_P (expr
))
14702 /* If any auto_suppress_location_wrappers are active, don't create
14704 if (suppress_location_wrappers
> 0)
14708 = (((CONSTANT_CLASS_P (expr
) && TREE_CODE (expr
) != STRING_CST
)
14709 || (TREE_CODE (expr
) == CONST_DECL
&& !TREE_STATIC (expr
)))
14710 ? NON_LVALUE_EXPR
: VIEW_CONVERT_EXPR
);
14711 tree wrapper
= build1_loc (loc
, code
, TREE_TYPE (expr
), expr
);
14712 /* Mark this node as being a wrapper. */
14713 EXPR_LOCATION_WRAPPER_P (wrapper
) = 1;
14717 int suppress_location_wrappers
;
14719 /* Return the name of combined function FN, for debugging purposes. */
14722 combined_fn_name (combined_fn fn
)
14724 if (builtin_fn_p (fn
))
14726 tree fndecl
= builtin_decl_explicit (as_builtin_fn (fn
));
14727 return IDENTIFIER_POINTER (DECL_NAME (fndecl
));
14730 return internal_fn_name (as_internal_fn (fn
));
14733 /* Return a bitmap with a bit set corresponding to each argument in
14734 a function call type FNTYPE declared with attribute nonnull,
14735 or null if none of the function's argument are nonnull. The caller
14736 must free the bitmap. */
14739 get_nonnull_args (const_tree fntype
)
14741 if (fntype
== NULL_TREE
)
14744 tree attrs
= TYPE_ATTRIBUTES (fntype
);
14748 bitmap argmap
= NULL
;
14750 /* A function declaration can specify multiple attribute nonnull,
14751 each with zero or more arguments. The loop below creates a bitmap
14752 representing a union of all the arguments. An empty (but non-null)
14753 bitmap means that all arguments have been declaraed nonnull. */
14754 for ( ; attrs
; attrs
= TREE_CHAIN (attrs
))
14756 attrs
= lookup_attribute ("nonnull", attrs
);
14761 argmap
= BITMAP_ALLOC (NULL
);
14763 if (!TREE_VALUE (attrs
))
14765 /* Clear the bitmap in case a previous attribute nonnull
14766 set it and this one overrides it for all arguments. */
14767 bitmap_clear (argmap
);
14771 /* Iterate over the indices of the format arguments declared nonnull
14772 and set a bit for each. */
14773 for (tree idx
= TREE_VALUE (attrs
); idx
; idx
= TREE_CHAIN (idx
))
14775 unsigned int val
= TREE_INT_CST_LOW (TREE_VALUE (idx
)) - 1;
14776 bitmap_set_bit (argmap
, val
);
14783 /* Returns true if TYPE is a type where it and all of its subobjects
14784 (recursively) are of structure, union, or array type. */
14787 default_is_empty_type (tree type
)
14789 if (RECORD_OR_UNION_TYPE_P (type
))
14791 for (tree field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
14792 if (TREE_CODE (field
) == FIELD_DECL
14793 && !DECL_PADDING_P (field
)
14794 && !default_is_empty_type (TREE_TYPE (field
)))
14798 else if (TREE_CODE (type
) == ARRAY_TYPE
)
14799 return (integer_minus_onep (array_type_nelts (type
))
14800 || TYPE_DOMAIN (type
) == NULL_TREE
14801 || default_is_empty_type (TREE_TYPE (type
)));
14805 /* Implement TARGET_EMPTY_RECORD_P. Return true if TYPE is an empty type
14806 that shouldn't be passed via stack. */
14809 default_is_empty_record (const_tree type
)
14811 if (!abi_version_at_least (12))
14814 if (type
== error_mark_node
)
14817 if (TREE_ADDRESSABLE (type
))
14820 return default_is_empty_type (TYPE_MAIN_VARIANT (type
));
14823 /* Like int_size_in_bytes, but handle empty records specially. */
14826 arg_int_size_in_bytes (const_tree type
)
14828 return TYPE_EMPTY_P (type
) ? 0 : int_size_in_bytes (type
);
14831 /* Like size_in_bytes, but handle empty records specially. */
14834 arg_size_in_bytes (const_tree type
)
14836 return TYPE_EMPTY_P (type
) ? size_zero_node
: size_in_bytes (type
);
14839 /* Return true if an expression with CODE has to have the same result type as
14840 its first operand. */
14843 expr_type_first_operand_type_p (tree_code code
)
14856 case TRUNC_DIV_EXPR
:
14857 case CEIL_DIV_EXPR
:
14858 case FLOOR_DIV_EXPR
:
14859 case ROUND_DIV_EXPR
:
14860 case TRUNC_MOD_EXPR
:
14861 case CEIL_MOD_EXPR
:
14862 case FLOOR_MOD_EXPR
:
14863 case ROUND_MOD_EXPR
:
14865 case EXACT_DIV_EXPR
:
14883 /* Return a typenode for the "standard" C type with a given name. */
14885 get_typenode_from_name (const char *name
)
14887 if (name
== NULL
|| *name
== '\0')
14890 if (strcmp (name
, "char") == 0)
14891 return char_type_node
;
14892 if (strcmp (name
, "unsigned char") == 0)
14893 return unsigned_char_type_node
;
14894 if (strcmp (name
, "signed char") == 0)
14895 return signed_char_type_node
;
14897 if (strcmp (name
, "short int") == 0)
14898 return short_integer_type_node
;
14899 if (strcmp (name
, "short unsigned int") == 0)
14900 return short_unsigned_type_node
;
14902 if (strcmp (name
, "int") == 0)
14903 return integer_type_node
;
14904 if (strcmp (name
, "unsigned int") == 0)
14905 return unsigned_type_node
;
14907 if (strcmp (name
, "long int") == 0)
14908 return long_integer_type_node
;
14909 if (strcmp (name
, "long unsigned int") == 0)
14910 return long_unsigned_type_node
;
14912 if (strcmp (name
, "long long int") == 0)
14913 return long_long_integer_type_node
;
14914 if (strcmp (name
, "long long unsigned int") == 0)
14915 return long_long_unsigned_type_node
;
14917 gcc_unreachable ();
14920 /* List of pointer types used to declare builtins before we have seen their
14923 Keep the size up to date in tree.h ! */
14924 const builtin_structptr_type builtin_structptr_types
[6] =
14926 { fileptr_type_node
, ptr_type_node
, "FILE" },
14927 { const_tm_ptr_type_node
, const_ptr_type_node
, "tm" },
14928 { fenv_t_ptr_type_node
, ptr_type_node
, "fenv_t" },
14929 { const_fenv_t_ptr_type_node
, const_ptr_type_node
, "fenv_t" },
14930 { fexcept_t_ptr_type_node
, ptr_type_node
, "fexcept_t" },
14931 { const_fexcept_t_ptr_type_node
, const_ptr_type_node
, "fexcept_t" }
14934 /* Return the maximum object size. */
14937 max_object_size (void)
14939 /* To do: Make this a configurable parameter. */
14940 return TYPE_MAX_VALUE (ptrdiff_type_node
);
14945 namespace selftest
{
14947 /* Selftests for tree. */
14949 /* Verify that integer constants are sane. */
14952 test_integer_constants ()
14954 ASSERT_TRUE (integer_type_node
!= NULL
);
14955 ASSERT_TRUE (build_int_cst (integer_type_node
, 0) != NULL
);
14957 tree type
= integer_type_node
;
14959 tree zero
= build_zero_cst (type
);
14960 ASSERT_EQ (INTEGER_CST
, TREE_CODE (zero
));
14961 ASSERT_EQ (type
, TREE_TYPE (zero
));
14963 tree one
= build_int_cst (type
, 1);
14964 ASSERT_EQ (INTEGER_CST
, TREE_CODE (one
));
14965 ASSERT_EQ (type
, TREE_TYPE (zero
));
14968 /* Verify identifiers. */
14971 test_identifiers ()
14973 tree identifier
= get_identifier ("foo");
14974 ASSERT_EQ (3, IDENTIFIER_LENGTH (identifier
));
14975 ASSERT_STREQ ("foo", IDENTIFIER_POINTER (identifier
));
14978 /* Verify LABEL_DECL. */
14983 tree identifier
= get_identifier ("err");
14984 tree label_decl
= build_decl (UNKNOWN_LOCATION
, LABEL_DECL
,
14985 identifier
, void_type_node
);
14986 ASSERT_EQ (-1, LABEL_DECL_UID (label_decl
));
14987 ASSERT_FALSE (FORCED_LABEL (label_decl
));
14990 /* Return a new VECTOR_CST node whose type is TYPE and whose values
14991 are given by VALS. */
14994 build_vector (tree type
, vec
<tree
> vals MEM_STAT_DECL
)
14996 gcc_assert (known_eq (vals
.length (), TYPE_VECTOR_SUBPARTS (type
)));
14997 tree_vector_builder
builder (type
, vals
.length (), 1);
14998 builder
.splice (vals
);
14999 return builder
.build ();
15002 /* Check that VECTOR_CST ACTUAL contains the elements in EXPECTED. */
15005 check_vector_cst (vec
<tree
> expected
, tree actual
)
15007 ASSERT_KNOWN_EQ (expected
.length (),
15008 TYPE_VECTOR_SUBPARTS (TREE_TYPE (actual
)));
15009 for (unsigned int i
= 0; i
< expected
.length (); ++i
)
15010 ASSERT_EQ (wi::to_wide (expected
[i
]),
15011 wi::to_wide (vector_cst_elt (actual
, i
)));
15014 /* Check that VECTOR_CST ACTUAL contains NPATTERNS duplicated elements,
15015 and that its elements match EXPECTED. */
15018 check_vector_cst_duplicate (vec
<tree
> expected
, tree actual
,
15019 unsigned int npatterns
)
15021 ASSERT_EQ (npatterns
, VECTOR_CST_NPATTERNS (actual
));
15022 ASSERT_EQ (1, VECTOR_CST_NELTS_PER_PATTERN (actual
));
15023 ASSERT_EQ (npatterns
, vector_cst_encoded_nelts (actual
));
15024 ASSERT_TRUE (VECTOR_CST_DUPLICATE_P (actual
));
15025 ASSERT_FALSE (VECTOR_CST_STEPPED_P (actual
));
15026 check_vector_cst (expected
, actual
);
15029 /* Check that VECTOR_CST ACTUAL contains NPATTERNS foreground elements
15030 and NPATTERNS background elements, and that its elements match
15034 check_vector_cst_fill (vec
<tree
> expected
, tree actual
,
15035 unsigned int npatterns
)
15037 ASSERT_EQ (npatterns
, VECTOR_CST_NPATTERNS (actual
));
15038 ASSERT_EQ (2, VECTOR_CST_NELTS_PER_PATTERN (actual
));
15039 ASSERT_EQ (2 * npatterns
, vector_cst_encoded_nelts (actual
));
15040 ASSERT_FALSE (VECTOR_CST_DUPLICATE_P (actual
));
15041 ASSERT_FALSE (VECTOR_CST_STEPPED_P (actual
));
15042 check_vector_cst (expected
, actual
);
15045 /* Check that VECTOR_CST ACTUAL contains NPATTERNS stepped patterns,
15046 and that its elements match EXPECTED. */
15049 check_vector_cst_stepped (vec
<tree
> expected
, tree actual
,
15050 unsigned int npatterns
)
15052 ASSERT_EQ (npatterns
, VECTOR_CST_NPATTERNS (actual
));
15053 ASSERT_EQ (3, VECTOR_CST_NELTS_PER_PATTERN (actual
));
15054 ASSERT_EQ (3 * npatterns
, vector_cst_encoded_nelts (actual
));
15055 ASSERT_FALSE (VECTOR_CST_DUPLICATE_P (actual
));
15056 ASSERT_TRUE (VECTOR_CST_STEPPED_P (actual
));
15057 check_vector_cst (expected
, actual
);
15060 /* Test the creation of VECTOR_CSTs. */
15063 test_vector_cst_patterns (ALONE_CXX_MEM_STAT_INFO
)
15065 auto_vec
<tree
, 8> elements (8);
15066 elements
.quick_grow (8);
15067 tree element_type
= build_nonstandard_integer_type (16, true);
15068 tree vector_type
= build_vector_type (element_type
, 8);
15070 /* Test a simple linear series with a base of 0 and a step of 1:
15071 { 0, 1, 2, 3, 4, 5, 6, 7 }. */
15072 for (unsigned int i
= 0; i
< 8; ++i
)
15073 elements
[i
] = build_int_cst (element_type
, i
);
15074 tree vector
= build_vector (vector_type
, elements PASS_MEM_STAT
);
15075 check_vector_cst_stepped (elements
, vector
, 1);
15077 /* Try the same with the first element replaced by 100:
15078 { 100, 1, 2, 3, 4, 5, 6, 7 }. */
15079 elements
[0] = build_int_cst (element_type
, 100);
15080 vector
= build_vector (vector_type
, elements PASS_MEM_STAT
);
15081 check_vector_cst_stepped (elements
, vector
, 1);
15083 /* Try a series that wraps around.
15084 { 100, 65531, 65532, 65533, 65534, 65535, 0, 1 }. */
15085 for (unsigned int i
= 1; i
< 8; ++i
)
15086 elements
[i
] = build_int_cst (element_type
, (65530 + i
) & 0xffff);
15087 vector
= build_vector (vector_type
, elements PASS_MEM_STAT
);
15088 check_vector_cst_stepped (elements
, vector
, 1);
15090 /* Try a downward series:
15091 { 100, 79, 78, 77, 76, 75, 75, 73 }. */
15092 for (unsigned int i
= 1; i
< 8; ++i
)
15093 elements
[i
] = build_int_cst (element_type
, 80 - i
);
15094 vector
= build_vector (vector_type
, elements PASS_MEM_STAT
);
15095 check_vector_cst_stepped (elements
, vector
, 1);
15097 /* Try two interleaved series with different bases and steps:
15098 { 100, 53, 66, 206, 62, 212, 58, 218 }. */
15099 elements
[1] = build_int_cst (element_type
, 53);
15100 for (unsigned int i
= 2; i
< 8; i
+= 2)
15102 elements
[i
] = build_int_cst (element_type
, 70 - i
* 2);
15103 elements
[i
+ 1] = build_int_cst (element_type
, 200 + i
* 3);
15105 vector
= build_vector (vector_type
, elements PASS_MEM_STAT
);
15106 check_vector_cst_stepped (elements
, vector
, 2);
15108 /* Try a duplicated value:
15109 { 100, 100, 100, 100, 100, 100, 100, 100 }. */
15110 for (unsigned int i
= 1; i
< 8; ++i
)
15111 elements
[i
] = elements
[0];
15112 vector
= build_vector (vector_type
, elements PASS_MEM_STAT
);
15113 check_vector_cst_duplicate (elements
, vector
, 1);
15115 /* Try an interleaved duplicated value:
15116 { 100, 55, 100, 55, 100, 55, 100, 55 }. */
15117 elements
[1] = build_int_cst (element_type
, 55);
15118 for (unsigned int i
= 2; i
< 8; ++i
)
15119 elements
[i
] = elements
[i
- 2];
15120 vector
= build_vector (vector_type
, elements PASS_MEM_STAT
);
15121 check_vector_cst_duplicate (elements
, vector
, 2);
15123 /* Try a duplicated value with 2 exceptions
15124 { 41, 97, 100, 55, 100, 55, 100, 55 }. */
15125 elements
[0] = build_int_cst (element_type
, 41);
15126 elements
[1] = build_int_cst (element_type
, 97);
15127 vector
= build_vector (vector_type
, elements PASS_MEM_STAT
);
15128 check_vector_cst_fill (elements
, vector
, 2);
15130 /* Try with and without a step
15131 { 41, 97, 100, 21, 100, 35, 100, 49 }. */
15132 for (unsigned int i
= 3; i
< 8; i
+= 2)
15133 elements
[i
] = build_int_cst (element_type
, i
* 7);
15134 vector
= build_vector (vector_type
, elements PASS_MEM_STAT
);
15135 check_vector_cst_stepped (elements
, vector
, 2);
15137 /* Try a fully-general constant:
15138 { 41, 97, 100, 21, 100, 9990, 100, 49 }. */
15139 elements
[5] = build_int_cst (element_type
, 9990);
15140 vector
= build_vector (vector_type
, elements PASS_MEM_STAT
);
15141 check_vector_cst_fill (elements
, vector
, 4);
15144 /* Verify that STRIP_NOPS (NODE) is EXPECTED.
15145 Helper function for test_location_wrappers, to deal with STRIP_NOPS
15146 modifying its argument in-place. */
15149 check_strip_nops (tree node
, tree expected
)
15152 ASSERT_EQ (expected
, node
);
15155 /* Verify location wrappers. */
15158 test_location_wrappers ()
15160 location_t loc
= BUILTINS_LOCATION
;
15162 ASSERT_EQ (NULL_TREE
, maybe_wrap_with_location (NULL_TREE
, loc
));
15164 /* Wrapping a constant. */
15165 tree int_cst
= build_int_cst (integer_type_node
, 42);
15166 ASSERT_FALSE (CAN_HAVE_LOCATION_P (int_cst
));
15167 ASSERT_FALSE (location_wrapper_p (int_cst
));
15169 tree wrapped_int_cst
= maybe_wrap_with_location (int_cst
, loc
);
15170 ASSERT_TRUE (location_wrapper_p (wrapped_int_cst
));
15171 ASSERT_EQ (loc
, EXPR_LOCATION (wrapped_int_cst
));
15172 ASSERT_EQ (int_cst
, tree_strip_any_location_wrapper (wrapped_int_cst
));
15174 /* We shouldn't add wrapper nodes for UNKNOWN_LOCATION. */
15175 ASSERT_EQ (int_cst
, maybe_wrap_with_location (int_cst
, UNKNOWN_LOCATION
));
15177 /* We shouldn't add wrapper nodes for nodes that CAN_HAVE_LOCATION_P. */
15178 tree cast
= build1 (NOP_EXPR
, char_type_node
, int_cst
);
15179 ASSERT_TRUE (CAN_HAVE_LOCATION_P (cast
));
15180 ASSERT_EQ (cast
, maybe_wrap_with_location (cast
, loc
));
15182 /* Wrapping a STRING_CST. */
15183 tree string_cst
= build_string (4, "foo");
15184 ASSERT_FALSE (CAN_HAVE_LOCATION_P (string_cst
));
15185 ASSERT_FALSE (location_wrapper_p (string_cst
));
15187 tree wrapped_string_cst
= maybe_wrap_with_location (string_cst
, loc
);
15188 ASSERT_TRUE (location_wrapper_p (wrapped_string_cst
));
15189 ASSERT_EQ (VIEW_CONVERT_EXPR
, TREE_CODE (wrapped_string_cst
));
15190 ASSERT_EQ (loc
, EXPR_LOCATION (wrapped_string_cst
));
15191 ASSERT_EQ (string_cst
, tree_strip_any_location_wrapper (wrapped_string_cst
));
15194 /* Wrapping a variable. */
15195 tree int_var
= build_decl (UNKNOWN_LOCATION
, VAR_DECL
,
15196 get_identifier ("some_int_var"),
15197 integer_type_node
);
15198 ASSERT_FALSE (CAN_HAVE_LOCATION_P (int_var
));
15199 ASSERT_FALSE (location_wrapper_p (int_var
));
15201 tree wrapped_int_var
= maybe_wrap_with_location (int_var
, loc
);
15202 ASSERT_TRUE (location_wrapper_p (wrapped_int_var
));
15203 ASSERT_EQ (loc
, EXPR_LOCATION (wrapped_int_var
));
15204 ASSERT_EQ (int_var
, tree_strip_any_location_wrapper (wrapped_int_var
));
15206 /* Verify that "reinterpret_cast<int>(some_int_var)" is not a location
15208 tree r_cast
= build1 (NON_LVALUE_EXPR
, integer_type_node
, int_var
);
15209 ASSERT_FALSE (location_wrapper_p (r_cast
));
15210 ASSERT_EQ (r_cast
, tree_strip_any_location_wrapper (r_cast
));
15212 /* Verify that STRIP_NOPS removes wrappers. */
15213 check_strip_nops (wrapped_int_cst
, int_cst
);
15214 check_strip_nops (wrapped_string_cst
, string_cst
);
15215 check_strip_nops (wrapped_int_var
, int_var
);
15218 /* Test various tree predicates. Verify that location wrappers don't
15219 affect the results. */
15224 /* Build various constants and wrappers around them. */
15226 location_t loc
= BUILTINS_LOCATION
;
15228 tree i_0
= build_int_cst (integer_type_node
, 0);
15229 tree wr_i_0
= maybe_wrap_with_location (i_0
, loc
);
15231 tree i_1
= build_int_cst (integer_type_node
, 1);
15232 tree wr_i_1
= maybe_wrap_with_location (i_1
, loc
);
15234 tree i_m1
= build_int_cst (integer_type_node
, -1);
15235 tree wr_i_m1
= maybe_wrap_with_location (i_m1
, loc
);
15237 tree f_0
= build_real_from_int_cst (float_type_node
, i_0
);
15238 tree wr_f_0
= maybe_wrap_with_location (f_0
, loc
);
15239 tree f_1
= build_real_from_int_cst (float_type_node
, i_1
);
15240 tree wr_f_1
= maybe_wrap_with_location (f_1
, loc
);
15241 tree f_m1
= build_real_from_int_cst (float_type_node
, i_m1
);
15242 tree wr_f_m1
= maybe_wrap_with_location (f_m1
, loc
);
15244 tree c_i_0
= build_complex (NULL_TREE
, i_0
, i_0
);
15245 tree c_i_1
= build_complex (NULL_TREE
, i_1
, i_0
);
15246 tree c_i_m1
= build_complex (NULL_TREE
, i_m1
, i_0
);
15248 tree c_f_0
= build_complex (NULL_TREE
, f_0
, f_0
);
15249 tree c_f_1
= build_complex (NULL_TREE
, f_1
, f_0
);
15250 tree c_f_m1
= build_complex (NULL_TREE
, f_m1
, f_0
);
15252 /* TODO: vector constants. */
15254 /* Test integer_onep. */
15255 ASSERT_FALSE (integer_onep (i_0
));
15256 ASSERT_FALSE (integer_onep (wr_i_0
));
15257 ASSERT_TRUE (integer_onep (i_1
));
15258 ASSERT_TRUE (integer_onep (wr_i_1
));
15259 ASSERT_FALSE (integer_onep (i_m1
));
15260 ASSERT_FALSE (integer_onep (wr_i_m1
));
15261 ASSERT_FALSE (integer_onep (f_0
));
15262 ASSERT_FALSE (integer_onep (wr_f_0
));
15263 ASSERT_FALSE (integer_onep (f_1
));
15264 ASSERT_FALSE (integer_onep (wr_f_1
));
15265 ASSERT_FALSE (integer_onep (f_m1
));
15266 ASSERT_FALSE (integer_onep (wr_f_m1
));
15267 ASSERT_FALSE (integer_onep (c_i_0
));
15268 ASSERT_TRUE (integer_onep (c_i_1
));
15269 ASSERT_FALSE (integer_onep (c_i_m1
));
15270 ASSERT_FALSE (integer_onep (c_f_0
));
15271 ASSERT_FALSE (integer_onep (c_f_1
));
15272 ASSERT_FALSE (integer_onep (c_f_m1
));
15274 /* Test integer_zerop. */
15275 ASSERT_TRUE (integer_zerop (i_0
));
15276 ASSERT_TRUE (integer_zerop (wr_i_0
));
15277 ASSERT_FALSE (integer_zerop (i_1
));
15278 ASSERT_FALSE (integer_zerop (wr_i_1
));
15279 ASSERT_FALSE (integer_zerop (i_m1
));
15280 ASSERT_FALSE (integer_zerop (wr_i_m1
));
15281 ASSERT_FALSE (integer_zerop (f_0
));
15282 ASSERT_FALSE (integer_zerop (wr_f_0
));
15283 ASSERT_FALSE (integer_zerop (f_1
));
15284 ASSERT_FALSE (integer_zerop (wr_f_1
));
15285 ASSERT_FALSE (integer_zerop (f_m1
));
15286 ASSERT_FALSE (integer_zerop (wr_f_m1
));
15287 ASSERT_TRUE (integer_zerop (c_i_0
));
15288 ASSERT_FALSE (integer_zerop (c_i_1
));
15289 ASSERT_FALSE (integer_zerop (c_i_m1
));
15290 ASSERT_FALSE (integer_zerop (c_f_0
));
15291 ASSERT_FALSE (integer_zerop (c_f_1
));
15292 ASSERT_FALSE (integer_zerop (c_f_m1
));
15294 /* Test integer_all_onesp. */
15295 ASSERT_FALSE (integer_all_onesp (i_0
));
15296 ASSERT_FALSE (integer_all_onesp (wr_i_0
));
15297 ASSERT_FALSE (integer_all_onesp (i_1
));
15298 ASSERT_FALSE (integer_all_onesp (wr_i_1
));
15299 ASSERT_TRUE (integer_all_onesp (i_m1
));
15300 ASSERT_TRUE (integer_all_onesp (wr_i_m1
));
15301 ASSERT_FALSE (integer_all_onesp (f_0
));
15302 ASSERT_FALSE (integer_all_onesp (wr_f_0
));
15303 ASSERT_FALSE (integer_all_onesp (f_1
));
15304 ASSERT_FALSE (integer_all_onesp (wr_f_1
));
15305 ASSERT_FALSE (integer_all_onesp (f_m1
));
15306 ASSERT_FALSE (integer_all_onesp (wr_f_m1
));
15307 ASSERT_FALSE (integer_all_onesp (c_i_0
));
15308 ASSERT_FALSE (integer_all_onesp (c_i_1
));
15309 ASSERT_FALSE (integer_all_onesp (c_i_m1
));
15310 ASSERT_FALSE (integer_all_onesp (c_f_0
));
15311 ASSERT_FALSE (integer_all_onesp (c_f_1
));
15312 ASSERT_FALSE (integer_all_onesp (c_f_m1
));
15314 /* Test integer_minus_onep. */
15315 ASSERT_FALSE (integer_minus_onep (i_0
));
15316 ASSERT_FALSE (integer_minus_onep (wr_i_0
));
15317 ASSERT_FALSE (integer_minus_onep (i_1
));
15318 ASSERT_FALSE (integer_minus_onep (wr_i_1
));
15319 ASSERT_TRUE (integer_minus_onep (i_m1
));
15320 ASSERT_TRUE (integer_minus_onep (wr_i_m1
));
15321 ASSERT_FALSE (integer_minus_onep (f_0
));
15322 ASSERT_FALSE (integer_minus_onep (wr_f_0
));
15323 ASSERT_FALSE (integer_minus_onep (f_1
));
15324 ASSERT_FALSE (integer_minus_onep (wr_f_1
));
15325 ASSERT_FALSE (integer_minus_onep (f_m1
));
15326 ASSERT_FALSE (integer_minus_onep (wr_f_m1
));
15327 ASSERT_FALSE (integer_minus_onep (c_i_0
));
15328 ASSERT_FALSE (integer_minus_onep (c_i_1
));
15329 ASSERT_TRUE (integer_minus_onep (c_i_m1
));
15330 ASSERT_FALSE (integer_minus_onep (c_f_0
));
15331 ASSERT_FALSE (integer_minus_onep (c_f_1
));
15332 ASSERT_FALSE (integer_minus_onep (c_f_m1
));
15334 /* Test integer_each_onep. */
15335 ASSERT_FALSE (integer_each_onep (i_0
));
15336 ASSERT_FALSE (integer_each_onep (wr_i_0
));
15337 ASSERT_TRUE (integer_each_onep (i_1
));
15338 ASSERT_TRUE (integer_each_onep (wr_i_1
));
15339 ASSERT_FALSE (integer_each_onep (i_m1
));
15340 ASSERT_FALSE (integer_each_onep (wr_i_m1
));
15341 ASSERT_FALSE (integer_each_onep (f_0
));
15342 ASSERT_FALSE (integer_each_onep (wr_f_0
));
15343 ASSERT_FALSE (integer_each_onep (f_1
));
15344 ASSERT_FALSE (integer_each_onep (wr_f_1
));
15345 ASSERT_FALSE (integer_each_onep (f_m1
));
15346 ASSERT_FALSE (integer_each_onep (wr_f_m1
));
15347 ASSERT_FALSE (integer_each_onep (c_i_0
));
15348 ASSERT_FALSE (integer_each_onep (c_i_1
));
15349 ASSERT_FALSE (integer_each_onep (c_i_m1
));
15350 ASSERT_FALSE (integer_each_onep (c_f_0
));
15351 ASSERT_FALSE (integer_each_onep (c_f_1
));
15352 ASSERT_FALSE (integer_each_onep (c_f_m1
));
15354 /* Test integer_truep. */
15355 ASSERT_FALSE (integer_truep (i_0
));
15356 ASSERT_FALSE (integer_truep (wr_i_0
));
15357 ASSERT_TRUE (integer_truep (i_1
));
15358 ASSERT_TRUE (integer_truep (wr_i_1
));
15359 ASSERT_FALSE (integer_truep (i_m1
));
15360 ASSERT_FALSE (integer_truep (wr_i_m1
));
15361 ASSERT_FALSE (integer_truep (f_0
));
15362 ASSERT_FALSE (integer_truep (wr_f_0
));
15363 ASSERT_FALSE (integer_truep (f_1
));
15364 ASSERT_FALSE (integer_truep (wr_f_1
));
15365 ASSERT_FALSE (integer_truep (f_m1
));
15366 ASSERT_FALSE (integer_truep (wr_f_m1
));
15367 ASSERT_FALSE (integer_truep (c_i_0
));
15368 ASSERT_TRUE (integer_truep (c_i_1
));
15369 ASSERT_FALSE (integer_truep (c_i_m1
));
15370 ASSERT_FALSE (integer_truep (c_f_0
));
15371 ASSERT_FALSE (integer_truep (c_f_1
));
15372 ASSERT_FALSE (integer_truep (c_f_m1
));
15374 /* Test integer_nonzerop. */
15375 ASSERT_FALSE (integer_nonzerop (i_0
));
15376 ASSERT_FALSE (integer_nonzerop (wr_i_0
));
15377 ASSERT_TRUE (integer_nonzerop (i_1
));
15378 ASSERT_TRUE (integer_nonzerop (wr_i_1
));
15379 ASSERT_TRUE (integer_nonzerop (i_m1
));
15380 ASSERT_TRUE (integer_nonzerop (wr_i_m1
));
15381 ASSERT_FALSE (integer_nonzerop (f_0
));
15382 ASSERT_FALSE (integer_nonzerop (wr_f_0
));
15383 ASSERT_FALSE (integer_nonzerop (f_1
));
15384 ASSERT_FALSE (integer_nonzerop (wr_f_1
));
15385 ASSERT_FALSE (integer_nonzerop (f_m1
));
15386 ASSERT_FALSE (integer_nonzerop (wr_f_m1
));
15387 ASSERT_FALSE (integer_nonzerop (c_i_0
));
15388 ASSERT_TRUE (integer_nonzerop (c_i_1
));
15389 ASSERT_TRUE (integer_nonzerop (c_i_m1
));
15390 ASSERT_FALSE (integer_nonzerop (c_f_0
));
15391 ASSERT_FALSE (integer_nonzerop (c_f_1
));
15392 ASSERT_FALSE (integer_nonzerop (c_f_m1
));
15394 /* Test real_zerop. */
15395 ASSERT_FALSE (real_zerop (i_0
));
15396 ASSERT_FALSE (real_zerop (wr_i_0
));
15397 ASSERT_FALSE (real_zerop (i_1
));
15398 ASSERT_FALSE (real_zerop (wr_i_1
));
15399 ASSERT_FALSE (real_zerop (i_m1
));
15400 ASSERT_FALSE (real_zerop (wr_i_m1
));
15401 ASSERT_TRUE (real_zerop (f_0
));
15402 ASSERT_TRUE (real_zerop (wr_f_0
));
15403 ASSERT_FALSE (real_zerop (f_1
));
15404 ASSERT_FALSE (real_zerop (wr_f_1
));
15405 ASSERT_FALSE (real_zerop (f_m1
));
15406 ASSERT_FALSE (real_zerop (wr_f_m1
));
15407 ASSERT_FALSE (real_zerop (c_i_0
));
15408 ASSERT_FALSE (real_zerop (c_i_1
));
15409 ASSERT_FALSE (real_zerop (c_i_m1
));
15410 ASSERT_TRUE (real_zerop (c_f_0
));
15411 ASSERT_FALSE (real_zerop (c_f_1
));
15412 ASSERT_FALSE (real_zerop (c_f_m1
));
15414 /* Test real_onep. */
15415 ASSERT_FALSE (real_onep (i_0
));
15416 ASSERT_FALSE (real_onep (wr_i_0
));
15417 ASSERT_FALSE (real_onep (i_1
));
15418 ASSERT_FALSE (real_onep (wr_i_1
));
15419 ASSERT_FALSE (real_onep (i_m1
));
15420 ASSERT_FALSE (real_onep (wr_i_m1
));
15421 ASSERT_FALSE (real_onep (f_0
));
15422 ASSERT_FALSE (real_onep (wr_f_0
));
15423 ASSERT_TRUE (real_onep (f_1
));
15424 ASSERT_TRUE (real_onep (wr_f_1
));
15425 ASSERT_FALSE (real_onep (f_m1
));
15426 ASSERT_FALSE (real_onep (wr_f_m1
));
15427 ASSERT_FALSE (real_onep (c_i_0
));
15428 ASSERT_FALSE (real_onep (c_i_1
));
15429 ASSERT_FALSE (real_onep (c_i_m1
));
15430 ASSERT_FALSE (real_onep (c_f_0
));
15431 ASSERT_TRUE (real_onep (c_f_1
));
15432 ASSERT_FALSE (real_onep (c_f_m1
));
15434 /* Test real_minus_onep. */
15435 ASSERT_FALSE (real_minus_onep (i_0
));
15436 ASSERT_FALSE (real_minus_onep (wr_i_0
));
15437 ASSERT_FALSE (real_minus_onep (i_1
));
15438 ASSERT_FALSE (real_minus_onep (wr_i_1
));
15439 ASSERT_FALSE (real_minus_onep (i_m1
));
15440 ASSERT_FALSE (real_minus_onep (wr_i_m1
));
15441 ASSERT_FALSE (real_minus_onep (f_0
));
15442 ASSERT_FALSE (real_minus_onep (wr_f_0
));
15443 ASSERT_FALSE (real_minus_onep (f_1
));
15444 ASSERT_FALSE (real_minus_onep (wr_f_1
));
15445 ASSERT_TRUE (real_minus_onep (f_m1
));
15446 ASSERT_TRUE (real_minus_onep (wr_f_m1
));
15447 ASSERT_FALSE (real_minus_onep (c_i_0
));
15448 ASSERT_FALSE (real_minus_onep (c_i_1
));
15449 ASSERT_FALSE (real_minus_onep (c_i_m1
));
15450 ASSERT_FALSE (real_minus_onep (c_f_0
));
15451 ASSERT_FALSE (real_minus_onep (c_f_1
));
15452 ASSERT_TRUE (real_minus_onep (c_f_m1
));
15455 ASSERT_TRUE (zerop (i_0
));
15456 ASSERT_TRUE (zerop (wr_i_0
));
15457 ASSERT_FALSE (zerop (i_1
));
15458 ASSERT_FALSE (zerop (wr_i_1
));
15459 ASSERT_FALSE (zerop (i_m1
));
15460 ASSERT_FALSE (zerop (wr_i_m1
));
15461 ASSERT_TRUE (zerop (f_0
));
15462 ASSERT_TRUE (zerop (wr_f_0
));
15463 ASSERT_FALSE (zerop (f_1
));
15464 ASSERT_FALSE (zerop (wr_f_1
));
15465 ASSERT_FALSE (zerop (f_m1
));
15466 ASSERT_FALSE (zerop (wr_f_m1
));
15467 ASSERT_TRUE (zerop (c_i_0
));
15468 ASSERT_FALSE (zerop (c_i_1
));
15469 ASSERT_FALSE (zerop (c_i_m1
));
15470 ASSERT_TRUE (zerop (c_f_0
));
15471 ASSERT_FALSE (zerop (c_f_1
));
15472 ASSERT_FALSE (zerop (c_f_m1
));
15474 /* Test tree_expr_nonnegative_p. */
15475 ASSERT_TRUE (tree_expr_nonnegative_p (i_0
));
15476 ASSERT_TRUE (tree_expr_nonnegative_p (wr_i_0
));
15477 ASSERT_TRUE (tree_expr_nonnegative_p (i_1
));
15478 ASSERT_TRUE (tree_expr_nonnegative_p (wr_i_1
));
15479 ASSERT_FALSE (tree_expr_nonnegative_p (i_m1
));
15480 ASSERT_FALSE (tree_expr_nonnegative_p (wr_i_m1
));
15481 ASSERT_TRUE (tree_expr_nonnegative_p (f_0
));
15482 ASSERT_TRUE (tree_expr_nonnegative_p (wr_f_0
));
15483 ASSERT_TRUE (tree_expr_nonnegative_p (f_1
));
15484 ASSERT_TRUE (tree_expr_nonnegative_p (wr_f_1
));
15485 ASSERT_FALSE (tree_expr_nonnegative_p (f_m1
));
15486 ASSERT_FALSE (tree_expr_nonnegative_p (wr_f_m1
));
15487 ASSERT_FALSE (tree_expr_nonnegative_p (c_i_0
));
15488 ASSERT_FALSE (tree_expr_nonnegative_p (c_i_1
));
15489 ASSERT_FALSE (tree_expr_nonnegative_p (c_i_m1
));
15490 ASSERT_FALSE (tree_expr_nonnegative_p (c_f_0
));
15491 ASSERT_FALSE (tree_expr_nonnegative_p (c_f_1
));
15492 ASSERT_FALSE (tree_expr_nonnegative_p (c_f_m1
));
15494 /* Test tree_expr_nonzero_p. */
15495 ASSERT_FALSE (tree_expr_nonzero_p (i_0
));
15496 ASSERT_FALSE (tree_expr_nonzero_p (wr_i_0
));
15497 ASSERT_TRUE (tree_expr_nonzero_p (i_1
));
15498 ASSERT_TRUE (tree_expr_nonzero_p (wr_i_1
));
15499 ASSERT_TRUE (tree_expr_nonzero_p (i_m1
));
15500 ASSERT_TRUE (tree_expr_nonzero_p (wr_i_m1
));
15502 /* Test integer_valued_real_p. */
15503 ASSERT_FALSE (integer_valued_real_p (i_0
));
15504 ASSERT_TRUE (integer_valued_real_p (f_0
));
15505 ASSERT_TRUE (integer_valued_real_p (wr_f_0
));
15506 ASSERT_TRUE (integer_valued_real_p (f_1
));
15507 ASSERT_TRUE (integer_valued_real_p (wr_f_1
));
15509 /* Test integer_pow2p. */
15510 ASSERT_FALSE (integer_pow2p (i_0
));
15511 ASSERT_TRUE (integer_pow2p (i_1
));
15512 ASSERT_TRUE (integer_pow2p (wr_i_1
));
15514 /* Test uniform_integer_cst_p. */
15515 ASSERT_TRUE (uniform_integer_cst_p (i_0
));
15516 ASSERT_TRUE (uniform_integer_cst_p (wr_i_0
));
15517 ASSERT_TRUE (uniform_integer_cst_p (i_1
));
15518 ASSERT_TRUE (uniform_integer_cst_p (wr_i_1
));
15519 ASSERT_TRUE (uniform_integer_cst_p (i_m1
));
15520 ASSERT_TRUE (uniform_integer_cst_p (wr_i_m1
));
15521 ASSERT_FALSE (uniform_integer_cst_p (f_0
));
15522 ASSERT_FALSE (uniform_integer_cst_p (wr_f_0
));
15523 ASSERT_FALSE (uniform_integer_cst_p (f_1
));
15524 ASSERT_FALSE (uniform_integer_cst_p (wr_f_1
));
15525 ASSERT_FALSE (uniform_integer_cst_p (f_m1
));
15526 ASSERT_FALSE (uniform_integer_cst_p (wr_f_m1
));
15527 ASSERT_FALSE (uniform_integer_cst_p (c_i_0
));
15528 ASSERT_FALSE (uniform_integer_cst_p (c_i_1
));
15529 ASSERT_FALSE (uniform_integer_cst_p (c_i_m1
));
15530 ASSERT_FALSE (uniform_integer_cst_p (c_f_0
));
15531 ASSERT_FALSE (uniform_integer_cst_p (c_f_1
));
15532 ASSERT_FALSE (uniform_integer_cst_p (c_f_m1
));
15535 /* Check that string escaping works correctly. */
15538 test_escaped_strings (void)
15541 escaped_string msg
;
15544 /* ASSERT_STREQ does not accept NULL as a valid test
15545 result, so we have to use ASSERT_EQ instead. */
15546 ASSERT_EQ (NULL
, (const char *) msg
);
15549 ASSERT_STREQ ("", (const char *) msg
);
15551 msg
.escape ("foobar");
15552 ASSERT_STREQ ("foobar", (const char *) msg
);
15554 /* Ensure that we have -fmessage-length set to 0. */
15555 saved_cutoff
= pp_line_cutoff (global_dc
->printer
);
15556 pp_line_cutoff (global_dc
->printer
) = 0;
15558 msg
.escape ("foo\nbar");
15559 ASSERT_STREQ ("foo\\nbar", (const char *) msg
);
15561 msg
.escape ("\a\b\f\n\r\t\v");
15562 ASSERT_STREQ ("\\a\\b\\f\\n\\r\\t\\v", (const char *) msg
);
15564 /* Now repeat the tests with -fmessage-length set to 5. */
15565 pp_line_cutoff (global_dc
->printer
) = 5;
15567 /* Note that the newline is not translated into an escape. */
15568 msg
.escape ("foo\nbar");
15569 ASSERT_STREQ ("foo\nbar", (const char *) msg
);
15571 msg
.escape ("\a\b\f\n\r\t\v");
15572 ASSERT_STREQ ("\\a\\b\\f\n\\r\\t\\v", (const char *) msg
);
15574 /* Restore the original message length setting. */
15575 pp_line_cutoff (global_dc
->printer
) = saved_cutoff
;
15578 /* Run all of the selftests within this file. */
15583 test_integer_constants ();
15584 test_identifiers ();
15586 test_vector_cst_patterns ();
15587 test_location_wrappers ();
15588 test_predicates ();
15589 test_escaped_strings ();
15592 } // namespace selftest
15594 #endif /* CHECKING_P */
15596 #include "gt-tree.h"