Change inchash to name space.
[gcc.git] / gcc / tree.c
1 /* Language-independent node constructors for parse phase of GNU compiler.
2 Copyright (C) 1987-2014 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* This file contains the low level primitives for operating on tree nodes,
21 including allocation, list operations, interning of identifiers,
22 construction of data type nodes and statement nodes,
23 and construction of type conversion nodes. It also contains
24 tables indexed by tree code that describe how to take apart
25 nodes of that code.
26
27 It is intended to be language-independent, but occasionally
28 calls language-dependent routines defined (for C) in typecheck.c. */
29
30 #include "config.h"
31 #include "system.h"
32 #include "coretypes.h"
33 #include "tm.h"
34 #include "flags.h"
35 #include "tree.h"
36 #include "stor-layout.h"
37 #include "calls.h"
38 #include "attribs.h"
39 #include "varasm.h"
40 #include "tm_p.h"
41 #include "function.h"
42 #include "obstack.h"
43 #include "toplev.h" /* get_random_seed */
44 #include "hashtab.h"
45 #include "inchash.h"
46 #include "filenames.h"
47 #include "output.h"
48 #include "target.h"
49 #include "common/common-target.h"
50 #include "langhooks.h"
51 #include "tree-inline.h"
52 #include "tree-iterator.h"
53 #include "basic-block.h"
54 #include "bitmap.h"
55 #include "pointer-set.h"
56 #include "tree-ssa-alias.h"
57 #include "internal-fn.h"
58 #include "gimple-expr.h"
59 #include "is-a.h"
60 #include "gimple.h"
61 #include "gimple-iterator.h"
62 #include "gimplify.h"
63 #include "gimple-ssa.h"
64 #include "cgraph.h"
65 #include "tree-phinodes.h"
66 #include "stringpool.h"
67 #include "tree-ssanames.h"
68 #include "expr.h"
69 #include "tree-dfa.h"
70 #include "params.h"
71 #include "tree-pass.h"
72 #include "langhooks-def.h"
73 #include "diagnostic.h"
74 #include "tree-diagnostic.h"
75 #include "tree-pretty-print.h"
76 #include "except.h"
77 #include "debug.h"
78 #include "intl.h"
79 #include "wide-int.h"
80 #include "builtins.h"
81
82 /* Tree code classes. */
83
84 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) TYPE,
85 #define END_OF_BASE_TREE_CODES tcc_exceptional,
86
87 const enum tree_code_class tree_code_type[] = {
88 #include "all-tree.def"
89 };
90
91 #undef DEFTREECODE
92 #undef END_OF_BASE_TREE_CODES
93
94 /* Table indexed by tree code giving number of expression
95 operands beyond the fixed part of the node structure.
96 Not used for types or decls. */
97
98 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) LENGTH,
99 #define END_OF_BASE_TREE_CODES 0,
100
101 const unsigned char tree_code_length[] = {
102 #include "all-tree.def"
103 };
104
105 #undef DEFTREECODE
106 #undef END_OF_BASE_TREE_CODES
107
108 /* Names of tree components.
109 Used for printing out the tree and error messages. */
110 #define DEFTREECODE(SYM, NAME, TYPE, LEN) NAME,
111 #define END_OF_BASE_TREE_CODES "@dummy",
112
113 static const char *const tree_code_name[] = {
114 #include "all-tree.def"
115 };
116
117 #undef DEFTREECODE
118 #undef END_OF_BASE_TREE_CODES
119
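/* Illustrative sketch (not part of tree.c): the three inclusions of
   all-tree.def above are classic X-macros, so a single entry such as
   DEFTREECODE (PLUS_EXPR, "plus_expr", tcc_binary, 2) keeps all three
   tables in sync.  The helper below is never called and exists only to
   show what the tables contain.  */

static void ATTRIBUTE_UNUSED
example_tree_code_tables (void)
{
  gcc_checking_assert (tree_code_type[PLUS_EXPR] == tcc_binary
		       && tree_code_length[PLUS_EXPR] == 2
		       && strcmp (tree_code_name[PLUS_EXPR], "plus_expr") == 0);
}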
120 /* Each tree code class has an associated string representation.
121 These must correspond to the tree_code_class entries. */
122
123 const char *const tree_code_class_strings[] =
124 {
125 "exceptional",
126 "constant",
127 "type",
128 "declaration",
129 "reference",
130 "comparison",
131 "unary",
132 "binary",
133 "statement",
134 "vl_exp",
135 "expression"
136 };
137
138 /* obstack.[ch] explicitly declined to prototype this. */
139 extern int _obstack_allocated_p (struct obstack *h, void *obj);
140
141 /* Statistics-gathering stuff. */
142
143 static int tree_code_counts[MAX_TREE_CODES];
144 int tree_node_counts[(int) all_kinds];
145 int tree_node_sizes[(int) all_kinds];
146
147 /* Keep in sync with tree.h:enum tree_node_kind. */
148 static const char * const tree_node_kind_names[] = {
149 "decls",
150 "types",
151 "blocks",
152 "stmts",
153 "refs",
154 "exprs",
155 "constants",
156 "identifiers",
157 "vecs",
158 "binfos",
159 "ssa names",
160 "constructors",
161 "random kinds",
162 "lang_decl kinds",
163 "lang_type kinds",
164 "omp clauses",
165 };
166
167 /* Unique id for next decl created. */
168 static GTY(()) int next_decl_uid;
169 /* Unique id for next type created. */
170 static GTY(()) int next_type_uid = 1;
171 /* Unique id for next debug decl created. Use negative numbers,
172 to catch erroneous uses. */
173 static GTY(()) int next_debug_decl_uid;
174
175 /* Since we cannot rehash a type after it is in the table, we have to
176 keep the hash code. */
177
178 struct GTY(()) type_hash {
179 unsigned long hash;
180 tree type;
181 };
182
183 /* Initial size of the hash table (rounded to next prime). */
184 #define TYPE_HASH_INITIAL_SIZE 1000
185
186 /* Now here is the hash table. When recording a type, it is added to
187 the slot whose index is the hash code. Note that the hash table is
188 used for several kinds of types (function types, array types and
189 array index range types, for now). While all these live in the
190 same table, they are completely independent, and the hash code is
191 computed differently for each of these. */
192
193 static GTY ((if_marked ("type_hash_marked_p"), param_is (struct type_hash)))
194 htab_t type_hash_table;
195
196 /* Hash table and temporary node for larger integer const values. */
197 static GTY (()) tree int_cst_node;
198 static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
199 htab_t int_cst_hash_table;
200
201 /* Hash table for optimization flags and target option flags. Use the same
202 hash table for both sets of options. Nodes for building the current
203 optimization and target option nodes. The assumption is most of the time
204 the options created will already be in the hash table, so we avoid
205 allocating and freeing up a node repeatedly. */
206 static GTY (()) tree cl_optimization_node;
207 static GTY (()) tree cl_target_option_node;
208 static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
209 htab_t cl_option_hash_table;
210
211 /* General tree->tree mapping structure for use in hash tables. */
212
213
214 static GTY ((if_marked ("tree_decl_map_marked_p"), param_is (struct tree_decl_map)))
215 htab_t debug_expr_for_decl;
216
217 static GTY ((if_marked ("tree_decl_map_marked_p"), param_is (struct tree_decl_map)))
218 htab_t value_expr_for_decl;
219
220 static GTY ((if_marked ("tree_vec_map_marked_p"), param_is (struct tree_vec_map)))
221 htab_t debug_args_for_decl;
222
223 static void set_type_quals (tree, int);
224 static int type_hash_eq (const void *, const void *);
225 static hashval_t type_hash_hash (const void *);
226 static hashval_t int_cst_hash_hash (const void *);
227 static int int_cst_hash_eq (const void *, const void *);
228 static hashval_t cl_option_hash_hash (const void *);
229 static int cl_option_hash_eq (const void *, const void *);
230 static void print_type_hash_statistics (void);
231 static void print_debug_expr_statistics (void);
232 static void print_value_expr_statistics (void);
233 static int type_hash_marked_p (const void *);
234 static void type_hash_list (const_tree, inchash::hash &);
235 static void attribute_hash_list (const_tree, inchash::hash &);
236
237 tree global_trees[TI_MAX];
238 tree integer_types[itk_none];
239
240 unsigned char tree_contains_struct[MAX_TREE_CODES][64];
241
242 /* Number of operands for each OpenMP clause. */
243 unsigned const char omp_clause_num_ops[] =
244 {
245 0, /* OMP_CLAUSE_ERROR */
246 1, /* OMP_CLAUSE_PRIVATE */
247 1, /* OMP_CLAUSE_SHARED */
248 1, /* OMP_CLAUSE_FIRSTPRIVATE */
249 2, /* OMP_CLAUSE_LASTPRIVATE */
250 4, /* OMP_CLAUSE_REDUCTION */
251 1, /* OMP_CLAUSE_COPYIN */
252 1, /* OMP_CLAUSE_COPYPRIVATE */
253 3, /* OMP_CLAUSE_LINEAR */
254 2, /* OMP_CLAUSE_ALIGNED */
255 1, /* OMP_CLAUSE_DEPEND */
256 1, /* OMP_CLAUSE_UNIFORM */
257 2, /* OMP_CLAUSE_FROM */
258 2, /* OMP_CLAUSE_TO */
259 2, /* OMP_CLAUSE_MAP */
260 1, /* OMP_CLAUSE__LOOPTEMP_ */
261 1, /* OMP_CLAUSE_IF */
262 1, /* OMP_CLAUSE_NUM_THREADS */
263 1, /* OMP_CLAUSE_SCHEDULE */
264 0, /* OMP_CLAUSE_NOWAIT */
265 0, /* OMP_CLAUSE_ORDERED */
266 0, /* OMP_CLAUSE_DEFAULT */
267 3, /* OMP_CLAUSE_COLLAPSE */
268 0, /* OMP_CLAUSE_UNTIED */
269 1, /* OMP_CLAUSE_FINAL */
270 0, /* OMP_CLAUSE_MERGEABLE */
271 1, /* OMP_CLAUSE_DEVICE */
272 1, /* OMP_CLAUSE_DIST_SCHEDULE */
273 0, /* OMP_CLAUSE_INBRANCH */
274 0, /* OMP_CLAUSE_NOTINBRANCH */
275 1, /* OMP_CLAUSE_NUM_TEAMS */
276 1, /* OMP_CLAUSE_THREAD_LIMIT */
277 0, /* OMP_CLAUSE_PROC_BIND */
278 1, /* OMP_CLAUSE_SAFELEN */
279 1, /* OMP_CLAUSE_SIMDLEN */
280 0, /* OMP_CLAUSE_FOR */
281 0, /* OMP_CLAUSE_PARALLEL */
282 0, /* OMP_CLAUSE_SECTIONS */
283 0, /* OMP_CLAUSE_TASKGROUP */
284 1, /* OMP_CLAUSE__SIMDUID_ */
285 };
286
287 const char * const omp_clause_code_name[] =
288 {
289 "error_clause",
290 "private",
291 "shared",
292 "firstprivate",
293 "lastprivate",
294 "reduction",
295 "copyin",
296 "copyprivate",
297 "linear",
298 "aligned",
299 "depend",
300 "uniform",
301 "from",
302 "to",
303 "map",
304 "_looptemp_",
305 "if",
306 "num_threads",
307 "schedule",
308 "nowait",
309 "ordered",
310 "default",
311 "collapse",
312 "untied",
313 "final",
314 "mergeable",
315 "device",
316 "dist_schedule",
317 "inbranch",
318 "notinbranch",
319 "num_teams",
320 "thread_limit",
321 "proc_bind",
322 "safelen",
323 "simdlen",
324 "for",
325 "parallel",
326 "sections",
327 "taskgroup",
328 "_simduid_"
329 };
330
331
332 /* Return the tree node structure used by tree code CODE. */
333
334 static inline enum tree_node_structure_enum
335 tree_node_structure_for_code (enum tree_code code)
336 {
337 switch (TREE_CODE_CLASS (code))
338 {
339 case tcc_declaration:
340 {
341 switch (code)
342 {
343 case FIELD_DECL:
344 return TS_FIELD_DECL;
345 case PARM_DECL:
346 return TS_PARM_DECL;
347 case VAR_DECL:
348 return TS_VAR_DECL;
349 case LABEL_DECL:
350 return TS_LABEL_DECL;
351 case RESULT_DECL:
352 return TS_RESULT_DECL;
353 case DEBUG_EXPR_DECL:
354 return TS_DECL_WRTL;
355 case CONST_DECL:
356 return TS_CONST_DECL;
357 case TYPE_DECL:
358 return TS_TYPE_DECL;
359 case FUNCTION_DECL:
360 return TS_FUNCTION_DECL;
361 case TRANSLATION_UNIT_DECL:
362 return TS_TRANSLATION_UNIT_DECL;
363 default:
364 return TS_DECL_NON_COMMON;
365 }
366 }
367 case tcc_type:
368 return TS_TYPE_NON_COMMON;
369 case tcc_reference:
370 case tcc_comparison:
371 case tcc_unary:
372 case tcc_binary:
373 case tcc_expression:
374 case tcc_statement:
375 case tcc_vl_exp:
376 return TS_EXP;
377 default: /* tcc_constant and tcc_exceptional */
378 break;
379 }
380 switch (code)
381 {
382 /* tcc_constant cases. */
383 case VOID_CST: return TS_TYPED;
384 case INTEGER_CST: return TS_INT_CST;
385 case REAL_CST: return TS_REAL_CST;
386 case FIXED_CST: return TS_FIXED_CST;
387 case COMPLEX_CST: return TS_COMPLEX;
388 case VECTOR_CST: return TS_VECTOR;
389 case STRING_CST: return TS_STRING;
390 /* tcc_exceptional cases. */
391 case ERROR_MARK: return TS_COMMON;
392 case IDENTIFIER_NODE: return TS_IDENTIFIER;
393 case TREE_LIST: return TS_LIST;
394 case TREE_VEC: return TS_VEC;
395 case SSA_NAME: return TS_SSA_NAME;
396 case PLACEHOLDER_EXPR: return TS_COMMON;
397 case STATEMENT_LIST: return TS_STATEMENT_LIST;
398 case BLOCK: return TS_BLOCK;
399 case CONSTRUCTOR: return TS_CONSTRUCTOR;
400 case TREE_BINFO: return TS_BINFO;
401 case OMP_CLAUSE: return TS_OMP_CLAUSE;
402 case OPTIMIZATION_NODE: return TS_OPTIMIZATION;
403 case TARGET_OPTION_NODE: return TS_TARGET_OPTION;
404
405 default:
406 gcc_unreachable ();
407 }
408 }
409
410
411 /* Initialize tree_contains_struct to describe the hierarchy of tree
412 nodes. */
413
414 static void
415 initialize_tree_contains_struct (void)
416 {
417 unsigned i;
418
419 for (i = ERROR_MARK; i < LAST_AND_UNUSED_TREE_CODE; i++)
420 {
421 enum tree_code code;
422 enum tree_node_structure_enum ts_code;
423
424 code = (enum tree_code) i;
425 ts_code = tree_node_structure_for_code (code);
426
427 /* Mark the TS structure itself. */
428 tree_contains_struct[code][ts_code] = 1;
429
430 /* Mark all the structures that TS is derived from. */
431 switch (ts_code)
432 {
433 case TS_TYPED:
434 case TS_BLOCK:
435 MARK_TS_BASE (code);
436 break;
437
438 case TS_COMMON:
439 case TS_INT_CST:
440 case TS_REAL_CST:
441 case TS_FIXED_CST:
442 case TS_VECTOR:
443 case TS_STRING:
444 case TS_COMPLEX:
445 case TS_SSA_NAME:
446 case TS_CONSTRUCTOR:
447 case TS_EXP:
448 case TS_STATEMENT_LIST:
449 MARK_TS_TYPED (code);
450 break;
451
452 case TS_IDENTIFIER:
453 case TS_DECL_MINIMAL:
454 case TS_TYPE_COMMON:
455 case TS_LIST:
456 case TS_VEC:
457 case TS_BINFO:
458 case TS_OMP_CLAUSE:
459 case TS_OPTIMIZATION:
460 case TS_TARGET_OPTION:
461 MARK_TS_COMMON (code);
462 break;
463
464 case TS_TYPE_WITH_LANG_SPECIFIC:
465 MARK_TS_TYPE_COMMON (code);
466 break;
467
468 case TS_TYPE_NON_COMMON:
469 MARK_TS_TYPE_WITH_LANG_SPECIFIC (code);
470 break;
471
472 case TS_DECL_COMMON:
473 MARK_TS_DECL_MINIMAL (code);
474 break;
475
476 case TS_DECL_WRTL:
477 case TS_CONST_DECL:
478 MARK_TS_DECL_COMMON (code);
479 break;
480
481 case TS_DECL_NON_COMMON:
482 MARK_TS_DECL_WITH_VIS (code);
483 break;
484
485 case TS_DECL_WITH_VIS:
486 case TS_PARM_DECL:
487 case TS_LABEL_DECL:
488 case TS_RESULT_DECL:
489 MARK_TS_DECL_WRTL (code);
490 break;
491
492 case TS_FIELD_DECL:
493 MARK_TS_DECL_COMMON (code);
494 break;
495
496 case TS_VAR_DECL:
497 MARK_TS_DECL_WITH_VIS (code);
498 break;
499
500 case TS_TYPE_DECL:
501 case TS_FUNCTION_DECL:
502 MARK_TS_DECL_NON_COMMON (code);
503 break;
504
505 case TS_TRANSLATION_UNIT_DECL:
506 MARK_TS_DECL_COMMON (code);
507 break;
508
509 default:
510 gcc_unreachable ();
511 }
512 }
513
514 /* Basic consistency checks for attributes used in fold. */
515 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_NON_COMMON]);
516 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_NON_COMMON]);
517 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_COMMON]);
518 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_COMMON]);
519 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_COMMON]);
520 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_COMMON]);
521 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_COMMON]);
522 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_COMMON]);
523 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_COMMON]);
524 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_COMMON]);
525 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_COMMON]);
526 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WRTL]);
527 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_WRTL]);
528 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_WRTL]);
529 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WRTL]);
530 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_WRTL]);
531 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_MINIMAL]);
532 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_MINIMAL]);
533 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_MINIMAL]);
534 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_MINIMAL]);
535 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_MINIMAL]);
536 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_MINIMAL]);
537 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_MINIMAL]);
538 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_MINIMAL]);
539 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_MINIMAL]);
540 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WITH_VIS]);
541 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WITH_VIS]);
542 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_WITH_VIS]);
543 gcc_assert (tree_contains_struct[VAR_DECL][TS_VAR_DECL]);
544 gcc_assert (tree_contains_struct[FIELD_DECL][TS_FIELD_DECL]);
545 gcc_assert (tree_contains_struct[PARM_DECL][TS_PARM_DECL]);
546 gcc_assert (tree_contains_struct[LABEL_DECL][TS_LABEL_DECL]);
547 gcc_assert (tree_contains_struct[RESULT_DECL][TS_RESULT_DECL]);
548 gcc_assert (tree_contains_struct[CONST_DECL][TS_CONST_DECL]);
549 gcc_assert (tree_contains_struct[TYPE_DECL][TS_TYPE_DECL]);
550 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_FUNCTION_DECL]);
551 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_MINIMAL]);
552 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_COMMON]);
553 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_MINIMAL]);
554 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_COMMON]);
555 }
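/* Illustrative sketch (not part of tree.c): once the matrix above has been
   filled in (via init_ttree), structure-containment queries go through the
   CODE_CONTAINS_STRUCT macro; e.g. a VAR_DECL carries visibility
   information while a PARM_DECL does not.  Never called; shown only to
   demonstrate the intended use of tree_contains_struct.  */

static void ATTRIBUTE_UNUSED
example_code_contains_struct (void)
{
  gcc_checking_assert (CODE_CONTAINS_STRUCT (VAR_DECL, TS_DECL_WITH_VIS));
  gcc_checking_assert (!CODE_CONTAINS_STRUCT (PARM_DECL, TS_DECL_WITH_VIS));
}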
556
557
558 /* Init tree.c. */
559
560 void
561 init_ttree (void)
562 {
563 /* Initialize the hash table of types. */
564 type_hash_table = htab_create_ggc (TYPE_HASH_INITIAL_SIZE, type_hash_hash,
565 type_hash_eq, 0);
566
567 debug_expr_for_decl = htab_create_ggc (512, tree_decl_map_hash,
568 tree_decl_map_eq, 0);
569
570 value_expr_for_decl = htab_create_ggc (512, tree_decl_map_hash,
571 tree_decl_map_eq, 0);
572
573 int_cst_hash_table = htab_create_ggc (1024, int_cst_hash_hash,
574 int_cst_hash_eq, NULL);
575
576 int_cst_node = make_int_cst (1, 1);
577
578 cl_option_hash_table = htab_create_ggc (64, cl_option_hash_hash,
579 cl_option_hash_eq, NULL);
580
581 cl_optimization_node = make_node (OPTIMIZATION_NODE);
582 cl_target_option_node = make_node (TARGET_OPTION_NODE);
583
584 /* Initialize the tree_contains_struct array. */
585 initialize_tree_contains_struct ();
586 lang_hooks.init_ts ();
587 }
588
589 \f
590 /* The name of the object as the assembler will see it (but before any
591 translations made by ASM_OUTPUT_LABELREF). Often this is the same
592 as DECL_NAME. It is an IDENTIFIER_NODE. */
593 tree
594 decl_assembler_name (tree decl)
595 {
596 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
597 lang_hooks.set_decl_assembler_name (decl);
598 return DECL_WITH_VIS_CHECK (decl)->decl_with_vis.assembler_name;
599 }
600
601 /* When the target supports COMDAT groups, this indicates which group the
602 DECL is associated with. This can be either an IDENTIFIER_NODE or a
603 decl, in which case its DECL_ASSEMBLER_NAME identifies the group. */
604 tree
605 decl_comdat_group (const_tree node)
606 {
607 struct symtab_node *snode = symtab_node::get (node);
608 if (!snode)
609 return NULL;
610 return snode->get_comdat_group ();
611 }
612
613 /* Likewise, but make sure it's been reduced to an IDENTIFIER_NODE. */
614 tree
615 decl_comdat_group_id (const_tree node)
616 {
617 struct symtab_node *snode = symtab_node::get (node);
618 if (!snode)
619 return NULL;
620 return snode->get_comdat_group_id ();
621 }
622
623 /* When the target supports named sections, return the name of the section
624 of NODE as a string, or NULL if it is in no section. */
625 const char *
626 decl_section_name (const_tree node)
627 {
628 struct symtab_node *snode = symtab_node::get (node);
629 if (!snode)
630 return NULL;
631 return snode->get_section ();
632 }
633
634 /* Set the section name of NODE to the string VALUE, or clear it when
635 VALUE is NULL. */
636 void
637 set_decl_section_name (tree node, const char *value)
638 {
639 struct symtab_node *snode;
640
641 if (value == NULL)
642 {
643 snode = symtab_node::get (node);
644 if (!snode)
645 return;
646 }
647 else if (TREE_CODE (node) == VAR_DECL)
648 snode = varpool_node::get_create (node);
649 else
650 snode = cgraph_node::get_create (node);
651 snode->set_section (value);
652 }
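/* Illustrative sketch (not part of tree.c): section names now live in the
   symbol table node rather than in the tree itself, but the two accessors
   above still give the familiar set/get behaviour.  The name ".my_data" is
   hypothetical, and the decl is assumed to be a VAR_DECL the symbol table
   can register.  Never called; for illustration only.  */

static void ATTRIBUTE_UNUSED
example_section_name (tree var_decl)
{
  set_decl_section_name (var_decl, ".my_data");
  gcc_checking_assert (strcmp (decl_section_name (var_decl), ".my_data") == 0);
}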
653
654 /* Return TLS model of a variable NODE. */
655 enum tls_model
656 decl_tls_model (const_tree node)
657 {
658 struct varpool_node *snode = varpool_node::get (node);
659 if (!snode)
660 return TLS_MODEL_NONE;
661 return snode->tls_model;
662 }
663
664 /* Set TLS model of variable NODE to MODEL. */
665 void
666 set_decl_tls_model (tree node, enum tls_model model)
667 {
668 struct varpool_node *vnode;
669
670 if (model == TLS_MODEL_NONE)
671 {
672 vnode = varpool_node::get (node);
673 if (!vnode)
674 return;
675 }
676 else
677 vnode = varpool_node::get_create (node);
678 vnode->tls_model = model;
679 }
680
681 /* Compute the number of bytes occupied by a tree with code CODE.
682 This function cannot be used for nodes that have variable sizes,
683 including TREE_VEC, INTEGER_CST, STRING_CST, and CALL_EXPR. */
684 size_t
685 tree_code_size (enum tree_code code)
686 {
687 switch (TREE_CODE_CLASS (code))
688 {
689 case tcc_declaration: /* A decl node */
690 {
691 switch (code)
692 {
693 case FIELD_DECL:
694 return sizeof (struct tree_field_decl);
695 case PARM_DECL:
696 return sizeof (struct tree_parm_decl);
697 case VAR_DECL:
698 return sizeof (struct tree_var_decl);
699 case LABEL_DECL:
700 return sizeof (struct tree_label_decl);
701 case RESULT_DECL:
702 return sizeof (struct tree_result_decl);
703 case CONST_DECL:
704 return sizeof (struct tree_const_decl);
705 case TYPE_DECL:
706 return sizeof (struct tree_type_decl);
707 case FUNCTION_DECL:
708 return sizeof (struct tree_function_decl);
709 case DEBUG_EXPR_DECL:
710 return sizeof (struct tree_decl_with_rtl);
711 case TRANSLATION_UNIT_DECL:
712 return sizeof (struct tree_translation_unit_decl);
713 case NAMESPACE_DECL:
714 case IMPORTED_DECL:
715 case NAMELIST_DECL:
716 return sizeof (struct tree_decl_non_common);
717 default:
718 return lang_hooks.tree_size (code);
719 }
720 }
721
722 case tcc_type: /* a type node */
723 return sizeof (struct tree_type_non_common);
724
725 case tcc_reference: /* a reference */
726 case tcc_expression: /* an expression */
727 case tcc_statement: /* an expression with side effects */
728 case tcc_comparison: /* a comparison expression */
729 case tcc_unary: /* a unary arithmetic expression */
730 case tcc_binary: /* a binary arithmetic expression */
731 return (sizeof (struct tree_exp)
732 + (TREE_CODE_LENGTH (code) - 1) * sizeof (tree));
733
734 case tcc_constant: /* a constant */
735 switch (code)
736 {
737 case VOID_CST: return sizeof (struct tree_typed);
738 case INTEGER_CST: gcc_unreachable ();
739 case REAL_CST: return sizeof (struct tree_real_cst);
740 case FIXED_CST: return sizeof (struct tree_fixed_cst);
741 case COMPLEX_CST: return sizeof (struct tree_complex);
742 case VECTOR_CST: return sizeof (struct tree_vector);
743 case STRING_CST: gcc_unreachable ();
744 default:
745 return lang_hooks.tree_size (code);
746 }
747
748 case tcc_exceptional: /* something random, like an identifier. */
749 switch (code)
750 {
751 case IDENTIFIER_NODE: return lang_hooks.identifier_size;
752 case TREE_LIST: return sizeof (struct tree_list);
753
754 case ERROR_MARK:
755 case PLACEHOLDER_EXPR: return sizeof (struct tree_common);
756
757 case TREE_VEC:
758 case OMP_CLAUSE: gcc_unreachable ();
759
760 case SSA_NAME: return sizeof (struct tree_ssa_name);
761
762 case STATEMENT_LIST: return sizeof (struct tree_statement_list);
763 case BLOCK: return sizeof (struct tree_block);
764 case CONSTRUCTOR: return sizeof (struct tree_constructor);
765 case OPTIMIZATION_NODE: return sizeof (struct tree_optimization_option);
766 case TARGET_OPTION_NODE: return sizeof (struct tree_target_option);
767
768 default:
769 return lang_hooks.tree_size (code);
770 }
771
772 default:
773 gcc_unreachable ();
774 }
775 }
776
777 /* Compute the number of bytes occupied by NODE. This routine only
778 looks at TREE_CODE, except for those nodes that have variable sizes. */
779 size_t
780 tree_size (const_tree node)
781 {
782 const enum tree_code code = TREE_CODE (node);
783 switch (code)
784 {
785 case INTEGER_CST:
786 return (sizeof (struct tree_int_cst)
787 + (TREE_INT_CST_EXT_NUNITS (node) - 1) * sizeof (HOST_WIDE_INT));
788
789 case TREE_BINFO:
790 return (offsetof (struct tree_binfo, base_binfos)
791 + vec<tree, va_gc>
792 ::embedded_size (BINFO_N_BASE_BINFOS (node)));
793
794 case TREE_VEC:
795 return (sizeof (struct tree_vec)
796 + (TREE_VEC_LENGTH (node) - 1) * sizeof (tree));
797
798 case VECTOR_CST:
799 return (sizeof (struct tree_vector)
800 + (TYPE_VECTOR_SUBPARTS (TREE_TYPE (node)) - 1) * sizeof (tree));
801
802 case STRING_CST:
803 return TREE_STRING_LENGTH (node) + offsetof (struct tree_string, str) + 1;
804
805 case OMP_CLAUSE:
806 return (sizeof (struct tree_omp_clause)
807 + (omp_clause_num_ops[OMP_CLAUSE_CODE (node)] - 1)
808 * sizeof (tree));
809
810 default:
811 if (TREE_CODE_CLASS (code) == tcc_vl_exp)
812 return (sizeof (struct tree_exp)
813 + (VL_EXP_OPERAND_LENGTH (node) - 1) * sizeof (tree));
814 else
815 return tree_code_size (code);
816 }
817 }
818
819 /* Record interesting allocation statistics for a tree node with CODE
820 and LENGTH. */
821
822 static void
823 record_node_allocation_statistics (enum tree_code code ATTRIBUTE_UNUSED,
824 size_t length ATTRIBUTE_UNUSED)
825 {
826 enum tree_code_class type = TREE_CODE_CLASS (code);
827 tree_node_kind kind;
828
829 if (!GATHER_STATISTICS)
830 return;
831
832 switch (type)
833 {
834 case tcc_declaration: /* A decl node */
835 kind = d_kind;
836 break;
837
838 case tcc_type: /* a type node */
839 kind = t_kind;
840 break;
841
842 case tcc_statement: /* an expression with side effects */
843 kind = s_kind;
844 break;
845
846 case tcc_reference: /* a reference */
847 kind = r_kind;
848 break;
849
850 case tcc_expression: /* an expression */
851 case tcc_comparison: /* a comparison expression */
852 case tcc_unary: /* a unary arithmetic expression */
853 case tcc_binary: /* a binary arithmetic expression */
854 kind = e_kind;
855 break;
856
857 case tcc_constant: /* a constant */
858 kind = c_kind;
859 break;
860
861 case tcc_exceptional: /* something random, like an identifier. */
862 switch (code)
863 {
864 case IDENTIFIER_NODE:
865 kind = id_kind;
866 break;
867
868 case TREE_VEC:
869 kind = vec_kind;
870 break;
871
872 case TREE_BINFO:
873 kind = binfo_kind;
874 break;
875
876 case SSA_NAME:
877 kind = ssa_name_kind;
878 break;
879
880 case BLOCK:
881 kind = b_kind;
882 break;
883
884 case CONSTRUCTOR:
885 kind = constr_kind;
886 break;
887
888 case OMP_CLAUSE:
889 kind = omp_clause_kind;
890 break;
891
892 default:
893 kind = x_kind;
894 break;
895 }
896 break;
897
898 case tcc_vl_exp:
899 kind = e_kind;
900 break;
901
902 default:
903 gcc_unreachable ();
904 }
905
906 tree_code_counts[(int) code]++;
907 tree_node_counts[(int) kind]++;
908 tree_node_sizes[(int) kind] += length;
909 }
910
911 /* Allocate and return a new UID from the DECL_UID namespace. */
912
913 int
914 allocate_decl_uid (void)
915 {
916 return next_decl_uid++;
917 }
918
919 /* Return a newly allocated node of code CODE. For decl and type
920 nodes, some other fields are initialized. The rest of the node is
921 initialized to zero. This function cannot be used for TREE_VEC,
922 INTEGER_CST or OMP_CLAUSE nodes, which is enforced by asserts in
923 tree_code_size.
924
925 Achoo! I got a code in the node. */
926
927 tree
928 make_node_stat (enum tree_code code MEM_STAT_DECL)
929 {
930 tree t;
931 enum tree_code_class type = TREE_CODE_CLASS (code);
932 size_t length = tree_code_size (code);
933
934 record_node_allocation_statistics (code, length);
935
936 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
937 TREE_SET_CODE (t, code);
938
939 switch (type)
940 {
941 case tcc_statement:
942 TREE_SIDE_EFFECTS (t) = 1;
943 break;
944
945 case tcc_declaration:
946 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
947 {
948 if (code == FUNCTION_DECL)
949 {
950 DECL_ALIGN (t) = FUNCTION_BOUNDARY;
951 DECL_MODE (t) = FUNCTION_MODE;
952 }
953 else
954 DECL_ALIGN (t) = 1;
955 }
956 DECL_SOURCE_LOCATION (t) = input_location;
957 if (TREE_CODE (t) == DEBUG_EXPR_DECL)
958 DECL_UID (t) = --next_debug_decl_uid;
959 else
960 {
961 DECL_UID (t) = allocate_decl_uid ();
962 SET_DECL_PT_UID (t, -1);
963 }
964 if (TREE_CODE (t) == LABEL_DECL)
965 LABEL_DECL_UID (t) = -1;
966
967 break;
968
969 case tcc_type:
970 TYPE_UID (t) = next_type_uid++;
971 TYPE_ALIGN (t) = BITS_PER_UNIT;
972 TYPE_USER_ALIGN (t) = 0;
973 TYPE_MAIN_VARIANT (t) = t;
974 TYPE_CANONICAL (t) = t;
975
976 /* Default to no attributes for type, but let target change that. */
977 TYPE_ATTRIBUTES (t) = NULL_TREE;
978 targetm.set_default_type_attributes (t);
979
980 /* We have not yet computed the alias set for this type. */
981 TYPE_ALIAS_SET (t) = -1;
982 break;
983
984 case tcc_constant:
985 TREE_CONSTANT (t) = 1;
986 break;
987
988 case tcc_expression:
989 switch (code)
990 {
991 case INIT_EXPR:
992 case MODIFY_EXPR:
993 case VA_ARG_EXPR:
994 case PREDECREMENT_EXPR:
995 case PREINCREMENT_EXPR:
996 case POSTDECREMENT_EXPR:
997 case POSTINCREMENT_EXPR:
998 /* All of these have side-effects, no matter what their
999 operands are. */
1000 TREE_SIDE_EFFECTS (t) = 1;
1001 break;
1002
1003 default:
1004 break;
1005 }
1006 break;
1007
1008 default:
1009 /* Other classes need no special treatment. */
1010 break;
1011 }
1012
1013 return t;
1014 }
1015 \f
1016 /* Return a new node with the same contents as NODE except that its
1017 TREE_CHAIN, if it has one, is zero and it has a fresh uid. */
1018
1019 tree
1020 copy_node_stat (tree node MEM_STAT_DECL)
1021 {
1022 tree t;
1023 enum tree_code code = TREE_CODE (node);
1024 size_t length;
1025
1026 gcc_assert (code != STATEMENT_LIST);
1027
1028 length = tree_size (node);
1029 record_node_allocation_statistics (code, length);
1030 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
1031 memcpy (t, node, length);
1032
1033 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
1034 TREE_CHAIN (t) = 0;
1035 TREE_ASM_WRITTEN (t) = 0;
1036 TREE_VISITED (t) = 0;
1037
1038 if (TREE_CODE_CLASS (code) == tcc_declaration)
1039 {
1040 if (code == DEBUG_EXPR_DECL)
1041 DECL_UID (t) = --next_debug_decl_uid;
1042 else
1043 {
1044 DECL_UID (t) = allocate_decl_uid ();
1045 if (DECL_PT_UID_SET_P (node))
1046 SET_DECL_PT_UID (t, DECL_PT_UID (node));
1047 }
1048 if ((TREE_CODE (node) == PARM_DECL || TREE_CODE (node) == VAR_DECL)
1049 && DECL_HAS_VALUE_EXPR_P (node))
1050 {
1051 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (node));
1052 DECL_HAS_VALUE_EXPR_P (t) = 1;
1053 }
1054 /* DECL_DEBUG_EXPR is copied explicitly by callers. */
1055 if (TREE_CODE (node) == VAR_DECL)
1056 {
1057 DECL_HAS_DEBUG_EXPR_P (t) = 0;
1058 t->decl_with_vis.symtab_node = NULL;
1059 }
1060 if (TREE_CODE (node) == VAR_DECL && DECL_HAS_INIT_PRIORITY_P (node))
1061 {
1062 SET_DECL_INIT_PRIORITY (t, DECL_INIT_PRIORITY (node));
1063 DECL_HAS_INIT_PRIORITY_P (t) = 1;
1064 }
1065 if (TREE_CODE (node) == FUNCTION_DECL)
1066 {
1067 DECL_STRUCT_FUNCTION (t) = NULL;
1068 t->decl_with_vis.symtab_node = NULL;
1069 }
1070 }
1071 else if (TREE_CODE_CLASS (code) == tcc_type)
1072 {
1073 TYPE_UID (t) = next_type_uid++;
1074 /* The following is so that the debug code for
1075 the copy is different from the original type.
1076 The two statements usually duplicate each other
1077 (because they clear fields of the same union),
1078 but the optimizer should catch that. */
1079 TYPE_SYMTAB_POINTER (t) = 0;
1080 TYPE_SYMTAB_ADDRESS (t) = 0;
1081
1082 /* Do not copy the values cache. */
1083 if (TYPE_CACHED_VALUES_P (t))
1084 {
1085 TYPE_CACHED_VALUES_P (t) = 0;
1086 TYPE_CACHED_VALUES (t) = NULL_TREE;
1087 }
1088 }
1089
1090 return t;
1091 }
1092
1093 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
1094 For example, this can copy a list made of TREE_LIST nodes. */
1095
1096 tree
1097 copy_list (tree list)
1098 {
1099 tree head;
1100 tree prev, next;
1101
1102 if (list == 0)
1103 return 0;
1104
1105 head = prev = copy_node (list);
1106 next = TREE_CHAIN (list);
1107 while (next)
1108 {
1109 TREE_CHAIN (prev) = copy_node (next);
1110 prev = TREE_CHAIN (prev);
1111 next = TREE_CHAIN (next);
1112 }
1113 return head;
1114 }
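/* Illustrative sketch (not part of tree.c): copy_list duplicates only the
   TREE_LIST spine; the TREE_PURPOSE/TREE_VALUE fields of the copies still
   point at the original operands.  Never called; for illustration only.  */

static void ATTRIBUTE_UNUSED
example_copy_list (void)
{
  tree orig = tree_cons (NULL_TREE, integer_one_node,
			 tree_cons (NULL_TREE, integer_zero_node, NULL_TREE));
  tree copy = copy_list (orig);
  gcc_checking_assert (copy != orig
		       && TREE_VALUE (copy) == TREE_VALUE (orig)
		       && TREE_CHAIN (copy) != TREE_CHAIN (orig));
}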
1115
1116 \f
1117 /* Return the value that TREE_INT_CST_EXT_NUNITS should have for an
1118 INTEGER_CST with value CST and type TYPE. */
1119
1120 static unsigned int
1121 get_int_cst_ext_nunits (tree type, const wide_int &cst)
1122 {
1123 gcc_checking_assert (cst.get_precision () == TYPE_PRECISION (type));
1124 /* We need an extra zero HWI if CST is an unsigned integer with its
1125 upper bit set, and if CST occupies a whole number of HWIs. */
1126 if (TYPE_UNSIGNED (type)
1127 && wi::neg_p (cst)
1128 && (cst.get_precision () % HOST_BITS_PER_WIDE_INT) == 0)
1129 return cst.get_precision () / HOST_BITS_PER_WIDE_INT + 1;
1130 return cst.get_len ();
1131 }
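/* Illustrative sketch (not part of tree.c): assuming a 64-bit
   HOST_WIDE_INT, an unsigned 64-bit constant with its top bit set needs
   the extra zero element described above, so its "extended" length is one
   more than its ordinary length.  Never called; for illustration only.  */

static void ATTRIBUTE_UNUSED
example_ext_nunits (void)
{
  tree c = build_int_cstu (long_long_unsigned_type_node,
			   ((unsigned HOST_WIDE_INT) 1) << 63);
  gcc_checking_assert (TREE_INT_CST_NUNITS (c) == 1
		       && TREE_INT_CST_EXT_NUNITS (c) == 2);
}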
1132
1133 /* Return a new INTEGER_CST with value CST and type TYPE. */
1134
1135 static tree
1136 build_new_int_cst (tree type, const wide_int &cst)
1137 {
1138 unsigned int len = cst.get_len ();
1139 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1140 tree nt = make_int_cst (len, ext_len);
1141
1142 if (len < ext_len)
1143 {
1144 --ext_len;
1145 TREE_INT_CST_ELT (nt, ext_len) = 0;
1146 for (unsigned int i = len; i < ext_len; ++i)
1147 TREE_INT_CST_ELT (nt, i) = -1;
1148 }
1149 else if (TYPE_UNSIGNED (type)
1150 && cst.get_precision () < len * HOST_BITS_PER_WIDE_INT)
1151 {
1152 len--;
1153 TREE_INT_CST_ELT (nt, len)
1154 = zext_hwi (cst.elt (len),
1155 cst.get_precision () % HOST_BITS_PER_WIDE_INT);
1156 }
1157
1158 for (unsigned int i = 0; i < len; i++)
1159 TREE_INT_CST_ELT (nt, i) = cst.elt (i);
1160 TREE_TYPE (nt) = type;
1161 return nt;
1162 }
1163
1164 /* Create an INT_CST node with a LOW value sign extended to TYPE. */
1165
1166 tree
1167 build_int_cst (tree type, HOST_WIDE_INT low)
1168 {
1169 /* Support legacy code. */
1170 if (!type)
1171 type = integer_type_node;
1172
1173 return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
1174 }
1175
1176 tree
1177 build_int_cstu (tree type, unsigned HOST_WIDE_INT cst)
1178 {
1179 return wide_int_to_tree (type, wi::uhwi (cst, TYPE_PRECISION (type)));
1180 }
1181
1182 /* Create an INT_CST node with a LOW value sign extended to TYPE. */
1183
1184 tree
1185 build_int_cst_type (tree type, HOST_WIDE_INT low)
1186 {
1187 gcc_assert (type);
1188 return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
1189 }
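/* Illustrative sketch (not part of tree.c): build_int_cst sign extends its
   HOST_WIDE_INT argument to the type (build_int_cstu, above, zero extends
   instead), so passing -1 yields the all-ones value of the type regardless
   of its signedness.  Never called; for illustration only.  */

static void ATTRIBUTE_UNUSED
example_build_int_cst (void)
{
  tree s = build_int_cst (integer_type_node, -1);
  tree u = build_int_cst (unsigned_type_node, -1);
  gcc_checking_assert (integer_minus_onep (s) && integer_all_onesp (u));
}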
1190
1191 /* Construct a tree of type TYPE with the value given by CST. The
1192 signedness of CST is assumed to be the same as the signedness of TYPE. */
1193
1194 tree
1195 double_int_to_tree (tree type, double_int cst)
1196 {
1197 return wide_int_to_tree (type, widest_int::from (cst, TYPE_SIGN (type)));
1198 }
1199
1200 /* Force the wide_int CST to the range of the type TYPE by sign or
1201 zero extending it. OVERFLOWABLE indicates whether we are interested
1202 in overflow of the value: when >0 we only care about signed
1203 overflow, when <0 we care about any overflow. OVERFLOWED
1204 indicates whether overflow has already occurred. We force
1205 the result's value to be within the range of TYPE (by setting to 0
1206 or 1 all the bits outside the type's range). We set TREE_OVERFLOW
1207 on the result if
1208 OVERFLOWED is nonzero,
1209 or OVERFLOWABLE is >0 and signed overflow occurs,
1210 or OVERFLOWABLE is <0 and any overflow occurs.
1211 We return a new tree node for the extended wide_int. The node
1212 is shared if no overflow flags are set. */
1213
1214
1215 tree
1216 force_fit_type (tree type, const wide_int_ref &cst,
1217 int overflowable, bool overflowed)
1218 {
1219 signop sign = TYPE_SIGN (type);
1220
1221 /* If we need to set overflow flags, return a new unshared node. */
1222 if (overflowed || !wi::fits_to_tree_p (cst, type))
1223 {
1224 if (overflowed
1225 || overflowable < 0
1226 || (overflowable > 0 && sign == SIGNED))
1227 {
1228 wide_int tmp = wide_int::from (cst, TYPE_PRECISION (type), sign);
1229 tree t = build_new_int_cst (type, tmp);
1230 TREE_OVERFLOW (t) = 1;
1231 return t;
1232 }
1233 }
1234
1235 /* Else build a shared node. */
1236 return wide_int_to_tree (type, cst);
1237 }
1238
1239 /* These are the hash table functions for the hash table of shared
1240 INTEGER_CST nodes. */
1241
1242 /* Return the hash code of X, an INTEGER_CST. */
1243
1244 static hashval_t
1245 int_cst_hash_hash (const void *x)
1246 {
1247 const_tree const t = (const_tree) x;
1248 hashval_t code = htab_hash_pointer (TREE_TYPE (t));
1249 int i;
1250
1251 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
1252 code ^= TREE_INT_CST_ELT (t, i);
1253
1254 return code;
1255 }
1256
1257 /* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
1258 is the same as that represented by *Y, also an INTEGER_CST tree node. */
1259
1260 static int
1261 int_cst_hash_eq (const void *x, const void *y)
1262 {
1263 const_tree const xt = (const_tree) x;
1264 const_tree const yt = (const_tree) y;
1265
1266 if (TREE_TYPE (xt) != TREE_TYPE (yt)
1267 || TREE_INT_CST_NUNITS (xt) != TREE_INT_CST_NUNITS (yt)
1268 || TREE_INT_CST_EXT_NUNITS (xt) != TREE_INT_CST_EXT_NUNITS (yt))
1269 return false;
1270
1271 for (int i = 0; i < TREE_INT_CST_NUNITS (xt); i++)
1272 if (TREE_INT_CST_ELT (xt, i) != TREE_INT_CST_ELT (yt, i))
1273 return false;
1274
1275 return true;
1276 }
1277
1278 /* Create an INT_CST node of TYPE and value CST.
1279 The returned node is always shared. For small integers we use a
1280 per-type vector cache, for larger ones we use a single hash table.
1281 The value is extended from its precision according to the sign of
1282 the type to be a multiple of HOST_BITS_PER_WIDE_INT. This defines
1283 the upper bits and ensures that hashing and value equality based
1284 upon the underlying HOST_WIDE_INTs works without masking. */
1285
1286 tree
1287 wide_int_to_tree (tree type, const wide_int_ref &pcst)
1288 {
1289 tree t;
1290 int ix = -1;
1291 int limit = 0;
1292
1293 gcc_assert (type);
1294 unsigned int prec = TYPE_PRECISION (type);
1295 signop sgn = TYPE_SIGN (type);
1296
1297 /* Verify that everything is canonical. */
1298 int l = pcst.get_len ();
1299 if (l > 1)
1300 {
1301 if (pcst.elt (l - 1) == 0)
1302 gcc_checking_assert (pcst.elt (l - 2) < 0);
1303 if (pcst.elt (l - 1) == (HOST_WIDE_INT) -1)
1304 gcc_checking_assert (pcst.elt (l - 2) >= 0);
1305 }
1306
1307 wide_int cst = wide_int::from (pcst, prec, sgn);
1308 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1309
1310 if (ext_len == 1)
1311 {
1312 /* We just need to store a single HOST_WIDE_INT. */
1313 HOST_WIDE_INT hwi;
1314 if (TYPE_UNSIGNED (type))
1315 hwi = cst.to_uhwi ();
1316 else
1317 hwi = cst.to_shwi ();
1318
1319 switch (TREE_CODE (type))
1320 {
1321 case NULLPTR_TYPE:
1322 gcc_assert (hwi == 0);
1323 /* Fallthru. */
1324
1325 case POINTER_TYPE:
1326 case REFERENCE_TYPE:
1327 /* Cache NULL pointer. */
1328 if (hwi == 0)
1329 {
1330 limit = 1;
1331 ix = 0;
1332 }
1333 break;
1334
1335 case BOOLEAN_TYPE:
1336 /* Cache false or true. */
1337 limit = 2;
1338 if (hwi < 2)
1339 ix = hwi;
1340 break;
1341
1342 case INTEGER_TYPE:
1343 case OFFSET_TYPE:
1344 if (TYPE_SIGN (type) == UNSIGNED)
1345 {
1346 /* Cache [0, N). */
1347 limit = INTEGER_SHARE_LIMIT;
1348 if (IN_RANGE (hwi, 0, INTEGER_SHARE_LIMIT - 1))
1349 ix = hwi;
1350 }
1351 else
1352 {
1353 /* Cache [-1, N). */
1354 limit = INTEGER_SHARE_LIMIT + 1;
1355 if (IN_RANGE (hwi, -1, INTEGER_SHARE_LIMIT - 1))
1356 ix = hwi + 1;
1357 }
1358 break;
1359
1360 case ENUMERAL_TYPE:
1361 break;
1362
1363 default:
1364 gcc_unreachable ();
1365 }
1366
1367 if (ix >= 0)
1368 {
1369 /* Look for it in the type's vector of small shared ints. */
1370 if (!TYPE_CACHED_VALUES_P (type))
1371 {
1372 TYPE_CACHED_VALUES_P (type) = 1;
1373 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1374 }
1375
1376 t = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix);
1377 if (t)
1378 /* Make sure no one is clobbering the shared constant. */
1379 gcc_checking_assert (TREE_TYPE (t) == type
1380 && TREE_INT_CST_NUNITS (t) == 1
1381 && TREE_INT_CST_OFFSET_NUNITS (t) == 1
1382 && TREE_INT_CST_EXT_NUNITS (t) == 1
1383 && TREE_INT_CST_ELT (t, 0) == hwi);
1384 else
1385 {
1386 /* Create a new shared int. */
1387 t = build_new_int_cst (type, cst);
1388 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1389 }
1390 }
1391 else
1392 {
1393 /* Use the cache of larger shared ints, using int_cst_node as
1394 a temporary. */
1395 void **slot;
1396
1397 TREE_INT_CST_ELT (int_cst_node, 0) = hwi;
1398 TREE_TYPE (int_cst_node) = type;
1399
1400 slot = htab_find_slot (int_cst_hash_table, int_cst_node, INSERT);
1401 t = (tree) *slot;
1402 if (!t)
1403 {
1404 /* Insert this one into the hash table. */
1405 t = int_cst_node;
1406 *slot = t;
1407 /* Make a new node for next time round. */
1408 int_cst_node = make_int_cst (1, 1);
1409 }
1410 }
1411 }
1412 else
1413 {
1414 /* The value either hashes properly or we drop it on the floor
1415 for the gc to take care of. There will not be enough of them
1416 to worry about. */
1417 void **slot;
1418
1419 tree nt = build_new_int_cst (type, cst);
1420 slot = htab_find_slot (int_cst_hash_table, nt, INSERT);
1421 t = (tree) *slot;
1422 if (!t)
1423 {
1424 /* Insert this one into the hash table. */
1425 t = nt;
1426 *slot = t;
1427 }
1428 }
1429
1430 return t;
1431 }
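/* Illustrative sketch (not part of tree.c): because small values go through
   the per-type TYPE_CACHED_VALUES vector above, rebuilding the same small
   constant yields the identical node, so callers may compare such constants
   by pointer.  Assumes 7 < INTEGER_SHARE_LIMIT.  Never called; for
   illustration only.  */

static void ATTRIBUTE_UNUSED
example_shared_int_cst (void)
{
  tree a = build_int_cst (integer_type_node, 7);
  tree b = build_int_cst (integer_type_node, 7);
  gcc_checking_assert (a == b);
}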
1432
1433 void
1434 cache_integer_cst (tree t)
1435 {
1436 tree type = TREE_TYPE (t);
1437 int ix = -1;
1438 int limit = 0;
1439 int prec = TYPE_PRECISION (type);
1440
1441 gcc_assert (!TREE_OVERFLOW (t));
1442
1443 switch (TREE_CODE (type))
1444 {
1445 case NULLPTR_TYPE:
1446 gcc_assert (integer_zerop (t));
1447 /* Fallthru. */
1448
1449 case POINTER_TYPE:
1450 case REFERENCE_TYPE:
1451 /* Cache NULL pointer. */
1452 if (integer_zerop (t))
1453 {
1454 limit = 1;
1455 ix = 0;
1456 }
1457 break;
1458
1459 case BOOLEAN_TYPE:
1460 /* Cache false or true. */
1461 limit = 2;
1462 if (wi::ltu_p (t, 2))
1463 ix = TREE_INT_CST_ELT (t, 0);
1464 break;
1465
1466 case INTEGER_TYPE:
1467 case OFFSET_TYPE:
1468 if (TYPE_UNSIGNED (type))
1469 {
1470 /* Cache 0..N */
1471 limit = INTEGER_SHARE_LIMIT;
1472
1473 /* This is a little hokey, but if the prec is smaller than
1474 what is necessary to hold INTEGER_SHARE_LIMIT, then the
1475 obvious test will not get the correct answer. */
1476 if (prec < HOST_BITS_PER_WIDE_INT)
1477 {
1478 if (tree_to_uhwi (t) < (unsigned HOST_WIDE_INT) INTEGER_SHARE_LIMIT)
1479 ix = tree_to_uhwi (t);
1480 }
1481 else if (wi::ltu_p (t, INTEGER_SHARE_LIMIT))
1482 ix = tree_to_uhwi (t);
1483 }
1484 else
1485 {
1486 /* Cache -1..N */
1487 limit = INTEGER_SHARE_LIMIT + 1;
1488
1489 if (integer_minus_onep (t))
1490 ix = 0;
1491 else if (!wi::neg_p (t))
1492 {
1493 if (prec < HOST_BITS_PER_WIDE_INT)
1494 {
1495 if (tree_to_shwi (t) < INTEGER_SHARE_LIMIT)
1496 ix = tree_to_shwi (t) + 1;
1497 }
1498 else if (wi::ltu_p (t, INTEGER_SHARE_LIMIT))
1499 ix = tree_to_shwi (t) + 1;
1500 }
1501 }
1502 break;
1503
1504 case ENUMERAL_TYPE:
1505 break;
1506
1507 default:
1508 gcc_unreachable ();
1509 }
1510
1511 if (ix >= 0)
1512 {
1513 /* Look for it in the type's vector of small shared ints. */
1514 if (!TYPE_CACHED_VALUES_P (type))
1515 {
1516 TYPE_CACHED_VALUES_P (type) = 1;
1517 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1518 }
1519
1520 gcc_assert (TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) == NULL_TREE);
1521 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1522 }
1523 else
1524 {
1525 /* Use the cache of larger shared ints. */
1526 void **slot;
1527
1528 slot = htab_find_slot (int_cst_hash_table, t, INSERT);
1529 /* If there is already an entry for the number verify it's the
1530 same. */
1531 if (*slot)
1532 gcc_assert (wi::eq_p (tree (*slot), t));
1533 else
1534 /* Otherwise insert this one into the hash table. */
1535 *slot = t;
1536 }
1537 }
1538
1539
1540 /* Build an integer constant in TYPE such that the lowest BITS bits are ones
1541 and the rest are zeros. */
1542
1543 tree
1544 build_low_bits_mask (tree type, unsigned bits)
1545 {
1546 gcc_assert (bits <= TYPE_PRECISION (type));
1547
1548 return wide_int_to_tree (type, wi::mask (bits, false,
1549 TYPE_PRECISION (type)));
1550 }
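/* Illustrative sketch (not part of tree.c): a 4-bit low mask is simply the
   constant 15 in any type with at least four bits of precision.  Never
   called; for illustration only.  */

static void ATTRIBUTE_UNUSED
example_low_bits_mask (void)
{
  tree mask = build_low_bits_mask (unsigned_type_node, 4);
  gcc_checking_assert (tree_to_uhwi (mask) == 0xf);
}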
1551
1552 /* Check that X is an integer constant that can be expressed in an
1553 (unsigned) HOST_WIDE_INT without loss of precision. */
1554
1555 bool
1556 cst_and_fits_in_hwi (const_tree x)
1557 {
1558 if (TREE_CODE (x) != INTEGER_CST)
1559 return false;
1560
1561 if (TYPE_PRECISION (TREE_TYPE (x)) > HOST_BITS_PER_WIDE_INT)
1562 return false;
1563
1564 return TREE_INT_CST_NUNITS (x) == 1;
1565 }
1566
1567 /* Build a newly constructed TREE_VEC node of length LEN. */
1568
1569 tree
1570 make_vector_stat (unsigned len MEM_STAT_DECL)
1571 {
1572 tree t;
1573 unsigned length = (len - 1) * sizeof (tree) + sizeof (struct tree_vector);
1574
1575 record_node_allocation_statistics (VECTOR_CST, length);
1576
1577 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1578
1579 TREE_SET_CODE (t, VECTOR_CST);
1580 TREE_CONSTANT (t) = 1;
1581
1582 return t;
1583 }
1584
1585 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1586 are in a list pointed to by VALS. */
1587
1588 tree
1589 build_vector_stat (tree type, tree *vals MEM_STAT_DECL)
1590 {
1591 int over = 0;
1592 unsigned cnt = 0;
1593 tree v = make_vector (TYPE_VECTOR_SUBPARTS (type));
1594 TREE_TYPE (v) = type;
1595
1596 /* Iterate through elements and check for overflow. */
1597 for (cnt = 0; cnt < TYPE_VECTOR_SUBPARTS (type); ++cnt)
1598 {
1599 tree value = vals[cnt];
1600
1601 VECTOR_CST_ELT (v, cnt) = value;
1602
1603 /* Don't crash if we get an address constant. */
1604 if (!CONSTANT_CLASS_P (value))
1605 continue;
1606
1607 over |= TREE_OVERFLOW (value);
1608 }
1609
1610 TREE_OVERFLOW (v) = over;
1611 return v;
1612 }
1613
1614 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1615 are extracted from V, a vector of CONSTRUCTOR_ELT. */
1616
1617 tree
1618 build_vector_from_ctor (tree type, vec<constructor_elt, va_gc> *v)
1619 {
1620 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
1621 unsigned HOST_WIDE_INT idx;
1622 tree value;
1623
1624 FOR_EACH_CONSTRUCTOR_VALUE (v, idx, value)
1625 vec[idx] = value;
1626 for (; idx < TYPE_VECTOR_SUBPARTS (type); ++idx)
1627 vec[idx] = build_zero_cst (TREE_TYPE (type));
1628
1629 return build_vector (type, vec);
1630 }
1631
1632 /* Build a vector of type VECTYPE where every element is SC. */
1633 tree
1634 build_vector_from_val (tree vectype, tree sc)
1635 {
1636 int i, nunits = TYPE_VECTOR_SUBPARTS (vectype);
1637
1638 if (sc == error_mark_node)
1639 return sc;
1640
1641 /* Verify that the vector type is suitable for SC. Note that there
1642 is some inconsistency in the type-system with respect to restrict
1643 qualifications of pointers. Vector types always have a main-variant
1644 element type and the qualification is applied to the vector-type.
1645 So TREE_TYPE (vector-type) does not return a properly qualified
1646 vector element-type. */
1647 gcc_checking_assert (types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (sc)),
1648 TREE_TYPE (vectype)));
1649
1650 if (CONSTANT_CLASS_P (sc))
1651 {
1652 tree *v = XALLOCAVEC (tree, nunits);
1653 for (i = 0; i < nunits; ++i)
1654 v[i] = sc;
1655 return build_vector (vectype, v);
1656 }
1657 else
1658 {
1659 vec<constructor_elt, va_gc> *v;
1660 vec_alloc (v, nunits);
1661 for (i = 0; i < nunits; ++i)
1662 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, sc);
1663 return build_constructor (vectype, v);
1664 }
1665 }
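/* Illustrative sketch (not part of tree.c): splatting a constant element
   gives a VECTOR_CST, while splatting a non-constant gives a CONSTRUCTOR
   instead.  The V4SI-style vector type is built locally so the element
   type matches the scalar, as required by the assert above.  Never called;
   for illustration only.  */

static void ATTRIBUTE_UNUSED
example_splat (void)
{
  tree v4si = build_vector_type (integer_type_node, 4);
  tree splat = build_vector_from_val (v4si,
				      build_int_cst (integer_type_node, 3));
  gcc_checking_assert (TREE_CODE (splat) == VECTOR_CST);
}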
1666
1667 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1668 are in the vec pointed to by VALS. */
1669 tree
1670 build_constructor (tree type, vec<constructor_elt, va_gc> *vals)
1671 {
1672 tree c = make_node (CONSTRUCTOR);
1673 unsigned int i;
1674 constructor_elt *elt;
1675 bool constant_p = true;
1676 bool side_effects_p = false;
1677
1678 TREE_TYPE (c) = type;
1679 CONSTRUCTOR_ELTS (c) = vals;
1680
1681 FOR_EACH_VEC_SAFE_ELT (vals, i, elt)
1682 {
1683 /* Mostly ctors will have elts that don't have side-effects, so
1684 the usual case is to scan all the elements. Hence a single
1685 loop for both const and side effects, rather than one loop
1686 each (with early outs). */
1687 if (!TREE_CONSTANT (elt->value))
1688 constant_p = false;
1689 if (TREE_SIDE_EFFECTS (elt->value))
1690 side_effects_p = true;
1691 }
1692
1693 TREE_SIDE_EFFECTS (c) = side_effects_p;
1694 TREE_CONSTANT (c) = constant_p;
1695
1696 return c;
1697 }
1698
1699 /* Build a CONSTRUCTOR node made of a single initializer, with the specified
1700 INDEX and VALUE. */
1701 tree
1702 build_constructor_single (tree type, tree index, tree value)
1703 {
1704 vec<constructor_elt, va_gc> *v;
1705 constructor_elt elt = {index, value};
1706
1707 vec_alloc (v, 1);
1708 v->quick_push (elt);
1709
1710 return build_constructor (type, v);
1711 }
1712
1713
1714 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1715 are in a list pointed to by VALS. */
1716 tree
1717 build_constructor_from_list (tree type, tree vals)
1718 {
1719 tree t;
1720 vec<constructor_elt, va_gc> *v = NULL;
1721
1722 if (vals)
1723 {
1724 vec_alloc (v, list_length (vals));
1725 for (t = vals; t; t = TREE_CHAIN (t))
1726 CONSTRUCTOR_APPEND_ELT (v, TREE_PURPOSE (t), TREE_VALUE (t));
1727 }
1728
1729 return build_constructor (type, v);
1730 }
1731
1732 /* Return a new CONSTRUCTOR node whose type is TYPE. NELTS is the number
1733 of elements, provided as index/value pairs. */
1734
1735 tree
1736 build_constructor_va (tree type, int nelts, ...)
1737 {
1738 vec<constructor_elt, va_gc> *v = NULL;
1739 va_list p;
1740
1741 va_start (p, nelts);
1742 vec_alloc (v, nelts);
1743 while (nelts--)
1744 {
1745 tree index = va_arg (p, tree);
1746 tree value = va_arg (p, tree);
1747 CONSTRUCTOR_APPEND_ELT (v, index, value);
1748 }
1749 va_end (p);
1750 return build_constructor (type, v);
1751 }
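/* Illustrative sketch (not part of tree.c): building the initializer
   { [0] = 1, [1] = 2 } for a caller-supplied array type.  ARRAY_TYPE is a
   hypothetical parameter, e.g. an int[2] type built by the front end;
   indexes and values alternate in the variadic argument list.  Never
   called; for illustration only.  */

static tree ATTRIBUTE_UNUSED
example_build_constructor (tree array_type)
{
  return build_constructor_va (array_type, 2,
			       size_int (0), build_int_cst (integer_type_node, 1),
			       size_int (1), build_int_cst (integer_type_node, 2));
}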
1752
1753 /* Return a new FIXED_CST node whose type is TYPE and value is F. */
1754
1755 tree
1756 build_fixed (tree type, FIXED_VALUE_TYPE f)
1757 {
1758 tree v;
1759 FIXED_VALUE_TYPE *fp;
1760
1761 v = make_node (FIXED_CST);
1762 fp = ggc_alloc<fixed_value> ();
1763 memcpy (fp, &f, sizeof (FIXED_VALUE_TYPE));
1764
1765 TREE_TYPE (v) = type;
1766 TREE_FIXED_CST_PTR (v) = fp;
1767 return v;
1768 }
1769
1770 /* Return a new REAL_CST node whose type is TYPE and value is D. */
1771
1772 tree
1773 build_real (tree type, REAL_VALUE_TYPE d)
1774 {
1775 tree v;
1776 REAL_VALUE_TYPE *dp;
1777 int overflow = 0;
1778
1779 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
1780 Consider doing it via real_convert now. */
1781
1782 v = make_node (REAL_CST);
1783 dp = ggc_alloc<real_value> ();
1784 memcpy (dp, &d, sizeof (REAL_VALUE_TYPE));
1785
1786 TREE_TYPE (v) = type;
1787 TREE_REAL_CST_PTR (v) = dp;
1788 TREE_OVERFLOW (v) = overflow;
1789 return v;
1790 }
1791
1792 /* Return a REAL_VALUE_TYPE corresponding to the integer value of the
1793 INTEGER_CST node I, using the mode of TYPE if TYPE is non-null. */
1794
1795 REAL_VALUE_TYPE
1796 real_value_from_int_cst (const_tree type, const_tree i)
1797 {
1798 REAL_VALUE_TYPE d;
1799
1800 /* Clear all bits of the real value type so that we can later do
1801 bitwise comparisons to see if two values are the same. */
1802 memset (&d, 0, sizeof d);
1803
1804 real_from_integer (&d, type ? TYPE_MODE (type) : VOIDmode, i,
1805 TYPE_SIGN (TREE_TYPE (i)));
1806 return d;
1807 }
1808
1809 /* Given a tree representing an integer constant I, return a tree
1810 representing the same value as a floating-point constant of type TYPE. */
1811
1812 tree
1813 build_real_from_int_cst (tree type, const_tree i)
1814 {
1815 tree v;
1816 int overflow = TREE_OVERFLOW (i);
1817
1818 v = build_real (type, real_value_from_int_cst (type, i));
1819
1820 TREE_OVERFLOW (v) |= overflow;
1821 return v;
1822 }
1823
1824 /* Return a newly constructed STRING_CST node whose value is
1825 the LEN characters at STR.
1826 Note that for a C string literal, LEN should include the trailing NUL.
1827 The TREE_TYPE is not initialized. */
1828
1829 tree
1830 build_string (int len, const char *str)
1831 {
1832 tree s;
1833 size_t length;
1834
1835 /* Do not waste bytes provided by padding of struct tree_string. */
1836 length = len + offsetof (struct tree_string, str) + 1;
1837
1838 record_node_allocation_statistics (STRING_CST, length);
1839
1840 s = (tree) ggc_internal_alloc (length);
1841
1842 memset (s, 0, sizeof (struct tree_typed));
1843 TREE_SET_CODE (s, STRING_CST);
1844 TREE_CONSTANT (s) = 1;
1845 TREE_STRING_LENGTH (s) = len;
1846 memcpy (s->string.str, str, len);
1847 s->string.str[len] = '\0';
1848
1849 return s;
1850 }
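/* Illustrative sketch (not part of tree.c): for a C string literal the
   length passed in counts the trailing NUL, and the caller is responsible
   for attaching a type afterwards, here a plain char[3] array type.
   Never called; for illustration only.  */

static tree ATTRIBUTE_UNUSED
example_string_cst (void)
{
  tree s = build_string (3, "hi");	/* "hi" plus its trailing NUL.  */
  TREE_TYPE (s) = build_array_type (char_type_node,
				    build_index_type (size_int (2)));
  return s;
}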
1851
1852 /* Return a newly constructed COMPLEX_CST node whose value is
1853 specified by the real and imaginary parts REAL and IMAG.
1854 Both REAL and IMAG should be constant nodes. TYPE, if specified,
1855 will be the type of the COMPLEX_CST; otherwise a new type will be made. */
1856
1857 tree
1858 build_complex (tree type, tree real, tree imag)
1859 {
1860 tree t = make_node (COMPLEX_CST);
1861
1862 TREE_REALPART (t) = real;
1863 TREE_IMAGPART (t) = imag;
1864 TREE_TYPE (t) = type ? type : build_complex_type (TREE_TYPE (real));
1865 TREE_OVERFLOW (t) = TREE_OVERFLOW (real) | TREE_OVERFLOW (imag);
1866 return t;
1867 }
1868
1869 /* Return a constant of arithmetic type TYPE which is the
1870 multiplicative identity of the set TYPE. */
1871
1872 tree
1873 build_one_cst (tree type)
1874 {
1875 switch (TREE_CODE (type))
1876 {
1877 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1878 case POINTER_TYPE: case REFERENCE_TYPE:
1879 case OFFSET_TYPE:
1880 return build_int_cst (type, 1);
1881
1882 case REAL_TYPE:
1883 return build_real (type, dconst1);
1884
1885 case FIXED_POINT_TYPE:
1886 /* We can only generate 1 for accum types. */
1887 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
1888 return build_fixed (type, FCONST1 (TYPE_MODE (type)));
1889
1890 case VECTOR_TYPE:
1891 {
1892 tree scalar = build_one_cst (TREE_TYPE (type));
1893
1894 return build_vector_from_val (type, scalar);
1895 }
1896
1897 case COMPLEX_TYPE:
1898 return build_complex (type,
1899 build_one_cst (TREE_TYPE (type)),
1900 build_zero_cst (TREE_TYPE (type)));
1901
1902 default:
1903 gcc_unreachable ();
1904 }
1905 }
1906
1907 /* Return an integer constant of type TYPE with all bits in its precision
1908 set, or a complex or vector whose subparts are such integers. */
1909
1910 tree
1911 build_all_ones_cst (tree type)
1912 {
1913 if (TREE_CODE (type) == COMPLEX_TYPE)
1914 {
1915 tree scalar = build_all_ones_cst (TREE_TYPE (type));
1916 return build_complex (type, scalar, scalar);
1917 }
1918 else
1919 return build_minus_one_cst (type);
1920 }
1921
1922 /* Return a constant of arithmetic type TYPE which is the
1923 opposite of the multiplicative identity of the set TYPE. */
1924
1925 tree
1926 build_minus_one_cst (tree type)
1927 {
1928 switch (TREE_CODE (type))
1929 {
1930 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1931 case POINTER_TYPE: case REFERENCE_TYPE:
1932 case OFFSET_TYPE:
1933 return build_int_cst (type, -1);
1934
1935 case REAL_TYPE:
1936 return build_real (type, dconstm1);
1937
1938 case FIXED_POINT_TYPE:
1939 /* We can only generate -1 for accum types. */
1940 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
1941 return build_fixed (type, fixed_from_double_int (double_int_minus_one,
1942 TYPE_MODE (type)));
1943
1944 case VECTOR_TYPE:
1945 {
1946 tree scalar = build_minus_one_cst (TREE_TYPE (type));
1947
1948 return build_vector_from_val (type, scalar);
1949 }
1950
1951 case COMPLEX_TYPE:
1952 return build_complex (type,
1953 build_minus_one_cst (TREE_TYPE (type)),
1954 build_zero_cst (TREE_TYPE (type)));
1955
1956 default:
1957 gcc_unreachable ();
1958 }
1959 }
1960
1961 /* Build 0 constant of type TYPE. This is used by constructor folding
1962 and thus the constant should be represented in memory by
1963 zero(es). */
1964
1965 tree
1966 build_zero_cst (tree type)
1967 {
1968 switch (TREE_CODE (type))
1969 {
1970 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1971 case POINTER_TYPE: case REFERENCE_TYPE:
1972 case OFFSET_TYPE: case NULLPTR_TYPE:
1973 return build_int_cst (type, 0);
1974
1975 case REAL_TYPE:
1976 return build_real (type, dconst0);
1977
1978 case FIXED_POINT_TYPE:
1979 return build_fixed (type, FCONST0 (TYPE_MODE (type)));
1980
1981 case VECTOR_TYPE:
1982 {
1983 tree scalar = build_zero_cst (TREE_TYPE (type));
1984
1985 return build_vector_from_val (type, scalar);
1986 }
1987
1988 case COMPLEX_TYPE:
1989 {
1990 tree zero = build_zero_cst (TREE_TYPE (type));
1991
1992 return build_complex (type, zero, zero);
1993 }
1994
1995 default:
1996 if (!AGGREGATE_TYPE_P (type))
1997 return fold_convert (type, integer_zero_node);
1998 return build_constructor (type, NULL);
1999 }
2000 }
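/* Editor's note: an illustrative, uncompiled sketch (the helper name is
   hypothetical, not part of this file) of how the constant builders above
   compose for any arithmetic TYPE, including complex and vector types.  */
#if 0
static void
example_identity_constants (tree type)
{
  tree zero = build_zero_cst (type);            /* additive identity */
  tree one = build_one_cst (type);              /* multiplicative identity */
  tree minus_one = build_minus_one_cst (type);  /* its opposite */
  tree all_ones = build_all_ones_cst (type);    /* all bits set */

  /* For vector and complex types each constant is assembled elementwise
     from the corresponding scalar constant, as the switches above show.  */
  (void) zero; (void) one; (void) minus_one; (void) all_ones;
}
#endif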
2001
2002
2003 /* Build a BINFO with LEN language slots. */
2004
2005 tree
2006 make_tree_binfo_stat (unsigned base_binfos MEM_STAT_DECL)
2007 {
2008 tree t;
2009 size_t length = (offsetof (struct tree_binfo, base_binfos)
2010 + vec<tree, va_gc>::embedded_size (base_binfos));
2011
2012 record_node_allocation_statistics (TREE_BINFO, length);
2013
2014 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
2015
2016 memset (t, 0, offsetof (struct tree_binfo, base_binfos));
2017
2018 TREE_SET_CODE (t, TREE_BINFO);
2019
2020 BINFO_BASE_BINFOS (t)->embedded_init (base_binfos);
2021
2022 return t;
2023 }
2024
2025 /* Create a CASE_LABEL_EXPR tree node and return it. */
2026
2027 tree
2028 build_case_label (tree low_value, tree high_value, tree label_decl)
2029 {
2030 tree t = make_node (CASE_LABEL_EXPR);
2031
2032 TREE_TYPE (t) = void_type_node;
2033 SET_EXPR_LOCATION (t, DECL_SOURCE_LOCATION (label_decl));
2034
2035 CASE_LOW (t) = low_value;
2036 CASE_HIGH (t) = high_value;
2037 CASE_LABEL (t) = label_decl;
2038 CASE_CHAIN (t) = NULL_TREE;
2039
2040 return t;
2041 }
2042
2043 /* Build a newly constructed INTEGER_CST node. LEN and EXT_LEN are the
2044 values of TREE_INT_CST_NUNITS and TREE_INT_CST_EXT_NUNITS respectively.
2045 The latter determines the length of the HOST_WIDE_INT vector. */
2046
2047 tree
2048 make_int_cst_stat (int len, int ext_len MEM_STAT_DECL)
2049 {
2050 tree t;
2051 int length = ((ext_len - 1) * sizeof (HOST_WIDE_INT)
2052 + sizeof (struct tree_int_cst));
2053
2054 gcc_assert (len);
2055 record_node_allocation_statistics (INTEGER_CST, length);
2056
2057 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2058
2059 TREE_SET_CODE (t, INTEGER_CST);
2060 TREE_INT_CST_NUNITS (t) = len;
2061 TREE_INT_CST_EXT_NUNITS (t) = ext_len;
2062 /* to_offset can only be applied to trees that are offset_int-sized
2063 or smaller. EXT_LEN is correct if it fits, otherwise the constant
2064 must be exactly the precision of offset_int and so LEN is correct. */
2065 if (ext_len <= OFFSET_INT_ELTS)
2066 TREE_INT_CST_OFFSET_NUNITS (t) = ext_len;
2067 else
2068 TREE_INT_CST_OFFSET_NUNITS (t) = len;
2069
2070 TREE_CONSTANT (t) = 1;
2071
2072 return t;
2073 }
2074
2075 /* Build a newly constructed TREE_VEC node of length LEN. */
2076
2077 tree
2078 make_tree_vec_stat (int len MEM_STAT_DECL)
2079 {
2080 tree t;
2081 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2082
2083 record_node_allocation_statistics (TREE_VEC, length);
2084
2085 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2086
2087 TREE_SET_CODE (t, TREE_VEC);
2088 TREE_VEC_LENGTH (t) = len;
2089
2090 return t;
2091 }
2092
2093 /* Grow a TREE_VEC node to new length LEN. */
2094
2095 tree
2096 grow_tree_vec_stat (tree v, int len MEM_STAT_DECL)
2097 {
2098 gcc_assert (TREE_CODE (v) == TREE_VEC);
2099
2100 int oldlen = TREE_VEC_LENGTH (v);
2101 gcc_assert (len > oldlen);
2102
2103 int oldlength = (oldlen - 1) * sizeof (tree) + sizeof (struct tree_vec);
2104 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2105
2106 record_node_allocation_statistics (TREE_VEC, length - oldlength);
2107
2108 v = (tree) ggc_realloc (v, length PASS_MEM_STAT);
2109
2110 TREE_VEC_LENGTH (v) = len;
2111
2112 return v;
2113 }
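/* Editor's note: a small uncompiled sketch of the TREE_VEC constructors
   above, assuming the usual make_tree_vec/grow_tree_vec wrapper macros
   from tree.h around the *_stat functions.  */
#if 0
static tree
example_tree_vec (tree a, tree b, tree c)
{
  tree v = make_tree_vec (2);   /* allocate a 2-element vector */
  TREE_VEC_ELT (v, 0) = a;
  TREE_VEC_ELT (v, 1) = b;

  v = grow_tree_vec (v, 3);     /* reallocates, so the node may move */
  TREE_VEC_ELT (v, 2) = c;

  gcc_assert (TREE_VEC_LENGTH (v) == 3);
  return v;
}
#endif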
2114 \f
2115 /* Return 1 if EXPR is the integer constant zero or a complex constant
2116 of zero. */
2117
2118 int
2119 integer_zerop (const_tree expr)
2120 {
2121 STRIP_NOPS (expr);
2122
2123 switch (TREE_CODE (expr))
2124 {
2125 case INTEGER_CST:
2126 return wi::eq_p (expr, 0);
2127 case COMPLEX_CST:
2128 return (integer_zerop (TREE_REALPART (expr))
2129 && integer_zerop (TREE_IMAGPART (expr)));
2130 case VECTOR_CST:
2131 {
2132 unsigned i;
2133 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2134 if (!integer_zerop (VECTOR_CST_ELT (expr, i)))
2135 return false;
2136 return true;
2137 }
2138 default:
2139 return false;
2140 }
2141 }
2142
2143 /* Return 1 if EXPR is the integer constant one or the corresponding
2144 complex constant. */
2145
2146 int
2147 integer_onep (const_tree expr)
2148 {
2149 STRIP_NOPS (expr);
2150
2151 switch (TREE_CODE (expr))
2152 {
2153 case INTEGER_CST:
2154 return wi::eq_p (wi::to_widest (expr), 1);
2155 case COMPLEX_CST:
2156 return (integer_onep (TREE_REALPART (expr))
2157 && integer_zerop (TREE_IMAGPART (expr)));
2158 case VECTOR_CST:
2159 {
2160 unsigned i;
2161 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2162 if (!integer_onep (VECTOR_CST_ELT (expr, i)))
2163 return false;
2164 return true;
2165 }
2166 default:
2167 return false;
2168 }
2169 }
2170
2171 /* Return 1 if EXPR is an integer containing all 1's in as much precision as
2172 it contains, or a complex or vector whose subparts are such integers. */
2173
2174 int
2175 integer_all_onesp (const_tree expr)
2176 {
2177 STRIP_NOPS (expr);
2178
2179 if (TREE_CODE (expr) == COMPLEX_CST
2180 && integer_all_onesp (TREE_REALPART (expr))
2181 && integer_all_onesp (TREE_IMAGPART (expr)))
2182 return 1;
2183
2184 else if (TREE_CODE (expr) == VECTOR_CST)
2185 {
2186 unsigned i;
2187 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2188 if (!integer_all_onesp (VECTOR_CST_ELT (expr, i)))
2189 return 0;
2190 return 1;
2191 }
2192
2193 else if (TREE_CODE (expr) != INTEGER_CST)
2194 return 0;
2195
2196 return wi::max_value (TYPE_PRECISION (TREE_TYPE (expr)), UNSIGNED) == expr;
2197 }
2198
2199 /* Return 1 if EXPR is the integer constant minus one. */
2200
2201 int
2202 integer_minus_onep (const_tree expr)
2203 {
2204 STRIP_NOPS (expr);
2205
2206 if (TREE_CODE (expr) == COMPLEX_CST)
2207 return (integer_all_onesp (TREE_REALPART (expr))
2208 && integer_zerop (TREE_IMAGPART (expr)));
2209 else
2210 return integer_all_onesp (expr);
2211 }
2212
2213 /* Return 1 if EXPR is an integer constant that is a power of 2 (i.e., has only
2214 one bit on). */
2215
2216 int
2217 integer_pow2p (const_tree expr)
2218 {
2219 STRIP_NOPS (expr);
2220
2221 if (TREE_CODE (expr) == COMPLEX_CST
2222 && integer_pow2p (TREE_REALPART (expr))
2223 && integer_zerop (TREE_IMAGPART (expr)))
2224 return 1;
2225
2226 if (TREE_CODE (expr) != INTEGER_CST)
2227 return 0;
2228
2229 return wi::popcount (expr) == 1;
2230 }
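/* Editor's note: an illustrative summary of the predicates above
   (uncompiled; the type is arbitrary):

     tree t = build_int_cst (integer_type_node, 8);
     integer_zerop (t)  => 0
     integer_onep (t)   => 0
     integer_pow2p (t)  => 1   (exactly one bit set)

   Each predicate strips no-op conversions first (STRIP_NOPS), so a value
   wrapped in such a conversion is still recognized.  */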
2231
2232 /* Return 1 if EXPR is an integer constant other than zero or a
2233 complex constant other than zero. */
2234
2235 int
2236 integer_nonzerop (const_tree expr)
2237 {
2238 STRIP_NOPS (expr);
2239
2240 return ((TREE_CODE (expr) == INTEGER_CST
2241 && !wi::eq_p (expr, 0))
2242 || (TREE_CODE (expr) == COMPLEX_CST
2243 && (integer_nonzerop (TREE_REALPART (expr))
2244 || integer_nonzerop (TREE_IMAGPART (expr)))));
2245 }
2246
2247 /* Return 1 if EXPR is the fixed-point constant zero. */
2248
2249 int
2250 fixed_zerop (const_tree expr)
2251 {
2252 return (TREE_CODE (expr) == FIXED_CST
2253 && TREE_FIXED_CST (expr).data.is_zero ());
2254 }
2255
2256 /* Return the base-2 logarithm of a tree node known to be a
2257 power of two. */
2258
2259 int
2260 tree_log2 (const_tree expr)
2261 {
2262 STRIP_NOPS (expr);
2263
2264 if (TREE_CODE (expr) == COMPLEX_CST)
2265 return tree_log2 (TREE_REALPART (expr));
2266
2267 return wi::exact_log2 (expr);
2268 }
2269
2270 /* Similar, but return the largest integer Y such that 2 ** Y is less
2271 than or equal to EXPR. */
2272
2273 int
2274 tree_floor_log2 (const_tree expr)
2275 {
2276 STRIP_NOPS (expr);
2277
2278 if (TREE_CODE (expr) == COMPLEX_CST)
2279 return tree_log2 (TREE_REALPART (expr));
2280
2281 return wi::floor_log2 (expr);
2282 }
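/* Editor's note: for an INTEGER_CST with value 8, tree_log2 and
   tree_floor_log2 both return 3; for value 10, which is not a power of
   two, only tree_floor_log2 is meaningful and returns 3, the largest Y
   with 2 ** Y <= 10.  */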
2283
2284 /* Return number of known trailing zero bits in EXPR, or, if the value of
2285 EXPR is known to be zero, the precision of it's type. */
2286
2287 unsigned int
2288 tree_ctz (const_tree expr)
2289 {
2290 if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
2291 && !POINTER_TYPE_P (TREE_TYPE (expr)))
2292 return 0;
2293
2294 unsigned int ret1, ret2, prec = TYPE_PRECISION (TREE_TYPE (expr));
2295 switch (TREE_CODE (expr))
2296 {
2297 case INTEGER_CST:
2298 ret1 = wi::ctz (expr);
2299 return MIN (ret1, prec);
2300 case SSA_NAME:
2301 ret1 = wi::ctz (get_nonzero_bits (expr));
2302 return MIN (ret1, prec);
2303 case PLUS_EXPR:
2304 case MINUS_EXPR:
2305 case BIT_IOR_EXPR:
2306 case BIT_XOR_EXPR:
2307 case MIN_EXPR:
2308 case MAX_EXPR:
2309 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2310 if (ret1 == 0)
2311 return ret1;
2312 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2313 return MIN (ret1, ret2);
2314 case POINTER_PLUS_EXPR:
2315 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2316 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2317 /* The second operand is sizetype, which could in theory be
2318 wider than the pointer's precision. Make sure we never
2319 return more than prec. */
2320 ret2 = MIN (ret2, prec);
2321 return MIN (ret1, ret2);
2322 case BIT_AND_EXPR:
2323 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2324 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2325 return MAX (ret1, ret2);
2326 case MULT_EXPR:
2327 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2328 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2329 return MIN (ret1 + ret2, prec);
2330 case LSHIFT_EXPR:
2331 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2332 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2333 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2334 {
2335 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2336 return MIN (ret1 + ret2, prec);
2337 }
2338 return ret1;
2339 case RSHIFT_EXPR:
2340 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2341 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2342 {
2343 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2344 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2345 if (ret1 > ret2)
2346 return ret1 - ret2;
2347 }
2348 return 0;
2349 case TRUNC_DIV_EXPR:
2350 case CEIL_DIV_EXPR:
2351 case FLOOR_DIV_EXPR:
2352 case ROUND_DIV_EXPR:
2353 case EXACT_DIV_EXPR:
2354 if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
2355 && tree_int_cst_sgn (TREE_OPERAND (expr, 1)) == 1)
2356 {
2357 int l = tree_log2 (TREE_OPERAND (expr, 1));
2358 if (l >= 0)
2359 {
2360 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2361 ret2 = l;
2362 if (ret1 > ret2)
2363 return ret1 - ret2;
2364 }
2365 }
2366 return 0;
2367 CASE_CONVERT:
2368 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2369 if (ret1 && ret1 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
2370 ret1 = prec;
2371 return MIN (ret1, prec);
2372 case SAVE_EXPR:
2373 return tree_ctz (TREE_OPERAND (expr, 0));
2374 case COND_EXPR:
2375 ret1 = tree_ctz (TREE_OPERAND (expr, 1));
2376 if (ret1 == 0)
2377 return 0;
2378 ret2 = tree_ctz (TREE_OPERAND (expr, 2));
2379 return MIN (ret1, ret2);
2380 case COMPOUND_EXPR:
2381 return tree_ctz (TREE_OPERAND (expr, 1));
2382 case ADDR_EXPR:
2383 ret1 = get_pointer_alignment (CONST_CAST_TREE (expr));
2384 if (ret1 > BITS_PER_UNIT)
2385 {
2386 ret1 = ctz_hwi (ret1 / BITS_PER_UNIT);
2387 return MIN (ret1, prec);
2388 }
2389 return 0;
2390 default:
2391 return 0;
2392 }
2393 }
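/* Editor's note: a worked example of the recursion above.  For the
   expression (x * 8) << 2 of type unsigned int, where nothing is known
   about x:

     tree_ctz (8)             = 3              INTEGER_CST case
     tree_ctz (x)             = 0              default case, nothing known
     tree_ctz (x * 8)         = 0 + 3 = 3      MULT_EXPR adds the counts
     tree_ctz ((x * 8) << 2)  = 3 + 2 = 5      LSHIFT_EXPR adds the shift

   with every intermediate result capped at the precision of the type.  */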
2394
2395 /* Return 1 if EXPR is the real constant zero. Trailing zeroes matter for
2396 decimal float constants, so don't return 1 for them. */
2397
2398 int
2399 real_zerop (const_tree expr)
2400 {
2401 STRIP_NOPS (expr);
2402
2403 switch (TREE_CODE (expr))
2404 {
2405 case REAL_CST:
2406 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconst0)
2407 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2408 case COMPLEX_CST:
2409 return real_zerop (TREE_REALPART (expr))
2410 && real_zerop (TREE_IMAGPART (expr));
2411 case VECTOR_CST:
2412 {
2413 unsigned i;
2414 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2415 if (!real_zerop (VECTOR_CST_ELT (expr, i)))
2416 return false;
2417 return true;
2418 }
2419 default:
2420 return false;
2421 }
2422 }
2423
2424 /* Return 1 if EXPR is the real constant one in real or complex form.
2425 Trailing zeroes matter for decimal float constants, so don't return
2426 1 for them. */
2427
2428 int
2429 real_onep (const_tree expr)
2430 {
2431 STRIP_NOPS (expr);
2432
2433 switch (TREE_CODE (expr))
2434 {
2435 case REAL_CST:
2436 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconst1)
2437 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2438 case COMPLEX_CST:
2439 return real_onep (TREE_REALPART (expr))
2440 && real_zerop (TREE_IMAGPART (expr));
2441 case VECTOR_CST:
2442 {
2443 unsigned i;
2444 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2445 if (!real_onep (VECTOR_CST_ELT (expr, i)))
2446 return false;
2447 return true;
2448 }
2449 default:
2450 return false;
2451 }
2452 }
2453
2454 /* Return 1 if EXPR is the real constant minus one. Trailing zeroes
2455 matter for decimal float constants, so don't return 1 for them. */
2456
2457 int
2458 real_minus_onep (const_tree expr)
2459 {
2460 STRIP_NOPS (expr);
2461
2462 switch (TREE_CODE (expr))
2463 {
2464 case REAL_CST:
2465 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconstm1)
2466 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2467 case COMPLEX_CST:
2468 return real_minus_onep (TREE_REALPART (expr))
2469 && real_zerop (TREE_IMAGPART (expr));
2470 case VECTOR_CST:
2471 {
2472 unsigned i;
2473 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2474 if (!real_minus_onep (VECTOR_CST_ELT (expr, i)))
2475 return false;
2476 return true;
2477 }
2478 default:
2479 return false;
2480 }
2481 }
2482
2483 /* Nonzero if EXP is a constant or a cast of a constant. */
2484
2485 int
2486 really_constant_p (const_tree exp)
2487 {
2488 /* This is not quite the same as STRIP_NOPS. It does more. */
2489 while (CONVERT_EXPR_P (exp)
2490 || TREE_CODE (exp) == NON_LVALUE_EXPR)
2491 exp = TREE_OPERAND (exp, 0);
2492 return TREE_CONSTANT (exp);
2493 }
2494 \f
2495 /* Return first list element whose TREE_VALUE is ELEM.
2496 Return 0 if ELEM is not in LIST. */
2497
2498 tree
2499 value_member (tree elem, tree list)
2500 {
2501 while (list)
2502 {
2503 if (elem == TREE_VALUE (list))
2504 return list;
2505 list = TREE_CHAIN (list);
2506 }
2507 return NULL_TREE;
2508 }
2509
2510 /* Return first list element whose TREE_PURPOSE is ELEM.
2511 Return 0 if ELEM is not in LIST. */
2512
2513 tree
2514 purpose_member (const_tree elem, tree list)
2515 {
2516 while (list)
2517 {
2518 if (elem == TREE_PURPOSE (list))
2519 return list;
2520 list = TREE_CHAIN (list);
2521 }
2522 return NULL_TREE;
2523 }
2524
2525 /* Return true if ELEM is in V. */
2526
2527 bool
2528 vec_member (const_tree elem, vec<tree, va_gc> *v)
2529 {
2530 unsigned ix;
2531 tree t;
2532 FOR_EACH_VEC_SAFE_ELT (v, ix, t)
2533 if (elem == t)
2534 return true;
2535 return false;
2536 }
2537
2538 /* Returns element number IDX (zero-origin) of chain CHAIN, or
2539 NULL_TREE. */
2540
2541 tree
2542 chain_index (int idx, tree chain)
2543 {
2544 for (; chain && idx > 0; --idx)
2545 chain = TREE_CHAIN (chain);
2546 return chain;
2547 }
2548
2549 /* Return nonzero if ELEM is part of the chain CHAIN. */
2550
2551 int
2552 chain_member (const_tree elem, const_tree chain)
2553 {
2554 while (chain)
2555 {
2556 if (elem == chain)
2557 return 1;
2558 chain = DECL_CHAIN (chain);
2559 }
2560
2561 return 0;
2562 }
2563
2564 /* Return the length of a chain of nodes chained through TREE_CHAIN.
2565 We expect a null pointer to mark the end of the chain.
2566 This is the Lisp primitive `length'. */
2567
2568 int
2569 list_length (const_tree t)
2570 {
2571 const_tree p = t;
2572 #ifdef ENABLE_TREE_CHECKING
2573 const_tree q = t;
2574 #endif
2575 int len = 0;
2576
2577 while (p)
2578 {
2579 p = TREE_CHAIN (p);
2580 #ifdef ENABLE_TREE_CHECKING
2581 if (len % 2)
2582 q = TREE_CHAIN (q);
2583 gcc_assert (p != q);
2584 #endif
2585 len++;
2586 }
2587
2588 return len;
2589 }
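/* Editor's note: in the ENABLE_TREE_CHECKING variant above, Q advances
   one link for every two links P advances; if the chain is circular the
   two pointers eventually coincide and the assertion fires instead of
   the loop running forever (a tortoise-and-hare cycle check).  */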
2590
2591 /* Returns the first FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
2592 UNION_TYPE TYPE, or NULL_TREE if none. */
2593
2594 tree
2595 first_field (const_tree type)
2596 {
2597 tree t = TYPE_FIELDS (type);
2598 while (t && TREE_CODE (t) != FIELD_DECL)
2599 t = TREE_CHAIN (t);
2600 return t;
2601 }
2602
2603 /* Concatenate two chains of nodes (chained through TREE_CHAIN)
2604 by modifying the last node in chain 1 to point to chain 2.
2605 This is the Lisp primitive `nconc'. */
2606
2607 tree
2608 chainon (tree op1, tree op2)
2609 {
2610 tree t1;
2611
2612 if (!op1)
2613 return op2;
2614 if (!op2)
2615 return op1;
2616
2617 for (t1 = op1; TREE_CHAIN (t1); t1 = TREE_CHAIN (t1))
2618 continue;
2619 TREE_CHAIN (t1) = op2;
2620
2621 #ifdef ENABLE_TREE_CHECKING
2622 {
2623 tree t2;
2624 for (t2 = op2; t2; t2 = TREE_CHAIN (t2))
2625 gcc_assert (t2 != t1);
2626 }
2627 #endif
2628
2629 return op1;
2630 }
2631
2632 /* Return the last node in a chain of nodes (chained through TREE_CHAIN). */
2633
2634 tree
2635 tree_last (tree chain)
2636 {
2637 tree next;
2638 if (chain)
2639 while ((next = TREE_CHAIN (chain)))
2640 chain = next;
2641 return chain;
2642 }
2643
2644 /* Reverse the order of elements in the chain T,
2645 and return the new head of the chain (old last element). */
2646
2647 tree
2648 nreverse (tree t)
2649 {
2650 tree prev = 0, decl, next;
2651 for (decl = t; decl; decl = next)
2652 {
2653 /* We shouldn't be using this function to reverse BLOCK chains; we
2654 have blocks_nreverse for that. */
2655 gcc_checking_assert (TREE_CODE (decl) != BLOCK);
2656 next = TREE_CHAIN (decl);
2657 TREE_CHAIN (decl) = prev;
2658 prev = decl;
2659 }
2660 return prev;
2661 }
2662 \f
2663 /* Return a newly created TREE_LIST node whose
2664 purpose and value fields are PARM and VALUE. */
2665
2666 tree
2667 build_tree_list_stat (tree parm, tree value MEM_STAT_DECL)
2668 {
2669 tree t = make_node_stat (TREE_LIST PASS_MEM_STAT);
2670 TREE_PURPOSE (t) = parm;
2671 TREE_VALUE (t) = value;
2672 return t;
2673 }
2674
2675 /* Build a chain of TREE_LIST nodes from a vector. */
2676
2677 tree
2678 build_tree_list_vec_stat (const vec<tree, va_gc> *vec MEM_STAT_DECL)
2679 {
2680 tree ret = NULL_TREE;
2681 tree *pp = &ret;
2682 unsigned int i;
2683 tree t;
2684 FOR_EACH_VEC_SAFE_ELT (vec, i, t)
2685 {
2686 *pp = build_tree_list_stat (NULL, t PASS_MEM_STAT);
2687 pp = &TREE_CHAIN (*pp);
2688 }
2689 return ret;
2690 }
2691
2692 /* Return a newly created TREE_LIST node whose
2693 purpose and value fields are PURPOSE and VALUE
2694 and whose TREE_CHAIN is CHAIN. */
2695
2696 tree
2697 tree_cons_stat (tree purpose, tree value, tree chain MEM_STAT_DECL)
2698 {
2699 tree node;
2700
2701 node = ggc_alloc_tree_node_stat (sizeof (struct tree_list) PASS_MEM_STAT);
2702 memset (node, 0, sizeof (struct tree_common));
2703
2704 record_node_allocation_statistics (TREE_LIST, sizeof (struct tree_list));
2705
2706 TREE_SET_CODE (node, TREE_LIST);
2707 TREE_CHAIN (node) = chain;
2708 TREE_PURPOSE (node) = purpose;
2709 TREE_VALUE (node) = value;
2710 return node;
2711 }
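/* Editor's note: an uncompiled sketch of the TREE_LIST helpers defined
   above, assuming the usual build_tree_list/tree_cons wrapper macros
   from tree.h around the *_stat functions.  */
#if 0
static void
example_tree_list (tree key1, tree val1, tree key2, tree val2)
{
  /* Cons the elements on in reverse, then flip the chain in place.  */
  tree list = NULL_TREE;
  list = tree_cons (key1, val1, list);          /* (key1)      */
  list = tree_cons (key2, val2, list);          /* (key2 key1) */
  list = nreverse (list);                       /* (key1 key2) */

  gcc_assert (list_length (list) == 2);
  gcc_assert (purpose_member (key1, list) != NULL_TREE);
  gcc_assert (value_member (val2, list) != NULL_TREE);

  /* Append a one-element list and fetch its value back.  */
  list = chainon (list, build_tree_list (NULL_TREE, val1));
  gcc_assert (TREE_VALUE (tree_last (list)) == val1);
}
#endif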
2712
2713 /* Return the values of the elements of a CONSTRUCTOR as a vector of
2714 trees. */
2715
2716 vec<tree, va_gc> *
2717 ctor_to_vec (tree ctor)
2718 {
2719 vec<tree, va_gc> *vec;
2720 vec_alloc (vec, CONSTRUCTOR_NELTS (ctor));
2721 unsigned int ix;
2722 tree val;
2723
2724 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor), ix, val)
2725 vec->quick_push (val);
2726
2727 return vec;
2728 }
2729 \f
2730 /* Return the size nominally occupied by an object of type TYPE
2731 when it resides in memory. The value is measured in units of bytes,
2732 and its data type is that normally used for type sizes
2733 (which is the first type created by make_signed_type or
2734 make_unsigned_type). */
2735
2736 tree
2737 size_in_bytes (const_tree type)
2738 {
2739 tree t;
2740
2741 if (type == error_mark_node)
2742 return integer_zero_node;
2743
2744 type = TYPE_MAIN_VARIANT (type);
2745 t = TYPE_SIZE_UNIT (type);
2746
2747 if (t == 0)
2748 {
2749 lang_hooks.types.incomplete_type_error (NULL_TREE, type);
2750 return size_zero_node;
2751 }
2752
2753 return t;
2754 }
2755
2756 /* Return the size of TYPE (in bytes) as a wide integer
2757 or return -1 if the size can vary or is larger than an integer. */
2758
2759 HOST_WIDE_INT
2760 int_size_in_bytes (const_tree type)
2761 {
2762 tree t;
2763
2764 if (type == error_mark_node)
2765 return 0;
2766
2767 type = TYPE_MAIN_VARIANT (type);
2768 t = TYPE_SIZE_UNIT (type);
2769
2770 if (t && tree_fits_uhwi_p (t))
2771 return TREE_INT_CST_LOW (t);
2772 else
2773 return -1;
2774 }
2775
2776 /* Return the maximum size of TYPE (in bytes) as a wide integer
2777 or return -1 if the size can vary or is larger than an integer. */
2778
2779 HOST_WIDE_INT
2780 max_int_size_in_bytes (const_tree type)
2781 {
2782 HOST_WIDE_INT size = -1;
2783 tree size_tree;
2784
2785 /* If this is an array type, check for a possible MAX_SIZE attached. */
2786
2787 if (TREE_CODE (type) == ARRAY_TYPE)
2788 {
2789 size_tree = TYPE_ARRAY_MAX_SIZE (type);
2790
2791 if (size_tree && tree_fits_uhwi_p (size_tree))
2792 size = tree_to_uhwi (size_tree);
2793 }
2794
2795 /* If we still haven't been able to get a size, see if the language
2796 can compute a maximum size. */
2797
2798 if (size == -1)
2799 {
2800 size_tree = lang_hooks.types.max_size (type);
2801
2802 if (size_tree && tree_fits_uhwi_p (size_tree))
2803 size = tree_to_uhwi (size_tree);
2804 }
2805
2806 return size;
2807 }
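/* Editor's note: an uncompiled sketch of how callers typically combine
   the size queries above.  */
#if 0
static void
example_size_queries (tree type)
{
  HOST_WIDE_INT isize = int_size_in_bytes (type);

  if (isize == -1)
    {
      /* Variable-sized, incomplete, or too large for a HOST_WIDE_INT;
         fall back to the tree returned by size_in_bytes, or to the
         upper bound from max_int_size_in_bytes, which may also be -1.  */
    }
  else
    {
      /* ISIZE is the constant byte size, e.g. 4 for a 32-bit int.  */
    }
}
#endif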
2808 \f
2809 /* Return the bit position of FIELD, in bits from the start of the record.
2810 This is a tree of type bitsizetype. */
2811
2812 tree
2813 bit_position (const_tree field)
2814 {
2815 return bit_from_pos (DECL_FIELD_OFFSET (field),
2816 DECL_FIELD_BIT_OFFSET (field));
2817 }
2818
2819 /* Likewise, but return as an integer. It must be representable in
2820 that way (since it could be a signed value, we don't have the
2821 option of returning -1 like int_size_in_bytes can). */
2822
2823 HOST_WIDE_INT
2824 int_bit_position (const_tree field)
2825 {
2826 return tree_to_shwi (bit_position (field));
2827 }
2828 \f
2829 /* Return the byte position of FIELD, in bytes from the start of the record.
2830 This is a tree of type sizetype. */
2831
2832 tree
2833 byte_position (const_tree field)
2834 {
2835 return byte_from_pos (DECL_FIELD_OFFSET (field),
2836 DECL_FIELD_BIT_OFFSET (field));
2837 }
2838
2839 /* Likewise, but return as an integer. It must be representable in
2840 that way (since it could be a signed value, we don't have the
2841 option of returning -1 like int_size_in_bytes can). */
2842
2843 HOST_WIDE_INT
2844 int_byte_position (const_tree field)
2845 {
2846 return tree_to_shwi (byte_position (field));
2847 }
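/* Editor's note: for a FIELD_DECL F the two views above are, in effect,
   related by

     bit_position (F) = byte_position (F) * BITS_PER_UNIT
                        + (DECL_FIELD_BIT_OFFSET (F) mod BITS_PER_UNIT)

   since both are assembled from DECL_FIELD_OFFSET and
   DECL_FIELD_BIT_OFFSET by bit_from_pos and byte_from_pos.  */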
2848 \f
2849 /* Return the strictest alignment, in bits, that T is known to have. */
2850
2851 unsigned int
2852 expr_align (const_tree t)
2853 {
2854 unsigned int align0, align1;
2855
2856 switch (TREE_CODE (t))
2857 {
2858 CASE_CONVERT: case NON_LVALUE_EXPR:
2859 /* If we have conversions, we know that the alignment of the
2860 object must meet each of the alignments of the types. */
2861 align0 = expr_align (TREE_OPERAND (t, 0));
2862 align1 = TYPE_ALIGN (TREE_TYPE (t));
2863 return MAX (align0, align1);
2864
2865 case SAVE_EXPR: case COMPOUND_EXPR: case MODIFY_EXPR:
2866 case INIT_EXPR: case TARGET_EXPR: case WITH_CLEANUP_EXPR:
2867 case CLEANUP_POINT_EXPR:
2868 /* These don't change the alignment of an object. */
2869 return expr_align (TREE_OPERAND (t, 0));
2870
2871 case COND_EXPR:
2872 /* The best we can do is say that the alignment is the least aligned
2873 of the two arms. */
2874 align0 = expr_align (TREE_OPERAND (t, 1));
2875 align1 = expr_align (TREE_OPERAND (t, 2));
2876 return MIN (align0, align1);
2877
2878 /* FIXME: LABEL_DECL and CONST_DECL never have DECL_ALIGN set
2879 meaningfully; it's always 1. */
2880 case LABEL_DECL: case CONST_DECL:
2881 case VAR_DECL: case PARM_DECL: case RESULT_DECL:
2882 case FUNCTION_DECL:
2883 gcc_assert (DECL_ALIGN (t) != 0);
2884 return DECL_ALIGN (t);
2885
2886 default:
2887 break;
2888 }
2889
2890 /* Otherwise take the alignment from that of the type. */
2891 return TYPE_ALIGN (TREE_TYPE (t));
2892 }
2893 \f
2894 /* Return, as a tree node, the number of elements for TYPE (which is an
2895 ARRAY_TYPE) minus one. This counts only elements of the top array. */
2896
2897 tree
2898 array_type_nelts (const_tree type)
2899 {
2900 tree index_type, min, max;
2901
2902 /* If they did it with unspecified bounds, then we should have already
2903 given an error about it before we got here. */
2904 if (! TYPE_DOMAIN (type))
2905 return error_mark_node;
2906
2907 index_type = TYPE_DOMAIN (type);
2908 min = TYPE_MIN_VALUE (index_type);
2909 max = TYPE_MAX_VALUE (index_type);
2910
2911 /* TYPE_MAX_VALUE may not be set if the array has unknown length. */
2912 if (!max)
2913 return error_mark_node;
2914
2915 return (integer_zerop (min)
2916 ? max
2917 : fold_build2 (MINUS_EXPR, TREE_TYPE (max), max, min));
2918 }
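/* Editor's note: for the C declaration "int a[10]" the domain of the
   array type is [0, 9], the minimum is zero, and array_type_nelts
   returns the INTEGER_CST 9; callers that want the element count add
   one to the result.  */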
2919 \f
2920 /* If arg is static -- a reference to an object in static storage -- then
2921 return the object. This is not the same as the C meaning of `static'.
2922 If arg isn't static, return NULL. */
2923
2924 tree
2925 staticp (tree arg)
2926 {
2927 switch (TREE_CODE (arg))
2928 {
2929 case FUNCTION_DECL:
2930 /* Nested functions are static, even though taking their address will
2931 involve a trampoline as we unnest the nested function and create
2932 the trampoline on the tree level. */
2933 return arg;
2934
2935 case VAR_DECL:
2936 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
2937 && ! DECL_THREAD_LOCAL_P (arg)
2938 && ! DECL_DLLIMPORT_P (arg)
2939 ? arg : NULL);
2940
2941 case CONST_DECL:
2942 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
2943 ? arg : NULL);
2944
2945 case CONSTRUCTOR:
2946 return TREE_STATIC (arg) ? arg : NULL;
2947
2948 case LABEL_DECL:
2949 case STRING_CST:
2950 return arg;
2951
2952 case COMPONENT_REF:
2953 /* If the thing being referenced is not a field, then it is
2954 something language specific. */
2955 gcc_assert (TREE_CODE (TREE_OPERAND (arg, 1)) == FIELD_DECL);
2956
2957 /* If we are referencing a bitfield, we can't evaluate an
2958 ADDR_EXPR at compile time and so it isn't a constant. */
2959 if (DECL_BIT_FIELD (TREE_OPERAND (arg, 1)))
2960 return NULL;
2961
2962 return staticp (TREE_OPERAND (arg, 0));
2963
2964 case BIT_FIELD_REF:
2965 return NULL;
2966
2967 case INDIRECT_REF:
2968 return TREE_CONSTANT (TREE_OPERAND (arg, 0)) ? arg : NULL;
2969
2970 case ARRAY_REF:
2971 case ARRAY_RANGE_REF:
2972 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (arg))) == INTEGER_CST
2973 && TREE_CODE (TREE_OPERAND (arg, 1)) == INTEGER_CST)
2974 return staticp (TREE_OPERAND (arg, 0));
2975 else
2976 return NULL;
2977
2978 case COMPOUND_LITERAL_EXPR:
2979 return TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (arg)) ? arg : NULL;
2980
2981 default:
2982 return NULL;
2983 }
2984 }
2985
2986 \f
2987
2988
2989 /* Return whether OP is a DECL whose address is function-invariant. */
2990
2991 bool
2992 decl_address_invariant_p (const_tree op)
2993 {
2994 /* The conditions below are slightly less strict than the one in
2995 staticp. */
2996
2997 switch (TREE_CODE (op))
2998 {
2999 case PARM_DECL:
3000 case RESULT_DECL:
3001 case LABEL_DECL:
3002 case FUNCTION_DECL:
3003 return true;
3004
3005 case VAR_DECL:
3006 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3007 || DECL_THREAD_LOCAL_P (op)
3008 || DECL_CONTEXT (op) == current_function_decl
3009 || decl_function_context (op) == current_function_decl)
3010 return true;
3011 break;
3012
3013 case CONST_DECL:
3014 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3015 || decl_function_context (op) == current_function_decl)
3016 return true;
3017 break;
3018
3019 default:
3020 break;
3021 }
3022
3023 return false;
3024 }
3025
3026 /* Return whether OP is a DECL whose address is interprocedural-invariant. */
3027
3028 bool
3029 decl_address_ip_invariant_p (const_tree op)
3030 {
3031 /* The conditions below are slightly less strict than the one in
3032 staticp. */
3033
3034 switch (TREE_CODE (op))
3035 {
3036 case LABEL_DECL:
3037 case FUNCTION_DECL:
3038 case STRING_CST:
3039 return true;
3040
3041 case VAR_DECL:
3042 if (((TREE_STATIC (op) || DECL_EXTERNAL (op))
3043 && !DECL_DLLIMPORT_P (op))
3044 || DECL_THREAD_LOCAL_P (op))
3045 return true;
3046 break;
3047
3048 case CONST_DECL:
3049 if ((TREE_STATIC (op) || DECL_EXTERNAL (op)))
3050 return true;
3051 break;
3052
3053 default:
3054 break;
3055 }
3056
3057 return false;
3058 }
3059
3060
3061 /* Return true if T is function-invariant (internal function, does
3062 not handle arithmetic; that's handled in skip_simple_arithmetic and
3063 tree_invariant_p). */
3064
3065 static bool tree_invariant_p (tree t);
3066
3067 static bool
3068 tree_invariant_p_1 (tree t)
3069 {
3070 tree op;
3071
3072 if (TREE_CONSTANT (t)
3073 || (TREE_READONLY (t) && !TREE_SIDE_EFFECTS (t)))
3074 return true;
3075
3076 switch (TREE_CODE (t))
3077 {
3078 case SAVE_EXPR:
3079 return true;
3080
3081 case ADDR_EXPR:
3082 op = TREE_OPERAND (t, 0);
3083 while (handled_component_p (op))
3084 {
3085 switch (TREE_CODE (op))
3086 {
3087 case ARRAY_REF:
3088 case ARRAY_RANGE_REF:
3089 if (!tree_invariant_p (TREE_OPERAND (op, 1))
3090 || TREE_OPERAND (op, 2) != NULL_TREE
3091 || TREE_OPERAND (op, 3) != NULL_TREE)
3092 return false;
3093 break;
3094
3095 case COMPONENT_REF:
3096 if (TREE_OPERAND (op, 2) != NULL_TREE)
3097 return false;
3098 break;
3099
3100 default:;
3101 }
3102 op = TREE_OPERAND (op, 0);
3103 }
3104
3105 return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
3106
3107 default:
3108 break;
3109 }
3110
3111 return false;
3112 }
3113
3114 /* Return true if T is function-invariant. */
3115
3116 static bool
3117 tree_invariant_p (tree t)
3118 {
3119 tree inner = skip_simple_arithmetic (t);
3120 return tree_invariant_p_1 (inner);
3121 }
3122
3123 /* Wrap a SAVE_EXPR around EXPR, if appropriate.
3124 Do this to any expression which may be used in more than one place,
3125 but must be evaluated only once.
3126
3127 Normally, expand_expr would reevaluate the expression each time.
3128 Calling save_expr produces something that is evaluated and recorded
3129 the first time expand_expr is called on it. Subsequent calls to
3130 expand_expr just reuse the recorded value.
3131
3132 The call to expand_expr that generates code that actually computes
3133 the value is the first call *at compile time*. Subsequent calls
3134 *at compile time* generate code to use the saved value.
3135 This produces correct result provided that *at run time* control
3136 always flows through the insns made by the first expand_expr
3137 before reaching the other places where the save_expr was evaluated.
3138 You, the caller of save_expr, must make sure this is so.
3139
3140 Constants, and certain read-only nodes, are returned with no
3141 SAVE_EXPR because that is safe. Expressions containing placeholders
3142 are not touched; see tree.def for an explanation of what these
3143 are used for. */
3144
3145 tree
3146 save_expr (tree expr)
3147 {
3148 tree t = fold (expr);
3149 tree inner;
3150
3151 /* If the tree evaluates to a constant, then we don't want to hide that
3152 fact (i.e. this allows further folding, and direct checks for constants).
3153 However, a read-only object that has side effects cannot be bypassed.
3154 Since it is no problem to reevaluate literals, we just return the
3155 literal node. */
3156 inner = skip_simple_arithmetic (t);
3157 if (TREE_CODE (inner) == ERROR_MARK)
3158 return inner;
3159
3160 if (tree_invariant_p_1 (inner))
3161 return t;
3162
3163 /* If INNER contains a PLACEHOLDER_EXPR, we must evaluate it each time, since
3164 it means that the size or offset of some field of an object depends on
3165 the value within another field.
3166
3167 Note that it must not be the case that T contains both a PLACEHOLDER_EXPR
3168 and some variable since it would then need to be both evaluated once and
3169 evaluated more than once. Front-ends must ensure this case cannot
3170 happen by surrounding any such subexpressions in their own SAVE_EXPR
3171 and forcing evaluation at the proper time. */
3172 if (contains_placeholder_p (inner))
3173 return t;
3174
3175 t = build1 (SAVE_EXPR, TREE_TYPE (expr), t);
3176 SET_EXPR_LOCATION (t, EXPR_LOCATION (expr));
3177
3178 /* This expression might be placed ahead of a jump to ensure that the
3179 value was computed on both sides of the jump. So make sure it isn't
3180 eliminated as dead. */
3181 TREE_SIDE_EFFECTS (t) = 1;
3182 return t;
3183 }
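/* Editor's note: an uncompiled sketch of the intended use of save_expr;
   the helper is hypothetical.  */
#if 0
static tree
example_save_expr (tree x)
{
  /* X may have side effects; wrap it so it is evaluated only once even
     though it appears twice in the result.  Constants and invariants
     come back unwrapped, per the checks above.  */
  tree saved = save_expr (x);
  return fold_build2 (MULT_EXPR, TREE_TYPE (saved), saved, saved);
}
#endif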
3184
3185 /* Look inside EXPR into any simple arithmetic operations. Return the
3186 outermost non-arithmetic or non-invariant node. */
3187
3188 tree
3189 skip_simple_arithmetic (tree expr)
3190 {
3191 /* We don't care about whether this can be used as an lvalue in this
3192 context. */
3193 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3194 expr = TREE_OPERAND (expr, 0);
3195
3196 /* If we have simple operations applied to a SAVE_EXPR or to a SAVE_EXPR and
3197 a constant, it will be more efficient to not make another SAVE_EXPR since
3198 it will allow better simplification and GCSE will be able to merge the
3199 computations if they actually occur. */
3200 while (true)
3201 {
3202 if (UNARY_CLASS_P (expr))
3203 expr = TREE_OPERAND (expr, 0);
3204 else if (BINARY_CLASS_P (expr))
3205 {
3206 if (tree_invariant_p (TREE_OPERAND (expr, 1)))
3207 expr = TREE_OPERAND (expr, 0);
3208 else if (tree_invariant_p (TREE_OPERAND (expr, 0)))
3209 expr = TREE_OPERAND (expr, 1);
3210 else
3211 break;
3212 }
3213 else
3214 break;
3215 }
3216
3217 return expr;
3218 }
3219
3220 /* Look inside EXPR into simple arithmetic operations involving constants.
3221 Return the outermost non-arithmetic or non-constant node. */
3222
3223 tree
3224 skip_simple_constant_arithmetic (tree expr)
3225 {
3226 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3227 expr = TREE_OPERAND (expr, 0);
3228
3229 while (true)
3230 {
3231 if (UNARY_CLASS_P (expr))
3232 expr = TREE_OPERAND (expr, 0);
3233 else if (BINARY_CLASS_P (expr))
3234 {
3235 if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
3236 expr = TREE_OPERAND (expr, 0);
3237 else if (TREE_CONSTANT (TREE_OPERAND (expr, 0)))
3238 expr = TREE_OPERAND (expr, 1);
3239 else
3240 break;
3241 }
3242 else
3243 break;
3244 }
3245
3246 return expr;
3247 }
3248
3249 /* Return which tree structure is used by T. */
3250
3251 enum tree_node_structure_enum
3252 tree_node_structure (const_tree t)
3253 {
3254 const enum tree_code code = TREE_CODE (t);
3255 return tree_node_structure_for_code (code);
3256 }
3257
3258 /* Set various status flags when building a CALL_EXPR object T. */
3259
3260 static void
3261 process_call_operands (tree t)
3262 {
3263 bool side_effects = TREE_SIDE_EFFECTS (t);
3264 bool read_only = false;
3265 int i = call_expr_flags (t);
3266
3267 /* Calls have side-effects, except those to const or pure functions. */
3268 if ((i & ECF_LOOPING_CONST_OR_PURE) || !(i & (ECF_CONST | ECF_PURE)))
3269 side_effects = true;
3270 /* Propagate TREE_READONLY of arguments for const functions. */
3271 if (i & ECF_CONST)
3272 read_only = true;
3273
3274 if (!side_effects || read_only)
3275 for (i = 1; i < TREE_OPERAND_LENGTH (t); i++)
3276 {
3277 tree op = TREE_OPERAND (t, i);
3278 if (op && TREE_SIDE_EFFECTS (op))
3279 side_effects = true;
3280 if (op && !TREE_READONLY (op) && !CONSTANT_CLASS_P (op))
3281 read_only = false;
3282 }
3283
3284 TREE_SIDE_EFFECTS (t) = side_effects;
3285 TREE_READONLY (t) = read_only;
3286 }
3287 \f
3288 /* Return true if EXP contains a PLACEHOLDER_EXPR, i.e. if it represents a
3289 size or offset that depends on a field within a record. */
3290
3291 bool
3292 contains_placeholder_p (const_tree exp)
3293 {
3294 enum tree_code code;
3295
3296 if (!exp)
3297 return 0;
3298
3299 code = TREE_CODE (exp);
3300 if (code == PLACEHOLDER_EXPR)
3301 return 1;
3302
3303 switch (TREE_CODE_CLASS (code))
3304 {
3305 case tcc_reference:
3306 /* Don't look at any PLACEHOLDER_EXPRs that might be in index or bit
3307 position computations since they will be converted into a
3308 WITH_RECORD_EXPR involving the reference, which we assume
3309 here will be valid. */
3310 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3311
3312 case tcc_exceptional:
3313 if (code == TREE_LIST)
3314 return (CONTAINS_PLACEHOLDER_P (TREE_VALUE (exp))
3315 || CONTAINS_PLACEHOLDER_P (TREE_CHAIN (exp)));
3316 break;
3317
3318 case tcc_unary:
3319 case tcc_binary:
3320 case tcc_comparison:
3321 case tcc_expression:
3322 switch (code)
3323 {
3324 case COMPOUND_EXPR:
3325 /* Ignoring the first operand isn't quite right, but works best. */
3326 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1));
3327
3328 case COND_EXPR:
3329 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3330 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1))
3331 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 2)));
3332
3333 case SAVE_EXPR:
3334 /* The save_expr function never wraps anything containing
3335 a PLACEHOLDER_EXPR. */
3336 return 0;
3337
3338 default:
3339 break;
3340 }
3341
3342 switch (TREE_CODE_LENGTH (code))
3343 {
3344 case 1:
3345 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3346 case 2:
3347 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3348 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1)));
3349 default:
3350 return 0;
3351 }
3352
3353 case tcc_vl_exp:
3354 switch (code)
3355 {
3356 case CALL_EXPR:
3357 {
3358 const_tree arg;
3359 const_call_expr_arg_iterator iter;
3360 FOR_EACH_CONST_CALL_EXPR_ARG (arg, iter, exp)
3361 if (CONTAINS_PLACEHOLDER_P (arg))
3362 return 1;
3363 return 0;
3364 }
3365 default:
3366 return 0;
3367 }
3368
3369 default:
3370 return 0;
3371 }
3372 return 0;
3373 }
3374
3375 /* Return true if any part of the structure of TYPE involves a PLACEHOLDER_EXPR
3376 directly. This includes size, bounds, qualifiers (for QUAL_UNION_TYPE) and
3377 field positions. */
3378
3379 static bool
3380 type_contains_placeholder_1 (const_tree type)
3381 {
3382 /* If the size contains a placeholder or the parent type (the component
3383 type in the case of arrays) involves a placeholder, this type does. */
3384 if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type))
3385 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE_UNIT (type))
3386 || (!POINTER_TYPE_P (type)
3387 && TREE_TYPE (type)
3388 && type_contains_placeholder_p (TREE_TYPE (type))))
3389 return true;
3390
3391 /* Now do type-specific checks. Note that the last part of the check above
3392 greatly limits what we have to do below. */
3393 switch (TREE_CODE (type))
3394 {
3395 case VOID_TYPE:
3396 case COMPLEX_TYPE:
3397 case ENUMERAL_TYPE:
3398 case BOOLEAN_TYPE:
3399 case POINTER_TYPE:
3400 case OFFSET_TYPE:
3401 case REFERENCE_TYPE:
3402 case METHOD_TYPE:
3403 case FUNCTION_TYPE:
3404 case VECTOR_TYPE:
3405 case NULLPTR_TYPE:
3406 return false;
3407
3408 case INTEGER_TYPE:
3409 case REAL_TYPE:
3410 case FIXED_POINT_TYPE:
3411 /* Here we just check the bounds. */
3412 return (CONTAINS_PLACEHOLDER_P (TYPE_MIN_VALUE (type))
3413 || CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type)));
3414
3415 case ARRAY_TYPE:
3416 /* We have already checked the component type above, so just check the
3417 domain type. */
3418 return type_contains_placeholder_p (TYPE_DOMAIN (type));
3419
3420 case RECORD_TYPE:
3421 case UNION_TYPE:
3422 case QUAL_UNION_TYPE:
3423 {
3424 tree field;
3425
3426 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
3427 if (TREE_CODE (field) == FIELD_DECL
3428 && (CONTAINS_PLACEHOLDER_P (DECL_FIELD_OFFSET (field))
3429 || (TREE_CODE (type) == QUAL_UNION_TYPE
3430 && CONTAINS_PLACEHOLDER_P (DECL_QUALIFIER (field)))
3431 || type_contains_placeholder_p (TREE_TYPE (field))))
3432 return true;
3433
3434 return false;
3435 }
3436
3437 default:
3438 gcc_unreachable ();
3439 }
3440 }
3441
3442 /* Wrapper around above function used to cache its result. */
3443
3444 bool
3445 type_contains_placeholder_p (tree type)
3446 {
3447 bool result;
3448
3449 /* If the contains_placeholder_bits field has been initialized,
3450 then we know the answer. */
3451 if (TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) > 0)
3452 return TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) - 1;
3453
3454 /* Indicate that we've seen this type node, and the answer is false.
3455 This is what we want to return if we run into recursion via fields. */
3456 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = 1;
3457
3458 /* Compute the real value. */
3459 result = type_contains_placeholder_1 (type);
3460
3461 /* Store the real value. */
3462 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = result + 1;
3463
3464 return result;
3465 }
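/* Editor's note on the cache above: TYPE_CONTAINS_PLACEHOLDER_INTERNAL
   stores the answer biased by one; 0 means "not computed yet", 1 means
   "no placeholder" and 2 means "contains a placeholder".  That is why
   the wrapper returns the stored value minus one, and why it first seeds
   the field with 1 so that recursion through self-referential fields
   terminates with a provisional "no".  */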
3466 \f
3467 /* Push tree EXP onto vector QUEUE if it is not already present. */
3468
3469 static void
3470 push_without_duplicates (tree exp, vec<tree> *queue)
3471 {
3472 unsigned int i;
3473 tree iter;
3474
3475 FOR_EACH_VEC_ELT (*queue, i, iter)
3476 if (simple_cst_equal (iter, exp) == 1)
3477 break;
3478
3479 if (!iter)
3480 queue->safe_push (exp);
3481 }
3482
3483 /* Given a tree EXP, find all occurrences of references to fields
3484 in a PLACEHOLDER_EXPR and place them in vector REFS without
3485 duplicates. Also record VAR_DECLs and CONST_DECLs. Note that
3486 we assume here that EXP contains only arithmetic expressions
3487 or CALL_EXPRs with PLACEHOLDER_EXPRs occurring only in their
3488 argument list. */
3489
3490 void
3491 find_placeholder_in_expr (tree exp, vec<tree> *refs)
3492 {
3493 enum tree_code code = TREE_CODE (exp);
3494 tree inner;
3495 int i;
3496
3497 /* We handle TREE_LIST and COMPONENT_REF separately. */
3498 if (code == TREE_LIST)
3499 {
3500 FIND_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), refs);
3501 FIND_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), refs);
3502 }
3503 else if (code == COMPONENT_REF)
3504 {
3505 for (inner = TREE_OPERAND (exp, 0);
3506 REFERENCE_CLASS_P (inner);
3507 inner = TREE_OPERAND (inner, 0))
3508 ;
3509
3510 if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
3511 push_without_duplicates (exp, refs);
3512 else
3513 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), refs);
3514 }
3515 else
3516 switch (TREE_CODE_CLASS (code))
3517 {
3518 case tcc_constant:
3519 break;
3520
3521 case tcc_declaration:
3522 /* Variables allocated to static storage can stay. */
3523 if (!TREE_STATIC (exp))
3524 push_without_duplicates (exp, refs);
3525 break;
3526
3527 case tcc_expression:
3528 /* This is the pattern built in ada/make_aligning_type. */
3529 if (code == ADDR_EXPR
3530 && TREE_CODE (TREE_OPERAND (exp, 0)) == PLACEHOLDER_EXPR)
3531 {
3532 push_without_duplicates (exp, refs);
3533 break;
3534 }
3535
3536 /* Fall through... */
3537
3538 case tcc_exceptional:
3539 case tcc_unary:
3540 case tcc_binary:
3541 case tcc_comparison:
3542 case tcc_reference:
3543 for (i = 0; i < TREE_CODE_LENGTH (code); i++)
3544 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3545 break;
3546
3547 case tcc_vl_exp:
3548 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3549 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3550 break;
3551
3552 default:
3553 gcc_unreachable ();
3554 }
3555 }
3556
3557 /* Given a tree EXP, a FIELD_DECL F, and a replacement value R,
3558 return a tree with all occurrences of references to F in a
3559 PLACEHOLDER_EXPR replaced by R. Also handle VAR_DECLs and
3560 CONST_DECLs. Note that we assume here that EXP contains only
3561 arithmetic expressions or CALL_EXPRs with PLACEHOLDER_EXPRs
3562 occurring only in their argument list. */
3563
3564 tree
3565 substitute_in_expr (tree exp, tree f, tree r)
3566 {
3567 enum tree_code code = TREE_CODE (exp);
3568 tree op0, op1, op2, op3;
3569 tree new_tree;
3570
3571 /* We handle TREE_LIST and COMPONENT_REF separately. */
3572 if (code == TREE_LIST)
3573 {
3574 op0 = SUBSTITUTE_IN_EXPR (TREE_CHAIN (exp), f, r);
3575 op1 = SUBSTITUTE_IN_EXPR (TREE_VALUE (exp), f, r);
3576 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
3577 return exp;
3578
3579 return tree_cons (TREE_PURPOSE (exp), op1, op0);
3580 }
3581 else if (code == COMPONENT_REF)
3582 {
3583 tree inner;
3584
3585 /* If this expression is getting a value from a PLACEHOLDER_EXPR
3586 and it is the right field, replace it with R. */
3587 for (inner = TREE_OPERAND (exp, 0);
3588 REFERENCE_CLASS_P (inner);
3589 inner = TREE_OPERAND (inner, 0))
3590 ;
3591
3592 /* The field. */
3593 op1 = TREE_OPERAND (exp, 1);
3594
3595 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && op1 == f)
3596 return r;
3597
3598 /* If this expression hasn't been completed yet, leave it alone. */
3599 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && !TREE_TYPE (inner))
3600 return exp;
3601
3602 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3603 if (op0 == TREE_OPERAND (exp, 0))
3604 return exp;
3605
3606 new_tree
3607 = fold_build3 (COMPONENT_REF, TREE_TYPE (exp), op0, op1, NULL_TREE);
3608 }
3609 else
3610 switch (TREE_CODE_CLASS (code))
3611 {
3612 case tcc_constant:
3613 return exp;
3614
3615 case tcc_declaration:
3616 if (exp == f)
3617 return r;
3618 else
3619 return exp;
3620
3621 case tcc_expression:
3622 if (exp == f)
3623 return r;
3624
3625 /* Fall through... */
3626
3627 case tcc_exceptional:
3628 case tcc_unary:
3629 case tcc_binary:
3630 case tcc_comparison:
3631 case tcc_reference:
3632 switch (TREE_CODE_LENGTH (code))
3633 {
3634 case 0:
3635 return exp;
3636
3637 case 1:
3638 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3639 if (op0 == TREE_OPERAND (exp, 0))
3640 return exp;
3641
3642 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
3643 break;
3644
3645 case 2:
3646 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3647 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3648
3649 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
3650 return exp;
3651
3652 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
3653 break;
3654
3655 case 3:
3656 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3657 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3658 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
3659
3660 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3661 && op2 == TREE_OPERAND (exp, 2))
3662 return exp;
3663
3664 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
3665 break;
3666
3667 case 4:
3668 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3669 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3670 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
3671 op3 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 3), f, r);
3672
3673 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3674 && op2 == TREE_OPERAND (exp, 2)
3675 && op3 == TREE_OPERAND (exp, 3))
3676 return exp;
3677
3678 new_tree
3679 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
3680 break;
3681
3682 default:
3683 gcc_unreachable ();
3684 }
3685 break;
3686
3687 case tcc_vl_exp:
3688 {
3689 int i;
3690
3691 new_tree = NULL_TREE;
3692
3693 /* If we are trying to replace F with a constant, inline back
3694 functions which do nothing else than computing a value from
3695 the arguments they are passed. This makes it possible to
3696 fold partially or entirely the replacement expression. */
3697 if (CONSTANT_CLASS_P (r) && code == CALL_EXPR)
3698 {
3699 tree t = maybe_inline_call_in_expr (exp);
3700 if (t)
3701 return SUBSTITUTE_IN_EXPR (t, f, r);
3702 }
3703
3704 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3705 {
3706 tree op = TREE_OPERAND (exp, i);
3707 tree new_op = SUBSTITUTE_IN_EXPR (op, f, r);
3708 if (new_op != op)
3709 {
3710 if (!new_tree)
3711 new_tree = copy_node (exp);
3712 TREE_OPERAND (new_tree, i) = new_op;
3713 }
3714 }
3715
3716 if (new_tree)
3717 {
3718 new_tree = fold (new_tree);
3719 if (TREE_CODE (new_tree) == CALL_EXPR)
3720 process_call_operands (new_tree);
3721 }
3722 else
3723 return exp;
3724 }
3725 break;
3726
3727 default:
3728 gcc_unreachable ();
3729 }
3730
3731 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
3732
3733 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
3734 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
3735
3736 return new_tree;
3737 }
3738
3739 /* Similar, but look for a PLACEHOLDER_EXPR in EXP and find a replacement
3740 for it within OBJ, a tree that is an object or a chain of references. */
3741
3742 tree
3743 substitute_placeholder_in_expr (tree exp, tree obj)
3744 {
3745 enum tree_code code = TREE_CODE (exp);
3746 tree op0, op1, op2, op3;
3747 tree new_tree;
3748
3749 /* If this is a PLACEHOLDER_EXPR, see if we find a corresponding type
3750 in the chain of OBJ. */
3751 if (code == PLACEHOLDER_EXPR)
3752 {
3753 tree need_type = TYPE_MAIN_VARIANT (TREE_TYPE (exp));
3754 tree elt;
3755
3756 for (elt = obj; elt != 0;
3757 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
3758 || TREE_CODE (elt) == COND_EXPR)
3759 ? TREE_OPERAND (elt, 1)
3760 : (REFERENCE_CLASS_P (elt)
3761 || UNARY_CLASS_P (elt)
3762 || BINARY_CLASS_P (elt)
3763 || VL_EXP_CLASS_P (elt)
3764 || EXPRESSION_CLASS_P (elt))
3765 ? TREE_OPERAND (elt, 0) : 0))
3766 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
3767 return elt;
3768
3769 for (elt = obj; elt != 0;
3770 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
3771 || TREE_CODE (elt) == COND_EXPR)
3772 ? TREE_OPERAND (elt, 1)
3773 : (REFERENCE_CLASS_P (elt)
3774 || UNARY_CLASS_P (elt)
3775 || BINARY_CLASS_P (elt)
3776 || VL_EXP_CLASS_P (elt)
3777 || EXPRESSION_CLASS_P (elt))
3778 ? TREE_OPERAND (elt, 0) : 0))
3779 if (POINTER_TYPE_P (TREE_TYPE (elt))
3780 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
3781 == need_type))
3782 return fold_build1 (INDIRECT_REF, need_type, elt);
3783
3784 /* If we didn't find it, return the original PLACEHOLDER_EXPR. If it
3785 survives until RTL generation, there will be an error. */
3786 return exp;
3787 }
3788
3789 /* TREE_LIST is special because we need to look at TREE_VALUE
3790 and TREE_CHAIN, not TREE_OPERANDS. */
3791 else if (code == TREE_LIST)
3792 {
3793 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), obj);
3794 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), obj);
3795 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
3796 return exp;
3797
3798 return tree_cons (TREE_PURPOSE (exp), op1, op0);
3799 }
3800 else
3801 switch (TREE_CODE_CLASS (code))
3802 {
3803 case tcc_constant:
3804 case tcc_declaration:
3805 return exp;
3806
3807 case tcc_exceptional:
3808 case tcc_unary:
3809 case tcc_binary:
3810 case tcc_comparison:
3811 case tcc_expression:
3812 case tcc_reference:
3813 case tcc_statement:
3814 switch (TREE_CODE_LENGTH (code))
3815 {
3816 case 0:
3817 return exp;
3818
3819 case 1:
3820 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3821 if (op0 == TREE_OPERAND (exp, 0))
3822 return exp;
3823
3824 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
3825 break;
3826
3827 case 2:
3828 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3829 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
3830
3831 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
3832 return exp;
3833
3834 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
3835 break;
3836
3837 case 3:
3838 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3839 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
3840 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
3841
3842 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3843 && op2 == TREE_OPERAND (exp, 2))
3844 return exp;
3845
3846 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
3847 break;
3848
3849 case 4:
3850 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3851 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
3852 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
3853 op3 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 3), obj);
3854
3855 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3856 && op2 == TREE_OPERAND (exp, 2)
3857 && op3 == TREE_OPERAND (exp, 3))
3858 return exp;
3859
3860 new_tree
3861 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
3862 break;
3863
3864 default:
3865 gcc_unreachable ();
3866 }
3867 break;
3868
3869 case tcc_vl_exp:
3870 {
3871 int i;
3872
3873 new_tree = NULL_TREE;
3874
3875 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3876 {
3877 tree op = TREE_OPERAND (exp, i);
3878 tree new_op = SUBSTITUTE_PLACEHOLDER_IN_EXPR (op, obj);
3879 if (new_op != op)
3880 {
3881 if (!new_tree)
3882 new_tree = copy_node (exp);
3883 TREE_OPERAND (new_tree, i) = new_op;
3884 }
3885 }
3886
3887 if (new_tree)
3888 {
3889 new_tree = fold (new_tree);
3890 if (TREE_CODE (new_tree) == CALL_EXPR)
3891 process_call_operands (new_tree);
3892 }
3893 else
3894 return exp;
3895 }
3896 break;
3897
3898 default:
3899 gcc_unreachable ();
3900 }
3901
3902 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
3903
3904 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
3905 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
3906
3907 return new_tree;
3908 }
3909 \f
3910
3911 /* Subroutine of stabilize_reference; this is called for subtrees of
3912 references. Any expression with side-effects must be put in a SAVE_EXPR
3913 to ensure that it is only evaluated once.
3914
3915 We don't put SAVE_EXPR nodes around everything, because assigning very
3916 simple expressions to temporaries causes us to miss good opportunities
3917 for optimizations. Among other things, the opportunity to fold in the
3918 addition of a constant into an addressing mode often gets lost, e.g.
3919 "y[i+1] += x;". In general, we take the approach that we should not make
3920 an assignment unless we are forced into it - i.e., that any non-side effect
3921 operator should be allowed, and that cse should take care of coalescing
3922 multiple utterances of the same expression should that prove fruitful. */
3923
3924 static tree
3925 stabilize_reference_1 (tree e)
3926 {
3927 tree result;
3928 enum tree_code code = TREE_CODE (e);
3929
3930 /* We cannot ignore const expressions because the expression might be a
3931 reference to a const array whose index contains side-effects. But we
3932 can ignore things that are actually constant or that have already been
3933 handled by this function. */
3934
3935 if (tree_invariant_p (e))
3936 return e;
3937
3938 switch (TREE_CODE_CLASS (code))
3939 {
3940 case tcc_exceptional:
3941 case tcc_type:
3942 case tcc_declaration:
3943 case tcc_comparison:
3944 case tcc_statement:
3945 case tcc_expression:
3946 case tcc_reference:
3947 case tcc_vl_exp:
3948 /* If the expression has side-effects, then encase it in a SAVE_EXPR
3949 so that it will only be evaluated once. */
3950 /* The reference (r) and comparison (<) classes could be handled as
3951 below, but it is generally faster to only evaluate them once. */
3952 if (TREE_SIDE_EFFECTS (e))
3953 return save_expr (e);
3954 return e;
3955
3956 case tcc_constant:
3957 /* Constants need no processing. In fact, we should never reach
3958 here. */
3959 return e;
3960
3961 case tcc_binary:
3962 /* Division is slow and tends to be compiled with jumps,
3963 especially the division by powers of 2 that is often
3964 found inside of an array reference. So do it just once. */
3965 if (code == TRUNC_DIV_EXPR || code == TRUNC_MOD_EXPR
3966 || code == FLOOR_DIV_EXPR || code == FLOOR_MOD_EXPR
3967 || code == CEIL_DIV_EXPR || code == CEIL_MOD_EXPR
3968 || code == ROUND_DIV_EXPR || code == ROUND_MOD_EXPR)
3969 return save_expr (e);
3970 /* Recursively stabilize each operand. */
3971 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)),
3972 stabilize_reference_1 (TREE_OPERAND (e, 1)));
3973 break;
3974
3975 case tcc_unary:
3976 /* Recursively stabilize each operand. */
3977 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)));
3978 break;
3979
3980 default:
3981 gcc_unreachable ();
3982 }
3983
3984 TREE_TYPE (result) = TREE_TYPE (e);
3985 TREE_READONLY (result) = TREE_READONLY (e);
3986 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (e);
3987 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
3988
3989 return result;
3990 }
3991
3992 /* Stabilize a reference so that we can use it any number of times
3993 without causing its operands to be evaluated more than once.
3994 Returns the stabilized reference. This works by means of save_expr,
3995 so see the caveats in the comments about save_expr.
3996
3997 Also allows conversion expressions whose operands are references.
3998 Any other kind of expression is returned unchanged. */
3999
4000 tree
4001 stabilize_reference (tree ref)
4002 {
4003 tree result;
4004 enum tree_code code = TREE_CODE (ref);
4005
4006 switch (code)
4007 {
4008 case VAR_DECL:
4009 case PARM_DECL:
4010 case RESULT_DECL:
4011 /* No action is needed in this case. */
4012 return ref;
4013
4014 CASE_CONVERT:
4015 case FLOAT_EXPR:
4016 case FIX_TRUNC_EXPR:
4017 result = build_nt (code, stabilize_reference (TREE_OPERAND (ref, 0)));
4018 break;
4019
4020 case INDIRECT_REF:
4021 result = build_nt (INDIRECT_REF,
4022 stabilize_reference_1 (TREE_OPERAND (ref, 0)));
4023 break;
4024
4025 case COMPONENT_REF:
4026 result = build_nt (COMPONENT_REF,
4027 stabilize_reference (TREE_OPERAND (ref, 0)),
4028 TREE_OPERAND (ref, 1), NULL_TREE);
4029 break;
4030
4031 case BIT_FIELD_REF:
4032 result = build_nt (BIT_FIELD_REF,
4033 stabilize_reference (TREE_OPERAND (ref, 0)),
4034 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
4035 break;
4036
4037 case ARRAY_REF:
4038 result = build_nt (ARRAY_REF,
4039 stabilize_reference (TREE_OPERAND (ref, 0)),
4040 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4041 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4042 break;
4043
4044 case ARRAY_RANGE_REF:
4045 result = build_nt (ARRAY_RANGE_REF,
4046 stabilize_reference (TREE_OPERAND (ref, 0)),
4047 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4048 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4049 break;
4050
4051 case COMPOUND_EXPR:
4052 /* We cannot wrap the first expression in a SAVE_EXPR, as then
4053 it wouldn't be ignored. This matters when dealing with
4054 volatiles. */
4055 return stabilize_reference_1 (ref);
4056
4057 /* If arg isn't a kind of lvalue we recognize, make no change.
4058 Caller should recognize the error for an invalid lvalue. */
4059 default:
4060 return ref;
4061
4062 case ERROR_MARK:
4063 return error_mark_node;
4064 }
4065
4066 TREE_TYPE (result) = TREE_TYPE (ref);
4067 TREE_READONLY (result) = TREE_READONLY (ref);
4068 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (ref);
4069 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);
4070
4071 return result;
4072 }
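
/* Illustrative sketch, not from the GCC sources: for a front-end
   reference such as a[i++], stabilize_reference keeps the ARRAY_REF
   shape but routes the index through save_expr, so the result can be
   emitted several times while the side-effecting index is evaluated
   only once.  Assuming REF is such an ARRAY_REF built by a front end:

     tree stable = stabilize_reference (ref);

   TREE_OPERAND (stable, 1) is then a SAVE_EXPR wrapping the i++.  */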
4073 \f
4074 /* Low-level constructors for expressions. */
4075
4076 /* A helper function for build1 and constant folders. Set TREE_CONSTANT,
4077 and TREE_SIDE_EFFECTS for an ADDR_EXPR. */
4078
4079 void
4080 recompute_tree_invariant_for_addr_expr (tree t)
4081 {
4082 tree node;
4083 bool tc = true, se = false;
4084
4085 /* We start out assuming this address is both invariant and constant, and
4086 that it has no side effects. Now go down any handled components and see if
4087 any of them involve offsets that are either non-constant or non-invariant.
4088 Also check for side-effects.
4089
4090 ??? Note that this code makes no attempt to deal with the case where
4091 taking the address of something causes a copy due to misalignment. */
4092
4093 #define UPDATE_FLAGS(NODE) \
4094 do { tree _node = (NODE); \
4095 if (_node && !TREE_CONSTANT (_node)) tc = false; \
4096 if (_node && TREE_SIDE_EFFECTS (_node)) se = true; } while (0)
4097
4098 for (node = TREE_OPERAND (t, 0); handled_component_p (node);
4099 node = TREE_OPERAND (node, 0))
4100 {
4101 /* If the first operand doesn't have an ARRAY_TYPE, this is a bogus
4102 array reference (probably made temporarily by the G++ front end),
4103 so ignore all the operands. */
4104 if ((TREE_CODE (node) == ARRAY_REF
4105 || TREE_CODE (node) == ARRAY_RANGE_REF)
4106 && TREE_CODE (TREE_TYPE (TREE_OPERAND (node, 0))) == ARRAY_TYPE)
4107 {
4108 UPDATE_FLAGS (TREE_OPERAND (node, 1));
4109 if (TREE_OPERAND (node, 2))
4110 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4111 if (TREE_OPERAND (node, 3))
4112 UPDATE_FLAGS (TREE_OPERAND (node, 3));
4113 }
4114 /* Likewise, just because this is a COMPONENT_REF doesn't mean we have a
4115 FIELD_DECL, apparently. The G++ front end can put something else
4116 there, at least temporarily. */
4117 else if (TREE_CODE (node) == COMPONENT_REF
4118 && TREE_CODE (TREE_OPERAND (node, 1)) == FIELD_DECL)
4119 {
4120 if (TREE_OPERAND (node, 2))
4121 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4122 }
4123 }
4124
4125 node = lang_hooks.expr_to_decl (node, &tc, &se);
4126
4127 /* Now see what's inside. If it's an INDIRECT_REF, copy our properties from
4128 the address, since &(*a)->b is a form of addition. If it's a constant, the
4129 address is constant too. If it's a decl, its address is constant if the
4130 decl is static. Everything else is not constant and, furthermore,
4131 taking the address of a volatile variable is not volatile. */
4132 if (TREE_CODE (node) == INDIRECT_REF
4133 || TREE_CODE (node) == MEM_REF)
4134 UPDATE_FLAGS (TREE_OPERAND (node, 0));
4135 else if (CONSTANT_CLASS_P (node))
4136 ;
4137 else if (DECL_P (node))
4138 tc &= (staticp (node) != NULL_TREE);
4139 else
4140 {
4141 tc = false;
4142 se |= TREE_SIDE_EFFECTS (node);
4143 }
4144
4145
4146 TREE_CONSTANT (t) = tc;
4147 TREE_SIDE_EFFECTS (t) = se;
4148 #undef UPDATE_FLAGS
4149 }
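
/* Illustrative sketch, not from the GCC sources: for a hand-built
   ADDR_EXPR T over hypothetical decls, the recomputation above yields,
   for example:

     &static_var       TREE_CONSTANT (t) = 1, TREE_SIDE_EFFECTS (t) = 0
     &auto_var         TREE_CONSTANT (t) = 0 (the decl is not static)
     &static_arr[i]    constant only if the index is TREE_CONSTANT;
                       side effects propagate from the index as well.  */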
4150
4151 /* Build an expression of code CODE, data type TYPE, and operands as
4152 specified. Expressions and reference nodes can be created this way.
4153 Constants, decls, types and misc nodes cannot be.
4154
4155 We define 6 non-variadic functions, from 0 to 5 arguments. This is
4156 enough for all extant tree codes. */
4157
4158 tree
4159 build0_stat (enum tree_code code, tree tt MEM_STAT_DECL)
4160 {
4161 tree t;
4162
4163 gcc_assert (TREE_CODE_LENGTH (code) == 0);
4164
4165 t = make_node_stat (code PASS_MEM_STAT);
4166 TREE_TYPE (t) = tt;
4167
4168 return t;
4169 }
4170
4171 tree
4172 build1_stat (enum tree_code code, tree type, tree node MEM_STAT_DECL)
4173 {
4174 int length = sizeof (struct tree_exp);
4175 tree t;
4176
4177 record_node_allocation_statistics (code, length);
4178
4179 gcc_assert (TREE_CODE_LENGTH (code) == 1);
4180
4181 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
4182
4183 memset (t, 0, sizeof (struct tree_common));
4184
4185 TREE_SET_CODE (t, code);
4186
4187 TREE_TYPE (t) = type;
4188 SET_EXPR_LOCATION (t, UNKNOWN_LOCATION);
4189 TREE_OPERAND (t, 0) = node;
4190 if (node && !TYPE_P (node))
4191 {
4192 TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (node);
4193 TREE_READONLY (t) = TREE_READONLY (node);
4194 }
4195
4196 if (TREE_CODE_CLASS (code) == tcc_statement)
4197 TREE_SIDE_EFFECTS (t) = 1;
4198 else switch (code)
4199 {
4200 case VA_ARG_EXPR:
4201 /* All of these have side-effects, no matter what their
4202 operands are. */
4203 TREE_SIDE_EFFECTS (t) = 1;
4204 TREE_READONLY (t) = 0;
4205 break;
4206
4207 case INDIRECT_REF:
4208 /* Whether a dereference is readonly has nothing to do with whether
4209 its operand is readonly. */
4210 TREE_READONLY (t) = 0;
4211 break;
4212
4213 case ADDR_EXPR:
4214 if (node)
4215 recompute_tree_invariant_for_addr_expr (t);
4216 break;
4217
4218 default:
4219 if ((TREE_CODE_CLASS (code) == tcc_unary || code == VIEW_CONVERT_EXPR)
4220 && node && !TYPE_P (node)
4221 && TREE_CONSTANT (node))
4222 TREE_CONSTANT (t) = 1;
4223 if (TREE_CODE_CLASS (code) == tcc_reference
4224 && node && TREE_THIS_VOLATILE (node))
4225 TREE_THIS_VOLATILE (t) = 1;
4226 break;
4227 }
4228
4229 return t;
4230 }
4231
4232 #define PROCESS_ARG(N) \
4233 do { \
4234 TREE_OPERAND (t, N) = arg##N; \
4235 if (arg##N && !TYPE_P (arg##N)) \
4236 { \
4237 if (TREE_SIDE_EFFECTS (arg##N)) \
4238 side_effects = 1; \
4239 if (!TREE_READONLY (arg##N) \
4240 && !CONSTANT_CLASS_P (arg##N)) \
4241 (void) (read_only = 0); \
4242 if (!TREE_CONSTANT (arg##N)) \
4243 (void) (constant = 0); \
4244 } \
4245 } while (0)
4246
4247 tree
4248 build2_stat (enum tree_code code, tree tt, tree arg0, tree arg1 MEM_STAT_DECL)
4249 {
4250 bool constant, read_only, side_effects;
4251 tree t;
4252
4253 gcc_assert (TREE_CODE_LENGTH (code) == 2);
4254
4255 if ((code == MINUS_EXPR || code == PLUS_EXPR || code == MULT_EXPR)
4256 && arg0 && arg1 && tt && POINTER_TYPE_P (tt)
4257 /* When sizetype precision doesn't match that of pointers
4258 we need to be able to build explicit extensions or truncations
4259 of the offset argument. */
4260 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (tt))
4261 gcc_assert (TREE_CODE (arg0) == INTEGER_CST
4262 && TREE_CODE (arg1) == INTEGER_CST);
4263
4264 if (code == POINTER_PLUS_EXPR && arg0 && arg1 && tt)
4265 gcc_assert (POINTER_TYPE_P (tt) && POINTER_TYPE_P (TREE_TYPE (arg0))
4266 && ptrofftype_p (TREE_TYPE (arg1)));
4267
4268 t = make_node_stat (code PASS_MEM_STAT);
4269 TREE_TYPE (t) = tt;
4270
4271 /* Below, we automatically set TREE_SIDE_EFFECTS and TREE_READONLY for the
4272 result based on those same flags for the arguments. But if the
4273 arguments aren't really even `tree' expressions, we shouldn't be trying
4274 to do this. */
4275
4276 /* Expressions without side effects may be constant if their
4277 arguments are as well. */
4278 constant = (TREE_CODE_CLASS (code) == tcc_comparison
4279 || TREE_CODE_CLASS (code) == tcc_binary);
4280 read_only = 1;
4281 side_effects = TREE_SIDE_EFFECTS (t);
4282
4283 PROCESS_ARG (0);
4284 PROCESS_ARG (1);
4285
4286 TREE_READONLY (t) = read_only;
4287 TREE_CONSTANT (t) = constant;
4288 TREE_SIDE_EFFECTS (t) = side_effects;
4289 TREE_THIS_VOLATILE (t)
4290 = (TREE_CODE_CLASS (code) == tcc_reference
4291 && arg0 && TREE_THIS_VOLATILE (arg0));
4292
4293 return t;
4294 }
4295
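/* Illustrative sketch, not from the GCC sources: a typical use of the
   buildN family, constructing X + 1 for a hypothetical integer decl X:

     tree one = build_int_cst (integer_type_node, 1);
     tree sum = build2 (PLUS_EXPR, integer_type_node, x, one);

   TREE_CONSTANT, TREE_READONLY and TREE_SIDE_EFFECTS on SUM are then
   derived from the operands via PROCESS_ARG above.  */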
4296
4297 tree
4298 build3_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4299 tree arg2 MEM_STAT_DECL)
4300 {
4301 bool constant, read_only, side_effects;
4302 tree t;
4303
4304 gcc_assert (TREE_CODE_LENGTH (code) == 3);
4305 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4306
4307 t = make_node_stat (code PASS_MEM_STAT);
4308 TREE_TYPE (t) = tt;
4309
4310 read_only = 1;
4311
4312 /* As a special exception, if COND_EXPR has NULL branches, we
4313 assume that it is a gimple statement and always consider
4314 it to have side effects. */
4315 if (code == COND_EXPR
4316 && tt == void_type_node
4317 && arg1 == NULL_TREE
4318 && arg2 == NULL_TREE)
4319 side_effects = true;
4320 else
4321 side_effects = TREE_SIDE_EFFECTS (t);
4322
4323 PROCESS_ARG (0);
4324 PROCESS_ARG (1);
4325 PROCESS_ARG (2);
4326
4327 if (code == COND_EXPR)
4328 TREE_READONLY (t) = read_only;
4329
4330 TREE_SIDE_EFFECTS (t) = side_effects;
4331 TREE_THIS_VOLATILE (t)
4332 = (TREE_CODE_CLASS (code) == tcc_reference
4333 && arg0 && TREE_THIS_VOLATILE (arg0));
4334
4335 return t;
4336 }
4337
4338 tree
4339 build4_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4340 tree arg2, tree arg3 MEM_STAT_DECL)
4341 {
4342 bool constant, read_only, side_effects;
4343 tree t;
4344
4345 gcc_assert (TREE_CODE_LENGTH (code) == 4);
4346
4347 t = make_node_stat (code PASS_MEM_STAT);
4348 TREE_TYPE (t) = tt;
4349
4350 side_effects = TREE_SIDE_EFFECTS (t);
4351
4352 PROCESS_ARG (0);
4353 PROCESS_ARG (1);
4354 PROCESS_ARG (2);
4355 PROCESS_ARG (3);
4356
4357 TREE_SIDE_EFFECTS (t) = side_effects;
4358 TREE_THIS_VOLATILE (t)
4359 = (TREE_CODE_CLASS (code) == tcc_reference
4360 && arg0 && TREE_THIS_VOLATILE (arg0));
4361
4362 return t;
4363 }
4364
4365 tree
4366 build5_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4367 tree arg2, tree arg3, tree arg4 MEM_STAT_DECL)
4368 {
4369 bool constant, read_only, side_effects;
4370 tree t;
4371
4372 gcc_assert (TREE_CODE_LENGTH (code) == 5);
4373
4374 t = make_node_stat (code PASS_MEM_STAT);
4375 TREE_TYPE (t) = tt;
4376
4377 side_effects = TREE_SIDE_EFFECTS (t);
4378
4379 PROCESS_ARG (0);
4380 PROCESS_ARG (1);
4381 PROCESS_ARG (2);
4382 PROCESS_ARG (3);
4383 PROCESS_ARG (4);
4384
4385 TREE_SIDE_EFFECTS (t) = side_effects;
4386 TREE_THIS_VOLATILE (t)
4387 = (TREE_CODE_CLASS (code) == tcc_reference
4388 && arg0 && TREE_THIS_VOLATILE (arg0));
4389
4390 return t;
4391 }
4392
4393 /* Build a simple MEM_REF tree with the semantics of a plain INDIRECT_REF
4394 on the pointer PTR. */
4395
4396 tree
4397 build_simple_mem_ref_loc (location_t loc, tree ptr)
4398 {
4399 HOST_WIDE_INT offset = 0;
4400 tree ptype = TREE_TYPE (ptr);
4401 tree tem;
4402 /* For convenience allow addresses that collapse to a simple base
4403 and offset. */
4404 if (TREE_CODE (ptr) == ADDR_EXPR
4405 && (handled_component_p (TREE_OPERAND (ptr, 0))
4406 || TREE_CODE (TREE_OPERAND (ptr, 0)) == MEM_REF))
4407 {
4408 ptr = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &offset);
4409 gcc_assert (ptr);
4410 ptr = build_fold_addr_expr (ptr);
4411 gcc_assert (is_gimple_reg (ptr) || is_gimple_min_invariant (ptr));
4412 }
4413 tem = build2 (MEM_REF, TREE_TYPE (ptype),
4414 ptr, build_int_cst (ptype, offset));
4415 SET_EXPR_LOCATION (tem, loc);
4416 return tem;
4417 }
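
/* Illustrative sketch, not from the GCC sources: given a hypothetical
   pointer decl or SSA name P of type int * and some location LOC, this
   builds the equivalent of *p:

     tree deref = build_simple_mem_ref_loc (loc, p);

   The result is a MEM_REF of type int whose second operand is a zero
   offset constant of the pointer's type.  */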
4418
4419 /* Return the constant offset of a MEM_REF or TARGET_MEM_REF tree T. */
4420
4421 offset_int
4422 mem_ref_offset (const_tree t)
4423 {
4424 return offset_int::from (TREE_OPERAND (t, 1), SIGNED);
4425 }
4426
4427 /* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE
4428 offset by OFFSET units. */
4429
4430 tree
4431 build_invariant_address (tree type, tree base, HOST_WIDE_INT offset)
4432 {
4433 tree ref = fold_build2 (MEM_REF, TREE_TYPE (type),
4434 build_fold_addr_expr (base),
4435 build_int_cst (ptr_type_node, offset));
4436 tree addr = build1 (ADDR_EXPR, type, ref);
4437 recompute_tree_invariant_for_addr_expr (addr);
4438 return addr;
4439 }
4440
4441 /* Similar to the buildN functions above, but do not specify the TREE_TYPE
4442 and leave TREE_SIDE_EFFECTS as 0.
4443 It is permissible for arguments to be null,
4444 or even garbage if their values do not matter. */
4445
4446 tree
4447 build_nt (enum tree_code code, ...)
4448 {
4449 tree t;
4450 int length;
4451 int i;
4452 va_list p;
4453
4454 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4455
4456 va_start (p, code);
4457
4458 t = make_node (code);
4459 length = TREE_CODE_LENGTH (code);
4460
4461 for (i = 0; i < length; i++)
4462 TREE_OPERAND (t, i) = va_arg (p, tree);
4463
4464 va_end (p);
4465 return t;
4466 }
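
/* Illustrative sketch, not from the GCC sources: build_nt is what
   stabilize_reference above uses to rebuild reference nodes; the caller
   fills in the type and flags afterwards.  With OBJECT and FIELD a
   hypothetical record object and FIELD_DECL:

     tree t = build_nt (COMPONENT_REF, object, field, NULL_TREE);
     TREE_TYPE (t) = TREE_TYPE (field);  */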
4467
4468 /* Similar to build_nt, but for creating a CALL_EXPR object with a
4469 tree vec. */
4470
4471 tree
4472 build_nt_call_vec (tree fn, vec<tree, va_gc> *args)
4473 {
4474 tree ret, t;
4475 unsigned int ix;
4476
4477 ret = build_vl_exp (CALL_EXPR, vec_safe_length (args) + 3);
4478 CALL_EXPR_FN (ret) = fn;
4479 CALL_EXPR_STATIC_CHAIN (ret) = NULL_TREE;
4480 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
4481 CALL_EXPR_ARG (ret, ix) = t;
4482 return ret;
4483 }
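
/* Illustrative sketch, not from the GCC sources: collecting call
   arguments in a GC-allocated vector for a hypothetical callee FN and
   argument trees ARG0/ARG1:

     vec<tree, va_gc> *args = NULL;
     vec_safe_push (args, arg0);
     vec_safe_push (args, arg1);
     tree call = build_nt_call_vec (fn, args);

   The +3 above accounts for the internal operand-count slot, the
   callee FN and the (null) static chain.  */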
4484 \f
4485 /* Create a DECL_... node of code CODE, name NAME and data type TYPE.
4486 We do NOT enter this node in any sort of symbol table.
4487
4488 LOC is the location of the decl.
4489
4490 layout_decl is used to set up the decl's storage layout.
4491 Other slots are initialized to 0 or null pointers. */
4492
4493 tree
4494 build_decl_stat (location_t loc, enum tree_code code, tree name,
4495 tree type MEM_STAT_DECL)
4496 {
4497 tree t;
4498
4499 t = make_node_stat (code PASS_MEM_STAT);
4500 DECL_SOURCE_LOCATION (t) = loc;
4501
4502 /* if (type == error_mark_node)
4503 type = integer_type_node; */
4504 /* That is not done, deliberately, so that having error_mark_node
4505 as the type can suppress useless errors in the use of this variable. */
4506
4507 DECL_NAME (t) = name;
4508 TREE_TYPE (t) = type;
4509
4510 if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
4511 layout_decl (t, 0);
4512
4513 return t;
4514 }
4515
4516 /* Builds and returns a function declaration with NAME and TYPE. */
4517
4518 tree
4519 build_fn_decl (const char *name, tree type)
4520 {
4521 tree id = get_identifier (name);
4522 tree decl = build_decl (input_location, FUNCTION_DECL, id, type);
4523
4524 DECL_EXTERNAL (decl) = 1;
4525 TREE_PUBLIC (decl) = 1;
4526 DECL_ARTIFICIAL (decl) = 1;
4527 TREE_NOTHROW (decl) = 1;
4528
4529 return decl;
4530 }
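
/* Illustrative sketch, not from the GCC sources: declaring an external
   helper "void my_hook (void)" (the name is hypothetical):

     tree fntype = build_function_type_list (void_type_node, NULL_TREE);
     tree fndecl = build_fn_decl ("my_hook", fntype);

   The decl comes back external, public, artificial and nothrow, per the
   flags set above.  */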
4531
4532 vec<tree, va_gc> *all_translation_units;
4533
4534 /* Builds a new translation-unit decl with name NAME, queues it in the
4535 global list of translation-unit decls and returns it. */
4536
4537 tree
4538 build_translation_unit_decl (tree name)
4539 {
4540 tree tu = build_decl (UNKNOWN_LOCATION, TRANSLATION_UNIT_DECL,
4541 name, NULL_TREE);
4542 TRANSLATION_UNIT_LANGUAGE (tu) = lang_hooks.name;
4543 vec_safe_push (all_translation_units, tu);
4544 return tu;
4545 }
4546
4547 \f
4548 /* BLOCK nodes are used to represent the structure of binding contours
4549 and declarations, once those contours have been exited and their contents
4550 compiled. This information is used for outputting debugging info. */
4551
4552 tree
4553 build_block (tree vars, tree subblocks, tree supercontext, tree chain)
4554 {
4555 tree block = make_node (BLOCK);
4556
4557 BLOCK_VARS (block) = vars;
4558 BLOCK_SUBBLOCKS (block) = subblocks;
4559 BLOCK_SUPERCONTEXT (block) = supercontext;
4560 BLOCK_CHAIN (block) = chain;
4561 return block;
4562 }
4563
4564 \f
4565 /* Like SET_EXPR_LOCATION, but make sure the tree can have a location.
4566
4567 LOC is the location to use in tree T. */
4568
4569 void
4570 protected_set_expr_location (tree t, location_t loc)
4571 {
4572 if (t && CAN_HAVE_LOCATION_P (t))
4573 SET_EXPR_LOCATION (t, loc);
4574 }
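
/* Illustrative sketch, not from the GCC sources: use this form when the
   tree might be a decl or a constant, e.g. for a hypothetical MAYBE_EXPR:

     protected_set_expr_location (maybe_expr, input_location);

   A raw SET_EXPR_LOCATION assumes the node can carry a location.  */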
4575 \f
4576 /* Return a declaration like DDECL except that its DECL_ATTRIBUTES
4577 is ATTRIBUTE. */
4578
4579 tree
4580 build_decl_attribute_variant (tree ddecl, tree attribute)
4581 {
4582 DECL_ATTRIBUTES (ddecl) = attribute;
4583 return ddecl;
4584 }
4585
4586 /* Return a type like TTYPE except that its TYPE_ATTRIBUTES
4587 is ATTRIBUTE and its qualifiers are QUALS.
4588
4589 Record such modified types already made so we don't make duplicates. */
4590
4591 tree
4592 build_type_attribute_qual_variant (tree ttype, tree attribute, int quals)
4593 {
4594 if (! attribute_list_equal (TYPE_ATTRIBUTES (ttype), attribute))
4595 {
4596 inchash::hash hstate;
4597 tree ntype;
4598 int i;
4599 tree t;
4600 enum tree_code code = TREE_CODE (ttype);
4601
4602 /* Building a distinct copy of a tagged type is inappropriate; it
4603 causes breakage in code that expects there to be a one-to-one
4604 relationship between a struct and its fields.
4605 build_duplicate_type is another solution (as used in
4606 handle_transparent_union_attribute), but that doesn't play well
4607 with the stronger C++ type identity model. */
4608 if (TREE_CODE (ttype) == RECORD_TYPE
4609 || TREE_CODE (ttype) == UNION_TYPE
4610 || TREE_CODE (ttype) == QUAL_UNION_TYPE
4611 || TREE_CODE (ttype) == ENUMERAL_TYPE)
4612 {
4613 warning (OPT_Wattributes,
4614 "ignoring attributes applied to %qT after definition",
4615 TYPE_MAIN_VARIANT (ttype));
4616 return build_qualified_type (ttype, quals);
4617 }
4618
4619 ttype = build_qualified_type (ttype, TYPE_UNQUALIFIED);
4620 ntype = build_distinct_type_copy (ttype);
4621
4622 TYPE_ATTRIBUTES (ntype) = attribute;
4623
4624 hstate.add_int (code);
4625 if (TREE_TYPE (ntype))
4626 hstate.add_object (TYPE_HASH (TREE_TYPE (ntype)));
4627 attribute_hash_list (attribute, hstate);
4628
4629 switch (TREE_CODE (ntype))
4630 {
4631 case FUNCTION_TYPE:
4632 type_hash_list (TYPE_ARG_TYPES (ntype), hstate);
4633 break;
4634 case ARRAY_TYPE:
4635 if (TYPE_DOMAIN (ntype))
4636 hstate.add_object (TYPE_HASH (TYPE_DOMAIN (ntype)));
4637 break;
4638 case INTEGER_TYPE:
4639 t = TYPE_MAX_VALUE (ntype);
4640 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
4641 hstate.add_object (TREE_INT_CST_ELT (t, i));
4642 break;
4643 case REAL_TYPE:
4644 case FIXED_POINT_TYPE:
4645 {
4646 unsigned int precision = TYPE_PRECISION (ntype);
4647 hstate.add_object (precision);
4648 }
4649 break;
4650 default:
4651 break;
4652 }
4653
4654 ntype = type_hash_canon (hstate.end (), ntype);
4655
4656 /* If the target-dependent attributes make NTYPE different from
4657 its canonical type, we will need to use structural equality
4658 checks for this type. */
4659 if (TYPE_STRUCTURAL_EQUALITY_P (ttype)
4660 || !comp_type_attributes (ntype, ttype))
4661 SET_TYPE_STRUCTURAL_EQUALITY (ntype);
4662 else if (TYPE_CANONICAL (ntype) == ntype)
4663 TYPE_CANONICAL (ntype) = TYPE_CANONICAL (ttype);
4664
4665 ttype = build_qualified_type (ntype, quals);
4666 }
4667 else if (TYPE_QUALS (ttype) != quals)
4668 ttype = build_qualified_type (ttype, quals);
4669
4670 return ttype;
4671 }
4672
4673 /* Check if "omp declare simd" attribute arguments, CLAUSES1 and CLAUSES2, are
4674 the same. */
4675
4676 static bool
4677 omp_declare_simd_clauses_equal (tree clauses1, tree clauses2)
4678 {
4679 tree cl1, cl2;
4680 for (cl1 = clauses1, cl2 = clauses2;
4681 cl1 && cl2;
4682 cl1 = OMP_CLAUSE_CHAIN (cl1), cl2 = OMP_CLAUSE_CHAIN (cl2))
4683 {
4684 if (OMP_CLAUSE_CODE (cl1) != OMP_CLAUSE_CODE (cl2))
4685 return false;
4686 if (OMP_CLAUSE_CODE (cl1) != OMP_CLAUSE_SIMDLEN)
4687 {
4688 if (simple_cst_equal (OMP_CLAUSE_DECL (cl1),
4689 OMP_CLAUSE_DECL (cl2)) != 1)
4690 return false;
4691 }
4692 switch (OMP_CLAUSE_CODE (cl1))
4693 {
4694 case OMP_CLAUSE_ALIGNED:
4695 if (simple_cst_equal (OMP_CLAUSE_ALIGNED_ALIGNMENT (cl1),
4696 OMP_CLAUSE_ALIGNED_ALIGNMENT (cl2)) != 1)
4697 return false;
4698 break;
4699 case OMP_CLAUSE_LINEAR:
4700 if (simple_cst_equal (OMP_CLAUSE_LINEAR_STEP (cl1),
4701 OMP_CLAUSE_LINEAR_STEP (cl2)) != 1)
4702 return false;
4703 break;
4704 case OMP_CLAUSE_SIMDLEN:
4705 if (simple_cst_equal (OMP_CLAUSE_SIMDLEN_EXPR (cl1),
4706 OMP_CLAUSE_SIMDLEN_EXPR (cl2)) != 1)
4707 return false;
4708 default:
4709 break;
4710 }
4711 }
4712 return true;
4713 }
4714
4715 /* Compare two constructor-element-type constants. Return true if the lists
4716 are known to be equal; otherwise return false. */
4717
4718 static bool
4719 simple_cst_list_equal (const_tree l1, const_tree l2)
4720 {
4721 while (l1 != NULL_TREE && l2 != NULL_TREE)
4722 {
4723 if (simple_cst_equal (TREE_VALUE (l1), TREE_VALUE (l2)) != 1)
4724 return false;
4725
4726 l1 = TREE_CHAIN (l1);
4727 l2 = TREE_CHAIN (l2);
4728 }
4729
4730 return l1 == l2;
4731 }
4732
4733 /* Compare two attributes for their value identity. Return true if the
4734 attribute values are known to be equal; otherwise return false.
4735 */
4736
4737 static bool
4738 attribute_value_equal (const_tree attr1, const_tree attr2)
4739 {
4740 if (TREE_VALUE (attr1) == TREE_VALUE (attr2))
4741 return true;
4742
4743 if (TREE_VALUE (attr1) != NULL_TREE
4744 && TREE_CODE (TREE_VALUE (attr1)) == TREE_LIST
4745 && TREE_VALUE (attr2) != NULL
4746 && TREE_CODE (TREE_VALUE (attr2)) == TREE_LIST)
4747 return (simple_cst_list_equal (TREE_VALUE (attr1),
4748 TREE_VALUE (attr2)) == 1);
4749
4750 if ((flag_openmp || flag_openmp_simd)
4751 && TREE_VALUE (attr1) && TREE_VALUE (attr2)
4752 && TREE_CODE (TREE_VALUE (attr1)) == OMP_CLAUSE
4753 && TREE_CODE (TREE_VALUE (attr2)) == OMP_CLAUSE)
4754 return omp_declare_simd_clauses_equal (TREE_VALUE (attr1),
4755 TREE_VALUE (attr2));
4756
4757 return (simple_cst_equal (TREE_VALUE (attr1), TREE_VALUE (attr2)) == 1);
4758 }
4759
4760 /* Return 0 if the attributes for two types are incompatible, 1 if they
4761 are compatible, and 2 if they are nearly compatible (which causes a
4762 warning to be generated). */
4763 int
4764 comp_type_attributes (const_tree type1, const_tree type2)
4765 {
4766 const_tree a1 = TYPE_ATTRIBUTES (type1);
4767 const_tree a2 = TYPE_ATTRIBUTES (type2);
4768 const_tree a;
4769
4770 if (a1 == a2)
4771 return 1;
4772 for (a = a1; a != NULL_TREE; a = TREE_CHAIN (a))
4773 {
4774 const struct attribute_spec *as;
4775 const_tree attr;
4776
4777 as = lookup_attribute_spec (get_attribute_name (a));
4778 if (!as || as->affects_type_identity == false)
4779 continue;
4780
4781 attr = lookup_attribute (as->name, CONST_CAST_TREE (a2));
4782 if (!attr || !attribute_value_equal (a, attr))
4783 break;
4784 }
4785 if (!a)
4786 {
4787 for (a = a2; a != NULL_TREE; a = TREE_CHAIN (a))
4788 {
4789 const struct attribute_spec *as;
4790
4791 as = lookup_attribute_spec (get_attribute_name (a));
4792 if (!as || as->affects_type_identity == false)
4793 continue;
4794
4795 if (!lookup_attribute (as->name, CONST_CAST_TREE (a1)))
4796 break;
4797 /* We don't need to compare trees again, as we already did
4798 this in the first loop. */
4799 }
4800 /* All attributes affecting type identity are equal, so there
4801 is no need to call the target hook for comparison. */
4802 if (!a)
4803 return 1;
4804 }
4805 /* As some type combinations (like the default calling convention) might
4806 still be compatible, we have to call the target hook to get the final result. */
4807 return targetm.comp_type_attributes (type1, type2);
4808 }
4809
4810 /* Return a type like TTYPE except that its TYPE_ATTRIBUTES
4811 is ATTRIBUTE.
4812
4813 Record such modified types already made so we don't make duplicates. */
4814
4815 tree
4816 build_type_attribute_variant (tree ttype, tree attribute)
4817 {
4818 return build_type_attribute_qual_variant (ttype, attribute,
4819 TYPE_QUALS (ttype));
4820 }
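
/* Illustrative sketch, not from the GCC sources: attaching a
   hypothetical no-argument attribute "foo" to an existing type TYPE:

     tree attrs = tree_cons (get_identifier ("foo"), NULL_TREE,
                             TYPE_ATTRIBUTES (type));
     type = build_type_attribute_variant (type, attrs);

   Identical requests are canonicalized through the type hash table in
   build_type_attribute_qual_variant, so repeating this does not create
   duplicate variants.  */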
4821
4822
4823 /* Reset the expression *EXPR_P, a size or position.
4824
4825 ??? We could reset all non-constant sizes or positions. But it's cheap
4826 enough to not do so and refrain from adding workarounds to dwarf2out.c.
4827
4828 We need to reset self-referential sizes or positions because they cannot
4829 be gimplified and thus can contain a CALL_EXPR after the gimplification
4830 is finished, which will run afoul of LTO streaming. And they need to be
4831 reset to something essentially dummy but not constant, so as to preserve
4832 the properties of the object they are attached to. */
4833
4834 static inline void
4835 free_lang_data_in_one_sizepos (tree *expr_p)
4836 {
4837 tree expr = *expr_p;
4838 if (CONTAINS_PLACEHOLDER_P (expr))
4839 *expr_p = build0 (PLACEHOLDER_EXPR, TREE_TYPE (expr));
4840 }
4841
4842
4843 /* Reset all the fields in a binfo node BINFO. We only keep
4844 BINFO_VTABLE, which is used by gimple_fold_obj_type_ref. */
4845
4846 static void
4847 free_lang_data_in_binfo (tree binfo)
4848 {
4849 unsigned i;
4850 tree t;
4851
4852 gcc_assert (TREE_CODE (binfo) == TREE_BINFO);
4853
4854 BINFO_VIRTUALS (binfo) = NULL_TREE;
4855 BINFO_BASE_ACCESSES (binfo) = NULL;
4856 BINFO_INHERITANCE_CHAIN (binfo) = NULL_TREE;
4857 BINFO_SUBVTT_INDEX (binfo) = NULL_TREE;
4858
4859 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (binfo), i, t)
4860 free_lang_data_in_binfo (t);
4861 }
4862
4863
4864 /* Reset all language specific information still present in TYPE. */
4865
4866 static void
4867 free_lang_data_in_type (tree type)
4868 {
4869 gcc_assert (TYPE_P (type));
4870
4871 /* Give the FE a chance to remove its own data first. */
4872 lang_hooks.free_lang_data (type);
4873
4874 TREE_LANG_FLAG_0 (type) = 0;
4875 TREE_LANG_FLAG_1 (type) = 0;
4876 TREE_LANG_FLAG_2 (type) = 0;
4877 TREE_LANG_FLAG_3 (type) = 0;
4878 TREE_LANG_FLAG_4 (type) = 0;
4879 TREE_LANG_FLAG_5 (type) = 0;
4880 TREE_LANG_FLAG_6 (type) = 0;
4881
4882 if (TREE_CODE (type) == FUNCTION_TYPE)
4883 {
4884 /* Remove the const and volatile qualifiers from arguments. The
4885 C++ front end removes them, but the C front end does not,
4886 leading to false ODR violation errors when merging two
4887 instances of the same function signature compiled by
4888 different front ends. */
4889 tree p;
4890
4891 for (p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
4892 {
4893 tree arg_type = TREE_VALUE (p);
4894
4895 if (TYPE_READONLY (arg_type) || TYPE_VOLATILE (arg_type))
4896 {
4897 int quals = TYPE_QUALS (arg_type)
4898 & ~TYPE_QUAL_CONST
4899 & ~TYPE_QUAL_VOLATILE;
4900 TREE_VALUE (p) = build_qualified_type (arg_type, quals);
4901 free_lang_data_in_type (TREE_VALUE (p));
4902 }
4903 }
4904 }
4905
4906 /* Remove members that are not actually FIELD_DECLs from the field
4907 list of an aggregate. These occur in C++. */
4908 if (RECORD_OR_UNION_TYPE_P (type))
4909 {
4910 tree prev, member;
4911
4912 /* Note that TYPE_FIELDS can be shared across distinct
4913 TREE_TYPEs. Therefore, if the first field of TYPE_FIELDS is
4914 to be removed, we cannot set its TREE_CHAIN to NULL.
4915 Otherwise, we would not be able to find all the other fields
4916 in the other instances of this TREE_TYPE.
4917
4918 This was causing an ICE in testsuite/g++.dg/lto/20080915.C. */
4919 prev = NULL_TREE;
4920 member = TYPE_FIELDS (type);
4921 while (member)
4922 {
4923 if (TREE_CODE (member) == FIELD_DECL
4924 || TREE_CODE (member) == TYPE_DECL)
4925 {
4926 if (prev)
4927 TREE_CHAIN (prev) = member;
4928 else
4929 TYPE_FIELDS (type) = member;
4930 prev = member;
4931 }
4932
4933 member = TREE_CHAIN (member);
4934 }
4935
4936 if (prev)
4937 TREE_CHAIN (prev) = NULL_TREE;
4938 else
4939 TYPE_FIELDS (type) = NULL_TREE;
4940
4941 TYPE_METHODS (type) = NULL_TREE;
4942 if (TYPE_BINFO (type))
4943 free_lang_data_in_binfo (TYPE_BINFO (type));
4944 }
4945 else
4946 {
4947 /* For non-aggregate types, clear out the language slot (which
4948 overloads TYPE_BINFO). */
4949 TYPE_LANG_SLOT_1 (type) = NULL_TREE;
4950
4951 if (INTEGRAL_TYPE_P (type)
4952 || SCALAR_FLOAT_TYPE_P (type)
4953 || FIXED_POINT_TYPE_P (type))
4954 {
4955 free_lang_data_in_one_sizepos (&TYPE_MIN_VALUE (type));
4956 free_lang_data_in_one_sizepos (&TYPE_MAX_VALUE (type));
4957 }
4958 }
4959
4960 free_lang_data_in_one_sizepos (&TYPE_SIZE (type));
4961 free_lang_data_in_one_sizepos (&TYPE_SIZE_UNIT (type));
4962
4963 if (TYPE_CONTEXT (type)
4964 && TREE_CODE (TYPE_CONTEXT (type)) == BLOCK)
4965 {
4966 tree ctx = TYPE_CONTEXT (type);
4967 do
4968 {
4969 ctx = BLOCK_SUPERCONTEXT (ctx);
4970 }
4971 while (ctx && TREE_CODE (ctx) == BLOCK);
4972 TYPE_CONTEXT (type) = ctx;
4973 }
4974 }
4975
4976
4977 /* Return true if DECL may need an assembler name to be set. */
4978
4979 static inline bool
4980 need_assembler_name_p (tree decl)
4981 {
4982 /* Only FUNCTION_DECLs and VAR_DECLs are considered. */
4983 if (TREE_CODE (decl) != FUNCTION_DECL
4984 && TREE_CODE (decl) != VAR_DECL)
4985 return false;
4986
4987 /* If DECL already has its assembler name set, it does not need a
4988 new one. */
4989 if (!HAS_DECL_ASSEMBLER_NAME_P (decl)
4990 || DECL_ASSEMBLER_NAME_SET_P (decl))
4991 return false;
4992
4993 /* Abstract decls do not need an assembler name. */
4994 if (DECL_ABSTRACT (decl))
4995 return false;
4996
4997 /* For VAR_DECLs, only static, public and external symbols need an
4998 assembler name. */
4999 if (TREE_CODE (decl) == VAR_DECL
5000 && !TREE_STATIC (decl)
5001 && !TREE_PUBLIC (decl)
5002 && !DECL_EXTERNAL (decl))
5003 return false;
5004
5005 if (TREE_CODE (decl) == FUNCTION_DECL)
5006 {
5007 /* Do not set assembler name on builtins. Allow RTL expansion to
5008 decide whether to expand inline or via a regular call. */
5009 if (DECL_BUILT_IN (decl)
5010 && DECL_BUILT_IN_CLASS (decl) != BUILT_IN_FRONTEND)
5011 return false;
5012
5013 /* Functions represented in the callgraph need an assembler name. */
5014 if (cgraph_node::get (decl) != NULL)
5015 return true;
5016
5017 /* Unused and not public functions don't need an assembler name. */
5018 if (!TREE_USED (decl) && !TREE_PUBLIC (decl))
5019 return false;
5020 }
5021
5022 return true;
5023 }
5024
5025
5026 /* Reset all language specific information still present in symbol
5027 DECL. */
5028
5029 static void
5030 free_lang_data_in_decl (tree decl)
5031 {
5032 gcc_assert (DECL_P (decl));
5033
5034 /* Give the FE a chance to remove its own data first. */
5035 lang_hooks.free_lang_data (decl);
5036
5037 TREE_LANG_FLAG_0 (decl) = 0;
5038 TREE_LANG_FLAG_1 (decl) = 0;
5039 TREE_LANG_FLAG_2 (decl) = 0;
5040 TREE_LANG_FLAG_3 (decl) = 0;
5041 TREE_LANG_FLAG_4 (decl) = 0;
5042 TREE_LANG_FLAG_5 (decl) = 0;
5043 TREE_LANG_FLAG_6 (decl) = 0;
5044
5045 free_lang_data_in_one_sizepos (&DECL_SIZE (decl));
5046 free_lang_data_in_one_sizepos (&DECL_SIZE_UNIT (decl));
5047 if (TREE_CODE (decl) == FIELD_DECL)
5048 {
5049 free_lang_data_in_one_sizepos (&DECL_FIELD_OFFSET (decl));
5050 if (TREE_CODE (DECL_CONTEXT (decl)) == QUAL_UNION_TYPE)
5051 DECL_QUALIFIER (decl) = NULL_TREE;
5052 }
5053
5054 if (TREE_CODE (decl) == FUNCTION_DECL)
5055 {
5056 struct cgraph_node *node;
5057 if (!(node = cgraph_node::get (decl))
5058 || (!node->definition && !node->clones))
5059 {
5060 if (node)
5061 node->release_body ();
5062 else
5063 {
5064 release_function_body (decl);
5065 DECL_ARGUMENTS (decl) = NULL;
5066 DECL_RESULT (decl) = NULL;
5067 DECL_INITIAL (decl) = error_mark_node;
5068 }
5069 }
5070 if (gimple_has_body_p (decl))
5071 {
5072 tree t;
5073
5074 /* If DECL has a gimple body, then the context for its
5075 arguments must be DECL. Otherwise, it doesn't really
5076 matter, as we will not be emitting any code for DECL. In
5077 general, there may be other instances of DECL created by
5078 the front end and since PARM_DECLs are generally shared,
5079 their DECL_CONTEXT changes as the replicas of DECL are
5080 created. The only time where DECL_CONTEXT is important
5081 is for the FUNCTION_DECLs that have a gimple body (since
5082 the PARM_DECL will be used in the function's body). */
5083 for (t = DECL_ARGUMENTS (decl); t; t = TREE_CHAIN (t))
5084 DECL_CONTEXT (t) = decl;
5085 }
5086
5087 /* DECL_SAVED_TREE holds the GENERIC representation for DECL.
5088 At this point, it is not needed anymore. */
5089 DECL_SAVED_TREE (decl) = NULL_TREE;
5090
5091 /* Clear the abstract origin if it refers to a method. Otherwise
5092 dwarf2out.c will ICE as we clear TYPE_METHODS and thus the
5093 origin will not be output correctly. */
5094 if (DECL_ABSTRACT_ORIGIN (decl)
5095 && DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))
5096 && RECORD_OR_UNION_TYPE_P
5097 (DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))))
5098 DECL_ABSTRACT_ORIGIN (decl) = NULL_TREE;
5099
5100 /* Sometimes the C++ frontend doesn't manage to transform a temporary
5101 DECL_VINDEX referring to itself into a vtable slot number as it
5102 should. Happens with functions that are copied and then forgotten
5103 about. Just clear it, it won't matter anymore. */
5104 if (DECL_VINDEX (decl) && !tree_fits_shwi_p (DECL_VINDEX (decl)))
5105 DECL_VINDEX (decl) = NULL_TREE;
5106 }
5107 else if (TREE_CODE (decl) == VAR_DECL)
5108 {
5109 if ((DECL_EXTERNAL (decl)
5110 && (!TREE_STATIC (decl) || !TREE_READONLY (decl)))
5111 || (decl_function_context (decl) && !TREE_STATIC (decl)))
5112 DECL_INITIAL (decl) = NULL_TREE;
5113 }
5114 else if (TREE_CODE (decl) == TYPE_DECL
5115 || TREE_CODE (decl) == FIELD_DECL)
5116 DECL_INITIAL (decl) = NULL_TREE;
5117 else if (TREE_CODE (decl) == TRANSLATION_UNIT_DECL
5118 && DECL_INITIAL (decl)
5119 && TREE_CODE (DECL_INITIAL (decl)) == BLOCK)
5120 {
5121 /* Strip builtins from the translation-unit BLOCK. We still have targets
5122 without builtin_decl_explicit support and also builtins are shared
5123 nodes and thus we can't use TREE_CHAIN in multiple lists. */
5124 tree *nextp = &BLOCK_VARS (DECL_INITIAL (decl));
5125 while (*nextp)
5126 {
5127 tree var = *nextp;
5128 if (TREE_CODE (var) == FUNCTION_DECL
5129 && DECL_BUILT_IN (var))
5130 *nextp = TREE_CHAIN (var);
5131 else
5132 nextp = &TREE_CHAIN (var);
5133 }
5134 }
5135 }
5136
5137
5138 /* Data used when collecting DECLs and TYPEs for language data removal. */
5139
5140 struct free_lang_data_d
5141 {
5142 /* Worklist to avoid excessive recursion. */
5143 vec<tree> worklist;
5144
5145 /* Set of traversed objects. Used to avoid duplicate visits. */
5146 struct pointer_set_t *pset;
5147
5148 /* Array of symbols to process with free_lang_data_in_decl. */
5149 vec<tree> decls;
5150
5151 /* Array of types to process with free_lang_data_in_type. */
5152 vec<tree> types;
5153 };
5154
5155
5156 /* Save all language fields needed to generate proper debug information
5157 for DECL. This saves most fields cleared out by free_lang_data_in_decl. */
5158
5159 static void
5160 save_debug_info_for_decl (tree t)
5161 {
5162 /*struct saved_debug_info_d *sdi;*/
5163
5164 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && DECL_P (t));
5165
5166 /* FIXME. Partial implementation for saving debug info removed. */
5167 }
5168
5169
5170 /* Save all language fields needed to generate proper debug information
5171 for TYPE. This saves most fields cleared out by free_lang_data_in_type. */
5172
5173 static void
5174 save_debug_info_for_type (tree t)
5175 {
5176 /*struct saved_debug_info_d *sdi;*/
5177
5178 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && TYPE_P (t));
5179
5180 /* FIXME. Partial implementation for saving debug info removed. */
5181 }
5182
5183
5184 /* Add type or decl T to one of the list of tree nodes that need their
5185 language data removed. The lists are held inside FLD. */
5186
5187 static void
5188 add_tree_to_fld_list (tree t, struct free_lang_data_d *fld)
5189 {
5190 if (DECL_P (t))
5191 {
5192 fld->decls.safe_push (t);
5193 if (debug_info_level > DINFO_LEVEL_TERSE)
5194 save_debug_info_for_decl (t);
5195 }
5196 else if (TYPE_P (t))
5197 {
5198 fld->types.safe_push (t);
5199 if (debug_info_level > DINFO_LEVEL_TERSE)
5200 save_debug_info_for_type (t);
5201 }
5202 else
5203 gcc_unreachable ();
5204 }
5205
5206 /* Push tree node T into FLD->WORKLIST. */
5207
5208 static inline void
5209 fld_worklist_push (tree t, struct free_lang_data_d *fld)
5210 {
5211 if (t && !is_lang_specific (t) && !pointer_set_contains (fld->pset, t))
5212 fld->worklist.safe_push ((t));
5213 }
5214
5215
5216 /* Operand callback helper for free_lang_data_in_node. *TP is the
5217 subtree operand being considered. */
5218
5219 static tree
5220 find_decls_types_r (tree *tp, int *ws, void *data)
5221 {
5222 tree t = *tp;
5223 struct free_lang_data_d *fld = (struct free_lang_data_d *) data;
5224
5225 if (TREE_CODE (t) == TREE_LIST)
5226 return NULL_TREE;
5227
5228 /* Language specific nodes will be removed, so there is no need
5229 to gather anything under them. */
5230 if (is_lang_specific (t))
5231 {
5232 *ws = 0;
5233 return NULL_TREE;
5234 }
5235
5236 if (DECL_P (t))
5237 {
5238 /* Note that walk_tree does not traverse every possible field in
5239 decls, so we have to do our own traversals here. */
5240 add_tree_to_fld_list (t, fld);
5241
5242 fld_worklist_push (DECL_NAME (t), fld);
5243 fld_worklist_push (DECL_CONTEXT (t), fld);
5244 fld_worklist_push (DECL_SIZE (t), fld);
5245 fld_worklist_push (DECL_SIZE_UNIT (t), fld);
5246
5247 /* We are going to remove everything under DECL_INITIAL for
5248 TYPE_DECLs. No point walking them. */
5249 if (TREE_CODE (t) != TYPE_DECL)
5250 fld_worklist_push (DECL_INITIAL (t), fld);
5251
5252 fld_worklist_push (DECL_ATTRIBUTES (t), fld);
5253 fld_worklist_push (DECL_ABSTRACT_ORIGIN (t), fld);
5254
5255 if (TREE_CODE (t) == FUNCTION_DECL)
5256 {
5257 fld_worklist_push (DECL_ARGUMENTS (t), fld);
5258 fld_worklist_push (DECL_RESULT (t), fld);
5259 }
5260 else if (TREE_CODE (t) == TYPE_DECL)
5261 {
5262 fld_worklist_push (DECL_ORIGINAL_TYPE (t), fld);
5263 }
5264 else if (TREE_CODE (t) == FIELD_DECL)
5265 {
5266 fld_worklist_push (DECL_FIELD_OFFSET (t), fld);
5267 fld_worklist_push (DECL_BIT_FIELD_TYPE (t), fld);
5268 fld_worklist_push (DECL_FIELD_BIT_OFFSET (t), fld);
5269 fld_worklist_push (DECL_FCONTEXT (t), fld);
5270 }
5271
5272 if ((TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL)
5273 && DECL_HAS_VALUE_EXPR_P (t))
5274 fld_worklist_push (DECL_VALUE_EXPR (t), fld);
5275
5276 if (TREE_CODE (t) != FIELD_DECL
5277 && TREE_CODE (t) != TYPE_DECL)
5278 fld_worklist_push (TREE_CHAIN (t), fld);
5279 *ws = 0;
5280 }
5281 else if (TYPE_P (t))
5282 {
5283 /* Note that walk_tree does not traverse every possible field in
5284 types, so we have to do our own traversals here. */
5285 add_tree_to_fld_list (t, fld);
5286
5287 if (!RECORD_OR_UNION_TYPE_P (t))
5288 fld_worklist_push (TYPE_CACHED_VALUES (t), fld);
5289 fld_worklist_push (TYPE_SIZE (t), fld);
5290 fld_worklist_push (TYPE_SIZE_UNIT (t), fld);
5291 fld_worklist_push (TYPE_ATTRIBUTES (t), fld);
5292 fld_worklist_push (TYPE_POINTER_TO (t), fld);
5293 fld_worklist_push (TYPE_REFERENCE_TO (t), fld);
5294 fld_worklist_push (TYPE_NAME (t), fld);
5295 /* Do not walk TYPE_NEXT_PTR_TO or TYPE_NEXT_REF_TO. We do not stream
5296 them and thus do not want to reach unused pointer types
5297 this way. */
5298 if (!POINTER_TYPE_P (t))
5299 fld_worklist_push (TYPE_MINVAL (t), fld);
5300 if (!RECORD_OR_UNION_TYPE_P (t))
5301 fld_worklist_push (TYPE_MAXVAL (t), fld);
5302 fld_worklist_push (TYPE_MAIN_VARIANT (t), fld);
5303 /* Do not walk TYPE_NEXT_VARIANT. We do not stream it and thus
5304 do not want to reach unused variants this way. */
5305 if (TYPE_CONTEXT (t))
5306 {
5307 tree ctx = TYPE_CONTEXT (t);
5308 /* We adjust BLOCK TYPE_CONTEXTs to the innermost non-BLOCK one.
5309 So push that instead. */
5310 while (ctx && TREE_CODE (ctx) == BLOCK)
5311 ctx = BLOCK_SUPERCONTEXT (ctx);
5312 fld_worklist_push (ctx, fld);
5313 }
5314 /* Do not walk TYPE_CANONICAL. We do not stream it and thus do
5315 not want to reach unused types this way. */
5316
5317 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t))
5318 {
5319 unsigned i;
5320 tree tem;
5321 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (TYPE_BINFO (t)), i, tem)
5322 fld_worklist_push (TREE_TYPE (tem), fld);
5323 tem = BINFO_VIRTUALS (TYPE_BINFO (t));
5324 if (tem
5325 /* The Java FE overloads BINFO_VIRTUALS for its own purpose. */
5326 && TREE_CODE (tem) == TREE_LIST)
5327 do
5328 {
5329 fld_worklist_push (TREE_VALUE (tem), fld);
5330 tem = TREE_CHAIN (tem);
5331 }
5332 while (tem);
5333 }
5334 if (RECORD_OR_UNION_TYPE_P (t))
5335 {
5336 tree tem;
5337 /* Push all TYPE_FIELDS - there can be interleaving interesting
5338 and non-interesting things. */
5339 tem = TYPE_FIELDS (t);
5340 while (tem)
5341 {
5342 if (TREE_CODE (tem) == FIELD_DECL
5343 || TREE_CODE (tem) == TYPE_DECL)
5344 fld_worklist_push (tem, fld);
5345 tem = TREE_CHAIN (tem);
5346 }
5347 }
5348
5349 fld_worklist_push (TYPE_STUB_DECL (t), fld);
5350 *ws = 0;
5351 }
5352 else if (TREE_CODE (t) == BLOCK)
5353 {
5354 tree tem;
5355 for (tem = BLOCK_VARS (t); tem; tem = TREE_CHAIN (tem))
5356 fld_worklist_push (tem, fld);
5357 for (tem = BLOCK_SUBBLOCKS (t); tem; tem = BLOCK_CHAIN (tem))
5358 fld_worklist_push (tem, fld);
5359 fld_worklist_push (BLOCK_ABSTRACT_ORIGIN (t), fld);
5360 }
5361
5362 if (TREE_CODE (t) != IDENTIFIER_NODE
5363 && CODE_CONTAINS_STRUCT (TREE_CODE (t), TS_TYPED))
5364 fld_worklist_push (TREE_TYPE (t), fld);
5365
5366 return NULL_TREE;
5367 }
5368
5369
5370 /* Find decls and types in T. */
5371
5372 static void
5373 find_decls_types (tree t, struct free_lang_data_d *fld)
5374 {
5375 while (1)
5376 {
5377 if (!pointer_set_contains (fld->pset, t))
5378 walk_tree (&t, find_decls_types_r, fld, fld->pset);
5379 if (fld->worklist.is_empty ())
5380 break;
5381 t = fld->worklist.pop ();
5382 }
5383 }
5384
5385 /* Translate all the types in LIST into the corresponding runtime
5386 types. */
5387
5388 static tree
5389 get_eh_types_for_runtime (tree list)
5390 {
5391 tree head, prev;
5392
5393 if (list == NULL_TREE)
5394 return NULL_TREE;
5395
5396 head = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5397 prev = head;
5398 list = TREE_CHAIN (list);
5399 while (list)
5400 {
5401 tree n = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5402 TREE_CHAIN (prev) = n;
5403 prev = TREE_CHAIN (prev);
5404 list = TREE_CHAIN (list);
5405 }
5406
5407 return head;
5408 }
5409
5410
5411 /* Find decls and types referenced in EH region R and store them in
5412 FLD->DECLS and FLD->TYPES. */
5413
5414 static void
5415 find_decls_types_in_eh_region (eh_region r, struct free_lang_data_d *fld)
5416 {
5417 switch (r->type)
5418 {
5419 case ERT_CLEANUP:
5420 break;
5421
5422 case ERT_TRY:
5423 {
5424 eh_catch c;
5425
5426 /* The types referenced in each catch must first be changed to the
5427 EH types used at runtime. This removes references to FE types
5428 in the region. */
5429 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
5430 {
5431 c->type_list = get_eh_types_for_runtime (c->type_list);
5432 walk_tree (&c->type_list, find_decls_types_r, fld, fld->pset);
5433 }
5434 }
5435 break;
5436
5437 case ERT_ALLOWED_EXCEPTIONS:
5438 r->u.allowed.type_list
5439 = get_eh_types_for_runtime (r->u.allowed.type_list);
5440 walk_tree (&r->u.allowed.type_list, find_decls_types_r, fld, fld->pset);
5441 break;
5442
5443 case ERT_MUST_NOT_THROW:
5444 walk_tree (&r->u.must_not_throw.failure_decl,
5445 find_decls_types_r, fld, fld->pset);
5446 break;
5447 }
5448 }
5449
5450
5451 /* Find decls and types referenced in cgraph node N and store them in
5452 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5453 look for *every* kind of DECL and TYPE node reachable from N,
5454 including those embedded inside types and decls (i.e., TYPE_DECLs,
5455 NAMESPACE_DECLs, etc). */
5456
5457 static void
5458 find_decls_types_in_node (struct cgraph_node *n, struct free_lang_data_d *fld)
5459 {
5460 basic_block bb;
5461 struct function *fn;
5462 unsigned ix;
5463 tree t;
5464
5465 find_decls_types (n->decl, fld);
5466
5467 if (!gimple_has_body_p (n->decl))
5468 return;
5469
5470 gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);
5471
5472 fn = DECL_STRUCT_FUNCTION (n->decl);
5473
5474 /* Traverse locals. */
5475 FOR_EACH_LOCAL_DECL (fn, ix, t)
5476 find_decls_types (t, fld);
5477
5478 /* Traverse EH regions in FN. */
5479 {
5480 eh_region r;
5481 FOR_ALL_EH_REGION_FN (r, fn)
5482 find_decls_types_in_eh_region (r, fld);
5483 }
5484
5485 /* Traverse every statement in FN. */
5486 FOR_EACH_BB_FN (bb, fn)
5487 {
5488 gimple_stmt_iterator si;
5489 unsigned i;
5490
5491 for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
5492 {
5493 gimple phi = gsi_stmt (si);
5494
5495 for (i = 0; i < gimple_phi_num_args (phi); i++)
5496 {
5497 tree *arg_p = gimple_phi_arg_def_ptr (phi, i);
5498 find_decls_types (*arg_p, fld);
5499 }
5500 }
5501
5502 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
5503 {
5504 gimple stmt = gsi_stmt (si);
5505
5506 if (is_gimple_call (stmt))
5507 find_decls_types (gimple_call_fntype (stmt), fld);
5508
5509 for (i = 0; i < gimple_num_ops (stmt); i++)
5510 {
5511 tree arg = gimple_op (stmt, i);
5512 find_decls_types (arg, fld);
5513 }
5514 }
5515 }
5516 }
5517
5518
5519 /* Find decls and types referenced in varpool node N and store them in
5520 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5521 look for *every* kind of DECL and TYPE node reachable from N,
5522 including those embedded inside types and decls (i.e., TYPE_DECLs,
5523 NAMESPACE_DECLs, etc). */
5524
5525 static void
5526 find_decls_types_in_var (varpool_node *v, struct free_lang_data_d *fld)
5527 {
5528 find_decls_types (v->decl, fld);
5529 }
5530
5531 /* If T needs an assembler name, have one created for it. */
5532
5533 void
5534 assign_assembler_name_if_neeeded (tree t)
5535 {
5536 if (need_assembler_name_p (t))
5537 {
5538 /* When setting DECL_ASSEMBLER_NAME, the C++ mangler may emit
5539 diagnostics that use input_location to show locus
5540 information. The problem here is that, at this point,
5541 input_location is generally anchored to the end of the file
5542 (since the parser is long gone), so we don't have a good
5543 position to pin it to.
5544
5545 To alleviate this problem, this uses the location of T's
5546 declaration. Examples of this are
5547 testsuite/g++.dg/template/cond2.C and
5548 testsuite/g++.dg/template/pr35240.C. */
5549 location_t saved_location = input_location;
5550 input_location = DECL_SOURCE_LOCATION (t);
5551
5552 decl_assembler_name (t);
5553
5554 input_location = saved_location;
5555 }
5556 }
5557
5558
5559 /* Free language specific information for every operand and expression
5560 in every node of the call graph. This process operates in three stages:
5561
5562 1- Every callgraph node and varpool node is traversed looking for
5563 decls and types embedded in them. This is a more exhaustive
5564 search than that done by find_referenced_vars, because it will
5565 also collect individual fields, decls embedded in types, etc.
5566
5567 2- All the decls found are sent to free_lang_data_in_decl.
5568
5569 3- All the types found are sent to free_lang_data_in_type.
5570
5571 The ordering between decls and types is important because
5572 free_lang_data_in_decl sets assembler names, which includes
5573 mangling. So types cannot be freed up until assembler names have
5574 been set up. */
5575
5576 static void
5577 free_lang_data_in_cgraph (void)
5578 {
5579 struct cgraph_node *n;
5580 varpool_node *v;
5581 struct free_lang_data_d fld;
5582 tree t;
5583 unsigned i;
5584 alias_pair *p;
5585
5586 /* Initialize sets and arrays to store referenced decls and types. */
5587 fld.pset = pointer_set_create ();
5588 fld.worklist.create (0);
5589 fld.decls.create (100);
5590 fld.types.create (100);
5591
5592 /* Find decls and types in the body of every function in the callgraph. */
5593 FOR_EACH_FUNCTION (n)
5594 find_decls_types_in_node (n, &fld);
5595
5596 FOR_EACH_VEC_SAFE_ELT (alias_pairs, i, p)
5597 find_decls_types (p->decl, &fld);
5598
5599 /* Find decls and types in every varpool symbol. */
5600 FOR_EACH_VARIABLE (v)
5601 find_decls_types_in_var (v, &fld);
5602
5603 /* Set the assembler name on every decl found. We need to do this
5604 now because free_lang_data_in_decl will invalidate data needed
5605 for mangling. This breaks mangling on interdependent decls. */
5606 FOR_EACH_VEC_ELT (fld.decls, i, t)
5607 assign_assembler_name_if_neeeded (t);
5608
5609 /* Traverse every decl found freeing its language data. */
5610 FOR_EACH_VEC_ELT (fld.decls, i, t)
5611 free_lang_data_in_decl (t);
5612
5613 /* Traverse every type found freeing its language data. */
5614 FOR_EACH_VEC_ELT (fld.types, i, t)
5615 free_lang_data_in_type (t);
5616
5617 pointer_set_destroy (fld.pset);
5618 fld.worklist.release ();
5619 fld.decls.release ();
5620 fld.types.release ();
5621 }
5622
5623
5624 /* Free resources that are used by the front end but are not needed once it is done. */
5625
5626 static unsigned
5627 free_lang_data (void)
5628 {
5629 unsigned i;
5630
5631 /* If we are the LTO frontend we have freed lang-specific data already. */
5632 if (in_lto_p
5633 || !flag_generate_lto)
5634 return 0;
5635
5636 /* Allocate and assign alias sets to the standard integer types
5637 while the slots are still set up the way the front ends generated them. */
5638 for (i = 0; i < itk_none; ++i)
5639 if (integer_types[i])
5640 TYPE_ALIAS_SET (integer_types[i]) = get_alias_set (integer_types[i]);
5641
5642 /* Traverse the IL resetting language specific information for
5643 operands, expressions, etc. */
5644 free_lang_data_in_cgraph ();
5645
5646 /* Create gimple variants for common types. */
5647 ptrdiff_type_node = integer_type_node;
5648 fileptr_type_node = ptr_type_node;
5649
5650 /* Reset some langhooks. Do not reset types_compatible_p, it may
5651 still be used indirectly via the get_alias_set langhook. */
5652 lang_hooks.dwarf_name = lhd_dwarf_name;
5653 lang_hooks.decl_printable_name = gimple_decl_printable_name;
5654 /* We do not want the default decl_assembler_name implementation,
5655 rather if we have fixed everything we want a wrapper around it
5656 asserting that all non-local symbols already got their assembler
5657 name and only produce assembler names for local symbols. Or rather
5658 make sure we never call decl_assembler_name on local symbols and
5659 devise a separate, middle-end private scheme for it. */
5660
5661 /* Reset diagnostic machinery. */
5662 tree_diagnostics_defaults (global_dc);
5663
5664 return 0;
5665 }
5666
5667
5668 namespace {
5669
5670 const pass_data pass_data_ipa_free_lang_data =
5671 {
5672 SIMPLE_IPA_PASS, /* type */
5673 "*free_lang_data", /* name */
5674 OPTGROUP_NONE, /* optinfo_flags */
5675 TV_IPA_FREE_LANG_DATA, /* tv_id */
5676 0, /* properties_required */
5677 0, /* properties_provided */
5678 0, /* properties_destroyed */
5679 0, /* todo_flags_start */
5680 0, /* todo_flags_finish */
5681 };
5682
5683 class pass_ipa_free_lang_data : public simple_ipa_opt_pass
5684 {
5685 public:
5686 pass_ipa_free_lang_data (gcc::context *ctxt)
5687 : simple_ipa_opt_pass (pass_data_ipa_free_lang_data, ctxt)
5688 {}
5689
5690 /* opt_pass methods: */
5691 virtual unsigned int execute (function *) { return free_lang_data (); }
5692
5693 }; // class pass_ipa_free_lang_data
5694
5695 } // anon namespace
5696
5697 simple_ipa_opt_pass *
5698 make_pass_ipa_free_lang_data (gcc::context *ctxt)
5699 {
5700 return new pass_ipa_free_lang_data (ctxt);
5701 }
5702
5703 /* The backbone of is_attribute_p(). ATTR_LEN is the string length of
5704 ATTR_NAME. Also used internally by remove_attribute(). */
5705 bool
5706 private_is_attribute_p (const char *attr_name, size_t attr_len, const_tree ident)
5707 {
5708 size_t ident_len = IDENTIFIER_LENGTH (ident);
5709
5710 if (ident_len == attr_len)
5711 {
5712 if (strcmp (attr_name, IDENTIFIER_POINTER (ident)) == 0)
5713 return true;
5714 }
5715 else if (ident_len == attr_len + 4)
5716 {
5717 /* There is the possibility that ATTR is 'text' and IDENT is
5718 '__text__'. */
5719 const char *p = IDENTIFIER_POINTER (ident);
5720 if (p[0] == '_' && p[1] == '_'
5721 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
5722 && strncmp (attr_name, p + 2, attr_len) == 0)
5723 return true;
5724 }
5725
5726 return false;
5727 }
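
/* Illustrative sketch, not from the GCC sources: callers use the
   is_attribute_p wrapper from tree.h, which accepts either spelling of
   the identifier:

     is_attribute_p ("packed", get_identifier ("packed"))       -> true
     is_attribute_p ("packed", get_identifier ("__packed__"))   -> true
     is_attribute_p ("packed", get_identifier ("packed__"))     -> false  */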
5728
5729 /* The backbone of lookup_attribute(). ATTR_LEN is the string length
5730 of ATTR_NAME, and LIST is not NULL_TREE. */
5731 tree
5732 private_lookup_attribute (const char *attr_name, size_t attr_len, tree list)
5733 {
5734 while (list)
5735 {
5736 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
5737
5738 if (ident_len == attr_len)
5739 {
5740 if (!strcmp (attr_name,
5741 IDENTIFIER_POINTER (get_attribute_name (list))))
5742 break;
5743 }
5744 /* TODO: If we made sure that attributes were stored in the
5745 canonical form without '__...__' (ie, as in 'text' as opposed
5746 to '__text__') then we could avoid the following case. */
5747 else if (ident_len == attr_len + 4)
5748 {
5749 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
5750 if (p[0] == '_' && p[1] == '_'
5751 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
5752 && strncmp (attr_name, p + 2, attr_len) == 0)
5753 break;
5754 }
5755 list = TREE_CHAIN (list);
5756 }
5757
5758 return list;
5759 }
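
/* Illustrative sketch, not from the GCC sources: callers go through the
   lookup_attribute wrapper in tree.h, e.g. for a hypothetical FNDECL:

     tree a = lookup_attribute ("noreturn", DECL_ATTRIBUTES (fndecl));

   which returns the first matching TREE_LIST node, or NULL_TREE if the
   attribute is not present; TREE_VALUE (a) then holds its arguments.  */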
5760
5761 /* Given an attribute name ATTR_NAME and a list of attributes LIST,
5762 return a pointer to the first list element whose attribute name
5763 starts with ATTR_NAME. ATTR_NAME must be in the form 'text' (not
5764 '__text__'). */
5765
5766 tree
5767 private_lookup_attribute_by_prefix (const char *attr_name, size_t attr_len,
5768 tree list)
5769 {
5770 while (list)
5771 {
5772 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
5773
5774 if (attr_len > ident_len)
5775 {
5776 list = TREE_CHAIN (list);
5777 continue;
5778 }
5779
5780 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
5781
5782 if (strncmp (attr_name, p, attr_len) == 0)
5783 break;
5784
5785 /* TODO: If we made sure that attributes were stored in the
5786 canonical form without '__...__' (ie, as in 'text' as opposed
5787 to '__text__') then we could avoid the following case. */
5788 if (p[0] == '_' && p[1] == '_'
5789 && strncmp (attr_name, p + 2, attr_len) == 0)
5790 break;
5791
5792 list = TREE_CHAIN (list);
5793 }
5794
5795 return list;
5796 }
5797
5798
5799 /* A variant of lookup_attribute() that can be used with an identifier
5800 as the first argument, and where the identifier can be either
5801 'text' or '__text__'.
5802
5803 Given an attribute ATTR_IDENTIFIER, and a list of attributes LIST,
5804 return a pointer to the attribute's list element if the attribute
5805 is part of the list, or NULL_TREE if not found. If the attribute
5806 appears more than once, this only returns the first occurrence; the
5807 TREE_CHAIN of the return value should be passed back in if further
5808 occurrences are wanted. ATTR_IDENTIFIER must be an identifier but
5809 can be in the form 'text' or '__text__'. */
5810 static tree
5811 lookup_ident_attribute (tree attr_identifier, tree list)
5812 {
5813 gcc_checking_assert (TREE_CODE (attr_identifier) == IDENTIFIER_NODE);
5814
5815 while (list)
5816 {
5817 gcc_checking_assert (TREE_CODE (get_attribute_name (list))
5818 == IDENTIFIER_NODE);
5819
5820 /* Identifiers can be compared directly for equality. */
5821 if (attr_identifier == get_attribute_name (list))
5822 break;
5823
5824 /* If they are not equal, they may still be one in the form
5825 'text' while the other one is in the form '__text__'. TODO:
5826 If we were storing attributes in normalized 'text' form, then
5827 this could all go away and we could take full advantage of
5828 the fact that we're comparing identifiers. :-) */
5829 {
5830 size_t attr_len = IDENTIFIER_LENGTH (attr_identifier);
5831 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
5832
5833 if (ident_len == attr_len + 4)
5834 {
5835 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
5836 const char *q = IDENTIFIER_POINTER (attr_identifier);
5837 if (p[0] == '_' && p[1] == '_'
5838 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
5839 && strncmp (q, p + 2, attr_len) == 0)
5840 break;
5841 }
5842 else if (ident_len + 4 == attr_len)
5843 {
5844 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
5845 const char *q = IDENTIFIER_POINTER (attr_identifier);
5846 if (q[0] == '_' && q[1] == '_'
5847 && q[attr_len - 2] == '_' && q[attr_len - 1] == '_'
5848 && strncmp (q + 2, p, ident_len) == 0)
5849 break;
5850 }
5851 }
5852 list = TREE_CHAIN (list);
5853 }
5854
5855 return list;
5856 }
5857
5858 /* Remove any instances of attribute ATTR_NAME in LIST and return the
5859 modified list. */
5860
5861 tree
5862 remove_attribute (const char *attr_name, tree list)
5863 {
5864 tree *p;
5865 size_t attr_len = strlen (attr_name);
5866
5867 gcc_checking_assert (attr_name[0] != '_');
5868
5869 for (p = &list; *p; )
5870 {
5871 tree l = *p;
5872 /* TODO: If we were storing attributes in normalized form, here
5873 we could use a simple strcmp(). */
5874 if (private_is_attribute_p (attr_name, attr_len, get_attribute_name (l)))
5875 *p = TREE_CHAIN (l);
5876 else
5877 p = &TREE_CHAIN (l);
5878 }
5879
5880 return list;
5881 }
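
/* A minimal usage sketch of remove_attribute (illustrative only; FNDECL is
   a hypothetical FUNCTION_DECL).  Strip any "noinline" attribute from its
   list and check that it is gone:

     DECL_ATTRIBUTES (fndecl)
       = remove_attribute ("noinline", DECL_ATTRIBUTES (fndecl));
     gcc_checking_assert (lookup_attribute ("noinline",
                                            DECL_ATTRIBUTES (fndecl))
                          == NULL_TREE);

   The attribute name must be given in its canonical 'text' form, never
   '__text__', as the assertion above enforces.  */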
5882
5883 /* Return an attribute list that is the union of a1 and a2. */
5884
5885 tree
5886 merge_attributes (tree a1, tree a2)
5887 {
5888 tree attributes;
5889
5890 /* Either one unset? Take the set one. */
5891
5892 if ((attributes = a1) == 0)
5893 attributes = a2;
5894
5895 /* One that completely contains the other? Take it. */
5896
5897 else if (a2 != 0 && ! attribute_list_contained (a1, a2))
5898 {
5899 if (attribute_list_contained (a2, a1))
5900 attributes = a2;
5901 else
5902 {
5903 /* Pick the longest list, and hang on the other list. */
5904
5905 if (list_length (a1) < list_length (a2))
5906 attributes = a2, a2 = a1;
5907
5908 for (; a2 != 0; a2 = TREE_CHAIN (a2))
5909 {
5910 tree a;
5911 for (a = lookup_ident_attribute (get_attribute_name (a2),
5912 attributes);
5913 a != NULL_TREE && !attribute_value_equal (a, a2);
5914 a = lookup_ident_attribute (get_attribute_name (a2),
5915 TREE_CHAIN (a)))
5916 ;
5917 if (a == NULL_TREE)
5918 {
5919 a1 = copy_node (a2);
5920 TREE_CHAIN (a1) = attributes;
5921 attributes = a1;
5922 }
5923 }
5924 }
5925 }
5926 return attributes;
5927 }
5928
5929 /* Given types T1 and T2, merge their attributes and return
5930 the result. */
5931
5932 tree
5933 merge_type_attributes (tree t1, tree t2)
5934 {
5935 return merge_attributes (TYPE_ATTRIBUTES (t1),
5936 TYPE_ATTRIBUTES (t2));
5937 }
5938
5939 /* Given decls OLDDECL and NEWDECL, merge their attributes and return
5940 the result. */
5941
5942 tree
5943 merge_decl_attributes (tree olddecl, tree newdecl)
5944 {
5945 return merge_attributes (DECL_ATTRIBUTES (olddecl),
5946 DECL_ATTRIBUTES (newdecl));
5947 }
5948
5949 #if TARGET_DLLIMPORT_DECL_ATTRIBUTES
5950
5951 /* Specialization of merge_decl_attributes for various Windows targets.
5952
5953 This handles the following situation:
5954
5955 __declspec (dllimport) int foo;
5956 int foo;
5957
5958 The second instance of `foo' nullifies the dllimport. */
5959
5960 tree
5961 merge_dllimport_decl_attributes (tree old, tree new_tree)
5962 {
5963 tree a;
5964 int delete_dllimport_p = 1;
5965
5966 /* What we need to do here is remove from `old' dllimport if it doesn't
5967 appear in `new'. dllimport behaves like extern: if a declaration is
5968 marked dllimport and a definition appears later, then the object
5969 is not dllimport'd. We also remove a `new' dllimport if the old list
5970 contains dllexport: dllexport always overrides dllimport, regardless
5971 of the order of declaration. */
5972 if (!VAR_OR_FUNCTION_DECL_P (new_tree))
5973 delete_dllimport_p = 0;
5974 else if (DECL_DLLIMPORT_P (new_tree)
5975 && lookup_attribute ("dllexport", DECL_ATTRIBUTES (old)))
5976 {
5977 DECL_DLLIMPORT_P (new_tree) = 0;
5978 warning (OPT_Wattributes, "%q+D already declared with dllexport attribute: "
5979 "dllimport ignored", new_tree);
5980 }
5981 else if (DECL_DLLIMPORT_P (old) && !DECL_DLLIMPORT_P (new_tree))
5982 {
5983 /* Warn about overriding a symbol that has already been used, e.g.:
5984 extern int __attribute__ ((dllimport)) foo;
5985 int* bar () {return &foo;}
5986 int foo;
5987 */
5988 if (TREE_USED (old))
5989 {
5990 warning (0, "%q+D redeclared without dllimport attribute "
5991 "after being referenced with dll linkage", new_tree);
5992 /* If we have used a variable's address with dllimport linkage,
5993 keep the old DECL_DLLIMPORT_P flag: the ADDR_EXPR using the
5994 decl may already have had TREE_CONSTANT computed.
5995 We still remove the attribute so that assembler code refers
5996 to '&foo' rather than '_imp__foo'. */
5997 if (TREE_CODE (old) == VAR_DECL && TREE_ADDRESSABLE (old))
5998 DECL_DLLIMPORT_P (new_tree) = 1;
5999 }
6000
6001 /* Let an inline definition silently override the external reference,
6002 but otherwise warn about attribute inconsistency. */
6003 else if (TREE_CODE (new_tree) == VAR_DECL
6004 || !DECL_DECLARED_INLINE_P (new_tree))
6005 warning (OPT_Wattributes, "%q+D redeclared without dllimport attribute: "
6006 "previous dllimport ignored", new_tree);
6007 }
6008 else
6009 delete_dllimport_p = 0;
6010
6011 a = merge_attributes (DECL_ATTRIBUTES (old), DECL_ATTRIBUTES (new_tree));
6012
6013 if (delete_dllimport_p)
6014 a = remove_attribute ("dllimport", a);
6015
6016 return a;
6017 }
6018
6019 /* Handle a "dllimport" or "dllexport" attribute; arguments as in
6020 struct attribute_spec.handler. */
6021
6022 tree
6023 handle_dll_attribute (tree * pnode, tree name, tree args, int flags,
6024 bool *no_add_attrs)
6025 {
6026 tree node = *pnode;
6027 bool is_dllimport;
6028
6029 /* These attributes may apply to structure and union types being created,
6030 but otherwise should pass to the declaration involved. */
6031 if (!DECL_P (node))
6032 {
6033 if (flags & ((int) ATTR_FLAG_DECL_NEXT | (int) ATTR_FLAG_FUNCTION_NEXT
6034 | (int) ATTR_FLAG_ARRAY_NEXT))
6035 {
6036 *no_add_attrs = true;
6037 return tree_cons (name, args, NULL_TREE);
6038 }
6039 if (TREE_CODE (node) == RECORD_TYPE
6040 || TREE_CODE (node) == UNION_TYPE)
6041 {
6042 node = TYPE_NAME (node);
6043 if (!node)
6044 return NULL_TREE;
6045 }
6046 else
6047 {
6048 warning (OPT_Wattributes, "%qE attribute ignored",
6049 name);
6050 *no_add_attrs = true;
6051 return NULL_TREE;
6052 }
6053 }
6054
6055 if (TREE_CODE (node) != FUNCTION_DECL
6056 && TREE_CODE (node) != VAR_DECL
6057 && TREE_CODE (node) != TYPE_DECL)
6058 {
6059 *no_add_attrs = true;
6060 warning (OPT_Wattributes, "%qE attribute ignored",
6061 name);
6062 return NULL_TREE;
6063 }
6064
6065 if (TREE_CODE (node) == TYPE_DECL
6066 && TREE_CODE (TREE_TYPE (node)) != RECORD_TYPE
6067 && TREE_CODE (TREE_TYPE (node)) != UNION_TYPE)
6068 {
6069 *no_add_attrs = true;
6070 warning (OPT_Wattributes, "%qE attribute ignored",
6071 name);
6072 return NULL_TREE;
6073 }
6074
6075 is_dllimport = is_attribute_p ("dllimport", name);
6076
6077 /* Report error on dllimport ambiguities seen now before they cause
6078 any damage. */
6079 if (is_dllimport)
6080 {
6081 /* Honor any target-specific overrides. */
6082 if (!targetm.valid_dllimport_attribute_p (node))
6083 *no_add_attrs = true;
6084
6085 else if (TREE_CODE (node) == FUNCTION_DECL
6086 && DECL_DECLARED_INLINE_P (node))
6087 {
6088 warning (OPT_Wattributes, "inline function %q+D declared as "
6089 " dllimport: attribute ignored", node);
6090 *no_add_attrs = true;
6091 }
6092 /* Like MS, treat a definition of a dllimport'd variable or
6093 non-inlined function as an error. */
6094 else if (TREE_CODE (node) == FUNCTION_DECL && DECL_INITIAL (node))
6095 {
6096 error ("function %q+D definition is marked dllimport", node);
6097 *no_add_attrs = true;
6098 }
6099
6100 else if (TREE_CODE (node) == VAR_DECL)
6101 {
6102 if (DECL_INITIAL (node))
6103 {
6104 error ("variable %q+D definition is marked dllimport",
6105 node);
6106 *no_add_attrs = true;
6107 }
6108
6109 /* `extern' needn't be specified with dllimport.
6110 Specify `extern' now and hope for the best. Sigh. */
6111 DECL_EXTERNAL (node) = 1;
6112 /* Also, implicitly give global scope to dllimport'd variables
6113 declared within a function, unless they are declared static. */
6114 if (current_function_decl != NULL_TREE && !TREE_STATIC (node))
6115 TREE_PUBLIC (node) = 1;
6116 }
6117
6118 if (*no_add_attrs == false)
6119 DECL_DLLIMPORT_P (node) = 1;
6120 }
6121 else if (TREE_CODE (node) == FUNCTION_DECL
6122 && DECL_DECLARED_INLINE_P (node)
6123 && flag_keep_inline_dllexport)
6124 /* An exported function, even if inline, must be emitted. */
6125 DECL_EXTERNAL (node) = 0;
6126
6127 /* Report error if symbol is not accessible at global scope. */
6128 if (!TREE_PUBLIC (node)
6129 && (TREE_CODE (node) == VAR_DECL
6130 || TREE_CODE (node) == FUNCTION_DECL))
6131 {
6132 error ("external linkage required for symbol %q+D because of "
6133 "%qE attribute", node, name);
6134 *no_add_attrs = true;
6135 }
6136
6137 /* A dllexport'd entity must have default visibility so that other
6138 program units (shared libraries or the main executable) can see
6139 it. A dllimport'd entity must have default visibility so that
6140 the linker knows that undefined references within this program
6141 unit can be resolved by the dynamic linker. */
6142 if (!*no_add_attrs)
6143 {
6144 if (DECL_VISIBILITY_SPECIFIED (node)
6145 && DECL_VISIBILITY (node) != VISIBILITY_DEFAULT)
6146 error ("%qE implies default visibility, but %qD has already "
6147 "been declared with a different visibility",
6148 name, node);
6149 DECL_VISIBILITY (node) = VISIBILITY_DEFAULT;
6150 DECL_VISIBILITY_SPECIFIED (node) = 1;
6151 }
6152
6153 return NULL_TREE;
6154 }
6155
6156 #endif /* TARGET_DLLIMPORT_DECL_ATTRIBUTES */
6157 \f
6158 /* Set the type qualifiers for TYPE to TYPE_QUALS, which is a bitmask
6159 of the various TYPE_QUAL values. */
6160
6161 static void
6162 set_type_quals (tree type, int type_quals)
6163 {
6164 TYPE_READONLY (type) = (type_quals & TYPE_QUAL_CONST) != 0;
6165 TYPE_VOLATILE (type) = (type_quals & TYPE_QUAL_VOLATILE) != 0;
6166 TYPE_RESTRICT (type) = (type_quals & TYPE_QUAL_RESTRICT) != 0;
6167 TYPE_ATOMIC (type) = (type_quals & TYPE_QUAL_ATOMIC) != 0;
6168 TYPE_ADDR_SPACE (type) = DECODE_QUAL_ADDR_SPACE (type_quals);
6169 }
6170
6171 /* Returns true iff CAND is equivalent to BASE with TYPE_QUALS. */
6172
6173 bool
6174 check_qualified_type (const_tree cand, const_tree base, int type_quals)
6175 {
6176 return (TYPE_QUALS (cand) == type_quals
6177 && TYPE_NAME (cand) == TYPE_NAME (base)
6178 /* Apparently this is needed for Objective-C. */
6179 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6180 /* Check alignment. */
6181 && TYPE_ALIGN (cand) == TYPE_ALIGN (base)
6182 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6183 TYPE_ATTRIBUTES (base)));
6184 }
6185
6186 /* Returns true iff CAND is equivalent to BASE with ALIGN. */
6187
6188 static bool
6189 check_aligned_type (const_tree cand, const_tree base, unsigned int align)
6190 {
6191 return (TYPE_QUALS (cand) == TYPE_QUALS (base)
6192 && TYPE_NAME (cand) == TYPE_NAME (base)
6193 /* Apparently this is needed for Objective-C. */
6194 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6195 /* Check alignment. */
6196 && TYPE_ALIGN (cand) == align
6197 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6198 TYPE_ATTRIBUTES (base)));
6199 }
6200
6201 /* This function checks to see if TYPE matches the size of one of the
6202 built-in atomic types, and returns that core atomic type. */
6203
6204 static tree
6205 find_atomic_core_type (tree type)
6206 {
6207 tree base_atomic_type;
6208
6209 /* Only handle complete types. */
6210 if (TYPE_SIZE (type) == NULL_TREE)
6211 return NULL_TREE;
6212
6213 HOST_WIDE_INT type_size = tree_to_uhwi (TYPE_SIZE (type));
6214 switch (type_size)
6215 {
6216 case 8:
6217 base_atomic_type = atomicQI_type_node;
6218 break;
6219
6220 case 16:
6221 base_atomic_type = atomicHI_type_node;
6222 break;
6223
6224 case 32:
6225 base_atomic_type = atomicSI_type_node;
6226 break;
6227
6228 case 64:
6229 base_atomic_type = atomicDI_type_node;
6230 break;
6231
6232 case 128:
6233 base_atomic_type = atomicTI_type_node;
6234 break;
6235
6236 default:
6237 base_atomic_type = NULL_TREE;
6238 }
6239
6240 return base_atomic_type;
6241 }
6242
6243 /* Return a version of the TYPE, qualified as indicated by the
6244 TYPE_QUALS, if one exists. If no qualified version exists yet,
6245 return NULL_TREE. */
6246
6247 tree
6248 get_qualified_type (tree type, int type_quals)
6249 {
6250 tree t;
6251
6252 if (TYPE_QUALS (type) == type_quals)
6253 return type;
6254
6255 /* Search the chain of variants to see if there is already one there just
6256 like the one we need to have. If so, use that existing one. We must
6257 preserve the TYPE_NAME, since there is code that depends on this. */
6258 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6259 if (check_qualified_type (t, type, type_quals))
6260 return t;
6261
6262 return NULL_TREE;
6263 }
6264
6265 /* Like get_qualified_type, but creates the type if it does not
6266 exist. This function never returns NULL_TREE. */
6267
6268 tree
6269 build_qualified_type (tree type, int type_quals)
6270 {
6271 tree t;
6272
6273 /* See if we already have the appropriate qualified variant. */
6274 t = get_qualified_type (type, type_quals);
6275
6276 /* If not, build it. */
6277 if (!t)
6278 {
6279 t = build_variant_type_copy (type);
6280 set_type_quals (t, type_quals);
6281
6282 if ((type_quals & TYPE_QUAL_ATOMIC) == TYPE_QUAL_ATOMIC)
6283 {
6284 /* See if this object can map to a basic atomic type. */
6285 tree atomic_type = find_atomic_core_type (type);
6286 if (atomic_type)
6287 {
6288 /* Ensure the alignment of this type is compatible with
6289 the required alignment of the atomic type. */
6290 if (TYPE_ALIGN (atomic_type) > TYPE_ALIGN (t))
6291 TYPE_ALIGN (t) = TYPE_ALIGN (atomic_type);
6292 }
6293 }
6294
6295 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6296 /* Propagate structural equality. */
6297 SET_TYPE_STRUCTURAL_EQUALITY (t);
6298 else if (TYPE_CANONICAL (type) != type)
6299 /* Build the underlying canonical type, since it is different
6300 from TYPE. */
6301 {
6302 tree c = build_qualified_type (TYPE_CANONICAL (type), type_quals);
6303 TYPE_CANONICAL (t) = TYPE_CANONICAL (c);
6304 }
6305 else
6306 /* T is its own canonical type. */
6307 TYPE_CANONICAL (t) = t;
6308
6309 }
6310
6311 return t;
6312 }
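
/* A minimal usage sketch: build a const-qualified variant of
   integer_type_node.  A second request with the same qualifiers finds the
   variant already recorded on the variant chain and returns the same node:

     tree cint = build_qualified_type (integer_type_node, TYPE_QUAL_CONST);
     gcc_checking_assert (cint
                          == build_qualified_type (integer_type_node,
                                                   TYPE_QUAL_CONST));  */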
6313
6314 /* Create a variant of TYPE with alignment ALIGN. */
6315
6316 tree
6317 build_aligned_type (tree type, unsigned int align)
6318 {
6319 tree t;
6320
6321 if (TYPE_PACKED (type)
6322 || TYPE_ALIGN (type) == align)
6323 return type;
6324
6325 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6326 if (check_aligned_type (t, type, align))
6327 return t;
6328
6329 t = build_variant_type_copy (type);
6330 TYPE_ALIGN (t) = align;
6331
6332 return t;
6333 }
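
/* A minimal usage sketch: request a variant of char_type_node with
   128-bit (16-byte) alignment.  The ALIGN argument is in bits, matching
   TYPE_ALIGN:

     tree aligned_char = build_aligned_type (char_type_node, 128);
     gcc_checking_assert (TYPE_ALIGN (aligned_char) == 128);  */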
6334
6335 /* Create a new distinct copy of TYPE. The new type is made its own
6336 MAIN_VARIANT. If TYPE requires structural equality checks, the
6337 resulting type requires structural equality checks; otherwise, its
6338 TYPE_CANONICAL points to itself. */
6339
6340 tree
6341 build_distinct_type_copy (tree type)
6342 {
6343 tree t = copy_node (type);
6344
6345 TYPE_POINTER_TO (t) = 0;
6346 TYPE_REFERENCE_TO (t) = 0;
6347
6348 /* Set the canonical type either to a new equivalence class, or
6349 propagate the need for structural equality checks. */
6350 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6351 SET_TYPE_STRUCTURAL_EQUALITY (t);
6352 else
6353 TYPE_CANONICAL (t) = t;
6354
6355 /* Make it its own variant. */
6356 TYPE_MAIN_VARIANT (t) = t;
6357 TYPE_NEXT_VARIANT (t) = 0;
6358
6359 /* Note that it is now possible for TYPE_MIN_VALUE to be a value
6360 whose TREE_TYPE is not t. This can also happen in the Ada
6361 frontend when using subtypes. */
6362
6363 return t;
6364 }
6365
6366 /* Create a new variant of TYPE, equivalent but distinct. This is so
6367 the caller can modify it. TYPE_CANONICAL for the return type will
6368 be equivalent to TYPE_CANONICAL of TYPE, indicating that the types
6369 are considered equal by the language itself (or that both types
6370 require structural equality checks). */
6371
6372 tree
6373 build_variant_type_copy (tree type)
6374 {
6375 tree t, m = TYPE_MAIN_VARIANT (type);
6376
6377 t = build_distinct_type_copy (type);
6378
6379 /* Since we're building a variant, assume that it is a non-semantic
6380 variant. This also propagates TYPE_STRUCTURAL_EQUALITY_P. */
6381 TYPE_CANONICAL (t) = TYPE_CANONICAL (type);
6382
6383 /* Add the new type to the chain of variants of TYPE. */
6384 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (m);
6385 TYPE_NEXT_VARIANT (m) = t;
6386 TYPE_MAIN_VARIANT (t) = m;
6387
6388 return t;
6389 }
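
/* A minimal sketch contrasting the two copy routines above.  A distinct
   copy starts its own equivalence class (it becomes its own main variant);
   a variant copy stays on the original type's variant chain:

     tree distinct = build_distinct_type_copy (integer_type_node);
     tree variant  = build_variant_type_copy (integer_type_node);
     gcc_checking_assert (TYPE_MAIN_VARIANT (distinct) == distinct);
     gcc_checking_assert (TYPE_MAIN_VARIANT (variant)
                          == TYPE_MAIN_VARIANT (integer_type_node));  */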
6390 \f
6391 /* Return true if the from trees in both tree maps are equal. */
6392
6393 int
6394 tree_map_base_eq (const void *va, const void *vb)
6395 {
6396 const struct tree_map_base *const a = (const struct tree_map_base *) va,
6397 *const b = (const struct tree_map_base *) vb;
6398 return (a->from == b->from);
6399 }
6400
6401 /* Hash a from tree in a tree_map_base. */
6402
6403 unsigned int
6404 tree_map_base_hash (const void *item)
6405 {
6406 return htab_hash_pointer (((const struct tree_map_base *)item)->from);
6407 }
6408
6409 /* Return true if this tree map structure is marked for garbage collection
6410 purposes. We simply return true if the from tree is marked, so that this
6411 structure goes away when the from tree goes away. */
6412
6413 int
6414 tree_map_base_marked_p (const void *p)
6415 {
6416 return ggc_marked_p (((const struct tree_map_base *) p)->from);
6417 }
6418
6419 /* Hash a from tree in a tree_map. */
6420
6421 unsigned int
6422 tree_map_hash (const void *item)
6423 {
6424 return (((const struct tree_map *) item)->hash);
6425 }
6426
6427 /* Hash a from tree in a tree_decl_map. */
6428
6429 unsigned int
6430 tree_decl_map_hash (const void *item)
6431 {
6432 return DECL_UID (((const struct tree_decl_map *) item)->base.from);
6433 }
6434
6435 /* Return the initialization priority for DECL. */
6436
6437 priority_type
6438 decl_init_priority_lookup (tree decl)
6439 {
6440 symtab_node *snode = symtab_node::get (decl);
6441
6442 if (!snode)
6443 return DEFAULT_INIT_PRIORITY;
6444 return
6445 snode->get_init_priority ();
6446 }
6447
6448 /* Return the finalization priority for DECL. */
6449
6450 priority_type
6451 decl_fini_priority_lookup (tree decl)
6452 {
6453 cgraph_node *node = cgraph_node::get (decl);
6454
6455 if (!node)
6456 return DEFAULT_INIT_PRIORITY;
6457 return
6458 node->get_fini_priority ();
6459 }
6460
6461 /* Set the initialization priority for DECL to PRIORITY. */
6462
6463 void
6464 decl_init_priority_insert (tree decl, priority_type priority)
6465 {
6466 struct symtab_node *snode;
6467
6468 if (priority == DEFAULT_INIT_PRIORITY)
6469 {
6470 snode = symtab_node::get (decl);
6471 if (!snode)
6472 return;
6473 }
6474 else if (TREE_CODE (decl) == VAR_DECL)
6475 snode = varpool_node::get_create (decl);
6476 else
6477 snode = cgraph_node::get_create (decl);
6478 snode->set_init_priority (priority);
6479 }
6480
6481 /* Set the finalization priority for DECL to PRIORITY. */
6482
6483 void
6484 decl_fini_priority_insert (tree decl, priority_type priority)
6485 {
6486 struct cgraph_node *node;
6487
6488 if (priority == DEFAULT_INIT_PRIORITY)
6489 {
6490 node = cgraph_node::get (decl);
6491 if (!node)
6492 return;
6493 }
6494 else
6495 node = cgraph_node::get_create (decl);
6496 node->set_fini_priority (priority);
6497 }
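
/* A minimal usage sketch of the two insert/lookup pairs above; FNDECL is a
   hypothetical constructor-like FUNCTION_DECL already known to the symbol
   table.  Record a non-default initialization priority and read it back:

     decl_init_priority_insert (fndecl, 200);
     gcc_checking_assert (decl_init_priority_lookup (fndecl) == 200);  */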
6498
6499 /* Print out the statistics for the DECL_DEBUG_EXPR hash table. */
6500
6501 static void
6502 print_debug_expr_statistics (void)
6503 {
6504 fprintf (stderr, "DECL_DEBUG_EXPR hash: size %ld, %ld elements, %f collisions\n",
6505 (long) htab_size (debug_expr_for_decl),
6506 (long) htab_elements (debug_expr_for_decl),
6507 htab_collisions (debug_expr_for_decl));
6508 }
6509
6510 /* Print out the statistics for the DECL_VALUE_EXPR hash table. */
6511
6512 static void
6513 print_value_expr_statistics (void)
6514 {
6515 fprintf (stderr, "DECL_VALUE_EXPR hash: size %ld, %ld elements, %f collisions\n",
6516 (long) htab_size (value_expr_for_decl),
6517 (long) htab_elements (value_expr_for_decl),
6518 htab_collisions (value_expr_for_decl));
6519 }
6520
6521 /* Lookup a debug expression for FROM, and return it if we find one. */
6522
6523 tree
6524 decl_debug_expr_lookup (tree from)
6525 {
6526 struct tree_decl_map *h, in;
6527 in.base.from = from;
6528
6529 h = (struct tree_decl_map *)
6530 htab_find_with_hash (debug_expr_for_decl, &in, DECL_UID (from));
6531 if (h)
6532 return h->to;
6533 return NULL_TREE;
6534 }
6535
6536 /* Insert a mapping FROM->TO in the debug expression hashtable. */
6537
6538 void
6539 decl_debug_expr_insert (tree from, tree to)
6540 {
6541 struct tree_decl_map *h;
6542 void **loc;
6543
6544 h = ggc_alloc<tree_decl_map> ();
6545 h->base.from = from;
6546 h->to = to;
6547 loc = htab_find_slot_with_hash (debug_expr_for_decl, h, DECL_UID (from),
6548 INSERT);
6549 *(struct tree_decl_map **) loc = h;
6550 }
6551
6552 /* Lookup a value expression for FROM, and return it if we find one. */
6553
6554 tree
6555 decl_value_expr_lookup (tree from)
6556 {
6557 struct tree_decl_map *h, in;
6558 in.base.from = from;
6559
6560 h = (struct tree_decl_map *)
6561 htab_find_with_hash (value_expr_for_decl, &in, DECL_UID (from));
6562 if (h)
6563 return h->to;
6564 return NULL_TREE;
6565 }
6566
6567 /* Insert a mapping FROM->TO in the value expression hashtable. */
6568
6569 void
6570 decl_value_expr_insert (tree from, tree to)
6571 {
6572 struct tree_decl_map *h;
6573 void **loc;
6574
6575 h = ggc_alloc<tree_decl_map> ();
6576 h->base.from = from;
6577 h->to = to;
6578 loc = htab_find_slot_with_hash (value_expr_for_decl, h, DECL_UID (from),
6579 INSERT);
6580 *(struct tree_decl_map **) loc = h;
6581 }
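
/* A minimal usage sketch of the value-expression map; VAR and REPL are
   hypothetical trees.  Record that VAR should be rewritten as REPL and
   read the mapping back:

     decl_value_expr_insert (var, repl);
     gcc_checking_assert (decl_value_expr_lookup (var) == repl);

   Callers normally also set DECL_HAS_VALUE_EXPR_P (var) so that the
   DECL_VALUE_EXPR accessors may be used on the decl.  */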
6582
6583 /* Lookup a vector of debug arguments for FROM, and return it if we
6584 find one. */
6585
6586 vec<tree, va_gc> **
6587 decl_debug_args_lookup (tree from)
6588 {
6589 struct tree_vec_map *h, in;
6590
6591 if (!DECL_HAS_DEBUG_ARGS_P (from))
6592 return NULL;
6593 gcc_checking_assert (debug_args_for_decl != NULL);
6594 in.base.from = from;
6595 h = (struct tree_vec_map *)
6596 htab_find_with_hash (debug_args_for_decl, &in, DECL_UID (from));
6597 if (h)
6598 return &h->to;
6599 return NULL;
6600 }
6601
6602 /* Insert a mapping FROM->empty vector of debug arguments in the
6603 debug arguments hashtable. */
6604
6605 vec<tree, va_gc> **
6606 decl_debug_args_insert (tree from)
6607 {
6608 struct tree_vec_map *h;
6609 void **loc;
6610
6611 if (DECL_HAS_DEBUG_ARGS_P (from))
6612 return decl_debug_args_lookup (from);
6613 if (debug_args_for_decl == NULL)
6614 debug_args_for_decl = htab_create_ggc (64, tree_vec_map_hash,
6615 tree_vec_map_eq, 0);
6616 h = ggc_alloc<tree_vec_map> ();
6617 h->base.from = from;
6618 h->to = NULL;
6619 loc = htab_find_slot_with_hash (debug_args_for_decl, h, DECL_UID (from),
6620 INSERT);
6621 *(struct tree_vec_map **) loc = h;
6622 DECL_HAS_DEBUG_ARGS_P (from) = 1;
6623 return &h->to;
6624 }
6625
6626 /* Hashing of types so that we don't make duplicates.
6627 The entry point is `type_hash_canon'. */
6628
6629 /* Compute a hash code for a list of types (chain of TREE_LIST nodes
6630 with types in the TREE_VALUE slots), by adding the hash codes
6631 of the individual types. */
6632
6633 static void
6634 type_hash_list (const_tree list, inchash::hash &hstate)
6635 {
6636 const_tree tail;
6637
6638 for (tail = list; tail; tail = TREE_CHAIN (tail))
6639 if (TREE_VALUE (tail) != error_mark_node)
6640 hstate.add_object (TYPE_HASH (TREE_VALUE (tail)));
6641 }
6642
6643 /* These are the Hashtable callback functions. */
6644
6645 /* Returns true iff the types are equivalent. */
6646
6647 static int
6648 type_hash_eq (const void *va, const void *vb)
6649 {
6650 const struct type_hash *const a = (const struct type_hash *) va,
6651 *const b = (const struct type_hash *) vb;
6652
6653 /* First test the things that are the same for all types. */
6654 if (a->hash != b->hash
6655 || TREE_CODE (a->type) != TREE_CODE (b->type)
6656 || TREE_TYPE (a->type) != TREE_TYPE (b->type)
6657 || !attribute_list_equal (TYPE_ATTRIBUTES (a->type),
6658 TYPE_ATTRIBUTES (b->type))
6659 || (TREE_CODE (a->type) != COMPLEX_TYPE
6660 && TYPE_NAME (a->type) != TYPE_NAME (b->type)))
6661 return 0;
6662
6663 /* Be careful about comparing arrays before and after the element type
6664 has been completed; don't compare TYPE_ALIGN unless both types are
6665 complete. */
6666 if (COMPLETE_TYPE_P (a->type) && COMPLETE_TYPE_P (b->type)
6667 && (TYPE_ALIGN (a->type) != TYPE_ALIGN (b->type)
6668 || TYPE_MODE (a->type) != TYPE_MODE (b->type)))
6669 return 0;
6670
6671 switch (TREE_CODE (a->type))
6672 {
6673 case VOID_TYPE:
6674 case COMPLEX_TYPE:
6675 case POINTER_TYPE:
6676 case REFERENCE_TYPE:
6677 case NULLPTR_TYPE:
6678 return 1;
6679
6680 case VECTOR_TYPE:
6681 return TYPE_VECTOR_SUBPARTS (a->type) == TYPE_VECTOR_SUBPARTS (b->type);
6682
6683 case ENUMERAL_TYPE:
6684 if (TYPE_VALUES (a->type) != TYPE_VALUES (b->type)
6685 && !(TYPE_VALUES (a->type)
6686 && TREE_CODE (TYPE_VALUES (a->type)) == TREE_LIST
6687 && TYPE_VALUES (b->type)
6688 && TREE_CODE (TYPE_VALUES (b->type)) == TREE_LIST
6689 && type_list_equal (TYPE_VALUES (a->type),
6690 TYPE_VALUES (b->type))))
6691 return 0;
6692
6693 /* ... fall through ... */
6694
6695 case INTEGER_TYPE:
6696 case REAL_TYPE:
6697 case BOOLEAN_TYPE:
6698 if (TYPE_PRECISION (a->type) != TYPE_PRECISION (b->type))
6699 return false;
6700 return ((TYPE_MAX_VALUE (a->type) == TYPE_MAX_VALUE (b->type)
6701 || tree_int_cst_equal (TYPE_MAX_VALUE (a->type),
6702 TYPE_MAX_VALUE (b->type)))
6703 && (TYPE_MIN_VALUE (a->type) == TYPE_MIN_VALUE (b->type)
6704 || tree_int_cst_equal (TYPE_MIN_VALUE (a->type),
6705 TYPE_MIN_VALUE (b->type))));
6706
6707 case FIXED_POINT_TYPE:
6708 return TYPE_SATURATING (a->type) == TYPE_SATURATING (b->type);
6709
6710 case OFFSET_TYPE:
6711 return TYPE_OFFSET_BASETYPE (a->type) == TYPE_OFFSET_BASETYPE (b->type);
6712
6713 case METHOD_TYPE:
6714 if (TYPE_METHOD_BASETYPE (a->type) == TYPE_METHOD_BASETYPE (b->type)
6715 && (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6716 || (TYPE_ARG_TYPES (a->type)
6717 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6718 && TYPE_ARG_TYPES (b->type)
6719 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6720 && type_list_equal (TYPE_ARG_TYPES (a->type),
6721 TYPE_ARG_TYPES (b->type)))))
6722 break;
6723 return 0;
6724 case ARRAY_TYPE:
6725 return TYPE_DOMAIN (a->type) == TYPE_DOMAIN (b->type);
6726
6727 case RECORD_TYPE:
6728 case UNION_TYPE:
6729 case QUAL_UNION_TYPE:
6730 return (TYPE_FIELDS (a->type) == TYPE_FIELDS (b->type)
6731 || (TYPE_FIELDS (a->type)
6732 && TREE_CODE (TYPE_FIELDS (a->type)) == TREE_LIST
6733 && TYPE_FIELDS (b->type)
6734 && TREE_CODE (TYPE_FIELDS (b->type)) == TREE_LIST
6735 && type_list_equal (TYPE_FIELDS (a->type),
6736 TYPE_FIELDS (b->type))));
6737
6738 case FUNCTION_TYPE:
6739 if (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6740 || (TYPE_ARG_TYPES (a->type)
6741 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6742 && TYPE_ARG_TYPES (b->type)
6743 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6744 && type_list_equal (TYPE_ARG_TYPES (a->type),
6745 TYPE_ARG_TYPES (b->type))))
6746 break;
6747 return 0;
6748
6749 default:
6750 return 0;
6751 }
6752
6753 if (lang_hooks.types.type_hash_eq != NULL)
6754 return lang_hooks.types.type_hash_eq (a->type, b->type);
6755
6756 return 1;
6757 }
6758
6759 /* Return the cached hash value. */
6760
6761 static hashval_t
6762 type_hash_hash (const void *item)
6763 {
6764 return ((const struct type_hash *) item)->hash;
6765 }
6766
6767 /* Look in the type hash table for a type isomorphic to TYPE.
6768 If one is found, return it. Otherwise return 0. */
6769
6770 static tree
6771 type_hash_lookup (hashval_t hashcode, tree type)
6772 {
6773 struct type_hash *h, in;
6774
6775 /* The TYPE_ALIGN field of a type is set by layout_type(), so we
6776 must call that routine before comparing TYPE_ALIGNs. */
6777 layout_type (type);
6778
6779 in.hash = hashcode;
6780 in.type = type;
6781
6782 h = (struct type_hash *) htab_find_with_hash (type_hash_table, &in,
6783 hashcode);
6784 if (h)
6785 return h->type;
6786 return NULL_TREE;
6787 }
6788
6789 /* Add an entry to the type-hash-table
6790 for a type TYPE whose hash code is HASHCODE. */
6791
6792 static void
6793 type_hash_add (hashval_t hashcode, tree type)
6794 {
6795 struct type_hash *h;
6796 void **loc;
6797
6798 h = ggc_alloc<type_hash> ();
6799 h->hash = hashcode;
6800 h->type = type;
6801 loc = htab_find_slot_with_hash (type_hash_table, h, hashcode, INSERT);
6802 *loc = (void *)h;
6803 }
6804
6805 /* Given TYPE, and HASHCODE its hash code, return the canonical
6806 object for an identical type if one already exists.
6807 Otherwise, return TYPE, and record it as the canonical object.
6808
6809 To use this function, first create a type of the sort you want.
6810 Then compute its hash code from the fields of the type that
6811 make it different from other similar types.
6812 Then call this function and use the value. */
6813
6814 tree
6815 type_hash_canon (unsigned int hashcode, tree type)
6816 {
6817 tree t1;
6818
6819 /* The hash table only contains main variants, so ensure that's what we're
6820 being passed. */
6821 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
6822
6823 /* See if the type is in the hash table already. If so, return it.
6824 Otherwise, add the type. */
6825 t1 = type_hash_lookup (hashcode, type);
6826 if (t1 != 0)
6827 {
6828 if (GATHER_STATISTICS)
6829 {
6830 tree_code_counts[(int) TREE_CODE (type)]--;
6831 tree_node_counts[(int) t_kind]--;
6832 tree_node_sizes[(int) t_kind] -= sizeof (struct tree_type_non_common);
6833 }
6834 return t1;
6835 }
6836 else
6837 {
6838 type_hash_add (hashcode, type);
6839 return type;
6840 }
6841 }
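
/* A minimal sketch of the protocol described above, mirroring what
   build_nonstandard_integer_type does later in this file: build a
   candidate type, hash the fields that distinguish it, then let
   type_hash_canon either return an existing identical type or record
   this one:

     tree t = make_node (INTEGER_TYPE);
     TYPE_PRECISION (t) = 24;
     fixup_unsigned_type (t);
     t = type_hash_canon (tree_to_uhwi (TYPE_MAX_VALUE (t)), t);  */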
6842
6843 /* See if the data pointed to by the type hash table entry is marked. We
6844 simply consider the entry marked if the type it records is marked, so
6845 that the entry goes away when the type does. */
6846
6847 static int
6848 type_hash_marked_p (const void *p)
6849 {
6850 const_tree const type = ((const struct type_hash *) p)->type;
6851
6852 return ggc_marked_p (type);
6853 }
6854
6855 static void
6856 print_type_hash_statistics (void)
6857 {
6858 fprintf (stderr, "Type hash: size %ld, %ld elements, %f collisions\n",
6859 (long) htab_size (type_hash_table),
6860 (long) htab_elements (type_hash_table),
6861 htab_collisions (type_hash_table));
6862 }
6863
6864 /* Compute a hash code for a list of attributes (chain of TREE_LIST nodes
6865 with names in the TREE_PURPOSE slots and args in the TREE_VALUE slots),
6866 by adding the hash codes of the individual attributes. */
6867
6868 static void
6869 attribute_hash_list (const_tree list, inchash::hash &hstate)
6870 {
6871 const_tree tail;
6872
6873 for (tail = list; tail; tail = TREE_CHAIN (tail))
6874 /* ??? Do we want to add in TREE_VALUE too? */
6875 hstate.add_object (IDENTIFIER_HASH_VALUE (get_attribute_name (tail)));
6876 }
6877
6878 /* Given two lists of attributes, return true if list l2 is
6879 equivalent to l1. */
6880
6881 int
6882 attribute_list_equal (const_tree l1, const_tree l2)
6883 {
6884 if (l1 == l2)
6885 return 1;
6886
6887 return attribute_list_contained (l1, l2)
6888 && attribute_list_contained (l2, l1);
6889 }
6890
6891 /* Given two lists of attributes, return true if list L2 is
6892 completely contained within L1. */
6893 /* ??? This would be faster if attribute names were stored in a canonicalized
6894 form. Otherwise, if L1 uses `foo' and L2 uses `__foo__', the long method
6895 must be used to show these elements are equivalent (which they are). */
6896 /* ??? It's not clear that attributes with arguments will always be handled
6897 correctly. */
6898
6899 int
6900 attribute_list_contained (const_tree l1, const_tree l2)
6901 {
6902 const_tree t1, t2;
6903
6904 /* First check the obvious, maybe the lists are identical. */
6905 if (l1 == l2)
6906 return 1;
6907
6908 /* Maybe the lists are similar. */
6909 for (t1 = l1, t2 = l2;
6910 t1 != 0 && t2 != 0
6911 && get_attribute_name (t1) == get_attribute_name (t2)
6912 && TREE_VALUE (t1) == TREE_VALUE (t2);
6913 t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
6914 ;
6915
6916 /* Maybe the lists are equal. */
6917 if (t1 == 0 && t2 == 0)
6918 return 1;
6919
6920 for (; t2 != 0; t2 = TREE_CHAIN (t2))
6921 {
6922 const_tree attr;
6923 /* This CONST_CAST is okay because lookup_attribute does not
6924 modify its argument and the return value is assigned to a
6925 const_tree. */
6926 for (attr = lookup_ident_attribute (get_attribute_name (t2),
6927 CONST_CAST_TREE (l1));
6928 attr != NULL_TREE && !attribute_value_equal (t2, attr);
6929 attr = lookup_ident_attribute (get_attribute_name (t2),
6930 TREE_CHAIN (attr)))
6931 ;
6932
6933 if (attr == NULL_TREE)
6934 return 0;
6935 }
6936
6937 return 1;
6938 }
6939
6940 /* Given two lists of types
6941 (chains of TREE_LIST nodes with types in the TREE_VALUE slots)
6942 return 1 if the lists contain the same types in the same order.
6943 Also, the TREE_PURPOSEs must match. */
6944
6945 int
6946 type_list_equal (const_tree l1, const_tree l2)
6947 {
6948 const_tree t1, t2;
6949
6950 for (t1 = l1, t2 = l2; t1 && t2; t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
6951 if (TREE_VALUE (t1) != TREE_VALUE (t2)
6952 || (TREE_PURPOSE (t1) != TREE_PURPOSE (t2)
6953 && ! (1 == simple_cst_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2))
6954 && (TREE_TYPE (TREE_PURPOSE (t1))
6955 == TREE_TYPE (TREE_PURPOSE (t2))))))
6956 return 0;
6957
6958 return t1 == t2;
6959 }
6960
6961 /* Returns the number of arguments to the FUNCTION_TYPE or METHOD_TYPE
6962 given by TYPE. If the argument list accepts variable arguments,
6963 then this function counts only the ordinary arguments. */
6964
6965 int
6966 type_num_arguments (const_tree type)
6967 {
6968 int i = 0;
6969 tree t;
6970
6971 for (t = TYPE_ARG_TYPES (type); t; t = TREE_CHAIN (t))
6972 /* If the function does not take a variable number of arguments,
6973 the last element in the list will have type `void'. */
6974 if (VOID_TYPE_P (TREE_VALUE (t)))
6975 break;
6976 else
6977 ++i;
6978
6979 return i;
6980 }
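
/* For example, for the FUNCTION_TYPE of `int f (int, char, ...)' the
   argument list holds int and char with no terminating void (because of
   the ellipsis), so type_num_arguments returns 2; for `int g (void)' the
   list is just void and the result is 0.  */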
6981
6982 /* Nonzero if integer constants T1 and T2
6983 represent the same constant value. */
6984
6985 int
6986 tree_int_cst_equal (const_tree t1, const_tree t2)
6987 {
6988 if (t1 == t2)
6989 return 1;
6990
6991 if (t1 == 0 || t2 == 0)
6992 return 0;
6993
6994 if (TREE_CODE (t1) == INTEGER_CST
6995 && TREE_CODE (t2) == INTEGER_CST
6996 && wi::to_widest (t1) == wi::to_widest (t2))
6997 return 1;
6998
6999 return 0;
7000 }
7001
7002 /* Return true if T is an INTEGER_CST whose numerical value (extended
7003 according to TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. */
7004
7005 bool
7006 tree_fits_shwi_p (const_tree t)
7007 {
7008 return (t != NULL_TREE
7009 && TREE_CODE (t) == INTEGER_CST
7010 && wi::fits_shwi_p (wi::to_widest (t)));
7011 }
7012
7013 /* Return true if T is an INTEGER_CST whose numerical value (extended
7014 according to TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. */
7015
7016 bool
7017 tree_fits_uhwi_p (const_tree t)
7018 {
7019 return (t != NULL_TREE
7020 && TREE_CODE (t) == INTEGER_CST
7021 && wi::fits_uhwi_p (wi::to_widest (t)));
7022 }
7023
7024 /* T is an INTEGER_CST whose numerical value (extended according to
7025 TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. Return that
7026 HOST_WIDE_INT. */
7027
7028 HOST_WIDE_INT
7029 tree_to_shwi (const_tree t)
7030 {
7031 gcc_assert (tree_fits_shwi_p (t));
7032 return TREE_INT_CST_LOW (t);
7033 }
7034
7035 /* T is an INTEGER_CST whose numerical value (extended according to
7036 TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. Return that
7037 HOST_WIDE_INT. */
7038
7039 unsigned HOST_WIDE_INT
7040 tree_to_uhwi (const_tree t)
7041 {
7042 gcc_assert (tree_fits_uhwi_p (t));
7043 return TREE_INT_CST_LOW (t);
7044 }
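
/* The intended pattern is to test with the predicate before extracting;
   here SIZE is a hypothetical INTEGER_CST and process_size a hypothetical
   helper:

     if (tree_fits_uhwi_p (size))
       process_size (tree_to_uhwi (size));

   Calling tree_to_uhwi without the check would trip the assertion above
   whenever the value does not fit in an unsigned HOST_WIDE_INT.  */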
7045
7046 /* Return the most significant (sign) bit of T. */
7047
7048 int
7049 tree_int_cst_sign_bit (const_tree t)
7050 {
7051 unsigned bitno = TYPE_PRECISION (TREE_TYPE (t)) - 1;
7052
7053 return wi::extract_uhwi (t, bitno, 1);
7054 }
7055
7056 /* Return an indication of the sign of the integer constant T.
7057 The return value is -1 if T < 0, 0 if T == 0, and 1 if T > 0.
7058 Note that -1 will never be returned if T's type is unsigned. */
7059
7060 int
7061 tree_int_cst_sgn (const_tree t)
7062 {
7063 if (wi::eq_p (t, 0))
7064 return 0;
7065 else if (TYPE_UNSIGNED (TREE_TYPE (t)))
7066 return 1;
7067 else if (wi::neg_p (t))
7068 return -1;
7069 else
7070 return 1;
7071 }
7072
7073 /* Return the minimum number of bits needed to represent VALUE in a
7074 signed or unsigned type, UNSIGNEDP says which. */
7075
7076 unsigned int
7077 tree_int_cst_min_precision (tree value, signop sgn)
7078 {
7079 /* If the value is negative, compute its negative minus 1. The latter
7080 adjustment is because the absolute value of the largest negative value
7081 is one larger than the largest positive value. This is equivalent to
7082 a bit-wise negation, so use that operation instead. */
7083
7084 if (tree_int_cst_sgn (value) < 0)
7085 value = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (value), value);
7086
7087 /* Return the number of bits needed, taking into account the fact
7088 that we need one more bit for a signed than unsigned type.
7089 If value is 0 or -1, the minimum precision is 1 no matter
7090 whether SGN is SIGNED or UNSIGNED. */
7091
7092 if (integer_zerop (value))
7093 return 1;
7094 else
7095 return tree_floor_log2 (value) + 1 + (sgn == SIGNED ? 1 : 0) ;
7096 }
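
/* Two worked examples: for VALUE == 5, tree_floor_log2 (5) == 2, so the
   result is 3 for SGN == UNSIGNED (101 fits in three bits) and 4 for
   SGN == SIGNED.  For VALUE == -3, the bit-wise negation yields 2, giving
   1 + 1 + 1 == 3 bits in the signed case.  */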
7097
7098 /* Return truthvalue of whether T1 is the same tree structure as T2.
7099 Return 1 if they are the same.
7100 Return 0 if they are understandably different.
7101 Return -1 if either contains tree structure not understood by
7102 this function. */
7103
7104 int
7105 simple_cst_equal (const_tree t1, const_tree t2)
7106 {
7107 enum tree_code code1, code2;
7108 int cmp;
7109 int i;
7110
7111 if (t1 == t2)
7112 return 1;
7113 if (t1 == 0 || t2 == 0)
7114 return 0;
7115
7116 code1 = TREE_CODE (t1);
7117 code2 = TREE_CODE (t2);
7118
7119 if (CONVERT_EXPR_CODE_P (code1) || code1 == NON_LVALUE_EXPR)
7120 {
7121 if (CONVERT_EXPR_CODE_P (code2)
7122 || code2 == NON_LVALUE_EXPR)
7123 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7124 else
7125 return simple_cst_equal (TREE_OPERAND (t1, 0), t2);
7126 }
7127
7128 else if (CONVERT_EXPR_CODE_P (code2)
7129 || code2 == NON_LVALUE_EXPR)
7130 return simple_cst_equal (t1, TREE_OPERAND (t2, 0));
7131
7132 if (code1 != code2)
7133 return 0;
7134
7135 switch (code1)
7136 {
7137 case INTEGER_CST:
7138 return wi::to_widest (t1) == wi::to_widest (t2);
7139
7140 case REAL_CST:
7141 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (t1), TREE_REAL_CST (t2));
7142
7143 case FIXED_CST:
7144 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2));
7145
7146 case STRING_CST:
7147 return (TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
7148 && ! memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
7149 TREE_STRING_LENGTH (t1)));
7150
7151 case CONSTRUCTOR:
7152 {
7153 unsigned HOST_WIDE_INT idx;
7154 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (t1);
7155 vec<constructor_elt, va_gc> *v2 = CONSTRUCTOR_ELTS (t2);
7156
7157 if (vec_safe_length (v1) != vec_safe_length (v2))
7158 return false;
7159
7160 for (idx = 0; idx < vec_safe_length (v1); ++idx)
7161 /* ??? Should we also handle fields here? */
7162 if (!simple_cst_equal ((*v1)[idx].value, (*v2)[idx].value))
7163 return false;
7164 return true;
7165 }
7166
7167 case SAVE_EXPR:
7168 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7169
7170 case CALL_EXPR:
7171 cmp = simple_cst_equal (CALL_EXPR_FN (t1), CALL_EXPR_FN (t2));
7172 if (cmp <= 0)
7173 return cmp;
7174 if (call_expr_nargs (t1) != call_expr_nargs (t2))
7175 return 0;
7176 {
7177 const_tree arg1, arg2;
7178 const_call_expr_arg_iterator iter1, iter2;
7179 for (arg1 = first_const_call_expr_arg (t1, &iter1),
7180 arg2 = first_const_call_expr_arg (t2, &iter2);
7181 arg1 && arg2;
7182 arg1 = next_const_call_expr_arg (&iter1),
7183 arg2 = next_const_call_expr_arg (&iter2))
7184 {
7185 cmp = simple_cst_equal (arg1, arg2);
7186 if (cmp <= 0)
7187 return cmp;
7188 }
7189 return arg1 == arg2;
7190 }
7191
7192 case TARGET_EXPR:
7193 /* Special case: if either target is an unallocated VAR_DECL,
7194 it means that it's going to be unified with whatever the
7195 TARGET_EXPR is really supposed to initialize, so treat it
7196 as being equivalent to anything. */
7197 if ((TREE_CODE (TREE_OPERAND (t1, 0)) == VAR_DECL
7198 && DECL_NAME (TREE_OPERAND (t1, 0)) == NULL_TREE
7199 && !DECL_RTL_SET_P (TREE_OPERAND (t1, 0)))
7200 || (TREE_CODE (TREE_OPERAND (t2, 0)) == VAR_DECL
7201 && DECL_NAME (TREE_OPERAND (t2, 0)) == NULL_TREE
7202 && !DECL_RTL_SET_P (TREE_OPERAND (t2, 0))))
7203 cmp = 1;
7204 else
7205 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7206
7207 if (cmp <= 0)
7208 return cmp;
7209
7210 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7211
7212 case WITH_CLEANUP_EXPR:
7213 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7214 if (cmp <= 0)
7215 return cmp;
7216
7217 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7218
7219 case COMPONENT_REF:
7220 if (TREE_OPERAND (t1, 1) == TREE_OPERAND (t2, 1))
7221 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7222
7223 return 0;
7224
7225 case VAR_DECL:
7226 case PARM_DECL:
7227 case CONST_DECL:
7228 case FUNCTION_DECL:
7229 return 0;
7230
7231 default:
7232 break;
7233 }
7234
7235 /* This general rule works for most tree codes. All exceptions should be
7236 handled above. If this is a language-specific tree code, we can't
7237 trust what might be in the operand, so say we don't know
7238 the situation. */
7239 if ((int) code1 >= (int) LAST_AND_UNUSED_TREE_CODE)
7240 return -1;
7241
7242 switch (TREE_CODE_CLASS (code1))
7243 {
7244 case tcc_unary:
7245 case tcc_binary:
7246 case tcc_comparison:
7247 case tcc_expression:
7248 case tcc_reference:
7249 case tcc_statement:
7250 cmp = 1;
7251 for (i = 0; i < TREE_CODE_LENGTH (code1); i++)
7252 {
7253 cmp = simple_cst_equal (TREE_OPERAND (t1, i), TREE_OPERAND (t2, i));
7254 if (cmp <= 0)
7255 return cmp;
7256 }
7257
7258 return cmp;
7259
7260 default:
7261 return -1;
7262 }
7263 }
7264
7265 /* Compare the value of T, an INTEGER_CST, with U, an unsigned integer value.
7266 Return -1, 0, or 1 if the value of T is less than, equal to, or greater
7267 than U, respectively. */
7268
7269 int
7270 compare_tree_int (const_tree t, unsigned HOST_WIDE_INT u)
7271 {
7272 if (tree_int_cst_sgn (t) < 0)
7273 return -1;
7274 else if (!tree_fits_uhwi_p (t))
7275 return 1;
7276 else if (TREE_INT_CST_LOW (t) == u)
7277 return 0;
7278 else if (TREE_INT_CST_LOW (t) < u)
7279 return -1;
7280 else
7281 return 1;
7282 }
7283
7284 /* Return true if SIZE represents a constant size that is in bounds of
7285 what the middle-end and the backend accept (covering not more than
7286 half of the address-space). */
7287
7288 bool
7289 valid_constant_size_p (const_tree size)
7290 {
7291 if (! tree_fits_uhwi_p (size)
7292 || TREE_OVERFLOW (size)
7293 || tree_int_cst_sign_bit (size) != 0)
7294 return false;
7295 return true;
7296 }
7297
7298 /* Return the precision of the type, or for a complex or vector type the
7299 precision of the type of its elements. */
7300
7301 unsigned int
7302 element_precision (const_tree type)
7303 {
7304 enum tree_code code = TREE_CODE (type);
7305 if (code == COMPLEX_TYPE || code == VECTOR_TYPE)
7306 type = TREE_TYPE (type);
7307
7308 return TYPE_PRECISION (type);
7309 }
7310
7311 /* Return true if CODE represents an associative tree code. Otherwise
7312 return false. */
7313 bool
7314 associative_tree_code (enum tree_code code)
7315 {
7316 switch (code)
7317 {
7318 case BIT_IOR_EXPR:
7319 case BIT_AND_EXPR:
7320 case BIT_XOR_EXPR:
7321 case PLUS_EXPR:
7322 case MULT_EXPR:
7323 case MIN_EXPR:
7324 case MAX_EXPR:
7325 return true;
7326
7327 default:
7328 break;
7329 }
7330 return false;
7331 }
7332
7333 /* Return true if CODE represents a commutative tree code. Otherwise
7334 return false. */
7335 bool
7336 commutative_tree_code (enum tree_code code)
7337 {
7338 switch (code)
7339 {
7340 case PLUS_EXPR:
7341 case MULT_EXPR:
7342 case MULT_HIGHPART_EXPR:
7343 case MIN_EXPR:
7344 case MAX_EXPR:
7345 case BIT_IOR_EXPR:
7346 case BIT_XOR_EXPR:
7347 case BIT_AND_EXPR:
7348 case NE_EXPR:
7349 case EQ_EXPR:
7350 case UNORDERED_EXPR:
7351 case ORDERED_EXPR:
7352 case UNEQ_EXPR:
7353 case LTGT_EXPR:
7354 case TRUTH_AND_EXPR:
7355 case TRUTH_XOR_EXPR:
7356 case TRUTH_OR_EXPR:
7357 case WIDEN_MULT_EXPR:
7358 case VEC_WIDEN_MULT_HI_EXPR:
7359 case VEC_WIDEN_MULT_LO_EXPR:
7360 case VEC_WIDEN_MULT_EVEN_EXPR:
7361 case VEC_WIDEN_MULT_ODD_EXPR:
7362 return true;
7363
7364 default:
7365 break;
7366 }
7367 return false;
7368 }
7369
7370 /* Return true if CODE represents a ternary tree code for which the
7371 first two operands are commutative. Otherwise return false. */
7372 bool
7373 commutative_ternary_tree_code (enum tree_code code)
7374 {
7375 switch (code)
7376 {
7377 case WIDEN_MULT_PLUS_EXPR:
7378 case WIDEN_MULT_MINUS_EXPR:
7379 return true;
7380
7381 default:
7382 break;
7383 }
7384 return false;
7385 }
7386
7387 namespace inchash
7388 {
7389
7390 /* Generate a hash value for an expression. This can be used iteratively
7391 by passing a previous result as the HSTATE argument.
7392
7393 This function is intended to produce the same hash for expressions which
7394 would compare equal using operand_equal_p. */
7395 void
7396 add_expr (const_tree t, inchash::hash &hstate)
7397 {
7398 int i;
7399 enum tree_code code;
7400 enum tree_code_class tclass;
7401
7402 if (t == NULL_TREE)
7403 {
7404 hstate.merge_hash (0);
7405 return;
7406 }
7407
7408 code = TREE_CODE (t);
7409
7410 switch (code)
7411 {
7412 /* Alas, constants aren't shared, so we can't rely on pointer
7413 identity. */
7414 case VOID_CST:
7415 hstate.merge_hash (0);
7416 return;
7417 case INTEGER_CST:
7418 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
7419 hstate.add_wide_int (TREE_INT_CST_ELT (t, i));
7420 return;
7421 case REAL_CST:
7422 {
7423 unsigned int val2 = real_hash (TREE_REAL_CST_PTR (t));
7424 hstate.merge_hash (val2);
7425 return;
7426 }
7427 case FIXED_CST:
7428 {
7429 unsigned int val2 = fixed_hash (TREE_FIXED_CST_PTR (t));
7430 hstate.merge_hash (val2);
7431 return;
7432 }
7433 case STRING_CST:
7434 hstate.add ((const void *) TREE_STRING_POINTER (t), TREE_STRING_LENGTH (t));
7435 return;
7436 case COMPLEX_CST:
7437 inchash::add_expr (TREE_REALPART (t), hstate);
7438 inchash::add_expr (TREE_IMAGPART (t), hstate);
7439 return;
7440 case VECTOR_CST:
7441 {
7442 unsigned i;
7443 for (i = 0; i < VECTOR_CST_NELTS (t); ++i)
7444 inchash::add_expr (VECTOR_CST_ELT (t, i), hstate);
7445 return;
7446 }
7447 case SSA_NAME:
7448 /* We can just compare by pointer. */
7449 hstate.add_wide_int (SSA_NAME_VERSION (t));
7450 return;
7451 case PLACEHOLDER_EXPR:
7452 /* The node itself doesn't matter. */
7453 return;
7454 case TREE_LIST:
7455 /* A list of expressions, for a CALL_EXPR or as the elements of a
7456 VECTOR_CST. */
7457 for (; t; t = TREE_CHAIN (t))
7458 inchash::add_expr (TREE_VALUE (t), hstate);
7459 return;
7460 case CONSTRUCTOR:
7461 {
7462 unsigned HOST_WIDE_INT idx;
7463 tree field, value;
7464 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value)
7465 {
7466 inchash::add_expr (field, hstate);
7467 inchash::add_expr (value, hstate);
7468 }
7469 return;
7470 }
7471 case FUNCTION_DECL:
7472 /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
7473 Otherwise nodes that compare equal according to operand_equal_p might
7474 get different hash codes. However, don't do this for machine specific
7475 or front end builtins, since the function code is overloaded in those
7476 cases. */
7477 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
7478 && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t)))
7479 {
7480 t = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
7481 code = TREE_CODE (t);
7482 }
7483 /* FALL THROUGH */
7484 default:
7485 tclass = TREE_CODE_CLASS (code);
7486
7487 if (tclass == tcc_declaration)
7488 {
7489 /* DECL's have a unique ID */
7490 hstate.add_wide_int (DECL_UID (t));
7491 }
7492 else
7493 {
7494 gcc_assert (IS_EXPR_CODE_CLASS (tclass));
7495
7496 hstate.add_object (code);
7497
7498 /* Don't hash the type; that can lead to having nodes which
7499 compare equal according to operand_equal_p, but which
7500 have different hash codes. */
7501 if (CONVERT_EXPR_CODE_P (code)
7502 || code == NON_LVALUE_EXPR)
7503 {
7504 /* Make sure to include signedness in the hash computation. */
7505 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
7506 inchash::add_expr (TREE_OPERAND (t, 0), hstate);
7507 }
7508
7509 else if (commutative_tree_code (code))
7510 {
7511 /* It's a commutative expression. We want to hash it the same way
7512 regardless of operand order. We do this by first hashing both operands
7513 and then rehashing based on the order of their independent
7514 hashes. */
7515 inchash::hash one, two;
7516 inchash::add_expr (TREE_OPERAND (t, 0), one);
7517 inchash::add_expr (TREE_OPERAND (t, 1), two);
7518 hstate.add_commutative (one, two);
7519 }
7520 else
7521 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
7522 inchash::add_expr (TREE_OPERAND (t, i), hstate);
7523 }
7524 return;
7525 }
7526 }
7527
7528 }
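
/* A minimal sketch of the iterative hashing interface used above and
   elsewhere in this file (see e.g. build_range_type_1 below); EXPR1 and
   EXPR2 are hypothetical trees:

     inchash::hash hstate;
     inchash::add_expr (expr1, hstate);
     inchash::add_expr (expr2, hstate);
     hashval_t h = hstate.end ();  */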
7529
7530 /* Constructors for pointer, array and function types.
7531 (RECORD_TYPE, UNION_TYPE and ENUMERAL_TYPE nodes are
7532 constructed by language-dependent code, not here.) */
7533
7534 /* Construct, lay out and return the type of pointers to TO_TYPE with
7535 mode MODE. If CAN_ALIAS_ALL is TRUE, indicate this type can
7536 reference all of memory. If such a type has already been
7537 constructed, reuse it. */
7538
7539 tree
7540 build_pointer_type_for_mode (tree to_type, enum machine_mode mode,
7541 bool can_alias_all)
7542 {
7543 tree t;
7544
7545 if (to_type == error_mark_node)
7546 return error_mark_node;
7547
7548 /* If the pointed-to type has the may_alias attribute set, force
7549 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7550 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7551 can_alias_all = true;
7552
7553 /* In some cases, languages will have things that aren't a POINTER_TYPE
7554 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_POINTER_TO.
7555 In that case, return that type without regard to the rest of our
7556 operands.
7557
7558 ??? This is a kludge, but consistent with the way this function has
7559 always operated and there doesn't seem to be a good way to avoid this
7560 at the moment. */
7561 if (TYPE_POINTER_TO (to_type) != 0
7562 && TREE_CODE (TYPE_POINTER_TO (to_type)) != POINTER_TYPE)
7563 return TYPE_POINTER_TO (to_type);
7564
7565 /* First, if we already have a type for pointers to TO_TYPE and it's
7566 the proper mode, use it. */
7567 for (t = TYPE_POINTER_TO (to_type); t; t = TYPE_NEXT_PTR_TO (t))
7568 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7569 return t;
7570
7571 t = make_node (POINTER_TYPE);
7572
7573 TREE_TYPE (t) = to_type;
7574 SET_TYPE_MODE (t, mode);
7575 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7576 TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (to_type);
7577 TYPE_POINTER_TO (to_type) = t;
7578
7579 if (TYPE_STRUCTURAL_EQUALITY_P (to_type))
7580 SET_TYPE_STRUCTURAL_EQUALITY (t);
7581 else if (TYPE_CANONICAL (to_type) != to_type)
7582 TYPE_CANONICAL (t)
7583 = build_pointer_type_for_mode (TYPE_CANONICAL (to_type),
7584 mode, can_alias_all);
7585
7586 /* Lay out the type. This function has many callers that are concerned
7587 with expression-construction, and this simplifies them all. */
7588 layout_type (t);
7589
7590 return t;
7591 }
7592
7593 /* By default build pointers in ptr_mode. */
7594
7595 tree
7596 build_pointer_type (tree to_type)
7597 {
7598 addr_space_t as = to_type == error_mark_node ? ADDR_SPACE_GENERIC
7599 : TYPE_ADDR_SPACE (to_type);
7600 enum machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
7601 return build_pointer_type_for_mode (to_type, pointer_mode, false);
7602 }
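
/* A minimal usage sketch: pointer types are cached on the pointed-to
   type, so repeated requests with the same mode normally return the same
   node:

     tree int_ptr = build_pointer_type (integer_type_node);
     gcc_checking_assert (int_ptr
                          == build_pointer_type (integer_type_node));  */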
7603
7604 /* Same as build_pointer_type_for_mode, but for REFERENCE_TYPE. */
7605
7606 tree
7607 build_reference_type_for_mode (tree to_type, enum machine_mode mode,
7608 bool can_alias_all)
7609 {
7610 tree t;
7611
7612 if (to_type == error_mark_node)
7613 return error_mark_node;
7614
7615 /* If the pointed-to type has the may_alias attribute set, force
7616 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7617 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7618 can_alias_all = true;
7619
7620 /* In some cases, languages will have things that aren't a REFERENCE_TYPE
7621 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_REFERENCE_TO.
7622 In that case, return that type without regard to the rest of our
7623 operands.
7624
7625 ??? This is a kludge, but consistent with the way this function has
7626 always operated and there doesn't seem to be a good way to avoid this
7627 at the moment. */
7628 if (TYPE_REFERENCE_TO (to_type) != 0
7629 && TREE_CODE (TYPE_REFERENCE_TO (to_type)) != REFERENCE_TYPE)
7630 return TYPE_REFERENCE_TO (to_type);
7631
7632 /* First, if we already have a type for pointers to TO_TYPE and it's
7633 the proper mode, use it. */
7634 for (t = TYPE_REFERENCE_TO (to_type); t; t = TYPE_NEXT_REF_TO (t))
7635 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7636 return t;
7637
7638 t = make_node (REFERENCE_TYPE);
7639
7640 TREE_TYPE (t) = to_type;
7641 SET_TYPE_MODE (t, mode);
7642 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7643 TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (to_type);
7644 TYPE_REFERENCE_TO (to_type) = t;
7645
7646 if (TYPE_STRUCTURAL_EQUALITY_P (to_type))
7647 SET_TYPE_STRUCTURAL_EQUALITY (t);
7648 else if (TYPE_CANONICAL (to_type) != to_type)
7649 TYPE_CANONICAL (t)
7650 = build_reference_type_for_mode (TYPE_CANONICAL (to_type),
7651 mode, can_alias_all);
7652
7653 layout_type (t);
7654
7655 return t;
7656 }
7657
7658
7659 /* Build the node for the type of references-to-TO_TYPE by default
7660 in ptr_mode. */
7661
7662 tree
7663 build_reference_type (tree to_type)
7664 {
7665 addr_space_t as = to_type == error_mark_node ? ADDR_SPACE_GENERIC
7666 : TYPE_ADDR_SPACE (to_type);
7667 enum machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
7668 return build_reference_type_for_mode (to_type, pointer_mode, false);
7669 }
7670
7671 #define MAX_INT_CACHED_PREC \
7672 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
7673 static GTY(()) tree nonstandard_integer_type_cache[2 * MAX_INT_CACHED_PREC + 2];
7674
7675 /* Builds a signed or unsigned integer type of precision PRECISION.
7676 Used for C bitfields whose precision does not match that of
7677 built-in target types. */
7678 tree
7679 build_nonstandard_integer_type (unsigned HOST_WIDE_INT precision,
7680 int unsignedp)
7681 {
7682 tree itype, ret;
7683
7684 if (unsignedp)
7685 unsignedp = MAX_INT_CACHED_PREC + 1;
7686
7687 if (precision <= MAX_INT_CACHED_PREC)
7688 {
7689 itype = nonstandard_integer_type_cache[precision + unsignedp];
7690 if (itype)
7691 return itype;
7692 }
7693
7694 itype = make_node (INTEGER_TYPE);
7695 TYPE_PRECISION (itype) = precision;
7696
7697 if (unsignedp)
7698 fixup_unsigned_type (itype);
7699 else
7700 fixup_signed_type (itype);
7701
7702 ret = itype;
7703 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (itype)))
7704 ret = type_hash_canon (tree_to_uhwi (TYPE_MAX_VALUE (itype)), itype);
7705 if (precision <= MAX_INT_CACHED_PREC)
7706 nonstandard_integer_type_cache[precision + unsignedp] = ret;
7707
7708 return ret;
7709 }
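
/* A minimal usage sketch: a 24-bit unsigned type, as needed for a C
   bit-field of that width, would be obtained with UNSIGNEDP nonzero:

     tree uint24 = build_nonstandard_integer_type (24, 1);
     gcc_checking_assert (TYPE_PRECISION (uint24) == 24
                          && TYPE_UNSIGNED (uint24));  */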
7710
7711 /* Create a range of some discrete type TYPE (an INTEGER_TYPE, ENUMERAL_TYPE
7712 or BOOLEAN_TYPE) with low bound LOWVAL and high bound HIGHVAL. If SHARED
7713 is true, reuse such a type that has already been constructed. */
7714
7715 static tree
7716 build_range_type_1 (tree type, tree lowval, tree highval, bool shared)
7717 {
7718 tree itype = make_node (INTEGER_TYPE);
7719 inchash::hash hstate;
7720
7721 TREE_TYPE (itype) = type;
7722
7723 TYPE_MIN_VALUE (itype) = fold_convert (type, lowval);
7724 TYPE_MAX_VALUE (itype) = highval ? fold_convert (type, highval) : NULL;
7725
7726 TYPE_PRECISION (itype) = TYPE_PRECISION (type);
7727 SET_TYPE_MODE (itype, TYPE_MODE (type));
7728 TYPE_SIZE (itype) = TYPE_SIZE (type);
7729 TYPE_SIZE_UNIT (itype) = TYPE_SIZE_UNIT (type);
7730 TYPE_ALIGN (itype) = TYPE_ALIGN (type);
7731 TYPE_USER_ALIGN (itype) = TYPE_USER_ALIGN (type);
7732
7733 if (!shared)
7734 return itype;
7735
7736 if ((TYPE_MIN_VALUE (itype)
7737 && TREE_CODE (TYPE_MIN_VALUE (itype)) != INTEGER_CST)
7738 || (TYPE_MAX_VALUE (itype)
7739 && TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST))
7740 {
7741 /* Since we cannot reliably merge this type, we need to compare it using
7742 structural equality checks. */
7743 SET_TYPE_STRUCTURAL_EQUALITY (itype);
7744 return itype;
7745 }
7746
7747 inchash::add_expr (TYPE_MIN_VALUE (itype), hstate);
7748 inchash::add_expr (TYPE_MAX_VALUE (itype), hstate);
7749 hstate.merge_hash (TYPE_HASH (type));
7750 itype = type_hash_canon (hstate.end (), itype);
7751
7752 return itype;
7753 }
7754
7755 /* Wrapper around build_range_type_1 with SHARED set to true. */
7756
7757 tree
7758 build_range_type (tree type, tree lowval, tree highval)
7759 {
7760 return build_range_type_1 (type, lowval, highval, true);
7761 }
7762
7763 /* Wrapper around build_range_type_1 with SHARED set to false. */
7764
7765 tree
7766 build_nonshared_range_type (tree type, tree lowval, tree highval)
7767 {
7768 return build_range_type_1 (type, lowval, highval, false);
7769 }
7770
7771 /* Create a type of integers to be the TYPE_DOMAIN of an ARRAY_TYPE.
7772 MAXVAL should be the maximum value in the domain
7773 (one less than the length of the array).
7774
7775 The maximum value that MAXVAL can have is INT_MAX for a HOST_WIDE_INT.
7776 We don't enforce this limit; that is up to the caller (e.g. the language front end).
7777 The limit exists because the result is a signed type and we don't handle
7778 sizes that use more than one HOST_WIDE_INT. */
7779
7780 tree
7781 build_index_type (tree maxval)
7782 {
7783 return build_range_type (sizetype, size_zero_node, maxval);
7784 }
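
/* For example, the TYPE_DOMAIN of a ten-element array can be built with

     tree domain = build_index_type (size_int (9));
     tree arr = build_array_type (integer_type_node, domain);

   which gives an ARRAY_TYPE equivalent to C's int[10], with indices
   running from 0 to 9.  */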
7785
7786 /* Return true if the debug information for TYPE, a subtype, should be emitted
7787 as a subrange type. If so, set LOWVAL to the low bound and HIGHVAL to the
7788 high bound, respectively. Sometimes doing so unnecessarily obfuscates the
7789 debug info and doesn't reflect the source code. */
7790
7791 bool
7792 subrange_type_for_debug_p (const_tree type, tree *lowval, tree *highval)
7793 {
7794 tree base_type = TREE_TYPE (type), low, high;
7795
7796 /* Subrange types have a base type which is an integral type. */
7797 if (!INTEGRAL_TYPE_P (base_type))
7798 return false;
7799
7800 /* Get the real bounds of the subtype. */
7801 if (lang_hooks.types.get_subrange_bounds)
7802 lang_hooks.types.get_subrange_bounds (type, &low, &high);
7803 else
7804 {
7805 low = TYPE_MIN_VALUE (type);
7806 high = TYPE_MAX_VALUE (type);
7807 }
7808
7809 /* If the type and its base type have the same representation and the same
7810 name, then the type is not a subrange but a copy of the base type. */
7811 if ((TREE_CODE (base_type) == INTEGER_TYPE
7812 || TREE_CODE (base_type) == BOOLEAN_TYPE)
7813 && int_size_in_bytes (type) == int_size_in_bytes (base_type)
7814 && tree_int_cst_equal (low, TYPE_MIN_VALUE (base_type))
7815 && tree_int_cst_equal (high, TYPE_MAX_VALUE (base_type))
7816 && TYPE_IDENTIFIER (type) == TYPE_IDENTIFIER (base_type))
7817 return false;
7818
7819 if (lowval)
7820 *lowval = low;
7821 if (highval)
7822 *highval = high;
7823 return true;
7824 }
7825
7826 /* Construct, lay out and return the type of arrays of elements with ELT_TYPE
7827 and number of elements specified by the range of values of INDEX_TYPE.
7828 If SHARED is true, reuse such a type that has already been constructed. */
7829
7830 static tree
7831 build_array_type_1 (tree elt_type, tree index_type, bool shared)
7832 {
7833 tree t;
7834
7835 if (TREE_CODE (elt_type) == FUNCTION_TYPE)
7836 {
7837 error ("arrays of functions are not meaningful");
7838 elt_type = integer_type_node;
7839 }
7840
7841 t = make_node (ARRAY_TYPE);
7842 TREE_TYPE (t) = elt_type;
7843 TYPE_DOMAIN (t) = index_type;
7844 TYPE_ADDR_SPACE (t) = TYPE_ADDR_SPACE (elt_type);
7845 layout_type (t);
7846
7847 /* If the element type is incomplete at this point we get marked for
7848 structural equality. Do not record these types in the canonical
7849 type hashtable. */
7850 if (TYPE_STRUCTURAL_EQUALITY_P (t))
7851 return t;
7852
7853 if (shared)
7854 {
7855 inchash::hash hstate;
7856 hstate.add_object (TYPE_HASH (elt_type));
7857 if (index_type)
7858 hstate.add_object (TYPE_HASH (index_type));
7859 t = type_hash_canon (hstate.end (), t);
7860 }
7861
7862 if (TYPE_CANONICAL (t) == t)
7863 {
7864 if (TYPE_STRUCTURAL_EQUALITY_P (elt_type)
7865 || (index_type && TYPE_STRUCTURAL_EQUALITY_P (index_type)))
7866 SET_TYPE_STRUCTURAL_EQUALITY (t);
7867 else if (TYPE_CANONICAL (elt_type) != elt_type
7868 || (index_type && TYPE_CANONICAL (index_type) != index_type))
7869 TYPE_CANONICAL (t)
7870 = build_array_type_1 (TYPE_CANONICAL (elt_type),
7871 index_type
7872 ? TYPE_CANONICAL (index_type) : NULL_TREE,
7873 shared);
7874 }
7875
7876 return t;
7877 }
7878
7879 /* Wrapper around build_array_type_1 with SHARED set to true. */
7880
7881 tree
7882 build_array_type (tree elt_type, tree index_type)
7883 {
7884 return build_array_type_1 (elt_type, index_type, true);
7885 }
7886
7887 /* Wrapper around build_array_type_1 with SHARED set to false. */
7888
7889 tree
7890 build_nonshared_array_type (tree elt_type, tree index_type)
7891 {
7892 return build_array_type_1 (elt_type, index_type, false);
7893 }
7894
7895 /* Return a representation of ELT_TYPE[NELTS], using indices of type
7896 sizetype. */
7897
7898 tree
7899 build_array_type_nelts (tree elt_type, unsigned HOST_WIDE_INT nelts)
7900 {
7901 return build_array_type (elt_type, build_index_type (size_int (nelts - 1)));
7902 }
7903
7904 /* Recursively examines the array elements of TYPE, until a non-array
7905 element type is found. */
7906
7907 tree
7908 strip_array_types (tree type)
7909 {
7910 while (TREE_CODE (type) == ARRAY_TYPE)
7911 type = TREE_TYPE (type);
7912
7913 return type;
7914 }
7915
7916 /* Computes the canonical argument types from the argument type list
7917 ARGTYPES.
7918
7919 Upon return, *ANY_STRUCTURAL_P will be true iff either it was true
7920 on entry to this function, or if any of the ARGTYPES are
7921 structural.
7922
7923 Upon return, *ANY_NONCANONICAL_P will be true iff either it was
7924 true on entry to this function, or if any of the ARGTYPES are
7925 non-canonical.
7926
7927 Returns a canonical argument list, which may be ARGTYPES when the
7928 canonical argument list is unneeded (i.e., *ANY_STRUCTURAL_P is
7929 true) or would not differ from ARGTYPES. */
7930
7931 static tree
7932 maybe_canonicalize_argtypes (tree argtypes,
7933 bool *any_structural_p,
7934 bool *any_noncanonical_p)
7935 {
7936 tree arg;
7937 bool any_noncanonical_argtypes_p = false;
7938
7939 for (arg = argtypes; arg && !(*any_structural_p); arg = TREE_CHAIN (arg))
7940 {
7941 if (!TREE_VALUE (arg) || TREE_VALUE (arg) == error_mark_node)
7942 /* Fail gracefully by stating that the type is structural. */
7943 *any_structural_p = true;
7944 else if (TYPE_STRUCTURAL_EQUALITY_P (TREE_VALUE (arg)))
7945 *any_structural_p = true;
7946 else if (TYPE_CANONICAL (TREE_VALUE (arg)) != TREE_VALUE (arg)
7947 || TREE_PURPOSE (arg))
7948 /* If the argument has a default argument, we consider it
7949 non-canonical even though the type itself is canonical.
7950 That way, different variants of function and method types
7951 with default arguments will all point to the variant with
7952 no defaults as their canonical type. */
7953 any_noncanonical_argtypes_p = true;
7954 }
7955
7956 if (*any_structural_p)
7957 return argtypes;
7958
7959 if (any_noncanonical_argtypes_p)
7960 {
7961 /* Build the canonical list of argument types. */
7962 tree canon_argtypes = NULL_TREE;
7963 bool is_void = false;
7964
7965 for (arg = argtypes; arg; arg = TREE_CHAIN (arg))
7966 {
7967 if (arg == void_list_node)
7968 is_void = true;
7969 else
7970 canon_argtypes = tree_cons (NULL_TREE,
7971 TYPE_CANONICAL (TREE_VALUE (arg)),
7972 canon_argtypes);
7973 }
7974
7975 canon_argtypes = nreverse (canon_argtypes);
7976 if (is_void)
7977 canon_argtypes = chainon (canon_argtypes, void_list_node);
7978
7979 /* There is a non-canonical type. */
7980 *any_noncanonical_p = true;
7981 return canon_argtypes;
7982 }
7983
7984 /* The canonical argument types are the same as ARGTYPES. */
7985 return argtypes;
7986 }
7987
7988 /* Construct, lay out and return
7989 the type of functions returning type VALUE_TYPE
7990 given arguments of types ARG_TYPES.
7991 ARG_TYPES is a chain of TREE_LIST nodes whose TREE_VALUEs
7992 are data type nodes for the arguments of the function.
7993 If such a type has already been constructed, reuse it. */
7994
7995 tree
7996 build_function_type (tree value_type, tree arg_types)
7997 {
7998 tree t;
7999 inchash::hash hstate;
8000 bool any_structural_p, any_noncanonical_p;
8001 tree canon_argtypes;
8002
8003 if (TREE_CODE (value_type) == FUNCTION_TYPE)
8004 {
8005 error ("function return type cannot be function");
8006 value_type = integer_type_node;
8007 }
8008
8009 /* Make a node of the sort we want. */
8010 t = make_node (FUNCTION_TYPE);
8011 TREE_TYPE (t) = value_type;
8012 TYPE_ARG_TYPES (t) = arg_types;
8013
8014 /* If we already have such a type, use the old one. */
8015 hstate.add_object (TYPE_HASH (value_type));
8016 type_hash_list (arg_types, hstate);
8017 t = type_hash_canon (hstate.end (), t);
8018
8019 /* Set up the canonical type. */
8020 any_structural_p = TYPE_STRUCTURAL_EQUALITY_P (value_type);
8021 any_noncanonical_p = TYPE_CANONICAL (value_type) != value_type;
8022 canon_argtypes = maybe_canonicalize_argtypes (arg_types,
8023 &any_structural_p,
8024 &any_noncanonical_p);
8025 if (any_structural_p)
8026 SET_TYPE_STRUCTURAL_EQUALITY (t);
8027 else if (any_noncanonical_p)
8028 TYPE_CANONICAL (t) = build_function_type (TYPE_CANONICAL (value_type),
8029 canon_argtypes);
8030
8031 if (!COMPLETE_TYPE_P (t))
8032 layout_type (t);
8033 return t;
8034 }
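
/* For example, the type of a prototyped function `long f (int)' can be
   built by chaining the argument types onto void_list_node by hand:

     tree args = tree_cons (NULL_TREE, integer_type_node, void_list_node);
     tree fntype = build_function_type (long_integer_type_node, args);

   The build_function_type_list family below is usually more convenient
   when the argument types are known individually.  */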
8035
8036 /* Build a function type. The RETURN_TYPE is the type returned by the
8037 function. If VAARGS is set, no void_type_node is appended to
8038 the list. ARGP must always be terminated by a NULL_TREE. */
8039
8040 static tree
8041 build_function_type_list_1 (bool vaargs, tree return_type, va_list argp)
8042 {
8043 tree t, args, last;
8044
8045 t = va_arg (argp, tree);
8046 for (args = NULL_TREE; t != NULL_TREE; t = va_arg (argp, tree))
8047 args = tree_cons (NULL_TREE, t, args);
8048
8049 if (vaargs)
8050 {
8051 last = args;
8052 if (args != NULL_TREE)
8053 args = nreverse (args);
8054 gcc_assert (last != void_list_node);
8055 }
8056 else if (args == NULL_TREE)
8057 args = void_list_node;
8058 else
8059 {
8060 last = args;
8061 args = nreverse (args);
8062 TREE_CHAIN (last) = void_list_node;
8063 }
8064 args = build_function_type (return_type, args);
8065
8066 return args;
8067 }
8068
8069 /* Build a function type. The RETURN_TYPE is the type returned by the
8070 function. If additional arguments are provided, they are
8071 additional argument types. The list of argument types must always
8072 be terminated by NULL_TREE. */
8073
8074 tree
8075 build_function_type_list (tree return_type, ...)
8076 {
8077 tree args;
8078 va_list p;
8079
8080 va_start (p, return_type);
8081 args = build_function_type_list_1 (false, return_type, p);
8082 va_end (p);
8083 return args;
8084 }
8085
8086 /* Build a variable argument function type. The RETURN_TYPE is the
8087 type returned by the function. If additional arguments are provided,
8088 they are additional argument types. The list of argument types must
8089 always be terminated by NULL_TREE. */
8090
8091 tree
8092 build_varargs_function_type_list (tree return_type, ...)
8093 {
8094 tree args;
8095 va_list p;
8096
8097 va_start (p, return_type);
8098 args = build_function_type_list_1 (true, return_type, p);
8099 va_end (p);
8100
8101 return args;
8102 }
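
/* For example, `int f (double)' and the variadic `int g (char *, ...)'
   can be built with

     tree f_type = build_function_type_list (integer_type_node,
                                             double_type_node, NULL_TREE);
     tree g_type
       = build_varargs_function_type_list (integer_type_node,
                                           build_pointer_type (char_type_node),
                                           NULL_TREE);

   NULL_TREE terminates the supplied argument types in both cases; only
   the non-variadic form appends void_list_node to the resulting list.  */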
8103
8104 /* Build a function type. RETURN_TYPE is the type returned by the
8105 function; VAARGS indicates whether the function takes varargs. The
8106 function takes N named arguments, the types of which are provided in
8107 ARG_TYPES. */
8108
8109 static tree
8110 build_function_type_array_1 (bool vaargs, tree return_type, int n,
8111 tree *arg_types)
8112 {
8113 int i;
8114 tree t = vaargs ? NULL_TREE : void_list_node;
8115
8116 for (i = n - 1; i >= 0; i--)
8117 t = tree_cons (NULL_TREE, arg_types[i], t);
8118
8119 return build_function_type (return_type, t);
8120 }
8121
8122 /* Build a function type. RETURN_TYPE is the type returned by the
8123 function. The function takes N named arguments, the types of which
8124 are provided in ARG_TYPES. */
8125
8126 tree
8127 build_function_type_array (tree return_type, int n, tree *arg_types)
8128 {
8129 return build_function_type_array_1 (false, return_type, n, arg_types);
8130 }
8131
8132 /* Build a variable argument function type. RETURN_TYPE is the type
8133 returned by the function. The function takes N named arguments, the
8134 types of which are provided in ARG_TYPES. */
8135
8136 tree
8137 build_varargs_function_type_array (tree return_type, int n, tree *arg_types)
8138 {
8139 return build_function_type_array_1 (true, return_type, n, arg_types);
8140 }
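
/* For example, when the parameter types are already collected in an
   array, `void f (int, double)' can be built as

     tree parm_types[2] = { integer_type_node, double_type_node };
     tree f_type = build_function_type_array (void_type_node, 2, parm_types);

   build_varargs_function_type_array does the same but leaves the
   argument list open-ended for a trailing `...'.  */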
8141
8142 /* Build a METHOD_TYPE for a member of BASETYPE. The RETTYPE (a TYPE)
8143 and ARGTYPES (a TREE_LIST) are the return type and argument types
8144 for the method. An implicit additional parameter (of type
8145 pointer-to-BASETYPE) is added to the ARGTYPES. */
8146
8147 tree
8148 build_method_type_directly (tree basetype,
8149 tree rettype,
8150 tree argtypes)
8151 {
8152 tree t;
8153 tree ptype;
8154 inchash::hash hstate;
8155 bool any_structural_p, any_noncanonical_p;
8156 tree canon_argtypes;
8157
8158 /* Make a node of the sort we want. */
8159 t = make_node (METHOD_TYPE);
8160
8161 TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8162 TREE_TYPE (t) = rettype;
8163 ptype = build_pointer_type (basetype);
8164
8165 /* The actual arglist for this function includes a "hidden" argument
8166 which is "this". Put it into the list of argument types. */
8167 argtypes = tree_cons (NULL_TREE, ptype, argtypes);
8168 TYPE_ARG_TYPES (t) = argtypes;
8169
8170 /* If we already have such a type, use the old one. */
8171 hstate.add_object (TYPE_HASH (basetype));
8172 hstate.add_object (TYPE_HASH (rettype));
8173 type_hash_list (argtypes, hstate);
8174 t = type_hash_canon (hstate.end (), t);
8175
8176 /* Set up the canonical type. */
8177 any_structural_p
8178 = (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8179 || TYPE_STRUCTURAL_EQUALITY_P (rettype));
8180 any_noncanonical_p
8181 = (TYPE_CANONICAL (basetype) != basetype
8182 || TYPE_CANONICAL (rettype) != rettype);
8183 canon_argtypes = maybe_canonicalize_argtypes (TREE_CHAIN (argtypes),
8184 &any_structural_p,
8185 &any_noncanonical_p);
8186 if (any_structural_p)
8187 SET_TYPE_STRUCTURAL_EQUALITY (t);
8188 else if (any_noncanonical_p)
8189 TYPE_CANONICAL (t)
8190 = build_method_type_directly (TYPE_CANONICAL (basetype),
8191 TYPE_CANONICAL (rettype),
8192 canon_argtypes);
8193 if (!COMPLETE_TYPE_P (t))
8194 layout_type (t);
8195
8196 return t;
8197 }
8198
8199 /* Construct, lay out and return the type of methods belonging to class
8200 BASETYPE and whose arguments and values are described by TYPE.
8201 If that type exists already, reuse it.
8202 TYPE must be a FUNCTION_TYPE node. */
8203
8204 tree
8205 build_method_type (tree basetype, tree type)
8206 {
8207 gcc_assert (TREE_CODE (type) == FUNCTION_TYPE);
8208
8209 return build_method_type_directly (basetype,
8210 TREE_TYPE (type),
8211 TYPE_ARG_TYPES (type));
8212 }
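
/* For example, given KLASS, some already-built RECORD_TYPE (a hypothetical
   class type), a front end could build the type of a member function
   `int KLASS::f (int)' from the corresponding FUNCTION_TYPE:

     tree fntype = build_function_type_list (integer_type_node,
                                             integer_type_node, NULL_TREE);
     tree mtype = build_method_type (klass, fntype);

   The resulting METHOD_TYPE carries an implicit first argument of type
   pointer-to-KLASS, as described above.  */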
8213
8214 /* Construct, lay out and return the type of offsets to a value
8215 of type TYPE, within an object of type BASETYPE.
8216 If a suitable offset type exists already, reuse it. */
8217
8218 tree
8219 build_offset_type (tree basetype, tree type)
8220 {
8221 tree t;
8222 inchash::hash hstate;
8223
8224 /* Make a node of the sort we want. */
8225 t = make_node (OFFSET_TYPE);
8226
8227 TYPE_OFFSET_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8228 TREE_TYPE (t) = type;
8229
8230 /* If we already have such a type, use the old one. */
8231 hstate.add_object (TYPE_HASH (basetype));
8232 hstate.add_object (TYPE_HASH (type));
8233 t = type_hash_canon (hstate.end (), t);
8234
8235 if (!COMPLETE_TYPE_P (t))
8236 layout_type (t);
8237
8238 if (TYPE_CANONICAL (t) == t)
8239 {
8240 if (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8241 || TYPE_STRUCTURAL_EQUALITY_P (type))
8242 SET_TYPE_STRUCTURAL_EQUALITY (t);
8243 else if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)) != basetype
8244 || TYPE_CANONICAL (type) != type)
8245 TYPE_CANONICAL (t)
8246 = build_offset_type (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)),
8247 TYPE_CANONICAL (type));
8248 }
8249
8250 return t;
8251 }
8252
8253 /* Create a complex type whose components are COMPONENT_TYPE. */
8254
8255 tree
8256 build_complex_type (tree component_type)
8257 {
8258 tree t;
8259 inchash::hash hstate;
8260
8261 gcc_assert (INTEGRAL_TYPE_P (component_type)
8262 || SCALAR_FLOAT_TYPE_P (component_type)
8263 || FIXED_POINT_TYPE_P (component_type));
8264
8265 /* Make a node of the sort we want. */
8266 t = make_node (COMPLEX_TYPE);
8267
8268 TREE_TYPE (t) = TYPE_MAIN_VARIANT (component_type);
8269
8270 /* If we already have such a type, use the old one. */
8271 hstate.add_object (TYPE_HASH (component_type));
8272 t = type_hash_canon (hstate.end (), t);
8273
8274 if (!COMPLETE_TYPE_P (t))
8275 layout_type (t);
8276
8277 if (TYPE_CANONICAL (t) == t)
8278 {
8279 if (TYPE_STRUCTURAL_EQUALITY_P (component_type))
8280 SET_TYPE_STRUCTURAL_EQUALITY (t);
8281 else if (TYPE_CANONICAL (component_type) != component_type)
8282 TYPE_CANONICAL (t)
8283 = build_complex_type (TYPE_CANONICAL (component_type));
8284 }
8285
8286 /* We need to create a name, since complex is a fundamental type. */
8287 if (! TYPE_NAME (t))
8288 {
8289 const char *name;
8290 if (component_type == char_type_node)
8291 name = "complex char";
8292 else if (component_type == signed_char_type_node)
8293 name = "complex signed char";
8294 else if (component_type == unsigned_char_type_node)
8295 name = "complex unsigned char";
8296 else if (component_type == short_integer_type_node)
8297 name = "complex short int";
8298 else if (component_type == short_unsigned_type_node)
8299 name = "complex short unsigned int";
8300 else if (component_type == integer_type_node)
8301 name = "complex int";
8302 else if (component_type == unsigned_type_node)
8303 name = "complex unsigned int";
8304 else if (component_type == long_integer_type_node)
8305 name = "complex long int";
8306 else if (component_type == long_unsigned_type_node)
8307 name = "complex long unsigned int";
8308 else if (component_type == long_long_integer_type_node)
8309 name = "complex long long int";
8310 else if (component_type == long_long_unsigned_type_node)
8311 name = "complex long long unsigned int";
8312 else
8313 name = 0;
8314
8315 if (name != 0)
8316 TYPE_NAME (t) = build_decl (UNKNOWN_LOCATION, TYPE_DECL,
8317 get_identifier (name), t);
8318 }
8319
8320 return build_qualified_type (t, TYPE_QUALS (component_type));
8321 }
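
/* For example,

     tree ctype = build_complex_type (double_type_node);

   returns the shared node for `complex double' (the same node as
   complex_double_type_node once the common nodes have been built),
   because type_hash_canon above reuses the previously recorded type.  */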
8322
8323 /* If TYPE is a real or complex floating-point type and the target
8324 does not directly support arithmetic on TYPE then return the wider
8325 type to be used for arithmetic on TYPE. Otherwise, return
8326 NULL_TREE. */
8327
8328 tree
8329 excess_precision_type (tree type)
8330 {
8331 if (flag_excess_precision != EXCESS_PRECISION_FAST)
8332 {
8333 int flt_eval_method = TARGET_FLT_EVAL_METHOD;
8334 switch (TREE_CODE (type))
8335 {
8336 case REAL_TYPE:
8337 switch (flt_eval_method)
8338 {
8339 case 1:
8340 if (TYPE_MODE (type) == TYPE_MODE (float_type_node))
8341 return double_type_node;
8342 break;
8343 case 2:
8344 if (TYPE_MODE (type) == TYPE_MODE (float_type_node)
8345 || TYPE_MODE (type) == TYPE_MODE (double_type_node))
8346 return long_double_type_node;
8347 break;
8348 default:
8349 gcc_unreachable ();
8350 }
8351 break;
8352 case COMPLEX_TYPE:
8353 if (TREE_CODE (TREE_TYPE (type)) != REAL_TYPE)
8354 return NULL_TREE;
8355 switch (flt_eval_method)
8356 {
8357 case 1:
8358 if (TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (float_type_node))
8359 return complex_double_type_node;
8360 break;
8361 case 2:
8362 if (TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (float_type_node)
8363 || (TYPE_MODE (TREE_TYPE (type))
8364 == TYPE_MODE (double_type_node)))
8365 return complex_long_double_type_node;
8366 break;
8367 default:
8368 gcc_unreachable ();
8369 }
8370 break;
8371 default:
8372 break;
8373 }
8374 }
8375 return NULL_TREE;
8376 }
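
/* For example, with -fexcess-precision=standard on a target whose
   FLT_EVAL_METHOD is 2 (x87-style evaluation),

     tree wide = excess_precision_type (float_type_node);

   returns long_double_type_node, while excess_precision_type
   (long_double_type_node) returns NULL_TREE because long double is
   already the widest evaluation format.  */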
8377 \f
8378 /* Return OP, stripped of any conversions to wider types as much as is safe.
8379 Converting the value back to OP's type makes a value equivalent to OP.
8380
8381 If FOR_TYPE is nonzero, we return a value which, if converted to
8382 type FOR_TYPE, would be equivalent to converting OP to type FOR_TYPE.
8383
8384 OP must have integer, real or enumeral type. Pointers are not allowed!
8385
8386 There are some cases where the obvious value we could return
8387 would regenerate to OP if converted to OP's type,
8388 but would not extend like OP to wider types.
8389 If FOR_TYPE indicates such extension is contemplated, we eschew such values.
8390 For example, if OP is (unsigned short)(signed char)-1,
8391 we avoid returning (signed char)-1 if FOR_TYPE is int,
8392 even though extending that to an unsigned short would regenerate OP,
8393 since the result of extending (signed char)-1 to (int)
8394 is different from (int) OP. */
8395
8396 tree
8397 get_unwidened (tree op, tree for_type)
8398 {
8399 /* Set UNS initially if converting OP to FOR_TYPE is a zero-extension. */
8400 tree type = TREE_TYPE (op);
8401 unsigned final_prec
8402 = TYPE_PRECISION (for_type != 0 ? for_type : type);
8403 int uns
8404 = (for_type != 0 && for_type != type
8405 && final_prec > TYPE_PRECISION (type)
8406 && TYPE_UNSIGNED (type));
8407 tree win = op;
8408
8409 while (CONVERT_EXPR_P (op))
8410 {
8411 int bitschange;
8412
8413 /* TYPE_PRECISION on vector types has different meaning
8414 (TYPE_VECTOR_SUBPARTS) and casts from vectors are view conversions,
8415 so avoid them here. */
8416 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op, 0))) == VECTOR_TYPE)
8417 break;
8418
8419 bitschange = TYPE_PRECISION (TREE_TYPE (op))
8420 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0)));
8421
8422 /* Truncations are many-one so cannot be removed.
8423 Unless we are later going to truncate down even farther. */
8424 if (bitschange < 0
8425 && final_prec > TYPE_PRECISION (TREE_TYPE (op)))
8426 break;
8427
8428 /* See what's inside this conversion. If we decide to strip it,
8429 we will set WIN. */
8430 op = TREE_OPERAND (op, 0);
8431
8432 /* If we have not stripped any zero-extensions (uns is 0),
8433 we can strip any kind of extension.
8434 If we have previously stripped a zero-extension,
8435 only zero-extensions can safely be stripped.
8436 Any extension can be stripped if the bits it would produce
8437 are all going to be discarded later by truncating to FOR_TYPE. */
8438
8439 if (bitschange > 0)
8440 {
8441 if (! uns || final_prec <= TYPE_PRECISION (TREE_TYPE (op)))
8442 win = op;
8443 /* TYPE_UNSIGNED says whether this is a zero-extension.
8444 Let's avoid computing it if it does not affect WIN
8445 and if UNS will not be needed again. */
8446 if ((uns
8447 || CONVERT_EXPR_P (op))
8448 && TYPE_UNSIGNED (TREE_TYPE (op)))
8449 {
8450 uns = 1;
8451 win = op;
8452 }
8453 }
8454 }
8455
8456 /* If we finally reach a constant see if it fits in for_type and
8457 in that case convert it. */
8458 if (for_type
8459 && TREE_CODE (win) == INTEGER_CST
8460 && TREE_TYPE (win) != for_type
8461 && int_fits_type_p (win, for_type))
8462 win = fold_convert (for_type, win);
8463
8464 return win;
8465 }
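
/* For example, if C is a VAR_DECL of type unsigned char (a hypothetical
   operand) and OP is the widening conversion

     tree op = fold_convert (integer_type_node, c);

   then get_unwidened (op, NULL_TREE) strips the conversion and returns C,
   since converting C back to int regenerates OP.  */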
8466 \f
8467 /* Return OP or a simpler expression for a narrower value
8468 which can be sign-extended or zero-extended to give back OP.
8469 Store in *UNSIGNEDP_PTR either 1 if the value should be zero-extended
8470 or 0 if the value should be sign-extended. */
8471
8472 tree
8473 get_narrower (tree op, int *unsignedp_ptr)
8474 {
8475 int uns = 0;
8476 int first = 1;
8477 tree win = op;
8478 bool integral_p = INTEGRAL_TYPE_P (TREE_TYPE (op));
8479
8480 while (TREE_CODE (op) == NOP_EXPR)
8481 {
8482 int bitschange
8483 = (TYPE_PRECISION (TREE_TYPE (op))
8484 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0))));
8485
8486 /* Truncations are many-one so cannot be removed. */
8487 if (bitschange < 0)
8488 break;
8489
8490 /* See what's inside this conversion. If we decide to strip it,
8491 we will set WIN. */
8492
8493 if (bitschange > 0)
8494 {
8495 op = TREE_OPERAND (op, 0);
8496 /* An extension: the outermost one can be stripped,
8497 but remember whether it is zero or sign extension. */
8498 if (first)
8499 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8500 /* Otherwise, if a sign extension has been stripped,
8501 only sign extensions can now be stripped;
8502 if a zero extension has been stripped, only zero-extensions. */
8503 else if (uns != TYPE_UNSIGNED (TREE_TYPE (op)))
8504 break;
8505 first = 0;
8506 }
8507 else /* bitschange == 0 */
8508 {
8509 /* A change in nominal type can always be stripped, but we must
8510 preserve the unsignedness. */
8511 if (first)
8512 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8513 first = 0;
8514 op = TREE_OPERAND (op, 0);
8515 /* Keep trying to narrow, but don't assign op to win if it
8516 would turn an integral type into something else. */
8517 if (INTEGRAL_TYPE_P (TREE_TYPE (op)) != integral_p)
8518 continue;
8519 }
8520
8521 win = op;
8522 }
8523
8524 if (TREE_CODE (op) == COMPONENT_REF
8525 /* Since type_for_size always gives an integer type. */
8526 && TREE_CODE (TREE_TYPE (op)) != REAL_TYPE
8527 && TREE_CODE (TREE_TYPE (op)) != FIXED_POINT_TYPE
8528 /* Ensure field is laid out already. */
8529 && DECL_SIZE (TREE_OPERAND (op, 1)) != 0
8530 && tree_fits_uhwi_p (DECL_SIZE (TREE_OPERAND (op, 1))))
8531 {
8532 unsigned HOST_WIDE_INT innerprec
8533 = tree_to_uhwi (DECL_SIZE (TREE_OPERAND (op, 1)));
8534 int unsignedp = (DECL_UNSIGNED (TREE_OPERAND (op, 1))
8535 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op, 1))));
8536 tree type = lang_hooks.types.type_for_size (innerprec, unsignedp);
8537
8538 /* We can get this structure field in a narrower type that fits it,
8539 but the resulting extension to its nominal type (a fullword type)
8540 must satisfy the same conditions as for other extensions.
8541
8542 Do this only for fields that are aligned (not bit-fields),
8543 because when bit-field insns will be used there is no
8544 advantage in doing this. */
8545
8546 if (innerprec < TYPE_PRECISION (TREE_TYPE (op))
8547 && ! DECL_BIT_FIELD (TREE_OPERAND (op, 1))
8548 && (first || uns == DECL_UNSIGNED (TREE_OPERAND (op, 1)))
8549 && type != 0)
8550 {
8551 if (first)
8552 uns = DECL_UNSIGNED (TREE_OPERAND (op, 1));
8553 win = fold_convert (type, op);
8554 }
8555 }
8556
8557 *unsignedp_ptr = uns;
8558 return win;
8559 }
8560 \f
8561 /* Returns true if integer constant C has a value that is permissible
8562 for type TYPE (an INTEGER_TYPE). */
8563
8564 bool
8565 int_fits_type_p (const_tree c, const_tree type)
8566 {
8567 tree type_low_bound, type_high_bound;
8568 bool ok_for_low_bound, ok_for_high_bound;
8569 signop sgn_c = TYPE_SIGN (TREE_TYPE (c));
8570
8571 retry:
8572 type_low_bound = TYPE_MIN_VALUE (type);
8573 type_high_bound = TYPE_MAX_VALUE (type);
8574
8575 /* If at least one bound of the type is a constant integer, we can check
8576 ourselves and maybe make a decision. If no such decision is possible, but
8577 this type is a subtype, try checking against that. Otherwise, use
8578 fits_to_tree_p, which checks against the precision.
8579
8580 Compute the status for each possibly constant bound, returning false as
8581 soon as we see a bound that the constant is known not to satisfy. The
8582 ok_for_xxx_bound flags record whether the corresponding bound was a
8583 constant that the value is known to satisfy. */
8584
8585 /* Check if c >= type_low_bound. */
8586 if (type_low_bound && TREE_CODE (type_low_bound) == INTEGER_CST)
8587 {
8588 if (tree_int_cst_lt (c, type_low_bound))
8589 return false;
8590 ok_for_low_bound = true;
8591 }
8592 else
8593 ok_for_low_bound = false;
8594
8595 /* Check if c <= type_high_bound. */
8596 if (type_high_bound && TREE_CODE (type_high_bound) == INTEGER_CST)
8597 {
8598 if (tree_int_cst_lt (type_high_bound, c))
8599 return false;
8600 ok_for_high_bound = true;
8601 }
8602 else
8603 ok_for_high_bound = false;
8604
8605 /* If the constant fits both bounds, the result is known. */
8606 if (ok_for_low_bound && ok_for_high_bound)
8607 return true;
8608
8609 /* Perform some generic filtering which may allow making a decision
8610 even if the bounds are not constant. First, negative integers
8611 never fit in unsigned types. */
8612 if (TYPE_UNSIGNED (type) && sgn_c == SIGNED && wi::neg_p (c))
8613 return false;
8614
8615 /* Second, narrower types always fit in wider ones. */
8616 if (TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (c)))
8617 return true;
8618
8619 /* Third, unsigned integers with top bit set never fit signed types. */
8620 if (!TYPE_UNSIGNED (type) && sgn_c == UNSIGNED)
8621 {
8622 int prec = GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (c))) - 1;
8623 if (prec < TYPE_PRECISION (TREE_TYPE (c)))
8624 {
8625 /* When a tree_cst is converted to a wide-int, the precision
8626 is taken from the type. However, if the precision of the
8627 mode underneath the type is smaller than that, it is
8628 possible that the value will not fit. The test below
8629 fails if any bit is set between the sign bit of the
8630 underlying mode and the top bit of the type. */
8631 if (wi::ne_p (wi::zext (c, prec - 1), c))
8632 return false;
8633 }
8634 else if (wi::neg_p (c))
8635 return false;
8636 }
8637
8638 /* If we haven't been able to decide at this point, there is nothing more we
8639 can check ourselves here. Look at the base type if we have one and it
8640 has the same precision. */
8641 if (TREE_CODE (type) == INTEGER_TYPE
8642 && TREE_TYPE (type) != 0
8643 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (type)))
8644 {
8645 type = TREE_TYPE (type);
8646 goto retry;
8647 }
8648
8649 /* Or to fits_to_tree_p, if nothing else. */
8650 return wi::fits_to_tree_p (c, type);
8651 }
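
/* For example,

     int_fits_type_p (build_int_cst (integer_type_node, 200),
                      unsigned_char_type_node)

   returns true, whereas the same check with 300, or with -1 against any
   unsigned type, returns false.  */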
8652
8653 /* Stores bounds of an integer TYPE in MIN and MAX. If TYPE has non-constant
8654 bounds or is a POINTER_TYPE, the maximum and/or minimum values that can be
8655 represented (assuming two's-complement arithmetic) within the bit
8656 precision of the type are returned instead. */
8657
8658 void
8659 get_type_static_bounds (const_tree type, mpz_t min, mpz_t max)
8660 {
8661 if (!POINTER_TYPE_P (type) && TYPE_MIN_VALUE (type)
8662 && TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST)
8663 wi::to_mpz (TYPE_MIN_VALUE (type), min, TYPE_SIGN (type));
8664 else
8665 {
8666 if (TYPE_UNSIGNED (type))
8667 mpz_set_ui (min, 0);
8668 else
8669 {
8670 wide_int mn = wi::min_value (TYPE_PRECISION (type), SIGNED);
8671 wi::to_mpz (mn, min, SIGNED);
8672 }
8673 }
8674
8675 if (!POINTER_TYPE_P (type) && TYPE_MAX_VALUE (type)
8676 && TREE_CODE (TYPE_MAX_VALUE (type)) == INTEGER_CST)
8677 wi::to_mpz (TYPE_MAX_VALUE (type), max, TYPE_SIGN (type));
8678 else
8679 {
8680 wide_int mn = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
8681 wi::to_mpz (mn, max, TYPE_SIGN (type));
8682 }
8683 }
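
/* MIN and MAX are expected to be mpz_t values already initialized by the
   caller, for example:

     mpz_t lo, hi;
     mpz_init (lo);
     mpz_init (hi);
     get_type_static_bounds (integer_type_node, lo, hi);

   followed by mpz_clear on both once the bounds have been used.  */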
8684
8685 /* Return true if VAR is an automatic variable defined in function FN. */
8686
8687 bool
8688 auto_var_in_fn_p (const_tree var, const_tree fn)
8689 {
8690 return (DECL_P (var) && DECL_CONTEXT (var) == fn
8691 && ((((TREE_CODE (var) == VAR_DECL && ! DECL_EXTERNAL (var))
8692 || TREE_CODE (var) == PARM_DECL)
8693 && ! TREE_STATIC (var))
8694 || TREE_CODE (var) == LABEL_DECL
8695 || TREE_CODE (var) == RESULT_DECL));
8696 }
8697
8698 /* Subprogram of following function. Called by walk_tree.
8699
8700 Return *TP if it is an automatic variable or parameter of the
8701 function passed in as DATA. */
8702
8703 static tree
8704 find_var_from_fn (tree *tp, int *walk_subtrees, void *data)
8705 {
8706 tree fn = (tree) data;
8707
8708 if (TYPE_P (*tp))
8709 *walk_subtrees = 0;
8710
8711 else if (DECL_P (*tp)
8712 && auto_var_in_fn_p (*tp, fn))
8713 return *tp;
8714
8715 return NULL_TREE;
8716 }
8717
8718 /* Returns true if T is, contains, or refers to a type with variable
8719 size. For METHOD_TYPEs and FUNCTION_TYPEs we exclude the
8720 arguments, but not the return type. If FN is nonzero, only return
8721 true if a modifier of the type or position of FN is a variable or
8722 parameter inside FN.
8723
8724 This concept is more general than that of C99 'variably modified types':
8725 in C99, a struct type is never variably modified because a VLA may not
8726 appear as a structure member. However, in GNU C, code like:
8727
8728 struct S { int i[f()]; };
8729
8730 is valid, and other languages may define similar constructs. */
8731
8732 bool
8733 variably_modified_type_p (tree type, tree fn)
8734 {
8735 tree t;
8736
8737 /* Test if T is either variable (if FN is zero) or an expression containing
8738 a variable in FN. If TYPE isn't gimplified, return true also if
8739 gimplify_one_sizepos would gimplify the expression into a local
8740 variable. */
8741 #define RETURN_TRUE_IF_VAR(T) \
8742 do { tree _t = (T); \
8743 if (_t != NULL_TREE \
8744 && _t != error_mark_node \
8745 && TREE_CODE (_t) != INTEGER_CST \
8746 && TREE_CODE (_t) != PLACEHOLDER_EXPR \
8747 && (!fn \
8748 || (!TYPE_SIZES_GIMPLIFIED (type) \
8749 && !is_gimple_sizepos (_t)) \
8750 || walk_tree (&_t, find_var_from_fn, fn, NULL))) \
8751 return true; } while (0)
8752
8753 if (type == error_mark_node)
8754 return false;
8755
8756 /* If TYPE itself has variable size, it is variably modified. */
8757 RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
8758 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
8759
8760 switch (TREE_CODE (type))
8761 {
8762 case POINTER_TYPE:
8763 case REFERENCE_TYPE:
8764 case VECTOR_TYPE:
8765 if (variably_modified_type_p (TREE_TYPE (type), fn))
8766 return true;
8767 break;
8768
8769 case FUNCTION_TYPE:
8770 case METHOD_TYPE:
8771 /* If TYPE is a function type, it is variably modified if the
8772 return type is variably modified. */
8773 if (variably_modified_type_p (TREE_TYPE (type), fn))
8774 return true;
8775 break;
8776
8777 case INTEGER_TYPE:
8778 case REAL_TYPE:
8779 case FIXED_POINT_TYPE:
8780 case ENUMERAL_TYPE:
8781 case BOOLEAN_TYPE:
8782 /* Scalar types are variably modified if their end points
8783 aren't constant. */
8784 RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
8785 RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
8786 break;
8787
8788 case RECORD_TYPE:
8789 case UNION_TYPE:
8790 case QUAL_UNION_TYPE:
8791 /* We can't see if any of the fields are variably-modified by the
8792 definition we normally use, since that would produce infinite
8793 recursion via pointers. */
8794 /* This is variably modified if some field's type is. */
8795 for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
8796 if (TREE_CODE (t) == FIELD_DECL)
8797 {
8798 RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
8799 RETURN_TRUE_IF_VAR (DECL_SIZE (t));
8800 RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
8801
8802 if (TREE_CODE (type) == QUAL_UNION_TYPE)
8803 RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
8804 }
8805 break;
8806
8807 case ARRAY_TYPE:
8808 /* Do not call ourselves to avoid infinite recursion. This is
8809 variably modified if the element type is. */
8810 RETURN_TRUE_IF_VAR (TYPE_SIZE (TREE_TYPE (type)));
8811 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (TREE_TYPE (type)));
8812 break;
8813
8814 default:
8815 break;
8816 }
8817
8818 /* The current language may have other cases to check, but in general,
8819 all other types are not variably modified. */
8820 return lang_hooks.tree_inlining.var_mod_type_p (type, fn);
8821
8822 #undef RETURN_TRUE_IF_VAR
8823 }
8824
8825 /* Given a DECL or TYPE, return the scope in which it was declared, or
8826 NULL_TREE if there is no containing scope. */
8827
8828 tree
8829 get_containing_scope (const_tree t)
8830 {
8831 return (TYPE_P (t) ? TYPE_CONTEXT (t) : DECL_CONTEXT (t));
8832 }
8833
8834 /* Return the innermost context enclosing DECL that is
8835 a FUNCTION_DECL, or zero if none. */
8836
8837 tree
8838 decl_function_context (const_tree decl)
8839 {
8840 tree context;
8841
8842 if (TREE_CODE (decl) == ERROR_MARK)
8843 return 0;
8844
8845 /* C++ virtual functions use DECL_CONTEXT for the class of the vtable
8846 where we look up the function at runtime. Such functions always take
8847 a first argument of type 'pointer to real context'.
8848
8849 C++ should really be fixed to use DECL_CONTEXT for the real context,
8850 and use something else for the "virtual context". */
8851 else if (TREE_CODE (decl) == FUNCTION_DECL && DECL_VINDEX (decl))
8852 context
8853 = TYPE_MAIN_VARIANT
8854 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
8855 else
8856 context = DECL_CONTEXT (decl);
8857
8858 while (context && TREE_CODE (context) != FUNCTION_DECL)
8859 {
8860 if (TREE_CODE (context) == BLOCK)
8861 context = BLOCK_SUPERCONTEXT (context);
8862 else
8863 context = get_containing_scope (context);
8864 }
8865
8866 return context;
8867 }
8868
8869 /* Return the innermost context enclosing DECL that is
8870 a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE, or zero if none.
8871 TYPE_DECLs and FUNCTION_DECLs are transparent to this function. */
8872
8873 tree
8874 decl_type_context (const_tree decl)
8875 {
8876 tree context = DECL_CONTEXT (decl);
8877
8878 while (context)
8879 switch (TREE_CODE (context))
8880 {
8881 case NAMESPACE_DECL:
8882 case TRANSLATION_UNIT_DECL:
8883 return NULL_TREE;
8884
8885 case RECORD_TYPE:
8886 case UNION_TYPE:
8887 case QUAL_UNION_TYPE:
8888 return context;
8889
8890 case TYPE_DECL:
8891 case FUNCTION_DECL:
8892 context = DECL_CONTEXT (context);
8893 break;
8894
8895 case BLOCK:
8896 context = BLOCK_SUPERCONTEXT (context);
8897 break;
8898
8899 default:
8900 gcc_unreachable ();
8901 }
8902
8903 return NULL_TREE;
8904 }
8905
8906 /* CALL is a CALL_EXPR. Return the declaration for the function
8907 called, or NULL_TREE if the called function cannot be
8908 determined. */
8909
8910 tree
8911 get_callee_fndecl (const_tree call)
8912 {
8913 tree addr;
8914
8915 if (call == error_mark_node)
8916 return error_mark_node;
8917
8918 /* It's invalid to call this function with anything but a
8919 CALL_EXPR. */
8920 gcc_assert (TREE_CODE (call) == CALL_EXPR);
8921
8922 /* The first operand to the CALL is the address of the function
8923 called. */
8924 addr = CALL_EXPR_FN (call);
8925
8926 /* If there is no function, return early. */
8927 if (addr == NULL_TREE)
8928 return NULL_TREE;
8929
8930 STRIP_NOPS (addr);
8931
8932 /* If this is a readonly function pointer, extract its initial value. */
8933 if (DECL_P (addr) && TREE_CODE (addr) != FUNCTION_DECL
8934 && TREE_READONLY (addr) && ! TREE_THIS_VOLATILE (addr)
8935 && DECL_INITIAL (addr))
8936 addr = DECL_INITIAL (addr);
8937
8938 /* If the address is just `&f' for some function `f', then we know
8939 that `f' is being called. */
8940 if (TREE_CODE (addr) == ADDR_EXPR
8941 && TREE_CODE (TREE_OPERAND (addr, 0)) == FUNCTION_DECL)
8942 return TREE_OPERAND (addr, 0);
8943
8944 /* We couldn't figure out what was being called. */
8945 return NULL_TREE;
8946 }
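
/* For example, for a CALL_EXPR built directly from a FUNCTION_DECL FNDECL
   with argument ARG (both hypothetical) via

     tree call = build_call_expr (fndecl, 1, arg);

   get_callee_fndecl (call) returns FNDECL, because CALL_EXPR_FN is then
   an ADDR_EXPR of the FUNCTION_DECL.  */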
8947
8948 /* Print debugging information about tree nodes generated during the compile,
8949 and any language-specific information. */
8950
8951 void
8952 dump_tree_statistics (void)
8953 {
8954 if (GATHER_STATISTICS)
8955 {
8956 int i;
8957 int total_nodes, total_bytes;
8958 fprintf (stderr, "Kind Nodes Bytes\n");
8959 fprintf (stderr, "---------------------------------------\n");
8960 total_nodes = total_bytes = 0;
8961 for (i = 0; i < (int) all_kinds; i++)
8962 {
8963 fprintf (stderr, "%-20s %7d %10d\n", tree_node_kind_names[i],
8964 tree_node_counts[i], tree_node_sizes[i]);
8965 total_nodes += tree_node_counts[i];
8966 total_bytes += tree_node_sizes[i];
8967 }
8968 fprintf (stderr, "---------------------------------------\n");
8969 fprintf (stderr, "%-20s %7d %10d\n", "Total", total_nodes, total_bytes);
8970 fprintf (stderr, "---------------------------------------\n");
8971 fprintf (stderr, "Code Nodes\n");
8972 fprintf (stderr, "----------------------------\n");
8973 for (i = 0; i < (int) MAX_TREE_CODES; i++)
8974 fprintf (stderr, "%-20s %7d\n", get_tree_code_name ((enum tree_code) i),
8975 tree_code_counts[i]);
8976 fprintf (stderr, "----------------------------\n");
8977 ssanames_print_statistics ();
8978 phinodes_print_statistics ();
8979 }
8980 else
8981 fprintf (stderr, "(No per-node statistics)\n");
8982
8983 print_type_hash_statistics ();
8984 print_debug_expr_statistics ();
8985 print_value_expr_statistics ();
8986 lang_hooks.print_statistics ();
8987 }
8988 \f
8989 #define FILE_FUNCTION_FORMAT "_GLOBAL__%s_%s"
8990
8991 /* Fold the high BITS bits of VALUE into the crc32 CHKSUM. */
8992
8993 static unsigned
8994 crc32_unsigned_bits (unsigned chksum, unsigned value, unsigned bits)
8995 {
8996 unsigned ix;
8997
8998 for (ix = bits; ix--; value <<= 1)
8999 {
9000 unsigned feedback;
9001
9002 feedback = (value ^ chksum) & 0x80000000 ? 0x04c11db7 : 0;
9003 chksum <<= 1;
9004 chksum ^= feedback;
9005 }
9006 return chksum;
9007 }
9008
9009 /* Generate a crc32 of a 32-bit unsigned. */
9010
9011 unsigned
9012 crc32_unsigned (unsigned chksum, unsigned value)
9013 {
9014 return crc32_unsigned_bits (chksum, value, 32);
9015 }
9016
9017 /* Generate a crc32 of a byte. */
9018
9019 unsigned
9020 crc32_byte (unsigned chksum, char byte)
9021 {
9022 return crc32_unsigned_bits (chksum, (unsigned) byte << 24, 8);
9023 }
9024
9025 /* Generate a crc32 of a string. */
9026
9027 unsigned
9028 crc32_string (unsigned chksum, const char *string)
9029 {
9030 do
9031 {
9032 chksum = crc32_byte (chksum, *string);
9033 }
9034 while (*string++);
9035 return chksum;
9036 }
9037
9038 /* P is a string that will be used in a symbol. Mask out any characters
9039 that are not valid in that context. */
9040
9041 void
9042 clean_symbol_name (char *p)
9043 {
9044 for (; *p; p++)
9045 if (! (ISALNUM (*p)
9046 #ifndef NO_DOLLAR_IN_LABEL /* this for `$'; unlikely, but... -- kr */
9047 || *p == '$'
9048 #endif
9049 #ifndef NO_DOT_IN_LABEL /* this for `.'; unlikely, but... */
9050 || *p == '.'
9051 #endif
9052 ))
9053 *p = '_';
9054 }
9055
9056 /* Generate a name for a special-purpose function.
9057 The generated name may need to be unique across the whole link.
9058 Changes to this function may also require corresponding changes to
9059 xstrdup_mask_random.
9060 TYPE is some string to identify the purpose of this function to the
9061 linker or collect2; it must start with an uppercase letter,
9062 one of:
9063 I - for constructors
9064 D - for destructors
9065 N - for C++ anonymous namespaces
9066 F - for DWARF unwind frame information. */
9067
9068 tree
9069 get_file_function_name (const char *type)
9070 {
9071 char *buf;
9072 const char *p;
9073 char *q;
9074
9075 /* If we already have a name we know to be unique, just use that. */
9076 if (first_global_object_name)
9077 p = q = ASTRDUP (first_global_object_name);
9078 /* If the target is handling the constructors/destructors, they
9079 will be local to this file and the name is only necessary for
9080 debugging purposes.
9081 We also assign sub_I and sub_D suffixes to constructors called from
9082 the global static constructors. These are always local. */
9083 else if (((type[0] == 'I' || type[0] == 'D') && targetm.have_ctors_dtors)
9084 || (strncmp (type, "sub_", 4) == 0
9085 && (type[4] == 'I' || type[4] == 'D')))
9086 {
9087 const char *file = main_input_filename;
9088 if (! file)
9089 file = LOCATION_FILE (input_location);
9090 /* Just use the file's basename, because the full pathname
9091 might be quite long. */
9092 p = q = ASTRDUP (lbasename (file));
9093 }
9094 else
9095 {
9096 /* Otherwise, the name must be unique across the entire link.
9097 We don't have anything that we know to be unique to this translation
9098 unit, so use what we do have and throw in some randomness. */
9099 unsigned len;
9100 const char *name = weak_global_object_name;
9101 const char *file = main_input_filename;
9102
9103 if (! name)
9104 name = "";
9105 if (! file)
9106 file = LOCATION_FILE (input_location);
9107
9108 len = strlen (file);
9109 q = (char *) alloca (9 + 17 + len + 1);
9110 memcpy (q, file, len + 1);
9111
9112 snprintf (q + len, 9 + 17 + 1, "_%08X_" HOST_WIDE_INT_PRINT_HEX,
9113 crc32_string (0, name), get_random_seed (false));
9114
9115 p = q;
9116 }
9117
9118 clean_symbol_name (q);
9119 buf = (char *) alloca (sizeof (FILE_FUNCTION_FORMAT) + strlen (p)
9120 + strlen (type));
9121
9122 /* Set up the name of the file-level functions we may need.
9123 Use a global object (which is already required to be unique over
9124 the program) rather than the file name (which imposes extra
9125 constraints). */
9126 sprintf (buf, FILE_FUNCTION_FORMAT, type, p);
9127
9128 return get_identifier (buf);
9129 }
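
/* For example, when the target handles constructors itself
   (targetm.have_ctors_dtors) and the main input file is "foo.c",

     tree id = get_file_function_name ("I");

   yields an IDENTIFIER_NODE along the lines of "_GLOBAL__I_foo.c",
   following FILE_FUNCTION_FORMAT above, with characters that are not
   valid in labels rewritten to '_' by clean_symbol_name.  */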
9130 \f
9131 #if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007)
9132
9133 /* Complain that the tree code of NODE does not match the expected 0
9134 terminated list of trailing codes. The trailing code list can be
9135 empty, for a more vague error message. FILE, LINE, and FUNCTION
9136 are of the caller. */
9137
9138 void
9139 tree_check_failed (const_tree node, const char *file,
9140 int line, const char *function, ...)
9141 {
9142 va_list args;
9143 const char *buffer;
9144 unsigned length = 0;
9145 enum tree_code code;
9146
9147 va_start (args, function);
9148 while ((code = (enum tree_code) va_arg (args, int)))
9149 length += 4 + strlen (get_tree_code_name (code));
9150 va_end (args);
9151 if (length)
9152 {
9153 char *tmp;
9154 va_start (args, function);
9155 length += strlen ("expected ");
9156 buffer = tmp = (char *) alloca (length);
9157 length = 0;
9158 while ((code = (enum tree_code) va_arg (args, int)))
9159 {
9160 const char *prefix = length ? " or " : "expected ";
9161
9162 strcpy (tmp + length, prefix);
9163 length += strlen (prefix);
9164 strcpy (tmp + length, get_tree_code_name (code));
9165 length += strlen (get_tree_code_name (code));
9166 }
9167 va_end (args);
9168 }
9169 else
9170 buffer = "unexpected node";
9171
9172 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9173 buffer, get_tree_code_name (TREE_CODE (node)),
9174 function, trim_filename (file), line);
9175 }
9176
9177 /* Complain that the tree code of NODE does match the expected 0
9178 terminated list of trailing codes. FILE, LINE, and FUNCTION are of
9179 the caller. */
9180
9181 void
9182 tree_not_check_failed (const_tree node, const char *file,
9183 int line, const char *function, ...)
9184 {
9185 va_list args;
9186 char *buffer;
9187 unsigned length = 0;
9188 enum tree_code code;
9189
9190 va_start (args, function);
9191 while ((code = (enum tree_code) va_arg (args, int)))
9192 length += 4 + strlen (get_tree_code_name (code));
9193 va_end (args);
9194 va_start (args, function);
9195 buffer = (char *) alloca (length);
9196 length = 0;
9197 while ((code = (enum tree_code) va_arg (args, int)))
9198 {
9199 if (length)
9200 {
9201 strcpy (buffer + length, " or ");
9202 length += 4;
9203 }
9204 strcpy (buffer + length, get_tree_code_name (code));
9205 length += strlen (get_tree_code_name (code));
9206 }
9207 va_end (args);
9208
9209 internal_error ("tree check: expected none of %s, have %s in %s, at %s:%d",
9210 buffer, get_tree_code_name (TREE_CODE (node)),
9211 function, trim_filename (file), line);
9212 }
9213
9214 /* Similar to tree_check_failed, except that we check for a class of tree
9215 code, given in CL. */
9216
9217 void
9218 tree_class_check_failed (const_tree node, const enum tree_code_class cl,
9219 const char *file, int line, const char *function)
9220 {
9221 internal_error
9222 ("tree check: expected class %qs, have %qs (%s) in %s, at %s:%d",
9223 TREE_CODE_CLASS_STRING (cl),
9224 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9225 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9226 }
9227
9228 /* Similar to tree_check_failed, except that instead of specifying a
9229 dozen codes, use the knowledge that they're all sequential. */
9230
9231 void
9232 tree_range_check_failed (const_tree node, const char *file, int line,
9233 const char *function, enum tree_code c1,
9234 enum tree_code c2)
9235 {
9236 char *buffer;
9237 unsigned length = 0;
9238 unsigned int c;
9239
9240 for (c = c1; c <= c2; ++c)
9241 length += 4 + strlen (get_tree_code_name ((enum tree_code) c));
9242
9243 length += strlen ("expected ");
9244 buffer = (char *) alloca (length);
9245 length = 0;
9246
9247 for (c = c1; c <= c2; ++c)
9248 {
9249 const char *prefix = length ? " or " : "expected ";
9250
9251 strcpy (buffer + length, prefix);
9252 length += strlen (prefix);
9253 strcpy (buffer + length, get_tree_code_name ((enum tree_code) c));
9254 length += strlen (get_tree_code_name ((enum tree_code) c));
9255 }
9256
9257 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9258 buffer, get_tree_code_name (TREE_CODE (node)),
9259 function, trim_filename (file), line);
9260 }
9261
9262
9263 /* Similar to tree_check_failed, except that we check that a tree does
9264 not have the specified class, given in CL. */
9265
9266 void
9267 tree_not_class_check_failed (const_tree node, const enum tree_code_class cl,
9268 const char *file, int line, const char *function)
9269 {
9270 internal_error
9271 ("tree check: did not expect class %qs, have %qs (%s) in %s, at %s:%d",
9272 TREE_CODE_CLASS_STRING (cl),
9273 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9274 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9275 }
9276
9277
9278 /* Similar to tree_check_failed but applied to OMP_CLAUSE codes. */
9279
9280 void
9281 omp_clause_check_failed (const_tree node, const char *file, int line,
9282 const char *function, enum omp_clause_code code)
9283 {
9284 internal_error ("tree check: expected omp_clause %s, have %s in %s, at %s:%d",
9285 omp_clause_code_name[code], get_tree_code_name (TREE_CODE (node)),
9286 function, trim_filename (file), line);
9287 }
9288
9289
9290 /* Similar to tree_range_check_failed but applied to OMP_CLAUSE codes. */
9291
9292 void
9293 omp_clause_range_check_failed (const_tree node, const char *file, int line,
9294 const char *function, enum omp_clause_code c1,
9295 enum omp_clause_code c2)
9296 {
9297 char *buffer;
9298 unsigned length = 0;
9299 unsigned int c;
9300
9301 for (c = c1; c <= c2; ++c)
9302 length += 4 + strlen (omp_clause_code_name[c]);
9303
9304 length += strlen ("expected ");
9305 buffer = (char *) alloca (length);
9306 length = 0;
9307
9308 for (c = c1; c <= c2; ++c)
9309 {
9310 const char *prefix = length ? " or " : "expected ";
9311
9312 strcpy (buffer + length, prefix);
9313 length += strlen (prefix);
9314 strcpy (buffer + length, omp_clause_code_name[c]);
9315 length += strlen (omp_clause_code_name[c]);
9316 }
9317
9318 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9319 buffer, omp_clause_code_name[TREE_CODE (node)],
9320 function, trim_filename (file), line);
9321 }
9322
9323
9324 #undef DEFTREESTRUCT
9325 #define DEFTREESTRUCT(VAL, NAME) NAME,
9326
9327 static const char *ts_enum_names[] = {
9328 #include "treestruct.def"
9329 };
9330 #undef DEFTREESTRUCT
9331
9332 #define TS_ENUM_NAME(EN) (ts_enum_names[(EN)])
9333
9334 /* Similar to tree_class_check_failed, except that we check for
9335 whether CODE contains the tree structure identified by EN. */
9336
9337 void
9338 tree_contains_struct_check_failed (const_tree node,
9339 const enum tree_node_structure_enum en,
9340 const char *file, int line,
9341 const char *function)
9342 {
9343 internal_error
9344 ("tree check: expected tree that contains %qs structure, have %qs in %s, at %s:%d",
9345 TS_ENUM_NAME (en),
9346 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9347 }
9348
9349
9350 /* Similar to above, except that the check is for the bounds of the
9351 (dynamically sized) element vector of a tree_int_cst. */
9352
9353 void
9354 tree_int_cst_elt_check_failed (int idx, int len, const char *file, int line,
9355 const char *function)
9356 {
9357 internal_error
9358 ("tree check: accessed elt %d of tree_int_cst with %d elts in %s, at %s:%d",
9359 idx + 1, len, function, trim_filename (file), line);
9360 }
9361
9362 /* Similar to above, except that the check is for the bounds of a TREE_VEC's
9363 (dynamically sized) vector. */
9364
9365 void
9366 tree_vec_elt_check_failed (int idx, int len, const char *file, int line,
9367 const char *function)
9368 {
9369 internal_error
9370 ("tree check: accessed elt %d of tree_vec with %d elts in %s, at %s:%d",
9371 idx + 1, len, function, trim_filename (file), line);
9372 }
9373
9374 /* Similar to above, except that the check is for the bounds of the operand
9375 vector of an expression node EXP. */
9376
9377 void
9378 tree_operand_check_failed (int idx, const_tree exp, const char *file,
9379 int line, const char *function)
9380 {
9381 enum tree_code code = TREE_CODE (exp);
9382 internal_error
9383 ("tree check: accessed operand %d of %s with %d operands in %s, at %s:%d",
9384 idx + 1, get_tree_code_name (code), TREE_OPERAND_LENGTH (exp),
9385 function, trim_filename (file), line);
9386 }
9387
9388 /* Similar to above, except that the check is for the number of
9389 operands of an OMP_CLAUSE node. */
9390
9391 void
9392 omp_clause_operand_check_failed (int idx, const_tree t, const char *file,
9393 int line, const char *function)
9394 {
9395 internal_error
9396 ("tree check: accessed operand %d of omp_clause %s with %d operands "
9397 "in %s, at %s:%d", idx + 1, omp_clause_code_name[OMP_CLAUSE_CODE (t)],
9398 omp_clause_num_ops [OMP_CLAUSE_CODE (t)], function,
9399 trim_filename (file), line);
9400 }
9401 #endif /* ENABLE_TREE_CHECKING */
9402 \f
9403 /* Create a new vector type node holding NUNITS units of type INNERTYPE,
9404 and mapped to the machine mode MODE. Initialize its fields and build
9405 the information necessary for debugging output. */
9406
9407 static tree
9408 make_vector_type (tree innertype, int nunits, enum machine_mode mode)
9409 {
9410 tree t;
9411 inchash::hash hstate;
9412
9413 t = make_node (VECTOR_TYPE);
9414 TREE_TYPE (t) = TYPE_MAIN_VARIANT (innertype);
9415 SET_TYPE_VECTOR_SUBPARTS (t, nunits);
9416 SET_TYPE_MODE (t, mode);
9417
9418 if (TYPE_STRUCTURAL_EQUALITY_P (innertype))
9419 SET_TYPE_STRUCTURAL_EQUALITY (t);
9420 else if (TYPE_CANONICAL (innertype) != innertype
9421 || mode != VOIDmode)
9422 TYPE_CANONICAL (t)
9423 = make_vector_type (TYPE_CANONICAL (innertype), nunits, VOIDmode);
9424
9425 layout_type (t);
9426
9427 hstate.add_wide_int (VECTOR_TYPE);
9428 hstate.add_wide_int (nunits);
9429 hstate.add_wide_int (mode);
9430 hstate.add_object (TYPE_HASH (TREE_TYPE (t)));
9431 t = type_hash_canon (hstate.end (), t);
9432
9433 /* We have built a main variant, based on the main variant of the
9434 inner type. Use it to build the variant we return. */
9435 if ((TYPE_ATTRIBUTES (innertype) || TYPE_QUALS (innertype))
9436 && TREE_TYPE (t) != innertype)
9437 return build_type_attribute_qual_variant (t,
9438 TYPE_ATTRIBUTES (innertype),
9439 TYPE_QUALS (innertype));
9440
9441 return t;
9442 }
9443
9444 static tree
9445 make_or_reuse_type (unsigned size, int unsignedp)
9446 {
9447 if (size == INT_TYPE_SIZE)
9448 return unsignedp ? unsigned_type_node : integer_type_node;
9449 if (size == CHAR_TYPE_SIZE)
9450 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
9451 if (size == SHORT_TYPE_SIZE)
9452 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
9453 if (size == LONG_TYPE_SIZE)
9454 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
9455 if (size == LONG_LONG_TYPE_SIZE)
9456 return (unsignedp ? long_long_unsigned_type_node
9457 : long_long_integer_type_node);
9458 if (size == 128 && int128_integer_type_node)
9459 return (unsignedp ? int128_unsigned_type_node
9460 : int128_integer_type_node);
9461
9462 if (unsignedp)
9463 return make_unsigned_type (size);
9464 else
9465 return make_signed_type (size);
9466 }
9467
9468 /* Create or reuse a fract type by SIZE, UNSIGNEDP, and SATP. */
9469
9470 static tree
9471 make_or_reuse_fract_type (unsigned size, int unsignedp, int satp)
9472 {
9473 if (satp)
9474 {
9475 if (size == SHORT_FRACT_TYPE_SIZE)
9476 return unsignedp ? sat_unsigned_short_fract_type_node
9477 : sat_short_fract_type_node;
9478 if (size == FRACT_TYPE_SIZE)
9479 return unsignedp ? sat_unsigned_fract_type_node : sat_fract_type_node;
9480 if (size == LONG_FRACT_TYPE_SIZE)
9481 return unsignedp ? sat_unsigned_long_fract_type_node
9482 : sat_long_fract_type_node;
9483 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9484 return unsignedp ? sat_unsigned_long_long_fract_type_node
9485 : sat_long_long_fract_type_node;
9486 }
9487 else
9488 {
9489 if (size == SHORT_FRACT_TYPE_SIZE)
9490 return unsignedp ? unsigned_short_fract_type_node
9491 : short_fract_type_node;
9492 if (size == FRACT_TYPE_SIZE)
9493 return unsignedp ? unsigned_fract_type_node : fract_type_node;
9494 if (size == LONG_FRACT_TYPE_SIZE)
9495 return unsignedp ? unsigned_long_fract_type_node
9496 : long_fract_type_node;
9497 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9498 return unsignedp ? unsigned_long_long_fract_type_node
9499 : long_long_fract_type_node;
9500 }
9501
9502 return make_fract_type (size, unsignedp, satp);
9503 }
9504
9505 /* Create or reuse an accum type by SIZE, UNSIGNEDP, and SATP. */
9506
9507 static tree
9508 make_or_reuse_accum_type (unsigned size, int unsignedp, int satp)
9509 {
9510 if (satp)
9511 {
9512 if (size == SHORT_ACCUM_TYPE_SIZE)
9513 return unsignedp ? sat_unsigned_short_accum_type_node
9514 : sat_short_accum_type_node;
9515 if (size == ACCUM_TYPE_SIZE)
9516 return unsignedp ? sat_unsigned_accum_type_node : sat_accum_type_node;
9517 if (size == LONG_ACCUM_TYPE_SIZE)
9518 return unsignedp ? sat_unsigned_long_accum_type_node
9519 : sat_long_accum_type_node;
9520 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9521 return unsignedp ? sat_unsigned_long_long_accum_type_node
9522 : sat_long_long_accum_type_node;
9523 }
9524 else
9525 {
9526 if (size == SHORT_ACCUM_TYPE_SIZE)
9527 return unsignedp ? unsigned_short_accum_type_node
9528 : short_accum_type_node;
9529 if (size == ACCUM_TYPE_SIZE)
9530 return unsignedp ? unsigned_accum_type_node : accum_type_node;
9531 if (size == LONG_ACCUM_TYPE_SIZE)
9532 return unsignedp ? unsigned_long_accum_type_node
9533 : long_accum_type_node;
9534 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9535 return unsignedp ? unsigned_long_long_accum_type_node
9536 : long_long_accum_type_node;
9537 }
9538
9539 return make_accum_type (size, unsignedp, satp);
9540 }
9541
9542
9543 /* Create an atomic variant node for TYPE. This routine is called
9544 during initialization of data types to create the 5 basic atomic
9545 types. The generic build_variant_type function requires these to
9546 already be set up in order to function properly, so cannot be
9547 called from there. If ALIGN is non-zero, then ensure alignment is
9548 overridden to this value. */
9549
9550 static tree
9551 build_atomic_base (tree type, unsigned int align)
9552 {
9553 tree t;
9554
9555 /* Make sure it's not already registered. */
9556 if ((t = get_qualified_type (type, TYPE_QUAL_ATOMIC)))
9557 return t;
9558
9559 t = build_variant_type_copy (type);
9560 set_type_quals (t, TYPE_QUAL_ATOMIC);
9561
9562 if (align)
9563 TYPE_ALIGN (t) = align;
9564
9565 return t;
9566 }
9567
9568 /* Create nodes for all integer types (and error_mark_node) using the sizes
9569 of C datatypes. SIGNED_CHAR specifies whether char is signed,
9570 SHORT_DOUBLE specifies whether double should be of the same precision
9571 as float. */
9572
9573 void
9574 build_common_tree_nodes (bool signed_char, bool short_double)
9575 {
9576 error_mark_node = make_node (ERROR_MARK);
9577 TREE_TYPE (error_mark_node) = error_mark_node;
9578
9579 initialize_sizetypes ();
9580
9581 /* Define both `signed char' and `unsigned char'. */
9582 signed_char_type_node = make_signed_type (CHAR_TYPE_SIZE);
9583 TYPE_STRING_FLAG (signed_char_type_node) = 1;
9584 unsigned_char_type_node = make_unsigned_type (CHAR_TYPE_SIZE);
9585 TYPE_STRING_FLAG (unsigned_char_type_node) = 1;
9586
9587 /* Define `char', which is like either `signed char' or `unsigned char'
9588 but not the same as either. */
9589 char_type_node
9590 = (signed_char
9591 ? make_signed_type (CHAR_TYPE_SIZE)
9592 : make_unsigned_type (CHAR_TYPE_SIZE));
9593 TYPE_STRING_FLAG (char_type_node) = 1;
9594
9595 short_integer_type_node = make_signed_type (SHORT_TYPE_SIZE);
9596 short_unsigned_type_node = make_unsigned_type (SHORT_TYPE_SIZE);
9597 integer_type_node = make_signed_type (INT_TYPE_SIZE);
9598 unsigned_type_node = make_unsigned_type (INT_TYPE_SIZE);
9599 long_integer_type_node = make_signed_type (LONG_TYPE_SIZE);
9600 long_unsigned_type_node = make_unsigned_type (LONG_TYPE_SIZE);
9601 long_long_integer_type_node = make_signed_type (LONG_LONG_TYPE_SIZE);
9602 long_long_unsigned_type_node = make_unsigned_type (LONG_LONG_TYPE_SIZE);
9603 #if HOST_BITS_PER_WIDE_INT >= 64
9604 /* TODO: This isn't correct; the logic currently depends on the
9605 host's wide integers instead of the target's.
9606 If there is a target that does not support TImode but has a 128-bit
9607 integer-scalar register, this target check needs to be adjusted. */
9608 if (targetm.scalar_mode_supported_p (TImode))
9609 {
9610 int128_integer_type_node = make_signed_type (128);
9611 int128_unsigned_type_node = make_unsigned_type (128);
9612 }
9613 #endif
9614
9615 /* Define a boolean type. This type only represents boolean values but
9616 may be larger than char depending on the value of BOOL_TYPE_SIZE. */
9617 boolean_type_node = make_unsigned_type (BOOL_TYPE_SIZE);
9618 TREE_SET_CODE (boolean_type_node, BOOLEAN_TYPE);
9619 TYPE_PRECISION (boolean_type_node) = 1;
9620 TYPE_MAX_VALUE (boolean_type_node) = build_int_cst (boolean_type_node, 1);
9621
9622 /* Define what type to use for size_t. */
9623 if (strcmp (SIZE_TYPE, "unsigned int") == 0)
9624 size_type_node = unsigned_type_node;
9625 else if (strcmp (SIZE_TYPE, "long unsigned int") == 0)
9626 size_type_node = long_unsigned_type_node;
9627 else if (strcmp (SIZE_TYPE, "long long unsigned int") == 0)
9628 size_type_node = long_long_unsigned_type_node;
9629 else if (strcmp (SIZE_TYPE, "short unsigned int") == 0)
9630 size_type_node = short_unsigned_type_node;
9631 else
9632 gcc_unreachable ();
9633
9634 /* Fill in the rest of the sized types. Reuse existing type nodes
9635 when possible. */
9636 intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 0);
9637 intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 0);
9638 intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 0);
9639 intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 0);
9640 intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 0);
9641
9642 unsigned_intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 1);
9643 unsigned_intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 1);
9644 unsigned_intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 1);
9645 unsigned_intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 1);
9646 unsigned_intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 1);
9647
9648 /* Don't call build_qualified_type for atomics. That routine does
9649 special processing for atomics, and until they are initialized
9650 it's better not to make that call.
9651
9652 Check to see if there is a target override for atomic types. */
9653
9654 atomicQI_type_node = build_atomic_base (unsigned_intQI_type_node,
9655 targetm.atomic_align_for_mode (QImode));
9656 atomicHI_type_node = build_atomic_base (unsigned_intHI_type_node,
9657 targetm.atomic_align_for_mode (HImode));
9658 atomicSI_type_node = build_atomic_base (unsigned_intSI_type_node,
9659 targetm.atomic_align_for_mode (SImode));
9660 atomicDI_type_node = build_atomic_base (unsigned_intDI_type_node,
9661 targetm.atomic_align_for_mode (DImode));
9662 atomicTI_type_node = build_atomic_base (unsigned_intTI_type_node,
9663 targetm.atomic_align_for_mode (TImode));
9664
9665 access_public_node = get_identifier ("public");
9666 access_protected_node = get_identifier ("protected");
9667 access_private_node = get_identifier ("private");
9668
9669 /* Define these next since types below may use them. */
9670 integer_zero_node = build_int_cst (integer_type_node, 0);
9671 integer_one_node = build_int_cst (integer_type_node, 1);
9672 integer_three_node = build_int_cst (integer_type_node, 3);
9673 integer_minus_one_node = build_int_cst (integer_type_node, -1);
9674
9675 size_zero_node = size_int (0);
9676 size_one_node = size_int (1);
9677 bitsize_zero_node = bitsize_int (0);
9678 bitsize_one_node = bitsize_int (1);
9679 bitsize_unit_node = bitsize_int (BITS_PER_UNIT);
9680
9681 boolean_false_node = TYPE_MIN_VALUE (boolean_type_node);
9682 boolean_true_node = TYPE_MAX_VALUE (boolean_type_node);
9683
9684 void_type_node = make_node (VOID_TYPE);
9685 layout_type (void_type_node);
9686
9687 /* We are not going to have real types in C with less than byte alignment,
9688 so we might as well not have any types that claim to have it. */
9689 TYPE_ALIGN (void_type_node) = BITS_PER_UNIT;
9690 TYPE_USER_ALIGN (void_type_node) = 0;
9691
9692 void_node = make_node (VOID_CST);
9693 TREE_TYPE (void_node) = void_type_node;
9694
9695 null_pointer_node = build_int_cst (build_pointer_type (void_type_node), 0);
9696 layout_type (TREE_TYPE (null_pointer_node));
9697
9698 ptr_type_node = build_pointer_type (void_type_node);
9699 const_ptr_type_node
9700 = build_pointer_type (build_type_variant (void_type_node, 1, 0));
9701 fileptr_type_node = ptr_type_node;
9702
9703 pointer_sized_int_node = build_nonstandard_integer_type (POINTER_SIZE, 1);
9704
9705 float_type_node = make_node (REAL_TYPE);
9706 TYPE_PRECISION (float_type_node) = FLOAT_TYPE_SIZE;
9707 layout_type (float_type_node);
9708
9709 double_type_node = make_node (REAL_TYPE);
9710 if (short_double)
9711 TYPE_PRECISION (double_type_node) = FLOAT_TYPE_SIZE;
9712 else
9713 TYPE_PRECISION (double_type_node) = DOUBLE_TYPE_SIZE;
9714 layout_type (double_type_node);
9715
9716 long_double_type_node = make_node (REAL_TYPE);
9717 TYPE_PRECISION (long_double_type_node) = LONG_DOUBLE_TYPE_SIZE;
9718 layout_type (long_double_type_node);
9719
9720 float_ptr_type_node = build_pointer_type (float_type_node);
9721 double_ptr_type_node = build_pointer_type (double_type_node);
9722 long_double_ptr_type_node = build_pointer_type (long_double_type_node);
9723 integer_ptr_type_node = build_pointer_type (integer_type_node);
9724
9725 /* Fixed size integer types. */
9726 uint16_type_node = build_nonstandard_integer_type (16, true);
9727 uint32_type_node = build_nonstandard_integer_type (32, true);
9728 uint64_type_node = build_nonstandard_integer_type (64, true);
9729
9730 /* Decimal float types. */
9731 dfloat32_type_node = make_node (REAL_TYPE);
9732 TYPE_PRECISION (dfloat32_type_node) = DECIMAL32_TYPE_SIZE;
9733 layout_type (dfloat32_type_node);
9734 SET_TYPE_MODE (dfloat32_type_node, SDmode);
9735 dfloat32_ptr_type_node = build_pointer_type (dfloat32_type_node);
9736
9737 dfloat64_type_node = make_node (REAL_TYPE);
9738 TYPE_PRECISION (dfloat64_type_node) = DECIMAL64_TYPE_SIZE;
9739 layout_type (dfloat64_type_node);
9740 SET_TYPE_MODE (dfloat64_type_node, DDmode);
9741 dfloat64_ptr_type_node = build_pointer_type (dfloat64_type_node);
9742
9743 dfloat128_type_node = make_node (REAL_TYPE);
9744 TYPE_PRECISION (dfloat128_type_node) = DECIMAL128_TYPE_SIZE;
9745 layout_type (dfloat128_type_node);
9746 SET_TYPE_MODE (dfloat128_type_node, TDmode);
9747 dfloat128_ptr_type_node = build_pointer_type (dfloat128_type_node);
9748
9749 complex_integer_type_node = build_complex_type (integer_type_node);
9750 complex_float_type_node = build_complex_type (float_type_node);
9751 complex_double_type_node = build_complex_type (double_type_node);
9752 complex_long_double_type_node = build_complex_type (long_double_type_node);
9753
9754 /* Make fixed-point nodes based on sat/non-sat and signed/unsigned. */
9755 #define MAKE_FIXED_TYPE_NODE(KIND,SIZE) \
9756 sat_ ## KIND ## _type_node = \
9757 make_sat_signed_ ## KIND ## _type (SIZE); \
9758 sat_unsigned_ ## KIND ## _type_node = \
9759 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9760 KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9761 unsigned_ ## KIND ## _type_node = \
9762 make_unsigned_ ## KIND ## _type (SIZE);
9763
9764 #define MAKE_FIXED_TYPE_NODE_WIDTH(KIND,WIDTH,SIZE) \
9765 sat_ ## WIDTH ## KIND ## _type_node = \
9766 make_sat_signed_ ## KIND ## _type (SIZE); \
9767 sat_unsigned_ ## WIDTH ## KIND ## _type_node = \
9768 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9769 WIDTH ## KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9770 unsigned_ ## WIDTH ## KIND ## _type_node = \
9771 make_unsigned_ ## KIND ## _type (SIZE);
9772
9773 /* Make fixed-point type nodes based on four different widths. */
9774 #define MAKE_FIXED_TYPE_NODE_FAMILY(N1,N2) \
9775 MAKE_FIXED_TYPE_NODE_WIDTH (N1, short_, SHORT_ ## N2 ## _TYPE_SIZE) \
9776 MAKE_FIXED_TYPE_NODE (N1, N2 ## _TYPE_SIZE) \
9777 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_, LONG_ ## N2 ## _TYPE_SIZE) \
9778 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_long_, LONG_LONG_ ## N2 ## _TYPE_SIZE)
9779
9780 /* Make fixed-point mode nodes based on sat/non-sat and signed/unsigned. */
9781 #define MAKE_FIXED_MODE_NODE(KIND,NAME,MODE) \
9782 NAME ## _type_node = \
9783 make_or_reuse_signed_ ## KIND ## _type (GET_MODE_BITSIZE (MODE ## mode)); \
9784 u ## NAME ## _type_node = \
9785 make_or_reuse_unsigned_ ## KIND ## _type \
9786 (GET_MODE_BITSIZE (U ## MODE ## mode)); \
9787 sat_ ## NAME ## _type_node = \
9788 make_or_reuse_sat_signed_ ## KIND ## _type \
9789 (GET_MODE_BITSIZE (MODE ## mode)); \
9790 sat_u ## NAME ## _type_node = \
9791 make_or_reuse_sat_unsigned_ ## KIND ## _type \
9792 (GET_MODE_BITSIZE (U ## MODE ## mode));
9793
9794 /* Fixed-point type and mode nodes. */
9795 MAKE_FIXED_TYPE_NODE_FAMILY (fract, FRACT)
9796 MAKE_FIXED_TYPE_NODE_FAMILY (accum, ACCUM)
9797 MAKE_FIXED_MODE_NODE (fract, qq, QQ)
9798 MAKE_FIXED_MODE_NODE (fract, hq, HQ)
9799 MAKE_FIXED_MODE_NODE (fract, sq, SQ)
9800 MAKE_FIXED_MODE_NODE (fract, dq, DQ)
9801 MAKE_FIXED_MODE_NODE (fract, tq, TQ)
9802 MAKE_FIXED_MODE_NODE (accum, ha, HA)
9803 MAKE_FIXED_MODE_NODE (accum, sa, SA)
9804 MAKE_FIXED_MODE_NODE (accum, da, DA)
9805 MAKE_FIXED_MODE_NODE (accum, ta, TA)
9806
9807 {
9808 tree t = targetm.build_builtin_va_list ();
9809
9810 /* Many back-ends define record types without setting TYPE_NAME.
9811 If we copied the record type here, we'd keep the original
9812 record type without a name. This breaks name mangling. So,
9813 don't copy record types and let c_common_nodes_and_builtins()
9814 declare the type to be __builtin_va_list. */
9815 if (TREE_CODE (t) != RECORD_TYPE)
9816 t = build_variant_type_copy (t);
9817
9818 va_list_type_node = t;
9819 }
9820 }
9821
9822 /* Modify DECL for given flags.
9823 TM_PURE attribute is set only on types, so the function will modify
9824 DECL's type when ECF_TM_PURE is used. */
9825
9826 void
9827 set_call_expr_flags (tree decl, int flags)
9828 {
9829 if (flags & ECF_NOTHROW)
9830 TREE_NOTHROW (decl) = 1;
9831 if (flags & ECF_CONST)
9832 TREE_READONLY (decl) = 1;
9833 if (flags & ECF_PURE)
9834 DECL_PURE_P (decl) = 1;
9835 if (flags & ECF_LOOPING_CONST_OR_PURE)
9836 DECL_LOOPING_CONST_OR_PURE_P (decl) = 1;
9837 if (flags & ECF_NOVOPS)
9838 DECL_IS_NOVOPS (decl) = 1;
9839 if (flags & ECF_NORETURN)
9840 TREE_THIS_VOLATILE (decl) = 1;
9841 if (flags & ECF_MALLOC)
9842 DECL_IS_MALLOC (decl) = 1;
9843 if (flags & ECF_RETURNS_TWICE)
9844 DECL_IS_RETURNS_TWICE (decl) = 1;
9845 if (flags & ECF_LEAF)
9846 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("leaf"),
9847 NULL, DECL_ATTRIBUTES (decl));
9848 if ((flags & ECF_TM_PURE) && flag_tm)
9849 apply_tm_attr (decl, get_identifier ("transaction_pure"));
9850 /* Looping const or pure is implied by noreturn.
9851 There is currently no way to declare looping const or looping pure alone. */
9852 gcc_assert (!(flags & ECF_LOOPING_CONST_OR_PURE)
9853 || ((flags & ECF_NORETURN) && (flags & (ECF_CONST | ECF_PURE))));
9854 }
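/* Illustrative sketch (not part of GCC proper; FNDECL here stands for a
   hypothetical FUNCTION_DECL):

     set_call_expr_flags (fndecl, ECF_CONST | ECF_NOTHROW | ECF_LEAF);

   marks FNDECL as read-only (no side effects), non-throwing, and carrying
   the "leaf" attribute, mirroring what local_define_builtin does below.  */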
9855
9856
9857 /* A subroutine of build_common_builtin_nodes. Define a builtin function. */
9858
9859 static void
9860 local_define_builtin (const char *name, tree type, enum built_in_function code,
9861 const char *library_name, int ecf_flags)
9862 {
9863 tree decl;
9864
9865 decl = add_builtin_function (name, type, code, BUILT_IN_NORMAL,
9866 library_name, NULL_TREE);
9867 set_call_expr_flags (decl, ecf_flags);
9868
9869 set_builtin_decl (code, decl, true);
9870 }
9871
9872 /* Call this function after instantiating all builtins that the language
9873 front end cares about. This will build the rest of the builtins that
9874 are relied upon by the tree optimizers and the middle-end. */
9875
9876 void
9877 build_common_builtin_nodes (void)
9878 {
9879 tree tmp, ftype;
9880 int ecf_flags;
9881
9882 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE))
9883 {
9884 ftype = build_function_type (void_type_node, void_list_node);
9885 local_define_builtin ("__builtin_unreachable", ftype, BUILT_IN_UNREACHABLE,
9886 "__builtin_unreachable",
9887 ECF_NOTHROW | ECF_LEAF | ECF_NORETURN
9888 | ECF_CONST);
9889 }
9890
9891 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY)
9892 || !builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
9893 {
9894 ftype = build_function_type_list (ptr_type_node,
9895 ptr_type_node, const_ptr_type_node,
9896 size_type_node, NULL_TREE);
9897
9898 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY))
9899 local_define_builtin ("__builtin_memcpy", ftype, BUILT_IN_MEMCPY,
9900 "memcpy", ECF_NOTHROW | ECF_LEAF);
9901 if (!builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
9902 local_define_builtin ("__builtin_memmove", ftype, BUILT_IN_MEMMOVE,
9903 "memmove", ECF_NOTHROW | ECF_LEAF);
9904 }
9905
9906 if (!builtin_decl_explicit_p (BUILT_IN_MEMCMP))
9907 {
9908 ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
9909 const_ptr_type_node, size_type_node,
9910 NULL_TREE);
9911 local_define_builtin ("__builtin_memcmp", ftype, BUILT_IN_MEMCMP,
9912 "memcmp", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
9913 }
9914
9915 if (!builtin_decl_explicit_p (BUILT_IN_MEMSET))
9916 {
9917 ftype = build_function_type_list (ptr_type_node,
9918 ptr_type_node, integer_type_node,
9919 size_type_node, NULL_TREE);
9920 local_define_builtin ("__builtin_memset", ftype, BUILT_IN_MEMSET,
9921 "memset", ECF_NOTHROW | ECF_LEAF);
9922 }
9923
9924 if (!builtin_decl_explicit_p (BUILT_IN_ALLOCA))
9925 {
9926 ftype = build_function_type_list (ptr_type_node,
9927 size_type_node, NULL_TREE);
9928 local_define_builtin ("__builtin_alloca", ftype, BUILT_IN_ALLOCA,
9929 "alloca", ECF_MALLOC | ECF_NOTHROW | ECF_LEAF);
9930 }
9931
9932 ftype = build_function_type_list (ptr_type_node, size_type_node,
9933 size_type_node, NULL_TREE);
9934 local_define_builtin ("__builtin_alloca_with_align", ftype,
9935 BUILT_IN_ALLOCA_WITH_ALIGN, "alloca",
9936 ECF_MALLOC | ECF_NOTHROW | ECF_LEAF);
9937
9938 /* If we're checking the stack, `alloca' can throw. */
9939 if (flag_stack_check)
9940 {
9941 TREE_NOTHROW (builtin_decl_explicit (BUILT_IN_ALLOCA)) = 0;
9942 TREE_NOTHROW (builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN)) = 0;
9943 }
9944
9945 ftype = build_function_type_list (void_type_node,
9946 ptr_type_node, ptr_type_node,
9947 ptr_type_node, NULL_TREE);
9948 local_define_builtin ("__builtin_init_trampoline", ftype,
9949 BUILT_IN_INIT_TRAMPOLINE,
9950 "__builtin_init_trampoline", ECF_NOTHROW | ECF_LEAF);
9951 local_define_builtin ("__builtin_init_heap_trampoline", ftype,
9952 BUILT_IN_INIT_HEAP_TRAMPOLINE,
9953 "__builtin_init_heap_trampoline",
9954 ECF_NOTHROW | ECF_LEAF);
9955
9956 ftype = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE);
9957 local_define_builtin ("__builtin_adjust_trampoline", ftype,
9958 BUILT_IN_ADJUST_TRAMPOLINE,
9959 "__builtin_adjust_trampoline",
9960 ECF_CONST | ECF_NOTHROW);
9961
9962 ftype = build_function_type_list (void_type_node,
9963 ptr_type_node, ptr_type_node, NULL_TREE);
9964 local_define_builtin ("__builtin_nonlocal_goto", ftype,
9965 BUILT_IN_NONLOCAL_GOTO,
9966 "__builtin_nonlocal_goto",
9967 ECF_NORETURN | ECF_NOTHROW);
9968
9969 ftype = build_function_type_list (void_type_node,
9970 ptr_type_node, ptr_type_node, NULL_TREE);
9971 local_define_builtin ("__builtin_setjmp_setup", ftype,
9972 BUILT_IN_SETJMP_SETUP,
9973 "__builtin_setjmp_setup", ECF_NOTHROW);
9974
9975 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
9976 local_define_builtin ("__builtin_setjmp_receiver", ftype,
9977 BUILT_IN_SETJMP_RECEIVER,
9978 "__builtin_setjmp_receiver", ECF_NOTHROW | ECF_LEAF);
9979
9980 ftype = build_function_type_list (ptr_type_node, NULL_TREE);
9981 local_define_builtin ("__builtin_stack_save", ftype, BUILT_IN_STACK_SAVE,
9982 "__builtin_stack_save", ECF_NOTHROW | ECF_LEAF);
9983
9984 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
9985 local_define_builtin ("__builtin_stack_restore", ftype,
9986 BUILT_IN_STACK_RESTORE,
9987 "__builtin_stack_restore", ECF_NOTHROW | ECF_LEAF);
9988
9989 /* If there's a possibility that we might use the ARM EABI, build the
9990 alternate __cxa_end_cleanup node used to resume from C++ and Java. */
9991 if (targetm.arm_eabi_unwinder)
9992 {
9993 ftype = build_function_type_list (void_type_node, NULL_TREE);
9994 local_define_builtin ("__builtin_cxa_end_cleanup", ftype,
9995 BUILT_IN_CXA_END_CLEANUP,
9996 "__cxa_end_cleanup", ECF_NORETURN | ECF_LEAF);
9997 }
9998
9999 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10000 local_define_builtin ("__builtin_unwind_resume", ftype,
10001 BUILT_IN_UNWIND_RESUME,
10002 ((targetm_common.except_unwind_info (&global_options)
10003 == UI_SJLJ)
10004 ? "_Unwind_SjLj_Resume" : "_Unwind_Resume"),
10005 ECF_NORETURN);
10006
10007 if (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS) == NULL_TREE)
10008 {
10009 ftype = build_function_type_list (ptr_type_node, integer_type_node,
10010 NULL_TREE);
10011 local_define_builtin ("__builtin_return_address", ftype,
10012 BUILT_IN_RETURN_ADDRESS,
10013 "__builtin_return_address",
10014 ECF_NOTHROW);
10015 }
10016
10017 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER)
10018 || !builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10019 {
10020 ftype = build_function_type_list (void_type_node, ptr_type_node,
10021 ptr_type_node, NULL_TREE);
10022 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER))
10023 local_define_builtin ("__cyg_profile_func_enter", ftype,
10024 BUILT_IN_PROFILE_FUNC_ENTER,
10025 "__cyg_profile_func_enter", 0);
10026 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10027 local_define_builtin ("__cyg_profile_func_exit", ftype,
10028 BUILT_IN_PROFILE_FUNC_EXIT,
10029 "__cyg_profile_func_exit", 0);
10030 }
10031
10032 /* The exception object and filter values from the runtime. The argument
10033 must be zero before exception lowering, i.e. from the front end. After
10034 exception lowering, it will be the region number for the exception
10035 landing pad. These functions are PURE instead of CONST to prevent
10036 them from being hoisted past the exception edge that will initialize
10037 its value in the landing pad. */
10038 ftype = build_function_type_list (ptr_type_node,
10039 integer_type_node, NULL_TREE);
10040 ecf_flags = ECF_PURE | ECF_NOTHROW | ECF_LEAF;
10041 /* Only use TM_PURE if we have TM language support. */
10042 if (builtin_decl_explicit_p (BUILT_IN_TM_LOAD_1))
10043 ecf_flags |= ECF_TM_PURE;
10044 local_define_builtin ("__builtin_eh_pointer", ftype, BUILT_IN_EH_POINTER,
10045 "__builtin_eh_pointer", ecf_flags);
10046
10047 tmp = lang_hooks.types.type_for_mode (targetm.eh_return_filter_mode (), 0);
10048 ftype = build_function_type_list (tmp, integer_type_node, NULL_TREE);
10049 local_define_builtin ("__builtin_eh_filter", ftype, BUILT_IN_EH_FILTER,
10050 "__builtin_eh_filter", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10051
10052 ftype = build_function_type_list (void_type_node,
10053 integer_type_node, integer_type_node,
10054 NULL_TREE);
10055 local_define_builtin ("__builtin_eh_copy_values", ftype,
10056 BUILT_IN_EH_COPY_VALUES,
10057 "__builtin_eh_copy_values", ECF_NOTHROW);
10058
10059 /* Complex multiplication and division. These are handled as builtins
10060 rather than optabs because emit_library_call_value doesn't support
10061 complex. Further, we can do slightly better with folding these
10062 beasties if the real and imaginary parts of the arguments are separate. */
10063 {
10064 int mode;
10065
10066 for (mode = MIN_MODE_COMPLEX_FLOAT; mode <= MAX_MODE_COMPLEX_FLOAT; ++mode)
10067 {
10068 char mode_name_buf[4], *q;
10069 const char *p;
10070 enum built_in_function mcode, dcode;
10071 tree type, inner_type;
10072 const char *prefix = "__";
10073
10074 if (targetm.libfunc_gnu_prefix)
10075 prefix = "__gnu_";
10076
10077 type = lang_hooks.types.type_for_mode ((enum machine_mode) mode, 0);
10078 if (type == NULL)
10079 continue;
10080 inner_type = TREE_TYPE (type);
10081
10082 ftype = build_function_type_list (type, inner_type, inner_type,
10083 inner_type, inner_type, NULL_TREE);
10084
10085 mcode = ((enum built_in_function)
10086 (BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10087 dcode = ((enum built_in_function)
10088 (BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10089
10090 for (p = GET_MODE_NAME (mode), q = mode_name_buf; *p; p++, q++)
10091 *q = TOLOWER (*p);
10092 *q = '\0';
10093
10094 built_in_names[mcode] = concat (prefix, "mul", mode_name_buf, "3",
10095 NULL);
10096 local_define_builtin (built_in_names[mcode], ftype, mcode,
10097 built_in_names[mcode],
10098 ECF_CONST | ECF_NOTHROW | ECF_LEAF);
10099
10100 built_in_names[dcode] = concat (prefix, "div", mode_name_buf, "3",
10101 NULL);
10102 local_define_builtin (built_in_names[dcode], ftype, dcode,
10103 built_in_names[dcode],
10104 ECF_CONST | ECF_NOTHROW | ECF_LEAF);
10105 }
10106 }
10107 }
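/* For example, when the complex mode is SCmode and targetm.libfunc_gnu_prefix
   is false, the loop above registers "__mulsc3" and "__divsc3"; with the GNU
   prefix it registers "__gnu_mulsc3" and "__gnu_divsc3" instead.  (This is a
   sketch of the naming scheme only; the set of modes depends on the target.)  */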
10108
10109 /* HACK. GROSS. This is absolutely disgusting. I wish there was a
10110 better way.
10111
10112 If we requested a pointer to a vector, build up the pointers that
10113 we stripped off while looking for the inner type. Similarly for
10114 return values from functions.
10115
10116 The argument TYPE is the top of the chain, and BOTTOM is the
10117 new type which we will point to. */
10118
10119 tree
10120 reconstruct_complex_type (tree type, tree bottom)
10121 {
10122 tree inner, outer;
10123
10124 if (TREE_CODE (type) == POINTER_TYPE)
10125 {
10126 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10127 outer = build_pointer_type_for_mode (inner, TYPE_MODE (type),
10128 TYPE_REF_CAN_ALIAS_ALL (type));
10129 }
10130 else if (TREE_CODE (type) == REFERENCE_TYPE)
10131 {
10132 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10133 outer = build_reference_type_for_mode (inner, TYPE_MODE (type),
10134 TYPE_REF_CAN_ALIAS_ALL (type));
10135 }
10136 else if (TREE_CODE (type) == ARRAY_TYPE)
10137 {
10138 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10139 outer = build_array_type (inner, TYPE_DOMAIN (type));
10140 }
10141 else if (TREE_CODE (type) == FUNCTION_TYPE)
10142 {
10143 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10144 outer = build_function_type (inner, TYPE_ARG_TYPES (type));
10145 }
10146 else if (TREE_CODE (type) == METHOD_TYPE)
10147 {
10148 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10149 /* The build_method_type_directly() routine prepends 'this' to the argument list,
10150 so we must compensate by getting rid of it. */
10151 outer
10152 = build_method_type_directly
10153 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (type))),
10154 inner,
10155 TREE_CHAIN (TYPE_ARG_TYPES (type)));
10156 }
10157 else if (TREE_CODE (type) == OFFSET_TYPE)
10158 {
10159 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10160 outer = build_offset_type (TYPE_OFFSET_BASETYPE (type), inner);
10161 }
10162 else
10163 return bottom;
10164
10165 return build_type_attribute_qual_variant (outer, TYPE_ATTRIBUTES (type),
10166 TYPE_QUALS (type));
10167 }
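/* Illustrative sketch (not part of GCC proper): given TYPE equivalent to
   "float **" and BOTTOM a V4SF vector type, the recursion strips the two
   pointer layers, substitutes BOTTOM at the innermost level, and rebuilds
   the pointers, yielding a type equivalent to "V4SF **" while carrying over
   each layer's qualifiers and attributes.  */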
10168
10169 /* Returns a vector tree node given a mode (integer, vector, or BLKmode) and
10170 the inner type. */
10171 tree
10172 build_vector_type_for_mode (tree innertype, enum machine_mode mode)
10173 {
10174 int nunits;
10175
10176 switch (GET_MODE_CLASS (mode))
10177 {
10178 case MODE_VECTOR_INT:
10179 case MODE_VECTOR_FLOAT:
10180 case MODE_VECTOR_FRACT:
10181 case MODE_VECTOR_UFRACT:
10182 case MODE_VECTOR_ACCUM:
10183 case MODE_VECTOR_UACCUM:
10184 nunits = GET_MODE_NUNITS (mode);
10185 break;
10186
10187 case MODE_INT:
10188 /* Check that there are no leftover bits. */
10189 gcc_assert (GET_MODE_BITSIZE (mode)
10190 % TREE_INT_CST_LOW (TYPE_SIZE (innertype)) == 0);
10191
10192 nunits = GET_MODE_BITSIZE (mode)
10193 / TREE_INT_CST_LOW (TYPE_SIZE (innertype));
10194 break;
10195
10196 default:
10197 gcc_unreachable ();
10198 }
10199
10200 return make_vector_type (innertype, nunits, mode);
10201 }
10202
10203 /* Similarly, but takes the inner type and number of units, which must be
10204 a power of two. */
10205
10206 tree
10207 build_vector_type (tree innertype, int nunits)
10208 {
10209 return make_vector_type (innertype, nunits, VOIDmode);
10210 }
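/* Illustrative sketch (not part of GCC proper): a front end can ask for a
   4-element vector of 'int' with

     tree v4si_type = build_vector_type (integer_type_node, 4);

   Passing VOIDmode lets make_vector_type and layout_type choose the machine
   mode, whereas build_vector_type_for_mode pins the mode explicitly.  */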
10211
10212 /* Similarly, but builds a variant type with TYPE_VECTOR_OPAQUE set. */
10213
10214 tree
10215 build_opaque_vector_type (tree innertype, int nunits)
10216 {
10217 tree t = make_vector_type (innertype, nunits, VOIDmode);
10218 tree cand;
10219 /* We always build the non-opaque variant before the opaque one,
10220 so if it already exists, it is TYPE_NEXT_VARIANT of this one. */
10221 cand = TYPE_NEXT_VARIANT (t);
10222 if (cand
10223 && TYPE_VECTOR_OPAQUE (cand)
10224 && check_qualified_type (cand, t, TYPE_QUALS (t)))
10225 return cand;
10226 /* Otherwise build a variant type and make sure to queue it after
10227 the non-opaque type. */
10228 cand = build_distinct_type_copy (t);
10229 TYPE_VECTOR_OPAQUE (cand) = true;
10230 TYPE_CANONICAL (cand) = TYPE_CANONICAL (t);
10231 TYPE_NEXT_VARIANT (cand) = TYPE_NEXT_VARIANT (t);
10232 TYPE_NEXT_VARIANT (t) = cand;
10233 TYPE_MAIN_VARIANT (cand) = TYPE_MAIN_VARIANT (t);
10234 return cand;
10235 }
10236
10237
10238 /* Given an initializer INIT, return TRUE if INIT is zero or some
10239 aggregate of zeros. Otherwise return FALSE. */
10240 bool
10241 initializer_zerop (const_tree init)
10242 {
10243 tree elt;
10244
10245 STRIP_NOPS (init);
10246
10247 switch (TREE_CODE (init))
10248 {
10249 case INTEGER_CST:
10250 return integer_zerop (init);
10251
10252 case REAL_CST:
10253 /* ??? Note that this is not correct for C4X float formats. There,
10254 a bit pattern of all zeros is 1.0; 0.0 is encoded with the most
10255 negative exponent. */
10256 return real_zerop (init)
10257 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (init));
10258
10259 case FIXED_CST:
10260 return fixed_zerop (init);
10261
10262 case COMPLEX_CST:
10263 return integer_zerop (init)
10264 || (real_zerop (init)
10265 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_REALPART (init)))
10266 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_IMAGPART (init))));
10267
10268 case VECTOR_CST:
10269 {
10270 unsigned i;
10271 for (i = 0; i < VECTOR_CST_NELTS (init); ++i)
10272 if (!initializer_zerop (VECTOR_CST_ELT (init, i)))
10273 return false;
10274 return true;
10275 }
10276
10277 case CONSTRUCTOR:
10278 {
10279 unsigned HOST_WIDE_INT idx;
10280
10281 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init), idx, elt)
10282 if (!initializer_zerop (elt))
10283 return false;
10284 return true;
10285 }
10286
10287 case STRING_CST:
10288 {
10289 int i;
10290
10291 /* We need to loop through all elements to handle cases like
10292 "\0" and "\0foobar". */
10293 for (i = 0; i < TREE_STRING_LENGTH (init); ++i)
10294 if (TREE_STRING_POINTER (init)[i] != '\0')
10295 return false;
10296
10297 return true;
10298 }
10299
10300 default:
10301 return false;
10302 }
10303 }
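/* Illustrative sketch (not part of GCC proper):

     initializer_zerop (build_int_cst (integer_type_node, 0))  => true
     initializer_zerop (a STRING_CST containing only '\0's)    => true
     initializer_zerop (a REAL_CST holding -0.0)               => false

   Negative zero is deliberately rejected because its bit pattern is not
   all zeros.  */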
10304
10305 /* Check if vector VEC consists of all equal elements and
10306 that the number of elements corresponds to the type of VEC.
10307 The function returns the first element of the vector
10308 or NULL_TREE if the vector is not uniform. */
10309 tree
10310 uniform_vector_p (const_tree vec)
10311 {
10312 tree first, t;
10313 unsigned i;
10314
10315 if (vec == NULL_TREE)
10316 return NULL_TREE;
10317
10318 gcc_assert (VECTOR_TYPE_P (TREE_TYPE (vec)));
10319
10320 if (TREE_CODE (vec) == VECTOR_CST)
10321 {
10322 first = VECTOR_CST_ELT (vec, 0);
10323 for (i = 1; i < VECTOR_CST_NELTS (vec); ++i)
10324 if (!operand_equal_p (first, VECTOR_CST_ELT (vec, i), 0))
10325 return NULL_TREE;
10326
10327 return first;
10328 }
10329
10330 else if (TREE_CODE (vec) == CONSTRUCTOR)
10331 {
10332 first = error_mark_node;
10333
10334 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (vec), i, t)
10335 {
10336 if (i == 0)
10337 {
10338 first = t;
10339 continue;
10340 }
10341 if (!operand_equal_p (first, t, 0))
10342 return NULL_TREE;
10343 }
10344 if (i != TYPE_VECTOR_SUBPARTS (TREE_TYPE (vec)))
10345 return NULL_TREE;
10346
10347 return first;
10348 }
10349
10350 return NULL_TREE;
10351 }
10352
10353 /* Build an empty statement at location LOC. */
10354
10355 tree
10356 build_empty_stmt (location_t loc)
10357 {
10358 tree t = build1 (NOP_EXPR, void_type_node, size_zero_node);
10359 SET_EXPR_LOCATION (t, loc);
10360 return t;
10361 }
10362
10363
10364 /* Build an OpenMP clause with code CODE. LOC is the location of the
10365 clause. */
10366
10367 tree
10368 build_omp_clause (location_t loc, enum omp_clause_code code)
10369 {
10370 tree t;
10371 int size, length;
10372
10373 length = omp_clause_num_ops[code];
10374 size = (sizeof (struct tree_omp_clause) + (length - 1) * sizeof (tree));
10375
10376 record_node_allocation_statistics (OMP_CLAUSE, size);
10377
10378 t = (tree) ggc_internal_alloc (size);
10379 memset (t, 0, size);
10380 TREE_SET_CODE (t, OMP_CLAUSE);
10381 OMP_CLAUSE_SET_CODE (t, code);
10382 OMP_CLAUSE_LOCATION (t) = loc;
10383
10384 return t;
10385 }
10386
10387 /* Build a tcc_vl_exp object with code CODE and room for LEN operands. LEN
10388 includes the implicit operand count in TREE_OPERAND 0, and so must be >= 1.
10389 Except for the CODE and operand count field, other storage for the
10390 object is initialized to zeros. */
10391
10392 tree
10393 build_vl_exp_stat (enum tree_code code, int len MEM_STAT_DECL)
10394 {
10395 tree t;
10396 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_exp);
10397
10398 gcc_assert (TREE_CODE_CLASS (code) == tcc_vl_exp);
10399 gcc_assert (len >= 1);
10400
10401 record_node_allocation_statistics (code, length);
10402
10403 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
10404
10405 TREE_SET_CODE (t, code);
10406
10407 /* Can't use TREE_OPERAND to store the length because if checking is
10408 enabled, it will try to check the length before we store it. :-P */
10409 t->exp.operands[0] = build_int_cst (sizetype, len);
10410
10411 return t;
10412 }
10413
10414 /* Helper function for build_call_* functions; build a CALL_EXPR with
10415 indicated RETURN_TYPE, FN, and NARGS, but do not initialize any of
10416 the argument slots. */
10417
10418 static tree
10419 build_call_1 (tree return_type, tree fn, int nargs)
10420 {
10421 tree t;
10422
10423 t = build_vl_exp (CALL_EXPR, nargs + 3);
10424 TREE_TYPE (t) = return_type;
10425 CALL_EXPR_FN (t) = fn;
10426 CALL_EXPR_STATIC_CHAIN (t) = NULL;
10427
10428 return t;
10429 }
10430
10431 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10432 FN and a null static chain slot. NARGS is the number of call arguments
10433 which are specified as "..." arguments. */
10434
10435 tree
10436 build_call_nary (tree return_type, tree fn, int nargs, ...)
10437 {
10438 tree ret;
10439 va_list args;
10440 va_start (args, nargs);
10441 ret = build_call_valist (return_type, fn, nargs, args);
10442 va_end (args);
10443 return ret;
10444 }
10445
10446 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10447 FN and a null static chain slot. NARGS is the number of call arguments
10448 which are specified as a va_list ARGS. */
10449
10450 tree
10451 build_call_valist (tree return_type, tree fn, int nargs, va_list args)
10452 {
10453 tree t;
10454 int i;
10455
10456 t = build_call_1 (return_type, fn, nargs);
10457 for (i = 0; i < nargs; i++)
10458 CALL_EXPR_ARG (t, i) = va_arg (args, tree);
10459 process_call_operands (t);
10460 return t;
10461 }
10462
10463 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10464 FN and a null static chain slot. NARGS is the number of call arguments
10465 which are specified as a tree array ARGS. */
10466
10467 tree
10468 build_call_array_loc (location_t loc, tree return_type, tree fn,
10469 int nargs, const tree *args)
10470 {
10471 tree t;
10472 int i;
10473
10474 t = build_call_1 (return_type, fn, nargs);
10475 for (i = 0; i < nargs; i++)
10476 CALL_EXPR_ARG (t, i) = args[i];
10477 process_call_operands (t);
10478 SET_EXPR_LOCATION (t, loc);
10479 return t;
10480 }
10481
10482 /* Like build_call_array, but takes a vec. */
10483
10484 tree
10485 build_call_vec (tree return_type, tree fn, vec<tree, va_gc> *args)
10486 {
10487 tree ret, t;
10488 unsigned int ix;
10489
10490 ret = build_call_1 (return_type, fn, vec_safe_length (args));
10491 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
10492 CALL_EXPR_ARG (ret, ix) = t;
10493 process_call_operands (ret);
10494 return ret;
10495 }
10496
10497 /* Conveniently construct a function call expression. FNDECL names the
10498 function to be called and N arguments are passed in the array
10499 ARGARRAY. */
10500
10501 tree
10502 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
10503 {
10504 tree fntype = TREE_TYPE (fndecl);
10505 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10506
10507 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
10508 }
10509
10510 /* Conveniently construct a function call expression. FNDECL names the
10511 function to be called and the arguments are passed in the vector
10512 VEC. */
10513
10514 tree
10515 build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
10516 {
10517 return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
10518 vec_safe_address (vec));
10519 }
10520
10521
10522 /* Conveniently construct a function call expression. FNDECL names the
10523 function to be called, N is the number of arguments, and the "..."
10524 parameters are the argument expressions. */
10525
10526 tree
10527 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
10528 {
10529 va_list ap;
10530 tree *argarray = XALLOCAVEC (tree, n);
10531 int i;
10532
10533 va_start (ap, n);
10534 for (i = 0; i < n; i++)
10535 argarray[i] = va_arg (ap, tree);
10536 va_end (ap);
10537 return build_call_expr_loc_array (loc, fndecl, n, argarray);
10538 }
10539
10540 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
10541 varargs macros aren't supported by all bootstrap compilers. */
10542
10543 tree
10544 build_call_expr (tree fndecl, int n, ...)
10545 {
10546 va_list ap;
10547 tree *argarray = XALLOCAVEC (tree, n);
10548 int i;
10549
10550 va_start (ap, n);
10551 for (i = 0; i < n; i++)
10552 argarray[i] = va_arg (ap, tree);
10553 va_end (ap);
10554 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
10555 }
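/* Illustrative sketch (not part of GCC proper; DST, SRC and LEN stand for
   hypothetical trees of the expected types):

     tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
     tree call = build_call_expr (fn, 3, dst, src, len);

   This folds the call where possible via fold_builtin_call_array and
   otherwise yields a CALL_EXPR at UNKNOWN_LOCATION.  */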
10556
10557 /* Build an internal call expression. This is just like CALL_EXPR, except
10558 its CALL_EXPR_FN is NULL. It will get gimplified later into an ordinary
10559 internal function call. */
10560
10561 tree
10562 build_call_expr_internal_loc (location_t loc, enum internal_fn ifn,
10563 tree type, int n, ...)
10564 {
10565 va_list ap;
10566 int i;
10567
10568 tree fn = build_call_1 (type, NULL_TREE, n);
10569 va_start (ap, n);
10570 for (i = 0; i < n; i++)
10571 CALL_EXPR_ARG (fn, i) = va_arg (ap, tree);
10572 va_end (ap);
10573 SET_EXPR_LOCATION (fn, loc);
10574 CALL_EXPR_IFN (fn) = ifn;
10575 return fn;
10576 }
10577
10578 /* Create a new constant string literal and return a char* pointer to it.
10579 The STRING_CST value is the LEN characters at STR. */
10580 tree
10581 build_string_literal (int len, const char *str)
10582 {
10583 tree t, elem, index, type;
10584
10585 t = build_string (len, str);
10586 elem = build_type_variant (char_type_node, 1, 0);
10587 index = build_index_type (size_int (len - 1));
10588 type = build_array_type (elem, index);
10589 TREE_TYPE (t) = type;
10590 TREE_CONSTANT (t) = 1;
10591 TREE_READONLY (t) = 1;
10592 TREE_STATIC (t) = 1;
10593
10594 type = build_pointer_type (elem);
10595 t = build1 (ADDR_EXPR, type,
10596 build4 (ARRAY_REF, elem,
10597 t, integer_zero_node, NULL_TREE, NULL_TREE));
10598 return t;
10599 }
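/* Illustrative sketch (not part of GCC proper): build a '\0'-terminated
   string argument, e.g. for a call to a printing builtin:

     tree msg = build_string_literal (sizeof ("hi"), "hi");

   MSG is an ADDR_EXPR whose type is pointer-to-const-char, pointing at the
   first character of the underlying STRING_CST.  */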
10600
10601
10602
10603 /* Return true if T (assumed to be a DECL) must be assigned a memory
10604 location. */
10605
10606 bool
10607 needs_to_live_in_memory (const_tree t)
10608 {
10609 return (TREE_ADDRESSABLE (t)
10610 || is_global_var (t)
10611 || (TREE_CODE (t) == RESULT_DECL
10612 && !DECL_BY_REFERENCE (t)
10613 && aggregate_value_p (t, current_function_decl)));
10614 }
10615
10616 /* Return the value of constant X, sign-extended. */
10617
10618 HOST_WIDE_INT
10619 int_cst_value (const_tree x)
10620 {
10621 unsigned bits = TYPE_PRECISION (TREE_TYPE (x));
10622 unsigned HOST_WIDE_INT val = TREE_INT_CST_LOW (x);
10623
10624 /* Make sure the sign-extended value will fit in a HOST_WIDE_INT. */
10625 gcc_assert (cst_and_fits_in_hwi (x));
10626
10627 if (bits < HOST_BITS_PER_WIDE_INT)
10628 {
10629 bool negative = ((val >> (bits - 1)) & 1) != 0;
10630 if (negative)
10631 val |= (~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1;
10632 else
10633 val &= ~((~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1);
10634 }
10635
10636 return val;
10637 }
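/* Worked example: for an INTEGER_CST of a signed 8-bit type whose low word
   is 0xff, the precision (8) is below HOST_BITS_PER_WIDE_INT and the top
   bit is set, so the value is sign-extended and int_cst_value returns -1.  */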
10638
10639 /* If TYPE is an integral or pointer type, return an integer type with
10640 the same precision which is unsigned iff UNSIGNEDP is true, or itself
10641 if TYPE is already an integer type of signedness UNSIGNEDP. */
10642
10643 tree
10644 signed_or_unsigned_type_for (int unsignedp, tree type)
10645 {
10646 if (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type) == unsignedp)
10647 return type;
10648
10649 if (TREE_CODE (type) == VECTOR_TYPE)
10650 {
10651 tree inner = TREE_TYPE (type);
10652 tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
10653 if (!inner2)
10654 return NULL_TREE;
10655 if (inner == inner2)
10656 return type;
10657 return build_vector_type (inner2, TYPE_VECTOR_SUBPARTS (type));
10658 }
10659
10660 if (!INTEGRAL_TYPE_P (type)
10661 && !POINTER_TYPE_P (type)
10662 && TREE_CODE (type) != OFFSET_TYPE)
10663 return NULL_TREE;
10664
10665 return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp);
10666 }
10667
10668 /* If TYPE is an integral or pointer type, return an integer type with
10669 the same precision which is unsigned, or itself if TYPE is already an
10670 unsigned integer type. */
10671
10672 tree
10673 unsigned_type_for (tree type)
10674 {
10675 return signed_or_unsigned_type_for (1, type);
10676 }
10677
10678 /* If TYPE is an integral or pointer type, return an integer type with
10679 the same precision which is signed, or itself if TYPE is already a
10680 signed integer type. */
10681
10682 tree
10683 signed_type_for (tree type)
10684 {
10685 return signed_or_unsigned_type_for (0, type);
10686 }
10687
10688 /* If TYPE is a vector type, return a signed integer vector type with the
10689 same width and number of subparts. Otherwise return boolean_type_node. */
10690
10691 tree
10692 truth_type_for (tree type)
10693 {
10694 if (TREE_CODE (type) == VECTOR_TYPE)
10695 {
10696 tree elem = lang_hooks.types.type_for_size
10697 (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))), 0);
10698 return build_opaque_vector_type (elem, TYPE_VECTOR_SUBPARTS (type));
10699 }
10700 else
10701 return boolean_type_node;
10702 }
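/* Illustrative sketch (not part of GCC proper): for a V4SF vector type the
   result is an opaque vector of four 32-bit signed integers, a suitable
   mask type for vector comparisons; for any scalar type the result is
   simply boolean_type_node.  */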
10703
10704 /* Returns the largest value obtainable by casting something in INNER type to
10705 OUTER type. */
10706
10707 tree
10708 upper_bound_in_type (tree outer, tree inner)
10709 {
10710 unsigned int det = 0;
10711 unsigned oprec = TYPE_PRECISION (outer);
10712 unsigned iprec = TYPE_PRECISION (inner);
10713 unsigned prec;
10714
10715 /* Compute a unique number for every combination. */
10716 det |= (oprec > iprec) ? 4 : 0;
10717 det |= TYPE_UNSIGNED (outer) ? 2 : 0;
10718 det |= TYPE_UNSIGNED (inner) ? 1 : 0;
10719
10720 /* Determine the exponent to use. */
10721 switch (det)
10722 {
10723 case 0:
10724 case 1:
10725 /* oprec <= iprec, outer: signed, inner: don't care. */
10726 prec = oprec - 1;
10727 break;
10728 case 2:
10729 case 3:
10730 /* oprec <= iprec, outer: unsigned, inner: don't care. */
10731 prec = oprec;
10732 break;
10733 case 4:
10734 /* oprec > iprec, outer: signed, inner: signed. */
10735 prec = iprec - 1;
10736 break;
10737 case 5:
10738 /* oprec > iprec, outer: signed, inner: unsigned. */
10739 prec = iprec;
10740 break;
10741 case 6:
10742 /* oprec > iprec, outer: unsigned, inner: signed. */
10743 prec = oprec;
10744 break;
10745 case 7:
10746 /* oprec > iprec, outer: unsigned, inner: unsigned. */
10747 prec = iprec;
10748 break;
10749 default:
10750 gcc_unreachable ();
10751 }
10752
10753 return wide_int_to_tree (outer,
10754 wi::mask (prec, false, TYPE_PRECISION (outer)));
10755 }
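/* Worked example: casting from 'short' (iprec == 16, signed) to 'int'
   (oprec == 32, signed) gives det == 4, so prec == iprec - 1 == 15 and the
   upper bound is 2^15 - 1 == 32767.  Casting from 'signed char' to
   'unsigned int' gives det == 6 and prec == oprec, i.e. an upper bound of
   0xffffffff, since e.g. (unsigned int) (signed char) -1 is UINT_MAX.  */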
10756
10757 /* Returns the smallest value obtainable by casting something in INNER type to
10758 OUTER type. */
10759
10760 tree
10761 lower_bound_in_type (tree outer, tree inner)
10762 {
10763 unsigned oprec = TYPE_PRECISION (outer);
10764 unsigned iprec = TYPE_PRECISION (inner);
10765
10766 /* If OUTER type is unsigned, we can definitely cast 0 to OUTER type
10767 and obtain 0. */
10768 if (TYPE_UNSIGNED (outer)
10769 /* If we are widening something of an unsigned type, OUTER type
10770 contains all values of INNER type. In particular, both INNER
10771 and OUTER types have zero in common. */
10772 || (oprec > iprec && TYPE_UNSIGNED (inner)))
10773 return build_int_cst (outer, 0);
10774 else
10775 {
10776 /* If we are widening a signed type to another signed type, we
10777 want to obtain -2^^(iprec-1). If we are keeping the
10778 precision or narrowing to a signed type, we want to obtain
10779 -2^(oprec-1). */
10780 unsigned prec = oprec > iprec ? iprec : oprec;
10781 return wide_int_to_tree (outer,
10782 wi::mask (prec - 1, true,
10783 TYPE_PRECISION (outer)));
10784 }
10785 }
10786
10787 /* Return nonzero if two operands that are suitable for PHI nodes are
10788 necessarily equal. Specifically, both ARG0 and ARG1 must be either
10789 SSA_NAME or invariant. Note that this is strictly an optimization.
10790 That is, callers of this function can directly call operand_equal_p
10791 and get the same result, only slower. */
10792
10793 int
10794 operand_equal_for_phi_arg_p (const_tree arg0, const_tree arg1)
10795 {
10796 if (arg0 == arg1)
10797 return 1;
10798 if (TREE_CODE (arg0) == SSA_NAME || TREE_CODE (arg1) == SSA_NAME)
10799 return 0;
10800 return operand_equal_p (arg0, arg1, 0);
10801 }
10802
10803 /* Returns number of zeros at the end of binary representation of X. */
10804
10805 tree
10806 num_ending_zeros (const_tree x)
10807 {
10808 return build_int_cst (TREE_TYPE (x), wi::ctz (x));
10809 }
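/* For instance, for an INTEGER_CST holding 40 (binary 101000) this returns
   an INTEGER_CST of the same type holding 3.  */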
10810
10811
10812 #define WALK_SUBTREE(NODE) \
10813 do \
10814 { \
10815 result = walk_tree_1 (&(NODE), func, data, pset, lh); \
10816 if (result) \
10817 return result; \
10818 } \
10819 while (0)
10820
10821 /* This is a subroutine of walk_tree that walks the fields of TYPE that are
10822 to be walked whenever a type is seen in the tree. The rest of the operands
10823 and the return value are as for walk_tree. */
10824
10825 static tree
10826 walk_type_fields (tree type, walk_tree_fn func, void *data,
10827 struct pointer_set_t *pset, walk_tree_lh lh)
10828 {
10829 tree result = NULL_TREE;
10830
10831 switch (TREE_CODE (type))
10832 {
10833 case POINTER_TYPE:
10834 case REFERENCE_TYPE:
10835 case VECTOR_TYPE:
10836 /* We have to worry about mutually recursive pointers. These can't
10837 be written in C. They can in Ada. It's pathological, but
10838 there's an ACATS test (c38102a) that checks it. Deal with this
10839 by checking if we're pointing to another pointer, that one
10840 points to another pointer, that one does too, and we have no htab.
10841 If so, get a hash table. We check three levels deep to avoid
10842 the cost of the hash table if we don't need one. */
10843 if (POINTER_TYPE_P (TREE_TYPE (type))
10844 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type)))
10845 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type))))
10846 && !pset)
10847 {
10848 result = walk_tree_without_duplicates (&TREE_TYPE (type),
10849 func, data);
10850 if (result)
10851 return result;
10852
10853 break;
10854 }
10855
10856 /* ... fall through ... */
10857
10858 case COMPLEX_TYPE:
10859 WALK_SUBTREE (TREE_TYPE (type));
10860 break;
10861
10862 case METHOD_TYPE:
10863 WALK_SUBTREE (TYPE_METHOD_BASETYPE (type));
10864
10865 /* Fall through. */
10866
10867 case FUNCTION_TYPE:
10868 WALK_SUBTREE (TREE_TYPE (type));
10869 {
10870 tree arg;
10871
10872 /* We never want to walk into default arguments. */
10873 for (arg = TYPE_ARG_TYPES (type); arg; arg = TREE_CHAIN (arg))
10874 WALK_SUBTREE (TREE_VALUE (arg));
10875 }
10876 break;
10877
10878 case ARRAY_TYPE:
10879 /* Don't follow this nodes's type if a pointer for fear that
10880 we'll have infinite recursion. If we have a PSET, then we
10881 need not fear. */
10882 if (pset
10883 || (!POINTER_TYPE_P (TREE_TYPE (type))
10884 && TREE_CODE (TREE_TYPE (type)) != OFFSET_TYPE))
10885 WALK_SUBTREE (TREE_TYPE (type));
10886 WALK_SUBTREE (TYPE_DOMAIN (type));
10887 break;
10888
10889 case OFFSET_TYPE:
10890 WALK_SUBTREE (TREE_TYPE (type));
10891 WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type));
10892 break;
10893
10894 default:
10895 break;
10896 }
10897
10898 return NULL_TREE;
10899 }
10900
10901 /* Apply FUNC to all the sub-trees of TP in a pre-order traversal. FUNC is
10902 called with the DATA and the address of each sub-tree. If FUNC returns a
10903 non-NULL value, the traversal is stopped, and the value returned by FUNC
10904 is returned. If PSET is non-NULL it is used to record the nodes visited,
10905 and to avoid visiting a node more than once. */
10906
10907 tree
10908 walk_tree_1 (tree *tp, walk_tree_fn func, void *data,
10909 struct pointer_set_t *pset, walk_tree_lh lh)
10910 {
10911 enum tree_code code;
10912 int walk_subtrees;
10913 tree result;
10914
10915 #define WALK_SUBTREE_TAIL(NODE) \
10916 do \
10917 { \
10918 tp = & (NODE); \
10919 goto tail_recurse; \
10920 } \
10921 while (0)
10922
10923 tail_recurse:
10924 /* Skip empty subtrees. */
10925 if (!*tp)
10926 return NULL_TREE;
10927
10928 /* Don't walk the same tree twice, if the user has requested
10929 that we avoid doing so. */
10930 if (pset && pointer_set_insert (pset, *tp))
10931 return NULL_TREE;
10932
10933 /* Call the function. */
10934 walk_subtrees = 1;
10935 result = (*func) (tp, &walk_subtrees, data);
10936
10937 /* If we found something, return it. */
10938 if (result)
10939 return result;
10940
10941 code = TREE_CODE (*tp);
10942
10943 /* Even if we didn't, FUNC may have decided that there was nothing
10944 interesting below this point in the tree. */
10945 if (!walk_subtrees)
10946 {
10947 /* But we still need to check our siblings. */
10948 if (code == TREE_LIST)
10949 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
10950 else if (code == OMP_CLAUSE)
10951 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
10952 else
10953 return NULL_TREE;
10954 }
10955
10956 if (lh)
10957 {
10958 result = (*lh) (tp, &walk_subtrees, func, data, pset);
10959 if (result || !walk_subtrees)
10960 return result;
10961 }
10962
10963 switch (code)
10964 {
10965 case ERROR_MARK:
10966 case IDENTIFIER_NODE:
10967 case INTEGER_CST:
10968 case REAL_CST:
10969 case FIXED_CST:
10970 case VECTOR_CST:
10971 case STRING_CST:
10972 case BLOCK:
10973 case PLACEHOLDER_EXPR:
10974 case SSA_NAME:
10975 case FIELD_DECL:
10976 case RESULT_DECL:
10977 /* None of these have subtrees other than those already walked
10978 above. */
10979 break;
10980
10981 case TREE_LIST:
10982 WALK_SUBTREE (TREE_VALUE (*tp));
10983 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
10984 break;
10985
10986 case TREE_VEC:
10987 {
10988 int len = TREE_VEC_LENGTH (*tp);
10989
10990 if (len == 0)
10991 break;
10992
10993 /* Walk all elements but the first. */
10994 while (--len)
10995 WALK_SUBTREE (TREE_VEC_ELT (*tp, len));
10996
10997 /* Now walk the first one as a tail call. */
10998 WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp, 0));
10999 }
11000
11001 case COMPLEX_CST:
11002 WALK_SUBTREE (TREE_REALPART (*tp));
11003 WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp));
11004
11005 case CONSTRUCTOR:
11006 {
11007 unsigned HOST_WIDE_INT idx;
11008 constructor_elt *ce;
11009
11010 for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (*tp), idx, &ce);
11011 idx++)
11012 WALK_SUBTREE (ce->value);
11013 }
11014 break;
11015
11016 case SAVE_EXPR:
11017 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 0));
11018
11019 case BIND_EXPR:
11020 {
11021 tree decl;
11022 for (decl = BIND_EXPR_VARS (*tp); decl; decl = DECL_CHAIN (decl))
11023 {
11024 /* Walk the DECL_INITIAL and DECL_SIZE. We don't want to walk
11025 into declarations that are just mentioned, rather than
11026 declared; they don't really belong to this part of the tree.
11027 And, we can see cycles: the initializer for a declaration
11028 can refer to the declaration itself. */
11029 WALK_SUBTREE (DECL_INITIAL (decl));
11030 WALK_SUBTREE (DECL_SIZE (decl));
11031 WALK_SUBTREE (DECL_SIZE_UNIT (decl));
11032 }
11033 WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp));
11034 }
11035
11036 case STATEMENT_LIST:
11037 {
11038 tree_stmt_iterator i;
11039 for (i = tsi_start (*tp); !tsi_end_p (i); tsi_next (&i))
11040 WALK_SUBTREE (*tsi_stmt_ptr (i));
11041 }
11042 break;
11043
11044 case OMP_CLAUSE:
11045 switch (OMP_CLAUSE_CODE (*tp))
11046 {
11047 case OMP_CLAUSE_PRIVATE:
11048 case OMP_CLAUSE_SHARED:
11049 case OMP_CLAUSE_FIRSTPRIVATE:
11050 case OMP_CLAUSE_COPYIN:
11051 case OMP_CLAUSE_COPYPRIVATE:
11052 case OMP_CLAUSE_FINAL:
11053 case OMP_CLAUSE_IF:
11054 case OMP_CLAUSE_NUM_THREADS:
11055 case OMP_CLAUSE_SCHEDULE:
11056 case OMP_CLAUSE_UNIFORM:
11057 case OMP_CLAUSE_DEPEND:
11058 case OMP_CLAUSE_NUM_TEAMS:
11059 case OMP_CLAUSE_THREAD_LIMIT:
11060 case OMP_CLAUSE_DEVICE:
11061 case OMP_CLAUSE_DIST_SCHEDULE:
11062 case OMP_CLAUSE_SAFELEN:
11063 case OMP_CLAUSE_SIMDLEN:
11064 case OMP_CLAUSE__LOOPTEMP_:
11065 case OMP_CLAUSE__SIMDUID_:
11066 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 0));
11067 /* FALLTHRU */
11068
11069 case OMP_CLAUSE_NOWAIT:
11070 case OMP_CLAUSE_ORDERED:
11071 case OMP_CLAUSE_DEFAULT:
11072 case OMP_CLAUSE_UNTIED:
11073 case OMP_CLAUSE_MERGEABLE:
11074 case OMP_CLAUSE_PROC_BIND:
11075 case OMP_CLAUSE_INBRANCH:
11076 case OMP_CLAUSE_NOTINBRANCH:
11077 case OMP_CLAUSE_FOR:
11078 case OMP_CLAUSE_PARALLEL:
11079 case OMP_CLAUSE_SECTIONS:
11080 case OMP_CLAUSE_TASKGROUP:
11081 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11082
11083 case OMP_CLAUSE_LASTPRIVATE:
11084 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11085 WALK_SUBTREE (OMP_CLAUSE_LASTPRIVATE_STMT (*tp));
11086 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11087
11088 case OMP_CLAUSE_COLLAPSE:
11089 {
11090 int i;
11091 for (i = 0; i < 3; i++)
11092 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11093 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11094 }
11095
11096 case OMP_CLAUSE_LINEAR:
11097 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11098 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STEP (*tp));
11099 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STMT (*tp));
11100 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11101
11102 case OMP_CLAUSE_ALIGNED:
11103 case OMP_CLAUSE_FROM:
11104 case OMP_CLAUSE_TO:
11105 case OMP_CLAUSE_MAP:
11106 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11107 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
11108 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11109
11110 case OMP_CLAUSE_REDUCTION:
11111 {
11112 int i;
11113 for (i = 0; i < 4; i++)
11114 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11115 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11116 }
11117
11118 default:
11119 gcc_unreachable ();
11120 }
11121 break;
11122
11123 case TARGET_EXPR:
11124 {
11125 int i, len;
11126
11127 /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
11128 But, we only want to walk once. */
11129 len = (TREE_OPERAND (*tp, 3) == TREE_OPERAND (*tp, 1)) ? 2 : 3;
11130 for (i = 0; i < len; ++i)
11131 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11132 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len));
11133 }
11134
11135 case DECL_EXPR:
11136 /* If this is a TYPE_DECL, walk into the fields of the type that it's
11137 defining. We only want to walk into these fields of a type in this
11138 case and not in the general case of a mere reference to the type.
11139
11140 The criterion is as follows: if the field can be an expression, it
11141 must be walked only here. This should be in keeping with the fields
11142 that are directly gimplified in gimplify_type_sizes in order for the
11143 mark/copy-if-shared/unmark machinery of the gimplifier to work with
11144 variable-sized types.
11145
11146 Note that DECLs get walked as part of processing the BIND_EXPR. */
11147 if (TREE_CODE (DECL_EXPR_DECL (*tp)) == TYPE_DECL)
11148 {
11149 tree *type_p = &TREE_TYPE (DECL_EXPR_DECL (*tp));
11150 if (TREE_CODE (*type_p) == ERROR_MARK)
11151 return NULL_TREE;
11152
11153 /* Call the function for the type. See if it returns anything or
11154 doesn't want us to continue. If we are to continue, walk both
11155 the normal fields and those for the declaration case. */
11156 result = (*func) (type_p, &walk_subtrees, data);
11157 if (result || !walk_subtrees)
11158 return result;
11159
11160 /* But do not walk a pointed-to type since it may itself need to
11161 be walked in the declaration case if it isn't anonymous. */
11162 if (!POINTER_TYPE_P (*type_p))
11163 {
11164 result = walk_type_fields (*type_p, func, data, pset, lh);
11165 if (result)
11166 return result;
11167 }
11168
11169 /* If this is a record type, also walk the fields. */
11170 if (RECORD_OR_UNION_TYPE_P (*type_p))
11171 {
11172 tree field;
11173
11174 for (field = TYPE_FIELDS (*type_p); field;
11175 field = DECL_CHAIN (field))
11176 {
11177 /* We'd like to look at the type of the field, but we can
11178 easily get infinite recursion. So assume it's pointed
11179 to elsewhere in the tree. Also, ignore things that
11180 aren't fields. */
11181 if (TREE_CODE (field) != FIELD_DECL)
11182 continue;
11183
11184 WALK_SUBTREE (DECL_FIELD_OFFSET (field));
11185 WALK_SUBTREE (DECL_SIZE (field));
11186 WALK_SUBTREE (DECL_SIZE_UNIT (field));
11187 if (TREE_CODE (*type_p) == QUAL_UNION_TYPE)
11188 WALK_SUBTREE (DECL_QUALIFIER (field));
11189 }
11190 }
11191
11192 /* Same for scalar types. */
11193 else if (TREE_CODE (*type_p) == BOOLEAN_TYPE
11194 || TREE_CODE (*type_p) == ENUMERAL_TYPE
11195 || TREE_CODE (*type_p) == INTEGER_TYPE
11196 || TREE_CODE (*type_p) == FIXED_POINT_TYPE
11197 || TREE_CODE (*type_p) == REAL_TYPE)
11198 {
11199 WALK_SUBTREE (TYPE_MIN_VALUE (*type_p));
11200 WALK_SUBTREE (TYPE_MAX_VALUE (*type_p));
11201 }
11202
11203 WALK_SUBTREE (TYPE_SIZE (*type_p));
11204 WALK_SUBTREE_TAIL (TYPE_SIZE_UNIT (*type_p));
11205 }
11206 /* FALLTHRU */
11207
11208 default:
11209 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
11210 {
11211 int i, len;
11212
11213 /* Walk over all the sub-trees of this operand. */
11214 len = TREE_OPERAND_LENGTH (*tp);
11215
11216 /* Go through the subtrees. We need to do this in forward order so
11217 that the scope of a FOR_EXPR is handled properly. */
11218 if (len)
11219 {
11220 for (i = 0; i < len - 1; ++i)
11221 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11222 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len - 1));
11223 }
11224 }
11225 /* If this is a type, walk the needed fields in the type. */
11226 else if (TYPE_P (*tp))
11227 return walk_type_fields (*tp, func, data, pset, lh);
11228 break;
11229 }
11230
11231 /* We didn't find what we were looking for. */
11232 return NULL_TREE;
11233
11234 #undef WALK_SUBTREE_TAIL
11235 }
11236 #undef WALK_SUBTREE
11237
11238 /* Like walk_tree, but does not walk duplicate nodes more than once. */
11239
11240 tree
11241 walk_tree_without_duplicates_1 (tree *tp, walk_tree_fn func, void *data,
11242 walk_tree_lh lh)
11243 {
11244 tree result;
11245 struct pointer_set_t *pset;
11246
11247 pset = pointer_set_create ();
11248 result = walk_tree_1 (tp, func, data, pset, lh);
11249 pointer_set_destroy (pset);
11250 return result;
11251 }
11252
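/* A minimal usage sketch for walk_tree_without_duplicates_1: a callback
   that counts CALL_EXPR nodes.  The helper names count_calls_r and
   count_calls are hypothetical and not part of the GCC API.  */

static tree
count_calls_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
{
  if (TREE_CODE (*tp) == CALL_EXPR)
    ++*(int *) data;
  /* Returning NULL_TREE means: keep walking.  */
  return NULL_TREE;
}

static int
count_calls (tree t)
{
  int n = 0;
  walk_tree_without_duplicates_1 (&t, count_calls_r, &n, NULL);
  return n;
}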
11253
11254 tree
11255 tree_block (tree t)
11256 {
11257 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11258
11259 if (IS_EXPR_CODE_CLASS (c))
11260 return LOCATION_BLOCK (t->exp.locus);
11261 gcc_unreachable ();
11262 return NULL;
11263 }
11264
11265 void
11266 tree_set_block (tree t, tree b)
11267 {
11268 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11269
11270 if (IS_EXPR_CODE_CLASS (c))
11271 {
11272 if (b)
11273 t->exp.locus = COMBINE_LOCATION_DATA (line_table, t->exp.locus, b);
11274 else
11275 t->exp.locus = LOCATION_LOCUS (t->exp.locus);
11276 }
11277 else
11278 gcc_unreachable ();
11279 }
11280
11281 /* Create a nameless artificial label and put it in the current
11282 function context. The label has a location of LOC. Returns the
11283 newly created label. */
11284
11285 tree
11286 create_artificial_label (location_t loc)
11287 {
11288 tree lab = build_decl (loc,
11289 LABEL_DECL, NULL_TREE, void_type_node);
11290
11291 DECL_ARTIFICIAL (lab) = 1;
11292 DECL_IGNORED_P (lab) = 1;
11293 DECL_CONTEXT (lab) = current_function_decl;
11294 return lab;
11295 }
11296
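/* A hedged sketch of typical use: create a label for generated control
   flow and wrap it in a LABEL_EXPR.  The helper name example_label_expr
   is hypothetical.  */

static tree
example_label_expr (location_t loc)
{
  tree lab = create_artificial_label (loc);
  /* The LABEL_EXPR marks the point the label binds to.  */
  return build1 (LABEL_EXPR, void_type_node, lab);
}
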
11297 /* Given a tree, try to return a useful variable name that we can use
11298 to prefix a temporary that is being assigned the value of the tree.
11299 I.e. given <temp> = &A, return A. */
11300
11301 const char *
11302 get_name (tree t)
11303 {
11304 tree stripped_decl;
11305
11306 stripped_decl = t;
11307 STRIP_NOPS (stripped_decl);
11308 if (DECL_P (stripped_decl) && DECL_NAME (stripped_decl))
11309 return IDENTIFIER_POINTER (DECL_NAME (stripped_decl));
11310 else if (TREE_CODE (stripped_decl) == SSA_NAME)
11311 {
11312 tree name = SSA_NAME_IDENTIFIER (stripped_decl);
11313 if (!name)
11314 return NULL;
11315 return IDENTIFIER_POINTER (name);
11316 }
11317 else
11318 {
11319 switch (TREE_CODE (stripped_decl))
11320 {
11321 case ADDR_EXPR:
11322 return get_name (TREE_OPERAND (stripped_decl, 0));
11323 default:
11324 return NULL;
11325 }
11326 }
11327 }
11328
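/* Sketch of the intended use described above: derive a prefix for a
   temporary from the value it will hold, falling back to "tmp" when no
   name can be found.  The helper name is hypothetical.  */

static const char *
example_tmp_prefix (tree val)
{
  const char *name = get_name (val);
  return name ? name : "tmp";
}
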
11329 /* Return true if FNTYPE has a variable argument list. */
11330
11331 bool
11332 stdarg_p (const_tree fntype)
11333 {
11334 function_args_iterator args_iter;
11335 tree n = NULL_TREE, t;
11336
11337 if (!fntype)
11338 return false;
11339
11340 FOREACH_FUNCTION_ARGS (fntype, t, args_iter)
11341 {
11342 n = t;
11343 }
11344
11345 return n != NULL_TREE && n != void_type_node;
11346 }
11347
11348 /* Return true if FNTYPE has a prototype. */
11349
11350 bool
11351 prototype_p (tree fntype)
11352 {
11353 tree t;
11354
11355 gcc_assert (fntype != NULL_TREE);
11356
11357 t = TYPE_ARG_TYPES (fntype);
11358 return (t != NULL_TREE);
11359 }
11360
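/* Sketch: both predicates operate on a FUNCTION_TYPE or METHOD_TYPE, so a
   caller holding a FUNCTION_DECL queries its TREE_TYPE.  The helper name
   is hypothetical.  */

static bool
example_fn_is_variadic (tree fndecl)
{
  tree fntype = TREE_TYPE (fndecl);
  /* A variadic declaration has a prototype whose argument list does not
     end in void.  */
  return prototype_p (fntype) && stdarg_p (fntype);
}
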
11361 /* If BLOCK is inlined from an __attribute__((__artificial__))
11362 routine, return a pointer to the location from which it has
11363 been called. */
11364 location_t *
11365 block_nonartificial_location (tree block)
11366 {
11367 location_t *ret = NULL;
11368
11369 while (block && TREE_CODE (block) == BLOCK
11370 && BLOCK_ABSTRACT_ORIGIN (block))
11371 {
11372 tree ao = BLOCK_ABSTRACT_ORIGIN (block);
11373
11374 while (TREE_CODE (ao) == BLOCK
11375 && BLOCK_ABSTRACT_ORIGIN (ao)
11376 && BLOCK_ABSTRACT_ORIGIN (ao) != ao)
11377 ao = BLOCK_ABSTRACT_ORIGIN (ao);
11378
11379 if (TREE_CODE (ao) == FUNCTION_DECL)
11380 {
11381 /* If AO is an artificial inline, point RET to the
11382 call site locus at which it has been inlined and continue
11383 the loop, in case AO's caller is also an artificial
11384 inline. */
11385 if (DECL_DECLARED_INLINE_P (ao)
11386 && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
11387 ret = &BLOCK_SOURCE_LOCATION (block);
11388 else
11389 break;
11390 }
11391 else if (TREE_CODE (ao) != BLOCK)
11392 break;
11393
11394 block = BLOCK_SUPERCONTEXT (block);
11395 }
11396 return ret;
11397 }
11398
11399
11400 /* If EXP is inlined from an __attribute__((__artificial__))
11401 function, return the location of the original call expression. */
11402
11403 location_t
11404 tree_nonartificial_location (tree exp)
11405 {
11406 location_t *loc = block_nonartificial_location (TREE_BLOCK (exp));
11407
11408 if (loc)
11409 return *loc;
11410 else
11411 return EXPR_LOCATION (exp);
11412 }
11413
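/* Sketch: a typical consumer is a middle-end warning that wants to point
   at user code rather than at the body of an artificial inline.  The
   helper and its MSG parameter are hypothetical.  */

static void
example_warn_at_user_location (tree exp, const char *msg)
{
  warning_at (tree_nonartificial_location (exp), 0, "%s", msg);
}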
11414
11415 /* These are the hash table functions for the hash table of OPTIMIZATION_NODE
11416 and TARGET_OPTION_NODE nodes. */
11417
11418 /* Return the hash code of X, an OPTIMIZATION_NODE or TARGET_OPTION_NODE. */
11419
11420 static hashval_t
11421 cl_option_hash_hash (const void *x)
11422 {
11423 const_tree const t = (const_tree) x;
11424 const char *p;
11425 size_t i;
11426 size_t len = 0;
11427 hashval_t hash = 0;
11428
11429 if (TREE_CODE (t) == OPTIMIZATION_NODE)
11430 {
11431 p = (const char *)TREE_OPTIMIZATION (t);
11432 len = sizeof (struct cl_optimization);
11433 }
11434
11435 else if (TREE_CODE (t) == TARGET_OPTION_NODE)
11436 {
11437 p = (const char *)TREE_TARGET_OPTION (t);
11438 len = sizeof (struct cl_target_option);
11439 }
11440
11441 else
11442 gcc_unreachable ();
11443
11444 /* Assume most opt flags are just 0/1, some are 2-3, and a few might be
11445 something else. */
11446 for (i = 0; i < len; i++)
11447 if (p[i])
11448 hash = (hash << 4) ^ ((i << 2) | p[i]);
11449
11450 return hash;
11451 }
11452
11453 /* Return nonzero if the value represented by *X (an OPTIMIZATION or
11454 TARGET_OPTION tree node) is the same as that given by *Y, a node of
11455 the same kind. */
11456
11457 static int
11458 cl_option_hash_eq (const void *x, const void *y)
11459 {
11460 const_tree const xt = (const_tree) x;
11461 const_tree const yt = (const_tree) y;
11462 const char *xp;
11463 const char *yp;
11464 size_t len;
11465
11466 if (TREE_CODE (xt) != TREE_CODE (yt))
11467 return 0;
11468
11469 if (TREE_CODE (xt) == OPTIMIZATION_NODE)
11470 {
11471 xp = (const char *)TREE_OPTIMIZATION (xt);
11472 yp = (const char *)TREE_OPTIMIZATION (yt);
11473 len = sizeof (struct cl_optimization);
11474 }
11475
11476 else if (TREE_CODE (xt) == TARGET_OPTION_NODE)
11477 {
11478 xp = (const char *)TREE_TARGET_OPTION (xt);
11479 yp = (const char *)TREE_TARGET_OPTION (yt);
11480 len = sizeof (struct cl_target_option);
11481 }
11482
11483 else
11484 gcc_unreachable ();
11485
11486 return (memcmp (xp, yp, len) == 0);
11487 }
11488
11489 /* Build an OPTIMIZATION_NODE based on the options in OPTS. */
11490
11491 tree
11492 build_optimization_node (struct gcc_options *opts)
11493 {
11494 tree t;
11495 void **slot;
11496
11497 /* Use the cache of optimization nodes. */
11498
11499 cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node),
11500 opts);
11501
11502 slot = htab_find_slot (cl_option_hash_table, cl_optimization_node, INSERT);
11503 t = (tree) *slot;
11504 if (!t)
11505 {
11506 /* Insert this one into the hash table. */
11507 t = cl_optimization_node;
11508 *slot = t;
11509
11510 /* Make a new node for next time round. */
11511 cl_optimization_node = make_node (OPTIMIZATION_NODE);
11512 }
11513
11514 return t;
11515 }
11516
11517 /* Build a TARGET_OPTION_NODE based on the options in OPTS. */
11518
11519 tree
11520 build_target_option_node (struct gcc_options *opts)
11521 {
11522 tree t;
11523 void **slot;
11524
11525 /* Use the cache of optimization nodes. */
11526
11527 cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node),
11528 opts);
11529
11530 slot = htab_find_slot (cl_option_hash_table, cl_target_option_node, INSERT);
11531 t = (tree) *slot;
11532 if (!t)
11533 {
11534 /* Insert this one into the hash table. */
11535 t = cl_target_option_node;
11536 *slot = t;
11537
11538 /* Make a new node for next time round. */
11539 cl_target_option_node = make_node (TARGET_OPTION_NODE);
11540 }
11541
11542 return t;
11543 }
11544
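/* Sketch: callers typically cache the returned nodes on a declaration,
   for instance when handling optimize or target attributes.  FNDECL is a
   hypothetical FUNCTION_DECL; global_options is the command-line options
   structure.  */

static void
example_record_option_nodes (tree fndecl)
{
  DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl)
    = build_optimization_node (&global_options);
  DECL_FUNCTION_SPECIFIC_TARGET (fndecl)
    = build_target_option_node (&global_options);
}
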
11545 /* Reset TREE_TARGET_GLOBALS cache for TARGET_OPTION_NODE.
11546 Called through htab_traverse. */
11547
11548 static int
11549 prepare_target_option_node_for_pch (void **slot, void *)
11550 {
11551 tree node = (tree) *slot;
11552 if (TREE_CODE (node) == TARGET_OPTION_NODE)
11553 TREE_TARGET_GLOBALS (node) = NULL;
11554 return 1;
11555 }
11556
11557 /* Clear TREE_TARGET_GLOBALS of all TARGET_OPTION_NODE trees,
11558 so that they aren't saved during PCH writing. */
11559
11560 void
11561 prepare_target_option_nodes_for_pch (void)
11562 {
11563 htab_traverse (cl_option_hash_table, prepare_target_option_node_for_pch,
11564 NULL);
11565 }
11566
11567 /* Determine the "ultimate origin" of a block. The block may be an inlined
11568 instance of an inlined instance of a block which is local to an inline
11569 function, so we have to trace all of the way back through the origin chain
11570 to find out what sort of node actually served as the original seed for the
11571 given block. */
11572
11573 tree
11574 block_ultimate_origin (const_tree block)
11575 {
11576 tree immediate_origin = BLOCK_ABSTRACT_ORIGIN (block);
11577
11578 /* output_inline_function sets BLOCK_ABSTRACT_ORIGIN for all the
11579 nodes in the function to point to themselves; ignore that if
11580 we're trying to output the abstract instance of this function. */
11581 if (BLOCK_ABSTRACT (block) && immediate_origin == block)
11582 return NULL_TREE;
11583
11584 if (immediate_origin == NULL_TREE)
11585 return NULL_TREE;
11586 else
11587 {
11588 tree ret_val;
11589 tree lookahead = immediate_origin;
11590
11591 do
11592 {
11593 ret_val = lookahead;
11594 lookahead = (TREE_CODE (ret_val) == BLOCK
11595 ? BLOCK_ABSTRACT_ORIGIN (ret_val) : NULL);
11596 }
11597 while (lookahead != NULL && lookahead != ret_val);
11598
11599 /* The block's abstract origin chain may not be the *ultimate* origin of
11600 the block. It could lead to a DECL that has an abstract origin set.
11601 If so, we want that DECL's abstract origin (which is what DECL_ORIGIN
11602 will give us if it has one). Note that DECL's abstract origins are
11603 supposed to be the most distant ancestor (or so decl_ultimate_origin
11604 claims), so we don't need to loop following the DECL origins. */
11605 if (DECL_P (ret_val))
11606 return DECL_ORIGIN (ret_val);
11607
11608 return ret_val;
11609 }
11610 }
11611
11612 /* Return true iff the conversion in EXP generates no instruction. Mark
11613 it inline so that it is fully inlined into the stripping functions
11614 even though this function has two uses. */
11615
11616 static inline bool
11617 tree_nop_conversion (const_tree exp)
11618 {
11619 tree outer_type, inner_type;
11620
11621 if (!CONVERT_EXPR_P (exp)
11622 && TREE_CODE (exp) != NON_LVALUE_EXPR)
11623 return false;
11624 if (TREE_OPERAND (exp, 0) == error_mark_node)
11625 return false;
11626
11627 outer_type = TREE_TYPE (exp);
11628 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
11629
11630 if (!inner_type)
11631 return false;
11632
11633 /* Use precision rather than machine mode when we can, which gives
11634 the correct answer even for submode (bit-field) types. */
11635 if ((INTEGRAL_TYPE_P (outer_type)
11636 || POINTER_TYPE_P (outer_type)
11637 || TREE_CODE (outer_type) == OFFSET_TYPE)
11638 && (INTEGRAL_TYPE_P (inner_type)
11639 || POINTER_TYPE_P (inner_type)
11640 || TREE_CODE (inner_type) == OFFSET_TYPE))
11641 return TYPE_PRECISION (outer_type) == TYPE_PRECISION (inner_type);
11642
11643 /* Otherwise fall back on comparing machine modes (e.g. for
11644 aggregate types, floats). */
11645 return TYPE_MODE (outer_type) == TYPE_MODE (inner_type);
11646 }
11647
11648 /* Return true iff conversion in EXP generates no instruction. Don't
11649 consider conversions changing the signedness. */
11650
11651 static bool
11652 tree_sign_nop_conversion (const_tree exp)
11653 {
11654 tree outer_type, inner_type;
11655
11656 if (!tree_nop_conversion (exp))
11657 return false;
11658
11659 outer_type = TREE_TYPE (exp);
11660 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
11661
11662 return (TYPE_UNSIGNED (outer_type) == TYPE_UNSIGNED (inner_type)
11663 && POINTER_TYPE_P (outer_type) == POINTER_TYPE_P (inner_type));
11664 }
11665
11666 /* Strip conversions from EXP according to tree_nop_conversion and
11667 return the resulting expression. */
11668
11669 tree
11670 tree_strip_nop_conversions (tree exp)
11671 {
11672 while (tree_nop_conversion (exp))
11673 exp = TREE_OPERAND (exp, 0);
11674 return exp;
11675 }
11676
11677 /* Strip conversions from EXP according to tree_sign_nop_conversion
11678 and return the resulting expression. */
11679
11680 tree
11681 tree_strip_sign_nop_conversions (tree exp)
11682 {
11683 while (tree_sign_nop_conversion (exp))
11684 exp = TREE_OPERAND (exp, 0);
11685 return exp;
11686 }
11687
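/* Sketch: strip no-op conversions before identity checks so that an
   expression wrapped in NOP_EXPRs still compares equal to the underlying
   declaration.  A deeper structural comparison would use operand_equal_p
   (declared in fold-const.h); pointer identity suffices for this
   illustration.  The helper name is hypothetical.  */

static bool
example_is_decl_ignoring_nops (tree expr, tree decl)
{
  return tree_strip_nop_conversions (expr) == decl;
}
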
11688 /* Strip any floating point extensions from EXP and return the result. */
11689 tree
11690 strip_float_extensions (tree exp)
11691 {
11692 tree sub, expt, subt;
11693
11694 /* For a floating point constant, look up the narrowest type that can hold
11695 it properly and handle it like (type)(narrowest_type)constant.
11696 This way we can optimize for instance a=a*2.0 where "a" is float
11697 but 2.0 is a double constant. */
11698 if (TREE_CODE (exp) == REAL_CST && !DECIMAL_FLOAT_TYPE_P (TREE_TYPE (exp)))
11699 {
11700 REAL_VALUE_TYPE orig;
11701 tree type = NULL;
11702
11703 orig = TREE_REAL_CST (exp);
11704 if (TYPE_PRECISION (TREE_TYPE (exp)) > TYPE_PRECISION (float_type_node)
11705 && exact_real_truncate (TYPE_MODE (float_type_node), &orig))
11706 type = float_type_node;
11707 else if (TYPE_PRECISION (TREE_TYPE (exp))
11708 > TYPE_PRECISION (double_type_node)
11709 && exact_real_truncate (TYPE_MODE (double_type_node), &orig))
11710 type = double_type_node;
11711 if (type)
11712 return build_real (type, real_value_truncate (TYPE_MODE (type), orig));
11713 }
11714
11715 if (!CONVERT_EXPR_P (exp))
11716 return exp;
11717
11718 sub = TREE_OPERAND (exp, 0);
11719 subt = TREE_TYPE (sub);
11720 expt = TREE_TYPE (exp);
11721
11722 if (!FLOAT_TYPE_P (subt))
11723 return exp;
11724
11725 if (DECIMAL_FLOAT_TYPE_P (expt) != DECIMAL_FLOAT_TYPE_P (subt))
11726 return exp;
11727
11728 if (TYPE_PRECISION (subt) > TYPE_PRECISION (expt))
11729 return exp;
11730
11731 return strip_float_extensions (sub);
11732 }
11733
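/* Sketch of the optimization mentioned in the comment above: check whether
   both operands of a double-precision multiplication carry only
   float-precision values, in which case the arithmetic could be narrowed.
   The actual transformation lives in the folders; this only shows the
   test, and the helper name is hypothetical.  */

static bool
example_mult_could_be_float (tree op0, tree op1)
{
  tree n0 = strip_float_extensions (op0);
  tree n1 = strip_float_extensions (op1);
  return (TYPE_PRECISION (TREE_TYPE (n0)) <= TYPE_PRECISION (float_type_node)
	  && TYPE_PRECISION (TREE_TYPE (n1))
	     <= TYPE_PRECISION (float_type_node));
}
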
11734 /* Strip out all handled components that produce invariant
11735 offsets. */
11736
11737 const_tree
11738 strip_invariant_refs (const_tree op)
11739 {
11740 while (handled_component_p (op))
11741 {
11742 switch (TREE_CODE (op))
11743 {
11744 case ARRAY_REF:
11745 case ARRAY_RANGE_REF:
11746 if (!is_gimple_constant (TREE_OPERAND (op, 1))
11747 || TREE_OPERAND (op, 2) != NULL_TREE
11748 || TREE_OPERAND (op, 3) != NULL_TREE)
11749 return NULL;
11750 break;
11751
11752 case COMPONENT_REF:
11753 if (TREE_OPERAND (op, 2) != NULL_TREE)
11754 return NULL;
11755 break;
11756
11757 default:;
11758 }
11759 op = TREE_OPERAND (op, 0);
11760 }
11761
11762 return op;
11763 }
11764
11765 static GTY(()) tree gcc_eh_personality_decl;
11766
11767 /* Return the GCC personality function decl. */
11768
11769 tree
11770 lhd_gcc_personality (void)
11771 {
11772 if (!gcc_eh_personality_decl)
11773 gcc_eh_personality_decl = build_personality_function ("gcc");
11774 return gcc_eh_personality_decl;
11775 }
11776
11777 /* TARGET is the call target of a GIMPLE call statement
11778 (obtained by gimple_call_fn). Return true if it is an
11779 OBJ_TYPE_REF representing a virtual call of a C++ method.
11780 (As opposed to an OBJ_TYPE_REF representing ObjC calls
11781 made through a cast, where the middle-end devirtualization
11782 machinery can't apply.) */
11783
11784 bool
11785 virtual_method_call_p (tree target)
11786 {
11787 if (TREE_CODE (target) != OBJ_TYPE_REF)
11788 return false;
11789 target = TREE_TYPE (target);
11790 gcc_checking_assert (TREE_CODE (target) == POINTER_TYPE);
11791 target = TREE_TYPE (target);
11792 if (TREE_CODE (target) == FUNCTION_TYPE)
11793 return false;
11794 gcc_checking_assert (TREE_CODE (target) == METHOD_TYPE);
11795 return true;
11796 }
11797
11798 /* REF is OBJ_TYPE_REF, return the class the ref corresponds to. */
11799
11800 tree
11801 obj_type_ref_class (tree ref)
11802 {
11803 gcc_checking_assert (TREE_CODE (ref) == OBJ_TYPE_REF);
11804 ref = TREE_TYPE (ref);
11805 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
11806 ref = TREE_TYPE (ref);
11807 /* We look for the type that THIS points to. ObjC also builds
11808 OBJ_TYPE_REF for non-method calls; their first parameter
11809 ID, however, also corresponds to the class type. */
11810 gcc_checking_assert (TREE_CODE (ref) == METHOD_TYPE
11811 || TREE_CODE (ref) == FUNCTION_TYPE);
11812 ref = TREE_VALUE (TYPE_ARG_TYPES (ref));
11813 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
11814 return TREE_TYPE (ref);
11815 }
11816
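/* Sketch: a devirtualization-style consumer of the two helpers above.
   FN is the call target as returned by gimple_call_fn; the helper name
   is hypothetical.  */

static tree
example_class_of_virtual_call (tree fn)
{
  if (fn && virtual_method_call_p (fn))
    return obj_type_ref_class (fn);
  return NULL_TREE;
}
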
11817 /* Return true if T is in an anonymous namespace. */
11818
11819 bool
11820 type_in_anonymous_namespace_p (const_tree t)
11821 {
11822 /* TREE_PUBLIC of TYPE_STUB_DECL may not be properly set for
11823 built-in types; those have a NULL TYPE_CONTEXT. */
11824 if (!TYPE_CONTEXT (t))
11825 return false;
11826 return (TYPE_STUB_DECL (t) && !TREE_PUBLIC (TYPE_STUB_DECL (t)));
11827 }
11828
11829 /* Try to find a base info of BINFO that would have its field decl at offset
11830 OFFSET within the BINFO type and which is of EXPECTED_TYPE. If it can be
11831 found, return it; otherwise return NULL_TREE. */
11832
11833 tree
11834 get_binfo_at_offset (tree binfo, HOST_WIDE_INT offset, tree expected_type)
11835 {
11836 tree type = BINFO_TYPE (binfo);
11837
11838 while (true)
11839 {
11840 HOST_WIDE_INT pos, size;
11841 tree fld;
11842 int i;
11843
11844 if (types_same_for_odr (type, expected_type))
11845 return binfo;
11846 if (offset < 0)
11847 return NULL_TREE;
11848
11849 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
11850 {
11851 if (TREE_CODE (fld) != FIELD_DECL)
11852 continue;
11853
11854 pos = int_bit_position (fld);
11855 size = tree_to_uhwi (DECL_SIZE (fld));
11856 if (pos <= offset && (pos + size) > offset)
11857 break;
11858 }
11859 if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
11860 return NULL_TREE;
11861
11862 if (!DECL_ARTIFICIAL (fld))
11863 {
11864 binfo = TYPE_BINFO (TREE_TYPE (fld));
11865 if (!binfo)
11866 return NULL_TREE;
11867 }
11868 /* Offset 0 indicates the primary base, whose vtable contents are
11869 represented in the binfo for the derived class. */
11870 else if (offset != 0)
11871 {
11872 tree base_binfo, binfo2 = binfo;
11873
11874 /* Find the BINFO corresponding to FLD. This is made a bit
11875 harder by the fact that with virtual inheritance we may need
11876 to walk down the non-virtual inheritance chain. */
11877 while (true)
11878 {
11879 tree containing_binfo = NULL, found_binfo = NULL;
11880 for (i = 0; BINFO_BASE_ITERATE (binfo2, i, base_binfo); i++)
11881 if (types_same_for_odr (TREE_TYPE (base_binfo), TREE_TYPE (fld)))
11882 {
11883 found_binfo = base_binfo;
11884 break;
11885 }
11886 else
11887 if ((tree_to_shwi (BINFO_OFFSET (base_binfo))
11888 - tree_to_shwi (BINFO_OFFSET (binfo)))
11889 * BITS_PER_UNIT < pos
11890 /* Rule out types with no virtual methods, or we can get confused
11891 here by zero-sized bases. */
11892 && BINFO_VTABLE (TYPE_BINFO (BINFO_TYPE (base_binfo)))
11893 && (!containing_binfo
11894 || (tree_to_shwi (BINFO_OFFSET (containing_binfo))
11895 < tree_to_shwi (BINFO_OFFSET (base_binfo)))))
11896 containing_binfo = base_binfo;
11897 if (found_binfo)
11898 {
11899 binfo = found_binfo;
11900 break;
11901 }
11902 if (!containing_binfo)
11903 return NULL_TREE;
11904 binfo2 = containing_binfo;
11905 }
11906 }
11907
11908 type = TREE_TYPE (fld);
11909 offset -= pos;
11910 }
11911 }
11912
11913 /* Returns true if X is a typedef decl. */
11914
11915 bool
11916 is_typedef_decl (tree x)
11917 {
11918 return (x && TREE_CODE (x) == TYPE_DECL
11919 && DECL_ORIGINAL_TYPE (x) != NULL_TREE);
11920 }
11921
11922 /* Returns true iff TYPE is a type variant created for a typedef. */
11923
11924 bool
11925 typedef_variant_p (tree type)
11926 {
11927 return is_typedef_decl (TYPE_NAME (type));
11928 }
11929
11930 /* Warn about a use of an identifier which was marked deprecated. */
11931 void
11932 warn_deprecated_use (tree node, tree attr)
11933 {
11934 const char *msg;
11935
11936 if (node == 0 || !warn_deprecated_decl)
11937 return;
11938
11939 if (!attr)
11940 {
11941 if (DECL_P (node))
11942 attr = DECL_ATTRIBUTES (node);
11943 else if (TYPE_P (node))
11944 {
11945 tree decl = TYPE_STUB_DECL (node);
11946 if (decl)
11947 attr = lookup_attribute ("deprecated",
11948 TYPE_ATTRIBUTES (TREE_TYPE (decl)));
11949 }
11950 }
11951
11952 if (attr)
11953 attr = lookup_attribute ("deprecated", attr);
11954
11955 if (attr)
11956 msg = TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr)));
11957 else
11958 msg = NULL;
11959
11960 if (DECL_P (node))
11961 {
11962 expanded_location xloc = expand_location (DECL_SOURCE_LOCATION (node));
11963 if (msg)
11964 warning (OPT_Wdeprecated_declarations,
11965 "%qD is deprecated (declared at %r%s:%d%R): %s",
11966 node, "locus", xloc.file, xloc.line, msg);
11967 else
11968 warning (OPT_Wdeprecated_declarations,
11969 "%qD is deprecated (declared at %r%s:%d%R)",
11970 node, "locus", xloc.file, xloc.line);
11971 }
11972 else if (TYPE_P (node))
11973 {
11974 tree what = NULL_TREE;
11975 tree decl = TYPE_STUB_DECL (node);
11976
11977 if (TYPE_NAME (node))
11978 {
11979 if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
11980 what = TYPE_NAME (node);
11981 else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
11982 && DECL_NAME (TYPE_NAME (node)))
11983 what = DECL_NAME (TYPE_NAME (node));
11984 }
11985
11986 if (decl)
11987 {
11988 expanded_location xloc
11989 = expand_location (DECL_SOURCE_LOCATION (decl));
11990 if (what)
11991 {
11992 if (msg)
11993 warning (OPT_Wdeprecated_declarations,
11994 "%qE is deprecated (declared at %r%s:%d%R): %s",
11995 what, "locus", xloc.file, xloc.line, msg);
11996 else
11997 warning (OPT_Wdeprecated_declarations,
11998 "%qE is deprecated (declared at %r%s:%d%R)",
11999 what, "locus", xloc.file, xloc.line);
12000 }
12001 else
12002 {
12003 if (msg)
12004 warning (OPT_Wdeprecated_declarations,
12005 "type is deprecated (declared at %r%s:%d%R): %s",
12006 "locus", xloc.file, xloc.line, msg);
12007 else
12008 warning (OPT_Wdeprecated_declarations,
12009 "type is deprecated (declared at %r%s:%d%R)",
12010 "locus", xloc.file, xloc.line);
12011 }
12012 }
12013 else
12014 {
12015 if (what)
12016 {
12017 if (msg)
12018 warning (OPT_Wdeprecated_declarations, "%qE is deprecated: %s",
12019 what, msg);
12020 else
12021 warning (OPT_Wdeprecated_declarations, "%qE is deprecated", what);
12022 }
12023 else
12024 {
12025 if (msg)
12026 warning (OPT_Wdeprecated_declarations, "type is deprecated: %s",
12027 msg);
12028 else
12029 warning (OPT_Wdeprecated_declarations, "type is deprecated");
12030 }
12031 }
12032 }
12033 }
12034
12035 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
12036 somewhere in it. */
12037
12038 bool
12039 contains_bitfld_component_ref_p (const_tree ref)
12040 {
12041 while (handled_component_p (ref))
12042 {
12043 if (TREE_CODE (ref) == COMPONENT_REF
12044 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
12045 return true;
12046 ref = TREE_OPERAND (ref, 0);
12047 }
12048
12049 return false;
12050 }
12051
12052 /* Try to determine whether a TRY_CATCH expression can fall through.
12053 This is a subroutine of block_may_fallthru. */
12054
12055 static bool
12056 try_catch_may_fallthru (const_tree stmt)
12057 {
12058 tree_stmt_iterator i;
12059
12060 /* If the TRY block can fall through, the whole TRY_CATCH can
12061 fall through. */
12062 if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
12063 return true;
12064
12065 i = tsi_start (TREE_OPERAND (stmt, 1));
12066 switch (TREE_CODE (tsi_stmt (i)))
12067 {
12068 case CATCH_EXPR:
12069 /* We expect to see a sequence of CATCH_EXPR trees, each with a
12070 catch expression and a body. The whole TRY_CATCH may fall
12071 through iff any of the catch bodies falls through. */
12072 for (; !tsi_end_p (i); tsi_next (&i))
12073 {
12074 if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
12075 return true;
12076 }
12077 return false;
12078
12079 case EH_FILTER_EXPR:
12080 /* The exception filter expression only matters if there is an
12081 exception. If the exception does not match EH_FILTER_TYPES,
12082 we will execute EH_FILTER_FAILURE, and we will fall through
12083 if that falls through. If the exception does match
12084 EH_FILTER_TYPES, the stack unwinder will continue up the
12085 stack, so we will not fall through. We don't know whether we
12086 will throw an exception which matches EH_FILTER_TYPES or not,
12087 so we just ignore EH_FILTER_TYPES and assume that we might
12088 throw an exception which doesn't match. */
12089 return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));
12090
12091 default:
12092 /* This case represents statements to be executed when an
12093 exception occurs. Those statements are implicitly followed
12094 by a RESX statement to resume execution after the exception.
12095 So in this case the TRY_CATCH never falls through. */
12096 return false;
12097 }
12098 }
12099
12100 /* Try to determine if we can fall out of the bottom of BLOCK. This guess
12101 need not be 100% accurate; simply be conservative and return true if we
12102 don't know. This is used only to avoid stupidly generating extra code.
12103 If we're wrong, we'll just delete the extra code later. */
12104
12105 bool
12106 block_may_fallthru (const_tree block)
12107 {
12108 /* This CONST_CAST is okay because expr_last returns its argument
12109 unmodified and we assign it to a const_tree. */
12110 const_tree stmt = expr_last (CONST_CAST_TREE (block));
12111
12112 switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
12113 {
12114 case GOTO_EXPR:
12115 case RETURN_EXPR:
12116 /* Easy cases. If the last statement of the block implies
12117 control transfer, then we can't fall through. */
12118 return false;
12119
12120 case SWITCH_EXPR:
12121 /* If SWITCH_LABELS is set, this is lowered, and represents a
12122 branch to a selected label and hence cannot fall through.
12123 Otherwise SWITCH_BODY is set, and the switch can fall
12124 through. */
12125 return SWITCH_LABELS (stmt) == NULL_TREE;
12126
12127 case COND_EXPR:
12128 if (block_may_fallthru (COND_EXPR_THEN (stmt)))
12129 return true;
12130 return block_may_fallthru (COND_EXPR_ELSE (stmt));
12131
12132 case BIND_EXPR:
12133 return block_may_fallthru (BIND_EXPR_BODY (stmt));
12134
12135 case TRY_CATCH_EXPR:
12136 return try_catch_may_fallthru (stmt);
12137
12138 case TRY_FINALLY_EXPR:
12139 /* The finally clause is always executed after the try clause,
12140 so if it does not fall through, then the try-finally will not
12141 fall through. Otherwise, if the try clause does not fall
12142 through, then when the finally clause falls through it will
12143 resume execution wherever the try clause was going. So the
12144 whole try-finally will only fall through if both the try
12145 clause and the finally clause fall through. */
12146 return (block_may_fallthru (TREE_OPERAND (stmt, 0))
12147 && block_may_fallthru (TREE_OPERAND (stmt, 1)));
12148
12149 case MODIFY_EXPR:
12150 if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
12151 stmt = TREE_OPERAND (stmt, 1);
12152 else
12153 return true;
12154 /* FALLTHRU */
12155
12156 case CALL_EXPR:
12157 /* Functions that do not return do not fall through. */
12158 return (call_expr_flags (stmt) & ECF_NORETURN) == 0;
12159
12160 case CLEANUP_POINT_EXPR:
12161 return block_may_fallthru (TREE_OPERAND (stmt, 0));
12162
12163 case TARGET_EXPR:
12164 return block_may_fallthru (TREE_OPERAND (stmt, 1));
12165
12166 case ERROR_MARK:
12167 return true;
12168
12169 default:
12170 return lang_hooks.block_may_fallthru (stmt);
12171 }
12172 }
12173
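/* Sketch: a front-end-style check for whether control can reach the end
   of a function body, in which case an implicit return would be needed.
   The helper name is hypothetical.  */

static bool
example_needs_implicit_return (tree fnbody)
{
  return block_may_fallthru (fnbody);
}
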
12174 /* True if we are using EH to handle cleanups. */
12175 static bool using_eh_for_cleanups_flag = false;
12176
12177 /* This routine is called from front ends to indicate eh should be used for
12178 cleanups. */
12179 void
12180 using_eh_for_cleanups (void)
12181 {
12182 using_eh_for_cleanups_flag = true;
12183 }
12184
12185 /* Query whether EH is used for cleanups. */
12186 bool
12187 using_eh_for_cleanups_p (void)
12188 {
12189 return using_eh_for_cleanups_flag;
12190 }
12191
12192 /* Wrapper for tree_code_name to ensure that the tree code is valid. */
12193 const char *
12194 get_tree_code_name (enum tree_code code)
12195 {
12196 const char *invalid = "<invalid tree code>";
12197
12198 if (code >= MAX_TREE_CODES)
12199 return invalid;
12200
12201 return tree_code_name[code];
12202 }
12203
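/* Sketch: typical debug-dump use; the MAX_TREE_CODES check above keeps
   this safe even for corrupted codes.  The helper name is hypothetical.  */

static void
example_dump_tree_code (const_tree t)
{
  fprintf (stderr, "%s\n", get_tree_code_name (TREE_CODE (t)));
}
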
12204 /* Drops the TREE_OVERFLOW flag from T. */
12205
12206 tree
12207 drop_tree_overflow (tree t)
12208 {
12209 gcc_checking_assert (TREE_OVERFLOW (t));
12210
12211 /* For tree codes with a sharing machinery re-build the result. */
12212 if (TREE_CODE (t) == INTEGER_CST)
12213 return wide_int_to_tree (TREE_TYPE (t), t);
12214
12215 /* Otherwise, as all tcc_constants are possibly shared, copy the node
12216 and drop the flag. */
12217 t = copy_node (t);
12218 TREE_OVERFLOW (t) = 0;
12219 return t;
12220 }
12221
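/* Sketch: callers normally test TREE_OVERFLOW first, since the function
   asserts that the flag is set.  The helper name is hypothetical.  */

static tree
example_clean_overflow (tree cst)
{
  if (TREE_OVERFLOW_P (cst))
    cst = drop_tree_overflow (cst);
  return cst;
}
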
12222 /* Given a memory reference expression T, return its base address.
12223 The base address of a memory reference expression is the main
12224 object being referenced. For instance, the base address for
12225 'array[i].fld[j]' is 'array'. You can think of this as stripping
12226 away the offset part from a memory address.
12227
12228 This function calls handled_component_p to strip away all the inner
12229 parts of the memory reference until it reaches the base object. */
12230
12231 tree
12232 get_base_address (tree t)
12233 {
12234 while (handled_component_p (t))
12235 t = TREE_OPERAND (t, 0);
12236
12237 if ((TREE_CODE (t) == MEM_REF
12238 || TREE_CODE (t) == TARGET_MEM_REF)
12239 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
12240 t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
12241
12242 /* ??? Either the alias oracle or all callers need to properly deal
12243 with WITH_SIZE_EXPRs before we can look through those. */
12244 if (TREE_CODE (t) == WITH_SIZE_EXPR)
12245 return NULL_TREE;
12246
12247 return t;
12248 }
12249
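/* Sketch: an alias-analysis-style check that a memory reference is based
   on a declaration rather than on a pointer dereference.  The helper name
   is hypothetical.  */

static bool
example_ref_based_on_decl_p (tree ref)
{
  tree base = get_base_address (ref);
  return base != NULL_TREE && DECL_P (base);
}
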
12250 #include "gt-tree.h"