Revert "replace several uses of the anon namespace with GCC_FINAL"
[gcc.git] / gcc / tree.c
1 /* Language-independent node constructors for parse phase of GNU compiler.
2 Copyright (C) 1987-2015 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* This file contains the low level primitives for operating on tree nodes,
21 including allocation, list operations, interning of identifiers,
22 construction of data type nodes and statement nodes,
23 and construction of type conversion nodes. It also contains
24 tables indexed by tree code that describe how to take apart
25 nodes of that code.
26
27 It is intended to be language-independent but can occasionally
28 call language-dependent routines. */
29
30 #include "config.h"
31 #include "system.h"
32 #include "coretypes.h"
33 #include "backend.h"
34 #include "tree.h"
35 #include "gimple.h"
36 #include "rtl.h"
37 #include "ssa.h"
38 #include "flags.h"
39 #include "alias.h"
40 #include "fold-const.h"
41 #include "stor-layout.h"
42 #include "calls.h"
43 #include "attribs.h"
44 #include "varasm.h"
45 #include "tm_p.h"
46 #include "toplev.h" /* get_random_seed */
47 #include "filenames.h"
48 #include "output.h"
49 #include "target.h"
50 #include "common/common-target.h"
51 #include "langhooks.h"
52 #include "tree-inline.h"
53 #include "tree-iterator.h"
54 #include "internal-fn.h"
55 #include "gimple-iterator.h"
56 #include "gimplify.h"
57 #include "cgraph.h"
58 #include "insn-config.h"
59 #include "expmed.h"
60 #include "dojump.h"
61 #include "explow.h"
62 #include "emit-rtl.h"
63 #include "stmt.h"
64 #include "expr.h"
65 #include "tree-dfa.h"
66 #include "params.h"
67 #include "tree-pass.h"
68 #include "langhooks-def.h"
69 #include "diagnostic.h"
70 #include "tree-diagnostic.h"
71 #include "tree-pretty-print.h"
72 #include "except.h"
73 #include "debug.h"
74 #include "intl.h"
75 #include "builtins.h"
76 #include "print-tree.h"
77 #include "ipa-utils.h"
78
79 /* Tree code classes. */
80
81 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) TYPE,
82 #define END_OF_BASE_TREE_CODES tcc_exceptional,
83
84 const enum tree_code_class tree_code_type[] = {
85 #include "all-tree.def"
86 };
87
88 #undef DEFTREECODE
89 #undef END_OF_BASE_TREE_CODES
90
91 /* Table indexed by tree code giving number of expression
92 operands beyond the fixed part of the node structure.
93 Not used for types or decls. */
94
95 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) LENGTH,
96 #define END_OF_BASE_TREE_CODES 0,
97
98 const unsigned char tree_code_length[] = {
99 #include "all-tree.def"
100 };
101
102 #undef DEFTREECODE
103 #undef END_OF_BASE_TREE_CODES
104
105 /* Names of tree components.
106 Used for printing out the tree and error messages. */
107 #define DEFTREECODE(SYM, NAME, TYPE, LEN) NAME,
108 #define END_OF_BASE_TREE_CODES "@dummy",
109
110 static const char *const tree_code_name[] = {
111 #include "all-tree.def"
112 };
113
114 #undef DEFTREECODE
115 #undef END_OF_BASE_TREE_CODES
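/* Illustrative note (not part of the original source): the three tables
   above are produced by expanding all-tree.def three times with different
   definitions of DEFTREECODE.  For example, the entry
       DEFTREECODE (PLUS_EXPR, "plus_expr", tcc_binary, 2)
   contributes tcc_binary to tree_code_type[], 2 to tree_code_length[]
   and "plus_expr" to tree_code_name[], all at index PLUS_EXPR.  */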
116
117 /* Each tree code class has an associated string representation.
118 These must correspond to the tree_code_class entries. */
119
120 const char *const tree_code_class_strings[] =
121 {
122 "exceptional",
123 "constant",
124 "type",
125 "declaration",
126 "reference",
127 "comparison",
128 "unary",
129 "binary",
130 "statement",
131 "vl_exp",
132 "expression"
133 };
134
135 /* obstack.[ch] explicitly declined to prototype this. */
136 extern int _obstack_allocated_p (struct obstack *h, void *obj);
137
138 /* Statistics-gathering stuff. */
139
140 static int tree_code_counts[MAX_TREE_CODES];
141 int tree_node_counts[(int) all_kinds];
142 int tree_node_sizes[(int) all_kinds];
143
144 /* Keep in sync with tree.h:enum tree_node_kind. */
145 static const char * const tree_node_kind_names[] = {
146 "decls",
147 "types",
148 "blocks",
149 "stmts",
150 "refs",
151 "exprs",
152 "constants",
153 "identifiers",
154 "vecs",
155 "binfos",
156 "ssa names",
157 "constructors",
158 "random kinds",
159 "lang_decl kinds",
160 "lang_type kinds",
161 "omp clauses",
162 };
163
164 /* Unique id for next decl created. */
165 static GTY(()) int next_decl_uid;
166 /* Unique id for next type created. */
167 static GTY(()) int next_type_uid = 1;
168 /* Unique id for next debug decl created. Use negative numbers,
169 to catch erroneous uses. */
170 static GTY(()) int next_debug_decl_uid;
171
172 /* Since we cannot rehash a type after it is in the table, we have to
173 keep the hash code. */
174
175 struct GTY((for_user)) type_hash {
176 unsigned long hash;
177 tree type;
178 };
179
180 /* Initial size of the hash table (rounded to next prime). */
181 #define TYPE_HASH_INITIAL_SIZE 1000
182
183 struct type_cache_hasher : ggc_cache_ptr_hash<type_hash>
184 {
185 static hashval_t hash (type_hash *t) { return t->hash; }
186 static bool equal (type_hash *a, type_hash *b);
187
188 static int
189 keep_cache_entry (type_hash *&t)
190 {
191 return ggc_marked_p (t->type);
192 }
193 };
194
195 /* Now here is the hash table. When recording a type, it is added to
196 the slot whose index is the hash code. Note that the hash table is
197 used for several kinds of types (function types, array types and
198 array index range types, for now). While all these live in the
199 same table, they are completely independent, and the hash code is
200 computed differently for each of these. */
201
202 static GTY ((cache)) hash_table<type_cache_hasher> *type_hash_table;
203
204 /* Hash table and temporary node for larger integer const values. */
205 static GTY (()) tree int_cst_node;
206
207 struct int_cst_hasher : ggc_cache_ptr_hash<tree_node>
208 {
209 static hashval_t hash (tree t);
210 static bool equal (tree x, tree y);
211 };
212
213 static GTY ((cache)) hash_table<int_cst_hasher> *int_cst_hash_table;
214
215 /* Hash table for optimization flags and target option flags. Use the same
216 hash table for both sets of options. Nodes for building the current
217 optimization and target option nodes. The assumption is most of the time
218 the options created will already be in the hash table, so we avoid
220 allocating and freeing a node repeatedly. */
220 static GTY (()) tree cl_optimization_node;
221 static GTY (()) tree cl_target_option_node;
222
223 struct cl_option_hasher : ggc_cache_ptr_hash<tree_node>
224 {
225 static hashval_t hash (tree t);
226 static bool equal (tree x, tree y);
227 };
228
229 static GTY ((cache)) hash_table<cl_option_hasher> *cl_option_hash_table;
230
231 /* General tree->tree mapping structure for use in hash tables. */
232
233
234 static GTY ((cache))
235 hash_table<tree_decl_map_cache_hasher> *debug_expr_for_decl;
236
237 static GTY ((cache))
238 hash_table<tree_decl_map_cache_hasher> *value_expr_for_decl;
239
240 struct tree_vec_map_cache_hasher : ggc_cache_ptr_hash<tree_vec_map>
241 {
242 static hashval_t hash (tree_vec_map *m) { return DECL_UID (m->base.from); }
243
244 static bool
245 equal (tree_vec_map *a, tree_vec_map *b)
246 {
247 return a->base.from == b->base.from;
248 }
249
250 static int
251 keep_cache_entry (tree_vec_map *&m)
252 {
253 return ggc_marked_p (m->base.from);
254 }
255 };
256
257 static GTY ((cache))
258 hash_table<tree_vec_map_cache_hasher> *debug_args_for_decl;
259
260 static void set_type_quals (tree, int);
261 static void print_type_hash_statistics (void);
262 static void print_debug_expr_statistics (void);
263 static void print_value_expr_statistics (void);
264 static void type_hash_list (const_tree, inchash::hash &);
265 static void attribute_hash_list (const_tree, inchash::hash &);
266
267 tree global_trees[TI_MAX];
268 tree integer_types[itk_none];
269
270 bool int_n_enabled_p[NUM_INT_N_ENTS];
271 struct int_n_trees_t int_n_trees [NUM_INT_N_ENTS];
272
273 unsigned char tree_contains_struct[MAX_TREE_CODES][64];
274
275 /* Number of operands for each OpenMP clause. */
276 unsigned const char omp_clause_num_ops[] =
277 {
278 0, /* OMP_CLAUSE_ERROR */
279 1, /* OMP_CLAUSE_PRIVATE */
280 1, /* OMP_CLAUSE_SHARED */
281 1, /* OMP_CLAUSE_FIRSTPRIVATE */
282 2, /* OMP_CLAUSE_LASTPRIVATE */
283 4, /* OMP_CLAUSE_REDUCTION */
284 1, /* OMP_CLAUSE_COPYIN */
285 1, /* OMP_CLAUSE_COPYPRIVATE */
286 3, /* OMP_CLAUSE_LINEAR */
287 2, /* OMP_CLAUSE_ALIGNED */
288 1, /* OMP_CLAUSE_DEPEND */
289 1, /* OMP_CLAUSE_UNIFORM */
290 2, /* OMP_CLAUSE_FROM */
291 2, /* OMP_CLAUSE_TO */
292 2, /* OMP_CLAUSE_MAP */
293 2, /* OMP_CLAUSE__CACHE_ */
294 1, /* OMP_CLAUSE_DEVICE_RESIDENT */
295 1, /* OMP_CLAUSE_USE_DEVICE */
296 2, /* OMP_CLAUSE_GANG */
297 1, /* OMP_CLAUSE_ASYNC */
298 1, /* OMP_CLAUSE_WAIT */
299 0, /* OMP_CLAUSE_AUTO */
300 0, /* OMP_CLAUSE_SEQ */
301 1, /* OMP_CLAUSE__LOOPTEMP_ */
302 1, /* OMP_CLAUSE_IF */
303 1, /* OMP_CLAUSE_NUM_THREADS */
304 1, /* OMP_CLAUSE_SCHEDULE */
305 0, /* OMP_CLAUSE_NOWAIT */
306 0, /* OMP_CLAUSE_ORDERED */
307 0, /* OMP_CLAUSE_DEFAULT */
308 3, /* OMP_CLAUSE_COLLAPSE */
309 0, /* OMP_CLAUSE_UNTIED */
310 1, /* OMP_CLAUSE_FINAL */
311 0, /* OMP_CLAUSE_MERGEABLE */
312 1, /* OMP_CLAUSE_DEVICE */
313 1, /* OMP_CLAUSE_DIST_SCHEDULE */
314 0, /* OMP_CLAUSE_INBRANCH */
315 0, /* OMP_CLAUSE_NOTINBRANCH */
316 1, /* OMP_CLAUSE_NUM_TEAMS */
317 1, /* OMP_CLAUSE_THREAD_LIMIT */
318 0, /* OMP_CLAUSE_PROC_BIND */
319 1, /* OMP_CLAUSE_SAFELEN */
320 1, /* OMP_CLAUSE_SIMDLEN */
321 0, /* OMP_CLAUSE_FOR */
322 0, /* OMP_CLAUSE_PARALLEL */
323 0, /* OMP_CLAUSE_SECTIONS */
324 0, /* OMP_CLAUSE_TASKGROUP */
325 1, /* OMP_CLAUSE__SIMDUID_ */
326 1, /* OMP_CLAUSE__CILK_FOR_COUNT_ */
327 0, /* OMP_CLAUSE_INDEPENDENT */
328 1, /* OMP_CLAUSE_WORKER */
329 1, /* OMP_CLAUSE_VECTOR */
330 1, /* OMP_CLAUSE_NUM_GANGS */
331 1, /* OMP_CLAUSE_NUM_WORKERS */
332 1, /* OMP_CLAUSE_VECTOR_LENGTH */
333 };
334
335 const char * const omp_clause_code_name[] =
336 {
337 "error_clause",
338 "private",
339 "shared",
340 "firstprivate",
341 "lastprivate",
342 "reduction",
343 "copyin",
344 "copyprivate",
345 "linear",
346 "aligned",
347 "depend",
348 "uniform",
349 "from",
350 "to",
351 "map",
352 "_cache_",
353 "device_resident",
354 "use_device",
355 "gang",
356 "async",
357 "wait",
358 "auto",
359 "seq",
360 "_looptemp_",
361 "if",
362 "num_threads",
363 "schedule",
364 "nowait",
365 "ordered",
366 "default",
367 "collapse",
368 "untied",
369 "final",
370 "mergeable",
371 "device",
372 "dist_schedule",
373 "inbranch",
374 "notinbranch",
375 "num_teams",
376 "thread_limit",
377 "proc_bind",
378 "safelen",
379 "simdlen",
380 "for",
381 "parallel",
382 "sections",
383 "taskgroup",
384 "_simduid_",
385 "_Cilk_for_count_",
386 "independent",
387 "worker",
388 "vector",
389 "num_gangs",
390 "num_workers",
391 "vector_length"
392 };
393
394
395 /* Return the tree node structure used by tree code CODE. */
396
397 static inline enum tree_node_structure_enum
398 tree_node_structure_for_code (enum tree_code code)
399 {
400 switch (TREE_CODE_CLASS (code))
401 {
402 case tcc_declaration:
403 {
404 switch (code)
405 {
406 case FIELD_DECL:
407 return TS_FIELD_DECL;
408 case PARM_DECL:
409 return TS_PARM_DECL;
410 case VAR_DECL:
411 return TS_VAR_DECL;
412 case LABEL_DECL:
413 return TS_LABEL_DECL;
414 case RESULT_DECL:
415 return TS_RESULT_DECL;
416 case DEBUG_EXPR_DECL:
417 return TS_DECL_WRTL;
418 case CONST_DECL:
419 return TS_CONST_DECL;
420 case TYPE_DECL:
421 return TS_TYPE_DECL;
422 case FUNCTION_DECL:
423 return TS_FUNCTION_DECL;
424 case TRANSLATION_UNIT_DECL:
425 return TS_TRANSLATION_UNIT_DECL;
426 default:
427 return TS_DECL_NON_COMMON;
428 }
429 }
430 case tcc_type:
431 return TS_TYPE_NON_COMMON;
432 case tcc_reference:
433 case tcc_comparison:
434 case tcc_unary:
435 case tcc_binary:
436 case tcc_expression:
437 case tcc_statement:
438 case tcc_vl_exp:
439 return TS_EXP;
440 default: /* tcc_constant and tcc_exceptional */
441 break;
442 }
443 switch (code)
444 {
445 /* tcc_constant cases. */
446 case VOID_CST: return TS_TYPED;
447 case INTEGER_CST: return TS_INT_CST;
448 case REAL_CST: return TS_REAL_CST;
449 case FIXED_CST: return TS_FIXED_CST;
450 case COMPLEX_CST: return TS_COMPLEX;
451 case VECTOR_CST: return TS_VECTOR;
452 case STRING_CST: return TS_STRING;
453 /* tcc_exceptional cases. */
454 case ERROR_MARK: return TS_COMMON;
455 case IDENTIFIER_NODE: return TS_IDENTIFIER;
456 case TREE_LIST: return TS_LIST;
457 case TREE_VEC: return TS_VEC;
458 case SSA_NAME: return TS_SSA_NAME;
459 case PLACEHOLDER_EXPR: return TS_COMMON;
460 case STATEMENT_LIST: return TS_STATEMENT_LIST;
461 case BLOCK: return TS_BLOCK;
462 case CONSTRUCTOR: return TS_CONSTRUCTOR;
463 case TREE_BINFO: return TS_BINFO;
464 case OMP_CLAUSE: return TS_OMP_CLAUSE;
465 case OPTIMIZATION_NODE: return TS_OPTIMIZATION;
466 case TARGET_OPTION_NODE: return TS_TARGET_OPTION;
467
468 default:
469 gcc_unreachable ();
470 }
471 }
472
473
474 /* Initialize tree_contains_struct to describe the hierarchy of tree
475 nodes. */
476
477 static void
478 initialize_tree_contains_struct (void)
479 {
480 unsigned i;
481
482 for (i = ERROR_MARK; i < LAST_AND_UNUSED_TREE_CODE; i++)
483 {
484 enum tree_code code;
485 enum tree_node_structure_enum ts_code;
486
487 code = (enum tree_code) i;
488 ts_code = tree_node_structure_for_code (code);
489
490 /* Mark the TS structure itself. */
491 tree_contains_struct[code][ts_code] = 1;
492
493 /* Mark all the structures that TS is derived from. */
494 switch (ts_code)
495 {
496 case TS_TYPED:
497 case TS_BLOCK:
498 MARK_TS_BASE (code);
499 break;
500
501 case TS_COMMON:
502 case TS_INT_CST:
503 case TS_REAL_CST:
504 case TS_FIXED_CST:
505 case TS_VECTOR:
506 case TS_STRING:
507 case TS_COMPLEX:
508 case TS_SSA_NAME:
509 case TS_CONSTRUCTOR:
510 case TS_EXP:
511 case TS_STATEMENT_LIST:
512 MARK_TS_TYPED (code);
513 break;
514
515 case TS_IDENTIFIER:
516 case TS_DECL_MINIMAL:
517 case TS_TYPE_COMMON:
518 case TS_LIST:
519 case TS_VEC:
520 case TS_BINFO:
521 case TS_OMP_CLAUSE:
522 case TS_OPTIMIZATION:
523 case TS_TARGET_OPTION:
524 MARK_TS_COMMON (code);
525 break;
526
527 case TS_TYPE_WITH_LANG_SPECIFIC:
528 MARK_TS_TYPE_COMMON (code);
529 break;
530
531 case TS_TYPE_NON_COMMON:
532 MARK_TS_TYPE_WITH_LANG_SPECIFIC (code);
533 break;
534
535 case TS_DECL_COMMON:
536 MARK_TS_DECL_MINIMAL (code);
537 break;
538
539 case TS_DECL_WRTL:
540 case TS_CONST_DECL:
541 MARK_TS_DECL_COMMON (code);
542 break;
543
544 case TS_DECL_NON_COMMON:
545 MARK_TS_DECL_WITH_VIS (code);
546 break;
547
548 case TS_DECL_WITH_VIS:
549 case TS_PARM_DECL:
550 case TS_LABEL_DECL:
551 case TS_RESULT_DECL:
552 MARK_TS_DECL_WRTL (code);
553 break;
554
555 case TS_FIELD_DECL:
556 MARK_TS_DECL_COMMON (code);
557 break;
558
559 case TS_VAR_DECL:
560 MARK_TS_DECL_WITH_VIS (code);
561 break;
562
563 case TS_TYPE_DECL:
564 case TS_FUNCTION_DECL:
565 MARK_TS_DECL_NON_COMMON (code);
566 break;
567
568 case TS_TRANSLATION_UNIT_DECL:
569 MARK_TS_DECL_COMMON (code);
570 break;
571
572 default:
573 gcc_unreachable ();
574 }
575 }
576
577 /* Basic consistency checks for attributes used in fold. */
578 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_NON_COMMON]);
579 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_NON_COMMON]);
580 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_COMMON]);
581 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_COMMON]);
582 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_COMMON]);
583 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_COMMON]);
584 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_COMMON]);
585 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_COMMON]);
586 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_COMMON]);
587 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_COMMON]);
588 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_COMMON]);
589 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WRTL]);
590 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_WRTL]);
591 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_WRTL]);
592 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WRTL]);
593 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_WRTL]);
594 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_MINIMAL]);
595 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_MINIMAL]);
596 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_MINIMAL]);
597 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_MINIMAL]);
598 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_MINIMAL]);
599 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_MINIMAL]);
600 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_MINIMAL]);
601 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_MINIMAL]);
602 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_MINIMAL]);
603 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WITH_VIS]);
604 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WITH_VIS]);
605 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_WITH_VIS]);
606 gcc_assert (tree_contains_struct[VAR_DECL][TS_VAR_DECL]);
607 gcc_assert (tree_contains_struct[FIELD_DECL][TS_FIELD_DECL]);
608 gcc_assert (tree_contains_struct[PARM_DECL][TS_PARM_DECL]);
609 gcc_assert (tree_contains_struct[LABEL_DECL][TS_LABEL_DECL]);
610 gcc_assert (tree_contains_struct[RESULT_DECL][TS_RESULT_DECL]);
611 gcc_assert (tree_contains_struct[CONST_DECL][TS_CONST_DECL]);
612 gcc_assert (tree_contains_struct[TYPE_DECL][TS_TYPE_DECL]);
613 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_FUNCTION_DECL]);
614 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_MINIMAL]);
615 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_COMMON]);
616 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_MINIMAL]);
617 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_COMMON]);
618 }
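/* Illustrative note (not from the original source): the derivation chain
   set up above means that, e.g., marking VAR_DECL with TS_VAR_DECL also
   marks TS_DECL_WITH_VIS, TS_DECL_WRTL, TS_DECL_COMMON, TS_DECL_MINIMAL
   and the base structures, so CODE_CONTAINS_STRUCT (VAR_DECL,
   TS_DECL_COMMON) holds; that is exactly what the gcc_assert checks at
   the end of the function verify.  */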
619
620
621 /* Init tree.c. */
622
623 void
624 init_ttree (void)
625 {
626 /* Initialize the hash table of types. */
627 type_hash_table
628 = hash_table<type_cache_hasher>::create_ggc (TYPE_HASH_INITIAL_SIZE);
629
630 debug_expr_for_decl
631 = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);
632
633 value_expr_for_decl
634 = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);
635
636 int_cst_hash_table = hash_table<int_cst_hasher>::create_ggc (1024);
637
638 int_cst_node = make_int_cst (1, 1);
639
640 cl_option_hash_table = hash_table<cl_option_hasher>::create_ggc (64);
641
642 cl_optimization_node = make_node (OPTIMIZATION_NODE);
643 cl_target_option_node = make_node (TARGET_OPTION_NODE);
644
645 /* Initialize the tree_contains_struct array. */
646 initialize_tree_contains_struct ();
647 lang_hooks.init_ts ();
648 }
649
650 \f
651 /* The name of the object as the assembler will see it (but before any
652 translations made by ASM_OUTPUT_LABELREF). Often this is the same
653 as DECL_NAME. It is an IDENTIFIER_NODE. */
654 tree
655 decl_assembler_name (tree decl)
656 {
657 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
658 lang_hooks.set_decl_assembler_name (decl);
659 return DECL_WITH_VIS_CHECK (decl)->decl_with_vis.assembler_name;
660 }
661
662 /* When the target supports COMDAT groups, this indicates which group the
663 DECL is associated with. This can be either an IDENTIFIER_NODE or a
664 decl, in which case its DECL_ASSEMBLER_NAME identifies the group. */
665 tree
666 decl_comdat_group (const_tree node)
667 {
668 struct symtab_node *snode = symtab_node::get (node);
669 if (!snode)
670 return NULL;
671 return snode->get_comdat_group ();
672 }
673
674 /* Likewise, but make sure it's been reduced to an IDENTIFIER_NODE. */
675 tree
676 decl_comdat_group_id (const_tree node)
677 {
678 struct symtab_node *snode = symtab_node::get (node);
679 if (!snode)
680 return NULL;
681 return snode->get_comdat_group_id ();
682 }
683
684 /* When the target supports named sections, return the name of the section
685 NODE is placed in, as a string, or NULL if it is in no section. */
686 const char *
687 decl_section_name (const_tree node)
688 {
689 struct symtab_node *snode = symtab_node::get (node);
690 if (!snode)
691 return NULL;
692 return snode->get_section ();
693 }
694
695 /* Set the section name of NODE to the string VALUE, or clear it
696 when VALUE is NULL. */
697 void
698 set_decl_section_name (tree node, const char *value)
699 {
700 struct symtab_node *snode;
701
702 if (value == NULL)
703 {
704 snode = symtab_node::get (node);
705 if (!snode)
706 return;
707 }
708 else if (TREE_CODE (node) == VAR_DECL)
709 snode = varpool_node::get_create (node);
710 else
711 snode = cgraph_node::get_create (node);
712 snode->set_section (value);
713 }
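/* Usage sketch (illustrative, not from the original file): a front end
   placing a variable or function DECL into a named section might call
       set_decl_section_name (decl, ".mysection");
   and later clear the section again with
       set_decl_section_name (decl, NULL);
   where DECL is assumed to be a VAR_DECL or FUNCTION_DECL.  */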
714
715 /* Return TLS model of a variable NODE. */
716 enum tls_model
717 decl_tls_model (const_tree node)
718 {
719 struct varpool_node *snode = varpool_node::get (node);
720 if (!snode)
721 return TLS_MODEL_NONE;
722 return snode->tls_model;
723 }
724
725 /* Set TLS model of variable NODE to MODEL. */
726 void
727 set_decl_tls_model (tree node, enum tls_model model)
728 {
729 struct varpool_node *vnode;
730
731 if (model == TLS_MODEL_NONE)
732 {
733 vnode = varpool_node::get (node);
734 if (!vnode)
735 return;
736 }
737 else
738 vnode = varpool_node::get_create (node);
739 vnode->tls_model = model;
740 }
741
742 /* Compute the number of bytes occupied by a tree with code CODE.
743 This function cannot be used for nodes that have variable sizes,
744 including TREE_VEC, INTEGER_CST, STRING_CST, and CALL_EXPR. */
745 size_t
746 tree_code_size (enum tree_code code)
747 {
748 switch (TREE_CODE_CLASS (code))
749 {
750 case tcc_declaration: /* A decl node */
751 {
752 switch (code)
753 {
754 case FIELD_DECL:
755 return sizeof (struct tree_field_decl);
756 case PARM_DECL:
757 return sizeof (struct tree_parm_decl);
758 case VAR_DECL:
759 return sizeof (struct tree_var_decl);
760 case LABEL_DECL:
761 return sizeof (struct tree_label_decl);
762 case RESULT_DECL:
763 return sizeof (struct tree_result_decl);
764 case CONST_DECL:
765 return sizeof (struct tree_const_decl);
766 case TYPE_DECL:
767 return sizeof (struct tree_type_decl);
768 case FUNCTION_DECL:
769 return sizeof (struct tree_function_decl);
770 case DEBUG_EXPR_DECL:
771 return sizeof (struct tree_decl_with_rtl);
772 case TRANSLATION_UNIT_DECL:
773 return sizeof (struct tree_translation_unit_decl);
774 case NAMESPACE_DECL:
775 case IMPORTED_DECL:
776 case NAMELIST_DECL:
777 return sizeof (struct tree_decl_non_common);
778 default:
779 return lang_hooks.tree_size (code);
780 }
781 }
782
783 case tcc_type: /* a type node */
784 return sizeof (struct tree_type_non_common);
785
786 case tcc_reference: /* a reference */
787 case tcc_expression: /* an expression */
788 case tcc_statement: /* an expression with side effects */
789 case tcc_comparison: /* a comparison expression */
790 case tcc_unary: /* a unary arithmetic expression */
791 case tcc_binary: /* a binary arithmetic expression */
792 return (sizeof (struct tree_exp)
793 + (TREE_CODE_LENGTH (code) - 1) * sizeof (tree));
794
795 case tcc_constant: /* a constant */
796 switch (code)
797 {
798 case VOID_CST: return sizeof (struct tree_typed);
799 case INTEGER_CST: gcc_unreachable ();
800 case REAL_CST: return sizeof (struct tree_real_cst);
801 case FIXED_CST: return sizeof (struct tree_fixed_cst);
802 case COMPLEX_CST: return sizeof (struct tree_complex);
803 case VECTOR_CST: return sizeof (struct tree_vector);
804 case STRING_CST: gcc_unreachable ();
805 default:
806 return lang_hooks.tree_size (code);
807 }
808
809 case tcc_exceptional: /* something random, like an identifier. */
810 switch (code)
811 {
812 case IDENTIFIER_NODE: return lang_hooks.identifier_size;
813 case TREE_LIST: return sizeof (struct tree_list);
814
815 case ERROR_MARK:
816 case PLACEHOLDER_EXPR: return sizeof (struct tree_common);
817
818 case TREE_VEC:
819 case OMP_CLAUSE: gcc_unreachable ();
820
821 case SSA_NAME: return sizeof (struct tree_ssa_name);
822
823 case STATEMENT_LIST: return sizeof (struct tree_statement_list);
824 case BLOCK: return sizeof (struct tree_block);
825 case CONSTRUCTOR: return sizeof (struct tree_constructor);
826 case OPTIMIZATION_NODE: return sizeof (struct tree_optimization_option);
827 case TARGET_OPTION_NODE: return sizeof (struct tree_target_option);
828
829 default:
830 return lang_hooks.tree_size (code);
831 }
832
833 default:
834 gcc_unreachable ();
835 }
836 }
837
838 /* Compute the number of bytes occupied by NODE. This routine only
839 looks at TREE_CODE, except for those nodes that have variable sizes. */
840 size_t
841 tree_size (const_tree node)
842 {
843 const enum tree_code code = TREE_CODE (node);
844 switch (code)
845 {
846 case INTEGER_CST:
847 return (sizeof (struct tree_int_cst)
848 + (TREE_INT_CST_EXT_NUNITS (node) - 1) * sizeof (HOST_WIDE_INT));
849
850 case TREE_BINFO:
851 return (offsetof (struct tree_binfo, base_binfos)
852 + vec<tree, va_gc>
853 ::embedded_size (BINFO_N_BASE_BINFOS (node)));
854
855 case TREE_VEC:
856 return (sizeof (struct tree_vec)
857 + (TREE_VEC_LENGTH (node) - 1) * sizeof (tree));
858
859 case VECTOR_CST:
860 return (sizeof (struct tree_vector)
861 + (TYPE_VECTOR_SUBPARTS (TREE_TYPE (node)) - 1) * sizeof (tree));
862
863 case STRING_CST:
864 return TREE_STRING_LENGTH (node) + offsetof (struct tree_string, str) + 1;
865
866 case OMP_CLAUSE:
867 return (sizeof (struct tree_omp_clause)
868 + (omp_clause_num_ops[OMP_CLAUSE_CODE (node)] - 1)
869 * sizeof (tree));
870
871 default:
872 if (TREE_CODE_CLASS (code) == tcc_vl_exp)
873 return (sizeof (struct tree_exp)
874 + (VL_EXP_OPERAND_LENGTH (node) - 1) * sizeof (tree));
875 else
876 return tree_code_size (code);
877 }
878 }
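/* Worked example (illustrative): for a TREE_VEC of length 4, tree_size
   returns sizeof (struct tree_vec) + 3 * sizeof (tree), because one
   operand slot is already part of struct tree_vec.  The same pattern
   (size of the base structure plus LENGTH - 1 extra slots) applies to
   INTEGER_CST, VECTOR_CST, OMP_CLAUSE and tcc_vl_exp nodes above.  */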
879
880 /* Record interesting allocation statistics for a tree node with CODE
881 and LENGTH. */
882
883 static void
884 record_node_allocation_statistics (enum tree_code code ATTRIBUTE_UNUSED,
885 size_t length ATTRIBUTE_UNUSED)
886 {
887 enum tree_code_class type = TREE_CODE_CLASS (code);
888 tree_node_kind kind;
889
890 if (!GATHER_STATISTICS)
891 return;
892
893 switch (type)
894 {
895 case tcc_declaration: /* A decl node */
896 kind = d_kind;
897 break;
898
899 case tcc_type: /* a type node */
900 kind = t_kind;
901 break;
902
903 case tcc_statement: /* an expression with side effects */
904 kind = s_kind;
905 break;
906
907 case tcc_reference: /* a reference */
908 kind = r_kind;
909 break;
910
911 case tcc_expression: /* an expression */
912 case tcc_comparison: /* a comparison expression */
913 case tcc_unary: /* a unary arithmetic expression */
914 case tcc_binary: /* a binary arithmetic expression */
915 kind = e_kind;
916 break;
917
918 case tcc_constant: /* a constant */
919 kind = c_kind;
920 break;
921
922 case tcc_exceptional: /* something random, like an identifier. */
923 switch (code)
924 {
925 case IDENTIFIER_NODE:
926 kind = id_kind;
927 break;
928
929 case TREE_VEC:
930 kind = vec_kind;
931 break;
932
933 case TREE_BINFO:
934 kind = binfo_kind;
935 break;
936
937 case SSA_NAME:
938 kind = ssa_name_kind;
939 break;
940
941 case BLOCK:
942 kind = b_kind;
943 break;
944
945 case CONSTRUCTOR:
946 kind = constr_kind;
947 break;
948
949 case OMP_CLAUSE:
950 kind = omp_clause_kind;
951 break;
952
953 default:
954 kind = x_kind;
955 break;
956 }
957 break;
958
959 case tcc_vl_exp:
960 kind = e_kind;
961 break;
962
963 default:
964 gcc_unreachable ();
965 }
966
967 tree_code_counts[(int) code]++;
968 tree_node_counts[(int) kind]++;
969 tree_node_sizes[(int) kind] += length;
970 }
971
972 /* Allocate and return a new UID from the DECL_UID namespace. */
973
974 int
975 allocate_decl_uid (void)
976 {
977 return next_decl_uid++;
978 }
979
980 /* Return a newly allocated node of code CODE. For decl and type
981 nodes, some other fields are initialized. The rest of the node is
982 initialized to zero. This function cannot be used for TREE_VEC,
983 INTEGER_CST or OMP_CLAUSE nodes, which is enforced by asserts in
984 tree_code_size.
985
986 Achoo! I got a code in the node. */
987
988 tree
989 make_node_stat (enum tree_code code MEM_STAT_DECL)
990 {
991 tree t;
992 enum tree_code_class type = TREE_CODE_CLASS (code);
993 size_t length = tree_code_size (code);
994
995 record_node_allocation_statistics (code, length);
996
997 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
998 TREE_SET_CODE (t, code);
999
1000 switch (type)
1001 {
1002 case tcc_statement:
1003 TREE_SIDE_EFFECTS (t) = 1;
1004 break;
1005
1006 case tcc_declaration:
1007 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1008 {
1009 if (code == FUNCTION_DECL)
1010 {
1011 DECL_ALIGN (t) = FUNCTION_BOUNDARY;
1012 DECL_MODE (t) = FUNCTION_MODE;
1013 }
1014 else
1015 DECL_ALIGN (t) = 1;
1016 }
1017 DECL_SOURCE_LOCATION (t) = input_location;
1018 if (TREE_CODE (t) == DEBUG_EXPR_DECL)
1019 DECL_UID (t) = --next_debug_decl_uid;
1020 else
1021 {
1022 DECL_UID (t) = allocate_decl_uid ();
1023 SET_DECL_PT_UID (t, -1);
1024 }
1025 if (TREE_CODE (t) == LABEL_DECL)
1026 LABEL_DECL_UID (t) = -1;
1027
1028 break;
1029
1030 case tcc_type:
1031 TYPE_UID (t) = next_type_uid++;
1032 TYPE_ALIGN (t) = BITS_PER_UNIT;
1033 TYPE_USER_ALIGN (t) = 0;
1034 TYPE_MAIN_VARIANT (t) = t;
1035 TYPE_CANONICAL (t) = t;
1036
1037 /* Default to no attributes for type, but let target change that. */
1038 TYPE_ATTRIBUTES (t) = NULL_TREE;
1039 targetm.set_default_type_attributes (t);
1040
1041 /* We have not yet computed the alias set for this type. */
1042 TYPE_ALIAS_SET (t) = -1;
1043 break;
1044
1045 case tcc_constant:
1046 TREE_CONSTANT (t) = 1;
1047 break;
1048
1049 case tcc_expression:
1050 switch (code)
1051 {
1052 case INIT_EXPR:
1053 case MODIFY_EXPR:
1054 case VA_ARG_EXPR:
1055 case PREDECREMENT_EXPR:
1056 case PREINCREMENT_EXPR:
1057 case POSTDECREMENT_EXPR:
1058 case POSTINCREMENT_EXPR:
1059 /* All of these have side-effects, no matter what their
1060 operands are. */
1061 TREE_SIDE_EFFECTS (t) = 1;
1062 break;
1063
1064 default:
1065 break;
1066 }
1067 break;
1068
1069 case tcc_exceptional:
1070 switch (code)
1071 {
1072 case TARGET_OPTION_NODE:
1073 TREE_TARGET_OPTION(t)
1074 = ggc_cleared_alloc<struct cl_target_option> ();
1075 break;
1076
1077 case OPTIMIZATION_NODE:
1078 TREE_OPTIMIZATION (t)
1079 = ggc_cleared_alloc<struct cl_optimization> ();
1080 break;
1081
1082 default:
1083 break;
1084 }
1085 break;
1086
1087 default:
1088 /* Other classes need no special treatment. */
1089 break;
1090 }
1091
1092 return t;
1093 }
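/* Usage sketch (illustrative, not from the original file): front ends
   usually go through build_decl rather than calling make_node directly,
   but the initialization is the one shown above, e.g.
       tree var = make_node (VAR_DECL);
   yields a zeroed node with a fresh DECL_UID, DECL_ALIGN of 1 and
   DECL_SOURCE_LOCATION set to input_location.  */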
1094 \f
1095 /* Return a new node with the same contents as NODE except that its
1096 TREE_CHAIN, if it has one, is zero and it has a fresh uid. */
1097
1098 tree
1099 copy_node_stat (tree node MEM_STAT_DECL)
1100 {
1101 tree t;
1102 enum tree_code code = TREE_CODE (node);
1103 size_t length;
1104
1105 gcc_assert (code != STATEMENT_LIST);
1106
1107 length = tree_size (node);
1108 record_node_allocation_statistics (code, length);
1109 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
1110 memcpy (t, node, length);
1111
1112 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
1113 TREE_CHAIN (t) = 0;
1114 TREE_ASM_WRITTEN (t) = 0;
1115 TREE_VISITED (t) = 0;
1116
1117 if (TREE_CODE_CLASS (code) == tcc_declaration)
1118 {
1119 if (code == DEBUG_EXPR_DECL)
1120 DECL_UID (t) = --next_debug_decl_uid;
1121 else
1122 {
1123 DECL_UID (t) = allocate_decl_uid ();
1124 if (DECL_PT_UID_SET_P (node))
1125 SET_DECL_PT_UID (t, DECL_PT_UID (node));
1126 }
1127 if ((TREE_CODE (node) == PARM_DECL || TREE_CODE (node) == VAR_DECL)
1128 && DECL_HAS_VALUE_EXPR_P (node))
1129 {
1130 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (node));
1131 DECL_HAS_VALUE_EXPR_P (t) = 1;
1132 }
1133 /* DECL_DEBUG_EXPR is copied explicitly by callers. */
1134 if (TREE_CODE (node) == VAR_DECL)
1135 {
1136 DECL_HAS_DEBUG_EXPR_P (t) = 0;
1137 t->decl_with_vis.symtab_node = NULL;
1138 }
1139 if (TREE_CODE (node) == VAR_DECL && DECL_HAS_INIT_PRIORITY_P (node))
1140 {
1141 SET_DECL_INIT_PRIORITY (t, DECL_INIT_PRIORITY (node));
1142 DECL_HAS_INIT_PRIORITY_P (t) = 1;
1143 }
1144 if (TREE_CODE (node) == FUNCTION_DECL)
1145 {
1146 DECL_STRUCT_FUNCTION (t) = NULL;
1147 t->decl_with_vis.symtab_node = NULL;
1148 }
1149 }
1150 else if (TREE_CODE_CLASS (code) == tcc_type)
1151 {
1152 TYPE_UID (t) = next_type_uid++;
1153 /* The following is so that the debug code for
1154 the copy is different from the original type.
1155 The two statements usually duplicate each other
1156 (because they clear fields of the same union),
1157 but the optimizer should catch that. */
1158 TYPE_SYMTAB_POINTER (t) = 0;
1159 TYPE_SYMTAB_ADDRESS (t) = 0;
1160
1161 /* Do not copy the values cache. */
1162 if (TYPE_CACHED_VALUES_P (t))
1163 {
1164 TYPE_CACHED_VALUES_P (t) = 0;
1165 TYPE_CACHED_VALUES (t) = NULL_TREE;
1166 }
1167 }
1168 else if (code == TARGET_OPTION_NODE)
1169 {
1170 TREE_TARGET_OPTION (t) = ggc_alloc<struct cl_target_option>();
1171 memcpy (TREE_TARGET_OPTION (t), TREE_TARGET_OPTION (node),
1172 sizeof (struct cl_target_option));
1173 }
1174 else if (code == OPTIMIZATION_NODE)
1175 {
1176 TREE_OPTIMIZATION (t) = ggc_alloc<struct cl_optimization>();
1177 memcpy (TREE_OPTIMIZATION (t), TREE_OPTIMIZATION (node),
1178 sizeof (struct cl_optimization));
1179 }
1180
1181 return t;
1182 }
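/* Illustrative note: copy_node (the wrapper around copy_node_stat) gives
   a bitwise copy with a fresh identity, e.g.
       tree copy = copy_node (decl);
   keeps the contents of DECL in COPY but clears TREE_CHAIN,
   TREE_ASM_WRITTEN and TREE_VISITED, and assigns a new DECL_UID (or a
   new TYPE_UID for types), as implemented above.  */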
1183
1184 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
1185 For example, this can copy a list made of TREE_LIST nodes. */
1186
1187 tree
1188 copy_list (tree list)
1189 {
1190 tree head;
1191 tree prev, next;
1192
1193 if (list == 0)
1194 return 0;
1195
1196 head = prev = copy_node (list);
1197 next = TREE_CHAIN (list);
1198 while (next)
1199 {
1200 TREE_CHAIN (prev) = copy_node (next);
1201 prev = TREE_CHAIN (prev);
1202 next = TREE_CHAIN (next);
1203 }
1204 return head;
1205 }
1206
1207 \f
1208 /* Return the value that TREE_INT_CST_EXT_NUNITS should have for an
1209 INTEGER_CST with value CST and type TYPE. */
1210
1211 static unsigned int
1212 get_int_cst_ext_nunits (tree type, const wide_int &cst)
1213 {
1214 gcc_checking_assert (cst.get_precision () == TYPE_PRECISION (type));
1215 /* We need an extra zero HWI if CST is an unsigned integer with its
1216 upper bit set, and if CST occupies a whole number of HWIs. */
1217 if (TYPE_UNSIGNED (type)
1218 && wi::neg_p (cst)
1219 && (cst.get_precision () % HOST_BITS_PER_WIDE_INT) == 0)
1220 return cst.get_precision () / HOST_BITS_PER_WIDE_INT + 1;
1221 return cst.get_len ();
1222 }
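/* Worked example (illustrative): with 64-bit HOST_WIDE_INTs, the value
   0x8000000000000000 in an unsigned 64-bit type has its upper bit set
   and occupies a whole number of HWIs, so get_int_cst_ext_nunits
   returns 2; the extra element is the leading zero HWI that keeps the
   extended value non-negative.  A signed type of the same precision
   would simply return the wide_int's own length.  */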
1223
1224 /* Return a new INTEGER_CST with value CST and type TYPE. */
1225
1226 static tree
1227 build_new_int_cst (tree type, const wide_int &cst)
1228 {
1229 unsigned int len = cst.get_len ();
1230 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1231 tree nt = make_int_cst (len, ext_len);
1232
1233 if (len < ext_len)
1234 {
1235 --ext_len;
1236 TREE_INT_CST_ELT (nt, ext_len) = 0;
1237 for (unsigned int i = len; i < ext_len; ++i)
1238 TREE_INT_CST_ELT (nt, i) = -1;
1239 }
1240 else if (TYPE_UNSIGNED (type)
1241 && cst.get_precision () < len * HOST_BITS_PER_WIDE_INT)
1242 {
1243 len--;
1244 TREE_INT_CST_ELT (nt, len)
1245 = zext_hwi (cst.elt (len),
1246 cst.get_precision () % HOST_BITS_PER_WIDE_INT);
1247 }
1248
1249 for (unsigned int i = 0; i < len; i++)
1250 TREE_INT_CST_ELT (nt, i) = cst.elt (i);
1251 TREE_TYPE (nt) = type;
1252 return nt;
1253 }
1254
1255 /* Create an INT_CST node with a LOW value sign extended to TYPE. */
1256
1257 tree
1258 build_int_cst (tree type, HOST_WIDE_INT low)
1259 {
1260 /* Support legacy code. */
1261 if (!type)
1262 type = integer_type_node;
1263
1264 return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
1265 }
1266
1267 tree
1268 build_int_cstu (tree type, unsigned HOST_WIDE_INT cst)
1269 {
1270 return wide_int_to_tree (type, wi::uhwi (cst, TYPE_PRECISION (type)));
1271 }
1272
1273 /* Create an INT_CST node with a LOW value sign extended to TYPE. */
1274
1275 tree
1276 build_int_cst_type (tree type, HOST_WIDE_INT low)
1277 {
1278 gcc_assert (type);
1279 return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
1280 }
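/* Usage sketch (illustrative): the most common entry points are calls
   such as
       tree four = build_int_cst (integer_type_node, 4);
       tree kb = build_int_cstu (size_type_node, 1024);
   both of which funnel into wide_int_to_tree below, so equal values of
   the same type end up sharing a single INTEGER_CST node.  */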
1281
1282 /* Constructs a tree of type TYPE with the value given by CST.  Signedness
1283 of CST is assumed to be the same as the signedness of TYPE. */
1284
1285 tree
1286 double_int_to_tree (tree type, double_int cst)
1287 {
1288 return wide_int_to_tree (type, widest_int::from (cst, TYPE_SIGN (type)));
1289 }
1290
1291 /* We force the wide_int CST to the range of the type TYPE by sign or
1292 zero extending it. OVERFLOWABLE indicates if we are interested in
1293 overflow of the value, when >0 we are only interested in signed
1294 overflow, for <0 we are interested in any overflow. OVERFLOWED
1295 indicates whether overflow has already occurred.  We force
1296 the value to be within range of TYPE (by setting to 0 or 1 all
1297 the bits outside the type's range).  We set TREE_OVERFLOW if
1298 OVERFLOWED is nonzero,
1299 or OVERFLOWABLE is >0 and signed overflow occurs,
1300 or OVERFLOWABLE is <0 and any overflow occurs.
1302 We return a new tree node for the extended wide_int. The node
1303 is shared if no overflow flags are set. */
1304
1305
1306 tree
1307 force_fit_type (tree type, const wide_int_ref &cst,
1308 int overflowable, bool overflowed)
1309 {
1310 signop sign = TYPE_SIGN (type);
1311
1312 /* If we need to set overflow flags, return a new unshared node. */
1313 if (overflowed || !wi::fits_to_tree_p (cst, type))
1314 {
1315 if (overflowed
1316 || overflowable < 0
1317 || (overflowable > 0 && sign == SIGNED))
1318 {
1319 wide_int tmp = wide_int::from (cst, TYPE_PRECISION (type), sign);
1320 tree t = build_new_int_cst (type, tmp);
1321 TREE_OVERFLOW (t) = 1;
1322 return t;
1323 }
1324 }
1325
1326 /* Else build a shared node. */
1327 return wide_int_to_tree (type, cst);
1328 }
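/* Worked example (illustrative): forcing the value 300 into an 8-bit
   unsigned type with OVERFLOWABLE < 0 truncates it to 44 (300 mod 256)
   and returns an unshared INTEGER_CST with TREE_OVERFLOW set, because
   300 does not fit the type; the same call with a value that does fit
   falls through to wide_int_to_tree and returns a shared node.  */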
1329
1330 /* These are the hash table functions for the hash table of INTEGER_CST
1331 nodes. */
1332
1333 /* Return the hash code X, an INTEGER_CST. */
1334
1335 hashval_t
1336 int_cst_hasher::hash (tree x)
1337 {
1338 const_tree const t = x;
1339 hashval_t code = TYPE_UID (TREE_TYPE (t));
1340 int i;
1341
1342 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
1343 code ^= TREE_INT_CST_ELT (t, i);
1344
1345 return code;
1346 }
1347
1348 /* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
1349 is the same as that given by *Y, which is also an INTEGER_CST tree node. */
1350
1351 bool
1352 int_cst_hasher::equal (tree x, tree y)
1353 {
1354 const_tree const xt = x;
1355 const_tree const yt = y;
1356
1357 if (TREE_TYPE (xt) != TREE_TYPE (yt)
1358 || TREE_INT_CST_NUNITS (xt) != TREE_INT_CST_NUNITS (yt)
1359 || TREE_INT_CST_EXT_NUNITS (xt) != TREE_INT_CST_EXT_NUNITS (yt))
1360 return false;
1361
1362 for (int i = 0; i < TREE_INT_CST_NUNITS (xt); i++)
1363 if (TREE_INT_CST_ELT (xt, i) != TREE_INT_CST_ELT (yt, i))
1364 return false;
1365
1366 return true;
1367 }
1368
1369 /* Create an INT_CST node of TYPE and value CST.
1370 The returned node is always shared. For small integers we use a
1371 per-type vector cache, for larger ones we use a single hash table.
1372 The value is extended from its precision according to the sign of
1373 the type to be a multiple of HOST_BITS_PER_WIDE_INT. This defines
1374 the upper bits and ensures that hashing and value equality based
1375 upon the underlying HOST_WIDE_INTs works without masking. */
1376
1377 tree
1378 wide_int_to_tree (tree type, const wide_int_ref &pcst)
1379 {
1380 tree t;
1381 int ix = -1;
1382 int limit = 0;
1383
1384 gcc_assert (type);
1385 unsigned int prec = TYPE_PRECISION (type);
1386 signop sgn = TYPE_SIGN (type);
1387
1388 /* Verify that everything is canonical. */
1389 int l = pcst.get_len ();
1390 if (l > 1)
1391 {
1392 if (pcst.elt (l - 1) == 0)
1393 gcc_checking_assert (pcst.elt (l - 2) < 0);
1394 if (pcst.elt (l - 1) == (HOST_WIDE_INT) -1)
1395 gcc_checking_assert (pcst.elt (l - 2) >= 0);
1396 }
1397
1398 wide_int cst = wide_int::from (pcst, prec, sgn);
1399 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1400
1401 if (ext_len == 1)
1402 {
1403 /* We just need to store a single HOST_WIDE_INT. */
1404 HOST_WIDE_INT hwi;
1405 if (TYPE_UNSIGNED (type))
1406 hwi = cst.to_uhwi ();
1407 else
1408 hwi = cst.to_shwi ();
1409
1410 switch (TREE_CODE (type))
1411 {
1412 case NULLPTR_TYPE:
1413 gcc_assert (hwi == 0);
1414 /* Fallthru. */
1415
1416 case POINTER_TYPE:
1417 case REFERENCE_TYPE:
1418 case POINTER_BOUNDS_TYPE:
1419 /* Cache NULL pointer and zero bounds. */
1420 if (hwi == 0)
1421 {
1422 limit = 1;
1423 ix = 0;
1424 }
1425 break;
1426
1427 case BOOLEAN_TYPE:
1428 /* Cache false or true. */
1429 limit = 2;
1430 if (hwi < 2)
1431 ix = hwi;
1432 break;
1433
1434 case INTEGER_TYPE:
1435 case OFFSET_TYPE:
1436 if (TYPE_SIGN (type) == UNSIGNED)
1437 {
1438 /* Cache [0, N). */
1439 limit = INTEGER_SHARE_LIMIT;
1440 if (IN_RANGE (hwi, 0, INTEGER_SHARE_LIMIT - 1))
1441 ix = hwi;
1442 }
1443 else
1444 {
1445 /* Cache [-1, N). */
1446 limit = INTEGER_SHARE_LIMIT + 1;
1447 if (IN_RANGE (hwi, -1, INTEGER_SHARE_LIMIT - 1))
1448 ix = hwi + 1;
1449 }
1450 break;
1451
1452 case ENUMERAL_TYPE:
1453 break;
1454
1455 default:
1456 gcc_unreachable ();
1457 }
1458
1459 if (ix >= 0)
1460 {
1461 /* Look for it in the type's vector of small shared ints. */
1462 if (!TYPE_CACHED_VALUES_P (type))
1463 {
1464 TYPE_CACHED_VALUES_P (type) = 1;
1465 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1466 }
1467
1468 t = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix);
1469 if (t)
1470 /* Make sure no one is clobbering the shared constant. */
1471 gcc_checking_assert (TREE_TYPE (t) == type
1472 && TREE_INT_CST_NUNITS (t) == 1
1473 && TREE_INT_CST_OFFSET_NUNITS (t) == 1
1474 && TREE_INT_CST_EXT_NUNITS (t) == 1
1475 && TREE_INT_CST_ELT (t, 0) == hwi);
1476 else
1477 {
1478 /* Create a new shared int. */
1479 t = build_new_int_cst (type, cst);
1480 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1481 }
1482 }
1483 else
1484 {
1485 /* Use the cache of larger shared ints, using int_cst_node as
1486 a temporary. */
1487
1488 TREE_INT_CST_ELT (int_cst_node, 0) = hwi;
1489 TREE_TYPE (int_cst_node) = type;
1490
1491 tree *slot = int_cst_hash_table->find_slot (int_cst_node, INSERT);
1492 t = *slot;
1493 if (!t)
1494 {
1495 /* Insert this one into the hash table. */
1496 t = int_cst_node;
1497 *slot = t;
1498 /* Make a new node for next time round. */
1499 int_cst_node = make_int_cst (1, 1);
1500 }
1501 }
1502 }
1503 else
1504 {
1505 /* The value either hashes properly or we drop it on the floor
1506 for the gc to take care of. There will not be enough of them
1507 to worry about. */
1508
1509 tree nt = build_new_int_cst (type, cst);
1510 tree *slot = int_cst_hash_table->find_slot (nt, INSERT);
1511 t = *slot;
1512 if (!t)
1513 {
1514 /* Insert this one into the hash table. */
1515 t = nt;
1516 *slot = t;
1517 }
1518 }
1519
1520 return t;
1521 }
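/* Illustrative note: the sharing scheme above means that repeated calls
   such as
       tree a = build_int_cst (integer_type_node, 3);
       tree b = build_int_cst (integer_type_node, 3);
   return the very same node (a == b): small values come from the
   per-type TYPE_CACHED_VALUES vector, larger ones from
   int_cst_hash_table.  */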
1522
1523 void
1524 cache_integer_cst (tree t)
1525 {
1526 tree type = TREE_TYPE (t);
1527 int ix = -1;
1528 int limit = 0;
1529 int prec = TYPE_PRECISION (type);
1530
1531 gcc_assert (!TREE_OVERFLOW (t));
1532
1533 switch (TREE_CODE (type))
1534 {
1535 case NULLPTR_TYPE:
1536 gcc_assert (integer_zerop (t));
1537 /* Fallthru. */
1538
1539 case POINTER_TYPE:
1540 case REFERENCE_TYPE:
1541 /* Cache NULL pointer. */
1542 if (integer_zerop (t))
1543 {
1544 limit = 1;
1545 ix = 0;
1546 }
1547 break;
1548
1549 case BOOLEAN_TYPE:
1550 /* Cache false or true. */
1551 limit = 2;
1552 if (wi::ltu_p (t, 2))
1553 ix = TREE_INT_CST_ELT (t, 0);
1554 break;
1555
1556 case INTEGER_TYPE:
1557 case OFFSET_TYPE:
1558 if (TYPE_UNSIGNED (type))
1559 {
1560 /* Cache 0..N */
1561 limit = INTEGER_SHARE_LIMIT;
1562
1563 /* This is a little hokey, but if the prec is smaller than
1564 what is necessary to hold INTEGER_SHARE_LIMIT, then the
1565 obvious test will not get the correct answer. */
1566 if (prec < HOST_BITS_PER_WIDE_INT)
1567 {
1568 if (tree_to_uhwi (t) < (unsigned HOST_WIDE_INT) INTEGER_SHARE_LIMIT)
1569 ix = tree_to_uhwi (t);
1570 }
1571 else if (wi::ltu_p (t, INTEGER_SHARE_LIMIT))
1572 ix = tree_to_uhwi (t);
1573 }
1574 else
1575 {
1576 /* Cache -1..N */
1577 limit = INTEGER_SHARE_LIMIT + 1;
1578
1579 if (integer_minus_onep (t))
1580 ix = 0;
1581 else if (!wi::neg_p (t))
1582 {
1583 if (prec < HOST_BITS_PER_WIDE_INT)
1584 {
1585 if (tree_to_shwi (t) < INTEGER_SHARE_LIMIT)
1586 ix = tree_to_shwi (t) + 1;
1587 }
1588 else if (wi::ltu_p (t, INTEGER_SHARE_LIMIT))
1589 ix = tree_to_shwi (t) + 1;
1590 }
1591 }
1592 break;
1593
1594 case ENUMERAL_TYPE:
1595 break;
1596
1597 default:
1598 gcc_unreachable ();
1599 }
1600
1601 if (ix >= 0)
1602 {
1603 /* Look for it in the type's vector of small shared ints. */
1604 if (!TYPE_CACHED_VALUES_P (type))
1605 {
1606 TYPE_CACHED_VALUES_P (type) = 1;
1607 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1608 }
1609
1610 gcc_assert (TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) == NULL_TREE);
1611 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1612 }
1613 else
1614 {
1615 /* Use the cache of larger shared ints. */
1616 tree *slot = int_cst_hash_table->find_slot (t, INSERT);
1617 /* If there is already an entry for the number verify it's the
1618 same. */
1619 if (*slot)
1620 gcc_assert (wi::eq_p (tree (*slot), t));
1621 else
1622 /* Otherwise insert this one into the hash table. */
1623 *slot = t;
1624 }
1625 }
1626
1627
1628 /* Builds an integer constant in TYPE such that the lowest BITS bits are ones
1629 and the rest are zeros. */
1630
1631 tree
1632 build_low_bits_mask (tree type, unsigned bits)
1633 {
1634 gcc_assert (bits <= TYPE_PRECISION (type));
1635
1636 return wide_int_to_tree (type, wi::mask (bits, false,
1637 TYPE_PRECISION (type)));
1638 }
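/* Worked example (illustrative): build_low_bits_mask (integer_type_node, 3)
   produces the INTEGER_CST 7 (binary 111), and passing BITS equal to
   TYPE_PRECISION (type) yields the all-ones value of the type, since
   wi::mask simply sets the low BITS bits.  */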
1639
1640 /* Checks that X is an integer constant that can be expressed in (unsigned)
1641 HOST_WIDE_INT without loss of precision. */
1642
1643 bool
1644 cst_and_fits_in_hwi (const_tree x)
1645 {
1646 if (TREE_CODE (x) != INTEGER_CST)
1647 return false;
1648
1649 if (TYPE_PRECISION (TREE_TYPE (x)) > HOST_BITS_PER_WIDE_INT)
1650 return false;
1651
1652 return TREE_INT_CST_NUNITS (x) == 1;
1653 }
1654
1655 /* Build a newly constructed VECTOR_CST node of length LEN. */
1656
1657 tree
1658 make_vector_stat (unsigned len MEM_STAT_DECL)
1659 {
1660 tree t;
1661 unsigned length = (len - 1) * sizeof (tree) + sizeof (struct tree_vector);
1662
1663 record_node_allocation_statistics (VECTOR_CST, length);
1664
1665 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1666
1667 TREE_SET_CODE (t, VECTOR_CST);
1668 TREE_CONSTANT (t) = 1;
1669
1670 return t;
1671 }
1672
1673 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1674 are in a list pointed to by VALS. */
1675
1676 tree
1677 build_vector_stat (tree type, tree *vals MEM_STAT_DECL)
1678 {
1679 int over = 0;
1680 unsigned cnt = 0;
1681 tree v = make_vector (TYPE_VECTOR_SUBPARTS (type));
1682 TREE_TYPE (v) = type;
1683
1684 /* Iterate through elements and check for overflow. */
1685 for (cnt = 0; cnt < TYPE_VECTOR_SUBPARTS (type); ++cnt)
1686 {
1687 tree value = vals[cnt];
1688
1689 VECTOR_CST_ELT (v, cnt) = value;
1690
1691 /* Don't crash if we get an address constant. */
1692 if (!CONSTANT_CLASS_P (value))
1693 continue;
1694
1695 over |= TREE_OVERFLOW (value);
1696 }
1697
1698 TREE_OVERFLOW (v) = over;
1699 return v;
1700 }
1701
1702 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1703 are extracted from V, a vector of CONSTRUCTOR_ELT. */
1704
1705 tree
1706 build_vector_from_ctor (tree type, vec<constructor_elt, va_gc> *v)
1707 {
1708 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
1709 unsigned HOST_WIDE_INT idx;
1710 tree value;
1711
1712 FOR_EACH_CONSTRUCTOR_VALUE (v, idx, value)
1713 vec[idx] = value;
1714 for (; idx < TYPE_VECTOR_SUBPARTS (type); ++idx)
1715 vec[idx] = build_zero_cst (TREE_TYPE (type));
1716
1717 return build_vector (type, vec);
1718 }
1719
1720 /* Build a vector of type VECTYPE where all the elements are SCs. */
1721 tree
1722 build_vector_from_val (tree vectype, tree sc)
1723 {
1724 int i, nunits = TYPE_VECTOR_SUBPARTS (vectype);
1725
1726 if (sc == error_mark_node)
1727 return sc;
1728
1729 /* Verify that the vector type is suitable for SC. Note that there
1730 is some inconsistency in the type-system with respect to restrict
1731 qualifications of pointers. Vector types always have a main-variant
1732 element type and the qualification is applied to the vector-type.
1733 So TREE_TYPE (vector-type) does not return a properly qualified
1734 vector element-type. */
1735 gcc_checking_assert (types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (sc)),
1736 TREE_TYPE (vectype)));
1737
1738 if (CONSTANT_CLASS_P (sc))
1739 {
1740 tree *v = XALLOCAVEC (tree, nunits);
1741 for (i = 0; i < nunits; ++i)
1742 v[i] = sc;
1743 return build_vector (vectype, v);
1744 }
1745 else
1746 {
1747 vec<constructor_elt, va_gc> *v;
1748 vec_alloc (v, nunits);
1749 for (i = 0; i < nunits; ++i)
1750 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, sc);
1751 return build_constructor (vectype, v);
1752 }
1753 }
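/* Usage sketch (illustrative): given a vector type VECTYPE (hypothetical
   here), splatting a constant element is simply
       tree splat
         = build_vector_from_val (vectype,
                                  build_int_cst (TREE_TYPE (vectype), 1));
   which takes the VECTOR_CST path above; a non-constant SC would instead
   produce a CONSTRUCTOR with identical elements.  */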
1754
1755 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1756 are in the vec pointed to by VALS. */
1757 tree
1758 build_constructor (tree type, vec<constructor_elt, va_gc> *vals)
1759 {
1760 tree c = make_node (CONSTRUCTOR);
1761 unsigned int i;
1762 constructor_elt *elt;
1763 bool constant_p = true;
1764 bool side_effects_p = false;
1765
1766 TREE_TYPE (c) = type;
1767 CONSTRUCTOR_ELTS (c) = vals;
1768
1769 FOR_EACH_VEC_SAFE_ELT (vals, i, elt)
1770 {
1771 /* Mostly ctors will have elts that don't have side-effects, so
1772 the usual case is to scan all the elements. Hence a single
1773 loop for both const and side effects, rather than one loop
1774 each (with early outs). */
1775 if (!TREE_CONSTANT (elt->value))
1776 constant_p = false;
1777 if (TREE_SIDE_EFFECTS (elt->value))
1778 side_effects_p = true;
1779 }
1780
1781 TREE_SIDE_EFFECTS (c) = side_effects_p;
1782 TREE_CONSTANT (c) = constant_p;
1783
1784 return c;
1785 }
1786
1787 /* Build a CONSTRUCTOR node made of a single initializer, with the specified
1788 INDEX and VALUE. */
1789 tree
1790 build_constructor_single (tree type, tree index, tree value)
1791 {
1792 vec<constructor_elt, va_gc> *v;
1793 constructor_elt elt = {index, value};
1794
1795 vec_alloc (v, 1);
1796 v->quick_push (elt);
1797
1798 return build_constructor (type, v);
1799 }
1800
1801
1802 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1803 are in a list pointed to by VALS. */
1804 tree
1805 build_constructor_from_list (tree type, tree vals)
1806 {
1807 tree t;
1808 vec<constructor_elt, va_gc> *v = NULL;
1809
1810 if (vals)
1811 {
1812 vec_alloc (v, list_length (vals));
1813 for (t = vals; t; t = TREE_CHAIN (t))
1814 CONSTRUCTOR_APPEND_ELT (v, TREE_PURPOSE (t), TREE_VALUE (t));
1815 }
1816
1817 return build_constructor (type, v);
1818 }
1819
1820 /* Return a new CONSTRUCTOR node whose type is TYPE. NELTS is the number
1821 of elements, provided as index/value pairs. */
1822
1823 tree
1824 build_constructor_va (tree type, int nelts, ...)
1825 {
1826 vec<constructor_elt, va_gc> *v = NULL;
1827 va_list p;
1828
1829 va_start (p, nelts);
1830 vec_alloc (v, nelts);
1831 while (nelts--)
1832 {
1833 tree index = va_arg (p, tree);
1834 tree value = va_arg (p, tree);
1835 CONSTRUCTOR_APPEND_ELT (v, index, value);
1836 }
1837 va_end (p);
1838 return build_constructor (type, v);
1839 }
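/* Usage sketch (illustrative): a two-element initializer of the form
   { [0] = x, [1] = y } could be built as
       tree ctor = build_constructor_va (array_type, 2,
                                         size_int (0), x,
                                         size_int (1), y);
   where ARRAY_TYPE, X and Y are hypothetical trees; the index/value
   pairs are consumed in order by the varargs loop above.  */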
1840
1841 /* Return a new FIXED_CST node whose type is TYPE and value is F. */
1842
1843 tree
1844 build_fixed (tree type, FIXED_VALUE_TYPE f)
1845 {
1846 tree v;
1847 FIXED_VALUE_TYPE *fp;
1848
1849 v = make_node (FIXED_CST);
1850 fp = ggc_alloc<fixed_value> ();
1851 memcpy (fp, &f, sizeof (FIXED_VALUE_TYPE));
1852
1853 TREE_TYPE (v) = type;
1854 TREE_FIXED_CST_PTR (v) = fp;
1855 return v;
1856 }
1857
1858 /* Return a new REAL_CST node whose type is TYPE and value is D. */
1859
1860 tree
1861 build_real (tree type, REAL_VALUE_TYPE d)
1862 {
1863 tree v;
1864 REAL_VALUE_TYPE *dp;
1865 int overflow = 0;
1866
1867 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
1868 Consider doing it via real_convert now. */
1869
1870 v = make_node (REAL_CST);
1871 dp = ggc_alloc<real_value> ();
1872 memcpy (dp, &d, sizeof (REAL_VALUE_TYPE));
1873
1874 TREE_TYPE (v) = type;
1875 TREE_REAL_CST_PTR (v) = dp;
1876 TREE_OVERFLOW (v) = overflow;
1877 return v;
1878 }
1879
1880 /* Return a REAL_VALUE_TYPE holding the integer value of the INTEGER_CST
1881 node I, converted according to the mode of TYPE. */
1882
1883 REAL_VALUE_TYPE
1884 real_value_from_int_cst (const_tree type, const_tree i)
1885 {
1886 REAL_VALUE_TYPE d;
1887
1888 /* Clear all bits of the real value type so that we can later do
1889 bitwise comparisons to see if two values are the same. */
1890 memset (&d, 0, sizeof d);
1891
1892 real_from_integer (&d, type ? TYPE_MODE (type) : VOIDmode, i,
1893 TYPE_SIGN (TREE_TYPE (i)));
1894 return d;
1895 }
1896
1897 /* Given a tree representing an integer constant I, return a tree
1898 representing the same value as a floating-point constant of type TYPE. */
1899
1900 tree
1901 build_real_from_int_cst (tree type, const_tree i)
1902 {
1903 tree v;
1904 int overflow = TREE_OVERFLOW (i);
1905
1906 v = build_real (type, real_value_from_int_cst (type, i));
1907
1908 TREE_OVERFLOW (v) |= overflow;
1909 return v;
1910 }
1911
1912 /* Return a newly constructed STRING_CST node whose value is
1913 the LEN characters at STR.
1914 Note that for a C string literal, LEN should include the trailing NUL.
1915 The TREE_TYPE is not initialized. */
1916
1917 tree
1918 build_string (int len, const char *str)
1919 {
1920 tree s;
1921 size_t length;
1922
1923 /* Do not waste bytes provided by padding of struct tree_string. */
1924 length = len + offsetof (struct tree_string, str) + 1;
1925
1926 record_node_allocation_statistics (STRING_CST, length);
1927
1928 s = (tree) ggc_internal_alloc (length);
1929
1930 memset (s, 0, sizeof (struct tree_typed));
1931 TREE_SET_CODE (s, STRING_CST);
1932 TREE_CONSTANT (s) = 1;
1933 TREE_STRING_LENGTH (s) = len;
1934 memcpy (s->string.str, str, len);
1935 s->string.str[len] = '\0';
1936
1937 return s;
1938 }
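/* Usage sketch (illustrative): for the C literal "hi" one would call
       tree s = build_string (3, "hi");
   passing a LEN of 3 so the trailing NUL is part of the constant;
   build_string stores one extra '\0' beyond LEN in any case, and the
   caller remains responsible for setting TREE_TYPE (s).  */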
1939
1940 /* Return a newly constructed COMPLEX_CST node whose value is
1941 specified by the real and imaginary parts REAL and IMAG.
1942 Both REAL and IMAG should be constant nodes. TYPE, if specified,
1943 will be the type of the COMPLEX_CST; otherwise a new type will be made. */
1944
1945 tree
1946 build_complex (tree type, tree real, tree imag)
1947 {
1948 tree t = make_node (COMPLEX_CST);
1949
1950 TREE_REALPART (t) = real;
1951 TREE_IMAGPART (t) = imag;
1952 TREE_TYPE (t) = type ? type : build_complex_type (TREE_TYPE (real));
1953 TREE_OVERFLOW (t) = TREE_OVERFLOW (real) | TREE_OVERFLOW (imag);
1954 return t;
1955 }
1956
1957 /* Return the constant 1 in type TYPE. If TYPE has several elements, each
1958 element is set to 1. In particular, this is 1 + i for complex types. */
1959
1960 tree
1961 build_each_one_cst (tree type)
1962 {
1963 if (TREE_CODE (type) == COMPLEX_TYPE)
1964 {
1965 tree scalar = build_one_cst (TREE_TYPE (type));
1966 return build_complex (type, scalar, scalar);
1967 }
1968 else
1969 return build_one_cst (type);
1970 }
1971
1972 /* Return a constant of arithmetic type TYPE which is the
1973 multiplicative identity of the set TYPE. */
1974
1975 tree
1976 build_one_cst (tree type)
1977 {
1978 switch (TREE_CODE (type))
1979 {
1980 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1981 case POINTER_TYPE: case REFERENCE_TYPE:
1982 case OFFSET_TYPE:
1983 return build_int_cst (type, 1);
1984
1985 case REAL_TYPE:
1986 return build_real (type, dconst1);
1987
1988 case FIXED_POINT_TYPE:
1989 /* We can only generate 1 for accum types. */
1990 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
1991 return build_fixed (type, FCONST1 (TYPE_MODE (type)));
1992
1993 case VECTOR_TYPE:
1994 {
1995 tree scalar = build_one_cst (TREE_TYPE (type));
1996
1997 return build_vector_from_val (type, scalar);
1998 }
1999
2000 case COMPLEX_TYPE:
2001 return build_complex (type,
2002 build_one_cst (TREE_TYPE (type)),
2003 build_zero_cst (TREE_TYPE (type)));
2004
2005 default:
2006 gcc_unreachable ();
2007 }
2008 }
2009
2010 /* Return an integer of type TYPE containing all 1's in as much precision as
2011 it contains, or a complex or vector whose subparts are such integers. */
2012
2013 tree
2014 build_all_ones_cst (tree type)
2015 {
2016 if (TREE_CODE (type) == COMPLEX_TYPE)
2017 {
2018 tree scalar = build_all_ones_cst (TREE_TYPE (type));
2019 return build_complex (type, scalar, scalar);
2020 }
2021 else
2022 return build_minus_one_cst (type);
2023 }
2024
2025 /* Return a constant of arithmetic type TYPE which is the
2026 opposite of the multiplicative identity of the set TYPE. */
2027
2028 tree
2029 build_minus_one_cst (tree type)
2030 {
2031 switch (TREE_CODE (type))
2032 {
2033 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2034 case POINTER_TYPE: case REFERENCE_TYPE:
2035 case OFFSET_TYPE:
2036 return build_int_cst (type, -1);
2037
2038 case REAL_TYPE:
2039 return build_real (type, dconstm1);
2040
2041 case FIXED_POINT_TYPE:
2042 /* We can only generate -1 for accum types. */
2043 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
2044 return build_fixed (type, fixed_from_double_int (double_int_minus_one,
2045 TYPE_MODE (type)));
2046
2047 case VECTOR_TYPE:
2048 {
2049 tree scalar = build_minus_one_cst (TREE_TYPE (type));
2050
2051 return build_vector_from_val (type, scalar);
2052 }
2053
2054 case COMPLEX_TYPE:
2055 return build_complex (type,
2056 build_minus_one_cst (TREE_TYPE (type)),
2057 build_zero_cst (TREE_TYPE (type)));
2058
2059 default:
2060 gcc_unreachable ();
2061 }
2062 }
2063
2064 /* Build 0 constant of type TYPE. This is used by constructor folding
2065 and thus the constant should be represented in memory by
2066 zero(es). */
2067
2068 tree
2069 build_zero_cst (tree type)
2070 {
2071 switch (TREE_CODE (type))
2072 {
2073 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2074 case POINTER_TYPE: case REFERENCE_TYPE:
2075 case OFFSET_TYPE: case NULLPTR_TYPE:
2076 return build_int_cst (type, 0);
2077
2078 case REAL_TYPE:
2079 return build_real (type, dconst0);
2080
2081 case FIXED_POINT_TYPE:
2082 return build_fixed (type, FCONST0 (TYPE_MODE (type)));
2083
2084 case VECTOR_TYPE:
2085 {
2086 tree scalar = build_zero_cst (TREE_TYPE (type));
2087
2088 return build_vector_from_val (type, scalar);
2089 }
2090
2091 case COMPLEX_TYPE:
2092 {
2093 tree zero = build_zero_cst (TREE_TYPE (type));
2094
2095 return build_complex (type, zero, zero);
2096 }
2097
2098 default:
2099 if (!AGGREGATE_TYPE_P (type))
2100 return fold_convert (type, integer_zero_node);
2101 return build_constructor (type, NULL);
2102 }
2103 }
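
/* For instance (illustrative), build_zero_cst (integer_type_node) is
   the INTEGER_CST 0, while for a RECORD_TYPE the default case above
   returns an empty CONSTRUCTOR, which is how an all-zero aggregate is
   represented for constructor folding.  */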
2104
2105
2106 /* Build a BINFO with BASE_BINFOS slots for base binfos. */
2107
2108 tree
2109 make_tree_binfo_stat (unsigned base_binfos MEM_STAT_DECL)
2110 {
2111 tree t;
2112 size_t length = (offsetof (struct tree_binfo, base_binfos)
2113 + vec<tree, va_gc>::embedded_size (base_binfos));
2114
2115 record_node_allocation_statistics (TREE_BINFO, length);
2116
2117 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
2118
2119 memset (t, 0, offsetof (struct tree_binfo, base_binfos));
2120
2121 TREE_SET_CODE (t, TREE_BINFO);
2122
2123 BINFO_BASE_BINFOS (t)->embedded_init (base_binfos);
2124
2125 return t;
2126 }
2127
2128 /* Create a CASE_LABEL_EXPR tree node and return it. */
2129
2130 tree
2131 build_case_label (tree low_value, tree high_value, tree label_decl)
2132 {
2133 tree t = make_node (CASE_LABEL_EXPR);
2134
2135 TREE_TYPE (t) = void_type_node;
2136 SET_EXPR_LOCATION (t, DECL_SOURCE_LOCATION (label_decl));
2137
2138 CASE_LOW (t) = low_value;
2139 CASE_HIGH (t) = high_value;
2140 CASE_LABEL (t) = label_decl;
2141 CASE_CHAIN (t) = NULL_TREE;
2142
2143 return t;
2144 }
2145
2146 /* Build a newly constructed INTEGER_CST node. LEN and EXT_LEN are the
2147 values of TREE_INT_CST_NUNITS and TREE_INT_CST_EXT_NUNITS respectively.
2148 The latter determines the length of the HOST_WIDE_INT vector. */
2149
2150 tree
2151 make_int_cst_stat (int len, int ext_len MEM_STAT_DECL)
2152 {
2153 tree t;
2154 int length = ((ext_len - 1) * sizeof (HOST_WIDE_INT)
2155 + sizeof (struct tree_int_cst));
2156
2157 gcc_assert (len);
2158 record_node_allocation_statistics (INTEGER_CST, length);
2159
2160 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2161
2162 TREE_SET_CODE (t, INTEGER_CST);
2163 TREE_INT_CST_NUNITS (t) = len;
2164 TREE_INT_CST_EXT_NUNITS (t) = ext_len;
2165 /* to_offset can only be applied to trees that are offset_int-sized
2166 or smaller. EXT_LEN is correct if it fits, otherwise the constant
2167 must be exactly the precision of offset_int and so LEN is correct. */
2168 if (ext_len <= OFFSET_INT_ELTS)
2169 TREE_INT_CST_OFFSET_NUNITS (t) = ext_len;
2170 else
2171 TREE_INT_CST_OFFSET_NUNITS (t) = len;
2172
2173 TREE_CONSTANT (t) = 1;
2174
2175 return t;
2176 }
2177
2178 /* Build a newly constructed TREE_VEC node of length LEN. */
2179
2180 tree
2181 make_tree_vec_stat (int len MEM_STAT_DECL)
2182 {
2183 tree t;
2184 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2185
2186 record_node_allocation_statistics (TREE_VEC, length);
2187
2188 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2189
2190 TREE_SET_CODE (t, TREE_VEC);
2191 TREE_VEC_LENGTH (t) = len;
2192
2193 return t;
2194 }
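
/* A minimal usage sketch (illustrative only; make_tree_vec is the
   macro that wraps this function with memory-statistics info):

     tree v = make_tree_vec (2);
     TREE_VEC_ELT (v, 0) = integer_zero_node;
     TREE_VEC_ELT (v, 1) = integer_one_node;

   The node is allocated cleared, so unassigned slots are NULL_TREE.  */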
2195
2196 /* Grow a TREE_VEC node to new length LEN. */
2197
2198 tree
2199 grow_tree_vec_stat (tree v, int len MEM_STAT_DECL)
2200 {
2201 gcc_assert (TREE_CODE (v) == TREE_VEC);
2202
2203 int oldlen = TREE_VEC_LENGTH (v);
2204 gcc_assert (len > oldlen);
2205
2206 int oldlength = (oldlen - 1) * sizeof (tree) + sizeof (struct tree_vec);
2207 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2208
2209 record_node_allocation_statistics (TREE_VEC, length - oldlength);
2210
2211 v = (tree) ggc_realloc (v, length PASS_MEM_STAT);
2212
2213 TREE_VEC_LENGTH (v) = len;
2214
2215 return v;
2216 }
2217 \f
2218 /* Return 1 if EXPR is the integer constant zero or a complex constant
2219 of zero. */
2220
2221 int
2222 integer_zerop (const_tree expr)
2223 {
2224 STRIP_NOPS (expr);
2225
2226 switch (TREE_CODE (expr))
2227 {
2228 case INTEGER_CST:
2229 return wi::eq_p (expr, 0);
2230 case COMPLEX_CST:
2231 return (integer_zerop (TREE_REALPART (expr))
2232 && integer_zerop (TREE_IMAGPART (expr)));
2233 case VECTOR_CST:
2234 {
2235 unsigned i;
2236 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2237 if (!integer_zerop (VECTOR_CST_ELT (expr, i)))
2238 return false;
2239 return true;
2240 }
2241 default:
2242 return false;
2243 }
2244 }
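
/* Illustrative: integer_zerop (build_int_cst (integer_type_node, 0))
   returns 1, as does a COMPLEX_CST or VECTOR_CST whose parts are all
   integer zero; STRIP_NOPS means a value wrapped in a NOP conversion
   is looked through first.  */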
2245
2246 /* Return 1 if EXPR is the integer constant one or the corresponding
2247 complex constant. */
2248
2249 int
2250 integer_onep (const_tree expr)
2251 {
2252 STRIP_NOPS (expr);
2253
2254 switch (TREE_CODE (expr))
2255 {
2256 case INTEGER_CST:
2257 return wi::eq_p (wi::to_widest (expr), 1);
2258 case COMPLEX_CST:
2259 return (integer_onep (TREE_REALPART (expr))
2260 && integer_zerop (TREE_IMAGPART (expr)));
2261 case VECTOR_CST:
2262 {
2263 unsigned i;
2264 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2265 if (!integer_onep (VECTOR_CST_ELT (expr, i)))
2266 return false;
2267 return true;
2268 }
2269 default:
2270 return false;
2271 }
2272 }
2273
2274 /* Return 1 if EXPR is the integer constant one. For complex and vector,
2275 return 1 if every piece is the integer constant one. */
2276
2277 int
2278 integer_each_onep (const_tree expr)
2279 {
2280 STRIP_NOPS (expr);
2281
2282 if (TREE_CODE (expr) == COMPLEX_CST)
2283 return (integer_onep (TREE_REALPART (expr))
2284 && integer_onep (TREE_IMAGPART (expr)));
2285 else
2286 return integer_onep (expr);
2287 }
2288
2289 /* Return 1 if EXPR is an integer containing all 1's in as much precision as
2290 it contains, or a complex or vector whose subparts are such integers. */
2291
2292 int
2293 integer_all_onesp (const_tree expr)
2294 {
2295 STRIP_NOPS (expr);
2296
2297 if (TREE_CODE (expr) == COMPLEX_CST
2298 && integer_all_onesp (TREE_REALPART (expr))
2299 && integer_all_onesp (TREE_IMAGPART (expr)))
2300 return 1;
2301
2302 else if (TREE_CODE (expr) == VECTOR_CST)
2303 {
2304 unsigned i;
2305 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2306 if (!integer_all_onesp (VECTOR_CST_ELT (expr, i)))
2307 return 0;
2308 return 1;
2309 }
2310
2311 else if (TREE_CODE (expr) != INTEGER_CST)
2312 return 0;
2313
2314 return wi::max_value (TYPE_PRECISION (TREE_TYPE (expr)), UNSIGNED) == expr;
2315 }
2316
2317 /* Return 1 if EXPR is the integer constant minus one. */
2318
2319 int
2320 integer_minus_onep (const_tree expr)
2321 {
2322 STRIP_NOPS (expr);
2323
2324 if (TREE_CODE (expr) == COMPLEX_CST)
2325 return (integer_all_onesp (TREE_REALPART (expr))
2326 && integer_zerop (TREE_IMAGPART (expr)));
2327 else
2328 return integer_all_onesp (expr);
2329 }
2330
2331 /* Return 1 if EXPR is an integer constant that is a power of 2 (i.e., has only
2332 one bit on). */
2333
2334 int
2335 integer_pow2p (const_tree expr)
2336 {
2337 STRIP_NOPS (expr);
2338
2339 if (TREE_CODE (expr) == COMPLEX_CST
2340 && integer_pow2p (TREE_REALPART (expr))
2341 && integer_zerop (TREE_IMAGPART (expr)))
2342 return 1;
2343
2344 if (TREE_CODE (expr) != INTEGER_CST)
2345 return 0;
2346
2347 return wi::popcount (expr) == 1;
2348 }
2349
2350 /* Return 1 if EXPR is an integer constant other than zero or a
2351 complex constant other than zero. */
2352
2353 int
2354 integer_nonzerop (const_tree expr)
2355 {
2356 STRIP_NOPS (expr);
2357
2358 return ((TREE_CODE (expr) == INTEGER_CST
2359 && !wi::eq_p (expr, 0))
2360 || (TREE_CODE (expr) == COMPLEX_CST
2361 && (integer_nonzerop (TREE_REALPART (expr))
2362 || integer_nonzerop (TREE_IMAGPART (expr)))));
2363 }
2364
2365 /* Return 1 if EXPR is the integer constant one. For vector,
2366 return 1 if every piece is the integer constant minus one
2367 (representing the value TRUE). */
2368
2369 int
2370 integer_truep (const_tree expr)
2371 {
2372 STRIP_NOPS (expr);
2373
2374 if (TREE_CODE (expr) == VECTOR_CST)
2375 return integer_all_onesp (expr);
2376 return integer_onep (expr);
2377 }
2378
2379 /* Return 1 if EXPR is the fixed-point constant zero. */
2380
2381 int
2382 fixed_zerop (const_tree expr)
2383 {
2384 return (TREE_CODE (expr) == FIXED_CST
2385 && TREE_FIXED_CST (expr).data.is_zero ());
2386 }
2387
2388 /* Return the base-2 logarithm of a tree node known to be a
2389 power of two. */
2390
2391 int
2392 tree_log2 (const_tree expr)
2393 {
2394 STRIP_NOPS (expr);
2395
2396 if (TREE_CODE (expr) == COMPLEX_CST)
2397 return tree_log2 (TREE_REALPART (expr));
2398
2399 return wi::exact_log2 (expr);
2400 }
2401
2402 /* Similar, but return the largest integer Y such that 2 ** Y is less
2403 than or equal to EXPR. */
2404
2405 int
2406 tree_floor_log2 (const_tree expr)
2407 {
2408 STRIP_NOPS (expr);
2409
2410 if (TREE_CODE (expr) == COMPLEX_CST)
2411 return tree_log2 (TREE_REALPART (expr));
2412
2413 return wi::floor_log2 (expr);
2414 }
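
/* E.g. tree_log2 of the INTEGER_CST 8 is 3, while tree_floor_log2 of
   the INTEGER_CST 9 is also 3 (an illustrative note).  */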
2415
2416 /* Return number of known trailing zero bits in EXPR, or, if the value of
2417 EXPR is known to be zero, the precision of its type. */
2418
2419 unsigned int
2420 tree_ctz (const_tree expr)
2421 {
2422 if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
2423 && !POINTER_TYPE_P (TREE_TYPE (expr)))
2424 return 0;
2425
2426 unsigned int ret1, ret2, prec = TYPE_PRECISION (TREE_TYPE (expr));
2427 switch (TREE_CODE (expr))
2428 {
2429 case INTEGER_CST:
2430 ret1 = wi::ctz (expr);
2431 return MIN (ret1, prec);
2432 case SSA_NAME:
2433 ret1 = wi::ctz (get_nonzero_bits (expr));
2434 return MIN (ret1, prec);
2435 case PLUS_EXPR:
2436 case MINUS_EXPR:
2437 case BIT_IOR_EXPR:
2438 case BIT_XOR_EXPR:
2439 case MIN_EXPR:
2440 case MAX_EXPR:
2441 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2442 if (ret1 == 0)
2443 return ret1;
2444 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2445 return MIN (ret1, ret2);
2446 case POINTER_PLUS_EXPR:
2447 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2448 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2449 /* The second operand is sizetype, which could in theory be
2450 wider than the pointer's precision. Make sure we never
2451 return more than prec. */
2452 ret2 = MIN (ret2, prec);
2453 return MIN (ret1, ret2);
2454 case BIT_AND_EXPR:
2455 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2456 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2457 return MAX (ret1, ret2);
2458 case MULT_EXPR:
2459 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2460 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2461 return MIN (ret1 + ret2, prec);
2462 case LSHIFT_EXPR:
2463 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2464 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2465 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2466 {
2467 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2468 return MIN (ret1 + ret2, prec);
2469 }
2470 return ret1;
2471 case RSHIFT_EXPR:
2472 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2473 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2474 {
2475 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2476 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2477 if (ret1 > ret2)
2478 return ret1 - ret2;
2479 }
2480 return 0;
2481 case TRUNC_DIV_EXPR:
2482 case CEIL_DIV_EXPR:
2483 case FLOOR_DIV_EXPR:
2484 case ROUND_DIV_EXPR:
2485 case EXACT_DIV_EXPR:
2486 if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
2487 && tree_int_cst_sgn (TREE_OPERAND (expr, 1)) == 1)
2488 {
2489 int l = tree_log2 (TREE_OPERAND (expr, 1));
2490 if (l >= 0)
2491 {
2492 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2493 ret2 = l;
2494 if (ret1 > ret2)
2495 return ret1 - ret2;
2496 }
2497 }
2498 return 0;
2499 CASE_CONVERT:
2500 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2501 if (ret1 && ret1 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
2502 ret1 = prec;
2503 return MIN (ret1, prec);
2504 case SAVE_EXPR:
2505 return tree_ctz (TREE_OPERAND (expr, 0));
2506 case COND_EXPR:
2507 ret1 = tree_ctz (TREE_OPERAND (expr, 1));
2508 if (ret1 == 0)
2509 return 0;
2510 ret2 = tree_ctz (TREE_OPERAND (expr, 2));
2511 return MIN (ret1, ret2);
2512 case COMPOUND_EXPR:
2513 return tree_ctz (TREE_OPERAND (expr, 1));
2514 case ADDR_EXPR:
2515 ret1 = get_pointer_alignment (CONST_CAST_TREE (expr));
2516 if (ret1 > BITS_PER_UNIT)
2517 {
2518 ret1 = ctz_hwi (ret1 / BITS_PER_UNIT);
2519 return MIN (ret1, prec);
2520 }
2521 return 0;
2522 default:
2523 return 0;
2524 }
2525 }
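
/* Worked example (a sketch): for (x * 8) + 4 with X of unknown value,
   the MULT_EXPR contributes 0 + 3 known trailing zeros, the
   INTEGER_CST 4 contributes 2, and PLUS_EXPR takes the minimum, so
   tree_ctz returns 2.  */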
2526
2527 /* Return 1 if EXPR is the real constant zero. Trailing zeroes matter for
2528 decimal float constants, so don't return 1 for them. */
2529
2530 int
2531 real_zerop (const_tree expr)
2532 {
2533 STRIP_NOPS (expr);
2534
2535 switch (TREE_CODE (expr))
2536 {
2537 case REAL_CST:
2538 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconst0)
2539 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2540 case COMPLEX_CST:
2541 return real_zerop (TREE_REALPART (expr))
2542 && real_zerop (TREE_IMAGPART (expr));
2543 case VECTOR_CST:
2544 {
2545 unsigned i;
2546 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2547 if (!real_zerop (VECTOR_CST_ELT (expr, i)))
2548 return false;
2549 return true;
2550 }
2551 default:
2552 return false;
2553 }
2554 }
2555
2556 /* Return 1 if EXPR is the real constant one in real or complex form.
2557 Trailing zeroes matter for decimal float constants, so don't return
2558 1 for them. */
2559
2560 int
2561 real_onep (const_tree expr)
2562 {
2563 STRIP_NOPS (expr);
2564
2565 switch (TREE_CODE (expr))
2566 {
2567 case REAL_CST:
2568 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconst1)
2569 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2570 case COMPLEX_CST:
2571 return real_onep (TREE_REALPART (expr))
2572 && real_zerop (TREE_IMAGPART (expr));
2573 case VECTOR_CST:
2574 {
2575 unsigned i;
2576 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2577 if (!real_onep (VECTOR_CST_ELT (expr, i)))
2578 return false;
2579 return true;
2580 }
2581 default:
2582 return false;
2583 }
2584 }
2585
2586 /* Return 1 if EXPR is the real constant minus one. Trailing zeroes
2587 matter for decimal float constants, so don't return 1 for them. */
2588
2589 int
2590 real_minus_onep (const_tree expr)
2591 {
2592 STRIP_NOPS (expr);
2593
2594 switch (TREE_CODE (expr))
2595 {
2596 case REAL_CST:
2597 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconstm1)
2598 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2599 case COMPLEX_CST:
2600 return real_minus_onep (TREE_REALPART (expr))
2601 && real_zerop (TREE_IMAGPART (expr));
2602 case VECTOR_CST:
2603 {
2604 unsigned i;
2605 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2606 if (!real_minus_onep (VECTOR_CST_ELT (expr, i)))
2607 return false;
2608 return true;
2609 }
2610 default:
2611 return false;
2612 }
2613 }
2614
2615 /* Nonzero if EXP is a constant or a cast of a constant. */
2616
2617 int
2618 really_constant_p (const_tree exp)
2619 {
2620 /* This is not quite the same as STRIP_NOPS. It does more. */
2621 while (CONVERT_EXPR_P (exp)
2622 || TREE_CODE (exp) == NON_LVALUE_EXPR)
2623 exp = TREE_OPERAND (exp, 0);
2624 return TREE_CONSTANT (exp);
2625 }
2626 \f
2627 /* Return first list element whose TREE_VALUE is ELEM.
2628 Return 0 if ELEM is not in LIST. */
2629
2630 tree
2631 value_member (tree elem, tree list)
2632 {
2633 while (list)
2634 {
2635 if (elem == TREE_VALUE (list))
2636 return list;
2637 list = TREE_CHAIN (list);
2638 }
2639 return NULL_TREE;
2640 }
2641
2642 /* Return first list element whose TREE_PURPOSE is ELEM.
2643 Return 0 if ELEM is not in LIST. */
2644
2645 tree
2646 purpose_member (const_tree elem, tree list)
2647 {
2648 while (list)
2649 {
2650 if (elem == TREE_PURPOSE (list))
2651 return list;
2652 list = TREE_CHAIN (list);
2653 }
2654 return NULL_TREE;
2655 }
2656
2657 /* Return true if ELEM is in V. */
2658
2659 bool
2660 vec_member (const_tree elem, vec<tree, va_gc> *v)
2661 {
2662 unsigned ix;
2663 tree t;
2664 FOR_EACH_VEC_SAFE_ELT (v, ix, t)
2665 if (elem == t)
2666 return true;
2667 return false;
2668 }
2669
2670 /* Returns element number IDX (zero-origin) of chain CHAIN, or
2671 NULL_TREE. */
2672
2673 tree
2674 chain_index (int idx, tree chain)
2675 {
2676 for (; chain && idx > 0; --idx)
2677 chain = TREE_CHAIN (chain);
2678 return chain;
2679 }
2680
2681 /* Return nonzero if ELEM is part of the chain CHAIN. */
2682
2683 int
2684 chain_member (const_tree elem, const_tree chain)
2685 {
2686 while (chain)
2687 {
2688 if (elem == chain)
2689 return 1;
2690 chain = DECL_CHAIN (chain);
2691 }
2692
2693 return 0;
2694 }
2695
2696 /* Return the length of a chain of nodes chained through TREE_CHAIN.
2697 We expect a null pointer to mark the end of the chain.
2698 This is the Lisp primitive `length'. */
2699
2700 int
2701 list_length (const_tree t)
2702 {
2703 const_tree p = t;
2704 #ifdef ENABLE_TREE_CHECKING
2705 const_tree q = t;
2706 #endif
2707 int len = 0;
2708
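/* With checking enabled, Q advances at half the speed of P, so the
   assert below fires instead of looping forever if the chain is
   circular (the classic tortoise-and-hare check).  */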
2709 while (p)
2710 {
2711 p = TREE_CHAIN (p);
2712 #ifdef ENABLE_TREE_CHECKING
2713 if (len % 2)
2714 q = TREE_CHAIN (q);
2715 gcc_assert (p != q);
2716 #endif
2717 len++;
2718 }
2719
2720 return len;
2721 }
2722
2723 /* Returns the first FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
2724 UNION_TYPE TYPE, or NULL_TREE if none. */
2725
2726 tree
2727 first_field (const_tree type)
2728 {
2729 tree t = TYPE_FIELDS (type);
2730 while (t && TREE_CODE (t) != FIELD_DECL)
2731 t = TREE_CHAIN (t);
2732 return t;
2733 }
2734
2735 /* Concatenate two chains of nodes (chained through TREE_CHAIN)
2736 by modifying the last node in chain 1 to point to chain 2.
2737 This is the Lisp primitive `nconc'. */
2738
2739 tree
2740 chainon (tree op1, tree op2)
2741 {
2742 tree t1;
2743
2744 if (!op1)
2745 return op2;
2746 if (!op2)
2747 return op1;
2748
2749 for (t1 = op1; TREE_CHAIN (t1); t1 = TREE_CHAIN (t1))
2750 continue;
2751 TREE_CHAIN (t1) = op2;
2752
2753 #ifdef ENABLE_TREE_CHECKING
2754 {
2755 tree t2;
2756 for (t2 = op2; t2; t2 = TREE_CHAIN (t2))
2757 gcc_assert (t2 != t1);
2758 }
2759 #endif
2760
2761 return op1;
2762 }
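
/* E.g. (a sketch, with A and B standing for arbitrary trees)
   chainon (tree_cons (NULL_TREE, a, NULL_TREE),
            tree_cons (NULL_TREE, b, NULL_TREE))
   returns the two-element chain whose TREE_VALUEs are A then B; the
   checking loop above guards against accidentally forming a cycle.  */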
2763
2764 /* Return the last node in a chain of nodes (chained through TREE_CHAIN). */
2765
2766 tree
2767 tree_last (tree chain)
2768 {
2769 tree next;
2770 if (chain)
2771 while ((next = TREE_CHAIN (chain)))
2772 chain = next;
2773 return chain;
2774 }
2775
2776 /* Reverse the order of elements in the chain T,
2777 and return the new head of the chain (old last element). */
2778
2779 tree
2780 nreverse (tree t)
2781 {
2782 tree prev = 0, decl, next;
2783 for (decl = t; decl; decl = next)
2784 {
2785 /* We shouldn't be using this function to reverse BLOCK chains; we
2786 have blocks_nreverse for that. */
2787 gcc_checking_assert (TREE_CODE (decl) != BLOCK);
2788 next = TREE_CHAIN (decl);
2789 TREE_CHAIN (decl) = prev;
2790 prev = decl;
2791 }
2792 return prev;
2793 }
2794 \f
2795 /* Return a newly created TREE_LIST node whose
2796 purpose and value fields are PARM and VALUE. */
2797
2798 tree
2799 build_tree_list_stat (tree parm, tree value MEM_STAT_DECL)
2800 {
2801 tree t = make_node_stat (TREE_LIST PASS_MEM_STAT);
2802 TREE_PURPOSE (t) = parm;
2803 TREE_VALUE (t) = value;
2804 return t;
2805 }
2806
2807 /* Build a chain of TREE_LIST nodes from a vector. */
2808
2809 tree
2810 build_tree_list_vec_stat (const vec<tree, va_gc> *vec MEM_STAT_DECL)
2811 {
2812 tree ret = NULL_TREE;
2813 tree *pp = &ret;
2814 unsigned int i;
2815 tree t;
2816 FOR_EACH_VEC_SAFE_ELT (vec, i, t)
2817 {
2818 *pp = build_tree_list_stat (NULL, t PASS_MEM_STAT);
2819 pp = &TREE_CHAIN (*pp);
2820 }
2821 return ret;
2822 }
2823
2824 /* Return a newly created TREE_LIST node whose
2825 purpose and value fields are PURPOSE and VALUE
2826 and whose TREE_CHAIN is CHAIN. */
2827
2828 tree
2829 tree_cons_stat (tree purpose, tree value, tree chain MEM_STAT_DECL)
2830 {
2831 tree node;
2832
2833 node = ggc_alloc_tree_node_stat (sizeof (struct tree_list) PASS_MEM_STAT);
2834 memset (node, 0, sizeof (struct tree_common));
2835
2836 record_node_allocation_statistics (TREE_LIST, sizeof (struct tree_list));
2837
2838 TREE_SET_CODE (node, TREE_LIST);
2839 TREE_CHAIN (node) = chain;
2840 TREE_PURPOSE (node) = purpose;
2841 TREE_VALUE (node) = value;
2842 return node;
2843 }
2844
2845 /* Return the values of the elements of a CONSTRUCTOR as a vector of
2846 trees. */
2847
2848 vec<tree, va_gc> *
2849 ctor_to_vec (tree ctor)
2850 {
2851 vec<tree, va_gc> *vec;
2852 vec_alloc (vec, CONSTRUCTOR_NELTS (ctor));
2853 unsigned int ix;
2854 tree val;
2855
2856 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor), ix, val)
2857 vec->quick_push (val);
2858
2859 return vec;
2860 }
2861 \f
2862 /* Return the size nominally occupied by an object of type TYPE
2863 when it resides in memory. The value is measured in units of bytes,
2864 and its data type is that normally used for type sizes
2865 (which is the first type created by make_signed_type or
2866 make_unsigned_type). */
2867
2868 tree
2869 size_in_bytes (const_tree type)
2870 {
2871 tree t;
2872
2873 if (type == error_mark_node)
2874 return integer_zero_node;
2875
2876 type = TYPE_MAIN_VARIANT (type);
2877 t = TYPE_SIZE_UNIT (type);
2878
2879 if (t == 0)
2880 {
2881 lang_hooks.types.incomplete_type_error (NULL_TREE, type);
2882 return size_zero_node;
2883 }
2884
2885 return t;
2886 }
2887
2888 /* Return the size of TYPE (in bytes) as a wide integer
2889 or return -1 if the size can vary or is larger than an integer. */
2890
2891 HOST_WIDE_INT
2892 int_size_in_bytes (const_tree type)
2893 {
2894 tree t;
2895
2896 if (type == error_mark_node)
2897 return 0;
2898
2899 type = TYPE_MAIN_VARIANT (type);
2900 t = TYPE_SIZE_UNIT (type);
2901
2902 if (t && tree_fits_uhwi_p (t))
2903 return TREE_INT_CST_LOW (t);
2904 else
2905 return -1;
2906 }
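
/* For instance, int_size_in_bytes (integer_type_node) is 4 on typical
   32-bit-int targets, while a variable-length array type yields -1
   (illustrative; the exact value is target-dependent).  */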
2907
2908 /* Return the maximum size of TYPE (in bytes) as a wide integer
2909 or return -1 if the size can vary or is larger than an integer. */
2910
2911 HOST_WIDE_INT
2912 max_int_size_in_bytes (const_tree type)
2913 {
2914 HOST_WIDE_INT size = -1;
2915 tree size_tree;
2916
2917 /* If this is an array type, check for a possible MAX_SIZE attached. */
2918
2919 if (TREE_CODE (type) == ARRAY_TYPE)
2920 {
2921 size_tree = TYPE_ARRAY_MAX_SIZE (type);
2922
2923 if (size_tree && tree_fits_uhwi_p (size_tree))
2924 size = tree_to_uhwi (size_tree);
2925 }
2926
2927 /* If we still haven't been able to get a size, see if the language
2928 can compute a maximum size. */
2929
2930 if (size == -1)
2931 {
2932 size_tree = lang_hooks.types.max_size (type);
2933
2934 if (size_tree && tree_fits_uhwi_p (size_tree))
2935 size = tree_to_uhwi (size_tree);
2936 }
2937
2938 return size;
2939 }
2940 \f
2941 /* Return the bit position of FIELD, in bits from the start of the record.
2942 This is a tree of type bitsizetype. */
2943
2944 tree
2945 bit_position (const_tree field)
2946 {
2947 return bit_from_pos (DECL_FIELD_OFFSET (field),
2948 DECL_FIELD_BIT_OFFSET (field));
2949 }
2950 \f
2951 /* Return the byte position of FIELD, in bytes from the start of the record.
2952 This is a tree of type sizetype. */
2953
2954 tree
2955 byte_position (const_tree field)
2956 {
2957 return byte_from_pos (DECL_FIELD_OFFSET (field),
2958 DECL_FIELD_BIT_OFFSET (field));
2959 }
2960
2961 /* Likewise, but return as an integer. It must be representable in
2962 that way (since it could be a signed value, we don't have the
2963 option of returning -1 like int_size_in_bytes can). */
2964
2965 HOST_WIDE_INT
2966 int_byte_position (const_tree field)
2967 {
2968 return tree_to_shwi (byte_position (field));
2969 }
2970 \f
2971 /* Return the strictest alignment, in bits, that T is known to have. */
2972
2973 unsigned int
2974 expr_align (const_tree t)
2975 {
2976 unsigned int align0, align1;
2977
2978 switch (TREE_CODE (t))
2979 {
2980 CASE_CONVERT: case NON_LVALUE_EXPR:
2981 /* If we have conversions, we know that the alignment of the
2982 object must meet each of the alignments of the types. */
2983 align0 = expr_align (TREE_OPERAND (t, 0));
2984 align1 = TYPE_ALIGN (TREE_TYPE (t));
2985 return MAX (align0, align1);
2986
2987 case SAVE_EXPR: case COMPOUND_EXPR: case MODIFY_EXPR:
2988 case INIT_EXPR: case TARGET_EXPR: case WITH_CLEANUP_EXPR:
2989 case CLEANUP_POINT_EXPR:
2990 /* These don't change the alignment of an object. */
2991 return expr_align (TREE_OPERAND (t, 0));
2992
2993 case COND_EXPR:
2994 /* The best we can do is say that the alignment is the least aligned
2995 of the two arms. */
2996 align0 = expr_align (TREE_OPERAND (t, 1));
2997 align1 = expr_align (TREE_OPERAND (t, 2));
2998 return MIN (align0, align1);
2999
3000 /* FIXME: LABEL_DECL and CONST_DECL never have DECL_ALIGN set
3001 meaningfully; it is always 1. */
3002 case LABEL_DECL: case CONST_DECL:
3003 case VAR_DECL: case PARM_DECL: case RESULT_DECL:
3004 case FUNCTION_DECL:
3005 gcc_assert (DECL_ALIGN (t) != 0);
3006 return DECL_ALIGN (t);
3007
3008 default:
3009 break;
3010 }
3011
3012 /* Otherwise take the alignment from that of the type. */
3013 return TYPE_ALIGN (TREE_TYPE (t));
3014 }
3015 \f
3016 /* Return, as a tree node, the number of elements for TYPE (which is an
3017 ARRAY_TYPE) minus one. This counts only elements of the top array. */
3018
3019 tree
3020 array_type_nelts (const_tree type)
3021 {
3022 tree index_type, min, max;
3023
3024 /* If they did it with unspecified bounds, then we should have already
3025 given an error about it before we got here. */
3026 if (! TYPE_DOMAIN (type))
3027 return error_mark_node;
3028
3029 index_type = TYPE_DOMAIN (type);
3030 min = TYPE_MIN_VALUE (index_type);
3031 max = TYPE_MAX_VALUE (index_type);
3032
3033 /* TYPE_MAX_VALUE may not be set if the array has unknown length. */
3034 if (!max)
3035 return error_mark_node;
3036
3037 return (integer_zerop (min)
3038 ? max
3039 : fold_build2 (MINUS_EXPR, TREE_TYPE (max), max, min));
3040 }
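
/* For an array type with domain 0 .. 9 (e.g. int[10] in C) this
   returns the INTEGER_CST 9; with a non-zero lower bound MIN the
   result is the folded MAX - MIN (an illustrative note).  */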
3041 \f
3042 /* If arg is static -- a reference to an object in static storage -- then
3043 return the object. This is not the same as the C meaning of `static'.
3044 If arg isn't static, return NULL. */
3045
3046 tree
3047 staticp (tree arg)
3048 {
3049 switch (TREE_CODE (arg))
3050 {
3051 case FUNCTION_DECL:
3052 /* Nested functions are static, even though taking their address will
3053 involve a trampoline as we unnest the nested function and create
3054 the trampoline on the tree level. */
3055 return arg;
3056
3057 case VAR_DECL:
3058 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
3059 && ! DECL_THREAD_LOCAL_P (arg)
3060 && ! DECL_DLLIMPORT_P (arg)
3061 ? arg : NULL);
3062
3063 case CONST_DECL:
3064 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
3065 ? arg : NULL);
3066
3067 case CONSTRUCTOR:
3068 return TREE_STATIC (arg) ? arg : NULL;
3069
3070 case LABEL_DECL:
3071 case STRING_CST:
3072 return arg;
3073
3074 case COMPONENT_REF:
3075 /* If the thing being referenced is not a field, then it is
3076 something language specific. */
3077 gcc_assert (TREE_CODE (TREE_OPERAND (arg, 1)) == FIELD_DECL);
3078
3079 /* If we are referencing a bitfield, we can't evaluate an
3080 ADDR_EXPR at compile time and so it isn't a constant. */
3081 if (DECL_BIT_FIELD (TREE_OPERAND (arg, 1)))
3082 return NULL;
3083
3084 return staticp (TREE_OPERAND (arg, 0));
3085
3086 case BIT_FIELD_REF:
3087 return NULL;
3088
3089 case INDIRECT_REF:
3090 return TREE_CONSTANT (TREE_OPERAND (arg, 0)) ? arg : NULL;
3091
3092 case ARRAY_REF:
3093 case ARRAY_RANGE_REF:
3094 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (arg))) == INTEGER_CST
3095 && TREE_CODE (TREE_OPERAND (arg, 1)) == INTEGER_CST)
3096 return staticp (TREE_OPERAND (arg, 0));
3097 else
3098 return NULL;
3099
3100 case COMPOUND_LITERAL_EXPR:
3101 return TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (arg)) ? arg : NULL;
3102
3103 default:
3104 return NULL;
3105 }
3106 }
3107
3108 \f
3109
3110
3111 /* Return whether OP is a DECL whose address is function-invariant. */
3112
3113 bool
3114 decl_address_invariant_p (const_tree op)
3115 {
3116 /* The conditions below are slightly less strict than the one in
3117 staticp. */
3118
3119 switch (TREE_CODE (op))
3120 {
3121 case PARM_DECL:
3122 case RESULT_DECL:
3123 case LABEL_DECL:
3124 case FUNCTION_DECL:
3125 return true;
3126
3127 case VAR_DECL:
3128 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3129 || DECL_THREAD_LOCAL_P (op)
3130 || DECL_CONTEXT (op) == current_function_decl
3131 || decl_function_context (op) == current_function_decl)
3132 return true;
3133 break;
3134
3135 case CONST_DECL:
3136 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3137 || decl_function_context (op) == current_function_decl)
3138 return true;
3139 break;
3140
3141 default:
3142 break;
3143 }
3144
3145 return false;
3146 }
3147
3148 /* Return whether OP is a DECL whose address is interprocedural-invariant. */
3149
3150 bool
3151 decl_address_ip_invariant_p (const_tree op)
3152 {
3153 /* The conditions below are slightly less strict than the one in
3154 staticp. */
3155
3156 switch (TREE_CODE (op))
3157 {
3158 case LABEL_DECL:
3159 case FUNCTION_DECL:
3160 case STRING_CST:
3161 return true;
3162
3163 case VAR_DECL:
3164 if (((TREE_STATIC (op) || DECL_EXTERNAL (op))
3165 && !DECL_DLLIMPORT_P (op))
3166 || DECL_THREAD_LOCAL_P (op))
3167 return true;
3168 break;
3169
3170 case CONST_DECL:
3171 if ((TREE_STATIC (op) || DECL_EXTERNAL (op)))
3172 return true;
3173 break;
3174
3175 default:
3176 break;
3177 }
3178
3179 return false;
3180 }
3181
3182
3183 /* Return true if T is function-invariant (internal function, does
3184 not handle arithmetic; that's handled in skip_simple_arithmetic and
3185 tree_invariant_p). */
3186
3187 static bool tree_invariant_p (tree t);
3188
3189 static bool
3190 tree_invariant_p_1 (tree t)
3191 {
3192 tree op;
3193
3194 if (TREE_CONSTANT (t)
3195 || (TREE_READONLY (t) && !TREE_SIDE_EFFECTS (t)))
3196 return true;
3197
3198 switch (TREE_CODE (t))
3199 {
3200 case SAVE_EXPR:
3201 return true;
3202
3203 case ADDR_EXPR:
3204 op = TREE_OPERAND (t, 0);
3205 while (handled_component_p (op))
3206 {
3207 switch (TREE_CODE (op))
3208 {
3209 case ARRAY_REF:
3210 case ARRAY_RANGE_REF:
3211 if (!tree_invariant_p (TREE_OPERAND (op, 1))
3212 || TREE_OPERAND (op, 2) != NULL_TREE
3213 || TREE_OPERAND (op, 3) != NULL_TREE)
3214 return false;
3215 break;
3216
3217 case COMPONENT_REF:
3218 if (TREE_OPERAND (op, 2) != NULL_TREE)
3219 return false;
3220 break;
3221
3222 default:;
3223 }
3224 op = TREE_OPERAND (op, 0);
3225 }
3226
3227 return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
3228
3229 default:
3230 break;
3231 }
3232
3233 return false;
3234 }
3235
3236 /* Return true if T is function-invariant. */
3237
3238 static bool
3239 tree_invariant_p (tree t)
3240 {
3241 tree inner = skip_simple_arithmetic (t);
3242 return tree_invariant_p_1 (inner);
3243 }
3244
3245 /* Wrap a SAVE_EXPR around EXPR, if appropriate.
3246 Do this to any expression which may be used in more than one place,
3247 but must be evaluated only once.
3248
3249 Normally, expand_expr would reevaluate the expression each time.
3250 Calling save_expr produces something that is evaluated and recorded
3251 the first time expand_expr is called on it. Subsequent calls to
3252 expand_expr just reuse the recorded value.
3253
3254 The call to expand_expr that generates code that actually computes
3255 the value is the first call *at compile time*. Subsequent calls
3256 *at compile time* generate code to use the saved value.
3257 This produces the correct result provided that *at run time* control
3258 always flows through the insns made by the first expand_expr
3259 before reaching the other places where the save_expr was evaluated.
3260 You, the caller of save_expr, must make sure this is so.
3261
3262 Constants, and certain read-only nodes, are returned with no
3263 SAVE_EXPR because that is safe. Expressions containing placeholders
3264 are not touched; see tree.def for an explanation of what these
3265 are used for. */
3266
3267 tree
3268 save_expr (tree expr)
3269 {
3270 tree t = fold (expr);
3271 tree inner;
3272
3273 /* If the tree evaluates to a constant, then we don't want to hide that
3274 fact (i.e. this allows further folding, and direct checks for constants).
3275 However, a read-only object that has side effects cannot be bypassed.
3276 Since it is no problem to reevaluate literals, we just return the
3277 literal node. */
3278 inner = skip_simple_arithmetic (t);
3279 if (TREE_CODE (inner) == ERROR_MARK)
3280 return inner;
3281
3282 if (tree_invariant_p_1 (inner))
3283 return t;
3284
3285 /* If INNER contains a PLACEHOLDER_EXPR, we must evaluate it each time, since
3286 it means that the size or offset of some field of an object depends on
3287 the value within another field.
3288
3289 Note that it must not be the case that T contains both a PLACEHOLDER_EXPR
3290 and some variable since it would then need to be both evaluated once and
3291 evaluated more than once. Front-ends must assure this case cannot
3292 happen by surrounding any such subexpressions in their own SAVE_EXPR
3293 and forcing evaluation at the proper time. */
3294 if (contains_placeholder_p (inner))
3295 return t;
3296
3297 t = build1 (SAVE_EXPR, TREE_TYPE (expr), t);
3298 SET_EXPR_LOCATION (t, EXPR_LOCATION (expr));
3299
3300 /* This expression might be placed ahead of a jump to ensure that the
3301 value was computed on both sides of the jump. So make sure it isn't
3302 eliminated as dead. */
3303 TREE_SIDE_EFFECTS (t) = 1;
3304 return t;
3305 }
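
/* A minimal usage sketch (hypothetical caller code, not from this
   file): when an expression SIZE is needed both in a bounds check and
   in the access it guards,

     size = save_expr (size);

   makes every later use expand to the single saved computation,
   subject to the control-flow caveat described above.  */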
3306
3307 /* Look inside EXPR into any simple arithmetic operations. Return the
3308 outermost non-arithmetic or non-invariant node. */
3309
3310 tree
3311 skip_simple_arithmetic (tree expr)
3312 {
3313 /* We don't care about whether this can be used as an lvalue in this
3314 context. */
3315 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3316 expr = TREE_OPERAND (expr, 0);
3317
3318 /* If we have simple operations applied to a SAVE_EXPR or to a SAVE_EXPR and
3319 a constant, it will be more efficient to not make another SAVE_EXPR since
3320 it will allow better simplification and GCSE will be able to merge the
3321 computations if they actually occur. */
3322 while (true)
3323 {
3324 if (UNARY_CLASS_P (expr))
3325 expr = TREE_OPERAND (expr, 0);
3326 else if (BINARY_CLASS_P (expr))
3327 {
3328 if (tree_invariant_p (TREE_OPERAND (expr, 1)))
3329 expr = TREE_OPERAND (expr, 0);
3330 else if (tree_invariant_p (TREE_OPERAND (expr, 0)))
3331 expr = TREE_OPERAND (expr, 1);
3332 else
3333 break;
3334 }
3335 else
3336 break;
3337 }
3338
3339 return expr;
3340 }
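
/* E.g. for SAVE_EXPR <x> + 4 this returns the SAVE_EXPR: the constant
   operand is invariant, so the PLUS_EXPR is skipped, and the walk
   stops at the SAVE_EXPR itself (illustrative).  */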
3341
3342 /* Look inside EXPR into simple arithmetic operations involving constants.
3343 Return the outermost non-arithmetic or non-constant node. */
3344
3345 tree
3346 skip_simple_constant_arithmetic (tree expr)
3347 {
3348 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3349 expr = TREE_OPERAND (expr, 0);
3350
3351 while (true)
3352 {
3353 if (UNARY_CLASS_P (expr))
3354 expr = TREE_OPERAND (expr, 0);
3355 else if (BINARY_CLASS_P (expr))
3356 {
3357 if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
3358 expr = TREE_OPERAND (expr, 0);
3359 else if (TREE_CONSTANT (TREE_OPERAND (expr, 0)))
3360 expr = TREE_OPERAND (expr, 1);
3361 else
3362 break;
3363 }
3364 else
3365 break;
3366 }
3367
3368 return expr;
3369 }
3370
3371 /* Return which tree structure is used by T. */
3372
3373 enum tree_node_structure_enum
3374 tree_node_structure (const_tree t)
3375 {
3376 const enum tree_code code = TREE_CODE (t);
3377 return tree_node_structure_for_code (code);
3378 }
3379
3380 /* Set various status flags when building a CALL_EXPR object T. */
3381
3382 static void
3383 process_call_operands (tree t)
3384 {
3385 bool side_effects = TREE_SIDE_EFFECTS (t);
3386 bool read_only = false;
3387 int i = call_expr_flags (t);
3388
3389 /* Calls have side-effects, except those to const or pure functions. */
3390 if ((i & ECF_LOOPING_CONST_OR_PURE) || !(i & (ECF_CONST | ECF_PURE)))
3391 side_effects = true;
3392 /* Propagate TREE_READONLY of arguments for const functions. */
3393 if (i & ECF_CONST)
3394 read_only = true;
3395
3396 if (!side_effects || read_only)
3397 for (i = 1; i < TREE_OPERAND_LENGTH (t); i++)
3398 {
3399 tree op = TREE_OPERAND (t, i);
3400 if (op && TREE_SIDE_EFFECTS (op))
3401 side_effects = true;
3402 if (op && !TREE_READONLY (op) && !CONSTANT_CLASS_P (op))
3403 read_only = false;
3404 }
3405
3406 TREE_SIDE_EFFECTS (t) = side_effects;
3407 TREE_READONLY (t) = read_only;
3408 }
3409 \f
3410 /* Return true if EXP contains a PLACEHOLDER_EXPR, i.e. if it represents a
3411 size or offset that depends on a field within a record. */
3412
3413 bool
3414 contains_placeholder_p (const_tree exp)
3415 {
3416 enum tree_code code;
3417
3418 if (!exp)
3419 return 0;
3420
3421 code = TREE_CODE (exp);
3422 if (code == PLACEHOLDER_EXPR)
3423 return 1;
3424
3425 switch (TREE_CODE_CLASS (code))
3426 {
3427 case tcc_reference:
3428 /* Don't look at any PLACEHOLDER_EXPRs that might be in index or bit
3429 position computations since they will be converted into a
3430 WITH_RECORD_EXPR involving the reference, which we assume
3431 here will be valid. */
3432 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3433
3434 case tcc_exceptional:
3435 if (code == TREE_LIST)
3436 return (CONTAINS_PLACEHOLDER_P (TREE_VALUE (exp))
3437 || CONTAINS_PLACEHOLDER_P (TREE_CHAIN (exp)));
3438 break;
3439
3440 case tcc_unary:
3441 case tcc_binary:
3442 case tcc_comparison:
3443 case tcc_expression:
3444 switch (code)
3445 {
3446 case COMPOUND_EXPR:
3447 /* Ignoring the first operand isn't quite right, but works best. */
3448 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1));
3449
3450 case COND_EXPR:
3451 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3452 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1))
3453 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 2)));
3454
3455 case SAVE_EXPR:
3456 /* The save_expr function never wraps anything containing
3457 a PLACEHOLDER_EXPR. */
3458 return 0;
3459
3460 default:
3461 break;
3462 }
3463
3464 switch (TREE_CODE_LENGTH (code))
3465 {
3466 case 1:
3467 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3468 case 2:
3469 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3470 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1)));
3471 default:
3472 return 0;
3473 }
3474
3475 case tcc_vl_exp:
3476 switch (code)
3477 {
3478 case CALL_EXPR:
3479 {
3480 const_tree arg;
3481 const_call_expr_arg_iterator iter;
3482 FOR_EACH_CONST_CALL_EXPR_ARG (arg, iter, exp)
3483 if (CONTAINS_PLACEHOLDER_P (arg))
3484 return 1;
3485 return 0;
3486 }
3487 default:
3488 return 0;
3489 }
3490
3491 default:
3492 return 0;
3493 }
3494 return 0;
3495 }
3496
3497 /* Return true if any part of the structure of TYPE involves a PLACEHOLDER_EXPR
3498 directly. This includes size, bounds, qualifiers (for QUAL_UNION_TYPE) and
3499 field positions. */
3500
3501 static bool
3502 type_contains_placeholder_1 (const_tree type)
3503 {
3504 /* If the size contains a placeholder or the parent type (component type in
3505 the case of arrays) type involves a placeholder, this type does. */
3506 if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type))
3507 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE_UNIT (type))
3508 || (!POINTER_TYPE_P (type)
3509 && TREE_TYPE (type)
3510 && type_contains_placeholder_p (TREE_TYPE (type))))
3511 return true;
3512
3513 /* Now do type-specific checks. Note that the last part of the check above
3514 greatly limits what we have to do below. */
3515 switch (TREE_CODE (type))
3516 {
3517 case VOID_TYPE:
3518 case POINTER_BOUNDS_TYPE:
3519 case COMPLEX_TYPE:
3520 case ENUMERAL_TYPE:
3521 case BOOLEAN_TYPE:
3522 case POINTER_TYPE:
3523 case OFFSET_TYPE:
3524 case REFERENCE_TYPE:
3525 case METHOD_TYPE:
3526 case FUNCTION_TYPE:
3527 case VECTOR_TYPE:
3528 case NULLPTR_TYPE:
3529 return false;
3530
3531 case INTEGER_TYPE:
3532 case REAL_TYPE:
3533 case FIXED_POINT_TYPE:
3534 /* Here we just check the bounds. */
3535 return (CONTAINS_PLACEHOLDER_P (TYPE_MIN_VALUE (type))
3536 || CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type)));
3537
3538 case ARRAY_TYPE:
3539 /* We have already checked the component type above, so just check the
3540 domain type. */
3541 return type_contains_placeholder_p (TYPE_DOMAIN (type));
3542
3543 case RECORD_TYPE:
3544 case UNION_TYPE:
3545 case QUAL_UNION_TYPE:
3546 {
3547 tree field;
3548
3549 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
3550 if (TREE_CODE (field) == FIELD_DECL
3551 && (CONTAINS_PLACEHOLDER_P (DECL_FIELD_OFFSET (field))
3552 || (TREE_CODE (type) == QUAL_UNION_TYPE
3553 && CONTAINS_PLACEHOLDER_P (DECL_QUALIFIER (field)))
3554 || type_contains_placeholder_p (TREE_TYPE (field))))
3555 return true;
3556
3557 return false;
3558 }
3559
3560 default:
3561 gcc_unreachable ();
3562 }
3563 }
3564
3565 /* Wrapper around above function used to cache its result. */
3566
3567 bool
3568 type_contains_placeholder_p (tree type)
3569 {
3570 bool result;
3571
3572 /* If the contains_placeholder_bits field has been initialized,
3573 then we know the answer. */
3574 if (TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) > 0)
3575 return TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) - 1;
3576
3577 /* Indicate that we've seen this type node, and the answer is false.
3578 This is what we want to return if we run into recursion via fields. */
3579 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = 1;
3580
3581 /* Compute the real value. */
3582 result = type_contains_placeholder_1 (type);
3583
3584 /* Store the real value. */
3585 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = result + 1;
3586
3587 return result;
3588 }
3589 \f
3590 /* Push tree EXP onto vector QUEUE if it is not already present. */
3591
3592 static void
3593 push_without_duplicates (tree exp, vec<tree> *queue)
3594 {
3595 unsigned int i;
3596 tree iter;
3597
3598 FOR_EACH_VEC_ELT (*queue, i, iter)
3599 if (simple_cst_equal (iter, exp) == 1)
3600 break;
3601
3602 if (!iter)
3603 queue->safe_push (exp);
3604 }
3605
3606 /* Given a tree EXP, find all occurrences of references to fields
3607 in a PLACEHOLDER_EXPR and place them in vector REFS without
3608 duplicates. Also record VAR_DECLs and CONST_DECLs. Note that
3609 we assume here that EXP contains only arithmetic expressions
3610 or CALL_EXPRs with PLACEHOLDER_EXPRs occurring only in their
3611 argument list. */
3612
3613 void
3614 find_placeholder_in_expr (tree exp, vec<tree> *refs)
3615 {
3616 enum tree_code code = TREE_CODE (exp);
3617 tree inner;
3618 int i;
3619
3620 /* We handle TREE_LIST and COMPONENT_REF separately. */
3621 if (code == TREE_LIST)
3622 {
3623 FIND_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), refs);
3624 FIND_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), refs);
3625 }
3626 else if (code == COMPONENT_REF)
3627 {
3628 for (inner = TREE_OPERAND (exp, 0);
3629 REFERENCE_CLASS_P (inner);
3630 inner = TREE_OPERAND (inner, 0))
3631 ;
3632
3633 if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
3634 push_without_duplicates (exp, refs);
3635 else
3636 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), refs);
3637 }
3638 else
3639 switch (TREE_CODE_CLASS (code))
3640 {
3641 case tcc_constant:
3642 break;
3643
3644 case tcc_declaration:
3645 /* Variables allocated to static storage can stay. */
3646 if (!TREE_STATIC (exp))
3647 push_without_duplicates (exp, refs);
3648 break;
3649
3650 case tcc_expression:
3651 /* This is the pattern built in ada/make_aligning_type. */
3652 if (code == ADDR_EXPR
3653 && TREE_CODE (TREE_OPERAND (exp, 0)) == PLACEHOLDER_EXPR)
3654 {
3655 push_without_duplicates (exp, refs);
3656 break;
3657 }
3658
3659 /* Fall through... */
3660
3661 case tcc_exceptional:
3662 case tcc_unary:
3663 case tcc_binary:
3664 case tcc_comparison:
3665 case tcc_reference:
3666 for (i = 0; i < TREE_CODE_LENGTH (code); i++)
3667 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3668 break;
3669
3670 case tcc_vl_exp:
3671 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3672 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3673 break;
3674
3675 default:
3676 gcc_unreachable ();
3677 }
3678 }
3679
3680 /* Given a tree EXP, a FIELD_DECL F, and a replacement value R,
3681 return a tree with all occurrences of references to F in a
3682 PLACEHOLDER_EXPR replaced by R. Also handle VAR_DECLs and
3683 CONST_DECLs. Note that we assume here that EXP contains only
3684 arithmetic expressions or CALL_EXPRs with PLACEHOLDER_EXPRs
3685 occurring only in their argument list. */
3686
3687 tree
3688 substitute_in_expr (tree exp, tree f, tree r)
3689 {
3690 enum tree_code code = TREE_CODE (exp);
3691 tree op0, op1, op2, op3;
3692 tree new_tree;
3693
3694 /* We handle TREE_LIST and COMPONENT_REF separately. */
3695 if (code == TREE_LIST)
3696 {
3697 op0 = SUBSTITUTE_IN_EXPR (TREE_CHAIN (exp), f, r);
3698 op1 = SUBSTITUTE_IN_EXPR (TREE_VALUE (exp), f, r);
3699 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
3700 return exp;
3701
3702 return tree_cons (TREE_PURPOSE (exp), op1, op0);
3703 }
3704 else if (code == COMPONENT_REF)
3705 {
3706 tree inner;
3707
3708 /* If this expression is getting a value from a PLACEHOLDER_EXPR
3709 and it is the right field, replace it with R. */
3710 for (inner = TREE_OPERAND (exp, 0);
3711 REFERENCE_CLASS_P (inner);
3712 inner = TREE_OPERAND (inner, 0))
3713 ;
3714
3715 /* The field. */
3716 op1 = TREE_OPERAND (exp, 1);
3717
3718 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && op1 == f)
3719 return r;
3720
3721 /* If this expression hasn't been completed yet, leave it alone. */
3722 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && !TREE_TYPE (inner))
3723 return exp;
3724
3725 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3726 if (op0 == TREE_OPERAND (exp, 0))
3727 return exp;
3728
3729 new_tree
3730 = fold_build3 (COMPONENT_REF, TREE_TYPE (exp), op0, op1, NULL_TREE);
3731 }
3732 else
3733 switch (TREE_CODE_CLASS (code))
3734 {
3735 case tcc_constant:
3736 return exp;
3737
3738 case tcc_declaration:
3739 if (exp == f)
3740 return r;
3741 else
3742 return exp;
3743
3744 case tcc_expression:
3745 if (exp == f)
3746 return r;
3747
3748 /* Fall through... */
3749
3750 case tcc_exceptional:
3751 case tcc_unary:
3752 case tcc_binary:
3753 case tcc_comparison:
3754 case tcc_reference:
3755 switch (TREE_CODE_LENGTH (code))
3756 {
3757 case 0:
3758 return exp;
3759
3760 case 1:
3761 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3762 if (op0 == TREE_OPERAND (exp, 0))
3763 return exp;
3764
3765 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
3766 break;
3767
3768 case 2:
3769 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3770 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3771
3772 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
3773 return exp;
3774
3775 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
3776 break;
3777
3778 case 3:
3779 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3780 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3781 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
3782
3783 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3784 && op2 == TREE_OPERAND (exp, 2))
3785 return exp;
3786
3787 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
3788 break;
3789
3790 case 4:
3791 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3792 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3793 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
3794 op3 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 3), f, r);
3795
3796 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3797 && op2 == TREE_OPERAND (exp, 2)
3798 && op3 == TREE_OPERAND (exp, 3))
3799 return exp;
3800
3801 new_tree
3802 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
3803 break;
3804
3805 default:
3806 gcc_unreachable ();
3807 }
3808 break;
3809
3810 case tcc_vl_exp:
3811 {
3812 int i;
3813
3814 new_tree = NULL_TREE;
3815
3816 /* If we are trying to replace F with a constant, inline back
3817 functions which do nothing else than computing a value from
3818 the arguments they are passed. This makes it possible to
3819 fold partially or entirely the replacement expression. */
3820 if (CONSTANT_CLASS_P (r) && code == CALL_EXPR)
3821 {
3822 tree t = maybe_inline_call_in_expr (exp);
3823 if (t)
3824 return SUBSTITUTE_IN_EXPR (t, f, r);
3825 }
3826
3827 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3828 {
3829 tree op = TREE_OPERAND (exp, i);
3830 tree new_op = SUBSTITUTE_IN_EXPR (op, f, r);
3831 if (new_op != op)
3832 {
3833 if (!new_tree)
3834 new_tree = copy_node (exp);
3835 TREE_OPERAND (new_tree, i) = new_op;
3836 }
3837 }
3838
3839 if (new_tree)
3840 {
3841 new_tree = fold (new_tree);
3842 if (TREE_CODE (new_tree) == CALL_EXPR)
3843 process_call_operands (new_tree);
3844 }
3845 else
3846 return exp;
3847 }
3848 break;
3849
3850 default:
3851 gcc_unreachable ();
3852 }
3853
3854 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
3855
3856 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
3857 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
3858
3859 return new_tree;
3860 }
3861
3862 /* Similar, but look for a PLACEHOLDER_EXPR in EXP and find a replacement
3863 for it within OBJ, a tree that is an object or a chain of references. */
3864
3865 tree
3866 substitute_placeholder_in_expr (tree exp, tree obj)
3867 {
3868 enum tree_code code = TREE_CODE (exp);
3869 tree op0, op1, op2, op3;
3870 tree new_tree;
3871
3872 /* If this is a PLACEHOLDER_EXPR, see if we find a corresponding type
3873 in the chain of OBJ. */
3874 if (code == PLACEHOLDER_EXPR)
3875 {
3876 tree need_type = TYPE_MAIN_VARIANT (TREE_TYPE (exp));
3877 tree elt;
3878
3879 for (elt = obj; elt != 0;
3880 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
3881 || TREE_CODE (elt) == COND_EXPR)
3882 ? TREE_OPERAND (elt, 1)
3883 : (REFERENCE_CLASS_P (elt)
3884 || UNARY_CLASS_P (elt)
3885 || BINARY_CLASS_P (elt)
3886 || VL_EXP_CLASS_P (elt)
3887 || EXPRESSION_CLASS_P (elt))
3888 ? TREE_OPERAND (elt, 0) : 0))
3889 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
3890 return elt;
3891
3892 for (elt = obj; elt != 0;
3893 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
3894 || TREE_CODE (elt) == COND_EXPR)
3895 ? TREE_OPERAND (elt, 1)
3896 : (REFERENCE_CLASS_P (elt)
3897 || UNARY_CLASS_P (elt)
3898 || BINARY_CLASS_P (elt)
3899 || VL_EXP_CLASS_P (elt)
3900 || EXPRESSION_CLASS_P (elt))
3901 ? TREE_OPERAND (elt, 0) : 0))
3902 if (POINTER_TYPE_P (TREE_TYPE (elt))
3903 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
3904 == need_type))
3905 return fold_build1 (INDIRECT_REF, need_type, elt);
3906
3907 /* If we didn't find it, return the original PLACEHOLDER_EXPR. If it
3908 survives until RTL generation, there will be an error. */
3909 return exp;
3910 }
3911
3912 /* TREE_LIST is special because we need to look at TREE_VALUE
3913 and TREE_CHAIN, not TREE_OPERANDS. */
3914 else if (code == TREE_LIST)
3915 {
3916 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), obj);
3917 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), obj);
3918 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
3919 return exp;
3920
3921 return tree_cons (TREE_PURPOSE (exp), op1, op0);
3922 }
3923 else
3924 switch (TREE_CODE_CLASS (code))
3925 {
3926 case tcc_constant:
3927 case tcc_declaration:
3928 return exp;
3929
3930 case tcc_exceptional:
3931 case tcc_unary:
3932 case tcc_binary:
3933 case tcc_comparison:
3934 case tcc_expression:
3935 case tcc_reference:
3936 case tcc_statement:
3937 switch (TREE_CODE_LENGTH (code))
3938 {
3939 case 0:
3940 return exp;
3941
3942 case 1:
3943 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3944 if (op0 == TREE_OPERAND (exp, 0))
3945 return exp;
3946
3947 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
3948 break;
3949
3950 case 2:
3951 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3952 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
3953
3954 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
3955 return exp;
3956
3957 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
3958 break;
3959
3960 case 3:
3961 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3962 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
3963 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
3964
3965 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3966 && op2 == TREE_OPERAND (exp, 2))
3967 return exp;
3968
3969 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
3970 break;
3971
3972 case 4:
3973 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3974 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
3975 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
3976 op3 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 3), obj);
3977
3978 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3979 && op2 == TREE_OPERAND (exp, 2)
3980 && op3 == TREE_OPERAND (exp, 3))
3981 return exp;
3982
3983 new_tree
3984 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
3985 break;
3986
3987 default:
3988 gcc_unreachable ();
3989 }
3990 break;
3991
3992 case tcc_vl_exp:
3993 {
3994 int i;
3995
3996 new_tree = NULL_TREE;
3997
3998 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3999 {
4000 tree op = TREE_OPERAND (exp, i);
4001 tree new_op = SUBSTITUTE_PLACEHOLDER_IN_EXPR (op, obj);
4002 if (new_op != op)
4003 {
4004 if (!new_tree)
4005 new_tree = copy_node (exp);
4006 TREE_OPERAND (new_tree, i) = new_op;
4007 }
4008 }
4009
4010 if (new_tree)
4011 {
4012 new_tree = fold (new_tree);
4013 if (TREE_CODE (new_tree) == CALL_EXPR)
4014 process_call_operands (new_tree);
4015 }
4016 else
4017 return exp;
4018 }
4019 break;
4020
4021 default:
4022 gcc_unreachable ();
4023 }
4024
4025 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
4026
4027 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
4028 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
4029
4030 return new_tree;
4031 }
4032 \f
4033
4034 /* Subroutine of stabilize_reference; this is called for subtrees of
4035 references. Any expression with side-effects must be put in a SAVE_EXPR
4036 to ensure that it is only evaluated once.
4037
4038 We don't put SAVE_EXPR nodes around everything, because assigning very
4039 simple expressions to temporaries causes us to miss good opportunities
4040 for optimizations. Among other things, the opportunity to fold in the
4041 addition of a constant into an addressing mode often gets lost, e.g.
4042 "y[i+1] += x;". In general, we take the approach that we should not make
4043 an assignment unless we are forced into it - i.e., that any non-side effect
4044 operator should be allowed, and that cse should take care of coalescing
4045 multiple utterances of the same expression should that prove fruitful. */
4046
4047 static tree
4048 stabilize_reference_1 (tree e)
4049 {
4050 tree result;
4051 enum tree_code code = TREE_CODE (e);
4052
4053 /* We cannot ignore const expressions because it might be a reference
4054 to a const array but whose index contains side-effects. But we can
4055 ignore things that are actual constant or that already have been
4056 handled by this function. */
4057
4058 if (tree_invariant_p (e))
4059 return e;
4060
4061 switch (TREE_CODE_CLASS (code))
4062 {
4063 case tcc_exceptional:
4064 case tcc_type:
4065 case tcc_declaration:
4066 case tcc_comparison:
4067 case tcc_statement:
4068 case tcc_expression:
4069 case tcc_reference:
4070 case tcc_vl_exp:
4071 /* If the expression has side-effects, then encase it in a SAVE_EXPR
4072 so that it will only be evaluated once. */
4073 /* The reference (r) and comparison (<) classes could be handled as
4074 below, but it is generally faster to only evaluate them once. */
4075 if (TREE_SIDE_EFFECTS (e))
4076 return save_expr (e);
4077 return e;
4078
4079 case tcc_constant:
4080 /* Constants need no processing. In fact, we should never reach
4081 here. */
4082 return e;
4083
4084 case tcc_binary:
4085 /* Division is slow and tends to be compiled with jumps,
4086 especially the division by powers of 2 that is often
4087 found inside of an array reference. So do it just once. */
4088 if (code == TRUNC_DIV_EXPR || code == TRUNC_MOD_EXPR
4089 || code == FLOOR_DIV_EXPR || code == FLOOR_MOD_EXPR
4090 || code == CEIL_DIV_EXPR || code == CEIL_MOD_EXPR
4091 || code == ROUND_DIV_EXPR || code == ROUND_MOD_EXPR)
4092 return save_expr (e);
4093 /* Recursively stabilize each operand. */
4094 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)),
4095 stabilize_reference_1 (TREE_OPERAND (e, 1)));
4096 break;
4097
4098 case tcc_unary:
4099 /* Recursively stabilize each operand. */
4100 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)));
4101 break;
4102
4103 default:
4104 gcc_unreachable ();
4105 }
4106
4107 TREE_TYPE (result) = TREE_TYPE (e);
4108 TREE_READONLY (result) = TREE_READONLY (e);
4109 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (e);
4110 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
4111
4112 return result;
4113 }
4114
4115 /* Stabilize a reference so that we can use it any number of times
4116 without causing its operands to be evaluated more than once.
4117 Returns the stabilized reference. This works by means of save_expr,
4118 so see the caveats in the comments about save_expr.
4119
4120 Also allows conversion expressions whose operands are references.
4121 Any other kind of expression is returned unchanged. */
4122
4123 tree
4124 stabilize_reference (tree ref)
4125 {
4126 tree result;
4127 enum tree_code code = TREE_CODE (ref);
4128
4129 switch (code)
4130 {
4131 case VAR_DECL:
4132 case PARM_DECL:
4133 case RESULT_DECL:
4134 /* No action is needed in this case. */
4135 return ref;
4136
4137 CASE_CONVERT:
4138 case FLOAT_EXPR:
4139 case FIX_TRUNC_EXPR:
4140 result = build_nt (code, stabilize_reference (TREE_OPERAND (ref, 0)));
4141 break;
4142
4143 case INDIRECT_REF:
4144 result = build_nt (INDIRECT_REF,
4145 stabilize_reference_1 (TREE_OPERAND (ref, 0)));
4146 break;
4147
4148 case COMPONENT_REF:
4149 result = build_nt (COMPONENT_REF,
4150 stabilize_reference (TREE_OPERAND (ref, 0)),
4151 TREE_OPERAND (ref, 1), NULL_TREE);
4152 break;
4153
4154 case BIT_FIELD_REF:
4155 result = build_nt (BIT_FIELD_REF,
4156 stabilize_reference (TREE_OPERAND (ref, 0)),
4157 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
4158 break;
4159
4160 case ARRAY_REF:
4161 result = build_nt (ARRAY_REF,
4162 stabilize_reference (TREE_OPERAND (ref, 0)),
4163 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4164 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4165 break;
4166
4167 case ARRAY_RANGE_REF:
4168 result = build_nt (ARRAY_RANGE_REF,
4169 stabilize_reference (TREE_OPERAND (ref, 0)),
4170 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4171 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4172 break;
4173
4174 case COMPOUND_EXPR:
4175 /* We cannot wrap the first expression in a SAVE_EXPR, as then
4176 it wouldn't be ignored. This matters when dealing with
4177 volatiles. */
4178 return stabilize_reference_1 (ref);
4179
4180 /* If arg isn't a kind of lvalue we recognize, make no change.
4181 Caller should recognize the error for an invalid lvalue. */
4182 default:
4183 return ref;
4184
4185 case ERROR_MARK:
4186 return error_mark_node;
4187 }
4188
4189 TREE_TYPE (result) = TREE_TYPE (ref);
4190 TREE_READONLY (result) = TREE_READONLY (ref);
4191 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (ref);
4192 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);
4193
4194 return result;
4195 }
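/* A purely illustrative sketch (not part of the original sources): given a
   front-end tree for the lvalue "a[i++]", stabilize_reference returns an
   ARRAY_REF whose index has been processed by stabilize_reference_1, roughly

       ARRAY_REF <a, SAVE_EXPR <i++>>

   so the returned reference can be reused several times while the increment
   of "i" is evaluated only once.  */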
4196 \f
4197 /* Low-level constructors for expressions. */
4198
4199 /* A helper function for build1 and constant folders. Set TREE_CONSTANT
4200 and TREE_SIDE_EFFECTS for an ADDR_EXPR. */
4201
4202 void
4203 recompute_tree_invariant_for_addr_expr (tree t)
4204 {
4205 tree node;
4206 bool tc = true, se = false;
4207
4208 /* We start out assuming this address is both invariant and constant, and
4209 has no side effects. Now go down any handled components and see if
4210 any of them involve offsets that are either non-constant or non-invariant.
4211 Also check for side-effects.
4212
4213 ??? Note that this code makes no attempt to deal with the case where
4214 taking the address of something causes a copy due to misalignment. */
4215
4216 #define UPDATE_FLAGS(NODE) \
4217 do { tree _node = (NODE); \
4218 if (_node && !TREE_CONSTANT (_node)) tc = false; \
4219 if (_node && TREE_SIDE_EFFECTS (_node)) se = true; } while (0)
4220
4221 for (node = TREE_OPERAND (t, 0); handled_component_p (node);
4222 node = TREE_OPERAND (node, 0))
4223 {
4224 /* If the first operand doesn't have an ARRAY_TYPE, this is a bogus
4225 array reference (probably made temporarily by the G++ front end),
4226 so ignore all the operands. */
4227 if ((TREE_CODE (node) == ARRAY_REF
4228 || TREE_CODE (node) == ARRAY_RANGE_REF)
4229 && TREE_CODE (TREE_TYPE (TREE_OPERAND (node, 0))) == ARRAY_TYPE)
4230 {
4231 UPDATE_FLAGS (TREE_OPERAND (node, 1));
4232 if (TREE_OPERAND (node, 2))
4233 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4234 if (TREE_OPERAND (node, 3))
4235 UPDATE_FLAGS (TREE_OPERAND (node, 3));
4236 }
4237 /* Likewise, just because this is a COMPONENT_REF doesn't mean we have a
4238 FIELD_DECL, apparently. The G++ front end can put something else
4239 there, at least temporarily. */
4240 else if (TREE_CODE (node) == COMPONENT_REF
4241 && TREE_CODE (TREE_OPERAND (node, 1)) == FIELD_DECL)
4242 {
4243 if (TREE_OPERAND (node, 2))
4244 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4245 }
4246 }
4247
4248 node = lang_hooks.expr_to_decl (node, &tc, &se);
4249
4250 /* Now see what's inside. If it's an INDIRECT_REF, copy our properties from
4251 the address, since &(*a)->b is a form of addition. If it's a constant, the
4252 address is constant too. If it's a decl, its address is constant if the
4253 decl is static. Everything else is not constant and, furthermore,
4254 taking the address of a volatile variable is not volatile. */
4255 if (TREE_CODE (node) == INDIRECT_REF
4256 || TREE_CODE (node) == MEM_REF)
4257 UPDATE_FLAGS (TREE_OPERAND (node, 0));
4258 else if (CONSTANT_CLASS_P (node))
4259 ;
4260 else if (DECL_P (node))
4261 tc &= (staticp (node) != NULL_TREE);
4262 else
4263 {
4264 tc = false;
4265 se |= TREE_SIDE_EFFECTS (node);
4266 }
4267
4268
4269 TREE_CONSTANT (t) = tc;
4270 TREE_SIDE_EFFECTS (t) = se;
4271 #undef UPDATE_FLAGS
4272 }
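/* Illustration only (not from the original sources): for an ADDR_EXPR such
   as "&some_static_var" the walk ends at a static DECL, so TREE_CONSTANT
   remains set on the ADDR_EXPR; for "&a[i]" with a non-constant index "i",
   UPDATE_FLAGS clears the constant flag, and if "i" has side effects the
   ADDR_EXPR is marked TREE_SIDE_EFFECTS as well.  */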
4273
4274 /* Build an expression of code CODE, data type TYPE, and operands as
4275 specified. Expressions and reference nodes can be created this way.
4276 Constants, decls, types and misc nodes cannot be.
4277
4278 We define six non-variadic functions, from 0 to 5 arguments. This is
4279 enough for all extant tree codes. */
4280
4281 tree
4282 build0_stat (enum tree_code code, tree tt MEM_STAT_DECL)
4283 {
4284 tree t;
4285
4286 gcc_assert (TREE_CODE_LENGTH (code) == 0);
4287
4288 t = make_node_stat (code PASS_MEM_STAT);
4289 TREE_TYPE (t) = tt;
4290
4291 return t;
4292 }
4293
4294 tree
4295 build1_stat (enum tree_code code, tree type, tree node MEM_STAT_DECL)
4296 {
4297 int length = sizeof (struct tree_exp);
4298 tree t;
4299
4300 record_node_allocation_statistics (code, length);
4301
4302 gcc_assert (TREE_CODE_LENGTH (code) == 1);
4303
4304 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
4305
4306 memset (t, 0, sizeof (struct tree_common));
4307
4308 TREE_SET_CODE (t, code);
4309
4310 TREE_TYPE (t) = type;
4311 SET_EXPR_LOCATION (t, UNKNOWN_LOCATION);
4312 TREE_OPERAND (t, 0) = node;
4313 if (node && !TYPE_P (node))
4314 {
4315 TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (node);
4316 TREE_READONLY (t) = TREE_READONLY (node);
4317 }
4318
4319 if (TREE_CODE_CLASS (code) == tcc_statement)
4320 TREE_SIDE_EFFECTS (t) = 1;
4321 else switch (code)
4322 {
4323 case VA_ARG_EXPR:
4324 /* All of these have side-effects, no matter what their
4325 operands are. */
4326 TREE_SIDE_EFFECTS (t) = 1;
4327 TREE_READONLY (t) = 0;
4328 break;
4329
4330 case INDIRECT_REF:
4331 /* Whether a dereference is readonly has nothing to do with whether
4332 its operand is readonly. */
4333 TREE_READONLY (t) = 0;
4334 break;
4335
4336 case ADDR_EXPR:
4337 if (node)
4338 recompute_tree_invariant_for_addr_expr (t);
4339 break;
4340
4341 default:
4342 if ((TREE_CODE_CLASS (code) == tcc_unary || code == VIEW_CONVERT_EXPR)
4343 && node && !TYPE_P (node)
4344 && TREE_CONSTANT (node))
4345 TREE_CONSTANT (t) = 1;
4346 if (TREE_CODE_CLASS (code) == tcc_reference
4347 && node && TREE_THIS_VOLATILE (node))
4348 TREE_THIS_VOLATILE (t) = 1;
4349 break;
4350 }
4351
4352 return t;
4353 }
4354
4355 #define PROCESS_ARG(N) \
4356 do { \
4357 TREE_OPERAND (t, N) = arg##N; \
4358 if (arg##N &&!TYPE_P (arg##N)) \
4359 { \
4360 if (TREE_SIDE_EFFECTS (arg##N)) \
4361 side_effects = 1; \
4362 if (!TREE_READONLY (arg##N) \
4363 && !CONSTANT_CLASS_P (arg##N)) \
4364 (void) (read_only = 0); \
4365 if (!TREE_CONSTANT (arg##N)) \
4366 (void) (constant = 0); \
4367 } \
4368 } while (0)
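/* As a rough sketch of the macro above (illustrative expansion only),
   PROCESS_ARG (0) behaves like:

       TREE_OPERAND (t, 0) = arg0;
       if (arg0 && !TYPE_P (arg0))
	 {
	   if (TREE_SIDE_EFFECTS (arg0))
	     side_effects = 1;
	   if (!TREE_READONLY (arg0) && !CONSTANT_CLASS_P (arg0))
	     read_only = 0;
	   if (!TREE_CONSTANT (arg0))
	     constant = 0;
	 }  */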
4369
4370 tree
4371 build2_stat (enum tree_code code, tree tt, tree arg0, tree arg1 MEM_STAT_DECL)
4372 {
4373 bool constant, read_only, side_effects;
4374 tree t;
4375
4376 gcc_assert (TREE_CODE_LENGTH (code) == 2);
4377
4378 if ((code == MINUS_EXPR || code == PLUS_EXPR || code == MULT_EXPR)
4379 && arg0 && arg1 && tt && POINTER_TYPE_P (tt)
4380 /* When sizetype precision doesn't match that of pointers
4381 we need to be able to build explicit extensions or truncations
4382 of the offset argument. */
4383 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (tt))
4384 gcc_assert (TREE_CODE (arg0) == INTEGER_CST
4385 && TREE_CODE (arg1) == INTEGER_CST);
4386
4387 if (code == POINTER_PLUS_EXPR && arg0 && arg1 && tt)
4388 gcc_assert (POINTER_TYPE_P (tt) && POINTER_TYPE_P (TREE_TYPE (arg0))
4389 && ptrofftype_p (TREE_TYPE (arg1)));
4390
4391 t = make_node_stat (code PASS_MEM_STAT);
4392 TREE_TYPE (t) = tt;
4393
4394 /* Below, we automatically set TREE_SIDE_EFFECTS and TREE_READONLY for the
4395 result based on those same flags for the arguments. But if the
4396 arguments aren't really even `tree' expressions, we shouldn't be trying
4397 to do this. */
4398
4399 /* Expressions without side effects may be constant if their
4400 arguments are as well. */
4401 constant = (TREE_CODE_CLASS (code) == tcc_comparison
4402 || TREE_CODE_CLASS (code) == tcc_binary);
4403 read_only = 1;
4404 side_effects = TREE_SIDE_EFFECTS (t);
4405
4406 PROCESS_ARG (0);
4407 PROCESS_ARG (1);
4408
4409 TREE_SIDE_EFFECTS (t) = side_effects;
4410 if (code == MEM_REF)
4411 {
4412 if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
4413 {
4414 tree o = TREE_OPERAND (arg0, 0);
4415 TREE_READONLY (t) = TREE_READONLY (o);
4416 TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
4417 }
4418 }
4419 else
4420 {
4421 TREE_READONLY (t) = read_only;
4422 TREE_CONSTANT (t) = constant;
4423 TREE_THIS_VOLATILE (t)
4424 = (TREE_CODE_CLASS (code) == tcc_reference
4425 && arg0 && TREE_THIS_VOLATILE (arg0));
4426 }
4427
4428 return t;
4429 }
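/* A minimal usage sketch (hypothetical operands A and B, both of
   integer_type_node):

       tree sum = build2 (PLUS_EXPR, integer_type_node, a, b);

   The flags on SUM follow from PROCESS_ARG above: TREE_CONSTANT is set only
   if both operands are constant, and TREE_SIDE_EFFECTS is set as soon as
   either operand has side effects.  */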
4430
4431
4432 tree
4433 build3_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4434 tree arg2 MEM_STAT_DECL)
4435 {
4436 bool constant, read_only, side_effects;
4437 tree t;
4438
4439 gcc_assert (TREE_CODE_LENGTH (code) == 3);
4440 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4441
4442 t = make_node_stat (code PASS_MEM_STAT);
4443 TREE_TYPE (t) = tt;
4444
4445 read_only = 1;
4446
4447 /* As a special exception, if COND_EXPR has NULL branches, we
4448 assume that it is a gimple statement and always consider
4449 it to have side effects. */
4450 if (code == COND_EXPR
4451 && tt == void_type_node
4452 && arg1 == NULL_TREE
4453 && arg2 == NULL_TREE)
4454 side_effects = true;
4455 else
4456 side_effects = TREE_SIDE_EFFECTS (t);
4457
4458 PROCESS_ARG (0);
4459 PROCESS_ARG (1);
4460 PROCESS_ARG (2);
4461
4462 if (code == COND_EXPR)
4463 TREE_READONLY (t) = read_only;
4464
4465 TREE_SIDE_EFFECTS (t) = side_effects;
4466 TREE_THIS_VOLATILE (t)
4467 = (TREE_CODE_CLASS (code) == tcc_reference
4468 && arg0 && TREE_THIS_VOLATILE (arg0));
4469
4470 return t;
4471 }
4472
4473 tree
4474 build4_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4475 tree arg2, tree arg3 MEM_STAT_DECL)
4476 {
4477 bool constant, read_only, side_effects;
4478 tree t;
4479
4480 gcc_assert (TREE_CODE_LENGTH (code) == 4);
4481
4482 t = make_node_stat (code PASS_MEM_STAT);
4483 TREE_TYPE (t) = tt;
4484
4485 side_effects = TREE_SIDE_EFFECTS (t);
4486
4487 PROCESS_ARG (0);
4488 PROCESS_ARG (1);
4489 PROCESS_ARG (2);
4490 PROCESS_ARG (3);
4491
4492 TREE_SIDE_EFFECTS (t) = side_effects;
4493 TREE_THIS_VOLATILE (t)
4494 = (TREE_CODE_CLASS (code) == tcc_reference
4495 && arg0 && TREE_THIS_VOLATILE (arg0));
4496
4497 return t;
4498 }
4499
4500 tree
4501 build5_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4502 tree arg2, tree arg3, tree arg4 MEM_STAT_DECL)
4503 {
4504 bool constant, read_only, side_effects;
4505 tree t;
4506
4507 gcc_assert (TREE_CODE_LENGTH (code) == 5);
4508
4509 t = make_node_stat (code PASS_MEM_STAT);
4510 TREE_TYPE (t) = tt;
4511
4512 side_effects = TREE_SIDE_EFFECTS (t);
4513
4514 PROCESS_ARG (0);
4515 PROCESS_ARG (1);
4516 PROCESS_ARG (2);
4517 PROCESS_ARG (3);
4518 PROCESS_ARG (4);
4519
4520 TREE_SIDE_EFFECTS (t) = side_effects;
4521 if (code == TARGET_MEM_REF)
4522 {
4523 if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
4524 {
4525 tree o = TREE_OPERAND (arg0, 0);
4526 TREE_READONLY (t) = TREE_READONLY (o);
4527 TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
4528 }
4529 }
4530 else
4531 TREE_THIS_VOLATILE (t)
4532 = (TREE_CODE_CLASS (code) == tcc_reference
4533 && arg0 && TREE_THIS_VOLATILE (arg0));
4534
4535 return t;
4536 }
4537
4538 /* Build a simple MEM_REF tree with the semantics of a plain INDIRECT_REF
4539 on the pointer PTR. */
4540
4541 tree
4542 build_simple_mem_ref_loc (location_t loc, tree ptr)
4543 {
4544 HOST_WIDE_INT offset = 0;
4545 tree ptype = TREE_TYPE (ptr);
4546 tree tem;
4547 /* For convenience allow addresses that collapse to a simple base
4548 and offset. */
4549 if (TREE_CODE (ptr) == ADDR_EXPR
4550 && (handled_component_p (TREE_OPERAND (ptr, 0))
4551 || TREE_CODE (TREE_OPERAND (ptr, 0)) == MEM_REF))
4552 {
4553 ptr = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &offset);
4554 gcc_assert (ptr);
4555 ptr = build_fold_addr_expr (ptr);
4556 gcc_assert (is_gimple_reg (ptr) || is_gimple_min_invariant (ptr));
4557 }
4558 tem = build2 (MEM_REF, TREE_TYPE (ptype),
4559 ptr, build_int_cst (ptype, offset));
4560 SET_EXPR_LOCATION (tem, loc);
4561 return tem;
4562 }
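/* For illustration only (S and F are hypothetical): if PTR is the ADDR_EXPR
   "&s.f" and the address collapses to the base "&s" plus a byte offset of 4,
   the result is roughly

       MEM_REF [&s, 4]

   where the offset 4 is an INTEGER_CST of the original pointer type, so that
   mem_ref_offset below can recover it.  */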
4563
4564 /* Return the constant offset of a MEM_REF or TARGET_MEM_REF tree T. */
4565
4566 offset_int
4567 mem_ref_offset (const_tree t)
4568 {
4569 return offset_int::from (TREE_OPERAND (t, 1), SIGNED);
4570 }
4571
4572 /* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE
4573 offsetted by OFFSET units. */
4574
4575 tree
4576 build_invariant_address (tree type, tree base, HOST_WIDE_INT offset)
4577 {
4578 tree ref = fold_build2 (MEM_REF, TREE_TYPE (type),
4579 build_fold_addr_expr (base),
4580 build_int_cst (ptr_type_node, offset));
4581 tree addr = build1 (ADDR_EXPR, type, ref);
4582 recompute_tree_invariant_for_addr_expr (addr);
4583 return addr;
4584 }
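/* A hedged usage sketch (BASE_DECL is a hypothetical static decl):

       tree addr = build_invariant_address (build_pointer_type (char_type_node),
					     base_decl, 8);

   builds the address &MEM_REF [&base_decl, 8]; the final call to
   recompute_tree_invariant_for_addr_expr marks ADDR constant when the base
   address itself is constant.  */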
4585
4586 /* Similar to the build functions above, except don't specify the TREE_TYPE
4587 and leave the TREE_SIDE_EFFECTS as 0.
4588 It is permissible for arguments to be null,
4589 or even garbage if their values do not matter. */
4590
4591 tree
4592 build_nt (enum tree_code code, ...)
4593 {
4594 tree t;
4595 int length;
4596 int i;
4597 va_list p;
4598
4599 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4600
4601 va_start (p, code);
4602
4603 t = make_node (code);
4604 length = TREE_CODE_LENGTH (code);
4605
4606 for (i = 0; i < length; i++)
4607 TREE_OPERAND (t, i) = va_arg (p, tree);
4608
4609 va_end (p);
4610 return t;
4611 }
4612
4613 /* Similar to build_nt, but for creating a CALL_EXPR object with a
4614 tree vec. */
4615
4616 tree
4617 build_nt_call_vec (tree fn, vec<tree, va_gc> *args)
4618 {
4619 tree ret, t;
4620 unsigned int ix;
4621
4622 ret = build_vl_exp (CALL_EXPR, vec_safe_length (args) + 3);
4623 CALL_EXPR_FN (ret) = fn;
4624 CALL_EXPR_STATIC_CHAIN (ret) = NULL_TREE;
4625 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
4626 CALL_EXPR_ARG (ret, ix) = t;
4627 return ret;
4628 }
4629 \f
4630 /* Create a DECL_... node of code CODE, name NAME and data type TYPE.
4631 We do NOT enter this node in any sort of symbol table.
4632
4633 LOC is the location of the decl.
4634
4635 layout_decl is used to set up the decl's storage layout.
4636 Other slots are initialized to 0 or null pointers. */
4637
4638 tree
4639 build_decl_stat (location_t loc, enum tree_code code, tree name,
4640 tree type MEM_STAT_DECL)
4641 {
4642 tree t;
4643
4644 t = make_node_stat (code PASS_MEM_STAT);
4645 DECL_SOURCE_LOCATION (t) = loc;
4646
4647 /* if (type == error_mark_node)
4648 type = integer_type_node; */
4649 /* That is not done, deliberately, so that having error_mark_node
4650 as the type can suppress useless errors in the use of this variable. */
4651
4652 DECL_NAME (t) = name;
4653 TREE_TYPE (t) = type;
4654
4655 if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
4656 layout_decl (t, 0);
4657
4658 return t;
4659 }
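/* A minimal usage sketch (the name "tmp" is purely illustrative):

       tree var = build_decl (input_location, VAR_DECL,
			      get_identifier ("tmp"), integer_type_node);

   Since the code is VAR_DECL, layout_decl has already determined the decl's
   size and alignment by the time build_decl returns.  */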
4660
4661 /* Builds and returns function declaration with NAME and TYPE. */
4662
4663 tree
4664 build_fn_decl (const char *name, tree type)
4665 {
4666 tree id = get_identifier (name);
4667 tree decl = build_decl (input_location, FUNCTION_DECL, id, type);
4668
4669 DECL_EXTERNAL (decl) = 1;
4670 TREE_PUBLIC (decl) = 1;
4671 DECL_ARTIFICIAL (decl) = 1;
4672 TREE_NOTHROW (decl) = 1;
4673
4674 return decl;
4675 }
4676
4677 vec<tree, va_gc> *all_translation_units;
4678
4679 /* Builds a new translation-unit decl with name NAME, queues it in the
4680 global list of translation-unit decls and returns it. */
4681
4682 tree
4683 build_translation_unit_decl (tree name)
4684 {
4685 tree tu = build_decl (UNKNOWN_LOCATION, TRANSLATION_UNIT_DECL,
4686 name, NULL_TREE);
4687 TRANSLATION_UNIT_LANGUAGE (tu) = lang_hooks.name;
4688 vec_safe_push (all_translation_units, tu);
4689 return tu;
4690 }
4691
4692 \f
4693 /* BLOCK nodes are used to represent the structure of binding contours
4694 and declarations, once those contours have been exited and their contents
4695 compiled. This information is used for outputting debugging info. */
4696
4697 tree
4698 build_block (tree vars, tree subblocks, tree supercontext, tree chain)
4699 {
4700 tree block = make_node (BLOCK);
4701
4702 BLOCK_VARS (block) = vars;
4703 BLOCK_SUBBLOCKS (block) = subblocks;
4704 BLOCK_SUPERCONTEXT (block) = supercontext;
4705 BLOCK_CHAIN (block) = chain;
4706 return block;
4707 }
4708
4709 \f
4710 /* Like SET_EXPR_LOCATION, but make sure the tree can have a location.
4711
4712 LOC is the location to use in tree T. */
4713
4714 void
4715 protected_set_expr_location (tree t, location_t loc)
4716 {
4717 if (CAN_HAVE_LOCATION_P (t))
4718 SET_EXPR_LOCATION (t, loc);
4719 }
4720 \f
4721 /* Return a declaration like DDECL except that its DECL_ATTRIBUTES
4722 is ATTRIBUTE. */
4723
4724 tree
4725 build_decl_attribute_variant (tree ddecl, tree attribute)
4726 {
4727 DECL_ATTRIBUTES (ddecl) = attribute;
4728 return ddecl;
4729 }
4730
4731 /* Return a type like TTYPE except that its TYPE_ATTRIBUTES
4732 is ATTRIBUTE and its qualifiers are QUALS.
4733
4734 Record such modified types already made so we don't make duplicates. */
4735
4736 tree
4737 build_type_attribute_qual_variant (tree ttype, tree attribute, int quals)
4738 {
4739 if (! attribute_list_equal (TYPE_ATTRIBUTES (ttype), attribute))
4740 {
4741 inchash::hash hstate;
4742 tree ntype;
4743 int i;
4744 tree t;
4745 enum tree_code code = TREE_CODE (ttype);
4746
4747 /* Building a distinct copy of a tagged type is inappropriate; it
4748 causes breakage in code that expects there to be a one-to-one
4749 relationship between a struct and its fields.
4750 build_duplicate_type is another solution (as used in
4751 handle_transparent_union_attribute), but that doesn't play well
4752 with the stronger C++ type identity model. */
4753 if (TREE_CODE (ttype) == RECORD_TYPE
4754 || TREE_CODE (ttype) == UNION_TYPE
4755 || TREE_CODE (ttype) == QUAL_UNION_TYPE
4756 || TREE_CODE (ttype) == ENUMERAL_TYPE)
4757 {
4758 warning (OPT_Wattributes,
4759 "ignoring attributes applied to %qT after definition",
4760 TYPE_MAIN_VARIANT (ttype));
4761 return build_qualified_type (ttype, quals);
4762 }
4763
4764 ttype = build_qualified_type (ttype, TYPE_UNQUALIFIED);
4765 ntype = build_distinct_type_copy (ttype);
4766
4767 TYPE_ATTRIBUTES (ntype) = attribute;
4768
4769 hstate.add_int (code);
4770 if (TREE_TYPE (ntype))
4771 hstate.add_object (TYPE_HASH (TREE_TYPE (ntype)));
4772 attribute_hash_list (attribute, hstate);
4773
4774 switch (TREE_CODE (ntype))
4775 {
4776 case FUNCTION_TYPE:
4777 type_hash_list (TYPE_ARG_TYPES (ntype), hstate);
4778 break;
4779 case ARRAY_TYPE:
4780 if (TYPE_DOMAIN (ntype))
4781 hstate.add_object (TYPE_HASH (TYPE_DOMAIN (ntype)));
4782 break;
4783 case INTEGER_TYPE:
4784 t = TYPE_MAX_VALUE (ntype);
4785 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
4786 hstate.add_object (TREE_INT_CST_ELT (t, i));
4787 break;
4788 case REAL_TYPE:
4789 case FIXED_POINT_TYPE:
4790 {
4791 unsigned int precision = TYPE_PRECISION (ntype);
4792 hstate.add_object (precision);
4793 }
4794 break;
4795 default:
4796 break;
4797 }
4798
4799 ntype = type_hash_canon (hstate.end(), ntype);
4800
4801 /* If the target-dependent attributes make NTYPE different from
4802 its canonical type, we will need to use structural equality
4803 checks for this type. */
4804 if (TYPE_STRUCTURAL_EQUALITY_P (ttype)
4805 || !comp_type_attributes (ntype, ttype))
4806 SET_TYPE_STRUCTURAL_EQUALITY (ntype);
4807 else if (TYPE_CANONICAL (ntype) == ntype)
4808 TYPE_CANONICAL (ntype) = TYPE_CANONICAL (ttype);
4809
4810 ttype = build_qualified_type (ntype, quals);
4811 }
4812 else if (TYPE_QUALS (ttype) != quals)
4813 ttype = build_qualified_type (ttype, quals);
4814
4815 return ttype;
4816 }
4817
4818 /* Check if "omp declare simd" attribute arguments, CLAUSES1 and CLAUSES2, are
4819 the same. */
4820
4821 static bool
4822 omp_declare_simd_clauses_equal (tree clauses1, tree clauses2)
4823 {
4824 tree cl1, cl2;
4825 for (cl1 = clauses1, cl2 = clauses2;
4826 cl1 && cl2;
4827 cl1 = OMP_CLAUSE_CHAIN (cl1), cl2 = OMP_CLAUSE_CHAIN (cl2))
4828 {
4829 if (OMP_CLAUSE_CODE (cl1) != OMP_CLAUSE_CODE (cl2))
4830 return false;
4831 if (OMP_CLAUSE_CODE (cl1) != OMP_CLAUSE_SIMDLEN)
4832 {
4833 if (simple_cst_equal (OMP_CLAUSE_DECL (cl1),
4834 OMP_CLAUSE_DECL (cl2)) != 1)
4835 return false;
4836 }
4837 switch (OMP_CLAUSE_CODE (cl1))
4838 {
4839 case OMP_CLAUSE_ALIGNED:
4840 if (simple_cst_equal (OMP_CLAUSE_ALIGNED_ALIGNMENT (cl1),
4841 OMP_CLAUSE_ALIGNED_ALIGNMENT (cl2)) != 1)
4842 return false;
4843 break;
4844 case OMP_CLAUSE_LINEAR:
4845 if (simple_cst_equal (OMP_CLAUSE_LINEAR_STEP (cl1),
4846 OMP_CLAUSE_LINEAR_STEP (cl2)) != 1)
4847 return false;
4848 break;
4849 case OMP_CLAUSE_SIMDLEN:
4850 if (simple_cst_equal (OMP_CLAUSE_SIMDLEN_EXPR (cl1),
4851 OMP_CLAUSE_SIMDLEN_EXPR (cl2)) != 1)
4852 return false;
4853 default:
4854 break;
4855 }
4856 }
4857 return true;
4858 }
4859
4860 /* Compare two constructor-element-type constants. Return true if the lists
4861 are known to be equal; otherwise return false. */
4862
4863 static bool
4864 simple_cst_list_equal (const_tree l1, const_tree l2)
4865 {
4866 while (l1 != NULL_TREE && l2 != NULL_TREE)
4867 {
4868 if (simple_cst_equal (TREE_VALUE (l1), TREE_VALUE (l2)) != 1)
4869 return false;
4870
4871 l1 = TREE_CHAIN (l1);
4872 l2 = TREE_CHAIN (l2);
4873 }
4874
4875 return l1 == l2;
4876 }
4877
4878 /* Compare two identifier nodes representing attributes. Either one may
4879 be in wrapped __ATTR__ form. Return true if they are the same, false
4880 otherwise. */
4881
4882 static bool
4883 cmp_attrib_identifiers (const_tree attr1, const_tree attr2)
4884 {
4885 /* Make sure we're dealing with IDENTIFIER_NODEs. */
4886 gcc_checking_assert (TREE_CODE (attr1) == IDENTIFIER_NODE
4887 && TREE_CODE (attr2) == IDENTIFIER_NODE);
4888
4889 /* Identifiers can be compared directly for equality. */
4890 if (attr1 == attr2)
4891 return true;
4892
4893 /* If they are not equal, one may still be in the form
4894 'text' while the other is in the form '__text__'. TODO:
4895 If we were storing attributes in normalized 'text' form, then
4896 this could all go away and we could take full advantage of
4897 the fact that we're comparing identifiers. :-) */
4898 const size_t attr1_len = IDENTIFIER_LENGTH (attr1);
4899 const size_t attr2_len = IDENTIFIER_LENGTH (attr2);
4900
4901 if (attr2_len == attr1_len + 4)
4902 {
4903 const char *p = IDENTIFIER_POINTER (attr2);
4904 const char *q = IDENTIFIER_POINTER (attr1);
4905 if (p[0] == '_' && p[1] == '_'
4906 && p[attr2_len - 2] == '_' && p[attr2_len - 1] == '_'
4907 && strncmp (q, p + 2, attr1_len) == 0)
4908 return true;
4909 }
4910 else if (attr2_len + 4 == attr1_len)
4911 {
4912 const char *p = IDENTIFIER_POINTER (attr2);
4913 const char *q = IDENTIFIER_POINTER (attr1);
4914 if (q[0] == '_' && q[1] == '_'
4915 && q[attr1_len - 2] == '_' && q[attr1_len - 1] == '_'
4916 && strncmp (q + 2, p, attr2_len) == 0)
4917 return true;
4918 }
4919
4920 return false;
4921 }
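/* Illustrative cases only: cmp_attrib_identifiers considers
   get_identifier ("format") and get_identifier ("__format__") equal,
   likewise "aligned" and "__aligned__", while "format" and "__printf__"
   are not equal.  */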
4922
4923 /* Compare two attributes for their value identity. Return true if the
4924 attribute values are known to be equal; otherwise return false. */
4925
4926 bool
4927 attribute_value_equal (const_tree attr1, const_tree attr2)
4928 {
4929 if (TREE_VALUE (attr1) == TREE_VALUE (attr2))
4930 return true;
4931
4932 if (TREE_VALUE (attr1) != NULL_TREE
4933 && TREE_CODE (TREE_VALUE (attr1)) == TREE_LIST
4934 && TREE_VALUE (attr2) != NULL_TREE
4935 && TREE_CODE (TREE_VALUE (attr2)) == TREE_LIST)
4936 {
4937 /* Handle attribute format. */
4938 if (is_attribute_p ("format", TREE_PURPOSE (attr1)))
4939 {
4940 attr1 = TREE_VALUE (attr1);
4941 attr2 = TREE_VALUE (attr2);
4942 /* Compare the archetypes (printf/scanf/strftime/...). */
4943 if (!cmp_attrib_identifiers (TREE_VALUE (attr1),
4944 TREE_VALUE (attr2)))
4945 return false;
4946 /* Archetypes are the same. Compare the rest. */
4947 return (simple_cst_list_equal (TREE_CHAIN (attr1),
4948 TREE_CHAIN (attr2)) == 1);
4949 }
4950 return (simple_cst_list_equal (TREE_VALUE (attr1),
4951 TREE_VALUE (attr2)) == 1);
4952 }
4953
4954 if ((flag_openmp || flag_openmp_simd)
4955 && TREE_VALUE (attr1) && TREE_VALUE (attr2)
4956 && TREE_CODE (TREE_VALUE (attr1)) == OMP_CLAUSE
4957 && TREE_CODE (TREE_VALUE (attr2)) == OMP_CLAUSE)
4958 return omp_declare_simd_clauses_equal (TREE_VALUE (attr1),
4959 TREE_VALUE (attr2));
4960
4961 return (simple_cst_equal (TREE_VALUE (attr1), TREE_VALUE (attr2)) == 1);
4962 }
4963
4964 /* Return 0 if the attributes for two types are incompatible, 1 if they
4965 are compatible, and 2 if they are nearly compatible (which causes a
4966 warning to be generated). */
4967 int
4968 comp_type_attributes (const_tree type1, const_tree type2)
4969 {
4970 const_tree a1 = TYPE_ATTRIBUTES (type1);
4971 const_tree a2 = TYPE_ATTRIBUTES (type2);
4972 const_tree a;
4973
4974 if (a1 == a2)
4975 return 1;
4976 for (a = a1; a != NULL_TREE; a = TREE_CHAIN (a))
4977 {
4978 const struct attribute_spec *as;
4979 const_tree attr;
4980
4981 as = lookup_attribute_spec (get_attribute_name (a));
4982 if (!as || as->affects_type_identity == false)
4983 continue;
4984
4985 attr = lookup_attribute (as->name, CONST_CAST_TREE (a2));
4986 if (!attr || !attribute_value_equal (a, attr))
4987 break;
4988 }
4989 if (!a)
4990 {
4991 for (a = a2; a != NULL_TREE; a = TREE_CHAIN (a))
4992 {
4993 const struct attribute_spec *as;
4994
4995 as = lookup_attribute_spec (get_attribute_name (a));
4996 if (!as || as->affects_type_identity == false)
4997 continue;
4998
4999 if (!lookup_attribute (as->name, CONST_CAST_TREE (a1)))
5000 break;
5001 /* We don't need to compare trees again, as we did this
5002 already in the first loop. */
5003 }
5004 /* All attributes affecting type identity are equal, so
5005 there is no need to call the target hook for comparison. */
5006 if (!a)
5007 return 1;
5008 }
5009 /* As some type combinations (like the default calling convention) might
5010 be compatible, we have to call the target hook to get the final result. */
5011 return targetm.comp_type_attributes (type1, type2);
5012 }
5013
5014 /* Return a type like TTYPE except that its TYPE_ATTRIBUTES
5015 is ATTRIBUTE.
5016
5017 Record such modified types already made so we don't make duplicates. */
5018
5019 tree
5020 build_type_attribute_variant (tree ttype, tree attribute)
5021 {
5022 return build_type_attribute_qual_variant (ttype, attribute,
5023 TYPE_QUALS (ttype));
5024 }
5025
5026
5027 /* Reset the expression *EXPR_P, a size or position.
5028
5029 ??? We could reset all non-constant sizes or positions. But it's cheap
5030 enough to not do so and refrain from adding workarounds to dwarf2out.c.
5031
5032 We need to reset self-referential sizes or positions because they cannot
5033 be gimplified and thus can contain a CALL_EXPR after the gimplification
5034 is finished, which will run afoul of LTO streaming. And they need to be
5035 reset to something essentially dummy but not constant, so as to preserve
5036 the properties of the object they are attached to. */
5037
5038 static inline void
5039 free_lang_data_in_one_sizepos (tree *expr_p)
5040 {
5041 tree expr = *expr_p;
5042 if (CONTAINS_PLACEHOLDER_P (expr))
5043 *expr_p = build0 (PLACEHOLDER_EXPR, TREE_TYPE (expr));
5044 }
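/* Illustration only: a self-referential field offset, i.e. an expression
   containing a PLACEHOLDER_EXPR, satisfies CONTAINS_PLACEHOLDER_P and is
   replaced here by a bare PLACEHOLDER_EXPR of the same type, so the slot
   stays non-constant without dragging a CALL_EXPR into the LTO stream.  */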
5045
5046
5047 /* Reset all the fields in a binfo node BINFO. We only keep
5048 BINFO_VTABLE, which is used by gimple_fold_obj_type_ref. */
5049
5050 static void
5051 free_lang_data_in_binfo (tree binfo)
5052 {
5053 unsigned i;
5054 tree t;
5055
5056 gcc_assert (TREE_CODE (binfo) == TREE_BINFO);
5057
5058 BINFO_VIRTUALS (binfo) = NULL_TREE;
5059 BINFO_BASE_ACCESSES (binfo) = NULL;
5060 BINFO_INHERITANCE_CHAIN (binfo) = NULL_TREE;
5061 BINFO_SUBVTT_INDEX (binfo) = NULL_TREE;
5062
5063 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (binfo), i, t)
5064 free_lang_data_in_binfo (t);
5065 }
5066
5067
5068 /* Reset all language specific information still present in TYPE. */
5069
5070 static void
5071 free_lang_data_in_type (tree type)
5072 {
5073 gcc_assert (TYPE_P (type));
5074
5075 /* Give the FE a chance to remove its own data first. */
5076 lang_hooks.free_lang_data (type);
5077
5078 TREE_LANG_FLAG_0 (type) = 0;
5079 TREE_LANG_FLAG_1 (type) = 0;
5080 TREE_LANG_FLAG_2 (type) = 0;
5081 TREE_LANG_FLAG_3 (type) = 0;
5082 TREE_LANG_FLAG_4 (type) = 0;
5083 TREE_LANG_FLAG_5 (type) = 0;
5084 TREE_LANG_FLAG_6 (type) = 0;
5085
5086 if (TREE_CODE (type) == FUNCTION_TYPE)
5087 {
5088 /* Remove the const and volatile qualifiers from arguments. The
5089 C++ front end removes them, but the C front end does not,
5090 leading to false ODR violation errors when merging two
5091 instances of the same function signature compiled by
5092 different front ends. */
5093 tree p;
5094
5095 for (p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
5096 {
5097 tree arg_type = TREE_VALUE (p);
5098
5099 if (TYPE_READONLY (arg_type) || TYPE_VOLATILE (arg_type))
5100 {
5101 int quals = TYPE_QUALS (arg_type)
5102 & ~TYPE_QUAL_CONST
5103 & ~TYPE_QUAL_VOLATILE;
5104 TREE_VALUE (p) = build_qualified_type (arg_type, quals);
5105 free_lang_data_in_type (TREE_VALUE (p));
5106 }
5107 /* C++ FE uses TREE_PURPOSE to store initial values. */
5108 TREE_PURPOSE (p) = NULL;
5109 }
5110 /* Java uses TYPE_MINVAL for TYPE_ARGUMENT_SIGNATURE. */
5111 TYPE_MINVAL (type) = NULL;
5112 }
5113 if (TREE_CODE (type) == METHOD_TYPE)
5114 {
5115 tree p;
5116
5117 for (p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
5118 {
5119 /* C++ FE uses TREE_PURPOSE to store initial values. */
5120 TREE_PURPOSE (p) = NULL;
5121 }
5122 /* Java uses TYPE_MINVAL for TYPE_ARGUMENT_SIGNATURE. */
5123 TYPE_MINVAL (type) = NULL;
5124 }
5125
5126 /* Remove members that are not actually FIELD_DECLs from the field
5127 list of an aggregate. These occur in C++. */
5128 if (RECORD_OR_UNION_TYPE_P (type))
5129 {
5130 tree prev, member;
5131
5132 /* Note that TYPE_FIELDS can be shared across distinct
5133 TREE_TYPEs. Therefore, if the first field of TYPE_FIELDS is
5134 to be removed, we cannot set its TREE_CHAIN to NULL.
5135 Otherwise, we would not be able to find all the other fields
5136 in the other instances of this TREE_TYPE.
5137
5138 This was causing an ICE in testsuite/g++.dg/lto/20080915.C. */
5139 prev = NULL_TREE;
5140 member = TYPE_FIELDS (type);
5141 while (member)
5142 {
5143 if (TREE_CODE (member) == FIELD_DECL
5144 || TREE_CODE (member) == TYPE_DECL)
5145 {
5146 if (prev)
5147 TREE_CHAIN (prev) = member;
5148 else
5149 TYPE_FIELDS (type) = member;
5150 prev = member;
5151 }
5152
5153 member = TREE_CHAIN (member);
5154 }
5155
5156 if (prev)
5157 TREE_CHAIN (prev) = NULL_TREE;
5158 else
5159 TYPE_FIELDS (type) = NULL_TREE;
5160
5161 /* FIXME: The C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
5162 and dangles the pointer from time to time. */
5163 if (TYPE_VFIELD (type) && TREE_CODE (TYPE_VFIELD (type)) != FIELD_DECL)
5164 TYPE_VFIELD (type) = NULL_TREE;
5165
5166 /* Remove the TYPE_METHODS list. While it would be nice to keep it
5167 to enable ODR warnings about different method lists, doing so
5168 increases the size of the streamed LTO data impractically.
5169 Keep the information that TYPE_METHODS was non-NULL. This is used
5170 by function.c and the pretty printers. */
5171 if (TYPE_METHODS (type))
5172 TYPE_METHODS (type) = error_mark_node;
5173 if (TYPE_BINFO (type))
5174 {
5175 free_lang_data_in_binfo (TYPE_BINFO (type));
5176 /* We need to preserve the link to bases and the virtual table for all
5177 polymorphic types to keep the devirtualization machinery working.
5178 Debug output cares only about bases, but we also output
5179 virtual table pointers so that merging of -fdevirtualize and
5180 -fno-devirtualize units is easier. */
5181 if ((!BINFO_VTABLE (TYPE_BINFO (type))
5182 || !flag_devirtualize)
5183 && ((!BINFO_N_BASE_BINFOS (TYPE_BINFO (type))
5184 && !BINFO_VTABLE (TYPE_BINFO (type)))
5185 || debug_info_level != DINFO_LEVEL_NONE))
5186 TYPE_BINFO (type) = NULL;
5187 }
5188 }
5189 else
5190 {
5191 /* For non-aggregate types, clear out the language slot (which
5192 overloads TYPE_BINFO). */
5193 TYPE_LANG_SLOT_1 (type) = NULL_TREE;
5194
5195 if (INTEGRAL_TYPE_P (type)
5196 || SCALAR_FLOAT_TYPE_P (type)
5197 || FIXED_POINT_TYPE_P (type))
5198 {
5199 free_lang_data_in_one_sizepos (&TYPE_MIN_VALUE (type));
5200 free_lang_data_in_one_sizepos (&TYPE_MAX_VALUE (type));
5201 }
5202 }
5203
5204 free_lang_data_in_one_sizepos (&TYPE_SIZE (type));
5205 free_lang_data_in_one_sizepos (&TYPE_SIZE_UNIT (type));
5206
5207 if (TYPE_CONTEXT (type)
5208 && TREE_CODE (TYPE_CONTEXT (type)) == BLOCK)
5209 {
5210 tree ctx = TYPE_CONTEXT (type);
5211 do
5212 {
5213 ctx = BLOCK_SUPERCONTEXT (ctx);
5214 }
5215 while (ctx && TREE_CODE (ctx) == BLOCK);
5216 TYPE_CONTEXT (type) = ctx;
5217 }
5218 }
5219
5220
5221 /* Return true if DECL may need an assembler name to be set. */
5222
5223 static inline bool
5224 need_assembler_name_p (tree decl)
5225 {
5226 /* We use DECL_ASSEMBLER_NAME to hold mangled type names for One Definition
5227 Rule merging. This makes type_odr_p return true on those types during
5228 LTO, and by comparing the mangled names we can say which types are intended
5229 to be equivalent across compilation units.
5230
5231 We do not store names for types in anonymous namespaces (type_in_anonymous_namespace_p).
5232
5233 Record, union and enumeration types have linkage that allows us
5234 to check type_in_anonymous_namespace_p. We do not mangle compound types
5235 that can always be compared structurally.
5236
5237 Similarly for builtin types, we compare properties of their main variant.
5238 A special case are integer types, where mangling does make a difference
5239 between char/signed char/unsigned char etc. Storing names for these allows
5240 e.g. -fno-signed-char/-fsigned-char mismatches to be handled well.
5241 See cp/mangle.c:write_builtin_type for details. */
5242
5243 if (flag_lto_odr_type_mering
5244 && TREE_CODE (decl) == TYPE_DECL
5245 && DECL_NAME (decl)
5246 && decl == TYPE_NAME (TREE_TYPE (decl))
5247 && !TYPE_ARTIFICIAL (TREE_TYPE (decl))
5248 && (type_with_linkage_p (TREE_TYPE (decl))
5249 || TREE_CODE (TREE_TYPE (decl)) == INTEGER_TYPE)
5250 && !variably_modified_type_p (TREE_TYPE (decl), NULL_TREE))
5251 return !DECL_ASSEMBLER_NAME_SET_P (decl);
5252 /* Only FUNCTION_DECLs and VAR_DECLs are considered. */
5253 if (TREE_CODE (decl) != FUNCTION_DECL
5254 && TREE_CODE (decl) != VAR_DECL)
5255 return false;
5256
5257 /* If DECL already has its assembler name set, it does not need a
5258 new one. */
5259 if (!HAS_DECL_ASSEMBLER_NAME_P (decl)
5260 || DECL_ASSEMBLER_NAME_SET_P (decl))
5261 return false;
5262
5263 /* Abstract decls do not need an assembler name. */
5264 if (DECL_ABSTRACT_P (decl))
5265 return false;
5266
5267 /* For VAR_DECLs, only static, public and external symbols need an
5268 assembler name. */
5269 if (TREE_CODE (decl) == VAR_DECL
5270 && !TREE_STATIC (decl)
5271 && !TREE_PUBLIC (decl)
5272 && !DECL_EXTERNAL (decl))
5273 return false;
5274
5275 if (TREE_CODE (decl) == FUNCTION_DECL)
5276 {
5277 /* Do not set assembler name on builtins. Allow RTL expansion to
5278 decide whether to expand inline or via a regular call. */
5279 if (DECL_BUILT_IN (decl)
5280 && DECL_BUILT_IN_CLASS (decl) != BUILT_IN_FRONTEND)
5281 return false;
5282
5283 /* Functions represented in the callgraph need an assembler name. */
5284 if (cgraph_node::get (decl) != NULL)
5285 return true;
5286
5287 /* Unused and not public functions don't need an assembler name. */
5288 if (!TREE_USED (decl) && !TREE_PUBLIC (decl))
5289 return false;
5290 }
5291
5292 return true;
5293 }
5294
5295
5296 /* Reset all language specific information still present in symbol
5297 DECL. */
5298
5299 static void
5300 free_lang_data_in_decl (tree decl)
5301 {
5302 gcc_assert (DECL_P (decl));
5303
5304 /* Give the FE a chance to remove its own data first. */
5305 lang_hooks.free_lang_data (decl);
5306
5307 TREE_LANG_FLAG_0 (decl) = 0;
5308 TREE_LANG_FLAG_1 (decl) = 0;
5309 TREE_LANG_FLAG_2 (decl) = 0;
5310 TREE_LANG_FLAG_3 (decl) = 0;
5311 TREE_LANG_FLAG_4 (decl) = 0;
5312 TREE_LANG_FLAG_5 (decl) = 0;
5313 TREE_LANG_FLAG_6 (decl) = 0;
5314
5315 free_lang_data_in_one_sizepos (&DECL_SIZE (decl));
5316 free_lang_data_in_one_sizepos (&DECL_SIZE_UNIT (decl));
5317 if (TREE_CODE (decl) == FIELD_DECL)
5318 {
5319 free_lang_data_in_one_sizepos (&DECL_FIELD_OFFSET (decl));
5320 if (TREE_CODE (DECL_CONTEXT (decl)) == QUAL_UNION_TYPE)
5321 DECL_QUALIFIER (decl) = NULL_TREE;
5322 }
5323
5324 if (TREE_CODE (decl) == FUNCTION_DECL)
5325 {
5326 struct cgraph_node *node;
5327 if (!(node = cgraph_node::get (decl))
5328 || (!node->definition && !node->clones))
5329 {
5330 if (node)
5331 node->release_body ();
5332 else
5333 {
5334 release_function_body (decl);
5335 DECL_ARGUMENTS (decl) = NULL;
5336 DECL_RESULT (decl) = NULL;
5337 DECL_INITIAL (decl) = error_mark_node;
5338 }
5339 }
5340 if (gimple_has_body_p (decl))
5341 {
5342 tree t;
5343
5344 /* If DECL has a gimple body, then the context for its
5345 arguments must be DECL. Otherwise, it doesn't really
5346 matter, as we will not be emitting any code for DECL. In
5347 general, there may be other instances of DECL created by
5348 the front end and since PARM_DECLs are generally shared,
5349 their DECL_CONTEXT changes as the replicas of DECL are
5350 created. The only time where DECL_CONTEXT is important
5351 is for the FUNCTION_DECLs that have a gimple body (since
5352 the PARM_DECL will be used in the function's body). */
5353 for (t = DECL_ARGUMENTS (decl); t; t = TREE_CHAIN (t))
5354 DECL_CONTEXT (t) = decl;
5355 if (!DECL_FUNCTION_SPECIFIC_TARGET (decl))
5356 DECL_FUNCTION_SPECIFIC_TARGET (decl)
5357 = target_option_default_node;
5358 if (!DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl))
5359 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
5360 = optimization_default_node;
5361 }
5362
5363 /* DECL_SAVED_TREE holds the GENERIC representation for DECL.
5364 At this point, it is not needed anymore. */
5365 DECL_SAVED_TREE (decl) = NULL_TREE;
5366
5367 /* Clear the abstract origin if it refers to a method. Otherwise
5368 dwarf2out.c will ICE as we clear TYPE_METHODS and thus the
5369 origin will not be output correctly. */
5370 if (DECL_ABSTRACT_ORIGIN (decl)
5371 && DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))
5372 && RECORD_OR_UNION_TYPE_P
5373 (DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))))
5374 DECL_ABSTRACT_ORIGIN (decl) = NULL_TREE;
5375
5376 /* Sometimes the C++ frontend doesn't manage to transform a temporary
5377 DECL_VINDEX referring to itself into a vtable slot number as it
5378 should. Happens with functions that are copied and then forgotten
5379 about. Just clear it, it won't matter anymore. */
5380 if (DECL_VINDEX (decl) && !tree_fits_shwi_p (DECL_VINDEX (decl)))
5381 DECL_VINDEX (decl) = NULL_TREE;
5382 }
5383 else if (TREE_CODE (decl) == VAR_DECL)
5384 {
5385 if ((DECL_EXTERNAL (decl)
5386 && (!TREE_STATIC (decl) || !TREE_READONLY (decl)))
5387 || (decl_function_context (decl) && !TREE_STATIC (decl)))
5388 DECL_INITIAL (decl) = NULL_TREE;
5389 }
5390 else if (TREE_CODE (decl) == TYPE_DECL
5391 || TREE_CODE (decl) == FIELD_DECL)
5392 DECL_INITIAL (decl) = NULL_TREE;
5393 else if (TREE_CODE (decl) == TRANSLATION_UNIT_DECL
5394 && DECL_INITIAL (decl)
5395 && TREE_CODE (DECL_INITIAL (decl)) == BLOCK)
5396 {
5397 /* Strip builtins from the translation-unit BLOCK. We still have targets
5398 without builtin_decl_explicit support; also, builtins are shared
5399 nodes, so we can't use TREE_CHAIN in multiple lists. */
5400 tree *nextp = &BLOCK_VARS (DECL_INITIAL (decl));
5401 while (*nextp)
5402 {
5403 tree var = *nextp;
5404 if (TREE_CODE (var) == FUNCTION_DECL
5405 && DECL_BUILT_IN (var))
5406 *nextp = TREE_CHAIN (var);
5407 else
5408 nextp = &TREE_CHAIN (var);
5409 }
5410 }
5411 }
5412
5413
5414 /* Data used when collecting DECLs and TYPEs for language data removal. */
5415
5416 struct free_lang_data_d
5417 {
5418 /* Worklist to avoid excessive recursion. */
5419 vec<tree> worklist;
5420
5421 /* Set of traversed objects. Used to avoid duplicate visits. */
5422 hash_set<tree> *pset;
5423
5424 /* Array of symbols to process with free_lang_data_in_decl. */
5425 vec<tree> decls;
5426
5427 /* Array of types to process with free_lang_data_in_type. */
5428 vec<tree> types;
5429 };
5430
5431
5432 /* Save all language fields needed to generate proper debug information
5433 for DECL. This saves most fields cleared out by free_lang_data_in_decl. */
5434
5435 static void
5436 save_debug_info_for_decl (tree t)
5437 {
5438 /*struct saved_debug_info_d *sdi;*/
5439
5440 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && DECL_P (t));
5441
5442 /* FIXME. Partial implementation for saving debug info removed. */
5443 }
5444
5445
5446 /* Save all language fields needed to generate proper debug information
5447 for TYPE. This saves most fields cleared out by free_lang_data_in_type. */
5448
5449 static void
5450 save_debug_info_for_type (tree t)
5451 {
5452 /*struct saved_debug_info_d *sdi;*/
5453
5454 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && TYPE_P (t));
5455
5456 /* FIXME. Partial implementation for saving debug info removed. */
5457 }
5458
5459
5460 /* Add type or decl T to one of the list of tree nodes that need their
5461 language data removed. The lists are held inside FLD. */
5462
5463 static void
5464 add_tree_to_fld_list (tree t, struct free_lang_data_d *fld)
5465 {
5466 if (DECL_P (t))
5467 {
5468 fld->decls.safe_push (t);
5469 if (debug_info_level > DINFO_LEVEL_TERSE)
5470 save_debug_info_for_decl (t);
5471 }
5472 else if (TYPE_P (t))
5473 {
5474 fld->types.safe_push (t);
5475 if (debug_info_level > DINFO_LEVEL_TERSE)
5476 save_debug_info_for_type (t);
5477 }
5478 else
5479 gcc_unreachable ();
5480 }
5481
5482 /* Push tree node T into FLD->WORKLIST. */
5483
5484 static inline void
5485 fld_worklist_push (tree t, struct free_lang_data_d *fld)
5486 {
5487 if (t && !is_lang_specific (t) && !fld->pset->contains (t))
5488 fld->worklist.safe_push ((t));
5489 }
5490
5491
5492 /* Operand callback helper for free_lang_data_in_node. *TP is the
5493 subtree operand being considered. */
5494
5495 static tree
5496 find_decls_types_r (tree *tp, int *ws, void *data)
5497 {
5498 tree t = *tp;
5499 struct free_lang_data_d *fld = (struct free_lang_data_d *) data;
5500
5501 if (TREE_CODE (t) == TREE_LIST)
5502 return NULL_TREE;
5503
5504 /* Language specific nodes will be removed, so there is no need
5505 to gather anything under them. */
5506 if (is_lang_specific (t))
5507 {
5508 *ws = 0;
5509 return NULL_TREE;
5510 }
5511
5512 if (DECL_P (t))
5513 {
5514 /* Note that walk_tree does not traverse every possible field in
5515 decls, so we have to do our own traversals here. */
5516 add_tree_to_fld_list (t, fld);
5517
5518 fld_worklist_push (DECL_NAME (t), fld);
5519 fld_worklist_push (DECL_CONTEXT (t), fld);
5520 fld_worklist_push (DECL_SIZE (t), fld);
5521 fld_worklist_push (DECL_SIZE_UNIT (t), fld);
5522
5523 /* We are going to remove everything under DECL_INITIAL for
5524 TYPE_DECLs. No point walking them. */
5525 if (TREE_CODE (t) != TYPE_DECL)
5526 fld_worklist_push (DECL_INITIAL (t), fld);
5527
5528 fld_worklist_push (DECL_ATTRIBUTES (t), fld);
5529 fld_worklist_push (DECL_ABSTRACT_ORIGIN (t), fld);
5530
5531 if (TREE_CODE (t) == FUNCTION_DECL)
5532 {
5533 fld_worklist_push (DECL_ARGUMENTS (t), fld);
5534 fld_worklist_push (DECL_RESULT (t), fld);
5535 }
5536 else if (TREE_CODE (t) == TYPE_DECL)
5537 {
5538 fld_worklist_push (DECL_ORIGINAL_TYPE (t), fld);
5539 }
5540 else if (TREE_CODE (t) == FIELD_DECL)
5541 {
5542 fld_worklist_push (DECL_FIELD_OFFSET (t), fld);
5543 fld_worklist_push (DECL_BIT_FIELD_TYPE (t), fld);
5544 fld_worklist_push (DECL_FIELD_BIT_OFFSET (t), fld);
5545 fld_worklist_push (DECL_FCONTEXT (t), fld);
5546 }
5547
5548 if ((TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL)
5549 && DECL_HAS_VALUE_EXPR_P (t))
5550 fld_worklist_push (DECL_VALUE_EXPR (t), fld);
5551
5552 if (TREE_CODE (t) != FIELD_DECL
5553 && TREE_CODE (t) != TYPE_DECL)
5554 fld_worklist_push (TREE_CHAIN (t), fld);
5555 *ws = 0;
5556 }
5557 else if (TYPE_P (t))
5558 {
5559 /* Note that walk_tree does not traverse every possible field in
5560 types, so we have to do our own traversals here. */
5561 add_tree_to_fld_list (t, fld);
5562
5563 if (!RECORD_OR_UNION_TYPE_P (t))
5564 fld_worklist_push (TYPE_CACHED_VALUES (t), fld);
5565 fld_worklist_push (TYPE_SIZE (t), fld);
5566 fld_worklist_push (TYPE_SIZE_UNIT (t), fld);
5567 fld_worklist_push (TYPE_ATTRIBUTES (t), fld);
5568 fld_worklist_push (TYPE_POINTER_TO (t), fld);
5569 fld_worklist_push (TYPE_REFERENCE_TO (t), fld);
5570 fld_worklist_push (TYPE_NAME (t), fld);
5571 /* Do not walk TYPE_NEXT_PTR_TO or TYPE_NEXT_REF_TO. We do not stream
5572 them and thus do not want to reach unused pointer types
5573 this way. */
5574 if (!POINTER_TYPE_P (t))
5575 fld_worklist_push (TYPE_MINVAL (t), fld);
5576 if (!RECORD_OR_UNION_TYPE_P (t))
5577 fld_worklist_push (TYPE_MAXVAL (t), fld);
5578 fld_worklist_push (TYPE_MAIN_VARIANT (t), fld);
5579 /* Do not walk TYPE_NEXT_VARIANT. We do not stream it and thus
5580 do not want to reach unused variants this way. */
5581 if (TYPE_CONTEXT (t))
5582 {
5583 tree ctx = TYPE_CONTEXT (t);
5584 /* We adjust BLOCK TYPE_CONTEXTs to the innermost non-BLOCK one.
5585 So push that instead. */
5586 while (ctx && TREE_CODE (ctx) == BLOCK)
5587 ctx = BLOCK_SUPERCONTEXT (ctx);
5588 fld_worklist_push (ctx, fld);
5589 }
5590 /* Do not walk TYPE_CANONICAL. We do not stream it and thus do not
5591 want to reach unused types this way. */
5592
5593 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t))
5594 {
5595 unsigned i;
5596 tree tem;
5597 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (TYPE_BINFO (t)), i, tem)
5598 fld_worklist_push (TREE_TYPE (tem), fld);
5599 tem = BINFO_VIRTUALS (TYPE_BINFO (t));
5600 if (tem
5601 /* The Java FE overloads BINFO_VIRTUALS for its own purpose. */
5602 && TREE_CODE (tem) == TREE_LIST)
5603 do
5604 {
5605 fld_worklist_push (TREE_VALUE (tem), fld);
5606 tem = TREE_CHAIN (tem);
5607 }
5608 while (tem);
5609 }
5610 if (RECORD_OR_UNION_TYPE_P (t))
5611 {
5612 tree tem;
5613 /* Push all TYPE_FIELDS - interesting and non-interesting
5614 entries can be interleaved. */
5615 tem = TYPE_FIELDS (t);
5616 while (tem)
5617 {
5618 if (TREE_CODE (tem) == FIELD_DECL
5619 || TREE_CODE (tem) == TYPE_DECL)
5620 fld_worklist_push (tem, fld);
5621 tem = TREE_CHAIN (tem);
5622 }
5623 }
5624
5625 fld_worklist_push (TYPE_STUB_DECL (t), fld);
5626 *ws = 0;
5627 }
5628 else if (TREE_CODE (t) == BLOCK)
5629 {
5630 tree tem;
5631 for (tem = BLOCK_VARS (t); tem; tem = TREE_CHAIN (tem))
5632 fld_worklist_push (tem, fld);
5633 for (tem = BLOCK_SUBBLOCKS (t); tem; tem = BLOCK_CHAIN (tem))
5634 fld_worklist_push (tem, fld);
5635 fld_worklist_push (BLOCK_ABSTRACT_ORIGIN (t), fld);
5636 }
5637
5638 if (TREE_CODE (t) != IDENTIFIER_NODE
5639 && CODE_CONTAINS_STRUCT (TREE_CODE (t), TS_TYPED))
5640 fld_worklist_push (TREE_TYPE (t), fld);
5641
5642 return NULL_TREE;
5643 }
5644
5645
5646 /* Find decls and types in T. */
5647
5648 static void
5649 find_decls_types (tree t, struct free_lang_data_d *fld)
5650 {
5651 while (1)
5652 {
5653 if (!fld->pset->contains (t))
5654 walk_tree (&t, find_decls_types_r, fld, fld->pset);
5655 if (fld->worklist.is_empty ())
5656 break;
5657 t = fld->worklist.pop ();
5658 }
5659 }
5660
5661 /* Translate all the types in LIST into the corresponding runtime
5662 types. */
5663
5664 static tree
5665 get_eh_types_for_runtime (tree list)
5666 {
5667 tree head, prev;
5668
5669 if (list == NULL_TREE)
5670 return NULL_TREE;
5671
5672 head = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5673 prev = head;
5674 list = TREE_CHAIN (list);
5675 while (list)
5676 {
5677 tree n = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5678 TREE_CHAIN (prev) = n;
5679 prev = TREE_CHAIN (prev);
5680 list = TREE_CHAIN (list);
5681 }
5682
5683 return head;
5684 }
5685
5686
5687 /* Find decls and types referenced in EH region R and store them in
5688 FLD->DECLS and FLD->TYPES. */
5689
5690 static void
5691 find_decls_types_in_eh_region (eh_region r, struct free_lang_data_d *fld)
5692 {
5693 switch (r->type)
5694 {
5695 case ERT_CLEANUP:
5696 break;
5697
5698 case ERT_TRY:
5699 {
5700 eh_catch c;
5701
5702 /* The types referenced in each catch must first be changed to the
5703 EH types used at runtime. This removes references to FE types
5704 in the region. */
5705 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
5706 {
5707 c->type_list = get_eh_types_for_runtime (c->type_list);
5708 walk_tree (&c->type_list, find_decls_types_r, fld, fld->pset);
5709 }
5710 }
5711 break;
5712
5713 case ERT_ALLOWED_EXCEPTIONS:
5714 r->u.allowed.type_list
5715 = get_eh_types_for_runtime (r->u.allowed.type_list);
5716 walk_tree (&r->u.allowed.type_list, find_decls_types_r, fld, fld->pset);
5717 break;
5718
5719 case ERT_MUST_NOT_THROW:
5720 walk_tree (&r->u.must_not_throw.failure_decl,
5721 find_decls_types_r, fld, fld->pset);
5722 break;
5723 }
5724 }
5725
5726
5727 /* Find decls and types referenced in cgraph node N and store them in
5728 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5729 look for *every* kind of DECL and TYPE node reachable from N,
5730 including those embedded inside types and decls (i.e., TYPE_DECLs,
5731 NAMESPACE_DECLs, etc). */
5732
5733 static void
5734 find_decls_types_in_node (struct cgraph_node *n, struct free_lang_data_d *fld)
5735 {
5736 basic_block bb;
5737 struct function *fn;
5738 unsigned ix;
5739 tree t;
5740
5741 find_decls_types (n->decl, fld);
5742
5743 if (!gimple_has_body_p (n->decl))
5744 return;
5745
5746 gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);
5747
5748 fn = DECL_STRUCT_FUNCTION (n->decl);
5749
5750 /* Traverse locals. */
5751 FOR_EACH_LOCAL_DECL (fn, ix, t)
5752 find_decls_types (t, fld);
5753
5754 /* Traverse EH regions in FN. */
5755 {
5756 eh_region r;
5757 FOR_ALL_EH_REGION_FN (r, fn)
5758 find_decls_types_in_eh_region (r, fld);
5759 }
5760
5761 /* Traverse every statement in FN. */
5762 FOR_EACH_BB_FN (bb, fn)
5763 {
5764 gphi_iterator psi;
5765 gimple_stmt_iterator si;
5766 unsigned i;
5767
5768 for (psi = gsi_start_phis (bb); !gsi_end_p (psi); gsi_next (&psi))
5769 {
5770 gphi *phi = psi.phi ();
5771
5772 for (i = 0; i < gimple_phi_num_args (phi); i++)
5773 {
5774 tree *arg_p = gimple_phi_arg_def_ptr (phi, i);
5775 find_decls_types (*arg_p, fld);
5776 }
5777 }
5778
5779 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
5780 {
5781 gimple stmt = gsi_stmt (si);
5782
5783 if (is_gimple_call (stmt))
5784 find_decls_types (gimple_call_fntype (stmt), fld);
5785
5786 for (i = 0; i < gimple_num_ops (stmt); i++)
5787 {
5788 tree arg = gimple_op (stmt, i);
5789 find_decls_types (arg, fld);
5790 }
5791 }
5792 }
5793 }
5794
5795
5796 /* Find decls and types referenced in varpool node N and store them in
5797 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5798 look for *every* kind of DECL and TYPE node reachable from N,
5799 including those embedded inside types and decls (i.e., TYPE_DECLs,
5800 NAMESPACE_DECLs, etc). */
5801
5802 static void
5803 find_decls_types_in_var (varpool_node *v, struct free_lang_data_d *fld)
5804 {
5805 find_decls_types (v->decl, fld);
5806 }
5807
5808 /* If T needs an assembler name, have one created for it. */
5809
5810 void
5811 assign_assembler_name_if_neeeded (tree t)
5812 {
5813 if (need_assembler_name_p (t))
5814 {
5815 /* When setting DECL_ASSEMBLER_NAME, the C++ mangler may emit
5816 diagnostics that use input_location to show locus
5817 information. The problem here is that, at this point,
5818 input_location is generally anchored to the end of the file
5819 (since the parser is long gone), so we don't have a good
5820 position to pin it to.
5821
5822 To alleviate this problem, this uses the location of T's
5823 declaration. Examples of this are
5824 testsuite/g++.dg/template/cond2.C and
5825 testsuite/g++.dg/template/pr35240.C. */
5826 location_t saved_location = input_location;
5827 input_location = DECL_SOURCE_LOCATION (t);
5828
5829 decl_assembler_name (t);
5830
5831 input_location = saved_location;
5832 }
5833 }
5834
5835
5836 /* Free language specific information for every operand and expression
5837 in every node of the call graph. This process operates in three stages:
5838
5839 1- Every callgraph node and varpool node is traversed looking for
5840 decls and types embedded in them. This is a more exhaustive
5841 search than that done by find_referenced_vars, because it will
5842 also collect individual fields, decls embedded in types, etc.
5843
5844 2- All the decls found are sent to free_lang_data_in_decl.
5845
5846 3- All the types found are sent to free_lang_data_in_type.
5847
5848 The ordering between decls and types is important because
5849 free_lang_data_in_decl sets assembler names, which includes
5850 mangling. So types cannot be freed up until assembler names have
5851 been set up. */
5852
5853 static void
5854 free_lang_data_in_cgraph (void)
5855 {
5856 struct cgraph_node *n;
5857 varpool_node *v;
5858 struct free_lang_data_d fld;
5859 tree t;
5860 unsigned i;
5861 alias_pair *p;
5862
5863 /* Initialize sets and arrays to store referenced decls and types. */
5864 fld.pset = new hash_set<tree>;
5865 fld.worklist.create (0);
5866 fld.decls.create (100);
5867 fld.types.create (100);
5868
5869 /* Find decls and types in the body of every function in the callgraph. */
5870 FOR_EACH_FUNCTION (n)
5871 find_decls_types_in_node (n, &fld);
5872
5873 FOR_EACH_VEC_SAFE_ELT (alias_pairs, i, p)
5874 find_decls_types (p->decl, &fld);
5875
5876 /* Find decls and types in every varpool symbol. */
5877 FOR_EACH_VARIABLE (v)
5878 find_decls_types_in_var (v, &fld);
5879
5880 /* Set the assembler name on every decl found. We need to do this
5881 now because free_lang_data_in_decl will invalidate data needed
5882 for mangling. This breaks mangling on interdependent decls. */
5883 FOR_EACH_VEC_ELT (fld.decls, i, t)
5884 assign_assembler_name_if_neeeded (t);
5885
5886 /* Traverse every decl found freeing its language data. */
5887 FOR_EACH_VEC_ELT (fld.decls, i, t)
5888 free_lang_data_in_decl (t);
5889
5890 /* Traverse every type found freeing its language data. */
5891 FOR_EACH_VEC_ELT (fld.types, i, t)
5892 free_lang_data_in_type (t);
5893 #ifdef ENABLE_CHECKING
5894 FOR_EACH_VEC_ELT (fld.types, i, t)
5895 verify_type (t);
5896 #endif
5897
5898 delete fld.pset;
5899 fld.worklist.release ();
5900 fld.decls.release ();
5901 fld.types.release ();
5902 }
5903
5904
5905 /* Free resources used by the front end that are not needed once it is done. */
5906
5907 static unsigned
5908 free_lang_data (void)
5909 {
5910 unsigned i;
5911
5912 /* If we are the LTO frontend we have freed lang-specific data already. */
5913 if (in_lto_p
5914 || (!flag_generate_lto && !flag_generate_offload))
5915 return 0;
5916
5917 /* Allocate and assign alias sets to the standard integer types
5918 while the slots are still set up the way the frontends generated them. */
5919 for (i = 0; i < itk_none; ++i)
5920 if (integer_types[i])
5921 TYPE_ALIAS_SET (integer_types[i]) = get_alias_set (integer_types[i]);
5922
5923 /* Traverse the IL resetting language specific information for
5924 operands, expressions, etc. */
5925 free_lang_data_in_cgraph ();
5926
5927 /* Create gimple variants for common types. */
5928 ptrdiff_type_node = integer_type_node;
5929 fileptr_type_node = ptr_type_node;
5930
5931 /* Reset some langhooks. Do not reset types_compatible_p, it may
5932 still be used indirectly via the get_alias_set langhook. */
5933 lang_hooks.dwarf_name = lhd_dwarf_name;
5934 lang_hooks.decl_printable_name = gimple_decl_printable_name;
5935 lang_hooks.gimplify_expr = lhd_gimplify_expr;
5936
5937 /* We do not want the default decl_assembler_name implementation,
5938 rather if we have fixed everything we want a wrapper around it
5939 asserting that all non-local symbols already got their assembler
5940 name and only produce assembler names for local symbols. Or rather
5941 make sure we never call decl_assembler_name on local symbols and
5942 devise a separate, middle-end private scheme for it. */
5943
5944 /* Reset diagnostic machinery. */
5945 tree_diagnostics_defaults (global_dc);
5946
5947 return 0;
5948 }
5949
5950
5951 namespace {
5952
5953 const pass_data pass_data_ipa_free_lang_data =
5954 {
5955 SIMPLE_IPA_PASS, /* type */
5956 "*free_lang_data", /* name */
5957 OPTGROUP_NONE, /* optinfo_flags */
5958 TV_IPA_FREE_LANG_DATA, /* tv_id */
5959 0, /* properties_required */
5960 0, /* properties_provided */
5961 0, /* properties_destroyed */
5962 0, /* todo_flags_start */
5963 0, /* todo_flags_finish */
5964 };
5965
5966 class pass_ipa_free_lang_data : public simple_ipa_opt_pass
5967 {
5968 public:
5969 pass_ipa_free_lang_data (gcc::context *ctxt)
5970 : simple_ipa_opt_pass (pass_data_ipa_free_lang_data, ctxt)
5971 {}
5972
5973 /* opt_pass methods: */
5974 virtual unsigned int execute (function *) { return free_lang_data (); }
5975
5976 }; // class pass_ipa_free_lang_data
5977
5978 } // anon namespace
5979
5980 simple_ipa_opt_pass *
5981 make_pass_ipa_free_lang_data (gcc::context *ctxt)
5982 {
5983 return new pass_ipa_free_lang_data (ctxt);
5984 }
5985
5986 /* The backbone of is_attribute_p(). ATTR_LEN is the string length of
5987 ATTR_NAME. Also used internally by remove_attribute(). */
5988 bool
5989 private_is_attribute_p (const char *attr_name, size_t attr_len, const_tree ident)
5990 {
5991 size_t ident_len = IDENTIFIER_LENGTH (ident);
5992
5993 if (ident_len == attr_len)
5994 {
5995 if (strcmp (attr_name, IDENTIFIER_POINTER (ident)) == 0)
5996 return true;
5997 }
5998 else if (ident_len == attr_len + 4)
5999 {
6000 /* There is the possibility that ATTR is 'text' and IDENT is
6001 '__text__'. */
6002 const char *p = IDENTIFIER_POINTER (ident);
6003 if (p[0] == '_' && p[1] == '_'
6004 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
6005 && strncmp (attr_name, p + 2, attr_len) == 0)
6006 return true;
6007 }
6008
6009 return false;
6010 }
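
/* A minimal sketch of how the 'text' vs. '__text__' equivalence encoded
   above is exercised through is_attribute_p, the inline wrapper in tree.h
   that forwards to private_is_attribute_p; the identifiers used here are
   purely illustrative:

     tree plain = get_identifier ("noreturn");
     tree ugly = get_identifier ("__noreturn__");
     bool a = is_attribute_p ("noreturn", plain);
     bool b = is_attribute_p ("noreturn", ugly);
     gcc_checking_assert (a && b);

   Both calls return true because the two spellings name the same
   attribute.  The bare form must be passed as ATTR_NAME; see the
   gcc_checking_assert in remove_attribute below.  */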
6011
6012 /* The backbone of lookup_attribute(). ATTR_LEN is the string length
6013 of ATTR_NAME, and LIST is not NULL_TREE. */
6014 tree
6015 private_lookup_attribute (const char *attr_name, size_t attr_len, tree list)
6016 {
6017 while (list)
6018 {
6019 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
6020
6021 if (ident_len == attr_len)
6022 {
6023 if (!strcmp (attr_name,
6024 IDENTIFIER_POINTER (get_attribute_name (list))))
6025 break;
6026 }
6027 /* TODO: If we made sure that attributes were stored in the
6028 canonical form without '__...__' (i.e., as in 'text' as opposed
6029 to '__text__') then we could avoid the following case. */
6030 else if (ident_len == attr_len + 4)
6031 {
6032 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
6033 if (p[0] == '_' && p[1] == '_'
6034 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
6035 && strncmp (attr_name, p + 2, attr_len) == 0)
6036 break;
6037 }
6038 list = TREE_CHAIN (list);
6039 }
6040
6041 return list;
6042 }
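
/* The public lookup_attribute wrapper in tree.h forwards to the routine
   above.  A hedged sketch of the usual calling pattern, including the
   documented way of visiting every occurrence of an attribute by feeding
   the TREE_CHAIN of each hit back in; FNDECL and the handler are
   placeholders rather than names from this file:

     tree attr;
     for (attr = lookup_attribute ("format", DECL_ATTRIBUTES (fndecl));
          attr != NULL_TREE;
          attr = lookup_attribute ("format", TREE_CHAIN (attr)))
       handle_one_format_attribute (TREE_VALUE (attr));

   Each ATTR is the TREE_LIST node for one occurrence; its TREE_VALUE
   holds the attribute's arguments.  */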
6043
6044 /* Given an attribute name ATTR_NAME and a list of attributes LIST,
6045 return a pointer to the first list element whose attribute name
6046 starts with ATTR_NAME. ATTR_NAME must be in the form 'text' (not
6047 '__text__'). */
6048
6049 tree
6050 private_lookup_attribute_by_prefix (const char *attr_name, size_t attr_len,
6051 tree list)
6052 {
6053 while (list)
6054 {
6055 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
6056
6057 if (attr_len > ident_len)
6058 {
6059 list = TREE_CHAIN (list);
6060 continue;
6061 }
6062
6063 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
6064
6065 if (strncmp (attr_name, p, attr_len) == 0)
6066 break;
6067
6068 /* TODO: If we made sure that attributes were stored in the
6069 canonical form without '__...__' (i.e., as in 'text' as opposed
6070 to '__text__') then we could avoid the following case. */
6071 if (p[0] == '_' && p[1] == '_'
6072 && strncmp (attr_name, p + 2, attr_len) == 0)
6073 break;
6074
6075 list = TREE_CHAIN (list);
6076 }
6077
6078 return list;
6079 }
6080
6081
6082 /* A variant of lookup_attribute() that can be used with an identifier
6083 as the first argument, and where the identifier can be either
6084 'text' or '__text__'.
6085
6086 Given an attribute ATTR_IDENTIFIER, and a list of attributes LIST,
6087 return a pointer to the attribute's list element if the attribute
6088 is part of the list, or NULL_TREE if not found. If the attribute
6089 appears more than once, this only returns the first occurrence; the
6090 TREE_CHAIN of the return value should be passed back in if further
6091 occurrences are wanted. ATTR_IDENTIFIER must be an identifier but
6092 can be in the form 'text' or '__text__'. */
6093 static tree
6094 lookup_ident_attribute (tree attr_identifier, tree list)
6095 {
6096 gcc_checking_assert (TREE_CODE (attr_identifier) == IDENTIFIER_NODE);
6097
6098 while (list)
6099 {
6100 gcc_checking_assert (TREE_CODE (get_attribute_name (list))
6101 == IDENTIFIER_NODE);
6102
6103 if (cmp_attrib_identifiers (attr_identifier,
6104 get_attribute_name (list)))
6105 /* Found it. */
6106 break;
6107 list = TREE_CHAIN (list);
6108 }
6109
6110 return list;
6111 }
6112
6113 /* Remove any instances of attribute ATTR_NAME in LIST and return the
6114 modified list. */
6115
6116 tree
6117 remove_attribute (const char *attr_name, tree list)
6118 {
6119 tree *p;
6120 size_t attr_len = strlen (attr_name);
6121
6122 gcc_checking_assert (attr_name[0] != '_');
6123
6124 for (p = &list; *p; )
6125 {
6126 tree l = *p;
6127 /* TODO: If we were storing attributes in normalized form, here
6128 we could use a simple strcmp(). */
6129 if (private_is_attribute_p (attr_name, attr_len, get_attribute_name (l)))
6130 *p = TREE_CHAIN (l);
6131 else
6132 p = &TREE_CHAIN (l);
6133 }
6134
6135 return list;
6136 }
6137
6138 /* Return an attribute list that is the union of A1 and A2. */
6139
6140 tree
6141 merge_attributes (tree a1, tree a2)
6142 {
6143 tree attributes;
6144
6145 /* Either one unset? Take the set one. */
6146
6147 if ((attributes = a1) == 0)
6148 attributes = a2;
6149
6150 /* One that completely contains the other? Take it. */
6151
6152 else if (a2 != 0 && ! attribute_list_contained (a1, a2))
6153 {
6154 if (attribute_list_contained (a2, a1))
6155 attributes = a2;
6156 else
6157 {
6158 /* Pick the longest list, and hang on the other list. */
6159
6160 if (list_length (a1) < list_length (a2))
6161 attributes = a2, a2 = a1;
6162
6163 for (; a2 != 0; a2 = TREE_CHAIN (a2))
6164 {
6165 tree a;
6166 for (a = lookup_ident_attribute (get_attribute_name (a2),
6167 attributes);
6168 a != NULL_TREE && !attribute_value_equal (a, a2);
6169 a = lookup_ident_attribute (get_attribute_name (a2),
6170 TREE_CHAIN (a)))
6171 ;
6172 if (a == NULL_TREE)
6173 {
6174 a1 = copy_node (a2);
6175 TREE_CHAIN (a1) = attributes;
6176 attributes = a1;
6177 }
6178 }
6179 }
6180 }
6181 return attributes;
6182 }
6183
6184 /* Given types T1 and T2, merge their attributes and return
6185 the result. */
6186
6187 tree
6188 merge_type_attributes (tree t1, tree t2)
6189 {
6190 return merge_attributes (TYPE_ATTRIBUTES (t1),
6191 TYPE_ATTRIBUTES (t2));
6192 }
6193
6194 /* Given decls OLDDECL and NEWDECL, merge their attributes and return
6195 the result. */
6196
6197 tree
6198 merge_decl_attributes (tree olddecl, tree newdecl)
6199 {
6200 return merge_attributes (DECL_ATTRIBUTES (olddecl),
6201 DECL_ATTRIBUTES (newdecl));
6202 }
6203
6204 #if TARGET_DLLIMPORT_DECL_ATTRIBUTES
6205
6206 /* Specialization of merge_decl_attributes for various Windows targets.
6207
6208 This handles the following situation:
6209
6210 __declspec (dllimport) int foo;
6211 int foo;
6212
6213 The second instance of `foo' nullifies the dllimport. */
6214
6215 tree
6216 merge_dllimport_decl_attributes (tree old, tree new_tree)
6217 {
6218 tree a;
6219 int delete_dllimport_p = 1;
6220
6221 /* What we need to do here is remove from `old' dllimport if it doesn't
6222 appear in `new'. dllimport behaves like extern: if a declaration is
6223 marked dllimport and a definition appears later, then the object
6224 is not dllimport'd. We also remove a `new' dllimport if the old list
6225 contains dllexport: dllexport always overrides dllimport, regardless
6226 of the order of declaration. */
6227 if (!VAR_OR_FUNCTION_DECL_P (new_tree))
6228 delete_dllimport_p = 0;
6229 else if (DECL_DLLIMPORT_P (new_tree)
6230 && lookup_attribute ("dllexport", DECL_ATTRIBUTES (old)))
6231 {
6232 DECL_DLLIMPORT_P (new_tree) = 0;
6233 warning (OPT_Wattributes, "%q+D already declared with dllexport attribute: "
6234 "dllimport ignored", new_tree);
6235 }
6236 else if (DECL_DLLIMPORT_P (old) && !DECL_DLLIMPORT_P (new_tree))
6237 {
6238 /* Warn about overriding a symbol that has already been used, e.g.:
6239 extern int __attribute__ ((dllimport)) foo;
6240 int* bar () {return &foo;}
6241 int foo;
6242 */
6243 if (TREE_USED (old))
6244 {
6245 warning (0, "%q+D redeclared without dllimport attribute "
6246 "after being referenced with dll linkage", new_tree);
6247 /* If we have used a variable's address with dllimport linkage,
6248 keep the old DECL_DLLIMPORT_P flag: the ADDR_EXPR using the
6249 decl may already have had TREE_CONSTANT computed.
6250 We still remove the attribute so that assembler code refers
6251 to '&foo' rather than '_imp__foo'. */
6252 if (TREE_CODE (old) == VAR_DECL && TREE_ADDRESSABLE (old))
6253 DECL_DLLIMPORT_P (new_tree) = 1;
6254 }
6255
6256 /* Let an inline definition silently override the external reference,
6257 but otherwise warn about attribute inconsistency. */
6258 else if (TREE_CODE (new_tree) == VAR_DECL
6259 || !DECL_DECLARED_INLINE_P (new_tree))
6260 warning (OPT_Wattributes, "%q+D redeclared without dllimport attribute: "
6261 "previous dllimport ignored", new_tree);
6262 }
6263 else
6264 delete_dllimport_p = 0;
6265
6266 a = merge_attributes (DECL_ATTRIBUTES (old), DECL_ATTRIBUTES (new_tree));
6267
6268 if (delete_dllimport_p)
6269 a = remove_attribute ("dllimport", a);
6270
6271 return a;
6272 }
6273
6274 /* Handle a "dllimport" or "dllexport" attribute; arguments as in
6275 struct attribute_spec.handler. */
6276
6277 tree
6278 handle_dll_attribute (tree * pnode, tree name, tree args, int flags,
6279 bool *no_add_attrs)
6280 {
6281 tree node = *pnode;
6282 bool is_dllimport;
6283
6284 /* These attributes may apply to structure and union types being created,
6285 but otherwise should pass to the declaration involved. */
6286 if (!DECL_P (node))
6287 {
6288 if (flags & ((int) ATTR_FLAG_DECL_NEXT | (int) ATTR_FLAG_FUNCTION_NEXT
6289 | (int) ATTR_FLAG_ARRAY_NEXT))
6290 {
6291 *no_add_attrs = true;
6292 return tree_cons (name, args, NULL_TREE);
6293 }
6294 if (TREE_CODE (node) == RECORD_TYPE
6295 || TREE_CODE (node) == UNION_TYPE)
6296 {
6297 node = TYPE_NAME (node);
6298 if (!node)
6299 return NULL_TREE;
6300 }
6301 else
6302 {
6303 warning (OPT_Wattributes, "%qE attribute ignored",
6304 name);
6305 *no_add_attrs = true;
6306 return NULL_TREE;
6307 }
6308 }
6309
6310 if (TREE_CODE (node) != FUNCTION_DECL
6311 && TREE_CODE (node) != VAR_DECL
6312 && TREE_CODE (node) != TYPE_DECL)
6313 {
6314 *no_add_attrs = true;
6315 warning (OPT_Wattributes, "%qE attribute ignored",
6316 name);
6317 return NULL_TREE;
6318 }
6319
6320 if (TREE_CODE (node) == TYPE_DECL
6321 && TREE_CODE (TREE_TYPE (node)) != RECORD_TYPE
6322 && TREE_CODE (TREE_TYPE (node)) != UNION_TYPE)
6323 {
6324 *no_add_attrs = true;
6325 warning (OPT_Wattributes, "%qE attribute ignored",
6326 name);
6327 return NULL_TREE;
6328 }
6329
6330 is_dllimport = is_attribute_p ("dllimport", name);
6331
6332 /* Report error on dllimport ambiguities seen now before they cause
6333 any damage. */
6334 if (is_dllimport)
6335 {
6336 /* Honor any target-specific overrides. */
6337 if (!targetm.valid_dllimport_attribute_p (node))
6338 *no_add_attrs = true;
6339
6340 else if (TREE_CODE (node) == FUNCTION_DECL
6341 && DECL_DECLARED_INLINE_P (node))
6342 {
6343 warning (OPT_Wattributes, "inline function %q+D declared as "
6344 " dllimport: attribute ignored", node);
6345 *no_add_attrs = true;
6346 }
6347 /* Like MS, treat definition of dllimported variables and
6348 non-inlined functions on declaration as syntax errors. */
6349 else if (TREE_CODE (node) == FUNCTION_DECL && DECL_INITIAL (node))
6350 {
6351 error ("function %q+D definition is marked dllimport", node);
6352 *no_add_attrs = true;
6353 }
6354
6355 else if (TREE_CODE (node) == VAR_DECL)
6356 {
6357 if (DECL_INITIAL (node))
6358 {
6359 error ("variable %q+D definition is marked dllimport",
6360 node);
6361 *no_add_attrs = true;
6362 }
6363
6364 /* `extern' needn't be specified with dllimport.
6365 Specify `extern' now and hope for the best. Sigh. */
6366 DECL_EXTERNAL (node) = 1;
6367 /* Also, implicitly give global scope to dllimport'd variables
6368 declared within a function, unless declared static. */
6369 if (current_function_decl != NULL_TREE && !TREE_STATIC (node))
6370 TREE_PUBLIC (node) = 1;
6371 }
6372
6373 if (*no_add_attrs == false)
6374 DECL_DLLIMPORT_P (node) = 1;
6375 }
6376 else if (TREE_CODE (node) == FUNCTION_DECL
6377 && DECL_DECLARED_INLINE_P (node)
6378 && flag_keep_inline_dllexport)
6379 /* An exported function, even if inline, must be emitted. */
6380 DECL_EXTERNAL (node) = 0;
6381
6382 /* Report error if symbol is not accessible at global scope. */
6383 if (!TREE_PUBLIC (node)
6384 && (TREE_CODE (node) == VAR_DECL
6385 || TREE_CODE (node) == FUNCTION_DECL))
6386 {
6387 error ("external linkage required for symbol %q+D because of "
6388 "%qE attribute", node, name);
6389 *no_add_attrs = true;
6390 }
6391
6392 /* A dllexport'd entity must have default visibility so that other
6393 program units (shared libraries or the main executable) can see
6394 it. A dllimport'd entity must have default visibility so that
6395 the linker knows that undefined references within this program
6396 unit can be resolved by the dynamic linker. */
6397 if (!*no_add_attrs)
6398 {
6399 if (DECL_VISIBILITY_SPECIFIED (node)
6400 && DECL_VISIBILITY (node) != VISIBILITY_DEFAULT)
6401 error ("%qE implies default visibility, but %qD has already "
6402 "been declared with a different visibility",
6403 name, node);
6404 DECL_VISIBILITY (node) = VISIBILITY_DEFAULT;
6405 DECL_VISIBILITY_SPECIFIED (node) = 1;
6406 }
6407
6408 return NULL_TREE;
6409 }
6410
6411 #endif /* TARGET_DLLIMPORT_DECL_ATTRIBUTES */
6412 \f
6413 /* Set the type qualifiers for TYPE to TYPE_QUALS, which is a bitmask
6414 of the various TYPE_QUAL values. */
6415
6416 static void
6417 set_type_quals (tree type, int type_quals)
6418 {
6419 TYPE_READONLY (type) = (type_quals & TYPE_QUAL_CONST) != 0;
6420 TYPE_VOLATILE (type) = (type_quals & TYPE_QUAL_VOLATILE) != 0;
6421 TYPE_RESTRICT (type) = (type_quals & TYPE_QUAL_RESTRICT) != 0;
6422 TYPE_ATOMIC (type) = (type_quals & TYPE_QUAL_ATOMIC) != 0;
6423 TYPE_ADDR_SPACE (type) = DECODE_QUAL_ADDR_SPACE (type_quals);
6424 }
6425
6426 /* Returns true iff unqualified CAND and BASE are equivalent. */
6427
6428 bool
6429 check_base_type (const_tree cand, const_tree base)
6430 {
6431 return (TYPE_NAME (cand) == TYPE_NAME (base)
6432 /* Apparently this is needed for Objective-C. */
6433 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6434 /* Check alignment. */
6435 && TYPE_ALIGN (cand) == TYPE_ALIGN (base)
6436 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6437 TYPE_ATTRIBUTES (base)));
6438 }
6439
6440 /* Returns true iff CAND is equivalent to BASE with TYPE_QUALS. */
6441
6442 bool
6443 check_qualified_type (const_tree cand, const_tree base, int type_quals)
6444 {
6445 return (TYPE_QUALS (cand) == type_quals
6446 && check_base_type (cand, base));
6447 }
6448
6449 /* Returns true iff CAND is equivalent to BASE with ALIGN. */
6450
6451 static bool
6452 check_aligned_type (const_tree cand, const_tree base, unsigned int align)
6453 {
6454 return (TYPE_QUALS (cand) == TYPE_QUALS (base)
6455 && TYPE_NAME (cand) == TYPE_NAME (base)
6456 /* Apparently this is needed for Objective-C. */
6457 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6458 /* Check alignment. */
6459 && TYPE_ALIGN (cand) == align
6460 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6461 TYPE_ATTRIBUTES (base)));
6462 }
6463
6464 /* This function checks to see if TYPE matches the size of one of the built-in
6465 atomic types, and returns that core atomic type. */
6466
6467 static tree
6468 find_atomic_core_type (tree type)
6469 {
6470 tree base_atomic_type;
6471
6472 /* Only handle complete types. */
6473 if (TYPE_SIZE (type) == NULL_TREE)
6474 return NULL_TREE;
6475
6476 HOST_WIDE_INT type_size = tree_to_uhwi (TYPE_SIZE (type));
6477 switch (type_size)
6478 {
6479 case 8:
6480 base_atomic_type = atomicQI_type_node;
6481 break;
6482
6483 case 16:
6484 base_atomic_type = atomicHI_type_node;
6485 break;
6486
6487 case 32:
6488 base_atomic_type = atomicSI_type_node;
6489 break;
6490
6491 case 64:
6492 base_atomic_type = atomicDI_type_node;
6493 break;
6494
6495 case 128:
6496 base_atomic_type = atomicTI_type_node;
6497 break;
6498
6499 default:
6500 base_atomic_type = NULL_TREE;
6501 }
6502
6503 return base_atomic_type;
6504 }
6505
6506 /* Return a version of the TYPE, qualified as indicated by the
6507 TYPE_QUALS, if one exists. If no qualified version exists yet,
6508 return NULL_TREE. */
6509
6510 tree
6511 get_qualified_type (tree type, int type_quals)
6512 {
6513 tree t;
6514
6515 if (TYPE_QUALS (type) == type_quals)
6516 return type;
6517
6518 /* Search the chain of variants to see if there is already one there just
6519 like the one we need to have. If so, use that existing one. We must
6520 preserve the TYPE_NAME, since there is code that depends on this. */
6521 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6522 if (check_qualified_type (t, type, type_quals))
6523 return t;
6524
6525 return NULL_TREE;
6526 }
6527
6528 /* Like get_qualified_type, but creates the type if it does not
6529 exist. This function never returns NULL_TREE. */
6530
6531 tree
6532 build_qualified_type (tree type, int type_quals)
6533 {
6534 tree t;
6535
6536 /* See if we already have the appropriate qualified variant. */
6537 t = get_qualified_type (type, type_quals);
6538
6539 /* If not, build it. */
6540 if (!t)
6541 {
6542 t = build_variant_type_copy (type);
6543 set_type_quals (t, type_quals);
6544
6545 if ((type_quals & TYPE_QUAL_ATOMIC) == TYPE_QUAL_ATOMIC)
6546 {
6547 /* See if this object can map to a basic atomic type. */
6548 tree atomic_type = find_atomic_core_type (type);
6549 if (atomic_type)
6550 {
6551 /* Ensure the alignment of this type is compatible with
6552 the required alignment of the atomic type. */
6553 if (TYPE_ALIGN (atomic_type) > TYPE_ALIGN (t))
6554 TYPE_ALIGN (t) = TYPE_ALIGN (atomic_type);
6555 }
6556 }
6557
6558 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6559 /* Propagate structural equality. */
6560 SET_TYPE_STRUCTURAL_EQUALITY (t);
6561 else if (TYPE_CANONICAL (type) != type)
6562 /* Build the underlying canonical type, since it is different
6563 from TYPE. */
6564 {
6565 tree c = build_qualified_type (TYPE_CANONICAL (type), type_quals);
6566 TYPE_CANONICAL (t) = TYPE_CANONICAL (c);
6567 }
6568 else
6569 /* T is its own canonical type. */
6570 TYPE_CANONICAL (t) = t;
6571
6572 }
6573
6574 return t;
6575 }
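
/* A small usage sketch for the two routines above.  get_qualified_type
   only searches the existing variant chain and may return NULL_TREE,
   while build_qualified_type creates the variant on demand:

     tree cv_int = build_qualified_type (integer_type_node,
                                         TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE);
     gcc_checking_assert (TYPE_READONLY (cv_int) && TYPE_VOLATILE (cv_int));
     gcc_checking_assert (TYPE_MAIN_VARIANT (cv_int) == integer_type_node);

     tree again = get_qualified_type (integer_type_node,
                                      TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE);
     gcc_checking_assert (again == cv_int);

   The second lookup finds the variant created by the first call because
   build_variant_type_copy linked it into the variant chain of
   integer_type_node.  */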
6576
6577 /* Create a variant of TYPE with alignment ALIGN. */
6578
6579 tree
6580 build_aligned_type (tree type, unsigned int align)
6581 {
6582 tree t;
6583
6584 if (TYPE_PACKED (type)
6585 || TYPE_ALIGN (type) == align)
6586 return type;
6587
6588 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6589 if (check_aligned_type (t, type, align))
6590 return t;
6591
6592 t = build_variant_type_copy (type);
6593 TYPE_ALIGN (t) = align;
6594
6595 return t;
6596 }
6597
6598 /* Create a new distinct copy of TYPE. The new type is made its own
6599 MAIN_VARIANT. If TYPE requires structural equality checks, the
6600 resulting type requires structural equality checks; otherwise, its
6601 TYPE_CANONICAL points to itself. */
6602
6603 tree
6604 build_distinct_type_copy (tree type)
6605 {
6606 tree t = copy_node (type);
6607
6608 TYPE_POINTER_TO (t) = 0;
6609 TYPE_REFERENCE_TO (t) = 0;
6610
6611 /* Set the canonical type either to a new equivalence class, or
6612 propagate the need for structural equality checks. */
6613 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6614 SET_TYPE_STRUCTURAL_EQUALITY (t);
6615 else
6616 TYPE_CANONICAL (t) = t;
6617
6618 /* Make it its own variant. */
6619 TYPE_MAIN_VARIANT (t) = t;
6620 TYPE_NEXT_VARIANT (t) = 0;
6621
6622 /* We do not record methods in type copies nor variants
6623 so we do not need to keep them up to date when a new method
6624 is inserted. */
6625 if (RECORD_OR_UNION_TYPE_P (t))
6626 TYPE_METHODS (t) = NULL_TREE;
6627
6628 /* Note that it is now possible for TYPE_MIN_VALUE to be a value
6629 whose TREE_TYPE is not t. This can also happen in the Ada
6630 frontend when using subtypes. */
6631
6632 return t;
6633 }
6634
6635 /* Create a new variant of TYPE, equivalent but distinct. This is so
6636 the caller can modify it. TYPE_CANONICAL for the return type will
6637 be equivalent to TYPE_CANONICAL of TYPE, indicating that the types
6638 are considered equal by the language itself (or that both types
6639 require structural equality checks). */
6640
6641 tree
6642 build_variant_type_copy (tree type)
6643 {
6644 tree t, m = TYPE_MAIN_VARIANT (type);
6645
6646 t = build_distinct_type_copy (type);
6647
6648 /* Since we're building a variant, assume that it is a non-semantic
6649 variant. This also propagates TYPE_STRUCTURAL_EQUALITY_P. */
6650 TYPE_CANONICAL (t) = TYPE_CANONICAL (type);
6651
6652 /* Add the new type to the chain of variants of TYPE. */
6653 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (m);
6654 TYPE_NEXT_VARIANT (m) = t;
6655 TYPE_MAIN_VARIANT (t) = m;
6656
6657 return t;
6658 }
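
/* To make the difference between the two copy routines above concrete,
   here is a hedged sketch; SOME_RECORD stands for any RECORD_TYPE whose
   canonical type is itself:

     tree variant = build_variant_type_copy (some_record);
     tree distinct = build_distinct_type_copy (some_record);

     TYPE_MAIN_VARIANT (variant) == some_record
     TYPE_CANONICAL (variant) == TYPE_CANONICAL (some_record)
     TYPE_MAIN_VARIANT (distinct) == distinct
     TYPE_CANONICAL (distinct) == distinct

   The variant is therefore still "the same type" for canonical-type
   comparisons, whereas the distinct copy starts a new equivalence
   class.  */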
6659 \f
6660 /* Return true if the from trees in both tree maps are equal. */
6661
6662 int
6663 tree_map_base_eq (const void *va, const void *vb)
6664 {
6665 const struct tree_map_base *const a = (const struct tree_map_base *) va,
6666 *const b = (const struct tree_map_base *) vb;
6667 return (a->from == b->from);
6668 }
6669
6670 /* Hash a from tree in a tree_map_base. */
6671
6672 unsigned int
6673 tree_map_base_hash (const void *item)
6674 {
6675 return htab_hash_pointer (((const struct tree_map_base *)item)->from);
6676 }
6677
6678 /* Return true if this tree map structure is marked for garbage collection
6679 purposes. We simply return true if the from tree is marked, so that this
6680 structure goes away when the from tree goes away. */
6681
6682 int
6683 tree_map_base_marked_p (const void *p)
6684 {
6685 return ggc_marked_p (((const struct tree_map_base *) p)->from);
6686 }
6687
6688 /* Hash a from tree in a tree_map. */
6689
6690 unsigned int
6691 tree_map_hash (const void *item)
6692 {
6693 return (((const struct tree_map *) item)->hash);
6694 }
6695
6696 /* Hash a from tree in a tree_decl_map. */
6697
6698 unsigned int
6699 tree_decl_map_hash (const void *item)
6700 {
6701 return DECL_UID (((const struct tree_decl_map *) item)->base.from);
6702 }
6703
6704 /* Return the initialization priority for DECL. */
6705
6706 priority_type
6707 decl_init_priority_lookup (tree decl)
6708 {
6709 symtab_node *snode = symtab_node::get (decl);
6710
6711 if (!snode)
6712 return DEFAULT_INIT_PRIORITY;
6713 return
6714 snode->get_init_priority ();
6715 }
6716
6717 /* Return the finalization priority for DECL. */
6718
6719 priority_type
6720 decl_fini_priority_lookup (tree decl)
6721 {
6722 cgraph_node *node = cgraph_node::get (decl);
6723
6724 if (!node)
6725 return DEFAULT_INIT_PRIORITY;
6726 return
6727 node->get_fini_priority ();
6728 }
6729
6730 /* Set the initialization priority for DECL to PRIORITY. */
6731
6732 void
6733 decl_init_priority_insert (tree decl, priority_type priority)
6734 {
6735 struct symtab_node *snode;
6736
6737 if (priority == DEFAULT_INIT_PRIORITY)
6738 {
6739 snode = symtab_node::get (decl);
6740 if (!snode)
6741 return;
6742 }
6743 else if (TREE_CODE (decl) == VAR_DECL)
6744 snode = varpool_node::get_create (decl);
6745 else
6746 snode = cgraph_node::get_create (decl);
6747 snode->set_init_priority (priority);
6748 }
6749
6750 /* Set the finalization priority for DECL to PRIORITY. */
6751
6752 void
6753 decl_fini_priority_insert (tree decl, priority_type priority)
6754 {
6755 struct cgraph_node *node;
6756
6757 if (priority == DEFAULT_INIT_PRIORITY)
6758 {
6759 node = cgraph_node::get (decl);
6760 if (!node)
6761 return;
6762 }
6763 else
6764 node = cgraph_node::get_create (decl);
6765 node->set_fini_priority (priority);
6766 }
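
/* A hedged sketch of how the insert/lookup pairs above are used when a
   front end lowers something like __attribute__ ((constructor (200)));
   DECL is a placeholder for the constructor's FUNCTION_DECL:

     decl_init_priority_insert (decl, 200);
     ...
     priority_type p = decl_init_priority_lookup (decl);

   Inserting DEFAULT_INIT_PRIORITY is a no-op when the decl has no symtab
   node yet, matching the early-return paths above.  */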
6767
6768 /* Print out the statistics for the DECL_DEBUG_EXPR hash table. */
6769
6770 static void
6771 print_debug_expr_statistics (void)
6772 {
6773 fprintf (stderr, "DECL_DEBUG_EXPR hash: size %ld, %ld elements, %f collisions\n",
6774 (long) debug_expr_for_decl->size (),
6775 (long) debug_expr_for_decl->elements (),
6776 debug_expr_for_decl->collisions ());
6777 }
6778
6779 /* Print out the statistics for the DECL_VALUE_EXPR hash table. */
6780
6781 static void
6782 print_value_expr_statistics (void)
6783 {
6784 fprintf (stderr, "DECL_VALUE_EXPR hash: size %ld, %ld elements, %f collisions\n",
6785 (long) value_expr_for_decl->size (),
6786 (long) value_expr_for_decl->elements (),
6787 value_expr_for_decl->collisions ());
6788 }
6789
6790 /* Lookup a debug expression for FROM, and return it if we find one. */
6791
6792 tree
6793 decl_debug_expr_lookup (tree from)
6794 {
6795 struct tree_decl_map *h, in;
6796 in.base.from = from;
6797
6798 h = debug_expr_for_decl->find_with_hash (&in, DECL_UID (from));
6799 if (h)
6800 return h->to;
6801 return NULL_TREE;
6802 }
6803
6804 /* Insert a mapping FROM->TO in the debug expression hashtable. */
6805
6806 void
6807 decl_debug_expr_insert (tree from, tree to)
6808 {
6809 struct tree_decl_map *h;
6810
6811 h = ggc_alloc<tree_decl_map> ();
6812 h->base.from = from;
6813 h->to = to;
6814 *debug_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
6815 }
6816
6817 /* Lookup a value expression for FROM, and return it if we find one. */
6818
6819 tree
6820 decl_value_expr_lookup (tree from)
6821 {
6822 struct tree_decl_map *h, in;
6823 in.base.from = from;
6824
6825 h = value_expr_for_decl->find_with_hash (&in, DECL_UID (from));
6826 if (h)
6827 return h->to;
6828 return NULL_TREE;
6829 }
6830
6831 /* Insert a mapping FROM->TO in the value expression hashtable. */
6832
6833 void
6834 decl_value_expr_insert (tree from, tree to)
6835 {
6836 struct tree_decl_map *h;
6837
6838 h = ggc_alloc<tree_decl_map> ();
6839 h->base.from = from;
6840 h->to = to;
6841 *value_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
6842 }
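
/* These two hashtables are normally driven through the DECL_VALUE_EXPR /
   SET_DECL_VALUE_EXPR macros in tree.h together with the
   DECL_HAS_VALUE_EXPR_P flag.  An illustrative sketch, with VAR and
   REPLACEMENT standing for any VAR_DECL and the expression it should be
   rewritten to (for instance a COMPONENT_REF into a closure object):

     decl_value_expr_insert (var, replacement);
     DECL_HAS_VALUE_EXPR_P (var) = 1;
     ...
     if (DECL_HAS_VALUE_EXPR_P (var))
       {
         tree use = decl_value_expr_lookup (var);
         ...
       }

   The flag must be kept in sync with the table: lookups are only
   meaningful for decls that have it set.  */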
6843
6844 /* Lookup a vector of debug arguments for FROM, and return it if we
6845 find one. */
6846
6847 vec<tree, va_gc> **
6848 decl_debug_args_lookup (tree from)
6849 {
6850 struct tree_vec_map *h, in;
6851
6852 if (!DECL_HAS_DEBUG_ARGS_P (from))
6853 return NULL;
6854 gcc_checking_assert (debug_args_for_decl != NULL);
6855 in.base.from = from;
6856 h = debug_args_for_decl->find_with_hash (&in, DECL_UID (from));
6857 if (h)
6858 return &h->to;
6859 return NULL;
6860 }
6861
6862 /* Insert a mapping FROM->empty vector of debug arguments in the debug
6863 arguments hashtable. */
6864
6865 vec<tree, va_gc> **
6866 decl_debug_args_insert (tree from)
6867 {
6868 struct tree_vec_map *h;
6869 tree_vec_map **loc;
6870
6871 if (DECL_HAS_DEBUG_ARGS_P (from))
6872 return decl_debug_args_lookup (from);
6873 if (debug_args_for_decl == NULL)
6874 debug_args_for_decl = hash_table<tree_vec_map_cache_hasher>::create_ggc (64);
6875 h = ggc_alloc<tree_vec_map> ();
6876 h->base.from = from;
6877 h->to = NULL;
6878 loc = debug_args_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT);
6879 *loc = h;
6880 DECL_HAS_DEBUG_ARGS_P (from) = 1;
6881 return &h->to;
6882 }
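
/* A hedged sketch of the intended calling pattern for the debug-argument
   table; FNDECL, DEBUG_ARG and the processing routine are placeholders:

     vec<tree, va_gc> **debug_args = decl_debug_args_insert (fndecl);
     vec_safe_push (*debug_args, debug_arg);
     ...
     vec<tree, va_gc> **args = decl_debug_args_lookup (fndecl);
     unsigned ix;
     tree t;
     if (args)
       FOR_EACH_VEC_SAFE_ELT (*args, ix, t)
         process_debug_arg (t);

   decl_debug_args_insert is idempotent: once DECL_HAS_DEBUG_ARGS_P is
   set it simply returns the existing vector slot.  */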
6883
6884 /* Hashing of types so that we don't make duplicates.
6885 The entry point is `type_hash_canon'. */
6886
6887 /* Compute a hash code for a list of types (chain of TREE_LIST nodes
6888 with types in the TREE_VALUE slots), by adding the hash codes
6889 of the individual types. */
6890
6891 static void
6892 type_hash_list (const_tree list, inchash::hash &hstate)
6893 {
6894 const_tree tail;
6895
6896 for (tail = list; tail; tail = TREE_CHAIN (tail))
6897 if (TREE_VALUE (tail) != error_mark_node)
6898 hstate.add_object (TYPE_HASH (TREE_VALUE (tail)));
6899 }
6900
6901 /* These are the Hashtable callback functions. */
6902
6903 /* Returns true iff the types are equivalent. */
6904
6905 bool
6906 type_cache_hasher::equal (type_hash *a, type_hash *b)
6907 {
6908 /* First test the things that are the same for all types. */
6909 if (a->hash != b->hash
6910 || TREE_CODE (a->type) != TREE_CODE (b->type)
6911 || TREE_TYPE (a->type) != TREE_TYPE (b->type)
6912 || !attribute_list_equal (TYPE_ATTRIBUTES (a->type),
6913 TYPE_ATTRIBUTES (b->type))
6914 || (TREE_CODE (a->type) != COMPLEX_TYPE
6915 && TYPE_NAME (a->type) != TYPE_NAME (b->type)))
6916 return 0;
6917
6918 /* Be careful about comparing arrays before and after the element type
6919 has been completed; don't compare TYPE_ALIGN unless both types are
6920 complete. */
6921 if (COMPLETE_TYPE_P (a->type) && COMPLETE_TYPE_P (b->type)
6922 && (TYPE_ALIGN (a->type) != TYPE_ALIGN (b->type)
6923 || TYPE_MODE (a->type) != TYPE_MODE (b->type)))
6924 return 0;
6925
6926 switch (TREE_CODE (a->type))
6927 {
6928 case VOID_TYPE:
6929 case COMPLEX_TYPE:
6930 case POINTER_TYPE:
6931 case REFERENCE_TYPE:
6932 case NULLPTR_TYPE:
6933 return 1;
6934
6935 case VECTOR_TYPE:
6936 return TYPE_VECTOR_SUBPARTS (a->type) == TYPE_VECTOR_SUBPARTS (b->type);
6937
6938 case ENUMERAL_TYPE:
6939 if (TYPE_VALUES (a->type) != TYPE_VALUES (b->type)
6940 && !(TYPE_VALUES (a->type)
6941 && TREE_CODE (TYPE_VALUES (a->type)) == TREE_LIST
6942 && TYPE_VALUES (b->type)
6943 && TREE_CODE (TYPE_VALUES (b->type)) == TREE_LIST
6944 && type_list_equal (TYPE_VALUES (a->type),
6945 TYPE_VALUES (b->type))))
6946 return 0;
6947
6948 /* ... fall through ... */
6949
6950 case INTEGER_TYPE:
6951 case REAL_TYPE:
6952 case BOOLEAN_TYPE:
6953 if (TYPE_PRECISION (a->type) != TYPE_PRECISION (b->type))
6954 return false;
6955 return ((TYPE_MAX_VALUE (a->type) == TYPE_MAX_VALUE (b->type)
6956 || tree_int_cst_equal (TYPE_MAX_VALUE (a->type),
6957 TYPE_MAX_VALUE (b->type)))
6958 && (TYPE_MIN_VALUE (a->type) == TYPE_MIN_VALUE (b->type)
6959 || tree_int_cst_equal (TYPE_MIN_VALUE (a->type),
6960 TYPE_MIN_VALUE (b->type))));
6961
6962 case FIXED_POINT_TYPE:
6963 return TYPE_SATURATING (a->type) == TYPE_SATURATING (b->type);
6964
6965 case OFFSET_TYPE:
6966 return TYPE_OFFSET_BASETYPE (a->type) == TYPE_OFFSET_BASETYPE (b->type);
6967
6968 case METHOD_TYPE:
6969 if (TYPE_METHOD_BASETYPE (a->type) == TYPE_METHOD_BASETYPE (b->type)
6970 && (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6971 || (TYPE_ARG_TYPES (a->type)
6972 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6973 && TYPE_ARG_TYPES (b->type)
6974 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6975 && type_list_equal (TYPE_ARG_TYPES (a->type),
6976 TYPE_ARG_TYPES (b->type)))))
6977 break;
6978 return 0;
6979 case ARRAY_TYPE:
6980 return TYPE_DOMAIN (a->type) == TYPE_DOMAIN (b->type);
6981
6982 case RECORD_TYPE:
6983 case UNION_TYPE:
6984 case QUAL_UNION_TYPE:
6985 return (TYPE_FIELDS (a->type) == TYPE_FIELDS (b->type)
6986 || (TYPE_FIELDS (a->type)
6987 && TREE_CODE (TYPE_FIELDS (a->type)) == TREE_LIST
6988 && TYPE_FIELDS (b->type)
6989 && TREE_CODE (TYPE_FIELDS (b->type)) == TREE_LIST
6990 && type_list_equal (TYPE_FIELDS (a->type),
6991 TYPE_FIELDS (b->type))));
6992
6993 case FUNCTION_TYPE:
6994 if (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6995 || (TYPE_ARG_TYPES (a->type)
6996 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6997 && TYPE_ARG_TYPES (b->type)
6998 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6999 && type_list_equal (TYPE_ARG_TYPES (a->type),
7000 TYPE_ARG_TYPES (b->type))))
7001 break;
7002 return 0;
7003
7004 default:
7005 return 0;
7006 }
7007
7008 if (lang_hooks.types.type_hash_eq != NULL)
7009 return lang_hooks.types.type_hash_eq (a->type, b->type);
7010
7011 return 1;
7012 }
7013
7014 /* Given TYPE, and HASHCODE its hash code, return the canonical
7015 object for an identical type if one already exists.
7016 Otherwise, return TYPE, and record it as the canonical object.
7017
7018 To use this function, first create a type of the sort you want.
7019 Then compute its hash code from the fields of the type that
7020 make it different from other similar types.
7021 Then call this function and use the value. */
7022
7023 tree
7024 type_hash_canon (unsigned int hashcode, tree type)
7025 {
7026 type_hash in;
7027 type_hash **loc;
7028
7029 /* The hash table only contains main variants, so ensure that's what we're
7030 being passed. */
7031 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
7032
7033 /* The TYPE_ALIGN field of a type is set by layout_type(), so we
7034 must call that routine before comparing TYPE_ALIGNs. */
7035 layout_type (type);
7036
7037 in.hash = hashcode;
7038 in.type = type;
7039
7040 loc = type_hash_table->find_slot_with_hash (&in, hashcode, INSERT);
7041 if (*loc)
7042 {
7043 tree t1 = ((type_hash *) *loc)->type;
7044 gcc_assert (TYPE_MAIN_VARIANT (t1) == t1);
7045 if (GATHER_STATISTICS)
7046 {
7047 tree_code_counts[(int) TREE_CODE (type)]--;
7048 tree_node_counts[(int) t_kind]--;
7049 tree_node_sizes[(int) t_kind] -= sizeof (struct tree_type_non_common);
7050 }
7051 return t1;
7052 }
7053 else
7054 {
7055 struct type_hash *h;
7056
7057 h = ggc_alloc<type_hash> ();
7058 h->hash = hashcode;
7059 h->type = type;
7060 *loc = h;
7061
7062 return type;
7063 }
7064 }
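
/* A condensed sketch of the protocol described above, in the shape used
   by the build_*_type routines elsewhere in this file: build a candidate
   main variant, hash its distinguishing fields, then let type_hash_canon
   either return an existing identical type or register the new one.
   PRECISION and UNSIGNEDP are placeholders:

     tree t = make_node (INTEGER_TYPE);
     TYPE_PRECISION (t) = precision;
     if (unsignedp)
       fixup_unsigned_type (t);
     else
       fixup_signed_type (t);

     inchash::hash hstate;
     hstate.add_int (precision);
     hstate.add_int (unsignedp);
     t = type_hash_canon (hstate.end (), t);

   If an equivalent type was already interned, the freshly built node is
   discarded (the statistics adjustment above accounts for this) and the
   previously interned node is returned instead.  */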
7065
7066 static void
7067 print_type_hash_statistics (void)
7068 {
7069 fprintf (stderr, "Type hash: size %ld, %ld elements, %f collisions\n",
7070 (long) type_hash_table->size (),
7071 (long) type_hash_table->elements (),
7072 type_hash_table->collisions ());
7073 }
7074
7075 /* Compute a hash code for a list of attributes (chain of TREE_LIST nodes
7076 with names in the TREE_PURPOSE slots and args in the TREE_VALUE slots),
7077 by adding the hash codes of the individual attributes. */
7078
7079 static void
7080 attribute_hash_list (const_tree list, inchash::hash &hstate)
7081 {
7082 const_tree tail;
7083
7084 for (tail = list; tail; tail = TREE_CHAIN (tail))
7085 /* ??? Do we want to add in TREE_VALUE too? */
7086 hstate.add_object (IDENTIFIER_HASH_VALUE (get_attribute_name (tail)));
7087 }
7088
7089 /* Given two lists of attributes, return true if list l2 is
7090 equivalent to l1. */
7091
7092 int
7093 attribute_list_equal (const_tree l1, const_tree l2)
7094 {
7095 if (l1 == l2)
7096 return 1;
7097
7098 return attribute_list_contained (l1, l2)
7099 && attribute_list_contained (l2, l1);
7100 }
7101
7102 /* Given two lists of attributes, return true if list L2 is
7103 completely contained within L1. */
7104 /* ??? This would be faster if attribute names were stored in a canonicalized
7105 form. Otherwise, if L1 uses `foo' and L2 uses `__foo__', the long method
7106 must be used to show these elements are equivalent (which they are). */
7107 /* ??? It's not clear that attributes with arguments will always be handled
7108 correctly. */
7109
7110 int
7111 attribute_list_contained (const_tree l1, const_tree l2)
7112 {
7113 const_tree t1, t2;
7114
7115 /* First check the obvious, maybe the lists are identical. */
7116 if (l1 == l2)
7117 return 1;
7118
7119 /* Maybe the lists are similar. */
7120 for (t1 = l1, t2 = l2;
7121 t1 != 0 && t2 != 0
7122 && get_attribute_name (t1) == get_attribute_name (t2)
7123 && TREE_VALUE (t1) == TREE_VALUE (t2);
7124 t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
7125 ;
7126
7127 /* Maybe the lists are equal. */
7128 if (t1 == 0 && t2 == 0)
7129 return 1;
7130
7131 for (; t2 != 0; t2 = TREE_CHAIN (t2))
7132 {
7133 const_tree attr;
7134 /* This CONST_CAST is okay because lookup_attribute does not
7135 modify its argument and the return value is assigned to a
7136 const_tree. */
7137 for (attr = lookup_ident_attribute (get_attribute_name (t2),
7138 CONST_CAST_TREE (l1));
7139 attr != NULL_TREE && !attribute_value_equal (t2, attr);
7140 attr = lookup_ident_attribute (get_attribute_name (t2),
7141 TREE_CHAIN (attr)))
7142 ;
7143
7144 if (attr == NULL_TREE)
7145 return 0;
7146 }
7147
7148 return 1;
7149 }
7150
7151 /* Given two lists of types
7152 (chains of TREE_LIST nodes with types in the TREE_VALUE slots)
7153 return 1 if the lists contain the same types in the same order.
7154 Also, the TREE_PURPOSEs must match. */
7155
7156 int
7157 type_list_equal (const_tree l1, const_tree l2)
7158 {
7159 const_tree t1, t2;
7160
7161 for (t1 = l1, t2 = l2; t1 && t2; t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
7162 if (TREE_VALUE (t1) != TREE_VALUE (t2)
7163 || (TREE_PURPOSE (t1) != TREE_PURPOSE (t2)
7164 && ! (1 == simple_cst_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2))
7165 && (TREE_TYPE (TREE_PURPOSE (t1))
7166 == TREE_TYPE (TREE_PURPOSE (t2))))))
7167 return 0;
7168
7169 return t1 == t2;
7170 }
7171
7172 /* Returns the number of arguments to the FUNCTION_TYPE or METHOD_TYPE
7173 given by TYPE. If the argument list accepts variable arguments,
7174 then this function counts only the ordinary arguments. */
7175
7176 int
7177 type_num_arguments (const_tree type)
7178 {
7179 int i = 0;
7180 tree t;
7181
7182 for (t = TYPE_ARG_TYPES (type); t; t = TREE_CHAIN (t))
7183 /* If the function does not take a variable number of arguments,
7184 the last element in the list will have type `void'. */
7185 if (VOID_TYPE_P (TREE_VALUE (t)))
7186 break;
7187 else
7188 ++i;
7189
7190 return i;
7191 }
7192
7193 /* Nonzero if integer constants T1 and T2
7194 represent the same constant value. */
7195
7196 int
7197 tree_int_cst_equal (const_tree t1, const_tree t2)
7198 {
7199 if (t1 == t2)
7200 return 1;
7201
7202 if (t1 == 0 || t2 == 0)
7203 return 0;
7204
7205 if (TREE_CODE (t1) == INTEGER_CST
7206 && TREE_CODE (t2) == INTEGER_CST
7207 && wi::to_widest (t1) == wi::to_widest (t2))
7208 return 1;
7209
7210 return 0;
7211 }
7212
7213 /* Return true if T is an INTEGER_CST whose numerical value (extended
7214 according to TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. */
7215
7216 bool
7217 tree_fits_shwi_p (const_tree t)
7218 {
7219 return (t != NULL_TREE
7220 && TREE_CODE (t) == INTEGER_CST
7221 && wi::fits_shwi_p (wi::to_widest (t)));
7222 }
7223
7224 /* Return true if T is an INTEGER_CST whose numerical value (extended
7225 according to TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. */
7226
7227 bool
7228 tree_fits_uhwi_p (const_tree t)
7229 {
7230 return (t != NULL_TREE
7231 && TREE_CODE (t) == INTEGER_CST
7232 && wi::fits_uhwi_p (wi::to_widest (t)));
7233 }
7234
7235 /* T is an INTEGER_CST whose numerical value (extended according to
7236 TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. Return that
7237 HOST_WIDE_INT. */
7238
7239 HOST_WIDE_INT
7240 tree_to_shwi (const_tree t)
7241 {
7242 gcc_assert (tree_fits_shwi_p (t));
7243 return TREE_INT_CST_LOW (t);
7244 }
7245
7246 /* T is an INTEGER_CST whose numerical value (extended according to
7247 TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. Return that
7248 HOST_WIDE_INT. */
7249
7250 unsigned HOST_WIDE_INT
7251 tree_to_uhwi (const_tree t)
7252 {
7253 gcc_assert (tree_fits_uhwi_p (t));
7254 return TREE_INT_CST_LOW (t);
7255 }
7256
7257 /* Return the most significant (sign) bit of T. */
7258
7259 int
7260 tree_int_cst_sign_bit (const_tree t)
7261 {
7262 unsigned bitno = TYPE_PRECISION (TREE_TYPE (t)) - 1;
7263
7264 return wi::extract_uhwi (t, bitno, 1);
7265 }
7266
7267 /* Return an indication of the sign of the integer constant T.
7268 The return value is -1 if T < 0, 0 if T == 0, and 1 if T > 0.
7269 Note that -1 will never be returned if T's type is unsigned. */
7270
7271 int
7272 tree_int_cst_sgn (const_tree t)
7273 {
7274 if (wi::eq_p (t, 0))
7275 return 0;
7276 else if (TYPE_UNSIGNED (TREE_TYPE (t)))
7277 return 1;
7278 else if (wi::neg_p (t))
7279 return -1;
7280 else
7281 return 1;
7282 }
7283
7284 /* Return the minimum number of bits needed to represent VALUE in a
7285 signed or unsigned type, UNSIGNEDP says which. */
7286
7287 unsigned int
7288 tree_int_cst_min_precision (tree value, signop sgn)
7289 {
7290 /* If the value is negative, compute its negative minus 1. The latter
7291 adjustment is because the absolute value of the largest negative value
7292 is one larger than the largest positive value. This is equivalent to
7293 a bit-wise negation, so use that operation instead. */
7294
7295 if (tree_int_cst_sgn (value) < 0)
7296 value = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (value), value);
7297
7298 /* Return the number of bits needed, taking into account the fact
7299 that we need one more bit for a signed than unsigned type.
7300 If value is 0 or -1, the minimum precision is 1 no matter
7301 whether unsignedp is true or false. */
7302
7303 if (integer_zerop (value))
7304 return 1;
7305 else
7306 return tree_floor_log2 (value) + 1 + (sgn == SIGNED ? 1 : 0);
7307 }
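
/* Worked examples of the computation above, assuming VALUE was built with
   build_int_cst on integer_type_node:

     value 5 (binary 101):   UNSIGNED -> 3 bits, SIGNED -> 4 bits
     value -3 (~-3 == 2):    SIGNED -> 3 bits (representable range -4 .. 3)
     value 0 or -1:          1 bit for either signedness.  */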
7308
7309 /* Return truthvalue of whether T1 is the same tree structure as T2.
7310 Return 1 if they are the same.
7311 Return 0 if they are understandably different.
7312 Return -1 if either contains tree structure not understood by
7313 this function. */
7314
7315 int
7316 simple_cst_equal (const_tree t1, const_tree t2)
7317 {
7318 enum tree_code code1, code2;
7319 int cmp;
7320 int i;
7321
7322 if (t1 == t2)
7323 return 1;
7324 if (t1 == 0 || t2 == 0)
7325 return 0;
7326
7327 code1 = TREE_CODE (t1);
7328 code2 = TREE_CODE (t2);
7329
7330 if (CONVERT_EXPR_CODE_P (code1) || code1 == NON_LVALUE_EXPR)
7331 {
7332 if (CONVERT_EXPR_CODE_P (code2)
7333 || code2 == NON_LVALUE_EXPR)
7334 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7335 else
7336 return simple_cst_equal (TREE_OPERAND (t1, 0), t2);
7337 }
7338
7339 else if (CONVERT_EXPR_CODE_P (code2)
7340 || code2 == NON_LVALUE_EXPR)
7341 return simple_cst_equal (t1, TREE_OPERAND (t2, 0));
7342
7343 if (code1 != code2)
7344 return 0;
7345
7346 switch (code1)
7347 {
7348 case INTEGER_CST:
7349 return wi::to_widest (t1) == wi::to_widest (t2);
7350
7351 case REAL_CST:
7352 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (t1), TREE_REAL_CST (t2));
7353
7354 case FIXED_CST:
7355 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2));
7356
7357 case STRING_CST:
7358 return (TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
7359 && ! memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
7360 TREE_STRING_LENGTH (t1)));
7361
7362 case CONSTRUCTOR:
7363 {
7364 unsigned HOST_WIDE_INT idx;
7365 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (t1);
7366 vec<constructor_elt, va_gc> *v2 = CONSTRUCTOR_ELTS (t2);
7367
7368 if (vec_safe_length (v1) != vec_safe_length (v2))
7369 return false;
7370
7371 for (idx = 0; idx < vec_safe_length (v1); ++idx)
7372 /* ??? Should we handle also fields here? */
7373 if (!simple_cst_equal ((*v1)[idx].value, (*v2)[idx].value))
7374 return false;
7375 return true;
7376 }
7377
7378 case SAVE_EXPR:
7379 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7380
7381 case CALL_EXPR:
7382 cmp = simple_cst_equal (CALL_EXPR_FN (t1), CALL_EXPR_FN (t2));
7383 if (cmp <= 0)
7384 return cmp;
7385 if (call_expr_nargs (t1) != call_expr_nargs (t2))
7386 return 0;
7387 {
7388 const_tree arg1, arg2;
7389 const_call_expr_arg_iterator iter1, iter2;
7390 for (arg1 = first_const_call_expr_arg (t1, &iter1),
7391 arg2 = first_const_call_expr_arg (t2, &iter2);
7392 arg1 && arg2;
7393 arg1 = next_const_call_expr_arg (&iter1),
7394 arg2 = next_const_call_expr_arg (&iter2))
7395 {
7396 cmp = simple_cst_equal (arg1, arg2);
7397 if (cmp <= 0)
7398 return cmp;
7399 }
7400 return arg1 == arg2;
7401 }
7402
7403 case TARGET_EXPR:
7404 /* Special case: if either target is an unallocated VAR_DECL,
7405 it means that it's going to be unified with whatever the
7406 TARGET_EXPR is really supposed to initialize, so treat it
7407 as being equivalent to anything. */
7408 if ((TREE_CODE (TREE_OPERAND (t1, 0)) == VAR_DECL
7409 && DECL_NAME (TREE_OPERAND (t1, 0)) == NULL_TREE
7410 && !DECL_RTL_SET_P (TREE_OPERAND (t1, 0)))
7411 || (TREE_CODE (TREE_OPERAND (t2, 0)) == VAR_DECL
7412 && DECL_NAME (TREE_OPERAND (t2, 0)) == NULL_TREE
7413 && !DECL_RTL_SET_P (TREE_OPERAND (t2, 0))))
7414 cmp = 1;
7415 else
7416 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7417
7418 if (cmp <= 0)
7419 return cmp;
7420
7421 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7422
7423 case WITH_CLEANUP_EXPR:
7424 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7425 if (cmp <= 0)
7426 return cmp;
7427
7428 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7429
7430 case COMPONENT_REF:
7431 if (TREE_OPERAND (t1, 1) == TREE_OPERAND (t2, 1))
7432 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7433
7434 return 0;
7435
7436 case VAR_DECL:
7437 case PARM_DECL:
7438 case CONST_DECL:
7439 case FUNCTION_DECL:
7440 return 0;
7441
7442 default:
7443 break;
7444 }
7445
7446 /* This general rule works for most tree codes. All exceptions should be
7447 handled above. If this is a language-specific tree code, we can't
7448 trust what might be in the operand, so say we don't know
7449 the situation. */
7450 if ((int) code1 >= (int) LAST_AND_UNUSED_TREE_CODE)
7451 return -1;
7452
7453 switch (TREE_CODE_CLASS (code1))
7454 {
7455 case tcc_unary:
7456 case tcc_binary:
7457 case tcc_comparison:
7458 case tcc_expression:
7459 case tcc_reference:
7460 case tcc_statement:
7461 cmp = 1;
7462 for (i = 0; i < TREE_CODE_LENGTH (code1); i++)
7463 {
7464 cmp = simple_cst_equal (TREE_OPERAND (t1, i), TREE_OPERAND (t2, i));
7465 if (cmp <= 0)
7466 return cmp;
7467 }
7468
7469 return cmp;
7470
7471 default:
7472 return -1;
7473 }
7474 }
7475
7476 /* Compare the value of T, an INTEGER_CST, with U, an unsigned integer value.
7477 Return -1, 0, or 1 if the value of T is less than, equal to, or greater
7478 than U, respectively. */
7479
7480 int
7481 compare_tree_int (const_tree t, unsigned HOST_WIDE_INT u)
7482 {
7483 if (tree_int_cst_sgn (t) < 0)
7484 return -1;
7485 else if (!tree_fits_uhwi_p (t))
7486 return 1;
7487 else if (TREE_INT_CST_LOW (t) == u)
7488 return 0;
7489 else if (TREE_INT_CST_LOW (t) < u)
7490 return -1;
7491 else
7492 return 1;
7493 }
7494
7495 /* Return true if SIZE represents a constant size that is in bounds of
7496 what the middle-end and the backend accepts (covering not more than
7497 half of the address-space). */
7498
7499 bool
7500 valid_constant_size_p (const_tree size)
7501 {
7502 if (! tree_fits_uhwi_p (size)
7503 || TREE_OVERFLOW (size)
7504 || tree_int_cst_sign_bit (size) != 0)
7505 return false;
7506 return true;
7507 }
7508
7509 /* Return the precision of the type, or for a complex or vector type the
7510 precision of the type of its elements. */
7511
7512 unsigned int
7513 element_precision (const_tree type)
7514 {
7515 enum tree_code code = TREE_CODE (type);
7516 if (code == COMPLEX_TYPE || code == VECTOR_TYPE)
7517 type = TREE_TYPE (type);
7518
7519 return TYPE_PRECISION (type);
7520 }
7521
7522 /* Return true if CODE represents an associative tree code. Otherwise
7523 return false. */
7524 bool
7525 associative_tree_code (enum tree_code code)
7526 {
7527 switch (code)
7528 {
7529 case BIT_IOR_EXPR:
7530 case BIT_AND_EXPR:
7531 case BIT_XOR_EXPR:
7532 case PLUS_EXPR:
7533 case MULT_EXPR:
7534 case MIN_EXPR:
7535 case MAX_EXPR:
7536 return true;
7537
7538 default:
7539 break;
7540 }
7541 return false;
7542 }
7543
7544 /* Return true if CODE represents a commutative tree code. Otherwise
7545 return false. */
7546 bool
7547 commutative_tree_code (enum tree_code code)
7548 {
7549 switch (code)
7550 {
7551 case PLUS_EXPR:
7552 case MULT_EXPR:
7553 case MULT_HIGHPART_EXPR:
7554 case MIN_EXPR:
7555 case MAX_EXPR:
7556 case BIT_IOR_EXPR:
7557 case BIT_XOR_EXPR:
7558 case BIT_AND_EXPR:
7559 case NE_EXPR:
7560 case EQ_EXPR:
7561 case UNORDERED_EXPR:
7562 case ORDERED_EXPR:
7563 case UNEQ_EXPR:
7564 case LTGT_EXPR:
7565 case TRUTH_AND_EXPR:
7566 case TRUTH_XOR_EXPR:
7567 case TRUTH_OR_EXPR:
7568 case WIDEN_MULT_EXPR:
7569 case VEC_WIDEN_MULT_HI_EXPR:
7570 case VEC_WIDEN_MULT_LO_EXPR:
7571 case VEC_WIDEN_MULT_EVEN_EXPR:
7572 case VEC_WIDEN_MULT_ODD_EXPR:
7573 return true;
7574
7575 default:
7576 break;
7577 }
7578 return false;
7579 }
7580
7581 /* Return true if CODE represents a ternary tree code for which the
7582 first two operands are commutative. Otherwise return false. */
7583 bool
7584 commutative_ternary_tree_code (enum tree_code code)
7585 {
7586 switch (code)
7587 {
7588 case WIDEN_MULT_PLUS_EXPR:
7589 case WIDEN_MULT_MINUS_EXPR:
7590 case DOT_PROD_EXPR:
7591 case FMA_EXPR:
7592 return true;
7593
7594 default:
7595 break;
7596 }
7597 return false;
7598 }
7599
7600 /* Returns true if CODE can overflow. */
7601
7602 bool
7603 operation_can_overflow (enum tree_code code)
7604 {
7605 switch (code)
7606 {
7607 case PLUS_EXPR:
7608 case MINUS_EXPR:
7609 case MULT_EXPR:
7610 case LSHIFT_EXPR:
7611 /* Can overflow in various ways. */
7612 return true;
7613 case TRUNC_DIV_EXPR:
7614 case EXACT_DIV_EXPR:
7615 case FLOOR_DIV_EXPR:
7616 case CEIL_DIV_EXPR:
7617 /* For INT_MIN / -1. */
7618 return true;
7619 case NEGATE_EXPR:
7620 case ABS_EXPR:
7621 /* For -INT_MIN. */
7622 return true;
7623 default:
7624 /* These operators cannot overflow. */
7625 return false;
7626 }
7627 }
7628
7629 /* Returns true if CODE operating on operands of type TYPE doesn't overflow, or
7630 ftrapv doesn't generate trapping insns for CODE. */
7631
7632 bool
7633 operation_no_trapping_overflow (tree type, enum tree_code code)
7634 {
7635 gcc_checking_assert (ANY_INTEGRAL_TYPE_P (type));
7636
7637 /* We don't generate instructions that trap on overflow for complex or vector
7638 types. */
7639 if (!INTEGRAL_TYPE_P (type))
7640 return true;
7641
7642 if (!TYPE_OVERFLOW_TRAPS (type))
7643 return true;
7644
7645 switch (code)
7646 {
7647 case PLUS_EXPR:
7648 case MINUS_EXPR:
7649 case MULT_EXPR:
7650 case NEGATE_EXPR:
7651 case ABS_EXPR:
7652 /* These operators can overflow, and -ftrapv generates trapping code for
7653 these. */
7654 return false;
7655 case TRUNC_DIV_EXPR:
7656 case EXACT_DIV_EXPR:
7657 case FLOOR_DIV_EXPR:
7658 case CEIL_DIV_EXPR:
7659 case LSHIFT_EXPR:
7660 /* These operators can overflow, but -ftrapv does not generate trapping
7661 code for these. */
7662 return true;
7663 default:
7664 /* These operators cannot overflow. */
7665 return true;
7666 }
7667 }
7668
7669 namespace inchash
7670 {
7671
7672 /* Generate a hash value for an expression. This can be used iteratively
7673 by passing a previous result as the HSTATE argument.
7674
7675 This function is intended to produce the same hash for expressions which
7676 would compare equal using operand_equal_p. */
7677 void
7678 add_expr (const_tree t, inchash::hash &hstate)
7679 {
7680 int i;
7681 enum tree_code code;
7682 enum tree_code_class tclass;
7683
7684 if (t == NULL_TREE)
7685 {
7686 hstate.merge_hash (0);
7687 return;
7688 }
7689
7690 code = TREE_CODE (t);
7691
7692 switch (code)
7693 {
7694 /* Alas, constants aren't shared, so we can't rely on pointer
7695 identity. */
7696 case VOID_CST:
7697 hstate.merge_hash (0);
7698 return;
7699 case INTEGER_CST:
7700 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
7701 hstate.add_wide_int (TREE_INT_CST_ELT (t, i));
7702 return;
7703 case REAL_CST:
7704 {
7705 unsigned int val2 = real_hash (TREE_REAL_CST_PTR (t));
7706 hstate.merge_hash (val2);
7707 return;
7708 }
7709 case FIXED_CST:
7710 {
7711 unsigned int val2 = fixed_hash (TREE_FIXED_CST_PTR (t));
7712 hstate.merge_hash (val2);
7713 return;
7714 }
7715 case STRING_CST:
7716 hstate.add ((const void *) TREE_STRING_POINTER (t), TREE_STRING_LENGTH (t));
7717 return;
7718 case COMPLEX_CST:
7719 inchash::add_expr (TREE_REALPART (t), hstate);
7720 inchash::add_expr (TREE_IMAGPART (t), hstate);
7721 return;
7722 case VECTOR_CST:
7723 {
7724 unsigned i;
7725 for (i = 0; i < VECTOR_CST_NELTS (t); ++i)
7726 inchash::add_expr (VECTOR_CST_ELT (t, i), hstate);
7727 return;
7728 }
7729 case SSA_NAME:
7730 /* We can just compare by pointer. */
7731 hstate.add_wide_int (SSA_NAME_VERSION (t));
7732 return;
7733 case PLACEHOLDER_EXPR:
7734 /* The node itself doesn't matter. */
7735 return;
7736 case TREE_LIST:
7737 /* A list of expressions, for a CALL_EXPR or as the elements of a
7738 VECTOR_CST. */
7739 for (; t; t = TREE_CHAIN (t))
7740 inchash::add_expr (TREE_VALUE (t), hstate);
7741 return;
7742 case CONSTRUCTOR:
7743 {
7744 unsigned HOST_WIDE_INT idx;
7745 tree field, value;
7746 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value)
7747 {
7748 inchash::add_expr (field, hstate);
7749 inchash::add_expr (value, hstate);
7750 }
7751 return;
7752 }
7753 case FUNCTION_DECL:
7754 /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
7755 Otherwise nodes that compare equal according to operand_equal_p might
7756 get different hash codes. However, don't do this for machine specific
7757 or front end builtins, since the function code is overloaded in those
7758 cases. */
7759 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
7760 && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t)))
7761 {
7762 t = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
7763 code = TREE_CODE (t);
7764 }
7765 /* FALL THROUGH */
7766 default:
7767 tclass = TREE_CODE_CLASS (code);
7768
7769 if (tclass == tcc_declaration)
7770 {
7771 /* DECL's have a unique ID */
7772 hstate.add_wide_int (DECL_UID (t));
7773 }
7774 else
7775 {
7776 gcc_assert (IS_EXPR_CODE_CLASS (tclass));
7777
7778 hstate.add_object (code);
7779
7780 /* Don't hash the type, that can lead to having nodes which
7781 compare equal according to operand_equal_p, but which
7782 have different hash codes. */
7783 if (CONVERT_EXPR_CODE_P (code)
7784 || code == NON_LVALUE_EXPR)
7785 {
7786 /* Make sure to include signedness in the hash computation. */
7787 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
7788 inchash::add_expr (TREE_OPERAND (t, 0), hstate);
7789 }
7790
7791 else if (commutative_tree_code (code))
7792 {
7793 /* It's a commutative expression. We want to hash it the same
7794 however it appears. We do this by first hashing both operands
7795 and then rehashing based on the order of their independent
7796 hashes. */
7797 inchash::hash one, two;
7798 inchash::add_expr (TREE_OPERAND (t, 0), one);
7799 inchash::add_expr (TREE_OPERAND (t, 1), two);
7800 hstate.add_commutative (one, two);
7801 }
7802 else
7803 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
7804 inchash::add_expr (TREE_OPERAND (t, i), hstate);
7805 }
7806 return;
7807 }
7808 }
7809
7810 }
7811
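/* Illustrative usage sketch (editorial addition, not part of the original
   source): a minimal use of inchash::add_expr, mirroring how the range and
   array type builders below feed trees into an incremental hash and then
   finalize it with hstate.end ().  */
#if 0
static hashval_t
example_hash_expr (const_tree t)
{
  inchash::hash hstate;
  inchash::add_expr (t, hstate);	/* Mix the expression into the state.  */
  return hstate.end ();			/* Produce the final hash value.  */
}
#endif
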
7812 /* Constructors for pointer, array and function types.
7813 (RECORD_TYPE, UNION_TYPE and ENUMERAL_TYPE nodes are
7814 constructed by language-dependent code, not here.) */
7815
7816 /* Construct, lay out and return the type of pointers to TO_TYPE with
7817 mode MODE. If CAN_ALIAS_ALL is TRUE, indicate this type can
7818 reference all of memory. If such a type has already been
7819 constructed, reuse it. */
7820
7821 tree
7822 build_pointer_type_for_mode (tree to_type, machine_mode mode,
7823 bool can_alias_all)
7824 {
7825 tree t;
7826 bool could_alias = can_alias_all;
7827
7828 if (to_type == error_mark_node)
7829 return error_mark_node;
7830
7831 /* If the pointed-to type has the may_alias attribute set, force
7832 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7833 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7834 can_alias_all = true;
7835
7836 /* In some cases, languages will have things that aren't a POINTER_TYPE
7837 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_POINTER_TO.
7838 In that case, return that type without regard to the rest of our
7839 operands.
7840
7841 ??? This is a kludge, but consistent with the way this function has
7842 always operated and there doesn't seem to be a good way to avoid this
7843 at the moment. */
7844 if (TYPE_POINTER_TO (to_type) != 0
7845 && TREE_CODE (TYPE_POINTER_TO (to_type)) != POINTER_TYPE)
7846 return TYPE_POINTER_TO (to_type);
7847
7848 /* First, if we already have a type for pointers to TO_TYPE and it's
7849 the proper mode, use it. */
7850 for (t = TYPE_POINTER_TO (to_type); t; t = TYPE_NEXT_PTR_TO (t))
7851 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7852 return t;
7853
7854 t = make_node (POINTER_TYPE);
7855
7856 TREE_TYPE (t) = to_type;
7857 SET_TYPE_MODE (t, mode);
7858 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7859 TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (to_type);
7860 TYPE_POINTER_TO (to_type) = t;
7861
7862 if (TYPE_STRUCTURAL_EQUALITY_P (to_type))
7863 SET_TYPE_STRUCTURAL_EQUALITY (t);
7864 else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
7865 TYPE_CANONICAL (t)
7866 = build_pointer_type_for_mode (TYPE_CANONICAL (to_type),
7867 mode, false);
7868
7869 /* Lay out the type. This function has many callers that are concerned
7870 with expression-construction, and this simplifies them all. */
7871 layout_type (t);
7872
7873 return t;
7874 }
7875
7876 /* By default build pointers in ptr_mode. */
7877
7878 tree
7879 build_pointer_type (tree to_type)
7880 {
7881 addr_space_t as = to_type == error_mark_node ? ADDR_SPACE_GENERIC
7882 : TYPE_ADDR_SPACE (to_type);
7883 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
7884 return build_pointer_type_for_mode (to_type, pointer_mode, false);
7885 }
7886
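/* Illustrative usage sketch (editorial addition, not part of the original
   source): pointer type nodes are cached on the TYPE_POINTER_TO chain, so
   repeated requests for the same pointee return the same node.  */
#if 0
tree p1 = build_pointer_type (integer_type_node);
tree p2 = build_pointer_type (integer_type_node);
gcc_assert (p1 == p2);			/* Reused from TYPE_POINTER_TO.  */
gcc_assert (TREE_TYPE (p1) == integer_type_node);
#endif
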
7887 /* Same as build_pointer_type_for_mode, but for REFERENCE_TYPE. */
7888
7889 tree
7890 build_reference_type_for_mode (tree to_type, machine_mode mode,
7891 bool can_alias_all)
7892 {
7893 tree t;
7894 bool could_alias = can_alias_all;
7895
7896 if (to_type == error_mark_node)
7897 return error_mark_node;
7898
7899 /* If the pointed-to type has the may_alias attribute set, force
7900 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7901 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7902 can_alias_all = true;
7903
7904 /* In some cases, languages will have things that aren't a REFERENCE_TYPE
7905 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_REFERENCE_TO.
7906 In that case, return that type without regard to the rest of our
7907 operands.
7908
7909 ??? This is a kludge, but consistent with the way this function has
7910 always operated and there doesn't seem to be a good way to avoid this
7911 at the moment. */
7912 if (TYPE_REFERENCE_TO (to_type) != 0
7913 && TREE_CODE (TYPE_REFERENCE_TO (to_type)) != REFERENCE_TYPE)
7914 return TYPE_REFERENCE_TO (to_type);
7915
7916 /* First, if we already have a type for references to TO_TYPE and it's
7917 the proper mode, use it. */
7918 for (t = TYPE_REFERENCE_TO (to_type); t; t = TYPE_NEXT_REF_TO (t))
7919 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7920 return t;
7921
7922 t = make_node (REFERENCE_TYPE);
7923
7924 TREE_TYPE (t) = to_type;
7925 SET_TYPE_MODE (t, mode);
7926 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7927 TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (to_type);
7928 TYPE_REFERENCE_TO (to_type) = t;
7929
7930 if (TYPE_STRUCTURAL_EQUALITY_P (to_type))
7931 SET_TYPE_STRUCTURAL_EQUALITY (t);
7932 else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
7933 TYPE_CANONICAL (t)
7934 = build_reference_type_for_mode (TYPE_CANONICAL (to_type),
7935 mode, false);
7936
7937 layout_type (t);
7938
7939 return t;
7940 }
7941
7942
7943 /* Build the node for the type of references-to-TO_TYPE by default
7944 in ptr_mode. */
7945
7946 tree
7947 build_reference_type (tree to_type)
7948 {
7949 addr_space_t as = to_type == error_mark_node ? ADDR_SPACE_GENERIC
7950 : TYPE_ADDR_SPACE (to_type);
7951 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
7952 return build_reference_type_for_mode (to_type, pointer_mode, false);
7953 }
7954
7955 #define MAX_INT_CACHED_PREC \
7956 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
7957 static GTY(()) tree nonstandard_integer_type_cache[2 * MAX_INT_CACHED_PREC + 2];
7958
7959 /* Builds a signed or unsigned integer type of precision PRECISION.
7960 Used for C bitfields whose precision does not match that of
7961 built-in target types. */
7962 tree
7963 build_nonstandard_integer_type (unsigned HOST_WIDE_INT precision,
7964 int unsignedp)
7965 {
7966 tree itype, ret;
7967
7968 if (unsignedp)
7969 unsignedp = MAX_INT_CACHED_PREC + 1;
7970
7971 if (precision <= MAX_INT_CACHED_PREC)
7972 {
7973 itype = nonstandard_integer_type_cache[precision + unsignedp];
7974 if (itype)
7975 return itype;
7976 }
7977
7978 itype = make_node (INTEGER_TYPE);
7979 TYPE_PRECISION (itype) = precision;
7980
7981 if (unsignedp)
7982 fixup_unsigned_type (itype);
7983 else
7984 fixup_signed_type (itype);
7985
7986 ret = itype;
7987 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (itype)))
7988 ret = type_hash_canon (tree_to_uhwi (TYPE_MAX_VALUE (itype)), itype);
7989 if (precision <= MAX_INT_CACHED_PREC)
7990 nonstandard_integer_type_cache[precision + unsignedp] = ret;
7991
7992 return ret;
7993 }
7994
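/* Illustrative usage sketch (editorial addition, not part of the original
   source): requesting an unsigned 24-bit type, e.g. for a bit-field whose
   width matches no standard integer type.  Small precisions are cached.  */
#if 0
tree u24 = build_nonstandard_integer_type (24, /*unsignedp=*/1);
gcc_assert (TYPE_PRECISION (u24) == 24 && TYPE_UNSIGNED (u24));
#endif
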
7995 /* Create a range of some discrete type TYPE (an INTEGER_TYPE, ENUMERAL_TYPE
7996 or BOOLEAN_TYPE) with low bound LOWVAL and high bound HIGHVAL. If SHARED
7997 is true, reuse such a type that has already been constructed. */
7998
7999 static tree
8000 build_range_type_1 (tree type, tree lowval, tree highval, bool shared)
8001 {
8002 tree itype = make_node (INTEGER_TYPE);
8003 inchash::hash hstate;
8004
8005 TREE_TYPE (itype) = type;
8006
8007 TYPE_MIN_VALUE (itype) = fold_convert (type, lowval);
8008 TYPE_MAX_VALUE (itype) = highval ? fold_convert (type, highval) : NULL;
8009
8010 TYPE_PRECISION (itype) = TYPE_PRECISION (type);
8011 SET_TYPE_MODE (itype, TYPE_MODE (type));
8012 TYPE_SIZE (itype) = TYPE_SIZE (type);
8013 TYPE_SIZE_UNIT (itype) = TYPE_SIZE_UNIT (type);
8014 TYPE_ALIGN (itype) = TYPE_ALIGN (type);
8015 TYPE_USER_ALIGN (itype) = TYPE_USER_ALIGN (type);
8016
8017 if (!shared)
8018 return itype;
8019
8020 if ((TYPE_MIN_VALUE (itype)
8021 && TREE_CODE (TYPE_MIN_VALUE (itype)) != INTEGER_CST)
8022 || (TYPE_MAX_VALUE (itype)
8023 && TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST))
8024 {
8025 /* Since we cannot reliably merge this type, we need to compare it using
8026 structural equality checks. */
8027 SET_TYPE_STRUCTURAL_EQUALITY (itype);
8028 return itype;
8029 }
8030
8031 inchash::add_expr (TYPE_MIN_VALUE (itype), hstate);
8032 inchash::add_expr (TYPE_MAX_VALUE (itype), hstate);
8033 hstate.merge_hash (TYPE_HASH (type));
8034 itype = type_hash_canon (hstate.end (), itype);
8035
8036 return itype;
8037 }
8038
8039 /* Wrapper around build_range_type_1 with SHARED set to true. */
8040
8041 tree
8042 build_range_type (tree type, tree lowval, tree highval)
8043 {
8044 return build_range_type_1 (type, lowval, highval, true);
8045 }
8046
8047 /* Wrapper around build_range_type_1 with SHARED set to false. */
8048
8049 tree
8050 build_nonshared_range_type (tree type, tree lowval, tree highval)
8051 {
8052 return build_range_type_1 (type, lowval, highval, false);
8053 }
8054
8055 /* Create a type of integers to be the TYPE_DOMAIN of an ARRAY_TYPE.
8056 MAXVAL should be the maximum value in the domain
8057 (one less than the length of the array).
8058
8059 The maximum value that MAXVAL can have is INT_MAX for a HOST_WIDE_INT.
8060 We don't enforce this limit; that is up to the caller (e.g. the language front end).
8061 The limit exists because the result is a signed type and we don't handle
8062 sizes that use more than one HOST_WIDE_INT. */
8063
8064 tree
8065 build_index_type (tree maxval)
8066 {
8067 return build_range_type (sizetype, size_zero_node, maxval);
8068 }
8069
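/* Illustrative usage sketch (editorial addition, not part of the original
   source): the index domain for a 10-element array runs from 0 to 9, and is
   then used as the TYPE_DOMAIN of the array type.  */
#if 0
tree domain = build_index_type (size_int (9));
tree int_array_10 = build_array_type (integer_type_node, domain);  /* int[10] */
#endif
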
8070 /* Return true if the debug information for TYPE, a subtype, should be emitted
8071 as a subrange type. If so, set LOWVAL to the low bound and HIGHVAL to the
8072 high bound, respectively. Sometimes doing so unnecessarily obfuscates the
8073 debug info and doesn't reflect the source code. */
8074
8075 bool
8076 subrange_type_for_debug_p (const_tree type, tree *lowval, tree *highval)
8077 {
8078 tree base_type = TREE_TYPE (type), low, high;
8079
8080 /* Subrange types have a base type which is an integral type. */
8081 if (!INTEGRAL_TYPE_P (base_type))
8082 return false;
8083
8084 /* Get the real bounds of the subtype. */
8085 if (lang_hooks.types.get_subrange_bounds)
8086 lang_hooks.types.get_subrange_bounds (type, &low, &high);
8087 else
8088 {
8089 low = TYPE_MIN_VALUE (type);
8090 high = TYPE_MAX_VALUE (type);
8091 }
8092
8093 /* If the type and its base type have the same representation and the same
8094 name, then the type is not a subrange but a copy of the base type. */
8095 if ((TREE_CODE (base_type) == INTEGER_TYPE
8096 || TREE_CODE (base_type) == BOOLEAN_TYPE)
8097 && int_size_in_bytes (type) == int_size_in_bytes (base_type)
8098 && tree_int_cst_equal (low, TYPE_MIN_VALUE (base_type))
8099 && tree_int_cst_equal (high, TYPE_MAX_VALUE (base_type))
8100 && TYPE_IDENTIFIER (type) == TYPE_IDENTIFIER (base_type))
8101 return false;
8102
8103 if (lowval)
8104 *lowval = low;
8105 if (highval)
8106 *highval = high;
8107 return true;
8108 }
8109
8110 /* Construct, lay out and return the type of arrays of elements with ELT_TYPE
8111 and number of elements specified by the range of values of INDEX_TYPE.
8112 If SHARED is true, reuse such a type that has already been constructed. */
8113
8114 static tree
8115 build_array_type_1 (tree elt_type, tree index_type, bool shared)
8116 {
8117 tree t;
8118
8119 if (TREE_CODE (elt_type) == FUNCTION_TYPE)
8120 {
8121 error ("arrays of functions are not meaningful");
8122 elt_type = integer_type_node;
8123 }
8124
8125 t = make_node (ARRAY_TYPE);
8126 TREE_TYPE (t) = elt_type;
8127 TYPE_DOMAIN (t) = index_type;
8128 TYPE_ADDR_SPACE (t) = TYPE_ADDR_SPACE (elt_type);
8129 layout_type (t);
8130
8131 /* If the element type is incomplete at this point we get marked for
8132 structural equality. Do not record these types in the canonical
8133 type hashtable. */
8134 if (TYPE_STRUCTURAL_EQUALITY_P (t))
8135 return t;
8136
8137 if (shared)
8138 {
8139 inchash::hash hstate;
8140 hstate.add_object (TYPE_HASH (elt_type));
8141 if (index_type)
8142 hstate.add_object (TYPE_HASH (index_type));
8143 t = type_hash_canon (hstate.end (), t);
8144 }
8145
8146 if (TYPE_CANONICAL (t) == t)
8147 {
8148 if (TYPE_STRUCTURAL_EQUALITY_P (elt_type)
8149 || (index_type && TYPE_STRUCTURAL_EQUALITY_P (index_type)))
8150 SET_TYPE_STRUCTURAL_EQUALITY (t);
8151 else if (TYPE_CANONICAL (elt_type) != elt_type
8152 || (index_type && TYPE_CANONICAL (index_type) != index_type))
8153 TYPE_CANONICAL (t)
8154 = build_array_type_1 (TYPE_CANONICAL (elt_type),
8155 index_type
8156 ? TYPE_CANONICAL (index_type) : NULL_TREE,
8157 shared);
8158 }
8159
8160 return t;
8161 }
8162
8163 /* Wrapper around build_array_type_1 with SHARED set to true. */
8164
8165 tree
8166 build_array_type (tree elt_type, tree index_type)
8167 {
8168 return build_array_type_1 (elt_type, index_type, true);
8169 }
8170
8171 /* Wrapper around build_array_type_1 with SHARED set to false. */
8172
8173 tree
8174 build_nonshared_array_type (tree elt_type, tree index_type)
8175 {
8176 return build_array_type_1 (elt_type, index_type, false);
8177 }
8178
8179 /* Return a representation of ELT_TYPE[NELTS], using indices of type
8180 sizetype. */
8181
8182 tree
8183 build_array_type_nelts (tree elt_type, unsigned HOST_WIDE_INT nelts)
8184 {
8185 return build_array_type (elt_type, build_index_type (size_int (nelts - 1)));
8186 }
8187
8188 /* Recursively examines the array elements of TYPE, until a non-array
8189 element type is found. */
8190
8191 tree
8192 strip_array_types (tree type)
8193 {
8194 while (TREE_CODE (type) == ARRAY_TYPE)
8195 type = TREE_TYPE (type);
8196
8197 return type;
8198 }
8199
8200 /* Computes the canonical argument types from the argument type list
8201 ARGTYPES.
8202
8203 Upon return, *ANY_STRUCTURAL_P will be true iff it was true
8204 on entry to this function, or if any of the ARGTYPES are
8205 structural.
8206
8207 Upon return, *ANY_NONCANONICAL_P will be true iff it was
8208 true on entry to this function, or if any of the ARGTYPES are
8209 non-canonical.
8210
8211 Returns a canonical argument list, which may be ARGTYPES when the
8212 canonical argument list is unneeded (i.e., *ANY_STRUCTURAL_P is
8213 true) or would not differ from ARGTYPES. */
8214
8215 static tree
8216 maybe_canonicalize_argtypes (tree argtypes,
8217 bool *any_structural_p,
8218 bool *any_noncanonical_p)
8219 {
8220 tree arg;
8221 bool any_noncanonical_argtypes_p = false;
8222
8223 for (arg = argtypes; arg && !(*any_structural_p); arg = TREE_CHAIN (arg))
8224 {
8225 if (!TREE_VALUE (arg) || TREE_VALUE (arg) == error_mark_node)
8226 /* Fail gracefully by stating that the type is structural. */
8227 *any_structural_p = true;
8228 else if (TYPE_STRUCTURAL_EQUALITY_P (TREE_VALUE (arg)))
8229 *any_structural_p = true;
8230 else if (TYPE_CANONICAL (TREE_VALUE (arg)) != TREE_VALUE (arg)
8231 || TREE_PURPOSE (arg))
8232 /* If the argument has a default argument, we consider it
8233 non-canonical even though the type itself is canonical.
8234 That way, different variants of function and method types
8235 with default arguments will all point to the variant with
8236 no defaults as their canonical type. */
8237 any_noncanonical_argtypes_p = true;
8238 }
8239
8240 if (*any_structural_p)
8241 return argtypes;
8242
8243 if (any_noncanonical_argtypes_p)
8244 {
8245 /* Build the canonical list of argument types. */
8246 tree canon_argtypes = NULL_TREE;
8247 bool is_void = false;
8248
8249 for (arg = argtypes; arg; arg = TREE_CHAIN (arg))
8250 {
8251 if (arg == void_list_node)
8252 is_void = true;
8253 else
8254 canon_argtypes = tree_cons (NULL_TREE,
8255 TYPE_CANONICAL (TREE_VALUE (arg)),
8256 canon_argtypes);
8257 }
8258
8259 canon_argtypes = nreverse (canon_argtypes);
8260 if (is_void)
8261 canon_argtypes = chainon (canon_argtypes, void_list_node);
8262
8263 /* There is a non-canonical type. */
8264 *any_noncanonical_p = true;
8265 return canon_argtypes;
8266 }
8267
8268 /* The canonical argument types are the same as ARGTYPES. */
8269 return argtypes;
8270 }
8271
8272 /* Construct, lay out and return
8273 the type of functions returning type VALUE_TYPE
8274 given arguments of types ARG_TYPES.
8275 ARG_TYPES is a chain of TREE_LIST nodes whose TREE_VALUEs
8276 are data type nodes for the arguments of the function.
8277 If such a type has already been constructed, reuse it. */
8278
8279 tree
8280 build_function_type (tree value_type, tree arg_types)
8281 {
8282 tree t;
8283 inchash::hash hstate;
8284 bool any_structural_p, any_noncanonical_p;
8285 tree canon_argtypes;
8286
8287 if (TREE_CODE (value_type) == FUNCTION_TYPE)
8288 {
8289 error ("function return type cannot be function");
8290 value_type = integer_type_node;
8291 }
8292
8293 /* Make a node of the sort we want. */
8294 t = make_node (FUNCTION_TYPE);
8295 TREE_TYPE (t) = value_type;
8296 TYPE_ARG_TYPES (t) = arg_types;
8297
8298 /* If we already have such a type, use the old one. */
8299 hstate.add_object (TYPE_HASH (value_type));
8300 type_hash_list (arg_types, hstate);
8301 t = type_hash_canon (hstate.end (), t);
8302
8303 /* Set up the canonical type. */
8304 any_structural_p = TYPE_STRUCTURAL_EQUALITY_P (value_type);
8305 any_noncanonical_p = TYPE_CANONICAL (value_type) != value_type;
8306 canon_argtypes = maybe_canonicalize_argtypes (arg_types,
8307 &any_structural_p,
8308 &any_noncanonical_p);
8309 if (any_structural_p)
8310 SET_TYPE_STRUCTURAL_EQUALITY (t);
8311 else if (any_noncanonical_p)
8312 TYPE_CANONICAL (t) = build_function_type (TYPE_CANONICAL (value_type),
8313 canon_argtypes);
8314
8315 if (!COMPLETE_TYPE_P (t))
8316 layout_type (t);
8317 return t;
8318 }
8319
8320 /* Build a function type. The RETURN_TYPE is the type returned by the
8321 function. If VAARGS is set, no void_type_node is appended to the
8322 list. ARGP must always be terminated by a NULL_TREE. */
8323
8324 static tree
8325 build_function_type_list_1 (bool vaargs, tree return_type, va_list argp)
8326 {
8327 tree t, args, last;
8328
8329 t = va_arg (argp, tree);
8330 for (args = NULL_TREE; t != NULL_TREE; t = va_arg (argp, tree))
8331 args = tree_cons (NULL_TREE, t, args);
8332
8333 if (vaargs)
8334 {
8335 last = args;
8336 if (args != NULL_TREE)
8337 args = nreverse (args);
8338 gcc_assert (last != void_list_node);
8339 }
8340 else if (args == NULL_TREE)
8341 args = void_list_node;
8342 else
8343 {
8344 last = args;
8345 args = nreverse (args);
8346 TREE_CHAIN (last) = void_list_node;
8347 }
8348 args = build_function_type (return_type, args);
8349
8350 return args;
8351 }
8352
8353 /* Build a function type. The RETURN_TYPE is the type returned by the
8354 function. If additional arguments are provided, they are
8355 additional argument types. The list of argument types must always
8356 be terminated by NULL_TREE. */
8357
8358 tree
8359 build_function_type_list (tree return_type, ...)
8360 {
8361 tree args;
8362 va_list p;
8363
8364 va_start (p, return_type);
8365 args = build_function_type_list_1 (false, return_type, p);
8366 va_end (p);
8367 return args;
8368 }
8369
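/* Illustrative usage sketch (editorial addition, not part of the original
   source): the type of "int f (double, char *)", with the argument list
   terminated by NULL_TREE as required.  */
#if 0
tree fntype
  = build_function_type_list (integer_type_node, double_type_node,
			      build_pointer_type (char_type_node),
			      NULL_TREE);
#endif
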
8370 /* Build a variable argument function type. The RETURN_TYPE is the
8371 type returned by the function. If additional arguments are provided,
8372 they are additional argument types. The list of argument types must
8373 always be terminated by NULL_TREE. */
8374
8375 tree
8376 build_varargs_function_type_list (tree return_type, ...)
8377 {
8378 tree args;
8379 va_list p;
8380
8381 va_start (p, return_type);
8382 args = build_function_type_list_1 (true, return_type, p);
8383 va_end (p);
8384
8385 return args;
8386 }
8387
8388 /* Build a function type. RETURN_TYPE is the type returned by the
8389 function; VAARGS indicates whether the function takes varargs. The
8390 function takes N named arguments, the types of which are provided in
8391 ARG_TYPES. */
8392
8393 static tree
8394 build_function_type_array_1 (bool vaargs, tree return_type, int n,
8395 tree *arg_types)
8396 {
8397 int i;
8398 tree t = vaargs ? NULL_TREE : void_list_node;
8399
8400 for (i = n - 1; i >= 0; i--)
8401 t = tree_cons (NULL_TREE, arg_types[i], t);
8402
8403 return build_function_type (return_type, t);
8404 }
8405
8406 /* Build a function type. RETURN_TYPE is the type returned by the
8407 function. The function takes N named arguments, the types of which
8408 are provided in ARG_TYPES. */
8409
8410 tree
8411 build_function_type_array (tree return_type, int n, tree *arg_types)
8412 {
8413 return build_function_type_array_1 (false, return_type, n, arg_types);
8414 }
8415
8416 /* Build a variable argument function type. RETURN_TYPE is the type
8417 returned by the function. The function takes N named arguments, the
8418 types of which are provided in ARG_TYPES. */
8419
8420 tree
8421 build_varargs_function_type_array (tree return_type, int n, tree *arg_types)
8422 {
8423 return build_function_type_array_1 (true, return_type, n, arg_types);
8424 }
8425
8426 /* Build a METHOD_TYPE for a member of BASETYPE. The RETTYPE (a TYPE)
8427 and ARGTYPES (a TREE_LIST) are the return type and arguments types
8428 for the method. An implicit additional parameter (of type
8429 pointer-to-BASETYPE) is added to the ARGTYPES. */
8430
8431 tree
8432 build_method_type_directly (tree basetype,
8433 tree rettype,
8434 tree argtypes)
8435 {
8436 tree t;
8437 tree ptype;
8438 inchash::hash hstate;
8439 bool any_structural_p, any_noncanonical_p;
8440 tree canon_argtypes;
8441
8442 /* Make a node of the sort we want. */
8443 t = make_node (METHOD_TYPE);
8444
8445 TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8446 TREE_TYPE (t) = rettype;
8447 ptype = build_pointer_type (basetype);
8448
8449 /* The actual arglist for this function includes a "hidden" argument
8450 which is "this". Put it into the list of argument types. */
8451 argtypes = tree_cons (NULL_TREE, ptype, argtypes);
8452 TYPE_ARG_TYPES (t) = argtypes;
8453
8454 /* If we already have such a type, use the old one. */
8455 hstate.add_object (TYPE_HASH (basetype));
8456 hstate.add_object (TYPE_HASH (rettype));
8457 type_hash_list (argtypes, hstate);
8458 t = type_hash_canon (hstate.end (), t);
8459
8460 /* Set up the canonical type. */
8461 any_structural_p
8462 = (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8463 || TYPE_STRUCTURAL_EQUALITY_P (rettype));
8464 any_noncanonical_p
8465 = (TYPE_CANONICAL (basetype) != basetype
8466 || TYPE_CANONICAL (rettype) != rettype);
8467 canon_argtypes = maybe_canonicalize_argtypes (TREE_CHAIN (argtypes),
8468 &any_structural_p,
8469 &any_noncanonical_p);
8470 if (any_structural_p)
8471 SET_TYPE_STRUCTURAL_EQUALITY (t);
8472 else if (any_noncanonical_p)
8473 TYPE_CANONICAL (t)
8474 = build_method_type_directly (TYPE_CANONICAL (basetype),
8475 TYPE_CANONICAL (rettype),
8476 canon_argtypes);
8477 if (!COMPLETE_TYPE_P (t))
8478 layout_type (t);
8479
8480 return t;
8481 }
8482
8483 /* Construct, lay out and return the type of methods belonging to class
8484 BASETYPE and whose arguments and values are described by TYPE.
8485 If that type exists already, reuse it.
8486 TYPE must be a FUNCTION_TYPE node. */
8487
8488 tree
8489 build_method_type (tree basetype, tree type)
8490 {
8491 gcc_assert (TREE_CODE (type) == FUNCTION_TYPE);
8492
8493 return build_method_type_directly (basetype,
8494 TREE_TYPE (type),
8495 TYPE_ARG_TYPES (type));
8496 }
8497
8498 /* Construct, lay out and return the type of offsets to a value
8499 of type TYPE, within an object of type BASETYPE.
8500 If a suitable offset type exists already, reuse it. */
8501
8502 tree
8503 build_offset_type (tree basetype, tree type)
8504 {
8505 tree t;
8506 inchash::hash hstate;
8507
8508 /* Make a node of the sort we want. */
8509 t = make_node (OFFSET_TYPE);
8510
8511 TYPE_OFFSET_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8512 TREE_TYPE (t) = type;
8513
8514 /* If we already have such a type, use the old one. */
8515 hstate.add_object (TYPE_HASH (basetype));
8516 hstate.add_object (TYPE_HASH (type));
8517 t = type_hash_canon (hstate.end (), t);
8518
8519 if (!COMPLETE_TYPE_P (t))
8520 layout_type (t);
8521
8522 if (TYPE_CANONICAL (t) == t)
8523 {
8524 if (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8525 || TYPE_STRUCTURAL_EQUALITY_P (type))
8526 SET_TYPE_STRUCTURAL_EQUALITY (t);
8527 else if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)) != basetype
8528 || TYPE_CANONICAL (type) != type)
8529 TYPE_CANONICAL (t)
8530 = build_offset_type (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)),
8531 TYPE_CANONICAL (type));
8532 }
8533
8534 return t;
8535 }
8536
8537 /* Create a complex type whose components are COMPONENT_TYPE. */
8538
8539 tree
8540 build_complex_type (tree component_type)
8541 {
8542 tree t;
8543 inchash::hash hstate;
8544
8545 gcc_assert (INTEGRAL_TYPE_P (component_type)
8546 || SCALAR_FLOAT_TYPE_P (component_type)
8547 || FIXED_POINT_TYPE_P (component_type));
8548
8549 /* Make a node of the sort we want. */
8550 t = make_node (COMPLEX_TYPE);
8551
8552 TREE_TYPE (t) = TYPE_MAIN_VARIANT (component_type);
8553
8554 /* If we already have such a type, use the old one. */
8555 hstate.add_object (TYPE_HASH (component_type));
8556 t = type_hash_canon (hstate.end (), t);
8557
8558 if (!COMPLETE_TYPE_P (t))
8559 layout_type (t);
8560
8561 if (TYPE_CANONICAL (t) == t)
8562 {
8563 if (TYPE_STRUCTURAL_EQUALITY_P (component_type))
8564 SET_TYPE_STRUCTURAL_EQUALITY (t);
8565 else if (TYPE_CANONICAL (component_type) != component_type)
8566 TYPE_CANONICAL (t)
8567 = build_complex_type (TYPE_CANONICAL (component_type));
8568 }
8569
8570 /* We need to create a name, since complex is a fundamental type. */
8571 if (! TYPE_NAME (t))
8572 {
8573 const char *name;
8574 if (component_type == char_type_node)
8575 name = "complex char";
8576 else if (component_type == signed_char_type_node)
8577 name = "complex signed char";
8578 else if (component_type == unsigned_char_type_node)
8579 name = "complex unsigned char";
8580 else if (component_type == short_integer_type_node)
8581 name = "complex short int";
8582 else if (component_type == short_unsigned_type_node)
8583 name = "complex short unsigned int";
8584 else if (component_type == integer_type_node)
8585 name = "complex int";
8586 else if (component_type == unsigned_type_node)
8587 name = "complex unsigned int";
8588 else if (component_type == long_integer_type_node)
8589 name = "complex long int";
8590 else if (component_type == long_unsigned_type_node)
8591 name = "complex long unsigned int";
8592 else if (component_type == long_long_integer_type_node)
8593 name = "complex long long int";
8594 else if (component_type == long_long_unsigned_type_node)
8595 name = "complex long long unsigned int";
8596 else
8597 name = 0;
8598
8599 if (name != 0)
8600 TYPE_NAME (t) = build_decl (UNKNOWN_LOCATION, TYPE_DECL,
8601 get_identifier (name), t);
8602 }
8603
8604 return build_qualified_type (t, TYPE_QUALS (component_type));
8605 }
8606
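/* Illustrative usage sketch (editorial addition, not part of the original
   source): the component of the resulting COMPLEX_TYPE is recorded as the
   main variant of the requested component type.  */
#if 0
tree c = build_complex_type (double_type_node);
gcc_assert (TREE_CODE (c) == COMPLEX_TYPE);
gcc_assert (TREE_TYPE (c) == TYPE_MAIN_VARIANT (double_type_node));
#endif
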
8607 /* If TYPE is a real or complex floating-point type and the target
8608 does not directly support arithmetic on TYPE then return the wider
8609 type to be used for arithmetic on TYPE. Otherwise, return
8610 NULL_TREE. */
8611
8612 tree
8613 excess_precision_type (tree type)
8614 {
8615 if (flag_excess_precision != EXCESS_PRECISION_FAST)
8616 {
8617 int flt_eval_method = TARGET_FLT_EVAL_METHOD;
8618 switch (TREE_CODE (type))
8619 {
8620 case REAL_TYPE:
8621 switch (flt_eval_method)
8622 {
8623 case 1:
8624 if (TYPE_MODE (type) == TYPE_MODE (float_type_node))
8625 return double_type_node;
8626 break;
8627 case 2:
8628 if (TYPE_MODE (type) == TYPE_MODE (float_type_node)
8629 || TYPE_MODE (type) == TYPE_MODE (double_type_node))
8630 return long_double_type_node;
8631 break;
8632 default:
8633 gcc_unreachable ();
8634 }
8635 break;
8636 case COMPLEX_TYPE:
8637 if (TREE_CODE (TREE_TYPE (type)) != REAL_TYPE)
8638 return NULL_TREE;
8639 switch (flt_eval_method)
8640 {
8641 case 1:
8642 if (TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (float_type_node))
8643 return complex_double_type_node;
8644 break;
8645 case 2:
8646 if (TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (float_type_node)
8647 || (TYPE_MODE (TREE_TYPE (type))
8648 == TYPE_MODE (double_type_node)))
8649 return complex_long_double_type_node;
8650 break;
8651 default:
8652 gcc_unreachable ();
8653 }
8654 break;
8655 default:
8656 break;
8657 }
8658 }
8659 return NULL_TREE;
8660 }
8661 \f
8662 /* Return OP, stripped of any conversions to wider types as much as is safe.
8663 Converting the value back to OP's type makes a value equivalent to OP.
8664
8665 If FOR_TYPE is nonzero, we return a value which, if converted to
8666 type FOR_TYPE, would be equivalent to converting OP to type FOR_TYPE.
8667
8668 OP must have integer, real or enumeral type. Pointers are not allowed!
8669
8670 There are some cases where the obvious value we could return
8671 would regenerate to OP if converted to OP's type,
8672 but would not extend like OP to wider types.
8673 If FOR_TYPE indicates such extension is contemplated, we eschew such values.
8674 For example, if OP is (unsigned short)(signed char)-1,
8675 we avoid returning (signed char)-1 if FOR_TYPE is int,
8676 even though extending that to an unsigned short would regenerate OP,
8677 since the result of extending (signed char)-1 to (int)
8678 is different from (int) OP. */
8679
8680 tree
8681 get_unwidened (tree op, tree for_type)
8682 {
8683 /* Set UNS initially if converting OP to FOR_TYPE is a zero-extension. */
8684 tree type = TREE_TYPE (op);
8685 unsigned final_prec
8686 = TYPE_PRECISION (for_type != 0 ? for_type : type);
8687 int uns
8688 = (for_type != 0 && for_type != type
8689 && final_prec > TYPE_PRECISION (type)
8690 && TYPE_UNSIGNED (type));
8691 tree win = op;
8692
8693 while (CONVERT_EXPR_P (op))
8694 {
8695 int bitschange;
8696
8697 /* TYPE_PRECISION on vector types has different meaning
8698 (TYPE_VECTOR_SUBPARTS) and casts from vectors are view conversions,
8699 so avoid them here. */
8700 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op, 0))) == VECTOR_TYPE)
8701 break;
8702
8703 bitschange = TYPE_PRECISION (TREE_TYPE (op))
8704 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0)));
8705
8706 /* Truncations are many-one so cannot be removed, unless we are
8707 later going to truncate down even further. */
8708 if (bitschange < 0
8709 && final_prec > TYPE_PRECISION (TREE_TYPE (op)))
8710 break;
8711
8712 /* See what's inside this conversion. If we decide to strip it,
8713 we will set WIN. */
8714 op = TREE_OPERAND (op, 0);
8715
8716 /* If we have not stripped any zero-extensions (uns is 0),
8717 we can strip any kind of extension.
8718 If we have previously stripped a zero-extension,
8719 only zero-extensions can safely be stripped.
8720 Any extension can be stripped if the bits it would produce
8721 are all going to be discarded later by truncating to FOR_TYPE. */
8722
8723 if (bitschange > 0)
8724 {
8725 if (! uns || final_prec <= TYPE_PRECISION (TREE_TYPE (op)))
8726 win = op;
8727 /* TYPE_UNSIGNED says whether this is a zero-extension.
8728 Let's avoid computing it if it does not affect WIN
8729 and if UNS will not be needed again. */
8730 if ((uns
8731 || CONVERT_EXPR_P (op))
8732 && TYPE_UNSIGNED (TREE_TYPE (op)))
8733 {
8734 uns = 1;
8735 win = op;
8736 }
8737 }
8738 }
8739
8740 /* If we finally reach a constant see if it fits in for_type and
8741 in that case convert it. */
8742 if (for_type
8743 && TREE_CODE (win) == INTEGER_CST
8744 && TREE_TYPE (win) != for_type
8745 && int_fits_type_p (win, for_type))
8746 win = fold_convert (for_type, win);
8747
8748 return win;
8749 }
8750 \f
8751 /* Return OP or a simpler expression for a narrower value
8752 which can be sign-extended or zero-extended to give back OP.
8753 Store in *UNSIGNEDP_PTR either 1 if the value should be zero-extended
8754 or 0 if the value should be sign-extended. */
8755
8756 tree
8757 get_narrower (tree op, int *unsignedp_ptr)
8758 {
8759 int uns = 0;
8760 int first = 1;
8761 tree win = op;
8762 bool integral_p = INTEGRAL_TYPE_P (TREE_TYPE (op));
8763
8764 while (TREE_CODE (op) == NOP_EXPR)
8765 {
8766 int bitschange
8767 = (TYPE_PRECISION (TREE_TYPE (op))
8768 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0))));
8769
8770 /* Truncations are many-one so cannot be removed. */
8771 if (bitschange < 0)
8772 break;
8773
8774 /* See what's inside this conversion. If we decide to strip it,
8775 we will set WIN. */
8776
8777 if (bitschange > 0)
8778 {
8779 op = TREE_OPERAND (op, 0);
8780 /* An extension: the outermost one can be stripped,
8781 but remember whether it is zero or sign extension. */
8782 if (first)
8783 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8784 /* Otherwise, if a sign extension has been stripped,
8785 only sign extensions can now be stripped;
8786 if a zero extension has been stripped, only zero-extensions. */
8787 else if (uns != TYPE_UNSIGNED (TREE_TYPE (op)))
8788 break;
8789 first = 0;
8790 }
8791 else /* bitschange == 0 */
8792 {
8793 /* A change in nominal type can always be stripped, but we must
8794 preserve the unsignedness. */
8795 if (first)
8796 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8797 first = 0;
8798 op = TREE_OPERAND (op, 0);
8799 /* Keep trying to narrow, but don't assign op to win if it
8800 would turn an integral type into something else. */
8801 if (INTEGRAL_TYPE_P (TREE_TYPE (op)) != integral_p)
8802 continue;
8803 }
8804
8805 win = op;
8806 }
8807
8808 if (TREE_CODE (op) == COMPONENT_REF
8809 /* Since type_for_size always gives an integer type. */
8810 && TREE_CODE (TREE_TYPE (op)) != REAL_TYPE
8811 && TREE_CODE (TREE_TYPE (op)) != FIXED_POINT_TYPE
8812 /* Ensure field is laid out already. */
8813 && DECL_SIZE (TREE_OPERAND (op, 1)) != 0
8814 && tree_fits_uhwi_p (DECL_SIZE (TREE_OPERAND (op, 1))))
8815 {
8816 unsigned HOST_WIDE_INT innerprec
8817 = tree_to_uhwi (DECL_SIZE (TREE_OPERAND (op, 1)));
8818 int unsignedp = (DECL_UNSIGNED (TREE_OPERAND (op, 1))
8819 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op, 1))));
8820 tree type = lang_hooks.types.type_for_size (innerprec, unsignedp);
8821
8822 /* We can get this structure field in a narrower type that fits it,
8823 but the resulting extension to its nominal type (a fullword type)
8824 must satisfy the same conditions as for other extensions.
8825
8826 Do this only for fields that are aligned (not bit-fields),
8827 because when bit-field insns will be used there is no
8828 advantage in doing this. */
8829
8830 if (innerprec < TYPE_PRECISION (TREE_TYPE (op))
8831 && ! DECL_BIT_FIELD (TREE_OPERAND (op, 1))
8832 && (first || uns == DECL_UNSIGNED (TREE_OPERAND (op, 1)))
8833 && type != 0)
8834 {
8835 if (first)
8836 uns = DECL_UNSIGNED (TREE_OPERAND (op, 1));
8837 win = fold_convert (type, op);
8838 }
8839 }
8840
8841 *unsignedp_ptr = uns;
8842 return win;
8843 }
8844 \f
8845 /* Returns true if integer constant C has a value that is permissible
8846 for type TYPE (an INTEGER_TYPE). */
8847
8848 bool
8849 int_fits_type_p (const_tree c, const_tree type)
8850 {
8851 tree type_low_bound, type_high_bound;
8852 bool ok_for_low_bound, ok_for_high_bound;
8853 signop sgn_c = TYPE_SIGN (TREE_TYPE (c));
8854
8855 retry:
8856 type_low_bound = TYPE_MIN_VALUE (type);
8857 type_high_bound = TYPE_MAX_VALUE (type);
8858
8859 /* If at least one bound of the type is a constant integer, we can check
8860 ourselves and maybe make a decision. If no such decision is possible, but
8861 this type is a subtype, try checking against that. Otherwise, use
8862 fits_to_tree_p, which checks against the precision.
8863
8864 Compute the status for each possibly constant bound, and return if we see
8865 one does not match. Use ok_for_xxx_bound for this purpose, assigning -1
8866 for "unknown if constant fits", 0 for "constant known *not* to fit" and 1
8867 for "constant known to fit". */
8868
8869 /* Check if c >= type_low_bound. */
8870 if (type_low_bound && TREE_CODE (type_low_bound) == INTEGER_CST)
8871 {
8872 if (tree_int_cst_lt (c, type_low_bound))
8873 return false;
8874 ok_for_low_bound = true;
8875 }
8876 else
8877 ok_for_low_bound = false;
8878
8879 /* Check if c <= type_high_bound. */
8880 if (type_high_bound && TREE_CODE (type_high_bound) == INTEGER_CST)
8881 {
8882 if (tree_int_cst_lt (type_high_bound, c))
8883 return false;
8884 ok_for_high_bound = true;
8885 }
8886 else
8887 ok_for_high_bound = false;
8888
8889 /* If the constant fits both bounds, the result is known. */
8890 if (ok_for_low_bound && ok_for_high_bound)
8891 return true;
8892
8893 /* Perform some generic filtering which may allow making a decision
8894 even if the bounds are not constant. First, negative integers
8895 never fit in unsigned types. */
8896 if (TYPE_UNSIGNED (type) && sgn_c == SIGNED && wi::neg_p (c))
8897 return false;
8898
8899 /* Second, narrower types always fit in wider ones. */
8900 if (TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (c)))
8901 return true;
8902
8903 /* Third, unsigned integers with top bit set never fit signed types. */
8904 if (!TYPE_UNSIGNED (type) && sgn_c == UNSIGNED)
8905 {
8906 int prec = GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (c))) - 1;
8907 if (prec < TYPE_PRECISION (TREE_TYPE (c)))
8908 {
8909 /* When a tree_cst is converted to a wide-int, the precision
8910 is taken from the type. However, if the precision of the
8911 mode underneath the type is smaller than that, it is
8912 possible that the value will not fit. The test below
8913 fails if any bit is set between the sign bit of the
8914 underlying mode and the top bit of the type. */
8915 if (wi::ne_p (wi::zext (c, prec - 1), c))
8916 return false;
8917 }
8918 else if (wi::neg_p (c))
8919 return false;
8920 }
8921
8922 /* If we haven't been able to decide at this point, there is nothing more we
8923 can check ourselves here. Look at the base type if we have one and it
8924 has the same precision. */
8925 if (TREE_CODE (type) == INTEGER_TYPE
8926 && TREE_TYPE (type) != 0
8927 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (type)))
8928 {
8929 type = TREE_TYPE (type);
8930 goto retry;
8931 }
8932
8933 /* Or to fits_to_tree_p, if nothing else. */
8934 return wi::fits_to_tree_p (c, type);
8935 }
8936
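/* Illustrative usage sketch (editorial addition, not part of the original
   source): on a target whose unsigned char is 8 bits wide, 300 exceeds the
   type's upper bound while 200 does not.  */
#if 0
tree c300 = build_int_cst (integer_type_node, 300);
tree c200 = build_int_cst (integer_type_node, 200);
gcc_assert (!int_fits_type_p (c300, unsigned_char_type_node));
gcc_assert (int_fits_type_p (c200, unsigned_char_type_node));
#endif
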
8937 /* Stores bounds of an integer TYPE in MIN and MAX. If TYPE has non-constant
8938 bounds or is a POINTER_TYPE, the maximum and/or minimum values that can be
8939 represented (assuming two's-complement arithmetic) within the bit
8940 precision of the type are returned instead. */
8941
8942 void
8943 get_type_static_bounds (const_tree type, mpz_t min, mpz_t max)
8944 {
8945 if (!POINTER_TYPE_P (type) && TYPE_MIN_VALUE (type)
8946 && TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST)
8947 wi::to_mpz (TYPE_MIN_VALUE (type), min, TYPE_SIGN (type));
8948 else
8949 {
8950 if (TYPE_UNSIGNED (type))
8951 mpz_set_ui (min, 0);
8952 else
8953 {
8954 wide_int mn = wi::min_value (TYPE_PRECISION (type), SIGNED);
8955 wi::to_mpz (mn, min, SIGNED);
8956 }
8957 }
8958
8959 if (!POINTER_TYPE_P (type) && TYPE_MAX_VALUE (type)
8960 && TREE_CODE (TYPE_MAX_VALUE (type)) == INTEGER_CST)
8961 wi::to_mpz (TYPE_MAX_VALUE (type), max, TYPE_SIGN (type));
8962 else
8963 {
8964 wide_int mn = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
8965 wi::to_mpz (mn, max, TYPE_SIGN (type));
8966 }
8967 }
8968
8969 /* Return true if VAR is an automatic variable defined in function FN. */
8970
8971 bool
8972 auto_var_in_fn_p (const_tree var, const_tree fn)
8973 {
8974 return (DECL_P (var) && DECL_CONTEXT (var) == fn
8975 && ((((TREE_CODE (var) == VAR_DECL && ! DECL_EXTERNAL (var))
8976 || TREE_CODE (var) == PARM_DECL)
8977 && ! TREE_STATIC (var))
8978 || TREE_CODE (var) == LABEL_DECL
8979 || TREE_CODE (var) == RESULT_DECL));
8980 }
8981
8982 /* Subprogram of following function. Called by walk_tree.
8983
8984 Return *TP if it is an automatic variable or parameter of the
8985 function passed in as DATA. */
8986
8987 static tree
8988 find_var_from_fn (tree *tp, int *walk_subtrees, void *data)
8989 {
8990 tree fn = (tree) data;
8991
8992 if (TYPE_P (*tp))
8993 *walk_subtrees = 0;
8994
8995 else if (DECL_P (*tp)
8996 && auto_var_in_fn_p (*tp, fn))
8997 return *tp;
8998
8999 return NULL_TREE;
9000 }
9001
9002 /* Returns true if T is, contains, or refers to a type with variable
9003 size. For METHOD_TYPEs and FUNCTION_TYPEs we exclude the
9004 arguments, but not the return type. If FN is nonzero, only return
9005 true if a modifier of the type or position of FN is a variable or
9006 parameter inside FN.
9007
9008 This concept is more general than that of C99 'variably modified types':
9009 in C99, a struct type is never variably modified because a VLA may not
9010 appear as a structure member. However, in GNU C, code like:
9011
9012 struct S { int i[f()]; };
9013
9014 is valid, and other languages may define similar constructs. */
9015
9016 bool
9017 variably_modified_type_p (tree type, tree fn)
9018 {
9019 tree t;
9020
9021 /* Test if T is either variable (if FN is zero) or an expression containing
9022 a variable in FN. If TYPE isn't gimplified, return true also if
9023 gimplify_one_sizepos would gimplify the expression into a local
9024 variable. */
9025 #define RETURN_TRUE_IF_VAR(T) \
9026 do { tree _t = (T); \
9027 if (_t != NULL_TREE \
9028 && _t != error_mark_node \
9029 && TREE_CODE (_t) != INTEGER_CST \
9030 && TREE_CODE (_t) != PLACEHOLDER_EXPR \
9031 && (!fn \
9032 || (!TYPE_SIZES_GIMPLIFIED (type) \
9033 && !is_gimple_sizepos (_t)) \
9034 || walk_tree (&_t, find_var_from_fn, fn, NULL))) \
9035 return true; } while (0)
9036
9037 if (type == error_mark_node)
9038 return false;
9039
9040 /* If TYPE itself has variable size, it is variably modified. */
9041 RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
9042 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
9043
9044 switch (TREE_CODE (type))
9045 {
9046 case POINTER_TYPE:
9047 case REFERENCE_TYPE:
9048 case VECTOR_TYPE:
9049 if (variably_modified_type_p (TREE_TYPE (type), fn))
9050 return true;
9051 break;
9052
9053 case FUNCTION_TYPE:
9054 case METHOD_TYPE:
9055 /* If TYPE is a function type, it is variably modified if the
9056 return type is variably modified. */
9057 if (variably_modified_type_p (TREE_TYPE (type), fn))
9058 return true;
9059 break;
9060
9061 case INTEGER_TYPE:
9062 case REAL_TYPE:
9063 case FIXED_POINT_TYPE:
9064 case ENUMERAL_TYPE:
9065 case BOOLEAN_TYPE:
9066 /* Scalar types are variably modified if their end points
9067 aren't constant. */
9068 RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
9069 RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
9070 break;
9071
9072 case RECORD_TYPE:
9073 case UNION_TYPE:
9074 case QUAL_UNION_TYPE:
9075 /* We can't see if any of the fields are variably-modified by the
9076 definition we normally use, since that would produce infinite
9077 recursion via pointers. */
9078 /* This is variably modified if some field's type is. */
9079 for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
9080 if (TREE_CODE (t) == FIELD_DECL)
9081 {
9082 RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
9083 RETURN_TRUE_IF_VAR (DECL_SIZE (t));
9084 RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
9085
9086 if (TREE_CODE (type) == QUAL_UNION_TYPE)
9087 RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
9088 }
9089 break;
9090
9091 case ARRAY_TYPE:
9092 /* Do not call ourselves to avoid infinite recursion. This is
9093 variably modified if the element type is. */
9094 RETURN_TRUE_IF_VAR (TYPE_SIZE (TREE_TYPE (type)));
9095 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (TREE_TYPE (type)));
9096 break;
9097
9098 default:
9099 break;
9100 }
9101
9102 /* The current language may have other cases to check, but in general,
9103 all other types are not variably modified. */
9104 return lang_hooks.tree_inlining.var_mod_type_p (type, fn);
9105
9106 #undef RETURN_TRUE_IF_VAR
9107 }
9108
9109 /* Given a DECL or TYPE, return the scope in which it was declared, or
9110 NULL_TREE if there is no containing scope. */
9111
9112 tree
9113 get_containing_scope (const_tree t)
9114 {
9115 return (TYPE_P (t) ? TYPE_CONTEXT (t) : DECL_CONTEXT (t));
9116 }
9117
9118 /* Return the innermost context enclosing DECL that is
9119 a FUNCTION_DECL, or zero if none. */
9120
9121 tree
9122 decl_function_context (const_tree decl)
9123 {
9124 tree context;
9125
9126 if (TREE_CODE (decl) == ERROR_MARK)
9127 return 0;
9128
9129 /* C++ virtual functions use DECL_CONTEXT for the class of the vtable
9130 where we look up the function at runtime. Such functions always take
9131 a first argument of type 'pointer to real context'.
9132
9133 C++ should really be fixed to use DECL_CONTEXT for the real context,
9134 and use something else for the "virtual context". */
9135 else if (TREE_CODE (decl) == FUNCTION_DECL && DECL_VINDEX (decl))
9136 context
9137 = TYPE_MAIN_VARIANT
9138 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
9139 else
9140 context = DECL_CONTEXT (decl);
9141
9142 while (context && TREE_CODE (context) != FUNCTION_DECL)
9143 {
9144 if (TREE_CODE (context) == BLOCK)
9145 context = BLOCK_SUPERCONTEXT (context);
9146 else
9147 context = get_containing_scope (context);
9148 }
9149
9150 return context;
9151 }
9152
9153 /* Return the innermost context enclosing DECL that is
9154 a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE, or zero if none.
9155 TYPE_DECLs and FUNCTION_DECLs are transparent to this function. */
9156
9157 tree
9158 decl_type_context (const_tree decl)
9159 {
9160 tree context = DECL_CONTEXT (decl);
9161
9162 while (context)
9163 switch (TREE_CODE (context))
9164 {
9165 case NAMESPACE_DECL:
9166 case TRANSLATION_UNIT_DECL:
9167 return NULL_TREE;
9168
9169 case RECORD_TYPE:
9170 case UNION_TYPE:
9171 case QUAL_UNION_TYPE:
9172 return context;
9173
9174 case TYPE_DECL:
9175 case FUNCTION_DECL:
9176 context = DECL_CONTEXT (context);
9177 break;
9178
9179 case BLOCK:
9180 context = BLOCK_SUPERCONTEXT (context);
9181 break;
9182
9183 default:
9184 gcc_unreachable ();
9185 }
9186
9187 return NULL_TREE;
9188 }
9189
9190 /* CALL is a CALL_EXPR. Return the declaration for the function
9191 called, or NULL_TREE if the called function cannot be
9192 determined. */
9193
9194 tree
9195 get_callee_fndecl (const_tree call)
9196 {
9197 tree addr;
9198
9199 if (call == error_mark_node)
9200 return error_mark_node;
9201
9202 /* It's invalid to call this function with anything but a
9203 CALL_EXPR. */
9204 gcc_assert (TREE_CODE (call) == CALL_EXPR);
9205
9206 /* The first operand to the CALL is the address of the function
9207 called. */
9208 addr = CALL_EXPR_FN (call);
9209
9210 /* If there is no function, return early. */
9211 if (addr == NULL_TREE)
9212 return NULL_TREE;
9213
9214 STRIP_NOPS (addr);
9215
9216 /* If this is a readonly function pointer, extract its initial value. */
9217 if (DECL_P (addr) && TREE_CODE (addr) != FUNCTION_DECL
9218 && TREE_READONLY (addr) && ! TREE_THIS_VOLATILE (addr)
9219 && DECL_INITIAL (addr))
9220 addr = DECL_INITIAL (addr);
9221
9222 /* If the address is just `&f' for some function `f', then we know
9223 that `f' is being called. */
9224 if (TREE_CODE (addr) == ADDR_EXPR
9225 && TREE_CODE (TREE_OPERAND (addr, 0)) == FUNCTION_DECL)
9226 return TREE_OPERAND (addr, 0);
9227
9228 /* We couldn't figure out what was being called. */
9229 return NULL_TREE;
9230 }
9231
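/* Illustrative usage sketch (editorial addition, not part of the original
   source): a direct call built with build_call_expr exposes its callee.
   DST, SRC and LEN stand for hypothetical operand trees.  */
#if 0
tree fndecl = builtin_decl_explicit (BUILT_IN_MEMCPY);
tree call = build_call_expr (fndecl, 3, dst, src, len);
gcc_assert (get_callee_fndecl (call) == fndecl);
#endif
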
9232 #define TREE_MEM_USAGE_SPACES 40
9233
9234 /* Print debugging information about tree nodes generated during the compile,
9235 and any language-specific information. */
9236
9237 void
9238 dump_tree_statistics (void)
9239 {
9240 if (GATHER_STATISTICS)
9241 {
9242 int i;
9243 int total_nodes, total_bytes;
9244 fprintf (stderr, "\nKind Nodes Bytes\n");
9245 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9246 total_nodes = total_bytes = 0;
9247 for (i = 0; i < (int) all_kinds; i++)
9248 {
9249 fprintf (stderr, "%-20s %7d %10d\n", tree_node_kind_names[i],
9250 tree_node_counts[i], tree_node_sizes[i]);
9251 total_nodes += tree_node_counts[i];
9252 total_bytes += tree_node_sizes[i];
9253 }
9254 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9255 fprintf (stderr, "%-20s %7d %10d\n", "Total", total_nodes, total_bytes);
9256 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9257 fprintf (stderr, "Code Nodes\n");
9258 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9259 for (i = 0; i < (int) MAX_TREE_CODES; i++)
9260 fprintf (stderr, "%-32s %7d\n", get_tree_code_name ((enum tree_code) i),
9261 tree_code_counts[i]);
9262 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9263 fprintf (stderr, "\n");
9264 ssanames_print_statistics ();
9265 fprintf (stderr, "\n");
9266 phinodes_print_statistics ();
9267 fprintf (stderr, "\n");
9268 }
9269 else
9270 fprintf (stderr, "(No per-node statistics)\n");
9271
9272 print_type_hash_statistics ();
9273 print_debug_expr_statistics ();
9274 print_value_expr_statistics ();
9275 lang_hooks.print_statistics ();
9276 }
9277 \f
9278 #define FILE_FUNCTION_FORMAT "_GLOBAL__%s_%s"
9279
9280 /* Generate a crc32 of a byte. */
9281
9282 static unsigned
9283 crc32_unsigned_bits (unsigned chksum, unsigned value, unsigned bits)
9284 {
9285 unsigned ix;
9286
9287 for (ix = bits; ix--; value <<= 1)
9288 {
9289 unsigned feedback;
9290
9291 feedback = (value ^ chksum) & 0x80000000 ? 0x04c11db7 : 0;
9292 chksum <<= 1;
9293 chksum ^= feedback;
9294 }
9295 return chksum;
9296 }
9297
9298 /* Generate a crc32 of a 32-bit unsigned. */
9299
9300 unsigned
9301 crc32_unsigned (unsigned chksum, unsigned value)
9302 {
9303 return crc32_unsigned_bits (chksum, value, 32);
9304 }
9305
9306 /* Generate a crc32 of a byte. */
9307
9308 unsigned
9309 crc32_byte (unsigned chksum, char byte)
9310 {
9311 return crc32_unsigned_bits (chksum, (unsigned) byte << 24, 8);
9312 }
9313
9314 /* Generate a crc32 of a string. */
9315
9316 unsigned
9317 crc32_string (unsigned chksum, const char *string)
9318 {
9319 do
9320 {
9321 chksum = crc32_byte (chksum, *string);
9322 }
9323 while (*string++);
9324 return chksum;
9325 }
9326
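/* Illustrative usage sketch (editorial addition, not part of the original
   source): checksums can be chained, here over a name and a 32-bit value,
   much as get_file_function_name below mixes a name into a crc32.  */
#if 0
unsigned chksum = crc32_string (0, "some_global_name");
chksum = crc32_unsigned (chksum, 0xdeadbeef);
#endif
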
9327 /* P is a string that will be used in a symbol. Mask out any characters
9328 that are not valid in that context. */
9329
9330 void
9331 clean_symbol_name (char *p)
9332 {
9333 for (; *p; p++)
9334 if (! (ISALNUM (*p)
9335 #ifndef NO_DOLLAR_IN_LABEL /* this for `$'; unlikely, but... -- kr */
9336 || *p == '$'
9337 #endif
9338 #ifndef NO_DOT_IN_LABEL /* this for `.'; unlikely, but... */
9339 || *p == '.'
9340 #endif
9341 ))
9342 *p = '_';
9343 }
9344
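/* Illustrative usage sketch (editorial addition, not part of the original
   source): invalid symbol characters are rewritten to underscores in place;
   spaces and '-' always become '_', while '.' survives only when
   NO_DOT_IN_LABEL is not defined.  */
#if 0
char buf[] = "file name-1.c";
clean_symbol_name (buf);
#endif
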
9345 /* For anonymous aggregate types, we need some sort of name to
9346 hold on to. In practice, this should not appear, but it should
9347 not be harmful if it does. */
9348 bool
9349 anon_aggrname_p (const_tree id_node)
9350 {
9351 #ifndef NO_DOT_IN_LABEL
9352 return (IDENTIFIER_POINTER (id_node)[0] == '.'
9353 && IDENTIFIER_POINTER (id_node)[1] == '_');
9354 #else /* NO_DOT_IN_LABEL */
9355 #ifndef NO_DOLLAR_IN_LABEL
9356 return (IDENTIFIER_POINTER (id_node)[0] == '$'
9357 && IDENTIFIER_POINTER (id_node)[1] == '_');
9358 #else /* NO_DOLLAR_IN_LABEL */
9359 #define ANON_AGGRNAME_PREFIX "__anon_"
9360 return (!strncmp (IDENTIFIER_POINTER (id_node), ANON_AGGRNAME_PREFIX,
9361 sizeof (ANON_AGGRNAME_PREFIX) - 1));
9362 #endif /* NO_DOLLAR_IN_LABEL */
9363 #endif /* NO_DOT_IN_LABEL */
9364 }
9365
9366 /* Return a format for an anonymous aggregate name. */
9367 const char *
9368 anon_aggrname_format ()
9369 {
9370 #ifndef NO_DOT_IN_LABEL
9371 return "._%d";
9372 #else /* NO_DOT_IN_LABEL */
9373 #ifndef NO_DOLLAR_IN_LABEL
9374 return "$_%d";
9375 #else /* NO_DOLLAR_IN_LABEL */
9376 return "__anon_%d";
9377 #endif /* NO_DOLLAR_IN_LABEL */
9378 #endif /* NO_DOT_IN_LABEL */
9379 }
9380
9381 /* Generate a name for a special-purpose function.
9382 The generated name may need to be unique across the whole link.
9383 Changes to this function may also require corresponding changes to
9384 xstrdup_mask_random.
9385 TYPE is some string to identify the purpose of this function to the
9386 linker or collect2; it must start with an uppercase letter,
9387 one of:
9388 I - for constructors
9389 D - for destructors
9390 N - for C++ anonymous namespaces
9391 F - for DWARF unwind frame information. */
9392
9393 tree
9394 get_file_function_name (const char *type)
9395 {
9396 char *buf;
9397 const char *p;
9398 char *q;
9399
9400 /* If we already have a name we know to be unique, just use that. */
9401 if (first_global_object_name)
9402 p = q = ASTRDUP (first_global_object_name);
9403 /* If the target is handling the constructors/destructors, they
9404 will be local to this file and the name is only necessary for
9405 debugging purposes.
9406 We also assign sub_I and sub_D suffixes to constructors called from
9407 the global static constructors. These are always local. */
9408 else if (((type[0] == 'I' || type[0] == 'D') && targetm.have_ctors_dtors)
9409 || (strncmp (type, "sub_", 4) == 0
9410 && (type[4] == 'I' || type[4] == 'D')))
9411 {
9412 const char *file = main_input_filename;
9413 if (! file)
9414 file = LOCATION_FILE (input_location);
9415 /* Just use the file's basename, because the full pathname
9416 might be quite long. */
9417 p = q = ASTRDUP (lbasename (file));
9418 }
9419 else
9420 {
9421 /* Otherwise, the name must be unique across the entire link.
9422 We don't have anything that we know to be unique to this translation
9423 unit, so use what we do have and throw in some randomness. */
9424 unsigned len;
9425 const char *name = weak_global_object_name;
9426 const char *file = main_input_filename;
9427
9428 if (! name)
9429 name = "";
9430 if (! file)
9431 file = LOCATION_FILE (input_location);
9432
9433 len = strlen (file);
9434 q = (char *) alloca (9 + 17 + len + 1);
9435 memcpy (q, file, len + 1);
9436
9437 snprintf (q + len, 9 + 17 + 1, "_%08X_" HOST_WIDE_INT_PRINT_HEX,
9438 crc32_string (0, name), get_random_seed (false));
9439
9440 p = q;
9441 }
9442
9443 clean_symbol_name (q);
9444 buf = (char *) alloca (sizeof (FILE_FUNCTION_FORMAT) + strlen (p)
9445 + strlen (type));
9446
9447 /* Set up the name of the file-level functions we may need.
9448 Use a global object (which is already required to be unique over
9449 the program) rather than the file name (which imposes extra
9450 constraints). */
9451 sprintf (buf, FILE_FUNCTION_FORMAT, type, p);
9452
9453 return get_identifier (buf);
9454 }
9455 \f
9456 #if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007)
9457
9458 /* Complain that the tree code of NODE does not match the expected 0
9459 terminated list of trailing codes. The trailing code list can be
9460 empty, for a more vague error message. FILE, LINE, and FUNCTION
9461 are of the caller. */
9462
9463 void
9464 tree_check_failed (const_tree node, const char *file,
9465 int line, const char *function, ...)
9466 {
9467 va_list args;
9468 const char *buffer;
9469 unsigned length = 0;
9470 enum tree_code code;
9471
9472 va_start (args, function);
9473 while ((code = (enum tree_code) va_arg (args, int)))
9474 length += 4 + strlen (get_tree_code_name (code));
9475 va_end (args);
9476 if (length)
9477 {
9478 char *tmp;
9479 va_start (args, function);
9480 length += strlen ("expected ");
9481 buffer = tmp = (char *) alloca (length);
9482 length = 0;
9483 while ((code = (enum tree_code) va_arg (args, int)))
9484 {
9485 const char *prefix = length ? " or " : "expected ";
9486
9487 strcpy (tmp + length, prefix);
9488 length += strlen (prefix);
9489 strcpy (tmp + length, get_tree_code_name (code));
9490 length += strlen (get_tree_code_name (code));
9491 }
9492 va_end (args);
9493 }
9494 else
9495 buffer = "unexpected node";
9496
9497 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9498 buffer, get_tree_code_name (TREE_CODE (node)),
9499 function, trim_filename (file), line);
9500 }
9501
9502 /* Complain that the tree code of NODE does match the expected 0
9503 terminated list of trailing codes. FILE, LINE, and FUNCTION are of
9504 the caller. */
9505
9506 void
9507 tree_not_check_failed (const_tree node, const char *file,
9508 int line, const char *function, ...)
9509 {
9510 va_list args;
9511 char *buffer;
9512 unsigned length = 0;
9513 enum tree_code code;
9514
9515 va_start (args, function);
9516 while ((code = (enum tree_code) va_arg (args, int)))
9517 length += 4 + strlen (get_tree_code_name (code));
9518 va_end (args);
9519 va_start (args, function);
9520 buffer = (char *) alloca (length);
9521 length = 0;
9522 while ((code = (enum tree_code) va_arg (args, int)))
9523 {
9524 if (length)
9525 {
9526 strcpy (buffer + length, " or ");
9527 length += 4;
9528 }
9529 strcpy (buffer + length, get_tree_code_name (code));
9530 length += strlen (get_tree_code_name (code));
9531 }
9532 va_end (args);
9533
9534 internal_error ("tree check: expected none of %s, have %s in %s, at %s:%d",
9535 buffer, get_tree_code_name (TREE_CODE (node)),
9536 function, trim_filename (file), line);
9537 }
9538
9539 /* Similar to tree_check_failed, except that we check for a class of tree
9540 code, given in CL. */
9541
9542 void
9543 tree_class_check_failed (const_tree node, const enum tree_code_class cl,
9544 const char *file, int line, const char *function)
9545 {
9546 internal_error
9547 ("tree check: expected class %qs, have %qs (%s) in %s, at %s:%d",
9548 TREE_CODE_CLASS_STRING (cl),
9549 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9550 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9551 }
9552
9553 /* Similar to tree_check_failed, except that instead of specifying a
9554 dozen codes, use the knowledge that they're all sequential. */
9555
9556 void
9557 tree_range_check_failed (const_tree node, const char *file, int line,
9558 const char *function, enum tree_code c1,
9559 enum tree_code c2)
9560 {
9561 char *buffer;
9562 unsigned length = 0;
9563 unsigned int c;
9564
9565 for (c = c1; c <= c2; ++c)
9566 length += 4 + strlen (get_tree_code_name ((enum tree_code) c));
9567
9568 length += strlen ("expected ");
9569 buffer = (char *) alloca (length);
9570 length = 0;
9571
9572 for (c = c1; c <= c2; ++c)
9573 {
9574 const char *prefix = length ? " or " : "expected ";
9575
9576 strcpy (buffer + length, prefix);
9577 length += strlen (prefix);
9578 strcpy (buffer + length, get_tree_code_name ((enum tree_code) c));
9579 length += strlen (get_tree_code_name ((enum tree_code) c));
9580 }
9581
9582 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9583 buffer, get_tree_code_name (TREE_CODE (node)),
9584 function, trim_filename (file), line);
9585 }
9586
9587
9588 /* Similar to tree_check_failed, except that we check that a tree does
9589 not belong to the specified code class, given in CL. */
9590
9591 void
9592 tree_not_class_check_failed (const_tree node, const enum tree_code_class cl,
9593 const char *file, int line, const char *function)
9594 {
9595 internal_error
9596 ("tree check: did not expect class %qs, have %qs (%s) in %s, at %s:%d",
9597 TREE_CODE_CLASS_STRING (cl),
9598 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9599 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9600 }
9601
9602
9603 /* Similar to tree_check_failed but applied to OMP_CLAUSE codes. */
9604
9605 void
9606 omp_clause_check_failed (const_tree node, const char *file, int line,
9607 const char *function, enum omp_clause_code code)
9608 {
9609 internal_error ("tree check: expected omp_clause %s, have %s in %s, at %s:%d",
9610 omp_clause_code_name[code], get_tree_code_name (TREE_CODE (node)),
9611 function, trim_filename (file), line);
9612 }
9613
9614
9615 /* Similar to tree_range_check_failed but applied to OMP_CLAUSE codes. */
9616
9617 void
9618 omp_clause_range_check_failed (const_tree node, const char *file, int line,
9619 const char *function, enum omp_clause_code c1,
9620 enum omp_clause_code c2)
9621 {
9622 char *buffer;
9623 unsigned length = 0;
9624 unsigned int c;
9625
9626 for (c = c1; c <= c2; ++c)
9627 length += 4 + strlen (omp_clause_code_name[c]);
9628
9629 length += strlen ("expected ");
9630 buffer = (char *) alloca (length);
9631 length = 0;
9632
9633 for (c = c1; c <= c2; ++c)
9634 {
9635 const char *prefix = length ? " or " : "expected ";
9636
9637 strcpy (buffer + length, prefix);
9638 length += strlen (prefix);
9639 strcpy (buffer + length, omp_clause_code_name[c]);
9640 length += strlen (omp_clause_code_name[c]);
9641 }
9642
9643 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9644 buffer, omp_clause_code_name[OMP_CLAUSE_CODE (node)],
9645 function, trim_filename (file), line);
9646 }
9647
9648
9649 #undef DEFTREESTRUCT
9650 #define DEFTREESTRUCT(VAL, NAME) NAME,
9651
9652 static const char *ts_enum_names[] = {
9653 #include "treestruct.def"
9654 };
9655 #undef DEFTREESTRUCT
9656
9657 #define TS_ENUM_NAME(EN) (ts_enum_names[(EN)])
9658
9659 /* Similar to tree_class_check_failed, except that we check whether
9660 the code of NODE contains the tree structure identified by EN. */
9661
9662 void
9663 tree_contains_struct_check_failed (const_tree node,
9664 const enum tree_node_structure_enum en,
9665 const char *file, int line,
9666 const char *function)
9667 {
9668 internal_error
9669 ("tree check: expected tree that contains %qs structure, have %qs in %s, at %s:%d",
9670 TS_ENUM_NAME (en),
9671 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9672 }
9673
9674
9675 /* Similar to above, except that the check is for the bounds of a
9676 TREE_INT_CST's (dynamically sized) vector of elements. */
9677
9678 void
9679 tree_int_cst_elt_check_failed (int idx, int len, const char *file, int line,
9680 const char *function)
9681 {
9682 internal_error
9683 ("tree check: accessed elt %d of tree_int_cst with %d elts in %s, at %s:%d",
9684 idx + 1, len, function, trim_filename (file), line);
9685 }
9686
9687 /* Similar to above, except that the check is for the bounds of a TREE_VEC's
9688 (dynamically sized) vector. */
9689
9690 void
9691 tree_vec_elt_check_failed (int idx, int len, const char *file, int line,
9692 const char *function)
9693 {
9694 internal_error
9695 ("tree check: accessed elt %d of tree_vec with %d elts in %s, at %s:%d",
9696 idx + 1, len, function, trim_filename (file), line);
9697 }
9698
9699 /* Similar to above, except that the check is for the bounds of the operand
9700 vector of an expression node EXP. */
9701
9702 void
9703 tree_operand_check_failed (int idx, const_tree exp, const char *file,
9704 int line, const char *function)
9705 {
9706 enum tree_code code = TREE_CODE (exp);
9707 internal_error
9708 ("tree check: accessed operand %d of %s with %d operands in %s, at %s:%d",
9709 idx + 1, get_tree_code_name (code), TREE_OPERAND_LENGTH (exp),
9710 function, trim_filename (file), line);
9711 }
9712
9713 /* Similar to above, except that the check is for the number of
9714 operands of an OMP_CLAUSE node. */
9715
9716 void
9717 omp_clause_operand_check_failed (int idx, const_tree t, const char *file,
9718 int line, const char *function)
9719 {
9720 internal_error
9721 ("tree check: accessed operand %d of omp_clause %s with %d operands "
9722 "in %s, at %s:%d", idx + 1, omp_clause_code_name[OMP_CLAUSE_CODE (t)],
9723 omp_clause_num_ops [OMP_CLAUSE_CODE (t)], function,
9724 trim_filename (file), line);
9725 }
9726 #endif /* ENABLE_TREE_CHECKING */
9727 \f
9728 /* Create a new vector type node holding SUBPARTS units of type INNERTYPE,
9729 and mapped to the machine mode MODE. Initialize its fields and build
9730 the information necessary for debugging output. */
9731
9732 static tree
9733 make_vector_type (tree innertype, int nunits, machine_mode mode)
9734 {
9735 tree t;
9736 inchash::hash hstate;
9737
9738 t = make_node (VECTOR_TYPE);
9739 TREE_TYPE (t) = TYPE_MAIN_VARIANT (innertype);
9740 SET_TYPE_VECTOR_SUBPARTS (t, nunits);
9741 SET_TYPE_MODE (t, mode);
9742
9743 if (TYPE_STRUCTURAL_EQUALITY_P (innertype))
9744 SET_TYPE_STRUCTURAL_EQUALITY (t);
9745 else if (TYPE_CANONICAL (innertype) != innertype
9746 || mode != VOIDmode)
9747 TYPE_CANONICAL (t)
9748 = make_vector_type (TYPE_CANONICAL (innertype), nunits, VOIDmode);
9749
9750 layout_type (t);
9751
9752 hstate.add_wide_int (VECTOR_TYPE);
9753 hstate.add_wide_int (nunits);
9754 hstate.add_wide_int (mode);
9755 hstate.add_object (TYPE_HASH (TREE_TYPE (t)));
9756 t = type_hash_canon (hstate.end (), t);
9757
9758 /* We have built a main variant, based on the main variant of the
9759 inner type. Use it to build the variant we return. */
9760 if ((TYPE_ATTRIBUTES (innertype) || TYPE_QUALS (innertype))
9761 && TREE_TYPE (t) != innertype)
9762 return build_type_attribute_qual_variant (t,
9763 TYPE_ATTRIBUTES (innertype),
9764 TYPE_QUALS (innertype));
9765
9766 return t;
9767 }
9768
9769 static tree
9770 make_or_reuse_type (unsigned size, int unsignedp)
9771 {
9772 int i;
9773
9774 if (size == INT_TYPE_SIZE)
9775 return unsignedp ? unsigned_type_node : integer_type_node;
9776 if (size == CHAR_TYPE_SIZE)
9777 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
9778 if (size == SHORT_TYPE_SIZE)
9779 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
9780 if (size == LONG_TYPE_SIZE)
9781 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
9782 if (size == LONG_LONG_TYPE_SIZE)
9783 return (unsignedp ? long_long_unsigned_type_node
9784 : long_long_integer_type_node);
9785
9786 for (i = 0; i < NUM_INT_N_ENTS; i ++)
9787 if (size == int_n_data[i].bitsize
9788 && int_n_enabled_p[i])
9789 return (unsignedp ? int_n_trees[i].unsigned_type
9790 : int_n_trees[i].signed_type);
9791
9792 if (unsignedp)
9793 return make_unsigned_type (size);
9794 else
9795 return make_signed_type (size);
9796 }
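/* A hypothetical illustration (not part of the original source): on a
   target where INT_TYPE_SIZE is 32 and LONG_TYPE_SIZE is 64,

     make_or_reuse_type (32, 0)  -- returns integer_type_node
     make_or_reuse_type (64, 1)  -- returns long_unsigned_type_node
     make_or_reuse_type (24, 1)  -- no standard match, so it falls through
                                    to make_unsigned_type (24)

   i.e. existing nodes are shared whenever the requested size matches one
   of the standard C types or an enabled __intN type.  */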
9797
9798 /* Create or reuse a fract type by SIZE, UNSIGNEDP, and SATP. */
9799
9800 static tree
9801 make_or_reuse_fract_type (unsigned size, int unsignedp, int satp)
9802 {
9803 if (satp)
9804 {
9805 if (size == SHORT_FRACT_TYPE_SIZE)
9806 return unsignedp ? sat_unsigned_short_fract_type_node
9807 : sat_short_fract_type_node;
9808 if (size == FRACT_TYPE_SIZE)
9809 return unsignedp ? sat_unsigned_fract_type_node : sat_fract_type_node;
9810 if (size == LONG_FRACT_TYPE_SIZE)
9811 return unsignedp ? sat_unsigned_long_fract_type_node
9812 : sat_long_fract_type_node;
9813 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9814 return unsignedp ? sat_unsigned_long_long_fract_type_node
9815 : sat_long_long_fract_type_node;
9816 }
9817 else
9818 {
9819 if (size == SHORT_FRACT_TYPE_SIZE)
9820 return unsignedp ? unsigned_short_fract_type_node
9821 : short_fract_type_node;
9822 if (size == FRACT_TYPE_SIZE)
9823 return unsignedp ? unsigned_fract_type_node : fract_type_node;
9824 if (size == LONG_FRACT_TYPE_SIZE)
9825 return unsignedp ? unsigned_long_fract_type_node
9826 : long_fract_type_node;
9827 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9828 return unsignedp ? unsigned_long_long_fract_type_node
9829 : long_long_fract_type_node;
9830 }
9831
9832 return make_fract_type (size, unsignedp, satp);
9833 }
9834
9835 /* Create or reuse an accum type by SIZE, UNSIGNEDP, and SATP. */
9836
9837 static tree
9838 make_or_reuse_accum_type (unsigned size, int unsignedp, int satp)
9839 {
9840 if (satp)
9841 {
9842 if (size == SHORT_ACCUM_TYPE_SIZE)
9843 return unsignedp ? sat_unsigned_short_accum_type_node
9844 : sat_short_accum_type_node;
9845 if (size == ACCUM_TYPE_SIZE)
9846 return unsignedp ? sat_unsigned_accum_type_node : sat_accum_type_node;
9847 if (size == LONG_ACCUM_TYPE_SIZE)
9848 return unsignedp ? sat_unsigned_long_accum_type_node
9849 : sat_long_accum_type_node;
9850 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9851 return unsignedp ? sat_unsigned_long_long_accum_type_node
9852 : sat_long_long_accum_type_node;
9853 }
9854 else
9855 {
9856 if (size == SHORT_ACCUM_TYPE_SIZE)
9857 return unsignedp ? unsigned_short_accum_type_node
9858 : short_accum_type_node;
9859 if (size == ACCUM_TYPE_SIZE)
9860 return unsignedp ? unsigned_accum_type_node : accum_type_node;
9861 if (size == LONG_ACCUM_TYPE_SIZE)
9862 return unsignedp ? unsigned_long_accum_type_node
9863 : long_accum_type_node;
9864 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9865 return unsignedp ? unsigned_long_long_accum_type_node
9866 : long_long_accum_type_node;
9867 }
9868
9869 return make_accum_type (size, unsignedp, satp);
9870 }
9871
9872
9873 /* Create an atomic variant node for TYPE. This routine is called
9874 during initialization of data types to create the 5 basic atomic
9875 types. The generic build_variant_type function requires these to
9876 already be set up in order to function properly, so cannot be
9877 called from there. If ALIGN is non-zero, then ensure alignment is
9878 overridden to this value. */
9879
9880 static tree
9881 build_atomic_base (tree type, unsigned int align)
9882 {
9883 tree t;
9884
9885 /* Make sure it's not already registered. */
9886 if ((t = get_qualified_type (type, TYPE_QUAL_ATOMIC)))
9887 return t;
9888
9889 t = build_variant_type_copy (type);
9890 set_type_quals (t, TYPE_QUAL_ATOMIC);
9891
9892 if (align)
9893 TYPE_ALIGN (t) = align;
9894
9895 return t;
9896 }
9897
9898 /* Create nodes for all integer types (and error_mark_node) using the sizes
9899 of C datatypes. SIGNED_CHAR specifies whether char is signed,
9900 SHORT_DOUBLE specifies whether double should be of the same precision
9901 as float. */
9902
9903 void
9904 build_common_tree_nodes (bool signed_char, bool short_double)
9905 {
9906 int i;
9907
9908 error_mark_node = make_node (ERROR_MARK);
9909 TREE_TYPE (error_mark_node) = error_mark_node;
9910
9911 initialize_sizetypes ();
9912
9913 /* Define both `signed char' and `unsigned char'. */
9914 signed_char_type_node = make_signed_type (CHAR_TYPE_SIZE);
9915 TYPE_STRING_FLAG (signed_char_type_node) = 1;
9916 unsigned_char_type_node = make_unsigned_type (CHAR_TYPE_SIZE);
9917 TYPE_STRING_FLAG (unsigned_char_type_node) = 1;
9918
9919 /* Define `char', which is like either `signed char' or `unsigned char'
9920 but not the same as either. */
9921 char_type_node
9922 = (signed_char
9923 ? make_signed_type (CHAR_TYPE_SIZE)
9924 : make_unsigned_type (CHAR_TYPE_SIZE));
9925 TYPE_STRING_FLAG (char_type_node) = 1;
9926
9927 short_integer_type_node = make_signed_type (SHORT_TYPE_SIZE);
9928 short_unsigned_type_node = make_unsigned_type (SHORT_TYPE_SIZE);
9929 integer_type_node = make_signed_type (INT_TYPE_SIZE);
9930 unsigned_type_node = make_unsigned_type (INT_TYPE_SIZE);
9931 long_integer_type_node = make_signed_type (LONG_TYPE_SIZE);
9932 long_unsigned_type_node = make_unsigned_type (LONG_TYPE_SIZE);
9933 long_long_integer_type_node = make_signed_type (LONG_LONG_TYPE_SIZE);
9934 long_long_unsigned_type_node = make_unsigned_type (LONG_LONG_TYPE_SIZE);
9935
9936 for (i = 0; i < NUM_INT_N_ENTS; i ++)
9937 {
9938 int_n_trees[i].signed_type = make_signed_type (int_n_data[i].bitsize);
9939 int_n_trees[i].unsigned_type = make_unsigned_type (int_n_data[i].bitsize);
9940 TYPE_SIZE (int_n_trees[i].signed_type) = bitsize_int (int_n_data[i].bitsize);
9941 TYPE_SIZE (int_n_trees[i].unsigned_type) = bitsize_int (int_n_data[i].bitsize);
9942
9943 if (int_n_data[i].bitsize > LONG_LONG_TYPE_SIZE
9944 && int_n_enabled_p[i])
9945 {
9946 integer_types[itk_intN_0 + i * 2] = int_n_trees[i].signed_type;
9947 integer_types[itk_unsigned_intN_0 + i * 2] = int_n_trees[i].unsigned_type;
9948 }
9949 }
9950
9951 /* Define a boolean type. This type only represents boolean values but
9952 may be larger than char depending on the value of BOOL_TYPE_SIZE. */
9953 boolean_type_node = make_unsigned_type (BOOL_TYPE_SIZE);
9954 TREE_SET_CODE (boolean_type_node, BOOLEAN_TYPE);
9955 TYPE_PRECISION (boolean_type_node) = 1;
9956 TYPE_MAX_VALUE (boolean_type_node) = build_int_cst (boolean_type_node, 1);
9957
9958 /* Define what type to use for size_t. */
9959 if (strcmp (SIZE_TYPE, "unsigned int") == 0)
9960 size_type_node = unsigned_type_node;
9961 else if (strcmp (SIZE_TYPE, "long unsigned int") == 0)
9962 size_type_node = long_unsigned_type_node;
9963 else if (strcmp (SIZE_TYPE, "long long unsigned int") == 0)
9964 size_type_node = long_long_unsigned_type_node;
9965 else if (strcmp (SIZE_TYPE, "short unsigned int") == 0)
9966 size_type_node = short_unsigned_type_node;
9967 else
9968 {
9969 int i;
9970
9971 size_type_node = NULL_TREE;
9972 for (i = 0; i < NUM_INT_N_ENTS; i++)
9973 if (int_n_enabled_p[i])
9974 {
9975 char name[50];
9976 sprintf (name, "__int%d unsigned", int_n_data[i].bitsize);
9977
9978 if (strcmp (name, SIZE_TYPE) == 0)
9979 {
9980 size_type_node = int_n_trees[i].unsigned_type;
9981 }
9982 }
9983 if (size_type_node == NULL_TREE)
9984 gcc_unreachable ();
9985 }
9986
9987 /* Fill in the rest of the sized types. Reuse existing type nodes
9988 when possible. */
9989 intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 0);
9990 intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 0);
9991 intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 0);
9992 intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 0);
9993 intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 0);
9994
9995 unsigned_intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 1);
9996 unsigned_intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 1);
9997 unsigned_intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 1);
9998 unsigned_intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 1);
9999 unsigned_intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 1);
10000
10001 /* Don't call build_qualified_type for atomics. That routine does
10002 special processing for atomics, and until they are initialized
10003 it's better not to make that call.
10004
10005 Check to see if there is a target override for atomic types. */
10006
10007 atomicQI_type_node = build_atomic_base (unsigned_intQI_type_node,
10008 targetm.atomic_align_for_mode (QImode));
10009 atomicHI_type_node = build_atomic_base (unsigned_intHI_type_node,
10010 targetm.atomic_align_for_mode (HImode));
10011 atomicSI_type_node = build_atomic_base (unsigned_intSI_type_node,
10012 targetm.atomic_align_for_mode (SImode));
10013 atomicDI_type_node = build_atomic_base (unsigned_intDI_type_node,
10014 targetm.atomic_align_for_mode (DImode));
10015 atomicTI_type_node = build_atomic_base (unsigned_intTI_type_node,
10016 targetm.atomic_align_for_mode (TImode));
10017
10018 access_public_node = get_identifier ("public");
10019 access_protected_node = get_identifier ("protected");
10020 access_private_node = get_identifier ("private");
10021
10022 /* Define these next since types below may use them. */
10023 integer_zero_node = build_int_cst (integer_type_node, 0);
10024 integer_one_node = build_int_cst (integer_type_node, 1);
10025 integer_three_node = build_int_cst (integer_type_node, 3);
10026 integer_minus_one_node = build_int_cst (integer_type_node, -1);
10027
10028 size_zero_node = size_int (0);
10029 size_one_node = size_int (1);
10030 bitsize_zero_node = bitsize_int (0);
10031 bitsize_one_node = bitsize_int (1);
10032 bitsize_unit_node = bitsize_int (BITS_PER_UNIT);
10033
10034 boolean_false_node = TYPE_MIN_VALUE (boolean_type_node);
10035 boolean_true_node = TYPE_MAX_VALUE (boolean_type_node);
10036
10037 void_type_node = make_node (VOID_TYPE);
10038 layout_type (void_type_node);
10039
10040 pointer_bounds_type_node = targetm.chkp_bound_type ();
10041
10042 /* We are not going to have real types in C with less than byte alignment,
10043 so we might as well not have any types that claim to have it. */
10044 TYPE_ALIGN (void_type_node) = BITS_PER_UNIT;
10045 TYPE_USER_ALIGN (void_type_node) = 0;
10046
10047 void_node = make_node (VOID_CST);
10048 TREE_TYPE (void_node) = void_type_node;
10049
10050 null_pointer_node = build_int_cst (build_pointer_type (void_type_node), 0);
10051 layout_type (TREE_TYPE (null_pointer_node));
10052
10053 ptr_type_node = build_pointer_type (void_type_node);
10054 const_ptr_type_node
10055 = build_pointer_type (build_type_variant (void_type_node, 1, 0));
10056 fileptr_type_node = ptr_type_node;
10057
10058 pointer_sized_int_node = build_nonstandard_integer_type (POINTER_SIZE, 1);
10059
10060 float_type_node = make_node (REAL_TYPE);
10061 TYPE_PRECISION (float_type_node) = FLOAT_TYPE_SIZE;
10062 layout_type (float_type_node);
10063
10064 double_type_node = make_node (REAL_TYPE);
10065 if (short_double)
10066 TYPE_PRECISION (double_type_node) = FLOAT_TYPE_SIZE;
10067 else
10068 TYPE_PRECISION (double_type_node) = DOUBLE_TYPE_SIZE;
10069 layout_type (double_type_node);
10070
10071 long_double_type_node = make_node (REAL_TYPE);
10072 TYPE_PRECISION (long_double_type_node) = LONG_DOUBLE_TYPE_SIZE;
10073 layout_type (long_double_type_node);
10074
10075 float_ptr_type_node = build_pointer_type (float_type_node);
10076 double_ptr_type_node = build_pointer_type (double_type_node);
10077 long_double_ptr_type_node = build_pointer_type (long_double_type_node);
10078 integer_ptr_type_node = build_pointer_type (integer_type_node);
10079
10080 /* Fixed size integer types. */
10081 uint16_type_node = make_or_reuse_type (16, 1);
10082 uint32_type_node = make_or_reuse_type (32, 1);
10083 uint64_type_node = make_or_reuse_type (64, 1);
10084
10085 /* Decimal float types. */
10086 dfloat32_type_node = make_node (REAL_TYPE);
10087 TYPE_PRECISION (dfloat32_type_node) = DECIMAL32_TYPE_SIZE;
10088 layout_type (dfloat32_type_node);
10089 SET_TYPE_MODE (dfloat32_type_node, SDmode);
10090 dfloat32_ptr_type_node = build_pointer_type (dfloat32_type_node);
10091
10092 dfloat64_type_node = make_node (REAL_TYPE);
10093 TYPE_PRECISION (dfloat64_type_node) = DECIMAL64_TYPE_SIZE;
10094 layout_type (dfloat64_type_node);
10095 SET_TYPE_MODE (dfloat64_type_node, DDmode);
10096 dfloat64_ptr_type_node = build_pointer_type (dfloat64_type_node);
10097
10098 dfloat128_type_node = make_node (REAL_TYPE);
10099 TYPE_PRECISION (dfloat128_type_node) = DECIMAL128_TYPE_SIZE;
10100 layout_type (dfloat128_type_node);
10101 SET_TYPE_MODE (dfloat128_type_node, TDmode);
10102 dfloat128_ptr_type_node = build_pointer_type (dfloat128_type_node);
10103
10104 complex_integer_type_node = build_complex_type (integer_type_node);
10105 complex_float_type_node = build_complex_type (float_type_node);
10106 complex_double_type_node = build_complex_type (double_type_node);
10107 complex_long_double_type_node = build_complex_type (long_double_type_node);
10108
10109 /* Make fixed-point nodes based on sat/non-sat and signed/unsigned. */
10110 #define MAKE_FIXED_TYPE_NODE(KIND,SIZE) \
10111 sat_ ## KIND ## _type_node = \
10112 make_sat_signed_ ## KIND ## _type (SIZE); \
10113 sat_unsigned_ ## KIND ## _type_node = \
10114 make_sat_unsigned_ ## KIND ## _type (SIZE); \
10115 KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
10116 unsigned_ ## KIND ## _type_node = \
10117 make_unsigned_ ## KIND ## _type (SIZE);
10118
10119 #define MAKE_FIXED_TYPE_NODE_WIDTH(KIND,WIDTH,SIZE) \
10120 sat_ ## WIDTH ## KIND ## _type_node = \
10121 make_sat_signed_ ## KIND ## _type (SIZE); \
10122 sat_unsigned_ ## WIDTH ## KIND ## _type_node = \
10123 make_sat_unsigned_ ## KIND ## _type (SIZE); \
10124 WIDTH ## KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
10125 unsigned_ ## WIDTH ## KIND ## _type_node = \
10126 make_unsigned_ ## KIND ## _type (SIZE);
10127
10128 /* Make fixed-point type nodes based on four different widths. */
10129 #define MAKE_FIXED_TYPE_NODE_FAMILY(N1,N2) \
10130 MAKE_FIXED_TYPE_NODE_WIDTH (N1, short_, SHORT_ ## N2 ## _TYPE_SIZE) \
10131 MAKE_FIXED_TYPE_NODE (N1, N2 ## _TYPE_SIZE) \
10132 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_, LONG_ ## N2 ## _TYPE_SIZE) \
10133 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_long_, LONG_LONG_ ## N2 ## _TYPE_SIZE)
10134
10135 /* Make fixed-point mode nodes based on sat/non-sat and signed/unsigned. */
10136 #define MAKE_FIXED_MODE_NODE(KIND,NAME,MODE) \
10137 NAME ## _type_node = \
10138 make_or_reuse_signed_ ## KIND ## _type (GET_MODE_BITSIZE (MODE ## mode)); \
10139 u ## NAME ## _type_node = \
10140 make_or_reuse_unsigned_ ## KIND ## _type \
10141 (GET_MODE_BITSIZE (U ## MODE ## mode)); \
10142 sat_ ## NAME ## _type_node = \
10143 make_or_reuse_sat_signed_ ## KIND ## _type \
10144 (GET_MODE_BITSIZE (MODE ## mode)); \
10145 sat_u ## NAME ## _type_node = \
10146 make_or_reuse_sat_unsigned_ ## KIND ## _type \
10147 (GET_MODE_BITSIZE (U ## MODE ## mode));
10148
10149 /* Fixed-point type and mode nodes. */
10150 MAKE_FIXED_TYPE_NODE_FAMILY (fract, FRACT)
10151 MAKE_FIXED_TYPE_NODE_FAMILY (accum, ACCUM)
10152 MAKE_FIXED_MODE_NODE (fract, qq, QQ)
10153 MAKE_FIXED_MODE_NODE (fract, hq, HQ)
10154 MAKE_FIXED_MODE_NODE (fract, sq, SQ)
10155 MAKE_FIXED_MODE_NODE (fract, dq, DQ)
10156 MAKE_FIXED_MODE_NODE (fract, tq, TQ)
10157 MAKE_FIXED_MODE_NODE (accum, ha, HA)
10158 MAKE_FIXED_MODE_NODE (accum, sa, SA)
10159 MAKE_FIXED_MODE_NODE (accum, da, DA)
10160 MAKE_FIXED_MODE_NODE (accum, ta, TA)
10161
10162 {
10163 tree t = targetm.build_builtin_va_list ();
10164
10165 /* Many back-ends define record types without setting TYPE_NAME.
10166 If we copied the record type here, we'd keep the original
10167 record type without a name. This breaks name mangling. So,
10168 don't copy record types and let c_common_nodes_and_builtins()
10169 declare the type to be __builtin_va_list. */
10170 if (TREE_CODE (t) != RECORD_TYPE)
10171 t = build_variant_type_copy (t);
10172
10173 va_list_type_node = t;
10174 }
10175 }
10176
10177 /* Modify DECL for given flags.
10178 TM_PURE attribute is set only on types, so the function will modify
10179 DECL's type when ECF_TM_PURE is used. */
10180
10181 void
10182 set_call_expr_flags (tree decl, int flags)
10183 {
10184 if (flags & ECF_NOTHROW)
10185 TREE_NOTHROW (decl) = 1;
10186 if (flags & ECF_CONST)
10187 TREE_READONLY (decl) = 1;
10188 if (flags & ECF_PURE)
10189 DECL_PURE_P (decl) = 1;
10190 if (flags & ECF_LOOPING_CONST_OR_PURE)
10191 DECL_LOOPING_CONST_OR_PURE_P (decl) = 1;
10192 if (flags & ECF_NOVOPS)
10193 DECL_IS_NOVOPS (decl) = 1;
10194 if (flags & ECF_NORETURN)
10195 TREE_THIS_VOLATILE (decl) = 1;
10196 if (flags & ECF_MALLOC)
10197 DECL_IS_MALLOC (decl) = 1;
10198 if (flags & ECF_RETURNS_TWICE)
10199 DECL_IS_RETURNS_TWICE (decl) = 1;
10200 if (flags & ECF_LEAF)
10201 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("leaf"),
10202 NULL, DECL_ATTRIBUTES (decl));
10203 if ((flags & ECF_TM_PURE) && flag_tm)
10204 apply_tm_attr (decl, get_identifier ("transaction_pure"));
10205 /* Looping const or pure is implied by noreturn.
10206 There is currently no way to declare looping const or looping pure alone. */
10207 gcc_assert (!(flags & ECF_LOOPING_CONST_OR_PURE)
10208 || ((flags & ECF_NORETURN) && (flags & (ECF_CONST | ECF_PURE))));
10209 }
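/* Illustrative sketch (not in the original source): marking a declaration
   the way build_common_builtin_nodes does for memcmp-like functions,

     set_call_expr_flags (decl, ECF_PURE | ECF_NOTHROW | ECF_LEAF);

   sets DECL_PURE_P and TREE_NOTHROW on DECL and appends a "leaf" attribute
   to DECL_ATTRIBUTES; DECL stands for any FUNCTION_DECL of the caller.  */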
10210
10211
10212 /* A subroutine of build_common_builtin_nodes. Define a builtin function. */
10213
10214 static void
10215 local_define_builtin (const char *name, tree type, enum built_in_function code,
10216 const char *library_name, int ecf_flags)
10217 {
10218 tree decl;
10219
10220 decl = add_builtin_function (name, type, code, BUILT_IN_NORMAL,
10221 library_name, NULL_TREE);
10222 set_call_expr_flags (decl, ecf_flags);
10223
10224 set_builtin_decl (code, decl, true);
10225 }
10226
10227 /* Call this function after instantiating all builtins that the language
10228 front end cares about. This will build the rest of the builtins
10229 and internal functions that are relied upon by the tree optimizers and
10230 the middle-end. */
10231
10232 void
10233 build_common_builtin_nodes (void)
10234 {
10235 tree tmp, ftype;
10236 int ecf_flags;
10237
10238 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE))
10239 {
10240 ftype = build_function_type (void_type_node, void_list_node);
10241 local_define_builtin ("__builtin_unreachable", ftype, BUILT_IN_UNREACHABLE,
10242 "__builtin_unreachable",
10243 ECF_NOTHROW | ECF_LEAF | ECF_NORETURN
10244 | ECF_CONST);
10245 }
10246
10247 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY)
10248 || !builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
10249 {
10250 ftype = build_function_type_list (ptr_type_node,
10251 ptr_type_node, const_ptr_type_node,
10252 size_type_node, NULL_TREE);
10253
10254 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY))
10255 local_define_builtin ("__builtin_memcpy", ftype, BUILT_IN_MEMCPY,
10256 "memcpy", ECF_NOTHROW | ECF_LEAF);
10257 if (!builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
10258 local_define_builtin ("__builtin_memmove", ftype, BUILT_IN_MEMMOVE,
10259 "memmove", ECF_NOTHROW | ECF_LEAF);
10260 }
10261
10262 if (!builtin_decl_explicit_p (BUILT_IN_MEMCMP))
10263 {
10264 ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
10265 const_ptr_type_node, size_type_node,
10266 NULL_TREE);
10267 local_define_builtin ("__builtin_memcmp", ftype, BUILT_IN_MEMCMP,
10268 "memcmp", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10269 }
10270
10271 if (!builtin_decl_explicit_p (BUILT_IN_MEMSET))
10272 {
10273 ftype = build_function_type_list (ptr_type_node,
10274 ptr_type_node, integer_type_node,
10275 size_type_node, NULL_TREE);
10276 local_define_builtin ("__builtin_memset", ftype, BUILT_IN_MEMSET,
10277 "memset", ECF_NOTHROW | ECF_LEAF);
10278 }
10279
10280 if (!builtin_decl_explicit_p (BUILT_IN_ALLOCA))
10281 {
10282 ftype = build_function_type_list (ptr_type_node,
10283 size_type_node, NULL_TREE);
10284 local_define_builtin ("__builtin_alloca", ftype, BUILT_IN_ALLOCA,
10285 "alloca", ECF_MALLOC | ECF_NOTHROW | ECF_LEAF);
10286 }
10287
10288 ftype = build_function_type_list (ptr_type_node, size_type_node,
10289 size_type_node, NULL_TREE);
10290 local_define_builtin ("__builtin_alloca_with_align", ftype,
10291 BUILT_IN_ALLOCA_WITH_ALIGN,
10292 "__builtin_alloca_with_align",
10293 ECF_MALLOC | ECF_NOTHROW | ECF_LEAF);
10294
10295 /* If we're checking the stack, `alloca' can throw. */
10296 if (flag_stack_check)
10297 {
10298 TREE_NOTHROW (builtin_decl_explicit (BUILT_IN_ALLOCA)) = 0;
10299 TREE_NOTHROW (builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN)) = 0;
10300 }
10301
10302 ftype = build_function_type_list (void_type_node,
10303 ptr_type_node, ptr_type_node,
10304 ptr_type_node, NULL_TREE);
10305 local_define_builtin ("__builtin_init_trampoline", ftype,
10306 BUILT_IN_INIT_TRAMPOLINE,
10307 "__builtin_init_trampoline", ECF_NOTHROW | ECF_LEAF);
10308 local_define_builtin ("__builtin_init_heap_trampoline", ftype,
10309 BUILT_IN_INIT_HEAP_TRAMPOLINE,
10310 "__builtin_init_heap_trampoline",
10311 ECF_NOTHROW | ECF_LEAF);
10312
10313 ftype = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE);
10314 local_define_builtin ("__builtin_adjust_trampoline", ftype,
10315 BUILT_IN_ADJUST_TRAMPOLINE,
10316 "__builtin_adjust_trampoline",
10317 ECF_CONST | ECF_NOTHROW);
10318
10319 ftype = build_function_type_list (void_type_node,
10320 ptr_type_node, ptr_type_node, NULL_TREE);
10321 local_define_builtin ("__builtin_nonlocal_goto", ftype,
10322 BUILT_IN_NONLOCAL_GOTO,
10323 "__builtin_nonlocal_goto",
10324 ECF_NORETURN | ECF_NOTHROW);
10325
10326 ftype = build_function_type_list (void_type_node,
10327 ptr_type_node, ptr_type_node, NULL_TREE);
10328 local_define_builtin ("__builtin_setjmp_setup", ftype,
10329 BUILT_IN_SETJMP_SETUP,
10330 "__builtin_setjmp_setup", ECF_NOTHROW);
10331
10332 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10333 local_define_builtin ("__builtin_setjmp_receiver", ftype,
10334 BUILT_IN_SETJMP_RECEIVER,
10335 "__builtin_setjmp_receiver", ECF_NOTHROW | ECF_LEAF);
10336
10337 ftype = build_function_type_list (ptr_type_node, NULL_TREE);
10338 local_define_builtin ("__builtin_stack_save", ftype, BUILT_IN_STACK_SAVE,
10339 "__builtin_stack_save", ECF_NOTHROW | ECF_LEAF);
10340
10341 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10342 local_define_builtin ("__builtin_stack_restore", ftype,
10343 BUILT_IN_STACK_RESTORE,
10344 "__builtin_stack_restore", ECF_NOTHROW | ECF_LEAF);
10345
10346 /* If there's a possibility that we might use the ARM EABI, build the
10347 alternate __cxa_end_cleanup node used to resume from C++ and Java. */
10348 if (targetm.arm_eabi_unwinder)
10349 {
10350 ftype = build_function_type_list (void_type_node, NULL_TREE);
10351 local_define_builtin ("__builtin_cxa_end_cleanup", ftype,
10352 BUILT_IN_CXA_END_CLEANUP,
10353 "__cxa_end_cleanup", ECF_NORETURN | ECF_LEAF);
10354 }
10355
10356 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10357 local_define_builtin ("__builtin_unwind_resume", ftype,
10358 BUILT_IN_UNWIND_RESUME,
10359 ((targetm_common.except_unwind_info (&global_options)
10360 == UI_SJLJ)
10361 ? "_Unwind_SjLj_Resume" : "_Unwind_Resume"),
10362 ECF_NORETURN);
10363
10364 if (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS) == NULL_TREE)
10365 {
10366 ftype = build_function_type_list (ptr_type_node, integer_type_node,
10367 NULL_TREE);
10368 local_define_builtin ("__builtin_return_address", ftype,
10369 BUILT_IN_RETURN_ADDRESS,
10370 "__builtin_return_address",
10371 ECF_NOTHROW);
10372 }
10373
10374 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER)
10375 || !builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10376 {
10377 ftype = build_function_type_list (void_type_node, ptr_type_node,
10378 ptr_type_node, NULL_TREE);
10379 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER))
10380 local_define_builtin ("__cyg_profile_func_enter", ftype,
10381 BUILT_IN_PROFILE_FUNC_ENTER,
10382 "__cyg_profile_func_enter", 0);
10383 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10384 local_define_builtin ("__cyg_profile_func_exit", ftype,
10385 BUILT_IN_PROFILE_FUNC_EXIT,
10386 "__cyg_profile_func_exit", 0);
10387 }
10388
10389 /* The exception object and filter values from the runtime. The argument
10390 must be zero before exception lowering, i.e. from the front end. After
10391 exception lowering, it will be the region number for the exception
10392 landing pad. These functions are PURE instead of CONST to prevent
10393 them from being hoisted past the exception edge that will initialize
10394 its value in the landing pad. */
10395 ftype = build_function_type_list (ptr_type_node,
10396 integer_type_node, NULL_TREE);
10397 ecf_flags = ECF_PURE | ECF_NOTHROW | ECF_LEAF;
10398 /* Only use TM_PURE if we have TM language support. */
10399 if (builtin_decl_explicit_p (BUILT_IN_TM_LOAD_1))
10400 ecf_flags |= ECF_TM_PURE;
10401 local_define_builtin ("__builtin_eh_pointer", ftype, BUILT_IN_EH_POINTER,
10402 "__builtin_eh_pointer", ecf_flags);
10403
10404 tmp = lang_hooks.types.type_for_mode (targetm.eh_return_filter_mode (), 0);
10405 ftype = build_function_type_list (tmp, integer_type_node, NULL_TREE);
10406 local_define_builtin ("__builtin_eh_filter", ftype, BUILT_IN_EH_FILTER,
10407 "__builtin_eh_filter", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10408
10409 ftype = build_function_type_list (void_type_node,
10410 integer_type_node, integer_type_node,
10411 NULL_TREE);
10412 local_define_builtin ("__builtin_eh_copy_values", ftype,
10413 BUILT_IN_EH_COPY_VALUES,
10414 "__builtin_eh_copy_values", ECF_NOTHROW);
10415
10416 /* Complex multiplication and division. These are handled as builtins
10417 rather than optabs because emit_library_call_value doesn't support
10418 complex. Further, we can do slightly better with folding these
10419 beasties if the real and imaginary parts of the arguments are separate. */
10420 {
10421 int mode;
10422
10423 for (mode = MIN_MODE_COMPLEX_FLOAT; mode <= MAX_MODE_COMPLEX_FLOAT; ++mode)
10424 {
10425 char mode_name_buf[4], *q;
10426 const char *p;
10427 enum built_in_function mcode, dcode;
10428 tree type, inner_type;
10429 const char *prefix = "__";
10430
10431 if (targetm.libfunc_gnu_prefix)
10432 prefix = "__gnu_";
10433
10434 type = lang_hooks.types.type_for_mode ((machine_mode) mode, 0);
10435 if (type == NULL)
10436 continue;
10437 inner_type = TREE_TYPE (type);
10438
10439 ftype = build_function_type_list (type, inner_type, inner_type,
10440 inner_type, inner_type, NULL_TREE);
10441
10442 mcode = ((enum built_in_function)
10443 (BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10444 dcode = ((enum built_in_function)
10445 (BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10446
10447 for (p = GET_MODE_NAME (mode), q = mode_name_buf; *p; p++, q++)
10448 *q = TOLOWER (*p);
10449 *q = '\0';
10450
10451 built_in_names[mcode] = concat (prefix, "mul", mode_name_buf, "3",
10452 NULL);
10453 local_define_builtin (built_in_names[mcode], ftype, mcode,
10454 built_in_names[mcode],
10455 ECF_CONST | ECF_NOTHROW | ECF_LEAF);
10456
10457 built_in_names[dcode] = concat (prefix, "div", mode_name_buf, "3",
10458 NULL);
10459 local_define_builtin (built_in_names[dcode], ftype, dcode,
10460 built_in_names[dcode],
10461 ECF_CONST | ECF_NOTHROW | ECF_LEAF);
10462 }
10463 }
10464
10465 init_internal_fns ();
10466 }
10467
10468 /* HACK. GROSS. This is absolutely disgusting. I wish there was a
10469 better way.
10470
10471 If we requested a pointer to a vector, build up the pointers that
10472 we stripped off while looking for the inner type. Similarly for
10473 return values from functions.
10474
10475 The argument TYPE is the top of the chain, and BOTTOM is the
10476 new type which we will point to. */
10477
10478 tree
10479 reconstruct_complex_type (tree type, tree bottom)
10480 {
10481 tree inner, outer;
10482
10483 if (TREE_CODE (type) == POINTER_TYPE)
10484 {
10485 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10486 outer = build_pointer_type_for_mode (inner, TYPE_MODE (type),
10487 TYPE_REF_CAN_ALIAS_ALL (type));
10488 }
10489 else if (TREE_CODE (type) == REFERENCE_TYPE)
10490 {
10491 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10492 outer = build_reference_type_for_mode (inner, TYPE_MODE (type),
10493 TYPE_REF_CAN_ALIAS_ALL (type));
10494 }
10495 else if (TREE_CODE (type) == ARRAY_TYPE)
10496 {
10497 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10498 outer = build_array_type (inner, TYPE_DOMAIN (type));
10499 }
10500 else if (TREE_CODE (type) == FUNCTION_TYPE)
10501 {
10502 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10503 outer = build_function_type (inner, TYPE_ARG_TYPES (type));
10504 }
10505 else if (TREE_CODE (type) == METHOD_TYPE)
10506 {
10507 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10508 /* The build_method_type_directly() routine prepends 'this' to argument list,
10509 so we must compensate by getting rid of it. */
10510 outer
10511 = build_method_type_directly
10512 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (type))),
10513 inner,
10514 TREE_CHAIN (TYPE_ARG_TYPES (type)));
10515 }
10516 else if (TREE_CODE (type) == OFFSET_TYPE)
10517 {
10518 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10519 outer = build_offset_type (TYPE_OFFSET_BASETYPE (type), inner);
10520 }
10521 else
10522 return bottom;
10523
10524 return build_type_attribute_qual_variant (outer, TYPE_ATTRIBUTES (type),
10525 TYPE_QUALS (type));
10526 }
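/* Hypothetical example (not part of the original source): if TYPE is
   "float *" and BOTTOM is a V4SF vector type, then

     reconstruct_complex_type (float_ptr_type_node, v4sf_type)

   rebuilds the stripped pointer layer and returns "V4SF *", keeping the
   attributes and qualifiers of the original pointer type.  Here v4sf_type
   is a stand-in for a vector type built elsewhere.  */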
10527
10528 /* Returns a vector tree node given a mode (integer, vector, or BLKmode) and
10529 the inner type. */
10530 tree
10531 build_vector_type_for_mode (tree innertype, machine_mode mode)
10532 {
10533 int nunits;
10534
10535 switch (GET_MODE_CLASS (mode))
10536 {
10537 case MODE_VECTOR_INT:
10538 case MODE_VECTOR_FLOAT:
10539 case MODE_VECTOR_FRACT:
10540 case MODE_VECTOR_UFRACT:
10541 case MODE_VECTOR_ACCUM:
10542 case MODE_VECTOR_UACCUM:
10543 nunits = GET_MODE_NUNITS (mode);
10544 break;
10545
10546 case MODE_INT:
10547 /* Check that there are no leftover bits. */
10548 gcc_assert (GET_MODE_BITSIZE (mode)
10549 % TREE_INT_CST_LOW (TYPE_SIZE (innertype)) == 0);
10550
10551 nunits = GET_MODE_BITSIZE (mode)
10552 / TREE_INT_CST_LOW (TYPE_SIZE (innertype));
10553 break;
10554
10555 default:
10556 gcc_unreachable ();
10557 }
10558
10559 return make_vector_type (innertype, nunits, mode);
10560 }
10561
10562 /* Similarly, but takes the inner type and number of units, which must be
10563 a power of two. */
10564
10565 tree
10566 build_vector_type (tree innertype, int nunits)
10567 {
10568 return make_vector_type (innertype, nunits, VOIDmode);
10569 }
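/* Illustrative usage (not in the original source):

     tree v4sf = build_vector_type (float_type_node, 4);

   passes VOIDmode down to make_vector_type and lets layout_type choose the
   mode, whereas on a target providing V4SFmode a caller could instead use
   build_vector_type_for_mode (float_type_node, V4SFmode) to request the
   mode explicitly.  Both lines are only sketches of typical callers.  */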
10570
10571 /* Similarly, but builds a variant type with TYPE_VECTOR_OPAQUE set. */
10572
10573 tree
10574 build_opaque_vector_type (tree innertype, int nunits)
10575 {
10576 tree t = make_vector_type (innertype, nunits, VOIDmode);
10577 tree cand;
10578 /* We always build the non-opaque variant before the opaque one,
10579 so if it already exists, it is TYPE_NEXT_VARIANT of this one. */
10580 cand = TYPE_NEXT_VARIANT (t);
10581 if (cand
10582 && TYPE_VECTOR_OPAQUE (cand)
10583 && check_qualified_type (cand, t, TYPE_QUALS (t)))
10584 return cand;
10585 /* Otherwise build a variant type and make sure to queue it after
10586 the non-opaque type. */
10587 cand = build_distinct_type_copy (t);
10588 TYPE_VECTOR_OPAQUE (cand) = true;
10589 TYPE_CANONICAL (cand) = TYPE_CANONICAL (t);
10590 TYPE_NEXT_VARIANT (cand) = TYPE_NEXT_VARIANT (t);
10591 TYPE_NEXT_VARIANT (t) = cand;
10592 TYPE_MAIN_VARIANT (cand) = TYPE_MAIN_VARIANT (t);
10593 return cand;
10594 }
10595
10596
10597 /* Given an initializer INIT, return TRUE if INIT is zero or some
10598 aggregate of zeros. Otherwise return FALSE. */
10599 bool
10600 initializer_zerop (const_tree init)
10601 {
10602 tree elt;
10603
10604 STRIP_NOPS (init);
10605
10606 switch (TREE_CODE (init))
10607 {
10608 case INTEGER_CST:
10609 return integer_zerop (init);
10610
10611 case REAL_CST:
10612 /* ??? Note that this is not correct for C4X float formats. There,
10613 a bit pattern of all zeros is 1.0; 0.0 is encoded with the most
10614 negative exponent. */
10615 return real_zerop (init)
10616 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (init));
10617
10618 case FIXED_CST:
10619 return fixed_zerop (init);
10620
10621 case COMPLEX_CST:
10622 return integer_zerop (init)
10623 || (real_zerop (init)
10624 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_REALPART (init)))
10625 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_IMAGPART (init))));
10626
10627 case VECTOR_CST:
10628 {
10629 unsigned i;
10630 for (i = 0; i < VECTOR_CST_NELTS (init); ++i)
10631 if (!initializer_zerop (VECTOR_CST_ELT (init, i)))
10632 return false;
10633 return true;
10634 }
10635
10636 case CONSTRUCTOR:
10637 {
10638 unsigned HOST_WIDE_INT idx;
10639
10640 if (TREE_CLOBBER_P (init))
10641 return false;
10642 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init), idx, elt)
10643 if (!initializer_zerop (elt))
10644 return false;
10645 return true;
10646 }
10647
10648 case STRING_CST:
10649 {
10650 int i;
10651
10652 /* We need to loop through all elements to handle cases like
10653 "\0" and "\0foobar". */
10654 for (i = 0; i < TREE_STRING_LENGTH (init); ++i)
10655 if (TREE_STRING_POINTER (init)[i] != '\0')
10656 return false;
10657
10658 return true;
10659 }
10660
10661 default:
10662 return false;
10663 }
10664 }
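/* Hypothetical examples (not part of the original source):

     initializer_zerop (build_int_cst (integer_type_node, 0))    -- true
     initializer_zerop (build_real (double_type_node, dconst0))  -- true
     initializer_zerop (build_real (double_type_node, dconstm1)) -- false

   and a CONSTRUCTOR counts as zero only if every element does and it is
   not a clobber.  */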
10665
10666 /* Check whether vector VEC consists entirely of equal elements and
10667 whether the number of elements corresponds to the type of VEC.
10668 The function returns the first element of the vector,
10669 or NULL_TREE if the vector is not uniform. */
10670 tree
10671 uniform_vector_p (const_tree vec)
10672 {
10673 tree first, t;
10674 unsigned i;
10675
10676 if (vec == NULL_TREE)
10677 return NULL_TREE;
10678
10679 gcc_assert (VECTOR_TYPE_P (TREE_TYPE (vec)));
10680
10681 if (TREE_CODE (vec) == VECTOR_CST)
10682 {
10683 first = VECTOR_CST_ELT (vec, 0);
10684 for (i = 1; i < VECTOR_CST_NELTS (vec); ++i)
10685 if (!operand_equal_p (first, VECTOR_CST_ELT (vec, i), 0))
10686 return NULL_TREE;
10687
10688 return first;
10689 }
10690
10691 else if (TREE_CODE (vec) == CONSTRUCTOR)
10692 {
10693 first = error_mark_node;
10694
10695 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (vec), i, t)
10696 {
10697 if (i == 0)
10698 {
10699 first = t;
10700 continue;
10701 }
10702 if (!operand_equal_p (first, t, 0))
10703 return NULL_TREE;
10704 }
10705 if (i != TYPE_VECTOR_SUBPARTS (TREE_TYPE (vec)))
10706 return NULL_TREE;
10707
10708 return first;
10709 }
10710
10711 return NULL_TREE;
10712 }
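/* Illustrative sketch (not in the original source): for a VECTOR_CST whose
   elements are {7, 7, 7, 7}, uniform_vector_p returns the element for 7;
   for {1, 2, 3, 4} it returns NULL_TREE.  The same logic applies to a
   CONSTRUCTOR, provided it supplies exactly TYPE_VECTOR_SUBPARTS values.  */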
10713
10714 /* Build an empty statement at location LOC. */
10715
10716 tree
10717 build_empty_stmt (location_t loc)
10718 {
10719 tree t = build1 (NOP_EXPR, void_type_node, size_zero_node);
10720 SET_EXPR_LOCATION (t, loc);
10721 return t;
10722 }
10723
10724
10725 /* Build an OpenMP clause with code CODE. LOC is the location of the
10726 clause. */
10727
10728 tree
10729 build_omp_clause (location_t loc, enum omp_clause_code code)
10730 {
10731 tree t;
10732 int size, length;
10733
10734 length = omp_clause_num_ops[code];
10735 size = (sizeof (struct tree_omp_clause) + (length - 1) * sizeof (tree));
10736
10737 record_node_allocation_statistics (OMP_CLAUSE, size);
10738
10739 t = (tree) ggc_internal_alloc (size);
10740 memset (t, 0, size);
10741 TREE_SET_CODE (t, OMP_CLAUSE);
10742 OMP_CLAUSE_SET_CODE (t, code);
10743 OMP_CLAUSE_LOCATION (t) = loc;
10744
10745 return t;
10746 }
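/* Hypothetical usage (not part of the original source), in the shape front
   ends typically use when creating clause lists:

     tree c = build_omp_clause (loc, OMP_CLAUSE_PRIVATE);
     OMP_CLAUSE_DECL (c) = decl;
     OMP_CLAUSE_CHAIN (c) = clauses;

   where LOC, DECL and CLAUSES stand for a location, a variable and an
   existing clause chain supplied by the caller.  */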
10747
10748 /* Build a tcc_vl_exp object with code CODE and room for LEN operands. LEN
10749 includes the implicit operand count in TREE_OPERAND 0, and so must be >= 1.
10750 Except for the CODE and operand count field, other storage for the
10751 object is initialized to zeros. */
10752
10753 tree
10754 build_vl_exp_stat (enum tree_code code, int len MEM_STAT_DECL)
10755 {
10756 tree t;
10757 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_exp);
10758
10759 gcc_assert (TREE_CODE_CLASS (code) == tcc_vl_exp);
10760 gcc_assert (len >= 1);
10761
10762 record_node_allocation_statistics (code, length);
10763
10764 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
10765
10766 TREE_SET_CODE (t, code);
10767
10768 /* Can't use TREE_OPERAND to store the length because if checking is
10769 enabled, it will try to check the length before we store it. :-P */
10770 t->exp.operands[0] = build_int_cst (sizetype, len);
10771
10772 return t;
10773 }
10774
10775 /* Helper function for build_call_* functions; build a CALL_EXPR with
10776 indicated RETURN_TYPE, FN, and NARGS, but do not initialize any of
10777 the argument slots. */
10778
10779 static tree
10780 build_call_1 (tree return_type, tree fn, int nargs)
10781 {
10782 tree t;
10783
10784 t = build_vl_exp (CALL_EXPR, nargs + 3);
10785 TREE_TYPE (t) = return_type;
10786 CALL_EXPR_FN (t) = fn;
10787 CALL_EXPR_STATIC_CHAIN (t) = NULL;
10788
10789 return t;
10790 }
10791
10792 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10793 FN and a null static chain slot. NARGS is the number of call arguments
10794 which are specified as "..." arguments. */
10795
10796 tree
10797 build_call_nary (tree return_type, tree fn, int nargs, ...)
10798 {
10799 tree ret;
10800 va_list args;
10801 va_start (args, nargs);
10802 ret = build_call_valist (return_type, fn, nargs, args);
10803 va_end (args);
10804 return ret;
10805 }
10806
10807 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10808 FN and a null static chain slot. NARGS is the number of call arguments
10809 which are specified as a va_list ARGS. */
10810
10811 tree
10812 build_call_valist (tree return_type, tree fn, int nargs, va_list args)
10813 {
10814 tree t;
10815 int i;
10816
10817 t = build_call_1 (return_type, fn, nargs);
10818 for (i = 0; i < nargs; i++)
10819 CALL_EXPR_ARG (t, i) = va_arg (args, tree);
10820 process_call_operands (t);
10821 return t;
10822 }
10823
10824 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10825 FN and a null static chain slot. NARGS is the number of call arguments
10826 which are specified as a tree array ARGS. */
10827
10828 tree
10829 build_call_array_loc (location_t loc, tree return_type, tree fn,
10830 int nargs, const tree *args)
10831 {
10832 tree t;
10833 int i;
10834
10835 t = build_call_1 (return_type, fn, nargs);
10836 for (i = 0; i < nargs; i++)
10837 CALL_EXPR_ARG (t, i) = args[i];
10838 process_call_operands (t);
10839 SET_EXPR_LOCATION (t, loc);
10840 return t;
10841 }
10842
10843 /* Like build_call_array, but takes a vec. */
10844
10845 tree
10846 build_call_vec (tree return_type, tree fn, vec<tree, va_gc> *args)
10847 {
10848 tree ret, t;
10849 unsigned int ix;
10850
10851 ret = build_call_1 (return_type, fn, vec_safe_length (args));
10852 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
10853 CALL_EXPR_ARG (ret, ix) = t;
10854 process_call_operands (ret);
10855 return ret;
10856 }
10857
10858 /* Conveniently construct a function call expression. FNDECL names the
10859 function to be called and N arguments are passed in the array
10860 ARGARRAY. */
10861
10862 tree
10863 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
10864 {
10865 tree fntype = TREE_TYPE (fndecl);
10866 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10867
10868 return fold_build_call_array_loc (loc, TREE_TYPE (fntype), fn, n, argarray);
10869 }
10870
10871 /* Conveniently construct a function call expression. FNDECL names the
10872 function to be called and the arguments are passed in the vector
10873 VEC. */
10874
10875 tree
10876 build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
10877 {
10878 return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
10879 vec_safe_address (vec));
10880 }
10881
10882
10883 /* Conveniently construct a function call expression. FNDECL names the
10884 function to be called, N is the number of arguments, and the "..."
10885 parameters are the argument expressions. */
10886
10887 tree
10888 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
10889 {
10890 va_list ap;
10891 tree *argarray = XALLOCAVEC (tree, n);
10892 int i;
10893
10894 va_start (ap, n);
10895 for (i = 0; i < n; i++)
10896 argarray[i] = va_arg (ap, tree);
10897 va_end (ap);
10898 return build_call_expr_loc_array (loc, fndecl, n, argarray);
10899 }
10900
10901 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
10902 varargs macros aren't supported by all bootstrap compilers. */
10903
10904 tree
10905 build_call_expr (tree fndecl, int n, ...)
10906 {
10907 va_list ap;
10908 tree *argarray = XALLOCAVEC (tree, n);
10909 int i;
10910
10911 va_start (ap, n);
10912 for (i = 0; i < n; i++)
10913 argarray[i] = va_arg (ap, tree);
10914 va_end (ap);
10915 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
10916 }
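/* Illustrative sketch (not in the original source): building a call to a
   known builtin with three arguments,

     tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
     tree call = build_call_expr (fn, 3, dst, src, len);

   where DST, SRC and LEN are caller-supplied trees; the result is a
   CALL_EXPR, possibly simplified by fold_build_call_array_loc on the way
   through build_call_expr_loc_array.  */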
10917
10918 /* Build an internal call expression. This is just like CALL_EXPR, except
10919 its CALL_EXPR_FN is NULL. It will get gimplified later into an
10920 ordinary call to the internal function IFN. */
10921
10922 tree
10923 build_call_expr_internal_loc (location_t loc, enum internal_fn ifn,
10924 tree type, int n, ...)
10925 {
10926 va_list ap;
10927 int i;
10928
10929 tree fn = build_call_1 (type, NULL_TREE, n);
10930 va_start (ap, n);
10931 for (i = 0; i < n; i++)
10932 CALL_EXPR_ARG (fn, i) = va_arg (ap, tree);
10933 va_end (ap);
10934 SET_EXPR_LOCATION (fn, loc);
10935 CALL_EXPR_IFN (fn) = ifn;
10936 return fn;
10937 }
10938
10939 /* Create a new constant string literal and return a char* pointer to it.
10940 The STRING_CST value is the LEN characters at STR. */
10941 tree
10942 build_string_literal (int len, const char *str)
10943 {
10944 tree t, elem, index, type;
10945
10946 t = build_string (len, str);
10947 elem = build_type_variant (char_type_node, 1, 0);
10948 index = build_index_type (size_int (len - 1));
10949 type = build_array_type (elem, index);
10950 TREE_TYPE (t) = type;
10951 TREE_CONSTANT (t) = 1;
10952 TREE_READONLY (t) = 1;
10953 TREE_STATIC (t) = 1;
10954
10955 type = build_pointer_type (elem);
10956 t = build1 (ADDR_EXPR, type,
10957 build4 (ARRAY_REF, elem,
10958 t, integer_zero_node, NULL_TREE, NULL_TREE));
10959 return t;
10960 }
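/* Hypothetical example (not part of the original source):

     tree fmt = build_string_literal (strlen ("%d\n") + 1, "%d\n");

   yields an ADDR_EXPR of an ARRAY_REF into the STRING_CST, i.e. a pointer
   to the first character, suitable for instance as a format argument when
   expanding printf-like builtins.  */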
10961
10962
10963
10964 /* Return true if T (assumed to be a DECL) must be assigned a memory
10965 location. */
10966
10967 bool
10968 needs_to_live_in_memory (const_tree t)
10969 {
10970 return (TREE_ADDRESSABLE (t)
10971 || is_global_var (t)
10972 || (TREE_CODE (t) == RESULT_DECL
10973 && !DECL_BY_REFERENCE (t)
10974 && aggregate_value_p (t, current_function_decl)));
10975 }
10976
10977 /* Return value of a constant X and sign-extend it. */
10978
10979 HOST_WIDE_INT
10980 int_cst_value (const_tree x)
10981 {
10982 unsigned bits = TYPE_PRECISION (TREE_TYPE (x));
10983 unsigned HOST_WIDE_INT val = TREE_INT_CST_LOW (x);
10984
10985 /* Make sure the sign-extended value will fit in a HOST_WIDE_INT. */
10986 gcc_assert (cst_and_fits_in_hwi (x));
10987
10988 if (bits < HOST_BITS_PER_WIDE_INT)
10989 {
10990 bool negative = ((val >> (bits - 1)) & 1) != 0;
10991 if (negative)
10992 val |= (~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1;
10993 else
10994 val &= ~((~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1);
10995 }
10996
10997 return val;
10998 }
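/* Worked example (not in the original source): for an 8-bit constant whose
   low bits are 0xff,

     int_cst_value (build_int_cst (unsigned_char_type_node, 255))

   sign-extends from bit 7 and returns -1, regardless of the signedness of
   the type; callers must be prepared for that.  */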
10999
11000 /* If TYPE is an integral or pointer type, return an integer type with
11001 the same precision which is unsigned iff UNSIGNEDP is true, or itself
11002 if TYPE is already an integer type of signedness UNSIGNEDP. */
11003
11004 tree
11005 signed_or_unsigned_type_for (int unsignedp, tree type)
11006 {
11007 if (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type) == unsignedp)
11008 return type;
11009
11010 if (TREE_CODE (type) == VECTOR_TYPE)
11011 {
11012 tree inner = TREE_TYPE (type);
11013 tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
11014 if (!inner2)
11015 return NULL_TREE;
11016 if (inner == inner2)
11017 return type;
11018 return build_vector_type (inner2, TYPE_VECTOR_SUBPARTS (type));
11019 }
11020
11021 if (!INTEGRAL_TYPE_P (type)
11022 && !POINTER_TYPE_P (type)
11023 && TREE_CODE (type) != OFFSET_TYPE)
11024 return NULL_TREE;
11025
11026 return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp);
11027 }
11028
11029 /* If TYPE is an integral or pointer type, return an integer type with
11030 the same precision which is unsigned, or itself if TYPE is already an
11031 unsigned integer type. */
11032
11033 tree
11034 unsigned_type_for (tree type)
11035 {
11036 return signed_or_unsigned_type_for (1, type);
11037 }
11038
11039 /* If TYPE is an integral or pointer type, return an integer type with
11040 the same precision which is signed, or itself if TYPE is already a
11041 signed integer type. */
11042
11043 tree
11044 signed_type_for (tree type)
11045 {
11046 return signed_or_unsigned_type_for (0, type);
11047 }
11048
11049 /* If TYPE is a vector type, return a signed integer vector type with the
11050 same width and number of subparts. Otherwise return boolean_type_node. */
11051
11052 tree
11053 truth_type_for (tree type)
11054 {
11055 if (TREE_CODE (type) == VECTOR_TYPE)
11056 {
11057 tree elem = lang_hooks.types.type_for_size
11058 (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))), 0);
11059 return build_opaque_vector_type (elem, TYPE_VECTOR_SUBPARTS (type));
11060 }
11061 else
11062 return boolean_type_node;
11063 }
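/* Illustrative sketch (not part of the original source): for a vector type
   such as a hypothetical v4sf_type obtained from build_vector_type,

     tree mask_type = truth_type_for (v4sf_type);

   yields an opaque vector of four 32-bit signed integers (the natural type
   for vector comparison results), while any scalar TYPE simply yields
   boolean_type_node.  */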
11064
11065 /* Returns the largest value obtainable by casting something in INNER type to
11066 OUTER type. */
11067
11068 tree
11069 upper_bound_in_type (tree outer, tree inner)
11070 {
11071 unsigned int det = 0;
11072 unsigned oprec = TYPE_PRECISION (outer);
11073 unsigned iprec = TYPE_PRECISION (inner);
11074 unsigned prec;
11075
11076 /* Compute a unique number for every combination. */
11077 det |= (oprec > iprec) ? 4 : 0;
11078 det |= TYPE_UNSIGNED (outer) ? 2 : 0;
11079 det |= TYPE_UNSIGNED (inner) ? 1 : 0;
11080
11081 /* Determine the exponent to use. */
11082 switch (det)
11083 {
11084 case 0:
11085 case 1:
11086 /* oprec <= iprec, outer: signed, inner: don't care. */
11087 prec = oprec - 1;
11088 break;
11089 case 2:
11090 case 3:
11091 /* oprec <= iprec, outer: unsigned, inner: don't care. */
11092 prec = oprec;
11093 break;
11094 case 4:
11095 /* oprec > iprec, outer: signed, inner: signed. */
11096 prec = iprec - 1;
11097 break;
11098 case 5:
11099 /* oprec > iprec, outer: signed, inner: unsigned. */
11100 prec = iprec;
11101 break;
11102 case 6:
11103 /* oprec > iprec, outer: unsigned, inner: signed. */
11104 prec = oprec;
11105 break;
11106 case 7:
11107 /* oprec > iprec, outer: unsigned, inner: unsigned. */
11108 prec = iprec;
11109 break;
11110 default:
11111 gcc_unreachable ();
11112 }
11113
11114 return wide_int_to_tree (outer,
11115 wi::mask (prec, false, TYPE_PRECISION (outer)));
11116 }
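/* Worked example (not in the original source), assuming a target where
   short is 16 bits and int is 32 bits: casting a signed int to a signed
   short gives det == 0, so prec = oprec - 1 = 15, and

     upper_bound_in_type (short_integer_type_node, integer_type_node)

   returns 32767 (2^15 - 1).  Widening unsigned to unsigned instead selects
   prec == iprec, i.e. the maximum value of the inner type.  */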
11117
11118 /* Returns the smallest value obtainable by casting something in INNER type to
11119 OUTER type. */
11120
11121 tree
11122 lower_bound_in_type (tree outer, tree inner)
11123 {
11124 unsigned oprec = TYPE_PRECISION (outer);
11125 unsigned iprec = TYPE_PRECISION (inner);
11126
11127 /* If OUTER type is unsigned, we can definitely cast 0 to OUTER type
11128 and obtain 0. */
11129 if (TYPE_UNSIGNED (outer)
11130 /* If we are widening something of an unsigned type, OUTER type
11131 contains all values of INNER type. In particular, both INNER
11132 and OUTER types have zero in common. */
11133 || (oprec > iprec && TYPE_UNSIGNED (inner)))
11134 return build_int_cst (outer, 0);
11135 else
11136 {
11137 /* If we are widening a signed type to another signed type, we
11138 want to obtain -2^(iprec-1). If we are keeping the
11139 precision or narrowing to a signed type, we want to obtain
11140 -2^(oprec-1). */
11141 unsigned prec = oprec > iprec ? iprec : oprec;
11142 return wide_int_to_tree (outer,
11143 wi::mask (prec - 1, true,
11144 TYPE_PRECISION (outer)));
11145 }
11146 }
11147
11148 /* Return nonzero if two operands that are suitable for PHI nodes are
11149 necessarily equal. Specifically, both ARG0 and ARG1 must be either
11150 SSA_NAME or invariant. Note that this is strictly an optimization.
11151 That is, callers of this function can directly call operand_equal_p
11152 and get the same result, only slower. */
11153
11154 int
11155 operand_equal_for_phi_arg_p (const_tree arg0, const_tree arg1)
11156 {
11157 if (arg0 == arg1)
11158 return 1;
11159 if (TREE_CODE (arg0) == SSA_NAME || TREE_CODE (arg1) == SSA_NAME)
11160 return 0;
11161 return operand_equal_p (arg0, arg1, 0);
11162 }
11163
11164 /* Returns the number of zeros at the end of the binary representation of X. */
11165
11166 tree
11167 num_ending_zeros (const_tree x)
11168 {
11169 return build_int_cst (TREE_TYPE (x), wi::ctz (x));
11170 }
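/* Hypothetical example (not part of the original source): for X equal to 24
   (binary 11000),

     num_ending_zeros (build_int_cst (integer_type_node, 24))

   returns the constant 3 in TREE_TYPE (X), since wi::ctz counts the
   trailing zero bits.  */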
11171
11172
11173 #define WALK_SUBTREE(NODE) \
11174 do \
11175 { \
11176 result = walk_tree_1 (&(NODE), func, data, pset, lh); \
11177 if (result) \
11178 return result; \
11179 } \
11180 while (0)
11181
11182 /* This is a subroutine of walk_tree that walks the fields of TYPE that are
11183 to be walked whenever a type is seen in the tree.  The rest of the operands
11184 and the return value are as for walk_tree. */
11185
11186 static tree
11187 walk_type_fields (tree type, walk_tree_fn func, void *data,
11188 hash_set<tree> *pset, walk_tree_lh lh)
11189 {
11190 tree result = NULL_TREE;
11191
11192 switch (TREE_CODE (type))
11193 {
11194 case POINTER_TYPE:
11195 case REFERENCE_TYPE:
11196 case VECTOR_TYPE:
11197 /* We have to worry about mutually recursive pointers. These can't
11198 be written in C. They can in Ada. It's pathological, but
11199 there's an ACATS test (c38102a) that checks it. Deal with this
11200 by checking if we're pointing to another pointer, that one
11201 points to another pointer, that one does too, and we have no htab.
11202 If so, get a hash table. We check three levels deep to avoid
11203 the cost of the hash table if we don't need one. */
11204 if (POINTER_TYPE_P (TREE_TYPE (type))
11205 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type)))
11206 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type))))
11207 && !pset)
11208 {
11209 result = walk_tree_without_duplicates (&TREE_TYPE (type),
11210 func, data);
11211 if (result)
11212 return result;
11213
11214 break;
11215 }
11216
11217 /* ... fall through ... */
11218
11219 case COMPLEX_TYPE:
11220 WALK_SUBTREE (TREE_TYPE (type));
11221 break;
11222
11223 case METHOD_TYPE:
11224 WALK_SUBTREE (TYPE_METHOD_BASETYPE (type));
11225
11226 /* Fall through. */
11227
11228 case FUNCTION_TYPE:
11229 WALK_SUBTREE (TREE_TYPE (type));
11230 {
11231 tree arg;
11232
11233 /* We never want to walk into default arguments. */
11234 for (arg = TYPE_ARG_TYPES (type); arg; arg = TREE_CHAIN (arg))
11235 WALK_SUBTREE (TREE_VALUE (arg));
11236 }
11237 break;
11238
11239 case ARRAY_TYPE:
11240 /* Don't follow this node's type if it is a pointer, for fear
11241 that we'll have infinite recursion.  If we have a PSET, then we
11242 need not fear. */
11243 if (pset
11244 || (!POINTER_TYPE_P (TREE_TYPE (type))
11245 && TREE_CODE (TREE_TYPE (type)) != OFFSET_TYPE))
11246 WALK_SUBTREE (TREE_TYPE (type));
11247 WALK_SUBTREE (TYPE_DOMAIN (type));
11248 break;
11249
11250 case OFFSET_TYPE:
11251 WALK_SUBTREE (TREE_TYPE (type));
11252 WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type));
11253 break;
11254
11255 default:
11256 break;
11257 }
11258
11259 return NULL_TREE;
11260 }
11261
11262 /* Apply FUNC to all the sub-trees of TP in a pre-order traversal. FUNC is
11263 called with the DATA and the address of each sub-tree. If FUNC returns a
11264 non-NULL value, the traversal is stopped, and the value returned by FUNC
11265 is returned. If PSET is non-NULL it is used to record the nodes visited,
11266 and to avoid visiting a node more than once. */
11267
11268 tree
11269 walk_tree_1 (tree *tp, walk_tree_fn func, void *data,
11270 hash_set<tree> *pset, walk_tree_lh lh)
11271 {
11272 enum tree_code code;
11273 int walk_subtrees;
11274 tree result;
11275
11276 #define WALK_SUBTREE_TAIL(NODE) \
11277 do \
11278 { \
11279 tp = & (NODE); \
11280 goto tail_recurse; \
11281 } \
11282 while (0)
11283
11284 tail_recurse:
11285 /* Skip empty subtrees. */
11286 if (!*tp)
11287 return NULL_TREE;
11288
11289 /* Don't walk the same tree twice, if the user has requested
11290 that we avoid doing so. */
11291 if (pset && pset->add (*tp))
11292 return NULL_TREE;
11293
11294 /* Call the function. */
11295 walk_subtrees = 1;
11296 result = (*func) (tp, &walk_subtrees, data);
11297
11298 /* If we found something, return it. */
11299 if (result)
11300 return result;
11301
11302 code = TREE_CODE (*tp);
11303
11304 /* Even if we didn't, FUNC may have decided that there was nothing
11305 interesting below this point in the tree. */
11306 if (!walk_subtrees)
11307 {
11308 /* But we still need to check our siblings. */
11309 if (code == TREE_LIST)
11310 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
11311 else if (code == OMP_CLAUSE)
11312 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11313 else
11314 return NULL_TREE;
11315 }
11316
11317 if (lh)
11318 {
11319 result = (*lh) (tp, &walk_subtrees, func, data, pset);
11320 if (result || !walk_subtrees)
11321 return result;
11322 }
11323
11324 switch (code)
11325 {
11326 case ERROR_MARK:
11327 case IDENTIFIER_NODE:
11328 case INTEGER_CST:
11329 case REAL_CST:
11330 case FIXED_CST:
11331 case VECTOR_CST:
11332 case STRING_CST:
11333 case BLOCK:
11334 case PLACEHOLDER_EXPR:
11335 case SSA_NAME:
11336 case FIELD_DECL:
11337 case RESULT_DECL:
11338 /* None of these have subtrees other than those already walked
11339 above. */
11340 break;
11341
11342 case TREE_LIST:
11343 WALK_SUBTREE (TREE_VALUE (*tp));
11344 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
11345 break;
11346
11347 case TREE_VEC:
11348 {
11349 int len = TREE_VEC_LENGTH (*tp);
11350
11351 if (len == 0)
11352 break;
11353
11354 /* Walk all elements but the first. */
11355 while (--len)
11356 WALK_SUBTREE (TREE_VEC_ELT (*tp, len));
11357
11358 /* Now walk the first one as a tail call. */
11359 WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp, 0));
11360 }
11361
11362 case COMPLEX_CST:
11363 WALK_SUBTREE (TREE_REALPART (*tp));
11364 WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp));
11365
11366 case CONSTRUCTOR:
11367 {
11368 unsigned HOST_WIDE_INT idx;
11369 constructor_elt *ce;
11370
11371 for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (*tp), idx, &ce);
11372 idx++)
11373 WALK_SUBTREE (ce->value);
11374 }
11375 break;
11376
11377 case SAVE_EXPR:
11378 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 0));
11379
11380 case BIND_EXPR:
11381 {
11382 tree decl;
11383 for (decl = BIND_EXPR_VARS (*tp); decl; decl = DECL_CHAIN (decl))
11384 {
11385 /* Walk the DECL_INITIAL and DECL_SIZE. We don't want to walk
11386 into declarations that are just mentioned, rather than
11387 declared; they don't really belong to this part of the tree.
11388 And, we can see cycles: the initializer for a declaration
11389 can refer to the declaration itself. */
11390 WALK_SUBTREE (DECL_INITIAL (decl));
11391 WALK_SUBTREE (DECL_SIZE (decl));
11392 WALK_SUBTREE (DECL_SIZE_UNIT (decl));
11393 }
11394 WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp));
11395 }
11396
11397 case STATEMENT_LIST:
11398 {
11399 tree_stmt_iterator i;
11400 for (i = tsi_start (*tp); !tsi_end_p (i); tsi_next (&i))
11401 WALK_SUBTREE (*tsi_stmt_ptr (i));
11402 }
11403 break;
11404
11405 case OMP_CLAUSE:
11406 switch (OMP_CLAUSE_CODE (*tp))
11407 {
11408 case OMP_CLAUSE_GANG:
11409 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
11410 /* FALLTHRU */
11411
11412 case OMP_CLAUSE_DEVICE_RESIDENT:
11413 case OMP_CLAUSE_USE_DEVICE:
11414 case OMP_CLAUSE_ASYNC:
11415 case OMP_CLAUSE_WAIT:
11416 case OMP_CLAUSE_WORKER:
11417 case OMP_CLAUSE_VECTOR:
11418 case OMP_CLAUSE_NUM_GANGS:
11419 case OMP_CLAUSE_NUM_WORKERS:
11420 case OMP_CLAUSE_VECTOR_LENGTH:
11421 case OMP_CLAUSE_PRIVATE:
11422 case OMP_CLAUSE_SHARED:
11423 case OMP_CLAUSE_FIRSTPRIVATE:
11424 case OMP_CLAUSE_COPYIN:
11425 case OMP_CLAUSE_COPYPRIVATE:
11426 case OMP_CLAUSE_FINAL:
11427 case OMP_CLAUSE_IF:
11428 case OMP_CLAUSE_NUM_THREADS:
11429 case OMP_CLAUSE_SCHEDULE:
11430 case OMP_CLAUSE_UNIFORM:
11431 case OMP_CLAUSE_DEPEND:
11432 case OMP_CLAUSE_NUM_TEAMS:
11433 case OMP_CLAUSE_THREAD_LIMIT:
11434 case OMP_CLAUSE_DEVICE:
11435 case OMP_CLAUSE_DIST_SCHEDULE:
11436 case OMP_CLAUSE_SAFELEN:
11437 case OMP_CLAUSE_SIMDLEN:
11438 case OMP_CLAUSE__LOOPTEMP_:
11439 case OMP_CLAUSE__SIMDUID_:
11440 case OMP_CLAUSE__CILK_FOR_COUNT_:
11441 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 0));
11442 /* FALLTHRU */
11443
11444 case OMP_CLAUSE_INDEPENDENT:
11445 case OMP_CLAUSE_NOWAIT:
11446 case OMP_CLAUSE_ORDERED:
11447 case OMP_CLAUSE_DEFAULT:
11448 case OMP_CLAUSE_UNTIED:
11449 case OMP_CLAUSE_MERGEABLE:
11450 case OMP_CLAUSE_PROC_BIND:
11451 case OMP_CLAUSE_INBRANCH:
11452 case OMP_CLAUSE_NOTINBRANCH:
11453 case OMP_CLAUSE_FOR:
11454 case OMP_CLAUSE_PARALLEL:
11455 case OMP_CLAUSE_SECTIONS:
11456 case OMP_CLAUSE_TASKGROUP:
11457 case OMP_CLAUSE_AUTO:
11458 case OMP_CLAUSE_SEQ:
11459 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11460
11461 case OMP_CLAUSE_LASTPRIVATE:
11462 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11463 WALK_SUBTREE (OMP_CLAUSE_LASTPRIVATE_STMT (*tp));
11464 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11465
11466 case OMP_CLAUSE_COLLAPSE:
11467 {
11468 int i;
11469 for (i = 0; i < 3; i++)
11470 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11471 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11472 }
11473
11474 case OMP_CLAUSE_LINEAR:
11475 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11476 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STEP (*tp));
11477 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STMT (*tp));
11478 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11479
11480 case OMP_CLAUSE_ALIGNED:
11481 case OMP_CLAUSE_FROM:
11482 case OMP_CLAUSE_TO:
11483 case OMP_CLAUSE_MAP:
11484 case OMP_CLAUSE__CACHE_:
11485 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11486 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
11487 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11488
11489 case OMP_CLAUSE_REDUCTION:
11490 {
11491 int i;
11492 for (i = 0; i < 4; i++)
11493 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11494 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11495 }
11496
11497 default:
11498 gcc_unreachable ();
11499 }
11500 break;
11501
11502 case TARGET_EXPR:
11503 {
11504 int i, len;
11505
11506 /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
11507 But, we only want to walk once. */
11508 len = (TREE_OPERAND (*tp, 3) == TREE_OPERAND (*tp, 1)) ? 2 : 3;
11509 for (i = 0; i < len; ++i)
11510 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11511 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len));
11512 }
11513
11514 case DECL_EXPR:
11515 /* If this is a TYPE_DECL, walk into the fields of the type that it's
11516 defining. We only want to walk into these fields of a type in this
11517 case and not in the general case of a mere reference to the type.
11518
11519 The criterion is as follows: if the field can be an expression, it
11520 must be walked only here. This should be in keeping with the fields
11521 that are directly gimplified in gimplify_type_sizes in order for the
11522 mark/copy-if-shared/unmark machinery of the gimplifier to work with
11523 variable-sized types.
11524
11525 Note that DECLs get walked as part of processing the BIND_EXPR. */
11526 if (TREE_CODE (DECL_EXPR_DECL (*tp)) == TYPE_DECL)
11527 {
11528 tree *type_p = &TREE_TYPE (DECL_EXPR_DECL (*tp));
11529 if (TREE_CODE (*type_p) == ERROR_MARK)
11530 return NULL_TREE;
11531
11532 /* Call the function for the type. See if it returns anything or
11533 doesn't want us to continue. If we are to continue, walk both
11534 the normal fields and those for the declaration case. */
11535 result = (*func) (type_p, &walk_subtrees, data);
11536 if (result || !walk_subtrees)
11537 return result;
11538
11539 /* But do not walk a pointed-to type since it may itself need to
11540 be walked in the declaration case if it isn't anonymous. */
11541 if (!POINTER_TYPE_P (*type_p))
11542 {
11543 result = walk_type_fields (*type_p, func, data, pset, lh);
11544 if (result)
11545 return result;
11546 }
11547
11548 /* If this is a record type, also walk the fields. */
11549 if (RECORD_OR_UNION_TYPE_P (*type_p))
11550 {
11551 tree field;
11552
11553 for (field = TYPE_FIELDS (*type_p); field;
11554 field = DECL_CHAIN (field))
11555 {
11556 /* We'd like to look at the type of the field, but we can
11557 easily get infinite recursion. So assume it's pointed
11558 to elsewhere in the tree. Also, ignore things that
11559 aren't fields. */
11560 if (TREE_CODE (field) != FIELD_DECL)
11561 continue;
11562
11563 WALK_SUBTREE (DECL_FIELD_OFFSET (field));
11564 WALK_SUBTREE (DECL_SIZE (field));
11565 WALK_SUBTREE (DECL_SIZE_UNIT (field));
11566 if (TREE_CODE (*type_p) == QUAL_UNION_TYPE)
11567 WALK_SUBTREE (DECL_QUALIFIER (field));
11568 }
11569 }
11570
11571 /* Same for scalar types. */
11572 else if (TREE_CODE (*type_p) == BOOLEAN_TYPE
11573 || TREE_CODE (*type_p) == ENUMERAL_TYPE
11574 || TREE_CODE (*type_p) == INTEGER_TYPE
11575 || TREE_CODE (*type_p) == FIXED_POINT_TYPE
11576 || TREE_CODE (*type_p) == REAL_TYPE)
11577 {
11578 WALK_SUBTREE (TYPE_MIN_VALUE (*type_p));
11579 WALK_SUBTREE (TYPE_MAX_VALUE (*type_p));
11580 }
11581
11582 WALK_SUBTREE (TYPE_SIZE (*type_p));
11583 WALK_SUBTREE_TAIL (TYPE_SIZE_UNIT (*type_p));
11584 }
11585 /* FALLTHRU */
11586
11587 default:
11588 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
11589 {
11590 int i, len;
11591
11592 /* Walk over all the sub-trees of this operand. */
11593 len = TREE_OPERAND_LENGTH (*tp);
11594
11595 /* Go through the subtrees. We need to do this in forward order so
11596 that the scope of a FOR_EXPR is handled properly. */
11597 if (len)
11598 {
11599 for (i = 0; i < len - 1; ++i)
11600 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11601 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len - 1));
11602 }
11603 }
11604 /* If this is a type, walk the needed fields in the type. */
11605 else if (TYPE_P (*tp))
11606 return walk_type_fields (*tp, func, data, pset, lh);
11607 break;
11608 }
11609
11610 /* We didn't find what we were looking for. */
11611 return NULL_TREE;
11612
11613 #undef WALK_SUBTREE_TAIL
11614 }
11615 #undef WALK_SUBTREE
11616
11617 /* Like walk_tree, but does not walk duplicate nodes more than once. */
11618
11619 tree
11620 walk_tree_without_duplicates_1 (tree *tp, walk_tree_fn func, void *data,
11621 walk_tree_lh lh)
11622 {
11623 tree result;
11624
11625 hash_set<tree> pset;
11626 result = walk_tree_1 (tp, func, data, &pset, lh);
11627 return result;
11628 }
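
/* A minimal usage sketch (hypothetical caller, not part of this file),
   using the walk_tree_without_duplicates wrapper from tree.h: a
   walk_tree_fn that counts the ADDR_EXPR nodes in an expression.
   Returning NULL_TREE from the callback means "keep walking"; any
   non-NULL value stops the walk and is propagated to the caller.

     static tree
     count_addr_exprs_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
                         void *data)
     {
       if (TREE_CODE (*tp) == ADDR_EXPR)
         ++*(unsigned int *) data;
       return NULL_TREE;
     }

     unsigned int count = 0;
     walk_tree_without_duplicates (&expr, count_addr_exprs_r, &count);  */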
11629
11630
11631 tree
11632 tree_block (tree t)
11633 {
11634 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11635
11636 if (IS_EXPR_CODE_CLASS (c))
11637 return LOCATION_BLOCK (t->exp.locus);
11638 gcc_unreachable ();
11639 return NULL;
11640 }
11641
11642 void
11643 tree_set_block (tree t, tree b)
11644 {
11645 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11646
11647 if (IS_EXPR_CODE_CLASS (c))
11648 {
11649 if (b)
11650 t->exp.locus = COMBINE_LOCATION_DATA (line_table, t->exp.locus, b);
11651 else
11652 t->exp.locus = LOCATION_LOCUS (t->exp.locus);
11653 }
11654 else
11655 gcc_unreachable ();
11656 }
11657
11658 /* Create a nameless artificial label and put it in the current
11659 function context. The label has a location of LOC. Returns the
11660 newly created label. */
11661
11662 tree
11663 create_artificial_label (location_t loc)
11664 {
11665 tree lab = build_decl (loc,
11666 LABEL_DECL, NULL_TREE, void_type_node);
11667
11668 DECL_ARTIFICIAL (lab) = 1;
11669 DECL_IGNORED_P (lab) = 1;
11670 DECL_CONTEXT (lab) = current_function_decl;
11671 return lab;
11672 }
11673
11674 /* Given a tree, try to return a useful variable name that we can use
11675 to prefix a temporary that is being assigned the value of the tree.
11676 I.e. given <temp> = &A, return A. */
11677
11678 const char *
11679 get_name (tree t)
11680 {
11681 tree stripped_decl;
11682
11683 stripped_decl = t;
11684 STRIP_NOPS (stripped_decl);
11685 if (DECL_P (stripped_decl) && DECL_NAME (stripped_decl))
11686 return IDENTIFIER_POINTER (DECL_NAME (stripped_decl));
11687 else if (TREE_CODE (stripped_decl) == SSA_NAME)
11688 {
11689 tree name = SSA_NAME_IDENTIFIER (stripped_decl);
11690 if (!name)
11691 return NULL;
11692 return IDENTIFIER_POINTER (name);
11693 }
11694 else
11695 {
11696 switch (TREE_CODE (stripped_decl))
11697 {
11698 case ADDR_EXPR:
11699 return get_name (TREE_OPERAND (stripped_decl, 0));
11700 default:
11701 return NULL;
11702 }
11703 }
11704 }
11705
11706 /* Return true if FNTYPE has a variable argument list. */
11707
11708 bool
11709 stdarg_p (const_tree fntype)
11710 {
11711 function_args_iterator args_iter;
11712 tree n = NULL_TREE, t;
11713
11714 if (!fntype)
11715 return false;
11716
11717 FOREACH_FUNCTION_ARGS (fntype, t, args_iter)
11718 {
11719 n = t;
11720 }
11721
11722 return n != NULL_TREE && n != void_type_node;
11723 }
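
/* For example, for the type of "int f (int, ...)" the TYPE_ARG_TYPES list
   is not terminated by void_type_node, so the last N seen is the node for
   int and the result is true.  For "int g (int)" the terminating
   void_type_node is the last N and the result is false, and for an
   unprototyped "int h ()" nothing is iterated at all, so N stays
   NULL_TREE and the result is again false.  */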
11724
11725 /* Return true if FNTYPE has a prototype. */
11726
11727 bool
11728 prototype_p (const_tree fntype)
11729 {
11730 tree t;
11731
11732 gcc_assert (fntype != NULL_TREE);
11733
11734 t = TYPE_ARG_TYPES (fntype);
11735 return (t != NULL_TREE);
11736 }
11737
11738 /* If BLOCK is inlined from an __attribute__((__artificial__))
11739 routine, return a pointer to the location from where it has been
11740 called. */
11741 location_t *
11742 block_nonartificial_location (tree block)
11743 {
11744 location_t *ret = NULL;
11745
11746 while (block && TREE_CODE (block) == BLOCK
11747 && BLOCK_ABSTRACT_ORIGIN (block))
11748 {
11749 tree ao = BLOCK_ABSTRACT_ORIGIN (block);
11750
11751 while (TREE_CODE (ao) == BLOCK
11752 && BLOCK_ABSTRACT_ORIGIN (ao)
11753 && BLOCK_ABSTRACT_ORIGIN (ao) != ao)
11754 ao = BLOCK_ABSTRACT_ORIGIN (ao);
11755
11756 if (TREE_CODE (ao) == FUNCTION_DECL)
11757 {
11758 /* If AO is an artificial inline, point RET to the
11759 call site locus at which it has been inlined and continue
11760 the loop, in case AO's caller is also an artificial
11761 inline. */
11762 if (DECL_DECLARED_INLINE_P (ao)
11763 && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
11764 ret = &BLOCK_SOURCE_LOCATION (block);
11765 else
11766 break;
11767 }
11768 else if (TREE_CODE (ao) != BLOCK)
11769 break;
11770
11771 block = BLOCK_SUPERCONTEXT (block);
11772 }
11773 return ret;
11774 }
11775
11776
11777 /* If EXP is inlined from an __attribute__((__artificial__))
11778 function, return the location of the original call expression. */
11779
11780 location_t
11781 tree_nonartificial_location (tree exp)
11782 {
11783 location_t *loc = block_nonartificial_location (TREE_BLOCK (exp));
11784
11785 if (loc)
11786 return *loc;
11787 else
11788 return EXPR_LOCATION (exp);
11789 }
11790
11791
11792 /* These are the hash table functions for the hash table of OPTIMIZATION_NODE
11793 nodes. */
11794
11795 /* Return the hash code of X, an OPTIMIZATION_NODE or TARGET_OPTION_NODE. */
11796
11797 hashval_t
11798 cl_option_hasher::hash (tree x)
11799 {
11800 const_tree const t = x;
11801 const char *p;
11802 size_t i;
11803 size_t len = 0;
11804 hashval_t hash = 0;
11805
11806 if (TREE_CODE (t) == OPTIMIZATION_NODE)
11807 {
11808 p = (const char *)TREE_OPTIMIZATION (t);
11809 len = sizeof (struct cl_optimization);
11810 }
11811
11812 else if (TREE_CODE (t) == TARGET_OPTION_NODE)
11813 return cl_target_option_hash (TREE_TARGET_OPTION (t));
11814
11815 else
11816 gcc_unreachable ();
11817
11818 /* Assume most opt flags are just 0/1, some are 2-3, and a few might be
11819 something else. */
11820 for (i = 0; i < len; i++)
11821 if (p[i])
11822 hash = (hash << 4) ^ ((i << 2) | p[i]);
11823
11824 return hash;
11825 }
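
/* Worked example of the mixing loop above (illustrative only): if the
   only non-zero bytes of the option structure are at offsets 1 and 3,
   holding the values 1 and 2, the loop computes
   hash = (1 << 2) | 1 = 0x5 and then
   hash = (0x5 << 4) ^ ((3 << 2) | 2) = 0x50 ^ 0xe = 0x5e.  */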
11826
11827 /* Return nonzero if the value represented by X (an OPTIMIZATION_NODE or
11828 TARGET_OPTION_NODE) is the same as that given by Y, which is a node of
11829 the same kind. */
11830
11831 bool
11832 cl_option_hasher::equal (tree x, tree y)
11833 {
11834 const_tree const xt = x;
11835 const_tree const yt = y;
11836 const char *xp;
11837 const char *yp;
11838 size_t len;
11839
11840 if (TREE_CODE (xt) != TREE_CODE (yt))
11841 return 0;
11842
11843 if (TREE_CODE (xt) == OPTIMIZATION_NODE)
11844 {
11845 xp = (const char *)TREE_OPTIMIZATION (xt);
11846 yp = (const char *)TREE_OPTIMIZATION (yt);
11847 len = sizeof (struct cl_optimization);
11848 }
11849
11850 else if (TREE_CODE (xt) == TARGET_OPTION_NODE)
11851 {
11852 return cl_target_option_eq (TREE_TARGET_OPTION (xt),
11853 TREE_TARGET_OPTION (yt));
11854 }
11855
11856 else
11857 gcc_unreachable ();
11858
11859 return (memcmp (xp, yp, len) == 0);
11860 }
11861
11862 /* Build an OPTIMIZATION_NODE based on the options in OPTS. */
11863
11864 tree
11865 build_optimization_node (struct gcc_options *opts)
11866 {
11867 tree t;
11868
11869 /* Use the cache of optimization nodes. */
11870
11871 cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node),
11872 opts);
11873
11874 tree *slot = cl_option_hash_table->find_slot (cl_optimization_node, INSERT);
11875 t = *slot;
11876 if (!t)
11877 {
11878 /* Insert this one into the hash table. */
11879 t = cl_optimization_node;
11880 *slot = t;
11881
11882 /* Make a new node for next time round. */
11883 cl_optimization_node = make_node (OPTIMIZATION_NODE);
11884 }
11885
11886 return t;
11887 }
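
/* A typical use (hypothetical caller, not part of this file): tweak a
   local copy of the options and attach the shared node to a declaration.
   Identical option sets end up sharing one OPTIMIZATION_NODE, because the
   scratch node is only handed out when the hash lookup misses.

     struct gcc_options opts = global_options;
     opts.x_optimize_size = 1;
     DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl)
       = build_optimization_node (&opts);  */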
11888
11889 /* Build a TARGET_OPTION_NODE based on the options in OPTS. */
11890
11891 tree
11892 build_target_option_node (struct gcc_options *opts)
11893 {
11894 tree t;
11895
11896 /* Use the cache of optimization nodes. */
11897
11898 cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node),
11899 opts);
11900
11901 tree *slot = cl_option_hash_table->find_slot (cl_target_option_node, INSERT);
11902 t = *slot;
11903 if (!t)
11904 {
11905 /* Insert this one into the hash table. */
11906 t = cl_target_option_node;
11907 *slot = t;
11908
11909 /* Make a new node for next time round. */
11910 cl_target_option_node = make_node (TARGET_OPTION_NODE);
11911 }
11912
11913 return t;
11914 }
11915
11916 /* Clear TREE_TARGET_GLOBALS of all TARGET_OPTION_NODE trees,
11917 so that they aren't saved during PCH writing. */
11918
11919 void
11920 prepare_target_option_nodes_for_pch (void)
11921 {
11922 hash_table<cl_option_hasher>::iterator iter = cl_option_hash_table->begin ();
11923 for (; iter != cl_option_hash_table->end (); ++iter)
11924 if (TREE_CODE (*iter) == TARGET_OPTION_NODE)
11925 TREE_TARGET_GLOBALS (*iter) = NULL;
11926 }
11927
11928 /* Determine the "ultimate origin" of a block. The block may be an inlined
11929 instance of an inlined instance of a block which is local to an inline
11930 function, so we have to trace all of the way back through the origin chain
11931 to find out what sort of node actually served as the original seed for the
11932 given block. */
11933
11934 tree
11935 block_ultimate_origin (const_tree block)
11936 {
11937 tree immediate_origin = BLOCK_ABSTRACT_ORIGIN (block);
11938
11939 /* BLOCK_ABSTRACT_ORIGIN can point to itself; ignore that if
11940 we're trying to output the abstract instance of this function. */
11941 if (BLOCK_ABSTRACT (block) && immediate_origin == block)
11942 return NULL_TREE;
11943
11944 if (immediate_origin == NULL_TREE)
11945 return NULL_TREE;
11946 else
11947 {
11948 tree ret_val;
11949 tree lookahead = immediate_origin;
11950
11951 do
11952 {
11953 ret_val = lookahead;
11954 lookahead = (TREE_CODE (ret_val) == BLOCK
11955 ? BLOCK_ABSTRACT_ORIGIN (ret_val) : NULL);
11956 }
11957 while (lookahead != NULL && lookahead != ret_val);
11958
11959 /* The block's abstract origin chain may not be the *ultimate* origin of
11960 the block. It could lead to a DECL that has an abstract origin set.
11961 If so, we want that DECL's abstract origin (which is what DECL_ORIGIN
11962 will give us if it has one). Note that DECL's abstract origins are
11963 supposed to be the most distant ancestor (or so decl_ultimate_origin
11964 claims), so we don't need to loop following the DECL origins. */
11965 if (DECL_P (ret_val))
11966 return DECL_ORIGIN (ret_val);
11967
11968 return ret_val;
11969 }
11970 }
11971
11972 /* Return true iff conversion from INNER_TYPE to OUTER_TYPE generates
11973 no instruction. */
11974
11975 bool
11976 tree_nop_conversion_p (const_tree outer_type, const_tree inner_type)
11977 {
11978 /* Use precision rather than machine mode when we can, which gives
11979 the correct answer even for submode (bit-field) types. */
11980 if ((INTEGRAL_TYPE_P (outer_type)
11981 || POINTER_TYPE_P (outer_type)
11982 || TREE_CODE (outer_type) == OFFSET_TYPE)
11983 && (INTEGRAL_TYPE_P (inner_type)
11984 || POINTER_TYPE_P (inner_type)
11985 || TREE_CODE (inner_type) == OFFSET_TYPE))
11986 return TYPE_PRECISION (outer_type) == TYPE_PRECISION (inner_type);
11987
11988 /* Otherwise fall back on comparing machine modes (e.g. for
11989 aggregate types, floats). */
11990 return TYPE_MODE (outer_type) == TYPE_MODE (inner_type);
11991 }
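
/* For instance, a conversion between int and unsigned int (equal
   TYPE_PRECISION) is a no-op in this sense, whereas int to long on an
   LP64 target is not, and neither is float to double, which falls back
   to the mode comparison.  */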
11992
11993 /* Return true iff conversion in EXP generates no instruction. Mark
11994 it inline so that we fully inline into the stripping functions even
11995 though we have two uses of this function. */
11996
11997 static inline bool
11998 tree_nop_conversion (const_tree exp)
11999 {
12000 tree outer_type, inner_type;
12001
12002 if (!CONVERT_EXPR_P (exp)
12003 && TREE_CODE (exp) != NON_LVALUE_EXPR)
12004 return false;
12005 if (TREE_OPERAND (exp, 0) == error_mark_node)
12006 return false;
12007
12008 outer_type = TREE_TYPE (exp);
12009 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
12010
12011 if (!inner_type)
12012 return false;
12013
12014 return tree_nop_conversion_p (outer_type, inner_type);
12015 }
12016
12017 /* Return true iff conversion in EXP generates no instruction. Don't
12018 consider conversions changing the signedness. */
12019
12020 static bool
12021 tree_sign_nop_conversion (const_tree exp)
12022 {
12023 tree outer_type, inner_type;
12024
12025 if (!tree_nop_conversion (exp))
12026 return false;
12027
12028 outer_type = TREE_TYPE (exp);
12029 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
12030
12031 return (TYPE_UNSIGNED (outer_type) == TYPE_UNSIGNED (inner_type)
12032 && POINTER_TYPE_P (outer_type) == POINTER_TYPE_P (inner_type));
12033 }
12034
12035 /* Strip conversions from EXP according to tree_nop_conversion and
12036 return the resulting expression. */
12037
12038 tree
12039 tree_strip_nop_conversions (tree exp)
12040 {
12041 while (tree_nop_conversion (exp))
12042 exp = TREE_OPERAND (exp, 0);
12043 return exp;
12044 }
12045
12046 /* Strip conversions from EXP according to tree_sign_nop_conversion
12047 and return the resulting expression. */
12048
12049 tree
12050 tree_strip_sign_nop_conversions (tree exp)
12051 {
12052 while (tree_sign_nop_conversion (exp))
12053 exp = TREE_OPERAND (exp, 0);
12054 return exp;
12055 }
12056
12057 /* Avoid any floating point extensions from EXP. */
12058 tree
12059 strip_float_extensions (tree exp)
12060 {
12061 tree sub, expt, subt;
12062
12063 /* For a floating point constant, look up the narrowest type that can hold
12064 it properly and handle it like (type)(narrowest_type)constant.
12065 This way we can optimize for instance a=a*2.0 where "a" is float
12066 but 2.0 is a double constant. */
12067 if (TREE_CODE (exp) == REAL_CST && !DECIMAL_FLOAT_TYPE_P (TREE_TYPE (exp)))
12068 {
12069 REAL_VALUE_TYPE orig;
12070 tree type = NULL;
12071
12072 orig = TREE_REAL_CST (exp);
12073 if (TYPE_PRECISION (TREE_TYPE (exp)) > TYPE_PRECISION (float_type_node)
12074 && exact_real_truncate (TYPE_MODE (float_type_node), &orig))
12075 type = float_type_node;
12076 else if (TYPE_PRECISION (TREE_TYPE (exp))
12077 > TYPE_PRECISION (double_type_node)
12078 && exact_real_truncate (TYPE_MODE (double_type_node), &orig))
12079 type = double_type_node;
12080 if (type)
12081 return build_real (type, real_value_truncate (TYPE_MODE (type), orig));
12082 }
12083
12084 if (!CONVERT_EXPR_P (exp))
12085 return exp;
12086
12087 sub = TREE_OPERAND (exp, 0);
12088 subt = TREE_TYPE (sub);
12089 expt = TREE_TYPE (exp);
12090
12091 if (!FLOAT_TYPE_P (subt))
12092 return exp;
12093
12094 if (DECIMAL_FLOAT_TYPE_P (expt) != DECIMAL_FLOAT_TYPE_P (subt))
12095 return exp;
12096
12097 if (TYPE_PRECISION (subt) > TYPE_PRECISION (expt))
12098 return exp;
12099
12100 return strip_float_extensions (sub);
12101 }
12102
12103 /* Strip out all handled components that produce invariant
12104 offsets. */
12105
12106 const_tree
12107 strip_invariant_refs (const_tree op)
12108 {
12109 while (handled_component_p (op))
12110 {
12111 switch (TREE_CODE (op))
12112 {
12113 case ARRAY_REF:
12114 case ARRAY_RANGE_REF:
12115 if (!is_gimple_constant (TREE_OPERAND (op, 1))
12116 || TREE_OPERAND (op, 2) != NULL_TREE
12117 || TREE_OPERAND (op, 3) != NULL_TREE)
12118 return NULL;
12119 break;
12120
12121 case COMPONENT_REF:
12122 if (TREE_OPERAND (op, 2) != NULL_TREE)
12123 return NULL;
12124 break;
12125
12126 default:;
12127 }
12128 op = TREE_OPERAND (op, 0);
12129 }
12130
12131 return op;
12132 }
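
/* For example, for the reference a.b[2].c this returns the base object
   "a", whereas for a.b[i].c with a variable index i the offset is not
   invariant and NULL is returned.  */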
12133
12134 static GTY(()) tree gcc_eh_personality_decl;
12135
12136 /* Return the GCC personality function decl. */
12137
12138 tree
12139 lhd_gcc_personality (void)
12140 {
12141 if (!gcc_eh_personality_decl)
12142 gcc_eh_personality_decl = build_personality_function ("gcc");
12143 return gcc_eh_personality_decl;
12144 }
12145
12146 /* TARGET is a call target of a GIMPLE call statement
12147 (obtained by gimple_call_fn).  Return true if it is an
12148 OBJ_TYPE_REF representing a virtual call to a C++ method
12149 (as opposed to an OBJ_TYPE_REF representing Objective-C calls
12150 through a cast, where the middle-end devirtualization machinery
12151 can't apply). */
12152
12153 bool
12154 virtual_method_call_p (const_tree target)
12155 {
12156 if (TREE_CODE (target) != OBJ_TYPE_REF)
12157 return false;
12158 tree t = TREE_TYPE (target);
12159 gcc_checking_assert (TREE_CODE (t) == POINTER_TYPE);
12160 t = TREE_TYPE (t);
12161 if (TREE_CODE (t) == FUNCTION_TYPE)
12162 return false;
12163 gcc_checking_assert (TREE_CODE (t) == METHOD_TYPE);
12164 /* If we do not have BINFO associated, it means that type was built
12165 without devirtualization enabled. Do not consider this a virtual
12166 call. */
12167 if (!TYPE_BINFO (obj_type_ref_class (target)))
12168 return false;
12169 return true;
12170 }
12171
12172 /* REF is OBJ_TYPE_REF, return the class the ref corresponds to. */
12173
12174 tree
12175 obj_type_ref_class (const_tree ref)
12176 {
12177 gcc_checking_assert (TREE_CODE (ref) == OBJ_TYPE_REF);
12178 ref = TREE_TYPE (ref);
12179 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
12180 ref = TREE_TYPE (ref);
12181 /* We look for the type that THIS points to.  ObjC also builds
12182 OBJ_TYPE_REFs for non-method calls; their first parameter
12183 ID, however, also corresponds to the class type. */
12184 gcc_checking_assert (TREE_CODE (ref) == METHOD_TYPE
12185 || TREE_CODE (ref) == FUNCTION_TYPE);
12186 ref = TREE_VALUE (TYPE_ARG_TYPES (ref));
12187 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
12188 return TREE_TYPE (ref);
12189 }
12190
12191 /* Lookup sub-BINFO of BINFO of TYPE at offset POS. */
12192
12193 static tree
12194 lookup_binfo_at_offset (tree binfo, tree type, HOST_WIDE_INT pos)
12195 {
12196 unsigned int i;
12197 tree base_binfo, b;
12198
12199 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
12200 if (pos == tree_to_shwi (BINFO_OFFSET (base_binfo))
12201 && types_same_for_odr (TREE_TYPE (base_binfo), type))
12202 return base_binfo;
12203 else if ((b = lookup_binfo_at_offset (base_binfo, type, pos)) != NULL)
12204 return b;
12205 return NULL;
12206 }
12207
12208 /* Try to find a base info of BINFO that would have its field decl at offset
12209 OFFSET within the BINFO type and which is of EXPECTED_TYPE.  If it can be
12210 found, return it, otherwise return NULL_TREE. */
12211
12212 tree
12213 get_binfo_at_offset (tree binfo, HOST_WIDE_INT offset, tree expected_type)
12214 {
12215 tree type = BINFO_TYPE (binfo);
12216
12217 while (true)
12218 {
12219 HOST_WIDE_INT pos, size;
12220 tree fld;
12221 int i;
12222
12223 if (types_same_for_odr (type, expected_type))
12224 return binfo;
12225 if (offset < 0)
12226 return NULL_TREE;
12227
12228 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
12229 {
12230 if (TREE_CODE (fld) != FIELD_DECL || !DECL_ARTIFICIAL (fld))
12231 continue;
12232
12233 pos = int_bit_position (fld);
12234 size = tree_to_uhwi (DECL_SIZE (fld));
12235 if (pos <= offset && (pos + size) > offset)
12236 break;
12237 }
12238 if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
12239 return NULL_TREE;
12240
12241 /* Offset 0 indicates the primary base, whose vtable contents are
12242 represented in the binfo for the derived class. */
12243 else if (offset != 0)
12244 {
12245 tree found_binfo = NULL, base_binfo;
12246 /* Offsets in BINFO are in bytes relative to the whole structure
12247 while POS is in bits relative to the containing field. */
12248 int binfo_offset = (tree_to_shwi (BINFO_OFFSET (binfo)) + pos
12249 / BITS_PER_UNIT);
12250
12251 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
12252 if (tree_to_shwi (BINFO_OFFSET (base_binfo)) == binfo_offset
12253 && types_same_for_odr (TREE_TYPE (base_binfo), TREE_TYPE (fld)))
12254 {
12255 found_binfo = base_binfo;
12256 break;
12257 }
12258 if (found_binfo)
12259 binfo = found_binfo;
12260 else
12261 binfo = lookup_binfo_at_offset (binfo, TREE_TYPE (fld),
12262 binfo_offset);
12263 }
12264
12265 type = TREE_TYPE (fld);
12266 offset -= pos;
12267 }
12268 }
12269
12270 /* Returns true if X is a typedef decl. */
12271
12272 bool
12273 is_typedef_decl (const_tree x)
12274 {
12275 return (x && TREE_CODE (x) == TYPE_DECL
12276 && DECL_ORIGINAL_TYPE (x) != NULL_TREE);
12277 }
12278
12279 /* Returns true iff TYPE is a type variant created for a typedef. */
12280
12281 bool
12282 typedef_variant_p (const_tree type)
12283 {
12284 return is_typedef_decl (TYPE_NAME (type));
12285 }
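
/* For instance, after "typedef int myint;" the type node built for
   "myint" has a TYPE_NAME that is a TYPE_DECL with DECL_ORIGINAL_TYPE
   set to int, so this returns true for it, while the plain int type
   (whose TYPE_NAME has no DECL_ORIGINAL_TYPE) yields false.  */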
12286
12287 /* Warn about a use of an identifier which was marked deprecated. */
12288 void
12289 warn_deprecated_use (tree node, tree attr)
12290 {
12291 const char *msg;
12292
12293 if (node == 0 || !warn_deprecated_decl)
12294 return;
12295
12296 if (!attr)
12297 {
12298 if (DECL_P (node))
12299 attr = DECL_ATTRIBUTES (node);
12300 else if (TYPE_P (node))
12301 {
12302 tree decl = TYPE_STUB_DECL (node);
12303 if (decl)
12304 attr = lookup_attribute ("deprecated",
12305 TYPE_ATTRIBUTES (TREE_TYPE (decl)));
12306 }
12307 }
12308
12309 if (attr)
12310 attr = lookup_attribute ("deprecated", attr);
12311
12312 if (attr)
12313 msg = TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr)));
12314 else
12315 msg = NULL;
12316
12317 bool w;
12318 if (DECL_P (node))
12319 {
12320 if (msg)
12321 w = warning (OPT_Wdeprecated_declarations,
12322 "%qD is deprecated: %s", node, msg);
12323 else
12324 w = warning (OPT_Wdeprecated_declarations,
12325 "%qD is deprecated", node);
12326 if (w)
12327 inform (DECL_SOURCE_LOCATION (node), "declared here");
12328 }
12329 else if (TYPE_P (node))
12330 {
12331 tree what = NULL_TREE;
12332 tree decl = TYPE_STUB_DECL (node);
12333
12334 if (TYPE_NAME (node))
12335 {
12336 if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
12337 what = TYPE_NAME (node);
12338 else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
12339 && DECL_NAME (TYPE_NAME (node)))
12340 what = DECL_NAME (TYPE_NAME (node));
12341 }
12342
12343 if (decl)
12344 {
12345 if (what)
12346 {
12347 if (msg)
12348 w = warning (OPT_Wdeprecated_declarations,
12349 "%qE is deprecated: %s", what, msg);
12350 else
12351 w = warning (OPT_Wdeprecated_declarations,
12352 "%qE is deprecated", what);
12353 }
12354 else
12355 {
12356 if (msg)
12357 w = warning (OPT_Wdeprecated_declarations,
12358 "type is deprecated: %s", msg);
12359 else
12360 w = warning (OPT_Wdeprecated_declarations,
12361 "type is deprecated");
12362 }
12363 if (w)
12364 inform (DECL_SOURCE_LOCATION (decl), "declared here");
12365 }
12366 else
12367 {
12368 if (what)
12369 {
12370 if (msg)
12371 warning (OPT_Wdeprecated_declarations, "%qE is deprecated: %s",
12372 what, msg);
12373 else
12374 warning (OPT_Wdeprecated_declarations, "%qE is deprecated", what);
12375 }
12376 else
12377 {
12378 if (msg)
12379 warning (OPT_Wdeprecated_declarations, "type is deprecated: %s",
12380 msg);
12381 else
12382 warning (OPT_Wdeprecated_declarations, "type is deprecated");
12383 }
12384 }
12385 }
12386 }
12387
12388 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
12389 somewhere in it. */
12390
12391 bool
12392 contains_bitfld_component_ref_p (const_tree ref)
12393 {
12394 while (handled_component_p (ref))
12395 {
12396 if (TREE_CODE (ref) == COMPONENT_REF
12397 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
12398 return true;
12399 ref = TREE_OPERAND (ref, 0);
12400 }
12401
12402 return false;
12403 }
12404
12405 /* Try to determine whether a TRY_CATCH expression can fall through.
12406 This is a subroutine of block_may_fallthru. */
12407
12408 static bool
12409 try_catch_may_fallthru (const_tree stmt)
12410 {
12411 tree_stmt_iterator i;
12412
12413 /* If the TRY block can fall through, the whole TRY_CATCH can
12414 fall through. */
12415 if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
12416 return true;
12417
12418 i = tsi_start (TREE_OPERAND (stmt, 1));
12419 switch (TREE_CODE (tsi_stmt (i)))
12420 {
12421 case CATCH_EXPR:
12422 /* We expect to see a sequence of CATCH_EXPR trees, each with a
12423 catch expression and a body. The whole TRY_CATCH may fall
12424 through iff any of the catch bodies falls through. */
12425 for (; !tsi_end_p (i); tsi_next (&i))
12426 {
12427 if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
12428 return true;
12429 }
12430 return false;
12431
12432 case EH_FILTER_EXPR:
12433 /* The exception filter expression only matters if there is an
12434 exception. If the exception does not match EH_FILTER_TYPES,
12435 we will execute EH_FILTER_FAILURE, and we will fall through
12436 if that falls through. If the exception does match
12437 EH_FILTER_TYPES, the stack unwinder will continue up the
12438 stack, so we will not fall through. We don't know whether we
12439 will throw an exception which matches EH_FILTER_TYPES or not,
12440 so we just ignore EH_FILTER_TYPES and assume that we might
12441 throw an exception which doesn't match. */
12442 return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));
12443
12444 default:
12445 /* This case represents statements to be executed when an
12446 exception occurs. Those statements are implicitly followed
12447 by a RESX statement to resume execution after the exception.
12448 So in this case the TRY_CATCH never falls through. */
12449 return false;
12450 }
12451 }
12452
12453 /* Try to determine if we can fall out of the bottom of BLOCK. This guess
12454 need not be 100% accurate; simply be conservative and return true if we
12455 don't know. This is used only to avoid stupidly generating extra code.
12456 If we're wrong, we'll just delete the extra code later. */
12457
12458 bool
12459 block_may_fallthru (const_tree block)
12460 {
12461 /* This CONST_CAST is okay because expr_last returns its argument
12462 unmodified and we assign it to a const_tree. */
12463 const_tree stmt = expr_last (CONST_CAST_TREE (block));
12464
12465 switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
12466 {
12467 case GOTO_EXPR:
12468 case RETURN_EXPR:
12469 /* Easy cases. If the last statement of the block implies
12470 control transfer, then we can't fall through. */
12471 return false;
12472
12473 case SWITCH_EXPR:
12474 /* If SWITCH_LABELS is set, this is lowered, and represents a
12475 branch to a selected label and hence cannot fall through.
12476 Otherwise SWITCH_BODY is set, and the switch can fall
12477 through. */
12478 return SWITCH_LABELS (stmt) == NULL_TREE;
12479
12480 case COND_EXPR:
12481 if (block_may_fallthru (COND_EXPR_THEN (stmt)))
12482 return true;
12483 return block_may_fallthru (COND_EXPR_ELSE (stmt));
12484
12485 case BIND_EXPR:
12486 return block_may_fallthru (BIND_EXPR_BODY (stmt));
12487
12488 case TRY_CATCH_EXPR:
12489 return try_catch_may_fallthru (stmt);
12490
12491 case TRY_FINALLY_EXPR:
12492 /* The finally clause is always executed after the try clause,
12493 so if it does not fall through, then the try-finally will not
12494 fall through. Otherwise, if the try clause does not fall
12495 through, then when the finally clause falls through it will
12496 resume execution wherever the try clause was going. So the
12497 whole try-finally will only fall through if both the try
12498 clause and the finally clause fall through. */
12499 return (block_may_fallthru (TREE_OPERAND (stmt, 0))
12500 && block_may_fallthru (TREE_OPERAND (stmt, 1)));
12501
12502 case MODIFY_EXPR:
12503 if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
12504 stmt = TREE_OPERAND (stmt, 1);
12505 else
12506 return true;
12507 /* FALLTHRU */
12508
12509 case CALL_EXPR:
12510 /* Functions that do not return do not fall through. */
12511 return (call_expr_flags (stmt) & ECF_NORETURN) == 0;
12512
12513 case CLEANUP_POINT_EXPR:
12514 return block_may_fallthru (TREE_OPERAND (stmt, 0));
12515
12516 case TARGET_EXPR:
12517 return block_may_fallthru (TREE_OPERAND (stmt, 1));
12518
12519 case ERROR_MARK:
12520 return true;
12521
12522 default:
12523 return lang_hooks.block_may_fallthru (stmt);
12524 }
12525 }
12526
12527 /* True if we are using EH to handle cleanups. */
12528 static bool using_eh_for_cleanups_flag = false;
12529
12530 /* This routine is called from front ends to indicate eh should be used for
12531 cleanups. */
12532 void
12533 using_eh_for_cleanups (void)
12534 {
12535 using_eh_for_cleanups_flag = true;
12536 }
12537
12538 /* Query whether EH is used for cleanups. */
12539 bool
12540 using_eh_for_cleanups_p (void)
12541 {
12542 return using_eh_for_cleanups_flag;
12543 }
12544
12545 /* Wrapper for tree_code_name to ensure that the tree code is valid. */
12546 const char *
12547 get_tree_code_name (enum tree_code code)
12548 {
12549 const char *invalid = "<invalid tree code>";
12550
12551 if (code >= MAX_TREE_CODES)
12552 return invalid;
12553
12554 return tree_code_name[code];
12555 }
12556
12557 /* Drops the TREE_OVERFLOW flag from T. */
12558
12559 tree
12560 drop_tree_overflow (tree t)
12561 {
12562 gcc_checking_assert (TREE_OVERFLOW (t));
12563
12564 /* For tree codes with a sharing machinery re-build the result. */
12565 if (TREE_CODE (t) == INTEGER_CST)
12566 return wide_int_to_tree (TREE_TYPE (t), t);
12567
12568 /* Otherwise, as all tcc_constants are possibly shared, copy the node
12569 and drop the flag. */
12570 t = copy_node (t);
12571 TREE_OVERFLOW (t) = 0;
12572 return t;
12573 }
12574
12575 /* Given a memory reference expression T, return its base address.
12576 The base address of a memory reference expression is the main
12577 object being referenced. For instance, the base address for
12578 'array[i].fld[j]' is 'array'. You can think of this as stripping
12579 away the offset part from a memory address.
12580
12581 This function calls handled_component_p to strip away all the inner
12582 parts of the memory reference until it reaches the base object. */
12583
12584 tree
12585 get_base_address (tree t)
12586 {
12587 while (handled_component_p (t))
12588 t = TREE_OPERAND (t, 0);
12589
12590 if ((TREE_CODE (t) == MEM_REF
12591 || TREE_CODE (t) == TARGET_MEM_REF)
12592 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
12593 t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
12594
12595 /* ??? Either the alias oracle or all callers need to properly deal
12596 with WITH_SIZE_EXPRs before we can look through those. */
12597 if (TREE_CODE (t) == WITH_SIZE_EXPR)
12598 return NULL_TREE;
12599
12600 return t;
12601 }
12602
12603 /* Return a tree of sizetype representing the size, in bytes, of the element
12604 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12605
12606 tree
12607 array_ref_element_size (tree exp)
12608 {
12609 tree aligned_size = TREE_OPERAND (exp, 3);
12610 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
12611 location_t loc = EXPR_LOCATION (exp);
12612
12613 /* If a size was specified in the ARRAY_REF, it's the size measured
12614 in alignment units of the element type. So multiply by that value. */
12615 if (aligned_size)
12616 {
12617 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
12618 sizetype from another type of the same width and signedness. */
12619 if (TREE_TYPE (aligned_size) != sizetype)
12620 aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
12621 return size_binop_loc (loc, MULT_EXPR, aligned_size,
12622 size_int (TYPE_ALIGN_UNIT (elmt_type)));
12623 }
12624
12625 /* Otherwise, take the size from that of the element type. Substitute
12626 any PLACEHOLDER_EXPR that we have. */
12627 else
12628 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
12629 }
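
/* Worked example: if the element type has a TYPE_ALIGN_UNIT of 4 bytes
   and the ARRAY_REF carries an aligned size of 3 in operand 3, the
   element size is 3 * 4 = 12 bytes; without operand 3 the element
   type's TYPE_SIZE_UNIT is used directly.  */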
12630
12631 /* Return a tree representing the lower bound of the array mentioned in
12632 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12633
12634 tree
12635 array_ref_low_bound (tree exp)
12636 {
12637 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
12638
12639 /* If a lower bound is specified in EXP, use it. */
12640 if (TREE_OPERAND (exp, 2))
12641 return TREE_OPERAND (exp, 2);
12642
12643 /* Otherwise, if there is a domain type and it has a lower bound, use it,
12644 substituting for a PLACEHOLDER_EXPR as needed. */
12645 if (domain_type && TYPE_MIN_VALUE (domain_type))
12646 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
12647
12648 /* Otherwise, return a zero of the appropriate type. */
12649 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
12650 }
12651
12652 /* Return a tree representing the upper bound of the array mentioned in
12653 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12654
12655 tree
12656 array_ref_up_bound (tree exp)
12657 {
12658 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
12659
12660 /* If there is a domain type and it has an upper bound, use it, substituting
12661 for a PLACEHOLDER_EXPR as needed. */
12662 if (domain_type && TYPE_MAX_VALUE (domain_type))
12663 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
12664
12665 /* Otherwise fail. */
12666 return NULL_TREE;
12667 }
12668
12669 /* Returns true if REF is an array reference to an array at the end of
12670 a structure. If this is the case, the array may be allocated larger
12671 than its upper bound implies. */
12672
12673 bool
12674 array_at_struct_end_p (tree ref)
12675 {
12676 if (TREE_CODE (ref) != ARRAY_REF
12677 && TREE_CODE (ref) != ARRAY_RANGE_REF)
12678 return false;
12679
12680 while (handled_component_p (ref))
12681 {
12682 /* If the reference chain contains a component reference to a
12683 non-union type and there follows another field the reference
12684 is not at the end of a structure. */
12685 if (TREE_CODE (ref) == COMPONENT_REF
12686 && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 0))) == RECORD_TYPE)
12687 {
12688 tree nextf = DECL_CHAIN (TREE_OPERAND (ref, 1));
12689 while (nextf && TREE_CODE (nextf) != FIELD_DECL)
12690 nextf = DECL_CHAIN (nextf);
12691 if (nextf)
12692 return false;
12693 }
12694
12695 ref = TREE_OPERAND (ref, 0);
12696 }
12697
12698 /* If the reference is based on a declared entity, the size of the array
12699 is constrained by its given domain. */
12700 if (DECL_P (ref))
12701 return false;
12702
12703 return true;
12704 }
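
/* For example, given "struct s { int n; char data[1]; };" and a pointer
   P to such an object, the reference P->data[i] refers to the trailing
   array member and this returns true, whereas the same reference through
   a declared object "struct s v;", i.e. v.data[i], is DECL-based and
   false is returned.  */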
12705
12706 /* Return a tree representing the offset, in bytes, of the field referenced
12707 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
12708
12709 tree
12710 component_ref_field_offset (tree exp)
12711 {
12712 tree aligned_offset = TREE_OPERAND (exp, 2);
12713 tree field = TREE_OPERAND (exp, 1);
12714 location_t loc = EXPR_LOCATION (exp);
12715
12716 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
12717 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
12718 value. */
12719 if (aligned_offset)
12720 {
12721 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
12722 sizetype from another type of the same width and signedness. */
12723 if (TREE_TYPE (aligned_offset) != sizetype)
12724 aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
12725 return size_binop_loc (loc, MULT_EXPR, aligned_offset,
12726 size_int (DECL_OFFSET_ALIGN (field)
12727 / BITS_PER_UNIT));
12728 }
12729
12730 /* Otherwise, take the offset from that of the field. Substitute
12731 any PLACEHOLDER_EXPR that we have. */
12732 else
12733 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
12734 }
12735
12736 /* Return the machine mode of T. For vectors, returns the mode of the
12737 inner type. The main use case is to feed the result to HONOR_NANS,
12738 avoiding the BLKmode that a direct TYPE_MODE (T) might return. */
12739
12740 machine_mode
12741 element_mode (const_tree t)
12742 {
12743 if (!TYPE_P (t))
12744 t = TREE_TYPE (t);
12745 if (VECTOR_TYPE_P (t) || TREE_CODE (t) == COMPLEX_TYPE)
12746 t = TREE_TYPE (t);
12747 return TYPE_MODE (t);
12748 }
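
/* For instance, for a vector of four floats (or a value of that type)
   this returns SFmode rather than the V4SFmode of the whole vector, and
   for a complex double it returns DFmode, so checks like HONOR_NANS see
   the scalar component mode.  */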
12749
12750
12751 /* Verify that basic properties of T match TV and thus T can be a variant of
12752 TV.  TV should be the more specified variant (i.e. the main variant). */
12753
12754 static bool
12755 verify_type_variant (const_tree t, tree tv)
12756 {
12757 /* Type variant can differ by:
12758
12759 - TYPE_QUALS: TYPE_READONLY, TYPE_VOLATILE, TYPE_ATOMIC, TYPE_RESTRICT,
12760 ENCODE_QUAL_ADDR_SPACE.
12761 - main variant may be TYPE_COMPLETE_P and variant types !TYPE_COMPLETE_P;
12762 in this case some values may not be set in the variant types
12763 (see TYPE_COMPLETE_P checks).
12764 - it is possible to have a TYPE_ARTIFICIAL variant of a non-artificial type
12765 - by TYPE_NAME and attributes (i.e. when the variant originates from a typedef)
12766 - TYPE_CANONICAL (TYPE_ALIAS_SET is the same among variants)
12767 - by the alignment: TYPE_ALIGN and TYPE_USER_ALIGN
12768 - during LTO by TYPE_CONTEXT if type is TYPE_FILE_SCOPE_P
12769 this is necessary to make it possible to merge types from different TUs
12770 - arrays, pointers and references may have TREE_TYPE that is a variant
12771 of TREE_TYPE of their main variants.
12772 - aggregates may have a new TYPE_FIELDS list that lists variants of
12773 the main variant TYPE_FIELDS.
12774 - vector types may differ by TYPE_VECTOR_OPAQUE
12775 - TYPE_METHODS is always NULL for variant types and maintained for
12776 main variant only.
12777 */
12778
12779 /* Convenience macro for matching individual fields. */
12780 #define verify_variant_match(flag) \
12781 do { \
12782 if (flag (tv) != flag (t)) \
12783 { \
12784 error ("type variant differs by " #flag "."); \
12785 debug_tree (tv); \
12786 return false; \
12787 } \
12788 } while (false)
12789
12790 /* tree_base checks. */
12791
12792 verify_variant_match (TREE_CODE);
12793 /* FIXME: Ada builds non-artificial variants of artificial types. */
12794 if (TYPE_ARTIFICIAL (tv) && 0)
12795 verify_variant_match (TYPE_ARTIFICIAL);
12796 if (POINTER_TYPE_P (tv))
12797 verify_variant_match (TYPE_REF_CAN_ALIAS_ALL);
12798 /* FIXME: TYPE_SIZES_GIMPLIFIED may differ for Ada builds. */
12799 verify_variant_match (TYPE_UNSIGNED);
12800 verify_variant_match (TYPE_ALIGN_OK);
12801 verify_variant_match (TYPE_PACKED);
12802 if (TREE_CODE (t) == REFERENCE_TYPE)
12803 verify_variant_match (TYPE_REF_IS_RVALUE);
12804 verify_variant_match (TYPE_SATURATING);
12805 /* FIXME: This check triggers during the libstdc++ build. */
12806 if (RECORD_OR_UNION_TYPE_P (t) && COMPLETE_TYPE_P (t) && 0)
12807 verify_variant_match (TYPE_FINAL_P);
12808
12809 /* tree_type_common checks. */
12810
12811 if (COMPLETE_TYPE_P (t))
12812 {
12813 verify_variant_match (TYPE_SIZE);
12814 verify_variant_match (TYPE_MODE);
12815 if (TYPE_SIZE_UNIT (t) != TYPE_SIZE_UNIT (tv)
12816 /* FIXME: ideally we should compare pointer equality, but the Java FE
12817 produces variants where the size is an INTEGER_CST of a different type
12818 (int wrt size_type) during the libjava build. */
12819 && !operand_equal_p (TYPE_SIZE_UNIT (t), TYPE_SIZE_UNIT (tv), 0))
12820 {
12821 error ("type variant has different TYPE_SIZE_UNIT");
12822 debug_tree (tv);
12823 error ("type variant's TYPE_SIZE_UNIT");
12824 debug_tree (TYPE_SIZE_UNIT (tv));
12825 error ("type's TYPE_SIZE_UNIT");
12826 debug_tree (TYPE_SIZE_UNIT (t));
12827 return false;
12828 }
12829 }
12830 verify_variant_match (TYPE_PRECISION);
12831 verify_variant_match (TYPE_NEEDS_CONSTRUCTING);
12832 if (RECORD_OR_UNION_TYPE_P (t))
12833 verify_variant_match (TYPE_TRANSPARENT_AGGR);
12834 else if (TREE_CODE (t) == ARRAY_TYPE)
12835 verify_variant_match (TYPE_NONALIASED_COMPONENT);
12836 /* During LTO we merge variant lists from different translation units
12837 that may differ by TYPE_CONTEXT, which in turn may point
12838 to TRANSLATION_UNIT_DECL.
12839 Ada also builds variants of types with different TYPE_CONTEXT. */
12840 if ((!in_lto_p || !TYPE_FILE_SCOPE_P (t)) && 0)
12841 verify_variant_match (TYPE_CONTEXT);
12842 verify_variant_match (TYPE_STRING_FLAG);
12843 if (TYPE_ALIAS_SET_KNOWN_P (t) && TYPE_ALIAS_SET_KNOWN_P (tv))
12844 verify_variant_match (TYPE_ALIAS_SET);
12845
12846 /* tree_type_non_common checks. */
12847
12848 /* FIXME: the C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
12849 and dangles the pointer from time to time. */
12850 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_VFIELD (t) != TYPE_VFIELD (tv)
12851 && (in_lto_p || !TYPE_VFIELD (tv)
12852 || TREE_CODE (TYPE_VFIELD (tv)) != TREE_LIST))
12853 {
12854 error ("type variant has different TYPE_VFIELD");
12855 debug_tree (tv);
12856 return false;
12857 }
12858 if ((TREE_CODE (t) == ENUMERAL_TYPE && COMPLETE_TYPE_P (t))
12859 || TREE_CODE (t) == INTEGER_TYPE
12860 || TREE_CODE (t) == BOOLEAN_TYPE
12861 || TREE_CODE (t) == REAL_TYPE
12862 || TREE_CODE (t) == FIXED_POINT_TYPE)
12863 {
12864 verify_variant_match (TYPE_MAX_VALUE);
12865 verify_variant_match (TYPE_MIN_VALUE);
12866 }
12867 if (TREE_CODE (t) == METHOD_TYPE)
12868 verify_variant_match (TYPE_METHOD_BASETYPE);
12869 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_METHODS (t))
12870 {
12871 error ("type variant has TYPE_METHODS");
12872 debug_tree (tv);
12873 return false;
12874 }
12875 if (TREE_CODE (t) == OFFSET_TYPE)
12876 verify_variant_match (TYPE_OFFSET_BASETYPE);
12877 if (TREE_CODE (t) == ARRAY_TYPE)
12878 verify_variant_match (TYPE_ARRAY_MAX_SIZE);
12879 /* FIXME: Be lax and allow TYPE_BINFO to be missing in variant types
12880 or even in the type's main variant.  This is needed to make bootstrap pass
12881 and the bug seems new in GCC 5.
12882 The C++ FE should be updated to make this consistent and we should check
12883 that TYPE_BINFO is always NULL for !COMPLETE_TYPE_P and otherwise there
12884 is a match with the main variant.
12885
12886 Also disable the check for Java for now because of a parser hack that builds
12887 a dummy BINFO first and then sometimes replaces it with the real BINFO in some
12888 of the copies. */
12889 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t) && TYPE_BINFO (tv)
12890 && TYPE_BINFO (t) != TYPE_BINFO (tv)
12891 /* FIXME: Java sometimes keeps dummy TYPE_BINFOs on variant types.
12892 Since there is no cheap way to tell C++ and Java types apart w/o LTO,
12893 do the checking at LTO time only. */
12894 && (in_lto_p && odr_type_p (t)))
12895 {
12896 error ("type variant has different TYPE_BINFO");
12897 debug_tree (tv);
12898 error ("type variant's TYPE_BINFO");
12899 debug_tree (TYPE_BINFO (tv));
12900 error ("type's TYPE_BINFO");
12901 debug_tree (TYPE_BINFO (t));
12902 return false;
12903 }
12904
12905 /* Check various uses of TYPE_VALUES_RAW. */
12906 if (TREE_CODE (t) == ENUMERAL_TYPE)
12907 verify_variant_match (TYPE_VALUES);
12908 else if (TREE_CODE (t) == ARRAY_TYPE)
12909 verify_variant_match (TYPE_DOMAIN);
12910 /* Permit incomplete variants of complete type. While FEs may complete
12911 all variants, this does not happen for C++ templates in all cases. */
12912 else if (RECORD_OR_UNION_TYPE_P (t)
12913 && COMPLETE_TYPE_P (t)
12914 && TYPE_FIELDS (t) != TYPE_FIELDS (tv))
12915 {
12916 tree f1, f2;
12917
12918 /* Fortran builds qualified variants as new records with items of
12919 qualified type.  Verify that they look the same. */
12920 for (f1 = TYPE_FIELDS (t), f2 = TYPE_FIELDS (tv);
12921 f1 && f2;
12922 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
12923 if (TREE_CODE (f1) != FIELD_DECL || TREE_CODE (f2) != FIELD_DECL
12924 || (TYPE_MAIN_VARIANT (TREE_TYPE (f1))
12925 != TYPE_MAIN_VARIANT (TREE_TYPE (f2))
12926 /* FIXME: gfc_nonrestricted_type builds all types as variants
12927 with exception of pointer types. It deeply copies the type
12928 which means that we may end up with a variant type
12929 referring non-variant pointer. We may change it to
12930 produce types as variants, too, like
12931 objc_get_protocol_qualified_type does. */
12932 && !POINTER_TYPE_P (TREE_TYPE (f1)))
12933 || DECL_FIELD_OFFSET (f1) != DECL_FIELD_OFFSET (f2)
12934 || DECL_FIELD_BIT_OFFSET (f1) != DECL_FIELD_BIT_OFFSET (f2))
12935 break;
12936 if (f1 || f2)
12937 {
12938 error ("type variant has different TYPE_FIELDS");
12939 debug_tree (tv);
12940 error ("first mismatch is field");
12941 debug_tree (f1);
12942 error ("and field");
12943 debug_tree (f2);
12944 return false;
12945 }
12946 }
12947 else if ((TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE))
12948 verify_variant_match (TYPE_ARG_TYPES);
12949 /* For C++ the qualified variant of array type is really an array type
12950 of qualified TREE_TYPE.
12951 objc builds variants of pointer where pointer to type is a variant, too
12952 in objc_get_protocol_qualified_type. */
12953 if (TREE_TYPE (t) != TREE_TYPE (tv)
12954 && ((TREE_CODE (t) != ARRAY_TYPE
12955 && !POINTER_TYPE_P (t))
12956 || TYPE_MAIN_VARIANT (TREE_TYPE (t))
12957 != TYPE_MAIN_VARIANT (TREE_TYPE (tv))))
12958 {
12959 error ("type variant has different TREE_TYPE");
12960 debug_tree (tv);
12961 error ("type variant's TREE_TYPE");
12962 debug_tree (TREE_TYPE (tv));
12963 error ("type's TREE_TYPE");
12964 debug_tree (TREE_TYPE (t));
12965 return false;
12966 }
12967 if (type_with_alias_set_p (t)
12968 && !gimple_canonical_types_compatible_p (t, tv, false))
12969 {
12970 error ("type is not compatible with its vairant");
12971 debug_tree (tv);
12972 error ("type variant's TREE_TYPE");
12973 debug_tree (TREE_TYPE (tv));
12974 error ("type's TREE_TYPE");
12975 debug_tree (TREE_TYPE (t));
12976 return false;
12977 }
12978 return true;
12979 #undef verify_variant_match
12980 }
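/* A rough illustration: in C, "const int" is a qualified variant of "int";
   both share the same TYPE_MAIN_VARIANT, and verify_type_variant requires such
   a variant to agree with its main variant on TYPE_METHODS, TYPE_BINFO,
   TYPE_VALUES_RAW (TYPE_VALUES, TYPE_DOMAIN, TYPE_FIELDS, TYPE_ARG_TYPES) and
   TREE_TYPE, modulo the frontend-specific exceptions noted in the FIXMEs.  */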
12981
12982
12983 /* The TYPE_CANONICAL merging machinery. It should closely resemble
12984 the middle-end types_compatible_p function. It needs to avoid
12985 claiming types are different for types that should be treated
12986 the same with respect to TBAA. Canonical types are also used
12987 for IL consistency checks via the useless_type_conversion_p
12988 predicate which does not handle all type kinds itself but falls
12989 back to pointer-comparison of TYPE_CANONICAL for aggregates
12990 for example. */
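/* A rough illustration of the goal: two structurally identical records coming
   from different translation units, e.g.

	struct pt { int x; int y; };	(TU 1)
	struct pt { int x; int y; };	(TU 2)

   are expected to receive the same TYPE_CANONICAL when merged by LTO, so that
   TBAA treats accesses through either copy as conflicting instead of wrongly
   disambiguating them.  */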
12991
12992 /* Return true iff T1 and T2 are structurally identical as far as
12993 TBAA is concerned.
12994 This function is used both by lto.c canonical type merging and by the
12995 verifier.  If TRUST_TYPE_CANONICAL is true, we do not look into the structure
12996 of types that have TYPE_CANONICAL defined and assume them equivalent. */
12997
12998 bool
12999 gimple_canonical_types_compatible_p (const_tree t1, const_tree t2,
13000 bool trust_type_canonical)
13001 {
13002 /* Type variants should be the same as the main variant.  When not doing
13003 sanity checking to verify this fact, go to the main variants and save some work. */
13004 if (trust_type_canonical)
13005 {
13006 t1 = TYPE_MAIN_VARIANT (t1);
13007 t2 = TYPE_MAIN_VARIANT (t2);
13008 }
13009
13010 /* Check first for the obvious case of pointer identity. */
13011 if (t1 == t2)
13012 return true;
13013
13014 /* Check that we have two types to compare. */
13015 if (t1 == NULL_TREE || t2 == NULL_TREE)
13016 return false;
13017
13018 /* We consider complete types always compatible with incomplete types.
13019 This does not make sense for canonical type calculation and thus we
13020 need to ensure that we are never called on it.
13021 
13022 FIXME: For more correctness the function probably should have three modes
13023 1) mode assuming that types are complete matching their structure
13024 2) mode allowing incomplete types but producing equivalence classes
13025 and thus ignoring all info from complete types
13026 3) mode allowing incomplete types to match complete but checking
13027 compatibility between complete types.
13028 
13029 1 and 2 can be used for canonical type calculation.  3 is the real
13030 definition of type compatibility that can be used e.g. for warnings during
13031 declaration merging. */
13032
13033 gcc_assert (!trust_type_canonical
13034 || (type_with_alias_set_p (t1) && type_with_alias_set_p (t2)));
13035 /* If the types have been previously registered and found equal
13036 they still are. */
13037 if (TYPE_CANONICAL (t1) && TYPE_CANONICAL (t2)
13038 && trust_type_canonical)
13039 return TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2);
13040
13041 /* Can't be the same type if the types don't have the same code. */
13042 if (tree_code_for_canonical_type_merging (TREE_CODE (t1))
13043 != tree_code_for_canonical_type_merging (TREE_CODE (t2)))
13044 return false;
13045
13046 /* Qualifiers do not matter for canonical type comparison purposes. */
13047
13048 /* Void types and nullptr types are always the same. */
13049 if (TREE_CODE (t1) == VOID_TYPE
13050 || TREE_CODE (t1) == NULLPTR_TYPE)
13051 return true;
13052
13053 /* Can't be the same type if they have different modes. */
13054 if (TYPE_MODE (t1) != TYPE_MODE (t2))
13055 return false;
13056
13057 /* Non-aggregate types can be handled cheaply. */
13058 if (INTEGRAL_TYPE_P (t1)
13059 || SCALAR_FLOAT_TYPE_P (t1)
13060 || FIXED_POINT_TYPE_P (t1)
13061 || TREE_CODE (t1) == VECTOR_TYPE
13062 || TREE_CODE (t1) == COMPLEX_TYPE
13063 || TREE_CODE (t1) == OFFSET_TYPE
13064 || POINTER_TYPE_P (t1))
13065 {
13066 /* Can't be the same type if they have different sign or precision. */
13067 if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2)
13068 || TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
13069 return false;
13070
13071 /* Fortran's C_SIGNED_CHAR is !TYPE_STRING_FLAG but needs to be
13072 interoperable with "signed char". Unless all frontends are revisited
13073 to agree on these types, we must ignore the flag completely. */
13074
13075 /* The Fortran standard defines a C_PTR type that is compatible with every
13076 C pointer.  For this reason we need to glob all pointers into one.
13077 Still, pointers in different address spaces are not compatible. */
13078 if (POINTER_TYPE_P (t1))
13079 {
13080 if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
13081 != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
13082 return false;
13083 }
13084
13085 /* Tail-recurse to components. */
13086 if (TREE_CODE (t1) == VECTOR_TYPE
13087 || TREE_CODE (t1) == COMPLEX_TYPE)
13088 return gimple_canonical_types_compatible_p (TREE_TYPE (t1),
13089 TREE_TYPE (t2),
13090 trust_type_canonical);
13091
13092 return true;
13093 }
13094
13095 /* Do type-specific comparisons. */
13096 switch (TREE_CODE (t1))
13097 {
13098 case ARRAY_TYPE:
13099 /* Array types are the same if the element types are the same and
13100 the number of elements is the same. */
13101 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
13102 trust_type_canonical)
13103 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
13104 || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
13105 return false;
13106 else
13107 {
13108 tree i1 = TYPE_DOMAIN (t1);
13109 tree i2 = TYPE_DOMAIN (t2);
13110
13111 /* For an incomplete external array, the type domain can be
13112 NULL_TREE. Check this condition also. */
13113 if (i1 == NULL_TREE && i2 == NULL_TREE)
13114 return true;
13115 else if (i1 == NULL_TREE || i2 == NULL_TREE)
13116 return false;
13117 else
13118 {
13119 tree min1 = TYPE_MIN_VALUE (i1);
13120 tree min2 = TYPE_MIN_VALUE (i2);
13121 tree max1 = TYPE_MAX_VALUE (i1);
13122 tree max2 = TYPE_MAX_VALUE (i2);
13123
13124 /* The minimum/maximum values have to be the same. */
13125 if ((min1 == min2
13126 || (min1 && min2
13127 && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
13128 && TREE_CODE (min2) == PLACEHOLDER_EXPR)
13129 || operand_equal_p (min1, min2, 0))))
13130 && (max1 == max2
13131 || (max1 && max2
13132 && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
13133 && TREE_CODE (max2) == PLACEHOLDER_EXPR)
13134 || operand_equal_p (max1, max2, 0)))))
13135 return true;
13136 else
13137 return false;
13138 }
13139 }
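/* Note that the bounds above are considered matching when both are
   PLACEHOLDER_EXPRs; such self-referential bounds arise e.g. for arrays whose
   size depends on a field of the enclosing record, so the expressions
   themselves are not compared.  */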
13140
13141 case METHOD_TYPE:
13142 case FUNCTION_TYPE:
13143 /* Function types are the same if the return type and argument types
13144 are the same. */
13145 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
13146 trust_type_canonical))
13147 return false;
13148
13149 if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
13150 return true;
13151 else
13152 {
13153 tree parms1, parms2;
13154
13155 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
13156 parms1 && parms2;
13157 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
13158 {
13159 if (!gimple_canonical_types_compatible_p
13160 (TREE_VALUE (parms1), TREE_VALUE (parms2),
13161 trust_type_canonical))
13162 return false;
13163 }
13164
13165 if (parms1 || parms2)
13166 return false;
13167
13168 return true;
13169 }
13170
13171 case RECORD_TYPE:
13172 case UNION_TYPE:
13173 case QUAL_UNION_TYPE:
13174 {
13175 tree f1, f2;
13176
13177 /* For aggregate types, all the fields must be the same. */
13178 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
13179 f1 || f2;
13180 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
13181 {
13182 /* Skip non-fields. */
13183 while (f1 && TREE_CODE (f1) != FIELD_DECL)
13184 f1 = TREE_CHAIN (f1);
13185 while (f2 && TREE_CODE (f2) != FIELD_DECL)
13186 f2 = TREE_CHAIN (f2);
13187 if (!f1 || !f2)
13188 break;
13189 /* The fields must have the same offset, addressability and type. */
13190 if (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
13191 || !gimple_compare_field_offset (f1, f2)
13192 || !gimple_canonical_types_compatible_p
13193 (TREE_TYPE (f1), TREE_TYPE (f2),
13194 trust_type_canonical))
13195 return false;
13196 }
13197
13198 /* If one aggregate has more fields than the other, they
13199 are not the same. */
13200 if (f1 || f2)
13201 return false;
13202
13203 return true;
13204 }
13205
13206 default:
13207 /* Consider all types with language specific trees in them mutually
13208 compatible. This is executed only from verify_type and false
13209 positives can be tolerated. */
13210 gcc_assert (!in_lto_p);
13211 return true;
13212 }
13213 }
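/* Some informal consequences of the rules above, for illustration only:
   "int" and "unsigned int" are not canonically compatible because
   TYPE_UNSIGNED differs; "int *" and "float *" are compatible because all
   pointers within one address space are globbed together; "int[2]" and
   "int[3]" are not compatible because their TYPE_DOMAINs differ.  */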
13214
13215 /* Verify type T. */
13216
13217 void
13218 verify_type (const_tree t)
13219 {
13220 bool error_found = false;
13221 tree mv = TYPE_MAIN_VARIANT (t);
13222 if (!mv)
13223 {
13224 error ("Main variant is not defined");
13225 error_found = true;
13226 }
13227 else if (mv != TYPE_MAIN_VARIANT (mv))
13228 {
13229 error ("TYPE_MAIN_VARIANT has different TYPE_MAIN_VARIANT");
13230 debug_tree (mv);
13231 error_found = true;
13232 }
13233 else if (t != mv && !verify_type_variant (t, mv))
13234 error_found = true;
13235
13236 tree ct = TYPE_CANONICAL (t);
13237 if (!ct)
13238 ;
13239 else if (TYPE_CANONICAL (ct) != ct)
13240 {
13241 error ("TYPE_CANONICAL has different TYPE_CANONICAL");
13242 debug_tree (ct);
13243 error_found = true;
13244 }
13245 /* Method and function types cannot be used to address memory and thus
13246 TYPE_CANONICAL really matters only for determining useless conversions.
13247 
13248 FIXME: The C++ FE produces declarations of builtin functions that are not
13249 compatible with the main variants. */
13250 else if (TREE_CODE (t) == FUNCTION_TYPE)
13251 ;
13252 else if (t != ct
13253 /* FIXME: gimple_canonical_types_compatible_p can not compare types
13254 with variably sized arrays because their sizes possibly
13255 gimplified to different variables. */
13256 && !variably_modified_type_p (ct, NULL)
13257 && !gimple_canonical_types_compatible_p (t, ct, false))
13258 {
13259 error ("TYPE_CANONICAL is not compatible");
13260 debug_tree (ct);
13261 error_found = true;
13262 }
13263
13264
13265 /* Check various uses of TYPE_MINVAL. */
13266 if (RECORD_OR_UNION_TYPE_P (t))
13267 {
13268 /* FIXME: The C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
13269 and dangles the pointer from time to time. */
13270 if (TYPE_VFIELD (t)
13271 && TREE_CODE (TYPE_VFIELD (t)) != FIELD_DECL
13272 && TREE_CODE (TYPE_VFIELD (t)) != TREE_LIST)
13273 {
13274 error ("TYPE_VFIELD is not FIELD_DECL nor TREE_LIST");
13275 debug_tree (TYPE_VFIELD (t));
13276 error_found = true;
13277 }
13278 }
13279 else if (TREE_CODE (t) == POINTER_TYPE)
13280 {
13281 if (TYPE_NEXT_PTR_TO (t)
13282 && TREE_CODE (TYPE_NEXT_PTR_TO (t)) != POINTER_TYPE)
13283 {
13284 error ("TYPE_NEXT_PTR_TO is not POINTER_TYPE");
13285 debug_tree (TYPE_NEXT_PTR_TO (t));
13286 error_found = true;
13287 }
13288 }
13289 else if (TREE_CODE (t) == REFERENCE_TYPE)
13290 {
13291 if (TYPE_NEXT_REF_TO (t)
13292 && TREE_CODE (TYPE_NEXT_REF_TO (t)) != REFERENCE_TYPE)
13293 {
13294 error ("TYPE_NEXT_REF_TO is not REFERENCE_TYPE");
13295 debug_tree (TYPE_NEXT_REF_TO (t));
13296 error_found = true;
13297 }
13298 }
13299 else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
13300 || TREE_CODE (t) == FIXED_POINT_TYPE)
13301 {
13302 /* FIXME: The following check should pass:
13303 useless_type_conversion_p (const_cast <tree> (t),
13304 TREE_TYPE (TYPE_MIN_VALUE (t)))
13305 but does not for C sizetypes in LTO. */
13306 }
13307 /* Java uses TYPE_MINVAL for TYPE_ARGUMENT_SIGNATURE. */
13308 else if (TYPE_MINVAL (t)
13309 && ((TREE_CODE (t) != METHOD_TYPE && TREE_CODE (t) != FUNCTION_TYPE)
13310 || in_lto_p))
13311 {
13312 error ("TYPE_MINVAL non-NULL");
13313 debug_tree (TYPE_MINVAL (t));
13314 error_found = true;
13315 }
13316
13317 /* Check various uses of TYPE_MAXVAL. */
13318 if (RECORD_OR_UNION_TYPE_P (t))
13319 {
13320 if (TYPE_METHODS (t) && TREE_CODE (TYPE_METHODS (t)) != FUNCTION_DECL
13321 && TREE_CODE (TYPE_METHODS (t)) != TEMPLATE_DECL
13322 && TYPE_METHODS (t) != error_mark_node)
13323 {
13324 error ("TYPE_METHODS is not FUNCTION_DECL, TEMPLATE_DECL nor error_mark_node");
13325 debug_tree (TYPE_METHODS (t));
13326 error_found = true;
13327 }
13328 }
13329 else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
13330 {
13331 if (TYPE_METHOD_BASETYPE (t)
13332 && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != RECORD_TYPE
13333 && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != UNION_TYPE)
13334 {
13335 error ("TYPE_METHOD_BASETYPE is not record nor union");
13336 debug_tree (TYPE_METHOD_BASETYPE (t));
13337 error_found = true;
13338 }
13339 }
13340 else if (TREE_CODE (t) == OFFSET_TYPE)
13341 {
13342 if (TYPE_OFFSET_BASETYPE (t)
13343 && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != RECORD_TYPE
13344 && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != UNION_TYPE)
13345 {
13346 error ("TYPE_OFFSET_BASETYPE is not record nor union");
13347 debug_tree (TYPE_OFFSET_BASETYPE (t));
13348 error_found = true;
13349 }
13350 }
13351 else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
13352 || TREE_CODE (t) == FIXED_POINT_TYPE)
13353 {
13354 /* FIXME: The following check should pass:
13355 useless_type_conversion_p (const_cast <tree> (t),
13356 TREE_TYPE (TYPE_MAX_VALUE (t)))
13357 but does not for C sizetypes in LTO. */
13358 }
13359 else if (TREE_CODE (t) == ARRAY_TYPE)
13360 {
13361 if (TYPE_ARRAY_MAX_SIZE (t)
13362 && TREE_CODE (TYPE_ARRAY_MAX_SIZE (t)) != INTEGER_CST)
13363 {
13364 error ("TYPE_ARRAY_MAX_SIZE not INTEGER_CST");
13365 debug_tree (TYPE_ARRAY_MAX_SIZE (t));
13366 error_found = true;
13367 }
13368 }
13369 else if (TYPE_MAXVAL (t))
13370 {
13371 error ("TYPE_MAXVAL non-NULL");
13372 debug_tree (TYPE_MAXVAL (t));
13373 error_found = true;
13374 }
13375
13376 /* Check various uses of TYPE_BINFO. */
13377 if (RECORD_OR_UNION_TYPE_P (t))
13378 {
13379 if (!TYPE_BINFO (t))
13380 ;
13381 else if (TREE_CODE (TYPE_BINFO (t)) != TREE_BINFO)
13382 {
13383 error ("TYPE_BINFO is not TREE_BINFO");
13384 debug_tree (TYPE_BINFO (t));
13385 error_found = true;
13386 }
13387 /* FIXME: Java builds invalid empty binfos that do not have
13388 TREE_TYPE set. */
13389 else if (TREE_TYPE (TYPE_BINFO (t)) != TYPE_MAIN_VARIANT (t) && 0)
13390 {
13391 error ("TYPE_BINFO type is not TYPE_MAIN_VARIANT");
13392 debug_tree (TREE_TYPE (TYPE_BINFO (t)));
13393 error_found = true;
13394 }
13395 }
13396 else if (TYPE_LANG_SLOT_1 (t) && in_lto_p)
13397 {
13398 error ("TYPE_LANG_SLOT_1 (binfo) field is non-NULL");
13399 debug_tree (TYPE_LANG_SLOT_1 (t));
13400 error_found = true;
13401 }
13402
13403 /* Check various uses of TYPE_VALUES_RAW. */
13404 if (TREE_CODE (t) == ENUMERAL_TYPE)
13405 for (tree l = TYPE_VALUES (t); l; l = TREE_CHAIN (l))
13406 {
13407 tree value = TREE_VALUE (l);
13408 tree name = TREE_PURPOSE (l);
13409
13410 /* The C FE produces an INTEGER_CST of INTEGER_TYPE, while the C++ FE uses
13411 a CONST_DECL of ENUMERAL_TYPE. */
13412 if (TREE_CODE (value) != INTEGER_CST && TREE_CODE (value) != CONST_DECL)
13413 {
13414 error ("Enum value is not CONST_DECL or INTEGER_CST");
13415 debug_tree (value);
13416 debug_tree (name);
13417 error_found = true;
13418 }
13419 if (TREE_CODE (TREE_TYPE (value)) != INTEGER_TYPE
13420 && !useless_type_conversion_p (const_cast <tree> (t), TREE_TYPE (value)))
13421 {
13422 error ("Enum value type is not INTEGER_TYPE nor convertible to the enum");
13423 debug_tree (value);
13424 debug_tree (name);
13425 error_found = true;
13426 }
13427 if (TREE_CODE (name) != IDENTIFIER_NODE)
13428 {
13429 error ("Enum value name is not IDENTIFIER_NODE");
13430 debug_tree (value);
13431 debug_tree (name);
13432 error_found = true;
13433 }
13434 }
13435 else if (TREE_CODE (t) == ARRAY_TYPE)
13436 {
13437 if (TYPE_DOMAIN (t) && TREE_CODE (TYPE_DOMAIN (t)) != INTEGER_TYPE)
13438 {
13439 error ("Array TYPE_DOMAIN is not integer type");
13440 debug_tree (TYPE_DOMAIN (t));
13441 error_found = true;
13442 }
13443 }
13444 else if (RECORD_OR_UNION_TYPE_P (t))
13445 for (tree fld = TYPE_FIELDS (t); fld; fld = TREE_CHAIN (fld))
13446 {
13447 /* TODO: verify properties of decls. */
13448 if (TREE_CODE (fld) == FIELD_DECL)
13449 ;
13450 else if (TREE_CODE (fld) == TYPE_DECL)
13451 ;
13452 else if (TREE_CODE (fld) == CONST_DECL)
13453 ;
13454 else if (TREE_CODE (fld) == VAR_DECL)
13455 ;
13456 else if (TREE_CODE (fld) == TEMPLATE_DECL)
13457 ;
13458 else if (TREE_CODE (fld) == USING_DECL)
13459 ;
13460 else
13461 {
13462 error ("Wrong tree in TYPE_FIELDS list");
13463 debug_tree (fld);
13464 error_found = true;
13465 }
13466 }
13467 else if (TREE_CODE (t) == INTEGER_TYPE
13468 || TREE_CODE (t) == BOOLEAN_TYPE
13469 || TREE_CODE (t) == OFFSET_TYPE
13470 || TREE_CODE (t) == REFERENCE_TYPE
13471 || TREE_CODE (t) == NULLPTR_TYPE
13472 || TREE_CODE (t) == POINTER_TYPE)
13473 {
13474 if (TYPE_CACHED_VALUES_P (t) != (TYPE_CACHED_VALUES (t) != NULL))
13475 {
13476 error ("TYPE_CACHED_VALUES_P is %i while TYPE_CACHED_VALUES is %p",
13477 TYPE_CACHED_VALUES_P (t), (void *)TYPE_CACHED_VALUES (t));
13478 error_found = true;
13479 }
13480 else if (TYPE_CACHED_VALUES_P (t) && TREE_CODE (TYPE_CACHED_VALUES (t)) != TREE_VEC)
13481 {
13482 error ("TYPE_CACHED_VALUES is not TREE_VEC");
13483 debug_tree (TYPE_CACHED_VALUES (t));
13484 error_found = true;
13485 }
13486 /* Verify just enough of the cache to ensure that no one copied it to a new
13487 type.  All copying should go through copy_node, which should clear it. */
13488 else if (TYPE_CACHED_VALUES_P (t))
13489 {
13490 int i;
13491 for (i = 0; i < TREE_VEC_LENGTH (TYPE_CACHED_VALUES (t)); i++)
13492 if (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)
13493 && TREE_TYPE (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)) != t)
13494 {
13495 error ("wrong TYPE_CACHED_VALUES entry");
13496 debug_tree (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i));
13497 error_found = true;
13498 break;
13499 }
13500 }
13501 }
13502 else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
13503 for (tree l = TYPE_ARG_TYPES (t); l; l = TREE_CHAIN (l))
13504 {
13505 /* C++ FE uses TREE_PURPOSE to store initial values. */
13506 if (TREE_PURPOSE (l) && in_lto_p)
13507 {
13508 error ("TREE_PURPOSE is non-NULL in TYPE_ARG_TYPES list");
13509 debug_tree (l);
13510 error_found = true;
13511 }
13512 if (!TYPE_P (TREE_VALUE (l)))
13513 {
13514 error ("Wrong entry in TYPE_ARG_TYPES list");
13515 debug_tree (l);
13516 error_found = true;
13517 }
13518 }
13519 else if (!is_lang_specific (t) && TYPE_VALUES_RAW (t))
13520 {
13521 error ("TYPE_VALUES_RAW field is non-NULL");
13522 debug_tree (TYPE_VALUES_RAW (t));
13523 error_found = true;
13524 }
13525 if (TREE_CODE (t) != INTEGER_TYPE
13526 && TREE_CODE (t) != BOOLEAN_TYPE
13527 && TREE_CODE (t) != OFFSET_TYPE
13528 && TREE_CODE (t) != REFERENCE_TYPE
13529 && TREE_CODE (t) != NULLPTR_TYPE
13530 && TREE_CODE (t) != POINTER_TYPE
13531 && TYPE_CACHED_VALUES_P (t))
13532 {
13533 error ("TYPE_CACHED_VALUES_P is set while it should not");
13534 error_found = true;
13535 }
13536 if (TYPE_STRING_FLAG (t)
13537 && TREE_CODE (t) != ARRAY_TYPE && TREE_CODE (t) != INTEGER_TYPE)
13538 {
13539 error ("TYPE_STRING_FLAG is set on wrong type code");
13540 error_found = true;
13541 }
13542 else if (TYPE_STRING_FLAG (t))
13543 {
13544 const_tree b = t;
13545 if (TREE_CODE (b) == ARRAY_TYPE)
13546 b = TREE_TYPE (t);
13547 /* Java builds arrays with TYPE_STRING_FLAG of promoted_char_type,
13548 which is 32 bits. */
13549 if (TREE_CODE (b) != INTEGER_TYPE)
13550 {
13551 error ("TYPE_STRING_FLAG is set on type that does not look like "
13552 "char nor array of chars");
13553 error_found = true;
13554 }
13555 }
13556
13557 /* ipa-devirt assumes that TYPE_METHOD_BASETYPE is always
13558 the TYPE_MAIN_VARIANT and it would be odd to add methods only to variants
13559 of a type. */
13560 if (TREE_CODE (t) == METHOD_TYPE
13561 && TYPE_MAIN_VARIANT (TYPE_METHOD_BASETYPE (t)) != TYPE_METHOD_BASETYPE (t))
13562 {
13563 error ("TYPE_METHOD_BASETYPE is not main variant");
13564 error_found = true;
13565 }
13566
13567 if (error_found)
13568 {
13569 debug_tree (const_cast <tree> (t));
13570 internal_error ("verify_type failed");
13571 }
13572 }
13573
13574 #include "gt-tree.h"